http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/bucket_map_join_tez1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket_map_join_tez1.q.out b/ql/src/test/results/clientpositive/spark/bucket_map_join_tez1.q.out
index 3165970..2fc9a3d 100644
--- a/ql/src/test/results/clientpositive/spark/bucket_map_join_tez1.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket_map_join_tez1.q.out
@@ -2662,7 +2662,7 @@ STAGE PLANS:
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 242 Data size: 2566 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: key (type: int), value (type: string)
+                      expressions: key (type: int), substr(value, 5) (type: string)
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 242 Data size: 2566 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
@@ -2703,22 +2703,18 @@ STAGE PLANS:
                   1 _col0 (type: int)
                 outputColumnNames: _col1, _col2
                 Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col1 (type: int), substr(_col2, 5) (type: string)
+                Group By Operator
+                  aggregations: sum(_col2)
+                  keys: _col1 (type: int)
+                  mode: hash
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                  Group By Operator
-                    aggregations: sum(_col1)
-                    keys: _col0 (type: int)
-                    mode: hash
-                    outputColumnNames: _col0, _col1
+                  Reduce Output Operator
+                    key expressions: _col0 (type: int)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: int)
                     Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: int)
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: int)
-                      Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                      value expressions: _col1 (type: double)
+                    value expressions: _col1 (type: double)
         Reducer 5 
             Execution mode: vectorized
             Reduce Operator Tree:
@@ -2827,7 +2823,7 @@ STAGE PLANS:
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 242 Data size: 2566 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: key (type: int), value (type: string)
+                      expressions: key (type: int), substr(value, 5) (type: string)
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 242 Data size: 2566 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
@@ -2868,22 +2864,18 @@ STAGE PLANS:
                   1 _col0 (type: int)
                 outputColumnNames: _col1, _col2
                 Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col1 (type: int), substr(_col2, 5) (type: string)
+                Group By Operator
+                  aggregations: sum(_col2)
+                  keys: _col1 (type: int)
+                  mode: hash
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                  Group By Operator
-                    aggregations: sum(_col1)
-                    keys: _col0 (type: int)
-                    mode: hash
-                    outputColumnNames: _col0, _col1
+                  Reduce Output Operator
+                    key expressions: _col0 (type: int)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: int)
                     Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: int)
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: int)
-                      Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                      value expressions: _col1 (type: double)
+                    value expressions: _col1 (type: double)
         Reducer 5 
             Execution mode: vectorized
             Reduce Operator Tree:
@@ -2973,7 +2965,7 @@ STAGE PLANS:
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 242 Data size: 2566 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: key (type: int), value (type: string)
+                      expressions: key (type: int), substr(value, 5) (type: string)
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 242 Data size: 2566 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
@@ -3033,22 +3025,18 @@ STAGE PLANS:
                   1 _col0 (type: int)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 266 Data size: 2822 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col0 (type: int), substr(_col1, 5) (type: string)
+                Group By Operator
+                  aggregations: sum(_col1)
+                  keys: _col0 (type: int)
+                  mode: hash
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 266 Data size: 2822 Basic stats: COMPLETE Column stats: NONE
-                  Group By Operator
-                    aggregations: sum(_col1)
-                    keys: _col0 (type: int)
-                    mode: hash
-                    outputColumnNames: _col0, _col1
+                  Reduce Output Operator
+                    key expressions: _col0 (type: int)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: int)
                     Statistics: Num rows: 266 Data size: 2822 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: int)
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: int)
-                      Statistics: Num rows: 266 Data size: 2822 Basic stats: COMPLETE Column stats: NONE
-                      value expressions: _col1 (type: double)
+                    value expressions: _col1 (type: double)
         Reducer 5 
             Execution mode: vectorized
             Reduce Operator Tree:
@@ -3138,7 +3126,7 @@ STAGE PLANS:
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 242 Data size: 2566 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: key (type: int), value (type: string)
+                      expressions: key (type: int), substr(value, 5) (type: string)
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 242 Data size: 2566 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
@@ -3198,22 +3186,18 @@ STAGE PLANS:
                   1 _col0 (type: int)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 266 Data size: 2822 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col0 (type: int), substr(_col1, 5) (type: string)
+                Group By Operator
+                  aggregations: sum(_col1)
+                  keys: _col0 (type: int)
+                  mode: hash
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 266 Data size: 2822 Basic stats: COMPLETE Column stats: NONE
-                  Group By Operator
-                    aggregations: sum(_col1)
-                    keys: _col0 (type: int)
-                    mode: hash
-                    outputColumnNames: _col0, _col1
+                  Reduce Output Operator
+                    key expressions: _col0 (type: int)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: int)
                     Statistics: Num rows: 266 Data size: 2822 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: int)
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: int)
-                      Statistics: Num rows: 266 Data size: 2822 Basic stats: COMPLETE Column stats: NONE
-                      value expressions: _col1 (type: double)
+                    value expressions: _col1 (type: double)
         Reducer 5 
             Execution mode: vectorized
             Reduce Operator Tree:

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/bucket_map_join_tez2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket_map_join_tez2.q.out b/ql/src/test/results/clientpositive/spark/bucket_map_join_tez2.q.out
index 2e2e5f8..aecb230 100644
--- a/ql/src/test/results/clientpositive/spark/bucket_map_join_tez2.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket_map_join_tez2.q.out
@@ -480,13 +480,13 @@ STAGE PLANS:
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 242 Data size: 2566 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: key (type: int), value (type: string)
-                      outputColumnNames: _col0, _col1
+                      expressions: key (type: int), value (type: string), UDFToDouble(key) (type: double)
+                      outputColumnNames: _col0, _col1, _col2
                       Statistics: Num rows: 242 Data size: 2566 Basic stats: COMPLETE Column stats: NONE
                       Spark HashTable Sink Operator
                         keys:
-                          0 UDFToDouble(_col0) (type: double)
-                          1 UDFToDouble(_col0) (type: double)
+                          0 _col2 (type: double)
+                          1 _col1 (type: double)
             Execution mode: vectorized
             Local Work:
               Map Reduce Local Work
@@ -505,15 +505,15 @@ STAGE PLANS:
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: key (type: string), value (type: string)
+                      expressions: value (type: string), UDFToDouble(key) (type: double)
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                       Map Join Operator
                         condition map:
                              Inner Join 0 to 1
                         keys:
-                          0 UDFToDouble(_col0) (type: double)
-                          1 UDFToDouble(_col0) (type: double)
+                          0 _col2 (type: double)
+                          1 _col1 (type: double)
                         outputColumnNames: _col0, _col1, _col3
                         input vertices:
                           0 Map 1
@@ -573,13 +573,13 @@ STAGE PLANS:
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 242 Data size: 2566 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: key (type: int), value (type: string)
-                      outputColumnNames: _col0, _col1
+                      expressions: key (type: int), value (type: string), UDFToDouble(key) (type: double)
+                      outputColumnNames: _col0, _col1, _col2
                       Statistics: Num rows: 242 Data size: 2566 Basic stats: COMPLETE Column stats: NONE
                       Spark HashTable Sink Operator
                         keys:
-                          0 UDFToDouble(_col0) (type: double)
-                          1 UDFToDouble(_col0) (type: double)
+                          0 _col2 (type: double)
+                          1 _col1 (type: double)
             Execution mode: vectorized
             Local Work:
               Map Reduce Local Work
@@ -598,15 +598,15 @@ STAGE PLANS:
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: key (type: string), value (type: string)
+                      expressions: value (type: string), UDFToDouble(key) (type: double)
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                       Map Join Operator
                         condition map:
                              Inner Join 0 to 1
                         keys:
-                          0 UDFToDouble(_col0) (type: double)
-                          1 UDFToDouble(_col0) (type: double)
+                          0 _col2 (type: double)
+                          1 _col1 (type: double)
                         outputColumnNames: _col0, _col1, _col3
                         input vertices:
                           0 Map 1
@@ -1408,10 +1408,14 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0
                 Statistics: Num rows: 121 Data size: 1283 Basic stats: COMPLETE Column stats: NONE
-                Spark HashTable Sink Operator
-                  keys:
-                    0 UDFToDouble(_col0) (type: double)
-                    1 UDFToDouble(_col0) (type: double)
+                Select Operator
+                  expressions: _col0 (type: string), UDFToDouble(_col0) (type: double)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 121 Data size: 1283 Basic stats: COMPLETE Column stats: NONE
+                  Spark HashTable Sink Operator
+                    keys:
+                      0 _col1 (type: double)
+                      1 _col1 (type: double)
 
   Stage: Stage-1
     Spark
@@ -1427,15 +1431,15 @@ STAGE PLANS:
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 242 Data size: 2566 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: key (type: int), value (type: string)
+                      expressions: value (type: string), UDFToDouble(key) (type: double)
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 242 Data size: 2566 Basic stats: COMPLETE Column stats: NONE
                       Map Join Operator
                         condition map:
                              Inner Join 0 to 1
                         keys:
-                          0 UDFToDouble(_col0) (type: double)
-                          1 UDFToDouble(_col0) (type: double)
+                          0 _col1 (type: double)
+                          1 _col1 (type: double)
                         outputColumnNames: _col0, _col2
                         input vertices:
                           0 Reducer 2
@@ -1513,10 +1517,14 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0
                 Statistics: Num rows: 121 Data size: 1283 Basic stats: COMPLETE Column stats: NONE
-                Spark HashTable Sink Operator
-                  keys:
-                    0 UDFToDouble(_col0) (type: double)
-                    1 UDFToDouble(_col0) (type: double)
+                Select Operator
+                  expressions: _col0 (type: string), UDFToDouble(_col0) (type: double)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 121 Data size: 1283 Basic stats: COMPLETE Column stats: NONE
+                  Spark HashTable Sink Operator
+                    keys:
+                      0 _col1 (type: double)
+                      1 _col1 (type: double)
 
   Stage: Stage-1
     Spark
@@ -1532,15 +1540,15 @@ STAGE PLANS:
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 242 Data size: 2566 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: key (type: int), value (type: string)
+                      expressions: value (type: string), UDFToDouble(key) (type: double)
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 242 Data size: 2566 Basic stats: COMPLETE Column stats: NONE
                       Map Join Operator
                         condition map:
                              Inner Join 0 to 1
                         keys:
-                          0 UDFToDouble(_col0) (type: double)
-                          1 UDFToDouble(_col0) (type: double)
+                          0 _col1 (type: double)
+                          1 _col1 (type: double)
                         outputColumnNames: _col0, _col2
                         input vertices:
                           0 Reducer 2
@@ -2386,7 +2394,7 @@ STAGE PLANS:
                     Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                     Select Operator
                       expressions: bucket_col (type: string), join_col (type: string), accounting_period (type: string)
-                      outputColumnNames: _col0, _col1, _col3
+                      outputColumnNames: _col0, _col1, _col2
                       Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                       Spark HashTable Sink Operator
                         keys:
@@ -2422,13 +2430,13 @@ STAGE PLANS:
                         keys:
                           0 _col1 (type: string)
                           1 _col0 (type: string)
-                        outputColumnNames: _col0, _col3, _col4
+                        outputColumnNames: _col0, _col2, _col3
                         input vertices:
                           0 Map 1
                         Position of Big Table: 1
                        Statistics: Num rows: 4 Data size: 761 Basic stats: PARTIAL Column stats: NONE
                         Select Operator
-                          expressions: _col0 (type: string), _col4 (type: string), _col3 (type: string)
+                          expressions: _col0 (type: string), _col3 (type: string), _col2 (type: string)
                           outputColumnNames: _col0, _col1, _col2
                           Statistics: Num rows: 4 Data size: 761 Basic stats: PARTIAL Column stats: NONE
                           File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/bucketmapjoin1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin1.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin1.q.out
index b1f4d51..e3160d8 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin1.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin1.q.out
@@ -42,7 +42,7 @@ OPTIMIZED SQL: SELECT `t0`.`key`, `t0`.`value`, `t2`.`value` AS `value1`
 FROM (SELECT `key`, `value`
 FROM `default`.`srcbucket_mapjoin_part_n1`
 WHERE `key` IS NOT NULL) AS `t0`
-INNER JOIN (SELECT `key`, `value`, CAST('2008-04-08' AS STRING) AS `ds`
+INNER JOIN (SELECT `key`, `value`
 FROM `default`.`srcbucket_mapjoin_part_2_n1`
 WHERE `ds` = '2008-04-08' AND `key` IS NOT NULL) AS `t2` ON `t0`.`key` = `t2`.`key`
 STAGE DEPENDENCIES:
@@ -146,7 +146,7 @@ OPTIMIZED SQL: SELECT `t0`.`key`, `t0`.`value`, `t2`.`value` AS `value1`
 FROM (SELECT `key`, `value`
 FROM `default`.`srcbucket_mapjoin_part_n1`
 WHERE `key` IS NOT NULL) AS `t0`
-INNER JOIN (SELECT `key`, `value`, CAST('2008-04-08' AS STRING) AS `ds`
+INNER JOIN (SELECT `key`, `value`
 FROM `default`.`srcbucket_mapjoin_part_2_n1`
 WHERE `ds` = '2008-04-08' AND `key` IS NOT NULL) AS `t2` ON `t0`.`key` = `t2`.`key`
 STAGE DEPENDENCIES:
@@ -344,7 +344,7 @@ OPTIMIZED SQL: SELECT `t0`.`key`, `t0`.`value`, `t2`.`value` AS `value1`
 FROM (SELECT `key`, `value`
 FROM `default`.`srcbucket_mapjoin_n1`
 WHERE `key` IS NOT NULL) AS `t0`
-INNER JOIN (SELECT `key`, `value`, CAST('2008-04-08' AS STRING) AS `ds`
+INNER JOIN (SELECT `key`, `value`
 FROM `default`.`srcbucket_mapjoin_part_n1`
 WHERE `ds` = '2008-04-08' AND `key` IS NOT NULL) AS `t2` ON `t0`.`key` = `t2`.`key`
 STAGE DEPENDENCIES:
@@ -722,7 +722,7 @@ OPTIMIZED SQL: SELECT `t0`.`key`, `t0`.`value`, `t2`.`value` AS `value1`
 FROM (SELECT `key`, `value`
 FROM `default`.`srcbucket_mapjoin_n1`
 WHERE `key` IS NOT NULL) AS `t0`
-INNER JOIN (SELECT `key`, `value`, CAST('2008-04-08' AS STRING) AS `ds`
+INNER JOIN (SELECT `key`, `value`
 FROM `default`.`srcbucket_mapjoin_part_n1`
 WHERE `ds` = '2008-04-08' AND `key` IS NOT NULL) AS `t2` ON `t0`.`key` = `t2`.`key`
 STAGE DEPENDENCIES:

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/bucketmapjoin2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin2.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin2.q.out
index ff292dd..5d154a1 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin2.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin2.q.out
@@ -114,7 +114,7 @@ OPTIMIZED SQL: SELECT `t0`.`key`, `t0`.`value`, `t2`.`value` AS `value1`
 FROM (SELECT `key`, `value`
 FROM `default`.`srcbucket_mapjoin_part_n6`
 WHERE `key` IS NOT NULL) AS `t0`
-INNER JOIN (SELECT `key`, `value`, CAST('2008-04-08' AS STRING) AS `ds`
+INNER JOIN (SELECT `key`, `value`
 FROM `default`.`srcbucket_mapjoin_part_2_n5`
 WHERE `ds` = '2008-04-08' AND `key` IS NOT NULL) AS `t2` ON `t0`.`key` = `t2`.`key`
 STAGE DEPENDENCIES:
@@ -499,7 +499,7 @@ OPTIMIZED SQL: SELECT `t0`.`key`, `t0`.`value`, `t2`.`value` AS `value1`
 FROM (SELECT `key`, `value`
 FROM `default`.`srcbucket_mapjoin_part_n6`
 WHERE `key` IS NOT NULL) AS `t0`
-INNER JOIN (SELECT `key`, `value`, CAST('2008-04-08' AS STRING) AS `ds`
+INNER JOIN (SELECT `key`, `value`
 FROM `default`.`srcbucket_mapjoin_part_2_n5`
 WHERE `ds` = '2008-04-08' AND `key` IS NOT NULL) AS `t2` ON `t0`.`key` = `t2`.`key`
 STAGE DEPENDENCIES:

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out
index 899cfef..272c4c1 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin3.q.out
@@ -135,10 +135,10 @@ POSTHOOK: Input: default@srcbucket_mapjoin_part_n13
 POSTHOOK: Input: default@srcbucket_mapjoin_part_n13@ds=2008-04-08
 POSTHOOK: Output: default@bucketmapjoin_tmp_result_n6
 OPTIMIZED SQL: SELECT `t0`.`key`, `t0`.`value`, `t2`.`value` AS `value1`
-FROM (SELECT `key`, `value`, CAST('2008-04-08' AS STRING) AS `ds`
+FROM (SELECT `key`, `value`
 FROM `default`.`srcbucket_mapjoin_part_2_n11`
 WHERE `ds` = '2008-04-08' AND `key` IS NOT NULL) AS `t0`
-INNER JOIN (SELECT `key`, `value`, CAST('2008-04-08' AS STRING) AS `ds`
+INNER JOIN (SELECT `key`, `value`
 FROM `default`.`srcbucket_mapjoin_part_n13`
 WHERE `ds` = '2008-04-08' AND `key` IS NOT NULL) AS `t2` ON `t0`.`key` = `t2`.`key`
 STAGE DEPENDENCIES:
@@ -318,10 +318,10 @@ STAGE PLANS:
                 keys:
                   0 _col0 (type: int)
                   1 _col0 (type: int)
-                outputColumnNames: _col0, _col1, _col4
+                outputColumnNames: _col0, _col1, _col3
                 Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col4 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator
@@ -520,10 +520,10 @@ POSTHOOK: Input: default@srcbucket_mapjoin_part_n13
 POSTHOOK: Input: default@srcbucket_mapjoin_part_n13@ds=2008-04-08
 POSTHOOK: Output: default@bucketmapjoin_tmp_result_n6
 OPTIMIZED SQL: SELECT `t0`.`key`, `t0`.`value`, `t2`.`value` AS `value1`
-FROM (SELECT `key`, `value`, CAST('2008-04-08' AS STRING) AS `ds`
+FROM (SELECT `key`, `value`
 FROM `default`.`srcbucket_mapjoin_part_2_n11`
 WHERE `ds` = '2008-04-08' AND `key` IS NOT NULL) AS `t0`
-INNER JOIN (SELECT `key`, `value`, CAST('2008-04-08' AS STRING) AS `ds`
+INNER JOIN (SELECT `key`, `value`
 FROM `default`.`srcbucket_mapjoin_part_n13`
 WHERE `ds` = '2008-04-08' AND `key` IS NOT NULL) AS `t2` ON `t0`.`key` = `t2`.`key`
 STAGE DEPENDENCIES:
@@ -703,10 +703,10 @@ STAGE PLANS:
                 keys:
                   0 _col0 (type: int)
                   1 _col0 (type: int)
-                outputColumnNames: _col0, _col1, _col4
+                outputColumnNames: _col0, _col1, _col3
                 Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col4 (type: string)
+                  expressions: CAST( _col0 AS STRING) (type: string), _col1 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 163 Data size: 63932 Basic stats: PARTIAL Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_2.q.out b/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_2.q.out
index 59909dd..85983c2 100644
--- a/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_2.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_2.q.out
@@ -129,10 +129,10 @@ STAGE PLANS:
                         keys:
                           0 _col0 (type: int)
                           1 _col0 (type: int)
-                        outputColumnNames: _col0, _col1, _col4
+                        outputColumnNames: _col0, _col1, _col3
                         Statistics: Num rows: 92 Data size: 809 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col0 (type: int), concat(_col1, _col4) (type: string)
+                          expressions: _col0 (type: int), concat(_col1, _col3) (type: string)
                           outputColumnNames: _col0, _col1
                           Statistics: Num rows: 92 Data size: 809 Basic stats: COMPLETE Column stats: NONE
                           Reduce Output Operator
@@ -291,10 +291,10 @@ STAGE PLANS:
                         keys:
                           0 _col0 (type: int)
                           1 _col0 (type: int)
-                        outputColumnNames: _col0, _col1, _col4
+                        outputColumnNames: _col0, _col1, _col3
                         Statistics: Num rows: 92 Data size: 809 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col0 (type: int), concat(_col1, _col4) (type: string)
+                          expressions: _col0 (type: int), concat(_col1, _col3) (type: string)
                           outputColumnNames: _col0, _col1
                           Statistics: Num rows: 92 Data size: 809 Basic stats: COMPLETE Column stats: NONE
                           Reduce Output Operator
@@ -477,10 +477,10 @@ STAGE PLANS:
                         keys:
                           0 _col0 (type: int)
                           1 _col0 (type: int)
-                        outputColumnNames: _col0, _col1, _col4
+                        outputColumnNames: _col0, _col1, _col3
                         Statistics: Num rows: 184 Data size: 1619 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col0 (type: int), concat(_col1, _col4) (type: string)
+                          expressions: _col0 (type: int), concat(_col1, _col3) (type: string)
                           outputColumnNames: _col0, _col1
                           Statistics: Num rows: 184 Data size: 1619 Basic stats: COMPLETE Column stats: NONE
                           Reduce Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_4.q.out b/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_4.q.out
index 3258185..dafaa01 100644
--- a/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_4.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_4.q.out
@@ -130,12 +130,12 @@ STAGE PLANS:
                         keys:
                           0 _col0 (type: int)
                           1 _col0 (type: int)
-                        outputColumnNames: _col0, _col1, _col4
+                        outputColumnNames: _col0, _col1, _col3
                         input vertices:
                           0 Map 1
                         Statistics: Num rows: 92 Data size: 809 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col0 (type: int), concat(_col1, _col4) (type: string)
+                          expressions: _col0 (type: int), concat(_col1, _col3) (type: string)
                           outputColumnNames: _col1, _col2
                           Statistics: Num rows: 92 Data size: 809 Basic stats: COMPLETE Column stats: NONE
                           Reduce Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_6.q.out b/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_6.q.out
index 8f6a4cb..137b554 100644
--- a/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_6.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_6.q.out
@@ -132,12 +132,12 @@ STAGE PLANS:
                         keys:
                           0 _col0 (type: int), _col1 (type: int)
                           1 _col0 (type: int), _col1 (type: int)
-                        outputColumnNames: _col0, _col1, _col2, _col6
+                        outputColumnNames: _col0, _col1, _col2, _col5
                         input vertices:
                           0 Map 1
                         Statistics: Num rows: 92 Data size: 1076 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col0 (type: int), _col1 (type: int), concat(_col2, _col6) (type: string)
+                          expressions: _col0 (type: int), _col1 (type: int), concat(_col2, _col5) (type: string)
                           outputColumnNames: _col0, _col1, _col2
                           Statistics: Num rows: 92 Data size: 1076 Basic stats: COMPLETE Column stats: NONE
                           Reduce Output Operator
@@ -328,12 +328,12 @@ STAGE PLANS:
                         keys:
                           0 _col0 (type: int), _col1 (type: int)
                           1 _col0 (type: int), _col1 (type: int)
-                        outputColumnNames: _col0, _col1, _col2, _col6
+                        outputColumnNames: _col0, _col1, _col2, _col5
                         input vertices:
                           0 Map 1
                         Statistics: Num rows: 92 Data size: 1076 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col0 (type: int), _col1 (type: int), concat(_col2, _col6) (type: string)
+                          expressions: _col0 (type: int), _col1 (type: int), concat(_col2, _col5) (type: string)
                           outputColumnNames: _col0, _col1, _col2
                           Statistics: Num rows: 92 Data size: 1076 Basic stats: COMPLETE Column stats: NONE
                           Reduce Output Operator
@@ -524,12 +524,12 @@ STAGE PLANS:
                         keys:
                           0 _col0 (type: int), _col1 (type: int)
                           1 _col0 (type: int), _col1 (type: int)
-                        outputColumnNames: _col0, _col1, _col2, _col6
+                        outputColumnNames: _col0, _col1, _col2, _col5
                         input vertices:
                           0 Map 1
                         Statistics: Num rows: 92 Data size: 1076 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col1 (type: int), _col0 (type: int), concat(_col2, _col6) (type: string)
+                          expressions: _col1 (type: int), _col0 (type: int), concat(_col2, _col5) (type: string)
                           outputColumnNames: _col0, _col1, _col2
                           Statistics: Num rows: 92 Data size: 1076 Basic stats: COMPLETE Column stats: NONE
                           Reduce Output Operator
@@ -655,12 +655,12 @@ STAGE PLANS:
                         keys:
                           0 _col0 (type: int), _col1 (type: int)
                           1 _col0 (type: int), _col1 (type: int)
-                        outputColumnNames: _col0, _col1, _col2, _col6
+                        outputColumnNames: _col0, _col1, _col2, _col5
                         input vertices:
                           0 Map 1
                         Statistics: Num rows: 92 Data size: 1076 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col1 (type: int), _col0 (type: int), concat(_col2, _col6) (type: string)
+                          expressions: _col1 (type: int), _col0 (type: int), concat(_col2, _col5) (type: string)
                           outputColumnNames: _col0, _col1, _col2
                           Statistics: Num rows: 92 Data size: 1076 Basic stats: COMPLETE Column stats: NONE
                           Reduce Output Operator
@@ -792,12 +792,12 @@ STAGE PLANS:
                         keys:
                           0 _col0 (type: int), _col1 (type: int)
                           1 _col0 (type: int), _col1 (type: int)
-                        outputColumnNames: _col0, _col1, _col2, _col6
+                        outputColumnNames: _col0, _col1, _col2, _col5
                         input vertices:
                           0 Map 1
                         Statistics: Num rows: 92 Data size: 1076 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col0 (type: int), _col1 (type: int), concat(_col2, _col6) (type: string)
+                          expressions: _col0 (type: int), _col1 (type: int), concat(_col2, _col5) (type: string)
                           outputColumnNames: _col0, _col1, _col2
                           Statistics: Num rows: 92 Data size: 1076 Basic stats: COMPLETE Column stats: NONE
                           Reduce Output Operator
@@ -1006,12 +1006,12 @@ STAGE PLANS:
                         keys:
                           0 _col0 (type: int), _col1 (type: int)
                           1 _col0 (type: int), _col1 (type: int)
-                        outputColumnNames: _col0, _col1, _col2, _col6
+                        outputColumnNames: _col0, _col1, _col2, _col5
                         input vertices:
                           0 Map 1
                         Statistics: Num rows: 92 Data size: 1076 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col0 (type: int), _col1 (type: int), concat(_col2, _col6) (type: string)
+                          expressions: _col0 (type: int), _col1 (type: int), concat(_col2, _col5) (type: string)
                           outputColumnNames: _col0, _col1, _col2
                           Statistics: Num rows: 92 Data size: 1076 Basic stats: COMPLETE Column stats: NONE
                           Reduce Output Operator
@@ -1230,12 +1230,12 @@ STAGE PLANS:
                         keys:
                           0 _col0 (type: int), _col1 (type: int)
                           1 _col0 (type: int), _col1 (type: int)
-                        outputColumnNames: _col0, _col1, _col2, _col6
+                        outputColumnNames: _col0, _col1, _col2, _col5
                         input vertices:
                           0 Map 1
                         Statistics: Num rows: 92 Data size: 1076 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col0 (type: int), _col1 (type: int), concat(_col2, _col6) (type: string)
+                          expressions: _col0 (type: int), _col1 (type: int), concat(_col2, _col5) (type: string)
                           outputColumnNames: _col0, _col1, _col2
                           Statistics: Num rows: 92 Data size: 1076 Basic stats: COMPLETE Column stats: NONE
                           Reduce Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_7.q.out b/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_7.q.out
index f2f7673..fb4f192 100644
--- a/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_7.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_7.q.out
@@ -107,10 +107,10 @@ STAGE PLANS:
                         keys:
                           0 _col0 (type: int)
                           1 _col0 (type: int)
-                        outputColumnNames: _col0, _col1, _col4
+                        outputColumnNames: _col0, _col1, _col3
                         Statistics: Num rows: 92 Data size: 809 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col0 (type: int), concat(_col1, _col4) (type: string)
+                          expressions: _col0 (type: int), concat(_col1, _col3) (type: string)
                           outputColumnNames: _col0, _col1
                           Statistics: Num rows: 92 Data size: 809 Basic stats: COMPLETE Column stats: NONE
                           Reduce Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_8.q.out b/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_8.q.out
index 5dfb6f6..3e2bee3 100644
--- a/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_8.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketsortoptimize_insert_8.q.out
@@ -130,12 +130,12 @@ STAGE PLANS:
                         keys:
                           0 _col0 (type: int)
                           1 _col0 (type: int)
-                        outputColumnNames: _col0, _col1, _col3, _col4
+                        outputColumnNames: _col0, _col1, _col2, _col3
                         input vertices:
                           0 Map 1
                         Statistics: Num rows: 92 Data size: 809 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col0 (type: int), _col3 (type: int), concat(_col1, _col4) (type: string)
+                          expressions: _col0 (type: int), _col2 (type: int), concat(_col1, _col3) (type: string)
                           outputColumnNames: _col0, _col1, _col2
                           Statistics: Num rows: 92 Data size: 809 Basic stats: COMPLETE Column stats: NONE
                           Reduce Output Operator
@@ -320,12 +320,12 @@ STAGE PLANS:
                         keys:
                           0 _col0 (type: int)
                           1 _col0 (type: int)
-                        outputColumnNames: _col0, _col1, _col3, _col4
+                        outputColumnNames: _col0, _col1, _col2, _col3
                         input vertices:
                           0 Map 1
                         Statistics: Num rows: 92 Data size: 809 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col3 (type: int), _col0 (type: int), concat(_col1, _col4) (type: string)
+                          expressions: _col2 (type: int), _col0 (type: int), concat(_col1, _col3) (type: string)
                           outputColumnNames: _col0, _col1, _col2
                           Statistics: Num rows: 92 Data size: 809 Basic stats: COMPLETE Column stats: NONE
                           Reduce Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/constprog_partitioner.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/constprog_partitioner.q.out b/ql/src/test/results/clientpositive/spark/constprog_partitioner.q.out
index 2fa9dff..f0d201c 100644
--- a/ql/src/test/results/clientpositive/spark/constprog_partitioner.q.out
+++ b/ql/src/test/results/clientpositive/spark/constprog_partitioner.q.out
@@ -48,12 +48,12 @@ STAGE PLANS:
                     Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: value (type: string)
-                      outputColumnNames: _col1
+                      outputColumnNames: _col0
                       Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         sort order: 
                         Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col1 (type: string)
+                        value expressions: _col0 (type: string)
             Execution mode: vectorized
         Reducer 2 
             Reduce Operator Tree:
@@ -63,10 +63,10 @@ STAGE PLANS:
                 keys:
                   0 
                   1 
-                outputColumnNames: _col2
+                outputColumnNames: _col1
                 Statistics: Num rows: 250000 Data size: 5551000 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: 100 (type: int), 101 (type: int), _col2 (type: string)
+                  expressions: 100 (type: int), 101 (type: int), _col1 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 250000 Data size: 5551000 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/constprog_semijoin.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/constprog_semijoin.q.out b/ql/src/test/results/clientpositive/spark/constprog_semijoin.q.out
index 050fae9..6b4fa1e 100644
--- a/ql/src/test/results/clientpositive/spark/constprog_semijoin.q.out
+++ b/ql/src/test/results/clientpositive/spark/constprog_semijoin.q.out
@@ -417,10 +417,10 @@ STAGE PLANS:
                 keys:
                   0 _col0 (type: int)
                   1 _col0 (type: int)
-                outputColumnNames: _col0, _col4
+                outputColumnNames: _col0, _col2
                 Statistics: Num rows: 5 Data size: 121 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col0 (type: int), 't1val01' (type: string), _col4 (type: string)
+                  expressions: _col0 (type: int), 't1val01' (type: string), _col2 (type: string)
                   outputColumnNames: _col0, _col1, _col2
                   Statistics: Num rows: 5 Data size: 121 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out b/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
index c515988..2088ccd 100644
--- a/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
+++ b/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
@@ -763,21 +763,22 @@ STAGE PLANS:
         Map 10 
             Map Operator Tree:
                 TableScan
-                  alias: date_dim
-                  filterExpr: ((d_year = 1999) and (d_moy = 3) and d_date_sk is not null) (type: boolean)
+                  alias: warehouse
+                  filterExpr: w_warehouse_sk is not null (type: boolean)
                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   Filter Operator
-                    predicate: ((d_moy = 3) and (d_year = 1999) and d_date_sk is not null) (type: boolean)
+                    predicate: w_warehouse_sk is not null (type: boolean)
                    Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                     Select Operator
-                      expressions: d_date_sk (type: int)
-                      outputColumnNames: _col0
+                      expressions: w_warehouse_sk (type: int), w_warehouse_name (type: string)
+                      outputColumnNames: _col0, _col1
                      Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: int)
                        Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                        value expressions: _col1 (type: string)
             Execution mode: vectorized
         Map 11 
             Map Operator Tree:
@@ -821,41 +822,41 @@ STAGE PLANS:
         Map 17 
             Map Operator Tree:
                 TableScan
-                  alias: warehouse
-                  filterExpr: w_warehouse_sk is not null (type: boolean)
+                  alias: date_dim
+                  filterExpr: ((d_year = 1999) and (d_moy = 4) and d_date_sk is not null) (type: boolean)
                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   Filter Operator
-                    predicate: w_warehouse_sk is not null (type: boolean)
+                    predicate: ((d_moy = 4) and (d_year = 1999) and d_date_sk is not null) (type: boolean)
                    Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                     Select Operator
-                      expressions: w_warehouse_sk (type: int), w_warehouse_name (type: string)
-                      outputColumnNames: _col0, _col1
+                      expressions: d_date_sk (type: int)
+                      outputColumnNames: _col0
                      Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: int)
                        Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
-                        value expressions: _col1 (type: string)
             Execution mode: vectorized
         Map 18 
             Map Operator Tree:
                 TableScan
-                  alias: date_dim
-                  filterExpr: ((d_year = 1999) and (d_moy = 4) and d_date_sk is not null) (type: boolean)
+                  alias: warehouse
+                  filterExpr: w_warehouse_sk is not null (type: boolean)
                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   Filter Operator
-                    predicate: ((d_moy = 4) and (d_year = 1999) and d_date_sk is not null) (type: boolean)
+                    predicate: w_warehouse_sk is not null (type: boolean)
                    Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                     Select Operator
-                      expressions: d_date_sk (type: int)
-                      outputColumnNames: _col0
+                      expressions: w_warehouse_sk (type: int), w_warehouse_name (type: string)
+                      outputColumnNames: _col0, _col1
                      Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: int)
                        Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                        value expressions: _col1 (type: string)
             Execution mode: vectorized
         Map 8 
             Map Operator Tree:
@@ -879,22 +880,21 @@ STAGE PLANS:
         Map 9 
             Map Operator Tree:
                 TableScan
-                  alias: warehouse
-                  filterExpr: w_warehouse_sk is not null (type: boolean)
+                  alias: date_dim
+                  filterExpr: ((d_year = 1999) and (d_moy = 3) and d_date_sk is not null) (type: boolean)
                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   Filter Operator
-                    predicate: w_warehouse_sk is not null (type: boolean)
+                    predicate: ((d_moy = 3) and (d_year = 1999) and d_date_sk is not null) (type: boolean)
                    Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                     Select Operator
-                      expressions: w_warehouse_sk (type: int), w_warehouse_name (type: string)
-                      outputColumnNames: _col0, _col1
+                      expressions: d_date_sk (type: int)
+                      outputColumnNames: _col0
                      Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: int)
                        Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
-                        value expressions: _col1 (type: string)
             Execution mode: vectorized
         Reducer 12 
             Reduce Operator Tree:
@@ -907,39 +907,39 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col2, _col3, _col4
                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col3 (type: int)
+                  key expressions: _col0 (type: int)
                   sort order: +
-                  Map-reduce partition columns: _col3 (type: int)
+                  Map-reduce partition columns: _col0 (type: int)
                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
-                  value expressions: _col0 (type: int), _col2 (type: int), _col4 (type: int)
+                  value expressions: _col2 (type: int), _col3 (type: int), _col4 (type: int)
         Reducer 13 
             Reduce Operator Tree:
               Join Operator
                 condition map:
                      Inner Join 0 to 1
                 keys:
-                  0 _col3 (type: int)
+                  0 _col0 (type: int)
                   1 _col0 (type: int)
-                outputColumnNames: _col0, _col2, _col4, _col5, _col6
+                outputColumnNames: _col2, _col3, _col4
                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col0 (type: int)
+                  key expressions: _col3 (type: int)
                   sort order: +
-                  Map-reduce partition columns: _col0 (type: int)
+                  Map-reduce partition columns: _col3 (type: int)
                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
-                  value expressions: _col2 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: string)
+                  value expressions: _col2 (type: int), _col4 (type: int)
         Reducer 14 
             Reduce Operator Tree:
               Join Operator
                 condition map:
                      Inner Join 0 to 1
                 keys:
-                  0 _col0 (type: int)
+                  0 _col3 (type: int)
                   1 _col0 (type: int)
-                outputColumnNames: _col2, _col4, _col5, _col6
+                outputColumnNames: _col2, _col4, _col6, _col7
                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 Select Operator
-                  expressions: _col6 (type: string), _col5 (type: int), _col4 (type: int), _col2 (type: int), UDFToDouble(_col2) (type: double), (UDFToDouble(_col2) * UDFToDouble(_col2)) (type: double)
+                  expressions: _col7 (type: string), _col6 (type: int), _col4 (type: int), _col2 (type: int), UDFToDouble(_col2) (type: double), (UDFToDouble(_col2) * UDFToDouble(_col2)) (type: double)
                   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
                   Group By Operator
@@ -991,39 +991,39 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col2, _col3, _col4
                 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col3 (type: int)
+                  key expressions: _col0 (type: int)
                   sort order: +
-                  Map-reduce partition columns: _col3 (type: int)
+                  Map-reduce partition columns: _col0 (type: int)
                   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
-                  value expressions: _col0 (type: int), _col2 (type: int), 
_col4 (type: int)
+                  value expressions: _col2 (type: int), _col3 (type: int), 
_col4 (type: int)
         Reducer 3 
             Reduce Operator Tree:
               Join Operator
                 condition map:
                      Inner Join 0 to 1
                 keys:
-                  0 _col3 (type: int)
+                  0 _col0 (type: int)
                   1 _col0 (type: int)
-                outputColumnNames: _col0, _col2, _col4, _col5, _col6
+                outputColumnNames: _col2, _col3, _col4
                 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col0 (type: int)
+                  key expressions: _col3 (type: int)
                   sort order: +
-                  Map-reduce partition columns: _col0 (type: int)
+                  Map-reduce partition columns: _col3 (type: int)
                   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
-                  value expressions: _col2 (type: int), _col4 (type: int), 
_col5 (type: int), _col6 (type: string)
+                  value expressions: _col2 (type: int), _col4 (type: int)
         Reducer 4 
             Reduce Operator Tree:
               Join Operator
                 condition map:
                      Inner Join 0 to 1
                 keys:
-                  0 _col0 (type: int)
+                  0 _col3 (type: int)
                   1 _col0 (type: int)
-                outputColumnNames: _col2, _col4, _col5, _col6
+                outputColumnNames: _col2, _col4, _col6, _col7
                 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
                 Select Operator
-                  expressions: _col6 (type: string), _col5 (type: int), _col4 
(type: int), _col2 (type: int), UDFToDouble(_col2) (type: double), 
(UDFToDouble(_col2) * UDFToDouble(_col2)) (type: double)
+                  expressions: _col7 (type: string), _col6 (type: int), _col4 
(type: int), _col2 (type: int), UDFToDouble(_col2) (type: double), 
(UDFToDouble(_col2) * UDFToDouble(_col2)) (type: double)
                   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
                   Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/filter_join_breaktask.q.out
----------------------------------------------------------------------
diff --git 
a/ql/src/test/results/clientpositive/spark/filter_join_breaktask.q.out 
b/ql/src/test/results/clientpositive/spark/filter_join_breaktask.q.out
index d9dde1a..0ce54c5 100644
--- a/ql/src/test/results/clientpositive/spark/filter_join_breaktask.q.out
+++ b/ql/src/test/results/clientpositive/spark/filter_join_breaktask.q.out
@@ -35,13 +35,13 @@ POSTHOOK: Input: default@filter_join_breaktask
 POSTHOOK: Input: default@filter_join_breaktask@ds=2008-04-08
 #### A masked pattern was here ####
 OPTIMIZED SQL: SELECT `t4`.`key`, `t0`.`value`
-FROM (SELECT `value`, CAST('2008-04-08' AS STRING) AS `ds`
+FROM (SELECT `value`
 FROM `default`.`filter_join_breaktask`
 WHERE `ds` = '2008-04-08' AND `value` <> '') AS `t0`
-INNER JOIN (SELECT `key`, `value`, CAST('2008-04-08' AS STRING) AS `ds`
+INNER JOIN (SELECT `key`, `value`
 FROM `default`.`filter_join_breaktask`
 WHERE `ds` = '2008-04-08' AND `value` <> '' AND `key` IS NOT NULL) AS `t2` ON 
`t0`.`value` = `t2`.`value`
-INNER JOIN (SELECT `key`, CAST('2008-04-08' AS STRING) AS `ds`
+INNER JOIN (SELECT `key`
 FROM `default`.`filter_join_breaktask`
 WHERE `ds` = '2008-04-08' AND `key` IS NOT NULL) AS `t4` ON `t2`.`key` = 
`t4`.`key`
 STAGE DEPENDENCIES:
@@ -297,13 +297,13 @@ STAGE PLANS:
                 keys:
                   0 _col0 (type: string)
                   1 _col1 (type: string)
-                outputColumnNames: _col0, _col2
+                outputColumnNames: _col0, _col1
                 Statistics: Num rows: 27 Data size: 232 Basic stats: COMPLETE 
Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col2 (type: int)
+                  key expressions: _col1 (type: int)
                   null sort order: a
                   sort order: +
-                  Map-reduce partition columns: _col2 (type: int)
+                  Map-reduce partition columns: _col1 (type: int)
                   Statistics: Num rows: 27 Data size: 232 Basic stats: 
COMPLETE Column stats: NONE
                   tag: 0
                   value expressions: _col0 (type: string)
@@ -315,12 +315,12 @@ STAGE PLANS:
                 condition map:
                      Inner Join 0 to 1
                 keys:
-                  0 _col2 (type: int)
+                  0 _col1 (type: int)
                   1 _col0 (type: int)
-                outputColumnNames: _col0, _col5
+                outputColumnNames: _col0, _col3
                 Statistics: Num rows: 29 Data size: 255 Basic stats: COMPLETE 
Column stats: NONE
                 Select Operator
-                  expressions: _col5 (type: int), _col0 (type: string)
+                  expressions: _col3 (type: int), _col0 (type: string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 29 Data size: 255 Basic stats: 
COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/infer_bucket_sort_map_operators.q.out
----------------------------------------------------------------------
diff --git 
a/ql/src/test/results/clientpositive/spark/infer_bucket_sort_map_operators.q.out
 
b/ql/src/test/results/clientpositive/spark/infer_bucket_sort_map_operators.q.out
index 99847cd..2f230c4 100644
--- 
a/ql/src/test/results/clientpositive/spark/infer_bucket_sort_map_operators.q.out
+++ 
b/ql/src/test/results/clientpositive/spark/infer_bucket_sort_map_operators.q.out
@@ -213,12 +213,16 @@ STAGE PLANS:
                       Filter Operator
                         predicate: _col1 is not null (type: boolean)
                         Statistics: Num rows: 250 Data size: 2656 Basic stats: 
COMPLETE Column stats: NONE
-                        Reduce Output Operator
-                          key expressions: UDFToDouble(_col1) (type: double)
-                          sort order: +
-                          Map-reduce partition columns: UDFToDouble(_col1) 
(type: double)
+                        Select Operator
+                          expressions: _col0 (type: string), _col1 (type: 
bigint), UDFToDouble(_col1) (type: double)
+                          outputColumnNames: _col0, _col1, _col2
                           Statistics: Num rows: 250 Data size: 2656 Basic 
stats: COMPLETE Column stats: NONE
-                          value expressions: _col0 (type: string), _col1 
(type: bigint)
+                          Reduce Output Operator
+                            key expressions: _col2 (type: double)
+                            sort order: +
+                            Map-reduce partition columns: _col2 (type: double)
+                            Statistics: Num rows: 250 Data size: 2656 Basic 
stats: COMPLETE Column stats: NONE
+                            value expressions: _col0 (type: string), _col1 
(type: bigint)
             Execution mode: vectorized
         Map 3 
             Map Operator Tree:
@@ -230,13 +234,13 @@ STAGE PLANS:
                     predicate: value is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: value (type: string)
+                      expressions: UDFToDouble(value) (type: double)
                       outputColumnNames: _col0
                       Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE
                       Reduce Output Operator
-                        key expressions: UDFToDouble(_col0) (type: double)
+                        key expressions: _col0 (type: double)
                         sort order: +
-                        Map-reduce partition columns: UDFToDouble(_col0) 
(type: double)
+                        Map-reduce partition columns: _col0 (type: double)
                         Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE
             Execution mode: vectorized
         Reducer 2 
@@ -245,8 +249,8 @@ STAGE PLANS:
                 condition map:
                      Inner Join 0 to 1
                 keys:
-                  0 UDFToDouble(_col1) (type: double)
-                  1 UDFToDouble(_col0) (type: double)
+                  0 _col2 (type: double)
+                  1 _col0 (type: double)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 550 Data size: 5843 Basic stats: 
COMPLETE Column stats: NONE
                 Select Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/join13.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join13.q.out 
b/ql/src/test/results/clientpositive/spark/join13.q.out
index 0fba2f3..965f1bc 100644
--- a/ql/src/test/results/clientpositive/spark/join13.q.out
+++ b/ql/src/test/results/clientpositive/spark/join13.q.out
@@ -85,13 +85,13 @@ STAGE PLANS:
                     predicate: (UDFToDouble(key) < 200.0D) (type: boolean)
                     Statistics: Num rows: 166 Data size: 1763 Basic stats: 
COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: key (type: string)
+                      expressions: UDFToDouble(key) (type: double)
                       outputColumnNames: _col0
                       Statistics: Num rows: 166 Data size: 1763 Basic stats: 
COMPLETE Column stats: NONE
                       Reduce Output Operator
-                        key expressions: UDFToDouble(_col0) (type: double)
+                        key expressions: _col0 (type: double)
                         sort order: +
-                        Map-reduce partition columns: UDFToDouble(_col0) 
(type: double)
+                        Map-reduce partition columns: _col0 (type: double)
                         Statistics: Num rows: 166 Data size: 1763 Basic stats: 
COMPLETE Column stats: NONE
             Execution mode: vectorized
         Reducer 2 
@@ -117,7 +117,7 @@ STAGE PLANS:
                      Inner Join 0 to 1
                 keys:
                   0 (UDFToDouble(_col2) + UDFToDouble(_col0)) (type: double)
-                  1 UDFToDouble(_col0) (type: double)
+                  1 _col0 (type: double)
                 outputColumnNames: _col1, _col2
                 Statistics: Num rows: 200 Data size: 2132 Basic stats: 
COMPLETE Column stats: NONE
                 Select Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/join2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join2.q.out 
b/ql/src/test/results/clientpositive/spark/join2.q.out
index c0d8ea4..3f64bc9 100644
--- a/ql/src/test/results/clientpositive/spark/join2.q.out
+++ b/ql/src/test/results/clientpositive/spark/join2.q.out
@@ -41,14 +41,15 @@ STAGE PLANS:
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: key (type: string)
-                      outputColumnNames: _col0
+                      expressions: key (type: string), UDFToDouble(key) (type: 
double)
+                      outputColumnNames: _col0, _col1
                       Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE
+                        value expressions: _col1 (type: double)
             Execution mode: vectorized
         Map 4 
             Map Operator Tree:
@@ -60,14 +61,15 @@ STAGE PLANS:
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: key (type: string)
-                      outputColumnNames: _col0
+                      expressions: key (type: string), UDFToDouble(key) (type: 
double)
+                      outputColumnNames: _col0, _col1
                       Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE
+                        value expressions: _col1 (type: double)
             Execution mode: vectorized
         Map 5 
             Map Operator Tree:
@@ -79,15 +81,15 @@ STAGE PLANS:
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: key (type: string), value (type: string)
+                      expressions: value (type: string), UDFToDouble(key) 
(type: double)
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE
                       Reduce Output Operator
-                        key expressions: UDFToDouble(_col0) (type: double)
+                        key expressions: _col1 (type: double)
                         sort order: +
-                        Map-reduce partition columns: UDFToDouble(_col0) 
(type: double)
+                        Map-reduce partition columns: _col1 (type: double)
                         Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE
-                        value expressions: _col1 (type: string)
+                        value expressions: _col0 (type: string)
             Execution mode: vectorized
         Reducer 2 
             Reduce Operator Tree:
@@ -97,12 +99,12 @@ STAGE PLANS:
                 keys:
                   0 _col0 (type: string)
                   1 _col0 (type: string)
-                outputColumnNames: _col0, _col1
+                outputColumnNames: _col0, _col1, _col3
                 Statistics: Num rows: 550 Data size: 5843 Basic stats: 
COMPLETE Column stats: NONE
                 Reduce Output Operator
-                  key expressions: (UDFToDouble(_col0) + UDFToDouble(_col1)) 
(type: double)
+                  key expressions: (_col1 + _col3) (type: double)
                   sort order: +
-                  Map-reduce partition columns: (UDFToDouble(_col0) + 
UDFToDouble(_col1)) (type: double)
+                  Map-reduce partition columns: (_col1 + _col3) (type: double)
                   Statistics: Num rows: 550 Data size: 5843 Basic stats: 
COMPLETE Column stats: NONE
                   value expressions: _col0 (type: string)
         Reducer 3 
@@ -111,12 +113,12 @@ STAGE PLANS:
                 condition map:
                      Inner Join 0 to 1
                 keys:
-                  0 (UDFToDouble(_col0) + UDFToDouble(_col1)) (type: double)
-                  1 UDFToDouble(_col0) (type: double)
-                outputColumnNames: _col0, _col3
+                  0 (_col1 + _col3) (type: double)
+                  1 _col1 (type: double)
+                outputColumnNames: _col0, _col4
                 Statistics: Num rows: 605 Data size: 6427 Basic stats: 
COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col3 (type: 
string)
+                  expressions: UDFToInteger(_col0) (type: int), _col4 (type: 
string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 605 Data size: 6427 Basic stats: 
COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/join26.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join26.q.out 
b/ql/src/test/results/clientpositive/spark/join26.q.out
index 25ed83d..1133cca 100644
--- a/ql/src/test/results/clientpositive/spark/join26.q.out
+++ b/ql/src/test/results/clientpositive/spark/join26.q.out
@@ -29,7 +29,7 @@ POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 POSTHOOK: Output: default@dest_j1_n10
 OPTIMIZED SQL: SELECT `t4`.`key`, `t0`.`value`, `t2`.`value` AS `value1`
-FROM (SELECT `key`, `value`, CAST('2008-04-08' AS STRING) AS `ds`, `hr`
+FROM (SELECT `key`, `value`
 FROM `default`.`srcpart`
 WHERE `ds` = '2008-04-08' AND `hr` = 11 AND `key` IS NOT NULL) AS `t0`
 INNER JOIN ((SELECT `key`, `value`

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/join32.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join32.q.out 
b/ql/src/test/results/clientpositive/spark/join32.q.out
index 29cb0c9..a3b155e 100644
--- a/ql/src/test/results/clientpositive/spark/join32.q.out
+++ b/ql/src/test/results/clientpositive/spark/join32.q.out
@@ -29,7 +29,7 @@ POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 POSTHOOK: Output: default@dest_j1_n12
 OPTIMIZED SQL: SELECT `t4`.`key`, `t0`.`value`, `t2`.`value` AS `value1`
-FROM (SELECT `value`, CAST('2008-04-08' AS STRING) AS `ds`, `hr`
+FROM (SELECT `value`
 FROM `default`.`srcpart`
 WHERE `ds` = '2008-04-08' AND `hr` = 11 AND `value` IS NOT NULL) AS `t0`
 INNER JOIN ((SELECT `key`, `value`

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out 
b/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out
index 58c9a73..7075137 100644
--- a/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out
+++ b/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out
@@ -37,7 +37,7 @@ POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 POSTHOOK: Output: default@dest_j1_n21
 OPTIMIZED SQL: SELECT `t4`.`key`, `t0`.`value`, `t2`.`value` AS `value1`
-FROM (SELECT `value`, CAST('2008-04-08' AS STRING) AS `ds`, `hr`
+FROM (SELECT `value`
 FROM `default`.`srcpart`
 WHERE `ds` = '2008-04-08' AND `hr` = 11 AND `value` IS NOT NULL) AS `t0`
 INNER JOIN ((SELECT `key`, `value`
@@ -1104,7 +1104,7 @@ POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 POSTHOOK: Output: default@dest_j2_n1
 OPTIMIZED SQL: SELECT `t4`.`key`, `t0`.`value`, `t4`.`value` AS `value1`
-FROM (SELECT `value`, CAST('2008-04-08' AS STRING) AS `ds`, `hr`
+FROM (SELECT `value`
 FROM `default`.`srcpart`
 WHERE `ds` = '2008-04-08' AND `hr` = 11 AND `value` IS NOT NULL) AS `t0`
 INNER JOIN ((SELECT `key`
@@ -1598,7 +1598,7 @@ POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 POSTHOOK: Output: default@dest_j2_n1
 OPTIMIZED SQL: SELECT `t2`.`key`, `t0`.`value`, `t2`.`value` AS `value1`
-FROM (SELECT `value`, CAST('2008-04-08' AS STRING) AS `ds`, `hr`
+FROM (SELECT `value`
 FROM `default`.`srcpart`
 WHERE `ds` = '2008-04-08' AND `hr` = 11 AND `value` IS NOT NULL) AS `t0`
 INNER JOIN ((SELECT `key`, `value`

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/join33.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join33.q.out 
b/ql/src/test/results/clientpositive/spark/join33.q.out
index 6471f5b..59c2f3a 100644
--- a/ql/src/test/results/clientpositive/spark/join33.q.out
+++ b/ql/src/test/results/clientpositive/spark/join33.q.out
@@ -29,7 +29,7 @@ POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 POSTHOOK: Output: default@dest_j1_n7
 OPTIMIZED SQL: SELECT `t4`.`key`, `t0`.`value`, `t2`.`value` AS `value1`
-FROM (SELECT `value`, CAST('2008-04-08' AS STRING) AS `ds`, `hr`
+FROM (SELECT `value`
 FROM `default`.`srcpart`
 WHERE `ds` = '2008-04-08' AND `hr` = 11 AND `value` IS NOT NULL) AS `t0`
 INNER JOIN ((SELECT `key`, `value`

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/join9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join9.q.out 
b/ql/src/test/results/clientpositive/spark/join9.q.out
index 7df9cb8..9d3e0cb 100644
--- a/ql/src/test/results/clientpositive/spark/join9.q.out
+++ b/ql/src/test/results/clientpositive/spark/join9.q.out
@@ -23,7 +23,7 @@ POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
 POSTHOOK: Output: default@dest1_n39
 OPTIMIZED SQL: SELECT `t0`.`key`, `t2`.`value`
-FROM (SELECT `key`, CAST('2008-04-08' AS STRING) AS `ds`, CAST('12' AS STRING) 
AS `hr`
+FROM (SELECT `key`
 FROM `default`.`srcpart`
 WHERE `ds` = '2008-04-08' AND `hr` = '12' AND `key` IS NOT NULL) AS `t0`
 INNER JOIN (SELECT `key`, `value`
@@ -206,10 +206,10 @@ STAGE PLANS:
                 keys:
                   0 _col0 (type: string)
                   1 _col0 (type: string)
-                outputColumnNames: _col0, _col4
+                outputColumnNames: _col0, _col2
                 Statistics: Num rows: 550 Data size: 5843 Basic stats: 
COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col4 (type: 
string)
+                  expressions: UDFToInteger(_col0) (type: int), _col2 (type: 
string)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 550 Data size: 5843 Basic stats: 
COMPLETE Column stats: NONE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/55887646/ql/src/test/results/clientpositive/spark/join_cond_pushdown_1.q.out
----------------------------------------------------------------------
diff --git 
a/ql/src/test/results/clientpositive/spark/join_cond_pushdown_1.q.out 
b/ql/src/test/results/clientpositive/spark/join_cond_pushdown_1.q.out
index ca02e89..626a415 100644
--- a/ql/src/test/results/clientpositive/spark/join_cond_pushdown_1.q.out
+++ b/ql/src/test/results/clientpositive/spark/join_cond_pushdown_1.q.out
@@ -388,14 +388,14 @@ STAGE PLANS:
                     Statistics: Num rows: 13 Data size: 1573 Basic stats: 
COMPLETE Column stats: NONE
                     Select Operator
                       expressions: p_name (type: string), p_mfgr (type: 
string), p_brand (type: string), p_type (type: string), p_size (type: int), 
p_container (type: string), p_retailprice (type: double), p_comment (type: 
string)
-                      outputColumnNames: _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, 
_col5, _col6, _col7
                       Statistics: Num rows: 13 Data size: 1573 Basic stats: 
COMPLETE Column stats: NONE
                       Reduce Output Operator
-                        key expressions: _col1 (type: string)
+                        key expressions: _col0 (type: string)
                         sort order: +
-                        Map-reduce partition columns: _col1 (type: string)
+                        Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 13 Data size: 1573 Basic stats: 
COMPLETE Column stats: NONE
-                        value expressions: _col2 (type: string), _col3 (type: 
string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 
(type: double), _col8 (type: string)
+                        value expressions: _col1 (type: string), _col2 (type: 
string), _col3 (type: string), _col4 (type: int), _col5 (type: string), _col6 
(type: double), _col7 (type: string)
             Execution mode: vectorized
         Map 5 
             Map Operator Tree:
@@ -418,13 +418,13 @@ STAGE PLANS:
                      Inner Join 0 to 1
                 keys:
                   0 _col1 (type: string)
-                  1 _col1 (type: string)
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col10, _col11, _col12, _col13, _col14, _col15, _col16, 
_col17
+                  1 _col0 (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, 
_col16
                 Statistics: Num rows: 28 Data size: 3461 Basic stats: COMPLETE 
Column stats: NONE
                 Reduce Output Operator
                   sort order: 
                   Statistics: Num rows: 28 Data size: 3461 Basic stats: 
COMPLETE Column stats: NONE
-                  value expressions: _col0 (type: int), _col1 (type: string), 
_col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: 
int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col10 
(type: string), _col11 (type: string), _col12 (type: string), _col13 (type: 
string), _col14 (type: int), _col15 (type: string), _col16 (type: double), 
_col17 (type: string)
+                  value expressions: _col0 (type: int), _col1 (type: string), 
_col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: 
int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col9 
(type: string), _col10 (type: string), _col11 (type: string), _col12 (type: 
string), _col13 (type: int), _col14 (type: string), _col15 (type: double), 
_col16 (type: string)
         Reducer 3 
             Reduce Operator Tree:
               Join Operator
@@ -433,10 +433,10 @@ STAGE PLANS:
                 keys:
                   0 
                   1 
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col10, _col11, _col12, _col13, _col14, _col15, _col16, 
_col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, 
_col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25
                 Statistics: Num rows: 728 Data size: 178830 Basic stats: 
COMPLETE Column stats: NONE
                 Select Operator
-                  expressions: _col18 (type: int), _col19 (type: string), 
_col20 (type: string), _col21 (type: string), _col22 (type: string), _col23 
(type: int), _col24 (type: string), _col25 (type: double), _col26 (type: 
string), 1 (type: int), _col10 (type: string), _col11 (type: string), _col12 
(type: string), _col13 (type: string), _col14 (type: int), _col15 (type: 
string), _col16 (type: double), _col17 (type: string), _col0 (type: int), _col1 
(type: string), _col2 (type: string), _col3 (type: string), _col4 (type: 
string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 
(type: string)
+                  expressions: _col17 (type: int), _col18 (type: string), 
_col19 (type: string), _col20 (type: string), _col21 (type: string), _col22 
(type: int), _col23 (type: string), _col24 (type: double), _col25 (type: 
string), 1 (type: int), _col9 (type: string), _col10 (type: string), _col11 
(type: string), _col12 (type: string), _col13 (type: int), _col14 (type: 
string), _col15 (type: double), _col16 (type: string), _col0 (type: int), _col1 
(type: string), _col2 (type: string), _col3 (type: string), _col4 (type: 
string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 
(type: string)
                   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, 
_col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, 
_col26
                   Statistics: Num rows: 728 Data size: 178830 Basic stats: 
COMPLETE Column stats: NONE
                   File Output Operator
