http://git-wip-us.apache.org/repos/asf/hive/blob/62a3778e/ql/src/test/queries/clientpositive/schema_evol_text_vec_mapwork_table.q
----------------------------------------------------------------------
diff --git 
a/ql/src/test/queries/clientpositive/schema_evol_text_vec_mapwork_table.q 
b/ql/src/test/queries/clientpositive/schema_evol_text_vec_mapwork_table.q
index 3dd2085..cb5c422 100644
--- a/ql/src/test/queries/clientpositive/schema_evol_text_vec_mapwork_table.q
+++ b/ql/src/test/queries/clientpositive/schema_evol_text_vec_mapwork_table.q
@@ -9,6 +9,7 @@ SET hive.vectorized.execution.enabled=true;
 set hive.exec.dynamic.partition.mode=nonstrict;
 set hive.metastore.disallow.incompatible.col.type.changes=true;
 set hive.default.fileformat=textfile;
+set hive.llap.io.enabled=false;
 
 -- SORT_QUERY_RESULTS
 --
@@ -180,7 +181,8 @@ insert into table table_change_date_group_string_group_date
            (3, '2021-09-24', '2021-09-24', '2021-09-24', '2021-09-24', 
'2021-09-24', 'original');
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table table_change_date_group_string_group_date replace 
columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 
VARCHAR(15), b STRING);DESCRIBE FORMATTED 
table_change_date_group_string_group_date;
+alter table table_change_date_group_string_group_date replace 
columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 
VARCHAR(15), b STRING);
+DESCRIBE FORMATTED table_change_date_group_string_group_date;
 
 insert into table table_change_date_group_string_group_date
     values (4, '0004-09-22', '0004-09-22', '0004-09-22', '0004-09-22', 
'0004-09-22', 'new'),
@@ -368,7 +370,7 @@ drop table 
table_change_numeric_group_string_group_multi_ints_varchar_trunc;
 --
 -- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(FLOAT, DOUBLE, DECIMAL), STRING
 --
-CREATE TABLE 
table_change_numeric_group_string_group_floating_string(insert_num int, c1 
decimal(38,18), c2 float, c3 double, b STRING);
+CREATE TABLE 
table_change_numeric_group_string_group_floating_string(insert_num int, c1 
float, c2 double, c3 decimal(38,18), b STRING);
 
 insert into table table_change_numeric_group_string_group_floating_string
     values (1, -23866739993, 753.7028, -3651.672121, 'original'),
@@ -398,9 +400,9 @@ drop table 
table_change_numeric_group_string_group_floating_string;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(DECIMAL, FLOAT, DOUBLE), CHAR
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(FLOAT, DOUBLE, DECIMAL), CHAR
 --
-CREATE TABLE table_change_numeric_group_string_group_floating_char(insert_num 
int, c1 decimal(38,18), c2 float, c3 double, b STRING);
+CREATE TABLE table_change_numeric_group_string_group_floating_char(insert_num 
int, c1 float, c2 double, c3 decimal(38,18), b STRING);
 
 insert into table table_change_numeric_group_string_group_floating_char
     values (1, -23866739993, 753.7028, -3651.672121, 'original'),
@@ -430,9 +432,9 @@ drop table 
table_change_numeric_group_string_group_floating_char;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(DECIMAL, FLOAT, DOUBLE), CHAR truncation
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(FLOAT, DOUBLE, DECIMAL), CHAR truncation
 --
-CREATE TABLE 
table_change_numeric_group_string_group_floating_char_trunc(insert_num int, c1 
decimal(38,18), c2 float, c3 double, b STRING);
+CREATE TABLE 
table_change_numeric_group_string_group_floating_char_trunc(insert_num int, c1 
float, c2 double, c3 decimal(38,18), b STRING);
 
 insert into table table_change_numeric_group_string_group_floating_char_trunc
     values (1, -23866739993, 753.7028, -3651.672121, 'original'),
@@ -462,7 +464,7 @@ drop table 
table_change_numeric_group_string_group_floating_char_trunc;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(DECIMAL, FLOAT, DOUBLE), VARCHAR
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(FLOAT, DOUBLE, DECIMAL), VARCHAR
 --
 CREATE TABLE 
table_change_numeric_group_string_group_floating_varchar(insert_num int, c1 
float, c2 double, c3 decimal(38,18), b STRING);
 
@@ -494,9 +496,9 @@ drop table 
table_change_numeric_group_string_group_floating_varchar;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(DECIMAL, FLOAT, DOUBLE), VARCHAR truncation
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(FLOAT, DOUBLE, DECIMAL), VARCHAR truncation
 --
-CREATE TABLE 
table_change_numeric_group_string_group_floating_varchar_trunc(insert_num int, 
c1 decimal(38,18), c2 float, c3 double, b STRING);
+CREATE TABLE 
table_change_numeric_group_string_group_floating_varchar_trunc(insert_num int, 
c1 float, c2 double, c3 decimal(38,18), b STRING);
 
 insert into table 
table_change_numeric_group_string_group_floating_varchar_trunc
     values (1, -23866739993, 753.7028, -3651.672121, 'original'),
@@ -633,7 +635,7 @@ drop table table_change_string_group_string_group_varchar;
 --
 --
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: TINYINT, (SMALLINT, INT, BIGINT, DECIMAL, FLOAT, DOUBLE)
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: TINYINT, (SMALLINT, INT, BIGINT, FLOAT, DOUBLE, DECIMAL)
 --
 CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint(insert_num 
int, c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, b 
STRING);
 
@@ -646,7 +648,7 @@ insert into table 
table_change_lower_to_higher_numeric_group_tinyint
 select insert_num,c1,c2,c3,c4,c5,c6,b from 
table_change_lower_to_higher_numeric_group_tinyint order by insert_num;
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table table_change_lower_to_higher_numeric_group_tinyint replace columns 
(insert_num int, c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, 
c6 DOUBLE, b STRING) ;
+alter table table_change_lower_to_higher_numeric_group_tinyint replace columns 
(insert_num int, c1 SMALLINT, c2 INT, c3 BIGINT, c4 FLOAT, c5 DOUBLE, c6 
decimal(38,18), b STRING) ;
 
 insert into table table_change_lower_to_higher_numeric_group_tinyint
     values (5, '774', '2031', '200', '12', '99', '0', 'new'),
@@ -665,7 +667,7 @@ drop table 
table_change_lower_to_higher_numeric_group_tinyint;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: SMALLINT, (INT, BIGINT, DECIMAL, FLOAT, DOUBLE)
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: SMALLINT, (INT, BIGINT, FLOAT, DOUBLE, DECIMAL)
 --
 CREATE TABLE table_change_lower_to_higher_numeric_group_smallint(insert_num 
int, c1 smallint, c2 smallint, c3 smallint, c4 smallint, c5 smallint, b STRING);
 
@@ -678,7 +680,7 @@ insert into table 
table_change_lower_to_higher_numeric_group_smallint
 select insert_num,c1,c2,c3,c4,c5,b from 
table_change_lower_to_higher_numeric_group_smallint order by insert_num;
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table table_change_lower_to_higher_numeric_group_smallint replace 
columns (insert_num int, c1 INT, c2 BIGINT, c3 decimal(38,18), c4 FLOAT, c5 
DOUBLE, b STRING) ;
+alter table table_change_lower_to_higher_numeric_group_smallint replace 
columns (insert_num int, c1 INT, c2 BIGINT, c3 FLOAT, c4 DOUBLE, c5 
decimal(38,18), b STRING) ;
 
 insert into table table_change_lower_to_higher_numeric_group_smallint
     values (5, '774', '2031', '200', '12', '99', 'new'),
@@ -698,7 +700,7 @@ drop table 
table_change_lower_to_higher_numeric_group_smallint;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: INT, (BIGINT, DECIMAL, FLOAT, DOUBLE)
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: INT, (BIGINT, FLOAT, DOUBLE, DECIMAL)
 --
 CREATE TABLE table_change_lower_to_higher_numeric_group_int(insert_num int, c1 
int, c2 int, c3 int, c4 int, b STRING);
 
@@ -711,7 +713,7 @@ insert into table 
table_change_lower_to_higher_numeric_group_int
 select insert_num,c1,c2,c3,c4,b from 
table_change_lower_to_higher_numeric_group_int order by insert_num;
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table table_change_lower_to_higher_numeric_group_int replace columns 
(insert_num int, c1 BIGINT, c2 decimal(38,18), c3 FLOAT, c4 DOUBLE,  b STRING) ;
+alter table table_change_lower_to_higher_numeric_group_int replace columns 
(insert_num int, c1 BIGINT, c2 FLOAT, c3 DOUBLE, c4 decimal(38,18), b STRING) ;
 
 insert into table table_change_lower_to_higher_numeric_group_int
     values (5, '774', '2031', '200', '12', 'new'),
@@ -730,7 +732,7 @@ drop table table_change_lower_to_higher_numeric_group_int;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: BIGINT, (DECIMAL, FLOAT, DOUBLE)
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: BIGINT, (FLOAT, DOUBLE, DECIMAL)
 --
 CREATE TABLE table_change_lower_to_higher_numeric_group_bigint(insert_num int, 
c1 bigint, c2 bigint, c3 bigint, b STRING);
 
@@ -743,7 +745,7 @@ insert into table 
table_change_lower_to_higher_numeric_group_bigint
 select insert_num,c1,c2,c3,b from 
table_change_lower_to_higher_numeric_group_bigint order by insert_num;
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table table_change_lower_to_higher_numeric_group_bigint replace columns 
(insert_num int, c1 decimal(38,18), c2 FLOAT, c3 DOUBLE, b STRING) ;
+alter table table_change_lower_to_higher_numeric_group_bigint replace columns 
(insert_num int, c1 FLOAT, c2 DOUBLE, c3 decimal(38,18), b STRING) ;
 
 insert into table table_change_lower_to_higher_numeric_group_bigint
     values (5, '774', '2031', '200', 'new'),
@@ -762,22 +764,22 @@ drop table 
table_change_lower_to_higher_numeric_group_bigint;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: FLOAT, (DOUBLE, DECIMAL)
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: FLOAT, (FLOAT, DECIMAL)
 --
-CREATE TABLE table_change_lower_to_higher_numeric_group_decimal(insert_num 
int, c1 float, c2 float, b STRING);
+CREATE TABLE table_change_lower_to_higher_numeric_group_float(insert_num int, 
c1 float, c2 float, b STRING);
 
-insert into table table_change_lower_to_higher_numeric_group_decimal
+insert into table table_change_lower_to_higher_numeric_group_float
     values (1, -29.0764, -29.0764, 'original'),
            (2, 753.7028, 753.7028, 'original'),
            (3, -5000, -5000, 'original'),
            (4, 52927714, 52927714, 'original');
 
-select insert_num,c1,c2,b from 
table_change_lower_to_higher_numeric_group_decimal order by insert_num;
+select insert_num,c1,c2,b from 
table_change_lower_to_higher_numeric_group_float order by insert_num;
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table table_change_lower_to_higher_numeric_group_decimal replace columns 
(insert_num int, c1 double, c2 decimal(38,18), b STRING) ;
+alter table table_change_lower_to_higher_numeric_group_float replace columns 
(insert_num int, c1 double, c2 decimal(38,18), b STRING) ;
 
-insert into table table_change_lower_to_higher_numeric_group_decimal
+insert into table table_change_lower_to_higher_numeric_group_float
     values (5, '7.74', '22.3', 'new'),
            (6, '56.1431', '90.9', 'new'),
            (7, '2.56', '25.6', 'new'),
@@ -786,31 +788,31 @@ insert into table 
table_change_lower_to_higher_numeric_group_decimal
            (10, '1.7', '17.8888', 'new');
 
 explain
-select insert_num,c1,c2,b from 
table_change_lower_to_higher_numeric_group_decimal order by insert_num;
+select insert_num,c1,c2,b from 
table_change_lower_to_higher_numeric_group_float order by insert_num;
 
-select insert_num,c1,c2,b from 
table_change_lower_to_higher_numeric_group_decimal order by insert_num;
+select insert_num,c1,c2,b from 
table_change_lower_to_higher_numeric_group_float order by insert_num;
 
-drop table table_change_lower_to_higher_numeric_group_decimal;
+drop table table_change_lower_to_higher_numeric_group_float;
 
 
 
 --
 -- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: DOUBLE, (DECIMAL)
 --
-CREATE TABLE table_change_lower_to_higher_numeric_group_float(insert_num int, 
c1 double, b STRING);
+CREATE TABLE table_change_lower_to_higher_numeric_group_double(insert_num int, 
c1 double, b STRING);
 
-insert into table table_change_lower_to_higher_numeric_group_float
+insert into table table_change_lower_to_higher_numeric_group_double
     values (1, -29.0764, 'original'),
            (2, 753.7028, 'original'),
            (3, -5000, 'original'),
            (4, 52927714, 'original');
 
-select insert_num,c1,b from table_change_lower_to_higher_numeric_group_float 
order by insert_num;
+select insert_num,c1,b from table_change_lower_to_higher_numeric_group_double 
order by insert_num;
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table table_change_lower_to_higher_numeric_group_float replace columns 
(insert_num int, c1 decimal(38,18), b STRING) ;
+alter table table_change_lower_to_higher_numeric_group_double replace columns 
(insert_num int, c1 decimal(38,18), b STRING) ;
 
-insert into table table_change_lower_to_higher_numeric_group_float
+insert into table table_change_lower_to_higher_numeric_group_double
     values (5, '774', 'new'),
            (6, '561431', 'new'),
            (7, '256', 'new'),
@@ -819,8 +821,8 @@ insert into table 
table_change_lower_to_higher_numeric_group_float
            (10, '17', 'new');
 
 explain
-select insert_num,c1,b from table_change_lower_to_higher_numeric_group_float 
order by insert_num;
+select insert_num,c1,b from table_change_lower_to_higher_numeric_group_double 
order by insert_num;
 
-select insert_num,c1,b from table_change_lower_to_higher_numeric_group_float 
order by insert_num;
+select insert_num,c1,b from table_change_lower_to_higher_numeric_group_double 
order by insert_num;
 
-drop table table_change_lower_to_higher_numeric_group_float;
\ No newline at end of file
+drop table table_change_lower_to_higher_numeric_group_double;

http://git-wip-us.apache.org/repos/asf/hive/blob/62a3778e/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part.q
----------------------------------------------------------------------
diff --git 
a/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part.q 
b/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part.q
index 0ba38c9..4ee3857 100644
--- a/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part.q
+++ b/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part.q
@@ -10,6 +10,7 @@ SET hive.vectorized.execution.enabled=true;
 set hive.exec.dynamic.partition.mode=nonstrict;
 set hive.metastore.disallow.incompatible.col.type.changes=true;
 set hive.default.fileformat=textfile;
+set hive.llap.io.enabled=false;
 
 -- SORT_QUERY_RESULTS
 --
@@ -367,9 +368,9 @@ drop table 
part_change_numeric_group_string_group_multi_ints_varchar_trunc;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(DECIMAL, FLOAT, DOUBLE), STRING
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(FLOAT, DOUBLE, DECIMAL), STRING
 --
-CREATE TABLE part_change_numeric_group_string_group_floating_string(insert_num 
int, c1 decimal(38,18), c2 float, c3 double, b STRING) PARTITIONED BY(part INT);
+CREATE TABLE part_change_numeric_group_string_group_floating_string(insert_num 
int, c1 float, c2 double, c3 decimal(38,18), b STRING) PARTITIONED BY(part INT);
 
 insert into table part_change_numeric_group_string_group_floating_string 
partition(part=1)
     values (1, -23866739993, 753.7028, -3651.672121, 'original'),
@@ -399,9 +400,9 @@ drop table 
part_change_numeric_group_string_group_floating_string;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(DECIMAL, FLOAT, DOUBLE), CHAR
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(FLOAT, DOUBLE, DECIMAL), CHAR
 --
-CREATE TABLE part_change_numeric_group_string_group_floating_char(insert_num 
int, c1 decimal(38,18), c2 float, c3 double, b STRING) PARTITIONED BY(part INT);
+CREATE TABLE part_change_numeric_group_string_group_floating_char(insert_num 
int, c1 float, c2 double, c3 decimal(38,18), b STRING) PARTITIONED BY(part INT);
 
 insert into table part_change_numeric_group_string_group_floating_char 
partition(part=1)
     values (1, -23866739993, 753.7028, -3651.672121, 'original'),
@@ -431,9 +432,9 @@ drop table 
part_change_numeric_group_string_group_floating_char;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(DECIMAL, FLOAT, DOUBLE), CHAR truncation
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(FLOAT, DOUBLE, DECIMAL), CHAR truncation
 --
-CREATE TABLE 
part_change_numeric_group_string_group_floating_char_trunc(insert_num int, c1 
decimal(38,18), c2 float, c3 double, b STRING) PARTITIONED BY(part INT);
+CREATE TABLE 
part_change_numeric_group_string_group_floating_char_trunc(insert_num int, c1 
float, c2 double, c3 decimal(38,18), b STRING) PARTITIONED BY(part INT);
 
 insert into table part_change_numeric_group_string_group_floating_char_trunc 
partition(part=1)
     values (1, -23866739993, 753.7028, -3651.672121, 'original'),
@@ -463,7 +464,7 @@ drop table 
part_change_numeric_group_string_group_floating_char_trunc;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(DECIMAL, FLOAT, DOUBLE), VARCHAR
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(FLOAT, DOUBLE, DECIMAL), VARCHAR
 --
 CREATE TABLE 
part_change_numeric_group_string_group_floating_varchar(insert_num int, c1 
float, c2 double, c3 decimal(38,18), b STRING) PARTITIONED BY(part INT);
 
@@ -495,9 +496,9 @@ drop table 
part_change_numeric_group_string_group_floating_varchar;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(DECIMAL, FLOAT, DOUBLE), VARCHAR truncation
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(FLOAT, DOUBLE, DECIMAL), VARCHAR truncation
 --
-CREATE TABLE 
part_change_numeric_group_string_group_floating_varchar_trunc(insert_num int, 
c1 decimal(38,18), c2 float, c3 double, b STRING) PARTITIONED BY(part INT);
+CREATE TABLE 
part_change_numeric_group_string_group_floating_varchar_trunc(insert_num int, 
c1 float, c2 double, c3 decimal(38,18), b STRING) PARTITIONED BY(part INT);
 
 insert into table 
part_change_numeric_group_string_group_floating_varchar_trunc partition(part=1)
     values (1, -23866739993, 753.7028, -3651.672121, 'original'),
@@ -634,7 +635,7 @@ drop table part_change_string_group_string_group_varchar;
 --
 --
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: TINYINT, (SMALLINT, INT, BIGINT, DECIMAL, FLOAT, DOUBLE)
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: TINYINT, (SMALLINT, INT, BIGINT, FLOAT, DOUBLE, DECIMAL)
 --
 CREATE TABLE part_change_lower_to_higher_numeric_group_tinyint(insert_num int, 
c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, b 
STRING) PARTITIONED BY(part INT);
 
@@ -647,7 +648,7 @@ insert into table 
part_change_lower_to_higher_numeric_group_tinyint partition(pa
 select insert_num,part,c1,c2,c3,c4,c5,c6,b from 
part_change_lower_to_higher_numeric_group_tinyint order by insert_num;
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table part_change_lower_to_higher_numeric_group_tinyint replace columns 
(insert_num int, c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, 
c6 DOUBLE, b STRING) ;
+alter table part_change_lower_to_higher_numeric_group_tinyint replace columns 
(insert_num int, c1 SMALLINT, c2 INT, c3 BIGINT, c4 FLOAT, c5 DOUBLE, c6 
decimal(38,18), b STRING) ;
 
 insert into table part_change_lower_to_higher_numeric_group_tinyint 
partition(part)
     values (5, '774', '2031', '200', '12', '99', '0', 'new', 2),
@@ -666,7 +667,7 @@ drop table 
part_change_lower_to_higher_numeric_group_tinyint;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: SMALLINT, (INT, BIGINT, DECIMAL, FLOAT, DOUBLE)
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: SMALLINT, (INT, BIGINT, FLOAT, DOUBLE, DECIMAL)
 --
 CREATE TABLE part_change_lower_to_higher_numeric_group_smallint(insert_num 
int, c1 smallint, c2 smallint, c3 smallint, c4 smallint, c5 smallint, b STRING) 
PARTITIONED BY(part INT);
 
@@ -679,7 +680,7 @@ insert into table 
part_change_lower_to_higher_numeric_group_smallint partition(p
 select insert_num,part,c1,c2,c3,c4,c5,b from 
part_change_lower_to_higher_numeric_group_smallint order by insert_num;
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table part_change_lower_to_higher_numeric_group_smallint replace columns 
(insert_num int, c1 INT, c2 BIGINT, c3 decimal(38,18), c4 FLOAT, c5 DOUBLE, b 
STRING) ;
+alter table part_change_lower_to_higher_numeric_group_smallint replace columns 
(insert_num int, c1 INT, c2 BIGINT, c3 FLOAT, c4 DOUBLE, c5 decimal(38,18), b 
STRING) ;
 
 insert into table part_change_lower_to_higher_numeric_group_smallint 
partition(part)
     values (5, '774', '2031', '200', '12', '99', 'new', 2),
@@ -699,7 +700,7 @@ drop table 
part_change_lower_to_higher_numeric_group_smallint;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: INT, (BIGINT, DECIMAL, FLOAT, DOUBLE)
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: INT, (BIGINT, FLOAT, DOUBLE, DECIMAL)
 --
 CREATE TABLE part_change_lower_to_higher_numeric_group_int(insert_num int, c1 
int, c2 int, c3 int, c4 int, b STRING) PARTITIONED BY(part INT);
 
@@ -712,7 +713,7 @@ insert into table 
part_change_lower_to_higher_numeric_group_int partition(part=1
 select insert_num,part,c1,c2,c3,c4,b from 
part_change_lower_to_higher_numeric_group_int order by insert_num;
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table part_change_lower_to_higher_numeric_group_int replace columns 
(insert_num int, c1 BIGINT, c2 decimal(38,18), c3 FLOAT, c4 DOUBLE,  b STRING) ;
+alter table part_change_lower_to_higher_numeric_group_int replace columns 
(insert_num int, c1 BIGINT, c2 FLOAT, c3 DOUBLE, c4 decimal(38,18), b STRING) ;
 
 insert into table part_change_lower_to_higher_numeric_group_int partition(part)
     values (5, '774', '2031', '200', '12', 'new', 2),
@@ -731,7 +732,7 @@ drop table part_change_lower_to_higher_numeric_group_int;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: BIGINT, (DECIMAL, FLOAT, DOUBLE)
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: BIGINT, (FLOAT, DOUBLE, DECIMAL)
 --
 CREATE TABLE part_change_lower_to_higher_numeric_group_bigint(insert_num int, 
c1 bigint, c2 bigint, c3 bigint, b STRING) PARTITIONED BY(part INT);
 
@@ -744,7 +745,7 @@ insert into table 
part_change_lower_to_higher_numeric_group_bigint partition(par
 select insert_num,part,c1,c2,c3,b from 
part_change_lower_to_higher_numeric_group_bigint order by insert_num;
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table part_change_lower_to_higher_numeric_group_bigint replace columns 
(insert_num int, c1 decimal(38,18), c2 FLOAT, c3 DOUBLE, b STRING) ;
+alter table part_change_lower_to_higher_numeric_group_bigint replace columns 
(insert_num int, c1 FLOAT, c2 DOUBLE, c3 decimal(38,18), b STRING) ;
 
 insert into table part_change_lower_to_higher_numeric_group_bigint 
partition(part)
     values (5, '774', '2031', '200', 'new', 2),
@@ -765,58 +766,26 @@ drop table 
part_change_lower_to_higher_numeric_group_bigint;
 --
 -- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: FLOAT, (DOUBLE, DECIMAL)
 --
-CREATE TABLE part_change_lower_to_higher_numeric_group_decimal(insert_num int, 
c1 float, c2 float, b STRING) PARTITIONED BY(part INT);
+CREATE TABLE part_change_lower_to_higher_numeric_group_float(insert_num int, 
c1 float, c2 float, b STRING) PARTITIONED BY(part INT);
 
-insert into table part_change_lower_to_higher_numeric_group_decimal 
partition(part=1)
-    values (1, -29.0764, -29.0764, 'original'),
+insert into table part_change_lower_to_higher_numeric_group_float 
partition(part=1)
+    values (1, -29.0764,  -29.0764, 'original'),
            (2, 753.7028, 753.7028, 'original'),
            (3, -5000, -5000, 'original'),
            (4, 52927714, 52927714, 'original');
 
-select insert_num,part,c1,b from 
part_change_lower_to_higher_numeric_group_decimal order by insert_num;
-
--- Table-Non-Cascade CHANGE COLUMNS ...
-alter table part_change_lower_to_higher_numeric_group_decimal replace columns 
(insert_num int, c1 double, c2 decimal(38,18), b STRING) ;
-
-insert into table part_change_lower_to_higher_numeric_group_decimal 
partition(part)
-    values (5, '7.74', '22.3', 'new', 2),
-           (6, '56.1431', '90.9', 'new', 2),
-           (7, '2.56', '25.6', 'new', 2),
-           (8, '555.5', '55.55', 'new', 2),
-           (9, '10.0', '0.100', 'new', 1),
-           (10, '1.7', '17.8888', 'new', 1);
-
-explain
-select insert_num,part,c1,b from 
part_change_lower_to_higher_numeric_group_decimal order by insert_num;
-
-select insert_num,part,c1,b from 
part_change_lower_to_higher_numeric_group_decimal order by insert_num;
-
-drop table part_change_lower_to_higher_numeric_group_decimal;
-
-
---
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: DOUBLE, (DECIMAL)
---
-CREATE TABLE part_change_lower_to_higher_numeric_group_float(insert_num int, 
c1 double, b STRING) PARTITIONED BY(part INT);
-
-insert into table part_change_lower_to_higher_numeric_group_float 
partition(part=1)
-    values (1, -29.0764, 'original'),
-           (2, 753.7028, 'original'),
-           (3, -5000, 'original'),
-           (4, 52927714, 'original');
-
 select insert_num,part,c1,b from 
part_change_lower_to_higher_numeric_group_float order by insert_num;
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table part_change_lower_to_higher_numeric_group_float replace columns 
(insert_num int, c1 decimal(38,18), b STRING) ;
+alter table part_change_lower_to_higher_numeric_group_float replace columns 
(insert_num int, c1 DOUBLE, c2 decimal(38,18), b STRING) ;
 
 insert into table part_change_lower_to_higher_numeric_group_float 
partition(part)
-    values (5, '774', 'new', 2),
-           (6, '561431', 'new', 2),
-           (7, '256', 'new', 2),
-           (8, '5555', 'new', 2),
-           (9, '100', 'new', 1),
-           (10, '17',  'new', 1);
+    values (5, '774', '774', 'new', 2),
+           (6, '561431', '561431', 'new', 2),
+           (7, '256', '256', 'new', 2),
+           (8, '5555', '5555', 'new', 2),
+           (9, '100', '100', 'new', 1),
+           (10, '17', '17', 'new', 1);
 
 explain
 select insert_num,part,c1,b from 
part_change_lower_to_higher_numeric_group_float order by insert_num;

http://git-wip-us.apache.org/repos/asf/hive/blob/62a3778e/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_complex.q
----------------------------------------------------------------------
diff --git 
a/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_complex.q
 
b/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_complex.q
index 7737abf..ff06067 100644
--- 
a/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_complex.q
+++ 
b/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_complex.q
@@ -10,6 +10,7 @@ SET hive.vectorized.execution.enabled=true;
 set hive.exec.dynamic.partition.mode=nonstrict;
 set hive.metastore.disallow.incompatible.col.type.changes=false;
 set hive.default.fileformat=textfile;
+set hive.llap.io.enabled=false;
 
 -- SORT_QUERY_RESULTS
 --

http://git-wip-us.apache.org/repos/asf/hive/blob/62a3778e/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_primitive.q
----------------------------------------------------------------------
diff --git 
a/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_primitive.q
 
b/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_primitive.q
index 62e1405..b3938cd 100644
--- 
a/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_primitive.q
+++ 
b/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_primitive.q
@@ -10,6 +10,7 @@ SET hive.vectorized.execution.enabled=true;
 set hive.exec.dynamic.partition.mode=nonstrict;
 set hive.metastore.disallow.incompatible.col.type.changes=false;
 set hive.default.fileformat=textfile;
+set hive.llap.io.enabled=false;
 
 -- SORT_QUERY_RESULTS
 --

http://git-wip-us.apache.org/repos/asf/hive/blob/62a3778e/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_table.q
----------------------------------------------------------------------
diff --git 
a/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_table.q 
b/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_table.q
index 7ab00a7..1a074c8 100644
--- a/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_table.q
+++ b/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_table.q
@@ -9,6 +9,7 @@ SET hive.vectorized.execution.enabled=true;
 set hive.exec.dynamic.partition.mode=nonstrict;
 set hive.metastore.disallow.incompatible.col.type.changes=true;
 set hive.default.fileformat=textfile;
+set hive.llap.io.enabled=false;
 
 -- SORT_QUERY_RESULTS
 --
@@ -180,7 +181,8 @@ insert into table table_change_date_group_string_group_date
            (3, '2021-09-24', '2021-09-24', '2021-09-24', '2021-09-24', 
'2021-09-24', 'original');
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table table_change_date_group_string_group_date replace 
columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 
VARCHAR(15), b STRING);DESCRIBE FORMATTED 
table_change_date_group_string_group_date;
+alter table table_change_date_group_string_group_date replace 
columns(insert_num int, c1 STRING, c2 CHAR(50), c3 CHAR(15), c4 VARCHAR(50), c5 
VARCHAR(15), b STRING);
+DESCRIBE FORMATTED table_change_date_group_string_group_date;
 
 insert into table table_change_date_group_string_group_date
     values (4, '0004-09-22', '0004-09-22', '0004-09-22', '0004-09-22', 
'0004-09-22', 'new'),
@@ -368,7 +370,7 @@ drop table 
table_change_numeric_group_string_group_multi_ints_varchar_trunc;
 --
 -- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(FLOAT, DOUBLE, DECIMAL), STRING
 --
-CREATE TABLE 
table_change_numeric_group_string_group_floating_string(insert_num int, c1 
decimal(38,18), c2 float, c3 double, b STRING);
+CREATE TABLE 
table_change_numeric_group_string_group_floating_string(insert_num int, c1 
float, c2 double, c3 decimal(38,18), b STRING);
 
 insert into table table_change_numeric_group_string_group_floating_string
     values (1, -23866739993, 753.7028, -3651.672121, 'original'),
@@ -398,9 +400,9 @@ drop table 
table_change_numeric_group_string_group_floating_string;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(DECIMAL, FLOAT, DOUBLE), CHAR
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(FLOAT, DOUBLE, DECIMAL), CHAR
 --
-CREATE TABLE table_change_numeric_group_string_group_floating_char(insert_num 
int, c1 decimal(38,18), c2 float, c3 double, b STRING);
+CREATE TABLE table_change_numeric_group_string_group_floating_char(insert_num 
int, c1 float, c2 double, c3 decimal(38,18), b STRING);
 
 insert into table table_change_numeric_group_string_group_floating_char
     values (1, -23866739993, 753.7028, -3651.672121, 'original'),
@@ -430,9 +432,9 @@ drop table 
table_change_numeric_group_string_group_floating_char;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(DECIMAL, FLOAT, DOUBLE), CHAR truncation
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(FLOAT, DOUBLE, DECIMAL), CHAR truncation
 --
-CREATE TABLE 
table_change_numeric_group_string_group_floating_char_trunc(insert_num int, c1 
decimal(38,18), c2 float, c3 double, b STRING);
+CREATE TABLE 
table_change_numeric_group_string_group_floating_char_trunc(insert_num int, c1 
float, c2 double, c3 decimal(38,18), b STRING);
 
 insert into table table_change_numeric_group_string_group_floating_char_trunc
     values (1, -23866739993, 753.7028, -3651.672121, 'original'),
@@ -462,7 +464,7 @@ drop table 
table_change_numeric_group_string_group_floating_char_trunc;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(DECIMAL, FLOAT, DOUBLE), VARCHAR
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(FLOAT, DOUBLE, DECIMAL), VARCHAR
 --
 CREATE TABLE 
table_change_numeric_group_string_group_floating_varchar(insert_num int, c1 
float, c2 double, c3 decimal(38,18), b STRING);
 
@@ -494,9 +496,9 @@ drop table 
table_change_numeric_group_string_group_floating_varchar;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(DECIMAL, FLOAT, DOUBLE), VARCHAR truncation
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for NUMERIC_GROUP -> STRING_GROUP: 
(FLOAT, DOUBLE, DECIMAL), VARCHAR truncation
 --
-CREATE TABLE 
table_change_numeric_group_string_group_floating_varchar_trunc(insert_num int, 
c1 decimal(38,18), c2 float, c3 double, b STRING);
+CREATE TABLE 
table_change_numeric_group_string_group_floating_varchar_trunc(insert_num int, 
c1 float, c2 double, c3 decimal(38,18), b STRING);
 
 insert into table 
table_change_numeric_group_string_group_floating_varchar_trunc
     values (1, -23866739993, 753.7028, -3651.672121, 'original'),
@@ -633,7 +635,7 @@ drop table table_change_string_group_string_group_varchar;
 --
 --
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: TINYINT, (SMALLINT, INT, BIGINT, DECIMAL, FLOAT, DOUBLE)
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: TINYINT, (SMALLINT, INT, BIGINT, FLOAT, DOUBLE, DECIMAL)
 --
 CREATE TABLE table_change_lower_to_higher_numeric_group_tinyint(insert_num 
int, c1 tinyint, c2 tinyint, c3 tinyint, c4 tinyint, c5 tinyint, c6 tinyint, b 
STRING);
 
@@ -646,7 +648,7 @@ insert into table 
table_change_lower_to_higher_numeric_group_tinyint
 select insert_num,c1,c2,c3,c4,c5,c6,b from 
table_change_lower_to_higher_numeric_group_tinyint order by insert_num;
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table table_change_lower_to_higher_numeric_group_tinyint replace columns 
(insert_num int, c1 SMALLINT, c2 INT, c3 BIGINT, c4 decimal(38,18), c5 FLOAT, 
c6 DOUBLE, b STRING) ;
+alter table table_change_lower_to_higher_numeric_group_tinyint replace columns 
(insert_num int, c1 SMALLINT, c2 INT, c3 BIGINT, c4 FLOAT, c5 DOUBLE, c6 
decimal(38,18), b STRING) ;
 
 insert into table table_change_lower_to_higher_numeric_group_tinyint
     values (5, '774', '2031', '200', '12', '99', '0', 'new'),
@@ -665,7 +667,7 @@ drop table 
table_change_lower_to_higher_numeric_group_tinyint;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: SMALLINT, (INT, BIGINT, DECIMAL, FLOAT, DOUBLE)
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: SMALLINT, (INT, BIGINT, FLOAT, DOUBLE, DECIMAL)
 --
 CREATE TABLE table_change_lower_to_higher_numeric_group_smallint(insert_num 
int, c1 smallint, c2 smallint, c3 smallint, c4 smallint, c5 smallint, b STRING);
 
@@ -678,7 +680,7 @@ insert into table 
table_change_lower_to_higher_numeric_group_smallint
 select insert_num,c1,c2,c3,c4,c5,b from 
table_change_lower_to_higher_numeric_group_smallint order by insert_num;
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table table_change_lower_to_higher_numeric_group_smallint replace 
columns (insert_num int, c1 INT, c2 BIGINT, c3 decimal(38,18), c4 FLOAT, c5 
DOUBLE, b STRING) ;
+alter table table_change_lower_to_higher_numeric_group_smallint replace 
columns (insert_num int, c1 INT, c2 BIGINT, c3 FLOAT, c4 DOUBLE, c5 
decimal(38,18), b STRING) ;
 
 insert into table table_change_lower_to_higher_numeric_group_smallint
     values (5, '774', '2031', '200', '12', '99', 'new'),
@@ -698,7 +700,7 @@ drop table 
table_change_lower_to_higher_numeric_group_smallint;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: INT, (BIGINT, DECIMAL, FLOAT, DOUBLE)
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: INT, (BIGINT, FLOAT, DOUBLE, DECIMAL)
 --
 CREATE TABLE table_change_lower_to_higher_numeric_group_int(insert_num int, c1 
int, c2 int, c3 int, c4 int, b STRING);
 
@@ -711,7 +713,7 @@ insert into table 
table_change_lower_to_higher_numeric_group_int
 select insert_num,c1,c2,c3,c4,b from 
table_change_lower_to_higher_numeric_group_int order by insert_num;
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table table_change_lower_to_higher_numeric_group_int replace columns 
(insert_num int, c1 BIGINT, c2 decimal(38,18), c3 FLOAT, c4 DOUBLE,  b STRING) ;
+alter table table_change_lower_to_higher_numeric_group_int replace columns 
(insert_num int, c1 BIGINT, c2 FLOAT, c3 DOUBLE, c4 decimal(38,18), b STRING) ;
 
 insert into table table_change_lower_to_higher_numeric_group_int
     values (5, '774', '2031', '200', '12', 'new'),
@@ -730,7 +732,7 @@ drop table table_change_lower_to_higher_numeric_group_int;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: BIGINT, (DECIMAL, FLOAT, DOUBLE)
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: BIGINT, (FLOAT, DOUBLE, DECIMAL)
 --
 CREATE TABLE table_change_lower_to_higher_numeric_group_bigint(insert_num int, 
c1 bigint, c2 bigint, c3 bigint, b STRING);
 
@@ -743,7 +745,7 @@ insert into table 
table_change_lower_to_higher_numeric_group_bigint
 select insert_num,c1,c2,c3,b from 
table_change_lower_to_higher_numeric_group_bigint order by insert_num;
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table table_change_lower_to_higher_numeric_group_bigint replace columns 
(insert_num int, c1 decimal(38,18), c2 FLOAT, c3 DOUBLE, b STRING) ;
+alter table table_change_lower_to_higher_numeric_group_bigint replace columns 
(insert_num int, c1 FLOAT, c2 DOUBLE, c3 decimal(38,18), b STRING) ;
 
 insert into table table_change_lower_to_higher_numeric_group_bigint
     values (5, '774', '2031', '200', 'new'),
@@ -762,22 +764,22 @@ drop table 
table_change_lower_to_higher_numeric_group_bigint;
 
 
 --
--- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: FLOAT, (DOUBLE, DECIMAL)
+-- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: FLOAT, (DOUBLE, DECIMAL)
 --
-CREATE TABLE table_change_lower_to_higher_numeric_group_decimal(insert_num 
int, c1 float, c2 float, b STRING);
+CREATE TABLE table_change_lower_to_higher_numeric_group_float(insert_num int, 
c1 float, c2 float, b STRING);
 
-insert into table table_change_lower_to_higher_numeric_group_decimal
+insert into table table_change_lower_to_higher_numeric_group_float
     values (1, -29.0764, -29.0764, 'original'),
            (2, 753.7028, 753.7028, 'original'),
            (3, -5000, -5000, 'original'),
            (4, 52927714, 52927714, 'original');
 
-select insert_num,c1,c2,b from 
table_change_lower_to_higher_numeric_group_decimal order by insert_num;
+select insert_num,c1,c2,b from 
table_change_lower_to_higher_numeric_group_float order by insert_num;
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table table_change_lower_to_higher_numeric_group_decimal replace columns 
(insert_num int, c1 double, c2 decimal(38,18), b STRING) ;
+alter table table_change_lower_to_higher_numeric_group_float replace columns 
(insert_num int, c1 double, c2 decimal(38,18), b STRING) ;
 
-insert into table table_change_lower_to_higher_numeric_group_decimal
+insert into table table_change_lower_to_higher_numeric_group_float
     values (5, '7.74', '22.3', 'new'),
            (6, '56.1431', '90.9', 'new'),
            (7, '2.56', '25.6', 'new'),
@@ -786,31 +788,31 @@ insert into table 
table_change_lower_to_higher_numeric_group_decimal
            (10, '1.7', '17.8888', 'new');
 
 explain
-select insert_num,c1,c2,b from 
table_change_lower_to_higher_numeric_group_decimal order by insert_num;
+select insert_num,c1,c2,b from 
table_change_lower_to_higher_numeric_group_float order by insert_num;
 
-select insert_num,c1,c2,b from 
table_change_lower_to_higher_numeric_group_decimal order by insert_num;
+select insert_num,c1,c2,b from 
table_change_lower_to_higher_numeric_group_float order by insert_num;
 
-drop table table_change_lower_to_higher_numeric_group_decimal;
+drop table table_change_lower_to_higher_numeric_group_float;
 
 
 
 --
 -- SUBSECTION: ALTER TABLE CHANGE COLUMNS for "lower" type to "higher" 
NUMERIC_GROUP: DOUBLE, (DECIMAL)
 --
-CREATE TABLE table_change_lower_to_higher_numeric_group_float(insert_num int, 
c1 double, b STRING);
+CREATE TABLE table_change_lower_to_higher_numeric_group_double(insert_num int, 
c1 double, b STRING);
 
-insert into table table_change_lower_to_higher_numeric_group_float
+insert into table table_change_lower_to_higher_numeric_group_double
     values (1, -29.0764, 'original'),
            (2, 753.7028, 'original'),
            (3, -5000, 'original'),
            (4, 52927714, 'original');
 
-select insert_num,c1,b from table_change_lower_to_higher_numeric_group_float 
order by insert_num;
+select insert_num,c1,b from table_change_lower_to_higher_numeric_group_double 
order by insert_num;
 
 -- Table-Non-Cascade CHANGE COLUMNS ...
-alter table table_change_lower_to_higher_numeric_group_float replace columns 
(insert_num int, c1 decimal(38,18), b STRING) ;
+alter table table_change_lower_to_higher_numeric_group_double replace columns 
(insert_num int, c1 decimal(38,18), b STRING) ;
 
-insert into table table_change_lower_to_higher_numeric_group_float
+insert into table table_change_lower_to_higher_numeric_group_double
     values (5, '774', 'new'),
            (6, '561431', 'new'),
            (7, '256', 'new'),
@@ -819,8 +821,8 @@ insert into table 
table_change_lower_to_higher_numeric_group_float
            (10, '17', 'new');
 
 explain
-select insert_num,c1,b from table_change_lower_to_higher_numeric_group_float 
order by insert_num;
+select insert_num,c1,b from table_change_lower_to_higher_numeric_group_double 
order by insert_num;
 
-select insert_num,c1,b from table_change_lower_to_higher_numeric_group_float 
order by insert_num;
+select insert_num,c1,b from table_change_lower_to_higher_numeric_group_double 
order by insert_num;
 
-drop table table_change_lower_to_higher_numeric_group_float;
\ No newline at end of file
+drop table table_change_lower_to_higher_numeric_group_double;

http://git-wip-us.apache.org/repos/asf/hive/blob/62a3778e/ql/src/test/queries/clientpositive/stats_list_bucket.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/stats_list_bucket.q 
b/ql/src/test/queries/clientpositive/stats_list_bucket.q
index 536702c..2b8f230 100644
--- a/ql/src/test/queries/clientpositive/stats_list_bucket.q
+++ b/ql/src/test/queries/clientpositive/stats_list_bucket.q
@@ -19,7 +19,7 @@ stored as rcfile;
 insert overwrite table stats_list_bucket partition (ds = '2008-04-08',  hr = 
'11')
   select key, value from src;
 
-desc formatted stats_list_bucket partition (ds = '2008-04-08',  hr = '11');
+desc stats_list_bucket partition (ds = '2008-04-08',  hr = '11');
 
 -- Also try non-partitioned table with list bucketing.
 -- Stats should show the same number of rows.
@@ -35,7 +35,7 @@ stored as rcfile;
 insert overwrite table stats_list_bucket_1
   select key, value from src;
 
-desc formatted stats_list_bucket_1;
+desc stats_list_bucket_1;
 
 drop table stats_list_bucket;
 drop table stats_list_bucket_1;

http://git-wip-us.apache.org/repos/asf/hive/blob/62a3778e/ql/src/test/queries/clientpositive/stats_null_optimizer.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/stats_null_optimizer.q 
b/ql/src/test/queries/clientpositive/stats_null_optimizer.q
index 1114e5a..de2817a 100644
--- a/ql/src/test/queries/clientpositive/stats_null_optimizer.q
+++ b/ql/src/test/queries/clientpositive/stats_null_optimizer.q
@@ -1,3 +1,5 @@
+set hive.compute.query.using.stats=true;
+
 explain select count(key) from (select null as key from src)src;
 
 select count(key) from (select null as key from src)src;

http://git-wip-us.apache.org/repos/asf/hive/blob/62a3778e/ql/src/test/queries/clientpositive/subquery_with_or_cond.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/subquery_with_or_cond.q 
b/ql/src/test/queries/clientpositive/subquery_with_or_cond.q
new file mode 100644
index 0000000..24ab9ba
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/subquery_with_or_cond.q
@@ -0,0 +1,5 @@
+
+select count(*) 
+from src 
+where src.key in (select key from src s1 where s1.key > '9') or src.value is 
not null or exists(select key from src);
+;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/62a3778e/ql/src/test/queries/clientpositive/udtf_explode.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/udtf_explode.q 
b/ql/src/test/queries/clientpositive/udtf_explode.q
index cc69dbe..74c614e 100644
--- a/ql/src/test/queries/clientpositive/udtf_explode.q
+++ b/ql/src/test/queries/clientpositive/udtf_explode.q
@@ -4,15 +4,15 @@ set hive.fetch.task.conversion=more;
 DESCRIBE FUNCTION explode;
 DESCRIBE FUNCTION EXTENDED explode;
 
-EXPLAIN EXTENDED SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3;
-EXPLAIN EXTENDED SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) 
AS myCol FROM src LIMIT 3) a GROUP BY a.myCol;
+EXPLAIN SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3;
+EXPLAIN SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol 
FROM src LIMIT 3) a GROUP BY a.myCol;
 
 SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3;
 SELECT explode(array(1,2,3)) AS (myCol) FROM src LIMIT 3;
 SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src 
LIMIT 3) a GROUP BY a.myCol;
 
 EXPLAIN SELECT explode(map(1,'one',2,'two',3,'three')) as (myKey,myVal) FROM 
src LIMIT 3;
-EXPLAIN EXTENDED SELECT a.myKey, a.myVal, count(1) FROM (SELECT 
explode(map(1,'one',2,'two',3,'three')) as (myKey,myVal) FROM src LIMIT 3) a 
GROUP BY a.myKey, a.myVal;
+EXPLAIN SELECT a.myKey, a.myVal, count(1) FROM (SELECT 
explode(map(1,'one',2,'two',3,'three')) as (myKey,myVal) FROM src LIMIT 3) a 
GROUP BY a.myKey, a.myVal;
 
 SELECT explode(map(1,'one',2,'two',3,'three')) as (myKey,myVal) FROM src LIMIT 
3;
 SELECT a.myKey, a.myVal, count(1) FROM (SELECT 
explode(map(1,'one',2,'two',3,'three')) as (myKey,myVal) FROM src LIMIT 3) a 
GROUP BY a.myKey, a.myVal;

http://git-wip-us.apache.org/repos/asf/hive/blob/62a3778e/ql/src/test/queries/clientpositive/union36.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/union36.q 
b/ql/src/test/queries/clientpositive/union36.q
index f050e1a..3a66c0c 100644
--- a/ql/src/test/queries/clientpositive/union36.q
+++ b/ql/src/test/queries/clientpositive/union36.q
@@ -10,3 +10,4 @@ select (x/sum(x) over()) as y from(select cast(1 as 
decimal(10,0))  as x from (s
 
 
 
+

http://git-wip-us.apache.org/repos/asf/hive/blob/62a3778e/ql/src/test/results/clientnegative/avro_non_nullable_union.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/avro_non_nullable_union.q.out 
b/ql/src/test/results/clientnegative/avro_non_nullable_union.q.out
index 6c087b8..c933081 100644
--- a/ql/src/test/results/clientnegative/avro_non_nullable_union.q.out
+++ b/ql/src/test/results/clientnegative/avro_non_nullable_union.q.out
@@ -1,10 +1,6 @@
-PREHOOK: query: -- verify that Hive fails to read a union type column from 
avro file with null union data if AVRO schema definition is not nullable
-
-DROP TABLE IF EXISTS union_nullable_test_text
+PREHOOK: query: DROP TABLE IF EXISTS union_nullable_test_text
 PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- verify that Hive fails to read a union type column from 
avro file with null union data if AVRO schema definition is not nullable
-
-DROP TABLE IF EXISTS union_nullable_test_text
+POSTHOOK: query: DROP TABLE IF EXISTS union_nullable_test_text
 POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE union_nullable_test_text (id int, value 
uniontype<int,double>) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' 
COLLECTION ITEMS TERMINATED BY ':' STORED AS textfile
 PREHOOK: type: CREATETABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/62a3778e/ql/src/test/results/clientpositive/autoColumnStats_4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/autoColumnStats_4.q.out 
b/ql/src/test/results/clientpositive/autoColumnStats_4.q.out
index c7b9b4f..0382f27 100644
--- a/ql/src/test/results/clientpositive/autoColumnStats_4.q.out
+++ b/ql/src/test/results/clientpositive/autoColumnStats_4.q.out
@@ -178,42 +178,62 @@ POSTHOOK: Input: default@alltypesorc
 POSTHOOK: Output: default@acid_dtt
 POSTHOOK: Lineage: acid_dtt.a SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
 POSTHOOK: Lineage: acid_dtt.b EXPRESSION 
[(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, 
comment:null), ]
-PREHOOK: query: desc formatted acid_dtt
-PREHOOK: type: DESCTABLE
+PREHOOK: query: alter table acid_dtt update statistics set('numRows'='430', 
'rawDataSize'='1722')
+PREHOOK: type: ALTERTABLE_UPDATETABLESTATS
 PREHOOK: Input: default@acid_dtt
-POSTHOOK: query: desc formatted acid_dtt
-POSTHOOK: type: DESCTABLE
+PREHOOK: Output: default@acid_dtt
+POSTHOOK: query: alter table acid_dtt update statistics set('numRows'='430', 
'rawDataSize'='1722')
+POSTHOOK: type: ALTERTABLE_UPDATETABLESTATS
 POSTHOOK: Input: default@acid_dtt
-# col_name             data_type               comment             
-                
-a                      int                                         
-b                      varchar(128)                                
-                
-# Detailed Table Information            
-Database:              default                  
-#### A masked pattern was here ####
-Retention:             0                        
-#### A masked pattern was here ####
-Table Type:            MANAGED_TABLE            
-Table Parameters:               
-       COLUMN_STATS_ACCURATE   
{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\"}}
-       numFiles                2                   
-       numRows                 0                   
-       rawDataSize             0                   
-       totalSize               1714                
-       transactional           true                
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.ql.io.orc.OrcSerde        
-InputFormat:           org.apache.hadoop.hive.ql.io.orc.OrcInputFormat  
-OutputFormat:          org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat        
 
-Compressed:            No                       
-Num Buckets:           2                        
-Bucket Columns:        [a]                      
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
+POSTHOOK: Output: default@acid_dtt
+PREHOOK: query: explain select max(a) from acid_dtt
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select max(a) from acid_dtt
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: acid_dtt
+            Statistics: Num rows: 430 Data size: 1722 Basic stats: COMPLETE 
Column stats: NONE
+            Select Operator
+              expressions: a (type: int)
+              outputColumnNames: a
+              Statistics: Num rows: 430 Data size: 1722 Basic stats: COMPLETE 
Column stats: NONE
+              Group By Operator
+                aggregations: max(a)
+                mode: hash
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE 
Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE 
Column stats: NONE
+                  value expressions: _col0 (type: int)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: max(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column 
stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column 
stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
 PREHOOK: query: delete from acid_dtt where b = '0ruyd6Y50JpdGRf6HqD' or b = 
'2uLyD28144vklju213J1mr'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@acid_dtt
@@ -222,38 +242,51 @@ POSTHOOK: query: delete from acid_dtt where b = 
'0ruyd6Y50JpdGRf6HqD' or b = '2u
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@acid_dtt
 POSTHOOK: Output: default@acid_dtt
-PREHOOK: query: desc formatted acid_dtt
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@acid_dtt
-POSTHOOK: query: desc formatted acid_dtt
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@acid_dtt
-# col_name             data_type               comment             
-                
-a                      int                                         
-b                      varchar(128)                                
-                
-# Detailed Table Information            
-Database:              default                  
-#### A masked pattern was here ####
-Retention:             0                        
-#### A masked pattern was here ####
-Table Type:            MANAGED_TABLE            
-Table Parameters:               
-       numFiles                4                   
-       numRows                 0                   
-       rawDataSize             0                   
-       totalSize               2719                
-       transactional           true                
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.ql.io.orc.OrcSerde        
-InputFormat:           org.apache.hadoop.hive.ql.io.orc.OrcInputFormat  
-OutputFormat:          org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat        
 
-Compressed:            No                       
-Num Buckets:           2                        
-Bucket Columns:        [a]                      
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
+PREHOOK: query: explain select max(a) from acid_dtt
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select max(a) from acid_dtt
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: acid_dtt
+            Statistics: Num rows: 430 Data size: 1722 Basic stats: COMPLETE 
Column stats: NONE
+            Select Operator
+              expressions: a (type: int)
+              outputColumnNames: a
+              Statistics: Num rows: 430 Data size: 1722 Basic stats: COMPLETE 
Column stats: NONE
+              Group By Operator
+                aggregations: max(a)
+                mode: hash
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE 
Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE 
Column stats: NONE
+                  value expressions: _col0 (type: int)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: max(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column 
stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column 
stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+

http://git-wip-us.apache.org/repos/asf/hive/blob/62a3778e/ql/src/test/results/clientpositive/avro_nullable_union.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/avro_nullable_union.q.out 
b/ql/src/test/results/clientpositive/avro_nullable_union.q.out
index b80182e..d97aacc 100644
--- a/ql/src/test/results/clientpositive/avro_nullable_union.q.out
+++ b/ql/src/test/results/clientpositive/avro_nullable_union.q.out
@@ -1,10 +1,6 @@
-PREHOOK: query: -- verify that we can write a nullable union type column with 
both nullable and non-nullable data
-
-DROP TABLE IF EXISTS union_nullable_test_text
+PREHOOK: query: DROP TABLE IF EXISTS union_nullable_test_text
 PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- verify that we can write a nullable union type column with 
both nullable and non-nullable data
-
-DROP TABLE IF EXISTS union_nullable_test_text
+POSTHOOK: query: DROP TABLE IF EXISTS union_nullable_test_text
 POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE union_nullable_test_text (id int, value 
uniontype<int,double>) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' 
COLLECTION ITEMS TERMINATED BY ':' STORED AS textfile
 PREHOOK: type: CREATETABLE
@@ -71,13 +67,9 @@ POSTHOOK: query: DROP TABLE union_nullable_test_text
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Input: default@union_nullable_test_text
 POSTHOOK: Output: default@union_nullable_test_text
-PREHOOK: query: -- verify that we can write a non nullable union type column 
with non-nullable data
-
-DROP TABLE IF EXISTS union_non_nullable_test_text
+PREHOOK: query: DROP TABLE IF EXISTS union_non_nullable_test_text
 PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- verify that we can write a non nullable union type column 
with non-nullable data
-
-DROP TABLE IF EXISTS union_non_nullable_test_text
+POSTHOOK: query: DROP TABLE IF EXISTS union_non_nullable_test_text
 POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE union_non_nullable_test_text (id int, value 
uniontype<int,double>) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' 
COLLECTION ITEMS TERMINATED BY ':' STORED AS textfile
 PREHOOK: type: CREATETABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/62a3778e/ql/src/test/results/clientpositive/avrocountemptytbl.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/avrocountemptytbl.q.out 
b/ql/src/test/results/clientpositive/avrocountemptytbl.q.out
index 72e8489..4270aea 100644
--- a/ql/src/test/results/clientpositive/avrocountemptytbl.q.out
+++ b/ql/src/test/results/clientpositive/avrocountemptytbl.q.out
@@ -54,5 +54,5 @@ POSTHOOK: query: select * from emptyavro
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@emptyavro
 #### A masked pattern was here ####
-0
 100
+0

http://git-wip-us.apache.org/repos/asf/hive/blob/62a3778e/ql/src/test/results/clientpositive/cbo_union_view.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/cbo_union_view.q.out 
b/ql/src/test/results/clientpositive/cbo_union_view.q.out
index c8504a1..b04aaff 100644
--- a/ql/src/test/results/clientpositive/cbo_union_view.q.out
+++ b/ql/src/test/results/clientpositive/cbo_union_view.q.out
@@ -66,8 +66,6 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: src_union_1
-            properties:
-              insideView TRUE
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
             Filter Operator
               predicate: (key = 86) (type: boolean)
@@ -91,8 +89,6 @@ STAGE PLANS:
                           serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           TableScan
             alias: src_union_2
-            properties:
-              insideView TRUE
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
             Filter Operator
               predicate: (key = 86) (type: boolean)
@@ -116,8 +112,6 @@ STAGE PLANS:
                           serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           TableScan
             alias: src_union_3
-            properties:
-              insideView TRUE
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
             Filter Operator
               predicate: (key = 86) (type: boolean)
@@ -160,8 +154,6 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: src_union_1
-            properties:
-              insideView TRUE
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
             Filter Operator
               predicate: ((key = 86) and (ds = '1')) (type: boolean)
@@ -185,8 +177,6 @@ STAGE PLANS:
                           serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           TableScan
             alias: src_union_2
-            properties:
-              insideView TRUE
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
             Filter Operator
               predicate: ((key = 86) and (ds = '1')) (type: boolean)
@@ -210,8 +200,6 @@ STAGE PLANS:
                           serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           TableScan
             alias: src_union_3
-            properties:
-              insideView TRUE
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
             Filter Operator
               predicate: ((key = 86) and (ds = '1')) (type: boolean)

http://git-wip-us.apache.org/repos/asf/hive/blob/62a3778e/ql/src/test/results/clientpositive/druid_basic2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid_basic2.q.out 
b/ql/src/test/results/clientpositive/druid_basic2.q.out
index 3f478c2..a1a26cc 100644
--- a/ql/src/test/results/clientpositive/druid_basic2.q.out
+++ b/ql/src/test/results/clientpositive/druid_basic2.q.out
@@ -76,9 +76,6 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: druid_table_1
-          properties:
-            druid.query.json 
{"queryType":"select","dataSource":"wikipedia","descending":false,"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"dimensions":["robot"],"metrics":[],"granularity":"all","pagingSpec":{"threshold":16384},"context":{"druid.query.fetch":false}}
-            druid.query.type select
           Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
           GatherStats: false
           Select Operator
@@ -103,9 +100,6 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: druid_table_1
-          properties:
-            druid.query.json 
{"queryType":"select","dataSource":"wikipedia","descending":false,"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"dimensions":[],"metrics":["delta"],"granularity":"all","pagingSpec":{"threshold":16384},"context":{"druid.query.fetch":false}}
-            druid.query.type select
           Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
           GatherStats: false
           Select Operator
@@ -134,9 +128,6 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: druid_table_1
-          properties:
-            druid.query.json 
{"queryType":"select","dataSource":"wikipedia","descending":false,"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"selector","dimension":"language","value":"en"},"dimensions":["robot"],"metrics":[],"granularity":"all","pagingSpec":{"threshold":16384},"context":{"druid.query.fetch":false}}
-            druid.query.type select
           Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
           GatherStats: false
           Select Operator
@@ -165,9 +156,6 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: druid_table_1
-          properties:
-            druid.query.json 
{"queryType":"groupBy","dataSource":"wikipedia","granularity":"all","dimensions":["robot"],"limitSpec":{"type":"default"},"filter":{"type":"selector","dimension":"language","value":"en"},"aggregations":[{"type":"longSum","name":"dummy_agg","fieldName":"dummy_agg"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
-            druid.query.type groupBy
           Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
           GatherStats: false
           Select Operator
@@ -211,9 +199,6 @@ STAGE PLANS:
           TableScan
             alias: druid_table_1
             filterExpr: language is not null (type: boolean)
-            properties:
-              druid.query.json 
{"queryType":"select","dataSource":"wikipedia","descending":false,"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"dimensions":["robot","namespace","anonymous","unpatrolled","page","language","newpage","user"],"metrics":["count","added","delta","variation","deleted"],"granularity":"all","pagingSpec":{"threshold":16384},"context":{"druid.query.fetch":false}}
-              druid.query.type select
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
             GatherStats: false
             Filter Operator
@@ -236,9 +221,6 @@ STAGE PLANS:
           TableScan
             alias: druid_table_1
             filterExpr: language is not null (type: boolean)
-            properties:
-              druid.query.json 
{"queryType":"select","dataSource":"wikipedia","descending":false,"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"dimensions":["robot","namespace","anonymous","unpatrolled","page","language","newpage","user"],"metrics":["count","added","delta","variation","deleted"],"granularity":"all","pagingSpec":{"threshold":16384},"context":{"druid.query.fetch":false}}
-              druid.query.type select
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
             GatherStats: false
             Filter Operator
@@ -396,9 +378,6 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: druid_table_1
-            properties:
-              druid.query.json 
{"queryType":"select","dataSource":"wikipedia","descending":false,"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"selector","dimension":"language","value":"en"},"dimensions":[],"metrics":[],"granularity":"all","pagingSpec":{"threshold":16384},"context":{"druid.query.fetch":false}}
-              druid.query.type select
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: COMPLETE
             GatherStats: false
             Select Operator
@@ -411,9 +390,6 @@ STAGE PLANS:
                 auto parallelism: false
           TableScan
             alias: druid_table_1
-            properties:
-              druid.query.json 
{"queryType":"select","dataSource":"wikipedia","descending":false,"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"selector","dimension":"language","value":"en"},"dimensions":["robot"],"metrics":[],"granularity":"all","pagingSpec":{"threshold":16384},"context":{"druid.query.fetch":false}}
-              druid.query.type select
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
             GatherStats: false
             Reduce Output Operator
@@ -541,7 +517,8 @@ LIMIT 10
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
 
 STAGE PLANS:
   Stage: Stage-1
@@ -549,25 +526,27 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: druid_table_1
-            properties:
-              druid.query.json 
{"queryType":"groupBy","dataSource":"wikipedia","granularity":"day","dimensions":["robot","language"],"limitSpec":{"type":"default"},"aggregations":[{"type":"doubleMax","name":"$f3","fieldName":"added"},{"type":"doubleSum","name":"$f4","fieldName":"delta"}],"intervals":["1900-01-01T00:00:00.000/3000-01-01T00:00:00.000"]}
-              druid.query.type groupBy
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
             GatherStats: false
             Select Operator
-              expressions: robot (type: string), __time (type: timestamp), $f3 
(type: float), $f4 (type: float)
-              outputColumnNames: _col0, _col1, _col2, _col3
+              expressions: robot (type: string), language (type: string), 
__time (type: timestamp), added (type: float), delta (type: float)
+              outputColumnNames: robot, language, __time, added, delta
               Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
-              Reduce Output Operator
-                key expressions: UDFToInteger(_col0) (type: int), _col2 (type: 
float)
-                null sort order: az
-                sort order: +-
+              Group By Operator
+                aggregations: max(added), sum(delta)
+                keys: robot (type: string), language (type: string), 
floor_day(__time) (type: timestamp)
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4
                 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
-                tag: -1
-                TopN: 10
-                TopN Hash Memory Usage: 0.1
-                value expressions: _col0 (type: string), _col1 (type: 
timestamp), _col3 (type: float)
-                auto parallelism: false
+                Reduce Output Operator
+                  key expressions: _col0 (type: string), _col1 (type: string), 
_col2 (type: timestamp)
+                  null sort order: aaa
+                  sort order: +++
+                  Map-reduce partition columns: _col0 (type: string), _col1 
(type: string), _col2 (type: timestamp)
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
+                  tag: -1
+                  value expressions: _col3 (type: float), _col4 (type: double)
+                  auto parallelism: false
       Path -> Alias:
 #### A masked pattern was here ####
       Path -> Partition:
@@ -584,8 +563,6 @@ STAGE PLANS:
               columns.comments 'from deserializer','from deserializer','from 
deserializer','from deserializer','from deserializer','from deserializer','from 
deserializer','from deserializer','from deserializer','from deserializer','from 
deserializer','from deserializer','from deserializer','from deserializer'
               columns.types 
timestamp:string:string:string:string:string:string:string:string:float:float:float:float:float
               druid.datasource wikipedia
-              druid.query.json 
{"queryType":"groupBy","dataSource":"wikipedia","granularity":"day","dimensions":["robot","language"],"limitSpec":{"type":"default"},"aggregations":[{"type":"doubleMax","name":"$f3","fieldName":"added"},{"type":"doubleSum","name":"$f4","fieldName":"delta"}],"intervals":["1900-01-01T00:00:00.000/3000-01-01T00:00:00.000"]}
-              druid.query.type groupBy
 #### A masked pattern was here ####
               name default.druid_table_1
               numFiles 0
@@ -609,8 +586,6 @@ STAGE PLANS:
                 columns.comments 'from deserializer','from deserializer','from 
deserializer','from deserializer','from deserializer','from deserializer','from 
deserializer','from deserializer','from deserializer','from deserializer','from 
deserializer','from deserializer','from deserializer','from deserializer'
                 columns.types 
timestamp:string:string:string:string:string:string:string:string:float:float:float:float:float
                 druid.datasource wikipedia
-                druid.query.json 
{"queryType":"groupBy","dataSource":"wikipedia","granularity":"day","dimensions":["robot","language"],"limitSpec":{"type":"default"},"aggregations":[{"type":"doubleMax","name":"$f3","fieldName":"added"},{"type":"doubleSum","name":"$f4","fieldName":"delta"}],"intervals":["1900-01-01T00:00:00.000/3000-01-01T00:00:00.000"]}
-                druid.query.type groupBy
 #### A masked pattern was here ####
                 name default.druid_table_1
                 numFiles 0
@@ -629,8 +604,78 @@ STAGE PLANS:
         /druid_table_1 [druid_table_1]
       Needs Tagging: false
       Reduce Operator Tree:
+        Group By Operator
+          aggregations: max(VALUE._col0), sum(VALUE._col1)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 
(type: timestamp)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
+          Select Operator
+            expressions: _col0 (type: string), _col2 (type: timestamp), _col3 
(type: float), _col4 (type: double)
+            outputColumnNames: _col0, _col1, _col2, _col3
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+#### A masked pattern was here ####
+              NumFilesPerFileSink: 1
+              table:
+                  input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  properties:
+                    columns _col0,_col1,_col2,_col3
+                    columns.types string,timestamp,float,double
+                    escape.delim \
+                    serialization.lib 
org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+                  serde: 
org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            GatherStats: false
+            Reduce Output Operator
+              key expressions: UDFToInteger(_col0) (type: int), _col2 (type: 
float)
+              null sort order: az
+              sort order: +-
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
+              tag: -1
+              TopN: 10
+              TopN Hash Memory Usage: 0.1
+              value expressions: _col0 (type: string), _col1 (type: 
timestamp), _col3 (type: double)
+              auto parallelism: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: -mr-10004
+            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+            output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+            properties:
+              columns _col0,_col1,_col2,_col3
+              columns.types string,timestamp,float,double
+              escape.delim \
+              serialization.lib 
org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+            serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+          
+              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+              output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+              properties:
+                columns _col0,_col1,_col2,_col3
+                columns.types string,timestamp,float,double
+                escape.delim \
+                serialization.lib 
org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+              serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+      Truncated Path -> Alias:
+#### A masked pattern was here ####
+      Needs Tagging: false
+      Reduce Operator Tree:
         Select Operator
-          expressions: VALUE._col0 (type: string), VALUE._col1 (type: 
timestamp), KEY.reducesinkkey1 (type: float), VALUE._col2 (type: float)
+          expressions: VALUE._col0 (type: string), VALUE._col1 (type: 
timestamp), KEY.reducesinkkey1 (type: float), VALUE._col2 (type: double)
           outputColumnNames: _col0, _col1, _col2, _col3
           Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
           Limit
@@ -648,7 +693,7 @@ STAGE PLANS:
                   output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                   properties:
                     columns _col0,_col1,_col2,_col3
-                    columns.types string:timestamp:float:float
+                    columns.types string:timestamp:float:double
                     escape.delim \
                     hive.serialization.extend.additional.nesting.levels true
                     serialization.escape.crlf true

Reply via email to