http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/column_access_stats.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/column_access_stats.q b/ql/src/test/queries/clientpositive/column_access_stats.q
index 8a7f476..78a3e38 100644
--- a/ql/src/test/queries/clientpositive/column_access_stats.q
+++ b/ql/src/test/queries/clientpositive/column_access_stats.q
@@ -6,114 +6,114 @@ SET hive.stats.collect.scancols=true;
 -- SORT_QUERY_RESULTS
 -- This test is used for testing the ColumnAccessAnalyzer
 
-CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
+CREATE TABLE T1_n127(key STRING, val STRING) STORED AS TEXTFILE;
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n127;
 
-CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE;
-CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE;
-CREATE TABLE T4(key STRING, val STRING) PARTITIONED BY (p STRING);
+CREATE TABLE T2_n75(key STRING, val STRING) STORED AS TEXTFILE;
+CREATE TABLE T3_n29(key STRING, val STRING) STORED AS TEXTFILE;
+CREATE TABLE T4_n16(key STRING, val STRING) PARTITIONED BY (p STRING);
 
 -- Simple select queries
-SELECT key FROM T1;
-SELECT key, val FROM T1;
-SELECT 1 FROM T1;
-SELECT key, val from T4 where p=1;
-SELECT val FROM T4 where p=1;
-SELECT p, val FROM T4 where p=1;
+SELECT key FROM T1_n127;
+SELECT key, val FROM T1_n127;
+SELECT 1 FROM T1_n127;
+SELECT key, val from T4_n16 where p=1;
+SELECT val FROM T4_n16 where p=1;
+SELECT p, val FROM T4_n16 where p=1;
 
 -- More complicated select queries
-EXPLAIN SELECT key FROM (SELECT key, val FROM T1) subq1;
-SELECT key FROM (SELECT key, val FROM T1) subq1;
-EXPLAIN SELECT k FROM (SELECT key as k, val as v FROM T1) subq1;
-SELECT k FROM (SELECT key as k, val as v FROM T1) subq1;
-SELECT key + 1 as k FROM T1;
-SELECT key + val as k FROM T1;
+EXPLAIN SELECT key FROM (SELECT key, val FROM T1_n127) subq1;
+SELECT key FROM (SELECT key, val FROM T1_n127) subq1;
+EXPLAIN SELECT k FROM (SELECT key as k, val as v FROM T1_n127) subq1;
+SELECT k FROM (SELECT key as k, val as v FROM T1_n127) subq1;
+SELECT key + 1 as k FROM T1_n127;
+SELECT key + val as k FROM T1_n127;
 
 -- Work with union
 EXPLAIN
 SELECT * FROM (
-SELECT key as c FROM T1
+SELECT key as c FROM T1_n127
  UNION ALL
-SELECT val as c FROM T1
+SELECT val as c FROM T1_n127
 ) subq1;
 
 SELECT * FROM (
-SELECT key as c FROM T1
+SELECT key as c FROM T1_n127
  UNION ALL
-SELECT val as c FROM T1
+SELECT val as c FROM T1_n127
 ) subq1;
 
 EXPLAIN
 SELECT * FROM (
-SELECT key as c FROM T1
+SELECT key as c FROM T1_n127
  UNION ALL
-SELECT key as c FROM T1
+SELECT key as c FROM T1_n127
 ) subq1;
 
 SELECT * FROM (
-SELECT key as c FROM T1
+SELECT key as c FROM T1_n127
  UNION ALL
-SELECT key as c FROM T1
+SELECT key as c FROM T1_n127
 ) subq1;
 
 -- Work with insert overwrite
-FROM T1
-INSERT OVERWRITE TABLE T2 SELECT key, count(1) GROUP BY key
-INSERT OVERWRITE TABLE T3 SELECT key, sum(val) GROUP BY key;
+FROM T1_n127
+INSERT OVERWRITE TABLE T2_n75 SELECT key, count(1) GROUP BY key
+INSERT OVERWRITE TABLE T3_n29 SELECT key, sum(val) GROUP BY key;
 
 -- Simple joins
 SELECT *
-FROM T1 JOIN T2
-ON T1.key = T2.key ;
+FROM T1_n127 JOIN T2_n75
+ON T1_n127.key = T2_n75.key ;
 
 EXPLAIN
-SELECT T1.key
-FROM T1 JOIN T2
-ON T1.key = T2.key;
+SELECT T1_n127.key
+FROM T1_n127 JOIN T2_n75
+ON T1_n127.key = T2_n75.key;
 
-SELECT T1.key
-FROM T1 JOIN T2
-ON T1.key = T2.key;
+SELECT T1_n127.key
+FROM T1_n127 JOIN T2_n75
+ON T1_n127.key = T2_n75.key;
 
 SELECT *
-FROM T1 JOIN T2
-ON T1.key = T2.key AND T1.val = T2.val;
+FROM T1_n127 JOIN T2_n75
+ON T1_n127.key = T2_n75.key AND T1_n127.val = T2_n75.val;
 
 -- Map join
 SELECT /*+ MAPJOIN(a) */ * 
-FROM T1 a JOIN T2 b 
+FROM T1_n127 a JOIN T2_n75 b 
 ON a.key = b.key;
 
 -- More joins
 EXPLAIN
 SELECT *
-FROM T1 JOIN T2
-ON T1.key = T2.key AND T1.val = 3 and T2.val = 3;
+FROM T1_n127 JOIN T2_n75
+ON T1_n127.key = T2_n75.key AND T1_n127.val = 3 and T2_n75.val = 3;
 
 SELECT *
-FROM T1 JOIN T2
-ON T1.key = T2.key AND T1.val = 3 and T2.val = 3;
+FROM T1_n127 JOIN T2_n75
+ON T1_n127.key = T2_n75.key AND T1_n127.val = 3 and T2_n75.val = 3;
 
 EXPLAIN
 SELECT subq1.val
 FROM 
 (
-  SELECT val FROM T1 WHERE key = 5  
+  SELECT val FROM T1_n127 WHERE key = 5  
 ) subq1
 JOIN 
 (
-  SELECT val FROM T2 WHERE key = 6
+  SELECT val FROM T2_n75 WHERE key = 6
 ) subq2 
 ON subq1.val = subq2.val;
 
 SELECT subq1.val
 FROM 
 (
-  SELECT val FROM T1 WHERE key = 5  
+  SELECT val FROM T1_n127 WHERE key = 5  
 ) subq1
 JOIN 
 (
-  SELECT val FROM T2 WHERE key = 6
+  SELECT val FROM T2_n75 WHERE key = 6
 ) subq2 
 ON subq1.val = subq2.val;
 
@@ -125,16 +125,16 @@ FROM
   SELECT subq1.key as key
   FROM
   (
-    SELECT key, val FROM T1
+    SELECT key, val FROM T1_n127
   ) subq1
   JOIN
   (
-    SELECT key, 'teststring' as val FROM T2
+    SELECT key, 'teststring' as val FROM T2_n75
   ) subq2
   ON subq1.key = subq2.key
-) T4
-JOIN T3
-ON T3.key = T4.key;
+) T4_n16
+JOIN T3_n29
+ON T3_n29.key = T4_n16.key;
 
 SELECT *
 FROM
@@ -142,16 +142,16 @@ FROM
   SELECT subq1.key as key
   FROM
   (
-    SELECT key, val FROM T1
+    SELECT key, val FROM T1_n127
   ) subq1
   JOIN
   (
-    SELECT key, 'teststring' as val FROM T2
+    SELECT key, 'teststring' as val FROM T2_n75
   ) subq2
   ON subq1.key = subq2.key
-) T4
-JOIN T3
-ON T3.key = T4.key;
+) T4_n16
+JOIN T3_n29
+ON T3_n29.key = T4_n16.key;
 
 -- for partitioned table
 SELECT * FROM srcpart TABLESAMPLE (10 ROWS);

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/column_pruner_multiple_children.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/column_pruner_multiple_children.q b/ql/src/test/queries/clientpositive/column_pruner_multiple_children.q
index cb605a8..9001dd2 100644
--- a/ql/src/test/queries/clientpositive/column_pruner_multiple_children.q
+++ b/ql/src/test/queries/clientpositive/column_pruner_multiple_children.q
@@ -2,19 +2,19 @@
 set hive.map.aggr=false;
 set hive.stats.column.autogather=true;
 
-CREATE TABLE DEST1(key INT, value STRING) STORED AS TEXTFILE;
+CREATE TABLE DEST1_n52(key INT, value STRING) STORED AS TEXTFILE;
 
-create table s as select * from src where key='10';
+create table s_n129 as select * from src where key='10';
 
-explain FROM S
-INSERT OVERWRITE TABLE DEST1 SELECT key, sum(SUBSTR(value,5)) GROUP BY key
+explain FROM S_n129
+INSERT OVERWRITE TABLE DEST1_n52 SELECT key, sum(SUBSTR(value,5)) GROUP BY key
 ;
 
-FROM S
-INSERT OVERWRITE TABLE DEST1 SELECT key, sum(SUBSTR(value,5)) GROUP BY key
+FROM S_n129
+INSERT OVERWRITE TABLE DEST1_n52 SELECT key, sum(SUBSTR(value,5)) GROUP BY key
 ;
 
-desc formatted DEST1;
+desc formatted DEST1_n52;
 
-desc formatted DEST1 key;
-desc formatted DEST1 value;
+desc formatted DEST1_n52 key;
+desc formatted DEST1_n52 value;

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/column_table_stats_orc.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/column_table_stats_orc.q 
b/ql/src/test/queries/clientpositive/column_table_stats_orc.q
index 51fccd2..8c99442 100644
--- a/ql/src/test/queries/clientpositive/column_table_stats_orc.q
+++ b/ql/src/test/queries/clientpositive/column_table_stats_orc.q
@@ -1,57 +1,57 @@
 set hive.mapred.mode=nonstrict;
 -- SORT_QUERY_RESULTS
 
-DROP TABLE IF EXISTS s;
+DROP TABLE IF EXISTS s_n0;
 
-CREATE TABLE s (key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS ORC;
+CREATE TABLE s_n0 (key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS ORC;
 
-insert into table s values ('1','2');
+insert into table s_n0 values ('1','2');
 
-desc formatted s;
+desc formatted s_n0;
 
-explain extended analyze table s compute statistics for columns;
+explain extended analyze table s_n0 compute statistics for columns;
 
-analyze table s compute statistics for columns;
+analyze table s_n0 compute statistics for columns;
 
-desc formatted s;
+desc formatted s_n0;
 
-DROP TABLE IF EXISTS spart;
+DROP TABLE IF EXISTS spart_n0;
 
-CREATE TABLE spart (key STRING COMMENT 'default', value STRING COMMENT 
'default')
+CREATE TABLE spart_n0 (key STRING COMMENT 'default', value STRING COMMENT 
'default')
 PARTITIONED BY (ds STRING, hr STRING)
 STORED AS ORC;
 
-insert into table spart PARTITION (ds="2008-04-08", hr="12") values ('1','2');
-insert into table spart PARTITION (ds="2008-04-08", hr="11") values ('1','2');
+insert into table spart_n0 PARTITION (ds="2008-04-08", hr="12") values ('1','2');
+insert into table spart_n0 PARTITION (ds="2008-04-08", hr="11") values ('1','2');
 
-desc formatted spart;
+desc formatted spart_n0;
 
-explain extended analyze table spart compute statistics for columns;
+explain extended analyze table spart_n0 compute statistics for columns;
 
-analyze table spart compute statistics for columns;
+analyze table spart_n0 compute statistics for columns;
 
-desc formatted spart;
+desc formatted spart_n0;
 
-desc formatted spart PARTITION(ds='2008-04-08', hr=11);
-desc formatted spart PARTITION(ds='2008-04-08', hr=12);
+desc formatted spart_n0 PARTITION(ds='2008-04-08', hr=11);
+desc formatted spart_n0 PARTITION(ds='2008-04-08', hr=12);
 
 
-DROP TABLE IF EXISTS spart;
+DROP TABLE IF EXISTS spart_n0;
 
-CREATE TABLE spart (key STRING COMMENT 'default', value STRING COMMENT 
'default')
+CREATE TABLE spart_n0 (key STRING COMMENT 'default', value STRING COMMENT 
'default')
 PARTITIONED BY (ds STRING, hr STRING)
 STORED AS ORC;
 
-insert into table spart PARTITION (ds="2008-04-08", hr="12") values ('1','2');
-insert into table spart PARTITION (ds="2008-04-08", hr="11") values ('1','2');
+insert into table spart_n0 PARTITION (ds="2008-04-08", hr="12") values ('1','2');
+insert into table spart_n0 PARTITION (ds="2008-04-08", hr="11") values ('1','2');
 
-desc formatted spart;
+desc formatted spart_n0;
 
-explain extended analyze table spart partition(hr="11") compute statistics for 
columns;
+explain extended analyze table spart_n0 partition(hr="11") compute statistics 
for columns;
 
-analyze table spart partition(hr="11") compute statistics for columns;
+analyze table spart_n0 partition(hr="11") compute statistics for columns;
 
-desc formatted spart;
+desc formatted spart_n0;
 
-desc formatted spart PARTITION(ds='2008-04-08', hr=11);
-desc formatted spart PARTITION(ds='2008-04-08', hr=12);
+desc formatted spart_n0 PARTITION(ds='2008-04-08', hr=11);
+desc formatted spart_n0 PARTITION(ds='2008-04-08', hr=12);

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/columnstats_infinity.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/columnstats_infinity.q 
b/ql/src/test/queries/clientpositive/columnstats_infinity.q
index c99a1cb..b214fa9 100644
--- a/ql/src/test/queries/clientpositive/columnstats_infinity.q
+++ b/ql/src/test/queries/clientpositive/columnstats_infinity.q
@@ -1,44 +1,44 @@
 set hive.stats.column.autogather=false;
 
-CREATE TABLE schema_evolution_data(insert_num int, boolean1 boolean, tinyint1 
tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 decimal(38,18), 
float1 float, double1 double, string1 string, string2 string, date1 date, 
timestamp1 timestamp, boolean_str string, tinyint_str string, smallint_str 
string, int_str string, bigint_str string, decimal_str string, float_str 
string, double_str string, date_str string, timestamp_str string, filler string)
+CREATE TABLE schema_evolution_data_n45(insert_num int, boolean1 boolean, 
tinyint1 tinyint, smallint1 smallint, int1 int, bigint1 bigint, decimal1 
decimal(38,18), float1 float, double1 double, string1 string, string2 string, 
date1 date, timestamp1 timestamp, boolean_str string, tinyint_str string, 
smallint_str string, int_str string, bigint_str string, decimal_str string, 
float_str string, double_str string, date_str string, timestamp_str string, 
filler string)
 row format delimited fields terminated by '|' stored as textfile;
-load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data;
+load data local inpath '../../data/files/schema_evolution/schema_evolution_data.txt' overwrite into table schema_evolution_data_n45;
 
-CREATE TABLE 
table_change_numeric_group_string_group_floating_string_group(insert_num int,
+CREATE TABLE 
table_change_numeric_group_string_group_floating_string_group_n13(insert_num 
int,
               c1 decimal(38,18), c2 float, c3 double,
               c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 
float, c9 double,
               c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), 
c14 float, c15 double,
               b STRING);
 
-insert into table 
table_change_numeric_group_string_group_floating_string_group SELECT insert_num,
+insert into table 
table_change_numeric_group_string_group_floating_string_group_n13 SELECT 
insert_num,
               decimal1, float1, double1,
               decimal1, float1, double1, decimal1, float1, double1,
               decimal1, float1, double1, decimal1, float1, double1,
-             'original' FROM schema_evolution_data;
+             'original' FROM schema_evolution_data_n45;
 
-desc formatted table_change_numeric_group_string_group_floating_string_group;
+desc formatted 
table_change_numeric_group_string_group_floating_string_group_n13;
 
-analyze table table_change_numeric_group_string_group_floating_string_group 
compute statistics for columns;
+analyze table 
table_change_numeric_group_string_group_floating_string_group_n13 compute 
statistics for columns;
 
-desc formatted table_change_numeric_group_string_group_floating_string_group;
+desc formatted 
table_change_numeric_group_string_group_floating_string_group_n13;
 
-select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from 
table_change_numeric_group_string_group_floating_string_group;
+select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from 
table_change_numeric_group_string_group_floating_string_group_n13;
 
 set hive.stats.column.autogather=true;
 
-drop table table_change_numeric_group_string_group_floating_string_group;
+drop table table_change_numeric_group_string_group_floating_string_group_n13;
 
-CREATE TABLE 
table_change_numeric_group_string_group_floating_string_group(insert_num int,
+CREATE TABLE 
table_change_numeric_group_string_group_floating_string_group_n13(insert_num 
int,
               c1 decimal(38,18), c2 float, c3 double,
               c4 decimal(38,18), c5 float, c6 double, c7 decimal(38,18), c8 
float, c9 double,
               c10 decimal(38,18), c11 float, c12 double, c13 decimal(38,18), 
c14 float, c15 double,
               b STRING);
 
-insert into table 
table_change_numeric_group_string_group_floating_string_group SELECT insert_num,
+insert into table 
table_change_numeric_group_string_group_floating_string_group_n13 SELECT 
insert_num,
               decimal1, float1, double1,
               decimal1, float1, double1, decimal1, float1, double1,
               decimal1, float1, double1, decimal1, float1, double1,
-             'original' FROM schema_evolution_data;
+             'original' FROM schema_evolution_data_n45;
 
-desc formatted table_change_numeric_group_string_group_floating_string_group;
+desc formatted 
table_change_numeric_group_string_group_floating_string_group_n13;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/columnstats_partlvl_dp.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/columnstats_partlvl_dp.q 
b/ql/src/test/queries/clientpositive/columnstats_partlvl_dp.q
index c065edd..d47e20c 100644
--- a/ql/src/test/queries/clientpositive/columnstats_partlvl_dp.q
+++ b/ql/src/test/queries/clientpositive/columnstats_partlvl_dp.q
@@ -1,72 +1,72 @@
 set hive.mapred.mode=nonstrict;
-DROP TABLE Employee_Part;
+DROP TABLE Employee_Part_n0;
 
-CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by 
(employeeSalary double, country string)
+CREATE TABLE Employee_Part_n0(employeeID int, employeeName String) partitioned 
by (employeeSalary double, country string)
 row format delimited fields terminated by '|'  stored as textfile;
 
-LOAD DATA LOCAL INPATH "../../data/files/employee.dat"  INTO TABLE 
Employee_Part partition(employeeSalary='2000.0', country='USA');
-LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE 
Employee_Part partition(employeeSalary='2000.0', country='UK');
-LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE 
Employee_Part partition(employeeSalary='3000.0', country='USA');
-LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE 
Employee_Part partition(employeeSalary='4000.0', country='USA');
-LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE 
Employee_Part partition(employeeSalary='3500.0', country='UK');
-LOAD DATA LOCAL INPATH "../../data/files/employee.dat"  INTO TABLE 
Employee_Part partition(employeeSalary='3000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee.dat"  INTO TABLE 
Employee_Part_n0 partition(employeeSalary='2000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE 
Employee_Part_n0 partition(employeeSalary='2000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE 
Employee_Part_n0 partition(employeeSalary='3000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE 
Employee_Part_n0 partition(employeeSalary='4000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE 
Employee_Part_n0 partition(employeeSalary='3500.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee.dat"  INTO TABLE 
Employee_Part_n0 partition(employeeSalary='3000.0', country='UK');
 
 -- dynamic partitioning syntax
 explain 
-analyze table Employee_Part partition (employeeSalary='4000.0', country) 
compute statistics for columns employeeName, employeeID;
-analyze table Employee_Part partition (employeeSalary='4000.0', country) 
compute statistics for columns employeeName, employeeID;
+analyze table Employee_Part_n0 partition (employeeSalary='4000.0', country) 
compute statistics for columns employeeName, employeeID;
+analyze table Employee_Part_n0 partition (employeeSalary='4000.0', country) 
compute statistics for columns employeeName, employeeID;
 
-describe formatted Employee_Part partition (employeeSalary='4000.0', 
country='USA');
+describe formatted Employee_Part_n0 partition (employeeSalary='4000.0', 
country='USA');
 
-describe formatted Employee_Part partition (employeeSalary='4000.0', 
country='USA') employeeName;
+describe formatted Employee_Part_n0 partition (employeeSalary='4000.0', 
country='USA') employeeName;
 
 -- don't specify all partitioning keys
 explain        
-analyze table Employee_Part partition (employeeSalary='2000.0') compute 
statistics for columns employeeID;     
-analyze table Employee_Part partition (employeeSalary='2000.0') compute 
statistics for columns employeeID;
+analyze table Employee_Part_n0 partition (employeeSalary='2000.0') compute 
statistics for columns employeeID;  
+analyze table Employee_Part_n0 partition (employeeSalary='2000.0') compute 
statistics for columns employeeID;
 
-describe formatted Employee_Part partition (employeeSalary='2000.0', 
country='USA') employeeID;
-describe formatted Employee_Part partition (employeeSalary='2000.0', 
country='UK') employeeID;
+describe formatted Employee_Part_n0 partition (employeeSalary='2000.0', 
country='USA') employeeID;
+describe formatted Employee_Part_n0 partition (employeeSalary='2000.0', 
country='UK') employeeID;
 -- don't specify any partitioning keys
 explain        
-analyze table Employee_Part partition (employeeSalary) compute statistics for 
columns employeeID;      
-analyze table Employee_Part partition (employeeSalary) compute statistics for 
columns employeeID;
+analyze table Employee_Part_n0 partition (employeeSalary) compute statistics 
for columns employeeID;   
+analyze table Employee_Part_n0 partition (employeeSalary) compute statistics 
for columns employeeID;
 
-describe formatted Employee_Part partition (employeeSalary='3000.0', 
country='UK') employeeID;
+describe formatted Employee_Part_n0 partition (employeeSalary='3000.0', 
country='UK') employeeID;
 explain        
-analyze table Employee_Part partition (employeeSalary,country) compute 
statistics for columns; 
-analyze table Employee_Part partition (employeeSalary,country) compute 
statistics for columns;
+analyze table Employee_Part_n0 partition (employeeSalary,country) compute 
statistics for columns;      
+analyze table Employee_Part_n0 partition (employeeSalary,country) compute 
statistics for columns;
 
-describe formatted Employee_Part partition (employeeSalary='3500.0', 
country='UK') employeeName;
+describe formatted Employee_Part_n0 partition (employeeSalary='3500.0', 
country='UK') employeeName;
 
 -- partially populated stats
-drop table Employee;
-CREATE TABLE Employee(employeeID int, employeeName String) partitioned by 
(employeeSalary double, country string)
+drop table Employee_n0;
+CREATE TABLE Employee_n0(employeeID int, employeeName String) partitioned by 
(employeeSalary double, country string)
 row format delimited fields terminated by '|'  stored as textfile;
 
-LOAD DATA LOCAL INPATH "../../data/files/employee.dat"  INTO TABLE Employee 
partition(employeeSalary='2000.0', country='USA');
-LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee 
partition(employeeSalary='2000.0', country='UK');
-LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee 
partition(employeeSalary='3500.0', country='UK');
-LOAD DATA LOCAL INPATH "../../data/files/employee.dat"  INTO TABLE Employee 
partition(employeeSalary='3000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee.dat"  INTO TABLE Employee_n0 
partition(employeeSalary='2000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_n0 
partition(employeeSalary='2000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_n0 
partition(employeeSalary='3500.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee.dat"  INTO TABLE Employee_n0 
partition(employeeSalary='3000.0', country='UK');
 
-analyze table Employee partition (employeeSalary,country) compute statistics 
for columns;
+analyze table Employee_n0 partition (employeeSalary,country) compute 
statistics for columns;
 
-describe formatted Employee partition (employeeSalary='3500.0', country='UK') 
employeeName;
-LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee 
partition(employeeSalary='3000.0', country='USA');
-LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee 
partition(employeeSalary='4000.0', country='USA');
+describe formatted Employee_n0 partition (employeeSalary='3500.0', 
country='UK') employeeName;
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_n0 
partition(employeeSalary='3000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_n0 
partition(employeeSalary='4000.0', country='USA');
 
-analyze table Employee partition (employeeSalary) compute statistics for 
columns;
+analyze table Employee_n0 partition (employeeSalary) compute statistics for 
columns;
 
-describe formatted Employee partition (employeeSalary='3000.0', country='USA') 
employeeName;
+describe formatted Employee_n0 partition (employeeSalary='3000.0', 
country='USA') employeeName;
 
 -- add columns
-alter table Employee add columns (c int ,d string);
+alter table Employee_n0 add columns (c int ,d string);
 
-LOAD DATA LOCAL INPATH "../../data/files/employee_part.txt"  INTO TABLE 
Employee partition(employeeSalary='6000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee_part.txt"  INTO TABLE 
Employee_n0 partition(employeeSalary='6000.0', country='UK');
 
-analyze table Employee partition (employeeSalary='6000.0',country='UK') 
compute statistics for columns;
+analyze table Employee_n0 partition (employeeSalary='6000.0',country='UK') 
compute statistics for columns;
 
-describe formatted Employee partition (employeeSalary='6000.0', country='UK') 
employeeName;
-describe formatted Employee partition (employeeSalary='6000.0', country='UK') 
c;
-describe formatted Employee partition (employeeSalary='6000.0', country='UK') 
d;
+describe formatted Employee_n0 partition (employeeSalary='6000.0', 
country='UK') employeeName;
+describe formatted Employee_n0 partition (employeeSalary='6000.0', 
country='UK') c;
+describe formatted Employee_n0 partition (employeeSalary='6000.0', 
country='UK') d;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/combine2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/combine2.q 
b/ql/src/test/queries/clientpositive/combine2.q
index 313d03c..5b19bc0 100644
--- a/ql/src/test/queries/clientpositive/combine2.q
+++ b/ql/src/test/queries/clientpositive/combine2.q
@@ -15,7 +15,7 @@ set hive.merge.smallfiles.avgsize=0;
 
 -- SORT_QUERY_RESULTS
 
-create table combine2(key string) partitioned by (value string);
+create table combine2_n0(key string) partitioned by (value string);
 
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
 -- This test sets mapred.max.split.size=256 and hive.merge.smallfiles.avgsize=0
@@ -25,7 +25,7 @@ create table combine2(key string) partitioned by (value 
string);
 -- significant impact on the results results of this test.
 -- This issue was fixed in MAPREDUCE-2046 which is included in 0.22.
 
-insert overwrite table combine2 partition(value) 
+insert overwrite table combine2_n0 partition(value) 
 select * from (
    select key, value from src where key < 10
    union all 
@@ -33,17 +33,17 @@ select * from (
    union all
    select key, '2010-04-21 09:45:00' value from src where key = 19) s;
 
-show partitions combine2;
+show partitions combine2_n0;
 
 explain
-select key, value from combine2 where value is not null;
+select key, value from combine2_n0 where value is not null;
 
-select key, value from combine2 where value is not null;
+select key, value from combine2_n0 where value is not null;
 
 explain extended
-select count(1) from combine2 where value is not null;
+select count(1) from combine2_n0 where value is not null;
 
-select count(1) from combine2 where value is not null;
+select count(1) from combine2_n0 where value is not null;
 
 explain
 select ds, count(1) from srcpart where ds is not null group by ds;

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/comma_in_column_name.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/comma_in_column_name.q 
b/ql/src/test/queries/clientpositive/comma_in_column_name.q
index cb8823e..be2b252 100644
--- a/ql/src/test/queries/clientpositive/comma_in_column_name.q
+++ b/ql/src/test/queries/clientpositive/comma_in_column_name.q
@@ -1,14 +1,14 @@
-create table test (`x,y` int);
+create table test_n4 (`x,y` int);
 
-insert into test values (1),(2);
+insert into test_n4 values (1),(2);
 
-select `x,y` from test where `x,y` >=2 ;
+select `x,y` from test_n4 where `x,y` >=2 ;
 
-drop table test; 
+drop table test_n4; 
 
-create table test (`x,y` int) stored as orc;
+create table test_n4 (`x,y` int) stored as orc;
 
-insert into test values (1),(2);
+insert into test_n4 values (1),(2);
 
-select `x,y` from test where `x,y` <2 ;
+select `x,y` from test_n4 where `x,y` <2 ;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/constGby.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/constGby.q 
b/ql/src/test/queries/clientpositive/constGby.q
index bde2e79..d27554e 100644
--- a/ql/src/test/queries/clientpositive/constGby.q
+++ b/ql/src/test/queries/clientpositive/constGby.q
@@ -1,21 +1,21 @@
 set hive.mapred.mode=nonstrict;
 
-create table t1 (a int);
-analyze table t1 compute statistics;
-analyze table t1 compute statistics for columns;
+create table t1_n36 (a int);
+analyze table t1_n36 compute statistics;
+analyze table t1_n36 compute statistics for columns;
 
-explain select count(1) from t1 group by 1;
-select count(1) from t1 group by 1;
-select count(1) from t1;
-explain select count(*) from t1;
-select count(*) from t1;
-select count(1) from t1 group by 1=1;
-select count(1), max(a) from t1 group by 1=1;
+explain select count(1) from t1_n36 group by 1;
+select count(1) from t1_n36 group by 1;
+select count(1) from t1_n36;
+explain select count(*) from t1_n36;
+select count(*) from t1_n36;
+select count(1) from t1_n36 group by 1=1;
+select count(1), max(a) from t1_n36 group by 1=1;
 
 set hive.compute.query.using.stats=false;
 
-select count(1) from t1 group by 1;
-select count(1) from t1;
-select count(*) from t1;
-select count(1) from t1 group by 1=1;
-select count(1), max(a) from t1 group by 1=1;
+select count(1) from t1_n36 group by 1;
+select count(1) from t1_n36;
+select count(*) from t1_n36;
+select count(1) from t1_n36 group by 1=1;
+select count(1), max(a) from t1_n36 group by 1=1;

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/constantPropWhen.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/constantPropWhen.q 
b/ql/src/test/queries/clientpositive/constantPropWhen.q
index 03bfd54..1889664 100644
--- a/ql/src/test/queries/clientpositive/constantPropWhen.q
+++ b/ql/src/test/queries/clientpositive/constantPropWhen.q
@@ -1,45 +1,45 @@
 set hive.mapred.mode=nonstrict;
 set hive.optimize.constant.propagation=false;
 
-drop table test_1; 
+drop table test_1_n4; 
 
-create table test_1 (id int, id2 int); 
+create table test_1_n4 (id int, id2 int); 
 
-insert into table test_1 values (123, NULL), (NULL, NULL), (NULL, 123), (123, 
123);
+insert into table test_1_n4 values (123, NULL), (NULL, NULL), (NULL, 123), 
(123, 123);
 
-explain SELECT cast(CASE WHEN id = id2 THEN FALSE ELSE TRUE END AS BOOLEAN) AS 
b FROM test_1; 
+explain SELECT cast(CASE WHEN id = id2 THEN FALSE ELSE TRUE END AS BOOLEAN) AS 
b FROM test_1_n4; 
 
-SELECT cast(CASE WHEN id = id2 THEN FALSE ELSE TRUE END AS BOOLEAN) AS b FROM 
test_1; 
+SELECT cast(CASE WHEN id = id2 THEN FALSE ELSE TRUE END AS BOOLEAN) AS b FROM 
test_1_n4; 
 
-explain SELECT cast(CASE id when id2 THEN FALSE ELSE TRUE END AS BOOLEAN) AS b 
FROM test_1;
+explain SELECT cast(CASE id when id2 THEN FALSE ELSE TRUE END AS BOOLEAN) AS b 
FROM test_1_n4;
 
-SELECT cast(CASE id when id2 THEN FALSE ELSE TRUE END AS BOOLEAN) AS b FROM 
test_1;
+SELECT cast(CASE id when id2 THEN FALSE ELSE TRUE END AS BOOLEAN) AS b FROM 
test_1_n4;
 
-explain SELECT cast(CASE WHEN id = id2 THEN TRUE ELSE FALSE END AS BOOLEAN) AS 
b FROM test_1; 
+explain SELECT cast(CASE WHEN id = id2 THEN TRUE ELSE FALSE END AS BOOLEAN) AS 
b FROM test_1_n4; 
 
-SELECT cast(CASE WHEN id = id2 THEN TRUE ELSE FALSE END AS BOOLEAN) AS b FROM 
test_1; 
+SELECT cast(CASE WHEN id = id2 THEN TRUE ELSE FALSE END AS BOOLEAN) AS b FROM 
test_1_n4; 
 
-explain SELECT cast(CASE id when id2 THEN TRUE ELSE FALSE END AS BOOLEAN) AS b 
FROM test_1;
+explain SELECT cast(CASE id when id2 THEN TRUE ELSE FALSE END AS BOOLEAN) AS b 
FROM test_1_n4;
 
-SELECT cast(CASE id when id2 THEN TRUE ELSE FALSE END AS BOOLEAN) AS b FROM 
test_1;
+SELECT cast(CASE id when id2 THEN TRUE ELSE FALSE END AS BOOLEAN) AS b FROM 
test_1_n4;
 
 
 set hive.cbo.enable=false;
 set hive.optimize.constant.propagation=true;
 
-explain SELECT cast(CASE WHEN id = id2 THEN FALSE ELSE TRUE END AS BOOLEAN) AS 
b FROM test_1; 
+explain SELECT cast(CASE WHEN id = id2 THEN FALSE ELSE TRUE END AS BOOLEAN) AS 
b FROM test_1_n4; 
 
-SELECT cast(CASE WHEN id = id2 THEN FALSE ELSE TRUE END AS BOOLEAN) AS b FROM 
test_1; 
+SELECT cast(CASE WHEN id = id2 THEN FALSE ELSE TRUE END AS BOOLEAN) AS b FROM 
test_1_n4; 
 
-explain SELECT cast(CASE id when id2 THEN FALSE ELSE TRUE END AS BOOLEAN) AS b 
FROM test_1;
+explain SELECT cast(CASE id when id2 THEN FALSE ELSE TRUE END AS BOOLEAN) AS b 
FROM test_1_n4;
 
-SELECT cast(CASE id when id2 THEN FALSE ELSE TRUE END AS BOOLEAN) AS b FROM 
test_1;
+SELECT cast(CASE id when id2 THEN FALSE ELSE TRUE END AS BOOLEAN) AS b FROM 
test_1_n4;
 
-explain SELECT cast(CASE WHEN id = id2 THEN TRUE ELSE FALSE END AS BOOLEAN) AS 
b FROM test_1; 
+explain SELECT cast(CASE WHEN id = id2 THEN TRUE ELSE FALSE END AS BOOLEAN) AS 
b FROM test_1_n4; 
 
-SELECT cast(CASE WHEN id = id2 THEN TRUE ELSE FALSE END AS BOOLEAN) AS b FROM 
test_1; 
+SELECT cast(CASE WHEN id = id2 THEN TRUE ELSE FALSE END AS BOOLEAN) AS b FROM 
test_1_n4; 
 
-explain SELECT cast(CASE id when id2 THEN TRUE ELSE FALSE END AS BOOLEAN) AS b 
FROM test_1;
+explain SELECT cast(CASE id when id2 THEN TRUE ELSE FALSE END AS BOOLEAN) AS b 
FROM test_1_n4;
 
-SELECT cast(CASE id when id2 THEN TRUE ELSE FALSE END AS BOOLEAN) AS b FROM 
test_1;
+SELECT cast(CASE id when id2 THEN TRUE ELSE FALSE END AS BOOLEAN) AS b FROM 
test_1_n4;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/constant_prop_1.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/constant_prop_1.q 
b/ql/src/test/queries/clientpositive/constant_prop_1.q
index 3d3016f..683b730 100644
--- a/ql/src/test/queries/clientpositive/constant_prop_1.q
+++ b/ql/src/test/queries/clientpositive/constant_prop_1.q
@@ -36,7 +36,7 @@ select a.key, b.value from src a join src b where a.key = 
'238' and b.value = '2
 explain
 select a.key, b.value from src a join src b on a.key=b.key where b.value = 
'234';
 
-create table t (
+create table t_n26 (
 a int,
 b int,
 c int,
@@ -47,7 +47,7 @@ e int
 explain 
 select a2 as a3 from
 (select a1 as a2, c1 as c2 from
-(select a as a1, b as b1, c as c1 from t where a=1 and b=2 and c=3)sub1)sub2; 
+(select a as a1, b as b1, c as c1 from t_n26 where a=1 and b=2 and 
c=3)sub1)sub2; 
 
 
 

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/constantfolding.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/constantfolding.q 
b/ql/src/test/queries/clientpositive/constantfolding.q
index 210d35e..d5e4917 100644
--- a/ql/src/test/queries/clientpositive/constantfolding.q
+++ b/ql/src/test/queries/clientpositive/constantfolding.q
@@ -11,23 +11,23 @@ union all
 select * from (select 'k4' as key, ' ' as value from src limit 2)c;
   
 
-drop table if exists union_all_bug_test_1;
-drop table if exists union_all_bug_test_2;
-create table if not exists union_all_bug_test_1
+drop table if exists union_all_bug_test_1_n0;
+drop table if exists union_all_bug_test_2_n0;
+create table if not exists union_all_bug_test_1_n0
 (
 f1 int,
 f2 int
 );
 
-create table if not exists union_all_bug_test_2
+create table if not exists union_all_bug_test_2_n0
 (
 f1 int
 );
 
-insert into table union_all_bug_test_1 values (1,1);
-insert into table union_all_bug_test_2 values (1);
-insert into table union_all_bug_test_1 values (0,0);
-insert into table union_all_bug_test_2 values (0);
+insert into table union_all_bug_test_1_n0 values (1,1);
+insert into table union_all_bug_test_2_n0 values (1);
+insert into table union_all_bug_test_1_n0 values (0,0);
+insert into table union_all_bug_test_2_n0 values (0);
 
 
 
@@ -37,14 +37,14 @@ FROM (
 SELECT
 f1
 , if('helloworld' like '%hello%' ,f1,f2) as filter
-FROM union_all_bug_test_1
+FROM union_all_bug_test_1_n0
 
 union all
 
 select
 f1
 , 0 as filter
-from union_all_bug_test_2
+from union_all_bug_test_2_n0
 ) A
 WHERE (filter = 1 and f1 = 1);
 
@@ -65,9 +65,9 @@ explain select cast("1970-12-31 15:59:58.174" as TIMESTAMP) 
from src;
 -- numRows: 2 rawDataSize: 112
 explain select cast("1970-12-31 15:59:58.174" as DATE) from src;
 
-CREATE TABLE dest1(c1 STRING) STORED AS TEXTFILE;
+CREATE TABLE dest1_n127(c1 STRING) STORED AS TEXTFILE;
 
-FROM src INSERT OVERWRITE TABLE dest1 SELECT '  abc  ' WHERE src.key = 86;
+FROM src INSERT OVERWRITE TABLE dest1_n127 SELECT '  abc  ' WHERE src.key = 86;
 
 EXPLAIN
 SELECT ROUND(LN(3.0),12), LN(0.0), LN(-1), ROUND(LOG(3.0),12), LOG(0.0),
@@ -77,7 +77,7 @@ SELECT ROUND(LN(3.0),12), LN(0.0), LN(-1), 
ROUND(LOG(3.0),12), LOG(0.0),
        POW(2,3), POWER(2,3), POWER(2,-3), POWER(0.5, -3), POWER(4, 0.5),
        POWER(-1, 0.5), POWER(-1, 2), POWER(CAST (1 AS DECIMAL), CAST (0 AS 
INT)),
        POWER(CAST (2 AS DECIMAL), CAST (3 AS INT)), 
-       POW(CAST (2 AS DECIMAL), CAST(3 AS INT)) FROM dest1;
+       POW(CAST (2 AS DECIMAL), CAST(3 AS INT)) FROM dest1_n127;
 
 SELECT ROUND(LN(3.0),12), LN(0.0), LN(-1), ROUND(LOG(3.0),12), LOG(0.0),
        LOG(-1), ROUND(LOG2(3.0),12), LOG2(0.0), LOG2(-1),
@@ -86,4 +86,4 @@ SELECT ROUND(LN(3.0),12), LN(0.0), LN(-1), 
ROUND(LOG(3.0),12), LOG(0.0),
        POW(2,3), POWER(2,3), POWER(2,-3), POWER(0.5, -3), POWER(4, 0.5),
        POWER(-1, 0.5), POWER(-1, 2), POWER(CAST (1 AS DECIMAL), CAST (0 AS 
INT)),
        POWER(CAST (2 AS DECIMAL), CAST (3 AS INT)), 
-       POW(CAST (2 AS DECIMAL), CAST(3 AS INT)) FROM dest1;
+       POW(CAST (2 AS DECIMAL), CAST(3 AS INT)) FROM dest1_n127;

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/constprog_dp.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/constprog_dp.q 
b/ql/src/test/queries/clientpositive/constprog_dp.q
index d7fcb5e..1c36df3 100644
--- a/ql/src/test/queries/clientpositive/constprog_dp.q
+++ b/ql/src/test/queries/clientpositive/constprog_dp.q
@@ -2,11 +2,11 @@
 set hive.optimize.constant.propagation=true;
 set hive.exec.dynamic.partition.mode=nonstrict;
 
-create table dest(key string, value string) partitioned by (ds string);
+create table dest_n1(key string, value string) partitioned by (ds string);
 
 EXPLAIN
 from srcpart
-insert overwrite table dest partition (ds) select key, value, ds where 
ds='2008-04-08';
+insert overwrite table dest_n1 partition (ds) select key, value, ds where 
ds='2008-04-08';
 
 from srcpart
-insert overwrite table dest partition (ds) select key, value, ds where 
ds='2008-04-08';
+insert overwrite table dest_n1 partition (ds) select key, value, ds where 
ds='2008-04-08';

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/constprog_semijoin.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/constprog_semijoin.q 
b/ql/src/test/queries/clientpositive/constprog_semijoin.q
index a5546ec..eb92713 100644
--- a/ql/src/test/queries/clientpositive/constprog_semijoin.q
+++ b/ql/src/test/queries/clientpositive/constprog_semijoin.q
@@ -3,35 +3,35 @@ set hive.explain.user=true;
 
 -- SORT_QUERY_RESULTS
 
-create table table1 (id int, val string, val1 string, dimid int);
-insert into table1 (id, val, val1, dimid) values (1, 't1val01', 'val101', 
100), (2, 't1val02', 'val102', 200), (3, 't1val03', 'val103', 103), (3, 
't1val01', 'val104', 100), (2, 't1val05', 'val105', 200), (3, 't1val01', 
'val106', 103), (1, 't1val07', 'val107', 200), (2, 't1val01', 'val108', 200), 
(3, 't1val09', 'val109', 103), (4,'t1val01', 'val110', 200);
+create table table1_n10 (id int, val string, val1 string, dimid int);
+insert into table1_n10 (id, val, val1, dimid) values (1, 't1val01', 'val101', 
100), (2, 't1val02', 'val102', 200), (3, 't1val03', 'val103', 103), (3, 
't1val01', 'val104', 100), (2, 't1val05', 'val105', 200), (3, 't1val01', 
'val106', 103), (1, 't1val07', 'val107', 200), (2, 't1val01', 'val108', 200), 
(3, 't1val09', 'val109', 103), (4,'t1val01', 'val110', 200);
 
-create table table2 (id int, val2 string);
-insert into table2 (id, val2) values (1, 't2val201'), (2, 't2val202'), (3, 
't2val203');
+create table table2_n6 (id int, val2 string);
+insert into table2_n6 (id, val2) values (1, 't2val201'), (2, 't2val202'), (3, 
't2val203');
 
-create table table3 (id int);
-insert into table3 (id) values (100), (100), (101), (102), (103);
+create table table3_n0 (id int);
+insert into table3_n0 (id) values (100), (100), (101), (102), (103);
 
-explain select table1.id, table1.val, table1.val1 from table1 left semi join 
table3 on table1.dimid = table3.id where table1.val = 't1val01';
-select table1.id, table1.val, table1.val1 from table1 left semi join table3 on 
table1.dimid = table3.id where table1.val = 't1val01';
+explain select table1_n10.id, table1_n10.val, table1_n10.val1 from table1_n10 
left semi join table3_n0 on table1_n10.dimid = table3_n0.id where 
table1_n10.val = 't1val01';
+select table1_n10.id, table1_n10.val, table1_n10.val1 from table1_n10 left 
semi join table3_n0 on table1_n10.dimid = table3_n0.id where table1_n10.val = 
't1val01';
 
-explain select table1.id, table1.val, table2.val2 from table1 inner join 
table2 on table1.val = 't1val01' and table1.id = table2.id left semi join 
table3 on table1.dimid = table3.id;
-select table1.id, table1.val, table2.val2 from table1 inner join table2 on 
table1.val = 't1val01' and table1.id = table2.id left semi join table3 on 
table1.dimid = table3.id;
+explain select table1_n10.id, table1_n10.val, table2_n6.val2 from table1_n10 
inner join table2_n6 on table1_n10.val = 't1val01' and table1_n10.id = 
table2_n6.id left semi join table3_n0 on table1_n10.dimid = table3_n0.id;
+select table1_n10.id, table1_n10.val, table2_n6.val2 from table1_n10 inner 
join table2_n6 on table1_n10.val = 't1val01' and table1_n10.id = table2_n6.id 
left semi join table3_n0 on table1_n10.dimid = table3_n0.id;
 
-explain select table1.id, table1.val, table2.val2 from table1 left semi join 
table3 on table1.dimid = table3.id inner join table2 on table1.val = 't1val01' 
and table1.id = table2.id;
-select table1.id, table1.val, table2.val2 from table1 left semi join table3 on 
table1.dimid = table3.id inner join table2 on table1.val = 't1val01' and 
table1.id = table2.id;
+explain select table1_n10.id, table1_n10.val, table2_n6.val2 from table1_n10 
left semi join table3_n0 on table1_n10.dimid = table3_n0.id inner join 
table2_n6 on table1_n10.val = 't1val01' and table1_n10.id = table2_n6.id;
+select table1_n10.id, table1_n10.val, table2_n6.val2 from table1_n10 left semi 
join table3_n0 on table1_n10.dimid = table3_n0.id inner join table2_n6 on 
table1_n10.val = 't1val01' and table1_n10.id = table2_n6.id;
 
-explain select table1.id, table1.val, table1.val1 from table1 left semi join 
table3 on table1.dimid = table3.id and table3.id = 100 where table1.dimid <> 
100;
-select table1.id, table1.val, table1.val1 from table1 left semi join table3 on 
table1.dimid = table3.id and table3.id = 100 where table1.dimid <> 100;
+explain select table1_n10.id, table1_n10.val, table1_n10.val1 from table1_n10 
left semi join table3_n0 on table1_n10.dimid = table3_n0.id and table3_n0.id = 
100 where table1_n10.dimid <> 100;
+select table1_n10.id, table1_n10.val, table1_n10.val1 from table1_n10 left 
semi join table3_n0 on table1_n10.dimid = table3_n0.id and table3_n0.id = 100 
where table1_n10.dimid <> 100;
 
-explain select table1.id, table1.val, table1.val1 from table1 left semi join 
table3 on table1.dimid = table3.id and table3.id = 100 where table1.dimid  IN 
(100,200);
-select table1.id, table1.val, table1.val1 from table1 left semi join table3 on 
table1.dimid = table3.id and table3.id = 100 where table1.dimid  IN (100,200);
+explain select table1_n10.id, table1_n10.val, table1_n10.val1 from table1_n10 
left semi join table3_n0 on table1_n10.dimid = table3_n0.id and table3_n0.id = 
100 where table1_n10.dimid  IN (100,200);
+select table1_n10.id, table1_n10.val, table1_n10.val1 from table1_n10 left 
semi join table3_n0 on table1_n10.dimid = table3_n0.id and table3_n0.id = 100 
where table1_n10.dimid  IN (100,200);
 
-explain select table1.id, table1.val, table1.val1 from table1 left semi join 
table3 on table1.dimid = table3.id and table3.id = 100 where table1.dimid  = 
200;
-select table1.id, table1.val, table1.val1 from table1 left semi join table3 on 
table1.dimid = table3.id and table3.id = 100 where table1.dimid  = 200;
+explain select table1_n10.id, table1_n10.val, table1_n10.val1 from table1_n10 
left semi join table3_n0 on table1_n10.dimid = table3_n0.id and table3_n0.id = 
100 where table1_n10.dimid  = 200;
+select table1_n10.id, table1_n10.val, table1_n10.val1 from table1_n10 left 
semi join table3_n0 on table1_n10.dimid = table3_n0.id and table3_n0.id = 100 
where table1_n10.dimid  = 200;
 
-explain select table1.id, table1.val, table1.val1 from table1 left semi join 
table3 on table1.dimid = table3.id and table3.id = 100 where table1.dimid  = 
100;
-select table1.id, table1.val, table1.val1 from table1 left semi join table3 on 
table1.dimid = table3.id and table3.id = 100 where table1.dimid  = 100;
+explain select table1_n10.id, table1_n10.val, table1_n10.val1 from table1_n10 
left semi join table3_n0 on table1_n10.dimid = table3_n0.id and table3_n0.id = 
100 where table1_n10.dimid  = 100;
+select table1_n10.id, table1_n10.val, table1_n10.val1 from table1_n10 left 
semi join table3_n0 on table1_n10.dimid = table3_n0.id and table3_n0.id = 100 
where table1_n10.dimid  = 100;
 
-explain select table1.id, table1.val, table1.val1 from table1 left semi join 
table3 on table1.dimid = table3.id and table3.id = 100;
-select table1.id, table1.val, table1.val1 from table1 left semi join table3 on 
table1.dimid = table3.id and table3.id = 100;
+explain select table1_n10.id, table1_n10.val, table1_n10.val1 from table1_n10 
left semi join table3_n0 on table1_n10.dimid = table3_n0.id and table3_n0.id = 
100;
+select table1_n10.id, table1_n10.val, table1_n10.val1 from table1_n10 left 
semi join table3_n0 on table1_n10.dimid = table3_n0.id and table3_n0.id = 100;

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/constprog_type.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/constprog_type.q 
b/ql/src/test/queries/clientpositive/constprog_type.q
index 05e091a..c1a79a9 100644
--- a/ql/src/test/queries/clientpositive/constprog_type.q
+++ b/ql/src/test/queries/clientpositive/constprog_type.q
@@ -1,18 +1,18 @@
 --! qt:dataset:src
 set hive.optimize.constant.propagation=true;
 
-CREATE TABLE dest1(d date, t timestamp);
+CREATE TABLE dest1_n26(d date, t timestamp);
 
 EXPLAIN
-INSERT OVERWRITE TABLE dest1
+INSERT OVERWRITE TABLE dest1_n26
 SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as 
timestamp)
        FROM src tablesample (1 rows);
 
-INSERT OVERWRITE TABLE dest1
+INSERT OVERWRITE TABLE dest1_n26
 SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as 
timestamp)
        FROM src tablesample (1 rows);
 
-SELECT * FROM dest1;
+SELECT * FROM dest1_n26;
 
 SELECT key, value FROM src WHERE key = cast(86 as double);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/correlated_join_keys.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/correlated_join_keys.q 
b/ql/src/test/queries/clientpositive/correlated_join_keys.q
index 4c801de..48ff7ed 100644
--- a/ql/src/test/queries/clientpositive/correlated_join_keys.q
+++ b/ql/src/test/queries/clientpositive/correlated_join_keys.q
@@ -1,7 +1,7 @@
 
-drop table customer_address;
+drop table customer_address_n0;
 
-create table customer_address
+create table customer_address_n0
 (
     ca_address_sk             int,
     ca_address_id             string,
@@ -19,16 +19,16 @@ create table customer_address
 )
 row format delimited fields terminated by '|';
 
-load data local inpath '../../data/files/customer_address.txt' overwrite into 
table customer_address;
-analyze table customer_address compute statistics;
-analyze table customer_address compute statistics for columns ca_state, ca_zip;
+load data local inpath '../../data/files/customer_address.txt' overwrite into 
table customer_address_n0;
+analyze table customer_address_n0 compute statistics;
+analyze table customer_address_n0 compute statistics for columns ca_state, 
ca_zip;
 
 set hive.stats.fetch.column.stats=true;
 
 set hive.stats.correlated.multi.key.joins=false;
-explain select count(*) from customer_address a join customer_address b on 
(a.ca_zip = b.ca_zip and a.ca_state = b.ca_state);
+explain select count(*) from customer_address_n0 a join customer_address_n0 b 
on (a.ca_zip = b.ca_zip and a.ca_state = b.ca_state);
 
 set hive.stats.correlated.multi.key.joins=true;
-explain select count(*) from customer_address a join customer_address b on 
(a.ca_zip = b.ca_zip and a.ca_state = b.ca_state);
+explain select count(*) from customer_address_n0 a join customer_address_n0 b 
on (a.ca_zip = b.ca_zip and a.ca_state = b.ca_state);
 
-drop table customer_address;
+drop table customer_address_n0;

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/correlationoptimizer11.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/correlationoptimizer11.q 
b/ql/src/test/queries/clientpositive/correlationoptimizer11.q
index 8c99141..9f6997e 100644
--- a/ql/src/test/queries/clientpositive/correlationoptimizer11.q
+++ b/ql/src/test/queries/clientpositive/correlationoptimizer11.q
@@ -5,26 +5,26 @@ set hive.auto.convert.join=false;
 -- Tests in this file are used to make sure Correlation Optimizer
 -- can correctly handle tables with partitions
 
-CREATE TABLE part_table(key string, value string) PARTITIONED BY (partitionId 
int);
-INSERT OVERWRITE TABLE part_table PARTITION (partitionId=1)
+CREATE TABLE part_table_n1(key string, value string) PARTITIONED BY 
(partitionId int);
+INSERT OVERWRITE TABLE part_table_n1 PARTITION (partitionId=1)
   SELECT key, value FROM src ORDER BY key, value LIMIT 100;
-INSERT OVERWRITE TABLE part_table PARTITION (partitionId=2)
+INSERT OVERWRITE TABLE part_table_n1 PARTITION (partitionId=2)
   SELECT key, value FROM src1 ORDER BY key, value;
 
 set hive.optimize.correlation=false;
--- In this case, we should not do shared scan on part_table
+-- In this case, we should not do shared scan on part_table_n1
 -- because left and right tables of JOIN use different partitions
--- of part_table. With Correlation Optimizer we will generate
+-- of part_table_n1. With Correlation Optimizer we will generate
 -- 1 MR job.
 EXPLAIN
 SELECT x.key AS key, count(1) AS cnt
-FROM part_table x JOIN part_table y ON (x.key = y.key)
+FROM part_table_n1 x JOIN part_table_n1 y ON (x.key = y.key)
 WHERE x.partitionId = 1 AND
       y.partitionId = 2
 GROUP BY x.key;
 
 SELECT x.key AS key, count(1) AS cnt
-FROM part_table x JOIN part_table y ON (x.key = y.key)
+FROM part_table_n1 x JOIN part_table_n1 y ON (x.key = y.key)
 WHERE x.partitionId = 1 AND
       y.partitionId = 2
 GROUP BY x.key;
@@ -32,31 +32,31 @@ GROUP BY x.key;
 set hive.optimize.correlation=true;
 EXPLAIN
 SELECT x.key AS key, count(1) AS cnt
-FROM part_table x JOIN part_table y ON (x.key = y.key)
+FROM part_table_n1 x JOIN part_table_n1 y ON (x.key = y.key)
 WHERE x.partitionId = 1 AND
       y.partitionId = 2
 GROUP BY x.key;
 
 SELECT x.key AS key, count(1) AS cnt
-FROM part_table x JOIN part_table y ON (x.key = y.key)
+FROM part_table_n1 x JOIN part_table_n1 y ON (x.key = y.key)
 WHERE x.partitionId = 1 AND
       y.partitionId = 2
 GROUP BY x.key;
 
 set hive.optimize.correlation=false;
--- In this case, we should do shared scan on part_table
+-- In this case, we should do shared scan on part_table_n1
 -- because left and right tables of JOIN use the same partition
--- of part_table. With Correlation Optimizer we will generate
+-- of part_table_n1. With Correlation Optimizer we will generate
 -- 1 MR job.
 EXPLAIN
 SELECT x.key AS key, count(1) AS cnt
-FROM part_table x JOIN part_table y ON (x.key = y.key)
+FROM part_table_n1 x JOIN part_table_n1 y ON (x.key = y.key)
 WHERE x.partitionId = 2 AND
       y.partitionId = 2
 GROUP BY x.key;
 
 SELECT x.key AS key, count(1) AS cnt
-FROM part_table x JOIN part_table y ON (x.key = y.key)
+FROM part_table_n1 x JOIN part_table_n1 y ON (x.key = y.key)
 WHERE x.partitionId = 2 AND
       y.partitionId = 2
 GROUP BY x.key;
@@ -64,13 +64,13 @@ GROUP BY x.key;
 set hive.optimize.correlation=true;
 EXPLAIN
 SELECT x.key AS key, count(1) AS cnt
-FROM part_table x JOIN part_table y ON (x.key = y.key)
+FROM part_table_n1 x JOIN part_table_n1 y ON (x.key = y.key)
 WHERE x.partitionId = 2 AND
       y.partitionId = 2
 GROUP BY x.key;
 
 SELECT x.key AS key, count(1) AS cnt
-FROM part_table x JOIN part_table y ON (x.key = y.key)
+FROM part_table_n1 x JOIN part_table_n1 y ON (x.key = y.key)
 WHERE x.partitionId = 2 AND
       y.partitionId = 2
 GROUP BY x.key;

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/correlationoptimizer4.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/correlationoptimizer4.q 
b/ql/src/test/queries/clientpositive/correlationoptimizer4.q
index 02edeff..c34ff23 100644
--- a/ql/src/test/queries/clientpositive/correlationoptimizer4.q
+++ b/ql/src/test/queries/clientpositive/correlationoptimizer4.q
@@ -1,10 +1,10 @@
 set hive.mapred.mode=nonstrict;
-CREATE TABLE T1(key INT, val STRING);
-LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
-CREATE TABLE T2(key INT, val STRING);
-LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2;
-CREATE TABLE T3(key INT, val STRING);
-LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T3;
+CREATE TABLE T1_n146(key INT, val STRING);
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n146;
+CREATE TABLE T2_n86(key INT, val STRING);
+LOAD DATA LOCAL INPATH '../../data/files/T2.txt' INTO TABLE T2_n86;
+CREATE TABLE T3_n34(key INT, val STRING);
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T3_n34;
 
 set hive.auto.convert.join=false;
 set hive.optimize.correlation=false;
@@ -15,24 +15,24 @@ set hive.optimize.correlation=false;
 EXPLAIN
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT y.key AS key, count(1) AS cnt
-      FROM T2 x JOIN T1 y ON (x.key = y.key) JOIN T3 z ON (y.key = z.key)
+      FROM T2_n86 x JOIN T1_n146 y ON (x.key = y.key) JOIN T3_n34 z ON (y.key 
= z.key)
       GROUP BY y.key) tmp;
 
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT y.key AS key, count(1) AS cnt
-      FROM T2 x JOIN T1 y ON (x.key = y.key) JOIN T3 z ON (y.key = z.key)
+      FROM T2_n86 x JOIN T1_n146 y ON (x.key = y.key) JOIN T3_n34 z ON (y.key 
= z.key)
       GROUP BY y.key) tmp;
 
 set hive.optimize.correlation=true;
 EXPLAIN
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT y.key AS key, count(1) AS cnt
-      FROM T2 x JOIN T1 y ON (x.key = y.key) JOIN T3 z ON (y.key = z.key)
+      FROM T2_n86 x JOIN T1_n146 y ON (x.key = y.key) JOIN T3_n34 z ON (y.key 
= z.key)
       GROUP BY y.key) tmp;
 
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT y.key AS key, count(1) AS cnt
-      FROM T2 x JOIN T1 y ON (x.key = y.key) JOIN T3 z ON (y.key = z.key)
+      FROM T2_n86 x JOIN T1_n146 y ON (x.key = y.key) JOIN T3_n34 z ON (y.key 
= z.key)
       GROUP BY y.key) tmp;
 
 set hive.optimize.correlation=true;
@@ -41,12 +41,12 @@ set hive.auto.convert.join=true;
 EXPLAIN
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT y.key AS key, count(1) AS cnt
-      FROM T2 x JOIN T1 y ON (x.key = y.key) JOIN T3 z ON (y.key = z.key)
+      FROM T2_n86 x JOIN T1_n146 y ON (x.key = y.key) JOIN T3_n34 z ON (y.key 
= z.key)
       GROUP BY y.key) tmp;
 
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT y.key AS key, count(1) AS cnt
-      FROM T2 x JOIN T1 y ON (x.key = y.key) JOIN T3 z ON (y.key = z.key)
+      FROM T2_n86 x JOIN T1_n146 y ON (x.key = y.key) JOIN T3_n34 z ON (y.key 
= z.key)
       GROUP BY y.key) tmp;
 
 set hive.auto.convert.join=false;
@@ -56,24 +56,24 @@ set hive.optimize.correlation=false;
 EXPLAIN
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT x.key AS key, count(1) AS cnt
-      FROM T2 x LEFT OUTER JOIN T1 y ON (x.key = y.key) LEFT OUTER JOIN T3 z 
ON (y.key = z.key)
+      FROM T2_n86 x LEFT OUTER JOIN T1_n146 y ON (x.key = y.key) LEFT OUTER 
JOIN T3_n34 z ON (y.key = z.key)
       GROUP BY x.key) tmp;
 
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT x.key AS key, count(1) AS cnt
-      FROM T2 x LEFT OUTER JOIN T1 y ON (x.key = y.key) LEFT OUTER JOIN T3 z 
ON (y.key = z.key)
+      FROM T2_n86 x LEFT OUTER JOIN T1_n146 y ON (x.key = y.key) LEFT OUTER 
JOIN T3_n34 z ON (y.key = z.key)
       GROUP BY x.key) tmp;
 
 set hive.optimize.correlation=true;
 EXPLAIN
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT x.key AS key, count(1) AS cnt
-      FROM T2 x LEFT OUTER JOIN T1 y ON (x.key = y.key) LEFT OUTER JOIN T3 z 
ON (y.key = z.key)
+      FROM T2_n86 x LEFT OUTER JOIN T1_n146 y ON (x.key = y.key) LEFT OUTER 
JOIN T3_n34 z ON (y.key = z.key)
       GROUP BY x.key) tmp;
 
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT x.key AS key, count(1) AS cnt
-      FROM T2 x LEFT OUTER JOIN T1 y ON (x.key = y.key) LEFT OUTER JOIN T3 z 
ON (y.key = z.key)
+      FROM T2_n86 x LEFT OUTER JOIN T1_n146 y ON (x.key = y.key) LEFT OUTER 
JOIN T3_n34 z ON (y.key = z.key)
       GROUP BY x.key) tmp;
 
 set hive.optimize.correlation=true;
@@ -83,12 +83,12 @@ set hive.optimize.correlation=true;
 EXPLAIN
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT y.key AS key, count(1) AS cnt
-      FROM T2 x LEFT OUTER JOIN T1 y ON (x.key = y.key) LEFT OUTER JOIN T3 z 
ON (y.key = z.key)
+      FROM T2_n86 x LEFT OUTER JOIN T1_n146 y ON (x.key = y.key) LEFT OUTER 
JOIN T3_n34 z ON (y.key = z.key)
       GROUP BY y.key) tmp;
 
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT y.key AS key, count(1) AS cnt
-      FROM T2 x LEFT OUTER JOIN T1 y ON (x.key = y.key) LEFT OUTER JOIN T3 z 
ON (y.key = z.key)
+      FROM T2_n86 x LEFT OUTER JOIN T1_n146 y ON (x.key = y.key) LEFT OUTER 
JOIN T3_n34 z ON (y.key = z.key)
       GROUP BY y.key) tmp;
 
 set hive.optimize.correlation=false;
@@ -97,24 +97,24 @@ set hive.optimize.correlation=false;
 EXPLAIN
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT z.key AS key, count(1) AS cnt
-      FROM T2 x RIGHT OUTER JOIN T1 y ON (x.key = y.key) RIGHT OUTER JOIN T3 z 
ON (y.key = z.key)
+      FROM T2_n86 x RIGHT OUTER JOIN T1_n146 y ON (x.key = y.key) RIGHT OUTER 
JOIN T3_n34 z ON (y.key = z.key)
       GROUP BY z.key) tmp;
 
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT z.key AS key, count(1) AS cnt
-      FROM T2 x RIGHT OUTER JOIN T1 y ON (x.key = y.key) RIGHT OUTER JOIN T3 z 
ON (y.key = z.key)
+      FROM T2_n86 x RIGHT OUTER JOIN T1_n146 y ON (x.key = y.key) RIGHT OUTER 
JOIN T3_n34 z ON (y.key = z.key)
       GROUP BY z.key) tmp;
 
 set hive.optimize.correlation=true;
 EXPLAIN
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT z.key AS key, count(1) AS cnt
-      FROM T2 x RIGHT OUTER JOIN T1 y ON (x.key = y.key) RIGHT OUTER JOIN T3 z 
ON (y.key = z.key)
+      FROM T2_n86 x RIGHT OUTER JOIN T1_n146 y ON (x.key = y.key) RIGHT OUTER 
JOIN T3_n34 z ON (y.key = z.key)
       GROUP BY z.key) tmp;
 
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT z.key AS key, count(1) AS cnt
-      FROM T2 x RIGHT OUTER JOIN T1 y ON (x.key = y.key) RIGHT OUTER JOIN T3 z 
ON (y.key = z.key)
+      FROM T2_n86 x RIGHT OUTER JOIN T1_n146 y ON (x.key = y.key) RIGHT OUTER 
JOIN T3_n34 z ON (y.key = z.key)
       GROUP BY z.key) tmp;
 
 set hive.optimize.correlation=true;
@@ -124,12 +124,12 @@ set hive.optimize.correlation=true;
 EXPLAIN
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT y.key AS key, count(1) AS cnt
-      FROM T2 x RIGHT OUTER JOIN T1 y ON (x.key = y.key) RIGHT OUTER JOIN T3 z 
ON (y.key = z.key)
+      FROM T2_n86 x RIGHT OUTER JOIN T1_n146 y ON (x.key = y.key) RIGHT OUTER 
JOIN T3_n34 z ON (y.key = z.key)
       GROUP BY y.key) tmp;
 
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT y.key AS key, count(1) AS cnt
-      FROM T2 x RIGHT OUTER JOIN T1 y ON (x.key = y.key) RIGHT OUTER JOIN T3 z 
ON (y.key = z.key)
+      FROM T2_n86 x RIGHT OUTER JOIN T1_n146 y ON (x.key = y.key) RIGHT OUTER 
JOIN T3_n34 z ON (y.key = z.key)
       GROUP BY y.key) tmp;
 
 set hive.optimize.correlation=false;
@@ -138,22 +138,22 @@ set hive.optimize.correlation=false;
 EXPLAIN
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT y.key AS key, count(1) AS cnt
-      FROM T2 x FULL OUTER JOIN T1 y ON (x.key = y.key) FULL OUTER JOIN T3 z 
ON (y.key = z.key)
+      FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER 
JOIN T3_n34 z ON (y.key = z.key)
       GROUP BY y.key) tmp;
 
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT y.key AS key, count(1) AS cnt
-      FROM T2 x FULL OUTER JOIN T1 y ON (x.key = y.key) FULL OUTER JOIN T3 z 
ON (y.key = z.key)
+      FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER 
JOIN T3_n34 z ON (y.key = z.key)
       GROUP BY y.key) tmp;
 
 set hive.optimize.correlation=true;
 EXPLAIN
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT y.key AS key, count(1) AS cnt
-      FROM T2 x FULL OUTER JOIN T1 y ON (x.key = y.key) FULL OUTER JOIN T3 z 
ON (y.key = z.key)
+      FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER 
JOIN T3_n34 z ON (y.key = z.key)
       GROUP BY y.key) tmp;
 
 SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
 FROM (SELECT y.key AS key, count(1) AS cnt
-      FROM T2 x FULL OUTER JOIN T1 y ON (x.key = y.key) FULL OUTER JOIN T3 z 
ON (y.key = z.key)
+      FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER 
JOIN T3_n34 z ON (y.key = z.key)
       GROUP BY y.key) tmp;

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/correlationoptimizer5.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/correlationoptimizer5.q 
b/ql/src/test/queries/clientpositive/correlationoptimizer5.q
index 002fb12..d75a48f 100644
--- a/ql/src/test/queries/clientpositive/correlationoptimizer5.q
+++ b/ql/src/test/queries/clientpositive/correlationoptimizer5.q
@@ -1,14 +1,14 @@
 set hive.stats.column.autogather=false;
 -- Currently, a query with multiple FileSinkOperators are not supported.
 set hive.mapred.mode=nonstrict;
-CREATE TABLE T1(key INT, val STRING);
-LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1;
-CREATE TABLE T2(key INT, val STRING);
-LOAD DATA LOCAL INPATH '../../data/files/kv2.txt' INTO TABLE T2;
-CREATE TABLE T3(key INT, val STRING);
-LOAD DATA LOCAL INPATH '../../data/files/kv3.txt' INTO TABLE T3;
-CREATE TABLE T4(key INT, val STRING);
-LOAD DATA LOCAL INPATH '../../data/files/kv5.txt' INTO TABLE T4;
+CREATE TABLE T1_n19(key INT, val STRING);
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1_n19;
+CREATE TABLE T2_n11(key INT, val STRING);
+LOAD DATA LOCAL INPATH '../../data/files/kv2.txt' INTO TABLE T2_n11;
+CREATE TABLE T3_n5(key INT, val STRING);
+LOAD DATA LOCAL INPATH '../../data/files/kv3.txt' INTO TABLE T3_n5;
+CREATE TABLE T4_n1(key INT, val STRING);
+LOAD DATA LOCAL INPATH '../../data/files/kv5.txt' INTO TABLE T4_n1;
 
 CREATE TABLE dest_co1(key INT, val STRING);
 CREATE TABLE dest_co2(key INT, val STRING);
@@ -22,17 +22,17 @@ EXPLAIN
 INSERT OVERWRITE TABLE dest_co1
 SELECT b.key, d.val
 FROM
-(SELECT x.key, x.val FROM T1 x JOIN T2 y ON (x.key = y.key)) b
+(SELECT x.key, x.val FROM T1_n19 x JOIN T2_n11 y ON (x.key = y.key)) b
 JOIN
-(SELECT m.key, n.val FROM T3 m JOIN T4 n ON (m.key = n.key)) d
+(SELECT m.key, n.val FROM T3_n5 m JOIN T4_n1 n ON (m.key = n.key)) d
 ON b.key = d.key;
 
 INSERT OVERWRITE TABLE dest_co1
 SELECT b.key, d.val
 FROM
-(SELECT x.key, x.val FROM T1 x JOIN T2 y ON (x.key = y.key)) b
+(SELECT x.key, x.val FROM T1_n19 x JOIN T2_n11 y ON (x.key = y.key)) b
 JOIN
-(SELECT m.key, n.val FROM T3 m JOIN T4 n ON (m.key = n.key)) d
+(SELECT m.key, n.val FROM T3_n5 m JOIN T4_n1 n ON (m.key = n.key)) d
 ON b.key = d.key;
 
 set hive.optimize.correlation=true;
@@ -40,17 +40,17 @@ EXPLAIN
 INSERT OVERWRITE TABLE dest_co2
 SELECT b.key, d.val
 FROM
-(SELECT x.key, x.val FROM T1 x JOIN T2 y ON (x.key = y.key)) b
+(SELECT x.key, x.val FROM T1_n19 x JOIN T2_n11 y ON (x.key = y.key)) b
 JOIN
-(SELECT m.key, n.val FROM T3 m JOIN T4 n ON (m.key = n.key)) d
+(SELECT m.key, n.val FROM T3_n5 m JOIN T4_n1 n ON (m.key = n.key)) d
 ON b.key = d.key;
 
 INSERT OVERWRITE TABLE dest_co2
 SELECT b.key, d.val
 FROM
-(SELECT x.key, x.val FROM T1 x JOIN T2 y ON (x.key = y.key)) b
+(SELECT x.key, x.val FROM T1_n19 x JOIN T2_n11 y ON (x.key = y.key)) b
 JOIN
-(SELECT m.key, n.val FROM T3 m JOIN T4 n ON (m.key = n.key)) d
+(SELECT m.key, n.val FROM T3_n5 m JOIN T4_n1 n ON (m.key = n.key)) d
 ON b.key = d.key;
 
 set hive.optimize.correlation=true;
@@ -61,17 +61,17 @@ EXPLAIN
 INSERT OVERWRITE TABLE dest_co3
 SELECT b.key, d.val
 FROM
-(SELECT x.key, x.val FROM T1 x JOIN T2 y ON (x.key = y.key)) b
+(SELECT x.key, x.val FROM T1_n19 x JOIN T2_n11 y ON (x.key = y.key)) b
 JOIN
-(SELECT m.key, n.val FROM T3 m JOIN T4 n ON (m.key = n.key)) d
+(SELECT m.key, n.val FROM T3_n5 m JOIN T4_n1 n ON (m.key = n.key)) d
 ON b.key = d.key;
 
 INSERT OVERWRITE TABLE dest_co3
 SELECT b.key, d.val
 FROM
-(SELECT x.key, x.val FROM T1 x JOIN T2 y ON (x.key = y.key)) b
+(SELECT x.key, x.val FROM T1_n19 x JOIN T2_n11 y ON (x.key = y.key)) b
 JOIN
-(SELECT m.key, n.val FROM T3 m JOIN T4 n ON (m.key = n.key)) d
+(SELECT m.key, n.val FROM T3_n5 m JOIN T4_n1 n ON (m.key = n.key)) d
 ON b.key = d.key;
 
 -- dest_co1, dest_co2 and dest_co3 should be same

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/correlationoptimizer9.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/correlationoptimizer9.q 
b/ql/src/test/queries/clientpositive/correlationoptimizer9.q
index 38f9480..3f97b4e 100644
--- a/ql/src/test/queries/clientpositive/correlationoptimizer9.q
+++ b/ql/src/test/queries/clientpositive/correlationoptimizer9.q
@@ -2,27 +2,27 @@
 set hive.mapred.mode=nonstrict;
 -- SORT_QUERY_RESULTS
 
-CREATE TABLE tmp(c1 INT, c2 INT, c3 STRING, c4 STRING);
+CREATE TABLE tmp_n2(c1 INT, c2 INT, c3 STRING, c4 STRING);
 
 set hive.auto.convert.join=false;
 
-INSERT OVERWRITE TABLE tmp
+INSERT OVERWRITE TABLE tmp_n2
 SELECT x.key, y.key, x.value, y.value FROM src x JOIN src y ON (x.key = y.key);
 
 set hive.optimize.correlation=false;
 EXPLAIN
 SELECT xx.key, yy.key, xx.cnt, yy.cnt
 FROM 
-(SELECT x.c1 AS key, count(1) AS cnt FROM tmp x WHERE x.c1 < 120 GROUP BY 
x.c1) xx
+(SELECT x.c1 AS key, count(1) AS cnt FROM tmp_n2 x WHERE x.c1 < 120 GROUP BY 
x.c1) xx
 JOIN
-(SELECT x1.c2 AS key, count(1) AS cnt FROM tmp x1 WHERE x1.c2 > 100 GROUP BY 
x1.c2) yy
+(SELECT x1.c2 AS key, count(1) AS cnt FROM tmp_n2 x1 WHERE x1.c2 > 100 GROUP 
BY x1.c2) yy
 ON (xx.key = yy.key);
 
 SELECT xx.key, yy.key, xx.cnt, yy.cnt
 FROM 
-(SELECT x.c1 AS key, count(1) AS cnt FROM tmp x WHERE x.c1 < 120 GROUP BY 
x.c1) xx
+(SELECT x.c1 AS key, count(1) AS cnt FROM tmp_n2 x WHERE x.c1 < 120 GROUP BY 
x.c1) xx
 JOIN
-(SELECT x1.c2 AS key, count(1) AS cnt FROM tmp x1 WHERE x1.c2 > 100 GROUP BY 
x1.c2) yy
+(SELECT x1.c2 AS key, count(1) AS cnt FROM tmp_n2 x1 WHERE x1.c2 > 100 GROUP 
BY x1.c2) yy
 ON (xx.key = yy.key);
 
 set hive.optimize.correlation=true;
@@ -30,46 +30,46 @@ set hive.optimize.correlation=true;
 EXPLAIN
 SELECT xx.key, yy.key, xx.cnt, yy.cnt
 FROM 
-(SELECT x.c1 AS key, count(1) AS cnt FROM tmp x WHERE x.c1 < 120 GROUP BY 
x.c1) xx
+(SELECT x.c1 AS key, count(1) AS cnt FROM tmp_n2 x WHERE x.c1 < 120 GROUP BY 
x.c1) xx
 JOIN
-(SELECT x1.c2 AS key, count(1) AS cnt FROM tmp x1 WHERE x1.c2 > 100 GROUP BY 
x1.c2) yy
+(SELECT x1.c2 AS key, count(1) AS cnt FROM tmp_n2 x1 WHERE x1.c2 > 100 GROUP 
BY x1.c2) yy
 ON (xx.key = yy.key);
 
 SELECT xx.key, yy.key, xx.cnt, yy.cnt
 FROM 
-(SELECT x.c1 AS key, count(1) AS cnt FROM tmp x WHERE x.c1 < 120 GROUP BY 
x.c1) xx
+(SELECT x.c1 AS key, count(1) AS cnt FROM tmp_n2 x WHERE x.c1 < 120 GROUP BY 
x.c1) xx
 JOIN
-(SELECT x1.c2 AS key, count(1) AS cnt FROM tmp x1 WHERE x1.c2 > 100 GROUP BY 
x1.c2) yy
+(SELECT x1.c2 AS key, count(1) AS cnt FROM tmp_n2 x1 WHERE x1.c2 > 100 GROUP 
BY x1.c2) yy
 ON (xx.key = yy.key);
 
 set hive.optimize.correlation=false;
 EXPLAIN
 SELECT xx.key1, xx.key2, yy.key1, yy.key2, xx.cnt, yy.cnt
 FROM 
-(SELECT x.c1 AS key1, x.c3 AS key2, count(1) AS cnt FROM tmp x WHERE x.c1 < 
120 GROUP BY x.c1, x.c3) xx
+(SELECT x.c1 AS key1, x.c3 AS key2, count(1) AS cnt FROM tmp_n2 x WHERE x.c1 < 
120 GROUP BY x.c1, x.c3) xx
 JOIN
-(SELECT x1.c1 AS key1, x1.c3 AS key2, count(1) AS cnt FROM tmp x1 WHERE x1.c2 
> 100 GROUP BY x1.c1, x1.c3) yy
+(SELECT x1.c1 AS key1, x1.c3 AS key2, count(1) AS cnt FROM tmp_n2 x1 WHERE 
x1.c2 > 100 GROUP BY x1.c1, x1.c3) yy
 ON (xx.key1 = yy.key1 AND xx.key2 == yy.key2);
 
 SELECT xx.key1, xx.key2, yy.key1, yy.key2, xx.cnt, yy.cnt
 FROM 
-(SELECT x.c1 AS key1, x.c3 AS key2, count(1) AS cnt FROM tmp x WHERE x.c1 < 
120 GROUP BY x.c1, x.c3) xx
+(SELECT x.c1 AS key1, x.c3 AS key2, count(1) AS cnt FROM tmp_n2 x WHERE x.c1 < 
120 GROUP BY x.c1, x.c3) xx
 JOIN
-(SELECT x1.c1 AS key1, x1.c3 AS key2, count(1) AS cnt FROM tmp x1 WHERE x1.c2 
> 100 GROUP BY x1.c1, x1.c3) yy
+(SELECT x1.c1 AS key1, x1.c3 AS key2, count(1) AS cnt FROM tmp_n2 x1 WHERE 
x1.c2 > 100 GROUP BY x1.c1, x1.c3) yy
 ON (xx.key1 = yy.key1 AND xx.key2 == yy.key2);
 
 set hive.optimize.correlation=true;
 EXPLAIN
 SELECT xx.key1, xx.key2, yy.key1, yy.key2, xx.cnt, yy.cnt
 FROM 
-(SELECT x.c1 AS key1, x.c3 AS key2, count(1) AS cnt FROM tmp x WHERE x.c1 < 
120 GROUP BY x.c1, x.c3) xx
+(SELECT x.c1 AS key1, x.c3 AS key2, count(1) AS cnt FROM tmp_n2 x WHERE x.c1 < 
120 GROUP BY x.c1, x.c3) xx
 JOIN
-(SELECT x1.c1 AS key1, x1.c3 AS key2, count(1) AS cnt FROM tmp x1 WHERE x1.c2 
> 100 GROUP BY x1.c1, x1.c3) yy
+(SELECT x1.c1 AS key1, x1.c3 AS key2, count(1) AS cnt FROM tmp_n2 x1 WHERE 
x1.c2 > 100 GROUP BY x1.c1, x1.c3) yy
 ON (xx.key1 = yy.key1 AND xx.key2 == yy.key2);
 
 SELECT xx.key1, xx.key2, yy.key1, yy.key2, xx.cnt, yy.cnt
 FROM 
-(SELECT x.c1 AS key1, x.c3 AS key2, count(1) AS cnt FROM tmp x WHERE x.c1 < 
120 GROUP BY x.c1, x.c3) xx
+(SELECT x.c1 AS key1, x.c3 AS key2, count(1) AS cnt FROM tmp_n2 x WHERE x.c1 < 
120 GROUP BY x.c1, x.c3) xx
 JOIN
-(SELECT x1.c1 AS key1, x1.c3 AS key2, count(1) AS cnt FROM tmp x1 WHERE x1.c2 
> 100 GROUP BY x1.c1, x1.c3) yy
+(SELECT x1.c1 AS key1, x1.c3 AS key2, count(1) AS cnt FROM tmp_n2 x1 WHERE 
x1.c2 > 100 GROUP BY x1.c1, x1.c3) yy
 ON (xx.key1 = yy.key1 AND xx.key2 == yy.key2);

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/count.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/count.q 
b/ql/src/test/queries/clientpositive/count.q
index 2849d9a..a369f50 100644
--- a/ql/src/test/queries/clientpositive/count.q
+++ b/ql/src/test/queries/clientpositive/count.q
@@ -1,54 +1,54 @@
 set hive.mapred.mode=nonstrict;
 set hive.explain.user=false;
 -- SORT_QUERY_RESULTS
-create table abcd (a int, b int, c int, d int);
-LOAD DATA LOCAL INPATH '../../data/files/in4.txt' INTO TABLE abcd;
+create table abcd_n2 (a int, b int, c int, d int);
+LOAD DATA LOCAL INPATH '../../data/files/in4.txt' INTO TABLE abcd_n2;
 
-select * from abcd;
+select * from abcd_n2;
 set hive.map.aggr=true;
-explain select a, count(distinct b), count(distinct c), sum(d) from abcd group 
by a;
-select a, count(distinct b), count(distinct c), sum(d) from abcd group by a;
+explain select a, count(distinct b), count(distinct c), sum(d) from abcd_n2 
group by a;
+select a, count(distinct b), count(distinct c), sum(d) from abcd_n2 group by a;
 
-explain select count(1), count(*), count(a), count(b), count(c), count(d), 
count(distinct a), count(distinct b), count(distinct c), count(distinct d), 
count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct 
a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), 
count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), 
count(distinct a,b,c,d) from abcd;
-select count(1), count(*), count(a), count(b), count(c), count(d), 
count(distinct a), count(distinct b), count(distinct c), count(distinct d), 
count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct 
a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), 
count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), 
count(distinct a,b,c,d) from abcd;
+explain select count(1), count(*), count(a), count(b), count(c), count(d), 
count(distinct a), count(distinct b), count(distinct c), count(distinct d), 
count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct 
a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), 
count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), 
count(distinct a,b,c,d) from abcd_n2;
+select count(1), count(*), count(a), count(b), count(c), count(d), 
count(distinct a), count(distinct b), count(distinct c), count(distinct d), 
count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct 
a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), 
count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), 
count(distinct a,b,c,d) from abcd_n2;
 
 set hive.map.aggr=false;
-explain select a, count(distinct b), count(distinct c), sum(d) from abcd group 
by a;
-select a, count(distinct b), count(distinct c), sum(d) from abcd group by a;
+explain select a, count(distinct b), count(distinct c), sum(d) from abcd_n2 
group by a;
+select a, count(distinct b), count(distinct c), sum(d) from abcd_n2 group by a;
 
-explain select count(1), count(*), count(a), count(b), count(c), count(d), 
count(distinct a), count(distinct b), count(distinct c), count(distinct d), 
count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct 
a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), 
count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), 
count(distinct a,b,c,d) from abcd;
-select count(1), count(*), count(a), count(b), count(c), count(d), 
count(distinct a), count(distinct b), count(distinct c), count(distinct d), 
count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct 
a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), 
count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), 
count(distinct a,b,c,d) from abcd;
+explain select count(1), count(*), count(a), count(b), count(c), count(d), 
count(distinct a), count(distinct b), count(distinct c), count(distinct d), 
count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct 
a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), 
count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), 
count(distinct a,b,c,d) from abcd_n2;
+select count(1), count(*), count(a), count(b), count(c), count(d), 
count(distinct a), count(distinct b), count(distinct c), count(distinct d), 
count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct 
a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), 
count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), 
count(distinct a,b,c,d) from abcd_n2;
 
 set hive.cbo.returnpath.hiveop=true;
 
 set hive.map.aggr=true;
 --first aggregation with literal. gbinfo was generating wrong expression
-explain select count(1), count(*), count(a), count(b), count(c), count(d), 
count(distinct a), count(distinct b), count(distinct c), count(distinct d), 
count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct 
a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), 
count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), 
count(distinct a,b,c,d) from abcd;
-select count(1), count(*), count(a), count(b), count(c), count(d), 
count(distinct a), count(distinct b), count(distinct c), count(distinct d), 
count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct 
a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), 
count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), 
count(distinct a,b,c,d) from abcd;
+explain select count(1), count(*), count(a), count(b), count(c), count(d), 
count(distinct a), count(distinct b), count(distinct c), count(distinct d), 
count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct 
a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), 
count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), 
count(distinct a,b,c,d) from abcd_n2;
+select count(1), count(*), count(a), count(b), count(c), count(d), 
count(distinct a), count(distinct b), count(distinct c), count(distinct d), 
count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct 
a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), 
count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), 
count(distinct a,b,c,d) from abcd_n2;
 
 set hive.map.aggr=false;
-explain select count(distinct b) from abcd group by a;
-select count(distinct b) from abcd group by a;
+explain select count(distinct b) from abcd_n2 group by a;
+select count(distinct b) from abcd_n2 group by a;
 
-explain select count(distinct b) from abcd group by b;
-select count(distinct b) from abcd group by b;
+explain select count(distinct b) from abcd_n2 group by b;
+select count(distinct b) from abcd_n2 group by b;
 
-explain select count(distinct b) from abcd group by c;
-select count(distinct b) from abcd group by c;
+explain select count(distinct b) from abcd_n2 group by c;
+select count(distinct b) from abcd_n2 group by c;
 
-explain select count(b), count(distinct c) from abcd group by d;
-select count(b), count(distinct c) from abcd group by d;
+explain select count(b), count(distinct c) from abcd_n2 group by d;
+select count(b), count(distinct c) from abcd_n2 group by d;
 
 --non distinct aggregate with same column as group by key
-explain select a, count(distinct b), count(distinct c), sum(d), sum(d+d), 
sum(d*3), sum(b), sum(c), sum(a), sum(distinct a), sum(distinct b) from abcd 
group by a;
-select a, count(distinct b), count(distinct c), sum(d), sum(d+d), sum(d*3), 
sum(b), sum(c), sum(a), sum(distinct a), sum(distinct b) from abcd group by a;
+explain select a, count(distinct b), count(distinct c), sum(d), sum(d+d), 
sum(d*3), sum(b), sum(c), sum(a), sum(distinct a), sum(distinct b) from abcd_n2 
group by a;
+select a, count(distinct b), count(distinct c), sum(d), sum(d+d), sum(d*3), 
sum(b), sum(c), sum(a), sum(distinct a), sum(distinct b) from abcd_n2 group by 
a;
 
 --non distinct aggregate with same column as distinct aggregate
-explain select a, count(distinct b), count(distinct c), sum(d), sum(c) from 
abcd group by a;
-select a, count(distinct b), count(distinct c), sum(d), sum(c) from abcd group 
by a;
+explain select a, count(distinct b), count(distinct c), sum(d), sum(c) from 
abcd_n2 group by a;
+select a, count(distinct b), count(distinct c), sum(d), sum(c) from abcd_n2 
group by a;
 
 --aggregation with literal
-explain select count(1), count(*), count(a), count(b), count(c), count(d), 
count(distinct a), count(distinct b), count(distinct c), count(distinct d), 
count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct 
a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), 
count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), 
count(distinct a,b,c,d) from abcd;
-select count(1), count(*), count(a), count(b), count(c), count(d), 
count(distinct a), count(distinct b), count(distinct c), count(distinct d), 
count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct 
a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), 
count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), 
count(distinct a,b,c,d) from abcd;
+explain select count(1), count(*), count(a), count(b), count(c), count(d), 
count(distinct a), count(distinct b), count(distinct c), count(distinct d), 
count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct 
a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), 
count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), 
count(distinct a,b,c,d) from abcd_n2;
+select count(1), count(*), count(a), count(b), count(c), count(d), 
count(distinct a), count(distinct b), count(distinct c), count(distinct d), 
count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct 
a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), 
count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), 
count(distinct a,b,c,d) from abcd_n2;
 
 set hive.cbo.returnpath.hiveop=false;

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/create_1.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/create_1.q 
b/ql/src/test/queries/clientpositive/create_1.q
index f348e59..d1b89d2 100644
--- a/ql/src/test/queries/clientpositive/create_1.q
+++ b/ql/src/test/queries/clientpositive/create_1.q
@@ -1,14 +1,14 @@
 set fs.default.name=invalidscheme:///;
 
-CREATE TABLE table1 (a STRING, b STRING) STORED AS TEXTFILE;
-DESCRIBE table1;
-DESCRIBE EXTENDED table1;
+CREATE TABLE table1_n5 (a STRING, b STRING) STORED AS TEXTFILE;
+DESCRIBE table1_n5;
+DESCRIBE EXTENDED table1_n5;
 
-CREATE TABLE IF NOT EXISTS table1 (a STRING, b STRING) STORED AS TEXTFILE;
+CREATE TABLE IF NOT EXISTS table1_n5 (a STRING, b STRING) STORED AS TEXTFILE;
 
-CREATE TABLE IF NOT EXISTS table2 (a STRING, b INT) STORED AS TEXTFILE;
-DESCRIBE table2;
-DESCRIBE EXTENDED table2;
+CREATE TABLE IF NOT EXISTS table2_n2 (a STRING, b INT) STORED AS TEXTFILE;
+DESCRIBE table2_n2;
+DESCRIBE EXTENDED table2_n2;
 
 CREATE TABLE table3 (a STRING, b STRING)
 ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
@@ -22,8 +22,8 @@ STORED AS SEQUENCEFILE;
 DESCRIBE table4;
 DESCRIBE EXTENDED table4;
 
-CREATE TABLE table5 (a STRING, b STRING)
+CREATE TABLE table5_n1 (a STRING, b STRING)
 ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
 STORED AS RCFILE;
-DESCRIBE table5;
-DESCRIBE EXTENDED table5;
+DESCRIBE table5_n1;
+DESCRIBE EXTENDED table5_n1;

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/create_escape.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/create_escape.q 
b/ql/src/test/queries/clientpositive/create_escape.q
index d30b0da..a52d230 100644
--- a/ql/src/test/queries/clientpositive/create_escape.q
+++ b/ql/src/test/queries/clientpositive/create_escape.q
@@ -1,11 +1,11 @@
 --! qt:dataset:src
-CREATE TABLE table1 (a STRING, b STRING)
+CREATE TABLE table1_n6 (a STRING, b STRING)
 ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' ESCAPED BY '\\'
 STORED AS TEXTFILE;
 
-DESCRIBE table1;
-DESCRIBE EXTENDED table1;
+DESCRIBE table1_n6;
+DESCRIBE EXTENDED table1_n6;
 
-INSERT OVERWRITE TABLE table1 SELECT key, '\\\t\\' FROM src WHERE key = 86;
+INSERT OVERWRITE TABLE table1_n6 SELECT key, '\\\t\\' FROM src WHERE key = 86;
 
-SELECT * FROM table1;
+SELECT * FROM table1_n6;

http://git-wip-us.apache.org/repos/asf/hive/blob/38d3b8e1/ql/src/test/queries/clientpositive/create_genericudf.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/create_genericudf.q 
b/ql/src/test/queries/clientpositive/create_genericudf.q
index ad67027..4e63724 100644
--- a/ql/src/test/queries/clientpositive/create_genericudf.q
+++ b/ql/src/test/queries/clientpositive/create_genericudf.q
@@ -4,10 +4,10 @@ CREATE TEMPORARY FUNCTION test_translate AS 
'org.apache.hadoop.hive.ql.udf.gener
 
 CREATE TEMPORARY FUNCTION test_translate AS 
'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestTranslate';
 
-CREATE TABLE dest1(c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING, c6 
STRING, c7 STRING);
+CREATE TABLE dest1_n113(c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING, 
c6 STRING, c7 STRING);
 
 FROM src 
-INSERT OVERWRITE TABLE dest1 
+INSERT OVERWRITE TABLE dest1_n113 
 SELECT 
     test_translate('abc', 'a', 'b'),
     test_translate('abc', 'ab', 'bc'),
@@ -17,6 +17,6 @@ SELECT
     test_translate('abc', 'ab', 'b'),
     test_translate('abc', 'a', 'ab');
 
-SELECT dest1.* FROM dest1 LIMIT 1;
+SELECT dest1_n113.* FROM dest1_n113 LIMIT 1;
 
 DROP TEMPORARY FUNCTION test_translate;

Reply via email to