hive git commit: Addendum to HIVE-19396

2018-05-03 Thread hashutosh
Repository: hive
Updated Branches:
  refs/heads/master 1c3b82fb8 -> 39917ef44


Addendum to HIVE-19396


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/39917ef4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/39917ef4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/39917ef4

Branch: refs/heads/master
Commit: 39917ef4495c53597bac2bad7988cc8f0224972f
Parents: 1c3b82f
Author: Ashutosh Chauhan 
Authored: Thu May 3 21:28:47 2018 -0700
Committer: Ashutosh Chauhan 
Committed: Thu May 3 21:28:47 2018 -0700

--
 .../materialized_view_create_rewrite_4.q.out| 312 ++-
 .../clientpositive/tez/explainanalyze_3.q.out   |   8 +-
 .../clientpositive/tez/explainuser_3.q.out  |   8 +-
 .../results/clientpositive/tez/tez-tag.q.out|   4 +-
 4 files changed, 175 insertions(+), 157 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/39917ef4/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out
 
b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out
index c0862b0..4ffea62 100644
--- 
a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out
+++ 
b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out
@@ -659,24 +659,49 @@ POSTHOOK: query: EXPLAIN
 ALTER MATERIALIZED VIEW cmv_mat_view REBUILD
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-  Stage-4 depends on stages: Stage-3
+  Stage-2 is a root stage
+  Stage-3 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-3
+  Stage-4 depends on stages: Stage-0
+  Stage-6 depends on stages: Stage-4, Stage-5
+  Stage-1 depends on stages: Stage-3
+  Stage-5 depends on stages: Stage-1
 
 STAGE PLANS:
-  Stage: Stage-1
+  Stage: Stage-2
 Tez
  A masked pattern was here 
   Edges:
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
-Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 7 (ONE_TO_ONE_EDGE)
+Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
+Reducer 4 <- Reducer 2 (SIMPLE_EDGE)
+Reducer 6 <- Map 5 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
+Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
  A masked pattern was here 
   Vertices:
 Map 1 
 Map Operator Tree:
 TableScan
+  alias: default.cmv_mat_view
+  Statistics: Num rows: 2 Data size: 248 Basic stats: COMPLETE 
Column stats: COMPLETE
+  Filter Operator
+predicate: ((c > 10) and a is not null) (type: boolean)
+Statistics: Num rows: 1 Data size: 124 Basic stats: 
COMPLETE Column stats: COMPLETE
+Select Operator
+  expressions: a (type: int), c (type: decimal(10,2)), _c2 
(type: bigint), ROW__ID (type: struct)
+  outputColumnNames: _col0, _col1, _col2, _col3
+  Statistics: Num rows: 1 Data size: 200 Basic stats: 
COMPLETE Column stats: COMPLETE
+  Reduce Output Operator
+key expressions: _col0 (type: int), _col1 (type: 
decimal(10,2))
+sort order: ++
+Map-reduce partition columns: _col0 (type: int), _col1 
(type: decimal(10,2))
+Statistics: Num rows: 1 Data size: 200 Basic stats: 
COMPLETE Column stats: COMPLETE
+value expressions: _col2 (type: bigint), _col3 (type: 
struct)
+Execution mode: llap
+LLAP IO: may be used (ACID table)
+Map 5 
+Map Operator Tree:
+TableScan
   alias: cmv_basetable
   Statistics: Num rows: 5 Data size: 20 Basic stats: COMPLETE 
Column stats: COMPLETE
   Filter Operator
@@ -693,23 +718,23 @@ STAGE PLANS:
 Statistics: Num rows: 5 Data size: 20 Basic stats: 
COMPLETE Column stats: COMPLETE
 Execution mode: llap
 LLAP IO: may be used (ACID table)
-Map 4 
+Map 8 
 Map Operator Tree:
 TableScan
   alias: cmv_basetable_2
   Statistics: Num rows: 3 Data size: 360 Basic stats: COMPLETE 
Column stats: COMPLETE
   Filter Operator
-  

[4/8] hive git commit: HIVE-19396 : HiveOperation is incorrectly set for analyze statement (Ashutosh Chauhan via Zoltan Haindrich)

2018-05-03 Thread hashutosh
http://git-wip-us.apache.org/repos/asf/hive/blob/1c3b82fb/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_5.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_5.q.out
 
b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_5.q.out
index 4a3fba9..c292baa 100644
--- 
a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_5.q.out
+++ 
b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_5.q.out
@@ -29,12 +29,12 @@ POSTHOOK: Lineage: cmv_basetable.b SCRIPT []
 POSTHOOK: Lineage: cmv_basetable.c SCRIPT []
 POSTHOOK: Lineage: cmv_basetable.d SCRIPT []
 PREHOOK: query: analyze table cmv_basetable compute statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@cmv_basetable
 PREHOOK: Output: default@cmv_basetable
  A masked pattern was here 
 POSTHOOK: query: analyze table cmv_basetable compute statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@cmv_basetable
 POSTHOOK: Output: default@cmv_basetable
  A masked pattern was here 
@@ -63,12 +63,12 @@ POSTHOOK: Lineage: cmv_basetable_2.b SCRIPT []
 POSTHOOK: Lineage: cmv_basetable_2.c SCRIPT []
 POSTHOOK: Lineage: cmv_basetable_2.d SCRIPT []
 PREHOOK: query: analyze table cmv_basetable_2 compute statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@cmv_basetable_2
 PREHOOK: Output: default@cmv_basetable_2
  A masked pattern was here 
 POSTHOOK: query: analyze table cmv_basetable_2 compute statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@cmv_basetable_2
 POSTHOOK: Output: default@cmv_basetable_2
  A masked pattern was here 
@@ -93,12 +93,12 @@ POSTHOOK: Input: default@cmv_basetable_2
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@cmv_mat_view
 PREHOOK: query: analyze table cmv_mat_view compute statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@cmv_mat_view
 PREHOOK: Output: default@cmv_mat_view
  A masked pattern was here 
 POSTHOOK: query: analyze table cmv_mat_view compute statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@cmv_mat_view
 POSTHOOK: Output: default@cmv_mat_view
  A masked pattern was here 
@@ -117,12 +117,12 @@ POSTHOOK: Lineage: cmv_basetable_2.b SCRIPT []
 POSTHOOK: Lineage: cmv_basetable_2.c SCRIPT []
 POSTHOOK: Lineage: cmv_basetable_2.d SCRIPT []
 PREHOOK: query: analyze table cmv_basetable_2 compute statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@cmv_basetable_2
 PREHOOK: Output: default@cmv_basetable_2
  A masked pattern was here 
 POSTHOOK: query: analyze table cmv_basetable_2 compute statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@cmv_basetable_2
 POSTHOOK: Output: default@cmv_basetable_2
  A masked pattern was here 
@@ -178,12 +178,12 @@ STAGE PLANS:
 Select Operator
   expressions: a (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 1 Data size: 4 Basic stats: 
COMPLETE Column stats: COMPLETE
+  Statistics: Num rows: 1 Data size: 116 Basic stats: 
COMPLETE Column stats: COMPLETE
   Reduce Output Operator
 key expressions: _col0 (type: int)
 sort order: +
 Map-reduce partition columns: _col0 (type: int)
-Statistics: Num rows: 1 Data size: 4 Basic stats: 
COMPLETE Column stats: COMPLETE
+Statistics: Num rows: 1 Data size: 116 Basic stats: 
COMPLETE Column stats: COMPLETE
 Execution mode: llap
 LLAP IO: may be used (ACID table)
 Reducer 2 
@@ -277,17 +277,17 @@ STAGE PLANS:
   alias: cmv_basetable_2
   Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE 
Column stats: COMPLETE
   Filter Operator
-predicate: ((ROW__ID.writeid > 1) and (c > 10) and a is 
not null) (type: boolean)
+predicate: ((c > 10) and a is not null) (type: boolean)
 Statistics: Num rows: 1 Data size: 116 Basic stats: 
COMPLETE Column stats: COMPLETE
 Select Operator
   expressions: a (type: int), c (type: decimal(10,2))
   outputColumnNames: _col0, _col1
-  Statistics: Num rows: 1 Data size: 192 Basic stats: 
COMPLETE Column stats: COMPLETE
+  Statistics: Num rows: 1 Data size: 116 Basic stats: 
COMPLETE Column 

[1/8] hive git commit: HIVE-19396 : HiveOperation is incorrectly set for analyze statement (Ashutosh Chauhan via Zoltan Haindrich)

2018-05-03 Thread hashutosh
Repository: hive
Updated Branches:
  refs/heads/master bf8e69643 -> 1c3b82fb8


http://git-wip-us.apache.org/repos/asf/hive/blob/1c3b82fb/ql/src/test/results/clientpositive/tunable_ndv.q.out
--
diff --git a/ql/src/test/results/clientpositive/tunable_ndv.q.out 
b/ql/src/test/results/clientpositive/tunable_ndv.q.out
index a232eaa..551591e 100644
--- a/ql/src/test/results/clientpositive/tunable_ndv.q.out
+++ b/ql/src/test/results/clientpositive/tunable_ndv.q.out
@@ -56,7 +56,7 @@ POSTHOOK: Lineage: loc_orc_1d PARTITION(year=2001).locid 
SIMPLE [(ext_loc)ext_lo
 POSTHOOK: Lineage: loc_orc_1d PARTITION(year=2001).state SIMPLE 
[(ext_loc)ext_loc.FieldSchema(name:state, type:string, comment:null), ]
 POSTHOOK: Lineage: loc_orc_1d PARTITION(year=2001).zip SIMPLE 
[(ext_loc)ext_loc.FieldSchema(name:zip, type:int, comment:null), ]
 PREHOOK: query: analyze table loc_orc_1d compute statistics for columns 
state,locid
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@loc_orc_1d
 PREHOOK: Input: default@loc_orc_1d@year=2000
 PREHOOK: Input: default@loc_orc_1d@year=2001
@@ -65,7 +65,7 @@ PREHOOK: Output: default@loc_orc_1d@year=2000
 PREHOOK: Output: default@loc_orc_1d@year=2001
  A masked pattern was here 
 POSTHOOK: query: analyze table loc_orc_1d compute statistics for columns 
state,locid
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@loc_orc_1d
 POSTHOOK: Input: default@loc_orc_1d@year=2000
 POSTHOOK: Input: default@loc_orc_1d@year=2001
@@ -200,56 +200,56 @@ POSTHOOK: Lineage: loc_orc_2d 
PARTITION(zip=94087,year=2000).state SIMPLE [(ext_
 POSTHOOK: Lineage: loc_orc_2d PARTITION(zip=94087,year=2001).locid SIMPLE 
[(ext_loc)ext_loc.FieldSchema(name:locid, type:int, comment:null), ]
 POSTHOOK: Lineage: loc_orc_2d PARTITION(zip=94087,year=2001).state SIMPLE 
[(ext_loc)ext_loc.FieldSchema(name:state, type:string, comment:null), ]
 PREHOOK: query: analyze table loc_orc_2d partition(zip=94086, year='2000') 
compute statistics for columns state,locid
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@loc_orc_2d
 PREHOOK: Input: default@loc_orc_2d@zip=94086/year=2000
 PREHOOK: Output: default@loc_orc_2d
 PREHOOK: Output: default@loc_orc_2d@zip=94086/year=2000
  A masked pattern was here 
 POSTHOOK: query: analyze table loc_orc_2d partition(zip=94086, year='2000') 
compute statistics for columns state,locid
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@loc_orc_2d
 POSTHOOK: Input: default@loc_orc_2d@zip=94086/year=2000
 POSTHOOK: Output: default@loc_orc_2d
 POSTHOOK: Output: default@loc_orc_2d@zip=94086/year=2000
  A masked pattern was here 
 PREHOOK: query: analyze table loc_orc_2d partition(zip=94087, year='2000') 
compute statistics for columns state,locid
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@loc_orc_2d
 PREHOOK: Input: default@loc_orc_2d@zip=94087/year=2000
 PREHOOK: Output: default@loc_orc_2d
 PREHOOK: Output: default@loc_orc_2d@zip=94087/year=2000
  A masked pattern was here 
 POSTHOOK: query: analyze table loc_orc_2d partition(zip=94087, year='2000') 
compute statistics for columns state,locid
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@loc_orc_2d
 POSTHOOK: Input: default@loc_orc_2d@zip=94087/year=2000
 POSTHOOK: Output: default@loc_orc_2d
 POSTHOOK: Output: default@loc_orc_2d@zip=94087/year=2000
  A masked pattern was here 
 PREHOOK: query: analyze table loc_orc_2d partition(zip=94086, year='2001') 
compute statistics for columns state,locid
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@loc_orc_2d
 PREHOOK: Input: default@loc_orc_2d@zip=94086/year=2001
 PREHOOK: Output: default@loc_orc_2d
 PREHOOK: Output: default@loc_orc_2d@zip=94086/year=2001
  A masked pattern was here 
 POSTHOOK: query: analyze table loc_orc_2d partition(zip=94086, year='2001') 
compute statistics for columns state,locid
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@loc_orc_2d
 POSTHOOK: Input: default@loc_orc_2d@zip=94086/year=2001
 POSTHOOK: Output: default@loc_orc_2d
 POSTHOOK: Output: default@loc_orc_2d@zip=94086/year=2001
  A masked pattern was here 
 PREHOOK: query: analyze table loc_orc_2d partition(zip=94087, year='2001') 
compute statistics for columns state,locid
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@loc_orc_2d
 PREHOOK: Input: default@loc_orc_2d@zip=94087/year=2001
 PREHOOK: Output: default@loc_orc_2d
 PREHOOK: Output: default@loc_orc_2d@zip=94087/year=2001
  A masked pattern was here 
 POSTHOOK: query: analyze table loc_orc_2d partition(zip=94087, year='2001') 
compute statistics for columns state,locid
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@loc_orc_2d
 POSTHOOK: 

[5/8] hive git commit: HIVE-19396 : HiveOperation is incorrectly set for analyze statement (Ashutosh Chauhan via Zoltan Haindrich)

2018-05-03 Thread hashutosh
http://git-wip-us.apache.org/repos/asf/hive/blob/1c3b82fb/ql/src/test/results/clientpositive/llap/columnstats_part_coltype.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/llap/columnstats_part_coltype.q.out 
b/ql/src/test/results/clientpositive/llap/columnstats_part_coltype.q.out
index fff076e..3b3843a 100644
--- a/ql/src/test/results/clientpositive/llap/columnstats_part_coltype.q.out
+++ b/ql/src/test/results/clientpositive/llap/columnstats_part_coltype.q.out
@@ -61,14 +61,14 @@ POSTHOOK: Output: 
default@partcolstats@ds=2015-04-03/hr=3/part=partB
 POSTHOOK: Lineage: partcolstats PARTITION(ds=2015-04-03,hr=3,part=partB).key 
EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partcolstats PARTITION(ds=2015-04-03,hr=3,part=partB).value 
SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: analyze table partcolstats partition (ds=date '2015-04-02', 
hr=2, part='partA') compute statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@partcolstats
 PREHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partA
 PREHOOK: Output: default@partcolstats
 PREHOOK: Output: default@partcolstats@ds=2015-04-02/hr=2/part=partA
  A masked pattern was here 
 POSTHOOK: query: analyze table partcolstats partition (ds=date '2015-04-02', 
hr=2, part='partA') compute statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@partcolstats
 POSTHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partA
 POSTHOOK: Output: default@partcolstats
@@ -147,7 +147,7 @@ num_falses
 bitVector  HL  
 
 commentfrom deserializer   
 
 PREHOOK: query: analyze table partcolstats partition (ds=date '2015-04-02', 
hr=2, part) compute statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@partcolstats
 PREHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partA
 PREHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partB
@@ -156,7 +156,7 @@ PREHOOK: Output: 
default@partcolstats@ds=2015-04-02/hr=2/part=partA
 PREHOOK: Output: default@partcolstats@ds=2015-04-02/hr=2/part=partB
  A masked pattern was here 
 POSTHOOK: query: analyze table partcolstats partition (ds=date '2015-04-02', 
hr=2, part) compute statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@partcolstats
 POSTHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partA
 POSTHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partB
@@ -237,7 +237,7 @@ num_falses
 bitVector  HL  
 
 commentfrom deserializer   
 
 PREHOOK: query: analyze table partcolstats partition (ds=date '2015-04-02', 
hr, part) compute statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@partcolstats
 PREHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partA
 PREHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partB
@@ -248,7 +248,7 @@ PREHOOK: Output: 
default@partcolstats@ds=2015-04-02/hr=2/part=partB
 PREHOOK: Output: default@partcolstats@ds=2015-04-02/hr=3/part=partA
  A masked pattern was here 
 POSTHOOK: query: analyze table partcolstats partition (ds=date '2015-04-02', 
hr, part) compute statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@partcolstats
 POSTHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partA
 POSTHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partB
@@ -367,7 +367,7 @@ num_falses
 bitVector  HL  
 
 commentfrom deserializer   
 
 PREHOOK: query: analyze table partcolstats partition (ds, hr, part) compute 
statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@partcolstats
 PREHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partA
 PREHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partB
@@ -382,7 +382,7 @@ PREHOOK: Output: 
default@partcolstats@ds=2015-04-03/hr=3/part=partA
 PREHOOK: Output: default@partcolstats@ds=2015-04-03/hr=3/part=partB
  A masked pattern was here 
 POSTHOOK: query: analyze table partcolstats partition (ds, hr, part) compute 
statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: 

[7/8] hive git commit: HIVE-19396 : HiveOperation is incorrectly set for analyze statement (Ashutosh Chauhan via Zoltan Haindrich)

2018-05-03 Thread hashutosh
http://git-wip-us.apache.org/repos/asf/hive/blob/1c3b82fb/ql/src/test/results/clientpositive/columnstats_partlvl_dp.q.out
--
diff --git a/ql/src/test/results/clientpositive/columnstats_partlvl_dp.q.out 
b/ql/src/test/results/clientpositive/columnstats_partlvl_dp.q.out
index 66b44bf..b45249c 100644
--- a/ql/src/test/results/clientpositive/columnstats_partlvl_dp.q.out
+++ b/ql/src/test/results/clientpositive/columnstats_partlvl_dp.q.out
@@ -68,10 +68,10 @@ POSTHOOK: Output: default@employee_part
 POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=UK
 PREHOOK: query: explain 
 analyze table Employee_Part partition (employeeSalary='4000.0', country) 
compute statistics for columns employeeName, employeeID
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 POSTHOOK: query: explain 
 analyze table Employee_Part partition (employeeSalary='4000.0', country) 
compute statistics for columns employeeName, employeeID
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 STAGE DEPENDENCIES:
   Stage-0 is a root stage
   Stage-1 depends on stages: Stage-0
@@ -127,14 +127,14 @@ STAGE PLANS:
   Table: default.employee_part
 
 PREHOOK: query: analyze table Employee_Part partition 
(employeeSalary='4000.0', country) compute statistics for columns employeeName, 
employeeID
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@employee_part
 PREHOOK: Input: default@employee_part@employeesalary=4000.0/country=USA
 PREHOOK: Output: default@employee_part
 PREHOOK: Output: default@employee_part@employeesalary=4000.0/country=USA
  A masked pattern was here 
 POSTHOOK: query: analyze table Employee_Part partition 
(employeeSalary='4000.0', country) compute statistics for columns employeeName, 
employeeID
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@employee_part
 POSTHOOK: Input: default@employee_part@employeesalary=4000.0/country=USA
 POSTHOOK: Output: default@employee_part
@@ -199,10 +199,10 @@ bitVector HL
 commentfrom deserializer   
 
 PREHOOK: query: explain
 analyze table Employee_Part partition (employeeSalary='2000.0') compute 
statistics for columns employeeID
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 POSTHOOK: query: explain   
 analyze table Employee_Part partition (employeeSalary='2000.0') compute 
statistics for columns employeeID
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 STAGE DEPENDENCIES:
   Stage-0 is a root stage
   Stage-1 depends on stages: Stage-0
@@ -258,7 +258,7 @@ STAGE PLANS:
   Table: default.employee_part
 
 PREHOOK: query: analyze table Employee_Part partition 
(employeeSalary='2000.0') compute statistics for columns employeeID
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@employee_part
 PREHOOK: Input: default@employee_part@employeesalary=2000.0/country=UK
 PREHOOK: Input: default@employee_part@employeesalary=2000.0/country=USA
@@ -267,7 +267,7 @@ PREHOOK: Output: 
default@employee_part@employeesalary=2000.0/country=UK
 PREHOOK: Output: default@employee_part@employeesalary=2000.0/country=USA
  A masked pattern was here 
 POSTHOOK: query: analyze table Employee_Part partition 
(employeeSalary='2000.0') compute statistics for columns employeeID
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@employee_part
 POSTHOOK: Input: default@employee_part@employeesalary=2000.0/country=UK
 POSTHOOK: Input: default@employee_part@employeesalary=2000.0/country=USA
@@ -313,10 +313,10 @@ bitVector HL
 commentfrom deserializer   
 
 PREHOOK: query: explain
 analyze table Employee_Part partition (employeeSalary) compute statistics for 
columns employeeID
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 POSTHOOK: query: explain   
 analyze table Employee_Part partition (employeeSalary) compute statistics for 
columns employeeID
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 STAGE DEPENDENCIES:
   Stage-0 is a root stage
   Stage-1 depends on stages: Stage-0
@@ -372,7 +372,7 @@ STAGE PLANS:
   Table: default.employee_part
 
 PREHOOK: query: analyze table Employee_Part partition (employeeSalary) compute 
statistics for columns employeeID
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@employee_part
 PREHOOK: Input: default@employee_part@employeesalary=2000.0/country=UK
 PREHOOK: Input: default@employee_part@employeesalary=2000.0/country=USA
@@ -389,7 +389,7 @@ PREHOOK: Output: 
default@employee_part@employeesalary=3500.0/country=UK
 PREHOOK: Output: default@employee_part@employeesalary=4000.0/country=USA
  A masked pattern was here 
 POSTHOOK: query: analyze table 

[8/8] hive git commit: HIVE-19396 : HiveOperation is incorrectly set for analyze statement (Ashutosh Chauhan via Zoltan Haindrich)

2018-05-03 Thread hashutosh
HIVE-19396 : HiveOperation is incorrectly set for analyze statement (Ashutosh 
Chauhan via Zoltan Haindrich)

Signed-off-by: Ashutosh Chauhan 


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1c3b82fb
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1c3b82fb
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1c3b82fb

Branch: refs/heads/master
Commit: 1c3b82fb88bd439e93d39755391642f7dc0bf0ce
Parents: bf8e696
Author: Ashutosh Chauhan 
Authored: Thu May 3 18:52:09 2018 -0700
Committer: Ashutosh Chauhan 
Committed: Thu May 3 18:52:45 2018 -0700

--
 .../ql/parse/ColumnStatsSemanticAnalyzer.java   |   6 +-
 .../results/clientnegative/insertsel_fail.q.out |   2 +-
 .../load_data_parquet_empty.q.out   |   2 +-
 .../clientpositive/acid_table_stats.q.out   |   8 +-
 .../clientpositive/alterColumnStatsPart.q.out   |  12 +-
 .../alter_partition_update_status.q.out |   8 +-
 .../clientpositive/alter_rename_table.q.out |   4 +-
 .../alter_table_column_stats.q.out  |  16 +-
 .../alter_table_update_status.q.out |   8 +-
 ..._table_update_status_disable_bitvector.q.out |   8 +-
 .../clientpositive/analyze_tbl_date.q.out   |   4 +-
 .../clientpositive/analyze_tbl_part.q.out   |  20 +-
 .../annotate_stats_deep_filters.q.out   |   4 +-
 .../clientpositive/annotate_stats_filter.q.out  |   4 +-
 .../clientpositive/annotate_stats_groupby.q.out |   8 +-
 .../annotate_stats_groupby2.q.out   |   4 +-
 .../clientpositive/annotate_stats_join.q.out|  12 +-
 .../annotate_stats_join_pkfk.q.out  |  16 +-
 .../clientpositive/annotate_stats_limit.q.out   |   4 +-
 .../clientpositive/annotate_stats_part.q.out|   4 +-
 .../clientpositive/annotate_stats_select.q.out  |   4 +-
 .../clientpositive/annotate_stats_table.q.out   |   8 +-
 .../clientpositive/annotate_stats_union.q.out   |  12 +-
 .../clientpositive/autoColumnStats_3.q.out  |   8 +-
 .../results/clientpositive/avro_decimal.q.out   |   4 +-
 .../clientpositive/avro_decimal_native.q.out|   4 +-
 .../beeline/colstats_all_nulls.q.out|   4 +-
 .../materialized_view_create_rewrite.q.out  |   4 +-
 .../clientpositive/beeline/smb_mapjoin_13.q.out |   4 +-
 .../clientpositive/bucket_map_join_1.q.out  |   4 +-
 .../clientpositive/bucket_map_join_2.q.out  |   4 +-
 .../bucketmapjoin_negative3.q.out   |   4 +-
 .../cbo_rp_annotate_stats_groupby.q.out |   8 +-
 .../clientpositive/cbo_rp_auto_join1.q.out  |   8 +-
 .../clientpositive/colstats_all_nulls.q.out |   4 +-
 .../columnStatsUpdateForStatsOptimizer_2.q.out  |   4 +-
 .../clientpositive/columnstats_infinity.q.out   |   4 +-
 .../clientpositive/columnstats_partlvl.q.out|  56 +--
 .../clientpositive/columnstats_partlvl_dp.q.out |  44 +-
 .../clientpositive/columnstats_quoting.q.out|  16 +-
 .../clientpositive/columnstats_tbllvl.q.out |  52 +-
 .../results/clientpositive/compustat_avro.q.out |   4 +-
 .../clientpositive/compute_stats_date.q.out |   8 +-
 .../test/results/clientpositive/constGby.q.out  |   4 +-
 .../clientpositive/constant_prop_2.q.out|   4 +-
 .../clientpositive/constant_prop_3.q.out|  12 +-
 .../clientpositive/correlated_join_keys.q.out   |   4 +-
 .../results/clientpositive/decimal_stats.q.out  |   4 +-
 .../results/clientpositive/deleteAnalyze.q.out  |   8 +-
 .../display_colstats_tbllvl.q.out   |  24 +-
 .../results/clientpositive/distinct_stats.q.out |   4 +-
 .../clientpositive/drop_table_with_stats.q.out  |  24 +-
 .../encrypted/encryption_move_tbl.q.out |   4 +-
 .../exec_parallel_column_stats.q.out|   8 +-
 .../extrapolate_part_stats_date.q.out   |   4 +-
 .../extrapolate_part_stats_full.q.out   |  24 +-
 .../extrapolate_part_stats_partial.q.out|  24 +-
 .../test/results/clientpositive/fm-sketch.q.out |  28 +-
 .../clientpositive/groupby_sort_1_23.q.out  |  20 +-
 .../clientpositive/groupby_sort_skew_1_23.q.out |  20 +-
 ql/src/test/results/clientpositive/hll.q.out|  28 +-
 .../clientpositive/llap/acid_no_buckets.q.out   |  32 +-
 .../llap/bucket_map_join_tez1.q.out | 329 ++--
 .../llap/bucket_map_join_tez2.q.out | 180 ---
 .../columnStatsUpdateForStatsOptimizer_1.q.out  |  16 +-
 .../llap/column_table_stats.q.out   |  32 +-
 .../llap/column_table_stats_orc.q.out   |  24 +-
 .../llap/columnstats_part_coltype.q.out |  28 +-
 .../clientpositive/llap/deleteAnalyze.q.out |   8 +-
 .../llap/drop_partition_with_stats.q.out|  56 +--
 .../llap/dynamic_semijoin_reduction.q.out   |  16 +-
 .../llap/dynamic_semijoin_reduction_2.q.out |   4 +-
 

[2/8] hive git commit: HIVE-19396 : HiveOperation is incorrectly set for analyze statement (Ashutosh Chauhan via Zoltan Haindrich)

2018-05-03 Thread hashutosh
http://git-wip-us.apache.org/repos/asf/hive/blob/1c3b82fb/ql/src/test/results/clientpositive/optimize_filter_literal.q.out
--
diff --git a/ql/src/test/results/clientpositive/optimize_filter_literal.q.out 
b/ql/src/test/results/clientpositive/optimize_filter_literal.q.out
index 7854e02..bb4f27d 100644
--- a/ql/src/test/results/clientpositive/optimize_filter_literal.q.out
+++ b/ql/src/test/results/clientpositive/optimize_filter_literal.q.out
@@ -87,14 +87,14 @@ POSTHOOK: Output: default@tab_part@ds=2008-04-08
 POSTHOOK: Lineage: tab_part PARTITION(ds=2008-04-08).key SIMPLE 
[(srcbucket_mapjoin_part)srcbucket_mapjoin_part.FieldSchema(name:key, type:int, 
comment:null), ]
 POSTHOOK: Lineage: tab_part PARTITION(ds=2008-04-08).value SIMPLE 
[(srcbucket_mapjoin_part)srcbucket_mapjoin_part.FieldSchema(name:value, 
type:string, comment:null), ]
 PREHOOK: query: analyze table tab_part partition (ds='2008-04-08') compute 
statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@tab_part
 PREHOOK: Input: default@tab_part@ds=2008-04-08
 PREHOOK: Output: default@tab_part
 PREHOOK: Output: default@tab_part@ds=2008-04-08
  A masked pattern was here 
 POSTHOOK: query: analyze table tab_part partition (ds='2008-04-08') compute 
statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@tab_part
 POSTHOOK: Input: default@tab_part@ds=2008-04-08
 POSTHOOK: Output: default@tab_part
@@ -123,14 +123,14 @@ POSTHOOK: Output: default@tab@ds=2008-04-08
 POSTHOOK: Lineage: tab PARTITION(ds=2008-04-08).key SIMPLE 
[(srcbucket_mapjoin)srcbucket_mapjoin.FieldSchema(name:key, type:int, 
comment:null), ]
 POSTHOOK: Lineage: tab PARTITION(ds=2008-04-08).value SIMPLE 
[(srcbucket_mapjoin)srcbucket_mapjoin.FieldSchema(name:value, type:string, 
comment:null), ]
 PREHOOK: query: analyze table tab partition (ds='2008-04-08') compute 
statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@tab
 PREHOOK: Input: default@tab@ds=2008-04-08
 PREHOOK: Output: default@tab
 PREHOOK: Output: default@tab@ds=2008-04-08
  A masked pattern was here 
 POSTHOOK: query: analyze table tab partition (ds='2008-04-08') compute 
statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@tab
 POSTHOOK: Input: default@tab@ds=2008-04-08
 POSTHOOK: Output: default@tab

http://git-wip-us.apache.org/repos/asf/hive/blob/1c3b82fb/ql/src/test/results/clientpositive/outer_reference_windowed.q.out
--
diff --git a/ql/src/test/results/clientpositive/outer_reference_windowed.q.out 
b/ql/src/test/results/clientpositive/outer_reference_windowed.q.out
index c6351eb..87cadb3 100644
--- a/ql/src/test/results/clientpositive/outer_reference_windowed.q.out
+++ b/ql/src/test/results/clientpositive/outer_reference_windowed.q.out
@@ -91,32 +91,32 @@ POSTHOOK: Output: default@e011_03
 POSTHOOK: Lineage: e011_03.c1 SIMPLE [(e011_01)e011_01.FieldSchema(name:c1, 
type:decimal(15,2), comment:null), ]
 POSTHOOK: Lineage: e011_03.c2 SIMPLE [(e011_01)e011_01.FieldSchema(name:c2, 
type:decimal(15,2), comment:null), ]
 PREHOOK: query: ANALYZE TABLE e011_01 COMPUTE STATISTICS FOR COLUMNS
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@e011_01
 PREHOOK: Output: default@e011_01
  A masked pattern was here 
 POSTHOOK: query: ANALYZE TABLE e011_01 COMPUTE STATISTICS FOR COLUMNS
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@e011_01
 POSTHOOK: Output: default@e011_01
  A masked pattern was here 
 PREHOOK: query: ANALYZE TABLE e011_02 COMPUTE STATISTICS FOR COLUMNS
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@e011_02
 PREHOOK: Output: default@e011_02
  A masked pattern was here 
 POSTHOOK: query: ANALYZE TABLE e011_02 COMPUTE STATISTICS FOR COLUMNS
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@e011_02
 POSTHOOK: Output: default@e011_02
  A masked pattern was here 
 PREHOOK: query: ANALYZE TABLE e011_03 COMPUTE STATISTICS FOR COLUMNS
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@e011_03
 PREHOOK: Output: default@e011_03
  A masked pattern was here 
 POSTHOOK: query: ANALYZE TABLE e011_03 COMPUTE STATISTICS FOR COLUMNS
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@e011_03
 POSTHOOK: Output: default@e011_03
  A masked pattern was here 

http://git-wip-us.apache.org/repos/asf/hive/blob/1c3b82fb/ql/src/test/results/clientpositive/partial_column_stats.q.out
--
diff --git a/ql/src/test/results/clientpositive/partial_column_stats.q.out 

[6/8] hive git commit: HIVE-19396 : HiveOperation is incorrectly set for analyze statement (Ashutosh Chauhan via Zoltan Haindrich)

2018-05-03 Thread hashutosh
http://git-wip-us.apache.org/repos/asf/hive/blob/1c3b82fb/ql/src/test/results/clientpositive/llap/bucket_map_join_tez1.q.out
--
diff --git a/ql/src/test/results/clientpositive/llap/bucket_map_join_tez1.q.out 
b/ql/src/test/results/clientpositive/llap/bucket_map_join_tez1.q.out
index 3338ec2..116e00b 100644
--- a/ql/src/test/results/clientpositive/llap/bucket_map_join_tez1.q.out
+++ b/ql/src/test/results/clientpositive/llap/bucket_map_join_tez1.q.out
@@ -109,56 +109,56 @@ POSTHOOK: Output: default@tab@ds=2008-04-08
 POSTHOOK: Lineage: tab PARTITION(ds=2008-04-08).key SIMPLE 
[(srcbucket_mapjoin)srcbucket_mapjoin.FieldSchema(name:key, type:int, 
comment:null), ]
 POSTHOOK: Lineage: tab PARTITION(ds=2008-04-08).value SIMPLE 
[(srcbucket_mapjoin)srcbucket_mapjoin.FieldSchema(name:value, type:string, 
comment:null), ]
 PREHOOK: query: analyze table srcbucket_mapjoin compute statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@srcbucket_mapjoin
 PREHOOK: Input: default@srcbucket_mapjoin@ds=2008-04-08
 PREHOOK: Output: default@srcbucket_mapjoin
 PREHOOK: Output: default@srcbucket_mapjoin@ds=2008-04-08
  A masked pattern was here 
 POSTHOOK: query: analyze table srcbucket_mapjoin compute statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@srcbucket_mapjoin
 POSTHOOK: Input: default@srcbucket_mapjoin@ds=2008-04-08
 POSTHOOK: Output: default@srcbucket_mapjoin
 POSTHOOK: Output: default@srcbucket_mapjoin@ds=2008-04-08
  A masked pattern was here 
 PREHOOK: query: analyze table srcbucket_mapjoin_part compute statistics for 
columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@srcbucket_mapjoin_part
 PREHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08
 PREHOOK: Output: default@srcbucket_mapjoin_part
 PREHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
  A masked pattern was here 
 POSTHOOK: query: analyze table srcbucket_mapjoin_part compute statistics for 
columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@srcbucket_mapjoin_part
 POSTHOOK: Input: default@srcbucket_mapjoin_part@ds=2008-04-08
 POSTHOOK: Output: default@srcbucket_mapjoin_part
 POSTHOOK: Output: default@srcbucket_mapjoin_part@ds=2008-04-08
  A masked pattern was here 
 PREHOOK: query: analyze table tab compute statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@tab
 PREHOOK: Input: default@tab@ds=2008-04-08
 PREHOOK: Output: default@tab
 PREHOOK: Output: default@tab@ds=2008-04-08
  A masked pattern was here 
 POSTHOOK: query: analyze table tab compute statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@tab
 POSTHOOK: Input: default@tab@ds=2008-04-08
 POSTHOOK: Output: default@tab
 POSTHOOK: Output: default@tab@ds=2008-04-08
  A masked pattern was here 
 PREHOOK: query: analyze table tab_part compute statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@tab_part
 PREHOOK: Input: default@tab_part@ds=2008-04-08
 PREHOOK: Output: default@tab_part
 PREHOOK: Output: default@tab_part@ds=2008-04-08
  A masked pattern was here 
 POSTHOOK: query: analyze table tab_part compute statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@tab_part
 POSTHOOK: Input: default@tab_part@ds=2008-04-08
 POSTHOOK: Output: default@tab_part
@@ -235,25 +235,25 @@ STAGE PLANS:
   0 _col0 (type: int)
   1 _col0 (type: int)
 outputColumnNames: _col0, _col1, _col3
-Statistics: Num rows: 391 Data size: 72726 Basic stats: 
COMPLETE Column stats: COMPLETE
+Statistics: Num rows: 400 Data size: 74400 Basic stats: 
COMPLETE Column stats: COMPLETE
 Select Operator
   expressions: _col0 (type: int), _col1 (type: string), _col3 
(type: string)
   outputColumnNames: _col0, _col1, _col2
-  Statistics: Num rows: 391 Data size: 72726 Basic stats: 
COMPLETE Column stats: COMPLETE
+  Statistics: Num rows: 400 Data size: 74400 Basic stats: 
COMPLETE Column stats: COMPLETE
   Reduce Output Operator
 key expressions: _col0 (type: int), _col1 (type: string), 
_col2 (type: string)
 sort order: +++
-Statistics: Num rows: 391 Data size: 72726 Basic stats: 
COMPLETE Column stats: COMPLETE
+Statistics: Num rows: 400 Data size: 74400 Basic stats: 
COMPLETE Column stats: COMPLETE
 Reducer 3 
 Execution mode: vectorized, llap
 Reduce Operator Tree:
   Select Operator
 expressions: KEY.reducesinkkey0 (type: 

[3/8] hive git commit: HIVE-19396 : HiveOperation is incorrectly set for analyze statement (Ashutosh Chauhan via Zoltan Haindrich)

2018-05-03 Thread hashutosh
http://git-wip-us.apache.org/repos/asf/hive/blob/1c3b82fb/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_4.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_4.q.out 
b/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_4.q.out
index 4da2ed3..e043441 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_4.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_4.q.out
@@ -34,12 +34,12 @@ POSTHOOK: Lineage: emps.empid SCRIPT []
 POSTHOOK: Lineage: emps.name SCRIPT []
 POSTHOOK: Lineage: emps.salary SCRIPT []
 PREHOOK: query: analyze table emps compute statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@emps
 PREHOOK: Output: default@emps
  A masked pattern was here 
 POSTHOOK: query: analyze table emps compute statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@emps
 POSTHOOK: Output: default@emps
  A masked pattern was here 
@@ -71,12 +71,12 @@ POSTHOOK: Lineage: depts.deptno SCRIPT []
 POSTHOOK: Lineage: depts.locationid SCRIPT []
 POSTHOOK: Lineage: depts.name SCRIPT []
 PREHOOK: query: analyze table depts compute statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@depts
 PREHOOK: Output: default@depts
  A masked pattern was here 
 POSTHOOK: query: analyze table depts compute statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@depts
 POSTHOOK: Output: default@depts
  A masked pattern was here 
@@ -105,12 +105,12 @@ POSTHOOK: Output: default@dependents
 POSTHOOK: Lineage: dependents.empid SCRIPT []
 POSTHOOK: Lineage: dependents.name SCRIPT []
 PREHOOK: query: analyze table dependents compute statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@dependents
 PREHOOK: Output: default@dependents
  A masked pattern was here 
 POSTHOOK: query: analyze table dependents compute statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@dependents
 POSTHOOK: Output: default@dependents
  A masked pattern was here 
@@ -139,12 +139,12 @@ POSTHOOK: Output: default@locations
 POSTHOOK: Lineage: locations.locationid SCRIPT []
 POSTHOOK: Lineage: locations.name SCRIPT []
 PREHOOK: query: analyze table locations compute statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@locations
 PREHOOK: Output: default@locations
  A masked pattern was here 
 POSTHOOK: query: analyze table locations compute statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@locations
 POSTHOOK: Output: default@locations
  A masked pattern was here 
@@ -187,12 +187,12 @@ POSTHOOK: Input: default@emps
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@mv1
 PREHOOK: query: analyze table mv1 compute statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@mv1
 PREHOOK: Output: default@mv1
  A masked pattern was here 
 POSTHOOK: query: analyze table mv1 compute statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@mv1
 POSTHOOK: Output: default@mv1
  A masked pattern was here 
@@ -301,12 +301,12 @@ POSTHOOK: Input: default@emps
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@mv1
 PREHOOK: query: analyze table mv1 compute statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@mv1
 PREHOOK: Output: default@mv1
  A masked pattern was here 
 POSTHOOK: query: analyze table mv1 compute statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@mv1
 POSTHOOK: Output: default@mv1
  A masked pattern was here 
@@ -376,12 +376,12 @@ POSTHOOK: Input: default@emps
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@mv1
 PREHOOK: query: analyze table mv1 compute statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@mv1
 PREHOOK: Output: default@mv1
  A masked pattern was here 
 POSTHOOK: query: analyze table mv1 compute statistics for columns
-POSTHOOK: type: QUERY
+POSTHOOK: type: ANALYZE_TABLE
 POSTHOOK: Input: default@mv1
 POSTHOOK: Output: default@mv1
  A masked pattern was here 
@@ -498,12 +498,12 @@ POSTHOOK: Input: default@emps
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@mv1
 PREHOOK: query: analyze table mv1 compute statistics for columns
-PREHOOK: type: QUERY
+PREHOOK: type: ANALYZE_TABLE
 PREHOOK: Input: default@mv1
 PREHOOK: Output: default@mv1
  A masked pattern was here 
 POSTHOOK: query: analyze 

hive git commit: HIVE-19212: Fix findbugs yetus pre-commit checks (Sahil Takiar, reviewed by Adam Szita, Peter Vary)

2018-05-03 Thread stakiar
Repository: hive
Updated Branches:
  refs/heads/master 70d835b98 -> bf8e69643


HIVE-19212: Fix findbugs yetus pre-commit checks (Sahil Takiar, reviewed by 
Adam Szita, Peter Vary)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/bf8e6964
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/bf8e6964
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/bf8e6964

Branch: refs/heads/master
Commit: bf8e6964389c984d768f6a4b110f2ba8198a49ef
Parents: 70d835b
Author: Sahil Takiar 
Authored: Thu May 3 15:31:09 2018 -0700
Committer: Sahil Takiar 
Committed: Thu May 3 15:41:41 2018 -0700

--
 dev-support/yetus-wrapper.sh| 47 +---
 .../apache/hive/ptest/execution/YetusPhase.java |  9 ++--
 .../ptest2/src/main/resources/yetus-exec.vm | 26 +++
 3 files changed, 41 insertions(+), 41 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/bf8e6964/dev-support/yetus-wrapper.sh
--
diff --git a/dev-support/yetus-wrapper.sh b/dev-support/yetus-wrapper.sh
index 58da1d2..3a814d5 100755
--- a/dev-support/yetus-wrapper.sh
+++ b/dev-support/yetus-wrapper.sh
@@ -91,34 +91,6 @@ if [[ $? != 0 ]]; then
 fi
 HIVE_PATCHPROCESS=${mytmpdir}
 
-CURLBIN=$(command -v curl)
-
-# Set FindBugs Home
-FINDBUGS_VERSION="3.0.1"
-if [[ ! -d "${HIVE_PATCHPROCESS}/findbugs-${FINDBUGS_VERSION}/" ]]; then
-  # Download FindBugs
-  FINDBUGS_BASEURL="http://prdownloads.sourceforge.net/findbugs/"
-  FINDBUGS_TARBALL="findbugs-${FINDBUGS_VERSION}.tar"
-
-  pushd "${HIVE_PATCHPROCESS}" >/dev/null
-  if [[ -n "${CURLBIN}" ]]; then
-"${CURLBIN}" -f -s -L -O "${FINDBUGS_BASEURL}/${FINDBUGS_TARBALL}.gz"
-if [[ $? != 0 ]]; then
-  yetus_error "ERROR: yetus-dl: unable to download 
${FINDBUGS_BASEURL}/${FINDBUGS_TARBALL}.gz"
-  exit 1
-fi
-  fi
-
-  gunzip -c "${FINDBUGS_TARBALL}.gz" | tar xpf -
-  if [[ $? != 0 ]]; then
-yetus_error "ERROR: ${FINDBUGS_TARBALL}.gz is corrupt. Investigate and 
then remove ${HIVE_PATCHPROCESS} to try again."
-exit 1
-  fi
-  popd >/dev/null
-fi
-
-export FINDBUGS_HOME=${HIVE_PATCHPROCESS}/findbugs-${FINDBUGS_VERSION}
-
 ##
 ## if we've already DL'd it, then short cut
 ##
@@ -130,10 +102,11 @@ fi
 ## need to DL, etc
 ##
 
-YETUS_BASEURL="https://archive.apache.org/dist/yetus/${HIVE_YETUS_VERSION}/"
-YETUS_TARBALL="yetus-${HIVE_YETUS_VERSION}-bin.tar"
+BASEURL="https://archive.apache.org/dist/yetus/${HIVE_YETUS_VERSION}/"
+TARBALL="yetus-${HIVE_YETUS_VERSION}-bin.tar"
 
 GPGBIN=$(command -v gpg)
+CURLBIN=$(command -v curl)
 
 pushd "${HIVE_PATCHPROCESS}" >/dev/null
 if [[ $? != 0 ]]; then
@@ -142,9 +115,9 @@ if [[ $? != 0 ]]; then
 fi
 
 if [[ -n "${CURLBIN}" ]]; then
-  "${CURLBIN}" -f -s -L -O "${YETUS_BASEURL}/${YETUS_TARBALL}.gz"
+  "${CURLBIN}" -f -s -L -O "${BASEURL}/${TARBALL}.gz"
   if [[ $? != 0 ]]; then
-yetus_error "ERROR: yetus-dl: unable to download 
${YETUS_BASEURL}/${YETUS_TARBALL}.gz"
+yetus_error "ERROR: yetus-dl: unable to download ${BASEURL}/${TARBALL}.gz"
 exit 1
   fi
 else
@@ -168,9 +141,9 @@ if [[ -n "${GPGBIN}" ]]; then
 yetus_error "ERROR: yetus-dl: unable to fetch 
https://dist.apache.org/repos/dist/release/yetus/KEYS;
 exit 1
   fi
-  "${CURLBIN}" -s -L -O "${YETUS_BASEURL}/${YETUS_TARBALL}.gz.asc"
+  "${CURLBIN}" -s -L -O "${BASEURL}/${TARBALL}.gz.asc"
   if [[ $? != 0 ]]; then
-yetus_error "ERROR: yetus-dl: unable to fetch 
${YETUS_BASEURL}/${YETUS_TARBALL}.gz.asc"
+yetus_error "ERROR: yetus-dl: unable to fetch ${BASEURL}/${TARBALL}.gz.asc"
 exit 1
   fi
   "${GPGBIN}" --homedir "${HIVE_PATCHPROCESS}/.gpg" --import 
"${HIVE_PATCHPROCESS}/KEYS_YETUS" >/dev/null 2>&1
@@ -178,16 +151,16 @@ if [[ -n "${GPGBIN}" ]]; then
 yetus_error "ERROR: yetus-dl: gpg unable to import 
${HIVE_PATCHPROCESS}/KEYS_YETUS"
 exit 1
   fi
-  "${GPGBIN}" --homedir "${HIVE_PATCHPROCESS}/.gpg" --verify 
"${YETUS_TARBALL}.gz.asc" >/dev/null 2>&1
+  "${GPGBIN}" --homedir "${HIVE_PATCHPROCESS}/.gpg" --verify 
"${TARBALL}.gz.asc" >/dev/null 2>&1
if [[ $? != 0 ]]; then
  yetus_error "ERROR: yetus-dl: gpg verify of tarball in 
${HIVE_PATCHPROCESS} failed"
  exit 1
fi
 fi
 
-gunzip -c "${YETUS_TARBALL}.gz" | tar xpf -
+gunzip -c "${TARBALL}.gz" | tar xpf -
 if [[ $? != 0 ]]; then
-  yetus_error "ERROR: ${YETUS_TARBALL}.gz is corrupt. Investigate and then 
remove ${HIVE_PATCHPROCESS} to try again."
+  yetus_error "ERROR: ${TARBALL}.gz is corrupt. Investigate and then remove 
${HIVE_PATCHPROCESS} to try again."
   exit 1
 fi
 

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8e6964/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/YetusPhase.java

[2/2] hive git commit: HIVE-19394 : WM_TRIGGER trigger creation failed with type cast from Integer to Boolean (Thai Bui, reviewed by Prasanth Jayachandran and Sergey Shelukhin) ADDENDUM

2018-05-03 Thread sershe
HIVE-19394 : WM_TRIGGER trigger creation failed with type cast from Integer to 
Boolean (Thai Bui, reviewed by Prasanth Jayachandran and Sergey Shelukhin) 
ADDENDUM


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5e5cd02a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5e5cd02a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5e5cd02a

Branch: refs/heads/branch-3
Commit: 5e5cd02a34d24c80dbf5b3c279fa51263ff6fed5
Parents: fbdd24b
Author: sergey 
Authored: Thu May 3 14:59:14 2018 -0700
Committer: sergey 
Committed: Thu May 3 14:59:22 2018 -0700

--
 .../src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/5e5cd02a/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
--
diff --git 
a/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
 
b/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
index dffcbd6..aa9a34f 100644
--- 
a/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
+++ 
b/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
@@ -69,7 +69,7 @@ CREATE TABLE "WM_TRIGGER" (
 "NAME" character varying(128) NOT NULL,
 "TRIGGER_EXPRESSION" character varying(1024) DEFAULT NULL::character 
varying,
 "ACTION_EXPRESSION" character varying(1024) DEFAULT NULL::character 
varying,
-"IS_IN_UNMANAGED" smallint NOT NULL DEFAULT false
+"IS_IN_UNMANAGED" smallint NOT NULL DEFAULT 0
 );
 
 ALTER TABLE ONLY "WM_TRIGGER"



[1/2] hive git commit: HIVE-19394 : WM_TRIGGER trigger creation failed with type cast from Integer to Boolean (Thai Bui, reviewed by Prasanth Jayachandran and Sergey Shelukhin) ADDENDUM

2018-05-03 Thread sershe
Repository: hive
Updated Branches:
  refs/heads/branch-3 fbdd24b9d -> 5e5cd02a3
  refs/heads/master db26f3413 -> 70d835b98


HIVE-19394 : WM_TRIGGER trigger creation failed with type cast from Integer to 
Boolean (Thai Bui, reviewed by Prasanth Jayachandran and Sergey Shelukhin) 
ADDENDUM


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/70d835b9
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/70d835b9
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/70d835b9

Branch: refs/heads/master
Commit: 70d835b984eda8b752a4de82478dcf332064f186
Parents: db26f34
Author: sergey 
Authored: Thu May 3 14:59:14 2018 -0700
Committer: sergey 
Committed: Thu May 3 14:59:14 2018 -0700

--
 .../src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/70d835b9/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
--
diff --git 
a/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
 
b/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
index dffcbd6..aa9a34f 100644
--- 
a/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
+++ 
b/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
@@ -69,7 +69,7 @@ CREATE TABLE "WM_TRIGGER" (
 "NAME" character varying(128) NOT NULL,
 "TRIGGER_EXPRESSION" character varying(1024) DEFAULT NULL::character 
varying,
 "ACTION_EXPRESSION" character varying(1024) DEFAULT NULL::character 
varying,
-"IS_IN_UNMANAGED" smallint NOT NULL DEFAULT false
+"IS_IN_UNMANAGED" smallint NOT NULL DEFAULT 0
 );
 
 ALTER TABLE ONLY "WM_TRIGGER"



hive git commit: HIVE-17824 : msck repair table should drop the missing partitions from metastore (Janaki Latha Lahorani reviewed by Vihang Karajgaonkar)

2018-05-03 Thread vihangk1
Repository: hive
Updated Branches:
  refs/heads/branch-2 b9b0b0aa2 -> db9752444


HIVE-17824 : msck repair table should drop the missing partitions from 
metastore (Janaki Latha Lahorani reviewed by Vihang Karajgaonkar)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/db975244
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/db975244
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/db975244

Branch: refs/heads/branch-2
Commit: db975244405f7f6546df0b363c1a3aeddeb4ac8c
Parents: b9b0b0a
Author: Janaki Lahorani 
Authored: Thu May 3 11:09:31 2018 -0700
Committer: Vihang Karajgaonkar 
Committed: Thu May 3 11:09:31 2018 -0700

--
 .../org/apache/hadoop/hive/ql/QTestUtil.java|   3 +-
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java | 183 +++---
 .../apache/hadoop/hive/ql/metadata/Hive.java|  79 +
 .../hive/ql/parse/DDLSemanticAnalyzer.java  |  54 ++-
 .../org/apache/hadoop/hive/ql/parse/HiveLexer.g |   1 +
 .../apache/hadoop/hive/ql/parse/HiveParser.g|   8 +-
 .../apache/hadoop/hive/ql/plan/MsckDesc.java|  50 ++-
 .../exec/TestMsckDropPartitionsInBatches.java   | 342 +++
 .../queries/clientpositive/msck_repair_drop.q   | 180 ++
 .../clientpositive/msck_repair_drop.q.out   | 293 
 10 files changed, 1132 insertions(+), 61 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/db975244/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java 
b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 551a6de..10a1348 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -1692,7 +1692,8 @@ public class QTestUtil {
   ".*at com\\.jolbox.*",
   ".*at com\\.zaxxer.*",
   
"org\\.apache\\.hadoop\\.hive\\.metastore\\.model\\.MConstraint@([0-9]|[a-z])*",
-  "^Repair: Added partition to metastore.*"
+  "^Repair: Added partition to metastore.*",
+  "^Repair: Dropped partition from metastore.*"
   });
 
   private final Pattern[] partialReservedPlanMask = toPattern(new String[] {

http://git-wip-us.apache.org/repos/asf/hive/blob/db975244/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 91f1b53..e9c0625 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -1957,56 +1957,90 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
 try {
   HiveMetaStoreChecker checker = new HiveMetaStoreChecker(db);
   String[] names = Utilities.getDbTableName(msckDesc.getTableName());
+
+  // checkMetastore call will fill in result with partitions that are 
present in filesystem
+  // and missing in metastore - accessed through getPartitionsNotInMs
+  // And partitions that are not present in filesystem and metadata exists 
in metastore -
+  // accessed through getPartitionNotOnFS
   checker.checkMetastore(names[0], names[1], msckDesc.getPartSpecs(), 
result);
      Set<CheckResult.PartitionResult> partsNotInMs = result.getPartitionsNotInMs();
-  if (msckDesc.isRepairPartitions() && !partsNotInMs.isEmpty()) {
-AbstractList vals = null;
-String settingStr = HiveConf.getVar(conf, 
HiveConf.ConfVars.HIVE_MSCK_PATH_VALIDATION);
-boolean doValidate = !("ignore".equals(settingStr));
-boolean doSkip = doValidate && "skip".equals(settingStr);
-// The default setting is "throw"; assume doValidate && !doSkip means 
throw.
-if (doValidate) {
-  // Validate that we can add partition without escaping. Escaping was 
originally intended
-  // to avoid creating invalid HDFS paths; however, if we escape the 
HDFS path (that we
-  // deem invalid but HDFS actually supports - it is possible to 
create HDFS paths with
-  // unprintable characters like ASCII 7), metastore will create 
another directory instead
-  // of the one we are trying to "repair" here.
-  Iterator<CheckResult.PartitionResult> iter = partsNotInMs.iterator();
-  while (iter.hasNext()) {
-CheckResult.PartitionResult part = iter.next();
-try {
-  vals = Warehouse.makeValsFromName(part.getPartitionName(), vals);
-} catch (MetaException ex) {
-  throw new HiveException(ex);
-}
-for (String val : vals) {

[2/2] hive git commit: HIVE-19394 : WM_TRIGGER trigger creation failed with type cast from Integer to Boolean (Thai Bui, reviewed by Prasanth Jayachandran and Sergey Shelukhin)

2018-05-03 Thread sershe
HIVE-19394 : WM_TRIGGER trigger creation failed with type cast from Integer to 
Boolean (Thai Bui, reviewed by Prasanth Jayachandran and Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fbdd24b9
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fbdd24b9
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fbdd24b9

Branch: refs/heads/branch-3
Commit: fbdd24b9dd08942c4e8b6d865a35660f34d4bcda
Parents: 5e0397e
Author: sergey 
Authored: Thu May 3 12:52:42 2018 -0700
Committer: sergey 
Committed: Thu May 3 12:53:05 2018 -0700

--
 .../src/main/sql/postgres/hive-schema-3.0.0.postgres.sql   | 2 +-
 .../src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/fbdd24b9/standalone-metastore/src/main/sql/postgres/hive-schema-3.0.0.postgres.sql
--
diff --git 
a/standalone-metastore/src/main/sql/postgres/hive-schema-3.0.0.postgres.sql 
b/standalone-metastore/src/main/sql/postgres/hive-schema-3.0.0.postgres.sql
index 2484744..93afeaf 100644
--- a/standalone-metastore/src/main/sql/postgres/hive-schema-3.0.0.postgres.sql
+++ b/standalone-metastore/src/main/sql/postgres/hive-schema-3.0.0.postgres.sql
@@ -680,7 +680,7 @@ CREATE TABLE "WM_TRIGGER" (
 "NAME" character varying(128) NOT NULL,
 "TRIGGER_EXPRESSION" character varying(1024) DEFAULT NULL::character 
varying,
 "ACTION_EXPRESSION" character varying(1024) DEFAULT NULL::character 
varying,
-"IS_IN_UNMANAGED" boolean NOT NULL DEFAULT false
+"IS_IN_UNMANAGED" smallint NOT NULL DEFAULT 0
 );
 
 CREATE TABLE "WM_POOL_TO_TRIGGER" (

http://git-wip-us.apache.org/repos/asf/hive/blob/fbdd24b9/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
--
diff --git 
a/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
 
b/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
index f06f0dd..dffcbd6 100644
--- 
a/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
+++ 
b/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
@@ -69,7 +69,7 @@ CREATE TABLE "WM_TRIGGER" (
 "NAME" character varying(128) NOT NULL,
 "TRIGGER_EXPRESSION" character varying(1024) DEFAULT NULL::character 
varying,
 "ACTION_EXPRESSION" character varying(1024) DEFAULT NULL::character 
varying,
-"IS_IN_UNMANAGED" boolean NOT NULL DEFAULT false
+"IS_IN_UNMANAGED" smallint NOT NULL DEFAULT false
 );
 
 ALTER TABLE ONLY "WM_TRIGGER"



[1/2] hive git commit: HIVE-19394 : WM_TRIGGER trigger creation failed with type cast from Integer to Boolean (Thai Bui, reviewed by Prasanth Jayachandran and Sergey Shelukhin)

2018-05-03 Thread sershe
Repository: hive
Updated Branches:
  refs/heads/branch-3 5e0397efc -> fbdd24b9d
  refs/heads/master cc52e9b22 -> db26f3413


HIVE-19394 : WM_TRIGGER trigger creation failed with type cast from Integer to 
Boolean (Thai Bui, reviewed by Prasanth Jayachandran and Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/db26f341
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/db26f341
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/db26f341

Branch: refs/heads/master
Commit: db26f3413bf28ee2998ffca3b0179b2dbae95eae
Parents: cc52e9b
Author: sergey 
Authored: Thu May 3 12:52:42 2018 -0700
Committer: sergey 
Committed: Thu May 3 12:52:42 2018 -0700

--
 .../src/main/sql/postgres/hive-schema-3.0.0.postgres.sql   | 2 +-
 .../src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/db26f341/standalone-metastore/src/main/sql/postgres/hive-schema-3.0.0.postgres.sql
--
diff --git 
a/standalone-metastore/src/main/sql/postgres/hive-schema-3.0.0.postgres.sql 
b/standalone-metastore/src/main/sql/postgres/hive-schema-3.0.0.postgres.sql
index 2484744..93afeaf 100644
--- a/standalone-metastore/src/main/sql/postgres/hive-schema-3.0.0.postgres.sql
+++ b/standalone-metastore/src/main/sql/postgres/hive-schema-3.0.0.postgres.sql
@@ -680,7 +680,7 @@ CREATE TABLE "WM_TRIGGER" (
 "NAME" character varying(128) NOT NULL,
 "TRIGGER_EXPRESSION" character varying(1024) DEFAULT NULL::character 
varying,
 "ACTION_EXPRESSION" character varying(1024) DEFAULT NULL::character 
varying,
-"IS_IN_UNMANAGED" boolean NOT NULL DEFAULT false
+"IS_IN_UNMANAGED" smallint NOT NULL DEFAULT 0
 );
 
 CREATE TABLE "WM_POOL_TO_TRIGGER" (

http://git-wip-us.apache.org/repos/asf/hive/blob/db26f341/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
--
diff --git 
a/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
 
b/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
index f06f0dd..dffcbd6 100644
--- 
a/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
+++ 
b/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
@@ -69,7 +69,7 @@ CREATE TABLE "WM_TRIGGER" (
 "NAME" character varying(128) NOT NULL,
 "TRIGGER_EXPRESSION" character varying(1024) DEFAULT NULL::character 
varying,
 "ACTION_EXPRESSION" character varying(1024) DEFAULT NULL::character 
varying,
-"IS_IN_UNMANAGED" boolean NOT NULL DEFAULT false
+"IS_IN_UNMANAGED" smallint NOT NULL DEFAULT false
 );
 
 ALTER TABLE ONLY "WM_TRIGGER"



hive git commit: HIVE-19206: Automatic memory management for open streaming writers (Prasanth Jayachandran reviewed by Gopal V)

2018-05-03 Thread prasanthj
Repository: hive
Updated Branches:
  refs/heads/branch-3 9adf5a950 -> 5e0397efc


HIVE-19206: Automatic memory management for open streaming writers (Prasanth 
Jayachandran reviewed by Gopal V)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5e0397ef
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5e0397ef
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5e0397ef

Branch: refs/heads/branch-3
Commit: 5e0397efcd2f337d5eb756e9cba1d00b4a21ddc3
Parents: 9adf5a9
Author: Prasanth Jayachandran 
Authored: Thu May 3 11:56:03 2018 -0700
Committer: Prasanth Jayachandran 
Committed: Thu May 3 11:57:41 2018 -0700

--
 .../hadoop/hive/common/HeapMemoryMonitor.java   | 154 +
 .../org/apache/hadoop/hive/conf/HiveConf.java   |  13 ++
 .../apache/hadoop/hive/ql/io/RecordUpdater.java |  12 +-
 .../hadoop/hive/ql/io/orc/OrcOutputFormat.java  |   5 +
 .../hadoop/hive/ql/io/orc/OrcRecordUpdater.java |   9 +
 .../hive/ql/exec/TestFileSinkOperator.java  |   5 +
 .../hive/streaming/AbstractRecordWriter.java| 169 ---
 .../apache/hive/streaming/ConnectionInfo.java   |  11 +-
 .../apache/hive/streaming/ConnectionStats.java  |  88 ++
 .../hive/streaming/HiveStreamingConnection.java |  85 ++
 .../hive/streaming/StreamingConnection.java |   7 +
 .../streaming/StrictDelimitedInputWriter.java   |   3 +-
 .../apache/hive/streaming/StrictJsonWriter.java |   3 +-
 .../hive/streaming/StrictRegexWriter.java   |   3 +-
 14 files changed, 487 insertions(+), 80 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/5e0397ef/common/src/java/org/apache/hadoop/hive/common/HeapMemoryMonitor.java
--
diff --git 
a/common/src/java/org/apache/hadoop/hive/common/HeapMemoryMonitor.java 
b/common/src/java/org/apache/hadoop/hive/common/HeapMemoryMonitor.java
new file mode 100644
index 000..42286be
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/HeapMemoryMonitor.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common;
+
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryMXBean;
+import java.lang.management.MemoryNotificationInfo;
+import java.lang.management.MemoryPoolMXBean;
+import java.lang.management.MemoryType;
+import java.lang.management.MemoryUsage;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.management.NotificationEmitter;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Class that monitors memory usage and notifies the listeners when a certain 
threshold of memory is used
+ * after GC (collection usage).
+ */
+public class HeapMemoryMonitor {
+  private static final Logger LOG = 
LoggerFactory.getLogger(HeapMemoryMonitor.class.getName());
+  // notifies when memory usage is 70% after GC
+  private static final double DEFAULT_THRESHOLD = 0.7d;
+  private static final MemoryPoolMXBean tenuredGenPool = getTenuredGenPool();
+
+  private final double threshold;
+  private List listeners = new ArrayList<>();
+
+  public interface Listener {
+void memoryUsageAboveThreshold(long usedMemory, long maxMemory);
+  }
+
+  public HeapMemoryMonitor(double threshold) {
+this.threshold = threshold <= 0.0d || threshold > 1.0d ? DEFAULT_THRESHOLD 
: threshold;
+setupTenuredGenPoolThreshold(tenuredGenPool);
+  }
+
+  private void setupTenuredGenPoolThreshold(final MemoryPoolMXBean 
tenuredGenPool) {
+if (tenuredGenPool == null) {
+  return;
+}
+for (MemoryPoolMXBean pool : ManagementFactory.getMemoryPoolMXBeans()) {
+  final long memoryThreshold = (int) Math.floor(pool.getUsage().getMax() * 
threshold);
+  final boolean isTenured = isTenured(pool);
+  if (!isTenured) {
+continue;
+  }
+  // set memory threshold on memory used after GC
+  final boolean isCollectionUsageThresholdSupported = 

hive git commit: HIVE-19206: Automatic memory management for open streaming writers (Prasanth Jayachandran reviewed by Gopal V)

2018-05-03 Thread prasanthj
Repository: hive
Updated Branches:
  refs/heads/master dfaf90f2b -> cc52e9b22


HIVE-19206: Automatic memory management for open streaming writers (Prasanth 
Jayachandran reviewed by Gopal V)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/cc52e9b2
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/cc52e9b2
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/cc52e9b2

Branch: refs/heads/master
Commit: cc52e9b22bd64be117d3acd9918279aa355b237b
Parents: dfaf90f
Author: Prasanth Jayachandran 
Authored: Thu May 3 11:56:03 2018 -0700
Committer: Prasanth Jayachandran 
Committed: Thu May 3 11:56:03 2018 -0700

--
 .../hadoop/hive/common/HeapMemoryMonitor.java   | 154 +
 .../org/apache/hadoop/hive/conf/HiveConf.java   |  13 ++
 .../apache/hadoop/hive/ql/io/RecordUpdater.java |  12 +-
 .../hadoop/hive/ql/io/orc/OrcOutputFormat.java  |   5 +
 .../hadoop/hive/ql/io/orc/OrcRecordUpdater.java |   9 +
 .../hive/ql/exec/TestFileSinkOperator.java  |   5 +
 .../hive/streaming/AbstractRecordWriter.java| 169 ---
 .../apache/hive/streaming/ConnectionInfo.java   |  11 +-
 .../apache/hive/streaming/ConnectionStats.java  |  88 ++
 .../hive/streaming/HiveStreamingConnection.java |  85 ++
 .../hive/streaming/StreamingConnection.java |   7 +
 .../streaming/StrictDelimitedInputWriter.java   |   3 +-
 .../apache/hive/streaming/StrictJsonWriter.java |   3 +-
 .../hive/streaming/StrictRegexWriter.java   |   3 +-
 14 files changed, 487 insertions(+), 80 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/cc52e9b2/common/src/java/org/apache/hadoop/hive/common/HeapMemoryMonitor.java
--
diff --git 
a/common/src/java/org/apache/hadoop/hive/common/HeapMemoryMonitor.java 
b/common/src/java/org/apache/hadoop/hive/common/HeapMemoryMonitor.java
new file mode 100644
index 000..42286be
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/HeapMemoryMonitor.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common;
+
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryMXBean;
+import java.lang.management.MemoryNotificationInfo;
+import java.lang.management.MemoryPoolMXBean;
+import java.lang.management.MemoryType;
+import java.lang.management.MemoryUsage;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.management.NotificationEmitter;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Class that monitors memory usage and notifies the listeners when a certain 
threshold of memory is used
+ * after GC (collection usage).
+ */
+public class HeapMemoryMonitor {
+  private static final Logger LOG = 
LoggerFactory.getLogger(HeapMemoryMonitor.class.getName());
+  // notifies when memory usage is 70% after GC
+  private static final double DEFAULT_THRESHOLD = 0.7d;
+  private static final MemoryPoolMXBean tenuredGenPool = getTenuredGenPool();
+
+  private final double threshold;
+  private List listeners = new ArrayList<>();
+
+  public interface Listener {
+void memoryUsageAboveThreshold(long usedMemory, long maxMemory);
+  }
+
+  public HeapMemoryMonitor(double threshold) {
+this.threshold = threshold <= 0.0d || threshold > 1.0d ? DEFAULT_THRESHOLD 
: threshold;
+setupTenuredGenPoolThreshold(tenuredGenPool);
+  }
+
+  private void setupTenuredGenPoolThreshold(final MemoryPoolMXBean 
tenuredGenPool) {
+if (tenuredGenPool == null) {
+  return;
+}
+for (MemoryPoolMXBean pool : ManagementFactory.getMemoryPoolMXBeans()) {
+  final long memoryThreshold = (int) Math.floor(pool.getUsage().getMax() * 
threshold);
+  final boolean isTenured = isTenured(pool);
+  if (!isTenured) {
+continue;
+  }
+  // set memory threshold on memory used after GC
+  final boolean isCollectionUsageThresholdSupported = 

hive git commit: HIVE-17457 - IOW Acid Insert Overwrite when the transaction fails (Eugene Koifman, reviewed by Sergey Shelukhin)

2018-05-03 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-3 ae4df6279 -> 9adf5a950


HIVE-17457 - IOW Acid Insert Overwrite when the transaction fails (Eugene 
Koifman, reviewed by Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9adf5a95
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9adf5a95
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9adf5a95

Branch: refs/heads/branch-3
Commit: 9adf5a95069ae5bbd65ce90b779c8dab180e0e70
Parents: ae4df62
Author: Eugene Koifman 
Authored: Thu May 3 09:23:30 2018 -0700
Committer: Eugene Koifman 
Committed: Thu May 3 09:23:30 2018 -0700

--
 .../hadoop/hive/ql/lockmgr/DbTxnManager.java |  2 +-
 .../apache/hadoop/hive/ql/TestTxnCommands.java   | 19 +--
 2 files changed, 18 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/9adf5a95/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java 
b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
index 68b4c3b..94f6b00 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
@@ -361,7 +361,7 @@ public final class DbTxnManager extends HiveTxnManagerImpl {
 return true;
   case INSERT_OVERWRITE:
 //see HIVE-18154
-return false; // TODO: is this still relevant for insert-only 
tables?
+return false;
   default:
 //not relevant for LOAD
 return false;

http://git-wip-us.apache.org/repos/asf/hive/blob/9adf5a95/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
--
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java 
b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
index 6a3be39..6faba42 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
@@ -74,11 +74,26 @@ public class TestTxnCommands extends 
TxnCommandsBaseForTests {
 return TEST_DATA_DIR;
   }
 
-  @Test//todo: what is this for?
+  /**
+   * tests that a failing Insert Overwrite (which creates a new base_x) is 
properly marked as
+   * aborted.
+   */
+  @Test
   public void testInsertOverwrite() throws Exception {
 runStatementOnDriver("insert overwrite table " + Table.NONACIDORCTBL + " 
select a,b from " + Table.NONACIDORCTBL2);
 runStatementOnDriver("create table " + Table.NONACIDORCTBL2 + "3(a int, b 
int) clustered by (a) into " + BUCKET_COUNT + " buckets stored as orc 
TBLPROPERTIES ('transactional'='false')");
-
+runStatementOnDriver("insert into " + Table.ACIDTBL + " values(1,2)");
+List rs = runStatementOnDriver("select a from " + Table.ACIDTBL + 
" where b = 2");
+Assert.assertEquals(1, rs.size());
+Assert.assertEquals("1", rs.get(0));
+hiveConf.setBoolVar(HiveConf.ConfVars.HIVETESTMODEROLLBACKTXN, true);
+runStatementOnDriver("insert into " + Table.ACIDTBL + " values(3,2)");
+hiveConf.setBoolVar(HiveConf.ConfVars.HIVETESTMODEROLLBACKTXN, false);
+runStatementOnDriver("insert into " + Table.ACIDTBL + " values(5,6)");
+rs = runStatementOnDriver("select a from " + Table.ACIDTBL + " order by 
a");
+Assert.assertEquals(2, rs.size());
+Assert.assertEquals("1", rs.get(0));
+Assert.assertEquals("5", rs.get(1));
   }
   @Ignore("not needed but useful for testing")
   @Test



hive git commit: HIVE-17457 - IOW Acid Insert Overwrite when the transaction fails (Eugene Koifman, reviewed by Sergey Shelukhin)

2018-05-03 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 2c7f9c26e -> dfaf90f2b


HIVE-17457 - IOW Acid Insert Overwrite when the transaction fails (Eugene 
Koifman, reviewed by Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/dfaf90f2
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/dfaf90f2
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/dfaf90f2

Branch: refs/heads/master
Commit: dfaf90f2b3a69477ea6b38144cf4ed55de9c4d95
Parents: 2c7f9c2
Author: Eugene Koifman 
Authored: Thu May 3 09:22:41 2018 -0700
Committer: Eugene Koifman 
Committed: Thu May 3 09:22:41 2018 -0700

--
 .../hadoop/hive/ql/lockmgr/DbTxnManager.java |  2 +-
 .../apache/hadoop/hive/ql/TestTxnCommands.java   | 19 +--
 2 files changed, 18 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/dfaf90f2/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java 
b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
index 68b4c3b..94f6b00 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
@@ -361,7 +361,7 @@ public final class DbTxnManager extends HiveTxnManagerImpl {
 return true;
   case INSERT_OVERWRITE:
 //see HIVE-18154
-return false; // TODO: is this still relevant for insert-only 
tables?
+return false;
   default:
 //not relevant for LOAD
 return false;

http://git-wip-us.apache.org/repos/asf/hive/blob/dfaf90f2/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
--
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java 
b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
index 6a3be39..6faba42 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
@@ -74,11 +74,26 @@ public class TestTxnCommands extends 
TxnCommandsBaseForTests {
 return TEST_DATA_DIR;
   }
 
-  @Test//todo: what is this for?
+  /**
+   * tests that a failing Insert Overwrite (which creates a new base_x) is 
properly marked as
+   * aborted.
+   */
+  @Test
   public void testInsertOverwrite() throws Exception {
 runStatementOnDriver("insert overwrite table " + Table.NONACIDORCTBL + " 
select a,b from " + Table.NONACIDORCTBL2);
 runStatementOnDriver("create table " + Table.NONACIDORCTBL2 + "3(a int, b 
int) clustered by (a) into " + BUCKET_COUNT + " buckets stored as orc 
TBLPROPERTIES ('transactional'='false')");
-
+runStatementOnDriver("insert into " + Table.ACIDTBL + " values(1,2)");
+List rs = runStatementOnDriver("select a from " + Table.ACIDTBL + 
" where b = 2");
+Assert.assertEquals(1, rs.size());
+Assert.assertEquals("1", rs.get(0));
+hiveConf.setBoolVar(HiveConf.ConfVars.HIVETESTMODEROLLBACKTXN, true);
+runStatementOnDriver("insert into " + Table.ACIDTBL + " values(3,2)");
+hiveConf.setBoolVar(HiveConf.ConfVars.HIVETESTMODEROLLBACKTXN, false);
+runStatementOnDriver("insert into " + Table.ACIDTBL + " values(5,6)");
+rs = runStatementOnDriver("select a from " + Table.ACIDTBL + " order by 
a");
+Assert.assertEquals(2, rs.size());
+Assert.assertEquals("1", rs.get(0));
+Assert.assertEquals("5", rs.get(1));
   }
   @Ignore("not needed but useful for testing")
   @Test