hive git commit: HIVE-16821: Vectorization: support Explain Analyze in vectorized mode (Gopal V, reviewed by Prasanth Jayachandran)

2018-01-24 Thread gopalv
Repository: hive
Updated Branches:
  refs/heads/master 6dc245241 -> cb866e894


HIVE-16821: Vectorization: support Explain Analyze in vectorized mode (Gopal V, reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/cb866e89
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/cb866e89
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/cb866e89

Branch: refs/heads/master
Commit: cb866e894bc5cf536ab3ba7b0e1542e8dbda7932
Parents: 6dc2452
Author: Gopal V 
Authored: Wed Jan 24 22:35:26 2018 -0800
Committer: Gopal V 
Committed: Wed Jan 24 22:35:26 2018 -0800

--
 .../apache/hadoop/hive/ql/exec/Operator.java| 46 +---
 .../hadoop/hive/ql/exec/TableScanOperator.java  | 35 +++-
 .../VectorReduceSinkCommonOperator.java |  1 +
 .../optimizer/physical/PhysicalOptimizer.java   |  3 +-
 .../hadoop/hive/ql/parse/TezCompiler.java   |  3 +-
 .../hive/ql/parse/spark/SparkCompiler.java  |  3 +-
 .../queries/clientpositive/explainanalyze_3.q   |  1 +
 .../clientpositive/tez/explainanalyze_3.q.out   | 56 ++--
 8 files changed, 83 insertions(+), 65 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/cb866e89/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
index 2462938..199b181 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
@@ -113,9 +113,8 @@ public abstract class Operator implements Serializable,C
   private boolean useBucketizedHiveInputFormat;
 
   // Data structures specific for vectorized operators.
-  private int size;
-  private boolean selectedInUse;
-  private int[] selected;
+  private transient boolean multiChildren;
+  private transient int[] selected;
 
   // dummy operator (for not increasing seqId)
   protected Operator(String name, CompilationOpContext cContext) {
@@ -129,8 +128,6 @@ public abstract class Operator implements Serializable,C
 childOperators = new ArrayList();
 parentOperators = new ArrayList();
 abortOp = new AtomicBoolean(false);
-// Initializing data structures for vectorization
-selected = new int[VectorizedRowBatch.DEFAULT_SIZE];
   }
 
   public Operator(CompilationOpContext cContext) {
@@ -323,6 +320,9 @@ public abstract class Operator implements Serializable,C
 // String className = this.getClass().getName();
 
 this.done = false;
+this.runTimeNumRows = 0; // initializeOp can be overridden
+// Initializing data structures for vectorForward
+this.selected = new int[VectorizedRowBatch.DEFAULT_SIZE];
 if (state == State.INIT) {
   return;
 }
@@ -345,6 +345,7 @@ public abstract class Operator implements Serializable,C
 for (int i = 0; i < childOperatorsArray.length; i++) {
   childOperatorsArray[i] = childOperators.get(i);
 }
+multiChildren = childOperatorsArray.length > 1;
 childOperatorsTag = new int[childOperatorsArray.length];
 for (int i = 0; i < childOperatorsArray.length; i++) {
   List parentOperators =
@@ -487,7 +488,6 @@ public abstract class Operator implements Serializable,C
   protected void initializeOp(Configuration hconf) throws HiveException {
 this.hconf = hconf;
 rootInitializeCalled = true;
-runTimeNumRows = 0;
   }
 
   /**
@@ -704,6 +704,12 @@ public abstract class Operator implements Serializable,C
 // call the operator specific close routine
 closeOp(abort);
 
+// closeOp can be overriden
+if (conf != null && conf.getRuntimeStatsTmpDir() != null) {
+  publishRunTimeStats();
+}
+this.runTimeNumRows = 0;
+
 reporter = null;
 
 try {
@@ -733,10 +739,6 @@ public abstract class Operator implements Serializable,C
* should overwrite this funtion for their specific cleanup routine.
*/
   protected void closeOp(boolean abort) throws HiveException {
-if (conf != null && conf.getRuntimeStatsTmpDir() != null) {
-  publishRunTimeStats();
-}
-runTimeNumRows = 0;
   }
 
   private boolean jobCloseDone = false;
@@ -894,26 +896,32 @@ public abstract class Operator implements Serializable,C
 forward(row, rowInspector, false);
   }
 
+  protected void forward(VectorizedRowBatch vrg, ObjectInspector rowInspector)
+  throws HiveException {
+forward(vrg, rowInspector, true);
+  }
+
  protected void forward(Object row, ObjectInspector rowInspector, boolean isVectorized)
   throws HiveException {
-if (isVectorized && getNumChild() > 1) {
+if 

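The hunks above move the per-operator row-count bookkeeping out of the overridable initializeOp()/closeOp() hooks and into the initialize()/close() wrappers, so operators that override the hooks without calling super still reset and publish runTimeNumRows for EXPLAIN ANALYZE. A minimal sketch of that ordering, using simplified stand-in classes rather than the real Hive operators:

// Sketch only: illustrates why the counter reset/publish belongs in the
// wrapper methods rather than in the overridable *Op() hooks.
abstract class SketchOperator {
    protected long runTimeNumRows;

    public final void initialize() {
        runTimeNumRows = 0;     // reset here because initializeOp() can be overridden
        initializeOp();
    }

    public final void close() {
        closeOp();              // closeOp() can be overridden too
        publishRunTimeStats();  // still runs even if a subclass skips super.closeOp()
        runTimeNumRows = 0;
    }

    // Hooks that concrete operators override, often without calling super.
    protected void initializeOp() { }
    protected void closeOp() { }

    protected void publishRunTimeStats() {
        System.out.println(getClass().getSimpleName() + " rows=" + runTimeNumRows);
    }
}

class SketchScanOperator extends SketchOperator {
    @Override protected void initializeOp() { /* operator-specific setup, no super call */ }
    void processRow() { runTimeNumRows++; }
}

public class OperatorLifecycleSketch {
    public static void main(String[] args) {
        SketchScanOperator op = new SketchScanOperator();
        op.initialize();
        for (int i = 0; i < 3; i++) { op.processRow(); }
        op.close();   // prints "SketchScanOperator rows=3"
    }
}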
hive git commit: HIVE-18489: Automatically migrate s3n URIs to s3a URIs (addendum)

2018-01-24 Thread stakiar
Repository: hive
Updated Branches:
  refs/heads/master 4183ea900 -> 6dc245241


HIVE-18489: Automatically migrate s3n URIs to s3a URIs (addendum)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6dc24524
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6dc24524
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6dc24524

Branch: refs/heads/master
Commit: 6dc24524127fc165f212de739bfbf8425d6d2226
Parents: 4183ea9
Author: Sahil Takiar 
Authored: Wed Jan 24 14:59:23 2018 -0800
Committer: Sahil Takiar 
Committed: Wed Jan 24 14:59:23 2018 -0800

--
 .../postgres/048-HIVE-18489.postgres.sql| 24 ++--
 1 file changed, 12 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/6dc24524/metastore/scripts/upgrade/postgres/048-HIVE-18489.postgres.sql
--
diff --git a/metastore/scripts/upgrade/postgres/048-HIVE-18489.postgres.sql b/metastore/scripts/upgrade/postgres/048-HIVE-18489.postgres.sql
index fb4d0a2..ddedfe2 100644
--- a/metastore/scripts/upgrade/postgres/048-HIVE-18489.postgres.sql
+++ b/metastore/scripts/upgrade/postgres/048-HIVE-18489.postgres.sql
@@ -1,23 +1,23 @@
-UPDATE FUNC_RU
-  SET RESOURCE_URI = 's3a' || SUBSTR(RESOURCE_URI, 4)
+UPDATE "FUNC_RU"
+  SET "RESOURCE_URI" = 's3a' || SUBSTR("RESOURCE_URI", 4)
 WHERE
-  RESOURCE_URI LIKE 's3n://%'
+  "RESOURCE_URI" LIKE 's3n://%'
 ;
 
-UPDATE SKEWED_COL_VALUE_LOC_MAP
-  SET LOCATION = 's3a' || SUBSTR(LOCATION, 4)
+UPDATE "SKEWED_COL_VALUE_LOC_MAP"
+  SET "LOCATION" = 's3a' || SUBSTR("LOCATION", 4)
 WHERE
-  LOCATION LIKE 's3n://%'
+  "LOCATION" LIKE 's3n://%'
 ;
 
-UPDATE SDS
-  SET LOCATION = 's3a' || SUBSTR(LOCATION, 4)
+UPDATE "SDS"
+  SET "LOCATION" = 's3a' || SUBSTR("LOCATION", 4)
 WHERE
-  LOCATION LIKE 's3n://%'
+  "LOCATION" LIKE 's3n://%'
 ;
 
-UPDATE DBS
-  SET DB_LOCATION_URI = 's3a' || SUBSTR(DB_LOCATION_URI, 4)
+UPDATE "DBS"
+  SET "DB_LOCATION_URI" = 's3a' || SUBSTR("DB_LOCATION_URI", 4)
 WHERE
-  DB_LOCATION_URI LIKE 's3n://%'
+  "DB_LOCATION_URI" LIKE 's3n://%'
 ;


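The addendum only adds double quotes around the identifiers: PostgreSQL folds unquoted identifiers to lower case, while the metastore schema creates its tables and columns with quoted upper-case names, so the unquoted form of this script cannot find the relations it targets. A minimal JDBC sketch of the quoted form (connection URL and credentials are placeholders, not part of the commit; it assumes a local PostgreSQL metastore and the PostgreSQL JDBC driver on the classpath):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class QuotedIdentifierSketch {
    public static void main(String[] args) throws SQLException {
        try (Connection conn = DriverManager.getConnection(
                 "jdbc:postgresql://localhost:5432/metastore", "hiveuser", "hivepass");
             Statement stmt = conn.createStatement()) {
            // Unquoted identifiers are folded to lower case by PostgreSQL, so
            //   UPDATE DBS SET DB_LOCATION_URI = ...
            // looks for a relation named "dbs" and fails against a schema that
            // was created with quoted upper-case names. Quoting preserves the case:
            int updated = stmt.executeUpdate(
                "UPDATE \"DBS\""
                + " SET \"DB_LOCATION_URI\" = 's3a' || SUBSTR(\"DB_LOCATION_URI\", 4)"
                + " WHERE \"DB_LOCATION_URI\" LIKE 's3n://%'");
            System.out.println("Rewrote " + updated + " database location URI(s)");
        }
    }
}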

hive git commit: HIVE-18506 : LlapBaseInputFormat - negative array index (Oleg Danilov, reviewed by Sergey Shelukhin)

2018-01-24 Thread sershe
Repository: hive
Updated Branches:
  refs/heads/master ee802dba3 -> 4183ea900


HIVE-18506 : LlapBaseInputFormat - negative array index (Oleg Danilov, reviewed by Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4183ea90
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4183ea90
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4183ea90

Branch: refs/heads/master
Commit: 4183ea900e5dc2f862cdc330338add6935e1f518
Parents: ee802db
Author: sergey 
Authored: Wed Jan 24 14:43:52 2018 -0800
Committer: sergey 
Committed: Wed Jan 24 14:43:52 2018 -0800

--
 .../src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/4183ea90/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java
--
diff --git a/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java b/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java
index fc360d4..0120639 100644
--- a/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java
+++ b/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java
@@ -381,7 +381,7 @@ public class LlapBaseInputFormat>
 LOG.info("Finding random live service instance");
 Collection allInstances = instanceSet.getAll();
 if (allInstances.size() > 0) {
-  int randIdx = rand.nextInt() % allInstances.size();
+  int randIdx = rand.nextInt(allInstances.size());;
   serviceInstance = allInstances.toArray(serviceInstanceArray)[randIdx];
 }
 return serviceInstance;

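The one-line fix addresses Java's remainder semantics: Random.nextInt() ranges over all ints, and % keeps the sign of the dividend, so the old expression could yield a negative index into the service-instance array, while rand.nextInt(size) is uniform over [0, size). A small self-contained illustration (the array contents are made up):

import java.util.Random;

public class RandomIndexSketch {
    public static void main(String[] args) {
        Random rand = new Random();
        String[] instances = {"llap-0", "llap-1", "llap-2"};  // illustrative names

        // Broken pattern: nextInt() spans the full int range and % keeps the
        // dividend's sign, so the result can be negative (e.g. -2147483641 % 3 == -1).
        int risky = rand.nextInt() % instances.length;   // in [-2, 2]

        // Fixed pattern: nextInt(bound) is uniform over [0, bound).
        int safe = rand.nextInt(instances.length);       // in [0, 2]

        System.out.println("risky=" + risky + ", safe=" + safe
            + " -> " + instances[safe]);
    }
}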


hive git commit: HIVE-18202: Automatically migrate hbase.table.name to hbase.mapreduce.hfileoutputformat.table.name for hbase-based table (addendum)

2018-01-24 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 042698ce2 -> ee802dba3


HIVE-18202: Automatically migrate hbase.table.name to hbase.mapreduce.hfileoutputformat.table.name for hbase-based table (addendum)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ee802dba
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ee802dba
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ee802dba

Branch: refs/heads/master
Commit: ee802dba3f28d0467bbea045e6aa5c9bfac8e2a5
Parents: 042698c
Author: Aihua Xu 
Authored: Wed Jan 24 13:32:02 2018 -0800
Committer: Aihua Xu 
Committed: Wed Jan 24 13:32:02 2018 -0800

--
 metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/ee802dba/metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql
--
diff --git a/metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql b/metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql
index 0fdc615..c054322 100644
--- a/metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql
+++ b/metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql
@@ -1,6 +1,6 @@
-UPDATE TABLE_PARAMS
-  SET PARAM_KEY = 'hbase.mapreduce.hfileoutputformat.table.name'
+UPDATE "TABLE_PARAMS"
+  SET "PARAM_KEY" = 'hbase.mapreduce.hfileoutputformat.table.name'
 WHERE
-  PARAM_KEY = 'hbase.table.name'
+  "PARAM_KEY" = 'hbase.table.name'
 ;
 



hive git commit: HIVE-18485: Add more unit tests for hive.strict.checks.* properties (Sahil Takiar, reviewed by Peter Vary)

2018-01-24 Thread stakiar
Repository: hive
Updated Branches:
  refs/heads/master 09ce6cdf3 -> 042698ce2


HIVE-18485: Add more unit tests for hive.strict.checks.* properties (Sahil Takiar, reviewed by Peter Vary)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/042698ce
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/042698ce
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/042698ce

Branch: refs/heads/master
Commit: 042698ce29310d9557995dacf21d1ee7f2613b34
Parents: 09ce6cd
Author: Sahil Takiar 
Authored: Wed Jan 24 10:04:40 2018 -0800
Committer: Sahil Takiar 
Committed: Wed Jan 24 10:05:15 2018 -0800

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |  4 ++--
 .../org/apache/hadoop/hive/ql/ErrorMsg.java | 11 +-
 .../clientnegative/alter_view_failure6_2.q  | 14 
 .../clientnegative/compare_double_bigint_2.q|  8 +++
 .../clientnegative/compare_string_bigint_2.q|  8 +++
 ql/src/test/queries/clientnegative/input4_2.q   |  8 +++
 .../queries/clientnegative/input_part0_neg_2.q  |  6 +
 .../test/queries/clientnegative/strict_join.q   |  5 +
 .../test/queries/clientnegative/strict_join_2.q |  6 +
 .../queries/clientnegative/strict_pruning_2.q   |  9 
 .../clientnegative/alter_view_failure6.q.out|  4 ++--
 .../clientnegative/alter_view_failure6_2.q.out  | 23 
 .../clientnegative/compare_double_bigint.q.out  |  2 +-
 .../compare_double_bigint_2.q.out   |  1 +
 .../clientnegative/compare_string_bigint.q.out  |  2 +-
 .../compare_string_bigint_2.q.out   |  1 +
 ql/src/test/results/clientnegative/input4.q.out |  2 +-
 .../test/results/clientnegative/input4_2.q.out  |  1 +
 .../clientnegative/input_part0_neg.q.out|  2 +-
 .../clientnegative/input_part0_neg_2.q.out  |  1 +
 .../results/clientnegative/strict_join.q.out|  2 +-
 .../results/clientnegative/strict_join_2.q.out  |  1 +
 .../results/clientnegative/strict_orderby.q.out |  2 +-
 .../results/clientnegative/strict_pruning.q.out |  2 +-
 .../clientnegative/strict_pruning_2.q.out   |  1 +
 25 files changed, 105 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/042698ce/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 858f22b..0c2cf05 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -4961,7 +4961,7 @@ public class HiveConf extends Configuration {
 
 private static final String NO_LIMIT_MSG = makeMessage(
 "Order by-s without limit", ConfVars.HIVE_STRICT_CHECKS_LARGE_QUERY);
-private static final String NO_PARTITIONLESS_MSG = makeMessage(
+public static final String NO_PARTITIONLESS_MSG = makeMessage(
 "Queries against partitioned tables without a partition filter",
 ConfVars.HIVE_STRICT_CHECKS_LARGE_QUERY);
 private static final String NO_COMPARES_MSG = makeMessage(
@@ -4972,7 +4972,7 @@ public class HiveConf extends Configuration {
 "Load into bucketed tables", ConfVars.HIVE_STRICT_CHECKS_BUCKETING);
 
 private static String makeMessage(String what, ConfVars setting) {
-  return what + " are disabled for safety reasons. If you know what you are doing, please set"
+  return what + " are disabled for safety reasons. If you know what you are doing, please set "
   + setting.varname + " to false and that " + ConfVars.HIVEMAPREDMODE.varname + " is not"
   + " set to 'strict' to proceed. Note that if you may get errors or incorrect results if"
   + " you make a mistake while using some of the unsafe features.";

http://git-wip-us.apache.org/repos/asf/hive/blob/042698ce/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index 5baac18..134faee 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -114,15 +114,12 @@ public enum ErrorMsg {
   CLUSTERBY_ORDERBY_CONFLICT(10050, "Cannot have both CLUSTER BY and ORDER BY clauses"),
   NO_LIMIT_WITH_ORDERBY(10051, "In strict mode, if ORDER BY is specified, "
   + "LIMIT must also be specified"),
-  NO_CARTESIAN_PRODUCT(10052, "In strict mode, cartesian product is not allowed. "
-  + "If you really want to perform the operation, set hive.mapred.mode=nonstrict"),