hive git commit: HIVE-14333: ORC schema evolution from float to double changes precision and breaks filters (Prasanth Jayachandran reviewed by Matt McCline)

2016-07-27 Thread prasanthj
Repository: hive
Updated Branches:
  refs/heads/master 7938668b4 -> 602a5f38b


HIVE-14333: ORC schema evolution from float to double changes precision and 
breaks filters (Prasanth Jayachandran reviewed by Matt McCline)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/602a5f38
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/602a5f38
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/602a5f38

Branch: refs/heads/master
Commit: 602a5f38b855ed17354d52d54ac7a7c2cfe7f273
Parents: 7938668
Author: Prasanth Jayachandran 
Authored: Wed Jul 27 23:42:53 2016 -0700
Committer: Prasanth Jayachandran 
Committed: Wed Jul 27 23:42:53 2016 -0700

--
 .../orc/impl/ConvertTreeReaderFactory.java  |  28 +-
 .../apache/orc/impl/TestSchemaEvolution.java|  59 +
 .../clientpositive/orc_schema_evolution_float.q |  38 +++
 .../orc_schema_evolution_float.q.out| 263 +++
 .../schema_evol_orc_acid_mapwork_part.q.out |   4 +-
 .../schema_evol_orc_acid_mapwork_table.q.out|   4 +-
 .../schema_evol_orc_acidvec_mapwork_part.q.out  |   4 +-
 .../schema_evol_orc_acidvec_mapwork_table.q.out |   4 +-
 .../schema_evol_orc_nonvec_fetchwork_part.q.out |   4 +-
 ...schema_evol_orc_nonvec_fetchwork_table.q.out |   4 +-
 .../schema_evol_orc_nonvec_mapwork_part.q.out   |   4 +-
 .../schema_evol_orc_nonvec_mapwork_table.q.out  |   4 +-
 .../schema_evol_orc_vec_mapwork_part.q.out  |   4 +-
 .../schema_evol_orc_vec_mapwork_table.q.out |   4 +-
 .../tez/schema_evol_orc_acid_mapwork_part.q.out |   4 +-
 .../schema_evol_orc_acid_mapwork_table.q.out|   4 +-
 .../schema_evol_orc_acidvec_mapwork_part.q.out  |   4 +-
 .../schema_evol_orc_acidvec_mapwork_table.q.out |   4 +-
 .../schema_evol_orc_nonvec_fetchwork_part.q.out |   4 +-
 ...schema_evol_orc_nonvec_fetchwork_table.q.out |   4 +-
 .../schema_evol_orc_nonvec_mapwork_part.q.out   |   4 +-
 .../schema_evol_orc_nonvec_mapwork_table.q.out  |   4 +-
 .../tez/schema_evol_orc_vec_mapwork_part.q.out  |   4 +-
 .../tez/schema_evol_orc_vec_mapwork_table.q.out |   4 +-
 24 files changed, 427 insertions(+), 41 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/602a5f38/orc/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java
--
diff --git a/orc/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java 
b/orc/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java
index c0c6964..03378a9 100644
--- a/orc/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java
+++ b/orc/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java
@@ -983,8 +983,34 @@ public class ConvertTreeReaderFactory extends 
TreeReaderFactory {
 public void nextVector(ColumnVector previousVector,
boolean[] isNull,
final int batchSize) throws IOException {
-  // The DoubleColumnVector produced by FloatTreeReader is what we want.
+  // we get the DoubleColumnVector produced by float tree reader first, 
then iterate through
+  // the elements and make double -> float -> string -> double conversion 
to preserve the
+  // precision. When float tree reader reads float and assign it to 
double, java's widening
+  // conversion adds more precision which will break all comparisons.
+  // Example: float f = 74.72
+  // double d = f ---> 74.72000122070312
+  // Double.parseDouble(String.valueOf(f)) ---> 74.72
   floatTreeReader.nextVector(previousVector, isNull, batchSize);
+
+  DoubleColumnVector doubleColumnVector = (DoubleColumnVector) 
previousVector;
+  if (doubleColumnVector.isRepeating) {
+if (doubleColumnVector.noNulls || !doubleColumnVector.isNull[0]) {
+  final float f = (float) doubleColumnVector.vector[0];
+  doubleColumnVector.vector[0] = Double.parseDouble(String.valueOf(f));
+}
+  } else if (doubleColumnVector.noNulls){
+for (int i = 0; i < batchSize; i++) {
+  final float f = (float) doubleColumnVector.vector[i];
+  doubleColumnVector.vector[i] = Double.parseDouble(String.valueOf(f));
+}
+  } else {
+for (int i = 0; i < batchSize; i++) {
+  if (!doubleColumnVector.isNull[i]) {
+final float f = (float) doubleColumnVector.vector[i];
+doubleColumnVector.vector[i] = 
Double.parseDouble(String.valueOf(f));
+  }
+}
+  }
 }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/602a5f38/orc/src/test/org/apache/orc/impl/TestSchemaEvolution.java
--
diff --git a/orc/src/test/org/apache/orc/impl/TestSchemaEvolution.java 
b/orc/src/test/org/apache/orc/impl/TestSchemaEvolution.java
index

hive git commit: HIVE-14364. Update timeouts for llap comparator tests. (Siddharth Seth, reviewed by Gunther Hagleitner)

2016-07-27 Thread sseth
Repository: hive
Updated Branches:
  refs/heads/master 44d39c6f2 -> 7938668b4


HIVE-14364. Update timeouts for llap comparator tests. (Siddharth Seth, 
reviewed by Gunther Hagleitner)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7938668b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7938668b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7938668b

Branch: refs/heads/master
Commit: 7938668b40e4785175ee2278bde033e15d804767
Parents: 44d39c6
Author: Siddharth Seth 
Authored: Wed Jul 27 17:19:39 2016 -0700
Committer: Siddharth Seth 
Committed: Wed Jul 27 17:19:39 2016 -0700

--
 .../daemon/impl/comparator/TestFirstInFirstOutComparator.java  | 6 +++---
 .../daemon/impl/comparator/TestShortestJobFirstComparator.java | 6 +++---
 2 files changed, 6 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/7938668b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/comparator/TestFirstInFirstOutComparator.java
--
diff --git 
a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/comparator/TestFirstInFirstOutComparator.java
 
b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/comparator/TestFirstInFirstOutComparator.java
index 53c19b4..5ea62aa 100644
--- 
a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/comparator/TestFirstInFirstOutComparator.java
+++ 
b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/comparator/TestFirstInFirstOutComparator.java
@@ -89,7 +89,7 @@ public class TestFirstInFirstOutComparator {
 .build();
   }
 
-  @Test
+  @Test (timeout = 60000)
   public void testWaitQueueComparator() throws InterruptedException {
 TaskWrapper r1 = createTaskWrapper(createRequest(1, 2, 5, 100), false, 
10);
 TaskWrapper r2 = createTaskWrapper(createRequest(2, 4, 4, 200), false, 
10);
@@ -246,7 +246,7 @@ public class TestFirstInFirstOutComparator {
 assertEquals(r2, queue.take());
   }
 
-  @Test(timeout = 5000)
+  @Test(timeout = 60000)
   public void testWaitQueueComparatorWithinDagPriority() throws 
InterruptedException {
 TaskWrapper r1 = createTaskWrapper(createRequest(1, 1, 0, 100, 100, 10), 
false, 10);
 TaskWrapper r2 = createTaskWrapper(createRequest(2, 1, 0, 100, 100, 1), 
false, 10);
@@ -264,7 +264,7 @@ public class TestFirstInFirstOutComparator {
 assertEquals(r1, queue.take());
   }
 
-  @Test(timeout = 5000)
+  @Test(timeout = 60000)
   public void testWaitQueueComparatorParallelism() throws InterruptedException 
{
 TaskWrapper r1 = createTaskWrapper(createRequest(1, 10, 3, 100, 100, 1), 
false, 10);
 TaskWrapper r2 = createTaskWrapper(createRequest(2, 10, 7, 100, 100, 1), 
false, 10);

http://git-wip-us.apache.org/repos/asf/hive/blob/7938668b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/comparator/TestShortestJobFirstComparator.java
--
diff --git 
a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/comparator/TestShortestJobFirstComparator.java
 
b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/comparator/TestShortestJobFirstComparator.java
index 9dafd15..f50c657 100644
--- 
a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/comparator/TestShortestJobFirstComparator.java
+++ 
b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/comparator/TestShortestJobFirstComparator.java
@@ -26,7 +26,7 @@ import org.junit.Test;
 public class TestShortestJobFirstComparator {
 
 
-  @Test(timeout = 5000)
+  @Test(timeout = 60000)
   public void testWaitQueueComparator() throws InterruptedException {
 TaskWrapper r1 = createTaskWrapper(createSubmitWorkRequestProto(1, 2, 
100), false, 10);
 TaskWrapper r2 = createTaskWrapper(createSubmitWorkRequestProto(2, 4, 
200), false, 10);
@@ -161,7 +161,7 @@ public class TestShortestJobFirstComparator {
 assertEquals(r5, queue.take());
   }
 
-  @Test(timeout = 5000)
+  @Test(timeout = 60000)
   public void testWaitQueueComparatorWithinDagPriority() throws 
InterruptedException {
 TaskWrapper r1 = createTaskWrapper(createSubmitWorkRequestProto(1, 1, 0, 
100, 10), false, 10);
 TaskWrapper r2 = createTaskWrapper(createSubmitWorkRequestProto(2, 1, 0, 
100, 1), false, 10);
@@ -179,7 +179,7 @@ public class TestShortestJobFirstComparator {
 assertEquals(r1, queue.take());
   }
 
-  @Test(timeout = 5000)
+  @Test(timeout = 60000)
   public void testWaitQueueComparatorParallelism() throws InterruptedException 
{
 TaskWrapper r1 = createTaskWrapper(createSubmitWorkRequestProto(1, 10, 3, 
100, 1), false, 10); // 7 pending
 TaskWrapper r2 = createTaskWrapper(createSubmitWorkRe

hive git commit: HIVE-14332: Reduce logging from VectorMapOperator (Matt McCline, reviewed by Siddharth Seth)

2016-07-27 Thread mmccline
Repository: hive
Updated Branches:
  refs/heads/master e1ddf9c78 -> 44d39c6f2


HIVE-14332: Reduce logging from VectorMapOperator (Matt McCline, reviewed by 
Siddharth Seth)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/44d39c6f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/44d39c6f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/44d39c6f

Branch: refs/heads/master
Commit: 44d39c6f2f8e60c4ed7c0d0c133732436859920c
Parents: e1ddf9c
Author: Matt McCline 
Authored: Wed Jul 27 17:00:31 2016 -0700
Committer: Matt McCline 
Committed: Wed Jul 27 17:00:54 2016 -0700

--
 .../apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java  | 7 ---
 1 file changed, 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/44d39c6f/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java
index 56af05e..a65cac0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java
@@ -536,13 +536,6 @@ public class VectorMapOperator extends AbstractMapOperator 
{
 for (Map.Entry> entry : 
conf.getPathToAliases().entrySet()) {
   Path path = entry.getKey();
   PartitionDesc partDesc = conf.getPathToPartitionInfo().get(path);
-  ArrayList aliases = entry.getValue();
-
-  VectorPartitionDesc vectorPartDesc = partDesc.getVectorPartitionDesc();
-  LOG.info("VectorMapOperator path: " + path +
-  ", read type " + 
vectorPartDesc.getVectorMapOperatorReadType().name() +
-  ", vector deserialize type " + 
vectorPartDesc.getVectorDeserializeType().name() +
-  ", aliases " + aliases);
 
   VectorPartitionContext vectorPartitionContext;
   if (!partitionContextMap.containsKey(partDesc)) {



hive git commit: HIVE-14338: Delete/Alter table calls failing with HiveAccessControlException (Pengcheng Xiong, reviewed by Ashutosh Chauhan, Thejas M Nair)

2016-07-27 Thread pxiong
Repository: hive
Updated Branches:
  refs/heads/branch-2.1 c35a10e66 -> 0df731d76


HIVE-14338: Delete/Alter table calls failing with HiveAccessControlException 
(Pengcheng Xiong, reviewed by Ashutosh Chauhan, Thejas M Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0df731d7
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0df731d7
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0df731d7

Branch: refs/heads/branch-2.1
Commit: 0df731d7645041752794bb784e17e3d788e1f03b
Parents: c35a10e
Author: Pengcheng Xiong 
Authored: Wed Jul 27 16:58:21 2016 -0700
Committer: Pengcheng Xiong 
Committed: Wed Jul 27 17:00:18 2016 -0700

--
 .../src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java | 2 --
 .../test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java| 5 ++---
 2 files changed, 2 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/0df731d7/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
--
diff --git 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
index f6560d9..187ca4e 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
@@ -80,8 +80,6 @@ public class HCatCli {
 HiveConf conf = ss.getConf();
 
 HiveConf.setVar(conf, ConfVars.SEMANTIC_ANALYZER_HOOK, 
HCatSemanticAnalyzer.class.getName());
-conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
-
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
 String engine = HiveConf.getVar(conf, ConfVars.HIVE_EXECUTION_ENGINE);
 final String MR_ENGINE = "mr";
 if(!MR_ENGINE.equalsIgnoreCase(engine)) {

http://git-wip-us.apache.org/repos/asf/hive/blob/0df731d7/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
--
diff --git 
a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java 
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
index d029906..8aa510f 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
@@ -98,9 +98,6 @@ public class TestPermsGrp extends TestCase {
 hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
 hcatConf.setTimeVar(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, 60, 
TimeUnit.SECONDS);
 hcatConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
-hcatConf
-.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
-
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
 clientWH = new Warehouse(hcatConf);
 msc = new HiveMetaStoreClient(hcatConf);
 System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
@@ -198,6 +195,8 @@ public class TestPermsGrp extends TestCase {
   private void callHCatCli(String[] args) {
 List argsList = new ArrayList();
 argsList.add("-Dhive.support.concurrency=false");
+argsList
+
.add("-Dhive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
 argsList.addAll(Arrays.asList(args));
 HCatCli.main(argsList.toArray(new String[]{}));
   }



hive git commit: HIVE-14338: Delete/Alter table calls failing with HiveAccessControlException (Pengcheng Xiong, reviewed by Ashutosh Chauhan, Thejas M Nair)

2016-07-27 Thread pxiong
Repository: hive
Updated Branches:
  refs/heads/master 4941c17d7 -> e1ddf9c78


HIVE-14338: Delete/Alter table calls failing with HiveAccessControlException 
(Pengcheng Xiong, reviewed by Ashutosh Chauhan, Thejas M Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e1ddf9c7
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e1ddf9c7
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e1ddf9c7

Branch: refs/heads/master
Commit: e1ddf9c78b8d59dfd555fca1f2a9f5a371627f9f
Parents: 4941c17
Author: Pengcheng Xiong 
Authored: Wed Jul 27 16:58:21 2016 -0700
Committer: Pengcheng Xiong 
Committed: Wed Jul 27 16:58:21 2016 -0700

--
 .../src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java | 2 --
 .../test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java| 5 ++---
 2 files changed, 2 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e1ddf9c7/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
--
diff --git 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
index f6560d9..187ca4e 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
@@ -80,8 +80,6 @@ public class HCatCli {
 HiveConf conf = ss.getConf();
 
 HiveConf.setVar(conf, ConfVars.SEMANTIC_ANALYZER_HOOK, 
HCatSemanticAnalyzer.class.getName());
-conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
-
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
 String engine = HiveConf.getVar(conf, ConfVars.HIVE_EXECUTION_ENGINE);
 final String MR_ENGINE = "mr";
 if(!MR_ENGINE.equalsIgnoreCase(engine)) {

http://git-wip-us.apache.org/repos/asf/hive/blob/e1ddf9c7/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
--
diff --git 
a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java 
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
index d029906..8aa510f 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
@@ -98,9 +98,6 @@ public class TestPermsGrp extends TestCase {
 hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
 hcatConf.setTimeVar(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, 60, 
TimeUnit.SECONDS);
 hcatConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
-hcatConf
-.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
-
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
 clientWH = new Warehouse(hcatConf);
 msc = new HiveMetaStoreClient(hcatConf);
 System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
@@ -198,6 +195,8 @@ public class TestPermsGrp extends TestCase {
   private void callHCatCli(String[] args) {
 List argsList = new ArrayList();
 argsList.add("-Dhive.support.concurrency=false");
+argsList
+
.add("-Dhive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
 argsList.addAll(Arrays.asList(args));
 HCatCli.main(argsList.toArray(new String[]{}));
   }



hive git commit: HIVE-14336: Make usage of VectorUDFAdaptor configurable (Matt McCline, reviewed by Sergey Shelukhin)

2016-07-27 Thread mmccline
Repository: hive
Updated Branches:
  refs/heads/master 9629c6ef1 -> 4941c17d7


HIVE-14336: Make usage of VectorUDFAdaptor configurable (Matt McCline, reviewed 
by Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4941c17d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4941c17d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4941c17d

Branch: refs/heads/master
Commit: 4941c17d7fa2e0a17fcf91ff9e034cd73db74c95
Parents: 9629c6e
Author: Matt McCline 
Authored: Wed Jul 27 16:48:27 2016 -0700
Committer: Matt McCline 
Committed: Wed Jul 27 16:48:27 2016 -0700

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   7 +
 .../ql/exec/vector/VectorGroupByOperator.java   |   3 +-
 .../exec/vector/VectorMapJoinBaseOperator.java  |   3 +-
 .../exec/vector/VectorSMBMapJoinOperator.java   |   3 +-
 .../ql/exec/vector/VectorizationContext.java| 159 +++-
 .../hive/ql/optimizer/physical/Vectorizer.java  |  13 +-
 .../clientpositive/vector_adaptor_usage_mode.q  | 177 
 .../vector_adaptor_usage_mode.q.out | 933 +++
 8 files changed, 1283 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/4941c17d/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index e92466f..aa7647b 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -2592,6 +2592,13 @@ public class HiveConf extends Configuration {
 
HIVE_VECTORIZATION_USE_ROW_DESERIALIZE("hive.vectorized.use.row.serde.deserialize",
 false,
 "This flag should be set to true to enable vectorizing using row 
deserialize.\n" +
 "The default value is false."),
+HIVE_VECTOR_ADAPTOR_USAGE_MODE("hive.vectorized.adaptor.usage.mode", 
"all", new StringSet("none", "chosen", "all"),
+"Specifies the extent to which the VectorUDFAdaptor will be used for 
UDFs that do not have a cooresponding vectorized class.\n" +
+"0. none   : disable any usage of VectorUDFAdaptor\n" +
+"1. chosen : use VectorUDFAdaptor for a small set of UDFs that were 
choosen for good performance\n" +
+"2. all: use VectorUDFAdaptor for all UDFs"
+),
+
 HIVE_TYPE_CHECK_ON_INSERT("hive.typecheck.on.insert", true, "This property 
has been extended to control "
 + "whether to check, convert, and normalize partition value to conform 
to its column type in "
 + "partition operations including but not limited to insert, such as 
alter, describe etc."),

http://git-wip-us.apache.org/repos/asf/hive/blob/4941c17d/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java
index 6e53526..2605203 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java
@@ -767,7 +767,8 @@ public class VectorGroupByOperator extends 
Operator implements
 
 isVectorOutput = desc.getVectorDesc().isVectorOutput();
 
-vOutContext = new VectorizationContext(getName(), 
desc.getOutputColumnNames());
+vOutContext = new VectorizationContext(getName(), 
desc.getOutputColumnNames(),
+/* vContextEnvironment */ vContext);
   }
 
   /** Kryo ctor. */

http://git-wip-us.apache.org/repos/asf/hive/blob/4941c17d/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinBaseOperator.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinBaseOperator.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinBaseOperator.java
index 902a183..bcde25f 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinBaseOperator.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinBaseOperator.java
@@ -88,7 +88,8 @@ public class VectorMapJoinBaseOperator extends 
MapJoinOperator implements Vector
 noOuterJoin = desc.isNoOuterJoin();
 
  // We are making a new output vectorized row batch.
-vOutContext = new VectorizationContext(getName(), 
desc.getOutputColumnNames());
+vOutContext = new VectorizationContext(getName(), 
desc.getOutputColumnNames(),
+/* vContextEnvironment */ vContext);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/b

hive git commit: HIVE-14293: PerfLogger.openScopes should be transient (Daniel Dai, reviewed by Prasanth Jayachandran)

2016-07-27 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-2.1 dec619ac5 -> c35a10e66


HIVE-14293: PerfLogger.openScopes should be transient (Daniel Dai, reviewed by 
Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c35a10e6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c35a10e6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c35a10e6

Branch: refs/heads/branch-2.1
Commit: c35a10e66c9b1b1f4be5fca7890fc287f23f0a1a
Parents: dec619a
Author: Daniel Dai 
Authored: Wed Jul 27 15:06:36 2016 -0700
Committer: Daniel Dai 
Committed: Wed Jul 27 15:07:25 2016 -0700

--
 common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java   | 2 +-
 ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/c35a10e6/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java 
b/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
index 63d96be..6a5d22f 100644
--- a/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
+++ b/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
@@ -220,7 +220,7 @@ public class PerfLogger {
   }
 
   //Methods for metrics integration.  Each thread-local PerfLogger will 
open/close scope during each perf-log method.
-  Map openScopes = new HashMap();
+  transient Map openScopes = new HashMap();
 
   private void beginMetrics(String method) {
 Metrics metrics = MetricsFactory.getInstance();

http://git-wip-us.apache.org/repos/asf/hive/blob/c35a10e6/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
index 690c718..99cdaa0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
@@ -80,7 +80,7 @@ public class MapJoinOperator extends 
AbstractMapJoinOperator implem
   private static final long serialVersionUID = 1L;
   private static final Logger LOG = 
LoggerFactory.getLogger(MapJoinOperator.class.getName());
   private static final String CLASS_NAME = MapJoinOperator.class.getName();
-  private final PerfLogger perfLogger = SessionState.getPerfLogger();
+  private transient final PerfLogger perfLogger = SessionState.getPerfLogger();
 
   private transient String cacheKey;
   private transient ObjectCache cache;



hive git commit: HIVE-14293: PerfLogger.openScopes should be transient (Daniel Dai, reviewed by Prasanth Jayachandran)

2016-07-27 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/master 477a47d3b -> 9629c6ef1


HIVE-14293: PerfLogger.openScopes should be transient (Daniel Dai, reviewed by 
Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9629c6ef
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9629c6ef
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9629c6ef

Branch: refs/heads/master
Commit: 9629c6ef174c9123dc09ff8eb42b67347779f726
Parents: 477a47d
Author: Daniel Dai 
Authored: Wed Jul 27 15:06:36 2016 -0700
Committer: Daniel Dai 
Committed: Wed Jul 27 15:06:36 2016 -0700

--
 common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java   | 2 +-
 ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/9629c6ef/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java 
b/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
index 63d96be..6a5d22f 100644
--- a/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
+++ b/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
@@ -220,7 +220,7 @@ public class PerfLogger {
   }
 
   //Methods for metrics integration.  Each thread-local PerfLogger will 
open/close scope during each perf-log method.
-  Map openScopes = new HashMap();
+  transient Map openScopes = new HashMap();
 
   private void beginMetrics(String method) {
 Metrics metrics = MetricsFactory.getInstance();

http://git-wip-us.apache.org/repos/asf/hive/blob/9629c6ef/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
index 690c718..99cdaa0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
@@ -80,7 +80,7 @@ public class MapJoinOperator extends 
AbstractMapJoinOperator implem
   private static final long serialVersionUID = 1L;
   private static final Logger LOG = 
LoggerFactory.getLogger(MapJoinOperator.class.getName());
   private static final String CLASS_NAME = MapJoinOperator.class.getName();
-  private final PerfLogger perfLogger = SessionState.getPerfLogger();
+  private transient final PerfLogger perfLogger = SessionState.getPerfLogger();
 
   private transient String cacheKey;
   private transient ObjectCache cache;



hive git commit: HIVE-14296: Session count is not decremented when HS2 clients do not shutdown cleanly. (Naveen Gangam, reviewed by Szehon Ho and Mohit Sabharwal)

2016-07-27 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master df3c5d059 -> 477a47d3b


HIVE-14296: Session count is not decremented when HS2 clients do not shutdown 
cleanly. (Naveen Gangam, reviewed by Szehon Ho and Mohit Sabharwal)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/477a47d3
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/477a47d3
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/477a47d3

Branch: refs/heads/master
Commit: 477a47d3b4b9e3da3c22465217c2024588f7f000
Parents: df3c5d0
Author: Aihua Xu 
Authored: Wed Jul 27 16:08:34 2016 -0400
Committer: Aihua Xu 
Committed: Wed Jul 27 16:08:34 2016 -0400

--
 .../java/org/apache/hive/service/cli/session/SessionManager.java | 4 +++-
 .../org/apache/hive/service/cli/thrift/ThriftCLIService.java | 3 ---
 2 files changed, 3 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/477a47d3/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
--
diff --git 
a/service/src/java/org/apache/hive/service/cli/session/SessionManager.java 
b/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
index 4f5a852..88dee48 100644
--- a/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
+++ b/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
@@ -351,14 +351,16 @@ public class SessionManager extends CompositeService {
   throw new HiveSQLException("Failed to execute session hooks: " + 
e.getMessage(), e);
 }
 handleToSession.put(session.getSessionHandle(), session);
+LOG.info("Session opened, " + session.getSessionHandle() + ", current 
sessions:" + getOpenSessionCount());
 return session;
   }
 
-  public void closeSession(SessionHandle sessionHandle) throws 
HiveSQLException {
+  public synchronized void closeSession(SessionHandle sessionHandle) throws 
HiveSQLException {
 HiveSession session = handleToSession.remove(sessionHandle);
 if (session == null) {
   throw new HiveSQLException("Session does not exist: " + sessionHandle);
 }
+LOG.info("Session closed, " + sessionHandle + ", current sessions:" + 
getOpenSessionCount());
 try {
   session.close();
 } finally {

http://git-wip-us.apache.org/repos/asf/hive/blob/477a47d3/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
--
diff --git 
a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java 
b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
index 0dd56cb..886492a 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
@@ -117,7 +117,6 @@ public abstract class ThriftCLIService extends 
AbstractService implements TCLISe
   protected CLIService cliService;
   private static final TStatus OK_STATUS = new 
TStatus(TStatusCode.SUCCESS_STATUS);
   protected static HiveAuthFactory hiveAuthFactory;
-  private static final AtomicInteger sessionCount = new AtomicInteger();
 
   protected int portNum;
   protected InetAddress serverIPAddress;
@@ -320,7 +319,6 @@ public abstract class ThriftCLIService extends 
AbstractService implements TCLISe
   if (context != null) {
 context.setSessionHandle(sessionHandle);
   }
-  LOG.info("Opened a session " + sessionHandle + ", current sessions: " + 
sessionCount.incrementAndGet());
 } catch (Exception e) {
   LOG.warn("Error opening session: ", e);
   resp.setStatus(HiveSQLException.toTStatus(e));
@@ -466,7 +464,6 @@ public abstract class ThriftCLIService extends 
AbstractService implements TCLISe
 try {
   SessionHandle sessionHandle = new SessionHandle(req.getSessionHandle());
   cliService.closeSession(sessionHandle);
-  LOG.info("Closed a session " + sessionHandle + ", current sessions: " + 
sessionCount.decrementAndGet());
   resp.setStatus(OK_STATUS);
   ThriftCLIServerContext context =
 (ThriftCLIServerContext)currentServerContext.get();



hive git commit: HIVE-14313 : Test failure TestMetaStoreMetrics.testConnections (Naveen Gangam via Szehon)

2016-07-27 Thread szehon
Repository: hive
Updated Branches:
  refs/heads/master 78d335612 -> df3c5d059


HIVE-14313 : Test failure TestMetaStoreMetrics.testConnections (Naveen Gangam 
via Szehon)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/df3c5d05
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/df3c5d05
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/df3c5d05

Branch: refs/heads/master
Commit: df3c5d05968f9abf4f2b101b9dc208c205f2253b
Parents: 78d3356
Author: Szehon Ho 
Authored: Wed Jul 27 10:10:41 2016 -0700
Committer: Szehon Ho 
Committed: Wed Jul 27 10:10:41 2016 -0700

--
 .../apache/hadoop/hive/common/metrics/MetricsTestUtils.java | 2 +-
 .../apache/hadoop/hive/metastore/TestMetaStoreMetrics.java  | 9 +
 2 files changed, 6 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/df3c5d05/common/src/test/org/apache/hadoop/hive/common/metrics/MetricsTestUtils.java
--
diff --git 
a/common/src/test/org/apache/hadoop/hive/common/metrics/MetricsTestUtils.java 
b/common/src/test/org/apache/hadoop/hive/common/metrics/MetricsTestUtils.java
index 4aaa808..4667658 100644
--- 
a/common/src/test/org/apache/hadoop/hive/common/metrics/MetricsTestUtils.java
+++ 
b/common/src/test/org/apache/hadoop/hive/common/metrics/MetricsTestUtils.java
@@ -50,7 +50,7 @@ public class MetricsTestUtils {
 Assert.assertEquals(expectedValue.toString(), jsonNode.asText());
   }
 
-  private static JsonNode getJsonNode(String json, MetricsCategory category, 
String metricsName) throws Exception {
+  public static JsonNode getJsonNode(String json, MetricsCategory category, 
String metricsName) throws Exception {
 ObjectMapper objectMapper = new ObjectMapper();
 JsonNode rootNode = objectMapper.readTree(json);
 JsonNode categoryNode = rootNode.path(category.category);

http://git-wip-us.apache.org/repos/asf/hive/blob/df3c5d05/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java
index 788b665..f231af7 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMetrics.java
@@ -143,23 +143,24 @@ public class TestMetaStoreMetrics {
 
 //initial state is one connection
 String json = metrics.dumpJson();
-MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, 
MetricsConstant.OPEN_CONNECTIONS, 1);
+int initialCount = (new Integer((MetricsTestUtils.getJsonNode(json, 
MetricsTestUtils.COUNTER,
+   
MetricsConstant.OPEN_CONNECTIONS)).asText())).intValue();
 
 //create two connections
 HiveMetaStoreClient msc = new HiveMetaStoreClient(hiveConf);
 HiveMetaStoreClient msc2 = new HiveMetaStoreClient(hiveConf);
 
 json = metrics.dumpJson();
-MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, 
MetricsConstant.OPEN_CONNECTIONS, 3);
+MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, 
MetricsConstant.OPEN_CONNECTIONS, initialCount + 2);
 
 //close one connection, verify still two left
 msc.close();
 json = metrics.dumpJson();
-MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, 
MetricsConstant.OPEN_CONNECTIONS, 2);
+MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, 
MetricsConstant.OPEN_CONNECTIONS, initialCount + 1);
 
 //close one connection, verify still one left
 msc2.close();
 json = metrics.dumpJson();
-MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, 
MetricsConstant.OPEN_CONNECTIONS, 1);
+MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, 
MetricsConstant.OPEN_CONNECTIONS, initialCount);
   }
 }



hive git commit: HIVE-14335 : TaskDisplay's return value is not getting deserialized properly (Rajat Khandelwal via Szehon)

2016-07-27 Thread szehon
Repository: hive
Updated Branches:
  refs/heads/master 8b9263e20 -> 78d335612


HIVE-14335 : TaskDisplay's return value is not getting deserialized properly 
(Rajat Khandelwal via Szehon)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/78d33561
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/78d33561
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/78d33561

Branch: refs/heads/master
Commit: 78d3356127b96892e75ac243d554daee77612022
Parents: 8b9263e
Author: Szehon Ho 
Authored: Wed Jul 27 10:05:26 2016 -0700
Committer: Szehon Ho 
Committed: Wed Jul 27 10:05:26 2016 -0700

--
 ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java | 12 ++--
 .../org/apache/hive/service/cli/CLIServiceTest.java |  3 +++
 2 files changed, 9 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/78d33561/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java 
b/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java
index 703e997..bf6cb91 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java
@@ -65,7 +65,7 @@ public class QueryDisplay {
   @JsonIgnoreProperties(ignoreUnknown = true)
   public static class TaskDisplay {
 
-private Integer returnVal;  //if set, determines that task is complete.
+private Integer returnValue;  //if set, determines that task is complete.
 private String errorMsg;
 
 private Long beginTime;
@@ -95,12 +95,12 @@ public class QueryDisplay {
 }
 @JsonIgnore
 public synchronized String getStatus() {
-  if (returnVal == null) {
+  if (returnValue == null) {
 return "Running";
-  } else if (returnVal == 0) {
+  } else if (returnValue == 0) {
 return "Success, ReturnVal 0";
   } else {
-return "Failure, ReturnVal " + String.valueOf(returnVal);
+return "Failure, ReturnVal " + String.valueOf(returnValue);
   }
 }
 
@@ -116,7 +116,7 @@ public class QueryDisplay {
 }
 
 public synchronized Integer getReturnValue() {
-  return returnVal;
+  return returnValue;
 }
 
 public synchronized String getErrorMsg() {
@@ -186,7 +186,7 @@ public class QueryDisplay {
   public synchronized void setTaskResult(String taskId, TaskResult result) {
 TaskDisplay taskDisplay = tasks.get(taskId);
 if (taskDisplay != null) {
-  taskDisplay.returnVal = result.getExitVal();
+  taskDisplay.returnValue = result.getExitVal();
   if (result.getTaskError() != null) {
 taskDisplay.errorMsg = result.getTaskError().toString();
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/78d33561/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
--
diff --git a/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java 
b/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
index fb8ee4c..17d45ec 100644
--- a/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
+++ b/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
@@ -666,6 +666,9 @@ public abstract class CLIServiceTest {
   if (OperationState.CANCELED == state || state == OperationState.CLOSED
 || state == OperationState.FINISHED
 || state == OperationState.ERROR) {
+for (QueryDisplay.TaskDisplay display: taskStatuses) {
+  assertNotNull(display.getReturnValue());
+}
 break;
   }
   Thread.sleep(1000);



hive git commit: HIVE-14331: Task should set exception for failed map reduce job (Zhihai Xu via Jimmy Xiang)

2016-07-27 Thread jxiang
Repository: hive
Updated Branches:
  refs/heads/master 6897655d4 -> 8b9263e20


HIVE-14331: Task should set exception for failed map reduce job (Zhihai Xu via 
Jimmy Xiang)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8b9263e2
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8b9263e2
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8b9263e2

Branch: refs/heads/master
Commit: 8b9263e20fab47579226b7fb5f8461f2ac51a6bc
Parents: 6897655
Author: Zhihai Xu 
Authored: Wed Jul 27 08:26:50 2016 -0700
Committer: Jimmy Xiang 
Committed: Wed Jul 27 08:26:50 2016 -0700

--
 ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java  | 1 +
 ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java  | 2 ++
 .../org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java | 1 +
 .../hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java  | 1 +
 4 files changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/8b9263e2/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
index 8783960..cea9582 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
@@ -473,6 +473,7 @@ public class ExecDriver extends Task implements 
Serializable, Hadoop
 } catch (Exception e) {
   // jobClose needs to execute successfully otherwise fail task
   if (success) {
+setException(e);
 success = false;
 returnVal = 3;
 String mesg = "Job Commit failed with exception '" + 
Utilities.getNameMessage(e) + "'";

http://git-wip-us.apache.org/repos/asf/hive/blob/8b9263e2/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
index 376bab2..67a6dc7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
@@ -155,6 +155,7 @@ public class MergeFileTask extends Task 
implements Serializable,
   success = (returnVal == 0);
 
 } catch (Exception e) {
+  setException(e);
   String mesg = " with exception '" + Utilities.getNameMessage(e) + "'";
   if (rj != null) {
 mesg = "Ended Job = " + rj.getJobID() + mesg;
@@ -193,6 +194,7 @@ public class MergeFileTask extends Task 
implements Serializable,
// jobClose needs to execute successfully otherwise fail task
LOG.warn("Job close failed ",e);
 if (success) {
+  setException(e);
   success = false;
   returnVal = 3;
   String mesg = "Job Commit failed with exception '" +

http://git-wip-us.apache.org/repos/asf/hive/blob/8b9263e2/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
index 6131581..dcd0e97 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
@@ -228,6 +228,7 @@ public class PartialScanTask extends Task 
implements
 
 } catch (Exception e) {
   e.printStackTrace();
+  setException(e);
   String mesg = " with exception '" + Utilities.getNameMessage(e) + "'";
   if (rj != null) {
 mesg = "Ended Job = " + rj.getJobID() + mesg;

http://git-wip-us.apache.org/repos/asf/hive/blob/8b9263e2/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
index 2d29afc..fcfcf2f 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
@@ -192,6 +192,7 @@ public class ColumnTruncateTask extends 
Task implements Seri
 
 } catch (Exception e) {
   e.printStackTrace();
+  setException(e);
   String mesg = " with exception '" + Utilities.getNameMessage(e) + "'";
   if (rj != null) {
 mesg = "Ended Job = " 

[2/2] hive git commit: HIVE-13815: Improve logic to infer false predicates (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2016-07-27 Thread jcamacho
HIVE-13815: Improve logic to infer false predicates (Jesus Camacho Rodriguez, 
reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6897655d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6897655d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6897655d

Branch: refs/heads/master
Commit: 6897655d4c71220a505be5be3552b29f9cc734c1
Parents: c922546
Author: Jesus Camacho Rodriguez 
Authored: Thu Jul 21 20:44:45 2016 +0100
Committer: Jesus Camacho Rodriguez 
Committed: Wed Jul 27 12:36:17 2016 +0100

--
 .../test/results/positive/hbase_pushdown.q.out  |   4 +-
 .../hive/ql/optimizer/calcite/HiveRexUtil.java  |  78 ++-
 .../rules/HiveReduceExpressionsRule.java|   9 +-
 .../clientpositive/annotate_stats_filter.q.out  |   4 +-
 .../clientpositive/annotate_stats_part.q.out|   6 +-
 .../results/clientpositive/cbo_rp_join1.q.out   |  26 ++--
 .../results/clientpositive/constprog2.q.out |   2 +-
 .../clientpositive/constprog_partitioner.q.out  |   2 +-
 .../clientpositive/constprog_semijoin.q.out | 128 +--
 ql/src/test/results/clientpositive/cte_5.q.out  |  40 +++---
 .../clientpositive/filter_cond_pushdown.q.out   |  60 -
 ql/src/test/results/clientpositive/join42.q.out |   4 +-
 .../test/results/clientpositive/lineage3.q.out  |   4 +-
 .../results/clientpositive/llap/cte_5.q.out |  40 +++---
 .../llap/dynamic_partition_pruning_2.q.out  |   4 +-
 .../llap/hybridgrace_hashjoin_1.q.out   |   4 +-
 .../clientpositive/llap/tez_self_join.q.out |   2 +-
 .../test/results/clientpositive/mapjoin2.q.out  |   3 +
 .../test/results/clientpositive/mergejoin.q.out |  13 ++
 .../clientpositive/ppd_outer_join5.q.out|   4 +-
 .../results/clientpositive/ppd_udf_case.q.out   |   4 +-
 .../results/clientpositive/ppd_union_view.q.out |   2 +-
 .../results/clientpositive/smb_mapjoin_25.q.out |   8 +-
 .../spark/constprog_partitioner.q.out   |   2 +-
 .../spark/constprog_semijoin.q.out  | 128 +--
 .../clientpositive/spark/ppd_outer_join5.q.out  |   4 +-
 .../clientpositive/spark/smb_mapjoin_25.q.out   |   8 +-
 .../clientpositive/tez/constprog_semijoin.q.out |  88 ++---
 .../test/results/clientpositive/tez/cte_5.q.out |  39 +++---
 .../tez/dynamic_partition_pruning_2.q.out   |   4 +-
 .../tez/hybridgrace_hashjoin_1.q.out|   4 +-
 .../results/clientpositive/tez/mapjoin2.q.out   |   3 +
 .../results/clientpositive/tez/mergejoin.q.out  |  15 ++-
 .../clientpositive/tez/tez_self_join.q.out  |   2 +-
 34 files changed, 420 insertions(+), 328 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/6897655d/hbase-handler/src/test/results/positive/hbase_pushdown.q.out
--
diff --git a/hbase-handler/src/test/results/positive/hbase_pushdown.q.out 
b/hbase-handler/src/test/results/positive/hbase_pushdown.q.out
index 39c03eb..a5d8c6f 100644
--- a/hbase-handler/src/test/results/positive/hbase_pushdown.q.out
+++ b/hbase-handler/src/test/results/positive/hbase_pushdown.q.out
@@ -218,10 +218,10 @@ STAGE PLANS:
 alias: hbase_pushdown
 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
 Filter Operator
-  predicate: (((key = 80) and (key = 90)) and (value like '%90%')) 
(type: boolean)
+  predicate: false (type: boolean)
   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
   Select Operator
-expressions: 90 (type: int), value (type: string)
+expressions: key (type: int), value (type: string)
 outputColumnNames: _col0, _col1
 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
 File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/6897655d/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveRexUtil.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveRexUtil.java 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveRexUtil.java
index b1eca7d..1b327fe 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveRexUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveRexUtil.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hive.ql.optimizer.calcite;
 
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -30,6 +31,7 @@ import org.apache.calcite.linq4j.Ord;
 import org.apache.calcite.plan.RelOptUtil;
 im

[1/2] hive git commit: HIVE-13815: Improve logic to infer false predicates (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2016-07-27 Thread jcamacho
Repository: hive
Updated Branches:
  refs/heads/master c922546e8 -> 6897655d4


http://git-wip-us.apache.org/repos/asf/hive/blob/6897655d/ql/src/test/results/clientpositive/spark/constprog_semijoin.q.out
--
diff --git a/ql/src/test/results/clientpositive/spark/constprog_semijoin.q.out 
b/ql/src/test/results/clientpositive/spark/constprog_semijoin.q.out
index de829e2..c6a9b14 100644
--- a/ql/src/test/results/clientpositive/spark/constprog_semijoin.q.out
+++ b/ql/src/test/results/clientpositive/spark/constprog_semijoin.q.out
@@ -434,17 +434,17 @@ STAGE PLANS:
   alias: table1
   Statistics: Num rows: 10 Data size: 200 Basic stats: 
COMPLETE Column stats: NONE
   Filter Operator
-predicate: (((dimid = 100) = true) and (dimid <> 100)) 
(type: boolean)
-Statistics: Num rows: 5 Data size: 100 Basic stats: 
COMPLETE Column stats: NONE
+predicate: false (type: boolean)
+Statistics: Num rows: 1 Data size: 20 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
-  expressions: id (type: int), val (type: string), val1 
(type: string), dimid (type: int)
-  outputColumnNames: _col0, _col1, _col2, _col3
-  Statistics: Num rows: 5 Data size: 100 Basic stats: 
COMPLETE Column stats: NONE
+  expressions: id (type: int), val (type: string), val1 
(type: string)
+  outputColumnNames: _col0, _col1, _col2
+  Statistics: Num rows: 1 Data size: 20 Basic stats: 
COMPLETE Column stats: NONE
   Reduce Output Operator
-key expressions: _col3 (type: int), true (type: 
boolean)
+key expressions: 100 (type: int), true (type: boolean)
 sort order: ++
-Map-reduce partition columns: _col3 (type: int), true 
(type: boolean)
-Statistics: Num rows: 5 Data size: 100 Basic stats: 
COMPLETE Column stats: NONE
+Map-reduce partition columns: 100 (type: int), true 
(type: boolean)
+Statistics: Num rows: 1 Data size: 20 Basic stats: 
COMPLETE Column stats: NONE
 value expressions: _col0 (type: int), _col1 (type: 
string), _col2 (type: string)
 Map 3 
 Map Operator Tree:
@@ -452,10 +452,10 @@ STAGE PLANS:
   alias: table3
   Statistics: Num rows: 5 Data size: 15 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: (((id = 100) = true) and (id <> 100)) (type: 
boolean)
+predicate: ((id = 100) and (id = 100) is not null) (type: 
boolean)
 Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE 
Column stats: NONE
 Select Operator
-  expressions: id (type: int), true (type: boolean)
+  expressions: 100 (type: int), true (type: boolean)
   outputColumnNames: _col0, _col1
   Statistics: Num rows: 2 Data size: 6 Basic stats: 
COMPLETE Column stats: NONE
   Group By Operator
@@ -474,13 +474,13 @@ STAGE PLANS:
 condition map:
  Left Semi Join 0 to 1
 keys:
-  0 _col3 (type: int), true (type: boolean)
+  0 100 (type: int), true (type: boolean)
   1 _col0 (type: int), _col1 (type: boolean)
 outputColumnNames: _col0, _col1, _col2
-Statistics: Num rows: 5 Data size: 110 Basic stats: COMPLETE 
Column stats: NONE
+Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE 
Column stats: NONE
 File Output Operator
   compressed: false
-  Statistics: Num rows: 5 Data size: 110 Basic stats: COMPLETE 
Column stats: NONE
+  Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE 
Column stats: NONE
   table:
   input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
   output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -523,17 +523,17 @@ STAGE PLANS:
   alias: table1
   Statistics: Num rows: 10 Data size: 200 Basic stats: 
COMPLETE Column stats: NONE
   Filter Operator
-predicate: ((dimid) IN (100, 200) and ((dimid = 100) = 
true)) (type: boolean)
-Statistics: Num rows: 2 Data size: 40 Basic stats: 
COMPLETE Column stats: NONE
+predicate: ((dimid = 100) and (dimid = 100) is not null) 
(type: boolean)
+Statistics: Num rows: 5 Data size