hive git commit: HIVE-9423: HiveServer2: Provide the user with different error messages depending on the Thrift client exception code (Peter Vary via Chaoyu Tang)

2016-09-30 Thread ctang
Repository: hive
Updated Branches:
  refs/heads/branch-2.1 a109ff520 -> af4dec312


HIVE-9423: HiveServer2: Provide the user with different error messages 
depending on the Thrift client exception code (Peter Vary via Chaoyu Tang)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/af4dec31
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/af4dec31
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/af4dec31

Branch: refs/heads/branch-2.1
Commit: af4dec31238dd0c7dd174debf4d8bc5df9f9956b
Parents: a109ff5
Author: ctang 
Authored: Fri Sep 30 21:33:28 2016 -0400
Committer: ctang 
Committed: Fri Sep 30 21:33:28 2016 -0400

--
 beeline/pom.xml |  5 ++
 .../java/org/apache/hive/beeline/BeeLine.java   | 23 +++
 beeline/src/main/resources/BeeLine.properties   | 11 +++
 .../beeline/TestBeeLineExceptionHandling.java   | 72 
 4 files changed, 111 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/af4dec31/beeline/pom.xml
--
diff --git a/beeline/pom.xml b/beeline/pom.xml
index eaab306..c024590 100644
--- a/beeline/pom.xml
+++ b/beeline/pom.xml
@@ -119,6 +119,11 @@
       <scope>test</scope>
     </dependency>
     <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>postgresql</groupId>
       <artifactId>postgresql</artifactId>
       <version>9.1-901.jdbc4</version>

http://git-wip-us.apache.org/repos/asf/hive/blob/af4dec31/beeline/src/java/org/apache/hive/beeline/BeeLine.java
--
diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLine.java 
b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
index 9138613..856daf3 100644
--- a/beeline/src/java/org/apache/hive/beeline/BeeLine.java
+++ b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
@@ -95,6 +95,7 @@ import org.apache.hive.beeline.cli.CliOptionsProcessor;
 
 import org.apache.hive.jdbc.Utils;
 import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
+import org.apache.thrift.transport.TTransportException;
 
 /**
  * A console SQL shell with command completion.
@@ -1755,6 +1756,28 @@ public class BeeLine implements Closeable {
       return;
     }
 
+    if (e.getCause() instanceof TTransportException) {
+      switch (((TTransportException)e.getCause()).getType()) {
+        case TTransportException.ALREADY_OPEN:
+          error(loc("hs2-connection-already-open"));
+          break;
+        case TTransportException.END_OF_FILE:
+          error(loc("hs2-unexpected-end-of-file"));
+          break;
+        case TTransportException.NOT_OPEN:
+          error(loc("hs2-could-not-open-connection"));
+          break;
+        case TTransportException.TIMED_OUT:
+          error(loc("hs2-connection-timed-out"));
+          break;
+        case TTransportException.UNKNOWN:
+          error(loc("hs2-unknown-connection-problem"));
+          break;
+        default:
+          error(loc("hs2-unexpected-error"));
+      }
+    }
+
     error(loc(e instanceof SQLWarning ? "Warning" : "Error",
         new Object[] {
             e.getMessage() == null ? "" : e.getMessage().trim(),
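For illustration, here is a minimal standalone sketch of the message-key dispatch this patch introduces. The class and method names below are hypothetical, not part of the patch; it only assumes libthrift's TTransportException, whose error codes are public int constants:

import org.apache.thrift.transport.TTransportException;

public class TransportErrorDemo {
  // Mirrors the switch above: map a Thrift transport error code to the
  // corresponding BeeLine.properties message key.
  static String messageKey(Throwable cause) {
    if (!(cause instanceof TTransportException)) {
      return null;
    }
    switch (((TTransportException) cause).getType()) {
      case TTransportException.ALREADY_OPEN: return "hs2-connection-already-open";
      case TTransportException.END_OF_FILE:  return "hs2-unexpected-end-of-file";
      case TTransportException.NOT_OPEN:     return "hs2-could-not-open-connection";
      case TTransportException.TIMED_OUT:    return "hs2-connection-timed-out";
      case TTransportException.UNKNOWN:      return "hs2-unknown-connection-problem";
      default:                               return "hs2-unexpected-error";
    }
  }

  public static void main(String[] args) {
    // A SQLException wrapping a timed-out transport resolves to the timeout key.
    Throwable e = new java.sql.SQLException("could not establish connection",
        new TTransportException(TTransportException.TIMED_OUT));
    System.out.println(messageKey(e.getCause())); // prints hs2-connection-timed-out
  }
}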

http://git-wip-us.apache.org/repos/asf/hive/blob/af4dec31/beeline/src/main/resources/BeeLine.properties
--
diff --git a/beeline/src/main/resources/BeeLine.properties 
b/beeline/src/main/resources/BeeLine.properties
index 16f23a8..12e379c 100644
--- a/beeline/src/main/resources/BeeLine.properties
+++ b/beeline/src/main/resources/BeeLine.properties
@@ -142,6 +142,17 @@ active-connections: 0#No active connections|1#{0} active connection:|1<{0} activ
 
 time-ms: ({0,number,#.###} seconds)
 
+hs2-connection-already-open: Socket already connected.
+hs2-unexpected-end-of-file: Unexpected end of file when reading from HS2 server. The root \
+cause might be too many concurrent connections. Please ask the administrator to check the number \
+of active connections, and adjust hive.server2.thrift.max.worker.threads if applicable.
+hs2-could-not-open-connection: Could not open connection to the HS2 server. Please check the \
+server URI and if the URI is correct, then ask the administrator to check the server status.
+hs2-connection-timed-out: Connection timeout when communicating with HS2 server.
+hs2-unknown-connection-problem: Unknown HS2 problem when communicating with Thrift server.
+hs2-unexpected-error: Unexpected HS2 error when communicating with the Thrift server.
+
+
 cmd-usage: Usage: java org.apache.hive.cli.beeline.BeeLine \n \
 \  -u <database url>               the JDBC URL to connect to\n \
 \  -r                              reconnect to last saved connect url (in conjunction with !save)\n \


[03/44] hive git commit: HIVE-14779 make DbTxnManager.HeartbeaterThread a daemon (Eugene Koifman, reviewed by Alan Gates)

2016-09-30 Thread sershe
HIVE-14779 make DbTxnManager.HeartbeaterThread a daemon (Eugene Koifman, 
reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3c55115b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3c55115b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3c55115b

Branch: refs/heads/hive-14535
Commit: 3c55115b6eaaa02442c9c487d5d328275e211567
Parents: eab7b40
Author: Eugene Koifman 
Authored: Mon Sep 19 17:14:03 2016 -0700
Committer: Eugene Koifman 
Committed: Mon Sep 19 17:14:03 2016 -0700

--
 ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/3c55115b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java 
b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
index a446999..da7505b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
@@ -105,7 +105,7 @@ public class DbTxnManager extends HiveTxnManagerImpl {
   };
 
   private static AtomicInteger heartbeaterMSClientCount = new AtomicInteger(0);
-  private int heartbeaterThreadPoolSize = 0;
+  private static int heartbeaterThreadPoolSize = 0;
 
   private static SynchronizedMetaStoreClient getThreadLocalMSClient() {
 return threadLocalMSClient.get();
@@ -625,6 +625,7 @@ public class DbTxnManager extends HiveTxnManagerImpl {
   public static class HeartbeaterThread extends Thread {
 public HeartbeaterThread(Runnable target, String name) {
   super(target, name);
+  setDaemon(true);
 }
 
 @Override
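To see why the one-line fix works, here is a self-contained sketch (illustrative, not Hive code): a non-daemon thread keeps the JVM alive after main() returns, which is exactly the shutdown hang that marking HeartbeaterThread as a daemon avoids.

public class DaemonDemo {
  public static void main(String[] args) {
    Thread heartbeater = new Thread(() -> {
      while (true) {
        try {
          Thread.sleep(1000); // pretend to send a heartbeat every second
        } catch (InterruptedException e) {
          return;
        }
      }
    }, "Heartbeater");
    heartbeater.setDaemon(true); // must be set before start()
    heartbeater.start();
    // main() ends here; because the thread is a daemon the JVM exits
    // immediately instead of hanging on the endless loop.
  }
}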



[21/44] hive git commit: HIVE-14817. Shutdown the SessionManager timeoutChecker thread properly upon shutdown. (Siddharth Seth, reviewed by Thejas Nair)

2016-09-30 Thread sershe
HIVE-14817. Shutdown the SessionManager timeoutChecker thread properly upon 
shutdown. (Siddharth Seth, reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e08d94e5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e08d94e5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e08d94e5

Branch: refs/heads/hive-14535
Commit: e08d94e57d99245ebaa90c4be69dade84ba27172
Parents: 990927e
Author: Siddharth Seth 
Authored: Fri Sep 23 14:56:57 2016 -0700
Committer: Siddharth Seth 
Committed: Fri Sep 23 14:56:57 2016 -0700

--
 .../service/cli/session/SessionManager.java | 33 +++-
 1 file changed, 25 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e08d94e5/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
--
diff --git 
a/service/src/java/org/apache/hive/service/cli/session/SessionManager.java 
b/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
index 15bab06..76e759f 100644
--- a/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
+++ b/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
@@ -185,14 +185,20 @@ public class SessionManager extends CompositeService {
     }
   }
 
+  private final Object timeoutCheckerLock = new Object();
+
   private void startTimeoutChecker() {
     final long interval = Math.max(checkInterval, 3000l);  // minimum 3 seconds
-    Runnable timeoutChecker = new Runnable() {
+    final Runnable timeoutChecker = new Runnable() {
       @Override
       public void run() {
-        for (sleepInterval(interval); !shutdown; sleepInterval(interval)) {
+        sleepFor(interval);
+        while (!shutdown) {
           long current = System.currentTimeMillis();
           for (HiveSession session : new ArrayList<HiveSession>(handleToSession.values())) {
+            if (shutdown) {
+              break;
+            }
             if (sessionTimeout > 0 && session.getLastAccessTime() + sessionTimeout <= current
                 && (!checkOperation || session.getNoOperationTime() > sessionTimeout)) {
               SessionHandle handle = session.getSessionHandle();
@@ -207,24 +213,35 @@ public class SessionManager extends CompositeService {
             session.closeExpiredOperations();
           }
         }
+        sleepFor(interval);
       }
     }
 
-    private void sleepInterval(long interval) {
-      try {
-        Thread.sleep(interval);
-      } catch (InterruptedException e) {
-        // ignore
+    private void sleepFor(long interval) {
+      synchronized (timeoutCheckerLock) {
+        try {
+          timeoutCheckerLock.wait(interval);
+        } catch (InterruptedException e) {
+          // Ignore, and break.
+        }
       }
     }
   };
   backgroundOperationPool.execute(timeoutChecker);
   }
 
+  private void shutdownTimeoutChecker() {
+    shutdown = true;
+    synchronized (timeoutCheckerLock) {
+      timeoutCheckerLock.notify();
+    }
+  }
+
+
   @Override
   public synchronized void stop() {
     super.stop();
-    shutdown = true;
+    shutdownTimeoutChecker();
     if (backgroundOperationPool != null) {
       backgroundOperationPool.shutdown();
       long timeout = hiveConf.getTimeVar(
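The core of the change is replacing Thread.sleep() with a timed wait on a lock object, so stop() can end the sleep immediately with notify(). A minimal sketch of the pattern (illustrative names, not the HiveServer2 code):

public class InterruptibleSleeper {
  private final Object lock = new Object();
  private volatile boolean shutdown = false;

  // Worker side: waits up to millis, but returns early when shutdown() is called.
  void sleepFor(long millis) {
    synchronized (lock) {
      try {
        lock.wait(millis);
      } catch (InterruptedException ignored) {
      }
    }
  }

  boolean isShutdown() {
    return shutdown;
  }

  // Stopper side: flip the flag first, then wake any waiter.
  void shutdown() {
    shutdown = true;
    synchronized (lock) {
      lock.notify();
    }
  }

  public static void main(String[] args) throws InterruptedException {
    InterruptibleSleeper s = new InterruptibleSleeper();
    Thread worker = new Thread(() -> {
      while (!s.isShutdown()) {
        s.sleepFor(60_000); // would block a minute; shutdown() ends it at once
      }
    });
    worker.start();
    Thread.sleep(100);
    s.shutdown();
    worker.join(); // returns promptly instead of after the full minute
  }
}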



[29/44] hive git commit: HIVE-7224: Set incremental printing to true by default in Beeline (Sahil Takiar, reviewed by Thejas M Nair)

2016-09-30 Thread sershe
HIVE-7224: Set incremental printing to true by default in Beeline (Sahil 
Takiar, reviewed by Thejas M Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7d3da177
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7d3da177
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7d3da177

Branch: refs/heads/hive-14535
Commit: 7d3da1778fdbfdb9b3eb0a19a10260b0258e1f87
Parents: d3b88f6
Author: Sahil Takiar 
Authored: Tue Sep 27 17:41:59 2016 -0500
Committer: Sergio Pena 
Committed: Tue Sep 27 17:41:59 2016 -0500

--
 beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/7d3da177/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java
--
diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java 
b/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java
index 59fbca3..57b9c46 100644
--- a/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java
+++ b/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java
@@ -74,7 +74,7 @@ class BeeLineOpts implements Completer {
   private boolean autoCommit = false;
   private boolean verbose = false;
   private boolean force = false;
-  private boolean incremental = false;
+  private boolean incremental = true;
   private int incrementalBufferRows = DEFAULT_INCREMENTAL_BUFFER_ROWS;
   private boolean showWarnings = false;
   private boolean showNestedErrs = false;
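A rough sketch of the tradeoff behind this new default (illustrative only, not BeeLine's display code): buffered output can size columns because it sees every row first, while incremental output streams each row in constant memory, which is safer for large result sets.

import java.util.Arrays;
import java.util.List;

public class IncrementalDemo {
  // Buffered: must hold all rows to compute the column width before printing.
  static void printBuffered(List<String> rows) {
    int width = 1;
    for (String r : rows) {
      width = Math.max(width, r.length());
    }
    for (String r : rows) {
      System.out.printf("%-" + width + "s%n", r);
    }
  }

  // Incremental: each row is printed as soon as it arrives; memory stays flat.
  static void printIncremental(Iterable<String> rows) {
    for (String r : rows) {
      System.out.println(r);
    }
  }

  public static void main(String[] args) {
    List<String> rows = Arrays.asList("a", "bbb", "cc");
    printBuffered(rows);
    printIncremental(rows);
  }
}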



[11/44] hive git commit: HIVE-14783 : bucketing column should be part of sorting for delete/update operation when spdo is on (Ashutosh Chauhan via Prasanth J) Addendum patch

2016-09-30 Thread sershe
HIVE-14783 : bucketing column should be part of sorting for delete/update 
operation when spdo is on (Ashutosh Chauhan via Prasanth J)
Addendum patch

Signed-off-by: Ashutosh Chauhan 


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ed82cfa9
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ed82cfa9
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ed82cfa9

Branch: refs/heads/hive-14535
Commit: ed82cfa914769cfabfc7460b7b5abbdae71e562a
Parents: 91082e5
Author: Ashutosh Chauhan 
Authored: Wed Sep 21 15:18:37 2016 -0700
Committer: Ashutosh Chauhan 
Committed: Wed Sep 21 15:19:18 2016 -0700

--
 .../hadoop/hive/ql/exec/FileSinkOperator.java   | 10 +-
 .../hadoop/hive/ql/exec/ReduceSinkOperator.java |  9 +
 .../optimizer/SortedDynPartitionOptimizer.java  |  4 ++--
 .../dynpart_sort_optimization_acid.q.out| 20 ++--
 4 files changed, 26 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/ed82cfa9/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
index e386717..eeba6cd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
@@ -766,19 +766,19 @@ public class FileSinkOperator extends TerminalOperator<FileSinkDesc> implements
         if (fpaths.acidLastBucket != bucketNum) {
           fpaths.acidLastBucket = bucketNum;
           // Switch files
-          fpaths.updaters[++fpaths.acidFileOffset] = HiveFileFormatUtils.getAcidRecordUpdater(
-              jc, conf.getTableInfo(), bucketNum, conf, fpaths.outPaths[fpaths.acidFileOffset],
+          fpaths.updaters[conf.getDpSortState().equals(DPSortState.PARTITION_BUCKET_SORTED) ? 0 : ++fpaths.acidFileOffset] = HiveFileFormatUtils.getAcidRecordUpdater(
+              jc, conf.getTableInfo(), bucketNum, conf, fpaths.outPaths[conf.getDpSortState().equals(DPSortState.PARTITION_BUCKET_SORTED) ? 0 :fpaths.acidFileOffset],
               rowInspector, reporter, 0);
           if (isDebugEnabled) {
             LOG.debug("Created updater for bucket number " + bucketNum + " using file " +
-                fpaths.outPaths[fpaths.acidFileOffset]);
+                fpaths.outPaths[conf.getDpSortState().equals(DPSortState.PARTITION_BUCKET_SORTED) ? 0 :fpaths.acidFileOffset]);
           }
         }
 
         if (conf.getWriteType() == AcidUtils.Operation.UPDATE) {
-          fpaths.updaters[fpaths.acidFileOffset].update(conf.getTransactionId(), row);
+          fpaths.updaters[conf.getDpSortState().equals(DPSortState.PARTITION_BUCKET_SORTED) ? 0 :fpaths.acidFileOffset].update(conf.getTransactionId(), row);
         } else if (conf.getWriteType() == AcidUtils.Operation.DELETE) {
-          fpaths.updaters[fpaths.acidFileOffset].delete(conf.getTransactionId(), row);
+          fpaths.updaters[conf.getDpSortState().equals(DPSortState.PARTITION_BUCKET_SORTED) ? 0 :fpaths.acidFileOffset].delete(conf.getTransactionId(), row);
         } else {
           throw new HiveException("Unknown write type " + conf.getWriteType().toString());
         }

http://git-wip-us.apache.org/repos/asf/hive/blob/ed82cfa9/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
index a9885d8..4eea6b9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
@@ -35,6 +35,8 @@ import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
 import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
@@ -78,6 +80,7 @@ public class ReduceSinkOperator extends TerminalOperator<ReduceSinkDesc>
   private transient ObjectInspector[] partitionObjectInspectors;
   private transient ObjectInspector[] bucketObjectInspectors;
   private transient int buckColIdxInKey;
+  private transient int buckColIdxInKeyForAcid = -1;
   private boolean firstRow;
   private transient int tag;

[17/44] hive git commit: HIVE-14805: Subquery inside a view will have the object in the subquery as the direct input (Aihua Xu, reviewed by Yongzhi Chen)

2016-09-30 Thread sershe
HIVE-14805: Subquery inside a view will have the object in the subquery as the 
direct input (Aihua Xu, reviewed by Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f284b6d0
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f284b6d0
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f284b6d0

Branch: refs/heads/hive-14535
Commit: f284b6d04aea51bce4e438f31a7b5ed8597df8fd
Parents: da376eb
Author: Aihua Xu 
Authored: Wed Sep 21 13:59:14 2016 -0400
Committer: Aihua Xu 
Committed: Fri Sep 23 09:07:15 2016 -0400

--
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  2 +-
 .../hadoop/hive/ql/plan/TestViewEntity.java | 31 +++
 .../results/clientpositive/cbo_union_view.q.out | 12 +
 .../results/clientpositive/ppd_union_view.q.out | 12 +
 .../results/clientpositive/union_view.q.out | 54 
 5 files changed, 110 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/f284b6d0/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 577d006..747f387 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -997,7 +997,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
 // Recursively do the first phase of semantic analysis for the subquery
 QBExpr qbexpr = new QBExpr(alias);
 
-doPhase1QBExpr(subqref, qbexpr, qb.getId(), alias);
+doPhase1QBExpr(subqref, qbexpr, qb.getId(), alias, qb.isInsideView());
 
 // If the alias is already there then we have a conflict
 if (qb.exists(alias)) {

http://git-wip-us.apache.org/repos/asf/hive/blob/f284b6d0/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
--
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java 
b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
index 2d70a1b..fa01416 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
@@ -141,4 +141,35 @@ public class TestViewEntity {
 
   }
 
+  /**
+   * Verify that the query with the subquery inside a view will have the correct
+   * direct and indirect inputs.
+   * @throws Exception
+   */
+  @Test
+  public void testSubQueryInSubView() throws Exception {
+    String prefix = "tvsubqueryinsubview" + NAME_PREFIX;
+    final String tab1 = prefix + "t";
+    final String view1 = prefix + "v";
+    final String view2 = prefix + "v2";
+
+    int ret = driver.run("create table " + tab1 + "(id int)").getResponseCode();
+    assertEquals("Checking command success", 0, ret);
+    ret = driver.run("create view " + view1 + " as select * from " + tab1).getResponseCode();
+    assertEquals("Checking command success", 0, ret);
+
+    ret = driver.run("create view " + view2 + " as select * from (select * from " + view1 + ") x").getResponseCode();
+    assertEquals("Checking command success", 0, ret);
+
+    driver.compile("select * from " + view2);
+    // view entity
+    assertEquals("default@" + view2, CheckInputReadEntity.readEntities[0].getName());
+
+    // table1 and view1 as second read entity
+    assertEquals("default@" + view1, CheckInputReadEntity.readEntities[1].getName());
+    assertFalse("Table is not direct input", CheckInputReadEntity.readEntities[1].isDirect());
+    assertEquals("default@" + tab1, CheckInputReadEntity.readEntities[2].getName());
+    assertFalse("Table is not direct input", CheckInputReadEntity.readEntities[2].isDirect());
+
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/f284b6d0/ql/src/test/results/clientpositive/cbo_union_view.q.out
--
diff --git a/ql/src/test/results/clientpositive/cbo_union_view.q.out 
b/ql/src/test/results/clientpositive/cbo_union_view.q.out
index ed6bba9..d179c28 100644
--- a/ql/src/test/results/clientpositive/cbo_union_view.q.out
+++ b/ql/src/test/results/clientpositive/cbo_union_view.q.out
@@ -64,6 +64,8 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: src_union_1
+            properties:
+              insideView TRUE
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (key = 86) (type: boolean)
@@ -87,6 +89,8 @@ STAGE PLANS:
               serde: 

[23/44] hive git commit: HIVE-14831: Missing Druid dependencies at runtime (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2016-09-30 Thread sershe
HIVE-14831: Missing Druid dependencies at runtime (Jesus Camacho Rodriguez, 
reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4ce5fe13
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4ce5fe13
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4ce5fe13

Branch: refs/heads/hive-14535
Commit: 4ce5fe131a5861b07c024c0529f6b2ebe63a4456
Parents: a213115
Author: Jesus Camacho Rodriguez 
Authored: Fri Sep 23 19:45:56 2016 +0100
Committer: Jesus Camacho Rodriguez 
Committed: Sat Sep 24 07:51:25 2016 +0100

--
 druid-handler/pom.xml | 11 +++
 1 file changed, 3 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/4ce5fe13/druid-handler/pom.xml
--
diff --git a/druid-handler/pom.xml b/druid-handler/pom.xml
index 2173cdc..0db542e 100644
--- a/druid-handler/pom.xml
+++ b/druid-handler/pom.xml
@@ -150,12 +150,8 @@
                   <shadedPattern>org.apache.hive.druid.io.druid</shadedPattern>
                 </relocation>
                 <relocation>
-                  <pattern>com.metamx.emitter</pattern>
-                  <shadedPattern>org.apache.hive.druid.com.metamx.emitter</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>com.metamx.http.client</pattern>
-                  <shadedPattern>org.apache.hive.druid.com.metamx.http.client</shadedPattern>
+                  <pattern>com.metamx</pattern>
+                  <shadedPattern>org.apache.hive.druid.com.metamx</shadedPattern>
                 </relocation>
                 <relocation>
                   <pattern>io.netty</pattern>
@@ -173,8 +169,7 @@
               <artifactSet>
                 <includes>
                   <include>io.druid:*</include>
-                  <include>com.metamx:emitter:*</include>
-                  <include>com.metamx:http-client:*</include>
+                  <include>com.metamx:*</include>
                   <include>io.netty:*</include>
                   <include>com.fasterxml.jackson.core:*</include>
                   <include>com.fasterxml.jackson.datatype:*</include>



[42/44] hive git commit: HIVE-14775: Cleanup IOException usage in Metrics APIs (Barna Zsombor Klara reviewed by Peter Vary, Gabor Szadovszky, Szehon Ho, Mohit Sabharwal)

2016-09-30 Thread sershe
HIVE-14775: Cleanup IOException usage in Metrics APIs (Barna Zsombor Klara 
reviewed by Peter Vary, Gabor Szadovszky, Szehon Ho, Mohit Sabharwal)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f903c4af
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f903c4af
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f903c4af

Branch: refs/heads/hive-14535
Commit: f903c4afad360ea66ec266abe8a3f414935c82ff
Parents: 45c1a09
Author: Mohit Sabharwal 
Authored: Fri Sep 30 15:13:14 2016 -0400
Committer: Mohit Sabharwal 
Committed: Fri Sep 30 15:13:14 2016 -0400

--
 .../hive/common/metrics/LegacyMetrics.java  |  96 ++---
 .../hive/common/metrics/MetricsMBean.java   |  13 +--
 .../hive/common/metrics/MetricsMBeanImpl.java   |  16 +--
 .../hive/common/metrics/common/Metrics.java |  31 ++
 .../metrics/metrics2/CodahaleMetrics.java   |  70 ++---
 .../apache/hadoop/hive/ql/log/PerfLogger.java   |  33 ++
 .../hive/common/metrics/TestLegacyMetrics.java  | 103 ++-
 .../hive/metastore/HMSMetricsListener.java  |  52 ++
 .../hadoop/hive/metastore/HiveMetaStore.java|  13 +--
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  13 +--
 .../hadoop/hive/ql/exec/mr/MapRedTask.java  |   6 +-
 .../hadoop/hive/ql/exec/mr/MapredLocalTask.java |   6 +-
 .../hadoop/hive/ql/exec/spark/SparkTask.java|   6 +-
 .../apache/hadoop/hive/ql/exec/tez/TezTask.java |   6 +-
 .../hive/service/cli/operation/Operation.java   |  22 ++--
 15 files changed, 176 insertions(+), 310 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/f903c4af/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java
--
diff --git 
a/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java 
b/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java
index 9be9b50..ba2267b 100644
--- a/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java
+++ b/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java
@@ -21,11 +21,13 @@ import org.apache.hadoop.hive.common.metrics.common.Metrics;
 import org.apache.hadoop.hive.common.metrics.common.MetricsScope;
 import org.apache.hadoop.hive.common.metrics.common.MetricsVariable;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
 import java.lang.management.ManagementFactory;
 import java.util.HashMap;
 
+import javax.management.JMException;
 import javax.management.MBeanServer;
 import javax.management.MalformedObjectNameException;
 import javax.management.ObjectName;
@@ -47,6 +49,8 @@ import javax.management.ObjectName;
  */
 public class LegacyMetrics implements Metrics {
 
+  private static final Logger LOG = LoggerFactory.getLogger(LegacyMetrics.class);
+
   private LegacyMetrics() {
 // block
   }
@@ -59,12 +63,12 @@ public class LegacyMetrics implements Metrics {
*/
   public static class LegacyMetricsScope implements MetricsScope {
 
-final LegacyMetrics metrics;
+private final LegacyMetrics metrics;
 
-final String name;
-final String numCounter;
-final String timeCounter;
-final String avgTimeCounter;
+private final String name;
+private final String numCounter;
+private final String timeCounter;
+private final String avgTimeCounter;
 
 private boolean isOpen = false;
 private Long startTime = null;
@@ -72,9 +76,8 @@ public class LegacyMetrics implements Metrics {
     /**
      * Instantiates a named scope - intended to only be called by Metrics, so locally scoped.
      * @param name - name of the variable
-     * @throws IOException
      */
-    private LegacyMetricsScope(String name, LegacyMetrics metrics) throws IOException {
+    private LegacyMetricsScope(String name, LegacyMetrics metrics) {
       this.metrics = metrics;
       this.name = name;
       this.numCounter = name + ".n";
@@ -83,33 +86,41 @@ public class LegacyMetrics implements Metrics {
       open();
     }
 
-    public Long getNumCounter() throws IOException {
-      return (Long) metrics.get(numCounter);
+    public Long getNumCounter() {
+      try {
+        return (Long) metrics.get(numCounter);
+      } catch (JMException e) {
+        LOG.warn("Could not find counter value for " + numCounter + ", returning null instead. ", e);
+        return null;
+      }
     }
 
-    public Long getTimeCounter() throws IOException {
-      return (Long) metrics.get(timeCounter);
+    public Long getTimeCounter() {
+      try {
+        return (Long) metrics.get(timeCounter);
+      } catch (JMException e) {
+

[43/44] hive git commit: HIVE-14865 Fix comments after HIVE-14350 (Eugene Koifman, reviewed by Alan Gates)

2016-09-30 Thread sershe
HIVE-14865 Fix comments after HIVE-14350 (Eugene Koifman, reviewed by Alan 
Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/297b4433
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/297b4433
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/297b4433

Branch: refs/heads/hive-14535
Commit: 297b4433cd2fdfb84182668bf7b1c524e92c6593
Parents: f903c4a
Author: Eugene Koifman 
Authored: Fri Sep 30 15:10:23 2016 -0700
Committer: Eugene Koifman 
Committed: Fri Sep 30 15:10:23 2016 -0700

--
 ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java | 6 --
 1 file changed, 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/297b4433/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
index cda5f39..f1eba5d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
@@ -866,7 +866,6 @@ public class AcidUtils {
    * {@link txnList}.  Note that 'original' files are logically a base_Long.MIN_VALUE and thus
    * cannot have any data for an open txn.  We could check {@link deltas} has files to cover
    * [1,n] w/o gaps but this would almost never happen...*/
-  //todo: this should only care about 'open' tnxs (HIVE-14211)
   long[] exceptions = txnList.getInvalidTransactions();
   String minOpenTxn = exceptions != null && exceptions.length > 0 ?
     Long.toString(exceptions[0]) : "x";
@@ -910,11 +909,6 @@ public class AcidUtils {
    * files within the snapshot.
    */
   private static boolean isValidBase(long baseTxnId, ValidTxnList txnList) {
-    /*This implementation is suboptimal.  It considers open/aborted txns invalid while we are only
-    * concerned with 'open' ones.  (Compaction removes any data that belongs to aborted txns and
-    * reads skip anything that belongs to aborted txn, thus base_7 is still OK if the only exception
-    * is txn 5 which is aborted).  So this implementation can generate false positives. (HIVE-14211)
-    * */
     if(baseTxnId == Long.MIN_VALUE) {
       //such base is created by 1st compaction in case of non-acid to acid table conversion
       //By definition there are no open txns with id < 1.



[31/44] hive git commit: HIVE-14843: HIVE-14751 introduced ambiguity in grammar (Jesus Camacho Rodriguez, reviewed by Pengcheng Xiong)

2016-09-30 Thread sershe
HIVE-14843: HIVE-14751 introduced ambiguity in grammar (Jesus Camacho 
Rodriguez, reviewed by Pengcheng Xiong)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/667e9dd5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/667e9dd5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/667e9dd5

Branch: refs/heads/hive-14535
Commit: 667e9dd50ef4cfc9f743f8716da34339ec012f91
Parents: 871b55f
Author: Jesus Camacho Rodriguez 
Authored: Tue Sep 27 17:00:45 2016 +0100
Committer: Jesus Camacho Rodriguez 
Committed: Wed Sep 28 08:13:50 2016 +0100

--
 ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/667e9dd5/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
index e6b70a0..04f87b8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
@@ -347,8 +347,8 @@ intervalLiteral
 
 intervalQualifiers
     :
-    KW_YEAR KW_TO KW_MONTH -> TOK_INTERVAL_YEAR_MONTH_LITERAL
-    | KW_DAY KW_TO KW_SECOND -> TOK_INTERVAL_DAY_TIME_LITERAL
+    (KW_YEAR KW_TO) => KW_YEAR KW_TO KW_MONTH -> TOK_INTERVAL_YEAR_MONTH_LITERAL
+    | (KW_DAY KW_TO) => KW_DAY KW_TO KW_SECOND -> TOK_INTERVAL_DAY_TIME_LITERAL
     | KW_YEAR -> TOK_INTERVAL_YEAR_LITERAL
     | KW_MONTH -> TOK_INTERVAL_MONTH_LITERAL
     | KW_DAY -> TOK_INTERVAL_DAY_LITERAL
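For context on the fix: the `( ... ) =>` prefixes added here are ANTLR syntactic predicates. They tell the generated parser to commit to the two-keyword alternatives (YEAR TO MONTH, DAY TO SECOND) only when that lookahead actually matches, so these rules no longer conflict with the bare KW_YEAR and KW_DAY alternatives below them.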



[35/44] hive git commit: HiveServer2: Provide the user with different error messages depending on the Thrift client exception code (Peter Vary via Chaoyu Tang)

2016-09-30 Thread sershe
HiveServer2: Provide the user with different error messages depending on the 
Thrift client exception code (Peter Vary via Chaoyu Tang)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d16d4f1b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d16d4f1b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d16d4f1b

Branch: refs/heads/hive-14535
Commit: d16d4f1bcc43d6ebcab0eaf5bc635fb88b60be5f
Parents: 291f3d5
Author: ctang 
Authored: Thu Sep 29 11:25:21 2016 -0400
Committer: ctang 
Committed: Thu Sep 29 11:25:21 2016 -0400

--
 .../java/org/apache/hive/beeline/BeeLine.java   | 20 +-
 beeline/src/main/resources/BeeLine.properties   | 11 ++-
 .../beeline/TestBeeLineExceptionHandling.java   | 72 
 3 files changed, 101 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/d16d4f1b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
--
diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLine.java 
b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
index 5322ca6..79922d2 100644
--- a/beeline/src/java/org/apache/hive/beeline/BeeLine.java
+++ b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
@@ -1778,7 +1778,25 @@ public class BeeLine implements Closeable {
     }
 
     if (e.getCause() instanceof TTransportException) {
-      error(loc("hs2-unavailable"));
+      switch (((TTransportException)e.getCause()).getType()) {
+        case TTransportException.ALREADY_OPEN:
+          error(loc("hs2-connection-already-open"));
+          break;
+        case TTransportException.END_OF_FILE:
+          error(loc("hs2-unexpected-end-of-file"));
+          break;
+        case TTransportException.NOT_OPEN:
+          error(loc("hs2-could-not-open-connection"));
+          break;
+        case TTransportException.TIMED_OUT:
+          error(loc("hs2-connection-timed-out"));
+          break;
+        case TTransportException.UNKNOWN:
+          error(loc("hs2-unknown-connection-problem"));
+          break;
+        default:
+          error(loc("hs2-unexpected-error"));
+      }
     }
 
     error(loc(e instanceof SQLWarning ? "Warning" : "Error",

http://git-wip-us.apache.org/repos/asf/hive/blob/d16d4f1b/beeline/src/main/resources/BeeLine.properties
--
diff --git a/beeline/src/main/resources/BeeLine.properties 
b/beeline/src/main/resources/BeeLine.properties
index 13321d2..ad79c01 100644
--- a/beeline/src/main/resources/BeeLine.properties
+++ b/beeline/src/main/resources/BeeLine.properties
@@ -142,7 +142,16 @@ active-connections: 0#No active connections|1#{0} active connection:|1<{0} activ
 
 time-ms: ({0,number,#.###} seconds)
 
-hs2-unavailable: HS2 may be unavailable, check server status
+hs2-connection-already-open: Socket already connected.
+hs2-unexpected-end-of-file: Unexpected end of file when reading from HS2 server. The root \
+cause might be too many concurrent connections. Please ask the administrator to check the number \
+of active connections, and adjust hive.server2.thrift.max.worker.threads if applicable.
+hs2-could-not-open-connection: Could not open connection to the HS2 server. Please check the \
+server URI and if the URI is correct, then ask the administrator to check the server status.
+hs2-connection-timed-out: Connection timeout when communicating with HS2 server.
+hs2-unknown-connection-problem: Unknown HS2 problem when communicating with Thrift server.
+hs2-unexpected-error: Unexpected HS2 error when communicating with the Thrift server.
+
 
 cmd-usage: Usage: java org.apache.hive.cli.beeline.BeeLine \n \
 \  -u <database url>               the JDBC URL to connect to\n \

http://git-wip-us.apache.org/repos/asf/hive/blob/d16d4f1b/beeline/src/test/org/apache/hive/beeline/TestBeeLineExceptionHandling.java
--
diff --git 
a/beeline/src/test/org/apache/hive/beeline/TestBeeLineExceptionHandling.java 
b/beeline/src/test/org/apache/hive/beeline/TestBeeLineExceptionHandling.java
new file mode 100644
index 000..08579e8
--- /dev/null
+++ b/beeline/src/test/org/apache/hive/beeline/TestBeeLineExceptionHandling.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * 

[02/44] hive git commit: HIVE-14790: Jenkins is not displaying test results because 'set -e' is aborting the script too soon (Sergio Pena)

2016-09-30 Thread sershe
HIVE-14790: Jenkins is not displaying test results because 'set -e' is aborting 
the script too soon (Sergio Pena)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/eab7b40c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/eab7b40c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/eab7b40c

Branch: refs/heads/hive-14535
Commit: eab7b40c7d197f3b4a7ea97fe5424a1100ef2ad5
Parents: 83ef6f9
Author: Sergio Pena 
Authored: Mon Sep 19 18:26:35 2016 -0500
Committer: Sergio Pena 
Committed: Mon Sep 19 18:27:21 2016 -0500

--
 dev-support/jenkins-execute-build.sh | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/eab7b40c/dev-support/jenkins-execute-build.sh
--
diff --git a/dev-support/jenkins-execute-build.sh 
b/dev-support/jenkins-execute-build.sh
index a9935e1..b2ba8e9 100644
--- a/dev-support/jenkins-execute-build.sh
+++ b/dev-support/jenkins-execute-build.sh
@@ -118,6 +118,8 @@ else
echo "ISSUE: unspecified PROFILE: $BUILD_PROFILE"
 fi
 
+set +e
+
 call_ptest_server --testHandle "$TEST_HANDLE" --endpoint "$PTEST_API_ENDPOINT" --logsEndpoint "$PTEST_LOG_ENDPOINT" \
 	--profile "$BUILD_PROFILE" ${optionalArgs[@]} "$@"
 



[16/44] hive git commit: HIVE-14579: Add support for date extract (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2016-09-30 Thread sershe
HIVE-14579: Add support for date extract (Jesus Camacho Rodriguez, reviewed by 
Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/da376eba
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/da376eba
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/da376eba

Branch: refs/heads/hive-14535
Commit: da376eba44ab090d693c2f9eb3f94ade04942b52
Parents: e532549
Author: Jesus Camacho Rodriguez 
Authored: Fri Sep 23 10:28:45 2016 +0100
Committer: Jesus Camacho Rodriguez 
Committed: Fri Sep 23 10:28:45 2016 +0100

--
 .../org/apache/hadoop/hive/ql/parse/HiveLexer.g |   1 +
 .../hadoop/hive/ql/parse/IdentifiersParser.g|  23 +++
 ql/src/test/queries/clientpositive/extract.q|  43 
 .../test/results/clientpositive/extract.q.out   | 200 +++
 4 files changed, 267 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/da376eba/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
index af659ad..b623187 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
@@ -331,6 +331,7 @@ KW_RELY: 'RELY';
 KW_NORELY: 'NORELY';
 KW_KEY: 'KEY';
 KW_ABORT: 'ABORT';
+KW_EXTRACT: 'EXTRACT';
 KW_FLOOR: 'FLOOR';
 
 // Operators

http://git-wip-us.apache.org/repos/asf/hive/blob/da376eba/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
index 7842d50..e6b70a0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
@@ -265,6 +265,28 @@ floorDateQualifiers
 | KW_SECOND -> Identifier["floor_second"]
 ;
 
+extractExpression
+:
+KW_EXTRACT
+LPAREN
+  (timeUnit=timeQualifiers)
+  KW_FROM
+  expression
+RPAREN -> ^(TOK_FUNCTION $timeUnit expression)
+;
+
+timeQualifiers
+:
+KW_YEAR -> Identifier["year"]
+| KW_QUARTER -> Identifier["quarter"]
+| KW_MONTH -> Identifier["month"]
+| KW_WEEK -> Identifier["weekofyear"]
+| KW_DAY -> Identifier["day"]
+| KW_HOUR -> Identifier["hour"]
+| KW_MINUTE -> Identifier["minute"]
+| KW_SECOND -> Identifier["second"]
+;
+
 constant
 @init { gParent.pushMsg("constant", state); }
 @after { gParent.popMsg(state); }
@@ -347,6 +369,7 @@ atomExpression
 (KW_NULL) => KW_NULL -> TOK_NULL
 | (constant) => constant
 | castExpression
+| extractExpression
 | floorExpression
 | caseExpression
 | whenExpression

http://git-wip-us.apache.org/repos/asf/hive/blob/da376eba/ql/src/test/queries/clientpositive/extract.q
--
diff --git a/ql/src/test/queries/clientpositive/extract.q 
b/ql/src/test/queries/clientpositive/extract.q
new file mode 100644
index 000..c09574b
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/extract.q
@@ -0,0 +1,43 @@
+drop table extract_udf;
+
+create table extract_udf (t timestamp);
+from (select * from src tablesample (1 rows)) s
+  insert overwrite table extract_udf 
+select '2011-05-06 07:08:09.1234567';
+
+explain
+select day(t)
+from extract_udf;
+
+select day(t)
+from extract_udf;
+
+-- new syntax
+explain
+select extract(day from t)
+from extract_udf;
+
+select extract(day from t)
+from extract_udf;
+
+
+select extract(second from t)
+from extract_udf;
+
+select extract(minute from t)
+from extract_udf;
+
+select extract(hour from t)
+from extract_udf;
+
+select extract(week from t)
+from extract_udf;
+
+select extract(month from t)
+from extract_udf;
+
+select extract(quarter from t)
+from extract_udf;
+
+select extract(year from t)
+from extract_udf;

http://git-wip-us.apache.org/repos/asf/hive/blob/da376eba/ql/src/test/results/clientpositive/extract.q.out
--
diff --git a/ql/src/test/results/clientpositive/extract.q.out 
b/ql/src/test/results/clientpositive/extract.q.out
new file mode 100644
index 000..73c9bc6
--- /dev/null
+++ b/ql/src/test/results/clientpositive/extract.q.out
@@ -0,0 +1,200 @@
+PREHOOK: query: drop table extract_udf
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table extract_udf
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table extract_udf (t timestamp)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default

[05/44] hive git commit: HIVE-14624 : LLAP: Use FQDN when submitting work to LLAP (Sergey Shelukhin, reviewed by Siddharth Seth)

2016-09-30 Thread sershe
HIVE-14624 : LLAP: Use FQDN when submitting work to LLAP (Sergey Shelukhin, 
reviewed by Siddharth Seth)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/19774029
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/19774029
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/19774029

Branch: refs/heads/hive-14535
Commit: 19774029c4c1d90982354c36840bb485d74faaf1
Parents: e297a15
Author: Sergey Shelukhin 
Authored: Tue Sep 20 11:30:49 2016 -0700
Committer: Sergey Shelukhin 
Committed: Tue Sep 20 11:30:59 2016 -0700

--
 .../java/org/apache/hadoop/hive/conf/HiveConf.java|  3 +++
 .../java/org/apache/hadoop/hive/llap/LlapUtil.java| 12 
 .../apache/hadoop/hive/llap/LlapBaseInputFormat.java  |  6 +++---
 .../hive/llap/tezplugins/LlapTaskCommunicator.java| 14 ++
 .../llap/tezplugins/TestLlapTaskCommunicator.java |  5 +
 5 files changed, 33 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/19774029/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 301159e..ccdfca6 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -393,6 +393,7 @@ public class HiveConf extends Configuration {
 llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_CONTAINER_ID.varname);
 llapDaemonVarsSetLocal.add(ConfVars.LLAP_VALIDATE_ACLS.varname);
 llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_LOGGER.varname);
+llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_AM_USE_FQDN.varname);
   }
 
   /**
@@ -2909,6 +2910,8 @@ public class HiveConf extends Configuration {
       new TimeValidator(TimeUnit.MILLISECONDS),
       "Amount of time to wait on connection failures to the AM from an LLAP daemon before\n" +
       "considering the AM to be dead.", "llap.am.liveness.connection.timeout-millis"),
+    LLAP_DAEMON_AM_USE_FQDN("hive.llap.am.use.fqdn", false,
+        "Whether to use FQDN of the AM machine when submitting work to LLAP."),
     // Not used yet - since the Writable RPC engine does not support this policy.
     LLAP_DAEMON_AM_LIVENESS_CONNECTION_SLEEP_BETWEEN_RETRIES_MS(
       "hive.llap.am.liveness.connection.sleep.between.retries.ms", "2000ms",

http://git-wip-us.apache.org/repos/asf/hive/blob/19774029/llap-common/src/java/org/apache/hadoop/hive/llap/LlapUtil.java
--
diff --git a/llap-common/src/java/org/apache/hadoop/hive/llap/LlapUtil.java 
b/llap-common/src/java/org/apache/hadoop/hive/llap/LlapUtil.java
index 0c04d9d..8352943 100644
--- a/llap-common/src/java/org/apache/hadoop/hive/llap/LlapUtil.java
+++ b/llap-common/src/java/org/apache/hadoop/hive/llap/LlapUtil.java
@@ -14,6 +14,8 @@
 package org.apache.hadoop.hive.llap;
 
 import java.io.IOException;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -25,6 +27,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto.Builder;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.slf4j.Logger;
@@ -180,4 +183,13 @@ public class LlapUtil {
   return sb.toString();
 }
   }
+
+  public static String getAmHostNameFromAddress(InetSocketAddress address, Configuration conf) {
+    if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.LLAP_DAEMON_AM_USE_FQDN)) {
+      return address.getHostName();
+    }
+    InetAddress ia = address.getAddress();
+    // getCanonicalHostName would either return FQDN, or an IP.
+    return (ia == null) ? address.getHostName() : ia.getCanonicalHostName();
+  }
 }
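A small runnable sketch of the resolution behavior the new helper relies on (illustrative, not Hive code): InetSocketAddress.getHostName() returns the name the address was created with, while InetAddress.getCanonicalHostName() performs a reverse lookup and yields an FQDN, falling back to the literal IP when resolution fails.

import java.net.InetAddress;
import java.net.InetSocketAddress;

public class FqdnDemo {
  public static void main(String[] args) {
    InetSocketAddress address = new InetSocketAddress("localhost", 8030);
    // Short name, exactly as the address was created.
    System.out.println(address.getHostName());
    InetAddress ia = address.getAddress();
    // Reverse lookup: an FQDN if resolution succeeds, otherwise the literal IP.
    System.out.println(ia == null ? address.getHostName() : ia.getCanonicalHostName());
  }
}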

http://git-wip-us.apache.org/repos/asf/hive/blob/19774029/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java
--
diff --git 
a/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java 
b/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java
index 7dae4fc..288a8eb 100644
--- 
a/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java
+++ 
b/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java
@@ -160,7 +160,7 @@ public class 

[08/44] hive git commit: HIVE-14801 : improve TestPartitionNameWhitelistValidation stability (Thejas Nair, reviewed by Daniel Dai)

2016-09-30 Thread sershe
HIVE-14801 : improve TestPartitionNameWhitelistValidation stability (Thejas 
Nair, reviewed by Daniel Dai)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0c392b18
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0c392b18
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0c392b18

Branch: refs/heads/hive-14535
Commit: 0c392b185d98b4fb380a33a535b5f528625a47e8
Parents: 96508d3
Author: Thejas Nair 
Authored: Wed Sep 21 11:56:50 2016 -0700
Committer: Thejas Nair 
Committed: Wed Sep 21 11:56:50 2016 -0700

--
 .../TestPartitionNameWhitelistValidation.java   | 53 ++--
 1 file changed, 15 insertions(+), 38 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/0c392b18/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestPartitionNameWhitelistValidation.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestPartitionNameWhitelistValidation.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestPartitionNameWhitelistValidation.java
index e0a905a..e3e175b 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestPartitionNameWhitelistValidation.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestPartitionNameWhitelistValidation.java
@@ -18,49 +18,38 @@
 
 package org.apache.hadoop.hive.metastore;
 
+import static org.junit.Assert.*;
+
 import java.util.ArrayList;
 import java.util.List;
 
-import junit.framework.Assert;
-import junit.framework.TestCase;
-
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.shims.ShimLoader;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 // Validate the metastore client call validatePartitionNameCharacters to ensure it throws
 // an exception if partition fields contain Unicode characters or commas
 
-public class TestPartitionNameWhitelistValidation extends TestCase {
+public class TestPartitionNameWhitelistValidation {
 
   private static final String partitionValidationPattern = "[\\x20-\\x7E&&[^,]]*";
+  private static HiveConf hiveConf;
+  private static HiveMetaStoreClient msc;
 
-  private HiveConf hiveConf;
-  private HiveMetaStoreClient msc;
-  private Driver driver;
-
-  @Override
-  protected void setUp() throws Exception {
-    super.setUp();
+  @BeforeClass
+  public static void setupBeforeClass() throws Exception {
     System.setProperty(HiveConf.ConfVars.METASTORE_PARTITION_NAME_WHITELIST_PATTERN.varname,
         partitionValidationPattern);
-    int port = MetaStoreUtils.findFreePort();
-    MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
-    hiveConf = new HiveConf(this.getClass());
-    hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
-    hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
+    hiveConf = new HiveConf();
     SessionState.start(new CliSessionState(hiveConf));
     msc = new HiveMetaStoreClient(hiveConf);
-    driver = new Driver(hiveConf);
   }
 
   // Runs an instance of DisallowUnicodePreEventListener
   // Returns whether or not it succeeded
   private boolean runValidation(List<String> partVals) {
-
     try {
       msc.validatePartitionNameCharacters(partVals);
     } catch (Exception e) {
@@ -72,74 +61,62 @@ public class TestPartitionNameWhitelistValidation extends TestCase {
 
   // Sample data
   private List<String> getPartValsWithUnicode() {
-
     List<String> partVals = new ArrayList<String>();
     partVals.add("klâwen");
     partVals.add("tägelîch");
 
     return partVals;
-
   }
 
   private List<String> getPartValsWithCommas() {
-
     List<String> partVals = new ArrayList<String>();
     partVals.add("a,b");
     partVals.add("c,d,e,f");
 
     return partVals;
-
   }
 
   private List<String> getPartValsWithValidCharacters() {
-
     List<String> partVals = new ArrayList<String>();
     partVals.add("part1");
     partVals.add("part2");
 
     return partVals;
-
   }
 
   @Test
   public void testAddPartitionWithCommas() {
-
-    Assert.assertFalse("Add a partition with commas in name",
+    assertFalse("Add a partition with commas in name",
         runValidation(getPartValsWithCommas()));
   }
 
   @Test
   public void testAddPartitionWithUnicode() {
-
-    Assert.assertFalse("Add a partition with unicode characters in name",
+    assertFalse("Add a partition with unicode characters in name",
         runValidation(getPartValsWithUnicode()));
   }
 
   @Test
   public void testAddPartitionWithValidPartVal() {
-
-    Assert.assertTrue("Add a partition 
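For readers less familiar with the JUnit 3 to 4 migration shown above, a minimal sketch (illustrative names): @BeforeClass runs once per class rather than before every test method, which is what allows the expensive conf and metastore client setup to become static fields initialized a single time.

import static org.junit.Assert.assertTrue;

import org.junit.BeforeClass;
import org.junit.Test;

public class LifecycleDemo {
  private static String shared;

  @BeforeClass
  public static void setupBeforeClass() {
    shared = "initialized once"; // expensive setup happens a single time
  }

  @Test
  public void testUsesSharedState() {
    assertTrue(shared.startsWith("initialized"));
  }
}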

[27/44] hive git commit: HIVE-14029: Update Spark version to 2.0.0 (Ferdinand Xu, via Li Rui, Szehon Ho and Sergio Pena)

2016-09-30 Thread sershe
HIVE-14029: Update Spark version to 2.0.0 (Ferdinand Xu, via Li Rui, Szehon Ho 
and Sergio Pena)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ac977cc8
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ac977cc8
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ac977cc8

Branch: refs/heads/hive-14535
Commit: ac977cc88757b49fbbd5c3bb236adcedcaae396c
Parents: 7d3da17
Author: Ferdinand Xu 
Authored: Wed Sep 28 01:44:32 2016 +0800
Committer: Ferdinand Xu 
Committed: Wed Sep 28 01:44:32 2016 +0800

--
 pom.xml | 12 ++-
 ql/pom.xml  | 26 +-
 .../exec/spark/HiveBaseFunctionResultList.java  | 96 +---
 .../hive/ql/exec/spark/HiveMapFunction.java |  2 +-
 .../hive/ql/exec/spark/HiveReduceFunction.java  |  2 +-
 .../hive/ql/exec/spark/SortByShuffler.java  | 84 -
 .../spark/status/impl/JobMetricsListener.java   |  4 +-
 .../ql/exec/spark/TestHiveKVResultCache.java|  5 +-
 spark-client/pom.xml| 15 ++-
 .../hive/spark/client/MetricsCollection.java|  8 +-
 .../apache/hive/spark/client/RemoteDriver.java  |  4 +-
 .../hive/spark/client/metrics/InputMetrics.java |  9 +-
 .../hive/spark/client/metrics/Metrics.java  |  6 +-
 .../client/metrics/ShuffleReadMetrics.java  | 18 ++--
 .../client/metrics/ShuffleWriteMetrics.java |  4 +-
 .../spark/client/TestMetricsCollection.java |  8 +-
 16 files changed, 153 insertions(+), 150 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/ac977cc8/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 2fb78cd..756cc34 100644
--- a/pom.xml
+++ b/pom.xml
@@ -145,7 +145,7 @@
 2.4.0
 1.9.13
 
-2.4.2
+2.6.5
 5.5.23
 2.3.4
 2.3.1
@@ -155,6 +155,8 @@
 3.0.1
 7.6.0.v20120127
 1.14
+
+    <glassfish.jersey.version>2.22.2</glassfish.jersey.version>
 2.12
 1.1
 2.8.1
@@ -168,7 +170,7 @@
 2.3
 1.9.5
 2.0.0-M5
-4.0.23.Final
+4.0.29.Final
 1.8.1
 0.16.0
 2.5.0
@@ -178,9 +180,9 @@
 0.8.4
 0.90.2-incubating
 2.2.0
-    <spark.version>1.6.0</spark.version>
-    <scala.binary.version>2.10</scala.binary.version>
-    <scala.version>2.10.4</scala.version>
+    <spark.version>2.0.0</spark.version>
+    <scala.binary.version>2.11</scala.binary.version>
+    <scala.version>2.11.8</scala.version>
 1.1
 0.2
 1.4

http://git-wip-us.apache.org/repos/asf/hive/blob/ac977cc8/ql/pom.xml
--
diff --git a/ql/pom.xml b/ql/pom.xml
index 02ddb80..2a93bb7 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -361,7 +361,7 @@
   ${calcite.version}
   
 
 
   org.hsqldb
@@ -380,14 +380,14 @@
   jackson-core
 
   
-   
+
 
   org.apache.calcite
   calcite-avatica
   ${calcite.version}
   
 
 
   org.hsqldb
@@ -685,6 +685,14 @@
  commmons-logging
  commons-logging

+          <exclusion>
+            <groupId>org.glassfish.jersey.containers</groupId>
+            <artifactId>*</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.glassfish.jersey.core</groupId>
+            <artifactId>*</artifactId>
+          </exclusion>
  

 
@@ -692,6 +700,18 @@
   jersey-servlet
   test
 
+    <dependency>
+      <groupId>org.glassfish.jersey.core</groupId>
+      <artifactId>jersey-server</artifactId>
+      <version>${glassfish.jersey.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.containers</groupId>
+      <artifactId>jersey-container-servlet-core</artifactId>
+      <version>${glassfish.jersey.version}</version>
+      <scope>test</scope>
+    </dependency>
   
 
   

http://git-wip-us.apache.org/repos/asf/hive/blob/ac977cc8/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveBaseFunctionResultList.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveBaseFunctionResultList.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveBaseFunctionResultList.java
index 5b65036..0fc79f4 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveBaseFunctionResultList.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveBaseFunctionResultList.java
@@ -38,15 +38,14 @@ import com.google.common.base.Preconditions;
  * through Iterator interface.
  */
 @SuppressWarnings("rawtypes")
-public abstract class HiveBaseFunctionResultList<T> implements
-    Iterable<T>, OutputCollector<HiveKey, BytesWritable>, Serializable {
+public abstract class HiveBaseFunctionResultList<T>
+  implements Iterator<T>, OutputCollector<HiveKey, BytesWritable>, Serializable {
   private static final long serialVersionUID = -1L;
   private final Iterator<T> inputIterator;
   private boolean isClosed = false;
 
   // Contains results from last processed input record.
   private final HiveKVResultCache lastRecordOutput;
-  private boolean iteratorAlreadyCreated = false;
 
   public HiveBaseFunctionResultList(Iterator<T> inputIterator) {
     this.inputIterator = 

[34/44] hive git commit: HIVE-14849: Support google-compute-engine provider on Hive ptest framework (Sergio Pena, reviewed by Prasanth Jayachandran)

2016-09-30 Thread sershe
HIVE-14849: Support google-compute-engine provider on Hive ptest framework 
(Sergio Pena, reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/291f3d50
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/291f3d50
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/291f3d50

Branch: refs/heads/hive-14535
Commit: 291f3d503d5a8627f86ef5f7fdd7880d8da4760c
Parents: cf72a73
Author: Sergio Pena 
Authored: Wed Sep 28 21:33:00 2016 -0500
Committer: Sergio Pena 
Committed: Wed Sep 28 21:33:00 2016 -0500

--
 .../ptest2/conf/cloudhost.properties.example|  37 +++
 testutils/ptest2/pom.xml|   5 +
 .../execution/context/CloudComputeService.java  | 224 +++
 .../context/CloudExecutionContextProvider.java  | 105 +++--
 4 files changed, 311 insertions(+), 60 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/291f3d50/testutils/ptest2/conf/cloudhost.properties.example
--
diff --git a/testutils/ptest2/conf/cloudhost.properties.example 
b/testutils/ptest2/conf/cloudhost.properties.example
new file mode 100644
index 000..c336052
--- /dev/null
+++ b/testutils/ptest2/conf/cloudhost.properties.example
@@ -0,0 +1,37 @@
+#
+# This is just an example of different cloudhost providers
+#
+
+# This context provides configurations for AWS EC2 and GCE (google compute 
engine)
+executionContextProvider = 
org.apache.hive.ptest.execution.context.CloudExecutionContextProvider$Builder
+
+# Option: GCE
+cloudProvider = google-compute-engine
+gceJsonFile = # GCE JSON KEY FILE
+instanceType = 
https://www.googleapis.com/compute/v1/projects//zones/us-central1-a/machineTypes/n1-standard-8
+imageId = 
https://www.googleapis.com/compute/v1/projects//global/images/hive-ptest-debian-8-20160927
+# keyPair = # UNUSED
+securityGroup = hive-ptest
+
+# Option: AWS
+cloudProvider = aws-ec2
+apiKey =# AWS ACCESS KEY
+accessKey = # AWS SECRET ACCESS KEY
+instanceType = c3.2xlarge
+imageId = us-west-1/ami-1fa1445b
+keyPair = hive-ptest
+securityGroup = hive-ptest
+
+# Generic options
+workingDirectory = /data/hive-ptest
+profileDirectory = /usr/local/hiveptest/etc/public/
+privateKey = /home/hiveptest/.ssh/hive-ptest-user-key
+dataDir = /data/hive-ptest/data/
+numHosts = 12
+groupName = hive-ptest-slaves
+localDirs = /home/hiveptest/
+user = hiveptest
+numThreads = 2
+maxLogDirectoriesPerProfile = 30
+userMetadata.owner = # USER
+maxHostsPerCreateRequest = 12
\ No newline at end of file
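
For orientation: the gceJsonFile configured above is what jclouds turns into credentials. A hedged sketch only (variable names are illustrative; the imports mirror the ones added to CloudComputeService.java below):

  // Hedged sketch, not patch code: build a GCE compute context from the JSON key file.
  Supplier<Credentials> credentials = new GoogleCredentialsFromJson(
      Files.toString(new File(gceJsonFile), Charsets.UTF_8));
  ComputeServiceContext context = ContextBuilder.newBuilder("google-compute-engine")
      .credentialsSupplier(credentials)
      .buildView(ComputeServiceContext.class);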

http://git-wip-us.apache.org/repos/asf/hive/blob/291f3d50/testutils/ptest2/pom.xml
--
diff --git a/testutils/ptest2/pom.xml b/testutils/ptest2/pom.xml
index cea29b6..97981fb 100644
--- a/testutils/ptest2/pom.xml
+++ b/testutils/ptest2/pom.xml
@@ -107,6 +107,11 @@ limitations under the License.
   ${jclouds.version}
 
 
+  org.apache.jclouds.labs
+  google-compute-engine
+  ${jclouds.version}
+
+
   org.apache.jclouds.driver
   jclouds-sshj
   ${jclouds.version}

http://git-wip-us.apache.org/repos/asf/hive/blob/291f3d50/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java
--
diff --git a/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java
index 64ee68e..e26c5ca 100644
--- a/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java
+++ b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java
@@ -18,11 +18,13 @@
  */
 package org.apache.hive.ptest.execution.context;
 
-import java.util.Collections;
-import java.util.Properties;
-import java.util.Map;
-import java.util.Set;
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
 
+import com.google.common.base.Supplier;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.io.Files;
 import org.jclouds.Constants;
 import org.jclouds.ContextBuilder;
 import org.jclouds.aws.ec2.compute.AWSEC2TemplateOptions;
@@ -34,7 +36,12 @@ import org.jclouds.compute.domain.ComputeMetadata;
 import org.jclouds.compute.domain.NodeMetadata;
 import org.jclouds.compute.domain.NodeMetadata.Status;
 import org.jclouds.compute.domain.Template;
+import org.jclouds.compute.options.TemplateOptions;
+import org.jclouds.domain.Credentials;
+import org.jclouds.googlecloud.GoogleCredentialsFromJson;
+import 

[39/44] hive git commit: HIVE-14819: FunctionInfo for permanent functions shows TEMPORARY FunctionType (Jason Dere, reviewed by Sergey Shelukhin)

2016-09-30 Thread sershe
HIVE-14819: FunctionInfo for permanent functions shows TEMPORARY FunctionType (Jason Dere, reviewed by Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/74a6ff67
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/74a6ff67
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/74a6ff67

Branch: refs/heads/hive-14535
Commit: 74a6ff678f9312d946a7d55d73bf4a60127de763
Parents: 474425a
Author: Jason Dere 
Authored: Thu Sep 29 13:35:17 2016 -0700
Committer: Jason Dere 
Committed: Thu Sep 29 13:35:17 2016 -0700

--
 .../hadoop/hive/ql/exec/FunctionInfo.java   |  20 ++--
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |   3 +-
 .../apache/hadoop/hive/ql/exec/Registry.java|  96 ++
 .../hadoop/hive/ql/exec/WindowFunctionInfo.java |   4 +-
 .../translator/SqlFunctionConverter.java|   2 +-
 .../hive/ql/exec/TestFunctionRegistry.java  | 100 +++
 6 files changed, 194 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/74a6ff67/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java
index 30ba996..8014dab 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java
@@ -73,36 +73,36 @@ public class FunctionInfo {
     this.discarded = new AtomicBoolean(false);  // shared to all session functions
   }
 
-  public FunctionInfo(boolean isNative, String displayName,
+  public FunctionInfo(FunctionType functionType, String displayName,
   GenericUDF genericUDF, FunctionResource... resources) {
-    this.functionType = isNative ? FunctionType.BUILTIN : FunctionType.TEMPORARY;
+this.functionType = functionType;
 this.displayName = displayName;
 this.genericUDF = genericUDF;
 this.isInternalTableFunction = false;
 this.resources = resources;
   }
 
-  public FunctionInfo(boolean isNative, String displayName,
+  public FunctionInfo(FunctionType functionType, String displayName,
   GenericUDAFResolver genericUDAFResolver, FunctionResource... resources) {
-    this.functionType = isNative ? FunctionType.BUILTIN : FunctionType.TEMPORARY;
+this.functionType = functionType;
 this.displayName = displayName;
 this.genericUDAFResolver = genericUDAFResolver;
 this.isInternalTableFunction = false;
 this.resources = resources;
   }
 
-  public FunctionInfo(boolean isNative, String displayName,
+  public FunctionInfo(FunctionType functionType, String displayName,
   GenericUDTF genericUDTF, FunctionResource... resources) {
-    this.functionType = isNative ? FunctionType.BUILTIN : FunctionType.TEMPORARY;
+this.functionType = functionType;
 this.displayName = displayName;
 this.genericUDTF = genericUDTF;
 this.isInternalTableFunction = false;
 this.resources = resources;
   }
 
-  public FunctionInfo(boolean isNative, String displayName, Class tFnCls,
+  public FunctionInfo(FunctionType functionType, String displayName, Class tFnCls,
   FunctionResource... resources) {
-    this.functionType = isNative ? FunctionType.BUILTIN : FunctionType.TEMPORARY;
+this.functionType = functionType;
 this.displayName = displayName;
 this.tableFunctionResolver = tFnCls;
 PartitionTableFunctionDescription def = AnnotationUtils.getAnnotation(
@@ -263,6 +263,10 @@ public class FunctionInfo {
 }
   }
 
+  public FunctionType getFunctionType() {
+return functionType;
+  }
+
   public static class FunctionResource {
 private final SessionState.ResourceType resourceType;
 private final String resourceURI;

http://git-wip-us.apache.org/repos/asf/hive/blob/74a6ff67/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index de74c3e..b277f5e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -1535,7 +1535,8 @@ public final class FunctionRegistry {
 }
 
 if (clazz != null) {
-  return system.isPermanentFunc(clazz);
+  // Use session registry - see Registry.isPermanentFunc()
+  return SessionState.getRegistryForWrite().isPermanentFunc(clazz);
 }
 return false;
   }
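
For illustration only (not part of the patch): the reworked constructors let a caller state the function type directly instead of deriving it from a boolean flag, which is what allows permanent functions to report a non-TEMPORARY type. A hedged sketch, assuming FunctionType.PERSISTENT exists alongside BUILTIN and TEMPORARY:

  // Hedged sketch only: register a permanent function with an explicit type.
  FunctionInfo info = new FunctionInfo(FunctionInfo.FunctionType.PERSISTENT,
      "my_upper", new GenericUDFUpper());
  assert info.getFunctionType() == FunctionInfo.FunctionType.PERSISTENT;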


[38/44] hive git commit: HIVE-14854. Add a core cluster type to QTestUtil. (Siddharth Seth, reviewed by Prasanth Jayachandran)

2016-09-30 Thread sershe
HIVE-14854. Add a core cluster type to QTestUtil. (Siddharth Seth, reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/474425aa
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/474425aa
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/474425aa

Branch: refs/heads/hive-14535
Commit: 474425aa62e3f25b119419439373aa684c6c2121
Parents: a6c6080
Author: Siddharth Seth 
Authored: Thu Sep 29 13:10:44 2016 -0700
Committer: Siddharth Seth 
Committed: Thu Sep 29 13:10:44 2016 -0700

--
 .../hive/cli/control/AbstractCliConfig.java | 13 +++-
 .../hadoop/hive/cli/control/CoreCliDriver.java  |  2 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java| 75 +++-
 .../hive/llap/daemon/impl/LlapDaemon.java   |  5 +-
 4 files changed, 56 insertions(+), 39 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/474425aa/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
index 03d4075..c12f51e 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
@@ -409,7 +409,18 @@ public abstract class AbstractCliConfig {
   }
 
   protected void setMetastoreType(MetastoreType mt) {
-metastoreType=mt;
+String metaStoreTypeProperty = getSysPropValue("metaStoreType");
+if (metaStoreTypeProperty != null) {
+  if (metaStoreTypeProperty.equalsIgnoreCase("sql")) {
+metastoreType = MetastoreType.sql;
+  } else if (metaStoreTypeProperty.equalsIgnoreCase("hbase")) {
+metastoreType = MetastoreType.hbase;
+  } else {
+        throw new IllegalArgumentException("Unknown metastore type: " + metaStoreTypeProperty);
+  }
+} else {
+  metastoreType = mt;
+}
   }
 
   public MetastoreType getMetastoreType() {
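
A hedged usage note: the override above is driven by a plain system property, so a test run can force the metastore flavor without touching the CliConfig. Illustrative only:

  // Honored by the new setMetastoreType() logic above; value "sql" or "hbase".
  System.setProperty("metaStoreType", "hbase");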

http://git-wip-us.apache.org/repos/asf/hive/blob/474425aa/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
index d83ff45..a735346 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
@@ -61,7 +61,7 @@ public class CoreCliDriver extends CliAdapter {
 @Override
 public QTestUtil invokeInternal() throws Exception {
       return new QTestUtil((cliConfig.getResultsDir()), (cliConfig.getLogDir()), miniMR,
-          hiveConfDir, hadoopVer, initScript, cleanupScript, useHBaseMetastore, true, false,
+          hiveConfDir, hadoopVer, initScript, cleanupScript, useHBaseMetastore, true,
           cliConfig.getFsType());
 }
   }.invoke("QtestUtil instance created", LOG, true);

http://git-wip-us.apache.org/repos/asf/hive/blob/474425aa/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 909d7f6..e49ecd9 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -50,6 +50,7 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Comparator;
 import java.util.Deque;
+import java.util.EnumSet;
 import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
@@ -152,7 +153,6 @@ public class QTestUtil {
 
   private String testWarehouse;
   private final String testFiles;
-  private final boolean localMode;
   protected final String outDir;
   protected final String logDir;
   private final TreeMap qMap;
@@ -411,6 +411,11 @@ public class QTestUtil {
 }
   }
 
+  private enum CoreClusterType {
+MR,
+TEZ,
+SPARK
+  }
 
   public enum FsType {
 local,
@@ -420,35 +425,48 @@ public class QTestUtil {
 
   public enum MiniClusterType {
 
-mr(FsType.hdfs),
-tez(FsType.hdfs),
-spark(FsType.local),
-miniSparkOnYarn(FsType.hdfs),
-llap(FsType.hdfs),
-none(FsType.local);
+mr(CoreClusterType.MR, FsType.hdfs),
+

[32/44] hive git commit: HIVE-12222: Define port range in property for RPCServer (Aihua Xu, reviewed by Xuefu Zhang)

2016-09-30 Thread sershe
HIVE-12222: Define port range in property for RPCServer (Aihua Xu, reviewed by Xuefu Zhang)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e2bd513a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e2bd513a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e2bd513a

Branch: refs/heads/hive-14535
Commit: e2bd513a3970b141576f7ead25fc6cfcc5fcda17
Parents: 667e9dd
Author: Aihua Xu 
Authored: Thu Sep 22 14:20:51 2016 -0400
Committer: Aihua Xu 
Committed: Wed Sep 28 12:07:40 2016 -0400

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |  3 ++
 .../hive/spark/client/rpc/RpcConfiguration.java | 38 +
 .../apache/hive/spark/client/rpc/RpcServer.java | 44 +---
 .../apache/hive/spark/client/rpc/TestRpc.java   | 37 +++-
 4 files changed, 115 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e2bd513a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 43a16d7..4c3ef3e 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -3080,6 +3080,9 @@ public class HiveConf extends Configuration {
   "Default is empty, which means the address will be determined in the 
same way as for hive.server2.thrift.bind.host." +
   "This is only necessary if the host has mutiple network addresses and if 
a different network address other than " +
   "hive.server2.thrift.bind.host is to be used."),
+SPARK_RPC_SERVER_PORT("hive.spark.client.rpc.server.port", "", "A list of 
port ranges which can be used by RPC server " +
+"with the format of 49152-49222,49228 and a random one is selected 
from the list. Default is empty, which randomly " +
+"selects one port from all available ones."),
 SPARK_DYNAMIC_PARTITION_PRUNING(
 "hive.spark.dynamic.partition.pruning", false,
 "When dynamic pruning is enabled, joins on partition keys will be 
processed by writing\n" +

http://git-wip-us.apache.org/repos/asf/hive/blob/e2bd513a/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java
--
diff --git a/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java b/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java
index 210f8a4..8c59015 100644
--- a/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java
+++ b/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java
@@ -18,7 +18,9 @@
 package org.apache.hive.spark.client.rpc;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
@@ -107,6 +109,42 @@ public final class RpcConfiguration {
 return ServerUtils.getHostAddress(hiveHost).getHostName();
   }
 
+  /**
+   * Parses the port string like 49152-49222,49228 into the port list. A default 0
+   * is added for the empty port string.
+   * @return a list of configured ports.
+   * @exception IOException is thrown if the property is not configured properly
+   */
+  List<Integer> getServerPorts() throws IOException {
+    String errMsg = "Incorrect RPC server port configuration for HiveServer2";
+    String portString = config.get(HiveConf.ConfVars.SPARK_RPC_SERVER_PORT.varname);
+    ArrayList<Integer> ports = new ArrayList<Integer>();
+    try {
+      if(!StringUtils.isEmpty(portString)) {
+        for (String portRange : portString.split(",")) {
+          String[] range = portRange.split("-");
+          if (range.length == 0 || range.length > 2
+              || (range.length == 2 && Integer.valueOf(range[0]) > Integer.valueOf(range[1]))) {
+            throw new IOException(errMsg);
+          }
+          if (range.length == 1) {
+            ports.add(Integer.valueOf(range[0]));
+          } else {
+            for (int i = Integer.valueOf(range[0]); i <= Integer.valueOf(range[1]); i++) {
+              ports.add(i);
+            }
+          }
+        }
+      } else {
+        ports.add(0);
+      }
+
+      return ports;
+    } catch(NumberFormatException e) {
+      throw new IOException(errMsg);
+    }
+  }
+
   String getRpcChannelLogLevel() {
 return config.get(HiveConf.ConfVars.SPARK_RPC_CHANNEL_LOG_LEVEL.varname);
   }
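
For reference, the new property is set like any other HiveConf value; a hedged sketch of the configuration this parser accepts:

  // Illustrative only: restrict the Spark client RPC server to a range plus one
  // extra port. getServerPorts() above expands this to the individual ports;
  // an empty value yields a single 0, i.e. bind to any free port.
  HiveConf conf = new HiveConf();
  conf.set(HiveConf.ConfVars.SPARK_RPC_SERVER_PORT.varname, "49152-49222,49228");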

http://git-wip-us.apache.org/repos/asf/hive/blob/e2bd513a/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcServer.java

[33/44] hive git commit: HIVE-14824. Separate fstype from cluster type in QTestUtil. (Siddharth Seth, reviewed by Prasanth Jayachandran)

2016-09-30 Thread sershe
HIVE-14824. Separate fstype from cluster type in QTestUtil. (Siddharth Seth, reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/cf72a737
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/cf72a737
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/cf72a737

Branch: refs/heads/hive-14535
Commit: cf72a73708b00b2e585d101258d95eb9cbd3791f
Parents: e2bd513
Author: Siddharth Seth 
Authored: Wed Sep 28 13:40:04 2016 -0700
Committer: Siddharth Seth 
Committed: Wed Sep 28 13:40:04 2016 -0700

--
 .../hive/cli/control/AbstractCliConfig.java | 10 +++
 .../hadoop/hive/cli/control/CliConfigs.java | 12 ++-
 .../hadoop/hive/cli/control/CoreCliDriver.java  |  3 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java| 84 +++-
 4 files changed, 69 insertions(+), 40 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/cf72a737/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
index efbd465..03d4075 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
@@ -35,6 +35,7 @@ import java.util.regex.Pattern;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.ql.QTestUtil;
+import org.apache.hadoop.hive.ql.QTestUtil.FsType;
 import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType;
 import com.google.common.base.Splitter;
 import com.google.common.collect.Sets;
@@ -63,6 +64,7 @@ public abstract class AbstractCliConfig {
   private String initScript;
   private String hiveConfDir;
   private MiniClusterType clusterType;
+  private FsType fsType;
 
   // FIXME: null value is treated differently on the other end..when those filter will be
   // moved...this may change
@@ -380,6 +382,14 @@ public abstract class AbstractCliConfig {
 }
   }
 
+  protected FsType getFsType() {
+return this.fsType;
+  }
+
+  protected void setFsType(FsType fsType) {
+this.fsType = fsType;
+  }
+
   private String getSysPropValue(String propName) {
 String propValue = System.getProperty(propName);
 if (propValue == null || propValue.trim().length() == 0) {

http://git-wip-us.apache.org/repos/asf/hive/blob/cf72a737/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
index 0068b95..ca72282 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
@@ -21,6 +21,7 @@ import java.io.File;
 import java.net.MalformedURLException;
 import java.net.URL;
 
+import org.apache.hadoop.hive.ql.QTestUtil;
 import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType;
 import org.apache.hadoop.hive.ql.parse.CoreParseNegative;
 
@@ -171,8 +172,15 @@ public class CliConfigs {
 setInitScript("q_test_init_for_encryption.sql");
 setCleanupScript("q_test_cleanup_for_encryption.sql");
 
-setHiveConfDir("data/conf");
-setClusterType(MiniClusterType.encrypted);
+
+setClusterType(MiniClusterType.mr);
+setFsType(QTestUtil.FsType.encrypted_hdfs);
+if (getClusterType() == MiniClusterType.tez) {
+  setHiveConfDir("data/conf/tez");
+} else {
+  setHiveConfDir("data/conf");
+}
+
   } catch (Exception e) {
 throw new RuntimeException("can't construct cliconfig", e);
   }
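
Since FsType is now decoupled from the cluster type, the same encrypted profile could in principle drive another engine; a hedged sketch (not part of the patch):

  // Illustrative only: encrypted HDFS under a Tez mini cluster.
  setClusterType(MiniClusterType.tez);
  setFsType(QTestUtil.FsType.encrypted_hdfs);
  setHiveConfDir("data/conf/tez");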

http://git-wip-us.apache.org/repos/asf/hive/blob/cf72a737/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
index db58f1d..d83ff45 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
@@ -61,7 +61,8 @@ public class CoreCliDriver extends CliAdapter {
 @Override
 public QTestUtil invokeInternal() throws Exception {
  

[19/44] hive git commit: HIVE-14713: LDAP Authentication Provider should be covered with unit tests (Illya Yalovyy, reviewed by Chaoyu Tang, Szehon Ho)

2016-09-30 Thread sershe
http://git-wip-us.apache.org/repos/asf/hive/blob/990927e3/service/src/java/org/apache/hive/service/auth/ldap/UserSearchFilterFactory.java
--
diff --git a/service/src/java/org/apache/hive/service/auth/ldap/UserSearchFilterFactory.java b/service/src/java/org/apache/hive/service/auth/ldap/UserSearchFilterFactory.java
new file mode 100644
index 000..3218875
--- /dev/null
+++ b/service/src/java/org/apache/hive/service/auth/ldap/UserSearchFilterFactory.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth.ldap;
+
+import java.util.Collection;
+import javax.naming.NamingException;
+import javax.security.sasl.AuthenticationException;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+/**
+ * A factory for a {@link Filter} that checks whether the provided user can be found in the directory.
+ *
+ * The produced filter object filters out all users that are not found in the directory.
+ */
+public final class UserSearchFilterFactory implements FilterFactory {
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public Filter getInstance(HiveConf conf) {
+    Collection<String> groupFilter = conf.getStringCollection(
+        HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER.varname);
+    Collection<String> userFilter = conf.getStringCollection(
+        HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERFILTER.varname);
+
+if (groupFilter.isEmpty() && userFilter.isEmpty()) {
+  return null;
+}
+
+return new UserSearchFilter();
+  }
+
+  private static final class UserSearchFilter implements Filter {
+    @Override
+    public void apply(DirSearch client, String user) throws AuthenticationException {
+      try {
+        String userDn = client.findUserDn(user);
+
+        // This should not be null because we were allowed to bind with this username
+        // safe check in case we were able to bind anonymously.
+        if (userDn == null) {
+          throw new AuthenticationException("Authentication failed: User search failed");
+        }
+      } catch (NamingException e) {
+        throw new AuthenticationException("LDAP Authentication failed for user", e);
+      }
+    }
+  }
+}
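
A hedged sketch of how such a FilterFactory is consumed (conf and dirSearch are assumed to exist; flow per the getInstance/apply interfaces above):

  // Illustrative only: a null filter means no user/group filters are configured.
  Filter filter = new UserSearchFilterFactory().getInstance(conf);
  if (filter != null) {
    filter.apply(dirSearch, "jsmith");  // throws AuthenticationException if absent
  }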

http://git-wip-us.apache.org/repos/asf/hive/blob/990927e3/service/src/test/org/apache/hive/service/auth/TestLdapAtnProviderWithMiniDS.java
--
diff --git a/service/src/test/org/apache/hive/service/auth/TestLdapAtnProviderWithMiniDS.java b/service/src/test/org/apache/hive/service/auth/TestLdapAtnProviderWithMiniDS.java
index 089a059..23a048a 100644
--- a/service/src/test/org/apache/hive/service/auth/TestLdapAtnProviderWithMiniDS.java
+++ b/service/src/test/org/apache/hive/service/auth/TestLdapAtnProviderWithMiniDS.java
@@ -225,7 +225,6 @@ public class TestLdapAtnProviderWithMiniDS extends AbstractLdapTestUnit {
 hiveConf = new HiveConf();
 
 ldapProvider = new LdapAuthenticationProviderImpl(hiveConf);
-ldapProvider.init(hiveConf);
   }
 
   @AfterClass
@@ -259,7 +258,7 @@ public class TestLdapAtnProviderWithMiniDS extends AbstractLdapTestUnit {
   }
 }
 
-ldapProvider.init(hiveConf);
+ldapProvider = new LdapAuthenticationProviderImpl(hiveConf);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hive/blob/990927e3/service/src/test/org/apache/hive/service/auth/TestLdapAuthenticationProviderImpl.java
--
diff --git a/service/src/test/org/apache/hive/service/auth/TestLdapAuthenticationProviderImpl.java b/service/src/test/org/apache/hive/service/auth/TestLdapAuthenticationProviderImpl.java
index f276906..4fad755 100644
--- a/service/src/test/org/apache/hive/service/auth/TestLdapAuthenticationProviderImpl.java
+++ b/service/src/test/org/apache/hive/service/auth/TestLdapAuthenticationProviderImpl.java
@@ -15,51 +15,260 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hive.service.auth;
 
+import java.io.IOException;
+import java.util.Arrays;
+import javax.naming.NamingException;
 import 

[01/44] hive git commit: HIVE-14680 : retain consistent splits /during/ (as opposed to across) LLAP failures on top of HIVE-14589 (Sergey Shelukhin, reviewed by Siddharth Seth)

2016-09-30 Thread sershe
Repository: hive
Updated Branches:
  refs/heads/hive-14535 70299dc48 -> 6d9144835


HIVE-14680 : retain consistent splits /during/ (as opposed to across) LLAP failures on top of HIVE-14589 (Sergey Shelukhin, reviewed by Siddharth Seth)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/83ef6f92
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/83ef6f92
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/83ef6f92

Branch: refs/heads/hive-14535
Commit: 83ef6f9272d71e1918ffc89635709b4f81e8aba9
Parents: 4340d46
Author: Sergey Shelukhin 
Authored: Mon Sep 19 16:11:16 2016 -0700
Committer: Sergey Shelukhin 
Committed: Mon Sep 19 16:11:16 2016 -0700

--
 .../hive/llap/registry/ServiceInstanceSet.java  |   7 +-
 .../registry/impl/InactiveServiceInstance.java  |  77 ++
 .../registry/impl/LlapFixedRegistryImpl.java|   2 +-
 .../impl/LlapZookeeperRegistryImpl.java |  34 -
 .../daemon/services/impl/LlapWebServices.java   |   2 +-
 .../tez/HostAffinitySplitLocationProvider.java  |  80 +++---
 .../apache/hadoop/hive/ql/exec/tez/Utils.java   |   8 +-
 .../TestHostAffinitySplitLocationProvider.java  | 150 +++
 .../apache/hadoop/hive/serde2/SerDeUtils.java   |  11 ++
 9 files changed, 306 insertions(+), 65 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/83ef6f92/llap-client/src/java/org/apache/hadoop/hive/llap/registry/ServiceInstanceSet.java
--
diff --git a/llap-client/src/java/org/apache/hadoop/hive/llap/registry/ServiceInstanceSet.java b/llap-client/src/java/org/apache/hadoop/hive/llap/registry/ServiceInstanceSet.java
index 13b668d..1e8c895 100644
--- a/llap-client/src/java/org/apache/hadoop/hive/llap/registry/ServiceInstanceSet.java
+++ b/llap-client/src/java/org/apache/hadoop/hive/llap/registry/ServiceInstanceSet.java
@@ -14,7 +14,6 @@
 package org.apache.hadoop.hive.llap.registry;
 
 import java.util.Collection;
-import java.util.List;
 import java.util.Set;
 
 public interface ServiceInstanceSet {
@@ -32,9 +31,11 @@ public interface ServiceInstanceSet {
   /**
    * Gets a list containing all the instances. This list has the same iteration order across
    * different processes, assuming the list of registry entries is the same.
-   * @return
+   * @param consistentIndexes if true, also try to maintain the same exact index for each node
+   *                          across calls, by inserting inactive instances to replace the
+   *                          removed ones.
    */
-  public Collection<ServiceInstance> getAllInstancesOrdered();
+  public Collection<ServiceInstance> getAllInstancesOrdered(boolean consistentIndexes);
 
   /**
* Get an instance by worker identity.
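
A hedged sketch of why consistentIndexes matters for split placement (names are illustrative): a dead node keeps its slot as an inactive placeholder, so index-based assignment stays stable while the failure lasts.

  // Illustrative only: stable index-based node selection during failures.
  List<ServiceInstance> ordered =
      new ArrayList<>(instances.getAllInstancesOrdered(true));
  ServiceInstance candidate = ordered.get(splitHash % ordered.size());
  if (!candidate.isAlive()) {
    // placeholder for a down node: fall back to another live instance
  }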

http://git-wip-us.apache.org/repos/asf/hive/blob/83ef6f92/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/InactiveServiceInstance.java
--
diff --git a/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/InactiveServiceInstance.java b/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/InactiveServiceInstance.java
new file mode 100644
index 000..79b7d51
--- /dev/null
+++ b/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/InactiveServiceInstance.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.hive.llap.registry.impl;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.llap.registry.ServiceInstance;
+import org.apache.hadoop.yarn.api.records.Resource;
+
+public class InactiveServiceInstance implements ServiceInstance {
+  private final String name;
+  public InactiveServiceInstance(String name) {
+this.name = name;
+  }
+
+  @Override
+  public String getWorkerIdentity() {
+return name;
+  }
+
+  @Override
+  public boolean isAlive() {
+return false;
+  }
+
+  @Override
+  public String getHost() {
+return null;
+  }
+
+  @Override
+  public int getRpcPort() {
+throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public int getManagementPort() {
+throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public int getShufflePort() {
+throw 

[20/44] hive git commit: HIVE-14713: LDAP Authentication Provider should be covered with unit tests (Illya Yalovyy, reviewed by Chaoyu Tang, Szehon Ho)

2016-09-30 Thread sershe
HIVE-14713: LDAP Authentication Provider should be covered with unit tests (Illya Yalovyy, reviewed by Chaoyu Tang, Szehon Ho)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/990927e3
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/990927e3
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/990927e3

Branch: refs/heads/hive-14535
Commit: 990927e3dcddcc7c82a16437d55d9f7ea9a1a447
Parents: 421d97a
Author: ctang 
Authored: Fri Sep 23 15:50:32 2016 -0400
Committer: ctang 
Committed: Fri Sep 23 15:50:32 2016 -0400

--
 service/pom.xml |   7 +
 .../auth/LdapAuthenticationProviderImpl.java| 657 ++-
 .../service/auth/ldap/ChainFilterFactory.java   |  78 +++
 .../auth/ldap/CustomQueryFilterFactory.java |  84 +++
 .../hive/service/auth/ldap/DirSearch.java   |  52 ++
 .../service/auth/ldap/DirSearchFactory.java |  37 ++
 .../apache/hive/service/auth/ldap/Filter.java   |  36 +
 .../hive/service/auth/ldap/FilterFactory.java   |  33 +
 .../service/auth/ldap/GroupFilterFactory.java   |  90 +++
 .../hive/service/auth/ldap/LdapSearch.java  | 155 +
 .../service/auth/ldap/LdapSearchFactory.java|  64 ++
 .../hive/service/auth/ldap/LdapUtils.java   | 228 +++
 .../apache/hive/service/auth/ldap/Query.java| 154 +
 .../hive/service/auth/ldap/QueryFactory.java| 135 
 .../service/auth/ldap/SearchResultHandler.java  | 163 +
 .../service/auth/ldap/UserFilterFactory.java|  75 +++
 .../auth/ldap/UserSearchFilterFactory.java  |  65 ++
 .../auth/TestLdapAtnProviderWithMiniDS.java |   3 +-
 .../TestLdapAuthenticationProviderImpl.java | 277 +++-
 .../hive/service/auth/ldap/Credentials.java |  41 ++
 .../hive/service/auth/ldap/LdapTestUtils.java   | 126 
 .../hive/service/auth/ldap/TestChainFilter.java | 103 +++
 .../auth/ldap/TestCustomQueryFilter.java|  85 +++
 .../hive/service/auth/ldap/TestGroupFilter.java | 101 +++
 .../hive/service/auth/ldap/TestLdapSearch.java  | 209 ++
 .../hive/service/auth/ldap/TestLdapUtils.java   | 103 +++
 .../hive/service/auth/ldap/TestQuery.java   |  59 ++
 .../service/auth/ldap/TestQueryFactory.java |  79 +++
 .../auth/ldap/TestSearchResultHandler.java  | 222 +++
 .../hive/service/auth/ldap/TestUserFilter.java  |  75 +++
 .../service/auth/ldap/TestUserSearchFilter.java |  94 +++
 31 files changed, 3062 insertions(+), 628 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/990927e3/service/pom.xml
--
diff --git a/service/pom.xml b/service/pom.xml
index ecea719..9306739 100644
--- a/service/pom.xml
+++ b/service/pom.xml
@@ -164,6 +164,13 @@
     </dependency>
 
     <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <version>${mockito-all.version}</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
       <groupId>org.apache.directory.client.ldap</groupId>
       <artifactId>ldap-client-api</artifactId>
       <version>${apache-directory-clientapi.version}</version>

http://git-wip-us.apache.org/repos/asf/hive/blob/990927e3/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
--
diff --git a/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java b/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
index efd5393..c21da28 100644
--- a/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
+++ b/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
@@ -17,633 +17,106 @@
  */
 package org.apache.hive.service.auth;
 
-import java.util.ArrayList;
-import java.util.Hashtable;
-import java.util.List;
-import java.util.ListIterator;
-
-import javax.naming.Context;
-import javax.naming.NamingEnumeration;
-import javax.naming.NamingException;
-import javax.naming.directory.Attribute;
-import javax.naming.directory.Attributes;
-import javax.naming.directory.DirContext;
-import javax.naming.directory.InitialDirContext;
-import javax.naming.directory.SearchControls;
-import javax.naming.directory.SearchResult;
 import javax.security.sasl.AuthenticationException;
 
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableList;
+import java.util.Iterator;
+import java.util.List;
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hive.service.ServiceUtils;
+import org.apache.hive.service.auth.ldap.ChainFilterFactory;
+import org.apache.hive.service.auth.ldap.CustomQueryFilterFactory;
+import org.apache.hive.service.auth.ldap.LdapSearchFactory;
+import org.apache.hive.service.auth.ldap.Filter;
+import 

[13/44] hive git commit: HIVE-14814: metastoreClient is used directly in Hive cause NPE (Prasanth Jayachandran reviewed by Eugene Koifman)

2016-09-30 Thread sershe
HIVE-14814: metastoreClient is used directly in Hive cause NPE (Prasanth Jayachandran reviewed by Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1a3e4be3
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1a3e4be3
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1a3e4be3

Branch: refs/heads/hive-14535
Commit: 1a3e4be3dbd485f2630c7249254727ce58374d1c
Parents: c9224d5
Author: Prasanth Jayachandran 
Authored: Thu Sep 22 10:50:30 2016 -0700
Committer: Prasanth Jayachandran 
Committed: Thu Sep 22 10:50:30 2016 -0700

--
 ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/1a3e4be3/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index da46854..de6adb5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -1864,7 +1864,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
 for (Partition p : partitionsMap.values()) {
   partNames.add(p.getName());
 }
-    metaStoreClient.addDynamicPartitions(txnId, tbl.getDbName(), tbl.getTableName(),
+    getMSC().addDynamicPartitions(txnId, tbl.getDbName(), tbl.getTableName(),
         partNames, AcidUtils.toDataOperationType(operation));
   }
   LOG.info("Loaded " + partitionsMap.size() + " partitions");
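
The fix matters because the raw metaStoreClient field may still be null, while getMSC() creates the client on first use. A hedged, simplified sketch of the accessor (not the actual method body):

  public IMetaStoreClient getMSC() throws MetaException {
    if (metaStoreClient == null) {
      metaStoreClient = createMetaStoreClient();  // assumed factory method
    }
    return metaStoreClient;
  }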



[24/44] hive git commit: HIVE-3173 Add tests for JDBC getTypeInfo method (Xiu Gao via gates)

2016-09-30 Thread sershe
HIVE-3173 Add tests for JDBC getTypeInfo method (Xiu Gao via gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/19fd5613
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/19fd5613
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/19fd5613

Branch: refs/heads/hive-14535
Commit: 19fd56137caa23fbe8ef1e452a11603fc14f4325
Parents: 4ce5fe1
Author: Alan Gates 
Authored: Mon Sep 26 10:47:48 2016 -0700
Committer: Alan Gates 
Committed: Mon Sep 26 10:47:48 2016 -0700

--
 .../org/apache/hive/jdbc/TestJdbcDriver2.java   | 32 
 1 file changed, 32 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/19fd5613/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
index fc91f9d..ff4d63f 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -1145,6 +1145,38 @@ public class TestJdbcDriver2 {
 assertFalse("Unexpected table", rs.next());
   }
 
+  @Test
+  public void testMetaDataGetTypeInfo() throws SQLException {
+    HiveBaseResultSet rs = (HiveBaseResultSet) con.getMetaData().getTypeInfo();
+    Set<String> typeInfos = new HashSet<String>();
+    typeInfos.add("BOOLEAN");
+    typeInfos.add("TINYINT");
+    typeInfos.add("SMALLINT");
+    typeInfos.add("INT");
+    typeInfos.add("BIGINT");
+    typeInfos.add("FLOAT");
+    typeInfos.add("DOUBLE");
+    typeInfos.add("STRING");
+    typeInfos.add("TIMESTAMP");
+    typeInfos.add("BINARY");
+    typeInfos.add("DECIMAL");
+    typeInfos.add("ARRAY");
+    typeInfos.add("MAP");
+    typeInfos.add("STRUCT");
+    typeInfos.add("UNIONTYPE");
+
+    int cnt = 0;
+    while (rs.next()) {
+      String typeInfo = rs.getString("TYPE_NAME");
+      assertEquals("Get by index different from get by name", rs.getString(1), typeInfo);
+      typeInfos.remove(typeInfo);
+      cnt++;
+    }
+    rs.close();
+    assertEquals("Incorrect typeInfo count.", 0, typeInfos.size());
+    assertTrue("Found less typeInfos than we test for.", cnt >= typeInfos.size());
+  }
+
   /**
    * Test the type returned for pre-created table type table and view type table
    * @param tableTypeNames expected table types
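
For reference, the metadata call under test as a plain JDBC client would issue it (connection setup omitted):

  // Illustrative only: enumerate the type names HiveServer2 reports.
  ResultSet types = con.getMetaData().getTypeInfo();
  while (types.next()) {
    System.out.println(types.getString("TYPE_NAME"));
  }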



[10/44] hive git commit: HIVE-14461. Move hbase_bulk to run via TestCliDriver. (Siddharth Seth, reviewed by Prasanth Jayachandran)

2016-09-30 Thread sershe
HIVE-14461. Move hbase_bulk to run via TestCliDriver. (Siddharth Seth, reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/91082e5f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/91082e5f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/91082e5f

Branch: refs/heads/hive-14535
Commit: 91082e5fffbce87029e0a0280d50693a0ffdb1f8
Parents: 66af764
Author: Siddharth Seth 
Authored: Wed Sep 21 14:35:53 2016 -0700
Committer: Siddharth Seth 
Committed: Wed Sep 21 14:35:53 2016 -0700

--
 .../src/test/queries/positive/hbase_bulk.m  |  62 -
 .../src/test/queries/positive/hbase_bulk.q  |  62 +
 .../src/test/results/positive/hbase_bulk.m.out  | 133 ---
 .../src/test/results/positive/hbase_bulk.q.out  | 133 +++
 .../hive/cli/TestHBaseMinimrCliDriver.java  |  62 -
 .../hadoop/hive/cli/control/CliConfigs.java |  22 ---
 6 files changed, 195 insertions(+), 279 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/91082e5f/hbase-handler/src/test/queries/positive/hbase_bulk.m
--
diff --git a/hbase-handler/src/test/queries/positive/hbase_bulk.m b/hbase-handler/src/test/queries/positive/hbase_bulk.m
deleted file mode 100644
index f8bb47d..000
--- a/hbase-handler/src/test/queries/positive/hbase_bulk.m
+++ /dev/null
@@ -1,62 +0,0 @@
-drop table hbsort;
-drop table hbpartition;
-
-set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
-
--- this is a dummy table used for controlling how the HFiles are
--- created
-create table hbsort(key string, val string, val2 string)
-stored as
-INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
-OUTPUTFORMAT 'org.apache.hadoop.hive.hbase.HiveHFileOutputFormat'
-TBLPROPERTIES ('hfile.family.path' = '/tmp/hbsort/cf');
-
--- this is a dummy table used for controlling how the input file
--- for TotalOrderPartitioner is created
-create table hbpartition(part_break string)
-row format serde
-'org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe'
-stored as
-inputformat
-'org.apache.hadoop.mapred.TextInputFormat'
-outputformat
-'org.apache.hadoop.hive.ql.io.HiveNullValueSequenceFileOutputFormat'
-location '/tmp/data/hbpartition';
-
--- this should produce one file, but we do not
--- know what it will be called, so we will copy it to a well known
--- filename /tmp/hbpartition.lst
-insert overwrite table hbpartition
-select distinct value
-from src
-where value='val_100' or value='val_200';
-
-dfs -count /tmp/data/hbpartition;
-dfs -cp /tmp/data/hbpartition/* /tmp/hbpartition.lst;
-
-set mapred.reduce.tasks=3;
-set hive.mapred.partitioner=org.apache.hadoop.mapred.lib.TotalOrderPartitioner;
-set total.order.partitioner.natural.order=false;
-set total.order.partitioner.path=/tmp/hbpartition.lst;
-set mapreduce.totalorderpartitioner.naturalorder=false;
-set mapreduce.totalorderpartitioner.path=/tmp/hbpartition.lst;
-
--- this should produce three files in /tmp/hbsort/cf
--- include some trailing blanks and nulls to make sure we handle them correctly
-insert overwrite table hbsort
-select distinct value,
-  case when key=103 then cast(null as string) else key end,
-  case when key=103 then ''
-   else cast(key+1 as string) end
-from src
-cluster by value;
-
-dfs -count /tmp/hbsort/cf;
-
--- To get the files out to your local filesystem for loading into
--- HBase, run mkdir -p /tmp/blah/cf, then uncomment and
--- semicolon-terminate the line below before running this test:
--- dfs -copyToLocal /tmp/hbsort/cf/* /tmp/blah/cf
-
-drop table hbsort;
-drop table hbpartition;

http://git-wip-us.apache.org/repos/asf/hive/blob/91082e5f/hbase-handler/src/test/queries/positive/hbase_bulk.q
--
diff --git a/hbase-handler/src/test/queries/positive/hbase_bulk.q b/hbase-handler/src/test/queries/positive/hbase_bulk.q
new file mode 100644
index 000..f8bb47d
--- /dev/null
+++ b/hbase-handler/src/test/queries/positive/hbase_bulk.q
@@ -0,0 +1,62 @@
+drop table hbsort;
+drop table hbpartition;
+
+set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
+
+-- this is a dummy table used for controlling how the HFiles are
+-- created
+create table hbsort(key string, val string, val2 string)
+stored as
+INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT 'org.apache.hadoop.hive.hbase.HiveHFileOutputFormat'
+TBLPROPERTIES ('hfile.family.path' = '/tmp/hbsort/cf');
+
+-- this is a dummy table used for controlling how the input file
+-- for TotalOrderPartitioner is created
+create table hbpartition(part_break string)
+row format serde

[15/44] hive git commit: HIVE-14751: Add support for date truncation (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2016-09-30 Thread sershe
HIVE-14751: Add support for date truncation (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e532549f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e532549f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e532549f

Branch: refs/heads/hive-14535
Commit: e532549f162c29e6f621c9cb0ba7588e5882e4bc
Parents: 3237bb2
Author: Jesus Camacho Rodriguez 
Authored: Fri Sep 23 10:19:55 2016 +0100
Committer: Jesus Camacho Rodriguez 
Committed: Fri Sep 23 10:19:55 2016 +0100

--
 .../org/apache/hadoop/hive/ql/parse/HiveLexer.g |   3 +
 .../hadoop/hive/ql/parse/IdentifiersParser.g|  30 ++-
 .../apache/hadoop/hive/ql/udf/UDFDateFloor.java |   8 +-
 .../ql/udf/TestUDFDateFormatGranularity.java|  22 +-
 ql/src/test/queries/clientpositive/floor_time.q |  46 
 .../results/clientpositive/floor_time.q.out | 211 +++
 6 files changed, 306 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e532549f/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
index 7ceb005..af659ad 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
@@ -302,7 +302,9 @@ KW_CONF: 'CONF';
 KW_VALUES: 'VALUES';
 KW_RELOAD: 'RELOAD';
 KW_YEAR: 'YEAR';
+KW_QUARTER: 'QUARTER';
 KW_MONTH: 'MONTH';
+KW_WEEK: 'WEEK';
 KW_DAY: 'DAY';
 KW_HOUR: 'HOUR';
 KW_MINUTE: 'MINUTE';
@@ -329,6 +331,7 @@ KW_RELY: 'RELY';
 KW_NORELY: 'NORELY';
 KW_KEY: 'KEY';
 KW_ABORT: 'ABORT';
+KW_FLOOR: 'FLOOR';
 
 // Operators
 // NOTE: if you add a new function/operator, add it to sysFuncNames so that describe function _FUNC_ will work.

http://git-wip-us.apache.org/repos/asf/hive/blob/e532549f/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
index 9ba1865..7842d50 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
@@ -241,6 +241,30 @@ whenExpression
 KW_END -> ^(TOK_FUNCTION KW_WHEN expression*)
 ;
 
+floorExpression
+:
+KW_FLOOR
+LPAREN
+  expression
+  (KW_TO
+  (floorUnit=floorDateQualifiers))?
+RPAREN
+-> {floorUnit != null}? ^(TOK_FUNCTION $floorUnit expression)
+-> ^(TOK_FUNCTION Identifier["floor"] expression)
+;
+
+floorDateQualifiers
+:
+KW_YEAR -> Identifier["floor_year"]
+| KW_QUARTER -> Identifier["floor_quarter"]
+| KW_MONTH -> Identifier["floor_month"]
+| KW_WEEK -> Identifier["floor_week"]
+| KW_DAY -> Identifier["floor_day"]
+| KW_HOUR -> Identifier["floor_hour"]
+| KW_MINUTE -> Identifier["floor_minute"]
+| KW_SECOND -> Identifier["floor_second"]
+;
+
 constant
 @init { gParent.pushMsg("constant", state); }
 @after { gParent.popMsg(state); }
@@ -323,6 +347,7 @@ atomExpression
 (KW_NULL) => KW_NULL -> TOK_NULL
 | (constant) => constant
 | castExpression
+| floorExpression
 | caseExpression
 | whenExpression
 | (functionName LPAREN) => function
@@ -566,6 +591,7 @@ sysFuncNames
 | KW_IF
 | KW_CASE
 | KW_WHEN
+| KW_FLOOR
 | KW_TINYINT
 | KW_SMALLINT
 | KW_INT
@@ -654,13 +680,13 @@ nonReserved
     | KW_KEYS | KW_KEY_TYPE | KW_LAST | KW_LIMIT | KW_OFFSET | KW_LINES | KW_LOAD | KW_LOCATION | KW_LOCK | KW_LOCKS | KW_LOGICAL | KW_LONG
     | KW_MAPJOIN | KW_MATERIALIZED | KW_METADATA | KW_MINUS | KW_MINUTE | KW_MONTH | KW_MSCK | KW_NOSCAN | KW_NO_DROP | KW_NULLS | KW_OFFLINE
     | KW_OPTION | KW_OUTPUTDRIVER | KW_OUTPUTFORMAT | KW_OVERWRITE | KW_OWNER | KW_PARTITIONED | KW_PARTITIONS | KW_PLUS | KW_PRETTY
-    | KW_PRINCIPALS | KW_PROTECTION | KW_PURGE | KW_READ | KW_READONLY | KW_REBUILD | KW_RECORDREADER | KW_RECORDWRITER
+    | KW_PRINCIPALS | KW_PROTECTION | KW_PURGE | KW_QUARTER | KW_READ | KW_READONLY | KW_REBUILD | KW_RECORDREADER | KW_RECORDWRITER
     | KW_RELOAD | KW_RENAME | KW_REPAIR | KW_REPLACE | KW_REPLICATION | KW_RESTRICT | KW_REWRITE
     | KW_ROLE | KW_ROLES | KW_SCHEMA | KW_SCHEMAS | KW_SECOND | KW_SEMI | KW_SERDE | KW_SERDEPROPERTIES | KW_SERVER | KW_SETS | KW_SHARED
     | KW_SHOW | KW_SHOW_DATABASE | KW_SKEWED | KW_SORT | KW_SORTED | KW_SSL | KW_STATISTICS | KW_STORED
     | KW_STREAMTABLE | KW_STRING | KW_STRUCT | 
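
A hedged example of the syntax this grammar enables, issued through JDBC (table and column names are illustrative):

  // Illustrative only: FLOOR(<expr> TO <unit>) maps to floor_month etc. above.
  try (Statement stmt = con.createStatement();
       ResultSet rs = stmt.executeQuery(
           "SELECT FLOOR(order_ts TO MONTH) FROM orders")) {
    while (rs.next()) {
      System.out.println(rs.getTimestamp(1));
    }
  }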

[09/44] hive git commit: HIVE-14782: Improve runtime of NegativeMinimrCliDriver (Prasanth Jayachandran reviewed by Siddharth Seth)

2016-09-30 Thread sershe
HIVE-14782: Improve runtime of NegativeMinimrCliDriver (Prasanth Jayachandran reviewed by Siddharth Seth)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/66af7643
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/66af7643
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/66af7643

Branch: refs/heads/hive-14535
Commit: 66af76435b5a9852f3fa1b8ce9a12c01128737fa
Parents: 0c392b1
Author: Prasanth Jayachandran 
Authored: Wed Sep 21 12:01:40 2016 -0700
Committer: Prasanth Jayachandran 
Committed: Wed Sep 21 12:01:40 2016 -0700

--
 itests/src/test/resources/testconfiguration.properties |  2 --
 .../org/apache/hadoop/hive/cli/control/CliConfigs.java |  4 ++--
 .../clientnegative/mapreduce_stack_trace_hadoop20.q| 13 -
 .../mapreduce_stack_trace_turnoff_hadoop20.q   | 13 -
 .../mapreduce_stack_trace_hadoop20.q.out   | 13 -
 .../mapreduce_stack_trace_turnoff_hadoop20.q.out   |  5 -
 .../src/test/resources/test-configuration2.properties  |  3 +--
 7 files changed, 3 insertions(+), 50 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/66af7643/itests/src/test/resources/testconfiguration.properties
--
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 4597ace..e810a58 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -698,9 +698,7 @@ minimr.query.negative.files=cluster_tasklog_retrieval.q,\
   file_with_header_footer_negative.q,\
   local_mapred_error_cache.q,\
   mapreduce_stack_trace.q,\
-  mapreduce_stack_trace_hadoop20.q,\
   mapreduce_stack_trace_turnoff.q,\
-  mapreduce_stack_trace_turnoff_hadoop20.q,\
   minimr_broken_pipe.q,\
   table_nonprintable_negative.q,\
   udf_local_resource.q

http://git-wip-us.apache.org/repos/asf/hive/blob/66af7643/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
index 69c4974..d74f51a 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
@@ -276,8 +276,8 @@ public class CliConfigs {
 setResultsDir("ql/src/test/results/clientnegative");
 setLogDir("itests/qtest/target/qfile-results/clientnegative");
 
-setInitScript("q_test_init.sql");
-setCleanupScript("q_test_cleanup.sql");
+setInitScript("q_test_init_for_encryption.sql");
+setCleanupScript("q_test_cleanup_for_encryption.sql");
 
 setHiveConfDir("");
 setClusterType(MiniClusterType.mr);

http://git-wip-us.apache.org/repos/asf/hive/blob/66af7643/ql/src/test/queries/clientnegative/mapreduce_stack_trace_hadoop20.q
--
diff --git a/ql/src/test/queries/clientnegative/mapreduce_stack_trace_hadoop20.q b/ql/src/test/queries/clientnegative/mapreduce_stack_trace_hadoop20.q
deleted file mode 100644
index 9d0548c..000
--- a/ql/src/test/queries/clientnegative/mapreduce_stack_trace_hadoop20.q
+++ /dev/null
@@ -1,13 +0,0 @@
-set hive.exec.mode.local.auto=false;
-set hive.exec.job.debug.capture.stacktraces=true;
-set hive.exec.failure.hooks=org.apache.hadoop.hive.ql.hooks.VerifySessionStateStackTracesHook;
-
-FROM src SELECT TRANSFORM(key, value) USING 'script_does_not_exist' AS (key, value);
-
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- Hadoop 0.23 changes the getTaskDiagnostics behavior
--- The Error Code of hive failure MapReduce job changes
--- In Hadoop 0.20
--- Hive failure MapReduce job gets 2 as Error Code
--- In Hadoop 0.23
--- Hive failure MapReduce job gets 2 as Error Code

http://git-wip-us.apache.org/repos/asf/hive/blob/66af7643/ql/src/test/queries/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q
--
diff --git a/ql/src/test/queries/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q b/ql/src/test/queries/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q
deleted file mode 100644
index e319944..000
--- a/ql/src/test/queries/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q
+++ /dev/null
@@ -1,13 +0,0 @@
-set hive.exec.mode.local.auto=false;
-set hive.exec.job.debug.capture.stacktraces=false;
-set 

[12/44] hive git commit: HIVE-14766 : ObjectStore.initialize() needs retry mechanisms in case of connection failures (Sushanth Sowmyan, reviewed by Thejas Nair)

2016-09-30 Thread sershe
HIVE-14766 : ObjectStore.initialize() needs retry mechanisms in case of connection failures (Sushanth Sowmyan, reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c9224d58
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c9224d58
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c9224d58

Branch: refs/heads/hive-14535
Commit: c9224d58cce6e0b0520598894e962c48ce9d97e3
Parents: ed82cfa
Author: Sushanth Sowmyan 
Authored: Wed Sep 21 23:25:04 2016 -0700
Committer: Thejas Nair 
Committed: Wed Sep 21 23:25:19 2016 -0700

--
 .../apache/hadoop/hive/metastore/FakeDerby.java | 424 +++
 .../metastore/TestObjectStoreInitRetry.java | 127 ++
 .../hadoop/hive/metastore/ObjectStore.java  |  71 
 3 files changed, 622 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/c9224d58/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/FakeDerby.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/FakeDerby.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/FakeDerby.java
new file mode 100644
index 000..51be504
--- /dev/null
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/FakeDerby.java
@@ -0,0 +1,424 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import java.lang.Exception;
+import java.lang.Override;
+import java.lang.RuntimeException;
+import java.lang.StackTraceElement;
+import java.sql.Array;
+import java.sql.Blob;
+import java.sql.CallableStatement;
+import java.sql.Clob;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.DriverManager;
+import java.sql.DriverPropertyInfo;
+import java.sql.NClob;
+import java.sql.PreparedStatement;
+import java.sql.SQLClientInfoException;
+import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
+import java.sql.SQLWarning;
+import java.sql.SQLXML;
+import java.sql.Savepoint;
+import java.sql.Statement;
+import java.sql.Struct;
+import java.util.Map;
+import java.util.concurrent.Executor;
+import java.util.logging.Logger;
+import java.util.Properties;
+
+import javax.jdo.JDOCanRetryException;
+
+import junit.framework.TestCase;
+import org.junit.Test;
+
+import org.apache.derby.jdbc.EmbeddedDriver;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.ObjectStore;
+
+import org.apache.hadoop.hive.metastore.TestObjectStoreInitRetry;
+
+
+/**
+ * Fake derby driver - companion class to enable testing by TestObjectStoreInitRetry
+ */
+public class FakeDerby extends org.apache.derby.jdbc.EmbeddedDriver {
+
+  public class Connection implements java.sql.Connection {
+
+private java.sql.Connection _baseConn;
+
+public Connection(java.sql.Connection connection) {
+  TestObjectStoreInitRetry.debugTrace();
+  this._baseConn = connection;
+}
+
+@Override
+public Statement createStatement() throws SQLException {
+  TestObjectStoreInitRetry.debugTrace();
+  return _baseConn.createStatement();
+}
+
+@Override
+public PreparedStatement prepareStatement(String sql) throws SQLException {
+  TestObjectStoreInitRetry.debugTrace();
+  return _baseConn.prepareStatement(sql);
+}
+
+@Override
+public CallableStatement prepareCall(String sql) throws SQLException {
+  TestObjectStoreInitRetry.debugTrace();
+  return _baseConn.prepareCall(sql);
+}
+
+@Override
+public String nativeSQL(String sql) throws SQLException {
+  TestObjectStoreInitRetry.debugTrace();
+  return _baseConn.nativeSQL(sql);
+}
+
+@Override
+public void setAutoCommit(boolean autoCommit) throws SQLException {
+  TestObjectStoreInitRetry.debugTrace();
+  TestObjectStoreInitRetry.misbehave();
+ 
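
The digest cuts the class off here. The visible pattern is a delegating java.sql.Connection
in which every call first reports to the test harness, so the test can inject a retryable
failure at an exact point during initialization. A sketch of what such an injector might
look like; the counter logic is hypothetical, not the committed TestObjectStoreInitRetry:

    import java.util.concurrent.atomic.AtomicInteger;
    import javax.jdo.JDOCanRetryException;

    public class FaultInjectorSketch {
      private static final AtomicInteger calls = new AtomicInteger();
      private static volatile int failAtCall = 5; // hypothetical trigger point

      // Called from every wrapped JDBC method, as in the Connection above.
      public static void misbehave() {
        if (calls.incrementAndGet() == failAtCall) {
          throw new JDOCanRetryException("injected transient failure");
        }
      }
    }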

[25/44] hive git commit: HIVE-5867: JDBC driver and beeline should support executing an initial SQL script (Jianguo Tian, via Ferdinand Xu)

2016-09-30 Thread sershe
HIVE-5867: JDBC driver and beeline should support executing an initial SQL script (Jianguo Tian, via Ferdinand Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/737fd09a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/737fd09a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/737fd09a

Branch: refs/heads/hive-14535
Commit: 737fd09a20dbb4b728307f84825f7ddc4294ae02
Parents: 19fd561
Author: Jianguo Tian 
Authored: Tue Sep 27 04:01:49 2016 +0800
Committer: Ferdinand Xu 
Committed: Tue Sep 27 04:01:49 2016 +0800

--
 .../org/apache/hive/jdbc/HiveConnection.java| 79 
 jdbc/src/java/org/apache/hive/jdbc/Utils.java   |  1 +
 .../org/apache/hive/jdbc/TestJdbcDriver.java| 98 
 3 files changed, 178 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/737fd09a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
--
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index ad96a64..ce85320 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -67,8 +67,11 @@ import javax.net.ssl.TrustManagerFactory;
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 
+import java.io.BufferedReader;
+import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.lang.reflect.InvocationHandler;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
@@ -92,6 +95,7 @@ import java.sql.SQLXML;
 import java.sql.Savepoint;
 import java.sql.Statement;
 import java.sql.Struct;
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
@@ -125,6 +129,7 @@ public class HiveConnection implements java.sql.Connection {
   private int loginTimeout = 0;
   private TProtocolVersion protocol;
   private int fetchSize = HiveStatement.DEFAULT_FETCH_SIZE;
+  private String initFile = null;
 
   public HiveConnection(String uri, Properties info) throws SQLException {
 setupLoginTimeout();
@@ -147,6 +152,9 @@ public class HiveConnection implements java.sql.Connection {
 if (sessConfMap.containsKey(JdbcConnectionParams.FETCH_SIZE)) {
   fetchSize = 
Integer.parseInt(sessConfMap.get(JdbcConnectionParams.FETCH_SIZE));
 }
+if (sessConfMap.containsKey(JdbcConnectionParams.INIT_FILE)) {
+  initFile = sessConfMap.get(JdbcConnectionParams.INIT_FILE);
+}
 
 // add supported protocols
 supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V1);
@@ -166,6 +174,7 @@ public class HiveConnection implements java.sql.Connection {
 
   // open client session
   openSession();
+  executeInitSql();
 } else {
   int maxRetries = 1;
   try {
@@ -184,6 +193,7 @@ public class HiveConnection implements java.sql.Connection {
   client = new TCLIService.Client(new TBinaryProtocol(transport));
   // open client session
   openSession();
+  executeInitSql();
 
   break;
 } catch (Exception e) {
@@ -218,6 +228,75 @@ public class HiveConnection implements java.sql.Connection 
{
 client = newSynchronizedClient(client);
   }
 
+  private void executeInitSql() throws SQLException {
+if (initFile != null) {
+  try {
+List<String> sqlList = parseInitFile(initFile);
+Statement st = createStatement();
+for(String sql : sqlList) {
+  boolean hasResult = st.execute(sql);
+  if (hasResult) {
+ResultSet rs = st.getResultSet();
+while (rs.next()) {
+  System.out.println(rs.getString(1));
+}
+  }
+}
+  } catch(Exception e) {
+LOG.error("Failed to execute initial SQL");
+throw new SQLException(e.getMessage());
+  }
+}
+  }
+
+  public static List<String> parseInitFile(String initFile) throws IOException {
+File file = new File(initFile);
+BufferedReader br = null;
+List<String> initSqlList = null;
+try {
+  FileInputStream input = new FileInputStream(file);
+  br = new BufferedReader(new InputStreamReader(input, "UTF-8"));
+  String line;
+  StringBuilder sb = new StringBuilder("");
+  while ((line = br.readLine()) != null) {
+line = line.trim();
+if (line.length() != 0) {
+  if (line.startsWith("#") || line.startsWith("--")) {
+continue;
+  } else {
+line = line.concat(" ");
+sb.append(line);
+  }
+}
+  }
+  initSqlList = 
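
The parser is truncated above; from the visible code, blank lines are dropped, lines
starting with "#" or "--" are treated as comments, and the remaining lines are joined
(presumably to be split into statements on ";"). Assuming the URL key backing
JdbcConnectionParams.INIT_FILE is "initFile", usage would look roughly like this
(host, path and script contents are illustrative):

    import java.sql.Connection;
    import java.sql.DriverManager;

    public class InitFileSketch {
      public static void main(String[] args) throws Exception {
        // /tmp/init.sql might contain, per the parser above:
        //   # comments and -- comments are ignored
        //   USE mydb;
        //   SET hive.exec.dynamic.partition=true;
        Connection conn = DriverManager.getConnection(
            "jdbc:hive2://localhost:10000/default;initFile=/tmp/init.sql",
            "user", "");
        conn.close();
      }
    }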

[18/44] hive git commit: HIVE-14820: RPC server for spark inside HS2 is not getting server address properly (Aihua Xu, reviewed by Yongzhi Chen)

2016-09-30 Thread sershe
HIVE-14820: RPC server for spark inside HS2 is not getting server address 
properly (Aihua Xu, reviewed by Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/421d97a8
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/421d97a8
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/421d97a8

Branch: refs/heads/hive-14535
Commit: 421d97a8d75490ca8ec698ef67f7ed8739e394f8
Parents: f284b6d
Author: Aihua Xu 
Authored: Thu Sep 22 15:46:21 2016 -0400
Committer: Aihua Xu 
Committed: Fri Sep 23 09:15:36 2016 -0400

--
 .../hive/spark/client/rpc/RpcConfiguration.java |  4 +--
 .../apache/hive/spark/client/rpc/TestRpc.java   | 29 +++-
 2 files changed, 30 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/421d97a8/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java
--
diff --git 
a/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java
 
b/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java
index e387659..210f8a4 100644
--- 
a/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java
+++ 
b/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java
@@ -97,11 +97,11 @@ public final class RpcConfiguration {
* @throws IOException
*/
   String getServerAddress() throws IOException {
-String hiveHost = config.get(HiveConf.ConfVars.SPARK_RPC_SERVER_ADDRESS);
+String hiveHost = 
config.get(HiveConf.ConfVars.SPARK_RPC_SERVER_ADDRESS.varname);
 if(StringUtils.isEmpty(hiveHost)) {
   hiveHost = System.getenv("HIVE_SERVER2_THRIFT_BIND_HOST");
   if (hiveHost == null) {
-hiveHost = config.get(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST);
+hiveHost = 
config.get(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST.varname);
   }
 }
 return ServerUtils.getHostAddress(hiveHost).getHostName();
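
The one-line fix matters because the config here is a plain Map<String, String> keyed by
property names, while ConfVars is an enum: Map.get(Object) accepts the enum constant,
compiles cleanly, and silently returns null. A self-contained illustration (the enum and
property string below are stand-ins, not Hive's actual definitions):

    import java.util.HashMap;
    import java.util.Map;

    public class VarnameSketch {
      enum ConfVars {
        SPARK_RPC_SERVER_ADDRESS("hive.spark.client.rpc.server.address"); // illustrative value
        final String varname;
        ConfVars(String v) { this.varname = v; }
      }

      public static void main(String[] args) {
        Map<String, String> config = new HashMap<String, String>();
        config.put(ConfVars.SPARK_RPC_SERVER_ADDRESS.varname, "myhost");

        // Bug: the enum constant is never a key of the map, so this is always null.
        System.out.println(config.get(ConfVars.SPARK_RPC_SERVER_ADDRESS));
        // Fix: look up by the property-name string.
        System.out.println(config.get(ConfVars.SPARK_RPC_SERVER_ADDRESS.varname));
      }
    }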

http://git-wip-us.apache.org/repos/asf/hive/blob/421d97a8/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java
--
diff --git 
a/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java 
b/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java
index d7969c9..7bcf1df 100644
--- a/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java
+++ b/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java
@@ -18,26 +18,30 @@
 package org.apache.hive.spark.client.rpc;
 
 import java.io.Closeable;
+import java.net.InetAddress;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.CancellationException;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicInteger;
+
 import javax.security.sasl.SaslException;
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
+
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.channel.embedded.EmbeddedChannel;
 import io.netty.channel.nio.NioEventLoopGroup;
 import io.netty.util.concurrent.Future;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -117,6 +121,29 @@ public class TestRpc {
   }
 
   @Test
+  public void testServerAddress() throws Exception {
+String hostAddress = InetAddress.getLocalHost().getHostName();
+Map<String, String> config = new HashMap<String, String>();
+
+// Test if rpc_server_address is configured
+config.put(HiveConf.ConfVars.SPARK_RPC_SERVER_ADDRESS.varname, 
hostAddress);
+RpcServer server1 = autoClose(new RpcServer(config));
+assertTrue("Host address should match the expected one", 
server1.getAddress() == hostAddress);
+
+// Test if rpc_server_address is not configured but HS2 server host is 
configured
+config.put(HiveConf.ConfVars.SPARK_RPC_SERVER_ADDRESS.varname, "");
+config.put(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST.varname, 
hostAddress);
+RpcServer server2 = autoClose(new RpcServer(config));
+assertTrue("Host address should match the expected one", 
server2.getAddress() == hostAddress);
+
+// Test if both are not configured
+config.put(HiveConf.ConfVars.SPARK_RPC_SERVER_ADDRESS.varname, "");
+config.put(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST.varname, "");
+RpcServer server3 = autoClose(new 

[28/44] hive git commit: HIVE-14835: Improve ptest2 build time (Prasanth Jayachandran reviewed by Sergio Pena)

2016-09-30 Thread sershe
HIVE-14835: Improve ptest2 build time (Prasanth Jayachandran reviewed by Sergio 
Pena)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d3b88f66
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d3b88f66
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d3b88f66

Branch: refs/heads/hive-14535
Commit: d3b88f664415ff114de74aa2a0da2f1e1acbf60d
Parents: 0c55d46
Author: Prasanth Jayachandran 
Authored: Tue Sep 27 10:59:33 2016 -0700
Committer: Prasanth Jayachandran 
Committed: Tue Sep 27 10:59:33 2016 -0700

--
 dev-support/jenkins-execute-build.sh   | 4 +---
 testutils/ptest2/src/main/resources/source-prep.vm | 4 ++--
 2 files changed, 3 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/d3b88f66/dev-support/jenkins-execute-build.sh
--
diff --git a/dev-support/jenkins-execute-build.sh 
b/dev-support/jenkins-execute-build.sh
index 2142942..972abae 100644
--- a/dev-support/jenkins-execute-build.sh
+++ b/dev-support/jenkins-execute-build.sh
@@ -70,9 +70,7 @@ test -n "$TEST_HANDLE" || fail "TEST_HANDLE must be specified 
and cannot be empt
 test -n "$PTEST_API_ENDPOINT" || fail "PTEST_API_ENDPOINT must be specified 
and cannot be empty."
 test -n "$PTEST_LOG_ENDPOINT" || fail "PTEST_LOG_ENDPOINT must be specified 
and cannot be empty."
 
-# WORKSPACE is an environment variable created by Jenkins, and it is the 
directory where the build is executed.
-# If not set, then default to $HOME
-MVN_REPO_LOCAL=${WORKSPACE:-$HOME}/.m2/repository
+MVN_REPO_LOCAL=${HOME}/.m2/repository
 
 # Directory where to build the ptest framework
 PTEST_BUILD_DIR="$PWD/hive/build"

http://git-wip-us.apache.org/repos/asf/hive/blob/d3b88f66/testutils/ptest2/src/main/resources/source-prep.vm
--
diff --git a/testutils/ptest2/src/main/resources/source-prep.vm 
b/testutils/ptest2/src/main/resources/source-prep.vm
index 67e6a95..0fc22be 100644
--- a/testutils/ptest2/src/main/resources/source-prep.vm
+++ b/testutils/ptest2/src/main/resources/source-prep.vm
@@ -102,11 +102,11 @@ cd $workingDir/
 fi
   done
 #end
-mvn -B clean install -DskipTests -Dmaven.repo.local=$workingDir/maven 
$mavenArgs $mavenBuildArgs
+mvn -B clean install -DskipTests -T 4 -q 
-Dmaven.repo.local=$workingDir/maven $mavenArgs $mavenBuildArgs
 if [[ -d "itests" ]]
 then
   cd itests
-  mvn -B clean install -DskipTests -Dmaven.repo.local=$workingDir/maven 
$mavenArgs $mavenBuildArgs
+  mvn -B clean install -DskipTests -T 4 -q 
-Dmaven.repo.local=$workingDir/maven $mavenArgs $mavenBuildArgs
 fi
   elif [[ "${buildTool}" == "ant" ]]
   then



[26/44] hive git commit: HIVE-14358: Add metrics for number of queries executed for each execution engine (Barna Zsombor Klara, reviewed by Gabor Szadovszky, Yongzhi Chen)

2016-09-30 Thread sershe
HIVE-14358: Add metrics for number of queries executed for each execution 
engine (Barna Zsombor Klara, reviewed by Gabor Szadovszky, Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0c55d46f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0c55d46f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0c55d46f

Branch: refs/heads/hive-14535
Commit: 0c55d46f2afdc7c282304839a10ac39221520316
Parents: 737fd09
Author: Yongzhi Chen 
Authored: Mon Sep 26 13:55:28 2016 -0400
Committer: Yongzhi Chen 
Committed: Tue Sep 27 09:23:16 2016 -0400

--
 .../common/metrics/common/MetricsConstant.java  |  7 +++
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  5 +++
 .../org/apache/hadoop/hive/ql/exec/Task.java|  9 
 .../hadoop/hive/ql/exec/mr/MapRedTask.java  | 11 +
 .../hadoop/hive/ql/exec/mr/MapredLocalTask.java | 11 +
 .../hadoop/hive/ql/exec/spark/SparkTask.java| 11 +
 .../apache/hadoop/hive/ql/exec/tez/TezTask.java | 11 +
 .../hadoop/hive/ql/exec/mr/TestMapRedTask.java  | 47 
 .../hive/ql/exec/mr/TestMapredLocalTask.java| 46 +++
 .../hive/ql/exec/spark/TestSparkTask.java   | 46 +++
 .../hadoop/hive/ql/exec/tez/TestTezTask.java| 17 +++
 11 files changed, 221 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/0c55d46f/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java
--
diff --git 
a/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java
 
b/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java
index 9dc96f9..c9d4087 100644
--- 
a/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java
+++ 
b/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsConstant.java
@@ -61,4 +61,11 @@ public class MetricsConstant {
   // The number of Hive operations that are waiting to enter the compile block
   public static final String WAITING_COMPILE_OPS = "waiting_compile_ops";
 
+  // The number of map reduce tasks executed by the HiveServer2 since the last 
restart
+  public static final String HIVE_MR_TASKS = "hive_mapred_tasks";
+  // The number of spark tasks executed by the HiveServer2 since the last 
restart
+  public static final String HIVE_SPARK_TASKS = "hive_spark_tasks";
+  // The number of tez tasks executed by the HiveServer2 since the last restart
+  public static final String HIVE_TEZ_TASKS = "hive_tez_tasks";
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/0c55d46f/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java 
b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 42d398d..03c56e1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -1671,6 +1671,11 @@ public class Driver implements CommandProcessor {
 // incorrect results.
 assert tsk.getParentTasks() == null || tsk.getParentTasks().isEmpty();
 driverCxt.addToRunnable(tsk);
+
+Metrics metrics = MetricsFactory.getInstance();
+if (metrics != null) {
+  tsk.updateTaskMetrics(metrics);
+}
   }
 
   perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.RUN_TASKS);

http://git-wip-us.apache.org/repos/asf/hive/blob/0c55d46f/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
index eeaa543..e1bd291 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
@@ -27,6 +27,8 @@ import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 
+import org.apache.hadoop.hive.common.metrics.common.Metrics;
+import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
@@ -534,6 +536,13 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
 }
   }
 
+  /**
+   * Provide metrics on the type and number of tasks executed by the HiveServer
+   * @param metrics
+   */
+  public void updateTaskMetrics(Metrics metrics) {
+// no metrics gathered by default
+   }
 
   public int getTaskTag() {
 return taskTag;
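
The hook above is deliberately a no-op; each engine-specific task overrides it to bump its
own counter. A sketch of such an override using the constants added by this patch, assuming
Metrics.incrementCounter(String) as the counter API:

    import org.apache.hadoop.hive.common.metrics.common.Metrics;
    import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;

    // Hypothetical engine task; the committed MapRedTask/SparkTask/TezTask
    // overrides follow this same shape with their own constant.
    public class MyEngineTaskSketch {
      public void updateTaskMetrics(Metrics metrics) {
        // Count one more execution of this engine's task type.
        metrics.incrementCounter(MetricsConstant.HIVE_MR_TASKS);
      }
    }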


[07/44] hive git commit: HIVE-14714: Avoid misleading "java.io.IOException: Stream closed" when shutting down HoS (Gabor Szadovszky via Rui)

2016-09-30 Thread sershe
HIVE-14714: Avoid misleading "java.io.IOException: Stream closed" when shutting 
down HoS (Gabor Szadovszky via Rui)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/96508d34
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/96508d34
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/96508d34

Branch: refs/heads/hive-14535
Commit: 96508d341fbae4d9ff4268bdcff4849893d9c277
Parents: 62c45de
Author: Gabor Szadovszky 
Authored: Wed Sep 21 16:17:07 2016 +0800
Committer: Rui Li 
Committed: Wed Sep 21 16:17:07 2016 +0800

--
 .../java/org/apache/hive/spark/client/SparkClientImpl.java   | 8 
 1 file changed, 8 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/96508d34/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
--
diff --git 
a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java 
b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
index e8ca42a..936fdaf 100644
--- 
a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
+++ 
b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
@@ -621,6 +621,14 @@ class SparkClientImpl implements SparkClient {
 }
   }
 }
+  } catch (IOException e) {
+if (isAlive) {
+  LOG.warn("I/O error in redirector thread.", e);
+} else {
+  // When stopping the remote driver the process might be destroyed 
during reading from the stream.
+  // We should not log the related exceptions in a visible level as 
they might mislead the user.
+  LOG.debug("I/O error in redirector thread while stopping the remote 
driver", e);
+}
   } catch (Exception e) {
 LOG.warn("Error in redirector thread.", e);
   }



[22/44] hive git commit: HIVE-14818. Reduce number of retries while starting HiveServer for tests. (Siddharth Seth, reviewed by Prasanth Jayachandran)

2016-09-30 Thread sershe
HIVE-14818. Reduce number of retries while starting HiveServer for tests. 
(Siddharth Seth, reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a2131154
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a2131154
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a2131154

Branch: refs/heads/hive-14535
Commit: a21311544a4935785aa964e6f2c66b1669982e8a
Parents: e08d94e
Author: Siddharth Seth 
Authored: Fri Sep 23 14:58:20 2016 -0700
Committer: Siddharth Seth 
Committed: Fri Sep 23 14:58:20 2016 -0700

--
 common/src/java/org/apache/hadoop/hive/conf/HiveConf.java| 8 ++--
 .../src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java  | 4 
 .../src/java/org/apache/hive/service/server/HiveServer2.java | 7 +--
 3 files changed, 15 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/a2131154/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index ccdfca6..43a16d7 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -2172,9 +2172,13 @@ public class HiveConf extends Configuration {
 "SSL Versions to disable for all Hive Servers"),
 
  // HiveServer2 specific configs
+
HIVE_SERVER2_SLEEP_INTERVAL_BETWEEN_START_ATTEMPTS("hive.server2.sleep.interval.between.start.attempts",
+"60s", new TimeValidator(TimeUnit.MILLISECONDS, 0l, true, 
Long.MAX_VALUE, true),
+"Amount of time to sleep between HiveServer2 start attempts. Primarily 
meant for tests"),
 HIVE_SERVER2_MAX_START_ATTEMPTS("hive.server2.max.start.attempts", 30L, 
new RangeValidator(0L, null),
-"Number of times HiveServer2 will attempt to start before exiting, 
sleeping 60 seconds " +
-"between retries. \n The default of 30 will keep trying for 30 
minutes."),
+"Number of times HiveServer2 will attempt to start before exiting. The 
sleep interval between retries" +
+" is determined by " + 
ConfVars.HIVE_SERVER2_SLEEP_INTERVAL_BETWEEN_START_ATTEMPTS.varname +
+"\n The default of 30 will keep trying for 30 minutes."),
 
HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY("hive.server2.support.dynamic.service.discovery",
 false,
 "Whether HiveServer2 supports dynamic service discovery for its 
clients. " +
 "To support this, each instance of HiveServer2 currently uses 
ZooKeeper to register itself, " +

http://git-wip-us.apache.org/repos/asf/hive/blob/a2131154/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
--
diff --git 
a/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java 
b/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
index abb80a2..176761f 100644
--- a/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
+++ b/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
@@ -22,6 +22,7 @@ import java.io.File;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -187,6 +188,9 @@ public class MiniHS2 extends AbstractHiveService {
 super(hiveConf, "localhost",
 (usePortsFromConf ? 
hiveConf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT) : 
MetaStoreUtils.findFreePort()),
 (usePortsFromConf ? 
hiveConf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT) : 
MetaStoreUtils.findFreePort()));
+hiveConf.setLongVar(ConfVars.HIVE_SERVER2_MAX_START_ATTEMPTS, 3l);
+
hiveConf.setTimeVar(ConfVars.HIVE_SERVER2_SLEEP_INTERVAL_BETWEEN_START_ATTEMPTS,
 10,
+TimeUnit.SECONDS);
 this.miniClusterType = miniClusterType;
 this.useMiniKdc = useMiniKdc;
 this.serverPrincipal = serverPrincipal;

http://git-wip-us.apache.org/repos/asf/hive/blob/a2131154/service/src/java/org/apache/hive/service/server/HiveServer2.java
--
diff --git a/service/src/java/org/apache/hive/service/server/HiveServer2.java 
b/service/src/java/org/apache/hive/service/server/HiveServer2.java
index 58e9aeb..590b1f3 100644
--- a/service/src/java/org/apache/hive/service/server/HiveServer2.java
+++ b/service/src/java/org/apache/hive/service/server/HiveServer2.java
@@ -544,6 +544,9 @@ public class HiveServer2 extends CompositeService {
   LOG.info("Starting 
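
The HiveServer2 diff is truncated here; conceptually, startup now loops up to
HIVE_SERVER2_MAX_START_ATTEMPTS times and sleeps for the newly configurable interval
between attempts instead of a hard-coded 60 seconds. A rough sketch of that loop
(not the committed code):

    import java.util.concurrent.TimeUnit;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

    public class StartRetrySketch {
      static void startWithRetries(HiveConf hiveConf) throws Exception {
        long maxAttempts = hiveConf.getLongVar(ConfVars.HIVE_SERVER2_MAX_START_ATTEMPTS);
        long sleepMs = hiveConf.getTimeVar(
            ConfVars.HIVE_SERVER2_SLEEP_INTERVAL_BETWEEN_START_ATTEMPTS,
            TimeUnit.MILLISECONDS);
        for (long attempt = 1; ; attempt++) {
          try {
            // start the services here
            return;
          } catch (Throwable t) {
            if (attempt >= maxAttempts) {
              throw new Exception("exhausted " + maxAttempts + " start attempts", t);
            }
            Thread.sleep(sleepMs); // configurable; tests can shrink it
          }
        }
      }
    }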

[41/44] hive git commit: HIVE-14100: Adding a new logged_in_user() UDF which returns the user provided when connecting (Peter Vary, reviewed by Mohit Sabharwal)

2016-09-30 Thread sershe
HIVE-14100: Adding a new logged_in_user() UDF which returns the user provided 
when connecting (Peter Vary, reviewed by Mohit Sabharwal)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/45c1a09b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/45c1a09b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/45c1a09b

Branch: refs/heads/hive-14535
Commit: 45c1a09b7b76e41f05520de4bb0e26bb6fadc21f
Parents: 0562efc
Author: Mohit Sabharwal 
Authored: Fri Sep 30 13:54:31 2016 -0400
Committer: Mohit Sabharwal 
Committed: Fri Sep 30 13:57:10 2016 -0400

--
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |  1 +
 .../ql/udf/generic/GenericUDFLoggedInUser.java  | 82 
 .../queries/clientpositive/udf_logged_in_user.q |  5 ++
 .../results/clientpositive/show_functions.q.out |  5 ++
 .../clientpositive/udf_logged_in_user.q.out | 22 ++
 5 files changed, 115 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/45c1a09b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index b277f5e..6870dfa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -344,6 +344,7 @@ public final class FunctionRegistry {
 system.registerGenericUDF("current_date", GenericUDFCurrentDate.class);
 system.registerGenericUDF("current_timestamp", 
GenericUDFCurrentTimestamp.class);
 system.registerGenericUDF("current_user", GenericUDFCurrentUser.class);
+system.registerGenericUDF("logged_in_user", GenericUDFLoggedInUser.class);
 
 system.registerGenericUDF("isnull", GenericUDFOPNull.class);
 system.registerGenericUDF("isnotnull", GenericUDFOPNotNull.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/45c1a09b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLoggedInUser.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLoggedInUser.java 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLoggedInUser.java
new file mode 100644
index 000..2915b86
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLoggedInUser.java
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.udf.UDFType;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.Text;
+
+@UDFType(deterministic = true)
+@Description(name = "logged_in_user", value = "_FUNC_() - Returns logged in 
user name",
+extended = "SessionState GetUserName - the username provided at 
session initialization")
+@NDV(maxNdv = 1)
+public class GenericUDFLoggedInUser extends GenericUDF {
+  protected Text loggedInUser;
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws 
UDFArgumentException {
+if (arguments.length != 0) {
+  throw new UDFArgumentLengthException(
+  "The function LOGGED_IN_USER does not take any arguments, but found 
" + arguments.length);
+}
+
+if (loggedInUser == null) {
+  String loggedInUserName = SessionState.get().getUserName();
+  if (loggedInUserName != null) {
+loggedInUser = new Text(loggedInUserName);
+  }
+}
+
+
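
The message is truncated before evaluate(); given the cached loggedInUser field above, the
remaining piece presumably just returns that Text. A hedged sketch, not the committed code:

    // Hypothetical completion, mirroring the initialize() shown above:
    // @Override
    // public Object evaluate(DeferredObject[] arguments) throws HiveException {
    //   return loggedInUser; // null if no user name was supplied at connect time
    // }
    //
    // Invocation from SQL, per the added udf_logged_in_user.q test:
    //   SELECT logged_in_user();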

[37/44] hive git commit: HIVE-14852. Change qtest logging to not redirect all logs to console. (Siddharth Seth, reviewed by Prasanth Jayachandran)

2016-09-30 Thread sershe
HIVE-14852. Change qtest logging to not redirect all logs to console. 
(Siddharth Seth, reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a6c60807
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a6c60807
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a6c60807

Branch: refs/heads/hive-14535
Commit: a6c60807eb1daccb940d9caaeb2f7cafa7643afe
Parents: 20304c0
Author: Siddharth Seth 
Authored: Thu Sep 29 10:42:21 2016 -0700
Committer: Siddharth Seth 
Committed: Thu Sep 29 10:43:01 2016 -0700

--
 itests/qtest-spark/pom.xml | 2 ++
 itests/qtest/pom.xml   | 2 ++
 pom.xml| 3 +++
 3 files changed, 7 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/a6c60807/itests/qtest-spark/pom.xml
--
diff --git a/itests/qtest-spark/pom.xml b/itests/qtest-spark/pom.xml
index 1e6c3a2..240852e 100644
--- a/itests/qtest-spark/pom.xml
+++ b/itests/qtest-spark/pom.xml
@@ -30,6 +30,8 @@
 
   
     <hive.path.to.root>../..</hive.path.to.root>
+
+    <test.console.log.level>OFF</test.console.log.level>
 
 
 8.1.14.v20131031

http://git-wip-us.apache.org/repos/asf/hive/blob/a6c60807/itests/qtest/pom.xml
--
diff --git a/itests/qtest/pom.xml b/itests/qtest/pom.xml
index e762d0e..72028f3 100644
--- a/itests/qtest/pom.xml
+++ b/itests/qtest/pom.xml
@@ -30,6 +30,8 @@
 
   
     <hive.path.to.root>../..</hive.path.to.root>
+
+    <test.console.log.level>OFF</test.console.log.level>
 
 
 

http://git-wip-us.apache.org/repos/asf/hive/blob/a6c60807/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 756cc34..5d13344 100644
--- a/pom.xml
+++ b/pom.xml
@@ -78,6 +78,8 @@
 file://
 ${project.build.directory}/tmp
 file://${test.tmp.dir}
+
+    <test.console.log.level>INFO</test.console.log.level>
 
${project.build.directory}/warehouse
 pfile://
 
@@ -1028,6 +1030,7 @@
 
${maven.repo.local}
 local
 
${test.log4j.scheme}${test.tmp.dir}/conf/hive-log4j2.properties
+          <test.console.log.level>${test.console.log.level}</test.console.log.level>
 true
 
 ${test.tmp.dir}



[06/44] hive git commit: HIVE-14793. Allow ptest branch to be specified, PROFILE override. (Siddharth Seth, reviewed by Sergio Peña)

2016-09-30 Thread sershe
HIVE-14793. Allow ptest branch to be specified, PROFILE override. (Siddharth 
Seth, reviewed by Sergio Peña)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/62c45de1
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/62c45de1
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/62c45de1

Branch: refs/heads/hive-14535
Commit: 62c45de1c12b2f8db6d726462cc93382aedc0905
Parents: 1977402
Author: Siddharth Seth 
Authored: Tue Sep 20 15:37:49 2016 -0700
Committer: Siddharth Seth 
Committed: Tue Sep 20 15:37:49 2016 -0700

--
 dev-support/jenkins-execute-build.sh | 28 +---
 1 file changed, 21 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/62c45de1/dev-support/jenkins-execute-build.sh
--
diff --git a/dev-support/jenkins-execute-build.sh 
b/dev-support/jenkins-execute-build.sh
index b2ba8e9..2142942 100644
--- a/dev-support/jenkins-execute-build.sh
+++ b/dev-support/jenkins-execute-build.sh
@@ -26,7 +26,16 @@ build_ptest_client() {
test -d $PTEST_BUILD_DIR || mkdir -p $PTEST_BUILD_DIR
cd $PTEST_BUILD_DIR &&  rm -rf hive
 
-   git clone --depth 1 https://github.com/apache/hive.git
+  unset GIT_CLONE_ARGS
+  if [ -n "${PTEST_GIT_BRANCH}" ]; then
+GIT_CLONE_ARGS=" -b ${PTEST_GIT_BRANCH}"
+  fi
+  if [ -z "${PTEST_GIT_REPO}" ]; then
+PTEST_GIT_REPO=https://github.com/apache/hive.git
+  fi
+  GIT_CLONE_ARGS=${GIT_CLONE_ARGS}" ${PTEST_GIT_REPO} hive"
+
+   git clone --depth 1 ${GIT_CLONE_ARGS}
cd hive/testutils/ptest2
mvn clean package -DskipTests -Drat.numUnapprovedLicenses=1000 
-Dmaven.repo.local=$MVN_REPO_LOCAL
 }
@@ -99,10 +108,13 @@ if [ -n "$JIRA_ISSUE" ]; then
fail "attachment $attachment_id is already tested for 
$JIRA_ISSUE"
fi
 
-   BUILD_PROFILE=`get_branch_profile $JIRA_PATCH_URL $JIRA_INFO_FILE`
-   if [ -z "$BUILD_PROFILE" ]; then
-   BUILD_PROFILE="$DEFAULT_BUILD_PROFILE"
-   fi
+  # Use the BUILD_PROFILE if it is provided. 
+  if [ -z ${BUILD_PROFILE} ]; then
+ BUILD_PROFILE=`get_branch_profile $JIRA_PATCH_URL $JIRA_INFO_FILE`
+ if [ -z "$BUILD_PROFILE" ]; then
+   BUILD_PROFILE="$DEFAULT_BUILD_PROFILE"
+ fi
+  fi
 
if is_clear_cache_set $JIRA_INFO_FILE; then
optionalArgs+=(--clearLibraryCache)
@@ -112,8 +124,10 @@ if [ -n "$JIRA_ISSUE" ]; then
 
echo "ISSUE: $JIRA_ISSUE PROFILE: $BUILD_PROFILE"
 else
-   # If not JIRA is specified, then use a default profile
-   BUILD_PROFILE="$DEFAULT_BUILD_PROFILE"
+   # If not JIRA is specified, and no BUILD_PROFILE provided, then use a 
default profile
+  if [ -z ${BUILD_PROFILE} ]; then
+   BUILD_PROFILE="$DEFAULT_BUILD_PROFILE"
+  fi
 
echo "ISSUE: unspecified PROFILE: $BUILD_PROFILE"
 fi



[04/44] hive git commit: HIVE-14651. Add a local cluster for Tez and LLAP. (Siddharth Seth, reviewed by Prasanth Jayachandran, Sergey Shelukhin)

2016-09-30 Thread sershe
HIVE-14651. Add a local cluster for Tez and LLAP. (Siddharth Seth, reviewed by
Prasanth Jayachandran, Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e297a157
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e297a157
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e297a157

Branch: refs/heads/hive-14535
Commit: e297a157cfa57f0bd08843bf770856b2f168da75
Parents: 3c55115
Author: Siddharth Seth 
Authored: Tue Sep 20 11:09:11 2016 -0700
Committer: Siddharth Seth 
Committed: Tue Sep 20 11:09:11 2016 -0700

--
 data/conf/llap/tez-site.xml |  12 +
 data/conf/tez/hive-site.xml |  10 -
 .../org/apache/hadoop/hive/ql/QTestUtil.java| 281 ++-
 .../tezplugins/LlapTaskSchedulerService.java|   2 +-
 .../apache/hadoop/hive/shims/Hadoop23Shims.java |  73 +++--
 .../apache/hadoop/hive/shims/HadoopShims.java   |   2 +
 6 files changed, 274 insertions(+), 106 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e297a157/data/conf/llap/tez-site.xml
--
diff --git a/data/conf/llap/tez-site.xml b/data/conf/llap/tez-site.xml
index 940f390..6f1b9d2 100644
--- a/data/conf/llap/tez-site.xml
+++ b/data/conf/llap/tez-site.xml
@@ -1,6 +1,18 @@
 <configuration>
+
   <property>
     <name>tez.am.dag.scheduler.class</name>
     <value>org.apache.tez.dag.app.dag.impl.DAGSchedulerNaturalOrderControlled</value>
   </property>
+
+  <property>
+    <name>tez.am.task.max.failed.attempts</name>
+    <value>2</value>
+  </property>
+  <property>
+    <name>tez.runtime.shuffle.connect.timeout</name>
+    <value>2</value>
+  </property>
+
 </configuration>

http://git-wip-us.apache.org/repos/asf/hive/blob/e297a157/data/conf/tez/hive-site.xml
--
diff --git a/data/conf/tez/hive-site.xml b/data/conf/tez/hive-site.xml
index f3e4dae..dbff10c 100644
--- a/data/conf/tez/hive-site.xml
+++ b/data/conf/tez/hive-site.xml
@@ -269,16 +269,6 @@
 </property>
 
 <property>
-  <name>hive.metastore.fastpath</name>
-  <value>true</value>
-</property>
-
-<property>
-  <name>hive.metastore.rawstore.impl</name>
-  <value>org.apache.hadoop.hive.metastore.hbase.HBaseStore</value>
-</property>
-
-<property>
   <name>hive.orc.splits.ms.footer.cache.enabled</name>
   <value>true</value>
 </property>

http://git-wip-us.apache.org/repos/asf/hive/blob/e297a157/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java 
b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index f456dfb..0dfd727 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -63,6 +63,7 @@ import java.util.concurrent.TimeUnit;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import com.google.common.base.Preconditions;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
@@ -91,6 +92,7 @@ import org.apache.hadoop.hive.llap.daemon.impl.LlapDaemon;
 import org.apache.hadoop.hive.llap.io.api.LlapProxy;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.hbase.HBaseStore;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -117,6 +119,7 @@ import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.util.Shell;
 import org.apache.hive.common.util.StreamPrinter;
+import org.apache.logging.log4j.util.Strings;
 import org.apache.tools.ant.BuildException;
 import org.apache.zookeeper.WatchedEvent;
 import org.apache.zookeeper.Watcher;
@@ -147,8 +150,13 @@ public class QTestUtil {
   private final static String defaultCleanupScript = "q_test_cleanup.sql";
   private final String[] testOnlyCommands = new String[]{"crypto"};
 
+  private static final String TEST_TMP_DIR_PROPERTY = "test.tmp.dir"; // 
typically target/tmp
+  private static final String BUILD_DIR_PROPERTY = "build.dir"; // typically 
target
+
   private String testWarehouse;
   private final String testFiles;
+  private final boolean useLocalFs;
+  private final boolean localMode;
   protected final String outDir;
   protected final String logDir;
   private final TreeMap<String, String> qMap;
@@ -161,7 +169,7 @@ public class QTestUtil {
   private final Set<String> qJavaVersionSpecificOutput;
   private static final String SORT_SUFFIX = ".sorted";
   private final HashSet<String> srcTables;
-  private static MiniClusterType clusterType = MiniClusterType.none;
+  private final MiniClusterType clusterType;
   private ParseDriver pd;
   protected Hive db;
   protected QueryState queryState;

[14/44] hive git commit: HIVE-14774: Canceling query using Ctrl-C in beeline might lead to stale locks (Chaoyu Tang, reviewed by Jimmy Xiang, Mohit Sabharwal)

2016-09-30 Thread sershe
HIVE-14774: Canceling query using Ctrl-C in beeline might lead to stale locks 
(Chaoyu Tang, reviewed by Jimmy Xiang, Mohit Sabharwal)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3237bb27
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3237bb27
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3237bb27

Branch: refs/heads/hive-14535
Commit: 3237bb276b79d2a54d0a5d8a5ede400147017618
Parents: 1a3e4be
Author: ctang 
Authored: Thu Sep 22 14:47:47 2016 -0400
Committer: ctang 
Committed: Thu Sep 22 14:47:47 2016 -0400

--
 .../hive/service/cli/operation/SQLOperation.java  | 18 +++---
 1 file changed, 11 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/3237bb27/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
--
diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java 
b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
index 6f2daf3..abdf8cd 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
@@ -399,6 +399,17 @@ public class SQLOperation extends 
ExecuteStatementOperation {
 
   private synchronized void cleanup(OperationState state) throws 
HiveSQLException {
 setState(state);
+
+if (shouldRunAsync()) {
+  Future<?> backgroundHandle = getBackgroundHandle();
+  if (backgroundHandle != null) {
+boolean success = backgroundHandle.cancel(true);
+if (success) {
+  LOG.info("The running operation has been successfully interrupted.");
+}
+  }
+}
+
 if (driver != null) {
   driver.close();
   driver.destroy();
@@ -413,13 +424,6 @@ public class SQLOperation extends 
ExecuteStatementOperation {
   ss.deleteTmpErrOutputFile();
 }
 
-if (shouldRunAsync()) {
-  Future<?> backgroundHandle = getBackgroundHandle();
-  if (backgroundHandle != null) {
-backgroundHandle.cancel(true);
-  }
-}
-
 // Shutdown the timeout thread if any, while closing this operation
 if ((timeoutExecutor != null) && (state != OperationState.TIMEDOUT) && 
(state.isTerminal())) {
   timeoutExecutor.shutdownNow();
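
The reorder above matters because Future.cancel(true) interrupts the background statement
thread, and that has to happen before the driver is closed and destroyed; otherwise the
still-running query can leave locks behind. A tiny standalone illustration of
cancel-with-interrupt (names are illustrative):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;

    public class CancelSketch {
      public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        Future<?> backgroundHandle = pool.submit(new Runnable() {
          public void run() {
            try {
              Thread.sleep(60000); // stand-in for a long-running query
            } catch (InterruptedException e) {
              // interruption is the signal to stop and release resources (locks)
            }
          }
        });
        boolean success = backgroundHandle.cancel(true); // true => interrupt the thread
        System.out.println("interrupted: " + success);
        pool.shutdown();
      }
    }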



[40/44] hive git commit: HIVE-14784: Operation logs are disabled automatically if the parent directory does not exist. (Naveen Gangam via Yongzhi Chen)

2016-09-30 Thread sershe
HIVE-14784: Operation logs are disabled automatically if the parent directory 
does not exist. (Naveen Gangam via Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0562efce
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0562efce
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0562efce

Branch: refs/heads/hive-14535
Commit: 0562efce642e70f1ac69eae6cca8c0a63230bafd
Parents: 74a6ff6
Author: Yongzhi Chen 
Authored: Fri Sep 30 10:39:11 2016 -0400
Committer: Yongzhi Chen 
Committed: Fri Sep 30 10:41:37 2016 -0400

--
 .../org/apache/hive/service/cli/operation/Operation.java | 11 +++
 1 file changed, 11 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/0562efce/service/src/java/org/apache/hive/service/cli/operation/Operation.java
--
diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/Operation.java 
b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
index 90fe76d..6a656f9 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/Operation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
@@ -234,6 +234,17 @@ public abstract class Operation {
   operationLogFile.getAbsolutePath());
   operationLogFile.delete();
 }
+if (!operationLogFile.getParentFile().exists()) {
+  LOG.warn("Operations log directory for this session does not exist, 
it could have been deleted " +
+  "externally. Recreating the directory for future queries in this 
session but the older operation " +
+  "logs for this session are no longer available");
+  if (!operationLogFile.getParentFile().mkdir()) {
+LOG.warn("Log directory for this session could not be created, 
disabling " +
+"operation logs: " + 
operationLogFile.getParentFile().getAbsolutePath());
+isOperationLogEnabled = false;
+return;
+  }
+}
 if (!operationLogFile.createNewFile()) {
   // the log file already exists and cannot be deleted.
   // If it can be read/written, keep its contents and use it.



[44/44] hive git commit: HIVE-14671 : merge master into hive-14535 (Sergey Shelukhin)

2016-09-30 Thread sershe
HIVE-14671 : merge master into hive-14535 (Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6d914483
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6d914483
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6d914483

Branch: refs/heads/hive-14535
Commit: 6d9144835cbdda27fea172f0fb268f4f4aac1a0c
Parents: 70299dc 297b443
Author: Sergey Shelukhin 
Authored: Fri Sep 30 18:07:43 2016 -0700
Committer: Sergey Shelukhin 
Committed: Fri Sep 30 18:07:43 2016 -0700

--
 .../java/org/apache/hive/beeline/BeeLine.java   |  20 +-
 .../org/apache/hive/beeline/BeeLineOpts.java|   2 +-
 beeline/src/main/resources/BeeLine.properties   |  11 +-
 .../beeline/TestBeeLineExceptionHandling.java   |  72 ++
 .../hive/common/metrics/LegacyMetrics.java  |  96 +--
 .../hive/common/metrics/MetricsMBean.java   |  13 +-
 .../hive/common/metrics/MetricsMBeanImpl.java   |  16 +-
 .../hive/common/metrics/common/Metrics.java |  31 +-
 .../common/metrics/common/MetricsConstant.java  |   7 +
 .../metrics/metrics2/CodahaleMetrics.java   |  70 +-
 .../org/apache/hadoop/hive/conf/HiveConf.java   |  14 +-
 .../apache/hadoop/hive/ql/log/PerfLogger.java   |  33 +-
 .../hive/common/metrics/TestLegacyMetrics.java  | 103 +--
 data/conf/llap/tez-site.xml |  12 +
 data/conf/tez/hive-site.xml |  10 -
 dev-support/jenkins-execute-build.sh|  30 +-
 druid-handler/pom.xml   |  11 +-
 .../src/test/queries/positive/hbase_bulk.m  |  62 --
 .../src/test/queries/positive/hbase_bulk.q  |  62 ++
 .../src/test/results/positive/hbase_bulk.m.out  | 133 
 .../src/test/results/positive/hbase_bulk.q.out  | 133 
 .../hcatalog/streaming/StreamingConnection.java |   2 +
 .../hcatalog/streaming/TransactionBatch.java|   8 +-
 .../org/apache/hive/jdbc/miniHS2/MiniHS2.java   |   4 +
 .../apache/hadoop/hive/metastore/FakeDerby.java | 424 
 .../metastore/TestObjectStoreInitRetry.java | 127 
 .../TestPartitionNameWhitelistValidation.java   |  53 +-
 .../org/apache/hive/jdbc/TestJdbcDriver2.java   |  32 +
 itests/qtest-spark/pom.xml  |   2 +
 itests/qtest/pom.xml|   2 +
 .../hive/cli/TestHBaseMinimrCliDriver.java  |  62 --
 .../test/resources/testconfiguration.properties |   2 -
 .../hive/cli/control/AbstractCliConfig.java |  23 +-
 .../hadoop/hive/cli/control/CliConfigs.java |  38 +-
 .../hadoop/hive/cli/control/CoreCliDriver.java  |   3 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java| 344 +++---
 .../org/apache/hive/jdbc/HiveConnection.java|  79 +++
 jdbc/src/java/org/apache/hive/jdbc/Utils.java   |   1 +
 .../org/apache/hive/jdbc/TestJdbcDriver.java|  98 +++
 .../hive/llap/registry/ServiceInstanceSet.java  |   7 +-
 .../registry/impl/InactiveServiceInstance.java  |  77 +++
 .../registry/impl/LlapFixedRegistryImpl.java|   2 +-
 .../impl/LlapZookeeperRegistryImpl.java |  34 +-
 .../org/apache/hadoop/hive/llap/LlapUtil.java   |  12 +
 .../hadoop/hive/llap/LlapBaseInputFormat.java   |   6 +-
 .../hive/llap/daemon/impl/LlapDaemon.java   |   5 +-
 .../daemon/services/impl/LlapWebServices.java   |   2 +-
 .../llap/tezplugins/LlapTaskCommunicator.java   |  14 +-
 .../tezplugins/LlapTaskSchedulerService.java|   2 +-
 .../tezplugins/TestLlapTaskCommunicator.java|   5 +
 .../hive/metastore/HMSMetricsListener.java  |  52 +-
 .../hadoop/hive/metastore/HiveMetaStore.java|  13 +-
 .../hadoop/hive/metastore/ObjectStore.java  |  71 ++
 pom.xml |  15 +-
 ql/pom.xml  |  26 +-
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  18 +-
 .../hadoop/hive/ql/exec/FileSinkOperator.java   |  10 +-
 .../hadoop/hive/ql/exec/FunctionInfo.java   |  20 +-
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |   4 +-
 .../hadoop/hive/ql/exec/ReduceSinkOperator.java |   9 +
 .../apache/hadoop/hive/ql/exec/Registry.java|  96 ++-
 .../org/apache/hadoop/hive/ql/exec/Task.java|   9 +
 .../hadoop/hive/ql/exec/WindowFunctionInfo.java |   4 +-
 .../hadoop/hive/ql/exec/mr/MapRedTask.java  |   7 +
 .../hadoop/hive/ql/exec/mr/MapredLocalTask.java |   7 +
 .../exec/spark/HiveBaseFunctionResultList.java  |  96 ++-
 .../hive/ql/exec/spark/HiveMapFunction.java |   2 +-
 .../hive/ql/exec/spark/HiveReduceFunction.java  |   2 +-
 .../hive/ql/exec/spark/SortByShuffler.java  |  84 ++-
 .../hadoop/hive/ql/exec/spark/SparkTask.java|   7 +
 .../spark/status/impl/JobMetricsListener.java   |   4 +-
 .../tez/HostAffinitySplitLocationProvider.java  |  80 ++-
 .../apache/hadoop/hive/ql/exec/tez/TezTask.java |   7 +
 .../apache/hadoop/hive/ql/exec/tez/Utils.java   |   8 +-
 

[36/44] hive git commit: HIVE-14778 document threading model of Streaming API (Eugene Koifman, reviewed by Alan Gates)

2016-09-30 Thread sershe
HIVE-14778 document threading model of Streaming API (Eugene Koifman, reviewed 
by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/20304c07
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/20304c07
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/20304c07

Branch: refs/heads/hive-14535
Commit: 20304c0705c4ad861b5915dacceaa6d6bdfe91fc
Parents: d16d4f1
Author: Eugene Koifman 
Authored: Thu Sep 29 10:41:42 2016 -0700
Committer: Eugene Koifman 
Committed: Thu Sep 29 10:41:42 2016 -0700

--
 .../apache/hive/hcatalog/streaming/StreamingConnection.java  | 2 ++
 .../org/apache/hive/hcatalog/streaming/TransactionBatch.java | 8 ++--
 2 files changed, 8 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/20304c07/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StreamingConnection.java
--
diff --git 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StreamingConnection.java
 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StreamingConnection.java
index 8785a21..a8f4089 100644
--- 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StreamingConnection.java
+++ 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StreamingConnection.java
@@ -22,6 +22,8 @@ import org.apache.hadoop.security.UserGroupInformation;
 
 /**
  * Represents a connection to a HiveEndPoint. Used to acquire transaction 
batches.
+ * Note: the expectation is that there is at most 1 TransactionBatch 
outstanding for any given
+ * StreamingConnection.  Violating this may result in "out of sequence 
response".
  */
 public interface StreamingConnection {
 

http://git-wip-us.apache.org/repos/asf/hive/blob/20304c07/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/TransactionBatch.java
--
diff --git 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/TransactionBatch.java
 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/TransactionBatch.java
index 3c8670d..3bcc510 100644
--- 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/TransactionBatch.java
+++ 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/TransactionBatch.java
@@ -24,8 +24,12 @@ import java.util.Collection;
 /**
  * Represents a set of Transactions returned by Hive. Supports opening, 
writing to
  * and commiting/aborting each transaction. The interface is designed to ensure
- * transactions in a batch are used up sequentially. Multiple transaction 
batches can be
- * used (initialized with separate RecordWriters) for concurrent streaming
+ * transactions in a batch are used up sequentially. To stream to the same 
HiveEndPoint
+ * concurrently, create separate StreamingConnections.
+ *
+ * Note on thread safety: At most 2 threads can run through a given 
TransactionBatch at the same
+ * time.  One thread may call {@link #heartbeat()} and the other all other 
methods.
+ * Violating this may result in "out of sequence response".
  *
  */
 public interface TransactionBatch  {
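
Since the contract lives only in these javadoc notes, a sketch of a compliant client may
help: one outstanding TransactionBatch per StreamingConnection, with at most one extra
thread restricted to heartbeat(). Class and method names come from the hcatalog streaming
package shown above; the endpoint, table and data are illustrative:

    import org.apache.hive.hcatalog.streaming.DelimitedInputWriter;
    import org.apache.hive.hcatalog.streaming.HiveEndPoint;
    import org.apache.hive.hcatalog.streaming.StreamingConnection;
    import org.apache.hive.hcatalog.streaming.TransactionBatch;

    public class StreamingSketch {
      public static void main(String[] args) throws Exception {
        HiveEndPoint endPoint =
            new HiveEndPoint("thrift://localhost:9083", "db", "tbl", null);
        StreamingConnection conn = endPoint.newConnection(true);
        DelimitedInputWriter writer =
            new DelimitedInputWriter(new String[]{"col1", "col2"}, ",", endPoint);

        // At most ONE TransactionBatch outstanding for this connection.
        TransactionBatch batch = conn.fetchTransactionBatch(10, writer);
        while (batch.remainingTransactions() > 0) {
          batch.beginNextTransaction();
          batch.write("a,b".getBytes()); // this thread makes all non-heartbeat calls
          batch.commit();
        }
        // A second thread may concurrently call only batch.heartbeat().
        batch.close();
        conn.close();
      }
    }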



hive git commit: HIVE-14865 Fix comments after HIVE-14350 (Eugene Koifman, reviewed by Alan Gates)

2016-09-30 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master f903c4afa -> 297b4433c


HIVE-14865 Fix comments after HIVE-14350 (Eugene Koifman, reviewed by Alan 
Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/297b4433
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/297b4433
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/297b4433

Branch: refs/heads/master
Commit: 297b4433cd2fdfb84182668bf7b1c524e92c6593
Parents: f903c4a
Author: Eugene Koifman 
Authored: Fri Sep 30 15:10:23 2016 -0700
Committer: Eugene Koifman 
Committed: Fri Sep 30 15:10:23 2016 -0700

--
 ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java | 6 --
 1 file changed, 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/297b4433/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
index cda5f39..f1eba5d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
@@ -866,7 +866,6 @@ public class AcidUtils {
* {@link txnList}.  Note that 'original' files are logically a 
base_Long.MIN_VALUE and thus
* cannot have any data for an open txn.  We could check {@link deltas} 
has files to cover
* [1,n] w/o gaps but this would almost never happen...*/
-  //todo: this should only care about 'open' tnxs (HIVE-14211)
   long[] exceptions = txnList.getInvalidTransactions();
   String minOpenTxn = exceptions != null && exceptions.length > 0 ?
 Long.toString(exceptions[0]) : "x";
@@ -910,11 +909,6 @@ public class AcidUtils {
* files within the snapshot.
*/
   private static boolean isValidBase(long baseTxnId, ValidTxnList txnList) {
-/*This implementation is suboptimal.  It considers open/aborted txns 
invalid while we are only
-* concerned with 'open' ones.  (Compaction removes any data that belongs 
to aborted txns and
-* reads skip anything that belongs to aborted txn, thus base_7 is still OK 
if the only exception
-* is txn 5 which is aborted).  So this implementation can generate false 
positives. (HIVE-14211)
-* */
 if(baseTxnId == Long.MIN_VALUE) {
   //such base is created by 1st compaction in case of non-acid to acid 
table conversion
   //By definition there are no open txns with id < 1.



hive git commit: HIVE-14775: Cleanup IOException usage in Metrics APIs (Barna Zsombor Klara reviewed by Peter Vary, Gabor Szadovszky, Szehon Ho, Mohit Sabharwal)

2016-09-30 Thread mohits
Repository: hive
Updated Branches:
  refs/heads/master 45c1a09b7 -> f903c4afa


HIVE-14775: Cleanup IOException usage in Metrics APIs (Barna Zsombor Klara 
reviewed by Peter Vary, Gabor Szadovszky, Szehon Ho, Mohit Sabharwal)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f903c4af
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f903c4af
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f903c4af

Branch: refs/heads/master
Commit: f903c4afad360ea66ec266abe8a3f414935c82ff
Parents: 45c1a09
Author: Mohit Sabharwal 
Authored: Fri Sep 30 15:13:14 2016 -0400
Committer: Mohit Sabharwal 
Committed: Fri Sep 30 15:13:14 2016 -0400

--
 .../hive/common/metrics/LegacyMetrics.java  |  96 ++---
 .../hive/common/metrics/MetricsMBean.java   |  13 +--
 .../hive/common/metrics/MetricsMBeanImpl.java   |  16 +--
 .../hive/common/metrics/common/Metrics.java |  31 ++
 .../metrics/metrics2/CodahaleMetrics.java   |  70 ++---
 .../apache/hadoop/hive/ql/log/PerfLogger.java   |  33 ++
 .../hive/common/metrics/TestLegacyMetrics.java  | 103 ++-
 .../hive/metastore/HMSMetricsListener.java  |  52 ++
 .../hadoop/hive/metastore/HiveMetaStore.java|  13 +--
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  13 +--
 .../hadoop/hive/ql/exec/mr/MapRedTask.java  |   6 +-
 .../hadoop/hive/ql/exec/mr/MapredLocalTask.java |   6 +-
 .../hadoop/hive/ql/exec/spark/SparkTask.java|   6 +-
 .../apache/hadoop/hive/ql/exec/tez/TezTask.java |   6 +-
 .../hive/service/cli/operation/Operation.java   |  22 ++--
 15 files changed, 176 insertions(+), 310 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/f903c4af/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java
--
diff --git 
a/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java 
b/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java
index 9be9b50..ba2267b 100644
--- a/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java
+++ b/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java
@@ -21,11 +21,13 @@ import org.apache.hadoop.hive.common.metrics.common.Metrics;
 import org.apache.hadoop.hive.common.metrics.common.MetricsScope;
 import org.apache.hadoop.hive.common.metrics.common.MetricsVariable;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
 import java.lang.management.ManagementFactory;
 import java.util.HashMap;
 
+import javax.management.JMException;
 import javax.management.MBeanServer;
 import javax.management.MalformedObjectNameException;
 import javax.management.ObjectName;
@@ -47,6 +49,8 @@ import javax.management.ObjectName;
  */
 public class LegacyMetrics implements Metrics {
 
+  private static final Logger LOG = 
LoggerFactory.getLogger(LegacyMetrics.class);
+
   private LegacyMetrics() {
 // block
   }
@@ -59,12 +63,12 @@ public class LegacyMetrics implements Metrics {
*/
   public static class LegacyMetricsScope implements MetricsScope {
 
-final LegacyMetrics metrics;
+private final LegacyMetrics metrics;
 
-final String name;
-final String numCounter;
-final String timeCounter;
-final String avgTimeCounter;
+private final String name;
+private final String numCounter;
+private final String timeCounter;
+private final String avgTimeCounter;
 
 private boolean isOpen = false;
 private Long startTime = null;
@@ -72,9 +76,8 @@ public class LegacyMetrics implements Metrics {
 /**
 * Instantiates a named scope - intended to only be called by Metrics, so locally scoped.
  * @param name - name of the variable
- * @throws IOException
  */
-private LegacyMetricsScope(String name, LegacyMetrics metrics) throws IOException {
+private LegacyMetricsScope(String name, LegacyMetrics metrics) {
   this.metrics = metrics;
   this.name = name;
   this.numCounter = name + ".n";
@@ -83,33 +86,41 @@ public class LegacyMetrics implements Metrics {
   open();
 }
 
-public Long getNumCounter() throws IOException {
-  return (Long) metrics.get(numCounter);
+public Long getNumCounter() {
+  try {
+return (Long) metrics.get(numCounter);
+  } catch (JMException e) {
+LOG.warn("Could not find counter value for " + numCounter + ", returning null instead. ", e);
+return null;
+  }
 }
 
-public Long getTimeCounter() throws IOException {
-  return (Long) metrics.get(timeCounter);
+public Long getTimeCounter() {
+  try {
+return (Long) 
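
The pattern the patch applies across these getters is consistent: catch the JMX exception inside the metrics layer, log it, and return null to the caller instead of forcing a checked IOException onto every call site. A minimal self-contained sketch of that pattern (class and counter names are illustrative, not the committed code):

import javax.management.JMException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class NullOnFailureCounter {
  private static final Logger LOG = LoggerFactory.getLogger(NullOnFailureCounter.class);

  // Stand-in for the MBean-backed attribute lookup that can fail at runtime.
  private Object lookup(String key) throws JMException {
    throw new JMException("attribute not found: " + key);
  }

  // Before HIVE-14775 a getter like this declared "throws IOException";
  // now the failure is logged here and surfaced to the caller as null.
  public Long getCounter(String key) {
    try {
      return (Long) lookup(key);
    } catch (JMException e) {
      LOG.warn("Could not find counter value for " + key + ", returning null instead.", e);
      return null;
    }
  }

  public static void main(String[] args) {
    Long value = new NullOnFailureCounter().getCounter("api_calls.n");
    System.out.println(value == null ? "no value recorded" : value.toString());
  }
}

Callers then check the returned value for null rather than wrapping every metrics call in try/catch, which is what allows the patch to delete the throws clauses from the Metrics interface and its call sites.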

hive git commit: HIVE-14100: Adding a new logged_in_user() UDF which returns the user provided when connecting (Peter Vary, reviewed by Mohit Sabharwal)

2016-09-30 Thread mohits
Repository: hive
Updated Branches:
  refs/heads/master 0562efce6 -> 45c1a09b7


HIVE-14100: Adding a new logged_in_user() UDF which returns the user provided 
when connecting (Peter Vary, reviewed by Mohit Sabharwal)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/45c1a09b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/45c1a09b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/45c1a09b

Branch: refs/heads/master
Commit: 45c1a09b7b76e41f05520de4bb0e26bb6fadc21f
Parents: 0562efc
Author: Mohit Sabharwal 
Authored: Fri Sep 30 13:54:31 2016 -0400
Committer: Mohit Sabharwal 
Committed: Fri Sep 30 13:57:10 2016 -0400

--
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |  1 +
 .../ql/udf/generic/GenericUDFLoggedInUser.java  | 82 
 .../queries/clientpositive/udf_logged_in_user.q |  5 ++
 .../results/clientpositive/show_functions.q.out |  5 ++
 .../clientpositive/udf_logged_in_user.q.out | 22 ++
 5 files changed, 115 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/45c1a09b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index b277f5e..6870dfa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -344,6 +344,7 @@ public final class FunctionRegistry {
 system.registerGenericUDF("current_date", GenericUDFCurrentDate.class);
 system.registerGenericUDF("current_timestamp", 
GenericUDFCurrentTimestamp.class);
 system.registerGenericUDF("current_user", GenericUDFCurrentUser.class);
+system.registerGenericUDF("logged_in_user", GenericUDFLoggedInUser.class);
 
 system.registerGenericUDF("isnull", GenericUDFOPNull.class);
 system.registerGenericUDF("isnotnull", GenericUDFOPNotNull.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/45c1a09b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLoggedInUser.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLoggedInUser.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLoggedInUser.java
new file mode 100644
index 0000000..2915b86
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLoggedInUser.java
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.udf.UDFType;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.Text;
+
+@UDFType(deterministic = true)
+@Description(name = "logged_in_user", value = "_FUNC_() - Returns logged in user name",
+extended = "SessionState GetUserName - the username provided at session initialization")
+@NDV(maxNdv = 1)
+public class GenericUDFLoggedInUser extends GenericUDF {
+  protected Text loggedInUser;
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+if (arguments.length != 0) {
+  throw new UDFArgumentLengthException(
+  "The function LOGGED_IN_USER does not take any arguments, but found 
" + arguments.length);
+}
+
+if (loggedInUser == null) {
+  String loggedInUserName = SessionState.get().getUserName();
+  if (loggedInUserName != null) {
+  
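
The new file is cut off above in the archive. As a hedged sketch only (illustrative class name, not the committed file), a zero-argument GenericUDF of this shape typically completes by caching the session user at initialize() time and returning the cached Text from evaluate():

package org.apache.hadoop.hive.ql.udf.generic;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

public class LoggedInUserSketch extends GenericUDF {
  private Text loggedInUser;

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 0) {
      throw new UDFArgumentLengthException(
          "The function LOGGED_IN_USER does not take any arguments, but found " + arguments.length);
    }
    // Cache the username supplied at session initialization; it does not
    // change for the lifetime of the session.
    if (loggedInUser == null && SessionState.get() != null) {
      String loggedInUserName = SessionState.get().getUserName();
      if (loggedInUserName != null) {
        loggedInUser = new Text(loggedInUserName);
      }
    }
    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    return loggedInUser; // null when the session has no user name
  }

  @Override
  public String getDisplayString(String[] children) {
    return "logged_in_user()";
  }
}

Once registered in FunctionRegistry as shown earlier in this commit, SELECT logged_in_user(); issued from a Beeline session returns the username that was provided when the connection was opened.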

hive git commit: HIVE-14784: Operation logs are disabled automatically if the parent directory does not exist. (Naveen Gangam via Yongzhi Chen)

2016-09-30 Thread ychena
Repository: hive
Updated Branches:
  refs/heads/master 74a6ff678 -> 0562efce6


HIVE-14784: Operation logs are disabled automatically if the parent directory 
does not exist. (Naveen Gangam via Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0562efce
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0562efce
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0562efce

Branch: refs/heads/master
Commit: 0562efce642e70f1ac69eae6cca8c0a63230bafd
Parents: 74a6ff6
Author: Yongzhi Chen 
Authored: Fri Sep 30 10:39:11 2016 -0400
Committer: Yongzhi Chen 
Committed: Fri Sep 30 10:41:37 2016 -0400

--
 .../org/apache/hive/service/cli/operation/Operation.java | 11 +++
 1 file changed, 11 insertions(+)
--
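
In short, the fix recreates the session's operation-log directory when it has been removed out from under a live session, and only disables operation logging if that recreation fails; older logs for the session are gone either way. A minimal standalone sketch of the guard (OperationLogGuard and the temp-dir path are assumed names, not the committed code); the committed diff follows:

import java.io.File;
import java.io.IOException;

public class OperationLogGuard {

  // Returns true if the per-operation log file is usable, recreating a
  // missing parent directory first; returns false if logging must be disabled.
  public static boolean ensureLogFile(File operationLogFile) throws IOException {
    File dir = operationLogFile.getParentFile();
    if (!dir.exists() && !dir.mkdir()) {
      // Mirror the patch: give up on operation logs rather than failing the query.
      System.err.println("Could not recreate log directory, disabling operation logs: "
          + dir.getAbsolutePath());
      return false;
    }
    return operationLogFile.exists() || operationLogFile.createNewFile();
  }

  public static void main(String[] args) throws IOException {
    File logFile = new File(System.getProperty("java.io.tmpdir"), "hive-session-123/op-1.log");
    System.out.println("log file ready: " + ensureLogFile(logFile));
  }
}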


http://git-wip-us.apache.org/repos/asf/hive/blob/0562efce/service/src/java/org/apache/hive/service/cli/operation/Operation.java
--
diff --git a/service/src/java/org/apache/hive/service/cli/operation/Operation.java b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
index 90fe76d..6a656f9 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/Operation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
@@ -234,6 +234,17 @@ public abstract class Operation {
   operationLogFile.getAbsolutePath());
   operationLogFile.delete();
 }
+if (!operationLogFile.getParentFile().exists()) {
+  LOG.warn("Operations log directory for this session does not exist, 
it could have been deleted " +
+  "externally. Recreating the directory for future queries in this 
session but the older operation " +
+  "logs for this session are no longer available");
+  if (!operationLogFile.getParentFile().mkdir()) {
+LOG.warn("Log directory for this session could not be created, 
disabling " +
+"operation logs: " + 
operationLogFile.getParentFile().getAbsolutePath());
+isOperationLogEnabled = false;
+return;
+  }
+}
 if (!operationLogFile.createNewFile()) {
   // the log file already exists and cannot be deleted.
   // If it can be read/written, keep its contents and use it.