hive git commit: HIVE-18038: org.apache.hadoop.hive.ql.session.OperationLog - Review (BELUGA BEHR, reviewed by Peter Vary)

2018-09-11 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master c8b3f9aa0 -> c399c4ed1


HIVE-18038: org.apache.hadoop.hive.ql.session.OperationLog - Review (BELUGA BEHR, reviewed by Peter Vary)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c399c4ed
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c399c4ed
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c399c4ed

Branch: refs/heads/master
Commit: c399c4ed108c631b34570e1253e37e3075b0e7e3
Parents: c8b3f9a
Author: Aihua Xu 
Authored: Tue Sep 11 10:27:46 2018 -0700
Committer: Aihua Xu 
Committed: Tue Sep 11 10:27:46 2018 -0700

--
 .../hadoop/hive/ql/session/OperationLog.java| 117 ++-
 1 file changed, 65 insertions(+), 52 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/c399c4ed/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java b/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java
index 6d75c29..c48dc42 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java
@@ -17,25 +17,35 @@
  */
 package org.apache.hadoop.hive.ql.session;
 
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.io.IOUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.*;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
-
 /**
  * OperationLog wraps the actual operation log file, and provides interface
  * for accessing, reading, writing, and removing the file.
  */
 public class OperationLog {
-  private static final Logger LOG = LoggerFactory.getLogger(OperationLog.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(OperationLog.class);
 
   private final String operationName;
+
   private final LogFile logFile;
   // If in test mode then the LogDivertAppenderForTest created an extra log file containing only
   // the output needed for the qfile results.
@@ -45,7 +55,8 @@ public class OperationLog {
   private final boolean isShortLogs;
   // True if the logs should be removed after the operation. Should be used only in test mode
   private final boolean isRemoveLogs;
-  private LoggingLevel opLoggingLevel = LoggingLevel.UNKNOWN;
+
+  private final LoggingLevel opLoggingLevel;
 
   public enum LoggingLevel {
 NONE, EXECUTION, PERFORMANCE, VERBOSE, UNKNOWN
@@ -58,6 +69,8 @@ public class OperationLog {
     if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) {
       String logLevel = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL);
       opLoggingLevel = getLoggingLevel(logLevel);
+    } else {
+      opLoggingLevel = LoggingLevel.UNKNOWN;
     }
 
     // If in test mode create a test log file which will contain only logs which are supposed to
@@ -79,15 +92,17 @@ public class OperationLog {
   }
 
   public static LoggingLevel getLoggingLevel (String mode) {
-    if (mode.equalsIgnoreCase("none")) {
+    String m = StringUtils.defaultString(mode).toLowerCase();
+    switch (m) {
+    case "none":
       return LoggingLevel.NONE;
-    } else if (mode.equalsIgnoreCase("execution")) {
+    case "execution":
       return LoggingLevel.EXECUTION;
-    } else if (mode.equalsIgnoreCase("verbose")) {
+    case "verbose":
       return LoggingLevel.VERBOSE;
-    } else if (mode.equalsIgnoreCase("performance")) {
+    case "performance":
       return LoggingLevel.PERFORMANCE;
-    } else {
+    default:
       return LoggingLevel.UNKNOWN;
     }
   }
@@ -105,11 +120,8 @@ public class OperationLog {
    */
   public List<String> readOperationLog(boolean isFetchFirst, long maxRows)
       throws SQLException {
-    if (isShortLogs) {
-      return testLogFile.read(isFetchFirst, maxRows);
-    } else {
-      return logFile.read(isFetchFirst, maxRows);
-    }
+    LogFile lf = (isShortLogs) ? testLogFile : logFile;
+    return lf.read(isFetchFirst, maxRows);
   }
 
   /**
@@ -144,8 +156,10 @@
       if (isFetchFirst) {
         resetIn();
       }
-
-      return readResults(maxRows);
+      if (maxRows >= (long)
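
A minimal standalone sketch of the null-safe parsing pattern the new switch relies on (illustrative class name, not part of the patch): StringUtils.defaultString(null) yields "", so a null mode now maps to UNKNOWN instead of throwing the NullPointerException the old equalsIgnoreCase chain would.

    import org.apache.commons.lang.StringUtils;

    public class LoggingLevelParseDemo {
      enum LoggingLevel { NONE, EXECUTION, PERFORMANCE, VERBOSE, UNKNOWN }

      static LoggingLevel parse(String mode) {
        // defaultString(null) returns "", so null falls through to UNKNOWN
        switch (StringUtils.defaultString(mode).toLowerCase()) {
        case "none":        return LoggingLevel.NONE;
        case "execution":   return LoggingLevel.EXECUTION;
        case "verbose":     return LoggingLevel.VERBOSE;
        case "performance": return LoggingLevel.PERFORMANCE;
        default:            return LoggingLevel.UNKNOWN;
        }
      }

      public static void main(String[] args) {
        System.out.println(parse(null));      // UNKNOWN
        System.out.println(parse("VERBOSE")); // VERBOSE
      }
    }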

hive git commit: HIVE-20020: Hive contrib jar should not be in lib (Alice Fan, reviewed by Aihua Xu)

2018-09-10 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master b60b13f97 -> afb61aebf


HIVE-20020: Hive contrib jar should not be in lib (Alice Fan, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/afb61aeb
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/afb61aeb
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/afb61aeb

Branch: refs/heads/master
Commit: afb61aebfd475e2cb4f7935f94f765ae53bb174f
Parents: b60b13f
Author: Aihua Xu 
Authored: Mon Sep 10 11:31:16 2018 -0700
Committer: Aihua Xu 
Committed: Mon Sep 10 11:35:14 2018 -0700

--
 packaging/src/main/assembly/bin.xml | 10 ++
 1 file changed, 10 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/afb61aeb/packaging/src/main/assembly/bin.xml
--
diff --git a/packaging/src/main/assembly/bin.xml b/packaging/src/main/assembly/bin.xml
index eeed3ec..fceb1be 100644
--- a/packaging/src/main/assembly/bin.xml
+++ b/packaging/src/main/assembly/bin.xml
@@ -47,6 +47,7 @@
 co.cask.tephra:*
 commons-configuration:commons-configuration
 org.apache.hive:hive-jdbc:*:standalone
+org.apache.hive:hive-contrib
   
 
 
@@ -59,6 +60,15 @@
   
 
 
+  contrib/
+  false
+  false
+  true
+  
+org.apache.hive:hive-contrib
+  
+
+
   lib
   false
   false



[1/2] hive git commit: HIVE-20237: Do Not Print StackTraces to STDERR in HiveMetaStore (Alice Fan, reviewed by Aihua Xu)

2018-08-22 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 4408661c0 -> ff2554328


HIVE-20237: Do Not Print StackTraces to STDERR in HiveMetaStore (Alice Fan, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1f9c70e5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1f9c70e5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1f9c70e5

Branch: refs/heads/master
Commit: 1f9c70e5148add278a1cb9b2186fb9a65d7ed1c5
Parents: 4408661
Author: Aihua Xu 
Authored: Wed Aug 22 10:02:08 2018 -0700
Committer: Aihua Xu 
Committed: Wed Aug 22 10:02:08 2018 -0700

--
 .../hadoop/hive/metastore/HiveMetaStore.java| 280 +--
 1 file changed, 137 insertions(+), 143 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/1f9c70e5/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
--
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 324035a..067eb5a 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -9219,166 +9219,160 @@ public class HiveMetaStore extends ThriftHiveMetastore {
   public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
       Configuration conf, Lock startLock, Condition startCondition,
       AtomicBoolean startedServing) throws Throwable {
-    try {
-      isMetaStoreRemote = true;
-      // Server will create new threads up to max as necessary. After an idle
-      // period, it will destroy threads to keep the number of threads in the
-      // pool to min.
-      long maxMessageSize = MetastoreConf.getLongVar(conf, ConfVars.SERVER_MAX_MESSAGE_SIZE);
-      int minWorkerThreads = MetastoreConf.getIntVar(conf, ConfVars.SERVER_MIN_THREADS);
-      int maxWorkerThreads = MetastoreConf.getIntVar(conf, ConfVars.SERVER_MAX_THREADS);
-      boolean tcpKeepAlive = MetastoreConf.getBoolVar(conf, ConfVars.TCP_KEEP_ALIVE);
-      boolean useFramedTransport = MetastoreConf.getBoolVar(conf, ConfVars.USE_THRIFT_FRAMED_TRANSPORT);
-      boolean useCompactProtocol = MetastoreConf.getBoolVar(conf, ConfVars.USE_THRIFT_COMPACT_PROTOCOL);
-      boolean useSSL = MetastoreConf.getBoolVar(conf, ConfVars.USE_SSL);
-      useSasl = MetastoreConf.getBoolVar(conf, ConfVars.USE_THRIFT_SASL);
-
-      if (useSasl) {
-        // we are in secure mode. Login using keytab
-        String kerberosName = SecurityUtil
-            .getServerPrincipal(MetastoreConf.getVar(conf, ConfVars.KERBEROS_PRINCIPAL), "0.0.0.0");
-        String keyTabFile = MetastoreConf.getVar(conf, ConfVars.KERBEROS_KEYTAB_FILE);
-        UserGroupInformation.loginUserFromKeytab(kerberosName, keyTabFile);
-      }
-
-      TProcessor processor;
-      TTransportFactory transFactory;
-      final TProtocolFactory protocolFactory;
-      final TProtocolFactory inputProtoFactory;
-      if (useCompactProtocol) {
-        protocolFactory = new TCompactProtocol.Factory();
-        inputProtoFactory = new TCompactProtocol.Factory(maxMessageSize, maxMessageSize);
+    isMetaStoreRemote = true;
+    // Server will create new threads up to max as necessary. After an idle
+    // period, it will destroy threads to keep the number of threads in the
+    // pool to min.
+    long maxMessageSize = MetastoreConf.getLongVar(conf, ConfVars.SERVER_MAX_MESSAGE_SIZE);
+    int minWorkerThreads = MetastoreConf.getIntVar(conf, ConfVars.SERVER_MIN_THREADS);
+    int maxWorkerThreads = MetastoreConf.getIntVar(conf, ConfVars.SERVER_MAX_THREADS);
+    boolean tcpKeepAlive = MetastoreConf.getBoolVar(conf, ConfVars.TCP_KEEP_ALIVE);
+    boolean useFramedTransport = MetastoreConf.getBoolVar(conf, ConfVars.USE_THRIFT_FRAMED_TRANSPORT);
+    boolean useCompactProtocol = MetastoreConf.getBoolVar(conf, ConfVars.USE_THRIFT_COMPACT_PROTOCOL);
+    boolean useSSL = MetastoreConf.getBoolVar(conf, ConfVars.USE_SSL);
+    useSasl = MetastoreConf.getBoolVar(conf, ConfVars.USE_THRIFT_SASL);
+
+    if (useSasl) {
+      // we are in secure mode. Login using keytab
+      String kerberosName = SecurityUtil
+          .getServerPrincipal(MetastoreConf.getVar(conf, ConfVars.KERBEROS_PRINCIPAL), "0.0.0.0");
+      String keyTabFile = MetastoreConf.getVar(conf, ConfVars.KERBEROS_KEYTAB_FILE);
+      UserGroupInformation.loginUserFromKeytab(kerberosName, keyTabFile);
+    }
+
+    TProcessor processor;
+    TTransportFactory transFactory;
+
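A hedged sketch of the pattern this change moves toward (illustrative class, not the committed code): route startup failures through the class logger rather than printing stack traces to STDERR, so they land in the metastore log with the rest of the diagnostics.

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class StartupLoggingDemo {
      private static final Logger LOG = LoggerFactory.getLogger(StartupLoggingDemo.class);

      void start() throws Exception {
        try {
          // ... start the Thrift server ...
        } catch (Exception e) {
          LOG.error("Metastore startup failed", e); // full trace to the log, not STDERR
          throw e; // still propagate; the caller decides whether to exit
        }
      }
    }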

[2/2] hive git commit: HIVE-20246: Configurable collecting stats by using DO_NOT_UPDATE_STATS table property (Alice Fan, reviewed by Aihua Xu)

2018-08-22 Thread aihuaxu
HIVE-20246: Configurable collecting stats by using DO_NOT_UPDATE_STATS table property (Alice Fan, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ff255432
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ff255432
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ff255432

Branch: refs/heads/master
Commit: ff25543282532613c9cfa768a8bf8ee728b3734e
Parents: 1f9c70e
Author: Aihua Xu 
Authored: Wed Aug 22 10:05:12 2018 -0700
Committer: Aihua Xu 
Committed: Wed Aug 22 10:05:12 2018 -0700

--
 .../hadoop/hive/metastore/HiveMetaStore.java| 27 --
 .../hive/metastore/TestHiveMetaStore.java   | 57 +++-
 2 files changed, 79 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/ff255432/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
--
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 067eb5a..e971d0f 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -3174,8 +3174,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
         part.setCreateTime((int) time);
         part.putToParameters(hive_metastoreConstants.DDL_TIME, Long.toString(time));

-        if (MetastoreConf.getBoolVar(conf, ConfVars.STATS_AUTO_GATHER) &&
-            !MetaStoreServerUtils.isView(tbl)) {
+        if (canUpdateStats(tbl)) {
           MetaStoreServerUtils.updatePartitionStatsFast(part, tbl, wh, madeDir, false, envContext, true);
         }
 
@@ -3791,6 +3790,27 @@ public class HiveMetaStore extends ThriftHiveMetastore {
   return result;
 }
 
+    /**
+     * Verify whether stats should be updated while altering partition(s).
+     * For the following three cases HMS will not update partition stats:
+     * 1) Table property 'DO_NOT_UPDATE_STATS' = True
+     * 2) HMS configuration property 'STATS_AUTO_GATHER' = False
+     * 3) The table is a view
+     */
+    private boolean canUpdateStats(Table tbl) {
+      Map<String, String> tblParams = tbl.getParameters();
+      boolean updateStatsTbl = true;
+      if ((tblParams != null) && tblParams.containsKey(StatsSetupConst.DO_NOT_UPDATE_STATS)) {
+        updateStatsTbl = !Boolean.valueOf(tblParams.get(StatsSetupConst.DO_NOT_UPDATE_STATS));
+      }
+      if (!MetastoreConf.getBoolVar(conf, ConfVars.STATS_AUTO_GATHER) ||
+          MetaStoreServerUtils.isView(tbl) ||
+          !updateStatsTbl) {
+        return false;
+      }
+      return true;
+    }
+
     private void initializeAddedPartition(
         final Table tbl, final Partition part, boolean madeDir) throws MetaException {
       initializeAddedPartition(tbl, new PartitionSpecProxy.SimplePartitionWrapperIterator(part), madeDir);
@@ -3798,8 +3818,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {

     private void initializeAddedPartition(
         final Table tbl, final PartitionSpecProxy.PartitionIterator part, boolean madeDir) throws MetaException {
-      if (MetastoreConf.getBoolVar(conf, ConfVars.STATS_AUTO_GATHER) &&
-          !MetaStoreServerUtils.isView(tbl)) {
+      if (canUpdateStats(tbl)) {
         MetaStoreServerUtils.updatePartitionStatsFast(part, tbl, wh, madeDir, false, null, true);
       }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/ff255432/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
--
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
index 60beab6..4937d9d 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.metastore;
 
-import java.lang.reflect.Field;
 import java.io.IOException;
 import java.sql.Connection;
 import java.sql.DriverManager;
@@ -38,6 +37,8 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
+import java.lang.reflect.*;
+import static 
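
A hedged usage sketch (assumed database/table names; StatsSetupConst package per the Hive version in use) of the new knob: once DO_NOT_UPDATE_STATS is set on a table, canUpdateStats() returns false and HMS skips the fast stats update for that table's partitions.

    import org.apache.hadoop.hive.common.StatsSetupConst;
    import org.apache.hadoop.hive.metastore.IMetaStoreClient;
    import org.apache.hadoop.hive.metastore.api.Table;

    void disableAutoStats(IMetaStoreClient client) throws Exception {
      Table tbl = client.getTable("default", "t1");                        // assumed names
      tbl.getParameters().put(StatsSetupConst.DO_NOT_UPDATE_STATS, "true");
      client.alter_table("default", "t1", tbl); // later add/alter-partition calls skip stats
    }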

hive git commit: HIVE-20331: Query with union all, lateral view and Join fails with "cannot find parent in the child operator" (Aihua Xu, reviewed by Vihang Karajgaonkar)

2018-08-16 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master ddc246528 -> 61372dfaf


HIVE-20331: Query with union all, lateral view and Join fails with "cannot find parent in the child operator" (Aihua Xu, reviewed by Vihang Karajgaonkar)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/61372dfa
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/61372dfa
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/61372dfa

Branch: refs/heads/master
Commit: 61372dfafa35b43be710554e4920e3b6ed4dacdf
Parents: ddc2465
Author: Aihua Xu 
Authored: Wed Aug 8 09:07:31 2018 -0700
Committer: Aihua Xu 
Committed: Thu Aug 16 10:57:21 2018 -0700

--
 .../hive/ql/optimizer/GenMRProcContext.java |  11 ++
 .../hive/ql/optimizer/GenMRTableScan1.java  |   2 +
 .../clientpositive/unionall_lateralview.q   |  29 +
 .../clientpositive/unionall_lateralview.q.out   | 105 +++
 4 files changed, 147 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/61372dfa/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
index f80395d..782ce16 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
@@ -219,6 +219,17 @@ public class GenMRProcContext implements NodeProcessorCtx {
   }
 
   /**
+   * The context is reused across the rules. Reset so the following info is not
+   * incorrectly carried over to the following optimizations starting with the new TS.
+   */
+  public void reset() {
+    currTask = null;
+    currTopOp = null;
+    currUnionOp = null;
+    currAliasId = null;
+  }
+
+  /**
    * @return reducer to task mapping
    */
   public HashMap,

http://git-wip-us.apache.org/repos/asf/hive/blob/61372dfa/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
index 6295d7f..bb53ce8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
@@ -66,6 +66,8 @@ public class GenMRTableScan1 implements NodeProcessor {
   Object... nodeOutputs) throws SemanticException {
     TableScanOperator op = (TableScanOperator) nd;
     GenMRProcContext ctx = (GenMRProcContext) opProcCtx;
+    ctx.reset();
+
     ParseContext parseCtx = ctx.getParseCtx();
     Table table = op.getConf().getTableMetadata();
     Class<? extends InputFormat> inputFormat = table.getInputFormatClass();

http://git-wip-us.apache.org/repos/asf/hive/blob/61372dfa/ql/src/test/queries/clientpositive/unionall_lateralview.q
--
diff --git a/ql/src/test/queries/clientpositive/unionall_lateralview.q b/ql/src/test/queries/clientpositive/unionall_lateralview.q
new file mode 100644
index 000..457f404
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/unionall_lateralview.q
@@ -0,0 +1,29 @@
+-- Test the case HIVE-20331 with the union all, lateral view and join
+DROP TABLE IF EXISTS unionall_lateralview1;
+DROP TABLE IF EXISTS unionall_lateralview2;
+CREATE TABLE unionall_lateralview1(col1 INT);
+INSERT INTO unionall_lateralview1 VALUES(1), (2);
+CREATE TABLE unionall_lateralview2(col1 INT);
+
+INSERT INTO unionall_lateralview2
+SELECT 1 AS `col1`
+FROM unionall_lateralview1
+UNION ALL
+  SELECT 2 AS `col1`
+  FROM
+(SELECT col1
+ FROM unionall_lateralview1
+) x1
+JOIN
+  (SELECT col1
+  FROM
+(SELECT
+  Row_Number() over (PARTITION BY col1 ORDER BY col1) AS `col1`
+FROM unionall_lateralview1
+) x2 lateral VIEW explode(map(10,1))`mapObj` AS `col2`, `col3`
+  ) `expdObj`;
+
+SELECT * FROM unionall_lateralview2 ORDER BY col1;
+
+DROP TABLE unionall_lateralview1;
+DROP TABLE unionall_lateralview2;

http://git-wip-us.apache.org/repos/asf/hive/blob/61372dfa/ql/src/test/results/clientpositive/unionall_lateralview.q.out
--
diff --git a/ql/src/test/results/clientpositive/unionall_lateralview.q.out b/ql/src/test/results/clientpositive/unionall_lateralview.q.out
new file mode 100644
index 000..db64777
--- /dev/null
+++ b/ql/src/test/results/clientpositive/unionall_lateralview.q.out
@@ -0,0 +1,105 @@
+PREHOOK: query: DROP TABLE IF EXISTS unionall_lateralview1
+PREHOOK: type: DROPTABLE
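
A hedged sketch of the fix's core idea (illustrative names, not the Hive classes): a mutable context reused across rule invocations must clear its per-walk state when a new root operator starts, otherwise leftovers from the previous TableScan leak into the next plan fragment.

    class ReusableWalkContext {
      Object currTask;
      Object currTopOp;
      Object currUnionOp;
      String currAliasId;

      // Invoked at the start of each new TableScan so stale state cannot
      // produce "cannot find parent in the child operator" style failures.
      void reset() {
        currTask = null;
        currTopOp = null;
        currUnionOp = null;
        currAliasId = null;
      }
    }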

hive git commit: HIVE-20345: Drop database may hang if the tables get deleted from a different call (Aihua Xu, reviewed by Naveen Gangam)

2018-08-14 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 5f993e24c -> 86096b48f


HIVE-20345: Drop database may hang if the tables get deleted from a different call (Aihua Xu, reviewed by Naveen Gangam)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/86096b48
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/86096b48
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/86096b48

Branch: refs/heads/master
Commit: 86096b48f8ead2f228725c49953c60b2f852174b
Parents: 5f993e2
Author: Aihua Xu 
Authored: Wed Aug 8 18:37:54 2018 -0700
Committer: Aihua Xu 
Committed: Tue Aug 14 14:48:58 2018 -0700

--
 .../java/org/apache/hadoop/hive/metastore/HiveMetaStore.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/86096b48/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
--
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 95b08eb..324035a 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -1573,9 +1573,9 @@ public class HiveMetaStore extends ThriftHiveMetastore {
           drop_table(MetaStoreUtils.prependCatalogToDbName(table.getCatName(), table.getDbName(), conf),
               table.getTableName(), false);
         }
-
-        startIndex = endIndex;
       }
+
+      startIndex = endIndex;
     }

     if (ms.dropDatabase(catName, name)) {
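
A hedged reconstruction (simplified, invented names) of why the old placement could hang: the cursor advanced only inside the per-table loop, so a batch whose tables had already been dropped by a concurrent call advanced nothing and the outer loop spun forever. Advancing once per batch, as the patch does, guarantees progress.

    int startIndex = 0;
    while (startIndex < allTables.size()) {
      int endIndex = Math.min(startIndex + tableBatchSize, allTables.size());
      for (Table t : fetchExisting(allTables.subList(startIndex, endIndex))) {
        dropOneTable(t); // zero iterations if another call already dropped the batch
      }
      startIndex = endIndex; // per batch, not per table: loop always terminates
    }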



hive git commit: HIVE-20136: Code Review of ArchiveUtils Class (BELUGA BEHR, reviewed by Aihua Xu)

2018-08-08 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 0fd23b6bd -> 7dce7b79b


HIVE-20136: Code Review of ArchiveUtils Class (BELUGA BEHR, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7dce7b79
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7dce7b79
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7dce7b79

Branch: refs/heads/master
Commit: 7dce7b79b9d877256046c5e595110a23de9bdcce
Parents: 0fd23b6
Author: Aihua Xu 
Authored: Wed Aug 8 10:40:14 2018 -0700
Committer: Aihua Xu 
Committed: Wed Aug 8 10:40:14 2018 -0700

--
 .../hadoop/hive/ql/exec/ArchiveUtils.java   | 78 
 1 file changed, 32 insertions(+), 46 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/7dce7b79/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java
index 5576d11..6ad0556 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java
@@ -21,13 +21,14 @@ package org.apache.hadoop.hive.ql.exec;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
@@ -41,7 +42,6 @@ import org.apache.hadoop.hive.ql.metadata.Table;
 
 /**
  * ArchiveUtils.
- *
  */
 @SuppressWarnings("nls")
 public final class ArchiveUtils {
@@ -105,7 +105,7 @@ public final class ArchiveUtils {
       throw new HiveException("Unable to get partitions directories prefix", e);
     }
     Path tableDir = tbl.getDataLocation();
-    if(tableDir == null) {
+    if (tableDir == null) {
       throw new HiveException("Table has no location set");
     }
     return new Path(tableDir, prefixSubdir);
@@ -136,16 +136,16 @@ public final class ArchiveUtils {
     public HarPathHelper(HiveConf hconf, URI archive, URI originalBase) throws HiveException {
       this.originalBase = addSlash(originalBase);
       String parentHost = archive.getHost();
-      String harHost = null;
+      String harHost = archive.getScheme();
       if (parentHost == null) {
-        harHost = archive.getScheme() + "-localhost";
+        harHost += "-localhost";
       } else {
-        harHost = archive.getScheme() + "-" + parentHost;
+        harHost += "-" + parentHost;
       }

       // have to make sure there's slash after .har, otherwise resolve doesn't work
-      String path = addSlash(archive.getPath());
-      if(!path.endsWith(".har/")) {
+      String path = StringUtils.appendIfMissing(archive.getPath(), "/");
+      if (!path.endsWith(".har/")) {
         throw new HiveException("HAR archive path must end with .har");
       }
       // harUri is used to access the partition's files, which are in the archive
@@ -155,41 +155,33 @@ public final class ArchiveUtils {
         base = new URI("har", archive.getUserInfo(), harHost, archive.getPort(),
             path, archive.getQuery(), archive.getFragment());
       } catch (URISyntaxException e) {
-        throw new HiveException("Couldn't create har URI from archive URI", e);
+        throw new HiveException("Could not create har URI from archive URI", e);
       }
     }
 
     public URI getHarUri(URI original) throws URISyntaxException {
       URI relative = originalBase.relativize(original);
       if (relative.isAbsolute()) {
-        throw new URISyntaxException("Couldn't create URI for location.",
-                                     "Relative: " + relative + " Base: "
-                                     + base + " OriginalBase: " + originalBase);
+        throw new URISyntaxException("Could not create URI for location.",
+            "Relative: " + relative + " Base: " + base + " OriginalBase: "
+            + originalBase);
       }
-
       return base.resolve(relative);
-
-
     }
   }

-  public static String addSlash(String s) {
-    return s.endsWith("/") ? s : s + "/";
-  }
-
   /**
    * Makes sure, that URI points to directory by adding slash to it.
    * Useful in relativizing URIs.
    */
   public static URI addSlash(URI u) throws HiveException {
-    if(u.getPath().endsWith("/")) {
+    if (u.getPath().endsWith("/")) {
       return u;
-    } else {
-      try {
-        return new URI(u.getScheme(),
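
A hedged micro-example of the commons-lang3 call that replaces the local addSlash(String) helper; appendIfMissing appends the suffix only when it is absent:

    import org.apache.commons.lang3.StringUtils;

    String a = StringUtils.appendIfMissing("/warehouse/p.har", "/");  // "/warehouse/p.har/"
    String b = StringUtils.appendIfMissing("/warehouse/p.har/", "/"); // unchanged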

hive git commit: HIVE-19809: Remove Deprecated Code From Utilities Class (BELUGA BEHR, reviewed by Aihua Xu)

2018-07-30 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 1c8449cce -> 109ec31dc


HIVE-19809: Remove Deprecated Code From Utilities Class (BELUGA BEHR, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/109ec31d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/109ec31d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/109ec31d

Branch: refs/heads/master
Commit: 109ec31dc7de5678106e702c9bdd641c12cb1653
Parents: 1c8449c
Author: Aihua Xu 
Authored: Fri Jul 13 15:47:34 2018 -0700
Committer: Aihua Xu 
Committed: Mon Jul 30 11:05:26 2018 -0700

--
 .../org/apache/hadoop/hive/ql/QTestUtil.java|  4 ++-
 .../apache/hadoop/hive/ql/exec/Utilities.java   | 26 
 2 files changed, 3 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/109ec31d/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 4ed062e..5adbb63 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -36,6 +36,8 @@ import java.io.StringWriter;
 import java.io.UnsupportedEncodingException;
 import java.net.URL;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -1834,7 +1836,7 @@ public class QTestUtil {
   if (zooKeeperCluster == null) {
     //create temp dir
     String tmpBaseDir =  System.getProperty(TEST_TMP_DIR_PROPERTY);
-    File tmpDir = Utilities.createTempDir(tmpBaseDir);
+    File tmpDir = Files.createTempDirectory(Paths.get(tmpBaseDir), "tmp_").toFile();

     zooKeeperCluster = new MiniZooKeeperCluster();
     zkPort = zooKeeperCluster.startup(tmpDir);

http://git-wip-us.apache.org/repos/asf/hive/blob/109ec31d/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 80478ca..b677d46 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -3758,32 +3758,6 @@ public final class Utilities {
   }

   /**
-   * Create a temp dir in specified baseDir
-   * This can go away once hive moves to support only JDK 7
-   *  and can use Files.createTempDirectory
-   *  Guava Files.createTempDir() does not take a base dir
-   * @param baseDir - directory under which new temp dir will be created
-   * @return File object for new temp dir
-   */
-  public static File createTempDir(String baseDir){
-    //try creating the temp dir MAX_ATTEMPTS times
-    final int MAX_ATTEMPS = 30;
-    for(int i = 0; i < MAX_ATTEMPS; i++){
-      //pick a random file name
-      String tempDirName = "tmp_" + ((int)(10 * Math.random()));
-
-      //return if dir could successfully be created with that file name
-      File tempDir = new File(baseDir, tempDirName);
-      if(tempDir.mkdir()){
-        return tempDir;
-      }
-    }
-    throw new IllegalStateException("Failed to create a temp dir under "
-        + baseDir + " Giving up after " + MAX_ATTEMPS + " attempts");
-
-  }
-
-  /**
    * Skip header lines in the table file when reading the record.
    *
    * @param currRecReader
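
A hedged example of the JDK replacement QTestUtil now uses: NIO creates a uniquely named directory atomically, which makes the 30-attempt random mkdir() loop removed above unnecessary on Java 7+.

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    File newTempDir(String baseDir) throws IOException {
      // unique name with the "tmp_" prefix, created atomically under baseDir
      return Files.createTempDirectory(Paths.get(baseDir), "tmp_").toFile();
    }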



hive git commit: HIVE-20153: Count and Sum UDF consume more memory in Hive 2+ (Aihua Xu, reviewed by Gopal V)

2018-07-27 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 0ad71121d -> 804b125e7


HIVE-20153: Count and Sum UDF consume more memory in Hive 2+ (Aihua Xu, reviewed by Gopal V)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/804b125e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/804b125e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/804b125e

Branch: refs/heads/master
Commit: 804b125e743516ca0139a22c42244b980ffaf47f
Parents: 0ad7112
Author: Aihua Xu 
Authored: Thu Jul 26 13:05:31 2018 -0700
Committer: Aihua Xu 
Committed: Fri Jul 27 13:41:16 2018 -0700

--
 .../apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java | 6 +-
 .../apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java   | 9 ++---
 2 files changed, 11 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/804b125e/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
index 2d7cc8d..c2414d2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
@@ -153,7 +153,7 @@ public class GenericUDAFCount implements GenericUDAFResolver2 {
     @Override
     public void reset(AggregationBuffer agg) throws HiveException {
       ((CountAgg) agg).value = 0;
-      ((CountAgg) agg).uniqueObjects = new HashSet<ObjectInspectorObject>();
+      ((CountAgg) agg).uniqueObjects = null;
     }
 
 @Override
@@ -177,7 +177,11 @@ public class GenericUDAFCount implements GenericUDAFResolver2 {

     // Skip the counting if the values are the same for windowing COUNT(DISTINCT) case
     if (countThisRow && isWindowingDistinct()) {
+      if (((CountAgg) agg).uniqueObjects == null) {
+        ((CountAgg) agg).uniqueObjects = new HashSet<ObjectInspectorObject>();
+      }
       HashSet<ObjectInspectorObject> uniqueObjs = ((CountAgg) agg).uniqueObjects;
+
       ObjectInspectorObject obj = new ObjectInspectorObject(
           ObjectInspectorUtils.copyToStandardObject(parameters, inputOI, ObjectInspectorCopyOption.JAVA),
           outputOI);

http://git-wip-us.apache.org/repos/asf/hive/blob/804b125e/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
index 1439b64..e30b903 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
@@ -184,6 +184,9 @@ public class GenericUDAFSum extends AbstractGenericUDAFResolver {
       }

       if (isWindowingDistinct()) {
+        if (agg.uniqueObjects == null) {
+          agg.uniqueObjects = new HashSet<ObjectInspectorObject>();
+        }
         HashSet<ObjectInspectorObject> uniqueObjs = agg.uniqueObjects;
         ObjectInspectorObject obj = input instanceof ObjectInspectorObject ?
             (ObjectInspectorObject)input :
@@ -266,7 +269,7 @@ public class GenericUDAFSum extends AbstractGenericUDAFResolver {
       SumAgg bdAgg = (SumAgg) agg;
       bdAgg.empty = true;
       bdAgg.sum = new HiveDecimalWritable(0);
-      bdAgg.uniqueObjects = new HashSet<ObjectInspectorObject>();
+      bdAgg.uniqueObjects = null;
     }

     boolean warned = false;
@@ -410,7 +413,7 @@ public class GenericUDAFSum extends AbstractGenericUDAFResolver {
       SumDoubleAgg myagg = (SumDoubleAgg) agg;
       myagg.empty = true;
       myagg.sum = 0.0;
-      myagg.uniqueObjects = new HashSet<ObjectInspectorObject>();
+      myagg.uniqueObjects = null;
     }

     boolean warned = false;
@@ -540,7 +543,7 @@ public class GenericUDAFSum extends AbstractGenericUDAFResolver {
       SumLongAgg myagg = (SumLongAgg) agg;
       myagg.empty = true;
       myagg.sum = 0L;
-      myagg.uniqueObjects = new HashSet<ObjectInspectorObject>();
+      myagg.uniqueObjects = null;
     }

     private boolean warned = false;
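
A hedged distillation of the memory fix (illustrative class, not the UDAF code): the distinct-tracking set is only needed for windowing COUNT/SUM(DISTINCT), so it stays null on reset and is allocated on first use instead of once per aggregation buffer.

    import java.util.HashSet;

    class LazyDistinctCounter {
      long value;
      HashSet<Object> uniqueObjects; // null until the first distinct value arrives

      void reset() {
        value = 0;
        uniqueObjects = null; // was: new HashSet<>() for every group
      }

      void addDistinct(Object row) {
        if (uniqueObjects == null) {
          uniqueObjects = new HashSet<>();
        }
        if (uniqueObjects.add(row)) {
          value++;
        }
      }
    }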



hive git commit: HIVE-18929: The method humanReadableInt in HiveStringUtils.java has a race condition. (Andrew Sherman, reviewed by Aihua Xu)

2018-07-26 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 5a3f12dd7 -> 1ad48825c


HIVE-18929: The method humanReadableInt in HiveStringUtils.java has a race condition. (Andrew Sherman, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1ad48825
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1ad48825
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1ad48825

Branch: refs/heads/master
Commit: 1ad48825c01ca326a9b7387d3e044a3a8b2ad81b
Parents: 5a3f12d
Author: Aihua Xu 
Authored: Thu Jul 26 16:46:35 2018 -0700
Committer: Aihua Xu 
Committed: Thu Jul 26 16:46:35 2018 -0700

--
 .../hive/common/util/HiveStringUtils.java   | 29 
 1 file changed, 29 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/1ad48825/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
--
diff --git a/common/src/java/org/apache/hive/common/util/HiveStringUtils.java b/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
index 6b14ad9..a4923f9 100644
--- a/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
+++ b/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
@@ -164,35 +164,6 @@ public class HiveStringUtils {
 return fullHostname;
   }
 
-  private static DecimalFormat oneDecimal = new DecimalFormat("0.0");
-
-  /**
-   * Given an integer, return a string that is in an approximate, but human
-   * readable format.
-   * It uses the bases 'k', 'm', and 'g' for 1024, 1024**2, and 1024**3.
-   * @param number the number to format
-   * @return a human readable form of the integer
-   */
-  public static String humanReadableInt(long number) {
-    long absNumber = Math.abs(number);
-    double result = number;
-    String suffix = "";
-    if (absNumber < 1024) {
-      // since no division has occurred, don't format with a decimal point
-      return String.valueOf(number);
-    } else if (absNumber < 1024 * 1024) {
-      result = number / 1024.0;
-      suffix = "k";
-    } else if (absNumber < 1024 * 1024 * 1024) {
-      result = number / (1024.0 * 1024);
-      suffix = "m";
-    } else {
-      result = number / (1024.0 * 1024 * 1024);
-      suffix = "g";
-    }
-    return oneDecimal.format(result) + suffix;
-  }
-
   /**
    * Format a percentage for presentation to the user.
    * @param done the percentage to format (0.0 to 1.0)
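
For context: java.text.DecimalFormat is not thread-safe, so the shared static instance removed above could garble concurrent output. A common remedy, had the method been kept (an assumption; the patch simply deletes the unused code), is a per-thread formatter:

    import java.text.DecimalFormat;

    // one DecimalFormat per thread; no shared mutable formatter state
    private static final ThreadLocal<DecimalFormat> ONE_DECIMAL =
        ThreadLocal.withInitial(() -> new DecimalFormat("0.0"));

    // e.g. ONE_DECIMAL.get().format(1536 / 1024.0) -> "1.5"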



hive git commit: HIVE-20037: Print root cause exception's toString() rather than getMessage() (Aihua Xu, reviewed by Sahil Takiar)

2018-07-12 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master a3eacb9dd -> 57dd30441


HIVE-20037: Print root cause exception's toString() rather than getMessage() (Aihua Xu, reviewed by Sahil Takiar)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/57dd3044
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/57dd3044
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/57dd3044

Branch: refs/heads/master
Commit: 57dd30441a708f9fe653aea1c54df678ed459c34
Parents: a3eacb9
Author: Aihua Xu 
Authored: Fri Jun 29 14:40:43 2018 -0700
Committer: Aihua Xu 
Committed: Thu Jul 12 13:38:12 2018 -0700

--
 .../hive/ql/exec/spark/session/SparkSessionImpl.java  | 10 +++---
 .../exec/spark/session/TestSparkSessionManagerImpl.java   |  5 +++--
 2 files changed, 6 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/57dd3044/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
index 6e37d93..0f2f031 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
@@ -215,18 +215,14 @@ public class SparkSessionImpl implements SparkSession {
               sessionId, matchedString.toString());
         } else {
           return new HiveException(e, ErrorMsg.SPARK_CREATE_CLIENT_ERROR, sessionId,
-              getRootCause(oe));
+              Throwables.getRootCause(e).toString());
         }
       }
       e = e.getCause();
     }

-    return new HiveException(oe, ErrorMsg.SPARK_CREATE_CLIENT_ERROR, sessionId, getRootCause(oe));
-  }
-
-  private String getRootCause(Throwable e) {
-    Throwable rootCause = Throwables.getRootCause(e);
-    return rootCause.getClass().getName() + ": " + rootCause.getMessage();
+    return new HiveException(oe, ErrorMsg.SPARK_CREATE_CLIENT_ERROR, sessionId,
+        Throwables.getRootCause(oe).toString());
   }

   private boolean matches(String input, String regex, StringBuilder matchedString) {

http://git-wip-us.apache.org/repos/asf/hive/blob/57dd3044/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/session/TestSparkSessionManagerImpl.java
--
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/session/TestSparkSessionManagerImpl.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/session/TestSparkSessionManagerImpl.java
index 15756da..6964764 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/session/TestSparkSessionManagerImpl.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/session/TestSparkSessionManagerImpl.java
@@ -180,8 +180,9 @@ public class TestSparkSessionManagerImpl {
         "initial executor number 5 must between min executor number10 and max executor number 50");

     // Other exceptions which defaults to SPARK_CREATE_CLIENT_ERROR
-    e = new Exception("Other exception");
-    checkHiveException(ss, e, ErrorMsg.SPARK_CREATE_CLIENT_ERROR, "Other exception");
+    e = new java.lang.NoClassDefFoundError("org/apache/spark/SparkConf");
+    checkHiveException(ss, e, ErrorMsg.SPARK_CREATE_CLIENT_ERROR,
+        "java.lang.NoClassDefFoundError: org/apache/spark/SparkConf");
   }

   private void checkHiveException(SparkSessionImpl ss, Throwable e, ErrorMsg



hive git commit: HIVE-19948: HiveCli is not splitting the command by semicolon properly if quotes are inside the string (Aihua Xu, reviewed by Sahil Takiar)

2018-06-25 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master f2c4f3193 -> f37c5de6c


HIVE-19948: HiveCli is not splitting the command by semicolon properly if quotes are inside the string (Aihua Xu, reviewed by Sahil Takiar)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f37c5de6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f37c5de6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f37c5de6

Branch: refs/heads/master
Commit: f37c5de6c32b9395d1b34fa3c02ed06d1bfbf6eb
Parents: f2c4f31
Author: Aihua Xu 
Authored: Wed Jun 20 09:01:10 2018 -0700
Committer: Aihua Xu 
Committed: Mon Jun 25 11:39:55 2018 -0700

--
 .../org/apache/hadoop/hive/cli/CliDriver.java   | 61 
 .../hadoop/hive/cli/TestCliDriverMethods.java   | 23 
 2 files changed, 59 insertions(+), 25 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/f37c5de6/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
--
diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index 68741f6..806663d 100644
--- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -416,44 +416,55 @@ public class CliDriver {
 }
   }
 
+  /**
+   * Split the line by semicolons, ignoring any semicolon inside single or double quotes.
+   */
   public static List<String> splitSemiColon(String line) {
-    boolean insideSingleQuote = false;
-    boolean insideDoubleQuote = false;
+    boolean inQuotes = false;
     boolean escape = false;
-    int beginIndex = 0;
+
     List<String> ret = new ArrayList<>();
+
+    char quoteChar = '"';
+    int beginIndex = 0;
     for (int index = 0; index < line.length(); index++) {
-      if (line.charAt(index) == '\'') {
-        // take a look to see if it is escaped
-        if (!escape) {
-          // flip the boolean variable
-          insideSingleQuote = !insideSingleQuote;
-        }
-      } else if (line.charAt(index) == '\"') {
-        // take a look to see if it is escaped
-        if (!escape) {
-          // flip the boolean variable
-          insideDoubleQuote = !insideDoubleQuote;
-        }
-      } else if (line.charAt(index) == ';') {
-        if (insideSingleQuote || insideDoubleQuote) {
-          // do not split
-        } else {
-          // split, do not include ; itself
+      char c = line.charAt(index);
+      switch (c) {
+      case ';':
+        if (!inQuotes) {
           ret.add(line.substring(beginIndex, index));
           beginIndex = index + 1;
         }
-      } else {
-        // nothing to do
+        break;
+      case '"':
+      case '\'':
+        if (!escape) {
+          if (!inQuotes) {
+            quoteChar = c;
+            inQuotes = !inQuotes;
+          } else {
+            if (c == quoteChar) {
+              inQuotes = !inQuotes;
+            }
+          }
+        }
+        break;
+      default:
+        break;
       }
-      // set the escape
+
       if (escape) {
         escape = false;
-      } else if (line.charAt(index) == '\\') {
+      } else if (c == '\\') {
         escape = true;
       }
     }
-    ret.add(line.substring(beginIndex));
+
+    if (beginIndex < line.length()) {
+      ret.add(line.substring(beginIndex));
+    }
+
     return ret;
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/f37c5de6/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
--
diff --git a/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java b/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
index c06ec3e..8f8f04e 100644
--- a/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
+++ b/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
@@ -53,6 +53,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.junit.Test;


 // Cannot call class TestCliDriver since that's the name of the generated
@@ -353,6 +354,28 @@ public class TestCliDriverMethods extends TestCase {
     }
   }

+  @Test
+  public void testCommandSplits() {
+    // Test double quote in the string
+    String cmd1 = "insert into escape1 partition (ds='1', part='\"') values (\"!\")";
+    assertEquals(cmd1, CliDriver.splitSemiColon(cmd1).get(0));
+    assertEquals(cmd1, CliDriver.splitSemiColon(cmd1 + ";").get(0));
+
+    // Test escape
+    String cmd2 = "insert into escape1 partition (ds='1', part='\"\\'') values (\"!\")";
+    assertEquals(cmd2,
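
A hedged usage sketch of the repaired splitting (expected values inferred from the new logic):

    List<String> parts = CliDriver.splitSemiColon("SELECT ';'; SELECT 1");
    // parts.get(0) -> "SELECT ';'"   (the quoted semicolon no longer splits)
    // parts.get(1) -> " SELECT 1"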

hive git commit: HIVE-19899: Support stored as JsonFile (Aihua Xu, reviewed by Yongzhi Chen, BELUGA BEHR)

2018-06-21 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 6adab1c2a -> 24e16cc57


HIVE-19899: Support stored as JsonFile (Aihua Xu, reviewed by Yongzhi Chen, BELUGA BEHR)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/24e16cc5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/24e16cc5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/24e16cc5

Branch: refs/heads/master
Commit: 24e16cc57293ea6771cd55009f8cfd29870a39ee
Parents: 6adab1c
Author: Aihua Xu 
Authored: Thu Jun 14 13:35:49 2018 -0700
Committer: Aihua Xu 
Committed: Thu Jun 21 14:36:07 2018 -0700

--
 .../hcatalog/pig/AbstractHCatStorerTest.java|  2 +-
 .../pig/TestHCatLoaderComplexSchema.java|  3 ++
 .../hive/hcatalog/pig/TestHCatStorer.java   |  4 +-
 .../apache/hadoop/hive/ql/io/IOConstants.java   |  1 +
 .../ql/io/JsonFileStorageFormatDescriptor.java  | 51 
 ...he.hadoop.hive.ql.io.StorageFormatDescriptor |  1 +
 .../hive/ql/io/TestStorageFormatDescriptor.java |  3 ++
 .../test/queries/clientpositive/json_serde1.q   |  9 ++--
 .../results/clientpositive/json_serde1.q.out| 44 -
 9 files changed, 109 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/24e16cc5/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
--
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
index 97277b5..a5cf3a5 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
@@ -54,7 +54,7 @@ import org.slf4j.LoggerFactory;
 public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   static Logger LOG = LoggerFactory.getLogger(AbstractHCatStorerTest.class);
   static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
-  String storageFormat;
+  protected String storageFormat;

   public AbstractHCatStorerTest() {
     storageFormat = getStorageFormat();

http://git-wip-us.apache.org/repos/asf/hive/blob/24e16cc5/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
--
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
index 8f06d39..37e670c 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
@@ -75,6 +75,9 @@ public class TestHCatLoaderComplexSchema {
     put(IOConstants.PARQUETFILE, new HashSet<String>() {{
       add("testMapNullKey");
     }});
+    put(IOConstants.JSONFILE, new HashSet<String>() {{
+      add("testMapNullKey");
+    }});
   }};

   private String storageFormat;

http://git-wip-us.apache.org/repos/asf/hive/blob/24e16cc5/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
--
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
index 477ea66..cb02139 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
@@ -86,8 +86,6 @@ public class TestHCatStorer extends AbstractHCatStorerTest {
     }
   };

-  private String storageFormat;
-
   @Parameterized.Parameters
   public static Collection<Object[]> generateParameters() {
     return StorageFormats.names();
@@ -99,7 +97,7 @@ public class TestHCatStorer extends AbstractHCatStorerTest {

   @Override
   String getStorageFormat() {
-    return null;
+    return this.storageFormat;
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hive/blob/24e16cc5/ql/src/java/org/apache/hadoop/hive/ql/io/IOConstants.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/IOConstants.java b/ql/src/java/org/apache/hadoop/hive/ql/io/IOConstants.java
index f60d296..2be864e 100644
--- 

hive git commit: HIVE-19203: Thread-Safety Issue in HiveMetaStore (Alice Fan, reviewed by Aihua Xu)

2018-06-12 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 230ae3629 -> 6814c8495


HIVE-19203: Thread-Safety Issue in HiveMetaStore (Alice Fan, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6814c849
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6814c849
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6814c849

Branch: refs/heads/master
Commit: 6814c84951b41486f6c8af07b0a79f8f0938b636
Parents: 230ae36
Author: Aihua Xu 
Authored: Tue Jun 12 15:16:38 2018 -0700
Committer: Aihua Xu 
Committed: Tue Jun 12 15:16:38 2018 -0700

--
 .../java/org/apache/hadoop/hive/metastore/HiveMetaStore.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/6814c849/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
--
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 8d7b0c3..7dbdba8 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -350,11 +350,11 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       return null;
     }

-    private static int nextSerialNum = 0;
+    private static AtomicInteger nextSerialNum = new AtomicInteger();
     private static ThreadLocal<Integer> threadLocalId = new ThreadLocal<Integer>() {
       @Override
       protected Integer initialValue() {
-        return nextSerialNum++;
+        return nextSerialNum.getAndIncrement();
       }
     };
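
A hedged sketch of the race being fixed: nextSerialNum++ is a non-atomic read-modify-write, so two threads initializing their ThreadLocal concurrently could observe the same value and end up sharing an id. The same fix in modern idiom (equivalent in effect to the patch, not the committed text):

    import java.util.concurrent.atomic.AtomicInteger;

    private static final AtomicInteger nextSerialNum = new AtomicInteger();
    private static final ThreadLocal<Integer> threadLocalId =
        ThreadLocal.withInitial(nextSerialNum::getAndIncrement); // atomic, no duplicate ids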
 



hive git commit: HIVE-19053: RemoteSparkJobStatus#getSparkJobInfo treats all exceptions as timeout errors (Aihua Xu, reviewed by Sahil Takiar)

2018-06-08 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 148635cb8 -> 913baef82


HIVE-19053: RemoteSparkJobStatus#getSparkJobInfo treats all exceptions as timeout errors (Aihua Xu, reviewed by Sahil Takiar)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/913baef8
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/913baef8
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/913baef8

Branch: refs/heads/master
Commit: 913baef8279f6682f7b209a1f4e7c445a85f17e9
Parents: 148635c
Author: Aihua Xu 
Authored: Tue Jun 5 13:47:12 2018 -0700
Committer: Aihua Xu 
Committed: Fri Jun 8 11:58:34 2018 -0700

--
 ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java|  2 ++
 .../exec/spark/status/impl/RemoteSparkJobStatus.java   | 13 ++---
 2 files changed, 12 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/913baef8/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index 8baf309..bc2cffa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -585,6 +585,8 @@ public enum ErrorMsg {
       "Cannot create Spark client on a closed session {0}", true),

   SPARK_JOB_INTERRUPTED(30044, "Spark job was interrupted while executing"),
+  SPARK_GET_JOB_INFO_INTERRUPTED(30045, "Spark job was interrupted while getting job info"),
+  SPARK_GET_JOB_INFO_EXECUTIONERROR(30046, "Spark job failed in execution while getting job info due to exception {0}"),

   //========== 40000 range starts here ==========//
 

http://git-wip-us.apache.org/repos/asf/hive/blob/913baef8/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
index d2e28b0..832832b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
@@ -25,6 +25,8 @@ import org.apache.hadoop.hive.ql.exec.spark.status.SparkStage;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Throwables;
+
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatistics;
 import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatisticsBuilder;
@@ -37,7 +39,6 @@ import org.apache.hive.spark.client.JobContext;
 import org.apache.hive.spark.client.JobHandle;
 import org.apache.hive.spark.client.SparkClient;
 import org.apache.hive.spark.counter.SparkCounters;
-
 import org.apache.spark.JobExecutionStatus;
 import org.apache.spark.SparkJobInfo;
 import org.apache.spark.SparkStageInfo;
@@ -47,8 +48,10 @@ import java.io.Serializable;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 
 /**
 * Used with remote spark client.
@@ -198,10 +201,14 @@ public class RemoteSparkJobStatus implements SparkJobStatus {
         new GetJobInfoJob(jobHandle.getClientJobId(), sparkJobId));
     try {
       return getJobInfo.get(sparkClientTimeoutInSeconds, TimeUnit.SECONDS);
-    } catch (Exception e) {
-      LOG.warn("Failed to get job info.", e);
+    } catch (TimeoutException e) {
       throw new HiveException(e, ErrorMsg.SPARK_GET_JOB_INFO_TIMEOUT,
           Long.toString(sparkClientTimeoutInSeconds));
+    } catch (InterruptedException e) {
+      throw new HiveException(e, ErrorMsg.SPARK_GET_JOB_INFO_INTERRUPTED);
+    } catch (ExecutionException e) {
+      throw new HiveException(e, ErrorMsg.SPARK_GET_JOB_INFO_EXECUTIONERROR,
+          Throwables.getRootCause(e).getMessage());
     }
   }
 



hive git commit: HIVE-18766: Race condition during shutdown of RemoteDriver, error messages aren't always sent (Aihua Xu, reviewed by Sahil Takiar)

2018-06-06 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 77c145043 -> aae62d871


HIVE-18766: Race condition during shutdown of RemoteDriver, error messages aren't always sent (Aihua Xu, reviewed by Sahil Takiar)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/aae62d87
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/aae62d87
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/aae62d87

Branch: refs/heads/master
Commit: aae62d871bd3bf61281b03e2ef183b214e610cd5
Parents: 77c1450
Author: Aihua Xu 
Authored: Tue Jun 5 13:16:54 2018 -0700
Committer: Aihua Xu 
Committed: Wed Jun 6 14:14:39 2018 -0700

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |  2 +-
 .../apache/hive/spark/client/RemoteDriver.java  | 44 +---
 .../hive/spark/client/rpc/RpcConfiguration.java |  8 
 3 files changed, 37 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/aae62d87/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index cd425aa..dd42fd1 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -4206,7 +4206,7 @@ public class HiveConf extends Configuration {
 "in shuffle. This should result in less shuffled data."),
 SPARK_CLIENT_FUTURE_TIMEOUT("hive.spark.client.future.timeout",
   "60s", new TimeValidator(TimeUnit.SECONDS),
-  "Timeout for requests from Hive client to remote Spark driver."),
+  "Timeout for requests between Hive client and remote Spark driver."),
 SPARK_JOB_MONITOR_TIMEOUT("hive.spark.job.monitor.timeout",
   "60s", new TimeValidator(TimeUnit.SECONDS),
   "Timeout for job monitor to get Spark job state."),

http://git-wip-us.apache.org/repos/asf/hive/blob/aae62d87/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java
--
diff --git 
a/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java 
b/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java
index caa850c..8130860 100644
--- a/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java
+++ b/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java
@@ -35,6 +35,8 @@ import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.commons.io.FileUtils;
@@ -92,6 +94,8 @@ public class RemoteDriver {
   public static final String REMOTE_DRIVER_PORT_CONF = "--remote-port";
   public static final String REMOTE_DRIVER_CONF = "--remote-driver-conf";
 
+  private final long futureTimeout; // Rpc call timeout in milliseconds
+
   private RemoteDriver(String[] args) throws Exception {
 this.activeJobs = Maps.newConcurrentMap();
 this.jcLock = new Object();
@@ -135,7 +139,9 @@ public class RemoteDriver {
 String secret = mapConf.get(SparkClientFactory.CONF_KEY_SECRET);
 Preconditions.checkArgument(secret != null, "No secret provided.");
 
-int threadCount = new RpcConfiguration(mapConf).getRpcThreadCount();
+RpcConfiguration rpcConf = new RpcConfiguration(mapConf);
+futureTimeout = rpcConf.getFutureTimeoutMs();
+int threadCount = rpcConf.getRpcThreadCount();
 this.egroup = new NioEventLoopGroup(
 threadCount,
 new ThreadFactoryBuilder()
@@ -232,13 +238,19 @@ public class RemoteDriver {
   for (JobWrapper job : activeJobs.values()) {
 cancelJob(job);
   }
+
   if (error != null) {
-protocol.sendError(error);
+try {
+  protocol.sendError(error).get(futureTimeout, TimeUnit.MILLISECONDS);
+} catch(InterruptedException|ExecutionException|TimeoutException e) {
+  LOG.warn("Failed to send out the error during RemoteDriver 
shutdown", e);
+}
   }
   if (jc != null) {
 jc.stop();
   }
   clientRpc.close();
+
   egroup.shutdownGracefully();
   synchronized (shutdownLock) {
 shutdownLock.notifyAll();
@@ -265,34 +277,35 @@ public class RemoteDriver {
 
   private class DriverProtocol extends BaseProtocol {
 
-void sendError(Throwable error) {
+Future sendError(Throwable error) {
   LOG.debug("Send error to Client: {}", 
Throwables.getStackTraceAsString(error));
-  clientRpc.call(new 
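The essence of the fix: sendError now returns a Future, and shutdown waits on it (bounded by the configured future timeout) before closing the RPC channel, so the error message no longer races the teardown. A rough sketch of that ordering, using a plain ExecutorService in place of the Rpc layer:

import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public class ShutdownFlushDemo {
  public static void main(String[] args) {
    ExecutorService rpc = Executors.newSingleThreadExecutor();
    long futureTimeoutMs = 2000; // stands in for hive.spark.client.future.timeout
    // The async send; previously shutdown raced ahead and could drop it.
    Future<?> send = rpc.submit(() -> System.out.println("error sent to client"));
    try {
      send.get(futureTimeoutMs, TimeUnit.MILLISECONDS); // wait, but never forever
    } catch (InterruptedException | ExecutionException | TimeoutException e) {
      System.err.println("Failed to send out the error during shutdown: " + e);
    }
    rpc.shutdown(); // tear down the transport only after the send has completed
  }
}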

hive git commit: HIVE-19662: Upgrade Avro to 1.8.2 (Janaki Lahorani, reviewed by Aihua Xu)

2018-05-30 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 15bf4eb98 -> 664de83d2


HIVE-19662: Upgrade Avro to 1.8.2 (Janaki Lahorani, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/664de83d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/664de83d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/664de83d

Branch: refs/heads/master
Commit: 664de83d24cbddc13abe4087328fc0fbd4268503
Parents: 15bf4eb
Author: Aihua Xu 
Authored: Wed May 30 11:20:54 2018 -0700
Committer: Aihua Xu 
Committed: Wed May 30 12:04:07 2018 -0700

--
 hbase-handler/pom.xml | 4 ++--
 pom.xml   | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/664de83d/hbase-handler/pom.xml
--
diff --git a/hbase-handler/pom.xml b/hbase-handler/pom.xml
index be8e0b9..06939a4 100644
--- a/hbase-handler/pom.xml
+++ b/hbase-handler/pom.xml
@@ -227,7 +227,7 @@
 
   org.apache.avro
   avro
-  1.7.6
+  ${avro.version}

   
 
@@ -249,7 +249,7 @@
   
 org.apache.avro
 avro-maven-plugin
-1.7.6
+${avro.version}
 

generate-test-sources

http://git-wip-us.apache.org/repos/asf/hive/blob/664de83d/pom.xml
--
diff --git a/pom.xml b/pom.xml
index db847a1..462b62f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -122,7 +122,7 @@
 
 0.8.0
 1.11.0
-1.7.7
+1.8.2
 0.8.0.RELEASE
 1.16.0
 4.2.4



hive git commit: HIVE-19424: NPE In MetaDataFormatters (Alice Fan, reviewed by Aihua Xu)

2018-05-16 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 6875c7655 -> 29f57fc73


HIVE-19424: NPE In MetaDataFormatters (Alice Fan, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/29f57fc7
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/29f57fc7
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/29f57fc7

Branch: refs/heads/master
Commit: 29f57fc73ef46dc4b5b6ea7c74e493e3cb3e2f7f
Parents: 6875c76
Author: Aihua Xu 
Authored: Wed May 16 11:27:53 2018 -0700
Committer: Aihua Xu 
Committed: Wed May 16 14:06:37 2018 -0700

--
 .../hive/ql/metadata/formatting/JsonMetaDataFormatter.java   | 8 
 .../hive/ql/metadata/formatting/TextMetaDataFormatter.java   | 8 
 2 files changed, 8 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/29f57fc7/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
index e7b5af6..df0a237 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
@@ -204,15 +204,15 @@ public class JsonMetaDataFormatter implements 
MetaDataFormatter {
 if (par.getLocation() != null) {
   tblLoc = par.getDataLocation().toString();
 }
-inputFormattCls = par.getInputFormatClass().getName();
-outputFormattCls = par.getOutputFormatClass().getName();
+inputFormattCls = par.getInputFormatClass() == null ? null : 
par.getInputFormatClass().getName();
+outputFormattCls = par.getOutputFormatClass() == null ? null : 
par.getOutputFormatClass().getName();
   }
 } else {
   if (tbl.getPath() != null) {
 tblLoc = tbl.getDataLocation().toString();
   }
-  inputFormattCls = tbl.getInputFormatClass().getName();
-  outputFormattCls = tbl.getOutputFormatClass().getName();
+  inputFormattCls = tbl.getInputFormatClass() == null ? null : 
tbl.getInputFormatClass().getName();
+  outputFormattCls = tbl.getOutputFormatClass() == null ? null : 
tbl.getOutputFormatClass().getName();
 }
 
 MapBuilder builder = MapBuilder.create();

http://git-wip-us.apache.org/repos/asf/hive/blob/29f57fc7/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
index 2529923..326cbed 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
@@ -291,15 +291,15 @@ class TextMetaDataFormatter implements MetaDataFormatter {
 if (par.getLocation() != null) {
   tblLoc = par.getDataLocation().toString();
 }
-inputFormattCls = par.getInputFormatClass().getName();
-outputFormattCls = par.getOutputFormatClass().getName();
+inputFormattCls = par.getInputFormatClass() == null ? null : 
par.getInputFormatClass().getName();
+outputFormattCls = par.getOutputFormatClass() == null ? null : 
par.getOutputFormatClass().getName();
   }
 } else {
   if (tbl.getPath() != null) {
 tblLoc = tbl.getDataLocation().toString();
   }
-  inputFormattCls = tbl.getInputFormatClass().getName();
-  outputFormattCls = tbl.getOutputFormatClass().getName();
+  inputFormattCls = tbl.getInputFormatClass() == null ? null : 
tbl.getInputFormatClass().getName();
+  outputFormattCls = tbl.getOutputFormatClass() == null ? null : 
tbl.getOutputFormatClass().getName();
 }
 
 String owner = tbl.getOwner();
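
Both formatters now guard getInputFormatClass()/getOutputFormatClass() against null (e.g. for tables whose format class cannot be resolved) before calling getName(). The guard generalizes to a small null-safe mapping helper; a sketch, not Hive code:

import java.util.function.Function;

public class NullSafeDemo {
  // Apply f only when value is non-null; otherwise propagate null.
  static <T, R> R mapOrNull(T value, Function<T, R> f) {
    return value == null ? null : f.apply(value);
  }

  public static void main(String[] args) {
    Class<?> inputFormat = null; // e.g. a table with no resolvable input format
    String name = mapOrNull(inputFormat, Class::getName);
    System.out.println(name);    // prints "null" instead of throwing an NPE
  }
}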



hive git commit: HIVE-19496: Check untar folder (Aihua Xu, reviewed by Sahil Takiar)

2018-05-16 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/branch-3 f1f265046 -> 3c44a3820


HIVE-19496: Check untar folder (Aihua Xu, reviewed by Sahil Takiar)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3c44a382
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3c44a382
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3c44a382

Branch: refs/heads/branch-3
Commit: 3c44a3820424de3dc07a80714732b548ae147020
Parents: f1f2650
Author: Aihua Xu 
Authored: Thu May 10 14:51:57 2018 -0700
Committer: Aihua Xu 
Committed: Wed May 16 10:19:08 2018 -0700

--
 .../src/java/org/apache/hadoop/hive/common/CompressionUtils.java | 4 
 1 file changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/3c44a382/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java
--
diff --git 
a/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java 
b/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java
index 681c506..d98632e 100644
--- a/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java
@@ -159,6 +159,10 @@ public class CompressionUtils {
 TarArchiveEntry entry = null;
 while ((entry = (TarArchiveEntry) debInputStream.getNextEntry()) != null) {
   final File outputFile = new File(outputDir, entry.getName());
+  if (!outputFile.toPath().toAbsolutePath().normalize()
+  .startsWith(outputDir.toPath().toAbsolutePath().normalize())) {
+throw new IOException("Untarred file is not under the output 
directory");
+  }
   if (entry.isDirectory()) {
 if (flatten) {
   // no sub-directories
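
The added check is a standard defense against archive path traversal ("zip slip"): an entry name like ../../etc/passwd would otherwise escape the extraction directory. A self-contained sketch of the same guard:

import java.io.File;
import java.io.IOException;
import java.nio.file.Path;

public class UntarGuardDemo {
  // Resolve an archive entry under outputDir, rejecting names that
  // normalize to a path outside the extraction directory.
  static File safeEntry(File outputDir, String entryName) throws IOException {
    File outputFile = new File(outputDir, entryName);
    Path out = outputFile.toPath().toAbsolutePath().normalize();
    Path dir = outputDir.toPath().toAbsolutePath().normalize();
    if (!out.startsWith(dir)) {
      throw new IOException("Untarred file is not under the output directory: " + entryName);
    }
    return outputFile;
  }

  public static void main(String[] args) throws IOException {
    File dir = new File("/tmp/untar");
    System.out.println(safeEntry(dir, "data/part-0001")); // resolves under /tmp/untar
    try {
      safeEntry(dir, "../../etc/passwd");                 // would escape the directory
    } catch (IOException expected) {
      System.out.println("rejected: " + expected.getMessage());
    }
  }
}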



hive git commit: HIVE-19496: Check untar folder (Aihua Xu, reviewed by Sahil Takiar)

2018-05-15 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master ff446b779 -> 6e6b0cb7b


HIVE-19496: Check untar folder (Aihua Xu, reviewed by Sahil Takiar)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6e6b0cb7
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6e6b0cb7
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6e6b0cb7

Branch: refs/heads/master
Commit: 6e6b0cb7b1950e6b0e4a19b0f9e2d185031dd83f
Parents: ff446b7
Author: Aihua Xu 
Authored: Thu May 10 14:51:57 2018 -0700
Committer: Aihua Xu 
Committed: Tue May 15 10:11:19 2018 -0700

--
 .../src/java/org/apache/hadoop/hive/common/CompressionUtils.java | 4 
 1 file changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/6e6b0cb7/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java
--
diff --git 
a/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java 
b/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java
index 681c506..d98632e 100644
--- a/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java
@@ -159,6 +159,10 @@ public class CompressionUtils {
 TarArchiveEntry entry = null;
 while ((entry = (TarArchiveEntry) debInputStream.getNextEntry()) != null) {
   final File outputFile = new File(outputDir, entry.getName());
+  if (!outputFile.toPath().toAbsolutePath().normalize()
+  .startsWith(outputDir.toPath().toAbsolutePath().normalize())) {
+throw new IOException("Untarred file is not under the output 
directory");
+  }
   if (entry.isDirectory()) {
 if (flatten) {
   // no sub-directories



hive git commit: HIVE-19228: Remove commons-httpclient 3.x usage (Janaki Lahorani reviewed by Aihua Xu)

2018-05-09 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master e1e497574 -> f2172cdbc


HIVE-19228: Remove commons-httpclient 3.x usage (Janaki Lahorani reviewed by 
Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f2172cdb
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f2172cdb
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f2172cdb

Branch: refs/heads/master
Commit: f2172cdbc3af92e18db68c3daac44a73e92eaf48
Parents: e1e4975
Author: Aihua Xu 
Authored: Wed May 9 10:56:32 2018 -0700
Committer: Aihua Xu 
Committed: Wed May 9 11:27:45 2018 -0700

--
 .../apache/hive/jdbc/TestActivePassiveHA.java   | 99 +---
 pom.xml |  6 --
 ql/pom.xml  | 15 ---
 .../hive/ql/parse/LoadSemanticAnalyzer.java | 16 +++-
 .../apache/hive/service/server/HiveServer2.java | 52 ++
 5 files changed, 111 insertions(+), 77 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/f2172cdb/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java
index c55271f..4055f13 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java
@@ -36,11 +36,6 @@ import java.util.Map;
 import java.util.UUID;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.HttpMethodBase;
-import org.apache.commons.httpclient.methods.DeleteMethod;
-import org.apache.commons.httpclient.methods.GetMethod;
-import org.apache.commons.httpclient.methods.OptionsMethod;
 import org.apache.curator.test.TestingServer;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -54,9 +49,22 @@ import org.apache.hive.service.server.HS2ActivePassiveHARegistryClient;
 import org.apache.hive.service.server.HiveServer2Instance;
 import org.apache.hive.service.server.TestHS2HttpServerPam;
 import org.apache.hive.service.servlet.HS2Peers;
+import org.apache.http.Header;
 import org.apache.http.HttpException;
 import org.apache.http.HttpHeaders;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpOptions;
+import org.apache.http.client.methods.HttpRequestBase;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.StatusLine;
+import org.apache.http.util.EntityUtils;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.eclipse.jetty.http.HttpHeader;
+import org.eclipse.jetty.util.B64Code;
+import org.eclipse.jetty.util.StringUtil;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -406,7 +414,7 @@ public class TestActivePassiveHA {
   assertEquals("true", sendGet(url1, true));
 
   // trigger failover on miniHS2_1 without authorization header
-  assertEquals("Unauthorized", sendDelete(url1, false));
+  assertTrue(sendDelete(url1, false).contains("Unauthorized"));
   assertTrue(sendDelete(url1, true).contains("Failover successful!"));
   assertEquals(true, miniHS2_1.getNotLeaderTestFuture().get());
   assertEquals(false, miniHS2_1.isLeader());
@@ -541,56 +549,79 @@ public class TestActivePassiveHA {
   }
 
   private String sendGet(String url, boolean enableAuth) throws Exception {
-return sendAuthMethod(new GetMethod(url), enableAuth, false);
+return sendAuthMethod(new HttpGet(url), enableAuth, false);
   }
 
  private String sendGet(String url, boolean enableAuth, boolean enableCORS) throws Exception {
-return sendAuthMethod(new GetMethod(url), enableAuth, enableCORS);
+return sendAuthMethod(new HttpGet(url), enableAuth, enableCORS);
   }
 
   private String sendDelete(String url, boolean enableAuth) throws Exception {
-return sendAuthMethod(new DeleteMethod(url), enableAuth, false);
+return sendAuthMethod(new HttpDelete(url), enableAuth, false);
   }
 
  private String sendDelete(String url, boolean enableAuth, boolean enableCORS) throws Exception {
-return sendAuthMethod(new DeleteMethod(url), enableAuth, enableCORS);
+return sendAuthMethod(new HttpDelete(url), enableAuth, enableCORS);
   }
 
-  private String sendAuthMethod(HttpMethodBase method, boolean enableAuth, boolean enableCORS) throws Exception {
-
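
The test migrates from commons-httpclient 3.x (GetMethod/DeleteMethod on HttpMethodBase) to the HttpClient 4.x request classes imported above. The 4.x idiom, sketched with a hypothetical endpoint URL:

import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

public class HttpClient4Demo {
  public static void main(String[] args) throws Exception {
    // Clients and responses are Closeable in 4.x, so try-with-resources
    // replaces the 3.x releaseConnection() bookkeeping.
    try (CloseableHttpClient client = HttpClients.createDefault()) {
      HttpGet get = new HttpGet("http://localhost:10002/leader"); // hypothetical endpoint
      try (CloseableHttpResponse response = client.execute(get)) {
        System.out.println(response.getStatusLine().getStatusCode());
        System.out.println(EntityUtils.toString(response.getEntity()));
      }
    }
  }
}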

hive git commit: HIVE-18915: Better client logging when a HoS session can't be opened (Aihua Xu, reviewed by Sahil Takiar)

2018-04-30 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master f8bc4868e -> 3c4a9c6bc


HIVE-18915: Better client logging when a HoS session can't be opened (Aihua Xu, 
reviewed by Sahil Takiar)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3c4a9c6b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3c4a9c6b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3c4a9c6b

Branch: refs/heads/master
Commit: 3c4a9c6bc37dfc870800523a6d26f667d0b43818
Parents: f8bc486
Author: Aihua Xu 
Authored: Thu Apr 12 17:12:56 2018 -0700
Committer: Aihua Xu 
Committed: Mon Apr 30 16:35:41 2018 -0700

--
 .../org/apache/hadoop/hive/ql/ErrorMsg.java |  2 +-
 .../ql/exec/spark/session/SparkSessionImpl.java | 30 +---
 .../session/TestSparkSessionManagerImpl.java|  6 ++--
 3 files changed, 17 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/3c4a9c6b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java 
b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index 7d33fa3..94dd636 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -576,7 +576,7 @@ public enum ErrorMsg {
   SPARK_CREATE_CLIENT_INTERRUPTED(30040,
   "Interrupted while creating Spark client for session {0}", true),
   SPARK_CREATE_CLIENT_ERROR(30041,
-  "Failed to create Spark client for Spark session {0}", true),
+  "Failed to create Spark client for Spark session {0}: {1}", true),
   SPARK_CREATE_CLIENT_INVALID_RESOURCE_REQUEST(30042,
   "Failed to create Spark client due to invalid resource request: {0}", 
true),
   SPARK_CREATE_CLIENT_CLOSED_SESSION(30043,

http://git-wip-us.apache.org/repos/asf/hive/blob/3c4a9c6b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
index 2d5d03e..189de19 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
@@ -71,7 +71,6 @@ public class SparkSessionImpl implements SparkSession {
   private HiveSparkClient hiveSparkClient;
   private Path scratchDir;
   private final Object dirLock = new Object();
-  private String matchedString = null;
 
   public SparkSessionImpl() {
 sessionId = makeSessionId();
@@ -195,6 +194,7 @@ public class SparkSessionImpl implements SparkSession {
   @VisibleForTesting
   HiveException getHiveException(Throwable e) {
 Throwable oe = e;
+StringBuilder matchedString = new StringBuilder();
 while (e != null) {
   if (e instanceof TimeoutException) {
 return new HiveException(e, ErrorMsg.SPARK_CREATE_CLIENT_TIMEOUT);
@@ -202,31 +202,26 @@ public class SparkSessionImpl implements SparkSession {
 return new HiveException(e, ErrorMsg.SPARK_CREATE_CLIENT_INTERRUPTED, 
sessionId);
   } else if (e instanceof RuntimeException) {
 String sts = Throwables.getStackTraceAsString(e);
-if (matches(sts, AM_TIMEOUT_ERR)) {
+if (matches(sts, AM_TIMEOUT_ERR, matchedString)) {
   return new HiveException(e, ErrorMsg.SPARK_CREATE_CLIENT_TIMEOUT);
-} else if (matches(sts, UNKNOWN_QUEUE_ERR) || matches(sts, 
STOPPED_QUEUE_ERR)) {
-  return new HiveException(e, 
ErrorMsg.SPARK_CREATE_CLIENT_INVALID_QUEUE, matchedString);
-} else if (matches(sts, FULL_QUEUE_ERR)) {
-  return new HiveException(e, ErrorMsg.SPARK_CREATE_CLIENT_QUEUE_FULL, 
matchedString);
-} else if (matches(sts, INVALILD_MEM_ERR) || matches(sts, 
INVALID_CORE_ERR)) {
+} else if (matches(sts, UNKNOWN_QUEUE_ERR, matchedString) || 
matches(sts, STOPPED_QUEUE_ERR, matchedString)) {
+  return new HiveException(e, 
ErrorMsg.SPARK_CREATE_CLIENT_INVALID_QUEUE, matchedString.toString());
+} else if (matches(sts, FULL_QUEUE_ERR, matchedString)) {
+  return new HiveException(e, ErrorMsg.SPARK_CREATE_CLIENT_QUEUE_FULL, 
matchedString.toString());
+} else if (matches(sts, INVALILD_MEM_ERR, matchedString) || 
matches(sts, INVALID_CORE_ERR, matchedString)) {
   return new HiveException(e, 
ErrorMsg.SPARK_CREATE_CLIENT_INVALID_RESOURCE_REQUEST,
-  matchedString);
+  matchedString.toString());
 } else {
-  return new HiveException(e, ErrorMsg.SPARK_CREATE_CLIENT_ERROR, 
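
The refactor replaces the matchedString field, which was mutable state shared across calls, with a StringBuilder passed into matches() as an out-parameter. A sketch of that out-parameter pattern, with java.util.regex standing in for Hive's matcher:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class MatchOutParamDemo {
  // Report the matched text through a caller-supplied StringBuilder
  // instead of stashing it in a shared field.
  static boolean matches(String input, String regex, StringBuilder matched) {
    Matcher m = Pattern.compile(regex).matcher(input);
    if (m.find()) {
      matched.setLength(0);
      matched.append(m.group());
      return true;
    }
    return false;
  }

  public static void main(String[] args) {
    StringBuilder matched = new StringBuilder();
    if (matches("Queue root.prod is full", "root\\.[a-z]+", matched)) {
      System.out.println("matched queue: " + matched); // matched queue: root.prod
    }
  }
}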

[1/2] hive git commit: HIVE-19204: Detailed errors from some tasks are not displayed to the client because the tasks don't set exception when they fail (Aihua Xu, reviewed by Sahil Takiar)

2018-04-25 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master f94ae7fec -> f30efbebf


HIVE-19204: Detailed errors from some tasks are not displayed to the client 
because the tasks don't set exception when they fail (Aihua Xu, reviewed by 
Sahil Takiar)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/11b0d857
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/11b0d857
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/11b0d857

Branch: refs/heads/master
Commit: 11b0d85786cd58469d5662c3027e9389cff07710
Parents: f94ae7f
Author: Aihua Xu 
Authored: Mon Apr 16 10:36:02 2018 -0700
Committer: Aihua Xu 
Committed: Wed Apr 25 16:09:42 2018 -0700

--
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  6 -
 .../hive/ql/exec/ColumnStatsUpdateTask.java |  1 +
 .../hive/ql/exec/ExplainSQRewriteTask.java  |  8 +++---
 .../apache/hadoop/hive/ql/exec/ExplainTask.java |  5 ++--
 .../hive/ql/exec/MaterializedViewTask.java  |  1 +
 .../hadoop/hive/ql/exec/ReplCopyTask.java   |  4 +--
 .../apache/hadoop/hive/ql/exec/StatsTask.java   |  1 +
 .../hadoop/hive/ql/exec/mr/ExecDriver.java  |  4 +--
 .../io/rcfile/truncate/ColumnTruncateTask.java  | 26 +++-
 .../ql/reexec/ReExecutionOverlayPlugin.java |  2 +-
 10 files changed, 29 insertions(+), 29 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/11b0d857/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java 
b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 4e8dbe2..f83bdaf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -2389,7 +2389,11 @@ public class Driver implements IDriver {
 if(downstreamError != null) {
   //here we assume that upstream code may have parametrized the msg from ErrorMsg
   //so we want to keep it
-  errorMessage += ". " + downstreamError.getMessage();
+  if (downstreamError.getMessage() != null) {
+errorMessage += ". " + downstreamError.getMessage();
+  } else {
+errorMessage += ". " + 
org.apache.hadoop.util.StringUtils.stringifyException(downstreamError);
+  }
 }
 else {
   ErrorMsg em = ErrorMsg.getErrorMsg(exitVal);

http://git-wip-us.apache.org/repos/asf/hive/blob/11b0d857/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
index 207b66f..a53ff5a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
@@ -300,6 +300,7 @@ public class ColumnStatsUpdateTask extends Task<ColumnStatsUpdateWork> {
   Hive db = getHive();
   return persistColumnStats(db);
 } catch (Exception e) {
+  setException(e);
   LOG.info("Failed to persist stats in metastore", e);
 }
 return 1;

http://git-wip-us.apache.org/repos/asf/hive/blob/11b0d857/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainSQRewriteTask.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainSQRewriteTask.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainSQRewriteTask.java
index 80d54bf..1f9e9aa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainSQRewriteTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainSQRewriteTask.java
@@ -38,11 +38,13 @@ import org.apache.hadoop.hive.ql.parse.SubQueryDiagnostic;
 import org.apache.hadoop.hive.ql.plan.ExplainSQRewriteWork;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 public class ExplainSQRewriteTask extends Task<ExplainSQRewriteWork> implements Serializable {
   private static final long serialVersionUID = 1L;
+  private final Logger LOG = 
LoggerFactory.getLogger(this.getClass().getName());
 
   @Override
   public StageType getType() {
@@ -76,8 +78,8 @@ public class ExplainSQRewriteTask extends Task<ExplainSQRewriteWork> implements
   return (0);
 }
 catch (Exception e) {
-  console.printError("Failed with exception " + e.getMessage(),
-  "\n" + StringUtils.stringifyException(e));
+  setException(e);
+  LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
   return (1);
 }
 finally {
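
The recurring one-line fix across these tasks is setException(e) in the catch block, so the Driver can append the real cause to its error message instead of reporting only a numeric exit code. The shape of the pattern, with a hypothetical Task base class rather than Hive's:

public class TaskExceptionDemo {
  // Hypothetical stand-in for Hive's Task: remember the failure so the
  // caller can report the cause, not just a non-zero return code.
  static abstract class Task {
    private Throwable exception;
    void setException(Throwable t) { exception = t; }
    Throwable getException() { return exception; }
    abstract int execute();
  }

  public static void main(String[] args) {
    Task task = new Task() {
      @Override int execute() {
        try {
          throw new IllegalStateException("stats persistence failed");
        } catch (Exception e) {
          setException(e); // the one-line fix applied across the tasks above
          return 1;
        }
      }
    };
    if (task.execute() != 0) {
      System.err.println("FAILED: " + task.getException().getMessage());
    }
  }
}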


[2/2] hive git commit: HIVE-18986: Table rename will run java.lang.StackOverflowError in dataNucleus if the table contains large number of columns (Aihua Xu, reviewed by Yongzhi Chen)

2018-04-25 Thread aihuaxu
HIVE-18986: Table rename will run java.lang.StackOverflowError in dataNucleus 
if the table contains large number of columns (Aihua Xu, reviewed by Yongzhi 
Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f30efbeb
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f30efbeb
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f30efbeb

Branch: refs/heads/master
Commit: f30efbebf2ff85c55a5d9e3e2f86e0a51341df78
Parents: 11b0d85
Author: Aihua Xu 
Authored: Wed Apr 18 17:05:08 2018 -0700
Committer: Aihua Xu 
Committed: Wed Apr 25 16:10:30 2018 -0700

--
 .../queries/clientpositive/alter_rename_table.q | 12 ++-
 .../clientpositive/alter_rename_table.q.out | 88 
 .../apache/hadoop/hive/metastore/Batchable.java | 86 +++
 .../hive/metastore/MetaStoreDirectSql.java  | 61 ++
 .../hadoop/hive/metastore/ObjectStore.java  | 45 ++
 .../hive/metastore/conf/MetastoreConf.java  |  5 ++
 6 files changed, 227 insertions(+), 70 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/f30efbeb/ql/src/test/queries/clientpositive/alter_rename_table.q
--
diff --git a/ql/src/test/queries/clientpositive/alter_rename_table.q 
b/ql/src/test/queries/clientpositive/alter_rename_table.q
index 53fb230..bcf6ad5 100644
--- a/ql/src/test/queries/clientpositive/alter_rename_table.q
+++ b/ql/src/test/queries/clientpositive/alter_rename_table.q
@@ -36,4 +36,14 @@ create table source.src1 like default.src;
 load data local inpath '../../data/files/kv1.txt' overwrite into table 
source.src;
 
 ALTER TABLE source.src RENAME TO target.src1;
-select * from target.src1 tablesample (10 rows);
\ No newline at end of file
+select * from target.src1 tablesample (10 rows);
+
+set metastore.rawstore.batch.size=1;
+set metastore.try.direct.sql=false;
+
+create table source.src2 like default.src;
+load data local inpath '../../data/files/kv1.txt' overwrite into table 
source.src2;
+ANALYZE TABlE source.src2 COMPUTE STATISTICS FOR COLUMNS;
+ALTER TABLE source.src2 RENAME TO target.src3;
+DESC FORMATTED target.src3;
+select * from target.src3 tablesample (10 rows);

http://git-wip-us.apache.org/repos/asf/hive/blob/f30efbeb/ql/src/test/results/clientpositive/alter_rename_table.q.out
--
diff --git a/ql/src/test/results/clientpositive/alter_rename_table.q.out 
b/ql/src/test/results/clientpositive/alter_rename_table.q.out
index 732d8a2..9ac8fd2 100644
--- a/ql/src/test/results/clientpositive/alter_rename_table.q.out
+++ b/ql/src/test/results/clientpositive/alter_rename_table.q.out
@@ -261,3 +261,91 @@ POSTHOOK: Input: target@src1
 278val_278
 98 val_98
 484val_484
+PREHOOK: query: create table source.src2 like default.src
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:source
+PREHOOK: Output: source@src2
+POSTHOOK: query: create table source.src2 like default.src
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:source
+POSTHOOK: Output: source@src2
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite 
into table source.src2
+PREHOOK: type: LOAD
+ A masked pattern was here 
+PREHOOK: Output: source@src2
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite 
into table source.src2
+POSTHOOK: type: LOAD
+ A masked pattern was here 
+POSTHOOK: Output: source@src2
+PREHOOK: query: ANALYZE TABlE source.src2 COMPUTE STATISTICS FOR COLUMNS
+PREHOOK: type: QUERY
+PREHOOK: Input: source@src2
+ A masked pattern was here 
+PREHOOK: Output: source@src2
+POSTHOOK: query: ANALYZE TABlE source.src2 COMPUTE STATISTICS FOR COLUMNS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: source@src2
+ A masked pattern was here 
+POSTHOOK: Output: source@src2
+PREHOOK: query: ALTER TABLE source.src2 RENAME TO target.src3
+PREHOOK: type: ALTERTABLE_RENAME
+PREHOOK: Input: source@src2
+PREHOOK: Output: source@src2
+POSTHOOK: query: ALTER TABLE source.src2 RENAME TO target.src3
+POSTHOOK: type: ALTERTABLE_RENAME
+POSTHOOK: Input: source@src2
+POSTHOOK: Output: source@src2
+POSTHOOK: Output: target@src3
+PREHOOK: query: DESC FORMATTED target.src3
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: target@src3
+POSTHOOK: query: DESC FORMATTED target.src3
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: target@src3
+# col_name data_type   comment 
+keystring  default 
+value  string  default 
+
+# Detailed Table Information
+Database:  target   
+ A masked 
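
The fix introduces a Batchable helper plus a metastore.rawstore.batch.size knob so rename-driven column updates run in bounded slices instead of one enormous DataNucleus expression tree. The batching idea itself, reduced to a sketch (the real Batchable API in the metastore module differs):

import java.util.List;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class BatchDemo {
  // Apply op to items in fixed-size slices so no single call sees the
  // whole list at once.
  static <T> void runBatched(List<T> items, int batchSize, Consumer<List<T>> op) {
    for (int from = 0; from < items.size(); from += batchSize) {
      op.accept(items.subList(from, Math.min(from + batchSize, items.size())));
    }
  }

  public static void main(String[] args) {
    List<Integer> columnIds = IntStream.range(0, 10).boxed().collect(Collectors.toList());
    runBatched(columnIds, 3, batch -> System.out.println("updating columns " + batch));
  }
}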

hive git commit: HIVE-17429: Hive JDBC doesn't return rows when querying Impala (Zach Amsden, reviewed by Aihua Xu)

2018-04-12 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/branch-2.3 f78128a71 -> 1ca13b3ce


HIVE-17429: Hive JDBC doesn't return rows when querying Impala (Zach Amsden, 
reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1ca13b3c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1ca13b3c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1ca13b3c

Branch: refs/heads/branch-2.3
Commit: 1ca13b3cec97c4114090687079b6efbffeb498c3
Parents: f78128a
Author: Aihua Xu 
Authored: Thu Sep 7 17:17:14 2017 -0700
Committer: Aihua Xu 
Committed: Thu Apr 12 15:33:16 2018 -0700

--
 jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/1ca13b3c/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
--
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
index c385e2c..4c98517 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
@@ -254,7 +254,7 @@ public class HiveStatement implements java.sql.Statement {
 TGetOperationStatusResp status = waitForOperationToComplete();
 
 // The query should be completed by now
-if (!status.isHasResultSet()) {
+if (!status.isHasResultSet() && !stmtHandle.isHasResultSet()) {
   return false;
 }
 resultSet =  new 
HiveQueryResultSet.Builder(this).setClient(client).setSessionHandle(sessHandle)



hive git commit: HIVE-17429: Hive JDBC doesn't return rows when querying Impala (Zach Amsden, reviewed by Aihua Xu)

2018-04-12 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/branch-2.2 9e10b88c3 -> 1846c5a73


HIVE-17429: Hive JDBC doesn't return rows when querying Impala (Zach Amsden, 
reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1846c5a7
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1846c5a7
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1846c5a7

Branch: refs/heads/branch-2.2
Commit: 1846c5a737ee32b49dbeb6dd58ca7968f68fa31a
Parents: 9e10b88
Author: Aihua Xu 
Authored: Thu Sep 7 17:17:14 2017 -0700
Committer: Aihua Xu 
Committed: Thu Apr 12 15:26:30 2018 -0700

--
 jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/1846c5a7/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
--
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
index ed0ed08..d54c964 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
@@ -254,7 +254,7 @@ public class HiveStatement implements java.sql.Statement {
 TGetOperationStatusResp status = waitForOperationToComplete();
 
 // The query should be completed by now
-if (!status.isHasResultSet()) {
+if (!status.isHasResultSet() && !stmtHandle.isHasResultSet()) {
   return false;
 }
 resultSet =  new 
HiveQueryResultSet.Builder(this).setClient(client).setSessionHandle(sessHandle)



hive git commit: HIVE-19018: beeline -e now requires semicolon even when used with query from command line (Aihua Xu, reviewed by Yongzhi Chen)

2018-03-30 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master eea736134 -> 52290e72b


HIVE-19018: beeline -e now requires semicolon even when used with query from 
command line (Aihua Xu, reviewed by Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/52290e72
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/52290e72
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/52290e72

Branch: refs/heads/master
Commit: 52290e72b3029eddc17f0a0482fd6c1a05840bb0
Parents: eea7361
Author: Aihua Xu 
Authored: Wed Mar 21 17:36:50 2018 -0700
Committer: Aihua Xu 
Committed: Fri Mar 30 10:59:02 2018 -0700

--
 .../src/java/org/apache/hive/beeline/BeeLine.java   |  2 ++
 .../src/java/org/apache/hive/beeline/Commands.java  |  1 -
 .../test/org/apache/hive/beeline/TestCommands.java  | 16 
 3 files changed, 18 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/52290e72/beeline/src/java/org/apache/hive/beeline/BeeLine.java
--
diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLine.java 
b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
index 402fadd..c6d009c 100644
--- a/beeline/src/java/org/apache/hive/beeline/BeeLine.java
+++ b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
@@ -795,6 +795,8 @@ public class BeeLine implements Closeable {
 int code = 0;
 if (cl.getOptionValues('e') != null) {
   commands = Arrays.asList(cl.getOptionValues('e'));
+  opts.setAllowMultiLineCommand(false); //When using -e, command is always a single line
+
 }
 
 if (!commands.isEmpty() && getOpts().getScriptFile() != null) {

http://git-wip-us.apache.org/repos/asf/hive/blob/52290e72/beeline/src/java/org/apache/hive/beeline/Commands.java
--
diff --git a/beeline/src/java/org/apache/hive/beeline/Commands.java 
b/beeline/src/java/org/apache/hive/beeline/Commands.java
index d5806a4..e46c0cf 100644
--- a/beeline/src/java/org/apache/hive/beeline/Commands.java
+++ b/beeline/src/java/org/apache/hive/beeline/Commands.java
@@ -1072,7 +1072,6 @@ public class Commands {
* Check if the input line is a multi-line command which needs to read further
*/
   public String handleMultiLineCmd(String line) throws IOException {
-//When using -e, console reader is not initialized and command is always a single line
 int[] startQuote = {-1};
 line = HiveStringUtils.removeComments(line, startQuote);
 while (isMultiLine(line) && beeLine.getOpts().isAllowMultiLineCommand()) {

http://git-wip-us.apache.org/repos/asf/hive/blob/52290e72/beeline/src/test/org/apache/hive/beeline/TestCommands.java
--
diff --git a/beeline/src/test/org/apache/hive/beeline/TestCommands.java 
b/beeline/src/test/org/apache/hive/beeline/TestCommands.java
index 8a51f98..567ca25 100644
--- a/beeline/src/test/org/apache/hive/beeline/TestCommands.java
+++ b/beeline/src/test/org/apache/hive/beeline/TestCommands.java
@@ -23,6 +23,8 @@ import org.junit.Test;
 import static org.apache.hive.common.util.HiveStringUtils.removeComments;
 import static org.junit.Assert.assertEquals;
 
+import java.io.IOException;
+
 public class TestCommands {
 
   @Test
@@ -43,5 +45,19 @@ public class TestCommands {
 assertEquals("'show --comments tables'", removeComments("'show --comments 
tables' --comments",escape));
 assertEquals("'\"show --comments tables\"'", removeComments("'\"show 
--comments tables\"' --comments",escape));
   }
+
+  /**
+   * Test the commands directly call from beeline.
+   * @throws IOException
+   */
+  @Test
+  public void testBeelineCommands() throws IOException {
+ // avoid System.exit() call in beeline which causes JVM to exit and fails the test
+System.setProperty(BeeLineOpts.PROPERTY_NAME_EXIT, "true");
+// Verify the command without ';' at the end also works fine
+BeeLine.mainWithInputRedirection(new String[] {"-u", "jdbc:hive2://", "-e", "select 3"}, null);
+BeeLine.mainWithInputRedirection(
+new String[] {"-u", "jdbc:hive2://", "-e", "create table t1(x int); 
show tables"}, null);
+  }
 }
 



hive git commit: HIVE-18433: Upgrade version of com.fasterxml.jackson (Janaki Lahorani, reviewed by Aihua Xu)

2018-03-27 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 680a58476 -> 245c39b4c


HIVE-18433: Upgrade version of com.fasterxml.jackson (Janaki Lahorani, reviewed 
by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/245c39b4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/245c39b4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/245c39b4

Branch: refs/heads/master
Commit: 245c39b4c8f711fbc1c9c00df013e4c7fcbdc0a2
Parents: 680a584
Author: Aihua Xu 
Authored: Tue Mar 27 13:51:02 2018 -0700
Committer: Aihua Xu 
Committed: Tue Mar 27 13:51:02 2018 -0700

--
 common/pom.xml  |  2 +-
 druid-handler/pom.xml   |  8 +
 .../hive/druid/DruidStorageHandlerUtils.java|  2 +-
 .../TestHiveDruidQueryBasedInputFormat.java |  6 ++--
 hcatalog/core/pom.xml   | 14 ++--
 hcatalog/server-extensions/pom.xml  | 14 ++--
 hcatalog/webhcat/svr/pom.xml| 15 ++---
 itests/hive-blobstore/pom.xml   |  6 ++--
 itests/qtest-druid/pom.xml  |  4 +--
 pom.xml | 35 ++--
 ql/pom.xml  | 21 +++-
 spark-client/pom.xml| 26 ++-
 standalone-metastore/pom.xml|  4 +--
 testutils/ptest2/pom.xml| 17 --
 .../hive/ptest/api/client/PTestClient.java  |  2 +-
 .../hive/ptest/execution/JIRAService.java   |  8 ++---
 16 files changed, 128 insertions(+), 56 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/245c39b4/common/pom.xml
--
diff --git a/common/pom.xml b/common/pom.xml
index 4da46f2..9393d87 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -250,7 +250,7 @@
 
   com.fasterxml.jackson.core
   jackson-databind
-  ${jackson.new.version}
+  ${jackson.version}
 
 
   com.github.joshelser

http://git-wip-us.apache.org/repos/asf/hive/blob/245c39b4/druid-handler/pom.xml
--
diff --git a/druid-handler/pom.xml b/druid-handler/pom.xml
index b53ddb4..93eb91e 100644
--- a/druid-handler/pom.xml
+++ b/druid-handler/pom.xml
@@ -37,6 +37,14 @@
 
 
 
+  com.fasterxml.jackson.dataformat
+  jackson-dataformat-smile
+
+
+  com.fasterxml.jackson.core
+  jackson-databind
+
+
   org.apache.hive
   hive-exec
   ${project.version}

http://git-wip-us.apache.org/repos/asf/hive/blob/245c39b4/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java
--
diff --git 
a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java
 
b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java
index a71a3af..233b288 100644
--- 
a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java
+++ 
b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java
@@ -280,7 +280,7 @@ public final class DruidStorageHandlerUtils {
 }
 for (FileStatus fileStatus : fss) {
   final DataSegment segment = JSON_MAPPER
-  .readValue(fs.open(fileStatus.getPath()), DataSegment.class);
+  .readValue((InputStream) fs.open(fileStatus.getPath()), 
DataSegment.class);
   publishedSegmentsBuilder.add(segment);
 }
 return publishedSegmentsBuilder.build();

http://git-wip-us.apache.org/repos/asf/hive/blob/245c39b4/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java
--
diff --git 
a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java
 
b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java
index bb43d51..898f97a 100644
--- 
a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java
+++ 
b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java
@@ -46,7 +46,8 @@ public class TestHiveDruidQueryBasedInputFormat extends 
TestCase {
   + "\"descending\":true,"
   + "\"virtualColumns\":[],"
   + "\"filter\":null,"
-  + "\"granularity\":\"DAY\","
+  + "\"granularity\":{\"type\":\"period\",\"period\":\"P1D\",\"timeZone\":"
+  + "\"America/Los_Angeles\",\"origin\":null},"
   + "\"aggregations\":[],"
   + 

[2/2] hive git commit: HIVE-14792: AvroSerde reads the remote schema-file at least once per mapper, per table reference. (Addendum)

2018-03-12 Thread aihuaxu
HIVE-14792: AvroSerde reads the remote schema-file at least once per mapper, 
per table reference. (Addendum)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9e10b88c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9e10b88c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9e10b88c

Branch: refs/heads/branch-2.2
Commit: 9e10b88c33a3ae9cb09cb230c4ae09e442203ba9
Parents: a20e63e
Author: Aihua Xu 
Authored: Thu Mar 8 11:33:37 2018 -0800
Committer: Aihua Xu 
Committed: Mon Mar 12 15:01:25 2018 -0700

--
 .../TablePropertyEnrichmentOptimizer.java   |  45 +++-
 .../avro_tableproperty_optimize.q   |  63 ++
 .../avro_tableproperty_optimize.q.out   | 226 +++
 3 files changed, 324 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/9e10b88c/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
index 5824490..154eb02 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
@@ -26,6 +26,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
@@ -40,8 +41,10 @@ import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.serde2.Deserializer;
+import org.apache.hive.common.util.ReflectionUtil;
 
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -78,29 +81,51 @@ class TablePropertyEnrichmentOptimizer extends Transform {
 }
   }
 
+  /**
+   * Retrieves the table properties as well as the properties from Serde.
+   */
+  private static Map<String, String> getTableParameters(Table table) {
+    Map<String, String> originalTableParameters = new HashMap<>(table.getParameters());
+    Properties tableMetadata = MetaStoreUtils.getTableMetadata(table);
+    for (String property : tableMetadata.stringPropertyNames()) {
+      if (!originalTableParameters.containsKey(property)) {
+        originalTableParameters.put(property, tableMetadata.getProperty(property));
+      }
+    }
+    return originalTableParameters;
+  }
+
   private static class Processor implements NodeProcessor {
 
 @Override
 public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException {
   TableScanOperator tsOp = (TableScanOperator) nd;
   WalkerCtx context = (WalkerCtx)procCtx;
-
   TableScanDesc tableScanDesc = tsOp.getConf();
   Table table = tsOp.getConf().getTableMetadata().getTTable();
-  Map<String, String> tableParameters = table.getParameters();
-  Properties tableProperties = new Properties();
-  tableProperties.putAll(tableParameters);
 
-  Deserializer deserializer = 
tableScanDesc.getTableMetadata().getDeserializer();
-  String deserializerClassName = deserializer.getClass().getName();
+  Map<String, String> originalTableParameters = getTableParameters(table);
+  if (LOG.isDebugEnabled()) {
+LOG.debug("Original Table parameters: " + originalTableParameters);
+  }
+  Properties clonedTableParameters = new Properties();
+  clonedTableParameters.putAll(originalTableParameters);
+
+  String deserializerClassName = null;
   try {
+deserializerClassName = 
tableScanDesc.getTableMetadata().getSd().getSerdeInfo().getSerializationLib();
+Deserializer deserializer = ReflectionUtil.newInstance(
+context.conf.getClassByName(deserializerClassName)
+.asSubclass(Deserializer.class),
+context.conf);
+
 if 
(context.serdeClassesUnderConsideration.contains(deserializerClassName)) {
-  deserializer.initialize(context.conf, tableProperties);
+  deserializer.initialize(context.conf, clonedTableParameters);
   LOG.debug("SerDe init succeeded for class: " + 
deserializerClassName);
-  for (Map.Entry property 
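
The getTableParameters helper added above merges the table's own parameters with the serde/metastore-derived metadata without overwriting explicit table-level values. The merge in isolation, as a hedged sketch with made-up property names:

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class ParamMergeDemo {
  // Back-fill derived properties into the table's parameters; an explicit
  // table-level value always wins over the derived one.
  static Map<String, String> merge(Map<String, String> tableParams, Properties derived) {
    Map<String, String> merged = new HashMap<>(tableParams);
    for (String key : derived.stringPropertyNames()) {
      merged.putIfAbsent(key, derived.getProperty(key));
    }
    return merged;
  }

  public static void main(String[] args) {
    Map<String, String> table = new HashMap<>();
    table.put("serialization.format", "2"); // explicit table setting
    Properties derived = new Properties();
    derived.setProperty("serialization.format", "1");
    derived.setProperty("avro.schema.url", "hdfs:///schemas/t.avsc"); // hypothetical
    System.out.println(merge(table, derived));
  }
}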

[1/2] hive git commit: Revert "HIVE-14792: AvroSerde reads the remote schema-file at least once per mapper, per table reference. (Addendum)"

2018-03-12 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/branch-2.2 5949479f7 -> 9e10b88c3


Revert "HIVE-14792: AvroSerde reads the remote schema-file at least once per 
mapper, per table reference. (Addendum)"

This reverts commit 5949479f7e08987b67c4ee86c06c2d5949f75bee.


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a20e63e1
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a20e63e1
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a20e63e1

Branch: refs/heads/branch-2.2
Commit: a20e63e1eab675fcf2fa85ec9a320fec4fec6886
Parents: 5949479
Author: Aihua Xu 
Authored: Mon Mar 12 15:00:53 2018 -0700
Committer: Aihua Xu 
Committed: Mon Mar 12 15:00:53 2018 -0700

--
 .../TablePropertyEnrichmentOptimizer.java   |  45 +---
 .../avro_tableproperty_optimize.q   |  63 --
 .../avro_tableproperty_optimize.q.out   | 226 ---
 3 files changed, 10 insertions(+), 324 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/a20e63e1/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
index d313f7d..5824490 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
@@ -26,7 +26,6 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
@@ -41,10 +40,8 @@ import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hive.common.util.ReflectionUtil;
 
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -81,51 +78,29 @@ class TablePropertyEnrichmentOptimizer extends Transform {
 }
   }
 
-  /**
-   * Retrieves the table properties as well as the properties from Serde.
-   */
-  private static Map<String, String> getTableParameters(Table table) {
-    Map<String, String> originalTableParameters = new HashMap<>(table.getParameters());
-    Properties tableMetadata = MetaStoreUtils.getTableMetadata(table);
-    for (String property : tableMetadata.stringPropertyNames()) {
-      if (!originalTableParameters.containsKey(property)) {
-        originalTableParameters.put(property, tableMetadata.getProperty(property));
-      }
-    }
-    return originalTableParameters;
-  }
-
   private static class Processor implements NodeProcessor {
 
 @Override
 public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException {
   TableScanOperator tsOp = (TableScanOperator) nd;
   WalkerCtx context = (WalkerCtx)procCtx;
+
   TableScanDesc tableScanDesc = tsOp.getConf();
   Table table = tsOp.getConf().getTableMetadata().getTTable();
+  Map<String, String> tableParameters = table.getParameters();
+  Properties tableProperties = new Properties();
+  tableProperties.putAll(tableParameters);
 
-  Map<String, String> originalTableParameters = getTableParameters(table);
-  if (LOG.isDebugEnabled()) {
-LOG.debug("Original Table parameters: " + originalTableParameters);
-  }
-  Properties clonedTableParameters = new Properties();
-  clonedTableParameters.putAll(originalTableParameters);
-
-  String deserializerClassName = null;
+  Deserializer deserializer = 
tableScanDesc.getTableMetadata().getDeserializer();
+  String deserializerClassName = deserializer.getClass().getName();
   try {
-deserializerClassName = 
tableScanDesc.getTableMetadata().getSd().getSerdeInfo().getSerializationLib();
-Deserializer deserializer = ReflectionUtil.newInstance(
-context.conf.getClassByName(deserializerClassName)
-.asSubclass(Deserializer.class),
-context.conf);
-
 if (context.serdeClassesUnderConsideration.contains(deserializerClassName)) {
-  deserializer.initialize(context.conf, clonedTableParameters);
+  

[1/2] hive git commit: Revert "HIVE-14792: AvroSerde reads the remote schema-file at least once per mapper, per table reference. (Addendum)"

2018-03-12 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/branch-2.2 e8e3974d1 -> 5949479f7


Revert "HIVE-14792: AvroSerde reads the remote schema-file at least once per 
mapper, per table reference. (Addendum)"

This reverts commit e8e3974d157a550ec0112258b59b603d9829.


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/220d1998
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/220d1998
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/220d1998

Branch: refs/heads/branch-2.2
Commit: 220d199859e373f99e314e50121a149c10351a67
Parents: e8e3974
Author: Aihua Xu 
Authored: Mon Mar 12 14:52:36 2018 -0700
Committer: Aihua Xu 
Committed: Mon Mar 12 14:52:36 2018 -0700

--
 .../TablePropertyEnrichmentOptimizer.java   |  45 +---
 .../avro_tableproperty_optimize.q   |  63 --
 .../avro_tableproperty_optimize.q.out   | 226 ---
 3 files changed, 10 insertions(+), 324 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/220d1998/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
index d313f7d..5824490 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
@@ -26,7 +26,6 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
@@ -41,10 +40,8 @@ import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hive.common.util.ReflectionUtil;
 
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -81,51 +78,29 @@ class TablePropertyEnrichmentOptimizer extends Transform {
 }
   }
 
-  /**
-   * Retrieves the table properties as well as the properties from Serde.
-   */
-  private static Map<String, String> getTableParameters(Table table) {
-    Map<String, String> originalTableParameters = new HashMap<>(table.getParameters());
-    Properties tableMetadata = MetaStoreUtils.getTableMetadata(table);
-    for (String property : tableMetadata.stringPropertyNames()) {
-      if (!originalTableParameters.containsKey(property)) {
-        originalTableParameters.put(property, tableMetadata.getProperty(property));
-      }
-    }
-    return originalTableParameters;
-  }
-
   private static class Processor implements NodeProcessor {
 
 @Override
 public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException {
   TableScanOperator tsOp = (TableScanOperator) nd;
   WalkerCtx context = (WalkerCtx)procCtx;
+
   TableScanDesc tableScanDesc = tsOp.getConf();
   Table table = tsOp.getConf().getTableMetadata().getTTable();
+  Map<String, String> tableParameters = table.getParameters();
+  Properties tableProperties = new Properties();
+  tableProperties.putAll(tableParameters);
 
-  Map<String, String> originalTableParameters = getTableParameters(table);
-  if (LOG.isDebugEnabled()) {
-LOG.debug("Original Table parameters: " + originalTableParameters);
-  }
-  Properties clonedTableParameters = new Properties();
-  clonedTableParameters.putAll(originalTableParameters);
-
-  String deserializerClassName = null;
+  Deserializer deserializer = 
tableScanDesc.getTableMetadata().getDeserializer();
+  String deserializerClassName = deserializer.getClass().getName();
   try {
-deserializerClassName = 
tableScanDesc.getTableMetadata().getSd().getSerdeInfo().getSerializationLib();
-Deserializer deserializer = ReflectionUtil.newInstance(
-context.conf.getClassByName(deserializerClassName)
-.asSubclass(Deserializer.class),
-context.conf);
-
 if (context.serdeClassesUnderConsideration.contains(deserializerClassName)) {
-  deserializer.initialize(context.conf, clonedTableParameters);
+  

[2/2] hive git commit: HIVE-14792: AvroSerde reads the remote schema-file at least once per mapper, per table reference. (Addendum)

2018-03-12 Thread aihuaxu
HIVE-14792: AvroSerde reads the remote schema-file at least once per mapper, 
per table reference. (Addendum)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5949479f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5949479f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5949479f

Branch: refs/heads/branch-2.2
Commit: 5949479f7e08987b67c4ee86c06c2d5949f75bee
Parents: 220d199
Author: Aihua Xu 
Authored: Thu Mar 8 11:33:37 2018 -0800
Committer: Aihua Xu 
Committed: Mon Mar 12 14:52:53 2018 -0700

--
 .../TablePropertyEnrichmentOptimizer.java   |  45 +++-
 .../avro_tableproperty_optimize.q   |  63 ++
 .../avro_tableproperty_optimize.q.out   | 226 +++
 3 files changed, 324 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/5949479f/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
index 5824490..d313f7d 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
@@ -26,6 +26,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
@@ -40,8 +41,10 @@ import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.serde2.Deserializer;
+import org.apache.hive.common.util.ReflectionUtil;
 
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -78,29 +81,51 @@ class TablePropertyEnrichmentOptimizer extends Transform {
 }
   }
 
+  /**
+   * Retrieves the table properties as well as the properties from Serde.
+   */
+  private static Map<String, String> getTableParameters(Table table) {
+    Map<String, String> originalTableParameters = new HashMap<>(table.getParameters());
+    Properties tableMetadata = MetaStoreUtils.getTableMetadata(table);
+    for (String property : tableMetadata.stringPropertyNames()) {
+      if (!originalTableParameters.containsKey(property)) {
+        originalTableParameters.put(property, tableMetadata.getProperty(property));
+      }
+    }
+    return originalTableParameters;
+  }
+
   private static class Processor implements NodeProcessor {
 
     @Override
     public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException {
       TableScanOperator tsOp = (TableScanOperator) nd;
       WalkerCtx context = (WalkerCtx)procCtx;
-
       TableScanDesc tableScanDesc = tsOp.getConf();
       Table table = tsOp.getConf().getTableMetadata().getTTable();
-      Map<String, String> tableParameters = table.getParameters();
-      Properties tableProperties = new Properties();
-      tableProperties.putAll(tableParameters);
 
-      Deserializer deserializer = tableScanDesc.getTableMetadata().getDeserializer();
-      String deserializerClassName = deserializer.getClass().getName();
+      Map<String, String> originalTableParameters = getTableParameters(table);
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Original Table parameters: " + originalTableParameters);
+      }
+      Properties clonedTableParameters = new Properties();
+      clonedTableParameters.putAll(originalTableParameters);
+
+      String deserializerClassName = null;
       try {
+        deserializerClassName = tableScanDesc.getTableMetadata().getSd().getSerdeInfo().getSerializationLib();
+        Deserializer deserializer = ReflectionUtil.newInstance(
+            context.conf.getClassByName(deserializerClassName)
+                .asSubclass(Deserializer.class),
+            context.conf);
+
         if (context.serdeClassesUnderConsideration.contains(deserializerClassName)) {
-          deserializer.initialize(context.conf, tableProperties);
+          deserializer.initialize(context.conf, clonedTableParameters);
           LOG.debug("SerDe init succeeded for class: " + deserializerClassName);
-  for (Map.Entry 

hive git commit: HIVE-14792: AvroSerde reads the remote schema-file at least once per mapper, per table reference. (Addendum)

2018-03-12 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/branch-2 cbcd846b7 -> ad5a2fa0e


HIVE-14792: AvroSerde reads the remote schema-file at least once per mapper, 
per table reference. (Addendum)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ad5a2fa0
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ad5a2fa0
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ad5a2fa0

Branch: refs/heads/branch-2
Commit: ad5a2fa0eed89bf88a4287965274c4988e71ee98
Parents: cbcd846
Author: Aihua Xu 
Authored: Thu Mar 8 11:33:37 2018 -0800
Committer: Aihua Xu 
Committed: Mon Mar 12 14:51:43 2018 -0700

--
 .../TablePropertyEnrichmentOptimizer.java   |  45 +++-
 .../avro_tableproperty_optimize.q   |  63 ++
 .../avro_tableproperty_optimize.q.out   | 226 +++
 3 files changed, 324 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/ad5a2fa0/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
index 5824490..154eb02 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
@@ -26,6 +26,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
@@ -40,8 +41,10 @@ import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.serde2.Deserializer;
+import org.apache.hive.common.util.ReflectionUtil;
 
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -78,29 +81,51 @@ class TablePropertyEnrichmentOptimizer extends Transform {
 }
   }
 
+  /**
+   * Retrieves the table properties as well as the properties from Serde.
+   */
+  private static Map<String, String> getTableParameters(Table table) {
+    Map<String, String> originalTableParameters = new HashMap<>(table.getParameters());
+    Properties tableMetadata = MetaStoreUtils.getTableMetadata(table);
+    for (String property : tableMetadata.stringPropertyNames()) {
+      if (!originalTableParameters.containsKey(property)) {
+        originalTableParameters.put(property, tableMetadata.getProperty(property));
+      }
+    }
+    return originalTableParameters;
+  }
+
   private static class Processor implements NodeProcessor {
 
     @Override
     public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException {
       TableScanOperator tsOp = (TableScanOperator) nd;
       WalkerCtx context = (WalkerCtx)procCtx;
-
       TableScanDesc tableScanDesc = tsOp.getConf();
       Table table = tsOp.getConf().getTableMetadata().getTTable();
-      Map<String, String> tableParameters = table.getParameters();
-      Properties tableProperties = new Properties();
-      tableProperties.putAll(tableParameters);
 
-      Deserializer deserializer = tableScanDesc.getTableMetadata().getDeserializer();
-      String deserializerClassName = deserializer.getClass().getName();
+      Map<String, String> originalTableParameters = getTableParameters(table);
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Original Table parameters: " + originalTableParameters);
+      }
+      Properties clonedTableParameters = new Properties();
+      clonedTableParameters.putAll(originalTableParameters);
+
+      String deserializerClassName = null;
       try {
+        deserializerClassName = tableScanDesc.getTableMetadata().getSd().getSerdeInfo().getSerializationLib();
+        Deserializer deserializer = ReflectionUtil.newInstance(
+            context.conf.getClassByName(deserializerClassName)
+                .asSubclass(Deserializer.class),
+            context.conf);
+
         if (context.serdeClassesUnderConsideration.contains(deserializerClassName)) {
-          deserializer.initialize(context.conf, tableProperties);
+          deserializer.initialize(context.conf, clonedTableParameters);
   LOG.debug("SerDe init 

hive git commit: HIVE-14792: AvroSerde reads the remote schema-file at least once per mapper, per table reference. (Addendum)

2018-03-12 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/branch-2.2 5fee15988 -> e8e3974d1


HIVE-14792: AvroSerde reads the remote schema-file at least once per mapper, 
per table reference. (Addendum)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e8e3974d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e8e3974d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e8e3974d

Branch: refs/heads/branch-2.2
Commit: e8e3974d157a550ec0112258b59b603d9829
Parents: 5fee159
Author: Aihua Xu 
Authored: Thu Mar 8 11:33:37 2018 -0800
Committer: Aihua Xu 
Committed: Mon Mar 12 14:39:50 2018 -0700

--
 .../TablePropertyEnrichmentOptimizer.java   |  45 +++-
 .../avro_tableproperty_optimize.q   |  63 ++
 .../avro_tableproperty_optimize.q.out   | 226 +++
 3 files changed, 324 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e8e3974d/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
index 5824490..d313f7d 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
@@ -26,6 +26,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
@@ -40,8 +41,10 @@ import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.serde2.Deserializer;
+import org.apache.hive.common.util.ReflectionUtil;
 
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -78,29 +81,51 @@ class TablePropertyEnrichmentOptimizer extends Transform {
 }
   }
 
+  /**
+   * Retrieves the table properties as well as the properties from Serde.
+   */
+  private static Map<String, String> getTableParameters(Table table) {
+    Map<String, String> originalTableParameters = new HashMap<>(table.getParameters());
+    Properties tableMetadata = MetaStoreUtils.getTableMetadata(table);
+    for (String property : tableMetadata.stringPropertyNames()) {
+      if (!originalTableParameters.containsKey(property)) {
+        originalTableParameters.put(property, tableMetadata.getProperty(property));
+      }
+    }
+    return originalTableParameters;
+  }
+
   private static class Processor implements NodeProcessor {
 
     @Override
     public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException {
       TableScanOperator tsOp = (TableScanOperator) nd;
       WalkerCtx context = (WalkerCtx)procCtx;
-
       TableScanDesc tableScanDesc = tsOp.getConf();
       Table table = tsOp.getConf().getTableMetadata().getTTable();
-      Map<String, String> tableParameters = table.getParameters();
-      Properties tableProperties = new Properties();
-      tableProperties.putAll(tableParameters);
 
-      Deserializer deserializer = tableScanDesc.getTableMetadata().getDeserializer();
-      String deserializerClassName = deserializer.getClass().getName();
+      Map<String, String> originalTableParameters = getTableParameters(table);
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Original Table parameters: " + originalTableParameters);
+      }
+      Properties clonedTableParameters = new Properties();
+      clonedTableParameters.putAll(originalTableParameters);
+
+      String deserializerClassName = null;
       try {
+        deserializerClassName = tableScanDesc.getTableMetadata().getSd().getSerdeInfo().getSerializationLib();
+        Deserializer deserializer = ReflectionUtil.newInstance(
+            context.conf.getClassByName(deserializerClassName)
+                .asSubclass(Deserializer.class),
+            context.conf);
+
         if (context.serdeClassesUnderConsideration.contains(deserializerClassName)) {
-          deserializer.initialize(context.conf, tableProperties);
+          deserializer.initialize(context.conf, clonedTableParameters);
   LOG.debug("SerDe 

hive git commit: HIVE-14792: AvroSerde reads the remote schema-file at least once per mapper, per table reference. (Addendum)

2018-03-12 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 190c72e77 -> 1629ec058


HIVE-14792: AvroSerde reads the remote schema-file at least once per mapper, 
per table reference. (Addendum)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1629ec05
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1629ec05
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1629ec05

Branch: refs/heads/master
Commit: 1629ec058faf2dce581de4f393f0c6485c7425d7
Parents: 190c72e
Author: Aihua Xu 
Authored: Thu Mar 8 11:33:37 2018 -0800
Committer: Aihua Xu 
Committed: Mon Mar 12 14:16:38 2018 -0700

--
 .../TablePropertyEnrichmentOptimizer.java   |  45 +++-
 .../avro_tableproperty_optimize.q   |  63 ++
 .../avro_tableproperty_optimize.q.out   | 226 +++
 3 files changed, 324 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/1629ec05/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
index d806775..bc17691 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/TablePropertyEnrichmentOptimizer.java
@@ -26,6 +26,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
@@ -40,8 +41,10 @@ import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.serde2.Deserializer;
+import org.apache.hive.common.util.ReflectionUtil;
 
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -78,29 +81,51 @@ class TablePropertyEnrichmentOptimizer extends Transform {
 }
   }
 
+  /**
+   * Retrieves the table properties as well as the properties from Serde.
+   */
+  private static Map<String, String> getTableParameters(Table table) {
+    Map<String, String> originalTableParameters = new HashMap<>(table.getParameters());
+    Properties tableMetadata = MetaStoreUtils.getTableMetadata(table);
+    for (String property : tableMetadata.stringPropertyNames()) {
+      if (!originalTableParameters.containsKey(property)) {
+        originalTableParameters.put(property, tableMetadata.getProperty(property));
+      }
+    }
+    return originalTableParameters;
+  }
+
   private static class Processor implements NodeProcessor {
 
     @Override
     public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException {
       TableScanOperator tsOp = (TableScanOperator) nd;
       WalkerCtx context = (WalkerCtx)procCtx;
-
       TableScanDesc tableScanDesc = tsOp.getConf();
       Table table = tsOp.getConf().getTableMetadata().getTTable();
-      Map<String, String> tableParameters = table.getParameters();
-      Properties tableProperties = new Properties();
-      tableProperties.putAll(tableParameters);
 
-      Deserializer deserializer = tableScanDesc.getTableMetadata().getDeserializer();
-      String deserializerClassName = deserializer.getClass().getName();
+      Map<String, String> originalTableParameters = getTableParameters(table);
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Original Table parameters: " + originalTableParameters);
+      }
+      Properties clonedTableParameters = new Properties();
+      clonedTableParameters.putAll(originalTableParameters);
+
+      String deserializerClassName = null;
       try {
+        deserializerClassName = tableScanDesc.getTableMetadata().getSd().getSerdeInfo().getSerializationLib();
+        Deserializer deserializer = ReflectionUtil.newInstance(
+            context.conf.getClassByName(deserializerClassName)
+                .asSubclass(Deserializer.class),
+            context.conf);
+
         if (context.serdeClassesUnderConsideration.contains(deserializerClassName)) {
-          deserializer.initialize(context.conf, tableProperties);
+          deserializer.initialize(context.conf, clonedTableParameters);
   LOG.debug("SerDe init 

[1/2] hive git commit: HIVE-17735: ObjectStore.addNotificationEvent is leaking queries (Aihua Xu, reviewed by Yongzhi Chen)

2018-02-13 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 8cf36e733 -> 35605732b


HIVE-17735: ObjectStore.addNotificationEvent is leaking queries (Aihua Xu, 
reviewed by Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ec7ccc3a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ec7ccc3a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ec7ccc3a

Branch: refs/heads/master
Commit: ec7ccc3a452fa125719ca820b5f751ddd00686ec
Parents: 8cf36e7
Author: Aihua Xu 
Authored: Mon Feb 5 15:35:30 2018 -0800
Committer: Aihua Xu 
Committed: Tue Feb 13 13:04:15 2018 -0800

--
 .../hadoop/hive/metastore/ObjectStore.java  | 42 +++-
 .../hadoop/hive/metastore/TestObjectStore.java  |  2 +-
 2 files changed, 15 insertions(+), 29 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/ec7ccc3a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
--
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index d58ed67..edabaa1 100644
--- 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -3941,13 +3941,13 @@ public class ObjectStore implements RawStore, Configurable {
 }
 
 boolean success = false;
-QueryWrapper queryWrapper = new QueryWrapper();
+Query query = null;
 
 try {
   openTransaction();
   LOG.debug("execute removeUnusedColumnDescriptor");
 
-  Query query = pm.newQuery("select count(1) from " +
+  query = pm.newQuery("select count(1) from " +
 "org.apache.hadoop.hive.metastore.model.MStorageDescriptor where 
(this.cd == inCD)");
   query.declareParameters("MColumnDescriptor inCD");
   long count = ((Long)query.execute(oldCD)).longValue();
@@ -3960,7 +3960,7 @@ public class ObjectStore implements RawStore, Configurable {
   success = commitTransaction();
   LOG.debug("successfully deleted a CD in removeUnusedColumnDescriptor");
 } finally {
-  rollbackAndCleanup(success, queryWrapper);
+  rollbackAndCleanup(success, query);
 }
   }
 
@@ -8819,14 +8819,13 @@ public class ObjectStore implements RawStore, Configurable {
   public Function getFunction(String dbName, String funcName) throws MetaException {
 boolean commited = false;
 Function func = null;
+Query query = null;
 try {
   openTransaction();
   func = convertToFunction(getMFunction(dbName, funcName));
   commited = commitTransaction();
 } finally {
-  if (!commited) {
-rollbackTransaction();
-  }
+  rollbackAndCleanup(commited, query);
 }
 return func;
   }
@@ -8834,17 +8833,16 @@ public class ObjectStore implements RawStore, Configurable {
   @Override
   public List<Function> getAllFunctions() throws MetaException {
 boolean commited = false;
+Query query = null;
 try {
   openTransaction();
-  Query query = pm.newQuery(MFunction.class);
+  query = pm.newQuery(MFunction.class);
      List<MFunction> allFunctions = (List<MFunction>) query.execute();
   pm.retrieveAll(allFunctions);
   commited = commitTransaction();
   return convertToFunctions(allFunctions);
 } finally {
-  if (!commited) {
-rollbackTransaction();
-  }
+  rollbackAndCleanup(commited, query);
 }
   }
 
@@ -8905,10 +8903,7 @@ public class ObjectStore implements RawStore, Configurable {
   }
   return result;
 } finally {
-  if (!commited) {
-rollbackAndCleanup(commited, query);
-return null;
-  }
+  rollbackAndCleanup(commited, query);
 }
   }
 
@@ -8938,6 +8933,7 @@ public class ObjectStore implements RawStore, Configurable {
   query.setUnique(true);
   // only need to execute it to get db Lock
   query.execute();
+  query.closeAll();
 }).run();
   }
 
@@ -9003,8 +8999,8 @@ public class ObjectStore implements RawStore, Configurable {
 try {
   openTransaction();
   lockForUpdate();
-  Query objectQuery = pm.newQuery(MNotificationNextId.class);
-  Collection ids = (Collection) objectQuery.execute();
+  query = pm.newQuery(MNotificationNextId.class);
+  Collection ids = (Collection) query.execute();
   MNotificationNextId mNotificationNextId = null;
   boolean needToPersistId;
   if (CollectionUtils.isEmpty(ids)) {
@@ -9533,12 +9529,7 @@ public class ObjectStore implements RawStore, Configurable {
   }
   commited = commitTransaction();
 } finally {
-  
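The recurring change in the ObjectStore hunks above is a single cleanup idiom: hold the JDO Query in a local variable and, in the finally block, call rollbackAndCleanup(), which rolls the transaction back if it never committed and closes the query either way. A minimal, self-contained rendering of that idiom with stand-in types; rollbackAndCleanup here mirrors the shape of the ObjectStore method but is not the real one:

public class CleanupIdiom {
  interface Tx { void rollback(); }

  // Mirrors ObjectStore.rollbackAndCleanup: roll back when the transaction
  // did not commit, and close the query resource unconditionally.
  static void rollbackAndCleanup(boolean committed, Tx tx, AutoCloseable query) throws Exception {
    if (!committed) {
      tx.rollback();
    }
    if (query != null) {
      query.close();
    }
  }

  public static void main(String[] args) throws Exception {
    boolean committed = false;
    Tx tx = () -> System.out.println("rolled back");
    AutoCloseable query = () -> System.out.println("query closed");
    try {
      // ... run the query; set committed = true after the commit succeeds ...
    } finally {
      rollbackAndCleanup(committed, tx, query); // runs on success and failure paths alike
    }
  }
}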

[2/2] hive git commit: HIVE-18586: Upgrade Derby to 10.14.1.0 (Janaki Lahorani, reviewed by Aihua Xu)

2018-02-13 Thread aihuaxu
HIVE-18586: Upgrade Derby to 10.14.1.0 (Janaki Lahorani, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/35605732
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/35605732
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/35605732

Branch: refs/heads/master
Commit: 35605732b2041eee809485718bfd951cdfae0980
Parents: ec7ccc3
Author: Aihua Xu 
Authored: Tue Feb 13 13:06:31 2018 -0800
Committer: Aihua Xu 
Committed: Tue Feb 13 13:06:31 2018 -0800

--
 .../org/apache/hive/hcatalog/DerbyPolicy.java   | 90 
 .../org/apache/hive/hcatalog/DerbyPolicy.java   | 90 
 .../apache/hive/hcatalog/cli/TestPermsGrp.java  |  3 +
 .../mapreduce/TestHCatPartitionPublish.java |  3 +
 .../org/apache/hive/hcatalog/package-info.java  | 22 +
 .../hive/hcatalog/api/TestHCatClient.java   |  4 +
 pom.xml |  2 +-
 .../metastore/TestHiveMetaStoreGetMetaConf.java | 25 --
 .../TestHiveMetaStorePartitionSpecs.java| 26 --
 9 files changed, 213 insertions(+), 52 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/35605732/core/src/test/java/org/apache/hive/hcatalog/DerbyPolicy.java
--
diff --git a/core/src/test/java/org/apache/hive/hcatalog/DerbyPolicy.java 
b/core/src/test/java/org/apache/hive/hcatalog/DerbyPolicy.java
new file mode 100644
index 000..cecf6dc
--- /dev/null
+++ b/core/src/test/java/org/apache/hive/hcatalog/DerbyPolicy.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hcatalog;
+
+import org.apache.derby.security.SystemPermission;
+
+import java.security.CodeSource;
+import java.security.Permission;
+import java.security.PermissionCollection;
+import java.security.Policy;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.Iterator;
+
+/**
+ * A security policy that grants usederbyinternals
+ *
+ * 
+ *   HCatalog tests use Security Manager to handle exits.  With Derby version 10.14.1, if a
+ *   security manager is configured, embedded Derby requires usederbyinternals permission, and
+ *   that is checked directly using AccessController.checkPermission.  This class will be used to
+ *   set up a security policy to grant usederbyinternals, in tests that use NoExitSecurityManager.
+ * 
+ */
+public class DerbyPolicy extends Policy {
+
+  private static PermissionCollection perms;
+
+  public DerbyPolicy() {
+super();
+if (perms == null) {
+  perms = new DerbyPermissionCollection();
+  addPermissions();
+}
+  }
+
+  @Override
+  public PermissionCollection getPermissions(CodeSource codesource) {
+return perms;
+  }
+
+  private void addPermissions() {
+    SystemPermission systemPermission = new SystemPermission("engine", "usederbyinternals");
+perms.add(systemPermission);
+  }
+
+  class DerbyPermissionCollection extends PermissionCollection {
+
+ArrayList perms = new ArrayList();
+
+public void add(Permission p) {
+  perms.add(p);
+}
+
+public boolean implies(Permission p) {
+  for (Iterator i = perms.iterator(); i.hasNext();) {
+if (((Permission) i.next()).implies(p)) {
+  return true;
+}
+  }
+  return false;
+}
+
+public Enumeration elements() {
+  return Collections.enumeration(perms);
+}
+
+public boolean isReadOnly() {
+  return false;
+}
+  }
+}
+
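DerbyPolicy is meant to be installed before a security manager is turned on, so that embedded Derby's permission check passes. A minimal usage sketch, assuming DerbyPolicy is on the classpath; the ordering is the point, and the plain SecurityManager here stands in for the NoExitSecurityManager the tests actually use:

import java.security.Policy;

public class DerbyPolicyExample {
  public static void main(String[] args) {
    Policy.setPolicy(new DerbyPolicy());              // grant usederbyinternals first
    System.setSecurityManager(new SecurityManager()); // then enable enforcement
  }
}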

http://git-wip-us.apache.org/repos/asf/hive/blob/35605732/hcatalog/core/src/test/java/org/apache/hive/hcatalog/DerbyPolicy.java
--
diff --git 
a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/DerbyPolicy.java 
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/DerbyPolicy.java
new file mode 100644
index 000..cecf6dc
--- /dev/null
+++ 

hive git commit: HIVE-18550: Keep the hbase table name property as hbase.table.name (Aihua Xu, reviewed by Yongzhi Chen)

2018-02-12 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master fa14a4365 -> 1eddbc06a


HIVE-18550: Keep the hbase table name property as hbase.table.name (Aihua Xu, 
reviewed by Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1eddbc06
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1eddbc06
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1eddbc06

Branch: refs/heads/master
Commit: 1eddbc06a6224cb860ecb2f331cb2462a57b228b
Parents: fa14a43
Author: Aihua Xu 
Authored: Fri Jan 26 15:30:52 2018 -0800
Committer: Aihua Xu 
Committed: Mon Feb 12 11:03:25 2018 -0800

--
 .../hadoop/hive/hbase/HiveHFileOutputFormat.java  | 14 +-
 hbase-handler/src/test/queries/positive/hbase_bulk.q  |  2 +-
 .../src/test/queries/positive/hbase_handler_bulk.q|  4 ++--
 .../test/results/positive/hbase_handler_bulk.q.out|  8 
 4 files changed, 20 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/1eddbc06/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHFileOutputFormat.java
--
diff --git 
a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHFileOutputFormat.java
 
b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHFileOutputFormat.java
index d8dad06..4fa0272 100644
--- 
a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHFileOutputFormat.java
+++ 
b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHFileOutputFormat.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.shims.ShimLoader;
@@ -64,7 +65,8 @@ public class HiveHFileOutputFormat extends
 HiveOutputFormat {
 
   public static final String HFILE_FAMILY_PATH = "hfile.family.path";
-
+  public static final String OUTPUT_TABLE_NAME_CONF_KEY =
+  "hbase.mapreduce.hfileoutputformat.table.name";
   static final Logger LOG = LoggerFactory.getLogger(HiveHFileOutputFormat.class.getName());
 
   private
@@ -95,6 +97,16 @@ public class HiveHFileOutputFormat extends
 Properties tableProperties,
 final Progressable progressable) throws IOException {
 
+    String hbaseTableName = jc.get(HBaseSerDe.HBASE_TABLE_NAME);
+    if (hbaseTableName == null) {
+      hbaseTableName = tableProperties.getProperty(hive_metastoreConstants.META_TABLE_NAME);
+      hbaseTableName = hbaseTableName.toLowerCase();
+      if (hbaseTableName.startsWith(HBaseStorageHandler.DEFAULT_PREFIX)) {
+        hbaseTableName = hbaseTableName.substring(HBaseStorageHandler.DEFAULT_PREFIX.length());
+      }
+    }
+    jc.set(OUTPUT_TABLE_NAME_CONF_KEY, hbaseTableName);
+
     // Read configuration for the target path, first from jobconf, then from table properties
     String hfilePath = getFamilyPath(jc, tableProperties);
     if (hfilePath == null) {
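To make the fallback above concrete: when hbase.table.name is absent from the job configuration, the HBase table name is derived from the metastore table name by lower-casing it and stripping the default-database prefix. A small sketch of that derivation, assuming HBaseStorageHandler.DEFAULT_PREFIX is the string "default." (the constant's value is not shown in this diff):

public class TableNameFallback {
  public static void main(String[] args) {
    String metaTableName = "DEFAULT.HBSort";   // illustrative META_TABLE_NAME value
    String hbaseTableName = metaTableName.toLowerCase();
    if (hbaseTableName.startsWith("default.")) {           // assumed DEFAULT_PREFIX
      hbaseTableName = hbaseTableName.substring("default.".length());
    }
    System.out.println(hbaseTableName);        // prints "hbsort"
  }
}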

http://git-wip-us.apache.org/repos/asf/hive/blob/1eddbc06/hbase-handler/src/test/queries/positive/hbase_bulk.q
--
diff --git a/hbase-handler/src/test/queries/positive/hbase_bulk.q 
b/hbase-handler/src/test/queries/positive/hbase_bulk.q
index 5e0c14e..475aafc 100644
--- a/hbase-handler/src/test/queries/positive/hbase_bulk.q
+++ b/hbase-handler/src/test/queries/positive/hbase_bulk.q
@@ -9,7 +9,7 @@ create table hbsort(key string, val string, val2 string)
 stored as
 INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
 OUTPUTFORMAT 'org.apache.hadoop.hive.hbase.HiveHFileOutputFormat'
-TBLPROPERTIES ('hfile.family.path' = '/tmp/hbsort/cf','hbase.mapreduce.hfileoutputformat.table.name'='hbsort');
+TBLPROPERTIES ('hfile.family.path' = '/tmp/hbsort/cf');
 
 -- this is a dummy table used for controlling how the input file
 -- for TotalOrderPartitioner is created

http://git-wip-us.apache.org/repos/asf/hive/blob/1eddbc06/hbase-handler/src/test/queries/positive/hbase_handler_bulk.q
--
diff --git a/hbase-handler/src/test/queries/positive/hbase_handler_bulk.q 
b/hbase-handler/src/test/queries/positive/hbase_handler_bulk.q
index 5ac4dc8..d02a61f 100644
--- a/hbase-handler/src/test/queries/positive/hbase_handler_bulk.q
+++ b/hbase-handler/src/test/queries/positive/hbase_handler_bulk.q
@@ -6,7 +6,7 @@ drop table if exists hb_target;
 create table 

hive git commit: HIVE-18426: Memory leak in RoutingAppender for every hive operation (kalyan kumar kalvagadda, reviewed by Aihua Xu)

2018-01-29 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master d2a14fe28 -> 90f26bb44


HIVE-18426: Memory leak in RoutingAppender for every hive operation (kalyan 
kumar kalvagadda, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/90f26bb4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/90f26bb4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/90f26bb4

Branch: refs/heads/master
Commit: 90f26bb443176df01a402b80d344b16abfb33eff
Parents: d2a14fe
Author: Aihua Xu 
Authored: Mon Jan 29 15:21:23 2018 -0800
Committer: Aihua Xu 
Committed: Mon Jan 29 15:21:23 2018 -0800

--
 .../org/apache/hadoop/hive/common/LogUtils.java |  11 +-
 .../operation/TestOperationLoggingLayout.java   | 109 +++
 2 files changed, 88 insertions(+), 32 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/90f26bb4/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java 
b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
index 5c7ec69..5068eb5 100644
--- a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
@@ -29,10 +29,8 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.core.Appender;
-import org.apache.logging.log4j.core.LogEvent;
 import org.apache.logging.log4j.core.LoggerContext;
 import org.apache.logging.log4j.core.appender.routing.RoutingAppender;
-import org.apache.logging.log4j.core.config.AppenderControl;
 import org.apache.logging.log4j.core.config.Configurator;
 import org.apache.logging.log4j.core.config.LoggerConfig;
 import org.apache.logging.log4j.core.impl.Log4jContextFactory;
@@ -248,14 +246,9 @@ public class LogUtils {
   // The appender is configured to use ${ctx:queryId} by registerRoutingAppender()
   try {
     Class clazz = routingAppender.getClass();
-    Method method = clazz.getDeclaredMethod("getControl", String.class, LogEvent.class);
+    Method method = clazz.getDeclaredMethod("deleteAppender", String.class);
     method.setAccessible(true);
-    AppenderControl control = (AppenderControl) method.invoke(routingAppender, queryId, null);
-    Appender subordinateAppender = control.getAppender();
-    if (!subordinateAppender.isStopped()) {
-      // this will cause the subordinate appender to close its output stream.
-      subordinateAppender.stop();
-    }
+    method.invoke(routingAppender, queryId);
   } catch (NoSuchMethodException | SecurityException | IllegalAccessException |
       IllegalArgumentException | InvocationTargetException e) {
     l4j.warn("Unable to close the operation log appender for query id " + queryId, e);
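Both the removed and the added code reach into RoutingAppender reflectively because the methods involved are not public API. A generic, self-contained sketch of that reflective pattern; Target is a stand-in class, not the Log4j2 type:

import java.lang.reflect.Method;

public class ReflectiveCall {
  static class Target {
    private void deleteAppender(String key) { // stand-in for the non-public Log4j2 method
      System.out.println("deleted appender for " + key);
    }
  }

  public static void main(String[] args) throws Exception {
    Target routingAppender = new Target();
    Method method = Target.class.getDeclaredMethod("deleteAppender", String.class);
    method.setAccessible(true);               // bypass the private modifier, as LogUtils does
    method.invoke(routingAppender, "query-id-42");
  }
}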

http://git-wip-us.apache.org/repos/asf/hive/blob/90f26bb4/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java
 
b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java
index 8febe3e..d90d590 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java
@@ -102,14 +102,8 @@ public class TestOperationLoggingLayout {
 miniHS2.stop();
   }
 
-  @Test
-  public void testSwitchLogLayout() throws Exception {
-// verify whether the sql operation log is generated and fetch correctly.
-    OperationHandle operationHandle = client.executeStatement(sessionHandle, sqlCntStar, null);
-    RowSet rowSetLog = client.fetchResults(operationHandle, FetchOrientation.FETCH_FIRST, 1000,
-        FetchType.LOG);
+  private String getQueryId(RowSet rowSetLog) {
 Iterator iter = rowSetLog.iterator();
-String queryId = null;
 // non-verbose pattern is %-5p : %m%n. Look for " : "
 while (iter.hasNext()) {
   String row = iter.next()[0].toString();
@@ -119,9 +113,32 @@ public class TestOperationLoggingLayout {
   String queryIdLoggingProbe = "INFO  : Query ID = ";
   int index = row.indexOf(queryIdLoggingProbe);
   if (index >= 0) {
-queryId = row.substring(queryIdLoggingProbe.length()).trim();
+return row.substring(queryIdLoggingProbe.length()).trim();
   }
 }
+return 

[2/3] hive git commit: Revert "HIVE-18202: Automatically migrate hbase.table.name to hbase.mapreduce.hfileoutputformat.table.name for hbase-based table (addendum)"

2018-01-26 Thread aihuaxu
Revert "HIVE-18202: Automatically migrate hbase.table.name to 
hbase.mapreduce.hfileoutputformat.table.name for hbase-based table (addendum)"

This reverts commit ee802dba3f28d0467bbea045e6aa5c9bfac8e2a5.


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c14a46dc
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c14a46dc
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c14a46dc

Branch: refs/heads/master
Commit: c14a46dc48ccc03ff135588c20571a522c2cb2b9
Parents: 0fc1cd4
Author: Aihua Xu 
Authored: Fri Jan 26 13:44:05 2018 -0800
Committer: Aihua Xu 
Committed: Fri Jan 26 13:44:05 2018 -0800

--
 metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/c14a46dc/metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql
--
diff --git a/metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql 
b/metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql
index c054322..0fdc615 100644
--- a/metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql
+++ b/metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql
@@ -1,6 +1,6 @@
-UPDATE "TABLE_PARAMS"
-  SET "PARAM_KEY" = 'hbase.mapreduce.hfileoutputformat.table.name'
+UPDATE TABLE_PARAMS
+  SET PARAM_KEY = 'hbase.mapreduce.hfileoutputformat.table.name'
 WHERE
-  "PARAM_KEY" = 'hbase.table.name'
+  PARAM_KEY = 'hbase.table.name'
 ;
 



[1/3] hive git commit: Revert "HIVE-18366: Update HBaseSerDe to use hbase.mapreduce.hfileoutputformat.table.name instead of hbase.table.name as the table name property (Aihua Xu, reviewed Yongzhi Chen

2018-01-26 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 255cf4ab2 -> 6b349b5f8


Revert "HIVE-18366: Update HBaseSerDe to use 
hbase.mapreduce.hfileoutputformat.table.name instead of hbase.table.name as the 
table name property (Aihua Xu, reviewed Yongzhi Chen)"

This reverts commit 146234906982c59953d2dad25c0f1e8978126243.


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0fc1cd4a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0fc1cd4a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0fc1cd4a

Branch: refs/heads/master
Commit: 0fc1cd4a33e4de9060b50da9ff8b54a74f00579b
Parents: 255cf4a
Author: Aihua Xu 
Authored: Fri Jan 26 13:43:38 2018 -0800
Committer: Aihua Xu 
Committed: Fri Jan 26 13:43:38 2018 -0800

--
 .../apache/hadoop/hive/hbase/HBaseSerDe.java|  2 +-
 .../src/test/queries/negative/cascade_dbdrop.q  |  2 +-
 .../queries/negative/cascade_dbdrop_hadoop20.q  |  2 +-
 .../src/test/queries/negative/hbase_ddl.q   |  2 +-
 .../test/queries/positive/external_table_ppd.q  |  2 +-
 .../hbase_binary_external_table_queries.q   |  6 +--
 .../queries/positive/hbase_binary_map_queries.q | 16 +++
 .../positive/hbase_binary_map_queries_prefix.q  |  6 +--
 .../positive/hbase_binary_storage_queries.q | 10 ++--
 .../test/queries/positive/hbase_custom_key.q|  4 +-
 .../test/queries/positive/hbase_custom_key2.q   |  2 +-
 .../test/queries/positive/hbase_custom_key3.q   |  2 +-
 .../src/test/queries/positive/hbase_ddl.q   |  2 +-
 .../src/test/queries/positive/hbase_queries.q   |  6 +--
 .../queries/positive/hbase_tablename_property.q | 14 --
 .../queries/positive/hbase_timestamp_format.q   |  4 +-
 .../src/test/queries/positive/hbase_viewjoins.q |  4 +-
 .../test/results/negative/cascade_dbdrop.q.out  |  4 +-
 .../negative/cascade_dbdrop_hadoop20.q.out  |  4 +-
 .../src/test/results/negative/hbase_ddl.q.out   |  4 +-
 .../results/positive/external_table_ppd.q.out   |  6 +--
 .../hbase_binary_external_table_queries.q.out   | 12 ++---
 .../positive/hbase_binary_map_queries.q.out | 32 ++---
 .../hbase_binary_map_queries_prefix.q.out   | 12 ++---
 .../positive/hbase_binary_storage_queries.q.out | 24 +-
 .../results/positive/hbase_custom_key.q.out |  8 ++--
 .../results/positive/hbase_custom_key2.q.out|  4 +-
 .../results/positive/hbase_custom_key3.q.out|  4 +-
 .../src/test/results/positive/hbase_ddl.q.out   |  8 ++--
 .../test/results/positive/hbase_queries.q.out   | 14 +++---
 .../positive/hbase_tablename_property.q.out | 49 
 .../positive/hbase_timestamp_format.q.out   |  8 ++--
 .../test/results/positive/hbase_viewjoins.q.out |  8 ++--
 .../hive/hcatalog/templeton/TestDesc.java   |  2 +-
 .../hbase/TestPigHBaseStorageHandler.java   | 21 ++---
 .../hadoop/hive/hbase/HBaseQTestUtil.java   |  2 +-
 36 files changed, 128 insertions(+), 184 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/0fc1cd4a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
--
diff --git 
a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java 
b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
index f203b8f..1553525 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
@@ -71,7 +71,7 @@ public class HBaseSerDe extends AbstractSerDe {
   public static final Logger LOG = LoggerFactory.getLogger(HBaseSerDe.class);
 
   public static final String HBASE_COLUMNS_MAPPING = "hbase.columns.mapping";
-  public static final String HBASE_TABLE_NAME = "hbase.mapreduce.hfileoutputformat.table.name";
+  public static final String HBASE_TABLE_NAME = "hbase.table.name";
   public static final String HBASE_TABLE_DEFAULT_STORAGE_TYPE = "hbase.table.default.storage.type";
   public static final String HBASE_KEY_COL = ":key";
   public static final String HBASE_TIMESTAMP_COL = ":timestamp";

http://git-wip-us.apache.org/repos/asf/hive/blob/0fc1cd4a/hbase-handler/src/test/queries/negative/cascade_dbdrop.q
--
diff --git a/hbase-handler/src/test/queries/negative/cascade_dbdrop.q 
b/hbase-handler/src/test/queries/negative/cascade_dbdrop.q
index 39ecc6b..7f9df5e 100644
--- a/hbase-handler/src/test/queries/negative/cascade_dbdrop.q
+++ b/hbase-handler/src/test/queries/negative/cascade_dbdrop.q
@@ -14,7 +14,7 @@ CREATE DATABASE hbaseDB;
 CREATE TABLE hbaseDB.hbase_table_0(key int, value string)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = 

[3/3] hive git commit: Revert "HIVE-18202: Automatically migrate hbase.table.name to hbase.mapreduce.hfileoutputformat.table.name for hbase-based table (Aihua Xu, reviewed by Naveen Gangam)"

2018-01-26 Thread aihuaxu
Revert "HIVE-18202: Automatically migrate hbase.table.name to 
hbase.mapreduce.hfileoutputformat.table.name for hbase-based table (Aihua Xu, 
reviewed by Naveen Gangam)"

This reverts commit d37b8026ed25b4150fd3782331e6800459d40d7a.


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6b349b5f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6b349b5f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6b349b5f

Branch: refs/heads/master
Commit: 6b349b5f83cff54c4b6b04537227087b2b3d4a0d
Parents: c14a46d
Author: Aihua Xu 
Authored: Fri Jan 26 13:45:51 2018 -0800
Committer: Aihua Xu 
Committed: Fri Jan 26 13:45:51 2018 -0800

--
 metastore/scripts/upgrade/derby/047-HIVE-18202.derby.sql   | 6 --
 .../scripts/upgrade/derby/upgrade-2.3.0-to-3.0.0.derby.sql | 1 -
 metastore/scripts/upgrade/mssql/032-HIVE-18202.mssql.sql   | 6 --
 .../scripts/upgrade/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql | 1 -
 metastore/scripts/upgrade/mysql/047-HIVE-18202.mysql.sql   | 6 --
 .../scripts/upgrade/mysql/upgrade-2.3.0-to-3.0.0.mysql.sql | 1 -
 metastore/scripts/upgrade/oracle/047-HIVE-18202-oracle.sql | 6 --
 .../scripts/upgrade/oracle/upgrade-2.3.0-to-3.0.0.oracle.sql   | 1 -
 metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql | 6 --
 .../upgrade/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql   | 1 -
 10 files changed, 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/6b349b5f/metastore/scripts/upgrade/derby/047-HIVE-18202.derby.sql
--
diff --git a/metastore/scripts/upgrade/derby/047-HIVE-18202.derby.sql 
b/metastore/scripts/upgrade/derby/047-HIVE-18202.derby.sql
deleted file mode 100644
index 0fdc615..000
--- a/metastore/scripts/upgrade/derby/047-HIVE-18202.derby.sql
+++ /dev/null
@@ -1,6 +0,0 @@
-UPDATE TABLE_PARAMS
-  SET PARAM_KEY = 'hbase.mapreduce.hfileoutputformat.table.name'
-WHERE
-  PARAM_KEY = 'hbase.table.name'
-;
-

http://git-wip-us.apache.org/repos/asf/hive/blob/6b349b5f/metastore/scripts/upgrade/derby/upgrade-2.3.0-to-3.0.0.derby.sql
--
diff --git a/metastore/scripts/upgrade/derby/upgrade-2.3.0-to-3.0.0.derby.sql 
b/metastore/scripts/upgrade/derby/upgrade-2.3.0-to-3.0.0.derby.sql
index 4372498..3a11881 100644
--- a/metastore/scripts/upgrade/derby/upgrade-2.3.0-to-3.0.0.derby.sql
+++ b/metastore/scripts/upgrade/derby/upgrade-2.3.0-to-3.0.0.derby.sql
@@ -5,7 +5,6 @@ RUN '043-HIVE-16922.derby.sql';
 RUN '044-HIVE-16997.derby.sql';
 RUN '045-HIVE-16886.derby.sql';
 RUN '046-HIVE-17566.derby.sql';
-RUN '047-HIVE-18202.derby.sql';
 RUN '048-HIVE-14498.derby.sql';
 RUN '049-HIVE-18489.derby.sql';
 

http://git-wip-us.apache.org/repos/asf/hive/blob/6b349b5f/metastore/scripts/upgrade/mssql/032-HIVE-18202.mssql.sql
--
diff --git a/metastore/scripts/upgrade/mssql/032-HIVE-18202.mssql.sql 
b/metastore/scripts/upgrade/mssql/032-HIVE-18202.mssql.sql
deleted file mode 100644
index 0fdc615..000
--- a/metastore/scripts/upgrade/mssql/032-HIVE-18202.mssql.sql
+++ /dev/null
@@ -1,6 +0,0 @@
-UPDATE TABLE_PARAMS
-  SET PARAM_KEY = 'hbase.mapreduce.hfileoutputformat.table.name'
-WHERE
-  PARAM_KEY = 'hbase.table.name'
-;
-

http://git-wip-us.apache.org/repos/asf/hive/blob/6b349b5f/metastore/scripts/upgrade/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql
--
diff --git a/metastore/scripts/upgrade/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql 
b/metastore/scripts/upgrade/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql
index 52727f3..f5a260e 100644
--- a/metastore/scripts/upgrade/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql
+++ b/metastore/scripts/upgrade/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql
@@ -6,7 +6,6 @@ SELECT 'Upgrading MetaStore schema from 2.3.0 to 3.0.0' AS MESSAGE;
 :r 029-HIVE-16997.mssql.sql
 :r 030-HIVE-16886.mssql.sql
 :r 031-HIVE-17566.mssql.sql
-:r 032-HIVE-18202.mssql.sql
 :r 033-HIVE-14498.mssql.sql
 :r 034-HIVE-18489.mssql.sql
 

http://git-wip-us.apache.org/repos/asf/hive/blob/6b349b5f/metastore/scripts/upgrade/mysql/047-HIVE-18202.mysql.sql
--
diff --git a/metastore/scripts/upgrade/mysql/047-HIVE-18202.mysql.sql 
b/metastore/scripts/upgrade/mysql/047-HIVE-18202.mysql.sql
deleted file mode 100644
index 0fdc615..000
--- a/metastore/scripts/upgrade/mysql/047-HIVE-18202.mysql.sql
+++ /dev/null
@@ -1,6 +0,0 @@
-UPDATE TABLE_PARAMS
-  SET PARAM_KEY = 'hbase.mapreduce.hfileoutputformat.table.name'
-WHERE
-  PARAM_KEY = 'hbase.table.name'
-;
-


hive git commit: HIVE-18202: Automatically migrate hbase.table.name to hbase.mapreduce.hfileoutputformat.table.name for hbase-based table (addendum)

2018-01-24 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 042698ce2 -> ee802dba3


HIVE-18202: Automatically migrate hbase.table.name to 
hbase.mapreduce.hfileoutputformat.table.name for hbase-based table (addendum)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ee802dba
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ee802dba
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ee802dba

Branch: refs/heads/master
Commit: ee802dba3f28d0467bbea045e6aa5c9bfac8e2a5
Parents: 042698c
Author: Aihua Xu 
Authored: Wed Jan 24 13:32:02 2018 -0800
Committer: Aihua Xu 
Committed: Wed Jan 24 13:32:02 2018 -0800

--
 metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/ee802dba/metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql
--
diff --git a/metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql 
b/metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql
index 0fdc615..c054322 100644
--- a/metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql
+++ b/metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql
@@ -1,6 +1,6 @@
-UPDATE TABLE_PARAMS
-  SET PARAM_KEY = 'hbase.mapreduce.hfileoutputformat.table.name'
+UPDATE "TABLE_PARAMS"
+  SET "PARAM_KEY" = 'hbase.mapreduce.hfileoutputformat.table.name'
 WHERE
-  PARAM_KEY = 'hbase.table.name'
+  "PARAM_KEY" = 'hbase.table.name'
 ;
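The quoting added here matters because PostgreSQL folds unquoted identifiers to lowercase: an unquoted UPDATE TABLE_PARAMS resolves to a table named table_params, while the Hive metastore schema on PostgreSQL creates the table and column with quoted uppercase names, so only the quoted "TABLE_PARAMS"/"PARAM_KEY" form matches. (The revert on 2018-01-26, earlier in this digest, dropped the HIVE-18202 migration altogether.)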
 



hive git commit: HIVE-18202: Automatically migrate hbase.table.name to hbase.mapreduce.hfileoutputformat.table.name for hbase-based table (Aihua Xu, reviewed by Naveen Gangam)

2018-01-09 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 146234906 -> d37b8026e


HIVE-18202: Automatically migrate hbase.table.name to 
hbase.mapreduce.hfileoutputformat.table.name for hbase-based table (Aihua Xu, 
reviewed by Naveen Gangam)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d37b8026
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d37b8026
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d37b8026

Branch: refs/heads/master
Commit: d37b8026ed25b4150fd3782331e6800459d40d7a
Parents: 1462349
Author: Aihua Xu 
Authored: Tue Jan 9 15:46:04 2018 -0800
Committer: Aihua Xu 
Committed: Tue Jan 9 15:46:04 2018 -0800

--
 metastore/scripts/upgrade/derby/047-HIVE-18202.derby.sql   | 6 ++
 .../scripts/upgrade/derby/upgrade-2.3.0-to-3.0.0.derby.sql | 1 +
 metastore/scripts/upgrade/mssql/032-HIVE-18202.mssql.sql   | 6 ++
 .../scripts/upgrade/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql | 1 +
 metastore/scripts/upgrade/mysql/047-HIVE-18202.mysql.sql   | 6 ++
 .../scripts/upgrade/mysql/upgrade-2.3.0-to-3.0.0.mysql.sql | 1 +
 metastore/scripts/upgrade/oracle/047-HIVE-18202-oracle.sql | 6 ++
 .../scripts/upgrade/oracle/upgrade-2.3.0-to-3.0.0.oracle.sql   | 1 +
 metastore/scripts/upgrade/postgres/046-HIVE-18202.postgres.sql | 6 ++
 .../upgrade/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql   | 1 +
 10 files changed, 35 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/d37b8026/metastore/scripts/upgrade/derby/047-HIVE-18202.derby.sql
--
diff --git a/metastore/scripts/upgrade/derby/047-HIVE-18202.derby.sql 
b/metastore/scripts/upgrade/derby/047-HIVE-18202.derby.sql
new file mode 100644
index 000..0fdc615
--- /dev/null
+++ b/metastore/scripts/upgrade/derby/047-HIVE-18202.derby.sql
@@ -0,0 +1,6 @@
+UPDATE TABLE_PARAMS
+  SET PARAM_KEY = 'hbase.mapreduce.hfileoutputformat.table.name'
+WHERE
+  PARAM_KEY = 'hbase.table.name'
+;
+

http://git-wip-us.apache.org/repos/asf/hive/blob/d37b8026/metastore/scripts/upgrade/derby/upgrade-2.3.0-to-3.0.0.derby.sql
--
diff --git a/metastore/scripts/upgrade/derby/upgrade-2.3.0-to-3.0.0.derby.sql 
b/metastore/scripts/upgrade/derby/upgrade-2.3.0-to-3.0.0.derby.sql
index 1f2647d..75b0e73 100644
--- a/metastore/scripts/upgrade/derby/upgrade-2.3.0-to-3.0.0.derby.sql
+++ b/metastore/scripts/upgrade/derby/upgrade-2.3.0-to-3.0.0.derby.sql
@@ -5,5 +5,6 @@ RUN '043-HIVE-16922.derby.sql';
 RUN '044-HIVE-16997.derby.sql';
 RUN '045-HIVE-16886.derby.sql';
 RUN '046-HIVE-17566.derby.sql';
+RUN '047-HIVE-18202.derby.sql';
 
 UPDATE "APP".VERSION SET SCHEMA_VERSION='3.0.0', VERSION_COMMENT='Hive release 
version 3.0.0' where VER_ID=1;

http://git-wip-us.apache.org/repos/asf/hive/blob/d37b8026/metastore/scripts/upgrade/mssql/032-HIVE-18202.mssql.sql
--
diff --git a/metastore/scripts/upgrade/mssql/032-HIVE-18202.mssql.sql 
b/metastore/scripts/upgrade/mssql/032-HIVE-18202.mssql.sql
new file mode 100644
index 000..0fdc615
--- /dev/null
+++ b/metastore/scripts/upgrade/mssql/032-HIVE-18202.mssql.sql
@@ -0,0 +1,6 @@
+UPDATE TABLE_PARAMS
+  SET PARAM_KEY = 'hbase.mapreduce.hfileoutputformat.table.name'
+WHERE
+  PARAM_KEY = 'hbase.table.name'
+;
+

http://git-wip-us.apache.org/repos/asf/hive/blob/d37b8026/metastore/scripts/upgrade/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql
--
diff --git a/metastore/scripts/upgrade/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql 
b/metastore/scripts/upgrade/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql
index 864a5e5..79189bb 100644
--- a/metastore/scripts/upgrade/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql
+++ b/metastore/scripts/upgrade/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql
@@ -6,6 +6,7 @@ SELECT 'Upgrading MetaStore schema from 2.3.0 to 3.0.0' AS MESSAGE;
 :r 029-HIVE-16997.mssql.sql
 :r 030-HIVE-16886.mssql.sql
 :r 031-HIVE-17566.mssql.sql
+:r 032-HIVE-18202.mssql.sql
 
 UPDATE VERSION SET SCHEMA_VERSION='3.0.0', VERSION_COMMENT='Hive release version 3.0.0' where VER_ID=1;
 SELECT 'Finished upgrading MetaStore schema from 2.3.0 to 3.0.0' AS MESSAGE;

http://git-wip-us.apache.org/repos/asf/hive/blob/d37b8026/metastore/scripts/upgrade/mysql/047-HIVE-18202.mysql.sql
--
diff --git a/metastore/scripts/upgrade/mysql/047-HIVE-18202.mysql.sql 
b/metastore/scripts/upgrade/mysql/047-HIVE-18202.mysql.sql
new file mode 100644
index 000..0fdc615
--- /dev/null
+++ b/metastore/scripts/upgrade/mysql/047-HIVE-18202.mysql.sql
@@ -0,0 +1,6 @@

hive git commit: HIVE-18366: Update HBaseSerDe to use hbase.mapreduce.hfileoutputformat.table.name instead of hbase.table.name as the table name property (Aihua Xu, reviewed Yongzhi Chen)

2018-01-09 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 504239869 -> 146234906


HIVE-18366: Update HBaseSerDe to use 
hbase.mapreduce.hfileoutputformat.table.name instead of hbase.table.name as the 
table name property (Aihua Xu, reviewed Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/14623490
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/14623490
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/14623490

Branch: refs/heads/master
Commit: 146234906982c59953d2dad25c0f1e8978126243
Parents: 5042398
Author: Aihua Xu 
Authored: Thu Jan 4 12:49:01 2018 -0800
Committer: Aihua Xu 
Committed: Tue Jan 9 13:54:41 2018 -0800

--
 .../apache/hadoop/hive/hbase/HBaseSerDe.java|  2 +-
 .../src/test/queries/negative/cascade_dbdrop.q  |  2 +-
 .../queries/negative/cascade_dbdrop_hadoop20.q  |  2 +-
 .../src/test/queries/negative/hbase_ddl.q   |  2 +-
 .../test/queries/positive/external_table_ppd.q  |  2 +-
 .../hbase_binary_external_table_queries.q   |  6 +--
 .../queries/positive/hbase_binary_map_queries.q | 16 +++
 .../positive/hbase_binary_map_queries_prefix.q  |  6 +--
 .../positive/hbase_binary_storage_queries.q | 10 ++---
 .../test/queries/positive/hbase_custom_key.q|  4 +-
 .../test/queries/positive/hbase_custom_key2.q   |  2 +-
 .../test/queries/positive/hbase_custom_key3.q   |  2 +-
 .../src/test/queries/positive/hbase_ddl.q   |  2 +-
 .../src/test/queries/positive/hbase_queries.q   |  6 +--
 .../queries/positive/hbase_tablename_property.q | 14 ++
 .../queries/positive/hbase_timestamp_format.q   |  4 +-
 .../src/test/queries/positive/hbase_viewjoins.q |  4 +-
 .../test/results/negative/cascade_dbdrop.q.out  |  4 +-
 .../negative/cascade_dbdrop_hadoop20.q.out  |  4 +-
 .../src/test/results/negative/hbase_ddl.q.out   |  4 +-
 .../results/positive/external_table_ppd.q.out   |  6 +--
 .../hbase_binary_external_table_queries.q.out   | 12 ++---
 .../positive/hbase_binary_map_queries.q.out | 32 ++---
 .../hbase_binary_map_queries_prefix.q.out   | 12 ++---
 .../positive/hbase_binary_storage_queries.q.out | 24 +-
 .../results/positive/hbase_custom_key.q.out |  8 ++--
 .../results/positive/hbase_custom_key2.q.out|  4 +-
 .../results/positive/hbase_custom_key3.q.out|  4 +-
 .../src/test/results/positive/hbase_ddl.q.out   |  8 ++--
 .../test/results/positive/hbase_queries.q.out   | 14 +++---
 .../positive/hbase_tablename_property.q.out | 47 
 .../positive/hbase_timestamp_format.q.out   |  8 ++--
 .../test/results/positive/hbase_viewjoins.q.out |  8 ++--
 .../hive/hcatalog/templeton/TestDesc.java   |  2 +-
 .../hbase/TestPigHBaseStorageHandler.java   | 21 +++--
 .../hadoop/hive/hbase/HBaseQTestUtil.java   |  2 +-
 36 files changed, 182 insertions(+), 128 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/14623490/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
--
diff --git 
a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java 
b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
index c2e7808..1f1bd56 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
@@ -71,7 +71,7 @@ public class HBaseSerDe extends AbstractSerDe {
   public static final Logger LOG = LoggerFactory.getLogger(HBaseSerDe.class);
 
   public static final String HBASE_COLUMNS_MAPPING = "hbase.columns.mapping";
-  public static final String HBASE_TABLE_NAME = "hbase.table.name";
+  public static final String HBASE_TABLE_NAME = 
"hbase.mapreduce.hfileoutputformat.table.name";
   public static final String HBASE_TABLE_DEFAULT_STORAGE_TYPE = 
"hbase.table.default.storage.type";
   public static final String HBASE_KEY_COL = ":key";
   public static final String HBASE_TIMESTAMP_COL = ":timestamp";

http://git-wip-us.apache.org/repos/asf/hive/blob/14623490/hbase-handler/src/test/queries/negative/cascade_dbdrop.q
--
diff --git a/hbase-handler/src/test/queries/negative/cascade_dbdrop.q 
b/hbase-handler/src/test/queries/negative/cascade_dbdrop.q
index 7f9df5e..39ecc6b 100644
--- a/hbase-handler/src/test/queries/negative/cascade_dbdrop.q
+++ b/hbase-handler/src/test/queries/negative/cascade_dbdrop.q
@@ -14,7 +14,7 @@ CREATE DATABASE hbaseDB;
 CREATE TABLE hbaseDB.hbase_table_0(key int, value string)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string")
-TBLPROPERTIES ("hbase.table.name" = "hbase_table_0");

[2/3] hive git commit: HIVE-16826: Improvements for SeparatedValuesOutputFormat (BELUGA BEHR, reviewed by Aihua Xu)

2018-01-08 Thread aihuaxu
HIVE-16826: Improvements for SeparatedValuesOutputFormat (BELUGA BEHR, reviewed 
by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0ea2f288
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0ea2f288
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0ea2f288

Branch: refs/heads/master
Commit: 0ea2f288a01768a4f431f7a5634b050bc8c3e47a
Parents: aa45b02
Author: Aihua Xu 
Authored: Mon Jan 8 10:46:10 2018 -0800
Committer: Aihua Xu 
Committed: Mon Jan 8 10:46:10 2018 -0800

--
 .../beeline/SeparatedValuesOutputFormat.java| 141 +--
 1 file changed, 70 insertions(+), 71 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/0ea2f288/beeline/src/java/org/apache/hive/beeline/SeparatedValuesOutputFormat.java
--
diff --git 
a/beeline/src/java/org/apache/hive/beeline/SeparatedValuesOutputFormat.java 
b/beeline/src/java/org/apache/hive/beeline/SeparatedValuesOutputFormat.java
index 9c24a20..172b5b5 100644
--- a/beeline/src/java/org/apache/hive/beeline/SeparatedValuesOutputFormat.java
+++ b/beeline/src/java/org/apache/hive/beeline/SeparatedValuesOutputFormat.java
@@ -22,112 +22,111 @@
  */
 package org.apache.hive.beeline;
 
-import java.io.IOException;
-import java.io.StringWriter;
-
-import org.apache.hadoop.io.IOUtils;
+import org.apache.commons.io.output.StringBuilderWriter;
+import org.apache.commons.lang.BooleanUtils;
+import org.apache.commons.lang.StringUtils;
 import org.supercsv.encoder.CsvEncoder;
+import org.supercsv.encoder.DefaultCsvEncoder;
 import org.supercsv.encoder.SelectiveCsvEncoder;
 import org.supercsv.io.CsvListWriter;
 import org.supercsv.prefs.CsvPreference;
 
 /**
- * OutputFormat for values separated by a delimiter.
+ * OutputFormat for values separated by a configurable delimiter
  */
 class SeparatedValuesOutputFormat implements OutputFormat {
+
+  public final static String DSV_OPT_OUTPUT_FORMAT = "dsv";
   public final static String DISABLE_QUOTING_FOR_SV = "disable.quoting.for.sv";
+  private final static char DEFAULT_QUOTE_CHAR = '"';
   private final BeeLine beeLine;
-  private CsvPreference quotedCsvPreference;
-  private CsvPreference unquotedCsvPreference;
+  private final StringBuilderWriter buffer;
+  private final char defaultSeparator;
 
   SeparatedValuesOutputFormat(BeeLine beeLine, char separator) {
 this.beeLine = beeLine;
-unquotedCsvPreference = getUnquotedCsvPreference(separator);
-quotedCsvPreference = new CsvPreference.Builder('"', separator, 
"").build();
+this.defaultSeparator = separator;
+this.buffer = new StringBuilderWriter();
   }
 
-  private static CsvPreference getUnquotedCsvPreference(char delimiter) {
-CsvEncoder noEncoder = new SelectiveCsvEncoder();
-return new CsvPreference.Builder('\0', delimiter, 
"").useEncoder(noEncoder).build();
-  }
+  private CsvPreference getCsvPreference() {
+char separator = this.defaultSeparator;
+char quoteChar = DEFAULT_QUOTE_CHAR;
+CsvEncoder encoder;
 
-  private void updateCsvPreference() {
-if (beeLine.getOpts().getOutputFormat().equals("dsv")) {
-  // check whether delimiter changed by user
-  char curDel = (char) getCsvPreference().getDelimiterChar();
-  char newDel = beeLine.getOpts().getDelimiterForDSV();
-  // if delimiter changed, rebuild the csv preference
-  if (newDel != curDel) {
-// "" is passed as the end of line symbol in following function, as
-// beeline itself adds newline
-if (isQuotingDisabled()) {
-  unquotedCsvPreference = getUnquotedCsvPreference(newDel);
-} else {
-  quotedCsvPreference = new CsvPreference.Builder('"', newDel, 
"").build();
-}
-  }
+if (DSV_OPT_OUTPUT_FORMAT.equals(beeLine.getOpts().getOutputFormat())) {
+  separator = beeLine.getOpts().getDelimiterForDSV();
 }
+
+if (isQuotingDisabled()) {
+  quoteChar = '\0';
+  encoder = new SelectiveCsvEncoder();
+} else {
+  encoder = new DefaultCsvEncoder();
+}
+
+return new CsvPreference.Builder(quoteChar, separator, 
StringUtils.EMPTY).useEncoder(encoder).build();
   }
 
   @Override
   public int print(Rows rows) {
-updateCsvPreference();
-
+CsvPreference csvPreference = getCsvPreference();
+CsvListWriter writer = new CsvListWriter(this.buffer, csvPreference);
 int count = 0;
+
+Rows.Row labels = (Rows.Row) rows.next();
+if (beeLine.getOpts().getShowHeader()) {
+  fillBuffer(writer, labels);
+  String line = getLine(this.buffer);
+  beeLine.output(line);
+}
+
 while (rows.hasNext()) {
-  if (count == 0 && !beeLine.getOpts().getShowHeader()) {
-rows.next();
-
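
For context, the refactored class leans on super-csv's CsvPreference and
CsvListWriter writing into a reusable commons-io StringBuilderWriter. A
minimal self-contained sketch of that pattern (the sample values are made up):

    import java.io.IOException;
    import org.apache.commons.io.output.StringBuilderWriter;
    import org.supercsv.io.CsvListWriter;
    import org.supercsv.prefs.CsvPreference;

    public class CsvLineSketch {
      public static void main(String[] args) throws IOException {
        // Quote with '"', separate with ','; "" as the end-of-line symbol
        // because the caller (BeeLine, in the patch) adds its own newline.
        CsvPreference prefs = new CsvPreference.Builder('"', ',', "").build();
        StringBuilderWriter buffer = new StringBuilderWriter();
        try (CsvListWriter writer = new CsvListWriter(buffer, prefs)) {
          writer.write("id", "value with, comma");  // second field gets quoted
          writer.flush();
          System.out.println(buffer.getBuilder());  // id,"value with, comma"
          buffer.getBuilder().setLength(0);         // reset the buffer for reuse
        }
      }
    }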

[1/3] hive git commit: HIVE-17966: org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveArrayInspector - Review (BELUGA BEHR, reviewed by Aihua Xu)

2018-01-08 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master a6b88d9d2 -> 148807a98


HIVE-17966: 
org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveArrayInspector - Review 
(BELUGA BEHR, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/aa45b021
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/aa45b021
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/aa45b021

Branch: refs/heads/master
Commit: aa45b021f8f2e65afcb3134ea79c91db86668bcd
Parents: a6b88d9
Author: Aihua Xu 
Authored: Mon Jan 8 10:44:16 2018 -0800
Committer: Aihua Xu 
Committed: Mon Jan 8 10:44:16 2018 -0800

--
 .../serde/ParquetHiveArrayInspector.java| 41 
 1 file changed, 16 insertions(+), 25 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/aa45b021/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveArrayInspector.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveArrayInspector.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveArrayInspector.java
index 55614a3..c4a286d 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveArrayInspector.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveArrayInspector.java
@@ -14,6 +14,7 @@
 package org.apache.hadoop.hive.ql.io.parquet.serde;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -69,7 +70,7 @@ public class ParquetHiveArrayInspector implements 
SettableListObjectInspector {
 }
 
 if (data instanceof List) {
-  return ((List)data).get(index);
+  return ((List<?>)data).get(index);
 }
 
 throw new UnsupportedOperationException("Cannot inspect " + 
data.getClass().getCanonicalName());
@@ -91,7 +92,7 @@ public class ParquetHiveArrayInspector implements 
SettableListObjectInspector {
 }
 
 if (data instanceof List) {
-  return ((List)data).size();
+  return ((List<?>)data).size();
 }
 
 throw new UnsupportedOperationException("Cannot inspect " + 
data.getClass().getCanonicalName());
@@ -108,13 +109,7 @@ public class ParquetHiveArrayInspector implements 
SettableListObjectInspector {
   if (array == null) {
 return null;
   }
-
-  final List list = new ArrayList(array.length);
-  for (final Writable obj : array) {
-list.add(obj);
-  }
-
-  return list;
+  return new ArrayList<Writable>(Arrays.asList(array));
 }
 
 if (data instanceof List) {
@@ -126,29 +121,27 @@ public class ParquetHiveArrayInspector implements 
SettableListObjectInspector {
 
   @Override
   public Object create(final int size) {
-final ArrayList result = new ArrayList(size);
-for (int i = 0; i < size; ++i) {
-  result.add(null);
-}
-return result;
+return new ArrayList<Object>(Arrays.asList(new Object[size]));
   }
 
   @Override
+  @SuppressWarnings("unchecked")
   public Object set(final Object list, final int index, final Object element) {
-final ArrayList l = (ArrayList) list;
+final List<Object> l = (List<Object>) list;
 l.set(index, element);
 return list;
   }
 
   @Override
+  @SuppressWarnings("unchecked")
   public Object resize(final Object list, final int newSize) {
-final ArrayList l = (ArrayList) list;
-l.ensureCapacity(newSize);
-while (l.size() < newSize) {
-  l.add(null);
-}
-while (l.size() > newSize) {
-  l.remove(l.size() - 1);
+final List<Object> l = (List<Object>) list;
+final int deltaSize = newSize - l.size();
+if (deltaSize > 0) {
+  l.addAll(Arrays.asList(new Object[deltaSize]));
+} else {
+  int size = l.size();
+  l.subList(size + deltaSize, size).clear();
 }
 return list;
   }
@@ -167,8 +160,6 @@ public class ParquetHiveArrayInspector implements 
SettableListObjectInspector {
 
   @Override
   public int hashCode() {
-int hash = 3;
-hash = 29 * hash + (this.arrayElementInspector != null ? 
this.arrayElementInspector.hashCode() : 0);
-return hash;
+return (this.arrayElementInspector != null ? 
this.arrayElementInspector.hashCode() : 0);
   }
 }
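
The resize() rewrite above is the interesting part: pad with Arrays.asList
over a fresh Object[] and truncate through a subList view. A standalone
sketch of the same technique:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class ResizeSketch {
      // Grow or shrink a list to newSize, padding with nulls, as in the patch.
      static List<Object> resize(List<Object> l, int newSize) {
        int delta = newSize - l.size();
        if (delta > 0) {
          // Arrays.asList over a new Object[] yields `delta` nulls in one call.
          l.addAll(Arrays.asList(new Object[delta]));
        } else {
          int size = l.size();
          // Clearing a subList view truncates the backing list in place.
          l.subList(size + delta, size).clear();
        }
        return l;
      }

      public static void main(String[] args) {
        List<Object> l = new ArrayList<>(Arrays.asList(1, 2, 3));
        System.out.println(resize(l, 5));  // [1, 2, 3, null, null]
        System.out.println(resize(l, 2));  // [1, 2]
      }
    }

Both branches mutate the list in place, so callers holding the original
reference see the new size without the remove-one-at-a-time loop of the old
code.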



[3/3] hive git commit: HIVE-16970: General Improvements To org.apache.hadoop.hive.metastore.cache.CacheUtils (BELUGA BEHR, reviewed by Ashutosh Chauhan)

2018-01-08 Thread aihuaxu
HIVE-16970: General Improvements To 
org.apache.hadoop.hive.metastore.cache.CacheUtils (BELUGA BEHR, reviewed by 
Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/148807a9
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/148807a9
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/148807a9

Branch: refs/heads/master
Commit: 148807a98ad4f0672c9457fff8ba1501c237f1de
Parents: 0ea2f28
Author: Aihua Xu 
Authored: Mon Jan 8 10:48:11 2018 -0800
Committer: Aihua Xu 
Committed: Mon Jan 8 10:48:11 2018 -0800

--
 .../hadoop/hive/metastore/cache/CacheUtils.java | 54 +---
 1 file changed, 23 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/148807a9/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CacheUtils.java
--
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CacheUtils.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CacheUtils.java
index ab6b90f..798ada8 100644
--- 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CacheUtils.java
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CacheUtils.java
@@ -17,11 +17,12 @@
  */
 package org.apache.hadoop.hive.metastore.cache;
 
-import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 import java.util.regex.Pattern;
 
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.SkewedInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
@@ -43,14 +44,9 @@ public class CacheUtils {
 
  public static String buildKey(String dbName, String tableName, List<String> 
partVals) {
 String key = buildKey(dbName, tableName);
-if (partVals == null || partVals.size() == 0) {
-  return key;
-}
-for (int i = 0; i < partVals.size(); i++) {
-  key += partVals.get(i);
-  if (i != partVals.size() - 1) {
-key += delimit;
-  }
+if (CollectionUtils.isNotEmpty(partVals)) {
+  key += delimit;
+  key += String.join(delimit, partVals);
 }
 return key;
   }
@@ -78,28 +74,24 @@ public class CacheUtils {
 String[] comps = key.split(delimit);
 result[0] = comps[0];
 result[1] = comps[1];
-List<String> vals = new ArrayList<>();
-for (int i=2;i<comps.length;i++) {
+  if (sdCopy.getBucketCols() == null) {
+sdCopy.setBucketCols(Collections.emptyList());
   }
-  if (sdCopy.getSortCols()==null) {
-sdCopy.setSortCols(new ArrayList<>());
+  if (sdCopy.getSortCols() == null) {
+sdCopy.setSortCols(Collections.emptyList());
   }
-  if (sdCopy.getSkewedInfo()==null) {
-sdCopy.setSkewedInfo(new SkewedInfo(new ArrayList<>(),
-new ArrayList<>(), new HashMap<>()));
+  if (sdCopy.getSkewedInfo() == null) {
+sdCopy.setSkewedInfo(new SkewedInfo(Collections.emptyList(),
+  Collections.emptyList(), Collections.emptyMap()));
   }
   sdCopy.setLocation(wrapper.getLocation());
   sdCopy.setParameters(wrapper.getParameters());
@@ -110,17 +102,17 @@ public class CacheUtils {
 
   static Partition assemble(PartitionWrapper wrapper, SharedCache sharedCache) 
{
 Partition p = wrapper.getPartition().deepCopy();
-if (wrapper.getSdHash()!=null) {
+if (wrapper.getSdHash() != null) {
   StorageDescriptor sdCopy = 
sharedCache.getSdFromCache(wrapper.getSdHash()).deepCopy();
-  if (sdCopy.getBucketCols()==null) {
-sdCopy.setBucketCols(new ArrayList<>());
+  if (sdCopy.getBucketCols() == null) {
+sdCopy.setBucketCols(Collections.emptyList());
   }
-  if (sdCopy.getSortCols()==null) {
-sdCopy.setSortCols(new ArrayList<>());
+  if (sdCopy.getSortCols() == null) {
+sdCopy.setSortCols(Collections.emptyList());
   }
-  if (sdCopy.getSkewedInfo()==null) {
-sdCopy.setSkewedInfo(new 
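
The buildKey() change swaps the manual index loop for String.join. A
standalone sketch; the delimiter value here is an assumption, since the real
one is a private constant in CacheUtils:

    import java.util.Arrays;
    import java.util.List;

    public class KeyBuildSketch {
      private static final String DELIMIT = "#";  // assumed, for illustration

      static String buildKey(String dbName, String tableName, List<String> partVals) {
        String key = dbName + DELIMIT + tableName;
        if (partVals != null && !partVals.isEmpty()) {  // CollectionUtils.isNotEmpty in the patch
          key += DELIMIT + String.join(DELIMIT, partVals);
        }
        return key;
      }

      public static void main(String[] args) {
        // prints db#tbl#2018#01 with the assumed delimiter
        System.out.println(buildKey("db", "tbl", Arrays.asList("2018", "01")));
      }
    }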

hive git commit: HIVE-17870: Update NoDeleteRollingFileAppender to use Log4j2 api (Andrew Sherman, reviewed by Aihua Xu)

2017-12-01 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 6a6522a09 -> 1b4baf474


HIVE-17870: Update NoDeleteRollingFileAppender to use Log4j2 api (Andrew 
Sherman, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1b4baf47
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1b4baf47
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1b4baf47

Branch: refs/heads/master
Commit: 1b4baf474c15377cc9f0bacdda317feabeefacaf
Parents: 6a6522a
Author: Aihua Xu 
Authored: Fri Dec 1 11:38:34 2017 -0800
Committer: Aihua Xu 
Committed: Fri Dec 1 13:09:34 2017 -0800

--
 pom.xml |   2 +-
 .../log/HushableRandomAccessFileAppender.java   |   3 +-
 .../ql/log/NoDeleteRollingFileAppender.java | 176 ---
 .../ql/log/SlidingFilenameRolloverStrategy.java |  82 +
 .../TestSlidingFilenameRolloverStrategy.java| 145 +++
 .../log4j2_test_sliding_rollover.properties |  69 
 testutils/ptest2/pom.xml|   2 +-
 7 files changed, 299 insertions(+), 180 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/1b4baf47/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 1682f47..6d8ab5e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -180,7 +180,7 @@
 3.0.3
 0.9.3
 0.9.3
-    <log4j2.version>2.6.2</log4j2.version>
+    <log4j2.version>2.8.2</log4j2.version>
 2.3
 1.4.1
 1.10.19

http://git-wip-us.apache.org/repos/asf/hive/blob/1b4baf47/ql/src/java/org/apache/hadoop/hive/ql/log/HushableRandomAccessFileAppender.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/log/HushableRandomAccessFileAppender.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/log/HushableRandomAccessFileAppender.java
index 639d1d8..0ff66df 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/log/HushableRandomAccessFileAppender.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/log/HushableRandomAccessFileAppender.java
@@ -176,8 +176,7 @@ public final class HushableRandomAccessFileAppender extends
   layout = PatternLayout.createDefaultLayout();
 }
 final RandomAccessFileManager manager = 
RandomAccessFileManager.getFileManager(
-fileName, isAppend, isFlush, bufferSize, advertiseURI, layout
-// , config  -- needed in later log4j versions
+fileName, isAppend, isFlush, bufferSize, advertiseURI, layout, config
 );
 if (manager == null) {
   return null;

http://git-wip-us.apache.org/repos/asf/hive/blob/1b4baf47/ql/src/java/org/apache/hadoop/hive/ql/log/NoDeleteRollingFileAppender.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/log/NoDeleteRollingFileAppender.java 
b/ql/src/java/org/apache/hadoop/hive/ql/log/NoDeleteRollingFileAppender.java
deleted file mode 100644
index be32f06..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/log/NoDeleteRollingFileAppender.java
+++ /dev/null
@@ -1,176 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.log;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.InterruptedIOException;
-import java.io.Writer;
-
-import org.apache.log4j.FileAppender;
-import org.apache.log4j.Layout;
-import org.apache.log4j.helpers.CountingQuietWriter;
-import org.apache.log4j.helpers.LogLog;
-import org.apache.log4j.helpers.OptionConverter;
-import org.apache.log4j.spi.LoggingEvent;
-
-public class NoDeleteRollingFileAppender extends FileAppender {
-  /**
-   * The default maximum file size is 10MB.
-   */
-  protected long maxFileSize = 10 * 1024 * 1024;
-
-  private long nextRollover = 0;
-
-  /**
-   * The default constructor simply calls its {@link FileAppender#FileAppender
-   * parents constructor}.
-   */
-  public NoDeleteRollingFileAppender() {
-  }
-
-  /**
-   * Instantiate a RollingFileAppender and open the file designated by
- 

hive git commit: HIVE-18023: Redact the expression in lineage info (Aihua Xu, reviewed by Yongzhi Chen)

2017-11-13 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master d8bc002af -> 83971dec5


HIVE-18023: Redact the expression in lineage info (Aihua Xu, reviewed by 
Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/83971dec
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/83971dec
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/83971dec

Branch: refs/heads/master
Commit: 83971dec5715071414597a2c0043752fe5186ecd
Parents: d8bc002
Author: Aihua Xu 
Authored: Wed Nov 8 14:23:27 2017 -0800
Committer: Aihua Xu 
Committed: Mon Nov 13 14:45:46 2017 -0800

--
 .../java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java   | 7 ---
 1 file changed, 4 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/83971dec/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java 
b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
index bdd98cf..2f764f8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
@@ -175,7 +175,7 @@ public class LineageLogger implements 
ExecuteWithHookContext {
 
 List<Edge> edges = getEdges(plan, index);
 Set<Vertex> vertices = getVertices(edges);
-writeEdges(writer, edges);
+writeEdges(writer, edges, hookContext.getConf());
 writeVertices(writer, vertices);
 writer.endObject();
 writer.close();
@@ -415,7 +415,8 @@ public class LineageLogger implements 
ExecuteWithHookContext {
   /**
* Write out an JSON array of edges.
*/
-  private void writeEdges(JsonWriter writer, List<Edge> edges) throws 
IOException {
+  private void writeEdges(JsonWriter writer, List<Edge> edges, HiveConf conf)
+  throws IOException, InstantiationException, IllegalAccessException, 
ClassNotFoundException {
 writer.name("edges");
 writer.beginArray();
 for (Edge edge: edges) {
@@ -433,7 +434,7 @@ public class LineageLogger implements 
ExecuteWithHookContext {
   }
   writer.endArray();
   if (edge.expr != null) {
-writer.name("expression").value(edge.expr);
+writer.name("expression").value(HookUtils.redactLogString(conf, 
edge.expr));
   }
   writer.name("edgeType").value(edge.type.name());
   writer.endObject();
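
HookUtils.redactLogString runs the configured query redactors over the
expression string before it lands in the lineage log. A stand-in sketch with
one hard-coded pattern where the real code applies the configured rules:

    import java.util.regex.Pattern;

    public class RedactSketch {
      // Illustrative only: mask card-like digit groups in an expression
      // string, the way a configured redactor would before logging.
      private static final Pattern CARD =
          Pattern.compile("\\d{4}-\\d{4}-\\d{4}-\\d{4}");

      static String redact(String expr) {
        return CARD.matcher(expr).replaceAll("####-####-####-####");
      }

      public static void main(String[] args) {
        System.out.println(redact("cc_num = '1234-5678-9012-3456'"));
      }
    }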



hive git commit: HIVE-18009: Multiple lateral view query is slow on hive on spark (Aihua Xu, reviewed by Yongzhi Chen & Xuefu Zhang)

2017-11-13 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 51249505c -> d8bc002af


HIVE-18009: Multiple lateral view query is slow on hive on spark (Aihua Xu, 
reviewed by Yongzhi Chen & Xuefu Zhang)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d8bc002a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d8bc002a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d8bc002a

Branch: refs/heads/master
Commit: d8bc002af63b61e1c1facec1cb5d2f6609377d14
Parents: 5124950
Author: Aihua Xu 
Authored: Tue Nov 7 16:30:36 2017 -0800
Committer: Aihua Xu 
Committed: Mon Nov 13 14:41:20 2017 -0800

--
 .../test/resources/testconfiguration.properties |  1 +
 .../hive/ql/parse/spark/GenSparkUtils.java  | 30 +++---
 .../lateral_view_multi_lateralviews.q   | 21 +++
 .../lateral_view_multi_lateralviews.q.out   | 62 
 .../spark/lateral_view_multi_lateralviews.q.out | 62 
 5 files changed, 167 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/d8bc002a/itests/src/test/resources/testconfiguration.properties
--
diff --git a/itests/src/test/resources/testconfiguration.properties 
b/itests/src/test/resources/testconfiguration.properties
index 5a6dbb8..3b1005f 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -1123,6 +1123,7 @@ spark.query.files=add_part_multiple.q, \
   join_vc.q, \
   join_view.q, \
   lateral_view_explode2.q, \
+  lateral_view_multi_lateralviews.q, \
   leftsemijoin.q, \
   leftsemijoin_mr.q, \
   limit_pushdown.q, \

http://git-wip-us.apache.org/repos/asf/hive/blob/d8bc002a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
index 9bea4dd..604c8ae 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
@@ -28,6 +28,7 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.Stack;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -617,17 +618,28 @@ public class GenSparkUtils {
   }
 
   @SuppressWarnings("unchecked")
-  public static <T> T getChildOperator(Operator<?> op, Class<T> klazz) throws 
SemanticException {
-if (klazz.isInstance(op)) {
-  return (T) op;
-}
-List<Operator<?>> childOperators = op.getChildOperators();
-for (Operator<?> childOp : childOperators) {
-  T result = getChildOperator(childOp, klazz);
-  if (result != null) {
-return result;
+  public static <T> T getChildOperator(Operator<?> root, Class<T> klazz) 
throws SemanticException {
+if (root == null) return null;
+
+HashSet<Operator<?>> visited = new HashSet<Operator<?>>();
+Stack<Operator<?>> stack = new Stack<Operator<?>>();
+stack.push(root);
+visited.add(root);
+
+while (!stack.isEmpty()) {
+  Operator<?> op = stack.pop();
+  if (klazz.isInstance(op)) {
+return (T) op;
+  }
+  List<Operator<?>> childOperators = op.getChildOperators();
+  for (Operator<?> childOp : childOperators) {
+if (!visited.contains(childOp)) {
+  stack.push(childOp);
+  visited.add(childOp);
+}
   }
 }
+
 return null;
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/d8bc002a/ql/src/test/queries/clientpositive/lateral_view_multi_lateralviews.q
--
diff --git 
a/ql/src/test/queries/clientpositive/lateral_view_multi_lateralviews.q 
b/ql/src/test/queries/clientpositive/lateral_view_multi_lateralviews.q
new file mode 100644
index 0000000..29e026a
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/lateral_view_multi_lateralviews.q
@@ -0,0 +1,21 @@
+CREATE TABLE t1(x5 STRUCT<x4:STRUCT<x3:ARRAY<STRUCT<x1:STRING, x2:ARRAY<STRING>>>> >);
+INSERT INTO t1 SELECT NAMED_STRUCT('x4', NAMED_STRUCT('x3', 
ARRAY(NAMED_STRUCT('x1', 'x1_1', 'x2', ARRAY('x2_1', 'x2_2')))));
+SELECT c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16
+FROM t1
+LATERAL VIEW EXPLODE(x5.x4.x3) lv as c1
+LATERAL VIEW EXPLODE(c1.x2) lv as c2
+LATERAL VIEW EXPLODE(x5.x4.x3) lv as c3
+LATERAL VIEW EXPLODE(c1.x2) lv as c4
+LATERAL VIEW EXPLODE(x5.x4.x3) lv as c5
+LATERAL VIEW EXPLODE(c1.x2) lv as c6
+LATERAL VIEW EXPLODE(x5.x4.x3) lv as c7
+LATERAL VIEW EXPLODE(c1.x2) lv as c8
+LATERAL VIEW EXPLODE(x5.x4.x3) lv as c9
+LATERAL VIEW EXPLODE(c1.x2) lv as c10
+LATERAL VIEW 
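
The getChildOperator() rewrite matters because plans with many lateral views
form a DAG whose operators share children; the old unmemoized recursion
revisited shared subtrees once per path, which is exponential in the number
of lateral views. A standalone sketch of the visited-set traversal (the Node
interface is illustrative, standing in for Hive's Operator):

    import java.util.ArrayDeque;
    import java.util.Deque;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class DagSearchSketch {
      interface Node { List<Node> children(); }

      // Each node is pushed at most once, so a DAG with shared children is
      // searched in linear time rather than once per root-to-node path.
      static <T> T findFirst(Node root, Class<T> klazz) {
        if (root == null) return null;
        Set<Node> visited = new HashSet<>();
        Deque<Node> stack = new ArrayDeque<>();
        stack.push(root);
        visited.add(root);
        while (!stack.isEmpty()) {
          Node op = stack.pop();
          if (klazz.isInstance(op)) {
            return klazz.cast(op);
          }
          for (Node child : op.children()) {
            if (visited.add(child)) {  // add() is false if already seen
              stack.push(child);
            }
          }
        }
        return null;
      }
    }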

hive git commit: HIVE-17376: Upgrade snappy version to 1.1.4 (Aihua Xu, reviewed by Ashutosh Chauhan)

2017-11-09 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 3914a1b29 -> 499b13606


HIVE-17376: Upgrade snappy version to 1.1.4 (Aihua Xu, reviewed by Ashutosh 
Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/499b1360
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/499b1360
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/499b1360

Branch: refs/heads/master
Commit: 499b13606a949f5f8525134b66ee798975557eae
Parents: 3914a1b
Author: Aihua Xu 
Authored: Thu Nov 9 14:09:09 2017 -0800
Committer: Aihua Xu 
Committed: Thu Nov 9 14:09:09 2017 -0800

--
 pom.xml | 7 ++-
 1 file changed, 6 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/499b1360/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 0e9ae3d..dfb29ce 100644
--- a/pom.xml
+++ b/pom.xml
@@ -201,7 +201,7 @@
 2.11
 2.11.8
 1.1
-    <snappy.version>0.2</snappy.version>
+    <snappy.version>1.1.4</snappy.version>
 1.4
 1.5
 2.9.1
@@ -938,6 +938,11 @@
         <artifactId>jamon-runtime</artifactId>
         <version>${jamon-runtime.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.xerial.snappy</groupId>
+        <artifactId>snappy-java</artifactId>
+        <version>${snappy.version}</version>
+      </dependency>
 
   
 



[1/2] hive git commit: HIVE-15016: Run tests with Hadoop 3.0.0-beta1

2017-11-07 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 7ea12e731 -> a3e872822


http://git-wip-us.apache.org/repos/asf/hive/blob/a3e87282/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
--
diff --git 
a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
 
b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
index f8f18b3..120b4af 100644
--- 
a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
+++ 
b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java
@@ -34,12 +34,16 @@ import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -94,10 +98,17 @@ public class TestPigHBaseStorageHandler extends 
SkeletonHBaseTest {
 
   }
 
-  private void populateHBaseTable(String tName) throws IOException {
+  private void populateHBaseTable(String tName, Connection connection) throws 
IOException {
 List<Put> myPuts = generatePuts(tName);
-HTable table = new HTable(getHbaseConf(), Bytes.toBytes(tName));
-table.put(myPuts);
+Table table = null;
+try {
+  table = connection.getTable(TableName.valueOf(tName));
+  table.put(myPuts);
+} finally {
+  if (table != null) {
+table.close();
+  }
+}
   }
 
  private List<Put> generatePuts(String tableName) throws IOException {
@@ -107,8 +118,8 @@ public class TestPigHBaseStorageHandler extends 
SkeletonHBaseTest {
 myPuts = new ArrayList<Put>();
 for (int i = 1; i <=10; i++) {
   Put put = new Put(Bytes.toBytes(i));
-  put.add(FAMILY, QUALIFIER1, 1, Bytes.toBytes("textA-" + i));
-  put.add(FAMILY, QUALIFIER2, 1, Bytes.toBytes("textB-" + i));
+  put.addColumn(FAMILY, QUALIFIER1, 1, Bytes.toBytes("textA-" + i));
+  put.addColumn(FAMILY, QUALIFIER2, 1, Bytes.toBytes("textB-" + i));
   myPuts.add(put);
 }
 return myPuts;
@@ -165,8 +176,22 @@ public class TestPigHBaseStorageHandler extends 
SkeletonHBaseTest {
 
 CommandProcessorResponse responseThree = driver.run(tableQuery);
 
-HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
-boolean doesTableExist = hAdmin.tableExists(hbaseTableName);
+Connection connection = null;
+Admin hAdmin = null;
+boolean doesTableExist = false;
+try {
+  connection = ConnectionFactory.createConnection(getHbaseConf());
+  hAdmin = connection.getAdmin();
+  doesTableExist = hAdmin.tableExists(TableName.valueOf(hbaseTableName));
+} finally {
+  if (hAdmin != null) {
+hAdmin.close();
+  }
+  if (connection != null) {
+connection.close();
+  }
+}
+
 assertTrue(doesTableExist);
 
 PigServer server = new 
PigServer(ExecType.LOCAL,hcatConf.getAllProperties());
@@ -220,17 +245,39 @@ public class TestPigHBaseStorageHandler extends 
SkeletonHBaseTest {
 
 CommandProcessorResponse responseThree = driver.run(tableQuery);
 
-HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
-boolean doesTableExist = hAdmin.tableExists(hbaseTableName);
-assertTrue(doesTableExist);
+Connection connection = null;
+Admin hAdmin = null;
+Table table = null;
+ResultScanner scanner = null;
+boolean doesTableExist = false;
+try {
+  connection = ConnectionFactory.createConnection(getHbaseConf());
+  hAdmin = connection.getAdmin();
+  doesTableExist = hAdmin.tableExists(TableName.valueOf(hbaseTableName));
+
+  assertTrue(doesTableExist);
+
+  populateHBaseTable(hbaseTableName, connection);
 
-populateHBaseTable(hbaseTableName);
+  table = connection.getTable(TableName.valueOf(hbaseTableName));
+  Scan scan = new Scan();
+  scan.addFamily(Bytes.toBytes("testFamily"));
+  scanner = table.getScanner(scan);
+} finally {
+  if (scanner != null) {
+scanner.close();
+  }
+  if (table != null ) {
+table.close();
+  }
+  if (hAdmin != null) {
+hAdmin.close();
+  }
+  if (connection != null) {
+connection.close();
+  }
+}
 
-Configuration conf = new 
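
The explicit finally blocks above follow the new HBase client API, in which
Connection, Admin, Table and ResultScanner are all Closeable, so the same
checks can be written more compactly with try-with-resources. A sketch (it
assumes a reachable cluster configuration):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class TableExistsSketch {
      // Same existence check as in the patch, with resources closed in
      // reverse order automatically, even when tableExists throws.
      static boolean tableExists(Configuration conf, String name) throws IOException {
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Admin admin = connection.getAdmin()) {
          return admin.tableExists(TableName.valueOf(name));
        }
      }

      public static void main(String[] args) throws IOException {
        System.out.println(tableExists(HBaseConfiguration.create(), "hbase_table_0"));
      }
    }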

[2/2] hive git commit: HIVE-15016: Run tests with Hadoop 3.0.0-beta1

2017-11-07 Thread aihuaxu
HIVE-15016: Run tests with Hadoop 3.0.0-beta1


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a3e87282
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a3e87282
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a3e87282

Branch: refs/heads/master
Commit: a3e872822ece99eb14a5a12ff8bb191d9e014788
Parents: 7ea12e7
Author: Aihua Xu 
Authored: Sat Oct 28 16:51:06 2017 -0700
Committer: Aihua Xu 
Committed: Tue Nov 7 08:39:35 2017 -0800

--
 common/pom.xml  |   6 +
 hbase-handler/pom.xml   |  45 +++-
 .../apache/hadoop/hive/hbase/HBaseMetaHook.java | 224 +++
 .../hadoop/hive/hbase/HBaseRowSerializer.java   |   4 +-
 .../hadoop/hive/hbase/HBaseStorageHandler.java  | 209 ++---
 .../hive/hbase/HiveHBaseInputFormatUtil.java|  11 -
 .../hive/hbase/HiveHBaseTableInputFormat.java   |   1 -
 .../hive/hbase/HiveHBaseTableOutputFormat.java  |  21 +-
 .../hive/hbase/HiveHFileOutputFormat.java   |  12 +-
 .../hadoop/hive/hbase/ResultWritable.java   |   5 +-
 .../hadoop/hive/hbase/TestHBaseSerDe.java   | 120 +-
 .../hadoop/hive/hbase/TestLazyHBaseObject.java  |  79 +++
 .../hive/hbase/TestPutResultWritable.java   |   7 +-
 .../src/test/queries/positive/hbase_bulk.q  |   2 +-
 .../test/queries/positive/hbase_handler_bulk.q  |   2 +-
 .../results/positive/hbase_handler_bulk.q.out   |   4 +-
 .../apache/hive/hcatalog/common/HCatUtil.java   |   2 +-
 .../rcfile/TestRCFileMapReduceInputFormat.java  |  14 +-
 .../hive/hcatalog/templeton/TestWebHCatE2e.java |   4 +-
 .../hcatalog/templeton/mock/MockUriInfo.java|  11 +
 itests/hcatalog-unit/pom.xml|   6 +
 .../hive/hcatalog/hbase/ManyMiniCluster.java|  27 ++-
 .../hive/hcatalog/hbase/SkeletonHBaseTest.java  |  24 +-
 .../hbase/TestPigHBaseStorageHandler.java   | 206 +++--
 itests/hive-minikdc/pom.xml |   8 +-
 itests/hive-unit-hadoop2/pom.xml|   6 +
 itests/hive-unit/pom.xml|  31 ++-
 .../apache/hadoop/hive/ql/TestAcidOnTez.java|   3 +
 .../hive/ql/txn/compactor/TestCompactor.java|   6 +-
 .../jdbc/TestJdbcWithLocalClusterSpark.java |   5 +
 ...stMultiSessionsHS2WithLocalClusterSpark.java |   5 +
 itests/qtest-accumulo/pom.xml   |  37 ++-
 itests/qtest-spark/pom.xml  |  21 +-
 itests/qtest/pom.xml|   6 +
 itests/util/pom.xml |  19 +-
 .../hadoop/hive/hbase/HBaseQTestUtil.java   |  37 +--
 .../hadoop/hive/hbase/HBaseTestSetup.java   |  46 ++--
 llap-server/pom.xml |  61 -
 .../llap/shufflehandler/ShuffleHandler.java |  26 +--
 metastore/pom.xml   |   2 +-
 pom.xml |  77 ++-
 .../apache/hadoop/hive/ql/io/TestRCFile.java|  17 +-
 .../exim_00_unsupported_schema.q.out|   2 +-
 .../test/results/clientnegative/external1.q.out |   2 +-
 .../test/results/clientnegative/external2.q.out |   2 +-
 serde/pom.xml   |   6 +
 shims/0.23/pom.xml  |  12 +
 .../apache/hadoop/hive/shims/Hadoop23Shims.java |  43 ++--
 .../org/apache/hadoop/fs/ProxyFileSystem.java   |  12 +
 shims/scheduler/pom.xml |   8 +-
 spark-client/pom.xml|   4 +
 standalone-metastore/pom.xml|  18 +-
 .../hadoop/hive/metastore/utils/HdfsUtils.java  |  10 +-
 .../ptest2/src/main/resources/batch-exec.vm |   2 +-
 54 files changed, 1027 insertions(+), 553 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/a3e87282/common/pom.xml
--
diff --git a/common/pom.xml b/common/pom.xml
index 868e14d..aaeecc0 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -220,6 +220,12 @@
       <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-json</artifactId>
       <version>${dropwizard.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-databind</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>com.fasterxml.jackson.core</groupId>

http://git-wip-us.apache.org/repos/asf/hive/blob/a3e87282/hbase-handler/pom.xml
--
diff --git a/hbase-handler/pom.xml b/hbase-handler/pom.xml
index 7f57b77..59f9dd8 100644
--- a/hbase-handler/pom.xml
+++ b/hbase-handler/pom.xml
@@ -81,8 +81,23 @@
   org.apache.hbase
   hbase-server
   ${hbase.version}
-  
- 
+      <exclusions>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-log4j12</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>commmons-logging</groupId>
+          <artifactId>commons-logging</artifactId>

hive git commit: HIVE-17826: Error writing to RandomAccessFile after operation log is closed (Andrew Sherman, reviewed by Aihua Xu)

2017-10-30 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master df321c843 -> b57837abc


HIVE-17826: Error writing to RandomAccessFile after operation log is closed 
(Andrew Sherman, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/b57837ab
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/b57837ab
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/b57837ab

Branch: refs/heads/master
Commit: b57837abc0014a11d0db988b118ce266352ad1d2
Parents: df321c8
Author: Aihua Xu 
Authored: Mon Oct 30 13:39:12 2017 -0700
Committer: Aihua Xu 
Committed: Mon Oct 30 13:42:43 2017 -0700

--
 .../operation/TestOperationLoggingLayout.java   |  26 +++
 .../log/HushableRandomAccessFileAppender.java   | 191 +++
 .../hadoop/hive/ql/log/LogDivertAppender.java   |  20 +-
 .../hive/ql/log/LogDivertAppenderForTest.java   |  19 +-
 4 files changed, 233 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/b57837ab/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java
 
b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java
index 3c30069..8febe3e 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java
@@ -25,6 +25,7 @@ import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.log.HushableRandomAccessFileAppender;
 import org.apache.hadoop.hive.ql.log.LogDivertAppender;
 import org.apache.hadoop.hive.ql.log.LogDivertAppenderForTest;
 import org.apache.hive.jdbc.miniHS2.MiniHS2;
@@ -34,7 +35,9 @@ import org.apache.hive.service.cli.FetchType;
 import org.apache.hive.service.cli.OperationHandle;
 import org.apache.hive.service.cli.RowSet;
 import org.apache.hive.service.cli.SessionHandle;
+import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.AbstractLogEvent;
 import org.apache.logging.log4j.core.Appender;
 import org.apache.logging.log4j.core.LoggerContext;
 import org.apache.logging.log4j.core.appender.routing.RoutingAppender;
@@ -152,6 +155,16 @@ public class TestOperationLoggingLayout {
 Appender appender = appenderControl.getAppender();
 Assert.assertNotNull(msg + "could not find Appender for query id " + 
queryId + " from AppenderControl " + appenderControl, appender);
 Assert.assertEquals(msg + "Appender for query is in unexpected state", 
expectedStopped, appender.isStopped());
+Assert.assertTrue("Appender should be a 
HushableMutableRandomAccessAppender", appender instanceof 
HushableRandomAccessFileAppender);
+HushableRandomAccessFileAppender ra = (HushableRandomAccessFileAppender) 
appender;
+// Even if the appender is stopped it should not throw an exception when 
we log
+try {
+  ra.append(new LocalLogEvent());
+} catch (Exception e) {
+  e.printStackTrace();
+  Assert.fail("Caught exception while logging to an appender of class " + 
ra.getClass()
+  + " with stopped=" + ra.isStopped());
+}
   }
 
   private SessionHandle setupSession() throws Exception {
@@ -184,4 +197,17 @@ public class TestOperationLoggingLayout {
 
 return sessionHandle;
   }
+
+  /**
+   * A minimal LogEvent implementation for testing
+   */
+  private static class LocalLogEvent extends AbstractLogEvent {
+
+LocalLogEvent() {
+}
+
+@Override public Level getLevel() {
+  return Level.DEBUG;
+}
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/b57837ab/ql/src/java/org/apache/hadoop/hive/ql/log/HushableRandomAccessFileAppender.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/log/HushableRandomAccessFileAppender.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/log/HushableRandomAccessFileAppender.java
new file mode 100644
index 0000000..639d1d8
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/log/HushableRandomAccessFileAppender.java
@@ -0,0 +1,191 @@
+package org.apache.hadoop.hive.ql.log;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache license, Version 
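
The point of the test addition above: once an operation finishes, its
appender is stopped, yet stray log events can still arrive, and append() must
swallow them instead of throwing. A minimal sketch of that guard, independent
of the log4j2 plumbing:

    import java.io.IOException;
    import java.io.StringWriter;
    import java.io.Writer;

    public class HushableWriterSketch {
      private final Writer out;
      private volatile boolean closed;

      HushableWriterSketch(Writer out) { this.out = out; }

      void append(String line) {
        if (closed) {
          return;  // hushed: drop events that arrive after close
        }
        try {
          out.write(line);
        } catch (IOException e) {
          // a real appender would hand this to its error handler
        }
      }

      void close() throws IOException {
        closed = true;
        out.close();
      }

      public static void main(String[] args) throws IOException {
        HushableWriterSketch w = new HushableWriterSketch(new StringWriter());
        w.append("before close\n");
        w.close();
        w.append("after close\n");  // silently dropped, no exception
      }
    }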

hive git commit: HIVE-17831: HiveSemanticAnalyzerHookContext does not update the HiveOperation after sem.analyze() is called (Aihua Xu, reviewed by Sergio Pena)

2017-10-20 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/branch-2.1 6db9fd6e4 -> f9bd36004


HIVE-17831: HiveSemanticAnalyzerHookContext does not update the HiveOperation 
after sem.analyze() is called (Aihua Xu, reviewed by Sergio Pena)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f9bd3600
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f9bd3600
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f9bd3600

Branch: refs/heads/branch-2.1
Commit: f9bd360042311ef6677a6a1c879eb3d1bf59a95f
Parents: 6db9fd6
Author: Aihua Xu 
Authored: Wed Oct 18 17:01:20 2017 -0700
Committer: Aihua Xu 
Committed: Fri Oct 20 09:55:53 2017 -0700

--
 .../apache/hadoop/hive/hooks/TestHs2Hooks.java  | 36 
 .../HiveSemanticAnalyzerHookContextImpl.java|  1 +
 2 files changed, 37 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/f9bd3600/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
index dad516c..7f2517b 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
@@ -187,6 +187,7 @@ public class TestHs2Hooks {
 Properties connProp = new Properties();
 connProp.setProperty("user", System.getProperty("user.name"));
 connProp.setProperty("password", "");
+
 HiveConnection connection = new 
HiveConnection("jdbc:hive2://localhost:1/default", connProp);
 Statement stmt = connection.createStatement();
 stmt.executeQuery("show databases");
@@ -234,5 +235,40 @@ public class TestHs2Hooks {
 Assert.assertTrue(SemanticAnalysisHook.ipAddress,
 SemanticAnalysisHook.ipAddress.contains("127.0.0.1"));
 Assert.assertEquals("show tables", SemanticAnalysisHook.command);
+
+stmt.close();
+connection.close();
+  }
+
+  @Test
+  public void testPostAnalysisHookContexts() throws Throwable {
+Properties connProp = new Properties();
+connProp.setProperty("user", System.getProperty("user.name"));
+connProp.setProperty("password", "");
+
+HiveConnection connection = new 
HiveConnection("jdbc:hive2://localhost:1/default", connProp);
+Statement stmt = connection.createStatement();
+stmt.execute("create table testPostAnalysisHookContexts as select '3'");
+Throwable error = PostExecHook.error;
+if (error != null) {
+  throw error;
+}
+error = PreExecHook.error;
+if (error != null) {
+  throw error;
+}
+
+Assert.assertEquals(HiveOperation.CREATETABLE_AS_SELECT, 
SemanticAnalysisHook.commandType);
+
+error = SemanticAnalysisHook.preAnalyzeError;
+if (error != null) {
+  throw error;
+}
+error = SemanticAnalysisHook.postAnalyzeError;
+if (error != null) {
+  throw error;
+}
+stmt.close();
+connection.close();
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/f9bd3600/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
index 1cc38a8..e28dc47 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
@@ -58,6 +58,7 @@ public class HiveSemanticAnalyzerHookContextImpl implements 
HiveSemanticAnalyzer
   public void update(BaseSemanticAnalyzer sem) {
 this.inputs = sem.getInputs();
 this.outputs = sem.getOutputs();
+this.commandType = sem.getQueryState().getHiveOperation();
   }
 
   @Override



hive git commit: HIVE-17831: HiveSemanticAnalyzerHookContext does not update the HiveOperation after sem.analyze() is called (Aihua Xu, reviewed by Sergio Pena)

2017-10-20 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/branch-2.2 fbbffda3a -> 43d8647fd


HIVE-17831: HiveSemanticAnalyzerHookContext does not update the HiveOperation 
after sem.analyze() is called (Aihua Xu, reviewed by Sergio Pena)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/43d8647f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/43d8647f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/43d8647f

Branch: refs/heads/branch-2.2
Commit: 43d8647fd0dbe350638478a7f9e8415cead813d2
Parents: fbbffda
Author: Aihua Xu 
Authored: Wed Oct 18 17:01:20 2017 -0700
Committer: Aihua Xu 
Committed: Fri Oct 20 09:55:02 2017 -0700

--
 .../apache/hadoop/hive/hooks/TestHs2Hooks.java  | 36 
 .../HiveSemanticAnalyzerHookContextImpl.java|  1 +
 2 files changed, 37 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/43d8647f/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
index dad516c..7f2517b 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
@@ -187,6 +187,7 @@ public class TestHs2Hooks {
 Properties connProp = new Properties();
 connProp.setProperty("user", System.getProperty("user.name"));
 connProp.setProperty("password", "");
+
 HiveConnection connection = new 
HiveConnection("jdbc:hive2://localhost:1/default", connProp);
 Statement stmt = connection.createStatement();
 stmt.executeQuery("show databases");
@@ -234,5 +235,40 @@ public class TestHs2Hooks {
 Assert.assertTrue(SemanticAnalysisHook.ipAddress,
 SemanticAnalysisHook.ipAddress.contains("127.0.0.1"));
 Assert.assertEquals("show tables", SemanticAnalysisHook.command);
+
+stmt.close();
+connection.close();
+  }
+
+  @Test
+  public void testPostAnalysisHookContexts() throws Throwable {
+Properties connProp = new Properties();
+connProp.setProperty("user", System.getProperty("user.name"));
+connProp.setProperty("password", "");
+
+HiveConnection connection = new 
HiveConnection("jdbc:hive2://localhost:1/default", connProp);
+Statement stmt = connection.createStatement();
+stmt.execute("create table testPostAnalysisHookContexts as select '3'");
+Throwable error = PostExecHook.error;
+if (error != null) {
+  throw error;
+}
+error = PreExecHook.error;
+if (error != null) {
+  throw error;
+}
+
+Assert.assertEquals(HiveOperation.CREATETABLE_AS_SELECT, 
SemanticAnalysisHook.commandType);
+
+error = SemanticAnalysisHook.preAnalyzeError;
+if (error != null) {
+  throw error;
+}
+error = SemanticAnalysisHook.postAnalyzeError;
+if (error != null) {
+  throw error;
+}
+stmt.close();
+connection.close();
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/43d8647f/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
index 1cc38a8..e28dc47 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
@@ -58,6 +58,7 @@ public class HiveSemanticAnalyzerHookContextImpl implements 
HiveSemanticAnalyzer
   public void update(BaseSemanticAnalyzer sem) {
 this.inputs = sem.getInputs();
 this.outputs = sem.getOutputs();
+this.commandType = sem.getQueryState().getHiveOperation();
   }
 
   @Override



hive git commit: HIVE-17831: HiveSemanticAnalyzerHookContext does not update the HiveOperation after sem.analyze() is called (Aihua Xu, reviewed by Sergio Pena)

2017-10-20 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/branch-2 2106bf777 -> 417a7a654


HIVE-17831: HiveSemanticAnalyzerHookContext does not update the HiveOperation 
after sem.analyze() is called (Aihua Xu, reviewed by Sergio Pena)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/417a7a65
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/417a7a65
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/417a7a65

Branch: refs/heads/branch-2
Commit: 417a7a6549370b22fc965751ed5602376db447c0
Parents: 2106bf7
Author: Aihua Xu 
Authored: Wed Oct 18 17:01:20 2017 -0700
Committer: Aihua Xu 
Committed: Fri Oct 20 09:47:57 2017 -0700

--
 .../apache/hadoop/hive/hooks/TestHs2Hooks.java  | 36 
 .../HiveSemanticAnalyzerHookContextImpl.java|  1 +
 2 files changed, 37 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/417a7a65/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
index dad516c..7f2517b 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
@@ -187,6 +187,7 @@ public class TestHs2Hooks {
 Properties connProp = new Properties();
 connProp.setProperty("user", System.getProperty("user.name"));
 connProp.setProperty("password", "");
+
 HiveConnection connection = new 
HiveConnection("jdbc:hive2://localhost:1/default", connProp);
 Statement stmt = connection.createStatement();
 stmt.executeQuery("show databases");
@@ -234,5 +235,40 @@ public class TestHs2Hooks {
 Assert.assertTrue(SemanticAnalysisHook.ipAddress,
 SemanticAnalysisHook.ipAddress.contains("127.0.0.1"));
 Assert.assertEquals("show tables", SemanticAnalysisHook.command);
+
+stmt.close();
+connection.close();
+  }
+
+  @Test
+  public void testPostAnalysisHookContexts() throws Throwable {
+Properties connProp = new Properties();
+connProp.setProperty("user", System.getProperty("user.name"));
+connProp.setProperty("password", "");
+
+HiveConnection connection = new 
HiveConnection("jdbc:hive2://localhost:1/default", connProp);
+Statement stmt = connection.createStatement();
+stmt.execute("create table testPostAnalysisHookContexts as select '3'");
+Throwable error = PostExecHook.error;
+if (error != null) {
+  throw error;
+}
+error = PreExecHook.error;
+if (error != null) {
+  throw error;
+}
+
+Assert.assertEquals(HiveOperation.CREATETABLE_AS_SELECT, 
SemanticAnalysisHook.commandType);
+
+error = SemanticAnalysisHook.preAnalyzeError;
+if (error != null) {
+  throw error;
+}
+error = SemanticAnalysisHook.postAnalyzeError;
+if (error != null) {
+  throw error;
+}
+stmt.close();
+connection.close();
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/417a7a65/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
index 1cc38a8..e28dc47 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
@@ -58,6 +58,7 @@ public class HiveSemanticAnalyzerHookContextImpl implements 
HiveSemanticAnalyzer
   public void update(BaseSemanticAnalyzer sem) {
 this.inputs = sem.getInputs();
 this.outputs = sem.getOutputs();
+this.commandType = sem.getQueryState().getHiveOperation();
   }
 
   @Override



hive git commit: HIVE-17831: HiveSemanticAnalyzerHookContext does not update the HiveOperation after sem.analyze() is called (Aihua Xu, reviewed by Sergio Pena)

2017-10-20 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 76d880937 -> b9bc82258


HIVE-17831: HiveSemanticAnalyzerHookContext does not update the HiveOperation 
after sem.analyze() is called (Aihua Xu, reviewed by Sergio Pena)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/b9bc8225
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/b9bc8225
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/b9bc8225

Branch: refs/heads/master
Commit: b9bc822585e9c7482f93dfb71f1769a0f009245d
Parents: 76d8809
Author: Aihua Xu 
Authored: Wed Oct 18 17:01:20 2017 -0700
Committer: Aihua Xu 
Committed: Fri Oct 20 09:36:33 2017 -0700

--
 .../apache/hadoop/hive/hooks/TestHs2Hooks.java  | 36 
 .../HiveSemanticAnalyzerHookContextImpl.java|  1 +
 2 files changed, 37 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/b9bc8225/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
index dad516c..7f2517b 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
@@ -187,6 +187,7 @@ public class TestHs2Hooks {
 Properties connProp = new Properties();
 connProp.setProperty("user", System.getProperty("user.name"));
 connProp.setProperty("password", "");
+
 HiveConnection connection = new 
HiveConnection("jdbc:hive2://localhost:1/default", connProp);
 Statement stmt = connection.createStatement();
 stmt.executeQuery("show databases");
@@ -234,5 +235,40 @@ public class TestHs2Hooks {
 Assert.assertTrue(SemanticAnalysisHook.ipAddress,
 SemanticAnalysisHook.ipAddress.contains("127.0.0.1"));
 Assert.assertEquals("show tables", SemanticAnalysisHook.command);
+
+stmt.close();
+connection.close();
+  }
+
+  @Test
+  public void testPostAnalysisHookContexts() throws Throwable {
+Properties connProp = new Properties();
+connProp.setProperty("user", System.getProperty("user.name"));
+connProp.setProperty("password", "");
+
+HiveConnection connection = new 
HiveConnection("jdbc:hive2://localhost:1/default", connProp);
+Statement stmt = connection.createStatement();
+stmt.execute("create table testPostAnalysisHookContexts as select '3'");
+Throwable error = PostExecHook.error;
+if (error != null) {
+  throw error;
+}
+error = PreExecHook.error;
+if (error != null) {
+  throw error;
+}
+
+Assert.assertEquals(HiveOperation.CREATETABLE_AS_SELECT, 
SemanticAnalysisHook.commandType);
+
+error = SemanticAnalysisHook.preAnalyzeError;
+if (error != null) {
+  throw error;
+}
+error = SemanticAnalysisHook.postAnalyzeError;
+if (error != null) {
+  throw error;
+}
+stmt.close();
+connection.close();
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/b9bc8225/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
index 1cc38a8..e28dc47 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
@@ -58,6 +58,7 @@ public class HiveSemanticAnalyzerHookContextImpl implements 
HiveSemanticAnalyzer
   public void update(BaseSemanticAnalyzer sem) {
 this.inputs = sem.getInputs();
 this.outputs = sem.getOutputs();
+this.commandType = sem.getQueryState().getHiveOperation();
   }
 
   @Override



hive git commit: HIVE-17679: http-generic-click-jacking for WebHcat server (Aihua Xu, reviewed by Yongzhi Chen)

2017-10-05 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 26753ade2 -> 2902c7cc2


HIVE-17679: http-generic-click-jacking for WebHcat server (Aihua Xu, reviewed by 
Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/2902c7cc
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/2902c7cc
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/2902c7cc

Branch: refs/heads/master
Commit: 2902c7cc2ab20525139cafa8c594a09fb6c499f9
Parents: 26753ad
Author: Aihua Xu 
Authored: Tue Oct 3 09:44:07 2017 -0700
Committer: Aihua Xu 
Committed: Thu Oct 5 14:59:14 2017 -0700

--
 .../svr/src/main/config/webhcat-default.xml |  8 
 .../hive/hcatalog/templeton/AppConfig.java  |  1 +
 .../apache/hive/hcatalog/templeton/Main.java| 43 
 3 files changed, 52 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/2902c7cc/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
--
diff --git a/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml b/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
index fa8dbf8..2de8525 100644
--- a/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
+++ b/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
@@ -371,4 +371,12 @@
     in all PUT/POST requests, and rejects requests that do not have these.
     </description>
   </property>
+  <property>
+    <name>templeton.frame.options.filter</name>
+    <value>DENY</value>
+    <description>
+      X-Frame-Options is added in HTTP response header with this value to prevent
+      clickjacking attacks. Possible values are DENY, SAMEORIGIN, ALLOW-FROM uri.
+    </description>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/hive/blob/2902c7cc/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
--
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
index 0ea7d88..4232d4d 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
@@ -204,6 +204,7 @@ public class AppConfig extends Configuration {
   public static final String HIVE_EXTRA_FILES = "templeton.hive.extra.files";
 
   public static final String XSRF_FILTER_ENABLED = "templeton.xsrf.filter.enabled";
+  public static final String FRAME_OPTIONS_FILETER = "templeton.frame.options.filter";
 
   private static final Logger LOG = LoggerFactory.getLogger(AppConfig.class);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/2902c7cc/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
--
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
index 3ed3ece..02b9db9 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
@@ -53,7 +53,15 @@ import org.eclipse.jetty.xml.XmlConfiguration;
 import org.slf4j.bridge.SLF4JBridgeHandler;
 
 import javax.servlet.DispatcherType;
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.annotation.WebFilter;
 import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
 
 /**
  * The main executable that starts up and runs the Server.
@@ -213,6 +221,8 @@ public class Main {
   LOG.warn("XSRF filter disabled");
 }
 
+root.addFilter(makeFrameOptionFilter(), "/" + SERVLET_PATH + "/*", dispatches);
+
 // Connect Jersey
 ServletHolder h = new ServletHolder(new ServletContainer(makeJerseyConfig()));
 root.addServlet(h, "/" + SERVLET_PATH + "/*");
@@ -259,6 +269,39 @@ public class Main {
 return authFilter;
   }
 
+  public FilterHolder makeFrameOptionFilter() {
+FilterHolder frameOptionFilter = new FilterHolder(XFrameOptionsFilter.class);
+frameOptionFilter.setInitParameter(AppConfig.FRAME_OPTIONS_FILETER, conf.get(AppConfig.FRAME_OPTIONS_FILETER));
+return frameOptionFilter;
+  }
+
+  public static class XFrameOptionsFilter implements Filter {
+private final static String defaultMode = "DENY";
+
+private String mode = null;
+
+@Override
+public void init(FilterConfig filterConfig) throws ServletException {
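
The archived diff is cut off mid-method here, so the following is a hedged reconstruction sketch only, not the committed code: such a filter reads the configured mode (the key mirrors AppConfig.FRAME_OPTIONS_FILETER, with DENY as the assumed fallback) and stamps the header on every response.

    import java.io.IOException;

    import javax.servlet.Filter;
    import javax.servlet.FilterChain;
    import javax.servlet.FilterConfig;
    import javax.servlet.ServletException;
    import javax.servlet.ServletRequest;
    import javax.servlet.ServletResponse;
    import javax.servlet.http.HttpServletResponse;

    public class XFrameOptionsFilterSketch implements Filter {
      private static final String CONF_KEY = "templeton.frame.options.filter";
      private static final String DEFAULT_MODE = "DENY";

      private String mode = DEFAULT_MODE;

      @Override
      public void init(FilterConfig filterConfig) throws ServletException {
        String configured = filterConfig.getInitParameter(CONF_KEY);
        if (configured != null && !configured.isEmpty()) {
          mode = configured;
        }
      }

      @Override
      public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
          throws IOException, ServletException {
        // Every response through the servlet path gets the header, which
        // instructs browsers not to render the page inside a frame.
        ((HttpServletResponse) response).setHeader("X-Frame-Options", mode);
        chain.doFilter(request, response);
      }

      @Override
      public void destroy() {
      }
    }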

hive git commit: HIVE-17544: Provide classname info for function authorization (Aihua Xu, reviewed by Sergio Pena)

2017-10-04 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master addeab8d0 -> 31077be9b


HIVE-17544: Provide classname info for function authorization (Aihua Xu, reviewed by Sergio Pena)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/31077be9
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/31077be9
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/31077be9

Branch: refs/heads/master
Commit: 31077be9b90832acc5eb1641690955945ed5a3a1
Parents: addeab8
Author: Aihua Xu 
Authored: Fri Sep 29 15:57:27 2017 -0700
Committer: Aihua Xu 
Committed: Wed Oct 4 13:06:04 2017 -0700

--
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  4 +++-
 .../org/apache/hadoop/hive/ql/hooks/Entity.java | 17 -
 .../hadoop/hive/ql/hooks/WriteEntity.java   |  5 ++--
 .../hive/ql/parse/FunctionSemanticAnalyzer.java |  9 ---
 .../plugin/HiveAuthorizerImpl.java  |  1 -
 .../plugin/HivePrivilegeObject.java | 25 
 6 files changed, 46 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/31077be9/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index f01edf8..1943c6d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -1103,6 +1103,7 @@ public class Driver implements CommandProcessor {
   String objName = null;
   List<String> partKeys = null;
   List<String> columns = null;
+  String className = null;
   switch(privObject.getType()){
   case DATABASE:
 dbname = privObject.getDatabase().getName();
@@ -1122,6 +1123,7 @@ public class Driver implements CommandProcessor {
   dbname = privObject.getDatabase().getName();
 }
 objName = privObject.getFunctionName();
+className = privObject.getClassName();
 break;
   case DUMMYPARTITION:
   case PARTITION:
@@ -1135,7 +1137,7 @@ public class Driver implements CommandProcessor {
   }
   HivePrivObjectActionType actionType = AuthorizationUtils.getActionType(privObject);
   HivePrivilegeObject hPrivObject = new HivePrivilegeObject(privObjType, dbname, objName,
-  partKeys, columns, actionType, null);
+  partKeys, columns, actionType, null, className);
   hivePrivobjs.add(hPrivObject);
 }
 return hivePrivobjs;

http://git-wip-us.apache.org/repos/asf/hive/blob/31077be9/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
index 820e4e2..c3c4512 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
@@ -76,6 +76,11 @@ public class Entity implements Serializable {
   private String stringObject;
 
   /**
+   * The class name for a function
+   */
+  private String className;
+
+  /**
* This is derived from t and p, but we need to serialize this field to make
* sure Entity.hashCode() does not need to recursively read into t and p.
*/
@@ -139,6 +144,14 @@ public class Entity implements Serializable {
 this.d = d;
   }
 
+  public String getClassName() {
+return this.className;
+  }
+
+  public void setClassName(String className) {
+this.className = className;
+  }
+
   public String getFunctionName() {
 if (typ == Type.FUNCTION) {
   return stringObject;
@@ -254,15 +267,17 @@ public class Entity implements Serializable {
   * Create an entity representing an object with given name, database namespace and type
   * @param database - database namespace
   * @param strObj - object name as string
+   * @param className - function class name
   * @param type - the entity type. this constructor only supports FUNCTION type currently
   */
-  public Entity(Database database, String strObj, Type type) {
+  public Entity(Database database, String strObj, String className, Type type) {
 if (type != Type.FUNCTION) {
   throw new IllegalArgumentException("This constructor is supported only for type:"
   + Type.FUNCTION);
 }
 this.database = database;
 this.stringObject = strObj;
+this.className = className;
 this.typ = type;
 this.complete = true;
 name = computeName();
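
A hedged usage sketch of the new four-argument FUNCTION constructor; the database, function name, and UDF class values below are illustrative and not from the patch.

    import org.apache.hadoop.hive.metastore.api.Database;
    import org.apache.hadoop.hive.ql.hooks.Entity;
    import org.apache.hadoop.hive.ql.hooks.Entity.Type;

    public class FunctionEntityDemo {
      public static void main(String[] args) {
        Database db = new Database();
        db.setName("default");
        // The extra argument carries the UDF's implementing class, so
        // authorization hooks can inspect it.
        Entity fn = new Entity(db, "my_upper", "com.example.udf.MyUpper", Type.FUNCTION);
        System.out.println(fn.getFunctionName() + " -> " + fn.getClassName());
      }
    }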

http://git-wip-us.apache.org/repos/asf/hive/blob/31077be9/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
--
diff --git 

[2/2] hive git commit: HIVE-17619: Exclude avatica-core.jar dependency from avatica shaded jar (Aihua Xu, reviewed by Yongzhi Chen)

2017-09-29 Thread aihuaxu
HIVE-17619: Exclude avatica-core.jar dependency from avatica shaded jar (Aihua Xu, reviewed by Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/2216dad5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/2216dad5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/2216dad5

Branch: refs/heads/master
Commit: 2216dad543c8a4f5ee732b386fb7c39ab48d0760
Parents: 8382008
Author: Aihua Xu 
Authored: Wed Sep 27 09:51:28 2017 -0700
Committer: Aihua Xu 
Committed: Fri Sep 29 10:58:25 2017 -0700

--
 ql/pom.xml | 4 
 1 file changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/2216dad5/ql/pom.xml
--
diff --git a/ql/pom.xml b/ql/pom.xml
index 3dfb421..59c4260 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -402,6 +402,10 @@
   avatica
   ${avatica.version}
   
+
+  org.apache.calcite.avatica
+  avatica-core
+
 



[1/2] hive git commit: HIVE-17624: MapredLocalTask running in separate JVM could throw ClassNotFoundException (Aihua Xu, reviewed by Yongzhi Chen)

2017-09-29 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 11beadff3 -> 2216dad54


HIVE-17624: MapredLocalTask running in separate JVM could throw ClassNotFoundException (Aihua Xu, reviewed by Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8382008e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8382008e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8382008e

Branch: refs/heads/master
Commit: 8382008ee00d8910b920a0c7a6d73f2f3400cb8a
Parents: 11beadf
Author: Aihua Xu 
Authored: Wed Sep 27 14:12:27 2017 -0700
Committer: Aihua Xu 
Committed: Fri Sep 29 09:48:25 2017 -0700

--
 .../hadoop/hive/ql/exec/mr/MapredLocalTask.java |7 +-
 .../queries/clientpositive/mapjoin_addjar.q |   13 +-
 .../results/clientpositive/mapjoin_addjar.q.out | 1045 +-
 .../clientpositive/spark/mapjoin_addjar.q.out   | 1045 +-
 4 files changed, 2079 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/8382008e/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
index c5d4f9a..9dfefee 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
@@ -71,6 +71,7 @@ import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.apache.hadoop.hive.ql.session.SessionState.ResourceType;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -184,12 +185,12 @@ public class MapredLocalTask extends Task<MapredLocalWork> implements Serializab
 IOUtils.closeQuietly(out);
   }
 
-
   String isSilent = "true".equalsIgnoreCase(System.getProperty("test.silent")) ? "-nolog" : "";
 
-  String jarCmd;
+  String libJars = ExecDriver.getResource(conf, ResourceType.JAR);
+  String libJarsOption = StringUtils.isEmpty(libJars) ? " " : " -libjars " + libJars + " ";
 
-  jarCmd = hiveJar + " " + ExecDriver.class.getName();
+  String jarCmd = hiveJar + " " + ExecDriver.class.getName() + libJarsOption;
   String hiveConfArgs = ExecDriver.generateCmdLine(conf, ctx);
   String cmdLine = hadoopExec + " jar " + jarCmd + " -localtask -plan " + planPath.toString()
       + " " + isSilent + " " + hiveConfArgs;

http://git-wip-us.apache.org/repos/asf/hive/blob/8382008e/ql/src/test/queries/clientpositive/mapjoin_addjar.q
--
diff --git a/ql/src/test/queries/clientpositive/mapjoin_addjar.q b/ql/src/test/queries/clientpositive/mapjoin_addjar.q
index f56f074..1d71512 100644
--- a/ql/src/test/queries/clientpositive/mapjoin_addjar.q
+++ b/ql/src/test/queries/clientpositive/mapjoin_addjar.q
@@ -2,13 +2,14 @@
 set hive.auto.convert.join=true;
 set hive.auto.convert.join.use.nonstaged=false;
 
-add jar ${system:maven.local.repository}/org/apache/hive/hcatalog/hive-hcatalog-core/${system:hive.version}/hive-hcatalog-core-${system:hive.version}.jar;
+ADD JAR ${system:maven.local.repository}/org/apache/hive/hive-it-test-serde/${system:hive.version}/hive-it-test-serde-${system:hive.version}.jar;
+
+CREATE TABLE t1(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE;
+LOAD DATA LOCAL INPATH '../../data/files/kv1_cb.txt' INTO TABLE t1;
+
+select * from t1 l join t1 r on l.key =r.key;
 
-CREATE TABLE t1 (a string, b string)
-ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'
-;
-LOAD DATA LOCAL INPATH "../../data/files/sample.json" INTO TABLE t1;
-select * from src join t1 on src.key =t1.a;
 drop table t1;
+DELETE JAR ${system:maven.local.repository}/org/apache/hive/hive-it-test-serde/${system:hive.version}/hive-it-test-serde-${system:hive.version}.jar;
 set hive.auto.convert.join=false;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/8382008e/ql/src/test/results/clientpositive/mapjoin_addjar.q.out
--
diff --git a/ql/src/test/results/clientpositive/mapjoin_addjar.q.out b/ql/src/test/results/clientpositive/mapjoin_addjar.q.out
index c107818..c73b985 100644
--- a/ql/src/test/results/clientpositive/mapjoin_addjar.q.out
+++ 

hive git commit: Revert "HIVE-17373: Upgrade some dependency versions (Aihua Xu, reviewed by Naveen Gangam)"

2017-09-26 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 09c60fd3a -> ccd02c837


Revert "HIVE-17373: Upgrade some dependency versions (Aihua Xu, reviewed by 
Naveen Gangam)"

This reverts commit 5f13f286cfddc4ed0a06eb40b7c69a5ca69ab300.


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ccd02c83
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ccd02c83
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ccd02c83

Branch: refs/heads/master
Commit: ccd02c83780c938069edec179d8d06500b80e779
Parents: 09c60fd
Author: Aihua Xu 
Authored: Tue Sep 26 13:52:31 2017 -0700
Committer: Aihua Xu 
Committed: Tue Sep 26 13:52:31 2017 -0700

--
 .../mapjoin/MapJoinMultiKeyBench.java   | 94 +++-
 .../mapjoin/MapJoinOneLongKeyBench.java | 94 +++-
 .../mapjoin/MapJoinOneStringKeyBench.java   | 94 +++-
 pom.xml |  6 +-
 .../hadoop/hive/ql/log/LogDivertAppender.java   | 25 +++---
 .../hive/ql/log/LogDivertAppenderForTest.java   | 18 ++--
 6 files changed, 304 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/ccd02c83/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinMultiKeyBench.java
--
diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinMultiKeyBench.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinMultiKeyBench.java
index 7a5b721..f183bb5 100644
--- a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinMultiKeyBench.java
+++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinMultiKeyBench.java
@@ -18,15 +18,107 @@
 
 package org.apache.hive.benchmark.vectorization.mapjoin;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.CompilationOpContext;
+import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.persistence.MapJoinBytesTableContainer;
+import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectSerDeContext;
+import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer;
+import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe;
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorBatchDebug;
+import org.apache.hadoop.hive.ql.exec.vector.VectorColumnOutputMapping;
+import org.apache.hadoop.hive.ql.exec.vector.VectorColumnSourceMapping;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow;
+import org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator;
+import org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOuterFilteredOperator;
+import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
+import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator;
+import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType;
+import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType.GenerateCategory;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.ColAndCol;
+import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestConfig;
 import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestConfig.MapJoinTestImplementation;
+import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestData;
+import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestDescription;
+import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VectorMapJoinFastMultiKeyHashMap;
+import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VectorMapJoinFastTableContainer;
+import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VerifyFastRow;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;

hive git commit: HIVE-17429: Hive JDBC doesn't return rows when querying Impala (Zach Amsden, reviewed by Aihua Xu)

2017-09-07 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master c52aba1a6 -> 8482c5fbe


HIVE-17429: Hive JDBC doesn't return rows when querying Impala (Zach Amsden, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8482c5fb
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8482c5fb
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8482c5fb

Branch: refs/heads/master
Commit: 8482c5fbe9d2b9a62132e0e94d5578a5eaa22fbd
Parents: c52aba1
Author: Aihua Xu 
Authored: Thu Sep 7 17:17:14 2017 -0700
Committer: Aihua Xu 
Committed: Thu Sep 7 17:17:14 2017 -0700

--
 jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/8482c5fb/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
--
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
index b743b46..c6bd41f 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
@@ -263,7 +263,7 @@ public class HiveStatement implements java.sql.Statement {
 TGetOperationStatusResp status = waitForOperationToComplete();
 
 // The query should be completed by now
-if (!status.isHasResultSet()) {
+if (!status.isHasResultSet() && !stmtHandle.isHasResultSet()) {
   return false;
 }
 resultSet =  new HiveQueryResultSet.Builder(this).setClient(client).setSessionHandle(sessHandle)



hive git commit: HIVE-17373: Upgrade some dependency versions (Aihua Xu, reviewed by Naveen Gangam)

2017-09-05 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 9a09b78b6 -> 5f13f286c


HIVE-17373: Upgrade some dependency versions (Aihua Xu, reviewed by Naveen Gangam)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5f13f286
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5f13f286
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5f13f286

Branch: refs/heads/master
Commit: 5f13f286cfddc4ed0a06eb40b7c69a5ca69ab300
Parents: 9a09b78
Author: Aihua Xu 
Authored: Tue Aug 22 16:05:58 2017 -0700
Committer: Aihua Xu 
Committed: Tue Sep 5 11:23:49 2017 -0700

--
 .../mapjoin/MapJoinMultiKeyBench.java   | 94 +---
 .../mapjoin/MapJoinOneLongKeyBench.java | 94 +---
 .../mapjoin/MapJoinOneStringKeyBench.java   | 94 +---
 pom.xml |  6 +-
 .../hadoop/hive/ql/log/LogDivertAppender.java   | 25 +++---
 .../hive/ql/log/LogDivertAppenderForTest.java   | 18 ++--
 6 files changed, 27 insertions(+), 304 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/5f13f286/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinMultiKeyBench.java
--
diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinMultiKeyBench.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinMultiKeyBench.java
index f183bb5..7a5b721 100644
--- a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinMultiKeyBench.java
+++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinMultiKeyBench.java
@@ -18,107 +18,15 @@
 
 package org.apache.hive.benchmark.vectorization.mapjoin;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import org.apache.commons.lang.ArrayUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.CompilationOpContext;
-import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
-import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.exec.persistence.MapJoinBytesTableContainer;
-import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectSerDeContext;
-import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer;
-import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe;
-import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorBatchDebug;
-import org.apache.hadoop.hive.ql.exec.vector.VectorColumnOutputMapping;
-import org.apache.hadoop.hive.ql.exec.vector.VectorColumnSourceMapping;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow;
-import org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator;
-import org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOuterFilteredOperator;
-import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
-import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator;
-import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType;
-import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType.GenerateCategory;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.ColAndCol;
-import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestConfig;
 import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestConfig.MapJoinTestImplementation;
-import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestData;
-import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestDescription;
-import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VectorMapJoinFastMultiKeyHashMap;
-import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VectorMapJoinFastTableContainer;
-import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VerifyFastRow;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
-import 

hive git commit: HIVE-17357: Plugin jars are not properly added for LocalHiveSparkClient (Aihua Xu, reviewed by Xuefu Zhang)

2017-08-21 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master d012f8653 -> 78efa7a69


HIVE-17357: Plugin jars are not properly added for LocalHiveSparkClient (Aihua Xu, reviewed by Xuefu Zhang)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/78efa7a6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/78efa7a6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/78efa7a6

Branch: refs/heads/master
Commit: 78efa7a69b965f39a6f0883863052acbb0bc824c
Parents: d012f86
Author: Aihua Xu 
Authored: Fri Aug 18 10:10:42 2017 -0700
Committer: Aihua Xu 
Committed: Mon Aug 21 10:25:26 2017 -0700

--
 .../org/apache/hadoop/hive/ql/exec/spark/LocalHiveSparkClient.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/78efa7a6/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/LocalHiveSparkClient.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/LocalHiveSparkClient.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/LocalHiveSparkClient.java
index beeafd0..72f2f91 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/LocalHiveSparkClient.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/LocalHiveSparkClient.java
@@ -178,7 +178,7 @@ public class LocalHiveSparkClient implements HiveSparkClient {
 for (BaseWork work : sparkWork.getAllWork()) {
   work.configureJobConf(jobConf);
 }
-addJars(conf.get(MR_JAR_PROPERTY));
+addJars(jobConf.get(MR_JAR_PROPERTY));
 
 // add added files
 String addedFiles = Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE);



[hive] Git Push Summary

2017-08-17 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master_backport [deleted] f756bf9c5


hive git commit: HIVE-17336: Missing class 'org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat' from Hive on Spark when inserting into hbase based table (Aihua Xu, reviewed by Vihang Karajgaonkar)

2017-08-17 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 002626d5e -> f756bf9c5


HIVE-17336: Missing class 'org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat' from Hive on Spark when inserting into hbase based table (Aihua Xu, reviewed by Vihang Karajgaonkar)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f756bf9c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f756bf9c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f756bf9c

Branch: refs/heads/master
Commit: f756bf9c59b2bbb3e9de8d73f2fb203a0c388550
Parents: 002626d
Author: Aihua Xu 
Authored: Wed Aug 16 15:53:55 2017 -0700
Committer: Aihua Xu 
Committed: Thu Aug 17 08:33:15 2017 -0700

--
 .../apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java   | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/f756bf9c/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
index 7091cea..78d5ff2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
@@ -238,7 +238,8 @@ public class RemoteHiveSparkClient implements HiveSparkClient {
 for (BaseWork work : sparkWork.getAllWork()) {
   work.configureJobConf(jobConf);
 }
-addJars(conf.get(MR_JAR_PROPERTY));
+addJars(jobConf.get(MR_JAR_PROPERTY));
+
 // remove the location of container tokens
 conf.unset(MR_CREDENTIALS_LOCATION_PROPERTY);
 // add added files



hive git commit: HIVE-17336: Missing class 'org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat' from Hive on Spark when inserting into hbase based table (Aihua Xu, reviewed by Vihang Karajgaonkar)

2017-08-17 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master_backport [created] f756bf9c5


HIVE-17336: Missing class 'org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat' from Hive on Spark when inserting into hbase based table (Aihua Xu, reviewed by Vihang Karajgaonkar)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f756bf9c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f756bf9c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f756bf9c

Branch: refs/heads/master_backport
Commit: f756bf9c59b2bbb3e9de8d73f2fb203a0c388550
Parents: 002626d
Author: Aihua Xu 
Authored: Wed Aug 16 15:53:55 2017 -0700
Committer: Aihua Xu 
Committed: Thu Aug 17 08:33:15 2017 -0700

--
 .../apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java   | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/f756bf9c/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
index 7091cea..78d5ff2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
@@ -238,7 +238,8 @@ public class RemoteHiveSparkClient implements HiveSparkClient {
 for (BaseWork work : sparkWork.getAllWork()) {
   work.configureJobConf(jobConf);
 }
-addJars(conf.get(MR_JAR_PROPERTY));
+addJars(jobConf.get(MR_JAR_PROPERTY));
+
 // remove the location of container tokens
 conf.unset(MR_CREDENTIALS_LOCATION_PROPERTY);
 // add added files



hive git commit: HIVE-16873: Remove Thread Cache From Logging (BELUGA BEHR reviewed by Aihua Xu)

2017-08-14 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 4f042cc46 -> 06d9a6bf1


HIVE-16873: Remove Thread Cache From Logging (BELUGA BEHR reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/06d9a6bf
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/06d9a6bf
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/06d9a6bf

Branch: refs/heads/master
Commit: 06d9a6bf142534db772f457c4d4a0c2a0c9cfe2e
Parents: 4f042cc
Author: Aihua Xu 
Authored: Mon Aug 14 13:37:57 2017 -0700
Committer: Aihua Xu 
Committed: Mon Aug 14 13:37:57 2017 -0700

--
 .../hadoop/hive/metastore/HiveMetaStore.java  | 18 ++
 1 file changed, 2 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/06d9a6bf/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
--
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 5b353c4..5405058 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -295,20 +295,9 @@ public class HiveMetaStore extends ThriftHiveMetastore {
 
 private static ExecutorService threadPool;
 
-public static final String AUDIT_FORMAT =
-"ugi=%s\t" + // ugi
-"ip=%s\t" + // remote IP
-"cmd=%s\t"; // command
 public static final Logger auditLog = LoggerFactory.getLogger(
 HiveMetaStore.class.getName() + ".audit");
-private static final ThreadLocal<Formatter> auditFormatter =
-new ThreadLocal<Formatter>() {
-  @Override
-  protected Formatter initialValue() {
-return new Formatter(new StringBuilder(AUDIT_FORMAT.length() * 4));
-  }
-};
-
+
 private static final void logAuditEvent(String cmd) {
   if (cmd == null) {
 return;
@@ -320,16 +309,13 @@ public class HiveMetaStore extends ThriftHiveMetastore {
   } catch (Exception ex) {
 throw new RuntimeException(ex);
   }
-  final Formatter fmt = auditFormatter.get();
-  ((StringBuilder) fmt.out()).setLength(0);
 
   String address = getIPAddress();
   if (address == null) {
 address = "unknown-ip-addr";
   }
 
-  auditLog.info(fmt.format(AUDIT_FORMAT, ugi.getUserName(),
-  address, cmd).toString());
+  auditLog.info("ugi={}\tip={}\tcmd={}\t", ugi.getUserName(), address, cmd);
 }
 
 private static String getIPAddress() {
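
The rewritten logAuditEvent above leans on SLF4J's parameterized, thread-safe formatting, which is what made the per-thread Formatter cache removable. A minimal, self-contained sketch of the same pattern (logger name and values below are illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class AuditLogDemo {
      private static final Logger AUDIT = LoggerFactory.getLogger("metastore.audit");

      static void logAuditEvent(String user, String ip, String cmd) {
        // SLF4J substitutes the {} placeholders lazily and is safe to call
        // from many threads at once, so no per-thread Formatter is needed.
        AUDIT.info("ugi={}\tip={}\tcmd={}\t", user, ip, cmd);
      }

      public static void main(String[] args) {
        logAuditEvent("hive", "127.0.0.1", "get_table : db=default tbl=t1");
      }
    }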



hive git commit: HIVE-17155: findConfFile() in HiveConf.java has some issues with the conf path (Aihua Xu, reviewed by Yongzhi Chen)

2017-07-25 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 3c7fb2a97 -> 4af249581


HIVE-17155: findConfFile() in HiveConf.java has some issues with the conf path (Aihua Xu, reviewed by Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4af24958
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4af24958
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4af24958

Branch: refs/heads/master
Commit: 4af249581a4a25bd92c9c2dd9a09e590a0cf2831
Parents: 3c7fb2a
Author: Aihua Xu 
Authored: Tue Jul 25 08:14:40 2017 -0700
Committer: Aihua Xu 
Committed: Tue Jul 25 08:14:40 2017 -0700

--
 .../src/java/org/apache/hadoop/hive/conf/HiveConf.java  | 12 +---
 1 file changed, 9 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/4af24958/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 3cf76d0..f5e5974 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -161,19 +161,25 @@ public class HiveConf extends Configuration {
   result = checkConfigFile(new File(confPath, name));
   if (result == null) {
 String homePath = System.getenv("HIVE_HOME");
-String nameInConf = "conf" + File.pathSeparator + name;
+String nameInConf = "conf" + File.separator + name;
 result = checkConfigFile(new File(homePath, nameInConf));
 if (result == null) {
   URI jarUri = null;
   try {
-jarUri = HiveConf.class.getProtectionDomain().getCodeSource().getLocation().toURI();
+// Handle both file:// and jar:<url>!{entry} in the case of shaded hive libs
+URL sourceUrl = HiveConf.class.getProtectionDomain().getCodeSource().getLocation();
+jarUri = sourceUrl.getProtocol().equalsIgnoreCase("jar") ? new URI(sourceUrl.getPath()) : sourceUrl.toURI();
   } catch (Throwable e) {
 if (l4j.isInfoEnabled()) {
   l4j.info("Cannot get jar URI", e);
 }
 System.err.println("Cannot get jar URI: " + e.getMessage());
   }
-  result = checkConfigFile(new File(new File(jarUri).getParentFile(), nameInConf));
+  // From the jar file, the parent is /lib folder
+  File parent = new File(jarUri).getParentFile();
+  if (parent != null) {
+    result = checkConfigFile(new File(parent.getParentFile(), nameInConf));
+  }
 }
   }
 }
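
To make the jar: handling above concrete, here is a hedged, standalone illustration; the jar path is an assumed example, and a plain (non-shaded) deployment would yield a file: URL instead.

    import java.net.URI;
    import java.net.URL;

    public class JarUriDemo {
      public static void main(String[] args) throws Exception {
        // Assumed example code-source location for a shaded deployment.
        URL sourceUrl = new URL("jar:file:/opt/hive/lib/hive-common.jar!/");
        URI jarUri = sourceUrl.getProtocol().equalsIgnoreCase("jar")
            ? new URI(sourceUrl.getPath())   // unwraps the jar: layer
            : sourceUrl.toURI();
        System.out.println(jarUri);          // file:/opt/hive/lib/hive-common.jar!/
        // The conf file is then looked up relative to the jar's parent
        // directories, e.g. <parent-of-lib>/conf/<name>.
      }
    }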



hive git commit: HIVE-17149: Hdfs directory is not cleared if partition creation failed on HMS (Barna Zsombor Klara, reviewed by Aihua Xu)

2017-07-24 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master a282e5d94 -> 549ac12dd


HIVE-17149: Hdfs directory is not cleared if partition creation failed on HMS (Barna Zsombor Klara, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/549ac12d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/549ac12d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/549ac12d

Branch: refs/heads/master
Commit: 549ac12dd3d41215aeb31d6d247a1c86f2ed85de
Parents: a282e5d
Author: Aihua Xu 
Authored: Mon Jul 24 16:21:07 2017 -0700
Committer: Aihua Xu 
Committed: Mon Jul 24 16:21:07 2017 -0700

--
 ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java | 10 ++
 1 file changed, 10 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/549ac12d/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 02cea7c..d661f10 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -1781,6 +1781,16 @@ public class Hive {
   // In that case, we want to retry with alterPartition.
   LOG.debug("Caught AlreadyExistsException, trying to alter partition 
instead");
   setStatsPropAndAlterPartition(hasFollowingStatsTask, tbl, newTPart);
+} catch (Exception e) {
+  try {
+final FileSystem newPathFileSystem = 
newPartPath.getFileSystem(this.getConf());
+boolean isAutoPurge = 
"true".equalsIgnoreCase(tbl.getProperty("auto.purge"));
+final FileStatus status = 
newPathFileSystem.getFileStatus(newPartPath);
+Hive.trashFiles(newPathFileSystem, new FileStatus[] {status}, 
this.getConf(), isAutoPurge);
+  } catch (IOException io) {
+LOG.error("Could not delete partition directory contents after 
failed partition creation: ", io);
+  }
+  throw e;
 }
   } else {
 setStatsPropAndAlterPartition(hasFollowingStatsTask, tbl, newTPart);



hive git commit: HIVE-16911: Upgrade groovy version to 2.4.11 (Aihua Xu, reviewed by Yongzhi Chen)

2017-07-10 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 7f5460d8e -> 45a1ec87e


HIVE-16911: Upgrade groovy version to 2.4.11 (Aihua Xu, reviewed by Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/45a1ec87
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/45a1ec87
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/45a1ec87

Branch: refs/heads/master
Commit: 45a1ec87e84fc3803337232c981726b9ae7bae64
Parents: 7f5460d
Author: Aihua Xu 
Authored: Mon Jul 10 14:51:54 2017 -0700
Committer: Aihua Xu 
Committed: Mon Jul 10 14:51:54 2017 -0700

--
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/45a1ec87/pom.xml
--
diff --git a/pom.xml b/pom.xml
index f9fae59..04cbf11 100644
--- a/pom.xml
+++ b/pom.xml
@@ -139,7 +139,7 @@
 
 <dropwizard-metrics-hadoop-metrics2-reporter.version>0.1.2</dropwizard-metrics-hadoop-metrics2-reporter.version>
 <druid.version>0.10.0</druid.version>
 <guava.version>14.0.1</guava.version>
-<groovy.version>2.4.4</groovy.version>
+<groovy.version>2.4.11</groovy.version>
 <h2database.version>1.3.166</h2database.version>
 <hadoop.version>2.8.0</hadoop.version>
 <hadoop.bin.path>${basedir}/${hive.path.to.root}/testutils/hadoop</hadoop.bin.path>



hive git commit: HIVE-17048: Pass HiveOperation info to HiveSemanticAnalyzerHook through HiveSemanticAnalyzerHookContext (Aihua Xu, reviewed by Mohit Sabharwal)

2017-07-07 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 453f44cdb -> 6b87af747


HIVE-17048: Pass HiveOperation info to HiveSemanticAnalyzerHook through HiveSemanticAnalyzerHookContext (Aihua Xu, reviewed by Mohit Sabharwal)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6b87af74
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6b87af74
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6b87af74

Branch: refs/heads/master
Commit: 6b87af7477219a3b62acb4b8ff4e614d45816d68
Parents: 453f44c
Author: Aihua Xu 
Authored: Wed Jul 5 17:38:36 2017 -0700
Committer: Aihua Xu 
Committed: Fri Jul 7 10:46:43 2017 -0700

--
 .../hive/minikdc/TestHs2HooksWithMiniKdc.java   | 28 +++-
 .../apache/hadoop/hive/hooks/TestHs2Hooks.java  | 27 ---
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  1 +
 .../parse/HiveSemanticAnalyzerHookContext.java  |  5 
 .../HiveSemanticAnalyzerHookContextImpl.java| 12 +
 5 files changed, 50 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/6b87af74/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
--
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
index 4fabe47..1cd0ee8 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
@@ -24,7 +24,7 @@ import java.sql.Statement;
 import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -114,16 +114,16 @@ public class TestHs2HooksWithMiniKdc {
   throw error;
 }
 
-Assert.assertNotNull(PostExecHook.ipAddress, "ipaddress is null");
-Assert.assertNotNull(PostExecHook.userName, "userName is null");
-Assert.assertNotNull(PostExecHook.operation , "operation is null");
+Assert.assertNotNull("ipaddress is null", PostExecHook.ipAddress);
+Assert.assertNotNull("userName is null", PostExecHook.userName);
+Assert.assertNotNull("operation is null", PostExecHook.operation);
 Assert.assertEquals(MiniHiveKdc.HIVE_TEST_USER_1, PostExecHook.userName);
 Assert.assertTrue(PostExecHook.ipAddress, PostExecHook.ipAddress.contains("127.0.0.1"));
 Assert.assertEquals("SHOWTABLES", PostExecHook.operation);
 
-Assert.assertNotNull(PreExecHook.ipAddress, "ipaddress is null");
-Assert.assertNotNull(PreExecHook.userName, "userName is null");
-Assert.assertNotNull(PreExecHook.operation , "operation is null");
+Assert.assertNotNull("ipaddress is null", PreExecHook.ipAddress);
+Assert.assertNotNull("userName is null", PreExecHook.userName);
+Assert.assertNotNull("operation is null", PreExecHook.operation);
 Assert.assertEquals(MiniHiveKdc.HIVE_TEST_USER_1, PreExecHook.userName);
 Assert.assertTrue(PreExecHook.ipAddress, PreExecHook.ipAddress.contains("127.0.0.1"));
 Assert.assertEquals("SHOWTABLES", PreExecHook.operation);
@@ -137,12 +137,14 @@ public class TestHs2HooksWithMiniKdc {
   throw error;
 }
 
-Assert.assertNotNull(SemanticAnalysisHook.ipAddress,
-"semantic hook context ipaddress is null");
-Assert.assertNotNull(SemanticAnalysisHook.userName,
-"semantic hook context userName is null");
-Assert.assertNotNull(SemanticAnalysisHook.command ,
-"semantic hook context command is null");
+Assert.assertNotNull("semantic hook context ipaddress is null",
+SemanticAnalysisHook.ipAddress);
+Assert.assertNotNull("semantic hook context userName is null",
+SemanticAnalysisHook.userName);
+Assert.assertNotNull("semantic hook context command is null",
+SemanticAnalysisHook.command);
+Assert.assertNotNull("semantic hook context commandType is null",
+SemanticAnalysisHook.commandType);
 Assert.assertTrue(SemanticAnalysisHook.ipAddress,
 SemanticAnalysisHook.ipAddress.contains("127.0.0.1"));
 Assert.assertEquals("show tables", SemanticAnalysisHook.command);

http://git-wip-us.apache.org/repos/asf/hive/blob/6b87af74/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
index 

hive git commit: HIVE-16902: investigate "failed to remove operation log" errors (Aihua Xu, reviewed by Yongzhi Chen)

2017-06-16 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master f6be1a3c0 -> cb6bf8871


HIVE-16902: investigate "failed to remove operation log" errors (Aihua Xu, reviewed by Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/cb6bf887
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/cb6bf887
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/cb6bf887

Branch: refs/heads/master
Commit: cb6bf8871ebaed2756d734368f644fbd4f6ad5d1
Parents: f6be1a3
Author: Aihua Xu 
Authored: Wed Jun 14 17:38:32 2017 -0400
Committer: Aihua Xu 
Committed: Fri Jun 16 10:39:02 2017 -0400

--
 .../org/apache/hadoop/hive/ql/session/OperationLog.java | 6 +++---
 .../org/apache/hive/service/cli/operation/Operation.java| 9 +
 2 files changed, 4 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/cb6bf887/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java b/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java
index ded4b18..1d1fc4e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java
@@ -51,7 +51,7 @@ public class OperationLog {
 NONE, EXECUTION, PERFORMANCE, VERBOSE, UNKNOWN
   }
 
-  public OperationLog(String name, File file, HiveConf hiveConf) throws FileNotFoundException {
+  public OperationLog(String name, File file, HiveConf hiveConf) {
 operationName = name;
 logFile = new LogFile(file);
 
@@ -133,7 +133,7 @@ public class OperationLog {
 private BufferedReader in;
 private volatile boolean isRemoved;
 
-LogFile(File file) throws FileNotFoundException {
+LogFile(File file) {
   this.file = file;
   isRemoved = false;
 }
@@ -157,7 +157,7 @@ public class OperationLog {
 if (in != null) {
   in.close();
 }
-if (!isRemoved && removeLog) {
+if (!isRemoved && removeLog && file.exists()) {
   FileUtils.forceDelete(file);
   isRemoved = true;
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/cb6bf887/service/src/java/org/apache/hive/service/cli/operation/Operation.java
--
diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/Operation.java 
b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
index 4e78551..8d453d5 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/Operation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
@@ -211,14 +211,7 @@ public abstract class Operation {
   isOperationLogEnabled = true;
 
   // create OperationLog object with above log file
-  try {
-operationLog = new OperationLog(opHandle.toString(), operationLogFile, parentSession.getHiveConf());
-  } catch (FileNotFoundException e) {
-LOG.warn("Unable to instantiate OperationLog object for operation: " +
-opHandle, e);
-isOperationLogEnabled = false;
-return;
-  }
+  operationLog = new OperationLog(opHandle.toString(), operationLogFile, parentSession.getHiveConf());
 }
   }
 



hive git commit: HIVE-16846: TestJdbcWithMiniHS2#testHttpHeaderSize test case is not testing in HTTP mode (Aihua Xu, reviewed by YongZhi Chen)

2017-06-09 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 4a8eaa571 -> 44653b57c


HIVE-16846: TestJdbcWithMiniHS2#testHttpHeaderSize test case is not testing in HTTP mode (Aihua Xu, reviewed by YongZhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/44653b57
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/44653b57
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/44653b57

Branch: refs/heads/master
Commit: 44653b57c2576f76359f2cf2ff2ef883b087959b
Parents: 4a8eaa5
Author: Aihua Xu 
Authored: Wed Jun 7 15:26:46 2017 -0400
Committer: Aihua Xu 
Committed: Fri Jun 9 10:51:14 2017 -0400

--
 .../apache/hive/jdbc/TestJdbcWithMiniHS2.java   | 36 +---
 1 file changed, 24 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/44653b57/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
index fc2cb08..4a9af80 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
@@ -195,9 +195,17 @@ public class TestJdbcWithMiniHS2 {
   }
 
   private static void startMiniHS2(HiveConf conf) throws Exception {
+startMiniHS2(conf, false);
+  }
+
+  private static void startMiniHS2(HiveConf conf, boolean httpMode) throws Exception {
 conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
 conf.setBoolVar(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED, false);
-miniHS2 = new MiniHS2.Builder().withConf(conf).cleanupLocalDirOnStartup(false).build();
+MiniHS2.Builder builder = new MiniHS2.Builder().withConf(conf).cleanupLocalDirOnStartup(false);
+if (httpMode) {
+  builder = builder.withHTTPTransport();
+}
+miniHS2 = builder.build();
 Map<String, String> confOverlay = new HashMap<String, String>();
 miniHS2.start(confOverlay);
   }
@@ -923,10 +931,9 @@ public class TestJdbcWithMiniHS2 {
 // Stop HiveServer2
 stopMiniHS2();
 HiveConf conf = new HiveConf();
-conf.set("hive.server2.transport.mode", "http");
-conf.setInt("hive.server2.thrift.http.request.header.size", 1024);
-conf.setInt("hive.server2.thrift.http.response.header.size", 1024);
-startMiniHS2(conf);
+conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_REQUEST_HEADER_SIZE, 1024);
+conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_RESPONSE_HEADER_SIZE, 1024);
+startMiniHS2(conf, true);
 
 // Username is added to the request header
 String userName = StringUtils.leftPad("*", 100);
@@ -945,25 +952,31 @@ public class TestJdbcWithMiniHS2 {
 // This should fail with given HTTP response code 413 in error message, since header is more
 // than the configured header size
 userName = StringUtils.leftPad("*", 2000);
+Exception headerException = null;
 try {
+  conn = null;
   conn = getConnection(miniHS2.getJdbcURL(testDbName), userName, "password");
 } catch (Exception e) {
-  assertTrue("Header exception thrown", e != null);
-  assertTrue(e.getMessage().contains("HTTP Response code: 413"));
+  headerException = e;
 } finally {
   if (conn != null) {
 conn.close();
   }
+
+  assertTrue("Header exception should be thrown", headerException != null);
+  assertTrue("Incorrect HTTP Response:" + headerException.getMessage(),
+  headerException.getMessage().contains("HTTP Response code: 413"));
 }
 
 // Stop HiveServer2 to increase header size
 stopMiniHS2();
-conf.setInt("hive.server2.thrift.http.request.header.size", 3000);
-conf.setInt("hive.server2.thrift.http.response.header.size", 3000);
+conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_REQUEST_HEADER_SIZE, 3000);
+conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_RESPONSE_HEADER_SIZE, 3000);
 startMiniHS2(conf);
 
 // This should now go fine, since we increased the configured header size
 try {
+  conn = null;
   conn = getConnection(miniHS2.getJdbcURL(testDbName), userName, "password");
 } catch (Exception e) {
   fail("Not expecting exception: " + e);
@@ -986,10 +999,9 @@ public class TestJdbcWithMiniHS2 {
 // Stop HiveServer2
 stopMiniHS2();
 HiveConf conf = new HiveConf();
-conf.set("hive.server2.transport.mode", "http");
 // Set server's idle timeout to a very low value
-conf.set("hive.server2.thrift.http.max.idle.time", "5");
-startMiniHS2(conf);

hive git commit: HIVE-16769: Possible hive service startup failure due to the existing file /tmp/stderr (Aihua Xu, reviewed by Naveen Gangam)

2017-06-02 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 3f5e65288 -> 9f976


HIVE-16769: Possible hive service startup failure due to the existing file /tmp/stderr (Aihua Xu, reviewed by Naveen Gangam)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9f976fff
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9f976fff
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9f976fff

Branch: refs/heads/master
Commit: 9f976343a891c9d3e8d5fa0e70b2c622ff1a
Parents: 3f5e652
Author: Aihua Xu 
Authored: Wed May 31 17:08:29 2017 -0400
Committer: Aihua Xu 
Committed: Fri Jun 2 09:21:39 2017 -0400

--
 bin/ext/version.sh |  2 +-
 bin/hive   | 20 ++--
 2 files changed, 3 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/9f976fff/bin/ext/version.sh
--
diff --git a/bin/ext/version.sh b/bin/ext/version.sh
index b6a237b..92fee3d 100644
--- a/bin/ext/version.sh
+++ b/bin/ext/version.sh
@@ -27,7 +27,7 @@ version () {
 
   # hadoop 20 or newer - skip the aux_jars option and hiveconf
   CLASS=org.apache.hive.common.util.HiveVersionInfo
-  exec $HADOOP jar $JAR $CLASS 2>> ${STDERR}
+  exec $HADOOP jar $JAR $CLASS 2>&2
 }
 
 version_help () {

http://git-wip-us.apache.org/repos/asf/hive/blob/9f976fff/bin/hive
--
diff --git a/bin/hive b/bin/hive
index a7671c3..4a908e1 100755
--- a/bin/hive
+++ b/bin/hive
@@ -25,8 +25,6 @@ bin=`cd "$bin"; pwd`
 
 . "$bin"/hive-config.sh
 
-TMP_USER_DIR="/tmp/${USER}"
-STDERR="${TMP_USER_DIR}/stderr"
 SERVICE=""
 HELP=""
 SKIP_HBASECP=false
@@ -227,24 +225,10 @@ if [ ! -f ${HADOOP} ]; then
   exit 4;
 fi
 
-if [ ! -d ${TMP_USER_DIR} ]; then
-  mkdir -p ${TMP_USER_DIR} 2> /dev/null
-  if [ $? -ne 0 ]; then
-STDERR="/dev/tty"
-  fi
-fi
-
-if [ "${STDERR}" != "/dev/null" ] && [ ! -f ${STDERR} ]; then
-  touch ${STDERR} 2> /dev/null
-  if [ $? -ne 0 ]; then
-STDERR="/dev/tty"
-  fi
-fi
-
 if [ "$SKIP_HADOOPVERSION" = false ]; then
   # Make sure we're using a compatible version of Hadoop
   if [ "x$HADOOP_VERSION" == "x" ]; then
-  HADOOP_VERSION=$($HADOOP version 2>> ${STDERR} | awk -F"\t" '/Hadoop/ {print $0}' | cut -d' ' -f 2);
+  HADOOP_VERSION=$($HADOOP version 2>&2 | awk -F"\t" '/Hadoop/ {print $0}' | cut -d' ' -f 2);
   fi
   
   # Save the regex to a var to workaround quoting incompatabilities
@@ -295,7 +279,7 @@ if [ "$SKIP_HBASECP" = false ]; then
   if [[ -n $HBASE_BIN ]] ; then
 # exclude ZK, PB, and Guava (See HIVE-2055)
 # depends on HBASE-8438 (hbase-0.94.14+, hbase-0.96.1+) for `hbase 
mapredcp` command
-for x in $($HBASE_BIN mapredcp 2>> ${STDERR} | tr ':' '\n') ; do
+for x in $($HBASE_BIN mapredcp 2>&2 | tr ':' '\n') ; do
   if [[ $x == *zookeeper* || $x == *protobuf-java* || $x == *guava* ]] ; then
 continue
   fi



hive git commit: HIVE-16625: Extra '\0' characters in the output, when SeparatedValuesOutputFormat is used and the quoting is disabled (Peter Vary, reviewed by Zoltan Haindrich and Aihua Xu)

2017-05-31 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master f5b225021 -> 396cba458


HIVE-16625: Extra '\0' characters in the output, when SeparatedValuesOutputFormat is used and the quoting is disabled (Peter Vary, reviewed by Zoltan Haindrich and Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/396cba45
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/396cba45
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/396cba45

Branch: refs/heads/master
Commit: 396cba45822ab9e1519504cdde82df1d89b60c8f
Parents: f5b2250
Author: Aihua Xu 
Authored: Wed May 31 13:00:04 2017 -0400
Committer: Aihua Xu 
Committed: Wed May 31 13:01:47 2017 -0400

--
 .../beeline/SeparatedValuesOutputFormat.java|   11 +-
 .../test/resources/testconfiguration.properties |1 +
 .../java/org/apache/hive/beeline/QFile.java |1 +
 .../insert_overwrite_local_directory_1.q.out| 1769 ++
 4 files changed, 1780 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/396cba45/beeline/src/java/org/apache/hive/beeline/SeparatedValuesOutputFormat.java
--
diff --git a/beeline/src/java/org/apache/hive/beeline/SeparatedValuesOutputFormat.java b/beeline/src/java/org/apache/hive/beeline/SeparatedValuesOutputFormat.java
index 66d9fd0..9c24a20 100644
--- a/beeline/src/java/org/apache/hive/beeline/SeparatedValuesOutputFormat.java
+++ b/beeline/src/java/org/apache/hive/beeline/SeparatedValuesOutputFormat.java
@@ -26,6 +26,8 @@ import java.io.IOException;
 import java.io.StringWriter;
 
 import org.apache.hadoop.io.IOUtils;
+import org.supercsv.encoder.CsvEncoder;
+import org.supercsv.encoder.SelectiveCsvEncoder;
 import org.supercsv.io.CsvListWriter;
 import org.supercsv.prefs.CsvPreference;
 
@@ -40,10 +42,15 @@ class SeparatedValuesOutputFormat implements OutputFormat {
 
   SeparatedValuesOutputFormat(BeeLine beeLine, char separator) {
 this.beeLine = beeLine;
-unquotedCsvPreference = new CsvPreference.Builder('\0', separator, "").build();
+unquotedCsvPreference = getUnquotedCsvPreference(separator);
 quotedCsvPreference = new CsvPreference.Builder('"', separator, "").build();
   }
 
+  private static CsvPreference getUnquotedCsvPreference(char delimiter) {
+CsvEncoder noEncoder = new SelectiveCsvEncoder();
+return new CsvPreference.Builder('\0', delimiter, "").useEncoder(noEncoder).build();
+  }
+
   private void updateCsvPreference() {
 if (beeLine.getOpts().getOutputFormat().equals("dsv")) {
   // check whether delimiter changed by user
@@ -54,7 +61,7 @@ class SeparatedValuesOutputFormat implements OutputFormat {
 // "" is passed as the end of line symbol in following function, as
 // beeline itself adds newline
 if (isQuotingDisabled()) {
-  unquotedCsvPreference = new CsvPreference.Builder('\0', newDel, "").build();
+  unquotedCsvPreference = getUnquotedCsvPreference(newDel);
 } else {
   quotedCsvPreference = new CsvPreference.Builder('"', newDel, "").build();
 }
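
A minimal, self-contained sketch of the Super CSV behaviour this change relies on (assuming the library's documented semantics: a SelectiveCsvEncoder constructed with no column indexes encodes nothing, so the '\0' quote character is never written; the demo class name is made up):

    import java.io.IOException;
    import java.io.StringWriter;

    import org.supercsv.encoder.SelectiveCsvEncoder;
    import org.supercsv.io.CsvListWriter;
    import org.supercsv.prefs.CsvPreference;

    public class UnquotedCsvDemo {
      public static void main(String[] args) throws IOException {
        // Old preference: '\0' is registered as the quote character and the
        // default encoder may still quote values, emitting stray NUL bytes.
        CsvPreference oldPref = new CsvPreference.Builder('\0', ',', "").build();
        // Fixed preference: a no-op encoder, so nothing is ever quoted.
        CsvPreference fixedPref = new CsvPreference.Builder('\0', ',', "")
            .useEncoder(new SelectiveCsvEncoder()).build();

        System.out.println(render(oldPref));   // delimiter-bearing value gets \0-quoted
        System.out.println(render(fixedPref)); // written verbatim, no \0 characters
      }

      private static String render(CsvPreference pref) throws IOException {
        StringWriter out = new StringWriter();
        try (CsvListWriter writer = new CsvListWriter(out, pref)) {
          writer.write("a,b", "c"); // first column contains the delimiter
        }
        return out.toString();
      }
    }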

http://git-wip-us.apache.org/repos/asf/hive/blob/396cba45/itests/src/test/resources/testconfiguration.properties
--
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 47a13c9..f4a53df 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -759,6 +759,7 @@ encrypted.query.files=encryption_join_unencrypted_tbl.q,\
 beeline.positive.include=create_merge_compressed.q,\
   drop_with_concurrency.q,\
   escape_comments.q,\
+  insert_overwrite_local_directory_1.q,\
   mapjoin2.q,\
   materialized_view_create_rewrite.q,\
   smb_mapjoin_1.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/396cba45/itests/util/src/main/java/org/apache/hive/beeline/QFile.java
--
diff --git a/itests/util/src/main/java/org/apache/hive/beeline/QFile.java b/itests/util/src/main/java/org/apache/hive/beeline/QFile.java
index 0d00a9b..e70ac38 100644
--- a/itests/util/src/main/java/org/apache/hive/beeline/QFile.java
+++ b/itests/util/src/main/java/org/apache/hive/beeline/QFile.java
@@ -304,6 +304,7 @@ public final class QFile {
 .addFilter(".*/tmp/.*\n", MASK_PATTERN)
 .addFilter(".*file:.*\n", MASK_PATTERN)
 .addFilter(".*file\\..*\n", MASK_PATTERN)
+.addFilter(".*Output:.*/data/files/.*\n", MASK_PATTERN)
 .addFilter(".*CreateTime.*\n", MASK_PATTERN)
 .addFilter(".*transient_lastDdlTime.*\n", 

hive git commit: HIVE-16468: BeeLineDriver should be able to run tests against an externally created cluster (Peter Vary, reviewed by Vihang Karajgaonkar and Aihua Xu)

2017-05-19 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 3be1eedb1 -> 4045ca399


HIVE-16468: BeeLineDriver should be able to run tests against an externally 
created cluster (Peter Vary, reviewed by Vihang Karajgaonkar and Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4045ca39
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4045ca39
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4045ca39

Branch: refs/heads/master
Commit: 4045ca399228a8cac8005131cacee95568e6999a
Parents: 3be1eed
Author: Aihua Xu 
Authored: Fri May 19 11:42:28 2017 -0400
Committer: Aihua Xu 
Committed: Fri May 19 11:47:20 2017 -0400

--
 .../hive/cli/control/CoreBeeLineDriver.java | 58 
 1 file changed, 34 insertions(+), 24 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/4045ca39/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
index 2be83ca..9dfc253 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
@@ -20,7 +20,9 @@ package org.apache.hadoop.hive.cli.control;
 import static org.junit.Assert.fail;
 
 import com.google.common.base.Strings;
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfUtil;
 import org.apache.hadoop.hive.ql.QTestProcessExecResult;
 import org.apache.hadoop.hive.ql.hooks.PreExecutePrinter;
 import org.apache.hive.beeline.ConvertedOutputFile.Converter;
@@ -52,19 +54,40 @@ public class CoreBeeLineDriver extends CliAdapter {
   private QFileClientBuilder clientBuilder;
   private QFileBuilder fileBuilder;
 
-//  private static QTestUtil.QTestSetup miniZKCluster = null;
-
   public CoreBeeLineDriver(AbstractCliConfig testCliConfig) {
 super(testCliConfig);
 queryDirectory = new File(testCliConfig.getQueryDirectory());
 logDirectory = new File(testCliConfig.getLogDir());
 resultsDirectory = new File(testCliConfig.getResultsDir());
-testDataDirectory = new File(hiveRootDirectory, "data" + File.separator + "files");
+String testDataDirectoryName = System.getProperty("test.data.dir");
+if (testDataDirectoryName == null) {
+  testDataDirectory = new File(hiveRootDirectory, "data" + File.separator + "files");
+} else {
+  testDataDirectory = new File(testDataDirectoryName);
+}
 testScriptDirectory = new File(hiveRootDirectory, "data" + File.separator + "scripts");
 initScript = new File(testScriptDirectory, testCliConfig.getInitScript());
 cleanupScript = new File(testScriptDirectory, testCliConfig.getCleanupScript());
   }
 
+  private static MiniHS2 createMiniServer() throws Exception {
+HiveConf hiveConf = new HiveConf();
+// We do not need Zookeeper at the moment
+hiveConf.set(HiveConf.ConfVars.HIVE_LOCK_MANAGER.varname,
+"org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager");
+
+MiniHS2 miniHS2 = new MiniHS2.Builder()
+.withConf(hiveConf)
+.cleanupLocalDirOnStartup(true)
+.build();
+
+miniHS2.start(new HashMap());
+
+System.err.println(HiveConfUtil.dumpConfig(miniHS2.getHiveConf()));
+
+return miniHS2;
+  }
+
   @Override
   @BeforeClass
   public void beforeClass() throws Exception {
@@ -77,27 +100,17 @@ public class CoreBeeLineDriver extends CliAdapter {
   rewriteSourceTables = false;
 }
 
-HiveConf hiveConf = new HiveConf();
-// We do not need Zookeeper at the moment
-hiveConf.set(HiveConf.ConfVars.HIVE_LOCK_MANAGER.varname,
-"org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager");
-
-// But if we need later we can enable it with this, or create one ourself
-//miniZKCluster = new QTestUtil.QTestSetup();
-//miniZKCluster.preTest(hiveConf);
-
-hiveConf.logVars(System.err);
-System.err.flush();
-
-miniHS2 = new MiniHS2.Builder().withConf(hiveConf).cleanupLocalDirOnStartup(true).build();
-
-miniHS2.start(new HashMap());
+String beeLineUrl = System.getProperty("test.beeline.url");
+if (StringUtils.isEmpty(beeLineUrl)) {
+  miniHS2 = createMiniServer();
+  beeLineUrl = miniHS2.getJdbcURL();
+}
 
 clientBuilder = new QFileClientBuilder()
 .setJdbcDriver("org.apache.hive.jdbc.HiveDriver")
-.setJdbcUrl(miniHS2.getJdbcURL())
-.setUsername("user")
-

hive git commit: HIVE-16324: Truncate table should not work when EXTERNAL property of table is true (Vihang Karajgaonkar via Aihua Xu, reviewed by Thejas M Nair)

2017-05-18 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/branch-2 446d3b86f -> d7a45b756


HIVE-16324: Truncate table should not work when EXTERNAL property of table is 
true (Vihang Karajgaonkar via Aihua Xu, reviewed by Thejas M Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d7a45b75
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d7a45b75
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d7a45b75

Branch: refs/heads/branch-2
Commit: d7a45b75623d1df9d212da5342cb95fdd55577e6
Parents: 446d3b8
Author: Aihua Xu 
Authored: Mon May 15 16:08:50 2017 -0400
Committer: Aihua Xu 
Committed: Thu May 18 10:29:43 2017 -0400

--
 .../apache/hadoop/hive/metastore/ObjectStore.java  |  6 +++---
 .../clientnegative/truncate_table_failure5.q   |  5 +
 .../clientnegative/truncate_table_failure6.q   |  5 +
 .../clientnegative/truncate_table_failure5.q.out   | 17 +
 .../clientnegative/truncate_table_failure6.q.out   | 17 +
 5 files changed, 47 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/d7a45b75/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
--
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 4aa4448..3e96d4a 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -1147,7 +1147,7 @@ public class ObjectStore implements RawStore, Configurable {
 return mConstraints;
   }
 
-@Override
+  @Override
   public Table getTable(String dbName, String tableName) throws MetaException {
 boolean commited = false;
 Table tbl = null;
@@ -1426,7 +1426,7 @@ public class ObjectStore implements RawStore, Configurable {
   // for backwards compatibility with old metastore persistence
   if (mtbl.getViewOriginalText() != null) {
 tableType = TableType.VIRTUAL_VIEW.toString();
-  } else if ("TRUE".equals(mtbl.getParameters().get("EXTERNAL"))) {
+  } else if (Boolean.parseBoolean(mtbl.getParameters().get("EXTERNAL"))) {
 tableType = TableType.EXTERNAL_TABLE.toString();
   } else {
 tableType = TableType.MANAGED_TABLE.toString();
@@ -1458,7 +1458,7 @@ public class ObjectStore implements RawStore, Configurable {
 // If the table has property EXTERNAL set, update table type
 // accordingly
 String tableType = tbl.getTableType();
-boolean isExternal = "TRUE".equals(tbl.getParameters().get("EXTERNAL"));
+boolean isExternal = Boolean.parseBoolean(tbl.getParameters().get("EXTERNAL"));
 if (TableType.MANAGED_TABLE.toString().equals(tableType)) {
   if (isExternal) {
 tableType = TableType.EXTERNAL_TABLE.toString();

http://git-wip-us.apache.org/repos/asf/hive/blob/d7a45b75/ql/src/test/queries/clientnegative/truncate_table_failure5.q
--
diff --git a/ql/src/test/queries/clientnegative/truncate_table_failure5.q b/ql/src/test/queries/clientnegative/truncate_table_failure5.q
new file mode 100644
index 000..efabb34
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/truncate_table_failure5.q
@@ -0,0 +1,5 @@
+create table external1 (col1 int, col2 string);
+alter table external1 set tblproperties ('EXTERNAL'='true');
+
+-- truncate on a non-managed table should throw exception
+truncate table external1;

http://git-wip-us.apache.org/repos/asf/hive/blob/d7a45b75/ql/src/test/queries/clientnegative/truncate_table_failure6.q
--
diff --git a/ql/src/test/queries/clientnegative/truncate_table_failure6.q b/ql/src/test/queries/clientnegative/truncate_table_failure6.q
new file mode 100644
index 000..b73ec63
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/truncate_table_failure6.q
@@ -0,0 +1,5 @@
+create table external1 (col1 int, col2 string);
+alter table external1 set tblproperties ('EXTERNAL'='TRUE');
+
+-- truncate on external table should throw exception. Property value of 'EXTERNAL' is not case sensitive
+truncate table external1;
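
The two negative tests cover both casings of the property value; the behavioural difference the ObjectStore change depends on is plain JDK semantics and can be checked standalone (hypothetical demo class):

    public class ExternalFlagDemo {
      public static void main(String[] args) {
        String[] values = { "TRUE", "true", "True", "false", null };
        for (String v : values) {
          // "TRUE".equals(v) is case-sensitive; Boolean.parseBoolean(v)
          // is case-insensitive and treats null as false.
          System.out.printf("%-5s equals=%b parseBoolean=%b%n",
              v, "TRUE".equals(v), Boolean.parseBoolean(v));
        }
      }
    }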

http://git-wip-us.apache.org/repos/asf/hive/blob/d7a45b75/ql/src/test/results/clientnegative/truncate_table_failure5.q.out
--
diff --git a/ql/src/test/results/clientnegative/truncate_table_failure5.q.out b/ql/src/test/results/clientnegative/truncate_table_failure5.q.out
new file mode 100644
index 000..d2f625d
--- /dev/null
+++ 

hive git commit: HIVE-16647: Improve the validation output to make the output to stderr and stdout more consistent (Aihua Xu, reviewed by Yongzhi Chen)

2017-05-16 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master c19981c66 -> 2bcbd29e8


HIVE-16647: Improve the validation output to make the output to stderr and 
stdout more consistent (Aihua Xu, reviewed by Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/2bcbd29e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/2bcbd29e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/2bcbd29e

Branch: refs/heads/master
Commit: 2bcbd29e8d2fbc740c762997663cc40853892add
Parents: c19981c
Author: Aihua Xu 
Authored: Thu May 11 10:11:29 2017 -0400
Committer: Aihua Xu 
Committed: Tue May 16 09:14:49 2017 -0400

--
 .../org/apache/hive/beeline/HiveSchemaTool.java | 46 ++--
 1 file changed, 22 insertions(+), 24 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/2bcbd29e/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
--
diff --git a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
index 7ab927a..86faeb0 100644
--- a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
+++ b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
@@ -191,7 +191,7 @@ public class HiveSchemaTool {
 try(Statement stmt = metastoreConn.createStatement();
 ResultSet res = stmt.executeQuery(versionQuery)) {
   if (!res.next()) {
-throw new HiveMetaException("Could not find version info in metastore VERSION table");
+throw new HiveMetaException("Could not find version info in metastore VERSION table.");
   }
   String currentSchemaVersion = res.getString(1);
   if (checkDuplicatedVersion && res.next()) {
@@ -244,7 +244,6 @@ public class HiveSchemaTool {
 }
 if (numOfInvalid > 0) {
   isValid = false;
-  System.err.println("Total number of invalid DB locations is: "+ numOfInvalid);
 }
 return isValid;
   }
@@ -305,7 +304,6 @@ public class HiveSchemaTool {
 }
 if (numOfInvalid > 0) {
   isValid = false;
-  System.err.println("Total number of invalid TABLE locations is: "+ numOfInvalid);
 }
 return isValid;
   }
@@ -367,7 +365,6 @@ public class HiveSchemaTool {
 }
 if (numOfInvalid > 0) {
   isValid = false;
-  System.err.println("Total number of invalid PARTITION locations is: "+ numOfInvalid);
 }
 return isValid;
   }
@@ -384,11 +381,11 @@ public class HiveSchemaTool {
 }
 
 if (getDbCommandParser(dbType).needsQuotedIdentifier()) {
-  skewedColLoc = "select t.\"TBL_NAME\", t.\"TBL_ID\", sk.\"STRING_LIST_ID_KID\", sk.\"LOCATION\" from \"TBLS\" t, \"SDS\" s, \"SKEWED_COL_VALUE_LOC_MAP\" sk "
-   + "where sk.\"SD_ID\" = s.\"SD_ID\" and s.\"SD_ID\" = t.\"SD_ID\" and sk.\"STRING_LIST_ID_KID\" >= ? and sk.\"STRING_LIST_ID_KID\" <= ? ";
+  skewedColLoc = "select t.\"TBL_NAME\", t.\"TBL_ID\", sk.\"STRING_LIST_ID_KID\", sk.\"LOCATION\", db.\"NAME\", db.\"DB_ID\" from \"TBLS\" t, \"SDS\" s, \"DBS\" db, \"SKEWED_COL_VALUE_LOC_MAP\" sk "
+   + "where sk.\"SD_ID\" = s.\"SD_ID\" and s.\"SD_ID\" = t.\"SD_ID\" and t.\"DB_ID\" = db.\"DB_ID\" and sk.\"STRING_LIST_ID_KID\" >= ? and sk.\"STRING_LIST_ID_KID\" <= ? ";
 } else {
-  skewedColLoc = "select t.TBL_NAME, t.TBL_ID, sk.STRING_LIST_ID_KID, sk.LOCATION from TBLS t, SDS s, SKEWED_COL_VALUE_LOC_MAP sk "
-   + "where sk.SD_ID = s.SD_ID and s.SD_ID = t.SD_ID and sk.STRING_LIST_ID_KID >= ? and sk.STRING_LIST_ID_KID <= ? ";
+  skewedColLoc = "select t.TBL_NAME, t.TBL_ID, sk.STRING_LIST_ID_KID, sk.LOCATION, db.NAME, db.DB_ID from TBLS t, SDS s, DBS db, SKEWED_COL_VALUE_LOC_MAP sk "
+   + "where sk.SD_ID = s.SD_ID and s.SD_ID = t.SD_ID and t.DB_ID = db.DB_ID and sk.STRING_LIST_ID_KID >= ? and sk.STRING_LIST_ID_KID <= ? ";
 }
 
 long maxID = 0, minID = 0;
@@ -410,7 +407,8 @@ public class HiveSchemaTool {
 res = pStmt.executeQuery();
 while (res.next()) {
   String locValue = res.getString(4);
-  String entity = "Table "  + getNameOrID(res,1,2) +
+  String entity = "Database " + getNameOrID(res,5,6) +
+  ", Table " + getNameOrID(res,1,2) +
   ", String list " + res.getString(3);
   if (!checkLocation(entity, locValue, defaultServers)) {
 numOfInvalid++;
@@ -425,7 +423,6 @@ public class HiveSchemaTool {
 }
 if (numOfInvalid > 0) {
   isValid = false;
-  System.err.println("Total number of invalid SKEWED_COL_VALUE_LOC_MAP locations is: "+ numOfInvalid);
 }
 return isValid;
   }
@@ -445,14 +442,14 @@ URI[] defaultServers) {
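
The getNameOrID helper used in the hunks above lies outside this diff; a hypothetical reconstruction of its contract, for readers following along (not the actual HiveSchemaTool source):

    import java.sql.ResultSet;
    import java.sql.SQLException;

    final class EntityLabels {
      // Label an entity by its name column when present, otherwise by ID.
      static String getNameOrID(ResultSet res, int nameInx, int idInx)
          throws SQLException {
        String name = res.getString(nameInx);
        return (name == null || name.isEmpty())
            ? "ID: " + res.getString(idInx)
            : "Name: " + name;
      }
    }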
 

hive git commit: HIVE-16324: Truncate table should not work when EXTERNAL property of table is true (Vihang Karajgaonkar via Aihua Xu, reviewed by Thejas M Nair)

2017-05-15 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 413245ed9 -> 7d4554dd1


HIVE-16324: Truncate table should not work when EXTERNAL property of table is 
true (Vihang Karajgaonkar via Aihua Xu, reviewed by Thejas M Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7d4554dd
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7d4554dd
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7d4554dd

Branch: refs/heads/master
Commit: 7d4554dd1def433e0439cdbe7dfa665b6909e706
Parents: 413245e
Author: Aihua Xu 
Authored: Mon May 15 16:08:50 2017 -0400
Committer: Aihua Xu 
Committed: Mon May 15 16:08:50 2017 -0400

--
 .../apache/hadoop/hive/metastore/ObjectStore.java  |  6 +++---
 .../hadoop/hive/metastore/cache/CachedStore.java   |  2 +-
 .../clientnegative/truncate_table_failure5.q   |  5 +
 .../clientnegative/truncate_table_failure6.q   |  5 +
 .../clientnegative/truncate_table_failure5.q.out   | 17 +
 .../clientnegative/truncate_table_failure6.q.out   | 17 +
 6 files changed, 48 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/7d4554dd/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
--
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index ed19f42..ee48617 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -1155,7 +1155,7 @@ public class ObjectStore implements RawStore, Configurable {
 return mConstraints;
   }
 
-@Override
+  @Override
   public Table getTable(String dbName, String tableName) throws MetaException {
 boolean commited = false;
 Table tbl = null;
@@ -1434,7 +1434,7 @@ public class ObjectStore implements RawStore, Configurable {
   // for backwards compatibility with old metastore persistence
   if (mtbl.getViewOriginalText() != null) {
 tableType = TableType.VIRTUAL_VIEW.toString();
-  } else if ("TRUE".equals(mtbl.getParameters().get("EXTERNAL"))) {
+  } else if (Boolean.parseBoolean(mtbl.getParameters().get("EXTERNAL"))) {
 tableType = TableType.EXTERNAL_TABLE.toString();
   } else {
 tableType = TableType.MANAGED_TABLE.toString();
@@ -1466,7 +1466,7 @@ public class ObjectStore implements RawStore, Configurable {
 // If the table has property EXTERNAL set, update table type
 // accordingly
 String tableType = tbl.getTableType();
-boolean isExternal = "TRUE".equals(tbl.getParameters().get("EXTERNAL"));
+boolean isExternal = Boolean.parseBoolean(tbl.getParameters().get("EXTERNAL"));
 if (TableType.MANAGED_TABLE.toString().equals(tableType)) {
   if (isExternal) {
 tableType = TableType.EXTERNAL_TABLE.toString();

http://git-wip-us.apache.org/repos/asf/hive/blob/7d4554dd/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
--
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
index 5a187d8..1cc838f 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
@@ -447,7 +447,7 @@ public class CachedStore implements RawStore, Configurable {
 // If the table has property EXTERNAL set, update table type
 // accordingly
 String tableType = tbl.getTableType();
-boolean isExternal = "TRUE".equals(tbl.getParameters().get("EXTERNAL"));
+boolean isExternal = Boolean.parseBoolean(tbl.getParameters().get("EXTERNAL"));
 if (TableType.MANAGED_TABLE.toString().equals(tableType)) {
   if (isExternal) {
 tableType = TableType.EXTERNAL_TABLE.toString();

http://git-wip-us.apache.org/repos/asf/hive/blob/7d4554dd/ql/src/test/queries/clientnegative/truncate_table_failure5.q
--
diff --git a/ql/src/test/queries/clientnegative/truncate_table_failure5.q b/ql/src/test/queries/clientnegative/truncate_table_failure5.q
new file mode 100644
index 000..efabb34
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/truncate_table_failure5.q
@@ -0,0 +1,5 @@
+create table external1 (col1 int, col2 string);
+alter table external1 set tblproperties ('EXTERNAL'='true');
+
+-- truncate on a non-managed table should throw exception
+truncate table external1;


hive git commit: HIVE-16143: Improve msck repair batching (Vihang Karajgaonkar, reviewed by Sahil Takiar & Aihua Xu)

2017-05-15 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 86f74fdd2 -> 0efb93681


HIVE-16143: Improve msck repair batching (Vihang Karajgaonkar, reviewed by 
Sahil Takiar & Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0efb9368
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0efb9368
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0efb9368

Branch: refs/heads/master
Commit: 0efb93681099af4e4b7269c72d86fe379c99da7c
Parents: 86f74fd
Author: Aihua Xu 
Authored: Mon May 15 14:08:32 2017 -0400
Committer: Aihua Xu 
Committed: Mon May 15 14:08:32 2017 -0400

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |  13 +-
 .../apache/hive/common/util/RetryUtilities.java | 112 ++
 .../hive/common/util/TestRetryUtilities.java| 150 
 .../test/queries/clientpositive/create_like.q   |   3 +-
 .../results/clientpositive/create_like.q.out|  15 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java|   3 +-
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |  89 ++---
 .../exec/TestMsckCreatePartitionsInBatches.java | 340 +++
 .../test/queries/clientpositive/msck_repair_0.q |  10 +
 .../test/queries/clientpositive/msck_repair_1.q |   8 +
 .../test/queries/clientpositive/msck_repair_2.q |   5 +
 .../test/queries/clientpositive/msck_repair_3.q |   4 +
 .../clientpositive/msck_repair_batchsize.q  |   4 +
 .../results/clientpositive/msck_repair_0.q.out  |  38 ++-
 .../results/clientpositive/msck_repair_1.q.out  |  28 +-
 .../results/clientpositive/msck_repair_2.q.out  |  28 +-
 .../results/clientpositive/msck_repair_3.q.out  |  28 +-
 .../clientpositive/msck_repair_batchsize.q.out  |  34 +-
 ql/src/test/results/clientpositive/repair.q.out |   3 +-
 19 files changed, 857 insertions(+), 58 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 6068f0d..1c37b6e 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -3369,9 +3369,16 @@ public class HiveConf extends Configuration {
" others; 'ignore' will skip the validation (legacy behavior, causes bugs in many cases)"),
 HIVE_MSCK_REPAIR_BATCH_SIZE(
 "hive.msck.repair.batch.size", 0,
-"Batch size for the msck repair command. If the value is greater than zero, "
-+ "it will execute batch wise with the configured batch size. "
-+ "The default value is zero. Zero means it will execute directly (Not batch wise)"),
+"Batch size for the msck repair command. If the value is greater than zero,\n "
++ "it will execute batch wise with the configured batch size. In case of errors while\n"
++ "adding unknown partitions the batch size is automatically reduced by half in the subsequent\n"
++ "retry attempt. The default value is zero which means it will execute directly (not batch wise)"),
+HIVE_MSCK_REPAIR_BATCH_MAX_RETRIES("hive.msck.repair.batch.max.retries", 0,
+"Maximum number of retries for the msck repair command when adding unknown partitions.\n "
++ "If the value is greater than zero it will retry adding unknown partitions until the maximum\n"
++ "number of attempts is reached or batch size is reduced to 0, whichever is earlier.\n"
++ "In each retry attempt it will reduce the batch size by a factor of 2 until it reaches zero.\n"
++ "If the value is set to zero it will retry until the batch size becomes zero as described above."),
 
HIVE_SERVER2_LLAP_CONCURRENT_QUERIES("hive.server2.llap.concurrent.queries", -1,
 "The number of queries allowed in parallel via llap. Negative number implies 'infinite'."),
 HIVE_TEZ_ENABLE_MEMORY_MANAGER("hive.tez.enable.memory.manager", true,
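
A toy illustration of the retry policy these two settings describe (this is not the RetryUtilities API added below; all names here are made up, only the halving behaviour is taken from the descriptions above):

    public class DecayingBatchRetryDemo {
      interface BatchTask {
        void run(int batchSize) throws Exception;
      }

      // Retry with the batch size halved after every failure, until it
      // reaches zero or the retry budget (if any) is exhausted.
      static void runWithDecayingBatches(BatchTask task, int initialBatchSize,
          int maxRetries) throws Exception {
        int batchSize = initialBatchSize;
        int attempt = 0;
        while (true) {
          try {
            task.run(batchSize);
            return;
          } catch (Exception e) {
            attempt++;
            batchSize /= 2;
            if (batchSize == 0 || (maxRetries > 0 && attempt >= maxRetries)) {
              throw e;
            }
          }
        }
      }

      public static void main(String[] args) throws Exception {
        // Pretend batches larger than 25 partitions always fail.
        runWithDecayingBatches(size -> {
          if (size > 25) {
            throw new Exception("batch of " + size + " failed");
          }
          System.out.println("succeeded with batch size " + size);
        }, 100, 0);
      }
    }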

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/common/src/java/org/apache/hive/common/util/RetryUtilities.java
--
diff --git a/common/src/java/org/apache/hive/common/util/RetryUtilities.java b/common/src/java/org/apache/hive/common/util/RetryUtilities.java
new file mode 100644
index 000..3a20f2c
--- /dev/null
+++ b/common/src/java/org/apache/hive/common/util/RetryUtilities.java
@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding 

[2/2] hive git commit: HIVE-16607: ColumnStatsAutoGatherContext regenerates HiveConf.HIVEQUERYID (Peter Vary, reviewed by Aihua Xu)

2017-05-15 Thread aihuaxu
HIVE-16607: ColumnStatsAutoGatherContext regenerates HiveConf.HIVEQUERYID 
(Peter Vary, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/86f74fdd
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/86f74fdd
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/86f74fdd

Branch: refs/heads/master
Commit: 86f74fdd26b83cee128d24055ee369e9b7f36201
Parents: 455ffdd
Author: Aihua Xu 
Authored: Mon May 15 11:12:04 2017 -0400
Committer: Aihua Xu 
Committed: Mon May 15 11:36:22 2017 -0400

--
 .../mapreduce/TestHCatMultiOutputFormat.java|   2 +-
 .../test/resources/testconfiguration.properties |   1 +
 .../org/apache/hadoop/hive/ql/QTestUtil.java|   4 +-
 .../java/org/apache/hive/beeline/QFile.java |  20 +-
 .../apache/hive/beeline/QFileBeeLineClient.java |   8 +-
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  28 +-
 .../org/apache/hadoop/hive/ql/QueryState.java   | 146 ++---
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |   2 +-
 .../ql/io/rcfile/stats/PartialScanTask.java |   3 +-
 .../metadata/HiveMaterializedViewsRegistry.java |   5 +-
 .../ql/parse/ColumnStatsAutoGatherContext.java  |   9 +-
 .../hadoop/hive/ql/exec/TestExecDriver.java |   3 +-
 .../ql/parse/TestMacroSemanticAnalyzer.java |   2 +-
 .../hadoop/hive/ql/parse/TestQBCompact.java |   2 +-
 .../ql/parse/TestQBJoinTreeApplyPredicate.java  |   3 +-
 .../hadoop/hive/ql/parse/TestQBSubQuery.java|   3 +-
 .../parse/TestReplicationSemanticAnalyzer.java  |   3 +-
 .../ql/parse/TestSemanticAnalyzerFactory.java   |   2 +-
 .../parse/TestUpdateDeleteSemanticAnalyzer.java |   2 +-
 .../TestHiveAuthorizationTaskFactory.java   |   2 +-
 .../parse/authorization/TestPrivilegesV1.java   |   4 +-
 .../parse/authorization/TestPrivilegesV2.java   |   2 +-
 .../materialized_view_create_rewrite.q.out  | 322 +++
 .../hive/service/cli/operation/Operation.java   |  12 +-
 .../service/cli/operation/SQLOperation.java |   6 +-
 25 files changed, 498 insertions(+), 98 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
--
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
index 6ff48ee..180e802 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
@@ -356,7 +356,7 @@ public class TestHCatMultiOutputFormat {
* @throws Exception if any error occurs
*/
  private List getTableData(String table, String database) throws Exception {
-QueryState queryState = new QueryState(null);
+QueryState queryState = new QueryState.Builder().build();
 HiveConf conf = queryState.getConf();
 conf.addResource("hive-site.xml");
 ArrayList results = new ArrayList();

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/itests/src/test/resources/testconfiguration.properties
--
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index a378a5d..51385cf 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -756,6 +756,7 @@ encrypted.query.files=encryption_join_unencrypted_tbl.q,\
 beeline.positive.include=drop_with_concurrency.q,\
   escape_comments.q,\
   mapjoin2.q,\
+  materialized_view_create_rewrite.q,\
   smb_mapjoin_1.q,\
   smb_mapjoin_10.q,\
   smb_mapjoin_11.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index b897ffa..d408321 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -566,7 +566,7 @@ public class QTestUtil {
   System.out.println("Setting hive-site: "+HiveConf.getHiveSiteLocation());
 }
 
-queryState = new QueryState(new HiveConf(Driver.class));
+queryState = new QueryState.Builder().withHiveConf(new HiveConf(Driver.class)).build();
 if (useHBaseMetastore) {
   startMiniHBaseCluster();
 

hive git commit: HIVE-16577: Syntax error in the metastore init scripts for mssql (Vihang Karajgaonkar, reviewed by Aihua Xu & Thejas M Nair)

2017-05-08 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/branch-2.3 3fbdca546 -> a4214f1a3


HIVE-16577: Syntax error in the metastore init scripts for mssql (Vihang 
Karajgaonkar, reviewed by Aihua Xu & Thejas M Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a4214f1a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a4214f1a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a4214f1a

Branch: refs/heads/branch-2.3
Commit: a4214f1a34b0a51110ab7e734db26fff67f2a1c3
Parents: 3fbdca5
Author: Aihua Xu 
Authored: Fri May 5 14:00:51 2017 -0400
Committer: Aihua Xu 
Committed: Mon May 8 13:51:55 2017 -0400

--
 metastore/scripts/upgrade/mssql/hive-schema-2.2.0.mssql.sql | 2 +-
 metastore/scripts/upgrade/mssql/hive-schema-2.3.0.mssql.sql | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/a4214f1a/metastore/scripts/upgrade/mssql/hive-schema-2.2.0.mssql.sql
--
diff --git a/metastore/scripts/upgrade/mssql/hive-schema-2.2.0.mssql.sql b/metastore/scripts/upgrade/mssql/hive-schema-2.2.0.mssql.sql
index 57dd30f..33730de 100644
--- a/metastore/scripts/upgrade/mssql/hive-schema-2.2.0.mssql.sql
+++ b/metastore/scripts/upgrade/mssql/hive-schema-2.2.0.mssql.sql
@@ -579,7 +579,7 @@ CREATE TABLE NOTIFICATION_LOG
 EVENT_TYPE nvarchar(32) NOT NULL,
 DB_NAME nvarchar(128) NULL,
 TBL_NAME nvarchar(256) NULL,
-MESSAGE_FORMAT nvarchar(16)
+MESSAGE_FORMAT nvarchar(16),
 MESSAGE text NULL
 );
 

http://git-wip-us.apache.org/repos/asf/hive/blob/a4214f1a/metastore/scripts/upgrade/mssql/hive-schema-2.3.0.mssql.sql
--
diff --git a/metastore/scripts/upgrade/mssql/hive-schema-2.3.0.mssql.sql b/metastore/scripts/upgrade/mssql/hive-schema-2.3.0.mssql.sql
index 8a80a50..c117a32 100644
--- a/metastore/scripts/upgrade/mssql/hive-schema-2.3.0.mssql.sql
+++ b/metastore/scripts/upgrade/mssql/hive-schema-2.3.0.mssql.sql
@@ -579,7 +579,7 @@ CREATE TABLE NOTIFICATION_LOG
 EVENT_TYPE nvarchar(32) NOT NULL,
 DB_NAME nvarchar(128) NULL,
 TBL_NAME nvarchar(256) NULL,
-MESSAGE_FORMAT nvarchar(16)
+MESSAGE_FORMAT nvarchar(16),
 MESSAGE text NULL
 );
 



hive git commit: HIVE-16450: Some metastore operations are not retried even with desired underlining exceptions (Aihua Xu, reviewed by Naveen Gangam & Peter Vary)

2017-05-08 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 54dbca69c -> 301e7c5ea


HIVE-16450: Some metastore operations are not retried even with desired 
underlining exceptions (Aihua Xu, reviewed by Naveen Gangam & Peter Vary)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/301e7c5e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/301e7c5e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/301e7c5e

Branch: refs/heads/master
Commit: 301e7c5eaba790687818a57d92b046f746bb3d76
Parents: 54dbca6
Author: Aihua Xu 
Authored: Fri Apr 14 10:53:58 2017 -0400
Committer: Aihua Xu 
Committed: Mon May 8 10:20:47 2017 -0400

--
 .../apache/hadoop/hive/metastore/Deadline.java  | 29 ++---
 .../hadoop/hive/metastore/MetaStoreUtils.java   | 24 ++
 .../hadoop/hive/metastore/ObjectStore.java  | 34 
 .../hive/metastore/RetryingHMSHandler.java  |  2 --
 4 files changed, 47 insertions(+), 42 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/301e7c5e/metastore/src/java/org/apache/hadoop/hive/metastore/Deadline.java
--
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/Deadline.java b/metastore/src/java/org/apache/hadoop/hive/metastore/Deadline.java
index 71d336a..6149224 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/Deadline.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/Deadline.java
@@ -86,15 +86,15 @@ public class Deadline {
*/
   public static void resetTimeout(long timeoutMs) throws MetaException {
 if (timeoutMs <= 0) {
-  throw newMetaException(new DeadlineException("The reset timeout value should be " +
+  throw MetaStoreUtils.newMetaException(new DeadlineException("The reset timeout value should be " +
   "larger than 0: " + timeoutMs));
 }
 Deadline deadline = getCurrentDeadline();
 if (deadline != null) {
   deadline.timeoutNanos = timeoutMs * 100L;
 } else {
-  throw newMetaException(new DeadlineException("The threadlocal Deadline is null," +
-  " please register it firstly."));
+  throw MetaStoreUtils.newMetaException(new DeadlineException("The threadlocal Deadline is null," +
+  " please register it first."));
 }
   }
 
@@ -105,8 +105,8 @@ public class Deadline {
   public static boolean startTimer(String method) throws MetaException {
 Deadline deadline = getCurrentDeadline();
 if (deadline == null) {
-  throw newMetaException(new DeadlineException("The threadlocal Deadline is null," +
-  " please register it firstly."));
+  throw MetaStoreUtils.newMetaException(new DeadlineException("The threadlocal Deadline is null," +
+  " please register it first."));
 }
 if (deadline.startTime != NO_DEADLINE) return false;
 deadline.method = method;
@@ -125,8 +125,8 @@ public class Deadline {
   deadline.startTime = NO_DEADLINE;
   deadline.method = null;
 } else {
-  throw newMetaException(new DeadlineException("The threadlocal Deadline is null," +
-  " please register it firstly."));
+  throw MetaStoreUtils.newMetaException(new DeadlineException("The threadlocal Deadline is null," +
+  " please register it first."));
 }
   }
 
@@ -146,7 +146,7 @@ public class Deadline {
 if (deadline != null) {
   deadline.check();
 } else {
-  throw newMetaException(new DeadlineException("The threadlocal Deadline is null," +
+  throw MetaStoreUtils.newMetaException(new DeadlineException("The threadlocal Deadline is null," +
   " please register it first."));
 }
   }
@@ -165,18 +165,7 @@ public class Deadline {
+ (elapsedTime / 100L) + "ms exceeds " + (timeoutNanos / 100L)  + "ms");
   }
 } catch (DeadlineException e) {
-  throw newMetaException(e);
+  throw MetaStoreUtils.newMetaException(e);
 }
   }
-
-  /**
-   * convert DeadlineException to MetaException
-   * @param e
-   * @return
-   */
-  private static MetaException newMetaException(DeadlineException e) {
-MetaException metaException = new MetaException(e.getMessage());
-metaException.initCause(e);
-return metaException;
-  }
 }
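
The deleted helper above moves into MetaStoreUtils (next diff); the pattern itself is ordinary exception chaining: MetaException is Thrift-generated and has no (String, Throwable) constructor, so the cause is attached via initCause. A standalone equivalent (hypothetical class name, parameter type widened to Exception as the new call sites suggest):

    import org.apache.hadoop.hive.metastore.api.MetaException;

    final class MetaExceptions {
      // Wrap any exception in a MetaException while keeping the original
      // stack trace reachable through getCause().
      static MetaException newMetaException(Exception e) {
        MetaException metaException = new MetaException(e.getMessage());
        metaException.initCause(e);
        return metaException;
      }
    }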

http://git-wip-us.apache.org/repos/asf/hive/blob/301e7c5e/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
--
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index d67e03f..870896c 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ 

hive git commit: HIVE-16577: Syntax error in the metastore init scripts for mssql (Vihang Karajgaonkar, reviewed by Aihua Xu & Thejas M Nair)

2017-05-05 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/branch-2 08d559b15 -> afc3dabf5


HIVE-16577: Syntax error in the metastore init scripts for mssql (Vihang 
Karajgaonkar, reviewed by Aihua Xu & Thejas M Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/afc3dabf
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/afc3dabf
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/afc3dabf

Branch: refs/heads/branch-2
Commit: afc3dabf5eb3568d7372bb621c2d00adeac52a2d
Parents: 08d559b
Author: Aihua Xu 
Authored: Fri May 5 14:00:51 2017 -0400
Committer: Aihua Xu 
Committed: Fri May 5 14:03:00 2017 -0400

--
 metastore/scripts/upgrade/mssql/hive-schema-2.2.0.mssql.sql | 2 +-
 metastore/scripts/upgrade/mssql/hive-schema-2.3.0.mssql.sql | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/afc3dabf/metastore/scripts/upgrade/mssql/hive-schema-2.2.0.mssql.sql
--
diff --git a/metastore/scripts/upgrade/mssql/hive-schema-2.2.0.mssql.sql b/metastore/scripts/upgrade/mssql/hive-schema-2.2.0.mssql.sql
index 57dd30f..33730de 100644
--- a/metastore/scripts/upgrade/mssql/hive-schema-2.2.0.mssql.sql
+++ b/metastore/scripts/upgrade/mssql/hive-schema-2.2.0.mssql.sql
@@ -579,7 +579,7 @@ CREATE TABLE NOTIFICATION_LOG
 EVENT_TYPE nvarchar(32) NOT NULL,
 DB_NAME nvarchar(128) NULL,
 TBL_NAME nvarchar(256) NULL,
-MESSAGE_FORMAT nvarchar(16)
+MESSAGE_FORMAT nvarchar(16),
 MESSAGE text NULL
 );
 

http://git-wip-us.apache.org/repos/asf/hive/blob/afc3dabf/metastore/scripts/upgrade/mssql/hive-schema-2.3.0.mssql.sql
--
diff --git a/metastore/scripts/upgrade/mssql/hive-schema-2.3.0.mssql.sql b/metastore/scripts/upgrade/mssql/hive-schema-2.3.0.mssql.sql
index 8a80a50..c117a32 100644
--- a/metastore/scripts/upgrade/mssql/hive-schema-2.3.0.mssql.sql
+++ b/metastore/scripts/upgrade/mssql/hive-schema-2.3.0.mssql.sql
@@ -579,7 +579,7 @@ CREATE TABLE NOTIFICATION_LOG
 EVENT_TYPE nvarchar(32) NOT NULL,
 DB_NAME nvarchar(128) NULL,
 TBL_NAME nvarchar(256) NULL,
-MESSAGE_FORMAT nvarchar(16)
+MESSAGE_FORMAT nvarchar(16),
 MESSAGE text NULL
 );
 



hive git commit: HIVE-16577: Syntax error in the metastore init scripts for mssql (Vihang Karajgaonkar, reviewed by Aihua Xu & Thejas M Nair)

2017-05-05 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 1fecb81f9 -> d09f3f81d


HIVE-16577: Syntax error in the metastore init scripts for mssql (Vihang 
Karajgaonkar, reviewed by Aihua Xu & Thejas M Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d09f3f81
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d09f3f81
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d09f3f81

Branch: refs/heads/master
Commit: d09f3f81d231e68727bbb39681a686c5f525114a
Parents: 1fecb81
Author: Aihua Xu 
Authored: Fri May 5 14:00:51 2017 -0400
Committer: Aihua Xu 
Committed: Fri May 5 14:00:51 2017 -0400

--
 metastore/scripts/upgrade/mssql/hive-schema-2.2.0.mssql.sql | 2 +-
 metastore/scripts/upgrade/mssql/hive-schema-2.3.0.mssql.sql | 2 +-
 metastore/scripts/upgrade/mssql/hive-schema-3.0.0.mssql.sql | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/d09f3f81/metastore/scripts/upgrade/mssql/hive-schema-2.2.0.mssql.sql
--
diff --git a/metastore/scripts/upgrade/mssql/hive-schema-2.2.0.mssql.sql b/metastore/scripts/upgrade/mssql/hive-schema-2.2.0.mssql.sql
index 57dd30f..33730de 100644
--- a/metastore/scripts/upgrade/mssql/hive-schema-2.2.0.mssql.sql
+++ b/metastore/scripts/upgrade/mssql/hive-schema-2.2.0.mssql.sql
@@ -579,7 +579,7 @@ CREATE TABLE NOTIFICATION_LOG
 EVENT_TYPE nvarchar(32) NOT NULL,
 DB_NAME nvarchar(128) NULL,
 TBL_NAME nvarchar(256) NULL,
-MESSAGE_FORMAT nvarchar(16)
+MESSAGE_FORMAT nvarchar(16),
 MESSAGE text NULL
 );
 

http://git-wip-us.apache.org/repos/asf/hive/blob/d09f3f81/metastore/scripts/upgrade/mssql/hive-schema-2.3.0.mssql.sql
--
diff --git a/metastore/scripts/upgrade/mssql/hive-schema-2.3.0.mssql.sql b/metastore/scripts/upgrade/mssql/hive-schema-2.3.0.mssql.sql
index 8a80a50..c117a32 100644
--- a/metastore/scripts/upgrade/mssql/hive-schema-2.3.0.mssql.sql
+++ b/metastore/scripts/upgrade/mssql/hive-schema-2.3.0.mssql.sql
@@ -579,7 +579,7 @@ CREATE TABLE NOTIFICATION_LOG
 EVENT_TYPE nvarchar(32) NOT NULL,
 DB_NAME nvarchar(128) NULL,
 TBL_NAME nvarchar(256) NULL,
-MESSAGE_FORMAT nvarchar(16)
+MESSAGE_FORMAT nvarchar(16),
 MESSAGE text NULL
 );
 

http://git-wip-us.apache.org/repos/asf/hive/blob/d09f3f81/metastore/scripts/upgrade/mssql/hive-schema-3.0.0.mssql.sql
--
diff --git a/metastore/scripts/upgrade/mssql/hive-schema-3.0.0.mssql.sql b/metastore/scripts/upgrade/mssql/hive-schema-3.0.0.mssql.sql
index 98682a8..54d593c 100644
--- a/metastore/scripts/upgrade/mssql/hive-schema-3.0.0.mssql.sql
+++ b/metastore/scripts/upgrade/mssql/hive-schema-3.0.0.mssql.sql
@@ -579,7 +579,7 @@ CREATE TABLE NOTIFICATION_LOG
 EVENT_TYPE nvarchar(32) NOT NULL,
 DB_NAME nvarchar(128) NULL,
 TBL_NAME nvarchar(256) NULL,
-MESSAGE_FORMAT nvarchar(16)
+MESSAGE_FORMAT nvarchar(16),
 MESSAGE text NULL
 );
 



hive git commit: HIVE-16335: Beeline user HS2 connection file should use /etc/hive/conf instead of /etc/conf/hive (Vihang Karajgaonkar, reviewed by Aihua Xu)

2017-05-04 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/branch-2 242180be3 -> 08d559b15


HIVE-16335: Beeline user HS2 connection file should use /etc/hive/conf instead 
of /etc/conf/hive (Vihang Karajgaonkar, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/08d559b1
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/08d559b1
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/08d559b1

Branch: refs/heads/branch-2
Commit: 08d559b1536064ef0e666e5baab78bb6b1b28159
Parents: 242180b
Author: Aihua Xu 
Authored: Tue Apr 4 19:12:50 2017 -0400
Committer: Aihua Xu 
Committed: Thu May 4 15:22:46 2017 -0400

--
 .../hive/beeline/hs2connection/UserHS2ConnectionFileParser.java| 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/08d559b1/beeline/src/java/org/apache/hive/beeline/hs2connection/UserHS2ConnectionFileParser.java
--
diff --git a/beeline/src/java/org/apache/hive/beeline/hs2connection/UserHS2ConnectionFileParser.java b/beeline/src/java/org/apache/hive/beeline/hs2connection/UserHS2ConnectionFileParser.java
index 93a6231..7d7d9ae 100644
--- a/beeline/src/java/org/apache/hive/beeline/hs2connection/UserHS2ConnectionFileParser.java
+++ b/beeline/src/java/org/apache/hive/beeline/hs2connection/UserHS2ConnectionFileParser.java
@@ -44,7 +44,7 @@ public class UserHS2ConnectionFileParser implements HS2ConnectionFileParser {
  + (System.getProperty("os.name").toLowerCase().indexOf("windows") != -1 ? "" : ".")
   + "beeline" + File.separator;
   public static final String ETC_HIVE_CONF_LOCATION =
-  File.separator + "etc" + File.separator + "conf" + File.separator + "hive";
+  File.separator + "etc" + File.separator + "hive" + File.separator + "conf";
 
   private final List locations = new ArrayList<>();
  private static final Logger log = LoggerFactory.getLogger(UserHS2ConnectionFileParser.class);
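
A one-line check of the corrected constant (on Linux, where File.separator is "/", it prints /etc/hive/conf rather than the old, wrong /etc/conf/hive; the demo class is made up):

    import java.io.File;

    public class EtcHiveConfDemo {
      public static void main(String[] args) {
        String s = File.separator;
        System.out.println(s + "etc" + s + "hive" + s + "conf");
      }
    }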



hive git commit: HIVE-16346: inheritPerms should be conditional based on the target filesystem (Sahil Takiar, reviewed by Aihua Xu)

2017-04-28 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/branch-2 ec5b137ab -> 7b1f6efa5


HIVE-16346: inheritPerms should be conditional based on the target filesystem 
(Sahil Takiar, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7b1f6efa
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7b1f6efa
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7b1f6efa

Branch: refs/heads/branch-2
Commit: 7b1f6efa52a62ea92e8e33c1db9050f38fdeec9c
Parents: ec5b137
Author: Aihua Xu 
Authored: Fri Apr 28 13:26:55 2017 -0400
Committer: Aihua Xu 
Committed: Fri Apr 28 13:39:22 2017 -0400

--
 common/pom.xml  |   8 +-
 .../hadoop/hive/common/BlobStorageUtils.java|  10 +-
 .../apache/hadoop/hive/common/FileUtils.java|  39 ++-
 .../apache/hadoop/hive/common/StorageUtils.java |  40 +++
 .../org/apache/hadoop/hive/io/HdfsUtils.java| 248 +++
 .../hive/common/TestBlobStorageUtils.java   |   8 +-
 .../hadoop/hive/common/TestStorageUtils.java|  57 +
 .../apache/hadoop/hive/io/TestHdfsUtils.java| 203 +++
 .../apache/hadoop/hive/metastore/Warehouse.java |   5 +-
 .../java/org/apache/hadoop/hive/ql/Context.java |   4 +-
 .../apache/hadoop/hive/ql/exec/CopyTask.java|   3 +-
 .../apache/hadoop/hive/ql/exec/MoveTask.java|   4 +-
 .../hadoop/hive/ql/exec/ReplCopyTask.java   |   3 +-
 .../apache/hadoop/hive/ql/metadata/Hive.java|  29 +--
 .../org/apache/hadoop/hive/io/HdfsUtils.java| 239 --
 .../apache/hadoop/hive/io/TestHdfsUtils.java| 184 --
 16 files changed, 621 insertions(+), 463 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/7b1f6efa/common/pom.xml
--
diff --git a/common/pom.xml b/common/pom.xml
index e1c15ee..84bb1e5 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -157,7 +157,13 @@
   commons-logging
 
   
-  
+
+
+  org.apache.hadoop
+  hadoop-hdfs
+  ${hadoop.version}
+  true
+
 
 
   com.google.code.tempus-fugit

http://git-wip-us.apache.org/repos/asf/hive/blob/7b1f6efa/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java b/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
index e6a17cb..b7f1359 100644
--- a/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
@@ -15,6 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.hadoop.hive.common;
 
 import org.apache.hadoop.conf.Configuration;
@@ -24,21 +25,23 @@ import org.apache.hadoop.hive.conf.HiveConf;
 
 import java.util.Collection;
 
+
 /**
  * Utilities for different blob (object) storage systems
  */
 public class BlobStorageUtils {
+
 private static final boolean DISABLE_BLOBSTORAGE_AS_SCRATCHDIR = false;
 
public static boolean isBlobStoragePath(final Configuration conf, final Path path) {
return path != null && isBlobStorageScheme(conf, path.toUri().getScheme());
 }
 
-public static boolean isBlobStorageFileSystem(final Configuration conf, final FileSystem fs) {
-return fs != null && isBlobStorageScheme(conf, fs.getScheme());
+static boolean isBlobStorageFileSystem(final Configuration conf, final FileSystem fs) {
+return fs != null && fs.getUri() != null && isBlobStorageScheme(conf, fs.getUri().getScheme());
 }
 
-public static boolean isBlobStorageScheme(final Configuration conf, final String scheme) {
+static boolean isBlobStorageScheme(final Configuration conf, final String scheme) {
 Collection supportedBlobStoreSchemes =
 
conf.getStringCollection(HiveConf.ConfVars.HIVE_BLOBSTORE_SUPPORTED_SCHEMES.varname);
 
@@ -61,4 +64,5 @@ public class BlobStorageUtils {
 
HiveConf.ConfVars.HIVE_BLOBSTORE_OPTIMIZATIONS_ENABLED.defaultBoolVal
 );
 }
+
 }
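
A short usage sketch of the public entry point this commit leaves exposed (the configuration key and scheme list are assumptions standing in for HiveConf's hive.blobstore.supported.schemes default):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.common.BlobStorageUtils;

    public class BlobPathDemo {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Assumed key/value; HiveConf normally supplies this default.
        conf.set("hive.blobstore.supported.schemes", "s3a,s3,s3n");
        Path p = new Path("s3a://bucket/warehouse/t1");
        // true: the path's URI scheme matches a configured blob scheme.
        System.out.println(BlobStorageUtils.isBlobStoragePath(conf, p));
      }
    }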

http://git-wip-us.apache.org/repos/asf/hive/blob/7b1f6efa/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
index e586015..8ed8cc4 100644
--- a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
@@ -46,13 +46,11 @@ 

hive git commit: Revert "HIVE-16346: inheritPerms should be conditional based on the target filesystem (Sahil Takiar, reviewed by Aihua Xu)"

2017-04-28 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/branch-2 ab3a24bf6 -> 2ba121757


Revert "HIVE-16346: inheritPerms should be conditional based on the target 
filesystem (Sahil Takiar, reviewed by Aihua Xu)"

This reverts commit cce4d5e78582c8744972d265147d39a345e082db.


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/2ba12175
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/2ba12175
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/2ba12175

Branch: refs/heads/branch-2
Commit: 2ba121757699c551a7688f1e42c0d4ecedc826f8
Parents: ab3a24b
Author: Aihua Xu 
Authored: Fri Apr 28 11:22:22 2017 -0400
Committer: Aihua Xu 
Committed: Fri Apr 28 11:22:22 2017 -0400

--
 common/pom.xml  |  8 +--
 .../hadoop/hive/common/BlobStorageUtils.java| 10 +--
 .../apache/hadoop/hive/common/FileUtils.java| 39 ++-
 .../apache/hadoop/hive/common/StorageUtils.java | 40 ---
 .../hive/common/TestBlobStorageUtils.java   |  8 +--
 .../hadoop/hive/common/TestStorageUtils.java| 57 ---
 .../apache/hadoop/hive/metastore/Warehouse.java |  5 +-
 .../java/org/apache/hadoop/hive/ql/Context.java |  4 +-
 .../apache/hadoop/hive/ql/exec/CopyTask.java|  3 +-
 .../apache/hadoop/hive/ql/exec/MoveTask.java|  4 +-
 .../hadoop/hive/ql/exec/ReplCopyTask.java   |  3 +-
 .../apache/hadoop/hive/ql/metadata/Hive.java| 29 
 .../org/apache/hadoop/hive/io/HdfsUtils.java| 73 +---
 .../apache/hadoop/hive/io/TestHdfsUtils.java| 19 -
 14 files changed, 72 insertions(+), 230 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/2ba12175/common/pom.xml
--
diff --git a/common/pom.xml b/common/pom.xml
index 84bb1e5..e1c15ee 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -157,13 +157,7 @@
   commons-logging
 
   
-
-
-  org.apache.hadoop
-  hadoop-hdfs
-  ${hadoop.version}
-  true
-
+  
 
 
   com.google.code.tempus-fugit

http://git-wip-us.apache.org/repos/asf/hive/blob/2ba12175/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java b/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
index b7f1359..e6a17cb 100644
--- a/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hive.common;
 
 import org.apache.hadoop.conf.Configuration;
@@ -25,23 +24,21 @@ import org.apache.hadoop.hive.conf.HiveConf;
 
 import java.util.Collection;
 
-
 /**
  * Utilities for different blob (object) storage systems
  */
 public class BlobStorageUtils {
-
 private static final boolean DISABLE_BLOBSTORAGE_AS_SCRATCHDIR = false;
 
public static boolean isBlobStoragePath(final Configuration conf, final Path path) {
return path != null && isBlobStorageScheme(conf, path.toUri().getScheme());
 }
 
-static boolean isBlobStorageFileSystem(final Configuration conf, final FileSystem fs) {
-return fs != null && fs.getUri() != null && isBlobStorageScheme(conf, fs.getUri().getScheme());
+public static boolean isBlobStorageFileSystem(final Configuration conf, final FileSystem fs) {
+return fs != null && isBlobStorageScheme(conf, fs.getScheme());
 }
 
-static boolean isBlobStorageScheme(final Configuration conf, final String scheme) {
+public static boolean isBlobStorageScheme(final Configuration conf, final String scheme) {
 Collection supportedBlobStoreSchemes =
 
conf.getStringCollection(HiveConf.ConfVars.HIVE_BLOBSTORE_SUPPORTED_SCHEMES.varname);
 
@@ -64,5 +61,4 @@ public class BlobStorageUtils {
 
HiveConf.ConfVars.HIVE_BLOBSTORE_OPTIMIZATIONS_ENABLED.defaultBoolVal
 );
 }
-
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/2ba12175/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
index 8ed8cc4..e586015 100644
--- a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
@@ -46,11 +46,13 @@ import org.apache.hadoop.fs.PathFilter;
 import 

hive git commit: HIVE-16346: inheritPerms should be conditional based on the target filesystem (Sahil Takiar, reviewed by Aihua Xu)

2017-04-24 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/branch-2 59faf3695 -> cce4d5e78


HIVE-16346: inheritPerms should be conditional based on the target filesystem 
(Sahil Takiar, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/cce4d5e7
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/cce4d5e7
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/cce4d5e7

Branch: refs/heads/branch-2
Commit: cce4d5e78582c8744972d265147d39a345e082db
Parents: 59faf36
Author: Aihua Xu 
Authored: Mon Apr 24 16:40:57 2017 -0400
Committer: Aihua Xu 
Committed: Mon Apr 24 16:40:57 2017 -0400

--
 common/pom.xml  |  8 ++-
 .../hadoop/hive/common/BlobStorageUtils.java| 10 ++-
 .../apache/hadoop/hive/common/FileUtils.java| 39 +--
 .../apache/hadoop/hive/common/StorageUtils.java | 40 +++
 .../hive/common/TestBlobStorageUtils.java   |  8 +--
 .../hadoop/hive/common/TestStorageUtils.java| 57 +++
 .../apache/hadoop/hive/metastore/Warehouse.java |  5 +-
 .../java/org/apache/hadoop/hive/ql/Context.java |  4 +-
 .../apache/hadoop/hive/ql/exec/CopyTask.java|  3 +-
 .../apache/hadoop/hive/ql/exec/MoveTask.java|  4 +-
 .../hadoop/hive/ql/exec/ReplCopyTask.java   |  3 +-
 .../apache/hadoop/hive/ql/metadata/Hive.java| 29 
 .../org/apache/hadoop/hive/io/HdfsUtils.java| 73 +++-
 .../apache/hadoop/hive/io/TestHdfsUtils.java| 19 +
 14 files changed, 230 insertions(+), 72 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/cce4d5e7/common/pom.xml
--
diff --git a/common/pom.xml b/common/pom.xml
index e1c15ee..84bb1e5 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -157,7 +157,13 @@
   commons-logging
 
   
-  
+
+
+  org.apache.hadoop
+  hadoop-hdfs
+  ${hadoop.version}
+  true
+
 
 
   com.google.code.tempus-fugit

http://git-wip-us.apache.org/repos/asf/hive/blob/cce4d5e7/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java b/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
index e6a17cb..b7f1359 100644
--- a/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
@@ -15,6 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.hadoop.hive.common;
 
 import org.apache.hadoop.conf.Configuration;
@@ -24,21 +25,23 @@ import org.apache.hadoop.hive.conf.HiveConf;
 
 import java.util.Collection;
 
+
 /**
  * Utilities for different blob (object) storage systems
  */
 public class BlobStorageUtils {
+
 private static final boolean DISABLE_BLOBSTORAGE_AS_SCRATCHDIR = false;
 
public static boolean isBlobStoragePath(final Configuration conf, final Path path) {
return path != null && isBlobStorageScheme(conf, path.toUri().getScheme());
 }
 
-public static boolean isBlobStorageFileSystem(final Configuration conf, final FileSystem fs) {
-return fs != null && isBlobStorageScheme(conf, fs.getScheme());
+static boolean isBlobStorageFileSystem(final Configuration conf, final FileSystem fs) {
+return fs != null && fs.getUri() != null && isBlobStorageScheme(conf, fs.getUri().getScheme());
 }
 
-public static boolean isBlobStorageScheme(final Configuration conf, final 
String scheme) {
+static boolean isBlobStorageScheme(final Configuration conf, final String 
scheme) {
 Collection supportedBlobStoreSchemes =
 
conf.getStringCollection(HiveConf.ConfVars.HIVE_BLOBSTORE_SUPPORTED_SCHEMES.varname);
 
@@ -61,4 +64,5 @@ public class BlobStorageUtils {
 
HiveConf.ConfVars.HIVE_BLOBSTORE_OPTIMIZATIONS_ENABLED.defaultBoolVal
 );
 }
+
 }
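Two details worth noting in the hunk above: isBlobStorageFileSystem and isBlobStorageScheme become package-private, and the scheme is now read from fs.getUri() rather than fs.getScheme(), which not every FileSystem implementation overrides. A small usage sketch of the entry point that stays public; the scheme list is set explicitly here for clarity (HiveConf's default for hive.blobstore.supported.schemes is "s3,s3a,s3n"):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.common.BlobStorageUtils;

    public class BlobStorageCheck {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("hive.blobstore.supported.schemes", "s3,s3a,s3n");

        // true: "s3a" is in the supported-schemes list
        System.out.println(BlobStorageUtils.isBlobStoragePath(conf, new Path("s3a://bucket/warehouse")));
        // false: "hdfs" is not a blob-store scheme
        System.out.println(BlobStorageUtils.isBlobStoragePath(conf, new Path("hdfs://nn:8020/warehouse")));
      }
    }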

http://git-wip-us.apache.org/repos/asf/hive/blob/cce4d5e7/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java 
b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
index e586015..8ed8cc4 100644
--- a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
@@ -46,13 +46,11 @@ import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.fs.permission.FsAction;
 import 

[2/2] hive git commit: HIVE-16409: TestEventHandlerFactory has lacked the ASF header (Saijin Huang, reviewed by Aihua Xu)

2017-04-17 Thread aihuaxu
HIVE-16409: TestEventHandlerFactory has lacked the ASF header (Saijin Huang, 
reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/99f142c9
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/99f142c9
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/99f142c9

Branch: refs/heads/master
Commit: 99f142c9a010e74843497d813652c3ea44ad3085
Parents: 579f5d7
Author: Aihua Xu 
Authored: Mon Apr 17 09:26:39 2017 -0400
Committer: Aihua Xu 
Committed: Mon Apr 17 09:26:39 2017 -0400

--
 .../repl/events/TestEventHandlerFactory.java  | 18 ++
 1 file changed, 18 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/99f142c9/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/events/TestEventHandlerFactory.java
--
diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/events/TestEventHandlerFactory.java
 
b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/events/TestEventHandlerFactory.java
index 0526700..d44cb79 100644
--- 
a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/events/TestEventHandlerFactory.java
+++ 
b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/events/TestEventHandlerFactory.java
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hadoop.hive.ql.parse.repl.events;
 
 import org.apache.hadoop.hive.metastore.api.NotificationEvent;



[1/2] hive git commit: HIVE-15442: Driver.java has a redundancy code (Saijin Huang, reviewed by Aihua Xu)

2017-04-17 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 98250bbe6 -> 99f142c9a


HIVE-15442: Driver.java has a redundancy code (Saijin Huang, reviewed by Aihua 
Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/579f5d7e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/579f5d7e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/579f5d7e

Branch: refs/heads/master
Commit: 579f5d7e6ef87f108e5c482f30f72e612ef571ef
Parents: 98250bb
Author: Aihua Xu 
Authored: Mon Apr 17 09:25:39 2017 -0400
Committer: Aihua Xu 
Committed: Mon Apr 17 09:25:39 2017 -0400

--
 ql/src/java/org/apache/hadoop/hive/ql/Driver.java | 6 ++
 1 file changed, 2 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/579f5d7e/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java 
b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index a2ce71d..03ea997 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -561,10 +561,8 @@ public class Driver implements CommandProcessor {
       if (conf.getBoolVar(ConfVars.HIVE_LOG_EXPLAIN_OUTPUT)) {
         String explainOutput = getExplainOutput(sem, plan, tree);
         if (explainOutput != null) {
-          if (conf.getBoolVar(ConfVars.HIVE_LOG_EXPLAIN_OUTPUT)) {
-            LOG.info("EXPLAIN output for queryid " + queryId + " : "
-              + explainOutput);
-          }
+          LOG.info("EXPLAIN output for queryid " + queryId + " : "
+            + explainOutput);
           if (conf.isWebUiQueryInfoCacheEnabled()) {
             queryDisplay.setExplainPlan(explainOutput);
           }
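Stitched together, the resulting control flow from the hunk above checks the flag once, before the potentially large EXPLAIN string is built and logged:

    if (conf.getBoolVar(ConfVars.HIVE_LOG_EXPLAIN_OUTPUT)) {
      String explainOutput = getExplainOutput(sem, plan, tree);  // can be large
      if (explainOutput != null) {
        LOG.info("EXPLAIN output for queryid " + queryId + " : " + explainOutput);
        if (conf.isWebUiQueryInfoCacheEnabled()) {
          queryDisplay.setExplainPlan(explainOutput);
        }
      }
    }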



[2/4] hive git commit: HIVE-16340: Allow Kerberos + SSL connections to HMS (Sahil Takiar, reviewed by Aihua Xu)

2017-04-07 Thread aihuaxu
HIVE-16340: Allow Kerberos + SSL connections to HMS (Sahil Takiar, reviewed by 
Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c926f81e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c926f81e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c926f81e

Branch: refs/heads/master
Commit: c926f81e04a7bc49487ea4ef0e9d3a273d60f8aa
Parents: b466953
Author: Aihua Xu 
Authored: Fri Apr 7 09:48:05 2017 -0400
Committer: Aihua Xu 
Committed: Fri Apr 7 09:48:05 2017 -0400

--
 .../apache/hive/minikdc/TestSSLWithMiniKdc.java | 101 +
 .../java/org/hadoop/hive/jdbc/SSLTestUtils.java | 103 +
 .../test/java/org/apache/hive/jdbc/TestSSL.java | 150 ++-
 .../hadoop/hive/metastore/HiveMetaStore.java|  32 ++--
 .../hive/metastore/HiveMetaStoreClient.java |  51 ---
 .../hadoop/hive/metastore/MetaStoreUtils.java   |  15 +-
 6 files changed, 307 insertions(+), 145 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/c926f81e/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java
--
diff --git 
a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java
 
b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java
new file mode 100644
index 000..3153b9f
--- /dev/null
+++ 
b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.minikdc;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.Statement;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+import org.hadoop.hive.jdbc.SSLTestUtils;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+
+public class TestSSLWithMiniKdc {
+
+  private static MiniHS2 miniHS2 = null;
+  private static MiniHiveKdc miniHiveKdc = null;
+
+  @BeforeClass
+  public static void beforeTest() throws Exception {
+Class.forName(MiniHS2.getJdbcDriverName());
+
+HiveConf hiveConf = new HiveConf();
+
+SSLTestUtils.setMetastoreSslConf(hiveConf);
+hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+
+miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
+
+setHMSSaslConf(miniHiveKdc, hiveConf);
+
+miniHS2 = MiniHiveKdc.getMiniHS2WithKerbWithRemoteHMS(miniHiveKdc, hiveConf);
+
+Map<String, String> confOverlay = new HashMap<>();
+SSLTestUtils.setHttpConfOverlay(confOverlay);
+SSLTestUtils.setSslConfOverlay(confOverlay);
+
+miniHS2.start(confOverlay);
+  }
+
+  @AfterClass
+  public static void afterTest() throws Exception {
+miniHS2.stop();
+  }
+
+  @Test
+  public void testConnection() throws Exception {
+String tableName = "testTable";
+Path dataFilePath = new Path(SSLTestUtils.getDataFileDir(), "kv1.txt");
+Connection hs2Conn = getConnection(MiniHiveKdc.HIVE_TEST_USER_1);
+
+Statement stmt = hs2Conn.createStatement();
+
+SSLTestUtils.setupTestTableWithData(tableName, dataFilePath, hs2Conn);
+
+stmt.execute("select * from " + tableName);
+stmt.execute("drop table " + tableName);
+stmt.close();
+  }
+
+  private Connection getConnection(String userName) throws Exception {
+miniHiveKdc.loginUser(userName);
+return DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
+    System.getProperty("user.name"), "bar");
+  }
+
+  private static void setHMSSaslConf(MiniHiveKdc miniHiveKdc, HiveConf conf) {
+   String hivePrincipal =
+    miniHiveKdc.getFullyQualifiedServicePrincipal(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL);
+String hiveKeytab = miniHiveKdc.getKeyTabFile(
+
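The message is truncated by the archive, but what the test exercises is a single HS2 JDBC connection that is both Kerberos-authenticated and TLS-encrypted. A hedged example of such a URL assembled by hand; host, realm, and truststore values are placeholders rather than values from the test, and a valid Kerberos ticket is assumed:

    // Placeholders throughout; the parameter names (principal, ssl,
    // sslTrustStore, trustStorePassword) are standard HiveServer2 JDBC options.
    String url = "jdbc:hive2://hs2.example.com:10000/default"
        + ";principal=hive/_HOST@EXAMPLE.COM"       // Kerberos service principal
        + ";ssl=true"
        + ";sslTrustStore=/path/to/truststore.jks"
        + ";trustStorePassword=changeit";
    java.sql.Connection conn = java.sql.DriverManager.getConnection(url);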

[4/4] hive git commit: HIVE-16291: Hive fails when unions a parquet table with itself (Yibing Shi, reviewed by Ashutosh Chauhan & Aihua Xu)

2017-04-07 Thread aihuaxu
HIVE-16291: Hive fails when unions a parquet table with itself (Yibing Shi, 
reviewed by Ashutosh Chauhan & Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a5f94c07
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a5f94c07
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a5f94c07

Branch: refs/heads/master
Commit: a5f94c0710ac6ad04305e2cc320bb9eede301815
Parents: f8404d8
Author: Aihua Xu 
Authored: Fri Apr 7 09:57:25 2017 -0400
Committer: Aihua Xu 
Committed: Fri Apr 7 09:57:25 2017 -0400

--
 .../hadoop/hive/serde2/ColumnProjectionUtils.java |  6 ++
 .../hadoop/hive/serde2/TestColumnProjectionUtils.java | 14 ++
 2 files changed, 16 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/a5f94c07/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java
--
diff --git 
a/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java 
b/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java
index 1354680..9844166 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java
@@ -26,6 +26,7 @@ import java.util.Set;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.common.util.HiveStringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -115,10 +116,7 @@ public final class ColumnProjectionUtils {
   public static void appendReadColumns(Configuration conf, List<Integer> ids) {
     String id = toReadColumnIDString(ids);
     String old = conf.get(READ_COLUMN_IDS_CONF_STR, null);
-    String newConfStr = id;
-    if (old != null && !old.isEmpty()) {
-      newConfStr = newConfStr + StringUtils.COMMA_STR + old;
-    }
+    String newConfStr = HiveStringUtils.joinIgnoringEmpty(new String[] {id, old}, StringUtils.COMMA);
     setReadColumnIDConf(conf, newConfStr);
     // Set READ_ALL_COLUMNS to false
     conf.setBoolean(READ_ALL_COLUMNS, false);

http://git-wip-us.apache.org/repos/asf/hive/blob/a5f94c07/serde/src/test/org/apache/hadoop/hive/serde2/TestColumnProjectionUtils.java
--
diff --git 
a/serde/src/test/org/apache/hadoop/hive/serde2/TestColumnProjectionUtils.java 
b/serde/src/test/org/apache/hadoop/hive/serde2/TestColumnProjectionUtils.java
index 2b81b54..fbd9b07 100644
--- 
a/serde/src/test/org/apache/hadoop/hive/serde2/TestColumnProjectionUtils.java
+++ 
b/serde/src/test/org/apache/hadoop/hive/serde2/TestColumnProjectionUtils.java
@@ -85,6 +85,20 @@ public class TestColumnProjectionUtils {
   }
 
   @Test
+  public void testMultipleIdsWithEmpty() {
+    List<Integer> ids1 = Arrays.asList(1, 2);
+    List<Integer> ids2 = new ArrayList<>();
+    List<Integer> ids3 = Arrays.asList(2, 3);
+
+    ColumnProjectionUtils.appendReadColumns(conf, ids1);
+    ColumnProjectionUtils.appendReadColumns(conf, ids2);
+    ColumnProjectionUtils.appendReadColumns(conf, ids3);
+
+    List<Integer> actual = ColumnProjectionUtils.getReadColumnIDs(conf);
+    assertEquals(Arrays.asList(2, 3, 1), actual);
+  }
+
+  @Test
   public void testDeprecatedMethods() {
     List<Integer> columnIds = new ArrayList<Integer>();
     List<Integer> actual;
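The bug this fixes: with the old concatenation, appending an empty id list left a dangling comma in READ_COLUMN_IDS_CONF_STR (for example ",1,2"), which broke column projection when a Parquet table was unioned with itself. An illustration of the join semantics relied on above, written out by hand (this is not the HiveStringUtils source, just equivalent behavior for the two-operand case). The expected [2, 3, 1] in the new test follows because each append prepends the new ids and the ids are de-duplicated on read:

    static String joinIgnoringEmpty(String[] parts, char separator) {
      StringBuilder sb = new StringBuilder();
      for (String part : parts) {
        if (part == null || part.isEmpty()) {
          continue;  // skipping empties is what prevents dangling separators
        }
        if (sb.length() > 0) {
          sb.append(separator);
        }
        sb.append(part);
      }
      return sb.toString();
    }

    // joinIgnoringEmpty(new String[] {"", "1,2"}, ',')    -> "1,2"  (old code produced ",1,2")
    // joinIgnoringEmpty(new String[] {"2,3", "1,2"}, ',') -> "2,3,1,2"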



[1/4] hive git commit: HIVE-16333: remove the redundant symbol "\" to appear red in sublime text 3 (Saijin Huang, reviewed by Aihua Xu)

2017-04-07 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 0d0e4976c -> a5f94c071


HIVE-16333: remove the redundant symbol "\" to appear red in sublime text 3 
(Saijin Huang, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/b4669533
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/b4669533
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/b4669533

Branch: refs/heads/master
Commit: b46695335fd0fc6a1244a4309eb51b4dda43ad76
Parents: 0d0e497
Author: Aihua Xu 
Authored: Fri Apr 7 09:40:26 2017 -0400
Committer: Aihua Xu 
Committed: Fri Apr 7 09:40:26 2017 -0400

--
 .../src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/b4669533/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
index d378d06..59461e6 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
@@ -367,7 +367,7 @@ abstract class TxnHandler implements TxnStore, TxnStore.MutexAPI {
     try {
       /**
        * This runs at READ_COMMITTED for exactly the same reason as {@link #getOpenTxnsInfo()}
-\      */
+       */
       dbConn = getDbConn(Connection.TRANSACTION_READ_COMMITTED);
       stmt = dbConn.createStatement();
       String s = "select ntxn_next - 1 from NEXT_TXN_ID";



[2/2] hive git commit: HIVE-16297: Improving hive logging configuration variables (Vihang Karajgaonkar, reviewed by Peter Vary & Aihua Xu)

2017-04-04 Thread aihuaxu
HIVE-16297: Improving hive logging configuration variables (Vihang 
Karajgaonkar, reviewed by Peter Vary & Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4e60ea3f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4e60ea3f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4e60ea3f

Branch: refs/heads/master
Commit: 4e60ea3f786ed597b0594c2452dd7d64c44645b9
Parents: c7a44eb
Author: Aihua Xu 
Authored: Tue Apr 4 19:14:28 2017 -0400
Committer: Aihua Xu 
Committed: Tue Apr 4 19:28:46 2017 -0400

--
 .../java/org/apache/hadoop/hive/conf/HiveConfUtil.java | 13 -
 common/src/java/org/apache/hive/http/ConfServlet.java  | 10 +++---
 .../apache/hadoop/hive/ql/exec/FileSinkOperator.java   | 10 --
 .../hive/ql/exec/spark/RemoteHiveSparkClient.java  |  9 +++--
 4 files changed, 26 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/4e60ea3f/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java
index 9ba08e5..dc02803 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java
@@ -94,11 +94,22 @@ public class HiveConfUtil {
   public static void stripConfigurations(Configuration conf, Set<String> hiddenSet) {
     for (String name : hiddenSet) {
       if (conf.get(name) != null) {
-        conf.set(name, "");
+        conf.set(name, StringUtils.EMPTY);
       }
     }
   }
 
+  /**
+   * Searches the given configuration object and replaces all the configuration values for keys
+   * defined in hive.conf.hidden.list by an empty String
+   *
+   * @param conf - Configuration object which needs to be modified to remove sensitive keys
+   */
+  public static void stripConfigurations(Configuration conf) {
+    Set<String> hiddenSet = getHiddenSet(conf);
+    stripConfigurations(conf, hiddenSet);
+  }
+
   public static void dumpConfig(Configuration originalConf, StringBuilder sb) {
     Set<String> hiddenSet = getHiddenSet(originalConf);
     sb.append("Values omitted for security reason if present: ").append(hiddenSet).append("\n");

http://git-wip-us.apache.org/repos/asf/hive/blob/4e60ea3f/common/src/java/org/apache/hive/http/ConfServlet.java
--
diff --git a/common/src/java/org/apache/hive/http/ConfServlet.java 
b/common/src/java/org/apache/hive/http/ConfServlet.java
index 253df4f..856a5d2 100644
--- a/common/src/java/org/apache/hive/http/ConfServlet.java
+++ b/common/src/java/org/apache/hive/http/ConfServlet.java
@@ -26,6 +26,7 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConfUtil;
 
 /**
  * A servlet to print out the running configuration data.
@@ -81,11 +82,14 @@ public class ConfServlet extends HttpServlet {
* Guts of the servlet - extracted for easy testing.
*/
   static void writeResponse(Configuration conf, Writer out, String format)
-throws IOException, BadFormatException {
+  throws IOException, BadFormatException {
+//redact the sensitive information from the configuration values
+Configuration hconf = new Configuration(conf);
+HiveConfUtil.stripConfigurations(hconf);
 if (FORMAT_JSON.equals(format)) {
-  Configuration.dumpConfiguration(conf, out);
+  Configuration.dumpConfiguration(hconf, out);
 } else if (FORMAT_XML.equals(format)) {
-  conf.writeXml(out);
+  hconf.writeXml(out);
 } else {
   throw new BadFormatException("Bad format: " + format);
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/4e60ea3f/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
index a9d03d0..4d727ba 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfUtil;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import 

[1/2] hive git commit: HIVE-16335: Beeline user HS2 connection file should use /etc/hive/conf instead of /etc/conf/hive (Vihang Karajgaonkar, reviewed by Aihua Xu)

2017-04-04 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master 23ac04d3b -> 4e60ea3f7


HIVE-16335: Beeline user HS2 connection file should use /etc/hive/conf instead 
of /etc/conf/hive (Vihang Karajgaonkar, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c7a44eb7
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c7a44eb7
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c7a44eb7

Branch: refs/heads/master
Commit: c7a44eb707084c65722a9779d3fc32746b36ed09
Parents: 23ac04d
Author: Aihua Xu 
Authored: Tue Apr 4 19:12:50 2017 -0400
Committer: Aihua Xu 
Committed: Tue Apr 4 19:28:16 2017 -0400

--
 .../hive/beeline/hs2connection/UserHS2ConnectionFileParser.java| 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/c7a44eb7/beeline/src/java/org/apache/hive/beeline/hs2connection/UserHS2ConnectionFileParser.java
--
diff --git 
a/beeline/src/java/org/apache/hive/beeline/hs2connection/UserHS2ConnectionFileParser.java
 
b/beeline/src/java/org/apache/hive/beeline/hs2connection/UserHS2ConnectionFileParser.java
index 93a6231..7d7d9ae 100644
--- 
a/beeline/src/java/org/apache/hive/beeline/hs2connection/UserHS2ConnectionFileParser.java
+++ 
b/beeline/src/java/org/apache/hive/beeline/hs2connection/UserHS2ConnectionFileParser.java
@@ -44,7 +44,7 @@ public class UserHS2ConnectionFileParser implements HS2ConnectionFileParser {
       + (System.getProperty("os.name").toLowerCase().indexOf("windows") != -1 ? "" : ".")
       + "beeline" + File.separator;
   public static final String ETC_HIVE_CONF_LOCATION =
-      File.separator + "etc" + File.separator + "conf" + File.separator + "hive";
+      File.separator + "etc" + File.separator + "hive" + File.separator + "conf";
 
   private final List<String> locations = new ArrayList<>();
   private static final Logger log = LoggerFactory.getLogger(UserHS2ConnectionFileParser.class);
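With the constant corrected, beeline probes the conventional Hive client configuration directory. A small sketch of the effect; the lookup order is paraphrased from the parser rather than quoted verbatim:

    // Lookup order for beeline-hs2-connection.xml (paraphrased):
    //   1) ~/.beeline/                (user override)
    //   2) ${HIVE_CONF_DIR}/          (if the environment variable is set)
    //   3) /etc/hive/conf/            (previously the transposed /etc/conf/hive)
    String etcHiveConf = File.separator + "etc" + File.separator + "hive"
        + File.separator + "conf";
    System.out.println(etcHiveConf);  // "/etc/hive/conf" on Unix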


