[hive] branch master updated: HIVE-22417: Remove stringifyException from MetaStore (#3478) (David Mollitor reviewed by Stamatis Zampetakis)

2022-08-05 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 778c838317 HIVE-22417: Remove stringifyException from MetaStore 
(#3478) (David Mollitor reviewed by Stamatis Zampetakis)
778c838317 is described below

commit 778c838317c952dcd273fd6c7a51491746a1d807
Author: belugabehr <12578579+belugab...@users.noreply.github.com>
AuthorDate: Fri Aug 5 11:25:48 2022 -0400

HIVE-22417: Remove stringifyException from MetaStore (#3478) (David 
Mollitor reviewed by Stamatis Zampetakis)
---
 .../hive/metastore/SerDeStorageSchemaReader.java   |  2 -
 .../apache/hadoop/hive/ql/exec/ReplCopyTask.java   |  5 +--
 .../repl/dump/events/AbstractEventHandler.java |  2 +-
 .../repl/dump/events/CreateFunctionHandler.java|  2 +-
 .../hadoop/hive/metastore/ReplChangeManager.java   | 47 ++
 .../hadoop/hive/metastore/utils/StringUtils.java   |  2 +
 .../apache/hadoop/hive/metastore/ObjectStore.java  |  9 +++--
 .../hive/metastore/utils/MetaStoreServerUtils.java |  4 +-
 8 files changed, 36 insertions(+), 37 deletions(-)

diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/SerDeStorageSchemaReader.java
 
b/metastore/src/java/org/apache/hadoop/hive/metastore/SerDeStorageSchemaReader.java
index 958f21bb03..7f2b08c13a 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/SerDeStorageSchemaReader.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/SerDeStorageSchemaReader.java
@@ -23,7 +23,6 @@ import 
org.apache.hadoop.hive.metastore.api.EnvironmentContext;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.utils.StringUtils;
 
 import java.util.List;
 
@@ -47,7 +46,6 @@ public class SerDeStorageSchemaReader implements 
StorageSchemaReader {
   Deserializer s = HiveMetaStoreUtils.getDeserializer(conf, tbl, null, 
false);
   return HiveMetaStoreUtils.getFieldsFromDeserializer(tbl.getTableName(), 
s, conf);
 } catch (Exception e) {
-  StringUtils.stringifyException(e);
   throw new MetaException(e.getMessage());
 } finally {
   if (orgHiveLoader != null) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
index 474859f99b..231f57455b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.exec;
 
 import org.apache.hadoop.hive.metastore.ReplChangeManager;
-import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
 import org.apache.hadoop.hive.ql.parse.EximUtil;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
@@ -189,10 +188,10 @@ public class ReplCopyTask extends Task 
implements Serializable {
   ReplChangeManager.FileInfo f = ReplChangeManager
   .getFileInfo(new Path(fragments[0]), fragments[1], fragments[2], 
fragments[3], conf);
   filePaths.add(f);
-} catch (MetaException e) {
+} catch (IOException ioe) {
   // issue warning for missing file and throw exception
   LOG.warn("Cannot find {} in source repo or cmroot", fragments[0]);
-  throw new IOException(e.getMessage());
+  throw ioe;
 }
 // Note - we need srcFs rather than fs, because it is possible that 
the _files lists files
 // which are from a different filesystem than the fs where the _files 
file itself was loaded
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AbstractEventHandler.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AbstractEventHandler.java
index f488b8577f..7f0830589d 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AbstractEventHandler.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AbstractEventHandler.java
@@ -106,7 +106,7 @@ abstract class AbstractEventHandler 
implements EventHand
   }
 
   protected void writeFileEntry(Table table, Partition ptn, String file, 
Context withinContext)
-  throws IOException, LoginException, MetaException, 
HiveFatalException {
+  throws IOException, LoginException, HiveFatalException {
 HiveConf hiveConf = withinContext.hiveConf;
 String distCpDoAsUser = 
hiveConf.getVar(HiveConf.ConfVars.HIVE_DISTCP_DOAS_USER);
 if (!Utils.shouldDumpMetaDataOnly(withinContext.hiveConf)) {
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateFunctionHandler.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateFunc

[hive] branch master updated: HIVE-25476: Remove Unused Dependencies for JDBC Driver (David Mollitor reviewed by Miklos Gergely)

2021-08-27 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 4905a12  HIVE-25476: Remove Unused Dependencies for JDBC Driver (David 
Mollitor reviewed by Miklos Gergely)
4905a12 is described below

commit 4905a1206bf0ddf0459fe2745d33c6afc04e63fe
Author: belugabehr <12578579+belugab...@users.noreply.github.com>
AuthorDate: Fri Aug 27 08:54:49 2021 -0400

HIVE-25476: Remove Unused Dependencies for JDBC Driver (David Mollitor 
reviewed by Miklos Gergely)
---
 jdbc/pom.xml | 11 ---
 1 file changed, 11 deletions(-)

diff --git a/jdbc/pom.xml b/jdbc/pom.xml
index a904ee2..1a910d1 100644
--- a/jdbc/pom.xml
+++ b/jdbc/pom.xml
@@ -62,16 +62,6 @@
 
 
   org.apache.hive
-  hive-serde
-  ${project.version}
-
-
-  org.apache.hive
-  hive-metastore
-  ${project.version}
-
-
-  org.apache.hive
   hive-shims
   ${project.version}
 
@@ -110,7 +100,6 @@
 
   org.apache.hadoop
   hadoop-common
-  ${hadoop.version}

 
   commons-beanutils


[hive] branch master updated: HIVE-25477: Clean Up JDBC Code (David Mollitor reviewed by Miklos Gergely)

2021-08-25 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new ad1f6c5  HIVE-25477: Clean Up JDBC Code (David Mollitor reviewed by 
Miklos Gergely)
ad1f6c5 is described below

commit ad1f6c5ee17d784de00075416c47753583b81e08
Author: belugabehr <12578579+belugab...@users.noreply.github.com>
AuthorDate: Wed Aug 25 12:51:55 2021 -0400

HIVE-25477: Clean Up JDBC Code (David Mollitor reviewed by Miklos Gergely)
---
 .../java/org/apache/hive/jdbc/HiveConnection.java  | 12 +---
 .../org/apache/hive/jdbc/HiveDatabaseMetaData.java | 82 --
 .../hive/jdbc/HttpRequestInterceptorBase.java  |  2 -
 jdbc/src/java/org/apache/hive/jdbc/Utils.java  |  4 +-
 4 files changed, 3 insertions(+), 97 deletions(-)

diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index bc5245f..4eefded 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -46,7 +46,6 @@ import java.sql.CallableStatement;
 import java.sql.Clob;
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
-import java.sql.DriverManager;
 import java.sql.NClob;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
@@ -68,7 +67,6 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.concurrent.Executor;
-import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.stream.Stream;
 
@@ -100,8 +98,6 @@ import org.apache.hive.jdbc.saml.IJdbcBrowserClientFactory;
 import org.apache.hive.service.rpc.thrift.TSetClientInfoResp;
 
 import org.apache.hive.service.rpc.thrift.TSetClientInfoReq;
-import org.apache.hadoop.hive.common.auth.HiveAuthUtils;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
 import org.apache.hive.service.auth.HiveAuthConstants;
 import org.apache.hive.service.auth.KerberosSaslHelper;
@@ -120,8 +116,6 @@ import org.apache.hive.service.rpc.thrift.TProtocolVersion;
 import org.apache.hive.service.rpc.thrift.TRenewDelegationTokenReq;
 import org.apache.hive.service.rpc.thrift.TRenewDelegationTokenResp;
 import org.apache.hive.service.rpc.thrift.TSessionHandle;
-import org.apache.hive.service.rpc.thrift.TSetClientInfoReq;
-import org.apache.hive.service.rpc.thrift.TSetClientInfoResp;
 import org.apache.http.HttpEntityEnclosingRequest;
 import org.apache.http.HttpRequest;
 import org.apache.http.HttpRequestInterceptor;
@@ -156,8 +150,6 @@ import org.apache.thrift.transport.TTransportException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.annotations.VisibleForTesting;
-
 /**
  * HiveConnection.
  *
@@ -696,7 +688,7 @@ public class HiveConnection implements java.sql.Connection {
 } else {
   for (final Class rejectException : 
this.nonRetriableClasses) {
 if (rejectException.isInstance(exception)) {
-  LOG.info("Not retrying as the class (" + exception.getClass() + 
") is an instance of is non-retriable class.");;
+  LOG.info("Not retrying as the class (" + exception.getClass() + 
") is an instance of is non-retriable class.");
   return false;
 }
   }
@@ -1674,7 +1666,7 @@ public class HiveConnection implements 
java.sql.Connection {
 }
 boolean rc = false;
 try {
-  String productName = new HiveDatabaseMetaData(this, client, sessHandle)
+  new HiveDatabaseMetaData(this, client, sessHandle)
   .getDatabaseProductName();
   rc = true;
 } catch (SQLException e) {
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java
index 2591785..d1cfd7e 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java
@@ -22,8 +22,6 @@ import java.util.ArrayList;
 
 import java.util.List;
 
-import jline.internal.Log;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
 import org.apache.hive.service.cli.TableSchema;
@@ -34,13 +32,11 @@ import java.sql.RowIdLifetime;
 import java.sql.SQLException;
 import java.sql.SQLFeatureNotSupportedException;
 import java.util.Arrays;
-import java.util.Comparator;
 import java.util.Map;
 import java.util.jar.Attributes;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hive.service.cli.GetInfoType;
 import org.apache.hive.service.rpc.thrift.TCLIService;
-import org.apache.hive

[hive] branch master updated (0b68031 -> 4a1c511)

2021-07-27 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 0b68031  HIVE-25058: PTF: TimestampValueBoundaryScanner can be 
optimised during range computation pt2 - isDistanceGreater (#) (Laszlo 
Bodor reviewed by Rajesh Balamohan)
 add 4a1c511  HIVE-25320: Purge hive.optimize.sort.dynamic.partition (Alex 
Sun via David Mollitor, reviewed by Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 common/src/java/org/apache/hadoop/hive/conf/HiveConf.java   | 4 
 .../clientpositive/insert_overwrite_dynamic_partitions_merge_move.q | 1 -
 .../clientpositive/insert_overwrite_dynamic_partitions_merge_only.q | 1 -
 .../clientpositive/insert_overwrite_dynamic_partitions_move_only.q  | 1 -
 ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands3.java | 1 -
 ql/src/test/queries/clientpositive/acid_table_stats.q   | 3 ---
 ql/src/test/queries/clientpositive/dynpart_merge.q  | 1 -
 ql/src/test/queries/clientpositive/dynpart_sort_opt_bucketing.q | 6 --
 ql/src/test/queries/clientpositive/dynpart_sort_opt_vectorization.q | 1 -
 ql/src/test/queries/clientpositive/dynpart_sort_optimization.q  | 2 --
 ql/src/test/queries/clientpositive/dynpart_sort_optimization2.q | 1 -
 .../clientpositive/dynpart_sort_optimization_distribute_by.q| 1 -
 .../test/queries/clientpositive/materialized_view_partitioned_3.q   | 1 -
 ql/src/test/queries/clientpositive/merge_dynamic_partition.q| 1 -
 ql/src/test/queries/clientpositive/merge_dynamic_partition2.q   | 1 -
 ql/src/test/queries/clientpositive/mm_dp.q  | 1 -
 ql/src/test/queries/clientpositive/orc_merge10.q| 1 -
 ql/src/test/queries/clientpositive/orc_merge7.q | 1 -
 ql/src/test/queries/clientpositive/orc_merge_diff_fs.q  | 1 -
 ql/src/test/queries/clientpositive/orc_merge_incompat2.q| 1 -
 .../test/queries/clientpositive/schema_evol_orc_acid_part_update.q  | 2 --
 .../clientpositive/schema_evol_orc_acid_part_update_llap_io.q   | 2 --
 .../queries/clientpositive/temp_table_merge_dynamic_partition.q | 1 -
 .../queries/clientpositive/temp_table_merge_dynamic_partition2.q| 1 -
 ql/src/test/queries/clientpositive/vector_outer_join_constants.q| 2 --
 25 files changed, 39 deletions(-)


[hive] branch master updated (89ef06c -> 9ad5a0b)

2021-06-21 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 89ef06c  HIVE-24951: Table created with Uppercase name using CTAS does 
not produce result for select queries(Rajkumar Singh via Naveen Gangam)
 add 9ad5a0b  HIVE-25235: Remove ThreadPoolExecutorWithOomHook (David 
Mollitor reviewed by Miklos Gergely, Zhihua Deng)

No new revisions were added by this update.

Summary of changes:
 .../java/org/apache/hadoop/hive/conf/HiveConf.java |  3 -
 .../apache/hadoop/hive/ql/hooks/HookContext.java   |  4 +-
 .../apache/hadoop/hive/ql/hooks/TestHiveHooks.java | 11 
 .../hive/service/cli/session/SessionManager.java   |  2 +-
 .../cli/thrift/EmbeddedThriftBinaryCLIService.java |  2 +-
 .../cli/thrift/ThreadPoolExecutorWithOomHook.java  | 71 --
 .../service/cli/thrift/ThriftBinaryCLIService.java | 11 ++--
 .../service/cli/thrift/ThriftHttpCLIService.java   | 11 ++--
 .../apache/hive/service/server/HiveServer2.java|  5 +-
 .../service/server/HiveServer2OomHookRunner.java   | 47 --
 .../hive/service/auth/TestPlainSaslHelper.java |  2 +-
 .../cli/session/TestPluggableHiveSessionImpl.java  |  4 +-
 .../cli/session/TestSessionGlobalInitFile.java |  2 +-
 .../thrift/TestThreadPoolExecutorWithOomHook.java  | 70 -
 14 files changed, 18 insertions(+), 227 deletions(-)
 delete mode 100644 
service/src/java/org/apache/hive/service/cli/thrift/ThreadPoolExecutorWithOomHook.java
 delete mode 100644 
service/src/java/org/apache/hive/service/server/HiveServer2OomHookRunner.java
 delete mode 100644 
service/src/test/org/apache/hive/service/cli/thrift/TestThreadPoolExecutorWithOomHook.java


[hive] branch master updated (f5c39b1 -> 0c7903d)

2021-06-09 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from f5c39b1  HIVE-25195: Store Iceberg write commit and ctas information 
in QueryState (#2347) (Marton Bod, reviewed by Peter Vary)
 add 0c7903d  HIVE-25185: Improve Logging On Polling Tez Session from Pool 
(David Mollitor reviewed by Panagiotis Garefalakis)

No new revisions were added by this update.

Summary of changes:
 ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPool.java | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)


[hive] branch master updated (ec7c95d -> fd029c5)

2021-05-29 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from ec7c95d  HIVE-25057: Implement rollback for hive to iceberg migration 
(#2219) (Laszlo Pinter, reviewed by Marton Bod and Peter Vary)
 add fd029c5  HIVE-25177: Add Additional Debugging Help for HBase Reader 
(David Mollitor reviewed by Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 .../apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java   | 11 +--
 1 file changed, 9 insertions(+), 2 deletions(-)


[hive] branch master updated (6d85094 -> 8864082)

2021-05-29 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 6d85094  HIVE-25102: Cache Iceberg table objects within same query 
(#2261) (Laszlo Pinter, reviewed by Marton Bod and Peter Vary)
 add 8864082  HIVE-25176: Print DAG ID to Console (David Mollitor reviewed 
by Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 .../org/apache/hadoop/hive/ql/exec/tez/monitoring/TezJobMonitor.java | 1 +
 1 file changed, 1 insertion(+)


[hive] branch master updated (9ff9653 -> c929345)

2021-05-25 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 9ff9653  HIVE-24761: Support vectorization for bounded windows in PTF 
(#2099) (Laszlo Bodor reviewed by Ramesh Kumar Thangarajan)
 add c929345  HIVE-25112: Simplify TXN Compactor Heartbeat Thread (David 
Mollitor reviewed by Karen Coppage)

No new revisions were added by this update.

Summary of changes:
 .../hadoop/hive/ql/txn/compactor/Worker.java   | 44 +++---
 1 file changed, 22 insertions(+), 22 deletions(-)


[hive] branch master updated (66668ef -> c10aa53)

2021-05-24 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 66668ef  HIVE-25151: Remove Unused Interner from HiveMetastoreChecker 
(David Mollitor reviewed by Miklos Gergely)
 add c10aa53  HIVE-24810: Use JDK 8 String Switch in TruncDateFromTimestamp 
(David Mollitor reviewed by Panagiotis Garefalakis)

No new revisions were added by this update.

Summary of changes:
 .../vector/expressions/TruncDateFromTimestamp.java | 26 --
 1 file changed, 19 insertions(+), 7 deletions(-)


[hive] branch master updated (1090c93 -> 66668ef)

2021-05-23 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 1090c93  HIVE-25152: Remove Superfluous Logging Code (David Mollitor 
reviewed by Miklos Gergely and Panagiotis Garefalakis)
 add 66668ef  HIVE-25151: Remove Unused Interner from HiveMetastoreChecker 
(David Mollitor reviewed by Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 .../java/org/apache/hadoop/hive/metastore/HiveMetaStoreChecker.java   | 4 
 1 file changed, 4 deletions(-)


[hive] branch master updated (35221a7 -> 1090c93)

2021-05-23 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 35221a7  HIVE-25142 : Rehashing in map join fast hash table causing 
corruption for large keys (#2300)
 add 1090c93  HIVE-25152: Remove Superfluous Logging Code (David Mollitor 
reviewed by Miklos Gergely and Panagiotis Garefalakis)

No new revisions were added by this update.

Summary of changes:
 .../predicate/AccumuloPredicateHandler.java|  4 +-
 .../java/org/apache/hadoop/hive/cli/CliDriver.java |  4 +-
 .../hadoop/hive/common/type/TimestampTZUtil.java   |  4 +-
 .../java/org/apache/hadoop/hive/conf/HiveConf.java |  4 +-
 .../org/apache/hadoop/hive/ql/log/PerfLogger.java  |  4 +-
 .../hive/http/Log4j2ConfiguratorServlet.java   |  8 +--
 .../hive/druid/io/DruidQueryBasedInputFormat.java  |  4 +-
 .../apache/hadoop/hive/hbase/HBaseSerDeHelper.java |  8 +--
 .../mapreduce/FileOutputCommitterContainer.java| 13 ++---
 .../hive/hcatalog/templeton/LauncherDelegator.java |  4 +-
 .../hive/hcatalog/templeton/ProxyUserSupport.java  | 18 ++-
 .../hadoop/hive/llap/LlapRowRecordReader.java  |  4 +-
 .../llap/ext/LlapTaskUmbilicalExternalClient.java  |  4 +-
 .../hadoop/hive/llap/registry/impl/SlotZnode.java  |  9 +---
 .../hive/registry/impl/ServiceInstanceBase.java|  4 +-
 .../apache/hadoop/hive/llap/AsyncPbRpcProxy.java   | 16 ++
 .../hive/llap/security/LlapTokenSelector.java  |  4 +-
 .../hadoop/hive/llap/security/SecretManager.java   |  4 +-
 .../hadoop/hive/llap/daemon/impl/AMReporter.java   |  8 +--
 .../hive/llap/daemon/impl/ContainerRunnerImpl.java | 16 ++
 .../hive/llap/daemon/impl/LlapTaskReporter.java|  8 +--
 .../hive/llap/daemon/impl/LlapTokenChecker.java|  4 +-
 .../hadoop/hive/llap/daemon/impl/QueryTracker.java | 20 ++--
 .../hive/llap/daemon/impl/TaskExecutorService.java | 39 --
 .../hive/llap/security/LlapServerSecurityInfo.java |  8 +--
 .../hive/llap/shufflehandler/ShuffleHandler.java   | 11 ++--
 .../hive/llap/tezplugins/LlapTaskCommunicator.java | 12 ++---
 .../llap/tezplugins/LlapTaskSchedulerService.java  | 32 +++-
 .../apache/hadoop/hive/ql/parse/ParseDriver.java   |  4 +-
 .../org/apache/hadoop/hive/llap/LlapHiveUtils.java |  5 +-
 .../apache/hadoop/hive/llap/ProactiveEviction.java |  8 +--
 .../hive/ql/exec/AppMasterEventOperator.java   | 15 ++
 .../hadoop/hive/ql/exec/CommonJoinOperator.java|  4 +-
 .../apache/hadoop/hive/ql/exec/DemuxOperator.java  | 29 ---
 .../hadoop/hive/ql/exec/FileSinkOperator.java  | 30 ---
 .../hadoop/hive/ql/exec/FunctionRegistry.java  |  4 +-
 .../hadoop/hive/ql/exec/MapJoinOperator.java   | 16 ++
 .../apache/hadoop/hive/ql/exec/MapOperator.java|  9 +---
 .../apache/hadoop/hive/ql/exec/MuxOperator.java|  8 +--
 .../hadoop/hive/ql/exec/ObjectCacheFactory.java|  6 +--
 .../hadoop/hive/ql/exec/OrcFileMergeOperator.java  |  8 +--
 .../hadoop/hive/ql/exec/ReduceSinkOperator.java|  9 +---
 .../hadoop/hive/ql/exec/SMBMapJoinOperator.java|  8 +--
 .../apache/hadoop/hive/ql/exec/ScriptOperator.java | 24 +++--
 .../apache/hadoop/hive/ql/exec/SelectOperator.java |  4 +-
 .../hive/ql/exec/SparkHashTableSinkOperator.java   |  4 +-
 .../hadoop/hive/ql/exec/TableScanOperator.java | 12 ++---
 .../apache/hadoop/hive/ql/exec/UnionOperator.java  |  2 +-
 .../apache/hadoop/hive/ql/exec/mr/ExecReducer.java | 14 ++---
 .../apache/hadoop/hive/ql/exec/mr/ObjectCache.java |  8 +--
 .../exec/persistence/BytesBytesMultiHashMap.java   |  4 +-
 .../ql/exec/tez/ColumnarSplitSizeEstimator.java|  8 +--
 .../hive/ql/exec/tez/CustomPartitionVertex.java| 11 ++--
 .../hive/ql/exec/tez/DynamicPartitionPruner.java   | 13 ++---
 .../hadoop/hive/ql/exec/tez/HashTableLoader.java   | 17 +++---
 .../hive/ql/exec/tez/HiveSplitGenerator.java   |  5 +-
 .../tez/HostAffinitySplitLocationProvider.java | 12 ++---
 .../hadoop/hive/ql/exec/tez/LlapObjectCache.java   | 17 ++
 .../hive/ql/exec/tez/SessionExpirationTracker.java | 19 +++
 .../hadoop/hive/ql/exec/tez/SplitGrouper.java  |  5 +-
 .../hadoop/hive/ql/exec/tez/TezSessionPool.java|  9 +---
 .../hive/ql/exec/tez/TezSessionPoolManager.java|  4 +-
 .../hadoop/hive/ql/exec/tez/TezSessionState.java   |  8 +--
 .../hive/ql/exec/tez/TriggerValidatorRunnable.java |  4 +-
 .../hadoop/hive/ql/exec/tez/WorkloadManager.java   |  4 +-
 .../hive/ql/exec/tez/monitoring/TezJobMonitor.java |  4 +-
 .../hive/ql/exec/vector/VectorGroupByOperator.java |  7 +--
 .../fast/VectorMapJoinFastHashTableLoader.java | 17 +++---
 .../reducesink/VectorReduceSinkCommonOperator.java |  8 +--
 .../hadoop/hive/ql/io/CombineHiveInputFormat.java  |  7 +--
 .../apache/hadoop/hive/ql/io/HiveInputFormat.java  | 40 ---
 .../hadoop/hive/ql/io/orc/ExternalCache.java   |  4 +-
 .../hadoop/hive/ql/io/orc

[hive] branch master updated (21ac457 -> d9886e4)

2021-05-20 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 21ac457  HIVE-25109: CBO fails when updating table has constraints 
defined (Krisztian Kasa, reviewed by Zoltan Haindrich)
 add d9886e4  HIVE-25126: Remove Thrift Exceptions From RawStore 
getCatalogs (David Mollitor reviewed by Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 .../java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java | 2 +-
 .../src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java | 2 +-
 .../src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java| 2 +-
 .../src/main/java/org/apache/hadoop/hive/metastore/RawStore.java   | 3 +--
 .../main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java  | 2 +-
 .../apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java| 2 +-
 .../apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java| 2 +-
 7 files changed, 7 insertions(+), 8 deletions(-)


[hive] branch master updated (820662a -> f2de30c)

2021-05-13 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 820662a  Do not exclude avatica and protobuf for Iceberg (#2260) 
(Marton Bod, reviewed by Laszlo Pinter)
 add f2de30c  HIVE-25108: Do Not Log and Throw MetaExceptions (David 
Mollitor reviewed by Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 .../hadoop/hive/metastore/HiveMetaStoreClient.java | 22 +++---
 .../apache/hadoop/hive/metastore/Warehouse.java| 12 ++--
 .../hive/metastore/utils/MetaStoreUtils.java   | 10 +++---
 .../apache/hadoop/hive/metastore/HMSHandler.java   | 12 ++--
 .../hadoop/hive/metastore/HiveMetaStoreFsImpl.java |  2 +-
 .../metastore/HiveMetaStoreClientPreCatalog.java   | 20 ++--
 6 files changed, 37 insertions(+), 41 deletions(-)


[hive] branch master updated (971f2cf -> 6ea0f62)

2021-04-23 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 971f2cf  HIVE-25051: Callers can access uninitialized MessageBuilder 
instance causing NPE (#2210)
 add 6ea0f62  HIVE-24772: Revamp Server Request Error Logging (David 
Mollitor reviewed by Aihua Xu, Zoltan Chovan)

No new revisions were added by this update.

Summary of changes:
 .../apache/hive/minikdc/TestJdbcWithMiniKdc.java   |  28 +---
 .../apache/hive/service/cli/HiveSQLException.java  | 167 +
 .../hive/service/cli/thrift/ThriftCLIService.java  |  72 -
 .../hive/service/cli/TestHiveSQLException.java | 115 +-
 4 files changed, 81 insertions(+), 301 deletions(-)


[hive] branch master updated (57dc676 -> 8f33832)

2021-03-14 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 57dc676  HIVE-24758: Log Tez Task DAG ID, DAG Session ID, HS2 Hostname 
(David Mollitor reviewed by Panagiotis Garefalakis))
 add 8f33832  HIVE-24739: Clarify Usage of Thrift TServerEventHandler and 
Count Number of Messages Processed (David Mollitor reviewed by Peter Vary, 
Zhihua Deng)

No new revisions were added by this update.

Summary of changes:
 .../service/cli/thrift/ThriftBinaryCLIService.java | 67 +++-
 .../hive/service/cli/thrift/ThriftCLIService.java  | 89 +++---
 2 files changed, 123 insertions(+), 33 deletions(-)



[hive] branch master updated (9f8dc0d -> 57dc676)

2021-03-13 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 9f8dc0d  HIVE-24832: Remove Spring Artifacts from Log4j Properties 
Files (David Mollitor reviewed by Peter Vary)
 add 57dc676  HIVE-24758: Log Tez Task DAG ID, DAG Session ID, HS2 Hostname 
(David Mollitor reviewed by Panagiotis Garefalakis))

No new revisions were added by this update.

Summary of changes:
 .../apache/hadoop/hive/ql/exec/tez/TezTask.java| 31 +++---
 1 file changed, 16 insertions(+), 15 deletions(-)



[hive] branch master updated (2c92427 -> 9f8dc0d)

2021-03-13 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 2c92427  HIVE-24841: Parallel edge fixer may run into NPE when RS is 
missing a duplicate column from the output schema (#2035) (Zoltan Haindrich 
reviewed by Krisztian Kasa)
 add 9f8dc0d  HIVE-24832: Remove Spring Artifacts from Log4j Properties 
Files (David Mollitor reviewed by Peter Vary)

No new revisions were added by this update.

Summary of changes:
 .../metastore-common/src/test/resources/log4j2.properties   | 6 --
 .../metastore-server/src/test/resources/log4j2.properties   | 6 --
 2 files changed, 12 deletions(-)



[hive] branch master updated (f73db94 -> 4ef69f6)

2021-03-01 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from f73db94  Revert "HIVE-24624: Repl Load should detect the compatible 
staging dir (Pratyushotpal Madhukar, reviewed by Aasha Medhi, Pravin Kumar 
Sinha)" (#2025)
 add 4ef69f6  HIVE-24723: Use ExecutorService in TezSessionPool (David 
Mollitor reviewed by Laszlo Bodor, Panagiotis Garefalakis)

No new revisions were added by this update.

Summary of changes:
 .../hadoop/hive/ql/exec/tez/TezSessionPool.java| 137 +++--
 1 file changed, 74 insertions(+), 63 deletions(-)



[hive] branch master updated (a896e5f -> 0df19a6)

2021-02-25 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from a896e5f  HIVE-24823: Fix ide error in BasePartitionEvaluato (#2014) 
(Zoltan Haindrich reviewed by Rajesh Balamohan)
 add 0df19a6  HIVE-24811: Extend Cached Dates to Other Areas (David 
Mollitor reviewed by Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 .../hadoop/hive/common/type/TimestampUtils.java| 21 +++---
 .../apache/hadoop/hive/ql/udf/UDFDayOfWeek.java| 17 ++-
 .../apache/hadoop/hive/ql/udf/UDFWeekOfYear.java   | 19 ++---
 .../hadoop/hive/ql/udf/generic/GenericUDF.java |  7 ++---
 .../hive/ql/udf/generic/GenericUDFDateDiff.java| 20 +++--
 .../hive/ql/udf/generic/GenericUDFTrunc.java   |  6 ++--
 .../apache/hadoop/hive/serde2/RandomTypeUtil.java  |  4 +--
 .../primitive/PrimitiveObjectInspectorUtils.java   | 33 +-
 8 files changed, 65 insertions(+), 62 deletions(-)



[hive] branch master updated (1e4fc7a -> 0e9c0ce)

2021-02-23 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 1e4fc7a  HIVE-24775: Incorrect null handling when rebuilding 
Materialized view incrementally (Krisztian Kasa, reviewed by Jesus Camacho 
Rodriguez)
 add 0e9c0ce  HIVE-24808: Cache Parsed Dates (David Mollitor reviewed by 
Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 .../org/apache/hadoop/hive/common/type/Date.java   | 17 +-
 .../org/apache/hive/common/util/DateParser.java| 70 --
 .../apache/hive/common/util/TestDateParser.java| 16 ++---
 .../exec/vector/expressions/CastStringToDate.java  |  6 +-
 .../vector/expressions/TruncDateFromString.java|  3 +-
 .../vector/expressions/TruncDateFromTimestamp.java |  2 -
 .../vector/expressions/VectorUDFDateAddColCol.java | 11 ++--
 .../expressions/VectorUDFDateAddColScalar.java |  7 +--
 .../expressions/VectorUDFDateAddScalarCol.java |  3 +-
 .../hadoop/hive/ql/udf/generic/GenericUDFDate.java |  3 +-
 .../hive/ql/udf/generic/GenericUDFDateAdd.java |  3 +-
 11 files changed, 91 insertions(+), 50 deletions(-)



[hive] branch master updated (ffcc399 -> 6eed0a7)

2021-02-19 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from ffcc399  HIVE-24785: Fix HIVE_COMPACTOR_COMPACT_MM property (Peter 
Varga reviewed by Karen Coppage, Denys Kuzmenko)
 add 6eed0a7  HIVE-24693: Convert timestamps to zoned times without string 
operations (David Mollitor reviewed by Panagiotis Garefalakis)

No new revisions were added by this update.

Summary of changes:
 .../org/apache/hadoop/hive/common/type/Date.java   | 63 
 .../apache/hadoop/hive/common/type/Timestamp.java  | 87 ++
 .../hadoop/hive/common/type/TimestampTZUtil.java   |  9 ++-
 .../results/clientpositive/llap/udf_mask.q.out |  2 +-
 .../clientpositive/llap/udf_mask_first_n.q.out |  2 +-
 .../clientpositive/llap/udf_mask_last_n.q.out  |  2 +-
 .../llap/udf_mask_show_first_n.q.out   |  2 +-
 .../clientpositive/llap/udf_mask_show_last_n.q.out |  2 +-
 8 files changed, 113 insertions(+), 56 deletions(-)



[hive] branch master updated (632d70f -> a4a2627)

2021-02-05 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 632d70f  HIVE-24664: Support column aliases in Values clause 
(Krisztian Kasa, reviewed by Jesus Camacho Rodriguez)
 add a4a2627  HIVE-24657: Make Beeline Logging Dependencies Explicit (David 
Mollitor reviewed by Naveen Gangam)

No new revisions were added by this update.

Summary of changes:
 beeline/pom.xml | 44 +---
 1 file changed, 41 insertions(+), 3 deletions(-)



[hive] branch master updated (7281ab8 -> 8ef7f09)

2021-02-02 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 7281ab8  HIVE-24295: Apply schema merge to all shared work 
optimizations (#1662) (Zoltan Haindrich reviewed by Krisztian Kasa)
 add 8ef7f09  HIVE-24353: Performance: do not throw exceptions when parsing 
Timestamp (Vincenz Priesnitz via David Mollitor, reviewed by Bodor Laszlo, 
Panagiotis Garefalakis)

No new revisions were added by this update.

Summary of changes:
 .../hadoop/hive/common/type/TimestampTZUtil.java   | 40 +-
 1 file changed, 16 insertions(+), 24 deletions(-)



[hive] branch master updated (323ee0a -> 1781615)

2021-01-28 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 323ee0a  HIVE-24678: Add feature toggle to control SWO parallel edge 
support (#1912) (Zoltan Haindrich reviewed by Krisztian Kasa)
 add 1781615  HIVE-24661: Do Not Stringify Exception in Logger messages 
(David Mollitor reviewed by Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 .../apache/hive/storage/jdbc/JdbcRecordWriter.java |  2 +-
 .../java/org/apache/hadoop/hive/ql/Compiler.java   |  2 +-
 ql/src/java/org/apache/hadoop/hive/ql/Context.java | 28 +++--
 ql/src/java/org/apache/hadoop/hive/ql/Driver.java  |  2 +-
 .../ql/ddl/table/partition/PartitionUtils.java |  5 +-
 .../hadoop/hive/ql/exec/ExplainSQRewriteTask.java  |  2 +-
 .../apache/hadoop/hive/ql/exec/ExplainTask.java|  2 +-
 .../apache/hadoop/hive/ql/exec/ReplCopyTask.java   |  3 +-
 .../apache/hadoop/hive/ql/exec/ScriptOperator.java | 22 +++
 .../java/org/apache/hadoop/hive/ql/exec/Task.java  |  3 +-
 .../apache/hadoop/hive/ql/exec/mr/ExecReducer.java |  2 +-
 .../apache/hadoop/hive/ql/exec/mr/MapRedTask.java  |  2 +-
 .../hadoop/hive/ql/exec/tez/MapRecordSource.java   |  3 +-
 .../hive/ql/exec/tez/MergeFileRecordProcessor.java |  3 +-
 .../hadoop/hive/ql/exec/tez/TezProcessor.java  |  5 +-
 .../hadoop/hive/ql/history/HiveHistoryImpl.java|  2 +-
 .../ql/io/rcfile/truncate/ColumnTruncateTask.java  |  2 +-
 .../org/apache/hadoop/hive/ql/metadata/Hive.java   | 72 +++---
 .../hadoop/hive/ql/optimizer/GenMapRedUtils.java   |  4 +-
 .../hive/ql/optimizer/SimpleFetchOptimizer.java|  4 +-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java |  6 +-
 .../hadoop/hive/ql/stats/ColStatsProcessor.java|  3 +-
 .../hadoop/hive/ql/txn/compactor/CompactorMR.java  | 11 +---
 .../hive/ql/udf/generic/GenericUDAFAverage.java|  3 +-
 .../objectinspector/ObjectInspectorUtils.java  |  2 +-
 .../hadoop/hive/metastore/ReplChangeManager.java   |  4 +-
 .../hive/metastore/txn/CompactionTxnHandler.java   |  4 +-
 .../hive/metastore/utils/MetaStoreServerUtils.java |  2 +-
 28 files changed, 83 insertions(+), 122 deletions(-)



[hive] branch master updated (e68cfc8 -> 7d23a73)

2021-01-21 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from e68cfc8  HIVE-24672: compute_stats_long.q fails for wrong reasons 
(Mustafa Iman, reviewed by Attila Magyar and Laszlo Bodor)
 add 7d23a73  HIVE-24658: Move LogUtil Class to Metastore Server from 
Common (David Mollitor reviewed by Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 standalone-metastore/metastore-common/pom.xml  | 22 +-
 standalone-metastore/metastore-server/pom.xml  |  5 +
 .../hadoop/hive/metastore/utils/LogUtils.java  |  0
 standalone-metastore/pom.xml   | 12 
 4 files changed, 26 insertions(+), 13 deletions(-)
 rename standalone-metastore/{metastore-common => 
metastore-server}/src/main/java/org/apache/hadoop/hive/metastore/utils/LogUtils.java
 (100%)



[hive] branch master updated (368d288 -> d50543a)

2021-01-20 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 368d288  HIVE-24491 : setting custom job name is ineffective if the 
Tez session pool is configured or in case of session reuse (Raj Kumar Singh 
reviewed by Ashutosh Chauhan)
 add d50543a  HIVE-24659: Remove Commons Logger from serde Package (David 
Mollitor reviewed by Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 serde/pom.xml  | 28 +-
 .../teradata/TeradataBinaryDataOutputStream.java   |  4 
 .../hive/serde2/teradata/TeradataBinarySerde.java  |  6 ++---
 3 files changed, 30 insertions(+), 8 deletions(-)



[hive] branch master updated (e08e639 -> b2321c3)

2021-01-13 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from e08e639  HIVE-23684: Large underestimation in NDV stats when input and 
join cardinality ratio is big (Vineet Garg, reviewed by Jesus Camacho Rodriguez)
 add b2321c3  HIVE-24627: Add Debug Logging to Hive JDBC Connection (David 
Mollitor reviewed by Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 .../java/org/apache/hive/jdbc/HiveConnection.java  | 25 --
 1 file changed, 23 insertions(+), 2 deletions(-)



[hive] branch master updated (de0a811 -> 68a759a)

2021-01-12 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from de0a811  HIVE-24616: Add Logging to Track Query Status (David Mollitor 
reviewed by Miklos Gergely)
 add 68a759a  HIVE-24617: Review beeline Driver Scanning Code (David 
Mollitor reviewed by Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 .../src/java/org/apache/hive/beeline/BeeLine.java  | 61 +-
 .../src/java/org/apache/hive/beeline/Commands.java |  6 +--
 2 files changed, 29 insertions(+), 38 deletions(-)



[hive] branch master updated (a8f9ed5 -> de0a811)

2021-01-12 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from a8f9ed5  HIVE-24586: Rename compaction 'attempted' status (Karen 
Coppage, reviewed by Peter Vary)
 add de0a811  HIVE-24616: Add Logging to Track Query Status (David Mollitor 
reviewed by Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java | 5 +
 1 file changed, 5 insertions(+)



[hive] branch master updated: HIVE-24592: Revert "HIVE-24550: Cleanup only transaction information for the current DriverContext (Peter Varga via Peter Vary)"

2021-01-06 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new bf14f73  HIVE-24592: Revert "HIVE-24550: Cleanup only transaction 
information for the current DriverContext (Peter Varga via Peter Vary)"
bf14f73 is described below

commit bf14f73aaff3ed16bb35b0c21e644ba8fba52d10
Author: David Mollitor 
AuthorDate: Wed Jan 6 11:24:38 2021 -0500

HIVE-24592: Revert "HIVE-24550: Cleanup only transaction information for 
the current DriverContext (Peter Varga via Peter Vary)"

This reverts commit 3108ec61ed9ef1f1f29a56138e85c01c1837ab4e.
---
 .../hive/jdbc/miniHS2/TestHiveServer2Acid.java | 123 -
 ql/src/java/org/apache/hadoop/hive/ql/Driver.java  |   2 +-
 .../apache/hadoop/hive/ql/DriverTxnHandler.java|   8 +-
 .../hadoop/hive/ql/lockmgr/DbTxnManager.java   |   7 --
 .../hadoop/hive/ql/lockmgr/DummyTxnManager.java|   5 -
 .../hadoop/hive/ql/lockmgr/HiveTxnManager.java |   6 -
 6 files changed, 3 insertions(+), 148 deletions(-)

diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2Acid.java
 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2Acid.java
deleted file mode 100644
index 03be271..0000000
--- 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2Acid.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.jdbc.miniHS2;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
-import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hive.jdbc.TestJdbcWithMiniHS2;
-import org.apache.hive.service.cli.CLIServiceClient;
-import org.apache.hive.service.cli.HiveSQLException;
-import org.apache.hive.service.cli.OperationHandle;
-import org.apache.hive.service.cli.OperationState;
-import org.apache.hive.service.cli.OperationStatus;
-import org.apache.hive.service.cli.RowSet;
-import org.apache.hive.service.cli.SessionHandle;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.junit.Assert.assertEquals;
-
-/**
- * End to end test with MiniHS2 with ACID enabled.
- */
-public class TestHiveServer2Acid {
-
-  private static MiniHS2 miniHS2 = null;
-  private static Map<String, String> confOverlay = new HashMap<>();
-
-  @BeforeClass
-  public static void beforeTest() throws Exception {
-HiveConf conf = new HiveConf();
-TxnDbUtil.setConfValues(conf);
-TxnDbUtil.prepDb(conf);
-miniHS2 = new MiniHS2(conf, MiniHS2.MiniClusterType.TEZ);
-confOverlay.put(ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, "false");
-miniHS2.start(confOverlay);
-  }
-
-  @AfterClass
-  public static void afterTest() throws Exception {
-miniHS2.stop();
-  }
-
-  public static class SleepMsUDF extends UDF {
-public Integer evaluate(final Integer value, final Integer ms) {
-  try {
-Thread.sleep(ms);
-  } catch (InterruptedException e) {
-// No-op
-  }
-  return value;
-}
-  }
-
-  /**
-   * Test overlapping async queries in one session.
-   * Since TxnManager is shared in the session this can cause all kind of 
trouble.
-   * @throws Exception ex
-   */
-  @Test
-  public void testAsyncConcurrent() throws Exception {
-String tableName = "TestHiveServer2TestConnection";
-CLIServiceClient serviceClient = miniHS2.getServiceClient();
-SessionHandle sessHandle = serviceClient.openSession("foo", "bar");
-serviceClient.executeStatement(sessHandle, "DROP TABLE IF EXISTS " + 
tableName, confOverlay);
-serviceClient.executeStatement(sessHandle, "CREATE TABLE " + tableName + " 
(id INT)", confOverlay);
-serviceClient.executeStatement(sessHandle, "insert into " + ta

[hive] branch master updated (9c4c54c -> ff6f356)

2021-01-04 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 9c4c54c  HIVE-24434: Filter out materialized views for rewriting if 
plan pattern is not allowed (Krisztian Kasa, reviewed by Jesus Camacho 
Rodriguez)
 add ff6f356  HIVE-24560: Move Logging and Column Name,Type Parsing to 
AbstractSerde Class (David Mollitor reviewed by Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 .../hadoop/hive/accumulo/serde/AccumuloSerDe.java  | 13 
 .../hadoop/hive/contrib/serde2/RegexSerDe.java | 55 ++---
 .../hive/contrib/serde2/TypedBytesSerDe.java   | 53 
 .../apache/hadoop/hive/druid/serde/DruidSerDe.java | 11 ++--
 .../apache/hive/hcatalog/data/HCatRecordSerDe.java | 46 ++
 .../org/apache/hive/hcatalog/data/JsonSerDe.java   |  9 +--
 .../org/apache/hive/storage/jdbc/JdbcSerDe.java| 17 ++
 .../apache/hadoop/hive/kafka/KafkaJsonSerDe.java   | 36 +++
 .../org/apache/hadoop/hive/kafka/KafkaSerDe.java   |  6 +-
 .../hadoop/hive/ql/exec/FunctionRegistry.java  |  4 +-
 .../hive/ql/exec/vector/VectorDeserializeRow.java  |  2 +-
 .../hive/ql/exec/vector/VectorizationContext.java  |  2 +-
 .../expressions/ConstantVectorExpression.java  |  2 +-
 .../hive/ql/io/arrow/ArrowColumnarBatchSerDe.java  | 36 ++-
 .../apache/hadoop/hive/ql/io/arrow/Serializer.java |  6 +-
 .../org/apache/hadoop/hive/ql/io/orc/OrcSerde.java | 48 +--
 .../apache/hadoop/hive/ql/io/orc/OrcStruct.java|  4 +-
 .../hive/ql/io/parquet/serde/ParquetHiveSerDe.java | 29 +
 .../hadoop/hive/ql/log/syslog/SyslogSerDe.java | 22 ---
 .../hive/ql/optimizer/physical/Vectorizer.java |  5 +-
 .../apache/hadoop/hive/ql/parse/PTFTranslator.java | 11 
 .../hadoop/hive/ql/plan/PTFDeserializer.java   | 15 +++--
 .../hive/ql/exec/vector/VectorVerifyFast.java  |  6 +-
 .../ql/exec/vector/mapjoin/fast/VerifyFastRow.java |  6 +-
 .../hive/serde2/AbstractEncodingAwareSerDe.java|  7 +--
 .../apache/hadoop/hive/serde2/AbstractSerDe.java   | 53 ++--
 .../org/apache/hadoop/hive/serde2/JsonSerDe.java   | 71 ++
 .../hive/serde2/MetadataTypedColumnsetSerDe.java   |  2 +-
 .../apache/hadoop/hive/serde2/OpenCSVSerde.java| 13 +---
 .../org/apache/hadoop/hive/serde2/RegexSerDe.java  | 45 +-
 .../hadoop/hive/serde2/avro/AvroDeserializer.java  |  4 +-
 .../hadoop/hive/serde2/avro/AvroSerializer.java|  3 +-
 .../serde2/binarysortable/BinarySortableSerDe.java | 44 +++---
 .../hadoop/hive/serde2/columnar/ColumnarSerDe.java |  5 --
 .../hadoop/hive/serde2/lazy/LazySimpleSerDe.java   | 13 
 .../apache/hadoop/hive/serde2/lazy/VerifyLazy.java |  2 +-
 .../hive/serde2/lazybinary/LazyBinarySerDe.java| 26 +---
 .../hive/serde2/typeinfo/StructTypeInfo.java   | 17 +++---
 .../apache/hadoop/hive/serde2/TestJsonSerDe.java   | 21 ---
 .../org/apache/hadoop/hive/serde2/VerifyFast.java  |  6 +-
 .../avro/TestAvroObjectInspectorGenerator.java |  5 +-
 41 files changed, 283 insertions(+), 498 deletions(-)



[hive] branch master updated (031b456 -> a97448f)

2020-12-21 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 031b456  HIVE-24547: Fix acid_vectorization_original (Krisztian Kasa, 
reviewed by Zoltan Haindrich)
 add a97448f  HIVE-24332: Make AbstractSerDe Superclass of all SerDes 
(David Mollitor reviewed by Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 .../hadoop/hive/accumulo/serde/AccumuloSerDe.java  |   7 +-
 .../hive/accumulo/serde/TestAccumuloSerDe.java |  26 ++---
 .../serde/TestDefaultAccumuloRowIdFactory.java |   6 +-
 .../hadoop/hive/contrib/serde2/RegexSerDe.java |  88 +++
 .../hive/contrib/serde2/TypedBytesSerDe.java   |  12 +-
 .../hive/contrib/serde2/s3/S3LogDeserializer.java  |  25 +++--
 .../hadoop/hive/contrib/serde2/TestRegexSerDe.java |   3 +-
 .../apache/hadoop/hive/druid/serde/DruidSerDe.java |   5 +-
 .../hadoop/hive/druid/serde/TestDruidSerDe.java|  19 ++--
 .../org/apache/hadoop/hive/hbase/HBaseSerDe.java   |  24 ++--
 .../apache/hadoop/hive/hbase/TestHBaseSerDe.java   |  49 
 .../apache/hive/hcatalog/data/HCatRecordSerDe.java |  17 ++-
 .../org/apache/hive/hcatalog/data/JsonSerDe.java   |   7 +-
 .../hive/hcatalog/mapreduce/InternalUtil.java  |  11 +-
 .../hive/hcatalog/data/TestHCatRecordSerDe.java|   5 +-
 .../apache/hive/hcatalog/data/TestJsonSerDe.java   |  15 ++-
 .../rcfile/TestRCFileMapReduceInputFormat.java |   3 +-
 .../apache/hadoop/hive/serde2/CustomSerDe1.java|  14 ++-
 .../apache/hadoop/hive/serde2/CustomSerDe2.java|  15 ++-
 .../apache/hadoop/hive/serde2/CustomSerDe3.java|  22 ++--
 .../apache/hadoop/hive/serde2/CustomSerDe4.java|  81 +++--
 .../apache/hadoop/hive/serde2/CustomSerDe5.java|  81 ++---
 .../benchmark/storage/ColumnarStorageBench.java|   4 +-
 .../apache/hadoop/hive/serde2/TestSysLogSerDe.java |   5 +-
 .../org/apache/hadoop/hive/serde2/TestSerDe.java   |  11 +-
 .../org/apache/hive/storage/jdbc/JdbcSerDe.java|  47 
 .../apache/hadoop/hive/kafka/KafkaJsonSerDe.java   |  18 +--
 .../org/apache/hadoop/hive/kafka/KafkaSerDe.java   |  42 +++
 .../org/apache/hadoop/hive/kudu/KuduSerDe.java |   8 +-
 .../org/apache/hadoop/hive/kudu/TestKuduSerDe.java |   8 +-
 llap-client/pom.xml|  22 
 .../hadoop/hive/llap/LlapRowRecordReader.java  |   2 +-
 .../hadoop/hive/metastore/HiveMetaStoreUtils.java  |  42 ---
 .../ql/ddl/table/info/desc/DescTableOperation.java |   6 -
 .../hive/ql/exec/AppMasterEventOperator.java   |   8 +-
 .../apache/hadoop/hive/ql/exec/DemuxOperator.java  |  15 +--
 .../apache/hadoop/hive/ql/exec/FetchOperator.java  |   4 +-
 .../hadoop/hive/ql/exec/FileSinkOperator.java  |   7 +-
 .../hive/ql/exec/HashTableDummyOperator.java   |   9 +-
 .../hadoop/hive/ql/exec/HashTableSinkOperator.java |  11 +-
 .../org/apache/hadoop/hive/ql/exec/JoinUtil.java   |   5 +-
 .../hadoop/hive/ql/exec/MapJoinOperator.java   |  13 +--
 .../apache/hadoop/hive/ql/exec/MapOperator.java|   3 +-
 .../hadoop/hive/ql/exec/ReduceSinkOperator.java|  14 ++-
 .../apache/hadoop/hive/ql/exec/ScriptOperator.java |  17 ++-
 .../hadoop/hive/ql/exec/SkewJoinHandler.java   |   5 +-
 .../org/apache/hadoop/hive/ql/exec/Utilities.java  |   7 +-
 .../apache/hadoop/hive/ql/exec/mr/ExecReducer.java |  23 ++--
 .../ql/exec/spark/SparkDynamicPartitionPruner.java |   6 +-
 .../ql/exec/spark/SparkReduceRecordHandler.java|  27 +++--
 .../hive/ql/exec/tez/DynamicPartitionPruner.java   |   7 +-
 .../hive/ql/exec/tez/DynamicValueRegistryTez.java  |  10 +-
 .../hive/ql/exec/tez/ReduceRecordSource.java   |  27 ++---
 .../hive/ql/io/arrow/ArrowColumnarBatchSerDe.java  |  23 ++--
 .../apache/hadoop/hive/ql/io/arrow/Serializer.java |   7 +-
 .../org/apache/hadoop/hive/ql/io/orc/OrcSerde.java |  14 ++-
 .../hive/ql/io/parquet/serde/ParquetHiveSerDe.java |  19 ++--
 .../ql/io/protobuf/ProtobufBytesWritableSerDe.java |   6 +-
 .../hadoop/hive/ql/io/protobuf/ProtobufSerDe.java  |   9 +-
 .../hadoop/hive/ql/log/syslog/SyslogSerDe.java |   5 +-
 .../hadoop/hive/ql/optimizer/GenMapRedUtils.java   |  10 +-
 .../TablePropertyEnrichmentOptimizer.java  |   8 +-
 .../hive/ql/optimizer/physical/Vectorizer.java |  18 +--
 .../apache/hadoop/hive/ql/parse/PTFTranslator.java |   2 +-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java |   5 +-
 .../spark/SparkPartitionPruningSinkOperator.java   |   2 +-
 .../apache/hadoop/hive/ql/plan/MapJoinDesc.java|  12 +-
 .../hadoop/hive/ql/plan/PTFDeserializer.java   |   2 +-
 .../apache/hadoop/hive/ql/plan/PartitionDesc.java  |  10 +-
 .../org/apache/hadoop/hive/ql/plan/TableDesc.java  |  27 +++--
 .../hadoop/hive/ql/exec/TestFileSinkOperator.java  |   4 +-
 .../persistence/TestMapJoinTableContainer.java |   5 +-
 .../ql/exec/persistence

[hive] branch master updated (6abd458 -> 2cde696)

2020-12-17 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 6abd458  HIVE-24542: Prepare Guava for Upgrades (David Mollitor 
reviewed by Miklos Gergely)
 add 2cde696  HIVE-24432: Delete Notification Events in Batches (David 
Mollitor reviewed by Naveen Gangam, Aasha Medhi)

No new revisions were added by this update.

Summary of changes:
 .../apache/hadoop/hive/metastore/ObjectStore.java  | 117 ++---
 .../org/apache/hadoop/hive/metastore/RawStore.java |   3 +-
 .../hadoop/hive/metastore/TestObjectStore.java |   7 +-
 3 files changed, 84 insertions(+), 43 deletions(-)



[hive] branch master updated (3f5e01c -> 6abd458)

2020-12-17 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 3f5e01c  disable test
 add 6abd458  HIVE-24542: Prepare Guava for Upgrades (David Mollitor 
reviewed by Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 common/pom.xml  |  4 
 .../org/apache/hadoop/hive/common/log/InPlaceUpdate.java| 13 +++--
 llap-common/pom.xml |  4 
 .../java/org/apache/hadoop/hive/llap/AsyncPbRpcProxy.java   |  2 +-
 llap-server/pom.xml |  4 
 .../org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java |  6 +++---
 .../hadoop/hive/llap/daemon/impl/LlapTaskReporter.java  |  2 +-
 .../hadoop/hive/llap/daemon/impl/TaskExecutorService.java   |  2 +-
 .../hive/llap/tezplugins/LlapTaskSchedulerService.java  | 10 ++
 .../org/apache/hadoop/hive/ql/exec/tez/WorkloadManager.java |  7 ---
 .../hadoop/hive/ql/exec/tez/SampleTezSessionState.java  |  3 ++-
 standalone-metastore/metastore-tools/tools-common/pom.xml   |  4 
 storage-api/pom.xml |  5 ++---
 13 files changed, 39 insertions(+), 27 deletions(-)



[hive] branch master updated (aab7fdd -> cce5961)

2020-12-15 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from aab7fdd  HIVE-24322: In case of direct insert, the attempt ID has to 
be checked when reading the manifest files (#1774) (reviewed by Adam Szita)
 add cce5961  HIVE-24460: Refactor Get Next Event ID for 
DbNotificationListener (David Mollitor reviewed by Miklos Gergely, Aasha Medhi)

No new revisions were added by this update.

Summary of changes:
 .../hcatalog/listener/DbNotificationListener.java  | 78 --
 .../hadoop/hive/metastore/tools/SQLGenerator.java  | 11 +++
 2 files changed, 67 insertions(+), 22 deletions(-)



[hive] branch master updated (fb046c7 -> 5317f42)

2020-12-14 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from fb046c7  HIVE-24503 : Optimize vector row serde by avoiding type check 
at run time. (Mahesh Kumar Behera, reviewed by  Rajesh Balamohan, Panos 
Garefalakis)
 add 5317f42  HIVE-24468: Use Event Time instead of Current Time in 
Notification Log DB Entry (David Mollitor reviewed by Aasha, Naveen Gangam)

No new revisions were added by this update.

Summary of changes:
 .../java/org/apache/hive/hcatalog/listener/DbNotificationListener.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)



[hive] branch master updated (d180445 -> f0814f0)

2020-11-30 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from d180445  Incorrect commit message for HIVE-24423 (missing reviewers)
 add f0814f0  HIVE-24424: Use PreparedStatements in DbNotificationListener 
getNextNLId (David Mollitor reviewed by Bodor Laszlo, Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 .../hcatalog/listener/DbNotificationListener.java  | 58 ++
 1 file changed, 38 insertions(+), 20 deletions(-)



[hive] branch master updated (23bc546 -> d180445)

2020-11-30 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 23bc546  HIVE-24423: Improve DbNotificationListener Thread (#1703)
 add d180445  Incorrect commit message for HIVE-24423 (missing reviewers)

No new revisions were added by this update.

Summary of changes:
 errata.txt | 1 +
 1 file changed, 1 insertion(+)



[hive] branch master updated (aed7c86 -> 23bc546)

2020-11-30 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from aed7c86  HIVE-24389: Trailing zeros of constant decimal numbers are 
removed (Krisztian Kasa, reviewed by Jesus Camacho Rodriguez)
 add 23bc546  HIVE-24423: Improve DbNotificationListener Thread (#1703)

No new revisions were added by this update.

Summary of changes:
 .../hcatalog/listener/DbNotificationListener.java  | 52 +-
 1 file changed, 20 insertions(+), 32 deletions(-)



[hive] branch master updated: HIVE-24321: Implement Default getSerDeStats in AbstractSerDe (David Mollitor, reviewed by Panagiotis Garefalakis, Miklos Gergely)

2020-10-30 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new a4c5ddd  HIVE-24321: Implement Default getSerDeStats in AbstractSerDe 
(David Mollitor, reviewed by Panagiotis Garefalakis, Miklos Gergely)
a4c5ddd is described below

commit a4c5ddd7da78d42343914d670c1c824bb1ad2395
Author: belugabehr <12578579+belugab...@users.noreply.github.com>
AuthorDate: Fri Oct 30 09:28:26 2020 -0400

HIVE-24321: Implement Default getSerDeStats in AbstractSerDe (David 
Mollitor, reviewed by Panagiotis Garefalakis, Miklos Gergely)
---
 .../hadoop/hive/accumulo/serde/AccumuloSerDe.java  |  6 --
 .../apache/hadoop/hive/contrib/serde2/RegexSerDe.java  |  7 ---
 .../hadoop/hive/contrib/serde2/TypedBytesSerDe.java|  5 -
 .../org/apache/hadoop/hive/druid/serde/DruidSerDe.java |  6 --
 .../java/org/apache/hadoop/hive/hbase/HBaseSerDe.java  |  7 ---
 .../org/apache/hive/hcatalog/data/HCatRecordSerDe.java |  8 
 .../java/org/apache/hive/hcatalog/data/JsonSerDe.java  |  7 ---
 .../org/apache/hadoop/hive/serde2/CustomSerDe1.java|  6 --
 .../org/apache/hadoop/hive/serde2/CustomSerDe2.java|  6 --
 .../java/org/apache/hadoop/hive/serde2/TestSerDe.java  |  6 --
 .../java/org/apache/hive/storage/jdbc/JdbcSerDe.java   |  7 ---
 .../java/org/apache/hadoop/hive/kudu/KuduSerDe.java|  7 ---
 .../hive/ql/io/arrow/ArrowColumnarBatchSerDe.java  |  6 --
 .../org/apache/hadoop/hive/ql/io/orc/OrcSerde.java | 17 +++--
 .../hive/ql/io/parquet/serde/ParquetHiveSerDe.java | 18 +++---
 .../hadoop/hive/ql/io/protobuf/ProtobufSerDe.java  |  6 --
 .../apache/hadoop/hive/ql/log/syslog/SyslogSerDe.java  |  6 --
 .../hadoop/hive/ql/exec/TestFileSinkOperator.java  |  5 -
 .../org/apache/hadoop/hive/serde2/AbstractSerDe.java   |  9 +++--
 .../java/org/apache/hadoop/hive/serde2/JsonSerDe.java  |  6 --
 .../hive/serde2/MetadataTypedColumnsetSerDe.java   |  6 --
 .../apache/hadoop/hive/serde2/MultiDelimitSerDe.java   |  6 --
 .../org/apache/hadoop/hive/serde2/NullStructSerDe.java |  5 -
 .../org/apache/hadoop/hive/serde2/OpenCSVSerde.java|  5 -
 .../java/org/apache/hadoop/hive/serde2/RegexSerDe.java |  6 --
 .../java/org/apache/hadoop/hive/serde2/TypedSerDe.java |  6 --
 .../org/apache/hadoop/hive/serde2/avro/AvroSerDe.java  |  7 ---
 .../serde2/binarysortable/BinarySortableSerDe.java |  7 ---
 .../hive/serde2/teradata/TeradataBinarySerde.java  |  6 --
 .../hive/serde2/thrift/ThriftJDBCBinarySerDe.java  |  6 --
 30 files changed, 13 insertions(+), 198 deletions(-)

diff --git 
a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDe.java
 
b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDe.java
index 6fa48dd..9607b6d 100644
--- 
a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDe.java
+++ 
b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDe.java
@@ -30,7 +30,6 @@ import org.apache.hadoop.hive.accumulo.columns.ColumnMapping;
 import org.apache.hadoop.hive.accumulo.columns.HiveAccumuloRowIdColumnMapping;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
 import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import 
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory;
@@ -142,11 +141,6 @@ public class AccumuloSerDe extends AbstractSerDe {
 return cachedObjectInspector;
   }
 
-  @Override
-  public SerDeStats getSerDeStats() {
-return null;
-  }
-
   public AccumuloSerDeParameters getParams() {
 return accumuloSerDeParameters;
   }
diff --git 
a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java 
b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java
index 45f32dd..5ac3d4a 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java
@@ -32,7 +32,6 @@ import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
-import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -270,10 +269,4 @@ public class RegexSerDe extends AbstractSerDe {
 return outputRowText;
   }
 
-  @Override
-  public SerD

[hive] branch master updated (78840c2 -> 0e4e1ac)

2020-10-28 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 78840c2  HIVE-24294: TezSessionPool sessions can throw AssertionError 
(#1596) (Naresh Panchetty Ramanaiah, reviewed by Mustafa Iman, Laszlo Pinter)
 add 0e4e1ac  HIVE-23829: Compute Stats Incorrect for Binary Columns 
(Hunter Logan, reviewed by David Mollitor)

No new revisions were added by this update.

Summary of changes:
 .../clientpositive/acid_vectorization_original_tez.q   |  3 ++-
 .../test/queries/clientpositive/avro_nullable_fields.q |  3 ++-
 ql/src/test/queries/clientpositive/llap_text.q |  6 --
 .../queries/clientpositive/metadata_only_queries.q |  3 ++-
 .../metadata_only_queries_with_filters.q   |  3 ++-
 ql/src/test/queries/clientpositive/orc_llap_counters.q |  3 ++-
 .../test/queries/clientpositive/orc_llap_counters1.q   |  3 ++-
 .../clientpositive/vector_binary_join_groupby.q|  3 ++-
 .../vector_windowing_multipartitioning.q   |  3 ++-
 .../queries/clientpositive/vector_windowing_navfn.q|  3 ++-
 .../test/queries/clientpositive/windowing_distinct.q   |  3 ++-
 .../clientpositive/windowing_multipartitioning.q   |  3 ++-
 ql/src/test/queries/clientpositive/windowing_navfn.q   |  3 ++-
 .../clientpositive/llap/avro_nullable_fields.q.out |  2 ++
 .../test/results/clientpositive/llap/llap_text.q.out   |  8 ++--
 .../clientpositive/llap/metadata_only_queries.q.out|  2 ++
 .../llap/metadata_only_queries_with_filters.q.out  |  2 ++
 .../clientpositive/llap/orc_llap_counters.q.out|  2 ++
 .../clientpositive/llap/orc_llap_counters1.q.out   |  2 ++
 .../llap/vector_binary_join_groupby.q.out  |  2 ++
 .../llap/vector_windowing_multipartitioning.q.out  |  2 ++
 .../clientpositive/llap/vector_windowing_navfn.q.out   |  2 ++
 .../clientpositive/llap/windowing_distinct.q.out   |  2 ++
 .../llap/windowing_multipartitioning.q.out |  2 ++
 .../results/clientpositive/llap/windowing_navfn.q.out  |  2 ++
 .../tez/acid_vectorization_original_tez.q.out  |  2 ++
 .../hadoop/hive/serde2/lazy/LazySerDeParameters.java   | 11 ++-
 .../serde2/lazy/fast/LazySimpleDeserializeRead.java| 18 ++
 28 files changed, 78 insertions(+), 25 deletions(-)



[hive] branch master updated: HIVE-23874: Add debug line for fetchRequest in the HiveQueryResultSet (Hunter Logan, reviewed by David Mollitor)

2020-07-28 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 51e0d72  HIVE-23874: Add debug line for fetchRequest in the 
HiveQueryResultSet (Hunter Logan, reviewed by David Mollitor)
51e0d72 is described below

commit 51e0d7233c7df97b6b8fc3f243116b914db91c87
Author: Hunter Logan 
AuthorDate: Tue Jul 28 09:02:37 2020 -0400

HIVE-23874: Add debug line for fetchRequest in the HiveQueryResultSet 
(Hunter Logan, reviewed by David Mollitor)
---
 jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
index df31a25..34ec66b 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
@@ -332,7 +332,7 @@ public class HiveQueryResultSet extends HiveBaseResultSet {
 try {
   TFetchOrientation orientation = TFetchOrientation.FETCH_NEXT;
   if (fetchFirst) {
-// If we are asked to start from begining, clear the current fetched 
resultset
+// If we are asked to start from beginning, clear the current fetched 
resultset
 orientation = TFetchOrientation.FETCH_FIRST;
 fetchedRows = null;
 fetchedRowsItr = null;
@@ -341,6 +341,7 @@ public class HiveQueryResultSet extends HiveBaseResultSet {
   if (fetchedRows == null || !fetchedRowsItr.hasNext()) {
 TFetchResultsReq fetchReq = new TFetchResultsReq(stmtHandle,
 orientation, fetchSize);
+LOG.debug("HiveQueryResultsFetchReq: {}", fetchReq);
 TFetchResultsResp fetchResp;
 fetchResp = client.FetchResults(fetchReq);
 Utils.verifySuccessWithInfo(fetchResp.getStatus());



[hive] branch master updated: HIVE-23899: Replace Base64 in llap Packages (David Mollitor, reviewed by Miklos Gergely)

2020-07-24 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 82d7a2f  HIVE-23899: Replace Base64 in llap Packages (David Mollitor, 
reviewed by Miklos Gergely)
82d7a2f is described below

commit 82d7a2fb2fbd22a4f5f1fb6e1261a2920386ce56
Author: belugabehr <12578579+belugab...@users.noreply.github.com>
AuthorDate: Fri Jul 24 09:14:13 2020 -0400

HIVE-23899: Replace Base64 in llap Packages (David Mollitor, reviewed by 
Miklos Gergely)
---
 .../src/java/org/apache/hadoop/hive/registry/impl/TezAmInstance.java  | 4 ++--
 .../apache/hadoop/hive/llap/tezplugins/LlapTaskSchedulerService.java  | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git 
a/llap-client/src/java/org/apache/hadoop/hive/registry/impl/TezAmInstance.java 
b/llap-client/src/java/org/apache/hadoop/hive/registry/impl/TezAmInstance.java
index a862947..b31175e 100644
--- 
a/llap-client/src/java/org/apache/hadoop/hive/registry/impl/TezAmInstance.java
+++ 
b/llap-client/src/java/org/apache/hadoop/hive/registry/impl/TezAmInstance.java
@@ -14,8 +14,8 @@
 package org.apache.hadoop.hive.registry.impl;
 
 import java.io.IOException;
+import java.util.Base64;
 
-import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.registry.client.binding.RegistryTypeUtils;
 import org.apache.hadoop.registry.client.types.AddressTypes;
@@ -66,7 +66,7 @@ public class TezAmInstance extends ServiceInstanceBase {
 if (this.token != null) return token;
 String tokenString = 
getProperties().get(TezAmRegistryImpl.AM_PLUGIN_TOKEN);
 if (tokenString == null || tokenString.isEmpty()) return null;
-byte[] tokenBytes = Base64.decodeBase64(tokenString);
+byte[] tokenBytes = Base64.getDecoder().decode(tokenString);
 Token token = new Token<>();
 try {
   token.readFields(ByteStreams.newDataInput(tokenBytes));
diff --git 
a/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskSchedulerService.java
 
b/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskSchedulerService.java
index d4cc7a3..9012972 100644
--- 
a/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskSchedulerService.java
+++ 
b/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskSchedulerService.java
@@ -30,6 +30,7 @@ import 
org.apache.hadoop.hive.registry.ServiceInstanceStateChangeListener;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Base64;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
@@ -67,7 +68,6 @@ import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
-import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang3.mutable.MutableInt;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.JvmPauseMonitor;
@@ -565,7 +565,7 @@ public class LlapTaskSchedulerService extends TaskScheduler 
{
   // This shouldn't really happen on a byte array.
   throw new RuntimeException(e);
 }
-return Base64.encodeBase64String(bytes);
+return Base64.getEncoder().withoutPadding().encodeToString(bytes);
   }
 
 



[hive] branch master updated: HIVE-20771: LazyBinarySerDe fails on empty structs (Clemens Valiente via David Mollitor, reviewed by Jesús Rodríguez)

2020-07-24 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 0a9f3bb  HIVE-20771:  LazyBinarySerDe fails on empty structs (Clemens 
Valiente via David Mollitor, reviewed by Jesús Rodríguez)
0a9f3bb is described below

commit 0a9f3bb9a18043dbafb0ac218b6721f3260be781
Author: Hunter Logan 
AuthorDate: Fri Jul 24 09:13:21 2020 -0400

HIVE-20771:  LazyBinarySerDe fails on empty structs (Clemens Valiente via 
David Mollitor, reviewed by Jesús Rodríguez)
---
 .../serde2/lazybinary/LazyBinaryNonPrimitive.java  |  4 +-
 .../serde2/lazybinary/TestLazyBinaryStruct.java| 67 ++
 2 files changed, 69 insertions(+), 2 deletions(-)

diff --git 
a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryNonPrimitive.java
 
b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryNonPrimitive.java
index 05d05c6..82946d3 100644
--- 
a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryNonPrimitive.java
+++ 
b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryNonPrimitive.java
@@ -50,8 +50,8 @@ public abstract class LazyBinaryNonPrimitive
 if (null == bytes) {
   throw new RuntimeException("bytes cannot be null!");
 }
-if (length <= 0) {
-  throw new RuntimeException("length should be positive!");
+if (length < 0) {
+  throw new RuntimeException("length should be non-negative!");
 }
 this.bytes = bytes;
 this.start = start;
diff --git 
a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinaryStruct.java
 
b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinaryStruct.java
new file mode 100644
index 000..aa6b823
--- /dev/null
+++ 
b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinaryStruct.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.lazybinary;
+
+import java.util.ArrayList;
+import java.util.Properties;
+import junit.framework.TestCase;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import 
org.apache.hadoop.hive.serde2.lazybinary.objectinspector.LazyBinaryObjectInspectorFactory;
+import 
org.apache.hadoop.hive.serde2.lazybinary.objectinspector.LazyBinaryStructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import 
org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
+import org.apache.hadoop.io.Writable;
+import org.junit.Test;
+
+public class TestLazyBinaryStruct extends TestCase {
+
+  @Test
+  public void testEmptyStruct() {
+LazyBinaryStructObjectInspector oi = LazyBinaryObjectInspectorFactory
+.getLazyBinaryStructObjectInspector(new ArrayList<>(), new 
ArrayList<>());
+
+ByteArrayRef byteRef = new ByteArrayRef();
+byteRef.setData(new byte[]{0});
+
+LazyBinaryStruct data = (LazyBinaryStruct) 
LazyBinaryFactory.createLazyBinaryObject(oi);
+data.init(byteRef, 0, 0);
+
+assertEquals(data.getRawDataSerializedSize(), 0);
+  }
+  
+  @Test
+  public void testEmptyStructWithSerde() throws SerDeException {
+LazyBinaryStructObjectInspector oi = LazyBinaryObjectInspectorFactory
+.getLazyBinaryStructObjectInspector(new ArrayList<>(), new 
ArrayList<>());
+StandardStructObjectInspector standardOI = ObjectInspectorFactory
+.getStandardStructObjectInspector(new ArrayList<>(), new 
ArrayList<>());
+Properties schema = new Properties();
+schema.setProperty(serdeConstants.LIST_COLUMNS, "col0");
+schema.setProperty(serdeConstants.LIST_COLUMN_TYPES, "struct<>");
+
+LazyBinarySerDe serde = new LazyBinarySerDe();
+SerDeUtils.initializeSerDe(serde, new Configuration(), schema, null);
+

[hive] branch master updated (610a663 -> 22d4493)

2020-07-23 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 610a663  HIVE-23198: Add matching logic between CacheTags and 
proactive eviction requests (Adam Szita, reviewed by Peter Vary)
 add 22d4493  HIVE-23865: Use More Java Collections Class (David Mollitor, 
reviewed by Miklos Gergely)

No new revisions were added by this update.

Summary of changes:
 .../apache/hive/hcatalog/pig/HCatBaseStorer.java   | 12 +++
 .../hive/ql/exec/tez/monitoring/DAGSummary.java|  3 +-
 .../ql/exec/tez/monitoring/FSCountersSummary.java  |  3 +-
 .../hive/ql/exec/tez/monitoring/LLAPioSummary.java |  3 +-
 .../hive/ql/exec/tez/monitoring/LlapWmSummary.java |  3 +-
 .../hive/ql/exec/vector/VectorizationContext.java  |  4 +--
 .../hadoop/hive/ql/lockmgr/DbTxnManager.java   |  4 +--
 .../org/apache/hadoop/hive/ql/metadata/Hive.java   |  7 ++--
 .../hive/ql/optimizer/PointLookupOptimizer.java|  4 +--
 .../hive/ql/parse/ImportSemanticAnalyzer.java  |  6 ++--
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java | 25 +++---
 .../apache/hadoop/hive/ql/udf/UDAFPercentile.java  | 11 +++
 .../LazyBinaryObjectInspectorFactory.java  | 38 +-
 .../MetadataListStructObjectInspector.java | 19 ---
 .../apache/hadoop/hive/metastore/Batchable.java|  5 +--
 .../hadoop/hive/metastore/HiveMetaStore.java   | 11 +++
 .../hadoop/hive/metastore/txn/TxnHandler.java  |  3 +-
 17 files changed, 64 insertions(+), 97 deletions(-)



[hive] branch master updated: HIVE-22674: Replace Base64 in serde Package (David Mollitor, reviewed by Naveen Gangam)

2020-07-22 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new eacb4f3  HIVE-22674: Replace Base64 in serde Package (David Mollitor, 
reviewed by Naveen Gangam)
eacb4f3 is described below

commit eacb4f344352b66009e8f84797d9db4c3ae0ade7
Author: belugabehr <12578579+belugab...@users.noreply.github.com>
AuthorDate: Wed Jul 22 09:42:55 2020 -0400

HIVE-22674: Replace Base64 in serde Package (David Mollitor, reviewed by 
Naveen Gangam)
---
 .../clientpositive/llap/compute_stats_binary.q.out   |  2 +-
 .../org/apache/hadoop/hive/serde2/lazy/LazyBinary.java   | 16 +---
 .../org/apache/hadoop/hive/serde2/lazy/LazyUtils.java|  4 ++--
 .../hive/serde2/lazy/fast/LazySimpleSerializeWrite.java  |  6 +++---
 .../hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java |  6 --
 5 files changed, 19 insertions(+), 15 deletions(-)

diff --git a/ql/src/test/results/clientpositive/llap/compute_stats_binary.q.out 
b/ql/src/test/results/clientpositive/llap/compute_stats_binary.q.out
index 133c01e..fc90c89 100644
--- a/ql/src/test/results/clientpositive/llap/compute_stats_binary.q.out
+++ b/ql/src/test/results/clientpositive/llap/compute_stats_binary.q.out
@@ -31,4 +31,4 @@ POSTHOOK: query: select compute_stats(a, 16) from tab_binary
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tab_binary
  A masked pattern was here 
-{"columntype":"Binary","maxlength":36,"avglength":20.0,"countnulls":0}
+{"columntype":"Binary","maxlength":58,"avglength":32.5,"countnulls":0}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java 
b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java
index 8c594a8..6ce4906 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java
@@ -18,9 +18,11 @@
 
 package org.apache.hadoop.hive.serde2.lazy;
 
-import org.apache.commons.codec.binary.Base64;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
+import java.util.Base64;
+
 import 
org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyBinaryObjectInspector;
 import org.apache.hadoop.io.BytesWritable;
 
@@ -47,17 +49,17 @@ public class LazyBinary extends 
LazyPrimitive 0 ? decoded : recv;
 data.set(decoded, 0, decoded.length);
   }
 
   // todo this should be configured in serde
   public static byte[] decodeIfNeeded(byte[] recv) {
-boolean arrayByteBase64 = Base64.isArrayByteBase64(recv);
-if (LOG.isDebugEnabled() && arrayByteBase64) {
-  LOG.debug("Data only contains Base64 alphabets only so try to decode the 
data.");
+try {
+  return Base64.getDecoder().decode(recv);
+} catch (IllegalArgumentException e) {
+  // use the original bytes in case decoding should fail
+  LOG.debug("Data does not contain only Base64 characters so return 
original byte array", e);
+  return recv;
 }
-return arrayByteBase64 ? Base64.decodeBase64(recv) : recv;
   }
 }
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java 
b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
index 544a668..65a76ac 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
@@ -24,9 +24,9 @@ import java.io.OutputStream;
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
 import java.util.Arrays;
+import java.util.Base64;
 import java.util.Map;
 
-import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -294,7 +294,7 @@ public final class LazyUtils {
   BytesWritable bw = ((BinaryObjectInspector) 
oi).getPrimitiveWritableObject(o);
   byte[] toEncode = new byte[bw.getLength()];
   System.arraycopy(bw.getBytes(), 0,toEncode, 0, bw.getLength());
-  byte[] toWrite = Base64.encodeBase64(toEncode);
+  byte[] toWrite = Base64.getEncoder().withoutPadding().encode(toEncode);
   out.write(toWrite, 0, toWrite.length);
   break;
 }
diff --git 
a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
 
b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
index a42d6f4..4be9c40 100644
--- 
a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
+++ 
b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
@@ -21,11 +21,11 @@ package org.apache.hadoop.hive.serde2.lazy.fast;
 import java

[hive] branch master updated: HIVE-23797: Throw exception when no metastore found in zookeeper (Zhihua Deng, reviewed by David Mollitor)

2020-07-22 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 9b50557  HIVE-23797: Throw exception when no metastore found in 
zookeeper (Zhihua Deng, reviewed by David Mollitor)
9b50557 is described below

commit 9b505575ca420340c41ccb5acabd85a8803cbd45
Author: dengzh 
AuthorDate: Wed Jul 22 21:40:12 2020 +0800

HIVE-23797: Throw exception when no metastore found in zookeeper (Zhihua 
Deng, reviewed by David Mollitor)
---
 .../hadoop/hive/metastore/HiveMetaStoreClient.java |  7 ++
 .../hive/metastore/TestRemoteHMSZKNegative.java| 81 ++
 2 files changed, 88 insertions(+)

diff --git 
a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
 
b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index a8d37c5..84a86c5 100644
--- 
a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ 
b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -327,6 +327,13 @@ public class HiveMetaStoreClient implements 
IMetaStoreClient, AutoCloseable {
   MetaStoreUtils.logAndThrowMetaException(e);
 }
 
+if (metastoreUrisString.isEmpty() && 
"zookeeper".equalsIgnoreCase(serviceDiscoveryMode)) {
+  throw new MetaException("No metastore service discovered in ZooKeeper. "
+  + "Please ensure that at least one metastore server is online");
+}
+
+LOG.info("Resolved metastore uris: {}", metastoreUrisString);
+
 List metastoreURIArray = new ArrayList();
 try {
   for (String s : metastoreUrisString) {
diff --git 
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHMSZKNegative.java
 
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHMSZKNegative.java
new file mode 100644
index 000..e860691
--- /dev/null
+++ 
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHMSZKNegative.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.RetryOneTime;
+import org.apache.curator.test.TestingServer;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.ZooKeeperHiveHelper;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
+import org.apache.zookeeper.CreateMode;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Test MetaStore Client throws exception when no MetaStore spec found in 
zookeeper.
+ *
+ */
+public class TestRemoteHMSZKNegative {
+  private TestingServer zkServer;
+  private Configuration conf;
+  private CuratorFramework zkClient;
+  private String rootNamespace = this.getClass().getSimpleName();
+
+  @Before
+  public void setUp() throws Exception {
+zkServer = new TestingServer();
+conf = MetastoreConf.newMetastoreConf();
+MetastoreConf.setVar(conf, ConfVars.THRIFT_URIS, 
zkServer.getConnectString());
+MetastoreConf.setVar(conf, ConfVars.THRIFT_ZOOKEEPER_NAMESPACE, 
rootNamespace);
+MetastoreConf.setVar(conf, ConfVars.THRIFT_SERVICE_DISCOVERY_MODE, 
"zookeeper");
+zkClient = CuratorFrameworkFactory.newClient(zkServer.getConnectString(), 
new RetryOneTime(2000));
+zkClient.start();
+zkClient.create()
+.creatingParentsIfNeeded()
+.withMode(CreateMode.PERSISTENT)
+.forPath(ZooKeeperHiveHelper.ZOOKEEPER_PATH_SEPARATOR + rootNamespace);
+  }
+
+  @Test
+  public void create

[hive] branch master updated (76f8182 -> b81252e)

2020-07-22 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 76f8182  HIVE-19703: GenericUDTFGetSplits never uses num splits 
argument (Jaume Marhuenda, reviewed by David Mollitor)
 add b81252e  HIVE-23875: Add vscode project files to gitignore (Hunter 
Logan, reviewed by David Mollitor)

No new revisions were added by this update.

Summary of changes:
 .gitignore | 1 +
 1 file changed, 1 insertion(+)



[hive] branch master updated: HIVE-19703: GenericUDTFGetSplits never uses num splits argument (Jaume Marhuenda, reviewed by David Mollitor)

2020-07-22 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 76f8182  HIVE-19703: GenericUDTFGetSplits never uses num splits 
argument (Jaume Marhuenda, reviewed by David Mollitor)
76f8182 is described below

commit 76f8182b52a63ead8143e8e4ce9cc53dad5b2bc4
Author: Jaume Marhuenda 
AuthorDate: Wed Jul 22 09:17:47 2020 -0400

HIVE-19703: GenericUDTFGetSplits never uses num splits argument (Jaume 
Marhuenda, reviewed by David Mollitor)
---
 .../jdbc/AbstractTestJdbcGenericUDTFGetSplits.java |8 +-
 .../hive/jdbc/TestJdbcGenericUDTFGetSplits.java|2 +-
 .../hive/jdbc/TestJdbcGenericUDTFGetSplits2.java   |2 +-
 .../hive/ql/exec/tez/HiveSplitGenerator.java   |   25 +-
 .../hive/ql/udf/generic/GenericUDTFGetSplits.java  |3 +-
 .../test/results/clientpositive/llap/mm_all.q.out  |2 +-
 .../test/results/clientpositive/llap/mm_dp.q.out   | 4062 ++--
 7 files changed, 2062 insertions(+), 2042 deletions(-)

diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractTestJdbcGenericUDTFGetSplits.java
 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractTestJdbcGenericUDTFGetSplits.java
index 21184bf..0e7dcad 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractTestJdbcGenericUDTFGetSplits.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/AbstractTestJdbcGenericUDTFGetSplits.java
@@ -157,9 +157,13 @@ public abstract class AbstractTestJdbcGenericUDTFGetSplits 
{
   }
 
   protected void testGenericUDTFOrderBySplitCount1(String udtfName, int[] 
expectedCounts) throws Exception {
-String query = "select " + udtfName + "(" + "'select value from " + 
tableName + "', 5)";
+String query = "select " + udtfName + "(" + "'select value from " + 
tableName + "', 10)";
 runQuery(query, getConfigs(), expectedCounts[0]);
 
+// Check number of splits is respected
+query = "select get_splits(" + "'select value from " + tableName + "', 3)";
+runQuery(query, getConfigs(), 3);
+
 query = "select " + udtfName + "(" + "'select value from " + tableName + " 
order by under_col', 5)";
 runQuery(query, getConfigs(), expectedCounts[1]);
 
@@ -182,7 +186,7 @@ public abstract class AbstractTestJdbcGenericUDTFGetSplits {
 List setCmds = getConfigs();
 setCmds.add("set 
hive.llap.external.splits.order.by.force.single.split=false");
 query = "select " + udtfName + "(" +
-"'select `value` from (select value from " + tableName + " where value 
is not null order by value) as t', 5)";
+"'select `value` from (select value from " + tableName + " where value 
is not null order by value) as t', 10)";
 runQuery(query, setCmds, expectedCounts[7]);
   }
 
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcGenericUDTFGetSplits.java
 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcGenericUDTFGetSplits.java
index 6ca5276..b24e1a7 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcGenericUDTFGetSplits.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcGenericUDTFGetSplits.java
@@ -45,7 +45,7 @@ public class TestJdbcGenericUDTFGetSplits extends 
AbstractTestJdbcGenericUDTFGet
 
   @Test(timeout = 20)
   public void testGenericUDTFOrderBySplitCount1OnPartitionedTable() throws 
Exception {
-super.testGenericUDTFOrderBySplitCount1OnPartitionedTable("get_splits", 
new int[]{10, 1, 2, 2, 2});
+super.testGenericUDTFOrderBySplitCount1OnPartitionedTable("get_splits", 
new int[]{5, 1, 2, 2, 2});
   }
 
 
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcGenericUDTFGetSplits2.java
 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcGenericUDTFGetSplits2.java
index 14d5e62..96b8d89 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcGenericUDTFGetSplits2.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcGenericUDTFGetSplits2.java
@@ -32,7 +32,7 @@ public class TestJdbcGenericUDTFGetSplits2 extends 
AbstractTestJdbcGenericUDTFGe
 
   @Test(timeout = 20)
   public void testGenericUDTFOrderBySplitCount1OnPartitionedTable() throws 
Exception {
-
super.testGenericUDTFOrderBySplitCount1OnPartitionedTable("get_llap_splits", 
new int[]{12, 3, 4, 4, 4});
+
super.testGenericUDTFOrderBySplitCount1OnPartitionedTable("get_llap_splits", 
new int[]{7, 3, 4, 4, 4});
   }
 
 }
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java 
b/ql/src/java/org/apache/hadoop/hive/q

[hive] branch master updated: HIVE-20447: Add JSON Output Format to beeline (Hunter Logan via David Mollitor)

2020-07-21 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new e56a775  HIVE-20447: Add JSON Output Format to beeline (Hunter Logan 
via David Mollitor)
e56a775 is described below

commit e56a775c38732da5928c0f6555dc167b5a6d1fa7
Author: Hunter Logan 
AuthorDate: Tue Jul 21 09:10:46 2020 -0400

HIVE-20447: Add JSON Output Format to beeline (Hunter Logan via David 
Mollitor)
---
 beeline/pom.xml|   4 +
 .../src/java/org/apache/hive/beeline/BeeLine.java  |   2 +
 .../apache/hive/beeline/JSONFileOutputFormat.java  |  62 ++
 .../org/apache/hive/beeline/JSONOutputFormat.java  | 126 +
 beeline/src/main/resources/BeeLine.properties  |   4 +-
 .../hive/beeline/TestJSONFileOutputFormat.java | 106 +
 .../apache/hive/beeline/TestJSONOutputFormat.java  | 108 ++
 7 files changed, 410 insertions(+), 2 deletions(-)

diff --git a/beeline/pom.xml b/beeline/pom.xml
index c8be6a6..46bfe98 100644
--- a/beeline/pom.xml
+++ b/beeline/pom.xml
@@ -70,6 +70,10 @@
   commons-io
 
 
+  com.fasterxml.jackson.core
+  jackson-core
+
+
   jline
   jline
 
diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLine.java 
b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
index cb14013..a86fe5c 100644
--- a/beeline/src/java/org/apache/hive/beeline/BeeLine.java
+++ b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
@@ -186,6 +186,8 @@ public class BeeLine implements Closeable {
   "tsv", new DeprecatedSeparatedValuesOutputFormat(this, '\t'),
   "xmlattr", new XMLAttributeOutputFormat(this),
   "xmlelements", new XMLElementOutputFormat(this),
+  "json", new JSONOutputFormat(this),
+  "jsonfile", new JSONFileOutputFormat(this),
   });
 
   private List supportedLocalDriver =
diff --git a/beeline/src/java/org/apache/hive/beeline/JSONFileOutputFormat.java 
b/beeline/src/java/org/apache/hive/beeline/JSONFileOutputFormat.java
new file mode 100644
index 000..b5f2a84
--- /dev/null
+++ b/beeline/src/java/org/apache/hive/beeline/JSONFileOutputFormat.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.beeline;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+
+import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
+
+/**
+ * OutputFormat for hive JSON file format.
+ * Removes "{ "resultset": [...] }" wrapping and prints one object per line.
+ * This output format matches the same format as a Hive table created with 
JSONFILE file format:
+ * CREATE TABLE ... STORED AS JSONFILE;
+ * e.g.
+ * {"name":"Ritchie Tiger","age":40,"is_employed":true,"college":"RIT"}
+ * {"name":"Bobby Tables","age":8,"is_employed":false,"college":null}
+ * ...
+ * 
+ * Note the lack of "," at the end of lines.
+ * 
+ */ 
+public class JSONFileOutputFormat extends JSONOutputFormat {
+  
+
+  JSONFileOutputFormat(BeeLine beeLine) {
+super(beeLine);
+this.generator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
+  }
+
+  @Override
+  void printHeader(Rows.Row header) {}
+
+  @Override
+  void printFooter(Rows.Row header) {
+ByteArrayOutputStream buf = (ByteArrayOutputStream) 
generator.getOutputTarget();
+try {
+  generator.flush();
+  String out = buf.toString(StandardCharsets.UTF_8.name());
+  beeLine.output(out);
+} catch(IOException e) {
+  beeLine.handleException(e);
+}
+buf.reset();
+  }
+}
diff --git a/beeline/src/java/org/apache/hive/beeline/JSONOutputFormat.java 
b/beeline/src/java/org/apache/hive/beeline/JSONOutputFormat.java
new file mode 100644
index 000..ef0ddd3
--- /dev/null
+++ b/beeline/src/java/org/apache/hive/beeline/JSONOutputFormat.java
@@ -0,0 +1,

[hive] branch master updated: HIVE-23836: Make 'cols' dependent so that it cascade deletes (David Mollitor, reviewed by Ashutosh Chauhan)

2020-07-20 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new eeacbde  HIVE-23836: Make 'cols' dependent so that it cascade deletes 
(David Mollitor, reviewed by Ashutosh Chauhan)
eeacbde is described below

commit eeacbdef43f6abd9b1e31afac8b16f9aa4d4c014
Author: belugabehr <12578579+belugab...@users.noreply.github.com>
AuthorDate: Mon Jul 20 09:31:08 2020 -0400

HIVE-23836: Make 'cols' dependent so that it cascade deletes (David 
Mollitor, reviewed by Ashutosh Chauhan)
---
 standalone-metastore/metastore-server/src/main/resources/package.jdo | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git 
a/standalone-metastore/metastore-server/src/main/resources/package.jdo 
b/standalone-metastore/metastore-server/src/main/resources/package.jdo
index d1f4b33..9820101 100644
--- a/standalone-metastore/metastore-server/src/main/resources/package.jdo
+++ b/standalone-metastore/metastore-server/src/main/resources/package.jdo
@@ -346,7 +346,7 @@
 
   
   
-
+
 
   
 



[hive] branch master updated (5b545df -> 79cfa6f)

2020-07-17 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 5b545df  HIVE-23339: SBA does not check permissions for DB location 
specified in Create database query (Shubham Chaurasia, reviewed by Miklos 
Gergely) (#1011)
 add 79cfa6f  HIVE-23818: Use String Switch-Case Statement in StatUtills 
(David Mollitor, reviewed by Miklos Gergely, Panagiotis Garefalakis)

No new revisions were added by this update.

Summary of changes:
 .../apache/hadoop/hive/ql/stats/StatsUtils.java| 113 -
 .../hadoop/hive/ql/stats/TestStatsUtils.java   |   3 +-
 2 files changed, 65 insertions(+), 51 deletions(-)



[hive] branch master updated: HIVE-23856: Beeline Should Print Binary Data in Base64 (Hunter Logan via David Mollitor)

2020-07-16 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new acfc0d5  HIVE-23856: Beeline Should Print Binary Data in Base64 
(Hunter Logan via David Mollitor)
acfc0d5 is described below

commit acfc0d5d1f9ec12972d9be4c584e0803ab3053e1
Author: Hunter Logan 
AuthorDate: Thu Jul 16 09:23:07 2020 -0400

HIVE-23856: Beeline Should Print Binary Data in Base64 (Hunter Logan via 
David Mollitor)
---
 beeline/src/java/org/apache/hive/beeline/Rows.java| 8 +---
 beeline/src/test/org/apache/hive/beeline/TestIncrementalRows.java | 2 +-
 2 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/beeline/src/java/org/apache/hive/beeline/Rows.java 
b/beeline/src/java/org/apache/hive/beeline/Rows.java
index 8826bcd..3d5eaec 100644
--- a/beeline/src/java/org/apache/hive/beeline/Rows.java
+++ b/beeline/src/java/org/apache/hive/beeline/Rows.java
@@ -22,12 +22,14 @@
  */
 package org.apache.hive.beeline;
 
+import java.nio.charset.StandardCharsets;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
 import java.text.DecimalFormat;
 import java.text.NumberFormat;
 import java.util.Arrays;
+import java.util.Base64;
 import java.util.Iterator;
 
 import org.apache.hadoop.hive.common.cli.EscapeCRLFHelper;
@@ -41,7 +43,7 @@ abstract class Rows implements Iterator {
   final ResultSetMetaData rsMeta;
   final Boolean[] primaryKeys;
   final NumberFormat numberFormat;
-  private boolean convertBinaryArray;
+  private boolean convertBinaryArrayToString;
   private final String nullStr;
 
   Rows(BeeLine beeLine, ResultSet rs) throws SQLException {
@@ -55,7 +57,7 @@ abstract class Rows implements Iterator {
 } else {
   numberFormat = new DecimalFormat(beeLine.getOpts().getNumberFormat());
 }
-this.convertBinaryArray = 
beeLine.getOpts().getConvertBinaryArrayToString();
+this.convertBinaryArrayToString = 
beeLine.getOpts().getConvertBinaryArrayToString();
   }
 
   @Override
@@ -162,7 +164,7 @@ abstract class Rows implements Iterator {
 } else if (o instanceof Number) {
   value = numberFormat != null ? numberFormat.format(o) : o.toString();
 } else if (o instanceof byte[]) {
-  value = convertBinaryArray ? new String((byte[])o) : 
Arrays.toString((byte[])o);
+  value = convertBinaryArrayToString ? new String((byte[])o, 
StandardCharsets.UTF_8) : 
Base64.getEncoder().withoutPadding().encodeToString((byte[])o);
 } else {
   value = o.toString();
 }
diff --git a/beeline/src/test/org/apache/hive/beeline/TestIncrementalRows.java 
b/beeline/src/test/org/apache/hive/beeline/TestIncrementalRows.java
index 61661a1..eb4e27b 100644
--- a/beeline/src/test/org/apache/hive/beeline/TestIncrementalRows.java
+++ b/beeline/src/test/org/apache/hive/beeline/TestIncrementalRows.java
@@ -94,7 +94,7 @@ public class TestIncrementalRows {
 
 convertedIr.next();
 String row = convertedIr.next().toString();
-Assert.assertEquals("[[77, 77, 77]]", row);
+Assert.assertEquals("[TU1N]", row);
   }
 
   public void initNrOfResultSetCalls(final int iter) throws SQLException {



[hive] branch master updated: HIVE-22676: Replace Base64 in hive-service Package (#1090)

2020-07-02 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new bc00454  HIVE-22676: Replace Base64 in hive-service Package (#1090)
bc00454 is described below

commit bc00454c194413753ac1d7067044ca78c77e1a34
Author: belugabehr <12578579+belugab...@users.noreply.github.com>
AuthorDate: Thu Jul 2 08:32:08 2020 -0400

HIVE-22676: Replace Base64 in hive-service Package (#1090)
---
 jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java  |  2 +-
 service/src/java/org/apache/hive/service/CookieSigner.java |  4 ++--
 .../src/java/org/apache/hive/service/auth/HttpAuthUtils.java   |  6 ++
 .../org/apache/hive/service/cli/operation/SQLOperation.java|  8 
 .../org/apache/hive/service/cli/thrift/ThriftHttpServlet.java  | 10 +-
 5 files changed, 14 insertions(+), 16 deletions(-)

diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
index 57f681c..cd1c130 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
@@ -754,7 +754,7 @@ public class HiveStatement implements java.sql.Statement {
 // Set on the server side.
 // @see org.apache.hive.service.cli.operation.SQLOperation#prepare
 return (stmtHandle.isPresent())
-? 
Base64.getUrlEncoder().encodeToString(stmtHandle.get().getOperationId().getGuid()).trim()
+? 
Base64.getUrlEncoder().withoutPadding().encodeToString(stmtHandle.get().getOperationId().getGuid())
 : null;
   }
 
diff --git a/service/src/java/org/apache/hive/service/CookieSigner.java 
b/service/src/java/org/apache/hive/service/CookieSigner.java
index c4d88de..d1a41d3 100644
--- a/service/src/java/org/apache/hive/service/CookieSigner.java
+++ b/service/src/java/org/apache/hive/service/CookieSigner.java
@@ -20,8 +20,8 @@ package org.apache.hive.service;
 
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
+import java.util.Base64;
 
-import org.apache.commons.codec.binary.Base64;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -99,7 +99,7 @@ public class CookieSigner {
   md.update(str.getBytes());
   md.update(secretBytes);
   byte[] digest = md.digest();
-  return new Base64(0).encodeToString(digest);
+  return Base64.getEncoder().encodeToString(digest);
 } catch (NoSuchAlgorithmException ex) {
   throw new RuntimeException("Invalid SHA digest String: " + SHA_STRING +
 " " + ex.getMessage(), ex);
diff --git a/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java 
b/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
index 7dc11b2..31985d9 100644
--- a/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
+++ b/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
@@ -21,6 +21,7 @@ package org.apache.hive.service.auth;
 import java.security.PrivilegedExceptionAction;
 import java.security.SecureRandom;
 import java.util.Arrays;
+import java.util.Base64;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
@@ -29,7 +30,6 @@ import java.util.StringTokenizer;
 
 import javax.security.auth.Subject;
 
-import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.http.protocol.BasicHttpContext;
@@ -142,13 +142,11 @@ public final class HttpAuthUtils {
 public static final String SERVER_HTTP_URL = "SERVER_HTTP_URL";
 private final String serverPrincipal;
 private final String serverHttpUrl;
-private final Base64 base64codec;
 private final HttpContext httpContext;
 
 public HttpKerberosClientAction(String serverPrincipal, String 
serverHttpUrl) {
   this.serverPrincipal = serverPrincipal;
   this.serverHttpUrl = serverHttpUrl;
-  base64codec = new Base64(0);
   httpContext = new BasicHttpContext();
   httpContext.setAttribute(SERVER_HTTP_URL, serverHttpUrl);
 }
@@ -172,7 +170,7 @@ public final class HttpAuthUtils {
   byte[] outToken = gssContext.initSecContext(inToken, 0, inToken.length);
   gssContext.dispose();
   // Base64 encoded and stringified token for server
-  return new String(base64codec.encode(outToken));
+  return Base64.getEncoder().encodeToString(outToken);
 }
   }
 }
diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java 
b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
index 811f377..a76878a 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOpe

[hive] branch master updated: HIVE-23704: Decode Base-64 String from HTTP Header (David Mollitor, reviewed by Ashutosh Chauhan)

2020-06-21 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 4c18dbb  HIVE-23704: Decode Base-64 String from HTTP Header (David 
Mollitor, reviewed by Ashutosh Chauhan)
4c18dbb is described below

commit 4c18dbb3627149965fe4a96a70f5866bbfe49643
Author: belugabehr <12578579+belugab...@users.noreply.github.com>
AuthorDate: Sun Jun 21 12:29:45 2020 -0400

HIVE-23704: Decode Base-64 String from HTTP Header (David Mollitor, 
reviewed by Ashutosh Chauhan)
---
 .../hive/service/cli/thrift/ThriftHttpServlet.java | 25 ++
 1 file changed, 11 insertions(+), 14 deletions(-)

diff --git 
a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java 
b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
index 2ccbb61..c20baea 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
@@ -593,11 +593,10 @@ public class ThriftHttpServlet extends TServlet {
 
   private String[] getAuthHeaderTokens(HttpServletRequest request,
   String authType) throws HttpAuthenticationException {
-String authHeaderBase64 = getAuthHeader(request, authType);
-String authHeaderString = StringUtils.newStringUtf8(
-Base64.decodeBase64(authHeaderBase64.getBytes()));
-String[] creds = authHeaderString.split(":");
-return creds;
+String authHeaderBase64Str = getAuthHeader(request, authType);
+String authHeaderString = 
StringUtils.newStringUtf8(Base64.decodeBase64(authHeaderBase64Str));
+
+return authHeaderString.split(":");
   }
 
   /**
@@ -616,15 +615,13 @@ public class ThriftHttpServlet extends TServlet {
   "from the client is empty.");
 }
 
-String authHeaderBase64String;
-int beginIndex;
-if (isKerberosAuthMode(authType)) {
-  beginIndex = (HttpAuthUtils.NEGOTIATE + " ").length();
-}
-else {
-  beginIndex = (HttpAuthUtils.BASIC + " ").length();
-}
-authHeaderBase64String = authHeader.substring(beginIndex);
+LOG.debug("HTTP Auth Header [{}]", authHeader);
+
+String[] parts = authHeader.split(" ");
+
+// Assume the Base-64 string is always the last thing in the header
+String authHeaderBase64String = parts[parts.length - 1];
+
 // Authorization header must have a payload
 if (authHeaderBase64String.isEmpty()) {
   throw new HttpAuthenticationException("Authorization header received " +



[hive] branch master updated: HIVE-19261: Avro SerDe's InstanceCache should not be synchronized on retrieve (Alexey Diomin via David Mollitor)

2020-06-19 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new b846bbe  HIVE-19261: Avro SerDe's InstanceCache should not be 
synchronized on retrieve (Alexey Diomin via David Mollitor)
b846bbe is described below

commit b846bbe229b2225ff1122ec14b91ae910b73772c
Author: Alexey 
AuthorDate: Fri Jun 19 20:36:24 2020 +0700

HIVE-19261: Avro SerDe's InstanceCache should not be synchronized on 
retrieve (Alexey Diomin via David Mollitor)
---
 .../apache/hadoop/hive/serde2/avro/InstanceCache.java | 19 +--
 1 file changed, 9 insertions(+), 10 deletions(-)

diff --git 
a/serde/src/java/org/apache/hadoop/hive/serde2/avro/InstanceCache.java 
b/serde/src/java/org/apache/hadoop/hive/serde2/avro/InstanceCache.java
index 2d52020..3e80576 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/InstanceCache.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/InstanceCache.java
@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.hive.serde2.avro;
 
-import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -35,7 +35,7 @@ import org.slf4j.LoggerFactory;
  */
 public abstract class InstanceCache {
   private static final Logger LOG = 
LoggerFactory.getLogger(InstanceCache.class);
-  Map cache = new HashMap();
+  Map cache = new ConcurrentHashMap<>();
   
   public InstanceCache() {}
 
@@ -51,20 +51,19 @@ public abstract class InstanceCache {
* Retrieve (or create if it doesn't exist) the correct Instance for this
* SeedObject using 'seenSchemas' to resolve circular references
*/
-  public synchronized Instance retrieve(SeedObject hv,
-  Set seenSchemas) throws AvroSerdeException {
+  public Instance retrieve(SeedObject hv, Set seenSchemas)
+throws AvroSerdeException {
 if(LOG.isDebugEnabled()) LOG.debug("Checking for hv: " + hv.toString());
 
 if(cache.containsKey(hv)) {
   if(LOG.isDebugEnabled()) LOG.debug("Returning cache result.");
   return cache.get(hv);
+} else {
+  if(LOG.isDebugEnabled()) LOG.debug("Creating new instance and storing in 
cache");
+  Instance newInstance = makeInstance(hv, seenSchemas);
+  Instance cachedInstance = cache.putIfAbsent(hv, newInstance);
+  return cachedInstance == null ? newInstance : cachedInstance;
 }
-
-if(LOG.isDebugEnabled()) LOG.debug("Creating new instance and storing in 
cache");
-
-Instance instance = makeInstance(hv, seenSchemas);
-cache.put(hv, instance);
-return instance;
   }
 
   protected abstract Instance makeInstance(SeedObject hv,



[hive] branch master updated: HIVE-23602: Use Java Concurrent Package for Operation Handle Set (David Mollitor, reviewed by Peter Vary)

2020-06-18 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 7725d08  HIVE-23602: Use Java Concurrent Package for Operation Handle 
Set (David Mollitor, reviewed by Peter Vary)
7725d08 is described below

commit 7725d08f926ee38626e9e3ee1639fea58ba6d181
Author: belugabehr <12578579+belugab...@users.noreply.github.com>
AuthorDate: Thu Jun 18 18:34:45 2020 -0400

HIVE-23602: Use Java Concurrent Package for Operation Handle Set (David 
Mollitor, reviewed by Peter Vary)
---
 .../hive/service/cli/session/HiveSessionImpl.java  | 40 --
 1 file changed, 14 insertions(+), 26 deletions(-)

diff --git 
a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java 
b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
index 9c7ee54..c25b63f 100644
--- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
+++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -29,6 +29,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.Future;
 import java.util.concurrent.FutureTask;
 import java.util.concurrent.Semaphore;
@@ -114,7 +115,7 @@ public class HiveSessionImpl implements HiveSession {
   private SessionManager sessionManager;
   private OperationManager operationManager;
   // Synchronized by locking on itself.
-  private final Set opHandleSet = new 
HashSet();
+  private final Set opHandleSet = 
ConcurrentHashMap.newKeySet();
   private boolean isOperationLogEnabled;
   private File sessionLogDir;
   // TODO: the control flow for this needs to be defined. Hive is supposed to 
be thread-local.
@@ -719,15 +720,11 @@ public class HiveSessionImpl implements HiveSession {
   }
 
   private void addOpHandle(OperationHandle opHandle) {
-synchronized (opHandleSet) {
-  opHandleSet.add(opHandle);
-}
+opHandleSet.add(opHandle);
   }
 
   private void removeOpHandle(OperationHandle opHandle) {
-synchronized (opHandleSet) {
-  opHandleSet.remove(opHandle);
-}
+opHandleSet.remove(opHandle);
   }
 
   @Override
@@ -757,14 +754,13 @@ public class HiveSessionImpl implements HiveSession {
 try {
   acquire(true, false);
   // Iterate through the opHandles and close their operations
-  List ops = null;
-  synchronized (opHandleSet) {
-ops = new ArrayList<>(opHandleSet);
-opHandleSet.clear();
-  }
-  for (OperationHandle opHandle : ops) {
+  List closedOps = new ArrayList<>();
+  for (OperationHandle opHandle : opHandleSet) {
 operationManager.closeOperation(opHandle);
+closedOps.add(opHandle);
   }
+  opHandleSet.removeAll(closedOps);
+
   // Cleanup session log directory.
   cleanupSessionLogDir();
   HiveHistory hiveHist = sessionState.getHiveHistory();
@@ -849,12 +845,9 @@ public class HiveSessionImpl implements HiveSession {
 
   @Override
   public void closeExpiredOperations() {
-OperationHandle[] handles;
-synchronized (opHandleSet) {
-  handles = opHandleSet.toArray(new OperationHandle[opHandleSet.size()]);
-}
-if (handles.length > 0) {
-  List operations = 
operationManager.removeExpiredOperations(handles);
+List handles = new ArrayList<>(opHandleSet);
+if (!handles.isEmpty()) {
+  List operations = 
operationManager.removeExpiredOperations(handles.toArray(new 
OperationHandle[0]));
   if (!operations.isEmpty()) {
 closeTimedOutOperations(operations);
   }
@@ -863,10 +856,7 @@ public class HiveSessionImpl implements HiveSession {
 
   @Override
   public long getNoOperationTime() {
-boolean noMoreOpHandle = false;
-synchronized (opHandleSet) {
-  noMoreOpHandle = opHandleSet.isEmpty();
-}
+boolean noMoreOpHandle = opHandleSet.isEmpty();
 return noMoreOpHandle && !lockedByUser ? System.currentTimeMillis() - 
lastAccessTime : 0;
   }
 
@@ -906,9 +896,7 @@ public class HiveSessionImpl implements HiveSession {
 acquire(true, false);
 try {
   operationManager.closeOperation(opHandle);
-  synchronized (opHandleSet) {
-opHandleSet.remove(opHandle);
-  }
+  opHandleSet.remove(opHandle);
 } finally {
   release(true, false);
 }



[hive] branch branch-3 updated: HIVE-23026: Allow for custom YARN application name for TEZ queries (Jake Xie via David Mollitor)

2020-06-17 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch branch-3
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/branch-3 by this push:
 new 3ddabad  HIVE-23026: Allow for custom YARN application name for TEZ 
queries (Jake Xie via David Mollitor)
3ddabad is described below

commit 3ddabad88282279c0214e4a9f269c94f98b005dc
Author: JakeXie 
AuthorDate: Wed Jun 17 21:33:20 2020 +0800

HIVE-23026: Allow for custom YARN application name for TEZ queries (Jake 
Xie via David Mollitor)
---
 common/src/java/org/apache/hadoop/hive/conf/HiveConf.java   | 4 
 ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java | 3 ++-
 2 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index ddc4734..31ea29b 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -2087,6 +2087,10 @@ public class HiveConf extends Configuration {
"This name is used by Tez to set the dag name. This name in turn will 
appear on \n" +
 "the Tez UI representing the work that was done. Used by Spark to set 
the query name, will show up in the\n" +
 "Spark UI."),
+HIVETEZJOBNAME("tez.job.name", "HIVE-%s",
+"This name is used by Tez to set the job name. This name in turn will 
appear on \n" +
+"the Yarn UI representing the Yarn Application Name. The job name 
may be a \n" +
+"Java String.format() string, to which the session ID will be supplied 
as the single parameter."),
 
 HIVEOPTIMIZEBUCKETINGSORTING("hive.optimize.bucketingsorting", true,
 "Don't create a reducer for enforcing \n" +
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java
index 08e65a4..fabd1ce 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java
@@ -355,7 +355,8 @@ public class TezSessionState {
 
 setupSessionAcls(tezConfig, conf);
 
-final TezClient session = TezClient.newBuilder("HIVE-" + sessionId, 
tezConfig)
+String tezJobNameFormat = HiveConf.getVar(conf, ConfVars.HIVETEZJOBNAME);
+final TezClient session = 
TezClient.newBuilder(String.format(tezJobNameFormat, sessionId), tezConfig)
 .setIsSession(true).setLocalResources(commonLocalResources)
 
.setCredentials(llapCredentials).setServicePluginDescriptor(servicePluginsDescriptor)
 .build();



[hive] branch branch-2 updated (1d50a79 -> 4ef384c)

2020-06-17 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 1d50a79  HIVE-20033: Backport HIVE-19432 to branch-2 (Teddy Choi, 
reviewed by David Mollitor)
 add 4ef384c  HIVE-23026: Allow for custom YARN application name for TEZ 
queries (Jake Xie via David Mollitor)

No new revisions were added by this update.

Summary of changes:
 common/src/java/org/apache/hadoop/hive/conf/HiveConf.java   | 4 
 ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java | 3 ++-
 2 files changed, 6 insertions(+), 1 deletion(-)



[hive] branch master updated (ed639c4 -> f5999bc)

2020-06-17 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from ed639c4  HIVE-23418 : Add test.local.warehouse.dir for 
TestMiniLlapLocalDriver tests (Miklos Gergely, reviewed by Zoltan Haindrich)
 add f5999bc  HIVE-23026: Allow for custom YARN application name for TEZ 
queries (Jake Xie via David Mollitor)

No new revisions were added by this update.

Summary of changes:
 common/src/java/org/apache/hadoop/hive/conf/HiveConf.java   | 4 
 ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java | 3 ++-
 2 files changed, 6 insertions(+), 1 deletion(-)



[hive] branch master updated (ed639c4 -> f5999bc)

2020-06-17 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from ed639c4  HIVE-23418 : Add test.local.warehouse.dir for 
TestMiniLlapLocalDriver tests (Miklos Gergely, reviewed by Zoltan Haindrich)
 add f5999bc  HIVE-23026: Allow for custom YARN application name for TEZ 
queries (Jake Xie via David Mollitor)

No new revisions were added by this update.

Summary of changes:
 common/src/java/org/apache/hadoop/hive/conf/HiveConf.java   | 4 
 ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java | 3 ++-
 2 files changed, 6 insertions(+), 1 deletion(-)



[hive] branch master updated: HIVE-23592: Routine makeIntPair is Not Correct (David Mollitor, reviewed by Miklos Gergely)

2020-06-16 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new e74029d  HIVE-23592: Routine makeIntPair is Not Correct (David 
Mollitor, reviewed by Miklos Gergely)
e74029d is described below

commit e74029d4fd5c4bfc50d33a8f1155ffacc151ba8f
Author: belugabehr <12578579+belugab...@users.noreply.github.com>
AuthorDate: Tue Jun 16 09:23:19 2020 -0400

HIVE-23592: Routine makeIntPair is Not Correct (David Mollitor, reviewed by 
Miklos Gergely)
---
 .../org/apache/hadoop/hive/common/NumberUtils.java | 60 ++
 .../apache/hadoop/hive/common/TestNumberUtils.java | 57 
 .../hadoop/hive/llap/cache/BuddyAllocator.java | 15 ++
 .../hadoop/hive/ql/io/orc/encoded/IoTrace.java | 16 ++
 4 files changed, 125 insertions(+), 23 deletions(-)

diff --git a/common/src/java/org/apache/hadoop/hive/common/NumberUtils.java 
b/common/src/java/org/apache/hadoop/hive/common/NumberUtils.java
new file mode 100644
index 000..c9fa424
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/NumberUtils.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common;
+
+/**
+ * Collection of {@link Number} manipulation utilities common across Hive.
+ */
+public final class NumberUtils {
+
+  private NumberUtils() {
+  }
+
+  /**
+   * Store two ints in a single long value.
+   *
+   * @param i1 First int to store
+   * @param i2 Second int to store
+   * @return The combined value stored in a long
+   */
+  public static long makeIntPair(int i1, int i2) {
+return (((long) i1) << 32) | (i2 & 0xL);
+  }
+
+  /**
+   * Get the first int stored in a long value.
+   *
+   * @param pair The pair generated from makeIntPair
+   * @return The first value stored in the long
+   */
+  public static int getFirstInt(long pair) {
+return (int) (pair >> 32);
+  }
+
+  /**
+   * Get the second int stored in a long value.
+   *
+   * @param pair The pair generated from makeIntPair
+   * @return The first value stored in the long
+   */
+  public static int getSecondInt(long pair) {
+return (int) pair;
+  }
+
+}
diff --git a/common/src/test/org/apache/hadoop/hive/common/TestNumberUtils.java 
b/common/src/test/org/apache/hadoop/hive/common/TestNumberUtils.java
new file mode 100644
index 000..c370dbd
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/common/TestNumberUtils.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Test suite for the {@link NumberUtils} class.
+ */
+public class TestNumberUtils {
+
+  @Test
+  public void testMinLong() {
+final long pair = NumberUtils.makeIntPair(Integer.MIN_VALUE, 
Integer.MIN_VALUE);
+Assert.assertEquals(Integer.MIN_VALUE, NumberUtils.getFirstInt(pair));
+Assert.assertEquals(Integer.MIN_VALUE, NumberUtils.getSecondInt(pair));
+  }
+
+  @Test
+  public void testMaxLong() {
+final long pair = NumberUtils.makeIntPair(Integer.MAX_VALUE, 
Integer.MAX_VALUE);
+Assert.assertEquals(Integer.MAX_VALUE, NumberUtils.getFirstInt(pair));
+Assert.assertEquals(Integ

[hive] branch master updated: HIVE-23266: Remove QueryWrapper from ObjectStore (David Mollitor, reviewed by Zhihua Deng)

2020-06-12 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new f77987f  HIVE-23266: Remove QueryWrapper from ObjectStore (David 
Mollitor, reviewed by Zhihua Deng)
f77987f is described below

commit f77987fbac6ab92acad78dda1047fd2a9aeff7bd
Author: belugabehr <12578579+belugab...@users.noreply.github.com>
AuthorDate: Fri Jun 12 21:01:50 2020 -0400

HIVE-23266: Remove QueryWrapper from ObjectStore (David Mollitor, reviewed 
by Zhihua Deng)
---
 .../apache/hadoop/hive/metastore/ObjectStore.java  | 839 ++---
 .../metatool/MetaToolTaskExecuteJDOQLQuery.java|  46 +-
 .../hive/metastore/VerifyingObjectStore.java   |   2 +
 .../TestMetaToolTaskExecuteJDOQLQuery.java |   3 +-
 4 files changed, 434 insertions(+), 456 deletions(-)

diff --git 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 43f28ae..7ca2a4a 100644
--- 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -306,25 +306,6 @@ public class ObjectStore implements RawStore, Configurable 
{
   private Counter directSqlErrors;
   private boolean areTxnStatsSupported = false;
 
-  /**
-   * A Autocloseable wrapper around Query class to pass the Query object to 
the caller and let the caller release
-   * the resources when the QueryWrapper goes out of scope
-   */
-  public static class QueryWrapper implements AutoCloseable {
-public Query query;
-
-/**
- * Explicitly closes the query object to release the resources
- */
-@Override
-public void close() {
-  if (query != null) {
-query.closeAll();
-query = null;
-  }
-}
-  }
-
   public ObjectStore() {
   }
 
@@ -945,21 +926,22 @@ public class ObjectStore implements RawStore, 
Configurable {
 LOG.info("Dropping database {}.{} along with all tables", catName, dbname);
 dbname = normalizeIdentifier(dbname);
 catName = normalizeIdentifier(catName);
-QueryWrapper queryWrapper = new QueryWrapper();
 try {
   openTransaction();
 
   // then drop the database
   MDatabase db = getMDatabase(catName, dbname);
   pm.retrieve(db);
-  List dbGrants = this.listDatabaseGrants(catName, dbname, 
null, queryWrapper);
+  List dbGrants = this.listDatabaseGrants(catName, dbname, 
null);
   if (CollectionUtils.isNotEmpty(dbGrants)) {
 pm.deletePersistentAll(dbGrants);
   }
   pm.deletePersistent(db);
   success = commitTransaction();
+} catch (Exception e) {
+  throw new MetaException(e.getMessage() + " " + 
org.apache.hadoop.hive.metastore.utils.StringUtils.stringifyException(e));
 } finally {
-  rollbackAndCleanup(success, queryWrapper);
+  rollbackAndCleanup(success, null);
 }
 return success;
   }
@@ -1337,7 +1319,6 @@ public class ObjectStore implements RawStore, 
Configurable {
 
 
 
-
   private List listAllTableConstraintsWithOptionalConstraintName(
   String catName, String dbName, String tableName, String constraintname) {
 catName = normalizeIdentifier(catName);
@@ -2804,7 +2785,21 @@ public class ObjectStore implements RawStore, 
Configurable {
   @Override
   public List getPartitions(String catName, String dbName, String 
tableName,
int maxParts) throws MetaException, 
NoSuchObjectException {
-return getPartitionsInternal(catName, dbName, tableName, maxParts, true, 
true);
+List results = Collections.emptyList();
+boolean success = false;
+
+LOG.debug("Executing getPartitions");
+
+try {
+  openTransaction();
+  results = getPartitionsInternal(catName, dbName, tableName, maxParts, 
true, true);
+  success = commitTransaction();
+} finally {
+  if (!success) {
+rollbackTransaction();
+  }
+}
+return results;
   }
 
   @Override
@@ -2853,19 +2848,19 @@ public class ObjectStore implements RawStore, 
Configurable {
 return partLocations;
   }
 
-  protected List getPartitionsInternal(String catName, String 
dbName, String tblName,
-  final int maxParts, boolean 
allowSql, boolean allowJdo)
-  throws MetaException, NoSuchObjectException {
+  protected List getPartitionsInternal(String catName, String 
dbName, String tblName, final int maxParts,
+  boolean allowSql, boolean allowJdo) throws MetaException, 
NoSuchObjectException {
 return new GetListHelper(catName, dbName, tblName, allowSql, 
allowJdo) {
   @Overrid

[hive] branch branch-2 updated (f004949 -> 1d50a79)

2020-06-12 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hive.git.


from f004949  HIVE-21790: Build Hive2 with JDK 1.8 (David Mollitor)
 add 1d50a79  HIVE-20033: Backport HIVE-19432 to branch-2 (Teddy Choi, 
reviewed by David Mollitor)

No new revisions were added by this update.

Summary of changes:
 .../service/cli/operation/GetTablesOperation.java  | 24 +-
 1 file changed, 14 insertions(+), 10 deletions(-)



[hive] branch branch-2 updated (afa8b78 -> f004949)

2020-06-12 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hive.git.


from afa8b78  HIVE-23663: Fix Travis YAML For Hive 2.x (David Mollitor, 
reviewed by Zoltan Haindrich)
 add f004949  HIVE-21790: Build Hive2 with JDK 1.8 (David Mollitor)

No new revisions were added by this update.

Summary of changes:
 Jenkinsfile  | 215 +++
 hcatalog/webhcat/svr/pom.xml |   2 +-
 itests/hive-jmh/pom.xml  |   1 -
 itests/hive-unit/pom.xml |   1 -
 pom.xml  |  59 +---
 storage-api/pom.xml  |  18 +---
 testutils/ptest2/pom.xml |  21 ++---
 7 files changed, 228 insertions(+), 89 deletions(-)
 create mode 100644 Jenkinsfile



[hive] branch master updated (4ead9d3 -> 16cad18)

2020-06-11 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 4ead9d3  HIVE-21894: Hadoop credential password storage for the Kafka 
Storage handler when security is SSL (#839)
 add 16cad18  HIVE-23615: Do not deference null pointers in Beeline 
Commands Class (Kirill Vlasov, reviewed by David Mollitor)

No new revisions were added by this update.

Summary of changes:
 beeline/src/java/org/apache/hive/beeline/Commands.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)



[hive] branch master updated: HIVE-21636 ReplaceAll() -> replace() for non regex strings (Xia Li, reviewed by David Mollitor)

2020-06-11 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new c805207  HIVE-21636 ReplaceAll() -> replace() for non regex strings 
(Xia Li, reviewed by David Mollitor)
c805207 is described below

commit c80520771c5d39b1eb80668008e2f81e482af8ef
Author: Xia Li <47727869+bd201...@users.noreply.github.com>
AuthorDate: Thu Jun 11 12:05:04 2020 -0500

HIVE-21636 ReplaceAll() -> replace() for non regex strings (Xia Li, 
reviewed by David Mollitor)
---
 .../apache/hadoop/hive/accumulo/predicate/compare/StringCompare.java  | 2 +-
 .../apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java  | 2 +-
 vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java   | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git 
a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/StringCompare.java
 
b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/StringCompare.java
index 3d6d55c..1a235fa 100644
--- 
a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/StringCompare.java
+++ 
b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/StringCompare.java
@@ -67,7 +67,7 @@ public class StringCompare implements PrimitiveComparison {
 
   @Override
   public boolean like(byte[] value) {
-String temp = new String(value).replaceAll("%", "[\\w]+?");
+String temp = new String(value).replace("%", "[\\w]+?");
 Pattern pattern = Pattern.compile(temp);
 boolean match = pattern.matcher(constant).matches();
 return match;
diff --git 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
index 5d0bef4..88f3ef7 100644
--- 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
+++ 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
@@ -681,7 +681,7 @@ class FileOutputCommitterContainer extends 
OutputCommitterContainer {
 
   // construct a path pattern (e.g., /*/*) to find all dynamically 
generated paths
   String dynPathSpec = loadPath.toUri().getPath();
-  dynPathSpec = dynPathSpec.replaceAll("__HIVE_DEFAULT_PARTITION__", "*");
+  dynPathSpec = dynPathSpec.replace("__HIVE_DEFAULT_PARTITION__", "*");
 
   //  LOG.info("Searching for "+dynPathSpec);
   Path pathPattern = new Path(dynPathSpec);
diff --git 
a/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java 
b/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
index 54e06a5..b0b699d 100644
--- a/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
+++ b/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
@@ -3213,8 +3213,8 @@ public class GenVectorCode extends Task {
 // Read the template into a string;
 File templateFile = new File(joinPath(this.expressionTemplateDirectory, 
tdesc[0] + ".txt"));
 String templateString = readFile(templateFile);
-templateString = templateString.replaceAll("", className);
-templateString = templateString.replaceAll("", 
operatorName.toLowerCase());
+templateString = templateString.replace("", className);
+templateString = templateString.replace("", 
operatorName.toLowerCase());
 
 writeFile(templateFile.lastModified(), expressionOutputDirectory, 
expressionClassesDirectory,
className, templateString);



[hive] branch master updated: HIVE-23601: Hive Statement Clear Statement Handle on Error (David Mollitor, reviewed by Peter Vary)

2020-06-11 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 26945ee  HIVE-23601: Hive Statement Clear Statement Handle on Error 
(David Mollitor, reviewed by Peter Vary)
26945ee is described below

commit 26945ee16e3caa1bed1e25fab3a07b5190b441ac
Author: belugabehr <12578579+belugab...@users.noreply.github.com>
AuthorDate: Thu Jun 11 10:30:06 2020 -0400

HIVE-23601: Hive Statement Clear Statement Handle on Error (David Mollitor, 
reviewed by Peter Vary)
---
 .../java/org/apache/hive/jdbc/HiveStatement.java   | 64 --
 1 file changed, 35 insertions(+), 29 deletions(-)

diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
index ae60c32..6c90cef 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
@@ -73,7 +73,7 @@ public class HiveStatement implements java.sql.Statement {
 
   private final HiveConnection connection;
   private TCLIService.Iface client;
-  private TOperationHandle stmtHandle = null;
+  private Optional stmtHandle;
   private final TSessionHandle sessHandle;
   Map sessConf = new HashMap<>();
   private int fetchSize;
@@ -145,6 +145,7 @@ public class HiveStatement implements java.sql.Statement {
 this.defaultFetchSize = defaultFetchSize;
 this.fetchSize = (initFetchSize == 0) ? defaultFetchSize : initFetchSize;
 this.inPlaceUpdateStream = Optional.empty();
+this.stmtHandle = Optional.empty();
   }
 
   @Override
@@ -160,8 +161,8 @@ public class HiveStatement implements java.sql.Statement {
 }
 
 try {
-  if (stmtHandle != null) {
-TCancelOperationReq cancelReq = new TCancelOperationReq(stmtHandle);
+  if (stmtHandle.isPresent()) {
+TCancelOperationReq cancelReq = new 
TCancelOperationReq(stmtHandle.get());
 TCancelOperationResp cancelResp = client.CancelOperation(cancelReq);
 Utils.verifySuccessWithInfo(cancelResp.getStatus());
   }
@@ -189,11 +190,10 @@ public class HiveStatement implements java.sql.Statement {
*/
   private void closeStatementIfNeeded() throws SQLException {
 try {
-  if (stmtHandle != null) {
-TCloseOperationReq closeReq = new TCloseOperationReq(stmtHandle);
+  if (stmtHandle.isPresent()) {
+TCloseOperationReq closeReq = new TCloseOperationReq(stmtHandle.get());
 TCloseOperationResp closeResp = client.CloseOperation(closeReq);
 Utils.verifySuccessWithInfo(closeResp.getStatus());
-stmtHandle = null;
   }
 } catch (SQLException e) {
   throw e;
@@ -207,13 +207,17 @@ public class HiveStatement implements java.sql.Statement {
   throw new SQLException(errorMsg, "08S01", tae);
 } catch (Exception e) {
   throw new SQLException("Failed to close statement", "08S01", e);
+} finally {
+  stmtHandle = Optional.empty();
 }
   }
 
   void closeClientOperation() throws SQLException {
-closeStatementIfNeeded();
-isQueryClosed = true;
-stmtHandle = null;
+try {
+  closeStatementIfNeeded();
+} finally {
+  isQueryClosed = true;
+}
   }
 
   void closeOnResultSetCompletion() throws SQLException {
@@ -248,11 +252,11 @@ public class HiveStatement implements java.sql.Statement {
 TGetOperationStatusResp status = waitForOperationToComplete();
 
 // The query should be completed by now
-if (!status.isHasResultSet() && !stmtHandle.isHasResultSet()) {
+if (!status.isHasResultSet() && stmtHandle.isPresent() && 
!stmtHandle.get().isHasResultSet()) {
   return false;
 }
 resultSet = new HiveQueryResultSet.Builder(this).setClient(client)
-.setStmtHandle(stmtHandle).setMaxRows(maxRows).setFetchSize(fetchSize)
+
.setStmtHandle(stmtHandle.get()).setMaxRows(maxRows).setFetchSize(fetchSize)
 .setScrollable(isScrollableResultset)
 .build();
 return true;
@@ -281,7 +285,7 @@ public class HiveStatement implements java.sql.Statement {
 }
 resultSet =
 new HiveQueryResultSet.Builder(this).setClient(client)
-.setStmtHandle(stmtHandle).setMaxRows(maxRows)
+.setStmtHandle(stmtHandle.get()).setMaxRows(maxRows)
 .setFetchSize(fetchSize).setScrollable(isScrollableResultset)
 .build();
 return true;
@@ -305,7 +309,7 @@ public class HiveStatement implements java.sql.Statement {
 try {
   TExecuteStatementResp execResp = client.ExecuteStatement(execReq);
   Utils.verifySuccessWithInfo(execResp.getStatus());
-  stmtHandle = execResp.getOperationHandle();
+  stmtHandle = Optional.of(execResp.getOperationHandle());
 } catch (SQLException eS) {
   isLogBeingGe

[hive] branch master updated: HIVE-22681: Replace Base64 in hcatalog-webhcat Package (David Mollitor, reviewed by Ashutosh Chauhan)

2020-06-10 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 708fa20  HIVE-22681: Replace Base64 in hcatalog-webhcat Package (David 
Mollitor, reviewed by Ashutosh Chauhan)
708fa20 is described below

commit 708fa2037431e27c81fad8cbec82d4adecb440f4
Author: David Mollitor 
AuthorDate: Wed Jun 10 09:36:11 2020 -0400

HIVE-22681: Replace Base64 in hcatalog-webhcat Package (David Mollitor, 
reviewed by Ashutosh Chauhan)
---
 .../java/org/apache/hive/hcatalog/api/repl/ReplicationUtils.java| 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git 
a/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/repl/ReplicationUtils.java
 
b/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/repl/ReplicationUtils.java
index 9b942f2..69e3c13 100644
--- 
a/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/repl/ReplicationUtils.java
+++ 
b/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/repl/ReplicationUtils.java
@@ -21,7 +21,6 @@ package org.apache.hive.hcatalog.api.repl;
 
 import com.google.common.base.Function;
 import com.google.common.base.Objects;
-import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.io.IOExceptionWithCause;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hive.hcatalog.api.HCatDatabase;
@@ -37,6 +36,7 @@ import java.io.DataInputStream;
 import java.io.DataOutput;
 import java.io.DataOutputStream;
 import java.io.IOException;
+import java.util.Base64;
 import java.util.Map;
 
 public class ReplicationUtils {
@@ -222,7 +222,7 @@ public class ReplicationUtils {
 DataOutput dataOutput = new DataOutputStream(baos);
 ReaderWriter.writeDatum(dataOutput,command.getClass().getName());
 command.write(dataOutput);
-return Base64.encodeBase64URLSafeString(baos.toByteArray());
+return Base64.getUrlEncoder().encodeToString(baos.toByteArray());
   }
 
   /**
@@ -234,7 +234,7 @@ public class ReplicationUtils {
* given a base64 String representation of it.
*/
public static Command deserializeCommand(String s) throws IOException {
-DataInput dataInput = new DataInputStream(new 
ByteArrayInputStream(Base64.decodeBase64(s)));
+DataInput dataInput = new DataInputStream(new 
ByteArrayInputStream(Base64.getUrlDecoder().decode(s)));
 String clazz = (String) ReaderWriter.readDatum(dataInput);
 Command cmd;
 try {



[hive] branch master updated (a42a329 -> 58e2a2f)

2020-06-10 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from a42a329  HIVE-23520: REPL: repl dump could add support for immutable 
dataset (Rajesh Balamohan, reviewed by Aasha Medhi)
 add 58e2a2f  HIVE-23526: Beeline may throw the misleading exception 
(Zhihua Deng, reviewed by David Mollitor, Zoltan Haindrich)

No new revisions were added by this update.

Summary of changes:
 jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java | 9 +
 1 file changed, 9 insertions(+)



[hive] branch master updated (59abbff -> cb863f3)

2020-06-09 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 59abbff  HIVE-23621: Enforce ASF headers on source files (#1062)
 add cb863f3  HIVE-23628: Add Default Message for Github PRs (David 
Mollitor, reviewed by Zoltan Haindrich)

No new revisions were added by this update.

Summary of changes:
 .github/pull_request_template.md | 6 ++
 1 file changed, 6 insertions(+)
 create mode 100644 .github/pull_request_template.md



[hive] branch branch-2 updated (8249d51 -> afa8b78)

2020-06-09 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a change to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hive.git.


from 8249d51  HIVE-17680: TestNegativeCliDriver[merge_negative_5.q] (László 
Bodor reviewed by Zoltan Haindrich)
 add afa8b78  HIVE-23663: Fix Travis YAML For Hive 2.x (David Mollitor, 
reviewed by Zoltan Haindrich)

No new revisions were added by this update.

Summary of changes:
 .travis.yml | 25 ++---
 1 file changed, 10 insertions(+), 15 deletions(-)



[hive] branch master updated: HIVE-22479: Fix typo in GenericUDF (Wanqiang Ji, reviewed by David Mollitor)

2020-06-08 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new da933ad  HIVE-22479: Fix typo in GenericUDF (Wanqiang Ji, reviewed by 
David Mollitor)
da933ad is described below

commit da933ad4731e1c2ce13551e55d32d1faf2f0e2dc
Author: Wanqiang Ji 
AuthorDate: Tue Jun 9 01:07:26 2020 +0800

HIVE-22479: Fix typo in GenericUDF (Wanqiang Ji, reviewed by David Mollitor)
---
 ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
index c1bf325..40991a8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
@@ -66,7 +66,7 @@ import org.apache.hadoop.io.LongWritable;
  * variable length of arguments. 3. It can accept an infinite number of 
function
  * signature - for example, it's easy to write a GenericUDF that accepts
  * arrayint, arrayarrayint and so on (arbitrary levels 
of nesting). 4. It
- * can do short-circuit evaluations using DeferedObject.
+ * can do short-circuit evaluations using {@link DeferredObject}.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Stable
@@ -77,8 +77,8 @@ public abstract class GenericUDF implements Closeable {
   "th", "th", "th", "th", "th" };
 
   /**
-   * A Defered Object allows us to do lazy-evaluation and short-circuiting.
-   * GenericUDF use DeferedObject to pass arguments.
+   * A Deferred Object allows us to do lazy-evaluation and short-circuiting.
+   * GenericUDF use {@link DeferredObject} to pass arguments.
*/
   @InterfaceAudience.Public
   @InterfaceStability.Stable



[hive] branch master updated: [HIVE-23000] Improve travis.yml (Philipp Dallig, Reviewed by Zoltan Haindrich)

2020-06-08 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 46c9d6e  [HIVE-23000] Improve travis.yml (Philipp Dallig, Reviewed by 
Zoltan Haindrich)
46c9d6e is described below

commit 46c9d6eb9798bfcc575803f5e66d8537d9177c16
Author: Philipp Dallig 
AuthorDate: Mon Jun 8 15:39:44 2020 +0200

[HIVE-23000] Improve travis.yml (Philipp Dallig, Reviewed by Zoltan 
Haindrich)
---
 .travis.yml | 14 --
 1 file changed, 4 insertions(+), 10 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index d53cfc1..e576542 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -14,10 +14,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-# https://docs.travis-ci.com/user/ci-environment/
+# https://docs.travis-ci.com/user/reference/overview/
 # trusty - 7.5GB memory and 2 cores
-sudo: required
-dist: trusty
+dist: bionic
 
 # travis performs a shallow clone by default, in case of any issues
 # that requires full git history, enable this
@@ -25,7 +24,7 @@ dist: trusty
 
 language: java
 jdk:
-  - oraclejdk8
+  - openjdk8
 
 # disabling cache for 
/home/travis/.m2/repository/org/apache/hive/hive-jdbc/3.0.0-SNAPSHOT/hive-jdbc-3.0.0-SNAPSHOT-standalone.jar
 (Permission denied)
 #cache:
@@ -36,11 +35,6 @@ env:
   MAVEN_SKIP_RC=true
   MAVEN_OPTS="-Xmx2g"
 
-# workaround added: https://github.com/travis-ci/travis-ci/issues/4629
-before_install:
-  - sed -i.bak -e 
's|https://nexus.codehaus.org/snapshots/|https://oss.sonatype.org/content/repositories/codehaus-snapshots/|g'
 ~/.m2/settings.xml
-
-
-install: true
+install: skip
 
 script: mvn clean install -DskipTests -q -Pitests



[hive] branch master updated: HIVE-22675: Replace Base64 in hive-standalone-metastore Package (David Mollitor, reviewed by Ashutosh Chauhan)

2020-06-03 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 64b8c0b  HIVE-22675: Replace Base64 in hive-standalone-metastore 
Package (David Mollitor, reviewed by Ashutosh Chauhan)
64b8c0b is described below

commit 64b8c0bbacc3bfaa973d1d1636bf07dff4231db4
Author: David Mollitor 
AuthorDate: Wed Jun 3 12:55:48 2020 -0400

HIVE-22675: Replace Base64 in hive-standalone-metastore Package (David 
Mollitor, reviewed by Ashutosh Chauhan)
---
 .../org/apache/hadoop/hive/metastore/security/DBTokenStore.java   | 8 
 .../security/TokenStoreDelegationTokenSecretManager.java  | 6 +++---
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/DBTokenStore.java
 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/DBTokenStore.java
index f59915c..de68810 100644
--- 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/DBTokenStore.java
+++ 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/DBTokenStore.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hive.metastore.security;
 import java.io.IOException;
 import java.lang.reflect.InvocationTargetException;
 import java.util.ArrayList;
+import java.util.Base64;
 import java.util.List;
 
-import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import 
org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge.Server.ServerMode;
@@ -66,8 +66,8 @@ public class DBTokenStore implements DelegationTokenStore {
 
 try {
   String identifier = 
TokenStoreDelegationTokenSecretManager.encodeWritable(tokenIdentifier);
-  String tokenStr = Base64.encodeBase64URLSafeString(
-
MetastoreDelegationTokenSupport.encodeDelegationTokenInformation(token));
+  String tokenStr = Base64.getUrlEncoder()
+  
.encodeToString(MetastoreDelegationTokenSupport.encodeDelegationTokenInformation(token));
   boolean result = (Boolean)invokeOnTokenStore("addToken", new Object[] 
{identifier, tokenStr},
 String.class, String.class);
   LOG.trace("addToken: tokenIdentifier = {}, added = {}", tokenIdentifier, 
result);
@@ -85,7 +85,7 @@ public class DBTokenStore implements DelegationTokenStore {
   
TokenStoreDelegationTokenSecretManager.encodeWritable(tokenIdentifier)}, 
String.class);
   DelegationTokenInformation result = null;
   if (StringUtils.isNotEmpty(tokenStr)) {
-result = 
MetastoreDelegationTokenSupport.decodeDelegationTokenInformation(Base64.decodeBase64(tokenStr));
+result = 
MetastoreDelegationTokenSupport.decodeDelegationTokenInformation(Base64.getUrlDecoder().decode(tokenStr));
   }
   LOG.trace("getToken: tokenIdentifier = {}, result = {}", 
tokenIdentifier, result);
   return result;
diff --git 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/TokenStoreDelegationTokenSecretManager.java
 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/TokenStoreDelegationTokenSecretManager.java
index ee2ace8..6b58f2f 100644
--- 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/TokenStoreDelegationTokenSecretManager.java
+++ 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/TokenStoreDelegationTokenSecretManager.java
@@ -25,12 +25,12 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.lang.reflect.Method;
 import java.util.Arrays;
+import java.util.Base64;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.security.token.Token;
 import 
org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
@@ -176,11 +176,11 @@ public class TokenStoreDelegationTokenSecretManager 
extends DelegationTokenSecre
 DataOutputStream dos = new DataOutputStream(bos);
 key.write(dos);
 dos.flush();
-return Base64.encodeBase64URLSafeString(bos.toByteArray());
+return Base64.getUrlEncoder().encodeToString(bos.toByteArray());
   }
 
   public static void decodeWritable(Writable w, String idStr) throws 
IOException {
-DataInputStream in = new DataInputStream(new 
ByteArrayInputStream(Base64.decodeBase64(idStr)));
+DataInputStream in = new DataInputStream(new 
ByteArrayInputStream(Base64.getUrlDecoder().decode(idStr)));
 w.readFields(in);
   }
 



[hive] branch master updated: HIVE-18882: Minor Logging Improvements in Hive Metastore Client Connection (David Mollitor, reviewed by László Bodor)

2020-05-15 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 47d2fe1  HIVE-18882: Minor Logging Improvements in Hive Metastore 
Client Connection (David Mollitor, reviewed by László Bodor)
47d2fe1 is described below

commit 47d2fe19052ebc9192a05d6777cffa11aa79b82a
Author: David Mollitor 
AuthorDate: Fri May 15 09:48:06 2020 -0400

HIVE-18882: Minor Logging Improvements in Hive Metastore Client Connection 
(David Mollitor, reviewed by László Bodor)
---
 .../apache/hadoop/hive/metastore/HiveMetaStoreClient.java| 12 +---
 1 file changed, 5 insertions(+), 7 deletions(-)

diff --git 
a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
 
b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 64d3833..65df9c2 100644
--- 
a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ 
b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -393,7 +393,7 @@ public class HiveMetaStoreClient implements 
IMetaStoreClient, AutoCloseable {
 JavaUtils.getClassLoader());
 return (URIResolverHook) ReflectionUtils.newInstance(uriResolverClass, 
null);
   } catch (Exception e) {
-LOG.error("Exception loading uri resolver hook" + e);
+LOG.error("Exception loading uri resolver hook", e);
 return null;
   }
 }
@@ -576,14 +576,14 @@ public class HiveMetaStoreClient implements 
IMetaStoreClient, AutoCloseable {
 
 for (int attempt = 0; !isConnected && attempt < retries; ++attempt) {
   for (URI store : metastoreUris) {
-LOG.info("Trying to connect to metastore with URI (" + store + ")");
+LOG.info("Trying to connect to metastore with URI ({})", store);
 
 try {
   if (useSSL) {
 try {
   String trustStorePath = MetastoreConf.getVar(conf, 
ConfVars.SSL_TRUSTSTORE_PATH).trim();
   if (trustStorePath.isEmpty()) {
-throw new 
IllegalArgumentException(ConfVars.SSL_TRUSTSTORE_PATH.toString()
+throw new IllegalArgumentException(ConfVars.SSL_TRUSTSTORE_PATH
 + " Not configured for SSL connection");
   }
   String trustStorePassword =
@@ -699,8 +699,6 @@ public class HiveMetaStoreClient implements 
IMetaStoreClient, AutoCloseable {
 tte = e;
 LOG.warn("Failed to connect to the MetaStore Server URI ({})",
 store);
-
-// Include stack trace in DEBUG logging
 LOG.debug("Failed to connect to the MetaStore Server URI ({})",
 store, e);
   }
@@ -3437,13 +3435,13 @@ public class HiveMetaStoreClient implements 
IMetaStoreClient, AutoCloseable {
 NotificationEventRequest rqst = new NotificationEventRequest(lastEventId);
 rqst.setMaxEvents(maxEvents);
 NotificationEventResponse rsp = client.get_next_notification(rqst);
-LOG.debug("Got back " + rsp.getEventsSize() + " events");
+LOG.debug("Got back {} events", rsp.getEventsSize());
 NotificationEventResponse filtered = new NotificationEventResponse();
 if (rsp != null && rsp.getEvents() != null) {
   long nextEventId = lastEventId + 1;
   long prevEventId = lastEventId;
   for (NotificationEvent e : rsp.getEvents()) {
-LOG.debug("Got event with id : " + e.getEventId());
+LOG.debug("Got event with id : {}", e.getEventId());
 if (e.getEventId() != nextEventId) {
   if (e.getEventId() == prevEventId) {
 LOG.error("NOTIFICATION_LOG table has multiple events with the 
same event Id {}. " +



[hive] branch master updated: HIVE-23099: Improve Logger for Operation Child Classes (David Mollitor, reviewed by Ashutosh Chauhan)

2020-05-14 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 390ad7d  HIVE-23099: Improve Logger for Operation Child Classes (David 
Mollitor, reviewed by Ashutosh Chauhan)
390ad7d is described below

commit 390ad7d3a0dd40aeee04a17c71695784b38e7094
Author: David Mollitor 
AuthorDate: Thu May 14 09:50:00 2020 -0400

HIVE-23099: Improve Logger for Operation Child Classes (David Mollitor, 
reviewed by Ashutosh Chauhan)
---
 .../cli/operation/GetCrossReferenceOperation.java  | 14 +-
 .../cli/operation/HiveCommandOperation.java| 22 
 .../hive/service/cli/operation/Operation.java  | 12 -
 .../hive/service/cli/operation/SQLOperation.java   | 30 +++---
 4 files changed, 38 insertions(+), 40 deletions(-)

diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java
 
b/service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java
index 37f5b60..398af8a 100644
--- 
a/service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java
+++ 
b/service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java
@@ -116,7 +116,7 @@ public class GetCrossReferenceOperation extends 
MetadataOperation {
 this.foreignSchemaName = foreignSchema;
 this.foreignTableName = foreignTable;
 this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, 
getProtocolVersion(), false);
-LOG.info("Starting GetCrossReferenceOperation with the following 
parameters:"
+log.info("Starting GetCrossReferenceOperation with the following 
parameters:"
 + " parentCatalogName={}, parentSchemaName={}, parentTableName={}, 
foreignCatalog={}, "
 + "foreignSchema={}, foreignTable={}", parentCatalogName, 
parentSchemaName,
 parentTableName, foreignCatalog, foreignSchema, foreignTable);
@@ -125,7 +125,7 @@ public class GetCrossReferenceOperation extends 
MetadataOperation {
   @Override
   public void runInternal() throws HiveSQLException {
 setState(OperationState.RUNNING);
-LOG.info("Fetching cross reference metadata");
+log.info("Fetching cross reference metadata");
 try {
IMetaStoreClient metastoreClient = 
getParentSession().getMetaStoreClient();
  ForeignKeysRequest fkReq = new ForeignKeysRequest(parentSchemaName, 
parentTableName, foreignSchemaName, foreignTableName);
@@ -141,16 +141,16 @@ public class GetCrossReferenceOperation extends 
MetadataOperation {
 fk.getKey_seq(), fk.getUpdate_rule(), fk.getDelete_rule(), 
fk.getFk_name(),
 fk.getPk_name(), 0};
 rowSet.addRow(rowData);
-if (LOG.isDebugEnabled()) {
+if (log.isDebugEnabled()) {
   String debugMessage = getDebugMessage("cross reference", 
RESULT_SET_SCHEMA);
-  LOG.debug(debugMessage, rowData);
+  log.debug(debugMessage, rowData);
 }
   }
-  if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
-LOG.debug("No cross reference metadata has been returned.");
+  if (log.isDebugEnabled() && rowSet.numRows() == 0) {
+log.debug("No cross reference metadata has been returned.");
   }
   setState(OperationState.FINISHED);
-  LOG.info("Fetching cross reference metadata has been successfully 
finished");
+  log.info("Fetching cross reference metadata has been successfully 
finished");
 } catch (Exception e) {
   setState(OperationState.ERROR);
   throw new HiveSQLException(e);
diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
 
b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
index c83273b..3a0506b 100644
--- 
a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
+++ 
b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
@@ -67,8 +67,8 @@ public class HiveCommandOperation extends 
ExecuteStatementOperation {
 
   private void setupSessionIO(SessionState sessionState) {
 try {
-  LOG.info("Putting temp output to file " + 
sessionState.getTmpOutputFile().toString()
-  + " and error output to file " + 
sessionState.getTmpErrOutputFile().toString());
+  log.info("Putting temp output to file " + sessionState.getTmpOutputFile()
+  + " and error output to file " + sessionState.getTmpErrOutputFile());
   sessionState.in = null; // hive server's session input stream is not used
   // open a per-session file in auto-flush mode for writing temp results 
and tmp error output
   sessionState.out = new SessionStream

[hive] branch master updated: HIVE-23407: Prompt Beeline Users To Enable Verbose Logging on Error (David Mollitor, reviewed by Ashutosh Chauhan)

2020-05-14 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 43ac992  HIVE-23407: Prompt Beeline Users To Enable Verbose Logging on 
Error (David Mollitor, reviewed by Ashutosh Chauhan)
43ac992 is described below

commit 43ac992baf3242204544533c6bca10791cf8a6a7
Author: David Mollitor 
AuthorDate: Thu May 14 09:36:02 2020 -0400

HIVE-23407: Prompt Beeline Users To Enable Verbose Logging on Error (David 
Mollitor, reviewed by Ashutosh Chauhan)
---
 beeline/src/main/resources/BeeLine.properties | 9 ++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/beeline/src/main/resources/BeeLine.properties 
b/beeline/src/main/resources/BeeLine.properties
index d4b8f17..1a70e85 100644
--- a/beeline/src/main/resources/BeeLine.properties
+++ b/beeline/src/main/resources/BeeLine.properties
@@ -148,10 +148,13 @@ hs2-unexpected-end-of-file: Unexpected end of file when 
reading from HS2 server.
 cause might be too many concurrent connections. Please ask the administrator 
to check the number \
 of active connections, and adjust hive.server2.thrift.max.worker.threads if 
applicable.
 hs2-could-not-open-connection: Could not open connection to the HS2 server. 
Please check the \
-server URI and if the URI is correct, then ask the administrator to check the 
server status.
+server URI and if the URI is correct, then ask the administrator to check the 
server status. \
+Enable verbose error messages (--verbose=true) for more information.
 hs2-connection-timed-out: Connection timeout when communicating with HS2 
server.
-hs2-unknown-connection-problem: Unknown HS2 problem when communicating with 
Thrift server.
-hs2-unexpected-error: Unexpected HS2 error when communicating with the Thrift 
server.
+hs2-unknown-connection-problem: Unknown HS2 problem when communicating with 
Thrift server. \
+Enable verbose error messages (--verbose=true) for more information.
+hs2-unexpected-error: Unexpected HS2 error when communicating with the Thrift 
server. \
+Enable verbose error messages (--verbose=true) for more information.
 interrupt-ctrl-c: Interrupting... Please be patient this may take some time.
 
 



[hive] branch master updated: HIVE-23414: Detail Hive Java Compatibility (David Mollitor, reviewed by Naveen Gangam)

2020-05-11 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new ee4daec  HIVE-23414: Detail Hive Java Compatibility (David Mollitor, 
reviewed by Naveen Gangam)
ee4daec is described below

commit ee4daec7a49e78a5209ca63ae5616331e847c147
Author: David Mollitor 
AuthorDate: Mon May 11 14:37:57 2020 -0400

HIVE-23414: Detail Hive Java Compatibility (David Mollitor, reviewed by 
Naveen Gangam)
---
 README.md | 19 +--
 1 file changed, 17 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index 849b72d..fe5c456 100644
--- a/README.md
+++ b/README.md
@@ -75,9 +75,24 @@ Getting Started
 Requirements
 
 
-- Java 1.7 or 1.8
+Java
+--
 
-- Hadoop 1.x, 2.x, 3.x (3.x required for Hive 3.x)
+| Hive Version  | Java Version  |
+| - |:-:|
+| Hive 1.0  | Java 6|
+| Hive 1.1  | Java 6|
+| Hive 1.2  | Java 7|
+| Hive 2.x  | Java 7|
+| Hive 3.x  | Java 8|
+| Hive 4.x  | Java 8|
+
+
+Hadoop
+--
+
+- Hadoop 1.x, 2.x
+- Hadoop 3.x (Hive 3.x)
 
 
 Upgrading from older versions of Hive



[hive] branch master updated: HIVE-23124: Review of SQLOperation Class (David Mollitor, reviewed by Peter Vary)

2020-05-07 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new bbfb0f8  HIVE-23124: Review of SQLOperation Class (David Mollitor, 
reviewed by Peter Vary)
bbfb0f8 is described below

commit bbfb0f804202db8c8423a61ffb7c9fe8d888309b
Author: David Mollitor 
AuthorDate: Thu May 7 11:35:21 2020 -0400

HIVE-23124: Review of SQLOperation Class (David Mollitor, reviewed by Peter 
Vary)
---
 .../hive/service/cli/operation/SQLOperation.java   | 291 ++---
 1 file changed, 137 insertions(+), 154 deletions(-)

diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java 
b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
index 25b6ab3..75b84d3 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
@@ -19,7 +19,6 @@
 package org.apache.hive.service.cli.operation;
 
 import java.io.ByteArrayOutputStream;
-import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
@@ -27,18 +26,21 @@ import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
 import java.util.Properties;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 import java.util.concurrent.RejectedExecutionException;
 import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.ScheduledThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
 
 import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hive.common.LogUtils;
 import org.apache.hadoop.hive.common.io.SessionStream;
 import org.apache.hadoop.hive.common.metrics.common.Metrics;
@@ -78,22 +80,19 @@ import org.apache.hive.service.cli.RowSetFactory;
 import org.apache.hive.service.cli.TableSchema;
 import org.apache.hive.service.cli.session.HiveSession;
 import org.apache.hive.service.server.ThreadWithGarbageCleanup;
-import org.codehaus.jackson.JsonGenerationException;
-import org.codehaus.jackson.map.JsonMappingException;
 import org.codehaus.jackson.map.ObjectMapper;
 
 /**
  * SQLOperation.
- *
  */
 public class SQLOperation extends ExecuteStatementOperation {
   private IDriver driver = null;
-  private TableSchema resultSchema;
+  private Optional<TableSchema> resultSchema;
   private AbstractSerDe serde = null;
   private boolean fetchStarted = false;
   private volatile MetricsScope currentSQLStateScope;
-  private QueryInfo queryInfo;
-  private long queryTimeout;
+  private final QueryInfo queryInfo;
+  private final long queryTimeout;
   private ScheduledExecutorService timeoutExecutor;
   private final boolean runAsync;
   private final long operationLogCleanupDelayMs;
@@ -102,21 +101,25 @@ public class SQLOperation extends 
ExecuteStatementOperation {
   /**
* A map to track query count running by each user
*/
-  private static Map<String, AtomicInteger> userQueries = new HashMap<String, AtomicInteger>();
+  private static final Map<String, AtomicInteger> USER_QUERIES = new 
ConcurrentHashMap<>();
   private static final String ACTIVE_SQL_USER = 
MetricsConstant.SQL_OPERATION_PREFIX + "active_user";
-  private MetricsScope submittedQryScp;
+  private final Optional<MetricsScope> submittedQryScp;
 
  public SQLOperation(HiveSession parentSession, String statement, Map<String, String> confOverlay,
  boolean runInBackground, long queryTimeout) {
 // TODO: call setRemoteUser in ExecuteStatementOperation or higher.
 super(parentSession, statement, confOverlay, runInBackground);
 this.runAsync = runInBackground;
-this.queryTimeout = queryTimeout;
-long timeout = HiveConf.getTimeVar(queryState.getConf(),
-HiveConf.ConfVars.HIVE_QUERY_TIMEOUT_SECONDS, TimeUnit.SECONDS);
+this.resultSchema = Optional.empty();
+
+final long timeout =
+HiveConf.getTimeVar(queryState.getConf(), 
HiveConf.ConfVars.HIVE_QUERY_TIMEOUT_SECONDS, TimeUnit.SECONDS);
 if (timeout > 0 && (queryTimeout <= 0 || timeout < queryTimeout)) {
   this.queryTimeout = timeout;
+} else {
+  this.queryTimeout = queryTimeout;
 }
+
 this.operationLogCleanupDelayMs = HiveConf.getTimeVar(queryState.getConf(),
   HiveConf.ConfVars.HIVE_SERVER2_OPERATION_LOG_CLEANUP_DELAY, 
TimeUnit.MILLISECONDS);
 
@@ -125,10 +128,9 @@ public class SQLOperation extends 
ExecuteStatementOperation {
 queryInfo = new QueryInfo(getState().toString(), 
getParentSession().getUserName()

[hive] branch master updated: HIVE-23307: Cache ColumnIndex in HiveBaseResultSet (David Mollitor, reviewed by Naveen Gangam)

2020-05-07 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 16ef874  HIVE-23307: Cache ColumnIndex in HiveBaseResultSet (David 
Mollitor, reviewed by Naveen Gangam)
16ef874 is described below

commit 16ef87444a18a74f296b50c38b53b57f074b5280
Author: David Mollitor 
AuthorDate: Thu May 7 10:06:09 2020 -0400

HIVE-23307: Cache ColumnIndex in HiveBaseResultSet (David Mollitor, 
reviewed by Naveen Gangam)
---
 .../org/apache/hive/jdbc/HiveBaseResultSet.java| 19 +++--
 .../apache/hive/jdbc/TestHiveBaseResultSet.java| 93 ++
 2 files changed, 107 insertions(+), 5 deletions(-)

diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
index a69ea95..45de932 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
@@ -42,6 +42,7 @@ import java.sql.Statement;
 import java.sql.Time;
 import java.sql.Timestamp;
 import java.util.Calendar;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -65,6 +66,7 @@ public abstract class HiveBaseResultSet implements ResultSet {
  protected List<String> normalizedColumnNames;
   protected List columnTypes;
   protected List columnAttributes;
+  private final Map<String, Integer> columnNameIndexCache = new HashMap<>();
 
   private TableSchema schema;
 
@@ -95,19 +97,26 @@ public abstract class HiveBaseResultSet implements 
ResultSet {
 
   @Override
   public int findColumn(final String columnName) throws SQLException {
-int columnIndex = 0;
-if (columnName != null) {
-  final String lcColumnName = columnName.toLowerCase();
+if (columnName == null) {
+  throw new SQLException("null column name not supported");
+}
+final String lcColumnName = columnName.toLowerCase();
+final Integer result = 
this.columnNameIndexCache.computeIfAbsent(lcColumnName, cn -> {
+  int columnIndex = 0;
   for (final String normalizedColumnName : normalizedColumnNames) {
 ++columnIndex;
 final int idx = normalizedColumnName.lastIndexOf('.');
 final String name = (idx == -1) ? normalizedColumnName : 
normalizedColumnName.substring(1 + idx);
-if (name.equals(lcColumnName) || 
normalizedColumnName.equals(lcColumnName)) {
+if (name.equals(cn) || normalizedColumnName.equals(cn)) {
   return columnIndex;
 }
   }
+  return null;
+});
+if (result == null) {
+  throw new SQLException("Could not find " + columnName + " in " + 
normalizedColumnNames);
 }
-throw new SQLException("Could not find " + columnName + " in " + 
normalizedColumnNames);
+return result.intValue();
   }
 
   @Override
diff --git a/jdbc/src/test/org/apache/hive/jdbc/TestHiveBaseResultSet.java 
b/jdbc/src/test/org/apache/hive/jdbc/TestHiveBaseResultSet.java
index 9d42317..bca26f3 100644
--- a/jdbc/src/test/org/apache/hive/jdbc/TestHiveBaseResultSet.java
+++ b/jdbc/src/test/org/apache/hive/jdbc/TestHiveBaseResultSet.java
@@ -20,9 +20,11 @@ package org.apache.hive.jdbc;
 
 import static org.mockito.Mockito.when;
 
+import java.lang.reflect.Field;
 import java.nio.charset.StandardCharsets;
 import java.sql.SQLException;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -30,6 +32,7 @@ import org.apache.hive.service.cli.TableSchema;
 import org.junit.Assert;
 import org.junit.Test;
 import org.mockito.Mockito;
+import org.mockito.internal.util.reflection.FieldSetter;
 
 /**
  * Test suite for {@link HiveBaseResultSet} class.
@@ -237,4 +240,94 @@ public class TestHiveBaseResultSet {
 Assert.assertFalse(resultSet.wasNull());
   }
 
+  @Test
+  public void testFindColumnUnqualified() throws Exception {
+FieldSchema fieldSchema1 = new FieldSchema();
+fieldSchema1.setType("int");
+
+FieldSchema fieldSchema2 = new FieldSchema();
+fieldSchema2.setType("int");
+
+FieldSchema fieldSchema3 = new FieldSchema();
+fieldSchema3.setType("int");
+
+List<FieldSchema> fieldSchemas = Arrays.asList(fieldSchema1, fieldSchema2, 
fieldSchema3);
+TableSchema schema = new TableSchema(fieldSchemas);
+
+HiveBaseResultSet resultSet = Mockito.mock(HiveBaseResultSet.class);
+resultSet.row = new Object[] { new Integer(1), new Integer(2), new 
Integer(3) };
+resultSet.normalizedColumnNames = Arrays.asList("one", "two", "three");
+
+Field executorField = 
HiveBaseResultSet.class.getDeclaredField("columnNameIndexCache");
+FieldSetter.setField(resultSet, executorField, new HashMap<>());
+
+when(resultSet.getSchema()).the

[hive] branch master updated: Cleanup and add tests for HiveBaseResultSet.java (David Mollitor, reviewed by Peter Vary)

2020-05-05 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 2b0e424  Cleanup and add tests for HiveBaseResultSet.java (David 
Mollitor, reviewed by Peter Vary)
2b0e424 is described below

commit 2b0e424daa67bfa62e695701533fb80b8f10f383
Author: David Mollitor 
AuthorDate: Tue May 5 09:11:38 2020 -0400

Cleanup and add tests for HiveBaseResultSet.java (David Mollitor, reviewed 
by Peter Vary)
---
 .../org/apache/hive/jdbc/HiveBaseResultSet.java| 598 ++---
 .../apache/hive/jdbc/TestHiveBaseResultSet.java| 240 +
 2 files changed, 649 insertions(+), 189 deletions(-)

diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
index 350648f..a69ea95 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
@@ -52,8 +52,8 @@ import org.apache.hadoop.hive.serde2.thrift.Type;
 import org.apache.hive.service.cli.TableSchema;
 
 /**
- * Data independent base class which implements the common part of
- * all Hive result sets.
+ * Data independent base class which implements the common part of all Hive
+ * result sets.
  */
 public abstract class HiveBaseResultSet implements ResultSet {
 
@@ -68,358 +68,457 @@ public abstract class HiveBaseResultSet implements 
ResultSet {
 
   private TableSchema schema;
 
+  @Override
   public boolean absolute(int row) throws SQLException {
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
+  @Override
   public void afterLast() throws SQLException {
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
+  @Override
   public void beforeFirst() throws SQLException {
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
+  @Override
   public void cancelRowUpdates() throws SQLException {
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
+  @Override
   public void deleteRow() throws SQLException {
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
-  public int findColumn(String columnName) throws SQLException {
+  @Override
+  public int findColumn(final String columnName) throws SQLException {
 int columnIndex = 0;
-boolean findColumn = false;
-for (String normalizedColumnName : normalizedColumnNames) {
-  ++columnIndex;
-  String[] names = normalizedColumnName.split("\\.");
-  String name = names[names.length -1];
-  if (name.equalsIgnoreCase(columnName) || 
normalizedColumnName.equalsIgnoreCase(columnName)) {
-findColumn = true;
-break;
+if (columnName != null) {
+  final String lcColumnName = columnName.toLowerCase();
+  for (final String normalizedColumnName : normalizedColumnNames) {
+++columnIndex;
+final int idx = normalizedColumnName.lastIndexOf('.');
+final String name = (idx == -1) ? normalizedColumnName : 
normalizedColumnName.substring(1 + idx);
+if (name.equals(lcColumnName) || 
normalizedColumnName.equals(lcColumnName)) {
+  return columnIndex;
+}
   }
 }
-if (!findColumn) {
-  throw new SQLException("Could not find " + columnName + " in " + 
normalizedColumnNames);
-} else {
-  return columnIndex;
-}
+throw new SQLException("Could not find " + columnName + " in " + 
normalizedColumnNames);
   }
 
+  @Override
   public boolean first() throws SQLException {
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
+  @Override
   public Array getArray(int i) throws SQLException {
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
+  @Override
   public Array getArray(String colName) throws SQLException {
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
+  @Override
   public InputStream getAsciiStream(int columnIndex) throws SQLException {
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
+  @Override
   public InputStream getAsciiStream(String columnName) throws SQLException {
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
+  @Override
   public BigDecimal getBigDecimal(int columnIndex) throws SQLException {
-Object val = getObject(columnIndex);
-
-if (val == null || val instanceof BigDecimal) {
-  return (BigDecimal)val;
+final Object val = getObject(columnIndex);
+if (val == null) {
+  return null;
 }
-
-throw new SQLException("Illegal conversion");
+if (val instanceof BigDecimal) {
+  ret

[hive] branch master updated: HIVE-23117: Review of HiveStatement Class (David Mollitor reviewed by Peter Vary)

2020-05-01 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new cdd55aa  HIVE-23117: Review of HiveStatement Class (David Mollitor 
reviewed by Peter Vary)
cdd55aa is described below

commit cdd55aa319a3440963a886ebfff11cd2a240781d
Author: David Mollitor 
AuthorDate: Fri May 1 09:36:20 2020 -0400

HIVE-23117: Review of HiveStatement Class (David Mollitor reviewed by Peter 
Vary)
---
 .../java/org/apache/hive/jdbc/HiveStatement.java   | 385 -
 .../apache/hive/jdbc/logs/InPlaceUpdateStream.java |  14 -
 2 files changed, 59 insertions(+), 340 deletions(-)

diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
index a74a3a8..4b61ce1 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
@@ -51,17 +51,20 @@ import java.sql.SQLTimeoutException;
 import java.sql.SQLWarning;
 import java.util.ArrayList;
 import java.util.Base64;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Optional;
 
 /**
- * HiveStatement.
- *
+ * The object used for executing a static SQL statement and returning the
+ * results it produces.
  */
 public class HiveStatement implements java.sql.Statement {
-  public static final Logger LOG = 
LoggerFactory.getLogger(HiveStatement.class.getName());
+
+  private static final Logger LOG = 
LoggerFactory.getLogger(HiveStatement.class);
 
   public static final String QUERY_CANCELLED_MESSAGE = "Query was cancelled.";
   private static final int DEFAULT_FETCH_SIZE =
@@ -71,10 +74,10 @@ public class HiveStatement implements java.sql.Statement {
   private TCLIService.Iface client;
   private TOperationHandle stmtHandle = null;
   private final TSessionHandle sessHandle;
-  Map<String, String> sessConf = new HashMap<String, String>();
+  Map<String, String> sessConf = new HashMap<>();
   private int fetchSize;
   private final int defaultFetchSize;
-  private boolean isScrollableResultset = false;
+  private final boolean isScrollableResultset;
   private boolean isOperationComplete = false;
   private boolean closeOnResultSetCompletion = false;
   /**
@@ -118,15 +121,9 @@ public class HiveStatement implements java.sql.Statement {
*/
   private boolean isLogBeingGenerated = true;
 
-  /**
-   * Keep this state so we can know whether the statement is submitted to HS2 
and start execution
-   * successfully.
-   */
-  private boolean isExecuteStatementFailed = false;
-
   private int queryTimeout = 0;
 
-  private InPlaceUpdateStream inPlaceUpdateStream = InPlaceUpdateStream.NO_OP;
+  private Optional<InPlaceUpdateStream> inPlaceUpdateStream;
 
   public HiveStatement(HiveConnection connection, TCLIService.Iface client,
   TSessionHandle sessHandle) {
@@ -146,25 +143,14 @@ public class HiveStatement implements java.sql.Statement {
 this.isScrollableResultset = isScrollableResultset;
 this.defaultFetchSize = defaultFetchSize;
 this.fetchSize = (initFetchSize == 0) ? defaultFetchSize : initFetchSize;
+this.inPlaceUpdateStream = Optional.empty();
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.Statement#addBatch(java.lang.String)
-   */
-
   @Override
   public void addBatch(String sql) throws SQLException {
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.Statement#cancel()
-   */
-
   @Override
   public void cancel() throws SQLException {
 checkConnection("cancel");
@@ -181,28 +167,16 @@ public class HiveStatement implements java.sql.Statement {
 } catch (SQLException e) {
   throw e;
 } catch (Exception e) {
-  throw new SQLException(e.toString(), "08S01", e);
+  throw new SQLException("Failed to cancel statement", "08S01", e);
 }
 isCancelled = true;
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.Statement#clearBatch()
-   */
-
   @Override
   public void clearBatch() throws SQLException {
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.Statement#clearWarnings()
-   */
-
   @Override
   public void clearWarnings() throws SQLException {
 warningChain = null;
@@ -223,14 +197,13 @@ public class HiveStatement implements java.sql.Statement {
 } catch (SQLException e) {
   throw e;
 } catch (Exception e) {
-  throw new SQLException(e.toString(), "08S01", e);
+  throw new SQLException("Failed to close statement", "08S01", e);
 }
   }
 
   void closeClientOperation() throws SQLException {
 closeStatementIfNeeded();
 isQueryClosed = true;
-isExec

[hive] branch master updated: HIVE-23113: Clean Up HiveCallableStatement (David Mollitor, reviewed by Peter Vary)

2020-04-20 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 6a47f9e  HIVE-23113: Clean Up HiveCallableStatement (David Mollitor, 
reviewed by Peter Vary)
6a47f9e is described below

commit 6a47f9e292b1976f238be3f7c706aeac038731ed
Author: David Mollitor 
AuthorDate: Mon Apr 20 09:18:15 2020 -0400

HIVE-23113: Clean Up HiveCallableStatement (David Mollitor, reviewed by 
Peter Vary)
---
 .../apache/hive/jdbc/HiveCallableStatement.java| 1889 +++-
 1 file changed, 276 insertions(+), 1613 deletions(-)

diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveCallableStatement.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveCallableStatement.java
index 581eb91..6de1771 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveCallableStatement.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveCallableStatement.java
@@ -43,2420 +43,1083 @@ import java.util.Calendar;
 import java.util.Map;
 
 /**
- * HiveCallableStatement.
- *
+ * The Statement used to execute SQL stored procedures. The JDBC API provides a
+ * stored procedure SQL escape syntax that allows stored procedures to be 
called
+ * in a standard way for all RDBMSs. Hive does not support SQL stored
+ * procedures.
  */
 public class HiveCallableStatement implements java.sql.CallableStatement {
+
   private final Connection connection;
 
   /**
+   * Constructor.
*
+   * @param connection the connection
*/
   public HiveCallableStatement(Connection connection) {
 this.connection = connection;
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.CallableStatement#getArray(int)
-   */
-
+  @Override
   public Array getArray(int i) throws SQLException {
-// TODO Auto-generated method stub
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.CallableStatement#getArray(java.lang.String)
-   */
-
+  @Override
   public Array getArray(String parameterName) throws SQLException {
-// TODO Auto-generated method stub
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.CallableStatement#getBigDecimal(int)
-   */
-
+  @Override
   public BigDecimal getBigDecimal(int parameterIndex) throws SQLException {
-// TODO Auto-generated method stub
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.CallableStatement#getBigDecimal(java.lang.String)
-   */
-
+  @Override
   public BigDecimal getBigDecimal(String parameterName) throws SQLException {
-// TODO Auto-generated method stub
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.CallableStatement#getBigDecimal(int, int)
-   */
-
+  @Override
   public BigDecimal getBigDecimal(int parameterIndex, int scale) throws 
SQLException {
-// TODO Auto-generated method stub
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.CallableStatement#getBlob(int)
-   */
-
+  @Override
   public Blob getBlob(int i) throws SQLException {
-// TODO Auto-generated method stub
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.CallableStatement#getBlob(java.lang.String)
-   */
-
+  @Override
   public Blob getBlob(String parameterName) throws SQLException {
-// TODO Auto-generated method stub
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.CallableStatement#getBoolean(int)
-   */
-
+  @Override
   public boolean getBoolean(int parameterIndex) throws SQLException {
-// TODO Auto-generated method stub
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.CallableStatement#getBoolean(java.lang.String)
-   */
-
+  @Override
   public boolean getBoolean(String parameterName) throws SQLException {
-// TODO Auto-generated method stub
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.CallableStatement#getByte(int)
-   */
-
+  @Override
   public byte getByte(int parameterIndex) throws SQLException {
-// TODO Auto-generated method stub
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.CallableStatement#getByte(java.lang.String)
-   */
-
+  @Override
   public byte getByte(String parameterName) throws SQLExcept

[hive] branch master updated: HIVE-23171: Create Tool To Visualize Hive Parser Tree (David Mollitor, reviewed by Miklos Gergely)

2020-04-16 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 83f917c  HIVE-23171: Create Tool To Visualize Hive Parser Tree (David 
Mollitor, reviewed by Miklos Gergely)
83f917c is described below

commit 83f917c787d60543f171b23d28ceda44d69c235d
Author: David Mollitor 
AuthorDate: Thu Apr 16 10:32:39 2020 -0400

HIVE-23171: Create Tool To Visualize Hive Parser Tree (David Mollitor, 
reviewed by Miklos Gergely)
---
 parser/pom.xml |   7 +-
 .../org/apache/hadoop/hive/ql/parse/HqlParser.java | 145 +
 2 files changed, 148 insertions(+), 4 deletions(-)

diff --git a/parser/pom.xml b/parser/pom.xml
index 05fd78d..18e0ad8 100644
--- a/parser/pom.xml
+++ b/parser/pom.xml
@@ -49,14 +49,13 @@
 
   org.antlr
   antlr-runtime
-  ${antlr.version}
 
 
   org.antlr
-  ST4
-  ${ST4.version}
+  stringtemplate
+  3.2.1
+  test
 
-
   
 
   
diff --git a/parser/src/test/org/apache/hadoop/hive/ql/parse/HqlParser.java 
b/parser/src/test/org/apache/hadoop/hive/ql/parse/HqlParser.java
new file mode 100644
index 000..e74172c
--- /dev/null
+++ b/parser/src/test/org/apache/hadoop/hive/ql/parse/HqlParser.java
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse;
+
+import org.antlr.runtime.ANTLRStringStream;
+import org.antlr.runtime.CharStream;
+import org.antlr.runtime.CommonToken;
+import org.antlr.runtime.RecognitionException;
+import org.antlr.runtime.Token;
+import org.antlr.runtime.TokenRewriteStream;
+import org.antlr.runtime.TokenStream;
+import org.antlr.runtime.tree.CommonTree;
+import org.antlr.runtime.tree.CommonTreeAdaptor;
+import org.antlr.runtime.tree.DOTTreeGenerator;
+import org.antlr.runtime.tree.TreeAdaptor;
+import org.antlr.stringtemplate.StringTemplate;
+
+/**
+ * A simple command-line application that accepts an SQL statement as a single
+ * argument. The SQl is parsed with the Hive SQL (HQL) parser and outputs the
+ * parse graph in the DOT (graphviz) file format. DOT is a graph description
+ * language. DOT graphs are typically files with the filename extension gv or
+ * dot. This information
+ *
+ * @see <a href="https://dreampuf.github.io/GraphvizOnline/">Graphviz Online
+ *      Render</a>
+ */
+public class HqlParser {
+
+  /**
+   * The main entry point of this application.
+   */
+  public static void main(String[] args) throws RecognitionException {
+HiveLexer lexer = new HiveLexer(new ANTLRNoCaseStringStream(args[0]));
+HiveParser parser = new HiveParser(new TokenRewriteStream(lexer));
+parser.setTreeAdaptor(ADAPTOR);
+CommonTree tree = parser.statement().getTree();
+DOTTreeGenerator gen = new DOTTreeGenerator();
+StringTemplate st = gen.toDOT(tree);
+System.out.println(st);
+  }
+
+  /**
+   * Tree adaptor for making antlr return ASTNodes instead of CommonTree nodes
+   * so that the graph walking algorithms and the rules framework defined in
+   * ql.lib can be used with the AST Nodes.
+   */
+  public static final TreeAdaptor ADAPTOR = new CommonTreeAdaptor() {
+/**
+ * Creates an ASTNode for the given token. The ASTNode is a wrapper around
+ * antlr's CommonTree class that implements the Node interface.
+ *
+ * @param payload The token.
+ * @return Object (which is actually an ASTNode) for the token.
+ */
+@Override
+public Object create(Token payload) {
+  return new ASTNode(payload);
+}
+
+@Override
+public Token createToken(int tokenType, String text) {
+  if (tokenType == HiveParser.TOK_SETCOLREF) {
+// ParseUtils.processSetColsNode() can change type of TOK_SETCOLREF
+// nodes later
+return new CommonToken(tokenType, text);
+  } else {
+return new ImmutableCommonToken(tokenType, text);
+  }
+}
+
+@Override
+public Object dupNode(Object t) {
+  return create(((CommonTree) t).token);
+}
+
+@Override
+public 

[hive] branch master updated: HIVE-23098: Allow Operation assertState to Accept a Collection (David Mollitor, reviewed by Naveen Gangam)

2020-04-14 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 818a68f  HIVE-23098: Allow Operation assertState to Accept a 
Collection (David Mollitor, reviewed by Naveen Gangam)
818a68f is described below

commit 818a68ff9b5396dc99237469db25b87513bfff7e
Author: David Mollitor 
AuthorDate: Tue Apr 14 20:42:58 2020 -0400

HIVE-23098: Allow Operation assertState to Accept a Collection (David 
Mollitor, reviewed by Naveen Gangam)
---
 .../apache/hive/service/cli/operation/GetCatalogsOperation.java| 5 ++---
 .../org/apache/hive/service/cli/operation/GetColumnsOperation.java | 5 ++---
 .../hive/service/cli/operation/GetCrossReferenceOperation.java | 7 +++
 .../apache/hive/service/cli/operation/GetFunctionsOperation.java   | 7 +++
 .../apache/hive/service/cli/operation/GetPrimaryKeysOperation.java | 7 +++
 .../org/apache/hive/service/cli/operation/GetSchemasOperation.java | 7 +++
 .../apache/hive/service/cli/operation/GetTableTypesOperation.java  | 7 +++
 .../org/apache/hive/service/cli/operation/GetTablesOperation.java  | 5 +++--
 .../apache/hive/service/cli/operation/GetTypeInfoOperation.java| 7 +++
 .../src/java/org/apache/hive/service/cli/operation/Operation.java  | 7 +++
 .../java/org/apache/hive/service/cli/operation/SQLOperation.java   | 3 ++-
 11 files changed, 30 insertions(+), 37 deletions(-)

diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
 
b/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
index d7fc1e8..d824ad6 100644
--- 
a/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
+++ 
b/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
@@ -18,8 +18,7 @@
 
 package org.apache.hive.service.cli.operation;
 
-import java.util.ArrayList;
-import java.util.Arrays;
+import java.util.Collections;
 
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hive.service.cli.FetchOrientation;
@@ -80,7 +79,7 @@ public class GetCatalogsOperation extends MetadataOperation {
*/
   @Override
   public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) 
throws HiveSQLException {
-assertState(new 
ArrayList(Arrays.asList(OperationState.FINISHED)));
+assertState(Collections.singleton(OperationState.FINISHED));
 validateDefaultFetchOrientation(orientation);
 if (orientation.equals(FetchOrientation.FETCH_FIRST)) {
   rowSet.setStartOffset(0);
diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
 
b/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
index 6bbdce5..6d0b587 100644
--- 
a/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
+++ 
b/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
@@ -20,7 +20,6 @@ package org.apache.hive.service.cli.operation;
 
 import java.sql.DatabaseMetaData;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -247,7 +246,7 @@ public class GetColumnsOperation extends MetadataOperation {
*/
   @Override
   public TableSchema getResultSetSchema() throws HiveSQLException {
-assertState(new 
ArrayList(Arrays.asList(OperationState.FINISHED)));
+assertState(Collections.singleton(OperationState.FINISHED));
 return RESULT_SET_SCHEMA;
   }
 
@@ -256,7 +255,7 @@ public class GetColumnsOperation extends MetadataOperation {
*/
   @Override
   public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) 
throws HiveSQLException {
-assertState(new 
ArrayList(Arrays.asList(OperationState.FINISHED)));
+assertState(Collections.singleton(OperationState.FINISHED));
 validateDefaultFetchOrientation(orientation);
 if (orientation.equals(FetchOrientation.FETCH_FIRST)) {
   rowSet.setStartOffset(0);
diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java
 
b/service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java
index e39502f..37f5b60 100644
--- 
a/service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java
+++ 
b/service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java
@@ -18,8 +18,7 @@
 
 package org.apache.hive.service.cli.operation;
 
-import java.util.ArrayList;
-import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
@@ -163,7 +162,7 @@ public class GetCrossReferenceOperation extends 
MetadataOperation {
*/
   @Override
   public TableSchema getResultSetSchema() throws

[hive] branch master updated: HIVE-23183: Make TABLE Token Optional in TRUNCATE Statement (David Mollitor, reviewed by Miklos Gergely)

2020-04-14 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new ffad656  HIVE-23183: Make TABLE Token Optional in TRUNCATE Statement 
(David Mollitor, reviewed by Miklos Gergely)
ffad656 is described below

commit ffad6567b2be5ad02a48d73b5f5acbb899eb07c5
Author: David Mollitor 
AuthorDate: Tue Apr 14 09:48:14 2020 -0400

HIVE-23183: Make TABLE Token Optional in TRUNCATE Statement (David 
Mollitor, reviewed by Miklos Gergely)
---
 .../org/apache/hadoop/hive/ql/parse/HiveParser.g   |  2 +-
 .../test/queries/clientpositive/truncate_table.q   | 11 -
 .../results/clientpositive/truncate_table.q.out| 53 ++
 3 files changed, 64 insertions(+), 2 deletions(-)

diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g 
b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 2b865f3..b03b098 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -1185,7 +1185,7 @@ createTableStatement
 truncateTableStatement
 @init { pushMsg("truncate table statement", state); }
 @after { popMsg(state); }
-: KW_TRUNCATE KW_TABLE tablePartitionPrefix (KW_COLUMNS LPAREN 
columnNameList RPAREN)? force?
+: KW_TRUNCATE KW_TABLE? tablePartitionPrefix (KW_COLUMNS LPAREN 
columnNameList RPAREN)? force?
 -> ^(TOK_TRUNCATETABLE tablePartitionPrefix columnNameList? force?);
 
 dropTableStatement
diff --git a/ql/src/test/queries/clientpositive/truncate_table.q 
b/ql/src/test/queries/clientpositive/truncate_table.q
index 6a1ce7a..5c20c89 100644
--- a/ql/src/test/queries/clientpositive/truncate_table.q
+++ b/ql/src/test/queries/clientpositive/truncate_table.q
@@ -1,5 +1,8 @@
 create table src_truncate (key string, value string);
-load data local inpath '../../data/files/kv1.txt' into table src_truncate;;
+load data local inpath '../../data/files/kv1.txt' into table src_truncate;
+
+create table src_truncate_alt (key string, value string);
+load data local inpath '../../data/files/kv1.txt' into table src_truncate_alt;
 
 create table srcpart_truncate (key string, value string) partitioned by (ds 
string, hr string);
 alter table srcpart_truncate add partition (ds='2008-04-08', hr='11');
@@ -23,6 +26,12 @@ TRUNCATE TABLE src_truncate;
 select * from src_truncate;
 select count (*) from src_truncate;
 
+-- truncate non-partitioned table with alternative syntax
+explain TRUNCATE src_truncate_alt;
+TRUNCATE src_truncate_alt;
+select * from src_truncate_alt;
+select count (*) from src_truncate_alt;
+
 -- truncate a partition
 explain TRUNCATE TABLE srcpart_truncate partition (ds='2008-04-08', hr='11');
 TRUNCATE TABLE srcpart_truncate partition (ds='2008-04-08', hr='11');
diff --git a/ql/src/test/results/clientpositive/truncate_table.q.out 
b/ql/src/test/results/clientpositive/truncate_table.q.out
index 0607bcb..8feafce 100644
--- a/ql/src/test/results/clientpositive/truncate_table.q.out
+++ b/ql/src/test/results/clientpositive/truncate_table.q.out
@@ -14,6 +14,22 @@ POSTHOOK: query: load data local inpath 
'../../data/files/kv1.txt' into table sr
 POSTHOOK: type: LOAD
  A masked pattern was here 
 POSTHOOK: Output: default@src_truncate
+PREHOOK: query: create table src_truncate_alt (key string, value string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@src_truncate_alt
+POSTHOOK: query: create table src_truncate_alt (key string, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@src_truncate_alt
+PREHOOK: query: load data local inpath '../../data/files/kv1.txt' into table 
src_truncate_alt
+PREHOOK: type: LOAD
+ A masked pattern was here 
+PREHOOK: Output: default@src_truncate_alt
+POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' into table 
src_truncate_alt
+POSTHOOK: type: LOAD
+ A masked pattern was here 
+POSTHOOK: Output: default@src_truncate_alt
 PREHOOK: query: create table srcpart_truncate (key string, value string) 
partitioned by (ds string, hr string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
@@ -151,6 +167,43 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_truncate
  A masked pattern was here 
 0
+PREHOOK: query: explain TRUNCATE src_truncate_alt
+PREHOOK: type: TRUNCATETABLE
+PREHOOK: Output: default@src_truncate_alt
+POSTHOOK: query: explain TRUNCATE src_truncate_alt
+POSTHOOK: type: TRUNCATETABLE
+POSTHOOK: Output: default@src_truncate_alt
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+Truncate Table or Partition
+  table name: default.src_truncate_alt
+
+PREHOOK: query: TRUNCATE src_truncate_alt
+PREHOOK: type: TRUNCATETABLE
+PREHOOK: Output: default@src

[hive] branch master updated: HIVE-23128: SHOW CREATE TABLE Creates Incorrect Syntax When Database Specified (David Mollitor, reviewed by Miklos Gergely)

2020-04-08 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new d2163cb  HIVE-23128: SHOW CREATE TABLE Creates Incorrect Syntax When 
Database Specified (David Mollitor, reviewed by Miklos Gergely)
d2163cb is described below

commit d2163cbfb8bacf859fa8572e24c8533bb2dcb0f3
Author: David Mollitor 
AuthorDate: Wed Apr 8 15:30:07 2020 -0400

HIVE-23128: SHOW CREATE TABLE Creates Incorrect Syntax When Database 
Specified (David Mollitor, reviewed by Miklos Gergely)
---
 .../table/create/show/ShowCreateTableAnalyzer.java | 18 +++---
 .../ddl/table/create/show/ShowCreateTableDesc.java | 18 --
 .../create/show/ShowCreateTableOperation.java  | 38 +++---
 .../clientpositive/llap/whroot_external1.q.out |  6 ++--
 .../show_create_table_db_table.q.out   |  8 ++---
 .../show_create_table_temp_table.q.out |  2 +-
 6 files changed, 63 insertions(+), 27 deletions(-)

diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableAnalyzer.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableAnalyzer.java
index c7479da..b362837 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableAnalyzer.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableAnalyzer.java
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.hive.ql.ddl.table.create.show;
 
+import java.util.Map.Entry;
+
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
@@ -43,15 +46,20 @@ public class ShowCreateTableAnalyzer extends 
BaseSemanticAnalyzer {
   public void analyzeInternal(ASTNode root) throws SemanticException {
 ctx.setResFile(ctx.getLocalTmpPath());
 
-String tableName = getUnescapedName((ASTNode)root.getChild(0));
-Table tab = getTable(tableName);
-inputs.add(new ReadEntity(tab));
+Entry tableIdentifier = getDbTableNamePair((ASTNode) 
root.getChild(0));
+Table table = getTable(tableIdentifier.getKey(), 
tableIdentifier.getValue(), true);
+
+inputs.add(new ReadEntity(table));
+
+// If no DB was specified in statement, do not include it in the final 
output
+ShowCreateTableDesc desc = new ShowCreateTableDesc(table.getDbName(), 
table.getTableName(),
+ctx.getResFile().toString(), 
StringUtils.isBlank(tableIdentifier.getKey()));
 
-ShowCreateTableDesc desc = new ShowCreateTableDesc(tableName, 
ctx.getResFile().toString());
 Task task = TaskFactory.get(new DDLWork(getInputs(), 
getOutputs(), desc));
+task.setFetchSource(true);
+
 rootTasks.add(task);
 
-task.setFetchSource(true);
 setFetchTask(createFetchTask(ShowCreateTableDesc.SCHEMA));
   }
 }
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableDesc.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableDesc.java
index 4687cbc..cd580b7 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableDesc.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableDesc.java
@@ -33,12 +33,16 @@ public class ShowCreateTableDesc implements DDLDesc, 
Serializable {
 
   public static final String SCHEMA = "createtab_stmt#string";
 
-  private final String resFile;
+  private final String databaseName;
   private final String tableName;
+  private final String resFile;
+  private final boolean isRelative;
 
-  public ShowCreateTableDesc(String tableName, String resFile) {
+  public ShowCreateTableDesc(String databaseName, String tableName, String 
resFile, boolean isRelative) {
+this.databaseName = databaseName;
 this.tableName = tableName;
 this.resFile = resFile;
+this.isRelative = isRelative;
   }
 
   @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
@@ -50,4 +54,14 @@ public class ShowCreateTableDesc implements DDLDesc, 
Serializable {
   public String getTableName() {
 return tableName;
   }
+
+  @Explain(displayName = "database name", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED })
+  public String getDatabaseName() {
+return databaseName;
+  }
+
+  @Explain(displayName = "relative table location", explainLevels = { 
Level.EXTENDED })
+  public boolean isRelative() {
+return isRelative;
+  }
 }
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableOperation.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableOperation.java
index e07559f..51d9f10 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCrea

[hive] branch master updated: HIVE-23096: Review Code Path for getResults (David Mollitor reviewed by Naveen Gangnam)

2020-04-01 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 484d182  HIVE-23096: Review Code Path for getResults (David Mollitor 
reviewed by Naveen Gangnam)
484d182 is described below

commit 484d1823152bb3becaf5a753673d581fa405d9e5
Author: David Mollitor 
AuthorDate: Wed Apr 1 09:25:15 2020 -0400

HIVE-23096: Review Code Path for getResults (David Mollitor reviewed by 
Naveen Gangnam)
---
 ql/src/java/org/apache/hadoop/hive/ql/Driver.java| 11 +++
 .../apache/hive/service/cli/operation/SQLOperation.java  | 16 +---
 2 files changed, 16 insertions(+), 11 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java 
b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 7024910..517b0cc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.LinkedList;
 import java.util.List;
@@ -888,9 +889,10 @@ public class Driver implements IDriver {
 }
 
 int numRows = 0;
-String row = null;
 
 while (numRows < maxRows) {
+  final String row;
+
   if (driverContext.getResStream() == null) {
 return (numRows > 0);
   }
@@ -900,16 +902,17 @@ public class Driver implements IDriver {
   try {
 ss = Utilities.readColumn(driverContext.getResStream(), bos);
 if (bos.getLength() > 0) {
-  row = new String(bos.getData(), 0, bos.getLength(), "UTF-8");
+  row = new String(bos.getData(), 0, bos.getLength(), 
StandardCharsets.UTF_8);
 } else if (ss == Utilities.StreamStatus.TERMINATED) {
-  row = new String();
+  row = "";
+} else {
+  row = null;
 }
 
 if (row != null) {
   numRows++;
   res.add(row);
 }
-row = null;
   } catch (IOException e) {
 CONSOLE.printError("FAILED: Unexpected IO exception : " + 
e.getMessage());
 return false;
diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java 
b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
index 96770f4..eefd644 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
@@ -96,6 +96,7 @@ public class SQLOperation extends ExecuteStatementOperation {
   private ScheduledExecutorService timeoutExecutor;
   private final boolean runAsync;
   private final long operationLogCleanupDelayMs;
+  private final ArrayList convey = new ArrayList<>();
 
   /**
* A map to track query count running by each user
@@ -444,8 +445,6 @@ public class SQLOperation extends ExecuteStatementOperation 
{
 return resultSchema;
   }
 
-  private transient final List convey = new ArrayList();
-
   @Override
   public RowSet getNextRowSet(FetchOrientation orientation, long maxRows)
 throws HiveSQLException {
@@ -461,7 +460,6 @@ public class SQLOperation extends ExecuteStatementOperation 
{
   maxRows = 1;
   isBlobBased = true;
 }
-driver.setMaxRows(Math.toIntExact(maxRows));
 RowSet rowSet = RowSetFactory.create(getResultSetSchema(), 
getProtocolVersion(), isBlobBased);
 try {
   /* if client is requesting fetch-from-start and its not the first time 
reading from this operation
@@ -471,15 +469,19 @@ public class SQLOperation extends 
ExecuteStatementOperation {
 driver.resetFetch();
   }
   fetchStarted = true;
-  driver.setMaxRows(Math.toIntExact(maxRows));
+
+  final int capacity = Math.toIntExact(maxRows);
+  convey.ensureCapacity(capacity);
+  driver.setMaxRows(capacity);
   if (driver.getResults(convey)) {
+if (convey.size() == capacity) {
+  LOG.info("Result set buffer filled to capacity [{}]", capacity);
+}
 return decode(convey, rowSet);
   }
   return rowSet;
-} catch (IOException e) {
-  throw new HiveSQLException(e);
 } catch (Exception e) {
-  throw new HiveSQLException(e);
+  throw new HiveSQLException("Unable to get the next row set", e);
 } finally {
   convey.clear();
 }



[hive] branch master updated: HIVE-23079: Remove Calls to printStackTrace in Module hive-serde (David Mollitor reviewed by Peter Vary)

2020-04-01 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 3b22b51  HIVE-23079: Remove Calls to printStackTrace in Module 
hive-serde (David Mollitor reviewed by Peter Vary)
3b22b51 is described below

commit 3b22b515f0628dfb7a74ed3f50444c0926cf70a6
Author: David Mollitor 
AuthorDate: Wed Apr 1 09:18:43 2020 -0400

HIVE-23079: Remove Calls to printStackTrace in Module hive-serde (David 
Mollitor reviewed by Peter Vary)
---
 .../java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java   | 2 --
 .../org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java | 1 -
 .../org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeSet.java | 1 -
 3 files changed, 4 deletions(-)

diff --git 
a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java 
b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java
index 948cddc..2b832ac 100644
--- 
a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java
+++ 
b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java
@@ -163,7 +163,6 @@ public class DynamicSerDe extends AbstractSerDe {
   deserializeReuse = bt.deserialize(deserializeReuse, iprot_);
   return deserializeReuse;
 } catch (Exception e) {
-  e.printStackTrace();
   throw new SerDeException(e);
 }
   }
@@ -220,7 +219,6 @@ public class DynamicSerDe extends AbstractSerDe {
   bt.serialize(obj, objInspector, oprot_);
   oprot_.getTransport().flush();
 } catch (Exception e) {
-  e.printStackTrace();
   throw new SerDeException(e);
 }
 ret.set(bos_.getData(), 0, bos_.getLength());
diff --git 
a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java
 
b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java
index 3f086cd..07f7105 100644
--- 
a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java
+++ 
b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java
@@ -63,7 +63,6 @@ public class DynamicSerDeTypeMap extends DynamicSerDeTypeBase 
{
   Map l = Collections.singletonMap(o, o2);
   return l.getClass();
 } catch (Exception e) {
-  e.printStackTrace();
   throw new RuntimeException(e);
 }
   }
diff --git 
a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeSet.java
 
b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeSet.java
index f41959b..58346d7 100644
--- 
a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeSet.java
+++ 
b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeSet.java
@@ -57,7 +57,6 @@ public class DynamicSerDeTypeSet extends DynamicSerDeTypeBase 
{
   Set l = Collections.singleton(o);
   return l.getClass();
 } catch (Exception e) {
-  e.printStackTrace();
   throw new RuntimeException(e);
 }
   }



[hive] branch master updated: HIVE-23078: Remove HiveDriver SecurityManager Check (David Mollitor reviewed by Naveen Gangam)

2020-03-31 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 3de2dc1  HIVE-23078: Remove HiveDriver SecurityManager Check (David 
Mollitor reviewed by Naveen Gangam)
3de2dc1 is described below

commit 3de2dc1cd5db015e3c0f43a2fb16faf19e14efd7
Author: David Mollitor 
AuthorDate: Tue Mar 31 09:21:45 2020 -0400

HIVE-23078: Remove HiveDriver SecurityManager Check (David Mollitor 
reviewed by Naveen Gangam)
---
 jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java | 12 
 1 file changed, 12 deletions(-)

diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
index faa4d17..edd9e57 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
@@ -67,18 +67,6 @@ public class HiveDriver implements Driver {
*/
   private static final String PORT_PROPERTY_KEY = "PORT";
 
-
-  /**
-   *
-   */
-  public HiveDriver() {
-// TODO Auto-generated constructor stub
-SecurityManager security = System.getSecurityManager();
-if (security != null) {
-  security.checkWrite("foobah");
-}
-  }
-
   /**
* Checks whether a given url is in a valid format.
*



[hive] branch master updated: HIVE-23077: Remove Calls to printStackTrace in Module hive-jdbc (David Mollitor reviewed by Naveen Gangam)

2020-03-30 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 6275513  HIVE-23077: Remove Calls to printStackTrace in Module 
hive-jdbc (David Mollitor reviewed by Naveen Gangam)
6275513 is described below

commit 62755130eb008a0b09397f78d4fe947819b18273
Author: David Mollitor 
AuthorDate: Mon Mar 30 09:24:33 2020 -0400

HIVE-23077: Remove Calls to printStackTrace in Module hive-jdbc (David 
Mollitor reviewed by Naveen Gangam)
---
 jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java | 3 +--
 jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java| 3 +--
 2 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
index dfaa40f..350648f 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
@@ -419,8 +419,7 @@ public abstract class HiveBaseResultSet implements 
ResultSet {
   wasNull = evaluated == null;
   return evaluated;
 } catch (Exception e) {
-  e.printStackTrace();
-  throw new SQLException("Unrecognized column type:" + columnType, e);
+  throw new SQLException("Unrecognized column type: " + columnType, e);
 }
   }
 
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
index 102683e..faa4d17 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
@@ -43,8 +43,7 @@ public class HiveDriver implements Driver {
 try {
   java.sql.DriverManager.registerDriver(new HiveDriver());
 } catch (SQLException e) {
-  // TODO Auto-generated catch block
-  e.printStackTrace();
+  throw new RuntimeException("Failed to register driver", e);
 }
   }
 



[hive] branch master updated: HIVE-23080: Clean Up HivePreparedStatement (David Mollitor reviewed by Naveen Gangam)

2020-03-27 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 20fccec  HIVE-23080: Clean Up HivePreparedStatement (David Mollitor 
reviewed by Naveen Gangam)
20fccec is described below

commit 20fccec2319e5b5c3a3772fc0d22a6ae3935875e
Author: David Mollitor 
AuthorDate: Fri Mar 27 16:52:00 2020 -0400

HIVE-23080: Clean Up HivePreparedStatement (David Mollitor reviewed by 
Naveen Gangam)
---
 .../apache/hive/jdbc/HivePreparedStatement.java| 469 -
 1 file changed, 77 insertions(+), 392 deletions(-)

diff --git a/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java 
b/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
index f86b112..dcd46f9 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
@@ -50,8 +50,7 @@ import org.apache.hive.service.rpc.thrift.TCLIService;
 import org.apache.hive.service.rpc.thrift.TSessionHandle;
 
 /**
- * HivePreparedStatement.
- *
+ * An object that represents a pre-compiled SQL statement.
  */
 public class HivePreparedStatement extends HiveStatement implements 
PreparedStatement {
   private final String sql;
@@ -67,23 +66,12 @@ public class HivePreparedStatement extends HiveStatement 
implements PreparedStat
 this.sql = sql;
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.PreparedStatement#addBatch()
-   */
-
+  @Override
   public void addBatch() throws SQLException {
-// TODO Auto-generated method stub
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.PreparedStatement#clearParameters()
-   */
-
+  @Override
   public void clearParameters() throws SQLException {
 this.parameters.clear();
   }
@@ -96,7 +84,7 @@ public class HivePreparedStatement extends HiveStatement 
implements PreparedStat
*
*  @throws SQLException
*/
-
+  @Override
   public boolean execute() throws SQLException {
 return super.execute(updateSql(sql, parameters));
   }
@@ -107,17 +95,12 @@ public class HivePreparedStatement extends HiveStatement 
implements PreparedStat
*  @return ResultSet
*  @throws SQLException
*/
-
+  @Override
   public ResultSet executeQuery() throws SQLException {
 return super.executeQuery(updateSql(sql, parameters));
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.PreparedStatement#executeUpdate()
-   */
-
+  @Override
   public int executeUpdate() throws SQLException {
 return super.executeUpdate(updateSql(sql, parameters));
   }
@@ -131,41 +114,40 @@ public class HivePreparedStatement extends HiveStatement 
implements PreparedStat
* @throws SQLException 
*/
   private String updateSql(final String sql, HashMap 
parameters) throws SQLException {
-List  parts=splitSqlStatement(sql);
-
+List parts = splitSqlStatement(sql);
+
 StringBuilder newSql = new StringBuilder(parts.get(0));
-for(int i=1;i splitSqlStatement(String sql) {
-List parts=new ArrayList<>();
-int apCount=0;
-int off=0;
-boolean skip=false;
+List parts = new ArrayList<>();
+int apCount = 0;
+int off = 0;
+boolean skip = false;
 
 for (int i = 0; i < sql.length(); i++) {
   char c = sql.charAt(i);
-  if(skip){
-skip=false;
+  if (skip) {
+skip = false;
 continue;
   }
   switch (c) {
@@ -185,414 +167,196 @@ public class HivePreparedStatement extends 
HiveStatement implements PreparedStat
 break;
   }
 }
-parts.add(sql.substring(off,sql.length()));
+parts.add(sql.substring(off, sql.length()));
 return parts;
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.PreparedStatement#getMetaData()
-   */
-
+  @Override
   public ResultSetMetaData getMetaData() throws SQLException {
-// TODO Auto-generated method stub
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.PreparedStatement#getParameterMetaData()
-   */
-
+  @Override
   public ParameterMetaData getParameterMetaData() throws SQLException {
-// TODO Auto-generated method stub
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.PreparedStatement#setArray(int, java.sql.Array)
-   */
-
+  @Override
   public void setArray(int i, Array x) throws SQLException {
-// TODO Auto-generated method stub
 throw new SQLFeatureNotSupportedException("Method not supported");
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.sql.PreparedStatement#setAsciiStream(int, java.io.InputStream)
-   */
-
+  @Override
   public void setAsciiStream(

[hive] branch master updated: HIVE-23007: Do Not Consider Client Session For Default Fetch Size (David Mollitor reviewed by Naveen Gangam)

2020-03-27 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 804bf60  HIVE-23007: Do Not Consider Client Session For Default Fetch 
Size (David Mollitor reviewed by Naveen Gangam)
804bf60 is described below

commit 804bf60e6f6b363297855804c9351c80299b9280
Author: David Mollitor 
AuthorDate: Fri Mar 27 12:01:19 2020 -0400

HIVE-23007: Do Not Consider Client Session For Default Fetch Size (David 
Mollitor reviewed by Naveen Gangam)
---
 .../hive/service/cli/thrift/ThriftCLIService.java | 19 ---
 1 file changed, 8 insertions(+), 11 deletions(-)

diff --git 
a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java 
b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
index a7fe049..6f8ad00 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
@@ -28,7 +28,7 @@ import org.apache.hive.service.rpc.thrift.TSetClientInfoResp;
 import java.io.IOException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
-import java.util.HashMap;
+import java.util.Collections;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 import javax.security.auth.login.LoginException;
@@ -321,17 +321,14 @@ public abstract class ThriftCLIService extends 
AbstractService implements TCLISe
 LOG.info("Client protocol version: " + req.getClient_protocol());
 TOpenSessionResp resp = new TOpenSessionResp();
 try {
-  SessionHandle sessionHandle = getSessionHandle(req, resp);
+  final SessionHandle sessionHandle = getSessionHandle(req, resp);
+
+  final int fetchSize = 
hiveConf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_DEFAULT_FETCH_SIZE);
+
   resp.setSessionHandle(sessionHandle.toTSessionHandle());
-  Map configurationMap = new HashMap();
-  // Set the updated fetch size from the server into the configuration map 
for the client
-  HiveConf sessionConf = cliService.getSessionConf(sessionHandle);
-  configurationMap.put(
-
HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_DEFAULT_FETCH_SIZE.varname,
-Integer.toString(sessionConf != null ?
-  
sessionConf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_DEFAULT_FETCH_SIZE)
 :
-  
hiveConf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_DEFAULT_FETCH_SIZE)));
-  resp.setConfiguration(configurationMap);
+  resp.setConfiguration(Collections
+  
.singletonMap(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_DEFAULT_FETCH_SIZE.varname,
+  Integer.toString(fetchSize)));
   resp.setStatus(OK_STATUS);
   ThriftCLIServerContext context =
 (ThriftCLIServerContext)currentServerContext.get();



[hive] branch master updated: HIVE-23005: Consider Default JDBC Fetch Size From HS2 (David Mollitor reviewed by Naveen Gangam)

2020-03-25 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new a69e676  HIVE-23005: Consider Default JDBC Fetch Size From HS2 (David 
Mollitor reviewed by Naveen Gangam)
a69e676 is described below

commit a69e676e90dd023072fb15d384f41ddb7a164445
Author: David Mollitor 
AuthorDate: Wed Mar 25 18:01:11 2020 -0400

HIVE-23005: Consider Default JDBC Fetch Size From HS2 (David Mollitor 
reviewed by Naveen Gangam)
---
 .../org/apache/hive/jdbc/TestJdbcWithMiniHS2.java  |  23 +---
 .../java/org/apache/hive/jdbc/HiveConnection.java  |  38 ---
 .../java/org/apache/hive/jdbc/HiveStatement.java   |  43 ---
 .../org/apache/hive/jdbc/TestHiveStatement.java| 123 +
 ql/src/java/org/apache/hadoop/hive/ql/Driver.java  |   6 +-
 .../hive/service/cli/operation/SQLOperation.java   |   4 +-
 .../hive/service/cli/session/HiveSessionImpl.java  |  18 ---
 .../hive/service/cli/thrift/ThriftCLIService.java  |  14 ++-
 8 files changed, 158 insertions(+), 111 deletions(-)

diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
index 7fa6796..2100906 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
@@ -1500,30 +1500,11 @@ public class TestJdbcWithMiniHS2 {
 
   @Test
   public void testFetchSize() throws Exception {
-// Test setting fetch size below max
 Connection fsConn = getConnection(miniHS2.getJdbcURL("default", 
"fetchSize=50", ""),
   System.getProperty("user.name"), "bar");
 Statement stmt = fsConn.createStatement();
-stmt.execute("set hive.server2.thrift.resultset.serialize.in.tasks=true");
-int fetchSize = stmt.getFetchSize();
-assertEquals(50, fetchSize);
-stmt.close();
-fsConn.close();
-// Test setting fetch size above max
-fsConn = getConnection(
-  miniHS2.getJdbcURL(
-"default",
-"fetchSize=" + (miniHS2.getHiveConf().getIntVar(
-  HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_MAX_FETCH_SIZE) + 1),
-""),
-  System.getProperty("user.name"), "bar");
-stmt = fsConn.createStatement();
-stmt.execute("set hive.server2.thrift.resultset.serialize.in.tasks=true");
-fetchSize = stmt.getFetchSize();
-assertEquals(
-  miniHS2.getHiveConf().getIntVar(
-HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_MAX_FETCH_SIZE),
-  fetchSize);
+stmt.execute("set");
+assertEquals(50, stmt.getFetchSize());
 stmt.close();
 fsConn.close();
   }
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java 
b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index cbf6632..7f0d8dc 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -28,6 +28,7 @@ import org.apache.hive.service.rpc.thrift.TSetClientInfoResp;
 import org.apache.hive.service.rpc.thrift.TSetClientInfoReq;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hive.common.auth.HiveAuthUtils;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
 import org.apache.hive.service.auth.HiveAuthConstants;
 import org.apache.hive.service.auth.KerberosSaslHelper;
@@ -70,6 +71,7 @@ import org.apache.thrift.transport.TTransport;
 import org.apache.thrift.transport.TTransportException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 import javax.net.ssl.KeyManagerFactory;
 import javax.net.ssl.SSLContext;
 import javax.net.ssl.TrustManagerFactory;
@@ -143,7 +145,8 @@ public class HiveConnection implements java.sql.Connection {
   private final List supportedProtocols = new 
LinkedList();
   private int loginTimeout = 0;
   private TProtocolVersion protocol;
-  private int fetchSize = HiveStatement.DEFAULT_FETCH_SIZE;
+  private final int initFetchSize;
+  private int defaultFetchSize;
   private String initFile = null;
   private String wmPool = null, wmApp = null;
   private Properties clientInfo;
@@ -261,9 +264,8 @@ public class HiveConnection implements java.sql.Connection {
 port = connParams.getPort();
 isEmbeddedMode = connParams.isEmbeddedMode();
 
-if (sessConfMap.containsKey(JdbcConnectionParams.FETCH_SIZE)) {
-  fetchSize = 
Integer.parseInt(sessConfMap.get(JdbcConnectionParams.FETCH_SIZE));
-}
+initFetchSize = 
Integer.parseInt(sessConfMap.getOrDefault(JdbcConnectionParams.FETCH_SIZE, 
"0"));
+
 if (sessConfMap.containsKey(JdbcConnectionParams.INIT_F

[hive] branch master updated: HIVE-23064: Remove Calls to printStackTrace in Module hive-exec (David Mollitor reviewed by Peter Vary)

2020-03-25 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 66302c4  HIVE-23064: Remove Calls to printStackTrace in Module 
hive-exec (David Mollitor reviewed by Peter Vary)
66302c4 is described below

commit 66302c4f7e62236471f5060064d28d317051b9bd
Author: David Mollitor 
AuthorDate: Wed Mar 25 16:46:19 2020 -0400

HIVE-23064: Remove Calls to printStackTrace in Module hive-exec (David 
Mollitor reviewed by Peter Vary)
---
 ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java | 12 +++-
 .../hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java   |  3 ---
 .../org/apache/hadoop/hive/ql/exec/FileSinkOperator.java |  2 --
 .../apache/hadoop/hive/ql/exec/HashTableDummyOperator.java   |  1 -
 ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java |  6 +-
 .../java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java  |  3 +--
 ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java|  6 --
 .../java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java   |  2 --
 .../hadoop/hive/ql/exec/tez/CustomPartitionVertex.java   |  1 -
 ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java |  9 ++---
 .../org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java |  1 -
 ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java  |  1 -
 ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java |  2 +-
 .../org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java   |  6 ++
 .../hive/ql/optimizer/physical/LocalMapJoinProcFactory.java  |  2 +-
 .../ql/optimizer/physical/SortMergeJoinTaskDispatcher.java   |  6 ++
 16 files changed, 25 insertions(+), 38 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java 
b/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
index 0d7b92d..4328665 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
@@ -60,6 +60,8 @@ import org.apache.thrift.TException;
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.protocol.TJSONProtocol;
 import org.apache.thrift.transport.TMemoryBuffer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -71,6 +73,8 @@ import com.google.common.annotations.VisibleForTesting;
 public class QueryPlan implements Serializable {
   private static final long serialVersionUID = 1L;
 
+  private static final Logger LOG = LoggerFactory.getLogger(QueryPlan.class);
+
   private String cboInfo;
   private String queryString;
   private String optimizedCBOPlan;
@@ -643,7 +647,7 @@ public class QueryPlan implements Serializable {
 try {
   return getJSONQuery(getQueryPlan());
 } catch (Exception e) {
-  e.printStackTrace();
+  LOG.warn("Unable to produce query plan JSON string", e);
   return e.toString();
 }
   }
@@ -655,8 +659,7 @@ public class QueryPlan implements Serializable {
 try {
   q.write(oprot);
 } catch (TException e) {
-  // TODO Auto-generated catch block
-  e.printStackTrace();
+  LOG.warn("Unable to produce query plan Thrift string", e);
   return q.toString();
 }
 return tmb.toString("UTF-8");
@@ -669,8 +672,7 @@ public class QueryPlan implements Serializable {
 try {
   q.write(oprot);
 } catch (TException e) {
-  // TODO Auto-generated catch block
-  e.printStackTrace();
+  LOG.warn("Unable to produce query plan binary string", e);
   return q.toString();
 }
 byte[] buf = new byte[tmb.length()];
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java
index 06f60ab..9d4bf79 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java
@@ -113,7 +113,6 @@ public final class DDLSemanticAnalyzerFactory {
   BaseSemanticAnalyzer analyzer = 
analyzerClass.getConstructor(QueryState.class).newInstance(queryState);
   return analyzer;
 } catch (Exception e) {
-  e.printStackTrace();
   throw new RuntimeException(e);
 }
   }
@@ -126,7 +125,6 @@ public final class DDLSemanticAnalyzerFactory {
   analyzerClass.getConstructor(QueryState.class, 
Hive.class).newInstance(queryState, db);
   return analyzer;
 } catch (Exception e) {
-  e.printStackTrace();
   throw new RuntimeException(e);
 }
   }
@@ -148,7 +146,6 @@ public final class DDLSemanticAnalyzerFactory {
   return TYPE_TO_ANALYZER.get(actualType);
 }
   } catch (Exception e) {
-e.printStackTrace();
 throw new RuntimeException(e);
   }
  

[hive] branch master updated: HIVE-23057: ColumnStatsMergerFactory NPE Possible (David Mollitor reviewed by Zoltan Haindrich)

2020-03-25 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new ab31df6  HIVE-23057: ColumnStatsMergerFactory NPE Possible (David 
Mollitor reviewed by Zoltan Haindrich)
ab31df6 is described below

commit ab31df6b819925f6ea785371d6ab40106b342b07
Author: David Mollitor 
AuthorDate: Wed Mar 25 16:39:50 2020 -0400

HIVE-23057: ColumnStatsMergerFactory NPE Possible (David Mollitor reviewed 
by Zoltan Haindrich)
---
 .../merge/ColumnStatsMergerFactory.java| 95 --
 1 file changed, 52 insertions(+), 43 deletions(-)

diff --git 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/columnstats/merge/ColumnStatsMergerFactory.java
 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/columnstats/merge/ColumnStatsMergerFactory.java
index 261437b..04a2649 100644
--- 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/columnstats/merge/ColumnStatsMergerFactory.java
+++ 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/columnstats/merge/ColumnStatsMergerFactory.java
@@ -19,6 +19,8 @@
 
 package org.apache.hadoop.hive.metastore.columnstats.merge;
 
+import java.util.Objects;
+
 import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
@@ -31,60 +33,64 @@ import 
org.apache.hadoop.hive.metastore.columnstats.cache.LongColumnStatsDataIns
 import 
org.apache.hadoop.hive.metastore.columnstats.cache.StringColumnStatsDataInspector;
 import 
org.apache.hadoop.hive.metastore.columnstats.cache.TimestampColumnStatsDataInspector;
 
+import com.google.common.base.Preconditions;
+
 public class ColumnStatsMergerFactory {
 
   private ColumnStatsMergerFactory() {
   }
 
-  public static ColumnStatsMerger getColumnStatsMerger(ColumnStatisticsObj 
statsObjNew,
-  ColumnStatisticsObj statsObjOld) {
-ColumnStatsMerger agg;
-_Fields typeNew = statsObjNew.getStatsData().getSetField();
-_Fields typeOld = statsObjOld.getStatsData().getSetField();
-// make sure that they have the same type
-typeNew = typeNew == typeOld ? typeNew : null;
+  /**
+   * Get a statistics merger to merge the given statistics object.
+   *
+   * @param statsObjNew A statistics object to merger
+   * @param statsObjOld A statistics object to merger
+   * @return A ColumnStatsMerger object that can process the requested type
+   * @throws IllegalArgumentException if the column statistics objects are of
+   *   two different types or if they are of an unknown type
+   * @throws NullPointerException if statistics object is {@code null}
+   */
+  public static ColumnStatsMerger getColumnStatsMerger(final 
ColumnStatisticsObj statsObjNew,
+  final ColumnStatisticsObj statsObjOld) {
+Objects.requireNonNull(statsObjNew, "Column 1 statistics cannot be null");
+Objects.requireNonNull(statsObjOld, "Column 2 statistics cannot be null");
+
+final _Fields typeNew = statsObjNew.getStatsData().getSetField();
+final _Fields typeOld = statsObjOld.getStatsData().getSetField();
+
+Preconditions.checkArgument(typeNew == typeOld, "The column types must 
match: [" + typeNew + "::" + typeOld + "]");
+
 switch (typeNew) {
 case BOOLEAN_STATS:
-  agg = new BooleanColumnStatsMerger();
-  break;
-case LONG_STATS: {
-  agg = new LongColumnStatsMerger();
-  break;
-}
-case DOUBLE_STATS: {
-  agg = new DoubleColumnStatsMerger();
-  break;
-}
-case STRING_STATS: {
-  agg = new StringColumnStatsMerger();
-  break;
-}
+  return new BooleanColumnStatsMerger();
+case LONG_STATS:
+  return new LongColumnStatsMerger();
+case DOUBLE_STATS:
+  return new DoubleColumnStatsMerger();
+case STRING_STATS:
+  return new StringColumnStatsMerger();
 case BINARY_STATS:
-  agg = new BinaryColumnStatsMerger();
-  break;
-case DECIMAL_STATS: {
-  agg = new DecimalColumnStatsMerger();
-  break;
-}
-case DATE_STATS: {
-  agg = new DateColumnStatsMerger();
-  break;
-}
-case TIMESTAMP_STATS: {
-  agg = new TimestampColumnStatsMerger();
-  break;
-}
+  return new BinaryColumnStatsMerger();
+case DECIMAL_STATS:
+  return new DecimalColumnStatsMerger();
+case DATE_STATS:
+  return new DateColumnStatsMerger();
+case TIMESTAMP_STATS:
+  return new TimestampColumnStatsMerger();
 default:
-  throw new IllegalArgumentException("Unknown stats type " + 
statsObjNew.getStatsData().getSetField());
+  throw new IllegalArgument

[hive] branch master updated: HIVE-23037: Print Logging Information for Exception in AcidUtils tryListLocatedHdfsStatus (David Mollitor, reviewed by Peter Vary)

2020-03-25 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 3c42258  HIVE-23037: Print Logging Information for Exception in 
AcidUtils tryListLocatedHdfsStatus (David Mollitor, reviewed by Peter Vary)
3c42258 is described below

commit 3c4225861e2ab47f571cf82599236db10ca80f7c
Author: David Mollitor 
AuthorDate: Wed Mar 25 09:37:03 2020 -0400

HIVE-23037: Print Logging Information for Exception in AcidUtils 
tryListLocatedHdfsStatus (David Mollitor, reviewed by Peter Vary)
---
 .../org/apache/hadoop/hive/ql/io/AcidUtils.java| 25 ++
 .../org/apache/hadoop/hive/shims/HadoopShims.java  | 13 +++
 2 files changed, 29 insertions(+), 9 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
index dbbe6f1..d5a31df 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
@@ -1936,22 +1936,26 @@ public class AcidUtils {
 
   private static List 
tryListLocatedHdfsStatus(Ref useFileIds, FileSystem fs,
   Path directory) {
-List childrenWithId = null;
 if (useFileIds == null) {
-  return childrenWithId;
+  return null;
 }
-Boolean val = useFileIds.value;
+
+List childrenWithId = null;
+final Boolean val = useFileIds.value;
 if (val == null || val) {
   try {
 childrenWithId = SHIMS.listLocatedHdfsStatus(fs, directory, 
hiddenFileFilter);
 if (val == null) {
   useFileIds.value = true;
 }
-  } catch (Throwable t) {
-LOG.error("Failed to get files with ID; using regular API: " + 
t.getMessage());
-if (val == null && t instanceof UnsupportedOperationException) {
+  } catch (UnsupportedOperationException uoe) {
+LOG.info("Failed to get files with ID; using regular API: " + 
uoe.getMessage());
+if (val == null) {
   useFileIds.value = false;
 }
+  } catch (IOException ioe) {
+LOG.info("Failed to get files with ID; using regular API: " + 
ioe.getMessage());
+LOG.debug("Failed to get files with ID", ioe);
   }
 }
 return childrenWithId;
@@ -3137,11 +3141,14 @@ public class AcidUtils {
   useFileIds.value = true; // The call succeeded, so presumably the 
API is there.
 }
 return result;
-  } catch (Throwable t) {
-LOG.error("Failed to get files with ID; using regular API: " + 
t.getMessage());
-if (val == null && t instanceof UnsupportedOperationException) {
+  } catch (UnsupportedOperationException uoe) {
+LOG.warn("Failed to get files with ID; using regular API: " + 
uoe.getMessage());
+if (val == null) {
   useFileIds.value = false;
 }
+  } catch (IOException ioe) {
+LOG.info("Failed to get files with ID; using regular API: " + 
ioe.getMessage());
+LOG.debug("Failed to get files with ID", ioe);
   }
 }
 
diff --git 
a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java 
b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
index f71f5a5..8784213 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
@@ -251,6 +251,19 @@ public interface HadoopShims {
 Class> rrClass) throws IOException;
   }
 
+  /**
+   * List a directory for file status with ID.
+   *
+   * @param fs The {@code FileSystem} to load the path
+   * @param path The directory to list
+   * @param filter A filter to use on the files in the directory
+   * @return A list of file status with IDs
+   * @throws IOException An I/O exception of some sort has occurred
+   * @throws FileNotFoundException If the path is not found in the
+   *   {@code FileSystem}
+   * @throws UnsupportedOperationException the {@code FileSystem} is not a
+   *   {@code DistributedFileSystem}
+   */
   List listLocatedHdfsStatus(
   FileSystem fs, Path path, PathFilter filter) throws IOException;
 



[hive] branch master updated: HIVE-23065: Remove Calls to printStackTrace in Module hive-service (David Mollitor reviewed by Peter Vary)

2020-03-24 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new ac5941c  HIVE-23065: Remove Calls to printStackTrace in Module 
hive-service (David Mollitor reviewed by Peter Vary)
ac5941c is described below

commit ac5941cf31754ee5fbfe4b6f0ca573c0948115a2
Author: David Mollitor 
AuthorDate: Tue Mar 24 10:58:08 2020 -0400

HIVE-23065: Remove Calls to printStackTrace in Module hive-service (David 
Mollitor reviewed by Peter Vary)
---
 .../org/apache/hive/service/cli/operation/HiveCommandOperation.java | 1 -
 .../src/java/org/apache/hive/service/cli/operation/SQLOperation.java| 2 --
 2 files changed, 3 deletions(-)

diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
 
b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
index 8f04ec3..c83273b 100644
--- 
a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
+++ 
b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
@@ -91,7 +91,6 @@ public class HiveCommandOperation extends 
ExecuteStatementOperation {
 new SessionStream(System.err, true, StandardCharsets.UTF_8.name());
   } catch (UnsupportedEncodingException ee) {
 LOG.error("Error creating PrintStream", e);
-ee.printStackTrace();
 sessionState.out = null;
 sessionState.err = null;
   }
diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java 
b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
index 468ce10..4a35cf0 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
@@ -145,7 +145,6 @@ public class SQLOperation extends ExecuteStatementOperation 
{
   new SessionStream(System.err, true, StandardCharsets.UTF_8.name());
 } catch (UnsupportedEncodingException e) {
 LOG.error("Error creating PrintStream", e);
-e.printStackTrace();
 sessionState.out = null;
 sessionState.info = null;
 sessionState.err = null;
@@ -594,7 +593,6 @@ public class SQLOperation extends ExecuteStatementOperation 
{
   SerDeUtils.initializeSerDe(serde, queryState.getConf(), props, null);
 
 } catch (Exception ex) {
-  ex.printStackTrace();
   throw new SQLException("Could not create ResultSet: " + ex.getMessage(), 
ex);
 }
 return serde;



  1   2   >