[hive] branch master updated: HIVE-23037: Print Logging Information for Exception in AcidUtils tryListLocatedHdfsStatus (David Mollitor, reviewed by Peter Vary)

2020-03-25 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 3c42258  HIVE-23037: Print Logging Information for Exception in AcidUtils tryListLocatedHdfsStatus (David Mollitor, reviewed by Peter Vary)
3c42258 is described below

commit 3c4225861e2ab47f571cf82599236db10ca80f7c
Author: David Mollitor 
AuthorDate: Wed Mar 25 09:37:03 2020 -0400

HIVE-23037: Print Logging Information for Exception in AcidUtils tryListLocatedHdfsStatus (David Mollitor, reviewed by Peter Vary)
---
 .../org/apache/hadoop/hive/ql/io/AcidUtils.java| 25 ++
 .../org/apache/hadoop/hive/shims/HadoopShims.java  | 13 +++
 2 files changed, 29 insertions(+), 9 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
index dbbe6f1..d5a31df 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
@@ -1936,22 +1936,26 @@ public class AcidUtils {
 
  private static List<HdfsFileStatusWithId> tryListLocatedHdfsStatus(Ref<Boolean> useFileIds, FileSystem fs,
   Path directory) {
-List childrenWithId = null;
 if (useFileIds == null) {
-  return childrenWithId;
+  return null;
 }
-Boolean val = useFileIds.value;
+
+List childrenWithId = null;
+final Boolean val = useFileIds.value;
 if (val == null || val) {
   try {
childrenWithId = SHIMS.listLocatedHdfsStatus(fs, directory, hiddenFileFilter);
 if (val == null) {
   useFileIds.value = true;
 }
-  } catch (Throwable t) {
-LOG.error("Failed to get files with ID; using regular API: " + 
t.getMessage());
-if (val == null && t instanceof UnsupportedOperationException) {
+  } catch (UnsupportedOperationException uoe) {
+LOG.info("Failed to get files with ID; using regular API: " + 
uoe.getMessage());
+if (val == null) {
   useFileIds.value = false;
 }
+  } catch (IOException ioe) {
+LOG.info("Failed to get files with ID; using regular API: " + 
ioe.getMessage());
+LOG.debug("Failed to get files with ID", ioe);
   }
 }
 return childrenWithId;
@@ -3137,11 +3141,14 @@ public class AcidUtils {
useFileIds.value = true; // The call succeeded, so presumably the API is there.
 }
 return result;
-  } catch (Throwable t) {
-LOG.error("Failed to get files with ID; using regular API: " + 
t.getMessage());
-if (val == null && t instanceof UnsupportedOperationException) {
+  } catch (UnsupportedOperationException uoe) {
+LOG.warn("Failed to get files with ID; using regular API: " + 
uoe.getMessage());
+if (val == null) {
   useFileIds.value = false;
 }
+  } catch (IOException ioe) {
+LOG.info("Failed to get files with ID; using regular API: " + 
ioe.getMessage());
+LOG.debug("Failed to get files with ID", ioe);
   }
 }
 
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
index f71f5a5..8784213 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
@@ -251,6 +251,19 @@ public interface HadoopShims {
Class<RecordReader<K, V>> rrClass) throws IOException;
   }
 
+  /**
+   * List a directory for file status with ID.
+   *
+   * @param fs The {@code FileSystem} to load the path
+   * @param path The directory to list
+   * @param filter A filter to use on the files in the directory
+   * @return A list of file status with IDs
+   * @throws IOException An I/O exception of some sort has occurred
+   * @throws FileNotFoundException If the path is not found in the
+   *   {@code FileSystem}
+   * @throws UnsupportedOperationException if the {@code FileSystem} is not a
+   *   {@code DistributedFileSystem}
+   */
  List<HdfsFileStatusWithId> listLocatedHdfsStatus(
   FileSystem fs, Path path, PathFilter filter) throws IOException;
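
For illustration, a minimal, self-contained sketch of the fallback pattern this commit tightens up: probe the ID-aware listing once, remember the answer in a nullable flag, and catch the two specific exception types instead of Throwable. The names (FileLister, listWithIds, listPlain) are hypothetical stand-ins; the real code lives in AcidUtils and HadoopShims.

    import java.io.IOException;
    import java.util.List;

    class ListingFallbackSketch {

      interface FileLister {
        List<String> listWithIds(String dir) throws IOException;
        List<String> listPlain(String dir) throws IOException;
      }

      private Boolean useFileIds; // null = undecided, true/false = remembered answer

      List<String> list(FileLister lister, String dir) throws IOException {
        if (useFileIds == null || useFileIds) {
          try {
            List<String> result = lister.listWithIds(dir);
            useFileIds = true; // the call succeeded, so the API is there
            return result;
          } catch (UnsupportedOperationException uoe) {
            // expected on file systems without the ID API: remember and stop probing
            if (useFileIds == null) {
              useFileIds = false;
            }
          } catch (IOException ioe) {
            // transient I/O failure: fall back this time, but keep probing later
          }
        }
        return lister.listPlain(dir); // regular API without file IDs
      }
    }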
 



[hive] branch master updated: HIVE-23057: ColumnStatsMergerFactory NPE Possible (David Mollitor reviewed by Zoltan Haindrich)

2020-03-25 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new ab31df6  HIVE-23057: ColumnStatsMergerFactory NPE Possible (David Mollitor reviewed by Zoltan Haindrich)
ab31df6 is described below

commit ab31df6b819925f6ea785371d6ab40106b342b07
Author: David Mollitor 
AuthorDate: Wed Mar 25 16:39:50 2020 -0400

HIVE-23057: ColumnStatsMergerFactory NPE Possible (David Mollitor reviewed by Zoltan Haindrich)
---
 .../merge/ColumnStatsMergerFactory.java| 95 --
 1 file changed, 52 insertions(+), 43 deletions(-)

diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/columnstats/merge/ColumnStatsMergerFactory.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/columnstats/merge/ColumnStatsMergerFactory.java
index 261437b..04a2649 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/columnstats/merge/ColumnStatsMergerFactory.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/columnstats/merge/ColumnStatsMergerFactory.java
@@ -19,6 +19,8 @@
 
 package org.apache.hadoop.hive.metastore.columnstats.merge;
 
+import java.util.Objects;
+
 import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
@@ -31,60 +33,64 @@ import org.apache.hadoop.hive.metastore.columnstats.cache.LongColumnStatsDataIns
import org.apache.hadoop.hive.metastore.columnstats.cache.StringColumnStatsDataInspector;
import org.apache.hadoop.hive.metastore.columnstats.cache.TimestampColumnStatsDataInspector;
 
+import com.google.common.base.Preconditions;
+
 public class ColumnStatsMergerFactory {
 
   private ColumnStatsMergerFactory() {
   }
 
-  public static ColumnStatsMerger getColumnStatsMerger(ColumnStatisticsObj statsObjNew,
-  ColumnStatisticsObj statsObjOld) {
-ColumnStatsMerger agg;
-_Fields typeNew = statsObjNew.getStatsData().getSetField();
-_Fields typeOld = statsObjOld.getStatsData().getSetField();
-// make sure that they have the same type
-typeNew = typeNew == typeOld ? typeNew : null;
+  /**
+   * Get a statistics merger to merge the given statistics object.
+   *
+   * @param statsObjNew A statistics object to merge
+   * @param statsObjOld A statistics object to merge
+   * @return A ColumnStatsMerger object that can process the requested type
+   * @throws IllegalArgumentException if the column statistics objects are of
+   *   two different types or if they are of an unknown type
+   * @throws NullPointerException if a statistics object is {@code null}
+   */
+  public static ColumnStatsMerger getColumnStatsMerger(final ColumnStatisticsObj statsObjNew,
+  final ColumnStatisticsObj statsObjOld) {
+Objects.requireNonNull(statsObjNew, "Column 1 statistics cannot be null");
+Objects.requireNonNull(statsObjOld, "Column 2 statistics cannot be null");
+
+final _Fields typeNew = statsObjNew.getStatsData().getSetField();
+final _Fields typeOld = statsObjOld.getStatsData().getSetField();
+
+Preconditions.checkArgument(typeNew == typeOld, "The column types must match: [" + typeNew + "::" + typeOld + "]");
+
 switch (typeNew) {
 case BOOLEAN_STATS:
-  agg = new BooleanColumnStatsMerger();
-  break;
-case LONG_STATS: {
-  agg = new LongColumnStatsMerger();
-  break;
-}
-case DOUBLE_STATS: {
-  agg = new DoubleColumnStatsMerger();
-  break;
-}
-case STRING_STATS: {
-  agg = new StringColumnStatsMerger();
-  break;
-}
+  return new BooleanColumnStatsMerger();
+case LONG_STATS:
+  return new LongColumnStatsMerger();
+case DOUBLE_STATS:
+  return new DoubleColumnStatsMerger();
+case STRING_STATS:
+  return new StringColumnStatsMerger();
 case BINARY_STATS:
-  agg = new BinaryColumnStatsMerger();
-  break;
-case DECIMAL_STATS: {
-  agg = new DecimalColumnStatsMerger();
-  break;
-}
-case DATE_STATS: {
-  agg = new DateColumnStatsMerger();
-  break;
-}
-case TIMESTAMP_STATS: {
-  agg = new TimestampColumnStatsMerger();
-  break;
-}
+  return new BinaryColumnStatsMerger();
+case DECIMAL_STATS:
+  return new DecimalColumnStatsMerger();
+case DATE_STATS:
+  return new DateColumnStatsMerger();
+case TIMESTAMP_STATS:
+  return new TimestampColumnStatsMerger();
 default:
-  throw new IllegalArgumentException("Unknown stats type " + statsObjNew.getStatsData().getSetField());
+  throw new IllegalArgumentException("Unknown stats type: " + statsObjNew.getStatsData().getSetField());
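
A short sketch of the validation style this refactor adopts: Objects.requireNonNull covers the NullPointerException case and Guava's Preconditions.checkArgument covers the type-mismatch case, matching the new Javadoc contract. The enum and merger names below are illustrative stand-ins, and Guava is assumed to be on the classpath.

    import java.util.Objects;
    import com.google.common.base.Preconditions;

    class MergerLookupSketch {

      enum StatsType { BOOLEAN_STATS, LONG_STATS }

      static String mergerFor(StatsType typeNew, StatsType typeOld) {
        // fail fast with the documented exception types
        Objects.requireNonNull(typeNew, "Column 1 statistics cannot be null");
        Objects.requireNonNull(typeOld, "Column 2 statistics cannot be null");
        Preconditions.checkArgument(typeNew == typeOld,
            "The column types must match: [" + typeNew + "::" + typeOld + "]");
        // return directly from each case instead of assigning to a local
        switch (typeNew) {
        case BOOLEAN_STATS:
          return "BooleanColumnStatsMerger";
        case LONG_STATS:
          return "LongColumnStatsMerger";
        default:
          throw new IllegalArgumentException("Unknown stats type: " + typeNew);
        }
      }
    }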

[hive] branch master updated: HIVE-23064: Remove Calls to printStackTrace in Module hive-exec (David Mollitor reviewed by Peter Vary)

2020-03-25 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 66302c4  HIVE-23064: Remove Calls to printStackTrace in Module hive-exec (David Mollitor reviewed by Peter Vary)
66302c4 is described below

commit 66302c4f7e62236471f5060064d28d317051b9bd
Author: David Mollitor 
AuthorDate: Wed Mar 25 16:46:19 2020 -0400

HIVE-23064: Remove Calls to printStackTrace in Module hive-exec (David Mollitor reviewed by Peter Vary)
---
 ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java | 12 +++-
 .../hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java   |  3 ---
 .../org/apache/hadoop/hive/ql/exec/FileSinkOperator.java |  2 --
 .../apache/hadoop/hive/ql/exec/HashTableDummyOperator.java   |  1 -
 ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java |  6 +-
 .../java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java  |  3 +--
 ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java|  6 --
 .../java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java   |  2 --
 .../hadoop/hive/ql/exec/tez/CustomPartitionVertex.java   |  1 -
 ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java |  9 ++---
 .../org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java |  1 -
 ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java  |  1 -
 ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java |  2 +-
 .../org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java   |  6 ++
 .../hive/ql/optimizer/physical/LocalMapJoinProcFactory.java  |  2 +-
 .../ql/optimizer/physical/SortMergeJoinTaskDispatcher.java   |  6 ++
 16 files changed, 25 insertions(+), 38 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
index 0d7b92d..4328665 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
@@ -60,6 +60,8 @@ import org.apache.thrift.TException;
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.protocol.TJSONProtocol;
 import org.apache.thrift.transport.TMemoryBuffer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -71,6 +73,8 @@ import com.google.common.annotations.VisibleForTesting;
 public class QueryPlan implements Serializable {
   private static final long serialVersionUID = 1L;
 
+  private static final Logger LOG = LoggerFactory.getLogger(QueryPlan.class);
+
   private String cboInfo;
   private String queryString;
   private String optimizedCBOPlan;
@@ -643,7 +647,7 @@ public class QueryPlan implements Serializable {
 try {
   return getJSONQuery(getQueryPlan());
 } catch (Exception e) {
-  e.printStackTrace();
+  LOG.warn("Unable to produce query plan JSON string", e);
   return e.toString();
 }
   }
@@ -655,8 +659,7 @@ public class QueryPlan implements Serializable {
 try {
   q.write(oprot);
 } catch (TException e) {
-  // TODO Auto-generated catch block
-  e.printStackTrace();
+  LOG.warn("Unable to produce query plan Thrift string", e);
   return q.toString();
 }
 return tmb.toString("UTF-8");
@@ -669,8 +672,7 @@ public class QueryPlan implements Serializable {
 try {
   q.write(oprot);
 } catch (TException e) {
-  // TODO Auto-generated catch block
-  e.printStackTrace();
+  LOG.warn("Unable to produce query plan binary string", e);
   return q.toString();
 }
 byte[] buf = new byte[tmb.length()];
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java
index 06f60ab..9d4bf79 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java
@@ -113,7 +113,6 @@ public final class DDLSemanticAnalyzerFactory {
  BaseSemanticAnalyzer analyzer = analyzerClass.getConstructor(QueryState.class).newInstance(queryState);
   return analyzer;
 } catch (Exception e) {
-  e.printStackTrace();
   throw new RuntimeException(e);
 }
   }
@@ -126,7 +125,6 @@ public final class DDLSemanticAnalyzerFactory {
analyzerClass.getConstructor(QueryState.class, Hive.class).newInstance(queryState, db);
   return analyzer;
 } catch (Exception e) {
-  e.printStackTrace();
   throw new RuntimeException(e);
 }
   }
@@ -148,7 +146,6 @@ public final class DDLSemanticAnalyzerFactory {
   return TYPE_TO_ANALYZER.get(actualType);
 }
   } catch (Exception e) {
-e.printStackTrace();
 throw new RuntimeException(e);
   }
 }
diff --git a/ql/src/java/org/apache/h
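
The pattern applied across the sixteen files in this commit is the same: replace e.printStackTrace() with a parameterized SLF4J call that passes the exception as the final argument, so the stack trace goes through the logging framework instead of raw stderr. A minimal sketch, with an illustrative class standing in for QueryPlan:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class LoggingSketch {
      private static final Logger LOG = LoggerFactory.getLogger(LoggingSketch.class);

      String toJson(Object plan) {
        try {
          return serialize(plan);
        } catch (Exception e) {
          // passing the exception as the last argument logs the full stack trace,
          // which printStackTrace() used to write to stderr outside the framework
          LOG.warn("Unable to produce query plan JSON string", e);
          return e.toString();
        }
      }

      private String serialize(Object plan) throws Exception {
        return plan.toString(); // stand-in for the real Thrift/JSON serialization
      }
    }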

[hive] branch master updated: HIVE-23005: Consider Default JDBC Fetch Size From HS2 (David Mollitor reviewed by Naveen Gangam)

2020-03-25 Thread dmollitor
This is an automated email from the ASF dual-hosted git repository.

dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new a69e676  HIVE-23005: Consider Default JDBC Fetch Size From HS2 (David Mollitor reviewed by Naveen Gangam)
a69e676 is described below

commit a69e676e90dd023072fb15d384f41ddb7a164445
Author: David Mollitor 
AuthorDate: Wed Mar 25 18:01:11 2020 -0400

HIVE-23005: Consider Default JDBC Fetch Size From HS2 (David Mollitor reviewed by Naveen Gangam)
---
 .../org/apache/hive/jdbc/TestJdbcWithMiniHS2.java  |  23 +---
 .../java/org/apache/hive/jdbc/HiveConnection.java  |  38 ---
 .../java/org/apache/hive/jdbc/HiveStatement.java   |  43 ---
 .../org/apache/hive/jdbc/TestHiveStatement.java| 123 +
 ql/src/java/org/apache/hadoop/hive/ql/Driver.java  |   6 +-
 .../hive/service/cli/operation/SQLOperation.java   |   4 +-
 .../hive/service/cli/session/HiveSessionImpl.java  |  18 ---
 .../hive/service/cli/thrift/ThriftCLIService.java  |  14 ++-
 8 files changed, 158 insertions(+), 111 deletions(-)

diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
index 7fa6796..2100906 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
@@ -1500,30 +1500,11 @@ public class TestJdbcWithMiniHS2 {
 
   @Test
   public void testFetchSize() throws Exception {
-// Test setting fetch size below max
Connection fsConn = getConnection(miniHS2.getJdbcURL("default", "fetchSize=50", ""),
   System.getProperty("user.name"), "bar");
 Statement stmt = fsConn.createStatement();
-stmt.execute("set hive.server2.thrift.resultset.serialize.in.tasks=true");
-int fetchSize = stmt.getFetchSize();
-assertEquals(50, fetchSize);
-stmt.close();
-fsConn.close();
-// Test setting fetch size above max
-fsConn = getConnection(
-  miniHS2.getJdbcURL(
-"default",
-"fetchSize=" + (miniHS2.getHiveConf().getIntVar(
-  HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_MAX_FETCH_SIZE) + 1),
-""),
-  System.getProperty("user.name"), "bar");
-stmt = fsConn.createStatement();
-stmt.execute("set hive.server2.thrift.resultset.serialize.in.tasks=true");
-fetchSize = stmt.getFetchSize();
-assertEquals(
-  miniHS2.getHiveConf().getIntVar(
-HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_MAX_FETCH_SIZE),
-  fetchSize);
+stmt.execute("set");
+assertEquals(50, stmt.getFetchSize());
 stmt.close();
 fsConn.close();
   }
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index cbf6632..7f0d8dc 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -28,6 +28,7 @@ import org.apache.hive.service.rpc.thrift.TSetClientInfoResp;
 import org.apache.hive.service.rpc.thrift.TSetClientInfoReq;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hive.common.auth.HiveAuthUtils;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
 import org.apache.hive.service.auth.HiveAuthConstants;
 import org.apache.hive.service.auth.KerberosSaslHelper;
@@ -70,6 +71,7 @@ import org.apache.thrift.transport.TTransport;
 import org.apache.thrift.transport.TTransportException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 import javax.net.ssl.KeyManagerFactory;
 import javax.net.ssl.SSLContext;
 import javax.net.ssl.TrustManagerFactory;
@@ -143,7 +145,8 @@ public class HiveConnection implements java.sql.Connection {
  private final List<TProtocolVersion> supportedProtocols = new LinkedList<TProtocolVersion>();
   private int loginTimeout = 0;
   private TProtocolVersion protocol;
-  private int fetchSize = HiveStatement.DEFAULT_FETCH_SIZE;
+  private final int initFetchSize;
+  private int defaultFetchSize;
   private String initFile = null;
   private String wmPool = null, wmApp = null;
   private Properties clientInfo;
@@ -261,9 +264,8 @@ public class HiveConnection implements java.sql.Connection {
 port = connParams.getPort();
 isEmbeddedMode = connParams.isEmbeddedMode();
 
-if (sessConfMap.containsKey(JdbcConnectionParams.FETCH_SIZE)) {
-  fetchSize = Integer.parseInt(sessConfMap.get(JdbcConnectionParams.FETCH_SIZE));
-}
+initFetchSize = Integer.parseInt(sessConfMap.getOrDefault(JdbcConnectionParams.FETCH_SIZE, "0"));
+
 if (sessConfMap.containsKey(JdbcConnectionParams.INIT_FILE)) {
   initFile = sessConfMap.get(JdbcConnectionParams.INIT_FILE);
 }
@@ -832,9 +834,6 @@ public class HiveConnection implements java.sql
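
A hedged usage sketch of the client side: setting fetchSize as a session variable in the JDBC URL and reading it back from the Statement, mirroring the updated testFetchSize above. Host, port, and credentials are placeholders, and the Hive JDBC driver is assumed to be on the classpath; per the initFetchSize change, an unset fetchSize (parsed as 0) falls back to the server-side default.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class FetchSizeSketch {
      public static void main(String[] args) throws Exception {
        // fetchSize rides along as a session variable in the hive2 URL
        String url = "jdbc:hive2://localhost:10000/default;fetchSize=50";
        try (Connection conn = DriverManager.getConnection(url, "user", "");
             Statement stmt = conn.createStatement()) {
          // an explicit URL value wins; 0/unset defers to the HS2 default
          System.out.println("fetch size: " + stmt.getFetchSize());
        }
      }
    }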

[hive] branch master updated: HIVE-22997: Copy external table to target during Repl Dump operation ( Pravin Kumar Sinha, reviewed by Aasha Medhi, Anishek Agarwal)

2020-03-25 Thread anishek
This is an automated email from the ASF dual-hosted git repository.

anishek pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
 new 4a591b0  HIVE-22997: Copy external table to target during Repl Dump operation ( Pravin Kumar Sinha, reviewed by Aasha Medhi, Anishek Agarwal)
4a591b0 is described below

commit 4a591b0bf79a3e8c8592f2383f209788fd4f86d3
Author: Pravin Kumar Sinha 
AuthorDate: Thu Mar 26 11:25:27 2020 +0530

HIVE-22997: Copy external table to target during Repl Dump operation ( Pravin Kumar Sinha, reviewed by Aasha Medhi, Anishek Agarwal)
---
 .../hadoop/hive/ql/parse/ReplicationTestUtils.java |  15 ++
 .../parse/TestReplicationOnHDFSEncryptedZones.java |  12 +-
 .../hive/ql/parse/TestReplicationScenarios.java| 229 +-
 .../TestReplicationScenariosExternalTables.java| 107 +
 ...icationScenariosExternalTablesMetaDataOnly.java |   4 +-
 .../parse/TestScheduledReplicationScenarios.java   |  49 
 .../parse/TestTableLevelReplicationScenarios.java  |  21 +-
 .../apache/hadoop/hive/ql/plan/api/StageType.java  |   8 +-
 .../apache/hadoop/hive/ql/exec/TaskFactory.java|  13 +-
 .../{ReplLoadCompleteAckTask.java => AckTask.java} |  17 +-
 .../{ReplLoadCompleteAckWork.java => AckWork.java} |  19 +-
 .../hadoop/hive/ql/exec/repl/DirCopyTask.java  | 210 
 .../hadoop/hive/ql/exec/repl/DirCopyWork.java  |  53 +
 .../ql/exec/repl/ExternalTableCopyTaskBuilder.java | 264 -
 .../hadoop/hive/ql/exec/repl/ReplDumpTask.java | 219 +++--
 .../hadoop/hive/ql/exec/repl/ReplDumpWork.java |  82 +++
 .../hive/ql/exec/repl/ReplExternalTables.java  |  13 +-
 .../hadoop/hive/ql/exec/repl/ReplLoadTask.java |  55 ++---
 .../hadoop/hive/ql/exec/repl/ReplLoadWork.java |  17 +-
 .../org/apache/hadoop/hive/ql/parse/EximUtil.java  |  47 +++-
 .../hive/ql/parse/ReplicationSemanticAnalyzer.java |  59 ++---
 .../hive/ql/parse/repl/dump/PartitionExport.java   |  15 +-
 .../hive/ql/parse/repl/dump/TableExport.java   |  23 +-
 .../hadoop/hive/ql/exec/repl/TestReplDumpTask.java |   7 +-
 24 files changed, 999 insertions(+), 559 deletions(-)

diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/ReplicationTestUtils.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/ReplicationTestUtils.java
index a82bbad..e0c3ed2 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/ReplicationTestUtils.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/ReplicationTestUtils.java
@@ -27,6 +27,7 @@ import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Arrays;
 import java.util.Set;
@@ -516,6 +517,20 @@ public class ReplicationTestUtils {
 );
   }
 
+  public static List<String> externalTableWithClause(List<String> externalTableBasePathWithClause, Boolean bootstrap,
+ Boolean includeExtTbl) {
+List<String> withClause = new ArrayList<>(externalTableBasePathWithClause);
+if (bootstrap != null) {
+  withClause.add("'" + HiveConf.ConfVars.REPL_BOOTSTRAP_EXTERNAL_TABLES + 
"'='" + Boolean.toString(bootstrap)
+  + "'");
+}
+if (includeExtTbl != null) {
+  withClause.add("'" + HiveConf.ConfVars.REPL_INCLUDE_EXTERNAL_TABLES + 
"'='" + Boolean.toString(includeExtTbl)
+  + "'");
+}
+return withClause;
+  }
+
   public static void assertExternalFileInfo(WarehouseInstance primary,
  List<String> expected,
  Path externalTableInfoFile) throws IOException {
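
A hypothetical usage sketch of the new externalTableWithClause helper, with its logic mirrored inline using plain property-name strings (hive.repl.bootstrap.external.tables and hive.repl.include.external.tables are assumed to be the varnames behind the ConfVars constants): the helper appends quoted key='value' pairs suitable for a REPL DUMP ... WITH (...) clause, skipping any flag passed as null.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    class WithClauseSketch {
      // mirrors the helper's logic with plain strings in place of HiveConf.ConfVars
      static List<String> externalTableWithClause(List<String> base, Boolean bootstrap, Boolean includeExtTbl) {
        List<String> withClause = new ArrayList<>(base);
        if (bootstrap != null) {
          withClause.add("'hive.repl.bootstrap.external.tables'='" + bootstrap + "'");
        }
        if (includeExtTbl != null) {
          withClause.add("'hive.repl.include.external.tables'='" + includeExtTbl + "'");
        }
        return withClause;
      }

      public static void main(String[] args) {
        // null for includeExtTbl leaves that flag out of the clause entirely
        System.out.println(externalTableWithClause(
            Arrays.asList("'hive.repl.rootdir'='/repl'"), true, null));
      }
    }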
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationOnHDFSEncryptedZones.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationOnHDFSEncryptedZones.java
index f6a33bc..bed0235 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationOnHDFSEncryptedZones.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationOnHDFSEncryptedZones.java
@@ -36,7 +36,9 @@ import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.HashMap;
+import java.util.List;
 
import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.METASTORE_AGGREGATE_STATS_CACHE_ENABLED;
import static org.apache.hadoop.hive.metastore.ReplChangeManager.SOURCE_OF_REPLICATION;
@@ -102,12 +104,20 @@ public class TestReplicationOnHDFSEncryptedZones {
   put(HiveConf.ConfVars.REPLDIR.varname, primary.repldDir);
 }}, "test_key123");
 
+List<String> dumpWithClause = Arrays.asList(
+"'hive.repl.add.raw.reserved.nam