HIVE-17628 : always use fully qualified path for tables/partitions/etc. (Sergey Shelukhin, reviewed by Ashutosh Chauhan and Thejas M Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/32b9716f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/32b9716f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/32b9716f

Branch: refs/heads/hive-14535
Commit: 32b9716fe73b89198ef2714039127e451669e5fb
Parents: a860795
Author: sergey <ser...@apache.org>
Authored: Fri Sep 29 13:34:15 2017 -0700
Committer: sergey <ser...@apache.org>
Committed: Fri Sep 29 13:56:18 2017 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java   | 18 ++++++++----------
 .../org/apache/hadoop/hive/ql/exec/Utilities.java | 10 ----------
 .../org/apache/hadoop/hive/ql/metadata/Hive.java  |  6 +++---
 .../hadoop/hive/ql/plan/CreateTableDesc.java      |  2 +-
 .../test/results/clientnegative/external2.q.out   |  2 +-
 5 files changed, 13 insertions(+), 25 deletions(-)
----------------------------------------------------------------------
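
For context, "fully qualified" here means a location resolved against the default FileSystem (fs.defaultFS) so that it carries an explicit scheme and authority. The standalone sketch below is illustrative only: the class name and sample paths are invented, and it only approximates what Utilities.getQualifiedPath / DDLTask.makeLocationQualified are assumed to do via the standard Hadoop FileSystem API. With the isDefaultNameNode check removed in this patch, that qualification now happens unconditionally.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical example class, not part of this patch.
public class QualifyLocationSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // A location as it might appear in a DDL statement: no scheme or authority.
    Path raw = new Path("/warehouse/mydb.db/mytable");
    // Resolve the path against the filesystem it belongs to (fs.defaultFS by default).
    FileSystem fs = raw.getFileSystem(conf);
    // makeQualified adds the scheme and authority,
    // e.g. hdfs://namenode:8020/warehouse/mydb.db/mytable on an HDFS default FS.
    Path qualified = fs.makeQualified(raw);
    System.out.println(qualified);
  }
}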


http://git-wip-us.apache.org/repos/asf/hive/blob/32b9716f/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 2cf5bfd..2e4e2fa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -1016,10 +1016,8 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
     }
 
     String indexTableName = crtIndex.getIndexTableName();
-    if (!Utilities.isDefaultNameNode(conf)) {
-      // If location is specified - ensure that it is a full qualified name
-      makeLocationQualified(crtIndex, indexTableName);
-    }
+    // If location is specified - ensure that it is a full qualified name
+    makeLocationQualified(crtIndex, indexTableName);
 
     db
     .createIndex(
@@ -4363,9 +4361,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
     database.setOwnerName(SessionState.getUserFromAuthenticator());
     database.setOwnerType(PrincipalType.USER);
     try {
-      if (!Utilities.isDefaultNameNode(conf)) {
-        makeLocationQualified(database);
-      }
+      makeLocationQualified(database);
       db.createDatabase(database, crtDb.getIfNotExists());
     }
     catch (AlreadyExistsException ex) {
@@ -4631,8 +4627,8 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
       }
     }
 
-    if (!Utilities.isDefaultNameNode(conf)) {
-      // If location is specified - ensure that it is a full qualified name
+    // If location is specified - ensure that it is a full qualified name
+    if (DDLTask.doesTableNeedLocation(tbl)) {
      makeLocationQualified(tbl.getDbName(), tbl.getTTable().getSd(), tbl.getTableName(), conf);
     }
 
@@ -4954,9 +4950,11 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
     // this method could be moved to the HiveStorageHandler interface.
     boolean retval = true;
     if (tbl.getStorageHandler() != null) {
+      // TODO: why doesn't this check class name rather than toString?
       String sh = tbl.getStorageHandler().toString();
       retval = !sh.equals("org.apache.hadoop.hive.hbase.HBaseStorageHandler")
-              && !sh.equals(Constants.DRUID_HIVE_STORAGE_HANDLER_ID);
+              && !sh.equals(Constants.DRUID_HIVE_STORAGE_HANDLER_ID)
+              && !sh.equals("org.apache.hadoop.hive.accumulo.AccumuloStorageHandler");
     }
     return retval;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/32b9716f/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index ae70cba..ae63727 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -3666,16 +3666,6 @@ public final class Utilities {
   }
 
   /**
-   * Checks if current hive script was executed with non-default namenode
-   *
-   * @return True/False
-   */
-  public static boolean isDefaultNameNode(HiveConf conf) {
-    return !conf.getChangedProperties().containsKey(
-        CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
-  }
-
-  /**
   * Checks if the current HiveServer2 logging operation level is >= PERFORMANCE.
   * @param conf Hive configuration.
   * @return true if current HiveServer2 logging operation level is >= PERFORMANCE.

http://git-wip-us.apache.org/repos/asf/hive/blob/32b9716f/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index e8a0474..436a2fe 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -703,7 +703,7 @@ public class Hive {
     try {
       validatePartition(newPart);
       String location = newPart.getLocation();
-      if (location != null && !Utilities.isDefaultNameNode(conf)) {
+      if (location != null) {
         location = Utilities.getQualifiedPath(conf, new Path(location));
         newPart.setLocation(location);
       }
@@ -747,7 +747,7 @@ public class Hive {
           tmpPart.getParameters().remove(hive_metastoreConstants.DDL_TIME);
         }
         String location = tmpPart.getLocation();
-        if (location != null && !Utilities.isDefaultNameNode(conf)) {
+        if (location != null) {
           location = Utilities.getQualifiedPath(conf, new Path(location));
           tmpPart.setLocation(location);
         }
@@ -2235,7 +2235,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
      Table tbl, AddPartitionDesc.OnePartitionDesc addSpec) throws HiveException {
     Path location = addSpec.getLocation() != null
         ? new Path(tbl.getPath(), addSpec.getLocation()) : null;
-    if (location !=null && !Utilities.isDefaultNameNode(conf)) {
+    if (location != null) {
      // Ensure that it is a full qualified path (in most cases it will be since tbl.getPath() is full qualified)
       location = new Path(Utilities.getQualifiedPath(conf, location));
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/32b9716f/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
index 4320421..a05cb96 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
@@ -804,7 +804,7 @@ public class CreateTableDesc extends DDLDesc implements Serializable {
      tbl.getTTable().getSd().setOutputFormat(tbl.getOutputFormatClass().getName());
     }
 
-    if (!Utilities.isDefaultNameNode(conf) && DDLTask.doesTableNeedLocation(tbl)) {
+    if (DDLTask.doesTableNeedLocation(tbl)) {
       // If location is specified - ensure that it is a full qualified name
      DDLTask.makeLocationQualified(tbl.getDbName(), tbl.getTTable().getSd(), tableName, conf);
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/32b9716f/ql/src/test/results/clientnegative/external2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/external2.q.out b/ql/src/test/results/clientnegative/external2.q.out
index 706432a..eb5518c 100644
--- a/ql/src/test/results/clientnegative/external2.q.out
+++ b/ql/src/test/results/clientnegative/external2.q.out
@@ -10,4 +10,4 @@ POSTHOOK: Output: default@external2
 PREHOOK: type: ALTERTABLE_ADDPARTS
 #### A masked pattern was here ####
 PREHOOK: Output: default@external2
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:java.io.IOException: Got exception: java.io.IOException No FileSystem for scheme: invalidscheme)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. java.io.IOException: No FileSystem for scheme: invalidscheme
