Repository: hive
Updated Branches:
  refs/heads/branch-2.1 b540a65f3 -> f5a598cba
  refs/heads/master b772fed09 -> 5783ab858


HIVE-14055 : directSql - getting the number of partitions is broken (Sergey 
Shelukhin, reviewed by Sergio Peña)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5783ab85
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5783ab85
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5783ab85

Branch: refs/heads/master
Commit: 5783ab8586f0595b7778f64c409442de3ed5b74f
Parents: b772fed
Author: Sergey Shelukhin <[email protected]>
Authored: Wed Jun 22 18:25:42 2016 -0700
Committer: Sergey Shelukhin <[email protected]>
Committed: Wed Jun 22 18:25:42 2016 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/conf/HiveConf.java   | 51 ++++++++++++++-
 .../hive/metastore/MetaStoreDirectSql.java      | 64 ++++++++-----------
 .../hadoop/hive/metastore/ObjectStore.java      | 67 +++++++++++---------
 .../hadoop/hive/metastore/hbase/HBaseStore.java | 12 ++--
 4 files changed, 117 insertions(+), 77 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/5783ab85/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 08ee23f..faf9088 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -44,6 +44,7 @@ import org.slf4j.LoggerFactory;
 import javax.security.auth.login.LoginException;
 
 import java.io.*;
+import java.net.URI;
 import java.net.URL;
 import java.net.URLDecoder;
 import java.net.URLEncoder;
@@ -135,9 +136,9 @@ public class HiveConf extends Configuration {
     hiveDefaultURL = classLoader.getResource("hive-default.xml");
 
     // Look for hive-site.xml on the CLASSPATH and log its location if found.
-    hiveSiteURL = classLoader.getResource("hive-site.xml");
-    hivemetastoreSiteUrl = classLoader.getResource("hivemetastore-site.xml");
-    hiveServer2SiteUrl = classLoader.getResource("hiveserver2-site.xml");
+    hiveSiteURL = findConfigFile(classLoader, "hive-site.xml", true);
+    hivemetastoreSiteUrl = findConfigFile(classLoader, 
"hivemetastore-site.xml", false);
+    hiveServer2SiteUrl = findConfigFile(classLoader, "hiveserver2-site.xml", 
false);
 
     for (ConfVars confVar : ConfVars.values()) {
       vars.put(confVar.varname, confVar);
@@ -148,6 +149,50 @@ public class HiveConf extends Configuration {
     llapDaemonVarsSet = 
Collections.unmodifiableSet(llapDaemonConfVarsSetLocal);
   }
 
+  private static URL findConfigFile(ClassLoader classLoader, String name, 
boolean doLog) {
+    URL result = classLoader.getResource(name);
+    if (result == null) {
+      String confPath = System.getenv("HIVE_CONF_DIR");
+      result = checkConfigFile(new File(confPath, name));
+      if (result == null) {
+        String homePath = System.getenv("HIVE_HOME");
+        String nameInConf = "conf" + File.separator + name;
+        result = checkConfigFile(new File(homePath, nameInConf));
+        if (result == null) {
+          URI jarUri = null;
+          try {
+            jarUri = 
HiveConf.class.getProtectionDomain().getCodeSource().getLocation().toURI();
+          } catch (Throwable e) {
+            if (l4j.isInfoEnabled()) {
+              l4j.info("Cannot get jar URI", e);
+            }
+            System.err.println("Cannot get jar URI: " + e.getMessage());
+          }
+          if (jarUri != null) {
+            result = checkConfigFile(new File(new File(jarUri).getParentFile(), 
nameInConf));
+          }
+        }
+      }
+    }
+    if (doLog && l4j.isInfoEnabled()) {
+      l4j.info("Found configuration file " + result);
+    }
+    return result;
+  }
+
+  private static URL checkConfigFile(File f) {
+    try {
+      return (f.exists() && f.isFile()) ? f.toURI().toURL() : null;
+    } catch (Throwable e) {
+      if (l4j.isInfoEnabled()) {
+        l4j.info("Error looking for config " + f, e);
+      }
+      System.err.println("Error looking for config " + f + ": " + 
e.getMessage());
+      return null;
+    }
+  }
+
+
+
+
   @InterfaceAudience.Private
   public static final String PREFIX_LLAP = "llap.";
   @InterfaceAudience.Private

http://git-wip-us.apache.org/repos/asf/hive/blob/5783ab85/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
----------------------------------------------------------------------
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
index c135179..b809269 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
@@ -370,42 +370,34 @@ class MetaStoreDirectSql {
 
   /**
    * Gets partitions by using direct SQL queries.
-   * @param table The table.
-   * @param tree The expression tree from which the SQL filter will be derived.
+   * @param filter The filter.
    * @param max The maximum number of partitions to return.
-   * @return List of partitions. Null if SQL filter cannot be derived.
+   * @return List of partitions.
    */
   public List<Partition> getPartitionsViaSqlFilter(
-      Table table, ExpressionTree tree, Integer max) throws MetaException {
-    assert tree != null;
-    List<Object> params = new ArrayList<Object>();
-    List<String> joins = new ArrayList<String>();
-    // Derby and Oracle do not interpret filters ANSI-properly in some cases 
and need a workaround.
-    boolean dbHasJoinCastBug = (dbType == DB.DERBY || dbType == DB.ORACLE);
-    String sqlFilter = PartitionFilterGenerator.generateSqlFilter(
-        table, tree, params, joins, dbHasJoinCastBug, defaultPartName, dbType);
-    if (sqlFilter == null) {
-      return null; // Cannot make SQL filter to push down.
-    }
-    Boolean isViewTable = isViewTable(table);
-    return getPartitionsViaSqlFilterInternal(table.getDbName(), 
table.getTableName(),
-        isViewTable, sqlFilter, params, joins, max);
+      SqlFilterForPushdown filter, Integer max) throws MetaException {
+    Boolean isViewTable = isViewTable(filter.table);
+    return getPartitionsViaSqlFilterInternal(filter.table.getDbName(), 
filter.table.getTableName(),
+        isViewTable, filter.filter, filter.params, filter.joins, max);
   }
 
-  public int getNumPartitionsViaSqlFilter(Table table, ExpressionTree tree) 
throws MetaException {
-    List<Object> params = new ArrayList<Object>();
-    List<String>joins = new ArrayList<String>();
+  public static class SqlFilterForPushdown {
+    private List<Object> params = new ArrayList<Object>();
+    private List<String> joins = new ArrayList<String>();
+    private String filter;
+    private Table table;
+  }
+
+  public boolean generateSqlFilterForPushdown(
+      Table table, ExpressionTree tree, SqlFilterForPushdown result) throws 
MetaException {
     // Derby and Oracle do not interpret filters ANSI-properly in some cases 
and need a workaround.
     boolean dbHasJoinCastBug = (dbType == DB.DERBY || dbType == DB.ORACLE);
-    String sqlFilter = PartitionFilterGenerator.generateSqlFilter(
-        table, tree, params, joins, dbHasJoinCastBug, defaultPartName, dbType);
-    if (sqlFilter == null) {
-      return 0; // Cannot make SQL filter to push down.
-    }
-    return getNumPartitionsViaSqlFilterInternal(table.getDbName(), 
table.getTableName(), sqlFilter, params, joins);
+    result.table = table;
+    result.filter = PartitionFilterGenerator.generateSqlFilter(
+        table, tree, result.params, result.joins, dbHasJoinCastBug, 
defaultPartName, dbType);
+    return result.filter != null;
   }
 
-
   /**
    * Gets all partitions of a table by using direct SQL queries.
    * @param dbName Metastore db name.
@@ -827,12 +819,10 @@ class MetaStoreDirectSql {
     return orderedResult;
   }
 
-  private int getNumPartitionsViaSqlFilterInternal(String dbName, String 
tblName,
-                                                   String sqlFilter, 
List<Object> paramsForFilter,
-                                                   List<String> 
joinsForFilter) throws MetaException {
+  public int getNumPartitionsViaSqlFilter(SqlFilterForPushdown filter) throws 
MetaException {
     boolean doTrace = LOG.isDebugEnabled();
-    dbName = dbName.toLowerCase();
-    tblName = tblName.toLowerCase();
+    String dbName = filter.table.getDbName().toLowerCase();
+    String tblName = filter.table.getTableName().toLowerCase();
 
     // Get number of partitions by doing count on PART_ID.
     String queryText = "select count(\"PARTITIONS\".\"PART_ID\") from 
\"PARTITIONS\""
@@ -840,14 +830,14 @@ class MetaStoreDirectSql {
       + "    and \"TBLS\".\"TBL_NAME\" = ? "
       + "  inner join \"DBS\" on \"TBLS\".\"DB_ID\" = \"DBS\".\"DB_ID\" "
       + "     and \"DBS\".\"NAME\" = ? "
-      + join(joinsForFilter, ' ')
-      + (sqlFilter == null ? "" : (" where " + sqlFilter));
+      + join(filter.joins, ' ')
+      + (filter.filter == null ? "" : (" where " + filter.filter));
 
-    Object[] params = new Object[paramsForFilter.size() + 2];
+    Object[] params = new Object[filter.params.size() + 2];
     params[0] = tblName;
     params[1] = dbName;
-    for (int i = 0; i < paramsForFilter.size(); ++i) {
-      params[i + 2] = paramsForFilter.get(i);
+    for (int i = 0; i < filter.params.size(); ++i) {
+      params[i + 2] = filter.params.get(i);
     }
 
     long start = doTrace ? System.nanoTime() : 0;

http://git-wip-us.apache.org/repos/asf/hive/blob/5783ab85/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
----------------------------------------------------------------------
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index da188d3..f67efcd 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -69,6 +69,7 @@ import 
org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
 import org.apache.hadoop.hive.common.metrics.common.MetricsFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import 
org.apache.hadoop.hive.metastore.MetaStoreDirectSql.SqlFilterForPushdown;
 import org.apache.hadoop.hive.metastore.api.AggrStats;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
@@ -2412,16 +2413,16 @@ public class ObjectStore implements RawStore, 
Configurable {
         // If we have some sort of expression tree, try SQL filter pushdown.
         List<Partition> result = null;
         if (exprTree != null) {
-          result = directSql.getPartitionsViaSqlFilter(ctx.getTable(), 
exprTree, null);
-        }
-        if (result == null) {
-          // We couldn't do SQL filter pushdown. Get names via normal means.
-          List<String> partNames = new LinkedList<String>();
-          hasUnknownPartitions.set(getPartitionNamesPrunedByExprNoTxn(
-              ctx.getTable(), expr, defaultPartitionName, maxParts, 
partNames));
-          result = directSql.getPartitionsViaSqlFilter(dbName, tblName, 
partNames);
+          SqlFilterForPushdown filter = new SqlFilterForPushdown();
+          if (directSql.generateSqlFilterForPushdown(ctx.getTable(), exprTree, 
filter)) {
+            return directSql.getPartitionsViaSqlFilter(filter, null);
+          }
         }
-        return result;
+        // We couldn't do SQL filter pushdown. Get names via normal means.
+        List<String> partNames = new LinkedList<String>();
+        hasUnknownPartitions.set(getPartitionNamesPrunedByExprNoTxn(
+            ctx.getTable(), expr, defaultPartitionName, maxParts, partNames));
+        return directSql.getPartitionsViaSqlFilter(dbName, tblName, partNames);
       }
 
       @Override
@@ -2650,6 +2651,9 @@ public class ObjectStore implements RawStore, 
Configurable {
       this.doUseDirectSql = allowSql && isConfigEnabled && 
directSql.isCompatibleDatastore();
     }
 
+    protected boolean canUseDirectSql(GetHelper<T> ctx) throws MetaException {
+      return true; // By default, assume we can use directSQL - that's kind 
of the point.
+    }
     protected abstract String describeResult();
     protected abstract T getSqlResult(GetHelper<T> ctx) throws MetaException;
     protected abstract T getJdoResult(
@@ -2661,13 +2665,16 @@ public class ObjectStore implements RawStore, 
Configurable {
         if (doUseDirectSql) {
           try {
             directSql.prepareTxn();
-            setResult(getSqlResult(this));
+            this.results = getSqlResult(this);
           } catch (Exception ex) {
             handleDirectSqlError(ex);
           }
         }
+        // Note that this will be invoked in 2 cases:
+        //    1) DirectSQL was disabled to start with;
+        //    2) DirectSQL threw and was disabled in handleDirectSqlError.
         if (!doUseDirectSql) {
-          setResult(getJdoResult(this));
+          this.results = getJdoResult(this);
         }
         return commit();
       } catch (NoSuchObjectException ex) {
@@ -2688,11 +2695,7 @@ public class ObjectStore implements RawStore, 
Configurable {
       if (initTable && (tblName != null)) {
         table = ensureGetTable(dbName, tblName);
       }
-    }
-
-    private boolean setResult(T results) {
-      this.results = results;
-      return this.results != null;
+      doUseDirectSql = doUseDirectSql && canUseDirectSql(this);
     }
 
     private void handleDirectSqlError(Exception ex) throws MetaException, 
NoSuchObjectException {
@@ -2780,10 +2783,6 @@ public class ObjectStore implements RawStore, 
Configurable {
       return message.toString();
     }
 
-    public void disableDirectSql() {
-      this.doUseDirectSql = false;
-    }
-
     private T commit() {
       success = commitTransaction();
       if (doTrace) {
@@ -2863,15 +2862,21 @@ public class ObjectStore implements RawStore, 
Configurable {
     final ExpressionTree tree = (filter != null && !filter.isEmpty())
       ? PartFilterExprUtil.getFilterParser(filter).tree : 
ExpressionTree.EMPTY_TREE;
     return new GetHelper<Integer>(dbName, tblName, allowSql, allowJdo) {
+      private SqlFilterForPushdown filter = new SqlFilterForPushdown();
       @Override
       protected String describeResult() {
-        return null;
+        return "Partition count";
       }
 
+      @Override
+      protected boolean canUseDirectSql(GetHelper<Integer> ctx) throws 
MetaException {
+        return directSql.generateSqlFilterForPushdown(ctx.getTable(), tree, 
filter);
+      }
+
       @Override
       protected Integer getSqlResult(GetHelper<Integer> ctx) throws 
MetaException {
-        return directSql.getNumPartitionsViaSqlFilter(ctx.getTable(), tree);
+        return directSql.getNumPartitionsViaSqlFilter(filter);
       }
+
       @Override
       protected Integer getJdoResult(
         GetHelper<Integer> ctx) throws MetaException, NoSuchObjectException {
@@ -2885,19 +2890,19 @@ public class ObjectStore implements RawStore, 
Configurable {
       throws MetaException, NoSuchObjectException {
     final ExpressionTree tree = (filter != null && !filter.isEmpty())
         ? PartFilterExprUtil.getFilterParser(filter).tree : 
ExpressionTree.EMPTY_TREE;
-
     return new GetListHelper<Partition>(dbName, tblName, allowSql, allowJdo) {
+      private SqlFilterForPushdown filter = new SqlFilterForPushdown();
+
+      @Override
+      protected boolean canUseDirectSql(GetHelper<List<Partition>> ctx) throws 
MetaException {
+        return directSql.generateSqlFilterForPushdown(ctx.getTable(), tree, 
filter);
+      }
+
       @Override
       protected List<Partition> getSqlResult(GetHelper<List<Partition>> ctx) 
throws MetaException {
-        List<Partition> parts = directSql.getPartitionsViaSqlFilter(
-            ctx.getTable(), tree, (maxParts < 0) ? null : (int)maxParts);
-        if (parts == null) {
-          // Cannot push down SQL filter. The message has been logged 
internally.
-          // This is not an error so don't roll back, just go to JDO.
-          ctx.disableDirectSql();
-        }
-        return parts;
+        return directSql.getPartitionsViaSqlFilter(filter, (maxParts < 0) ? 
null : (int)maxParts);
       }
+
       @Override
       protected List<Partition> getJdoResult(
           GetHelper<List<Partition>> ctx) throws MetaException, 
NoSuchObjectException {

http://git-wip-us.apache.org/repos/asf/hive/blob/5783ab85/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
----------------------------------------------------------------------
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
index 31f0d7b..2f837bb 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
@@ -2575,7 +2575,7 @@ public class HBaseStore implements RawStore {
   @Override
   public List<SQLPrimaryKey> getPrimaryKeys(String db_name, String tbl_name)
     throws MetaException {
-    // TODO Auto-generated method stub
+    // TODO: primary key retrieval is not yet implemented in HBaseStore
     return null;
   }
 
@@ -2583,7 +2583,7 @@ public class HBaseStore implements RawStore {
   public List<SQLForeignKey> getForeignKeys(String parent_db_name,
     String parent_tbl_name, String foreign_db_name, String foreign_tbl_name)
     throws MetaException {
-    // TODO Auto-generated method stub
+    // TODO: foreign key retrieval is not yet implemented in HBaseStore
     return null;
   }
 
@@ -2591,24 +2591,24 @@ public class HBaseStore implements RawStore {
   public void createTableWithConstraints(Table tbl,
     List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys)
     throws InvalidObjectException, MetaException {
-    // TODO Auto-generated method stub
+    // TODO: constraints are not yet supported by HBaseStore
   }
 
   @Override
   public void dropConstraint(String dbName, String tableName,
     String constraintName) throws NoSuchObjectException {
-    // TODO Auto-generated method stub 
+    // TODO: constraints are not yet supported by HBaseStore
   }
 
   @Override
   public void addPrimaryKeys(List<SQLPrimaryKey> pks)
     throws InvalidObjectException, MetaException {
-    // TODO Auto-generated method stub
+    // TODO: constraints are not yet supported by HBaseStore
   }
 
   @Override
   public void addForeignKeys(List<SQLForeignKey> fks)
     throws InvalidObjectException, MetaException {
-    // TODO Auto-generated method stub
+    // TODO: constraints are not yet supported by HBaseStore
   }
 }

Reply via email to