Repository: hive
Updated Branches:
  refs/heads/master 16521c400 -> b678ed85d


http://git-wip-us.apache.org/repos/asf/hive/blob/b678ed85/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
index a100e9f..9a1d159 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
@@ -50,6 +50,7 @@ import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
 import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.TableMeta;
 import org.apache.hadoop.hive.metastore.api.Type;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
@@ -220,6 +221,12 @@ public class DummyRawStoreControlledCommit implements RawStore, Configurable {
   }
 
   @Override
+  public List<TableMeta> getTableMeta(String dbNames, String tableNames, List<String> tableTypes)
+      throws MetaException {
+    return objectStore.getTableMeta(dbNames, tableNames, tableTypes);
+  }
+
+  @Override
   public List<Table> getTableObjectsByName(String dbName, List<String> tableNames)
       throws MetaException, UnknownDBException {
     return objectStore.getTableObjectsByName(dbName, tableNames);

http://git-wip-us.apache.org/repos/asf/hive/blob/b678ed85/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
index f6100e6..8dde0af 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
@@ -51,6 +51,7 @@ import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
 import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.TableMeta;
 import org.apache.hadoop.hive.metastore.api.Type;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
@@ -224,6 +225,12 @@ public class DummyRawStoreForJdoConnection implements RawStore {
   }
 
   @Override
+  public List<TableMeta> getTableMeta(String dbNames, String tableNames, List<String> tableTypes)
+      throws MetaException {
+    return Collections.emptyList();
+  }
+
+  @Override
   public List<Table> getTableObjectsByName(String dbname, List<String> tableNames)
       throws MetaException, UnknownDBException {
 

http://git-wip-us.apache.org/repos/asf/hive/blob/b678ed85/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
index 7af9d85..581a919 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
@@ -50,6 +50,7 @@ import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
+import org.apache.hadoop.hive.metastore.api.TableMeta;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.metastore.api.UnknownTableException;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -161,11 +162,7 @@ public class SessionHiveMetaStoreClient extends HiveMetaStoreClient implements I
     Matcher matcher = pattern.matcher("");
     Set<String> combinedTableNames = new HashSet<String>();
     for (String tableName : tables.keySet()) {
-      if (matcher == null) {
-        matcher = pattern.matcher(tableName);
-      } else {
-        matcher.reset(tableName);
-      }
+      matcher.reset(tableName);
       if (matcher.matches()) {
         combinedTableNames.add(tableName);
       }
@@ -177,6 +174,55 @@ public class SessionHiveMetaStoreClient extends HiveMetaStoreClient implements I
     Collections.sort(tableNames);
     return tableNames;
   }
+  
+  @Override
+  public List<TableMeta> getTableMeta(String dbPatterns, String tablePatterns, List<String> tableTypes)
+      throws MetaException {
+    List<TableMeta> tableMetas = super.getTableMeta(dbPatterns, tablePatterns, tableTypes);
+    Map<String, Map<String, Table>> tmpTables = getTempTables();
+    if (tmpTables.isEmpty()) {
+      return tableMetas;
+    }
+
+    List<Matcher> dbPatternList = new ArrayList<>();
+    for (String element : dbPatterns.split("\\|")) {
+      dbPatternList.add(Pattern.compile(element.replaceAll("\\*", ".*")).matcher(""));
+    }
+    List<Matcher> tblPatternList = new ArrayList<>();
+    for (String element : tablePatterns.split("\\|")) {
+      tblPatternList.add(Pattern.compile(element.replaceAll("\\*", ".*")).matcher(""));
+    }
+    StringBuilder builder = new StringBuilder();
+    for (Map.Entry<String, Map<String, Table>> outer : tmpTables.entrySet()) {
+      if (!matchesAny(outer.getKey(), dbPatternList)) {
+        continue;
+      }
+      for (Map.Entry<String, Table> inner : outer.getValue().entrySet()) {
+        Table table = inner.getValue();
+        String tableName = table.getTableName();
+        String typeString = table.getTableType().name();
+        if (tableTypes != null && !tableTypes.contains(typeString)) {
+          continue;
+        }
+        if (!matchesAny(inner.getKey(), tblPatternList)) {
+          continue;
+        }
+        TableMeta tableMeta = new TableMeta(table.getDbName(), tableName, typeString);
+        tableMeta.setComments(table.getProperty("comment"));
+        tableMetas.add(tableMeta);
+      }
+    }
+    return tableMetas;
+  }
+  
+  private boolean matchesAny(String string, List<Matcher> matchers) {
+    for (Matcher matcher : matchers) {
+      if (matcher.reset(string).matches()) {
+        return true;
+      }
+    }
+    return matchers.isEmpty();
+  }
 
   @Override
   public List<org.apache.hadoop.hive.metastore.api.Table> getTableObjectsByName(String dbName,
@@ -508,12 +554,16 @@ public class SessionHiveMetaStoreClient extends HiveMetaStoreClient implements I
   }
 
   public static Map<String, Table> getTempTablesForDatabase(String dbName) {
+    return getTempTables().get(dbName);
+  }
+  
+  public static Map<String, Map<String, Table>> getTempTables() {
     SessionState ss = SessionState.get();
     if (ss == null) {
       LOG.debug("No current SessionState, skipping temp tables");
-      return null;
+      return Collections.emptyMap();
     }
-    return ss.getTempTables().get(dbName);
+    return ss.getTempTables();
   }
 
   private Map<String, ColumnStatisticsObj> getTempTableColumnStatsForTable(String dbName,
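
For illustration only (not part of the patch): the temp-table override above splits each pattern on '|' and treats '*' as the wildcard, and its matchesAny() helper treats an empty matcher list as "match everything". A minimal standalone sketch of that behaviour, with hypothetical class and method names:

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PatternSketch {

  // Same conversion as the override above: split "default|tmp*" on '|'
  // and turn each '*' into the regex '.*'.
  static List<Matcher> toMatchers(String patterns) {
    List<Matcher> matchers = new ArrayList<>();
    for (String element : patterns.split("\\|")) {
      matchers.add(Pattern.compile(element.replaceAll("\\*", ".*")).matcher(""));
    }
    return matchers;
  }

  // Same contract as the private matchesAny(): any matcher may match,
  // and an empty matcher list matches every name.
  static boolean matchesAny(String s, List<Matcher> matchers) {
    for (Matcher m : matchers) {
      if (m.reset(s).matches()) {
        return true;
      }
    }
    return matchers.isEmpty();
  }

  public static void main(String[] args) {
    List<Matcher> dbs = toMatchers("default|tmp*");
    System.out.println(matchesAny("tmp_scratch", dbs)); // true
    System.out.println(matchesAny("prod", dbs));        // false
  }
}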

http://git-wip-us.apache.org/repos/asf/hive/blob/b678ed85/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
index 296280f..65bbc1c 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
@@ -22,14 +22,11 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.ql.metadata.TableIterable;
+import org.apache.hadoop.hive.metastore.api.TableMeta;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObjectUtils;
-import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.service.cli.FetchOrientation;
 import org.apache.hive.service.cli.HiveSQLException;
 import org.apache.hive.service.cli.OperationState;
@@ -48,7 +45,7 @@ public class GetTablesOperation extends MetadataOperation {
   private final String catalogName;
   private final String schemaName;
   private final String tableName;
-  private final List<String> tableTypes = new ArrayList<String>();
+  private final List<String> tableTypeList;
   private final RowSet rowSet;
   private final TableTypeMapping tableTypeMapping;
 
@@ -58,7 +55,14 @@ public class GetTablesOperation extends MetadataOperation {
   .addStringColumn("TABLE_SCHEM", "Schema name.")
   .addStringColumn("TABLE_NAME", "Table name.")
   .addStringColumn("TABLE_TYPE", "The table type, e.g. \"TABLE\", \"VIEW\", 
etc.")
-  .addStringColumn("REMARKS", "Comments about the table.");
+  .addStringColumn("REMARKS", "Comments about the table.")
+  .addStringColumn("TYPE_CAT", "The types catalog.")
+  .addStringColumn("TYPE_SCHEM", "The types schema.")
+  .addStringColumn("TYPE_NAME", "Type name.")
+  .addStringColumn("SELF_REFERENCING_COL_NAME", 
+      "Name of the designated \"identifier\" column of a typed table.")
+  .addStringColumn("REF_GENERATION", 
+      "Specifies how values in SELF_REFERENCING_COL_NAME are created.");
 
   protected GetTablesOperation(HiveSession parentSession,
       String catalogName, String schemaName, String tableName,
@@ -72,7 +76,12 @@ public class GetTablesOperation extends MetadataOperation {
     tableTypeMapping =
         TableTypeMappingFactory.getTableTypeMapping(tableMappingStr);
     if (tableTypes != null) {
-      this.tableTypes.addAll(tableTypes);
+      tableTypeList = new ArrayList<String>();
+      for (String tableType : tableTypes) {
+        tableTypeList.add(tableTypeMapping.mapToHiveType(tableType.trim()));
+      }
+    } else {
+      tableTypeList = null;
     }
     this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());
   }
@@ -91,23 +100,17 @@ public class GetTablesOperation extends MetadataOperation {
       }
 
       String tablePattern = convertIdentifierPattern(tableName, true);
-      int maxBatchSize = SessionState.get().getConf().getIntVar(ConfVars.METASTORE_BATCH_RETRIEVE_MAX);
 
-      for (String dbName : metastoreClient.getDatabases(schemaPattern)) {
-        List<String> tableNames = metastoreClient.getTables(dbName, tablePattern);
-        for (Table table : new TableIterable(metastoreClient, dbName, tableNames, maxBatchSize)) {
-          Object[] rowData = new Object[] {
+      for (TableMeta tableMeta :
+          metastoreClient.getTableMeta(schemaPattern, tablePattern, tableTypeList)) {
+        rowSet.addRow(new Object[] {
               DEFAULT_HIVE_CATALOG,
-              table.getDbName(),
-              table.getTableName(),
-              tableTypeMapping.mapToClientType(table.getTableType()),
-              table.getParameters().get("comment")
-              };
-          if (tableTypes.isEmpty() || tableTypes.contains(
-                tableTypeMapping.mapToClientType(table.getTableType()))) {
-            rowSet.addRow(rowData);
-          }
-        }
+              tableMeta.getDbName(),
+              tableMeta.getTableName(),
+              tableTypeMapping.mapToClientType(tableMeta.getTableType()),
+              tableMeta.getComments(),
+              null, null, null, null, null
+              });
       }
       setState(OperationState.FINISHED);
     } catch (Exception e) {
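
For illustration only (not part of the patch): the added REMARKS, TYPE_CAT, TYPE_SCHEM, TYPE_NAME, SELF_REFERENCING_COL_NAME and REF_GENERATION columns give the result set the ten-column layout that java.sql.DatabaseMetaData.getTables() specifies, so a plain JDBC client can read the table comment directly. A minimal client-side sketch; the URL and credentials are placeholders, the Hive JDBC driver is assumed to be on the classpath, and the accepted type names depend on hive.server2.table.type.mapping:

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;

public class GetTablesClientSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder HiveServer2 URL; adjust host, port and database.
    try (Connection conn = DriverManager.getConnection(
        "jdbc:hive2://localhost:10000/default", "hiveuser", "")) {
      DatabaseMetaData meta = conn.getMetaData();
      // Arguments: catalog, schema pattern, table name pattern, table types.
      try (ResultSet rs = meta.getTables(null, "default", "%",
          new String[] {"TABLE", "VIEW"})) {
        while (rs.next()) {
          System.out.printf("%s.%s [%s] %s%n",
              rs.getString("TABLE_SCHEM"),
              rs.getString("TABLE_NAME"),
              rs.getString("TABLE_TYPE"),
              rs.getString("REMARKS"));
        }
      }
    }
  }
}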

http://git-wip-us.apache.org/repos/asf/hive/blob/b678ed85/service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java b/service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java
index 4595ef5..285b4f9 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java
@@ -18,7 +18,6 @@
 
 package org.apache.hive.service.cli.operation;
 
-import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -95,16 +94,30 @@ public abstract class MetadataOperation extends Operation {
   * other hand is done locally inside the hive code and that requires the regex wildchar
   * format '.*'  This is driven by the datanucleusFormat flag.
    */
-  private String convertPattern(final String pattern, boolean datanucleusFormat) {
+  private String convertPattern(String pattern, boolean datanucleusFormat) {
     String wStr;
     if (datanucleusFormat) {
       wStr = "*";
     } else {
       wStr = ".*";
     }
-    return pattern
-        .replaceAll("([^\\\\])%", "$1" + wStr).replaceAll("\\\\%", "%").replaceAll("^%", wStr)
-        .replaceAll("([^\\\\])_", "$1.").replaceAll("\\\\_", "_").replaceAll("^_", ".");
+    pattern = replaceAll(pattern, "([^\\\\])%", "$1" + wStr);
+    pattern = replaceAll(pattern, "\\\\%", "%");
+    pattern = replaceAll(pattern, "^%", wStr);
+    pattern = replaceAll(pattern, "([^\\\\])_", "$1.");
+    pattern = replaceAll(pattern, "\\\\_", "_");
+    pattern = replaceAll(pattern, "^_", ".");
+    return pattern;
+  }
+  
+  private String replaceAll(String input, final String pattern, final String replace) {
+    while (true) {
+      String replaced = input.replaceAll(pattern, replace);
+      if (replaced.equals(input)) {
+        return replaced;
+      }
+      input = replaced;
+    }
   }
 
   protected boolean isAuthV2Enabled(){
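
For illustration only (not part of the patch): a single pass of replaceAll("([^\\\\])%", ...) can miss consecutive wildcards, because the character captured before a '%' is consumed by that match and is no longer available for the next one; looping until the string stops changing, as the new helper does, converts them all. A small sketch using the regex form '.*' of the wildcard:

public class ReplaceAllSketch {
  public static void main(String[] args) {
    String pattern = "a%%b";  // two consecutive JDBC '%' wildcards

    // One pass: the second '%' is missed because the character before it
    // was consumed by the first match.
    System.out.println(pattern.replaceAll("([^\\\\])%", "$1.*"));  // a.*%b

    // Iterate to a fixed point, as the new replaceAll() helper does.
    String input = pattern;
    while (true) {
      String replaced = input.replaceAll("([^\\\\])%", "$1.*");
      if (replaced.equals(input)) {
        break;
      }
      input = replaced;
    }
    System.out.println(input);  // a.*.*b -- both wildcards converted
  }
}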
