This is an automated email from the ASF dual-hosted git repository.

ngangam pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new dab693f  HIVE-24769: Include owner information in Table 
object in HiveMetaStoreClient#getTables()
dab693f is described below

commit dab693f73484c697c71cf88b5704b04b1aeaadb9
Author: saihemanth <saihema...@cloudera.com>
AuthorDate: Wed Feb 10 14:32:19 2021 -0700

    HIVE-24769: Include owner information in Table object in 
HiveMetaStoreClient#getTables()
---
 .../hcatalog/listener/DummyRawStoreFailEvent.java  |   4 +-
 .../hadoop/hive/metastore/TestAcidTableSetup.java  |   2 +-
 .../TestMetaStoreMultipleEncryptionZones.java      |   2 +-
 .../plugin/AuthorizationMetaStoreFilterHook.java   |  41 ++++++++
 .../hadoop/hive/metastore/TestMetastoreExpr.java   |   3 +-
 .../hive/ql/metadata/TestHiveMetaStoreChecker.java |   3 +-
 .../gen/thrift/gen-cpp/hive_metastore_types.cpp    |  22 ++++
 .../src/gen/thrift/gen-cpp/hive_metastore_types.h  |  12 ++-
 .../hive/metastore/api/GetTablesRequest.java       | 114 ++++++++++++++++++++-
 .../thrift/gen-php/metastore/GetTablesRequest.php  |  24 +++++
 .../src/gen/thrift/gen-py/hive_metastore/ttypes.py |  14 ++-
 .../src/gen/thrift/gen-rb/hive_metastore_types.rb  |   4 +-
 .../hadoop/hive/metastore/HiveMetaStoreClient.java |  24 ++++-
 .../src/main/thrift/hive_metastore.thrift          |   3 +-
 .../apache/hadoop/hive/metastore/HMSHandler.java   |  58 ++++++-----
 .../hive/metastore/HiveMetaStoreChecker.java       |   8 +-
 .../apache/hadoop/hive/metastore/ObjectStore.java  |  34 +++---
 .../org/apache/hadoop/hive/metastore/RawStore.java |   2 +-
 .../hadoop/hive/metastore/cache/CachedStore.java   |   4 +-
 .../metastore/DummyRawStoreControlledCommit.java   |   2 +-
 .../metastore/DummyRawStoreForJdoConnection.java   |   2 +-
 .../hadoop/hive/metastore/TestHiveMetaStore.java   |   2 +-
 .../hive/metastore/client/TestTablesGetExists.java |  11 +-
 23 files changed, 328 insertions(+), 67 deletions(-)

diff --git 
a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
 
b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
index 7d7360b..ac03616 100644
--- 
a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
+++ 
b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
@@ -416,8 +416,8 @@ public class DummyRawStoreFailEvent implements RawStore, 
Configurable {
 
   @Override
   public List<Table> getTableObjectsByName(String catName, String dbName, 
List<String> tableNames,
-          GetProjectionsSpec projectionSpec) throws MetaException, 
UnknownDBException {
-    return objectStore.getTableObjectsByName(catName, dbName, tableNames, 
projectionSpec);
+          GetProjectionsSpec projectionSpec, String tablePattern) throws 
MetaException, UnknownDBException {
+    return objectStore.getTableObjectsByName(catName, dbName, tableNames, 
projectionSpec, tablePattern);
   }
 
   @Override
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAcidTableSetup.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAcidTableSetup.java
index 66be9f2..0f004a3 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAcidTableSetup.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAcidTableSetup.java
@@ -227,7 +227,7 @@ public class TestAcidTableSetup {
         client.dropTable(dbName, tableName);
       }
       client.dropDatabase(dbName);
-    } catch (NoSuchObjectException|InvalidOperationException e) {
+    } catch (NoSuchObjectException|InvalidOperationException|MetaException e) {
       // NOP
     }
   }
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMultipleEncryptionZones.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMultipleEncryptionZones.java
index 38850c7..4dcfb22 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMultipleEncryptionZones.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreMultipleEncryptionZones.java
@@ -1648,7 +1648,7 @@ public class TestMetaStoreMultipleEncryptionZones {
         client.dropTable(dbName, tableName);
       }
       client.dropDatabase(dbName);
-    } catch (NoSuchObjectException|InvalidOperationException e) {
+    } catch (NoSuchObjectException|InvalidOperationException|MetaException e) {
       // NOP
     }
   }
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java
index f5b8b12..3bacfb4 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java
@@ -27,6 +27,7 @@ import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience.Private;
 import org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl;
 import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.TableMeta;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -50,6 +51,37 @@ public class AuthorizationMetaStoreFilterHook extends 
DefaultMetaStoreFilterHook
     List<HivePrivilegeObject> listObjs = getHivePrivObjects(dbName, tableList);
     return getTableNames(getFilteredObjects(listObjs));
   }
+  @Override
+  public List<Table> filterTables(List<Table> tableList) throws MetaException {
+    List<HivePrivilegeObject> listObjs = getHivePrivObjects(tableList);
+    return getFilteredTableList(getFilteredObjects(listObjs),tableList);
+  }
+
+  private List<Table> getFilteredTableList(List<HivePrivilegeObject> 
hivePrivilegeObjects, List<Table> tableList) {
+    List<Table> ret = new ArrayList<>();
+    for(HivePrivilegeObject hivePrivilegeObject:hivePrivilegeObjects) {
+      String dbName  = hivePrivilegeObject.getDbname();
+      String tblName = hivePrivilegeObject.getObjectName();
+      Table  table   = getFilteredTable(dbName,tblName,tableList);
+      if (table != null) {
+        ret.add(table);
+      }
+    }
+    return ret;
+  }
+
+  private Table getFilteredTable(String dbName, String tblName, List<Table> 
tableList) {
+    Table ret = null;
+    for (Table table: tableList) {
+      String databaseName = table.getDbName();
+      String tableName = table.getTableName();
+      if (dbName.equals(databaseName) && tblName.equals(tableName)) {
+        ret = table;
+        break;
+      }
+    }
+    return ret;
+  }
 
   @Override
   public List<String> filterDatabases(List<String> dbList) throws 
MetaException {
@@ -101,6 +133,15 @@ public class AuthorizationMetaStoreFilterHook extends 
DefaultMetaStoreFilterHook
     return objs;
   }
 
+  private List<HivePrivilegeObject> getHivePrivObjects(List<Table> tableList) {
+    List<HivePrivilegeObject> objs = new ArrayList<HivePrivilegeObject>();
+    for(Table tableObject : tableList) {
+      objs.add(new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, 
tableObject.getDbName(), tableObject.getTableName(), null, null,
+              HivePrivilegeObject.HivePrivObjectActionType.OTHER, null, null, 
tableObject.getOwner(), tableObject.getOwnerType()));
+    }
+    return objs;
+  }
+
    @Override
    public List<TableMeta> filterTableMetas(String catName,String 
dbName,List<TableMeta> tableMetas) throws MetaException {
      List<String> tableNames = new ArrayList<>();
diff --git 
a/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java 
b/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java
index d0da0f8..f037499 100644
--- a/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java
+++ b/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.metastore.api.Partition;
@@ -101,7 +102,7 @@ public class TestMetastoreExpr {
         client.dropTable(dbName, tableName);
       }
       client.dropDatabase(dbName);
-    } catch (NoSuchObjectException ignore) {
+    } catch (NoSuchObjectException|MetaException ignore) {
     } catch (InvalidOperationException ignore) {
     }
   }
diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java 
b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
index a5bf049..4ad4a74 100644
--- 
a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
+++ 
b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.MetastoreException;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
@@ -124,7 +125,7 @@ public class TestHiveMetaStoreChecker {
   }
 
   @Test
-  public void testTableCheck() throws HiveException, IOException, TException, 
MetastoreException {
+  public void testTableCheck() throws HiveException, IOException, TException, 
MetastoreException,MetaException {
     CheckResult result = checker.checkMetastore(catName, dbName, null, null, 
null);
     // we haven't added anything so should return an all ok
     assertEquals(Collections.<String>emptySet(), result.getTablesNotInMs());
diff --git 
a/standalone-metastore/metastore-common/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
 
b/standalone-metastore/metastore-common/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
index ecaaecc..6f5d1d9 100644
--- 
a/standalone-metastore/metastore-common/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
+++ 
b/standalone-metastore/metastore-common/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
@@ -31801,6 +31801,11 @@ void GetTablesRequest::__set_projectionSpec(const 
GetProjectionsSpec& val) {
   this->projectionSpec = val;
 __isset.projectionSpec = true;
 }
+
+void GetTablesRequest::__set_tablesPattern(const std::string& val) {
+  this->tablesPattern = val;
+__isset.tablesPattern = true;
+}
 std::ostream& operator<<(std::ostream& out, const GetTablesRequest& obj)
 {
   obj.printTo(out);
@@ -31910,6 +31915,14 @@ uint32_t 
GetTablesRequest::read(::apache::thrift::protocol::TProtocol* iprot) {
           xfer += iprot->skip(ftype);
         }
         break;
+      case 8:
+        if (ftype == ::apache::thrift::protocol::T_STRING) {
+          xfer += iprot->readString(this->tablesPattern);
+          this->__isset.tablesPattern = true;
+        } else {
+          xfer += iprot->skip(ftype);
+        }
+        break;
       default:
         xfer += iprot->skip(ftype);
         break;
@@ -31979,6 +31992,11 @@ uint32_t 
GetTablesRequest::write(::apache::thrift::protocol::TProtocol* oprot) c
     xfer += this->projectionSpec.write(oprot);
     xfer += oprot->writeFieldEnd();
   }
+  if (this->__isset.tablesPattern) {
+    xfer += oprot->writeFieldBegin("tablesPattern", 
::apache::thrift::protocol::T_STRING, 8);
+    xfer += oprot->writeString(this->tablesPattern);
+    xfer += oprot->writeFieldEnd();
+  }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
   return xfer;
@@ -31993,6 +32011,7 @@ void swap(GetTablesRequest &a, GetTablesRequest &b) {
   swap(a.processorCapabilities, b.processorCapabilities);
   swap(a.processorIdentifier, b.processorIdentifier);
   swap(a.projectionSpec, b.projectionSpec);
+  swap(a.tablesPattern, b.tablesPattern);
   swap(a.__isset, b.__isset);
 }
 
@@ -32004,6 +32023,7 @@ GetTablesRequest::GetTablesRequest(const 
GetTablesRequest& other1192) {
   processorCapabilities = other1192.processorCapabilities;
   processorIdentifier = other1192.processorIdentifier;
   projectionSpec = other1192.projectionSpec;
+  tablesPattern = other1192.tablesPattern;
   __isset = other1192.__isset;
 }
 GetTablesRequest& GetTablesRequest::operator=(const GetTablesRequest& 
other1193) {
@@ -32014,6 +32034,7 @@ GetTablesRequest& GetTablesRequest::operator=(const 
GetTablesRequest& other1193)
   processorCapabilities = other1193.processorCapabilities;
   processorIdentifier = other1193.processorIdentifier;
   projectionSpec = other1193.projectionSpec;
+  tablesPattern = other1193.tablesPattern;
   __isset = other1193.__isset;
   return *this;
 }
@@ -32027,6 +32048,7 @@ void GetTablesRequest::printTo(std::ostream& out) const 
{
   out << ", " << "processorCapabilities="; (__isset.processorCapabilities ? 
(out << to_string(processorCapabilities)) : (out << "<null>"));
   out << ", " << "processorIdentifier="; (__isset.processorIdentifier ? (out 
<< to_string(processorIdentifier)) : (out << "<null>"));
   out << ", " << "projectionSpec="; (__isset.projectionSpec ? (out << 
to_string(projectionSpec)) : (out << "<null>"));
+  out << ", " << "tablesPattern="; (__isset.tablesPattern ? (out << 
to_string(tablesPattern)) : (out << "<null>"));
   out << ")";
 }
 
diff --git 
a/standalone-metastore/metastore-common/src/gen/thrift/gen-cpp/hive_metastore_types.h
 
b/standalone-metastore/metastore-common/src/gen/thrift/gen-cpp/hive_metastore_types.h
index efb1a86..b4f8aa9 100644
--- 
a/standalone-metastore/metastore-common/src/gen/thrift/gen-cpp/hive_metastore_types.h
+++ 
b/standalone-metastore/metastore-common/src/gen/thrift/gen-cpp/hive_metastore_types.h
@@ -11768,13 +11768,14 @@ void swap(GetTableResult &a, GetTableResult &b);
 std::ostream& operator<<(std::ostream& out, const GetTableResult& obj);
 
 typedef struct _GetTablesRequest__isset {
-  _GetTablesRequest__isset() : tblNames(false), capabilities(false), 
catName(false), processorCapabilities(false), processorIdentifier(false), 
projectionSpec(false) {}
+  _GetTablesRequest__isset() : tblNames(false), capabilities(false), 
catName(false), processorCapabilities(false), processorIdentifier(false), 
projectionSpec(false), tablesPattern(false) {}
   bool tblNames :1;
   bool capabilities :1;
   bool catName :1;
   bool processorCapabilities :1;
   bool processorIdentifier :1;
   bool projectionSpec :1;
+  bool tablesPattern :1;
 } _GetTablesRequest__isset;
 
 class GetTablesRequest : public virtual ::apache::thrift::TBase {
@@ -11782,7 +11783,7 @@ class GetTablesRequest : public virtual 
::apache::thrift::TBase {
 
   GetTablesRequest(const GetTablesRequest&);
   GetTablesRequest& operator=(const GetTablesRequest&);
-  GetTablesRequest() : dbName(), catName(), processorIdentifier() {
+  GetTablesRequest() : dbName(), catName(), processorIdentifier(), 
tablesPattern() {
   }
 
   virtual ~GetTablesRequest() noexcept;
@@ -11793,6 +11794,7 @@ class GetTablesRequest : public virtual 
::apache::thrift::TBase {
   std::vector<std::string>  processorCapabilities;
   std::string processorIdentifier;
   GetProjectionsSpec projectionSpec;
+  std::string tablesPattern;
 
   _GetTablesRequest__isset __isset;
 
@@ -11810,6 +11812,8 @@ class GetTablesRequest : public virtual 
::apache::thrift::TBase {
 
   void __set_projectionSpec(const GetProjectionsSpec& val);
 
+  void __set_tablesPattern(const std::string& val);
+
   bool operator == (const GetTablesRequest & rhs) const
   {
     if (!(dbName == rhs.dbName))
@@ -11838,6 +11842,10 @@ class GetTablesRequest : public virtual 
::apache::thrift::TBase {
       return false;
     else if (__isset.projectionSpec && !(projectionSpec == rhs.projectionSpec))
       return false;
+    if (__isset.tablesPattern != rhs.__isset.tablesPattern)
+      return false;
+    else if (__isset.tablesPattern && !(tablesPattern == rhs.tablesPattern))
+      return false;
     return true;
   }
   bool operator != (const GetTablesRequest &rhs) const {
diff --git 
a/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetTablesRequest.java
 
b/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetTablesRequest.java
index 7fe5a85..a8909f2 100644
--- 
a/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetTablesRequest.java
+++ 
b/standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetTablesRequest.java
@@ -18,6 +18,7 @@ package org.apache.hadoop.hive.metastore.api;
   private static final org.apache.thrift.protocol.TField 
PROCESSOR_CAPABILITIES_FIELD_DESC = new 
org.apache.thrift.protocol.TField("processorCapabilities", 
org.apache.thrift.protocol.TType.LIST, (short)5);
   private static final org.apache.thrift.protocol.TField 
PROCESSOR_IDENTIFIER_FIELD_DESC = new 
org.apache.thrift.protocol.TField("processorIdentifier", 
org.apache.thrift.protocol.TType.STRING, (short)6);
   private static final org.apache.thrift.protocol.TField 
PROJECTION_SPEC_FIELD_DESC = new 
org.apache.thrift.protocol.TField("projectionSpec", 
org.apache.thrift.protocol.TType.STRUCT, (short)7);
+  private static final org.apache.thrift.protocol.TField 
TABLES_PATTERN_FIELD_DESC = new 
org.apache.thrift.protocol.TField("tablesPattern", 
org.apache.thrift.protocol.TType.STRING, (short)8);
 
   private static final org.apache.thrift.scheme.SchemeFactory 
STANDARD_SCHEME_FACTORY = new GetTablesRequestStandardSchemeFactory();
   private static final org.apache.thrift.scheme.SchemeFactory 
TUPLE_SCHEME_FACTORY = new GetTablesRequestTupleSchemeFactory();
@@ -29,6 +30,7 @@ package org.apache.hadoop.hive.metastore.api;
   private @org.apache.thrift.annotation.Nullable 
java.util.List<java.lang.String> processorCapabilities; // optional
   private @org.apache.thrift.annotation.Nullable java.lang.String 
processorIdentifier; // optional
   private @org.apache.thrift.annotation.Nullable GetProjectionsSpec 
projectionSpec; // optional
+  private @org.apache.thrift.annotation.Nullable java.lang.String 
tablesPattern; // optional
 
   /** The set of fields this struct contains, along with convenience methods 
for finding and manipulating them. */
   public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -38,7 +40,8 @@ package org.apache.hadoop.hive.metastore.api;
     CAT_NAME((short)4, "catName"),
     PROCESSOR_CAPABILITIES((short)5, "processorCapabilities"),
     PROCESSOR_IDENTIFIER((short)6, "processorIdentifier"),
-    PROJECTION_SPEC((short)7, "projectionSpec");
+    PROJECTION_SPEC((short)7, "projectionSpec"),
+    TABLES_PATTERN((short)8, "tablesPattern");
 
     private static final java.util.Map<java.lang.String, _Fields> byName = new 
java.util.HashMap<java.lang.String, _Fields>();
 
@@ -68,6 +71,8 @@ package org.apache.hadoop.hive.metastore.api;
           return PROCESSOR_IDENTIFIER;
         case 7: // PROJECTION_SPEC
           return PROJECTION_SPEC;
+        case 8: // TABLES_PATTERN
+          return TABLES_PATTERN;
         default:
           return null;
       }
@@ -109,7 +114,7 @@ package org.apache.hadoop.hive.metastore.api;
   }
 
   // isset id assignments
-  private static final _Fields optionals[] = 
{_Fields.TBL_NAMES,_Fields.CAPABILITIES,_Fields.CAT_NAME,_Fields.PROCESSOR_CAPABILITIES,_Fields.PROCESSOR_IDENTIFIER,_Fields.PROJECTION_SPEC};
+  private static final _Fields optionals[] = 
{_Fields.TBL_NAMES,_Fields.CAPABILITIES,_Fields.CAT_NAME,_Fields.PROCESSOR_CAPABILITIES,_Fields.PROCESSOR_IDENTIFIER,_Fields.PROJECTION_SPEC,_Fields.TABLES_PATTERN};
   public static final java.util.Map<_Fields, 
org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
   static {
     java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = 
new java.util.EnumMap<_Fields, 
org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
@@ -129,6 +134,8 @@ package org.apache.hadoop.hive.metastore.api;
         new 
org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
     tmpMap.put(_Fields.PROJECTION_SPEC, new 
org.apache.thrift.meta_data.FieldMetaData("projectionSpec", 
org.apache.thrift.TFieldRequirementType.OPTIONAL, 
         new 
org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT,
 GetProjectionsSpec.class)));
+    tmpMap.put(_Fields.TABLES_PATTERN, new 
org.apache.thrift.meta_data.FieldMetaData("tablesPattern", 
org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+        new 
org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
     metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
     
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(GetTablesRequest.class,
 metaDataMap);
   }
@@ -170,6 +177,9 @@ package org.apache.hadoop.hive.metastore.api;
     if (other.isSetProjectionSpec()) {
       this.projectionSpec = new GetProjectionsSpec(other.projectionSpec);
     }
+    if (other.isSetTablesPattern()) {
+      this.tablesPattern = other.tablesPattern;
+    }
   }
 
   public GetTablesRequest deepCopy() {
@@ -185,6 +195,7 @@ package org.apache.hadoop.hive.metastore.api;
     this.processorCapabilities = null;
     this.processorIdentifier = null;
     this.projectionSpec = null;
+    this.tablesPattern = null;
   }
 
   @org.apache.thrift.annotation.Nullable
@@ -387,6 +398,30 @@ package org.apache.hadoop.hive.metastore.api;
     }
   }
 
+  @org.apache.thrift.annotation.Nullable
+  public java.lang.String getTablesPattern() {
+    return this.tablesPattern;
+  }
+
+  public void setTablesPattern(@org.apache.thrift.annotation.Nullable 
java.lang.String tablesPattern) {
+    this.tablesPattern = tablesPattern;
+  }
+
+  public void unsetTablesPattern() {
+    this.tablesPattern = null;
+  }
+
+  /** Returns true if field tablesPattern is set (has been assigned a value) 
and false otherwise */
+  public boolean isSetTablesPattern() {
+    return this.tablesPattern != null;
+  }
+
+  public void setTablesPatternIsSet(boolean value) {
+    if (!value) {
+      this.tablesPattern = null;
+    }
+  }
+
   public void setFieldValue(_Fields field, 
@org.apache.thrift.annotation.Nullable java.lang.Object value) {
     switch (field) {
     case DB_NAME:
@@ -445,6 +480,14 @@ package org.apache.hadoop.hive.metastore.api;
       }
       break;
 
+    case TABLES_PATTERN:
+      if (value == null) {
+        unsetTablesPattern();
+      } else {
+        setTablesPattern((java.lang.String)value);
+      }
+      break;
+
     }
   }
 
@@ -472,6 +515,9 @@ package org.apache.hadoop.hive.metastore.api;
     case PROJECTION_SPEC:
       return getProjectionSpec();
 
+    case TABLES_PATTERN:
+      return getTablesPattern();
+
     }
     throw new java.lang.IllegalStateException();
   }
@@ -497,6 +543,8 @@ package org.apache.hadoop.hive.metastore.api;
       return isSetProcessorIdentifier();
     case PROJECTION_SPEC:
       return isSetProjectionSpec();
+    case TABLES_PATTERN:
+      return isSetTablesPattern();
     }
     throw new java.lang.IllegalStateException();
   }
@@ -579,6 +627,15 @@ package org.apache.hadoop.hive.metastore.api;
         return false;
     }
 
+    boolean this_present_tablesPattern = true && this.isSetTablesPattern();
+    boolean that_present_tablesPattern = true && that.isSetTablesPattern();
+    if (this_present_tablesPattern || that_present_tablesPattern) {
+      if (!(this_present_tablesPattern && that_present_tablesPattern))
+        return false;
+      if (!this.tablesPattern.equals(that.tablesPattern))
+        return false;
+    }
+
     return true;
   }
 
@@ -614,6 +671,10 @@ package org.apache.hadoop.hive.metastore.api;
     if (isSetProjectionSpec())
       hashCode = hashCode * 8191 + projectionSpec.hashCode();
 
+    hashCode = hashCode * 8191 + ((isSetTablesPattern()) ? 131071 : 524287);
+    if (isSetTablesPattern())
+      hashCode = hashCode * 8191 + tablesPattern.hashCode();
+
     return hashCode;
   }
 
@@ -695,6 +756,16 @@ package org.apache.hadoop.hive.metastore.api;
         return lastComparison;
       }
     }
+    lastComparison = 
java.lang.Boolean.valueOf(isSetTablesPattern()).compareTo(other.isSetTablesPattern());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetTablesPattern()) {
+      lastComparison = 
org.apache.thrift.TBaseHelper.compareTo(this.tablesPattern, 
other.tablesPattern);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
     return 0;
   }
 
@@ -783,6 +854,16 @@ package org.apache.hadoop.hive.metastore.api;
       }
       first = false;
     }
+    if (isSetTablesPattern()) {
+      if (!first) sb.append(", ");
+      sb.append("tablesPattern:");
+      if (this.tablesPattern == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.tablesPattern);
+      }
+      first = false;
+    }
     sb.append(")");
     return sb.toString();
   }
@@ -914,6 +995,14 @@ package org.apache.hadoop.hive.metastore.api;
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, 
schemeField.type);
             }
             break;
+          case 8: // TABLES_PATTERN
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.tablesPattern = iprot.readString();
+              struct.setTablesPatternIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, 
schemeField.type);
+            }
+            break;
           default:
             org.apache.thrift.protocol.TProtocolUtil.skip(iprot, 
schemeField.type);
         }
@@ -988,6 +1077,13 @@ package org.apache.hadoop.hive.metastore.api;
           oprot.writeFieldEnd();
         }
       }
+      if (struct.tablesPattern != null) {
+        if (struct.isSetTablesPattern()) {
+          oprot.writeFieldBegin(TABLES_PATTERN_FIELD_DESC);
+          oprot.writeString(struct.tablesPattern);
+          oprot.writeFieldEnd();
+        }
+      }
       oprot.writeFieldStop();
       oprot.writeStructEnd();
     }
@@ -1025,7 +1121,10 @@ package org.apache.hadoop.hive.metastore.api;
       if (struct.isSetProjectionSpec()) {
         optionals.set(5);
       }
-      oprot.writeBitSet(optionals, 6);
+      if (struct.isSetTablesPattern()) {
+        optionals.set(6);
+      }
+      oprot.writeBitSet(optionals, 7);
       if (struct.isSetTblNames()) {
         {
           oprot.writeI32(struct.tblNames.size());
@@ -1056,6 +1155,9 @@ package org.apache.hadoop.hive.metastore.api;
       if (struct.isSetProjectionSpec()) {
         struct.projectionSpec.write(oprot);
       }
+      if (struct.isSetTablesPattern()) {
+        oprot.writeString(struct.tablesPattern);
+      }
     }
 
     @Override
@@ -1063,7 +1165,7 @@ package org.apache.hadoop.hive.metastore.api;
       org.apache.thrift.protocol.TTupleProtocol iprot = 
(org.apache.thrift.protocol.TTupleProtocol) prot;
       struct.dbName = iprot.readString();
       struct.setDbNameIsSet(true);
-      java.util.BitSet incoming = iprot.readBitSet(6);
+      java.util.BitSet incoming = iprot.readBitSet(7);
       if (incoming.get(0)) {
         {
           org.apache.thrift.protocol.TList _list1044 = new 
org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, 
iprot.readI32());
@@ -1108,6 +1210,10 @@ package org.apache.hadoop.hive.metastore.api;
         struct.projectionSpec.read(iprot);
         struct.setProjectionSpecIsSet(true);
       }
+      if (incoming.get(6)) {
+        struct.tablesPattern = iprot.readString();
+        struct.setTablesPatternIsSet(true);
+      }
     }
   }
 
diff --git 
a/standalone-metastore/metastore-common/src/gen/thrift/gen-php/metastore/GetTablesRequest.php
 
b/standalone-metastore/metastore-common/src/gen/thrift/gen-php/metastore/GetTablesRequest.php
index b2389f1..1c6f894 100644
--- 
a/standalone-metastore/metastore-common/src/gen/thrift/gen-php/metastore/GetTablesRequest.php
+++ 
b/standalone-metastore/metastore-common/src/gen/thrift/gen-php/metastore/GetTablesRequest.php
@@ -66,6 +66,11 @@ class GetTablesRequest
             'type' => TType::STRUCT,
             'class' => '\metastore\GetProjectionsSpec',
         ),
+        8 => array(
+            'var' => 'tablesPattern',
+            'isRequired' => false,
+            'type' => TType::STRING,
+        ),
     );
 
     /**
@@ -96,6 +101,10 @@ class GetTablesRequest
      * @var \metastore\GetProjectionsSpec
      */
     public $projectionSpec = null;
+    /**
+     * @var string
+     */
+    public $tablesPattern = null;
 
     public function __construct($vals = null)
     {
@@ -121,6 +130,9 @@ class GetTablesRequest
             if (isset($vals['projectionSpec'])) {
                 $this->projectionSpec = $vals['projectionSpec'];
             }
+            if (isset($vals['tablesPattern'])) {
+                $this->tablesPattern = $vals['tablesPattern'];
+            }
         }
     }
 
@@ -212,6 +224,13 @@ class GetTablesRequest
                         $xfer += $input->skip($ftype);
                     }
                     break;
+                case 8:
+                    if ($ftype == TType::STRING) {
+                        $xfer += $input->readString($this->tablesPattern);
+                    } else {
+                        $xfer += $input->skip($ftype);
+                    }
+                    break;
                 default:
                     $xfer += $input->skip($ftype);
                     break;
@@ -281,6 +300,11 @@ class GetTablesRequest
             $xfer += $this->projectionSpec->write($output);
             $xfer += $output->writeFieldEnd();
         }
+        if ($this->tablesPattern !== null) {
+            $xfer += $output->writeFieldBegin('tablesPattern', TType::STRING, 
8);
+            $xfer += $output->writeString($this->tablesPattern);
+            $xfer += $output->writeFieldEnd();
+        }
         $xfer += $output->writeFieldStop();
         $xfer += $output->writeStructEnd();
         return $xfer;
diff --git 
a/standalone-metastore/metastore-common/src/gen/thrift/gen-py/hive_metastore/ttypes.py
 
b/standalone-metastore/metastore-common/src/gen/thrift/gen-py/hive_metastore/ttypes.py
index e38a6f8..50458d9 100644
--- 
a/standalone-metastore/metastore-common/src/gen/thrift/gen-py/hive_metastore/ttypes.py
+++ 
b/standalone-metastore/metastore-common/src/gen/thrift/gen-py/hive_metastore/ttypes.py
@@ -18216,11 +18216,12 @@ class GetTablesRequest(object):
      - processorCapabilities
      - processorIdentifier
      - projectionSpec
+     - tablesPattern
 
     """
 
 
-    def __init__(self, dbName=None, tblNames=None, capabilities=None, 
catName=None, processorCapabilities=None, processorIdentifier=None, 
projectionSpec=None,):
+    def __init__(self, dbName=None, tblNames=None, capabilities=None, 
catName=None, processorCapabilities=None, processorIdentifier=None, 
projectionSpec=None, tablesPattern=None,):
         self.dbName = dbName
         self.tblNames = tblNames
         self.capabilities = capabilities
@@ -18228,6 +18229,7 @@ class GetTablesRequest(object):
         self.processorCapabilities = processorCapabilities
         self.processorIdentifier = processorIdentifier
         self.projectionSpec = projectionSpec
+        self.tablesPattern = tablesPattern
 
     def read(self, iprot):
         if iprot._fast_decode is not None and isinstance(iprot.trans, 
TTransport.CReadableTransport) and self.thrift_spec is not None:
@@ -18285,6 +18287,11 @@ class GetTablesRequest(object):
                     self.projectionSpec.read(iprot)
                 else:
                     iprot.skip(ftype)
+            elif fid == 8:
+                if ftype == TType.STRING:
+                    self.tablesPattern = iprot.readString().decode('utf-8') if 
sys.version_info[0] == 2 else iprot.readString()
+                else:
+                    iprot.skip(ftype)
             else:
                 iprot.skip(ftype)
             iprot.readFieldEnd()
@@ -18329,6 +18336,10 @@ class GetTablesRequest(object):
             oprot.writeFieldBegin('projectionSpec', TType.STRUCT, 7)
             self.projectionSpec.write(oprot)
             oprot.writeFieldEnd()
+        if self.tablesPattern is not None:
+            oprot.writeFieldBegin('tablesPattern', TType.STRING, 8)
+            oprot.writeString(self.tablesPattern.encode('utf-8') if 
sys.version_info[0] == 2 else self.tablesPattern)
+            oprot.writeFieldEnd()
         oprot.writeFieldStop()
         oprot.writeStructEnd()
 
@@ -29493,6 +29504,7 @@ GetTablesRequest.thrift_spec = (
     (5, TType.LIST, 'processorCapabilities', (TType.STRING, 'UTF8', False), 
None, ),  # 5
     (6, TType.STRING, 'processorIdentifier', 'UTF8', None, ),  # 6
     (7, TType.STRUCT, 'projectionSpec', [GetProjectionsSpec, None], None, ),  
# 7
+    (8, TType.STRING, 'tablesPattern', 'UTF8', None, ),  # 8
 )
 all_structs.append(GetTablesResult)
 GetTablesResult.thrift_spec = (
diff --git 
a/standalone-metastore/metastore-common/src/gen/thrift/gen-rb/hive_metastore_types.rb
 
b/standalone-metastore/metastore-common/src/gen/thrift/gen-rb/hive_metastore_types.rb
index 61bb225..00f1ced 100644
--- 
a/standalone-metastore/metastore-common/src/gen/thrift/gen-rb/hive_metastore_types.rb
+++ 
b/standalone-metastore/metastore-common/src/gen/thrift/gen-rb/hive_metastore_types.rb
@@ -5185,6 +5185,7 @@ class GetTablesRequest
   PROCESSORCAPABILITIES = 5
   PROCESSORIDENTIFIER = 6
   PROJECTIONSPEC = 7
+  TABLESPATTERN = 8
 
   FIELDS = {
     DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
@@ -5193,7 +5194,8 @@ class GetTablesRequest
     CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', 
:optional => true},
     PROCESSORCAPABILITIES => {:type => ::Thrift::Types::LIST, :name => 
'processorCapabilities', :element => {:type => ::Thrift::Types::STRING}, 
:optional => true},
     PROCESSORIDENTIFIER => {:type => ::Thrift::Types::STRING, :name => 
'processorIdentifier', :optional => true},
-    PROJECTIONSPEC => {:type => ::Thrift::Types::STRUCT, :name => 
'projectionSpec', :class => ::GetProjectionsSpec, :optional => true}
+    PROJECTIONSPEC => {:type => ::Thrift::Types::STRUCT, :name => 
'projectionSpec', :class => ::GetProjectionsSpec, :optional => true},
+    TABLESPATTERN => {:type => ::Thrift::Types::STRING, :name => 
'tablesPattern', :optional => true}
   }
 
   def struct_fields; FIELDS; end
diff --git 
a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
 
b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 9933161..a09a01e 100644
--- 
a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ 
b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -2556,8 +2556,28 @@ public class HiveMetaStoreClient implements 
IMetaStoreClient, AutoCloseable {
   @Override
   public List<String> getTables(String catName, String dbName, String 
tablePattern)
       throws TException {
-    List<String> tables = client.get_tables(prependCatalogToDbName(catName, 
dbName, conf), tablePattern);
-    return FilterUtils.filterTableNamesIfEnabled(isClientFilterEnabled, 
filterHook, catName, dbName, tables);
+    List<String> tables = new ArrayList<>();
+    GetProjectionsSpec projectionsSpec = new GetProjectionsSpec();
+    projectionsSpec.setFieldList(Arrays.asList("dbName", "tableName", "owner", 
"ownerType"));
+    GetTablesRequest req = new GetTablesRequest(dbName);
+    req.setCatName(catName);
+    req.setCapabilities(version);
+    req.setTblNames(null);
+    if(tablePattern == null){
+      tablePattern = ".*";
+    }
+    req.setTablesPattern(tablePattern);
+    if (processorCapabilities != null)
+      req.setProcessorCapabilities(new 
ArrayList<String>(Arrays.asList(processorCapabilities)));
+    if (processorIdentifier != null)
+      req.setProcessorIdentifier(processorIdentifier);
+    req.setProjectionSpec(projectionsSpec);
+    List<Table> tableObjects = 
client.get_table_objects_by_name_req(req).getTables();
+    tableObjects = 
deepCopyTables(FilterUtils.filterTablesIfEnabled(isClientFilterEnabled, 
filterHook, tableObjects));
+    for (Table tbl : tableObjects) {
+      tables.add(tbl.getTableName());
+    }
+    return tables;
   }
 
   @Override
diff --git 
a/standalone-metastore/metastore-common/src/main/thrift/hive_metastore.thrift 
b/standalone-metastore/metastore-common/src/main/thrift/hive_metastore.thrift
index 5651743..d9021f9 100644
--- 
a/standalone-metastore/metastore-common/src/main/thrift/hive_metastore.thrift
+++ 
b/standalone-metastore/metastore-common/src/main/thrift/hive_metastore.thrift
@@ -1527,7 +1527,8 @@ struct GetTablesRequest {
   4: optional string catName,
   5: optional list<string> processorCapabilities,
   6: optional string processorIdentifier,
-  7: optional GetProjectionsSpec projectionSpec
+  7: optional GetProjectionsSpec projectionSpec,
+  8: optional string tablesPattern
 }
 
 struct GetTablesResult {
diff --git 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java
 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java
index e87af7a..5582f9e 100644
--- 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java
+++ 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java
@@ -3778,20 +3778,20 @@ public class HMSHandler extends FacebookBase implements 
IHMSHandler {
   public List<Table> get_table_objects_by_name(final String dbName, final 
List<String> tableNames)
       throws MetaException, InvalidOperationException, UnknownDBException {
     String[] parsedDbName = parseDbName(dbName, conf);
-    return getTableObjectsInternal(parsedDbName[CAT_NAME], 
parsedDbName[DB_NAME], tableNames, null, null);
+    return getTableObjectsInternal(parsedDbName[CAT_NAME], 
parsedDbName[DB_NAME], tableNames, null, null, null);
   }
 
   @Override
   public GetTablesResult get_table_objects_by_name_req(GetTablesRequest req) 
throws TException {
     String catName = req.isSetCatName() ? req.getCatName() : 
getDefaultCatalog(conf);
     return new GetTablesResult(getTableObjectsInternal(catName, 
req.getDbName(),
-        req.getTblNames(), req.getCapabilities(), req.getProjectionSpec()));
+        req.getTblNames(), req.getCapabilities(), req.getProjectionSpec(), 
req.getTablesPattern()));
   }
 
   private List<Table> getTableObjectsInternal(String catName, String dbName,
                                               List<String> tableNames,
                                               ClientCapabilities capabilities,
-                                              GetProjectionsSpec 
projectionsSpec)
+                                              GetProjectionsSpec 
projectionsSpec, String tablePattern)
       throws MetaException, InvalidOperationException, UnknownDBException {
     if (isInTest) {
       assertClientHasCapability(capabilities, ClientCapability.TEST_CAPABILITY,
@@ -3815,31 +3815,35 @@ public class HMSHandler extends FacebookBase implements 
IHMSHandler {
       if (dbName == null || dbName.isEmpty()) {
         throw new UnknownDBException("DB name is null or empty");
       }
-      if (tableNames == null) {
-        throw new InvalidOperationException(dbName + " cannot find null 
tables");
-      }
-
-      // The list of table names could contain duplicates. 
RawStore.getTableObjectsByName()
-      // only guarantees returning no duplicate table objects in one batch. If 
we need
-      // to break into multiple batches, remove duplicates first.
-      List<String> distinctTableNames = tableNames;
-      if (distinctTableNames.size() > tableBatchSize) {
-        List<String> lowercaseTableNames = new ArrayList<>();
-        for (String tableName : tableNames) {
-          
lowercaseTableNames.add(org.apache.hadoop.hive.metastore.utils.StringUtils.normalizeIdentifier(tableName));
+      RawStore ms = getMS();
+      if(tablePattern != null){
+        tables = ms.getTableObjectsByName(catName, dbName, tableNames, 
projectionsSpec, tablePattern);
+      }else {
+        if (tableNames == null) {
+          throw new InvalidOperationException(dbName + " cannot find null 
tables");
+        }
+
+        // The list of table names could contain duplicates. 
RawStore.getTableObjectsByName()
+        // only guarantees returning no duplicate table objects in one batch. 
If we need
+        // to break into multiple batches, remove duplicates first.
+        List<String> distinctTableNames = tableNames;
+        if (distinctTableNames.size() > tableBatchSize) {
+          List<String> lowercaseTableNames = new ArrayList<>();
+          for (String tableName : tableNames) {
+            
lowercaseTableNames.add(org.apache.hadoop.hive.metastore.utils.StringUtils.normalizeIdentifier(tableName));
+          }
+          distinctTableNames = new ArrayList<>(new 
HashSet<>(lowercaseTableNames));
         }
-        distinctTableNames = new ArrayList<>(new 
HashSet<>(lowercaseTableNames));
-      }
 
-      RawStore ms = getMS();
-      int startIndex = 0;
-      // Retrieve the tables from the metastore in batches. Some databases like
-      // Oracle cannot have over 1000 expressions in a in-list
-      while (startIndex < distinctTableNames.size()) {
-        int endIndex = Math.min(startIndex + tableBatchSize, 
distinctTableNames.size());
-        tables.addAll(ms.getTableObjectsByName(catName, dbName, 
distinctTableNames.subList(
-            startIndex, endIndex), projectionsSpec));
-        startIndex = endIndex;
+        int startIndex = 0;
+        // Retrieve the tables from the metastore in batches. Some databases 
like
+        // Oracle cannot have over 1000 expressions in a in-list
+        while (startIndex < distinctTableNames.size()) {
+          int endIndex = Math.min(startIndex + tableBatchSize, 
distinctTableNames.size());
+          tables.addAll(ms.getTableObjectsByName(catName, dbName, 
distinctTableNames.subList(
+                  startIndex, endIndex), projectionsSpec, tablePattern));
+          startIndex = endIndex;
+        }
       }
       for (Table t : tables) {
         if (t.getParameters() != null && 
MetaStoreUtils.isInsertOnlyTableParam(t.getParameters())) {
@@ -3848,7 +3852,7 @@ public class HMSHandler extends FacebookBase implements 
IHMSHandler {
         }
       }
 
-      FilterUtils.filterTablesIfEnabled(isServerFilterEnabled, filterHook, 
tables);
+      tables = FilterUtils.filterTablesIfEnabled(isServerFilterEnabled, 
filterHook, tables);
     } catch (MetaException | InvalidOperationException | UnknownDBException e) 
{
       ex = e;
       throw e;
diff --git 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreChecker.java
 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreChecker.java
index 4c17570..823f558 100644
--- 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreChecker.java
+++ 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreChecker.java
@@ -63,6 +63,7 @@ import 
org.apache.hadoop.hive.metastore.api.MetastoreException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.metastore.txn.TxnUtils;
 import org.apache.hadoop.hive.metastore.utils.FileUtils;
@@ -140,7 +141,12 @@ public class HiveMetaStoreChecker {
       if (tableName == null || "".equals(tableName)) {
         // TODO: I do not think this is used by anything other than tests
         // no table specified, check all tables and all partitions.
-        List<String> tables = getMsc().getTables(catName, dbName, ".*");
+        List<String> tables = new ArrayList<>();
+        try{
+          tables = getMsc().getTables(catName, dbName, ".*");
+        }catch(UnknownDBException ex){
+          // Ignore the exception: the database may not exist, in which case there are no tables to check.
+        }
         for (String currentTableName : tables) {
           checkTable(catName, dbName, currentTableName, null, null, result);
         }
diff --git 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 8f6e7be..c238e85 100644
--- 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -2061,7 +2061,7 @@ public class ObjectStore implements RawStore, 
Configurable {
 
   @Override
   public List<Table> getTableObjectsByName(String catName, String db, 
List<String> tbl_names,
-      GetProjectionsSpec projectionSpec) throws MetaException, 
UnknownDBException {
+      GetProjectionsSpec projectionSpec, String tablePattern) throws 
MetaException, UnknownDBException {
     List<Table> tables = new ArrayList<>();
     boolean committed = false;
     Query query = null;
@@ -2072,15 +2072,25 @@ public class ObjectStore implements RawStore, 
Configurable {
       db = normalizeIdentifier(db);
       catName = normalizeIdentifier(catName);
 
-      List<String> lowered_tbl_names = new ArrayList<>(tbl_names.size());
-      for (String t : tbl_names) {
-        lowered_tbl_names.add(normalizeIdentifier(t));
+      List<String> lowered_tbl_names = new ArrayList<>();
+      if(tbl_names != null) {
+        lowered_tbl_names = new ArrayList<>(tbl_names.size());
+        for (String t : tbl_names) {
+          lowered_tbl_names.add(normalizeIdentifier(t));
+        }
       }
 
-      query = pm.newQuery(MTable.class);
-      query.setFilter("database.name == db && database.catalogName == cat && 
tbl_names.contains(tableName)");
-      query.declareParameters("java.lang.String db, java.lang.String cat, 
java.util.Collection tbl_names");
-
+      StringBuilder filterBuilder = new StringBuilder();
+      List<String> parameterVals = new ArrayList<>();
+      appendSimpleCondition(filterBuilder, "database.name", new String[] {db}, 
parameterVals);
+      appendSimpleCondition(filterBuilder, "database.catalogName", new 
String[] {catName}, parameterVals);
+      if(tbl_names != null){
+        appendSimpleCondition(filterBuilder, "tableName", 
lowered_tbl_names.toArray(new String[0]), parameterVals);
+      }
+      if(tablePattern != null){
+        appendPatternCondition(filterBuilder, "tableName", tablePattern, 
parameterVals);
+      }
+      query = pm.newQuery(MTable.class, filterBuilder.toString()) ;
       List<String> projectionFields = null;
 
       // If a projection specification has been set, validate it and translate 
it to JDO columns.
@@ -2096,11 +2106,11 @@ public class ObjectStore implements RawStore, 
Configurable {
       }
 
       if (projectionFields == null) {
-        mtables = (List<MTable>) query.execute(db, catName, lowered_tbl_names);
+        mtables = (List<MTable>) 
query.executeWithArray(parameterVals.toArray(new String[parameterVals.size()]));
       } else {
         if (projectionFields.size() > 1) {
           // Execute the query to fetch the partial results.
-          List<Object[]> results = (List<Object[]>) query.execute(db, catName, 
lowered_tbl_names);
+          List<Object[]> results = (List<Object[]>) 
query.executeWithArray(parameterVals.toArray(new String[parameterVals.size()]));
           // Declare the tables array to return the list of tables
           mtables = new ArrayList<>(results.size());
           // Iterate through each row of the result and create the MTable 
object.
@@ -2115,7 +2125,7 @@ public class ObjectStore implements RawStore, 
Configurable {
           }
         } else if (projectionFields.size() == 1) {
           // Execute the query to fetch the partial results.
-          List<Object> results = (List<Object>) query.execute(db, catName, 
lowered_tbl_names);
+          List<Object[]> results = (List<Object[]>) 
query.executeWithArray(parameterVals.toArray(new String[parameterVals.size()]));
           // Iterate through each row of the result and create the MTable 
object.
           mtables = new ArrayList<>(results.size());
           for (Object row : results) {
@@ -2162,7 +2172,7 @@ public class ObjectStore implements RawStore, 
Configurable {
   @Override
   public List<Table> getTableObjectsByName(String catName, String db, 
List<String> tbl_names)
           throws MetaException, UnknownDBException {
-    return getTableObjectsByName(catName, db, tbl_names, null);
+    return getTableObjectsByName(catName, db, tbl_names, null, null);
   }
 
   /** Makes shallow copy of a list to avoid DataNucleus mucking with our 
objects. */
diff --git 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java
 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java
index 4a15345..27cf3f7 100644
--- 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java
+++ 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java
@@ -579,7 +579,7 @@ public interface RawStore extends Configurable {
    * @throws MetaException failure in querying the RDBMS.
    */
   List<Table> getTableObjectsByName(String catName, String dbname, 
List<String> tableNames,
-                                    GetProjectionsSpec projectionSpec) throws 
MetaException, UnknownDBException;
+                                    GetProjectionsSpec projectionSpec, String 
tablePattern) throws MetaException, UnknownDBException;
 
   /**
    * Get all tables in a database.
diff --git 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
index 253031d..0fcc782 100644
--- 
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
+++ 
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
@@ -1561,8 +1561,8 @@ public class CachedStore implements RawStore, 
Configurable {
 
   @Override
   public List<Table> getTableObjectsByName(String catName, String db, 
List<String> tbl_names,
-          GetProjectionsSpec projectionsSpec) throws MetaException, 
UnknownDBException {
-    return getTableObjectsByName(catName, db, tbl_names, null);
+          GetProjectionsSpec projectionsSpec, String tablePattern) throws 
MetaException, UnknownDBException {
+    return rawStore.getTableObjectsByName(catName, db, tbl_names, 
projectionsSpec, tablePattern);
   }
 
   @Override public List<String> getAllTables(String catName, String dbName) 
throws MetaException {
diff --git 
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
 
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
index 1cbaeeb..04bd911 100644
--- 
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
+++ 
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
@@ -387,7 +387,7 @@ public class DummyRawStoreControlledCommit implements 
RawStore, Configurable {
 
   @Override
   public List<Table> getTableObjectsByName(String catName, String dbname, 
List<String> tableNames,
-                                           GetProjectionsSpec projectionSpec) 
throws MetaException, UnknownDBException {
+                                           GetProjectionsSpec projectionSpec, 
String tablePattern) throws MetaException, UnknownDBException {
     return Collections.emptyList();
   }
 
diff --git 
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
 
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
index ea789ff..8b321f4 100644
--- 
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
+++ 
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
@@ -398,7 +398,7 @@ public class DummyRawStoreForJdoConnection implements 
RawStore {
 
   @Override
   public List<Table> getTableObjectsByName(String catName, String dbname, 
List<String> tableNames,
-          GetProjectionsSpec projectionSpec) throws MetaException, 
UnknownDBException {
+          GetProjectionsSpec projectionSpec, String tablePattern) throws 
MetaException, UnknownDBException {
 
     return Collections.emptyList();
   }
diff --git 
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
 
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
index c53f1bc..9743c92 100644
--- 
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
+++ 
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
@@ -2197,7 +2197,7 @@ public abstract class TestHiveMetaStore {
         client.dropTable(dbName, tableName);
       }
       client.dropDatabase(dbName);
-    } catch (NoSuchObjectException|InvalidOperationException e) {
+    } catch (NoSuchObjectException|InvalidOperationException|MetaException e) {
       // NOP
     }
   }
diff --git 
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/client/TestTablesGetExists.java
 
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/client/TestTablesGetExists.java
index 3f1bef0..273054c 100644
--- 
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/client/TestTablesGetExists.java
+++ 
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/client/TestTablesGetExists.java
@@ -284,8 +284,11 @@ public class TestTablesGetExists extends 
MetaStoreClientTest {
     Assert.assertEquals("No such functions size", 0, tables.size());
 
     // No such database
-    tables = client.getTables("no_such_database", OTHER_DATABASE);
-    Assert.assertEquals("No such table size", 0, tables.size());
+    try {
+      tables = client.getTables("no_such_database", OTHER_DATABASE);
+    }catch (MetaException exception) {
+      // Ignore the expected exception for the nonexistent database.
+    }
   }
 
   @Test
@@ -739,9 +742,9 @@ public class TestTablesGetExists extends 
MetaStoreClientTest {
     Assert.assertFalse("Table not exists", client.tableExists(catName, dbName, 
"non_existing_table"));
   }
 
-  @Test
+  @Test(expected = UnknownDBException.class)
   public void getTablesBogusCatalog() throws TException {
-    Assert.assertEquals(0, client.getTables("nosuch", DEFAULT_DATABASE_NAME, 
"*_to_find_*").size());
+    client.getTables("nosuch", DEFAULT_DATABASE_NAME, "*_to_find_*");
   }
 
   @Test

Reply via email to