This is an automated email from the ASF dual-hosted git repository.

csringhofer pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/impala.git


The following commit(s) were added to refs/heads/master by this push:
     new cd82ac735 IMPALA-14341: Iceberg MetaProviders Blacklist DBs/Tables
cd82ac735 is described below

commit cd82ac735d06f85665043558ff90007df082c0c3
Author: jasonmfehr <[email protected]>
AuthorDate: Thu Dec 18 16:32:40 2025 -0800

    IMPALA-14341: Iceberg MetaProviders Blacklist DBs/Tables
    
    Databases and tables managed by Iceberg Rest Catalogs are
    now filtered based on the blacklisted dbs and tables
    startup parameters.
    
    Testing was accomplished by new JUnit tests to exercise
    Java code and custom cluster tests that run queries against
    blacklisted dbs and tables.
    
    Generated-by: Github Copilot (Claude Sonnet 4.5)
    Change-Id: I73a06d30dcdbb579f92c2afa5928beb6c5a13348
    Reviewed-on: http://gerrit.cloudera.org:8080/23833
    Tested-by: Impala Public Jenkins <[email protected]>
    Reviewed-by: Zoltan Borok-Nagy <[email protected]>
---
 fe/pom.xml                                         |   1 +
 .../impala/catalog/CatalogServiceCatalog.java      |  28 +-
 .../catalog/local/BlacklistingMetaProvider.java    |  68 +++++
 .../catalog/local/MetaProviderDecorator.java       | 166 ++++++++++++
 .../impala/service/catalogmanager/LocalImpl.java   |   6 +-
 .../apache/impala/util/CatalogBlacklistUtils.java  | 186 ++++++++++++--
 .../local/BlacklistingMetaProviderTest.java        | 186 ++++++++++++++
 .../catalog/local/MetaProviderDecoratorTest.java   | 284 +++++++++++++++++++++
 .../impala/util/CatalogBlacklistUtilsTest.java     | 257 ++++++++++++++-----
 .../iceberg-rest-catalog-blacklist-db.test         |  32 +++
 .../iceberg-rest-catalog-blacklist-tables.test     |  27 ++
 tests/custom_cluster/test_iceberg_rest_catalog.py  |  38 +++
 tests/custom_cluster/test_sys_db.py                |   3 +-
 13 files changed, 1171 insertions(+), 111 deletions(-)

diff --git a/fe/pom.xml b/fe/pom.xml
index 25aa141fb..7f9b9d312 100644
--- a/fe/pom.xml
+++ b/fe/pom.xml
@@ -762,6 +762,7 @@ under the License.
                         -->
                 <source>${project.basedir}/generated-sources/gen-java</source>
                 
<source>${project.build.directory}/generated-sources/cup</source>
+                
<source>${project.build.directory}/generated-sources/jflex</source>
                 
<source>${project.basedir}/src/compat-${hive.dist.type}/java</source>
                </sources>
             </configuration>
diff --git 
a/fe/src/main/java/org/apache/impala/catalog/CatalogServiceCatalog.java 
b/fe/src/main/java/org/apache/impala/catalog/CatalogServiceCatalog.java
index 6706bc7ec..f3402d2d3 100644
--- a/fe/src/main/java/org/apache/impala/catalog/CatalogServiceCatalog.java
+++ b/fe/src/main/java/org/apache/impala/catalog/CatalogServiceCatalog.java
@@ -140,7 +140,6 @@ import org.apache.impala.thrift.TPrivilege;
 import org.apache.impala.thrift.TResetMetadataRequest;
 import org.apache.impala.thrift.TSetEventProcessorStatusResponse;
 import org.apache.impala.thrift.TStatus;
-import org.apache.impala.thrift.TSystemTableName;
 import org.apache.impala.thrift.TTable;
 import org.apache.impala.thrift.TTableName;
 import org.apache.impala.thrift.TTableType;
@@ -378,11 +377,6 @@ public class CatalogServiceCatalog extends Catalog {
 
   private AuthorizationManager authzManager_;
 
-  // Databases that will be skipped in loading.
-  private final Set<String> blacklistedDbs_;
-  // Tables that will be skipped in loading.
-  private final Set<TableName> blacklistedTables_;
-
   // Table properties that require file metadata reload
   private final Set<String> whitelistedTblProperties_;
 
@@ -408,8 +402,6 @@ public class CatalogServiceCatalog extends Catalog {
   // True if initial reset() has been triggered internally.
   private boolean triggeredInitialReset_ = false;
 
-  private final List<String> impalaSysTables;
-
   /**
    * Initialize the CatalogServiceCatalog using a given MetastoreClientPool 
impl.
    *
@@ -422,10 +414,6 @@ public class CatalogServiceCatalog extends Catalog {
       String localLibraryPath, MetaStoreClientPool metaStoreClientPool)
       throws ImpalaException {
     super(metaStoreClientPool);
-    blacklistedDbs_ = CatalogBlacklistUtils.parseBlacklistedDbs(
-        BackendConfig.INSTANCE.getBlacklistedDbs(), LOG);
-    blacklistedTables_ = CatalogBlacklistUtils.parseBlacklistedTables(
-        BackendConfig.INSTANCE.getBlacklistedTables(), LOG);
     maxSkippedUpdatesLockContention_ = BackendConfig.INSTANCE
         .getBackendCfg().catalog_max_lock_skipped_topic_updates;
     Preconditions.checkState(maxSkippedUpdatesLockContention_ > 0,
@@ -434,9 +422,6 @@ public class CatalogServiceCatalog extends Catalog {
         .getBackendCfg().topic_update_tbl_max_wait_time_ms;
     Preconditions.checkState(topicUpdateTblLockMaxWaitTimeMs_ >= 0,
         "topic_update_tbl_max_wait_time_ms must be positive");
-    impalaSysTables = Arrays.asList(
-        BackendConfig.INSTANCE.queryLogTableName(),
-        TSystemTableName.IMPALA_QUERY_LIVE.toString().toLowerCase());
     tableLoadingMgr_ = new TableLoadingMgr(this, numLoadingThreads);
     loadInBackground_ = loadInBackground;
     try {
@@ -577,12 +562,7 @@ public class CatalogServiceCatalog extends Catalog {
   }
 
   protected boolean isBlacklistedDbInternal(String loweredDbName) {
-    if (BackendConfig.INSTANCE.enableWorkloadMgmt()
-        && loweredDbName.equalsIgnoreCase(Db.SYS)) {
-      // Override 'sys' for Impala system tables.
-      return false;
-    }
-    return blacklistedDbs_.contains(loweredDbName);
+    return CatalogBlacklistUtils.isDbBlacklisted(loweredDbName);
   }
 
   /**
@@ -590,11 +570,7 @@ public class CatalogServiceCatalog extends Catalog {
    */
   public boolean isBlacklistedTable(TableName table) {
     Preconditions.checkNotNull(table);
-    if (table.getDb().equalsIgnoreCase(Db.SYS) && 
blacklistedDbs_.contains(Db.SYS)) {
-      // If we've overridden the database blacklist, only allow Impala system 
tables.
-      return !impalaSysTables.contains(table.getTbl());
-    }
-    return blacklistedTables_.contains(table);
+    return CatalogBlacklistUtils.isTableBlacklisted(table);
   }
 
   /**
diff --git 
a/fe/src/main/java/org/apache/impala/catalog/local/BlacklistingMetaProvider.java
 
b/fe/src/main/java/org/apache/impala/catalog/local/BlacklistingMetaProvider.java
new file mode 100644
index 000000000..93f3f159c
--- /dev/null
+++ 
b/fe/src/main/java/org/apache/impala/catalog/local/BlacklistingMetaProvider.java
@@ -0,0 +1,68 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.impala.catalog.local;
+
+import static 
org.apache.impala.util.CatalogBlacklistUtils.getBlacklistedDbsCount;
+import static 
org.apache.impala.util.CatalogBlacklistUtils.getBlacklistedTablesCount;
+import static 
org.apache.impala.util.CatalogBlacklistUtils.getBlacklistedTablesDbs;
+import static org.apache.impala.util.CatalogBlacklistUtils.isDbBlacklisted;
+import static org.apache.impala.util.CatalogBlacklistUtils.isTableBlacklisted;
+
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.UnknownDBException;
+import org.apache.impala.thrift.TBriefTableMeta;
+import org.apache.thrift.TException;
+
+import com.google.common.collect.ImmutableCollection;
+import com.google.common.collect.ImmutableList;
+
+/**
+ * A {@link MetaProvider} that decorates another {@link MetaProvider} adding 
functionality
+ * to filter out blacklisted databases and tables based on the blacklists 
defined in
+ * {@link CatalogBlacklistUtils}.
+ */
+public class BlacklistingMetaProvider extends MetaProviderDecorator {
+
+  public BlacklistingMetaProvider(final MetaProvider delegate) {
+    super(delegate);
+  }
+
+  @Override
+  public ImmutableList<String> loadDbList() throws TException {
+    if (getBlacklistedDbsCount() == 0) {
+      return super.loadDbList();
+    }
+
+    return super.loadDbList().stream().filter(
+        dbName -> 
!isDbBlacklisted(dbName)).collect(ImmutableList.toImmutableList());
+  }
+
+  @Override
+  public ImmutableCollection<TBriefTableMeta> loadTableList(String dbName)
+      throws MetaException, UnknownDBException, TException {
+    if (getBlacklistedTablesCount() == 0
+        || !getBlacklistedTablesDbs().contains(dbName.toLowerCase())) {
+      return super.loadTableList(dbName);
+    }
+
+    return super.loadTableList(dbName).stream().filter(
+        tableMeta -> !isTableBlacklisted(dbName, tableMeta.getName()))
+            .collect(ImmutableList.toImmutableList());
+  }
+
+}
diff --git 
a/fe/src/main/java/org/apache/impala/catalog/local/MetaProviderDecorator.java 
b/fe/src/main/java/org/apache/impala/catalog/local/MetaProviderDecorator.java
new file mode 100644
index 000000000..b8fa1ec0b
--- /dev/null
+++ 
b/fe/src/main/java/org/apache/impala/catalog/local/MetaProviderDecorator.java
@@ -0,0 +1,166 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.impala.catalog.local;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.UnknownDBException;
+import org.apache.impala.authorization.AuthorizationPolicy;
+import org.apache.impala.catalog.CatalogException;
+import org.apache.impala.catalog.DataSource;
+import org.apache.impala.catalog.Function;
+import org.apache.impala.catalog.HdfsCachePool;
+import org.apache.impala.catalog.SqlConstraints;
+import org.apache.impala.catalog.local.LocalIcebergTable.TableParams;
+import org.apache.impala.common.Pair;
+import org.apache.impala.thrift.TBriefTableMeta;
+import org.apache.impala.thrift.TNetworkAddress;
+import org.apache.impala.thrift.TPartialTableInfo;
+import org.apache.impala.thrift.TValidWriteIdList;
+import org.apache.impala.util.ListMap;
+import org.apache.thrift.TException;
+
+import com.google.common.collect.ImmutableCollection;
+import com.google.common.collect.ImmutableList;
+
+/**
+ * A MetaProvider that decorates another MetaProvider. All methods simply 
delegate to
+ * the wrapped MetaProvider. This class can be extended to override specific
+ * methods to add additional functionality.
+ *
+ * This class implements the decorator design pattern.
+ */
+public abstract class MetaProviderDecorator implements MetaProvider {
+  private final MetaProvider decoratedObj_;
+
+  protected MetaProviderDecorator(final MetaProvider decoratedOb) {
+    this.decoratedObj_ = decoratedOb;
+  }
+
+  public String getURI() {
+    return this.decoratedObj_.getURI();
+  }
+
+  public AuthorizationPolicy getAuthPolicy() {
+    return this.decoratedObj_.getAuthPolicy();
+  }
+
+  public boolean isReady() {
+    return this.decoratedObj_.isReady();
+  }
+
+  public void waitForIsReady(long timeoutMs) {
+    this.decoratedObj_.waitForIsReady(timeoutMs);
+  }
+
+  public void setIsReady(boolean isReady) {
+    this.decoratedObj_.setIsReady(isReady);
+  }
+
+  public ImmutableList<String> loadDbList() throws TException {
+    return this.decoratedObj_.loadDbList();
+  }
+
+  public Database loadDb(String dbName) throws TException {
+    return this.decoratedObj_.loadDb(dbName);
+  }
+
+  public ImmutableCollection<TBriefTableMeta> loadTableList(String dbName)
+      throws MetaException, UnknownDBException, TException {
+    return this.decoratedObj_.loadTableList(dbName);
+  }
+
+  public Pair<Table, TableMetaRef> loadTable(String dbName, String tableName)
+      throws NoSuchObjectException, MetaException, TException {
+    return this.decoratedObj_.loadTable(dbName, tableName);
+  }
+
+  public Pair<Table, TableMetaRef> getTableIfPresent(String dbName, String 
tableName) {
+    return this.decoratedObj_.getTableIfPresent(dbName, tableName);
+  }
+
+  public String loadNullPartitionKeyValue()
+      throws MetaException, TException {
+    return this.decoratedObj_.loadNullPartitionKeyValue();
+  }
+
+  public List<PartitionRef> loadPartitionList(TableMetaRef table)
+      throws MetaException, TException {
+    return this.decoratedObj_.loadPartitionList(table);
+  }
+
+  public SqlConstraints loadConstraints(TableMetaRef table,
+      Table msTbl) throws MetaException, TException {
+    return this.decoratedObj_.loadConstraints(table, msTbl);
+  }
+
+  public List<String> loadFunctionNames(String dbName) throws TException {
+    return this.decoratedObj_.loadFunctionNames(dbName);
+  }
+
+  public ImmutableList<Function> loadFunction(String dbName, String 
functionName)
+      throws TException {
+    return this.decoratedObj_.loadFunction(dbName, functionName);
+  }
+
+  public ImmutableList<DataSource> loadDataSources() throws TException {
+    return this.decoratedObj_.loadDataSources();
+  }
+
+  public DataSource loadDataSource(String dsName) throws TException {
+    return this.decoratedObj_.loadDataSource(dsName);
+  }
+
+  public Map<String, PartitionMetadata> loadPartitionsByRefs(TableMetaRef 
table,
+      List<String> partitionColumnNames, ListMap<TNetworkAddress> hostIndex,
+      List<PartitionRef> partitionRefs)
+      throws MetaException, TException, CatalogException {
+    return this.decoratedObj_.loadPartitionsByRefs(table, 
partitionColumnNames, hostIndex,
+        partitionRefs);
+  }
+
+  public List<ColumnStatisticsObj> loadTableColumnStatistics(TableMetaRef 
table,
+      List<String> colNames) throws TException {
+    return this.decoratedObj_.loadTableColumnStatistics(table, colNames);
+  }
+
+  public TPartialTableInfo loadIcebergTable(
+      final TableMetaRef table) throws TException {
+    return this.decoratedObj_.loadIcebergTable(table);
+  }
+
+  public org.apache.iceberg.Table loadIcebergApiTable(
+      final TableMetaRef table, TableParams param, Table msTable) throws 
TException {
+    return this.decoratedObj_.loadIcebergApiTable(table, param, msTable);
+  }
+
+  public TValidWriteIdList getValidWriteIdList(TableMetaRef ref) {
+    return this.decoratedObj_.getValidWriteIdList(ref);
+  }
+
+  public Iterable<HdfsCachePool> getHdfsCachePools() {
+    return this.decoratedObj_.getHdfsCachePools();
+  }
+
+}
diff --git 
a/fe/src/main/java/org/apache/impala/service/catalogmanager/LocalImpl.java 
b/fe/src/main/java/org/apache/impala/service/catalogmanager/LocalImpl.java
index de53c13dd..d92ffcc14 100644
--- a/fe/src/main/java/org/apache/impala/service/catalogmanager/LocalImpl.java
+++ b/fe/src/main/java/org/apache/impala/service/catalogmanager/LocalImpl.java
@@ -20,9 +20,10 @@ import java.io.File;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Properties;
-import java.util.stream.Collectors;
+
 import org.apache.iceberg.exceptions.RESTException;
 import org.apache.impala.catalog.FeCatalog;
+import org.apache.impala.catalog.local.BlacklistingMetaProvider;
 import org.apache.impala.catalog.local.CatalogdMetaProvider;
 import org.apache.impala.catalog.local.IcebergMetaProvider;
 import org.apache.impala.catalog.local.LocalCatalog;
@@ -104,7 +105,8 @@ class LocalImpl extends FeCatalogManager {
     List<MetaProvider> list = new ArrayList<>();
     for (Properties properties : loader.loadConfigs()) {
       try {
-        IcebergMetaProvider icebergMetaProvider = new 
IcebergMetaProvider(properties);
+        MetaProvider icebergMetaProvider =
+            new BlacklistingMetaProvider(new IcebergMetaProvider(properties));
         list.add(icebergMetaProvider);
       } catch (RESTException e) {
         LOG.error(String.format(
diff --git a/fe/src/main/java/org/apache/impala/util/CatalogBlacklistUtils.java 
b/fe/src/main/java/org/apache/impala/util/CatalogBlacklistUtils.java
index b8777e320..c5226afea 100644
--- a/fe/src/main/java/org/apache/impala/util/CatalogBlacklistUtils.java
+++ b/fe/src/main/java/org/apache/impala/util/CatalogBlacklistUtils.java
@@ -17,38 +17,84 @@
 
 package org.apache.impala.util;
 
-import org.apache.impala.analysis.TableName;
-import org.apache.impala.catalog.Db;
-
 import java.util.Set;
+import java.util.stream.Collectors;
 
+import org.apache.impala.analysis.TableName;
+import org.apache.impala.catalog.Db;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.service.BackendConfig;
+import org.apache.impala.thrift.TSystemTableName;
 import org.slf4j.Logger;
 
 import com.google.common.base.Preconditions;
 import com.google.common.base.Splitter;
+import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Sets;
 
-public class CatalogBlacklistUtils {
-  private final static Set<String> BLACKLISTED_DBS =
-      CatalogBlacklistUtils.parseBlacklistedDbsFromConfigs();
-  private final static Set<TableName> BLACKLISTED_TABLES =
-      CatalogBlacklistUtils.parseBlacklistedTablesFromConfigs();
+public final class CatalogBlacklistUtils {
+  private static Set<String> BLACKLISTED_DBS;
+  private static Set<TableName> BLACKLISTED_TABLES;
+  private static Set<String> BLACKLISTED_TABLES_DBS;
+  private static Set<String> IMPALA_SYS_TABLES;
+
+  static {
+    reload();
+  }
+
+  /**
+   * Class contains only static functions.
+   */
+  private CatalogBlacklistUtils() {
+  }
+
+
+  /**
+   * Builds the dbs and tables blacklists sets.
+   * This method re-parses the blacklist configurations from {@link 
BackendConfig}
+   * and updates the static BLACKLISTED_DBS and BLACKLISTED_TABLES {@link 
Set}s.
+   *
+   * Should be called at JVM startup (in main code) or after backend 
configuration
+   * changes (in tests).
+   */
+  static void reload() {
+    BLACKLISTED_DBS = parseBlacklistedDbsFromConfigs();
+
+    BLACKLISTED_TABLES = parseBlacklistedTablesFromConfigs();
+
+    BLACKLISTED_TABLES_DBS = BLACKLISTED_TABLES.stream()
+        .map(t -> t.getDb().toLowerCase()).collect(Collectors.toSet());
+
+    IMPALA_SYS_TABLES = BackendConfig.INSTANCE == null ? ImmutableSet.of() :
+        ImmutableSet.of(BackendConfig.INSTANCE.queryLogTableName(),
+        TSystemTableName.IMPALA_QUERY_LIVE.toString().toLowerCase());
+  }
 
   /**
    * Parse blacklisted databases from backend configs.
+   * Retrieves the blacklisted databases configuration string from {@link 
BackendConfig}
+   * and delegates to {@link #parseBlacklistedDbs(String, Logger)} for parsing.
+   *
+   * @return a set of lowercase database names that are blacklisted, or an 
empty
+   *         set if no databases are blacklisted or {@link BackendConfig} is 
not
+   *         initialized.
    */
-  public static Set<String> parseBlacklistedDbsFromConfigs() {
+  private static Set<String> parseBlacklistedDbsFromConfigs() {
     return parseBlacklistedDbs(
         BackendConfig.INSTANCE == null ? "" : 
BackendConfig.INSTANCE.getBlacklistedDbs(),
         null);
   }
 
   /**
-   * Prase blacklisted tables from backend configs.
+   * Parse blacklisted tables from backend configs.
+   * Retrieves the blacklisted tables configuration string from {@link 
BackendConfig}
+   * and delegates to {@link #parseBlacklistedTables(String, Logger)} for 
parsing.
+   *
+   * @return a set of {@link TableName} objects representing blacklisted 
tables, or an
+   *         empty set if no tables are blacklisted or {@link BackendConfig} 
is not
+   *         initialized.
    */
-  public static Set<TableName> parseBlacklistedTablesFromConfigs() {
+  private static Set<TableName> parseBlacklistedTablesFromConfigs() {
     return parseBlacklistedTables(
         BackendConfig.INSTANCE == null ? "" :
             BackendConfig.INSTANCE.getBlacklistedTables(),
@@ -56,10 +102,17 @@ public class CatalogBlacklistUtils {
   }
 
   /**
-   * Parse blacklisted databases from given configs string. Pass Logger if 
logging is
-   * necessary.
+   * Parse blacklisted databases from given configs string.
+   * The input string should be a comma-separated list of database names. 
Database names
+   * are converted to lowercase. Empty strings and whitespace are ignored.
+   *
+   * @param blacklistedDbsConfig a comma-separated string of database names to 
blacklist,
+   *                             must not be null
+   * @param logger optional {@link Logger} for logging parsed database names, 
can be null
+   *               if logging is not required
+   * @return a set of lowercase database names that are blacklisted
    */
-  public static Set<String> parseBlacklistedDbs(String blacklistedDbsConfig,
+  private static Set<String> parseBlacklistedDbs(String blacklistedDbsConfig,
       Logger logger) {
     Preconditions.checkNotNull(blacklistedDbsConfig);
     Set<String> blacklistedDbs = Sets.newHashSet();
@@ -72,9 +125,18 @@ public class CatalogBlacklistUtils {
   }
 
   /**
-   * Parse blacklisted tables from configs string. Pass Logger if logging is 
necessary.
+   * Parse blacklisted tables from configs string.
+   * The input string should be a comma-separated list of table names in the 
format
+   * "database.table". Invalid table names are logged as warnings and skipped.
+   * Empty strings and whitespace are ignored.
+   *
+   * @param blacklistedTablesConfig a comma-separated string of table names to 
blacklist,
+   *                                must not be null
+   * @param logger optional {@link Logger} for logging parsed table names and 
warnings
+   *               about invalid table names, can be null if logging is not 
required
+   * @return a set of {@link TableName} objects representing blacklisted tables
    */
-  public static Set<TableName> parseBlacklistedTables(String 
blacklistedTablesConfig,
+  private static Set<TableName> parseBlacklistedTables(String 
blacklistedTablesConfig,
       Logger logger) {
     Preconditions.checkNotNull(blacklistedTablesConfig);
     Set<TableName> blacklistedTables = Sets.newHashSet();
@@ -94,21 +156,101 @@ public class CatalogBlacklistUtils {
     return blacklistedTables;
   }
 
+  /**
+   * Verify that a database name is not blacklisted.
+   *
+   * @param dbName the name of the database to verify
+   * @throws AnalysisException if the database name is blacklisted
+   */
   public static void verifyDbName(String dbName) throws AnalysisException {
-    if (BackendConfig.INSTANCE.enableWorkloadMgmt() && 
dbName.equalsIgnoreCase(Db.SYS)) {
-      // Override system DB for Impala system tables.
-      return;
-    }
-    if (BLACKLISTED_DBS.contains(dbName)) {
+    if (isDbBlacklisted(dbName)) {
       throw new AnalysisException("Invalid db name: " + dbName
           + ". It has been blacklisted using --blacklisted_dbs");
     }
   }
 
+  /**
+   * Check if a database is blacklisted.
+   * Note: The system database ({@link Db#SYS}) is exempt from blacklisting 
when
+   * workload management is enabled in {@link BackendConfig}.
+   *
+   * @param dbName the name of the database to check
+   * @return true if the database is blacklisted, false otherwise
+   */
+  public static boolean isDbBlacklisted(String dbName) {
+    if (BackendConfig.INSTANCE.enableWorkloadMgmt() && 
dbName.equalsIgnoreCase(Db.SYS)) {
+      // Override system DB for Impala system tables.
+      return false;
+    }
+    return BLACKLISTED_DBS.contains(dbName);
+  }
+
+  /**
+   * Get the count of blacklisted databases.
+   *
+   * @return <code>int</code> representing the number of blacklisted databases
+   */
+  public static int getBlacklistedDbsCount() {
+    return BLACKLISTED_DBS.size();
+  }
+
+  /**
+   * Verify that a table name is not blacklisted.
+   *
+   * @param table the {@link TableName} object representing the table to verify
+   * @throws AnalysisException if the table name is blacklisted
+   */
   public static void verifyTableName(TableName table) throws AnalysisException 
{
-    if (BLACKLISTED_TABLES.contains(table)) {
+    if (isTableBlacklisted(table)) {
       throw new AnalysisException("Invalid table/view name: " + table
           + ". It has been blacklisted using --blacklisted_tables");
     }
   }
+
+  /**
+   * Check if a table is blacklisted.
+   *
+   * @param table the {@link TableName} object representing the table to check
+   * @return true if the table is blacklisted, false otherwise
+   */
+  public static boolean isTableBlacklisted(TableName table) {
+    if (!table.getDb().equalsIgnoreCase(Db.SYS)) {
+      return BLACKLISTED_TABLES.contains(table);
+    }
+
+    return BLACKLISTED_DBS.contains(Db.SYS) && !IMPALA_SYS_TABLES.contains(
+        table.getTbl().toLowerCase());
+  }
+
+  /**
+   * Get the count of blacklisted tables.
+   *
+   * @return <code>int</code> representing the number of blacklisted tables
+   */
+  public static int getBlacklistedTablesCount() {
+    return BLACKLISTED_TABLES.size();
+  }
+
+  /**
+   * Returns an {@link ImmutableSet} of all names of the databases for all 
blacklisted
+   * tables.
+   *
+   * @return {@link Set<String>} containing the names of databases with 
blacklisted tables
+   */
+  public static Set<String> getBlacklistedTablesDbs() {
+    return BLACKLISTED_TABLES_DBS;
+  }
+
+  /**
+   * Check if a table is blacklisted given its database and table name.
+   * This is a convenience method that constructs a {@link TableName} object 
and delegates
+   * to {@link #isTableBlacklisted(TableName)}.
+   *
+   * @param db the name of the database containing the table
+   * @param table the name of the table to check
+   * @return true if the table is blacklisted, false otherwise
+   */
+  public static boolean isTableBlacklisted(String db, String table) {
+    return isTableBlacklisted(new TableName(db, table));
+  }
 }
diff --git 
a/fe/src/test/java/org/apache/impala/catalog/local/BlacklistingMetaProviderTest.java
 
b/fe/src/test/java/org/apache/impala/catalog/local/BlacklistingMetaProviderTest.java
new file mode 100644
index 000000000..a11beb6af
--- /dev/null
+++ 
b/fe/src/test/java/org/apache/impala/catalog/local/BlacklistingMetaProviderTest.java
@@ -0,0 +1,186 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.impala.catalog.local;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.sameInstance;
+import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+
+import org.apache.impala.service.BackendConfig;
+import org.apache.impala.thrift.TBackendGflags;
+import org.apache.impala.thrift.TBriefTableMeta;
+import org.apache.impala.util.CatalogBlacklistUtilsTest;
+import org.apache.thrift.TException;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import com.google.common.collect.ImmutableCollection;
+import com.google.common.collect.ImmutableList;
+
+public class BlacklistingMetaProviderTest {
+
+  private static TBackendGflags origFlags;
+
+  @BeforeClass
+  public static void setup() {
+    // The original BackendConfig need to be saved so they can be restored and 
not break
+    // other tests.
+    if (BackendConfig.INSTANCE == null) {
+      BackendConfig.create(new TBackendGflags());
+    }
+    origFlags = BackendConfig.INSTANCE.getBackendCfg();
+  }
+
+  @AfterClass
+  public static void teardown() {
+    BackendConfig.create(origFlags);
+  }
+
+  @Test
+  public void testLoadDbList() throws TException {
+    // Configure backend with blacklisted databases.
+    CatalogBlacklistUtilsTest.setBlacklist("blacklisted_db1,blacklisted_db2", 
"");
+
+    // Create mock provider delegate that returns a list including both 
blacklisted and
+    // non-blacklisted databases.
+    MetaProvider mockDelegate = Mockito.mock(MetaProvider.class);
+    
Mockito.when(mockDelegate.loadDbList()).thenReturn(ImmutableList.of("allowed_db1",
+        "blacklisted_db1", "allowed_db2", "blacklisted_db2", "allowed_db3"));
+
+    // Create the blacklisting provider
+    BlacklistingMetaProvider fixture = new 
BlacklistingMetaProvider(mockDelegate);
+
+    // Call loadDbList and verify blacklisted databases are filtered out.
+    ImmutableList<String> result = fixture.loadDbList();
+
+    // Should have 3 databases: allowed_db1, allowed_db2, allowed_db3.
+    assertThat(result.size(), equalTo(3));
+    assertTrue(result.contains("allowed_db1"));
+    assertTrue(result.contains("allowed_db2"));
+    assertTrue(result.contains("allowed_db3"));
+
+    Mockito.verify(mockDelegate).loadDbList();
+    Mockito.verifyNoMoreInteractions(mockDelegate);
+  }
+
+  @Test
+  public void testLoadDbListWithNonBlacklistedDbs() throws TException {
+    // Configure backend with empty blacklist.
+    CatalogBlacklistUtilsTest.setBlacklist("", "");
+
+    // Create mock provider delegate that returns a list of databases.
+    MetaProvider mockDelegate = Mockito.mock(MetaProvider.class);
+    
Mockito.when(mockDelegate.loadDbList()).thenReturn(ImmutableList.of("regular_db1",
+        "regular_db2", "regular_db3"));
+
+    BlacklistingMetaProvider fixture = new 
BlacklistingMetaProvider(mockDelegate);
+    ImmutableList<String> result = fixture.loadDbList();
+
+    // Verify that all databases are returned.
+    assertThat(result.size(), equalTo(3));
+    assertTrue(result.contains("regular_db1"));
+    assertTrue(result.contains("regular_db2"));
+    assertTrue(result.contains("regular_db3"));
+
+    Mockito.verify(mockDelegate).loadDbList();
+    Mockito.verifyNoMoreInteractions(mockDelegate);
+  }
+
+  @Test
+  public void testLoadDbListNoBlacklistedDbs() throws TException {
+    // Configure backend with no blacklisted databases.
+    CatalogBlacklistUtilsTest.setBlacklist("", "");
+
+    // Create list of databases.
+    ImmutableList<String> dbList = ImmutableList.of("db1", "db2");
+
+    // Create mock provider delegate that returns a list of databases.
+    MetaProvider mockDelegate = Mockito.mock(MetaProvider.class);
+    Mockito.when(mockDelegate.loadDbList()).thenReturn(dbList);
+
+    BlacklistingMetaProvider fixture = new 
BlacklistingMetaProvider(mockDelegate);
+    ImmutableList<String> result = fixture.loadDbList();
+
+    // Verify that all databases are returned.
+    assertThat(result, sameInstance(dbList));
+
+    Mockito.verify(mockDelegate).loadDbList();
+    Mockito.verifyNoMoreInteractions(mockDelegate);
+  }
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testLoadTableList() throws TException {
+    // Configure backend with blacklisted tables.
+    CatalogBlacklistUtilsTest.setBlacklist("", "db1.foo,db2.bar");
+
+    // Create mock provider delegate that returns a list including both 
blacklisted and
+    // non-blacklisted tables.
+    MetaProvider mockDelegate = Mockito.mock(MetaProvider.class);
+    
Mockito.when(mockDelegate.loadTableList("db1")).thenReturn(ImmutableList.of(
+        new TBriefTableMeta("foo"), new TBriefTableMeta("bar")));
+    
Mockito.when(mockDelegate.loadTableList("db2")).thenReturn(ImmutableList.of(
+        new TBriefTableMeta("foo"), new TBriefTableMeta("bar")));
+    
Mockito.when(mockDelegate.loadTableList("db3")).thenReturn(ImmutableList.of(
+        new TBriefTableMeta("foo"), new TBriefTableMeta("bar")));
+
+    BlacklistingMetaProvider fixture = new 
BlacklistingMetaProvider(mockDelegate);
+
+    assertThat(fixture.loadTableList("db1").toArray(),
+        arrayContainingInAnyOrder(equalTo(new TBriefTableMeta("bar"))));
+
+    assertThat(fixture.loadTableList("db2").toArray(),
+        arrayContainingInAnyOrder(equalTo(new TBriefTableMeta("foo"))));
+
+    assertThat(fixture.loadTableList("db3").toArray(), 
arrayContainingInAnyOrder(
+        equalTo(new TBriefTableMeta("foo")), equalTo(new 
TBriefTableMeta("bar"))));
+
+    Mockito.verify(mockDelegate).loadTableList("db1");
+    Mockito.verify(mockDelegate).loadTableList("db2");
+    Mockito.verify(mockDelegate).loadTableList("db3");
+    Mockito.verifyNoMoreInteractions(mockDelegate);
+  }
+
+  @Test
+  public void testLoadTableListWithNoBlacklistedTables() throws TException {
+    // Configure backend with no blacklisted tables.
+    CatalogBlacklistUtilsTest.setBlacklist("", "");
+
+    // Create list of tables.
+    ImmutableCollection<TBriefTableMeta> tablesList =
+        ImmutableList.of(new TBriefTableMeta("tbl1"), new 
TBriefTableMeta("tbl2"));
+
+    // Create mock provider delegate that returns a list of databases.
+    MetaProvider mockDelegate = Mockito.mock(MetaProvider.class);
+    Mockito.when(mockDelegate.loadTableList("db1")).thenReturn(tablesList);
+
+    BlacklistingMetaProvider fixture = new 
BlacklistingMetaProvider(mockDelegate);
+    ImmutableCollection<TBriefTableMeta> result = fixture.loadTableList("db1");
+
+    // Verify that all databases are returned.
+    assertThat(result, sameInstance(tablesList));
+
+    Mockito.verify(mockDelegate).loadTableList("db1");
+    Mockito.verifyNoMoreInteractions(mockDelegate);
+  }
+
+}
diff --git 
a/fe/src/test/java/org/apache/impala/catalog/local/MetaProviderDecoratorTest.java
 
b/fe/src/test/java/org/apache/impala/catalog/local/MetaProviderDecoratorTest.java
new file mode 100644
index 000000000..84396a911
--- /dev/null
+++ 
b/fe/src/test/java/org/apache/impala/catalog/local/MetaProviderDecoratorTest.java
@@ -0,0 +1,284 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.impala.catalog.local;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.sameInstance;
+import static org.junit.Assert.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyList;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+import static org.mockito.Mockito.when;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.impala.authorization.AuthorizationPolicy;
+import org.apache.impala.catalog.CatalogException;
+import org.apache.impala.catalog.DataSource;
+import org.apache.impala.catalog.Function;
+import org.apache.impala.catalog.HdfsCachePool;
+import org.apache.impala.catalog.SqlConstraints;
+import org.apache.impala.catalog.local.LocalIcebergTable.TableParams;
+import org.apache.impala.catalog.local.MetaProvider.PartitionMetadata;
+import org.apache.impala.catalog.local.MetaProvider.PartitionRef;
+import org.apache.impala.catalog.local.MetaProvider.TableMetaRef;
+import org.apache.impala.common.Pair;
+import org.apache.impala.thrift.TBriefTableMeta;
+import org.apache.impala.thrift.TNetworkAddress;
+import org.apache.impala.thrift.TPartialTableInfo;
+import org.apache.impala.thrift.TValidWriteIdList;
+import org.apache.impala.util.ListMap;
+import org.apache.thrift.TException;
+import org.junit.Test;
+
+import com.google.common.collect.ImmutableCollection;
+import com.google.common.collect.ImmutableList;
+
+public class MetaProviderDecoratorTest {
+
+  // Expected number of public methods in MetaProvider interface
+  // Update this constant when adding new methods to MetaProvider
+  private static final int EXPECTED_METHOD_COUNT = 23;
+
+  @Test
+  public void testMetaProviderMethodCount() {
+    // Use reflection to count public methods in MetaProvider interface
+    assertThat("Number of public methods in MetaProvider interface has 
changed. " +
+        "Update EXPECTED_METHOD_COUNT and add tests for new methods.",
+        MetaProvider.class.getDeclaredMethods().length, 
equalTo(EXPECTED_METHOD_COUNT));
+
+    assertThat("Abstract class MetaProviderDecorator is missing an 
implementation for " +
+        "one or more methods on the MetaProvider interface",
+        MetaProvider.class.getDeclaredMethods().length,
+        equalTo(MetaProviderDecorator.class.getDeclaredMethods().length));
+  }
+
+  @Test
+  public void testAllMethodsDelegation() throws TException, CatalogException {
+    // Create a mock MetaProvider
+    MetaProvider mockDecorated = mock(MetaProvider.class);
+
+    // Create a concrete implementation of MetaProviderDecorator for testing
+    MetaProviderDecorator fixture = new MetaProviderDecorator(mockDecorated) 
{};
+
+    // Setup mock return values
+    String expectedUri = "test-uri";
+    AuthorizationPolicy expectedAuthPolicy = mock(AuthorizationPolicy.class);
+    boolean expectedIsReady = true;
+    ImmutableList<String> expectedDbList = ImmutableList.of("db1", "db2");
+    Database expectedDatabase = new Database();
+    ImmutableCollection<TBriefTableMeta> expectedTableList =
+        ImmutableList.of(new TBriefTableMeta());
+    Table expectedTable = new Table();
+    TableMetaRef expectedTableRef = mock(TableMetaRef.class);
+    Pair<Table, TableMetaRef> expectedTablePair =
+        new Pair<>(expectedTable, expectedTableRef);
+    String expectedNullPartitionKey = "__HIVE_DEFAULT_PARTITION__";
+    List<PartitionRef> expectedPartitionRefs = new ArrayList<>();
+    SqlConstraints expectedConstraints = mock(SqlConstraints.class);
+    List<String> expectedFunctionNames = ImmutableList.of("func1", "func2");
+    ImmutableList<Function> expectedFunctions = ImmutableList.of();
+    ImmutableList<DataSource> expectedDataSources = ImmutableList.of();
+    DataSource expectedDataSource = mock(DataSource.class);
+    Map<String, PartitionMetadata> expectedPartitionMetadata = new HashMap<>();
+    List<ColumnStatisticsObj> expectedColStats = new ArrayList<>();
+    TPartialTableInfo expectedIcebergTableInfo = new TPartialTableInfo();
+    org.apache.iceberg.Table expectedIcebergApiTable =
+        mock(org.apache.iceberg.Table.class);
+    TValidWriteIdList expectedValidWriteIdList = new TValidWriteIdList();
+    Iterable<HdfsCachePool> expectedCachePools = ImmutableList.of();
+
+    // Configure mock behavior
+    when(mockDecorated.getURI()).thenReturn(expectedUri);
+    when(mockDecorated.getAuthPolicy()).thenReturn(expectedAuthPolicy);
+    when(mockDecorated.isReady()).thenReturn(expectedIsReady);
+    when(mockDecorated.loadDbList()).thenReturn(expectedDbList);
+    when(mockDecorated.loadDb(anyString())).thenReturn(expectedDatabase);
+    
when(mockDecorated.loadTableList(anyString())).thenReturn(expectedTableList);
+    when(mockDecorated.loadTable(anyString(), anyString()))
+        .thenReturn(expectedTablePair);
+    when(mockDecorated.getTableIfPresent(anyString(), anyString()))
+        .thenReturn(expectedTablePair);
+    
when(mockDecorated.loadNullPartitionKeyValue()).thenReturn(expectedNullPartitionKey);
+    when(mockDecorated.loadPartitionList(any(TableMetaRef.class)))
+        .thenReturn(expectedPartitionRefs);
+    when(mockDecorated.loadConstraints(any(TableMetaRef.class), 
any(Table.class)))
+        .thenReturn(expectedConstraints);
+    
when(mockDecorated.loadFunctionNames(anyString())).thenReturn(expectedFunctionNames);
+    when(mockDecorated.loadFunction(anyString(), anyString()))
+        .thenReturn(expectedFunctions);
+    when(mockDecorated.loadDataSources()).thenReturn(expectedDataSources);
+    
when(mockDecorated.loadDataSource(anyString())).thenReturn(expectedDataSource);
+    when(mockDecorated.loadPartitionsByRefs(any(TableMetaRef.class), anyList(),
+        any(ListMap.class), anyList())).thenReturn(expectedPartitionMetadata);
+    when(mockDecorated.loadTableColumnStatistics(any(TableMetaRef.class), 
anyList()))
+        .thenReturn(expectedColStats);
+    when(mockDecorated.loadIcebergTable(any(TableMetaRef.class)))
+        .thenReturn(expectedIcebergTableInfo);
+    when(mockDecorated.loadIcebergApiTable(any(TableMetaRef.class),
+        any(TableParams.class), 
any(Table.class))).thenReturn(expectedIcebergApiTable);
+    when(mockDecorated.getValidWriteIdList(any(TableMetaRef.class)))
+        .thenReturn(expectedValidWriteIdList);
+    when(mockDecorated.getHdfsCachePools()).thenReturn(expectedCachePools);
+
+    // Test getURI()
+    String actualUri = fixture.getURI();
+    assertThat(actualUri, equalTo(expectedUri));
+    verify(mockDecorated).getURI();
+
+    // Test getAuthPolicy()
+    AuthorizationPolicy actualAuthPolicy = fixture.getAuthPolicy();
+    assertThat(actualAuthPolicy, sameInstance(expectedAuthPolicy));
+    verify(mockDecorated).getAuthPolicy();
+
+    // Test isReady()
+    boolean actualIsReady = fixture.isReady();
+    assertThat(actualIsReady, equalTo(expectedIsReady));
+    verify(mockDecorated).isReady();
+
+    // Test waitForIsReady(long)
+    long timeout = 1000L;
+    fixture.waitForIsReady(timeout);
+    verify(mockDecorated).waitForIsReady(eq(timeout));
+
+    // Test setIsReady(boolean)
+    fixture.setIsReady(false);
+    verify(mockDecorated).setIsReady(eq(false));
+
+    // Test loadDbList()
+    ImmutableList<String> actualDbList = fixture.loadDbList();
+    assertThat(actualDbList, equalTo(expectedDbList));
+    verify(mockDecorated).loadDbList();
+
+    // Test loadDb(String)
+    Database actualDatabase = fixture.loadDb("testDb");
+    assertThat(actualDatabase, sameInstance(expectedDatabase));
+    verify(mockDecorated).loadDb(eq("testDb"));
+
+    // Test loadTableList(String)
+    ImmutableCollection<TBriefTableMeta> actualTableList =
+        fixture.loadTableList("testDb");
+    assertThat(actualTableList, sameInstance(expectedTableList));
+    verify(mockDecorated).loadTableList(eq("testDb"));
+
+    // Test loadTable(String, String)
+    Pair<Table, TableMetaRef> actualTablePair =
+        fixture.loadTable("testDb", "testTable");
+    assertThat(actualTablePair, sameInstance(expectedTablePair));
+    verify(mockDecorated).loadTable(eq("testDb"), eq("testTable"));
+
+    // Test getTableIfPresent(String, String)
+    Pair<Table, TableMetaRef> actualTableIfPresent =
+        fixture.getTableIfPresent("testDb", "testTable");
+    assertThat(actualTableIfPresent, sameInstance(expectedTablePair));
+    verify(mockDecorated).getTableIfPresent(eq("testDb"), eq("testTable"));
+
+    // Test loadNullPartitionKeyValue()
+    String actualNullPartitionKey = fixture.loadNullPartitionKeyValue();
+    assertThat(actualNullPartitionKey, equalTo(expectedNullPartitionKey));
+    verify(mockDecorated).loadNullPartitionKeyValue();
+
+    // Test loadPartitionList(TableMetaRef)
+    List<PartitionRef> actualPartitionRefs =
+        fixture.loadPartitionList(expectedTableRef);
+    assertThat(actualPartitionRefs, sameInstance(expectedPartitionRefs));
+    verify(mockDecorated).loadPartitionList(eq(expectedTableRef));
+
+    // Test loadConstraints(TableMetaRef, Table)
+    SqlConstraints actualConstraints =
+        fixture.loadConstraints(expectedTableRef, expectedTable);
+    assertThat(actualConstraints, sameInstance(expectedConstraints));
+    verify(mockDecorated).loadConstraints(eq(expectedTableRef), 
eq(expectedTable));
+
+    // Test loadFunctionNames(String)
+    List<String> actualFunctionNames = fixture.loadFunctionNames("testDb");
+    assertThat(actualFunctionNames, equalTo(expectedFunctionNames));
+    verify(mockDecorated).loadFunctionNames(eq("testDb"));
+
+    // Test loadFunction(String, String)
+    ImmutableList<Function> actualFunctions =
+        fixture.loadFunction("testDb", "testFunc");
+    assertThat(actualFunctions, equalTo(expectedFunctions));
+    verify(mockDecorated).loadFunction(eq("testDb"), eq("testFunc"));
+
+    // Test loadDataSources()
+    ImmutableList<DataSource> actualDataSources = fixture.loadDataSources();
+    assertThat(actualDataSources, equalTo(expectedDataSources));
+    verify(mockDecorated).loadDataSources();
+
+    // Test loadDataSource(String)
+    DataSource actualDataSource = fixture.loadDataSource("testDs");
+    assertThat(actualDataSource, sameInstance(expectedDataSource));
+    verify(mockDecorated).loadDataSource(eq("testDs"));
+
+    // Test loadPartitionsByRefs(...)
+    List<String> partCols = ImmutableList.of("col1");
+    ListMap<TNetworkAddress> hostIndex = new ListMap<>();
+    List<PartitionRef> partRefs = new ArrayList<>();
+    Map<String, PartitionMetadata> actualPartitionMetadata =
+        fixture.loadPartitionsByRefs(expectedTableRef, partCols, hostIndex, 
partRefs);
+    assertThat(actualPartitionMetadata, 
sameInstance(expectedPartitionMetadata));
+    verify(mockDecorated).loadPartitionsByRefs(
+        eq(expectedTableRef), eq(partCols), eq(hostIndex), eq(partRefs));
+
+    // Test loadTableColumnStatistics(TableMetaRef, List<String>)
+    List<String> colNames = ImmutableList.of("col1", "col2");
+    List<ColumnStatisticsObj> actualColStats =
+        fixture.loadTableColumnStatistics(expectedTableRef, colNames);
+    assertThat(actualColStats, sameInstance(expectedColStats));
+    verify(mockDecorated).loadTableColumnStatistics(eq(expectedTableRef), 
eq(colNames));
+
+    // Test loadIcebergTable(TableMetaRef)
+    TPartialTableInfo actualIcebergTableInfo =
+        fixture.loadIcebergTable(expectedTableRef);
+    assertThat(actualIcebergTableInfo, sameInstance(expectedIcebergTableInfo));
+    verify(mockDecorated).loadIcebergTable(eq(expectedTableRef));
+
+    // Test loadIcebergApiTable(TableMetaRef, TableParams, Table)
+    TableParams tableParams = mock(TableParams.class);
+    org.apache.iceberg.Table actualIcebergApiTable =
+        fixture.loadIcebergApiTable(expectedTableRef, tableParams, 
expectedTable);
+    assertThat(actualIcebergApiTable, sameInstance(expectedIcebergApiTable));
+    verify(mockDecorated).loadIcebergApiTable(
+        eq(expectedTableRef), eq(tableParams), eq(expectedTable));
+
+    // Test getValidWriteIdList(TableMetaRef)
+    TValidWriteIdList actualValidWriteIdList =
+        fixture.getValidWriteIdList(expectedTableRef);
+    assertThat(actualValidWriteIdList, sameInstance(expectedValidWriteIdList));
+    verify(mockDecorated).getValidWriteIdList(eq(expectedTableRef));
+
+    // Test getHdfsCachePools()
+    Iterable<HdfsCachePool> actualCachePools = fixture.getHdfsCachePools();
+    assertThat(actualCachePools, sameInstance(expectedCachePools));
+    verify(mockDecorated).getHdfsCachePools();
+
+    verifyNoMoreInteractions(mockDecorated);
+  }
+
+}
diff --git 
a/fe/src/test/java/org/apache/impala/util/CatalogBlacklistUtilsTest.java 
b/fe/src/test/java/org/apache/impala/util/CatalogBlacklistUtilsTest.java
index eb1a28deb..d80704c9b 100644
--- a/fe/src/test/java/org/apache/impala/util/CatalogBlacklistUtilsTest.java
+++ b/fe/src/test/java/org/apache/impala/util/CatalogBlacklistUtilsTest.java
@@ -17,79 +17,216 @@
 
 package org.apache.impala.util;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import java.util.Set;
+import static 
org.apache.impala.util.CatalogBlacklistUtils.getBlacklistedDbsCount;
+import static 
org.apache.impala.util.CatalogBlacklistUtils.getBlacklistedTablesCount;
+import static 
org.apache.impala.util.CatalogBlacklistUtils.getBlacklistedTablesDbs;
+import static org.apache.impala.util.CatalogBlacklistUtils.isDbBlacklisted;
+import static org.apache.impala.util.CatalogBlacklistUtils.isTableBlacklisted;
+import static org.apache.impala.util.CatalogBlacklistUtils.reload;
+import static org.apache.impala.util.CatalogBlacklistUtils.verifyDbName;
+import static org.apache.impala.util.CatalogBlacklistUtils.verifyTableName;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.is;
+import static 
org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.fail;
 
 import org.apache.impala.analysis.TableName;
 import org.apache.impala.catalog.Catalog;
+import org.apache.impala.common.AnalysisException;
+import org.apache.impala.service.BackendConfig;
+import org.apache.impala.thrift.TBackendGflags;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class CatalogBlacklistUtilsTest {
 
+  private static TBackendGflags origFlags;
+
+  @BeforeClass
+  public static void setup() {
+    // The original BackendConfig needs to be saved so it can be restored and
+    // other tests are not broken.
+    if (BackendConfig.INSTANCE == null) {
+      BackendConfig.create(new TBackendGflags());
+    }
+    origFlags = BackendConfig.INSTANCE.getBackendCfg();
+  }
+
+  @AfterClass
+  public static void teardown() {
+    BackendConfig.create(origFlags);
+  }
+
   @Test
-  public void testParsingBlacklistedDbs() {
-    Set<String> blacklistedDbs;
-
-    blacklistedDbs = CatalogBlacklistUtils.parseBlacklistedDbs("db1,db2", 
null);
-    assertEquals(blacklistedDbs.size(), 2);
-    assertTrue(blacklistedDbs.contains("db1"));
-    assertTrue(blacklistedDbs.contains("db2"));
-
-    // Test spaces
-    blacklistedDbs = CatalogBlacklistUtils.parseBlacklistedDbs(" db1 , db2 ", 
null);
-    assertEquals(blacklistedDbs.size(), 2);
-    assertTrue(blacklistedDbs.contains("db1"));
-    assertTrue(blacklistedDbs.contains("db2"));
-    blacklistedDbs = CatalogBlacklistUtils.parseBlacklistedDbs(" ", null);
-    assertTrue(blacklistedDbs.isEmpty());
-
-    // Test lower/upper cases
-    blacklistedDbs = CatalogBlacklistUtils.parseBlacklistedDbs("DB1,Db2", 
null);
-    assertEquals(blacklistedDbs.size(), 2);
-    assertTrue(blacklistedDbs.contains("db1"));
-    assertTrue(blacklistedDbs.contains("db2"));
+  public void testParsingBlacklistedDbsHappyPath() throws AnalysisException {
+    setBlacklist("db1,db2", "");
+
+    assertThat(getBlacklistedDbsCount(), equalTo(2));
+    assertThat(isDbBlacklisted("db1"), is(true));
+    assertThat(isDbBlacklisted("db2"), is(true));
+    assertThat(isDbBlacklisted("db3"), is(false));
+
+    verifyDbName("db3");
+    try {
+      verifyDbName("db1");
+      fail("Expected AnalysisException for blacklisted db");
+    } catch (AnalysisException e) {
+      assertThat(e.getMessage(), equalTo("Invalid db name: db1. It has been 
blacklisted "
+          + "using --blacklisted_dbs"));
+    }
+  }
 
-    // Test abnormal inputs
-    blacklistedDbs = CatalogBlacklistUtils.parseBlacklistedDbs("db1,", null);
-    assertEquals(blacklistedDbs.size(), 1);
-    assertTrue(blacklistedDbs.contains("db1"));
+  @Test
+  public void testParsingBlacklistedDbsNamesWithSpaces() {
+    setBlacklist(" db1 , db2 ", "");
+
+    assertThat(getBlacklistedDbsCount(), equalTo(2));
+    assertThat(isDbBlacklisted("db1"), is(true));
+    assertThat(isDbBlacklisted("db2"), is(true));
+    assertThat(isDbBlacklisted("db3"), is(false));
+  }
+
+  @Test
+  public void testParsingBlacklistedDbsCaseInsensitiveNames() {
+    setBlacklist("DB1,Db2", "");
+
+    assertThat(getBlacklistedDbsCount(), equalTo(2));
+    assertThat(isDbBlacklisted("db1"), is(true));
+    assertThat(isDbBlacklisted("db2"), is(true));
+    assertThat(isDbBlacklisted("db3"), is(false));
   }
 
   @Test
-  public void testParsingBlacklistedTables() {
-    Set<TableName> blacklistedTables;
-
-    blacklistedTables = CatalogBlacklistUtils.parseBlacklistedTables(
-        "db3.foo,db3.bar", null);
-    assertEquals(blacklistedTables.size(), 2);
-    assertTrue(blacklistedTables.contains(new TableName("db3", "foo")));
-    assertTrue(blacklistedTables.contains(new TableName("db3", "bar")));
-
-    // Test spaces
-    blacklistedTables = CatalogBlacklistUtils.parseBlacklistedTables(
-        " db3 . foo , db3 . bar  ", null);
-    assertEquals(blacklistedTables.size(), 2);
-    assertTrue(blacklistedTables.contains(new TableName("db3", "foo")));
-    assertTrue(blacklistedTables.contains(new TableName("db3", "bar")));
-
-    // Test defaults
-    blacklistedTables = CatalogBlacklistUtils.parseBlacklistedTables("foo", 
null);
-    assertEquals(blacklistedTables.size(), 1);
-    assertTrue(blacklistedTables.contains(new TableName(Catalog.DEFAULT_DB, 
"foo")));
-
-    // Test lower/upper cases
-    blacklistedTables = CatalogBlacklistUtils.parseBlacklistedTables(
-        "DB3.Foo,db3.Bar", null);
-    assertEquals(blacklistedTables.size(), 2);
-    assertTrue(blacklistedTables.contains(new TableName("db3", "foo")));
-    assertTrue(blacklistedTables.contains(new TableName("db3", "bar")));
+  public void testParsingBlacklistedDbsInvalidNames() {
+    setBlacklist("db1,", "");
+
+    assertThat(getBlacklistedDbsCount(), equalTo(1));
+    assertThat(isDbBlacklisted("db1"), is(true));
+    assertThat(isDbBlacklisted("db2"), is(false));
+    assertThat(isDbBlacklisted("db3"), is(false));
+  }
 
+  @Test
+  public void testParsingBlacklistedDbsNone() {
+    assertThat(getBlacklistedDbsCount(), equalTo(0));
+  }
+
+  @Test
+  public void testParsingBlacklistedTablesHappyPath() throws AnalysisException 
{
+    TableName foo = new TableName("db3", "foo");
+    TableName baz = new TableName("db3", "baz");
+    setBlacklist("", "db3.foo,db3.bar,db4.tbl1");
+
+    assertThat(getBlacklistedTablesCount(), equalTo(3));
+    assertThat(isTableBlacklisted(foo.getDb(), foo.getTbl()), is(true));
+    assertThat(isTableBlacklisted(foo), is(true));
+    assertThat(isTableBlacklisted("db3", "bar"), is(true));
+    assertThat(isTableBlacklisted(new TableName("db3", "bar")), is(true));
+    assertThat(isTableBlacklisted(baz.getDb(), baz.getTbl()), is(false));
+    assertThat(isTableBlacklisted(baz), is(false));
+    assertThat(isTableBlacklisted("db4", "tbl1"), is(true));
+    assertThat(getBlacklistedTablesDbs(), containsInAnyOrder("db3", "db4"));
+
+    verifyTableName(baz);
+    try {
+      verifyTableName(foo);
+      fail("Expected AnalysisException for blacklisted table");
+    } catch (AnalysisException e) {
+      assertThat(e.getMessage(), equalTo("Invalid table/view name: " + foo
+          + ". It has been blacklisted using --blacklisted_tables"));
+    }
+  }
+
+  @Test
+  public void testParsingBlacklistedTablesNamesWithInputSpaces() {
+    setBlacklist("", " db3 . foo , db3 . bar  ");
+
+    assertThat(getBlacklistedTablesCount(), equalTo(2));
+    assertThat(isTableBlacklisted("db3", "foo"), is(true));
+    assertThat(isTableBlacklisted("db3", "bar"), is(true));
+    assertThat(isTableBlacklisted("db3", "baz"), is(false));
+    assertThat(getBlacklistedTablesDbs(), containsInAnyOrder("db3"));
+  }
+
+  @Test
+  public void testParsingBlacklistedTablesNamesWithoutDb() {
+    setBlacklist("", "foo");
+
+    assertThat(getBlacklistedTablesCount(), equalTo(1));
+    assertThat(isTableBlacklisted(Catalog.DEFAULT_DB, "foo"), is(true));
+    assertThat(getBlacklistedTablesDbs(), containsInAnyOrder("default"));
+  }
+
+  @Test
+  public void testParsingBlacklistedTablesCaseInsensitiveNames() {
+    setBlacklist("", "DB3.Foo,db3.Bar");
+
+    assertThat(getBlacklistedTablesCount(), equalTo(2));
+    assertThat(isTableBlacklisted("db3", "foo"), is(true));
+    assertThat(isTableBlacklisted("db3", "bar"), is(true));
+    assertThat(getBlacklistedTablesDbs(), containsInAnyOrder("db3"));
+  }
+
+  @Test
+  public void testParsingBlacklistedTablesInvalidNames() {
     // Test abnormal inputs
-    blacklistedTables = 
CatalogBlacklistUtils.parseBlacklistedTables("db3.,.bar,,", null);
-    assertEquals(blacklistedTables.size(), 1);
-    assertTrue(blacklistedTables.contains(new TableName(Catalog.DEFAULT_DB, 
"bar")));
+    setBlacklist("", "db3.,.bar,,");
+
+    assertThat(getBlacklistedTablesCount(), equalTo(1));
+    assertThat(isTableBlacklisted(Catalog.DEFAULT_DB, "bar"), is(true));
+    assertThat(getBlacklistedTablesDbs(), containsInAnyOrder("default"));
+  }
+
+  @Test
+  public void testParsingBlacklistedDbsAndTables() {
+    setBlacklist("db1,db2", "db3.foo,db3.bar");
+
+    assertThat(getBlacklistedDbsCount(), equalTo(2));
+    assertThat(isDbBlacklisted("db1"), is(true));
+    assertThat(isDbBlacklisted("db2"), is(true));
+    assertThat(isDbBlacklisted("db3"), is(false));
+
+    assertThat(getBlacklistedTablesCount(), equalTo(2));
+    assertThat(isTableBlacklisted("db1", "foo"), is(false));
+    assertThat(isTableBlacklisted("db2", "bar"), is(false));
+    assertThat(isTableBlacklisted("db3", "foo"), is(true));
+    assertThat(isTableBlacklisted("db3", "bar"), is(true));
+    assertThat(isTableBlacklisted("db3", "baz"), is(false));
+    assertThat(getBlacklistedTablesDbs(), containsInAnyOrder("db3"));
   }
+
+  @Test
+  public void testWorkloadManagementEnabled() {
+    setBlacklist("sys", "", true);
+
+    assertThat(isDbBlacklisted("sys"), is(false));
+    assertThat(isTableBlacklisted("sys", "impala_query_log"), is(false));
+    assertThat(isTableBlacklisted("sys", "impala_query_live"), is(false));
+    assertThat(isTableBlacklisted("sys", "other_tbl"), is(true));
+  }
+
+  @Test
+  public void testWorkloadManagementDisabled() {
+    setBlacklist("sys", "", false);
+
+    assertThat(isDbBlacklisted("sys"), is(true));
+  }
+
+  public static void setBlacklist(String blacklistedDbs, String 
blacklistedTables,
+      boolean enableWorkloadMgmt) {
+    TBackendGflags backendGflags = new TBackendGflags();
+    backendGflags.setBlacklisted_dbs(blacklistedDbs);
+    backendGflags.setBlacklisted_tables(blacklistedTables);
+    backendGflags.setEnable_workload_mgmt(enableWorkloadMgmt);
+    backendGflags.setQuery_log_table_name("impala_query_log");
+    BackendConfig.create(backendGflags, false);
+    reload();
+  }
+
+  public static void setBlacklist(String blacklistedDbs, String 
blacklistedTables) {
+    setBlacklist(blacklistedDbs, blacklistedTables, false);
+  }
+
 }
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-rest-catalog-blacklist-db.test
 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-rest-catalog-blacklist-db.test
new file mode 100644
index 000000000..778caf982
--- /dev/null
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-rest-catalog-blacklist-db.test
@@ -0,0 +1,32 @@
+====
+---- QUERY
+# Blacklisted databases are not shown.
+show databases "ice";
+---- RESULTS: MULTI_LINE
+Query: show databases "ice"
+row_regex:.*Fetched 0 row.*
+====
+---- QUERY
+# Blacklisted databases cannot be used.
+use ice;
+---- CATCH
+AnalysisException: Database does not exist: ice
+====
+---- QUERY
+# Cannot select from any tables in a blacklisted database.
+select count(*) from ice.airports_parquet;
+---- CATCH
+AnalysisException: Could not resolve table reference: 'ice.airports_parquet'
+====
+---- QUERY
+# Cannot create tables via Iceberg REST Catalog, guard against that 
functionality
+# bypassing the blacklist in the future.
+create table ice.foo (id string) stored as iceberg LOCATION 
'/test-warehouse/iceberg_test/hadoop_catalog/ice/foo';
+---- CATCH
+AnalysisException: Database does not exist: ice
+====
+---- QUERY
+create database ice comment 'iceberg catalog blacklist db test' LOCATION 
'/test-warehouse/iceberg_test/hadoop_catalog/ice'
+---- CATCH
+AnalysisException: Invalid db name: ice. It has been blacklisted using 
--blacklisted_dbs
+====
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/iceberg-rest-catalog-blacklist-tables.test
 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-rest-catalog-blacklist-tables.test
new file mode 100644
index 000000000..6178cd9df
--- /dev/null
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/iceberg-rest-catalog-blacklist-tables.test
@@ -0,0 +1,27 @@
+====
+---- QUERY
+# Blacklisted tables are not resolvable.
+select count(*) from ice.airports_parquet;
+---- CATCH
+AnalysisException: Could not resolve table reference: 'ice.airports_parquet'
+====
+---- QUERY
+# Blacklisted tables cannot be shown.
+show tables like 'airports_parquet';
+---- RESULTS: MULTI_LINE
+Query: show tables like 'airports_parquet'
+row_regex:.*Fetched 0 row.*
+====
+---- QUERY
+# Blacklisted tables cannot be inserted into.
+insert into airports_parquet (iata) values ('foo');
+---- CATCH
+row_regex:.*AnalysisException: Table does not exist: ice.airports_parquet
+====
+---- QUERY
+# Cannot create tables via Iceberg REST Catalog, guard against that 
functionality
+# bypassing the blacklist in the future.
+create table ice.airports_parquet (id string) stored as iceberg LOCATION 
'/test-warehouse/iceberg_test/hadoop_catalog/ice/airports_parquet';
+---- CATCH
+AnalysisException: Invalid table/view name: ice.airports_parquet. It has been 
blacklisted using --blacklisted_tables
+====
diff --git a/tests/custom_cluster/test_iceberg_rest_catalog.py 
b/tests/custom_cluster/test_iceberg_rest_catalog.py
index 59d672aaa..04544fae0 100644
--- a/tests/custom_cluster/test_iceberg_rest_catalog.py
+++ b/tests/custom_cluster/test_iceberg_rest_catalog.py
@@ -115,6 +115,25 @@ class 
TestIcebergRestCatalogWithHms(IcebergRestCatalogTests):
     self.run_test_case('QueryTest/iceberg-multiple-rest-catalogs',
                        vector, use_db="ice")
 
+  @RestServerProperties({'port': 9084})
+  @CustomClusterTestSuite.with_args(
+     impalad_args="{} --blacklisted_dbs=ice".format(MULTICATALOG_IMPALAD_ARGS),
+     catalogd_args=MULTICATALOG_CATALOGD_ARGS)
+  @pytest.mark.execute_serially
+  def test_rest_catalog_multicatalog_blacklisted_db(self, vector):
+    self.run_test_case('QueryTest/iceberg-rest-catalog-blacklist-db', vector,
+        use_db="default")
+
+  @RestServerProperties({'port': 9084})
+  @CustomClusterTestSuite.with_args(
+      impalad_args="{} --blacklisted_tables=ice.airports_parquet"
+                   .format(REST_STANDALONE_IMPALAD_ARGS),
+      catalogd_args=MULTICATALOG_CATALOGD_ARGS)
+  @pytest.mark.execute_serially
+  def test_rest_catalog_multicatalog_blacklisted_tables(self, vector):
+    self.run_test_case('QueryTest/iceberg-rest-catalog-blacklist-tables',
+        vector, use_db="ice")
+
 
 class TestIcebergRestCatalogNoHms(IcebergRestCatalogTests):
   """Test suite for Iceberg REST Catalog. HMS is stopped while tests are 
running"""
@@ -175,3 +194,22 @@ class TestIcebergRestCatalogNoHms(IcebergRestCatalogTests):
   def test_multiple_rest_catalogs_with_ambiguous_tables(self, vector):
     
self.run_test_case('QueryTest/iceberg-multiple-rest-catalogs-ambiguous-name',
                        vector, use_db="ice")
+
+  @RestServerProperties({'port': 9084})
+  @CustomClusterTestSuite.with_args(
+     impalad_args="{} 
--blacklisted_dbs=ice".format(REST_STANDALONE_IMPALAD_ARGS),
+     start_args=NO_CATALOGD_STARTARGS)
+  @pytest.mark.execute_serially
+  def test_rest_catalog_basic_blacklisted_db(self, vector):
+    self.run_test_case('QueryTest/iceberg-rest-catalog-blacklist-db', vector,
+        use_db="default")
+
+  @RestServerProperties({'port': 9084})
+  @CustomClusterTestSuite.with_args(
+      impalad_args="{} --blacklisted_tables=ice.airports_parquet"
+                   .format(REST_STANDALONE_IMPALAD_ARGS),
+      start_args=NO_CATALOGD_STARTARGS)
+  @pytest.mark.execute_serially
+  def test_rest_catalog_basic_blacklisted_tables(self, vector):
+    self.run_test_case('QueryTest/iceberg-rest-catalog-blacklist-tables',
+        vector, use_db="ice")
diff --git a/tests/custom_cluster/test_sys_db.py 
b/tests/custom_cluster/test_sys_db.py
index f33c6c904..4ba1f3f95 100644
--- a/tests/custom_cluster/test_sys_db.py
+++ b/tests/custom_cluster/test_sys_db.py
@@ -58,7 +58,8 @@ class TestSysDb(CustomClusterTestSuite):
       assert False, "table '{0}' should have failed to create but was created" 
\
           .format(table_name)
     except IMPALA_CONNECTION_EXCEPTION as e:
-      expected_error = "IllegalStateException: Can't create blacklisted table: 
{0}" \
+      expected_error = "AnalysisException: Invalid table/view name: " \
+          "{0}. It has been blacklisted using --blacklisted_tables" \
           .format(table_name)
       assert error_msg_startswith(str(e), expected_error), \
           "table '{0}' failed to create but for the wrong reason:\n{1}\n" \

Reply via email to