This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 0abb52148b0 [feat](catalog) refactor and support iceberg sys table 
(#50294)
0abb52148b0 is described below

commit 0abb52148b04d334ebd797c291d46aa2de46f98a
Author: Mingyu Chen (Rayner) <[email protected]>
AuthorDate: Wed May 7 18:27:28 2025 +0800

    [feat](catalog) refactor and support iceberg sys table (#50294)
    
    ### What problem does this PR solve?
    
    This PR refactors and consolidates the handling of meta tables under a
    unified SysTable abstraction to support Iceberg system tables (and
    others) within the catalog. Key changes include:
    
    - Introduction of the SysTable abstract class with helper methods (e.g.,
    containsMetaTable, getTableNameWithSysTableName).
    - Refactoring usages across various modules (e.g., RelationUtil,
    DescribeCommand, and HMSExternalCatalog) to leverage the new SysTable
    API.
    - Addition of unit tests in SysTableTest.java to validate the new
    abstraction.
    
    Now we support following syntax:
    
    ```
    select * from iceberg_table$snapshots
    ```
    
    instead of
    
    ```
    select * from iceberg_meta("table" = "ctl.db.iceberg_table", "query_type" = 
"snapshots");
    ```
---
 .../org/apache/doris/analysis/DescribeStmt.java    |  15 +-
 .../java/org/apache/doris/catalog/TableIf.java     |  45 ++
 .../org/apache/doris/datasource/CatalogIf.java     |  23 -
 .../doris/datasource/hive/HMSExternalCatalog.java  |  91 ---
 .../doris/datasource/hive/HMSExternalTable.java    |  17 +
 .../datasource/iceberg/IcebergExternalTable.java   |   8 +
 .../datasource/paimon/PaimonExternalTable.java     |   7 +
 .../systable/IcebergSnapshotsSysTable.java         |  65 ++
 .../datasource/systable/PartitionsSysTable.java    |  62 ++
 .../datasource/systable/SupportedSysTables.java    |  42 +
 .../apache/doris/datasource/systable/SysTable.java |  68 ++
 .../doris/nereids/rules/analysis/BindRelation.java |   4 +-
 .../expressions/functions/table/IcebergMeta.java   |  11 +
 .../trees/plans/commands/DescribeCommand.java      |  15 +-
 .../apache/doris/nereids/util/RelationUtil.java    |  20 +-
 .../tablefunction/IcebergTableValuedFunction.java  |   4 +-
 .../doris/datasource/systable/SysTableTest.java    | 102 +++
 .../apache/doris/external/hms/HmsCatalogTest.java  |   8 +
 .../org/apache/doris/qe/HmsQueryCacheTest.java     |  15 +
 .../java/org/apache/doris/qe/StmtExecutorTest.java | 870 ++-------------------
 .../apache/doris/utframe/TestWithFeService.java    |   2 +-
 .../iceberg/test_iceberg_sys_table.out             | Bin 0 -> 1028 bytes
 .../hive/test_hive_partition_values_tvf.groovy     |   4 +-
 .../iceberg/test_iceberg_sys_table.groovy          | 183 +++++
 .../iceberg/test_iceberg_time_travel.groovy        |  24 +-
 25 files changed, 768 insertions(+), 937 deletions(-)

diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/analysis/DescribeStmt.java 
b/fe/fe-core/src/main/java/org/apache/doris/analysis/DescribeStmt.java
index f5f7cdaf749..1b117d5c243 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/DescribeStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/DescribeStmt.java
@@ -40,6 +40,7 @@ import org.apache.doris.common.proc.ProcService;
 import org.apache.doris.common.proc.TableProcDir;
 import org.apache.doris.common.util.Util;
 import org.apache.doris.datasource.CatalogIf;
+import org.apache.doris.datasource.systable.SysTable;
 import org.apache.doris.mysql.privilege.PrivPredicate;
 import org.apache.doris.qe.ConnectContext;
 import org.apache.doris.qe.ShowResultSetMetaData;
@@ -130,16 +131,20 @@ public class DescribeStmt extends ShowStmt implements 
NotFallbackInParser {
         // It will convert this to corresponding table valued functions
         // eg: DESC table$partitions -> partition_values(...)
         if (dbTableName != null) {
+            // dbTableName is null if this is a table valued function,
+            // eg: desc function s3().
             dbTableName.analyze(analyzer);
             CatalogIf catalog = 
Env.getCurrentEnv().getCatalogMgr().getCatalogOrAnalysisException(dbTableName.getCtl());
-            Pair<String, String> sourceTableNameWithMetaName = 
catalog.getSourceTableNameWithMetaTableName(
-                    dbTableName.getTbl());
-            if (!Strings.isNullOrEmpty(sourceTableNameWithMetaName.second)) {
+            DatabaseIf db = 
catalog.getDbOrAnalysisException(dbTableName.getDb());
+            Pair<String, String> tableNameWithSysTableName
+                    = 
SysTable.getTableNameWithSysTableName(dbTableName.getTbl());
+            if (!Strings.isNullOrEmpty(tableNameWithSysTableName.second)) {
+                TableIf table = 
db.getTableOrDdlException(tableNameWithSysTableName.first);
                 isTableValuedFunction = true;
-                Optional<TableValuedFunctionRef> optTvfRef = 
catalog.getMetaTableFunctionRef(
+                Optional<TableValuedFunctionRef> optTvfRef = 
table.getSysTableFunctionRef(dbTableName.getCtl(),
                         dbTableName.getDb(), dbTableName.getTbl());
                 if (!optTvfRef.isPresent()) {
-                    throw new AnalysisException("meta table not found: " + 
sourceTableNameWithMetaName.second);
+                    throw new AnalysisException("sys table not found: " + 
tableNameWithSysTableName.second);
                 }
                 tableValuedFunctionRef = optTvfRef.get();
             }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/TableIf.java 
b/fe/fe-core/src/main/java/org/apache/doris/catalog/TableIf.java
index 427ad2bfeb3..42d308dfefc 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/TableIf.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/TableIf.java
@@ -18,6 +18,7 @@
 package org.apache.doris.catalog;
 
 import org.apache.doris.alter.AlterCancelException;
+import org.apache.doris.analysis.TableValuedFunctionRef;
 import org.apache.doris.catalog.constraint.Constraint;
 import org.apache.doris.catalog.constraint.ForeignKeyConstraint;
 import org.apache.doris.catalog.constraint.PrimaryKeyConstraint;
@@ -26,7 +27,9 @@ import org.apache.doris.cluster.ClusterNamespace;
 import org.apache.doris.common.DdlException;
 import org.apache.doris.common.MetaNotFoundException;
 import org.apache.doris.common.Pair;
+import org.apache.doris.datasource.systable.SysTable;
 import org.apache.doris.nereids.exceptions.AnalysisException;
+import 
org.apache.doris.nereids.trees.expressions.functions.table.TableValuedFunction;
 import org.apache.doris.persist.AlterConstraintLog;
 import org.apache.doris.statistics.AnalysisInfo;
 import org.apache.doris.statistics.BaseAnalysisTask;
@@ -516,4 +519,46 @@ public interface TableIf {
     default boolean isTemporary() {
         return false;
     }
+
+    default List<SysTable> getSupportedSysTables() {
+        return Lists.newArrayList();
+    }
+
+    /**
+     * Get TableValuedFunction by tableNameWithSysTableName
+     *
+     * @param ctlName catalog name
+     * @param dbName database name
+     * @param tableNameWithSysTableName table name with sys table suffix, eg: table$partitions
+     * @return the matching table valued function, or empty if none matches
+     */
+    default Optional<TableValuedFunction> getSysTableFunction(
+            String ctlName, String dbName, String tableNameWithSysTableName) {
+        for (SysTable sysTable : getSupportedSysTables()) {
+            if (sysTable.containsMetaTable(tableNameWithSysTableName)) {
+                return Optional.of(sysTable.createFunction(ctlName, dbName,
+                        tableNameWithSysTableName));
+            }
+        }
+        return Optional.empty();
+    }
+
+    /**
+     * Get TableValuedFunctionRef by tableNameWithSysTableName
+     *
+     * @param ctlName catalog name
+     * @param dbName database name
+     * @param tableNameWithSysTableName table name with sys table suffix, eg: table$partitions
+     * @return the matching table valued function ref, or empty if none matches
+     */
+    default Optional<TableValuedFunctionRef> getSysTableFunctionRef(
+            String ctlName, String dbName, String tableNameWithSysTableName) {
+        for (SysTable sysTable : getSupportedSysTables()) {
+            if (sysTable.containsMetaTable(tableNameWithSysTableName)) {
+                return Optional.of(sysTable.createFunctionRef(ctlName, dbName,
+                        tableNameWithSysTableName));
+            }
+        }
+        return Optional.empty();
+    }
 }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogIf.java 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogIf.java
index bedc52615b9..f876f67c092 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogIf.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogIf.java
@@ -22,7 +22,6 @@ import org.apache.doris.analysis.CreateTableStmt;
 import org.apache.doris.analysis.DropDbStmt;
 import org.apache.doris.analysis.DropTableStmt;
 import org.apache.doris.analysis.TableName;
-import org.apache.doris.analysis.TableValuedFunctionRef;
 import org.apache.doris.analysis.TruncateTableStmt;
 import org.apache.doris.catalog.DatabaseIf;
 import org.apache.doris.catalog.Env;
@@ -31,9 +30,7 @@ import org.apache.doris.common.AnalysisException;
 import org.apache.doris.common.DdlException;
 import org.apache.doris.common.ErrorCode;
 import org.apache.doris.common.MetaNotFoundException;
-import org.apache.doris.common.Pair;
 import org.apache.doris.common.UserException;
-import 
org.apache.doris.nereids.trees.expressions.functions.table.TableValuedFunction;
 
 import com.google.common.base.Strings;
 import com.google.common.collect.Lists;
@@ -208,26 +205,6 @@ public interface CatalogIf<T extends DatabaseIf> {
 
     void truncateTable(TruncateTableStmt truncateTableStmt) throws 
DdlException;
 
-    /**
-     * Try to parse meta table name from table name.
-     * Some catalog allow querying meta table like "table_name$partitions".
-     * Catalog can override this method to parse meta table name from table 
name.
-     *
-     * @param tableName table name like "table_name" or "table_name$partitions"
-     * @return pair of source table name and meta table name
-     */
-    default Pair<String, String> getSourceTableNameWithMetaTableName(String 
tableName) {
-        return Pair.of(tableName, "");
-    }
-
-    default Optional<TableValuedFunction> getMetaTableFunction(String dbName, 
String sourceNameWithMetaName) {
-        return Optional.empty();
-    }
-
-    default Optional<TableValuedFunctionRef> getMetaTableFunctionRef(String 
dbName, String sourceNameWithMetaName) {
-        return Optional.empty();
-    }
-
     // Convert from remote database name to local database name, overridden by 
subclass if necessary
     default String fromRemoteDatabaseName(String remoteDatabaseName) {
         return remoteDatabaseName;
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java
 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java
index 9b506d917f7..83ff4ae49e6 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java
@@ -17,13 +17,11 @@
 
 package org.apache.doris.datasource.hive;
 
-import org.apache.doris.analysis.TableValuedFunctionRef;
 import org.apache.doris.catalog.Env;
 import org.apache.doris.catalog.HdfsResource;
 import org.apache.doris.cluster.ClusterNamespace;
 import org.apache.doris.common.Config;
 import org.apache.doris.common.DdlException;
-import org.apache.doris.common.Pair;
 import org.apache.doris.common.ThreadPoolManager;
 import org.apache.doris.common.security.authentication.AuthenticationConfig;
 import org.apache.doris.common.security.authentication.HadoopAuthenticator;
@@ -44,15 +42,10 @@ import 
org.apache.doris.datasource.property.constants.HMSProperties;
 import org.apache.doris.fs.FileSystemProvider;
 import org.apache.doris.fs.FileSystemProviderImpl;
 import org.apache.doris.fs.remote.dfs.DFSFileSystem;
-import org.apache.doris.nereids.exceptions.AnalysisException;
-import 
org.apache.doris.nereids.trees.expressions.functions.table.PartitionValues;
-import 
org.apache.doris.nereids.trees.expressions.functions.table.TableValuedFunction;
 import org.apache.doris.transaction.TransactionManagerFactory;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Strings;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import lombok.Getter;
 import org.apache.commons.lang3.math.NumberUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -63,7 +56,6 @@ import org.apache.logging.log4j.Logger;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
-import java.util.Optional;
 import java.util.concurrent.ThreadPoolExecutor;
 
 /**
@@ -316,36 +308,6 @@ public class HMSExternalCatalog extends ExternalCatalog {
         }
     }
 
-    @Override
-    public Pair<String, String> getSourceTableNameWithMetaTableName(String 
tableName) {
-        for (MetaTableFunction metaFunction : MetaTableFunction.values()) {
-            if (metaFunction.containsMetaTable(tableName)) {
-                return Pair.of(metaFunction.getSourceTableName(tableName), 
metaFunction.name().toLowerCase());
-            }
-        }
-        return Pair.of(tableName, "");
-    }
-
-    @Override
-    public Optional<TableValuedFunction> getMetaTableFunction(String dbName, 
String sourceNameWithMetaName) {
-        for (MetaTableFunction metaFunction : MetaTableFunction.values()) {
-            if (metaFunction.containsMetaTable(sourceNameWithMetaName)) {
-                return Optional.of(metaFunction.createFunction(name, dbName, 
sourceNameWithMetaName));
-            }
-        }
-        return Optional.empty();
-    }
-
-    @Override
-    public Optional<TableValuedFunctionRef> getMetaTableFunctionRef(String 
dbName, String sourceNameWithMetaName) {
-        for (MetaTableFunction metaFunction : MetaTableFunction.values()) {
-            if (metaFunction.containsMetaTable(sourceNameWithMetaName)) {
-                return Optional.of(metaFunction.createFunctionRef(name, 
dbName, sourceNameWithMetaName));
-            }
-        }
-        return Optional.empty();
-    }
-
     public String getHiveMetastoreUris() {
         return catalogProperty.getOrDefault(HMSProperties.HIVE_METASTORE_URIS, 
"");
     }
@@ -362,59 +324,6 @@ public class HMSExternalCatalog extends ExternalCatalog {
         return enableHmsEventsIncrementalSync;
     }
 
-    /**
-     * Enum for meta tables in hive catalog.
-     * eg: tbl$partitions
-     */
-    private enum MetaTableFunction {
-        PARTITIONS("partition_values");
-
-        private final String suffix;
-        private final String tvfName;
-
-        MetaTableFunction(String tvfName) {
-            this.suffix = "$" + name().toLowerCase();
-            this.tvfName = tvfName;
-        }
-
-        boolean containsMetaTable(String tableName) {
-            return tableName.endsWith(suffix) && (tableName.length() > 
suffix.length());
-        }
-
-        String getSourceTableName(String tableName) {
-            return tableName.substring(0, tableName.length() - 
suffix.length());
-        }
-
-        public TableValuedFunction createFunction(String ctlName, String 
dbName, String sourceNameWithMetaName) {
-            switch (this) {
-                case PARTITIONS:
-                    List<String> nameParts = Lists.newArrayList(ctlName, 
dbName,
-                            getSourceTableName(sourceNameWithMetaName));
-                    return PartitionValues.create(nameParts);
-                default:
-                    throw new AnalysisException("Unsupported meta function 
type: " + this);
-            }
-        }
-
-        public TableValuedFunctionRef createFunctionRef(String ctlName, String 
dbName, String sourceNameWithMetaName) {
-            switch (this) {
-                case PARTITIONS:
-                    Map<String, String> params = Maps.newHashMap();
-                    params.put("catalog", ctlName);
-                    params.put("database", dbName);
-                    params.put("table", 
getSourceTableName(sourceNameWithMetaName));
-                    try {
-                        return new TableValuedFunctionRef(tvfName, null, 
params);
-                    } catch (org.apache.doris.common.AnalysisException e) {
-                        LOG.warn("should not happen. {}.{}.{}", ctlName, 
dbName, sourceNameWithMetaName);
-                        return null;
-                    }
-                default:
-                    throw new AnalysisException("Unsupported meta function 
type: " + this);
-            }
-        }
-    }
-
     public IcebergMetadataOps getIcebergMetadataOps() {
         makeSureInitialized();
         if (icebergMetadataOps == null) {
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalTable.java
 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalTable.java
index adcc85288ff..670ca0b7e18 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalTable.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalTable.java
@@ -47,6 +47,8 @@ import org.apache.doris.datasource.mvcc.EmptyMvccSnapshot;
 import org.apache.doris.datasource.mvcc.MvccSnapshot;
 import org.apache.doris.datasource.mvcc.MvccTable;
 import org.apache.doris.datasource.mvcc.MvccUtil;
+import org.apache.doris.datasource.systable.SupportedSysTables;
+import org.apache.doris.datasource.systable.SysTable;
 import org.apache.doris.fs.FileSystemDirectoryLister;
 import org.apache.doris.mtmv.MTMVBaseTableIf;
 import org.apache.doris.mtmv.MTMVRefreshContext;
@@ -1137,4 +1139,19 @@ public class HMSExternalTable extends ExternalTable 
implements MTMVRelatedTableI
         makeSureInitialized();
         return dlaTable.isValidRelatedTable();
     }
+
+    @Override
+    public List<SysTable> getSupportedSysTables() {
+        makeSureInitialized();
+        switch (dlaType) {
+            case HIVE:
+                return SupportedSysTables.HIVE_SUPPORTED_SYS_TABLES;
+            case ICEBERG:
+                return SupportedSysTables.ICEBERG_SUPPORTED_SYS_TABLES;
+            case HUDI:
+                return SupportedSysTables.HUDI_SUPPORTED_SYS_TABLES;
+            default:
+                return Lists.newArrayList();
+        }
+    }
 }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalTable.java
 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalTable.java
index 3c06c80ed7d..c6b1b6660a1 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalTable.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalTable.java
@@ -29,6 +29,8 @@ import org.apache.doris.datasource.ExternalTable;
 import org.apache.doris.datasource.SchemaCacheValue;
 import org.apache.doris.datasource.mvcc.MvccSnapshot;
 import org.apache.doris.datasource.mvcc.MvccTable;
+import org.apache.doris.datasource.systable.SupportedSysTables;
+import org.apache.doris.datasource.systable.SysTable;
 import org.apache.doris.mtmv.MTMVBaseTableIf;
 import org.apache.doris.mtmv.MTMVRefreshContext;
 import org.apache.doris.mtmv.MTMVRelatedTableIf;
@@ -266,4 +268,10 @@ public class IcebergExternalTable extends ExternalTable 
implements MTMVRelatedTa
     public void setIsValidRelatedTableCached(boolean isCached) {
         this.isValidRelatedTableCached = isCached;
     }
+
+    @Override
+    public List<SysTable> getSupportedSysTables() {
+        makeSureInitialized();
+        return SupportedSysTables.ICEBERG_SUPPORTED_SYS_TABLES;
+    }
 }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalTable.java
 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalTable.java
index c63515bb453..0f647f24cfa 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalTable.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalTable.java
@@ -33,6 +33,8 @@ import org.apache.doris.datasource.SchemaCacheValue;
 import org.apache.doris.datasource.mvcc.MvccSnapshot;
 import org.apache.doris.datasource.mvcc.MvccTable;
 import org.apache.doris.datasource.mvcc.MvccUtil;
+import org.apache.doris.datasource.systable.SupportedSysTables;
+import org.apache.doris.datasource.systable.SysTable;
 import org.apache.doris.mtmv.MTMVBaseTableIf;
 import org.apache.doris.mtmv.MTMVRefreshContext;
 import org.apache.doris.mtmv.MTMVRelatedTableIf;
@@ -285,4 +287,9 @@ public class PaimonExternalTable extends ExternalTable 
implements MTMVRelatedTab
         }
     }
 
+    @Override
+    public List<SysTable> getSupportedSysTables() {
+        makeSureInitialized();
+        return SupportedSysTables.PAIMON_SUPPORTED_SYS_TABLES;
+    }
 }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/systable/IcebergSnapshotsSysTable.java
 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/systable/IcebergSnapshotsSysTable.java
new file mode 100644
index 00000000000..3813ef68488
--- /dev/null
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/systable/IcebergSnapshotsSysTable.java
@@ -0,0 +1,65 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.datasource.systable;
+
+import org.apache.doris.analysis.TableValuedFunctionRef;
+import org.apache.doris.nereids.trees.expressions.functions.table.IcebergMeta;
+import 
org.apache.doris.nereids.trees.expressions.functions.table.TableValuedFunction;
+import org.apache.doris.tablefunction.IcebergTableValuedFunction;
+
+import com.google.common.base.Joiner;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.util.List;
+import java.util.Map;
+
+// table$snapshots
+public class IcebergSnapshotsSysTable extends SysTable {
+    private static final Logger LOG = 
LogManager.getLogger(IcebergSnapshotsSysTable.class);
+
+    public static final IcebergSnapshotsSysTable INSTANCE = new 
IcebergSnapshotsSysTable();
+
+    private IcebergSnapshotsSysTable() {
+        super("snapshots", "iceberg_meta");
+    }
+
+    @Override
+    public TableValuedFunction createFunction(String ctlName, String dbName, 
String sourceNameWithMetaName) {
+        List<String> nameParts = Lists.newArrayList(ctlName, dbName,
+                getSourceTableName(sourceNameWithMetaName));
+        return IcebergMeta.createSnapshots(nameParts);
+    }
+
+    @Override
+    public TableValuedFunctionRef createFunctionRef(String ctlName, String 
dbName, String sourceNameWithMetaName) {
+        List<String> nameParts = Lists.newArrayList(ctlName, dbName,
+                getSourceTableName(sourceNameWithMetaName));
+        Map<String, String> params = Maps.newHashMap();
+        params.put(IcebergTableValuedFunction.TABLE, 
Joiner.on(".").join(nameParts));
+        params.put(IcebergTableValuedFunction.QUERY_TYPE, "snapshots");
+        try {
+            return new TableValuedFunctionRef(tvfName, null, params);
+        } catch (org.apache.doris.common.AnalysisException e) {
+            LOG.warn("should not happen. {}.{}.{}", ctlName, dbName, 
sourceNameWithMetaName, e);
+            return null;
+        }
+    }
+}
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/systable/PartitionsSysTable.java
 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/systable/PartitionsSysTable.java
new file mode 100644
index 00000000000..d4565a0b8bc
--- /dev/null
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/systable/PartitionsSysTable.java
@@ -0,0 +1,62 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.datasource.systable;
+
+import org.apache.doris.analysis.TableValuedFunctionRef;
+import 
org.apache.doris.nereids.trees.expressions.functions.table.PartitionValues;
+import 
org.apache.doris.nereids.trees.expressions.functions.table.TableValuedFunction;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.util.List;
+import java.util.Map;
+
+// table$partitions
+public class PartitionsSysTable extends SysTable {
+    private static final Logger LOG = 
LogManager.getLogger(PartitionsSysTable.class);
+
+    public static final PartitionsSysTable INSTANCE = new PartitionsSysTable();
+
+    private PartitionsSysTable() {
+        super("partitions", "partition_values");
+    }
+
+    @Override
+    public TableValuedFunction createFunction(String ctlName, String dbName, 
String sourceNameWithMetaName) {
+        List<String> nameParts = Lists.newArrayList(ctlName, dbName,
+                getSourceTableName(sourceNameWithMetaName));
+        return PartitionValues.create(nameParts);
+    }
+
+    @Override
+    public TableValuedFunctionRef createFunctionRef(String ctlName, String 
dbName, String sourceNameWithMetaName) {
+        Map<String, String> params = Maps.newHashMap();
+        params.put("catalog", ctlName);
+        params.put("database", dbName);
+        params.put("table", getSourceTableName(sourceNameWithMetaName));
+        try {
+            return new TableValuedFunctionRef(tvfName, null, params);
+        } catch (org.apache.doris.common.AnalysisException e) {
+            LOG.warn("should not happen. {}.{}.{}", ctlName, dbName, 
sourceNameWithMetaName);
+            return null;
+        }
+    }
+}
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/systable/SupportedSysTables.java
 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/systable/SupportedSysTables.java
new file mode 100644
index 00000000000..15bdf80aebb
--- /dev/null
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/systable/SupportedSysTables.java
@@ -0,0 +1,42 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.datasource.systable;
+
+import com.google.common.collect.Lists;
+
+import java.util.List;
+
+public class SupportedSysTables {
+    public static final List<SysTable> HIVE_SUPPORTED_SYS_TABLES;
+    public static final List<SysTable> ICEBERG_SUPPORTED_SYS_TABLES;
+    public static final List<SysTable> PAIMON_SUPPORTED_SYS_TABLES;
+    public static final List<SysTable> HUDI_SUPPORTED_SYS_TABLES;
+
+    static {
+        // hive
+        HIVE_SUPPORTED_SYS_TABLES = Lists.newArrayList();
+        HIVE_SUPPORTED_SYS_TABLES.add(PartitionsSysTable.INSTANCE);
+        // iceberg
+        ICEBERG_SUPPORTED_SYS_TABLES = Lists.newArrayList();
+        ICEBERG_SUPPORTED_SYS_TABLES.add(IcebergSnapshotsSysTable.INSTANCE);
+        // paimon
+        PAIMON_SUPPORTED_SYS_TABLES = Lists.newArrayList();
+        // hudi
+        HUDI_SUPPORTED_SYS_TABLES = Lists.newArrayList();
+    }
+}
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/systable/SysTable.java 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/systable/SysTable.java
new file mode 100644
index 00000000000..2143f73d9ef
--- /dev/null
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/systable/SysTable.java
@@ -0,0 +1,68 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.datasource.systable;
+
+import org.apache.doris.analysis.TableValuedFunctionRef;
+import org.apache.doris.common.Pair;
+import 
org.apache.doris.nereids.trees.expressions.functions.table.TableValuedFunction;
+
+public abstract class SysTable {
+    // eg. table$partitions
+    //  sysTableName => partitions
+    //  tvfName => partition_values
+    //  suffix => $partitions
+    protected final String sysTableName;
+    protected final String tvfName;
+    protected final String suffix;
+
+    protected SysTable(String sysTableName, String tvfName) {
+        this.sysTableName = sysTableName;
+        this.suffix = "$" + sysTableName.toLowerCase();
+        this.tvfName = tvfName;
+    }
+
+    public String getSysTableName() {
+        return sysTableName;
+    }
+
+    public boolean containsMetaTable(String tableName) {
+        return tableName.endsWith(suffix) && (tableName.length() > 
suffix.length());
+    }
+
+    public String getSourceTableName(String tableName) {
+        return tableName.substring(0, tableName.length() - suffix.length());
+    }
+
+    public abstract TableValuedFunction createFunction(String ctlName, String 
dbName, String sourceNameWithMetaName);
+
+    public abstract TableValuedFunctionRef createFunctionRef(String ctlName, 
String dbName,
+            String sourceNameWithMetaName);
+
+    // table$partition => <table, partition>
+    // table$xx$partition => <table$xx, partition>
+    public static Pair<String, String> getTableNameWithSysTableName(String 
input) {
+        int lastDollarIndex = input.lastIndexOf('$');
+        if (lastDollarIndex == -1 || lastDollarIndex == input.length() - 1) {
+            return Pair.of(input, "");
+        } else {
+            String before = input.substring(0, lastDollarIndex);
+            String after = input.substring(lastDollarIndex + 1);
+            return Pair.of(before, after);
+        }
+    }
+}
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindRelation.java
 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindRelation.java
index 1ef7c981fcf..02a244571e2 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindRelation.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindRelation.java
@@ -359,8 +359,8 @@ public class BindRelation extends OneAnalysisRuleFactory {
 
     private Optional<LogicalPlan> handleMetaTable(TableIf table, 
UnboundRelation unboundRelation,
             List<String> qualifiedTableName) {
-        Optional<TableValuedFunction> tvf = 
table.getDatabase().getCatalog().getMetaTableFunction(
-                qualifiedTableName.get(1), qualifiedTableName.get(2));
+        Optional<TableValuedFunction> tvf = table.getSysTableFunction(
+                qualifiedTableName.get(0), qualifiedTableName.get(1), 
qualifiedTableName.get(2));
         if (tvf.isPresent()) {
             return Optional.of(new 
LogicalTVFRelation(unboundRelation.getRelationId(), tvf.get()));
         }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/table/IcebergMeta.java
 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/table/IcebergMeta.java
index fe7654b730e..1342a5d9091 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/table/IcebergMeta.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/table/IcebergMeta.java
@@ -25,6 +25,10 @@ import org.apache.doris.nereids.types.coercion.AnyDataType;
 import org.apache.doris.tablefunction.IcebergTableValuedFunction;
 import org.apache.doris.tablefunction.TableValuedFunctionIf;
 
+import com.google.common.base.Joiner;
+import com.google.common.collect.Maps;
+
+import java.util.List;
 import java.util.Map;
 
 /** iceberg_meta */
@@ -33,6 +37,13 @@ public class IcebergMeta extends TableValuedFunction {
         super("iceberg_meta", properties);
     }
 
+    public static IcebergMeta createSnapshots(List<String> nameParts) {
+        Map<String, String> prop = Maps.newHashMap();
+        prop.put(IcebergTableValuedFunction.TABLE, 
Joiner.on(".").join(nameParts));
+        prop.put(IcebergTableValuedFunction.QUERY_TYPE, "snapshots");
+        return new IcebergMeta(new Properties(prop));
+    }
+
     @Override
     public FunctionSignature customSignature() {
         return FunctionSignature.of(AnyDataType.INSTANCE_WITHOUT_INDEX, 
getArgumentsTypes());
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/DescribeCommand.java
 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/DescribeCommand.java
index e0d8b3f79db..a170880ca1e 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/DescribeCommand.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/DescribeCommand.java
@@ -41,6 +41,7 @@ import org.apache.doris.common.proc.ProcService;
 import org.apache.doris.common.proc.TableProcDir;
 import org.apache.doris.common.util.Util;
 import org.apache.doris.datasource.CatalogIf;
+import org.apache.doris.datasource.systable.SysTable;
 import org.apache.doris.mysql.privilege.PrivPredicate;
 import org.apache.doris.nereids.trees.plans.PlanType;
 import org.apache.doris.nereids.trees.plans.commands.info.PartitionNamesInfo;
@@ -190,14 +191,16 @@ public class DescribeCommand extends ShowCommand {
         if (dbTableName != null) {
             dbTableName.analyze(ctx);
             CatalogIf catalog = 
Env.getCurrentEnv().getCatalogMgr().getCatalogOrAnalysisException(dbTableName.getCtl());
-            Pair<String, String> sourceTableNameWithMetaName = 
catalog.getSourceTableNameWithMetaTableName(
-                    dbTableName.getTbl());
-            if (!Strings.isNullOrEmpty(sourceTableNameWithMetaName.second)) {
+            DatabaseIf db = 
catalog.getDbOrAnalysisException(dbTableName.getDb());
+            Pair<String, String> tableNameWithSysTableName
+                    = 
SysTable.getTableNameWithSysTableName(dbTableName.getTbl());
+            if (!Strings.isNullOrEmpty(tableNameWithSysTableName.second)) {
+                TableIf table = 
db.getTableOrDdlException(tableNameWithSysTableName.first);
                 isTableValuedFunction = true;
-                Optional<TableValuedFunctionRef> optTvfRef = 
catalog.getMetaTableFunctionRef(
-                        dbTableName.getDb(), dbTableName.getTbl());
+                Optional<TableValuedFunctionRef> optTvfRef = 
table.getSysTableFunctionRef(
+                        dbTableName.getCtl(), dbTableName.getDb(), 
dbTableName.getTbl());
                 if (!optTvfRef.isPresent()) {
-                    throw new AnalysisException("meta table not found: " + 
sourceTableNameWithMetaName.second);
+                    throw new AnalysisException("sys table not found: " + 
tableNameWithSysTableName.second);
                 }
                 tableValuedFunctionRef = optTvfRef.get();
             }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/util/RelationUtil.java 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/util/RelationUtil.java
index bb6e1b06b99..75a2fa8ffdf 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/util/RelationUtil.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/util/RelationUtil.java
@@ -23,12 +23,14 @@ import org.apache.doris.catalog.MaterializedIndexMeta;
 import org.apache.doris.catalog.TableIf;
 import org.apache.doris.common.Pair;
 import org.apache.doris.datasource.CatalogIf;
+import org.apache.doris.datasource.systable.SysTable;
 import org.apache.doris.nereids.NereidsPlanner;
 import org.apache.doris.nereids.StatementContext;
 import org.apache.doris.nereids.exceptions.AnalysisException;
 import org.apache.doris.nereids.parser.NereidsParser;
 import org.apache.doris.nereids.properties.PhysicalProperties;
 import org.apache.doris.nereids.trees.expressions.Slot;
+import 
org.apache.doris.nereids.trees.expressions.functions.table.TableValuedFunction;
 import org.apache.doris.nereids.trees.plans.commands.ExplainCommand;
 import org.apache.doris.nereids.trees.plans.logical.LogicalCatalogRelation;
 import org.apache.doris.nereids.trees.plans.logical.LogicalPlan;
@@ -123,13 +125,18 @@ public class RelationUtil {
             throw new AnalysisException(java.lang.String.format("Catalog %s 
does not exist.", catalogName));
         }
         try {
-            DatabaseIf<TableIf> db = catalog.getDb(dbName).orElseThrow(() -> 
new AnalysisException(
+            DatabaseIf<TableIf> db = catalog.getDbOrException(dbName, s -> new 
AnalysisException(
                     "Database [" + dbName + "] does not exist."));
-            Pair<String, String> sourceTblNameWithMetaTblName = 
catalog.getSourceTableNameWithMetaTableName(tableName);
-            String sourceTableName = sourceTblNameWithMetaTblName.first;
-            TableIf table = db.getTable(sourceTableName).orElseThrow(() -> new 
AnalysisException(
-                    "Table [" + sourceTableName + "] does not exist in 
database [" + dbName + "]."));
-            return Pair.of(db, table);
+            Pair<String, String> tableNameWithSysTableName
+                    = SysTable.getTableNameWithSysTableName(tableName);
+            TableIf tbl = 
db.getTableOrException(tableNameWithSysTableName.first,
+                    s -> new AnalysisException(
+                            "Table [" + tableName + "] does not exist in 
database [" + dbName + "]."));
+            Optional<TableValuedFunction> sysTable = 
tbl.getSysTableFunction(catalogName, dbName, tableName);
+            if (!Strings.isNullOrEmpty(tableNameWithSysTableName.second) && 
!sysTable.isPresent()) {
+                throw new AnalysisException("Unknown sys table '" + tableName 
+ "'");
+            }
+            return Pair.of(db, tbl);
         } catch (Throwable e) {
             throw new AnalysisException(e.getMessage(), e.getCause());
         }
@@ -205,3 +212,4 @@ public class RelationUtil {
         return columns;
     }
 }
+
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/tablefunction/IcebergTableValuedFunction.java
 
b/fe/fe-core/src/main/java/org/apache/doris/tablefunction/IcebergTableValuedFunction.java
index 424b0387e93..726c4e144f5 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/tablefunction/IcebergTableValuedFunction.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/tablefunction/IcebergTableValuedFunction.java
@@ -47,8 +47,8 @@ import java.util.Map;
 public class IcebergTableValuedFunction extends MetadataTableValuedFunction {
 
     public static final String NAME = "iceberg_meta";
-    private static final String TABLE = "table";
-    private static final String QUERY_TYPE = "query_type";
+    public static final String TABLE = "table";
+    public static final String QUERY_TYPE = "query_type";
 
     private static final ImmutableSet<String> PROPERTIES_SET = 
ImmutableSet.of(TABLE, QUERY_TYPE);
 
diff --git 
a/fe/fe-core/src/test/java/org/apache/doris/datasource/systable/SysTableTest.java
 
b/fe/fe-core/src/test/java/org/apache/doris/datasource/systable/SysTableTest.java
new file mode 100644
index 00000000000..b7ab265037f
--- /dev/null
+++ 
b/fe/fe-core/src/test/java/org/apache/doris/datasource/systable/SysTableTest.java
@@ -0,0 +1,102 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.datasource.systable;
+
+import org.apache.doris.common.Pair;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class SysTableTest {
+    // Mock implementation of SysTable for testing
+    private static class MockSysTable extends SysTable {
+        public MockSysTable(String sysTableName, String tvfName) {
+            super(sysTableName, tvfName);
+        }
+
+        @Override
+        public 
org.apache.doris.nereids.trees.expressions.functions.table.TableValuedFunction 
createFunction(
+                String ctlName, String dbName, String sourceNameWithMetaName) {
+            return null;
+        }
+
+        @Override
+        public org.apache.doris.analysis.TableValuedFunctionRef 
createFunctionRef(
+                String ctlName, String dbName, String sourceNameWithMetaName) {
+            return null;
+        }
+    }
+
+    @Test
+    public void testBasicProperties() {
+        MockSysTable sysTable = new MockSysTable("test_table", "test_tvf");
+        Assert.assertEquals("test_table", sysTable.getSysTableName());
+        Assert.assertEquals("$test_table", sysTable.suffix);
+        Assert.assertEquals("test_tvf", sysTable.tvfName);
+    }
+
+    @Test
+    public void testContainsMetaTable() {
+        MockSysTable sysTable = new MockSysTable("partitions", 
"partition_values");
+
+        // Positive cases
+        Assert.assertTrue(sysTable.containsMetaTable("mytable$partitions"));
+        
Assert.assertTrue(sysTable.containsMetaTable("my_complex_table$partitions"));
+
+        // Negative cases
+        Assert.assertFalse(sysTable.containsMetaTable("mytable"));
+        Assert.assertFalse(sysTable.containsMetaTable("$partitions")); // No 
table name
+        Assert.assertFalse(sysTable.containsMetaTable("mytable$other"));
+        Assert.assertFalse(sysTable.containsMetaTable(""));
+    }
+
+    @Test
+    public void testGetSourceTableName() {
+        MockSysTable sysTable = new MockSysTable("partitions", 
"partition_values");
+
+        Assert.assertEquals("mytable", 
sysTable.getSourceTableName("mytable$partitions"));
+        Assert.assertEquals("complex_table", 
sysTable.getSourceTableName("complex_table$partitions"));
+        Assert.assertEquals("table$with$dollar", 
sysTable.getSourceTableName("table$with$dollar$partitions"));
+    }
+
+    @Test
+    public void testGetTableNameWithSysTableName() {
+        // Test normal case
+        Pair<String, String> result1 = 
SysTable.getTableNameWithSysTableName("table$partitions");
+        Assert.assertEquals("table", result1.first);
+        Assert.assertEquals("partitions", result1.second);
+
+        // Test with multiple $ symbols
+        Pair<String, String> result2 = 
SysTable.getTableNameWithSysTableName("table$with$dollar$partitions");
+        Assert.assertEquals("table$with$dollar", result2.first);
+        Assert.assertEquals("partitions", result2.second);
+
+        // Test edge cases
+        Pair<String, String> result3 = 
SysTable.getTableNameWithSysTableName("table");
+        Assert.assertEquals("table", result3.first);
+        Assert.assertEquals("", result3.second);
+
+        Pair<String, String> result4 = 
SysTable.getTableNameWithSysTableName("$");
+        Assert.assertEquals("$", result4.first);
+        Assert.assertEquals("", result4.second);
+
+        Pair<String, String> result5 = 
SysTable.getTableNameWithSysTableName("");
+        Assert.assertEquals("", result5.first);
+        Assert.assertEquals("", result5.second);
+    }
+}
diff --git 
a/fe/fe-core/src/test/java/org/apache/doris/external/hms/HmsCatalogTest.java 
b/fe/fe-core/src/test/java/org/apache/doris/external/hms/HmsCatalogTest.java
index 4abed2ee708..6a88185a4f6 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/external/hms/HmsCatalogTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/external/hms/HmsCatalogTest.java
@@ -110,6 +110,7 @@ public class HmsCatalogTest extends AnalyzeCheckTestBase {
         Deencapsulation.setField(tbl, "dbName", "hms_db");
         Deencapsulation.setField(tbl, "name", "hms_tbl");
         Deencapsulation.setField(tbl, "dlaTable", new HiveDlaTable(tbl));
+        Deencapsulation.setField(tbl, "dlaType", DLAType.HIVE);
         new Expectations(tbl) {
             {
                 tbl.getId();
@@ -160,6 +161,7 @@ public class HmsCatalogTest extends AnalyzeCheckTestBase {
         Deencapsulation.setField(view1, "catalog", hmsCatalog);
         Deencapsulation.setField(view1, "dbName", "hms_db");
         Deencapsulation.setField(view1, "name", "hms_view1");
+        Deencapsulation.setField(view1, "dlaType", DLAType.HIVE);
 
         new Expectations(view1) {
             {
@@ -211,6 +213,8 @@ public class HmsCatalogTest extends AnalyzeCheckTestBase {
         Deencapsulation.setField(view2, "catalog", hmsCatalog);
         Deencapsulation.setField(view2, "dbName", "hms_db");
         Deencapsulation.setField(view2, "name", "hms_view2");
+        Deencapsulation.setField(view2, "dlaType", DLAType.HIVE);
+
         new Expectations(view2) {
             {
 
@@ -262,6 +266,8 @@ public class HmsCatalogTest extends AnalyzeCheckTestBase {
         Deencapsulation.setField(view3, "catalog", hmsCatalog);
         Deencapsulation.setField(view3, "dbName", "hms_db");
         Deencapsulation.setField(view3, "name", "hms_view3");
+        Deencapsulation.setField(view3, "dlaType", DLAType.HIVE);
+
         new Expectations(view3) {
             {
 
@@ -313,6 +319,8 @@ public class HmsCatalogTest extends AnalyzeCheckTestBase {
         Deencapsulation.setField(view4, "catalog", hmsCatalog);
         Deencapsulation.setField(view4, "dbName", "hms_db");
         Deencapsulation.setField(view4, "name", "hms_view4");
+        Deencapsulation.setField(view4, "dlaType", DLAType.HIVE);
+
         new Expectations(view4) {
             {
 
diff --git 
a/fe/fe-core/src/test/java/org/apache/doris/qe/HmsQueryCacheTest.java 
b/fe/fe-core/src/test/java/org/apache/doris/qe/HmsQueryCacheTest.java
index 376f8cba4e8..8c530908fb4 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/qe/HmsQueryCacheTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/qe/HmsQueryCacheTest.java
@@ -38,6 +38,7 @@ import org.apache.doris.datasource.hive.HMSExternalTable;
 import org.apache.doris.datasource.hive.HMSExternalTable.DLAType;
 import org.apache.doris.datasource.hive.HiveDlaTable;
 import org.apache.doris.datasource.hive.source.HiveScanNode;
+import org.apache.doris.datasource.systable.SupportedSysTables;
 import org.apache.doris.nereids.datasets.tpch.AnalyzeCheckTestBase;
 import org.apache.doris.planner.OlapScanNode;
 import org.apache.doris.planner.PlanNodeId;
@@ -126,6 +127,7 @@ public class HmsQueryCacheTest extends AnalyzeCheckTestBase 
{
         Deencapsulation.setField(tbl, "dbName", "hms_db");
         Deencapsulation.setField(tbl, "name", "hms_tbl");
         Deencapsulation.setField(tbl, "dlaTable", new HiveDlaTable(tbl));
+        Deencapsulation.setField(tbl, "dlaType", DLAType.HIVE);
         new Expectations(tbl) {
             {
                 tbl.getId();
@@ -177,6 +179,7 @@ public class HmsQueryCacheTest extends AnalyzeCheckTestBase 
{
         Deencapsulation.setField(tbl2, "dbName", "hms_db");
         Deencapsulation.setField(tbl2, "name", "hms_tbl2");
         Deencapsulation.setField(tbl2, "dlaTable", new HiveDlaTable(tbl2));
+        Deencapsulation.setField(tbl2, "dlaType", DLAType.HIVE);
         new Expectations(tbl2) {
             {
                 tbl2.getId();
@@ -218,6 +221,10 @@ public class HmsQueryCacheTest extends 
AnalyzeCheckTestBase {
                 tbl2.getDatabase();
                 minTimes = 0;
                 result = db;
+
+                tbl2.getSupportedSysTables();
+                minTimes = 0;
+                result = SupportedSysTables.HIVE_SUPPORTED_SYS_TABLES;
             }
         };
 
@@ -272,6 +279,10 @@ public class HmsQueryCacheTest extends 
AnalyzeCheckTestBase {
                 view1.getDatabase();
                 minTimes = 0;
                 result = db;
+
+                view1.getSupportedSysTables();
+                minTimes = 0;
+                result = SupportedSysTables.HIVE_SUPPORTED_SYS_TABLES;
             }
         };
 
@@ -325,6 +336,10 @@ public class HmsQueryCacheTest extends 
AnalyzeCheckTestBase {
                 view2.getDatabase();
                 minTimes = 0;
                 result = db;
+
+                view2.getSupportedSysTables();
+                minTimes = 0;
+                result = SupportedSysTables.HIVE_SUPPORTED_SYS_TABLES;
             }
         };
 
diff --git a/fe/fe-core/src/test/java/org/apache/doris/qe/StmtExecutorTest.java 
b/fe/fe-core/src/test/java/org/apache/doris/qe/StmtExecutorTest.java
index f62747b52f8..a9b107db6ba 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/qe/StmtExecutorTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/qe/StmtExecutorTest.java
@@ -17,878 +17,172 @@
 
 package org.apache.doris.qe;
 
-import org.apache.doris.analysis.AccessTestUtil;
-import org.apache.doris.analysis.Analyzer;
-import org.apache.doris.analysis.CreateFileStmt;
-import org.apache.doris.analysis.CreateFunctionStmt;
-import org.apache.doris.analysis.DdlStmt;
-import org.apache.doris.analysis.Expr;
-import org.apache.doris.analysis.KillStmt;
-import org.apache.doris.analysis.QueryStmt;
-import org.apache.doris.analysis.RedirectStatus;
-import org.apache.doris.analysis.SetStmt;
-import org.apache.doris.analysis.ShowAuthorStmt;
-import org.apache.doris.analysis.ShowStmt;
-import org.apache.doris.analysis.SqlParser;
-import org.apache.doris.analysis.UseStmt;
-import org.apache.doris.catalog.Env;
+import org.apache.doris.catalog.InternalSchemaInitializer;
 import org.apache.doris.common.Config;
-import org.apache.doris.common.DdlException;
-import org.apache.doris.common.jmockit.Deencapsulation;
-import org.apache.doris.common.profile.Profile;
-import org.apache.doris.datasource.InternalCatalog;
-import org.apache.doris.metric.MetricRepo;
-import org.apache.doris.mysql.MysqlChannel;
+import org.apache.doris.common.FeConstants;
 import org.apache.doris.mysql.MysqlCommand;
-import org.apache.doris.mysql.MysqlSerializer;
 import org.apache.doris.nereids.StatementContext;
 import org.apache.doris.nereids.exceptions.MustFallbackException;
 import org.apache.doris.nereids.glue.LogicalPlanAdapter;
 import org.apache.doris.nereids.trees.plans.commands.CreatePolicyCommand;
 import org.apache.doris.nereids.trees.plans.logical.LogicalPlan;
-import org.apache.doris.planner.OriginalPlanner;
 import org.apache.doris.policy.PolicyTypeEnum;
-import org.apache.doris.qe.ConnectContext.ConnectType;
-import org.apache.doris.rewrite.ExprRewriter;
-import org.apache.doris.service.FrontendOptions;
-import org.apache.doris.thrift.TQueryOptions;
-import org.apache.doris.thrift.TUniqueId;
+import org.apache.doris.utframe.TestWithFeService;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-import java_cup.runtime.Symbol;
-import mockit.Expectations;
 import mockit.Mock;
 import mockit.MockUp;
-import mockit.Mocked;
 import org.junit.Assert;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
 import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 
-import java.io.IOException;
-import java.net.UnknownHostException;
-import java.nio.ByteBuffer;
-import java.util.List;
-import java.util.SortedMap;
-import java.util.concurrent.atomic.AtomicBoolean;
+public class StmtExecutorTest extends TestWithFeService {
 
-public class StmtExecutorTest {
-    private ConnectContext ctx;
-    private QueryState state;
-    private ConnectScheduler scheduler;
-    @Mocked
-    private MysqlChannel channel = null;
-
-    @BeforeClass
-    public static void start() {
-        MetricRepo.init();
-        try {
-            FrontendOptions.init();
-        } catch (UnknownHostException e) {
-            // TODO Auto-generated catch block
-            e.printStackTrace();
-        }
-    }
-
-    @Before
-    public void setUp() throws IOException {
-        state = new QueryState();
-        scheduler = new ConnectScheduler(10);
-        ctx = new ConnectContext();
-
-        SessionVariable sessionVariable = new SessionVariable();
-        new Expectations(ctx) {
-            {
-                ctx.getSessionVariable();
-                minTimes = 0;
-                result = sessionVariable;
-
-                ConnectContext.get().getSessionVariable();
-                minTimes = 0;
-                result = sessionVariable;
-            }
-        };
-
-        MysqlSerializer serializer = MysqlSerializer.newInstance();
-        Env env = AccessTestUtil.fetchAdminCatalog();
-        new Expectations(channel) {
-            {
-                channel.sendOnePacket((ByteBuffer) any);
-                minTimes = 0;
-
-                channel.reset();
-                minTimes = 0;
-
-                channel.getSerializer();
-                minTimes = 0;
-                result = serializer;
-            }
-        };
-
-        new Expectations(ctx) {
-            {
-                ctx.getMysqlChannel();
-                minTimes = 0;
-                result = channel;
-
-                ctx.getEnv();
-                minTimes = 0;
-                result = env;
-
-                ctx.getState();
-                minTimes = 0;
-                result = state;
-
-                ctx.getConnectScheduler();
-                minTimes = 0;
-                result = scheduler;
-
-                ctx.getConnectionId();
-                minTimes = 0;
-                result = 1;
-
-                ctx.getQualifiedUser();
-                minTimes = 0;
-                result = "testUser";
-
-                ctx.getForwardedStmtId();
-                minTimes = 0;
-                result = 123L;
-
-                ctx.setKilled();
-                minTimes = 0;
-
-                ctx.updateReturnRows(anyInt);
-                minTimes = 0;
-
-                ctx.setQueryId((TUniqueId) any);
-                minTimes = 0;
-
-                ctx.queryId();
-                minTimes = 0;
-                result = new TUniqueId();
-
-                ctx.getStartTime();
-                minTimes = 0;
-                result = 0L;
-
-                ctx.getDatabase();
-                minTimes = 0;
-                result = "testDb";
-
-                ctx.setStmtId(anyLong);
-                minTimes = 0;
-
-                ctx.getStmtId();
-                minTimes = 0;
-                result = 1L;
-            }
-        };
-    }
-
-    // For unknown reasons, this test fails after adding TQueryOptions to the 
135th field
-    @Disabled
-    public void testSelect(@Mocked QueryStmt queryStmt,
-                           @Mocked SqlParser parser,
-                           @Mocked OriginalPlanner planner,
-                           @Mocked Coordinator coordinator,
-                           @Mocked Profile profile) throws Exception {
-        Env env = Env.getCurrentEnv();
-        Deencapsulation.setField(env, "canRead", new AtomicBoolean(true));
-
-        new Expectations() {
-            {
-                queryStmt.analyze((Analyzer) any);
-                minTimes = 0;
-
-                queryStmt.getColLabels();
-                minTimes = 0;
-                result = Lists.<String>newArrayList();
-
-                queryStmt.getResultExprs();
-                minTimes = 0;
-                result = Lists.<Expr>newArrayList();
-
-                queryStmt.isExplain();
-                minTimes = 0;
-                result = false;
-
-                queryStmt.getTables((Analyzer) any, anyBoolean, (SortedMap) 
any, Sets.newHashSet());
-                minTimes = 0;
-
-                queryStmt.getRedirectStatus();
-                minTimes = 0;
-                result = RedirectStatus.NO_FORWARD;
-
-                queryStmt.rewriteExprs((ExprRewriter) any);
-                minTimes = 0;
-
-                Symbol symbol = new Symbol(0, Lists.newArrayList(queryStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-
-                planner.plan((QueryStmt) any, (TQueryOptions) any);
-                minTimes = 0;
-
-                // mock coordinator
-                coordinator.exec();
-                minTimes = 0;
-
-                coordinator.getNext();
-                minTimes = 0;
-                result = new RowBatch();
-
-                coordinator.getJobId();
-                minTimes = 0;
-                result = -1L;
-
-                Env.getCurrentEnv();
-                minTimes = 0;
-                result = env;
-            }
-        };
-
-        StmtExecutor stmtExecutor = new StmtExecutor(ctx, "");
-        stmtExecutor.execute();
-
-        Assert.assertEquals(QueryState.MysqlStateType.EOF, 
state.getStateType());
-    }
-
-    @Test
-    public void testShow(@Mocked ShowStmt showStmt, @Mocked SqlParser parser,
-            @Mocked ShowExecutor executor) throws Exception {
-        new Expectations() {
-            {
-                showStmt.analyze((Analyzer) any);
-                minTimes = 0;
-
-                showStmt.getRedirectStatus();
-                minTimes = 0;
-                result = RedirectStatus.NO_FORWARD;
-
-                showStmt.toSelectStmt((Analyzer) any);
-                minTimes = 0;
-                result = null;
-
-                Symbol symbol = new Symbol(0, Lists.newArrayList(showStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-
-                // mock show
-                List<List<String>> rows = Lists.newArrayList();
-                rows.add(Lists.newArrayList("abc", "bcd"));
-                executor.execute();
-                minTimes = 0;
-                result = new ShowResultSet(new ShowAuthorStmt().getMetaData(), 
rows);
-            }
-        };
-
-        StmtExecutor stmtExecutor = new StmtExecutor(ctx, "");
-        stmtExecutor.execute();
-
-        Assert.assertEquals(QueryState.MysqlStateType.EOF, 
state.getStateType());
+    @Override
+    protected void runBeforeAll() throws Exception {
+        Config.allow_replica_on_same_host = true;
+        FeConstants.runningUnitTest = true;
+        InternalSchemaInitializer.createDb();
+        InternalSchemaInitializer.createTbl();
+        createDatabase("testDb");
     }
 
     @Test
-    public void testShowNull(@Mocked ShowStmt showStmt, @Mocked SqlParser 
parser,
-            @Mocked ShowExecutor executor) throws Exception {
-        new Expectations() {
-            {
-                showStmt.analyze((Analyzer) any);
-                minTimes = 0;
-
-                showStmt.getRedirectStatus();
-                minTimes = 0;
-                result = RedirectStatus.NO_FORWARD;
-
-                showStmt.toSelectStmt((Analyzer) any);
-                minTimes = 0;
-                result = null;
-
-                Symbol symbol = new Symbol(0, Lists.newArrayList(showStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-
-                // mock show
-                List<List<String>> rows = Lists.newArrayList();
-                rows.add(Lists.newArrayList("abc", "bcd"));
-                executor.execute();
-                minTimes = 0;
-                result = null;
-            }
-        };
-
-        StmtExecutor stmtExecutor = new StmtExecutor(ctx, "");
+    public void testShow() throws Exception {
+        StmtExecutor stmtExecutor = new StmtExecutor(connectContext, "");
         stmtExecutor.execute();
-
-        Assert.assertEquals(QueryState.MysqlStateType.OK, 
state.getStateType());
+        Assertions.assertEquals(QueryState.MysqlStateType.OK, 
connectContext.getState().getStateType());
     }
 
     @Test
-    public void testKill(@Mocked KillStmt killStmt, @Mocked SqlParser parser) 
throws Exception {
-        new Expectations() {
-            {
-                killStmt.analyze((Analyzer) any);
-                minTimes = 0;
-
-                killStmt.getConnectionId();
-                minTimes = 0;
-                result = 1L;
-
-                killStmt.getRedirectStatus();
-                minTimes = 0;
-                result = RedirectStatus.NO_FORWARD;
-
-                Symbol symbol = new Symbol(0, Lists.newArrayList(killStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-            }
-        };
-
-        new Expectations(scheduler) {
-            {
-                // suicide
-                scheduler.getContext(1);
-                result = ctx;
-            }
-        };
-
-        StmtExecutor stmtExecutor = new StmtExecutor(ctx, "");
+    public void testShowNull() throws Exception {
+        StmtExecutor stmtExecutor = new StmtExecutor(connectContext, "");
         stmtExecutor.execute();
-
-        Assert.assertEquals(QueryState.MysqlStateType.OK, 
state.getStateType());
+        Assert.assertEquals(QueryState.MysqlStateType.OK, 
connectContext.getState().getStateType());
     }
 
     @Test
-    public void testKillOtherFail(@Mocked KillStmt killStmt, @Mocked SqlParser 
parser,
-            @Mocked ConnectContext killCtx) throws Exception {
-        Env killEnv = AccessTestUtil.fetchAdminCatalog();
-
-        new Expectations() {
-            {
-                killStmt.analyze((Analyzer) any);
-                minTimes = 0;
-
-                killStmt.getConnectionId();
-                minTimes = 0;
-                result = 1L;
-
-                killStmt.isConnectionKill();
-                minTimes = 0;
-                result = true;
-
-                killStmt.getRedirectStatus();
-                minTimes = 0;
-                result = RedirectStatus.NO_FORWARD;
-
-                Symbol symbol = new Symbol(0, Lists.newArrayList(killStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-
-                killCtx.getEnv();
-                minTimes = 0;
-                result = killEnv;
-
-                killCtx.getQualifiedUser();
-                minTimes = 0;
-                result = "blockUser";
-
-                killCtx.kill(true);
-                minTimes = 0;
-
-                killCtx.getConnectType();
-                minTimes = 0;
-                result = ConnectType.MYSQL;
-
-                ConnectContext.get();
-                minTimes = 0;
-                result = ctx;
-            }
-        };
-
-        new Expectations(scheduler) {
-            {
-                // suicide
-                scheduler.getContext(1);
-                result = killCtx;
-            }
-        };
-
-        StmtExecutor stmtExecutor = new StmtExecutor(ctx, "");
-        stmtExecutor.execute();
-
-        Assert.assertEquals(QueryState.MysqlStateType.ERR, 
state.getStateType());
-    }
-
-    @Test
-    public void testKillOther(@Mocked KillStmt killStmt, @Mocked SqlParser 
parser,
-            @Mocked ConnectContext killCtx) throws Exception {
-        Env killEnv = AccessTestUtil.fetchAdminCatalog();
-        new Expectations() {
-            {
-                killStmt.analyze((Analyzer) any);
-                minTimes = 0;
-
-                killStmt.getConnectionId();
-                minTimes = 0;
-                result = 1;
-
-                killStmt.isConnectionKill();
-                minTimes = 0;
-                result = true;
-
-                killStmt.getRedirectStatus();
-                minTimes = 0;
-                result = RedirectStatus.NO_FORWARD;
-
-                Symbol symbol = new Symbol(0, Lists.newArrayList(killStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-
-                killCtx.getEnv();
-                minTimes = 0;
-                result = killEnv;
-
-                killCtx.getQualifiedUser();
-                minTimes = 0;
-                result = "killUser";
-
-                killCtx.kill(true);
-                minTimes = 0;
-
-                killCtx.getConnectType();
-                minTimes = 0;
-                result = ConnectType.MYSQL;
-
-                ConnectContext.get();
-                minTimes = 0;
-                result = ctx;
-            }
-        };
-
-        new Expectations(scheduler) {
-            {
-                // suicide
-                scheduler.getContext(1);
-                result = killCtx;
-            }
-        };
-
-        StmtExecutor stmtExecutor = new StmtExecutor(ctx, "");
+    public void testKill() throws Exception {
+        StmtExecutor stmtExecutor = new StmtExecutor(connectContext, "");
         stmtExecutor.execute();
-
-        Assert.assertEquals(QueryState.MysqlStateType.ERR, 
state.getStateType());
+        Assert.assertEquals(QueryState.MysqlStateType.OK, 
connectContext.getState().getStateType());
     }
 
     @Test
-    public void testKillNoCtx(@Mocked KillStmt killStmt, @Mocked SqlParser 
parser) throws Exception {
-        new Expectations() {
-            {
-                killStmt.analyze((Analyzer) any);
-                minTimes = 0;
-
-                killStmt.getConnectionId();
-                minTimes = 0;
-                result = 1;
-
-                killStmt.getRedirectStatus();
-                minTimes = 0;
-                result = RedirectStatus.NO_FORWARD;
-
-                Symbol symbol = new Symbol(0, Lists.newArrayList(killStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-            }
-        };
-
-        new Expectations(scheduler) {
-            {
-                scheduler.getContext(1);
-                result = null;
-            }
-        };
-
-        StmtExecutor stmtExecutor = new StmtExecutor(ctx, "");
+    public void testKillOtherFail() throws Exception {
+        StmtExecutor stmtExecutor = new StmtExecutor(connectContext, "kill 
1000");
         stmtExecutor.execute();
-
-        Assert.assertEquals(QueryState.MysqlStateType.ERR, 
state.getStateType());
+        Assert.assertEquals(QueryState.MysqlStateType.ERR, 
connectContext.getState().getStateType());
     }
 
     @Test
-    public void testSet(@Mocked SetStmt setStmt, @Mocked SqlParser parser,
-            @Mocked SetExecutor executor) throws Exception {
-        new Expectations() {
-            {
-                setStmt.analyze((Analyzer) any);
-                minTimes = 0;
-
-                setStmt.getRedirectStatus();
-                minTimes = 0;
-                result = RedirectStatus.NO_FORWARD;
-
-                Symbol symbol = new Symbol(0, Lists.newArrayList(setStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-
-                // Mock set
-                executor.execute();
-                minTimes = 0;
-            }
-        };
-
-        StmtExecutor stmtExecutor = new StmtExecutor(ctx, "");
+    public void testKillNoCtx() throws Exception {
+        StmtExecutor stmtExecutor = new StmtExecutor(connectContext, "kill 1");
         stmtExecutor.execute();
-
-        Assert.assertEquals(QueryState.MysqlStateType.OK, 
state.getStateType());
+        Assert.assertEquals(QueryState.MysqlStateType.ERR, 
connectContext.getState().getStateType());
     }
 
     @Test
-    public void testSetFail(@Mocked SetStmt setStmt, @Mocked SqlParser parser,
-            @Mocked SetExecutor executor) throws Exception {
-        new Expectations() {
-            {
-                setStmt.analyze((Analyzer) any);
-                minTimes = 0;
-
-                setStmt.getRedirectStatus();
-                minTimes = 0;
-                result = RedirectStatus.NO_FORWARD;
-
-                Symbol symbol = new Symbol(0, Lists.newArrayList(setStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-
-                // Mock set
-                executor.execute();
-                minTimes = 0;
-                result = new DdlException("failed");
-            }
-        };
-
-        StmtExecutor stmtExecutor = new StmtExecutor(ctx, "");
+    public void testSet() throws Exception {
+        StmtExecutor stmtExecutor = new StmtExecutor(connectContext, "");
         stmtExecutor.execute();
-
-        Assert.assertEquals(QueryState.MysqlStateType.ERR, 
state.getStateType());
-    }
-
-    @Test
-    public void testDdl(@Mocked DdlStmt ddlStmt, @Mocked SqlParser parser) 
throws Exception {
-        new Expectations() {
-            {
-                ddlStmt.analyze((Analyzer) any);
-                minTimes = 0;
-
-                ddlStmt.getRedirectStatus();
-                minTimes = 0;
-                result = RedirectStatus.NO_FORWARD;
-
-                Symbol symbol = new Symbol(0, Lists.newArrayList(ddlStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-            }
-        };
-
-        DdlExecutor ddlExecutor = new DdlExecutor();
-        new Expectations(ddlExecutor) {
-            {
-                // Mock ddl
-                DdlExecutor.execute((Env) any, (DdlStmt) any);
-                minTimes = 0;
-            }
-        };
-
-        StmtExecutor executor = new StmtExecutor(ctx, "");
-        executor.execute();
-
-        Assert.assertEquals(QueryState.MysqlStateType.OK, 
state.getStateType());
-    }
-
-    @Test
-    public void testDdlFail(@Mocked DdlStmt ddlStmt, @Mocked SqlParser parser) 
throws Exception {
-        new Expectations() {
-            {
-                ddlStmt.analyze((Analyzer) any);
-                minTimes = 0;
-
-                ddlStmt.getRedirectStatus();
-                minTimes = 0;
-                result = RedirectStatus.NO_FORWARD;
-
-                Symbol symbol = new Symbol(0, Lists.newArrayList(ddlStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-            }
-        };
-
-        DdlExecutor ddlExecutor = new DdlExecutor();
-        new Expectations(ddlExecutor) {
-            {
-                // Mock ddl
-                DdlExecutor.execute((Env) any, (DdlStmt) any);
-                minTimes = 0;
-                result = new DdlException("ddl fail");
-            }
-        };
-
-        StmtExecutor executor = new StmtExecutor(ctx, "");
-        executor.execute();
-
-        Assert.assertEquals(QueryState.MysqlStateType.ERR, 
state.getStateType());
+        Assert.assertEquals(QueryState.MysqlStateType.OK, 
connectContext.getState().getStateType());
     }
 
     @Test
-    public void testDdlFail2(@Mocked DdlStmt ddlStmt, @Mocked SqlParser 
parser) throws Exception {
-        new Expectations() {
-            {
-                ddlStmt.analyze((Analyzer) any);
-                minTimes = 0;
-
-                ddlStmt.getRedirectStatus();
-                minTimes = 0;
-                result = RedirectStatus.NO_FORWARD;
-
-                Symbol symbol = new Symbol(0, Lists.newArrayList(ddlStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-            }
-        };
-
-        DdlExecutor ddlExecutor = new DdlExecutor();
-        new Expectations(ddlExecutor) {
-            {
-                // Mock ddl
-                DdlExecutor.execute((Env) any, (DdlStmt) any);
-                minTimes = 0;
-                result = new Exception("bug");
-            }
-        };
-
-        StmtExecutor executor = new StmtExecutor(ctx, "");
+    public void testDdlFail() throws Exception {
+        StmtExecutor executor = new StmtExecutor(connectContext, "CREATE FILE 
\\\"ca.pem\\\"\\n\"\n"
+                + "                + \"PROPERTIES\\n\"\n"
+                + "                + \"(\\n\"\n"
+                + "                + \"   \\\"url\\\" = 
\\\"https://test.bj.bcebos.com/kafka-key/ca.pem\\\",\\n\"\n"
+                + "                + \"   \\\"catalog\\\" = 
\\\"kafka\\\"\\n\"\n"
+                + "                + \");");
         executor.execute();
-
-        Assert.assertEquals(QueryState.MysqlStateType.ERR, 
state.getStateType());
+        Assert.assertEquals(QueryState.MysqlStateType.ERR, 
connectContext.getState().getStateType());
     }
 
     @Test
-    public void testUse(@Mocked UseStmt useStmt, @Mocked SqlParser parser) 
throws Exception {
-        new Expectations() {
-            {
-                useStmt.analyze((Analyzer) any);
-                minTimes = 0;
-
-                useStmt.getDatabase();
-                minTimes = 0;
-                result = "testDb";
-
-                useStmt.getRedirectStatus();
-                minTimes = 0;
-                result = RedirectStatus.NO_FORWARD;
-
-                Symbol symbol = new Symbol(0, Lists.newArrayList(useStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-            }
-        };
-
-        StmtExecutor executor = new StmtExecutor(ctx, "");
+    public void testUse() throws Exception {
+        StmtExecutor executor = new StmtExecutor(connectContext, "use testDb");
         executor.execute();
-
-        Assert.assertEquals(QueryState.MysqlStateType.OK, 
state.getStateType());
+        Assert.assertEquals(QueryState.MysqlStateType.OK, 
connectContext.getState().getStateType());
     }
 
     @Test
-    public void testUseFail(@Mocked UseStmt useStmt, @Mocked SqlParser parser) 
throws Exception {
-        new Expectations() {
-            {
-                useStmt.analyze((Analyzer) any);
-                minTimes = 0;
-
-                useStmt.getDatabase();
-                minTimes = 0;
-                result = "blockDb";
-
-                useStmt.getRedirectStatus();
-                minTimes = 0;
-                result = RedirectStatus.NO_FORWARD;
-
-                Symbol symbol = new Symbol(0, Lists.newArrayList(useStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-            }
-        };
-
-        StmtExecutor executor = new StmtExecutor(ctx, "");
+    public void testUseFail() throws Exception {
+        StmtExecutor executor = new StmtExecutor(connectContext, "use nondb");
         executor.execute();
-
-        Assert.assertEquals(QueryState.MysqlStateType.ERR, 
state.getStateType());
+        Assert.assertEquals(QueryState.MysqlStateType.ERR, 
connectContext.getState().getStateType());
     }
 
     @Test
-    public void testUseWithCatalog(@Mocked UseStmt useStmt, @Mocked SqlParser 
parser) throws Exception {
-        new Expectations() {
-            {
-                useStmt.analyze((Analyzer) any);
-                minTimes = 0;
-
-                useStmt.getDatabase();
-                minTimes = 0;
-                result = "testDb";
-
-                useStmt.getRedirectStatus();
-                minTimes = 0;
-                result = RedirectStatus.NO_FORWARD;
-
-                useStmt.getCatalogName();
-                minTimes = 0;
-                result = InternalCatalog.INTERNAL_CATALOG_NAME;
-
-                Symbol symbol = new Symbol(0, Lists.newArrayList(useStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-            }
-        };
-
-        StmtExecutor executor = new StmtExecutor(ctx, "");
+    public void testUseWithCatalog() throws Exception {
+        StmtExecutor executor = new StmtExecutor(connectContext, "use 
internal.testDb");
         executor.execute();
-
-        Assert.assertEquals(QueryState.MysqlStateType.OK, 
state.getStateType());
+        Assert.assertEquals(QueryState.MysqlStateType.OK, 
connectContext.getState().getStateType());
     }
 
     @Test
-    public void testUseWithCatalogFail(@Mocked UseStmt useStmt, @Mocked 
SqlParser parser) throws Exception {
-        new Expectations() {
-            {
-                useStmt.analyze((Analyzer) any);
-                minTimes = 0;
-
-                useStmt.getDatabase();
-                minTimes = 0;
-                result = "blockDb";
-
-                useStmt.getRedirectStatus();
-                minTimes = 0;
-                result = RedirectStatus.NO_FORWARD;
-
-                useStmt.getCatalogName();
-                minTimes = 0;
-                result = "testcatalog";
-
-                Symbol symbol = new Symbol(0, Lists.newArrayList(useStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-            }
-        };
-
-        StmtExecutor executor = new StmtExecutor(ctx, "");
+    public void testUseWithCatalogFail() throws Exception {
+        StmtExecutor executor = new StmtExecutor(connectContext, "use 
internal.nondb");
         executor.execute();
-
-        Assert.assertEquals(QueryState.MysqlStateType.ERR, 
state.getStateType());
+        Assert.assertEquals(QueryState.MysqlStateType.ERR, 
connectContext.getState().getStateType());
     }
 
     @Test
-    public void testBlockSqlAst(@Mocked UseStmt useStmt, @Mocked 
CreateFileStmt createFileStmt,
-            @Mocked CreateFunctionStmt createFunctionStmt, @Mocked SqlParser 
parser) throws Exception {
-        new Expectations() {
-            {
-                useStmt.analyze((Analyzer) any);
-                minTimes = 0;
-
-                useStmt.getDatabase();
-                minTimes = 0;
-                result = "testDb";
-
-                useStmt.getRedirectStatus();
-                minTimes = 0;
-                result = RedirectStatus.NO_FORWARD;
-
-                useStmt.getCatalogName();
-                minTimes = 0;
-                result = InternalCatalog.INTERNAL_CATALOG_NAME;
-
-                Symbol symbol = new Symbol(0, 
Lists.newArrayList(createFileStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-            }
-        };
-
-        Config.block_sql_ast_names = "CreateFileStmt";
+    public void testBlockSqlAst() throws Exception {
+        useDatabase("testDb");
+        Config.block_sql_ast_names = "CreateFileCommand";
         StmtExecutor.initBlockSqlAstNames();
-        StmtExecutor executor = new StmtExecutor(ctx, "");
+        StmtExecutor executor = new StmtExecutor(connectContext, "CREATE FILE 
\"ca.pem\"\n"
+                + "PROPERTIES\n"
+                + "(\n"
+                + "   \"url\" = 
\"https://test.bj.bcebos.com/kafka-key/ca.pem\",\n"
+                + "   \"catalog\" = \"kafka\"\n"
+                + ");");
         try {
             executor.execute();
         } catch (Exception ignore) {
             // do nothing
+            ignore.printStackTrace();
+            Assert.assertTrue(ignore.getMessage().contains("SQL is blocked 
with AST name: CreateFileCommand"));
         }
-        Assert.assertEquals(QueryState.MysqlStateType.ERR, 
state.getStateType());
-        Assert.assertTrue(state.getErrorMessage().contains("SQL is blocked 
with AST name: CreateFileStmt"));
 
-        Config.block_sql_ast_names = "AlterStmt, CreateFileStmt";
+        Config.block_sql_ast_names = "AlterStmt, CreateFileCommand";
         StmtExecutor.initBlockSqlAstNames();
-        executor = new StmtExecutor(ctx, "");
+        executor = new StmtExecutor(connectContext, "CREATE FILE \"ca.pem\"\n"
+                + "PROPERTIES\n"
+                + "(\"file_type\" = \"PEM\")");
         try {
             executor.execute();
         } catch (Exception ignore) {
-            // do nothing
+            ignore.printStackTrace();
+            Assert.assertTrue(ignore.getMessage().contains("SQL is blocked 
with AST name: CreateFileCommand"));
         }
-        Assert.assertEquals(QueryState.MysqlStateType.ERR, 
state.getStateType());
-        Assert.assertTrue(state.getErrorMessage().contains("SQL is blocked 
with AST name: CreateFileStmt"));
 
-        new Expectations() {
-            {
-                Symbol symbol = new Symbol(0, 
Lists.newArrayList(createFunctionStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-            }
-        };
-        Config.block_sql_ast_names = "CreateFunctionStmt, CreateFileStmt";
+        Config.block_sql_ast_names = "CreateFunctionStmt, CreateFileCommand";
         StmtExecutor.initBlockSqlAstNames();
-        executor = new StmtExecutor(ctx, "");
+        executor = new StmtExecutor(connectContext, "CREATE FUNCTION 
java_udf_add_one(int) RETURNS int PROPERTIES (\n"
+                + "   
\"file\"=\"file:///path/to/java-udf-demo-jar-with-dependencies.jar\",\n"
+                + "   \"symbol\"=\"org.apache.doris.udf.AddOne\",\n"
+                + "   \"always_nullable\"=\"true\",\n"
+                + "   \"type\"=\"JAVA_UDF\"\n"
+                + ");");
         try {
             executor.execute();
         } catch (Exception ignore) {
-            // do nothing
+            ignore.printStackTrace();
+            Assert.assertTrue(ignore.getMessage().contains("SQL is blocked 
with AST name: CreateFileCommand"));
         }
-        Assert.assertEquals(QueryState.MysqlStateType.ERR, 
state.getStateType());
-        Assert.assertTrue(state.getErrorMessage().contains("SQL is blocked 
with AST name: CreateFunctionStmt"));
 
-        new Expectations() {
-            {
-                Symbol symbol = new Symbol(0, Lists.newArrayList(useStmt));
-                parser.parse();
-                minTimes = 0;
-                result = symbol;
-            }
-        };
-        executor = new StmtExecutor(ctx, "");
+        executor = new StmtExecutor(connectContext, "use testDb");
         executor.execute();
-        Assert.assertEquals(QueryState.MysqlStateType.OK, 
state.getStateType());
+        Assert.assertEquals(QueryState.MysqlStateType.OK, 
connectContext.getState().getStateType());
 
         Config.block_sql_ast_names = "";
         StmtExecutor.initBlockSqlAstNames();
-        executor = new StmtExecutor(ctx, "");
+        executor = new StmtExecutor(connectContext, "use testDb");
         executor.execute();
-        Assert.assertEquals(QueryState.MysqlStateType.OK, 
state.getStateType());
+        Assert.assertEquals(QueryState.MysqlStateType.OK, 
connectContext.getState().getStateType());
     }
 
     @Test
@@ -902,9 +196,11 @@ public class StmtExecutorTest {
             }
         };
 
-        StatementContext statementContext = new 
StatementContext(connectContext, new OriginStatement("create", 0));
+        OriginStatement originStatement = new OriginStatement("create", 0);
+        StatementContext statementContext = new 
StatementContext(connectContext, originStatement);
         LogicalPlan plan = new CreatePolicyCommand(PolicyTypeEnum.ROW, 
"test1", false, null, null, null, null, null, null);
         LogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(plan, 
statementContext);
+        logicalPlanAdapter.setOrigStmt(originStatement);
         StmtExecutor stmtExecutor = new StmtExecutor(connectContext, 
logicalPlanAdapter);
 
         try {
diff --git 
a/fe/fe-core/src/test/java/org/apache/doris/utframe/TestWithFeService.java 
b/fe/fe-core/src/test/java/org/apache/doris/utframe/TestWithFeService.java
index c81ca8a206b..98811023214 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/utframe/TestWithFeService.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/utframe/TestWithFeService.java
@@ -129,7 +129,7 @@ import java.util.UUID;
  * an example.
  * This class use {@link TestInstance} in JUnit5 to do initialization and 
cleanup stuff. Unlike
  * deprecated legacy combination-based implementation {@link UtFrameUtils}, we 
use an inherit-manner,
- * thus we could wrap common logic in this base class. It's more easy to use.
+ * thus we could wrap common logic in this base class. It's easier to use.
  * Note:
  * Unit-test method in derived classes must use the JUnit5 {@link 
org.junit.jupiter.api.Test}
  * annotation, rather than the old JUnit4 {@link org.junit.Test} or others.
diff --git 
a/regression-test/data/external_table_p0/iceberg/test_iceberg_sys_table.out 
b/regression-test/data/external_table_p0/iceberg/test_iceberg_sys_table.out
new file mode 100644
index 00000000000..dde27c2049d
Binary files /dev/null and 
b/regression-test/data/external_table_p0/iceberg/test_iceberg_sys_table.out 
differ
diff --git 
a/regression-test/suites/external_table_p0/hive/test_hive_partition_values_tvf.groovy
 
b/regression-test/suites/external_table_p0/hive/test_hive_partition_values_tvf.groovy
index 7cb764d0165..7dfbedf5828 100644
--- 
a/regression-test/suites/external_table_p0/hive/test_hive_partition_values_tvf.groovy
+++ 
b/regression-test/suites/external_table_p0/hive/test_hive_partition_values_tvf.groovy
@@ -99,12 +99,12 @@ suite("test_hive_partition_values_tvf", 
"p0,external,hive,external_docker,extern
         qt_sql102 """select * from internal.partition_values_db.pv_inner1"""
         test {
             sql """desc internal.partition_values_db.pv_inner1\$partitions"""
-            exception """Unknown table 'pv_inner1\$partitions'"""
+            exception """sys table not found: partitions"""
         }
 
         test {
             sql """select * from 
internal.partition_values_db.pv_inner1\$partitions"""
-            exception """Table [pv_inner1\$partitions] does not exist in 
database [partition_values_db]"""
+            exception """Unknown sys table 'pv_inner1\$partitions'"""
         }
 
         // 13. test all types of partition columns
diff --git 
a/regression-test/suites/external_table_p0/iceberg/test_iceberg_sys_table.groovy
 
b/regression-test/suites/external_table_p0/iceberg/test_iceberg_sys_table.groovy
new file mode 100644
index 00000000000..357f5da9708
--- /dev/null
+++ 
b/regression-test/suites/external_table_p0/iceberg/test_iceberg_sys_table.groovy
@@ -0,0 +1,183 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_iceberg_sys_table", 
"p0,external,doris,external_docker,external_docker_doris") {
+
+    String enabled = context.config.otherConfigs.get("enableIcebergTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("disable iceberg test.")
+        return
+    }
+
+    String catalog_name = "test_iceberg_systable_ctl"
+    String db_name = "test_iceberg_systable_db"
+    String rest_port = context.config.otherConfigs.get("iceberg_rest_uri_port")
+    String minio_port = context.config.otherConfigs.get("iceberg_minio_port")
+    String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+    sql """drop catalog if exists ${catalog_name}"""
+    sql """
+    CREATE CATALOG ${catalog_name} PROPERTIES (
+        'type'='iceberg',
+        'iceberg.catalog.type'='rest',
+        'uri' = 'http://${externalEnvIp}:${rest_port}',
+        "s3.access_key" = "admin",
+        "s3.secret_key" = "password",
+        "s3.endpoint" = "http://${externalEnvIp}:${minio_port}",
+        "s3.region" = "us-east-1"
+    );"""
+
+    sql """switch ${catalog_name}"""
+
+    sql """drop database if exists ${db_name} force"""
+    sql """create database ${db_name}"""
+    sql """use ${db_name}"""
+
+    sql """ create table test_iceberg_systable_tbl1 (id int) """
+    sql """insert into test_iceberg_systable_tbl1 values(1);"""
+    qt_sql_tbl1 """select * from test_iceberg_systable_tbl1"""
+    qt_sql_tbl1_systable """select count(*) from 
test_iceberg_systable_tbl1\$snapshots"""
+    List<List<Object>> res1 = sql """select * from 
test_iceberg_systable_tbl1\$snapshots"""
+    List<List<Object>> res2 = sql """select * from iceberg_meta(
+        "table" = "${catalog_name}.${db_name}.test_iceberg_systable_tbl1",
+        "query_type" = "snapshots");
+    """
+    assertEquals(res1.size(), res2.size());
+    for (int i = 0; i < res1.size(); i++) {
+        for (int j = 0; j < res1[i].size(); j++) {
+            assertEquals(res1[i][j], res2[i][j]);
+        }
+    }
+
+    String snapshot_id = String.valueOf(res1[0][1]);
+    qt_sql_tbl1_systable_where """
+        select count(*) from test_iceberg_systable_tbl1\$snapshots where 
snapshot_id=${snapshot_id};
+    """
+    qt_tbl1_systable_where2 """select count(*) from iceberg_meta(
+        "table" = "${catalog_name}.${db_name}.test_iceberg_systable_tbl1",
+        "query_type" = "snapshots") where snapshot_id="${snapshot_id}";
+    """
+
+    sql """insert into test_iceberg_systable_tbl1 values(2);"""
+    order_qt_sql_tbl12 """select * from test_iceberg_systable_tbl1"""
+    qt_sql_tbl1_systable2 """select count(*) from 
test_iceberg_systable_tbl1\$snapshots"""
+
+    qt_desc_systable1 """desc test_iceberg_systable_tbl1\$snapshots"""
+    qt_desc_systable2 """desc 
${db_name}.test_iceberg_systable_tbl1\$snapshots"""
+    qt_desc_systable3 """desc 
${catalog_name}.${db_name}.test_iceberg_systable_tbl1\$snapshots"""
+
+    String user = "test_iceberg_systable_user"
+    String pwd = 'C123_567p'
+    try_sql("DROP USER ${user}")
+    sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'"""
+
+    if (isCloudMode()) {
+        def clusters = sql " SHOW CLUSTERS; "
+        assertTrue(!clusters.isEmpty())
+        def validCluster = clusters[0][0]
+        sql """GRANT USAGE_PRIV ON CLUSTER `${validCluster}` TO ${user}""";
+    }
+
+    sql """create database if not exists internal.regression_test"""
+    sql """grant select_priv on internal.regression_test.* to ${user}""" 
+    connect(user, "${pwd}", context.config.jdbcUrl) {
+        test {
+              sql """
+                 select committed_at, snapshot_id, parent_id, operation from 
iceberg_meta(
+                                             "table" = 
"${catalog_name}.${db_name}.test_iceberg_systable_tbl1",
+                                             "query_type" = "snapshots");
+              """
+              exception "denied"
+        }
+        test {
+              sql """
+                 select committed_at, snapshot_id, parent_id, operation from 
${catalog_name}.${db_name}.test_iceberg_systable_tbl1\$snapshots
+              """
+              exception "denied"
+        }
+    }
+    sql """grant select_priv on 
${catalog_name}.${db_name}.test_iceberg_systable_tbl1 to ${user}"""
+    connect(user, "${pwd}", context.config.jdbcUrl) {
+        sql """
+           select committed_at, snapshot_id, parent_id, operation from 
iceberg_meta(
+                                       "table" = 
"${catalog_name}.${db_name}.test_iceberg_systable_tbl1",
+                                       "query_type" = "snapshots");
+        """
+        sql """select committed_at, snapshot_id, parent_id, operation from 
${catalog_name}.${db_name}.test_iceberg_systable_tbl1\$snapshots"""
+    }
+    try_sql("DROP USER ${user}")
+
+    // tbl2
+    sql """ create table test_iceberg_systable_tbl2 (id int) """
+    sql """insert into test_iceberg_systable_tbl2 values(2);"""
+    qt_sql_tbl2 """select * from test_iceberg_systable_tbl2"""
+
+    sql """ create table test_iceberg_systable_tbl3 (id int) """
+    sql """insert into test_iceberg_systable_tbl3 values(3);"""
+    qt_sql_tbl3 """select * from test_iceberg_systable_tbl3"""
+
+    // drop db with tables
+    test {
+        sql """drop database ${db_name}"""
+        exception """is not empty"""
+    }
+
+    // drop db force with tables
+    sql """drop database ${db_name} force"""
+
+    // refresh catalog
+    sql """refresh catalog ${catalog_name}"""
+    // should be empty
+    test {
+        sql """show tables from ${db_name}"""
+        exception "Unknown database"
+    }
+
+    // table should be deleted
+    qt_test1 """
+        select * from s3(
+            "uri" = 
"s3://warehouse/wh/${db_name}/test_iceberg_systable_tbl1/data/*.parquet",
+            "s3.endpoint"="http://${externalEnvIp}:${minio_port}",
+            "s3.access_key" = "admin",
+            "s3.secret_key" = "password",
+            "s3.region" = "us-east-1",
+            "format" = "parquet",
+            "use_path_style" = "true"
+        )
+    """
+    qt_test2 """
+        select * from s3(
+            "uri" = 
"s3://warehouse/wh/${db_name}/test_iceberg_systable_tbl1/data/*.parquet",
+            "s3.endpoint"="http://${externalEnvIp}:${minio_port}",
+            "s3.access_key" = "admin",
+            "s3.secret_key" = "password",
+            "s3.region" = "us-east-1",
+            "format" = "parquet",
+            "use_path_style" = "true"
+        )
+    """
+    qt_test3 """
+        select * from s3(
+            "uri" = 
"s3://warehouse/wh/${db_name}/test_iceberg_systable_tbl1/data/*.parquet",
+            "s3.endpoint"="http://${externalEnvIp}:${minio_port}",
+            "s3.access_key" = "admin",
+            "s3.secret_key" = "password",
+            "s3.region" = "us-east-1",
+            "format" = "parquet",
+            "use_path_style" = "true"
+        )
+    """
+}
diff --git 
a/regression-test/suites/external_table_p0/iceberg/test_iceberg_time_travel.groovy
 
b/regression-test/suites/external_table_p0/iceberg/test_iceberg_time_travel.groovy
index 5f4ffcef643..2e970e6dc3f 100644
--- 
a/regression-test/suites/external_table_p0/iceberg/test_iceberg_time_travel.groovy
+++ 
b/regression-test/suites/external_table_p0/iceberg/test_iceberg_time_travel.groovy
@@ -39,17 +39,17 @@ suite("test_iceberg_time_travel", 
"p0,external,doris,external_docker,external_do
         "s3.region" = "us-east-1"
     );"""
 
-        sql """ use ${catalog_name}.test_db """
+    sql """ use ${catalog_name}.test_db """
 
-        // test for 'FOR TIME AS OF' and 'FOR VERSION AS OF'
-        def q01 = {
-            qt_q1 """ select count(*) from iceberg_position_gen_data """  // 
5632
-            qt_q2 """ select count(*) from iceberg_position_gen_data FOR TIME 
AS OF '2024-07-14 14:17:01' """// 120
-            qt_q3 """ select * from iceberg_position_gen_data order by id 
limit 10""" 
-            qt_q4 """ select * from iceberg_position_gen_data FOR TIME AS OF 
'2024-07-14 14:17:01' order by id limit 10""" 
-            qt_q5 """ select count(*) from iceberg_position_gen_data FOR 
VERSION AS OF 3106988132043095748 """ // 240
-            qt_q6 """ select * from iceberg_position_gen_data FOR VERSION AS 
OF 3106988132043095748 order by id limit 10""" 
-        }
-        
-        q01()
+    // test for 'FOR TIME AS OF' and 'FOR VERSION AS OF'
+    def q01 = {
+        qt_q1 """ select count(*) from iceberg_position_gen_data """  // 5632
+        qt_q2 """ select count(*) from iceberg_position_gen_data FOR TIME AS 
OF '2024-07-14 14:17:01' """// 120
+        qt_q3 """ select * from iceberg_position_gen_data order by id limit 
10""" 
+        qt_q4 """ select * from iceberg_position_gen_data FOR TIME AS OF 
'2024-07-14 14:17:01' order by id limit 10""" 
+        qt_q5 """ select count(*) from iceberg_position_gen_data FOR VERSION 
AS OF 3106988132043095748 """ // 240
+        qt_q6 """ select * from iceberg_position_gen_data FOR VERSION AS OF 
3106988132043095748 order by id limit 10""" 
+    }
+
+    q01()
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to