This is an automated email from the ASF dual-hosted git repository.

starocean999 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new bde30532f0a [clean](planner) Remove unused code in PlanNode and delete 
the implementations in its subclasses (#52442)
bde30532f0a is described below

commit bde30532f0a15455718b8321d954cf907ce39e41
Author: zhangdong <[email protected]>
AuthorDate: Tue Jul 1 15:48:07 2025 +0800

    [clean](planner) Remove unused code in PlanNode and delete the 
implementations in its subclasses (#52442)
    
    
    Remove unused code in PlanNode and delete the implementations in its
    subclasses
---
 .../java/org/apache/doris/analysis/Analyzer.java   |   2 +-
 .../apache/doris/datasource/ExternalScanNode.java  |   9 -
 .../apache/doris/datasource/FileQueryScanNode.java |  18 -
 .../doris/datasource/es/source/EsScanNode.java     |  13 -
 .../doris/datasource/hive/source/HiveScanNode.java |  13 -
 .../datasource/iceberg/source/IcebergScanNode.java |  13 -
 .../doris/datasource/jdbc/source/JdbcScanNode.java |  23 --
 .../doris/datasource/odbc/source/OdbcScanNode.java |  24 --
 .../datasource/tvf/source/MetadataScanNode.java    |   6 -
 .../org/apache/doris/planner/AggregationNode.java  | 143 --------
 .../org/apache/doris/planner/AnalyticEvalNode.java |  42 ---
 .../apache/doris/planner/AssertNumRowsNode.java    |  17 -
 .../planner/BackendPartitionedSchemaScanNode.java  |  14 -
 .../org/apache/doris/planner/DataGenScanNode.java  |  15 -
 .../org/apache/doris/planner/EmptySetNode.java     |  28 --
 .../org/apache/doris/planner/ExchangeNode.java     |  24 --
 .../org/apache/doris/planner/FileLoadScanNode.java |  32 --
 .../apache/doris/planner/GroupCommitScanNode.java  |   5 -
 .../org/apache/doris/planner/HashJoinNode.java     |  52 ---
 .../org/apache/doris/planner/JoinNodeBase.java     | 120 -------
 .../org/apache/doris/planner/MysqlScanNode.java    |  19 --
 .../apache/doris/planner/NestedLoopJoinNode.java   |   9 -
 .../org/apache/doris/planner/OlapScanNode.java     | 120 -------
 .../java/org/apache/doris/planner/PlanNode.java    | 367 ---------------------
 .../java/org/apache/doris/planner/RepeatNode.java  |  64 ----
 .../java/org/apache/doris/planner/ScanNode.java    |  48 ---
 .../org/apache/doris/planner/SchemaScanNode.java   |  17 -
 .../java/org/apache/doris/planner/SelectNode.java  |  23 --
 .../org/apache/doris/planner/SetOperationNode.java | 105 ------
 .../java/org/apache/doris/planner/SortNode.java    |  90 -----
 .../apache/doris/planner/TableFunctionNode.java    |  41 ---
 .../doris/planner/TestExternalTableScanNode.java   |  16 -
 .../doris/rewrite/ExtractCommonFactorsRule.java    |   2 +-
 33 files changed, 2 insertions(+), 1532 deletions(-)

diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/Analyzer.java 
b/fe/fe-core/src/main/java/org/apache/doris/analysis/Analyzer.java
index 8a3aa47fc17..8666ca17948 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/Analyzer.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/Analyzer.java
@@ -2662,7 +2662,7 @@ public class Analyzer {
             return;
         }
         List<SlotId> refdIdList = Lists.newArrayList();
-        planRoot.getMaterializedIds(analyzer, refdIdList);
+        // planRoot.getMaterializedIds(analyzer, refdIdList);
         if (outputExprs != null) {
             Expr.getIds(outputExprs, null, refdIdList);
         }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalScanNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalScanNode.java
index 0d67a9e44b6..e619e750e45 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalScanNode.java
@@ -17,7 +17,6 @@
 
 package org.apache.doris.datasource;
 
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.TupleDescriptor;
 import org.apache.doris.common.UserException;
 import org.apache.doris.planner.PlanNodeId;
@@ -57,14 +56,6 @@ public abstract class ExternalScanNode extends ScanNode {
         this.needCheckColumnPriv = needCheckColumnPriv;
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-        computeStats(analyzer);
-        computeColumnsFilter();
-        initBackendPolicy();
-    }
-
     // For Nereids
     @Override
     public void init() throws UserException {
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/FileQueryScanNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/FileQueryScanNode.java
index 47ed17cb704..4e34904aec3 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/FileQueryScanNode.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/FileQueryScanNode.java
@@ -17,7 +17,6 @@
 
 package org.apache.doris.datasource;
 
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.SlotDescriptor;
 import org.apache.doris.analysis.TableSample;
 import org.apache.doris.analysis.TableScanParams;
@@ -115,18 +114,6 @@ public abstract class FileQueryScanNode extends 
FileScanNode {
         this.sessionVariable = sv;
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        if (ConnectContext.get().getExecutor() != null) {
-            
ConnectContext.get().getExecutor().getSummaryProfile().setInitScanNodeStartTime();
-        }
-        super.init(analyzer);
-        doInitialize();
-        if (ConnectContext.get().getExecutor() != null) {
-            
ConnectContext.get().getExecutor().getSummaryProfile().setInitScanNodeFinishTime();
-        }
-    }
-
     /**
      * Init ExternalFileScanNode, ONLY used for Nereids. Should NOT use this 
function in anywhere else.
      */
@@ -203,11 +190,6 @@ public abstract class FileQueryScanNode extends 
FileScanNode {
         this.tableSample = tSample;
     }
 
-    @Override
-    public void finalize(Analyzer analyzer) throws UserException {
-        doFinalize();
-    }
-
     @Override
     public void finalizeForNereids() throws UserException {
         doFinalize();
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/es/source/EsScanNode.java
 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/es/source/EsScanNode.java
index 0afb426188e..e78721d8589 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/es/source/EsScanNode.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/es/source/EsScanNode.java
@@ -17,7 +17,6 @@
 
 package org.apache.doris.datasource.es.source;
 
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.Expr;
 import org.apache.doris.analysis.SlotDescriptor;
 import org.apache.doris.analysis.TupleDescriptor;
@@ -101,24 +100,12 @@ public class EsScanNode extends ExternalScanNode {
         esTablePartitions = table.getEsTablePartitions();
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-        buildQuery();
-    }
-
     @Override
     public void init() throws UserException {
         super.init();
         buildQuery();
     }
 
-    @Override
-    public void finalize(Analyzer analyzer) throws UserException {
-        buildQuery();
-        doFinalize();
-    }
-
     @Override
     public void finalizeForNereids() throws UserException {
         buildQuery();
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/source/HiveScanNode.java
 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/source/HiveScanNode.java
index 5f6a64407b6..1d4e8cfdbf0 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/source/HiveScanNode.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/source/HiveScanNode.java
@@ -17,7 +17,6 @@
 
 package org.apache.doris.datasource.hive.source;
 
-import org.apache.doris.analysis.FunctionCallExpr;
 import org.apache.doris.analysis.TupleDescriptor;
 import org.apache.doris.catalog.Column;
 import org.apache.doris.catalog.Env;
@@ -542,18 +541,6 @@ public class HiveScanNode extends FileQueryScanNode {
         return fileAttributes;
     }
 
-    @Override
-    public boolean pushDownAggNoGrouping(FunctionCallExpr aggExpr) {
-
-        String aggFunctionName = aggExpr.getFnName().getFunction();
-        return aggFunctionName.equalsIgnoreCase("COUNT");
-    }
-
-    @Override
-    public boolean pushDownAggNoGroupingCheckCol(FunctionCallExpr aggExpr, 
Column col) {
-        return !col.isAllowNull();
-    }
-
     @Override
     protected TFileCompressType getFileCompressType(FileSplit fileSplit) 
throws UserException {
         TFileCompressType compressType = super.getFileCompressType(fileSplit);
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/source/IcebergScanNode.java
 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/source/IcebergScanNode.java
index a2635bd95e4..ccd2a06fb5d 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/source/IcebergScanNode.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/source/IcebergScanNode.java
@@ -18,11 +18,9 @@
 package org.apache.doris.datasource.iceberg.source;
 
 import org.apache.doris.analysis.Expr;
-import org.apache.doris.analysis.FunctionCallExpr;
 import org.apache.doris.analysis.TableScanParams;
 import org.apache.doris.analysis.TableSnapshot;
 import org.apache.doris.analysis.TupleDescriptor;
-import org.apache.doris.catalog.Column;
 import org.apache.doris.catalog.Env;
 import org.apache.doris.catalog.TableIf;
 import org.apache.doris.common.DdlException;
@@ -491,17 +489,6 @@ public class IcebergScanNode extends FileQueryScanNode {
         return source.getCatalog().getCatalogProperty().getHadoopProperties();
     }
 
-    @Override
-    public boolean pushDownAggNoGrouping(FunctionCallExpr aggExpr) {
-        String aggFunctionName = 
aggExpr.getFnName().getFunction().toUpperCase();
-        return "COUNT".equals(aggFunctionName);
-    }
-
-    @Override
-    public boolean pushDownAggNoGroupingCheckCol(FunctionCallExpr aggExpr, 
Column col) {
-        return !col.isAllowNull();
-    }
-
     @VisibleForTesting
     public long getCountFromSnapshot() throws UserException {
         IcebergTableQueryInfo info = getSpecifiedSnapshot();
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/source/JdbcScanNode.java
 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/source/JdbcScanNode.java
index 019cceca6a7..0d3e9e53ee2 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/source/JdbcScanNode.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/source/JdbcScanNode.java
@@ -17,7 +17,6 @@
 
 package org.apache.doris.datasource.jdbc.source;
 
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.BinaryPredicate;
 import org.apache.doris.analysis.BoolLiteral;
 import org.apache.doris.analysis.CastExpr;
@@ -99,10 +98,6 @@ public class JdbcScanNode extends ExternalScanNode {
         tableName = tbl.getExternalTableName();
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-    }
 
     /**
      * Used for Nereids. Should NOT use this function in anywhere else.
@@ -251,14 +246,6 @@ public class JdbcScanNode extends ExternalScanNode {
         return output.toString();
     }
 
-    @Override
-    public void finalize(Analyzer analyzer) throws UserException {
-        // Convert predicates to Jdbc columns and filters.
-        createJdbcColumns();
-        createJdbcFilters();
-        createScanRangeLocations();
-    }
-
     @Override
     public void finalizeForNereids() throws UserException {
         createJdbcColumns();
@@ -271,16 +258,6 @@ public class JdbcScanNode extends ExternalScanNode {
         scanRangeLocations = 
Lists.newArrayList(createSingleScanRangeLocations(backendPolicy));
     }
 
-    @Override
-    public void computeStats(Analyzer analyzer) throws UserException {
-        super.computeStats(analyzer);
-        // even if current node scan has no data,at least on backend will be 
assigned when the fragment actually execute
-        numNodes = numNodes <= 0 ? 1 : numNodes;
-
-        
StatsRecursiveDerive.getStatsRecursiveDerive().statsRecursiveDerive(this);
-        cardinality = (long) statsDeriveResult.getRowCount();
-    }
-
     @Override
     protected void toThrift(TPlanNode msg) {
         msg.node_type = TPlanNodeType.JDBC_SCAN_NODE;
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/odbc/source/OdbcScanNode.java
 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/odbc/source/OdbcScanNode.java
index bb9dfddc921..9c2b45f44cf 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/odbc/source/OdbcScanNode.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/odbc/source/OdbcScanNode.java
@@ -17,7 +17,6 @@
 
 package org.apache.doris.datasource.odbc.source;
 
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.Expr;
 import org.apache.doris.analysis.ExprSubstitutionMap;
 import org.apache.doris.analysis.FunctionCallExpr;
@@ -79,11 +78,6 @@ public class OdbcScanNode extends ExternalScanNode {
         this.tbl = tbl;
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-    }
-
     /**
      * Used for Nereids. Should NOT use this function in anywhere else.
      */
@@ -101,14 +95,6 @@ public class OdbcScanNode extends ExternalScanNode {
         return helper.addValue(super.debugString()).toString();
     }
 
-    @Override
-    public void finalize(Analyzer analyzer) throws UserException {
-        // Convert predicates to Odbc columns and filters.
-        createOdbcColumns();
-        createOdbcFilters();
-        createScanRangeLocations();
-    }
-
     @Override
     public void finalizeForNereids() throws UserException {
         createOdbcColumns();
@@ -235,16 +221,6 @@ public class OdbcScanNode extends ExternalScanNode {
         super.toThrift(msg);
     }
 
-    @Override
-    public void computeStats(Analyzer analyzer) throws UserException {
-        super.computeStats(analyzer);
-        // even if current node scan has no data,at least on backend will be 
assigned when the fragment actually execute
-        numNodes = numNodes <= 0 ? 1 : numNodes;
-
-        
StatsRecursiveDerive.getStatsRecursiveDerive().statsRecursiveDerive(this);
-        cardinality = (long) statsDeriveResult.getRowCount();
-    }
-
     @Override
     public StatsDelta genStatsDelta() throws AnalysisException {
         return new StatsDelta(Env.getCurrentEnv().getCurrentCatalog().getId(),
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/tvf/source/MetadataScanNode.java
 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/tvf/source/MetadataScanNode.java
index 07e5ce2895a..fd3fcf24d92 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/tvf/source/MetadataScanNode.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/tvf/source/MetadataScanNode.java
@@ -17,7 +17,6 @@
 
 package org.apache.doris.datasource.tvf.source;
 
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.TupleDescriptor;
 import org.apache.doris.common.UserException;
 import org.apache.doris.datasource.ExternalScanNode;
@@ -101,11 +100,6 @@ public class MetadataScanNode extends ExternalScanNode {
         return scanRangeLocations;
     }
 
-    @Override
-    public void finalize(Analyzer analyzer) throws UserException {
-        createScanRangeLocations();
-    }
-
     @Override
     public boolean needToCheckColumnPriv() {
         return false;
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/AggregationNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/AggregationNode.java
index 446f49c3782..1ebefdf16a1 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/AggregationNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/AggregationNode.java
@@ -21,19 +21,13 @@
 package org.apache.doris.planner;
 
 import org.apache.doris.analysis.AggregateInfo;
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.Expr;
 import org.apache.doris.analysis.FunctionCallExpr;
 import org.apache.doris.analysis.SlotDescriptor;
-import org.apache.doris.analysis.SlotId;
 import org.apache.doris.analysis.SlotRef;
 import org.apache.doris.analysis.SortInfo;
-import org.apache.doris.analysis.TupleDescriptor;
-import org.apache.doris.common.NotImplementedException;
-import org.apache.doris.common.UserException;
 import org.apache.doris.planner.normalize.Normalizer;
 import org.apache.doris.statistics.StatisticalType;
-import org.apache.doris.statistics.StatsRecursiveDerive;
 import org.apache.doris.thrift.TAggregationNode;
 import org.apache.doris.thrift.TExplainLevel;
 import org.apache.doris.thrift.TExpr;
@@ -47,14 +41,11 @@ import com.google.common.base.MoreObjects;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 
-import java.util.ArrayList;
 import java.util.List;
-import java.util.Set;
 import java.util.stream.Collectors;
 
 /**
@@ -143,70 +134,6 @@ public class AggregationNode extends PlanNode {
         tupleIds.add(aggInfo.getIntermediateTupleId());
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        // Assign predicates to the top-most agg in the single-node plan that 
can evaluate
-        // them, as follows: For non-distinct aggs place them in the 1st phase 
agg node. For
-        // distinct aggs place them in the 2nd phase agg node. The conjuncts 
are
-        // transferred to the proper place in the multi-node plan via 
transferConjuncts().
-        if (tupleIds.get(0).equals(aggInfo.getResultTupleId()) && 
!aggInfo.isMerge()) {
-            // Ignore predicates bound by a grouping slot produced by a 
SlotRef grouping expr.
-            // Those predicates are already evaluated below this agg node 
(e.g., in a scan),
-            // because the grouping slot must be in the same equivalence class 
as another slot
-            // below this agg node. We must not ignore other grouping slots in 
order to retain
-            // conjuncts bound by those grouping slots in 
createEquivConjuncts() (IMPALA-2089).
-            // Those conjuncts cannot be redundant because our equivalence 
classes do not
-            // capture dependencies with non-SlotRef exprs.
-            // Set<SlotId> groupBySlots = Sets.newHashSet();
-            // for (int i = 0; i < aggInfo.getGroupingExprs().size(); ++i) {
-            //    if (aggInfo.getGroupingExprs().get(i).unwrapSlotRef(true) == 
null) continue;
-            //    
groupBySlots.add(aggInfo.getOutputTupleDesc().getSlots().get(i).getId());
-            // }
-            // ArrayList<Expr> bindingPredicates =
-            //         analyzer.getBoundPredicates(tupleIds.get(0), 
groupBySlots, true);
-            ArrayList<Expr> bindingPredicates = Lists.newArrayList();
-            conjuncts.addAll(bindingPredicates);
-
-            // also add remaining unassigned conjuncts_
-            assignConjuncts(analyzer);
-
-            // TODO(zc)
-            // analyzer.createEquivConjuncts(tupleIds_.get(0), conjuncts_, 
groupBySlots);
-        }
-        // TODO(zc)
-        // conjuncts_ = orderConjunctsByCost(conjuncts_);
-
-        // Compute the mem layout for both tuples here for simplicity.
-        aggInfo.getOutputTupleDesc().computeStatAndMemLayout();
-        aggInfo.getIntermediateTupleDesc().computeStatAndMemLayout();
-
-        // do this at the end so it can take all conjuncts into account
-        computeStats(analyzer);
-
-        // don't call createDefaultSMap(), it would point our conjuncts (= 
Having clause)
-        // to our input; our conjuncts don't get substituted because they 
already
-        // refer to our output
-        outputSmap = getCombinedChildSmap();
-        if (aggInfo.isMerge()) {
-            aggInfo.substitute(aggInfo.getIntermediateSmap(), analyzer);
-        }
-        aggInfo.substitute(outputSmap, analyzer);
-
-        // assert consistent aggregate expr and slot materialization
-        // aggInfo.checkConsistency();
-    }
-
-    @Override
-    public void computeStats(Analyzer analyzer) throws UserException {
-        super.computeStats(analyzer);
-        if (!analyzer.safeIsEnableJoinReorderBasedCost()) {
-            return;
-        }
-
-        
StatsRecursiveDerive.getStatsRecursiveDerive().statsRecursiveDerive(this);
-        cardinality = (long) statsDeriveResult.getRowCount();
-    }
-
     @Override
     protected void computeOldCardinality() {
         List<Expr> groupingExprs = aggInfo.getGroupingExprs();
@@ -418,76 +345,6 @@ public class AggregationNode extends PlanNode {
         return output.toString();
     }
 
-    @Override
-    public void getMaterializedIds(Analyzer analyzer, List<SlotId> ids) {
-        super.getMaterializedIds(analyzer, ids);
-
-        // we indirectly reference all grouping slots (because we write them)
-        // so they're all materialized.
-        aggInfo.getRefdSlots(ids);
-    }
-
-    @Override
-    public Set<SlotId> computeInputSlotIds(Analyzer analyzer) throws 
NotImplementedException {
-        Set<SlotId> result = Sets.newHashSet();
-        // compute group by slot
-        ArrayList<Expr> groupingExprs = aggInfo.getGroupingExprs();
-        List<SlotId> groupingSlotIds = Lists.newArrayList();
-        Expr.getIds(groupingExprs, null, groupingSlotIds);
-        result.addAll(groupingSlotIds);
-
-        // compute agg function slot
-        ArrayList<FunctionCallExpr> aggregateExprs = 
aggInfo.getAggregateExprs();
-        List<SlotId> aggregateSlotIds = Lists.newArrayList();
-        Expr.getIds(aggregateExprs, null, aggregateSlotIds);
-        result.addAll(aggregateSlotIds);
-
-        // case: select count(*) from test
-        // result is empty
-        // Actually need to take a column as the input column of the agg 
operator
-        if (result.isEmpty()) {
-            TupleDescriptor tupleDesc = 
analyzer.getTupleDesc(getChild(0).getOutputTupleIds().get(0));
-            // If the query result is empty set such as: select count(*) from 
table where 1=2
-            // then the materialized slot will be empty
-            // So the result should be empty also.
-            if (!tupleDesc.getMaterializedSlots().isEmpty()) {
-                result.add(tupleDesc.getMaterializedSlots().get(0).getId());
-            }
-        } else {
-            // if some input slot for aggregate slot which is not 
materialized, we need to remove it from the result
-            TupleDescriptor tupleDescriptor = aggInfo.getOutputTupleDesc();
-            ArrayList<SlotDescriptor> slots = tupleDescriptor.getSlots();
-            Set<SlotId> allUnRequestIds = Sets.newHashSet();
-            Set<SlotId> allRequestIds = Sets.newHashSet();
-            for (SlotDescriptor slot : slots) {
-                if (!slot.isMaterialized()) {
-                    List<SlotId> unRequestIds = Lists.newArrayList();
-                    Expr.getIds(slot.getSourceExprs(), null, unRequestIds);
-                    allUnRequestIds.addAll(unRequestIds);
-                } else {
-                    List<SlotId> requestIds = Lists.newArrayList();
-                    Expr.getIds(slot.getSourceExprs(), null, requestIds);
-                    allRequestIds.addAll(requestIds);
-                }
-            }
-            allRequestIds.forEach(allUnRequestIds::remove);
-            groupingSlotIds.forEach(allUnRequestIds::remove);
-            allUnRequestIds.forEach(result::remove);
-        }
-        return result;
-    }
-
-    @Override
-    public void finalize(Analyzer analyzer) throws UserException {
-        super.finalize(analyzer);
-        List<Expr> groupingExprs = aggInfo.getGroupingExprs();
-        for (int i = 0; i < groupingExprs.size(); i++) {
-            
aggInfo.getOutputTupleDesc().getSlots().get(i).setIsNullable(groupingExprs.get(i).isNullable());
-            
aggInfo.getIntermediateTupleDesc().getSlots().get(i).setIsNullable(groupingExprs.get(i).isNullable());
-            aggInfo.getOutputTupleDesc().computeMemLayout();
-        }
-    }
-
     // If `GroupingExprs` is empty and agg need to finalize, the result must 
be output by single instance
     @Override
     public boolean isSerialOperator() {
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/AnalyticEvalNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/AnalyticEvalNode.java
index 7b5998717a2..8dac8b102f8 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/AnalyticEvalNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/AnalyticEvalNode.java
@@ -21,14 +21,11 @@
 package org.apache.doris.planner;
 
 import org.apache.doris.analysis.AnalyticWindow;
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.Expr;
 import org.apache.doris.analysis.ExprSubstitutionMap;
 import org.apache.doris.analysis.OrderByElement;
 import org.apache.doris.analysis.TupleDescriptor;
-import org.apache.doris.common.UserException;
 import org.apache.doris.statistics.StatisticalType;
-import org.apache.doris.statistics.StatsRecursiveDerive;
 import org.apache.doris.thrift.TAnalyticNode;
 import org.apache.doris.thrift.TExplainLevel;
 import org.apache.doris.thrift.TPlanNode;
@@ -139,45 +136,6 @@ public class AnalyticEvalNode extends PlanNode {
         return orderByElements;
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        analyzer.getDescTbl().computeStatAndMemLayout();
-        intermediateTupleDesc.computeStatAndMemLayout();
-        // we add the analyticInfo's smap to the combined smap of our child
-        outputSmap = logicalToPhysicalSmap;
-        createDefaultSmap(analyzer);
-
-        // Do not assign any conjuncts here: the conjuncts out of our 
SelectStmt's
-        // Where clause have already been assigned, and conjuncts coming out 
of an
-        // enclosing scope need to be evaluated *after* all analytic 
computations.
-
-        // do this at the end so it can take all conjuncts into account
-        computeStats(analyzer);
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("desctbl: " + analyzer.getDescTbl().debugString());
-        }
-
-        // point fn calls, partition and ordering exprs at our input
-        ExprSubstitutionMap childSmap = getCombinedChildSmap();
-        analyticFnCalls = Expr.substituteList(analyticFnCalls, childSmap, 
analyzer, false);
-        substitutedPartitionExprs = Expr.substituteList(partitionExprs, 
childSmap,
-                                    analyzer, false);
-        orderByElements = OrderByElement.substitute(orderByElements, 
childSmap, analyzer);
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("evalnode: " + debugString());
-        }
-    }
-
-    @Override
-    protected void computeStats(Analyzer analyzer) throws UserException {
-        super.computeStats(analyzer);
-        if (!analyzer.safeIsEnableJoinReorderBasedCost()) {
-            return;
-        }
-        
StatsRecursiveDerive.getStatsRecursiveDerive().statsRecursiveDerive(this);
-        cardinality = (long) statsDeriveResult.getRowCount();
-    }
-
     @Override
     protected void computeOldCardinality() {
         cardinality = getChild(0).cardinality;
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/AssertNumRowsNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/AssertNumRowsNode.java
index a4c4aa42c65..8f7ba2d63c9 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/AssertNumRowsNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/AssertNumRowsNode.java
@@ -17,12 +17,9 @@
 
 package org.apache.doris.planner;
 
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.AssertNumRowsElement;
 import org.apache.doris.analysis.TupleDescriptor;
-import org.apache.doris.common.UserException;
 import org.apache.doris.statistics.StatisticalType;
-import org.apache.doris.statistics.StatsRecursiveDerive;
 import org.apache.doris.thrift.TAssertNumRowsNode;
 import org.apache.doris.thrift.TExplainLevel;
 import org.apache.doris.thrift.TPlanNode;
@@ -72,20 +69,6 @@ public class AssertNumRowsNode extends PlanNode {
         this.shouldConvertOutputToNullable = convertToNullable;
     }
 
-
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-        super.computeStats(analyzer);
-        if (analyzer.safeIsEnableJoinReorderBasedCost()) {
-            
StatsRecursiveDerive.getStatsRecursiveDerive().statsRecursiveDerive(this);
-            cardinality = (long) statsDeriveResult.getRowCount();
-        }
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("stats AssertNumRows: cardinality={}", cardinality);
-        }
-    }
-
     @Override
     public String getNodeExplainString(String prefix, TExplainLevel 
detailLevel) {
         if (detailLevel == TExplainLevel.BRIEF) {
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/BackendPartitionedSchemaScanNode.java
 
b/fe/fe-core/src/main/java/org/apache/doris/planner/BackendPartitionedSchemaScanNode.java
index 6482054df0a..200df156e97 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/planner/BackendPartitionedSchemaScanNode.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/planner/BackendPartitionedSchemaScanNode.java
@@ -17,7 +17,6 @@
 
 package org.apache.doris.planner;
 
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.LiteralExpr;
 import org.apache.doris.analysis.TupleDescriptor;
 import org.apache.doris.catalog.Column;
@@ -100,19 +99,6 @@ public class BackendPartitionedSchemaScanNode extends 
SchemaScanNode {
         this.tableIf = table;
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-        computeColumnsFilter();
-        computePartitionInfo();
-    }
-
-    @Override
-    public void finalize(Analyzer analyzer) throws UserException {
-        super.finalize(analyzer);
-        createScanRangeLocations();
-    }
-
     @Override
     public void finalizeForNereids() throws UserException {
         computeColumnsFilter();
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/DataGenScanNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/DataGenScanNode.java
index 995a7f2b1fb..3c426ad2bbd 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/DataGenScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/DataGenScanNode.java
@@ -17,7 +17,6 @@
 
 package org.apache.doris.planner;
 
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.TupleDescriptor;
 import org.apache.doris.common.NereidsException;
 import org.apache.doris.common.UserException;
@@ -55,11 +54,6 @@ public class DataGenScanNode extends ExternalScanNode {
         this.tvf = tvf;
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-    }
-
     public DataGenTableValuedFunction getTvf() {
         return tvf;
     }
@@ -69,15 +63,6 @@ public class DataGenScanNode extends ExternalScanNode {
         return scanRangeLocations;
     }
 
-    @Override
-    public void finalize(Analyzer analyzer) throws UserException {
-        if (isFinalized) {
-            return;
-        }
-        createScanRangeLocations();
-        isFinalized = true;
-    }
-
     @Override
     protected void toThrift(TPlanNode msg) {
         msg.node_type = TPlanNodeType.DATA_GEN_SCAN_NODE;
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/EmptySetNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/EmptySetNode.java
index e262797a4fb..908eff2d6bb 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/EmptySetNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/EmptySetNode.java
@@ -17,11 +17,8 @@
 
 package org.apache.doris.planner;
 
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.TupleId;
-import org.apache.doris.common.UserException;
 import org.apache.doris.statistics.StatisticalType;
-import org.apache.doris.statistics.StatsRecursiveDerive;
 import org.apache.doris.thrift.TPlanNode;
 import org.apache.doris.thrift.TPlanNodeType;
 
@@ -46,31 +43,6 @@ public class EmptySetNode extends PlanNode {
         Preconditions.checkArgument(tupleIds.size() > 0);
     }
 
-    @Override
-    public void computeStats(Analyzer analyzer) throws UserException {
-        StatsRecursiveDerive.getStatsRecursiveDerive().statsRecursiveDerive(this);
-        cardinality = (long) statsDeriveResult.getRowCount();
-        avgRowSize = 0;
-        numNodes = 1;
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("stats EmptySet:" + id + ", cardinality: " + cardinality);
-        }
-    }
-
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        Preconditions.checkState(conjuncts.isEmpty());
-        // If the physical output tuple produced by an AnalyticEvalNode wasn't 
created
-        // the logical output tuple is returned by getMaterializedTupleIds(). 
It needs
-        // to be set as materialized (even though it isn't) to avoid failing 
precondition
-        // checks generating the thrift for slot refs that may reference this 
tuple.
-        for (TupleId id : tupleIds) {
-            analyzer.getTupleDesc(id).setIsMaterialized(true);
-        }
-        computeTupleStatAndMemLayout(analyzer);
-        computeStats(analyzer);
-    }
-
     @Override
     protected void toThrift(TPlanNode msg) {
         msg.node_type = TPlanNodeType.EMPTY_SET_NODE;
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/ExchangeNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/ExchangeNode.java
index 01b1ce7d1f6..e98e20e9dcf 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/ExchangeNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/ExchangeNode.java
@@ -20,14 +20,11 @@
 
 package org.apache.doris.planner;
 
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.SortInfo;
 import org.apache.doris.analysis.TupleDescriptor;
 import org.apache.doris.analysis.TupleId;
-import org.apache.doris.common.UserException;
 import org.apache.doris.qe.ConnectContext;
 import org.apache.doris.statistics.StatisticalType;
-import org.apache.doris.statistics.StatsRecursiveDerive;
 import org.apache.doris.thrift.TExchangeNode;
 import org.apache.doris.thrift.TExplainLevel;
 import org.apache.doris.thrift.TPartitionType;
@@ -36,7 +33,6 @@ import org.apache.doris.thrift.TPlanNodeType;
 
 import com.google.common.base.MoreObjects;
 import com.google.common.base.MoreObjects.ToStringHelper;
-import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -135,26 +131,6 @@ public class ExchangeNode extends PlanNode {
         }
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-        Preconditions.checkState(conjuncts.isEmpty());
-        if (!analyzer.safeIsEnableJoinReorderBasedCost()) {
-            return;
-        }
-        computeStats(analyzer);
-    }
-
-    @Override
-    protected void computeStats(Analyzer analyzer) throws UserException {
-        Preconditions.checkState(children.size() == 1);
-        StatsRecursiveDerive.getStatsRecursiveDerive().statsRecursiveDerive(this);
-        cardinality = (long) statsDeriveResult.getRowCount();
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("stats Exchange:" + id + ", cardinality: " + 
cardinality);
-        }
-    }
-
     public SortInfo getMergeInfo() {
         return mergeInfo;
     }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/FileLoadScanNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/FileLoadScanNode.java
index 5794d3eb927..01513aad7c7 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/FileLoadScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/FileLoadScanNode.java
@@ -119,15 +119,6 @@ public class FileLoadScanNode extends FileScanNode {
         fileGroupInfos.add(fileGroupInfo);
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-        for (FileGroupInfo fileGroupInfo : fileGroupInfos) {
-            this.scanProviders.add(new LoadScanProvider(fileGroupInfo, desc));
-        }
-        initParamCreateContexts(analyzer);
-    }
-
     // For each scan provider, create a corresponding ParamCreateContext
    private void initParamCreateContexts(Analyzer analyzer) throws UserException {
         for (LoadScanProvider scanProvider : scanProviders) {
@@ -193,29 +184,6 @@ public class FileLoadScanNode extends FileScanNode {
         return newWhereExpr;
     }
 
-    @Override
-    public void finalize(Analyzer analyzer) throws UserException {
-        Preconditions.checkState(contexts.size() == scanProviders.size(),
-                contexts.size() + " vs. " + scanProviders.size());
-        // ATTN: for load scan node, do not use backend policy in 
ExternalScanNode.
-        // Because backend policy in ExternalScanNode may only contain compute 
backend.
-        // But for load job, we should select backends from all backends, both 
compute and mix.
-        BeSelectionPolicy policy = new BeSelectionPolicy.Builder()
-                .needQueryAvailable()
-                .needLoadAvailable()
-                .build();
-        FederationBackendPolicy localBackendPolicy = new 
FederationBackendPolicy();
-        localBackendPolicy.init(policy);
-        for (int i = 0; i < contexts.size(); ++i) {
-            FileLoadScanNode.ParamCreateContext context = contexts.get(i);
-            LoadScanProvider scanProvider = scanProviders.get(i);
-            finalizeParamsForLoad(context, analyzer);
-            createScanRangeLocations(context, scanProvider, 
localBackendPolicy);
-            this.selectedSplitNum += scanProvider.getInputSplitNum();
-            this.totalFileSize += scanProvider.getInputFileSize();
-        }
-    }
-
     public void finalizeForNereids(TUniqueId loadId, 
List<NereidsFileGroupInfo> fileGroupInfos,
             List<NereidsParamCreateContext> contexts, 
NereidsLoadPlanInfoCollector.LoadPlanInfo loadPlanInfo)
             throws UserException {
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/GroupCommitScanNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/GroupCommitScanNode.java
index 0751187e4fc..ef95ee61e3c 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/GroupCommitScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/GroupCommitScanNode.java
@@ -17,7 +17,6 @@
 
 package org.apache.doris.planner;
 
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.TupleDescriptor;
 import org.apache.doris.common.UserException;
 import org.apache.doris.datasource.ExternalScanNode;
@@ -46,10 +45,6 @@ public class GroupCommitScanNode extends ExternalScanNode {
     protected void createScanRangeLocations() throws UserException {
     }
 
-    @Override
-    public void finalize(Analyzer analyzer) throws UserException {
-    }
-
     @Override
     public List<TScanRangeLocations> getScanRangeLocations(long 
maxScanRangeLength) {
         return Lists.newArrayList();
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/HashJoinNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/HashJoinNode.java
index 14b55bc436c..4d449cf57d8 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/HashJoinNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/HashJoinNode.java
@@ -36,7 +36,6 @@ import org.apache.doris.catalog.OlapTable;
 import org.apache.doris.catalog.TableIf;
 import org.apache.doris.common.CheckedMath;
 import org.apache.doris.common.Pair;
-import org.apache.doris.common.UserException;
 import org.apache.doris.nereids.trees.expressions.ExprId;
 import org.apache.doris.statistics.StatisticalType;
 import org.apache.doris.thrift.TEqJoinCondition;
@@ -298,12 +297,6 @@ public class HashJoinNode extends JoinNodeBase {
         return hashOutputSlotIds;
     }
 
-    @Override
-    public void initOutputSlotIds(Set<SlotId> requiredSlotIdSet, Analyzer 
analyzer) {
-        super.initOutputSlotIds(requiredSlotIdSet, analyzer);
-        initHashOutputSlotIds(outputSlotIds, analyzer);
-    }
-
     @Override
     protected void computeOtherConjuncts(Analyzer analyzer, 
ExprSubstitutionMap originToIntermediateSmap) {
         otherJoinConjuncts = Expr.substituteList(otherJoinConjuncts, 
originToIntermediateSmap, analyzer, false);
@@ -321,29 +314,6 @@ public class HashJoinNode extends JoinNodeBase {
         return joinConjunctSlotIds;
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-
-        ExprSubstitutionMap combinedChildSmap = 
getCombinedChildWithoutTupleIsNullSmap();
-        List<Expr> newEqJoinConjuncts = Expr.substituteList(eqJoinConjuncts, 
combinedChildSmap, analyzer, false);
-        eqJoinConjuncts =
-                newEqJoinConjuncts.stream().map(entity -> {
-                            BinaryPredicate predicate = (BinaryPredicate) 
entity;
-                            if 
(predicate.getOp().equals(BinaryPredicate.Operator.EQ_FOR_NULL)) {
-                                
Preconditions.checkArgument(predicate.getChildren().size() == 2);
-                                if (!predicate.getChild(0).isNullable() || 
!predicate.getChild(1).isNullable()) {
-                                    
predicate.setOp(BinaryPredicate.Operator.EQ);
-                                }
-                            }
-                            return predicate;
-                        }
-                ).collect(Collectors.toList());
-        otherJoinConjuncts = Expr.substituteList(otherJoinConjuncts, 
combinedChildSmap, analyzer, false);
-
-        computeOutputTuple(analyzer);
-    }
-
     @Override
     protected Pair<Boolean, Boolean> needToCopyRightAndLeft() {
         boolean copyleft = true;
@@ -541,15 +511,6 @@ public class HashJoinNode extends JoinNodeBase {
         return result;
     }
 
-
-    @Override
-    public void computeStats(Analyzer analyzer) throws UserException {
-        super.computeStats(analyzer);
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("stats HashJoin:" + id + ", cardinality: " + cardinality);
-        }
-    }
-
     @Override
     protected void computeOldCardinality() {
         // For a join between child(0) and child(1), we look for join 
conditions "L.c = R.d"
@@ -739,19 +700,6 @@ public class HashJoinNode extends JoinNodeBase {
         return helper.toString();
     }
 
-    @Override
-    public void getMaterializedIds(Analyzer analyzer, List<SlotId> ids) {
-        super.getMaterializedIds(analyzer, ids);
-        // we also need to materialize everything referenced by eqJoinConjuncts
-        // and otherJoinConjuncts
-        for (Expr eqJoinPredicate : eqJoinConjuncts) {
-            eqJoinPredicate.getIds(null, ids);
-        }
-        for (Expr e : otherJoinConjuncts) {
-            e.getIds(null, ids);
-        }
-    }
-
     //nereids only
     public void addSlotIdToHashOutputSlotIds(SlotId slotId) {
         hashOutputSlotIds.add(slotId);
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/JoinNodeBase.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/JoinNodeBase.java
index 046e6e3d262..1ae16311f05 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/JoinNodeBase.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/JoinNodeBase.java
@@ -29,26 +29,20 @@ import org.apache.doris.analysis.TupleDescriptor;
 import org.apache.doris.analysis.TupleId;
 import org.apache.doris.analysis.TupleIsNullPredicate;
 import org.apache.doris.common.AnalysisException;
-import org.apache.doris.common.NotImplementedException;
 import org.apache.doris.common.Pair;
 import org.apache.doris.common.UserException;
 import org.apache.doris.statistics.StatisticalType;
-import org.apache.doris.statistics.StatsRecursiveDerive;
 import org.apache.doris.thrift.TNullSide;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 
 import java.util.ArrayList;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
-import java.util.stream.Collectors;
 
 public abstract class JoinNodeBase extends PlanNode {
     private static final Logger LOG = LogManager.getLogger(JoinNodeBase.class);
@@ -265,66 +259,6 @@ public abstract class JoinNodeBase extends PlanNode {
         outputSmap = ExprSubstitutionMap.composeAndReplace(outputSmap, 
srcTblRefToOutputTupleSmap, analyzer);
     }
 
-    @Override
-    public void initOutputSlotIds(Set<SlotId> requiredSlotIdSet, Analyzer 
analyzer) {
-        outputSlotIds = Lists.newArrayList();
-        List<TupleDescriptor> outputTupleDescList = Lists.newArrayList();
-        if (outputTupleDesc != null) {
-            outputTupleDescList.add(outputTupleDesc);
-        } else {
-            for (TupleId tupleId : tupleIds) {
-                outputTupleDescList.add(analyzer.getTupleDesc(tupleId));
-            }
-        }
-        SlotId firstMaterializedSlotId = null;
-        for (TupleDescriptor tupleDescriptor : outputTupleDescList) {
-            for (SlotDescriptor slotDescriptor : tupleDescriptor.getSlots()) {
-                if (slotDescriptor.isMaterialized()) {
-                    if ((requiredSlotIdSet == null || 
requiredSlotIdSet.contains(slotDescriptor.getId()))) {
-                        outputSlotIds.add(slotDescriptor.getId());
-                    }
-                    if (firstMaterializedSlotId == null) {
-                        firstMaterializedSlotId = slotDescriptor.getId();
-                    }
-                }
-            }
-        }
-
-        // be may be possible to output correct row number without any column 
data in future
-        // but for now, in order to have correct output row number, should 
keep at least one slot.
-        // use first materialized slot if outputSlotIds is empty.
-        if (outputSlotIds.isEmpty() && firstMaterializedSlotId != null) {
-            outputSlotIds.add(firstMaterializedSlotId);
-        }
-    }
-
-    @Override
-    public void projectOutputTuple() {
-        if (outputTupleDesc == null) {
-            return;
-        }
-        if (outputTupleDesc.getSlots().size() == outputSlotIds.size()) {
-            return;
-        }
-        Iterator<SlotDescriptor> iterator = 
outputTupleDesc.getSlots().iterator();
-        while (iterator.hasNext()) {
-            SlotDescriptor slotDescriptor = iterator.next();
-            boolean keep = false;
-            for (SlotId outputSlotId : outputSlotIds) {
-                if (slotDescriptor.getId().equals(outputSlotId)) {
-                    keep = true;
-                    break;
-                }
-            }
-            if (!keep) {
-                iterator.remove();
-                SlotRef slotRef = new SlotRef(slotDescriptor);
-                vSrcToOutputSMap.removeByRhsExpr(slotRef);
-            }
-        }
-        outputTupleDesc.computeStatAndMemLayout();
-    }
-
     protected abstract Pair<Boolean, Boolean> needToCopyRightAndLeft();
 
     protected abstract void computeOtherConjuncts(Analyzer analyzer, 
ExprSubstitutionMap originToIntermediateSmap);
@@ -420,38 +354,6 @@ public abstract class JoinNodeBase extends PlanNode {
 
     protected abstract List<SlotId> computeSlotIdsForJoinConjuncts(Analyzer 
analyzer);
 
-    @Override
-    public Set<SlotId> computeInputSlotIds(Analyzer analyzer) throws 
NotImplementedException {
-        Set<SlotId> result = Sets.newHashSet();
-        Preconditions.checkState(outputSlotIds != null);
-        // step1: change output slot id to src slot id
-        if (vSrcToOutputSMap != null) {
-            for (SlotId slotId : outputSlotIds) {
-                SlotRef slotRef = new 
SlotRef(analyzer.getDescTbl().getSlotDesc(slotId));
-                Expr srcExpr = vSrcToOutputSMap.mappingForRhsExpr(slotRef);
-                if (srcExpr == null) {
-                    result.add(slotId);
-                } else {
-                    List<SlotRef> srcSlotRefList = Lists.newArrayList();
-                    srcExpr.collect(SlotRef.class, srcSlotRefList);
-                    result.addAll(srcSlotRefList.stream().map(e -> 
e.getSlotId()).collect(Collectors.toList()));
-                }
-            }
-        }
-        result.addAll(computeSlotIdsForJoinConjuncts(analyzer));
-        // conjunct
-        List<SlotId> conjunctSlotIds = Lists.newArrayList();
-        Expr.getIds(conjuncts, null, conjunctSlotIds);
-        result.addAll(conjunctSlotIds);
-        return result;
-    }
-
-    @Override
-    public void finalize(Analyzer analyzer) throws UserException {
-        super.finalize(analyzer);
-        computeIntermediateTuple(analyzer);
-    }
-
     /**
      * Only for Nereids.
      */
@@ -467,18 +369,6 @@ public abstract class JoinNodeBase extends PlanNode {
         return innerRef;
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-        assignedConjuncts = analyzer.getAssignedConjuncts();
-        // outSmap replace in outer join may cause NULL be replace by literal
-        // so need replace the outsmap in nullableTupleID
-        computeStats(analyzer);
-
-        if (isMarkJoin() && !joinOp.supportMarkJoin()) {
-            throw new UserException("Mark join is supported only for LEFT SEMI 
JOIN/LEFT ANTI JOIN/CROSS JOIN");
-        }
-    }
 
     /**
      * If parent wants to get join node tupleids,
@@ -533,16 +423,6 @@ public abstract class JoinNodeBase extends PlanNode {
         }
     }
 
-    @Override
-    public void computeStats(Analyzer analyzer) throws UserException {
-        super.computeStats(analyzer);
-        if (!analyzer.safeIsEnableJoinReorderBasedCost()) {
-            return;
-        }
-        
StatsRecursiveDerive.getStatsRecursiveDerive().statsRecursiveDerive(this);
-        cardinality = (long) statsDeriveResult.getRowCount();
-    }
-
     @Override
     public int getNumInstances() {
         return Math.max(children.get(0).getNumInstances(), 
children.get(1).getNumInstances());
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/MysqlScanNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/MysqlScanNode.java
index fe75530dae9..3d4d2f4ab4d 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/MysqlScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/MysqlScanNode.java
@@ -29,7 +29,6 @@ import org.apache.doris.catalog.TableIf.TableType;
 import org.apache.doris.common.UserException;
 import org.apache.doris.datasource.ExternalScanNode;
 import org.apache.doris.statistics.StatisticalType;
-import org.apache.doris.statistics.StatsRecursiveDerive;
 import org.apache.doris.thrift.TExplainLevel;
 import org.apache.doris.thrift.TMySQLScanNode;
 import org.apache.doris.thrift.TPlanNode;
@@ -69,14 +68,6 @@ public class MysqlScanNode extends ExternalScanNode {
         return helper.addValue(super.debugString()).toString();
     }
 
-    @Override
-    public void finalize(Analyzer analyzer) throws UserException {
-        // Convert predicates to MySQL columns and filters.
-        createMySQLColumns(analyzer);
-        createMySQLFilters(analyzer);
-        createScanRangeLocations();
-    }
-
     @Override
     protected void createScanRangeLocations() throws UserException {
         scanRangeLocations = 
Lists.newArrayList(createSingleScanRangeLocations(backendPolicy));
@@ -167,14 +158,4 @@ public class MysqlScanNode extends ExternalScanNode {
     public int getNumInstances() {
         return 1;
     }
-
-    @Override
-    public void computeStats(Analyzer analyzer) throws UserException {
-        super.computeStats(analyzer);
-        // even if current node scan has no data,at least on backend will be 
assigned when the fragment actually execute
-        numNodes = numNodes <= 0 ? 1 : numNodes;
-
-        
StatsRecursiveDerive.getStatsRecursiveDerive().statsRecursiveDerive(this);
-        cardinality = (long) statsDeriveResult.getRowCount();
-    }
 }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/NestedLoopJoinNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/NestedLoopJoinNode.java
index efbd5ec87e5..9579193c72e 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/NestedLoopJoinNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/NestedLoopJoinNode.java
@@ -27,7 +27,6 @@ import org.apache.doris.analysis.TableRef;
 import org.apache.doris.analysis.TupleDescriptor;
 import org.apache.doris.analysis.TupleId;
 import org.apache.doris.common.Pair;
-import org.apache.doris.common.UserException;
 import org.apache.doris.statistics.StatisticalType;
 import org.apache.doris.thrift.TExplainLevel;
 import org.apache.doris.thrift.TNestedLoopJoinNode;
@@ -201,14 +200,6 @@ public class NestedLoopJoinNode extends JoinNodeBase {
         msg.node_type = TPlanNodeType.CROSS_JOIN_NODE;
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-        ExprSubstitutionMap combinedChildSmap = 
getCombinedChildWithoutTupleIsNullSmap();
-        joinConjuncts = Expr.substituteList(joinConjuncts, combinedChildSmap, 
analyzer, false);
-        computeCrossRuntimeFilterExpr();
-        computeOutputTuple(analyzer);
-    }
 
     private void computeCrossRuntimeFilterExpr() {
         for (int i = conjuncts.size() - 1; i >= 0; --i) {
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/OlapScanNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/OlapScanNode.java
index b6648019a7d..cb63ed70c83 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/OlapScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/OlapScanNode.java
@@ -24,7 +24,6 @@ import org.apache.doris.analysis.CastExpr;
 import org.apache.doris.analysis.CreateMaterializedViewStmt;
 import org.apache.doris.analysis.DescriptorTable;
 import org.apache.doris.analysis.Expr;
-import org.apache.doris.analysis.FunctionCallExpr;
 import org.apache.doris.analysis.InPredicate;
 import org.apache.doris.analysis.IntLiteral;
 import org.apache.doris.analysis.LiteralExpr;
@@ -72,7 +71,6 @@ import 
org.apache.doris.planner.normalize.PartitionRangePredicateNormalizer;
 import org.apache.doris.qe.ConnectContext;
 import org.apache.doris.resource.computegroup.ComputeGroup;
 import org.apache.doris.statistics.StatisticalType;
-import org.apache.doris.statistics.StatsDeriveResult;
 import org.apache.doris.statistics.StatsRecursiveDerive;
 import org.apache.doris.statistics.query.StatsDelta;
 import org.apache.doris.system.Backend;
@@ -545,35 +543,6 @@ public class OlapScanNode extends ScanNode {
         return helper.toString();
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-
-        filterDeletedRows(analyzer);
-        if (olapTable.getPartitionInfo().enableAutomaticPartition()) {
-            partitionsInfo = olapTable.getPartitionInfo();
-            analyzerPartitionExpr(analyzer, partitionsInfo);
-        }
-        computeColumnsFilter();
-        computePartitionInfo();
-        computeTupleState(analyzer);
-
-        /**
-         * Compute InAccurate cardinality before mv selector and tablet 
pruning.
-         * - Accurate statistical information relies on the selector of 
materialized
-         * views and bucket reduction.
-         * - However, Those both processes occur after the reorder algorithm is
-         * completed.
-         * - When Join reorder is turned on, the cardinality must be 
calculated before
-         * the reorder algorithm.
-         * - So only an inaccurate cardinality can be calculated here.
-         */
-        mockRowCountInStatistic();
-        if (analyzer.safeIsEnableJoinReorderBasedCost()) {
-            computeInaccurateCardinality();
-        }
-    }
-
     /**
      * Init OlapScanNode, ONLY used for Nereids. Should NOT use this function 
in anywhere else.
      */
@@ -598,57 +567,12 @@ public class OlapScanNode extends ScanNode {
         }
     }
 
-    @Override
-    public void finalize(Analyzer analyzer) throws UserException {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("OlapScanNode get scan range locations. Tuple: {}", 
desc);
-        }
-        /**
-         * If JoinReorder is turned on, it will be calculated init(), and this 
value is
-         * not accurate.
-         * In the following logic, cardinality will be accurately calculated 
again.
-         * So here we need to reset the value of cardinality.
-         */
-        if (analyzer.safeIsEnableJoinReorderBasedCost()) {
-            cardinality = 0;
-        }
-
-        try {
-            createScanRangeLocations();
-        } catch (AnalysisException e) {
-            throw new UserException(e.getMessage());
-        }
-
-        // Relatively accurate cardinality according to ScanRange in
-        // getScanRangeLocations
-        computeStats(analyzer);
-        computeNumNodes();
-    }
-
     public void computeTupleState(Analyzer analyzer) {
         for (TupleId id : tupleIds) {
             analyzer.getDescTbl().getTupleDesc(id).computeStat();
         }
     }
 
-    @Override
-    public void computeStats(Analyzer analyzer) throws UserException {
-        super.computeStats(analyzer);
-        if (cardinality > 0) {
-            avgRowSize = totalBytes / (float) cardinality * COMPRESSION_RATIO;
-            capCardinalityAtLimit();
-        }
-        // when node scan has no data, cardinality should be 0 instead of a 
invalid
-        // value after computeStats()
-        cardinality = cardinality == -1 ? 0 : cardinality;
-
-        // update statsDeriveResult for real statistics
-        // After statistics collection is complete, remove the logic
-        if (analyzer.safeIsEnableJoinReorderBasedCost()) {
-            statsDeriveResult = new StatsDeriveResult(cardinality, 
statsDeriveResult.getSlotIdToColumnStats());
-        }
-    }
-
     @Override
     protected void computeNumNodes() {
         if (cardinality > 0) {
@@ -1463,16 +1387,6 @@ public class OlapScanNode extends ScanNode {
         return scanRangeLocations.size();
     }
 
-    @Override
-    public void setShouldColoScan() {
-        shouldColoScan = true;
-    }
-
-    @Override
-    public boolean getShouldColoScan() {
-        return shouldColoScan;
-    }
-
     public int getBucketNum() {
         // In bucket shuffle join, we have 2 situation.
         // 1. Only one partition: in this case, we use 
scanNode.getTotalTabletsNum() to get the right bucket num
@@ -1936,40 +1850,6 @@ public class OlapScanNode extends ScanNode {
                 scanReplicaIds);
     }
 
-    @Override
-    public boolean pushDownAggNoGrouping(FunctionCallExpr aggExpr) {
-        KeysType type = getOlapTable().getKeysType();
-        if (type == KeysType.UNIQUE_KEYS || type == KeysType.PRIMARY_KEYS) {
-            return false;
-        }
-
-        String aggFunctionName = aggExpr.getFnName().getFunction();
-        if (aggFunctionName.equalsIgnoreCase("COUNT") && type != 
KeysType.DUP_KEYS) {
-            return false;
-        }
-
-        return true;
-    }
-
-    @Override
-    public boolean pushDownAggNoGroupingCheckCol(FunctionCallExpr aggExpr, 
Column col) {
-        KeysType type = getOlapTable().getKeysType();
-
-        // The value column of the agg does not support zone_map index.
-        if (type == KeysType.AGG_KEYS && !col.isKey()) {
-            return false;
-        }
-
-        if (aggExpr.getChild(0) instanceof SlotRef) {
-            SlotRef slot = (SlotRef) aggExpr.getChild(0);
-            if (CreateMaterializedViewStmt.isMVColumn(slot.getColumnName()) && 
slot.getColumn().isAggregated()) {
-                return false;
-            }
-        }
-
-        return true;
-    }
-
     @Override
     public int getScanRangeNum() {
         return getScanTabletIds().size();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/PlanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/PlanNode.java
index 11a6fc93f41..d9add61e824 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/PlanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/PlanNode.java
@@ -20,24 +20,17 @@
 
 package org.apache.doris.planner;
 
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.BitmapFilterPredicate;
 import org.apache.doris.analysis.CompoundPredicate;
 import org.apache.doris.analysis.Expr;
 import org.apache.doris.analysis.ExprId;
 import org.apache.doris.analysis.ExprSubstitutionMap;
-import org.apache.doris.analysis.FunctionCallExpr;
 import org.apache.doris.analysis.SlotDescriptor;
 import org.apache.doris.analysis.SlotId;
 import org.apache.doris.analysis.SlotRef;
 import org.apache.doris.analysis.TupleDescriptor;
 import org.apache.doris.analysis.TupleId;
-import org.apache.doris.catalog.Column;
-import org.apache.doris.catalog.OlapTable;
-import org.apache.doris.catalog.Type;
-import org.apache.doris.common.AnalysisException;
 import org.apache.doris.common.Id;
-import org.apache.doris.common.NotImplementedException;
 import org.apache.doris.common.Pair;
 import org.apache.doris.common.TreeNode;
 import org.apache.doris.common.UserException;
@@ -55,7 +48,6 @@ import org.apache.doris.thrift.TPushAggOp;
 
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Predicates;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
@@ -67,7 +59,6 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -305,15 +296,6 @@ public abstract class PlanNode extends TreeNode<PlanNode> implements PlanStats {
         }
     }
 
-    public void setLimitAndOffset(long limit, long offset) {
-        if (this.limit == -1) {
-            this.limit = limit;
-        } else if (limit != -1) {
-            this.limit = Math.min(this.limit - offset, limit);
-        }
-        this.offset += offset;
-    }
-
     public void setOffset(long offset) {
         this.offset = offset;
     }
@@ -322,10 +304,6 @@ public abstract class PlanNode extends TreeNode<PlanNode> 
implements PlanStats {
         return limit > -1;
     }
 
-    public boolean hasOffset() {
-        return offset != 0;
-    }
-
     public void setCardinality(long cardinality) {
         this.cardinality = cardinality;
     }
@@ -360,20 +338,6 @@ public abstract class PlanNode extends TreeNode<PlanNode> 
implements PlanStats {
         }
     }
 
-    public void unsetLimit() {
-        limit = -1;
-    }
-
-    protected List<TupleId> getAllScanTupleIds() {
-        List<TupleId> tupleIds = Lists.newArrayList();
-        List<ScanNode> scanNodes = Lists.newArrayList();
-        collectAll(Predicates.instanceOf(ScanNode.class), scanNodes);
-        for (ScanNode node : scanNodes) {
-            tupleIds.addAll(node.getTupleIds());
-        }
-        return tupleIds;
-    }
-
     public void resetTupleIds(ArrayList<TupleId> tupleIds) {
         this.tupleIds = tupleIds;
     }
@@ -387,10 +351,6 @@ public abstract class PlanNode extends TreeNode<PlanNode> 
implements PlanStats {
         return tblRefIds;
     }
 
-    public void setTblRefIds(ArrayList<TupleId> ids) {
-        tblRefIds = ids;
-    }
-
     public ArrayList<TupleId> getOutputTblRefIds() {
         return tblRefIds;
     }
@@ -470,19 +430,6 @@ public abstract class PlanNode extends TreeNode<PlanNode> 
implements PlanStats {
         }
     }
 
-    public void setAssignedConjuncts(Set<ExprId> conjuncts) {
-        assignedConjuncts = conjuncts;
-    }
-
-    public Set<ExprId> getAssignedConjuncts() {
-        return assignedConjuncts;
-    }
-
-    public void transferConjuncts(PlanNode recipient) {
-        recipient.conjuncts.addAll(conjuncts);
-        conjuncts.clear();
-    }
-
     public void addPreFilterConjuncts(List<Expr> conjuncts) {
         if (conjuncts == null) {
             return;
@@ -490,15 +437,6 @@ public abstract class PlanNode extends TreeNode<PlanNode> 
implements PlanStats {
         this.preFilterConjuncts.addAll(conjuncts);
     }
 
-    /**
-     * Call computeStatAndMemLayout() for all materialized tuples.
-     */
-    protected void computeTupleStatAndMemLayout(Analyzer analyzer) {
-        for (TupleId id : tupleIds) {
-            analyzer.getDescTbl().getTupleDesc(id).computeStatAndMemLayout();
-        }
-    }
-
     public String getExplainString() {
         return getExplainString("", "", TExplainLevel.VERBOSE);
     }
@@ -706,53 +644,12 @@ public abstract class PlanNode extends TreeNode<PlanNode> 
implements PlanStats {
         }
     }
 
-    /**
-     * Computes internal state, including planner-relevant statistics.
-     * Call this once on the root of the plan tree before calling toThrift().
-     * Subclasses need to override this.
-     */
-    public void finalize(Analyzer analyzer) throws UserException {
-        for (Expr expr : conjuncts) {
-            Set<SlotRef> slotRefs = new HashSet<>();
-            expr.getSlotRefsBoundByTupleIds(tupleIds, slotRefs);
-            for (SlotRef slotRef : slotRefs) {
-                slotRef.getDesc().setIsMaterialized(true);
-            }
-            for (TupleId tupleId : tupleIds) {
-                analyzer.getTupleDesc(tupleId).computeMemLayout();
-            }
-        }
-        for (PlanNode child : children) {
-            child.finalize(analyzer);
-        }
-        computeNumNodes();
-        if (!analyzer.safeIsEnableJoinReorderBasedCost()) {
-            computeOldCardinality();
-        }
-    }
-
     protected void computeNumNodes() {
         if (!children.isEmpty()) {
             numNodes = getChild(0).numNodes;
         }
     }
 
-    /**
-     * Computes planner statistics: avgRowSize.
-     * Subclasses need to override this.
-     * Assumes that it has already been called on all children.
-     * This is broken out of finalize() so that it can be called separately
-     * from finalize() (to facilitate inserting additional nodes during plan
-     * partitioning w/o the need to call finalize() recursively on the whole 
tree again).
-     */
-    protected void computeStats(Analyzer analyzer) throws UserException {
-        avgRowSize = 0.0F;
-        for (TupleId tid : tupleIds) {
-            TupleDescriptor desc = analyzer.getTupleDesc(tid);
-            avgRowSize += desc.getAvgSerializedSize();
-        }
-    }
-
     /**
      * This function will calculate the cardinality when the old join reorder 
algorithm is enabled.
      * This value is used to determine the distributed way(broadcast of 
shuffle) of join in the distributed planning.
@@ -784,123 +681,8 @@ public abstract class PlanNode extends TreeNode<PlanNode> 
implements PlanStats {
         return outputSmap;
     }
 
-    public void setOutputSmap(ExprSubstitutionMap smap, Analyzer analyzer) {
-        outputSmap = smap;
-    }
-
-    public void setWithoutTupleIsNullOutputSmap(ExprSubstitutionMap smap) {
-        withoutTupleIsNullOutputSmap = smap;
-    }
-
-    public ExprSubstitutionMap getWithoutTupleIsNullOutputSmap() {
-        return withoutTupleIsNullOutputSmap == null ? outputSmap : 
withoutTupleIsNullOutputSmap;
-    }
-
     public void init() throws UserException {}
 
-    public void init(Analyzer analyzer) throws UserException {
-        assignConjuncts(analyzer);
-        createDefaultSmap(analyzer);
-        castConjuncts();
-    }
-
-    private void castConjuncts() throws AnalysisException {
-        for (int i = 0; i < conjuncts.size(); ++i) {
-            Expr expr = conjuncts.get(i);
-            if (!expr.getType().isBoolean()) {
-                try {
-                    conjuncts.set(i, expr.castTo(Type.BOOLEAN));
-                } catch (AnalysisException e) {
-                    LOG.warn("{} is not boolean and can not be cast to 
boolean", expr.toSql(), e);
-                    throw new AnalysisException("conjuncts " + expr.toSql() + 
" is not boolean");
-                }
-            }
-        }
-    }
-
-    /**
-     * Assign remaining unassigned conjuncts.
-     */
-    protected void assignConjuncts(Analyzer analyzer) {
-        // we cannot plan conjuncts on exchange node, so we just skip the node.
-        if (this instanceof ExchangeNode) {
-            return;
-        }
-        List<Expr> unassigned = analyzer.getUnassignedConjuncts(this);
-        for (Expr unassignedConjunct : unassigned) {
-            addConjunct(unassignedConjunct);
-        }
-        analyzer.markConjunctsAssigned(unassigned);
-    }
-
-    /**
-     * Returns an smap that combines the children's smaps.
-     */
-    protected ExprSubstitutionMap getCombinedChildSmap() {
-        if (getChildren().size() == 0) {
-            return new ExprSubstitutionMap();
-        }
-
-        if (getChildren().size() == 1) {
-            return getChild(0).getOutputSmap();
-        }
-
-        ExprSubstitutionMap result = ExprSubstitutionMap.combine(
-                getChild(0).getOutputSmap(), getChild(1).getOutputSmap());
-
-        for (int i = 2; i < getChildren().size(); ++i) {
-            result = ExprSubstitutionMap.combine(result, 
getChild(i).getOutputSmap());
-        }
-
-        return result;
-    }
-
-    protected ExprSubstitutionMap getCombinedChildWithoutTupleIsNullSmap() {
-        if (getChildren().size() == 0) {
-            return new ExprSubstitutionMap();
-        }
-        if (getChildren().size() == 1) {
-            return getChild(0).getWithoutTupleIsNullOutputSmap();
-        }
-        ExprSubstitutionMap result = ExprSubstitutionMap.combine(
-                getChild(0).getWithoutTupleIsNullOutputSmap(),
-                getChild(1).getWithoutTupleIsNullOutputSmap());
-
-        for (int i = 2; i < getChildren().size(); ++i) {
-            result = ExprSubstitutionMap.combine(
-                    result, getChild(i).getWithoutTupleIsNullOutputSmap());
-        }
-
-        return result;
-    }
-
-    /**
-     * Sets outputSmap_ to compose(existing smap, combined child smap). Also
-     * substitutes conjuncts_ using the combined child smap.
-     *
-     * @throws AnalysisException
-     */
-    protected void createDefaultSmap(Analyzer analyzer) throws UserException {
-        ExprSubstitutionMap combinedChildSmap = getCombinedChildSmap();
-        outputSmap =
-                ExprSubstitutionMap.compose(outputSmap, combinedChildSmap, 
analyzer);
-
-        conjuncts = Expr.substituteList(conjuncts, outputSmap, analyzer, 
false);
-    }
-
-    /**
-     * Appends ids of slots that need to be materialized for this tree of 
nodes.
-     * By default, only slots referenced by conjuncts need to be materialized
-     * (the rationale being that only conjuncts need to be evaluated 
explicitly;
-     * exprs that are turned into scan predicates, etc., are evaluated 
implicitly).
-     */
-    public void getMaterializedIds(Analyzer analyzer, List<SlotId> ids) {
-        for (PlanNode childNode : children) {
-            childNode.getMaterializedIds(analyzer, ids);
-        }
-        Expr.getIds(getConjuncts(), null, ids);
-    }
-
     // Convert this plan node into msg (excluding children), which requires 
setting
     // the node type and the node-specific field.
     protected abstract void toThrift(TPlanNode msg);
@@ -1011,23 +793,10 @@ public abstract class PlanNode extends 
TreeNode<PlanNode> implements PlanStats {
         return output.toString();
     }
 
-    /**
-     * Returns true if stats-related variables are valid.
-     */
-    protected boolean hasValidStats() {
-        return (numNodes == -1 || numNodes >= 0) && (cardinality == -1 || 
cardinality >= 0);
-    }
-
     public int getNumInstances() {
         return this.children.get(0).getNumInstances();
     }
 
-    public void setShouldColoScan() {}
-
-    public boolean getShouldColoScan() {
-        return false;
-    }
-
     public void setNumInstances(int numInstances) {
         this.numInstances = numInstances;
     }
@@ -1108,27 +877,6 @@ public abstract class PlanNode extends TreeNode<PlanNode> 
implements PlanStats {
         return prod;
     }
 
-    // Compute the cardinality after applying conjuncts based on 
'preConjunctCardinality'.
-    protected void applyConjunctsSelectivity() {
-        if (cardinality == -1) {
-            return;
-        }
-        applySelectivity();
-    }
-
-    // Compute the cardinality after applying conjuncts with 'selectivity', 
based on
-    // 'preConjunctCardinality'.
-    private void applySelectivity() {
-        double selectivity = computeSelectivity();
-        Preconditions.checkState(cardinality >= 0);
-        double preConjunctCardinality = cardinality;
-        cardinality = Math.round(cardinality * selectivity);
-        // don't round cardinality down to zero for safety.
-        if (cardinality == 0 && preConjunctCardinality > 0) {
-            cardinality = 1;
-        }
-    }
-
     /**
      * find planNode recursively based on the planNodeId
      */
@@ -1156,53 +904,6 @@ public abstract class PlanNode extends TreeNode<PlanNode> 
implements PlanStats {
         return sb.toString();
     }
 
-    public ScanNode getScanNodeInOneFragmentBySlotRef(SlotRef slotRef) {
-        TupleId tupleId = slotRef.getDesc().getParent().getId();
-        if (this instanceof ScanNode && tupleIds.contains(tupleId)) {
-            return (ScanNode) this;
-        } else if (this instanceof HashJoinNode) {
-            HashJoinNode hashJoinNode = (HashJoinNode) this;
-            SlotRef inputSlotRef = hashJoinNode.getMappedInputSlotRef(slotRef);
-            if (inputSlotRef != null) {
-                for (PlanNode planNode : children) {
-                    ScanNode scanNode = 
planNode.getScanNodeInOneFragmentBySlotRef(inputSlotRef);
-                    if (scanNode != null) {
-                        return scanNode;
-                    }
-                }
-            } else {
-                return null;
-            }
-        } else if (!(this instanceof ExchangeNode)) {
-            for (PlanNode planNode : children) {
-                ScanNode scanNode = 
planNode.getScanNodeInOneFragmentBySlotRef(slotRef);
-                if (scanNode != null) {
-                    return scanNode;
-                }
-            }
-        }
-        return null;
-    }
-
-    public SlotRef findSrcSlotRef(SlotRef slotRef) {
-        if (slotRef.getSrcSlotRef() != null) {
-            slotRef = slotRef.getSrcSlotRef();
-        }
-        if (slotRef.getTable() instanceof OlapTable) {
-            return slotRef;
-        }
-        if (this instanceof HashJoinNode) {
-            HashJoinNode hashJoinNode = (HashJoinNode) this;
-            SlotRef inputSlotRef = hashJoinNode.getMappedInputSlotRef(slotRef);
-            if (inputSlotRef != null) {
-                return hashJoinNode.getChild(0).findSrcSlotRef(inputSlotRef);
-            } else {
-                return slotRef;
-            }
-        }
-        return slotRef;
-    }
-
     protected void addRuntimeFilter(RuntimeFilter filter) {
         runtimeFilters.add(filter);
     }
@@ -1226,62 +927,6 @@ public abstract class PlanNode extends TreeNode<PlanNode> 
implements PlanStats {
         return Joiner.on(", ").join(filtersStr) + "\n";
     }
 
-    /**
-     * If an plan node implements this method, the plan node itself supports 
project optimization.
-     * @param requiredSlotIdSet: The upper plan node's requirement slot set 
for the current plan node.
-     *                        The requiredSlotIdSet could be null when the 
upper plan node cannot
-     *                         calculate the required slot.
-     * @param analyzer
-     * @throws NotImplementedException
-     *
-     * For example:
-     * Query: select a.k1 from a, b where a.k1=b.k1
-     * PlanNodeTree:
-     *     output exprs: a.k1
-     *           |
-     *     hash join node
-     *   (input slots: a.k1, b.k1)
-     *        |      |
-     *  scan a(k1)   scan b(k1)
-     *
-     * Function params: requiredSlotIdSet = a.k1
-     * After function:
-     *     hash join node
-     *   (output slots: a.k1)
-     *   (input slots: a.k1, b.k1)
-     */
-    public void initOutputSlotIds(Set<SlotId> requiredSlotIdSet, Analyzer 
analyzer) throws NotImplementedException {
-        throw new NotImplementedException("The `initOutputSlotIds` hasn't been 
implemented in " + planNodeName);
-    }
-
-    public void projectOutputTuple() throws NotImplementedException {
-        throw new NotImplementedException("The `projectOutputTuple` hasn't 
been implemented in " + planNodeName + ". "
-        + "But it does not affect the project optimizer");
-    }
-
-    /**
-     * If an plan node implements this method, its child plan node has the 
ability to implement the project.
-     * The return value of this method will be used as
-     *     the input(requiredSlotIdSet) of child plan node method 
initOutputSlotIds.
-     * That is to say, only when the plan node implements this method,
-     *     its children can realize project optimization.
-     *
-     * @return The requiredSlotIdSet of this plan node
-     * @throws NotImplementedException
-     * PlanNodeTree:
-     *         agg node(group by a.k1)
-     *           |
-     *     hash join node(a.k1=b.k1)
-     *        |      |
-     *  scan a(k1)   scan b(k1)
-     * After function:
-     *         agg node
-     *    (required slots: a.k1)
-     */
-    public Set<SlotId> computeInputSlotIds(Analyzer analyzer) throws 
NotImplementedException {
-        throw new NotImplementedException("The `computeInputSlotIds` hasn't 
been implemented in " + planNodeName);
-    }
-
     @Override
     public String toString() {
         StringBuilder sb = new StringBuilder();
@@ -1314,10 +959,6 @@ public abstract class PlanNode extends TreeNode<PlanNode> 
implements PlanStats {
         return projectList;
     }
 
-    public List<SlotId> getOutputSlotIds() {
-        return outputSlotIds;
-    }
-
     public void setConjuncts(Set<Expr> exprs) {
         conjuncts = new ArrayList<>(exprs);
     }
@@ -1340,14 +981,6 @@ public abstract class PlanNode extends TreeNode<PlanNode> 
implements PlanStats {
         return pushDownAggNoGroupingOp;
     }
 
-    public boolean pushDownAggNoGrouping(FunctionCallExpr aggExpr) {
-        return false;
-    }
-
-    public boolean pushDownAggNoGroupingCheckCol(FunctionCallExpr aggExpr, 
Column col) {
-        return false;
-    }
-
     public void setNereidsId(int nereidsId) {
         this.nereidsId = nereidsId;
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/RepeatNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/RepeatNode.java
index 2bc4e847ac3..e982e414dfb 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/RepeatNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/RepeatNode.java
@@ -17,32 +17,23 @@
 
 package org.apache.doris.planner;
 
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.Expr;
-import org.apache.doris.analysis.ExprSubstitutionMap;
 import org.apache.doris.analysis.GroupByClause;
 import org.apache.doris.analysis.GroupingInfo;
-import org.apache.doris.analysis.SlotDescriptor;
 import org.apache.doris.analysis.SlotId;
 import org.apache.doris.analysis.TupleDescriptor;
-import org.apache.doris.analysis.TupleId;
-import org.apache.doris.common.UserException;
 import org.apache.doris.statistics.StatisticalType;
-import org.apache.doris.statistics.StatsRecursiveDerive;
 import org.apache.doris.thrift.TExplainLevel;
 import org.apache.doris.thrift.TPlanNode;
 import org.apache.doris.thrift.TPlanNodeType;
 import org.apache.doris.thrift.TRepeatNode;
 
 import com.google.common.base.MoreObjects;
-import com.google.common.base.Preconditions;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 
 import java.util.ArrayList;
-import java.util.BitSet;
-import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Objects;
@@ -112,61 +103,6 @@ public class RepeatNode extends PlanNode {
         return slotIdList;
     }
 
-    @Override
-    public void computeStats(Analyzer analyzer) throws UserException {
-        avgRowSize = 0;
-        numNodes = 1;
-
-        
StatsRecursiveDerive.getStatsRecursiveDerive().statsRecursiveDerive(this);
-        cardinality = (long) statsDeriveResult.getRowCount();
-
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("stats Sort: cardinality=" + cardinality);
-        }
-    }
-
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        Preconditions.checkState(conjuncts.isEmpty());
-        ExprSubstitutionMap childSmap = getCombinedChildSmap();
-        
groupByClause.substituteGroupingExprs(groupingInfo.getVirtualSlotRefs(), 
childSmap, analyzer);
-        groupingInfo.substitutePreRepeatExprs(childSmap, analyzer);
-        outputSmap = groupingInfo.getOutputTupleSmap();
-        conjuncts = Expr.substituteList(conjuncts, outputSmap, analyzer, 
false);
-        outputTupleDesc = groupingInfo.getOutputTupleDesc();
-        List<TupleId> inputTupleIds = input.getOutputTupleIds();
-        if (inputTupleIds.size() == 1) {
-            // used for MaterializedViewSelector getTableIdToColumnNames
-            
outputTupleDesc.setTable(analyzer.getTupleDesc(inputTupleIds.get(0)).getTable());
-        }
-
-        outputTupleDesc.computeStatAndMemLayout();
-
-        List<Set<SlotId>> groupingIdList = new ArrayList<>();
-        List<SlotDescriptor> groupingSlotDescList = 
groupingInfo.getGroupingSlotDescList();
-        for (BitSet bitSet : 
Collections.unmodifiableList(groupingInfo.getGroupingIdList())) {
-            Set<SlotId> slotIdSet = new HashSet<>();
-            for (int i = 0; i < groupingSlotDescList.size(); i++) {
-                if (bitSet.get(i)) {
-                    slotIdSet.add(groupingSlotDescList.get(i).getId());
-                }
-            }
-            groupingIdList.add(slotIdSet);
-        }
-
-        this.repeatSlotIdList = buildIdSetList(groupingIdList);
-        allSlotId = new HashSet<>();
-        for (Set<Integer> s : this.repeatSlotIdList) {
-            allSlotId.addAll(s);
-        }
-        this.groupingList = 
groupingInfo.genGroupingList(groupByClause.getGroupingExprs());
-        for (TupleId id : tupleIds) {
-            analyzer.getTupleDesc(id).setIsMaterialized(true);
-        }
-        computeTupleStatAndMemLayout(analyzer);
-        computeStats(analyzer);
-    }
-
     @Override
     protected void toThrift(TPlanNode msg) {
         msg.node_type = TPlanNodeType.REPEAT_NODE;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/ScanNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/ScanNode.java
index 110c64d398a..cf0e9900971 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/ScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/ScanNode.java
@@ -62,7 +62,6 @@ import org.apache.doris.thrift.TScanRangeLocation;
 import org.apache.doris.thrift.TScanRangeLocations;
 
 import com.google.common.base.MoreObjects;
-import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Range;
@@ -123,14 +122,6 @@ public abstract class ScanNode extends PlanNode implements 
SplitGenerator {
         this.desc = desc;
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-        this.analyzer = analyzer;
-        // materialize conjuncts in where
-        analyzer.materializeSlots(conjuncts);
-    }
-
     /**
      * Helper function to parse a "host:port" address string into 
TNetworkAddress
      * This is called with ipaddress:port when doing scan range assigment.
@@ -653,45 +644,6 @@ public abstract class ScanNode extends PlanNode implements 
SplitGenerator {
         }
     }
 
-    @Override
-    public void initOutputSlotIds(Set<SlotId> requiredSlotIdSet, Analyzer 
analyzer) {
-        if (outputTupleDesc != null && requiredSlotIdSet != null) {
-            Preconditions.checkNotNull(outputSmap);
-            ArrayList<SlotId> materializedSlotIds = 
outputTupleDesc.getMaterializedSlotIds();
-            Preconditions.checkState(projectList != null && projectList.size() 
<= materializedSlotIds.size(),
-                    "projectList's size should be less than 
materializedSlotIds's size");
-            boolean hasNewSlot = false;
-            if (projectList.size() < materializedSlotIds.size()) {
-                // need recreate projectList based on materializedSlotIds
-                hasNewSlot = true;
-            }
-
-            // find new project expr from outputSmap based on requiredSlotIdSet
-            ArrayList<SlotId> allSlots = outputTupleDesc.getAllSlotIds();
-            for (SlotId slotId : requiredSlotIdSet) {
-                if (!materializedSlotIds.contains(slotId) && 
allSlots.contains(slotId)) {
-                    SlotDescriptor slot = 
outputTupleDesc.getSlot(slotId.asInt());
-                    for (Expr expr : outputSmap.getRhs()) {
-                        if (expr instanceof SlotRef && ((SlotRef) 
expr).getSlotId() == slotId) {
-                            slot.setIsMaterialized(true);
-                            outputSlotToProjectExpr.put(slotId, 
expr.getSrcSlotRef());
-                            hasNewSlot = true;
-                        }
-                    }
-                }
-            }
-
-            if (hasNewSlot) {
-                // recreate the project list
-                projectList.clear();
-                materializedSlotIds = outputTupleDesc.getMaterializedSlotIds();
-                for (SlotId slotId : materializedSlotIds) {
-                    projectList.add(outputSlotToProjectExpr.get(slotId));
-                }
-            }
-        }
-    }
-
     public List<TupleId> getOutputTupleIds() {
         if (outputTupleDesc != null) {
             return Lists.newArrayList(outputTupleDesc.getId());
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/SchemaScanNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/SchemaScanNode.java
index bad10c181c3..665a6d1b297 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/SchemaScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/SchemaScanNode.java
@@ -17,7 +17,6 @@
 
 package org.apache.doris.planner;
 
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.Expr;
 import org.apache.doris.analysis.TupleDescriptor;
 import org.apache.doris.catalog.Env;
@@ -90,22 +89,6 @@ public class SchemaScanNode extends ScanNode {
         return helper.addValue(super.debugString()).toString();
     }
 
-    @Override
-    public void finalize(Analyzer analyzer) throws UserException {
-        // Convert predicates to MySQL columns and filters.
-        schemaCatalog = analyzer.getSchemaCatalog();
-        schemaDb = analyzer.getSchemaDb();
-        schemaTable = analyzer.getSchemaTable();
-        if 
(ConnectContext.get().getSessionVariable().enableSchemaScanFromMasterFe
-                && tableName.equalsIgnoreCase("tables")) {
-            frontendIP = Env.getCurrentEnv().getMasterHost();
-            frontendPort = Env.getCurrentEnv().getMasterRpcPort();
-        } else {
-            frontendIP = FrontendOptions.getLocalHostAddress();
-            frontendPort = Config.rpc_port;
-        }
-    }
-
     @Override
     public void finalizeForNereids() throws UserException {
         if 
(ConnectContext.get().getSessionVariable().enableSchemaScanFromMasterFe
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/SelectNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/SelectNode.java
index 734e9338352..b0a8f36f2dc 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/SelectNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/SelectNode.java
@@ -20,11 +20,8 @@
 
 package org.apache.doris.planner;
 
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.Expr;
-import org.apache.doris.common.UserException;
 import org.apache.doris.statistics.StatisticalType;
-import org.apache.doris.statistics.StatsRecursiveDerive;
 import org.apache.doris.thrift.TExplainLevel;
 import org.apache.doris.thrift.TPlanNode;
 import org.apache.doris.thrift.TPlanNodeType;
@@ -63,26 +60,6 @@ public class SelectNode extends PlanNode {
         msg.node_type = TPlanNodeType.SELECT_NODE;
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-        analyzer.markConjunctsAssigned(conjuncts);
-        computeStats(analyzer);
-    }
-
-    @Override
-    public void computeStats(Analyzer analyzer) throws UserException {
-        super.computeStats(analyzer);
-        if (!analyzer.safeIsEnableJoinReorderBasedCost()) {
-            return;
-        }
-        
StatsRecursiveDerive.getStatsRecursiveDerive().statsRecursiveDerive(this);
-        cardinality = (long) statsDeriveResult.getRowCount();
-
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("stats Select: cardinality={}", this.cardinality);
-        }
-    }
 
     @Override
     protected void computeOldCardinality() {
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/SetOperationNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/SetOperationNode.java
index 081882b1ff2..12e5e66606d 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/SetOperationNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/SetOperationNode.java
@@ -25,7 +25,6 @@ import org.apache.doris.analysis.SlotRef;
 import org.apache.doris.analysis.TupleDescriptor;
 import org.apache.doris.analysis.TupleId;
 import org.apache.doris.common.CheckedMath;
-import org.apache.doris.common.UserException;
 import org.apache.doris.statistics.StatisticalType;
 import org.apache.doris.thrift.TExceptNode;
 import org.apache.doris.thrift.TExplainLevel;
@@ -153,93 +152,6 @@ public abstract class SetOperationNode extends PlanNode {
         return materializedConstExprLists;
     }
 
-    @Override
-    public void finalize(Analyzer analyzer) throws UserException {
-        super.finalize(analyzer);
-        // the resultExprLists should be substituted by child's output smap
-        // because the result exprs are column A, B, but the child output 
exprs are column B, A
-        // after substituted, the next computePassthrough method will get 
correct info to do its job
-        List<List<Expr>> substitutedResultExprLists = Lists.newArrayList();
-        for (int i = 0; i < resultExprLists.size(); ++i) {
-            substitutedResultExprLists.add(Expr.substituteList(
-                    resultExprLists.get(i), children.get(i).getOutputSmap(), 
analyzer, true));
-        }
-        resultExprLists = substitutedResultExprLists;
-        // In Doris-6380, moved computePassthrough() and the materialized 
position of resultExprs/constExprs
-        // from this.init() to this.finalize(), and will not call 
SetOperationNode::init() again at the end
-        // of createSetOperationNodeFragment().
-        //
-        // Reasons for move computePassthrough():
-        //      Because the byteSize of the tuple corresponding to 
OlapScanNode is updated after
-        //      singleNodePlanner.createSingleNodePlan() and before 
singleNodePlan.finalize(),
-        //      calling computePassthrough() in SetOperationNode::init() may 
not be able to accurately determine whether
-        //      the child is pass through. In the previous logic , Will call 
SetOperationNode::init() again
-        //      at the end of createSetOperationNodeFragment().
-        //
-        // Reasons for move materialized position of resultExprs/constExprs:
-        //     Because the output slot is materialized at various positions in 
the planner stage, this is to ensure that
-        //     eventually the resultExprs/constExprs and the corresponding 
output slot have the same materialized state.
-        //     And the order of materialized resultExprs must be the same as 
the order of child adjusted by
-        //     computePassthrough(), so resultExprs materialized must be 
placed after computePassthrough().
-
-        // except Node must not reorder the child
-        if (!(this instanceof ExceptNode)) {
-            computePassthrough(analyzer);
-        }
-        // drop resultExprs/constExprs that aren't getting materialized (= 
where the
-        // corresponding output slot isn't being materialized)
-        materializedResultExprLists.clear();
-        Preconditions.checkState(resultExprLists.size() == children.size());
-        List<SlotDescriptor> slots = 
analyzer.getDescTbl().getTupleDesc(tupleId).getSlots();
-        for (int i = 0; i < resultExprLists.size(); ++i) {
-            List<Expr> exprList = resultExprLists.get(i);
-            List<Expr> newExprList = Lists.newArrayList();
-            Preconditions.checkState(exprList.size() == slots.size());
-            for (int j = 0; j < exprList.size(); ++j) {
-                if (slots.get(j).isMaterialized()) {
-                    newExprList.add(exprList.get(j));
-                }
-            }
-            materializedResultExprLists.add(newExprList);
-        }
-        Preconditions.checkState(
-                materializedResultExprLists.size() == getChildren().size());
-
-        materializedConstExprLists.clear();
-        for (List<Expr> exprList : constExprLists) {
-            Preconditions.checkState(exprList.size() == slots.size());
-            List<Expr> newExprList = Lists.newArrayList();
-            for (int i = 0; i < exprList.size(); ++i) {
-                if (slots.get(i).isMaterialized()) {
-                    newExprList.add(exprList.get(i));
-                }
-            }
-            materializedConstExprLists.add(newExprList);
-        }
-        if (!resultExprLists.isEmpty()) {
-            List<Expr> exprs = resultExprLists.get(0);
-            TupleDescriptor tupleDescriptor = analyzer.getTupleDesc(tupleId);
-            for (int i = 0; i < exprs.size(); i++) {
-                boolean isNullable = exprs.get(i).isNullable();
-                for (int j = 1; j < resultExprLists.size(); j++) {
-                    isNullable = isNullable || 
resultExprLists.get(j).get(i).isNullable();
-                }
-                tupleDescriptor.getSlots().get(i).setIsNullable(
-                        tupleDescriptor.getSlots().get(i).getIsNullable() || 
isNullable);
-                tupleDescriptor.computeMemLayout();
-            }
-        }
-    }
-
-    @Override
-    public void computeStats(Analyzer analyzer) throws UserException {
-        super.computeStats(analyzer);
-        if (!analyzer.safeIsEnableJoinReorderBasedCost()) {
-            return;
-        }
-        computeCardinality();
-    }
-
     @Override
     protected void computeOldCardinality() {
         computeCardinality();
@@ -364,23 +276,6 @@ public abstract class SetOperationNode extends PlanNode {
         children = newChildren;
     }
 
-    /**
-     * Must be called after addChild()/addConstExprList(). Computes the 
materialized
-     * result/const expr lists based on the materialized slots of this 
UnionNode's
-     * produced tuple. The UnionNode doesn't need an smap: like a ScanNode, it
-     * materializes an original tuple.
-     * There is no need to call assignConjuncts() because all non-constant 
conjuncts
-     * have already been assigned to the set operation operands, and all 
constant conjuncts have
-     * been evaluated during registration to set analyzer.hasEmptyResultSet_.
-     */
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        Preconditions.checkState(conjuncts.isEmpty());
-        createDefaultSmap(analyzer);
-        computeTupleStatAndMemLayout(analyzer);
-        computeStats(analyzer);
-    }
-
     protected void toThrift(TPlanNode msg, TPlanNodeType nodeType) {
         Preconditions.checkState(materializedResultExprLists.size() == 
children.size());
         List<List<TExpr>> texprLists = Lists.newArrayList();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/SortNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/SortNode.java
index 4a014bd3834..41d2f807bf3 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/SortNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/SortNode.java
@@ -20,19 +20,12 @@
 
 package org.apache.doris.planner;
 
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.Expr;
-import org.apache.doris.analysis.ExprSubstitutionMap;
 import org.apache.doris.analysis.SlotDescriptor;
-import org.apache.doris.analysis.SlotId;
-import org.apache.doris.analysis.SlotRef;
 import org.apache.doris.analysis.SortInfo;
-import org.apache.doris.common.NotImplementedException;
 import org.apache.doris.common.Pair;
-import org.apache.doris.common.UserException;
 import org.apache.doris.qe.ConnectContext;
 import org.apache.doris.statistics.StatisticalType;
-import org.apache.doris.statistics.StatsRecursiveDerive;
 import org.apache.doris.thrift.TExplainLevel;
 import org.apache.doris.thrift.TPlanNode;
 import org.apache.doris.thrift.TPlanNodeType;
@@ -48,10 +41,8 @@ import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 
 import java.util.ArrayList;
-import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Set;
 
 /**
  * Sorting.
@@ -244,21 +235,6 @@ public class SortNode extends PlanNode {
         return output.toString();
     }
 
-    @Override
-    protected void computeStats(Analyzer analyzer) throws UserException {
-        super.computeStats(analyzer);
-        if (!analyzer.safeIsEnableJoinReorderBasedCost()) {
-            return;
-        }
-
-        
StatsRecursiveDerive.getStatsRecursiveDerive().statsRecursiveDerive(this);
-        cardinality = (long) statsDeriveResult.getRowCount();
-
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("stats Sort: cardinality=" + cardinality);
-        }
-    }
-
     @Override
     protected void computeOldCardinality() {
         cardinality = getChild(0).cardinality;
@@ -274,63 +250,6 @@ public class SortNode extends PlanNode {
         }
     }
 
-    public void init(Analyzer analyzer) throws UserException {
-        // Compute the memory layout for the generated tuple.
-        computeStats(analyzer);
-        // createDefaultSmap(analyzer);
-        // // populate resolvedTupleExprs and outputSmap_
-        // List<SlotDescriptor> sortTupleSlots = 
info.getSortTupleDescriptor().getSlots();
-        // List<Expr> slotExprs = info.getSortTupleSlotExprs_();
-        // Preconditions.checkState(sortTupleSlots.size() == slotExprs.size());
-
-        // populate resolvedTupleExprs_ and outputSmap_
-        List<SlotDescriptor> sortTupleSlots = 
info.getSortTupleDescriptor().getSlots();
-        List<Expr> slotExprs = info.getSortTupleSlotExprs();
-        Preconditions.checkState(sortTupleSlots.size() == slotExprs.size());
-
-        resolvedTupleExprs = Lists.newArrayList();
-        outputSmap = new ExprSubstitutionMap();
-
-        for (int i = 0; i < slotExprs.size(); ++i) {
-            resolvedTupleExprs.add(slotExprs.get(i));
-            outputSmap.put(slotExprs.get(i), new 
SlotRef(sortTupleSlots.get(i)));
-            nullabilityChangedFlags.add(slotExprs.get(i).isNullable());
-        }
-
-        ExprSubstitutionMap childSmap = getCombinedChildSmap();
-        resolvedTupleExprs = Expr.substituteList(resolvedTupleExprs, 
childSmap, analyzer, false);
-
-        for (int i = 0; i < resolvedTupleExprs.size(); ++i) {
-            nullabilityChangedFlags.set(i, nullabilityChangedFlags.get(i) ^ 
resolvedTupleExprs.get(i).isNullable());
-        }
-
-        // Remap the ordering exprs to the tuple materialized by this sort 
node. The mapping
-        // is a composition of the childSmap and the outputSmap_ because the 
child node may
-        // have also remapped its input (e.g., as in a series of 
(sort->analytic)* nodes).
-        // Parent nodes have to do the same so set the composition as the 
outputSmap_.
-        outputSmap = ExprSubstitutionMap.compose(childSmap, outputSmap, 
analyzer);
-        info.substituteOrderingExprs(outputSmap, analyzer);
-
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("sort id " + tupleIds.get(0).toString() + " smap: "
-                    + outputSmap.debugString());
-            LOG.debug("sort input exprs: " + 
Expr.debugString(resolvedTupleExprs));
-        }
-    }
-
-    @Override
-    public void getMaterializedIds(Analyzer analyzer, List<SlotId> ids) {
-        super.getMaterializedIds(analyzer, ids);
-        Expr.getIds(info.getOrderingExprs(), null, ids);
-    }
-
-    @Override
-    public void initOutputSlotIds(Set<SlotId> requiredSlotIdSet, Analyzer 
analyzer) {
-        // need call materializeRequiredSlots again to make sure required 
slots is materialized by children
-        // requiredSlotIdSet parameter means nothing for sort node, just call 
materializeRequiredSlots is enough
-        info.materializeRequiredSlots(analyzer, outputSmap);
-    }
-
     private void removeUnusedExprs() {
         if (!isUnusedExprRemoved) {
             if (resolvedTupleExprs != null) {
@@ -384,15 +303,6 @@ public class SortNode extends PlanNode {
                 "[" + Joiner.on(" ").join(strings) + 
"]").addValue(super.debugString()).toString();
     }
 
-    @Override
-    public Set<SlotId> computeInputSlotIds(Analyzer analyzer) throws 
NotImplementedException {
-        removeUnusedExprs();
-        List<Expr> materializedTupleExprs = new 
ArrayList<>(resolvedTupleExprs);
-        List<SlotId> result = Lists.newArrayList();
-        Expr.getIds(materializedTupleExprs, null, result);
-        return new HashSet<>(result);
-    }
-
     // If it's analytic sort or not merged by a followed exchange node, it 
must output the global ordered data.
     @Override
     public boolean isSerialOperator() {
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/TableFunctionNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/TableFunctionNode.java
index ce5aa9d1972..c7ef0589885 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/TableFunctionNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/TableFunctionNode.java
@@ -25,9 +25,7 @@ import org.apache.doris.analysis.SlotId;
 import org.apache.doris.analysis.SlotRef;
 import org.apache.doris.analysis.TupleId;
 import org.apache.doris.common.AnalysisException;
-import org.apache.doris.common.UserException;
 import org.apache.doris.statistics.StatisticalType;
-import org.apache.doris.statistics.StatsRecursiveDerive;
 import org.apache.doris.thrift.TExplainLevel;
 import org.apache.doris.thrift.TPlanNode;
 import org.apache.doris.thrift.TPlanNodeType;
@@ -149,45 +147,6 @@ public class TableFunctionNode extends PlanNode {
         }
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-        fnCallExprList = new ArrayList<>(lateralViewRefs.stream().map(e -> 
e.getFnExpr()).collect(Collectors.toList()));
-        Set<SlotRef> outputSlotRef = Sets.newHashSet();
-        for (Expr expr : conjuncts) {
-            expr.getSlotRefsBoundByTupleIds(tupleIds, outputSlotRef);
-            Expr dst = outputSmap.get(expr);
-            if (dst != null) {
-                dst.getSlotRefsBoundByTupleIds(tupleIds, outputSlotRef);
-            }
-        }
-        for (SlotRef slotRef : outputSlotRef) {
-            outputSlotIds.add(slotRef.getSlotId());
-        }
-        /*
-        When the expression of the lateral view involves the column of the 
subquery,
-        the column needs to be rewritten as the real column in the subquery 
through childrenSmap.
-        Example:
-          select e1 from (select a from t1) tmp1 lateral view explode_split(a, 
",") tmp2 as e1
-          Slot 'a' is originally linked to tuple 'tmp1'. <tmp1.a>
-          But tmp1 is just a virtual and unreal inline view tuple.
-          So we need to push down 'a' and hang it on the real tuple 't1'. 
<t1.a>
-         */
-        outputSmap = getCombinedChildSmap();
-        fnCallExprList = Expr.substituteList(fnCallExprList, outputSmap, 
analyzer, false);
-        // end
-
-        computeStats(analyzer);
-    }
-
-    @Override
-    protected void computeStats(Analyzer analyzer) throws UserException {
-        super.computeStats(analyzer);
-
-        
StatsRecursiveDerive.getStatsRecursiveDerive().statsRecursiveDerive(this);
-        cardinality = (long) statsDeriveResult.getRowCount();
-    }
-
     @Override
     public String getNodeExplainString(String prefix, TExplainLevel 
detailLevel) {
         StringBuilder output = new StringBuilder();
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/TestExternalTableScanNode.java
 
b/fe/fe-core/src/main/java/org/apache/doris/planner/TestExternalTableScanNode.java
index 3d6461b923c..e5671c5e293 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/planner/TestExternalTableScanNode.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/planner/TestExternalTableScanNode.java
@@ -17,12 +17,10 @@
 
 package org.apache.doris.planner;
 
-import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.TupleDescriptor;
 import org.apache.doris.common.UserException;
 import org.apache.doris.datasource.ExternalScanNode;
 import org.apache.doris.statistics.StatisticalType;
-import org.apache.doris.statistics.StatsRecursiveDerive;
 import org.apache.doris.thrift.TExplainLevel;
 import org.apache.doris.thrift.TPlanNode;
 import org.apache.doris.thrift.TPlanNodeType;
@@ -57,25 +55,11 @@ public class TestExternalTableScanNode extends 
ExternalScanNode {
         return output.toString();
     }
 
-    @Override
-    public void finalize(Analyzer analyzer) throws UserException {
-        createScanRangeLocations();
-    }
-
     @Override
     protected void createScanRangeLocations() throws UserException {
         scanRangeLocations = 
Lists.newArrayList(createSingleScanRangeLocations(backendPolicy));
     }
 
-    @Override
-    public void computeStats(Analyzer analyzer) throws UserException {
-        super.computeStats(analyzer);
-        // even if current node scan has no data,at least on backend will be 
assigned when the fragment actually execute
-        numNodes = numNodes <= 0 ? 1 : numNodes;
-        
StatsRecursiveDerive.getStatsRecursiveDerive().statsRecursiveDerive(this);
-        cardinality = (long) statsDeriveResult.getRowCount();
-    }
-
     @Override
     protected void toThrift(TPlanNode msg) {
         msg.node_type = TPlanNodeType.TEST_EXTERNAL_SCAN_NODE;
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/rewrite/ExtractCommonFactorsRule.java
 
b/fe/fe-core/src/main/java/org/apache/doris/rewrite/ExtractCommonFactorsRule.java
index eafccae4932..c41be6296db 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/rewrite/ExtractCommonFactorsRule.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/rewrite/ExtractCommonFactorsRule.java
@@ -63,7 +63,7 @@ import java.util.stream.Collectors;
  *
  * TODO: extract wide common factors could generate redundant conjuncts when 
whole expression could be extracted.
  * Currently, redundant conjuncts will be removed when be assigned to {@link 
PlanNode}
- * by calling {@link PlanNode#addConjunct(Expr)}, {@link 
PlanNode#addConjuncts(List)}, {@link PlanNode#init(Analyzer)}.
+ * by calling {@link PlanNode#addConjunct(Expr)}, {@link 
PlanNode#addConjuncts(List)}.
  * But, we should remove redundant conjuncts generated by redundant conjuncts 
in this rule.
  */
 public class ExtractCommonFactorsRule implements ExprRewriteRule {


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to