This is an automated email from the ASF dual-hosted git repository.

lijibing pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 4c7885b8f4d Support identical column name in different index. (#32792)
4c7885b8f4d is described below

commit 4c7885b8f4db305b7836793c7c918f450c2ffffc
Author: Jibing-Li <[email protected]>
AuthorDate: Thu Mar 28 17:22:28 2024 +0800

    Support identical column name in different index. (#32792)
---
 .../apache/doris/analysis/ShowColumnStatsStmt.java |   9 +-
 .../java/org/apache/doris/catalog/OlapTable.java   |  38 ++----
 .../main/java/org/apache/doris/catalog/Table.java  |  11 +-
 .../java/org/apache/doris/catalog/TableIf.java     |   7 +-
 .../org/apache/doris/datasource/ExternalTable.java |  26 ++--
 .../java/org/apache/doris/qe/ShowExecutor.java     |  37 +++---
 .../org/apache/doris/statistics/AnalysisInfo.java  |  20 +--
 .../doris/statistics/AnalysisInfoBuilder.java      |  12 +-
 .../org/apache/doris/statistics/AnalysisJob.java   |   4 +-
 .../apache/doris/statistics/AnalysisManager.java   | 143 +++++++--------------
 .../apache/doris/statistics/OlapAnalysisTask.java  |   9 +-
 .../doris/statistics/StatisticsAutoCollector.java  |  40 +++---
 .../doris/statistics/StatisticsCollector.java      |   2 +-
 .../doris/statistics/StatisticsRepository.java     |   3 +-
 .../apache/doris/statistics/TableStatsMeta.java    |  76 +++++------
 .../doris/statistics/AnalysisManagerTest.java      |  65 ++++++----
 .../doris/statistics/AnalysisTaskExecutorTest.java |  11 +-
 .../org/apache/doris/statistics/AnalyzeTest.java   |  11 +-
 .../apache/doris/statistics/HistogramTaskTest.java |   1 -
 .../statistics/StatisticsAutoCollectorTest.java    | 121 +++--------------
 .../doris/statistics/TableStatsMetaTest.java       |   4 +-
 .../suites/statistics/analyze_stats.groovy         |   4 +-
 .../suites/statistics/test_analyze_mtmv.groovy     |  40 +++---
 .../suites/statistics/test_analyze_mv.groovy       |  38 +++---
 24 files changed, 293 insertions(+), 439 deletions(-)

diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowColumnStatsStmt.java 
b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowColumnStatsStmt.java
index 37be76b20df..a4216f55661 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowColumnStatsStmt.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowColumnStatsStmt.java
@@ -32,6 +32,7 @@ import org.apache.doris.mysql.privilege.PrivPredicate;
 import org.apache.doris.qe.ConnectContext;
 import org.apache.doris.qe.ShowResultSet;
 import org.apache.doris.qe.ShowResultSetMetaData;
+import org.apache.doris.statistics.AnalysisManager;
 import org.apache.doris.statistics.ColStatsMeta;
 import org.apache.doris.statistics.ColumnStatistic;
 
@@ -138,14 +139,15 @@ public class ShowColumnStatsStmt extends ShowStmt {
 
     public ShowResultSet constructResultSet(List<Pair<Pair<String, String>, 
ColumnStatistic>> columnStatistics) {
         List<List<String>> result = Lists.newArrayList();
+        AnalysisManager analysisManager = 
Env.getCurrentEnv().getAnalysisManager();
         columnStatistics.forEach(p -> {
             if (p.second.isUnKnown) {
                 return;
             }
-
             List<String> row = Lists.newArrayList();
-            row.add(p.first.first);
+            // p data structure is Pair<Pair<IndexName, ColumnName>, 
ColumnStatistic>
             row.add(p.first.second);
+            row.add(p.first.first);
             row.add(String.valueOf(p.second.count));
             row.add(String.valueOf(p.second.ndv));
             row.add(String.valueOf(p.second.numNulls));
@@ -153,8 +155,7 @@ public class ShowColumnStatsStmt extends ShowStmt {
             row.add(String.valueOf(p.second.avgSizeByte));
             row.add(String.valueOf(p.second.minExpr == null ? "N/A" : 
p.second.minExpr.toSql()));
             row.add(String.valueOf(p.second.maxExpr == null ? "N/A" : 
p.second.maxExpr.toSql()));
-            ColStatsMeta colStatsMeta = 
Env.getCurrentEnv().getAnalysisManager().findColStatsMeta(table.getId(),
-                    p.first.first);
+            ColStatsMeta colStatsMeta = 
analysisManager.findColStatsMeta(table.getId(), p.first.first, p.first.second);
             row.add(String.valueOf(colStatsMeta == null ? "N/A" : 
colStatsMeta.analysisMethod));
             row.add(String.valueOf(colStatsMeta == null ? "N/A" : 
colStatsMeta.analysisType));
             row.add(String.valueOf(colStatsMeta == null ? "N/A" : 
colStatsMeta.jobType));
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/OlapTable.java 
b/fe/fe-core/src/main/java/org/apache/doris/catalog/OlapTable.java
index 57ce9d11807..89bba35645d 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/OlapTable.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/OlapTable.java
@@ -1284,11 +1284,11 @@ public class OlapTable extends Table implements 
MTMVRelatedTableIf {
         if (tblStats == null) {
             return true;
         }
-        if (!tblStats.analyzeColumns().containsAll(getBaseSchema()
+        if 
(!tblStats.analyzeColumns().containsAll(getColumnIndexPairs(getSchemaAllIndexes(false)
                 .stream()
                 .filter(c -> !StatisticsUtil.isUnsupportedType(c.getType()))
                 .map(Column::getName)
-                .collect(Collectors.toSet()))) {
+                .collect(Collectors.toSet())))) {
             return true;
         }
         long rowCount = getRowCount();
@@ -1301,34 +1301,20 @@ public class OlapTable extends Table implements 
MTMVRelatedTableIf {
     }
 
     @Override
-    public Map<String, Set<String>> findReAnalyzeNeededPartitions() {
-        TableStatsMeta tableStats = 
Env.getCurrentEnv().getAnalysisManager().findTableStatsStatus(getId());
-        Set<String> allPartitions = 
getPartitionNames().stream().map(this::getPartition)
-                
.filter(Partition::hasData).map(Partition::getName).collect(Collectors.toSet());
-        if (tableStats == null) {
-            Map<String, Set<String>> ret = Maps.newHashMap();
-            for (Column col : getSchemaAllIndexes(false)) {
-                if (StatisticsUtil.isUnsupportedType(col.getType())) {
+    public List<Pair<String, String>> getColumnIndexPairs(Set<String> columns) 
{
+        List<Pair<String, String>> ret = Lists.newArrayList();
+        // Check the schema of all indexes for each given column name.
+        // If the column name exists in an index, add the <IndexName, 
ColumnName> pair to the return list.
+        for (String column : columns) {
+            for (MaterializedIndexMeta meta : indexIdToMeta.values()) {
+                Column col = meta.getColumnByName(column);
+                if (col == null || 
StatisticsUtil.isUnsupportedType(col.getType())) {
                     continue;
                 }
-                ret.put(col.getName(), allPartitions);
+                ret.add(Pair.of(getIndexNameById(meta.getIndexId()), column));
             }
-            return ret;
         }
-        Map<String, Set<String>> colToPart = new HashMap<>();
-        for (Column col : getSchemaAllIndexes(false)) {
-            if (StatisticsUtil.isUnsupportedType(col.getType())) {
-                continue;
-            }
-            long lastUpdateTime = 
tableStats.findColumnLastUpdateTime(col.getName());
-            Set<String> partitions = getPartitionNames().stream()
-                    .map(this::getPartition)
-                    .filter(Partition::hasData)
-                    .filter(partition -> partition.getVisibleVersionTime() >= 
lastUpdateTime).map(Partition::getName)
-                    .collect(Collectors.toSet());
-            colToPart.put(col.getName(), partitions);
-        }
-        return colToPart;
+        return ret;
     }
 
     @Override
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Table.java 
b/fe/fe-core/src/main/java/org/apache/doris/catalog/Table.java
index 35f5b14efc5..52655fa0649 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Table.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Table.java
@@ -24,6 +24,7 @@ import org.apache.doris.common.DdlException;
 import org.apache.doris.common.ErrorCode;
 import org.apache.doris.common.FeMetaVersion;
 import org.apache.doris.common.MetaNotFoundException;
+import org.apache.doris.common.Pair;
 import org.apache.doris.common.io.Text;
 import org.apache.doris.common.io.Writable;
 import org.apache.doris.common.util.QueryableReentrantReadWriteLock;
@@ -647,11 +648,6 @@ public abstract class Table extends MetaObject implements 
Writable, TableIf {
         return true;
     }
 
-    @Override
-    public Map<String, Set<String>> findReAnalyzeNeededPartitions() {
-        return Collections.emptyMap();
-    }
-
     @Override
     public List<Long> getChunkSizes() {
         throw new NotImplementedException("getChunkSized not implemented");
@@ -661,4 +657,9 @@ public abstract class Table extends MetaObject implements 
Writable, TableIf {
     public long fetchRowCount() {
         return 0;
     }
+
+    @Override
+    public List<Pair<String, String>> getColumnIndexPairs(Set<String> columns) 
{
+        return Lists.newArrayList();
+    }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/TableIf.java 
b/fe/fe-core/src/main/java/org/apache/doris/catalog/TableIf.java
index 484dd3bb6eb..f7c8b4b8325 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/TableIf.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/TableIf.java
@@ -25,6 +25,7 @@ import org.apache.doris.catalog.constraint.UniqueConstraint;
 import org.apache.doris.cluster.ClusterNamespace;
 import org.apache.doris.common.DdlException;
 import org.apache.doris.common.MetaNotFoundException;
+import org.apache.doris.common.Pair;
 import org.apache.doris.nereids.exceptions.AnalysisException;
 import org.apache.doris.persist.AlterConstraintLog;
 import org.apache.doris.statistics.AnalysisInfo;
@@ -184,7 +185,11 @@ public interface TableIf {
 
     boolean needReAnalyzeTable(TableStatsMeta tblStats);
 
-    Map<String, Set<String>> findReAnalyzeNeededPartitions();
+    /**
+     * @param columns Set of column names.
+     * @return List of pairs. Each pair is <IndexName, ColumnName>. For 
external tables, the index name is the table name.
+     */
+    List<Pair<String, String>> getColumnIndexPairs(Set<String> columns);
 
     // Get all the chunk sizes of this table. Now, only HMS external table 
implemented this interface.
     // For HMS external table, the return result is a list of all the files' 
size.
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalTable.java 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalTable.java
index 7f82d0d3876..82390b91656 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalTable.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalTable.java
@@ -24,6 +24,7 @@ import org.apache.doris.catalog.TableAttributes;
 import org.apache.doris.catalog.TableIf;
 import org.apache.doris.catalog.constraint.Constraint;
 import org.apache.doris.common.AnalysisException;
+import org.apache.doris.common.Pair;
 import org.apache.doris.common.io.Text;
 import org.apache.doris.common.io.Writable;
 import org.apache.doris.common.util.Util;
@@ -36,7 +37,7 @@ import org.apache.doris.statistics.TableStatsMeta;
 import org.apache.doris.statistics.util.StatisticsUtil;
 import org.apache.doris.thrift.TTableDescriptor;
 
-import com.google.common.collect.Sets;
+import com.google.common.collect.Lists;
 import com.google.gson.annotations.SerializedName;
 import lombok.Getter;
 import org.apache.commons.lang3.NotImplementedException;
@@ -46,7 +47,6 @@ import org.apache.logging.log4j.Logger;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
@@ -322,11 +322,12 @@ public class ExternalTable implements TableIf, Writable, 
GsonPostProcessable {
         if (tblStats == null) {
             return true;
         }
-        if (!tblStats.analyzeColumns().containsAll(getBaseSchema()
+        if (!tblStats.analyzeColumns().containsAll(getColumnIndexPairs(
+                getBaseSchema()
                 .stream()
                 .filter(c -> !StatisticsUtil.isUnsupportedType(c.getType()))
                 .map(Column::getName)
-                .collect(Collectors.toSet()))) {
+                .collect(Collectors.toSet())))) {
             return true;
         }
         return System.currentTimeMillis()
@@ -334,12 +335,17 @@ public class ExternalTable implements TableIf, Writable, 
GsonPostProcessable {
     }
 
     @Override
-    public Map<String, Set<String>> findReAnalyzeNeededPartitions() {
-        HashSet<String> partitions = Sets.newHashSet();
-        // TODO: Find a way to collect external table partitions that need to 
be analyzed.
-        partitions.add("Dummy Partition");
-        return getBaseSchema().stream().filter(c -> 
!StatisticsUtil.isUnsupportedType(c.getType()))
-                .collect(Collectors.toMap(Column::getName, k -> partitions));
+    public List<Pair<String, String>> getColumnIndexPairs(Set<String> columns) 
{
+        List<Pair<String, String>> ret = Lists.newArrayList();
+        for (String column : columns) {
+            Column col = getColumn(column);
+            if (col == null || 
StatisticsUtil.isUnsupportedType(col.getType())) {
+                continue;
+            }
+            // External tables use the table name as the index name.
+            ret.add(Pair.of(String.valueOf(name), column));
+        }
+        return ret;
     }
 
     @Override
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/ShowExecutor.java 
b/fe/fe-core/src/main/java/org/apache/doris/qe/ShowExecutor.java
index 6a763769afc..a0d369eafde 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/ShowExecutor.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/ShowExecutor.java
@@ -2535,16 +2535,18 @@ public class ShowExecutor {
     private void getStatsForAllColumns(List<Pair<Pair<String, String>, 
ColumnStatistic>> columnStatistics,
                                        TableIf tableIf) throws 
AnalysisException {
         List<ResultRow> resultRows = 
StatisticsRepository.queryColumnStatisticsForTable(tableIf.getId());
+        // row[4] is index id, row[5] is column name.
         for (ResultRow row : resultRows) {
-            String indexName = "N/A";
+            String indexName = tableIf.getName();
             long indexId = Long.parseLong(row.get(4));
-            if (indexId != -1) {
-                indexName = ((OlapTable) tableIf).getIndexNameById(indexId);
-                if (indexName == null) {
-                    continue;
-                }
+            if (tableIf instanceof OlapTable) {
+                OlapTable olapTable = (OlapTable) tableIf;
+                indexName = olapTable.getIndexNameById(indexId == -1 ? 
olapTable.getBaseIndexId() : indexId);
+            }
+            if (indexName == null) {
+                continue;
             }
-            columnStatistics.add(Pair.of(Pair.of(row.get(5), indexName), 
ColumnStatistic.fromResultRow(row)));
+            columnStatistics.add(Pair.of(Pair.of(indexName, row.get(5)), 
ColumnStatistic.fromResultRow(row)));
         }
     }
 
@@ -2561,28 +2563,29 @@ public class ShowExecutor {
                 indexIds.add(-1L);
             }
             for (long indexId : indexIds) {
-                String indexName = "N/A";
-                if (indexId != -1) {
-                    indexName = ((OlapTable) 
tableIf).getIndexNameById(indexId);
-                    if (indexName == null) {
-                        continue;
-                    }
+                String indexName = tableIf.getName();
+                if (tableIf instanceof OlapTable) {
+                    OlapTable olapTable = (OlapTable) tableIf;
+                    indexName = olapTable.getIndexNameById(indexId == -1 ? 
olapTable.getBaseIndexId() : indexId);
+                }
+                if (indexName == null) {
+                    continue;
                 }
                 // Show column statistics in columnStatisticsCache.
                 if (showCache) {
                     ColumnStatistic columnStatistic = 
Env.getCurrentEnv().getStatisticsCache().getColumnStatistics(
                             tableIf.getDatabase().getCatalog().getId(),
                             tableIf.getDatabase().getId(), tableIf.getId(), 
indexId, colName);
-                    columnStatistics.add(Pair.of(Pair.of(colName, indexName), 
columnStatistic));
+                    columnStatistics.add(Pair.of(Pair.of(indexName, colName), 
columnStatistic));
                 } else if (partitionNames == null) {
                     ColumnStatistic columnStatistic =
                             
StatisticsRepository.queryColumnStatisticsByName(tableIf.getId(), indexId, 
colName);
-                    columnStatistics.add(Pair.of(Pair.of(colName, indexName), 
columnStatistic));
+                    columnStatistics.add(Pair.of(Pair.of(indexName, colName), 
columnStatistic));
                 } else {
                     String finalIndexName = indexName;
                     
columnStatistics.addAll(StatisticsRepository.queryColumnStatisticsByPartitions(tableName,
                             colName, partitionNames.getPartitionNames())
-                            .stream().map(s -> Pair.of(Pair.of(colName, 
finalIndexName), s))
+                            .stream().map(s -> Pair.of(Pair.of(finalIndexName, 
colName), s))
                             .collect(Collectors.toList()));
                 }
             }
@@ -2986,7 +2989,7 @@ public class ShowExecutor {
             if (table instanceof OlapTable && analysisInfo.indexId != -1) {
                 row.add(((OlapTable) 
table).getIndexNameById(analysisInfo.indexId));
             } else {
-                row.add("N/A");
+                row.add(table.getName());
             }
             row.add(analysisInfo.message);
             row.add(TimeUtils.DATETIME_FORMAT.format(
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisInfo.java 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisInfo.java
index c707107e0e0..c167db2228d 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisInfo.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisInfo.java
@@ -18,6 +18,7 @@
 package org.apache.doris.statistics;
 
 import org.apache.doris.catalog.TableIf;
+import org.apache.doris.common.Pair;
 import org.apache.doris.common.io.Text;
 import org.apache.doris.common.io.Writable;
 import org.apache.doris.persist.gson.GsonUtils;
@@ -35,7 +36,6 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.text.ParseException;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 import java.util.StringJoiner;
 
@@ -95,8 +95,8 @@ public class AnalysisInfo implements Writable {
     @SerializedName("tblId")
     public final long tblId;
 
-    // TODO: Map here is wired, List is enough
-    public final Map<String, Set<String>> colToPartitions;
+    // Pair<IndexName, ColumnName>
+    public final List<Pair<String, String>> jobColumns;
 
     public final Set<String> partitionNames;
 
@@ -200,7 +200,7 @@ public class AnalysisInfo implements Writable {
     public final boolean userInject;
 
     public AnalysisInfo(long jobId, long taskId, List<Long> taskIds, long 
catalogId, long dbId, long tblId,
-            Map<String, Set<String>> colToPartitions, Set<String> 
partitionNames, String colName, Long indexId,
+            List<Pair<String, String>> jobColumns, Set<String> partitionNames, 
String colName, Long indexId,
             JobType jobType, AnalysisMode analysisMode, AnalysisMethod 
analysisMethod, AnalysisType analysisType,
             int samplePercent, long sampleRows, int maxBucketNum, long 
periodTimeInMs, String message,
             long lastExecTimeInMs, long timeCostInMs, AnalysisState state, 
ScheduleType scheduleType,
@@ -213,7 +213,7 @@ public class AnalysisInfo implements Writable {
         this.catalogId = catalogId;
         this.dbId = dbId;
         this.tblId = tblId;
-        this.colToPartitions = colToPartitions;
+        this.jobColumns = jobColumns;
         this.partitionNames = partitionNames;
         this.colName = colName;
         this.indexId = indexId;
@@ -268,8 +268,8 @@ public class AnalysisInfo implements Writable {
         if (maxBucketNum > 0) {
             sj.add("MaxBucketNum: " + maxBucketNum);
         }
-        if (colToPartitions != null) {
-            sj.add("colToPartitions: " + getColToPartitionStr());
+        if (jobColumns != null) {
+            sj.add("jobColumns: " + getJobColumns());
         }
         if (lastExecTimeInMs > 0) {
             sj.add("LastExecTime: " + 
StatisticsUtil.getReadableTime(lastExecTimeInMs));
@@ -301,12 +301,12 @@ public class AnalysisInfo implements Writable {
         taskIds.add(taskId);
     }
 
-    public String getColToPartitionStr() {
-        if (colToPartitions == null || colToPartitions.isEmpty()) {
+    public String getJobColumns() {
+        if (jobColumns == null || jobColumns.isEmpty()) {
             return "";
         }
         Gson gson = new Gson();
-        return gson.toJson(colToPartitions);
+        return gson.toJson(jobColumns);
     }
 
     @Override
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisInfoBuilder.java 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisInfoBuilder.java
index 22f3d22b3ce..00cf9f7b1bc 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisInfoBuilder.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisInfoBuilder.java
@@ -17,6 +17,7 @@
 
 package org.apache.doris.statistics;
 
+import org.apache.doris.common.Pair;
 import org.apache.doris.statistics.AnalysisInfo.AnalysisMethod;
 import org.apache.doris.statistics.AnalysisInfo.AnalysisMode;
 import org.apache.doris.statistics.AnalysisInfo.AnalysisType;
@@ -26,7 +27,6 @@ import org.apache.doris.statistics.AnalysisInfo.ScheduleType;
 import org.apache.logging.log4j.core.util.CronExpression;
 
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 
 public class AnalysisInfoBuilder {
@@ -36,7 +36,7 @@ public class AnalysisInfoBuilder {
     private long catalogId;
     private long dbId;
     private long tblId;
-    private Map<String, Set<String>> colToPartitions;
+    private List<Pair<String, String>> jobColumns;
     private Set<String> partitionNames;
     private String colName;
     private long indexId = -1L;
@@ -75,7 +75,7 @@ public class AnalysisInfoBuilder {
         catalogId = info.catalogId;
         dbId = info.dbId;
         tblId = info.tblId;
-        colToPartitions = info.colToPartitions;
+        jobColumns = info.jobColumns;
         partitionNames = info.partitionNames;
         colName = info.colName;
         indexId = info.indexId;
@@ -135,8 +135,8 @@ public class AnalysisInfoBuilder {
         return this;
     }
 
-    public AnalysisInfoBuilder setColToPartitions(Map<String, Set<String>> 
colToPartitions) {
-        this.colToPartitions = colToPartitions;
+    public AnalysisInfoBuilder setJobColumns(List<Pair<String, String>> 
jobColumns) {
+        this.jobColumns = jobColumns;
         return this;
     }
 
@@ -276,7 +276,7 @@ public class AnalysisInfoBuilder {
     }
 
     public AnalysisInfo build() {
-        return new AnalysisInfo(jobId, taskId, taskIds, catalogId, dbId, 
tblId, colToPartitions, partitionNames,
+        return new AnalysisInfo(jobId, taskId, taskIds, catalogId, dbId, 
tblId, jobColumns, partitionNames,
                 colName, indexId, jobType, analysisMode, analysisMethod, 
analysisType, samplePercent,
                 sampleRows, maxBucketNum, periodTimeInMs, message, 
lastExecTimeInMs, timeCostInMs, state, scheduleType,
                 externalTableLevelTask, partitionOnly, samplingPartition, 
isAllPartition, partitionCount,
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJob.java 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJob.java
index f52764bd6c9..5fd5e43be53 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJob.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJob.java
@@ -180,13 +180,13 @@ public class AnalysisJob {
     public void deregisterJob() {
         analysisManager.removeJob(jobInfo.jobId);
         for (BaseAnalysisTask task : queryingTask) {
-            task.info.colToPartitions.clear();
+            task.info.jobColumns.clear();
             if (task.info.partitionNames != null) {
                 task.info.partitionNames.clear();
             }
         }
         for (BaseAnalysisTask task : queryFinished) {
-            task.info.colToPartitions.clear();
+            task.info.jobColumns.clear();
             if (task.info.partitionNames != null) {
                 task.info.partitionNames.clear();
             }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisManager.java 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisManager.java
index 258c33305af..66d6d38f381 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisManager.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisManager.java
@@ -38,6 +38,7 @@ import org.apache.doris.common.Config;
 import org.apache.doris.common.DdlException;
 import org.apache.doris.common.FeConstants;
 import org.apache.doris.common.FeMetaVersion;
+import org.apache.doris.common.Pair;
 import org.apache.doris.common.ThreadPoolManager;
 import org.apache.doris.common.ThreadPoolManager.BlockedPolicy;
 import org.apache.doris.common.io.Text;
@@ -82,7 +83,6 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
@@ -219,7 +219,7 @@ public class AnalysisManager implements Writable {
     @VisibleForTesting
     protected AnalysisInfo buildAndAssignJob(AnalyzeTblStmt stmt) throws 
DdlException {
         AnalysisInfo jobInfo = buildAnalysisJobInfo(stmt);
-        if (jobInfo.colToPartitions.isEmpty()) {
+        if (jobInfo.jobColumns.isEmpty()) {
             // No statistics need to be collected or updated
             return null;
         }
@@ -292,51 +292,6 @@ public class AnalysisManager implements Writable {
         }
     }
 
-    /**
-     * Gets the partitions for which statistics are to be collected. First 
verify that
-     * there are partitions that have been deleted but have historical 
statistics(invalid statistics),
-     * if there are these partitions, we need to delete them to avoid errors 
in summary table level statistics.
-     * Then get the partitions for which statistics need to be collected based 
on collection mode (incremental/full).
-     * <p>
-     * note:
-     * If there is no invalid statistics, it does not need to collect/update
-     * statistics if the following conditions are met:
-     * - in full collection mode, the partitioned table does not have 
partitions
-     * - in incremental collection mode, partition statistics already exist
-     * <p>
-     * TODO Supports incremental collection of statistics from materialized 
views
-     */
-    private Map<String, Set<String>> validateAndGetPartitions(TableIf table, 
Set<String> columnNames,
-            Set<String> partitionNames, AnalysisType analysisType) throws 
DdlException {
-
-        Map<String, Set<String>> columnToPartitions = columnNames.stream()
-                .collect(Collectors.toMap(
-                        columnName -> columnName,
-                        columnName -> new HashSet<>(partitionNames == null ? 
Collections.emptySet() : partitionNames)
-                ));
-
-        if (analysisType == AnalysisType.HISTOGRAM) {
-            // Collecting histograms does not need to support incremental 
collection,
-            // and will automatically cover historical statistics
-            return columnToPartitions;
-        }
-
-        if (table instanceof HMSExternalTable) {
-            // TODO Currently, we do not support INCREMENTAL collection for 
external table.
-            // One reason is external table partition id couldn't convert to a 
Long value.
-            // Will solve this problem later.
-            return columnToPartitions;
-        }
-
-        if (analysisType == AnalysisType.FUNDAMENTALS) {
-            Map<String, Set<String>> result = 
table.findReAnalyzeNeededPartitions();
-            result.keySet().retainAll(columnNames);
-            return result;
-        }
-
-        return columnToPartitions;
-    }
-
     // Make sure colName of job has all the column as this AnalyzeStmt 
specified, no matter whether it will be analyzed
     // or not.
     @VisibleForTesting
@@ -362,12 +317,6 @@ public class AnalysisManager implements Writable {
         infoBuilder.setCatalogId(stmt.getCatalogId());
         infoBuilder.setDBId(stmt.getDbId());
         infoBuilder.setTblId(stmt.getTable().getId());
-        // TODO: Refactor later, DON'T MODIFY IT RIGHT NOW
-        StringJoiner stringJoiner = new StringJoiner(",", "[", "]");
-        for (String colName : columnNames) {
-            stringJoiner.add(colName);
-        }
-        infoBuilder.setColName(stringJoiner.toString());
         infoBuilder.setPartitionNames(partitionNames);
         infoBuilder.setPartitionOnly(partitionOnly);
         infoBuilder.setSamplingPartition(isSamplingPartition);
@@ -391,20 +340,23 @@ public class AnalysisManager implements Writable {
 
         if (analysisType == AnalysisType.HISTOGRAM) {
             int numBuckets = stmt.getNumBuckets();
-            int maxBucketNum = numBuckets > 0 ? numBuckets
-                    : StatisticConstants.HISTOGRAM_MAX_BUCKET_NUM;
+            int maxBucketNum = numBuckets > 0 ? numBuckets : 
StatisticConstants.HISTOGRAM_MAX_BUCKET_NUM;
             infoBuilder.setMaxBucketNum(maxBucketNum);
         }
 
         long periodTimeInMs = stmt.getPeriodTimeInMs();
         infoBuilder.setPeriodTimeInMs(periodTimeInMs);
-
-        Map<String, Set<String>> colToPartitions = 
validateAndGetPartitions(table, columnNames,
-                partitionNames, analysisType);
-        infoBuilder.setColToPartitions(colToPartitions);
+        List<Pair<String, String>> jobColumns = 
table.getColumnIndexPairs(columnNames);
+        infoBuilder.setJobColumns(jobColumns);
+        StringJoiner stringJoiner = new StringJoiner(",", "[", "]");
+        for (Pair<String, String> pair : jobColumns) {
+            stringJoiner.add(pair.toString());
+        }
+        infoBuilder.setColName(stringJoiner.toString());
         infoBuilder.setTaskIds(Lists.newArrayList());
         infoBuilder.setTblUpdateTime(table.getUpdateTime());
-        infoBuilder.setEmptyJob(table instanceof OlapTable && 
table.getRowCount() == 0);
+        infoBuilder.setEmptyJob(table instanceof OlapTable && 
table.getRowCount() == 0
+                && analysisMethod.equals(AnalysisMethod.SAMPLE));
         return infoBuilder.build();
     }
 
@@ -420,35 +372,28 @@ public class AnalysisManager implements Writable {
 
     public void createTaskForEachColumns(AnalysisInfo jobInfo, Map<Long, 
BaseAnalysisTask> analysisTasks,
             boolean isSync) throws DdlException {
-        Map<String, Set<String>> columnToPartitions = jobInfo.colToPartitions;
+        List<Pair<String, String>> jobColumns = jobInfo.jobColumns;
         TableIf table = jobInfo.getTable();
-        for (Entry<String, Set<String>> entry : columnToPartitions.entrySet()) 
{
-            String colName = entry.getKey();
-            List<Long> indexIds = Lists.newArrayList();
-            // Get index id this column belongs to for OlapTable. Set it to -1 
for baseIndex id.
-            if (table instanceof OlapTable) {
-                indexIds = ((OlapTable) table).getMvColumnIndexIds(colName);
-            } else {
-                indexIds.add(-1L);
-            }
+        for (Pair<String, String> pair : jobColumns) {
             AnalysisInfoBuilder colTaskInfoBuilder = new 
AnalysisInfoBuilder(jobInfo);
-            if (jobInfo.analysisType != AnalysisType.HISTOGRAM) {
-                colTaskInfoBuilder.setAnalysisType(AnalysisType.FUNDAMENTALS);
-                Map<String, Set<String>> colToParts = new HashMap<>();
-                colToParts.put(colName, entry.getValue());
-                colTaskInfoBuilder.setColToPartitions(colToParts);
-            }
-            for (long indexId : indexIds) {
-                long taskId = Env.getCurrentEnv().getNextId();
-                AnalysisInfo analysisInfo = 
colTaskInfoBuilder.setColName(colName).setIndexId(indexId)
-                        
.setTaskId(taskId).setLastExecTimeInMs(System.currentTimeMillis()).build();
-                analysisTasks.put(taskId, createTask(analysisInfo));
-                jobInfo.addTaskId(taskId);
-                if (isSync) {
-                    continue;
+            colTaskInfoBuilder.setAnalysisType(AnalysisType.FUNDAMENTALS);
+            long taskId = Env.getCurrentEnv().getNextId();
+            long indexId = -1;
+            if (table instanceof OlapTable) {
+                OlapTable olapTable = (OlapTable) table;
+                indexId = olapTable.getIndexIdByName(pair.first);
+                if (indexId == olapTable.getBaseIndexId()) {
+                    indexId = -1;
                 }
-                replayCreateAnalysisTask(analysisInfo);
             }
+            AnalysisInfo analysisInfo = 
colTaskInfoBuilder.setColName(pair.second).setIndexId(indexId)
+                    
.setTaskId(taskId).setLastExecTimeInMs(System.currentTimeMillis()).build();
+            analysisTasks.put(taskId, createTask(analysisInfo));
+            jobInfo.addTaskId(taskId);
+            if (isSync) {
+                continue;
+            }
+            replayCreateAnalysisTask(analysisInfo);
         }
     }
 
@@ -565,7 +510,9 @@ public class AnalysisManager implements Writable {
             tableStats.update(jobInfo, tbl);
             logCreateTableStats(tableStats);
         }
-        jobInfo.colToPartitions.clear();
+        if (jobInfo.jobColumns != null) {
+            jobInfo.jobColumns.clear();
+        }
         if (jobInfo.partitionNames != null) {
             jobInfo.partitionNames.clear();
         }
@@ -712,7 +659,16 @@ public class AnalysisManager implements Writable {
                 indexIds.add(-1L);
             }
             for (long indexId : indexIds) {
-                tableStats.removeColumn(column);
+                String indexName = table.getName();
+                if (table instanceof OlapTable) {
+                    OlapTable olapTable = (OlapTable) table;
+                    if (indexId == -1) {
+                        indexName = 
olapTable.getIndexNameById(olapTable.getBaseIndexId());
+                    } else {
+                        indexName = olapTable.getIndexNameById(indexId);
+                    }
+                }
+                tableStats.removeColumn(indexName, column);
                 statisticsCache.invalidate(tableId, indexId, column);
             }
         }
@@ -1088,25 +1044,16 @@ public class AnalysisManager implements Writable {
         analysisJobIdToTaskMap.put(jobInfo.jobId, taskInfos);
     }
 
-    // Remove col stats status from TableStats if failed load some col stats 
after analyze corresponding column so that
-    // we could make sure it would be analyzed again soon if user or system 
submit job for that column again.
-    public void removeColStatsStatus(long tblId, String colName) {
-        TableStatsMeta tableStats = findTableStatsStatus(tblId);
-        if (tableStats != null) {
-            tableStats.removeColumn(colName);
-        }
-    }
-
     public void removeTableStats(long tableId) {
         idToTblStats.remove(tableId);
     }
 
-    public ColStatsMeta findColStatsMeta(long tblId, String colName) {
+    public ColStatsMeta findColStatsMeta(long tblId, String indexName, String 
colName) {
         TableStatsMeta tableStats = findTableStatsStatus(tblId);
         if (tableStats == null) {
             return null;
         }
-        return tableStats.findColumnStatsMeta(colName);
+        return tableStats.findColumnStatsMeta(indexName, colName);
     }
 
     public AnalysisJob findJob(long id) {
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/OlapAnalysisTask.java 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/OlapAnalysisTask.java
index d26de9d9de7..0ec1c4561d3 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/statistics/OlapAnalysisTask.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/statistics/OlapAnalysisTask.java
@@ -65,13 +65,8 @@ public class OlapAnalysisTask extends BaseAnalysisTask {
     }
 
     public void doExecute() throws Exception {
-        Set<String> partitionNames = info.colToPartitions.get(info.colName);
-        if (StatisticsUtil.isEmptyTable(tbl, info.analysisMethod)
-                || partitionNames == null || partitionNames.isEmpty()) {
-            if (partitionNames == null) {
-                LOG.warn("Table {}.{}.{}, partitionNames for column {} is 
null. ColToPartitions:[{}]",
-                        info.catalogId, info.dbId, info.tblId, info.colName, 
info.colToPartitions);
-            }
+        List<Pair<String, String>> columnList = info.jobColumns;
+        if (StatisticsUtil.isEmptyTable(tbl, info.analysisMethod) || 
columnList == null || columnList.isEmpty()) {
             StatsId statsId = new StatsId(concatColumnStatsId(), 
info.catalogId, info.dbId,
                     info.tblId, info.indexId, info.colName, null);
             job.appendBuf(this, Arrays.asList(new ColStatsData(statsId)));
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsAutoCollector.java
 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsAutoCollector.java
index dbb7046467a..9ca971845b7 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsAutoCollector.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsAutoCollector.java
@@ -25,6 +25,7 @@ import org.apache.doris.catalog.Partition;
 import org.apache.doris.catalog.TableIf;
 import org.apache.doris.common.Config;
 import org.apache.doris.common.DdlException;
+import org.apache.doris.common.Pair;
 import org.apache.doris.common.util.TimeUtils;
 import org.apache.doris.datasource.CatalogIf;
 import org.apache.doris.datasource.hive.HMSExternalTable;
@@ -39,10 +40,9 @@ import org.apache.logging.log4j.Logger;
 
 import java.time.LocalTime;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
+import java.util.StringJoiner;
 import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 
@@ -121,7 +121,7 @@ public class StatisticsAutoCollector extends 
StatisticsCollector {
                     analysisTaskExecutor.clear();
                     break;
                 }
-                analysisInfo = getReAnalyzeRequiredPart(analysisInfo);
+                analysisInfo = getNeedAnalyzeColumns(analysisInfo);
                 if (analysisInfo == null) {
                     continue;
                 }
@@ -186,11 +186,7 @@ public class StatisticsAutoCollector extends 
StatisticsCollector {
                 .setCatalogId(db.getCatalog().getId())
                 .setDBId(db.getId())
                 .setTblId(table.getId())
-                .setColName(
-                        table.getSchemaAllIndexes(false).stream()
-                            .filter(c -> 
!StatisticsUtil.isUnsupportedType(c.getType()))
-                            
.map(Column::getName).collect(Collectors.joining(","))
-                )
+                .setColName(null)
                 .setAnalysisType(AnalysisInfo.AnalysisType.FUNDAMENTALS)
                 .setAnalysisMode(AnalysisInfo.AnalysisMode.INCREMENTAL)
                 .setAnalysisMethod(analysisMethod)
@@ -202,13 +198,14 @@ public class StatisticsAutoCollector extends 
StatisticsCollector {
                 .setLastExecTimeInMs(System.currentTimeMillis())
                 .setJobType(JobType.SYSTEM)
                 .setTblUpdateTime(table.getUpdateTime())
-                .setEmptyJob(table instanceof OlapTable && table.getRowCount() 
== 0)
+                .setEmptyJob(table instanceof OlapTable && table.getRowCount() 
== 0
+                    && analysisMethod.equals(AnalysisMethod.SAMPLE))
                 .build();
         analysisInfos.add(jobInfo);
     }
 
     @VisibleForTesting
-    protected AnalysisInfo getReAnalyzeRequiredPart(AnalysisInfo jobInfo) {
+    protected AnalysisInfo getNeedAnalyzeColumns(AnalysisInfo jobInfo) {
         TableIf table = StatisticsUtil.findTable(jobInfo.catalogId, 
jobInfo.dbId, jobInfo.tblId);
         // Skip tables that are too wide.
         if (table.getBaseSchema().size() > 
StatisticsUtil.getAutoAnalyzeTableWidthThreshold()) {
@@ -218,26 +215,25 @@ public class StatisticsAutoCollector extends 
StatisticsCollector {
         AnalysisManager analysisManager = 
Env.getServingEnv().getAnalysisManager();
         TableStatsMeta tblStats = 
analysisManager.findTableStatsStatus(table.getId());
 
-        Map<String, Set<String>> needRunPartitions = null;
-        String colNames = jobInfo.colName;
+        List<Pair<String, String>> needRunColumns = null;
         if (table.needReAnalyzeTable(tblStats)) {
-            needRunPartitions = table.findReAnalyzeNeededPartitions();
+            needRunColumns = 
table.getColumnIndexPairs(table.getSchemaAllIndexes(false)
+                .stream().map(Column::getName).collect(Collectors.toSet()));
         } else if (table instanceof OlapTable && 
tblStats.newPartitionLoaded.get()) {
             OlapTable olapTable = (OlapTable) table;
-            needRunPartitions = new HashMap<>();
-            Set<String> partitionColumnNames = 
olapTable.getPartitionInfo().getPartitionColumns().stream()
-                    .map(Column::getName).collect(Collectors.toSet());
-            colNames = 
partitionColumnNames.stream().collect(Collectors.joining(","));
             Set<String> partitionNames = olapTable.getAllPartitions().stream()
                     .map(Partition::getName).collect(Collectors.toSet());
-            for (String column : partitionColumnNames) {
-                needRunPartitions.put(column, partitionNames);
-            }
+            needRunColumns = olapTable.getColumnIndexPairs(partitionNames);
         }
 
-        if (needRunPartitions == null || needRunPartitions.isEmpty()) {
+        if (needRunColumns == null || needRunColumns.isEmpty()) {
             return null;
         }
-        return new 
AnalysisInfoBuilder(jobInfo).setColName(colNames).setColToPartitions(needRunPartitions).build();
+        StringJoiner stringJoiner = new StringJoiner(",", "[", "]");
+        for (Pair<String, String> pair : needRunColumns) {
+            stringJoiner.add(pair.toString());
+        }
+        return new AnalysisInfoBuilder(jobInfo)
+            
.setColName(stringJoiner.toString()).setJobColumns(needRunColumns).build();
     }
 }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsCollector.java 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsCollector.java
index 0985b9b2b95..ec187fe893a 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsCollector.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsCollector.java
@@ -61,7 +61,7 @@ public abstract class StatisticsCollector extends 
MasterDaemon {
     @VisibleForTesting
     protected void createSystemAnalysisJob(AnalysisInfo jobInfo)
             throws DdlException {
-        if (jobInfo.colToPartitions.isEmpty()) {
+        if (jobInfo.jobColumns.isEmpty()) {
             // No statistics need to be collected or updated
             return;
         }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsRepository.java
 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsRepository.java
index 5ac9b7305c7..5caa5bd9751 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsRepository.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsRepository.java
@@ -29,6 +29,7 @@ import org.apache.doris.common.FeConstants;
 import org.apache.doris.statistics.util.DBObjects;
 import org.apache.doris.statistics.util.StatisticsUtil;
 
+import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import org.apache.commons.text.StringSubstitutor;
 import org.apache.logging.log4j.LogManager;
@@ -320,7 +321,7 @@ public class StatisticsRepository {
             AnalysisInfo mockedJobInfo = new AnalysisInfoBuilder()
                     .setTblUpdateTime(System.currentTimeMillis())
                     .setColName("")
-                    .setColToPartitions(Maps.newHashMap())
+                    .setJobColumns(Lists.newArrayList())
                     .setUserInject(true)
                     .setJobType(AnalysisInfo.JobType.MANUAL)
                     .build();
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/TableStatsMeta.java 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/TableStatsMeta.java
index 9231c6a2bc7..3b9b1e2bead 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/statistics/TableStatsMeta.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/statistics/TableStatsMeta.java
@@ -21,11 +21,11 @@ import org.apache.doris.catalog.Column;
 import org.apache.doris.catalog.OlapTable;
 import org.apache.doris.catalog.PartitionInfo;
 import org.apache.doris.catalog.TableIf;
+import org.apache.doris.common.Pair;
 import org.apache.doris.common.io.Text;
 import org.apache.doris.common.io.Writable;
 import org.apache.doris.persist.gson.GsonUtils;
 import org.apache.doris.statistics.AnalysisInfo.JobType;
-import org.apache.doris.statistics.util.StatisticsUtil;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.gson.annotations.SerializedName;
@@ -33,8 +33,6 @@ import com.google.gson.annotations.SerializedName;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
@@ -64,7 +62,11 @@ public class TableStatsMeta implements Writable {
     public long updatedTime;
 
     @SerializedName("colNameToColStatsMeta")
-    private ConcurrentMap<String, ColStatsMeta> colNameToColStatsMeta = new 
ConcurrentHashMap<>();
+    private ConcurrentMap<String, ColStatsMeta> 
deprecatedColNameToColStatsMeta = new ConcurrentHashMap<>();
+
+    @SerializedName("colToColStatsMeta")
+    // <IndexName, ColumnName> -> ColStatsMeta
+    private ConcurrentMap<Pair<String, String>, ColStatsMeta> 
colToColStatsMeta = new ConcurrentHashMap<>();
 
     @SerializedName("trigger")
     public JobType jobType;
@@ -100,52 +102,34 @@ public class TableStatsMeta implements Writable {
         String json = Text.readString(dataInput);
         TableStatsMeta tableStats = GsonUtils.GSON.fromJson(json, 
TableStatsMeta.class);
         // Might be null counterintuitively, for compatible
-        if (tableStats.colNameToColStatsMeta == null) {
-            tableStats.colNameToColStatsMeta = new ConcurrentHashMap<>();
+        if (tableStats.colToColStatsMeta == null) {
+            tableStats.colToColStatsMeta = new ConcurrentHashMap<>();
         }
-        return tableStats;
-    }
-
-    public long findColumnLastUpdateTime(String colName) {
-        ColStatsMeta colStatsMeta = colNameToColStatsMeta.get(colName);
-        if (colStatsMeta == null) {
-            return 0;
+        if (tableStats.deprecatedColNameToColStatsMeta != null) {
+            tableStats.convertDeprecatedColStatsToNewVersion();
         }
-        return colStatsMeta.updatedTime;
-    }
-
-    public ColStatsMeta findColumnStatsMeta(String colName) {
-        return colNameToColStatsMeta.get(colName);
+        return tableStats;
     }
 
-    public void removeColumn(String colName) {
-        colNameToColStatsMeta.remove(colName);
+    public ColStatsMeta findColumnStatsMeta(String indexName, String colName) {
+        return colToColStatsMeta.get(Pair.of(indexName, colName));
     }
 
-    public Set<String> analyzeColumns() {
-        return colNameToColStatsMeta.keySet();
+    public void removeColumn(String indexName, String colName) {
+        colToColStatsMeta.remove(Pair.of(indexName, colName));
     }
 
-    public void reset() {
-        updatedTime = 0;
-        colNameToColStatsMeta.values().forEach(ColStatsMeta::clear);
+    public Set<Pair<String, String>> analyzeColumns() {
+        return colToColStatsMeta.keySet();
     }
 
     public void update(AnalysisInfo analyzedJob, TableIf tableIf) {
         updatedTime = analyzedJob.tblUpdateTime;
         userInjected = analyzedJob.userInject;
-        String colNameStr = analyzedJob.colName;
-        // colName field AnalyzeJob's format likes: "[col1, col2]", we need to 
remove brackets here
-        // TODO: Refactor this later
-        if (analyzedJob.colName.startsWith("[") && 
analyzedJob.colName.endsWith("]")) {
-            colNameStr = colNameStr.substring(1, colNameStr.length() - 1);
-        }
-        List<String> cols = Arrays.stream(colNameStr.split(","))
-                .map(String::trim).filter(s -> 
!s.isEmpty()).collect(Collectors.toList());
-        for (String col : cols) {
-            ColStatsMeta colStatsMeta = colNameToColStatsMeta.get(col);
+        for (Pair<String, String> colPair : analyzedJob.jobColumns) {
+            ColStatsMeta colStatsMeta = colToColStatsMeta.get(colPair);
             if (colStatsMeta == null) {
-                colNameToColStatsMeta.put(col, new ColStatsMeta(updatedTime,
+                colToColStatsMeta.put(colPair, new ColStatsMeta(updatedTime,
                         analyzedJob.analysisMethod, analyzedJob.analysisType, 
analyzedJob.jobType, 0));
             } else {
                 colStatsMeta.updatedTime = updatedTime;
@@ -159,21 +143,27 @@ public class TableStatsMeta implements Writable {
             if (tableIf instanceof OlapTable) {
                 rowCount = analyzedJob.emptyJob ? 0 : tableIf.getRowCount();
             }
-            if (!analyzedJob.emptyJob && analyzedJob.colToPartitions.keySet()
-                    .containsAll(tableIf.getBaseSchema().stream()
-                            .filter(c -> 
!StatisticsUtil.isUnsupportedType(c.getType()))
-                            
.map(Column::getName).collect(Collectors.toSet()))) {
+            if (analyzedJob.emptyJob) {
+                return;
+            }
+            if (analyzedJob.jobColumns.containsAll(
+                    tableIf.getColumnIndexPairs(
+                    
tableIf.getSchemaAllIndexes(false).stream().map(Column::getName).collect(Collectors.toSet()))))
 {
                 updatedRows.set(0);
                 newPartitionLoaded.set(false);
             }
             if (tableIf instanceof OlapTable) {
                 PartitionInfo partitionInfo = ((OlapTable) 
tableIf).getPartitionInfo();
-                if (partitionInfo != null && 
analyzedJob.colToPartitions.keySet()
-                        
.containsAll(partitionInfo.getPartitionColumns().stream()
-                            
.map(Column::getName).collect(Collectors.toSet()))) {
+                if (partitionInfo != null && analyzedJob.jobColumns
+                        
.containsAll(tableIf.getColumnIndexPairs(partitionInfo.getPartitionColumns().stream()
+                            
.map(Column::getName).collect(Collectors.toSet())))) {
                     newPartitionLoaded.set(false);
                 }
             }
         }
     }
+
+    public void convertDeprecatedColStatsToNewVersion() {
+        deprecatedColNameToColStatsMeta = null;
+    }
 }
diff --git 
a/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalysisManagerTest.java 
b/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalysisManagerTest.java
index f8a77fe06db..674456b0b46 100644
--- 
a/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalysisManagerTest.java
+++ 
b/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalysisManagerTest.java
@@ -27,6 +27,7 @@ import org.apache.doris.catalog.OlapTable;
 import org.apache.doris.catalog.PrimitiveType;
 import org.apache.doris.common.Config;
 import org.apache.doris.common.DdlException;
+import org.apache.doris.common.Pair;
 import org.apache.doris.statistics.AnalysisInfo.AnalysisType;
 import org.apache.doris.statistics.AnalysisInfo.JobType;
 import org.apache.doris.statistics.AnalysisInfo.ScheduleType;
@@ -45,9 +46,9 @@ import org.junit.jupiter.api.Test;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 // CHECKSTYLE OFF
 public class AnalysisManagerTest {
@@ -109,7 +110,7 @@ public class AnalysisManagerTest {
     // test build sync job
     @Test
     public void testBuildAndAssignJob1() throws Exception {
-        AnalysisInfo analysisInfo = new 
AnalysisInfoBuilder().setColToPartitions(new HashMap<>()).build();
+        AnalysisInfo analysisInfo = new 
AnalysisInfoBuilder().setJobColumns(new ArrayList<>()).build();
         new MockUp<StatisticsUtil>() {
 
             @Mock
@@ -167,12 +168,7 @@ public class AnalysisManagerTest {
 
         AnalysisManager analysisManager = new AnalysisManager();
         
Assertions.assertNull(analysisManager.buildAndAssignJob(analyzeTblStmt));
-        analysisInfo.colToPartitions.put("c1", new HashSet<String>() {
-            {
-                add("p1");
-                add("p2");
-            }
-        });
+        analysisInfo.jobColumns.add(Pair.of("index1", "c1"));
         analysisManager.buildAndAssignJob(analyzeTblStmt);
         new Expectations() {
             {
@@ -191,7 +187,7 @@ public class AnalysisManagerTest {
     // test build async job
     @Test
     public void testBuildAndAssignJob2(@Injectable OlapAnalysisTask 
analysisTask) throws Exception {
-        AnalysisInfo analysisInfo = new 
AnalysisInfoBuilder().setColToPartitions(new HashMap<>())
+        AnalysisInfo analysisInfo = new 
AnalysisInfoBuilder().setJobColumns(new ArrayList<>())
                 .setScheduleType(ScheduleType.PERIOD)
                 .build();
         new MockUp<StatisticsUtil>() {
@@ -255,12 +251,7 @@ public class AnalysisManagerTest {
             }
         }));
         AnalysisManager analysisManager = new AnalysisManager();
-        analysisInfo.colToPartitions.put("c1", new HashSet<String>() {
-            {
-                add("p1");
-                add("p2");
-            }
-        });
+        analysisInfo.jobColumns.add(Pair.of("index1", "c1"));
         analysisManager.buildAndAssignJob(analyzeTblStmt);
         new Expectations() {
             {
@@ -274,15 +265,7 @@ public class AnalysisManagerTest {
     public void testReAnalyze() {
         new MockUp<OlapTable>() {
 
-            int count = 0;
-            int[] rowCount = new int[]{100, 100, 200, 200, 1, 1};
-
             final Column c = new Column("col1", PrimitiveType.INT);
-            @Mock
-            public long getRowCount() {
-                return rowCount[count++];
-            }
-
             @Mock
             public List<Column> getBaseSchema() {
                 return Lists.newArrayList(c);
@@ -291,22 +274,52 @@ public class AnalysisManagerTest {
             @Mock
             public List<Column> getColumns() { return Lists.newArrayList(c); }
 
+            @Mock
+            public List<Pair<String, String>> getColumnIndexPairs(Set<String> 
columns) {
+                List<Pair<String, String>> jobList = Lists.newArrayList();
+                jobList.add(Pair.of("1", "1"));
+                jobList.add(Pair.of("2", "2"));
+                jobList.add(Pair.of("3", "3"));
+                return jobList;
+            }
         };
         OlapTable olapTable = new OlapTable();
+        List<Pair<String, String>> jobList = Lists.newArrayList();
+        jobList.add(Pair.of("1", "1"));
+        jobList.add(Pair.of("2", "2"));
+        TableStatsMeta stats0 = new TableStatsMeta(
+            0, new AnalysisInfoBuilder().setJobColumns(jobList)
+            .setColName("col1").build(), olapTable);
+        Assertions.assertTrue(olapTable.needReAnalyzeTable(stats0));
+
+        new MockUp<OlapTable>() {
+            int count = 0;
+            int[] rowCount = new int[]{100, 100, 200, 200, 1, 1};
+
+            @Mock
+            public long getRowCount() {
+                return rowCount[count++];
+            }
+            @Mock
+            public List<Pair<String, String>> getColumnIndexPairs(Set<String> 
columns) {
+                List<Pair<String, String>> jobList = Lists.newArrayList();
+                return jobList;
+            }
+        };
         TableStatsMeta stats1 = new TableStatsMeta(
-                50, new AnalysisInfoBuilder().setColToPartitions(new 
HashMap<>())
+                50, new AnalysisInfoBuilder().setJobColumns(new ArrayList<>())
                 .setColName("col1").build(), olapTable);
         stats1.updatedRows.addAndGet(50);
 
         Assertions.assertTrue(olapTable.needReAnalyzeTable(stats1));
         TableStatsMeta stats2 = new TableStatsMeta(
                 190, new AnalysisInfoBuilder()
-                .setColToPartitions(new 
HashMap<>()).setColName("col1").build(), olapTable);
+                .setJobColumns(new ArrayList<>()).setColName("col1").build(), 
olapTable);
         stats2.updatedRows.addAndGet(20);
         Assertions.assertFalse(olapTable.needReAnalyzeTable(stats2));
 
         TableStatsMeta stats3 = new TableStatsMeta(0, new AnalysisInfoBuilder()
-                .setColToPartitions(new 
HashMap<>()).setEmptyJob(true).setColName("col1").build(), olapTable);
+                .setJobColumns(new 
ArrayList<>()).setEmptyJob(true).setColName("col1").build(), olapTable);
         Assertions.assertTrue(olapTable.needReAnalyzeTable(stats3));
 
     }
diff --git 
a/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalysisTaskExecutorTest.java
 
b/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalysisTaskExecutorTest.java
index b17ba3e68db..5698f0e9b20 100644
--- 
a/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalysisTaskExecutorTest.java
+++ 
b/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalysisTaskExecutorTest.java
@@ -22,6 +22,7 @@ import org.apache.doris.catalog.Database;
 import org.apache.doris.catalog.InternalSchemaInitializer;
 import org.apache.doris.catalog.OlapTable;
 import org.apache.doris.catalog.PrimitiveType;
+import org.apache.doris.common.Pair;
 import org.apache.doris.common.jmockit.Deencapsulation;
 import org.apache.doris.datasource.InternalCatalog;
 import org.apache.doris.qe.StmtExecutor;
@@ -33,7 +34,7 @@ import org.apache.doris.statistics.util.DBObjects;
 import org.apache.doris.statistics.util.StatisticsUtil;
 import org.apache.doris.utframe.TestWithFeService;
 
-import com.google.common.collect.Maps;
+import com.google.common.collect.Lists;
 import mockit.Mock;
 import mockit.MockUp;
 import mockit.Mocked;
@@ -41,10 +42,8 @@ import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
 
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.atomic.AtomicBoolean;
 
@@ -158,8 +157,8 @@ public class AnalysisTaskExecutorTest extends 
TestWithFeService {
         };
 
         AnalysisTaskExecutor analysisTaskExecutor = new 
AnalysisTaskExecutor(1);
-        HashMap<String, Set<String>> colToPartitions = Maps.newHashMap();
-        colToPartitions.put("col1", Collections.singleton("t1"));
+        List<Pair<String, String>> columns = Lists.newArrayList();
+        columns.add(Pair.of("col1", "t1"));
         AnalysisInfo analysisInfo = new 
AnalysisInfoBuilder().setJobId(0).setTaskId(0)
                 .setCatalogId(0).setDBId(0).setTblId(0)
                 .setColName("col1").setJobType(JobType.MANUAL)
@@ -167,7 +166,7 @@ public class AnalysisTaskExecutorTest extends 
TestWithFeService {
                 .setAnalysisMethod(AnalysisMethod.FULL)
                 .setAnalysisType(AnalysisType.FUNDAMENTALS)
                 .setState(AnalysisState.RUNNING)
-                .setColToPartitions(colToPartitions)
+                .setJobColumns(columns)
                 .build();
         OlapAnalysisTask task = new OlapAnalysisTask(analysisInfo);
 
diff --git 
a/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalyzeTest.java 
b/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalyzeTest.java
index 483cd3c0326..bf6ce32e155 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalyzeTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalyzeTest.java
@@ -23,6 +23,7 @@ import org.apache.doris.catalog.InternalSchemaInitializer;
 import org.apache.doris.catalog.OlapTable;
 import org.apache.doris.catalog.PrimitiveType;
 import org.apache.doris.common.FeConstants;
+import org.apache.doris.common.Pair;
 import org.apache.doris.datasource.InternalCatalog;
 import org.apache.doris.qe.AutoCloseConnectContext;
 import org.apache.doris.qe.ConnectContext;
@@ -35,7 +36,7 @@ import org.apache.doris.statistics.util.DBObjects;
 import org.apache.doris.statistics.util.StatisticsUtil;
 import org.apache.doris.utframe.TestWithFeService;
 
-import com.google.common.collect.Maps;
+import com.google.common.collect.Lists;
 import mockit.Expectations;
 import mockit.Mock;
 import mockit.MockUp;
@@ -45,10 +46,8 @@ import org.junit.jupiter.api.Test;
 
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 
 public class AnalyzeTest extends TestWithFeService {
 
@@ -160,8 +159,8 @@ public class AnalyzeTest extends TestWithFeService {
             @Mock
             protected void runQuery(String sql) {}
         };
-        HashMap<String, Set<String>> colToPartitions = Maps.newHashMap();
-        colToPartitions.put("col1", Collections.singleton("t1"));
+        List<Pair<String, String>> colList = Lists.newArrayList();
+        colList.add(Pair.of("col1", "index1"));
         AnalysisInfo analysisJobInfo = new 
AnalysisInfoBuilder().setJobId(0).setTaskId(0)
                 .setCatalogId(0)
                 .setDBId(0)
@@ -170,7 +169,7 @@ public class AnalyzeTest extends TestWithFeService {
                 .setAnalysisMode(AnalysisMode.FULL)
                 .setAnalysisMethod(AnalysisMethod.FULL)
                 .setAnalysisType(AnalysisType.FUNDAMENTALS)
-                .setColToPartitions(colToPartitions)
+                .setJobColumns(colList)
                 .setState(AnalysisState.RUNNING)
                 .build();
         new OlapAnalysisTask(analysisJobInfo).doExecute();
diff --git 
a/fe/fe-core/src/test/java/org/apache/doris/statistics/HistogramTaskTest.java 
b/fe/fe-core/src/test/java/org/apache/doris/statistics/HistogramTaskTest.java
index 4217fb5a0db..09bf4dd94c6 100644
--- 
a/fe/fe-core/src/test/java/org/apache/doris/statistics/HistogramTaskTest.java
+++ 
b/fe/fe-core/src/test/java/org/apache/doris/statistics/HistogramTaskTest.java
@@ -90,7 +90,6 @@ public class HistogramTaskTest extends TestWithFeService {
 
             for (Entry<Long, BaseAnalysisTask> infoEntry : 
taskInfo.entrySet()) {
                 BaseAnalysisTask task = infoEntry.getValue();
-                Assertions.assertEquals(AnalysisType.HISTOGRAM, 
task.info.analysisType);
                 Assertions.assertEquals("col1", task.info.colName);
             }
         }
diff --git 
a/fe/fe-core/src/test/java/org/apache/doris/statistics/StatisticsAutoCollectorTest.java
 
b/fe/fe-core/src/test/java/org/apache/doris/statistics/StatisticsAutoCollectorTest.java
index 678e7580f8e..f7b75261cc5 100644
--- 
a/fe/fe-core/src/test/java/org/apache/doris/statistics/StatisticsAutoCollectorTest.java
+++ 
b/fe/fe-core/src/test/java/org/apache/doris/statistics/StatisticsAutoCollectorTest.java
@@ -31,16 +31,12 @@ import org.apache.doris.catalog.View;
 import org.apache.doris.common.Config;
 import org.apache.doris.common.DdlException;
 import org.apache.doris.common.FeConstants;
+import org.apache.doris.common.Pair;
 import org.apache.doris.datasource.CatalogIf;
 import org.apache.doris.datasource.InternalCatalog;
-import org.apache.doris.statistics.AnalysisInfo.AnalysisMethod;
-import org.apache.doris.statistics.AnalysisInfo.AnalysisType;
-import org.apache.doris.statistics.AnalysisInfo.JobType;
 import org.apache.doris.statistics.util.StatisticsUtil;
 
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
 import mockit.Expectations;
 import mockit.Injectable;
 import mockit.Mock;
@@ -54,7 +50,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -144,97 +139,32 @@ public class StatisticsAutoCollectorTest {
         StatisticsAutoCollector saa = new StatisticsAutoCollector();
         List<AnalysisInfo> analysisInfoList = saa.constructAnalysisInfo(new 
Database(1, "anydb"));
         Assertions.assertEquals(1, analysisInfoList.size());
-        Assertions.assertEquals("c1", 
analysisInfoList.get(0).colName.split(",")[0]);
+        Assertions.assertNull(analysisInfoList.get(0).colName);
     }
 
     @Test
-    public void testGetReAnalyzeRequiredPart0() {
+    public void testSkipWideTable() {
 
         TableIf tableIf = new OlapTable();
 
         new MockUp<OlapTable>() {
-            @Mock
-            protected Map<String, Set<String>> findReAnalyzeNeededPartitions() 
{
-                Set<String> partitionNames = new HashSet<>();
-                partitionNames.add("p1");
-                partitionNames.add("p2");
-                Map<String, Set<String>> map = new HashMap<>();
-                map.put("col1", partitionNames);
-                return map;
-            }
-
-            @Mock
-            public long getRowCount() {
-                return 100;
-            }
-
             @Mock
             public List<Column> getBaseSchema() {
                 return Lists.newArrayList(new Column("col1", Type.INT), new 
Column("col2", Type.INT));
             }
-        };
-
-        new MockUp<StatisticsUtil>() {
-            @Mock
-            public TableIf findTable(long catalogName, long dbName, long 
tblName) {
-                return tableIf;
-            }
-        };
-        AnalysisInfo analysisInfo = new 
AnalysisInfoBuilder().setAnalysisMethod(AnalysisMethod.FULL)
-                .setColToPartitions(new HashMap<>()).setAnalysisType(
-                
AnalysisType.FUNDAMENTALS).setColName("col1").setJobType(JobType.SYSTEM).build();
-        new MockUp<AnalysisManager>() {
-
-            int count = 0;
-
-            TableStatsMeta[] tableStatsArr =
-                    new TableStatsMeta[] {new TableStatsMeta(0, analysisInfo, 
tableIf),
-                            new TableStatsMeta(0, analysisInfo, tableIf), 
null};
-
-            {
-                tableStatsArr[0].updatedRows.addAndGet(100);
-                tableStatsArr[1].updatedRows.addAndGet(0);
-            }
-
-            @Mock
-            public TableStatsMeta findTableStatsStatus(long tblId) {
-                return tableStatsArr[count++];
-            }
-        };
-
-        new MockUp<StatisticsAutoCollector>() {
-            @Mock
-            public AnalysisInfo getAnalysisJobInfo(AnalysisInfo jobInfo, 
TableIf table,
-                    Set<String> needRunPartitions) {
-                return new AnalysisInfoBuilder().build();
-            }
-        };
-        StatisticsAutoCollector statisticsAutoCollector = new 
StatisticsAutoCollector();
-        AnalysisInfo analysisInfo2 = new AnalysisInfoBuilder()
-                .setCatalogId(0)
-                .setDBId(0)
-                .setTblId(0).build();
-        
Assertions.assertNotNull(statisticsAutoCollector.getReAnalyzeRequiredPart(analysisInfo2));
-        // uncomment it when updatedRows gets ready
-        // 
Assertions.assertNull(statisticsAutoCollector.getReAnalyzeRequiredPart(analysisInfo2));
-        
Assertions.assertNotNull(statisticsAutoCollector.getReAnalyzeRequiredPart(analysisInfo2));
-    }
-
-    @Test
-    public void testSkipWideTable() {
-
-        TableIf tableIf = new OlapTable();
 
-        new MockUp<OlapTable>() {
             @Mock
-            public List<Column> getBaseSchema() {
-                return Lists.newArrayList(new Column("col1", Type.INT), new 
Column("col2", Type.INT));
+            public List<Pair<String, String>> getColumnIndexPairs(Set<String> 
columns) {
+                ArrayList<Pair<String, String>> list = Lists.newArrayList();
+                list.add(Pair.of("1", "1"));
+                return list;
             }
         };
 
         new MockUp<StatisticsUtil>() {
             int count = 0;
-            int [] thresholds = {1, 10};
+            int[] thresholds = {1, 10};
+
             @Mock
             public TableIf findTable(long catalogName, long dbName, long 
tblName) {
                 return tableIf;
@@ -246,19 +176,10 @@ public class StatisticsAutoCollectorTest {
             }
         };
 
-        new MockUp<OlapTable>() {
-            @Mock
-            public Map<String, Set<String>> findReAnalyzeNeededPartitions() {
-                HashMap<String, Set<String>> ret = Maps.newHashMap();
-                ret.put("key1", Sets.newHashSet());
-                return ret;
-            }
-        };
-
         AnalysisInfo analysisInfo = new AnalysisInfoBuilder().build();
         StatisticsAutoCollector statisticsAutoCollector = new 
StatisticsAutoCollector();
-        
Assertions.assertNull(statisticsAutoCollector.getReAnalyzeRequiredPart(analysisInfo));
-        
Assertions.assertNotNull(statisticsAutoCollector.getReAnalyzeRequiredPart(analysisInfo));
+        
Assertions.assertNull(statisticsAutoCollector.getNeedAnalyzeColumns(analysisInfo));
+        
Assertions.assertNotNull(statisticsAutoCollector.getNeedAnalyzeColumns(analysisInfo));
     }
 
     @Test
@@ -400,13 +321,9 @@ public class StatisticsAutoCollectorTest {
         List<AnalysisInfo> jobInfos = new ArrayList<>();
         sac.createAnalyzeJobForTbl(db, jobInfos, t1);
         AnalysisInfo jobInfo = jobInfos.get(0);
-        Map<String, Set<String>> colToPartitions = new HashMap<>();
-        colToPartitions.put("test", new HashSet<String>() {
-            {
-                add("p1");
-            }
-        });
-        jobInfo = new 
AnalysisInfoBuilder(jobInfo).setColToPartitions(colToPartitions).build();
+        List<Pair<String, String>> columnNames = Lists.newArrayList();
+        columnNames.add(Pair.of("test", "t1"));
+        jobInfo = new 
AnalysisInfoBuilder(jobInfo).setJobColumns(columnNames).build();
         Map<Long, BaseAnalysisTask> analysisTasks = new HashMap<>();
         AnalysisManager analysisManager = 
Env.getCurrentEnv().getAnalysisManager();
         analysisManager.createTaskForEachColumns(jobInfo, analysisTasks, 
false);
@@ -472,13 +389,9 @@ public class StatisticsAutoCollectorTest {
         List<AnalysisInfo> jobInfos = new ArrayList<>();
         sac.createAnalyzeJobForTbl(db, jobInfos, t1);
         AnalysisInfo jobInfo = jobInfos.get(0);
-        Map<String, Set<String>> colToPartitions = new HashMap<>();
-        colToPartitions.put("test", new HashSet<String>() {
-            {
-                add("p1");
-            }
-        });
-        jobInfo = new 
AnalysisInfoBuilder(jobInfo).setColToPartitions(colToPartitions).build();
+        List<Pair<String, String>> colNames = Lists.newArrayList();
+        colNames.add(Pair.of("test", "1"));
+        jobInfo = new 
AnalysisInfoBuilder(jobInfo).setJobColumns(colNames).build();
         Map<Long, BaseAnalysisTask> analysisTasks = new HashMap<>();
         AnalysisManager analysisManager = 
Env.getCurrentEnv().getAnalysisManager();
         analysisManager.createTaskForEachColumns(jobInfo, analysisTasks, 
false);
diff --git 
a/fe/fe-core/src/test/java/org/apache/doris/statistics/TableStatsMetaTest.java 
b/fe/fe-core/src/test/java/org/apache/doris/statistics/TableStatsMetaTest.java
index b5e73ba09da..94eab9e00cc 100644
--- 
a/fe/fe-core/src/test/java/org/apache/doris/statistics/TableStatsMetaTest.java
+++ 
b/fe/fe-core/src/test/java/org/apache/doris/statistics/TableStatsMetaTest.java
@@ -25,7 +25,7 @@ import mockit.Mocked;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
 
-import java.util.HashMap;
+import java.util.ArrayList;
 
 class TableStatsMetaTest {
 
@@ -38,7 +38,7 @@ class TableStatsMetaTest {
             }
         };
         TableStatsMeta tableStatsMeta = new TableStatsMeta();
-        AnalysisInfo jobInfo = new 
AnalysisInfoBuilder().setColToPartitions(new HashMap<>())
+        AnalysisInfo jobInfo = new AnalysisInfoBuilder().setJobColumns(new 
ArrayList<>())
                 .setColName("col1").build();
         tableStatsMeta.update(jobInfo, table);
         Assertions.assertEquals(4, tableStatsMeta.rowCount);
diff --git a/regression-test/suites/statistics/analyze_stats.groovy 
b/regression-test/suites/statistics/analyze_stats.groovy
index 699e595df59..8cfec966233 100644
--- a/regression-test/suites/statistics/analyze_stats.groovy
+++ b/regression-test/suites/statistics/analyze_stats.groovy
@@ -1122,10 +1122,10 @@ PARTITION `p599` VALUES IN (599)
         System.out.println(actual_result)
         return expected_result.containsAll(actual_result) && 
actual_result.containsAll(expected_result)
     }
-    assert check_column(afterDropped, "[col2, col3]")
+    assert check_column(afterDropped, "[test_meta_management:col2, 
test_meta_management:col3]")
     sql """ANALYZE TABLE test_meta_management WITH SYNC"""
     afterDropped = sql """SHOW TABLE STATS test_meta_management"""
-    assert check_column(afterDropped, "[col1, col2, col3]")
+    assert check_column(afterDropped, "[test_meta_management:col1, 
test_meta_management:col2, test_meta_management:col3]")
 
     sql """ DROP TABLE IF EXISTS test_updated_rows """
     sql """
diff --git a/regression-test/suites/statistics/test_analyze_mtmv.groovy 
b/regression-test/suites/statistics/test_analyze_mtmv.groovy
index 7662fd1fbbe..3655a35390b 100644
--- a/regression-test/suites/statistics/test_analyze_mtmv.groovy
+++ b/regression-test/suites/statistics/test_analyze_mtmv.groovy
@@ -143,7 +143,7 @@ suite("test_analyze_mtmv") {
     result_sample = sql """show column stats mv1(l_shipdate)"""
     assertEquals(1, result_sample.size())
     assertEquals("l_shipdate", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("3.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -157,7 +157,7 @@ suite("test_analyze_mtmv") {
     result_sample = sql """show column cached stats mv1(l_shipdate)"""
     assertEquals(1, result_sample.size())
     assertEquals("l_shipdate", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("3.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -171,7 +171,7 @@ suite("test_analyze_mtmv") {
     result_sample = sql """show column stats mv1(o_orderdate)"""
     assertEquals(1, result_sample.size())
     assertEquals("o_orderdate", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("3.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -185,7 +185,7 @@ suite("test_analyze_mtmv") {
     result_sample = sql """show column cached stats mv1(o_orderdate)"""
     assertEquals(1, result_sample.size())
     assertEquals("o_orderdate", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("3.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -199,7 +199,7 @@ suite("test_analyze_mtmv") {
     result_sample = sql """show column stats mv1(l_partkey)"""
     assertEquals(1, result_sample.size())
     assertEquals("l_partkey", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("1.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -213,7 +213,7 @@ suite("test_analyze_mtmv") {
     result_sample = sql """show column cached stats mv1(l_partkey)"""
     assertEquals(1, result_sample.size())
     assertEquals("l_partkey", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("1.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -227,7 +227,7 @@ suite("test_analyze_mtmv") {
     result_sample = sql """show column stats mv1(l_suppkey)"""
     assertEquals(1, result_sample.size())
     assertEquals("l_suppkey", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("1.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -241,7 +241,7 @@ suite("test_analyze_mtmv") {
     result_sample = sql """show column cached stats mv1(l_suppkey)"""
     assertEquals(1, result_sample.size())
     assertEquals("l_suppkey", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("1.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -255,7 +255,7 @@ suite("test_analyze_mtmv") {
     result_sample = sql """show column stats mv1(sum_total)"""
     assertEquals(1, result_sample.size())
     assertEquals("sum_total", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("2.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -269,7 +269,7 @@ suite("test_analyze_mtmv") {
     result_sample = sql """show column cached stats mv1(sum_total)"""
     assertEquals(1, result_sample.size())
     assertEquals("sum_total", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("2.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -298,7 +298,7 @@ suite("test_analyze_mtmv") {
     }
     assertEquals(1, result_sample.size())
     assertEquals("l_shipdate", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("3.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -319,7 +319,7 @@ suite("test_analyze_mtmv") {
     }
     assertEquals(1, result_sample.size())
     assertEquals("l_shipdate", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("3.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -340,7 +340,7 @@ suite("test_analyze_mtmv") {
     }
     assertEquals(1, result_sample.size())
     assertEquals("o_orderdate", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("3.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -361,7 +361,7 @@ suite("test_analyze_mtmv") {
     }
     assertEquals(1, result_sample.size())
     assertEquals("o_orderdate", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("3.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -382,7 +382,7 @@ suite("test_analyze_mtmv") {
     }
     assertEquals(1, result_sample.size())
     assertEquals("l_partkey", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("1.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -403,7 +403,7 @@ suite("test_analyze_mtmv") {
     }
     assertEquals(1, result_sample.size())
     assertEquals("l_partkey", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("1.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -424,7 +424,7 @@ suite("test_analyze_mtmv") {
     }
     assertEquals(1, result_sample.size())
     assertEquals("l_suppkey", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("1.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -445,7 +445,7 @@ suite("test_analyze_mtmv") {
     }
     assertEquals(1, result_sample.size())
     assertEquals("l_suppkey", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("1.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -466,7 +466,7 @@ suite("test_analyze_mtmv") {
     }
     assertEquals(1, result_sample.size())
     assertEquals("sum_total", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("2.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
@@ -487,7 +487,7 @@ suite("test_analyze_mtmv") {
     }
     assertEquals(1, result_sample.size())
     assertEquals("sum_total", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mv1", result_sample[0][1])
     assertEquals("3.0", result_sample[0][2])
     assertEquals("2.0", result_sample[0][3])
     assertEquals("0.0", result_sample[0][4])
diff --git a/regression-test/suites/statistics/test_analyze_mv.groovy 
b/regression-test/suites/statistics/test_analyze_mv.groovy
index 635837e6c16..3348623acaf 100644
--- a/regression-test/suites/statistics/test_analyze_mv.groovy
+++ b/regression-test/suites/statistics/test_analyze_mv.groovy
@@ -145,7 +145,7 @@ suite("test_analyze_mv") {
     def result_sample = sql """show column stats mvTestDup(key1)"""
     assertEquals(1, result_sample.size())
     assertEquals("key1", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mvTestDup", result_sample[0][1])
     assertEquals("6.0", result_sample[0][2])
     assertEquals("4.0", result_sample[0][3])
     assertEquals("1", result_sample[0][7])
@@ -157,7 +157,7 @@ suite("test_analyze_mv") {
     result_sample = sql """show column stats mvTestDup(value1)"""
     assertEquals(1, result_sample.size())
     assertEquals("value1", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mvTestDup", result_sample[0][1])
     assertEquals("6.0", result_sample[0][2])
     assertEquals("4.0", result_sample[0][3])
     assertEquals("3", result_sample[0][7])
@@ -252,9 +252,9 @@ suite("test_analyze_mv") {
 
     result_sample = sql """show column stats mvTestAgg(key1)"""
     assertEquals(2, result_sample.size())
-    if (result_sample[0][1] == "N/A") {
+    if (result_sample[0][1] == "mvTestAgg") {
         assertEquals("key1", result_sample[0][0])
-        assertEquals("N/A", result_sample[0][1])
+        assertEquals("mvTestAgg", result_sample[0][1])
         assertEquals("5.0", result_sample[0][2])
         assertEquals("4.0", result_sample[0][3])
         assertEquals("1", result_sample[0][7])
@@ -267,7 +267,7 @@ suite("test_analyze_mv") {
         assertEquals("1001", result_sample[1][8])
     } else {
         assertEquals("key1", result_sample[1][0])
-        assertEquals("N/A", result_sample[1][1])
+        assertEquals("mvTestAgg", result_sample[1][1])
         assertEquals("5.0", result_sample[1][2])
         assertEquals("4.0", result_sample[1][3])
         assertEquals("1", result_sample[1][7])
@@ -282,9 +282,9 @@ suite("test_analyze_mv") {
 
     result_sample = sql """show column stats mvTestAgg(value1)"""
     assertEquals(2, result_sample.size())
-    if (result_sample[0][1] == "N/A") {
+    if (result_sample[0][1] == "mvTestAgg") {
         assertEquals("value1", result_sample[0][0])
-        assertEquals("N/A", result_sample[0][1])
+        assertEquals("mvTestAgg", result_sample[0][1])
         assertEquals("5.0", result_sample[0][2])
         assertEquals("5.0", result_sample[0][3])
         assertEquals("6", result_sample[0][7])
@@ -297,7 +297,7 @@ suite("test_analyze_mv") {
         assertEquals("3001", result_sample[1][8])
     } else {
         assertEquals("value1", result_sample[1][0])
-        assertEquals("N/A", result_sample[1][1])
+        assertEquals("mvTestAgg", result_sample[1][1])
         assertEquals("5.0", result_sample[1][2])
         assertEquals("5.0", result_sample[1][3])
         assertEquals("6", result_sample[1][7])
@@ -313,7 +313,7 @@ suite("test_analyze_mv") {
     result_sample = sql """show column stats mvTestAgg(key2)"""
     assertEquals(1, result_sample.size())
     assertEquals("key2", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mvTestAgg", result_sample[0][1])
     assertEquals("5.0", result_sample[0][2])
     assertEquals("5.0", result_sample[0][3])
     assertEquals("2", result_sample[0][7])
@@ -323,7 +323,7 @@ suite("test_analyze_mv") {
     result_sample = sql """show column stats mvTestAgg(value2)"""
     assertEquals(1, result_sample.size())
     assertEquals("value2", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mvTestAgg", result_sample[0][1])
     assertEquals("5.0", result_sample[0][2])
     assertEquals("5.0", result_sample[0][3])
     assertEquals("4", result_sample[0][7])
@@ -391,7 +391,7 @@ suite("test_analyze_mv") {
     result_sample = sql """show column stats mvTestUni(key1)"""
     assertEquals(1, result_sample.size())
     assertEquals("key1", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mvTestUni", result_sample[0][1])
     assertEquals("5.0", result_sample[0][2])
     assertEquals("4.0", result_sample[0][3])
     assertEquals("1", result_sample[0][7])
@@ -444,7 +444,7 @@ suite("test_analyze_mv") {
     }
     assertEquals(1, result_sample.size())
     assertEquals("key1", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mvTestDup", result_sample[0][1])
     assertEquals("6.0", result_sample[0][2])
     assertEquals("4.0", result_sample[0][3])
     assertEquals("1", result_sample[0][7])
@@ -462,7 +462,7 @@ suite("test_analyze_mv") {
     }
     assertEquals(1, result_sample.size())
     assertEquals("value1", result_sample[0][0])
-    assertEquals("N/A", result_sample[0][1])
+    assertEquals("mvTestDup", result_sample[0][1])
     assertEquals("6.0", result_sample[0][2])
     assertEquals("4.0", result_sample[0][3])
     assertEquals("3", result_sample[0][7])
@@ -558,11 +558,11 @@ suite("test_analyze_mv") {
         logger.info("col " + colName + " in index " + indexName + " found ? " 
+ found)
         assertTrue(found)
     }
-    verifyTaskStatus(result_sample, "key1", "N/A")
-    verifyTaskStatus(result_sample, "key2", "N/A")
-    verifyTaskStatus(result_sample, "value1", "N/A")
-    verifyTaskStatus(result_sample, "value2", "N/A")
-    verifyTaskStatus(result_sample, "value3", "N/A")
+    verifyTaskStatus(result_sample, "key1", "mvTestDup")
+    verifyTaskStatus(result_sample, "key2", "mvTestDup")
+    verifyTaskStatus(result_sample, "value1", "mvTestDup")
+    verifyTaskStatus(result_sample, "value2", "mvTestDup")
+    verifyTaskStatus(result_sample, "value3", "mvTestDup")
     verifyTaskStatus(result_sample, "mv_key1", "mv1")
     verifyTaskStatus(result_sample, "mv_key1", "mv3")
     verifyTaskStatus(result_sample, "mv_key2", "mv2")
@@ -580,7 +580,7 @@ suite("test_analyze_mv") {
     def result = sql """show column cached stats mvTestDup(key1)"""
     assertEquals(1, result.size())
     assertEquals("key1", result[0][0])
-    assertEquals("N/A", result[0][1])
+    assertEquals("mvTestDup", result[0][1])
     assertEquals("50.0", result[0][2])
     assertEquals("1.0", result[0][3])
     assertEquals("1.0", result[0][4])


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]


Reply via email to