This is an automated email from the ASF dual-hosted git repository.

morrysnow pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new a3062c662c [feature-wip](statistics) support statistics injection and show statistics (#14201)
a3062c662c is described below

commit a3062c662c9e58014d0ed26364bdadcf259e69b6
Author: Kikyou1997 <33112463+kikyou1...@users.noreply.github.com>
AuthorDate: Tue Nov 15 11:29:51 2022 +0800

    [feature-wip](statistics) support statistics injection and show statistics (#14201)
    
    1. Reduce the configuration options for the statistics framework, and add comments for the remaining ones.
    2. Move the analysis job creation logic to `StatisticsRepository`, which defines all the functions used to interact with the internal statistics tables.
    3. Move AnalysisJobScheduler to the statistics package.
    4. Support displaying and manually injecting statistics (see the usage sketch below).
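
    A rough usage sketch of the new capabilities (the statement grammar and property keys below are inferred from AlterColumnStatsStmt/ShowColumnStatsStmt and the added regression test, and may differ from the final syntax; table and column names are hypothetical):

        -- Manually inject column statistics for column c1 of a hypothetical table t1:
        ALTER TABLE t1 MODIFY COLUMN c1
        SET STATS ('row_count'='100', 'ndv'='10', 'num_nulls'='0',
                   'min_value'='1', 'max_value'='100', 'data_size'='400');

        -- Display the injected/collected column statistics (optionally for a single partition):
        SHOW COLUMN STATS t1;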
---
 .../doris/analysis/AlterColumnStatsStmt.java       |  10 +-
 .../apache/doris/analysis/ShowColumnStatsStmt.java |  84 ++++++--
 .../main/java/org/apache/doris/catalog/Env.java    |  46 +----
 .../java/org/apache/doris/catalog/TableIf.java     |  14 ++
 .../main/java/org/apache/doris/common/Config.java  |  43 +---
 .../persist/StaleStatisticsRecordsDetector.java    |  69 -------
 .../org/apache/doris/planner/OlapScanNode.java     |   1 -
 .../main/java/org/apache/doris/qe/DdlExecutor.java |   3 +-
 .../java/org/apache/doris/qe/ShowExecutor.java     |  54 +++--
 .../org/apache/doris/statistics/AnalysisJob.java   |  14 +-
 .../doris/statistics/AnalysisJobExecutor.java      |   4 +-
 .../AnalysisJobScheduler.java                      |   8 +-
 .../doris/statistics/AnalysisJobWrapper.java       |   2 +-
 .../org/apache/doris/statistics/ColumnStat.java    |   1 +
 .../apache/doris/statistics/ColumnStatistic.java   |   1 +
 .../doris/statistics/StatisticConstants.java       |  36 ++++
 .../statistics/StatisticStorageInitializer.java    |  10 +-
 .../apache/doris/statistics/StatisticsCache.java   |  11 +-
 .../doris/statistics/StatisticsCacheLoader.java    |   1 +
 .../apache/doris/statistics/StatisticsManager.java |  36 ----
 .../doris/statistics/StatisticsRepository.java     | 220 +++++++++++++++++++++
 .../doris/statistics/StatisticsTableCleaner.java   |   5 +-
 .../DBObjects.java}                                |  22 ++-
 .../statistics/{ => util}/StatisticsUtil.java      |  64 +++++-
 .../doris/cluster/DecommissionBackendTest.java     |  16 +-
 .../doris/statistics/AnalysisJobExecutorTest.java  |   1 -
 .../apache/doris/statistics/AnalysisJobTest.java   |   2 +-
 .../org/apache/doris/statistics/CacheTest.java     |   1 +
 .../data/statistics/alter_col_stats.out            |  11 ++
 .../suites/statistics/alter_col_stats.groovy       |  53 +++++
 30 files changed, 575 insertions(+), 268 deletions(-)

diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterColumnStatsStmt.java 
b/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterColumnStatsStmt.java
index 3ff91ce61f..b1c6699da7 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterColumnStatsStmt.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterColumnStatsStmt.java
@@ -55,12 +55,14 @@ import java.util.Set;
 public class AlterColumnStatsStmt extends DdlStmt {
 
     private static final ImmutableSet<StatsType> CONFIGURABLE_PROPERTIES_SET = 
new ImmutableSet.Builder<StatsType>()
+            .add(StatsType.ROW_COUNT)
             .add(ColumnStat.NDV)
             .add(ColumnStat.AVG_SIZE)
             .add(ColumnStat.MAX_SIZE)
             .add(ColumnStat.NUM_NULLS)
             .add(ColumnStat.MIN_VALUE)
             .add(ColumnStat.MAX_VALUE)
+            .add(StatsType.DATA_SIZE)
             .build();
 
     private final TableName tableName;
@@ -162,10 +164,6 @@ public class AlterColumnStatsStmt extends DdlStmt {
                 throw new AnalysisException("Partition does not exist: " + 
optional.get());
             }
             partitionNames.addAll(optPartitionNames.getPartitionNames());
-        } else {
-            if 
(!olapTable.getPartitionInfo().getType().equals(PartitionType.UNPARTITIONED)) {
-                throw new AnalysisException("For partitioned tables, 
partitions should be specified");
-            }
         }
     }
 
@@ -187,4 +185,8 @@ public class AlterColumnStatsStmt extends DdlStmt {
         }
         return sb.toString();
     }
+
+    public String getValue(StatsType statsType) {
+        return statsTypeToValue.get(statsType);
+    }
 }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowColumnStatsStmt.java 
b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowColumnStatsStmt.java
index eed814989b..8b41eca6a1 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowColumnStatsStmt.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowColumnStatsStmt.java
@@ -18,15 +18,23 @@
 package org.apache.doris.analysis;
 
 import org.apache.doris.catalog.Column;
+import org.apache.doris.catalog.DatabaseIf;
+import org.apache.doris.catalog.Env;
 import org.apache.doris.catalog.ScalarType;
+import org.apache.doris.catalog.TableIf;
+import org.apache.doris.common.AnalysisException;
+import org.apache.doris.common.ErrorReport;
+import org.apache.doris.common.Pair;
 import org.apache.doris.common.UserException;
 import org.apache.doris.common.util.Util;
+import org.apache.doris.datasource.CatalogIf;
+import org.apache.doris.qe.ShowResultSet;
 import org.apache.doris.qe.ShowResultSetMetaData;
-import org.apache.doris.statistics.ColumnStat;
+import org.apache.doris.statistics.ColumnStatistic;
 
 import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
 
-import java.util.Collections;
 import java.util.List;
 
 public class ShowColumnStatsStmt extends ShowStmt {
@@ -34,17 +42,23 @@ public class ShowColumnStatsStmt extends ShowStmt {
     private static final ImmutableList<String> TITLE_NAMES =
             new ImmutableList.Builder<String>()
                     .add("column_name")
-                    .add(ColumnStat.NDV.getValue())
-                    .add(ColumnStat.AVG_SIZE.getValue())
-                    .add(ColumnStat.MAX_SIZE.getValue())
-                    .add(ColumnStat.NUM_NULLS.getValue())
-                    .add(ColumnStat.MIN_VALUE.getValue())
-                    .add(ColumnStat.MAX_VALUE.getValue())
+                    .add("count")
+                    .add("ndv")
+                    .add("num_null")
+                    .add("data_size")
+                    .add("avg_size_byte")
+                    .add("min")
+                    .add("max")
+                    .add("min_expr")
+                    .add("max_expr")
                     .build();
 
     private final TableName tableName;
+
     private final PartitionNames partitionNames;
 
+    private TableIf table;
+
     public ShowColumnStatsStmt(TableName tableName, PartitionNames 
partitionNames) {
         this.tableName = tableName;
         this.partitionNames = partitionNames;
@@ -54,22 +68,29 @@ public class ShowColumnStatsStmt extends ShowStmt {
         return tableName;
     }
 
-    public List<String> getPartitionNames() {
-        if (partitionNames == null) {
-            return Collections.emptyList();
-        }
-        return partitionNames.getPartitionNames();
-    }
-
     @Override
     public void analyze(Analyzer analyzer) throws UserException {
         super.analyze(analyzer);
         tableName.analyze(analyzer);
-        // disallow external catalog
-        Util.prohibitExternalCatalog(tableName.getCtl(), 
this.getClass().getSimpleName());
-
         if (partitionNames != null) {
             partitionNames.analyze(analyzer);
+            if (partitionNames.getPartitionNames().size() > 1) {
+                throw new AnalysisException("Only one partition name could be 
specified");
+            }
+        }
+        // disallow external catalog
+        Util.prohibitExternalCatalog(tableName.getCtl(), 
this.getClass().getSimpleName());
+        CatalogIf<DatabaseIf> catalog = 
Env.getCurrentEnv().getCatalogMgr().getCatalog(tableName.getCtl());
+        if (catalog == null) {
+            ErrorReport.reportAnalysisException("Catalog: {} not exists", 
tableName.getCtl());
+        }
+        DatabaseIf<TableIf> db = catalog.getDb(tableName.getDb()).orElse(null);
+        if (db == null) {
+            ErrorReport.reportAnalysisException("DB: {} not exists", 
tableName.getDb());
+        }
+        table = db.getTable(tableName.getTbl()).orElse(null);
+        if (table == null) {
+            ErrorReport.reportAnalysisException("Table: {} not exists", 
tableName.getTbl());
         }
     }
 
@@ -82,4 +103,31 @@ public class ShowColumnStatsStmt extends ShowStmt {
         }
         return builder.build();
     }
+
+    public TableIf getTable() {
+        return table;
+    }
+
+    public ShowResultSet constructResultSet(List<Pair<String, 
ColumnStatistic>> columnStatistics) {
+        List<List<String>> result = Lists.newArrayList();
+        columnStatistics.forEach(p -> {
+            List<String> row = Lists.newArrayList();
+            row.add(p.first);
+            row.add(String.valueOf(p.second.count));
+            row.add(String.valueOf(p.second.ndv));
+            row.add(String.valueOf(p.second.numNulls));
+            row.add(String.valueOf(p.second.dataSize));
+            row.add(String.valueOf(p.second.avgSizeByte));
+            row.add(String.valueOf(p.second.minValue));
+            row.add(String.valueOf(p.second.maxValue));
+            row.add(String.valueOf(p.second.minExpr.toSql()));
+            row.add(String.valueOf(p.second.maxExpr.toSql()));
+            result.add(row);
+        });
+        return new ShowResultSet(getMetaData(), result);
+    }
+
+    public PartitionNames getPartitionNames() {
+        return partitionNames;
+    }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java 
b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java
index 8ce6d7a346..be7f085af4 100755
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java
@@ -169,7 +169,6 @@ import org.apache.doris.metric.MetricRepo;
 import org.apache.doris.mtmv.MTMVJobManager;
 import org.apache.doris.mysql.privilege.PaloAuth;
 import org.apache.doris.mysql.privilege.PrivPredicate;
-import org.apache.doris.persist.AnalysisJobScheduler;
 import org.apache.doris.persist.BackendIdsUpdateInfo;
 import org.apache.doris.persist.BackendReplicasInfo;
 import org.apache.doris.persist.BackendTabletsInfo;
@@ -209,18 +208,14 @@ import org.apache.doris.qe.JournalObservable;
 import org.apache.doris.qe.VariableMgr;
 import org.apache.doris.resource.Tag;
 import org.apache.doris.service.FrontendOptions;
-import org.apache.doris.statistics.AnalysisJobInfo;
-import org.apache.doris.statistics.AnalysisJobInfo.AnalysisType;
-import org.apache.doris.statistics.AnalysisJobInfo.JobState;
-import org.apache.doris.statistics.AnalysisJobInfo.ScheduleType;
-import org.apache.doris.statistics.StatisticConstants;
+import org.apache.doris.statistics.AnalysisJobScheduler;
 import org.apache.doris.statistics.StatisticStorageInitializer;
 import org.apache.doris.statistics.StatisticsCache;
 import org.apache.doris.statistics.StatisticsJobManager;
 import org.apache.doris.statistics.StatisticsJobScheduler;
 import org.apache.doris.statistics.StatisticsManager;
+import org.apache.doris.statistics.StatisticsRepository;
 import org.apache.doris.statistics.StatisticsTaskScheduler;
-import org.apache.doris.statistics.StatisticsUtil;
 import org.apache.doris.system.Backend;
 import org.apache.doris.system.Frontend;
 import org.apache.doris.system.HeartbeatMgr;
@@ -252,7 +247,6 @@ import com.sleepycat.je.rep.NetworkRestoreConfig;
 import lombok.Setter;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.text.StringSubstitutor;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.codehaus.jackson.map.ObjectMapper;
@@ -5237,40 +5231,6 @@ public class Env {
     //  2. support sample job
     //  3. support period job
     public void createAnalysisJob(AnalyzeStmt analyzeStmt) {
-        String catalogName = analyzeStmt.getCatalogName();
-        String db = analyzeStmt.getDBName();
-        String tbl = analyzeStmt.getTblName();
-        List<String> colNames = analyzeStmt.getOptColumnNames();
-        String persistAnalysisJobSQLTemplate = "INSERT INTO " + 
StatisticConstants.STATISTIC_DB_NAME + "."
-                + StatisticConstants.ANALYSIS_JOB_TABLE + " VALUES(${jobId}, 
'${catalogName}', '${dbName}',"
-                + "'${tblName}','${colName}', '${jobType}', '${analysisType}', 
'${message}', '${lastExecTimeInMs}',"
-                + "'${state}', '${scheduleType}')";
-        if (colNames != null) {
-            for (String colName : colNames) {
-                AnalysisJobInfo analysisJobInfo = new 
AnalysisJobInfo(Env.getCurrentEnv().getNextId(), catalogName, db,
-                        tbl, colName, AnalysisJobInfo.JobType.MANUAL, 
ScheduleType.ONCE);
-                analysisJobInfo.analysisType = AnalysisType.FULL;
-                Map<String, String> params = new HashMap<>();
-                params.put("jobId", String.valueOf(analysisJobInfo.jobId));
-                params.put("catalogName", analysisJobInfo.catalogName);
-                params.put("dbName", analysisJobInfo.dbName);
-                params.put("tblName", analysisJobInfo.tblName);
-                params.put("colName", analysisJobInfo.colName);
-                params.put("jobType", analysisJobInfo.jobType.toString());
-                params.put("analysisType", 
analysisJobInfo.analysisType.toString());
-                params.put("message", "");
-                params.put("lastExecTimeInMs", "0");
-                params.put("state", JobState.PENDING.toString());
-                params.put("scheduleType", 
analysisJobInfo.scheduleType.toString());
-                try {
-                    StatisticsUtil.execUpdate(
-                            new 
StringSubstitutor(params).replace(persistAnalysisJobSQLTemplate));
-                } catch (Exception e) {
-                    LOG.warn("Failed to persite job for column: {}", colName, 
e);
-                    return;
-                }
-                
Env.getCurrentEnv().getAnalysisJobScheduler().schedule(analysisJobInfo);
-            }
-        }
+        StatisticsRepository.createAnalysisJob(analyzeStmt);
     }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/TableIf.java 
b/fe/fe-core/src/main/java/org/apache/doris/catalog/TableIf.java
index 5d0426ae52..24a93c2df8 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/TableIf.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/TableIf.java
@@ -22,7 +22,9 @@ import org.apache.doris.common.DdlException;
 import org.apache.doris.common.MetaNotFoundException;
 import org.apache.doris.thrift.TTableDescriptor;
 
+import java.util.Collections;
 import java.util.List;
+import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
 public interface TableIf {
@@ -173,5 +175,17 @@ public interface TableIf {
             }
         }
     }
+
+    default List<Column> getColumns() {
+        return Collections.emptyList();
+    }
+
+    default Set<String> getPartitionNames() {
+        return Collections.emptySet();
+    }
+
+    default Partition getPartition(String name) {
+        return null;
+    }
 }
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/Config.java 
b/fe/fe-core/src/main/java/org/apache/doris/common/Config.java
index 7b131d6236..50c677013a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/common/Config.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/common/Config.java
@@ -1847,48 +1847,19 @@ public class Config extends ConfigBase {
     @ConfField(mutable = true, masterOnly = false)
     public static long hive_metastore_client_timeout_second = 10;
 
-    @ConfField(mutable = false)
-    public static int statistic_table_bucket_count = 7;
-
-    @ConfField
-    public static long statistics_max_mem_per_query_in_bytes = 2L * 1024 * 
1024 * 1024;
-
-    @ConfField
-    public static int statistic_parallel_exec_instance_num = 1;
-
+    /**
+     * Used to determine how many statistics collection SQL statements could run simultaneously.
+     */
     @ConfField
     public static int statistics_simultaneously_running_job_num = 10;
 
+    /**
+     * Internal table replica num. Once set, the user should ensure that the number of available BEs
+     * is greater than this value, otherwise the creation of the statistics-related internal tables would fail.
+     */
     @ConfField
     public static int statistic_internal_table_replica_num = 1;
 
-    @ConfField
-    public static int statistic_clean_interval_in_hours = 24 * 2;
-
-    @ConfField
-    public static int statistics_stale_statistics_fetch_size = 1000;
-
-    @ConfField
-    public static int 
statistics_outdated_record_detector_running_interval_in_minutes = 5;
-
-    @ConfField
-    public static int statistics_records_outdated_time_in_ms = 2 * 24 * 3600 * 
1000;
-
-    @ConfField
-    public static int statistics_job_execution_timeout_in_min = 5;
-
-    @ConfField
-    public static int statistics_table_creation_retry_interval_in_seconds = 5;
-
-    @ConfField
-    public static int statistics_cache_max_size = 100000;
-
-    @ConfField
-    public static int statistics_cache_valid_duration_in_hours = 24 * 2;
-
-    @ConfField
-    public static int statistics_cache_refresh_interval = 24 * 2;
-
     /**
      * if table has too many replicas, Fe occur oom when schema change.
      * 10W replicas is a reasonable value for testing.
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/persist/StaleStatisticsRecordsDetector.java
 
b/fe/fe-core/src/main/java/org/apache/doris/persist/StaleStatisticsRecordsDetector.java
deleted file mode 100644
index 8f814f578d..0000000000
--- 
a/fe/fe-core/src/main/java/org/apache/doris/persist/StaleStatisticsRecordsDetector.java
+++ /dev/null
@@ -1,69 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-package org.apache.doris.persist;
-
-import org.apache.doris.catalog.Env;
-import org.apache.doris.common.Config;
-import org.apache.doris.common.util.MasterDaemon;
-import org.apache.doris.statistics.AnalysisJobInfo;
-import org.apache.doris.statistics.StatisticConstants;
-import org.apache.doris.statistics.StatisticsUtil;
-import org.apache.doris.statistics.util.InternalQueryResult.ResultRow;
-
-import org.apache.commons.text.StringSubstitutor;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import org.apache.thrift.TException;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-public class StaleStatisticsRecordsDetector extends MasterDaemon {
-
-    private static final Logger LOG = 
LogManager.getLogger(StaleStatisticsRecordsDetector.class);
-
-    private static final String FETCH_STALE_RECORDS_SQL_TEMPLATE = "SELECT * 
FROM "
-            + StatisticConstants.STATISTIC_DB_NAME + "."
-            + StatisticConstants.ANALYSIS_JOB_TABLE
-            + " WHERE now() - last_exec_time_in_ms > ${expiredTime} "
-            + " order by last_exec_time_in_ms";
-
-    public StaleStatisticsRecordsDetector() {
-        super("Stale Statistics Records Detector",
-                (long) 
Config.statistics_outdated_record_detector_running_interval_in_minutes * 60 * 
1000);
-    }
-
-    @Override
-    protected void runAfterCatalogReady() {
-        Map<String, String> params = new HashMap<>();
-        params.put("expiredTime", 
String.valueOf(Config.statistics_records_outdated_time_in_ms));
-        StringSubstitutor stringSubstitutor = new StringSubstitutor(params);
-        List<ResultRow> resultBatches =
-                
StatisticsUtil.execStatisticQuery(stringSubstitutor.replace(FETCH_STALE_RECORDS_SQL_TEMPLATE));
-        List<AnalysisJobInfo> analysisJobInfos = null;
-        try {
-            analysisJobInfos = 
StatisticsUtil.deserializeToAnalysisJob(resultBatches);
-        } catch (TException e) {
-            LOG.warn("Deserialize returned thrift failed!", e);
-            return;
-        }
-        AnalysisJobScheduler analysisJobScheduler = 
Env.getCurrentEnv().getAnalysisJobScheduler();
-        analysisJobScheduler.scheduleJobs(analysisJobInfos);
-    }
-}
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/planner/OlapScanNode.java 
b/fe/fe-core/src/main/java/org/apache/doris/planner/OlapScanNode.java
index 00e32338c9..34acce0264 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/OlapScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/OlapScanNode.java
@@ -685,7 +685,6 @@ public class OlapScanNode extends ScanNode {
 
                 // for CBO
                 if (!collectedStat && replica.getRowCount() != -1) {
-                    cardinality += replica.getRowCount();
                     totalBytes += replica.getDataSize();
                     collectedStat = true;
                 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/DdlExecutor.java 
b/fe/fe-core/src/main/java/org/apache/doris/qe/DdlExecutor.java
index e1f1925053..d48f8fdfdf 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/DdlExecutor.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/DdlExecutor.java
@@ -118,6 +118,7 @@ import org.apache.doris.common.Config;
 import org.apache.doris.common.DdlException;
 import org.apache.doris.load.EtlJobType;
 import org.apache.doris.load.sync.SyncJobManager;
+import org.apache.doris.statistics.StatisticsRepository;
 
 /**
  * Use for execute ddl.
@@ -167,7 +168,7 @@ public class DdlExecutor {
         } else if (ddlStmt instanceof AlterTableStatsStmt) {
             
env.getStatisticsManager().alterTableStatistics((AlterTableStatsStmt) ddlStmt);
         } else if (ddlStmt instanceof AlterColumnStatsStmt) {
-            
env.getStatisticsManager().alterColumnStatistics((AlterColumnStatsStmt) 
ddlStmt);
+            StatisticsRepository.alterColumnStatistics((AlterColumnStatsStmt) 
ddlStmt);
         } else if (ddlStmt instanceof AlterViewStmt) {
             env.alterView((AlterViewStmt) ddlStmt);
         } else if (ddlStmt instanceof CancelAlterTableStmt) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/ShowExecutor.java 
b/fe/fe-core/src/main/java/org/apache/doris/qe/ShowExecutor.java
index 6eaf68c3bb..183487ee99 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/ShowExecutor.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/ShowExecutor.java
@@ -95,6 +95,7 @@ import org.apache.doris.analysis.ShowTrashStmt;
 import org.apache.doris.analysis.ShowUserPropertyStmt;
 import org.apache.doris.analysis.ShowVariablesStmt;
 import org.apache.doris.analysis.ShowViewStmt;
+import org.apache.doris.analysis.TableName;
 import org.apache.doris.backup.AbstractJob;
 import org.apache.doris.backup.BackupJob;
 import org.apache.doris.backup.Repository;
@@ -139,6 +140,7 @@ import org.apache.doris.common.ErrorReport;
 import org.apache.doris.common.FeConstants;
 import org.apache.doris.common.MarkedCountDownLatch;
 import org.apache.doris.common.MetaNotFoundException;
+import org.apache.doris.common.Pair;
 import org.apache.doris.common.PatternMatcher;
 import org.apache.doris.common.proc.BackendsProcDir;
 import org.apache.doris.common.proc.FrontendsProcNode;
@@ -176,7 +178,9 @@ import org.apache.doris.mtmv.MTMVJobManager;
 import org.apache.doris.mtmv.metadata.MTMVJob;
 import org.apache.doris.mtmv.metadata.MTMVTask;
 import org.apache.doris.mysql.privilege.PrivPredicate;
+import org.apache.doris.statistics.ColumnStatistic;
 import org.apache.doris.statistics.StatisticsJobManager;
+import org.apache.doris.statistics.StatisticsRepository;
 import org.apache.doris.system.Backend;
 import org.apache.doris.system.Diagnoser;
 import org.apache.doris.system.SystemInfoService;
@@ -596,7 +600,7 @@ public class ShowExecutor {
         for (BaseParam param : infos) {
             final int percent = (int) (param.getFloatParam(0) * 100f);
             rows.add(Lists.newArrayList(param.getStringParam(0), 
param.getStringParam(1), param.getStringParam(2),
-                                        String.valueOf(percent + "%")));
+                    String.valueOf(percent + "%")));
         }
 
         resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
@@ -836,7 +840,7 @@ public class ShowExecutor {
         PatternMatcher matcher = null;
         if (showStmt.getPattern() != null) {
             matcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),
-                                                        
CaseSensibility.VARIABLES.getCaseSensibility());
+                    CaseSensibility.VARIABLES.getCaseSensibility());
         }
         List<List<String>> rows = VariableMgr.dump(showStmt.getType(), 
ctx.getSessionVariable(), matcher);
         resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
@@ -888,7 +892,7 @@ public class ShowExecutor {
             } else {
                 if (showStmt.isView()) {
                     
ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_OBJECT, 
showStmt.getDb(),
-                                                        showStmt.getTable(), 
"VIEW");
+                            showStmt.getTable(), "VIEW");
                 }
                 rows.add(Lists.newArrayList(table.getName(), 
createTableStmt.get(0)));
                 resultSet = table.getType() != TableType.MATERIALIZED_VIEW
@@ -1041,7 +1045,7 @@ public class ShowExecutor {
             } else if (categories.size() > 1) {
                 // Send category list
                 resultSet = new ShowResultSet(helpStmt.getCategoryMetaData(),
-                                              
Lists.<List<String>>newArrayList(categories));
+                        Lists.<List<String>>newArrayList(categories));
             } else {
                 // Send topic list and sub-category list
                 List<List<String>> rows = Lists.newArrayList();
@@ -1318,9 +1322,9 @@ public class ShowExecutor {
                     tableName = routineLoadJob.getTableName();
                 } catch (MetaNotFoundException e) {
                     LOG.warn(new LogBuilder(LogKey.ROUTINE_LOAD_JOB, 
routineLoadJob.getId())
-                                     .add("error_msg", "The table metadata of 
job has been changed. "
-                                             + "The job will be cancelled 
automatically")
-                                     .build(), e);
+                            .add("error_msg", "The table metadata of job has 
been changed. "
+                                    + "The job will be cancelled 
automatically")
+                            .build(), e);
                 }
                 if (!Env.getCurrentEnv().getAuth()
                         .checkTblPriv(ConnectContext.get(), dbFullName, 
tableName, PrivPredicate.LOAD)) {
@@ -1339,7 +1343,7 @@ public class ShowExecutor {
         if (!Strings.isNullOrEmpty(showRoutineLoadStmt.getName()) && 
rows.size() == 0) {
             // if the jobName has been specified
             throw new AnalysisException("There is no job named " + 
showRoutineLoadStmt.getName()
-                                                + " in db " + 
showRoutineLoadStmt.getDbFullName()
+                    + " in db " + showRoutineLoadStmt.getDbFullName()
                     + ". Include history? " + 
showRoutineLoadStmt.isIncludeHistory());
         }
         resultSet = new ShowResultSet(showRoutineLoadStmt.getMetaData(), rows);
@@ -1359,7 +1363,7 @@ public class ShowExecutor {
         }
         if (routineLoadJob == null) {
             throw new AnalysisException("The job named " + 
showRoutineLoadTaskStmt.getJobName() + "does not exists "
-                                                + "or job state is stopped or 
cancelled");
+                    + "or job state is stopped or cancelled");
         }
 
         // check auth
@@ -1540,7 +1544,7 @@ public class ShowExecutor {
             } while (false);
 
             String detailCmd = String.format("SHOW PROC 
'/dbs/%d/%d/partitions/%d/%d/%d';",
-                                             dbId, tableId, partitionId, 
indexId, tabletId);
+                    dbId, tableId, partitionId, indexId, tabletId);
             rows.add(Lists.newArrayList(dbName, tableName, partitionName, 
indexName,
                     dbId.toString(), tableId.toString(),
                     partitionId.toString(), indexId.toString(),
@@ -1571,7 +1575,7 @@ public class ShowExecutor {
                 } else {
                     partitions = olapTable.getPartitions();
                 }
-                List<List<Comparable>> tabletInfos =  new ArrayList<>();
+                List<List<Comparable>> tabletInfos = new ArrayList<>();
                 String indexName = showStmt.getIndexName();
                 long indexId = -1;
                 if (indexName != null) {
@@ -2145,9 +2149,31 @@ public class ShowExecutor {
 
     private void handleShowColumnStats() throws AnalysisException {
         ShowColumnStatsStmt showColumnStatsStmt = (ShowColumnStatsStmt) stmt;
-        List<List<String>> results = Env.getCurrentEnv().getStatisticsManager()
-                .showColumnStatsList(showColumnStatsStmt);
-        resultSet = new ShowResultSet(showColumnStatsStmt.getMetaData(), 
results);
+        TableName tableName = showColumnStatsStmt.getTableName();
+        TableIf tableIf = showColumnStatsStmt.getTable();
+        if (!Env.getCurrentEnv().getAuth()
+                .checkTblPriv(ConnectContext.get(), tableName.getDb(), 
tableName.getTbl(), PrivPredicate.SHOW)) {
+            
ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, 
"Permission denied",
+                    ConnectContext.get().getQualifiedUser(), 
ConnectContext.get().getRemoteIP(),
+                    tableName.getDb() + ": " + tableName.getTbl());
+        }
+        List<Pair<String, ColumnStatistic>> columnStatistics = new 
ArrayList<>();
+        PartitionNames partitionNames = 
showColumnStatsStmt.getPartitionNames();
+        for (Column column : tableIf.getColumns()) {
+            String colName = column.getName();
+            if (partitionNames == null) {
+                ColumnStatistic columnStatistic =
+                        
StatisticsRepository.queryColumnStatisticsByName(tableIf.getId(), colName);
+                columnStatistics.add(Pair.of(column.getName(), 
columnStatistic));
+            } else {
+                
columnStatistics.addAll(StatisticsRepository.queryColumnStatisticsByPartitions(tableName,
+                        colName, 
showColumnStatsStmt.getPartitionNames().getPartitionNames())
+                        .stream().map(s -> Pair.of(colName, s))
+                        .collect(Collectors.toList()));
+            }
+
+        }
+        resultSet = showColumnStatsStmt.constructResultSet(columnStatistics);
     }
 
     public void handleShowSqlBlockRule() throws AnalysisException {
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJob.java 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJob.java
index 47c2665d4c..d34f030f87 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJob.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJob.java
@@ -18,15 +18,15 @@
 package org.apache.doris.statistics;
 
 import org.apache.doris.catalog.Column;
-import org.apache.doris.catalog.Database;
+import org.apache.doris.catalog.DatabaseIf;
 import org.apache.doris.catalog.Env;
 import org.apache.doris.catalog.Partition;
-import org.apache.doris.catalog.Table;
+import org.apache.doris.catalog.TableIf;
 import org.apache.doris.datasource.CatalogIf;
-import org.apache.doris.persist.AnalysisJobScheduler;
 import org.apache.doris.qe.ConnectContext;
 import org.apache.doris.qe.StmtExecutor;
 import org.apache.doris.statistics.AnalysisJobInfo.JobState;
+import org.apache.doris.statistics.util.StatisticsUtil;
 
 import org.apache.commons.text.StringSubstitutor;
 
@@ -42,11 +42,11 @@ public class AnalysisJob {
 
     private final AnalysisJobInfo info;
 
-    private CatalogIf catalog;
+    private CatalogIf<DatabaseIf> catalog;
 
-    private Database db;
+    private DatabaseIf<TableIf> db;
 
-    private Table tbl;
+    private TableIf tbl;
 
     private Column col;
 
@@ -65,7 +65,7 @@ public class AnalysisJob {
                     String.format("Catalog with name: %s not exists", 
info.dbName), System.currentTimeMillis());
             return;
         }
-        db = 
Env.getCurrentEnv().getInternalCatalog().getDb(info.dbName).orElse(null);
+        db = catalog.getDb(info.dbName).orElse(null);
         if (db == null) {
             analysisJobScheduler.updateJobStatus(info.jobId, JobState.FAILED,
                     String.format("DB with name %s not exists", info.dbName), 
System.currentTimeMillis());
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJobExecutor.java 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJobExecutor.java
index 6683305a91..c73bf349b8 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJobExecutor.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJobExecutor.java
@@ -20,7 +20,6 @@ package org.apache.doris.statistics;
 import org.apache.doris.common.Config;
 import org.apache.doris.common.ThreadPoolManager;
 import org.apache.doris.common.ThreadPoolManager.BlockedPolicy;
-import org.apache.doris.persist.AnalysisJobScheduler;
 import org.apache.doris.statistics.AnalysisJobInfo.JobState;
 import org.apache.doris.statistics.util.BlockingCounter;
 
@@ -76,8 +75,7 @@ public class AnalysisJobExecutor extends Thread {
             try {
                 AnalysisJobWrapper jobWrapper = jobQueue.take();
                 try {
-                    long timeout = TimeUnit.MINUTES.toMillis(5)
-                            - (System.currentTimeMillis() - 
jobWrapper.getStartTime());
+                    long timeout = 
StatisticConstants.STATISTICS_TASKS_TIMEOUT_IN_MS;
                     jobWrapper.get(timeout < 0 ? 0 : timeout, 
TimeUnit.MILLISECONDS);
                 } catch (Exception e) {
                     jobWrapper.cancel();
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/persist/AnalysisJobScheduler.java 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJobScheduler.java
similarity index 94%
rename from 
fe/fe-core/src/main/java/org/apache/doris/persist/AnalysisJobScheduler.java
rename to 
fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJobScheduler.java
index be308f1e55..734aa259e8 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/persist/AnalysisJobScheduler.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJobScheduler.java
@@ -15,15 +15,11 @@
 // specific language governing permissions and limitations
 // under the License.
 
-package org.apache.doris.persist;
+package org.apache.doris.statistics;
 
-import org.apache.doris.statistics.AnalysisJob;
-import org.apache.doris.statistics.AnalysisJobExecutor;
-import org.apache.doris.statistics.AnalysisJobInfo;
 import org.apache.doris.statistics.AnalysisJobInfo.JobState;
 import org.apache.doris.statistics.AnalysisJobInfo.JobType;
-import org.apache.doris.statistics.StatisticConstants;
-import org.apache.doris.statistics.StatisticsUtil;
+import org.apache.doris.statistics.util.StatisticsUtil;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.text.StringSubstitutor;
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJobWrapper.java 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJobWrapper.java
index 9337f0e07a..7db8aad2eb 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJobWrapper.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisJobWrapper.java
@@ -73,7 +73,7 @@ public class AnalysisJobWrapper extends FutureTask<Void> {
         } finally {
             executor.decr();
         }
-        return super.cancel(true);
+        return super.cancel(false);
     }
 
     public long getStartTime() {
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/ColumnStat.java 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/ColumnStat.java
index 2c78845e34..d6333a27a7 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/statistics/ColumnStat.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/statistics/ColumnStat.java
@@ -21,6 +21,7 @@ import org.apache.doris.analysis.LiteralExpr;
 import org.apache.doris.catalog.Type;
 import org.apache.doris.common.AnalysisException;
 import org.apache.doris.common.util.Util;
+import org.apache.doris.statistics.util.StatisticsUtil;
 
 import com.google.common.collect.Lists;
 
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/ColumnStatistic.java 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/ColumnStatistic.java
index 130114c0f6..a72ee6631f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/statistics/ColumnStatistic.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/statistics/ColumnStatistic.java
@@ -26,6 +26,7 @@ import org.apache.doris.catalog.Type;
 import org.apache.doris.datasource.CatalogIf;
 import org.apache.doris.qe.StmtExecutor;
 import org.apache.doris.statistics.util.InternalQueryResult.ResultRow;
+import org.apache.doris.statistics.util.StatisticsUtil;
 
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticConstants.java 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticConstants.java
index 2ca3a4b8bc..300387e75b 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticConstants.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticConstants.java
@@ -17,6 +17,8 @@
 
 package org.apache.doris.statistics;
 
+import java.util.concurrent.TimeUnit;
+
 public class StatisticConstants {
     public static final String STATISTIC_DB_NAME = "__internal_schema";
 
@@ -27,4 +29,38 @@ public class StatisticConstants {
     public static final int MAX_NAME_LEN = 64;
 
     public static final int ID_LEN = 4096;
+
+    public static final int STATISTIC_PARALLEL_EXEC_INSTANCE_NUM = 1;
+
+    public static final int STATISTICS_CACHE_VALID_DURATION_IN_HOURS = 24 * 2;
+
+    public static final int STATISTICS_CACHE_REFRESH_INTERVAL = 24 * 2;
+
+    /**
+     * Bucket count for the column_statistics and analysis_job tables.
+     */
+    public static final int STATISTIC_TABLE_BUCKET_COUNT = 7;
+
+    public static final long STATISTICS_MAX_MEM_PER_QUERY_IN_BYTES = 2L * 1024 
* 1024 * 1024;
+
+    /**
+     * Determine the execution interval of the 'Statistics Table Cleaner' thread.
+     */
+    public static final int STATISTIC_CLEAN_INTERVAL_IN_HOURS = 24 * 2;
+
+    /**
+     * If the statistics-related table creation fails, retry after the number of seconds below.
+     */
+    public static final int 
STATISTICS_TABLE_CREATION_RETRY_INTERVAL_IN_SECONDS = 5;
+
+    /**
+     * The max number of cached items in `StatisticsCache`.
+     */
+    public static final long STATISTICS_RECORDS_CACHE_SIZE = 100000;
+
+    /**
+     * If an analysis job's execution time exceeds this limit, it will be cancelled.
+     */
+    public static final long STATISTICS_TASKS_TIMEOUT_IN_MS = 
TimeUnit.MINUTES.toMillis(10);
+
 }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticStorageInitializer.java
 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticStorageInitializer.java
index 32f4feb56a..fc80610b06 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticStorageInitializer.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticStorageInitializer.java
@@ -33,6 +33,7 @@ import org.apache.doris.cluster.ClusterNamespace;
 import org.apache.doris.common.Config;
 import org.apache.doris.common.DdlException;
 import org.apache.doris.common.UserException;
+import org.apache.doris.statistics.util.StatisticsUtil;
 import org.apache.doris.system.SystemInfoService;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -57,7 +58,8 @@ public class StatisticStorageInitializer extends Thread {
         }
         while (true) {
             try {
-                
Thread.currentThread().join(Config.statistics_table_creation_retry_interval_in_seconds
 * 1000L);
+                Thread.currentThread()
+                        
.join(StatisticConstants.STATISTICS_TABLE_CREATION_RETRY_INTERVAL_IN_SECONDS * 
1000L);
                 createDB();
                 createTbl();
                 break;
@@ -110,7 +112,8 @@ public class StatisticStorageInitializer extends Thread {
         KeysDesc keysDesc = new KeysDesc(KeysType.UNIQUE_KEYS,
                 Lists.newArrayList("id"));
 
-        DistributionDesc distributionDesc = new 
HashDistributionDesc(Config.statistic_table_bucket_count,
+        DistributionDesc distributionDesc = new HashDistributionDesc(
+                StatisticConstants.STATISTIC_TABLE_BUCKET_COUNT,
                 Lists.newArrayList("id"));
         Map<String, String> properties = new HashMap<String, String>() {
             {
@@ -145,7 +148,8 @@ public class StatisticStorageInitializer extends Thread {
         KeysDesc keysDesc = new KeysDesc(KeysType.UNIQUE_KEYS,
                 Lists.newArrayList("job_id"));
 
-        DistributionDesc distributionDesc = new 
HashDistributionDesc(Config.statistic_table_bucket_count,
+        DistributionDesc distributionDesc = new HashDistributionDesc(
+                StatisticConstants.STATISTIC_TABLE_BUCKET_COUNT,
                 Lists.newArrayList("job_id"));
         Map<String, String> properties = new HashMap<String, String>() {
             {
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsCache.java 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsCache.java
index 86c60dddcc..0b39cde495 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsCache.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsCache.java
@@ -17,7 +17,6 @@
 
 package org.apache.doris.statistics;
 
-import org.apache.doris.common.Config;
 import org.apache.doris.qe.ConnectContext;
 
 import com.github.benmanes.caffeine.cache.AsyncLoadingCache;
@@ -33,9 +32,9 @@ public class StatisticsCache {
     private static final Logger LOG = 
LogManager.getLogger(StatisticsCache.class);
 
     private final AsyncLoadingCache<StatisticsCacheKey, ColumnStatistic> cache 
= Caffeine.newBuilder()
-            .maximumSize(Config.statistics_cache_max_size)
-            
.expireAfterAccess(Duration.ofHours(Config.statistics_cache_valid_duration_in_hours))
-            
.refreshAfterWrite(Duration.ofHours(Config.statistics_cache_refresh_interval))
+            .maximumSize(StatisticConstants.STATISTICS_RECORDS_CACHE_SIZE)
+            
.expireAfterAccess(Duration.ofHours(StatisticConstants.STATISTICS_CACHE_VALID_DURATION_IN_HOURS))
+            
.refreshAfterWrite(Duration.ofHours(StatisticConstants.STATISTICS_CACHE_REFRESH_INTERVAL))
             .buildAsync(new StatisticsCacheLoader());
 
     public ColumnStatistic getColumnStatistics(long tblId, String colName) {
@@ -59,4 +58,8 @@ public class StatisticsCache {
     public void eraseExpiredCache(long tblId, String colName) {
         cache.synchronous().invalidate(new StatisticsCacheKey(tblId, colName));
     }
+
+    public void updateCache(long tblId, String colName, ColumnStatistic 
statistic) {
+        cache.synchronous().put(new StatisticsCacheKey(tblId, colName), 
statistic);
+    }
 }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsCacheLoader.java
 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsCacheLoader.java
index 0e806b13a4..a2e2033481 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsCacheLoader.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsCacheLoader.java
@@ -18,6 +18,7 @@
 package org.apache.doris.statistics;
 
 import org.apache.doris.statistics.util.InternalQueryResult.ResultRow;
+import org.apache.doris.statistics.util.StatisticsUtil;
 
 import com.github.benmanes.caffeine.cache.AsyncCacheLoader;
 import org.apache.commons.collections.CollectionUtils;
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsManager.java 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsManager.java
index cbee4fb0eb..2c9a856a47 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsManager.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsManager.java
@@ -20,7 +20,6 @@ package org.apache.doris.statistics;
 import org.apache.doris.analysis.AlterColumnStatsStmt;
 import org.apache.doris.analysis.AlterTableStatsStmt;
 import org.apache.doris.analysis.DropTableStatsStmt;
-import org.apache.doris.analysis.ShowColumnStatsStmt;
 import org.apache.doris.analysis.ShowTableStatsStmt;
 import org.apache.doris.analysis.TableName;
 import org.apache.doris.catalog.Column;
@@ -297,41 +296,6 @@ public class StatisticsManager {
         return result;
     }
 
-    /**
-     * Get the column statistics of a table. if specified partition name,
-     * get the column statistics of the partition.
-     *
-     * @param stmt statement
-     * @return column statistics for  a partition or table
-     * @throws AnalysisException statistics not exist
-     */
-    public List<List<String>> showColumnStatsList(ShowColumnStatsStmt stmt) 
throws AnalysisException {
-        TableName tableName = stmt.getTableName();
-        List<String> partitionNames = stmt.getPartitionNames();
-
-        // check meta
-        Table table = validateTableName(tableName);
-
-        // check priv
-        if (!Env.getCurrentEnv().getAuth()
-                .checkTblPriv(ConnectContext.get(), tableName.getDb(), 
tableName.getTbl(), PrivPredicate.SHOW)) {
-            
ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, 
"SHOW CREATE TABLE",
-                    ConnectContext.get().getQualifiedUser(), 
ConnectContext.get().getRemoteIP(),
-                    tableName.getDb() + ": " + tableName.getTbl());
-        }
-
-        if (partitionNames.isEmpty()) {
-            return showColumnStats(table.getId());
-        }
-
-        List<List<String>> result = Lists.newArrayList();
-        for (String partitionName : partitionNames) {
-            validatePartitionName(table, partitionName);
-            result.addAll(showColumnStats(table.getId(), partitionName));
-        }
-        return result;
-    }
-
     private List<String> showTableStats(Table table) throws AnalysisException {
         TableStats tableStats = statistics.getTableStats(table.getId());
         if (tableStats == null) {
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsRepository.java
 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsRepository.java
new file mode 100644
index 0000000000..1e86b5a7ac
--- /dev/null
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsRepository.java
@@ -0,0 +1,220 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.statistics;
+
+import org.apache.doris.analysis.AlterColumnStatsStmt;
+import org.apache.doris.analysis.AnalyzeStmt;
+import org.apache.doris.analysis.TableName;
+import org.apache.doris.catalog.Column;
+import org.apache.doris.catalog.Env;
+import org.apache.doris.catalog.Partition;
+import org.apache.doris.common.AnalysisException;
+import org.apache.doris.statistics.AnalysisJobInfo.AnalysisType;
+import org.apache.doris.statistics.AnalysisJobInfo.JobState;
+import org.apache.doris.statistics.AnalysisJobInfo.ScheduleType;
+import org.apache.doris.statistics.util.DBObjects;
+import org.apache.doris.statistics.util.InternalQueryResult.ResultRow;
+import org.apache.doris.statistics.util.StatisticsUtil;
+import org.apache.doris.system.SystemInfoService;
+
+import org.apache.commons.text.StringSubstitutor;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.StringJoiner;
+import java.util.stream.Collectors;
+
+/**
+ * All the logic that interacts with the internal statistics tables should be placed here.
+ */
+public class StatisticsRepository {
+
+    private static final Logger LOG = 
LogManager.getLogger(StatisticsRepository.class);
+
+    private static final String FULL_QUALIFIED_DB_NAME = "`" + 
SystemInfoService.DEFAULT_CLUSTER + ":"
+            + StatisticConstants.STATISTIC_DB_NAME + "`";
+
+    private static final String FULL_QUALIFIED_COLUMN_STATISTICS_NAME = 
FULL_QUALIFIED_DB_NAME + "."
+            + "`" + StatisticConstants.STATISTIC_TBL_NAME + "`";
+
+    private static final String FULL_QUALIFIED_ANALYSIS_JOB_TABLE_NAME = 
FULL_QUALIFIED_DB_NAME + "."
+            + "`" + StatisticConstants.ANALYSIS_JOB_TABLE + "`";
+
+    private static final String FETCH_COLUMN_STATISTIC_TEMPLATE = "SELECT * 
FROM "
+            + FULL_QUALIFIED_COLUMN_STATISTICS_NAME
+            + " WHERE `id` = '${id}'";
+
+    private static final String FETCH_PARTITIONS_STATISTIC_TEMPLATE = "SELECT 
* FROM "
+            + FULL_QUALIFIED_COLUMN_STATISTICS_NAME
+            + " WHERE `id` IN (${idList})";
+
+    private static final String PERSIST_ANALYSIS_JOB_SQL_TEMPLATE = "INSERT 
INTO "
+            + FULL_QUALIFIED_ANALYSIS_JOB_TABLE_NAME + " VALUES(${jobId}, 
'${catalogName}', '${dbName}',"
+            + "'${tblName}','${colName}', '${jobType}', '${analysisType}', 
'${message}', '${lastExecTimeInMs}',"
+            + "'${state}', '${scheduleType}')";
+
+    private static final String INSERT_INTO_COLUMN_STATISTICS = "INSERT INTO "
+            + FULL_QUALIFIED_COLUMN_STATISTICS_NAME + " VALUES('${id}', 
${catalogId}, ${dbId}, ${tblId}, '${colId}',"
+            + "${partId}, ${count}, ${ndv}, ${nullCount}, '${min}', '${max}', 
${dataSize}, NOW())";
+
+    public static ColumnStatistic queryColumnStatisticsByName(long tableId, 
String colName) {
+        ResultRow resultRow = queryColumnStatisticById(tableId, colName);
+        if (resultRow == null) {
+            return ColumnStatistic.UNKNOWN;
+        }
+        return ColumnStatistic.fromResultRow(resultRow);
+    }
+
+    public static List<ColumnStatistic> 
queryColumnStatisticsByPartitions(TableName tableName, String colName,
+            List<String> partitionNames) throws AnalysisException {
+        DBObjects dbObjects = 
StatisticsUtil.convertTableNameToObjects(tableName);
+        Set<Long> partitionIds = new HashSet<>();
+        for (String partitionName : partitionNames) {
+            Partition partition = dbObjects.table.getPartition(partitionName);
+            if (partition == null) {
+                throw new AnalysisException(String.format("partition:%s not 
exists", partitionName));
+            }
+            partitionIds.add(partition.getId());
+        }
+        return queryPartitionStatistics(dbObjects.table.getId(),
+                    colName, 
partitionIds).stream().map(ColumnStatistic::fromResultRow).collect(
+                    Collectors.toList());
+    }
+
+    public static ResultRow queryColumnStatisticById(long tblId, String 
colName) {
+        Map<String, String> map = new HashMap<>();
+        String id = constructId(tblId, colName);
+        map.put("id", id);
+        List<ResultRow> rows = 
StatisticsUtil.executeQuery(FETCH_COLUMN_STATISTIC_TEMPLATE, map);
+        int size = rows.size();
+        if (size > 1) {
+            throw new IllegalStateException(String.format("id: %s should be 
unique, but return more than one row", id));
+        }
+        return size == 0 ? null : rows.get(0);
+    }
+
+    public static List<ResultRow> queryPartitionStatistics(long tblId, String 
colName, Set<Long> partIds) {
+        StringJoiner sj = new StringJoiner(",");
+        for (Long partId : partIds) {
+            sj.add("'" + constructId(tblId, colName, partId) + "'");
+        }
+        Map<String, String> params = new HashMap<>();
+        params.put("idList", sj.toString());
+        List<ResultRow> rows = 
StatisticsUtil.executeQuery(FETCH_PARTITIONS_STATISTIC_TEMPLATE, params);
+        return rows == null ? Collections.emptyList() : rows;
+    }
+
+    private static String constructId(Object... params) {
+        StringJoiner stringJoiner = new StringJoiner("-");
+        for (Object param : params) {
+            stringJoiner.add(param.toString());
+        }
+        return stringJoiner.toString();
+    }
+
+    public static void createAnalysisJob(AnalyzeStmt analyzeStmt) {
+        String catalogName = analyzeStmt.getCatalogName();
+        String db = analyzeStmt.getDBName();
+        String tbl = analyzeStmt.getTblName();
+        List<String> colNames = analyzeStmt.getOptColumnNames();
+
+        if (colNames != null) {
+            for (String colName : colNames) {
+                AnalysisJobInfo analysisJobInfo = new 
AnalysisJobInfo(Env.getCurrentEnv().getNextId(), catalogName, db,
+                        tbl, colName, AnalysisJobInfo.JobType.MANUAL, 
ScheduleType.ONCE);
+                analysisJobInfo.analysisType = AnalysisType.FULL;
+                Map<String, String> params = new HashMap<>();
+                params.put("jobId", String.valueOf(analysisJobInfo.jobId));
+                params.put("catalogName", analysisJobInfo.catalogName);
+                params.put("dbName", analysisJobInfo.dbName);
+                params.put("tblName", analysisJobInfo.tblName);
+                params.put("colName", analysisJobInfo.colName);
+                params.put("jobType", analysisJobInfo.jobType.toString());
+                params.put("analysisType", 
analysisJobInfo.analysisType.toString());
+                params.put("message", "");
+                params.put("lastExecTimeInMs", "0");
+                params.put("state", JobState.PENDING.toString());
+                params.put("scheduleType", 
analysisJobInfo.scheduleType.toString());
+                try {
+                    StatisticsUtil.execUpdate(
+                            new 
StringSubstitutor(params).replace(PERSIST_ANALYSIS_JOB_SQL_TEMPLATE));
+                } catch (Exception e) {
+                    LOG.warn("Failed to persist job for column: {}", colName,
e);
+                    return;
+                }
+                
Env.getCurrentEnv().getAnalysisJobScheduler().schedule(analysisJobInfo);
+            }
+        }
+    }
+
+    public static void alterColumnStatistics(AlterColumnStatsStmt alterColumnStatsStmt) throws Exception {
+        TableName tableName = alterColumnStatsStmt.getTableName();
+        DBObjects objects = StatisticsUtil.convertTableNameToObjects(tableName);
+        String rowCount = alterColumnStatsStmt.getValue(StatsType.ROW_COUNT);
+        String ndv = alterColumnStatsStmt.getValue(StatsType.NDV);
+        String nullCount = alterColumnStatsStmt.getValue(StatsType.NUM_NULLS);
+        String min = alterColumnStatsStmt.getValue(StatsType.MIN_VALUE);
+        String max = alterColumnStatsStmt.getValue(StatsType.MAX_VALUE);
+        String dataSize = alterColumnStatsStmt.getValue(StatsType.DATA_SIZE);
+        ColumnStatisticBuilder builder = new ColumnStatisticBuilder();
+        String colName = alterColumnStatsStmt.getColumnName();
+        Column column = objects.table.getColumn(colName);
+        if (rowCount != null) {
+            builder.setCount(Double.parseDouble(rowCount));
+        }
+        if (ndv != null) {
+            builder.setNdv(Double.parseDouble(ndv));
+        }
+        if (nullCount != null) {
+            builder.setNumNulls(Double.parseDouble(nullCount));
+        }
+        if (min != null) {
+            builder.setMinExpr(StatisticsUtil.readableValue(column.getType(), min));
+            builder.setMinValue(StatisticsUtil.convertStringToDouble(min));
+        }
+        if (max != null) {
+            builder.setMaxExpr(StatisticsUtil.readableValue(column.getType(), max));
+            builder.setMaxValue(StatisticsUtil.convertStringToDouble(max));
+        }
+        if (dataSize != null) {
+            builder.setDataSize(Double.parseDouble(dataSize));
+        }
+        ColumnStatistic columnStatistic = builder.build();
+        Map<String, String> params = new HashMap<>();
+        params.put("id", constructId(objects.table.getId(), colName));
+        params.put("catalogId", String.valueOf(objects.catalog.getId()));
+        params.put("dbId", String.valueOf(objects.db.getId()));
+        params.put("tblId", String.valueOf(objects.table.getId()));
+        params.put("colId", String.valueOf(colName));
+        params.put("partId", "NULL");
+        params.put("count", String.valueOf(columnStatistic.count));
+        params.put("ndv", String.valueOf(columnStatistic.ndv));
+        params.put("nullCount", String.valueOf(columnStatistic.numNulls));
+        params.put("min", min == null ? "NULL" : min);
+        params.put("max", max == null ? "NULL" : max);
+        params.put("dataSize", String.valueOf(columnStatistic.dataSize));
+        StatisticsUtil.execUpdate(INSERT_INTO_COLUMN_STATISTICS, params);
+        Env.getCurrentEnv().getStatisticsCache().updateCache(objects.table.getId(), colName, builder.build());
+    }
+}
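
(For reference: the job-persistence path above fills a parameter map and expands a ${placeholder} SQL template with Apache Commons Text's StringSubstitutor, the same call used by StatisticsUtil.executeQuery/execUpdate. Below is a minimal standalone sketch of that mechanism; the template text and column list are hypothetical simplifications, not the real PERSIST_ANALYSIS_JOB_SQL_TEMPLATE, which targets the __internal_schema.analysis_jobs table.)

    // Sketch only, not part of this patch.
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.commons.text.StringSubstitutor;

    public class TemplateSubstitutionSketch {

        // Hypothetical template; the real statement and its column list live in StatisticsRepository.
        private static final String TEMPLATE =
                "INSERT INTO __internal_schema.analysis_jobs VALUES "
                        + "('${jobId}', '${catalogName}', '${dbName}', '${tblName}', '${colName}', '${state}')";

        public static void main(String[] args) {
            Map<String, String> params = new HashMap<>();
            params.put("jobId", "12345");
            params.put("catalogName", "internal");
            params.put("dbName", "test_db");
            params.put("tblName", "statistics_test");
            params.put("colName", "col1");
            params.put("state", "PENDING");
            // Every ${key} in the template is replaced by the matching map value.
            String sql = new StringSubstitutor(params).replace(TEMPLATE);
            System.out.println(sql);
        }
    }
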
diff --git a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsTableCleaner.java b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsTableCleaner.java
index e047565c38..3687ff0c13 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsTableCleaner.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsTableCleaner.java
@@ -22,8 +22,8 @@ import org.apache.doris.catalog.Database;
 import org.apache.doris.catalog.Env;
 import org.apache.doris.catalog.OlapTable;
 import org.apache.doris.catalog.Table;
-import org.apache.doris.common.Config;
 import org.apache.doris.common.util.MasterDaemon;
+import org.apache.doris.statistics.util.StatisticsUtil;
 
 import org.apache.commons.text.StringSubstitutor;
 import org.apache.logging.log4j.LogManager;
@@ -45,7 +45,8 @@ public class StatisticsTableCleaner extends MasterDaemon {
     private static final Logger LOG = LogManager.getLogger(StatisticsTableCleaner.class);
 
     public StatisticsTableCleaner() {
-        super("Statistics Table Cleaner", (long) Config.statistic_clean_interval_in_hours * 3600 * 1000);
+        super("Statistics Table Cleaner",
+                StatisticConstants.STATISTIC_CLEAN_INTERVAL_IN_HOURS * 3600 * 1000);
     }
 
     @Override
diff --git a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticConstants.java b/fe/fe-core/src/main/java/org/apache/doris/statistics/util/DBObjects.java
similarity index 61%
copy from fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticConstants.java
copy to fe/fe-core/src/main/java/org/apache/doris/statistics/util/DBObjects.java
index 2ca3a4b8bc..3e68d0e027 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticConstants.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/statistics/util/DBObjects.java
@@ -15,16 +15,24 @@
 // specific language governing permissions and limitations
 // under the License.
 
-package org.apache.doris.statistics;
+package org.apache.doris.statistics.util;
 
-public class StatisticConstants {
-    public static final String STATISTIC_DB_NAME = "__internal_schema";
+import org.apache.doris.catalog.DatabaseIf;
+import org.apache.doris.catalog.TableIf;
+import org.apache.doris.datasource.CatalogIf;
 
-    public static final String STATISTIC_TBL_NAME = "column_statistics";
+public class DBObjects {
 
-    public static final String ANALYSIS_JOB_TABLE = "analysis_jobs";
+    public final CatalogIf<DatabaseIf> catalog;
 
-    public static final int MAX_NAME_LEN = 64;
+    public final DatabaseIf<TableIf> db;
 
-    public static final int ID_LEN = 4096;
+    public final TableIf table;
+
+    public DBObjects(CatalogIf<DatabaseIf> catalog,
+            DatabaseIf<TableIf> db, TableIf table) {
+        this.catalog = catalog;
+        this.db = db;
+        this.table = table;
+    }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsUtil.java b/fe/fe-core/src/main/java/org/apache/doris/statistics/util/StatisticsUtil.java
similarity index 76%
rename from fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsUtil.java
rename to fe/fe-core/src/main/java/org/apache/doris/statistics/util/StatisticsUtil.java
index 9db415f536..511a11fdfa 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/statistics/StatisticsUtil.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/statistics/util/StatisticsUtil.java
@@ -15,7 +15,8 @@
 // specific language governing permissions and limitations
 // under the License.
 
-package org.apache.doris.statistics;
+package org.apache.doris.statistics.util;
+
 
 import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.BoolLiteral;
@@ -27,44 +28,72 @@ import org.apache.doris.analysis.LargeIntLiteral;
 import org.apache.doris.analysis.LiteralExpr;
 import org.apache.doris.analysis.StatementBase;
 import org.apache.doris.analysis.StringLiteral;
+import org.apache.doris.analysis.TableName;
 import org.apache.doris.analysis.UserIdentity;
+import org.apache.doris.catalog.DatabaseIf;
 import org.apache.doris.catalog.Env;
 import org.apache.doris.catalog.PrimitiveType;
 import org.apache.doris.catalog.ScalarType;
+import org.apache.doris.catalog.TableIf;
 import org.apache.doris.catalog.Type;
 import org.apache.doris.common.AnalysisException;
-import org.apache.doris.common.Config;
 import org.apache.doris.common.UserException;
+import org.apache.doris.datasource.CatalogIf;
 import org.apache.doris.nereids.trees.expressions.literal.DateTimeLiteral;
 import org.apache.doris.qe.ConnectContext;
 import org.apache.doris.qe.SessionVariable;
 import org.apache.doris.qe.StmtExecutor;
+import org.apache.doris.statistics.AnalysisJobInfo;
+import org.apache.doris.statistics.ColumnStatistic;
+import org.apache.doris.statistics.StatisticConstants;
 import org.apache.doris.statistics.util.InternalQueryResult.ResultRow;
 import org.apache.doris.system.SystemInfoService;
 import org.apache.doris.thrift.TUniqueId;
 
 import com.google.common.base.Preconditions;
+import org.apache.commons.text.StringSubstitutor;
 import org.apache.thrift.TException;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 import java.util.UUID;
 import java.util.stream.Collectors;
 
 public class StatisticsUtil {
 
+    public static List<ResultRow> executeQuery(String template, Map<String, String> params) {
+        StringSubstitutor stringSubstitutor = new StringSubstitutor(params);
+        String sql = stringSubstitutor.replace(template);
+        return execStatisticQuery(sql);
+    }
+
+    public static void execUpdate(String template, Map<String, String> params) throws Exception {
+        StringSubstitutor stringSubstitutor = new StringSubstitutor(params);
+        String sql = stringSubstitutor.replace(template);
+        execUpdate(sql);
+    }
+
     public static List<ResultRow> execStatisticQuery(String sql) {
         ConnectContext connectContext = StatisticsUtil.buildConnectContext();
-        StmtExecutor stmtExecutor = new StmtExecutor(connectContext, sql);
-        connectContext.setExecutor(stmtExecutor);
-        return stmtExecutor.executeInternalQuery();
+        try {
+            StmtExecutor stmtExecutor = new StmtExecutor(connectContext, sql);
+            connectContext.setExecutor(stmtExecutor);
+            return stmtExecutor.executeInternalQuery();
+        } finally {
+            connectContext.kill(false);
+        }
     }
 
     public static void execUpdate(String sql) throws Exception {
         ConnectContext connectContext = StatisticsUtil.buildConnectContext();
-        StmtExecutor stmtExecutor = new StmtExecutor(connectContext, sql);
-        connectContext.setExecutor(stmtExecutor);
-        stmtExecutor.execute();
+        try {
+            StmtExecutor stmtExecutor = new StmtExecutor(connectContext, sql);
+            connectContext.setExecutor(stmtExecutor);
+            stmtExecutor.execute();
+        } finally {
+            connectContext.kill(false);
+        }
     }
 
     // TODO: finish this.
@@ -81,9 +110,9 @@ public class StatisticsUtil {
         ConnectContext connectContext = new ConnectContext();
         SessionVariable sessionVariable = connectContext.getSessionVariable();
         sessionVariable.internalSession = true;
-        sessionVariable.setMaxExecMemByte(Config.statistics_max_mem_per_query_in_bytes);
+        sessionVariable.setMaxExecMemByte(StatisticConstants.STATISTICS_MAX_MEM_PER_QUERY_IN_BYTES);
         sessionVariable.setEnableInsertStrict(true);
-        sessionVariable.parallelExecInstanceNum = Config.statistic_parallel_exec_instance_num;
+        sessionVariable.parallelExecInstanceNum = StatisticConstants.STATISTIC_PARALLEL_EXEC_INSTANCE_NUM;
         connectContext.setEnv(Env.getCurrentEnv());
         connectContext.setDatabase(StatisticConstants.STATISTIC_DB_NAME);
         connectContext.setQualifiedUser(UserIdentity.ROOT.getQualifiedUser());
@@ -216,4 +245,19 @@ public class StatisticsUtil {
         return (double) v;
     }
 
+    public static DBObjects convertTableNameToObjects(TableName tableName) {
+        CatalogIf<DatabaseIf> catalogIf = Env.getCurrentEnv().getCatalogMgr().getCatalog(tableName.getCtl());
+        if (catalogIf == null) {
+            throw new IllegalStateException(String.format("Catalog:%s doesn't exist", tableName.getCtl()));
+        }
+        DatabaseIf<TableIf> databaseIf = catalogIf.getDbNullable(tableName.getDb());
+        if (databaseIf == null) {
+            throw new IllegalStateException(String.format("DB:%s doesn't exist", tableName.getDb()));
+        }
+        TableIf tableIf = databaseIf.getTableNullable(tableName.getTbl());
+        if (tableIf == null) {
+            throw new IllegalStateException(String.format("Table:%s doesn't exist", tableName.getTbl()));
+        }
+        return new DBObjects(catalogIf, databaseIf, tableIf);
+    }
 }
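
(Both execStatisticQuery and execUpdate above now wrap execution in try/finally so the internal ConnectContext is released even when the statement throws; the real cleanup call is connectContext.kill(false). A minimal sketch of that pattern, using a hypothetical stand-in context type rather than the Doris API:)

    // Sketch only, not part of this patch.
    import java.util.Arrays;
    import java.util.List;

    public class CleanupPatternSketch {

        // Hypothetical stand-in for the internal query context.
        static class FakeContext {
            void kill(boolean killConnection) {
                System.out.println("context released");
            }
        }

        static List<String> runInternalQuery(String sql) {
            FakeContext ctx = new FakeContext();
            try {
                // ... build an executor bound to ctx and run the query ...
                return Arrays.asList("row1", "row2");
            } finally {
                // Runs on both the normal and the exceptional path.
                ctx.kill(false);
            }
        }

        public static void main(String[] args) {
            System.out.println(runInternalQuery("SELECT 1"));
        }
    }
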
diff --git a/fe/fe-core/src/test/java/org/apache/doris/cluster/DecommissionBackendTest.java b/fe/fe-core/src/test/java/org/apache/doris/cluster/DecommissionBackendTest.java
index 959512792e..87a6f851c5 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/cluster/DecommissionBackendTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/cluster/DecommissionBackendTest.java
@@ -18,10 +18,13 @@
 package org.apache.doris.cluster;
 
 import org.apache.doris.analysis.AlterSystemStmt;
+import org.apache.doris.catalog.Database;
 import org.apache.doris.catalog.Env;
 import org.apache.doris.common.Config;
 import org.apache.doris.common.FeConstants;
+import org.apache.doris.statistics.StatisticConstants;
 import org.apache.doris.system.Backend;
+import org.apache.doris.system.SystemInfoService;
 import org.apache.doris.utframe.TestWithFeService;
 
 import com.google.common.collect.ImmutableMap;
@@ -83,7 +86,18 @@ public class DecommissionBackendTest extends TestWithFeService {
         }
 
         Assertions.assertEquals(backendNum() - 1, Env.getCurrentSystemInfo().getIdToBackend().size());
-        Assertions.assertEquals(tabletNum + Config.statistic_table_bucket_count * 2,
+
+        Database db = null;
+        long waitLimitMs = 5 * 1000;
+        do {
+            db = Env.getCurrentEnv().getInternalCatalog()
+                    .getDb(SystemInfoService.DEFAULT_CLUSTER + ":" + StatisticConstants.STATISTIC_DB_NAME)
+                    .orElse(null);
+            Thread.sleep(100);
+            waitLimitMs -= 100;
+        } while (db == null && waitLimitMs > 0);
+        // For now, we have two pre-built internal statistics tables: analysis_jobs and column_statistics
+        Assertions.assertEquals(tabletNum + StatisticConstants.STATISTIC_TABLE_BUCKET_COUNT * 2,
                 Env.getCurrentInvertedIndex().getTabletMetaMap().size());
 
     }
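
(The test above now polls for the statistics database instead of assuming it already exists before checking tablet counts. A generic version of that bounded polling loop, as a standalone sketch; the names below are illustrative, not Doris APIs:)

    // Sketch only, not part of this patch.
    import java.util.function.Supplier;

    public class WaitUntilSketch {

        // Poll every 100 ms until the supplier yields non-null or the limit expires.
        static <T> T waitFor(Supplier<T> supplier, long limitMs) throws InterruptedException {
            T value;
            do {
                value = supplier.get();
                Thread.sleep(100);
                limitMs -= 100;
            } while (value == null && limitMs > 0);
            return value; // may still be null, mirroring the loop in the test
        }

        public static void main(String[] args) throws InterruptedException {
            long start = System.currentTimeMillis();
            String db = waitFor(() -> System.currentTimeMillis() - start > 300 ? "ready" : null, 1000);
            System.out.println(db);
        }
    }
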
diff --git a/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalysisJobExecutorTest.java b/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalysisJobExecutorTest.java
index 4ab1a2b63b..6ee7a09495 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalysisJobExecutorTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalysisJobExecutorTest.java
@@ -19,7 +19,6 @@ package org.apache.doris.statistics;
 
 import org.apache.doris.catalog.Env;
 import org.apache.doris.common.jmockit.Deencapsulation;
-import org.apache.doris.persist.AnalysisJobScheduler;
 import org.apache.doris.statistics.AnalysisJobInfo.JobType;
 import org.apache.doris.statistics.AnalysisJobInfo.ScheduleType;
 import org.apache.doris.statistics.util.BlockingCounter;
diff --git a/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalysisJobTest.java b/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalysisJobTest.java
index 43dd1374e4..b0e5207f29 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalysisJobTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/statistics/AnalysisJobTest.java
@@ -18,11 +18,11 @@
 package org.apache.doris.statistics;
 
 import org.apache.doris.catalog.Env;
-import org.apache.doris.persist.AnalysisJobScheduler;
 import org.apache.doris.qe.ConnectContext;
 import org.apache.doris.qe.StmtExecutor;
 import org.apache.doris.statistics.AnalysisJobInfo.JobType;
 import org.apache.doris.statistics.AnalysisJobInfo.ScheduleType;
+import org.apache.doris.statistics.util.StatisticsUtil;
 import org.apache.doris.utframe.TestWithFeService;
 
 import mockit.Expectations;
diff --git a/fe/fe-core/src/test/java/org/apache/doris/statistics/CacheTest.java b/fe/fe-core/src/test/java/org/apache/doris/statistics/CacheTest.java
index 57c8e863e3..eb45e9c681 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/statistics/CacheTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/statistics/CacheTest.java
@@ -20,6 +20,7 @@ package org.apache.doris.statistics;
 import org.apache.doris.catalog.Column;
 import org.apache.doris.catalog.PrimitiveType;
 import org.apache.doris.statistics.util.InternalQueryResult.ResultRow;
+import org.apache.doris.statistics.util.StatisticsUtil;
 import org.apache.doris.utframe.TestWithFeService;
 
 import mockit.Expectations;
diff --git a/regression-test/data/statistics/alter_col_stats.out b/regression-test/data/statistics/alter_col_stats.out
new file mode 100644
index 0000000000..4a304bd935
--- /dev/null
+++ b/regression-test/data/statistics/alter_col_stats.out
@@ -0,0 +1,11 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !sql --
+col1   3.0     3.0     0.0     3.0     0.0     6.9895866216790098E18   7.1337018097548657E18   'a'     'c'
+col2   3.0     3.0     0.0     48.0    0.0     2.0110101E13    2.0130101E13    '2011-01-01'    '2013-01-01'
+id     3.0     3.0     0.0     24.0    0.0     1.0     3.0     1       3
+
+-- !sql2 --
+col1   114.0   1.48064528E8    0.0     511.0   0.0     3.5308221078584689E18   3.8911100780481085E18   '1'     '6'
+col2   3.0     3.0     0.0     48.0    0.0     2.0110101E13    2.0130101E13    '2011-01-01'    '2013-01-01'
+id     3.0     3.0     0.0     24.0    0.0     1.0     3.0     1       3
+
diff --git a/regression-test/suites/statistics/alter_col_stats.groovy b/regression-test/suites/statistics/alter_col_stats.groovy
new file mode 100644
index 0000000000..554016ef73
--- /dev/null
+++ b/regression-test/suites/statistics/alter_col_stats.groovy
@@ -0,0 +1,53 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("alter_column_stats") {
+    sql """DROP TABLE IF EXISTS statistics_test"""
+    sql """
+        CREATE TABLE statistics_test (
+            `id` BIGINT,
+            `col1` VARCHAR,
+            `col2` DATE
+        ) DUPLICATE KEY(`id`)
+        DISTRIBUTED BY HASH(`id`) BUCKETS 3
+        PROPERTIES (
+            "replication_num"="1"
+        );
+    """
+
+    sql """INSERT INTO statistics_test VALUES(1, 'a', '2011-01-01')"""
+    sql """INSERT INTO statistics_test VALUES(2, 'b', '2012-01-01')"""
+    sql """INSERT INTO statistics_test VALUES(3, 'c', '2013-01-01')"""
+
+    sql """ANALYZE statistics_test"""
+
+    sleep(3000)
+
+    qt_sql """
+        SHOW COLUMN STATS statistics_test
+    """
+
+    sql """
+            ALTER TABLE statistics_test
+            MODIFY COLUMN col1 SET STATS('ndv'='148064528', 'num_nulls'='0', 'min_value'='1', 'max_value'='6',
+            'row_count'='114', 'data_size'='511');
+        """
+
+    qt_sql2 """
+        SHOW COLUMN STATS statistics_test
+    """
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org
