morrySnow commented on code in PR #20264:
URL: https://github.com/apache/doris/pull/20264#discussion_r1211470496


##########
fe/fe-core/src/main/java/org/apache/doris/qe/ShowExecutor.java:
##########
@@ -416,13 +421,16 @@ public ShowResultSet execute() throws AnalysisException {
             handleShowTypeCastStmt();
         } else if (stmt instanceof ShowBuildIndexStmt) {
             handleShowBuildIndexStmt();
+        } else if (stmt instanceof ShowAnalyzeTaskStatus) {
+            handleShowAnalyzeTaskStatus();
         } else {
             handleEmtpy();
         }
 
         return resultSet;
     }
 
+

Review Comment:
   Remove this blank line.



##########
fe/fe-core/src/main/cup/sql_parser.cup:
##########
@@ -2793,11 +2793,7 @@ analyze_stmt ::=
         }
         // Rule: If no type is specified, see if there is a specified column
         if (!properties.containsKey("analysis.type")) {
-            if ((cols == null)) {
-                properties.put("analysis.type", "INDEX");
-            } else {
-                properties.put("analysis.type", "COLUMN");
-            }
+            properties.put("analysis.type", "FUNDAMENTALS");

Review Comment:
   Need to update the docs for this change as well.



##########
fe/fe-core/src/main/java/org/apache/doris/persist/OperationType.java:
##########
@@ -295,6 +295,15 @@ public class OperationType {
     // update binlog config
     public static final short OP_UPDATE_BINLOG_CONFIG = 425;
 
+    public static final short OP_CREATE_ANALYSIS_TASK = 435;

Review Comment:
   Recommend using a value in the range 430 to 439.
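
   For illustration only, a minimal sketch of the layout this seems to suggest: keep the new analysis-related op codes together in a dedicated 430 to 439 block. The specific values and the two extra constant names below are assumptions, not taken from the PR.

   ```java
   public class OperationType {
       // ... existing op codes, currently ending at OP_UPDATE_BINLOG_CONFIG = 425 ...

       // Hypothetical grouping: reserve 430-439 for statistics/analysis edit-log
       // operations so related codes stay contiguous. Values are illustrative only.
       public static final short OP_CREATE_ANALYSIS_TASK = 430;
       public static final short OP_CREATE_ANALYSIS_JOB = 431;
       public static final short OP_DELETE_ANALYSIS_JOB = 432;
   }
   ```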



##########
fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisManager.java:
##########
@@ -384,53 +415,59 @@ private void createTaskForMVIdx(AnalysisTaskInfo jobInfo, Map<Long, BaseAnalysis
                 }
                 long indexId = meta.getIndexId();
                 long taskId = Env.getCurrentEnv().getNextId();
-                AnalysisTaskInfoBuilder indexTaskInfoBuilder = new AnalysisTaskInfoBuilder(jobInfo);
-                AnalysisTaskInfo analysisTaskInfo = indexTaskInfoBuilder.setIndexId(indexId)
+                AnalysisInfoBuilder indexTaskInfoBuilder = new AnalysisInfoBuilder(jobInfo);
+                AnalysisInfo analysisInfo = indexTaskInfoBuilder.setIndexId(indexId)
                         .setTaskId(taskId).build();
-                analysisTasks.put(taskId, createTask(analysisTaskInfo));
-                if (isSync && !ConnectContext.get().getSessionVariable().enableSaveStatisticsSyncJob) {
+                analysisTasks.put(taskId, createTask(analysisInfo));
+                if (isSync) {
                     return;
                 }
-                try {
-                    StatisticsRepository.persistAnalysisTask(analysisTaskInfo);
-                } catch (Exception e) {
-                    throw new DdlException("Failed to create analysis task", e);
-                }
+                logCreateAnalysisJob(analysisInfo);
             }
         } finally {
             olapTable.readUnlock();
         }
     }
 
-    private void createTaskForEachColumns(AnalysisTaskInfo jobInfo, Map<Long, BaseAnalysisTask> analysisTasks,
+    private void createTaskForEachColumns(AnalysisInfo jobInfo, Map<Long, BaseAnalysisTask> analysisTasks,
             boolean isSync) throws DdlException {
         Map<String, Set<String>> columnToPartitions = jobInfo.colToPartitions;
        for (Entry<String, Set<String>> entry : columnToPartitions.entrySet()) {
             long indexId = -1;
             long taskId = Env.getCurrentEnv().getNextId();
             String colName = entry.getKey();
-            AnalysisTaskInfoBuilder colTaskInfoBuilder = new AnalysisTaskInfoBuilder(jobInfo);
+            AnalysisInfoBuilder colTaskInfoBuilder = new AnalysisInfoBuilder(jobInfo);
             if (jobInfo.analysisType != AnalysisType.HISTOGRAM) {
-                colTaskInfoBuilder.setAnalysisType(AnalysisType.COLUMN);
+                colTaskInfoBuilder.setAnalysisType(AnalysisType.FUNDAMENTALS);
                colTaskInfoBuilder.setColToPartitions(Collections.singletonMap(colName, entry.getValue()));
             }
-            AnalysisTaskInfo analysisTaskInfo = colTaskInfoBuilder.setColName(colName).setIndexId(indexId)
+            AnalysisInfo analysisInfo = colTaskInfoBuilder.setColName(colName).setIndexId(indexId)
                     .setTaskId(taskId).build();
-            analysisTasks.put(taskId, createTask(analysisTaskInfo));
-            if (isSync && !ConnectContext.get().getSessionVariable().enableSaveStatisticsSyncJob) {
+            analysisTasks.put(taskId, createTask(analysisInfo));
+            if (isSync) {
                 continue;
             }
             try {
-                StatisticsRepository.persistAnalysisTask(analysisTaskInfo);
+                logCreateAnalysisTask(analysisInfo);
             } catch (Exception e) {
                 throw new DdlException("Failed to create analysis task", e);
             }
         }
     }
 
-    private void createTaskForExternalTable(AnalysisTaskInfo jobInfo,
-                                            Map<Long, BaseAnalysisTask> analysisTasks,
-                                            boolean isSync) throws DdlException {
+    private void logCreateAnalysisTask(AnalysisInfo analysisInfo) {
+        Env.getCurrentEnv().getEditLog().logCreateAnalysisTasks(analysisInfo);
+        analysisTaskInfoMap.put(analysisInfo.taskId, analysisInfo);

Review Comment:
   ```suggestion
           analysisTaskInfoMap.put(analysisInfo.taskId, analysisInfo);
           Env.getCurrentEnv().getEditLog().logCreateAnalysisTasks(analysisInfo);
   ```



##########
fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisManager.java:
##########
@@ -384,53 +415,59 @@ private void createTaskForMVIdx(AnalysisTaskInfo jobInfo, Map<Long, BaseAnalysis
                 }
                 long indexId = meta.getIndexId();
                 long taskId = Env.getCurrentEnv().getNextId();
-                AnalysisTaskInfoBuilder indexTaskInfoBuilder = new AnalysisTaskInfoBuilder(jobInfo);
-                AnalysisTaskInfo analysisTaskInfo = indexTaskInfoBuilder.setIndexId(indexId)
+                AnalysisInfoBuilder indexTaskInfoBuilder = new AnalysisInfoBuilder(jobInfo);
+                AnalysisInfo analysisInfo = indexTaskInfoBuilder.setIndexId(indexId)
                         .setTaskId(taskId).build();
-                analysisTasks.put(taskId, createTask(analysisTaskInfo));
-                if (isSync && !ConnectContext.get().getSessionVariable().enableSaveStatisticsSyncJob) {
+                analysisTasks.put(taskId, createTask(analysisInfo));
+                if (isSync) {
                     return;
                 }
-                try {
-                    StatisticsRepository.persistAnalysisTask(analysisTaskInfo);
-                } catch (Exception e) {
-                    throw new DdlException("Failed to create analysis task", e);
-                }
+                logCreateAnalysisJob(analysisInfo);
             }
         } finally {
             olapTable.readUnlock();
         }
     }
 
-    private void createTaskForEachColumns(AnalysisTaskInfo jobInfo, Map<Long, BaseAnalysisTask> analysisTasks,
+    private void createTaskForEachColumns(AnalysisInfo jobInfo, Map<Long, BaseAnalysisTask> analysisTasks,
             boolean isSync) throws DdlException {
         Map<String, Set<String>> columnToPartitions = jobInfo.colToPartitions;
        for (Entry<String, Set<String>> entry : columnToPartitions.entrySet()) {
             long indexId = -1;
             long taskId = Env.getCurrentEnv().getNextId();
             String colName = entry.getKey();
-            AnalysisTaskInfoBuilder colTaskInfoBuilder = new AnalysisTaskInfoBuilder(jobInfo);
+            AnalysisInfoBuilder colTaskInfoBuilder = new AnalysisInfoBuilder(jobInfo);
             if (jobInfo.analysisType != AnalysisType.HISTOGRAM) {
-                colTaskInfoBuilder.setAnalysisType(AnalysisType.COLUMN);
+                colTaskInfoBuilder.setAnalysisType(AnalysisType.FUNDAMENTALS);
                colTaskInfoBuilder.setColToPartitions(Collections.singletonMap(colName, entry.getValue()));
             }
-            AnalysisTaskInfo analysisTaskInfo = colTaskInfoBuilder.setColName(colName).setIndexId(indexId)
+            AnalysisInfo analysisInfo = colTaskInfoBuilder.setColName(colName).setIndexId(indexId)
                     .setTaskId(taskId).build();
-            analysisTasks.put(taskId, createTask(analysisTaskInfo));
-            if (isSync && !ConnectContext.get().getSessionVariable().enableSaveStatisticsSyncJob) {
+            analysisTasks.put(taskId, createTask(analysisInfo));
+            if (isSync) {
                 continue;
             }
             try {
-                StatisticsRepository.persistAnalysisTask(analysisTaskInfo);
+                logCreateAnalysisTask(analysisInfo);
             } catch (Exception e) {
                 throw new DdlException("Failed to create analysis task", e);
             }
         }
     }
 
-    private void createTaskForExternalTable(AnalysisTaskInfo jobInfo,
-                                            Map<Long, BaseAnalysisTask> analysisTasks,
-                                            boolean isSync) throws DdlException {
+    private void logCreateAnalysisTask(AnalysisInfo analysisInfo) {
+        Env.getCurrentEnv().getEditLog().logCreateAnalysisTasks(analysisInfo);
+        analysisTaskInfoMap.put(analysisInfo.taskId, analysisInfo);
+    }
+
+    private void logCreateAnalysisJob(AnalysisInfo analysisJob) {
+        Env.getCurrentEnv().getEditLog().logCreateAnalysisJob(analysisJob);
+        analysisJobInfoMap.put(analysisJob.jobId, analysisJob);

Review Comment:
   ```suggestion
           analysisJobInfoMap.put(analysisJob.jobId, analysisJob);
           Env.getCurrentEnv().getEditLog().logCreateAnalysisJob(analysisJob);
   ```



##########
fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisManager.java:
##########
@@ -675,10 +718,57 @@ public void execute() {
         }
 
        private void updateSyncTaskStatus(BaseAnalysisTask task, AnalysisState state) {
-            if (ConnectContext.get().getSessionVariable().enableSaveStatisticsSyncJob) {
-                Env.getCurrentEnv().getAnalysisManager()
-                        .updateTaskStatus(task.info, state, "", System.currentTimeMillis());
-            }
+            Env.getCurrentEnv().getAnalysisManager()
+                    .updateTaskStatus(task.info, state, "", System.currentTimeMillis());
+        }
+    }
+
+    public List<AnalysisInfo> findAutomaticAnalysisJobs() {
+        synchronized (analysisJobInfoMap) {
+            return analysisJobInfoMap.values().stream()
+                    .filter(a ->
+                            a.scheduleType.equals(ScheduleType.AUTOMATIC)
+                                    && (!(a.state.equals(AnalysisState.RUNNING)
+                                    || a.state.equals(AnalysisState.PENDING)))
+                                    && System.currentTimeMillis() - a.lastExecTimeInMs
+                                    > TimeUnit.MINUTES.toMillis(Config.auto_check_statistics_in_minutes))
+                    .collect(Collectors.toList());
         }
     }
+
+    public List<AnalysisInfo> findPeriodicJobs() {
+        synchronized (analysisJobInfoMap) {
+            return analysisJobInfoMap.values().stream()
+                    .filter(a -> a.scheduleType.equals(ScheduleType.PERIOD)
+                            && (a.state.equals(AnalysisState.FINISHED))
+                            && System.currentTimeMillis() - a.lastExecTimeInMs > a.periodTimeInMs)
+                    .collect(Collectors.toList());
+        }
+    }
+
+    public List<AnalysisInfo> findTasks(long jobId) {
+        synchronized (analysisTaskInfoMap) {
+            return analysisTaskInfoMap.values().stream().filter(i -> i.jobId == jobId).collect(Collectors.toList());
+        }
+    }
+
+    public void removeAll(List<AnalysisInfo> analysisInfos) {
+        for (AnalysisInfo analysisInfo : analysisInfos) {
+            analysisTaskInfoMap.remove(analysisInfo.taskId);
+        }
+    }
+
+    public void dropAnalyzeJob(DropAnalyzeJobStmt analyzeJobStmt) throws DdlException {
+        AnalysisInfo jobInfo = analysisJobInfoMap.get(analyzeJobStmt.getJobId());
+        if (jobInfo == null) {
+            throw new DdlException(String.format("Analyze job [%d] not exists", jobInfo.jobId));
+        }
+        checkPriv(jobInfo);
+        long jobId = analyzeJobStmt.getJobId();
+        AnalyzeDeletionLog analyzeDeletionLog = new AnalyzeDeletionLog(jobId);
+        Env.getCurrentEnv().getEditLog().logDeleteAnalysisJob(analyzeDeletionLog);

Review Comment:
   Every operation that writes to the edit log should apply the change first and then write the edit log.
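
   To make the ordering concrete, here is a rough sketch of `dropAnalyzeJob` with the action applied before the edit-log write. It reuses names from the diff above; the in-memory removal step is an assumption about what the rest of the method does, not the PR's actual code.

   ```java
   public void dropAnalyzeJob(DropAnalyzeJobStmt analyzeJobStmt) throws DdlException {
       long jobId = analyzeJobStmt.getJobId();
       AnalysisInfo jobInfo = analysisJobInfoMap.get(jobId);
       if (jobInfo == null) {
           throw new DdlException(String.format("Analyze job [%d] not exists", jobId));
       }
       checkPriv(jobInfo);
       // 1. Apply the change in memory first (drop the job and its tasks).
       analysisJobInfoMap.remove(jobId);
       removeAll(findTasks(jobId));
       // 2. Only then record the deletion in the edit log, so a replayed entry
       //    never describes state that was not actually applied on the leader.
       AnalyzeDeletionLog analyzeDeletionLog = new AnalyzeDeletionLog(jobId);
       Env.getCurrentEnv().getEditLog().logDeleteAnalysisJob(analyzeDeletionLog);
   }
   ```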



##########
fe/fe-core/src/main/java/org/apache/doris/statistics/AnalysisInfoBuilder.java:
##########
@@ -17,24 +17,24 @@
 
 package org.apache.doris.statistics;
 
-import org.apache.doris.statistics.AnalysisTaskInfo.AnalysisMethod;
-import org.apache.doris.statistics.AnalysisTaskInfo.AnalysisMode;
-import org.apache.doris.statistics.AnalysisTaskInfo.AnalysisType;
-import org.apache.doris.statistics.AnalysisTaskInfo.JobType;
-import org.apache.doris.statistics.AnalysisTaskInfo.ScheduleType;
+import org.apache.doris.statistics.AnalysisInfo.AnalysisMethod;
+import org.apache.doris.statistics.AnalysisInfo.AnalysisMode;
+import org.apache.doris.statistics.AnalysisInfo.AnalysisType;
+import org.apache.doris.statistics.AnalysisInfo.JobType;
+import org.apache.doris.statistics.AnalysisInfo.ScheduleType;
 
 import java.util.Map;
 import java.util.Set;
 
-public class AnalysisTaskInfoBuilder {
+public class AnalysisInfoBuilder {
     private long jobId;
     private long taskId;
     private String catalogName;
     private String dbName;
     private String tblName;
     private Map<String, Set<String>> colToPartitions;
     private String colName;
-    private Long indexId;
+    private Long indexId = -1L;

Review Comment:
   Why use the boxed type Long here instead of the primitive long?
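
   For comparison, a primitive field would look roughly like this in the builder (a sketch, not the PR's code); it avoids null unboxing and matches the `-1` sentinel already used for `indexId` elsewhere in this PR.

   ```java
   public class AnalysisInfoBuilder {
       // Primitive long: no accidental NullPointerException on unboxing and no
       // boxing allocation; -1 keeps serving as the "no index" sentinel.
       private long indexId = -1;

       public AnalysisInfoBuilder setIndexId(long indexId) {
           this.indexId = indexId;
           return this;
       }
   }
   ```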



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

