This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new 6ae399f3f04 branch-3.0: [chore](job) display sequence column in show routine load #53441 (#53503)
6ae399f3f04 is described below

commit 6ae399f3f04ec039b7a4577037f24f0e81e9ca11
Author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
AuthorDate: Mon Jul 21 20:08:41 2025 +0800

    branch-3.0: [chore](job) display sequence column in show routine load #53441 (#53503)
    
    Cherry-picked from #53441
    
    Co-authored-by: hui lai <[email protected]>
---
 .../doris/load/routineload/RoutineLoadJob.java     | 10 +++++++--
 .../routine_load/test_show_routine_load.groovy     | 24 ++++++++++++++++++++--
 2 files changed, 30 insertions(+), 4 deletions(-)

diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/routineload/RoutineLoadJob.java b/fe/fe-core/src/main/java/org/apache/doris/load/routineload/RoutineLoadJob.java
index 5ea984ebfac..30ef3f54f1a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/load/routineload/RoutineLoadJob.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/load/routineload/RoutineLoadJob.java
@@ -1839,8 +1839,10 @@ public abstract class RoutineLoadJob
         }
     }
 
+    // jobPropertiesJsonString contains both load properties and job properties defined in CreateRoutineLoadStmt
     public String jobPropertiesToJsonString() {
         Map<String, String> jobProperties = Maps.newHashMap();
+        // load properties defined in CreateRoutineLoadStmt
         jobProperties.put("partitions", partitions == null
                 ? STAR_STRING : Joiner.on(",").join(partitions.getPartitionNames()));
         jobProperties.put("columnToColumnExpr", columnDescs == null
@@ -1855,6 +1857,12 @@ public abstract class RoutineLoadJob
             jobProperties.put(LoadStmt.KEY_IN_PARAM_LINE_DELIMITER,
                     lineDelimiter == null ? "\n" : lineDelimiter.toString());
         }
+        jobProperties.put(LoadStmt.KEY_IN_PARAM_DELETE_CONDITION,
+                deleteCondition == null ? STAR_STRING : deleteCondition.toSqlWithoutTbl());
+        jobProperties.put(LoadStmt.KEY_IN_PARAM_SEQUENCE_COL,
+                sequenceCol == null ? STAR_STRING : sequenceCol);
+
+        // job properties defined in CreateRoutineLoadStmt
         jobProperties.put(CreateRoutineLoadStmt.PARTIAL_COLUMNS, String.valueOf(isPartialUpdate));
         jobProperties.put(CreateRoutineLoadStmt.MAX_ERROR_NUMBER_PROPERTY, String.valueOf(maxErrorNum));
         jobProperties.put(CreateRoutineLoadStmt.MAX_BATCH_INTERVAL_SEC_PROPERTY, String.valueOf(maxBatchIntervalS));
@@ -1866,8 +1874,6 @@ public abstract class RoutineLoadJob
                 String.valueOf(desireTaskConcurrentNum));
         jobProperties.put(LoadStmt.EXEC_MEM_LIMIT, String.valueOf(execMemLimit));
         jobProperties.put(LoadStmt.KEY_IN_PARAM_MERGE_TYPE, mergeType.toString());
-        jobProperties.put(LoadStmt.KEY_IN_PARAM_DELETE_CONDITION,
-                deleteCondition == null ? STAR_STRING : deleteCondition.toSqlWithoutTbl());
         jobProperties.putAll(this.jobProperties);
         Gson gson = new GsonBuilder().disableHtmlEscaping().create();
         return gson.toJson(jobProperties);
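
The hunk above moves the delete-condition entry and adds a sequence-column entry to the map that jobPropertiesToJsonString() serializes, so SHOW ROUTINE LOAD can display both. Below is a minimal standalone sketch of that serialization step, assuming STAR_STRING is "*" and using the "sequence_col" key that the regression test reads; the real method lives inside RoutineLoadJob and fills in many more entries.

    // Standalone sketch (not the actual Doris class): approximates how
    // jobPropertiesToJsonString() builds a map and serializes it with Gson.
    // The "sequence_col" key matches what the regression test below checks;
    // STAR_STRING is assumed to be "*".
    import com.google.common.collect.Maps;
    import com.google.gson.Gson;
    import com.google.gson.GsonBuilder;
    import java.util.Map;

    public class JobPropertiesSketch {
        private static final String STAR_STRING = "*";

        static String jobPropertiesToJsonString(String sequenceCol) {
            Map<String, String> jobProperties = Maps.newHashMap();
            // an unset sequence column falls back to the "*" placeholder
            jobProperties.put("sequence_col", sequenceCol == null ? STAR_STRING : sequenceCol);
            Gson gson = new GsonBuilder().disableHtmlEscaping().create();
            return gson.toJson(jobProperties);
        }

        public static void main(String[] args) {
            System.out.println(jobPropertiesToJsonString("k1"));   // {"sequence_col":"k1"}
            System.out.println(jobPropertiesToJsonString(null));   // {"sequence_col":"*"}
        }
    }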
diff --git a/regression-test/suites/load_p0/routine_load/test_show_routine_load.groovy b/regression-test/suites/load_p0/routine_load/test_show_routine_load.groovy
index 6075dc20dbe..d6b31db11f9 100644
--- a/regression-test/suites/load_p0/routine_load/test_show_routine_load.groovy
+++ b/regression-test/suites/load_p0/routine_load/test_show_routine_load.groovy
@@ -46,9 +46,7 @@ suite("test_show_routine_load","p0") {
                 producer.send(record)
             }
         }
-    }
 
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
         def tableName = "test_show_routine_load"
         sql """ DROP TABLE IF EXISTS ${tableName} """
         sql """
@@ -66,6 +64,7 @@ suite("test_show_routine_load","p0") {
             PROPERTIES ("replication_allocation" = "tag.location.default: 1");
         """
 
+        // test show routine load command
         try {
             sql """
                 CREATE ROUTINE LOAD testShow ON ${tableName}
@@ -146,5 +145,26 @@ suite("test_show_routine_load","p0") {
             sql "stop routine load for testShow"
             sql "stop routine load for testShow1"
         }
+
+        // test show routine load properties
+        try {
+            sql """
+                CREATE ROUTINE LOAD testShow ON ${tableName}
+                COLUMNS TERMINATED BY ",",
+                ORDER BY k1
+                FROM KAFKA
+                (
+                    "kafka_broker_list" = "${externalEnvIp}:${kafka_port}",
+                    "kafka_topic" = "${kafkaCsvTpoics[0]}",
+                    "property.kafka_default_offsets" = "OFFSET_BEGINNING"
+                );
+            """
+            def res = sql "show routine load for testShow"
+            def json = parseJson(res[0][11])
+            log.info("routine load job properties: ${res[0][11].toString()}".toString())
+            assertEquals("k1", json.sequence_col.toString())
+        } finally {
+            sql "stop routine load for testShow"
+        }
     }
 }
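
The new test case creates a routine load with ORDER BY k1 and then asserts that the properties column of SHOW ROUTINE LOAD (index 11 of the result row in this test) is a JSON string whose sequence_col equals "k1". A rough Java equivalent of that client-side check is sketched below; the sample JSON string is illustrative only, not captured output.

    // Sketch: parse the SHOW ROUTINE LOAD properties column and read sequence_col,
    // mirroring what the Groovy test does with parseJson.
    import com.google.gson.Gson;
    import com.google.gson.reflect.TypeToken;
    import java.util.Map;

    public class ShowRoutineLoadCheck {
        public static void main(String[] args) {
            // hypothetical properties column content, shaped like the patched output
            String propertiesColumn = "{\"sequence_col\":\"k1\",\"partitions\":\"*\"}";
            Map<String, String> props = new Gson().fromJson(
                    propertiesColumn, new TypeToken<Map<String, String>>() {}.getType());
            // the regression test asserts this equals the ORDER BY column, "k1"
            System.out.println("sequence_col = " + props.get("sequence_col"));
        }
    }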

