This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new f4cea1a73ce branch-3.0: [enhance](nereids) SqlParser support load data into temporary partition #45025 (#45227)
f4cea1a73ce is described below

commit f4cea1a73cea39832d384c17a9a12b509a4df538
Author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
AuthorDate: Fri Dec 20 00:35:43 2024 +0800

    branch-3.0: [enhance](nereids) SqlParser support load data into temporary partition #45025 (#45227)
    
    Cherry-picked from #45025
    
    Co-authored-by: zxealous <zhouchang...@baidu.com>
---
 .../antlr4/org/apache/doris/nereids/DorisParser.g4 |  2 +-
 .../test_broker_load_with_partition.out            |  8 ++++
 .../test_broker_load_with_partition.groovy         | 54 ++++++++++++++++++++++
 3 files changed, 63 insertions(+), 1 deletion(-)

diff --git a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4
index 14d2b95a8ce..53e79f0bb28 100644
--- a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4
+++ b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4
@@ -912,7 +912,7 @@ identityOrFunction
 dataDesc
     : ((WITH)? mergeType)? DATA INFILE LEFT_PAREN filePaths+=STRING_LITERAL (COMMA filePath+=STRING_LITERAL)* RIGHT_PAREN
         INTO TABLE targetTableName=identifier
-        (PARTITION partition=identifierList)?
+        (partitionSpec)?
         (COLUMNS TERMINATED BY comma=STRING_LITERAL)?
         (LINES TERMINATED BY separator=STRING_LITERAL)?
         (FORMAT AS format=identifierOrText)?
diff --git a/regression-test/data/load_p0/broker_load/test_broker_load_with_partition.out b/regression-test/data/load_p0/broker_load/test_broker_load_with_partition.out
index 5015d98b3c9..e62e67a8f74 100644
--- a/regression-test/data/load_p0/broker_load/test_broker_load_with_partition.out
+++ b/regression-test/data/load_p0/broker_load/test_broker_load_with_partition.out
@@ -8,3 +8,11 @@
 11001  2023-09-01      1       2       10
 11001  2023-09-01      2       1       10
 
+-- !select --
+11001  2023-10-01      1       3       10
+11001  2023-10-01      2       2       10
+11001  2023-10-01      2       3       10
+11001  2023-09-01      1       1       10
+11001  2023-09-01      1       2       10
+11001  2023-09-01      2       1       10
+
diff --git a/regression-test/suites/load_p0/broker_load/test_broker_load_with_partition.groovy b/regression-test/suites/load_p0/broker_load/test_broker_load_with_partition.groovy
index 032b48baf60..45f0cc50be7 100644
--- a/regression-test/suites/load_p0/broker_load/test_broker_load_with_partition.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_broker_load_with_partition.groovy
@@ -54,6 +54,13 @@ suite("test_broker_load_with_partition", "load_p0") {
         assertTrue(result2.size() == 1)
         assertTrue(result2[0].size() == 1)
         assertTrue(result2[0][0] == 1, "Insert should update 1 rows")
+
+        def result3 = sql """
+            ALTER TABLE ${testTable} ADD TEMPORARY PARTITION partition_t VALUES LESS THAN ("2023-11-01");
+            """
+        assertTrue(result3.size() == 1)
+        assertTrue(result3[0].size() == 1)
+        assertTrue(result3[0][0] == 0, "Alter table should update 0 rows")
     }
 
     def load_from_hdfs_partition = {testTablex, label, hdfsFilePath, format, brokerName, hdfsUser, hdfsPasswd ->
@@ -78,6 +85,28 @@ suite("test_broker_load_with_partition", "load_p0") {
         assertTrue(result1[0][0] == 0, "Query OK, 0 rows affected")
     }
 
+    def load_from_hdfs_tmp_partition = {testTablex, label, hdfsFilePath, format, brokerName, hdfsUser, hdfsPasswd ->
+        def result1= sql """
+                        LOAD LABEL ${label} (
+                            DATA INFILE("${hdfsFilePath}")
+                            INTO TABLE ${testTablex}
+                            TEMPORARY PARTITION (`partition_t`)
+                            COLUMNS TERMINATED BY ","
+                            FORMAT as "${format}"
+                        )
+                        with BROKER "${brokerName}" (
+                        "username"="${hdfsUser}",
+                        "password"="${hdfsPasswd}")
+                        PROPERTIES  (
+                        "timeout"="1200",
+                        "max_filter_ratio"="0.1");
+                        """
+
+        assertTrue(result1.size() == 1)
+        assertTrue(result1[0].size() == 1)
+        assertTrue(result1[0][0] == 0, "Query OK, 0 rows affected")
+    }
+
     def check_load_result = {checklabel, testTablex ->
         max_try_milli_secs = 10000
         while(max_try_milli_secs) {
@@ -97,6 +126,25 @@ suite("test_broker_load_with_partition", "load_p0") {
         }
     }
 
+    def check_load_tmp_partition_result = {checklabel, testTablex ->
+        max_try_milli_secs = 10000
+        while(max_try_milli_secs) {
+            result = sql "show load where label = '${checklabel}'"
+            log.info("result: ${result}")
+            if(result[0][2] == "FINISHED") {
+                //sql "sync"
+                qt_select "select * from ${testTablex} TEMPORARY PARTITION (`partition_t`) order by k1"
+                break
+            } else {
+                sleep(1000) // wait 1 second every time
+                max_try_milli_secs -= 1000
+                if(max_try_milli_secs <= 0) {
+                    assertEquals(1, 2)
+                }
+            }
+        }
+    }
+
     // if 'enableHdfs' in regression-conf.groovy has been set to true,
     // the test will run these case as below.
     if (enableHdfs()) {
@@ -115,6 +163,12 @@ suite("test_broker_load_with_partition", "load_p0") {
                                 brokerName, hdfsUser, hdfsPasswd)
 
             check_load_result.call(test_load_label, testTable)
+
+            def test_tmp_partition_load_label = UUID.randomUUID().toString().replaceAll("-", "")
+            load_from_hdfs_tmp_partition.call(testTable, test_tmp_partition_load_label,
+                                hdfs_csv_file_path, "csv", brokerName, hdfsUser, hdfsPasswd)
+
+            check_load_tmp_partition_result.call(test_tmp_partition_load_label, testTable)
         } finally {
             try_sql("DROP TABLE IF EXISTS ${testTable}")
         }

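For reference, the statement shape this grammar change enables is the one the
regression test above exercises: before the change, dataDesc only accepted
PARTITION (...); switching to partitionSpec also admits the TEMPORARY keyword.
A minimal sketch of such a load, with the label, table, path, broker name, and
credentials as placeholders:

    LOAD LABEL example_label (
        DATA INFILE("hdfs://host:port/path/to/file.csv")
        INTO TABLE example_table
        TEMPORARY PARTITION (`partition_t`)
        COLUMNS TERMINATED BY ","
        FORMAT AS "csv"
    )
    WITH BROKER "broker_name" (
        "username"="user",
        "password"="passwd"
    )
    PROPERTIES (
        "timeout"="1200",
        "max_filter_ratio"="0.1"
    );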

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org
