This is an automated email from the ASF dual-hosted git repository.

caiconghui pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new f2d8dd438ad [fix](stream_load) fix stream load failure caused by column names that are keywords (#35822)
f2d8dd438ad is described below

commit f2d8dd438adf346c74d20c776b767af26e6a6f77
Author: caiconghui <55968745+caicong...@users.noreply.github.com>
AuthorDate: Tue Jun 4 21:14:06 2024 +0800

    [fix](stream_load) fix stream load failure caused by column names that are keywords (#35822)
    
    let
    KW_SQL,
    KW_CACHE,
    KW_COLOCATE,
    KW_COMPRESS_TYPE,
    KW_DORIS_INTERNAL_TABLE_ID,
    KW_HOTSPOT,
    KW_PRIVILEGES,
    KW_RECENT,
    KW_STAGES,
    KW_WARM,
    KW_UP,
    KW_CONVERT_LSC
    be non-reserved keywords
    
    Co-authored-by: caiconghui1 <caicongh...@jd.com>
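
For context, a minimal sketch of the user-visible effect (table and column
names below are illustrative, not taken from this commit): once these tokens
are non-reserved, columns with matching names can be declared and referenced
without backquotes.

    -- illustrative only; before this change the bare names sql, cache and
    -- privileges would be rejected by the parser, which is the failure this
    -- commit addresses
    CREATE TABLE demo_keyword_cols (
        k1 BIGINT,
        sql INT,
        cache INT,
        privileges INT
    ) DISTRIBUTED BY HASH(k1) BUCKETS 1
    PROPERTIES ("replication_allocation" = "tag.location.default: 1");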
---
 fe/fe-core/src/main/cup/sql_parser.cup             | 24 +++++++
 .../data/load_p0/stream_load/test_keyword.csv      | 10 +++
 .../stream_load/test_stream_load_keyword.out       | 13 ++++
 .../stream_load/test_stream_load_keyword.groovy    | 75 ++++++++++++++++++++++
 4 files changed, 122 insertions(+)

diff --git a/fe/fe-core/src/main/cup/sql_parser.cup b/fe/fe-core/src/main/cup/sql_parser.cup
index a333a7ca968..368879ae42d 100644
--- a/fe/fe-core/src/main/cup/sql_parser.cup
+++ b/fe/fe-core/src/main/cup/sql_parser.cup
@@ -8478,6 +8478,30 @@ keyword ::=
     {: RESULT = id; :}
     | KW_MATCH_PHRASE_EDGE:id
     {: RESULT = id; :}
+    | KW_SQL:id
+    {: RESULT = id; :}
+    | KW_CACHE:id
+    {: RESULT = id; :}
+    | KW_COLOCATE:id
+    {: RESULT = id; :}
+    | KW_COMPRESS_TYPE:id
+    {: RESULT = id; :}
+    | KW_DORIS_INTERNAL_TABLE_ID:id
+    {: RESULT = id; :}
+    | KW_HOTSPOT:id
+    {: RESULT = id; :}
+    | KW_PRIVILEGES:id
+    {: RESULT = id; :}
+    | KW_RECENT:id
+    {: RESULT = id; :}
+    | KW_STAGES:id
+    {: RESULT = id; :}
+    | KW_WARM:id
+    {: RESULT = id; :}
+    | KW_UP:id
+    {: RESULT = id; :}
+    | KW_CONVERT_LSC:id
+    {: RESULT = id; :}
     ;
 
 // Identifier that contain keyword
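
The tokens added to the keyword ::= production above are folded back into the
"identifier that contain keyword" rule that follows, so they are accepted
wherever a plain identifier is, which is what the stream load columns header
relies on. A hedged sketch of a statement this hunk makes parseable (table
name is the illustrative one from the example above):

    -- bare keyword-named columns in an ordinary column list
    INSERT INTO demo_keyword_cols (k1, sql, cache, privileges)
    VALUES (1, 2, 3, 4);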
diff --git a/regression-test/data/load_p0/stream_load/test_keyword.csv b/regression-test/data/load_p0/stream_load/test_keyword.csv
new file mode 100644
index 00000000000..991ea68318d
--- /dev/null
+++ b/regression-test/data/load_p0/stream_load/test_keyword.csv
@@ -0,0 +1,10 @@
+1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16
+2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17
+3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18
+4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19
+5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20
+6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21
+7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22
+8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23
+9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24
+10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25
\ No newline at end of file
diff --git a/regression-test/data/load_p0/stream_load/test_stream_load_keyword.out b/regression-test/data/load_p0/stream_load/test_stream_load_keyword.out
new file mode 100644
index 00000000000..35ddbf4534f
--- /dev/null
+++ b/regression-test/data/load_p0/stream_load/test_stream_load_keyword.out
@@ -0,0 +1,13 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !sql --
+1      2       3       4       5       6       7       8       9       10      11      12      13      14      15      16
+2      3       4       5       6       7       8       9       10      11      12      13      14      15      16      17
+3      4       5       6       7       8       9       10      11      12      13      14      15      16      17      18
+4      5       6       7       8       9       10      11      12      13      14      15      16      17      18      19
+5      6       7       8       9       10      11      12      13      14      15      16      17      18      19      20
+6      7       8       9       10      11      12      13      14      15      16      17      18      19      20      21
+7      8       9       10      11      12      13      14      15      16      17      18      19      20      21      22
+8      9       10      11      12      13      14      15      16      17      18      19      20      21      22      23
+9      10      11      12      13      14      15      16      17      18      19      20      21      22      23      24
+10     11      12      13      14      15      16      17      18      19      20      21      22      23      24      25
+
diff --git a/regression-test/suites/load_p0/stream_load/test_stream_load_keyword.groovy b/regression-test/suites/load_p0/stream_load/test_stream_load_keyword.groovy
new file mode 100644
index 00000000000..f784455d95e
--- /dev/null
+++ b/regression-test/suites/load_p0/stream_load/test_stream_load_keyword.groovy
@@ -0,0 +1,75 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_stream_load_keyword", "p0") {
+    def tableName = "test_stream_load_keyword"
+    try {
+        sql """ DROP TABLE IF EXISTS ${tableName} """
+        sql """
+            CREATE TABLE IF NOT EXISTS ${tableName} (
+                `k1` bigint(20) NULL,
+                `k2` bigint(20) NULL,
+                 sql int(11) SUM NULL,
+                 cache int(11) REPLACE NULL,
+                 colocate int(11) REPLACE_IF_NOT_NULL NULL,
+                 compress_type int(11) REPLACE_IF_NOT_NULL NULL,
+                 doris_internal_table_id int(11) REPLACE_IF_NOT_NULL NULL,
+                 `dual` int(11) REPLACE_IF_NOT_NULL NULL,
+                 hotspot int(11) REPLACE_IF_NOT_NULL NULL,
+                 `overwrite` int(11) REPLACE_IF_NOT_NULL NULL,
+                 privileges int(11) REPLACE_IF_NOT_NULL NULL,
+                 recent int(11) REPLACE_IF_NOT_NULL NULL,
+                 stages int(11) REPLACE_IF_NOT_NULL NULL,
+                 warm int(11) REPLACE_IF_NOT_NULL NULL,
+                 up int(11) REPLACE_IF_NOT_NULL NULL,
+                 convert_lsc int(11) REPLACE_IF_NOT_NULL NULL,
+            ) ENGINE=OLAP
+            AGGREGATE KEY(`k1`, `k2`)
+            COMMENT 'OLAP'
+            DISTRIBUTED BY HASH(`k1`, `k2`) BUCKETS 3
+            PROPERTIES ("replication_allocation" = "tag.location.default: 1");
+        """
+
+        // test columns with keyword success
+        streamLoad {
+            table "${tableName}"
+
+            file 'test_keyword.csv'
+
+            set 'columns', 'k1, k2, sql, cache, colocate, compress_type, doris_internal_table_id, `dual`,' +
+                    ' hotspot, `overwrite`, privileges, recent, stages, warm, up, convert_lsc'
+
+            set 'column_separator', ','
+
+            check { result, exception, startTime, endTime ->
+                if (exception != null) {
+                    throw exception
+                }
+                log.info("Stream load result: ${result}".toString())
+                def json = parseJson(result)
+                assertEquals("success", json.Status.toLowerCase())
+                assertEquals(10, json.NumberTotalRows)
+            }
+            time 10000 // limit inflight 10s
+        }
+
+        sql "sync"
+        qt_sql "select * from ${tableName} order by k1"
+    } finally {
+        sql """ DROP TABLE IF EXISTS ${tableName} """
+    }
+}
\ No newline at end of file
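
One detail worth noting in the test above: `dual` and `overwrite` are not in
the list of newly non-reserved tokens and stay backquoted in both the DDL and
the columns header, while the names covered by this commit are used bare. A
minimal sketch of the resulting quoting (table name as in the test, assuming
`dual` and `overwrite` still require backquotes):

    SELECT sql, cache, warm, up, `dual`, `overwrite`
    FROM test_stream_load_keyword;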


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org
