This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new efc2ec58458 [cherry-pick][test](doc) add some import-example in doris's doc to regression test(#43123) (#43416)
efc2ec58458 is described below

commit efc2ec584582f335466a725ef3f5404dc0bc7209
Author: yagagagaga <zhangminkefromflyd...@gmail.com>
AuthorDate: Fri Nov 8 15:40:57 2024 +0800

    [cherry-pick][test](doc) add some import-example in doris's doc to regression test(#43123) (#43416)
    
    (cherry picked from commit a3e8de76de0a15f7974d23b1231630843d7559ea)
---
 .../import/import-way/insert-into-manual.md.out    |   7 +
 .../org/apache/doris/regression/suite/Suite.groovy |  24 +++-
 .../import/import-way/client_local.csv             |   6 +
 .../doc/data-operate/import/import-way/data.csv    |   2 +
 .../import-way/group-commit-manual.md.groovy       | 154 ++++++++++++++++++++
 .../import/import-way/insert-into-manual.md.groovy |  93 ++++++++++++
 .../import/import-way/mysql-load-manual.md.groovy  | 160 +++++++++++++++++++++
 7 files changed, 445 insertions(+), 1 deletion(-)

diff --git a/regression-test/data/doc/data-operate/import/import-way/insert-into-manual.md.out b/regression-test/data/doc/data-operate/import/import-way/insert-into-manual.md.out
new file mode 100644
index 00000000000..9de2e13f029
--- /dev/null
+++ b/regression-test/data/doc/data-operate/import/import-way/insert-into-manual.md.out
@@ -0,0 +1,7 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !sql --
+5
+
+-- !sql --
+3
+
diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
index e5ffdbde414..2802e51f45f 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
@@ -475,7 +475,7 @@ class Suite implements GroovyInterceptable {
         return res;
     }
 
-    String getMasterIp(Connection conn) {
+    String getMasterIp(Connection conn = null) {
         def result = sql_return_maparray_impl("select Host, QueryPort, IsMaster from frontends();", conn)
         logger.info("get master fe: ${result}")
 
@@ -493,6 +493,28 @@ class Suite implements GroovyInterceptable {
         return masterHost;
     }
 
+    int getMasterPort(String type = "http") {
+        def result = sql_return_maparray_impl("select EditLogPort,HttpPort,QueryPort,RpcPort,ArrowFlightSqlPort from frontends() where IsMaster = 'true';")
+        if (result.size() != 1) {
+            throw new RuntimeException("could not find Master in this Cluster")
+        }
+        type = type.toLowerCase()
+        switch (type) {
+            case "editlog":
+                return result[0].EditLogPort as int
+            case "http":
+                return result[0].HttpPort as int
+            case ["query", "jdbc", "mysql"]:
+                return result[0].QueryPort as int
+            case ["rpc", "thrift"]:
+                return result[0].RpcPort as int
+            case ["arrow", "arrowflight"]:
+                return result[0].ArrowFlightSqlPort as int
+            default:
+                throw new RuntimeException("Unknown type: '${type}', you 
should select one of this type:[editlog, http, mysql, thrift, arrowflight]")
+        }
+    }
+
     def jdbc_sql_return_maparray(String sqlStr) {
         return sql_return_maparray_impl(sqlStr, context.getConnection())
     }
diff --git a/regression-test/suites/doc/data-operate/import/import-way/client_local.csv b/regression-test/suites/doc/data-operate/import/import-way/client_local.csv
new file mode 100644
index 00000000000..f674458778e
--- /dev/null
+++ b/regression-test/suites/doc/data-operate/import/import-way/client_local.csv
@@ -0,0 +1,6 @@
+1,10
+2,20
+3,30
+4,40
+5,50
+6,60
diff --git a/regression-test/suites/doc/data-operate/import/import-way/data.csv b/regression-test/suites/doc/data-operate/import/import-way/data.csv
new file mode 100644
index 00000000000..9ce46086f69
--- /dev/null
+++ b/regression-test/suites/doc/data-operate/import/import-way/data.csv
@@ -0,0 +1,2 @@
+6,Amy,60
+7,Ross,98
diff --git a/regression-test/suites/doc/data-operate/import/import-way/group-commit-manual.md.groovy b/regression-test/suites/doc/data-operate/import/import-way/group-commit-manual.md.groovy
new file mode 100644
index 00000000000..2f082e12f88
--- /dev/null
+++ b/regression-test/suites/doc/data-operate/import/import-way/group-commit-manual.md.groovy
@@ -0,0 +1,154 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+
+import org.junit.jupiter.api.Assertions
+
+import java.sql.Connection
+import java.sql.DriverManager
+import java.sql.PreparedStatement
+import java.sql.Statement
+
+suite("docs/data-operate/import/import-way/group-commit-manual.md", 
"p0,nonConcurrent") {
+    try {
+        sql "CREATE DATABASE IF NOT EXISTS `db`; use `db`;"
+        sql "DROP TABLE IF EXISTS `dt`;"
+        sql """
+            CREATE TABLE `dt` (
+                `id` int(11) NOT NULL,
+                `name` varchar(50) NULL,
+                `score` int(11) NULL
+            ) ENGINE=OLAP
+            DUPLICATE KEY(`id`)
+            DISTRIBUTED BY HASH(`id`) BUCKETS 1
+            PROPERTIES (
+                "replication_num" = "1"
+            );
+        """
+
+        Demo.HOST = getMasterIp()
+        Demo.PORT = getMasterPort("mysql")
+        Demo.USER = context.config.jdbcUser
+        Demo.PASSWD = context.config.jdbcPassword
+
+        Demo.groupCommitInsert()
+        Demo.groupCommitInsertBatch()
+
+
+        sql "truncate table dt;"
+        multi_sql """
+            set group_commit = async_mode;
+            insert into dt values(1, 'Bob', 90), (2, 'Alice', 99);
+            insert into dt(id, name) values(3, 'John');
+            select * from dt;
+            select * from dt;
+        """
+
+        multi_sql """
+            set group_commit = sync_mode;
+            insert into dt values(4, 'Bob', 90), (5, 'Alice', 99);
+            select * from dt;
+        """
+
+        sql "set group_commit = off_mode;"
+        cmd """curl --location-trusted -u 
${context.config.jdbcUser}:${context.config.jdbcPassword} -T 
${context.file.parent}/data.csv -H "group_commit:async_mode"  -H 
"column_separator:,"  
http://${context.config.feHttpAddress}/api/db/dt/_stream_load""";
+        cmd """curl --location-trusted -u 
${context.config.jdbcUser}:${context.config.jdbcPassword} -T 
${context.file.parent}/data.csv -H "group_commit:sync_mode"  -H 
"column_separator:,"  
http://${context.config.feHttpAddress}/api/db/dt/_stream_load""";
+        cmd """curl --location-trusted -u 
${context.config.jdbcUser}:${context.config.jdbcPassword} -T 
${context.file.parent}/data.csv  -H "group_commit:async_mode" -H "sql:insert 
into db.dt select * from http_stream('column_separator'=',', 'format' = 'CSV')" 
 http://${context.config.feHttpAddress}/api/_http_stream""";
+        cmd """curl --location-trusted -u 
${context.config.jdbcUser}:${context.config.jdbcPassword} -T 
${context.file.parent}/data.csv  -H "group_commit:sync_mode" -H "sql:insert 
into db.dt select * from http_stream('column_separator'=',', 'format' = 'CSV')" 
 http://${context.config.feHttpAddress}/api/_http_stream""";
+
+        sql """ALTER TABLE dt SET ("group_commit_interval_ms" = "2000");"""
+        sql """ALTER TABLE dt SET ("group_commit_data_bytes" = "134217728");"""
+    } catch (Throwable t) {
+        Assertions.fail("examples in 
docs/data-operate/import/import-way/group-commit-manual.md failed to exec, 
please fix it", t)
+    }
+}
+
+class Demo {
+    static String JDBC_DRIVER = "com.mysql.jdbc.Driver";
+    static String URL_PATTERN = "jdbc:mysql://%s:%d/%s?useServerPrepStmts=true";
+    static String HOST = "127.0.0.1";
+    static int PORT = 9030;
+    static String DB = "db";
+    static String TBL = "dt";
+    static String USER = "root";
+    static String PASSWD = "";
+    static int INSERT_BATCH_SIZE = 10;
+
+    static final void groupCommitInsert() throws Exception {
+        Class.forName(JDBC_DRIVER);
+        Connection conn = DriverManager.getConnection(String.format(URL_PATTERN, HOST, PORT, DB), USER, PASSWD)
+        try {
+            // set session variable 'group_commit'
+            Statement statement = conn.createStatement()
+            try {
+                statement.execute("SET group_commit = async_mode;");
+            } finally {
+                statement.close()
+            }
+
+            String query = "insert into " + TBL + " values(?, ?, ?)";
+            PreparedStatement stmt = conn.prepareStatement(query)
+            try {
+                for (int i = 0; i < INSERT_BATCH_SIZE; i++) {
+                    stmt.setInt(1, i);
+                    stmt.setString(2, "name" + i);
+                    stmt.setInt(3, i + 10);
+                    int result = stmt.executeUpdate();
+                    System.out.println("rows: " + result);
+                }
+            } finally {
+                stmt.close()
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw e
+        } finally {
+            conn.close()
+        }
+    }
+
+    static final void groupCommitInsertBatch() throws Exception {
+        Class.forName(JDBC_DRIVER);
+        // add rewriteBatchedStatements=true and cachePrepStmts=true in JDBC url
+        // set session variables by sessionVariables=group_commit=async_mode in JDBC url
+        Connection conn = DriverManager.getConnection(
+                String.format(URL_PATTERN + "&rewriteBatchedStatements=true&cachePrepStmts=true&sessionVariables=group_commit=async_mode", HOST, PORT, DB), USER, PASSWD)
+        try {
+            String query = "insert into " + TBL + " values(?, ?, ?)";
+            PreparedStatement stmt = conn.prepareStatement(query)
+            try {
+                for (int j = 0; j < 5; j++) {
+                    // 10 rows per insert
+                    for (int i = 0; i < INSERT_BATCH_SIZE; i++) {
+                        stmt.setInt(1, i);
+                        stmt.setString(2, "name" + i);
+                        stmt.setInt(3, i + 10);
+                        stmt.addBatch();
+                    }
+                    int[] result = stmt.executeBatch();
+                }
+            } finally {
+                stmt.close()
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw e
+        } finally {
+            conn.close()
+        }
+    }
+}
diff --git a/regression-test/suites/doc/data-operate/import/import-way/insert-into-manual.md.groovy b/regression-test/suites/doc/data-operate/import/import-way/insert-into-manual.md.groovy
new file mode 100644
index 00000000000..625d66f4682
--- /dev/null
+++ b/regression-test/suites/doc/data-operate/import/import-way/insert-into-manual.md.groovy
@@ -0,0 +1,93 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import org.junit.jupiter.api.Assertions;
+
+suite("docs/data-operate/import/import-way/insert-into-manual.md") {
+    try {
+        multi_sql """
+            CREATE DATABASE IF NOT EXISTS testdb;
+            DROP TABLE IF EXISTS testdb.test_table;
+        """
+        sql """
+            CREATE TABLE testdb.test_table(
+                user_id            BIGINT       NOT NULL COMMENT "user ID",
+                name               VARCHAR(20)           COMMENT "user name",
+                age                INT                   COMMENT "user age"
+            )
+            DUPLICATE KEY(user_id)
+            DISTRIBUTED BY HASH(user_id) BUCKETS 10
+            PROPERTIES ("replication_num" = "1");
+        """
+        sql """
+            INSERT INTO testdb.test_table (user_id, name, age)
+            VALUES (1, "Emily", 25),
+                   (2, "Benjamin", 35),
+                   (3, "Olivia", 28),
+                   (4, "Alexander", 60),
+                   (5, "Ava", 17);
+        """
+        qt_sql "SELECT COUNT(*) FROM testdb.test_table;"
+
+        sql "DROP TABLE IF EXISTS testdb.test_table2;"
+        sql "CREATE TABLE testdb.test_table2 LIKE testdb.test_table;"
+        sql """
+            INSERT INTO testdb.test_table2
+            SELECT * FROM testdb.test_table WHERE age < 30;
+        """
+        qt_sql "SELECT COUNT(*) FROM testdb.test_table2;"
+        sql "SHOW LOAD FROM testdb;"
+
+
+        multi_sql """
+            CREATE TABLE IF NOT EXISTS empty_tbl (k1 VARCHAR(32)) PROPERTIES ("replication_num" = "1");
+            CREATE TABLE IF NOT EXISTS tbl1 LIKE empty_tbl;
+            CREATE TABLE IF NOT EXISTS tbl2 LIKE empty_tbl;
+        """
+        sql "INSERT INTO tbl1 SELECT * FROM empty_tbl;"
+        sql "CLEAN LABEL FROM ${curDbName}"
+        multi_sql """
+            INSERT INTO tbl1 SELECT * FROM tbl2;
+            INSERT INTO tbl1 WITH LABEL my_label1 SELECT * FROM tbl2;
+            INSERT INTO tbl1 SELECT * FROM tbl2;
+            INSERT INTO tbl1 SELECT * FROM tbl2;
+        """
+        sql """SHOW LOAD WHERE label="xxx";"""
+        try {
+            sql """SHOW TRANSACTION WHERE id=4005;"""
+        } catch (Exception e) {
+            if (!e.getMessage().contains("transaction with id 4005 does not exist")) {
+                logger.error("this sql should not throw any other error")
+                throw e
+            }
+        }
+        sql """INSERT INTO tbl1 SELECT * FROM tbl2 WHERE k1 = "a";"""
+        if (!isCloudMode()) {
+            // skip this case if this is a cloud cluster
+            try {
+                sql "INSERT INTO tbl1 SELECT LPAD('foo', 100, 'bar');"
+                Assertions.fail("this sql should fail, because we want get err 
url ")
+            } catch (Exception e) {
+                var msg = e.getMessage()
+                var err_url = msg.substring(msg.lastIndexOf("http"))
+                sql """SHOW LOAD WARNINGS ON "${err_url}";"""
+            }
+        }
+    } catch (Throwable t) {
+        Assertions.fail("examples in 
docs/data-operate/import/import-way/insert-into-manual.md failed to exec, 
please fix it", t)
+    }
+}
diff --git a/regression-test/suites/doc/data-operate/import/import-way/mysql-load-manual.md.groovy b/regression-test/suites/doc/data-operate/import/import-way/mysql-load-manual.md.groovy
new file mode 100644
index 00000000000..9b350402aaf
--- /dev/null
+++ b/regression-test/suites/doc/data-operate/import/import-way/mysql-load-manual.md.groovy
@@ -0,0 +1,160 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import org.junit.jupiter.api.Assertions
+
+suite("docs/data-operate/import/import-way/mysql-load-manual.md") {
+    def is_linux = System.getProperty("os.name").toLowerCase().contains("linux")
+    def run_cmd = { String cmdText ->
+        try {
+            println(cmd(cmdText))
+            return true
+        } catch (Exception ignored) {
+            return false
+        }
+    }
+    if (!is_linux) {
+        logger.warn("skip this case when not running on Linux")
+        return
+    }
+    logger.info("check whether the mysql command is installed")
+    if (run_cmd("mysql --help 2>&1 >/dev/null") || run_cmd("yum -y install mysql")) {
+        logger.info("mysql command works properly, continuing")
+    } else {
+        logger.warn("could not install the mysql client, skip this case")
+        return
+    }
+    def writeToFile = {String path, String data ->
+        OutputStreamWriter w = null
+        try {
+            w = new OutputStreamWriter(new FileOutputStream(path))
+            w.write(data)
+            w.flush()
+            w.close()
+        } finally {
+            if (w != null) w.close()
+        }
+    }
+    def load_local = {String sql ->
+        if (is_linux) {
+            var output = cmd """
+                cd ${context.file.parent} && \\
+                cat << EOF | mysql --local-infile -vvv -h ${getMasterIp()} -P ${getMasterPort("mysql")} -u ${context.config.jdbcUser} ${context.config.jdbcPassword.isEmpty() ? "" : "-p${context.config.jdbcPassword}"} -D testdb
+${sql}
+EOF
+            """
+            println(output)
+        }
+    }
+
+    try {
+        multi_sql """
+            CREATE DATABASE IF NOT EXISTS testdb;
+            USE testdb;
+            DROP TABLE IF EXISTS t1;
+        """
+        sql """
+            CREATE TABLE testdb.t1 (
+                pk     INT, 
+                v1     INT SUM
+            ) AGGREGATE KEY (pk) 
+            DISTRIBUTED BY hash (pk)
+            PROPERTIES ("replication_num" = "1");
+        """
+        load_local """
+            LOAD DATA LOCAL
+            INFILE 'client_local.csv'
+            INTO TABLE testdb.t1
+            COLUMNS TERMINATED BY ','
+            LINES TERMINATED BY '\\n';
+        """
+
+        try {
+            sql """show load warnings where 
label='b612907c-ccf4-4ac2-82fe-107ece655f0f';"""
+        } catch (Exception e) {
+            if (!e.getMessage().contains("job is not exist")) {
+                logger.error("occurring other error is not in expected")
+                throw e
+            }
+        }
+
+        sql "CREATE DATABASE IF NOT EXISTS testDb"
+        sql "DROP TABLE IF EXISTS testDb.testTbl"
+        sql """
+            CREATE TABLE testDb.testTbl (
+                k1     INT, 
+                k2     INT, 
+                v1     INT SUM
+            ) AGGREGATE KEY (k1,k2)
+            PARTITION BY RANGE(k1) (
+                PARTITION p1 VALUES LESS THAN (1),
+                PARTITION p2 VALUES LESS THAN (2),
+                PARTITION p3 VALUES LESS THAN (3)
+            )
+            DISTRIBUTED BY hash (k1)
+            PROPERTIES ("replication_num" = "1");
+        """
+        writeToFile("${context.file.parent}/testData", "1\t2\t3\n")
+        load_local """
+            LOAD DATA LOCAL
+            INFILE 'testData'
+            INTO TABLE testDb.testTbl
+            PROPERTIES ("timeout"="100");
+        """
+        load_local """
+            LOAD DATA LOCAL
+            INFILE 'testData'
+            INTO TABLE testDb.testTbl
+            PROPERTIES ("max_filter_ratio"="0.2");
+        """
+        load_local """
+            LOAD DATA LOCAL
+            INFILE 'testData'
+            INTO TABLE testDb.testTbl
+            (k2, k1, v1);
+        """
+        writeToFile("${context.file.parent}/testData", "1,2,3\n")
+        load_local """
+            LOAD DATA LOCAL
+            INFILE 'testData'
+            INTO TABLE testDb.testTbl
+            COLUMNS TERMINATED BY ','
+            LINES TERMINATED BY '\\n';
+        """
+        writeToFile("${context.file.parent}/testData", "1\t2\t3\n")
+        load_local """
+            LOAD DATA LOCAL
+            INFILE 'testData'
+            INTO TABLE testDb.testTbl
+            PARTITION (p1, p2);
+        """
+        load_local """
+            LOAD DATA LOCAL
+            INFILE 'testData'
+            INTO TABLE testDb.testTbl
+            PROPERTIES ("timezone"="Africa/Abidjan");
+        """
+        load_local """
+            LOAD DATA LOCAL
+            INFILE 'testData'
+            INTO TABLE testDb.testTbl
+            PROPERTIES ("exec_mem_limit"="10737418240");
+        """
+    } catch (Throwable t) {
+        Assertions.fail("examples in 
docs/data-operate/import/import-way/mysql-load-manual.md failed to exec, please 
fix it", t)
+    }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org
