This is an automated email from the ASF dual-hosted git repository.
morningman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
new 700069c5f72 [fix](regression-test) fix `test_outfile_csv_with_names_and_types` case (#46214)
700069c5f72 is described below
commit 700069c5f729c87e85e3402213c3ffe56dcac994
Author: Tiewei Fang <[email protected]>
AuthorDate: Thu Jan 2 12:30:29 2025 +0800
[fix](regression-test) fix `test_outfile_csv_with_names_and_types` case (#46214)
Problem Summary:
The `test_outfile_csv_with_names_and_types` case asserted an exact match against a single column-type string, but the exported type of `char_col` may appear as either `CHAR` or `VARCHAR`. The check in the export_p0 suite now compares types column by column and accepts both spellings for `char_col`. The duplicate copy of the suite under `nereids_p0/outfile`, together with its expected-output `.out` file, is removed.
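For reference, the relaxed type check (the same logic as in the export_p0 diff below) is roughly the following sketch. It assumes, as in the suite, that `outLines` already holds the lines of the exported file and that column index 16 is `char_col`:

    String[] columnTypes = ["INT","DATEV2","DATETIMEV2","DATEV2","DATETIMEV2","DATETIMEV2","DATETIMEV2",
                            "VARCHAR","SMALLINT","TINYINT","BOOL","INT","BIGINT","INT","FLOAT","DOUBLE",
                            "CHAR","DECIMAL128I","IPV4","IPV6"]
    // the second header line of a csv_with_names_and_types export carries the column types
    String[] splitType = outLines.get(1).split(",")
    for (int j = 0; j < columnTypes.length; ++j) {
        if (j == 16) {
            // char_col may be reported as either CHAR or VARCHAR, so accept both
            assertTrue("CHAR".equals(splitType[j]) || "VARCHAR".equals(splitType[j]))
        } else {
            assertEquals(columnTypes[j], splitType[j])
        }
    }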
---
.../test_outfile_csv_with_names_and_types.out | 25 ---
.../test_outfile_csv_with_names_and_types.groovy | 12 +-
.../test_outfile_csv_with_names_and_types.groovy | 172 ---------------------
3 files changed, 10 insertions(+), 199 deletions(-)
diff --git a/regression-test/data/nereids_p0/outfile/test_outfile_csv_with_names_and_types.out b/regression-test/data/nereids_p0/outfile/test_outfile_csv_with_names_and_types.out
deleted file mode 100644
index 63fa1d523c8..00000000000
--- a/regression-test/data/nereids_p0/outfile/test_outfile_csv_with_names_and_types.out
+++ /dev/null
@@ -1,25 +0,0 @@
--- This file is automatically generated. You should know what you did if you want to edit this
--- !select_default1 --
-1	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	Beijing	1	1	true	1	1	1	1.1	1.1	char1	1
-10	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	\N	\N	\N	\N	\N	\N	\N	\N	\N	\N	\N
-2	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	Beijing	2	2	true	2	2	2	2.2	2.2	char2	2
-3	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	Beijing	3	3	true	3	3	3	3.3	3.3	char3	3
-4	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	Beijing	4	4	true	4	4	4	4.4	4.4	char4	4
-5	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	Beijing	5	5	true	5	5	5	5.5	5.5	char5	5
-6	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	Beijing	6	6	true	6	6	6	6.6	6.6	char6	6
-7	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	Beijing	7	7	true	7	7	7	7.7	7.7	char7	7
-8	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	Beijing	8	8	true	8	8	8	8.8	8.8	char8	8
-9	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	Beijing	9	9	true	9	9	9	9.9	9.9	char9	9
-
--- !select_default2 --
-1	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	Beijing	1	1	true	1	1	1	1.1	1.1	char1	1
-10	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	\N	\N	\N	\N	\N	\N	\N	\N	\N	\N	\N
-2	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	Beijing	2	2	true	2	2	2	2.2	2.2	char2	2
-3	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	Beijing	3	3	true	3	3	3	3.3	3.3	char3	3
-4	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	Beijing	4	4	true	4	4	4	4.4	4.4	char4	4
-5	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	Beijing	5	5	true	5	5	5	5.5	5.5	char5	5
-6	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	Beijing	6	6	true	6	6	6	6.6	6.6	char6	6
-7	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	Beijing	7	7	true	7	7	7	7.7	7.7	char7	7
-8	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	Beijing	8	8	true	8	8	8	8.8	8.8	char8	8
-9	2017-10-01	2017-10-01T00:00	2017-10-01	2017-10-01T00:00	2017-10-01T00:00:00.111	2017-10-01T00:00:00.111111	Beijing	9	9	true	9	9	9	9.9	9.9	char9	9
-
diff --git a/regression-test/suites/export_p0/test_outfile_csv_with_names_and_types.groovy b/regression-test/suites/export_p0/test_outfile_csv_with_names_and_types.groovy
index 8d40cb25af1..639988eb240 100644
--- a/regression-test/suites/export_p0/test_outfile_csv_with_names_and_types.groovy
+++ b/regression-test/suites/export_p0/test_outfile_csv_with_names_and_types.groovy
@@ -115,10 +115,18 @@ suite("test_outfile_csv_with_names_and_types") {

         // check column names
         String columnNames = """user_id,date,datetime,date_1,datetime_1,datetime_2,datetime_3,city,age,sex,bool_col,int_col,bigint_col,largeint_col,float_col,double_col,char_col,decimal_col,ipv4_col,ipv6_col"""
-        String columnTypes = """INT,DATEV2,DATETIMEV2,DATEV2,DATETIMEV2,DATETIMEV2,DATETIMEV2,VARCHAR,SMALLINT,TINYINT,BOOL,INT,BIGINT,INT,FLOAT,DOUBLE,CHAR,DECIMAL128I,IPV4,IPV6"""
+        String[] columnTypes = ["INT","DATEV2","DATETIMEV2","DATEV2","DATETIMEV2","DATETIMEV2","DATETIMEV2","VARCHAR","SMALLINT","TINYINT","BOOL","INT","BIGINT","INT","FLOAT","DOUBLE","CHAR","DECIMAL128I","IPV4","IPV6"]
         List<String> outLines = Files.readAllLines(Paths.get(files[0].getAbsolutePath()), StandardCharsets.UTF_8);
         assertEquals(columnNames, outLines.get(0))
-        assertEquals(columnTypes, outLines.get(1))
+        // check type
+        String[] splitType = outLines.get(1).split(",");
+        for (int j = 0; j < columnTypes.length; ++j) {
+            if (j == 16) {
+                assertTrue("CHAR".equals(splitType[j]) || "VARCHAR".equals(splitType[j]))
+            } else {
+                assertEquals(columnTypes[j], splitType[j])
+            }
+        }

         // check data correctness
         sql """ DROP TABLE IF EXISTS ${tableName2} """
diff --git a/regression-test/suites/nereids_p0/outfile/test_outfile_csv_with_names_and_types.groovy b/regression-test/suites/nereids_p0/outfile/test_outfile_csv_with_names_and_types.groovy
deleted file mode 100644
index fe92d0972d0..00000000000
--- a/regression-test/suites/nereids_p0/outfile/test_outfile_csv_with_names_and_types.groovy
+++ /dev/null
@@ -1,172 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import org.codehaus.groovy.runtime.IOGroovyMethods
-
-import java.nio.charset.StandardCharsets
-import java.nio.file.Files
-import java.nio.file.Paths
-
-suite("test_outfile_csv_with_names_and_types") {
-    sql 'set enable_nereids_planner=true'
-    sql 'set enable_fallback_to_original_planner=false'
-
-
-    def dbName = "test_outfile_csv_with_names_and_types"
-    sql "CREATE DATABASE IF NOT EXISTS ${dbName}"
-    sql "USE $dbName"
-    StringBuilder strBuilder = new StringBuilder()
-    strBuilder.append("curl --location-trusted -u " + context.config.jdbcUser + ":" + context.config.jdbcPassword)
-    strBuilder.append(" http://" + context.config.feHttpAddress + "/rest/v1/config/fe")
-
-    String command = strBuilder.toString()
-    def process = command.toString().execute()
-    def code = process.waitFor()
-    def err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
-    def out = process.getText()
-    logger.info("Request FE Config: code=" + code + ", out=" + out + ", err=" + err)
-    assertEquals(code, 0)
-    def response = parseJson(out.trim())
-    assertEquals(response.code, 0)
-    assertEquals(response.msg, "success")
-    def configJson = response.data.rows
-    boolean enableOutfileToLocal = false
-    for (Object conf: configJson) {
-        assert conf instanceof Map
-        if (((Map<String, String>) conf).get("Name").toLowerCase() == "enable_outfile_to_local") {
-            enableOutfileToLocal = ((Map<String, String>) conf).get("Value").toLowerCase() == "true"
-        }
-    }
-    if (!enableOutfileToLocal) {
-        logger.warn("Please set enable_outfile_to_local to true to run test_outfile")
-        return
-    }
-    def tableName = "outfil_csv_with_names_and_types_test"
-    def tableName2 = "outfil_csv_with_names_and_types_test2"
-    def uuid = UUID.randomUUID().toString()
-    def outFilePath = """/tmp/test_outfile_with_names_types_${uuid}"""
-
-    try {
-        sql """ DROP TABLE IF EXISTS ${tableName} """
-        sql """
-            CREATE TABLE IF NOT EXISTS ${tableName} (
-                `user_id` INT NOT NULL COMMENT "用户id",
-                `date` DATE NOT NULL COMMENT "数据灌入日期时间",
-                `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
-                `date_1` DATEV2 NOT NULL COMMENT "",
-                `datetime_1` DATETIMEV2 NOT NULL COMMENT "",
-                `datetime_2` DATETIMEV2(3) NOT NULL COMMENT "",
-                `datetime_3` DATETIMEV2(6) NOT NULL COMMENT "",
-                `city` VARCHAR(20) COMMENT "用户所在城市",
-                `age` SMALLINT COMMENT "用户年龄",
-                `sex` TINYINT COMMENT "用户性别",
-                `bool_col` boolean COMMENT "",
-                `int_col` int COMMENT "",
-                `bigint_col` bigint COMMENT "",
-                `largeint_col` int COMMENT "",
-                `float_col` float COMMENT "",
-                `double_col` double COMMENT "",
-                `char_col` CHAR(10) COMMENT "",
-                `decimal_col` decimal COMMENT ""
-            )
-            DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
-        """
-        StringBuilder sb = new StringBuilder()
-        int i = 1
-        for (; i < 10; i ++) {
-            sb.append("""
-                (${i}, '2017-10-01', '2017-10-01 00:00:00', '2017-10-01', '2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', 'Beijing', ${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}', ${i}),
-            """)
-        }
-        sb.append("""
-            (${i}, '2017-10-01', '2017-10-01 00:00:00', '2017-10-01', '2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)
-        """)
-        sql """ INSERT INTO ${tableName} VALUES
-            ${sb.toString()}
-        """
-        order_qt_select_default1 """ SELECT * FROM ${tableName} t ORDER BY user_id; """
-
-        // check outfile
-        File path = new File(outFilePath)
-        if (!path.exists()) {
-            assert path.mkdirs()
-        } else {
-            throw new IllegalStateException("""${outFilePath} already exists! """)
-        }
-        sql """
-            SELECT * FROM ${tableName} t ORDER BY user_id INTO OUTFILE "file://${outFilePath}/" FORMAT AS csv_with_names_and_types
-            PROPERTIES("column_separator" = ",");
-        """
-        File[] files = path.listFiles()
-        assert files.length == 1
-
-        // check column names
-        String columnNames = """user_id,date,datetime,date_1,datetime_1,datetime_2,datetime_3,city,age,sex,bool_col,int_col,bigint_col,largeint_col,float_col,double_col,char_col,decimal_col"""
-        String columnTypes = """INT,DATEV2,DATETIMEV2,DATEV2,DATETIMEV2,DATETIMEV2,DATETIMEV2,VARCHAR,SMALLINT,TINYINT,BOOL,INT,BIGINT,INT,FLOAT,DOUBLE,CHAR,DECIMAL128I"""
-        List<String> outLines = Files.readAllLines(Paths.get(files[0].getAbsolutePath()), StandardCharsets.UTF_8);
-        assertEquals(columnNames, outLines.get(0))
-        assertEquals(columnTypes, outLines.get(1))
-
-        // check data correctness
-        sql """ DROP TABLE IF EXISTS ${tableName2} """
-        sql """
-            CREATE TABLE IF NOT EXISTS ${tableName2} (
-                `user_id` INT NOT NULL COMMENT "用户id",
-                `date` DATE NOT NULL COMMENT "数据灌入日期时间",
-                `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
-                `date_1` DATEV2 NOT NULL COMMENT "",
-                `datetime_1` DATETIMEV2 NOT NULL COMMENT "",
-                `datetime_2` DATETIMEV2(3) NOT NULL COMMENT "",
-                `datetime_3` DATETIMEV2(6) NOT NULL COMMENT "",
-                `city` VARCHAR(20) COMMENT "用户所在城市",
-                `age` SMALLINT COMMENT "用户年龄",
-                `sex` TINYINT COMMENT "用户性别",
-                `bool_col` boolean COMMENT "",
-                `int_col` int COMMENT "",
-                `bigint_col` bigint COMMENT "",
-                `largeint_col` int COMMENT "",
-                `float_col` float COMMENT "",
-                `double_col` double COMMENT "",
-                `char_col` CHAR(10) COMMENT "",
-                `decimal_col` decimal COMMENT ""
-            )
-            DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
-        """
-
-        StringBuilder commandBuilder = new StringBuilder()
-        commandBuilder.append("""curl -v --location-trusted -u ${context.config.feHttpUser}:${context.config.feHttpPassword}""")
-        commandBuilder.append(""" -H format:csv_with_names_and_types -H column_separator:, -T """ + files[0].getAbsolutePath() + """ http://${context.config.feHttpAddress}/api/""" + dbName + "/" + tableName2 + "/_stream_load")
-        command = commandBuilder.toString()
-        process = command.execute()
-        code = process.waitFor()
-        err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())))
-        out = process.getText()
-        logger.info("Run command: command=" + command + ",code=" + code + ", out=" + out + ", err=" + err)
-        assertEquals(code, 0)
-        order_qt_select_default2 """ SELECT * FROM ${tableName2} t ORDER BY user_id; """
-    } finally {
-        try_sql("DROP TABLE IF EXISTS ${tableName}")
-        try_sql("DROP TABLE IF EXISTS ${tableName2}")
-        File path = new File(outFilePath)
-        if (path.exists()) {
-            for (File f: path.listFiles()) {
-                f.delete();
-            }
-            path.delete();
-        }
-    }
-}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]