This is an automated email from the ASF dual-hosted git repository.

kxiao pushed a commit to branch branch-2.0
in repository https://gitbox.apache.org/repos/asf/doris.git

commit a92632700cdd32481e8acbf7ca4f81e7cdc922a6
Author: zhangguoqiang <18372634...@163.com>
AuthorDate: Tue Oct 17 10:11:57 2023 +0800

    [Enhance](regression) add hive out file check (#25475)

    add hive out file check
    fix hive sql state with " ; "
---
 .../hive/test_prepare_hive_data_in_case.out        | 11 +++++
 .../org/apache/doris/regression/suite/Suite.groovy | 48 +++++++++++++++++++---
 .../hive/test_prepare_hive_data_in_case.groovy     | 11 ++++-
 3 files changed, 62 insertions(+), 8 deletions(-)

diff --git a/regression-test/data/external_table_p0/hive/test_prepare_hive_data_in_case.out b/regression-test/data/external_table_p0/hive/test_prepare_hive_data_in_case.out
new file mode 100644
index 00000000000..7259a57a915
--- /dev/null
+++ b/regression-test/data/external_table_p0/hive/test_prepare_hive_data_in_case.out
@@ -0,0 +1,11 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !hive_docker_01 --
+eee	fff
+ccc	ddd
+aaa	bbb
+
+-- !sql_02 --
+eee	fff
+ccc	ddd
+aaa	bbb
+
diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
index 6cd22bb663f..f03a59a0042 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
@@ -551,7 +551,7 @@ class Suite implements GroovyInterceptable {
     }
 
     List<List<Object>> hive_docker(String sqlStr, boolean isOrder = false){
-        String cleanedSqlStr = sqlStr.replaceAll(/;+$/, '')
+        String cleanedSqlStr = sqlStr.replaceAll("\\s*;\\s*\$", "")
         def (result, meta) = JdbcUtils.executeToList(context.getHiveDockerConnection(), cleanedSqlStr)
         if (isOrder) {
             result = DataUtils.sortByToString(result)
@@ -560,7 +560,7 @@
     }
 
     List<List<Object>> hive_remote(String sqlStr, boolean isOrder = false){
-        String cleanedSqlStr = sqlStr.replaceAll(/;+$/, '')
+        String cleanedSqlStr = sqlStr.replaceAll("\\s*;\\s*\$", "")
         def (result, meta) = JdbcUtils.executeToList(context.getHiveRemoteConnection(), cleanedSqlStr)
         if (isOrder) {
             result = DataUtils.sortByToString(result)
@@ -572,9 +572,23 @@
         if (context.config.generateOutputFile || context.config.forceGenerateOutputFile) {
             Tuple2<List<List<Object>>, ResultSetMetaData> tupleResult = null
             if (arg instanceof PreparedStatement) {
-                tupleResult = JdbcUtils.executeToStringList(context.getConnection(), (PreparedStatement) arg)
+                if (tag.contains("hive_docker")) {
+                    tupleResult = JdbcUtils.executeToStringList(context.getHiveDockerConnection(), (PreparedStatement) arg)
+                }else if (tag.contains("hive_remote")) {
+                    tupleResult = JdbcUtils.executeToStringList(context.getHiveRemoteConnection(), (PreparedStatement) arg)
+                }
+                else{
+                    tupleResult = JdbcUtils.executeToStringList(context.getConnection(), (PreparedStatement) arg)
+                }
             } else {
-                tupleResult = JdbcUtils.executeToStringList(context.getConnection(), (String) arg)
+                if (tag.contains("hive_docker")) {
+                    tupleResult = JdbcUtils.executeToStringList(context.getHiveDockerConnection(), (String) arg)
+                }else if (tag.contains("hive_remote")) {
+                    tupleResult = JdbcUtils.executeToStringList(context.getHiveRemoteConnection(), (String) arg)
+                }
+                else{
+                    tupleResult = JdbcUtils.executeToStringList(context.getConnection(), (String) arg)
+                }
             }
             def (result, meta) = tupleResult
             if (isOrder) {
@@ -596,9 +610,23 @@ class Suite implements GroovyInterceptable {
             OutputUtils.TagBlockIterator expectCsvResults = context.getOutputIterator().next()
             Tuple2<List<List<Object>>, ResultSetMetaData> tupleResult = null
             if (arg instanceof PreparedStatement) {
-                tupleResult = JdbcUtils.executeToStringList(context.getConnection(), (PreparedStatement) arg)
+                if (tag.contains("hive_docker")) {
+                    tupleResult = JdbcUtils.executeToStringList(context.getHiveDockerConnection(), (PreparedStatement) arg)
+                }else if (tag.contains("hive_remote")) {
+                    tupleResult = JdbcUtils.executeToStringList(context.getHiveRemoteConnection(), (PreparedStatement) arg)
+                }
+                else{
+                    tupleResult = JdbcUtils.executeToStringList(context.getConnection(), (PreparedStatement) arg)
+                }
             } else {
-                tupleResult = JdbcUtils.executeToStringList(context.getConnection(), (String) arg)
+                if (tag.contains("hive_docker")) {
+                    tupleResult = JdbcUtils.executeToStringList(context.getHiveDockerConnection(), (String) arg)
+                }else if (tag.contains("hive_remote")) {
+                    tupleResult = JdbcUtils.executeToStringList(context.getHiveRemoteConnection(), (String) arg)
+                }
+                else{
+                    tupleResult = JdbcUtils.executeToStringList(context.getConnection(), (String) arg)
+                }
             }
             def (realResults, meta) = tupleResult
             if (isOrder) {
@@ -627,6 +655,14 @@ class Suite implements GroovyInterceptable {
 
     void quickTest(String tag, String sql, boolean isOrder = false) {
         logger.info("Execute tag: ${tag}, ${isOrder ? "order_" : ""}sql: ${sql}".toString())
+        if (tag.contains("hive_docker")) {
+            String cleanedSqlStr = sql.replaceAll("\\s*;\\s*\$", "")
+            sql = cleanedSqlStr
+        }
+        if (tag.contains("hive_remote")) {
+            String cleanedSqlStr = sql.replaceAll("\\s*;\\s*\$", "")
+            sql = cleanedSqlStr
+        }
         quickRunTest(tag, sql, isOrder)
     }
 
diff --git a/regression-test/suites/external_table_p0/hive/test_prepare_hive_data_in_case.groovy b/regression-test/suites/external_table_p0/hive/test_prepare_hive_data_in_case.groovy
index b56ba5806cb..6569d57f211 100644
--- a/regression-test/suites/external_table_p0/hive/test_prepare_hive_data_in_case.groovy
+++ b/regression-test/suites/external_table_p0/hive/test_prepare_hive_data_in_case.groovy
@@ -24,10 +24,13 @@ suite("test_prepare_hive_data_in_case", "p0,external,hive,external_docker,extern
         String hms_port = context.config.otherConfigs.get("hms_port")
 
         hive_docker """show databases;"""
-        hive_docker """drop table if exists default.test_prepare_hive_data_in_case;"""
-        hive_docker """create table default.test_prepare_hive_data_in_case (k1 String, k2 String);"""
+        hive_docker """drop table if exists default.test_prepare_hive_data_in_case; """
+        hive_docker """
+            create table default.test_prepare_hive_data_in_case (k1 String, k2 String);
+        """
         hive_docker """insert into default.test_prepare_hive_data_in_case values ('aaa','bbb'),('ccc','ddd'),('eee','fff')"""
         def values = hive_docker """select count(*) from `default`.test_prepare_hive_data_in_case;"""
+        log.info(values.toString())
 
         sql """drop catalog if exists test_prepare_hive_data_in_case;"""
 
@@ -39,6 +42,10 @@ suite("test_prepare_hive_data_in_case", "p0,external,hive,external_docker,extern
         log.info(values2.toString())
         assertEquals(values[0][0],values2[0][0])
 
+        qt_hive_docker_01 """select * from default.test_prepare_hive_data_in_case order by k1 desc ;"""
+
+        qt_sql_02 """ select * from test_prepare_hive_data_in_case.`default`.test_prepare_hive_data_in_case order by k1 desc;"""
+
     } finally {
     }
 }
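[Editor's illustration, not part of the commit above: a minimal Groovy sketch of how the trailing-semicolon cleanup that this patch applies in hive_docker, hive_remote, and quickTest behaves. The cleanHiveSql name is made up for the sketch; it relies only on String.replaceAll with the same "\\s*;\\s*\$" pattern the patch introduces, which strips one trailing ";" and any surrounding whitespace so the statement handed to the Hive JDBC connection no longer ends with a semicolon.]

def cleanHiveSql = { String sqlStr ->
    // Strip one trailing ";" plus any surrounding whitespace, mirroring the patch.
    sqlStr.replaceAll("\\s*;\\s*\$", "")
}

assert cleanHiveSql("show databases;")   == "show databases"
assert cleanHiveSql("show databases ; ") == "show databases"
assert cleanHiveSql("show databases")    == "show databases"   // unchanged when there is no trailing ";"
println "trailing-semicolon cleanup checks passed"
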
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org