This is an automated email from the ASF dual-hosted git repository.

kxiao pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-2.1 by this push:
     new 4dcceaefea2 [test](ES Catalog) Add test cases for ES 5.x (#34441) (#36993)
4dcceaefea2 is described below

commit 4dcceaefea24d24ba5d30bf30e7365f8a5c32fbb
Author: qiye <jianliang5...@gmail.com>
AuthorDate: Fri Jun 28 16:58:07 2024 +0800

    [test](ES Catalog) Add test cases for ES 5.x (#34441) (#36993)
    
    backport #34441
---
 .../docker-compose/elasticsearch/es.env            |  1 +
 .../docker-compose/elasticsearch/es.yaml.tpl       | 26 +++++++++
 .../elasticsearch/scripts/data/data3_es5.json      | 28 ++++++++++
 .../elasticsearch/scripts/es_init.sh               | 21 ++++++++
 regression-test/conf/regression-conf.groovy        |  1 +
 .../data/external_table_p0/es/test_es_query.out    | 62 ++++++++++++++++++++++
 .../pipeline/external/conf/regression-conf.groovy  |  1 +
 .../external_table_p0/es/test_es_query.groovy      | 30 +++++++++++
 8 files changed, 170 insertions(+)

diff --git a/docker/thirdparties/docker-compose/elasticsearch/es.env b/docker/thirdparties/docker-compose/elasticsearch/es.env
index 0b8138fb340..a98cc1c3663 100644
--- a/docker/thirdparties/docker-compose/elasticsearch/es.env
+++ b/docker/thirdparties/docker-compose/elasticsearch/es.env
@@ -19,3 +19,4 @@
 DOCKER_ES_6_EXTERNAL_PORT=19200
 DOCKER_ES_7_EXTERNAL_PORT=29200
 DOCKER_ES_8_EXTERNAL_PORT=39200
+DOCKER_ES_5_EXTERNAL_PORT=59200
diff --git a/docker/thirdparties/docker-compose/elasticsearch/es.yaml.tpl b/docker/thirdparties/docker-compose/elasticsearch/es.yaml.tpl
index 25415380906..5acbec13465 100644
--- a/docker/thirdparties/docker-compose/elasticsearch/es.yaml.tpl
+++ b/docker/thirdparties/docker-compose/elasticsearch/es.yaml.tpl
@@ -18,6 +18,31 @@
 version: "3.9"
 
 services:
+  doris--es_5:
+    image: elasticsearch:5.6.16
+    ports:
+      - ${DOCKER_ES_5_EXTERNAL_PORT}:9200
+    environment:
+      cluster.name: "elasticsearch5"
+      ES_JAVA_OPTS: "-Xms256m -Xmx256m"
+      discovery.type: "single-node"
+      xpack.security.enabled: "false"
+      cluster.routing.allocation.disk.threshold_enabled: true 
+      cluster.routing.allocation.disk.watermark.low: 500mb 
+      cluster.routing.allocation.disk.watermark.high: 300mb
+      cluster.routing.allocation.disk.watermark.flood_stage: 200mb
+      ES_LOG_STYLE: "file"
+    volumes:
+      - ./data/es5/:/usr/share/elasticsearch/data
+      - ./logs/es5/:/usr/share/elasticsearch/logs
+      - ./config/es5/log4j2.properties:/usr/share/elasticsearch/log4j2.properties
+    networks:
+      - doris--es
+    healthcheck:
+      test: [ "CMD", "curl", "localhost:9200/_cluster/health?wait_for_status=green" ]
+      interval: 30s
+      timeout: 10s
+      retries: 100
   doris--es_6:
     # es official not provide 6.x image for arm/v8, use compatible image.
     # https://github.com/dockhippie/elasticsearch/tree/master/v6.8
@@ -96,6 +121,7 @@ services:
     volumes:
       - ./scripts/:/mnt/scripts
     environment:
+      ES_5_HOST: "doris--es_5"
       ES_6_HOST: "doris--es_6"
       ES_7_HOST: "doris--es_7"
       ES_8_HOST: "doris--es_8"
diff --git a/docker/thirdparties/docker-compose/elasticsearch/scripts/data/data3_es5.json b/docker/thirdparties/docker-compose/elasticsearch/scripts/data/data3_es5.json
new file mode 100755
index 00000000000..f4cc19ff9ec
--- /dev/null
+++ b/docker/thirdparties/docker-compose/elasticsearch/scripts/data/data3_es5.json
@@ -0,0 +1,28 @@
+{
+  "test1": "string3",
+  "test2": "text3_4*5",
+  "test3": 5.0,
+  "test4": "2022-08-08",
+  "test5": 3333.22,
+  "test6": "2022-08-08T12:10:10.151",
+  "c_bool": [true, false, true, true],
+  "c_byte": [1, -2, -3, 4],
+  "c_short": [128, 129, -129, -130],
+  "c_integer": [32768, 32769, -32769, -32770],
+  "c_long": [-1, 0, 1, 2],
+  "c_unsigned_long": [0, 1, 2, 3],
+  "c_float": [1.0, 1.1, 1.2, 1.3],
+  "c_half_float": [1, 2, 3, 4],
+  "c_double": [1, 2, 3, 4],
+  "c_scaled_float": [1, 2, 3, 4],
+  "c_date": ["2020-01-01", "2020-01-02"],
+  "c_datetime": ["2020-01-01 12:00:00", "2020-01-02 13:01:01"],
+  "c_keyword": ["a", "b", "c"],
+  "c_text": ["d", "e", "f"],
+  "c_ip": ["192.168.0.1", "127.0.0.1"],
+  "c_person": [
+    {"name": "Andy", "age": 18},
+    {"name": "Tim", "age": 28}
+  ],
+  "message": "I'm not null or empty"
+}
diff --git a/docker/thirdparties/docker-compose/elasticsearch/scripts/es_init.sh b/docker/thirdparties/docker-compose/elasticsearch/scripts/es_init.sh
index 51364bbdf82..5c865e660ad 100755
--- a/docker/thirdparties/docker-compose/elasticsearch/scripts/es_init.sh
+++ b/docker/thirdparties/docker-compose/elasticsearch/scripts/es_init.sh
@@ -16,6 +16,27 @@
 # specific language governing permissions and limitations
 # under the License.
 
+# es 5
+# create index test1
+# shellcheck disable=SC2154
+curl "http://${ES_5_HOST}:9200/test1" -H "Content-Type:application/json" -X PUT -d "@/mnt/scripts/index/es6_test1.json"
+# create index test2_20220808
+curl "http://${ES_5_HOST}:9200/test2_20220808" -H "Content-Type:application/json" -X PUT -d '@/mnt/scripts/index/es6_test2.json'
+# put data for test1
+curl "http://${ES_5_HOST}:9200/test1/doc/1" -H "Content-Type:application/json" -X POST -d '@/mnt/scripts/data/data1_es6.json'
+curl "http://${ES_5_HOST}:9200/test1/doc/2" -H "Content-Type:application/json" -X POST -d '@/mnt/scripts/data/data2_es6.json'
+# only difference between es5 and es6
+curl "http://${ES_5_HOST}:9200/test1/doc/3" -H "Content-Type:application/json" -X POST -d '@/mnt/scripts/data/data3_es5.json'
+# put data for test2_20220808
+curl "http://${ES_5_HOST}:9200/test2_20220808/doc/1" -H "Content-Type:application/json" -X POST -d '@/mnt/scripts/data/data1_es6.json'
+curl "http://${ES_5_HOST}:9200/test2_20220808/doc/2" -H "Content-Type:application/json" -X POST -d '@/mnt/scripts/data/data2_es6.json'
+curl "http://${ES_5_HOST}:9200/test2_20220808/doc/3" -H "Content-Type:application/json" -X POST -d '@/mnt/scripts/data/data3_es5.json'
+# put _meta for array
+curl "http://${ES_5_HOST}:9200/test1/doc/_mapping" -H "Content-Type:application/json" -X PUT -d "@/mnt/scripts/index/array_meta.json"
+curl "http://${ES_5_HOST}:9200/test2_20220808/doc/_mapping" -H "Content-Type:application/json" -X PUT -d "@/mnt/scripts/index/array_meta.json"
+# create index .hide
+curl "http://${ES_5_HOST}:9200/.hide" -H "Content-Type:application/json" -X PUT -d "@/mnt/scripts/index/es6_hide.json"
+
 # es 6
 # create index test1
 # shellcheck disable=SC2154
diff --git a/regression-test/conf/regression-conf.groovy b/regression-test/conf/regression-conf.groovy
index ae14fdf7401..08b51ce46aa 100644
--- a/regression-test/conf/regression-conf.groovy
+++ b/regression-test/conf/regression-conf.groovy
@@ -145,6 +145,7 @@ kafka_port=19193
 // elasticsearch catalog test config
 // See `docker/thirdparties/start-thirdparties-docker.sh`
 enableEsTest=false
+es_5_port=59200
 es_6_port=19200
 es_7_port=29200
 es_8_port=39200
diff --git a/regression-test/data/external_table_p0/es/test_es_query.out b/regression-test/data/external_table_p0/es/test_es_query.out
index 2e98ee6a174..605e2f1aa93 100644
--- a/regression-test/data/external_table_p0/es/test_es_query.out
+++ b/regression-test/data/external_table_p0/es/test_es_query.out
@@ -36,6 +36,68 @@ I'm not null or empty
 2022-08-08     2022-08-10T12:10:10     2022-08-10T12:10:10     2022-08-10T04:10:10     2022-08-10T20:10:10
 2022-08-08     2022-08-11T12:10:10     2022-08-11T12:10:10     2022-08-11T12:10:10     2022-08-11T11:10:10
 
+-- !sql_5_02 --
+[1, 0, 1, 1]   [1, -2, -3, 4]  ["2020-01-01", "2020-01-02"]    ["2020-01-01 12:00:00", "2020-01-02 13:01:01"]  [1, 2, 3, 4]    [1, 1.1, 1.2, 1.3]      [1, 2, 3, 4]    [32768, 32769, -32769, -32770]  ["192.168.0.1", "127.0.0.1"]    ["a", "b", "c"] [-1, 0, 1, 2]   ["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"] [1, 2, 3, 4]    [128, 129, -129, -130]  ["d", "e", "f"] [0, 1, 2, 3]    \N      string1 text#1  3.14    2022-08-08T00:00        12345   2022-08-08T20:10:10
+
+-- !sql_5_03 --
+[1, 0, 1, 1]   [1, -2, -3, 4]  ["2020-01-01", "2020-01-02"]    ["2020-01-01 12:00:00", "2020-01-02 13:01:01"]  [1, 2, 3, 4]    [1, 1.1, 1.2, 1.3]      [1, 2, 3, 4]    [32768, 32769, -32769, -32770]  ["192.168.0.1", "127.0.0.1"]    ["a", "b", "c"] [-1, 0, 1, 2]   ["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"] [1, 2, 3, 4]    [128, 129, -129, -130]  ["d", "e", "f"] [0, 1, 2, 3]    \N      string1 text#1  3.14    2022-08-08T00:00        12345   2022-08-08T20:10:10
+[1, 0, 1, 1]   [1, -2, -3, 4]  ["2020-01-01", "2020-01-02"]    ["2020-01-01 12:00:00", "2020-01-02 13:01:01"]  [1, 2, 3, 4]    [1, 1.1, 1.2, 1.3]      [1, 2, 3, 4]    [32768, 32769, -32769, -32770]  ["192.168.0.1", "127.0.0.1"]    ["a", "b", "c"] [-1, 0, 1, 2]   ["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"] [1, 2, 3, 4]    [128, 129, -129, -130]  ["d", "e", "f"] [0, 1, 2, 3]            string2 text2   4.0     2022-08-08T00:00        2222    2022-08-08T12:10:10
+[1, 0, 1, 1]   [1, -2, -3, 4]  ["2020-01-01", "2020-01-02"]    ["2020-01-01 12:00:00", "2020-01-02 13:01:01"]  [1, 2, 3, 4]    [1, 1.1, 1.2, 1.3]      [1, 2, 3, 4]    [32768, 32769, -32769, -32770]  ["192.168.0.1", "127.0.0.1"]    ["a", "b", "c"] [-1, 0, 1, 2]   ["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"] [1, 2, 3, 4]    [128, 129, -129, -130]  ["d", "e", "f"] [0, 1, 2, 3]    I'm not null or empty   string3 text3_4*5       5.0     2022-08-08T00:00        3333    2022-08-08T20:10:10
+
+-- !sql_5_04 --
+[1, 0, 1, 1]   [1, -2, -3, 4]  ["2020-01-01", "2020-01-02"]    ["2020-01-01 12:00:00", "2020-01-02 13:01:01"]  [1, 2, 3, 4]    [1, 1.1, 1.2, 1.3]      [1, 2, 3, 4]    [32768, 32769, -32769, -32770]  ["192.168.0.1", "127.0.0.1"]    ["a", "b", "c"] [-1, 0, 1, 2]   ["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"] [1, 2, 3, 4]    [128, 129, -129, -130]  ["d", "e", "f"] [0, 1, 2, 3]            string2 text2   4.0     2022-08-08T00:00        2222    2022-08-08T12:10:10
+
+-- !sql_5_05 --
+true   1       128     32768   -1      0       1.0     1.0     1.0     1.0     2020-01-01      2020-01-01T12:00        a       d       192.168.0.1     {"name":"Andy","age":18}
+true   1       128     32768   -1      0       1.0     1.0     1.0     1.0     2020-01-01      2020-01-01T12:00        a       d       192.168.0.1     {"name":"Andy","age":18}
+true   1       128     32768   -1      0       1.0     1.0     1.0     1.0     2020-01-01      2020-01-01T12:00        a       d       192.168.0.1     {"name":"Andy","age":18}
+
+-- !sql_5_06 --
+true   1       128     32768   -1      0       1.0     1.0     1.0     1.0     2020-01-01      2020-01-01T12:00        a       d       192.168.0.1     {"name":"Andy","age":18}
+true   1       128     32768   -1      0       1.0     1.0     1.0     1.0     2020-01-01      2020-01-01T12:00        a       d       192.168.0.1     {"name":"Andy","age":18}
+true   1       128     32768   -1      0       1.0     1.0     1.0     1.0     2020-01-01      2020-01-01T12:00        a       d       192.168.0.1     {"name":"Andy","age":18}
+
+-- !sql_5_07 --
+[1, 0, 1, 1]   [1, -2, -3, 4]  ["2020-01-01", "2020-01-02"]    ["2020-01-01 12:00:00", "2020-01-02 13:01:01"]  [1, 2, 3, 4]    [1, 1.1, 1.2, 1.3]      [1, 2, 3, 4]    [32768, 32769, -32769, -32770]  ["192.168.0.1", "127.0.0.1"]    ["a", "b", "c"] [-1, 0, 1, 2]   ["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"] [1, 2, 3, 4]    [128, 129, -129, -130]  ["d", "e", "f"] [0, 1, 2, 3]    \N      string1 text#1  3.14    2022-08-08T00:00        12345   2022-08-08T20:10:10
+
+-- !sql_5_08 --
+[1, 0, 1, 1]   [1, -2, -3, 4]  [128, 129, -129, -130]  [32768, 32769, -32769, -32770]  [-1, 0, 1, 2]   [0, 1, 2, 3]    [1, 1.1, 1.2, 1.3]      [1, 2, 3, 4]    [1, 2, 3, 4]    [1, 2, 3, 4]    ["2020-01-01", "2020-01-02"]    ["2020-01-01 12:00:00", "2020-01-02 13:01:01"]  ["a", "b", "c"] ["d", "e", "f"] ["192.168.0.1", "127.0.0.1"]    ["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]
+[1, 0, 1, 1]   [1, -2, -3, 4]  [128, 129, -129, -130]  [32768, 32769, -32769, -32770]  [-1, 0, 1, 2]   [0, 1, 2, 3]    [1, 1.1, 1.2, 1.3]      [1, 2, 3, 4]    [1, 2, 3, 4]    [1, 2, 3, 4]    ["2020-01-01", "2020-01-02"]    ["2020-01-01 12:00:00", "2020-01-02 13:01:01"]  ["a", "b", "c"] ["d", "e", "f"] ["192.168.0.1", "127.0.0.1"]    ["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]
+[1, 0, 1, 1]   [1, -2, -3, 4]  [128, 129, -129, -130]  [32768, 32769, -32769, -32770]  [-1, 0, 1, 2]   [0, 1, 2, 3]    [1, 1.1, 1.2, 1.3]      [1, 2, 3, 4]    [1, 2, 3, 4]    [1, 2, 3, 4]    ["2020-01-01", "2020-01-02"]    ["2020-01-01 12:00:00", "2020-01-02 13:01:01"]  ["a", "b", "c"] ["d", "e", "f"] ["192.168.0.1", "127.0.0.1"]    ["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]
+
+-- !sql_5_09 --
+[1, 0, 1, 1]   [1, -2, -3, 4]  [128, 129, -129, -130]  [32768, 32769, -32769, -32770]  [-1, 0, 1, 2]   [0, 1, 2, 3]    [1, 1.1, 1.2, 1.3]      [1, 2, 3, 4]    [1, 2, 3, 4]    [1, 2, 3, 4]    ["2020-01-01", "2020-01-02"]    ["2020-01-01 12:00:00", "2020-01-02 13:01:01"]  ["a", "b", "c"] ["d", "e", "f"] ["192.168.0.1", "127.0.0.1"]    ["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]
+[1, 0, 1, 1]   [1, -2, -3, 4]  [128, 129, -129, -130]  [32768, 32769, -32769, -32770]  [-1, 0, 1, 2]   [0, 1, 2, 3]    [1, 1.1, 1.2, 1.3]      [1, 2, 3, 4]    [1, 2, 3, 4]    [1, 2, 3, 4]    ["2020-01-01", "2020-01-02"]    ["2020-01-01 12:00:00", "2020-01-02 13:01:01"]  ["a", "b", "c"] ["d", "e", "f"] ["192.168.0.1", "127.0.0.1"]    ["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]
+[1, 0, 1, 1]   [1, -2, -3, 4]  [128, 129, -129, -130]  [32768, 32769, -32769, -32770]  [-1, 0, 1, 2]   [0, 1, 2, 3]    [1, 1.1, 1.2, 1.3]      [1, 2, 3, 4]    [1, 2, 3, 4]    [1, 2, 3, 4]    ["2020-01-01", "2020-01-02"]    ["2020-01-01 12:00:00", "2020-01-02 13:01:01"]  ["a", "b", "c"] ["d", "e", "f"] ["192.168.0.1", "127.0.0.1"]    ["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]
+
+-- !sql_5_10 --
+[1, 0, 1, 1]   [1, -2, -3, 4]  ["2020-01-01", "2020-01-02"]    ["2020-01-01 12:00:00", "2020-01-02 13:01:01"]  [1, 2, 3, 4]    [1, 1.1, 1.2, 1.3]      [1, 2, 3, 4]    [32768, 32769, -32769, -32770]  ["192.168.0.1", "127.0.0.1"]    ["a", "b", "c"] [-1, 0, 1, 2]   ["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"] [1, 2, 3, 4]    [128, 129, -129, -130]  ["d", "e", "f"] [0, 1, 2, 3]    \N      string1 text#1  3.14    2022-08-08T00:00        12345   2022-08-08T20:10:10
+
+-- !sql_5_11 --
+[1, 0, 1, 1]   [1, -2, -3, 4]  ["2020-01-01", "2020-01-02"]    ["2020-01-01 12:00:00", "2020-01-02 13:01:01"]  [1, 2, 3, 4]    [1, 1.1, 1.2, 1.3]      [1, 2, 3, 4]    [32768, 32769, -32769, -32770]  ["192.168.0.1", "127.0.0.1"]    ["a", "b", "c"] [-1, 0, 1, 2]   ["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"] [1, 2, 3, 4]    [128, 129, -129, -130]  ["d", "e", "f"] [0, 1, 2, 3]            string2 text2   4.0     2022-08-08T00:00        2222    2022-08-08T12:10:10
+
+-- !sql_5_12 --
+[1, 0, 1, 1]   [1, -2, -3, 4]  ["2020-01-01", "2020-01-02"]    ["2020-01-01 12:00:00", "2020-01-02 13:01:01"]  [1, 2, 3, 4]    [1, 1.1, 1.2, 1.3]      [1, 2, 3, 4]    [32768, 32769, -32769, -32770]  ["192.168.0.1", "127.0.0.1"]    ["a", "b", "c"] [-1, 0, 1, 2]   ["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"] [1, 2, 3, 4]    [128, 129, -129, -130]  ["d", "e", "f"] [0, 1, 2, 3]    I'm not null or empty   string3 text3_4*5       5.0     2022-08-08T00:00        3333    2022-08-08T20:10:10
+
+-- !sql_5_13 --
+2022-08-08T20:10:10
+
+-- !sql_5_14 --
+2022-08-08T12:10:10
+
+-- !sql_5_15 --
+2022-08-08T20:10:10
+
+-- !sql_5_16 --
+I'm not null or empty
+
+-- !sql_5_17 --
+
+I'm not null or empty
+
+-- !sql_5_18 --
+I'm not null or empty
+
 -- !sql_6_02 --
 [1, 0, 1, 1]   [1, -2, -3, 4]  ["2020-01-01", "2020-01-02"]    ["2020-01-01 12:00:00", "2020-01-02 13:01:01"]  [1, 2, 3, 4]    [1, 1.1, 1.2, 1.3]      [1, 2, 3, 4]    [32768, 32769, -32769, -32770]  ["192.168.0.1", "127.0.0.1"]    ["a", "b", "c"] [-1, 0, 1, 2]   ["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"] [1, 2, 3, 4]    [128, 129, -129, -130]  ["d", "e", "f"] [0, 1, 2, 3]    \N      string1 text#1  3.14    2022-08-08T00:00        12345   2022-08-08T20:10:10
 
diff --git a/regression-test/pipeline/external/conf/regression-conf.groovy b/regression-test/pipeline/external/conf/regression-conf.groovy
index 2b44abb7300..f7743ce858e 100644
--- a/regression-test/pipeline/external/conf/regression-conf.groovy
+++ b/regression-test/pipeline/external/conf/regression-conf.groovy
@@ -110,6 +110,7 @@ hive3ServerPort=13000
 hive3PgPort=5732
 
 enableEsTest=true
+es_5_port=59200
 es_6_port="19200/"
 es_7_port=29200
 es_8_port=39200
diff --git a/regression-test/suites/external_table_p0/es/test_es_query.groovy b/regression-test/suites/external_table_p0/es/test_es_query.groovy
index f2af00d6fe6..38556c32812 100644
--- a/regression-test/suites/external_table_p0/es/test_es_query.groovy
+++ b/regression-test/suites/external_table_p0/es/test_es_query.groovy
@@ -19,10 +19,12 @@ suite("test_es_query", "p0,external,es,external_docker,external_docker_es") {
     String enabled = context.config.otherConfigs.get("enableEsTest")
     if (enabled != null && enabled.equalsIgnoreCase("true")) {
         String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+        String es_5_port = context.config.otherConfigs.get("es_5_port")
         String es_6_port = context.config.otherConfigs.get("es_6_port")
         String es_7_port = context.config.otherConfigs.get("es_7_port")
         String es_8_port = context.config.otherConfigs.get("es_8_port")
 
+        sql """drop catalog if exists test_es_query_es5;"""
         sql """drop catalog if exists test_es_query_es6;"""
         sql """drop catalog if exists test_es_query_es7;"""
         sql """drop catalog if exists test_es_query_es8;"""
@@ -30,6 +32,15 @@ suite("test_es_query", "p0,external,es,external_docker,external_docker_es") {
         sql """drop table if exists test_v2;"""
 
         // test old create-catalog syntax for compatibility
+        sql """
+            create catalog test_es_query_es5
+            properties (
+                "type"="es",
+                "elasticsearch.hosts"="http://${externalEnvIp}:$es_5_port",
+                "elasticsearch.nodes_discovery"="false",
+                "elasticsearch.keyword_sniff"="true"
+            );
+        """
         sql """
             create catalog test_es_query_es6
             properties (
@@ -159,6 +170,25 @@ suite("test_es_query", "p0,external,es,external_docker,external_docker_es") {
         order_qt_sql21 """select * from test_v2 where esquery(test2, '{"match":{"test2":"text#1"}}')"""
         order_qt_sql22 """select test4,test5,test6,test7,test8 from test_v2 order by test8"""
 
+        sql """switch test_es_query_es5"""
+        order_qt_sql_5_02 """select * from test1 where test2='text#1'"""
+        order_qt_sql_5_03 """select * from test2_20220808 where test4 >= '2022-08-08 00:00:00' and test4 < '2022-08-08 23:59:59'"""
+        order_qt_sql_5_04 """select * from test2_20220808 where substring(test2, 2) = 'ext2'"""
+        order_qt_sql_5_05 """select c_bool[1], c_byte[1], c_short[1], c_integer[1], c_long[1], c_unsigned_long[1], c_float[1], c_half_float[1], c_double[1], c_scaled_float[1], c_date[1], c_datetime[1], c_keyword[1], c_text[1], c_ip[1], c_person[1] from test1"""
+        order_qt_sql_5_06 """select c_bool[1], c_byte[1], c_short[1], c_integer[1], c_long[1], c_unsigned_long[1], c_float[1], c_half_float[1], c_double[1], c_scaled_float[1], c_date[1], c_datetime[1], c_keyword[1], c_text[1], c_ip[1], c_person[1] from test2_20220808"""
+        order_qt_sql_5_07 """select * from test1 where esquery(test2, '{"match":{"test2":"text#1"}}')"""
+        order_qt_sql_5_08 """select c_bool, c_byte, c_short, c_integer, c_long, c_unsigned_long, c_float, c_half_float, c_double, c_scaled_float, c_date, c_datetime, c_keyword, c_text, c_ip, c_person from test1"""
+        order_qt_sql_5_09 """select c_bool, c_byte, c_short, c_integer, c_long, c_unsigned_long, c_float, c_half_float, c_double, c_scaled_float, c_date, c_datetime, c_keyword, c_text, c_ip, c_person from test2_20220808"""
+        order_qt_sql_5_10 """select * from test1 where test1='string1'"""
+        order_qt_sql_5_11 """select * from test1 where test1='string2'"""
+        order_qt_sql_5_12 """select * from test1 where test1='string3'"""
+        order_qt_sql_5_13 """select test6 from test1 where test1='string1'"""
+        order_qt_sql_5_14 """select test6 from test1 where test1='string2'"""
+        order_qt_sql_5_15 """select test6 from test1 where test1='string3'"""
+        order_qt_sql_5_16 """select message from test1 where message != ''"""
+        order_qt_sql_5_17 """select message from test1 where message is not null"""
+        order_qt_sql_5_18 """select message from test1 where not_null_or_empty(message)"""
+
         sql """switch test_es_query_es6"""
         // order_qt_sql_6_01 """show tables"""
         order_qt_sql_6_02 """select * from test1 where test2='text#1'"""

