This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch branch-2.0
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-2.0 by this push:
     new ad6cc9cde96 Revert "[Improvement](regression-test) add http_stream case (#24930)" (#25556)
ad6cc9cde96 is described below

commit ad6cc9cde96a058b3c54da9af125f29f4cb5b6fe
Author: HHoflittlefish777 <77738092+hhoflittlefish...@users.noreply.github.com>
AuthorDate: Wed Oct 18 10:50:51 2023 +0800

    Revert "[Improvement](regression-test) add http_stream case (#24930)" (#25556)

    This reverts commit 8adf4f6634b76d67f74127d95004fdc87f707586.
---
 .../http_stream/test_http_stream_properties.out    |  23 --
 .../load_p0/stream_load/basic_array_data.csv.bz2   | Bin 6096 -> 6094 bytes
 .../load_p0/stream_load/basic_array_data.csv.gz    | Bin 4615 -> 4624 bytes
 .../http_stream/test_http_stream_properties.groovy | 387 ---------------------
 4 files changed, 410 deletions(-)

diff --git a/regression-test/data/load_p0/http_stream/test_http_stream_properties.out b/regression-test/data/load_p0/http_stream/test_http_stream_properties.out
deleted file mode 100644
index 93bde743d03..00000000000
--- a/regression-test/data/load_p0/http_stream/test_http_stream_properties.out
+++ /dev/null
@@ -1,23 +0,0 @@
--- This file is automatically generated. You should know what you did if you want to edit this
--- !sql_squence --
-8 2023-08-14 true 109 -31573 -1362465190 3990845741226497177 2732763251146840270 -25698.553 1.312831962567818E9 99999999.9 99999999.9 2023-03-07T14:13:19 2022-10-18 2023-07-16T05:03:13 D PBn1wa6X8WneZYLMac11zzyhGl7tPXB5XgjmOV8L6uav9ja5oY433ktb2yhyQQIqBveZPkme {"animal":"lion","weight":200,"habitat":["savannah","grassland"]} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 \N
-20 2023-08-17 false -5 18158 784479801 1485484354598941738 -6632681928222776815 9708.431 -3.30432620706069E8 -99999999.9 99999999.9 2022-09-15T21:40:55 2023-02-23 2023-08-13T21:31:54 O X 2pYmX2vAhfEEHZZYPsgAmda1G7otnwx5TmUC879FPhDeIjvWI79ksBZpfFG2gp7jhCSbpZiecKGklB5SvG8tm31i5SUqe1xrWgLt4HSq7lMJWp75tx2kxD7pRIOpn {"name":"Sarah","age":30,"city":"London","isMarried":false} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而不 [...]
-21 2023-08-18 false 63 -27847 -35409596 8638201997392767650 4919963231735304178 -23382.541 -1.803403621426313E9 -22009767.0 99999999.9 2023-03-31T10:56:14 2023-01-20 2023-02-18T13:37:52 N T PSiFwUEx3eVFNtjlnQ70YkgZNvKrGmQ2DN5K9yYHiSdFWeEDB1UpL3Frt8z1kEAIWRDWqXZuyi {"city":"Sydney","population":5312000,"area":2058.7} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 \N -31 2023-08-27 false 17 -18849 1728109133 3266501886640700374 527195452623418935 -24062.328 -1.514348021262435E9 -99999999.9 -99999999.9 2022-10-07T03:24:23 2022-09-25 \N 0 8 yKMiAntORoRa8svnMfcxlOPwwND1m5s2fdS26Xu6cfs6HK5SAibqIp9h8sZcpjHy4 {"team":"Manchester United","players":["Ronaldo","Rooney","Giggs"],"coach":"Ole Gunnar Solskjaer"} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 \N -41 2023-08-27 true -104 22750 \N 8527773271030840740 5554497317268279215 -5296.828 -1.71564688801304E9 -99999999.9 99999999.9 2022-12-02T17:56:44 2022-10-12 2023-02-19T07:02:54 V \N E9GzQdTwX1ITUQz27IVznAs6Ca4WwprKk6Odjs6SH75D2F1089QiY3HQ52LXRD1V6xAWjhLE2hWgW3EdHuAOnUDVrb5V {"food":"Sushi","price":10,"restaurant":"Sushi King"} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 \N -49 2023-08-08 false \N 16275 -2144851675 -2303421957908954634 -46526938720058765 -13141.143 -6.866322332302E8 99999999.9 -99999999.9 2022-09-01T00:16:01 2023-03-25 2022-09-07T14:59:03 s yvuILR2iNxfe8RRml {"student":true,"name":"Alice","grade":9,"subjects":["math","science","history"]} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 \N -50 2023-08-06 true 109 -6330 1479023892 -8630800697573159428 -1645095773540208759 17880.96 -1.453844792013949E9 -99999999.9 -99999999.9 2022-09-22T02:03:21 2023-05-14 2023-03-25T02:18:34 m JKnIgXvGVidGiWl9YRSi3mFI7wHKt1sBpWSadKF8VX3LAuElm4sdc9gtxREaUr57oikSYlU8We8h1MWqQlYNiJObl {"city":"Tokyo","temperature":20.5,"humidity":75} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 \N -50 2023-08-24 true 15 14403 \N -6418906115745394180 9205303779366462513 -4331.549 -6.15112179557648E8 99999999.9 -99999999.9 2022-12-29T02:27:20 2023-06-01 2023-08-12T04:50:04 a eCl38sztIvBQvGvGKyYZmyMXy9vIJx197iu3JwP9doJGcrYUl9Uova0rz4iCCgrjlAiZU18Fs9YtCq830nhM {"band":"The Beatles","members":["John Lennon","Paul McCartney","George Harrison","Ringo Starr"]} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 \N -57 2023-08-19 true 2 -25462 -74112029 6458082754318544493 -7910671781690629051 -15205.859 -3.06870797484914E8 99999999.9 -99999999.9 2023-07-10T18:39:10 2023-02-12 2023-01-27T07:26:06 y Xi9nDVrLv8m6AwEpUxmtzFAuK48sQ {"name":"John","age":25,"city":"New York"} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 \N -58 2023-08-22 \N 0 -18231 1832867360 6997858407575297145 2480714305422728023 -5450.489 1.475901032138386E9 -99999999.9 -99999999.9 2023-02-02T05:13:24 2022-09-18 2023-04-23T10:51:15 k LdFXF7Kmfzgmnn2R6zLsXdmi3A2cLBLq4G4WDVNDhxvH7dYH8Kga2WA47uSIxp6NSrwPSdw0ssB1TS8RFJTDJAB0Uba3e05NL2Aiw0ja {"restaurant":"Pizza Hut","menu":["pizza","pasta","salad"]} true 1 2 3 
4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 \N -60 2023-08-27 false -52 -2338 -757056972 1047567408607120856 6541476642780646552 6614.0894 -1.204448798517855E9 99999999.9 99999999.9 2022-12-29T14:47:30 2022-09-24 2023-08-01T12:41:59 O F RM4F1Ke7lkcnuxF2nK0j9VBW3MDcgyHR4pseBjtFnqS6GUkVFuzF6u3Cp9Nv7ab0O6UYrpP4DhU {"game":"Chess","players":2,"time":"1 hour"} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 \N -62 2023-08-21 false 81 20302 -200761532 6365479976421007608 \N -29916.533 1.709141750828478E9 99999999.9 -99999999.9 2023-05-04T01:14:51 2022-09-17 2022-12-04T19:30:09 d v BKWy9dTNg1aZW7ancEJAmEDOPK5TwFsNSHbI78emu9gymeIlx5NoLmyii0QAqdzRvSQPZKiqKkwInGCTIBnK1yYkK7zD {"username":"user123","password":"pass123","email":"user...@example.com"} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 \N -65 2023-08-09 false 94 31514 814994517 -297697460695940343 734910652450318597 -13061.892 6.2750847041706E7 -9808654.0 \N 2023-08-14T22:01:27 2023-05-19 2022-11-13T13:44:28 V aGeMsI24O12chGlP5ak0AHghAz7bu5MargJBStHnt0yMnChH0JnfYhsfH1u59XIHkJKMsHYktBqORkGlovu8V47E74KeFpaqxn5yLyXfDbhhzUKf {"language":"Python","version":3.9,"frameworks":["Django","Flask"]} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 \N -66 2023-08-15 true -91 28378 609923317 4872185586197131212 1207709464099378591 \N -1.863683325985123E9 -99999999.9 -99999999.9 2022-09-24T10:39:23 2022-09-24 2022-10-16T18:36:43 Y z AI1BSPQdKiHJiQH1kguyLSWsDXkC7zwy7PwgWnyGSaa9tBKRex8vHBdxg2QSKZKL2mV2lHz7iI1PnsTd4MXDcIKhqiHyPuQPt2tEtgt0UgF6 {"book":{"title":"The Great Gatsby","author":"F. Scott Fitzgerald"},"year":1925} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤 [...] -68 2023-08-23 true -73 20117 1737338128 795638676048937749 -5551546237562433901 -30627.04 6.8589475684545E7 99999999.9 99999999.9 2022-12-28T20:26:51 2022-10-04 2023-07-30T00:20:06 y keZ3JlWWpdnPBejf0cuiCQCVBBTd5gjvO08NVdcAFewqL7nRT4N9lnvSU6pWmletA5VbPQCeQapJdcnQCHfZUDCf4ulCnczyqr7SGrbGRT0XYcd7iktKM {"country":"Brazil","continent":"South America","population":211049527} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而 [...] 
-80 2023-08-18 false -18 -8971 679027874 6535956962935330265 3960889045799757165 -13219.76 1.187161924505394E9 -99999999.9 -99999999.9 2023-03-11T07:40 2022-11-29 2023-01-14T07:24:07 \N D 3Nhx6xX1qdwaq7lxwLRSKMtJFbC03swWv12mpySSVysH3igGZTiGPuKMsYW7HAkf6CWc7c0nzqDsjuH3FYVMNCWRmfxMrmY8rykQCC4Ve {"car":"BMW","model":"X5","year":2020,"color":"black"} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 \N -81 2023-08-23 false 106 11492 -667795397 4480250461471356146 -5346660566234294101 9082.75 3.85167225902608E8 -99999999.9 99999999.9 2023-03-20T03:33:16 2022-11-24 2023-02-16T18:29:41 G 9 Lk3eNVQNjucbekD1rZmUlGPiXS5JvcWr2LQzRU8GSGIbSag {"flower":"rose","color":"red","fragrance":true} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 \N -85 2023-08-11 true -7 24304 -2043877415 -2024144417867729183 \N 5363.0244 -5.78615669042831E8 -99999999.9 -99999999.9 2023-07-15T01:07:41 2023-08-13 2023-01-20T11:57:48 i WQ9dh9ajPu0y {"country":"France","capital":"Paris","population":67081000} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 \N -90 2023-08-27 true 22 16456 -1476824962 -3279894870153540825 8990195191470116763 26651.906 2.06860148942546E8 -99999999.9 -99999999.9 2022-10-07T03:11:03 2023-03-18 2023-04-15T00:38:33 T L QW0GQ3GoMtHgxPQOWGfVaveynahNpsNs09siMFA1OtO6QEDBQTdivmGyq7bFzejAqwbbVQQpREAmeLjcFSXLnQuou2KbwYD {"company":"Apple","products":[{"name":"iPhone","price":1000},{"name":"MacBook","price":1500}]} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 [...] -91 2023-08-27 true 90 2465 702240964 6373830997821598984 305860046137409400 15991.356 1.599972327386147E9 -99999999.9 \N 2023-04-26T19:31:10 2023-07-21 \N 2 B7YKYBYT8w0YC926bZ8Yz1VzyiWw2NWDAiTlEoPVyz9AXGti2Npg1FxWqWk4hEaALw0ZBSuiAIPj41lq36g5QRpPmAjNPK {"fruit":"apple","color":"red","qty":5,"price":2.5} true 1 2 3 4 5 6.0 7.0 888888888 999999999 2023-08-24 2023-08-24T12:00 2023-08-24 2023-08-24T12:00 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 我能吞下玻璃而不伤身体 \N - diff --git a/regression-test/data/load_p0/stream_load/basic_array_data.csv.bz2 b/regression-test/data/load_p0/stream_load/basic_array_data.csv.bz2 index 7fe73b0e0d5..ea0e4779944 100644 Binary files a/regression-test/data/load_p0/stream_load/basic_array_data.csv.bz2 and b/regression-test/data/load_p0/stream_load/basic_array_data.csv.bz2 differ diff --git a/regression-test/data/load_p0/stream_load/basic_array_data.csv.gz b/regression-test/data/load_p0/stream_load/basic_array_data.csv.gz index 0f88cce595b..185cfdb78a9 100644 Binary files a/regression-test/data/load_p0/stream_load/basic_array_data.csv.gz and b/regression-test/data/load_p0/stream_load/basic_array_data.csv.gz differ diff --git a/regression-test/suites/load_p0/http_stream/test_http_stream_properties.groovy b/regression-test/suites/load_p0/http_stream/test_http_stream_properties.groovy deleted file mode 100644 index b217dcb57d4..00000000000 --- a/regression-test/suites/load_p0/http_stream/test_http_stream_properties.groovy +++ /dev/null @@ -1,387 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. 
The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-suite("test_http_stream_properties", "p0") {
-
-    def tables = [
-        "dup_tbl_basic",
-        "uniq_tbl_basic",
-        "mow_tbl_basic",
-        "agg_tbl_basic",
-        "dup_tbl_array",
-        "uniq_tbl_array",
-        "mow_tbl_array",
-    ]
-
-    def columns = [
-        "c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19",
-        "c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19",
-        "c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19",
-        "c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,to_bitmap(c5) c19,HLL_HASH(c5) c20,TO_QUANTILE_STATE(c5,1.0) c21,to_bitmap(c6) c22,HLL_HASH(c6) c23,TO_QUANTILE_STATE(c6,1.0) c24",
-        "c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18",
-        "c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18",
-        "c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18",
-    ]
-
-    def target_columns = [
-        "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18",
-        "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18",
-        "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18",
-        "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18,k19,k20,k21,kd19,kd20,kd21",
-        "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17",
-        "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17",
-        "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17",
-    ]
-
-    def timezoneColumns = [
-        "k00=unix_timestamp('2007-11-30 10:30:19'),c1,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c1",
-        "k00=unix_timestamp('2007-11-30 10:30:19'),c1,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c1",
-        "k00=unix_timestamp('2007-11-30 10:30:19'),c1,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c1",
-        "k00=unix_timestamp('2007-11-30 10:30:19'),c1,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c1,k19=to_bitmap(c5),k20=HLL_HASH(c5),k21=TO_QUANTILE_STATE(c5,1.0),kd19=to_bitmap(c6),kd20=HLL_HASH(c6),kd21=TO_QUANTILE_STATE(c6,1.0)",
-        "k00=unix_timestamp('2007-11-30 10:30:19'),c1,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18",
-        "k00=unix_timestamp('2007-11-30 10:30:19'),c1,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18",
-        "k00=unix_timestamp('2007-11-30 10:30:19'),c1,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18",
-    ]
-
-    def files = [
-        "../stream_load/basic_data.csv",
-        "../stream_load/basic_data.csv",
-        "../stream_load/basic_data.csv",
-        "../stream_load/basic_data.csv",
-        "../stream_load/basic_array_data.csv",
-        "../stream_load/basic_array_data.csv",
-        "../stream_load/basic_array_data.csv"
-    ]
-
-    def timezoneFiles = [
-        "basic_data_timezone.csv",
-        "basic_data_timezone.csv",
-        "basic_data_timezone.csv",
-        "basic_data_timezone.csv",
-        "basic_array_data_timezone.csv",
-        "basic_array_data_timezone.csv",
-        "basic_array_data_timezone.csv",
-    ]
-
-    def errorFiles = [
-        "basic_data_with_errors.csv",
-        "basic_data_with_errors.csv",
-        "basic_data_with_errors.csv",
-        "basic_data_with_errors.csv",
-        "basic_array_data_with_errors.csv",
-        "basic_array_data_with_errors.csv",
-        "basic_array_data_with_errors.csv",
-    ]
-
-    // def compress_type = [
-    //     "gz",
-    //     "bz2",
-    //     "lz4",
-    //     "deflate",
-    //     "lzo",
-    // ]
-
-    // def compress_files = [
-    //     "basic_data.csv.gz",
-    //     "basic_data.csv.bz2",
-    //     "basic_data.csv.lz4",
-    //     "basic_data.csv.deflate",
-    //     "basic_data.csv.lzo",
-    //     "basic_array_data.csv.gz",
-    //     "basic_array_data.csv.bz2",
-    //     "basic_array_data.csv.lz4",
-    //     "basic_array_data.csv.deflate",
-    //     "basic_array_data.csv.lzo",
-    // ]
-    def compress_files = [
-        "dup_tbl_basic": [
-            ["../stream_load/basic_data.csv.gz", "gz"],
-            ["../stream_load/basic_data.csv.bz2", "bz2"],
-        ],
-        "uniq_tbl_basic": [
-            ["../stream_load/basic_data.csv.gz", "gz"],
-            ["../stream_load/basic_data.csv.bz2", "bz2"],
-        ],
-        "mow_tbl_basic": [
-            ["../stream_load/basic_data.csv.gz", "gz"],
-            ["../stream_load/basic_data.csv.bz2", "bz2"],
-        ],
-        "agg_tbl_basic": [
-            ["../stream_load/basic_data.csv.gz", "gz"],
-            ["../stream_load/basic_data.csv.bz2", "bz2"],
-        ],
-        "dup_tbl_array": [
-            ["../stream_load/basic_array_data.csv.gz", "gz"],
-            ["../stream_load/basic_array_data.csv.bz2", "bz2"],
-        ],
-        "uniq_tbl_array": [
-            ["../stream_load/basic_array_data.csv.gz", "gz"],
-            ["../stream_load/basic_array_data.csv.bz2", "bz2"],
-        ],
-        "mow_tbl_array": [
-            ["../stream_load/basic_array_data.csv.gz", "gz"],
-            ["../stream_load/basic_array_data.csv.bz2", "bz2"],
-        ],
-    ]
-
-    def loadedRows = [12,12,12,12,8,8,15]
-
-    def filteredRows = [8,8,8,8,12,12,5]
-
-    def maxFilterRatio = [0.4,0.4,0.4,0.4,0.6,0.6,0.6]
-
-    InetSocketAddress address = context.config.feHttpInetSocketAddress
-    String user = context.config.feHttpUser
-    String password = context.config.feHttpPassword
-    String db = context.config.getDbNameByFile(context.file)
-
-    def i = 0
-    try {
-        for (String tableName in tables) {
-            sql new File("""${context.file.parent}/../stream_load/ddl/${tableName}_drop.sql""").text
-            sql new File("""${context.file.parent}/../stream_load/ddl/${tableName}_create.sql""").text
-
-            def tableNm = "stream_load_" + tableName
-
-            streamLoad {
-                set 'version', '1'
-                set 'sql', """
-                        insert into ${db}.${tableNm}(${target_columns[i]}) select ${columns[i]} from http_stream("format"="csv", "column_separator"="|")
-                        """
-                file files[i]
-                time 10000 // limit inflight 10s
-                check { result, exception, startTime, endTime ->
-                    if (exception != null) {
-                        throw exception
-                    }
-                    log.info("Stream load result: ${result}".toString())
-                    def json = parseJson(result)
-                    assertEquals("success", json.Status.toLowerCase())
-                    assertEquals(20, json.NumberTotalRows)
-                    assertEquals(20, json.NumberLoadedRows)
-                    assertEquals(0, json.NumberFilteredRows)
-                    assertEquals(0, json.NumberUnselectedRows)
-                }
-            }
-            i++
-        }
-    } finally {
-        for (String tableName in tables) {
-            sql new File("""${context.file.parent}/../stream_load/ddl/${tableName}_drop.sql""").text
-        }
-    }
-
-    // TODO timezone
-
-    // TODO strict_mode
-
-    // TODO max_filter_ratio
-
-    // sequence
-    try {
-        sql new File("""${context.file.parent}/../stream_load/ddl/uniq_tbl_basic_drop_sequence.sql""").text
-        sql new File("""${context.file.parent}/../stream_load/ddl//uniq_tbl_basic_create_sequence.sql""").text
-
-        String tableNm = "stream_load_uniq_tbl_basic_sequence"
-
-        streamLoad {
-            set 'version', '1'
-            set 'sql', """
-                    insert into ${db}.${tableNm}(${target_columns[0]}) select ${columns[0]} from http_stream("format"="CSV", "column_separator"="|")
-                    """
-            file files[0]
-            time 10000 // limit inflight 10s
-
-            check { result, exception, startTime, endTime ->
-                if (exception != null) {
-                    throw exception
-                }
-                log.info("Stream load result: ${result}".toString())
-                def json = parseJson(result)
-                assertEquals("success", json.Status.toLowerCase())
-                assertEquals(20, json.NumberTotalRows)
-                assertEquals(20, json.NumberLoadedRows)
-                assertEquals(0, json.NumberFilteredRows)
-                assertEquals(0, json.NumberUnselectedRows)
-            }
-        }
-        qt_sql_squence "select * from stream_load_uniq_tbl_basic_sequence order by k00,k01"
-    } finally {
-        sql new File("""${context.file.parent}/../stream_load/ddl/uniq_tbl_basic_drop_sequence.sql""").text
-    }
-
-    // TODO merge type
-
-    // TODO two_phase_commit
-
-    // compress_type
-    // gz/bz2
-    // TODO lzo/deflate/lz4
-    i = 0
-    try {
-        for (String tableName in tables) {
-            compress_files[tableName].each { fileName, type -> {
-                sql new File("""${context.file.parent}/../stream_load/ddl/${tableName}_drop.sql""").text
-                sql new File("""${context.file.parent}/../stream_load/ddl/${tableName}_create.sql""").text
-                def tableNm = "stream_load_" + tableName
-                streamLoad {
-                    set 'version', '1'
-                    set 'sql', """
-                            insert into ${db}.${tableNm}(${target_columns[i]}) select ${columns[i]} from http_stream("format"="CSV", "column_separator"="|", "compress_type"="${type}")
-                            """
-                    file fileName
-                    time 10000 // limit inflight 10s
-
-                    check { result, exception, startTime, endTime ->
-                        if (exception != null) {
-                            throw exception
-                        }
-                        log.info("Stream load result: ${tableName}".toString())
-                        def json = parseJson(result)
-                        assertEquals("success", json.Status.toLowerCase())
-                        assertEquals(20, json.NumberTotalRows)
-                        assertEquals(20, json.NumberLoadedRows)
-                        assertEquals(0, json.NumberFilteredRows)
-                        assertEquals(0, json.NumberUnselectedRows)
-                    }
-                }
-                }
-            }
-            i++
-        }
-    } finally {
-        for (String table in tables) {
-            sql new File("""${context.file.parent}/../stream_load/ddl/${table}_drop.sql""").text
-        }
-    }
-
-    // skip_lines
-    i = 0
-    try {
-        for (String tableName in tables) {
-            sql new File("""${context.file.parent}/../stream_load/ddl/${tableName}_drop.sql""").text
-            sql new File("""${context.file.parent}/../stream_load/ddl/${tableName}_create.sql""").text
-
-            def tableNm = "stream_load_" + tableName
-
-            streamLoad {
-                set 'version', '1'
-                set 'sql', """
-                        insert into ${db}.${tableNm}(${target_columns[i]}) select ${columns[i]} from http_stream("format"="CSV", "column_separator"="|", "skip_lines"="2")
-                        """
-                file files[i]
-                time 10000 // limit inflight 10s
-
-                check { result, exception, startTime, endTime ->
-                    if (exception != null) {
-                        throw exception
-                    }
-                    log.info("Stream load result: ${result}".toString())
-                    def json = parseJson(result)
-                    assertEquals("success", json.Status.toLowerCase())
-                    assertEquals(18, json.NumberTotalRows)
-                    assertEquals(18, json.NumberLoadedRows)
-                    assertEquals(0, json.NumberFilteredRows)
-                    assertEquals(0, json.NumberUnselectedRows)
-                }
-            }
-            if (i <= 3) {
-                qt_sql_skip_lines "select * from ${tableNm} order by k00,k01"
-            } else {
-                qt_sql_skip_lines "select * from ${tableNm} order by k00"
-            }
-            i++
-        }
-    } finally {
-        for (String table in tables) {
-            sql new File("""${context.file.parent}/../stream_load/ddl/${table}_drop.sql""").text
-        }
-    }
-
-    // column_separator
-    i = 0
-    try {
-        for (String tableName in tables) {
-            sql new File("""${context.file.parent}/../stream_load/ddl/${tableName}_drop.sql""").text
-            sql new File("""${context.file.parent}/../stream_load/ddl/${tableName}_create.sql""").text
-            def tableNm = "stream_load_" + tableName
-            streamLoad {
-                set 'version', '1'
-                set 'sql', """
-                        insert into ${db}.${tableNm}(${target_columns[i]}) select ${columns[i]} from http_stream("format"="CSV", "column_separator"=",")
-                        """
-                file files[i]
-                time 10000 // limit inflight 10s
-
-                check { result, exception, startTime, endTime ->
-                    if (exception != null) {
-                        throw exception
-                    }
-                    log.info("Stream load result: ${result}".toString())
-                    def json = parseJson(result)
-                    assertEquals("fail", json.Status.toLowerCase())
-                    // assertEquals(0, json.NumberTotalRows)
-                    // assertEquals(0, json.NumberLoadedRows)
-                    // assertEquals(0, json.NumberFilteredRows)
-                    // assertEquals(0, json.NumberUnselectedRows)
-                }
-            }
-            i++
-        }
-    } finally {
-        for (String table in tables) {
-            sql new File("""${context.file.parent}/../stream_load/ddl/${table}_drop.sql""").text
-        }
-    }
-
-    // line_delimiter
-    i = 0
-    try {
-        for (String tableName in tables) {
-            sql new File("""${context.file.parent}/../stream_load/ddl/${tableName}_drop.sql""").text
-            sql new File("""${context.file.parent}/../stream_load/ddl/${tableName}_create.sql""").text
-            def tableNm = "stream_load_" + tableName
-            streamLoad {
-                set 'version', '1'
-                set 'sql', """
-                        insert into ${db}.${tableNm}(${target_columns[i]}) select ${columns[i]} from http_stream("format"="CSV", "column_separator"=",", "line_delimiter"=",")
-                        """
-                file files[i]
-                time 10000 // limit inflight 10s
-
-                check { result, exception, startTime, endTime ->
-                    if (exception != null) {
-                        throw exception
-                    }
-                    log.info("Stream load result: ${result}".toString())
-                    def json = parseJson(result)
-                    assertEquals("fail", json.Status.toLowerCase())
-                    // assertEquals(1, json.NumberTotalRows)
-                    // assertEquals(0, json.NumberLoadedRows)
-                    // assertEquals(1, json.NumberFilteredRows)
-                    // assertEquals(0, json.NumberUnselectedRows)
-                }
-            }
-            i++
-        }
-    } finally {
-        for (String table in tables) {
-            sql new File("""${context.file.parent}/../stream_load/ddl/${table}_drop.sql""").text
-        }
-    }
-}
-

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org