This is an automated email from the ASF dual-hosted git repository.

yiguolei pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/doris.git

commit 5276cc4db6455e8db9b23b6ca6aff7de23ed0d6d
Author: XuJianxu <xjxlzl...@163.com>
AuthorDate: Thu Feb 29 14:34:27 2024 +0800

    [docker][fix] update routine load cases (#31553)
    
    Co-authored-by: 胥剑旭 <xujianxu@xujianxudeMacBook-Pro.local>
---
 docker/thirdparties/run-thirdparties-docker.sh | 44 +++++++++-----------------
 regression-test/framework/pom.xml              |  5 +++
 2 files changed, 20 insertions(+), 29 deletions(-)

diff --git a/docker/thirdparties/run-thirdparties-docker.sh b/docker/thirdparties/run-thirdparties-docker.sh
index 1a3faba5617..f8d647c73cb 100755
--- a/docker/thirdparties/run-thirdparties-docker.sh
+++ b/docker/thirdparties/run-thirdparties-docker.sh
@@ -261,39 +261,25 @@ if [[ "${RUN_KAFKA}" -eq 1 ]]; then
     sed -i "s/doris--/${CONTAINER_UID}/g" 
"${ROOT}"/docker-compose/kafka/kafka.yaml
     sed -i "s/localhost/${IP_HOST}/g" "${ROOT}"/docker-compose/kafka/kafka.yaml
    sudo docker compose -f "${ROOT}"/docker-compose/kafka/kafka.yaml --env-file "${ROOT}"/docker-compose/kafka/kafka.env down
-    # start_kafka_producers() {
-    #     local container_id="$1"
-    #     local ip_host="$2"
-    #     local backup_dir=/home/work/pipline/backup_center
-
-    #     declare -a topics=("basic_data" "basic_array_data" "basic_data_with_errors" "basic_array_data_with_errors" "basic_data_timezone" "basic_array_data_timezone" "multi_table_csv" "multi_table_csv1")
-
-    #     for topic in "${topics[@]}"; do
-    #         while IFS= read -r line; do
-    #             touch ${backup_dir}/kafka_info.log
-    #             echo $(date) >> ${backup_dir}/kafka_info.log
-    #             echo "docker exec "${container_id}" bash -c echo '$line' | /opt/kafka/bin/kafka-console-producer.sh --broker-list '${ip_host}:19193' --topic '${topic}'" >> ${backup_dir}/kafka_info.log
-    #             docker exec "${container_id}" bash -c "echo '$line' | /opt/kafka/bin/kafka-console-producer.sh --broker-list '${ip_host}:19193' --topic '${topic}'"
-    #         done < "${ROOT}/docker-compose/kafka/scripts/${topic}.csv"
-    #     done
-
-    #     declare -a json_topics=("basic_data_json" "basic_array_data_json" "basic_array_data_json_by_line" "basic_data_json_by_line" "multi_table_json" "multi_table_json1")
-
-    #     for json_topic in "${json_topics[@]}"; do
-    #         echo ${json_topics}
-    #         while IFS= read -r json_line; do
-    #             docker exec "${container_id}" bash -c "echo '$json_line' | /opt/kafka/bin/kafka-console-producer.sh --broker-list '${ip_host}:19193' --topic '${json_topic}'"
-    #             echo "echo '$json_line' | /opt/kafka/bin/kafka-console-producer.sh --broker-list '${ip_host}:19193' --topic '${json_topic}'"
-    #         done < "${ROOT}/docker-compose/kafka/scripts/${json_topic}.json"
-    #     done
-    #     # copy kafka log to backup path
-    #     docker cp "${container_id}":/opt/kafka/logs ${backup_dir}/kafka_logs
-    # }
+
+    create_kafka_topics() {
+        local container_id="$1"
+        local ip_host="$2"
+        local backup_dir=/home/work/pipline/backup_center
+
+        declare -a topics=("basic_data" "basic_array_data" "basic_data_with_errors" "basic_array_data_with_errors" "basic_data_timezone" "basic_array_data_timezone")
+
+        for topic in "${topics[@]}"; do
+            echo "docker exec ${container_id} bash -c '/opt/kafka/bin/kafka-topics.sh --create --bootstrap-server ${ip_host}:19193 --partitions 10 --topic ${topic}'"
+            docker exec "${container_id}" bash -c "/opt/kafka/bin/kafka-topics.sh --create --bootstrap-server '${ip_host}:19193' --partitions 10 --topic '${topic}'"
+        done
+
+    }
 
     if [[ "${STOP}" -ne 1 ]]; then
        sudo docker compose -f "${ROOT}"/docker-compose/kafka/kafka.yaml --env-file "${ROOT}"/docker-compose/kafka/kafka.env up --build --remove-orphans -d
         sleep 10s
-        # start_kafka_producers "${KAFKA_CONTAINER_ID}" "${IP_HOST}"
+        create_kafka_topics "${KAFKA_CONTAINER_ID}" "${IP_HOST}"
     fi
 fi
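
The hunk above drops the commented-out shell producer loops and instead only pre-creates the topics (10 partitions each) that the routine load cases consume. For comparison, the same topic creation can be done programmatically with the kafka-clients AdminClient; a minimal sketch, assuming a reachable broker at the address the script passes as ${IP_HOST}:19193 and replication factor 1 (class name and broker address are placeholders, not taken from this commit):

    import java.util.Collections;
    import java.util.Properties;
    import org.apache.kafka.clients.admin.AdminClient;
    import org.apache.kafka.clients.admin.NewTopic;

    public class CreateRoutineLoadTopics {
        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            // Assumption: same broker endpoint the shell function targets.
            props.put("bootstrap.servers", System.getProperty("kafka.broker", "127.0.0.1:19193"));
            try (AdminClient admin = AdminClient.create(props)) {
                // 10 partitions, replication factor 1, mirroring the kafka-topics.sh call above.
                NewTopic topic = new NewTopic("basic_data", 10, (short) 1);
                admin.createTopics(Collections.singleton(topic)).all().get();
            }
        }
    }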
 
diff --git a/regression-test/framework/pom.xml b/regression-test/framework/pom.xml
index 9e804362144..fae65b4c578 100644
--- a/regression-test/framework/pom.xml
+++ b/regression-test/framework/pom.xml
@@ -217,6 +217,11 @@ under the License.
             <version>${groovy.version}</version>
             <type>pom</type>
         </dependency>
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka-clients</artifactId>
+            <version>2.8.1</version>
+        </dependency>
         <dependency>
             <groupId>org.junit.jupiter</groupId>
             <artifactId>junit-jupiter-api</artifactId>


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org
