github-actions[bot] commented on PR #37451: URL: https://github.com/apache/doris/pull/37451#issuecomment-2216191134
#### `sh-checker report`

To get the full details, please check in the [job](https://github.com/apache/doris/actions/runs/9849705036) output.

<details>
<summary>shellcheck errors</summary>

```
'shellcheck ' found no issues.
```

</details>

<details>
<summary>shfmt errors</summary>

```
'shfmt ' returned error 1 finding the following formatting issues:

----------
--- samples/datalake/hudi/scripts/spark-hudi.sh.orig
+++ samples/datalake/hudi/scripts/spark-hudi.sh
@@ -22,7 +22,7 @@
 export HADOOP_HOME=/opt/hadoop-3.3.1
 
 if [[ ! -d "${SPARK_HOME}" ]]; then
-  cp -r /opt/spark-3.4.2-bin-hadoop3 "${SPARK_HOME}"
+    cp -r /opt/spark-3.4.2-bin-hadoop3 "${SPARK_HOME}"
 fi
 
 cp "${HIVE_HOME}"/conf/hive-site.xml "${SPARK_HOME}"/conf/
@@ -30,9 +30,9 @@
 cp "${HADOOP_HOME}"/etc/hadoop/core-site.xml "${SPARK_HOME}"/conf/
 
 "${SPARK_HOME}"/bin/spark-sql \
-  --master local[*] \
-  --name "spark-hudi-sql" \
-  --conf spark.serializer=org.apache.spark.serializer.KryoSerializer \
-  --conf spark.sql.catalog.spark_catalog=org.apache.spark.sql.hudi.catalog.HoodieCatalog \
-  --conf spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension \
-  --conf spark.sql.catalogImplementation=hive
+    --master local[*] \
+    --name "spark-hudi-sql" \
+    --conf spark.serializer=org.apache.spark.serializer.KryoSerializer \
+    --conf spark.sql.catalog.spark_catalog=org.apache.spark.sql.hudi.catalog.HoodieCatalog \
+    --conf spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension \
+    --conf spark.sql.catalogImplementation=hive
--- samples/datalake/hudi/start-hudi-compose.sh.orig
+++ samples/datalake/hudi/start-hudi-compose.sh
@@ -28,34 +28,34 @@
 md5_doris="a4d8bc9730aca3a51294e87d7d5b3e8e"
 
 download_source_file() {
-  local FILE_PATH="$1"
-  local EXPECTED_MD5="$2"
-  local DOWNLOAD_URL="$3"
+    local FILE_PATH="$1"
+    local EXPECTED_MD5="$2"
+    local DOWNLOAD_URL="$3"
 
-  echo "Download ${FILE_PATH}"
+    echo "Download ${FILE_PATH}"
 
-  if [[ -f "${FILE_PATH}" ]]; then
-    local FILE_MD5
-    FILE_MD5=$(md5sum "${FILE_PATH}" | awk '{ print $1 }')
+    if [[ -f "${FILE_PATH}" ]]; then
+        local FILE_MD5
+        FILE_MD5=$(md5sum "${FILE_PATH}" | awk '{ print $1 }')
 
-    if [[ "${FILE_MD5}" = "${EXPECTED_MD5}" ]]; then
-      echo "${FILE_PATH} is ready!"
+        if [[ "${FILE_MD5}" = "${EXPECTED_MD5}" ]]; then
+            echo "${FILE_PATH} is ready!"
+        else
+            echo "${FILE_PATH} is broken, Redownloading ..."
+            rm "${FILE_PATH}"
+            wget "${DOWNLOAD_URL}"/"${FILE_PATH}"
+        fi
     else
-      echo "${FILE_PATH} is broken, Redownloading ..."
-      rm "${FILE_PATH}"
-      wget "${DOWNLOAD_URL}"/"${FILE_PATH}"
+        echo "Downloading ${FILE_PATH} ..."
+        wget "${DOWNLOAD_URL}"/"${FILE_PATH}"
     fi
-  else
-    echo "Downloading ${FILE_PATH} ..."
-    wget "${DOWNLOAD_URL}"/"${FILE_PATH}"
-  fi
 }
 
 curdir="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
 cd "${curdir}" || exit
 
 if [[ ! -d "packages" ]]; then
-  mkdir packages
+    mkdir packages
 fi
 cd packages || exit
@@ -67,38 +67,38 @@
 download_source_file "${DORIS_PACKAGE}.tar.gz" "${md5_doris}" "${DORIS_DOWNLOAD_URL}"
 
 if [[ ! -f "jdk-17.0.2/SUCCESS" ]]; then
-  echo "Prepare jdk17 environment"
-  if [[ -d "jdk-17.0.2" ]]; then
-    echo "Remove broken jdk-17.0.2"
-    rm -rf jdk-17.0.2
-  fi
-  echo "Unpackage jdk-17.0.2"
-  tar xzf openjdk-17.0.2_linux-x64_bin.tar.gz
-  touch jdk-17.0.2/SUCCESS
+    echo "Prepare jdk17 environment"
+    if [[ -d "jdk-17.0.2" ]]; then
+        echo "Remove broken jdk-17.0.2"
+        rm -rf jdk-17.0.2
+    fi
+    echo "Unpackage jdk-17.0.2"
+    tar xzf openjdk-17.0.2_linux-x64_bin.tar.gz
+    touch jdk-17.0.2/SUCCESS
 fi
 
 if [[ ! -f "spark-3.4.2-bin-hadoop3/SUCCESS" ]]; then
-  echo "Prepare spark3.4 environment"
-  if [[ -d "spark-3.4.2-bin-hadoop3" ]]; then
-    echo "Remove broken spark-3.4.2-bin-hadoop3"
-    rm -rf spark-3.4.2-bin-hadoop3
-  fi
-  echo "Unpackage spark-3.4.2-bin-hadoop3"
-  tar -xf spark-3.4.2-bin-hadoop3.tgz
-  cp aws-java-sdk-bundle-1.12.48.jar spark-3.4.2-bin-hadoop3/jars/
-  cp hadoop-aws-3.3.1.jar spark-3.4.2-bin-hadoop3/jars/
-  cp hudi-spark3.4-bundle_2.12-0.14.1.jar spark-3.4.2-bin-hadoop3/jars/
-  touch spark-3.4.2-bin-hadoop3/SUCCESS
+    echo "Prepare spark3.4 environment"
+    if [[ -d "spark-3.4.2-bin-hadoop3" ]]; then
+        echo "Remove broken spark-3.4.2-bin-hadoop3"
+        rm -rf spark-3.4.2-bin-hadoop3
+    fi
+    echo "Unpackage spark-3.4.2-bin-hadoop3"
+    tar -xf spark-3.4.2-bin-hadoop3.tgz
+    cp aws-java-sdk-bundle-1.12.48.jar spark-3.4.2-bin-hadoop3/jars/
+    cp hadoop-aws-3.3.1.jar spark-3.4.2-bin-hadoop3/jars/
+    cp hudi-spark3.4-bundle_2.12-0.14.1.jar spark-3.4.2-bin-hadoop3/jars/
+    touch spark-3.4.2-bin-hadoop3/SUCCESS
 fi
 
 if [[ ! -f "doris-bin/SUCCESS" ]]; then
-  echo "Prepare ${DORIS_PACKAGE} environment"
-  if [[ -d "doris-bin" ]]; then
-    echo "Remove broken ${DORIS_PACKAGE}"
-    rm -rf doris-bin
-  fi
-  echo "Unpackage ${DORIS_PACKAGE}"
-  tar xzf "${DORIS_PACKAGE}".tar.gz
-  mv "${DORIS_PACKAGE}" doris-bin
-  touch doris-bin/SUCCESS
+    echo "Prepare ${DORIS_PACKAGE} environment"
+    if [[ -d "doris-bin" ]]; then
+        echo "Remove broken ${DORIS_PACKAGE}"
+        rm -rf doris-bin
+    fi
+    echo "Unpackage ${DORIS_PACKAGE}"
+    tar xzf "${DORIS_PACKAGE}".tar.gz
+    mv "${DORIS_PACKAGE}" doris-bin
+    touch doris-bin/SUCCESS
 fi
 
 cd ../
----------

You can reformat the above files to meet shfmt's requirements by typing:

  shfmt -w filename

```

</details>
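For reference, this fix is mechanical and can be reproduced locally before pushing. A minimal sketch, assuming `shfmt` v3 is on `PATH` (installable with `go install mvdan.cc/sh/v3/cmd/shfmt@latest`) and that the CI expects 4-space indentation, which is what the `+` lines in the diff above use; the authoritative options are whatever the repository's sh-checker workflow actually passes to shfmt:

```sh
# Preview what shfmt would change: -d prints a unified diff and exits
# non-zero when the formatting differs. -i 4 requests 4-space indentation,
# matching the '+' side of the report above (an assumption; check the CI config).
shfmt -d -i 4 samples/datalake/hudi/scripts/spark-hudi.sh \
    samples/datalake/hudi/start-hudi-compose.sh

# Once the diff looks right, rewrite the files in place.
shfmt -w -i 4 samples/datalake/hudi/scripts/spark-hudi.sh \
    samples/datalake/hudi/start-hudi-compose.sh
```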
-f "spark-3.4.2-bin-hadoop3/SUCCESS" ]]; then - echo "Prepare spark3.4 environment" - if [[ -d "spark-3.4.2-bin-hadoop3" ]]; then - echo "Remove broken spark-3.4.2-bin-hadoop3" - rm -rf spark-3.4.2-bin-hadoop3 - fi - echo "Unpackage spark-3.4.2-bin-hadoop3" - tar -xf spark-3.4.2-bin-hadoop3.tgz - cp aws-java-sdk-bundle-1.12.48.jar spark-3.4.2-bin-hadoop3/jars/ - cp hadoop-aws-3.3.1.jar spark-3.4.2-bin-hadoop3/jars/ - cp hudi-spark3.4-bundle_2.12-0.14.1.jar spark-3.4.2-bin-hadoop3/jars/ - touch spark-3.4.2-bin-hadoop3/SUCCESS + echo "Prepare spark3.4 environment" + if [[ -d "spark-3.4.2-bin-hadoop3" ]]; then + echo "Remove broken spark-3.4.2-bin-hadoop3" + rm -rf spark-3.4.2-bin-hadoop3 + fi + echo "Unpackage spark-3.4.2-bin-hadoop3" + tar -xf spark-3.4.2-bin-hadoop3.tgz + cp aws-java-sdk-bundle-1.12.48.jar spark-3.4.2-bin-hadoop3/jars/ + cp hadoop-aws-3.3.1.jar spark-3.4.2-bin-hadoop3/jars/ + cp hudi-spark3.4-bundle_2.12-0.14.1.jar spark-3.4.2-bin-hadoop3/jars/ + touch spark-3.4.2-bin-hadoop3/SUCCESS fi if [[ ! -f "doris-bin/SUCCESS" ]]; then - echo "Prepare ${DORIS_PACKAGE} environment" - if [[ -d "doris-bin" ]]; then - echo "Remove broken ${DORIS_PACKAGE}" - rm -rf doris-bin - fi - echo "Unpackage ${DORIS_PACKAGE}" - tar xzf "${DORIS_PACKAGE}".tar.gz - mv "${DORIS_PACKAGE}" doris-bin - touch doris-bin/SUCCESS + echo "Prepare ${DORIS_PACKAGE} environment" + if [[ -d "doris-bin" ]]; then + echo "Remove broken ${DORIS_PACKAGE}" + rm -rf doris-bin + fi + echo "Unpackage ${DORIS_PACKAGE}" + tar xzf "${DORIS_PACKAGE}".tar.gz + mv "${DORIS_PACKAGE}" doris-bin + touch doris-bin/SUCCESS fi cd ../ ---------- You can reformat the above files to meet shfmt's requirements by typing: shfmt -w filename ``` </details> -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org For queries about this service, please contact Infrastructure at: us...@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org For additional commands, e-mail: commits-h...@doris.apache.org