This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch branch-0.12
in repository https://gitbox.apache.org/repos/asf/zeppelin.git


The following commit(s) were added to refs/heads/branch-0.12 by this push:
     new 73530d071a [ZEPPELIN-6118] Remove unnecessary hadoop3 profile
73530d071a is described below

commit 73530d071a65ef446e7117751315f30f132bfc83
Author: Cheng Pan <cheng...@apache.org>
AuthorDate: Sat Oct 5 20:35:31 2024 +0800

    [ZEPPELIN-6118] Remove unnecessary hadoop3 profile
    
    ### What is this PR for?
    
    Given that hadoop2 support was removed, hadoop3 is the only option, and
    it is unlikely that hadoop4 will appear in the foreseeable future, so we
    can remove this profile to simplify the `pom.xml` files.
    
    ### What type of PR is it?
    
    Improvement
    
    ### Todos
    
    - [x] remove profile `hadoop3` and update CI workflow YAMLs and docs
    - [x] rename profile `hadoop3-aws` to `hadoop-aws`
    - [x] rename profile `hadoop3-azure` to `hadoop-azure`
    - [x] given that hadoop deps are defined in the root `pom.xml`'s
      `dependencyManagement` block, simplify sub-projects' `pom.xml` files by
      removing redundant attributes (see the sketch below)
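
    A minimal sketch of the resulting invocations, assuming a checkout of
    this branch (profile ids and the property name are taken from this
    patch; the module picked in the last command is only an example):

    ```sh
    # the hadoop3 profile is gone; a plain build now implies Hadoop 3
    ./mvnw package -DskipTests

    # the cloud-storage profiles drop the "3" from their ids
    ./mvnw package -Phadoop-aws -DskipTests     # formerly -Phadoop3-aws
    ./mvnw package -Phadoop-azure -DskipTests   # formerly -Phadoop3-azure

    # the Hadoop version remains adjustable via a property
    ./mvnw package -Dhadoop.version=3.3.6 -DskipTests

    # sub-projects now inherit the hadoop version from the root
    # dependencyManagement block; this prints what a sub-module resolves
    ./mvnw -q -pl zeppelin-zengine help:evaluate -Dexpression=hadoop.version -DforceStdout
    ```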
    
    ### What is the Jira issue?
    
    ZEPPELIN-6118
    
    ### How should this be tested?
    
    Pass GHA.
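
    For a local spot check, a minimal sketch that mirrors the updated
    `quick.yml` step (assumes JDK 11 on PATH; flags beyond those in the
    diff are not implied):

    ```sh
    # validate every pom.xml with the hadoop3 profile removed
    ./mvnw validate -Pinclude-hadoop
    ```
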
    ### Screenshots (if appropriate)
    
    ### Questions:
    * Do the license files need to be updated? No.
    * Are there breaking changes for older versions? No.
    * Does this need documentation? No.
    
    Closes #4861 from pan3793/ZEPPELIN-6118.
    
    Signed-off-by: Cheng Pan <cheng...@apache.org>
    (cherry picked from commit ed2ee4dfef1ccd7a816974bfda77c12841e7c18f)
    Signed-off-by: Cheng Pan <cheng...@apache.org>
---
 .github/workflows/core.yml                         | 34 ++++++++---------
 .github/workflows/frontend.yml                     |  8 ++--
 .github/workflows/quick.yml                        |  4 +-
 Dockerfile                                         |  4 +-
 alluxio/pom.xml                                    |  2 -
 .../contribution/useful_developer_tools.md         |  4 +-
 docs/setup/basics/how_to_build.md                  | 11 +-----
 flink-cmd/pom.xml                                  |  1 -
 jdbc/pom.xml                                       |  2 -
 pom.xml                                            | 17 ++++-----
 zeppelin-interpreter-integration/pom.xml           | 41 +++++---------------
 zeppelin-interpreter/pom.xml                       | 27 +++----------
 zeppelin-plugins/launcher/yarn/pom.xml             | 43 +++++++--------------
 zeppelin-plugins/notebookrepo/filesystem/pom.xml   | 44 +++++-----------------
 zeppelin-server/pom.xml                            | 30 +++------------
 zeppelin-zengine/pom.xml                           | 23 ++---------
 16 files changed, 83 insertions(+), 212 deletions(-)

diff --git a/.github/workflows/core.yml b/.github/workflows/core.yml
index 25a84630b8..c2b3b869a2 100644
--- a/.github/workflows/core.yml
+++ b/.github/workflows/core.yml
@@ -40,7 +40,6 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        hadoop: [hadoop3]
         java: [ 8, 11 ]
     steps:
       - name: Checkout
@@ -65,7 +64,7 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-zeppelin-
       - name: install application with some interpreter
-        run: ./mvnw install -Pbuild-distr -DskipTests -pl zeppelin-server,zeppelin-web,spark-submit,spark/scala-2.12,spark/scala-2.13,markdown,angular,shell -am -Pweb-classic -Phelium-dev -Pexamples -P${{ matrix.hadoop }} ${MAVEN_ARGS}
+        run: ./mvnw install -Pbuild-distr -DskipTests -pl zeppelin-server,zeppelin-web,spark-submit,spark/scala-2.12,spark/scala-2.13,markdown,angular,shell -am -Pweb-classic -Phelium-dev -Pexamples ${MAVEN_ARGS}
       - name: install and test plugins
         run: ./mvnw package -pl zeppelin-plugins -amd ${MAVEN_ARGS}
       - name: Setup conda environment with python 3.9 and R
@@ -83,8 +82,8 @@ jobs:
           R -e "IRkernel::installspec()"
           conda list
           conda info
-      - name: run tests with ${{ matrix.hadoop }} # skip spark test because we would run them in other CI
-        run: ./mvnw verify -Pusing-packaged-distr -pl zeppelin-server,zeppelin-web,spark-submit,spark/scala-2.12,spark/scala-2.13,markdown,angular,shell -am -Pweb-classic -Phelium-dev -Pexamples -P${{ matrix.hadoop }} -Dtests.to.exclude=**/org/apache/zeppelin/spark/* -DfailIfNoTests=false
+      - name: run tests # skip spark test because we would run them in other CI
+        run: ./mvnw verify -Pusing-packaged-distr -pl zeppelin-server,zeppelin-web,spark-submit,spark/scala-2.12,spark/scala-2.13,markdown,angular,shell -am -Pweb-classic -Phelium-dev -Pexamples -Dtests.to.exclude=**/org/apache/zeppelin/spark/* -DfailIfNoTests=false
 
   # test interpreter modules except spark, flink, python, rlang, jupyter
   interpreter-test-non-core:
@@ -180,7 +179,7 @@ jobs:
           R -e "IRkernel::installspec()"
       - name: install environment
         run: |
-          ./mvnw install -DskipTests -pl python,rlang,zeppelin-jupyter-interpreter -am -Phadoop3 ${MAVEN_ARGS}
+          ./mvnw install -DskipTests -pl python,rlang,zeppelin-jupyter-interpreter -am ${MAVEN_ARGS}
       - name: run tests with ${{ matrix.python }}
         run: |
           ./mvnw test -pl python,rlang,zeppelin-jupyter-interpreter -DfailIfNoTests=false ${MAVEN_ARGS}
@@ -218,7 +217,7 @@ jobs:
             ${{ runner.os }}-zeppelin-
       - name: install environment
         run: |
-          ./mvnw install -DskipTests -Phadoop3 -Pintegration -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/scala-2.12,spark/scala-2.13,markdown,flink-cmd,flink/flink-scala-2.12,jdbc,shell -am -Pweb-classic -Pflink-117 ${MAVEN_ARGS}
+          ./mvnw install -DskipTests -Pintegration -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/scala-2.12,spark/scala-2.13,markdown,flink-cmd,flink/flink-scala-2.12,jdbc,shell -am -Pweb-classic -Pflink-117 ${MAVEN_ARGS}
           ./mvnw package -pl zeppelin-plugins -amd -DskipTests ${MAVEN_ARGS}
       - name: Setup conda environment with python 3.9 and R
         uses: conda-incubator/setup-miniconda@v3
@@ -234,7 +233,7 @@ jobs:
         run: |
           R -e "IRkernel::installspec()"
       - name: run tests
-        run: ./mvnw test -pl zeppelin-interpreter-integration -Phadoop3 -Pintegration -DfailIfNoTests=false -Dtest=ZeppelinClientIntegrationTest,ZeppelinClientWithAuthIntegrationTest,ZSessionIntegrationTest,ShellIntegrationTest,JdbcIntegrationTest
+        run: ./mvnw test -pl zeppelin-interpreter-integration -Pintegration -DfailIfNoTests=false -Dtest=ZeppelinClientIntegrationTest,ZeppelinClientWithAuthIntegrationTest,ZSessionIntegrationTest,ShellIntegrationTest,JdbcIntegrationTest
       - name: Print zeppelin logs
         if: always()
         run: if [ -d "logs" ]; then cat logs/*; fi
@@ -274,7 +273,7 @@ jobs:
             ${{ runner.os }}-zeppelin-
       - name: install environment for flink
         run: |
-          ./mvnw install -DskipTests -am -pl flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop3 -Pintegration ${MAVEN_ARGS}
+          ./mvnw install -DskipTests -am -pl flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Pintegration ${MAVEN_ARGS}
           ./mvnw clean package -pl zeppelin-plugins -amd -DskipTests ${MAVEN_ARGS}
       - name: Setup conda environment with python ${{ matrix.python }} and R
         uses: conda-incubator/setup-miniconda@v3
@@ -287,7 +286,7 @@ jobs:
           auto-activate-base: false
           use-mamba: true
       - name: run tests for flink
-        run: ./mvnw verify -pl flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -am -Phadoop3 -Pintegration -DfailIfNoTests=false -Dtest=org.apache.zeppelin.flink.*Test,FlinkIntegrationTest${{ matrix.flink }} ${MAVEN_ARGS}
+        run: ./mvnw verify -pl flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -am -Pintegration -DfailIfNoTests=false -Dtest=org.apache.zeppelin.flink.*Test,FlinkIntegrationTest${{ matrix.flink }} ${MAVEN_ARGS}
       - name: Print zeppelin logs
         if: always()
         run: if [ -d "logs" ]; then cat logs/*; fi
@@ -298,7 +297,6 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        hadoop: [ 3 ]
         java: [ 8, 11 ]
     steps:
       - name: Checkout
@@ -323,7 +321,7 @@ jobs:
             ${{ runner.os }}-zeppelin-
       - name: install environment
         run: |
-          ./mvnw install -DskipTests -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/scala-2.12,spark/scala-2.13,markdown -am -Pweb-classic -Phadoop3 -Pintegration ${MAVEN_ARGS}
+          ./mvnw install -DskipTests -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/scala-2.12,spark/scala-2.13,markdown -am -Pweb-classic -Pintegration ${MAVEN_ARGS}
           ./mvnw clean package -pl zeppelin-plugins -amd -DskipTests ${MAVEN_ARGS}
       - name: Setup conda environment with python 3.9 and R
         uses: conda-incubator/setup-miniconda@v3
@@ -338,8 +336,8 @@ jobs:
       - name: Make IRkernel available to Jupyter
         run: |
           R -e "IRkernel::installspec()"
-      - name: run tests on hadoop${{ matrix.hadoop }}
-        run: ./mvnw test -pl zeppelin-interpreter-integration -Phadoop${{ matrix.hadoop }} -Pintegration -Dtest=SparkSubmitIntegrationTest,ZeppelinSparkClusterTest32,SparkIntegrationTest32,ZeppelinSparkClusterTest33,SparkIntegrationTest33 -DfailIfNoTests=false ${MAVEN_ARGS}
+      - name: run tests
+        run: ./mvnw test -pl zeppelin-interpreter-integration -Pintegration -Dtest=SparkSubmitIntegrationTest,ZeppelinSparkClusterTest32,SparkIntegrationTest32,ZeppelinSparkClusterTest33,SparkIntegrationTest33 -DfailIfNoTests=false ${MAVEN_ARGS}
 
   # test on spark for each spark version & scala version
   spark-test:
@@ -376,7 +374,7 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-zeppelin-
       - name: install environment
-        run: ./mvnw install -DskipTests -pl spark-submit,spark/scala-2.12,spark/scala-2.13 -am -Phadoop3 ${MAVEN_ARGS}
+        run: ./mvnw install -DskipTests -pl spark-submit,spark/scala-2.12,spark/scala-2.13 -am ${MAVEN_ARGS}
       - name: Setup conda environment with python ${{ matrix.python }} and R
         uses: conda-incubator/setup-miniconda@v3
         with:
@@ -393,20 +391,20 @@ jobs:
       - name: run spark-3.3 tests with scala-2.12 and python-${{ matrix.python }}
         run: |
           rm -rf spark/interpreter/metastore_db
-          ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.3 -Pspark-scala-2.12 -Phadoop3 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS}
+          ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.3 -Pspark-scala-2.12 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS}
       - name: run spark-3.3 tests with scala-2.13 and python-${{ matrix.python }}
         run: |
           rm -rf spark/interpreter/metastore_db
-          ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.3 -Pspark-scala-2.13 -Phadoop3 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS}
+          ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.3 -Pspark-scala-2.13 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS}
       - name: run spark-3.4 tests with scala-2.13 and python-${{ matrix.python }}
         run: |
           rm -rf spark/interpreter/metastore_db
-          ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.4 -Pspark-scala-2.13 -Phadoop3 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS}
+          ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.4 -Pspark-scala-2.13 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS}
       - name: run spark-3.5 tests with scala-2.13 and python-${{ matrix.python }}
         if: matrix.python >= '3.8'
         run: |
           rm -rf spark/interpreter/metastore_db
-          ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.5 -Pspark-scala-2.13 -Phadoop3 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS}
+          ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.5 -Pspark-scala-2.13 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS}
 
   # The version combination is based on the facts:
   # 1. official Livy 0.8 binary tarball is built against Spark 2.4
diff --git a/.github/workflows/frontend.yml b/.github/workflows/frontend.yml
index 10d00e9ed4..1be63e5a26 100644
--- a/.github/workflows/frontend.yml
+++ b/.github/workflows/frontend.yml
@@ -53,9 +53,9 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-zeppelin-
       - name: Install application
-        run: ./mvnw clean install -DskipTests -am -pl zeppelin-web -Pweb-classic -Pspark-scala-2.12 -Pspark-3.4 -Phadoop3 -Pweb-dist ${MAVEN_ARGS}
+        run: ./mvnw clean install -DskipTests -am -pl zeppelin-web -Pweb-classic -Pspark-scala-2.12 -Pspark-3.4 -Pweb-dist ${MAVEN_ARGS}
       - name: Run headless test
-        run: xvfb-run --auto-servernum --server-args="-screen 0 1024x768x24" ./mvnw verify -pl zeppelin-web -Pweb-classic -Pspark-scala-2.12 -Pspark-3.4 -Phadoop3 -Pweb-dist -Pweb-e2e ${MAVEN_ARGS}
+        run: xvfb-run --auto-servernum --server-args="-screen 0 1024x768x24" ./mvnw verify -pl zeppelin-web -Pweb-classic -Pspark-scala-2.12 -Pspark-3.4 -Pweb-dist -Pweb-e2e ${MAVEN_ARGS}
       - name: Print zeppelin logs
         if: always()
         run: if [ -d "logs" ]; then cat logs/*; fi
@@ -127,10 +127,10 @@ jobs:
           R -e "IRkernel::installspec()"
       - name: Install Environment
         run: |
-          ./mvnw clean install -DskipTests -am -pl zeppelin-integration -Pweb-classic -Pintegration -Pspark-scala-2.12 -Pspark-3.4 -Phadoop3 -Pweb-dist ${MAVEN_ARGS}
+          ./mvnw clean install -DskipTests -am -pl zeppelin-integration -Pweb-classic -Pintegration -Pspark-scala-2.12 -Pspark-3.4 -Pweb-dist ${MAVEN_ARGS}
       - name: run tests
         run: |
-          xvfb-run --auto-servernum --server-args="-screen 0 1600x1024x16" ./mvnw verify -DfailIfNoTests=false -pl zeppelin-integration -Pweb-classic -Pintegration -Pspark-scala-2.12 -Pspark-3.4 -Phadoop3 -Pweb-dist -Pusing-source-tree ${MAVEN_ARGS}
+          xvfb-run --auto-servernum --server-args="-screen 0 1600x1024x16" ./mvnw verify -DfailIfNoTests=false -pl zeppelin-integration -Pweb-classic -Pintegration -Pspark-scala-2.12 -Pspark-3.4 -Pweb-dist -Pusing-source-tree ${MAVEN_ARGS}
       - name: Print zeppelin logs
         if: always()
         run: if [ -d "logs" ]; then cat logs/*; fi
diff --git a/.github/workflows/quick.yml b/.github/workflows/quick.yml
index b26f015c6b..d32c9f7838 100644
--- a/.github/workflows/quick.yml
+++ b/.github/workflows/quick.yml
@@ -40,8 +40,6 @@ jobs:
     runs-on: ubuntu-20.04
     strategy:
       fail-fast: false
-      matrix:
-        hadoop: [hadoop3]
     steps:
       - name: Checkout
         uses: actions/checkout@v4
@@ -51,4 +49,4 @@ jobs:
           distribution: 'temurin'
           java-version: 11
       - name: Run Maven Validate
-        run: ./mvnw validate -P${{ matrix.hadoop }} -Pinclude-hadoop ${MAVEN_ARGS}
+        run: ./mvnw validate -Pinclude-hadoop ${MAVEN_ARGS}
diff --git a/Dockerfile b/Dockerfile
index 0b5baacb71..982e54ed44 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -21,9 +21,9 @@ ENV MAVEN_OPTS="-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverhea
 # Allow npm and bower to run with root privileges
 RUN echo "unsafe-perm=true" > ~/.npmrc && \
     echo '{ "allow_root": true }' > ~/.bowerrc && \
-    ./mvnw -B package -DskipTests -Pbuild-distr -Pspark-3.5 -Pinclude-hadoop -Phadoop3 -Pspark-scala-2.12 -Pweb-classic -Pweb-dist && \
+    ./mvnw -B package -DskipTests -Pbuild-distr -Pspark-3.5 -Pinclude-hadoop -Pspark-scala-2.12 -Pweb-classic -Pweb-dist && \
     # Example with doesn't compile all interpreters
-    # ./mvnw -B package -DskipTests -Pbuild-distr -Pspark-3.5 -Pinclude-hadoop -Phadoop3 -Pspark-scala-2.12 -Pweb-classic -Pweb-dist -pl '!groovy,!livy,!hbase,!file,!flink' && \
+    # ./mvnw -B package -DskipTests -Pbuild-distr -Pspark-3.5 -Pinclude-hadoop -Pspark-scala-2.12 -Pweb-classic -Pweb-dist -pl '!groovy,!livy,!hbase,!file,!flink' && \
     mv /workspace/zeppelin/zeppelin-distribution/target/zeppelin-*-bin/zeppelin-*-bin /opt/zeppelin/ && \
     # Removing stuff saves time, because docker creates a temporary layer
     rm -rf ~/.m2 && \
diff --git a/alluxio/pom.xml b/alluxio/pom.xml
index 87d374db3e..ca521cdf26 100644
--- a/alluxio/pom.xml
+++ b/alluxio/pom.xml
@@ -79,14 +79,12 @@
         <dependency>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-client-api</artifactId>
-            <version>${hadoop.version}</version>
             <scope>test</scope>
         </dependency>
 
         <dependency>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-client-runtime</artifactId>
-            <version>${hadoop.version}</version>
             <scope>test</scope>
         </dependency>
     </dependencies>
diff --git a/docs/development/contribution/useful_developer_tools.md b/docs/development/contribution/useful_developer_tools.md
index 47f3a84dc6..7e2dc3e7c8 100644
--- a/docs/development/contribution/useful_developer_tools.md
+++ b/docs/development/contribution/useful_developer_tools.md
@@ -69,8 +69,8 @@ you can use this function like `setjdk 1.8` / `setjdk 1.7`
 # build spark related modules with default profiles
 ./mvnw clean package -pl 'spark,spark-dependencies,zeppelin-server' --am -DskipTests
 
-# build spark related modules with profiles: scala 2.13, spark 3.5 hadoop 3.3
-./mvnw clean package -Pspark-scala-2.13 -Pspark-3.5 -Phadoop-3.3 \
+# build spark related modules with profiles: scala 2.13, spark 3.5
+./mvnw clean package -Pspark-scala-2.13 -Pspark-3.5 \
 -pl 'spark,spark-dependencies,zeppelin-server' --am -DskipTests
 
 # build `zeppelin-server` and `markdown` with dependencies
diff --git a/docs/setup/basics/how_to_build.md b/docs/setup/basics/how_to_build.md
index bccc2143b8..b842e242a1 100644
--- a/docs/setup/basics/how_to_build.md
+++ b/docs/setup/basics/how_to_build.md
@@ -113,19 +113,12 @@ Available profiles are
 -Pspark-scala-2.13
 ```
  
-#### Build hadoop with Zeppelin (`-Phadoop[version]`)
+#### Build hadoop with Zeppelin
  
 To be noticed, hadoop profiles only affect Zeppelin server, it doesn't affect any interpreter. 
 Zeppelin server use hadoop in some cases, such as using hdfs as notebook storage. You can check this [page](./hadoop_integration.html) for more details about how to configure hadoop in Zeppelin.
 
-Set hadoop major version (default hadoop3).
-Available profiles are
-
-```
--Phadoop3
-```
-
-minor version can be adjusted by `-Dhadoop.version=x.x.x`
+Hadoop version can be adjusted by `-Dhadoop.version=x.x.x`
 
 
 ##### `-Pvendor-repo` (optional)
diff --git a/flink-cmd/pom.xml b/flink-cmd/pom.xml
index 4f4456f8dc..aa87eda202 100644
--- a/flink-cmd/pom.xml
+++ b/flink-cmd/pom.xml
@@ -45,7 +45,6 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-client-runtime</artifactId>
-      <version>${hadoop.version}</version>
       <scope>provided</scope>
     </dependency>
   </dependencies>
diff --git a/jdbc/pom.xml b/jdbc/pom.xml
index f60b66aae6..339e938496 100644
--- a/jdbc/pom.xml
+++ b/jdbc/pom.xml
@@ -35,7 +35,6 @@
     <!--library versions-->
     <interpreter.name>jdbc</interpreter.name>
     <postgresql.version>42.7.2</postgresql.version>
-    <hadoop.version>${hadoop3.3.version}</hadoop.version>
     <h2.version>2.2.220</h2.version>
     <commons.dbcp2.version>2.0.1</commons.dbcp2.version>
     <hive3.version>3.1.3</hive3.version>
@@ -87,7 +86,6 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-client-runtime</artifactId>
-      <version>${hadoop.version}</version>
       <scope>provided</scope>
     </dependency>
 
diff --git a/pom.xml b/pom.xml
index 733650739c..206ce225d7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -141,9 +141,7 @@
     <micrometer.version>1.6.0</micrometer.version>
     <findbugs.jsr305.version>3.0.2</findbugs.jsr305.version>
 
-    <hadoop3.2.version>3.2.4</hadoop3.2.version>
-    <hadoop3.3.version>3.3.6</hadoop3.3.version>
-    <hadoop.version>${hadoop3.3.version}</hadoop.version>
+    <hadoop.version>3.3.6</hadoop.version>
     <hadoop.deps.scope>provided</hadoop.deps.scope>
 
     <quartz.scheduler.version>2.3.2</quartz.scheduler.version>
@@ -393,13 +391,6 @@
         <version>${jettison.version}</version>
       </dependency>
 
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-client-api</artifactId>
-        <version>${hadoop.version}</version>
-        <scope>${hadoop.deps.scope}</scope>
-      </dependency>
-
       <!-- Test libraries -->
       <dependency>
         <groupId>org.junit.jupiter</groupId>
@@ -457,6 +448,12 @@
         <scope>test</scope>
       </dependency>
 
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-client-api</artifactId>
+        <version>${hadoop.version}</version>
+        <scope>${hadoop.deps.scope}</scope>
+      </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-client-runtime</artifactId>
diff --git a/zeppelin-interpreter-integration/pom.xml b/zeppelin-interpreter-integration/pom.xml
index 631fffc02f..d86e24d2cf 100644
--- a/zeppelin-interpreter-integration/pom.xml
+++ b/zeppelin-interpreter-integration/pom.xml
@@ -138,7 +138,17 @@
       </exclusions>
     </dependency>
 
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client-runtime</artifactId>
+      <scope>test</scope>
+    </dependency>
 
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client-minicluster</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <build>
@@ -172,35 +182,4 @@
       </plugin>
     </plugins>
   </build>
-
-  <profiles>
-    <profile>
-      <id>hadoop3</id>
-
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
-
-      <properties>
-        <hadoop.version>${hadoop3.3.version}</hadoop.version>
-      </properties>
-
-      <dependencies>
-
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client-runtime</artifactId>
-          <scope>test</scope>
-        </dependency>
-
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client-minicluster</artifactId>
-          <scope>test</scope>
-        </dependency>
-
-      </dependencies>
-    </profile>
-  </profiles>
-
 </project>
diff --git a/zeppelin-interpreter/pom.xml b/zeppelin-interpreter/pom.xml
index 52e590769e..7ef81f82de 100644
--- a/zeppelin-interpreter/pom.xml
+++ b/zeppelin-interpreter/pom.xml
@@ -181,6 +181,11 @@
       <version>${jline.version}</version>
     </dependency>
 
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client-runtime</artifactId>
+    </dependency>
+
     <dependency>
       <groupId>org.junit.jupiter</groupId>
       <artifactId>junit-jupiter-engine</artifactId>
@@ -225,26 +230,4 @@
       </resource>
     </resources>
   </build>
-
-  <profiles>
-    <profile>
-      <id>hadoop3</id>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
-      <properties>
-        <hadoop.version>${hadoop3.3.version}</hadoop.version>
-      </properties>
-
-      <dependencies>
-
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client-runtime</artifactId>
-        </dependency>
-
-      </dependencies>
-    </profile>
-  </profiles>
-
 </project>
diff --git a/zeppelin-plugins/launcher/yarn/pom.xml b/zeppelin-plugins/launcher/yarn/pom.xml
index 5f2dcfc363..0a7abe4da8 100644
--- a/zeppelin-plugins/launcher/yarn/pom.xml
+++ b/zeppelin-plugins/launcher/yarn/pom.xml
@@ -38,6 +38,20 @@
     <plugin.name>Launcher/YarnInterpreterLauncher</plugin.name>
   </properties>
 
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client-api</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client-runtime</artifactId>
+      <scope>provided</scope>
+    </dependency>
+  </dependencies>
+
   <build>
     <testResources>
       <testResource>
@@ -70,33 +84,4 @@
       </resource>
     </resources>
   </build>
-
-  <profiles>
-    <profile>
-      <id>hadoop3</id>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
-      <properties>
-        <hadoop.version>${hadoop3.3.version}</hadoop.version>
-      </properties>
-
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client-api</artifactId>
-          <scope>provided</scope>
-          <version>${hadoop.version}</version>
-        </dependency>
-
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client-runtime</artifactId>
-          <scope>provided</scope>
-          <version>${hadoop.version}</version>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
 </project>
diff --git a/zeppelin-plugins/notebookrepo/filesystem/pom.xml b/zeppelin-plugins/notebookrepo/filesystem/pom.xml
index 6e8858802b..c8c828c98a 100644
--- a/zeppelin-plugins/notebookrepo/filesystem/pom.xml
+++ b/zeppelin-plugins/notebookrepo/filesystem/pom.xml
@@ -38,6 +38,14 @@
         <plugin.name>NotebookRepo/FileSystemNotebookRepo</plugin.name>
     </properties>
 
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-client-runtime</artifactId>
+            <scope>provided</scope>
+        </dependency>
+    </dependencies>
+
     <build>
         <plugins>
             <plugin>
@@ -47,37 +55,8 @@
     </build>
 
     <profiles>
-
-        <profile>
-            <id>hadoop3</id>
-            <activation>
-                <activeByDefault>true</activeByDefault>
-            </activation>
-            <properties>
-                <hadoop.version>${hadoop3.3.version}</hadoop.version>
-            </properties>
-
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-client-runtime</artifactId>
-                    <scope>provided</scope>
-                    <version>${hadoop.version}</version>
-                    <exclusions>
-                        <exclusion>
-                            <groupId>com.google.code.findbugs</groupId>
-                            <artifactId>jsr305</artifactId>
-                        </exclusion>
-                    </exclusions>
-                </dependency>
-            </dependencies>
-        </profile>
-
         <profile>
-            <id>hadoop3-azure</id>
-            <properties>
-                <hadoop.version>${hadoop3.3.version}</hadoop.version>
-            </properties>
+            <id>hadoop-azure</id>
             <dependencies>
                 <dependency>
                     <groupId>org.apache.hadoop</groupId>
@@ -153,10 +132,7 @@
         </profile>
 
         <profile>
-            <id>hadoop3-aws</id>
-            <properties>
-                <hadoop.version>${hadoop3.3.version}</hadoop.version>
-            </properties>
+            <id>hadoop-aws</id>
             <dependencies>
                 <dependency>
                     <groupId>org.apache.hadoop</groupId>
diff --git a/zeppelin-server/pom.xml b/zeppelin-server/pom.xml
index e298beed5b..93fdca7a45 100644
--- a/zeppelin-server/pom.xml
+++ b/zeppelin-server/pom.xml
@@ -395,6 +395,12 @@
         <artifactId>diff-match-patch</artifactId>
         <version>1.1</version>
       </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client-runtime</artifactId>
+      <scope>${hadoop.deps.scope}</scope>
+    </dependency>
   </dependencies>
   <build>
     <plugins>
@@ -472,29 +478,5 @@
         </zeppelin.daemon.package.base>
       </properties>
     </profile>
-
-    <profile>
-      <id>hadoop3</id>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
-      <properties>
-        <hadoop.version>${hadoop3.3.version}</hadoop.version>
-        <hadoop-client-api.artifact>hadoop-client-api</hadoop-client-api.artifact>
-        <hadoop-client-runtime.artifact>hadoop-client-runtime</hadoop-client-runtime.artifact>
-      </properties>
-
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>${hadoop-client-runtime.artifact}</artifactId>
-          <version>${hadoop.version}</version>
-          <scope>${hadoop.deps.scope}</scope>
-        </dependency>
-      </dependencies>
-
-    </profile>
-
   </profiles>
-
 </project>
diff --git a/zeppelin-zengine/pom.xml b/zeppelin-zengine/pom.xml
index 2097deb85b..dabe03e4c9 100644
--- a/zeppelin-zengine/pom.xml
+++ b/zeppelin-zengine/pom.xml
@@ -292,6 +292,10 @@
       <version>${commons.compress.version}</version>
     </dependency>
 
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client-runtime</artifactId>
+    </dependency>
   </dependencies>
 
   <build>
@@ -328,23 +332,4 @@
 
     </plugins>
   </build>
-
-  <profiles>
-    <profile>
-      <id>hadoop3</id>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
-      <properties>
-        <hadoop.version>${hadoop3.3.version}</hadoop.version>
-      </properties>
-
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client-runtime</artifactId>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
 </project>
