This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch branch-0.12
in repository https://gitbox.apache.org/repos/asf/zeppelin.git
The following commit(s) were added to refs/heads/branch-0.12 by this push:
     new c77e9f26e7  [ZEPPELIN-6086] Official Spark 4.0 support
c77e9f26e7 is described below

commit c77e9f26e74e92ae746ceffc8e3cc77a156bda2e
Author: Adam Binford <adam...@gmail.com>
AuthorDate: Thu Jun 12 06:19:56 2025 -0400

    [ZEPPELIN-6086] Official Spark 4.0 support

    ### What is this PR for?

    Follow-up to https://github.com/apache/zeppelin/pull/4825. Now that Spark 4.0 is officially released, update the build to use the release, add it to the CI matrix, and bump the maximum supported version.

    ### What type of PR is it?
    Feature

    ### What is the Jira issue?
    https://issues.apache.org/jira/browse/ZEPPELIN-6086

    ### How should this be tested?
    * Strongly recommended: add automated unit tests for any new or changed behavior
    * Outline any manual steps to test the PR here.

    Added Spark 4.0 to the CI tests.

    ### Screenshots (if appropriate)

    ### Questions:
    * Do the license files need to be updated?
    * Are there breaking changes for older versions?
    * Does this need documentation?

    Closes #4944 from Kimahriman/spark-4.0-supported.

    Signed-off-by: Cheng Pan <cheng...@apache.org>
    (cherry picked from commit 9820caae70289be49b40a9ea4899427c996f5d98)
    Signed-off-by: Cheng Pan <cheng...@apache.org>
---
 .github/workflows/core.yml                                     | 11 ++++++++++-
 spark/interpreter/pom.xml                                      |  4 ++--
 .../src/main/java/org/apache/zeppelin/spark/SparkVersion.java  |  4 ++--
 3 files changed, 14 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/core.yml b/.github/workflows/core.yml
index 8c6935fdca..8d4ae1e3cb 100644
--- a/.github/workflows/core.yml
+++ b/.github/workflows/core.yml
@@ -341,7 +341,7 @@ jobs:
       fail-fast: false
       matrix:
         python: [ 3.9 ]
-        java: [ 11 ]
+        java: [ 11, 17 ]
     steps:
       - name: Checkout
         uses: actions/checkout@v4
@@ -379,21 +379,30 @@ jobs:
         run: |
           R -e "IRkernel::installspec()"
       - name: run spark-3.3 tests with scala-2.12 and python-${{ matrix.python }}
+        if: ${{ matrix.java == 11 }}
         run: |
           rm -rf spark/interpreter/metastore_db
           ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.3 -Pspark-scala-2.12 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS}
       - name: run spark-3.3 tests with scala-2.13 and python-${{ matrix.python }}
+        if: ${{ matrix.java == 11 }}
         run: |
           rm -rf spark/interpreter/metastore_db
           ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.3 -Pspark-scala-2.13 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS}
       - name: run spark-3.4 tests with scala-2.13 and python-${{ matrix.python }}
+        if: ${{ matrix.java == 11 }}
         run: |
           rm -rf spark/interpreter/metastore_db
           ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.4 -Pspark-scala-2.13 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS}
       - name: run spark-3.5 tests with scala-2.13 and python-${{ matrix.python }}
+        if: ${{ matrix.java == 11 }}
         run: |
           rm -rf spark/interpreter/metastore_db
           ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.5 -Pspark-scala-2.13 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS}
+      - name: run spark-4.0 tests with scala-2.13 and python-${{ matrix.python }}
+        if: ${{ matrix.java == 17 }}
+        run: |
+          rm -rf spark/interpreter/metastore_db
+          ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-4.0 -Pspark-scala-2.13 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS}
       # Livy 0.8.0 tested with Spark 3.2
       # https://github.com/apache/incubator-livy/blob/v0.8.0-incubating/dev/docker/livy-dev-spark/Dockerfile#L20
diff --git a/spark/interpreter/pom.xml b/spark/interpreter/pom.xml
index a1731d7a76..4eb8a8cb5b 100644
--- a/spark/interpreter/pom.xml
+++ b/spark/interpreter/pom.xml
@@ -569,9 +569,9 @@
     <profile>
       <id>spark-4.0</id>
       <properties>
-        <spark.version>4.0.0-preview2</spark.version>
+        <spark.version>4.0.0</spark.version>
         <protobuf.version>3.21.12</protobuf.version>
-        <py4j.version>0.10.9.7</py4j.version>
+        <py4j.version>0.10.9.9</py4j.version>
         <libthrift.version>0.16.0</libthrift.version>
       </properties>
     </profile>
diff --git a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkVersion.java b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkVersion.java
index ff018c0369..2db447985a 100644
--- a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkVersion.java
+++ b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkVersion.java
@@ -29,10 +29,10 @@ public class SparkVersion {
   public static final SparkVersion SPARK_3_5_0 = SparkVersion.fromVersionString("3.5.0");

-  public static final SparkVersion SPARK_4_0_0 = SparkVersion.fromVersionString("4.0.0");
+  public static final SparkVersion SPARK_4_1_0 = SparkVersion.fromVersionString("4.1.0");

   public static final SparkVersion MIN_SUPPORTED_VERSION = SPARK_3_3_0;
-  public static final SparkVersion UNSUPPORTED_FUTURE_VERSION = SPARK_4_0_0;
+  public static final SparkVersion UNSUPPORTED_FUTURE_VERSION = SPARK_4_1_0;

   private int version;
   private int majorVersion;
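
For readers who want to see what "bump max supported version" buys, below is a minimal, self-contained sketch of the half-open support range that the `MIN_SUPPORTED_VERSION` and `UNSUPPORTED_FUTURE_VERSION` constants imply. It is an illustration only, not Zeppelin's actual `SparkVersion` implementation; the class name, `toComparable`, and `isSupported` are hypothetical, and encoding "major.minor.patch" as a single integer is an assumption.

```java
// Hypothetical sketch of a [MIN_SUPPORTED, UNSUPPORTED_FUTURE) version gate;
// not Zeppelin's actual SparkVersion class.
public class SparkVersionGateSketch {

  /** Collapses "major.minor.patch" into one comparable integer, e.g. "4.0.0" -> 40000 (assumed encoding). */
  static int toComparable(String versionString) {
    // Strip suffixes such as "-preview2" or "-SNAPSHOT" before parsing the numbers.
    String numericPart = versionString.split("-")[0];
    String[] parts = numericPart.split("\\.");
    int major = Integer.parseInt(parts[0]);
    int minor = parts.length > 1 ? Integer.parseInt(parts[1]) : 0;
    int patch = parts.length > 2 ? Integer.parseInt(parts[2]) : 0;
    return major * 10000 + minor * 100 + patch;
  }

  // Bounds mirroring the values after this commit: supported range is [3.3.0, 4.1.0).
  static final int MIN_SUPPORTED = toComparable("3.3.0");
  static final int UNSUPPORTED_FUTURE = toComparable("4.1.0");

  /** A version is supported when it falls inside the half-open range [MIN_SUPPORTED, UNSUPPORTED_FUTURE). */
  static boolean isSupported(String versionString) {
    int v = toComparable(versionString);
    return v >= MIN_SUPPORTED && v < UNSUPPORTED_FUTURE;
  }

  public static void main(String[] args) {
    System.out.println(isSupported("3.5.1")); // true  - inside the range
    System.out.println(isSupported("4.0.0")); // true  - newly inside the range after this commit
    System.out.println(isSupported("4.1.0")); // false - future version, not yet supported
    System.out.println(isSupported("3.2.4")); // false - below the minimum
  }
}
```

With the bumped constant, Spark 4.0.x releases fall inside the supported range, while 4.1.0 becomes the new future version treated as unsupported until it is explicitly added.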