This is an automated email from the ASF dual-hosted git repository.

ruifengz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new b14a63ad7c40 [SPARK-54826][INFRA] Delete scheduled job for numpy 2.1.3
b14a63ad7c40 is described below

commit b14a63ad7c403ff604afa802c134d37e9a3f730d
Author: Ruifeng Zheng <[email protected]>
AuthorDate: Thu Dec 25 11:17:42 2025 +0800

    [SPARK-54826][INFRA] Delete scheduled job for numpy 2.1.3
    
    ### What changes were proposed in this pull request?
    Delete scheduled job for numpy 2.1.3
    
    ### Why are the changes needed?
    We are testing with numpy 2.3 and will change to 2.4 soon, so it is
unnecessary to have a job for 2.1.3 now.
    
    ### Does this PR introduce _any_ user-facing change?
    no
    
    ### How was this patch tested?
    ci
    
    ### Was this patch authored or co-authored using generative AI tooling?
    no
    
    Closes #53586 from zhengruifeng/del_np_213.
    
    Authored-by: Ruifeng Zheng <[email protected]>
    Signed-off-by: Ruifeng Zheng <[email protected]>
---
 .github/workflows/build_infra_images_cache.yml | 13 -----
 .github/workflows/build_python_numpy_2.1.3.yml | 47 ---------------
 README.md                                      |  1 -
 dev/spark-test-image/numpy-213/Dockerfile      | 80 --------------------------
 4 files changed, 141 deletions(-)

diff --git a/.github/workflows/build_infra_images_cache.yml 
b/.github/workflows/build_infra_images_cache.yml
index 7de6fc6c6c73..e7da95446e1b 100644
--- a/.github/workflows/build_infra_images_cache.yml
+++ b/.github/workflows/build_infra_images_cache.yml
@@ -272,16 +272,3 @@ jobs:
       - name: Image digest (PySpark with Python 3.14 no GIL)
         if: hashFiles('dev/spark-test-image/python-314-nogil/Dockerfile') != ''
         run: echo ${{ 
steps.docker_build_pyspark_python_314_nogil.outputs.digest }}
-      - name: Build and push (PySpark with Numpy 2.1.3)
-        if: hashFiles('dev/spark-test-image/numpy-213/Dockerfile') != ''
-        id: docker_build_pyspark_numpy_213
-        uses: docker/build-push-action@v6
-        with:
-          context: ./dev/spark-test-image/numpy-213/
-          push: true
-          tags: 
ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-numpy-213-cache:${{
 github.ref_name }}-static
-          cache-from: 
type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-numpy-213-cache:${{
 github.ref_name }}
-          cache-to: 
type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-numpy-213-cache:${{
 github.ref_name }},mode=max
-      - name: Image digest (PySpark with Numpy 2.1.3)
-        if: hashFiles('dev/spark-test-image/numpy-213/Dockerfile') != ''
-        run: echo ${{ steps.docker_build_pyspark_numpy_213.outputs.digest }}
diff --git a/.github/workflows/build_python_numpy_2.1.3.yml 
b/.github/workflows/build_python_numpy_2.1.3.yml
deleted file mode 100644
index 345b97c282a0..000000000000
--- a/.github/workflows/build_python_numpy_2.1.3.yml
+++ /dev/null
@@ -1,47 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-name: "Build / Python-only (master, Python 3.11, Numpy 2.1.3)"
-
-on:
-  schedule:
-    - cron: '0 3 */3 * *'
-  workflow_dispatch:
-
-jobs:
-  run-build:
-    permissions:
-      packages: write
-    name: Run
-    uses: ./.github/workflows/build_and_test.yml
-    if: github.repository == 'apache/spark'
-    with:
-      java: 17
-      branch: master
-      hadoop: hadoop3
-      envs: >-
-        {
-          "PYSPARK_IMAGE_TO_TEST": "numpy-213",
-          "PYTHON_TO_TEST": "python3.11"
-        }
-      jobs: >-
-        {
-          "pyspark": "true",
-          "pyspark-pandas": "true"
-        }
diff --git a/README.md b/README.md
index ed67db9e6edd..25483284e274 100644
--- a/README.md
+++ b/README.md
@@ -41,7 +41,6 @@ This README file only contains basic setup instructions.
 |            | [![GitHub Actions 
Build](https://github.com/apache/spark/actions/workflows/build_python_3.11_classic_only.yml/badge.svg)](https://github.com/apache/spark/actions/workflows/build_python_3.11_classic_only.yml)
 |
 |            | [![GitHub Actions 
Build](https://github.com/apache/spark/actions/workflows/build_python_3.11_arm.yml/badge.svg)](https://github.com/apache/spark/actions/workflows/build_python_3.11_arm.yml)
                   |
 |            | [![GitHub Actions 
Build](https://github.com/apache/spark/actions/workflows/build_python_3.11_macos26.yml/badge.svg)](https://github.com/apache/spark/actions/workflows/build_python_3.11_macos26.yml)
           |
-|            | [![GitHub Actions 
Build](https://github.com/apache/spark/actions/workflows/build_python_numpy_2.1.3.yml/badge.svg)](https://github.com/apache/spark/actions/workflows/build_python_numpy_2.1.3.yml)
             |
 |            | [![GitHub Actions 
Build](https://github.com/apache/spark/actions/workflows/build_python_3.12.yml/badge.svg)](https://github.com/apache/spark/actions/workflows/build_python_3.12.yml)
                           |
 |            | [![GitHub Actions 
Build](https://github.com/apache/spark/actions/workflows/build_python_3.13.yml/badge.svg)](https://github.com/apache/spark/actions/workflows/build_python_3.13.yml)
                           |
 |            | [![GitHub Actions 
Build](https://github.com/apache/spark/actions/workflows/build_python_3.13_nogil.yml/badge.svg)](https://github.com/apache/spark/actions/workflows/build_python_3.13_nogil.yml)
               |
diff --git a/dev/spark-test-image/numpy-213/Dockerfile 
b/dev/spark-test-image/numpy-213/Dockerfile
deleted file mode 100644
index 713e9e7d7ef4..000000000000
--- a/dev/spark-test-image/numpy-213/Dockerfile
+++ /dev/null
@@ -1,80 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Image for building and testing Spark branches. Based on Ubuntu 22.04.
-# See also in https://hub.docker.com/_/ubuntu
-FROM ubuntu:jammy-20240911.1
-LABEL org.opencontainers.image.authors="Apache Spark project 
<[email protected]>"
-LABEL org.opencontainers.image.licenses="Apache-2.0"
-LABEL org.opencontainers.image.ref.name="Apache Spark Infra Image For PySpark 
with Python 3.11 and Numpy 2.1.3"
-# Overwrite this label to avoid exposing the underlying Ubuntu OS version label
-LABEL org.opencontainers.image.version=""
-
-ENV FULL_REFRESH_DATE=20250327
-
-ENV DEBIAN_FRONTEND=noninteractive
-ENV DEBCONF_NONINTERACTIVE_SEEN=true
-
-RUN apt-get update && apt-get install -y \
-    build-essential \
-    ca-certificates \
-    curl \
-    gfortran \
-    git \
-    gnupg \
-    libcurl4-openssl-dev \
-    libfontconfig1-dev \
-    libfreetype6-dev \
-    libfribidi-dev \
-    libgit2-dev \
-    libharfbuzz-dev \
-    libjpeg-dev \
-    liblapack-dev \
-    libopenblas-dev \
-    libpng-dev \
-    libpython3-dev \
-    libssl-dev \
-    libtiff5-dev \
-    libwebp-dev \
-    libxml2-dev \
-    openjdk-17-jdk-headless \
-    pkg-config \
-    qpdf \
-    tzdata \
-    software-properties-common \
-    wget \
-    zlib1g-dev
-
-# Install Python 3.11
-RUN add-apt-repository ppa:deadsnakes/ppa
-RUN apt-get update && apt-get install -y \
-    python3.11 \
-    && apt-get autoremove --purge -y \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/*
-
-
-# Pin numpy==2.1.3
-ARG BASIC_PIP_PKGS="numpy==2.1.3 pyarrow>=22.0.0 six==1.16.0 pandas==2.2.3 
scipy plotly<6.0.0 mlflow>=2.8.1 coverage matplotlib openpyxl 
memory-profiler>=0.61.0 scikit-learn>=1.3.2"
-# Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.76.0 grpcio-status==1.76.0 protobuf==6.33.0 
googleapis-common-protos==1.71.0 zstandard==0.25.0 graphviz==0.20.3"
-
-# Install Python 3.11 packages
-RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.11
-RUN python3.11 -m pip install --ignore-installed 'blinker>=1.6.2' # mlflow 
needs this
-RUN python3.11 -m pip install $BASIC_PIP_PKGS unittest-xml-reporting 
$CONNECT_PIP_PKGS && \
-    python3.11 -m pip cache purge


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to