This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 3b0ac4539170 [SPARK-50866][INFRA] Limit GHA job execution time to up to 2 hours
3b0ac4539170 is described below

commit 3b0ac45391708642ce6a1779e3c234bab0e40b66
Author: Dongjoon Hyun <dongj...@apache.org>
AuthorDate: Thu Jan 16 23:05:22 2025 -0800

    [SPARK-50866][INFRA] Limit GHA job execution time to up to 2 hours
    
    ### What changes were proposed in this pull request?
    
    This PR aims to limit GHA job execution time to at most 2 hours.
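    
    For reference, `timeout-minutes` is a job-level GitHub Actions setting; when a job exceeds it, the runner cancels the job (the default on GitHub-hosted runners is 360 minutes). A minimal sketch of the setting this diff tightens, using a hypothetical job name and build step:
    
    ```yaml
    jobs:
      example-build:                # hypothetical job name
        runs-on: ubuntu-latest
        timeout-minutes: 120        # cancel the job after 2 hours
        steps:
          - uses: actions/checkout@v4
          - run: ./build/mvn -DskipTests package   # hypothetical build step
    ```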
    
    ### Why are the changes needed?
    
    To prevent silent regressions in GitHub Actions execution time.
    - #49539
    
    ### Does this PR introduce _any_ user-facing change?
    
    No, this is an infra-only change.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #49544 from dongjoon-hyun/SPARK-50866.
    
    Authored-by: Dongjoon Hyun <dongj...@apache.org>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 .github/workflows/build_and_test.yml       | 18 +++++++++---------
 .github/workflows/build_python_connect.yml |  2 +-
 .github/workflows/build_sparkr_window.yml  |  2 +-
 3 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index d3a40b095dbf..0ff7b3cd9f89 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -227,7 +227,7 @@ jobs:
     needs: precondition
     if: fromJson(needs.precondition.outputs.required).build == 'true'
     runs-on: ubuntu-latest
-    timeout-minutes: 180
+    timeout-minutes: 120
     strategy:
       fail-fast: false
       matrix:
@@ -491,7 +491,7 @@ jobs:
    if: (!cancelled()) && (fromJson(needs.precondition.outputs.required).pyspark == 'true' || fromJson(needs.precondition.outputs.required).pyspark-pandas == 'true')
     name: "Build modules: ${{ matrix.modules }}"
     runs-on: ubuntu-latest
-    timeout-minutes: 180
+    timeout-minutes: 120
     container:
       image: ${{ needs.precondition.outputs.image_pyspark_url_link }}
     strategy:
@@ -654,7 +654,7 @@ jobs:
    if: (!cancelled()) && fromJson(needs.precondition.outputs.required).sparkr == 'true'
     name: "Build modules: sparkr"
     runs-on: ubuntu-latest
-    timeout-minutes: 180
+    timeout-minutes: 120
     container:
       image: ${{ needs.precondition.outputs.image_sparkr_url_link }}
     env:
@@ -776,7 +776,7 @@ jobs:
    if: (!cancelled()) && fromJson(needs.precondition.outputs.required).lint == 'true'
     name: Linters, licenses, and dependencies
     runs-on: ubuntu-latest
-    timeout-minutes: 180
+    timeout-minutes: 120
     env:
       LC_ALL: C.UTF-8
       LANG: C.UTF-8
@@ -907,7 +907,7 @@ jobs:
    if: (!cancelled()) && fromJson(needs.precondition.outputs.required).docs == 'true'
     name: Documentation generation
     runs-on: ubuntu-latest
-    timeout-minutes: 180
+    timeout-minutes: 120
     env:
       LC_ALL: C.UTF-8
       LANG: C.UTF-8
@@ -1030,7 +1030,7 @@ jobs:
     name: Run TPC-DS queries with SF=1
     # Pin to 'Ubuntu 20.04' due to 'databricks/tpcds-kit' compilation
     runs-on: ubuntu-20.04
-    timeout-minutes: 180
+    timeout-minutes: 120
     env:
       SPARK_LOCAL_IP: localhost
     steps:
@@ -1132,7 +1132,7 @@ jobs:
    if: fromJson(needs.precondition.outputs.required).docker-integration-tests == 'true'
     name: Run Docker integration tests
     runs-on: ubuntu-latest
-    timeout-minutes: 180
+    timeout-minutes: 120
     env:
       HADOOP_PROFILE: ${{ inputs.hadoop }}
       HIVE_PROFILE: hive2.3
@@ -1199,7 +1199,7 @@ jobs:
    if: fromJson(needs.precondition.outputs.required).k8s-integration-tests == 'true'
     name: Run Spark on Kubernetes Integration test
     runs-on: ubuntu-latest
-    timeout-minutes: 180
+    timeout-minutes: 120
     steps:
       - name: Checkout Spark repository
         uses: actions/checkout@v4
@@ -1278,7 +1278,7 @@ jobs:
     if: fromJson(needs.precondition.outputs.required).ui == 'true'
     name: Run Spark UI tests
     runs-on: ubuntu-latest
-    timeout-minutes: 180
+    timeout-minutes: 120
     steps:
       - uses: actions/checkout@v4
       - name: Use Node.js
diff --git a/.github/workflows/build_python_connect.yml b/.github/workflows/build_python_connect.yml
index 311907558f6e..9ae80102d0de 100644
--- a/.github/workflows/build_python_connect.yml
+++ b/.github/workflows/build_python_connect.yml
@@ -29,7 +29,7 @@ jobs:
   build:
     name: "Build modules: pyspark-connect"
     runs-on: ubuntu-latest
-    timeout-minutes: 300
+    timeout-minutes: 120
     if: github.repository == 'apache/spark'
     steps:
       - name: Checkout Spark repository
diff --git a/.github/workflows/build_sparkr_window.yml b/.github/workflows/build_sparkr_window.yml
index b28e81908549..20362da061a7 100644
--- a/.github/workflows/build_sparkr_window.yml
+++ b/.github/workflows/build_sparkr_window.yml
@@ -27,7 +27,7 @@ jobs:
   build:
     name: "Build module: sparkr"
     runs-on: windows-2022
-    timeout-minutes: 300
+    timeout-minutes: 120
     if: github.repository == 'apache/spark'
     steps:
     - name: Download winutils Hadoop binary


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
