This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-kubernetes-operator.git


The following commit(s) were added to refs/heads/main by this push:
     new 22226e2  [SPARK-49348] Support `schedulerName` for SparkCluster
22226e2 is described below

commit 22226e201a250d0f7b16743c1fc91421ec5fa044
Author: Dongjoon Hyun <dongj...@apache.org>
AuthorDate: Wed Aug 21 23:58:07 2024 -0700

    [SPARK-49348] Support `schedulerName` for SparkCluster
    
    ### What changes were proposed in this pull request?
    
    This PR aims to support a custom K8s scheduler for SparkCluster via `spark.kubernetes.scheduler.name`.
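
    For example, excerpted from the new `examples/cluster-on-yunikorn.yaml` added by this patch:
    ```
    spec:
      sparkConf:
        spark.kubernetes.scheduler.name: "yunikorn"
    ```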
    
    ### Why are the changes needed?
    
    To support submission to a custom scheduler such as Apache YuniKorn.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No. This is a new feature.
    
    ### How was this patch tested?
    
    Pass the CIs and manual testing. Without the `YuniKorn` scheduler installed, the SparkCluster pods stay in `Pending` status.
    ```
    $ gradle build buildDockerImage spark-operator-api:relocateGeneratedCRD -x check
    $ kubectl apply -f examples/cluster-on-yunikorn.yaml
    ```
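
    As an illustrative check (pod names follow the `<name>-master`/`<name>-worker` StatefulSet naming in this patch; the output below is a sketch, not a captured log):
    ```
    $ kubectl get pods
    NAME                           READY   STATUS    RESTARTS   AGE
    cluster-on-yunikorn-master-0   0/1     Pending   0          30s
    cluster-on-yunikorn-worker-0   0/1     Pending   0          30s
    ```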
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #89 from dongjoon-hyun/SPARK-49348.
    
    Authored-by: Dongjoon Hyun <dongj...@apache.org>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 examples/cluster-on-yunikorn.yaml                  | 31 ++++++++++++++++++++++
 .../k8s/operator/SparkClusterResourceSpec.java     | 17 +++++++++---
 2 files changed, 44 insertions(+), 4 deletions(-)

diff --git a/examples/cluster-on-yunikorn.yaml b/examples/cluster-on-yunikorn.yaml
new file mode 100644
index 0000000..9f1523d
--- /dev/null
+++ b/examples/cluster-on-yunikorn.yaml
@@ -0,0 +1,31 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+apiVersion: spark.apache.org/v1alpha1
+kind: SparkCluster
+metadata:
+  name: cluster-on-yunikorn
+spec:
+  runtimeVersions:
+    sparkVersion: "4.0.0-preview1"
+  initWorkers: 1
+  minWorkers: 1
+  maxWorkers: 1
+  sparkConf:
+    spark.kubernetes.container.image: "spark:4.0.0-preview1"
+    spark.kubernetes.scheduler.name: "yunikorn"
+    spark.master.ui.title: "Spark Cluster on YuniKorn Scheduler"
+    spark.master.rest.enabled: "true"
+    spark.master.rest.host: "0.0.0.0"
+    spark.ui.reverseProxy: "true"
diff --git a/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java b/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java
index 4b909db..23265b0 100644
--- a/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java
+++ b/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java
@@ -45,6 +45,7 @@ public class SparkClusterResourceSpec {
   public SparkClusterResourceSpec(SparkCluster cluster, SparkConf conf) {
     String clusterNamespace = cluster.getMetadata().getNamespace();
     String clusterName = cluster.getMetadata().getName();
+    String scheduler = conf.get(Config.KUBERNETES_SCHEDULER_NAME().key(), "default-scheduler");
     String namespace = conf.get(Config.KUBERNETES_NAMESPACE().key(), clusterNamespace);
     String image = conf.get(Config.CONTAINER_IMAGE().key(), "spark:4.0.0-preview1");
     ClusterSpec spec = cluster.getSpec();
@@ -54,10 +55,11 @@ public class SparkClusterResourceSpec {
     }
     masterService = buildMasterService(clusterName, namespace);
     workerService = buildWorkerService(clusterName, namespace);
-    masterStatefulSet = buildMasterStatefulSet(clusterName, namespace, image, options.toString());
+    masterStatefulSet =
+        buildMasterStatefulSet(scheduler, clusterName, namespace, image, options.toString());
     workerStatefulSet =
         buildWorkerStatefulSet(
-            clusterName, namespace, image, spec.getInitWorkers(), options.toString());
+            scheduler, clusterName, namespace, image, spec.getInitWorkers(), options.toString());
   }
 
   private static Service buildMasterService(String name, String namespace) {
@@ -111,7 +113,7 @@ public class SparkClusterResourceSpec {
   }
 
   private static StatefulSet buildMasterStatefulSet(
-      String name, String namespace, String image, String options) {
+      String scheduler, String name, String namespace, String image, String options) {
     return new StatefulSetBuilder()
         .withNewMetadata()
         .withName(name + "-master")
@@ -129,6 +131,7 @@ public class SparkClusterResourceSpec {
         .addToLabels(LABEL_SPARK_ROLE_NAME, LABEL_SPARK_ROLE_MASTER_VALUE)
         .endMetadata()
         .withNewSpec()
+        .withSchedulerName(scheduler)
         .withTerminationGracePeriodSeconds(0L)
         .addNewContainer()
         .withName("master")
@@ -163,7 +166,12 @@ public class SparkClusterResourceSpec {
   }
 
   private static StatefulSet buildWorkerStatefulSet(
-      String name, String namespace, String image, int initWorkers, String options) {
+      String scheduler,
+      String name,
+      String namespace,
+      String image,
+      int initWorkers,
+      String options) {
     return new StatefulSetBuilder()
         .withNewMetadata()
         .withName(name + "-worker")
@@ -182,6 +190,7 @@ public class SparkClusterResourceSpec {
         .addToLabels(LABEL_SPARK_ROLE_NAME, LABEL_SPARK_ROLE_WORKER_VALUE)
         .endMetadata()
         .withNewSpec()
+        .withSchedulerName(scheduler)
         .withTerminationGracePeriodSeconds(0L)
         .withNewDnsConfig()
         .withSearches(String.format("%s-worker-svc.%s.svc.cluster.local", name, namespace))
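
For reference, a sketch of the pod template that the builders above generate when `spark.kubernetes.scheduler.name` is set to `yunikorn` (illustrative YAML, not emitted by this patch):

```
# Fragment of the rendered master/worker StatefulSet (illustrative).
spec:
  template:
    spec:
      schedulerName: yunikorn          # from spark.kubernetes.scheduler.name;
                                       # falls back to "default-scheduler"
      terminationGracePeriodSeconds: 0
```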

