Skip to content

Commit c70a95f

Browse files
committed
[SPARK-54092] Use Java-friendly KubernetesClientUtils APIs
### What changes were proposed in this pull request? This PR aims to use the Java-friendly `KubernetesClientUtils` APIs. ### Why are the changes needed? Apache Spark 4.1.0-preview3 introduced new Java-friendly APIs (apache/spark#52542); we should take advantage of them. | Scala Version | New Java-friendly Version | | - | - | | `buildConfigMap` (Since 3.1.0) | `buildConfigMapJava` (Since 4.1.0) | | `buildKeyToPathObjects` (Since 3.1.0) | `buildKeyToPathObjectsJava` (Since 4.1.0) | | `buildSparkConfDirFilesMap` (Since 3.1.1) | `buildSparkConfDirFilesMapJava` (Since 4.1.0) | ### Does this PR introduce _any_ user-facing change? No behavior change. ### How was this patch tested? Pass the CIs. ### Was this patch authored or co-authored using generative AI tooling? No. Closes #410 from dongjoon-hyun/SPARK-54092. Authored-by: Dongjoon Hyun <[email protected]> Signed-off-by: Dongjoon Hyun <[email protected]>
1 parent e32bb0d commit c70a95f

File tree

1 file changed

+13
-24
lines changed

1 file changed

+13
-24
lines changed

spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkAppResourceSpec.java

Lines changed: 13 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -19,15 +19,10 @@
1919

2020
package org.apache.spark.k8s.operator;
2121

22-
import java.util.ArrayList;
23-
import java.util.Collection;
24-
import java.util.List;
25-
import java.util.stream.Collectors;
22+
import static scala.jdk.javaapi.CollectionConverters.asJava;
2623

27-
import scala.Tuple2;
28-
import scala.collection.immutable.HashMap;
29-
import scala.collection.immutable.Map;
30-
import scala.jdk.CollectionConverters;
24+
import java.util.*;
25+
import java.util.stream.Collectors;
3126

3227
import io.fabric8.kubernetes.api.model.Container;
3328
import io.fabric8.kubernetes.api.model.ContainerBuilder;
@@ -79,12 +74,13 @@ public SparkAppResourceSpec(
7974
List<ConfigMapSpec> configMapSpecs) {
8075
this.kubernetesDriverConf = kubernetesDriverConf;
8176
String namespace = kubernetesDriverConf.sparkConf().get(Config.KUBERNETES_NAMESPACE().key());
82-
Map<String, String> confFilesMap =
83-
KubernetesClientUtils.buildSparkConfDirFilesMap(
77+
Map<String, String> originalConfFilesMap =
78+
KubernetesClientUtils.buildSparkConfDirFilesMapJava(
8479
kubernetesDriverConf.configMapNameDriver(),
8580
kubernetesDriverConf.sparkConf(),
86-
kubernetesDriverSpec.systemProperties())
87-
.$plus(new Tuple2<>(Config.KUBERNETES_NAMESPACE().key(), namespace));
81+
asJava(kubernetesDriverSpec.systemProperties()));
82+
Map<String, String> confFilesMap = new HashMap<>(originalConfFilesMap);
83+
confFilesMap.put(Config.KUBERNETES_NAMESPACE().key(), namespace);
8884
SparkPod sparkPod = addConfigMap(kubernetesDriverSpec.pod(), confFilesMap);
8985
this.configuredPod =
9086
new PodBuilder(sparkPod.pod())
@@ -93,16 +89,12 @@ public SparkAppResourceSpec(
9389
.endSpec()
9490
.build();
9591
this.driverPreResources =
96-
new ArrayList<>(
97-
CollectionConverters.SeqHasAsJava(kubernetesDriverSpec.driverPreKubernetesResources())
98-
.asJava());
92+
new ArrayList<>(asJava(kubernetesDriverSpec.driverPreKubernetesResources()));
9993
this.driverResources =
100-
new ArrayList<>(
101-
CollectionConverters.SeqHasAsJava(kubernetesDriverSpec.driverKubernetesResources())
102-
.asJava());
94+
new ArrayList<>(asJava(kubernetesDriverSpec.driverKubernetesResources()));
10395
this.driverResources.add(
104-
KubernetesClientUtils.buildConfigMap(
105-
kubernetesDriverConf.configMapNameDriver(), confFilesMap, new HashMap<>()));
96+
KubernetesClientUtils.buildConfigMapJava(
97+
kubernetesDriverConf.configMapNameDriver(), confFilesMap, Map.of()));
10698
this.driverPreResources.addAll(ConfigMapSpecUtils.buildConfigMaps(configMapSpecs));
10799
this.driverResources.addAll(configureDriverServerIngress(sparkPod, driverServiceIngressList));
108100
this.driverPreResources.forEach(r -> setNamespaceIfMissing(r, namespace));
@@ -147,10 +139,7 @@ private SparkPod addConfigMap(SparkPod pod, Map<String, String> confFilesMap) {
147139
.addNewVolume()
148140
.withName(Constants.SPARK_CONF_VOLUME_DRIVER())
149141
.withNewConfigMap()
150-
.withItems(
151-
CollectionConverters.SeqHasAsJava(
152-
KubernetesClientUtils.buildKeyToPathObjects(confFilesMap))
153-
.asJava())
142+
.withItems(KubernetesClientUtils.buildKeyToPathObjectsJava(confFilesMap))
154143
.withName(kubernetesDriverConf.configMapNameDriver())
155144
.endConfigMap()
156145
.endVolume()

0 commit comments

Comments (0)