Skip to content

Commit dc15e1b

Browse files
committed
[SPARK-53990] Use Java (Map|Set).of instead of Collections.(empty|singleton)(Map|Set)
### What changes were proposed in this pull request? This PR aims to use Java 9+ `Map.of` and `Set.of` APIs instead of the following. - `Collections.emptySet` - `Collections.emptyMap` - `Collections.singletonMap` - `Collections.singleton` ### Why are the changes needed? New Java APIs are concise and more intuitive. ```java - return Collections.emptyMap(); + return Map.of(); ``` ```java - Collections.singletonMap(getCurrentStateId() + 1, state), + Map.of(getCurrentStateId() + 1, state), ``` In addition, this is aligned with - apache/spark#51942 - apache/spark#51961 - apache/spark#51954 ### Does this PR introduce _any_ user-facing change? No. ### How was this patch tested? Pass the CIs. ### Was this patch authored or co-authored using generative AI tooling? No. Closes #401 from dongjoon-hyun/SPARK-53990. Authored-by: Dongjoon Hyun <[email protected]> Signed-off-by: Dongjoon Hyun <[email protected]>
1 parent 87bdeee commit dc15e1b

File tree

9 files changed

+29
-36
lines changed

9 files changed

+29
-36
lines changed

spark-operator-api/src/main/java/org/apache/spark/k8s/operator/status/ApplicationStatus.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,6 @@
2121

2222
import static org.apache.spark.k8s.operator.Constants.EXCEED_MAX_RETRY_ATTEMPT_MESSAGE;
2323

24-
import java.util.Collections;
2524
import java.util.Map;
2625
import java.util.TreeMap;
2726

@@ -151,7 +150,7 @@ public ApplicationStatus terminateOrRestart(
151150
currentAttemptSummary.getAttemptInfo(), stateTransitionHistory);
152151
return new ApplicationStatus(
153152
state,
154-
Collections.singletonMap(getCurrentStateId() + 1, state),
153+
Map.of(getCurrentStateId() + 1, state),
155154
newPrevSummary,
156155
nextAttemptSummary);
157156
} else {

spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/SparkAppResourceSpecFactory.java

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,6 @@
2929
import java.io.FileOutputStream;
3030
import java.io.IOException;
3131
import java.io.OutputStreamWriter;
32-
import java.util.Collections;
3332
import java.util.HashMap;
3433
import java.util.Map;
3534
import java.util.Optional;
@@ -172,10 +171,10 @@ private static Map<String, String> getOrCreateLocalFileForDriverSpec(
172171
createLocalFileForPodTemplateSpec(
173172
app.getSpec().getDriverSpec().getPodTemplateSpec(),
174173
app.getMetadata().getUid() + "-driver-");
175-
return Collections.singletonMap(DRIVER_SPARK_TEMPLATE_FILE_PROP_KEY, filePath);
174+
return Map.of(DRIVER_SPARK_TEMPLATE_FILE_PROP_KEY, filePath);
176175
}
177176
}
178-
return Collections.emptyMap();
177+
return Map.of();
179178
}
180179

181180
/**
@@ -196,10 +195,10 @@ private static Map<String, String> getOrCreateLocalFileForExecutorSpec(
196195
createLocalFileForPodTemplateSpec(
197196
app.getSpec().getExecutorSpec().getPodTemplateSpec(),
198197
app.getMetadata().getUid() + "-executor-");
199-
return Collections.singletonMap(EXECUTOR_SPARK_TEMPLATE_FILE_PROP_KEY, filePath);
198+
return Map.of(EXECUTOR_SPARK_TEMPLATE_FILE_PROP_KEY, filePath);
200199
}
201200
}
202-
return Collections.emptyMap();
201+
return Map.of();
203202
}
204203

205204
/**

spark-operator/src/main/java/org/apache/spark/k8s/operator/utils/Utils.java

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,6 @@
3131
import static org.apache.spark.k8s.operator.config.SparkOperatorConf.SPARK_CLUSTER_STATUS_LISTENER_CLASS_NAMES;
3232

3333
import java.util.Arrays;
34-
import java.util.Collections;
3534
import java.util.HashMap;
3635
import java.util.List;
3736
import java.util.Map;
@@ -62,10 +61,10 @@ private Utils() {}
6261
*/
6362
public static Set<String> sanitizeCommaSeparatedStrAsSet(String str) {
6463
if (StringUtils.isBlank(str)) {
65-
return Collections.emptySet();
64+
return Set.of();
6665
}
6766
if ("*".equals(str)) {
68-
return Collections.emptySet();
67+
return Set.of();
6968
}
7069
return Arrays.stream(str.split(","))
7170
.map(String::trim)
@@ -240,15 +239,15 @@ SecondaryToPrimaryMapper<T> basicLabelSecondaryToPrimaryMapper(String nameKey) {
240239
return resource -> {
241240
final var metadata = resource.getMetadata();
242241
if (metadata == null) {
243-
return Collections.emptySet();
242+
return Set.of();
244243
} else {
245244
final var map = metadata.getLabels();
246245
if (map == null) {
247-
return Collections.emptySet();
246+
return Set.of();
248247
}
249248
var name = map.get(nameKey);
250249
if (name == null) {
251-
return Collections.emptySet();
250+
return Set.of();
252251
}
253252
var namespace = resource.getMetadata().getNamespace();
254253
return Set.of(new ResourceID(name, namespace));

spark-operator/src/test/java/org/apache/spark/k8s/operator/SparkOperatorTest.java

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,6 @@
2929
import static org.mockito.Mockito.verifyNoMoreInteractions;
3030
import static org.mockito.Mockito.when;
3131

32-
import java.util.Collections;
3332
import java.util.Set;
3433
import java.util.function.Consumer;
3534

@@ -84,7 +83,7 @@ void testOperatorConstructionWithDynamicConfigEnabled() {
8483
mockKubernetesClientFactory
8584
.when(() -> KubernetesClientFactory.buildKubernetesClient(any()))
8685
.thenReturn(mockClient);
87-
mockUtils.when(Utils::getWatchedNamespaces).thenReturn(Collections.singleton("namespace-1"));
86+
mockUtils.when(Utils::getWatchedNamespaces).thenReturn(Set.of("namespace-1"));
8887

8988
SparkOperator sparkOperator = new SparkOperator();
9089
Assertions.assertEquals(1, sparkOperator.registeredSparkControllers.size());
@@ -182,7 +181,7 @@ void testUpdateWatchedNamespacesWithDynamicConfigEnabled() {
182181
mockKubernetesClientFactory
183182
.when(() -> KubernetesClientFactory.buildKubernetesClient(any()))
184183
.thenReturn(mockClient);
185-
mockUtils.when(Utils::getWatchedNamespaces).thenReturn(Collections.singleton("namespace-1"));
184+
mockUtils.when(Utils::getWatchedNamespaces).thenReturn(Set.of("namespace-1"));
186185
SparkOperator sparkOperator = new SparkOperator();
187186
Set<String> updatedNamespaces = Set.of("namespace-1", "namespace-2");
188187
Assertions.assertTrue(sparkOperator.updateWatchingNamespaces(updatedNamespaces));

spark-operator/src/test/java/org/apache/spark/k8s/operator/config/SparkOperatorConfManagerTest.java

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
package org.apache.spark.k8s.operator.config;
2121

2222
import java.io.IOException;
23-
import java.util.Collections;
23+
import java.util.Map;
2424

2525
import org.junit.jupiter.api.Assertions;
2626
import org.junit.jupiter.api.Test;
@@ -58,11 +58,11 @@ void testOverrideProperties() {
5858
Assertions.assertEquals("bar", confManager.getInitialValue("spark.kubernetes.operator.foo"));
5959
Assertions.assertEquals("bar", confManager.getValue("spark.kubernetes.operator.foo"));
6060

61-
confManager.refresh(Collections.singletonMap("spark.kubernetes.operator.foo", "barbar"));
61+
confManager.refresh(Map.of("spark.kubernetes.operator.foo", "barbar"));
6262
Assertions.assertEquals("bar", confManager.getInitialValue("spark.kubernetes.operator.foo"));
6363
Assertions.assertEquals("barbar", confManager.getValue("spark.kubernetes.operator.foo"));
6464

65-
confManager.refresh(Collections.singletonMap("spark.kubernetes.operator.foo", "barbarbar"));
65+
confManager.refresh(Map.of("spark.kubernetes.operator.foo", "barbarbar"));
6666
Assertions.assertEquals("bar", confManager.getInitialValue("spark.kubernetes.operator.foo"));
6767
Assertions.assertEquals("barbarbar", confManager.getValue("spark.kubernetes.operator.foo"));
6868

spark-operator/src/test/java/org/apache/spark/k8s/operator/metrics/healthcheck/SentinelManagerTest.java

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,6 @@
2828
import static org.mockito.Mockito.mockStatic;
2929

3030
import java.time.Duration;
31-
import java.util.Collections;
3231
import java.util.HashMap;
3332
import java.util.HashSet;
3433
import java.util.List;
@@ -73,7 +72,7 @@ class SentinelManagerTest {
7372
@BeforeAll
7473
static void beforeAll() {
7574
Map<String, String> overrideValue =
76-
Collections.singletonMap(
75+
Map.of(
7776
SparkOperatorConf.SENTINEL_RESOURCE_RECONCILIATION_DELAY.getKey(),
7877
Duration.ofSeconds(SENTINEL_RESOURCE_RECONCILIATION_DELAY_SECONDS).toString());
7978
SparkOperatorConfManager.INSTANCE.refresh(overrideValue);
@@ -99,7 +98,7 @@ void testIsSentinelResource() {
9998
void testHandleSentinelResourceReconciliation() throws InterruptedException {
10099
// Reduce the SENTINEL_RESOURCE_RECONCILIATION_DELAY time to 0
101100
SparkOperatorConfManager.INSTANCE.refresh(
102-
Collections.singletonMap(
101+
Map.of(
103102
SparkOperatorConf.SENTINEL_RESOURCE_RECONCILIATION_DELAY.getKey(), "10"));
104103

105104
// Before Spark Reconciler Started

spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121

2222
import static org.apache.spark.k8s.operator.Constants.*;
2323

24-
import java.util.Collections;
24+
import java.util.Map;
2525
import java.util.Optional;
2626

2727
import scala.Tuple2;
@@ -133,9 +133,9 @@ private static Service buildMasterService(
133133
.endMetadata()
134134
.withNewSpecLike(serviceSpec)
135135
.withClusterIP("None")
136-
.addToSelector(Collections.singletonMap(LABEL_SPARK_CLUSTER_NAME, name))
136+
.addToSelector(Map.of(LABEL_SPARK_CLUSTER_NAME, name))
137137
.addToSelector(
138-
Collections.singletonMap(LABEL_SPARK_ROLE_NAME, LABEL_SPARK_ROLE_MASTER_VALUE))
138+
Map.of(LABEL_SPARK_ROLE_NAME, LABEL_SPARK_ROLE_MASTER_VALUE))
139139
.addNewPort()
140140
.withName("web")
141141
.withPort(8080)
@@ -176,9 +176,9 @@ private static Service buildWorkerService(
176176
.endMetadata()
177177
.withNewSpecLike(serviceSpec)
178178
.withClusterIP("None")
179-
.addToSelector(Collections.singletonMap(LABEL_SPARK_CLUSTER_NAME, name))
179+
.addToSelector(Map.of(LABEL_SPARK_CLUSTER_NAME, name))
180180
.addToSelector(
181-
Collections.singletonMap(LABEL_SPARK_ROLE_NAME, LABEL_SPARK_ROLE_WORKER_VALUE))
181+
Map.of(LABEL_SPARK_ROLE_NAME, LABEL_SPARK_ROLE_WORKER_VALUE))
182182
.addNewPort()
183183
.withName("web")
184184
.withPort(8081)

spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorkerTest.java

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,6 @@
2929
import static org.mockito.Mockito.when;
3030

3131
import java.util.ArrayList;
32-
import java.util.Collections;
3332
import java.util.HashMap;
3433
import java.util.List;
3534
import java.util.Map;
@@ -120,7 +119,7 @@ void buildDriverConfForPythonApp() {
120119
when(mockSpec.getPyFiles()).thenReturn("foo");
121120

122121
SparkAppSubmissionWorker submissionWorker = new SparkAppSubmissionWorker();
123-
SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Collections.emptyMap());
122+
SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Map.of());
124123
assertEquals(7, constructorArgs.get(conf).size());
125124

126125
// validate main resources
@@ -146,7 +145,7 @@ void handlePyFiles() {
146145
when(mockSpec.getPyFiles()).thenReturn("main.py,lib.py");
147146

148147
SparkAppSubmissionWorker submissionWorker = new SparkAppSubmissionWorker();
149-
SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Collections.emptyMap());
148+
SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Map.of());
150149
assertEquals(7, constructorArgs.get(conf).size());
151150
assertEquals(
152151
"lib.py", ((SparkConf) constructorArgs.get(conf).get(0)).get("spark.submit.pyFiles"));
@@ -173,7 +172,7 @@ void buildDriverConfForRApp() {
173172
when(mockSpec.getSparkRFiles()).thenReturn("foo");
174173

175174
SparkAppSubmissionWorker submissionWorker = new SparkAppSubmissionWorker();
176-
SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Collections.emptyMap());
175+
SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Map.of());
177176
assertEquals(7, constructorArgs.get(conf).size());
178177

179178
// validate main resources
@@ -260,7 +259,7 @@ void checkAppIdWhenUserSpecifiedInSparkConf() {
260259
when(mockApp.getMetadata()).thenReturn(appMeta);
261260

262261
SparkAppSubmissionWorker submissionWorker = new SparkAppSubmissionWorker();
263-
SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Collections.emptyMap());
262+
SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Map.of());
264263
assertEquals("foo", conf.appId());
265264
}
266265

@@ -282,7 +281,7 @@ void supportSparkVersionPlaceHolder() {
282281
when(mockRuntimeVersions.getSparkVersion()).thenReturn("dev");
283282

284283
SparkAppSubmissionWorker submissionWorker = new SparkAppSubmissionWorker();
285-
SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Collections.emptyMap());
284+
SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Map.of());
286285
assertEquals("apache/spark:dev", conf.get("spark.kubernetes.container.image"));
287286
assertEquals("apache/spark:dev", conf.get("spark.kubernetes.driver.container.image"));
288287
assertEquals("apache/spark:dev", conf.get("spark.kubernetes.executor.container.image"));

spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkClusterSubmissionWorkerTest.java

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,6 @@
2222
import static org.junit.jupiter.api.Assertions.*;
2323
import static org.mockito.Mockito.*;
2424

25-
import java.util.Collections;
2625
import java.util.HashMap;
2726
import java.util.Map;
2827

@@ -70,7 +69,7 @@ void setUp() {
7069
@Test
7170
void testGetResourceSpec() {
7271
SparkClusterSubmissionWorker worker = new SparkClusterSubmissionWorker();
73-
SparkClusterResourceSpec spec = worker.getResourceSpec(cluster, Collections.emptyMap());
72+
SparkClusterResourceSpec spec = worker.getResourceSpec(cluster, Map.of());
7473
// SparkClusterResourceSpecTest will cover the detail information of easy resources
7574
assertNotNull(spec.getMasterService());
7675
assertNotNull(spec.getMasterStatefulSet());
@@ -81,7 +80,7 @@ void testGetResourceSpec() {
8180
@Test
8281
void supportSparkVersionPlaceHolder() {
8382
SparkClusterSubmissionWorker worker = new SparkClusterSubmissionWorker();
84-
SparkClusterResourceSpec spec = worker.getResourceSpec(cluster, Collections.emptyMap());
83+
SparkClusterResourceSpec spec = worker.getResourceSpec(cluster, Map.of());
8584
assertEquals(
8685
"apache/spark:dev",
8786
spec.getMasterStatefulSet()

0 commit comments

Comments
 (0)