From ae350978f70948b8abfc5af17119dce43939368d Mon Sep 17 00:00:00 2001
From: Dongjoon Hyun
Date: Wed, 22 Oct 2025 13:36:44 -0700
Subject: [PATCH] [SPARK-53990] Use Java `(Map|Set).of` instead of
 `Collections.(empty|singleton)(Set|Map)`

---
 .../spark/k8s/operator/status/ApplicationStatus.java   |  3 +--
 .../reconciler/SparkAppResourceSpecFactory.java        |  9 ++++-----
 .../org/apache/spark/k8s/operator/utils/Utils.java     | 11 +++++------
 .../apache/spark/k8s/operator/SparkOperatorTest.java   |  5 ++---
 .../operator/config/SparkOperatorConfManagerTest.java  |  6 +++---
 .../metrics/healthcheck/SentinelManagerTest.java       |  5 ++---
 .../spark/k8s/operator/SparkClusterResourceSpec.java   | 10 +++++-----
 .../k8s/operator/SparkAppSubmissionWorkerTest.java     | 11 +++++------
 .../operator/SparkClusterSubmissionWorkerTest.java     |  5 ++---
 9 files changed, 29 insertions(+), 36 deletions(-)

diff --git a/spark-operator-api/src/main/java/org/apache/spark/k8s/operator/status/ApplicationStatus.java b/spark-operator-api/src/main/java/org/apache/spark/k8s/operator/status/ApplicationStatus.java
index 3b98eb6f..3d2d7131 100644
--- a/spark-operator-api/src/main/java/org/apache/spark/k8s/operator/status/ApplicationStatus.java
+++ b/spark-operator-api/src/main/java/org/apache/spark/k8s/operator/status/ApplicationStatus.java
@@ -21,7 +21,6 @@
 
 import static org.apache.spark.k8s.operator.Constants.EXCEED_MAX_RETRY_ATTEMPT_MESSAGE;
 
-import java.util.Collections;
 import java.util.Map;
 import java.util.TreeMap;
 
@@ -151,7 +150,7 @@ public ApplicationStatus terminateOrRestart(
             currentAttemptSummary.getAttemptInfo(), stateTransitionHistory);
       return new ApplicationStatus(
           state,
-          Collections.singletonMap(getCurrentStateId() + 1, state),
+          Map.of(getCurrentStateId() + 1, state),
           newPrevSummary,
           nextAttemptSummary);
     } else {
diff --git a/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/SparkAppResourceSpecFactory.java b/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/SparkAppResourceSpecFactory.java
index a1c33537..bd7e33ea 100644
--- a/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/SparkAppResourceSpecFactory.java
+++ b/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/SparkAppResourceSpecFactory.java
@@ -29,7 +29,6 @@
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Optional;
@@ -172,10 +171,10 @@ private static Map getOrCreateLocalFileForDriverSpec(
             createLocalFileForPodTemplateSpec(
                 app.getSpec().getDriverSpec().getPodTemplateSpec(),
                 app.getMetadata().getUid() + "-driver-");
-        return Collections.singletonMap(DRIVER_SPARK_TEMPLATE_FILE_PROP_KEY, filePath);
+        return Map.of(DRIVER_SPARK_TEMPLATE_FILE_PROP_KEY, filePath);
       }
     }
-    return Collections.emptyMap();
+    return Map.of();
   }
 
   /**
@@ -196,10 +195,10 @@ private static Map getOrCreateLocalFileForExecutorSpec(
             createLocalFileForPodTemplateSpec(
                 app.getSpec().getExecutorSpec().getPodTemplateSpec(),
                 app.getMetadata().getUid() + "-executor-");
-        return Collections.singletonMap(EXECUTOR_SPARK_TEMPLATE_FILE_PROP_KEY, filePath);
+        return Map.of(EXECUTOR_SPARK_TEMPLATE_FILE_PROP_KEY, filePath);
       }
     }
-    return Collections.emptyMap();
+    return Map.of();
   }
 
   /**
diff --git a/spark-operator/src/main/java/org/apache/spark/k8s/operator/utils/Utils.java b/spark-operator/src/main/java/org/apache/spark/k8s/operator/utils/Utils.java
index 6272e8d8..a6ca076f 100644
--- a/spark-operator/src/main/java/org/apache/spark/k8s/operator/utils/Utils.java
+++ b/spark-operator/src/main/java/org/apache/spark/k8s/operator/utils/Utils.java
@@ -31,7 +31,6 @@
 import static org.apache.spark.k8s.operator.config.SparkOperatorConf.SPARK_CLUSTER_STATUS_LISTENER_CLASS_NAMES;
 
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -62,10 +61,10 @@ private Utils() {}
    */
   public static Set sanitizeCommaSeparatedStrAsSet(String str) {
     if (StringUtils.isBlank(str)) {
-      return Collections.emptySet();
+      return Set.of();
     }
     if ("*".equals(str)) {
-      return Collections.emptySet();
+      return Set.of();
     }
     return Arrays.stream(str.split(","))
         .map(String::trim)
@@ -240,15 +239,15 @@ SecondaryToPrimaryMapper basicLabelSecondaryToPrimaryMapper(String nameKey) {
     return resource -> {
       final var metadata = resource.getMetadata();
       if (metadata == null) {
-        return Collections.emptySet();
+        return Set.of();
       } else {
         final var map = metadata.getLabels();
         if (map == null) {
-          return Collections.emptySet();
+          return Set.of();
         }
         var name = map.get(nameKey);
         if (name == null) {
-          return Collections.emptySet();
+          return Set.of();
         }
         var namespace = resource.getMetadata().getNamespace();
         return Set.of(new ResourceID(name, namespace));
diff --git a/spark-operator/src/test/java/org/apache/spark/k8s/operator/SparkOperatorTest.java b/spark-operator/src/test/java/org/apache/spark/k8s/operator/SparkOperatorTest.java
index ca3b285f..b8b57998 100644
--- a/spark-operator/src/test/java/org/apache/spark/k8s/operator/SparkOperatorTest.java
+++ b/spark-operator/src/test/java/org/apache/spark/k8s/operator/SparkOperatorTest.java
@@ -29,7 +29,6 @@
 import static org.mockito.Mockito.verifyNoMoreInteractions;
 import static org.mockito.Mockito.when;
 
-import java.util.Collections;
 import java.util.Set;
 import java.util.function.Consumer;
 
@@ -84,7 +83,7 @@ void testOperatorConstructionWithDynamicConfigEnabled() {
       mockKubernetesClientFactory
          .when(() -> KubernetesClientFactory.buildKubernetesClient(any()))
          .thenReturn(mockClient);
-      mockUtils.when(Utils::getWatchedNamespaces).thenReturn(Collections.singleton("namespace-1"));
+      mockUtils.when(Utils::getWatchedNamespaces).thenReturn(Set.of("namespace-1"));
       SparkOperator sparkOperator = new SparkOperator();
       Assertions.assertEquals(1, sparkOperator.registeredSparkControllers.size());
 
@@ -182,7 +181,7 @@ void testUpdateWatchedNamespacesWithDynamicConfigEnabled() {
      mockKubernetesClientFactory
          .when(() -> KubernetesClientFactory.buildKubernetesClient(any()))
          .thenReturn(mockClient);
-      mockUtils.when(Utils::getWatchedNamespaces).thenReturn(Collections.singleton("namespace-1"));
+      mockUtils.when(Utils::getWatchedNamespaces).thenReturn(Set.of("namespace-1"));
       SparkOperator sparkOperator = new SparkOperator();
       Set updatedNamespaces = Set.of("namespace-1", "namespace-2");
       Assertions.assertTrue(sparkOperator.updateWatchingNamespaces(updatedNamespaces));
diff --git a/spark-operator/src/test/java/org/apache/spark/k8s/operator/config/SparkOperatorConfManagerTest.java b/spark-operator/src/test/java/org/apache/spark/k8s/operator/config/SparkOperatorConfManagerTest.java
index a9ac7401..7baf5e1d 100644
--- a/spark-operator/src/test/java/org/apache/spark/k8s/operator/config/SparkOperatorConfManagerTest.java
+++ b/spark-operator/src/test/java/org/apache/spark/k8s/operator/config/SparkOperatorConfManagerTest.java
@@ -20,7 +20,7 @@
 package org.apache.spark.k8s.operator.config;
 
 import java.io.IOException;
-import java.util.Collections;
+import java.util.Map;
 
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
@@ -58,11 +58,11 @@ void testOverrideProperties() {
     Assertions.assertEquals("bar", confManager.getInitialValue("spark.kubernetes.operator.foo"));
     Assertions.assertEquals("bar", confManager.getValue("spark.kubernetes.operator.foo"));
 
-    confManager.refresh(Collections.singletonMap("spark.kubernetes.operator.foo", "barbar"));
+    confManager.refresh(Map.of("spark.kubernetes.operator.foo", "barbar"));
     Assertions.assertEquals("bar", confManager.getInitialValue("spark.kubernetes.operator.foo"));
     Assertions.assertEquals("barbar", confManager.getValue("spark.kubernetes.operator.foo"));
 
-    confManager.refresh(Collections.singletonMap("spark.kubernetes.operator.foo", "barbarbar"));
+    confManager.refresh(Map.of("spark.kubernetes.operator.foo", "barbarbar"));
     Assertions.assertEquals("bar", confManager.getInitialValue("spark.kubernetes.operator.foo"));
     Assertions.assertEquals("barbarbar", confManager.getValue("spark.kubernetes.operator.foo"));
 
diff --git a/spark-operator/src/test/java/org/apache/spark/k8s/operator/metrics/healthcheck/SentinelManagerTest.java b/spark-operator/src/test/java/org/apache/spark/k8s/operator/metrics/healthcheck/SentinelManagerTest.java
index b9e7e06b..26ef1c98 100644
--- a/spark-operator/src/test/java/org/apache/spark/k8s/operator/metrics/healthcheck/SentinelManagerTest.java
+++ b/spark-operator/src/test/java/org/apache/spark/k8s/operator/metrics/healthcheck/SentinelManagerTest.java
@@ -28,7 +28,6 @@
 import static org.mockito.Mockito.mockStatic;
 
 import java.time.Duration;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -73,7 +72,7 @@ class SentinelManagerTest {
   @BeforeAll
   static void beforeAll() {
     Map overrideValue =
-        Collections.singletonMap(
+        Map.of(
            SparkOperatorConf.SENTINEL_RESOURCE_RECONCILIATION_DELAY.getKey(),
            Duration.ofSeconds(SENTINEL_RESOURCE_RECONCILIATION_DELAY_SECONDS).toString());
     SparkOperatorConfManager.INSTANCE.refresh(overrideValue);
@@ -99,7 +98,7 @@ void testIsSentinelResource() {
   void testHandleSentinelResourceReconciliation() throws InterruptedException {
     // Reduce the SENTINEL_RESOURCE_RECONCILIATION_DELAY time to 0
     SparkOperatorConfManager.INSTANCE.refresh(
-        Collections.singletonMap(
+        Map.of(
            SparkOperatorConf.SENTINEL_RESOURCE_RECONCILIATION_DELAY.getKey(), "10"));
 
     // Before Spark Reconciler Started
diff --git a/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java b/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java
index efa5f456..5397c52b 100644
--- a/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java
+++ b/spark-submission-worker/src/main/java/org/apache/spark/k8s/operator/SparkClusterResourceSpec.java
@@ -21,7 +21,7 @@
 
 import static org.apache.spark.k8s.operator.Constants.*;
 
-import java.util.Collections;
+import java.util.Map;
 import java.util.Optional;
 
 import scala.Tuple2;
@@ -133,9 +133,9 @@ private static Service buildMasterService(
         .endMetadata()
         .withNewSpecLike(serviceSpec)
         .withClusterIP("None")
-        .addToSelector(Collections.singletonMap(LABEL_SPARK_CLUSTER_NAME, name))
+        .addToSelector(Map.of(LABEL_SPARK_CLUSTER_NAME, name))
         .addToSelector(
-            Collections.singletonMap(LABEL_SPARK_ROLE_NAME, LABEL_SPARK_ROLE_MASTER_VALUE))
+            Map.of(LABEL_SPARK_ROLE_NAME, LABEL_SPARK_ROLE_MASTER_VALUE))
         .addNewPort()
         .withName("web")
         .withPort(8080)
@@ -176,9 +176,9 @@ private static Service buildWorkerService(
         .endMetadata()
         .withNewSpecLike(serviceSpec)
         .withClusterIP("None")
-        .addToSelector(Collections.singletonMap(LABEL_SPARK_CLUSTER_NAME, name))
+        .addToSelector(Map.of(LABEL_SPARK_CLUSTER_NAME, name))
         .addToSelector(
-            Collections.singletonMap(LABEL_SPARK_ROLE_NAME, LABEL_SPARK_ROLE_WORKER_VALUE))
+            Map.of(LABEL_SPARK_ROLE_NAME, LABEL_SPARK_ROLE_WORKER_VALUE))
         .addNewPort()
         .withName("web")
         .withPort(8081)
diff --git a/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorkerTest.java b/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorkerTest.java
index 6d080cff..71351cf8 100644
--- a/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorkerTest.java
+++ b/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkAppSubmissionWorkerTest.java
@@ -29,7 +29,6 @@
 import static org.mockito.Mockito.when;
 
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -120,7 +119,7 @@ void buildDriverConfForPythonApp() {
     when(mockSpec.getPyFiles()).thenReturn("foo");
 
     SparkAppSubmissionWorker submissionWorker = new SparkAppSubmissionWorker();
-    SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Collections.emptyMap());
+    SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Map.of());
     assertEquals(7, constructorArgs.get(conf).size());
 
     // validate main resources
@@ -146,7 +145,7 @@ void handlePyFiles() {
     when(mockSpec.getPyFiles()).thenReturn("main.py,lib.py");
 
     SparkAppSubmissionWorker submissionWorker = new SparkAppSubmissionWorker();
-    SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Collections.emptyMap());
+    SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Map.of());
     assertEquals(7, constructorArgs.get(conf).size());
     assertEquals(
         "lib.py", ((SparkConf) constructorArgs.get(conf).get(0)).get("spark.submit.pyFiles"));
@@ -173,7 +172,7 @@ void buildDriverConfForRApp() {
     when(mockSpec.getSparkRFiles()).thenReturn("foo");
 
     SparkAppSubmissionWorker submissionWorker = new SparkAppSubmissionWorker();
-    SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Collections.emptyMap());
+    SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Map.of());
     assertEquals(7, constructorArgs.get(conf).size());
 
     // validate main resources
@@ -260,7 +259,7 @@ void checkAppIdWhenUserSpecifiedInSparkConf() {
     when(mockApp.getMetadata()).thenReturn(appMeta);
 
     SparkAppSubmissionWorker submissionWorker = new SparkAppSubmissionWorker();
-    SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Collections.emptyMap());
+    SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Map.of());
     assertEquals("foo", conf.appId());
   }
 
@@ -282,7 +281,7 @@ void supportSparkVersionPlaceHolder() {
     when(mockRuntimeVersions.getSparkVersion()).thenReturn("dev");
 
     SparkAppSubmissionWorker submissionWorker = new SparkAppSubmissionWorker();
-    SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Collections.emptyMap());
+    SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Map.of());
     assertEquals("apache/spark:dev", conf.get("spark.kubernetes.container.image"));
     assertEquals("apache/spark:dev", conf.get("spark.kubernetes.driver.container.image"));
     assertEquals("apache/spark:dev", conf.get("spark.kubernetes.executor.container.image"));
diff --git a/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkClusterSubmissionWorkerTest.java b/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkClusterSubmissionWorkerTest.java
index 3391755d..694e02bc 100644
--- a/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkClusterSubmissionWorkerTest.java
+++ b/spark-submission-worker/src/test/java/org/apache/spark/k8s/operator/SparkClusterSubmissionWorkerTest.java
@@ -22,7 +22,6 @@
 import static org.junit.jupiter.api.Assertions.*;
 import static org.mockito.Mockito.*;
 
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -70,7 +69,7 @@ void setUp() {
   @Test
   void testGetResourceSpec() {
     SparkClusterSubmissionWorker worker = new SparkClusterSubmissionWorker();
-    SparkClusterResourceSpec spec = worker.getResourceSpec(cluster, Collections.emptyMap());
+    SparkClusterResourceSpec spec = worker.getResourceSpec(cluster, Map.of());
     // SparkClusterResourceSpecTest will cover the detail information of easy resources
     assertNotNull(spec.getMasterService());
     assertNotNull(spec.getMasterStatefulSet());
@@ -81,7 +80,7 @@ void testGetResourceSpec() {
   @Test
   void supportSparkVersionPlaceHolder() {
     SparkClusterSubmissionWorker worker = new SparkClusterSubmissionWorker();
-    SparkClusterResourceSpec spec = worker.getResourceSpec(cluster, Collections.emptyMap());
+    SparkClusterResourceSpec spec = worker.getResourceSpec(cluster, Map.of());
     assertEquals(
         "apache/spark:dev",
         spec.getMasterStatefulSet()
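
Note on the semantics of this change (illustrative, not part of the patch): the `Map.of`/`Set.of` factories and the `Collections.singletonMap`/`Collections.singleton`/`Collections.emptyMap`/`Collections.emptySet` helpers they replace all return immutable collections, but the `of` factories are stricter. They throw `NullPointerException` for null keys, values, or elements, and the multi-argument forms throw `IllegalArgumentException` for duplicate keys or elements. The call sites touched above pass non-null property keys, file paths, label constants, and namespace names, so the replacement should preserve behavior. The standalone sketch below (the class name `MapOfVsCollections` is made up for illustration) demonstrates the difference.

import java.util.Collections;
import java.util.Map;
import java.util.Set;

// Standalone illustration of the behavioral gap between the old and new factory methods.
public class MapOfVsCollections {
  public static void main(String[] args) {
    // Both produce immutable single-entry maps for non-null arguments, and they compare equal.
    Map<String, String> legacy = Collections.singletonMap("role", "master");
    Map<String, String> modern = Map.of("role", "master");
    System.out.println(legacy.equals(modern)); // true

    // Collections.singletonMap tolerates a null value ...
    Map<String, String> nullable = Collections.singletonMap("role", null);
    System.out.println(nullable.containsKey("role")); // true

    // ... but Map.of rejects null keys and values.
    try {
      Map.of("role", null);
    } catch (NullPointerException e) {
      System.out.println("Map.of rejects null values");
    }

    // Set.of additionally rejects duplicate elements.
    try {
      Set.of("ns-1", "ns-1");
    } catch (IllegalArgumentException e) {
      System.out.println("Set.of rejects duplicates");
    }
  }
}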