Skip to content

Commit a6cbad6

Browse files
committed
[SPARK-53642] Improve Javadoc for interfaces, classes, methods, variables
### What changes were proposed in this pull request? This PR aims to improve `Javadoc` for interfaces, classes, methods, and variables as a part of v0.5 preparation. ### Why are the changes needed? Currently, we have only partial comments. We had better be consistent as much as possible. - Fill many javadoc comments. - Fill missing `param` descriptions. - Fix typos. - Unify the description style like a sentence, e.g., adding a period at the end. After making this a baseline, we can add and revise more in the future in every release. ### Does this PR introduce _any_ user-facing change? No behavior change because this is a documentation-only change. ### How was this patch tested? Pass the CIs (including building `javadoc` successfully). ### Was this patch authored or co-authored using generative AI tooling? Yes, I asked `gemini-2.5-flash` to fill the comments first and revised. Closes apache#324 from dongjoon-hyun/SPARK-53642. Authored-by: Dongjoon Hyun <[email protected]> Signed-off-by: Dongjoon Hyun <[email protected]>
1 parent 90e4777 commit a6cbad6

File tree

87 files changed

+1975
-102
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

87 files changed

+1975
-102
lines changed

build-tools/docs-utils/src/main/java/org/apache/spark/k8s/operator/utils/ConfOptionDocGenerator.java

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@
3030
import org.apache.spark.k8s.operator.config.ConfigOption;
3131
import org.apache.spark.k8s.operator.config.SparkOperatorConf;
3232

33+
/** Generates documentation for configuration options. */
3334
@Slf4j
3435
public class ConfOptionDocGenerator {
3536
public static final String CONF_FILE_NAME = "config_properties.md";

build-tools/docs-utils/src/main/java/org/apache/spark/k8s/operator/utils/DocTable.java

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,7 @@
2828
import lombok.Data;
2929
import lombok.RequiredArgsConstructor;
3030

31+
/** Represents a table for documentation generation. */
3132
@Data
3233
@RequiredArgsConstructor
3334
@Builder

spark-operator-api/src/main/java/org/apache/spark/k8s/operator/Constants.java

Lines changed: 102 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,79 +22,181 @@
2222
/** Constants used in the Spark Kubernetes Operator. */
2323
@SuppressWarnings("PMD.DataClass")
2424
public class Constants {
25+
/** The API group for Spark K8s Operator CRD resources. */
2526
public static final String API_GROUP = "spark.apache.org";
27+
28+
/** The API version for Spark K8s Operator CRD resources. */
2629
public static final String API_VERSION = "v1";
30+
31+
/** The label for the Spark Application name. */
2732
public static final String LABEL_SPARK_APPLICATION_NAME = "spark.operator/spark-app-name";
33+
34+
/** The label for the Spark Cluster name. */
2835
public static final String LABEL_SPARK_CLUSTER_NAME = "spark.operator/spark-cluster-name";
36+
37+
/** The label for the Spark Operator name. */
2938
public static final String LABEL_SPARK_OPERATOR_NAME = "spark.operator/name";
39+
40+
/** The label for a sentinel resource. */
3041
public static final String LABEL_SENTINEL_RESOURCE = "spark.operator/sentinel";
42+
43+
/** The label for the resource name. */
3144
public static final String LABEL_RESOURCE_NAME = "app.kubernetes.io/name";
45+
46+
/** The label for the component name. */
3247
public static final String LABEL_COMPONENT_NAME = "app.kubernetes.io/component";
48+
49+
/** The label for the Spark role name. */
3350
public static final String LABEL_SPARK_ROLE_NAME = "spark-role";
51+
52+
/** The value for the Spark driver role label. */
3453
public static final String LABEL_SPARK_ROLE_DRIVER_VALUE = "driver";
54+
55+
/** The value for the Spark executor role label. */
3556
public static final String LABEL_SPARK_ROLE_EXECUTOR_VALUE = "executor";
57+
58+
/** The value for the Spark cluster role label. */
3659
public static final String LABEL_SPARK_ROLE_CLUSTER_VALUE = "cluster";
60+
61+
/** The value for the Spark master role label. */
3762
public static final String LABEL_SPARK_ROLE_MASTER_VALUE = "master";
63+
64+
/** The value for the Spark worker role label. */
3865
public static final String LABEL_SPARK_ROLE_WORKER_VALUE = "worker";
66+
67+
/** The label for the Spark version. */
3968
public static final String LABEL_SPARK_VERSION_NAME = "spark-version";
69+
70+
/** A dummy field for sentinel resources. */
4071
public static final String SENTINEL_RESOURCE_DUMMY_FIELD = "sentinel.dummy.number";
4172

73+
/** The property key for the driver Spark container name. */
4274
public static final String DRIVER_SPARK_CONTAINER_PROP_KEY =
4375
"spark.kubernetes.driver.podTemplateContainerName";
76+
77+
/** The property key for the driver Spark pod template file. */
4478
public static final String DRIVER_SPARK_TEMPLATE_FILE_PROP_KEY =
4579
"spark.kubernetes.driver.podTemplateFile";
80+
81+
/** The property key for the executor Spark pod template file. */
4682
public static final String EXECUTOR_SPARK_TEMPLATE_FILE_PROP_KEY =
4783
"spark.kubernetes.executor.podTemplateFile";
4884

4985
// Default state messages
86+
/** Message indicating that the driver has been requested from the resource scheduler. */
5087
public static final String DRIVER_REQUESTED_MESSAGE = "Requested driver from resource scheduler.";
88+
89+
/** Message indicating that the Spark application completed successfully. */
5190
public static final String DRIVER_COMPLETED_MESSAGE = "Spark application completed successfully.";
91+
92+
/**
93+
* Message indicating that the driver container terminated before SparkContext/SparkSession
94+
* initialization.
95+
*/
5296
public static final String DRIVER_TERMINATED_BEFORE_INITIALIZATION_MESSAGE =
5397
"Driver container is terminated without SparkContext / SparkSession initialization.";
98+
99+
/** Message indicating that the driver has failed init container(s). */
54100
public static final String DRIVER_FAILED_INIT_CONTAINERS_MESSAGE =
55101
"Driver has failed init container(s). Refer last observed status for details.";
102+
103+
/** Message indicating that the driver has one or more failed critical container(s). */
56104
public static final String DRIVER_FAILED_MESSAGE =
57105
"Driver has one or more failed critical container(s), refer last observed status for "
58106
+ "details.";
107+
108+
/** Message indicating that the driver's critical container(s) exited with 0. */
59109
public static final String DRIVER_SUCCEEDED_MESSAGE =
60110
"Driver has critical container(s) exited with 0.";
111+
112+
/** Message indicating that the driver's critical container(s) restarted unexpectedly. */
61113
public static final String DRIVER_RESTARTED_MESSAGE =
62114
"Driver has one or more critical container(s) restarted unexpectedly, refer last "
63115
+ "observed status for details.";
116+
117+
/** Message indicating that the Spark application has been shut down as requested. */
64118
public static final String APP_CANCELLED_MESSAGE =
65119
"Spark application has been shutdown as requested.";
120+
121+
/**
122+
* Message indicating that Spark application resources were released after exceeding the
123+
* configured retain duration.
124+
*/
66125
public static final String APP_EXCEEDED_RETAIN_DURATION_MESSAGE =
67126
"Spark application resources released after exceeding the configured retain duration.";
127+
128+
/** Message indicating that the driver was unexpectedly removed. */
68129
public static final String DRIVER_UNEXPECTED_REMOVED_MESSAGE =
69130
"Driver removed. This could caused by 'exit' called in driver process with non-zero "
70131
+ "code, involuntary disruptions or unintentional destroy behavior, check "
71132
+ "Kubernetes events for more details.";
133+
134+
/**
135+
* Message indicating that the driver has not responded to the initial health check request within
136+
* the allotted start-up time.
137+
*/
72138
public static final String DRIVER_LAUNCH_TIMEOUT_MESSAGE =
73139
"The driver has not responded to the initial health check request within the "
74140
+ "allotted start-up time. This can be configured by setting "
75141
+ ".spec.applicationTolerations.applicationTimeoutConfig.";
142+
143+
/** Message indicating that the driver has started running. */
76144
public static final String DRIVER_RUNNING_MESSAGE = "Driver has started running.";
145+
146+
/** Message indicating that the driver has reached a ready state. */
77147
public static final String DRIVER_READY_MESSAGE = "Driver has reached ready state.";
148+
149+
/** Message indicating that the Spark application has been created on the Kubernetes Cluster. */
78150
public static final String SUBMITTED_STATE_MESSAGE =
79151
"Spark application has been created on Kubernetes Cluster.";
152+
153+
/** Message indicating that the application status cannot be processed. */
80154
public static final String UNKNOWN_STATE_MESSAGE = "Cannot process application status.";
155+
156+
/** Message indicating that the maximum number of restart attempts has been exceeded. */
81157
public static final String EXCEED_MAX_RETRY_ATTEMPT_MESSAGE =
82158
"The maximum number of restart attempts (%d) has been exceeded.";
159+
160+
/** Message indicating a failure to request the driver from the scheduler backend. */
83161
public static final String SCHEDULE_FAILURE_MESSAGE =
84162
"Failed to request driver from scheduler backend.";
163+
164+
/** Message indicating that the application is running healthy. */
85165
public static final String RUNNING_HEALTHY_MESSAGE = "Application is running healthy.";
166+
167+
/**
168+
* Message indicating that the application is running with less than the minimal number of
169+
* requested initial executors.
170+
*/
86171
public static final String INITIALIZED_WITH_BELOW_THRESHOLD_EXECUTORS_MESSAGE =
87172
"The application is running with less than minimal number of requested initial executors.";
173+
174+
/**
175+
* Message indicating that the Spark application is running with less than the minimal number of
176+
* requested executors.
177+
*/
88178
public static final String RUNNING_WITH_BELOW_THRESHOLD_EXECUTORS_MESSAGE =
89179
"The Spark application is running with less than minimal number of requested executors.";
180+
181+
/**
182+
* Message indicating that the Spark application failed to get enough executors in the given time
183+
* threshold.
184+
*/
90185
public static final String EXECUTOR_LAUNCH_TIMEOUT_MESSAGE =
91186
"The Spark application failed to get enough executors in the given time threshold.";
92187

93188
// Spark Cluster Messages
189+
/** Message indicating a failure to request the Spark cluster from the scheduler backend. */
94190
public static final String CLUSTER_SCHEDULE_FAILURE_MESSAGE =
95191
"Failed to request Spark cluster from scheduler backend.";
192+
193+
/** Message indicating that the Spark cluster has been submitted to the Kubernetes Cluster. */
96194
public static final String CLUSTER_SUBMITTED_STATE_MESSAGE =
97195
"Spark cluster has been submitted to Kubernetes Cluster.";
196+
197+
/** Message indicating that the cluster has reached a ready state. */
98198
public static final String CLUSTER_READY_MESSAGE = "Cluster has reached ready state.";
199+
200+
/** Message indicating that the cluster status cannot be processed. */
99201
public static final String UNKNOWN_CLUSTER_STATE_MESSAGE = "Cannot process cluster status.";
100202
}

spark-operator-api/src/main/java/org/apache/spark/k8s/operator/SparkApplication.java

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -46,11 +46,21 @@ public class SparkApplication
4646
ApplicationState,
4747
ApplicationSpec,
4848
ApplicationStatus> {
49+
/**
50+
* Initializes and returns a new ApplicationStatus object.
51+
*
52+
* @return A new ApplicationStatus instance.
53+
*/
4954
@Override
5055
public ApplicationStatus initStatus() {
5156
return new ApplicationStatus();
5257
}
5358

59+
/**
60+
* Initializes and returns a new ApplicationSpec object.
61+
*
62+
* @return A new ApplicationSpec instance.
63+
*/
5464
@Override
5565
public ApplicationSpec initSpec() {
5666
return new ApplicationSpec();

spark-operator-api/src/main/java/org/apache/spark/k8s/operator/SparkCluster.java

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -42,11 +42,21 @@
4242
public class SparkCluster
4343
extends BaseResource<
4444
ClusterStateSummary, ClusterAttemptSummary, ClusterState, ClusterSpec, ClusterStatus> {
45+
/**
46+
* Initializes and returns a new ClusterStatus object.
47+
*
48+
* @return A new ClusterStatus instance.
49+
*/
4550
@Override
4651
public ClusterStatus initStatus() {
4752
return new ClusterStatus();
4853
}
4954

55+
/**
56+
* Initializes and returns a new ClusterSpec object.
57+
*
58+
* @return A new ClusterSpec instance.
59+
*/
5060
@Override
5161
public ClusterSpec initSpec() {
5262
return new ClusterSpec();

spark-operator-api/src/main/java/org/apache/spark/k8s/operator/decorators/ResourceDecorator.java

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,14 @@
2121

2222
import io.fabric8.kubernetes.api.model.HasMetadata;
2323

24+
/** Interface for decorating Kubernetes resources. */
2425
public interface ResourceDecorator {
26+
/**
27+
* Decorates a Kubernetes resource.
28+
*
29+
* @param resource The resource to decorate.
30+
* @param <T> The type of the resource, extending HasMetadata.
31+
* @return The decorated resource.
32+
*/
2533
<T extends HasMetadata> T decorate(T resource);
2634
}

spark-operator-api/src/main/java/org/apache/spark/k8s/operator/diff/Diffable.java

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,4 +19,9 @@
1919

2020
package org.apache.spark.k8s.operator.diff;
2121

22+
/**
23+
* Represents an entity that can be compared for differences.
24+
*
25+
* @param <T> the type of the entity to compare with
26+
*/
2227
public interface Diffable<T> {}

spark-operator-api/src/main/java/org/apache/spark/k8s/operator/spec/ApplicationTolerations.java

Lines changed: 14 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -70,6 +70,10 @@ public class ApplicationTolerations {
7070
protected Long ttlAfterStopMillis = -1L;
7171

7272
/**
73+
* Computes the effective resource retention duration in milliseconds. This is the smaller of
74+
* `resourceRetainDurationMillis` or `ttlAfterStopMillis` if both are non-negative. If only one is
75+
* non-negative, that value is used. If both are negative, -1L is returned.
76+
*
7377
* @return The effective retain duration for secondary resources, which would be the smaller value
7478
* of `resourceRetainDurationMillis` or `ttlAfterStopMillis`, if they are set to non-negative
7579
* value. Return -1 if none of them are set.
@@ -89,11 +93,12 @@ public long computeEffectiveRetainDurationMillis() {
8993

9094
/**
9195
* Check whether a terminated application has exceeded the resource retain duration at the
92-
* provided instant
96+
* provided instant.
9397
*
94-
* @param lastObservedState last observed state of the application
95-
* @return true if the app has terminated and resource retain duration is configured to a positive
96-
* value and the app is not within retain duration; false otherwise.
98+
* @param lastObservedState The last observed state of the application.
99+
* @param instant The instant to check against.
100+
* @return True if the app has terminated, has a positive retain duration configured, and has
101+
* exceeded that duration; false otherwise.
97102
*/
98103
public boolean exceedRetainDurationAtInstant(
99104
ApplicationState lastObservedState, Instant instant) {
@@ -106,20 +111,20 @@ public boolean exceedRetainDurationAtInstant(
106111
}
107112

108113
/**
109-
* Indicates whether the reconciler need to perform retain duration check
114+
* Indicates whether the reconciler needs to perform a retain duration check.
110115
*
111-
* @return true if `resourceRetainDurationMillis` or `ttlAfterStopMillis` is set to non-negative
112-
* value
116+
* @return True if `resourceRetainDurationMillis` or `ttlAfterStopMillis` is set to a non-negative
117+
* value, false otherwise.
113118
*/
114119
@JsonIgnore
115120
public boolean isRetainDurationEnabled() {
116121
return resourceRetainDurationMillis >= 0L || ttlAfterStopMillis >= 0L;
117122
}
118123

119124
/**
120-
* Indicates whether the reconciler need to perform ttl check
125+
* Indicates whether the reconciler needs to perform a TTL (Time-To-Live) check.
121126
*
122-
* @return true if `ttlAfterStopMillis` is set to non-negative value
127+
* @return True if `ttlAfterStopMillis` is set to a non-negative value, false otherwise.
123128
*/
124129
@JsonIgnore
125130
public boolean isTTLEnabled() {

spark-operator-api/src/main/java/org/apache/spark/k8s/operator/spec/DeploymentMode.java

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919

2020
package org.apache.spark.k8s.operator.spec;
2121

22+
/** Represents the deployment mode for a Spark application. */
2223
public enum DeploymentMode {
2324
ClusterMode,
2425
ClientMode

spark-operator-api/src/main/java/org/apache/spark/k8s/operator/spec/RestartPolicy.java

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,12 +21,21 @@
2121

2222
import org.apache.spark.k8s.operator.status.BaseStateSummary;
2323

24+
/** Defines the restart policy for a Spark application. */
2425
public enum RestartPolicy {
2526
Always,
2627
Never,
2728
OnFailure,
2829
OnInfrastructureFailure;
2930

31+
/**
32+
* Determines if a restart should be attempted based on the restart policy and the current state
33+
* summary.
34+
*
35+
* @param policy The RestartPolicy to evaluate.
36+
* @param stateSummary The current BaseStateSummary of the application.
37+
* @return True if a restart should be attempted, false otherwise.
38+
*/
3039
public static boolean attemptRestartOnState(
3140
final RestartPolicy policy, final BaseStateSummary stateSummary) {
3241
return switch (policy) {

0 commit comments

Comments
 (0)