
Commit f4b00b7

fix: allow overriding of application service account (#617)
* spark apps: remove SA command line argument
* update changelog
* fix spelling
* add SA assert
* fix typo
* fix last typo (i promise)
1 parent 4475235 · commit f4b00b7

File tree

  CHANGELOG.md
  rust/operator-binary/src/crd/mod.rs
  rust/operator-binary/src/spark_k8s_controller.rs
  tests/templates/kuttl/overrides/10-deploy-spark-app.yaml.j2
  tests/templates/kuttl/overrides/11-assert.yaml

5 files changed: +42 −13 lines changed

CHANGELOG.md

Lines changed: 7 additions & 0 deletions
@@ -19,6 +19,12 @@ All notable changes to this project will be documented in this file.
   We now correctly handle multiple certificates in these cases.
   See [this GitHub issue](https://github.com/stackabletech/issues/issues/764) for details

+- The service account of Spark applications can now be overridden with pod overrides ([#617]).
+
+  Previously, the application service account was passed as a command line argument to spark-submit
+  and could therefore not be overridden with pod overrides for the driver and executors.
+  This CLI argument has now been moved to the pod templates of the individual roles.
+
 ### Removed

 - Support for Spark version 3.5.5 has been dropped ([#610]).

@@ -28,6 +34,7 @@ All notable changes to this project will be documented in this file.
 [#608]: https://github.com/stackabletech/spark-k8s-operator/pull/608
 [#610]: https://github.com/stackabletech/spark-k8s-operator/pull/610
 [#611]: https://github.com/stackabletech/spark-k8s-operator/pull/611
+[#617]: https://github.com/stackabletech/spark-k8s-operator/pull/617

 ## [25.7.0] - 2025-07-23
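
In practice, the changelog entry above boils down to setting serviceAccountName in a role's pod overrides. A minimal sketch of the relevant SparkApplication fragment, assuming a pre-existing ServiceAccount with the required RBAC bindings (my-custom-sa is a placeholder name, not part of this commit; the kuttl test further down uses override-sa):

spec:
  driver:
    podOverrides:
      spec:
        # Placeholder: any existing ServiceAccount bound to the
        # spark-k8s-clusterrole (or equivalent permissions) works here.
        serviceAccountName: my-custom-sa

The same override applies to the job and executor roles, as exercised by the test changes in this commit.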

rust/operator-binary/src/crd/mod.rs

Lines changed: 0 additions & 5 deletions
@@ -543,7 +543,6 @@ impl v1alpha1::SparkApplication {

     pub fn build_command(
         &self,
-        serviceaccount_name: &str,
         s3conn: &Option<s3::v1alpha1::ConnectionSpec>,
         log_dir: &Option<ResolvedLogDir>,
         spark_image: &str,

@@ -585,10 +584,6 @@ impl v1alpha1::SparkApplication {
                 "--conf spark.kubernetes.executor.container.image={}",
                 spark_image.to_string()
             ),
-            format!(
-                "--conf spark.kubernetes.authenticate.driver.serviceAccountName={}",
-                serviceaccount_name
-            ),
             format!(
                 "--conf spark.driver.defaultJavaOptions=-Dlog4j.configurationFile={VOLUME_MOUNT_PATH_LOG_CONFIG}/{LOG4J2_CONFIG_FILE}"
             ),

rust/operator-binary/src/spark_k8s_controller.rs

Lines changed: 8 additions & 8 deletions
@@ -324,6 +324,7 @@ pub async fn reconcile(
         &opt_s3conn,
         &logdir,
         &resolved_product_image,
+        &serviceaccount,
     )?;
     client
         .apply_patch(

@@ -352,6 +353,7 @@ pub async fn reconcile(
         &opt_s3conn,
         &logdir,
         &resolved_product_image,
+        &serviceaccount,
     )?;
     client
         .apply_patch(

@@ -363,13 +365,7 @@ pub async fn reconcile(
         .context(ApplyApplicationSnafu)?;

     let job_commands = spark_application
-        .build_command(
-            // TODO (@NickLarsenNZ): Explain this unwrap. Either convert to expect, or gracefully handle the error.
-            serviceaccount.metadata.name.as_ref().unwrap(),
-            &opt_s3conn,
-            &logdir,
-            &resolved_product_image.image,
-        )
+        .build_command(&opt_s3conn, &logdir, &resolved_product_image.image)
         .context(BuildCommandSnafu)?;

     let submit_config = spark_application

@@ -593,6 +589,7 @@ fn pod_template(
     s3conn: &Option<s3::v1alpha1::ConnectionSpec>,
     logdir: &Option<ResolvedLogDir>,
     spark_image: &ResolvedProductImage,
+    service_account: &ServiceAccount,
 ) -> Result<PodTemplateSpec> {
     let container_name = SparkContainer::Spark.to_string();
     let mut cb = ContainerBuilder::new(&container_name).context(IllegalContainerNameSnafu)?;

@@ -641,7 +638,8 @@ fn pod_template(
         .context(AddVolumeSnafu)?
         .security_context(security_context())
         .image_pull_secrets_from_product_image(spark_image)
-        .affinity(&config.affinity);
+        .affinity(&config.affinity)
+        .service_account_name(service_account.name_any());

     let init_containers = init_containers(
         spark_application,

@@ -700,6 +698,7 @@ fn pod_template_config_map(
     s3conn: &Option<s3::v1alpha1::ConnectionSpec>,
     logdir: &Option<ResolvedLogDir>,
     spark_image: &ResolvedProductImage,
+    service_account: &ServiceAccount,
 ) -> Result<ConfigMap> {
     let cm_name = spark_application.pod_template_config_map_name(role.clone());

@@ -741,6 +740,7 @@ fn pod_template_config_map(
         s3conn,
         logdir,
         spark_image,
+        service_account,
     )?;

     let mut cm_builder = ConfigMapBuilder::new();

tests/templates/kuttl/overrides/10-deploy-spark-app.yaml.j2

Lines changed: 20 additions & 0 deletions
@@ -1,4 +1,21 @@
 ---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+  name: override-sa
+---
+kind: RoleBinding
+apiVersion: rbac.authorization.k8s.io/v1
+metadata:
+  name: override-sa-binding
+subjects:
+  - kind: ServiceAccount
+    name: override-sa
+roleRef:
+  kind: ClusterRole
+  name: spark-k8s-clusterrole
+  apiGroup: rbac.authorization.k8s.io
+---
 apiVersion: spark.stackable.tech/v1alpha1
 kind: SparkApplication
 metadata:

@@ -39,6 +56,7 @@ spec:
       TEST_JOB_SPARK-ENV-SH: TEST
     podOverrides:
       spec:
+        serviceAccountName: override-sa
         containers:
           - name: spark-submit
             resources:

@@ -57,6 +75,7 @@ spec:
       TEST_DRIVER_SPARK-ENV-SH: TEST
     podOverrides:
       spec:
+        serviceAccountName: override-sa
         containers:
           - name: spark
             resources:

@@ -76,6 +95,7 @@ spec:
       TEST_EXECUTOR_SPARK-ENV-SH: TEST
     podOverrides:
      spec:
+        serviceAccountName: override-sa
         containers:
           - name: spark
             resources:

tests/templates/kuttl/overrides/11-assert.yaml

Lines changed: 7 additions & 0 deletions
@@ -13,6 +13,13 @@ commands:
       POD=$(kubectl -n $NAMESPACE get pod -l app.kubernetes.io/instance=spark-pi-s3-1 -o name | head -n 1 | sed -e 's#pod/##')
       kubectl -n $NAMESPACE get pod $POD -o yaml | yq '.spec.containers[0].env[] | select (.name == "TEST_SPARK_VAR_0").value' | grep 'REPLACED'
       kubectl -n $NAMESPACE get pod $POD -o yaml | yq '.spec.containers[0].env[] | select (.name == "TEST_SPARK_VAR_1").value' | grep 'DONOTREPLACE'
+  - script: |
+      for POD_SA_NAME in $(kubectl get pods -n $NAMESPACE -l app.kubernetes.io/instance=spark-pi-s3-1 -o=jsonpath='{.items[*].spec.serviceAccountName}'); do
+        if [ "$POD_SA_NAME" != "override-sa" ]; then
+          echo "Expected Pod service account [override-sa], but got [$POD_SA_NAME]"
+          exit 1
+        fi
+      done
 ---
 apiVersion: v1
 kind: ConfigMap
