Skip to content

Commit 8971b36

Browse files
committed
rustfmt update
1 parent cc2b7ec commit 8971b36

File tree

3 files changed: 51 additions and 29 deletions

rust/operator-binary/src/crd/mod.rs

Lines changed: 22 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -827,11 +827,14 @@ impl v1alpha1::SparkApplication {
         };
         if let Some(role_envs) = role_envs {
             env.extend(role_envs.iter().map(|(k, v)| {
-                (k, EnvVar {
-                    name: k.clone(),
-                    value: Some(v.clone()),
-                    ..Default::default()
-                })
+                (
+                    k,
+                    EnvVar {
+                        name: k.clone(),
+                        value: Some(v.clone()),
+                        ..Default::default()
+                    },
+                )
             }))
         }

@@ -889,10 +892,13 @@ impl v1alpha1::SparkApplication {
             Role {
                 config: submit_conf.clone(),
                 role_config: GenericRoleConfig::default(),
-                role_groups: [("default".to_string(), RoleGroup {
-                    config: submit_conf,
-                    replicas: Some(1),
-                })]
+                role_groups: [(
+                    "default".to_string(),
+                    RoleGroup {
+                        config: submit_conf,
+                        replicas: Some(1),
+                    },
+                )]
                 .into(),
             }
             .erase(),
@@ -909,10 +915,13 @@ impl v1alpha1::SparkApplication {
             Role {
                 config: driver_conf.clone(),
                 role_config: GenericRoleConfig::default(),
-                role_groups: [("default".to_string(), RoleGroup {
-                    config: driver_conf,
-                    replicas: Some(1),
-                })]
+                role_groups: [(
+                    "default".to_string(),
+                    RoleGroup {
+                        config: driver_conf,
+                        replicas: Some(1),
+                    },
+                )]
                 .into(),
             }
             .erase(),

rust/operator-binary/src/main.rs

Lines changed: 22 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -109,10 +109,13 @@ async fn main() -> anyhow::Result<()> {
         client: client.clone(),
         product_config: product_config.load(&PRODUCT_CONFIG_PATHS)?,
     };
-    let spark_event_recorder = Arc::new(Recorder::new(client.as_kube_client(), Reporter {
-        controller: SPARK_FULL_CONTROLLER_NAME.to_string(),
-        instance: None,
-    }));
+    let spark_event_recorder = Arc::new(Recorder::new(
+        client.as_kube_client(),
+        Reporter {
+            controller: SPARK_FULL_CONTROLLER_NAME.to_string(),
+            instance: None,
+        },
+    ));
     let app_controller = Controller::new(
         watch_namespace
             .get_api::<DeserializeGuard<crd::v1alpha1::SparkApplication>>(&client),
@@ -147,11 +150,13 @@ async fn main() -> anyhow::Result<()> {
         },
     );
 
-    let pod_driver_event_recorder =
-        Arc::new(Recorder::new(client.as_kube_client(), Reporter {
+    let pod_driver_event_recorder = Arc::new(Recorder::new(
+        client.as_kube_client(),
+        Reporter {
             controller: POD_DRIVER_FULL_CONTROLLER_NAME.to_string(),
             instance: None,
-        }));
+        },
+    ));
     let pod_driver_controller = Controller::new(
         watch_namespace.get_api::<DeserializeGuard<Pod>>(&client),
         watcher::Config::default()
@@ -191,11 +196,13 @@ async fn main() -> anyhow::Result<()> {
         client: client.clone(),
         product_config: product_config.load(&PRODUCT_CONFIG_PATHS)?,
     };
-    let history_event_recorder =
-        Arc::new(Recorder::new(client.as_kube_client(), Reporter {
+    let history_event_recorder = Arc::new(Recorder::new(
+        client.as_kube_client(),
+        Reporter {
             controller: HISTORY_FULL_CONTROLLER_NAME.to_string(),
             instance: None,
-        }));
+        },
+    ));
     let history_controller = Controller::new(
         watch_namespace
             .get_api::<DeserializeGuard<crd::history::v1alpha1::SparkHistoryServer>>(
@@ -253,11 +260,13 @@ async fn main() -> anyhow::Result<()> {
         client: client.clone(),
         product_config: product_config.load(&PRODUCT_CONFIG_PATHS)?,
    };
-    let connect_event_recorder =
-        Arc::new(Recorder::new(client.as_kube_client(), Reporter {
+    let connect_event_recorder = Arc::new(Recorder::new(
+        client.as_kube_client(),
+        Reporter {
             controller: CONNECT_FULL_CONTROLLER_NAME.to_string(),
             instance: None,
-        }));
+        },
+    ));
     let connect_controller = Controller::new(
         watch_namespace
             .get_api::<DeserializeGuard<connect::crd::v1alpha1::SparkConnectServer>>(

rust/operator-binary/src/pod_driver_controller.rs

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -99,9 +99,13 @@ pub async fn reconcile(pod: Arc<DeserializeGuard<Pod>>, client: Arc<Client>) ->
     tracing::info!("Update spark application [{app_name}] status to [{phase}]");
 
     client
-        .apply_patch_status(POD_DRIVER_CONTROLLER_NAME, &app, &SparkApplicationStatus {
-            phase: phase.clone(),
-        })
+        .apply_patch_status(
+            POD_DRIVER_CONTROLLER_NAME,
+            &app,
+            &SparkApplicationStatus {
+                phase: phase.clone(),
+            },
+        )
         .await
         .with_context(|_| ApplySparkApplicationStatusSnafu {
             name: app_name.clone(),

Comments (0)