Skip to content

Commit 76487c5

Browse files
committed
chore: Bump stackable-operator to 0.93.1
Part of stackabletech/issues#642. Bump stackable-operator to 0.93.1, which introduces versioned common CRD structs (e.g. `commons::s3` → `crd::s3::v1alpha1`, `commons::listener` → `crd::listener::v1alpha1`).
1 parent b5a9376 commit 76487c5

File tree

14 files changed

+484
-516
lines changed

14 files changed

+484
-516
lines changed

Cargo.lock

Lines changed: 95 additions & 106 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.nix

Lines changed: 280 additions & 320 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,10 +11,10 @@ repository = "https://github.com/stackabletech/spark-k8s-operator"
1111

1212
[workspace.dependencies]
1313
product-config = { git = "https://github.com/stackabletech/product-config.git", tag = "0.7.0" }
14-
stackable-operator = { git = "https://github.com/stackabletech/operator-rs.git", features = ["telemetry", "versioned"], tag = "stackable-operator-0.92.0" }
14+
stackable-operator = { git = "https://github.com/stackabletech/operator-rs.git", features = ["telemetry", "versioned"], tag = "stackable-operator-0.93.1" }
1515

1616
anyhow = "1.0"
17-
built = { version = "0.7", features = ["chrono", "git2"] }
17+
built = { version = "0.8", features = ["chrono", "git2"] }
1818
clap = "4.5"
1919
const_format = "0.2"
2020
futures = { version = "0.3", features = ["compat"] }

crate-hashes.json

Lines changed: 7 additions & 7 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

rust/operator-binary/src/config/jvm.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
use snafu::{ResultExt, Snafu};
22
use stackable_operator::{
3-
commons::s3::S3ConnectionSpec,
3+
crd::s3,
44
role_utils::{self, JvmArgumentOverrides},
55
};
66

@@ -25,7 +25,7 @@ pub enum Error {
2525
/// 2. `spark.executor.extraJavaOptions`
2626
pub fn construct_extra_java_options(
2727
spark_application: &SparkApplication,
28-
s3_conn: &Option<S3ConnectionSpec>,
28+
s3_conn: &Option<s3::v1alpha1::ConnectionSpec>,
2929
log_dir: &Option<ResolvedLogDir>,
3030
) -> Result<(String, String), Error> {
3131
// Note (@sbernauer): As of 2025-03-04, we did not set any heap related JVM arguments, so I

rust/operator-binary/src/connect/server.rs

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -16,10 +16,8 @@ use stackable_operator::{
1616
},
1717
},
1818
},
19-
commons::{
20-
listener::{Listener, ListenerPort},
21-
product_image_selection::ResolvedProductImage,
22-
},
19+
commons::product_image_selection::ResolvedProductImage,
20+
crd::listener,
2321
k8s_openapi::{
2422
DeepMerge,
2523
api::{
@@ -53,7 +51,7 @@ use crate::{
5351
VOLUME_MOUNT_NAME_LOG, VOLUME_MOUNT_NAME_LOG_CONFIG, VOLUME_MOUNT_PATH_CONFIG,
5452
VOLUME_MOUNT_PATH_LOG, VOLUME_MOUNT_PATH_LOG_CONFIG,
5553
},
56-
listener,
54+
listener_ext,
5755
},
5856
product_logging,
5957
};
@@ -65,7 +63,9 @@ const HTTP: &str = "http";
6563
#[allow(clippy::enum_variant_names)]
6664
pub enum Error {
6765
#[snafu(display("failed to build spark connect listener"))]
68-
BuildListener { source: crate::crd::listener::Error },
66+
BuildListener {
67+
source: crate::crd::listener_ext::Error,
68+
},
6969

7070
#[snafu(display("failed to build listener volume"))]
7171
BuildListenerVolume {
@@ -616,27 +616,27 @@ pub(crate) fn build_listener(
616616
scs: &v1alpha1::SparkConnectServer,
617617
config: &v1alpha1::ServerConfig,
618618
resolved_product_image: &ResolvedProductImage,
619-
) -> Result<Listener, Error> {
619+
) -> Result<listener::v1alpha1::Listener, Error> {
620620
let listener_name = dummy_role_group_ref(scs).object_name();
621621
let listener_class = config.listener_class.clone();
622622
let role = SparkConnectRole::Server.to_string();
623623
let recommended_object_labels =
624624
common::labels(scs, &resolved_product_image.app_version_label, &role);
625625

626626
let listener_ports = [
627-
ListenerPort {
627+
listener::v1alpha1::ListenerPort {
628628
name: GRPC.to_string(),
629629
port: CONNECT_GRPC_PORT,
630630
protocol: Some("TCP".to_string()),
631631
},
632-
ListenerPort {
632+
listener::v1alpha1::ListenerPort {
633633
name: HTTP.to_string(),
634634
port: CONNECT_UI_PORT,
635635
protocol: Some("TCP".to_string()),
636636
},
637637
];
638638

639-
listener::build_listener(
639+
listener_ext::build_listener(
640640
scs,
641641
&listener_name,
642642
&listener_class,

rust/operator-binary/src/crd/history.rs

Lines changed: 5 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -11,12 +11,12 @@ use stackable_operator::{
1111
CpuLimitsFragment, MemoryLimitsFragment, NoRuntimeLimits, NoRuntimeLimitsFragment,
1212
Resources, ResourcesFragment,
1313
},
14-
s3::S3BucketInlineOrReference,
1514
},
1615
config::{
1716
fragment::{self, Fragment, ValidationError},
1817
merge::Merge,
1918
},
19+
crd::s3,
2020
k8s_openapi::{api::core::v1::EnvVar, apimachinery::pkg::api::resource::Quantity},
2121
kube::{CustomResource, ResourceExt, runtime::reflector::ObjectRef},
2222
product_config_utils::{
@@ -271,7 +271,7 @@ pub enum LogFileDirectorySpec {
271271
#[serde(rename_all = "camelCase")]
272272
pub struct S3LogFileDirectorySpec {
273273
pub prefix: String,
274-
pub bucket: S3BucketInlineOrReference,
274+
pub bucket: s3::v1alpha1::InlineBucketOrReference,
275275
}
276276

277277
#[allow(clippy::derive_partial_eq_without_eq)]
@@ -410,10 +410,7 @@ fn default_listener_class() -> String {
410410
#[cfg(test)]
411411
mod test {
412412
use indoc::indoc;
413-
use stackable_operator::commons::{
414-
s3::{ResolvedS3Bucket, ResolvedS3Connection},
415-
tls_verification::TlsClientDetails,
416-
};
413+
use stackable_operator::{commons::tls_verification::TlsClientDetails, crd::s3};
417414

418415
use super::*;
419416
use crate::crd::logdir::S3LogDir;
@@ -451,9 +448,9 @@ mod test {
451448
serde_yaml::with::singleton_map_recursive::deserialize(deserializer).unwrap();
452449

453450
let log_dir = ResolvedLogDir::S3(S3LogDir {
454-
bucket: ResolvedS3Bucket {
451+
bucket: s3::v1alpha1::ResolvedBucket {
455452
bucket_name: "my-bucket".to_string(),
456-
connection: ResolvedS3Connection {
453+
connection: s3::v1alpha1::ConnectionSpec {
457454
host: "my-s3".to_string().try_into().unwrap(),
458455
port: None,
459456
access_style: Default::default(),

rust/operator-binary/src/crd/listener.rs renamed to rust/operator-binary/src/crd/listener_ext.rs

Lines changed: 7 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,6 @@
11
use snafu::{ResultExt, Snafu};
22
use stackable_operator::{
3-
builder::meta::ObjectMetaBuilder,
4-
commons::listener::{Listener, ListenerPort, ListenerSpec},
5-
kube::Resource,
6-
kvp::ObjectLabels,
3+
builder::meta::ObjectMetaBuilder, crd::listener, kube::Resource, kvp::ObjectLabels,
74
};
85
use strum::{EnumDiscriminants, IntoStaticStr};
96

@@ -22,14 +19,15 @@ pub enum Error {
2219
},
2320
}
2421

22+
// TODO (@NickLarsenNZ): Move this functionality to stackable-operator
2523
pub fn build_listener<T: Resource<DynamicType = ()>>(
2624
resource: &T,
2725
listener_name: &str,
2826
listener_class: &str,
2927
listener_labels: ObjectLabels<T>,
30-
listener_ports: &[ListenerPort],
31-
) -> Result<Listener, Error> {
32-
Ok(Listener {
28+
listener_ports: &[listener::v1alpha1::ListenerPort],
29+
) -> Result<listener::v1alpha1::Listener, Error> {
30+
Ok(listener::v1alpha1::Listener {
3331
metadata: ObjectMetaBuilder::new()
3432
.name_and_namespace(resource)
3533
.name(listener_name)
@@ -38,10 +36,10 @@ pub fn build_listener<T: Resource<DynamicType = ()>>(
3836
.with_recommended_labels(listener_labels)
3937
.context(ObjectMetaSnafu)?
4038
.build(),
41-
spec: ListenerSpec {
39+
spec: listener::v1alpha1::ListenerSpec {
4240
class_name: Some(listener_class.into()),
4341
ports: Some(listener_ports.to_vec()),
44-
..ListenerSpec::default()
42+
..listener::v1alpha1::ListenerSpec::default()
4543
},
4644
status: None,
4745
})

rust/operator-binary/src/crd/logdir.rs

Lines changed: 23 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -6,10 +6,8 @@ use stackable_operator::{
66
SecretFormat, SecretOperatorVolumeSourceBuilder, SecretOperatorVolumeSourceBuilderError,
77
VolumeBuilder,
88
},
9-
commons::{
10-
s3::{ResolvedS3Bucket, S3AccessStyle, S3Error},
11-
secret_class::SecretClassVolume,
12-
},
9+
commons::secret_class::SecretClassVolume,
10+
crd::s3,
1311
k8s_openapi::api::core::v1::{Volume, VolumeMount},
1412
time::Duration,
1513
};
@@ -47,8 +45,15 @@ pub enum Error {
4745
source: stackable_operator::commons::secret_class::SecretClassVolumeError,
4846
},
4947

50-
#[snafu(display("failed to configure S3 connection/bucket"))]
51-
ConfigureS3 { source: S3Error },
48+
#[snafu(display("failed to configure S3 bucket"))]
49+
ConfigureS3Bucket {
50+
source: stackable_operator::crd::s3::v1alpha1::BucketError,
51+
},
52+
53+
#[snafu(display("failed to configure S3 connection"))]
54+
ConfigureS3Connection {
55+
source: stackable_operator::crd::s3::v1alpha1::ConnectionError,
56+
},
5257
}
5358

5459
pub enum ResolvedLogDir {
@@ -142,7 +147,7 @@ impl ResolvedLogDir {
142147
}
143148

144149
pub struct S3LogDir {
145-
pub bucket: ResolvedS3Bucket,
150+
pub bucket: s3::v1alpha1::ResolvedBucket,
146151
pub prefix: String,
147152
}
148153

@@ -158,7 +163,7 @@ impl S3LogDir {
158163
// TODO (@NickLarsenNZ): Explain this unwrap. Either convert to expect, or gracefully handle the error.
159164
.resolve(client, namespace.unwrap().as_str())
160165
.await
161-
.context(ConfigureS3Snafu)?;
166+
.context(ConfigureS3BucketSnafu)?;
162167

163168
if bucket.connection.tls.uses_tls() && !bucket.connection.tls.uses_tls() {
164169
return S3TlsNoVerificationNotSupportedSnafu.fail();
@@ -187,11 +192,14 @@ impl S3LogDir {
187192
("spark.history.fs.logDirectory".to_string(), self.url()),
188193
(
189194
"spark.hadoop.fs.s3a.endpoint".to_string(),
190-
connection.endpoint().context(ConfigureS3Snafu)?.to_string(),
195+
connection
196+
.endpoint()
197+
.context(ConfigureS3ConnectionSnafu)?
198+
.to_string(),
191199
),
192200
(
193201
"spark.hadoop.fs.s3a.path.style.access".to_string(),
194-
(connection.access_style == S3AccessStyle::Path).to_string(),
202+
(connection.access_style == s3::v1alpha1::S3AccessStyle::Path).to_string(),
195203
),
196204
(
197205
"spark.hadoop.fs.s3a.endpoint.region".to_string(),
@@ -212,11 +220,14 @@ impl S3LogDir {
212220
let bucket_name = &self.bucket.bucket_name;
213221
result.insert(
214222
format!("spark.hadoop.fs.s3a.bucket.{bucket_name}.endpoint"),
215-
connection.endpoint().context(ConfigureS3Snafu)?.to_string(),
223+
connection
224+
.endpoint()
225+
.context(ConfigureS3ConnectionSnafu)?
226+
.to_string(),
216227
);
217228
result.insert(
218229
format!("spark.hadoop.fs.s3a.bucket.{bucket_name}.path.style.access"),
219-
(connection.access_style == S3AccessStyle::Path).to_string(),
230+
(connection.access_style == s3::v1alpha1::S3AccessStyle::Path).to_string(),
220231
);
221232
result.insert(
222233
format!("spark.hadoop.fs.s3a.bucket.{bucket_name}.region"),

0 commit comments

Comments (0)