2 changes: 1 addition & 1 deletion .github/workflows/pr_pre-commit.yaml
@@ -6,7 +6,7 @@ on:

env:
CARGO_TERM_COLOR: always
RUST_TOOLCHAIN_VERSION: "1.82.0"
RUST_TOOLCHAIN_VERSION: "nightly-2025-01-15"
HADOLINT_VERSION: "v2.12.0"
PYTHON_VERSION: "3.12"

11 changes: 7 additions & 4 deletions .pre-commit-config.yaml
@@ -17,10 +17,6 @@ repos:
- repo: https://github.com/doublify/pre-commit-rust
rev: eeee35a89e69d5772bdee97db1a6a898467b686e # 1.0
hooks:
- id: fmt
# Pinning to a specific rustc version, so that we get consistent formatting
entry: RUSTUP_TOOLCHAIN=nightly-2025-01-15 cargo fmt
args: ["--all", "--", "--check"]
- id: clippy
args: ["--all-targets", "--", "-D", "warnings"]

@@ -78,3 +74,10 @@ repos:
entry: cargo test
stages: [pre-commit, pre-merge-commit, manual]
pass_filenames: false

- id: cargo-rustfmt
name: cargo-rustfmt
language: system
entry: cargo +nightly-2025-01-15 fmt --all -- --check
stages: [pre-commit]
pass_filenames: false
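
Note: the new `cargo-rustfmt` hook pins `nightly-2025-01-15` and the Rust files below all reorder their `use` statements into the same layout: `std` first, then external crates, then `crate`-local imports, merged per crate. This is consistent with rustfmt's unstable `group_imports = "StdExternalCrate"` and `imports_granularity = "Crate"` options, which require a nightly toolchain; the repository's `rustfmt.toml` is not part of this diff, so the exact settings are an assumption. A minimal sketch of the resulting layout:

```rust
// Sketch only (not from this PR): the import grouping the reordered files
// follow, assuming rustfmt's unstable `group_imports = "StdExternalCrate"`
// and `imports_granularity = "Crate"` options on a pinned nightly.

// Group 1: standard library
use std::collections::BTreeMap;

// Group 2: external crates (elided here so the sketch compiles standalone)
// use serde::{Deserialize, Serialize};

// Group 3: the current crate
use crate::constants::APP_NAME;

mod constants {
    pub const APP_NAME: &str = "spark-k8s";
}

fn main() {
    let labels: BTreeMap<&str, &str> = BTreeMap::from([("app", APP_NAME)]);
    println!("{labels:?}");
}
```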
4 changes: 2 additions & 2 deletions rust/crd/src/affinity.rs
@@ -26,8 +26,6 @@ pub fn history_affinity(cluster_name: &str) -> StackableAffinityFragment {
mod test {
use std::collections::BTreeMap;

use crate::{constants::HISTORY_ROLE_NAME, history::SparkHistoryServer};

use stackable_operator::{
commons::affinity::StackableAffinity,
k8s_openapi::{
@@ -38,6 +36,8 @@ mod test {
role_utils::RoleGroupRef,
};

use crate::{constants::HISTORY_ROLE_NAME, history::SparkHistoryServer};

#[test]
pub fn test_history_affinity_defaults() {
let input = r#"
1 change: 0 additions & 1 deletion rust/crd/src/constants.rs
@@ -1,5 +1,4 @@
use const_format::concatcp;

use stackable_operator::memory::{BinaryMultiple, MemoryQuantity};

pub const APP_NAME: &str = "spark-k8s";
12 changes: 6 additions & 6 deletions rust/crd/src/history.rs
@@ -1,5 +1,4 @@
use crate::logdir::ResolvedLogDir;
use crate::{affinity::history_affinity, constants::*};
use std::collections::{BTreeMap, HashMap};

use product_config::{types::PropertyNameKind, ProductConfigManager};
use serde::{Deserialize, Serialize};
@@ -29,9 +28,10 @@ use stackable_operator::{
schemars::{self, JsonSchema},
time::Duration,
};
use std::collections::{BTreeMap, HashMap};
use strum::{Display, EnumIter};

use crate::{affinity::history_affinity, constants::*, logdir::ResolvedLogDir};

#[derive(Snafu, Debug)]
pub enum Error {
#[snafu(display("failed to transform configs"))]
@@ -477,15 +477,15 @@ impl Configuration for HistoryConfigFragment {

#[cfg(test)]
mod test {
use crate::logdir::S3LogDir;

use super::*;
use indoc::indoc;
use stackable_operator::commons::{
s3::{ResolvedS3Bucket, ResolvedS3Connection},
tls_verification::TlsClientDetails,
};

use super::*;
use crate::logdir::S3LogDir;

#[test]
pub fn test_env_overrides() {
let input = indoc! {r#"
48 changes: 27 additions & 21 deletions rust/crd/src/lib.rs
@@ -7,15 +7,17 @@ pub mod logdir;
pub mod roles;
pub mod tlscerts;

pub use crate::roles::*;
use std::{
cmp::max,
collections::{BTreeMap, HashMap},
};

use constants::*;
use history::LogFileDirectorySpec;
use logdir::ResolvedLogDir;
use product_config::{types::PropertyNameKind, ProductConfigManager};
use serde::{Deserialize, Serialize};
use snafu::{OptionExt, ResultExt, Snafu};
use stackable_operator::role_utils::{GenericProductSpecificCommonConfig, GenericRoleConfig};
use stackable_operator::time::Duration;
use stackable_operator::{
builder::pod::volume::{
SecretFormat, SecretOperatorVolumeSourceBuilder, SecretOperatorVolumeSourceBuilderError,
@@ -42,14 +44,15 @@ use stackable_operator::{
ValidatedRoleConfigByPropertyKind,
},
product_logging,
role_utils::{CommonConfiguration, Role, RoleGroup},
role_utils::{
CommonConfiguration, GenericProductSpecificCommonConfig, GenericRoleConfig, Role, RoleGroup,
},
schemars::{self, JsonSchema},
time::Duration,
utils::crds::raw_object_list_schema,
};
use std::{
cmp::max,
collections::{BTreeMap, HashMap},
};

pub use crate::roles::*;

#[derive(Snafu, Debug)]
pub enum Error {
@@ -1082,23 +1085,26 @@ where
#[cfg(test)]
mod tests {

use super::*;
use std::collections::{BTreeMap, HashMap};

use crate::{cores_from_quantity, resources_to_executor_props, RoleConfig};
use crate::{resources_to_driver_props, SparkApplication};
use crate::{Quantity, SparkStorageConfig};
use indoc::indoc;
use product_config::{types::PropertyNameKind, ProductConfigManager};
use stackable_operator::commons::affinity::StackableAffinity;
use stackable_operator::commons::resources::{
CpuLimits, MemoryLimits, NoRuntimeLimits, Resources,
use rstest::rstest;
use stackable_operator::{
commons::{
affinity::StackableAffinity,
resources::{CpuLimits, MemoryLimits, NoRuntimeLimits, Resources},
},
product_config_utils::ValidatedRoleConfigByPropertyKind,
product_logging::spec::Logging,
time::Duration,
};
use stackable_operator::product_config_utils::ValidatedRoleConfigByPropertyKind;
use stackable_operator::product_logging::spec::Logging;

use indoc::indoc;
use rstest::rstest;
use stackable_operator::time::Duration;
use std::collections::{BTreeMap, HashMap};
use super::*;
use crate::{
cores_from_quantity, resources_to_driver_props, resources_to_executor_props, Quantity,
RoleConfig, SparkApplication, SparkStorageConfig,
};

#[test]
fn test_default_resource_limits() {
23 changes: 12 additions & 11 deletions rust/crd/src/logdir.rs
@@ -1,11 +1,6 @@
use crate::{
constants::*,
history::{
LogFileDirectorySpec::{self, S3},
S3LogFileDirectorySpec,
},
tlscerts,
};
use std::collections::BTreeMap;

use snafu::{ResultExt, Snafu};
use stackable_operator::{
builder::pod::volume::{
SecretFormat, SecretOperatorVolumeSourceBuilder, SecretOperatorVolumeSourceBuilderError,
@@ -18,11 +13,17 @@ use stackable_operator::{
k8s_openapi::api::core::v1::{Volume, VolumeMount},
time::Duration,
};
use std::collections::BTreeMap;

use snafu::{ResultExt, Snafu};
use strum::{EnumDiscriminants, IntoStaticStr};

use crate::{
constants::*,
history::{
LogFileDirectorySpec::{self, S3},
S3LogFileDirectorySpec,
},
tlscerts,
};

#[derive(Snafu, Debug, EnumDiscriminants)]
#[strum_discriminants(derive(IntoStaticStr))]
#[allow(clippy::enum_variant_names)]
5 changes: 2 additions & 3 deletions rust/crd/src/roles.rs
@@ -13,12 +13,9 @@
//! each role is named "default". These roles are transparent to the user.
//!
//! The history server has its own role completely unrelated to this module.
use crate::ResolvedLogDir;
use std::{collections::BTreeMap, slice};

use serde::{Deserialize, Serialize};

use crate::SparkApplication;
use stackable_operator::{
commons::{
affinity::StackableAffinity,
@@ -41,6 +38,8 @@ use stackable_operator::{
};
use strum::{Display, EnumIter};

use crate::{ResolvedLogDir, SparkApplication};

#[derive(Clone, Debug, Deserialize, Display, Eq, PartialEq, Serialize, JsonSchema)]
#[strum(serialize_all = "kebab-case")]
pub enum SparkApplicationRole {
44 changes: 25 additions & 19 deletions rust/operator-binary/src/history/history_controller.rs
@@ -1,14 +1,19 @@
use crate::history::operations::pdb::add_pdbs;
use crate::product_logging::{self, resolve_vector_aggregator_address};
use crate::Ctx;
use std::{
collections::{BTreeMap, HashMap},
sync::Arc,
};

use product_config::{types::PropertyNameKind, writer::to_java_properties_string};
use stackable_operator::kube::core::{error_boundary, DeserializeGuard};
use snafu::{OptionExt, ResultExt, Snafu};
use stackable_operator::{
builder::{
self,
configmap::ConfigMapBuilder,
meta::ObjectMetaBuilder,
pod::{container::ContainerBuilder, volume::VolumeBuilder, PodBuilder},
pod::{
container::ContainerBuilder, resources::ResourceRequirementsBuilder,
volume::VolumeBuilder, PodBuilder,
},
},
cluster_resources::{ClusterResourceApplyStrategy, ClusterResources},
commons::product_image_selection::ResolvedProductImage,
@@ -21,12 +26,15 @@ use stackable_operator::{
rbac::v1::{ClusterRole, RoleBinding, RoleRef, Subject},
},
apimachinery::pkg::apis::meta::v1::LabelSelector,
DeepMerge,
},
kube::{
core::{error_boundary, DeserializeGuard},
runtime::{controller::Action, reflector::ObjectRef},
Resource, ResourceExt,
},
kvp::{Label, Labels, ObjectLabels},
logging::controller::ReconcilerError,
product_logging::{
framework::{calculate_log_volume_size_limit, vector_container, LoggingError},
spec::{
@@ -37,30 +45,28 @@
role_utils::RoleGroupRef,
time::Duration,
};
use stackable_spark_k8s_crd::constants::{METRICS_PORT, SPARK_ENV_SH_FILE_NAME};
use stackable_spark_k8s_crd::logdir::ResolvedLogDir;
use stackable_spark_k8s_crd::{
constants::{
ACCESS_KEY_ID, APP_NAME, HISTORY_CONTROLLER_NAME, HISTORY_ROLE_NAME,
JVM_SECURITY_PROPERTIES_FILE, MAX_SPARK_LOG_FILES_SIZE, OPERATOR_NAME, SECRET_ACCESS_KEY,
SPARK_CLUSTER_ROLE, SPARK_DEFAULTS_FILE_NAME, SPARK_IMAGE_BASE_NAME, SPARK_UID,
STACKABLE_TRUST_STORE, VOLUME_MOUNT_NAME_CONFIG, VOLUME_MOUNT_NAME_LOG,
VOLUME_MOUNT_NAME_LOG_CONFIG, VOLUME_MOUNT_PATH_CONFIG, VOLUME_MOUNT_PATH_LOG,
VOLUME_MOUNT_PATH_LOG_CONFIG,
JVM_SECURITY_PROPERTIES_FILE, MAX_SPARK_LOG_FILES_SIZE, METRICS_PORT, OPERATOR_NAME,
SECRET_ACCESS_KEY, SPARK_CLUSTER_ROLE, SPARK_DEFAULTS_FILE_NAME, SPARK_ENV_SH_FILE_NAME,
SPARK_IMAGE_BASE_NAME, SPARK_UID, STACKABLE_TRUST_STORE, VOLUME_MOUNT_NAME_CONFIG,
VOLUME_MOUNT_NAME_LOG, VOLUME_MOUNT_NAME_LOG_CONFIG, VOLUME_MOUNT_PATH_CONFIG,
VOLUME_MOUNT_PATH_LOG, VOLUME_MOUNT_PATH_LOG_CONFIG,
},
history,
history::{HistoryConfig, SparkHistoryServer, SparkHistoryServerContainer},
logdir::ResolvedLogDir,
tlscerts, to_spark_env_sh_string,
};
use std::collections::HashMap;
use std::{collections::BTreeMap, sync::Arc};

use snafu::{OptionExt, ResultExt, Snafu};
use stackable_operator::builder::pod::resources::ResourceRequirementsBuilder;
use stackable_operator::k8s_openapi::DeepMerge;
use stackable_operator::logging::controller::ReconcilerError;
use strum::{EnumDiscriminants, IntoStaticStr};

use crate::{
history::operations::pdb::add_pdbs,
product_logging::{self, resolve_vector_aggregator_address},
Ctx,
};

#[derive(Snafu, Debug, EnumDiscriminants)]
#[strum_discriminants(derive(IntoStaticStr))]
#[allow(clippy::enum_variant_names)]
2 changes: 0 additions & 2 deletions rust/operator-binary/src/main.rs
@@ -9,7 +9,6 @@ use clap::{crate_description, crate_version, Parser};
use futures::{pin_mut, StreamExt};
use history::history_controller;
use product_config::ProductConfigManager;

use stackable_operator::{
cli::{Command, ProductOperatorRun},
k8s_openapi::api::{
@@ -26,7 +25,6 @@ use stackable_operator::{
logging::controller::report_controller_reconciled,
CustomResourceExt,
};

use stackable_spark_k8s_crd::{
constants::{
HISTORY_FULL_CONTROLLER_NAME, OPERATOR_NAME, POD_DRIVER_FULL_CONTROLLER_NAME,
14 changes: 8 additions & 6 deletions rust/operator-binary/src/pod_driver_controller.rs
@@ -1,17 +1,19 @@
use std::sync::Arc;

use snafu::{OptionExt, ResultExt, Snafu};
use stackable_operator::{
client::Client,
k8s_openapi::api::core::v1::Pod,
kube::core::{error_boundary, DeserializeGuard},
kube::runtime::controller::Action,
kube::{
core::{error_boundary, DeserializeGuard},
runtime::controller::Action,
},
logging::controller::ReconcilerError,
time::Duration,
};
use stackable_spark_k8s_crd::{
constants::POD_DRIVER_CONTROLLER_NAME, SparkApplication, SparkApplicationStatus,
};
use std::sync::Arc;

use snafu::{OptionExt, ResultExt, Snafu};
use stackable_operator::logging::controller::ReconcilerError;
use strum::{EnumDiscriminants, IntoStaticStr};

const LABEL_NAME_INSTANCE: &str = "app.kubernetes.io/instance";