diff --git a/.github/workflows/pr_pre-commit.yaml b/.github/workflows/pr_pre-commit.yaml
index ec55fe78..bdc3ddfd 100644
--- a/.github/workflows/pr_pre-commit.yaml
+++ b/.github/workflows/pr_pre-commit.yaml
@@ -6,7 +6,7 @@ on:
 
 env:
   CARGO_TERM_COLOR: always
-  RUST_TOOLCHAIN_VERSION: "1.82.0"
+  RUST_TOOLCHAIN_VERSION: "nightly-2025-01-15"
   HADOLINT_VERSION: "v2.12.0"
   PYTHON_VERSION: "3.12"
 
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a36fdcb7..c5bd8ad1 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -17,10 +17,6 @@ repos:
   - repo: https://github.com/doublify/pre-commit-rust
     rev: eeee35a89e69d5772bdee97db1a6a898467b686e # 1.0
     hooks:
-      - id: fmt
-        # Pinning to a specific rustc version, so that we get consistent formatting
-        entry: RUSTUP_TOOLCHAIN=nightly-2025-01-15 cargo fmt
-        args: ["--all", "--", "--check"]
       - id: clippy
         args: ["--all-targets", "--", "-D", "warnings"]
 
@@ -78,3 +74,10 @@ repos:
         entry: cargo test
         stages: [pre-commit, pre-merge-commit, manual]
         pass_filenames: false
+
+      - id: cargo-rustfmt
+        name: cargo-rustfmt
+        language: system
+        entry: cargo +nightly-2025-01-15 fmt --all -- --check
+        stages: [pre-commit]
+        pass_filenames: false
diff --git a/rust/crd/src/affinity.rs b/rust/crd/src/affinity.rs
index e3ae617e..ac476c04 100644
--- a/rust/crd/src/affinity.rs
+++ b/rust/crd/src/affinity.rs
@@ -26,8 +26,6 @@ pub fn history_affinity(cluster_name: &str) -> StackableAffinityFragment {
 mod test {
     use std::collections::BTreeMap;
 
-    use crate::{constants::HISTORY_ROLE_NAME, history::SparkHistoryServer};
-
     use stackable_operator::{
         commons::affinity::StackableAffinity,
         k8s_openapi::{
@@ -38,6 +36,8 @@ mod test {
         role_utils::RoleGroupRef,
     };
 
+    use crate::{constants::HISTORY_ROLE_NAME, history::SparkHistoryServer};
+
     #[test]
     pub fn test_history_affinity_defaults() {
         let input = r#"
diff --git a/rust/crd/src/constants.rs b/rust/crd/src/constants.rs
index 615378d0..c37ed8e2 100644
--- a/rust/crd/src/constants.rs
+++ b/rust/crd/src/constants.rs
@@ -1,5 +1,4 @@
 use const_format::concatcp;
-
 use stackable_operator::memory::{BinaryMultiple, MemoryQuantity};
 
 pub const APP_NAME: &str = "spark-k8s";
diff --git a/rust/crd/src/history.rs b/rust/crd/src/history.rs
index 18e855de..af837c13 100644
--- a/rust/crd/src/history.rs
+++ b/rust/crd/src/history.rs
@@ -1,5 +1,4 @@
-use crate::logdir::ResolvedLogDir;
-use crate::{affinity::history_affinity, constants::*};
+use std::collections::{BTreeMap, HashMap};
 
 use product_config::{types::PropertyNameKind, ProductConfigManager};
 use serde::{Deserialize, Serialize};
@@ -29,9 +28,10 @@ use stackable_operator::{
     schemars::{self, JsonSchema},
     time::Duration,
 };
-use std::collections::{BTreeMap, HashMap};
 use strum::{Display, EnumIter};
 
+use crate::{affinity::history_affinity, constants::*, logdir::ResolvedLogDir};
+
 #[derive(Snafu, Debug)]
 pub enum Error {
     #[snafu(display("failed to transform configs"))]
@@ -477,15 +477,15 @@ impl Configuration for HistoryConfigFragment {
 
 #[cfg(test)]
 mod test {
-    use crate::logdir::S3LogDir;
-
-    use super::*;
     use indoc::indoc;
     use stackable_operator::commons::{
         s3::{ResolvedS3Bucket, ResolvedS3Connection},
         tls_verification::TlsClientDetails,
     };
 
+    use super::*;
+    use crate::logdir::S3LogDir;
+
     #[test]
     pub fn test_env_overrides() {
         let input = indoc! {r#"
diff --git a/rust/crd/src/lib.rs b/rust/crd/src/lib.rs
index 7e518739..ee669e21 100644
--- a/rust/crd/src/lib.rs
+++ b/rust/crd/src/lib.rs
@@ -7,15 +7,17 @@ pub mod logdir;
 pub mod roles;
 pub mod tlscerts;
 
-pub use crate::roles::*;
+use std::{
+    cmp::max,
+    collections::{BTreeMap, HashMap},
+};
+
 use constants::*;
 use history::LogFileDirectorySpec;
 use logdir::ResolvedLogDir;
 use product_config::{types::PropertyNameKind, ProductConfigManager};
 use serde::{Deserialize, Serialize};
 use snafu::{OptionExt, ResultExt, Snafu};
-use stackable_operator::role_utils::{GenericProductSpecificCommonConfig, GenericRoleConfig};
-use stackable_operator::time::Duration;
 use stackable_operator::{
     builder::pod::volume::{
         SecretFormat, SecretOperatorVolumeSourceBuilder, SecretOperatorVolumeSourceBuilderError,
@@ -42,14 +44,15 @@ use stackable_operator::{
         ValidatedRoleConfigByPropertyKind,
     },
     product_logging,
-    role_utils::{CommonConfiguration, Role, RoleGroup},
+    role_utils::{
+        CommonConfiguration, GenericProductSpecificCommonConfig, GenericRoleConfig, Role, RoleGroup,
+    },
     schemars::{self, JsonSchema},
+    time::Duration,
     utils::crds::raw_object_list_schema,
 };
-use std::{
-    cmp::max,
-    collections::{BTreeMap, HashMap},
-};
+
+pub use crate::roles::*;
 
 #[derive(Snafu, Debug)]
 pub enum Error {
@@ -1082,23 +1085,26 @@ where
 
 #[cfg(test)]
 mod tests {
-    use super::*;
+    use std::collections::{BTreeMap, HashMap};
 
-    use crate::{cores_from_quantity, resources_to_executor_props, RoleConfig};
-    use crate::{resources_to_driver_props, SparkApplication};
-    use crate::{Quantity, SparkStorageConfig};
+    use indoc::indoc;
     use product_config::{types::PropertyNameKind, ProductConfigManager};
-    use stackable_operator::commons::affinity::StackableAffinity;
-    use stackable_operator::commons::resources::{
-        CpuLimits, MemoryLimits, NoRuntimeLimits, Resources,
+    use rstest::rstest;
+    use stackable_operator::{
+        commons::{
+            affinity::StackableAffinity,
+            resources::{CpuLimits, MemoryLimits, NoRuntimeLimits, Resources},
+        },
+        product_config_utils::ValidatedRoleConfigByPropertyKind,
+        product_logging::spec::Logging,
+        time::Duration,
     };
-    use stackable_operator::product_config_utils::ValidatedRoleConfigByPropertyKind;
-    use stackable_operator::product_logging::spec::Logging;
 
-    use indoc::indoc;
-    use rstest::rstest;
-    use stackable_operator::time::Duration;
-    use std::collections::{BTreeMap, HashMap};
+    use super::*;
+    use crate::{
+        cores_from_quantity, resources_to_driver_props, resources_to_executor_props, Quantity,
+        RoleConfig, SparkApplication, SparkStorageConfig,
+    };
 
     #[test]
     fn test_default_resource_limits() {
diff --git a/rust/crd/src/logdir.rs b/rust/crd/src/logdir.rs
index 502466ec..63c00c7e 100644
--- a/rust/crd/src/logdir.rs
+++ b/rust/crd/src/logdir.rs
@@ -1,11 +1,6 @@
-use crate::{
-    constants::*,
-    history::{
-        LogFileDirectorySpec::{self, S3},
-        S3LogFileDirectorySpec,
-    },
-    tlscerts,
-};
+use std::collections::BTreeMap;
+
+use snafu::{ResultExt, Snafu};
 use stackable_operator::{
     builder::pod::volume::{
         SecretFormat, SecretOperatorVolumeSourceBuilder, SecretOperatorVolumeSourceBuilderError,
@@ -18,11 +13,17 @@ use stackable_operator::{
     k8s_openapi::api::core::v1::{Volume, VolumeMount},
     time::Duration,
 };
-use std::collections::BTreeMap;
-
-use snafu::{ResultExt, Snafu};
 use strum::{EnumDiscriminants, IntoStaticStr};
 
+use crate::{
+    constants::*,
+    history::{
+        LogFileDirectorySpec::{self, S3},
+        S3LogFileDirectorySpec,
+    },
+    tlscerts,
+};
+
 #[derive(Snafu, Debug, EnumDiscriminants)]
 #[strum_discriminants(derive(IntoStaticStr))]
 #[allow(clippy::enum_variant_names)]
diff --git a/rust/crd/src/roles.rs b/rust/crd/src/roles.rs
index 72bab050..94da26f2 100644
--- a/rust/crd/src/roles.rs
+++ b/rust/crd/src/roles.rs
@@ -13,12 +13,9 @@
 //! each role is named "default". These roles are transparent to the user.
 //!
 //! The history server has its own role completely unrelated to this module.
-use crate::ResolvedLogDir;
 use std::{collections::BTreeMap, slice};
 
 use serde::{Deserialize, Serialize};
-
-use crate::SparkApplication;
 use stackable_operator::{
     commons::{
         affinity::StackableAffinity,
@@ -41,6 +38,8 @@ use stackable_operator::{
 };
 use strum::{Display, EnumIter};
 
+use crate::{ResolvedLogDir, SparkApplication};
+
 #[derive(Clone, Debug, Deserialize, Display, Eq, PartialEq, Serialize, JsonSchema)]
 #[strum(serialize_all = "kebab-case")]
 pub enum SparkApplicationRole {
diff --git a/rust/operator-binary/src/history/history_controller.rs b/rust/operator-binary/src/history/history_controller.rs
index 90f79be0..aeec5742 100644
--- a/rust/operator-binary/src/history/history_controller.rs
+++ b/rust/operator-binary/src/history/history_controller.rs
@@ -1,14 +1,19 @@
-use crate::history::operations::pdb::add_pdbs;
-use crate::product_logging::{self, resolve_vector_aggregator_address};
-use crate::Ctx;
+use std::{
+    collections::{BTreeMap, HashMap},
+    sync::Arc,
+};
+
 use product_config::{types::PropertyNameKind, writer::to_java_properties_string};
-use stackable_operator::kube::core::{error_boundary, DeserializeGuard};
+use snafu::{OptionExt, ResultExt, Snafu};
 use stackable_operator::{
     builder::{
         self,
         configmap::ConfigMapBuilder,
         meta::ObjectMetaBuilder,
-        pod::{container::ContainerBuilder, volume::VolumeBuilder, PodBuilder},
+        pod::{
+            container::ContainerBuilder, resources::ResourceRequirementsBuilder,
+            volume::VolumeBuilder, PodBuilder,
+        },
     },
     cluster_resources::{ClusterResourceApplyStrategy, ClusterResources},
     commons::product_image_selection::ResolvedProductImage,
@@ -21,12 +26,15 @@ use stackable_operator::{
             rbac::v1::{ClusterRole, RoleBinding, RoleRef, Subject},
         },
         apimachinery::pkg::apis::meta::v1::LabelSelector,
+        DeepMerge,
     },
     kube::{
+        core::{error_boundary, DeserializeGuard},
         runtime::{controller::Action, reflector::ObjectRef},
         Resource, ResourceExt,
     },
     kvp::{Label, Labels, ObjectLabels},
+    logging::controller::ReconcilerError,
     product_logging::{
         framework::{calculate_log_volume_size_limit, vector_container, LoggingError},
         spec::{
@@ -37,30 +45,28 @@ use stackable_operator::{
     role_utils::RoleGroupRef,
     time::Duration,
 };
-use stackable_spark_k8s_crd::constants::{METRICS_PORT, SPARK_ENV_SH_FILE_NAME};
-use stackable_spark_k8s_crd::logdir::ResolvedLogDir;
 use stackable_spark_k8s_crd::{
     constants::{
         ACCESS_KEY_ID, APP_NAME, HISTORY_CONTROLLER_NAME, HISTORY_ROLE_NAME,
-        JVM_SECURITY_PROPERTIES_FILE, MAX_SPARK_LOG_FILES_SIZE, OPERATOR_NAME, SECRET_ACCESS_KEY,
-        SPARK_CLUSTER_ROLE, SPARK_DEFAULTS_FILE_NAME, SPARK_IMAGE_BASE_NAME, SPARK_UID,
-        STACKABLE_TRUST_STORE, VOLUME_MOUNT_NAME_CONFIG, VOLUME_MOUNT_NAME_LOG,
-        VOLUME_MOUNT_NAME_LOG_CONFIG, VOLUME_MOUNT_PATH_CONFIG, VOLUME_MOUNT_PATH_LOG,
-        VOLUME_MOUNT_PATH_LOG_CONFIG,
+        JVM_SECURITY_PROPERTIES_FILE, MAX_SPARK_LOG_FILES_SIZE, METRICS_PORT, OPERATOR_NAME,
+        SECRET_ACCESS_KEY, SPARK_CLUSTER_ROLE, SPARK_DEFAULTS_FILE_NAME, SPARK_ENV_SH_FILE_NAME,
+        SPARK_IMAGE_BASE_NAME, SPARK_UID, STACKABLE_TRUST_STORE, VOLUME_MOUNT_NAME_CONFIG,
+        VOLUME_MOUNT_NAME_LOG, VOLUME_MOUNT_NAME_LOG_CONFIG, VOLUME_MOUNT_PATH_CONFIG,
+        VOLUME_MOUNT_PATH_LOG, VOLUME_MOUNT_PATH_LOG_CONFIG,
     },
     history,
     history::{HistoryConfig, SparkHistoryServer, SparkHistoryServerContainer},
+    logdir::ResolvedLogDir,
     tlscerts, to_spark_env_sh_string,
 };
-use std::collections::HashMap;
-use std::{collections::BTreeMap, sync::Arc};
-
-use snafu::{OptionExt, ResultExt, Snafu};
-use stackable_operator::builder::pod::resources::ResourceRequirementsBuilder;
-use stackable_operator::k8s_openapi::DeepMerge;
-use stackable_operator::logging::controller::ReconcilerError;
 use strum::{EnumDiscriminants, IntoStaticStr};
 
+use crate::{
+    history::operations::pdb::add_pdbs,
+    product_logging::{self, resolve_vector_aggregator_address},
+    Ctx,
+};
+
 #[derive(Snafu, Debug, EnumDiscriminants)]
 #[strum_discriminants(derive(IntoStaticStr))]
 #[allow(clippy::enum_variant_names)]
diff --git a/rust/operator-binary/src/main.rs b/rust/operator-binary/src/main.rs
index 99091fcc..109b3379 100644
--- a/rust/operator-binary/src/main.rs
+++ b/rust/operator-binary/src/main.rs
@@ -9,7 +9,6 @@ use clap::{crate_description, crate_version, Parser};
 use futures::{pin_mut, StreamExt};
 use history::history_controller;
 use product_config::ProductConfigManager;
-
 use stackable_operator::{
     cli::{Command, ProductOperatorRun},
     k8s_openapi::api::{
@@ -26,7 +25,6 @@ use stackable_operator::{
     logging::controller::report_controller_reconciled,
     CustomResourceExt,
 };
-
 use stackable_spark_k8s_crd::{
     constants::{
         HISTORY_FULL_CONTROLLER_NAME, OPERATOR_NAME, POD_DRIVER_FULL_CONTROLLER_NAME,
diff --git a/rust/operator-binary/src/pod_driver_controller.rs b/rust/operator-binary/src/pod_driver_controller.rs
index f75d5225..a2c93d1c 100644
--- a/rust/operator-binary/src/pod_driver_controller.rs
+++ b/rust/operator-binary/src/pod_driver_controller.rs
@@ -1,17 +1,19 @@
+use std::sync::Arc;
+
+use snafu::{OptionExt, ResultExt, Snafu};
 use stackable_operator::{
     client::Client,
     k8s_openapi::api::core::v1::Pod,
-    kube::core::{error_boundary, DeserializeGuard},
-    kube::runtime::controller::Action,
+    kube::{
+        core::{error_boundary, DeserializeGuard},
+        runtime::controller::Action,
+    },
+    logging::controller::ReconcilerError,
     time::Duration,
 };
 use stackable_spark_k8s_crd::{
     constants::POD_DRIVER_CONTROLLER_NAME, SparkApplication, SparkApplicationStatus,
 };
-use std::sync::Arc;
-
-use snafu::{OptionExt, ResultExt, Snafu};
-use stackable_operator::logging::controller::ReconcilerError;
 use strum::{EnumDiscriminants, IntoStaticStr};
 
 const LABEL_NAME_INSTANCE: &str = "app.kubernetes.io/instance";
diff --git a/rust/operator-binary/src/spark_k8s_controller.rs b/rust/operator-binary/src/spark_k8s_controller.rs
index d1748c75..dc4dbf3e 100644
--- a/rust/operator-binary/src/spark_k8s_controller.rs
+++ b/rust/operator-binary/src/spark_k8s_controller.rs
@@ -1,36 +1,26 @@
-use crate::Ctx;
-
 use std::{
     collections::{BTreeMap, HashMap},
     sync::Arc,
     vec,
 };
 
-use product_config::writer::to_java_properties_string;
+use product_config::{types::PropertyNameKind, writer::to_java_properties_string};
+use snafu::{OptionExt, ResultExt, Snafu};
 use stackable_operator::{
     builder,
+    builder::{
+        configmap::ConfigMapBuilder,
+        meta::ObjectMetaBuilder,
+        pod::{
+            container::ContainerBuilder, resources::ResourceRequirementsBuilder,
+            volume::VolumeBuilder, PodBuilder,
+        },
+    },
     commons::{
-        s3::S3Error,
+        product_image_selection::ResolvedProductImage,
+        s3::{S3ConnectionSpec, S3Error},
         tls_verification::{CaCert, TlsVerification},
     },
-    product_logging::framework::LoggingError,
-    time::Duration,
-};
-use stackable_spark_k8s_crd::{
-    constants::*, logdir::ResolvedLogDir, tlscerts, to_spark_env_sh_string, RoleConfig,
-    SparkApplication, SparkApplicationRole, SparkApplicationStatus, SparkContainer, SubmitConfig,
-};
-
-use crate::product_logging::{self, resolve_vector_aggregator_address};
-use product_config::types::PropertyNameKind;
-use snafu::{OptionExt, ResultExt, Snafu};
-use stackable_operator::k8s_openapi::DeepMerge;
-use stackable_operator::{
-    builder::{
-        configmap::ConfigMapBuilder, meta::ObjectMetaBuilder, pod::container::ContainerBuilder,
-        pod::resources::ResourceRequirementsBuilder, pod::volume::VolumeBuilder, pod::PodBuilder,
-    },
-    commons::{product_image_selection::ResolvedProductImage, s3::S3ConnectionSpec},
     k8s_openapi::{
         api::{
             batch::v1::{Job, JobSpec},
@@ -40,7 +30,7 @@ use stackable_operator::{
             },
             rbac::v1::{ClusterRole, RoleBinding, RoleRef, Subject},
         },
-        Resource,
+        DeepMerge, Resource,
     },
     kube::{
         core::{error_boundary, DeserializeGuard},
@@ -50,17 +40,29 @@ use stackable_operator::{
     logging::controller::ReconcilerError,
     product_config_utils::ValidatedRoleConfigByPropertyKind,
     product_logging::{
-        framework::{capture_shell_output, create_vector_shutdown_file_command, vector_container},
+        framework::{
+            capture_shell_output, create_vector_shutdown_file_command, vector_container,
+            LoggingError,
+        },
         spec::{
             ConfigMapLogConfig, ContainerLogConfig, ContainerLogConfigChoice,
             CustomContainerLogConfig, Logging,
         },
     },
     role_utils::RoleGroupRef,
+    time::Duration,
+};
+use stackable_spark_k8s_crd::{
+    constants::*, logdir::ResolvedLogDir, tlscerts, to_spark_env_sh_string, RoleConfig,
+    SparkApplication, SparkApplicationRole, SparkApplicationStatus, SparkContainer, SubmitConfig,
 };
-
 use strum::{EnumDiscriminants, IntoStaticStr};
 
+use crate::{
+    product_logging::{self, resolve_vector_aggregator_address},
+    Ctx,
+};
+
 #[derive(Snafu, Debug, EnumDiscriminants)]
 #[strum_discriminants(derive(IntoStaticStr))]
 #[allow(clippy::enum_variant_names)]