16 changes: 4 additions & 12 deletions crates/apollo_batcher/src/batcher_test.rs
@@ -323,30 +323,22 @@ fn assert_proposal_metrics(
assert_eq!(
started,
Some(expected_started),
"unexpected value proposal_started, expected {} got {:?}",
expected_started,
started,
"unexpected value proposal_started, expected {expected_started} got {started:?}",
);
assert_eq!(
succeeded,
Some(expected_succeeded),
"unexpected value proposal_succeeded, expected {} got {:?}",
expected_succeeded,
succeeded,
"unexpected value proposal_succeeded, expected {expected_succeeded} got {succeeded:?}",
);
assert_eq!(
failed,
Some(expected_failed),
"unexpected value proposal_failed, expected {} got {:?}",
expected_failed,
failed,
"unexpected value proposal_failed, expected {expected_failed} got {failed:?}",
);
assert_eq!(
aborted,
Some(expected_aborted),
"unexpected value proposal_aborted, expected {} got {:?}",
expected_aborted,
aborted,
"unexpected value proposal_aborted, expected {expected_aborted} got {aborted:?}",
);
}
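Nearly every hunk in this PR applies the same mechanical change: positional `format!`/`println!`/`panic!` arguments are replaced with inline format args, where the identifier is captured directly inside the braces (stable since Rust 1.58, and what clippy's `uninlined_format_args` lint suggests). A minimal before/after sketch with hypothetical variables, not code from this diff:

fn main() {
    let block_number = 9_u64;
    let status: Option<&str> = Some("pending");

    // Old style: positional arguments listed after the format string.
    println!("block {} has status {:?}", block_number, status);

    // Inline format args: identifiers are captured inside the braces;
    // format specs such as `:?` are written after the identifier.
    println!("block {block_number} has status {status:?}");
}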

@@ -358,7 +358,7 @@ async fn download_class_if_necessary<TStarknetClient: StarknetReader>(

// Check declared classes.
if let Ok(Some(class)) = state_reader.get_class_definition_at(state_number, &class_hash) {
trace!("Class {:?} retrieved from storage.", class_hash);
trace!("Class {class_hash:?} retrieved from storage.");
{
let mut cache = cache.lock().expect("Failed to lock class cache.");
cache.put(class_hash, ApiContractClass::ContractClass(class.clone()));
@@ -370,7 +370,7 @@ async fn download_class_if_necessary<TStarknetClient: StarknetReader>(
if let Ok(Some(class)) =
state_reader.get_deprecated_class_definition_at(state_number, &class_hash)
{
trace!("Deprecated class {:?} retrieved from storage.", class_hash);
trace!("Deprecated class {class_hash:?} retrieved from storage.");
{
let mut cache = cache.lock().expect("Failed to lock class cache.");
cache.put(class_hash, ApiContractClass::DeprecatedContractClass(class.clone()));
@@ -379,7 +379,7 @@ async fn download_class_if_necessary<TStarknetClient: StarknetReader>(
}

// Class not found in storage - download.
trace!("Downloading class {:?}.", class_hash);
trace!("Downloading class {class_hash:?}.");
let client_class = apollo_starknet_client.class_by_hash(class_hash).await.map_err(Arc::new)?;
match client_class {
None => Ok(None),
2 changes: 1 addition & 1 deletion crates/apollo_central_sync/src/sources/central_test.rs
@@ -124,7 +124,7 @@ async fn stream_block_headers_some_are_missing() {
(false, true, "signature missing"),
];
for (block_missing, signature_missing, test_case_description) in test_cases {
println!("Test case: {}", test_case_description);
println!("Test case: {test_case_description}");
const START_BLOCK_NUMBER: u64 = 5;
const END_BLOCK_NUMBER: u64 = 13;
const MISSING_BLOCK_NUMBER: u64 = 9;
7 changes: 3 additions & 4 deletions crates/apollo_class_manager/src/metrics.rs
@@ -56,13 +56,12 @@ pub(crate) fn increment_n_classes(cls_type: CairoClassType) {

pub(crate) fn record_class_size<T>(class_type: ClassObjectType, class: &SerializedClass<T>) {
let class_size = class.size().unwrap_or_else(|_| {
panic!("Illegally formatted {} class, should not have gotten into the system.", class_type)
panic!("Illegally formatted {class_type} class, should not have gotten into the system.")
});
let class_size = u32::try_from(class_size).unwrap_or_else(|_| {
panic!(
"{} class size {} is bigger than what is allowed,
should not have gotten into the system.",
class_type, class_size
"{class_type} class size {class_size} is bigger than what is allowed,
should not have gotten into the system."
)
});

4 changes: 2 additions & 2 deletions crates/apollo_compilation_utils/src/build_utils.rs
@@ -47,7 +47,7 @@ pub fn install_compiler_binary(
.unwrap_or_else(|_| panic!("Failed to install {binary_name}"));

if !install_command_status.success() {
panic!("Failed to install {}", binary_name);
panic!("Failed to install {binary_name}");
}

// Move the '{binary_name}' executable to a shared location.
@@ -59,7 +59,7 @@
.expect("Failed to perform mv command.");

if !move_command_status.success() {
panic!("Failed to move the {} binary to the shared folder.", binary_name);
panic!("Failed to move the {binary_name} binary to the shared folder.");
}

std::fs::remove_dir_all(temp_cargo_path).expect("Failed to remove the cargo directory.");
2 changes: 1 addition & 1 deletion crates/apollo_compilation_utils/src/compiler_utils.rs
@@ -46,7 +46,7 @@ pub fn compile_with_args(
"Process exited with non-zero status but no signal (likely a handled error, e.g., \
memory allocation failure)."
}
Some(sig) => &format!("Process terminated by unexpected signal: {}", sig),
Some(sig) => &format!("Process terminated by unexpected signal: {sig}"),
};

let stderr_output = String::from_utf8(compile_output.stderr)
@@ -19,7 +19,7 @@ fn test_cpu_time_limit() {
let status = command.spawn().expect("Failed to start CPU consuming process").wait().unwrap();
assert!(start.elapsed().as_secs() <= cpu_limit);
let signal = status.signal();
assert_eq!(signal, Some(9), "Process should terminate with SIGKILL (9) got {:?}", signal);
assert_eq!(signal, Some(9), "Process should terminate with SIGKILL (9) got {signal:?}");
}

#[rstest]
@@ -34,7 +34,7 @@ fn test_memory_size_limit() {
let output = command.output().expect("Failed to start memory consuming process");

let signal = output.status.signal();
assert!(signal.is_none(), "Exceeding memory usage should not cause a signal, got {:?}", signal);
assert!(signal.is_none(), "Exceeding memory usage should not cause a signal, got {signal:?}");

let stderr = String::from_utf8_lossy(&output.stderr);

@@ -66,7 +66,7 @@ fn test_file_size_limit() {
let status = command.spawn().expect("Failed to start disk consuming process").wait().unwrap();
assert_eq!(std::fs::metadata(temp_file_path).unwrap().len(), file_limit);
let signal = status.signal();
assert!(signal == Some(25), "Process should terminate with SIGXFSZ (25), got {:?}", signal);
assert!(signal == Some(25), "Process should terminate with SIGXFSZ (25), got {signal:?}");
}

#[rstest]
2 changes: 1 addition & 1 deletion crates/apollo_compile_to_casm/build.rs
@@ -26,7 +26,7 @@ fn install_starknet_sierra_compile() {
// available only after the build is completed. Most importantly, it is available during runtime.
fn set_run_time_out_dir_env_var() {
let out_dir = std::env::var("OUT_DIR").expect("OUT_DIR is not set");
println!("cargo:rustc-env=RUNTIME_ACCESSIBLE_OUT_DIR={}", out_dir);
println!("cargo:rustc-env=RUNTIME_ACCESSIBLE_OUT_DIR={out_dir}");
}

// Returns the OUT_DIR. This function is only operable at build time.
2 changes: 1 addition & 1 deletion crates/apollo_compile_to_native/build_with_cairo_native.rs
@@ -25,7 +25,7 @@ fn install_starknet_native_compile() {
// available only after the build is completed. Most importantly, it is available during runtime.
fn set_run_time_out_dir_env_var() {
let out_dir = std::env::var("OUT_DIR").expect("OUT_DIR is not set");
println!("cargo:rustc-env=RUNTIME_ACCESSIBLE_OUT_DIR={}", out_dir);
println!("cargo:rustc-env=RUNTIME_ACCESSIBLE_OUT_DIR={out_dir}");
}

// Returns the OUT_DIR. This function is only operable at build time.
5 changes: 2 additions & 3 deletions crates/apollo_config/src/converters.rs
@@ -95,7 +95,7 @@ pub fn serialize_optional_vec_u8(optional_vector: &Option<Vec<u8>>) -> String {
Some(vector) => {
format!(
"0x{}",
vector.iter().map(|num| format!("{:02x}", num)).collect::<Vec<String>>().join("")
vector.iter().map(|num| format!("{num:02x}")).collect::<Vec<String>>().join("")
)
}
}
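As the converters.rs hunk shows, format specifiers carry over unchanged when the argument is inlined: `{:02x}` with a trailing positional argument becomes `{num:02x}`. A small standalone sketch (hypothetical byte values, not from this crate):

fn main() {
    let bytes = [0x0a_u8, 0xff, 0x01];
    // The captured identifier keeps the same spec as before:
    // zero-padded, width 2, lowercase hex.
    let hex: String = bytes.iter().map(|num| format!("{num:02x}")).collect();
    assert_eq!(hex, "0aff01");
}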
@@ -125,8 +125,7 @@ where
let byte_str = &hex_str[i..i + 2];
let byte = u8::from_str_radix(byte_str, 16).map_err(|e| {
D::Error::custom(format!(
"Couldn't deserialize vector. Failed to parse byte: {} {}",
byte_str, e
"Couldn't deserialize vector. Failed to parse byte: {byte_str} {e}"
))
})?;
vector.push(byte);
12 changes: 5 additions & 7 deletions crates/apollo_config/src/dumping.rs
@@ -259,7 +259,7 @@ pub fn ser_generated_param(
common_ser_param(
name,
SerializedContent::ParamType(serialization_type),
format!("{} If no value is provided, the system will generate one.", description).as_str(),
format!("{description} If no value is provided, the system will generate one.").as_str(),
privacy.into(),
)
}
@@ -401,8 +401,7 @@ pub fn set_pointing_param_paths(param_path_list: &[&str]) -> Pointers {
for &param_path in param_path_list {
assert!(
param_paths.insert(param_path.to_string()),
"Duplicate parameter path found: {}",
param_path
"Duplicate parameter path found: {param_path}"
);
}
param_paths
@@ -412,7 +411,7 @@ pub fn set_pointing_param_paths(param_path_list: &[&str]) -> Pointers {
pub(crate) const REQUIRED_PARAM_DESCRIPTION_PREFIX: &str = "A required param!";

pub(crate) fn required_param_description(description: &str) -> String {
format!("{} {}", REQUIRED_PARAM_DESCRIPTION_PREFIX, description)
format!("{REQUIRED_PARAM_DESCRIPTION_PREFIX} {description}")
}

/// Verifies that params whose name matches a pointer target either point at it, or are whitelisted.
@@ -434,9 +433,8 @@ fn verify_pointing_params_by_name(
assert!(
serialized_param.content
== SerializedContent::PointerTarget(target_param.to_owned()),
"The target param {} should point to {}, or to be whitelisted.",
param_path,
target_param
"The target param {param_path} should point to {target_param}, or to be \
whitelisted."
);
};
}
2 changes: 1 addition & 1 deletion crates/apollo_config/src/test_utils.rs
@@ -45,6 +45,6 @@ pub fn assert_default_config_file_is_up_to_date<T: Default + SerializeConfig>(
"Diffs shown below (default config file <<>> dump of {}::default()).",
std::any::type_name::<T>()
);
let error_message = format!("{}\n{}", update_instructions, file_names_on_diff);
let error_message = format!("{update_instructions}\n{file_names_on_diff}");
assert_json_eq(&from_default_config_file, &from_code, error_message);
}
14 changes: 5 additions & 9 deletions crates/apollo_config/src/validators.rs
@@ -72,7 +72,7 @@ impl std::fmt::Display for ParsedValidationErrors {
));
}
error_string = error_string.replace('\"', "");
write!(f, "{}", error_string)
write!(f, "{error_string}")
}
}

@@ -106,7 +106,7 @@ fn parse_validation_error(
let new_path = if current_path.is_empty() {
field.to_string()
} else {
format!("{}.{}", current_path, field)
format!("{current_path}.{field}")
};

match error {
@@ -115,11 +115,7 @@
}
ValidationErrorsKind::List(errors) => {
for (index, error) in errors.iter().enumerate() {
parse_validation_error(
error.1,
format!("{}[{}]", new_path, index),
parsed_errors,
);
parse_validation_error(error.1, format!("{new_path}[{index}]"), parsed_errors);
}
}
ValidationErrorsKind::Field(errors) => {
Expand All @@ -131,8 +127,8 @@ fn parse_validation_error(
params: {
let params = &error.params;
params
.iter()
.map(|(_k, v)| v.to_string().replace('\"', ""))
.values()
.map(|v| v.to_string().replace('\"', ""))
.collect::<Vec<String>>()
.join(", ")
}
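Besides inlining format args, the validators.rs hunk replaces `params.iter().map(|(_k, v)| ...)` with `params.values().map(|v| ...)`, dropping the key binding that was only being ignored (the pattern clippy's `iter_kv_map` lint targets). A sketch of the equivalence, assuming an ordinary map of error params:

use std::collections::BTreeMap;

fn main() {
    let params = BTreeMap::from([("min", 1), ("max", 10)]);

    // Both produce the same values; `.values()` avoids binding a key
    // only to discard it.
    let with_iter: Vec<String> = params.iter().map(|(_k, v)| v.to_string()).collect();
    let with_values: Vec<String> = params.values().map(|v| v.to_string()).collect();
    assert_eq!(with_iter, with_values);
}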
17 changes: 8 additions & 9 deletions crates/apollo_consensus/src/bin/run_simulation.rs
@@ -254,7 +254,7 @@ async fn run_simulation(
should_break = async {
tokio::time::sleep(Duration::from_secs(MONITORING_PERIOD_SECONDS)).await;
let elapsed = start_time.elapsed().as_secs();
println!("\nTime elapsed: {}s", elapsed);
println!("\nTime elapsed: {elapsed}s");

monitor_simulation(&mut nodes, start_time, max_test_duration, stagnation_timeout).await
} => {
@@ -278,7 +278,7 @@ async fn build_node(data_dir: &str, logs_dir: &str, i: usize, papyrus_args: &Pap
let is_bootstrap = i == 1;
let port = if is_bootstrap { *BOOTNODE_TCP_PORT } else { find_free_port() };
let monitoring_gateway_server_port = find_free_port();
let data_dir = format!("{}/data{}", data_dir, i);
let data_dir = format!("{data_dir}/data{i}");
let validator_id = i + usize::try_from(DEFAULT_VALIDATOR_ID).expect("Conversion failed");

let mut cmd = format!(
@@ -303,7 +303,7 @@ async fn build_node(data_dir: &str, logs_dir: &str, i: usize, papyrus_args: &Pap
];
for (key, value) in conditional_params {
if let Some(v) = value {
cmd.push_str(&format!("--consensus.{} {} ", key, v));
cmd.push_str(&format!("--consensus.{key} {v} "));
}
}

@@ -319,15 +319,14 @@ async fn build_node(data_dir: &str, logs_dir: &str, i: usize, papyrus_args: &Pap
];
for (key, value) in conditional_test_params {
if let Some(v) = value {
cmd.push_str(&format!("--test.{} {} ", key, v));
cmd.push_str(&format!("--test.{key} {v} "));
}
}

if is_bootstrap {
cmd.push_str(&format!(
"--network.secret_key {} 2>&1 | sed -r 's/\\x1B\\[[0-9;]*[mK]//g' > \
{}/validator0x{:x}.txt",
SECRET_KEY, logs_dir, validator_id
"--network.secret_key {SECRET_KEY} 2>&1 | sed -r 's/\\x1B\\[[0-9;]*[mK]//g' > \
{logs_dir}/validator0x{validator_id:x}.txt"
));
} else {
cmd.push_str(&format!(
@@ -385,11 +384,11 @@ fn setup_artifact_dirs(papyrus_args: &PapyrusArgs) -> (String, LockDir) {
.map(|d| d.file_name().into_string().unwrap())
.collect();
let expected_dirs: HashSet<_> =
(0..papyrus_args.num_validators).map(|i| format!("data{}", i)).collect();
(0..papyrus_args.num_validators).map(|i| format!("data{i}")).collect();
assert!(expected_dirs.is_subset(&actual_dirs), "{db_dir} must contain: {expected_dirs:?}");
} else {
for i in 0..papyrus_args.num_validators {
fs::create_dir_all(format!("{}/data{}", db_dir, i)).unwrap();
fs::create_dir_all(format!("{db_dir}/data{i}")).unwrap();
}
}
let db_lock = LockDir::new(db_dir).unwrap();
7 changes: 3 additions & 4 deletions crates/apollo_consensus/src/single_height_consensus.rs
@@ -563,8 +563,7 @@ impl SingleHeightConsensus {
};
if let Some(old) = votes.insert((round, self.id), vote.clone()) {
return Err(ConsensusError::InternalInconsistency(format!(
"State machine should not send repeat votes: old={:?}, new={:?}",
old, vote
"State machine should not send repeat votes: old={old:?}, new={vote:?}"
)));
}
*last_vote = match last_vote {
@@ -575,8 +574,8 @@ impl SingleHeightConsensus {
// current round. It should monotonicly increase its round. It should only vote once
// per step.
return Err(ConsensusError::InternalInconsistency(format!(
"State machine must progress in time: last_vote: {:?} new_vote: {:?}",
last_vote, vote,
"State machine must progress in time: last_vote: {last_vote:?} new_vote: \
{vote:?}",
)));
}
};
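The reflowed messages in dumping.rs and single_height_consensus.rs rely on the backslash-newline escape inside a string literal, which drops the newline and the leading whitespace of the continuation line, so the logged message stays on one line. A tiny sketch with a made-up message:

fn main() {
    let name = "apollo";
    // The trailing `\` elides the newline and the indentation that follows,
    // producing a single-line string at runtime.
    let msg = format!(
        "first part about {name}, \
         second part continues here."
    );
    assert_eq!(msg, "first part about apollo, second part continues here.");
}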
2 changes: 1 addition & 1 deletion crates/apollo_consensus/src/state_machine.rs
@@ -228,7 +228,7 @@ impl StateMachine {
{
trace!("Processing event: {:?}", event);
if self.awaiting_get_proposal {
assert!(matches!(event, StateMachineEvent::GetProposal(_, _)), "{:?}", event);
assert!(matches!(event, StateMachineEvent::GetProposal(_, _)), "{event:?}");
}

match event {
2 changes: 1 addition & 1 deletion crates/apollo_consensus/src/stream_handler.rs
@@ -271,7 +271,7 @@
);
} else {
// TODO(guyn): replace panic with more graceful error handling
panic!("Unexpected error: {:?}", e);
panic!("Unexpected error: {e:?}");
}
}
};
6 changes: 3 additions & 3 deletions crates/apollo_consensus_manager/src/consensus_manager.rs
@@ -204,10 +204,10 @@ impl ConsensusManager {
}
},
network_result = network_task => {
panic!("Consensus' network task finished unexpectedly: {:?}", network_result);
panic!("Consensus' network task finished unexpectedly: {network_result:?}");
}
stream_handler_result = stream_handler_task => {
panic!("Consensus' stream handler task finished unexpectedly: {:?}", stream_handler_result);
panic!("Consensus' stream handler task finished unexpectedly: {stream_handler_result:?}");
}
}
}
@@ -264,6 +264,6 @@ impl ComponentStarter for ConsensusManager {
info!("Starting component {}.", short_type_name::<Self>());
self.run()
.await
.unwrap_or_else(|e| panic!("Failed to start ConsensusManager component: {:?}", e))
.unwrap_or_else(|e| panic!("Failed to start ConsensusManager component: {e:?}"))
}
}
2 changes: 1 addition & 1 deletion crates/apollo_dashboard/src/metric_definitions_test.rs
@@ -38,6 +38,6 @@ fn metric_names_no_duplications() {

let mut unique_metric_names: HashSet<&&'static str> = HashSet::new();
for metric_name in all_metric_names {
assert!(unique_metric_names.insert(metric_name), "Duplicated metric name: {}", metric_name);
assert!(unique_metric_names.insert(metric_name), "Duplicated metric name: {metric_name}");
}
}