diff --git a/core/Cargo.toml b/core/Cargo.toml index b0cb7138..8de17ca0 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "pasfmt-core" version = "0.7.0+dev" -edition = "2021" +edition = "2024" [features] _lang_types_from_str = ["dep:strum", "dep:strum_macros"] diff --git a/core/benches/benchmark_lexer.rs b/core/benches/benchmark_lexer.rs index 816ec653..f98fb29e 100644 --- a/core/benches/benchmark_lexer.rs +++ b/core/benches/benchmark_lexer.rs @@ -1,8 +1,8 @@ use std::{ops::Range, time::Duration}; -use criterion::{criterion_group, criterion_main, Criterion}; +use criterion::{Criterion, criterion_group, criterion_main}; use pasfmt_core::prelude::*; -use rand::{seq::SliceRandom, Rng}; +use rand::{Rng, seq::SliceRandom}; fn criterion_benchmark(c: &mut Criterion) { let mut group = c.benchmark_group("lexer"); diff --git a/core/datatests/Cargo.toml b/core/datatests/Cargo.toml index 3fcf0c07..ea6ea703 100644 --- a/core/datatests/Cargo.toml +++ b/core/datatests/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "datatests" version = "0.0.0" -edition = "2021" +edition = "2024" [dev-dependencies] datatest-stable = { workspace = true } diff --git a/core/datatests/tests/suites/logical_line_parser.rs b/core/datatests/tests/suites/logical_line_parser.rs index f42ac132..1db7afc9 100644 --- a/core/datatests/tests/suites/logical_line_parser.rs +++ b/core/datatests/tests/suites/logical_line_parser.rs @@ -469,7 +469,10 @@ impl Display for TestParsingError { write!(f, "{PREFIX}marker must succeed another token '{input}'") } TestParsingError::NoLineTypeDelimiter(input) => { - write!(f, "{PREFIX}no logical line type declaration delimiter '{input}' (expected `number:LogicalLineType`)") + write!( + f, + "{PREFIX}no logical line type declaration delimiter '{input}' (expected `number:LogicalLineType`)" + ) } TestParsingError::InvalidLogicalLineType(input) => { write!(f, "{PREFIX}invalid logical line type '{input}'") diff --git a/core/src/defaults/lexer.rs b/core/src/defaults/lexer.rs index 3ef9ae82..b15ee07b 100644 --- a/core/src/defaults/lexer.rs +++ b/core/src/defaults/lexer.rs @@ -534,10 +534,10 @@ fn get_word_token_type(input: &str) -> RawTokenType { if input.len() <= MAX_WORD_LENGTH { let key = hash_keyword(input) as usize; - if let Some(Some((candidate, keyword))) = KEYWORD_LOOKUP_TABLE.get(key) { - if input.eq_ignore_ascii_case(candidate) { - return *keyword; - } + if let Some(Some((candidate, keyword))) = KEYWORD_LOOKUP_TABLE.get(key) + && input.eq_ignore_ascii_case(candidate) + { + return *keyword; } } @@ -2673,7 +2673,7 @@ mod tests { ("THIN_SPACE\u{2009}", TT::Identifier), ("ZERO_WIDTH_NBSP\u{FEFF}", TT::Identifier), // note, does not contain the U+3000 character - ( "IDEOGRAPHIC_SPACE", TT::Identifier), + ("IDEOGRAPHIC_SPACE", TT::Identifier), ], ) } diff --git a/core/src/defaults/parser.rs b/core/src/defaults/parser.rs index 9329c324..59a52271 100644 --- a/core/src/defaults/parser.rs +++ b/core/src/defaults/parser.rs @@ -610,19 +610,19 @@ impl<'a, 'b> InternalDelphiLogicalLineParser<'a, 'b> { level, }); - if self.context.is_ended.last() == Some(&false) { - if let Some(KK::Else) = self.get_current_keyword_kind() { - let parent = self.get_line_parent_of_current_token(); - self.next_token(); // else - - trace!("Parse `else` statement"); - level = ParserContextLevel::Parent(parent, 1); - self.parse_block(ParserContext { - context_type: ContextType::Statement(StatementKind::Normal), - context_ending_predicate: CEP::Transparent(never_ending), - level, - }); - } + if 
self.context.is_ended.last() == Some(&false) + && let Some(KK::Else) = self.get_current_keyword_kind() + { + let parent = self.get_line_parent_of_current_token(); + self.next_token(); // else + + trace!("Parse `else` statement"); + level = ParserContextLevel::Parent(parent, 1); + self.parse_block(ParserContext { + context_type: ContextType::Statement(StatementKind::Normal), + context_ending_predicate: CEP::Transparent(never_ending), + level, + }); } trace!("Finished parsing `if` statement"); @@ -861,8 +861,8 @@ impl<'a, 'b> InternalDelphiLogicalLineParser<'a, 'b> { self.context.update_statuses(ending_context); return; } - if self.is_at_start_of_line() { - if let Some(line_type) = match context.context_type { + if self.is_at_start_of_line() + && let Some(line_type) = match context.context_type { ContextType::Statement(StatementKind::Case) => Some(LLT::CaseArm), ContextType::LabelBlock | ContextType::TypeBlock @@ -872,9 +872,9 @@ impl<'a, 'b> InternalDelphiLogicalLineParser<'a, 'b> { Some(LLT::VariantRecordCaseArm) } _ => None, - } { - self.set_logical_line_type(line_type); } + { + self.set_logical_line_type(line_type); } } trace!("parse_statement with {:?}", token_type); @@ -1749,10 +1749,9 @@ impl<'a, 'b> InternalDelphiLogicalLineParser<'a, 'b> { if let Some(TT::IdentifierOrKeyword( directive @ (KK::Deprecated | KK::Experimental | KK::Platform | KK::Library), )) = self.tokens.get(token_index).map(RawToken::get_token_type) + && let Some(token) = self.tokens.get_mut(token_index) { - if let Some(token) = self.tokens.get_mut(token_index) { - token.set_token_type(TT::Keyword(directive)) - } + token.set_token_type(TT::Keyword(directive)) } line_index -= 1; @@ -1847,7 +1846,7 @@ impl<'a, 'b> InternalDelphiLogicalLineParser<'a, 'b> { } fn get_current_logical_line_token_types( &self, - ) -> impl DoubleEndedIterator + '_ { + ) -> impl DoubleEndedIterator + use<'_> { self.get_current_logical_line() .tokens .iter() @@ -1928,10 +1927,9 @@ impl<'a, 'b> InternalDelphiLogicalLineParser<'a, 'b> { .checked_sub(1) .and_then(|prev_pass_index| self.pass_indices.get(prev_pass_index)) .and_then(|&token_index| self.tokens.get_mut(token_index)) + && let TT::IdentifierOrKeyword(keyword_kind) = token.get_token_type() { - if let TT::IdentifierOrKeyword(keyword_kind) = token.get_token_type() { - token.set_token_type(TT::Keyword(keyword_kind)); - } + token.set_token_type(TT::Keyword(keyword_kind)); } } fn get_token_type_for_index(&self, index: usize) -> Option { @@ -2019,17 +2017,17 @@ impl<'a, 'b> InternalDelphiLogicalLineParser<'a, 'b> { .and_then(Self::get_keyword_kind) } fn consolidate_current_ident(&mut self) { - if let Some(token) = self.get_token_mut::<0>() { - if let TT::IdentifierOrKeyword(_) = token.get_token_type() { - token.set_token_type(TT::Identifier); - } + if let Some(token) = self.get_token_mut::<0>() + && let TT::IdentifierOrKeyword(_) = token.get_token_type() + { + token.set_token_type(TT::Identifier); } } fn consolidate_current_keyword(&mut self) { - if let Some(token) = self.get_token_mut::<0>() { - if let TT::IdentifierOrKeyword(keyword_kind) = token.get_token_type() { - token.set_token_type(TT::Keyword(keyword_kind)); - } + if let Some(token) = self.get_token_mut::<0>() + && let TT::IdentifierOrKeyword(keyword_kind) = token.get_token_type() + { + token.set_token_type(TT::Keyword(keyword_kind)); } } fn set_current_token_type(&mut self, token_type: RawTokenType) { @@ -2306,10 +2304,10 @@ const PORTABILITY_DIRECTIVES: [KeywordKind; 4] = [KK::Deprecated, KK::Experimental, KK::Platform, KK::Library]; 
fn keyword_consolidator(keyword_predicate: impl Fn(KeywordKind) -> bool) -> impl Fn(&mut LLP) { move |parser| { - if let Some(TT::IdentifierOrKeyword(keyword_kind)) = parser.get_current_token_type() { - if keyword_predicate(keyword_kind) { - parser.consolidate_current_keyword() - } + if let Some(TT::IdentifierOrKeyword(keyword_kind)) = parser.get_current_token_type() + && keyword_predicate(keyword_kind) + { + parser.consolidate_current_keyword() } parser.next_token(); } diff --git a/core/src/formatter.rs b/core/src/formatter.rs index 1aecad56..37ce7878 100644 --- a/core/src/formatter.rs +++ b/core/src/formatter.rs @@ -842,10 +842,10 @@ else impl LogicalLineFormatter for AddSpaceBeforeIdentifier { fn format(&self, formatted_tokens: &mut FormattedTokens<'_>, input: &LogicalLine) { for &token in input.get_tokens() { - if formatted_tokens.get_token_type_for_index(token) == Some(TokenType::Identifier) { - if let Some(formatting_data) = formatted_tokens.get_formatting_data_mut(token) { - formatting_data.spaces_before = 1; - } + if formatted_tokens.get_token_type_for_index(token) == Some(TokenType::Identifier) + && let Some(formatting_data) = formatted_tokens.get_formatting_data_mut(token) + { + formatting_data.spaces_before = 1; } } } @@ -965,15 +965,13 @@ else struct LogicalLinesOnNewLines; impl LogicalLineFormatter for LogicalLinesOnNewLines { fn format(&self, formatted_tokens: &mut FormattedTokens<'_>, input: &LogicalLine) { - if let Some(&first_token) = input.get_tokens().first() { - if first_token != 0 && first_token != formatted_tokens.len() - 1 { - if let Some(formatting_data) = - formatted_tokens.get_formatting_data_mut(first_token) - { - formatting_data.spaces_before = 0; - formatting_data.newlines_before = 1; - } - } + if let Some(&first_token) = input.get_tokens().first() + && first_token != 0 + && first_token != formatted_tokens.len() - 1 + && let Some(formatting_data) = formatted_tokens.get_formatting_data_mut(first_token) + { + formatting_data.spaces_before = 0; + formatting_data.newlines_before = 1; } } } @@ -1158,11 +1156,11 @@ else impl LogicalLineFormatter for RetainSpacesLogcialLinesOnNewLines { fn format(&self, formatted_tokens: &mut FormattedTokens<'_>, input: &LogicalLine) { let first_token = *input.get_tokens().first().unwrap(); - if first_token != 0 && first_token != formatted_tokens.len() - 1 { - if let Some(formatting_data) = formatted_tokens.get_formatting_data_mut(first_token) - { - formatting_data.newlines_before = 1; - } + if first_token != 0 + && first_token != formatted_tokens.len() - 1 + && let Some(formatting_data) = formatted_tokens.get_formatting_data_mut(first_token) + { + formatting_data.newlines_before = 1; } } } diff --git a/core/src/lang.rs b/core/src/lang.rs index eb1fa485..0bf10d3d 100644 --- a/core/src/lang.rs +++ b/core/src/lang.rs @@ -613,7 +613,7 @@ impl<'a> FormattedTokens<'a> { pub fn tokens_mut( &mut self, ) -> impl DoubleEndedIterator, MutTokenErr>, &mut FormattingData)> - + ExactSizeIterator { + + ExactSizeIterator { self.tokens .iter_mut() .zip(&mut self.fmt) diff --git a/core/src/rules/generics_consolidator.rs b/core/src/rules/generics_consolidator.rs index 6574fcec..6c0ff296 100644 --- a/core/src/rules/generics_consolidator.rs +++ b/core/src/rules/generics_consolidator.rs @@ -64,28 +64,27 @@ impl TokenConsolidator for DistinguishGenericTypeParamsConsolidator { ), ) => {} Some(TokenType::Op(OperatorKind::GreaterThan(_))) => { - if comma_found { - if let Some( + if comma_found + && let Some( TokenType::Identifier | 
TokenType::Op(OperatorKind::AddressOf) | TokenType::Keyword(KeywordKind::Not), ) = tokens.get(next_idx + 1).map(TokenData::get_token_type) - { - // cases where it cannot be generics - // Foo(X < Y, U > V) - // Foo(X < Y, U > @V) - // Foo(X < Y, U > not V) - - // cases where it is ambiguous but we prefer to treat it as generics - // Foo(X < Y, U > +V) - // Foo(X < Y, U > -V) - // Foo(X < Y, U > (V)) - // Foo(X < Y, U > [V]) - - // cases where it must be generics - // Foo(X < Y, U > /V) (or any other binary operator) - break; - } + { + // cases where it cannot be generics + // Foo(X < Y, U > V) + // Foo(X < Y, U > @V) + // Foo(X < Y, U > not V) + + // cases where it is ambiguous but we prefer to treat it as generics + // Foo(X < Y, U > +V) + // Foo(X < Y, U > -V) + // Foo(X < Y, U > (V)) + // Foo(X < Y, U > [V]) + + // cases where it must be generics + // Foo(X < Y, U > /V) (or any other binary operator) + break; } let closed_state = state.pop().unwrap(); diff --git a/core/src/rules/ignore_asm_instructions.rs b/core/src/rules/ignore_asm_instructions.rs index ff84ae58..41e75baf 100644 --- a/core/src/rules/ignore_asm_instructions.rs +++ b/core/src/rules/ignore_asm_instructions.rs @@ -28,10 +28,9 @@ mod tests { fn format(&self, formatted_tokens: &mut FormattedTokens<'_>, _input: &[LogicalLine]) { for token_index in 1..formatted_tokens.len() { if let Some(formatting_data) = formatted_tokens.get_formatting_data_mut(token_index) + && formatting_data.newlines_before == 0 { - if formatting_data.newlines_before == 0 { - formatting_data.spaces_before = 1; - } + formatting_data.spaces_before = 1; } } } diff --git a/core/src/rules/optimising_line_formatter/contexts.rs b/core/src/rules/optimising_line_formatter/contexts.rs index ee032153..7c6939c4 100644 --- a/core/src/rules/optimising_line_formatter/contexts.rs +++ b/core/src/rules/optimising_line_formatter/contexts.rs @@ -337,7 +337,7 @@ impl<'a> SpecificContextStack<'a> { /// indices. Typically for modification. 
pub(super) fn ctx_iter_indices( &self, - ) -> impl Iterator)> { + ) -> impl Iterator)> + use<'a> { self.stack .into_iter() .flat_map(|stack| stack.walk_parents()) @@ -374,11 +374,12 @@ impl<'a> SpecificContextStack<'a> { filter: impl ContextFilter, operation: impl Fn(Ref<'_, FormattingContext>, &mut FormattingContextState), ) -> bool { - if let Some((context, data)) = self.get_last_matching_context_mut(node, filter) { - operation(context, data); - true - } else { - false + match self.get_last_matching_context_mut(node, filter) { + Some((context, data)) => { + operation(context, data); + true + } + _ => false, } } fn update_operator_precedences(&self, node: &mut FormattingNode, is_break: bool) { @@ -732,12 +733,11 @@ impl<'a> SpecificContextStack<'a> { data.is_child_broken = true; data.break_anonymous_routine = Some(true); }); - } else if !child_solutions.is_empty() { - if let Some((_, context)) = + } else if !child_solutions.is_empty() + && let Some((_, context)) = self.get_last_matching_context_mut(node, CT::ControlFlowBegin) - { - context.can_break = false; - } + { + context.can_break = false; } } } @@ -1282,7 +1282,7 @@ impl<'builder> LineFormattingContextsBuilder<'builder> { member_access_contexts: NodeRefSet::new(), } } - fn type_stack(&self) -> impl Iterator + 'builder { + fn type_stack(&self) -> impl Iterator + use<'builder> { self.current_context .walk_parents_data() .map(|index| index.context_type) diff --git a/core/src/rules/optimising_line_formatter/mod.rs b/core/src/rules/optimising_line_formatter/mod.rs index 029e8381..6bd926e8 100644 --- a/core/src/rules/optimising_line_formatter/mod.rs +++ b/core/src/rules/optimising_line_formatter/mod.rs @@ -3,8 +3,8 @@ //! use std::cell::RefCell; -use std::collections::hash_map::Entry; use std::collections::BinaryHeap; +use std::collections::hash_map::Entry; use std::rc::Rc; use debug::DebugFormattingNode; @@ -85,10 +85,10 @@ impl LogicalLineFileFormatter for OptimisingLineFormatter { provided by `TokenSpacing` can be removed at the starts of lines. */ for token_index in 0..olf.formatted_tokens.len() { - if let Some(data) = olf.formatted_tokens.get_formatting_data_mut(token_index) { - if data.newlines_before > 0 { - data.spaces_before = 0; - } + if let Some(data) = olf.formatted_tokens.get_formatting_data_mut(token_index) + && data.newlines_before > 0 + { + data.spaces_before = 0; } } @@ -591,8 +591,7 @@ impl<'this> InternalOptimisingLineFormatter<'this, '_> { if last_line_length > self.settings.max_line_length { trace!( "Last line length {} > max line length {}, line is too long", - last_line_length, - self.settings.max_line_length + last_line_length, self.settings.max_line_length ); if let Some((indiff, _stack)) = indifference_line { trace!( @@ -649,7 +648,9 @@ impl<'this> InternalOptimisingLineFormatter<'this, '_> { // indifferent and add its possibilities. 
if let Some((indiff, stack)) = indifference_line { - trace!("Returning to first `Indifferent` decision to push all successors"); + trace!( + "Returning to first `Indifferent` decision to push all successors" + ); node_successors.extend(get_solutions( NL::Break, indiff.clone(), @@ -714,7 +715,9 @@ impl<'this> InternalOptimisingLineFormatter<'this, '_> { trace!("Continuing to explore single successor branch"); } else { if let Some((indiff, stack)) = indifference_line { - trace!("Multiple successors found, returning to first `Indifferent` decision to push all successors"); + trace!( + "Multiple successors found, returning to first `Indifferent` decision to push all successors" + ); node_successors.extend(get_solutions( NL::Break, indiff.clone(), diff --git a/core/src/rules/optimising_line_formatter/multiline_strings.rs b/core/src/rules/optimising_line_formatter/multiline_strings.rs index 1bb791ca..64d689c0 100644 --- a/core/src/rules/optimising_line_formatter/multiline_strings.rs +++ b/core/src/rules/optimising_line_formatter/multiline_strings.rs @@ -43,17 +43,18 @@ impl StringFormatter<'_> { let base_indentation = &last_line[0..count_leading_whitespace(last_line)]; if base_indentation.len() != last_line.trim_end_matches('\'').len() { - log::warn!("Last line of multiline string contains non-whitespace before trailing quote: {last_line:?}"); + log::warn!( + "Last line of multiline string contains non-whitespace before trailing quote: {last_line:?}" + ); continue; }; if let Some(new_string_contents) = self.try_rewrite_string(tok.get_content(), fmt, base_indentation) + && new_string_contents != tok.get_content() { - if new_string_contents != tok.get_content() { - tok.set_content(new_string_contents); - changed = true - } + tok.set_content(new_string_contents); + changed = true } } changed @@ -83,7 +84,9 @@ impl StringFormatter<'_> { continue; } - log::warn!("Whitespace inside line of multiline string does not match whitespace before trailing quote: {line:?}"); + log::warn!( + "Whitespace inside line of multiline string does not match whitespace before trailing quote: {line:?}" + ); return None; }; diff --git a/core/src/rules/optimising_line_formatter/parent_pointer_tree.rs b/core/src/rules/optimising_line_formatter/parent_pointer_tree.rs index a487acdb..455ae945 100644 --- a/core/src/rules/optimising_line_formatter/parent_pointer_tree.rs +++ b/core/src/rules/optimising_line_formatter/parent_pointer_tree.rs @@ -178,12 +178,12 @@ impl<'list, T> NodeRef<'list, T> { /// Iterates from a node through its parents while providing direct access /// to their data - pub fn walk_parents_data(&self) -> impl Iterator> { + pub fn walk_parents_data(&self) -> impl Iterator> + use<'list, T> { self.walk_parents().map(|node| node.get()) } /// Iterate from a node through its parent nodes - pub fn walk_parents(&self) -> impl Iterator> { + pub fn walk_parents(&self) -> impl Iterator> + use<'list, T> { NodeRefIter { next_node: Some(self.index), tree: self.list, diff --git a/core/src/rules/optimising_line_formatter/requirements.rs b/core/src/rules/optimising_line_formatter/requirements.rs index d3705eba..8d352021 100644 --- a/core/src/rules/optimising_line_formatter/requirements.rs +++ b/core/src/rules/optimising_line_formatter/requirements.rs @@ -1,9 +1,9 @@ +use super::InternalOptimisingLineFormatter; +use super::SpecificContextDataStack; use super::contexts::*; use super::get_operator_precedence; use super::is_binary; use super::types::DecisionRequirement; -use super::InternalOptimisingLineFormatter; -use 
super::SpecificContextDataStack; use crate::lang::*; use super::contexts::ContextType as CT; diff --git a/core/src/rules/token_spacing.rs b/core/src/rules/token_spacing.rs index c81dc63c..4a599af8 100644 --- a/core/src/rules/token_spacing.rs +++ b/core/src/rules/token_spacing.rs @@ -28,20 +28,19 @@ impl LogicalLineFileFormatter for TokenSpacing { _ => max_one_either_side(token_index, formatted_tokens), }; - if let Some(spaces_before) = spaces_before { - if let Some(formatting_data) = formatted_tokens.get_formatting_data_mut(token_index) - { - formatting_data.spaces_before = spaces_before; - } + if let Some(spaces_before) = spaces_before + && let Some(formatting_data) = formatted_tokens.get_formatting_data_mut(token_index) + { + formatting_data.spaces_before = spaces_before; } if let Some(spaces_after) = spaces_after { let next_idx = token_index + 1; let next_token_type = formatted_tokens.get_token_type_for_index(next_idx); - if let Some(formatting_data) = formatted_tokens.get_formatting_data_mut(next_idx) { - if next_token_type != Some(TT::Eof) { - formatting_data.spaces_before = spaces_after; - } + if let Some(formatting_data) = formatted_tokens.get_formatting_data_mut(next_idx) + && next_token_type != Some(TT::Eof) + { + formatting_data.spaces_before = spaces_after; } } } diff --git a/front-end/Cargo.toml b/front-end/Cargo.toml index e4870ba9..0954f9df 100644 --- a/front-end/Cargo.toml +++ b/front-end/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "pasfmt" version = "0.7.0+dev" -edition = "2021" +edition = "2024" # To allow splitting integration tests into separate files without creating multiple test binaries autotests = false diff --git a/front-end/benches/benchmark_submodules.rs b/front-end/benches/benchmark_submodules.rs index b732b21b..46084b90 100644 --- a/front-end/benches/benchmark_submodules.rs +++ b/front-end/benches/benchmark_submodules.rs @@ -5,14 +5,14 @@ use std::{ fs::OpenOptions, io::Read, path::{Path, PathBuf}, - process::{exit, Command}, + process::{Command, exit}, time::Duration, }; use walkdir::WalkDir; -use criterion::{criterion_group, criterion_main, Criterion}; +use criterion::{Criterion, criterion_group, criterion_main}; -use pasfmt::{format, FormattingConfig}; +use pasfmt::{FormattingConfig, format}; use pasfmt_orchestrator::predule::*; pasfmt_config!(Config); diff --git a/front-end/src/main.rs b/front-end/src/main.rs index 0f27084b..0abbb300 100644 --- a/front-end/src/main.rs +++ b/front-end/src/main.rs @@ -3,7 +3,7 @@ use std::sync::atomic::AtomicBool; use std::sync::atomic::Ordering; use log::error; -use pasfmt::{format, FormattingConfig}; +use pasfmt::{FormattingConfig, format}; use pasfmt_orchestrator::predule::*; pasfmt_config!( diff --git a/front-end/tests/file_discovery.rs b/front-end/tests/file_discovery.rs index 938657a4..dca932da 100644 --- a/front-end/tests/file_discovery.rs +++ b/front-end/tests/file_discovery.rs @@ -1,4 +1,4 @@ -use assert_fs::{prelude::*, TempDir}; +use assert_fs::{TempDir, prelude::*}; use predicates::prelude::*; use std::path::Path; use std::path::PathBuf; diff --git a/front-end/tests/idempotence.rs b/front-end/tests/idempotence.rs index 0ccca258..71da8e58 100644 --- a/front-end/tests/idempotence.rs +++ b/front-end/tests/idempotence.rs @@ -1,4 +1,4 @@ -use assert_fs::{prelude::*, TempDir}; +use assert_fs::{TempDir, prelude::*}; use predicates::prelude::*; use crate::utils::windows::fmt_with_lock; diff --git a/front-end/tests/logging.rs b/front-end/tests/logging.rs index 584e2556..32441bfc 100644 --- a/front-end/tests/logging.rs +++ 
b/front-end/tests/logging.rs @@ -1,4 +1,4 @@ -use assert_fs::{prelude::*, TempDir}; +use assert_fs::{TempDir, prelude::*}; use predicates::prelude::*; use crate::utils::*; diff --git a/front-end/tests/modes.rs b/front-end/tests/modes.rs index 18d791f9..098c9780 100644 --- a/front-end/tests/modes.rs +++ b/front-end/tests/modes.rs @@ -1,4 +1,4 @@ -use assert_fs::{prelude::*, TempDir}; +use assert_fs::{TempDir, prelude::*}; use predicates::prelude::*; use std::fs::read_to_string; use std::path::Path; diff --git a/front-end/tests/utils.rs b/front-end/tests/utils.rs index 6b2fe583..7173e45d 100644 --- a/front-end/tests/utils.rs +++ b/front-end/tests/utils.rs @@ -23,7 +23,7 @@ pub mod windows { use std::os::windows::io::AsRawHandle; use std::path::Path; use windows_sys::Win32::{ - Storage::FileSystem::{LockFileEx, LOCK_FILE_FLAGS}, + Storage::FileSystem::{LOCK_FILE_FLAGS, LockFileEx}, System::IO::OVERLAPPED, }; diff --git a/orchestrator/Cargo.toml b/orchestrator/Cargo.toml index e51e6910..53caf5c7 100644 --- a/orchestrator/Cargo.toml +++ b/orchestrator/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "pasfmt-orchestrator" version = "0.4.0+dev" -edition = "2021" +edition = "2024" [dependencies] clap = { workspace = true, features = ["derive", "wrap_help"] } diff --git a/orchestrator/src/command_line.rs b/orchestrator/src/command_line.rs index dbb6451c..1675b3e1 100644 --- a/orchestrator/src/command_line.rs +++ b/orchestrator/src/command_line.rs @@ -9,14 +9,14 @@ use std::{ use anstyle::AnsiColor; use anyhow::Context; -pub use clap::{self, error::ErrorKind, CommandFactory, Parser}; +pub use clap::{self, CommandFactory, Parser, error::ErrorKind}; use clap::{ - builder::{PossibleValuesParser, StyledStr, Styles, TypedValueParser}, Args, ValueEnum, + builder::{PossibleValuesParser, StyledStr, Styles, TypedValueParser}, }; use config::{Config, File, FileFormat}; -use log::{debug, LevelFilter}; +use log::{LevelFilter, debug}; use crate::formatting_orchestrator::FormatterConfiguration; @@ -389,7 +389,7 @@ impl FormatterConfiguration for PasFmtConfiguration { #[cfg(test)] mod tests { use super::*; - use assert_fs::{prelude::*, TempDir}; + use assert_fs::{TempDir, prelude::*}; use serde::Deserialize; use spectral::prelude::*; diff --git a/orchestrator/src/file_formatter.rs b/orchestrator/src/file_formatter.rs index d7b40d1f..774c39d9 100644 --- a/orchestrator/src/file_formatter.rs +++ b/orchestrator/src/file_formatter.rs @@ -1,4 +1,4 @@ -use anyhow::{bail, Context}; +use anyhow::{Context, bail}; use encoding_rs::Encoding; use log::*; use std::{ diff --git a/web/Cargo.toml b/web/Cargo.toml index eb589a30..dc7632c2 100644 --- a/web/Cargo.toml +++ b/web/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "web" version = "0.0.0" -edition = "2021" +edition = "2024" [lib] crate-type = ["cdylib"] diff --git a/web/src/lib.rs b/web/src/lib.rs index d6202b7e..0a4753c5 100644 --- a/web/src/lib.rs +++ b/web/src/lib.rs @@ -1,4 +1,4 @@ -use pasfmt::{make_formatter, FormattingConfig}; +use pasfmt::{FormattingConfig, make_formatter}; use pasfmt_core::prelude::FileOptions; use wasm_bindgen::prelude::*;
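
For readers skimming the patch, the recurring rewrites all follow from the edition bump to 2024: nested `if let` blocks collapse into `let` chains, `impl Trait + '_` return types become precise-capturing `impl Trait + use<'_>`, and the import reshuffling is rustfmt's 2024 style edition. The sketch below is not taken from pasfmt — `classify_nested`, `classify_chained`, and `token_types` are invented names — it is only a minimal, self-contained restatement of those two language patterns, assuming Rust 1.88+ with `edition = "2024"`.

```rust
// Standalone sketch, not part of the patch above. Names are illustrative,
// not pasfmt APIs.

/// Edition 2021 style: nested `if let` plus an inner `if`.
fn classify_nested(input: &str, table: &[Option<(&'static str, u8)>], key: usize) -> Option<u8> {
    if let Some(Some((candidate, kind))) = table.get(key) {
        if input.eq_ignore_ascii_case(candidate) {
            return Some(*kind);
        }
    }
    None
}

/// Edition 2024 style: the same logic as a single `let` chain, the shape the
/// diff uses throughout (e.g. in `get_word_token_type`).
fn classify_chained(input: &str, table: &[Option<(&'static str, u8)>], key: usize) -> Option<u8> {
    if let Some(Some((candidate, kind))) = table.get(key)
        && input.eq_ignore_ascii_case(candidate)
    {
        return Some(*kind);
    }
    None
}

/// Precise capturing: `+ use<'a>` states exactly which generics the returned
/// `impl Trait` captures. The diff writes the elided form, `+ use<'_>`.
fn token_types<'a>(tokens: &'a [u8]) -> impl DoubleEndedIterator<Item = u8> + use<'a> {
    tokens.iter().copied()
}

fn main() {
    let table = [Some(("begin", 1u8)), None];
    // Both spellings are behaviourally identical.
    assert_eq!(
        classify_nested("BEGIN", &table, 0),
        classify_chained("BEGIN", &table, 0)
    );
    assert_eq!(classify_chained("other", &table, 0), None);
    assert_eq!(token_types(&[1, 2, 3]).rev().next(), Some(3));
}
```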