From 6af0d08da774750a0a3b94d6821980ac7f0b3b21 Mon Sep 17 00:00:00 2001 From: Yuri Astrakhan Date: Mon, 16 Oct 2023 00:57:58 -0400 Subject: [PATCH] Inline format-args automata This makes the code a bit easier to read and smaller. Some of it was done with this command, and later fixed by hand: ``` cargo clippy --workspace --allow-dirty --fix --benches --tests --bins -- -A clippy::all -W clippy::uninlined_format_args ``` --- regex-automata/src/dfa/accel.rs | 10 +-- regex-automata/src/dfa/dense.rs | 31 ++++--- regex-automata/src/dfa/determinize.rs | 2 +- regex-automata/src/dfa/onepass.rs | 24 +++--- regex-automata/src/dfa/sparse.rs | 5 +- regex-automata/src/hybrid/dfa.rs | 9 +-- regex-automata/src/hybrid/error.rs | 7 +- regex-automata/src/meta/error.rs | 2 +- regex-automata/src/meta/limited.rs | 12 +-- regex-automata/src/meta/literal.rs | 4 +- regex-automata/src/meta/regex.rs | 4 +- regex-automata/src/meta/strategy.rs | 81 +++++++++---------- regex-automata/src/meta/wrappers.rs | 14 ++-- regex-automata/src/nfa/thompson/error.rs | 17 ++-- .../src/nfa/thompson/literal_trie.rs | 4 +- regex-automata/src/nfa/thompson/nfa.rs | 8 +- regex-automata/src/nfa/thompson/pikevm.rs | 6 +- regex-automata/src/nfa/thompson/range_trie.rs | 6 +- regex-automata/src/util/alphabet.rs | 8 +- regex-automata/src/util/captures.rs | 4 +- regex-automata/src/util/escape.rs | 2 +- regex-automata/src/util/iter.rs | 15 ++-- .../src/util/prefilter/aho_corasick.rs | 2 +- regex-automata/src/util/search.rs | 4 +- regex-automata/src/util/wire.rs | 39 ++++----- regex-automata/tests/dfa/onepass/suite.rs | 2 +- regex-automata/tests/dfa/suite.rs | 2 +- regex-automata/tests/hybrid/suite.rs | 2 +- regex-automata/tests/meta/suite.rs | 2 +- .../tests/nfa/thompson/backtrack/suite.rs | 2 +- .../tests/nfa/thompson/pikevm/suite.rs | 2 +- 31 files changed, 149 insertions(+), 183 deletions(-) diff --git a/regex-automata/src/dfa/accel.rs b/regex-automata/src/dfa/accel.rs index c0ba18ea8..47c846048 100644 --- a/regex-automata/src/dfa/accel.rs +++ b/regex-automata/src/dfa/accel.rs @@ -102,7 +102,7 @@ pub(crate) fn find_fwd( 2 => memchr::memchr2(bs[0], bs[1], &haystack[at..])?, 3 => memchr::memchr3(bs[0], bs[1], bs[2], &haystack[at..])?, 0 => panic!("cannot find with empty needles"), - n => panic!("invalid needles length: {}", n), + n => panic!("invalid needles length: {n}"), }; Some(at + i) } @@ -122,7 +122,7 @@ pub(crate) fn find_rev( 2 => memchr::memrchr2(bs[0], bs[1], &haystack[..at]), 3 => memchr::memrchr3(bs[0], bs[1], bs[2], &haystack[..at]), 0 => panic!("cannot find with empty needles"), - n => panic!("invalid needles length: {}", n), + n => panic!("invalid needles length: {n}"), } } @@ -267,7 +267,7 @@ impl> Accels { #[cfg_attr(feature = "perf-inline", inline(always))] pub fn needles(&self, i: usize) -> &[u8] { if i >= self.len() { - panic!("invalid accelerator index {}", i); + panic!("invalid accelerator index {i}"); } let bytes = self.as_bytes(); let offset = ACCEL_TY_SIZE + i * ACCEL_CAP; @@ -313,8 +313,8 @@ impl> Accels { assert_eq!( nwrite % ACCEL_TY_SIZE, 0, - "expected accelerator bytes written to be a multiple of {}", - ACCEL_TY_SIZE, + "expected accelerator bytes written to be a multiple \ + of {ACCEL_TY_SIZE}", ); if dst.len() < nwrite { return Err(SerializeError::buffer_too_small("accelerators")); diff --git a/regex-automata/src/dfa/dense.rs b/regex-automata/src/dfa/dense.rs index 056213b28..057536303 100644 --- a/regex-automata/src/dfa/dense.rs +++ b/regex-automata/src/dfa/dense.rs @@ -2837,8 +2837,8 @@ impl OwnedDFA { } 
assert!( !matches.contains_key(&start_id), - "{:?} is both a start and a match state, which is not allowed", - start_id, + "{start_id:?} is both a start and a match state, \ + which is not allowed", ); is_start.insert(start_id); } @@ -3098,7 +3098,7 @@ impl> fmt::Debug for DFA { } else { self.to_index(state.id()) }; - write!(f, "{:06?}: ", id)?; + write!(f, "{id:06?}: ")?; state.fmt(f)?; write!(f, "\n")?; } @@ -3114,11 +3114,11 @@ impl> fmt::Debug for DFA { Anchored::No => writeln!(f, "START-GROUP(unanchored)")?, Anchored::Yes => writeln!(f, "START-GROUP(anchored)")?, Anchored::Pattern(pid) => { - writeln!(f, "START_GROUP(pattern: {:?})", pid)? + writeln!(f, "START_GROUP(pattern: {pid:?})")? } } } - writeln!(f, " {:?} => {:06?}", sty, id)?; + writeln!(f, " {sty:?} => {id:06?}")?; } if self.pattern_len() > 1 { writeln!(f, "")?; @@ -3129,13 +3129,13 @@ impl> fmt::Debug for DFA { } else { self.to_index(id) }; - write!(f, "MATCH({:06?}): ", id)?; + write!(f, "MATCH({id:06?}): ")?; for (i, &pid) in self.ms.pattern_id_slice(i).iter().enumerate() { if i > 0 { write!(f, ", ")?; } - write!(f, "{:?}", pid)?; + write!(f, "{pid:?}")?; } writeln!(f, "")?; } @@ -3525,8 +3525,8 @@ impl TransitionTable> { /// /// Both id1 and id2 must point to valid states, otherwise this panics. fn swap(&mut self, id1: StateID, id2: StateID) { - assert!(self.is_valid(id1), "invalid 'id1' state: {:?}", id1); - assert!(self.is_valid(id2), "invalid 'id2' state: {:?}", id2); + assert!(self.is_valid(id1), "invalid 'id1' state: {id1:?}"); + assert!(self.is_valid(id2), "invalid 'id2' state: {id2:?}"); // We only need to swap the parts of the state that are used. So if the // stride is 64, but the alphabet length is only 33, then we save a lot // of work. @@ -4277,7 +4277,7 @@ impl> StartTable { let len = self .pattern_len .expect("start states for each pattern enabled"); - assert!(pid < len, "invalid pattern ID {:?}", pid); + assert!(pid < len, "invalid pattern ID {pid:?}"); self.stride .checked_mul(pid) .unwrap() @@ -4868,9 +4868,9 @@ impl<'a> fmt::Debug for State<'a> { write!(f, ", ")?; } if start == end { - write!(f, "{:?} => {:?}", start, id)?; + write!(f, "{start:?} => {id:?}")?; } else { - write!(f, "{:?}-{:?} => {:?}", start, end, id)?; + write!(f, "{start:?}-{end:?} => {id:?}")?; } } Ok(()) @@ -5135,7 +5135,7 @@ impl core::fmt::Display for BuildError { match self.kind() { BuildErrorKind::NFA(_) => write!(f, "error building NFA"), BuildErrorKind::Unsupported(ref msg) => { - write!(f, "unsupported regex feature for DFAs: {}", msg) + write!(f, "unsupported regex feature for DFAs: {msg}") } BuildErrorKind::TooManyStates => write!( f, @@ -5167,11 +5167,10 @@ impl core::fmt::Display for BuildError { ), BuildErrorKind::DFAExceededSizeLimit { limit } => write!( f, - "DFA exceeded size limit of {:?} during determinization", - limit, + "DFA exceeded size limit of {limit:?} during determinization", ), BuildErrorKind::DeterminizeExceededSizeLimit { limit } => { - write!(f, "determinization exceeded size limit of {:?}", limit) + write!(f, "determinization exceeded size limit of {limit:?}") } } } diff --git a/regex-automata/src/dfa/determinize.rs b/regex-automata/src/dfa/determinize.rs index 7a49c2453..d53815cbd 100644 --- a/regex-automata/src/dfa/determinize.rs +++ b/regex-automata/src/dfa/determinize.rs @@ -280,7 +280,7 @@ impl<'a> Runner<'a> { let per_elem = size_of::() + size_of::>(); let pats = total_pat_len * size_of::(); let mem = (matches.len() * per_elem) + pats; - log::debug!("matches map built, memory usage: {}", mem); + 
log::debug!("matches map built, memory usage: {mem}"); } // At this point, we shuffle the "special" states in the final DFA. // This permits a DFA's match loop to detect a match condition (among diff --git a/regex-automata/src/dfa/onepass.rs b/regex-automata/src/dfa/onepass.rs index e06d37cf4..700f2b18b 100644 --- a/regex-automata/src/dfa/onepass.rs +++ b/regex-automata/src/dfa/onepass.rs @@ -2408,7 +2408,7 @@ impl core::fmt::Debug for DFA { } write!(f, "{:06?}", sid.as_usize())?; if !pateps.is_empty() { - write!(f, " ({:?})", pateps)?; + write!(f, " ({pateps:?})")?; } write!(f, ": ")?; debug_state_transitions(f, self, sid)?; @@ -2939,7 +2939,7 @@ impl core::fmt::Debug for Slots { fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { write!(f, "S")?; for slot in self.iter() { - write!(f, "-{:?}", slot)?; + write!(f, "-{slot:?}")?; } Ok(()) } @@ -3050,23 +3050,21 @@ impl core::fmt::Display for BuildError { Word(_) => write!(f, "NFA contains Unicode word boundary"), TooManyStates { limit } => write!( f, - "one-pass DFA exceeded a limit of {:?} for number of states", - limit, + "one-pass DFA exceeded a limit of {limit:?} \ + for number of states", ), TooManyPatterns { limit } => write!( f, - "one-pass DFA exceeded a limit of {:?} for number of patterns", - limit, + "one-pass DFA exceeded a limit of {limit:?} \ + for number of patterns", ), UnsupportedLook { look } => write!( f, - "one-pass DFA does not support the {:?} assertion", - look, + "one-pass DFA does not support the {look:?} assertion", ), ExceededSizeLimit { limit } => write!( f, - "one-pass DFA exceeded size limit of {:?} during building", - limit, + "one-pass DFA exceeded size limit of {limit:?} during building", ), NotOnePass { msg } => write!( f, @@ -3089,7 +3087,7 @@ mod tests { let predicate = |err: &str| err.contains("conflicting transition"); let err = DFA::new(r"a*[ab]").unwrap_err().to_string(); - assert!(predicate(&err), "{}", err); + assert!(predicate(&err), "{err}"); } #[test] @@ -3099,7 +3097,7 @@ mod tests { }; let err = DFA::new(r"(^|$)a").unwrap_err().to_string(); - assert!(predicate(&err), "{}", err); + assert!(predicate(&err), "{err}"); } #[test] @@ -3109,7 +3107,7 @@ mod tests { }; let err = DFA::new_many(&[r"^", r"$"]).unwrap_err().to_string(); - assert!(predicate(&err), "{}", err); + assert!(predicate(&err), "{err}"); } // This test is meant to build a one-pass regex with the maximum number of diff --git a/regex-automata/src/dfa/sparse.rs b/regex-automata/src/dfa/sparse.rs index 6a94bccc1..c03d02c86 100644 --- a/regex-automata/src/dfa/sparse.rs +++ b/regex-automata/src/dfa/sparse.rs @@ -299,8 +299,7 @@ impl DFA> { ); assert!( transition_len <= 257, - "expected transition length {} to be <= 257", - transition_len, + "expected transition length {transition_len} to be <= 257", ); // Fill in the transition length. 
@@ -2158,7 +2157,7 @@ impl> StartTable { let len = self .pattern_len .expect("start states for each pattern enabled"); - assert!(pid < len, "invalid pattern ID {:?}", pid); + assert!(pid < len, "invalid pattern ID {pid:?}"); self.stride .checked_mul(pid) .unwrap() diff --git a/regex-automata/src/hybrid/dfa.rs b/regex-automata/src/hybrid/dfa.rs index 92956911f..b4c9cf444 100644 --- a/regex-automata/src/hybrid/dfa.rs +++ b/regex-automata/src/hybrid/dfa.rs @@ -2598,8 +2598,8 @@ impl<'i, 'c> Lazy<'i, 'c> { unit: alphabet::Unit, to: LazyStateID, ) { - assert!(self.as_ref().is_valid(from), "invalid 'from' id: {:?}", from); - assert!(self.as_ref().is_valid(to), "invalid 'to' id: {:?}", to); + assert!(self.as_ref().is_valid(from), "invalid 'from' id: {from:?}"); + assert!(self.as_ref().is_valid(to), "invalid 'to' id: {to:?}"); let offset = from.as_usize_untagged() + self.dfa.classes.get_by_unit(unit); self.cache.trans[offset] = to; @@ -4080,10 +4080,9 @@ impl Builder { // and mush on. if self.config.get_skip_cache_capacity_check() { debug!( - "given capacity ({}) is too small, \ + "given capacity ({cache_capacity}) is too small, \ since skip_cache_capacity_check is enabled, \ - setting cache capacity to minimum ({})", - cache_capacity, min_cache, + setting cache capacity to minimum ({min_cache})", ); cache_capacity = min_cache; } else { diff --git a/regex-automata/src/hybrid/error.rs b/regex-automata/src/hybrid/error.rs index d134e7ec9..93e58dd54 100644 --- a/regex-automata/src/hybrid/error.rs +++ b/regex-automata/src/hybrid/error.rs @@ -80,16 +80,15 @@ impl core::fmt::Display for BuildError { BuildErrorKind::InsufficientCacheCapacity { minimum, given } => { write!( f, - "given cache capacity ({}) is smaller than \ - minimum required ({})", - given, minimum, + "given cache capacity ({given}) is smaller than \ + minimum required ({minimum})", ) } BuildErrorKind::InsufficientStateIDCapacity { ref err } => { err.fmt(f) } BuildErrorKind::Unsupported(ref msg) => { - write!(f, "unsupported regex feature for DFAs: {}", msg) + write!(f, "unsupported regex feature for DFAs: {msg}") } } } diff --git a/regex-automata/src/meta/error.rs b/regex-automata/src/meta/error.rs index ea9a3160e..9ead729bb 100644 --- a/regex-automata/src/meta/error.rs +++ b/regex-automata/src/meta/error.rs @@ -234,7 +234,7 @@ impl From for RetryFailError { // backtracker's wrapper will never hand out a backtracker engine // when the haystack would be too long. HaystackTooLong { .. } | UnsupportedAnchored { .. 
} => { - unreachable!("found impossible error in meta engine: {}", merr) + unreachable!("found impossible error in meta engine: {merr}") } } } diff --git a/regex-automata/src/meta/limited.rs b/regex-automata/src/meta/limited.rs index 5653adc9a..ce6708c70 100644 --- a/regex-automata/src/meta/limited.rs +++ b/regex-automata/src/meta/limited.rs @@ -78,9 +78,8 @@ pub(crate) fn dfa_try_search_half_rev( at -= 1; if at < min_start { trace!( - "reached position {} which is before the previous literal \ + "reached position {at} which is before the previous literal \ match, quitting to avoid quadratic behavior", - at, ); return Err(RetryError::Quadratic(RetryQuadraticError::new())); } @@ -114,9 +113,8 @@ pub(crate) fn dfa_try_search_half_rev( && !was_dead { trace!( - "reached beginning of search at offset {} without hitting \ + "reached beginning of search at offset {at} without hitting \ a dead state, quitting to avoid potential false positive match", - at, ); return Err(RetryError::Quadratic(RetryQuadraticError::new())); } @@ -161,9 +159,8 @@ pub(crate) fn hybrid_try_search_half_rev( at -= 1; if at < min_start { trace!( - "reached position {} which is before the previous literal \ + "reached position {at} which is before the previous literal \ match, quitting to avoid quadratic behavior", - at, ); return Err(RetryError::Quadratic(RetryQuadraticError::new())); } @@ -176,9 +173,8 @@ pub(crate) fn hybrid_try_search_half_rev( && !was_dead { trace!( - "reached beginning of search at offset {} without hitting \ + "reached beginning of search at offset {at} without hitting \ a dead state, quitting to avoid potential false positive match", - at, ); return Err(RetryError::Quadratic(RetryQuadraticError::new())); } diff --git a/regex-automata/src/meta/literal.rs b/regex-automata/src/meta/literal.rs index a68b93b7a..fac68d005 100644 --- a/regex-automata/src/meta/literal.rs +++ b/regex-automata/src/meta/literal.rs @@ -53,11 +53,11 @@ pub(crate) fn alternation_literals( HirKind::Literal(Literal(ref bytes)) => { lit.extend_from_slice(bytes); } - _ => unreachable!("expected literal, got {:?}", e), + _ => unreachable!("expected literal, got {e:?}"), } } } - _ => unreachable!("expected literal or concat, got {:?}", alt), + _ => unreachable!("expected literal or concat, got {alt:?}"), } lits.push(lit); } diff --git a/regex-automata/src/meta/regex.rs b/regex-automata/src/meta/regex.rs index 8cfdecbec..a812b4012 100644 --- a/regex-automata/src/meta/regex.rs +++ b/regex-automata/src/meta/regex.rs @@ -3413,9 +3413,9 @@ impl Builder { .last() .unwrap_or(0); if maxoff < p.len() { - debug!("{:?}: {}[... snip ...]", pid, &p[..maxoff]); + debug!("{pid:?}: {}[... 
snip ...]", &p[..maxoff]); } else { - debug!("{:?}: {}", pid, p); + debug!("{pid:?}: {p}"); } } } diff --git a/regex-automata/src/meta/strategy.rs b/regex-automata/src/meta/strategy.rs index 04f2ba3c3..ebb876b2b 100644 --- a/regex-automata/src/meta/strategy.rs +++ b/regex-automata/src/meta/strategy.rs @@ -711,7 +711,7 @@ impl Strategy for Core { match e.try_search(input) { Ok(x) => x, Err(_err) => { - trace!("full DFA search failed: {}", _err); + trace!("full DFA search failed: {_err}"); self.search_nofail(cache, input) } } @@ -720,7 +720,7 @@ impl Strategy for Core { match e.try_search(&mut cache.hybrid, input) { Ok(x) => x, Err(_err) => { - trace!("lazy DFA search failed: {}", _err); + trace!("lazy DFA search failed: {_err}"); self.search_nofail(cache, input) } } @@ -743,7 +743,7 @@ impl Strategy for Core { match e.try_search_half_fwd(input) { Ok(x) => x, Err(_err) => { - trace!("full DFA half search failed: {}", _err); + trace!("full DFA half search failed: {_err}"); self.search_half_nofail(cache, input) } } @@ -752,7 +752,7 @@ impl Strategy for Core { match e.try_search_half_fwd(&mut cache.hybrid, input) { Ok(x) => x, Err(_err) => { - trace!("lazy DFA half search failed: {}", _err); + trace!("lazy DFA half search failed: {_err}"); self.search_half_nofail(cache, input) } } @@ -771,7 +771,7 @@ impl Strategy for Core { match e.try_search_half_fwd(input) { Ok(x) => x.is_some(), Err(_err) => { - trace!("full DFA half search failed: {}", _err); + trace!("full DFA half search failed: {_err}"); self.is_match_nofail(cache, input) } } @@ -783,7 +783,7 @@ impl Strategy for Core { match e.try_search_half_fwd(&mut cache.hybrid, input) { Ok(x) => x.is_some(), Err(_err) => { - trace!("lazy DFA half search failed: {}", _err); + trace!("lazy DFA half search failed: {_err}"); self.is_match_nofail(cache, input) } } @@ -830,7 +830,7 @@ impl Strategy for Core { Some(Ok(Some(m))) => m, Some(Ok(None)) => return None, Some(Err(_err)) => { - trace!("fast capture search failed: {}", _err); + trace!("fast capture search failed: {_err}"); return self.search_slots_nofail(cache, input, slots); } None => { @@ -873,7 +873,7 @@ impl Strategy for Core { Ok(()) => return, Err(err) => err, }; - trace!("fast overlapping search failed: {}", _err); + trace!("fast overlapping search failed: {_err}"); } else if let Some(e) = self.hybrid.get(input) { trace!( "using lazy DFA for overlapping search at {:?}", @@ -889,7 +889,7 @@ impl Strategy for Core { } Err(err) => err, }; - trace!("fast overlapping search failed: {}", _err); + trace!("fast overlapping search failed: {_err}"); } trace!( "using PikeVM for overlapping search at {:?}", @@ -1012,7 +1012,7 @@ impl Strategy for ReverseAnchored { } match self.try_search_half_anchored_rev(cache, input) { Err(_err) => { - trace!("fast reverse anchored search failed: {}", _err); + trace!("fast reverse anchored search failed: {_err}"); self.core.search_nofail(cache, input) } Ok(None) => None, @@ -1033,7 +1033,7 @@ impl Strategy for ReverseAnchored { } match self.try_search_half_anchored_rev(cache, input) { Err(_err) => { - trace!("fast reverse anchored search failed: {}", _err); + trace!("fast reverse anchored search failed: {_err}"); self.core.search_half_nofail(cache, input) } Ok(None) => None, @@ -1056,7 +1056,7 @@ impl Strategy for ReverseAnchored { } match self.try_search_half_anchored_rev(cache, input) { Err(_err) => { - trace!("fast reverse anchored search failed: {}", _err); + trace!("fast reverse anchored search failed: {_err}"); self.core.is_match_nofail(cache, input) } Ok(None) 
=> false, @@ -1076,7 +1076,7 @@ impl Strategy for ReverseAnchored { } match self.try_search_half_anchored_rev(cache, input) { Err(_err) => { - trace!("fast reverse anchored search failed: {}", _err); + trace!("fast reverse anchored search failed: {_err}"); self.core.search_slots_nofail(cache, input, slots) } Ok(None) => None, @@ -1220,7 +1220,7 @@ impl ReverseSuffix { None => return Ok(None), Some(span) => span, }; - trace!("reverse suffix scan found suffix match at {:?}", litmatch); + trace!("reverse suffix scan found suffix match at {litmatch:?}"); let revinput = input .clone() .anchored(Anchored::Yes) @@ -1324,11 +1324,11 @@ impl Strategy for ReverseSuffix { } match self.try_search_half_start(cache, input) { Err(RetryError::Quadratic(_err)) => { - trace!("reverse suffix optimization failed: {}", _err); + trace!("reverse suffix optimization failed: {_err}"); self.core.search(cache, input) } Err(RetryError::Fail(_err)) => { - trace!("reverse suffix reverse fast search failed: {}", _err); + trace!("reverse suffix reverse fast search failed: {_err}"); self.core.search_nofail(cache, input) } Ok(None) => None, @@ -1340,8 +1340,7 @@ impl Strategy for ReverseSuffix { match self.try_search_half_fwd(cache, &fwdinput) { Err(_err) => { trace!( - "reverse suffix forward fast search failed: {}", - _err + "reverse suffix forward fast search failed: {_err}" ); self.core.search_nofail(cache, input) } @@ -1371,13 +1370,12 @@ impl Strategy for ReverseSuffix { } match self.try_search_half_start(cache, input) { Err(RetryError::Quadratic(_err)) => { - trace!("reverse suffix half optimization failed: {}", _err); + trace!("reverse suffix half optimization failed: {_err}"); self.core.search_half(cache, input) } Err(RetryError::Fail(_err)) => { trace!( - "reverse suffix reverse fast half search failed: {}", - _err + "reverse suffix reverse fast half search failed: {_err}" ); self.core.search_half_nofail(cache, input) } @@ -1399,8 +1397,7 @@ impl Strategy for ReverseSuffix { match self.try_search_half_fwd(cache, &fwdinput) { Err(_err) => { trace!( - "reverse suffix forward fast search failed: {}", - _err + "reverse suffix forward fast search failed: {_err}" ); self.core.search_half_nofail(cache, input) } @@ -1423,13 +1420,12 @@ impl Strategy for ReverseSuffix { } match self.try_search_half_start(cache, input) { Err(RetryError::Quadratic(_err)) => { - trace!("reverse suffix half optimization failed: {}", _err); + trace!("reverse suffix half optimization failed: {_err}"); self.core.is_match_nofail(cache, input) } Err(RetryError::Fail(_err)) => { trace!( - "reverse suffix reverse fast half search failed: {}", - _err + "reverse suffix reverse fast half search failed: {_err}" ); self.core.is_match_nofail(cache, input) } @@ -1456,16 +1452,13 @@ impl Strategy for ReverseSuffix { } let hm_start = match self.try_search_half_start(cache, input) { Err(RetryError::Quadratic(_err)) => { - trace!( - "reverse suffix captures optimization failed: {}", - _err - ); + trace!("reverse suffix captures optimization failed: {_err}"); return self.core.search_slots(cache, input, slots); } Err(RetryError::Fail(_err)) => { trace!( - "reverse suffix reverse fast captures search failed: {}", - _err + "reverse suffix reverse fast captures search failed: \ + {_err}" ); return self.core.search_slots_nofail(cache, input, slots); } @@ -1638,15 +1631,13 @@ impl ReverseInner { }; if litmatch.start < min_pre_start { trace!( - "found inner prefilter match at {:?}, which starts \ - before the end of the last forward scan at {}, \ + "found inner 
prefilter match at {litmatch:?}, which starts \ + before the end of the last forward scan at {min_pre_start}, \ quitting to avoid quadratic behavior", - litmatch, - min_pre_start, ); return Err(RetryError::Quadratic(RetryQuadraticError::new())); } - trace!("reverse inner scan found inner match at {:?}", litmatch); + trace!("reverse inner scan found inner match at {litmatch:?}"); let revinput = input .clone() .anchored(Anchored::Yes) @@ -1786,11 +1777,11 @@ impl Strategy for ReverseInner { } match self.try_search_full(cache, input) { Err(RetryError::Quadratic(_err)) => { - trace!("reverse inner optimization failed: {}", _err); + trace!("reverse inner optimization failed: {_err}"); self.core.search(cache, input) } Err(RetryError::Fail(_err)) => { - trace!("reverse inner fast search failed: {}", _err); + trace!("reverse inner fast search failed: {_err}"); self.core.search_nofail(cache, input) } Ok(matornot) => matornot, @@ -1808,11 +1799,11 @@ impl Strategy for ReverseInner { } match self.try_search_full(cache, input) { Err(RetryError::Quadratic(_err)) => { - trace!("reverse inner half optimization failed: {}", _err); + trace!("reverse inner half optimization failed: {_err}"); self.core.search_half(cache, input) } Err(RetryError::Fail(_err)) => { - trace!("reverse inner fast half search failed: {}", _err); + trace!("reverse inner fast half search failed: {_err}"); self.core.search_half_nofail(cache, input) } Ok(None) => None, @@ -1827,11 +1818,11 @@ impl Strategy for ReverseInner { } match self.try_search_full(cache, input) { Err(RetryError::Quadratic(_err)) => { - trace!("reverse inner half optimization failed: {}", _err); + trace!("reverse inner half optimization failed: {_err}"); self.core.is_match_nofail(cache, input) } Err(RetryError::Fail(_err)) => { - trace!("reverse inner fast half search failed: {}", _err); + trace!("reverse inner fast half search failed: {_err}"); self.core.is_match_nofail(cache, input) } Ok(None) => false, @@ -1857,11 +1848,11 @@ impl Strategy for ReverseInner { } let m = match self.try_search_full(cache, input) { Err(RetryError::Quadratic(_err)) => { - trace!("reverse inner captures optimization failed: {}", _err); + trace!("reverse inner captures optimization failed: {_err}"); return self.core.search_slots(cache, input, slots); } Err(RetryError::Fail(_err)) => { - trace!("reverse inner fast captures search failed: {}", _err); + trace!("reverse inner fast captures search failed: {_err}"); return self.core.search_slots_nofail(cache, input, slots); } Ok(None) => return None, diff --git a/regex-automata/src/meta/wrappers.rs b/regex-automata/src/meta/wrappers.rs index 95d0e07b1..fd1d5a144 100644 --- a/regex-automata/src/meta/wrappers.rs +++ b/regex-automata/src/meta/wrappers.rs @@ -408,7 +408,7 @@ impl OnePassEngine { let engine = match result { Ok(engine) => engine, Err(_err) => { - debug!("OnePass failed to build: {}", _err); + debug!("OnePass failed to build: {_err}"); return None; } }; @@ -606,7 +606,7 @@ impl HybridEngine { let fwd = match result { Ok(fwd) => fwd, Err(_err) => { - debug!("forward lazy DFA failed to build: {}", _err); + debug!("forward lazy DFA failed to build: {_err}"); return None; } }; @@ -622,7 +622,7 @@ impl HybridEngine { let rev = match result { Ok(rev) => rev, Err(_err) => { - debug!("reverse lazy DFA failed to build: {}", _err); + debug!("reverse lazy DFA failed to build: {_err}"); return None; } }; @@ -900,7 +900,7 @@ impl DFAEngine { let fwd = match result { Ok(fwd) => fwd, Err(_err) => { - debug!("forward full DFA failed to build: {}", 
_err); + debug!("forward full DFA failed to build: {_err}"); return None; } }; @@ -924,7 +924,7 @@ impl DFAEngine { let rev = match result { Ok(rev) => rev, Err(_err) => { - debug!("reverse full DFA failed to build: {}", _err); + debug!("reverse full DFA failed to build: {_err}"); return None; } }; @@ -1132,7 +1132,7 @@ impl ReverseHybridEngine { let rev = match result { Ok(rev) => rev, Err(_err) => { - debug!("lazy reverse DFA failed to build: {}", _err); + debug!("lazy reverse DFA failed to build: {_err}"); return None; } }; @@ -1299,7 +1299,7 @@ impl ReverseDFAEngine { let rev = match result { Ok(rev) => rev, Err(_err) => { - debug!("full reverse DFA failed to build: {}", _err); + debug!("full reverse DFA failed to build: {_err}"); return None; } }; diff --git a/regex-automata/src/nfa/thompson/error.rs b/regex-automata/src/nfa/thompson/error.rs index e29006586..9f884ff20 100644 --- a/regex-automata/src/nfa/thompson/error.rs +++ b/regex-automata/src/nfa/thompson/error.rs @@ -154,25 +154,22 @@ impl core::fmt::Display for BuildError { } BuildErrorKind::TooManyPatterns { given, limit } => write!( f, - "attempted to compile {} patterns, \ - which exceeds the limit of {}", - given, limit, + "attempted to compile {given} patterns, \ + which exceeds the limit of {limit}", ), BuildErrorKind::TooManyStates { given, limit } => write!( f, - "attempted to compile {} NFA states, \ - which exceeds the limit of {}", - given, limit, + "attempted to compile {given} NFA states, \ + which exceeds the limit of {limit}", ), BuildErrorKind::ExceededSizeLimit { limit } => write!( f, - "heap usage during NFA compilation exceeded limit of {}", - limit, + "heap usage during NFA compilation exceeded limit of {limit}", ), BuildErrorKind::InvalidCaptureIndex { index } => write!( f, - "capture group index {} is invalid (too big or discontinuous)", - index, + "capture group index {index} is invalid \ + (too big or discontinuous)", ), #[cfg(feature = "syntax")] BuildErrorKind::UnsupportedCaptures => write!( diff --git a/regex-automata/src/nfa/thompson/literal_trie.rs b/regex-automata/src/nfa/thompson/literal_trie.rs index 7ed129afd..08793cd6d 100644 --- a/regex-automata/src/nfa/thompson/literal_trie.rs +++ b/regex-automata/src/nfa/thompson/literal_trie.rs @@ -419,7 +419,7 @@ impl core::fmt::Debug for State { let mut spacing = " "; for (i, chunk) in self.chunks().enumerate() { if i > 0 { - write!(f, "{}MATCH", spacing)?; + write!(f, "{spacing}MATCH")?; } spacing = ""; for (j, t) in chunk.iter().enumerate() { @@ -429,7 +429,7 @@ impl core::fmt::Debug for State { } else if j > 0 { write!(f, ", ")?; } - write!(f, "{:?}", t)?; + write!(f, "{t:?}")?; } } Ok(()) diff --git a/regex-automata/src/nfa/thompson/nfa.rs b/regex-automata/src/nfa/thompson/nfa.rs index 59a62f4ed..2a0cc9c16 100644 --- a/regex-automata/src/nfa/thompson/nfa.rs +++ b/regex-automata/src/nfa/thompson/nfa.rs @@ -1730,10 +1730,10 @@ impl fmt::Debug for State { State::Sparse(SparseTransitions { ref transitions }) => { let rs = transitions .iter() - .map(|t| format!("{:?}", t)) + .map(|t| format!("{t:?}")) .collect::>() .join(", "); - write!(f, "sparse({})", rs) + write!(f, "sparse({rs})") } State::Dense(ref dense) => { write!(f, "dense(")?; @@ -1741,7 +1741,7 @@ impl fmt::Debug for State { if i > 0 { write!(f, ", ")?; } - write!(f, "{:?}", t)?; + write!(f, "{t:?}")?; } write!(f, ")") } @@ -1754,7 +1754,7 @@ impl fmt::Debug for State { .map(|id| format!("{:?}", id.as_usize())) .collect::>() .join(", "); - write!(f, "union({})", alts) + write!(f, "union({alts})") 
} State::BinaryUnion { alt1, alt2 } => { write!( diff --git a/regex-automata/src/nfa/thompson/pikevm.rs b/regex-automata/src/nfa/thompson/pikevm.rs index 4eb47c85c..a5cd7086f 100644 --- a/regex-automata/src/nfa/thompson/pikevm.rs +++ b/regex-automata/src/nfa/thompson/pikevm.rs @@ -2294,9 +2294,9 @@ impl Counters { let mut set_counts = self.state_sets.iter().collect::, &u64)>>(); set_counts.sort_by_key(|(_, &count)| core::cmp::Reverse(count)); - trace!("## PikeVM frequency of state sets (top {})", LIMIT); + trace!("## PikeVM frequency of state sets (top {LIMIT})"); for (set, count) in set_counts.iter().take(LIMIT) { - trace!("{:?}: {}", set, count); + trace!("{set:?}: {count}"); } if set_counts.len() > LIMIT { trace!( @@ -2332,7 +2332,7 @@ impl Counters { trace!(""); trace!("## NFA debug display"); - trace!("{:?}", nfa); + trace!("{nfa:?}"); trace!("===== END PikeVM Instrumentation Output ====="); } diff --git a/regex-automata/src/nfa/thompson/range_trie.rs b/regex-automata/src/nfa/thompson/range_trie.rs index e877c08ee..57ae322d5 100644 --- a/regex-automata/src/nfa/thompson/range_trie.rs +++ b/regex-automata/src/nfa/thompson/range_trie.rs @@ -869,7 +869,7 @@ impl fmt::Debug for RangeTrie { writeln!(f)?; for (i, state) in self.states.iter().enumerate() { let status = if i == FINAL.as_usize() { '*' } else { ' ' }; - writeln!(f, "{}{:06}: {:?}", status, i, state)?; + writeln!(f, "{status}{i:06}: {state:?}")?; } Ok(()) } @@ -880,10 +880,10 @@ impl fmt::Debug for State { let rs = self .transitions .iter() - .map(|t| format!("{:?}", t)) + .map(|t| format!("{t:?}")) .collect::>() .join(", "); - write!(f, "{}", rs) + write!(f, "{rs}") } } diff --git a/regex-automata/src/util/alphabet.rs b/regex-automata/src/util/alphabet.rs index c0bc2847c..475f95159 100644 --- a/regex-automata/src/util/alphabet.rs +++ b/regex-automata/src/util/alphabet.rs @@ -117,8 +117,8 @@ impl Unit { pub fn eoi(num_byte_equiv_classes: usize) -> Unit { assert!( num_byte_equiv_classes <= 256, - "max number of byte-based equivalent classes is 256, but got {}", - num_byte_equiv_classes, + "max number of byte-based equivalent classes is 256, but got \ + {num_byte_equiv_classes}", ); Unit(UnitKind::EOI(u16::try_from(num_byte_equiv_classes).unwrap())) } @@ -501,9 +501,9 @@ impl core::fmt::Debug for ByteClasses { write!(f, "{:?} => [", class.as_usize())?; for (start, end) in self.element_ranges(class) { if start == end { - write!(f, "{:?}", start)?; + write!(f, "{start:?}")?; } else { - write!(f, "{:?}-{:?}", start, end)?; + write!(f, "{start:?}-{end:?}")?; } } write!(f, "]")?; diff --git a/regex-automata/src/util/captures.rs b/regex-automata/src/util/captures.rs index 84a631cfc..8e6e5aa8b 100644 --- a/regex-automata/src/util/captures.rs +++ b/regex-automata/src/util/captures.rs @@ -1227,7 +1227,7 @@ impl<'a> core::fmt::Debug for CapturesDebugMap<'a> { fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { write!(f, "{}", self.0)?; if let Some(name) = self.1 { - write!(f, "/{:?}", name)?; + write!(f, "/{name:?}")?; } Ok(()) } @@ -2433,7 +2433,7 @@ impl core::fmt::Display for GroupInfoError { match self.kind { TooManyPatterns { ref err } => { - write!(f, "too many patterns to build capture info: {}", err) + write!(f, "too many patterns to build capture info: {err}") } TooManyGroups { pattern, minimum } => { write!( diff --git a/regex-automata/src/util/escape.rs b/regex-automata/src/util/escape.rs index 7f6aa15f5..9c5b72e9d 100644 --- a/regex-automata/src/util/escape.rs +++ b/regex-automata/src/util/escape.rs @@ -57,7 +57,7 
@@ impl<'a> core::fmt::Debug for DebugHaystack<'a> { let ch = match result { Ok(ch) => ch, Err(byte) => { - write!(f, r"\x{:02x}", byte)?; + write!(f, r"\x{byte:02x}")?; bytes = &bytes[1..]; continue; } diff --git a/regex-automata/src/util/iter.rs b/regex-automata/src/util/iter.rs index a789fa042..dcfa4a4cc 100644 --- a/regex-automata/src/util/iter.rs +++ b/regex-automata/src/util/iter.rs @@ -265,9 +265,8 @@ impl<'h> Searcher<'h> { match self.try_advance_half(finder) { Ok(m) => m, Err(err) => panic!( - "unexpected regex half find error: {}\n\ + "unexpected regex half find error: {err}\n\ to handle find errors, use 'try' or 'search' methods", - err, ), } } @@ -380,9 +379,8 @@ impl<'h> Searcher<'h> { match self.try_advance(finder) { Ok(m) => m, Err(err) => panic!( - "unexpected regex find error: {}\n\ + "unexpected regex find error: {err}\n\ to handle find errors, use 'try' or 'search' methods", - err, ), } } @@ -788,9 +786,8 @@ where match self.0.next()? { Ok(m) => Some(m), Err(err) => panic!( - "unexpected regex half find error: {}\n\ + "unexpected regex half find error: {err}\n\ to handle find errors, use 'try' or 'search' methods", - err, ), } } @@ -903,9 +900,8 @@ where match self.0.next()? { Ok(m) => Some(m), Err(err) => panic!( - "unexpected regex find error: {}\n\ + "unexpected regex find error: {err}\n\ to handle find errors, use 'try' or 'search' methods", - err, ), } } @@ -1018,9 +1014,8 @@ where match self.0.next()? { Ok(m) => Some(m), Err(err) => panic!( - "unexpected regex captures error: {}\n\ + "unexpected regex captures error: {err}\n\ to handle find errors, use 'try' or 'search' methods", - err, ), } } diff --git a/regex-automata/src/util/prefilter/aho_corasick.rs b/regex-automata/src/util/prefilter/aho_corasick.rs index 31d5572f8..7a2517fc7 100644 --- a/regex-automata/src/util/prefilter/aho_corasick.rs +++ b/regex-automata/src/util/prefilter/aho_corasick.rs @@ -70,7 +70,7 @@ impl AhoCorasick { let ac = match result { Ok(ac) => ac, Err(_err) => { - debug!("aho-corasick prefilter failed to build: {}", _err); + debug!("aho-corasick prefilter failed to build: {_err}"); return None; } }; diff --git a/regex-automata/src/util/search.rs b/regex-automata/src/util/search.rs index 93c1272d5..3ece11d15 100644 --- a/regex-automata/src/util/search.rs +++ b/regex-automata/src/util/search.rs @@ -1902,10 +1902,10 @@ impl core::fmt::Display for MatchError { offset, ), MatchErrorKind::GaveUp { offset } => { - write!(f, "gave up searching at offset {}", offset) + write!(f, "gave up searching at offset {offset}") } MatchErrorKind::HaystackTooLong { len } => { - write!(f, "haystack of length {} is too long", len) + write!(f, "haystack of length {len} is too long") } MatchErrorKind::UnsupportedAnchored { mode: Anchored::Yes } => { write!(f, "anchored searches are not supported or enabled") diff --git a/regex-automata/src/util/wire.rs b/regex-automata/src/util/wire.rs index f05d328ac..210ab6f4b 100644 --- a/regex-automata/src/util/wire.rs +++ b/regex-automata/src/util/wire.rs @@ -219,47 +219,43 @@ impl core::fmt::Display for DeserializeError { use self::DeserializeErrorKind::*; match self.0 { - Generic { msg } => write!(f, "{}", msg), + Generic { msg } => write!(f, "{msg}"), BufferTooSmall { what } => { - write!(f, "buffer is too small to read {}", what) + write!(f, "buffer is too small to read {what}") } InvalidUsize { what } => { - write!(f, "{} is too big to fit in a usize", what) + write!(f, "{what} is too big to fit in a usize") } VersionMismatch { expected, found } => write!( f, 
"unsupported version: \ - expected version {} but found version {}", - expected, found, + expected version {expected} but found version {found}", ), EndianMismatch { expected, found } => write!( f, - "endianness mismatch: expected 0x{:X} but got 0x{:X}. \ - (Are you trying to load an object serialized with a \ - different endianness?)", - expected, found, + "endianness mismatch: expected 0x{expected:X} but \ + got 0x{found:X}. (Are you trying to load an object \ + serialized with a different endianness?)", ), AlignmentMismatch { alignment, address } => write!( f, - "alignment mismatch: slice starts at address \ - 0x{:X}, which is not aligned to a {} byte boundary", - address, alignment, + "alignment mismatch: slice starts at address 0x{address:X}, \ + which is not aligned to a {alignment} byte boundary", ), LabelMismatch { expected } => write!( f, "label mismatch: start of serialized object should \ - contain a NUL terminated {:?} label, but a different \ + contain a NUL terminated {expected:?} label, but a different \ label was found", - expected, ), ArithmeticOverflow { what } => { - write!(f, "arithmetic overflow for {}", what) + write!(f, "arithmetic overflow for {what}") } PatternID { ref err, what } => { - write!(f, "failed to read pattern ID for {}: {}", what, err) + write!(f, "failed to read pattern ID for {what}: {err}") } StateID { ref err, what } => { - write!(f, "failed to read state ID for {}: {}", what, err) + write!(f, "failed to read state ID for {what}: {err}") } } } @@ -392,20 +388,17 @@ pub(crate) fn alloc_aligned_buffer(size: usize) -> (Vec, usize) { let padding = ((address & !(align - 1)).checked_add(align).unwrap()) .checked_sub(address) .unwrap(); - assert!(padding <= 7, "padding of {} is bigger than 7", padding); + assert!(padding <= 7, "padding of {padding} is bigger than 7"); assert!( padding <= extra, - "padding of {} is bigger than extra {} bytes", - padding, - extra + "padding of {padding} is bigger than extra {extra} bytes", ); buf.truncate(size + padding); assert_eq!(size + padding, buf.len()); assert_eq!( 0, buf[padding..].as_ptr().as_usize() % align, - "expected end of initial padding to be aligned to {}", - align, + "expected end of initial padding to be aligned to {align}", ); (buf, padding) } diff --git a/regex-automata/tests/dfa/onepass/suite.rs b/regex-automata/tests/dfa/onepass/suite.rs index 20bd6965c..aba46c86d 100644 --- a/regex-automata/tests/dfa/onepass/suite.rs +++ b/regex-automata/tests/dfa/onepass/suite.rs @@ -151,7 +151,7 @@ fn run_test( TestResult::skip() } }, - name => TestResult::fail(&format!("unrecognized test name: {}", name)), + name => TestResult::fail(&format!("unrecognized test name: {name}")), } } diff --git a/regex-automata/tests/dfa/suite.rs b/regex-automata/tests/dfa/suite.rs index 8ed6dd007..8368ffef4 100644 --- a/regex-automata/tests/dfa/suite.rs +++ b/regex-automata/tests/dfa/suite.rs @@ -330,7 +330,7 @@ fn run_test(re: &Regex, test: &RegexTest) -> TestResult { TestResult::which(patset.iter().map(|p| p.as_usize())) } }, - name => TestResult::fail(&format!("unrecognized test name: {}", name)), + name => TestResult::fail(&format!("unrecognized test name: {name}")), } } diff --git a/regex-automata/tests/hybrid/suite.rs b/regex-automata/tests/hybrid/suite.rs index 4aaca6698..f0c3ebdbc 100644 --- a/regex-automata/tests/hybrid/suite.rs +++ b/regex-automata/tests/hybrid/suite.rs @@ -232,7 +232,7 @@ fn run_test( TestResult::which(patset.iter().map(|p| p.as_usize())) } }, - name => TestResult::fail(&format!("unrecognized test name: 
{}", name)), + name => TestResult::fail(&format!("unrecognized test name: {name}")), } } diff --git a/regex-automata/tests/meta/suite.rs b/regex-automata/tests/meta/suite.rs index 20f97b4bb..2c3de64fb 100644 --- a/regex-automata/tests/meta/suite.rs +++ b/regex-automata/tests/meta/suite.rs @@ -164,7 +164,7 @@ fn run_test(re: &Regex, test: &RegexTest) -> TestResult { TestResult::skip() } }, - name => TestResult::fail(&format!("unrecognized test name: {}", name)), + name => TestResult::fail(&format!("unrecognized test name: {name}")), } } diff --git a/regex-automata/tests/nfa/thompson/backtrack/suite.rs b/regex-automata/tests/nfa/thompson/backtrack/suite.rs index bce0eef40..c6f3b9f1f 100644 --- a/regex-automata/tests/nfa/thompson/backtrack/suite.rs +++ b/regex-automata/tests/nfa/thompson/backtrack/suite.rs @@ -153,7 +153,7 @@ fn run_test( .map(|caps| testify_captures(&caps)), ), }, - name => TestResult::fail(&format!("unrecognized test name: {}", name)), + name => TestResult::fail(&format!("unrecognized test name: {name}")), } } diff --git a/regex-automata/tests/nfa/thompson/pikevm/suite.rs b/regex-automata/tests/nfa/thompson/pikevm/suite.rs index d32842a15..1fb3fec9f 100644 --- a/regex-automata/tests/nfa/thompson/pikevm/suite.rs +++ b/regex-automata/tests/nfa/thompson/pikevm/suite.rs @@ -120,7 +120,7 @@ fn run_test( TestResult::skip() } }, - name => TestResult::fail(&format!("unrecognized test name: {}", name)), + name => TestResult::fail(&format!("unrecognized test name: {name}")), } }