Skip to content

Commit 5c9eba1

Browse files
committed
Update dev-dependencies
1 parent 9ed45b4 commit 5c9eba1

File tree

8 files changed

+39
-43
lines changed

8 files changed

+39
-43
lines changed

Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ criterion = "0.5"
1313
env_logger = "0.11"
1414
pretty_assertions = { workspace = true }
1515
serde_json = { version = "1" }
16-
swc_core = { version = "0.100", features = [
16+
swc_core = { version = "10", features = [
1717
"common",
1818
"ecma_ast",
1919
"ecma_parser",

mdast_util_to_markdown/src/handle/list_item.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -102,5 +102,6 @@ impl Handle for ListItem {
102102
}
103103

104104
fn compute_size(a: usize) -> usize {
105-
((a + 4 - 1) / 4) * 4
105+
// `a.div_ceil(4)` is `((a + 4 - 1) / 4)`
106+
a.div_ceil(4) * 4
106107
}

src/construct/content.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -182,7 +182,7 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, message::
182182
let result = subtokenize(
183183
&mut tokenizer.events,
184184
tokenizer.parse_state,
185-
&Some(Content::Content),
185+
Some(&Content::Content),
186186
)?;
187187

188188
Ok(Some(result))

src/construct/document.rs

Lines changed: 30 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -304,26 +304,24 @@ pub fn containers_after(tokenizer: &mut Tokenizer) -> State {
304304
!= tokenizer.tokenize_state.document_container_stack.len();
305305
child.define_skip(tokenizer.point.clone());
306306

307-
match tokenizer.current {
308-
// Note: EOL is part of data.
309-
None => State::Retry(StateName::DocumentFlowEnd),
310-
Some(_) => {
311-
let current = tokenizer.events.len();
312-
let previous = tokenizer.tokenize_state.document_data_index;
313-
if let Some(previous) = previous {
314-
tokenizer.events[previous].link.as_mut().unwrap().next = Some(current);
315-
}
316-
tokenizer.tokenize_state.document_data_index = Some(current);
317-
tokenizer.enter_link(
318-
Name::Data,
319-
Link {
320-
previous,
321-
next: None,
322-
content: Content::Flow,
323-
},
324-
);
325-
State::Retry(StateName::DocumentFlowInside)
307+
if tokenizer.current.is_none() {
308+
State::Retry(StateName::DocumentFlowEnd)
309+
} else {
310+
let current = tokenizer.events.len();
311+
let previous = tokenizer.tokenize_state.document_data_index;
312+
if let Some(previous) = previous {
313+
tokenizer.events[previous].link.as_mut().unwrap().next = Some(current);
326314
}
315+
tokenizer.tokenize_state.document_data_index = Some(current);
316+
tokenizer.enter_link(
317+
Name::Data,
318+
Link {
319+
previous,
320+
next: None,
321+
content: Content::Flow,
322+
},
323+
);
324+
State::Retry(StateName::DocumentFlowInside)
327325
}
328326
}
329327

@@ -450,23 +448,20 @@ pub fn flow_end(tokenizer: &mut Tokenizer) -> State {
450448
debug_assert!(result.is_ok(), "did not expect error when exiting");
451449
}
452450

453-
match tokenizer.current {
454-
None => {
455-
tokenizer.tokenize_state.document_continued = 0;
456-
if let Err(message) = exit_containers(tokenizer, &Phase::Eof) {
457-
return State::Error(message);
458-
}
459-
resolve(tokenizer);
460-
State::Ok
461-
}
462-
Some(_) => {
463-
tokenizer.tokenize_state.document_continued = 0;
464-
tokenizer.tokenize_state.document_lazy_accepting_before =
465-
document_lazy_continuation_current;
466-
// Containers would only be interrupting if we’ve continued.
467-
tokenizer.interrupt = false;
468-
State::Retry(StateName::DocumentContainerExistingBefore)
451+
if tokenizer.current.is_none() {
452+
tokenizer.tokenize_state.document_continued = 0;
453+
if let Err(message) = exit_containers(tokenizer, &Phase::Eof) {
454+
return State::Error(message);
469455
}
456+
resolve(tokenizer);
457+
State::Ok
458+
} else {
459+
tokenizer.tokenize_state.document_continued = 0;
460+
tokenizer.tokenize_state.document_lazy_accepting_before =
461+
document_lazy_continuation_current;
462+
// Containers would only be interrupting if we’ve continued.
463+
tokenizer.interrupt = false;
464+
State::Retry(StateName::DocumentContainerExistingBefore)
470465
}
471466
}
472467

src/mdast.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -96,7 +96,7 @@ impl serde::ser::Serialize for AlignKind {
9696
struct AlignKindVisitor;
9797

9898
#[cfg(feature = "serde")]
99-
impl<'de> serde::de::Visitor<'de> for AlignKindVisitor {
99+
impl serde::de::Visitor<'_> for AlignKindVisitor {
100100
type Value = AlignKind;
101101

102102
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {

src/parser.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -74,6 +74,6 @@ pub fn parse<'a>(
7474
return Ok((events, parse_state));
7575
}
7676

77-
result = subtokenize(&mut events, &parse_state, &None)?;
77+
result = subtokenize(&mut events, &parse_state, None)?;
7878
}
7979
}

src/subtokenize.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -78,7 +78,7 @@ pub fn link_to(events: &mut [Event], previous: usize, next: usize) {
7878
pub fn subtokenize(
7979
events: &mut Vec<Event>,
8080
parse_state: &ParseState,
81-
filter: &Option<Content>,
81+
filter: Option<&Content>,
8282
) -> Result<Subresult, message::Message> {
8383
let mut map = EditMap::new();
8484
let mut index = 0;
@@ -98,7 +98,7 @@ pub fn subtokenize(
9898

9999
// No need to enter linked events again.
100100
if link.previous.is_none()
101-
&& (filter.is_none() || &link.content == filter.as_ref().unwrap())
101+
&& (filter.is_none() || &link.content == *filter.as_ref().unwrap())
102102
{
103103
// Index into `events` pointing to a chunk.
104104
let mut link_index = Some(index);

src/util/sanitize_uri.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ pub fn sanitize_with_protocols(value: &str, protocols: &[&str]) -> String {
5555
let value = sanitize(value);
5656

5757
let end = value.find(|c| matches!(c, '?' | '#' | '/'));
58-
let mut colon = value.find(|c| matches!(c, ':'));
58+
let mut colon = value.find(':');
5959

6060
// If the first colon is after `?`, `#`, or `/`, it’s not a protocol.
6161
if let Some(end) = end {

0 commit comments

Comments (0)