Skip to content

Commit 7bf7e86

Browse files
Committed (author and date not captured in this page extract)
Refactor to always resolve edit maps
This will probably catch some confusing bugs, such as ad1b3e6.
1 parent 5d5e945 commit 7bf7e86

File tree

8 files changed

+7
-8
lines changed

8 files changed

+7
-8
lines changed

src/construct/gfm_table.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -883,6 +883,7 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
883883
flush_table_end(tokenizer, last_table_end, last_table_has_body);
884884
}
885885

886+
tokenizer.map.consume(&mut tokenizer.events);
886887
None
887888
}
888889

src/construct/heading_atx.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -280,5 +280,6 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
280280
index += 1;
281281
}
282282

283+
tokenizer.map.consume(&mut tokenizer.events);
283284
None
284285
}

src/construct/heading_setext.rs

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -185,8 +185,6 @@ pub fn after(tokenizer: &mut Tokenizer) -> State {
185185

186186
/// Resolve heading (setext).
187187
pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
188-
tokenizer.map.consume(&mut tokenizer.events);
189-
190188
let mut enter = skip::to(&tokenizer.events, 0, &[Name::HeadingSetextUnderline]);
191189

192190
while enter < tokenizer.events.len() {
@@ -280,6 +278,5 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
280278
}
281279

282280
tokenizer.map.consume(&mut tokenizer.events);
283-
284281
None
285282
}

src/construct/label_end.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -669,7 +669,6 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
669669
mark_as_data(tokenizer, &starts);
670670

671671
tokenizer.map.consume(&mut tokenizer.events);
672-
673672
None
674673
}
675674

src/construct/list_item.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -469,5 +469,6 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
469469
index += 1;
470470
}
471471

472+
tokenizer.map.consume(&mut tokenizer.events);
472473
None
473474
}

src/construct/partial_data.rs

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -74,8 +74,6 @@ pub fn inside(tokenizer: &mut Tokenizer) -> State {
7474

7575
/// Merge adjacent data events.
7676
pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
77-
tokenizer.map.consume(&mut tokenizer.events);
78-
7977
let mut index = 0;
8078

8179
// Loop through events and merge adjacent data events.
@@ -107,5 +105,6 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
107105
index += 1;
108106
}
109107

108+
tokenizer.map.consume(&mut tokenizer.events);
110109
None
111110
}

src/construct/partial_whitespace.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -67,8 +67,6 @@ use alloc::vec;
6767

6868
/// Resolve whitespace.
6969
pub fn resolve_whitespace(tokenizer: &mut Tokenizer, hard_break: bool, trim_whole: bool) {
70-
tokenizer.map.consume(&mut tokenizer.events);
71-
7270
let mut index = 0;
7371

7472
while index < tokenizer.events.len() {
@@ -86,6 +84,8 @@ pub fn resolve_whitespace(tokenizer: &mut Tokenizer, hard_break: bool, trim_whol
8684

8785
index += 1;
8886
}
87+
88+
tokenizer.map.consume(&mut tokenizer.events);
8989
}
9090

9191
/// Trim a [`Data`][Name::Data] event.

src/construct/text.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -259,5 +259,6 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
259259
resolve_gfm_autolink_literal(tokenizer);
260260
}
261261

262+
tokenizer.map.consume(&mut tokenizer.events);
262263
None
263264
}

0 commit comments

Comments (0)