Skip to content

Commit 48823f0

Browse files
Fix attention starting and ending in different links
Example: ```markdown [*]() [*]() ``` There was already code handling cases such as: ```markdown [*]() x*. *x [*](). ``` However, that code was not correct for attention sequences at the same depth that occur in different constructs (e.g., two separate links). Closes GH-21. Co-authored-by: Christian Murphy <[email protected]>
1 parent 7bf7e86 commit 48823f0

File tree

2 files changed

+18
-11
lines changed

2 files changed

+18
-11
lines changed

src/construct/attention.rs

Lines changed: 12 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -92,8 +92,10 @@ use alloc::{vec, vec::Vec};
9292
struct Sequence {
9393
/// Marker as a byte (`u8`) used in this sequence.
9494
marker: u8,
95-
/// The depth in events where this sequence resides.
96-
balance: usize,
95+
/// We track whether sequences are in balanced events, and where those
96+
/// events start, so that one attention doesn’t start in say, one link, and
97+
/// end in another.
98+
stack: Vec<usize>,
9799
/// The index into events where this sequence’s `Enter` currently resides.
98100
index: usize,
99101
/// The (shifted) point where this sequence starts.
@@ -172,7 +174,7 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
172174
// An opener matching our closer:
173175
if sequence_open.open
174176
&& sequence_close.marker == sequence_open.marker
175-
&& sequence_close.balance == sequence_open.balance
177+
&& sequence_close.stack == sequence_open.stack
176178
{
177179
// If the opening can close or the closing can open,
178180
// and the close size *is not* a multiple of three,
@@ -219,23 +221,20 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
219221
}
220222

221223
tokenizer.map.consume(&mut tokenizer.events);
222-
223224
None
224225
}
225226

226227
/// Get sequences.
227228
fn get_sequences(tokenizer: &mut Tokenizer) -> Vec<Sequence> {
228229
let mut index = 0;
229-
let mut balance = 0;
230+
let mut stack = vec![];
230231
let mut sequences = vec![];
231232

232233
while index < tokenizer.events.len() {
233234
let enter = &tokenizer.events[index];
234235

235-
if enter.kind == Kind::Enter {
236-
balance += 1;
237-
238-
if enter.name == Name::AttentionSequence {
236+
if enter.name == Name::AttentionSequence {
237+
if enter.kind == Kind::Enter {
239238
let end = index + 1;
240239
let exit = &tokenizer.events[end];
241240

@@ -255,7 +254,7 @@ fn get_sequences(tokenizer: &mut Tokenizer) -> Vec<Sequence> {
255254

256255
sequences.push(Sequence {
257256
index,
258-
balance,
257+
stack: stack.clone(),
259258
start_point: enter.point.clone(),
260259
end_point: exit.point.clone(),
261260
size: exit.point.index - enter.point.index,
@@ -272,8 +271,10 @@ fn get_sequences(tokenizer: &mut Tokenizer) -> Vec<Sequence> {
272271
marker,
273272
});
274273
}
274+
} else if enter.kind == Kind::Enter {
275+
stack.push(index);
275276
} else {
276-
balance -= 1;
277+
stack.pop();
277278
}
278279

279280
index += 1;

tests/fuzz.rs

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -104,5 +104,11 @@ fn fuzz() -> Result<(), String> {
104104
"9: autolink literals that end in table cell delimiter (GH-20)"
105105
);
106106

107+
assert_eq!(
108+
to_html_with_options("[*]() [*]()", &Options::gfm()),
109+
Ok("<p><a href=\"\">*</a> <a href=\"\">*</a></p>".into()),
110+
"10: attention in different links (GH-21)"
111+
);
112+
107113
Ok(())
108114
}

0 commit comments

Comments (0)