Skip to content

Commit a5b303f

Browse files
authored
Fix usage of tokenizer in tokenizer tests (#644)
Signed-off-by: Simon Wülker <[email protected]>
1 parent 7861cad commit a5b303f

File tree

1 file changed: 10 additions, 5 deletions

rcdom/tests/html-tokenizer.rs

Lines changed: 10 additions & 5 deletions
```diff
@@ -19,6 +19,7 @@ use html5ever::tokenizer::{CharacterTokens, EOFToken, NullCharacterToken, ParseE
 use html5ever::tokenizer::{CommentToken, DoctypeToken, TagToken, Token};
 use html5ever::tokenizer::{Doctype, EndTag, StartTag, Tag};
 use html5ever::tokenizer::{TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts};
+use html5ever::TokenizerResult;
 use html5ever::{ns, Attribute, LocalName, QualName};
 use serde_json::{Map, Value};
 use std::cell::RefCell;
@@ -147,15 +148,19 @@ impl TokenSink for TokenLogger {
 
 fn tokenize(input: Vec<StrTendril>, opts: TokenizerOpts) -> (Vec<Token>, Vec<TestError>) {
     let sink = TokenLogger::new(opts.exact_errors);
-    let tok = Tokenizer::new(sink, opts);
+    let tokenizer = Tokenizer::new(sink, opts);
+
     let buffer = BufferQueue::default();
     for chunk in input.into_iter() {
         buffer.push_back(chunk);
-        let _ = tok.feed(&buffer);
     }
-    let _ = tok.feed(&buffer);
-    tok.end();
-    tok.sink.get_tokens()
+
+    while tokenizer.feed(&buffer) != TokenizerResult::Done {
+        // Ignore any script tags...
+    }
+
+    tokenizer.end();
+    tokenizer.sink.get_tokens()
 }
 
 trait JsonExt: Sized {
```

0 commit comments

Comments (0)