@@ -19,6 +19,7 @@ use html5ever::tokenizer::{CharacterTokens, EOFToken, NullCharacterToken, ParseE
 use html5ever::tokenizer::{CommentToken, DoctypeToken, TagToken, Token};
 use html5ever::tokenizer::{Doctype, EndTag, StartTag, Tag};
 use html5ever::tokenizer::{TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts};
+use html5ever::TokenizerResult;
 use html5ever::{ns, Attribute, LocalName, QualName};
 use serde_json::{Map, Value};
 use std::cell::RefCell;
@@ -147,15 +148,19 @@ impl TokenSink for TokenLogger {
 
 fn tokenize(input: Vec<StrTendril>, opts: TokenizerOpts) -> (Vec<Token>, Vec<TestError>) {
     let sink = TokenLogger::new(opts.exact_errors);
-    let tok = Tokenizer::new(sink, opts);
+    let tokenizer = Tokenizer::new(sink, opts);
+
     let buffer = BufferQueue::default();
     for chunk in input.into_iter() {
         buffer.push_back(chunk);
-        let _ = tok.feed(&buffer);
     }
-    let _ = tok.feed(&buffer);
-    tok.end();
-    tok.sink.get_tokens()
+
+    while tokenizer.feed(&buffer) != TokenizerResult::Done {
+        // Ignore any script tags...
+    }
+
+    tokenizer.end();
+    tokenizer.sink.get_tokens()
 }
 
 trait JsonExt: Sized {
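
For reference, a minimal standalone sketch of the same feed loop outside the test harness follows. The TextSink type, the input string, and the final assertion are illustrative assumptions, not part of this change, and exact import paths can differ between html5ever releases:

use html5ever::tendril::StrTendril;
use html5ever::tokenizer::{
    BufferQueue, CharacterTokens, Token, TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts,
};
use html5ever::TokenizerResult;
use std::cell::RefCell;

// Hypothetical sink that collects only character data.
struct TextSink {
    text: RefCell<String>,
}

impl TokenSink for TextSink {
    type Handle = ();

    fn process_token(&self, token: Token, _line_number: u64) -> TokenSinkResult<()> {
        if let CharacterTokens(s) = token {
            self.text.borrow_mut().push_str(&s);
        }
        TokenSinkResult::Continue
    }
}

fn main() {
    let sink = TextSink { text: RefCell::new(String::new()) };
    let tokenizer = Tokenizer::new(sink, TokenizerOpts::default());

    let buffer = BufferQueue::default();
    buffer.push_back(StrTendril::from_slice("<p>hello world</p>"));

    // Drive the tokenizer until it reports Done. A Script result would
    // pause tokenization so the caller can run a script; nothing executes
    // scripts here, so the loop simply keeps feeding.
    while tokenizer.feed(&buffer) != TokenizerResult::Done {}
    tokenizer.end();

    assert_eq!(tokenizer.sink.text.borrow().as_str(), "hello world");
}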