@@ -21,11 +21,10 @@ use util::runner::{run_all, Test};
 
 use markup5ever::buffer_queue::BufferQueue;
 use xml5ever::tendril::{SliceExt, StrTendril};
-use xml5ever::tokenizer::{CharacterTokens, Token, TokenSink};
-use xml5ever::tokenizer::{CommentToken, EmptyTag, EndTag, ShortTag, StartTag, Tag};
-use xml5ever::tokenizer::{Doctype, DoctypeToken, PIToken, Pi};
-use xml5ever::tokenizer::{EOFToken, XmlTokenizer, XmlTokenizerOpts};
-use xml5ever::tokenizer::{NullCharacterToken, ParseError, TagToken};
+use xml5ever::tokenizer::{
+    Doctype, EmptyTag, EndTag, Pi, ShortTag, StartTag, Tag, Token, TokenSink, XmlTokenizer,
+    XmlTokenizerOpts,
+};
 use xml5ever::{ns, Attribute, LocalName, QualName};
 
 mod util {
@@ -81,7 +80,7 @@ impl TokenLogger {
     fn finish_str(&self) {
         if !self.current_str.borrow().is_empty() {
             let s = self.current_str.take();
-            self.tokens.borrow_mut().push(CharacterTokens(s));
+            self.tokens.borrow_mut().push(Token::Characters(s));
         }
     }
 
@@ -96,21 +95,20 @@ impl TokenSink for TokenLogger {
 
     fn process_token(&self, token: Token) -> ProcessResult<()> {
         match token {
-            CharacterTokens(b) => {
-                self.current_str.borrow_mut().push_slice(&b);
+            Token::Characters(characters) => {
+                self.current_str.borrow_mut().push_slice(&characters);
             },
 
-            NullCharacterToken => {
+            Token::NullCharacter => {
                 self.current_str.borrow_mut().push_char('\0');
             },
 
-            ParseError(_) => {
+            Token::ParseError(_) => {
                 if self.exact_errors {
-                    self.push(ParseError(Borrowed("")));
+                    self.push(Token::ParseError(Borrowed("")));
                 }
             },
-
-            TagToken(mut t) => {
+            Token::Tag(mut t) => {
                 // The spec seems to indicate that one can emit
                 // erroneous end tags with attrs, but the test
                 // cases don't contain them.
@@ -120,11 +118,9 @@ impl TokenSink for TokenLogger {
                     },
                     _ => t.attrs.sort_by(|a1, a2| a1.name.cmp(&a2.name)),
                 }
-                self.push(TagToken(t));
+                self.push(Token::Tag(t));
             },
-
-            EOFToken => (),
-
+            Token::EndOfFile => (),
             _ => self.push(token),
         };
         ProcessResult::Continue
@@ -211,7 +207,7 @@ fn json_to_token(js: &Value) -> Token {
     // Collect refs here so we don't have to use "ref" in all the patterns below.
     let args: Vec<&Value> = parts[1..].iter().collect();
     match &*parts[0].get_str() {
-        "StartTag" => TagToken(Tag {
+        "StartTag" => Token::Tag(Tag {
             kind: StartTag,
             name: QualName::new(None, ns!(), LocalName::from(args[0].get_str())),
             attrs: args[1]
@@ -224,19 +220,19 @@ fn json_to_token(js: &Value) -> Token {
                 .collect(),
         }),
 
-        "EndTag" => TagToken(Tag {
+        "EndTag" => Token::Tag(Tag {
             kind: EndTag,
             name: QualName::new(None, ns!(), LocalName::from(args[0].get_str())),
             attrs: vec![],
         }),
 
-        "ShortTag" => TagToken(Tag {
+        "ShortTag" => Token::Tag(Tag {
             kind: ShortTag,
             name: QualName::new(None, ns!(), LocalName::from(args[0].get_str())),
             attrs: vec![],
         }),
 
-        "EmptyTag" => TagToken(Tag {
+        "EmptyTag" => Token::Tag(Tag {
             kind: EmptyTag,
             name: QualName::new(None, ns!(), LocalName::from(args[0].get_str())),
             attrs: args[1]
@@ -249,16 +245,16 @@ fn json_to_token(js: &Value) -> Token {
                 .collect(),
         }),
 
-        "Comment" => CommentToken(args[0].get_tendril()),
+        "Comment" => Token::Comment(args[0].get_tendril()),
 
-        "Character" => CharacterTokens(args[0].get_tendril()),
+        "Character" => Token::Characters(args[0].get_tendril()),
 
-        "PI" => PIToken(Pi {
+        "PI" => Token::ProcessingInstruction(Pi {
             target: args[0].get_tendril(),
             data: args[1].get_tendril(),
         }),
 
-        "DOCTYPE" => DoctypeToken(Doctype {
+        "DOCTYPE" => Token::Doctype(Doctype {
             name: args[0].get_nullable_tendril(),
             public_id: args[1].get_nullable_tendril(),
             system_id: args[2].get_nullable_tendril(),
@@ -278,7 +274,7 @@ fn json_to_tokens(js: &Value, exact_errors: bool) -> Vec<Token> {
     for tok in js.as_array().unwrap().iter() {
         match *tok {
             Value::String(ref s) if &s[..] == "ParseError" => {
-                let _ = sink.process_token(ParseError(Borrowed("")));
+                let _ = sink.process_token(Token::ParseError(Borrowed("")));
             },
             _ => {
                 let _ = sink.process_token(json_to_token(tok));
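
Note: for readers unfamiliar with the new style, here is a minimal sketch (not part of this change) of the calling convention the diff switches to — matching on qualified `Token::` variant paths instead of the old glob-imported names (`TagToken`, `EOFToken`, and so on). The `describe` helper is hypothetical; the variant names are the ones visible in the hunks above, and the catch-all arm hedges against any variants not shown in this diff.

use xml5ever::tokenizer::Token;

// Hypothetical helper: report a token's variant using the qualified
// `Token::` paths this migration adopts.
fn describe(token: &Token) -> &'static str {
    match token {
        Token::Doctype(_) => "doctype",
        Token::Tag(_) => "tag",
        Token::ProcessingInstruction(_) => "processing instruction",
        Token::Comment(_) => "comment",
        Token::Characters(_) => "characters",
        Token::NullCharacter => "null character",
        Token::ParseError(_) => "parse error",
        Token::EndOfFile => "end of file",
        // Catch-all in case the enum carries variants not visible in these hunks.
        _ => "other",
    }
}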