@@ -22,16 +22,16 @@ use self::states::{Rawtext, Rcdata, ScriptData, ScriptDataEscaped};
 use self::char_ref::{CharRef, CharRefTokenizer};
 
 use crate::util::str::lower_ascii_letter;
-
 use log::{debug, trace};
 use mac::format_if;
-use markup5ever::{namespace_url, ns, small_char_set};
+use markup5ever::buffer_queue::BufferQueue;
+use markup5ever::{namespace_url, ns, small_char_set, InputSink, InputSinkResult};
 use std::borrow::Cow::{self, Borrowed};
 use std::cell::{Cell, RefCell, RefMut};
 use std::collections::BTreeMap;
-use std::mem;
+use std::{iter, mem};
 
-pub use crate::buffer_queue::{BufferQueue, FromSet, NotFromSet, SetResult};
+pub use crate::buffer_queue::{FromSet, NotFromSet, SetResult};
 use crate::tendril::StrTendril;
 use crate::{Attribute, LocalName, QualName, SmallCharSet};
 
@@ -43,13 +43,17 @@ pub enum ProcessResult<Handle> {
     Continue,
     Suspend,
     Script(Handle),
+    #[cfg(feature = "encoding")]
+    MaybeChangeEncodingAndStartOver(&'static encoding_rs::Encoding)
 }
 
 #[must_use]
 #[derive(Debug)]
 pub enum TokenizerResult<Handle> {
     Done,
     Script(Handle),
+    #[cfg(feature = "encoding")]
+    MaybeChangeEncodingAndStartOver(&'static encoding_rs::Encoding)
 }
 
 fn option_push(opt_str: &mut Option<StrTendril>, c: char) {
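
Aside (illustrative sketch, not part of the diff): with the new variant, a caller that drives the tokenizer by hand has a third case to handle when the "encoding" feature is on. The sketch below is written as if it sat in this same module, so Tokenizer, TokenizerResult, TokenSink and the newly imported BufferQueue are already in scope; the function name and the empty arm bodies are placeholders.

    // Hypothetical caller-side handling of a single feed() call.
    fn handle_feed_result<Sink: TokenSink>(tokenizer: &Tokenizer<Sink>, input: &BufferQueue) {
        match tokenizer.feed(input) {
            // All currently queued input was consumed.
            TokenizerResult::Done => {},
            // A script end tag was reached; the caller runs the script,
            // then queues more input and feeds again.
            TokenizerResult::Script(_handle) => {},
            // New in this change: the sink saw evidence of a different
            // character encoding, so the caller may re-decode the raw
            // bytes with `_encoding` and start tokenizing over.
            #[cfg(feature = "encoding")]
            TokenizerResult::MaybeChangeEncodingAndStartOver(_encoding) => {},
        }
    }
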
@@ -364,6 +368,8 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                     ProcessResult::Continue => (),
                     ProcessResult::Suspend => break,
                     ProcessResult::Script(node) => return TokenizerResult::Script(node),
+                    #[cfg(feature = "encoding")]
+                    ProcessResult::MaybeChangeEncodingAndStartOver(encoding) => return TokenizerResult::MaybeChangeEncodingAndStartOver(encoding),
                 }
             }
         } else {
@@ -372,6 +378,8 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                     ProcessResult::Continue => (),
                     ProcessResult::Suspend => break,
                     ProcessResult::Script(node) => return TokenizerResult::Script(node),
+                    #[cfg(feature = "encoding")]
+                    ProcessResult::MaybeChangeEncodingAndStartOver(encoding) => return TokenizerResult::MaybeChangeEncodingAndStartOver(encoding),
                 }
             }
         }
@@ -452,6 +460,8 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 self.state.set(states::RawData(kind));
                 ProcessResult::Continue
             },
+            #[cfg(feature = "encoding")]
+            TokenSinkResult::MaybeChangeEncodingAndStartOver(encoding) => ProcessResult::MaybeChangeEncodingAndStartOver(encoding)
         }
     }
 
@@ -1455,6 +1465,8 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 ProcessResult::Continue => (),
                 ProcessResult::Suspend => break,
                 ProcessResult::Script(_) => unreachable!(),
+                #[cfg(feature = "encoding")]
+                ProcessResult::MaybeChangeEncodingAndStartOver(_) => unreachable!(),
             }
         }
 
@@ -1582,13 +1594,34 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
     }
 }
 
+impl<Sink> InputSink for Tokenizer<Sink>
+where Sink: TokenSink {
+    type Handle = Sink::Handle;
+
+    fn feed(&self, input: &BufferQueue) -> impl Iterator<Item = InputSinkResult<Self::Handle>> {
+        iter::from_fn(|| {
+            self.feed(input).into()
+        })
+    }
+}
+
+impl<Handle> From<TokenizerResult<Handle>> for Option<InputSinkResult<Handle>> {
+    fn from(value: TokenizerResult<Handle>) -> Self {
+        match value {
+            TokenizerResult::Script(handle) => Some(InputSinkResult::HandleScript(handle)),
+            TokenizerResult::MaybeChangeEncodingAndStartOver(encoding) => Some(InputSinkResult::MaybeStartOverWithEncoding(encoding)),
+            TokenizerResult::Done => None,
+        }
+    }
+}
+
 #[cfg(test)]
 #[allow(non_snake_case)]
 mod test {
     use super::option_push; // private items
-    use crate::tendril::{SliceExt, StrTendril};
-
     use super::{TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts};
+    use crate::tendril::{SliceExt, StrTendril};
+    use crate::LocalName;
 
     use super::interface::{CharacterTokens, EOFToken, NullCharacterToken, ParseError};
     use super::interface::{EndTag, StartTag, Tag, TagKind};
@@ -1597,8 +1630,6 @@ mod test {
     use markup5ever::buffer_queue::BufferQueue;
     use std::cell::RefCell;
 
-    use crate::LocalName;
-
     // LinesMatch implements the TokenSink trait. It is used for testing to see
    // if current_line is being updated when process_token is called. The lines
    // vector is a collection of the line numbers that each token is on.
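
Aside (illustrative sketch, not part of the diff): the InputSink impl added above wraps the existing feed loop in a lazy iterator via iter::from_fn; each call to next() runs Tokenizer::feed once more, and the iterator stops yielding when that returns TokenizerResult::Done. A consumer, again written as if it sat in this module, might look roughly like the sketch below; the function name and empty arm bodies are placeholders, and the wildcard arm only hedges against InputSinkResult having further variants.

    // Hypothetical consumer of the InputSink impl. The trait method is called
    // as `InputSink::feed(..)` because the inherent `Tokenizer::feed`
    // (which returns a TokenizerResult) would otherwise take precedence.
    fn pump<Sink: TokenSink>(tokenizer: &Tokenizer<Sink>, input: &BufferQueue) {
        for event in InputSink::feed(tokenizer, input) {
            match event {
                // A script has to run before tokenization continues.
                InputSinkResult::HandleScript(_handle) => {},
                // Anything else (e.g. a request to start over with another
                // encoding) is left to the embedder.
                _ => {},
            }
        }
    }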