@@ -61,20 +61,24 @@ impl<'a, T> ParseError<'a, T> {
 }
 
 /// The owned input for a parser.
-pub struct ParserInput<'t>(Tokenizer<'t>);
+pub struct ParserInput<'t> {
+    tokenizer: Tokenizer<'t>,
+}
 
 impl<'t> ParserInput<'t> {
     /// Create a new input for a parser.
     pub fn new(input: &'t str) -> ParserInput<'t> {
-        ParserInput(Tokenizer::new(input))
+        ParserInput {
+            tokenizer: Tokenizer::new(input),
+        }
     }
 }
 
 /// A CSS parser that borrows its `&str` input,
 /// yields `Token`s,
 /// and keeps track of nested blocks and functions.
 pub struct Parser<'i: 't, 't> {
-    tokenizer: &'t mut ParserInput<'i>,
+    input: &'t mut ParserInput<'i>,
     /// If `Some(_)`, .parse_nested_block() can be called.
     at_start_of: Option<BlockType>,
     /// For parsers from `parse_until` or `parse_nested_block`
@@ -182,15 +186,15 @@ impl<'i: 't, 't> Parser<'i, 't> {
     #[inline]
     pub fn new(input: &'t mut ParserInput<'i>) -> Parser<'i, 't> {
         Parser {
-            tokenizer: input,
+            input: input,
             at_start_of: None,
             stop_before: Delimiter::None,
         }
     }
 
     /// Return the current line that is being parsed.
     pub fn current_line(&self) -> &'i str {
-        self.tokenizer.0.current_source_line()
+        self.input.tokenizer.current_source_line()
     }
 
     /// Check whether the input is exhausted. That is, if `.next()` would return a token.
@@ -223,7 +227,7 @@ impl<'i: 't, 't> Parser<'i, 't> {
     #[inline]
     pub fn position(&self) -> SourcePosition {
         SourcePosition {
-            position: self.tokenizer.0.position(),
+            position: self.input.tokenizer.position(),
             at_start_of: self.at_start_of,
         }
     }
@@ -234,35 +238,35 @@ impl<'i: 't, 't> Parser<'i, 't> {
     /// Should only be used with `SourcePosition` values from the same `Parser` instance.
     #[inline]
     pub fn reset(&mut self, new_position: SourcePosition) {
-        self.tokenizer.0.reset(new_position.position);
+        self.input.tokenizer.reset(new_position.position);
         self.at_start_of = new_position.at_start_of;
     }
 
     /// Start looking for `var()` functions. (See the `.seen_var_functions()` method.)
     #[inline]
     pub fn look_for_var_functions(&mut self) {
-        self.tokenizer.0.look_for_var_functions()
+        self.input.tokenizer.look_for_var_functions()
     }
 
     /// Return whether a `var()` function has been seen by the tokenizer since
     /// either `look_for_var_functions` was called, and stop looking.
     #[inline]
     pub fn seen_var_functions(&mut self) -> bool {
-        self.tokenizer.0.seen_var_functions()
+        self.input.tokenizer.seen_var_functions()
     }
 
     /// Start looking for viewport percentage lengths. (See the `seen_viewport_percentages`
     /// method.)
     #[inline]
     pub fn look_for_viewport_percentages(&mut self) {
-        self.tokenizer.0.look_for_viewport_percentages()
+        self.input.tokenizer.look_for_viewport_percentages()
     }
 
     /// Return whether a `vh`, `vw`, `vmin`, or `vmax` dimension has been seen by the tokenizer
     /// since `look_for_viewport_percentages` was called, and stop looking.
     #[inline]
     pub fn seen_viewport_percentages(&mut self) -> bool {
-        self.tokenizer.0.seen_viewport_percentages()
+        self.input.tokenizer.seen_viewport_percentages()
     }
 
     /// Execute the given closure, passing it the parser.
@@ -283,25 +287,25 @@ impl<'i: 't, 't> Parser<'i, 't> {
     /// Return a slice of the CSS input
     #[inline]
     pub fn slice(&self, range: Range<SourcePosition>) -> &'i str {
-        self.tokenizer.0.slice(range.start.position..range.end.position)
+        self.input.tokenizer.slice(range.start.position..range.end.position)
     }
 
     /// Return a slice of the CSS input, from the given position to the current one.
     #[inline]
     pub fn slice_from(&self, start_position: SourcePosition) -> &'i str {
-        self.tokenizer.0.slice_from(start_position.position)
+        self.input.tokenizer.slice_from(start_position.position)
     }
 
     /// Return the line and column number within the input for the current position.
     #[inline]
     pub fn current_source_location(&self) -> SourceLocation {
-        self.tokenizer.0.current_source_location()
+        self.input.tokenizer.current_source_location()
     }
 
     /// Return the line and column number within the input for the given position.
     #[inline]
     pub fn source_location(&self, target: SourcePosition) -> SourceLocation {
-        self.tokenizer.0.source_location(target.position)
+        self.input.tokenizer.source_location(target.position)
     }
 
     /// Return the next token in the input that is neither whitespace or a comment,
@@ -342,13 +346,13 @@ impl<'i: 't, 't> Parser<'i, 't> {
     /// comments should always be ignored between tokens.
     pub fn next_including_whitespace_and_comments(&mut self) -> Result<Token<'i>, BasicParseError<'i>> {
         if let Some(block_type) = self.at_start_of.take() {
-            consume_until_end_of_block(block_type, &mut self.tokenizer.0);
+            consume_until_end_of_block(block_type, &mut self.input.tokenizer);
         }
-        let byte = self.tokenizer.0.next_byte();
+        let byte = self.input.tokenizer.next_byte();
         if self.stop_before.contains(Delimiters::from_byte(byte)) {
             return Err(BasicParseError::EndOfInput)
         }
-        let token = self.tokenizer.0.next().map_err(|()| BasicParseError::EndOfInput)?;
+        let token = self.input.tokenizer.next().map_err(|()| BasicParseError::EndOfInput)?;
         if let Some(block_type) = BlockType::opening(&token) {
             self.at_start_of = Some(block_type);
         }
@@ -665,23 +669,23 @@ pub fn parse_until_before<'i: 't, 't, F, T, E>(parser: &mut Parser<'i, 't>,
     // Introduce a new scope to limit duration of nested_parser’s borrow
     {
         let mut delimited_parser = Parser {
-            tokenizer: parser.tokenizer,
+            input: parser.input,
             at_start_of: parser.at_start_of.take(),
             stop_before: delimiters,
         };
         result = delimited_parser.parse_entirely(parse);
         if let Some(block_type) = delimited_parser.at_start_of {
-            consume_until_end_of_block(block_type, &mut delimited_parser.tokenizer.0);
+            consume_until_end_of_block(block_type, &mut delimited_parser.input.tokenizer);
         }
     }
     // FIXME: have a special-purpose tokenizer method for this that does less work.
     loop {
-        if delimiters.contains(Delimiters::from_byte((parser.tokenizer.0).next_byte())) {
+        if delimiters.contains(Delimiters::from_byte((parser.input.tokenizer).next_byte())) {
             break
         }
-        if let Ok(token) = (parser.tokenizer.0).next() {
+        if let Ok(token) = (parser.input.tokenizer).next() {
             if let Some(block_type) = BlockType::opening(&token) {
-                consume_until_end_of_block(block_type, &mut parser.tokenizer.0);
+                consume_until_end_of_block(block_type, &mut parser.input.tokenizer);
             }
         } else {
             break
@@ -696,12 +700,12 @@ pub fn parse_until_after<'i: 't, 't, F, T, E>(parser: &mut Parser<'i, 't>,
                                               -> Result<T, ParseError<'i, E>>
     where F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>> {
     let result = parser.parse_until_before(delimiters, parse);
-    let next_byte = (parser.tokenizer.0).next_byte();
+    let next_byte = (parser.input.tokenizer).next_byte();
     if next_byte.is_some() && !parser.stop_before.contains(Delimiters::from_byte(next_byte)) {
         debug_assert!(delimiters.contains(Delimiters::from_byte(next_byte)));
-        (parser.tokenizer.0).advance(1);
+        (parser.input.tokenizer).advance(1);
         if next_byte == Some(b'{') {
-            consume_until_end_of_block(BlockType::CurlyBracket, &mut parser.tokenizer.0);
+            consume_until_end_of_block(BlockType::CurlyBracket, &mut parser.input.tokenizer);
         }
     }
     result
@@ -724,16 +728,16 @@ pub fn parse_nested_block<'i: 't, 't, F, T, E>(parser: &mut Parser<'i, 't>, pars
     // Introduce a new scope to limit duration of nested_parser’s borrow
     {
         let mut nested_parser = Parser {
-            tokenizer: parser.tokenizer,
+            input: parser.input,
             at_start_of: None,
             stop_before: closing_delimiter,
         };
         result = nested_parser.parse_entirely(parse);
         if let Some(block_type) = nested_parser.at_start_of {
-            consume_until_end_of_block(block_type, &mut nested_parser.tokenizer.0);
+            consume_until_end_of_block(block_type, &mut nested_parser.input.tokenizer);
         }
     }
-    consume_until_end_of_block(block_type, &mut parser.tokenizer.0);
+    consume_until_end_of_block(block_type, &mut parser.input.tokenizer);
     result
 }
 
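For context, a minimal usage sketch of the reworked API (not part of the commit): it assumes `ParserInput` is re-exported at the crate root alongside `Parser`, and uses only the constructors and the token method that appear in the diff above.

// Sketch: the owned ParserInput now holds the Tokenizer behind a named
// `tokenizer` field, and Parser borrows it mutably through its renamed
// `input` field ('i: 't, so the input outlives the borrowing parser).
extern crate cssparser;

use cssparser::{Parser, ParserInput};

fn main() {
    let mut input = ParserInput::new("foo { bar: baz }");
    let mut parser = Parser::new(&mut input);

    // Drain every token, including whitespace and comments, via the
    // method touched in this diff.
    while let Ok(token) = parser.next_including_whitespace_and_comments() {
        println!("{:?}", token);
    }
}

Callers never touch the tuple field `.0` anymore; all access to the tokenizer now goes through the named `input.tokenizer` path inside the crate.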