@@ -6,20 +6,40 @@ use cow_rc_str::CowRcStr;
 use std::ops::Range;
 use std::ascii::AsciiExt;
 use std::ops::BitOr;
-use tokenizer::{self, Token, Tokenizer, SourceLocation};
+use tokenizer::{Token, Tokenizer, SourcePosition, SourceLocation};


 /// A capture of the internal state of a `Parser` (including the position within the input),
 /// obtained from the `Parser::position` method.
 ///
 /// Can be used with the `Parser::reset` method to restore that state.
 /// Should only be used with the `Parser` instance it came from.
-#[derive(PartialEq, Eq, Debug, Clone, Copy)]
-pub struct SourcePosition {
-    position: tokenizer::SourcePosition,
-    at_start_of: Option<BlockType>,
+#[derive(Debug, Clone)]
+pub struct ParserState {
+    pub(crate) position: usize,
+    pub(crate) current_line_start_position: usize,
+    pub(crate) current_line_number: u32,
+    pub(crate) at_start_of: Option<BlockType>,
 }

+impl ParserState {
+    /// The position from the start of the input, counted in UTF-8 bytes.
+    #[inline]
+    pub fn position(&self) -> SourcePosition {
+        SourcePosition(self.position)
+    }
+
+    /// The line number and column number
+    #[inline]
+    pub fn source_location(&self) -> SourceLocation {
+        SourceLocation {
+            line: self.current_line_number,
+            column: (self.position - self.current_line_start_position) as u32,
+        }
+    }
+}
+
+
 /// The fundamental parsing errors that can be triggered by built-in parsing routines.
 #[derive(Clone, Debug, PartialEq)]
 pub enum BasicParseError<'a> {
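
// A minimal usage sketch of the new `ParserState` snapshot, not part of the
// diff above. It assumes the cssparser API shown in this file (`ParserInput`,
// `Parser::new`, `state`, `next`, `slice_from`); exact signatures may differ
// between crate versions.
use cssparser::{Parser, ParserInput};

fn show_state(css: &str) {
    let mut input = ParserInput::new(css);
    let mut parser = Parser::new(&mut input);

    // Take a snapshot before consuming anything, then advance by one token.
    let start = parser.state();
    let _ = parser.next();

    // The snapshot alone answers both location questions:
    let loc = start.source_location();
    println!("snapshot was at line {}, column {}", loc.line, loc.column);

    // ...and `position()` gives the UTF-8 byte offset, usable with `slice_from`
    // (defined later in this file) to recover everything consumed since the
    // snapshot, including any whitespace or comments that `next` skipped.
    println!("consumed: {:?}", parser.slice_from(start.position()));
}
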
@@ -68,8 +88,8 @@ pub struct ParserInput<'i> {

 struct CachedToken<'i> {
     token: Token<'i>,
-    start_position: tokenizer::SourcePosition,
-    end_position: tokenizer::SourcePosition,
+    start_position: SourcePosition,
+    end_state: ParserState,
 }

 impl<'i> ParserInput<'i> {
@@ -100,7 +120,7 @@ pub struct Parser<'i: 't, 't> {


 #[derive(Copy, Clone, PartialEq, Eq, Debug)]
-enum BlockType {
+pub(crate) enum BlockType {
     Parenthesis,
     SquareBracket,
     CurlyBracket,
@@ -224,24 +244,38 @@ impl<'i: 't, 't> Parser<'i, 't> {
     /// This ignores whitespace and comments.
     #[inline]
     pub fn expect_exhausted(&mut self) -> Result<(), BasicParseError<'i>> {
-        let start_position = self.position();
+        let start = self.state();
         let result = match self.next() {
             Err(BasicParseError::EndOfInput) => Ok(()),
             Err(e) => unreachable!("Unexpected error encountered: {:?}", e),
             Ok(t) => Err(BasicParseError::UnexpectedToken(t.clone())),
         };
-        self.reset(start_position);
+        self.reset(&start);
         result
     }

+    /// Return the current position within the input.
+    ///
+    /// This can be used with the `Parser::slice` and `slice_from` methods.
+    #[inline]
+    pub fn position(&self) -> SourcePosition {
+        self.input.tokenizer.position()
+    }
+
+    /// The current line number and column number.
+    #[inline]
+    pub fn current_source_location(&self) -> SourceLocation {
+        self.input.tokenizer.current_source_location()
+    }
+
     /// Return the current internal state of the parser (including position within the input).
     ///
     /// This state can later be restored with the `Parser::reset` method.
     #[inline]
-    pub fn position(&self) -> SourcePosition {
-        SourcePosition {
-            position: self.input.tokenizer.position(),
+    pub fn state(&self) -> ParserState {
+        ParserState {
             at_start_of: self.at_start_of,
+            .. self.input.tokenizer.state()
         }
     }

@@ -250,9 +284,9 @@ impl<'i: 't, 't> Parser<'i, 't> {
     ///
     /// Should only be used with `SourcePosition` values from the same `Parser` instance.
     #[inline]
-    pub fn reset(&mut self, new_position: SourcePosition) {
-        self.input.tokenizer.reset(new_position.position);
-        self.at_start_of = new_position.at_start_of;
+    pub fn reset(&mut self, state: &ParserState) {
+        self.input.tokenizer.reset(state);
+        self.at_start_of = state.at_start_of;
     }

     /// Start looking for `var()` functions. (See the `.seen_var_functions()` method.)
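
// A hedged sketch, not part of the diff, of the save/rewind pattern that
// `expect_exhausted` and `try` implement above with the new `state`/`reset`
// pair. `skip_optional_comma` is a made-up helper name, and `expect_comma`
// is assumed to consume a token even when it fails to match, which is why
// the rewind is needed.
use cssparser::Parser;

fn skip_optional_comma<'i, 't>(parser: &mut Parser<'i, 't>) -> bool {
    let start = parser.state();
    if parser.expect_comma().is_ok() {
        true
    } else {
        // Rewind to the snapshot, restoring the byte offset, the line/column
        // bookkeeping, and the `at_start_of` block flag.
        parser.reset(&start);
        false
    }
}
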
@@ -289,36 +323,24 @@ impl<'i: 't, 't> Parser<'i, 't> {
     #[inline]
     pub fn try<F, T, E>(&mut self, thing: F) -> Result<T, E>
     where F: FnOnce(&mut Parser<'i, 't>) -> Result<T, E> {
-        let start_position = self.position();
+        let start = self.state();
         let result = thing(self);
         if result.is_err() {
-            self.reset(start_position)
+            self.reset(&start)
         }
         result
     }

     /// Return a slice of the CSS input
     #[inline]
     pub fn slice(&self, range: Range<SourcePosition>) -> &'i str {
-        self.input.tokenizer.slice(range.start.position..range.end.position)
+        self.input.tokenizer.slice(range)
     }

     /// Return a slice of the CSS input, from the given position to the current one.
     #[inline]
     pub fn slice_from(&self, start_position: SourcePosition) -> &'i str {
-        self.input.tokenizer.slice_from(start_position.position)
-    }
-
-    /// Return the line and column number within the input for the current position.
-    #[inline]
-    pub fn current_source_location(&self) -> SourceLocation {
-        self.input.tokenizer.current_source_location()
-    }
-
-    /// Return the line and column number within the input for the given position.
-    #[inline]
-    pub fn source_location(&self, target: SourcePosition) -> SourceLocation {
-        self.input.tokenizer.source_location(target.position)
+        self.input.tokenizer.slice_from(start_position)
     }

     /// Return the next token in the input that is neither whitespace or a comment,
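
// A small sketch, not part of the diff, of `position` together with `slice`,
// now that `SourcePosition` is just a byte offset handed straight to the
// tokenizer. It assumes the crate API shown above; error handling is ignored
// and the function name is made up for illustration.
use cssparser::{Parser, ParserInput};

fn raw_text_of_first_two_tokens(css: &str) -> String {
    let mut input = ParserInput::new(css);
    let mut parser = Parser::new(&mut input);

    let start = parser.position();
    let _ = parser.next();
    let _ = parser.next();
    let end = parser.position();

    // `slice` takes a Range<SourcePosition>; the result covers the first two
    // non-whitespace tokens plus anything skipped between them. Copy it out
    // before the input goes away.
    parser.slice(start..end).to_owned()
}
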
@@ -374,8 +396,9 @@ impl<'i: 't, 't> Parser<'i, 't> {
         let token_start_position = self.input.tokenizer.position();
         let token;
         match self.input.cached_token {
-            Some(ref cached_token) if cached_token.start_position == token_start_position => {
-                self.input.tokenizer.reset(cached_token.end_position);
+            Some(ref cached_token)
+            if cached_token.start_position == token_start_position => {
+                self.input.tokenizer.reset(&cached_token.end_state);
                 match cached_token.token {
                     Token::Dimension { ref unit, .. } => self.input.tokenizer.see_dimension(unit),
                     Token::Function(ref name) => self.input.tokenizer.see_function(name),
@@ -388,7 +411,7 @@ impl<'i: 't, 't> Parser<'i, 't> {
                 self.input.cached_token = Some(CachedToken {
                     token: new_token,
                     start_position: token_start_position,
-                    end_position: self.input.tokenizer.position(),
+                    end_state: self.input.tokenizer.state(),
                 });
                 token = self.input.cached_token_ref()
             }