1 file changed: 7 additions and 5 deletions

@@ -514,9 +514,7 @@ fn next_token<'a>(tokenizer: &mut Tokenizer<'a>) -> Result<Token<'a>, ()> {
             }
             b'/' => {
                 if tokenizer.starts_with(b"/*") {
-                    let contents = consume_comment(tokenizer);
-                    check_for_source_map(tokenizer, contents);
-                    Comment(contents)
+                    Comment(consume_comment(tokenizer))
                 } else {
                     tokenizer.advance(1);
                     Delim('/')
@@ -627,7 +625,9 @@ fn consume_comment<'a>(tokenizer: &mut Tokenizer<'a>) -> &'a str {
                 tokenizer.advance(1);
                 if tokenizer.next_byte() == Some(b'/') {
                     tokenizer.advance(1);
-                    return tokenizer.slice(start_position..end_position)
+                    let contents = tokenizer.slice(start_position..end_position);
+                    check_for_source_map(tokenizer, contents);
+                    return contents
                 }
             }
             b'\n' | b'\x0C' => {
@@ -643,7 +643,9 @@ fn consume_comment<'a>(tokenizer: &mut Tokenizer<'a>) -> &'a str {
            }
        }
    }
-    tokenizer.slice_from(start_position)
+    let contents = tokenizer.slice_from(start_position);
+    check_for_source_map(tokenizer, contents);
+    contents
 }

 fn consume_string<'a>(tokenizer: &mut Tokenizer<'a>, single_quote: bool) -> Token<'a> {
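
Taken together, the three hunks move the check_for_source_map call out of its single call site in next_token and into both return paths of consume_comment, so the source-map check runs for every comment, whether it is closed by */ or runs to the end of the input. Below is a minimal, self-contained sketch of that shape; the Tokenizer fields, the directive syntax matched, and the scanning logic are simplified assumptions for illustration, not the actual cssparser implementation.

// Sketch only: simplified stand-in for the real Tokenizer.
struct Tokenizer<'a> {
    input: &'a str,
    position: usize,
    source_map_url: Option<&'a str>,
}

impl<'a> Tokenizer<'a> {
    fn consume_comment(&mut self) -> &'a str {
        let input = self.input; // copy the &'a str so slices keep lifetime 'a
        let start = self.position;
        let contents = match input[start..].find("*/") {
            // Comment closed by "*/": take everything before the terminator.
            Some(offset) => {
                self.position = start + offset + 2;
                &input[start..start + offset]
            }
            // Unterminated comment: take the rest of the input.
            None => {
                self.position = input.len();
                &input[start..]
            }
        };
        // After the change, the check happens on every return path of
        // consume_comment instead of at the call site in next_token.
        self.check_for_source_map(contents);
        contents
    }

    fn check_for_source_map(&mut self, contents: &'a str) {
        // Hypothetical, simplified directive match.
        if let Some(url) = contents.trim().strip_prefix("# sourceMappingURL=") {
            self.source_map_url = Some(url.trim());
        }
    }
}

fn main() {
    let mut t = Tokenizer {
        input: "# sourceMappingURL=style.css.map */ body {}",
        position: 0,
        source_map_url: None,
    };
    let comment = t.consume_comment();
    println!("comment = {:?}", comment);
    println!("source map = {:?}", t.source_map_url);
}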