Commit ff1ba7d

Author: bors-servo
Auto merge of #178 - tromey:source-map-url, r=SimonSapin
Extract source map URL from directive comments

Change the parser to extract the source map URL from directive comments. The relevant spec is here: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.lmz475t4mvbx

This is part of similar work being done in M-C (mozilla-central) in https://bugzilla.mozilla.org/show_bug.cgi?id=1388855
2 parents: a21f97d + 94330b1
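A minimal usage sketch of the new API (not part of the commit; the stylesheet string and map file name are illustrative, and the calls assume the 0.19.1 `Parser`/`ParserInput` interface shown in the diff below):

```rust
extern crate cssparser;
use cssparser::{Parser, ParserInput};

fn main() {
    // A stylesheet that ends with a source map directive comment.
    let css = ".foo { color: red }\n/*# sourceMappingURL=style.css.map */";
    let mut input = ParserInput::new(css);
    let mut parser = Parser::new(&mut input);
    // The directive is only noticed once the tokenizer has consumed the
    // comment, so drive the parser to the end of the input first.
    while let Ok(_) = parser.next_including_whitespace() {}
    assert_eq!(parser.current_source_map_url(), Some("style.css.map"));
}
```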

File tree: 5 files changed, +61 -4 lines

Cargo.toml (+1 -1)

@@ -1,6 +1,6 @@
 [package]
 name = "cssparser"
-version = "0.19.0"
+version = "0.19.1"
 authors = [ "Simon Sapin <simon.sapin@exyr.org>" ]

 description = "Rust implementation of CSS Syntax Level 3"

src/parser.rs (+9)

@@ -268,6 +268,15 @@ impl<'i: 't, 't> Parser<'i, 't> {
         self.input.tokenizer.current_source_location()
     }

+    /// The source map URL, if known.
+    ///
+    /// The source map URL is extracted from a specially formatted
+    /// comment. The last such comment is used, so this value may
+    /// change as parsing proceeds.
+    pub fn current_source_map_url(&self) -> Option<&str> {
+        self.input.tokenizer.current_source_map_url()
+    }
+
     /// Return the current internal state of the parser (including position within the input).
     ///
     /// This state can later be restored with the `Parser::reset` method.
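As the doc comment above notes, the reported URL can change while parsing is still in progress, and the last directive seen wins. A short sketch of that behavior (not part of the commit; the input string is taken from the new test in src/tests.rs below):

```rust
extern crate cssparser;
use cssparser::{Parser, ParserInput};

fn main() {
    // An old-style `@` directive followed by a new-style `#` directive.
    let mut input = ParserInput::new("/*@ sourceMappingURL=there*/ /*# sourceMappingURL=here*/");
    let mut parser = Parser::new(&mut input);
    // Nothing has been tokenized yet, so no URL is known.
    assert_eq!(parser.current_source_map_url(), None);
    // Each directive comment overwrites the previously recorded URL,
    // so after consuming the whole input the last one is reported.
    while let Ok(_) = parser.next_including_whitespace() {}
    assert_eq!(parser.current_source_map_url(), Some("here"));
}
```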

src/size_of_tests.rs (+2 -2)

@@ -36,8 +36,8 @@ size_of_test!(token, Token, 32);
 size_of_test!(std_cow_str, Cow<'static, str>, 32);
 size_of_test!(cow_rc_str, CowRcStr, 16);

-size_of_test!(tokenizer, ::tokenizer::Tokenizer, 40);
-size_of_test!(parser_input, ::parser::ParserInput, 112);
+size_of_test!(tokenizer, ::tokenizer::Tokenizer, 56);
+size_of_test!(parser_input, ::parser::ParserInput, 128);
 size_of_test!(parser, ::parser::Parser, 16);
 size_of_test!(source_position, ::SourcePosition, 8);
 size_of_test!(parser_state, ::ParserState, 24);

src/tests.rs (+25)

@@ -979,3 +979,28 @@ fn parse_entirely_reports_first_error() {
     let result: Result<(), _> = parser.parse_entirely(|_| Err(ParseError::Custom(E::Foo)));
     assert_eq!(result, Err(ParseError::Custom(E::Foo)));
 }
+
+#[test]
+fn parse_comments() {
+    let tests = vec![
+        ("/*# sourceMappingURL=here*/", Some("here")),
+        ("/*# sourceMappingURL=here */", Some("here")),
+        ("/*@ sourceMappingURL=here*/", Some("here")),
+        ("/*@ sourceMappingURL=there*/ /*# sourceMappingURL=here*/", Some("here")),
+        ("/*# sourceMappingURL=here there */", Some("here")),
+        ("/*# sourceMappingURL= here */", Some("")),
+        ("/*# sourceMappingURL=*/", Some("")),
+        ("/*# sourceMappingUR=here */", None),
+        ("/*! sourceMappingURL=here */", None),
+        ("/*# sourceMappingURL = here */", None),
+        ("/* # sourceMappingURL=here */", None)
+    ];
+
+    for test in tests {
+        let mut input = ParserInput::new(test.0);
+        let mut parser = Parser::new(&mut input);
+        while let Ok(_) = parser.next_including_whitespace() {
+        }
+        assert_eq!(parser.current_source_map_url(), test.1);
+    }
+}

src/tokenizer.rs (+24 -1)

@@ -209,6 +209,7 @@ pub struct Tokenizer<'a> {
     current_line_number: u32,
     var_functions: SeenStatus,
     viewport_percentages: SeenStatus,
+    source_map_url: Option<&'a str>,
 }

 #[derive(Copy, Clone, PartialEq, Eq)]
@@ -234,6 +235,7 @@ impl<'a> Tokenizer<'a> {
             current_line_number: first_line_number,
             var_functions: SeenStatus::DontCare,
             viewport_percentages: SeenStatus::DontCare,
+            source_map_url: None,
         }
     }

@@ -300,6 +302,11 @@ impl<'a> Tokenizer<'a> {
         }
     }

+    #[inline]
+    pub fn current_source_map_url(&self) -> Option<&'a str> {
+        self.source_map_url
+    }
+
     #[inline]
     pub fn state(&self) -> ParserState {
         ParserState {
@@ -507,7 +514,9 @@ fn next_token<'a>(tokenizer: &mut Tokenizer<'a>) -> Result<Token<'a>, ()> {
         }
         b'/' => {
             if tokenizer.starts_with(b"/*") {
-                Comment(consume_comment(tokenizer))
+                let contents = consume_comment(tokenizer);
+                check_for_source_map(tokenizer, contents);
+                Comment(contents)
             } else {
                 tokenizer.advance(1);
                 Delim('/')
@@ -594,6 +603,20 @@ fn consume_whitespace<'a>(tokenizer: &mut Tokenizer<'a>, newline: bool, is_cr: b
 }


+// Check for a sourceMappingURL comment and update the tokenizer appropriately.
+fn check_for_source_map<'a>(tokenizer: &mut Tokenizer<'a>, contents: &'a str) {
+    let directive = "# sourceMappingURL=";
+    let directive_old = "@ sourceMappingURL=";
+
+    // If there is a source map directive, extract the URL.
+    if contents.starts_with(directive) || contents.starts_with(directive_old) {
+        let contents = &contents[directive.len()..];
+        tokenizer.source_map_url = contents.split(|c| {
+            c == ' ' || c == '\t' || c == '\x0C' || c == '\r' || c == '\n'
+        }).next()
+    }
+}
+
 fn consume_comment<'a>(tokenizer: &mut Tokenizer<'a>) -> &'a str {
     tokenizer.advance(2); // consume "/*"
     let start_position = tokenizer.position();
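The helper added above only recognizes a comment whose body starts exactly with "# sourceMappingURL=" or "@ sourceMappingURL=", and it takes everything up to the first CSS whitespace character as the URL; because both prefixes are the same length, a single slice offset covers either form. A standalone restatement of that rule (the function name is hypothetical and not part of the crate; the expected values match the new tests):

```rust
// `contents` is the comment body with the surrounding "/*" and "*/" stripped.
fn source_map_url_from_comment(contents: &str) -> Option<&str> {
    let directive = "# sourceMappingURL=";
    let directive_old = "@ sourceMappingURL=";
    if contents.starts_with(directive) || contents.starts_with(directive_old) {
        // Both prefixes are 19 bytes long, so one offset works for either form.
        let rest = &contents[directive.len()..];
        // The URL runs up to the first CSS whitespace character.
        rest.split(|c| c == ' ' || c == '\t' || c == '\x0C' || c == '\r' || c == '\n').next()
    } else {
        None
    }
}

fn main() {
    assert_eq!(source_map_url_from_comment("# sourceMappingURL=here there "), Some("here"));
    assert_eq!(source_map_url_from_comment("! sourceMappingURL=here "), None);
    assert_eq!(source_map_url_from_comment("# sourceMappingURL="), Some(""));
}
```

Note that, unlike the committed `check_for_source_map`, this sketch returns None for a non-directive comment rather than leaving a previously recorded URL untouched.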

0 commit comments
