diff --git a/Cargo.toml b/Cargo.toml
index eed489d6..a851b5f7 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "cssparser"
-version = "0.19.0"
+version = "0.19.1"
 authors = [ "Simon Sapin <simon.sapin@exyr.org>" ]
 description = "Rust implementation of CSS Syntax Level 3"
 documentation = "https://docs.rs/cssparser/"
diff --git a/src/parser.rs b/src/parser.rs
index 78165950..9e957f17 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -268,6 +268,15 @@ impl<'i: 't, 't> Parser<'i, 't> {
         self.input.tokenizer.current_source_location()
     }
 
+    /// The source map URL, if known.
+    ///
+    /// The source map URL is extracted from a specially formatted
+    /// comment. The last such comment is used, so this value may
+    /// change as parsing proceeds.
+    pub fn current_source_map_url(&self) -> Option<&str> {
+        self.input.tokenizer.current_source_map_url()
+    }
+
     /// Return the current internal state of the parser (including position within the input).
     ///
     /// This state can later be restored with the `Parser::reset` method.
diff --git a/src/size_of_tests.rs b/src/size_of_tests.rs
index b352bdf7..256f7a57 100644
--- a/src/size_of_tests.rs
+++ b/src/size_of_tests.rs
@@ -36,8 +36,8 @@
 size_of_test!(token, Token, 32);
 size_of_test!(std_cow_str, Cow<'static, str>, 32);
 size_of_test!(cow_rc_str, CowRcStr, 16);
-size_of_test!(tokenizer, ::tokenizer::Tokenizer, 40);
-size_of_test!(parser_input, ::parser::ParserInput, 112);
+size_of_test!(tokenizer, ::tokenizer::Tokenizer, 56);
+size_of_test!(parser_input, ::parser::ParserInput, 128);
 size_of_test!(parser, ::parser::Parser, 16);
 size_of_test!(source_position, ::SourcePosition, 8);
 size_of_test!(parser_state, ::ParserState, 24);
diff --git a/src/tests.rs b/src/tests.rs
index 6d58856e..9037bc53 100644
--- a/src/tests.rs
+++ b/src/tests.rs
@@ -979,3 +979,28 @@ fn parse_entirely_reports_first_error() {
     let result: Result<(), _> = parser.parse_entirely(|_| Err(ParseError::Custom(E::Foo)));
     assert_eq!(result, Err(ParseError::Custom(E::Foo)));
 }
+
+#[test]
+fn parse_comments() {
+    let tests = vec![
+        ("/*# sourceMappingURL=here*/", Some("here")),
+        ("/*# sourceMappingURL=here */", Some("here")),
+        ("/*@ sourceMappingURL=here*/", Some("here")),
+        ("/*@ sourceMappingURL=there*/ /*# sourceMappingURL=here*/", Some("here")),
+        ("/*# sourceMappingURL=here there */", Some("here")),
+        ("/*# sourceMappingURL= here */", Some("")),
+        ("/*# sourceMappingURL=*/", Some("")),
+        ("/*# sourceMappingUR=here */", None),
+        ("/*! \n sourceMappingURL=here */", None),
+        ("/*# sourceMappingURL = here */", None),
+        ("/* # sourceMappingURL=here */", None)
+    ];
+
+    for test in tests {
+        let mut input = ParserInput::new(test.0);
+        let mut parser = Parser::new(&mut input);
+        while let Ok(_) = parser.next_including_whitespace() {
+        }
+        assert_eq!(parser.current_source_map_url(), test.1);
+    }
+}
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 4ea462e8..425de66b 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -209,6 +209,7 @@ pub struct Tokenizer<'a> {
     current_line_number: u32,
     var_functions: SeenStatus,
     viewport_percentages: SeenStatus,
+    source_map_url: Option<&'a str>,
 }
 
 #[derive(Copy, Clone, PartialEq, Eq)]
@@ -234,6 +235,7 @@ impl<'a> Tokenizer<'a> {
             current_line_number: first_line_number,
             var_functions: SeenStatus::DontCare,
             viewport_percentages: SeenStatus::DontCare,
+            source_map_url: None,
         }
     }
 
@@ -300,6 +302,11 @@ impl<'a> Tokenizer<'a> {
         }
     }
 
+    #[inline]
+    pub fn current_source_map_url(&self) -> Option<&'a str> {
+        self.source_map_url
+    }
+
     #[inline]
     pub fn state(&self) -> ParserState {
         ParserState {
@@ -507,7 +514,9 @@ fn next_token<'a>(tokenizer: &mut Tokenizer<'a>) -> Result<Token<'a>, ()> {
         b'/' => {
             if tokenizer.starts_with(b"/*") {
-                Comment(consume_comment(tokenizer))
+                let contents = consume_comment(tokenizer);
+                check_for_source_map(tokenizer, contents);
+                Comment(contents)
             } else {
                 tokenizer.advance(1);
                 Delim('/')
             }
@@ -594,6 +603,20 @@ fn consume_whitespace<'a>(tokenizer: &mut Tokenizer<'a>, newline: bool, is_cr: b
     }
 }
 
+// Check for a sourceMappingURL comment and update the tokenizer appropriately.
+fn check_for_source_map<'a>(tokenizer: &mut Tokenizer<'a>, contents: &'a str) {
+    let directive = "# sourceMappingURL=";
+    let directive_old = "@ sourceMappingURL=";
+
+    // If there is a source map directive, extract the URL.
+    if contents.starts_with(directive) || contents.starts_with(directive_old) {
+        let contents = &contents[directive.len()..];
+        tokenizer.source_map_url = contents.split(|c| {
+            c == ' ' || c == '\t' || c == '\x0C' || c == '\r' || c == '\n'
+        }).next()
+    }
+}
+
 fn consume_comment<'a>(tokenizer: &mut Tokenizer<'a>) -> &'a str {
     tokenizer.advance(2); // consume "/*"
     let start_position = tokenizer.position();
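
Usage note: a minimal sketch of how a downstream consumer of the new API might
read the source map URL, modeled directly on the parse_comments test in this
patch. The stylesheet text and the "style.css.map" name are illustrative
assumptions, not part of the change.

    extern crate cssparser;
    use cssparser::{Parser, ParserInput};

    fn main() {
        let css = ".foo { color: red }\n/*# sourceMappingURL=style.css.map */";
        let mut input = ParserInput::new(css);
        let mut parser = Parser::new(&mut input);
        // The tokenizer records the URL only as comments are consumed, so
        // drive the parser across the whole input before asking for it.
        while let Ok(_) = parser.next_including_whitespace() {}
        assert_eq!(parser.current_source_map_url(), Some("style.css.map"));
    }

Because check_for_source_map keeps everything up to the first whitespace
character after the "=", a directive whose URL is empty or begins with a space
yields Some("") rather than None, as the test table above records.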