diff --git a/Cargo.toml b/Cargo.toml
index b9b242cf..d6be42a3 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "cssparser"
-version = "0.21.0"
+version = "0.21.1"
 authors = [ "Simon Sapin <simon.sapin@exyr.org>" ]
 description = "Rust implementation of CSS Syntax Level 3"
diff --git a/src/parser.rs b/src/parser.rs
index 3c3482db..33fdc8b6 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -291,6 +291,15 @@ impl<'i: 't, 't> Parser<'i, 't> {
         self.input.tokenizer.current_source_map_url()
     }
 
+    /// The source URL, if known.
+    ///
+    /// The source URL is extracted from a specially formatted
+    /// comment. The last such comment is used, so this value may
+    /// change as parsing proceeds.
+    pub fn current_source_url(&self) -> Option<&str> {
+        self.input.tokenizer.current_source_url()
+    }
+
     /// Return the current internal state of the parser (including position within the input).
     ///
     /// This state can later be restored with the `Parser::reset` method.
diff --git a/src/size_of_tests.rs b/src/size_of_tests.rs
index 256f7a57..80231f48 100644
--- a/src/size_of_tests.rs
+++ b/src/size_of_tests.rs
@@ -36,8 +36,8 @@ size_of_test!(token, Token, 32);
 size_of_test!(std_cow_str, Cow<'static, str>, 32);
 size_of_test!(cow_rc_str, CowRcStr, 16);
 
-size_of_test!(tokenizer, ::tokenizer::Tokenizer, 56);
-size_of_test!(parser_input, ::parser::ParserInput, 128);
+size_of_test!(tokenizer, ::tokenizer::Tokenizer, 72);
+size_of_test!(parser_input, ::parser::ParserInput, 144);
 size_of_test!(parser, ::parser::Parser, 16);
 size_of_test!(source_position, ::SourcePosition, 8);
 size_of_test!(parser_state, ::ParserState, 24);
diff --git a/src/tests.rs b/src/tests.rs
index 3f275a5e..0e25b149 100644
--- a/src/tests.rs
+++ b/src/tests.rs
@@ -1030,7 +1030,7 @@ fn parse_entirely_reports_first_error() {
 }
 
 #[test]
-fn parse_comments() {
+fn parse_sourcemapping_comments() {
     let tests = vec![
         ("/*# sourceMappingURL=here*/", Some("here")),
         ("/*# sourceMappingURL=here */", Some("here")),
@@ -1054,6 +1054,31 @@ fn parse_comments() {
     }
 }
 
+#[test]
+fn parse_sourceurl_comments() {
+    let tests = vec![
+        ("/*# sourceURL=here*/", Some("here")),
+        ("/*# sourceURL=here */", Some("here")),
+        ("/*@ sourceURL=here*/", Some("here")),
+        ("/*@ sourceURL=there*/ /*# sourceURL=here*/", Some("here")),
+        ("/*# sourceURL=here there */", Some("here")),
+        ("/*# sourceURL= here */", Some("")),
+        ("/*# sourceURL=*/", Some("")),
+        ("/*# sourceMappingUR=here */", None),
+        ("/*! \nsourceURL=here */", None),
+        ("/*# sourceURL = here */", None),
+        ("/* # sourceURL=here */", None)
+    ];
+
+    for test in tests {
+        let mut input = ParserInput::new(test.0);
+        let mut parser = Parser::new(&mut input);
+        while let Ok(_) = parser.next_including_whitespace() {
+        }
+        assert_eq!(parser.current_source_url(), test.1);
+    }
+}
+
 #[test]
 fn roundtrip_percentage_token() {
     fn test_roundtrip(value: &str) {
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 9eb94f3e..507d0e72 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -213,6 +213,7 @@ pub struct Tokenizer<'a> {
     current_line_number: u32,
     var_functions: SeenStatus,
     source_map_url: Option<&'a str>,
+    source_url: Option<&'a str>,
 }
 
 #[derive(Copy, Clone, PartialEq, Eq)]
@@ -238,6 +239,7 @@ impl<'a> Tokenizer<'a> {
            current_line_number: first_line_number,
            var_functions: SeenStatus::DontCare,
            source_map_url: None,
+           source_url: None,
        }
    }
 
@@ -285,6 +287,11 @@ impl<'a> Tokenizer<'a> {
         self.source_map_url
     }
 
+    #[inline]
+    pub fn current_source_url(&self) -> Option<&'a str> {
+        self.source_url
+    }
+
     #[inline]
     pub fn state(&self) -> ParserState {
         ParserState {
@@ -692,7 +699,8 @@ fn consume_whitespace<'a>(tokenizer: &mut Tokenizer<'a>, newline: bool) -> Token
 }
 
 
-// Check for a sourceMappingURL comment and update the tokenizer appropriately.
+// Check for sourceMappingURL or sourceURL comments and update the
+// tokenizer appropriately.
 fn check_for_source_map<'a>(tokenizer: &mut Tokenizer<'a>, contents: &'a str) {
     let directive = "# sourceMappingURL=";
     let directive_old = "@ sourceMappingURL=";
@@ -704,6 +712,17 @@ fn check_for_source_map<'a>(tokenizer: &mut Tokenizer<'a>, contents: &'a str) {
             c == ' ' || c == '\t' || c == '\x0C' || c == '\r' || c == '\n'
         }).next()
     }
+
+    let directive = "# sourceURL=";
+    let directive_old = "@ sourceURL=";
+
+    // If there is a source URL directive, extract the URL.
+    if contents.starts_with(directive) || contents.starts_with(directive_old) {
+        let contents = &contents[directive.len()..];
+        tokenizer.source_url = contents.split(|c| {
+            c == ' ' || c == '\t' || c == '\x0C' || c == '\r' || c == '\n'
+        }).next()
+    }
 }
 
 fn consume_comment<'a>(tokenizer: &mut Tokenizer<'a>) -> &'a str {