diff --git a/css-parsing-tests/README.rst b/css-parsing-tests/README.rst
index 7342d91b..9f4f5675 100644
--- a/css-parsing-tests/README.rst
+++ b/css-parsing-tests/README.rst
@@ -10,6 +10,17 @@ The upstream repository for these tests is at
 https://github.com/SimonSapin/css-parsing-tests
 
 
+Projects using this
+===================
+
+CSS parsers using these tests:
+
+* `tinycss2 `_ (Python)
+* `rust-cssparser `_
+  (Rust, used in `Servo `_)
+* `Crass `_ (Ruby)
+
+
 Importing
 =========
 
diff --git a/css-parsing-tests/component_value_list.json b/css-parsing-tests/component_value_list.json
index 2ed07808..01c9cbb1 100644
--- a/css-parsing-tests/component_value_list.json
+++ b/css-parsing-tests/component_value_list.json
@@ -162,7 +162,7 @@
 "url('a\nb') url('c\n",
 [["error", "bad-url"], " ", ["error", "bad-url"]],
 
-"url() url( \t) url( Foô\\030\n!\n) url(a b) url(a\\ b) url(a(b) url(a\\(b) url(a'b) url(a\\'b) url(a\"b) url(a\\\"b) url(a\nb) url(a\\\nb) url(a\\a b) url(a\\", [
+"url() url( \t) url(\n Foô\\030\n!\n) url(\na\nb\n) url(a\\ b) url(a(b) url(a\\(b) url(a'b) url(a\\'b) url(a\"b) url(a\\\"b) url(a\nb) url(a\\\nb) url(a\\a b) url(a\\", [
 ["url", ""], " ", ["url", ""], " ", ["url", "Foô0!"], " ",
diff --git a/tokenizer.rs b/tokenizer.rs
index e44c2aa6..53112302 100644
--- a/tokenizer.rs
+++ b/tokenizer.rs
@@ -461,7 +461,11 @@ fn consume_url(tokenizer: &mut Tokenizer) -> ComponentValue {
     tokenizer.position += 1; // Skip the ( of url(
     while !tokenizer.is_eof() {
         match tokenizer.current_char() {
-            '\t' | '\n' | ' ' => tokenizer.position += 1,
+            ' ' | '\t' => tokenizer.position += 1,
+            '\n' => {
+                tokenizer.position += 1;
+                tokenizer.new_line();
+            },
             '"' => return consume_quoted_url(tokenizer, false),
             '\'' => return consume_quoted_url(tokenizer, true),
             ')' => { tokenizer.position += 1; break },
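
Note on the tokenizer.rs hunk: the old arm treated '\n' like any other whitespace after url(, so the tokenizer's newline bookkeeping never ran and line numbers reported for later tokens could drift. The new '\n' arm still advances past the character but also calls new_line(). The updated component_value_list.json case moves newlines into the whitespace around url() arguments, which exercises exactly this path. Below is a minimal, self-contained sketch of the idea, not the crate's actual code: the line and last_line_start fields and the skip_url_whitespace helper are hypothetical stand-ins for whatever state the real new_line() maintains.

```rust
// Sketch only: hypothetical Tokenizer state illustrating why '\n' needs its
// own match arm while skipping whitespace inside url(...).
struct Tokenizer {
    input: Vec<char>,
    position: usize,        // index of the next char to consume
    line: usize,            // 1-based line number, for error reporting
    last_line_start: usize, // index right after the most recent '\n'
}

impl Tokenizer {
    fn is_eof(&self) -> bool {
        self.position >= self.input.len()
    }

    fn current_char(&self) -> char {
        self.input[self.position]
    }

    // Record that a newline was just consumed, so later tokens report the
    // correct line instead of counting everything as line 1.
    fn new_line(&mut self) {
        self.line += 1;
        self.last_line_start = self.position;
    }

    // Skip whitespace after "url(", bumping the line counter on '\n'
    // instead of treating it like any other space character.
    fn skip_url_whitespace(&mut self) {
        while !self.is_eof() {
            match self.current_char() {
                ' ' | '\t' => self.position += 1,
                '\n' => {
                    self.position += 1;
                    self.new_line();
                }
                _ => break,
            }
        }
    }
}

fn main() {
    let mut t = Tokenizer {
        input: "\n \tx".chars().collect(),
        position: 0,
        line: 1,
        last_line_start: 0,
    };
    t.skip_url_whitespace();
    assert_eq!(t.position, 3); // stopped at 'x'
    assert_eq!(t.line, 2);     // the leading '\n' was counted
    println!("stopped at position {}, line {}", t.position, t.line);
}
```

With the old single arm, the same input would also stop at 'x' but still report line 1, which is the bug the added '\n' case fixes.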