Commit 898ae34

Author: bors-servo
Auto merge of #194 - servo:bad, r=emilio
Fix serialization of bad-string and bad-url tokens. This will help fix https://bugzilla.mozilla.org/show_bug.cgi?id=1396664.
2 parents: a720448 + 00a8629 · commit 898ae34
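For context, a minimal sketch (not part of the commit) of the behavior this change produces, assuming the `ParserInput`/`Parser`/`ToCss` API exercised by the test added in src/tests.rs below: with the fix, a bad-url token serializes back to its original contents rather than the old `url(<bad url>)` placeholder, so the output re-tokenizes to an equivalent token.

```rust
// Hedged sketch, not part of the commit; uses the same input as the new test below.
use cssparser::{Parser, ParserInput, ToCss, Token};

fn main() {
    // The whitespace inside an unquoted url() makes the tokenizer
    // produce a BadUrl token rather than a normal url token.
    let mut input = ParserInput::new("url(foo\\) b\\)ar)");
    let mut parser = Parser::new(&mut input);
    let token = parser.next().unwrap().clone();
    assert!(matches!(token, Token::BadUrl(_)));

    // With this commit the original contents are preserved on serialization...
    let css = token.to_css_string();
    assert_eq!(css, "url(foo\\) b\\)ar)");

    // ...so re-parsing the serialized output yields a BadUrl token again,
    // which serializes to the same string (round-trip).
    let mut reparsed_input = ParserInput::new(&css);
    let mut reparser = Parser::new(&mut reparsed_input);
    let reparsed = reparser.next().unwrap().clone();
    assert!(matches!(reparsed, Token::BadUrl(_)));
    assert_eq!(reparsed.to_css_string(), css);
}
```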

4 files changed (+37, -4 lines)

Cargo.toml (+1, -1)

```diff
@@ -1,6 +1,6 @@
 [package]
 name = "cssparser"
-version = "0.20.0"
+version = "0.20.1"
 authors = [ "Simon Sapin <simon.sapin@exyr.org>" ]

 description = "Rust implementation of CSS Syntax Level 3"
```

src/serializer.rs (+14, -2)

```diff
@@ -122,8 +122,20 @@ impl<'a> ToCss for Token<'a> {
             Token::SquareBracketBlock => dest.write_str("[")?,
             Token::CurlyBracketBlock => dest.write_str("{")?,

-            Token::BadUrl(_) => dest.write_str("url(<bad url>)")?,
-            Token::BadString(_) => dest.write_str("\"<bad string>\n")?,
+            Token::BadUrl(ref contents) => {
+                dest.write_str("url(")?;
+                dest.write_str(contents)?;
+                dest.write_char(')')?;
+            }
+            Token::BadString(ref value) => {
+                // During tokenization, an unescaped newline after a quote causes
+                // the token to be a BadString instead of a QuotedString.
+                // The BadString token ends just before the newline
+                // (which is in a separate WhiteSpace token),
+                // and therefore does not have a closing quote.
+                dest.write_char('"')?;
+                CssStringWriter::new(dest).write_str(value)?;
+            },
             Token::CloseParenthesis => dest.write_str(")")?,
             Token::CloseSquareBracket => dest.write_str("]")?,
             Token::CloseCurlyBracket => dest.write_str("}")?,
```
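To make the comment in that hunk concrete, here is a small hedged sketch (not part of the commit, using the same API and escaping behavior shown in the test added below): a BadString serializes as an opening double quote plus its escaped contents, with no closing quote, because the token ends just before the newline.

```rust
// Hedged sketch, not part of the commit; mirrors the second token in the new test.
use cssparser::{Parser, ParserInput, ToCss, Token};

fn main() {
    // A single-quoted string interrupted by an unescaped newline tokenizes
    // as BadString (the newline goes into a separate WhiteSpace token).
    let mut input = ParserInput::new("'ba\\'\"z\n");
    let mut parser = Parser::new(&mut input);
    let token = parser.next().unwrap().clone();
    assert!(matches!(token, Token::BadString(_)));

    // Serialized with an opening double quote, the inner '"' escaped by
    // CssStringWriter, and no closing quote.
    assert_eq!(token.to_css_string(), "\"ba'\\\"z");
}
```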

src/tests.rs (+20)

```diff
@@ -441,6 +441,26 @@ fn serializer(preserve_comments: bool) {
     });
 }

+#[test]
+fn serialize_bad_tokens() {
+    let mut input = ParserInput::new("url(foo\\) b\\)ar)'ba\\'\"z\n4");
+    let mut parser = Parser::new(&mut input);
+
+    let token = parser.next().unwrap().clone();
+    assert!(matches!(token, Token::BadUrl(_)));
+    assert_eq!(token.to_css_string(), "url(foo\\) b\\)ar)");
+
+    let token = parser.next().unwrap().clone();
+    assert!(matches!(token, Token::BadString(_)));
+    assert_eq!(token.to_css_string(), "\"ba'\\\"z");
+
+    let token = parser.next().unwrap().clone();
+    assert!(matches!(token, Token::Number { .. }));
+    assert_eq!(token.to_css_string(), "4");
+
+    assert!(parser.next().is_err());
+}
+
 #[test]
 fn serialize_current_color() {
     let c = Color::CurrentColor;
```

src/tokenizer.rs (+2, -1)

```diff
@@ -1289,8 +1289,9 @@ fn consume_unquoted_url<'a>(tokenizer: &mut Tokenizer<'a>) -> Result<Token<'a>,
         while !tokenizer.is_eof() {
             match_byte! { tokenizer.next_byte_unchecked(),
                 b')' => {
+                    let contents = tokenizer.slice_from(start_pos).into();
                     tokenizer.advance(1);
-                    break
+                    return BadUrl(contents)
                 }
                 b'\\' => {
                     tokenizer.advance(1);
```
