38 changes: 19 additions & 19 deletions ast.rs
@@ -8,16 +8,16 @@ use std::slice;
 
 #[deriving(Eq)]
 pub struct NumericValue {
-    representation: ~str,
-    value: f64,
-    int_value: Option<i64>,
+    pub representation: ~str,
+    pub value: f64,
+    pub int_value: Option<i64>,
 }
 
 
 #[deriving(Eq)]
 pub struct SourceLocation {
-    line: uint, // First line is 1
-    column: uint, // First character of a line is at column 1
+    pub line: uint, // First line is 1
+    pub column: uint, // First character of a line is at column 1
 }
 
 
@@ -70,25 +70,25 @@ pub enum ComponentValue
 
 #[deriving(Eq)]
 pub struct Declaration {
-    location: SourceLocation,
-    name: ~str,
-    value: ~[ComponentValue],
-    important: bool,
+    pub location: SourceLocation,
+    pub name: ~str,
+    pub value: ~[ComponentValue],
+    pub important: bool,
 }
 
 #[deriving(Eq)]
 pub struct QualifiedRule {
-    location: SourceLocation,
-    prelude: ~[ComponentValue],
-    block: ~[Node],
+    pub location: SourceLocation,
+    pub prelude: ~[ComponentValue],
+    pub block: ~[Node],
 }
 
 #[deriving(Eq)]
 pub struct AtRule {
-    location: SourceLocation,
-    name: ~str,
-    prelude: ~[ComponentValue],
-    block: Option<~[Node]>,
+    pub location: SourceLocation,
+    pub name: ~str,
+    pub prelude: ~[ComponentValue],
+    pub block: Option<~[Node]>,
 }
 
 #[deriving(Eq)]
@@ -106,8 +106,8 @@ pub enum Rule
 
 #[deriving(Eq)]
 pub struct SyntaxError {
-    location: SourceLocation,
-    reason: ErrorReason,
+    pub location: SourceLocation,
+    pub reason: ErrorReason,
 }
 
 #[deriving(Eq)]
@@ -139,7 +139,7 @@ impl<'a> SkipWhitespaceIterable<'a> for &'a [ComponentValue] {
 
 #[deriving(Clone)]
 pub struct SkipWhitespaceIterator<'a> {
-    iter_with_whitespace: slice::Items<'a, ComponentValue>,
+    pub iter_with_whitespace: slice::Items<'a, ComponentValue>,
 }
 
 impl<'a> Iterator<&'a ComponentValue> for SkipWhitespaceIterator<'a> {
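For context on the ast.rs hunks: rustc of this era made struct fields private outside their defining module unless marked `pub`, so downstream users could no longer read these fields directly without the qualifiers added above. A minimal sketch of the kind of access this preserves, assuming a hypothetical consumer crate that links the library as `cssparser` (the helper name is illustrative, not part of this patch):

    extern crate cssparser;

    use cssparser::ast::Declaration;

    // Compiles only because `important` is now `pub`; with the
    // private-by-default field it would be rejected outside ast.rs.
    fn is_important(decl: &Declaration) -> bool {
        decl.important
    }
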
8 changes: 4 additions & 4 deletions color.rs
@@ -11,10 +11,10 @@ use ast::*;
 pub struct RGBA {
     // All in 0..1
     // Use f32 to try and match rust-azure’s AzFloat
-    red: f32,
-    green: f32,
-    blue: f32,
-    alpha: f32,
+    pub red: f32,
+    pub green: f32,
+    pub blue: f32,
+    pub alpha: f32,
 }
 
 #[deriving(Clone, Eq)]
2 changes: 1 addition & 1 deletion from_bytes.rs
@@ -50,7 +50,7 @@ pub fn decode_stylesheet_bytes(css: &[u8], protocol_encoding_label: Option<&str>
             Some(label_length)
             => if css.slice_from(10 + label_length).starts_with("\";".as_bytes()) {
                 let label = css.slice(10, 10 + label_length);
-                let label = str::from_chars(label.iter().map(|&b| b as char).to_owned_vec());
+                let label = str::from_chars(label.iter().map(|&b| b as char).collect::<~[char]>());
                 match encoding_from_whatwg_label(label) {
                     None => (),
                     Some(fallback) => match fallback.name() {
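The from_bytes.rs change follows the standard-library move away from `to_owned_vec()` on iterators in favour of `collect()`. A minimal sketch of the pattern in the same pre-1.0 dialect; `bytes_to_chars` is an illustrative helper, not part of this patch:

    // Deprecated form:
    //     let chars = bytes.iter().map(|&b| b as char).to_owned_vec();

    // With `collect()`, the target container type is spelled out via the
    // turbofish when nothing else pins down what to collect into.
    fn bytes_to_chars(bytes: &[u8]) -> ~[char] {
        bytes.iter().map(|&b| b as char).collect::<~[char]>()
    }
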
11 changes: 6 additions & 5 deletions lib.rs
@@ -2,11 +2,12 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#[crate_id = "github.com/mozilla-servo/rust-cssparser#cssparser:0.1"];
-#[feature(globs, macro_rules)];
-#[crate_type = "lib"];
-#[crate_type = "dylib"];
-#[crate_type = "rlib"];
+#![crate_id = "github.com/mozilla-servo/rust-cssparser#cssparser:0.1"]
+#![crate_type = "lib"]
+#![crate_type = "dylib"]
+#![crate_type = "rlib"]
+
+#![feature(globs, macro_rules)]
 
 extern crate encoding; // https://github.com/lifthrasiir/rust-encoding
 
29 changes: 15 additions & 14 deletions tests.rs
@@ -92,7 +92,7 @@ fn run_json_tests<T: ToJson>(json_data: &str, parse: |input: ~str| -> T) {
 #[test]
 fn component_value_list() {
     run_json_tests(include_str!("css-parsing-tests/component_value_list.json"), |input| {
-        tokenize(input).map(|(c, _)| c).to_owned_vec()
+        tokenize(input).map(|(c, _)| c).collect::<~[ComponentValue]>()
     });
 }
 
@@ -108,7 +108,7 @@ fn one_component_value() {
 #[test]
 fn declaration_list() {
     run_json_tests(include_str!("css-parsing-tests/declaration_list.json"), |input| {
-        parse_declaration_list(tokenize(input)).to_owned_vec()
+        parse_declaration_list(tokenize(input)).collect::<~[Result<DeclarationListItem, SyntaxError>]>()
     });
 }
 
@@ -124,15 +124,15 @@ fn one_declaration() {
 #[test]
 fn rule_list() {
     run_json_tests(include_str!("css-parsing-tests/rule_list.json"), |input| {
-        parse_rule_list(tokenize(input)).to_owned_vec()
+        parse_rule_list(tokenize(input)).collect::<~[Result<Rule, SyntaxError>]>()
     });
 }
 
 
 #[test]
 fn stylesheet() {
     run_json_tests(include_str!("css-parsing-tests/stylesheet.json"), |input| {
-        parse_stylesheet_rules(tokenize(input)).to_owned_vec()
+        parse_stylesheet_rules(tokenize(input)).collect::<~[Result<Rule, SyntaxError>]>()
     });
 }

@@ -158,15 +158,15 @@ fn stylesheet_from_bytes() {
         let css = get_string(map, &~"css_bytes").unwrap().chars().map(|c| {
             assert!(c as u32 <= 0xFF);
             c as u8
-        }).to_owned_vec();
+        }).collect::<~[u8]>();
         let protocol_encoding_label = get_string(map, &~"protocol_encoding");
         let environment_encoding = get_string(map, &~"environment_encoding")
             .and_then(encoding_from_whatwg_label);
 
         let (mut rules, used_encoding) = parse_stylesheet_rules_from_bytes(
             css, protocol_encoding_label, environment_encoding);
 
-        (rules.to_owned_vec(), used_encoding.name().to_owned()).to_json()
+        (rules.collect::<~[Result<Rule, SyntaxError>]>(), used_encoding.name().to_owned()).to_json()
     };
     assert_json_eq(result, expected, json::Object(map).to_str());
 });
@@ -242,17 +242,17 @@ fn bench_color_lookup_fail(b: &mut test::BenchHarness) {
 #[test]
 fn nth() {
     run_json_tests(include_str!("css-parsing-tests/An+B.json"), |input| {
-        parse_nth(tokenize(input).map(|(c, _)| c).to_owned_vec())
+        parse_nth(tokenize(input).map(|(c, _)| c).collect::<~[ComponentValue]>())
     });
 }
 
 
 #[test]
 fn serializer() {
     run_json_tests(include_str!("css-parsing-tests/component_value_list.json"), |input| {
-        let component_values = tokenize(input).map(|(c, _)| c).to_owned_vec();
+        let component_values = tokenize(input).map(|(c, _)| c).collect::<~[ComponentValue]>();
         let serialized = component_values.iter().to_css();
-        tokenize(serialized).map(|(c, _)| c).to_owned_vec()
+        tokenize(serialized).map(|(c, _)| c).collect::<~[ComponentValue]>()
     });
 }

@@ -339,11 +339,11 @@ impl ToJson for DeclarationListItem {
 
 
 fn list_to_json(list: &~[(ComponentValue, SourceLocation)]) -> ~[json::Json] {
-    list.map(|tuple| {
+    list.iter().map(|tuple| {
         match *tuple {
             (ref c, _) => c.to_json()
         }
-    })
+    }).collect()
 }
 
 
@@ -426,11 +426,12 @@ impl ToJson for ComponentValue {
             CDC => JString(~"-->"),
 
             Function(ref name, ref arguments)
-            => JList(~[JString(~"function"), name.to_json()] + arguments.map(|a| a.to_json())),
+            => JList(~[JString(~"function"), name.to_json()] +
+                     arguments.iter().map(|a| a.to_json()).collect::<~[json::Json]>()),
             ParenthesisBlock(ref content)
-            => JList(~[JString(~"()")] + content.map(|c| c.to_json())),
+            => JList(~[JString(~"()")] + content.iter().map(|c| c.to_json()).collect::<~[json::Json]>()),
             SquareBracketBlock(ref content)
-            => JList(~[JString(~"[]")] + content.map(|c| c.to_json())),
+            => JList(~[JString(~"[]")] + content.iter().map(|c| c.to_json()).collect::<~[json::Json]>()),
             CurlyBracketBlock(ref content)
             => JList(~[JString(~"{}")] + list_to_json(content)),
 
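The tests.rs hunks combine two migrations: `to_owned_vec()` becomes `collect::<~[T]>()`, and the container-level `map` on owned vectors (as in `list_to_json` and the `ToJson` impl above) becomes `iter().map(...).collect()`. A minimal sketch of the second pattern; `doubled` is an illustrative function, not part of the crate:

    // Old style: `map` called directly on the owned vector (removed API).
    //     fn doubled(list: &~[int]) -> ~[int] { list.map(|&x| x * 2) }

    // New style: go through `iter()` and collect the mapped results;
    // the return type tells `collect()` what container to build.
    fn doubled(list: &~[int]) -> ~[int] {
        list.iter().map(|&x| x * 2).collect()
    }
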
10 changes: 5 additions & 5 deletions tokenizer.rs
@@ -47,11 +47,11 @@ fn test_preprocess() {
 
 
 pub struct Tokenizer {
-    priv input: ~str,
-    priv length: uint, // All counted in bytes, not characters
-    priv position: uint, // All counted in bytes, not characters
-    priv line: uint,
-    priv last_line_start: uint, // All counted in bytes, not characters
+    input: ~str,
+    length: uint, // All counted in bytes, not characters
+    position: uint, // All counted in bytes, not characters
+    line: uint,
+    last_line_start: uint, // All counted in bytes, not characters
 }
 
 
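The tokenizer.rs hunk is the counterpart of the ast.rs changes: with fields private by default, the explicit `priv` keyword became redundant (and later invalid), so dropping it keeps the fields inaccessible outside the module. A rough before/after sketch with a hypothetical struct name:

    // Before this patch the default was spelled out:
    //     pub struct Tokenizer { priv input: ~str, /* ... */ }

    // After: fields with no qualifier are private outside the module,
    // so the tokenizer's internals stay hidden without `priv`.
    pub struct PrivateFieldsExample {
        input: ~str,
        position: uint,
    }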