diff --git a/ast.rs b/ast.rs
index b180a296..75b26d48 100644
--- a/ast.rs
+++ b/ast.rs
@@ -8,16 +8,16 @@ use std::slice;
 
 #[deriving(Eq)]
 pub struct NumericValue {
-    representation: ~str,
-    value: f64,
-    int_value: Option<i64>,
+    pub representation: ~str,
+    pub value: f64,
+    pub int_value: Option<i64>,
 }
 
 
 #[deriving(Eq)]
 pub struct SourceLocation {
-    line: uint,  // First line is 1
-    column: uint,  // First character of a line is at column 1
+    pub line: uint,  // First line is 1
+    pub column: uint,  // First character of a line is at column 1
 }
 
 
@@ -70,25 +70,25 @@ pub enum ComponentValue {
 
 #[deriving(Eq)]
 pub struct Declaration {
-    location: SourceLocation,
-    name: ~str,
-    value: ~[ComponentValue],
-    important: bool,
+    pub location: SourceLocation,
+    pub name: ~str,
+    pub value: ~[ComponentValue],
+    pub important: bool,
 }
 
 #[deriving(Eq)]
 pub struct QualifiedRule {
-    location: SourceLocation,
-    prelude: ~[ComponentValue],
-    block: ~[Node],
+    pub location: SourceLocation,
+    pub prelude: ~[ComponentValue],
+    pub block: ~[Node],
 }
 
 #[deriving(Eq)]
 pub struct AtRule {
-    location: SourceLocation,
-    name: ~str,
-    prelude: ~[ComponentValue],
-    block: Option<~[Node]>,
+    pub location: SourceLocation,
+    pub name: ~str,
+    pub prelude: ~[ComponentValue],
+    pub block: Option<~[Node]>,
 }
 
 #[deriving(Eq)]
@@ -106,8 +106,8 @@ pub enum Rule {
 
 #[deriving(Eq)]
 pub struct SyntaxError {
-    location: SourceLocation,
-    reason: ErrorReason,
+    pub location: SourceLocation,
+    pub reason: ErrorReason,
 }
 
 #[deriving(Eq)]
@@ -139,7 +139,7 @@ impl<'a> SkipWhitespaceIterable<'a> for &'a [ComponentValue] {
 
 #[deriving(Clone)]
 pub struct SkipWhitespaceIterator<'a> {
-    iter_with_whitespace: slice::Items<'a, ComponentValue>,
+    pub iter_with_whitespace: slice::Items<'a, ComponentValue>,
 }
 
 impl<'a> Iterator<&'a ComponentValue> for SkipWhitespaceIterator<'a> {
diff --git a/color.rs b/color.rs
index 9c05eabe..4b9debce 100644
--- a/color.rs
+++ b/color.rs
@@ -11,10 +11,10 @@ use ast::*;
 pub struct RGBA {
     // All in 0..1
     // Use f32 to try and match rust-azure’s AzFloat
-    red: f32,
-    green: f32,
-    blue: f32,
-    alpha: f32,
+    pub red: f32,
+    pub green: f32,
+    pub blue: f32,
+    pub alpha: f32,
 }
 
 #[deriving(Clone, Eq)]
diff --git a/from_bytes.rs b/from_bytes.rs
index 2355e4a5..bc0de828 100644
--- a/from_bytes.rs
+++ b/from_bytes.rs
@@ -50,7 +50,7 @@ pub fn decode_stylesheet_bytes(css: &[u8], protocol_encoding_label: Option<&str>
             Some(label_length)
             => if css.slice_from(10 + label_length).starts_with("\";".as_bytes()) {
                 let label = css.slice(10, 10 + label_length);
-                let label = str::from_chars(label.iter().map(|&b| b as char).to_owned_vec());
+                let label = str::from_chars(label.iter().map(|&b| b as char).collect::<~[char]>());
                 match encoding_from_whatwg_label(label) {
                     None => (),
                     Some(fallback) => match fallback.name() {
diff --git a/lib.rs b/lib.rs
index 4db2a81a..4b6a37c2 100644
--- a/lib.rs
+++ b/lib.rs
@@ -2,11 +2,12 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#[crate_id = "github.com/mozilla-servo/rust-cssparser#cssparser:0.1"];
-#[feature(globs, macro_rules)];
-#[crate_type = "lib"];
-#[crate_type = "dylib"];
-#[crate_type = "rlib"];
+#![crate_id = "github.com/mozilla-servo/rust-cssparser#cssparser:0.1"]
+#![crate_type = "lib"]
+#![crate_type = "dylib"]
+#![crate_type = "rlib"]
+
+#![feature(globs, macro_rules)]
 
 extern crate encoding;  // https://github.com/lifthrasiir/rust-encoding
 
diff --git a/tests.rs b/tests.rs
index 43fd8503..a5b6d7af 100644
--- a/tests.rs
+++ b/tests.rs
@@ -92,7 +92,7 @@ fn run_json_tests<T: ToJson>(json_data: &str, parse: |input: ~str| -> T) {
 #[test]
 fn component_value_list() {
     run_json_tests(include_str!("css-parsing-tests/component_value_list.json"), |input| {
-        tokenize(input).map(|(c, _)| c).to_owned_vec()
+        tokenize(input).map(|(c, _)| c).collect::<~[ComponentValue]>()
     });
 }
 
@@ -108,7 +108,7 @@ fn one_component_value() {
 #[test]
 fn declaration_list() {
     run_json_tests(include_str!("css-parsing-tests/declaration_list.json"), |input| {
-        parse_declaration_list(tokenize(input)).to_owned_vec()
+        parse_declaration_list(tokenize(input)).collect::<~[Result<DeclarationListItem, SyntaxError>]>()
     });
 }
 
@@ -124,7 +124,7 @@ fn one_declaration() {
 #[test]
 fn rule_list() {
     run_json_tests(include_str!("css-parsing-tests/rule_list.json"), |input| {
-        parse_rule_list(tokenize(input)).to_owned_vec()
+        parse_rule_list(tokenize(input)).collect::<~[Result<Rule, SyntaxError>]>()
     });
 }
 
@@ -132,7 +132,7 @@ fn rule_list() {
 #[test]
 fn stylesheet() {
     run_json_tests(include_str!("css-parsing-tests/stylesheet.json"), |input| {
-        parse_stylesheet_rules(tokenize(input)).to_owned_vec()
+        parse_stylesheet_rules(tokenize(input)).collect::<~[Result<Rule, SyntaxError>]>()
     });
 }
 
@@ -158,7 +158,7 @@ fn stylesheet_from_bytes() {
             let css = get_string(map, &~"css_bytes").unwrap().chars().map(|c| {
                 assert!(c as u32 <= 0xFF);
                 c as u8
-            }).to_owned_vec();
+            }).collect::<~[u8]>();
             let protocol_encoding_label = get_string(map, &~"protocol_encoding");
             let environment_encoding = get_string(map, &~"environment_encoding")
                                        .and_then(encoding_from_whatwg_label);
@@ -166,7 +166,7 @@ fn stylesheet_from_bytes() {
 
             let (mut rules, used_encoding) = parse_stylesheet_rules_from_bytes(
                 css, protocol_encoding_label, environment_encoding);
-            (rules.to_owned_vec(), used_encoding.name().to_owned()).to_json()
+            (rules.collect::<~[Result<Rule, SyntaxError>]>(), used_encoding.name().to_owned()).to_json()
         };
         assert_json_eq(result, expected, json::Object(map).to_str());
     });
@@ -242,7 +242,7 @@ fn bench_color_lookup_fail(b: &mut test::BenchHarness) {
 #[test]
 fn nth() {
     run_json_tests(include_str!("css-parsing-tests/An+B.json"), |input| {
-        parse_nth(tokenize(input).map(|(c, _)| c).to_owned_vec())
+        parse_nth(tokenize(input).map(|(c, _)| c).collect::<~[ComponentValue]>())
     });
 }
 
@@ -250,9 +250,9 @@ fn nth() {
 #[test]
 fn serializer() {
     run_json_tests(include_str!("css-parsing-tests/component_value_list.json"), |input| {
-        let component_values = tokenize(input).map(|(c, _)| c).to_owned_vec();
+        let component_values = tokenize(input).map(|(c, _)| c).collect::<~[ComponentValue]>();
         let serialized = component_values.iter().to_css();
-        tokenize(serialized).map(|(c, _)| c).to_owned_vec()
+        tokenize(serialized).map(|(c, _)| c).collect::<~[ComponentValue]>()
     });
 }
 
@@ -339,11 +339,11 @@ impl ToJson for DeclarationListItem {
 
 
 fn list_to_json(list: &~[(ComponentValue, SourceLocation)]) -> ~[json::Json] {
-    list.map(|tuple| {
+    list.iter().map(|tuple| {
         match *tuple {
             (ref c, _) => c.to_json()
         }
-    })
+    }).collect()
 }
 
 
@@ -426,11 +426,12 @@ impl ToJson for ComponentValue {
             CDC => JString(~"-->"),
 
             Function(ref name, ref arguments)
-                => JList(~[JString(~"function"), name.to_json()] + arguments.map(|a| a.to_json())),
+                => JList(~[JString(~"function"), name.to_json()]
+                         + arguments.iter().map(|a| a.to_json()).collect::<~[json::Json]>()),
             ParenthesisBlock(ref content)
-                => JList(~[JString(~"()")] + content.map(|c| c.to_json())),
+                => JList(~[JString(~"()")] + content.iter().map(|c| c.to_json()).collect::<~[json::Json]>()),
             SquareBracketBlock(ref content)
-                => JList(~[JString(~"[]")] + content.map(|c| c.to_json())),
+                => JList(~[JString(~"[]")] + content.iter().map(|c| c.to_json()).collect::<~[json::Json]>()),
 
             CurlyBracketBlock(ref content)
                 => JList(~[JString(~"{}")] + list_to_json(content)),
diff --git a/tokenizer.rs b/tokenizer.rs
index 76092e8d..b3942980 100644
--- a/tokenizer.rs
+++ b/tokenizer.rs
@@ -47,11 +47,11 @@ fn test_preprocess() {
 
 
 pub struct Tokenizer {
-    priv input: ~str,
-    priv length: uint,  // All counted in bytes, not characters
-    priv position: uint,  // All counted in bytes, not characters
-    priv line: uint,
-    priv last_line_start: uint,  // All counted in bytes, not characters
+    input: ~str,
+    length: uint,  // All counted in bytes, not characters
+    position: uint,  // All counted in bytes, not characters
+    line: uint,
+    last_line_start: uint,  // All counted in bytes, not characters
 }