
Commit 3bca44d

Inline NumericValue struct into Token::Dimension enum fields to require less alignment and reduce the size of Token
1 parent 5cf34eb commit 3bca44d
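
The size win comes from flattening the NumericValue fields (value, int_value, has_sign) directly into the Dimension variant, so the compiler can lay them out alongside the unit instead of nesting a separately padded struct; the size_of_tests.rs change below lowers the expected size of Token from 40 to 32 bytes on 64-bit. A minimal, self-contained sketch of the idea, using simplified stand-in types rather than the crate's definitions:

use std::mem::size_of;

// Stand-in for the old shape: the variant carries a separate struct by value.
struct Numeric {
    value: f32,
    int_value: Option<i32>,
    has_sign: bool,
}

enum TokenBefore {
    Dimension(Numeric, String),
    Other,
}

// Stand-in for the new shape: the same fields are inlined into the variant,
// giving the compiler more freedom to pack them with the unit and discriminant.
enum TokenAfter {
    Dimension {
        value: f32,
        int_value: Option<i32>,
        has_sign: bool,
        unit: String,
    },
    Other,
}

fn main() {
    // Exact numbers depend on the target and compiler version; the point is
    // that the flattened layout tends to come out smaller.
    println!("nested struct:  {} bytes", size_of::<TokenBefore>());
    println!("inlined fields: {} bytes", size_of::<TokenAfter>());
}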

7 files changed (+56, -35 lines)

src/color.rs

Lines changed: 1 addition & 1 deletion
@@ -502,7 +502,7 @@ fn parse_rgb_components_hsl<'i, 't>(arguments: &mut Parser<'i, 't>) -> Result<(u
     let token = try!(arguments.next());
     let hue_degrees = match token {
         Token::Number(NumericValue { value: v, .. }) => Ok(v),
-        Token::Dimension(NumericValue { value: v, .. }, ref unit) => {
+        Token::Dimension { value: v, ref unit, .. } => {
             match_ignore_ascii_case! { &*unit,
                 "deg" => Ok(v),
                 "grad" => Ok(v * 360. / 400.),

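The color.rs hunk above is cut off after the grad arm; what it shows is hue dimensions being normalized to degrees. A hedged sketch of that conversion, not the crate's code; the rad and turn arms do not appear in the excerpt and are assumptions:

// Hypothetical helper mirroring the conversion above; only "deg" and "grad"
// are visible in the excerpt, the remaining arms are assumed.
fn angle_to_degrees(value: f32, unit: &str) -> Option<f32> {
    match unit.to_ascii_lowercase().as_str() {
        "deg" => Some(value),
        "grad" => Some(value * 360. / 400.),
        "rad" => Some(value * 360. / (2. * std::f32::consts::PI)),
        "turn" => Some(value * 360.),
        _ => None,
    }
}
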
src/nth.rs

Lines changed: 7 additions & 13 deletions
@@ -20,20 +20,14 @@ pub fn parse_nth<'i, 't>(input: &mut Parser<'i, 't>) -> Result<(i32, i32), Basic
                 None => Err(()),
             }
         }
-        Token::Dimension(value, ref unit) => {
-            match value.int_value {
-                Some(v) => {
-                    let a = v as i32;
-                    match_ignore_ascii_case! {
-                        &unit,
-                        "n" => Ok(try!(parse_b(input, a))),
-                        "n-" => Ok(try!(parse_signless_b(input, a, -1))),
-                        _ => {
-                            parse_n_dash_digits(&*unit).map(|val| (a, val))
-                        }
-                    }
+        Token::Dimension { int_value: Some(a), ref unit, .. } => {
+            match_ignore_ascii_case! {
+                &unit,
+                "n" => Ok(try!(parse_b(input, a))),
+                "n-" => Ok(try!(parse_signless_b(input, a, -1))),
+                _ => {
+                    parse_n_dash_digits(&*unit).map(|val| (a, val))
                 }
-                None => Err(()),
             }
         }
         Token::Ident(ref value) => {
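
The nth.rs change is where the struct-like variant helps most: int_value can be matched with Some(a) directly in the arm's pattern, removing the nested match and the explicit None arm. A minimal sketch of that pattern on a simplified stand-in enum (field names mirror the diff, but this is not the crate's code):

enum Tok<'a> {
    Dimension { value: f32, int_value: Option<i32>, unit: &'a str },
    Other,
}

// Only dimensions whose source text was an integer (int_value is Some) and
// whose unit is "n" are accepted; everything else falls through to one Err arm.
fn coefficient(tok: &Tok) -> Result<i32, ()> {
    match *tok {
        Tok::Dimension { int_value: Some(a), ref unit, .. }
            if unit.eq_ignore_ascii_case("n") => Ok(a),
        _ => Err(()),
    }
}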

src/serializer.rs

Lines changed: 7 additions & 3 deletions
@@ -97,8 +97,12 @@ impl<'a> ToCss for Token<'a> {
                 try!(write_numeric(value, dest));
                 try!(dest.write_str("%"));
             },
-            Token::Dimension(value, ref unit) => {
-                try!(write_numeric(value, dest));
+            Token::Dimension { value, int_value, has_sign, ref unit } => {
+                try!(write_numeric(NumericValue {
+                    value: value,
+                    int_value: int_value,
+                    has_sign: has_sign,
+                }, dest));
                 // Disambiguate with scientific notation.
                 let unit = &**unit;
                 if unit == "e" || unit == "E" || unit.starts_with("e-") || unit.starts_with("E-") {

@@ -391,7 +395,7 @@ impl<'a> Token<'a> {
             Token::Delim('*') => DelimAsterisk,
             Token::Number(_) => Number,
             Token::Percentage(_) => Percentage,
-            Token::Dimension(..) => Dimension,
+            Token::Dimension { .. } => Dimension,
             Token::WhiteSpace(_) => WhiteSpace,
             Token::Comment(_) => DelimSlash,
             Token::DashMatch => DashMatch,
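
Helpers that still take a NumericValue, such as write_numeric here and the numeric helper in tests.rs below, are simply handed a struct rebuilt at the call site from the flattened Dimension fields. A simplified stand-in sketch of that call-site pattern (not the crate's code):

struct NumericValue {
    value: f32,
    int_value: Option<i32>,
    has_sign: bool,
}

// Stand-in for a helper that still expects the grouped struct.
fn write_numeric(n: NumericValue) -> String {
    format!("{}", n.value)
}

// The flattened Dimension fields are regrouped only where a helper needs them.
fn serialize_dimension(value: f32, int_value: Option<i32>, has_sign: bool, unit: &str) -> String {
    let mut out = write_numeric(NumericValue { value, int_value, has_sign });
    out.push_str(unit);
    out
}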

src/size_of_tests.rs

Lines changed: 1 addition & 1 deletion
@@ -32,7 +32,7 @@ macro_rules! size_of_test {
 }
 
 // These assume 64-bit
-size_of_test!(token, Token, 40);
+size_of_test!(token, Token, 32);
 size_of_test!(numeric_value, NumericValue, 16);
 size_of_test!(percentage_value, PercentageValue, 16);
 size_of_test!(std_cow_str, Cow<'static, str>, 32);
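
size_of_test! is the crate's macro for pinning type sizes in tests; the only change here is the expected size of Token dropping from 40 to 32 bytes. For readers unfamiliar with the pattern, a hedged sketch of how such a macro can be written (an assumed shape, not necessarily the crate's actual definition):

macro_rules! size_of_test {
    ($name:ident, $ty:ty, $expected:expr) => {
        #[test]
        fn $name() {
            let actual = ::std::mem::size_of::<$ty>();
            assert_eq!(actual, $expected,
                       "size_of::<{}>() is {}, expected {}",
                       stringify!($ty), actual, $expected);
        }
    };
}

Usage then mirrors the lines above, e.g. size_of_test!(token, Token, 32);.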

src/tests.rs

Lines changed: 6 additions & 2 deletions
@@ -807,9 +807,13 @@ fn one_component_value_to_json(token: Token, input: &mut Parser) -> Json {
             }));
             v
         }),
-        Token::Dimension(value, unit) => Json::Array({
+        Token::Dimension { value, int_value, has_sign, unit } => Json::Array({
             let mut v = vec!["dimension".to_json()];
-            v.extend(numeric(value));
+            v.extend(numeric(NumericValue {
+                value: value,
+                int_value: int_value,
+                has_sign: has_sign,
+            }));
             v.push(unit.to_json());
             v
         }),

src/tokenizer.rs

Lines changed: 32 additions & 13 deletions
@@ -59,7 +59,21 @@ pub enum Token<'a> {
     Percentage(PercentageValue),
 
     /// A [`<dimension-token>`](https://drafts.csswg.org/css-syntax/#dimension-token-diagram)
-    Dimension(NumericValue, CompactCowStr<'a>),
+    Dimension {
+        /// The value as a float
+        value: f32,
+
+        /// If the origin source did not include a fractional part, the value as an integer.
+        int_value: Option<i32>,
+
+        /// Whether the number had a `+` or `-` sign.
+        ///
+        /// This is used is some cases like the <An+B> micro syntax. (See the `parse_nth` function.)
+        has_sign: bool,
+
+        /// The unit, e.g. "px" in `12px`
+        unit: CompactCowStr<'a>
+    },
 
     /// A [`<whitespace-token>`](https://drafts.csswg.org/css-syntax/#whitespace-token-diagram)
     WhiteSpace(&'a str),

@@ -861,24 +875,29 @@ fn consume_numeric<'a>(tokenizer: &mut Tokenizer<'a>) -> Token<'a> {
             has_sign: has_sign,
         })
     }
-    let value = NumericValue {
-        value: value as f32,
-        int_value: int_value,
-        has_sign: has_sign,
-    };
+    let value = value as f32;
     if is_ident_start(tokenizer) {
-        let name = consume_name(tokenizer);
+        let unit = consume_name(tokenizer);
         if tokenizer.viewport_percentages == SeenStatus::LookingForThem {
-            if name.eq_ignore_ascii_case("vh") ||
-               name.eq_ignore_ascii_case("vw") ||
-               name.eq_ignore_ascii_case("vmin") ||
-               name.eq_ignore_ascii_case("vmax") {
+            if unit.eq_ignore_ascii_case("vh") ||
+               unit.eq_ignore_ascii_case("vw") ||
+               unit.eq_ignore_ascii_case("vmin") ||
+               unit.eq_ignore_ascii_case("vmax") {
                 tokenizer.viewport_percentages = SeenStatus::SeenAtLeastOne;
             }
         }
-        Dimension(value, name)
+        Dimension {
+            value: value,
+            int_value: int_value,
+            has_sign: has_sign,
+            unit: unit,
+        }
     } else {
-        Number(value)
+        Number(NumericValue {
+            value: value,
+            int_value: int_value,
+            has_sign: has_sign,
+        })
     }
 }
 
src/unicode_range.rs

Lines changed: 2 additions & 2 deletions
@@ -64,14 +64,14 @@ fn parse_tokens<'i, 't>(input: &mut Parser<'i, 't>) -> Result<(), BasicParseErro
             }
             parse_question_marks(input)
         }
-        Token::Dimension(..) => {
+        Token::Dimension { .. } => {
             parse_question_marks(input)
         }
         Token::Number(_) => {
             let after_number = input.position();
             match input.next_including_whitespace() {
                 Ok(Token::Delim('?')) => parse_question_marks(input),
-                Ok(Token::Dimension(..)) => {}
+                Ok(Token::Dimension { .. }) => {}
                 Ok(Token::Number(_)) => {}
                 _ => input.reset(after_number)
             }
