Fix using the match_ignore_ascii_case macro inside another macro expansion #264

Merged with 4 commits on Oct 22, 2019
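
Before the diff, a quick illustration of the scenario named in the PR title: a downstream macro_rules! macro whose own expansion invokes cssparser's match_ignore_ascii_case!. This is only a sketch; the Direction enum and the parse_direction! macro are invented for illustration, and the call shape follows cssparser's documented usage of the macro.

    use cssparser::match_ignore_ascii_case;

    #[derive(Debug, PartialEq)]
    enum Direction {
        Ltr,
        Rtl,
    }

    // A user-written macro whose expansion itself contains a
    // match_ignore_ascii_case! invocation; this nesting is the case the PR fixes.
    macro_rules! parse_direction {
        ($input:expr) => {
            match_ignore_ascii_case! { $input,
                "ltr" => Some(Direction::Ltr),
                "rtl" => Some(Direction::Rtl),
                _ => None
            }
        };
    }

    fn main() {
        assert_eq!(parse_direction!("LTR"), Some(Direction::Ltr));
        assert_eq!(parse_direction!("sideways"), None);
    }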
1 change: 0 additions & 1 deletion .travis.yml
@@ -9,7 +9,6 @@ script:
- cargo build --verbose
- cargo test --verbose
- cargo doc --verbose
- cargo test --features heapsize
- cargo test --features dummy_match_byte
- if [ "$TRAVIS_RUST_VERSION" == "nightly" ]; then cargo test --features bench; fi
- if [ "$TRAVIS_RUST_VERSION" == "nightly" ]; then cargo test --features "bench dummy_match_byte"; fi
6 changes: 3 additions & 3 deletions Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "cssparser"
version = "0.26.0"
version = "0.27.0"
authors = [ "Simon Sapin <simon.sapin@exyr.org>" ]

description = "Rust implementation of CSS Syntax Level 3"
@@ -10,6 +10,7 @@ readme = "README.md"
keywords = ["css", "syntax", "parser"]
license = "MPL-2.0"
build = "build.rs"
edition = "2018"

exclude = ["src/css-parsing-tests/**", "src/big-data-url.css"]

@@ -19,9 +20,8 @@ difference = "2.0"
encoding_rs = "0.8"

[dependencies]
cssparser-macros = {path = "./macros", version = "0.4"}
cssparser-macros = {path = "./macros", version = "0.5"}
dtoa-short = "0.3"
heapsize = {version = ">= 0.3, < 0.5", optional = true}
itoa = "0.4"
matches = "0.1"
phf = {version = "0.8", features = ["macros"]}
9 changes: 1 addition & 8 deletions build.rs
@@ -2,12 +2,6 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#[macro_use]
extern crate quote;
#[macro_use]
extern crate syn;
extern crate proc_macro2;

#[cfg(feature = "dummy_match_byte")]
mod codegen {
pub fn main() {}
@@ -19,7 +13,6 @@ mod match_byte;

#[cfg(not(feature = "dummy_match_byte"))]
mod codegen {
use match_byte;
use std::env;
use std::path::Path;
use std::thread::Builder;
@@ -35,7 +28,7 @@ mod codegen {
let handle = Builder::new()
.stack_size(128 * 1024 * 1024)
.spawn(move || {
match_byte::expand(&input, &output);
crate::match_byte::expand(&input, &output);
})
.unwrap();

12 changes: 9 additions & 3 deletions build/match_byte.rs
@@ -2,13 +2,14 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use quote::ToTokens;
use quote::{quote, ToTokens};
use std::fs::File;
use std::io::{Read, Write};
use std::path::Path;
use syn;
use syn::fold::Fold;
use syn::parse::{Parse, ParseStream, Result};
use syn::{parse_quote, Token};

use proc_macro2::{Span, TokenStream};

@@ -73,7 +74,12 @@ fn get_byte_from_expr_lit(expr: &Box<syn::Expr>) -> u8 {
}

/// Parse a pattern and fill the table accordingly
fn parse_pat_to_table<'a>(pat: &'a syn::Pat, case_id: u8, wildcard: &mut Option<&'a syn::Ident>, table: &mut [u8; 256]) {
fn parse_pat_to_table<'a>(
pat: &'a syn::Pat,
case_id: u8,
wildcard: &mut Option<&'a syn::Ident>,
table: &mut [u8; 256],
) {
match pat {
&syn::Pat::Lit(syn::PatLit { ref expr, .. }) => {
let value = get_byte_from_expr_lit(expr);
@@ -108,7 +114,7 @@ fn parse_pat_to_table<'a>(pat: &'a syn::Pat, case_id: u8, wildcard: &mut Option<
*byte = case_id;
}
}
},
}
&syn::Pat::Or(syn::PatOr { ref cases, .. }) => {
for case in cases {
parse_pat_to_table(case, case_id, wildcard, table);
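
For readers unfamiliar with the build script, parse_pat_to_table fills a 256-entry table that maps each byte value to the index of the match arm handling it, so the generated matcher can branch with a single array lookup. The sketch below, with invented arm indices, only illustrates that dispatch idea; it is not the generated code.

    // Conceptual sketch of the byte-dispatch table parse_pat_to_table fills:
    // table[b] holds the index of the match arm responsible for byte b.
    fn main() {
        let mut table = [0u8; 256]; // 0 = wildcard arm
        table[b'\n' as usize] = 1; // arm 1 handles b'\n'
        table[b' ' as usize] = 2; // arm 2 handles b' '

        let classify = |byte: u8| table[byte as usize];
        assert_eq!(classify(b'\n'), 1);
        assert_eq!(classify(b'x'), 0); // falls through to the wildcard arm
    }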
2 changes: 1 addition & 1 deletion macros/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "cssparser-macros"
version = "0.4.0"
version = "0.5.0"
authors = ["Simon Sapin <simon.sapin@exyr.org>"]
description = "Procedural macros for cssparser"
documentation = "https://docs.rs/cssparser-macros/"
111 changes: 39 additions & 72 deletions macros/lib.rs
@@ -6,103 +6,72 @@ extern crate proc_macro;

use proc_macro::TokenStream;

/// Input: a `match` expression.
///
/// Output: a `MAX_LENGTH` constant with the length of the longest string pattern.
///
/// Panic if the arms contain non-string patterns,
/// or string patterns that contains ASCII uppercase letters.
/// Implementation detail of the `match_ignore_ascii_case!` macro
#[allow(non_snake_case)]
#[proc_macro]
pub fn cssparser_internal__assert_ascii_lowercase__max_len(input: TokenStream) -> TokenStream {
let expr: syn::ExprMatch = syn::parse_macro_input!(input);
let strings = expr
.arms
.iter()
.flat_map(|arm| match arm.pat {
syn::Pat::Or(ref p) => p.cases.iter().collect(),
ref p => vec![p],
})
.filter_map(|pattern| {
let expr = match pattern {
syn::Pat::Lit(expr) => expr,
syn::Pat::Wild(_) => return None,
_ => panic!("expected string or wildcard pattern, got {:?}", pattern),
};
match *expr.expr {
syn::Expr::Lit(syn::ExprLit {
lit: syn::Lit::Str(ref lit),
..
}) => {
assert_eq!(
lit.value(),
lit.value().to_ascii_lowercase(),
"string patterns must be given in ASCII lowercase"
);
Some(lit)
}
_ => panic!("expected string pattern, got {:?}", expr),
}
});
max_len(strings)
}

/// Input: string literals with no separator
///
/// Output: a `MAX_LENGTH` constant with the length of the longest string.
#[allow(non_snake_case)]
#[proc_macro]
pub fn cssparser_internal__max_len(input: TokenStream) -> TokenStream {
struct Input(Vec<syn::LitStr>);
pub fn cssparser_internal__match_ignore_ascii_case__support(input: TokenStream) -> TokenStream {
struct Input {
max_length: usize,
}

impl syn::parse::Parse for Input {
fn parse(input: syn::parse::ParseStream) -> syn::parse::Result<Self> {
let mut strings = Vec::new();
let mut max_length = 0;
while !input.is_empty() {
strings.push(input.parse()?)
if input.peek(syn::Token![_]) {
input.parse::<syn::Token![_]>().unwrap();
continue;
}
let lit: syn::LitStr = input.parse()?;
let value = lit.value();
if value.to_ascii_lowercase() != value {
return Err(syn::Error::new(lit.span(), "must be ASCII-lowercase"));
}
max_length = max_length.max(value.len());
}
Ok(Self(strings))
Ok(Input { max_length })
}
}

let strings: Input = syn::parse_macro_input!(input);
max_len(strings.0.iter())
}

fn max_len<'a, I: Iterator<Item = &'a syn::LitStr>>(strings: I) -> TokenStream {
let max_length = strings
.map(|s| s.value().len())
.max()
.expect("expected at least one string");
quote::quote!( pub(super) const MAX_LENGTH: usize = #max_length; ).into()
let Input { max_length } = syn::parse_macro_input!(input);
quote::quote!(
pub(super) const MAX_LENGTH: usize = #max_length;
)
.into()
}

/// Input: A type, followed by pairs of string literal keys and expression values. No separator.
///
/// Output: a rust-phf map, with keys ASCII-lowercased:
/// ```text
/// static MAP: &'static ::cssparser::phf::Map<&'static str, $ValueType> = …;
/// ```
/// Implementation detail of the `ascii_case_insensitive_phf_map!` macro
#[allow(non_snake_case)]
#[proc_macro]
pub fn cssparser_internal__phf_map(input: TokenStream) -> TokenStream {
pub fn cssparser_internal__ascii_case_insensitive_phf_map__support(
input: TokenStream,
) -> TokenStream {
struct Input {
value_type: syn::Type,
max_key_length: usize,
keys: Vec<syn::LitStr>,
values: Vec<syn::Expr>,
}

impl syn::parse::Parse for Input {
fn parse(input: syn::parse::ParseStream) -> syn::parse::Result<Self> {
let value_type = input.parse()?;
let mut max_key_length = 0;
let mut keys = Vec::new();
let mut values = Vec::new();
let value_type = input.parse()?;
while !input.is_empty() {
keys.push(input.parse()?);
let key: syn::LitStr = input.parse()?;
let key_value = key.value();
max_key_length = max_key_length.max(key_value.len());
keys.push(syn::LitStr::new(
&key_value.to_ascii_lowercase(),
key.span(),
));
values.push(input.parse()?);
}
Ok(Input {
value_type,
max_key_length,
keys,
values,
})
@@ -111,14 +80,12 @@ pub fn cssparser_internal__phf_map(input: TokenStream) -> TokenStream {

let Input {
value_type,
max_key_length,
keys,
values,
} = syn::parse_macro_input!(input);
let keys = keys
.iter()
.map(|s| syn::LitStr::new(&s.value().to_ascii_lowercase(), s.span()));

quote::quote!(
pub(super) const MAX_LENGTH: usize = #max_key_length;
pub(super) static MAP: Map<&'static str, #value_type> = phf_map! {
#(
#keys => #values,
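
Both support macros boil their string inputs down to a MAX_LENGTH constant (the match_ignore_ascii_case variant emits only that; the phf variant also emits the lowercased map). One way such a constant is typically used, sketched here in plain Rust rather than cssparser's actual expansion, is to ASCII-lowercase the input into a fixed-size stack buffer and reject anything longer than the longest keyword, all without allocating. The lookup helper and keyword list below are invented for illustration.

    // Conceptual sketch (not cssparser's generated code) of how a MAX_LENGTH
    // constant enables allocation-free, case-insensitive keyword lookup.
    fn lookup(input: &str, keywords: &[(&'static str, u32)], buffer: &mut [u8]) -> Option<u32> {
        if input.len() > buffer.len() {
            return None; // longer than the longest keyword: cannot match anything
        }
        let buffer = &mut buffer[..input.len()];
        buffer.copy_from_slice(input.as_bytes());
        buffer.make_ascii_lowercase(); // ASCII-only lowercasing keeps the bytes valid UTF-8
        let lowered = std::str::from_utf8(buffer).ok()?;
        keywords
            .iter()
            .find(|(keyword, _)| *keyword == lowered)
            .map(|&(_, value)| value)
    }

    fn main() {
        const MAX_LENGTH: usize = 5; // length of "green", the longest keyword below
        let keywords = [("green", 1u32), ("red", 2), ("blue", 3)];
        let mut buffer = [0u8; MAX_LENGTH];
        assert_eq!(lookup("GReen", &keywords, &mut buffer), Some(1));
        assert_eq!(lookup("magenta", &keywords, &mut buffer), None); // too long, rejected
    }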
6 changes: 0 additions & 6 deletions src/color.rs
@@ -101,9 +101,6 @@ impl<'de> Deserialize<'de> for RGBA {
}
}

#[cfg(feature = "heapsize")]
known_heap_size!(0, RGBA);

impl ToCss for RGBA {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where
@@ -141,9 +138,6 @@ pub enum Color {
RGBA(RGBA),
}

#[cfg(feature = "heapsize")]
known_heap_size!(0, Color);

impl ToCss for Color {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where
65 changes: 18 additions & 47 deletions src/lib.rs
@@ -67,56 +67,27 @@ fn parse_border_spacing(_context: &ParserContext, input: &mut Parser)

#![recursion_limit = "200"] // For color::parse_color_keyword

extern crate dtoa_short;
extern crate itoa;
#[macro_use]
extern crate cssparser_macros;
#[macro_use]
extern crate matches;
#[cfg(test)]
extern crate difference;
#[cfg(test)]
extern crate encoding_rs;
#[doc(hidden)]
pub extern crate phf as _internal__phf;
#[cfg(feature = "serde")]
extern crate serde;
#[cfg(test)]
extern crate serde_json;
#[cfg(feature = "heapsize")]
#[macro_use]
extern crate heapsize;
extern crate smallvec;

pub use cssparser_macros::*;

pub use color::{
pub use crate::color::{
parse_color_keyword, AngleOrNumber, Color, ColorComponentParser, NumberOrPercentage, RGBA,
};
pub use cow_rc_str::CowRcStr;
pub use from_bytes::{stylesheet_encoding, EncodingSupport};
pub use nth::parse_nth;
pub use parser::{BasicParseError, BasicParseErrorKind, ParseError, ParseErrorKind};
pub use parser::{Delimiter, Delimiters, Parser, ParserInput, ParserState};
pub use rules_and_declarations::parse_important;
pub use rules_and_declarations::{parse_one_declaration, DeclarationListParser, DeclarationParser};
pub use rules_and_declarations::{parse_one_rule, RuleListParser};
pub use rules_and_declarations::{AtRuleParser, AtRuleType, QualifiedRuleParser};
pub use serializer::{
serialize_identifier, serialize_name, serialize_string, CssStringWriter, ToCss,
TokenSerializationType,
};
pub use tokenizer::{SourceLocation, SourcePosition, Token};
pub use unicode_range::UnicodeRange;

// For macros
pub use crate::cow_rc_str::CowRcStr;
pub use crate::from_bytes::{stylesheet_encoding, EncodingSupport};
#[doc(hidden)]
pub use macros::_internal__to_lowercase;

// For macros when used in this crate. Unsure how $crate works with procedural-masquerade.
mod cssparser {
pub use _internal__phf;
}
pub use crate::macros::_internal__to_lowercase;
pub use crate::nth::parse_nth;
pub use crate::parser::{BasicParseError, BasicParseErrorKind, ParseError, ParseErrorKind};
pub use crate::parser::{Delimiter, Delimiters, Parser, ParserInput, ParserState};
pub use crate::rules_and_declarations::{parse_important, parse_one_declaration};
pub use crate::rules_and_declarations::{parse_one_rule, RuleListParser};
pub use crate::rules_and_declarations::{AtRuleParser, AtRuleType, QualifiedRuleParser};
pub use crate::rules_and_declarations::{DeclarationListParser, DeclarationParser};
pub use crate::serializer::{serialize_identifier, serialize_name, serialize_string};
pub use crate::serializer::{CssStringWriter, ToCss, TokenSerializationType};
pub use crate::tokenizer::{SourceLocation, SourcePosition, Token};
pub use crate::unicode_range::UnicodeRange;
pub use cssparser_macros::*;
#[doc(hidden)]
pub use phf as _internal__phf;

#[macro_use]
mod macros;