@@ -10,33 +10,58 @@ extern crate syn;
 use std::ascii::AsciiExt;
 
 /// Panic if any string contains ASCII uppercase letters.
-#[proc_macro_derive(cssparser__assert_ascii_lowercase)]
-pub fn assert_ascii_lowercase(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
-    let input = syn::parse_macro_input(&input.to_string()).unwrap();
-
-    for token in find_smuggled_tokens(&input) {
-        let string = string_literal(token);
-        assert_eq!(*string, string.to_ascii_lowercase(),
-                   "the string patterns must be given in ASCII lowercase");
-    }
-
-    "".parse().unwrap()
+/// Emit a `MAX_LENGTH` constant with the length of the longest string.
+#[proc_macro_derive(cssparser__assert_ascii_lowercase__max_len)]
+pub fn assert_ascii_lowercase_max_len(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
+    max_len_common(input, |token_trees| {
+        let tokens = quote!( match x { #( #token_trees )* } );
+        let expr = syn::parse_expr(tokens.as_str()).unwrap();
+        let arms = match expr {
+            syn::Expr { node: syn::ExprKind::Match(_, ref arms), .. } => arms,
+            _ => panic!("expected a match expression, got {:?}", expr)
+        };
+        arms.iter().flat_map(|arm| &arm.pats).filter_map(|pattern| {
+            let expr = match *pattern {
+                syn::Pat::Lit(ref expr) => expr,
+                syn::Pat::Wild |
+                syn::Pat::Ident(_, _, None) => return None,
+                syn::Pat::Ident(_, _, Some(ref sub_pattern)) => {
+                    match **sub_pattern {
+                        syn::Pat::Lit(ref expr) => expr,
+                        syn::Pat::Wild => return None,
+                        _ => panic!("expected string or wildcard pattern, got {:?}", pattern)
+                    }
+                }
+                _ => panic!("expected string or wildcard pattern, got {:?}", pattern)
+            };
+            match **expr {
+                syn::Expr { node: syn::ExprKind::Lit(syn::Lit::Str(ref string, _)), .. } => {
+                    assert_eq!(*string, string.to_ascii_lowercase(),
+                               "string patterns must be given in ASCII lowercase");
+                    Some(string.len())
+                }
+                _ => panic!("expected string pattern, got {:?}", expr)
+            }
+        }).max()
+    })
 }
 
 /// Emit a `MAX_LENGTH` constant with the length of the longest string.
 #[proc_macro_derive(cssparser__max_len)]
 pub fn max_len(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
-    let input = syn::parse_macro_input(&input.to_string()).unwrap();
-
-    let token_trees = find_smuggled_tokens(&input);
-    let lengths = token_trees.iter().map(|tt| string_literal(tt).len());
-    let max_length = lengths.max().expect("expected at least one string");
-
-    let tokens = quote! {
-        const MAX_LENGTH: usize = #max_length;
-    };
+    max_len_common(input, |token_trees| {
+        token_trees.iter().map(|tt| string_literal(tt).len()).max()
+    })
+}
 
-    tokens.as_str().parse().unwrap()
+fn max_len_common<F>(input: proc_macro::TokenStream, f: F) -> proc_macro::TokenStream
+    where F: FnOnce(&[syn::TokenTree]) -> Option<usize> {
+    common(input, |token_trees| {
+        let max_length = f(token_trees).expect("expected at least one string");
+        quote! {
+            const MAX_LENGTH: usize = #max_length;
+        }
+    })
 }
 
 /// ```
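The new `cssparser__assert_ascii_lowercase__max_len` derive above folds the old lowercase check into the `MAX_LENGTH` computation: it walks the string patterns of the smuggled `match`, panics on any pattern that is not already ASCII lowercase, and hands the longest length to `max_len_common`. A minimal standalone sketch of that computation, outside any proc-macro machinery (the function name and sample patterns are made up for illustration):

```rust
/// Sketch only, not crate code: every string pattern must already be
/// ASCII lowercase, and the result is the length of the longest one,
/// i.e. what the derive emits as `const MAX_LENGTH: usize = ...;`.
fn max_len_of_patterns(patterns: &[&str]) -> usize {
    patterns
        .iter()
        .map(|s| {
            // Mirrors the per-literal assertion in the derive.
            assert_eq!(*s, s.to_ascii_lowercase(),
                       "string patterns must be given in ASCII lowercase");
            s.len()
        })
        .max()
        .expect("expected at least one string")
}

fn main() {
    // For patterns "em" | "pt" | "vmin" the derive would emit MAX_LENGTH = 4.
    assert_eq!(max_len_of_patterns(&["em", "pt", "vmin"]), 4);
}
```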
@@ -46,29 +71,35 @@ pub fn max_len(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
 /// Map keys are ASCII-lowercased.
 #[proc_macro_derive(cssparser__phf_map)]
 pub fn phf_map(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
-    let input = syn::parse_macro_input(&input.to_string()).unwrap();
+    common(input, |token_trees| {
+        let value_type = &token_trees[0];
+        let pairs: Vec<_> = token_trees[1..].chunks(2).map(|chunk| {
+            let key = string_literal(&chunk[0]);
+            let value = &chunk[1];
+            (key.to_ascii_lowercase(), quote!(#value).to_string())
+        }).collect();
+
+        let mut map = phf_codegen::Map::new();
+        for &(ref key, ref value) in &pairs {
+            map.entry(&**key, &**value);
+        }
+
+        let mut tokens = quote! {
+            static MAP: ::phf::Map<&'static str, #value_type> =
+        };
+        let mut initializer_bytes = Vec::new();
+        map.build(&mut initializer_bytes).unwrap();
+        tokens.append(::std::str::from_utf8(&initializer_bytes).unwrap());
+        tokens.append(";");
+        tokens
+    })
+}
 
+fn common<F>(input: proc_macro::TokenStream, f: F) -> proc_macro::TokenStream
+    where F: FnOnce(&[syn::TokenTree]) -> quote::Tokens {
+    let input = syn::parse_macro_input(&input.to_string()).unwrap();
     let token_trees = find_smuggled_tokens(&input);
-    let value_type = &token_trees[0];
-    let pairs: Vec<_> = token_trees[1..].chunks(2).map(|chunk| {
-        let key = string_literal(&chunk[0]);
-        let value = &chunk[1];
-        (key.to_ascii_lowercase(), quote!(#value).to_string())
-    }).collect();
-
-    let mut map = phf_codegen::Map::new();
-    for &(ref key, ref value) in &pairs {
-        map.entry(&**key, &**value);
-    }
-
-    let mut tokens = quote! {
-        static MAP: ::phf::Map<&'static str, #value_type> =
-    };
-    let mut initializer_bytes = Vec::new();
-    map.build(&mut initializer_bytes).unwrap();
-    tokens.append(::std::str::from_utf8(&initializer_bytes).unwrap());
-    tokens.append(";");
-
+    let tokens = f(token_trees);
     tokens.as_str().parse().unwrap()
 }
 
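Per the retained doc comment, the map generated by `cssparser__phf_map` stores its keys ASCII-lowercased, so lookups are expected to lowercase the query first. A hedged sketch of such a lookup against the emitted `static MAP` (the helper name is hypothetical; it assumes the `phf` crate's `Map::get` and a `phf` dependency):

```rust
/// Hypothetical helper, not from this crate: look up a key in a map whose
/// keys were ASCII-lowercased at build time, as the `phf_map` derive does.
fn lookup_ascii_case_insensitive<'a, V>(
    map: &'a phf::Map<&'static str, V>, // e.g. the emitted `MAP`
    key: &str,
) -> Option<&'a V> {
    // Lowercase the query so it matches the stored, already-lowercased keys.
    map.get(&*key.to_ascii_lowercase())
}
```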