diff --git a/derive/src/lib.rs b/derive/src/lib.rs
index 3fedb0fe..0cc11859 100644
--- a/derive/src/lib.rs
+++ b/derive/src/lib.rs
@@ -273,5 +273,5 @@ use proc_macro::TokenStream;
 
 #[proc_macro_derive(Parser, attributes(grammar))]
 pub fn derive_parser(input: TokenStream) -> TokenStream {
-    pest_generator::derive_parser(input.into()).into()
+    pest_generator::derive_parser(input.into(), true).into()
 }
diff --git a/generator/src/generator.rs b/generator/src/generator.rs
index c9c00de8..f360b5c8 100644
--- a/generator/src/generator.rs
+++ b/generator/src/generator.rs
@@ -22,12 +22,17 @@ pub fn generate(
     generics: &Generics,
     path: &Path,
     rules: Vec<OptimizedRule>,
-    defaults: Vec<&str>
+    defaults: Vec<&str>,
+    include_grammar: bool
 ) -> TokenStream {
     let uses_eoi = defaults.iter().any(|name| *name == "EOI");
     let builtins = generate_builtin_rules();
 
-    let include_fix = generate_include(&name, &path.to_str().expect("non-Unicode path"));
+    let include_fix = if include_grammar {
+        generate_include(&name, &path.to_str().expect("non-Unicode path"))
+    } else {
+        quote!()
+    };
     let rule_enum = generate_enum(&rules, uses_eoi);
     let patterns = generate_patterns(&rules, uses_eoi);
     let skip = generate_skip(&rules);
@@ -897,7 +902,7 @@ mod tests {
         let defaults = vec!["ANY"];
 
         assert_eq!(
-            generate(name, &generics, Path::new("test.pest"), rules, defaults).to_string(),
+            generate(name, &generics, Path::new("test.pest"), rules, defaults, true).to_string(),
             quote! {
                 #[allow(non_upper_case_globals)]
                 #[cfg(debug_assertions)]
diff --git a/generator/src/lib.rs b/generator/src/lib.rs
index f4ae9fad..9658fe8d 100644
--- a/generator/src/lib.rs
+++ b/generator/src/lib.rs
@@ -34,7 +34,7 @@ mod generator;
 use pest_meta::{optimizer, unwrap_or_report, validator};
 use pest_meta::parser::{self, Rule};
 
-pub fn derive_parser(input: TokenStream) -> TokenStream {
+pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
     let ast: DeriveInput = syn::parse2(input).unwrap();
     let (name, generics, path) = parse_derive(ast);
 
@@ -87,7 +87,8 @@ pub fn derive_parser(input: TokenStream) -> TokenStream {
     let defaults = unwrap_or_report(validator::validate_pairs(pairs.clone()));
     let ast = unwrap_or_report(parser::consume_rules(pairs));
     let optimized = optimizer::optimize(ast);
-    let generated = generator::generate(name, &generics, &path, optimized, defaults);
+    let generated =
+        generator::generate(name, &generics, &path, optimized, defaults, include_grammar);
 
     generated.into()
 }
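
The new include_grammar flag lets callers of pest_generator::derive_parser decide whether the generated code keeps the include_str! fixup (the hidden const that makes Cargo rebuild the consumer crate when the .pest file changes). The derive macro passes true, preserving existing behavior; other callers can pass false to get output that does not reference the grammar file path. Below is a minimal sketch of a direct, non-proc-macro caller; the struct name, grammar path, and helper function are hypothetical, and the grammar file is assumed to exist under the caller's src/ directory so the generator can read it.

// Hypothetical direct caller of the updated pest_generator API.
use proc_macro2::TokenStream;
use quote::quote;

fn generate_parser_without_grammar_tracking() -> TokenStream {
    // Illustrative derive input; "my_grammar.pest" must exist for the
    // generator to parse it.
    let input = quote! {
        #[derive(Parser)]
        #[grammar = "my_grammar.pest"]
        pub struct MyParser;
    };
    // Passing `false` skips the generated include_str! fixup, so the emitted
    // code does not embed or mention the grammar file path.
    pest_generator::derive_parser(input, false)
}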