author    Jeff Vander Stoep <jeffv@google.com>  2024-02-05 09:37:30 +0000
committer Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>  2024-02-05 09:37:30 +0000
commit    f086b1561e2541bb10ad633b4e5d6720cf0765e2
tree      31c5cc515393402885b3654e8002f5c85d9112a0
parent    10e777dd873320d8dce0b18a14a39beb67d4d231
parent    eb05b04b69051e35831635e11b5d2dd1251e1812
Upgrade pest_generator to 2.7.6 am: eb05b04b69 (HEAD, master, main, emu-34-2-dev)

Original change: https://android-review.googlesource.com/c/platform/external/rust/crates/pest_generator/+/2949318

Change-Id: I200b2f08ce75e22e1f2bb03d31c31e6d2bccb300
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
-rw-r--r--  Android.bp             2
-rw-r--r--  Cargo.toml            14
-rw-r--r--  Cargo.toml.orig       14
-rw-r--r--  METADATA              23
-rw-r--r--  _README.md            39
-rw-r--r--  patches/syn-2.patch  161
-rw-r--r--  src/docs.rs           10
-rw-r--r--  src/generator.rs     143
-rw-r--r--  src/lib.rs            55
-rw-r--r--  tests/base.pest        1
10 files changed, 230 insertions(+), 232 deletions(-)
diff --git a/Android.bp b/Android.bp
index 152a9d1..50cb9a7 100644
--- a/Android.bp
+++ b/Android.bp
@@ -43,7 +43,7 @@ rust_library_host {
name: "libpest_generator",
crate_name: "pest_generator",
cargo_env_compat: true,
- cargo_pkg_version: "2.5.5",
+ cargo_pkg_version: "2.7.6",
srcs: ["src/lib.rs"],
edition: "2021",
features: [
diff --git a/Cargo.toml b/Cargo.toml
index afcd883..85dabac 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -11,9 +11,9 @@
[package]
edition = "2021"
-rust-version = "1.56"
+rust-version = "1.61"
name = "pest_generator"
-version = "2.5.5"
+version = "2.7.6"
authors = ["Dragoș Tiselice <dragostiselice@gmail.com>"]
description = "pest code generator"
homepage = "https://pest.rs/"
@@ -24,15 +24,15 @@ keywords = [
"generator",
]
categories = ["parsing"]
-license = "MIT/Apache-2.0"
+license = "MIT OR Apache-2.0"
repository = "https://github.com/pest-parser/pest"
[dependencies.pest]
-version = "2.5.5"
+version = "2.7.6"
default-features = false
[dependencies.pest_meta]
-version = "2.5.5"
+version = "2.7.6"
[dependencies.proc-macro2]
version = "1.0"
@@ -41,8 +41,10 @@ version = "1.0"
version = "1.0"
[dependencies.syn]
-version = "2.0.1"
+version = "2.0"
[features]
default = ["std"]
+grammar-extras = ["pest_meta/grammar-extras"]
+not-bootstrap-in-src = ["pest_meta/not-bootstrap-in-src"]
std = ["pest/std"]
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
index ab98cb2..0bdf4bd 100644
--- a/Cargo.toml.orig
+++ b/Cargo.toml.orig
@@ -1,7 +1,7 @@
[package]
name = "pest_generator"
description = "pest code generator"
-version = "2.5.5"
+version = "2.7.6"
edition = "2021"
authors = ["Dragoș Tiselice <dragostiselice@gmail.com>"]
homepage = "https://pest.rs/"
@@ -9,17 +9,19 @@ repository = "https://github.com/pest-parser/pest"
documentation = "https://docs.rs/pest"
keywords = ["pest", "generator"]
categories = ["parsing"]
-license = "MIT/Apache-2.0"
+license = "MIT OR Apache-2.0"
readme = "_README.md"
-rust-version = "1.56"
+rust-version = "1.61"
[features]
default = ["std"]
std = ["pest/std"]
+not-bootstrap-in-src = ["pest_meta/not-bootstrap-in-src"]
+grammar-extras = ["pest_meta/grammar-extras"]
[dependencies]
-pest = { path = "../pest", version = "2.5.5", default-features = false }
-pest_meta = { path = "../meta", version = "2.5.5" }
+pest = { path = "../pest", version = "2.7.6", default-features = false }
+pest_meta = { path = "../meta", version = "2.7.6" }
proc-macro2 = "1.0"
quote = "1.0"
-syn = "1.0"
+syn = "2.0"
diff --git a/METADATA b/METADATA
index 83c63eb..d58c07d 100644
--- a/METADATA
+++ b/METADATA
@@ -1,23 +1,20 @@
# This project was upgraded with external_updater.
-# Usage: tools/external_updater/updater.sh update rust/crates/pest_generator
-# For more info, check https://cs.android.com/android/platform/superproject/+/master:tools/external_updater/README.md
+# Usage: tools/external_updater/updater.sh update external/rust/crates/pest_generator
+# For more info, check https://cs.android.com/android/platform/superproject/+/main:tools/external_updater/README.md
name: "pest_generator"
description: "pest code generator"
third_party {
- url {
- type: HOMEPAGE
- value: "https://crates.io/crates/pest_generator"
- }
- url {
- type: ARCHIVE
- value: "https://static.crates.io/crates/pest_generator/pest_generator-2.5.5.crate"
- }
- version: "2.5.5"
license_type: NOTICE
last_upgrade_date {
- year: 2023
+ year: 2024
month: 2
- day: 16
+ day: 5
+ }
+ homepage: "https://crates.io/crates/pest_generator"
+ identifier {
+ type: "Archive"
+ value: "https://static.crates.io/crates/pest_generator/pest_generator-2.7.6.crate"
+ version: "2.7.6"
}
}
diff --git a/_README.md b/_README.md
index da30ab7..6d91eaf 100644
--- a/_README.md
+++ b/_README.md
@@ -11,7 +11,7 @@
[![pest Continuous Integration](https://github.com/pest-parser/pest/actions/workflows/ci.yml/badge.svg)](https://github.com/pest-parser/pest/actions/workflows/ci.yml)
[![codecov](https://codecov.io/gh/pest-parser/pest/branch/master/graph/badge.svg)](https://codecov.io/gh/pest-parser/pest)
-<a href="https://blog.rust-lang.org/2021/11/01/Rust-1.56.1.html"><img alt="Rustc Version 1.56.1+" src="https://img.shields.io/badge/rustc-1.56.1%2B-lightgrey.svg"/></a>
+<a href="https://blog.rust-lang.org/2022/05/19/Rust-1.61.0.html"><img alt="Rustc Version 1.61.0+" src="https://img.shields.io/badge/rustc-1.61.0%2B-lightgrey.svg"/></a>
[![Crates.io](https://img.shields.io/crates/d/pest.svg)](https://crates.io/crates/pest)
[![Crates.io](https://img.shields.io/crates/v/pest.svg)](https://crates.io/crates/pest)
@@ -93,10 +93,7 @@ The grammar can be used to derive a `Parser` implementation automatically.
Parsing returns an iterator of nested token pairs:
```rust
-extern crate pest;
-#[macro_use]
-extern crate pest_derive;
-
+use pest_derive::Parser;
use pest::Parser;
#[derive(Parser)]
@@ -104,7 +101,7 @@ use pest::Parser;
struct IdentParser;
fn main() {
-    let pairs = IdentParser::parse(Rule::ident_list, "a1 b2").unwrap_or_else(|e| panic!("{}", e));
+ let pairs = IdentParser::parse(Rule::ident_list, "a1 b2").unwrap_or_else(|e| panic!("{}", e));
// Because ident_list is silent, the iterator will contain idents
for pair in pairs {
@@ -167,6 +164,8 @@ mod b {
## Projects using pest
+You can find more projects and ecosystem tools in the [awesome-pest](https://github.com/pest-parser/awesome-pest) repo.
+
* [pest_meta](https://github.com/pest-parser/pest/blob/master/meta/src/grammar.pest) (bootstrapped)
* [AshPaper](https://github.com/shnewto/ashpaper)
* [brain](https://github.com/brain-lang/brain)
@@ -197,11 +196,35 @@ mod b {
* [qubit](https://github.com/abhimanyu003/qubit)
* [caith](https://github.com/Geobert/caith) (a dice roller crate)
* [Melody](https://github.com/yoav-lavi/melody)
+* [json5-nodes](https://github.com/jlyonsmith/json5-nodes)
+* [prisma](https://github.com/prisma/prisma)
## Minimum Supported Rust Version (MSRV)
-This library should always compile with default features on **Rust 1.56.1**
-or **Rust 1.61** with `const_prec_climber`.
+This library should always compile with default features on **Rust 1.61.0**.
+
+## no_std support
+
+The `pest` and `pest_derive` crates can be built without the Rust standard
+library and target embedded environments. To do so, you need to disable
+their default features. In your `Cargo.toml`, you can specify it as follows:
+
+```toml
+[dependencies]
+# ...
+pest = { version = "2", default-features = false }
+pest_derive = { version = "2", default-features = false }
+```
+
+If you want to build these crates in the pest repository's workspace, you can
+pass the `--no-default-features` flag to `cargo` and specify these crates using
+the `--package` (`-p`) flag. For example:
+
+```bash
+$ cargo build --target thumbv7em-none-eabihf --no-default-features -p pest
+$ cargo bootstrap
+$ cargo build --target thumbv7em-none-eabihf --no-default-features -p pest_derive
+```
## Special thanks
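The no_std section added above stops at the manifest. As a companion, here is a minimal sketch of a consuming crate built on those instructions; the grammar and function are invented for illustration, and an `alloc` implementation is assumed to be provided by the embedding target:

```rust
#![no_std]
// Sketch only: pest without default features still requires `alloc`.
extern crate alloc;

use pest::Parser;
use pest_derive::Parser;

#[derive(Parser)]
#[grammar_inline = "ident = { ASCII_ALPHANUMERIC+ }"]
struct IdentParser;

/// Returns true when the input starts with an identifier.
pub fn is_ident(input: &str) -> bool {
    IdentParser::parse(Rule::ident, input).is_ok()
}
```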
diff --git a/patches/syn-2.patch b/patches/syn-2.patch
deleted file mode 100644
index f956ee3..0000000
--- a/patches/syn-2.patch
+++ /dev/null
@@ -1,161 +0,0 @@
-diff --git a/src/generator.rs b/src/generator.rs
-index 87d1f00..0dbcaa3 100644
---- a/src/generator.rs
-+++ b/src/generator.rs
-@@ -22,7 +22,7 @@ use crate::docs::DocComment;
- pub(crate) fn generate(
- name: Ident,
- generics: &Generics,
-- path: Option<PathBuf>,
-+ paths: Vec<PathBuf>,
- rules: Vec<OptimizedRule>,
- defaults: Vec<&str>,
- doc_comment: &DocComment,
-@@ -32,10 +32,7 @@ pub(crate) fn generate(
-
- let builtins = generate_builtin_rules();
- let include_fix = if include_grammar {
-- match path {
-- Some(ref path) => generate_include(&name, path.to_str().expect("non-Unicode path")),
-- None => quote!(),
-- }
-+ generate_include(&name, paths)
- } else {
- quote!()
- };
-@@ -170,17 +167,33 @@ fn generate_builtin_rules() -> Vec<(&'static str, TokenStream)> {
- builtins
- }
-
--// Needed because Cargo doesn't watch for changes in grammars.
--fn generate_include(name: &Ident, path: &str) -> TokenStream {
-+/// Generate Rust `include_str!` for grammar files, then Cargo will watch changes in grammars.
-+fn generate_include(name: &Ident, paths: Vec<PathBuf>) -> TokenStream {
- let const_name = format_ident!("_PEST_GRAMMAR_{}", name);
- // Need to make this relative to the current directory since the path to the file
- // is derived from the CARGO_MANIFEST_DIR environment variable
-- let mut current_dir = std::env::current_dir().expect("Unable to get current directory");
-- current_dir.push(path);
-- let relative_path = current_dir.to_str().expect("path contains invalid unicode");
-+ let current_dir = std::env::current_dir().expect("Unable to get current directory");
-+
-+ let include_tokens = paths.iter().map(|path| {
-+ let path = path.to_str().expect("non-Unicode path");
-+
-+ let relative_path = current_dir
-+ .join(path)
-+ .to_str()
-+ .expect("path contains invalid unicode")
-+ .to_string();
-+
-+ quote! {
-+ include_str!(#relative_path)
-+ }
-+ });
-+
-+ let len = include_tokens.len();
- quote! {
- #[allow(non_upper_case_globals)]
-- const #const_name: &'static str = include_str!(#relative_path);
-+ const #const_name: [&'static str; #len] = [
-+ #(#include_tokens),*
-+ ];
- }
- }
-
-@@ -1016,14 +1029,16 @@ mod tests {
- let defaults = vec!["ANY"];
- let result = result_type();
- let box_ty = box_type();
-- let mut current_dir = std::env::current_dir().expect("Unable to get current directory");
-- current_dir.push("test.pest");
-- let test_path = current_dir.to_str().expect("path contains invalid unicode");
-+ let current_dir = std::env::current_dir().expect("Unable to get current directory");
-+
-+ let base_path = current_dir.join("base.pest").to_str().unwrap().to_string();
-+ let test_path = current_dir.join("test.pest").to_str().unwrap().to_string();
-+
- assert_eq!(
-- generate(name, &generics, Some(PathBuf::from("test.pest")), rules, defaults, doc_comment, true).to_string(),
-+ generate(name, &generics, vec![PathBuf::from("base.pest"), PathBuf::from("test.pest")], rules, defaults, doc_comment, true).to_string(),
- quote! {
- #[allow(non_upper_case_globals)]
-- const _PEST_GRAMMAR_MyParser: &'static str = include_str!(#test_path);
-+ const _PEST_GRAMMAR_MyParser: [&'static str; 2usize] = [include_str!(#base_path), include_str!(#test_path)];
-
- #[doc = "This is Rule doc\nThis is second line"]
- #[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
-diff --git a/src/lib.rs b/src/lib.rs
-index f808987..7aed193 100644
---- a/src/lib.rs
-+++ b/src/lib.rs
-@@ -27,7 +27,7 @@ use std::io::{self, Read};
- use std::path::Path;
-
- use proc_macro2::TokenStream;
--use syn::{Attribute, DeriveInput, Generics, Ident, Lit, Meta};
-+use syn::{Attribute, DeriveInput, Expr, ExprLit, Generics, Ident, Lit, Meta};
-
- #[macro_use]
- mod macros;
-@@ -45,7 +45,7 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
- let (name, generics, contents) = parse_derive(ast);
-
- let mut data = String::new();
-- let mut path = None;
-+ let mut paths = vec![];
-
- for content in contents {
- let (_data, _path) = match content {
-@@ -81,8 +81,9 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
- };
-
- data.push_str(&_data);
-- if _path.is_some() {
-- path = _path;
-+ match _path {
-+ Some(path) => paths.push(path),
-+ None => (),
- }
- }
-
-@@ -99,7 +100,7 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
- generator::generate(
- name,
- &generics,
-- path,
-+ paths,
- optimized,
- defaults,
- &doc_comment,
-@@ -127,11 +128,9 @@ fn parse_derive(ast: DeriveInput) -> (Ident, Generics, Vec<GrammarSource>) {
- let grammar: Vec<&Attribute> = ast
- .attrs
- .iter()
-- .filter(|attr| match attr.parse_meta() {
-- Ok(Meta::NameValue(name_value)) => {
-- name_value.path.is_ident("grammar") || name_value.path.is_ident("grammar_inline")
-- }
-- _ => false,
-+ .filter(|attr| {
-+ let path = attr.meta.path();
-+ path.is_ident("grammar") || path.is_ident("grammar_inline")
- })
- .collect();
-
-@@ -148,9 +147,12 @@ fn parse_derive(ast: DeriveInput) -> (Ident, Generics, Vec<GrammarSource>) {
- }
-
- fn get_attribute(attr: &Attribute) -> GrammarSource {
-- match attr.parse_meta() {
-- Ok(Meta::NameValue(name_value)) => match name_value.lit {
-- Lit::Str(string) => {
-+ match &attr.meta {
-+ Meta::NameValue(name_value) => match &name_value.value {
-+ Expr::Lit(ExprLit {
-+ lit: Lit::Str(string),
-+ ..
-+ }) => {
- if name_value.path.is_ident("grammar") {
- GrammarSource::File(string.value())
- } else {
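The deleted `patches/syn-2.patch` had carried the `syn` 1.x to 2.x migration locally; 2.7.6 ships it upstream, so the patch is dropped. The core of that migration, restated as a free-standing sketch (the helper name is invented):

```rust
// syn 2.x: Attribute::parse_meta() is gone. Attributes expose `attr.meta`
// directly, and name-value payloads are full expressions rather than bare
// literals, hence the Expr::Lit match.
use syn::{Attribute, Expr, ExprLit, Lit, Meta};

fn grammar_path(attr: &Attribute) -> Option<String> {
    match &attr.meta {
        Meta::NameValue(nv) if nv.path.is_ident("grammar") => match &nv.value {
            Expr::Lit(ExprLit { lit: Lit::Str(s), .. }) => Some(s.value()),
            _ => None,
        },
        _ => None,
    }
}
```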
diff --git a/src/docs.rs b/src/docs.rs
index f1ce188..ccc82e7 100644
--- a/src/docs.rs
+++ b/src/docs.rs
@@ -119,4 +119,14 @@ mod tests {
doc_comment.grammar_doc
);
}
+
+ #[test]
+ fn test_empty_grammar_doc() {
+ assert!(parser::parse(Rule::grammar_rules, "//!").is_ok());
+ assert!(parser::parse(Rule::grammar_rules, "///").is_ok());
+ assert!(parser::parse(Rule::grammar_rules, "//").is_ok());
+ assert!(parser::parse(Rule::grammar_rules, "/// Line Doc").is_ok());
+ assert!(parser::parse(Rule::grammar_rules, "//! Grammar Doc").is_ok());
+ assert!(parser::parse(Rule::grammar_rules, "// Comment").is_ok());
+ }
}
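The new test pins down that empty doc comments (`//!`, `///`, `//`) parse cleanly. For context, a sketch of how grammar docs surface to users; the inline grammar is invented:

```rust
use pest_derive::Parser;

// `//!` lines become the doc comment of the generated `Rule` enum;
// `///` lines document the variant for the rule that follows them.
#[derive(Parser)]
#[grammar_inline = r#"
//! Grammar Doc
/// Line Doc
foo = { "foo" }
"#]
struct DocParser;
```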
diff --git a/src/generator.rs b/src/generator.rs
index 0dbcaa3..7a527c5 100644
--- a/src/generator.rs
+++ b/src/generator.rs
@@ -11,17 +11,17 @@ use std::path::PathBuf;
use proc_macro2::TokenStream;
use quote::{ToTokens, TokenStreamExt};
-use syn::{self, Generics, Ident};
+use syn::{self, Ident};
use pest::unicode::unicode_property_names;
use pest_meta::ast::*;
use pest_meta::optimizer::*;
use crate::docs::DocComment;
+use crate::ParsedDerive;
pub(crate) fn generate(
- name: Ident,
- generics: &Generics,
+ parsed_derive: ParsedDerive,
paths: Vec<PathBuf>,
rules: Vec<OptimizedRule>,
defaults: Vec<&str>,
@@ -29,14 +29,14 @@ pub(crate) fn generate(
include_grammar: bool,
) -> TokenStream {
let uses_eoi = defaults.iter().any(|name| *name == "EOI");
-
+ let name = parsed_derive.name;
let builtins = generate_builtin_rules();
let include_fix = if include_grammar {
generate_include(&name, paths)
} else {
quote!()
};
- let rule_enum = generate_enum(&rules, doc_comment, uses_eoi);
+ let rule_enum = generate_enum(&rules, doc_comment, uses_eoi, parsed_derive.non_exhaustive);
let patterns = generate_patterns(&rules, uses_eoi);
let skip = generate_skip(&rules);
@@ -49,7 +49,7 @@ pub(crate) fn generate(
}
}));
- let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
+ let (impl_generics, ty_generics, where_clause) = parsed_derive.generics.split_for_impl();
let result = result_type();
@@ -197,8 +197,13 @@ fn generate_include(name: &Ident, paths: Vec<PathBuf>) -> TokenStream {
}
}
-fn generate_enum(rules: &[OptimizedRule], doc_comment: &DocComment, uses_eoi: bool) -> TokenStream {
- let rules = rules.iter().map(|rule| {
+fn generate_enum(
+ rules: &[OptimizedRule],
+ doc_comment: &DocComment,
+ uses_eoi: bool,
+ non_exhaustive: bool,
+) -> TokenStream {
+ let rule_variants = rules.iter().map(|rule| {
let rule_name = format_ident!("r#{}", rule.name);
match doc_comment.line_docs.get(&rule.name) {
@@ -213,26 +218,49 @@ fn generate_enum(rules: &[OptimizedRule], doc_comment: &DocComment, uses_eoi: bo
});
let grammar_doc = &doc_comment.grammar_doc;
+ let mut result = quote! {
+ #[doc = #grammar_doc]
+ #[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
+ #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+ };
+ if non_exhaustive {
+ result.append_all(quote! {
+ #[non_exhaustive]
+ });
+ }
+ result.append_all(quote! {
+ pub enum Rule
+ });
if uses_eoi {
- quote! {
- #[doc = #grammar_doc]
- #[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
- #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
- pub enum Rule {
+ result.append_all(quote! {
+ {
+ #[doc = "End-of-input"]
EOI,
- #( #rules ),*
+ #( #rule_variants ),*
}
- }
+ });
} else {
- quote! {
- #[doc = #grammar_doc]
- #[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
- #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
- pub enum Rule {
- #( #rules ),*
+ result.append_all(quote! {
+ {
+ #( #rule_variants ),*
+ }
+ })
+ };
+
+ let rules = rules.iter().map(|rule| {
+ let rule_name = format_ident!("r#{}", rule.name);
+ quote! { #rule_name }
+ });
+
+ result.append_all(quote! {
+ impl Rule {
+ pub fn all_rules() -> &'static[Rule] {
+ &[ #(Rule::#rules), * ]
}
}
- }
+ });
+
+ result
}
fn generate_patterns(rules: &[OptimizedRule], uses_eoi: bool) -> TokenStream {
@@ -496,6 +524,26 @@ fn generate_expr(expr: OptimizedExpr) -> TokenStream {
})
}
}
+ #[cfg(feature = "grammar-extras")]
+ OptimizedExpr::RepOnce(expr) => {
+ let expr = generate_expr(*expr);
+
+ quote! {
+ state.sequence(|state| {
+ #expr.and_then(|state| {
+ state.repeat(|state| {
+ state.sequence(|state| {
+ super::hidden::skip(
+ state
+ ).and_then(|state| {
+ #expr
+ })
+ })
+ })
+ })
+ })
+ }
+ }
OptimizedExpr::Skip(strings) => {
quote! {
let strings = [#(#strings),*];
@@ -517,6 +565,13 @@ fn generate_expr(expr: OptimizedExpr) -> TokenStream {
state.restore_on_err(|state| #expr)
}
}
+ #[cfg(feature = "grammar-extras")]
+ OptimizedExpr::NodeTag(expr, tag) => {
+ let expr = generate_expr(*expr);
+ quote! {
+ #expr.and_then(|state| state.tag_node(#tag))
+ }
+ }
}
}
@@ -628,6 +683,22 @@ fn generate_expr_atomic(expr: OptimizedExpr) -> TokenStream {
})
}
}
+ #[cfg(feature = "grammar-extras")]
+ OptimizedExpr::RepOnce(expr) => {
+ let expr = generate_expr_atomic(*expr);
+
+ quote! {
+ state.sequence(|state| {
+ #expr.and_then(|state| {
+ state.repeat(|state| {
+ state.sequence(|state| {
+ #expr
+ })
+ })
+ })
+ })
+ }
+ }
OptimizedExpr::Skip(strings) => {
quote! {
let strings = [#(#strings),*];
@@ -649,6 +720,13 @@ fn generate_expr_atomic(expr: OptimizedExpr) -> TokenStream {
state.restore_on_err(|state| #expr)
}
}
+ #[cfg(feature = "grammar-extras")]
+ OptimizedExpr::NodeTag(expr, tag) => {
+ let expr = generate_expr_atomic(*expr);
+ quote! {
+ #expr.and_then(|state| state.tag_node(#tag))
+ }
+ }
}
}
@@ -694,6 +772,7 @@ mod tests {
use proc_macro2::Span;
use std::collections::HashMap;
+ use syn::Generics;
#[test]
fn rule_enum_simple() {
@@ -712,7 +791,7 @@ mod tests {
};
assert_eq!(
- generate_enum(&rules, doc_comment, false).to_string(),
+ generate_enum(&rules, doc_comment, false, false).to_string(),
quote! {
#[doc = "Rule doc\nhello"]
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
@@ -721,6 +800,11 @@ mod tests {
#[doc = "This is rule comment"]
r#f
}
+ impl Rule {
+ pub fn all_rules() -> &'static [Rule] {
+ &[Rule::r#f]
+ }
+ }
}
.to_string()
);
@@ -1033,9 +1117,13 @@ mod tests {
let base_path = current_dir.join("base.pest").to_str().unwrap().to_string();
let test_path = current_dir.join("test.pest").to_str().unwrap().to_string();
-
+ let parsed_derive = ParsedDerive {
+ name,
+ generics,
+ non_exhaustive: false,
+ };
assert_eq!(
- generate(name, &generics, vec![PathBuf::from("base.pest"), PathBuf::from("test.pest")], rules, defaults, doc_comment, true).to_string(),
+ generate(parsed_derive, vec![PathBuf::from("base.pest"), PathBuf::from("test.pest")], rules, defaults, doc_comment, true).to_string(),
quote! {
#[allow(non_upper_case_globals)]
const _PEST_GRAMMAR_MyParser: [&'static str; 2usize] = [include_str!(#base_path), include_str!(#test_path)];
@@ -1048,6 +1136,11 @@ mod tests {
#[doc = "If statement"]
r#if
}
+ impl Rule {
+ pub fn all_rules() -> &'static [Rule] {
+ &[Rule::r#a, Rule::r#if]
+ }
+ }
#[allow(clippy::all)]
impl ::pest::Parser<Rule> for MyParser {
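Beyond the feature-gated `RepOnce` and `NodeTag` arms, the user-visible addition in `generator.rs` is the `Rule::all_rules()` helper emitted alongside the enum. A hypothetical use, with an invented inline grammar:

```rust
use pest_derive::Parser;

#[derive(Parser)]
#[grammar_inline = r#"
a = { "a" }
b = { "b" }
"#]
struct MyParser;

fn main() {
    // The generated helper enumerates every grammar rule (EOI excluded).
    for rule in Rule::all_rules() {
        println!("known rule: {:?}", rule);
    }
}
```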
diff --git a/src/lib.rs b/src/lib.rs
index 7aed193..cbd13ea 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -42,7 +42,7 @@ use pest_meta::{optimizer, unwrap_or_report, validator};
/// "include_str" statement (done in pest_derive, but turned off in the local bootstrap).
pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
let ast: DeriveInput = syn::parse2(input).unwrap();
- let (name, generics, contents) = parse_derive(ast);
+ let (parsed_derive, contents) = parse_derive(ast);
let mut data = String::new();
let mut paths = vec![];
@@ -81,9 +81,8 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
};
data.push_str(&_data);
- match _path {
- Some(path) => paths.push(path),
- None => (),
+ if let Some(path) = _path {
+ paths.push(path);
}
}
@@ -98,8 +97,7 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
let optimized = optimizer::optimize(ast);
generator::generate(
- name,
- &generics,
+ parsed_derive,
paths,
optimized,
defaults,
@@ -121,7 +119,13 @@ enum GrammarSource {
Inline(String),
}
-fn parse_derive(ast: DeriveInput) -> (Ident, Generics, Vec<GrammarSource>) {
+struct ParsedDerive {
+ pub(crate) name: Ident,
+ pub(crate) generics: Generics,
+ pub(crate) non_exhaustive: bool,
+}
+
+fn parse_derive(ast: DeriveInput) -> (ParsedDerive, Vec<GrammarSource>) {
let name = ast.ident;
let generics = ast.generics;
@@ -143,7 +147,19 @@ fn parse_derive(ast: DeriveInput) -> (Ident, Generics, Vec<GrammarSource>) {
grammar_sources.push(get_attribute(attr))
}
- (name, generics, grammar_sources)
+ let non_exhaustive = ast
+ .attrs
+ .iter()
+ .any(|attr| attr.meta.path().is_ident("non_exhaustive"));
+
+ (
+ ParsedDerive {
+ name,
+ generics,
+ non_exhaustive,
+ },
+ grammar_sources,
+ )
}
fn get_attribute(attr: &Attribute) -> GrammarSource {
@@ -178,7 +194,7 @@ mod tests {
pub struct MyParser<'a, T>;
";
let ast = syn::parse_str(definition).unwrap();
- let (_, _, filenames) = parse_derive(ast);
+ let (_, filenames) = parse_derive(ast);
assert_eq!(filenames, [GrammarSource::Inline("GRAMMAR".to_string())]);
}
@@ -190,8 +206,9 @@ mod tests {
pub struct MyParser<'a, T>;
";
let ast = syn::parse_str(definition).unwrap();
- let (_, _, filenames) = parse_derive(ast);
+ let (parsed_derive, filenames) = parse_derive(ast);
assert_eq!(filenames, [GrammarSource::File("myfile.pest".to_string())]);
+ assert!(!parsed_derive.non_exhaustive);
}
#[test]
@@ -203,7 +220,7 @@ mod tests {
pub struct MyParser<'a, T>;
";
let ast = syn::parse_str(definition).unwrap();
- let (_, _, filenames) = parse_derive(ast);
+ let (_, filenames) = parse_derive(ast);
assert_eq!(
filenames,
[
@@ -214,6 +231,19 @@ mod tests {
}
#[test]
+ fn derive_nonexhaustive() {
+ let definition = "
+ #[non_exhaustive]
+ #[grammar = \"myfile.pest\"]
+ pub struct MyParser<'a, T>;
+ ";
+ let ast = syn::parse_str(definition).unwrap();
+ let (parsed_derive, filenames) = parse_derive(ast);
+ assert_eq!(filenames, [GrammarSource::File("myfile.pest".to_string())]);
+ assert!(parsed_derive.non_exhaustive);
+ }
+
+ #[test]
#[should_panic(expected = "grammar attribute must be a string")]
fn derive_wrong_arg() {
let definition = "
@@ -243,6 +273,7 @@ mod tests {
fn test_generate_doc() {
let input = quote! {
#[derive(Parser)]
+ #[non_exhaustive]
#[grammar = "../tests/test.pest"]
pub struct TestParser;
};
@@ -253,7 +284,7 @@ mod tests {
#[doc = "A parser for JSON file.\nAnd this is a example for JSON parser.\n\n indent-4-space\n"]
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
-
+ #[non_exhaustive]
pub enum Rule {
#[doc = "Matches foo str, e.g.: `foo`"]
r#foo,
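Taken together, the `lib.rs` changes mean the derive now collects every `#[grammar = ...]` attribute (previously only the last path was kept for the include fix) and forwards a `#[non_exhaustive]` on the struct to the generated `Rule` enum. A sketch combining both, with illustrative file names:

```rust
use pest_derive::Parser;

// Both grammar files are concatenated into one grammar; the generated
// `Rule` enum is marked #[non_exhaustive], so downstream matches on it
// need a wildcard arm.
#[derive(Parser)]
#[non_exhaustive]
#[grammar = "base.pest"]
#[grammar = "test.pest"]
pub struct MyParser;
```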
diff --git a/tests/base.pest b/tests/base.pest
new file mode 100644
index 0000000..ae880b1
--- /dev/null
+++ b/tests/base.pest
@@ -0,0 +1 @@
+base = { "base" }
\ No newline at end of file