From 5c471c4a5c3e80c810dadb19c5996e420426c3bc Mon Sep 17 00:00:00 2001 From: James Farrell Date: Tue, 15 Aug 2023 17:25:00 +0000 Subject: Upgrade proc-macro2 to 1.0.66 This project was upgraded with external_updater. Usage: tools/external_updater/updater.sh update rust/crates/proc-macro2 For more info, check https://cs.android.com/android/platform/superproject/+/main:tools/external_updater/README.md Test: TreeHugger Change-Id: I42a7ce27e4cee2efe110c6efc63fb215ab0e4c9a Bug: 295883071 --- .cargo_vcs_info.json | 2 +- .clippy.toml | 1 - .github/workflows/ci.yml | 28 ++++- Android.bp | 12 +- Cargo.toml | 7 +- Cargo.toml.orig | 8 +- METADATA | 10 +- README.md | 2 +- build.rs | 74 ++----------- rust-toolchain.toml | 2 + src/convert.rs | 19 ---- src/extra.rs | 20 +--- src/fallback.rs | 53 ++------- src/lib.rs | 45 ++------ src/marker.rs | 4 +- src/parse.rs | 278 +++++++++++++++++++++++++++++++---------------- src/rcvec.rs | 8 +- src/wrapper.rs | 99 +---------------- tests/marker.rs | 1 - tests/test.rs | 81 ++++++++++---- tests/test_fmt.rs | 2 +- 21 files changed, 332 insertions(+), 424 deletions(-) delete mode 100644 .clippy.toml create mode 100644 rust-toolchain.toml delete mode 100644 src/convert.rs diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json index c26dd7c..9eaf829 100644 --- a/.cargo_vcs_info.json +++ b/.cargo_vcs_info.json @@ -1,6 +1,6 @@ { "git": { - "sha1": "72ee0b3b0523395e75bf71ae7702e93a7c506f1a" + "sha1": "64b4608278be46fcc8d63ae1138da8cb600e258a" }, "path_in_vcs": "" } \ No newline at end of file diff --git a/.clippy.toml b/.clippy.toml deleted file mode 100644 index 3d30690..0000000 --- a/.clippy.toml +++ /dev/null @@ -1 +0,0 @@ -msrv = "1.31.0" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1299805..296381c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -24,13 +24,14 @@ jobs: strategy: fail-fast: false matrix: - rust: [1.31.0, stable, beta] + rust: [1.56.0, stable, beta] timeout-minutes: 45 steps: - uses: actions/checkout@v3 - uses: dtolnay/rust-toolchain@master with: toolchain: ${{matrix.rust}} + components: rust-src - run: cargo test - run: cargo test --no-default-features - run: cargo test --features span-locations @@ -52,6 +53,8 @@ jobs: steps: - uses: actions/checkout@v3 - uses: dtolnay/rust-toolchain@nightly + with: + components: rust-src - name: Enable type layout randomization run: echo RUSTFLAGS=${RUSTFLAGS}\ -Zrandomize-layout >> $GITHUB_ENV - run: cargo test @@ -71,7 +74,18 @@ jobs: run: cargo test env: RUSTFLAGS: -Z allow-features= ${{env.RUSTFLAGS}} - - run: cargo update -Z minimal-versions && cargo build + + minimal: + name: Minimal versions + needs: pre_ci + if: needs.pre_ci.outputs.continue + runs-on: ubuntu-latest + timeout-minutes: 45 + steps: + - uses: actions/checkout@v3 + - uses: dtolnay/rust-toolchain@nightly + - run: cargo generate-lockfile -Z minimal-versions + - run: cargo check --locked webassembly: name: WebAssembly @@ -84,6 +98,7 @@ jobs: - uses: dtolnay/rust-toolchain@nightly with: target: wasm32-unknown-unknown + components: rust-src - run: cargo test --target wasm32-unknown-unknown --no-run fuzz: @@ -95,14 +110,18 @@ jobs: steps: - uses: actions/checkout@v3 - uses: dtolnay/rust-toolchain@nightly + with: + components: rust-src - uses: dtolnay/install@cargo-fuzz - run: cargo fuzz check - run: cargo check --no-default-features --features afl working-directory: fuzz - uses: dtolnay/install@honggfuzz - run: sudo apt-get install binutils-dev libunwind-dev + continue-on-error: true # 
https://github.com/dtolnay/proc-macro2/issues/387 - run: cargo hfuzz build --no-default-features --features honggfuzz working-directory: fuzz + continue-on-error: true # https://github.com/dtolnay/proc-macro2/issues/387 clippy: name: Clippy @@ -111,7 +130,9 @@ jobs: timeout-minutes: 45 steps: - uses: actions/checkout@v3 - - uses: dtolnay/rust-toolchain@clippy + - uses: dtolnay/rust-toolchain@nightly + with: + components: clippy, rust-src - run: cargo clippy --tests -- -Dclippy::all -Dclippy::pedantic - run: cargo clippy --tests --all-features -- -Dclippy::all -Dclippy::pedantic @@ -124,6 +145,7 @@ jobs: steps: - uses: actions/checkout@v3 - uses: dtolnay/rust-toolchain@miri + - run: cargo miri setup - run: cargo miri test env: MIRIFLAGS: -Zmiri-strict-provenance diff --git a/Android.bp b/Android.bp index dea1ca2..4222426 100644 --- a/Android.bp +++ b/Android.bp @@ -41,18 +41,16 @@ rust_library_host { name: "libproc_macro2", crate_name: "proc_macro2", cargo_env_compat: true, - cargo_pkg_version: "1.0.56", + cargo_pkg_version: "1.0.66", srcs: ["src/lib.rs"], - edition: "2018", + edition: "2021", features: [ "default", "proc-macro", "span-locations", ], cfgs: [ - "proc_macro_span", "span_locations", - "use_proc_macro", "wrap_proc_macro", ], rustlibs: [ @@ -67,19 +65,17 @@ rust_defaults { name: "proc-macro2_test_defaults", crate_name: "proc_macro2", cargo_env_compat: true, - cargo_pkg_version: "1.0.56", + cargo_pkg_version: "1.0.66", test_suites: ["general-tests"], auto_gen_config: true, - edition: "2018", + edition: "2021", features: [ "default", "proc-macro", "span-locations", ], cfgs: [ - "proc_macro_span", "span_locations", - "use_proc_macro", "wrap_proc_macro", ], rustlibs: [ diff --git a/Cargo.toml b/Cargo.toml index 76c0df6..7612f21 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,10 +10,10 @@ # See Cargo.toml.orig for the original contents. [package] -edition = "2018" -rust-version = "1.31" +edition = "2021" +rust-version = "1.56" name = "proc-macro2" -version = "1.0.56" +version = "1.0.66" authors = [ "David Tolnay ", "Alex Crichton ", @@ -40,6 +40,7 @@ rustdoc-args = [ "procmacro2_semver_exempt", "--cfg", "doc_cfg", + "--generate-link-to-definition", ] targets = ["x86_64-unknown-linux-gnu"] diff --git a/Cargo.toml.orig b/Cargo.toml.orig index 791d5e9..b0fb8fd 100644 --- a/Cargo.toml.orig +++ b/Cargo.toml.orig @@ -1,20 +1,20 @@ [package] name = "proc-macro2" -version = "1.0.56" # remember to update html_root_url +version = "1.0.66" # remember to update html_root_url authors = ["David Tolnay ", "Alex Crichton "] autobenches = false categories = ["development-tools::procedural-macro-helpers"] description = "A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case." documentation = "https://docs.rs/proc-macro2" -edition = "2018" +edition = "2021" keywords = ["macros", "syn"] license = "MIT OR Apache-2.0" repository = "https://github.com/dtolnay/proc-macro2" -rust-version = "1.31" +rust-version = "1.56" [package.metadata.docs.rs] rustc-args = ["--cfg", "procmacro2_semver_exempt"] -rustdoc-args = ["--cfg", "procmacro2_semver_exempt", "--cfg", "doc_cfg"] +rustdoc-args = ["--cfg", "procmacro2_semver_exempt", "--cfg", "doc_cfg", "--generate-link-to-definition"] targets = ["x86_64-unknown-linux-gnu"] [package.metadata.playground] diff --git a/METADATA b/METADATA index 66c5a87..34f90ff 100644 --- a/METADATA +++ b/METADATA @@ -1,6 +1,6 @@ # This project was upgraded with external_updater. 
# Usage: tools/external_updater/updater.sh update rust/crates/proc-macro2 -# For more info, check https://cs.android.com/android/platform/superproject/+/master:tools/external_updater/README.md +# For more info, check https://cs.android.com/android/platform/superproject/+/main:tools/external_updater/README.md name: "proc-macro2" description: "A substitute implementation of the compiler\'s `proc_macro` API to decouple token-based libraries from the procedural macro use case." @@ -11,13 +11,13 @@ third_party { } url { type: ARCHIVE - value: "https://static.crates.io/crates/proc-macro2/proc-macro2-1.0.56.crate" + value: "https://static.crates.io/crates/proc-macro2/proc-macro2-1.0.66.crate" } - version: "1.0.56" + version: "1.0.66" license_type: NOTICE last_upgrade_date { year: 2023 - month: 4 - day: 3 + month: 8 + day: 15 } } diff --git a/README.md b/README.md index 131ba51..e48dd47 100644 --- a/README.md +++ b/README.md @@ -62,7 +62,7 @@ proc-macro2 by default. To opt into the additional APIs available in the most recent nightly compiler, the `procmacro2_semver_exempt` config flag must be passed to rustc. We will -polyfill those nightly-only APIs back to Rust 1.31.0. As these are unstable APIs +polyfill those nightly-only APIs back to Rust 1.56.0. As these are unstable APIs that track the nightly compiler, minor versions of proc-macro2 may make breaking changes to them at any time. diff --git a/build.rs b/build.rs index 3ee8a9f..9f0fb51 100644 --- a/build.rs +++ b/build.rs @@ -1,11 +1,5 @@ // rustc-cfg emitted by the build script: // -// "use_proc_macro" -// Link to extern crate proc_macro. Available on any compiler and any target -// except wasm32. Requires "proc-macro" Cargo cfg to be enabled (default is -// enabled). On wasm32 we never link to proc_macro even if "proc-macro" cfg -// is enabled. -// // "wrap_proc_macro" // Wrap types from libproc_macro rather than polyfilling the whole API. // Enabled on rustc 1.29+ as long as procmacro2_semver_exempt is not set, @@ -41,21 +35,17 @@ // 1.57+. 
use std::env; -use std::process::{self, Command}; +use std::process::Command; use std::str; +use std::u32; fn main() { println!("cargo:rerun-if-changed=build.rs"); - let version = match rustc_version() { - Some(version) => version, - None => return, - }; - - if version.minor < 31 { - eprintln!("Minimum supported rustc version is 1.31"); - process::exit(1); - } + let version = rustc_version().unwrap_or(RustcVersion { + minor: u32::MAX, + nightly: false, + }); let docs_rs = env::var_os("DOCS_RS").is_some(); let semver_exempt = cfg!(procmacro2_semver_exempt) || docs_rs; @@ -68,38 +58,6 @@ fn main() { println!("cargo:rustc-cfg=span_locations"); } - if version.minor < 32 { - println!("cargo:rustc-cfg=no_libprocmacro_unwind_safe"); - } - - if version.minor < 34 { - println!("cargo:rustc-cfg=no_try_from"); - } - - if version.minor < 39 { - println!("cargo:rustc-cfg=no_bind_by_move_pattern_guard"); - } - - if version.minor < 44 { - println!("cargo:rustc-cfg=no_lexerror_display"); - } - - if version.minor < 45 { - println!("cargo:rustc-cfg=no_hygiene"); - } - - if version.minor < 47 { - println!("cargo:rustc-cfg=no_ident_new_raw"); - } - - if version.minor < 54 { - println!("cargo:rustc-cfg=no_literal_from_str"); - } - - if version.minor < 55 { - println!("cargo:rustc-cfg=no_group_open_close"); - } - if version.minor < 57 { println!("cargo:rustc-cfg=no_is_available"); } @@ -108,21 +66,15 @@ fn main() { println!("cargo:rustc-cfg=no_source_text"); } - let target = env::var("TARGET").unwrap(); - if !enable_use_proc_macro(&target) { + if !cfg!(feature = "proc-macro") { return; } - println!("cargo:rustc-cfg=use_proc_macro"); - if version.nightly || !semver_exempt { println!("cargo:rustc-cfg=wrap_proc_macro"); } - if version.nightly - && feature_allowed("proc_macro_span") - && feature_allowed("proc_macro_span_shrink") - { + if version.nightly && feature_allowed("proc_macro_span") { println!("cargo:rustc-cfg=proc_macro_span"); } @@ -131,16 +83,6 @@ fn main() { } } -fn enable_use_proc_macro(target: &str) -> bool { - // wasm targets don't have the `proc_macro` crate, disable this feature. - if target.contains("wasm32") { - return false; - } - - // Otherwise, only enable it if our feature is actually enabled. 
- cfg!(feature = "proc-macro") -} - struct RustcVersion { minor: u32, nightly: bool, diff --git a/rust-toolchain.toml b/rust-toolchain.toml new file mode 100644 index 0000000..20fe888 --- /dev/null +++ b/rust-toolchain.toml @@ -0,0 +1,2 @@ +[toolchain] +components = ["rust-src"] diff --git a/src/convert.rs b/src/convert.rs deleted file mode 100644 index afc5faf..0000000 --- a/src/convert.rs +++ /dev/null @@ -1,19 +0,0 @@ -pub(crate) fn usize_to_u32(u: usize) -> Option { - #[cfg(not(no_try_from))] - { - use core::convert::TryFrom; - - u32::try_from(u).ok() - } - - #[cfg(no_try_from)] - { - use core::mem; - - if mem::size_of::() <= mem::size_of::() || u <= u32::max_value() as usize { - Some(u as u32) - } else { - None - } - } -} diff --git a/src/extra.rs b/src/extra.rs index cbce162..4a69d46 100644 --- a/src/extra.rs +++ b/src/extra.rs @@ -22,9 +22,7 @@ enum DelimSpanEnum { #[cfg(wrap_proc_macro)] Compiler { join: proc_macro::Span, - #[cfg(not(no_group_open_close))] open: proc_macro::Span, - #[cfg(not(no_group_open_close))] close: proc_macro::Span, }, Fallback(fallback::Span), @@ -36,9 +34,7 @@ impl DelimSpan { let inner = match group { imp::Group::Compiler(group) => DelimSpanEnum::Compiler { join: group.span(), - #[cfg(not(no_group_open_close))] open: group.span_open(), - #[cfg(not(no_group_open_close))] close: group.span_close(), }, imp::Group::Fallback(group) => DelimSpanEnum::Fallback(group.span()), @@ -66,13 +62,7 @@ impl DelimSpan { pub fn open(&self) -> Span { match &self.inner { #[cfg(wrap_proc_macro)] - DelimSpanEnum::Compiler { - #[cfg(not(no_group_open_close))] - open, - #[cfg(no_group_open_close)] - join: open, - .. - } => Span::_new(imp::Span::Compiler(*open)), + DelimSpanEnum::Compiler { open, .. } => Span::_new(imp::Span::Compiler(*open)), DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.first_byte()), } } @@ -81,13 +71,7 @@ impl DelimSpan { pub fn close(&self) -> Span { match &self.inner { #[cfg(wrap_proc_macro)] - DelimSpanEnum::Compiler { - #[cfg(not(no_group_open_close))] - close, - #[cfg(no_group_open_close)] - join: close, - .. - } => Span::_new(imp::Span::Compiler(*close)), + DelimSpanEnum::Compiler { close, .. } => Span::_new(imp::Span::Compiler(*close)), DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.last_byte()), } } diff --git a/src/fallback.rs b/src/fallback.rs index bbea473..daa1e17 100644 --- a/src/fallback.rs +++ b/src/fallback.rs @@ -8,7 +8,6 @@ use core::cell::RefCell; #[cfg(span_locations)] use core::cmp; use core::fmt::{self, Debug, Display, Write}; -use core::iter::FromIterator; use core::mem::ManuallyDrop; use core::ops::RangeBounds; use core::ptr; @@ -71,7 +70,6 @@ impl TokenStream { fn push_token_from_proc_macro(mut vec: RcVecMut, token: TokenTree) { // https://github.com/dtolnay/proc-macro2/issues/235 match token { - #[cfg(not(no_bind_by_move_pattern_guard))] TokenTree::Literal(crate::Literal { #[cfg(wrap_proc_macro)] inner: crate::imp::Literal::Fallback(literal), @@ -81,20 +79,6 @@ fn push_token_from_proc_macro(mut vec: RcVecMut, token: TokenTree) { }) if literal.repr.starts_with('-') => { push_negative_literal(vec, literal); } - #[cfg(no_bind_by_move_pattern_guard)] - TokenTree::Literal(crate::Literal { - #[cfg(wrap_proc_macro)] - inner: crate::imp::Literal::Fallback(literal), - #[cfg(not(wrap_proc_macro))] - inner: literal, - .. 
- }) => { - if literal.repr.starts_with('-') { - push_negative_literal(vec, literal); - } else { - vec.push(TokenTree::Literal(crate::Literal::_new_fallback(literal))); - } - } _ => vec.push(token), } @@ -233,7 +217,7 @@ impl Debug for TokenStream { } } -#[cfg(use_proc_macro)] +#[cfg(feature = "proc-macro")] impl From for TokenStream { fn from(inner: proc_macro::TokenStream) -> Self { inner @@ -243,7 +227,7 @@ impl From for TokenStream { } } -#[cfg(use_proc_macro)] +#[cfg(feature = "proc-macro")] impl From for proc_macro::TokenStream { fn from(inner: TokenStream) -> Self { inner @@ -479,7 +463,6 @@ impl Span { Span { lo: 0, hi: 0 } } - #[cfg(not(no_hygiene))] pub fn mixed_site() -> Self { Span::call_site() } @@ -541,26 +524,6 @@ impl Span { }) } - #[cfg(procmacro2_semver_exempt)] - pub fn before(&self) -> Span { - Span { - #[cfg(span_locations)] - lo: self.lo, - #[cfg(span_locations)] - hi: self.lo, - } - } - - #[cfg(procmacro2_semver_exempt)] - pub fn after(&self) -> Span { - Span { - #[cfg(span_locations)] - lo: self.hi, - #[cfg(span_locations)] - hi: self.hi, - } - } - #[cfg(not(span_locations))] pub fn join(&self, _other: Span) -> Option { Some(Span {}) @@ -789,7 +752,7 @@ fn validate_ident(string: &str, raw: bool) { panic!("Ident is not allowed to be empty; use Option"); } - if string.bytes().all(|digit| digit >= b'0' && digit <= b'9') { + if string.bytes().all(|digit| b'0' <= digit && digit <= b'9') { panic!("Ident cannot be a number; use Literal instead"); } @@ -850,6 +813,7 @@ impl Display for Ident { } } +#[allow(clippy::missing_fields_in_debug)] impl Debug for Ident { // Ident(proc_macro), Ident(r#union) #[cfg(not(span_locations))] @@ -1039,27 +1003,26 @@ impl Literal { #[cfg(span_locations)] { - use crate::convert::usize_to_u32; use core::ops::Bound; let lo = match range.start_bound() { Bound::Included(start) => { - let start = usize_to_u32(*start)?; + let start = u32::try_from(*start).ok()?; self.span.lo.checked_add(start)? } Bound::Excluded(start) => { - let start = usize_to_u32(*start)?; + let start = u32::try_from(*start).ok()?; self.span.lo.checked_add(start)?.checked_add(1)? } Bound::Unbounded => self.span.lo, }; let hi = match range.end_bound() { Bound::Included(end) => { - let end = usize_to_u32(*end)?; + let end = u32::try_from(*end).ok()?; self.span.lo.checked_add(end)?.checked_add(1)? } Bound::Excluded(end) => { - let end = usize_to_u32(*end)?; + let end = u32::try_from(*end).ok()?; self.span.lo.checked_add(end)? } Bound::Unbounded => self.span.hi, diff --git a/src/lib.rs b/src/lib.rs index 6ce679d..910d47b 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -65,7 +65,7 @@ //! //! To opt into the additional APIs available in the most recent nightly //! compiler, the `procmacro2_semver_exempt` config flag must be passed to -//! rustc. We will polyfill those nightly-only APIs back to Rust 1.31.0. As +//! rustc. We will polyfill those nightly-only APIs back to Rust 1.56.0. As //! these are unstable APIs that track the nightly compiler, minor versions of //! proc-macro2 may make breaking changes to them at any time. //! @@ -86,11 +86,8 @@ //! a different thread. // Proc-macro2 types in rustdoc of other crates get linked to here. 
-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.56")] -#![cfg_attr( - any(proc_macro_span, super_unstable), - feature(proc_macro_span, proc_macro_span_shrink) -)] +#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.66")] +#![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))] #![cfg_attr(super_unstable, feature(proc_macro_def_site))] #![cfg_attr(doc_cfg, feature(doc_cfg))] #![allow( @@ -100,8 +97,10 @@ clippy::items_after_statements, clippy::let_underscore_untyped, clippy::manual_assert, + clippy::manual_range_contains, clippy::must_use_candidate, clippy::needless_doctest_main, + clippy::new_without_default, clippy::return_self_not_must_use, clippy::shadow_unrelated, clippy::trivially_copy_pass_by_ref, @@ -119,7 +118,9 @@ compile_error! {"\ build script as well. "} -#[cfg(use_proc_macro)] +extern crate alloc; + +#[cfg(feature = "proc-macro")] extern crate proc_macro; mod marker; @@ -142,8 +143,6 @@ use crate::fallback as imp; #[cfg(wrap_proc_macro)] mod imp; -#[cfg(span_locations)] -mod convert; #[cfg(span_locations)] mod location; @@ -152,7 +151,6 @@ use crate::marker::Marker; use core::cmp::Ordering; use core::fmt::{self, Debug, Display}; use core::hash::{Hash, Hasher}; -use core::iter::FromIterator; use core::ops::RangeBounds; use core::str::FromStr; use std::error::Error; @@ -235,14 +233,16 @@ impl FromStr for TokenStream { } } -#[cfg(use_proc_macro)] +#[cfg(feature = "proc-macro")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))] impl From for TokenStream { fn from(inner: proc_macro::TokenStream) -> Self { TokenStream::_new(inner.into()) } } -#[cfg(use_proc_macro)] +#[cfg(feature = "proc-macro")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))] impl From for proc_macro::TokenStream { fn from(inner: TokenStream) -> Self { inner.inner.into() @@ -402,9 +402,6 @@ impl Span { /// The span located at the invocation of the procedural macro, but with /// local variables, labels, and `$crate` resolved at the definition site /// of the macro. This is the same hygiene behavior as `macro_rules`. - /// - /// This function requires Rust 1.45 or later. - #[cfg(not(no_hygiene))] pub fn mixed_site() -> Self { Span::_new(imp::Span::mixed_site()) } @@ -491,24 +488,6 @@ impl Span { self.inner.end() } - /// Creates an empty span pointing to directly before this span. - /// - /// This method is semver exempt and not exposed by default. - #[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))] - #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))] - pub fn before(&self) -> Span { - Span::_new(self.inner.before()) - } - - /// Creates an empty span pointing to directly after this span. - /// - /// This method is semver exempt and not exposed by default. - #[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))] - #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))] - pub fn after(&self) -> Span { - Span::_new(self.inner.after()) - } - /// Create a new span encompassing `self` and `other`. /// /// Returns `None` if `self` and `other` are from different files. diff --git a/src/marker.rs b/src/marker.rs index 59fd096..e648dd2 100644 --- a/src/marker.rs +++ b/src/marker.rs @@ -1,6 +1,6 @@ +use alloc::rc::Rc; use core::marker::PhantomData; -use std::panic::{RefUnwindSafe, UnwindSafe}; -use std::rc::Rc; +use core::panic::{RefUnwindSafe, UnwindSafe}; // Zero sized marker with the correct set of autotrait impls we want all proc // macro types to have. 
diff --git a/src/parse.rs b/src/parse.rs index be2425b..c084e4c 100644 --- a/src/parse.rs +++ b/src/parse.rs @@ -108,7 +108,7 @@ fn skip_whitespace(input: Cursor) -> Cursor { s = s.advance(1); continue; } - b if b <= 0x7f => {} + b if b.is_ascii() => {} _ => { let ch = s.chars().next().unwrap(); if is_whitespace(ch) { @@ -273,9 +273,11 @@ fn leaf_token(input: Cursor) -> PResult { } fn ident(input: Cursor) -> PResult { - if ["r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#"] - .iter() - .any(|prefix| input.starts_with(prefix)) + if [ + "r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#", "c\"", "cr\"", "cr#", + ] + .iter() + .any(|prefix| input.starts_with(prefix)) { Err(Reject) } else { @@ -333,6 +335,8 @@ fn literal_nocapture(input: Cursor) -> Result { Ok(ok) } else if let Ok(ok) = byte_string(input) { Ok(ok) + } else if let Ok(ok) = c_string(input) { + Ok(ok) } else if let Ok(ok) = byte(input) { Ok(ok) } else if let Ok(ok) = character(input) { @@ -363,8 +367,8 @@ fn string(input: Cursor) -> Result { } } -fn cooked_string(input: Cursor) -> Result { - let mut chars = input.char_indices().peekable(); +fn cooked_string(mut input: Cursor) -> Result { + let mut chars = input.char_indices(); while let Some((i, ch)) = chars.next() { match ch { @@ -378,31 +382,16 @@ fn cooked_string(input: Cursor) -> Result { }, '\\' => match chars.next() { Some((_, 'x')) => { - if !backslash_x_char(&mut chars) { - break; - } + backslash_x_char(&mut chars)?; } - Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\')) - | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {} + Some((_, 'n' | 'r' | 't' | '\\' | '\'' | '"' | '0')) => {} Some((_, 'u')) => { - if !backslash_u(&mut chars) { - break; - } + backslash_u(&mut chars)?; } - Some((_, ch @ '\n')) | Some((_, ch @ '\r')) => { - let mut last = ch; - loop { - if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') { - return Err(Reject); - } - match chars.peek() { - Some((_, ch)) if ch.is_whitespace() => { - last = *ch; - chars.next(); - } - _ => break, - } - } + Some((newline, ch @ ('\n' | '\r'))) => { + input = input.advance(newline + 1); + trailing_backslash(&mut input, ch as u8)?; + chars = input.char_indices(); } _ => break, }, @@ -412,11 +401,30 @@ fn cooked_string(input: Cursor) -> Result { Err(Reject) } +fn raw_string(input: Cursor) -> Result { + let (input, delimiter) = delimiter_of_raw_string(input)?; + let mut bytes = input.bytes().enumerate(); + while let Some((i, byte)) = bytes.next() { + match byte { + b'"' if input.rest[i + 1..].starts_with(delimiter) => { + let rest = input.advance(i + 1 + delimiter.len()); + return Ok(literal_suffix(rest)); + } + b'\r' => match bytes.next() { + Some((_, b'\n')) => {} + _ => break, + }, + _ => {} + } + } + Err(Reject) +} + fn byte_string(input: Cursor) -> Result { if let Ok(input) = input.parse("b\"") { cooked_byte_string(input) } else if let Ok(input) = input.parse("br") { - raw_string(input) + raw_byte_string(input) } else { Err(Reject) } @@ -436,68 +444,125 @@ fn cooked_byte_string(mut input: Cursor) -> Result { }, b'\\' => match bytes.next() { Some((_, b'x')) => { - if !backslash_x_byte(&mut bytes) { - break; - } + backslash_x_byte(&mut bytes)?; } - Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\')) - | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {} - Some((newline, b @ b'\n')) | Some((newline, b @ b'\r')) => { - let mut last = b as char; - let rest = input.advance(newline + 1); - let mut chars = rest.char_indices(); - loop { - if last == '\r' && 
chars.next().map_or(true, |(_, ch)| ch != '\n') { - return Err(Reject); - } - match chars.next() { - Some((_, ch)) if ch.is_whitespace() => last = ch, - Some((offset, _)) => { - input = rest.advance(offset); - bytes = input.bytes().enumerate(); - break; - } - None => return Err(Reject), - } - } + Some((_, b'n' | b'r' | b't' | b'\\' | b'0' | b'\'' | b'"')) => {} + Some((newline, b @ (b'\n' | b'\r'))) => { + input = input.advance(newline + 1); + trailing_backslash(&mut input, b)?; + bytes = input.bytes().enumerate(); } _ => break, }, - b if b < 0x80 => {} + b if b.is_ascii() => {} _ => break, } } Err(Reject) } -fn raw_string(input: Cursor) -> Result { - let mut chars = input.char_indices(); - let mut n = 0; - for (i, ch) in &mut chars { - match ch { - '"' => { - n = i; - break; +fn delimiter_of_raw_string(input: Cursor) -> PResult<&str> { + for (i, byte) in input.bytes().enumerate() { + match byte { + b'"' => { + if i > 255 { + // https://github.com/rust-lang/rust/pull/95251 + return Err(Reject); + } + return Ok((input.advance(i + 1), &input.rest[..i])); } - '#' => {} - _ => return Err(Reject), + b'#' => {} + _ => break, } } - if n > 255 { - // https://github.com/rust-lang/rust/pull/95251 - return Err(Reject); + Err(Reject) +} + +fn raw_byte_string(input: Cursor) -> Result { + let (input, delimiter) = delimiter_of_raw_string(input)?; + let mut bytes = input.bytes().enumerate(); + while let Some((i, byte)) = bytes.next() { + match byte { + b'"' if input.rest[i + 1..].starts_with(delimiter) => { + let rest = input.advance(i + 1 + delimiter.len()); + return Ok(literal_suffix(rest)); + } + b'\r' => match bytes.next() { + Some((_, b'\n')) => {} + _ => break, + }, + other => { + if !other.is_ascii() { + break; + } + } + } } + Err(Reject) +} + +fn c_string(input: Cursor) -> Result { + if let Ok(input) = input.parse("c\"") { + cooked_c_string(input) + } else if let Ok(input) = input.parse("cr") { + raw_c_string(input) + } else { + Err(Reject) + } +} + +fn raw_c_string(input: Cursor) -> Result { + let (input, delimiter) = delimiter_of_raw_string(input)?; + let mut bytes = input.bytes().enumerate(); + while let Some((i, byte)) = bytes.next() { + match byte { + b'"' if input.rest[i + 1..].starts_with(delimiter) => { + let rest = input.advance(i + 1 + delimiter.len()); + return Ok(literal_suffix(rest)); + } + b'\r' => match bytes.next() { + Some((_, b'\n')) => {} + _ => break, + }, + b'\0' => break, + _ => {} + } + } + Err(Reject) +} + +fn cooked_c_string(mut input: Cursor) -> Result { + let mut chars = input.char_indices(); + while let Some((i, ch)) = chars.next() { match ch { - '"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => { - let rest = input.advance(i + 1 + n); - return Ok(literal_suffix(rest)); + '"' => { + let input = input.advance(i + 1); + return Ok(literal_suffix(input)); } '\r' => match chars.next() { Some((_, '\n')) => {} _ => break, }, - _ => {} + '\\' => match chars.next() { + Some((_, 'x')) => { + backslash_x_nonzero(&mut chars)?; + } + Some((_, 'n' | 'r' | 't' | '\\' | '\'' | '"')) => {} + Some((_, 'u')) => { + if backslash_u(&mut chars)? 
== '\0' { + break; + } + } + Some((newline, ch @ ('\n' | '\r'))) => { + input = input.advance(newline + 1); + trailing_backslash(&mut input, ch as u8)?; + chars = input.char_indices(); + } + _ => break, + }, + '\0' => break, + _ch => {} } } Err(Reject) @@ -508,9 +573,8 @@ fn byte(input: Cursor) -> Result { let mut bytes = input.bytes().enumerate(); let ok = match bytes.next().map(|(_, b)| b) { Some(b'\\') => match bytes.next().map(|(_, b)| b) { - Some(b'x') => backslash_x_byte(&mut bytes), - Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'') - | Some(b'"') => true, + Some(b'x') => backslash_x_byte(&mut bytes).is_ok(), + Some(b'n' | b'r' | b't' | b'\\' | b'0' | b'\'' | b'"') => true, _ => false, }, b => b.is_some(), @@ -531,11 +595,9 @@ fn character(input: Cursor) -> Result { let mut chars = input.char_indices(); let ok = match chars.next().map(|(_, ch)| ch) { Some('\\') => match chars.next().map(|(_, ch)| ch) { - Some('x') => backslash_x_char(&mut chars), - Some('u') => backslash_u(&mut chars), - Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => { - true - } + Some('x') => backslash_x_char(&mut chars).is_ok(), + Some('u') => backslash_u(&mut chars).is_ok(), + Some('n' | 'r' | 't' | '\\' | '0' | '\'' | '"') => true, _ => false, }, ch => ch.is_some(), @@ -549,36 +611,49 @@ fn character(input: Cursor) -> Result { } macro_rules! next_ch { - ($chars:ident @ $pat:pat $(| $rest:pat)*) => { + ($chars:ident @ $pat:pat) => { match $chars.next() { Some((_, ch)) => match ch { - $pat $(| $rest)* => ch, - _ => return false, + $pat => ch, + _ => return Err(Reject), }, - None => return false, + None => return Err(Reject), } }; } -fn backslash_x_char(chars: &mut I) -> bool +fn backslash_x_char(chars: &mut I) -> Result<(), Reject> where I: Iterator, { next_ch!(chars @ '0'..='7'); next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F'); - true + Ok(()) } -fn backslash_x_byte(chars: &mut I) -> bool +fn backslash_x_byte(chars: &mut I) -> Result<(), Reject> where I: Iterator, { next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F'); next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F'); - true + Ok(()) +} + +fn backslash_x_nonzero(chars: &mut I) -> Result<(), Reject> +where + I: Iterator, +{ + let first = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F'); + let second = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F'); + if first == '0' && second == '0' { + Err(Reject) + } else { + Ok(()) + } } -fn backslash_u(chars: &mut I) -> bool +fn backslash_u(chars: &mut I) -> Result where I: Iterator, { @@ -591,17 +666,36 @@ where 'a'..='f' => 10 + ch as u8 - b'a', 'A'..='F' => 10 + ch as u8 - b'A', '_' if len > 0 => continue, - '}' if len > 0 => return char::from_u32(value).is_some(), - _ => return false, + '}' if len > 0 => return char::from_u32(value).ok_or(Reject), + _ => break, }; if len == 6 { - return false; + break; } value *= 0x10; value += u32::from(digit); len += 1; } - false + Err(Reject) +} + +fn trailing_backslash(input: &mut Cursor, mut last: u8) -> Result<(), Reject> { + let mut whitespace = input.bytes().enumerate(); + loop { + if last == b'\r' && whitespace.next().map_or(true, |(_, b)| b != b'\n') { + return Err(Reject); + } + match whitespace.next() { + Some((_, b @ (b' ' | b'\t' | b'\n' | b'\r'))) => { + last = b; + } + Some((offset, _)) => { + *input = input.advance(offset); + return Ok(()); + } + None => return Err(Reject), + } + } } fn float(input: Cursor) -> Result { @@ -617,7 +711,7 @@ fn float(input: Cursor) -> Result { fn 
float_digits(input: Cursor) -> Result { let mut chars = input.chars().peekable(); match chars.next() { - Some(ch) if ch >= '0' && ch <= '9' => {} + Some(ch) if '0' <= ch && ch <= '9' => {} _ => return Err(Reject), } diff --git a/src/rcvec.rs b/src/rcvec.rs index 62298b4..37955af 100644 --- a/src/rcvec.rs +++ b/src/rcvec.rs @@ -1,8 +1,8 @@ +use alloc::rc::Rc; +use alloc::vec; use core::mem; +use core::panic::RefUnwindSafe; use core::slice; -use std::panic::RefUnwindSafe; -use std::rc::Rc; -use std::vec; pub(crate) struct RcVec { inner: Rc>, @@ -53,7 +53,7 @@ impl RcVec { T: Clone, { let vec = if let Some(owned) = Rc::get_mut(&mut self.inner) { - mem::replace(owned, Vec::new()) + mem::take(owned) } else { Vec::clone(&self.inner) }; diff --git a/src/wrapper.rs b/src/wrapper.rs index 00f67cd..860b6b7 100644 --- a/src/wrapper.rs +++ b/src/wrapper.rs @@ -3,7 +3,6 @@ use crate::detection::inside_proc_macro; use crate::location::LineColumn; use crate::{fallback, Delimiter, Punct, Spacing, TokenTree}; use core::fmt::{self, Debug, Display}; -use core::iter::FromIterator; use core::ops::RangeBounds; use core::str::FromStr; use std::panic; @@ -286,15 +285,7 @@ impl Debug for LexError { impl Display for LexError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - #[cfg(not(no_lexerror_display))] LexError::Compiler(e) => Display::fmt(e, f), - #[cfg(no_lexerror_display)] - LexError::Compiler(_e) => Display::fmt( - &fallback::LexError { - span: fallback::Span::call_site(), - }, - f, - ), LexError::Fallback(e) => Display::fmt(e, f), } } @@ -406,7 +397,6 @@ impl Span { } } - #[cfg(not(no_hygiene))] pub fn mixed_site() -> Self { if inside_proc_macro() { Span::Compiler(proc_macro::Span::mixed_site()) @@ -426,13 +416,7 @@ impl Span { pub fn resolved_at(&self, other: Span) -> Span { match (self, other) { - #[cfg(not(no_hygiene))] (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)), - - // Name resolution affects semantics, but location is only cosmetic - #[cfg(no_hygiene)] - (Span::Compiler(_), Span::Compiler(_)) => other, - (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)), _ => mismatch(), } @@ -440,13 +424,7 @@ impl Span { pub fn located_at(&self, other: Span) -> Span { match (self, other) { - #[cfg(not(no_hygiene))] (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)), - - // Name resolution affects semantics, but location is only cosmetic - #[cfg(no_hygiene)] - (Span::Compiler(_), Span::Compiler(_)) => *self, - (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)), _ => mismatch(), } @@ -470,12 +448,6 @@ impl Span { #[cfg(span_locations)] pub fn start(&self) -> LineColumn { match self { - #[cfg(proc_macro_span)] - Span::Compiler(s) => { - let proc_macro::LineColumn { line, column } = s.start(); - LineColumn { line, column } - } - #[cfg(not(proc_macro_span))] Span::Compiler(_) => LineColumn { line: 0, column: 0 }, Span::Fallback(s) => s.start(), } @@ -484,33 +456,11 @@ impl Span { #[cfg(span_locations)] pub fn end(&self) -> LineColumn { match self { - #[cfg(proc_macro_span)] - Span::Compiler(s) => { - let proc_macro::LineColumn { line, column } = s.end(); - LineColumn { line, column } - } - #[cfg(not(proc_macro_span))] Span::Compiler(_) => LineColumn { line: 0, column: 0 }, Span::Fallback(s) => s.end(), } } - #[cfg(super_unstable)] - pub fn before(&self) -> Span { - match self { - Span::Compiler(s) => Span::Compiler(s.before()), - Span::Fallback(s) => Span::Fallback(s.before()), - } - } - - 
#[cfg(super_unstable)] - pub fn after(&self) -> Span { - match self { - Span::Compiler(s) => Span::Compiler(s.after()), - Span::Fallback(s) => Span::Fallback(s.after()), - } - } - pub fn join(&self, other: Span) -> Option { let ret = match (self, other) { #[cfg(proc_macro_span)] @@ -630,20 +580,14 @@ impl Group { pub fn span_open(&self) -> Span { match self { - #[cfg(not(no_group_open_close))] Group::Compiler(g) => Span::Compiler(g.span_open()), - #[cfg(no_group_open_close)] - Group::Compiler(g) => Span::Compiler(g.span()), Group::Fallback(g) => Span::Fallback(g.span_open()), } } pub fn span_close(&self) -> Span { match self { - #[cfg(not(no_group_open_close))] Group::Compiler(g) => Span::Compiler(g.span_close()), - #[cfg(no_group_open_close)] - Group::Compiler(g) => Span::Compiler(g.span()), Group::Fallback(g) => Span::Fallback(g.span_close()), } } @@ -704,27 +648,7 @@ impl Ident { pub fn new_raw(string: &str, span: Span) -> Self { match span { - #[cfg(not(no_ident_new_raw))] Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new_raw(string, s)), - #[cfg(no_ident_new_raw)] - Span::Compiler(s) => { - let _ = proc_macro::Ident::new(string, s); - // At this point the un-r#-prefixed string is known to be a - // valid identifier. Try to produce a valid raw identifier by - // running the `TokenStream` parser, and unwrapping the first - // token as an `Ident`. - let raw_prefixed = format!("r#{}", string); - if let Ok(ts) = raw_prefixed.parse::() { - let mut iter = ts.into_iter(); - if let (Some(proc_macro::TokenTree::Ident(mut id)), None) = - (iter.next(), iter.next()) - { - id.set_span(s); - return Ident::Compiler(id); - } - } - panic!("not allowed as a raw identifier: `{}`", raw_prefixed) - } Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_raw(string, s)), } } @@ -826,7 +750,7 @@ macro_rules! 
unsuffixed_integers { impl Literal { pub unsafe fn from_str_unchecked(repr: &str) -> Self { if inside_proc_macro() { - Literal::Compiler(compiler_literal_from_str(repr).expect("invalid literal")) + Literal::Compiler(proc_macro::Literal::from_str(repr).expect("invalid literal")) } else { Literal::Fallback(fallback::Literal::from_str_unchecked(repr)) } @@ -949,7 +873,8 @@ impl FromStr for Literal { fn from_str(repr: &str) -> Result { if inside_proc_macro() { - compiler_literal_from_str(repr).map(Literal::Compiler) + let literal = proc_macro::Literal::from_str(repr)?; + Ok(Literal::Compiler(literal)) } else { let literal = fallback::Literal::from_str(repr)?; Ok(Literal::Fallback(literal)) @@ -957,24 +882,6 @@ impl FromStr for Literal { } } -fn compiler_literal_from_str(repr: &str) -> Result { - #[cfg(not(no_literal_from_str))] - { - proc_macro::Literal::from_str(repr).map_err(LexError::Compiler) - } - #[cfg(no_literal_from_str)] - { - let tokens = proc_macro_parse(repr)?; - let mut iter = tokens.into_iter(); - if let (Some(proc_macro::TokenTree::Literal(literal)), None) = (iter.next(), iter.next()) { - if literal.to_string().len() == repr.len() { - return Ok(literal); - } - } - Err(LexError::call_site()) - } -} - impl Display for Literal { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { diff --git a/tests/marker.rs b/tests/marker.rs index 5b45733..d08fbfc 100644 --- a/tests/marker.rs +++ b/tests/marker.rs @@ -62,7 +62,6 @@ mod semver_exempt { assert_impl!(SourceFile is not Send or Sync); } -#[cfg(not(no_libprocmacro_unwind_safe))] mod unwind_safe { use proc_macro2::{ Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree, diff --git a/tests/test.rs b/tests/test.rs index 75f69e2..8e47b46 100644 --- a/tests/test.rs +++ b/tests/test.rs @@ -7,7 +7,6 @@ use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; use std::iter; -use std::panic; use std::str::{self, FromStr}; #[test] @@ -90,24 +89,9 @@ fn lifetime_number() { } #[test] +#[should_panic(expected = r#""'a#" is not a valid Ident"#)] fn lifetime_invalid() { - let result = panic::catch_unwind(|| Ident::new("'a#", Span::call_site())); - match result { - Err(box_any) => { - let message = box_any.downcast_ref::().unwrap(); - let expected1 = r#""\'a#" is not a valid Ident"#; // 1.31.0 .. 1.53.0 - let expected2 = r#""'a#" is not a valid Ident"#; // 1.53.0 .. 
- assert!( - message == expected1 || message == expected2, - "panic message does not match expected string\n\ - \x20 panic message: `{:?}`\n\ - \x20expected message: `{:?}`", - message, - expected2, - ); - } - Ok(_) => panic!("test did not panic as expected"), - } + Ident::new("'a#", Span::call_site()); } #[test] @@ -119,6 +103,9 @@ fn literal_string() { Literal::string("a\00b\07c\08d\0e\0").to_string(), "\"a\\x000b\\x007c\\08d\\0e\\0\"", ); + + "\"\\\r\n x\"".parse::().unwrap(); + "\"\\\r\n \rx\"".parse::().unwrap_err(); } #[test] @@ -156,6 +143,47 @@ fn literal_byte_string() { Literal::byte_string(b"a\00b\07c\08d\0e\0").to_string(), "b\"a\\x000b\\x007c\\08d\\0e\\0\"", ); + + "b\"\\\r\n x\"".parse::().unwrap(); + "b\"\\\r\n \rx\"".parse::().unwrap_err(); + "b\"\\\r\n \u{a0}x\"".parse::().unwrap_err(); + "br\"\u{a0}\"".parse::().unwrap_err(); +} + +#[test] +fn literal_c_string() { + let strings = r###" + c"hello\x80我叫\u{1F980}" // from the RFC + cr"\" + cr##"Hello "world"!"## + c"\t\n\r\"\\" + "###; + + let mut tokens = strings.parse::().unwrap().into_iter(); + + for expected in &[ + r#"c"hello\x80我叫\u{1F980}""#, + r#"cr"\""#, + r###"cr##"Hello "world"!"##"###, + r#"c"\t\n\r\"\\""#, + ] { + match tokens.next().unwrap() { + TokenTree::Literal(literal) => { + assert_eq!(literal.to_string(), *expected); + } + unexpected => panic!("unexpected token: {:?}", unexpected), + } + } + + if let Some(unexpected) = tokens.next() { + panic!("unexpected token: {:?}", unexpected); + } + + for invalid in &[r#"c"\0""#, r#"c"\x00""#, r#"c"\u{0}""#, "c\"\0\""] { + if let Ok(unexpected) = invalid.parse::() { + panic!("unexpected token: {:?}", unexpected); + } + } } #[test] @@ -636,8 +664,8 @@ fn non_ascii_tokens() { check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]); check_spans(r#""abc""#, &[(1, 0, 1, 5)]); check_spans(r#""ábc""#, &[(1, 0, 1, 5)]); - check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]); - check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]); + check_spans(r##"r#"abc"#"##, &[(1, 0, 1, 8)]); + check_spans(r##"r#"ábc"#"##, &[(1, 0, 1, 8)]); check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]); check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]); check_spans("'a'", &[(1, 0, 1, 3)]); @@ -657,7 +685,6 @@ fn non_ascii_tokens() { check_spans("ábc// foo", &[(1, 0, 1, 3)]); check_spans("ábć// foo", &[(1, 0, 1, 3)]); check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]); - check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]); } #[cfg(span_locations)] @@ -688,6 +715,18 @@ fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usi } } +#[test] +fn whitespace() { + // space, horizontal tab, vertical tab, form feed, carriage return, line + // feed, non-breaking space, left-to-right mark, right-to-left mark + let various_spaces = " \t\u{b}\u{c}\r\n\u{a0}\u{200e}\u{200f}"; + let tokens = various_spaces.parse::().unwrap(); + assert_eq!(tokens.into_iter().count(), 0); + + let lone_carriage_returns = " \r \r\r\n "; + lone_carriage_returns.parse::().unwrap(); +} + #[test] fn byte_order_mark() { let string = "\u{feff}foo"; diff --git a/tests/test_fmt.rs b/tests/test_fmt.rs index 93dd19e..86a4c38 100644 --- a/tests/test_fmt.rs +++ b/tests/test_fmt.rs @@ -1,7 +1,7 @@ #![allow(clippy::from_iter_instead_of_collect)] use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree}; -use std::iter::{self, FromIterator}; +use std::iter; #[test] fn test_fmt_group() { -- cgit v1.2.3