aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorandroid-build-team Robot <android-build-team-robot@google.com>2021-06-21 14:50:39 +0000
committerandroid-build-team Robot <android-build-team-robot@google.com>2021-06-21 14:50:39 +0000
commita98c220ffaf69246dab58b1c4e096a5360b596e6 (patch)
tree44cebb73ced35debf89bc3554d7388321cd1a37b
parent2d16900ad37fde01f9ac7dd4e46a6067f65abe19 (diff)
parentcfc7d1dc1fc3c6a5269340e2169efee176549f4d (diff)
downloadproc-macro2-android12-mainline-extservices-release.tar.gz
Change-Id: I46bccc69628c5f57d33496a18315c81245499d28
-rw-r--r--.cargo_vcs_info.json2
-rw-r--r--.clippy.toml1
-rw-r--r--.github/workflows/ci.yml73
-rw-r--r--.gitignore2
-rw-r--r--.travis.yml36
-rw-r--r--Android.bp130
-rw-r--r--Cargo.toml17
-rw-r--r--Cargo.toml.orig34
l---------LICENSE1
-rw-r--r--METADATA19
-rw-r--r--MODULE_LICENSE_APACHE20
-rw-r--r--OWNERS1
-rw-r--r--README.md2
-rw-r--r--TEST_MAPPING224
-rw-r--r--build.rs20
-rw-r--r--src/detection.rs67
-rw-r--r--src/fallback.rs1015
-rw-r--r--src/lib.rs260
-rw-r--r--src/marker.rs18
-rw-r--r--src/parse.rs866
-rw-r--r--src/strnom.rs391
-rw-r--r--src/wrapper.rs391
-rw-r--r--tests/comments.rs103
-rw-r--r--tests/marker.rs33
-rw-r--r--tests/test.rs260
-rw-r--r--tests/test_fmt.rs26
26 files changed, 2376 insertions, 1616 deletions
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json
index 19bdf08..1eb63e4 100644
--- a/.cargo_vcs_info.json
+++ b/.cargo_vcs_info.json
@@ -1,5 +1,5 @@
{
"git": {
- "sha1": "bdac3732544a3cfb73afe4548f550c369e906856"
+ "sha1": "56043a1715cf9c458e5203bdf792668e3b271651"
}
}
diff --git a/.clippy.toml b/.clippy.toml
new file mode 100644
index 0000000..3d30690
--- /dev/null
+++ b/.clippy.toml
@@ -0,0 +1 @@
+msrv = "1.31.0"
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..e469b08
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,73 @@
+name: CI
+
+on:
+ push:
+ pull_request:
+ schedule: [cron: "40 1 * * *"]
+
+jobs:
+ test:
+ name: Rust ${{ matrix.rust }}
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ matrix:
+ rust: [1.31.0, stable, beta]
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions-rs/toolchain@v1
+ with:
+ toolchain: ${{ matrix.rust }}
+ profile: minimal
+ override: true
+ - run: cargo test
+ - run: cargo test --no-default-features
+ - run: cargo test --features span-locations
+ - run: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
+ - run: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test --no-default-features
+
+ nightly:
+ name: Rust nightly
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions-rs/toolchain@v1
+ with:
+ toolchain: nightly
+ profile: minimal
+ override: true
+ - run: cargo test
+ - run: cargo test --no-default-features
+ - run: cargo test --no-default-features -- --ignored # run the ignored test to make sure the `proc-macro` feature is disabled
+ - run: cargo test --features span-locations
+ - run: cargo test --manifest-path tests/ui/Cargo.toml
+ - run: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
+ - run: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test --no-default-features
+ - run: RUSTFLAGS='-Z allow-features=' cargo test
+ - run: cargo update -Z minimal-versions && cargo build
+
+ webassembly:
+ name: WebAssembly
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions-rs/toolchain@v1
+ with:
+ toolchain: nightly
+ target: wasm32-unknown-unknown
+ profile: minimal
+ override: true
+ - run: cargo test --target wasm32-unknown-unknown --no-run
+
+ clippy:
+ name: Clippy
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions-rs/toolchain@v1
+ with:
+ toolchain: nightly
+ profile: minimal
+ override: true
+ components: clippy
+ - run: cargo clippy -- -Dclippy::all
diff --git a/.gitignore b/.gitignore
index 4308d82..6936990 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,3 @@
-target/
+/target
**/*.rs.bk
Cargo.lock
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index acddb57..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,36 +0,0 @@
-language: rust
-sudo: false
-
-matrix:
- include:
- - rust: 1.31.0
- - rust: stable
- - rust: beta
- - rust: nightly
- script:
- - cargo test
- - cargo test --no-default-features
- - cargo test --no-default-features -- --ignored # run the ignored test to make sure the `proc-macro` feature is disabled
- - cargo test --features span-locations
- - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
- - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test --no-default-features
- - RUSTFLAGS='-Z allow-features=' cargo test
- - cargo update -Z minimal-versions && cargo build
- - rust: nightly
- name: WebAssembly
- install: rustup target add wasm32-unknown-unknown
- script: cargo test --target wasm32-unknown-unknown --no-run
-
-before_script:
- - set -o errexit
-
-script:
- - cargo test
- - cargo test --no-default-features
- - cargo test --features span-locations
- - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
- - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test --no-default-features
-
-notifications:
- email:
- on_success: never
diff --git a/Android.bp b/Android.bp
index 3cf41a9..4c28943 100644
--- a/Android.bp
+++ b/Android.bp
@@ -1,6 +1,43 @@
-// This file is generated by cargo2android.py.
+// This file is generated by cargo2android.py --run --dependencies --tests --host-first-multilib --features=default,span-locations.
+// Do not modify this file as changes will be overridden on upgrade.
-rust_library_host_rlib {
+package {
+ default_applicable_licenses: ["external_rust_crates_proc-macro2_license"],
+}
+
+// Added automatically by a large-scale-change that took the approach of
+// 'apply every license found to every target'. While this makes sure we respect
+// every license restriction, it may not be entirely correct.
+//
+// e.g. GPL in an MIT project might only apply to the contrib/ directory.
+//
+// Please consider splitting the single license below into multiple licenses,
+// taking care not to lose any license_kind information, and overriding the
+// default license using the 'licenses: [...]' property on targets as needed.
+//
+// For unused files, consider creating a 'fileGroup' with "//visibility:private"
+// to attach the license to, and including a comment whether the files may be
+// used in the current project.
+//
+// large-scale-change included anything that looked like it might be a license
+// text as a license_text. e.g. LICENSE, NOTICE, COPYING etc.
+//
+// Please consider removing redundant or irrelevant files from 'license_text:'.
+// See: http://go/android-license-faq
+license {
+ name: "external_rust_crates_proc-macro2_license",
+ visibility: [":__subpackages__"],
+ license_kinds: [
+ "SPDX-license-identifier-Apache-2.0",
+ "SPDX-license-identifier-MIT",
+ ],
+ license_text: [
+ "LICENSE-APACHE",
+ "LICENSE-MIT",
+ ],
+}
+
+rust_library_host {
name: "libproc_macro2",
crate_name: "proc_macro2",
srcs: ["src/lib.rs"],
@@ -8,64 +45,123 @@ rust_library_host_rlib {
features: [
"default",
"proc-macro",
+ "span-locations",
],
flags: [
+ "--cfg hygiene",
+ "--cfg lexerror_display",
"--cfg proc_macro_span",
+ "--cfg span_locations",
"--cfg use_proc_macro",
"--cfg wrap_proc_macro",
],
- rlibs: [
+ rustlibs: [
"libunicode_xid",
],
+ compile_multilib: "first",
}
rust_test_host {
- name: "proc-macro2_tests",
+ name: "proc-macro2_host_test_src_lib",
crate_name: "proc_macro2",
- srcs: [
- "tests/features.rs",
- "tests/marker.rs",
- "tests/test.rs",
- ],
- relative_install_path: "proc-macro2_tests",
+ srcs: ["src/lib.rs"],
test_suites: ["general-tests"],
auto_gen_config: true,
+ test_options: {
+ unit_test: true,
+ },
edition: "2018",
features: [
"default",
"proc-macro",
+ "span-locations",
],
flags: [
+ "--cfg hygiene",
+ "--cfg lexerror_display",
"--cfg proc_macro_span",
+ "--cfg span_locations",
"--cfg use_proc_macro",
"--cfg wrap_proc_macro",
],
- rlibs: [
- "libproc_macro2",
+ rustlibs: [
"libquote",
"libunicode_xid",
],
}
-rust_test_host {
- name: "proc-macro2_tests_proc_macro2",
+rust_defaults {
+ name: "proc-macro2_defaults",
crate_name: "proc_macro2",
- srcs: ["src/lib.rs"],
- relative_install_path: "proc-macro2_tests",
test_suites: ["general-tests"],
auto_gen_config: true,
edition: "2018",
features: [
"default",
"proc-macro",
+ "span-locations",
],
flags: [
+ "--cfg hygiene",
+ "--cfg lexerror_display",
"--cfg proc_macro_span",
+ "--cfg span_locations",
"--cfg use_proc_macro",
"--cfg wrap_proc_macro",
],
- rlibs: [
+ rustlibs: [
+ "libproc_macro2",
"libquote",
"libunicode_xid",
],
}
+
+rust_test_host {
+ name: "proc-macro2_host_test_tests_comments",
+ defaults: ["proc-macro2_defaults"],
+ srcs: ["tests/comments.rs"],
+ test_options: {
+ unit_test: true,
+ },
+}
+
+rust_test_host {
+ name: "proc-macro2_host_test_tests_features",
+ defaults: ["proc-macro2_defaults"],
+ srcs: ["tests/features.rs"],
+ test_options: {
+ unit_test: true,
+ },
+}
+
+rust_test_host {
+ name: "proc-macro2_host_test_tests_marker",
+ defaults: ["proc-macro2_defaults"],
+ srcs: ["tests/marker.rs"],
+ test_options: {
+ unit_test: true,
+ },
+}
+
+rust_test_host {
+ name: "proc-macro2_host_test_tests_test",
+ defaults: ["proc-macro2_defaults"],
+ srcs: ["tests/test.rs"],
+ test_options: {
+ unit_test: true,
+ },
+}
+
+rust_test_host {
+ name: "proc-macro2_host_test_tests_test_fmt",
+ defaults: ["proc-macro2_defaults"],
+ srcs: ["tests/test_fmt.rs"],
+ test_options: {
+ unit_test: true,
+ },
+}
+
+// dependent_library ["feature_list"]
+// proc-macro2-1.0.26
+// quote-1.0.9
+// unicode-xid-0.2.1 "default"
diff --git a/Cargo.toml b/Cargo.toml
index a6fea91..3bb4b8e 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -13,21 +13,22 @@
[package]
edition = "2018"
name = "proc-macro2"
-version = "1.0.4"
-authors = ["Alex Crichton <alex@alexcrichton.com>"]
-description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
-homepage = "https://github.com/alexcrichton/proc-macro2"
+version = "1.0.26"
+authors = ["Alex Crichton <alex@alexcrichton.com>", "David Tolnay <dtolnay@gmail.com>"]
+description = "A substitute implementation of the compiler's `proc_macro` API to decouple\ntoken-based libraries from the procedural macro use case.\n"
documentation = "https://docs.rs/proc-macro2"
readme = "README.md"
keywords = ["macros"]
+categories = ["development-tools::procedural-macro-helpers"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/alexcrichton/proc-macro2"
[package.metadata.docs.rs]
rustc-args = ["--cfg", "procmacro2_semver_exempt"]
-rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
+rustdoc-args = ["--cfg", "procmacro2_semver_exempt", "--cfg", "doc_cfg"]
+targets = ["x86_64-unknown-linux-gnu"]
-[lib]
-name = "proc_macro2"
+[package.metadata.playground]
+features = ["span-locations"]
[dependencies.unicode-xid]
version = "0.2"
[dev-dependencies.quote]
@@ -39,5 +40,3 @@ default = ["proc-macro"]
nightly = []
proc-macro = []
span-locations = []
-[badges.travis-ci]
-repository = "alexcrichton/proc-macro2"
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
index fd5ee70..f229dbe 100644
--- a/Cargo.toml.orig
+++ b/Cargo.toml.orig
@@ -1,26 +1,26 @@
[package]
name = "proc-macro2"
-version = "1.0.4" # remember to update html_root_url
-authors = ["Alex Crichton <alex@alexcrichton.com>"]
+version = "1.0.26" # remember to update html_root_url
+authors = ["Alex Crichton <alex@alexcrichton.com>", "David Tolnay <dtolnay@gmail.com>"]
license = "MIT OR Apache-2.0"
readme = "README.md"
keywords = ["macros"]
repository = "https://github.com/alexcrichton/proc-macro2"
-homepage = "https://github.com/alexcrichton/proc-macro2"
documentation = "https://docs.rs/proc-macro2"
+categories = ["development-tools::procedural-macro-helpers"]
edition = "2018"
description = """
-A stable implementation of the upcoming new `proc_macro` API. Comes with an
-option, off by default, to also reimplement itself in terms of the upstream
-unstable API.
+A substitute implementation of the compiler's `proc_macro` API to decouple
+token-based libraries from the procedural macro use case.
"""
-[lib]
-name = "proc_macro2"
-
[package.metadata.docs.rs]
rustc-args = ["--cfg", "procmacro2_semver_exempt"]
-rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
+rustdoc-args = ["--cfg", "procmacro2_semver_exempt", "--cfg", "doc_cfg"]
+targets = ["x86_64-unknown-linux-gnu"]
+
+[package.metadata.playground]
+features = ["span-locations"]
[dependencies]
unicode-xid = "0.2"
@@ -39,8 +39,8 @@ span-locations = []
# This feature no longer means anything.
nightly = []
-[badges]
-travis-ci = { repository = "alexcrichton/proc-macro2" }
+[workspace]
+members = ["benches/bench-libproc-macro", "tests/ui"]
[patch.crates-io]
# Our doc tests depend on quote which depends on proc-macro2. Without this line,
@@ -49,9 +49,9 @@ travis-ci = { repository = "alexcrichton/proc-macro2" }
# meaning impls would be missing when tested against types from the local
# proc-macro2.
#
-# Travis builds that are in progress at the time that you publish may spuriously
-# fail. This is because they'll be building a local proc-macro2 which carries
-# the second-most-recent version number, pulling in quote which resolves to a
-# dependency on the just-published most recent version number. Thus the patch
-# will fail to apply because the version numbers are different.
+# GitHub Actions builds that are in progress at the time that you publish may
+# spuriously fail. This is because they'll be building a local proc-macro2 which
+# carries the second-most-recent version number, pulling in quote which resolves
+# to a dependency on the just-published most recent version number. Thus the
+# patch will fail to apply because the version numbers are different.
proc-macro2 = { path = "." }
diff --git a/LICENSE b/LICENSE
new file mode 120000
index 0000000..6b579aa
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1 @@
+LICENSE-APACHE \ No newline at end of file
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..1f73ea0
--- /dev/null
+++ b/METADATA
@@ -0,0 +1,19 @@
+name: "proc-macro2"
+description: "A substitute implementation of the compiler\'s `proc_macro` API to decouple token-based libraries from the procedural macro use case."
+third_party {
+ url {
+ type: HOMEPAGE
+ value: "https://crates.io/crates/proc-macro2"
+ }
+ url {
+ type: ARCHIVE
+ value: "https://static.crates.io/crates/proc-macro2/proc-macro2-1.0.26.crate"
+ }
+ version: "1.0.26"
+ license_type: NOTICE
+ last_upgrade_date {
+ year: 2021
+ month: 4
+ day: 1
+ }
+}
diff --git a/MODULE_LICENSE_APACHE2 b/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/MODULE_LICENSE_APACHE2
diff --git a/OWNERS b/OWNERS
new file mode 100644
index 0000000..46fc303
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1 @@
+include platform/prebuilts/rust:/OWNERS
diff --git a/README.md b/README.md
index 19b0c3b..3d05e87 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
# proc-macro2
-[![Build Status](https://api.travis-ci.com/alexcrichton/proc-macro2.svg?branch=master)](https://travis-ci.com/alexcrichton/proc-macro2)
+[![Build Status](https://img.shields.io/github/workflow/status/alexcrichton/proc-macro2/build%20and%20test)](https://github.com/alexcrichton/proc-macro2/actions)
[![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2)
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
diff --git a/TEST_MAPPING b/TEST_MAPPING
new file mode 100644
index 0000000..2e3c81b
--- /dev/null
+++ b/TEST_MAPPING
@@ -0,0 +1,224 @@
+// Generated by update_crate_tests.py for tests that depend on this crate.
+{
+ "presubmit": [
+ {
+ "name": "ZipFuseTest"
+ },
+ {
+ "name": "anyhow_device_test_src_lib"
+ },
+ {
+ "name": "anyhow_device_test_tests_test_autotrait"
+ },
+ {
+ "name": "anyhow_device_test_tests_test_boxed"
+ },
+ {
+ "name": "anyhow_device_test_tests_test_chain"
+ },
+ {
+ "name": "anyhow_device_test_tests_test_context"
+ },
+ {
+ "name": "anyhow_device_test_tests_test_convert"
+ },
+ {
+ "name": "anyhow_device_test_tests_test_downcast"
+ },
+ {
+ "name": "anyhow_device_test_tests_test_ffi"
+ },
+ {
+ "name": "anyhow_device_test_tests_test_fmt"
+ },
+ {
+ "name": "anyhow_device_test_tests_test_macros"
+ },
+ {
+ "name": "anyhow_device_test_tests_test_repr"
+ },
+ {
+ "name": "anyhow_device_test_tests_test_source"
+ },
+ {
+ "name": "authfs_device_test_src_lib"
+ },
+ {
+ "name": "doh_unit_test"
+ },
+ {
+ "name": "either_device_test_src_lib"
+ },
+ {
+ "name": "futures-util_device_test_src_lib"
+ },
+ {
+ "name": "keystore2_crypto_test_rust"
+ },
+ {
+ "name": "keystore2_selinux_test"
+ },
+ {
+ "name": "keystore2_test"
+ },
+ {
+ "name": "libm_device_test_src_lib"
+ },
+ {
+ "name": "libsqlite3-sys_device_test_src_lib"
+ },
+ {
+ "name": "serde_cbor_device_test_src_lib"
+ },
+ {
+ "name": "serde_cbor_device_test_tests_bennofs"
+ },
+ {
+ "name": "serde_cbor_device_test_tests_canonical"
+ },
+ {
+ "name": "serde_cbor_device_test_tests_de"
+ },
+ {
+ "name": "serde_cbor_device_test_tests_enum"
+ },
+ {
+ "name": "serde_cbor_device_test_tests_ser"
+ },
+ {
+ "name": "serde_cbor_device_test_tests_std_types"
+ },
+ {
+ "name": "serde_cbor_device_test_tests_tags"
+ },
+ {
+ "name": "serde_cbor_device_test_tests_value"
+ },
+ {
+ "name": "serde_test_device_test_src_lib"
+ },
+ {
+ "name": "tokio-test_device_test_src_lib"
+ },
+ {
+ "name": "tokio-test_device_test_tests_block_on"
+ },
+ {
+ "name": "tokio-test_device_test_tests_io"
+ },
+ {
+ "name": "tokio-test_device_test_tests_macros"
+ },
+ {
+ "name": "tokio_device_test_tests_buffered"
+ },
+ {
+ "name": "tokio_device_test_tests_io_async_read"
+ },
+ {
+ "name": "tokio_device_test_tests_io_copy_bidirectional"
+ },
+ {
+ "name": "tokio_device_test_tests_io_lines"
+ },
+ {
+ "name": "tokio_device_test_tests_io_mem_stream"
+ },
+ {
+ "name": "tokio_device_test_tests_io_read"
+ },
+ {
+ "name": "tokio_device_test_tests_io_read_buf"
+ },
+ {
+ "name": "tokio_device_test_tests_io_read_to_end"
+ },
+ {
+ "name": "tokio_device_test_tests_io_take"
+ },
+ {
+ "name": "tokio_device_test_tests_io_write"
+ },
+ {
+ "name": "tokio_device_test_tests_io_write_all"
+ },
+ {
+ "name": "tokio_device_test_tests_io_write_buf"
+ },
+ {
+ "name": "tokio_device_test_tests_io_write_int"
+ },
+ {
+ "name": "tokio_device_test_tests_macros_join"
+ },
+ {
+ "name": "tokio_device_test_tests_no_rt"
+ },
+ {
+ "name": "tokio_device_test_tests_rt_basic"
+ },
+ {
+ "name": "tokio_device_test_tests_rt_threaded"
+ },
+ {
+ "name": "tokio_device_test_tests_sync_barrier"
+ },
+ {
+ "name": "tokio_device_test_tests_sync_broadcast"
+ },
+ {
+ "name": "tokio_device_test_tests_sync_errors"
+ },
+ {
+ "name": "tokio_device_test_tests_sync_mpsc"
+ },
+ {
+ "name": "tokio_device_test_tests_sync_mutex_owned"
+ },
+ {
+ "name": "tokio_device_test_tests_sync_rwlock"
+ },
+ {
+ "name": "tokio_device_test_tests_sync_watch"
+ },
+ {
+ "name": "tokio_device_test_tests_task_local"
+ },
+ {
+ "name": "tokio_device_test_tests_task_local_set"
+ },
+ {
+ "name": "tokio_device_test_tests_tcp_accept"
+ },
+ {
+ "name": "tokio_device_test_tests_tcp_echo"
+ },
+ {
+ "name": "tokio_device_test_tests_tcp_into_std"
+ },
+ {
+ "name": "tokio_device_test_tests_tcp_shutdown"
+ },
+ {
+ "name": "tokio_device_test_tests_time_rt"
+ },
+ {
+ "name": "tokio_device_test_tests_uds_split"
+ },
+ {
+ "name": "unicode-bidi_device_test_src_lib"
+ },
+ {
+ "name": "url_device_test_src_lib"
+ },
+ {
+ "name": "url_device_test_tests_data"
+ },
+ {
+ "name": "url_device_test_tests_unit"
+ },
+ {
+ "name": "vpnprofilestore_test"
+ }
+ ]
+}
diff --git a/build.rs b/build.rs
index deb9b92..b247d87 100644
--- a/build.rs
+++ b/build.rs
@@ -14,6 +14,10 @@
// procmacro2_semver_exempt surface area is implemented by using the
// nightly-only proc_macro API.
//
+// "hygiene"
+// Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at
+// and Span::located_at. Enabled on Rust 1.45+.
+//
// "proc_macro_span"
// Enable non-dummy behavior of Span::start and Span::end methods which
// requires an unstable compiler feature. Enabled when building with
@@ -57,6 +61,22 @@ fn main() {
println!("cargo:rustc-cfg=span_locations");
}
+ if version.minor < 32 {
+ println!("cargo:rustc-cfg=no_libprocmacro_unwind_safe");
+ }
+
+ if version.minor < 39 {
+ println!("cargo:rustc-cfg=no_bind_by_move_pattern_guard");
+ }
+
+ if version.minor >= 44 {
+ println!("cargo:rustc-cfg=lexerror_display");
+ }
+
+ if version.minor >= 45 {
+ println!("cargo:rustc-cfg=hygiene");
+ }
+
let target = env::var("TARGET").unwrap();
if !enable_use_proc_macro(&target) {
return;
diff --git a/src/detection.rs b/src/detection.rs
new file mode 100644
index 0000000..c597bc9
--- /dev/null
+++ b/src/detection.rs
@@ -0,0 +1,67 @@
+use std::panic::{self, PanicInfo};
+use std::sync::atomic::*;
+use std::sync::Once;
+
+static WORKS: AtomicUsize = AtomicUsize::new(0);
+static INIT: Once = Once::new();
+
+pub(crate) fn inside_proc_macro() -> bool {
+ match WORKS.load(Ordering::SeqCst) {
+ 1 => return false,
+ 2 => return true,
+ _ => {}
+ }
+
+ INIT.call_once(initialize);
+ inside_proc_macro()
+}
+
+pub(crate) fn force_fallback() {
+ WORKS.store(1, Ordering::SeqCst);
+}
+
+pub(crate) fn unforce_fallback() {
+ initialize();
+}
+
+// Swap in a null panic hook to avoid printing "thread panicked" to stderr,
+// then use catch_unwind to determine whether the compiler's proc_macro is
+// working. When proc-macro2 is used from outside of a procedural macro all
+// of the proc_macro crate's APIs currently panic.
+//
+// The Once is to prevent the possibility of this ordering:
+//
+// thread 1 calls take_hook, gets the user's original hook
+// thread 1 calls set_hook with the null hook
+// thread 2 calls take_hook, thinks null hook is the original hook
+// thread 2 calls set_hook with the null hook
+// thread 1 calls set_hook with the actual original hook
+// thread 2 calls set_hook with what it thinks is the original hook
+//
+// in which the user's hook has been lost.
+//
+// There is still a race condition where a panic in a different thread can
+// happen during the interval that the user's original panic hook is
+// unregistered such that their hook is incorrectly not called. This is
+// sufficiently unlikely and less bad than printing panic messages to stderr
+// on correct use of this crate. Maybe there is a libstd feature request
+// here. For now, if a user needs to guarantee that this failure mode does
+// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
+// the main thread before launching any other threads.
+fn initialize() {
+ type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
+
+ let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
+ let sanity_check = &*null_hook as *const PanicHook;
+ let original_hook = panic::take_hook();
+ panic::set_hook(null_hook);
+
+ let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok();
+ WORKS.store(works as usize + 1, Ordering::SeqCst);
+
+ let hopefully_null_hook = panic::take_hook();
+ panic::set_hook(original_hook);
+ if sanity_check != &*hopefully_null_hook {
+ panic!("observed race condition in proc_macro2::inside_proc_macro");
+ }
+}
diff --git a/src/fallback.rs b/src/fallback.rs
index fe582b3..50d10db 100644
--- a/src/fallback.rs
+++ b/src/fallback.rs
@@ -1,27 +1,49 @@
+use crate::parse::{self, Cursor};
+use crate::{Delimiter, Spacing, TokenTree};
#[cfg(span_locations)]
use std::cell::RefCell;
#[cfg(span_locations)]
use std::cmp;
-use std::fmt;
-use std::iter;
+use std::fmt::{self, Debug, Display};
+use std::iter::FromIterator;
+use std::mem;
use std::ops::RangeBounds;
#[cfg(procmacro2_semver_exempt)]
use std::path::Path;
use std::path::PathBuf;
use std::str::FromStr;
use std::vec;
-
-use crate::strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
-use crate::{Delimiter, Punct, Spacing, TokenTree};
use unicode_xid::UnicodeXID;
+/// Force use of proc-macro2's fallback implementation of the API for now, even
+/// if the compiler's implementation is available.
+pub fn force() {
+ #[cfg(wrap_proc_macro)]
+ crate::detection::force_fallback();
+}
+
+/// Resume using the compiler's implementation of the proc macro API if it is
+/// available.
+pub fn unforce() {
+ #[cfg(wrap_proc_macro)]
+ crate::detection::unforce_fallback();
+}
+
#[derive(Clone)]
-pub struct TokenStream {
- inner: Vec<TokenTree>,
+pub(crate) struct TokenStream {
+ pub(crate) inner: Vec<TokenTree>,
}
#[derive(Debug)]
-pub struct LexError;
+pub(crate) struct LexError {
+ pub(crate) span: Span,
+}
+
+impl LexError {
+ pub(crate) fn span(&self) -> Span {
+ self.span
+ }
+}
impl TokenStream {
pub fn new() -> TokenStream {
@@ -31,6 +53,72 @@ impl TokenStream {
pub fn is_empty(&self) -> bool {
self.inner.len() == 0
}
+
+ fn take_inner(&mut self) -> Vec<TokenTree> {
+ mem::replace(&mut self.inner, Vec::new())
+ }
+
+ fn push_token(&mut self, token: TokenTree) {
+ // https://github.com/alexcrichton/proc-macro2/issues/235
+ match token {
+ #[cfg(not(no_bind_by_move_pattern_guard))]
+ TokenTree::Literal(crate::Literal {
+ #[cfg(wrap_proc_macro)]
+ inner: crate::imp::Literal::Fallback(literal),
+ #[cfg(not(wrap_proc_macro))]
+ inner: literal,
+ ..
+ }) if literal.text.starts_with('-') => {
+ push_negative_literal(self, literal);
+ }
+ #[cfg(no_bind_by_move_pattern_guard)]
+ TokenTree::Literal(crate::Literal {
+ #[cfg(wrap_proc_macro)]
+ inner: crate::imp::Literal::Fallback(literal),
+ #[cfg(not(wrap_proc_macro))]
+ inner: literal,
+ ..
+ }) => {
+ if literal.text.starts_with('-') {
+ push_negative_literal(self, literal);
+ } else {
+ self.inner
+ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
+ }
+ }
+ _ => self.inner.push(token),
+ }
+
+ #[cold]
+ fn push_negative_literal(stream: &mut TokenStream, mut literal: Literal) {
+ literal.text.remove(0);
+ let mut punct = crate::Punct::new('-', Spacing::Alone);
+ punct.set_span(crate::Span::_new_stable(literal.span));
+ stream.inner.push(TokenTree::Punct(punct));
+ stream
+ .inner
+ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
+ }
+ }
+}
+
+// Nonrecursive to prevent stack overflow.
+impl Drop for TokenStream {
+ fn drop(&mut self) {
+ while let Some(token) = self.inner.pop() {
+ let group = match token {
+ TokenTree::Group(group) => group.inner,
+ _ => continue,
+ };
+ #[cfg(wrap_proc_macro)]
+ let group = match group {
+ crate::imp::Group::Fallback(group) => group,
+ _ => continue,
+ };
+ let mut group = group;
+ self.inner.extend(group.stream.take_inner());
+ }
+ }
}
#[cfg(span_locations)]
@@ -59,20 +147,17 @@ impl FromStr for TokenStream {
// Create a dummy file & add it to the source map
let cursor = get_cursor(src);
- match token_stream(cursor) {
- Ok((input, output)) => {
- if skip_whitespace(input).len() != 0 {
- Err(LexError)
- } else {
- Ok(output)
- }
- }
- Err(LexError) => Err(LexError),
- }
+ parse::token_stream(cursor)
+ }
+}
+
+impl Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str("cannot parse string into token stream")
}
}
-impl fmt::Display for TokenStream {
+impl Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut joint = false;
for (i, tt) in self.inner.iter().enumerate() {
@@ -80,37 +165,22 @@ impl fmt::Display for TokenStream {
write!(f, " ")?;
}
joint = false;
- match *tt {
- TokenTree::Group(ref tt) => {
- let (start, end) = match tt.delimiter() {
- Delimiter::Parenthesis => ("(", ")"),
- Delimiter::Brace => ("{", "}"),
- Delimiter::Bracket => ("[", "]"),
- Delimiter::None => ("", ""),
- };
- if tt.stream().into_iter().next().is_none() {
- write!(f, "{} {}", start, end)?
- } else {
- write!(f, "{} {} {}", start, tt.stream(), end)?
- }
+ match tt {
+ TokenTree::Group(tt) => Display::fmt(tt, f),
+ TokenTree::Ident(tt) => Display::fmt(tt, f),
+ TokenTree::Punct(tt) => {
+ joint = tt.spacing() == Spacing::Joint;
+ Display::fmt(tt, f)
}
- TokenTree::Ident(ref tt) => write!(f, "{}", tt)?,
- TokenTree::Punct(ref tt) => {
- write!(f, "{}", tt.as_char())?;
- match tt.spacing() {
- Spacing::Alone => {}
- Spacing::Joint => joint = true,
- }
- }
- TokenTree::Literal(ref tt) => write!(f, "{}", tt)?,
- }
+ TokenTree::Literal(tt) => Display::fmt(tt, f),
+ }?
}
Ok(())
}
}
-impl fmt::Debug for TokenStream {
+impl Debug for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("TokenStream ")?;
f.debug_list().entries(self.clone()).finish()
@@ -139,28 +209,26 @@ impl From<TokenStream> for proc_macro::TokenStream {
impl From<TokenTree> for TokenStream {
fn from(tree: TokenTree) -> TokenStream {
- TokenStream { inner: vec![tree] }
+ let mut stream = TokenStream::new();
+ stream.push_token(tree);
+ stream
}
}
-impl iter::FromIterator<TokenTree> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
- let mut v = Vec::new();
-
- for token in streams.into_iter() {
- v.push(token);
- }
-
- TokenStream { inner: v }
+impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(tokens: I) -> Self {
+ let mut stream = TokenStream::new();
+ stream.extend(tokens);
+ stream
}
}
-impl iter::FromIterator<TokenStream> for TokenStream {
+impl FromIterator<TokenStream> for TokenStream {
fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
let mut v = Vec::new();
- for stream in streams.into_iter() {
- v.extend(stream.inner);
+ for mut stream in streams {
+ v.extend(stream.take_inner());
}
TokenStream { inner: v }
@@ -168,31 +236,30 @@ impl iter::FromIterator<TokenStream> for TokenStream {
}
impl Extend<TokenTree> for TokenStream {
- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
- self.inner.extend(streams);
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, tokens: I) {
+ tokens.into_iter().for_each(|token| self.push_token(token));
}
}
impl Extend<TokenStream> for TokenStream {
fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
- self.inner
- .extend(streams.into_iter().flat_map(|stream| stream));
+ self.inner.extend(streams.into_iter().flatten());
}
}
-pub type TokenTreeIter = vec::IntoIter<TokenTree>;
+pub(crate) type TokenTreeIter = vec::IntoIter<TokenTree>;
impl IntoIterator for TokenStream {
type Item = TokenTree;
type IntoIter = TokenTreeIter;
- fn into_iter(self) -> TokenTreeIter {
- self.inner.into_iter()
+ fn into_iter(mut self) -> TokenTreeIter {
+ self.take_inner().into_iter()
}
}
#[derive(Clone, PartialEq, Eq)]
-pub struct SourceFile {
+pub(crate) struct SourceFile {
path: PathBuf,
}
@@ -208,7 +275,7 @@ impl SourceFile {
}
}
-impl fmt::Debug for SourceFile {
+impl Debug for SourceFile {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("SourceFile")
.field("path", &self.path())
@@ -218,7 +285,7 @@ impl fmt::Debug for SourceFile {
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-pub struct LineColumn {
+pub(crate) struct LineColumn {
pub line: usize,
pub column: usize,
}
@@ -228,23 +295,11 @@ thread_local! {
static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
// NOTE: We start with a single dummy file which all call_site() and
// def_site() spans reference.
- files: vec![{
+ files: vec![FileInfo {
#[cfg(procmacro2_semver_exempt)]
- {
- FileInfo {
- name: "<unspecified>".to_owned(),
- span: Span { lo: 0, hi: 0 },
- lines: vec![0],
- }
- }
-
- #[cfg(not(procmacro2_semver_exempt))]
- {
- FileInfo {
- span: Span { lo: 0, hi: 0 },
- lines: vec![0],
- }
- }
+ name: "<unspecified>".to_owned(),
+ span: Span { lo: 0, hi: 0 },
+ lines: vec![0],
}],
});
}
@@ -282,16 +337,21 @@ impl FileInfo {
}
}
-/// Computesthe offsets of each line in the given source string.
+/// Computes the offsets of each line in the given source string
+/// and the total number of characters
#[cfg(span_locations)]
-fn lines_offsets(s: &str) -> Vec<usize> {
+fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
let mut lines = vec![0];
- let mut prev = 0;
- while let Some(len) = s[prev..].find('\n') {
- prev += len + 1;
- lines.push(prev);
+ let mut total = 0;
+
+ for ch in s.chars() {
+ total += 1;
+ if ch == '\n' {
+ lines.push(total);
+ }
}
- lines
+
+ (total, lines)
}
#[cfg(span_locations)]
@@ -310,23 +370,22 @@ impl SourceMap {
}
fn add_file(&mut self, name: &str, src: &str) -> Span {
- let lines = lines_offsets(src);
+ let (len, lines) = lines_offsets(src);
let lo = self.next_start_pos();
        // XXX(nika): Should we bother doing a checked cast or checked add here?
let span = Span {
lo,
- hi: lo + (src.len() as u32),
+ hi: lo + (len as u32),
};
- #[cfg(procmacro2_semver_exempt)]
self.files.push(FileInfo {
+ #[cfg(procmacro2_semver_exempt)]
name: name.to_owned(),
span,
lines,
});
#[cfg(not(procmacro2_semver_exempt))]
- self.files.push(FileInfo { span, lines });
let _ = name;
span
@@ -343,11 +402,11 @@ impl SourceMap {
}
#[derive(Clone, Copy, PartialEq, Eq)]
-pub struct Span {
+pub(crate) struct Span {
#[cfg(span_locations)]
- lo: u32,
+ pub(crate) lo: u32,
#[cfg(span_locations)]
- hi: u32,
+ pub(crate) hi: u32,
}
impl Span {
@@ -361,12 +420,16 @@ impl Span {
Span { lo: 0, hi: 0 }
}
+ #[cfg(hygiene)]
+ pub fn mixed_site() -> Span {
+ Span::call_site()
+ }
+
#[cfg(procmacro2_semver_exempt)]
pub fn def_site() -> Span {
Span::call_site()
}
- #[cfg(procmacro2_semver_exempt)]
pub fn resolved_at(&self, _other: Span) -> Span {
// Stable spans consist only of line/column information, so
// `resolved_at` and `located_at` only select which span the
@@ -374,7 +437,6 @@ impl Span {
*self
}
- #[cfg(procmacro2_semver_exempt)]
pub fn located_at(&self, other: Span) -> Span {
other
}
@@ -427,26 +489,59 @@ impl Span {
})
})
}
+
+ #[cfg(not(span_locations))]
+ fn first_byte(self) -> Self {
+ self
+ }
+
+ #[cfg(span_locations)]
+ fn first_byte(self) -> Self {
+ Span {
+ lo: self.lo,
+ hi: cmp::min(self.lo.saturating_add(1), self.hi),
+ }
+ }
+
+ #[cfg(not(span_locations))]
+ fn last_byte(self) -> Self {
+ self
+ }
+
+ #[cfg(span_locations)]
+ fn last_byte(self) -> Self {
+ Span {
+ lo: cmp::max(self.hi.saturating_sub(1), self.lo),
+ hi: self.hi,
+ }
+ }
}
-impl fmt::Debug for Span {
+impl Debug for Span {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- #[cfg(procmacro2_semver_exempt)]
+ #[cfg(span_locations)]
return write!(f, "bytes({}..{})", self.lo, self.hi);
- #[cfg(not(procmacro2_semver_exempt))]
+ #[cfg(not(span_locations))]
write!(f, "Span")
}
}
-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
- if cfg!(procmacro2_semver_exempt) {
+pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+ #[cfg(span_locations)]
+ {
+ if span.lo == 0 && span.hi == 0 {
+ return;
+ }
+ }
+
+ if cfg!(span_locations) {
debug.field("span", &span);
}
}
#[derive(Clone)]
-pub struct Group {
+pub(crate) struct Group {
delimiter: Delimiter,
stream: TokenStream,
span: Span,
@@ -474,11 +569,11 @@ impl Group {
}
pub fn span_open(&self) -> Span {
- self.span
+ self.span.first_byte()
}
pub fn span_close(&self) -> Span {
- self.span
+ self.span.last_byte()
}
pub fn set_span(&mut self, span: Span) {
@@ -486,36 +581,45 @@ impl Group {
}
}
-impl fmt::Display for Group {
+impl Display for Group {
+ // We attempt to match libproc_macro's formatting.
+ // Empty parens: ()
+ // Nonempty parens: (...)
+ // Empty brackets: []
+ // Nonempty brackets: [...]
+ // Empty braces: { }
+ // Nonempty braces: { ... }
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let (left, right) = match self.delimiter {
+ let (open, close) = match self.delimiter {
Delimiter::Parenthesis => ("(", ")"),
- Delimiter::Brace => ("{", "}"),
+ Delimiter::Brace => ("{ ", "}"),
Delimiter::Bracket => ("[", "]"),
Delimiter::None => ("", ""),
};
- f.write_str(left)?;
- self.stream.fmt(f)?;
- f.write_str(right)?;
+ f.write_str(open)?;
+ Display::fmt(&self.stream, f)?;
+ if self.delimiter == Delimiter::Brace && !self.stream.inner.is_empty() {
+ f.write_str(" ")?;
+ }
+ f.write_str(close)?;
Ok(())
}
}
-impl fmt::Debug for Group {
+impl Debug for Group {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let mut debug = fmt.debug_struct("Group");
debug.field("delimiter", &self.delimiter);
debug.field("stream", &self.stream);
- #[cfg(procmacro2_semver_exempt)]
- debug.field("span", &self.span);
+ debug_span_field_if_nontrivial(&mut debug, self.span);
debug.finish()
}
}
#[derive(Clone)]
-pub struct Ident {
+pub(crate) struct Ident {
sym: String,
span: Span,
raw: bool,
@@ -549,16 +653,14 @@ impl Ident {
}
}
-#[inline]
-fn is_ident_start(c: char) -> bool {
+pub(crate) fn is_ident_start(c: char) -> bool {
('a' <= c && c <= 'z')
|| ('A' <= c && c <= 'Z')
|| c == '_'
|| (c > '\x7f' && UnicodeXID::is_xid_start(c))
}
-#[inline]
-fn is_ident_continue(c: char) -> bool {
+pub(crate) fn is_ident_continue(c: char) -> bool {
('a' <= c && c <= 'z')
|| ('A' <= c && c <= 'Z')
|| c == '_'
@@ -615,18 +717,18 @@ where
}
}
-impl fmt::Display for Ident {
+impl Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if self.raw {
- "r#".fmt(f)?;
+ f.write_str("r#")?;
}
- self.sym.fmt(f)
+ Display::fmt(&self.sym, f)
}
}
-impl fmt::Debug for Ident {
+impl Debug for Ident {
// Ident(proc_macro), Ident(r#union)
- #[cfg(not(procmacro2_semver_exempt))]
+ #[cfg(not(span_locations))]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut debug = f.debug_tuple("Ident");
debug.field(&format_args!("{}", self));
@@ -637,17 +739,17 @@ impl fmt::Debug for Ident {
// sym: proc_macro,
// span: bytes(128..138)
// }
- #[cfg(procmacro2_semver_exempt)]
+ #[cfg(span_locations)]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut debug = f.debug_struct("Ident");
debug.field("sym", &format_args!("{}", self));
- debug.field("span", &self.span);
+ debug_span_field_if_nontrivial(&mut debug, self.span);
debug.finish()
}
}
#[derive(Clone)]
-pub struct Literal {
+pub(crate) struct Literal {
text: String,
span: Span,
}
@@ -669,7 +771,7 @@ macro_rules! unsuffixed_numbers {
}
impl Literal {
- fn _new(text: String) -> Literal {
+ pub(crate) fn _new(text: String) -> Literal {
Literal {
text,
span: Span::call_site(),
@@ -711,7 +813,7 @@ impl Literal {
pub fn f32_unsuffixed(f: f32) -> Literal {
let mut s = f.to_string();
- if !s.contains(".") {
+ if !s.contains('.') {
s.push_str(".0");
}
Literal::_new(s)
@@ -719,7 +821,7 @@ impl Literal {
pub fn f64_unsuffixed(f: f64) -> Literal {
let mut s = f.to_string();
- if !s.contains(".") {
+ if !s.contains('.') {
s.push_str(".0");
}
Literal::_new(s)
@@ -730,10 +832,10 @@ impl Literal {
text.push('"');
for c in t.chars() {
if c == '\'' {
- // escape_default turns this into "\'" which is unnecessary.
+ // escape_debug turns this into "\'" which is unnecessary.
text.push(c);
} else {
- text.extend(c.escape_default());
+ text.extend(c.escape_debug());
}
}
text.push('"');
@@ -744,10 +846,10 @@ impl Literal {
let mut text = String::new();
text.push('\'');
if t == '"' {
- // escape_default turns this into '\"' which is unnecessary.
+ // escape_debug turns this into '\"' which is unnecessary.
text.push(t);
} else {
- text.extend(t.escape_default());
+ text.extend(t.escape_debug());
}
text.push('\'');
Literal::_new(text)
@@ -756,6 +858,7 @@ impl Literal {
pub fn byte_string(bytes: &[u8]) -> Literal {
let mut escaped = "b\"".to_string();
for b in bytes {
+ #[allow(clippy::match_overlapping_arm)]
match *b {
b'\0' => escaped.push_str(r"\0"),
b'\t' => escaped.push_str(r"\t"),
@@ -784,651 +887,17 @@ impl Literal {
}
}
-impl fmt::Display for Literal {
+impl Display for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.text.fmt(f)
+ Display::fmt(&self.text, f)
}
}
-impl fmt::Debug for Literal {
+impl Debug for Literal {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let mut debug = fmt.debug_struct("Literal");
debug.field("lit", &format_args!("{}", self.text));
- #[cfg(procmacro2_semver_exempt)]
- debug.field("span", &self.span);
+ debug_span_field_if_nontrivial(&mut debug, self.span);
debug.finish()
}
}
-
-fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
- let mut trees = Vec::new();
- loop {
- let input_no_ws = skip_whitespace(input);
- if input_no_ws.rest.len() == 0 {
- break;
- }
- if let Ok((a, tokens)) = doc_comment(input_no_ws) {
- input = a;
- trees.extend(tokens);
- continue;
- }
-
- let (a, tt) = match token_tree(input_no_ws) {
- Ok(p) => p,
- Err(_) => break,
- };
- trees.push(tt);
- input = a;
- }
- Ok((input, TokenStream { inner: trees }))
-}
-
-#[cfg(not(span_locations))]
-fn spanned<'a, T>(
- input: Cursor<'a>,
- f: fn(Cursor<'a>) -> PResult<'a, T>,
-) -> PResult<'a, (T, crate::Span)> {
- let (a, b) = f(skip_whitespace(input))?;
- Ok((a, ((b, crate::Span::_new_stable(Span::call_site())))))
-}
-
-#[cfg(span_locations)]
-fn spanned<'a, T>(
- input: Cursor<'a>,
- f: fn(Cursor<'a>) -> PResult<'a, T>,
-) -> PResult<'a, (T, crate::Span)> {
- let input = skip_whitespace(input);
- let lo = input.off;
- let (a, b) = f(input)?;
- let hi = a.off;
- let span = crate::Span::_new_stable(Span { lo, hi });
- Ok((a, (b, span)))
-}
-
-fn token_tree(input: Cursor) -> PResult<TokenTree> {
- let (rest, (mut tt, span)) = spanned(input, token_kind)?;
- tt.set_span(span);
- Ok((rest, tt))
-}
-
-named!(token_kind -> TokenTree, alt!(
- map!(group, |g| TokenTree::Group(crate::Group::_new_stable(g)))
- |
- map!(literal, |l| TokenTree::Literal(crate::Literal::_new_stable(l))) // must be before symbol
- |
- map!(op, TokenTree::Punct)
- |
- symbol_leading_ws
-));
-
-named!(group -> Group, alt!(
- delimited!(
- punct!("("),
- token_stream,
- punct!(")")
- ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
- |
- delimited!(
- punct!("["),
- token_stream,
- punct!("]")
- ) => { |ts| Group::new(Delimiter::Bracket, ts) }
- |
- delimited!(
- punct!("{"),
- token_stream,
- punct!("}")
- ) => { |ts| Group::new(Delimiter::Brace, ts) }
-));
-
-fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
- symbol(skip_whitespace(input))
-}
-
-fn symbol(input: Cursor) -> PResult<TokenTree> {
- let raw = input.starts_with("r#");
- let rest = input.advance((raw as usize) << 1);
-
- let (rest, sym) = symbol_not_raw(rest)?;
-
- if !raw {
- let ident = crate::Ident::new(sym, crate::Span::call_site());
- return Ok((rest, ident.into()));
- }
-
- if sym == "_" {
- return Err(LexError);
- }
-
- let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
- Ok((rest, ident.into()))
-}
-
-fn symbol_not_raw(input: Cursor) -> PResult<&str> {
- let mut chars = input.char_indices();
-
- match chars.next() {
- Some((_, ch)) if is_ident_start(ch) => {}
- _ => return Err(LexError),
- }
-
- let mut end = input.len();
- for (i, ch) in chars {
- if !is_ident_continue(ch) {
- end = i;
- break;
- }
- }
-
- Ok((input.advance(end), &input.rest[..end]))
-}
-
-fn literal(input: Cursor) -> PResult<Literal> {
- let input_no_ws = skip_whitespace(input);
-
- match literal_nocapture(input_no_ws) {
- Ok((a, ())) => {
- let start = input.len() - input_no_ws.len();
- let len = input_no_ws.len() - a.len();
- let end = start + len;
- Ok((a, Literal::_new(input.rest[start..end].to_string())))
- }
- Err(LexError) => Err(LexError),
- }
-}
-
-named!(literal_nocapture -> (), alt!(
- string
- |
- byte_string
- |
- byte
- |
- character
- |
- float
- |
- int
-));
-
-named!(string -> (), alt!(
- quoted_string
- |
- preceded!(
- punct!("r"),
- raw_string
- ) => { |_| () }
-));
-
-named!(quoted_string -> (), do_parse!(
- punct!("\"") >>
- cooked_string >>
- tag!("\"") >>
- option!(symbol_not_raw) >>
- (())
-));
-
-fn cooked_string(input: Cursor) -> PResult<()> {
- let mut chars = input.char_indices().peekable();
- while let Some((byte_offset, ch)) = chars.next() {
- match ch {
- '"' => {
- return Ok((input.advance(byte_offset), ()));
- }
- '\r' => {
- if let Some((_, '\n')) = chars.next() {
- // ...
- } else {
- break;
- }
- }
- '\\' => match chars.next() {
- Some((_, 'x')) => {
- if !backslash_x_char(&mut chars) {
- break;
- }
- }
- Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
- | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
- Some((_, 'u')) => {
- if !backslash_u(&mut chars) {
- break;
- }
- }
- Some((_, '\n')) | Some((_, '\r')) => {
- while let Some(&(_, ch)) = chars.peek() {
- if ch.is_whitespace() {
- chars.next();
- } else {
- break;
- }
- }
- }
- _ => break,
- },
- _ch => {}
- }
- }
- Err(LexError)
-}
-
-named!(byte_string -> (), alt!(
- delimited!(
- punct!("b\""),
- cooked_byte_string,
- tag!("\"")
- ) => { |_| () }
- |
- preceded!(
- punct!("br"),
- raw_string
- ) => { |_| () }
-));
-
-fn cooked_byte_string(mut input: Cursor) -> PResult<()> {
- let mut bytes = input.bytes().enumerate();
- 'outer: while let Some((offset, b)) = bytes.next() {
- match b {
- b'"' => {
- return Ok((input.advance(offset), ()));
- }
- b'\r' => {
- if let Some((_, b'\n')) = bytes.next() {
- // ...
- } else {
- break;
- }
- }
- b'\\' => match bytes.next() {
- Some((_, b'x')) => {
- if !backslash_x_byte(&mut bytes) {
- break;
- }
- }
- Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
- | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
- Some((newline, b'\n')) | Some((newline, b'\r')) => {
- let rest = input.advance(newline + 1);
- for (offset, ch) in rest.char_indices() {
- if !ch.is_whitespace() {
- input = rest.advance(offset);
- bytes = input.bytes().enumerate();
- continue 'outer;
- }
- }
- break;
- }
- _ => break,
- },
- b if b < 0x80 => {}
- _ => break,
- }
- }
- Err(LexError)
-}
-
-fn raw_string(input: Cursor) -> PResult<()> {
- let mut chars = input.char_indices();
- let mut n = 0;
- while let Some((byte_offset, ch)) = chars.next() {
- match ch {
- '"' => {
- n = byte_offset;
- break;
- }
- '#' => {}
- _ => return Err(LexError),
- }
- }
- for (byte_offset, ch) in chars {
- match ch {
- '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => {
- let rest = input.advance(byte_offset + 1 + n);
- return Ok((rest, ()));
- }
- '\r' => {}
- _ => {}
- }
- }
- Err(LexError)
-}
-
-named!(byte -> (), do_parse!(
- punct!("b") >>
- tag!("'") >>
- cooked_byte >>
- tag!("'") >>
- (())
-));
-
-fn cooked_byte(input: Cursor) -> PResult<()> {
- let mut bytes = input.bytes().enumerate();
- let ok = match bytes.next().map(|(_, b)| b) {
- Some(b'\\') => match bytes.next().map(|(_, b)| b) {
- Some(b'x') => backslash_x_byte(&mut bytes),
- Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
- | Some(b'"') => true,
- _ => false,
- },
- b => b.is_some(),
- };
- if ok {
- match bytes.next() {
- Some((offset, _)) => {
- if input.chars().as_str().is_char_boundary(offset) {
- Ok((input.advance(offset), ()))
- } else {
- Err(LexError)
- }
- }
- None => Ok((input.advance(input.len()), ())),
- }
- } else {
- Err(LexError)
- }
-}
-
-named!(character -> (), do_parse!(
- punct!("'") >>
- cooked_char >>
- tag!("'") >>
- (())
-));
-
-fn cooked_char(input: Cursor) -> PResult<()> {
- let mut chars = input.char_indices();
- let ok = match chars.next().map(|(_, ch)| ch) {
- Some('\\') => match chars.next().map(|(_, ch)| ch) {
- Some('x') => backslash_x_char(&mut chars),
- Some('u') => backslash_u(&mut chars),
- Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
- true
- }
- _ => false,
- },
- ch => ch.is_some(),
- };
- if ok {
- match chars.next() {
- Some((idx, _)) => Ok((input.advance(idx), ())),
- None => Ok((input.advance(input.len()), ())),
- }
- } else {
- Err(LexError)
- }
-}
-
-macro_rules! next_ch {
- ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
- match $chars.next() {
- Some((_, ch)) => match ch {
- $pat $(| $rest)* => ch,
- _ => return false,
- },
- None => return false
- }
- };
-}
-
-fn backslash_x_char<I>(chars: &mut I) -> bool
-where
- I: Iterator<Item = (usize, char)>,
-{
- next_ch!(chars @ '0'..='7');
- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
- true
-}
-
-fn backslash_x_byte<I>(chars: &mut I) -> bool
-where
- I: Iterator<Item = (usize, u8)>,
-{
- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
- true
-}
-
-fn backslash_u<I>(chars: &mut I) -> bool
-where
- I: Iterator<Item = (usize, char)>,
-{
- next_ch!(chars @ '{');
- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
- loop {
- let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
- if c == '}' {
- return true;
- }
- }
-}
-
-fn float(input: Cursor) -> PResult<()> {
- let (mut rest, ()) = float_digits(input)?;
- if let Some(ch) = rest.chars().next() {
- if is_ident_start(ch) {
- rest = symbol_not_raw(rest)?.0;
- }
- }
- word_break(rest)
-}
-
-fn float_digits(input: Cursor) -> PResult<()> {
- let mut chars = input.chars().peekable();
- match chars.next() {
- Some(ch) if ch >= '0' && ch <= '9' => {}
- _ => return Err(LexError),
- }
-
- let mut len = 1;
- let mut has_dot = false;
- let mut has_exp = false;
- while let Some(&ch) = chars.peek() {
- match ch {
- '0'..='9' | '_' => {
- chars.next();
- len += 1;
- }
- '.' => {
- if has_dot {
- break;
- }
- chars.next();
- if chars
- .peek()
- .map(|&ch| ch == '.' || is_ident_start(ch))
- .unwrap_or(false)
- {
- return Err(LexError);
- }
- len += 1;
- has_dot = true;
- }
- 'e' | 'E' => {
- chars.next();
- len += 1;
- has_exp = true;
- break;
- }
- _ => break,
- }
- }
-
- let rest = input.advance(len);
- if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
- return Err(LexError);
- }
-
- if has_exp {
- let mut has_exp_value = false;
- while let Some(&ch) = chars.peek() {
- match ch {
- '+' | '-' => {
- if has_exp_value {
- break;
- }
- chars.next();
- len += 1;
- }
- '0'..='9' => {
- chars.next();
- len += 1;
- has_exp_value = true;
- }
- '_' => {
- chars.next();
- len += 1;
- }
- _ => break,
- }
- }
- if !has_exp_value {
- return Err(LexError);
- }
- }
-
- Ok((input.advance(len), ()))
-}
-
-fn int(input: Cursor) -> PResult<()> {
- let (mut rest, ()) = digits(input)?;
- if let Some(ch) = rest.chars().next() {
- if is_ident_start(ch) {
- rest = symbol_not_raw(rest)?.0;
- }
- }
- word_break(rest)
-}
-
-fn digits(mut input: Cursor) -> PResult<()> {
- let base = if input.starts_with("0x") {
- input = input.advance(2);
- 16
- } else if input.starts_with("0o") {
- input = input.advance(2);
- 8
- } else if input.starts_with("0b") {
- input = input.advance(2);
- 2
- } else {
- 10
- };
-
- let mut len = 0;
- let mut empty = true;
- for b in input.bytes() {
- let digit = match b {
- b'0'..=b'9' => (b - b'0') as u64,
- b'a'..=b'f' => 10 + (b - b'a') as u64,
- b'A'..=b'F' => 10 + (b - b'A') as u64,
- b'_' => {
- if empty && base == 10 {
- return Err(LexError);
- }
- len += 1;
- continue;
- }
- _ => break,
- };
- if digit >= base {
- return Err(LexError);
- }
- len += 1;
- empty = false;
- }
- if empty {
- Err(LexError)
- } else {
- Ok((input.advance(len), ()))
- }
-}
-
-fn op(input: Cursor) -> PResult<Punct> {
- let input = skip_whitespace(input);
- match op_char(input) {
- Ok((rest, '\'')) => {
- symbol(rest)?;
- Ok((rest, Punct::new('\'', Spacing::Joint)))
- }
- Ok((rest, ch)) => {
- let kind = match op_char(rest) {
- Ok(_) => Spacing::Joint,
- Err(LexError) => Spacing::Alone,
- };
- Ok((rest, Punct::new(ch, kind)))
- }
- Err(LexError) => Err(LexError),
- }
-}
-
-fn op_char(input: Cursor) -> PResult<char> {
- if input.starts_with("//") || input.starts_with("/*") {
- // Do not accept `/` of a comment as an op.
- return Err(LexError);
- }
-
- let mut chars = input.chars();
- let first = match chars.next() {
- Some(ch) => ch,
- None => {
- return Err(LexError);
- }
- };
- let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
- if recognized.contains(first) {
- Ok((input.advance(first.len_utf8()), first))
- } else {
- Err(LexError)
- }
-}
-
-fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
- let mut trees = Vec::new();
- let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
- trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
- if inner {
- trees.push(Punct::new('!', Spacing::Alone).into());
- }
- let mut stream = vec![
- TokenTree::Ident(crate::Ident::new("doc", span)),
- TokenTree::Punct(Punct::new('=', Spacing::Alone)),
- TokenTree::Literal(crate::Literal::string(comment)),
- ];
- for tt in stream.iter_mut() {
- tt.set_span(span);
- }
- let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
- trees.push(crate::Group::_new_stable(group).into());
- for tt in trees.iter_mut() {
- tt.set_span(span);
- }
- Ok((rest, trees))
-}
-
-named!(doc_comment_contents -> (&str, bool), alt!(
- do_parse!(
- punct!("//!") >>
- s: take_until_newline_or_eof!() >>
- ((s, true))
- )
- |
- do_parse!(
- option!(whitespace) >>
- peek!(tag!("/*!")) >>
- s: block_comment >>
- ((s, true))
- )
- |
- do_parse!(
- punct!("///") >>
- not!(tag!("/")) >>
- s: take_until_newline_or_eof!() >>
- ((s, false))
- )
- |
- do_parse!(
- option!(whitespace) >>
- peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
- s: block_comment >>
- ((s, false))
- )
-));
diff --git a/src/lib.rs b/src/lib.rs
index ad9e301..9dec309 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -78,27 +78,25 @@
//! a different thread.
// Proc-macro2 types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.4")]
+#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.26")]
#![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
#![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))]
+#![cfg_attr(doc_cfg, feature(doc_cfg))]
+#![allow(clippy::needless_doctest_main, clippy::vec_init_then_push)]
#[cfg(use_proc_macro)]
extern crate proc_macro;
-use std::cmp::Ordering;
-use std::fmt;
-use std::hash::{Hash, Hasher};
-use std::iter::FromIterator;
-use std::marker;
-use std::ops::RangeBounds;
-#[cfg(procmacro2_semver_exempt)]
-use std::path::PathBuf;
-use std::rc::Rc;
-use std::str::FromStr;
+mod marker;
+mod parse;
-#[macro_use]
-mod strnom;
-mod fallback;
+#[cfg(wrap_proc_macro)]
+mod detection;
+
+// Public for proc_macro2::fallback::force() and unforce(), but those are quite
+// a niche use case so we omit it from rustdoc.
+#[doc(hidden)]
+pub mod fallback;
#[cfg(not(wrap_proc_macro))]
use crate::fallback as imp;
@@ -106,6 +104,17 @@ use crate::fallback as imp;
#[cfg(wrap_proc_macro)]
mod imp;
+use crate::marker::Marker;
+use std::cmp::Ordering;
+use std::error::Error;
+use std::fmt::{self, Debug, Display};
+use std::hash::{Hash, Hasher};
+use std::iter::FromIterator;
+use std::ops::RangeBounds;
+#[cfg(procmacro2_semver_exempt)]
+use std::path::PathBuf;
+use std::str::FromStr;
+
/// An abstract stream of tokens, or more concretely a sequence of token trees.
///
/// This type provides interfaces for iterating over token trees and for
@@ -116,27 +125,27 @@ mod imp;
#[derive(Clone)]
pub struct TokenStream {
inner: imp::TokenStream,
- _marker: marker::PhantomData<Rc<()>>,
+ _marker: Marker,
}
/// Error returned from `TokenStream::from_str`.
pub struct LexError {
inner: imp::LexError,
- _marker: marker::PhantomData<Rc<()>>,
+ _marker: Marker,
}
impl TokenStream {
fn _new(inner: imp::TokenStream) -> TokenStream {
TokenStream {
inner,
- _marker: marker::PhantomData,
+ _marker: Marker,
}
}
fn _new_stable(inner: fallback::TokenStream) -> TokenStream {
TokenStream {
inner: inner.into(),
- _marker: marker::PhantomData,
+ _marker: Marker,
}
}
@@ -173,7 +182,7 @@ impl FromStr for TokenStream {
fn from_str(src: &str) -> Result<TokenStream, LexError> {
let e = src.parse().map_err(|e| LexError {
inner: e,
- _marker: marker::PhantomData,
+ _marker: Marker,
})?;
Ok(TokenStream::_new(e))
}
@@ -228,33 +237,48 @@ impl FromIterator<TokenStream> for TokenStream {
/// convertible back into the same token stream (modulo spans), except for
/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
/// numeric literals.
-impl fmt::Display for TokenStream {
+impl Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
+ Display::fmt(&self.inner, f)
}
}
/// Prints token in a form convenient for debugging.
-impl fmt::Debug for TokenStream {
+impl Debug for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
+ Debug::fmt(&self.inner, f)
+ }
+}
+
+impl LexError {
+ pub fn span(&self) -> Span {
+ Span::_new(self.inner.span())
}
}
-impl fmt::Debug for LexError {
+impl Debug for LexError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
+ Debug::fmt(&self.inner, f)
}
}
+impl Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Display::fmt(&self.inner, f)
+ }
+}
+
+impl Error for LexError {}
+
/// The source file of a given `Span`.
///
/// This type is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
+#[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
#[derive(Clone, PartialEq, Eq)]
pub struct SourceFile {
inner: imp::SourceFile,
- _marker: marker::PhantomData<Rc<()>>,
+ _marker: Marker,
}
#[cfg(procmacro2_semver_exempt)]
@@ -262,7 +286,7 @@ impl SourceFile {
fn _new(inner: imp::SourceFile) -> Self {
SourceFile {
inner,
- _marker: marker::PhantomData,
+ _marker: Marker,
}
}
@@ -291,9 +315,9 @@ impl SourceFile {
}
#[cfg(procmacro2_semver_exempt)]
-impl fmt::Debug for SourceFile {
+impl Debug for SourceFile {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
+ Debug::fmt(&self.inner, f)
}
}
@@ -301,6 +325,7 @@ impl fmt::Debug for SourceFile {
///
/// This type is semver exempt and not exposed by default.
#[cfg(span_locations)]
+#[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct LineColumn {
/// The 1-indexed line in the source file on which the span starts or ends
@@ -311,25 +336,41 @@ pub struct LineColumn {
pub column: usize,
}
+#[cfg(span_locations)]
+impl Ord for LineColumn {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.line
+ .cmp(&other.line)
+ .then(self.column.cmp(&other.column))
+ }
+}
+
+#[cfg(span_locations)]
+impl PartialOrd for LineColumn {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
/// A region of source code, along with macro expansion information.
#[derive(Copy, Clone)]
pub struct Span {
inner: imp::Span,
- _marker: marker::PhantomData<Rc<()>>,
+ _marker: Marker,
}
impl Span {
fn _new(inner: imp::Span) -> Span {
Span {
inner,
- _marker: marker::PhantomData,
+ _marker: Marker,
}
}
fn _new_stable(inner: fallback::Span) -> Span {
Span {
inner: inner.into(),
- _marker: marker::PhantomData,
+ _marker: Marker,
}
}
@@ -342,28 +383,33 @@ impl Span {
Span::_new(imp::Span::call_site())
}
+ /// The span located at the invocation of the procedural macro, but with
+ /// local variables, labels, and `$crate` resolved at the definition site
+ /// of the macro. This is the same hygiene behavior as `macro_rules`.
+ ///
+ /// This function requires Rust 1.45 or later.
+ #[cfg(hygiene)]
+ pub fn mixed_site() -> Span {
+ Span::_new(imp::Span::mixed_site())
+ }
+
/// A span that resolves at the macro definition site.
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
+ #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
pub fn def_site() -> Span {
Span::_new(imp::Span::def_site())
}
/// Creates a new span with the same line/column information as `self` but
/// that resolves symbols as though it were at `other`.
- ///
- /// This method is semver exempt and not exposed by default.
- #[cfg(procmacro2_semver_exempt)]
pub fn resolved_at(&self, other: Span) -> Span {
Span::_new(self.inner.resolved_at(other.inner))
}
/// Creates a new span with the same name resolution behavior as `self` but
/// with the line/column information of `other`.
- ///
- /// This method is semver exempt and not exposed by default.
- #[cfg(procmacro2_semver_exempt)]
pub fn located_at(&self, other: Span) -> Span {
Span::_new(self.inner.located_at(other.inner))
}
@@ -394,6 +440,7 @@ impl Span {
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
+ #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
pub fn source_file(&self) -> SourceFile {
SourceFile::_new(self.inner.source_file())
}
@@ -401,7 +448,14 @@ impl Span {
/// Get the starting line/column in the source file for this span.
///
/// This method requires the `"span-locations"` feature to be enabled.
+ ///
+ /// When executing in a procedural macro context, the returned line/column
+ /// are only meaningful if compiled with a nightly toolchain. The stable
+ /// toolchain does not have this information available. When executing
+ /// outside of a procedural macro, such as main.rs or build.rs, the
+ /// line/column are always meaningful regardless of toolchain.
#[cfg(span_locations)]
+ #[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
pub fn start(&self) -> LineColumn {
let imp::LineColumn { line, column } = self.inner.start();
LineColumn { line, column }
@@ -410,7 +464,14 @@ impl Span {
/// Get the ending line/column in the source file for this span.
///
/// This method requires the `"span-locations"` feature to be enabled.
+ ///
+ /// When executing in a procedural macro context, the returned line/column
+ /// are only meaningful if compiled with a nightly toolchain. The stable
+ /// toolchain does not have this information available. When executing
+ /// outside of a procedural macro, such as main.rs or build.rs, the
+ /// line/column are always meaningful regardless of toolchain.
#[cfg(span_locations)]
+ #[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
pub fn end(&self) -> LineColumn {
let imp::LineColumn { line, column } = self.inner.end();
LineColumn { line, column }
@@ -433,15 +494,16 @@ impl Span {
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
+ #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
pub fn eq(&self, other: &Span) -> bool {
self.inner.eq(&other.inner)
}
}
/// Prints a span in a form convenient for debugging.
-impl fmt::Debug for Span {
+impl Debug for Span {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
+ Debug::fmt(&self.inner, f)
}
}
@@ -462,11 +524,11 @@ impl TokenTree {
/// Returns the span of this tree, delegating to the `span` method of
/// the contained token or a delimited stream.
pub fn span(&self) -> Span {
- match *self {
- TokenTree::Group(ref t) => t.span(),
- TokenTree::Ident(ref t) => t.span(),
- TokenTree::Punct(ref t) => t.span(),
- TokenTree::Literal(ref t) => t.span(),
+ match self {
+ TokenTree::Group(t) => t.span(),
+ TokenTree::Ident(t) => t.span(),
+ TokenTree::Punct(t) => t.span(),
+ TokenTree::Literal(t) => t.span(),
}
}
@@ -476,11 +538,11 @@ impl TokenTree {
/// the span of each of the internal tokens, this will simply delegate to
/// the `set_span` method of each variant.
pub fn set_span(&mut self, span: Span) {
- match *self {
- TokenTree::Group(ref mut t) => t.set_span(span),
- TokenTree::Ident(ref mut t) => t.set_span(span),
- TokenTree::Punct(ref mut t) => t.set_span(span),
- TokenTree::Literal(ref mut t) => t.set_span(span),
+ match self {
+ TokenTree::Group(t) => t.set_span(span),
+ TokenTree::Ident(t) => t.set_span(span),
+ TokenTree::Punct(t) => t.set_span(span),
+ TokenTree::Literal(t) => t.set_span(span),
}
}
}
@@ -513,32 +575,32 @@ impl From<Literal> for TokenTree {
/// convertible back into the same token tree (modulo spans), except for
/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
/// numeric literals.
-impl fmt::Display for TokenTree {
+impl Display for TokenTree {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match *self {
- TokenTree::Group(ref t) => t.fmt(f),
- TokenTree::Ident(ref t) => t.fmt(f),
- TokenTree::Punct(ref t) => t.fmt(f),
- TokenTree::Literal(ref t) => t.fmt(f),
+ match self {
+ TokenTree::Group(t) => Display::fmt(t, f),
+ TokenTree::Ident(t) => Display::fmt(t, f),
+ TokenTree::Punct(t) => Display::fmt(t, f),
+ TokenTree::Literal(t) => Display::fmt(t, f),
}
}
}
/// Prints token tree in a form convenient for debugging.
-impl fmt::Debug for TokenTree {
+impl Debug for TokenTree {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Each of these has the name in the struct type in the derived debug,
// so don't bother with an extra layer of indirection
- match *self {
- TokenTree::Group(ref t) => t.fmt(f),
- TokenTree::Ident(ref t) => {
+ match self {
+ TokenTree::Group(t) => Debug::fmt(t, f),
+ TokenTree::Ident(t) => {
let mut debug = f.debug_struct("Ident");
debug.field("sym", &format_args!("{}", t));
imp::debug_span_field_if_nontrivial(&mut debug, t.span().inner);
debug.finish()
}
- TokenTree::Punct(ref t) => t.fmt(f),
- TokenTree::Literal(ref t) => t.fmt(f),
+ TokenTree::Punct(t) => Debug::fmt(t, f),
+ TokenTree::Literal(t) => Debug::fmt(t, f),
}
}
}
@@ -651,30 +713,30 @@ impl Group {
/// Prints the group as a string that should be losslessly convertible back
/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
/// with `Delimiter::None` delimiters.
-impl fmt::Display for Group {
+impl Display for Group {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
- fmt::Display::fmt(&self.inner, formatter)
+ Display::fmt(&self.inner, formatter)
}
}
-impl fmt::Debug for Group {
+impl Debug for Group {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
- fmt::Debug::fmt(&self.inner, formatter)
+ Debug::fmt(&self.inner, formatter)
}
}
-/// An `Punct` is an single punctuation character like `+`, `-` or `#`.
+/// A `Punct` is a single punctuation character like `+`, `-` or `#`.
///
/// Multicharacter operators like `+=` are represented as two instances of
/// `Punct` with different forms of `Spacing` returned.
#[derive(Clone)]
pub struct Punct {
- op: char,
+ ch: char,
spacing: Spacing,
span: Span,
}
-/// Whether an `Punct` is followed immediately by another `Punct` or followed by
+/// Whether a `Punct` is followed immediately by another `Punct` or followed by
/// another token or whitespace.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Spacing {
@@ -695,9 +757,9 @@ impl Punct {
///
/// The returned `Punct` will have the default span of `Span::call_site()`
/// which can be further configured with the `set_span` method below.
- pub fn new(op: char, spacing: Spacing) -> Punct {
+ pub fn new(ch: char, spacing: Spacing) -> Punct {
Punct {
- op,
+ ch,
spacing,
span: Span::call_site(),
}
@@ -705,7 +767,7 @@ impl Punct {
/// Returns the value of this punctuation character as `char`.
pub fn as_char(&self) -> char {
- self.op
+ self.ch
}
/// Returns the spacing of this punctuation character, indicating whether
@@ -730,16 +792,16 @@ impl Punct {
/// Prints the punctuation character as a string that should be losslessly
/// convertible back into the same character.
-impl fmt::Display for Punct {
+impl Display for Punct {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.op.fmt(f)
+ Display::fmt(&self.ch, f)
}
}
-impl fmt::Debug for Punct {
+impl Debug for Punct {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let mut debug = fmt.debug_struct("Punct");
- debug.field("op", &self.op);
+ debug.field("char", &self.ch);
debug.field("spacing", &self.spacing);
imp::debug_span_field_if_nontrivial(&mut debug, self.span.inner);
debug.finish()
@@ -813,14 +875,14 @@ impl fmt::Debug for Punct {
#[derive(Clone)]
pub struct Ident {
inner: imp::Ident,
- _marker: marker::PhantomData<Rc<()>>,
+ _marker: Marker,
}
impl Ident {
fn _new(inner: imp::Ident) -> Ident {
Ident {
inner,
- _marker: marker::PhantomData,
+ _marker: Marker,
}
}
@@ -863,6 +925,7 @@ impl Ident {
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
+ #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
pub fn new_raw(string: &str, span: Span) -> Ident {
Ident::_new_raw(string, span)
}
@@ -920,15 +983,15 @@ impl Hash for Ident {
/// Prints the identifier as a string that should be losslessly convertible back
/// into the same identifier.
-impl fmt::Display for Ident {
+impl Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
+ Display::fmt(&self.inner, f)
}
}
-impl fmt::Debug for Ident {
+impl Debug for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
+ Debug::fmt(&self.inner, f)
}
}
@@ -941,7 +1004,7 @@ impl fmt::Debug for Ident {
#[derive(Clone)]
pub struct Literal {
inner: imp::Literal,
- _marker: marker::PhantomData<Rc<()>>,
+ _marker: Marker,
}
macro_rules! suffixed_int_literals {
@@ -951,7 +1014,7 @@ macro_rules! suffixed_int_literals {
/// This function will create an integer like `1u32` where the integer
/// value specified is the first part of the token and the integral is
/// also suffixed at the end. Literals created from negative numbers may
- /// not survive rountrips through `TokenStream` or strings and may be
+ /// not survive roundtrips through `TokenStream` or strings and may be
/// broken into two tokens (`-` and positive literal).
///
/// Literals created through this method have the `Span::call_site()`
@@ -972,7 +1035,7 @@ macro_rules! unsuffixed_int_literals {
/// specified on this token, meaning that invocations like
/// `Literal::i8_unsuffixed(1)` are equivalent to
/// `Literal::u32_unsuffixed(1)`. Literals created from negative numbers
- /// may not survive rountrips through `TokenStream` or strings and may
+ /// may not survive roundtrips through `TokenStream` or strings and may
/// be broken into two tokens (`-` and positive literal).
///
/// Literals created through this method have the `Span::call_site()`
@@ -988,14 +1051,14 @@ impl Literal {
fn _new(inner: imp::Literal) -> Literal {
Literal {
inner,
- _marker: marker::PhantomData,
+ _marker: Marker,
}
}
fn _new_stable(inner: fallback::Literal) -> Literal {
Literal {
inner: inner.into(),
- _marker: marker::PhantomData,
+ _marker: Marker,
}
}
@@ -1140,26 +1203,25 @@ impl Literal {
}
}
-impl fmt::Debug for Literal {
+impl Debug for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
+ Debug::fmt(&self.inner, f)
}
}
-impl fmt::Display for Literal {
+impl Display for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
+ Display::fmt(&self.inner, f)
}
}
/// Public implementation details for the `TokenStream` type, such as iterators.
pub mod token_stream {
- use std::fmt;
- use std::marker;
- use std::rc::Rc;
+ use crate::marker::Marker;
+ use crate::{imp, TokenTree};
+ use std::fmt::{self, Debug};
pub use crate::TokenStream;
- use crate::{imp, TokenTree};
/// An iterator over `TokenStream`'s `TokenTree`s.
///
@@ -1168,7 +1230,7 @@ pub mod token_stream {
#[derive(Clone)]
pub struct IntoIter {
inner: imp::TokenTreeIter,
- _marker: marker::PhantomData<Rc<()>>,
+ _marker: Marker,
}
impl Iterator for IntoIter {
@@ -1179,9 +1241,9 @@ pub mod token_stream {
}
}
- impl fmt::Debug for IntoIter {
+ impl Debug for IntoIter {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
+ Debug::fmt(&self.inner, f)
}
}
@@ -1192,7 +1254,7 @@ pub mod token_stream {
fn into_iter(self) -> IntoIter {
IntoIter {
inner: self.inner.into_iter(),
- _marker: marker::PhantomData,
+ _marker: Marker,
}
}
}
diff --git a/src/marker.rs b/src/marker.rs
new file mode 100644
index 0000000..58729ba
--- /dev/null
+++ b/src/marker.rs
@@ -0,0 +1,18 @@
+use std::marker::PhantomData;
+use std::panic::{RefUnwindSafe, UnwindSafe};
+use std::rc::Rc;
+
+// Zero sized marker with the correct set of autotrait impls we want all proc
+// macro types to have.
+pub(crate) type Marker = PhantomData<ProcMacroAutoTraits>;
+
+pub(crate) use self::value::*;
+
+mod value {
+ pub(crate) use std::marker::PhantomData as Marker;
+}
+
+pub(crate) struct ProcMacroAutoTraits(Rc<()>);
+
+impl UnwindSafe for ProcMacroAutoTraits {}
+impl RefUnwindSafe for ProcMacroAutoTraits {}
diff --git a/src/parse.rs b/src/parse.rs
new file mode 100644
index 0000000..e5caed8
--- /dev/null
+++ b/src/parse.rs
@@ -0,0 +1,866 @@
+use crate::fallback::{
+ is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
+};
+use crate::{Delimiter, Punct, Spacing, TokenTree};
+use std::char;
+use std::str::{Bytes, CharIndices, Chars};
+
+#[derive(Copy, Clone, Eq, PartialEq)]
+pub(crate) struct Cursor<'a> {
+ pub rest: &'a str,
+ #[cfg(span_locations)]
+ pub off: u32,
+}
+
+impl<'a> Cursor<'a> {
+ fn advance(&self, bytes: usize) -> Cursor<'a> {
+ let (_front, rest) = self.rest.split_at(bytes);
+ Cursor {
+ rest,
+ #[cfg(span_locations)]
+ off: self.off + _front.chars().count() as u32,
+ }
+ }
+
+ fn starts_with(&self, s: &str) -> bool {
+ self.rest.starts_with(s)
+ }
+
+ fn is_empty(&self) -> bool {
+ self.rest.is_empty()
+ }
+
+ fn len(&self) -> usize {
+ self.rest.len()
+ }
+
+ fn as_bytes(&self) -> &'a [u8] {
+ self.rest.as_bytes()
+ }
+
+ fn bytes(&self) -> Bytes<'a> {
+ self.rest.bytes()
+ }
+
+ fn chars(&self) -> Chars<'a> {
+ self.rest.chars()
+ }
+
+ fn char_indices(&self) -> CharIndices<'a> {
+ self.rest.char_indices()
+ }
+
+ fn parse(&self, tag: &str) -> Result<Cursor<'a>, Reject> {
+ if self.starts_with(tag) {
+ Ok(self.advance(tag.len()))
+ } else {
+ Err(Reject)
+ }
+ }
+}
+
+struct Reject;
+type PResult<'a, O> = Result<(Cursor<'a>, O), Reject>;
+
+fn skip_whitespace(input: Cursor) -> Cursor {
+ let mut s = input;
+
+ while !s.is_empty() {
+ let byte = s.as_bytes()[0];
+ if byte == b'/' {
+ if s.starts_with("//")
+ && (!s.starts_with("///") || s.starts_with("////"))
+ && !s.starts_with("//!")
+ {
+ let (cursor, _) = take_until_newline_or_eof(s);
+ s = cursor;
+ continue;
+ } else if s.starts_with("/**/") {
+ s = s.advance(4);
+ continue;
+ } else if s.starts_with("/*")
+ && (!s.starts_with("/**") || s.starts_with("/***"))
+ && !s.starts_with("/*!")
+ {
+ match block_comment(s) {
+ Ok((rest, _)) => {
+ s = rest;
+ continue;
+ }
+ Err(Reject) => return s,
+ }
+ }
+ }
+ match byte {
+ b' ' | 0x09..=0x0d => {
+ s = s.advance(1);
+ continue;
+ }
+ b if b <= 0x7f => {}
+ _ => {
+ let ch = s.chars().next().unwrap();
+ if is_whitespace(ch) {
+ s = s.advance(ch.len_utf8());
+ continue;
+ }
+ }
+ }
+ return s;
+ }
+ s
+}
+
+fn block_comment(input: Cursor) -> PResult<&str> {
+ if !input.starts_with("/*") {
+ return Err(Reject);
+ }
+
+ let mut depth = 0;
+ let bytes = input.as_bytes();
+ let mut i = 0;
+ let upper = bytes.len() - 1;
+
+ while i < upper {
+ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
+ depth += 1;
+ i += 1; // eat '*'
+ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
+ depth -= 1;
+ if depth == 0 {
+ return Ok((input.advance(i + 2), &input.rest[..i + 2]));
+ }
+ i += 1; // eat '/'
+ }
+ i += 1;
+ }
+
+ Err(Reject)
+}
+
+fn is_whitespace(ch: char) -> bool {
+ // Rust treats left-to-right mark and right-to-left mark as whitespace
+ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
+}
+
+fn word_break(input: Cursor) -> Result<Cursor, Reject> {
+ match input.chars().next() {
+ Some(ch) if is_ident_continue(ch) => Err(Reject),
+ Some(_) | None => Ok(input),
+ }
+}
+
+pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
+ let mut trees = Vec::new();
+ let mut stack = Vec::new();
+
+ loop {
+ input = skip_whitespace(input);
+
+ if let Ok((rest, tt)) = doc_comment(input) {
+ trees.extend(tt);
+ input = rest;
+ continue;
+ }
+
+ #[cfg(span_locations)]
+ let lo = input.off;
+
+ let first = match input.bytes().next() {
+ Some(first) => first,
+ None => match stack.last() {
+ None => return Ok(TokenStream { inner: trees }),
+ #[cfg(span_locations)]
+ Some((lo, _frame)) => {
+ return Err(LexError {
+ span: Span { lo: *lo, hi: *lo },
+ })
+ }
+ #[cfg(not(span_locations))]
+ Some(_frame) => return Err(LexError { span: Span {} }),
+ },
+ };
+
+ if let Some(open_delimiter) = match first {
+ b'(' => Some(Delimiter::Parenthesis),
+ b'[' => Some(Delimiter::Bracket),
+ b'{' => Some(Delimiter::Brace),
+ _ => None,
+ } {
+ input = input.advance(1);
+ let frame = (open_delimiter, trees);
+ #[cfg(span_locations)]
+ let frame = (lo, frame);
+ stack.push(frame);
+ trees = Vec::new();
+ } else if let Some(close_delimiter) = match first {
+ b')' => Some(Delimiter::Parenthesis),
+ b']' => Some(Delimiter::Bracket),
+ b'}' => Some(Delimiter::Brace),
+ _ => None,
+ } {
+ let frame = match stack.pop() {
+ Some(frame) => frame,
+ None => return Err(lex_error(input)),
+ };
+ #[cfg(span_locations)]
+ let (lo, frame) = frame;
+ let (open_delimiter, outer) = frame;
+ if open_delimiter != close_delimiter {
+ return Err(lex_error(input));
+ }
+ input = input.advance(1);
+ let mut g = Group::new(open_delimiter, TokenStream { inner: trees });
+ g.set_span(Span {
+ #[cfg(span_locations)]
+ lo,
+ #[cfg(span_locations)]
+ hi: input.off,
+ });
+ trees = outer;
+ trees.push(TokenTree::Group(crate::Group::_new_stable(g)));
+ } else {
+ let (rest, mut tt) = match leaf_token(input) {
+ Ok((rest, tt)) => (rest, tt),
+ Err(Reject) => return Err(lex_error(input)),
+ };
+ tt.set_span(crate::Span::_new_stable(Span {
+ #[cfg(span_locations)]
+ lo,
+ #[cfg(span_locations)]
+ hi: rest.off,
+ }));
+ trees.push(tt);
+ input = rest;
+ }
+ }
+}
+
+fn lex_error(cursor: Cursor) -> LexError {
+ #[cfg(not(span_locations))]
+ let _ = cursor;
+ LexError {
+ span: Span {
+ #[cfg(span_locations)]
+ lo: cursor.off,
+ #[cfg(span_locations)]
+ hi: cursor.off,
+ },
+ }
+}
+
+fn leaf_token(input: Cursor) -> PResult<TokenTree> {
+ if let Ok((input, l)) = literal(input) {
+ // must be parsed before ident
+ Ok((input, TokenTree::Literal(crate::Literal::_new_stable(l))))
+ } else if let Ok((input, p)) = punct(input) {
+ Ok((input, TokenTree::Punct(p)))
+ } else if let Ok((input, i)) = ident(input) {
+ Ok((input, TokenTree::Ident(i)))
+ } else {
+ Err(Reject)
+ }
+}
+
+fn ident(input: Cursor) -> PResult<crate::Ident> {
+ if ["r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#"]
+ .iter()
+ .any(|prefix| input.starts_with(prefix))
+ {
+ Err(Reject)
+ } else {
+ ident_any(input)
+ }
+}
+
+fn ident_any(input: Cursor) -> PResult<crate::Ident> {
+ let raw = input.starts_with("r#");
+ let rest = input.advance((raw as usize) << 1);
+
+ let (rest, sym) = ident_not_raw(rest)?;
+
+ if !raw {
+ let ident = crate::Ident::new(sym, crate::Span::call_site());
+ return Ok((rest, ident));
+ }
+
+ if sym == "_" {
+ return Err(Reject);
+ }
+
+ let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
+ Ok((rest, ident))
+}
+
+fn ident_not_raw(input: Cursor) -> PResult<&str> {
+ let mut chars = input.char_indices();
+
+ match chars.next() {
+ Some((_, ch)) if is_ident_start(ch) => {}
+ _ => return Err(Reject),
+ }
+
+ let mut end = input.len();
+ for (i, ch) in chars {
+ if !is_ident_continue(ch) {
+ end = i;
+ break;
+ }
+ }
+
+ Ok((input.advance(end), &input.rest[..end]))
+}
+
+fn literal(input: Cursor) -> PResult<Literal> {
+ let rest = literal_nocapture(input)?;
+ let end = input.len() - rest.len();
+ Ok((rest, Literal::_new(input.rest[..end].to_string())))
+}
+
+fn literal_nocapture(input: Cursor) -> Result<Cursor, Reject> {
+ if let Ok(ok) = string(input) {
+ Ok(ok)
+ } else if let Ok(ok) = byte_string(input) {
+ Ok(ok)
+ } else if let Ok(ok) = byte(input) {
+ Ok(ok)
+ } else if let Ok(ok) = character(input) {
+ Ok(ok)
+ } else if let Ok(ok) = float(input) {
+ Ok(ok)
+ } else if let Ok(ok) = int(input) {
+ Ok(ok)
+ } else {
+ Err(Reject)
+ }
+}
+
+fn literal_suffix(input: Cursor) -> Cursor {
+ match ident_not_raw(input) {
+ Ok((input, _)) => input,
+ Err(Reject) => input,
+ }
+}
+
+fn string(input: Cursor) -> Result<Cursor, Reject> {
+ if let Ok(input) = input.parse("\"") {
+ cooked_string(input)
+ } else if let Ok(input) = input.parse("r") {
+ raw_string(input)
+ } else {
+ Err(Reject)
+ }
+}
+
+fn cooked_string(input: Cursor) -> Result<Cursor, Reject> {
+ let mut chars = input.char_indices().peekable();
+
+ while let Some((i, ch)) = chars.next() {
+ match ch {
+ '"' => {
+ let input = input.advance(i + 1);
+ return Ok(literal_suffix(input));
+ }
+ '\r' => match chars.next() {
+ Some((_, '\n')) => {}
+ _ => break,
+ },
+ '\\' => match chars.next() {
+ Some((_, 'x')) => {
+ if !backslash_x_char(&mut chars) {
+ break;
+ }
+ }
+ Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
+ | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
+ Some((_, 'u')) => {
+ if !backslash_u(&mut chars) {
+ break;
+ }
+ }
+ Some((_, ch @ '\n')) | Some((_, ch @ '\r')) => {
+ let mut last = ch;
+ loop {
+ if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') {
+ return Err(Reject);
+ }
+ match chars.peek() {
+ Some((_, ch)) if ch.is_whitespace() => {
+ last = *ch;
+ chars.next();
+ }
+ _ => break,
+ }
+ }
+ }
+ _ => break,
+ },
+ _ch => {}
+ }
+ }
+ Err(Reject)
+}
+
+fn byte_string(input: Cursor) -> Result<Cursor, Reject> {
+ if let Ok(input) = input.parse("b\"") {
+ cooked_byte_string(input)
+ } else if let Ok(input) = input.parse("br") {
+ raw_string(input)
+ } else {
+ Err(Reject)
+ }
+}
+
+fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, Reject> {
+ let mut bytes = input.bytes().enumerate();
+ while let Some((offset, b)) = bytes.next() {
+ match b {
+ b'"' => {
+ let input = input.advance(offset + 1);
+ return Ok(literal_suffix(input));
+ }
+ b'\r' => match bytes.next() {
+ Some((_, b'\n')) => {}
+ _ => break,
+ },
+ b'\\' => match bytes.next() {
+ Some((_, b'x')) => {
+ if !backslash_x_byte(&mut bytes) {
+ break;
+ }
+ }
+ Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
+ | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
+ Some((newline, b @ b'\n')) | Some((newline, b @ b'\r')) => {
+ let mut last = b as char;
+ let rest = input.advance(newline + 1);
+ let mut chars = rest.char_indices();
+ loop {
+ if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') {
+ return Err(Reject);
+ }
+ match chars.next() {
+ Some((_, ch)) if ch.is_whitespace() => last = ch,
+ Some((offset, _)) => {
+ input = rest.advance(offset);
+ bytes = input.bytes().enumerate();
+ break;
+ }
+ None => return Err(Reject),
+ }
+ }
+ }
+ _ => break,
+ },
+ b if b < 0x80 => {}
+ _ => break,
+ }
+ }
+ Err(Reject)
+}
+
+fn raw_string(input: Cursor) -> Result<Cursor, Reject> {
+ let mut chars = input.char_indices();
+ let mut n = 0;
+ while let Some((i, ch)) = chars.next() {
+ match ch {
+ '"' => {
+ n = i;
+ break;
+ }
+ '#' => {}
+ _ => return Err(Reject),
+ }
+ }
+ while let Some((i, ch)) = chars.next() {
+ match ch {
+ '"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => {
+ let rest = input.advance(i + 1 + n);
+ return Ok(literal_suffix(rest));
+ }
+ '\r' => match chars.next() {
+ Some((_, '\n')) => {}
+ _ => break,
+ },
+ _ => {}
+ }
+ }
+ Err(Reject)
+}
+
+fn byte(input: Cursor) -> Result<Cursor, Reject> {
+ let input = input.parse("b'")?;
+ let mut bytes = input.bytes().enumerate();
+ let ok = match bytes.next().map(|(_, b)| b) {
+ Some(b'\\') => match bytes.next().map(|(_, b)| b) {
+ Some(b'x') => backslash_x_byte(&mut bytes),
+ Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
+ | Some(b'"') => true,
+ _ => false,
+ },
+ b => b.is_some(),
+ };
+ if !ok {
+ return Err(Reject);
+ }
+ let (offset, _) = bytes.next().ok_or(Reject)?;
+ if !input.chars().as_str().is_char_boundary(offset) {
+ return Err(Reject);
+ }
+ let input = input.advance(offset).parse("'")?;
+ Ok(literal_suffix(input))
+}
+
+fn character(input: Cursor) -> Result<Cursor, Reject> {
+ let input = input.parse("'")?;
+ let mut chars = input.char_indices();
+ let ok = match chars.next().map(|(_, ch)| ch) {
+ Some('\\') => match chars.next().map(|(_, ch)| ch) {
+ Some('x') => backslash_x_char(&mut chars),
+ Some('u') => backslash_u(&mut chars),
+ Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
+ true
+ }
+ _ => false,
+ },
+ ch => ch.is_some(),
+ };
+ if !ok {
+ return Err(Reject);
+ }
+ let (idx, _) = chars.next().ok_or(Reject)?;
+ let input = input.advance(idx).parse("'")?;
+ Ok(literal_suffix(input))
+}
+
+macro_rules! next_ch {
+ ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
+ match $chars.next() {
+ Some((_, ch)) => match ch {
+ $pat $(| $rest)* => ch,
+ _ => return false,
+ },
+ None => return false,
+ }
+ };
+}
+
+fn backslash_x_char<I>(chars: &mut I) -> bool
+where
+ I: Iterator<Item = (usize, char)>,
+{
+ next_ch!(chars @ '0'..='7');
+ next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+ true
+}
+
+fn backslash_x_byte<I>(chars: &mut I) -> bool
+where
+ I: Iterator<Item = (usize, u8)>,
+{
+ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+ true
+}
+
+fn backslash_u<I>(chars: &mut I) -> bool
+where
+ I: Iterator<Item = (usize, char)>,
+{
+ next_ch!(chars @ '{');
+ let mut value = 0;
+ let mut len = 0;
+ for (_, ch) in chars {
+ let digit = match ch {
+ '0'..='9' => ch as u8 - b'0',
+ 'a'..='f' => 10 + ch as u8 - b'a',
+ 'A'..='F' => 10 + ch as u8 - b'A',
+ '_' if len > 0 => continue,
+ '}' if len > 0 => return char::from_u32(value).is_some(),
+ _ => return false,
+ };
+ if len == 6 {
+ return false;
+ }
+ value *= 0x10;
+ value += u32::from(digit);
+ len += 1;
+ }
+ false
+}
+
+fn float(input: Cursor) -> Result<Cursor, Reject> {
+ let mut rest = float_digits(input)?;
+ if let Some(ch) = rest.chars().next() {
+ if is_ident_start(ch) {
+ rest = ident_not_raw(rest)?.0;
+ }
+ }
+ word_break(rest)
+}
+
+fn float_digits(input: Cursor) -> Result<Cursor, Reject> {
+ let mut chars = input.chars().peekable();
+ match chars.next() {
+ Some(ch) if ch >= '0' && ch <= '9' => {}
+ _ => return Err(Reject),
+ }
+
+ let mut len = 1;
+ let mut has_dot = false;
+ let mut has_exp = false;
+ while let Some(&ch) = chars.peek() {
+ match ch {
+ '0'..='9' | '_' => {
+ chars.next();
+ len += 1;
+ }
+ '.' => {
+ if has_dot {
+ break;
+ }
+ chars.next();
+ if chars
+ .peek()
+ .map(|&ch| ch == '.' || is_ident_start(ch))
+ .unwrap_or(false)
+ {
+ return Err(Reject);
+ }
+ len += 1;
+ has_dot = true;
+ }
+ 'e' | 'E' => {
+ chars.next();
+ len += 1;
+ has_exp = true;
+ break;
+ }
+ _ => break,
+ }
+ }
+
+ if !(has_dot || has_exp) {
+ return Err(Reject);
+ }
+
+ if has_exp {
+ let token_before_exp = if has_dot {
+ Ok(input.advance(len - 1))
+ } else {
+ Err(Reject)
+ };
+ let mut has_sign = false;
+ let mut has_exp_value = false;
+ while let Some(&ch) = chars.peek() {
+ match ch {
+ '+' | '-' => {
+ if has_exp_value {
+ break;
+ }
+ if has_sign {
+ return token_before_exp;
+ }
+ chars.next();
+ len += 1;
+ has_sign = true;
+ }
+ '0'..='9' => {
+ chars.next();
+ len += 1;
+ has_exp_value = true;
+ }
+ '_' => {
+ chars.next();
+ len += 1;
+ }
+ _ => break,
+ }
+ }
+ if !has_exp_value {
+ return token_before_exp;
+ }
+ }
+
+ Ok(input.advance(len))
+}
+
+fn int(input: Cursor) -> Result<Cursor, Reject> {
+ let mut rest = digits(input)?;
+ if let Some(ch) = rest.chars().next() {
+ if is_ident_start(ch) {
+ rest = ident_not_raw(rest)?.0;
+ }
+ }
+ word_break(rest)
+}
+
+fn digits(mut input: Cursor) -> Result<Cursor, Reject> {
+ let base = if input.starts_with("0x") {
+ input = input.advance(2);
+ 16
+ } else if input.starts_with("0o") {
+ input = input.advance(2);
+ 8
+ } else if input.starts_with("0b") {
+ input = input.advance(2);
+ 2
+ } else {
+ 10
+ };
+
+ let mut len = 0;
+ let mut empty = true;
+ for b in input.bytes() {
+ match b {
+ b'0'..=b'9' => {
+ let digit = (b - b'0') as u64;
+ if digit >= base {
+ return Err(Reject);
+ }
+ }
+ b'a'..=b'f' => {
+ let digit = 10 + (b - b'a') as u64;
+ if digit >= base {
+ break;
+ }
+ }
+ b'A'..=b'F' => {
+ let digit = 10 + (b - b'A') as u64;
+ if digit >= base {
+ break;
+ }
+ }
+ b'_' => {
+ if empty && base == 10 {
+ return Err(Reject);
+ }
+ len += 1;
+ continue;
+ }
+ _ => break,
+ };
+ len += 1;
+ empty = false;
+ }
+ if empty {
+ Err(Reject)
+ } else {
+ Ok(input.advance(len))
+ }
+}
+
+fn punct(input: Cursor) -> PResult<Punct> {
+ let (rest, ch) = punct_char(input)?;
+ if ch == '\'' {
+ if ident_any(rest)?.0.starts_with("'") {
+ Err(Reject)
+ } else {
+ Ok((rest, Punct::new('\'', Spacing::Joint)))
+ }
+ } else {
+ let kind = match punct_char(rest) {
+ Ok(_) => Spacing::Joint,
+ Err(Reject) => Spacing::Alone,
+ };
+ Ok((rest, Punct::new(ch, kind)))
+ }
+}
+
+fn punct_char(input: Cursor) -> PResult<char> {
+ if input.starts_with("//") || input.starts_with("/*") {
+ // Do not accept `/` of a comment as a punct.
+ return Err(Reject);
+ }
+
+ let mut chars = input.chars();
+ let first = match chars.next() {
+ Some(ch) => ch,
+ None => {
+ return Err(Reject);
+ }
+ };
+ let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
+ if recognized.contains(first) {
+ Ok((input.advance(first.len_utf8()), first))
+ } else {
+ Err(Reject)
+ }
+}
+
+fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
+ #[cfg(span_locations)]
+ let lo = input.off;
+ let (rest, (comment, inner)) = doc_comment_contents(input)?;
+ let span = crate::Span::_new_stable(Span {
+ #[cfg(span_locations)]
+ lo,
+ #[cfg(span_locations)]
+ hi: rest.off,
+ });
+
+ let mut scan_for_bare_cr = comment;
+ while let Some(cr) = scan_for_bare_cr.find('\r') {
+ let rest = &scan_for_bare_cr[cr + 1..];
+ if !rest.starts_with('\n') {
+ return Err(Reject);
+ }
+ scan_for_bare_cr = rest;
+ }
+
+ let mut trees = Vec::new();
+ trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
+ if inner {
+ trees.push(Punct::new('!', Spacing::Alone).into());
+ }
+ let mut stream = vec![
+ TokenTree::Ident(crate::Ident::new("doc", span)),
+ TokenTree::Punct(Punct::new('=', Spacing::Alone)),
+ TokenTree::Literal(crate::Literal::string(comment)),
+ ];
+ for tt in stream.iter_mut() {
+ tt.set_span(span);
+ }
+ let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
+ trees.push(crate::Group::_new_stable(group).into());
+ for tt in trees.iter_mut() {
+ tt.set_span(span);
+ }
+ Ok((rest, trees))
+}
+
+fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
+ if input.starts_with("//!") {
+ let input = input.advance(3);
+ let (input, s) = take_until_newline_or_eof(input);
+ Ok((input, (s, true)))
+ } else if input.starts_with("/*!") {
+ let (input, s) = block_comment(input)?;
+ Ok((input, (&s[3..s.len() - 2], true)))
+ } else if input.starts_with("///") {
+ let input = input.advance(3);
+ if input.starts_with("/") {
+ return Err(Reject);
+ }
+ let (input, s) = take_until_newline_or_eof(input);
+ Ok((input, (s, false)))
+ } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') {
+ let (input, s) = block_comment(input)?;
+ Ok((input, (&s[3..s.len() - 2], false)))
+ } else {
+ Err(Reject)
+ }
+}
+
+fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) {
+ let chars = input.char_indices();
+
+ for (i, ch) in chars {
+ if ch == '\n' {
+ return (input.advance(i), &input.rest[..i]);
+ } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') {
+ return (input.advance(i + 1), &input.rest[..i]);
+ }
+ }
+
+ (input.advance(input.len()), input.rest)
+}
diff --git a/src/strnom.rs b/src/strnom.rs
deleted file mode 100644
index eb7d0b8..0000000
--- a/src/strnom.rs
+++ /dev/null
@@ -1,391 +0,0 @@
-//! Adapted from [`nom`](https://github.com/Geal/nom).
-
-use crate::fallback::LexError;
-use std::str::{Bytes, CharIndices, Chars};
-use unicode_xid::UnicodeXID;
-
-#[derive(Copy, Clone, Eq, PartialEq)]
-pub struct Cursor<'a> {
- pub rest: &'a str,
- #[cfg(span_locations)]
- pub off: u32,
-}
-
-impl<'a> Cursor<'a> {
- #[cfg(not(span_locations))]
- pub fn advance(&self, amt: usize) -> Cursor<'a> {
- Cursor {
- rest: &self.rest[amt..],
- }
- }
- #[cfg(span_locations)]
- pub fn advance(&self, amt: usize) -> Cursor<'a> {
- Cursor {
- rest: &self.rest[amt..],
- off: self.off + (amt as u32),
- }
- }
-
- pub fn find(&self, p: char) -> Option<usize> {
- self.rest.find(p)
- }
-
- pub fn starts_with(&self, s: &str) -> bool {
- self.rest.starts_with(s)
- }
-
- pub fn is_empty(&self) -> bool {
- self.rest.is_empty()
- }
-
- pub fn len(&self) -> usize {
- self.rest.len()
- }
-
- pub fn as_bytes(&self) -> &'a [u8] {
- self.rest.as_bytes()
- }
-
- pub fn bytes(&self) -> Bytes<'a> {
- self.rest.bytes()
- }
-
- pub fn chars(&self) -> Chars<'a> {
- self.rest.chars()
- }
-
- pub fn char_indices(&self) -> CharIndices<'a> {
- self.rest.char_indices()
- }
-}
-
-pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
-
-pub fn whitespace(input: Cursor) -> PResult<()> {
- if input.is_empty() {
- return Err(LexError);
- }
-
- let bytes = input.as_bytes();
- let mut i = 0;
- while i < bytes.len() {
- let s = input.advance(i);
- if bytes[i] == b'/' {
- if s.starts_with("//")
- && (!s.starts_with("///") || s.starts_with("////"))
- && !s.starts_with("//!")
- {
- if let Some(len) = s.find('\n') {
- i += len + 1;
- continue;
- }
- break;
- } else if s.starts_with("/**/") {
- i += 4;
- continue;
- } else if s.starts_with("/*")
- && (!s.starts_with("/**") || s.starts_with("/***"))
- && !s.starts_with("/*!")
- {
- let (_, com) = block_comment(s)?;
- i += com.len();
- continue;
- }
- }
- match bytes[i] {
- b' ' | 0x09..=0x0d => {
- i += 1;
- continue;
- }
- b if b <= 0x7f => {}
- _ => {
- let ch = s.chars().next().unwrap();
- if is_whitespace(ch) {
- i += ch.len_utf8();
- continue;
- }
- }
- }
- return if i > 0 { Ok((s, ())) } else { Err(LexError) };
- }
- Ok((input.advance(input.len()), ()))
-}
-
-pub fn block_comment(input: Cursor) -> PResult<&str> {
- if !input.starts_with("/*") {
- return Err(LexError);
- }
-
- let mut depth = 0;
- let bytes = input.as_bytes();
- let mut i = 0;
- let upper = bytes.len() - 1;
- while i < upper {
- if bytes[i] == b'/' && bytes[i + 1] == b'*' {
- depth += 1;
- i += 1; // eat '*'
- } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
- depth -= 1;
- if depth == 0 {
- return Ok((input.advance(i + 2), &input.rest[..i + 2]));
- }
- i += 1; // eat '/'
- }
- i += 1;
- }
- Err(LexError)
-}
-
-pub fn skip_whitespace(input: Cursor) -> Cursor {
- match whitespace(input) {
- Ok((rest, _)) => rest,
- Err(LexError) => input,
- }
-}
-
-fn is_whitespace(ch: char) -> bool {
- // Rust treats left-to-right mark and right-to-left mark as whitespace
- ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
-}
-
-pub fn word_break(input: Cursor) -> PResult<()> {
- match input.chars().next() {
- Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
- Some(_) | None => Ok((input, ())),
- }
-}
-
-macro_rules! named {
- ($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => {
- fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> {
- $submac!(i, $($args)*)
- }
- };
-}
-
-macro_rules! alt {
- ($i:expr, $e:ident | $($rest:tt)*) => {
- alt!($i, call!($e) | $($rest)*)
- };
-
- ($i:expr, $subrule:ident!( $($args:tt)*) | $($rest:tt)*) => {
- match $subrule!($i, $($args)*) {
- res @ Ok(_) => res,
- _ => alt!($i, $($rest)*)
- }
- };
-
- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr } | $($rest:tt)+) => {
- match $subrule!($i, $($args)*) {
- Ok((i, o)) => Ok((i, $gen(o))),
- Err(LexError) => alt!($i, $($rest)*)
- }
- };
-
- ($i:expr, $e:ident => { $gen:expr } | $($rest:tt)*) => {
- alt!($i, call!($e) => { $gen } | $($rest)*)
- };
-
- ($i:expr, $e:ident => { $gen:expr }) => {
- alt!($i, call!($e) => { $gen })
- };
-
- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr }) => {
- match $subrule!($i, $($args)*) {
- Ok((i, o)) => Ok((i, $gen(o))),
- Err(LexError) => Err(LexError),
- }
- };
-
- ($i:expr, $e:ident) => {
- alt!($i, call!($e))
- };
-
- ($i:expr, $subrule:ident!( $($args:tt)*)) => {
- $subrule!($i, $($args)*)
- };
-}
-
-macro_rules! do_parse {
- ($i:expr, ( $($rest:expr),* )) => {
- Ok(($i, ( $($rest),* )))
- };
-
- ($i:expr, $e:ident >> $($rest:tt)*) => {
- do_parse!($i, call!($e) >> $($rest)*)
- };
-
- ($i:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
- match $submac!($i, $($args)*) {
- Err(LexError) => Err(LexError),
- Ok((i, _)) => do_parse!(i, $($rest)*),
- }
- };
-
- ($i:expr, $field:ident : $e:ident >> $($rest:tt)*) => {
- do_parse!($i, $field: call!($e) >> $($rest)*)
- };
-
- ($i:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
- match $submac!($i, $($args)*) {
- Err(LexError) => Err(LexError),
- Ok((i, o)) => {
- let $field = o;
- do_parse!(i, $($rest)*)
- },
- }
- };
-}
-
-macro_rules! peek {
- ($i:expr, $submac:ident!( $($args:tt)* )) => {
- match $submac!($i, $($args)*) {
- Ok((_, o)) => Ok(($i, o)),
- Err(LexError) => Err(LexError),
- }
- };
-}
-
-macro_rules! call {
- ($i:expr, $fun:expr $(, $args:expr)*) => {
- $fun($i $(, $args)*)
- };
-}
-
-macro_rules! option {
- ($i:expr, $f:expr) => {
- match $f($i) {
- Ok((i, o)) => Ok((i, Some(o))),
- Err(LexError) => Ok(($i, None)),
- }
- };
-}
-
-macro_rules! take_until_newline_or_eof {
- ($i:expr,) => {{
- if $i.len() == 0 {
- Ok(($i, ""))
- } else {
- match $i.find('\n') {
- Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
- None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])),
- }
- }
- }};
-}
-
-macro_rules! tuple {
- ($i:expr, $($rest:tt)*) => {
- tuple_parser!($i, (), $($rest)*)
- };
-}
-
-/// Do not use directly. Use `tuple!`.
-macro_rules! tuple_parser {
- ($i:expr, ($($parsed:tt),*), $e:ident, $($rest:tt)*) => {
- tuple_parser!($i, ($($parsed),*), call!($e), $($rest)*)
- };
-
- ($i:expr, (), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
- match $submac!($i, $($args)*) {
- Err(LexError) => Err(LexError),
- Ok((i, o)) => tuple_parser!(i, (o), $($rest)*),
- }
- };
-
- ($i:expr, ($($parsed:tt)*), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
- match $submac!($i, $($args)*) {
- Err(LexError) => Err(LexError),
- Ok((i, o)) => tuple_parser!(i, ($($parsed)* , o), $($rest)*),
- }
- };
-
- ($i:expr, ($($parsed:tt),*), $e:ident) => {
- tuple_parser!($i, ($($parsed),*), call!($e))
- };
-
- ($i:expr, (), $submac:ident!( $($args:tt)* )) => {
- $submac!($i, $($args)*)
- };
-
- ($i:expr, ($($parsed:expr),*), $submac:ident!( $($args:tt)* )) => {
- match $submac!($i, $($args)*) {
- Err(LexError) => Err(LexError),
- Ok((i, o)) => Ok((i, ($($parsed),*, o)))
- }
- };
-
- ($i:expr, ($($parsed:expr),*)) => {
- Ok(($i, ($($parsed),*)))
- };
-}
-
-macro_rules! not {
- ($i:expr, $submac:ident!( $($args:tt)* )) => {
- match $submac!($i, $($args)*) {
- Ok((_, _)) => Err(LexError),
- Err(LexError) => Ok(($i, ())),
- }
- };
-}
-
-macro_rules! tag {
- ($i:expr, $tag:expr) => {
- if $i.starts_with($tag) {
- Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()]))
- } else {
- Err(LexError)
- }
- };
-}
-
-macro_rules! punct {
- ($i:expr, $punct:expr) => {
- $crate::strnom::punct($i, $punct)
- };
-}
-
-/// Do not use directly. Use `punct!`.
-pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
- let input = skip_whitespace(input);
- if input.starts_with(token) {
- Ok((input.advance(token.len()), token))
- } else {
- Err(LexError)
- }
-}
-
-macro_rules! preceded {
- ($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => {
- match tuple!($i, $submac!($($args)*), $submac2!($($args2)*)) {
- Ok((remaining, (_, o))) => Ok((remaining, o)),
- Err(LexError) => Err(LexError),
- }
- };
-
- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
- preceded!($i, $submac!($($args)*), call!($g))
- };
-}
-
-macro_rules! delimited {
- ($i:expr, $submac:ident!( $($args:tt)* ), $($rest:tt)+) => {
- match tuple_parser!($i, (), $submac!($($args)*), $($rest)*) {
- Err(LexError) => Err(LexError),
- Ok((i1, (_, o, _))) => Ok((i1, o))
- }
- };
-}
-
-macro_rules! map {
- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
- match $submac!($i, $($args)*) {
- Err(LexError) => Err(LexError),
- Ok((i, o)) => Ok((i, call!(o, $g)))
- }
- };
-
- ($i:expr, $f:expr, $g:expr) => {
- map!($i, call!($f), $g)
- };
-}
diff --git a/src/wrapper.rs b/src/wrapper.rs
index c3b6e3a..24d86e8 100644
--- a/src/wrapper.rs
+++ b/src/wrapper.rs
@@ -1,89 +1,69 @@
-use std::fmt;
-use std::iter;
+use crate::detection::inside_proc_macro;
+use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
+use std::fmt::{self, Debug, Display};
+use std::iter::FromIterator;
use std::ops::RangeBounds;
-use std::panic::{self, PanicInfo};
+use std::panic;
#[cfg(super_unstable)]
use std::path::PathBuf;
use std::str::FromStr;
-use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
-
#[derive(Clone)]
-pub enum TokenStream {
- Compiler(proc_macro::TokenStream),
+pub(crate) enum TokenStream {
+ Compiler(DeferredTokenStream),
Fallback(fallback::TokenStream),
}
-pub enum LexError {
- Compiler(proc_macro::LexError),
- Fallback(fallback::LexError),
+// Work around https://github.com/rust-lang/rust/issues/65080.
+// In `impl Extend<TokenTree> for TokenStream` which is used heavily by quote,
+// we hold on to the appended tokens and do proc_macro::TokenStream::extend as
+// late as possible to batch together consecutive uses of the Extend impl.
+#[derive(Clone)]
+pub(crate) struct DeferredTokenStream {
+ stream: proc_macro::TokenStream,
+ extra: Vec<proc_macro::TokenTree>,
}
-fn nightly_works() -> bool {
- use std::sync::atomic::*;
- use std::sync::Once;
-
- static WORKS: AtomicUsize = AtomicUsize::new(0);
- static INIT: Once = Once::new();
-
- match WORKS.load(Ordering::SeqCst) {
- 1 => return false,
- 2 => return true,
- _ => {}
- }
-
- // Swap in a null panic hook to avoid printing "thread panicked" to stderr,
- // then use catch_unwind to determine whether the compiler's proc_macro is
- // working. When proc-macro2 is used from outside of a procedural macro all
- // of the proc_macro crate's APIs currently panic.
- //
- // The Once is to prevent the possibility of this ordering:
- //
- // thread 1 calls take_hook, gets the user's original hook
- // thread 1 calls set_hook with the null hook
- // thread 2 calls take_hook, thinks null hook is the original hook
- // thread 2 calls set_hook with the null hook
- // thread 1 calls set_hook with the actual original hook
- // thread 2 calls set_hook with what it thinks is the original hook
- //
- // in which the user's hook has been lost.
- //
- // There is still a race condition where a panic in a different thread can
- // happen during the interval that the user's original panic hook is
- // unregistered such that their hook is incorrectly not called. This is
- // sufficiently unlikely and less bad than printing panic messages to stderr
- // on correct use of this crate. Maybe there is a libstd feature request
- // here. For now, if a user needs to guarantee that this failure mode does
- // not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
- // the main thread before launching any other threads.
- INIT.call_once(|| {
- type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
-
- let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
- let sanity_check = &*null_hook as *const PanicHook;
- let original_hook = panic::take_hook();
- panic::set_hook(null_hook);
-
- let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
- WORKS.store(works as usize + 1, Ordering::SeqCst);
-
- let hopefully_null_hook = panic::take_hook();
- panic::set_hook(original_hook);
- if sanity_check != &*hopefully_null_hook {
- panic!("observed race condition in proc_macro2::nightly_works");
- }
- });
- nightly_works()
+pub(crate) enum LexError {
+ Compiler(proc_macro::LexError),
+ Fallback(fallback::LexError),
}
fn mismatch() -> ! {
panic!("stable/nightly mismatch")
}
+impl DeferredTokenStream {
+ fn new(stream: proc_macro::TokenStream) -> Self {
+ DeferredTokenStream {
+ stream,
+ extra: Vec::new(),
+ }
+ }
+
+ fn is_empty(&self) -> bool {
+ self.stream.is_empty() && self.extra.is_empty()
+ }
+
+ fn evaluate_now(&mut self) {
+ // If-check provides a fast short circuit for the common case of `extra`
+ // being empty, which saves a round trip over the proc macro bridge.
+ // Improves macro expansion time in winrt by 6% in debug mode.
+ if !self.extra.is_empty() {
+ self.stream.extend(self.extra.drain(..));
+ }
+ }
+
+ fn into_token_stream(mut self) -> proc_macro::TokenStream {
+ self.evaluate_now();
+ self.stream
+ }
+}
+
impl TokenStream {
pub fn new() -> TokenStream {
- if nightly_works() {
- TokenStream::Compiler(proc_macro::TokenStream::new())
+ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
} else {
TokenStream::Fallback(fallback::TokenStream::new())
}
@@ -98,7 +78,7 @@ impl TokenStream {
fn unwrap_nightly(self) -> proc_macro::TokenStream {
match self {
- TokenStream::Compiler(s) => s,
+ TokenStream::Compiler(s) => s.into_token_stream(),
TokenStream::Fallback(_) => mismatch(),
}
}
@@ -115,33 +95,45 @@ impl FromStr for TokenStream {
type Err = LexError;
fn from_str(src: &str) -> Result<TokenStream, LexError> {
- if nightly_works() {
- Ok(TokenStream::Compiler(src.parse()?))
+ if inside_proc_macro() {
+ Ok(TokenStream::Compiler(DeferredTokenStream::new(
+ proc_macro_parse(src)?,
+ )))
} else {
Ok(TokenStream::Fallback(src.parse()?))
}
}
}
-impl fmt::Display for TokenStream {
+// Work around https://github.com/rust-lang/rust/issues/58736.
+fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> {
+ let result = panic::catch_unwind(|| src.parse().map_err(LexError::Compiler));
+ result.unwrap_or_else(|_| {
+ Err(LexError::Fallback(fallback::LexError {
+ span: fallback::Span::call_site(),
+ }))
+ })
+}
+
+impl Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
- TokenStream::Compiler(tts) => tts.fmt(f),
- TokenStream::Fallback(tts) => tts.fmt(f),
+ TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f),
+ TokenStream::Fallback(tts) => Display::fmt(tts, f),
}
}
}
impl From<proc_macro::TokenStream> for TokenStream {
fn from(inner: proc_macro::TokenStream) -> TokenStream {
- TokenStream::Compiler(inner)
+ TokenStream::Compiler(DeferredTokenStream::new(inner))
}
}
impl From<TokenStream> for proc_macro::TokenStream {
fn from(inner: TokenStream) -> proc_macro::TokenStream {
match inner {
- TokenStream::Compiler(inner) => inner,
+ TokenStream::Compiler(inner) => inner.into_token_stream(),
TokenStream::Fallback(inner) => inner.to_string().parse().unwrap(),
}
}
@@ -153,53 +145,54 @@ impl From<fallback::TokenStream> for TokenStream {
}
}
+// Assumes inside_proc_macro().
+fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+ match token {
+ TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
+ TokenTree::Punct(tt) => {
+ let spacing = match tt.spacing() {
+ Spacing::Joint => proc_macro::Spacing::Joint,
+ Spacing::Alone => proc_macro::Spacing::Alone,
+ };
+ let mut punct = proc_macro::Punct::new(tt.as_char(), spacing);
+ punct.set_span(tt.span().inner.unwrap_nightly());
+ punct.into()
+ }
+ TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
+ TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
+ }
+}
+
impl From<TokenTree> for TokenStream {
fn from(token: TokenTree) -> TokenStream {
- if !nightly_works() {
- return TokenStream::Fallback(token.into());
+ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
+ } else {
+ TokenStream::Fallback(token.into())
}
- let tt: proc_macro::TokenTree = match token {
- TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
- TokenTree::Punct(tt) => {
- let spacing = match tt.spacing() {
- Spacing::Joint => proc_macro::Spacing::Joint,
- Spacing::Alone => proc_macro::Spacing::Alone,
- };
- let mut op = proc_macro::Punct::new(tt.as_char(), spacing);
- op.set_span(tt.span().inner.unwrap_nightly());
- op.into()
- }
- TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
- TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
- };
- TokenStream::Compiler(tt.into())
}
}
-impl iter::FromIterator<TokenTree> for TokenStream {
+impl FromIterator<TokenTree> for TokenStream {
fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
- if nightly_works() {
- let trees = trees
- .into_iter()
- .map(TokenStream::from)
- .flat_map(|t| match t {
- TokenStream::Compiler(s) => s,
- TokenStream::Fallback(_) => mismatch(),
- });
- TokenStream::Compiler(trees.collect())
+ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(
+ trees.into_iter().map(into_compiler_token).collect(),
+ ))
} else {
TokenStream::Fallback(trees.into_iter().collect())
}
}
}
-impl iter::FromIterator<TokenStream> for TokenStream {
+impl FromIterator<TokenStream> for TokenStream {
fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
let mut streams = streams.into_iter();
match streams.next() {
Some(TokenStream::Compiler(mut first)) => {
- first.extend(streams.map(|s| match s {
- TokenStream::Compiler(s) => s,
+ first.evaluate_now();
+ first.stream.extend(streams.map(|s| match s {
+ TokenStream::Compiler(s) => s.into_token_stream(),
TokenStream::Fallback(_) => mismatch(),
}));
TokenStream::Compiler(first)
@@ -217,16 +210,15 @@ impl iter::FromIterator<TokenStream> for TokenStream {
}
impl Extend<TokenTree> for TokenStream {
- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, stream: I) {
match self {
TokenStream::Compiler(tts) => {
- tts.extend(
- streams
- .into_iter()
- .map(|t| TokenStream::from(t).unwrap_nightly()),
- );
+ // Here is the reason for DeferredTokenStream.
+ for token in stream {
+ tts.extra.push(into_compiler_token(token));
+ }
}
- TokenStream::Fallback(tts) => tts.extend(streams),
+ TokenStream::Fallback(tts) => tts.extend(stream),
}
}
}
@@ -235,34 +227,31 @@ impl Extend<TokenStream> for TokenStream {
fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
match self {
TokenStream::Compiler(tts) => {
- #[cfg(not(slow_extend))]
- {
- tts.extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
- }
- #[cfg(slow_extend)]
- {
- *tts = tts
- .clone()
- .into_iter()
- .chain(streams.into_iter().flat_map(|t| match t {
- TokenStream::Compiler(tts) => tts.into_iter(),
- _ => mismatch(),
- }))
- .collect();
- }
+ tts.evaluate_now();
+ tts.stream
+ .extend(streams.into_iter().map(TokenStream::unwrap_nightly));
}
TokenStream::Fallback(tts) => {
- tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()))
+ tts.extend(streams.into_iter().map(TokenStream::unwrap_stable));
}
}
}
}
-impl fmt::Debug for TokenStream {
+impl Debug for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
- TokenStream::Compiler(tts) => tts.fmt(f),
- TokenStream::Fallback(tts) => tts.fmt(f),
+ TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f),
+ TokenStream::Fallback(tts) => Debug::fmt(tts, f),
+ }
+ }
+}
+
+impl LexError {
+ pub(crate) fn span(&self) -> Span {
+ match self {
+ LexError::Compiler(_) => Span::call_site(),
+ LexError::Fallback(e) => Span::Fallback(e.span()),
}
}
}
@@ -279,17 +268,34 @@ impl From<fallback::LexError> for LexError {
}
}
-impl fmt::Debug for LexError {
+impl Debug for LexError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
- LexError::Compiler(e) => e.fmt(f),
- LexError::Fallback(e) => e.fmt(f),
+ LexError::Compiler(e) => Debug::fmt(e, f),
+ LexError::Fallback(e) => Debug::fmt(e, f),
+ }
+ }
+}
+
+impl Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ #[cfg(lexerror_display)]
+ LexError::Compiler(e) => Display::fmt(e, f),
+ #[cfg(not(lexerror_display))]
+ LexError::Compiler(_e) => Display::fmt(
+ &fallback::LexError {
+ span: fallback::Span::call_site(),
+ },
+ f,
+ ),
+ LexError::Fallback(e) => Display::fmt(e, f),
}
}
}
#[derive(Clone)]
-pub enum TokenTreeIter {
+pub(crate) enum TokenTreeIter {
Compiler(proc_macro::token_stream::IntoIter),
Fallback(fallback::TokenTreeIter),
}
@@ -300,7 +306,9 @@ impl IntoIterator for TokenStream {
fn into_iter(self) -> TokenTreeIter {
match self {
- TokenStream::Compiler(tts) => TokenTreeIter::Compiler(tts.into_iter()),
+ TokenStream::Compiler(tts) => {
+ TokenTreeIter::Compiler(tts.into_token_stream().into_iter())
+ }
TokenStream::Fallback(tts) => TokenTreeIter::Fallback(tts.into_iter()),
}
}
@@ -338,7 +346,7 @@ impl Iterator for TokenTreeIter {
}
}
-impl fmt::Debug for TokenTreeIter {
+impl Debug for TokenTreeIter {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("TokenTreeIter").finish()
}
@@ -346,7 +354,7 @@ impl fmt::Debug for TokenTreeIter {
#[derive(Clone, PartialEq, Eq)]
#[cfg(super_unstable)]
-pub enum SourceFile {
+pub(crate) enum SourceFile {
Compiler(proc_macro::SourceFile),
Fallback(fallback::SourceFile),
}
@@ -374,58 +382,77 @@ impl SourceFile {
}
#[cfg(super_unstable)]
-impl fmt::Debug for SourceFile {
+impl Debug for SourceFile {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
- SourceFile::Compiler(a) => a.fmt(f),
- SourceFile::Fallback(a) => a.fmt(f),
+ SourceFile::Compiler(a) => Debug::fmt(a, f),
+ SourceFile::Fallback(a) => Debug::fmt(a, f),
}
}
}
#[cfg(any(super_unstable, feature = "span-locations"))]
-pub struct LineColumn {
+pub(crate) struct LineColumn {
pub line: usize,
pub column: usize,
}
#[derive(Copy, Clone)]
-pub enum Span {
+pub(crate) enum Span {
Compiler(proc_macro::Span),
Fallback(fallback::Span),
}
impl Span {
pub fn call_site() -> Span {
- if nightly_works() {
+ if inside_proc_macro() {
Span::Compiler(proc_macro::Span::call_site())
} else {
Span::Fallback(fallback::Span::call_site())
}
}
+ #[cfg(hygiene)]
+ pub fn mixed_site() -> Span {
+ if inside_proc_macro() {
+ Span::Compiler(proc_macro::Span::mixed_site())
+ } else {
+ Span::Fallback(fallback::Span::mixed_site())
+ }
+ }
+
#[cfg(super_unstable)]
pub fn def_site() -> Span {
- if nightly_works() {
+ if inside_proc_macro() {
Span::Compiler(proc_macro::Span::def_site())
} else {
Span::Fallback(fallback::Span::def_site())
}
}
- #[cfg(super_unstable)]
pub fn resolved_at(&self, other: Span) -> Span {
match (self, other) {
+ #[cfg(hygiene)]
(Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),
+
+ // Name resolution affects semantics, but location is only cosmetic
+ #[cfg(not(hygiene))]
+ (Span::Compiler(_), Span::Compiler(_)) => other,
+
(Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
_ => mismatch(),
}
}
- #[cfg(super_unstable)]
pub fn located_at(&self, other: Span) -> Span {
match (self, other) {
+ #[cfg(hygiene)]
(Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),
+
+ // Name resolution affects semantics, but location is only cosmetic
+ #[cfg(not(hygiene))]
+ (Span::Compiler(_), Span::Compiler(_)) => *self,
+
(Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
_ => mismatch(),
}
@@ -519,16 +546,16 @@ impl From<fallback::Span> for Span {
}
}
-impl fmt::Debug for Span {
+impl Debug for Span {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
- Span::Compiler(s) => s.fmt(f),
- Span::Fallback(s) => s.fmt(f),
+ Span::Compiler(s) => Debug::fmt(s, f),
+ Span::Fallback(s) => Debug::fmt(s, f),
}
}
}
-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
match span {
Span::Compiler(s) => {
debug.field("span", &s);
@@ -538,7 +565,7 @@ pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span)
}
#[derive(Clone)]
-pub enum Group {
+pub(crate) enum Group {
Compiler(proc_macro::Group),
Fallback(fallback::Group),
}
@@ -546,14 +573,14 @@ pub enum Group {
impl Group {
pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
match stream {
- TokenStream::Compiler(stream) => {
+ TokenStream::Compiler(tts) => {
let delimiter = match delimiter {
Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
Delimiter::Bracket => proc_macro::Delimiter::Bracket,
Delimiter::Brace => proc_macro::Delimiter::Brace,
Delimiter::None => proc_macro::Delimiter::None,
};
- Group::Compiler(proc_macro::Group::new(delimiter, stream))
+ Group::Compiler(proc_macro::Group::new(delimiter, tts.into_token_stream()))
}
TokenStream::Fallback(stream) => {
Group::Fallback(fallback::Group::new(delimiter, stream))
@@ -575,7 +602,7 @@ impl Group {
pub fn stream(&self) -> TokenStream {
match self {
- Group::Compiler(g) => TokenStream::Compiler(g.stream()),
+ Group::Compiler(g) => TokenStream::Compiler(DeferredTokenStream::new(g.stream())),
Group::Fallback(g) => TokenStream::Fallback(g.stream()),
}
}
@@ -629,26 +656,26 @@ impl From<fallback::Group> for Group {
}
}
-impl fmt::Display for Group {
+impl Display for Group {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
match self {
- Group::Compiler(group) => group.fmt(formatter),
- Group::Fallback(group) => group.fmt(formatter),
+ Group::Compiler(group) => Display::fmt(group, formatter),
+ Group::Fallback(group) => Display::fmt(group, formatter),
}
}
}
-impl fmt::Debug for Group {
+impl Debug for Group {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
match self {
- Group::Compiler(group) => group.fmt(formatter),
- Group::Fallback(group) => group.fmt(formatter),
+ Group::Compiler(group) => Debug::fmt(group, formatter),
+ Group::Fallback(group) => Debug::fmt(group, formatter),
}
}
}
#[derive(Clone)]
-pub enum Ident {
+pub(crate) enum Ident {
Compiler(proc_macro::Ident),
Fallback(fallback::Ident),
}
@@ -724,26 +751,26 @@ where
}
}
-impl fmt::Display for Ident {
+impl Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
- Ident::Compiler(t) => t.fmt(f),
- Ident::Fallback(t) => t.fmt(f),
+ Ident::Compiler(t) => Display::fmt(t, f),
+ Ident::Fallback(t) => Display::fmt(t, f),
}
}
}
-impl fmt::Debug for Ident {
+impl Debug for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
- Ident::Compiler(t) => t.fmt(f),
- Ident::Fallback(t) => t.fmt(f),
+ Ident::Compiler(t) => Debug::fmt(t, f),
+ Ident::Fallback(t) => Debug::fmt(t, f),
}
}
}
#[derive(Clone)]
-pub enum Literal {
+pub(crate) enum Literal {
Compiler(proc_macro::Literal),
Fallback(fallback::Literal),
}
@@ -751,7 +778,7 @@ pub enum Literal {
macro_rules! suffixed_numbers {
($($name:ident => $kind:ident,)*) => ($(
pub fn $name(n: $kind) -> Literal {
- if nightly_works() {
+ if inside_proc_macro() {
Literal::Compiler(proc_macro::Literal::$name(n))
} else {
Literal::Fallback(fallback::Literal::$name(n))
@@ -763,7 +790,7 @@ macro_rules! suffixed_numbers {
macro_rules! unsuffixed_integers {
($($name:ident => $kind:ident,)*) => ($(
pub fn $name(n: $kind) -> Literal {
- if nightly_works() {
+ if inside_proc_macro() {
Literal::Compiler(proc_macro::Literal::$name(n))
} else {
Literal::Fallback(fallback::Literal::$name(n))
@@ -807,7 +834,7 @@ impl Literal {
}
pub fn f32_unsuffixed(f: f32) -> Literal {
- if nightly_works() {
+ if inside_proc_macro() {
Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
} else {
Literal::Fallback(fallback::Literal::f32_unsuffixed(f))
@@ -815,7 +842,7 @@ impl Literal {
}
pub fn f64_unsuffixed(f: f64) -> Literal {
- if nightly_works() {
+ if inside_proc_macro() {
Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f))
} else {
Literal::Fallback(fallback::Literal::f64_unsuffixed(f))
@@ -823,7 +850,7 @@ impl Literal {
}
pub fn string(t: &str) -> Literal {
- if nightly_works() {
+ if inside_proc_macro() {
Literal::Compiler(proc_macro::Literal::string(t))
} else {
Literal::Fallback(fallback::Literal::string(t))
@@ -831,7 +858,7 @@ impl Literal {
}
pub fn character(t: char) -> Literal {
- if nightly_works() {
+ if inside_proc_macro() {
Literal::Compiler(proc_macro::Literal::character(t))
} else {
Literal::Fallback(fallback::Literal::character(t))
@@ -839,7 +866,7 @@ impl Literal {
}
pub fn byte_string(bytes: &[u8]) -> Literal {
- if nightly_works() {
+ if inside_proc_macro() {
Literal::Compiler(proc_macro::Literal::byte_string(bytes))
} else {
Literal::Fallback(fallback::Literal::byte_string(bytes))
@@ -885,20 +912,20 @@ impl From<fallback::Literal> for Literal {
}
}
-impl fmt::Display for Literal {
+impl Display for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
- Literal::Compiler(t) => t.fmt(f),
- Literal::Fallback(t) => t.fmt(f),
+ Literal::Compiler(t) => Display::fmt(t, f),
+ Literal::Fallback(t) => Display::fmt(t, f),
}
}
}
-impl fmt::Debug for Literal {
+impl Debug for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
- Literal::Compiler(t) => t.fmt(f),
- Literal::Fallback(t) => t.fmt(f),
+ Literal::Compiler(t) => Debug::fmt(t, f),
+ Literal::Fallback(t) => Debug::fmt(t, f),
}
}
}
diff --git a/tests/comments.rs b/tests/comments.rs
new file mode 100644
index 0000000..708cccb
--- /dev/null
+++ b/tests/comments.rs
@@ -0,0 +1,103 @@
+use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree};
+
+// #[doc = "..."] -> "..."
+fn lit_of_outer_doc_comment(tokens: TokenStream) -> Literal {
+ lit_of_doc_comment(tokens, false)
+}
+
+// #![doc = "..."] -> "..."
+fn lit_of_inner_doc_comment(tokens: TokenStream) -> Literal {
+ lit_of_doc_comment(tokens, true)
+}
+
+fn lit_of_doc_comment(tokens: TokenStream, inner: bool) -> Literal {
+ let mut iter = tokens.clone().into_iter();
+ match iter.next().unwrap() {
+ TokenTree::Punct(punct) => {
+ assert_eq!(punct.as_char(), '#');
+ assert_eq!(punct.spacing(), Spacing::Alone);
+ }
+ _ => panic!("wrong token {:?}", tokens),
+ }
+ if inner {
+ match iter.next().unwrap() {
+ TokenTree::Punct(punct) => {
+ assert_eq!(punct.as_char(), '!');
+ assert_eq!(punct.spacing(), Spacing::Alone);
+ }
+ _ => panic!("wrong token {:?}", tokens),
+ }
+ }
+ iter = match iter.next().unwrap() {
+ TokenTree::Group(group) => {
+ assert_eq!(group.delimiter(), Delimiter::Bracket);
+ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
+ group.stream().into_iter()
+ }
+ _ => panic!("wrong token {:?}", tokens),
+ };
+ match iter.next().unwrap() {
+ TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"),
+ _ => panic!("wrong token {:?}", tokens),
+ }
+ match iter.next().unwrap() {
+ TokenTree::Punct(punct) => {
+ assert_eq!(punct.as_char(), '=');
+ assert_eq!(punct.spacing(), Spacing::Alone);
+ }
+ _ => panic!("wrong token {:?}", tokens),
+ }
+ match iter.next().unwrap() {
+ TokenTree::Literal(literal) => {
+ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
+ literal
+ }
+ _ => panic!("wrong token {:?}", tokens),
+ }
+}
+
+#[test]
+fn closed_immediately() {
+ let stream = "/**/".parse::<TokenStream>().unwrap();
+ let tokens = stream.into_iter().collect::<Vec<_>>();
+ assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
+}
+
+#[test]
+fn incomplete() {
+ assert!("/*/".parse::<TokenStream>().is_err());
+}
+
+#[test]
+fn lit() {
+ let stream = "/// doc".parse::<TokenStream>().unwrap();
+ let lit = lit_of_outer_doc_comment(stream);
+ assert_eq!(lit.to_string(), "\" doc\"");
+
+ let stream = "//! doc".parse::<TokenStream>().unwrap();
+ let lit = lit_of_inner_doc_comment(stream);
+ assert_eq!(lit.to_string(), "\" doc\"");
+
+ let stream = "/** doc */".parse::<TokenStream>().unwrap();
+ let lit = lit_of_outer_doc_comment(stream);
+ assert_eq!(lit.to_string(), "\" doc \"");
+
+ let stream = "/*! doc */".parse::<TokenStream>().unwrap();
+ let lit = lit_of_inner_doc_comment(stream);
+ assert_eq!(lit.to_string(), "\" doc \"");
+}
+
+#[test]
+fn carriage_return() {
+ let stream = "///\r\n".parse::<TokenStream>().unwrap();
+ let lit = lit_of_outer_doc_comment(stream);
+ assert_eq!(lit.to_string(), "\"\"");
+
+ let stream = "/**\r\n*/".parse::<TokenStream>().unwrap();
+ let lit = lit_of_outer_doc_comment(stream);
+ assert_eq!(lit.to_string(), "\"\\r\\n\"");
+
+ "///\r".parse::<TokenStream>().unwrap_err();
+ "///\r \n".parse::<TokenStream>().unwrap_err();
+ "/**\r \n*/".parse::<TokenStream>().unwrap_err();
+}
diff --git a/tests/marker.rs b/tests/marker.rs
index 7af2539..70e5767 100644
--- a/tests/marker.rs
+++ b/tests/marker.rs
@@ -57,3 +57,36 @@ mod semver_exempt {
assert_impl!(SourceFile is not Send or Sync);
}
+
+#[cfg(not(no_libprocmacro_unwind_safe))]
+mod unwind_safe {
+ use super::*;
+ use std::panic::{RefUnwindSafe, UnwindSafe};
+
+ macro_rules! assert_unwind_safe {
+ ($($types:ident)*) => {
+ $(
+ assert_impl!($types is UnwindSafe and RefUnwindSafe);
+ )*
+ };
+ }
+
+ assert_unwind_safe! {
+ Delimiter
+ Group
+ Ident
+ LexError
+ Literal
+ Punct
+ Spacing
+ Span
+ TokenStream
+ TokenTree
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ assert_unwind_safe! {
+ LineColumn
+ SourceFile
+ }
+}
diff --git a/tests/test.rs b/tests/test.rs
index 7528388..6d0a93e 100644
--- a/tests/test.rs
+++ b/tests/test.rs
@@ -1,7 +1,7 @@
+use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+use std::panic;
use std::str::{self, FromStr};
-use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
-
#[test]
fn idents() {
assert_eq!(
@@ -72,9 +72,24 @@ fn lifetime_number() {
}
#[test]
-#[should_panic(expected = r#""\'a#" is not a valid Ident"#)]
fn lifetime_invalid() {
- Ident::new("'a#", Span::call_site());
+ let result = panic::catch_unwind(|| Ident::new("'a#", Span::call_site()));
+ match result {
+ Err(box_any) => {
+ let message = box_any.downcast_ref::<String>().unwrap();
+ let expected1 = r#""\'a#" is not a valid Ident"#; // 1.31.0 .. 1.53.0
+ let expected2 = r#""'a#" is not a valid Ident"#; // 1.53.0 ..
+ assert!(
+ message == expected1 || message == expected2,
+ "panic message does not match expected string\n\
+ \x20 panic message: `{:?}`\n\
+ \x20expected message: `{:?}`",
+ message,
+ expected2,
+ );
+ }
+ Ok(_) => panic!("test did not panic as expected"),
+ }
}
#[test]
@@ -85,6 +100,11 @@ fn literal_string() {
}
#[test]
+fn literal_raw_string() {
+ "r\"\r\n\"".parse::<TokenStream>().unwrap();
+}
+
+#[test]
fn literal_character() {
assert_eq!(Literal::character('x').to_string(), "'x'");
assert_eq!(Literal::character('\'').to_string(), "'\\''");
@@ -110,6 +130,37 @@ fn literal_suffix() {
assert_eq!(token_count("1._0"), 3);
assert_eq!(token_count("1._m"), 3);
assert_eq!(token_count("\"\"s"), 1);
+ assert_eq!(token_count("r\"\"r"), 1);
+ assert_eq!(token_count("b\"\"b"), 1);
+ assert_eq!(token_count("br\"\"br"), 1);
+ assert_eq!(token_count("r#\"\"#r"), 1);
+ assert_eq!(token_count("'c'c"), 1);
+ assert_eq!(token_count("b'b'b"), 1);
+ assert_eq!(token_count("0E"), 1);
+ assert_eq!(token_count("0o0A"), 1);
+ assert_eq!(token_count("0E--0"), 4);
+ assert_eq!(token_count("0.0ECMA"), 1);
+}
+
+#[test]
+fn literal_iter_negative() {
+ let negative_literal = Literal::i32_suffixed(-3);
+ let tokens = TokenStream::from(TokenTree::Literal(negative_literal));
+ let mut iter = tokens.into_iter();
+ match iter.next().unwrap() {
+ TokenTree::Punct(punct) => {
+ assert_eq!(punct.as_char(), '-');
+ assert_eq!(punct.spacing(), Spacing::Alone);
+ }
+ unexpected => panic!("unexpected token {:?}", unexpected),
+ }
+ match iter.next().unwrap() {
+ TokenTree::Literal(literal) => {
+ assert_eq!(literal.to_string(), "3i32");
+ }
+ unexpected => panic!("unexpected token {:?}", unexpected),
+ }
+ assert!(iter.next().is_none());
}
#[test]
@@ -161,41 +212,21 @@ fn fail() {
fail("' static");
fail("r#1");
fail("r#_");
+ fail("\"\\u{0000000}\""); // overlong unicode escape (rust allows at most 6 hex digits)
+ fail("\"\\u{999999}\""); // outside of valid range of char
+ fail("\"\\u{_0}\""); // leading underscore
+ fail("\"\\u{}\""); // empty
+ fail("b\"\r\""); // bare carriage return in byte string
+ fail("r\"\r\""); // bare carriage return in raw string
+ fail("\"\\\r \""); // backslash carriage return
+ fail("'aa'aa");
+ fail("br##\"\"#");
+ fail("\"\\\n\u{85}\r\"");
}
#[cfg(span_locations)]
#[test]
fn span_test() {
- use proc_macro2::TokenTree;
-
- fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
- let ts = p.parse::<TokenStream>().unwrap();
- check_spans_internal(ts, &mut lines);
- }
-
- fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
- for i in ts {
- if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
- *lines = rest;
-
- let start = i.span().start();
- assert_eq!(start.line, sline, "sline did not match for {}", i);
- assert_eq!(start.column, scol, "scol did not match for {}", i);
-
- let end = i.span().end();
- assert_eq!(end.line, eline, "eline did not match for {}", i);
- assert_eq!(end.column, ecol, "ecol did not match for {}", i);
-
- match i {
- TokenTree::Group(ref g) => {
- check_spans_internal(g.stream().clone(), lines);
- }
- _ => {}
- }
- }
- }
- }
-
check_spans(
"\
/// This is a document comment
@@ -274,53 +305,11 @@ fn span_join() {
#[test]
fn no_panic() {
let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
- assert!(s.parse::<proc_macro2::TokenStream>().is_err());
+ assert!(s.parse::<TokenStream>().is_err());
}
#[test]
-fn tricky_doc_comment() {
- let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
- let tokens = stream.into_iter().collect::<Vec<_>>();
- assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
-
- let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
- let tokens = stream.into_iter().collect::<Vec<_>>();
- assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
- match tokens[0] {
- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
- _ => panic!("wrong token {:?}", tokens[0]),
- }
- let mut tokens = match tokens[1] {
- proc_macro2::TokenTree::Group(ref tt) => {
- assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
- tt.stream().into_iter()
- }
- _ => panic!("wrong token {:?}", tokens[0]),
- };
-
- match tokens.next().unwrap() {
- proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
- t => panic!("wrong token {:?}", t),
- }
- match tokens.next().unwrap() {
- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
- t => panic!("wrong token {:?}", t),
- }
- match tokens.next().unwrap() {
- proc_macro2::TokenTree::Literal(ref tt) => {
- assert_eq!(tt.to_string(), "\" doc\"");
- }
- t => panic!("wrong token {:?}", t),
- }
- assert!(tokens.next().is_none());
-
- let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
- let tokens = stream.into_iter().collect::<Vec<_>>();
- assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
-}
-
-#[test]
-fn op_before_comment() {
+fn punct_before_comment() {
let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
match tts.next().unwrap() {
TokenTree::Punct(tt) => {
@@ -332,6 +321,22 @@ fn op_before_comment() {
}
#[test]
+fn joint_last_token() {
+ // This test verifies that we match the behavior of libproc_macro *not* in
+ // the range nightly-2020-09-06 through nightly-2020-09-10, in which this
+ // behavior was temporarily broken.
+ // See https://github.com/rust-lang/rust/issues/76399
+
+ let joint_punct = Punct::new(':', Spacing::Joint);
+ let stream = TokenStream::from(TokenTree::Punct(joint_punct));
+ let punct = match stream.into_iter().next().unwrap() {
+ TokenTree::Punct(punct) => punct,
+ _ => unreachable!(),
+ };
+ assert_eq!(punct.spacing(), Spacing::Joint);
+}
+
+#[test]
fn raw_identifier() {
let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
match tts.next().unwrap() {
@@ -345,11 +350,11 @@ fn raw_identifier() {
fn test_debug_ident() {
let ident = Ident::new("proc_macro", Span::call_site());
- #[cfg(not(procmacro2_semver_exempt))]
+ #[cfg(not(span_locations))]
let expected = "Ident(proc_macro)";
- #[cfg(procmacro2_semver_exempt)]
- let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";
+ #[cfg(span_locations)]
+ let expected = "Ident { sym: proc_macro }";
assert_eq!(expected, format!("{:?}", ident));
}
@@ -358,7 +363,7 @@ fn test_debug_ident() {
fn test_debug_tokenstream() {
let tts = TokenStream::from_str("[a + 1]").unwrap();
- #[cfg(not(procmacro2_semver_exempt))]
+ #[cfg(not(span_locations))]
let expected = "\
TokenStream [
Group {
@@ -368,7 +373,7 @@ TokenStream [
sym: a,
},
Punct {
- op: '+',
+ char: '+',
spacing: Alone,
},
Literal {
@@ -379,7 +384,7 @@ TokenStream [
]\
";
- #[cfg(not(procmacro2_semver_exempt))]
+ #[cfg(not(span_locations))]
let expected_before_trailing_commas = "\
TokenStream [
Group {
@@ -389,7 +394,7 @@ TokenStream [
sym: a
},
Punct {
- op: '+',
+ char: '+',
spacing: Alone
},
Literal {
@@ -400,7 +405,7 @@ TokenStream [
]\
";
- #[cfg(procmacro2_semver_exempt)]
+ #[cfg(span_locations)]
let expected = "\
TokenStream [
Group {
@@ -411,7 +416,7 @@ TokenStream [
span: bytes(2..3),
},
Punct {
- op: '+',
+ char: '+',
spacing: Alone,
span: bytes(4..5),
},
@@ -425,7 +430,7 @@ TokenStream [
]\
";
- #[cfg(procmacro2_semver_exempt)]
+ #[cfg(span_locations)]
let expected_before_trailing_commas = "\
TokenStream [
Group {
@@ -436,7 +441,7 @@ TokenStream [
span: bytes(2..3)
},
Punct {
- op: '+',
+ char: '+',
spacing: Alone,
span: bytes(4..5)
},
@@ -464,3 +469,80 @@ fn default_tokenstream_is_empty() {
assert!(default_token_stream.is_empty());
}
+
+#[test]
+fn tuple_indexing() {
+ // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
+ let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
+ assert_eq!("tuple", tokens.next().unwrap().to_string());
+ assert_eq!(".", tokens.next().unwrap().to_string());
+ assert_eq!("0.0", tokens.next().unwrap().to_string());
+ assert!(tokens.next().is_none());
+}
+
+#[cfg(span_locations)]
+#[test]
+fn non_ascii_tokens() {
+ check_spans("// abc", &[]);
+ check_spans("// ábc", &[]);
+ check_spans("// abc x", &[]);
+ check_spans("// ábc x", &[]);
+ check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
+ check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
+ check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
+ check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
+ check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
+ check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
+ check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
+ check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
+ check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]);
+ check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]);
+ check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
+ check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
+ check_spans("'a'", &[(1, 0, 1, 3)]);
+ check_spans("'á'", &[(1, 0, 1, 3)]);
+ check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
+ check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
+ check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
+ check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
+ check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
+ check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
+ check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
+ check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
+ check_spans("abc", &[(1, 0, 1, 3)]);
+ check_spans("ábc", &[(1, 0, 1, 3)]);
+ check_spans("ábć", &[(1, 0, 1, 3)]);
+ check_spans("abc// foo", &[(1, 0, 1, 3)]);
+ check_spans("ábc// foo", &[(1, 0, 1, 3)]);
+ check_spans("ábć// foo", &[(1, 0, 1, 3)]);
+ check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
+ check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]);
+}
+
+#[cfg(span_locations)]
+fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
+ let ts = p.parse::<TokenStream>().unwrap();
+ check_spans_internal(ts, &mut lines);
+ assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
+}
+
+#[cfg(span_locations)]
+fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
+ for i in ts {
+ if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
+ *lines = rest;
+
+ let start = i.span().start();
+ assert_eq!(start.line, sline, "sline did not match for {}", i);
+ assert_eq!(start.column, scol, "scol did not match for {}", i);
+
+ let end = i.span().end();
+ assert_eq!(end.line, eline, "eline did not match for {}", i);
+ assert_eq!(end.column, ecol, "ecol did not match for {}", i);
+
+ if let TokenTree::Group(g) = i {
+ check_spans_internal(g.stream().clone(), lines);
+ }
+ }
+ }
+}
diff --git a/tests/test_fmt.rs b/tests/test_fmt.rs
new file mode 100644
index 0000000..99a0aee
--- /dev/null
+++ b/tests/test_fmt.rs
@@ -0,0 +1,26 @@
+use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
+use std::iter::{self, FromIterator};
+
+#[test]
+fn test_fmt_group() {
+ let ident = Ident::new("x", Span::call_site());
+ let inner = TokenStream::from_iter(iter::once(TokenTree::Ident(ident)));
+ let parens_empty = Group::new(Delimiter::Parenthesis, TokenStream::new());
+ let parens_nonempty = Group::new(Delimiter::Parenthesis, inner.clone());
+ let brackets_empty = Group::new(Delimiter::Bracket, TokenStream::new());
+ let brackets_nonempty = Group::new(Delimiter::Bracket, inner.clone());
+ let braces_empty = Group::new(Delimiter::Brace, TokenStream::new());
+ let braces_nonempty = Group::new(Delimiter::Brace, inner.clone());
+ let none_empty = Group::new(Delimiter::None, TokenStream::new());
+ let none_nonempty = Group::new(Delimiter::None, inner.clone());
+
+ // Matches libproc_macro.
+ assert_eq!("()", parens_empty.to_string());
+ assert_eq!("(x)", parens_nonempty.to_string());
+ assert_eq!("[]", brackets_empty.to_string());
+ assert_eq!("[x]", brackets_nonempty.to_string());
+ assert_eq!("{ }", braces_empty.to_string());
+ assert_eq!("{ x }", braces_nonempty.to_string());
+ assert_eq!("", none_empty.to_string());
+ assert_eq!("x", none_nonempty.to_string());
+}