author     Ivan Lozano <ivanlozano@google.com>  2021-08-20 21:46:25 +0000
committer  Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>  2021-08-20 21:46:25 +0000
commit     7235450cd338bb4e2793aba4a7908a05112d448f (patch)
tree       b5ed33e9aace249432f40b7eb8556ee37264e2cc
parent     6f8db9fac5c1145552f060bf74b2f73213212e15 (diff)
parent     efd1fe2c43b75a602bb3f3c11594d41df20d078d (diff)
download   combine-7235450cd338bb4e2793aba4a7908a05112d448f.tar.gz
Initial commit for combine crate. am: 57e660e903 am: efd1fe2c43
Original change: https://android-review.googlesource.com/c/platform/external/rust/crates/combine/+/1804136

Change-Id: I4b1cf165df6bbe5bf6ce670e6b1e74ee4c7c83a3
-rw-r--r--  CHANGELOG.md                 691
-rw-r--r--  Cargo.lock                  1436
-rw-r--r--  Cargo.toml                   178
-rw-r--r--  Cargo.toml.orig              104
-rw-r--r--  LICENSE                       22
-rw-r--r--  METADATA                      19
-rw-r--r--  MODULE_LICENSE_MIT             0
-rw-r--r--  OWNERS                         1
-rw-r--r--  README.md                    107
-rw-r--r--  benches/data.json            317
-rw-r--r--  benches/http-requests.txt    494
-rw-r--r--  benches/http.rs              180
-rw-r--r--  benches/json.rs              323
-rw-r--r--  benches/mp4.rs                86
-rw-r--r--  examples/async.rs            188
-rw-r--r--  examples/date.rs             235
-rw-r--r--  examples/ini.rs              179
-rw-r--r--  examples/number.rs            33
-rw-r--r--  examples/readme.rs            18
-rwxr-xr-x  release.sh                     9
-rw-r--r--  rustfmt.toml                   1
-rw-r--r--  src/error.rs                1100
-rw-r--r--  src/lib.rs                  1006
-rw-r--r--  src/parser/byte.rs           676
-rw-r--r--  src/parser/char.rs           335
-rw-r--r--  src/parser/choice.rs         849
-rw-r--r--  src/parser/combinator.rs    1556
-rw-r--r--  src/parser/error.rs          245
-rw-r--r--  src/parser/function.rs       179
-rw-r--r--  src/parser/mod.rs           1204
-rw-r--r--  src/parser/range.rs          768
-rw-r--r--  src/parser/regex.rs          549
-rw-r--r--  src/parser/repeat.rs        1620
-rw-r--r--  src/parser/sequence.rs       893
-rw-r--r--  src/parser/token.rs          700
-rw-r--r--  src/stream/buf_reader.rs     959
-rw-r--r--  src/stream/buffered.rs       141
-rw-r--r--  src/stream/decoder.rs        227
-rw-r--r--  src/stream/easy.rs           897
-rw-r--r--  src/stream/mod.rs           1883
-rw-r--r--  src/stream/position.rs       465
-rw-r--r--  src/stream/read.rs           210
-rw-r--r--  src/stream/span.rs           157
-rw-r--r--  src/stream/state.rs           91
-rw-r--r--  tests/async.rs               854
-rw-r--r--  tests/buffered_stream.rs     116
-rw-r--r--  tests/parser.rs              670
-rw-r--r--  tests/parser_macro.rs         36
-rw-r--r--  tests/support/mod.rs         186
-rwxr-xr-x  travis.sh                     24
50 files changed, 23217 insertions, 0 deletions
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..a71baa1
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,691 @@
+<a name="v4.6.0"></a>
+## v4.6.0 (2021-06-16)
+
+
+#### Features
+
+* Add decode_tokio ([aa20bf64](https://github.com/Marwes/combine/commit/aa20bf641bc5deed7e521de289f2b0963034f750))
+
+
+
+<a name="v4.5.2"></a>
+### v4.5.2 (2021-01-07)
+
+
+
+
+<a name="v4.5.1"></a>
+### v4.5.1 (2020-12-27)
+
+
+#### Bug Fixes
+
+* Correct the tokio-02 feature ([466a50d3](https://github.com/Marwes/combine/commit/466a50d3533118cca0bafab48451fd39f92a8233))
+
+
+
+<a name="v4.5.0"></a>
+## v4.5.0 (2020-12-25)
+
+
+#### Features
+
+* Remove pin-project-lite from the 'std' feature ([32ef87b0](https://github.com/Marwes/combine/commit/32ef87b08a643c9814cb9eec6d1f3adfe220c690))
+* Add async decoding for tokio version 1 ([8e91f57d](https://github.com/Marwes/combine/commit/8e91f57d1ddaefafcaf244df7a7eea2096c6d6aa))
+
+
+
+<a name="v4.4.0"></a>
+## v4.4.0 (2020-11-18)
+
+
+#### Features
+
+* Add support for decoding using tokio_03 ([ce1612ff](https://github.com/Marwes/combine/commit/ce1612ffa0a15547bc967474b511ec35eaaf743f))
+* Add a length_prefix combinator ([9e343b2a](https://github.com/Marwes/combine/commit/9e343b2a0067ea70b049369ea1023c364c14b19d), closes [#297](https://github.com/Marwes/combine/issues/297))
+* Add a spanned combinator ([1a70f3f4](https://github.com/Marwes/combine/commit/1a70f3f4219eac71880cbb376e62d3f01c22d981))
+* Add a spanned combinator ([9a8fead8](https://github.com/Marwes/combine/commit/9a8fead85b3eb0d7ee21f80d076af9f59aa408d1))
+* Allow conversion between errors with different token,range,position types ([65fe0af2](https://github.com/Marwes/combine/commit/65fe0af27fa560f90da2723160efd2d1782bea4f), breaks [#](https://github.com/Marwes/combine/issues/))
+
+#### Breaking Changes
+
+* Allow conversion between errors with different token,range,position types ([65fe0af2](https://github.com/Marwes/combine/commit/65fe0af27fa560f90da2723160efd2d1782bea4f), breaks [#](https://github.com/Marwes/combine/issues/))
+
+
+
+<a name="v4.3.2"></a>
+### v4.3.2 (2020-09-08)
+
+
+
+
+<a name="v4.3.1"></a>
+### v4.3.1 (2020-08-26)
+
+
+#### Bug Fixes
+
+* Make the decode macros work for redis ([82b908d8](https://github.com/Marwes/combine/commit/82b908d80ac21aeab27b169a1f1857a68a6bb11c))
+
+
+
+<a name="v4.3.0"></a>
+## v4.3.0 (2020-07-10)
+
+
+#### Features
+
+* Specialize decoding on BufReader ([9559e114](https://github.com/Marwes/combine/commit/9559e114658b60a59d570ffd78f4d7ecb597b814))
+* Add a BufReader abstraction for Decoder ([d65a23e8](https://github.com/Marwes/combine/commit/d65a23e81f482c6a5482e53d63c17fcbcf67d623))
+* Allow tuple structs and functions in struct_parser! ([48a16f6b](https://github.com/Marwes/combine/commit/48a16f6bf84939da9eacb1f82b4cff6ca7f324e0))
+
+
+
+<a name="v4.2.1"></a>
+### v4.2.1 (2020-05-20)
+
+
+#### Performance
+
+* Use size_hint in iterate ([b2649e3b](https://github.com/Marwes/combine/commit/b2649e3b467fc84ef48a91d56ec1fcb40291978e))
+* Pre-allocate collections for count* parsers ([5f37857c](https://github.com/Marwes/combine/commit/5f37857c988b81c2b0613d6f2ab14576c794ae99))
+
+
+
+<a name="v4.2.0"></a>
+## v4.2.0 (2020-05-17)
+
+
+#### Features
+
+* Add any_send_sync_partial_state ([7e90807a](https://github.com/Marwes/combine/commit/7e90807a0949411b6aaf24a677e6530a530a1478))
+
+
+
+<a name="v4.1.0"></a>
+## v4.1.0 (2020-04-22)
+
+
+
+
+<a name="4.0.1"></a>
+### 4.0.1 (2020-01-23)
+
+
+#### Breaking Changes
+
+* Make the decode macros take read by parameter ([aa813fb1](https://github.com/Marwes/combine/commit/aa813fb1b486ecdc5258bf2c89e0b18a8f4fc876), breaks [#](https://github.com/Marwes/combine/issues/))
+
+#### Features
+
+* Make the decode macros take read by parameter ([aa813fb1](https://github.com/Marwes/combine/commit/aa813fb1b486ecdc5258bf2c89e0b18a8f4fc876), breaks [#](https://github.com/Marwes/combine/issues/))
+
+#### Bug Fixes
+
+* Don't block decoding if there are more data in the buffer ([6659f993](https://github.com/Marwes/combine/commit/6659f993784876bdc5d6f6145aaffe2844ada760))
+* Don't try to read into the entire buffer for std decoding ([d613dc93](https://github.com/Marwes/combine/commit/d613dc937de6e0b745f8cbcca974e3cfcb3db723))
+
+
+
+<a name="4.0.0"></a>
+## 4.0.0 (2020-01-16)
+
+
+#### Features
+
+* Relax the decode*! macros to no longer need BufRead ([bc5b7794](https://github.com/Marwes/combine/commit/bc5b779491c66a18289bc4b237d281bd391b1d69))
+* Add decode_futures_03_buf_read ([f403ecda](https://github.com/Marwes/combine/commit/f403ecda68e5d7c5d5cf89d8feb26aa1b715609e))
+* Add the produce parser ([ac15b87c](https://github.com/Marwes/combine/commit/ac15b87c4eb23065920b3bb7ad8d590f29937895))
+
+
+
+<a name="4.0.0-beta.2"></a>
+## 4.0.0-beta.2 (2019-12-19)
+
+
+#### Features
+
+* Increase tuple parsers to 20 elements ([45781fea](https://github.com/Marwes/combine/commit/45781feac3cd2da252fcc999aa69c9a207af5f6a))
+* Remove deprecated re-exports ([b4e23207](https://github.com/Marwes/combine/commit/b4e23207f9c3e47111c69865389853d89b8ce12d), breaks [#](https://github.com/Marwes/combine/issues/))
+* Add decode_tokio_buf_read! to parse tokio::io::BufRead ([4d38f7c2](https://github.com/Marwes/combine/commit/4d38f7c25cb0cde10cdc869955c0a98079b5fa08))
+* Add `decode_buf_read!` to parse `BufRead` without ([dbe23ce9](https://github.com/Marwes/combine/commit/dbe23ce90803d409c591b02aaeb7005d0f58622a))
+* Add a macro to incrementally decode std::io::BufRead ([6e2f1121](https://github.com/Marwes/combine/commit/6e2f1121203c54b1623f9f5a8b35907867c83874))
+* Rename Consumed to Commit and Empty to Peek ([129046e3](https://github.com/Marwes/combine/commit/129046e3a555318c3e60f658988f1be8fe83a2fd), breaks [#](https://github.com/Marwes/combine/issues/))
+* Rename Consumed{Ok,Err} to Commit{Ok,Err} and Empty{Ok,Err} to Peek{Ok,Err} ([0ac1fd4f](https://github.com/Marwes/combine/commit/0ac1fd4fee89bd540b38b0d3224e0dcf5260ab77))
+* Add a dedicated error for ReadStream ([37bbd843](https://github.com/Marwes/combine/commit/37bbd84383ecaa632df537322426407be7712748))
+
+#### Bug Fixes
+
+* Handle partial parsing in sep_end_by ([281e0d30](https://github.com/Marwes/combine/commit/281e0d30276cd8d2b730680fb24117bb1b72b198))
+
+#### Breaking Changes
+
+* Remove deprecated re-exports ([b4e23207](https://github.com/Marwes/combine/commit/b4e23207f9c3e47111c69865389853d89b8ce12d), breaks [#](https://github.com/Marwes/combine/issues/))
+* Rename Consumed to Commit and Empty to Peek ([129046e3](https://github.com/Marwes/combine/commit/129046e3a555318c3e60f658988f1be8fe83a2fd), breaks [#](https://github.com/Marwes/combine/issues/))
+
+
+
+<a name="4.0.0-beta.1"></a>
+## 4.0.0-beta.1 (2019-10-08)
+
+
+#### Bug Fixes
+
+* Handle partial parsing in the num parsers ([47764c7f](https://github.com/Marwes/combine/commit/47764c7feb8becefd1d4c376fc11492ed3f3cd6a))
+* Don't bind the input lifetime to the parser with expected/message ([618c69e0](https://github.com/Marwes/combine/commit/618c69e09afc383dadc9af305394ca82948801c3))
+
+#### Breaking Changes
+
+* Merge FullRangeStream into RangeStreamOnce ([c160a971](https://github.com/Marwes/combine/commit/c160a971b47c29a0c8d37fbca6f77cd4a6c85831), breaks [#](https://github.com/Marwes/combine/issues/))
+* Rename `StreamOnce::Item` to `Token` ([74a0bbd3](https://github.com/Marwes/combine/commit/74a0bbd363214047236ae88e76bd7e7a7cdc265f), breaks [#](https://github.com/Marwes/combine/issues/), [#](https://github.com/Marwes/combine/issues/))
+* Minimum rust version is now 1.32 ([7b77508c](https://github.com/Marwes/combine/commit/7b77508c05f2075e7795b6026e419a2a57666a1c), breaks [#](https://github.com/Marwes/combine/issues/))
+* Only require `&mut I` in decode ([e154dbc2](https://github.com/Marwes/combine/commit/e154dbc2f2ca3288e6733f29c85f4b1efcd689c7), breaks [#](https://github.com/Marwes/combine/issues/))
+* Generalize factory to take the Input ([19b1a73e](https://github.com/Marwes/combine/commit/19b1a73ed3516d14b392aefeba0363d01937be22), breaks [#](https://github.com/Marwes/combine/issues/))
+
+#### Features
+
+* Merge FullRangeStream into RangeStreamOnce ([c160a971](https://github.com/Marwes/combine/commit/c160a971b47c29a0c8d37fbca6f77cd4a6c85831), breaks [#](https://github.com/Marwes/combine/issues/))
+* Rename `StreamOnce::Item` to `Token` ([74a0bbd3](https://github.com/Marwes/combine/commit/74a0bbd363214047236ae88e76bd7e7a7cdc265f), breaks [#](https://github.com/Marwes/combine/issues/), [#](https://github.com/Marwes/combine/issues/))
+* Allow fmt::Display to be used to specify errors in combinators ([82796d4a](https://github.com/Marwes/combine/commit/82796d4ae5ea23290920ae4e1586b20535285d49), closes [#255](https://github.com/Marwes/combine/issues/255))
+* Minimum rust version is now 1.32 ([7b77508c](https://github.com/Marwes/combine/commit/7b77508c05f2075e7795b6026e419a2a57666a1c), breaks [#](https://github.com/Marwes/combine/issues/))
+* Only require `&mut I` in decode ([e154dbc2](https://github.com/Marwes/combine/commit/e154dbc2f2ca3288e6733f29c85f4b1efcd689c7), breaks [#](https://github.com/Marwes/combine/issues/))
+* Add RepeatUntil ([7f4a310f](https://github.com/Marwes/combine/commit/7f4a310f8edaebe7d3bd450aca7a2823fcf2e5d6))
+* Generalize factory to take the Input ([19b1a73e](https://github.com/Marwes/combine/commit/19b1a73ed3516d14b392aefeba0363d01937be22), breaks [#](https://github.com/Marwes/combine/issues/))
+* Add the iterate parser ([342a45b4](https://github.com/Marwes/combine/commit/342a45b484240ace2e313138b2818c95cbec3427))
+* Add dispatch! ([0740ce0f](https://github.com/Marwes/combine/commit/0740ce0fd2ade2bd19981261dfeb89cae1f63120))
+* impl Stream* for &mut T ([51e7e2b7](https://github.com/Marwes/combine/commit/51e7e2b736f3b3ab8d9ec9877e2afa8fc31f5207))
+* Add `From<S>` for `easy::Stream<S>` ([8f695cc6](https://github.com/Marwes/combine/commit/8f695cc62268a855c3c3847661f6ffe8308b745e))
+* Allow ReadStream parsers to use &[u8] in errors ([6b62a857](https://github.com/Marwes/combine/commit/6b62a857f441ab930e2f595aff3e87f992c769b8), closes [#249](https://github.com/Marwes/combine/issues/249))
+
+
+
+<a name="4.0.0-alpha.2"></a>
+## 4.0.0-alpha.2 (2019-06-17)
+
+
+#### Features
+
+* impl Stream* for &mut T ([51e7e2b7](https://github.com/Marwes/combine/commit/51e7e2b736f3b3ab8d9ec9877e2afa8fc31f5207))
+* Add `From<S>` for `easy::Stream<S>` ([8f695cc6](https://github.com/Marwes/combine/commit/8f695cc62268a855c3c3847661f6ffe8308b745e))
+* Allow ReadStream parsers to use `&[u8]` in errors ([6b62a857](https://github.com/Marwes/combine/commit/6b62a857f441ab930e2f595aff3e87f992c769b8), closes [#249](https://github.com/Marwes/combine/issues/249))
+
+
+
+<a name="4.0.0-alpha.1"></a>
+## 4.0.0-alpha.1 (2019-05-07)
+
+
+#### Breaking Changes
+
+* Drop support for regex 0.2 ([bf6aeb06](https://github.com/Marwes/combine/commit/bf6aeb06494abe2f1890c5bf90db86ac01ec9772), closes [#247](https://github.com/Marwes/combine/issues/247), breaks [#](https://github.com/Marwes/combine/issues/))
+* Rename tokens2 to tokens and tokens to tokens_cmp ([3dadbb4f](https://github.com/Marwes/combine/commit/3dadbb4f2adb0447c883ea56bdbfcfd53c58d384), closes [#166](https://github.com/Marwes/combine/issues/166), breaks [#](https://github.com/Marwes/combine/issues/))
+* Remove the std::result::Result returning parse functions ([6ec094ef](https://github.com/Marwes/combine/commit/6ec094efd2eecefeb2281fb99687143e7a5580e8), closes [#244](https://github.com/Marwes/combine/issues/244), breaks [#](https://github.com/Marwes/combine/issues/))
+* Make PointerOffset easier to understand and use ([5f6d65b3](https://github.com/Marwes/combine/commit/5f6d65b30679d39c4c8a41ef0877bca8dc199095), closes [#238](https://github.com/Marwes/combine/issues/238), breaks [#](https://github.com/Marwes/combine/issues/))
+* Allow reset to return errors ([3055c810](https://github.com/Marwes/combine/commit/3055c810fd4904d2eba3f51ea232ef0232f8fbe7), closes [#231](https://github.com/Marwes/combine/issues/231), breaks [#](https://github.com/Marwes/combine/issues/))
+* Remove the PartialEq bound from Item and Range ([24e1087c](https://github.com/Marwes/combine/commit/24e1087cbff4938d48ae3e5947e2eb0dcbb8cc87), closes [#219](https://github.com/Marwes/combine/issues/219), breaks [#](https://github.com/Marwes/combine/issues/))
+
+#### Features
+
+* Drop support for regex 0.2 ([bf6aeb06](https://github.com/Marwes/combine/commit/bf6aeb06494abe2f1890c5bf90db86ac01ec9772), closes [#247](https://github.com/Marwes/combine/issues/247), breaks [#](https://github.com/Marwes/combine/issues/))
+* Rename tokens2 to tokens and tokens to tokens_cmp ([3dadbb4f](https://github.com/Marwes/combine/commit/3dadbb4f2adb0447c883ea56bdbfcfd53c58d384), closes [#166](https://github.com/Marwes/combine/issues/166), breaks [#](https://github.com/Marwes/combine/issues/))
+* Remove the std::result::Result returning parse functions ([6ec094ef](https://github.com/Marwes/combine/commit/6ec094efd2eecefeb2281fb99687143e7a5580e8), closes [#244](https://github.com/Marwes/combine/issues/244), breaks [#](https://github.com/Marwes/combine/issues/))
+* Allow reset to return errors ([3055c810](https://github.com/Marwes/combine/commit/3055c810fd4904d2eba3f51ea232ef0232f8fbe7), closes [#231](https://github.com/Marwes/combine/issues/231), breaks [#](https://github.com/Marwes/combine/issues/))
+* Remove the PartialEq bound from Item and Range ([24e1087c](https://github.com/Marwes/combine/commit/24e1087cbff4938d48ae3e5947e2eb0dcbb8cc87), closes [#219](https://github.com/Marwes/combine/issues/219), breaks [#](https://github.com/Marwes/combine/issues/))
+
+#### Bug Fixes
+
+* Don't require macro_use when using parser! in rust 2018 ([72e4c70f](https://github.com/Marwes/combine/commit/72e4c70fda2f283e1feadfd926ed9bb0d384cb59), closes [#241](https://github.com/Marwes/combine/issues/241))
+* Make PointerOffset easier to understand and use ([5f6d65b3](https://github.com/Marwes/combine/commit/5f6d65b30679d39c4c8a41ef0877bca8dc199095), closes [#238](https://github.com/Marwes/combine/issues/238), breaks [#](https://github.com/Marwes/combine/issues/))
+
+
+
+<a name="3.8.1"></a>
+### 3.8.1 (2019-03-13)
+
+
+#### Bug Fixes
+
+* Handle partial parsing in `take_fn/take_until*` ([bae1e3bb](https://github.com/Marwes/combine/commit/bae1e3bbe476fb3fe7c4ff6cc375e50f3d9cfd45))
+
+
+
+<a name="3.8.0"></a>
+## 3.8.0 (2019-03-12)
+
+
+#### Features
+
+* Add take_fn and take_until_bytes ([5f560780](https://github.com/Marwes/combine/commit/5f5607806f8e133485c990419a03de8c7531fa14))
+
+
+
+<a name="3.6.7"></a>
+### 3.6.7 (2019-02-13)
+
+
+#### Bug Fixes
+
+* Forward is_partial in all Stream adaptors ([121b3987](https://github.com/Marwes/combine/commit/121b39879543c074924185dfe17d6b4f434ce413))
+
+
+
+<a name="3.6.6"></a>
+### 3.6.6 (2019-01-12)
+
+* Fix two inconsistencies regarding EOI [#227](https://github.com/Marwes/combine/pull/227)
+
+
+<a name="3.6.5"></a>
+### 3.6.5 (2019-01-09)
+
+
+#### Bug Fixes
+
+* not_followed_by should fail on empty successful parses ([aa17e7d2](https://github.com/Marwes/combine/commit/aa17e7d25532eebdb6c447459c8550a4fb6bbe93))
+* Propagate the PartialState through parser! uses ([ae888244](https://github.com/Marwes/combine/commit/ae8882449ee935f7ffb0dd3ef7f0e2d8d6932409), closes [#223](https://github.com/Marwes/combine/issues/223))
+
+
+<a name="3.6.3"></a>
+### 3.6.3 (2018-11-16)
+
+
+
+
+<a name="3.6.2"></a>
+### 3.6.2 (2018-11-06)
+
+
+
+
+<a name="3.6.1"></a>
+### 3.6.1 (2018-10-13)
+
+
+#### Bug Fixes
+
+* Allow clippy to run on stable rust ([6cb00803](https://github.com/Marwes/combine/commit/6cb00803d0135a3849a7f0b35dcf635764b32c06))
+
+
+
+<a name="3.6.0"></a>
+## 3.6.0 (2018-10-07)
+
+
+#### Features
+
+* Deprecates `try` in favor of `attempt` ([a9c79321](https://github.com/Marwes/combine/commit/a9c79321b28ea5332d30429936bfca0c034105ca))
+* Adds `attempt`, a 2018-compatible alias for `try` ([18edaec4](https://github.com/Marwes/combine/commit/18edaec46f67a3803b96bac3762b6f809d0c2724))
+
+
+
+<a name="3.5.3"></a>
+### 3.5.3 (2018-10-06)
+
+
+#### Features
+
+* Allow regex parsers to use regex-1.0 ([77fe362c](https://github.com/Marwes/combine/commit/77fe362c56efa6ce20e3074388aeda33cfe91c4b))
+* pub use unexpected_any ([8b2ca559](https://github.com/Marwes/combine/commit/8b2ca55971e01e8a87ec20ec7b9fe8476023117f))
+
+#### Bug Fixes
+
+* use unexpected_any inside `then` examples ([f2018db5](https://github.com/Marwes/combine/commit/f2018db5cf5487d02488d017c23f6993e6ed6f82))
+
+
+
+<a name="3.5.2"></a>
+### 3.5.2 (2018-09-12)
+
+
+#### Bug Fixes
+
+* Don't report previous errors from expected ([0048c5ef](https://github.com/Marwes/combine/commit/0048c5ef4577592df94e0c5296bee3a128117211))
+
+
+
+<a name="3.5.1"></a>
+### 3.5.1 (2018-08-13)
+
+
+#### Bug Fixes
+
+* Add all errors of nested choice parsers ([f2b8fbbf](https://github.com/Marwes/combine/commit/f2b8fbbfe300e2b219eb72116856c217fcec8b2b))
+
+
+
+<a name="3.5.0"></a>
+## 3.5.0 (2018-08-13)
+
+
+#### Features
+
+* Add opaque! as convenience over the opaque parser ([9855aa1f](https://github.com/Marwes/combine/commit/9855aa1f39b62b09addb1c7a25035616a8cbef42))
+* Add the factory parser ([fdd38d46](https://github.com/Marwes/combine/commit/fdd38d46b5dbdeece7f6f3a99b12e470a08e9a92))
+
+
+
+<a name="3.4.0"></a>
+## 3.4.0 (2018-08-04)
+
+
+#### Features
+
+* Add the silent combinator ([b9bc28d3](https://github.com/Marwes/combine/commit/b9bc28d32b80644ba5a7fdc6969e7e8e734ee4de))
+* Add the `opaque` parser ([1a1123f5](https://github.com/Marwes/combine/commit/1a1123f5b6970c88d99643d87d75c8fb63117607))
+* Add tokens2 as a simpler version of tokens ([072a8c13](https://github.com/Marwes/combine/commit/072a8c13f90c640d2a98910276bc96d5aa27fae9))
+* Add the from_str combinator ([908f9ebd](https://github.com/Marwes/combine/commit/908f9ebdc3593eda67e8eba8f5b467962076964e))
+
+#### Bug Fixes
+
+* Make (many1(p), end) report more expected messages ([9e26d38e](https://github.com/Marwes/combine/commit/9e26d38e06c8bac9dbaa547893b8f2aea01f047d))
+* Report more errors in the middle of sequence parsers ([f9e404f6](https://github.com/Marwes/combine/commit/f9e404f6840eb4d42016122095416f76bee5abf3))
+* Report more expected information from nested sequence parsers ([9aa1db92](https://github.com/Marwes/combine/commit/9aa1db92dcd17a35d5775bf78b6642c4e8c89d1d))
+* Use the message variant in from_str to work on no_std ([edf5ff60](https://github.com/Marwes/combine/commit/edf5ff6002d6b6f2f8bd23692dce5084e9fc01dd))
+* Remove redundant Any bound on easy::Errors ([16601046](https://github.com/Marwes/combine/commit/16601046f3c2aa8b4ad75ba2390486f7b5306219), closes [#177](https://github.com/Marwes/combine/issues/177))
+
+<a name="3.3.6"></a>
+### 3.3.6
+
+* Don't forget the state in any_send_partial_state ([4e2eb928](https://github.com/Marwes/combine/commit/4e2eb928e6059a4182b2717bf2bfb7cccdc60127))
+
+<a name="3.3.5"></a>
+### 3.3.5 (2018-06-30)
+
+
+#### Bug Fixes
+
+* Propagate the ParseMode in range::recognize ([c330a737](https://github.com/Marwes/combine/commit/c330a73746f6adfa22c6b13b15d796d48f589614))
+
+
+
+<a name="3.3.4"></a>
+### 3.3.4 (2018-06-30)
+
+* fix: Forward the partial mode through the parser! macro correctly
+
+
+<a name="3.3.3"></a>
+### 3.3.3 (2018-06-29)
+
+#### Bug Fixes
+* Parse from the start in sequence parsers in first mode
+
+
+<a name="3.3.1"></a>
+### 3.3.1 (2018-06-01)
+
+
+#### Bug Fixes
+
+* support resuming streams in TakeUntilRange ([b54ff061](https://github.com/Marwes/combine/commit/b54ff0619663aaf7d3c33185b1a4b7ec73cc1f61))
+* reset stream on error in take_until_range ([27449f21](https://github.com/Marwes/combine/commit/27449f2131ecb3d6ef956e2e67b588cae58a9810))
+* support multi-byte `Item`s in take_until_range ([4a690d65](https://github.com/Marwes/combine/commit/4a690d65160ea6e1866fd6dbe8865acae4070c3a))
+
+
+
+<a name="v3.3.0"></a>
+## v3.3.0 (2018-05-19)
+
+
+#### Features
+
+* Add the escaped parser ([0db58a20](https://github.com/Marwes/combine/commit/0db58a20f227d923ffcd3451fbb07ace87dba07c))
+* Add the lazy parser combinator ([496ac836](https://github.com/Marwes/combine/commit/496ac83628ba33ee9a886f989f8749388f918652))
+
+#### Breaking Changes
+
+* Remove redundant state comparison in Iter ([9d434c3f](https://github.com/Marwes/combine/commit/9d434c3ff89480aeb9c5552e439b465adc28e31d), breaks [#](https://github.com/Marwes/combine/issues/))
+
+#### Performance
+
+* Specialize uncons_while1 on all streams ([c995ad61](https://github.com/Marwes/combine/commit/c995ad6125996b594b8bf45da078fdebeb41b86a))
+* Unroll the loop for <[T]>::uncons_while ([f593e85d](https://github.com/Marwes/combine/commit/f593e85d4d0ecb9c7c91b769685864fda3291646))
+* Inline from/into for results ([fff248e4](https://github.com/Marwes/combine/commit/fff248e494361e04cedbf849df959193560416ff))
+* Unroll take_while1 ([279a4526](https://github.com/Marwes/combine/commit/279a4526494e1cd7e90252c3bc1cfc8f8c35ebcb))
+* Remove redundant state comparison in Iter ([9d434c3f](https://github.com/Marwes/combine/commit/9d434c3ff89480aeb9c5552e439b465adc28e31d), breaks [#](https://github.com/Marwes/combine/issues/))
+* Add uncons_while1 as a default method on RangeStream ([5d154f15](https://github.com/Marwes/combine/commit/5d154f15a13091c26246627486b309cbdef06d14))
+
+
+
+<a name="v3.2.0"></a>
+## v3.2.0 (2018-04-24)
+
+
+#### Features
+
+* Add any_send_partial_state ([d4153d31](https://github.com/Marwes/combine/commit/d4153d31b074e950f4752f29c8ed188102534e91))
+
+
+
+<a name="v3.1.0"></a>
+## v3.1.0 (2018-03-26)
+
+
+#### Features
+
+* Allow the `num` parsers to be used without RangeStream ([b1cb0668](https://github.com/Marwes/combine/commit/b1cb0668cc13df3a2f1b6cc35f221089d0279579))
+* Add the take_until parser ([7b03b596](https://github.com/Marwes/combine/commit/7b03b596a58e4cdbe84d6008e277f0cc57394fae))
+* Allow try parsers to be used with partial parsing ([cb2da7ad](https://github.com/Marwes/combine/commit/cb2da7ad74758bc89b17bedd90f2f53ea9f83e7d))
+
+
+
+<a name="v3.0.0"></a>
+## v3.0.0 (2018-03-18)
+
+3.0.0 is the final stabilization of all the changes made in the 3.x-alpha/beta releases. You can read more about these changes
+at https://marwes.github.io/2018/02/08/combine-3.html and https://www.reddit.com/r/rust/comments/6s792a/combine_250_and_300alpha1/
+
+#### Features
+
+* Let single element tuples work as sequence and choice parsers ([81e34d2d](https://github.com/Marwes/combine/commit/81e34d2d8d823b9962f4036e7576353252f211b9))
+
+
+
+<a name="3.0.0-beta.1"></a>
+## v3.0.0-beta.1 (2018-02-02)
+
+
+#### Features
+
+* Encode parsers as resumable state machines [342fc47](https://github.com/Marwes/combine/commit/342fc4770ee9dc62df51683ccca2e612d6e1ea33)
+* Add the unexpected_any parser ([979e0d7e](https://github.com/Marwes/combine/commit/979e0d7e4ac63c41712352bc87b51001aa067879), closes [#126](https://github.com/Marwes/combine/issues/126))
+* Don't have the error type be generic for uncons* ([df3e84f0](https://github.com/Marwes/combine/commit/df3e84f0275352fba44672b2701d452f6bb55596))
+* Add an alias which helps to refer to the StreamError type ([95eb70cb](https://github.com/Marwes/combine/commit/95eb70cb025aec89925e3f1992a6d1b266328eb2))
+* Add memchr optimized take_until_byte parsers ([30cc7d1d](https://github.com/Marwes/combine/commit/30cc7d1d39e754d2c2e8491eb2123dacec5d30f3))
+* Add the `then_partial` parser ([5d402f6b](https://github.com/Marwes/combine/commit/5d402f6be0c1be4d69159d4df1d1d4589f5cd66e))
+* Don't require `Clone` for `Stream` ([3fc0b540](https://github.com/Marwes/combine/commit/3fc0b540521aff959ce62628df1ac7554a9df861), breaks [#](https://github.com/Marwes/combine/issues/))
+
+#### Breaking Changes
+
+* Don't require `Clone` for `Stream` ([3fc0b540](https://github.com/Marwes/combine/commit/3fc0b540521aff959ce62628df1ac7554a9df861), breaks [#](https://github.com/Marwes/combine/issues/))
+
+#### Bug Fixes
+
+* Allow `parser!` to be used within functions ([916bb824](https://github.com/Marwes/combine/commit/916bb824741f054ed2f0686dcce316d0a770d9db))
+* Allow multiple unnamed public parsers to be in the same scope ([c04e2247](https://github.com/Marwes/combine/commit/c04e2247ab04f098ef0eae887c7a7739d99212fc))
+* Return the correct distance for slices with larger than 1 byte items ([6b3c661a](https://github.com/Marwes/combine/commit/6b3c661af23fd221d2b28c37ed207d37409c2491))
+* Don't add extra expected errors when erroring in the middle of a sequence ([44eac24d](https://github.com/Marwes/combine/commit/44eac24d46f265a1b3a94d5587d4f200ebebc18f))
+
+#### Performance
+
+* Avoid cloning input in satisfy ([9aeaefa9](https://github.com/Marwes/combine/commit/9aeaefa95f97bd8b9d186923e16cd8def98d8e81))
+
+
+
+<a name="3.0.0-alpha.4"></a>
+## v3.0.0-alpha.4 (2017-10-11)
+
+
+#### Breaking Changes
+
+* Rename EasyStream -> Stream, ParsingError => ParseError ... ([d2f4ab14](https://github.com/Marwes/combine/commit/d2f4ab1471cc0616a46bfe965a611d465434d19a), breaks [#](https://github.com/Marwes/combine/issues/))
+* Make the RangeStreamOnce function generic over the returned error ([818d8629](https://github.com/Marwes/combine/commit/818d8629116fec8eef64494a938f0340c04d6ad6), breaks [#](https://github.com/Marwes/combine/issues/))
+* Re-export the type generated by parser! if it is public ([61469f0a](https://github.com/Marwes/combine/commit/61469f0a2db899a1144d0335dd47b9bb8d3105f2), breaks [#](https://github.com/Marwes/combine/issues/))
+
+#### Performance
+
+* Add inline annotations on error traits ([c8b495b4](https://github.com/Marwes/combine/commit/c8b495b41a21cd71b62782f62bbae77f13f92fb7))
+
+#### Features
+
+* Rename EasyStream -> Stream, ParsingError => ParseError ... ([d2f4ab14](https://github.com/Marwes/combine/commit/d2f4ab1471cc0616a46bfe965a611d465434d19a), breaks [#](https://github.com/Marwes/combine/issues/))
+* Make the RangeStreamOnce function generic over the returned error ([818d8629](https://github.com/Marwes/combine/commit/818d8629116fec8eef64494a938f0340c04d6ad6), breaks [#](https://github.com/Marwes/combine/issues/))
+* Allow combine to be used in no_std environments ([9fd310ac](https://github.com/Marwes/combine/commit/9fd310ac6b795f8f4152892a698dcf29d9c72b7b))
+* Re-export the type generated by parser! if it is public ([61469f0a](https://github.com/Marwes/combine/commit/61469f0a2db899a1144d0335dd47b9bb8d3105f2), breaks [#](https://github.com/Marwes/combine/issues/))
+
+
+
+<a name="v3.0.0-alpha.3"></a>
+## v3.0.0-alpha.3 (2017-08-20)
+
+* Implement Copy for more types ([e60395d6](https://github.com/Marwes/combine/commit/e60395d683faf52be772d222f28a5d38aec05f5c))
+
+
+
+<a name="v3.0.0-alpha.1"></a>
+## v3.0.0-alpha.1 (2017-08-07)
+
+
+#### Features
+
+* Remove the old State type and Positioner trait ([ae43f8ae](https://github.com/Marwes/combine/commit/ae43f8ae2b303aca3b5ae9fbb1a87475349f2745), breaks [#](https://github.com/Marwes/combine/issues/))
+* Teach the choice parser to take tuples ([96da7ee0](https://github.com/Marwes/combine/commit/96da7ee0cf8a112e60747a0be8a4dbd90efbecba), breaks [#](https://github.com/Marwes/combine/issues/))
+* Add the range_of parser ([7e692086](https://github.com/Marwes/combine/commit/7e69208650f7fdc75279370b193030b09ccdbc7a), closes [#83](https://github.com/Marwes/combine/issues/83), breaks [#](https://github.com/Marwes/combine/issues/))
+* Add map_token and map_range methods to ParseError ([2f92b296](https://github.com/Marwes/combine/commit/2f92b29669b618535bcd7533b7dd39b7daa8579b), closes [#86](https://github.com/Marwes/combine/issues/86))
+* Allow ParseError to be used without the StreamOnce constraint ([520da8e8](https://github.com/Marwes/combine/commit/520da8e89f7162b4d6ba3a3bca05a05f3bd37999), breaks [#](https://github.com/Marwes/combine/issues/))
+
+#### Bug Fixes
+
+* Remove deprecated items ([9107342a](https://github.com/Marwes/combine/commit/9107342a89a5efc664bac9c2919a93a992ca6809), breaks [#](https://github.com/Marwes/combine/issues/))
+* Don't forward tuple parsers to frunk to prevent a performance loss ([7e27c523](https://github.com/Marwes/combine/commit/7e27c523da46828b254ee4fc7c1f9750623e5aff))
+* Add the correct errors after sequencing has returned EmptyOk ([54fecc62](https://github.com/Marwes/combine/commit/54fecc62938445aae15373a6b1ec7c4419582025), closes [#95](https://github.com/Marwes/combine/issues/95))
+* Renamed SharedBufferedStream and BufferedStream to be less confusing ([3add407e](https://github.com/Marwes/combine/commit/3add407eecf886cc72ce05414d58a2b3b19a0bb9), breaks [#](https://github.com/Marwes/combine/issues/))
+* Add From<u8> for Info ([4cf8cff6](https://github.com/Marwes/combine/commit/4cf8cff64466519bf2d4a4dc1dcbe8deb449e004))
+* Make the positions of slice streams harder to misuse ([f50ab9e2](https://github.com/Marwes/combine/commit/f50ab9e2f42ec2465368bfb11a60b2339b699fc4), closes [#104](https://github.com/Marwes/combine/issues/104), breaks [#](https://github.com/Marwes/combine/issues/))
+
+#### Breaking Changes
+
+* Remove deprecated items ([9107342a](https://github.com/Marwes/combine/commit/9107342a89a5efc664bac9c2919a93a992ca6809), breaks [#](https://github.com/Marwes/combine/issues/))
+* Renamed SharedBufferedStream and BufferedStream to be less confusing ([3add407e](https://github.com/Marwes/combine/commit/3add407eecf886cc72ce05414d58a2b3b19a0bb9), breaks [#](https://github.com/Marwes/combine/issues/))
+* Remove the old State type and Positioner trait ([ae43f8ae](https://github.com/Marwes/combine/commit/ae43f8ae2b303aca3b5ae9fbb1a87475349f2745), breaks [#](https://github.com/Marwes/combine/issues/))
+* Teach the choice parser to take tuples ([96da7ee0](https://github.com/Marwes/combine/commit/96da7ee0cf8a112e60747a0be8a4dbd90efbecba), breaks [#](https://github.com/Marwes/combine/issues/))
+* Add the range_of parser ([7e692086](https://github.com/Marwes/combine/commit/7e69208650f7fdc75279370b193030b09ccdbc7a), closes [#83](https://github.com/Marwes/combine/issues/83), breaks [#](https://github.com/Marwes/combine/issues/))
+* Make the positions of slice streams harder to misuse ([f50ab9e2](https://github.com/Marwes/combine/commit/f50ab9e2f42ec2465368bfb11a60b2339b699fc4), closes [#104](https://github.com/Marwes/combine/issues/104), breaks [#](https://github.com/Marwes/combine/issues/))
+* Allow ParseError to be used without the StreamOnce constraint ([520da8e8](https://github.com/Marwes/combine/commit/520da8e89f7162b4d6ba3a3bca05a05f3bd37999), breaks [#](https://github.com/Marwes/combine/issues/))
+
+
+
+<a name="v2.5.0"></a>
+## v2.5.0 (2017-08-07)
+
+#### Features
+
+* Rename captures to captures_many and add a captures parser ([9d301e42](https://github.com/Marwes/combine/commit/9d301e42ee2da23c90ce78982d9dbef6d7586b4c))
+* Add regex parsers (match_, find_many) ([5ac12b98](https://github.com/Marwes/combine/commit/5ac12b9883c49b345341ad47aeac2c8accd52c33))
+* Add a macro to parse values directly into structs ([1656a620](https://github.com/Marwes/combine/commit/1656a620960e2b6256e724058cf39892d6e16944))
+* add count_min_max and skip_count_min_max ([8f3413a7](https://github.com/Marwes/combine/commit/8f3413a7431f4459d67695156f0b259df422bf09))
+* Add the skip_count parser ([15171d10](https://github.com/Marwes/combine/commit/15171d10495a5a221713ca0f67f3afc0b0eaf580))
+* Add the recognize parser ([61c9b269](https://github.com/Marwes/combine/commit/61c9b269826707e7fa7409512f21122c9fd8f137))
+* Add a macro for declaring parsers ([7fe1d9f7](https://github.com/Marwes/combine/commit/7fe1d9f723a14d20c9879849e104283ee24d254e), closes [#70](https://github.com/Marwes/combine/issues/70))
+* Provide parsers for decoding big-endian and little-endian numbers ([05ec0bc8](https://github.com/Marwes/combine/commit/05ec0bc8675a2de0a71268a458ceefa7ee99f7a0))
+
+#### Bug Fixes
+
+* Report and_then errors as if at the start of the parse ([b71a78f1](https://github.com/Marwes/combine/commit/b71a78f12a40e90425d59f72d28c628d28aebe1d))
+* Return EmptyErr when the any parser fails ([93208e9c](https://github.com/Marwes/combine/commit/93208e9c6fd92628eb02c0b32a0d6d3120a9af7f), closes [#99](https://github.com/Marwes/combine/issues/99))
+* **doc:** regex find consumes input until the end of the first match ([d1bbf1d4](https://github.com/Marwes/combine/commit/d1bbf1d4198cb71d9c4b9e6d13399e38078518f0))
+
+
+
+<a name="v2.3.0"></a>
+## v2.3.0 (2017-02-22)
+
+
+#### Performance
+
+* Don't call parse_stream in optional ([a4bf28d2](a4bf28d2))
+
+#### Features
+
+* Add the choice! macro ([6f2cec69](6f2cec69))
+* Add map functions for Error<> and Info<> ranges. (#86)
+* Add Parser::boxed ([3af9c9b3](3af9c9b3))
+
+<a name="2.1.0"></a>
+## 2.1.0 (2016-10-30)
+
+
+#### Features
+
+* Add a read adapter for the stream trait ([a2a9f214](a2a9f214))
+
+
+
+<a name="2.0.0"></a>
+## 2.0.0 (2016-10-19)
+
+
+#### Features
+
+* Version 2.0.0 ([80b24186](https://github.com/Marwes/combine/commit/80b24186fb4854d3242f32abc727107545e08c7b))
+* Add the count parser ([a7949f3a](https://github.com/Marwes/combine/commit/a7949f3aef8585523e730e2c1224c3725b360d32))
+* Add the Parser::by_ref method ([15554d0c](https://github.com/Marwes/combine/commit/15554d0c64a2415e8c234708595cc544ada6c585))
+* Add the one_of and none_of parsers ([941b277c](https://github.com/Marwes/combine/commit/941b277c8f4d8e8af804c88678181be7743f912b))
+* Add the position parser ([d6c65f6d](https://github.com/Marwes/combine/commit/d6c65f6da5a2af47254abe2db4b04c3ecbd74803))
+* Add bytes_cmp and string_cmp ([ee6b430d](https://github.com/Marwes/combine/commit/ee6b430d17508daf305d5f48fabae2d662a94d34))
+* Add the `tokens` parser ([886c4523](https://github.com/Marwes/combine/commit/886c45235be207241874a0a412ebcc0733959466))
+* Version 2.0.0-beta3 ([55c59322](https://github.com/Marwes/combine/commit/55c59322f8ead037dad703a41e1f6d769c059f31))
+* Break out the error formatting into a separate function ([b6ccb0c1](https://github.com/Marwes/combine/commit/b6ccb0c1807f0f182878b68d4dbdcfa739fd5157))
+* Rename parse_state to parse_stream ([b375df48](https://github.com/Marwes/combine/commit/b375df4811570d14bbd8db7cb74a6834e54679cf))
+* Simplify the flat_map parser ([08a91ce2](https://github.com/Marwes/combine/commit/08a91ce201b67f5528a18228bdfb079e7d86dd7f))
+* Merge the ParserExt trait into Parser ([26a84154](https://github.com/Marwes/combine/commit/26a841540107b79542bb874a60abb83f99c78a58))
+* Add the bytes parser ([9c73c053](https://github.com/Marwes/combine/commit/9c73c053f37b149c35d60377f6dcbbbfc145dda9))
+* Add parsers specialized on byte streams ([01ba3759](https://github.com/Marwes/combine/commit/01ba375929daac2cb81a3e966e529f0909014620))
+* Make ctry usable outside the crate ([f45740dd](https://github.com/Marwes/combine/commit/f45740dd71cf9c71e0900e932c2f10ccbefae35e))
+* Add versions of parse_* which return an unpacked version of ParseResult ([2bbd14ab](https://github.com/Marwes/combine/commit/2bbd14abd2b372afbfda56fb73d4aa036bd427e1))
+* Add the satisfy_map parser ([4d97d296](https://github.com/Marwes/combine/commit/4d97d2968c48026e8369e1f0bcee3c6ef5784664))
+* Replace the And parser with the pair parser ([b1f56113](https://github.com/Marwes/combine/commit/b1f561139169caa1a5a2e3e2d84248b28f22bb82))
+* Remove reexport of the char module from the root module ([e39dacb5](https://github.com/Marwes/combine/commit/e39dacb57999c3cfb0bb4ae6d5db0b696da60a3f))
+* Version 2.0.0-beta ([5bdbf584](https://github.com/Marwes/combine/commit/5bdbf58484800717c7d7c20b9161562520f425cb))
+* Remove the buffered_stream feature ([3fdbf217](https://github.com/Marwes/combine/commit/3fdbf217ec0a66b052b8d11792ce3ff3d13b7463))
+* Version 1.3.0 ([acea26cd](https://github.com/Marwes/combine/commit/acea26cda536ffc681ca4fa9e4c1bf28f5184582))
+* Add the eof parser ([6a89cbf2](https://github.com/Marwes/combine/commit/6a89cbf2ef11ed5bf4145a296c208e5f5f90438c))
+* Stabilize RangeStream and all functions using it ([d932375d](https://github.com/Marwes/combine/commit/d932375d13a196fc74602f8e76ad5bd3512ca370))
+* Reexport Stream and StreamOnce from the crate root ([2c2b3f5c](https://github.com/Marwes/combine/commit/2c2b3f5cd21a04fbc157a95ce76fe72bfdc1a2c3))
+* Merge the HasPosition trait into StreamOnce ([3bda4a16](https://github.com/Marwes/combine/commit/3bda4a163e8f3b57dd4efa65384c97f9c3554aeb))
+* Add the StreamOnce trait ([9ea0ed5d](https://github.com/Marwes/combine/commit/9ea0ed5d6c8f8cead773a24b968d4a0bbb606721), breaks [#](https://github.com/Marwes/combine/issues/))
+* Make Stream::uncons take &mut self ([4ddc4257](https://github.com/Marwes/combine/commit/4ddc4257d1e719a9f1c17a49c39f08ebf20d2999))
+* Separate the Position type and position method from Stream ([9cfb9a89](https://github.com/Marwes/combine/commit/9cfb9a895be34b288ee9fc9f926cd1b9c5b97b03))
+* Version 1.2.1 ([f737af27](https://github.com/Marwes/combine/commit/f737af27306160088188900a1cdad255b5ca58d3))
+* Move the position handling inside the Stream trait ([f41f65e9](https://github.com/Marwes/combine/commit/f41f65e9f34b64481f81af078ecdb10a80e75f6f))
+* **range_stream:** Implement RangeStream on State ([f5679dc9](https://github.com/Marwes/combine/commit/f5679dc954be093a7a0278d2311cf5a162396833))
+
+#### Performance
+
+* Specialize and_then, then and flat_map ([9dc7dc6b](https://github.com/Marwes/combine/commit/9dc7dc6b9bcb638888be448efb7002d362aded16))
+* Specialize the tuple parser to avoid unnecessary branches ([2b294f80](https://github.com/Marwes/combine/commit/2b294f8009021897d9652981dfb107dd2102a902))
+* Add inline annotations and more forwarding parse functions ([0e5ee38e](https://github.com/Marwes/combine/commit/0e5ee38e1b15847908f6676c0c4032dc844e3462))
+* Avoid indirection in Skip and With ([52d335ca](https://github.com/Marwes/combine/commit/52d335caa2e698de9be50e46e8fbcf241d4e3081))
+* Optimize Iter by simplifying the state machine ([9631700a](https://github.com/Marwes/combine/commit/9631700a306cb5546e37dfb8f05d54728fb3bc8c))
+* Speedup tuple parsers by simplifying the expanded code ([5d86dcf2](https://github.com/Marwes/combine/commit/5d86dcf2d14f1cae078d1a4b8831d37041eaf7a2))
+* Avoid creating an error when take_while1 parses no input ([9bad15c0](https://github.com/Marwes/combine/commit/9bad15c0f79e3ff897fb92cdca6b92f988c69347))
+* Possibly improve performance of the RangeStream impl for &str ([abb1de7f](https://github.com/Marwes/combine/commit/abb1de7f15b65b9bc2c40572319269191bd0819f))
+
+#### Bug Fixes
+
+* Rename the String parser to Str ([d846bf0e](https://github.com/Marwes/combine/commit/d846bf0e7ddb3350ce9245b3682d7c054ff5cdd8))
+* Use five copies in the large http test anyway to match nom_benchmarks ([eb089f5b](https://github.com/Marwes/combine/commit/eb089f5bef175b96e097286b9c8c3e7d5f6e3922))
+* Avoid storing the position in primitives::uncons_while ([9912507a](https://github.com/Marwes/combine/commit/9912507a80e178737e16d4ff3d19d7a1fee9fbc8))
+* Calling uncons_range with the same size as is remaining should succeed ([cce6214e](https://github.com/Marwes/combine/commit/cce6214ed4722880881c8c6998e00f4509a22588))
+* Add Sync to the Error::Other variant ([22add3ec](https://github.com/Marwes/combine/commit/22add3eca62ff5e6f4d58122a4b366290b1d9385))
+* Fix positions of BufferedStream being for the next token ([66eab92a](https://github.com/Marwes/combine/commit/66eab92a7dd63269f48cf0fbd0722a6eeea9135d))
+* Fix the position handling of BufferedStream ([f21148b3](https://github.com/Marwes/combine/commit/f21148b3c4c5c6f10d8b6d90ce4a7925596879b3))
+* Remove the Positioner bound from Stream::Token and Stream::Range ([fba3f1e7](https://github.com/Marwes/combine/commit/fba3f1e760505305b6a586b6ff5a53eff645e1d1))
+* **buffered_stream:** BufferedStream no longer emits the last token after EOF ([6532884c](https://github.com/Marwes/combine/commit/6532884cc16307e1753584dd40b2b59e3daa6267))
+* **travis:**
+ * Dont pass the test feature to travis ([382a608d](https://github.com/Marwes/combine/commit/382a608da2851c5cc2d3477025951e9a133732bc))
+ * Add travis_wait so travis does not time out the beta builds ([a3f0792a](https://github.com/Marwes/combine/commit/a3f0792ab347805e3f0ce619997a2c154f5e8c87))
+
+#### Breaking Changes
+
+* Add the StreamOnce trait ([9ea0ed5d](https://github.com/Marwes/combine/commit/9ea0ed5d6c8f8cead773a24b968d4a0bbb606721), breaks [#](https://github.com/Marwes/combine/issues/))
diff --git a/Cargo.lock b/Cargo.lock
new file mode 100644
index 0000000..deacbb0
--- /dev/null
+++ b/Cargo.lock
@@ -0,0 +1,1436 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+[[package]]
+name = "aho-corasick"
+version = "0.7.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7404febffaa47dac81aa44dba71523c9d069b1bdc50a77db41195149e17f68e5"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "async-channel"
+version = "1.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59740d83946db6a5af71ae25ddf9562c2b176b2ca42cf99a455f09f4a220d6b9"
+dependencies = [
+ "concurrent-queue",
+ "event-listener",
+ "futures-core",
+]
+
+[[package]]
+name = "async-executor"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eb877970c7b440ead138f6321a3b5395d6061183af779340b65e20c0fede9146"
+dependencies = [
+ "async-task",
+ "concurrent-queue",
+ "fastrand",
+ "futures-lite",
+ "once_cell",
+ "vec-arena",
+]
+
+[[package]]
+name = "async-global-executor"
+version = "1.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73079b49cd26b8fd5a15f68fc7707fc78698dc2a3d61430f2a7a9430230dfa04"
+dependencies = [
+ "async-executor",
+ "async-io",
+ "futures-lite",
+ "num_cpus",
+ "once_cell",
+]
+
+[[package]]
+name = "async-io"
+version = "1.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9315f8f07556761c3e48fec2e6b276004acf426e6dc068b2c2251854d65ee0fd"
+dependencies = [
+ "concurrent-queue",
+ "fastrand",
+ "futures-lite",
+ "libc",
+ "log",
+ "nb-connect",
+ "once_cell",
+ "parking",
+ "polling",
+ "vec-arena",
+ "waker-fn",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "async-mutex"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "479db852db25d9dbf6204e6cb6253698f175c15726470f78af0d918e99d6156e"
+dependencies = [
+ "event-listener",
+]
+
+[[package]]
+name = "async-std"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f9f84f1280a2b436a2c77c2582602732b6c2f4321d5494d6e799e6c367859a8"
+dependencies = [
+ "async-channel",
+ "async-global-executor",
+ "async-io",
+ "async-mutex",
+ "blocking",
+ "crossbeam-utils",
+ "futures-channel",
+ "futures-core",
+ "futures-io",
+ "futures-lite",
+ "gloo-timers",
+ "kv-log-macro",
+ "log",
+ "memchr",
+ "num_cpus",
+ "once_cell",
+ "pin-project-lite 0.2.0",
+ "pin-utils",
+ "slab",
+ "wasm-bindgen-futures",
+]
+
+[[package]]
+name = "async-stream"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3670df70cbc01729f901f94c887814b3c68db038aad1329a418bae178bc5295c"
+dependencies = [
+ "async-stream-impl",
+ "futures-core",
+]
+
+[[package]]
+name = "async-stream-impl"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3548b8efc9f8e8a5a0a2808c5bd8451a9031b9e5b879a79590304ae928b0a70"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "async-task"
+version = "4.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e91831deabf0d6d7ec49552e489aed63b7456a7a3c46cff62adad428110b0af0"
+
+[[package]]
+name = "atomic-waker"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "065374052e7df7ee4047b1160cca5e1467a12351a40b3da123c870ba0b8eda2a"
+
+[[package]]
+name = "atty"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+dependencies = [
+ "hermit-abi",
+ "libc",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
+
+[[package]]
+name = "bitflags"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
+
+[[package]]
+name = "blocking"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c5e170dbede1f740736619b776d7251cb1b9095c435c34d8ca9f57fcd2f335e9"
+dependencies = [
+ "async-channel",
+ "async-task",
+ "atomic-waker",
+ "fastrand",
+ "futures-lite",
+ "once_cell",
+]
+
+[[package]]
+name = "bstr"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "473fc6b38233f9af7baa94fb5852dca389e3d95b8e21c8e3719301462c5d9faf"
+dependencies = [
+ "lazy_static",
+ "memchr",
+ "regex-automata",
+ "serde",
+]
+
+[[package]]
+name = "bumpalo"
+version = "3.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e8c087f005730276d1096a652e92a8bacee2e2472bcc9715a74d2bec38b5820"
+
+[[package]]
+name = "byteorder"
+version = "1.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
+
+[[package]]
+name = "bytes"
+version = "0.4.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c"
+dependencies = [
+ "byteorder",
+ "iovec",
+]
+
+[[package]]
+name = "bytes"
+version = "0.5.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38"
+
+[[package]]
+name = "bytes"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ad1f8e949d755f9d79112b5bb46938e0ef9d3804a0b16dfab13aafcaa5f0fa72"
+
+[[package]]
+name = "cache-padded"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "631ae5198c9be5e753e5cc215e1bd73c2b466a3565173db433f52bb9d3e66dba"
+
+[[package]]
+name = "cast"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4b9434b9a5aa1450faa3f9cb14ea0e8c53bb5d2b3c1bfd1ab4fc03e9f33fbfb0"
+dependencies = [
+ "rustc_version",
+]
+
+[[package]]
+name = "cc"
+version = "1.0.66"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48"
+
+[[package]]
+name = "cfg-if"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "clap"
+version = "2.33.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002"
+dependencies = [
+ "bitflags",
+ "textwrap",
+ "unicode-width",
+]
+
+[[package]]
+name = "combine"
+version = "4.6.0"
+dependencies = [
+ "async-std",
+ "bytes 0.5.6",
+ "bytes 1.0.0",
+ "criterion",
+ "futures 0.3.8",
+ "futures-io",
+ "futures-util",
+ "memchr",
+ "once_cell",
+ "partial-io",
+ "pin-project-lite 0.2.0",
+ "quick-error",
+ "quickcheck",
+ "regex",
+ "tokio 0.2.24",
+ "tokio 0.3.6",
+ "tokio 1.0.0",
+ "tokio-util",
+]
+
+[[package]]
+name = "concurrent-queue"
+version = "1.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "30ed07550be01594c6026cff2a1d7fe9c8f683caa798e12b68694ac9e88286a3"
+dependencies = [
+ "cache-padded",
+]
+
+[[package]]
+name = "const_fn"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cd51eab21ab4fd6a3bf889e2d0958c0a6e3a61ad04260325e919e652a2a62826"
+
+[[package]]
+name = "criterion"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70daa7ceec6cf143990669a04c7df13391d55fb27bd4079d252fca774ba244d8"
+dependencies = [
+ "atty",
+ "cast",
+ "clap",
+ "criterion-plot",
+ "csv",
+ "itertools",
+ "lazy_static",
+ "num-traits",
+ "oorandom",
+ "plotters",
+ "rayon",
+ "regex",
+ "serde",
+ "serde_cbor",
+ "serde_derive",
+ "serde_json",
+ "tinytemplate",
+ "walkdir",
+]
+
+[[package]]
+name = "criterion-plot"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e022feadec601fba1649cfa83586381a4ad31c6bf3a9ab7d408118b05dd9889d"
+dependencies = [
+ "cast",
+ "itertools",
+]
+
+[[package]]
+name = "crossbeam-channel"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dca26ee1f8d361640700bde38b2c37d8c22b3ce2d360e1fc1c74ea4b0aa7d775"
+dependencies = [
+ "cfg-if 1.0.0",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-deque"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94af6efb46fef72616855b036a624cf27ba656ffc9be1b9a3c931cfc7749a9a9"
+dependencies = [
+ "cfg-if 1.0.0",
+ "crossbeam-epoch",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-epoch"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1aaa739f95311c2c7887a76863f500026092fb1dce0161dab577e559ef3569d"
+dependencies = [
+ "cfg-if 1.0.0",
+ "const_fn",
+ "crossbeam-utils",
+ "lazy_static",
+ "memoffset",
+ "scopeguard",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d"
+dependencies = [
+ "autocfg",
+ "cfg-if 1.0.0",
+ "lazy_static",
+]
+
+[[package]]
+name = "csv"
+version = "1.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f9d58633299b24b515ac72a3f869f8b91306a3cec616a602843a383acd6f9e97"
+dependencies = [
+ "bstr",
+ "csv-core",
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "csv-core"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "either"
+version = "1.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
+
+[[package]]
+name = "env_logger"
+version = "0.5.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "15b0a4d2e39f8420210be8b27eeda28029729e2fd4291019455016c348240c38"
+dependencies = [
+ "atty",
+ "humantime",
+ "log",
+ "regex",
+ "termcolor",
+]
+
+[[package]]
+name = "event-listener"
+version = "2.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f7531096570974c3a9dcf9e4b8e1cede1ec26cf5046219fb3b9d897503b9be59"
+
+[[package]]
+name = "fastrand"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca5faf057445ce5c9d4329e382b2ce7ca38550ef3b73a5348362d5f24e0c7fe3"
+dependencies = [
+ "instant",
+]
+
+[[package]]
+name = "fuchsia-cprng"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
+
+[[package]]
+name = "fuchsia-zircon"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
+dependencies = [
+ "bitflags",
+ "fuchsia-zircon-sys",
+]
+
+[[package]]
+name = "fuchsia-zircon-sys"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
+
+[[package]]
+name = "futures"
+version = "0.1.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c7e4c2612746b0df8fed4ce0c69156021b704c9aefa360311c04e6e9e002eed"
+
+[[package]]
+name = "futures"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b3b0c040a1fe6529d30b3c5944b280c7f0dcb2930d2c3062bca967b602583d0"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-executor",
+ "futures-io",
+ "futures-sink",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-channel"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4b7109687aa4e177ef6fe84553af6280ef2778bdb7783ba44c9dc3399110fe64"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+]
+
+[[package]]
+name = "futures-core"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "847ce131b72ffb13b6109a221da9ad97a64cbe48feb1028356b836b47b8f1748"
+
+[[package]]
+name = "futures-executor"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4caa2b2b68b880003057c1dd49f1ed937e38f22fcf6c212188a121f08cf40a65"
+dependencies = [
+ "futures-core",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-io"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "611834ce18aaa1bd13c4b374f5d653e1027cf99b6b502584ff8c9a64413b30bb"
+
+[[package]]
+name = "futures-lite"
+version = "1.11.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b4481d0cd0de1d204a4fa55e7d45f07b1d958abcb06714b3446438e2eff695fb"
+dependencies = [
+ "fastrand",
+ "futures-core",
+ "futures-io",
+ "memchr",
+ "parking",
+ "pin-project-lite 0.2.0",
+ "waker-fn",
+]
+
+[[package]]
+name = "futures-macro"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77408a692f1f97bcc61dc001d752e00643408fbc922e4d634c655df50d595556"
+dependencies = [
+ "proc-macro-hack",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "futures-sink"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f878195a49cee50e006b02b93cf7e0a95a38ac7b776b4c4d9cc1207cd20fcb3d"
+
+[[package]]
+name = "futures-task"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7c554eb5bf48b2426c4771ab68c6b14468b6e76cc90996f528c3338d761a4d0d"
+dependencies = [
+ "once_cell",
+]
+
+[[package]]
+name = "futures-util"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d304cff4a7b99cfb7986f7d43fbe93d175e72e704a8860787cc95e9ffd85cbd2"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-io",
+ "futures-macro",
+ "futures-sink",
+ "futures-task",
+ "memchr",
+ "pin-project",
+ "pin-utils",
+ "proc-macro-hack",
+ "proc-macro-nested",
+ "slab",
+]
+
+[[package]]
+name = "gloo-timers"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47204a46aaff920a1ea58b11d03dec6f704287d27561724a4631e450654a891f"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "js-sys",
+ "wasm-bindgen",
+ "web-sys",
+]
+
+[[package]]
+name = "half"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d36fab90f82edc3c747f9d438e06cf0a491055896f2a279638bb5beed6c40177"
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "humantime"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f"
+dependencies = [
+ "quick-error",
+]
+
+[[package]]
+name = "instant"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61124eeebbd69b8190558df225adf7e4caafce0d743919e5d6b19652314ec5ec"
+dependencies = [
+ "cfg-if 1.0.0",
+]
+
+[[package]]
+name = "iovec"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "itertools"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b"
+dependencies = [
+ "either",
+]
+
+[[package]]
+name = "itoa"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc6f3ad7b9d11a0c00842ff8de1b60ee58661048eb8049ed33c73594f359d7e6"
+
+[[package]]
+name = "js-sys"
+version = "0.3.46"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf3d7383929f7c9c7c2d0fa596f325832df98c3704f2c60553080f7127a58175"
+dependencies = [
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "kernel32-sys"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
+dependencies = [
+ "winapi 0.2.8",
+ "winapi-build",
+]
+
+[[package]]
+name = "kv-log-macro"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f"
+dependencies = [
+ "log",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "libc"
+version = "0.2.81"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1482821306169ec4d07f6aca392a4681f66c75c9918aa49641a2595db64053cb"
+
+[[package]]
+name = "log"
+version = "0.4.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b"
+dependencies = [
+ "cfg-if 0.1.10",
+]
+
+[[package]]
+name = "memchr"
+version = "2.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525"
+
+[[package]]
+name = "memoffset"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "mio"
+version = "0.6.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4afd66f5b91bf2a3bc13fad0e21caedac168ca4c707504e75585648ae80e4cc4"
+dependencies = [
+ "cfg-if 0.1.10",
+ "fuchsia-zircon",
+ "fuchsia-zircon-sys",
+ "iovec",
+ "kernel32-sys",
+ "libc",
+ "log",
+ "miow",
+ "net2",
+ "slab",
+ "winapi 0.2.8",
+]
+
+[[package]]
+name = "miow"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ebd808424166322d4a38da87083bfddd3ac4c131334ed55856112eb06d46944d"
+dependencies = [
+ "kernel32-sys",
+ "net2",
+ "winapi 0.2.8",
+ "ws2_32-sys",
+]
+
+[[package]]
+name = "nb-connect"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8123a81538e457d44b933a02faf885d3fe8408806b23fa700e8f01c6c3a98998"
+dependencies = [
+ "libc",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "net2"
+version = "0.2.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "391630d12b68002ae1e25e8f974306474966550ad82dac6886fb8910c19568ae"
+dependencies = [
+ "cfg-if 0.1.10",
+ "libc",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "num-traits"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "num_cpus"
+version = "1.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
+dependencies = [
+ "hermit-abi",
+ "libc",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13bd41f508810a131401606d54ac32a467c97172d74ba7662562ebba5ad07fa0"
+
+[[package]]
+name = "oorandom"
+version = "11.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
+
+[[package]]
+name = "parking"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72"
+
+[[package]]
+name = "partial-io"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "682cf88dcd93492e8d17723b7ccc1ae2eeffd1d312ea3533c942aa8af7122a2d"
+dependencies = [
+ "futures 0.1.30",
+ "quickcheck",
+ "tokio-io",
+]
+
+[[package]]
+name = "pin-project"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ccc2237c2c489783abd8c4c80e5450fc0e98644555b1364da68cc29aa151ca7"
+dependencies = [
+ "pin-project-internal",
+]
+
+[[package]]
+name = "pin-project-internal"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8e8d2bf0b23038a4424865103a4df472855692821aab4e4f5c3312d461d9e5f"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "pin-project-lite"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c917123afa01924fc84bb20c4c03f004d9c38e5127e3c039bbf7f4b9c76a2f6b"
+
+[[package]]
+name = "pin-project-lite"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6b063f57ec186e6140e2b8b6921e5f1bd89c7356dda5b33acc5401203ca6131c"
+
+[[package]]
+name = "pin-utils"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
+
+[[package]]
+name = "plotters"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d1685fbe7beba33de0330629da9d955ac75bd54f33d7b79f9a895590124f6bb"
+dependencies = [
+ "js-sys",
+ "num-traits",
+ "wasm-bindgen",
+ "web-sys",
+]
+
+[[package]]
+name = "polling"
+version = "2.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2a7bc6b2a29e632e45451c941832803a18cce6781db04de8a04696cdca8bde4"
+dependencies = [
+ "cfg-if 0.1.10",
+ "libc",
+ "log",
+ "wepoll-sys",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "proc-macro-hack"
+version = "0.5.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
+
+[[package]]
+name = "proc-macro-nested"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eba180dafb9038b050a4c280019bbedf9f2467b61e5d892dcad585bb57aadc5a"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
+dependencies = [
+ "unicode-xid",
+]
+
+[[package]]
+name = "quick-error"
+version = "1.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
+
+[[package]]
+name = "quickcheck"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c01babc5ffd48a2a83744b3024814bb46dfd4f2a4705ccb44b1b60e644fdcab7"
+dependencies = [
+ "env_logger",
+ "log",
+ "rand",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "991431c3519a3f36861882da93630ce66b52918dcf1b8e2fd66b397fc96f28df"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rand"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293"
+dependencies = [
+ "fuchsia-cprng",
+ "libc",
+ "rand_core 0.3.1",
+ "rdrand",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
+dependencies = [
+ "rand_core 0.4.2",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc"
+
+[[package]]
+name = "rayon"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b0d8e0819fadc20c74ea8373106ead0600e3a67ef1fe8da56e39b9ae7275674"
+dependencies = [
+ "autocfg",
+ "crossbeam-deque",
+ "either",
+ "rayon-core",
+]
+
+[[package]]
+name = "rayon-core"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ab346ac5921dc62ffa9f89b7a773907511cdfa5490c572ae9be1be33e8afa4a"
+dependencies = [
+ "crossbeam-channel",
+ "crossbeam-deque",
+ "crossbeam-utils",
+ "lazy_static",
+ "num_cpus",
+]
+
+[[package]]
+name = "rdrand"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
+dependencies = [
+ "rand_core 0.3.1",
+]
+
+[[package]]
+name = "regex"
+version = "1.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38cf2c13ed4745de91a5eb834e11c00bcc3709e773173b2ce4c56c9fbde04b9c"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+ "thread_local",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ae1ded71d66a4a97f5e961fd0cb25a5f366a42a41570d16a763a69c092c26ae4"
+dependencies = [
+ "byteorder",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3b181ba2dcf07aaccad5448e8ead58db5b742cf85dfe035e2227f137a539a189"
+
+[[package]]
+name = "rustc_version"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
+dependencies = [
+ "semver",
+]
+
+[[package]]
+name = "ryu"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e"
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "scopeguard"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+
+[[package]]
+name = "semver"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
+dependencies = [
+ "semver-parser",
+]
+
+[[package]]
+name = "semver-parser"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
+
+[[package]]
+name = "serde"
+version = "1.0.118"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06c64263859d87aa2eb554587e2d23183398d617427327cf2b3d0ed8c69e4800"
+
+[[package]]
+name = "serde_cbor"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e18acfa2f90e8b735b2836ab8d538de304cbb6729a7360729ea5a895d15a622"
+dependencies = [
+ "half",
+ "serde",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.118"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c84d3526699cd55261af4b941e4e725444df67aa4f9e6a3564f18030d12672df"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.60"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1500e84d27fe482ed1dc791a56eddc2f230046a040fa908c08bda1d9fb615779"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "slab"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
+
+[[package]]
+name = "syn"
+version = "1.0.55"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a571a711dddd09019ccc628e1b17fe87c59b09d513c06c026877aa708334f37a"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-xid",
+]
+
+[[package]]
+name = "termcolor"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "textwrap"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
+dependencies = [
+ "unicode-width",
+]
+
+[[package]]
+name = "thread_local"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14"
+dependencies = [
+ "lazy_static",
+]
+
+[[package]]
+name = "tinytemplate"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d3dc76004a03cec1c5932bca4cdc2e39aaa798e3f82363dd94f9adf6098c12f"
+dependencies = [
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "tokio"
+version = "0.2.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "099837d3464c16a808060bb3f02263b412f6fafcb5d01c533d309985fbeebe48"
+dependencies = [
+ "bytes 0.5.6",
+ "lazy_static",
+ "memchr",
+ "mio",
+ "pin-project-lite 0.1.11",
+ "slab",
+ "tokio-macros 0.2.6",
+]
+
+[[package]]
+name = "tokio"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "720ba21c25078711bf456d607987d95bce90f7c3bea5abe1db587862e7a1e87c"
+dependencies = [
+ "autocfg",
+ "num_cpus",
+ "pin-project-lite 0.2.0",
+ "slab",
+ "tokio-macros 0.3.2",
+]
+
+[[package]]
+name = "tokio"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9f4bfdcbd00fa893ac0549b38aa27080636a0104b0d0c38475a99439405e1df8"
+dependencies = [
+ "autocfg",
+ "bytes 1.0.0",
+ "memchr",
+ "num_cpus",
+ "pin-project-lite 0.2.0",
+ "tokio-macros 1.0.0",
+]
+
+[[package]]
+name = "tokio-io"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57fc868aae093479e3131e3d165c93b1c7474109d13c90ec0dda2a1bbfff0674"
+dependencies = [
+ "bytes 0.4.12",
+ "futures 0.1.30",
+ "log",
+]
+
+[[package]]
+name = "tokio-macros"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e44da00bfc73a25f814cd8d7e57a68a5c31b74b3152a0a1d1f590c97ed06265a"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "tokio-macros"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "46dfffa59fc3c8aad216ed61bdc2c263d2b9d87a9c8ac9de0c11a813e51b6db7"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "tokio-macros"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42517d2975ca3114b22a16192634e8241dc5cc1f130be194645970cc1c371494"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "tokio-stream"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f3be913b74b13210c8fe04b17ab833f5a124f45b93d0f99f59fff621f64392a"
+dependencies = [
+ "async-stream",
+ "futures-core",
+ "pin-project-lite 0.2.0",
+ "tokio 1.0.0",
+]
+
+[[package]]
+name = "tokio-util"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "36135b7e7da911f5f8b9331209f7fab4cc13498f3fff52f72a710c78187e3148"
+dependencies = [
+ "bytes 1.0.0",
+ "futures-core",
+ "futures-sink",
+ "log",
+ "pin-project-lite 0.2.0",
+ "tokio 1.0.0",
+ "tokio-stream",
+]
+
+[[package]]
+name = "unicode-width"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
+
+[[package]]
+name = "vec-arena"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eafc1b9b2dfc6f5529177b62cf806484db55b32dc7c9658a118e11bbeb33061d"
+
+[[package]]
+name = "waker-fn"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca"
+
+[[package]]
+name = "walkdir"
+version = "2.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d"
+dependencies = [
+ "same-file",
+ "winapi 0.3.9",
+ "winapi-util",
+]
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3cd364751395ca0f68cafb17666eee36b63077fb5ecd972bbcd74c90c4bf736e"
+dependencies = [
+ "cfg-if 1.0.0",
+ "wasm-bindgen-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-backend"
+version = "0.2.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1114f89ab1f4106e5b55e688b828c0ab0ea593a1ea7c094b141b14cbaaec2d62"
+dependencies = [
+ "bumpalo",
+ "lazy_static",
+ "log",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-futures"
+version = "0.4.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1fe9756085a84584ee9457a002b7cdfe0bfff169f45d2591d8be1345a6780e35"
+dependencies = [
+ "cfg-if 1.0.0",
+ "js-sys",
+ "wasm-bindgen",
+ "web-sys",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a6ac8995ead1f084a8dea1e65f194d0973800c7f571f6edd70adf06ecf77084"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b5a48c72f299d80557c7c62e37e7225369ecc0c963964059509fbafe917c7549"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-backend",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e7811dd7f9398f14cc76efd356f98f03aa30419dea46aa810d71e819fc97158"
+
+[[package]]
+name = "web-sys"
+version = "0.3.46"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "222b1ef9334f92a21d3fb53dc3fd80f30836959a90f9274a626d7e06315ba3c3"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "wepoll-sys"
+version = "3.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fcb14dea929042224824779fbc82d9fab8d2e6d3cbc0ac404de8edf489e77ff"
+dependencies = [
+ "cc",
+]
+
+[[package]]
+name = "winapi"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-build"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "ws2_32-sys"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
+dependencies = [
+ "winapi 0.2.8",
+ "winapi-build",
+]
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000..a3d9dd8
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,178 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+edition = "2018"
+name = "combine"
+version = "4.6.0"
+authors = ["Markus Westerlind <marwes91@gmail.com>"]
+description = "Fast parser combinators on arbitrary streams with zero-copy support."
+documentation = "https://docs.rs/combine"
+readme = "README.md"
+keywords = ["parser", "parsing", "combinators", "ll"]
+categories = ["parsing", "no-std"]
+license = "MIT"
+repository = "https://github.com/Marwes/combine"
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--cfg", "docsrs"]
+[profile.bench]
+lto = true
+codegen-units = 1
+
+[lib]
+name = "combine"
+path = "src/lib.rs"
+
+[[example]]
+name = "async"
+required-features = ["std"]
+
+[[example]]
+name = "date"
+
+[[example]]
+name = "number"
+
+[[example]]
+name = "readme"
+
+[[example]]
+name = "ini"
+
+[[test]]
+name = "async"
+required-features = ["tokio-02", "futures-util-03"]
+
+[[bench]]
+name = "json"
+harness = false
+
+[[bench]]
+name = "http"
+harness = false
+
+[[bench]]
+name = "mp4"
+harness = false
+required-features = ["mp4"]
+[dependencies.bytes]
+version = "1"
+optional = true
+
+[dependencies.bytes_05]
+version = "0.5"
+optional = true
+package = "bytes"
+
+[dependencies.futures-io-03]
+version = "0.3.1"
+optional = true
+default-features = false
+package = "futures-io"
+
+[dependencies.futures-util-03]
+version = "0.3.1"
+features = ["io", "std"]
+optional = true
+default-features = false
+package = "futures-util"
+
+[dependencies.memchr]
+version = "2.2"
+default-features = false
+
+[dependencies.pin-project-lite]
+version = "0.2"
+optional = true
+
+[dependencies.regex]
+version = "1"
+optional = true
+
+[dependencies.tokio-02-dep]
+version = "0.2.3"
+features = ["io-util"]
+optional = true
+default-features = false
+package = "tokio"
+
+[dependencies.tokio-03-dep]
+version = "0.3"
+optional = true
+default-features = false
+package = "tokio"
+
+[dependencies.tokio-dep]
+version = "1"
+optional = true
+default-features = false
+package = "tokio"
+[dev-dependencies.async-std]
+version = "1"
+
+[dev-dependencies.bytes]
+version = "1"
+
+[dev-dependencies.bytes_05]
+version = "0.5"
+package = "bytes"
+
+[dev-dependencies.criterion]
+version = "0.3"
+default-features = false
+
+[dev-dependencies.futures-03-dep]
+version = "0.3.1"
+package = "futures"
+
+[dev-dependencies.once_cell]
+version = "1.0"
+
+[dev-dependencies.partial-io]
+version = "0.3"
+features = ["tokio", "quickcheck"]
+
+[dev-dependencies.quick-error]
+version = "1.0"
+
+[dev-dependencies.quickcheck]
+version = "0.6"
+
+[dev-dependencies.tokio-02-dep]
+version = "0.2"
+features = ["fs", "io-driver", "io-util", "macros"]
+package = "tokio"
+
+[dev-dependencies.tokio-03-dep]
+version = "0.3"
+features = ["fs", "macros", "rt-multi-thread"]
+package = "tokio"
+
+[dev-dependencies.tokio-dep]
+version = "1"
+features = ["fs", "macros", "rt", "rt-multi-thread", "io-util"]
+package = "tokio"
+
+[dev-dependencies.tokio-util]
+version = "0.6"
+features = ["codec"]
+
+[features]
+default = ["std"]
+futures-03 = ["pin-project", "std", "futures-io-03", "futures-util-03", "pin-project-lite"]
+mp4 = []
+pin-project = ["pin-project-lite"]
+std = ["memchr/use_std", "bytes"]
+tokio = ["tokio-dep", "futures-util-03", "pin-project-lite"]
+tokio-02 = ["pin-project", "std", "tokio-02-dep", "futures-util-03", "pin-project-lite", "bytes_05"]
+tokio-03 = ["pin-project", "std", "tokio-03-dep", "futures-util-03", "pin-project-lite"]
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
new file mode 100644
index 0000000..a4448e1
--- /dev/null
+++ b/Cargo.toml.orig
@@ -0,0 +1,104 @@
+[package]
+name = "combine"
+version = "4.6.0"
+authors = ["Markus Westerlind <marwes91@gmail.com>"]
+
+description = "Fast parser combinators on arbitrary streams with zero-copy support."
+
+repository = "https://github.com/Marwes/combine"
+documentation = "https://docs.rs/combine"
+
+readme = "README.md"
+
+keywords = ["parser", "parsing", "combinators", "ll"]
+
+categories = ["parsing", "no-std"]
+
+license = "MIT"
+
+edition = "2018"
+
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--cfg", "docsrs"]
+
+[lib]
+name = "combine"
+path = "src/lib.rs"
+
+[dependencies]
+regex = { version = "1", optional = true }
+memchr = { version = "2.2", default-features = false }
+pin-project-lite = { version = "0.2", optional = true }
+# Future proofing so that tokio-0.3, tokio-0.1 etc can be supported
+tokio-02-dep = { version = "0.2.3", package = "tokio", features = ["io-util"], default-features = false, optional = true }
+tokio-03-dep = { version = "0.3", package = "tokio", default-features = false, optional = true }
+tokio-dep = { version = "1", package = "tokio", default-features = false, optional = true }
+futures-io-03 = { version = "0.3.1", package = "futures-io", default-features = false, optional = true }
+futures-util-03 = { version = "0.3.1", package = "futures-util", features = ["io", "std"], default-features = false, optional = true }
+bytes_05 = { version = "0.5", package = "bytes", optional = true }
+bytes = { version = "1", optional = true }
+
+[dev-dependencies]
+async-std = "1"
+bytes_05 = { version = "0.5", package = "bytes" }
+bytes = "1"
+criterion = { version = "0.3", default-features = false }
+once_cell = "1.0"
+futures-03-dep = { version = "0.3.1", package = "futures" }
+tokio-02-dep = { version = "0.2", features = ["fs", "io-driver", "io-util", "macros"], package = "tokio" }
+tokio-03-dep = { version = "0.3", features = ["fs", "macros", "rt-multi-thread"], package = "tokio" }
+tokio-dep = { version = "1", features = ["fs", "macros", "rt", "rt-multi-thread", "io-util"], package = "tokio" }
+tokio-util = { version = "0.6", features = ["codec"] }
+partial-io = { version = "0.3", features = ["tokio", "quickcheck"] }
+quickcheck = "0.6"
+quick-error = "1.0"
+# End of dev-dependencies
+
+[features]
+default = ["std"]
+# Run the mp4 benchmark, requires a mp4 file named `small.mp4` in the benches directory
+mp4 = []
+pin-project = ["pin-project-lite"]
+tokio-02 = ["pin-project", "std", "tokio-02-dep", "futures-util-03", "pin-project-lite", "bytes_05"]
+tokio-03 = ["pin-project", "std", "tokio-03-dep", "futures-util-03", "pin-project-lite"]
+tokio = ["tokio-dep", "futures-util-03", "pin-project-lite"]
+futures-03 = ["pin-project", "std", "futures-io-03", "futures-util-03", "pin-project-lite"]
+std = ["memchr/use_std", "bytes"]
+
+[[test]]
+name = "async"
+required-features = ["tokio-02", "futures-util-03"]
+
+[[bench]]
+name = "json"
+harness = false
+
+[[bench]]
+name = "http"
+harness = false
+
+[[bench]]
+name = "mp4"
+harness = false
+required-features = ["mp4"]
+
+[[example]]
+name = "async"
+required-features = ["std"]
+
+[[example]]
+name = "date"
+
+[[example]]
+name = "number"
+
+[[example]]
+name = "readme"
+
+[[example]]
+name = "ini"
+
+[profile.bench]
+lto = true
+codegen-units = 1
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..09677ea
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,22 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Markus Westerlind
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..378608f
--- /dev/null
+++ b/METADATA
@@ -0,0 +1,19 @@
+name: "combine"
+description: "Fast parser combinators on arbitrary streams with zero-copy support."
+third_party {
+ url {
+ type: HOMEPAGE
+ value: "https://crates.io/crates/combine"
+ }
+ url {
+ type: ARCHIVE
+ value: "https://static.crates.io/crates/combine/combine-4.6.0.crate"
+ }
+ version: "4.6.0"
+ license_type: NOTICE
+ last_upgrade_date {
+ year: 2021
+ month: 7
+ day: 30
+ }
+}
diff --git a/MODULE_LICENSE_MIT b/MODULE_LICENSE_MIT
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/MODULE_LICENSE_MIT
diff --git a/OWNERS b/OWNERS
new file mode 100644
index 0000000..45dc4dd
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1 @@
+include platform/prebuilts/rust:master:/OWNERS
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..fa68ccf
--- /dev/null
+++ b/README.md
@@ -0,0 +1,107 @@
+# combine
+[![Build Status](https://travis-ci.org/Marwes/combine.svg?branch=master)](https://travis-ci.org/Marwes/combine)
+[![Docs v3](https://docs.rs/combine/badge.svg?version=^3)](https://docs.rs/combine/^3)
+[![Docs](https://docs.rs/combine/badge.svg)](https://docs.rs/combine)
+[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/Marwes/combine?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)
+
+An implementation of parser combinators for Rust, inspired by the Haskell library [Parsec](https://hackage.haskell.org/package/parsec). As in Parsec, the parsers are [LL(1)](https://en.wikipedia.org/wiki/LL_parser) by default, but they can opt in to arbitrary lookahead using the [attempt combinator](https://docs.rs/combine/*/combine/fn.attempt.html).
+
+## Example
+
+```rust
+extern crate combine;
+use combine::{many1, Parser, sep_by};
+use combine::parser::char::{letter, space};
+
+// Construct a parser that parses *many* (and at least *1*) *letter*s
+let word = many1(letter());
+
+// Construct a parser that parses many *word*s where each word is *separated by* a (white)*space*
+let mut parser = sep_by(word, space())
+ // Combine can collect into any type implementing `Default + Extend` so we need to assist rustc
+ // by telling it that `sep_by` should collect into a `Vec` and `many1` should collect to a `String`
+ .map(|mut words: Vec<String>| words.pop());
+let result = parser.parse("Pick up that word!");
+// `parse` returns `Result` where `Ok` contains a tuple of the parsers output and any remaining input.
+assert_eq!(result, Ok((Some("word".to_string()), "!")));
+```
+
+A tutorial, as well as explanations of what goes on inside combine, can be found in [the wiki](https://github.com/Marwes/combine/wiki).
+
+Larger examples can be found in the [examples][], [tests][] and [benches][] folders.
+
+[examples]:https://github.com/Marwes/combine/tree/master/examples
+[tests]:https://github.com/Marwes/combine/tree/master/tests
+[benches]:https://github.com/Marwes/combine/tree/master/benches
+
+## Links
+
+[Documentation and examples](https://docs.rs/crate/combine)
+
+[crates.io](https://crates.io/crates/combine)
+
+## Features
+
+* __Parse arbitrary streams__ - Combine can parse anything from `&[u8]` and `&str` to iterators and `Read` instances. If none of the built-in streams fits your use case, you can even implement a couple of traits yourself to create your own custom [stream](https://docs.rs/combine/3.*/combine/stream/index.html)!
+
+* __zero-copy parsing__ - When parsing in-memory data, combine can parse without copying. See the [range module](https://docs.rs/combine/3.*/combine/parser/range/index.html) for parsers specialized for zero-copy parsing (a small sketch follows this list).
+
+* __partial parsing__ - Combine parsers can be stopped at any point during parsing and later be resumed without losing any progress. This makes it possible to start parsing partial data coming from an I/O device such as a socket without worrying about whether enough data is present to complete the parse. If more data is needed, the parser will stop and may be resumed at the same point once more data is available. See the [async example](https://github.com/Marwes/combine/blob/master/examples/async.rs) for an example and [this post](https://marwes.github.io/2018/02/08/combine-3.html) for an introduction.
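+
+For the zero-copy point above, here is a minimal sketch (not part of the upstream README) of what a borrowing parser looks like, assuming the range parsers exported from `combine::parser::range`:
+
+```rust
+use combine::parser::range::take_while1;
+use combine::Parser;
+
+fn main() {
+    // `take_while1` over a `&str` yields a `&str` slice that borrows
+    // directly from the input, so no characters are copied.
+    let mut word = take_while1(|c: char| c.is_alphabetic());
+    assert_eq!(word.parse("hello world"), Ok(("hello", " world")));
+}
+```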
+
+## About
+
+A parser combinator is, broadly speaking, a function which takes several parsers as arguments and returns a new parser, created by combining those parsers. For instance, the [many](https://docs.rs/combine/*/combine/fn.many.html) parser takes one parser, `p`, as input and returns a new parser which applies `p` zero or more times. Thanks to the modularity that parser combinators give, it is possible to define parsers for a wide range of tasks without needing to implement the low-level plumbing, while still having the full power of Rust when you need it.
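+
+As a small illustration (not part of the upstream README) of the `many` combinator described above, assuming the combine 4.x API:
+
+```rust
+use combine::parser::char::digit;
+use combine::{many, Parser};
+
+fn main() {
+    // `many` takes the `digit` parser and returns a new parser that applies
+    // it zero or more times, collecting the matched characters into a `String`.
+    let mut digits = many(digit()).map(|ds: String| ds);
+    assert_eq!(digits.parse("123abc"), Ok(("123".to_string(), "abc")));
+}
+```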
+
+The library adheres to [semantic versioning](https://semver.org/).
+
+If you end up trying it, I welcome any feedback from your experience with it. I am usually reachable within a day by opening an issue, sending an email or posting a message on Gitter.
+
+## FAQ
+
+### Why do my errors contain inscrutable positions?
+
+Since `combine` aims to create parsers with little to no overhead, streams over `&str` and `&[T]` do not carry any extra position information; instead they only rely on comparing the pointer of the buffer to check which `Stream` is further ahead than another `Stream`. To retrieve a better position, either call `translate_position` on the `PointerOffset` which represents the position, or wrap your stream with `State`.
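+
+As a rough sketch (not part of the upstream README), assuming the combine 4.x API in which the `State` wrapper is available as `combine::stream::position::Stream`:
+
+```rust
+use combine::parser::char::letter;
+use combine::stream::position;
+use combine::{many1, Parser};
+
+fn main() {
+    let mut parser = many1(letter()).map(|word: String| word);
+
+    // Wrapping the input tracks line/column as parsing advances, so the
+    // returned error carries a readable position instead of a raw
+    // `PointerOffset` into the buffer.
+    let result = parser.parse(position::Stream::new("123"));
+    assert!(result.is_err());
+}
+```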
+
+### How does it compare to nom?
+
+https://github.com/Marwes/combine/issues/73 contains discussion and links to comparisons to [nom](https://github.com/Geal/nom).
+
+## Parsers written in combine
+
+### Formats and protocols
+
+* GraphQL https://github.com/graphql-rust/graphql-parser (Uses a custom tokenizer as input)
+* DiffX https://github.com/brennie/diffx-rs
+* Redis https://github.com/mitsuhiko/redis-rs/pull/141 (Uses partial parsing)
+* Toml https://github.com/ordian/toml_edit
+* Maker Interchange Format https://github.com/aidanhs/frametool (Uses combine as a lexer)
+* Javascript https://github.com/freemasen/ress
+* JPEG Metadata https://github.com/vadixidav/exifsd
+
+### Miscellaneous
+
+* Template language https://github.com/tailhook/trimmer
+* Code exercises https://github.com/dgel/adventOfCode2017
+* Programming language
+ * https://github.com/MaikKlein/spire-lang
+ * https://github.com/vadixidav/typeflow/tree/master/lang
+* Query parser (+ more) https://github.com/mozilla/mentat
+* Query parser https://github.com/tantivy-search/tantivy
+
+## Extra
+
+There is an additional crate which has parsers to lex and parse programming languages in [combine-language](https://github.com/Marwes/combine-language).
+
+You can find older versions of combine (parser-combinators) [here](https://crates.io/crates/parser-combinators).
+
+## Contributing
+
+Current master is the 3.0.0 branch. If you want to submit a fix or feature to the 2.x version of combine then
+do so against the 2.x branch, or submit the PR to master and request that it be backported.
+
+The easiest way to contribute is to just open an issue about any problems you encounter using combine, but if you are interested in adding something to the library, here is a list of some of the easier things to work on to get started.
+
+* __Add additional parsers__ If you have a suggestion for another parser, just open an issue or a PR with an implementation.
+* __Add additional examples__ More examples for using combine will always be useful!
+* __Add and improve the docs__ Not the fanciest of work, but one cannot overstate the importance of good documentation.
+
diff --git a/benches/data.json b/benches/data.json
new file mode 100644
index 0000000..7125c59
--- /dev/null
+++ b/benches/data.json
@@ -0,0 +1,317 @@
+[
+ {
+ "_id": "54d38af178bf1fbfe80a59e5",
+ "index": 0,
+ "guid": "cc631fea-7bf2-4595-950c-097880a818bc",
+ "isActive": false,
+ "balance": "$3,397.06",
+ "picture": "http://placehold.it/32x32",
+ "age": 39,
+ "eyeColor": "brown",
+ "name": "Corina Tyler",
+ "gender": "female",
+ "company": "GEEKOL",
+ "email": "corinatyler@geekol.com",
+ "phone": "+1 (899) 452-2754",
+ "address": "608 Leonard Street, Yettem, Connecticut, 6707",
+ "about": "Ea dolore pariatur aliqua veniam officia est et fugiat ipsum do sunt mollit id aute. Nisi ad elit ut et et. Duis in aliquip id labore ex et laboris anim magna proident Lorem est ut. Consectetur sint aliqua eu exercitation anim cupidatat fugiat. Enim exercitation amet ex irure quis anim est fugiat et laborum.\r\n",
+ "registered": "2014-03-27T05:29:11 -01:00",
+ "latitude": -57.157547,
+ "longitude": -23.548119,
+ "tags": [
+ "est",
+ "est",
+ "exercitation",
+ "ipsum",
+ "tempor",
+ "id",
+ "aliqua"
+ ],
+ "friends": [
+ {
+ "id": 0,
+ "name": "Bates Lynch"
+ },
+ {
+ "id": 1,
+ "name": "Duffy Townsend"
+ },
+ {
+ "id": 2,
+ "name": "Nelson Good"
+ }
+ ],
+ "greeting": "Hello, Corina Tyler! You have 8 unread messages.",
+ "favoriteFruit": "apple"
+ },
+ {
+ "_id": "54d38af1c9a8d04d5e35f211",
+ "index": 1,
+ "guid": "d8dd5231-048f-4a5a-b392-8f7c3492ad15",
+ "isActive": false,
+ "balance": "$1,646.89",
+ "picture": "http://placehold.it/32x32",
+ "age": 22,
+ "eyeColor": "brown",
+ "name": "Santos Boyd",
+ "gender": "male",
+ "company": "AUTOGRATE",
+ "email": "santosboyd@autograte.com",
+ "phone": "+1 (872) 545-2605",
+ "address": "428 Homecrest Avenue, Century, Kentucky, 1905",
+ "about": "Quis cillum mollit adipisicing duis sunt. Eiusmod culpa reprehenderit proident magna laborum voluptate incididunt et ipsum in laboris consectetur. Nostrud consequat excepteur nisi magna officia.\r\n",
+ "registered": "2015-02-03T03:03:32 -01:00",
+ "latitude": 47.505093,
+ "longitude": -138.153509,
+ "tags": [
+ "aliquip",
+ "et",
+ "eu",
+ "minim",
+ "non",
+ "eiusmod",
+ "deserunt"
+ ],
+ "friends": [
+ {
+ "id": 0,
+ "name": "Annmarie Larson"
+ },
+ {
+ "id": 1,
+ "name": "Cherie Potts"
+ },
+ {
+ "id": 2,
+ "name": "Catalina Bass"
+ }
+ ],
+ "greeting": "Hello, Santos Boyd! You have 1 unread messages.",
+ "favoriteFruit": "apple"
+ },
+ {
+ "_id": "54d38af171fedb95c2841d64",
+ "index": 2,
+ "guid": "f4b131d7-871f-4503-8674-275c4c8e3121",
+ "isActive": false,
+ "balance": "$3,311.55",
+ "picture": "http://placehold.it/32x32",
+ "age": 23,
+ "eyeColor": "green",
+ "name": "Jeannie Daugherty",
+ "gender": "female",
+ "company": "COMTRAK",
+ "email": "jeanniedaugherty@comtrak.com",
+ "phone": "+1 (853) 445-3806",
+ "address": "189 Narrows Avenue, Cascades, Arkansas, 5738",
+ "about": "Velit labore mollit in sint culpa dolor consequat voluptate cupidatat ut. Laborum dolore incididunt deserunt adipisicing aliquip nisi cupidatat. Aliqua cillum consequat voluptate nulla velit deserunt cillum do reprehenderit cupidatat quis labore anim nulla. Irure do nisi et ea excepteur culpa mollit aliqua occaecat id dolore ullamco ad. Magna enim consectetur ea ullamco tempor magna eu consequat sint irure. In Lorem est id est do anim ex.\r\n",
+ "registered": "2014-01-29T22:48:53 -01:00",
+ "latitude": 72.451611,
+ "longitude": -89.847471,
+ "tags": [
+ "consequat",
+ "deserunt",
+ "ipsum",
+ "commodo",
+ "magna",
+ "ut",
+ "ut"
+ ],
+ "friends": [
+ {
+ "id": 0,
+ "name": "Kenya Alford"
+ },
+ {
+ "id": 1,
+ "name": "Francesca Carpenter"
+ },
+ {
+ "id": 2,
+ "name": "Celina Petty"
+ }
+ ],
+ "greeting": "Hello, Jeannie Daugherty! You have 3 unread messages.",
+ "favoriteFruit": "strawberry"
+ },
+ {
+ "_id": "54d38af16b1001dc2ee7f7b0",
+ "index": 3,
+ "guid": "c905d47f-8e7a-488d-a1a0-df61bf1af2eb",
+ "isActive": true,
+ "balance": "$2,311.19",
+ "picture": "http://placehold.it/32x32",
+ "age": 39,
+ "eyeColor": "green",
+ "name": "Jordan Horton",
+ "gender": "female",
+ "company": "ENERSOL",
+ "email": "jordanhorton@enersol.com",
+ "phone": "+1 (910) 467-2211",
+ "address": "123 Harwood Place, Northchase, Guam, 5138",
+ "about": "Cupidatat aliqua nisi minim ad culpa cupidatat proident fugiat veniam aliquip minim anim et. Ullamco consequat consequat aute exercitation aliqua eiusmod sunt ea. Et laborum dolor ex proident do non adipisicing nostrud voluptate qui reprehenderit elit ad nostrud. Excepteur exercitation laborum nulla laboris incididunt. Est sunt nisi eu id fugiat excepteur. In sunt laborum aliquip aute ipsum.\r\n",
+ "registered": "2014-10-08T23:32:53 -02:00",
+ "latitude": -52.78747,
+ "longitude": -121.825343,
+ "tags": [
+ "voluptate",
+ "non",
+ "dolor",
+ "nisi",
+ "minim",
+ "mollit",
+ "minim"
+ ],
+ "friends": [
+ {
+ "id": 0,
+ "name": "Fields Ballard"
+ },
+ {
+ "id": 1,
+ "name": "Peterson Reese"
+ },
+ {
+ "id": 2,
+ "name": "Montoya Mccullough"
+ }
+ ],
+ "greeting": "Hello, Jordan Horton! You have 3 unread messages.",
+ "favoriteFruit": "banana"
+ },
+ {
+ "_id": "54d38af1ae98fd5f8860a1a0",
+ "index": 4,
+ "guid": "d6f73551-9672-4f4a-b832-1d2942a53203",
+ "isActive": false,
+ "balance": "$1,178.62",
+ "picture": "http://placehold.it/32x32",
+ "age": 36,
+ "eyeColor": "green",
+ "name": "Mae Roy",
+ "gender": "female",
+ "company": "BUGSALL",
+ "email": "maeroy@bugsall.com",
+ "phone": "+1 (859) 452-3883",
+ "address": "266 Berriman Street, Logan, Rhode Island, 2566",
+ "about": "Adipisicing id incididunt ut excepteur officia incididunt enim quis dolor incididunt esse esse. Nulla laboris pariatur eiusmod veniam duis ipsum Lorem ex cupidatat do sunt commodo cillum. Laboris enim aute irure qui officia laborum. Veniam ullamco ad laboris nulla minim laboris ut ad minim non duis.\r\n",
+ "registered": "2014-07-31T12:08:03 -02:00",
+ "latitude": 49.234132,
+ "longitude": -86.652077,
+ "tags": [
+ "ut",
+ "mollit",
+ "duis",
+ "ea",
+ "cillum",
+ "in",
+ "ut"
+ ],
+ "friends": [
+ {
+ "id": 0,
+ "name": "Acevedo Kent"
+ },
+ {
+ "id": 1,
+ "name": "Wood Edwards"
+ },
+ {
+ "id": 2,
+ "name": "Kris Brennan"
+ }
+ ],
+ "greeting": "Hello, Mae Roy! You have 3 unread messages.",
+ "favoriteFruit": "strawberry"
+ },
+ {
+ "_id": "54d38af1638743947b6d15c2",
+ "index": 5,
+ "guid": "db331420-0216-4a92-93eb-23aa0f69a533",
+ "isActive": true,
+ "balance": "$2,375.71",
+ "picture": "http://placehold.it/32x32",
+ "age": 20,
+ "eyeColor": "blue",
+ "name": "Mooney Manning",
+ "gender": "male",
+ "company": "HOMELUX",
+ "email": "mooneymanning@homelux.com",
+ "phone": "+1 (865) 585-2829",
+ "address": "833 Madison Street, Mulino, Iowa, 249",
+ "about": "Et ex irure magna culpa fugiat magna exercitation laborum velit quis nostrud et minim in. Sint nulla laboris non non. Do excepteur dolor ipsum proident anim.\r\n",
+ "registered": "2014-12-23T16:46:55 -01:00",
+ "latitude": -37.873914,
+ "longitude": 11.797406,
+ "tags": [
+ "cupidatat",
+ "labore",
+ "culpa",
+ "esse",
+ "amet",
+ "nostrud",
+ "irure"
+ ],
+ "friends": [
+ {
+ "id": 0,
+ "name": "Avila Lawrence"
+ },
+ {
+ "id": 1,
+ "name": "Wright Stokes"
+ },
+ {
+ "id": 2,
+ "name": "Joann Ramirez"
+ }
+ ],
+ "greeting": "Hello, Mooney Manning! You have 8 unread messages.",
+ "favoriteFruit": "banana"
+ },
+ {
+ "_id": "54d38af187f8159129b215ef",
+ "index": 6,
+ "guid": "0a7a0751-3ed3-480e-8b87-082f440d4bde",
+ "isActive": false,
+ "balance": "$2,258.43",
+ "picture": "http://placehold.it/32x32",
+ "age": 26,
+ "eyeColor": "brown",
+ "name": "Sylvia Roberts",
+ "gender": "female",
+ "company": "ZOLARITY",
+ "email": "sylviaroberts@zolarity.com",
+ "phone": "+1 (852) 580-3720",
+ "address": "604 Boardwalk , Greensburg, Pennsylvania, 9876",
+ "about": "Amet mollit mollit nostrud dolor ut et. Eiusmod amet id nulla commodo qui ullamco pariatur nulla magna nisi proident. Irure aliquip eu excepteur incididunt nisi consectetur.\r\n",
+ "registered": "2014-09-20T02:58:52 -02:00",
+ "latitude": 3.989497,
+ "longitude": -111.05834,
+ "tags": [
+ "culpa",
+ "quis",
+ "commodo",
+ "laboris",
+ "consectetur",
+ "pariatur",
+ "enim"
+ ],
+ "friends": [
+ {
+ "id": 0,
+ "name": "Logan Rivas"
+ },
+ {
+ "id": 1,
+ "name": "Bryan Sullivan"
+ },
+ {
+ "id": 2,
+ "name": "Bobbie Cleveland"
+ }
+ ],
+ "greeting": "Hello, Sylvia Roberts! You have 9 unread messages.",
+ "favoriteFruit": "banana"
+ }
+]
diff --git a/benches/http-requests.txt b/benches/http-requests.txt
new file mode 100644
index 0000000..4068460
--- /dev/null
+++ b/benches/http-requests.txt
@@ -0,0 +1,494 @@
+GET / HTTP/1.1
+Host: www.reddit.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+
+GET /reddit.v_EZwRzV-Ns.css HTTP/1.1
+Host: www.redditstatic.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: text/css,*/*;q=0.1
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /reddit-init.en-us.O1zuMqOOQvY.js HTTP/1.1
+Host: www.redditstatic.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: */*
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /reddit.en-us.31yAfSoTsfo.js HTTP/1.1
+Host: www.redditstatic.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: */*
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /kill.png HTTP/1.1
+Host: www.redditstatic.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /icon.png HTTP/1.1
+Host: www.redditstatic.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+
+GET /favicon.ico HTTP/1.1
+Host: www.redditstatic.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+
+GET /AMZM4CWd6zstSC8y.jpg HTTP/1.1
+Host: b.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /jz1d5Nm0w97-YyNm.jpg HTTP/1.1
+Host: b.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /aWGO99I6yOcNUKXB.jpg HTTP/1.1
+Host: a.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /rZ_rD5TjrJM0E9Aj.css HTTP/1.1
+Host: e.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: text/css,*/*;q=0.1
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /tmsPwagFzyTvrGRx.jpg HTTP/1.1
+Host: a.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /KYgUaLvXCK3TCEJx.jpg HTTP/1.1
+Host: a.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /81pzxT5x2ozuEaxX.jpg HTTP/1.1
+Host: e.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /MFqCUiUVPO5V8t6x.jpg HTTP/1.1
+Host: a.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /TFpYTiAO5aEowokv.jpg HTTP/1.1
+Host: e.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /eMWMpmm9APNeNqcF.jpg HTTP/1.1
+Host: e.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /S-IpsJrOKuaK9GZ8.jpg HTTP/1.1
+Host: c.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /3V6dj9PDsNnheDXn.jpg HTTP/1.1
+Host: c.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /wQ3-VmNXhv8sg4SJ.jpg HTTP/1.1
+Host: c.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /ixd1C1njpczEWC22.jpg HTTP/1.1
+Host: c.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /nGsQj15VyOHMwmq8.jpg HTTP/1.1
+Host: c.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /zT4yQmDxQLbIxK1b.jpg HTTP/1.1
+Host: c.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /L5e1HcZLv1iu4nrG.jpg HTTP/1.1
+Host: f.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /WJFFPxD8X4JO_lIG.jpg HTTP/1.1
+Host: f.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /hVMVTDdjuY3bQox5.jpg HTTP/1.1
+Host: f.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /rnWf8CjBcyPQs5y_.jpg HTTP/1.1
+Host: f.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /gZJL1jNylKbGV4d-.jpg HTTP/1.1
+Host: d.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /aNd2zNRLXiMnKUFh.jpg HTTP/1.1
+Host: c.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /droparrowgray.gif HTTP/1.1
+Host: www.redditstatic.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.redditstatic.com/reddit.v_EZwRzV-Ns.css
+
+GET /sprite-reddit.an0Lnf61Ap4.png HTTP/1.1
+Host: www.redditstatic.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.redditstatic.com/reddit.v_EZwRzV-Ns.css
+
+GET /ga.js HTTP/1.1
+Host: www.google-analytics.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: */*
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+If-Modified-Since: Tue, 29 Oct 2013 19:33:51 GMT
+
+GET /reddit/ads.html?sr=-reddit.com&bust2 HTTP/1.1
+Host: static.adzerk.net
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /pixel/of_destiny.png?v=hOlmDALJCWWdjzfBV4ZxJPmrdCLWB%2Ftq7Z%2Ffp4Q%2FxXbVPPREuMJMVGzKraTuhhNWxCCwi6yFEZg%3D&r=783333388 HTTP/1.1
+Host: pixel.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /UNcO-h_QcS9PD-Gn.jpg HTTP/1.1
+Host: c.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://e.thumbs.redditmedia.com/rZ_rD5TjrJM0E9Aj.css
+
+GET /welcome-lines.png HTTP/1.1
+Host: www.redditstatic.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.redditstatic.com/reddit.v_EZwRzV-Ns.css
+
+GET /welcome-upvote.png HTTP/1.1
+Host: www.redditstatic.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.redditstatic.com/reddit.v_EZwRzV-Ns.css
+
+GET /__utm.gif?utmwv=5.5.1&utms=1&utmn=720496082&utmhn=www.reddit.com&utme=8(site*srpath*usertype*uitype)9(%20reddit.com*%20reddit.com-GET_listing*guest*web)11(3!2)&utmcs=UTF-8&utmsr=2560x1600&utmvp=1288x792&utmsc=24-bit&utmul=en-us&utmje=1&utmfl=13.0%20r0&utmdt=reddit%3A%20the%20front%20page%20of%20the%20internet&utmhid=2129416330&utmr=-&utmp=%2F&utmht=1400862512705&utmac=UA-12131688-1&utmcc=__utma%3D55650728.585571751.1400862513.1400862513.1400862513.1%3B%2B__utmz%3D55650728.1400862513.1.1.utmcsr%3D(direct)%7Cutmccn%3D(direct)%7Cutmcmd%3D(none)%3B&utmu=qR~ HTTP/1.1
+Host: www.google-analytics.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /ImnpOQhbXUPkwceN.png HTTP/1.1
+Host: a.thumbs.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /ajax/libs/jquery/1.7.1/jquery.min.js HTTP/1.1
+Host: ajax.googleapis.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: */*
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://static.adzerk.net/reddit/ads.html?sr=-reddit.com&bust2
+
+GET /__utm.gif?utmwv=5.5.1&utms=2&utmn=1493472678&utmhn=www.reddit.com&utmt=event&utme=5(AdBlock*enabled*false)(0)8(site*srpath*usertype*uitype)9(%20reddit.com*%20reddit.com-GET_listing*guest*web)11(3!2)&utmcs=UTF-8&utmsr=2560x1600&utmvp=1288x792&utmsc=24-bit&utmul=en-us&utmje=1&utmfl=13.0%20r0&utmdt=reddit%3A%20the%20front%20page%20of%20the%20internet&utmhid=2129416330&utmr=-&utmp=%2F&utmht=1400862512708&utmac=UA-12131688-1&utmni=1&utmcc=__utma%3D55650728.585571751.1400862513.1400862513.1400862513.1%3B%2B__utmz%3D55650728.1400862513.1.1.utmcsr%3D(direct)%7Cutmccn%3D(direct)%7Cutmcmd%3D(none)%3B&utmu=6R~ HTTP/1.1
+Host: www.google-analytics.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /ados.js?q=43 HTTP/1.1
+Host: secure.adzerk.net
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: */*
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://static.adzerk.net/reddit/ads.html?sr=-reddit.com&bust2
+
+GET /fetch-trackers?callback=jQuery111005268222517967478_1400862512407&ids%5B%5D=t3_25jzeq-t8_k2ii&_=1400862512408 HTTP/1.1
+Host: tracker.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: */*
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /ados?t=1400862512892&request={%22Placements%22:[{%22A%22:5146,%22S%22:24950,%22D%22:%22main%22,%22AT%22:5},{%22A%22:5146,%22S%22:24950,%22D%22:%22sponsorship%22,%22AT%22:8}],%22Keywords%22:%22-reddit.com%22,%22Referrer%22:%22http%3A%2F%2Fwww.reddit.com%2F%22,%22IsAsync%22:true,%22WriteResults%22:true} HTTP/1.1
+Host: engine.adzerk.net
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: */*
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://static.adzerk.net/reddit/ads.html?sr=-reddit.com&bust2
+
+GET /pixel/of_doom.png?id=t3_25jzeq-t8_k2ii&hash=da31d967485cdbd459ce1e9a5dde279fef7fc381&r=1738649500 HTTP/1.1
+Host: pixel.redditmedia.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /Extensions/adFeedback.js HTTP/1.1
+Host: static.adzrk.net
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: */*
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://static.adzerk.net/reddit/ads.html?sr=-reddit.com&bust2
+
+GET /Extensions/adFeedback.css HTTP/1.1
+Host: static.adzrk.net
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: text/css,*/*;q=0.1
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://static.adzerk.net/reddit/ads.html?sr=-reddit.com&bust2
+
+GET /reddit/ads-load.html?bust2 HTTP/1.1
+Host: static.adzerk.net
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://www.reddit.com/
+
+GET /Advertisers/a774d7d6148046efa89403a8db635a81.jpg HTTP/1.1
+Host: static.adzerk.net
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://static.adzerk.net/reddit/ads.html?sr=-reddit.com&bust2
+
+GET /i.gif?e=eyJhdiI6NjIzNTcsImF0Ijo1LCJjbSI6MTE2MzUxLCJjaCI6Nzk4NCwiY3IiOjMzNzAxNSwiZGkiOiI4NmI2Y2UzYWM5NDM0MjhkOTk2ZTg4MjYwZDE5ZTE1YyIsImRtIjoxLCJmYyI6NDE2MTI4LCJmbCI6MjEwNDY0LCJrdyI6Ii1yZWRkaXQuY29tIiwibWsiOiItcmVkZGl0LmNvbSIsIm53Ijo1MTQ2LCJwYyI6MCwicHIiOjIwMzYyLCJydCI6MSwicmYiOiJodHRwOi8vd3d3LnJlZGRpdC5jb20vIiwic3QiOjI0OTUwLCJ1ayI6InVlMS01ZWIwOGFlZWQ5YTc0MDFjOTE5NWNiOTMzZWI3Yzk2NiIsInRzIjoxNDAwODYyNTkzNjQ1fQ&s=lwlbFf2Uywt7zVBFRj_qXXu7msY HTTP/1.1
+Host: engine.adzerk.net
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://static.adzerk.net/reddit/ads.html?sr=-reddit.com&bust2
+Cookie: azk=ue1-5eb08aeed9a7401c9195cb933eb7c966
+
+GET /BurstingPipe/adServer.bs?cn=tf&c=19&mc=imp&pli=9994987&PluID=0&ord=1400862593644&rtu=-1 HTTP/1.1
+Host: bs.serving-sys.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://static.adzerk.net/reddit/ads.html?sr=-reddit.com&bust2
+
+GET /Advertisers/63cfd0044ffd49c0a71a6626f7a1d8f0.jpg HTTP/1.1
+Host: static.adzerk.net
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://static.adzerk.net/reddit/ads-load.html?bust2
+
+GET /BurstingPipe/adServer.bs?cn=tf&c=19&mc=imp&pli=9962555&PluID=0&ord=1400862593645&rtu=-1 HTTP/1.1
+Host: bs.serving-sys.com
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://static.adzerk.net/reddit/ads-load.html?bust2
+Cookie: S_9994987=6754579095859875029; A4=01fmFvgRnI09SF00000; u2=d1263d39-874b-4a89-86cd-a2ab0860ed4e3Zl040
+
+GET /i.gif?e=eyJhdiI6NjIzNTcsImF0Ijo4LCJjbSI6MTE2MzUxLCJjaCI6Nzk4NCwiY3IiOjMzNzAxOCwiZGkiOiI3OTdlZjU3OWQ5NjE0ODdiODYyMGMyMGJkOTE4YzNiMSIsImRtIjoxLCJmYyI6NDE2MTMxLCJmbCI6MjEwNDY0LCJrdyI6Ii1yZWRkaXQuY29tIiwibWsiOiItcmVkZGl0LmNvbSIsIm53Ijo1MTQ2LCJwYyI6MCwicHIiOjIwMzYyLCJydCI6MSwicmYiOiJodHRwOi8vd3d3LnJlZGRpdC5jb20vIiwic3QiOjI0OTUwLCJ1ayI6InVlMS01ZWIwOGFlZWQ5YTc0MDFjOTE5NWNiOTMzZWI3Yzk2NiIsInRzIjoxNDAwODYyNTkzNjQ2fQ&s=OjzxzXAgQksbdQOHNm-bjZcnZPA HTTP/1.1
+Host: engine.adzerk.net
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko/20100101 Firefox/15.0.1
+Accept: image/png,image/*;q=0.8,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip, deflate
+Connection: keep-alive
+Referer: http://static.adzerk.net/reddit/ads-load.html?bust2
+Cookie: azk=ue1-5eb08aeed9a7401c9195cb933eb7c966
+
+GET /subscribe?host_int=1042356184&ns_map=571794054_374233948806,464381511_13349283399&user_id=245722467&nid=1399334269710011966&ts=1400862514 HTTP/1.1
+Host: notify8.dropbox.com
+Accept-Encoding: identity
+Connection: keep-alive
+X-Dropbox-Locale: en_US
+User-Agent: DropboxDesktopClient/2.7.54 (Macintosh; 10.8; ('i32',); en_US)
+
diff --git a/benches/http.rs b/benches/http.rs
new file mode 100644
index 0000000..4330985
--- /dev/null
+++ b/benches/http.rs
@@ -0,0 +1,180 @@
+#[macro_use]
+extern crate criterion;
+#[macro_use]
+extern crate combine;
+
+use std::fmt;
+
+use {
+ combine::{
+ many, many1,
+ parser::range::{range, take_while1},
+ stream::easy,
+ token, ParseError, Parser, RangeStream,
+ },
+ criterion::{black_box, Bencher, Criterion},
+};
+
+#[derive(Debug)]
+struct Request<'a> {
+ method: &'a [u8],
+ uri: &'a [u8],
+ version: &'a [u8],
+}
+
+#[derive(Debug)]
+struct Header<'a> {
+ name: &'a [u8],
+ value: Vec<&'a [u8]>,
+}
+
+fn is_token(c: u8) -> bool {
+ match c {
+ 128..=255
+ | 0..=31
+ | b'('
+ | b')'
+ | b'<'
+ | b'>'
+ | b'@'
+ | b','
+ | b';'
+ | b':'
+ | b'\\'
+ | b'"'
+ | b'/'
+ | b'['
+ | b']'
+ | b'?'
+ | b'='
+ | b'{'
+ | b'}'
+ | b' ' => false,
+ _ => true,
+ }
+}
+
+fn is_horizontal_space(c: u8) -> bool {
+ c == b' ' || c == b'\t'
+}
+fn is_space(c: u8) -> bool {
+ c == b' '
+}
+fn is_not_space(c: u8) -> bool {
+ c != b' '
+}
+fn is_http_version(c: u8) -> bool {
+ c >= b'0' && c <= b'9' || c == b'.'
+}
+
+fn end_of_line<'a, Input>() -> impl Parser<Input, Output = u8>
+where
+ Input: RangeStream<Token = u8, Range = &'a [u8]>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ (token(b'\r'), token(b'\n')).map(|_| b'\r').or(token(b'\n'))
+}
+
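+// Parses a single header: a token name, a `:`, then one or more value lines. A following line
+// that starts with horizontal whitespace is treated as a folded continuation and collected into
+// the same header's `value` vector.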
+fn message_header<'a, Input>() -> impl Parser<Input, Output = Header<'a>>
+where
+ Input: RangeStream<Token = u8, Range = &'a [u8]>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ let message_header_line = (
+ take_while1(is_horizontal_space),
+ take_while1(|c| c != b'\r' && c != b'\n'),
+ end_of_line(),
+ )
+ .map(|(_, line, _)| line);
+
+ struct_parser!(Header {
+ name: take_while1(is_token),
+ _: token(b':'),
+ value: many1(message_header_line),
+ })
+}
+
+fn parse_http_request<'a, Input>(
+ input: Input,
+) -> Result<((Request<'a>, Vec<Header<'a>>), Input), Input::Error>
+where
+ Input: RangeStream<Token = u8, Range = &'a [u8]>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ let http_version = range(&b"HTTP/"[..]).with(take_while1(is_http_version));
+
+ let request_line = struct_parser!(Request {
+ method: take_while1(is_token),
+ _: take_while1(is_space),
+ uri: take_while1(is_not_space),
+ _: take_while1(is_space),
+ version: http_version,
+ });
+
+ let mut request = (
+ request_line,
+ end_of_line(),
+ many(message_header()),
+ end_of_line(),
+ )
+ .map(|(request, _, headers, _)| (request, headers));
+
+ request.parse(input)
+}
+
+static REQUESTS: &'static [u8] = include_bytes!("http-requests.txt");
+
+fn http_requests_small(b: &mut Bencher<'_>) {
+ http_requests_bench(b, easy::Stream(REQUESTS))
+}
+
+fn http_requests_large(b: &mut Bencher<'_>) {
+ use std::iter;
+
+ let mut buffer = Vec::with_capacity(REQUESTS.len() * 5);
+ for buf in iter::repeat(REQUESTS).take(5) {
+ buffer.extend_from_slice(buf);
+ }
+ http_requests_bench(b, easy::Stream(&buffer[..]))
+}
+
+fn http_requests_large_cheap_error(b: &mut Bencher<'_>) {
+ use std::iter;
+
+ let mut buffer = Vec::with_capacity(REQUESTS.len() * 5);
+ for buf in iter::repeat(REQUESTS).take(5) {
+ buffer.extend_from_slice(buf);
+ }
+ http_requests_bench(b, &buffer[..])
+}
+
+fn http_requests_bench<'a, Input>(b: &mut Bencher<'_>, buffer: Input)
+where
+ Input: RangeStream<Token = u8, Range = &'a [u8]> + Clone,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position> + fmt::Debug,
+{
+ b.iter(|| {
+ let mut buf = black_box(buffer.clone());
+
+ while buf.clone().uncons().is_ok() {
+ match parse_http_request(buf) {
+ Ok(((_, _), b)) => {
+ buf = b;
+ }
+ Err(err) => panic!("{:?}", err),
+ }
+ }
+ });
+}
+
+fn http_requests(c: &mut Criterion) {
+ c.bench_function("http_requests_small", http_requests_small);
+ c.bench_function("http_requests_large", http_requests_large);
+ c.bench_function(
+ "http_requests_large_cheap_error",
+ http_requests_large_cheap_error,
+ );
+}
+
+criterion_group!(http, http_requests,);
+criterion_main!(http);
diff --git a/benches/json.rs b/benches/json.rs
new file mode 100644
index 0000000..8a44b97
--- /dev/null
+++ b/benches/json.rs
@@ -0,0 +1,323 @@
+// `impl Trait` is not required for this parser but we use it to show that it can be used to
+// significantly simplify things
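+//
+// Without `impl Trait`, each of the functions below would either have to spell out the full
+// concrete combinator type it returns or box the parser behind a trait object; returning
+// `impl Parser<Input, Output = ...>` keeps the signatures short while keeping static dispatch.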
+
+#[macro_use]
+extern crate criterion;
+
+#[macro_use]
+extern crate combine;
+
+use std::{collections::HashMap, fs::File, io::Read, path::Path};
+
+use {
+ combine::{
+ error::{Commit, ParseError},
+ parser::{
+ char::{char, digit, spaces, string},
+ choice::{choice, optional},
+ function::parser,
+ repeat::{many, many1, sep_by},
+ sequence::between,
+ token::{any, satisfy, satisfy_map},
+ },
+ stream::{
+ buffered,
+ position::{self, SourcePosition},
+ IteratorStream,
+ },
+ EasyParser, Parser, Stream, StreamOnce,
+ },
+ criterion::{black_box, Bencher, Criterion},
+};
+
+#[derive(PartialEq, Debug)]
+enum Value {
+ Number(f64),
+ String(String),
+ Bool(bool),
+ Null,
+ Object(HashMap<String, Value>),
+ Array(Vec<Value>),
+}
+
+fn lex<Input, P>(p: P) -> impl Parser<Input, Output = P::Output>
+where
+ P: Parser<Input>,
+ Input: Stream<Token = char>,
+ <Input as StreamOnce>::Error: ParseError<
+ <Input as StreamOnce>::Token,
+ <Input as StreamOnce>::Range,
+ <Input as StreamOnce>::Position,
+ >,
+{
+ p.skip(spaces())
+}
+
+fn integer<Input>() -> impl Parser<Input, Output = i64>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ lex(many1(digit()))
+ .map(|s: String| {
+ let mut n = 0;
+ for c in s.chars() {
+ n = n * 10 + (c as i64 - '0' as i64);
+ }
+ n
+ })
+ .expected("integer")
+}
+
+fn number<Input>() -> impl Parser<Input, Output = f64>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ let i = char('0').map(|_| 0.0).or(integer().map(|x| x as f64));
+ let fractional = many(digit()).map(|digits: String| {
+ let mut magnitude = 1.0;
+ digits.chars().fold(0.0, |acc, d| {
+ magnitude /= 10.0;
+ match d.to_digit(10) {
+ Some(d) => acc + (d as f64) * magnitude,
+ None => panic!("Not a digit"),
+ }
+ })
+ });
+
+ let exp = satisfy(|c| c == 'e' || c == 'E').with(optional(char('-')).and(integer()));
+ lex(optional(char('-'))
+ .and(i)
+ .map(|(sign, n)| if sign.is_some() { -n } else { n })
+ .and(optional(char('.')).with(fractional))
+ .map(|(x, y)| if x >= 0.0 { x + y } else { x - y })
+ .and(optional(exp))
+ .map(|(n, exp_option)| match exp_option {
+ Some((sign, e)) => {
+ let e = if sign.is_some() { -e } else { e };
+ n * 10.0f64.powi(e as i32)
+ }
+ None => n,
+ }))
+ .expected("number")
+}
+
+fn json_char<Input>() -> impl Parser<Input, Output = char>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ parser(|input: &mut Input| {
+ let (c, committed) = any().parse_lazy(input).into_result()?;
+ let mut back_slash_char = satisfy_map(|c| {
+ Some(match c {
+ '"' => '"',
+ '\\' => '\\',
+ '/' => '/',
+ 'b' => '\u{0008}',
+ 'f' => '\u{000c}',
+ 'n' => '\n',
+ 'r' => '\r',
+ 't' => '\t',
+ _ => return None,
+ })
+ });
+ match c {
+ '\\' => committed.combine(|_| back_slash_char.parse_stream(input).into_result()),
+ '"' => Err(Commit::Peek(Input::Error::empty(input.position()).into())),
+ _ => Ok((c, committed)),
+ }
+ })
+}
+
+fn json_string<Input>() -> impl Parser<Input, Output = String>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ between(char('"'), lex(char('"')), many(json_char())).expected("string")
+}
+
+fn object<Input>() -> impl Parser<Input, Output = Value>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ let field = (json_string(), lex(char(':')), json_value()).map(|t| (t.0, t.2));
+ let fields = sep_by(field, lex(char(',')));
+ between(lex(char('{')), lex(char('}')), fields)
+ .map(Value::Object)
+ .expected("object")
+}
+
+#[inline]
+fn json_value<Input>() -> impl Parser<Input, Output = Value>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ json_value_()
+}
+
+// We need to use `parser!` to break the recursive use of `json_value` to prevent the returned
+// parser from containing itself
+parser! {
+ #[inline]
+ fn json_value_[Input]()(Input) -> Value
+ where [ Input: Stream<Token = char> ]
+ {
+ let array = between(
+ lex(char('[')),
+ lex(char(']')),
+ sep_by(json_value(), lex(char(','))),
+ ).map(Value::Array);
+
+ choice((
+ json_string().map(Value::String),
+ object(),
+ array,
+ number().map(Value::Number),
+ lex(string("false").map(|_| Value::Bool(false))),
+ lex(string("true").map(|_| Value::Bool(true))),
+ lex(string("null").map(|_| Value::Null)),
+ ))
+ }
+}
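+// Note (illustrative, not part of the original file): without the `parser!` indirection above,
+// `json_value_` would have to return an `impl Parser` type that, through the `array` and
+// `object()` branches, contains itself, which the compiler rejects as an infinitely recursive
+// opaque type. The macro generates a concrete, named parser struct which breaks that cycle.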
+
+#[test]
+fn json_test() {
+ use self::Value::*;
+
+ let input = r#"{
+ "array": [1, ""],
+ "object": {},
+ "number": 3.14,
+ "small_number": 0.59,
+ "int": -100,
+ "exp": -1e2,
+ "exp_neg": 23e-2,
+ "true": true,
+ "false" : false,
+ "null" : null
+}"#;
+ let result = json_value().easy_parse(input);
+ let expected = Object(
+ vec![
+ ("array", Array(vec![Number(1.0), String("".to_string())])),
+ ("object", Object(HashMap::new())),
+ ("number", Number(3.14)),
+ ("small_number", Number(0.59)),
+ ("int", Number(-100.)),
+ ("exp", Number(-1e2)),
+ ("exp_neg", Number(23E-2)),
+ ("true", Bool(true)),
+ ("false", Bool(false)),
+ ("null", Null),
+ ]
+ .into_iter()
+ .map(|(k, v)| (k.to_string(), v))
+ .collect(),
+ );
+ match result {
+ Ok(result) => assert_eq!(result, (expected, "")),
+ Err(e) => {
+ println!("{}", e);
+ assert!(false);
+ }
+ }
+}
+
+fn test_data() -> String {
+ let mut data = String::new();
+ File::open(&Path::new(&"benches/data.json"))
+ .and_then(|mut file| file.read_to_string(&mut data))
+ .unwrap();
+ data
+}
+
+fn bench_json(bencher: &mut Bencher<'_>) {
+ let data = test_data();
+ let mut parser = json_value();
+ match parser.easy_parse(position::Stream::new(&data[..])) {
+ Ok((Value::Array(_), _)) => (),
+ Ok(_) => assert!(false),
+ Err(err) => {
+ println!("{}", err);
+ assert!(false);
+ }
+ }
+ bencher.iter(|| {
+ let result = parser.easy_parse(position::Stream::new(&data[..]));
+ black_box(result)
+ });
+}
+
+fn bench_json_core_error(bencher: &mut Bencher<'_>) {
+ let data = test_data();
+ let mut parser = json_value();
+ match parser.parse(position::Stream::new(&data[..])) {
+ Ok((Value::Array(_), _)) => (),
+ Ok(_) => assert!(false),
+ Err(err) => {
+ println!("{}", err);
+ assert!(false);
+ }
+ }
+ bencher.iter(|| {
+ let result = parser.parse(position::Stream::new(&data[..]));
+ black_box(result)
+ });
+}
+
+fn bench_json_core_error_no_position(bencher: &mut Bencher<'_>) {
+ let data = test_data();
+ let mut parser = json_value();
+ match parser.parse(&data[..]) {
+ Ok((Value::Array(_), _)) => (),
+ Ok(_) => assert!(false),
+ Err(err) => {
+ println!("{}", err);
+ assert!(false);
+ }
+ }
+ bencher.iter(|| {
+ let result = parser.parse(&data[..]);
+ black_box(result)
+ });
+}
+
+fn bench_buffered_json(bencher: &mut Bencher<'_>) {
+ let data = test_data();
+ bencher.iter(|| {
+ let buffer =
+ buffered::Stream::new(position::Stream::new(IteratorStream::new(data.chars())), 1);
+ let mut parser = json_value();
+ match parser.easy_parse(position::Stream::with_positioner(
+ buffer,
+ SourcePosition::default(),
+ )) {
+ Ok((Value::Array(v), _)) => {
+ black_box(v);
+ }
+ Ok(_) => assert!(false),
+ Err(err) => {
+ println!("{}", err);
+ assert!(false);
+ }
+ }
+ });
+}
+
+fn bench(c: &mut Criterion) {
+ c.bench_function("json", bench_json);
+ c.bench_function("json_core_error", bench_json_core_error);
+ c.bench_function(
+ "json_core_error_no_position",
+ bench_json_core_error_no_position,
+ );
+ c.bench_function("buffered_json", bench_buffered_json);
+}
+
+criterion_group!(json, bench);
+criterion_main!(json);
diff --git a/benches/mp4.rs b/benches/mp4.rs
new file mode 100644
index 0000000..f7d0c21
--- /dev/null
+++ b/benches/mp4.rs
@@ -0,0 +1,86 @@
+#![cfg(feature = "mp4")]
+#[macro_use]
+extern crate criterion;
+
+use std::{fs::File, io::Read, str::from_utf8};
+
+use {
+ combine::{
+ parser::{
+ byte::num::be_u32,
+ range::{range, take},
+ },
+ stream::easy::ParseError,
+ *,
+ },
+ criterion::{black_box, Bencher, Criterion},
+};
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+struct FileType<'a> {
+ major_brand: &'a str,
+ major_brand_version: &'a [u8],
+ compatible_brands: Vec<&'a str>,
+}
+
+#[derive(Clone, Debug)]
+enum MP4Box<'a> {
+ Ftyp(FileType<'a>),
+ Moov,
+ Mdat,
+ Free,
+ Skip,
+ Wide,
+ Unknown,
+}
+
+fn parse_mp4(data: &[u8]) -> Result<(Vec<MP4Box>, &[u8]), ParseError<&[u8]>> {
+ let brand_name = || take(4).and_then(from_utf8);
+ let filetype_box = (
+ range(&b"ftyp"[..]),
+ brand_name(),
+ take(4),
+ many(brand_name()),
+ )
+ .map(|(_, m, v, c)| {
+ MP4Box::Ftyp(FileType {
+ major_brand: m,
+ major_brand_version: v,
+ compatible_brands: c,
+ })
+ });
+
+ let mp4_box = be_u32().then(|offset| take(offset as usize - 4));
+ let mut box_parser = choice((
+ filetype_box,
+ range(&b"moov"[..]).map(|_| MP4Box::Moov),
+ range(&b"mdat"[..]).map(|_| MP4Box::Mdat),
+ range(&b"free"[..]).map(|_| MP4Box::Free),
+ range(&b"skip"[..]).map(|_| MP4Box::Skip),
+ range(&b"wide"[..]).map(|_| MP4Box::Wide),
+ value(MP4Box::Unknown),
+ ));
+ let data_interpreter =
+ mp4_box.flat_map(|box_data| box_parser.easy_parse(box_data).map(|t| t.0));
+
+ many(data_interpreter).easy_parse(data)
+}
+
+fn run_test(b: &mut Bencher, data: &[u8]) {
+ b.iter(|| match parse_mp4(data) {
+ Ok(x) => black_box(x),
+ Err(err) => panic!("{}", err.map_range(|bytes| format!("{:?}", bytes))),
+ });
+}
+
+fn mp4_small_test(c: &mut Criterion) {
+ let mut mp4_small = Vec::new();
+ File::open("benches/small.mp4")
+ .and_then(|mut f| f.read_to_end(&mut mp4_small))
+ .expect("Unable to read benches/small.mp4");
+
+ c.bench_function("mp4_small", move |b| run_test(b, &mp4_small));
+}
+
+criterion_group!(mp4, mp4_small_test);
+criterion_main!(mp4);
diff --git a/examples/async.rs b/examples/async.rs
new file mode 100644
index 0000000..4ebc8e9
--- /dev/null
+++ b/examples/async.rs
@@ -0,0 +1,188 @@
+#![cfg(feature = "std")]
+
+use std::{cell::Cell, io::Cursor, rc::Rc, str};
+
+use {futures_03_dep as futures, tokio_dep as tokio};
+
+use {
+ bytes::{Buf, BytesMut},
+ combine::{
+ error::{ParseError, StreamError},
+ parser::{
+ byte::digit,
+ combinator::{any_partial_state, AnyPartialState},
+ range::{range, recognize, take},
+ },
+ skip_many, skip_many1,
+ stream::{easy, PartialStream, RangeStream, StreamErrorFor},
+ Parser,
+ },
+ futures::prelude::*,
+ partial_io::PartialOp,
+ tokio_util::codec::{Decoder, FramedRead},
+};
+
+// Workaround for partial_io not working with tokio-0.2
+#[path = "../tests/support/mod.rs"]
+mod support;
+use support::*;
+
+pub struct LanguageServerDecoder {
+ state: AnyPartialState,
+ content_length_parses: Rc<Cell<i32>>,
+}
+
+impl Default for LanguageServerDecoder {
+ fn default() -> Self {
+ LanguageServerDecoder {
+ state: Default::default(),
+ content_length_parses: Rc::new(Cell::new(0)),
+ }
+ }
+}
+
+/// Parses blocks of data with length headers
+///
+/// ```
+/// Content-Length: 18
+///
+/// { "some": "data" }
+/// ```
+// The `content_length_parses` parameter only exists to demonstrate that `content_length` only
+// gets parsed once per message
+fn decode_parser<'a, Input>(
+ content_length_parses: Rc<Cell<i32>>,
+) -> impl Parser<Input, Output = Vec<u8>, PartialState = AnyPartialState> + 'a
+where
+ Input: RangeStream<Token = u8, Range = &'a [u8]> + 'a,
+ // Necessary due to rust-lang/rust#24159
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ let content_length = range(&b"Content-Length: "[..])
+ .with(recognize(skip_many1(digit())).and_then(|digits: &[u8]| {
+ str::from_utf8(digits)
+ .unwrap()
+ .parse::<usize>()
+ // Convert the error from `.parse` into an error combine understands
+ .map_err(StreamErrorFor::<Input>::other)
+ }))
+ .map(move |x| {
+ content_length_parses.set(content_length_parses.get() + 1);
+ x
+ });
+
+ // `any_partial_state` boxes the state which hides the type and lets us store it in
+ // `self`
+ any_partial_state(
+ (
+ skip_many(range(&b"\r\n"[..])),
+ content_length,
+ range(&b"\r\n\r\n"[..]).map(|_| ()),
+ )
+ .then_partial(|&mut (_, message_length, _)| {
+ take(message_length).map(|bytes: &[u8]| bytes.to_owned())
+ }),
+ )
+}
+
+impl Decoder for LanguageServerDecoder {
+ type Item = String;
+ type Error = Box<dyn std::error::Error + Send + Sync>;
+
+ fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
+ println!("Decoding `{:?}`", str::from_utf8(src).unwrap_or("NOT UTF8"));
+
+ let (opt, removed_len) = combine::stream::decode(
+ decode_parser(self.content_length_parses.clone()),
+ // easy::Stream gives us nice error messages
+ // (the same error messages that combine has had since its inception)
+ // PartialStream lets the parser know that more input should be
+ // expected if end of input is unexpectedly reached
+ &mut easy::Stream(PartialStream(&src[..])),
+ &mut self.state,
+ )
+ .map_err(|err| {
+ // Since err contains references into `src` we must replace these before
+ // we can return an error or call `advance` to remove the input we
+ // just committed
+ let err = err
+ .map_range(|r| {
+ str::from_utf8(r)
+ .ok()
+ .map_or_else(|| format!("{:?}", r), |s| s.to_string())
+ })
+ .map_position(|p| p.translate_position(&src[..]));
+ format!("{}\nIn input: `{}`", err, str::from_utf8(src).unwrap())
+ })?;
+
+ println!(
+ "Accepted {} bytes: `{:?}`",
+ removed_len,
+ str::from_utf8(&src[..removed_len]).unwrap_or("NOT UTF8")
+ );
+
+ // Remove the input we just committed.
+        // Ideally this would be done automatically by the call to
+        // `stream::decode` but that unfortunately does not work due
+        // to lifetime issues (non-lexical lifetimes might fix it!)
+ src.advance(removed_len);
+
+ match opt {
+ // `None` means we did not have enough input and we require that the
+ // caller of `decode` supply more before calling us again
+ None => {
+ println!("Requesting more input!");
+ Ok(None)
+ }
+
+ // `Some` means that a message was successfully decoded
+ // (and that we are ready to start decoding the next message)
+ Some(output) => {
+ let value = String::from_utf8(output)?;
+ println!("Decoded `{}`", value);
+ Ok(Some(value))
+ }
+ }
+ }
+}
+
+#[tokio::main]
+async fn main() {
+ let input = "Content-Length: 6\r\n\
+ \r\n\
+ 123456\r\n\
+ Content-Length: 4\r\n\
+ \r\n\
+ true";
+
+ let seq = vec![
+ PartialOp::Limited(20),
+ PartialOp::Limited(1),
+ PartialOp::Limited(2),
+ PartialOp::Limited(3),
+ ];
+ let reader = &mut Cursor::new(input.as_bytes());
+ // Using the `partial_io` crate we emulate the partial reads that would happen when reading
+ // asynchronously from an io device.
+ let partial_reader = PartialAsyncRead::new(reader, seq);
+
+ let decoder = LanguageServerDecoder::default();
+ let content_length_parses = decoder.content_length_parses.clone();
+
+ let result = FramedRead::new(partial_reader, decoder).try_collect().await;
+
+ assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
+ let values: Vec<_> = result.unwrap();
+
+ let expected_values = ["123456", "true"];
+ assert_eq!(values, expected_values);
+
+ assert_eq!(content_length_parses.get(), expected_values.len() as i32);
+
+ println!("Successfully parsed: `{}`", input);
+ println!(
+ "Found {} items and never repeated a completed parse!",
+ values.len(),
+ );
+ println!("Result: {:?}", values);
+}
diff --git a/examples/date.rs b/examples/date.rs
new file mode 100644
index 0000000..3daaa72
--- /dev/null
+++ b/examples/date.rs
@@ -0,0 +1,235 @@
+//! Parser example for ISO8601 dates. This does not handle the entire specification but it should
+//! show the gist of it and be easy to extend to parse additional forms.
+
+use std::{
+ env, fmt,
+ fs::File,
+ io::{self, Read},
+};
+
+use combine::{
+ choice,
+ error::ParseError,
+ many, optional,
+ parser::char::{char, digit},
+ stream::position,
+ Parser, Stream,
+};
+
+#[cfg(feature = "std")]
+use combine::{
+ stream::{easy, position::SourcePosition},
+ EasyParser,
+};
+
+enum Error<E> {
+ Io(io::Error),
+ Parse(E),
+}
+
+impl<E> fmt::Display for Error<E>
+where
+ E: fmt::Display,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Error::Io(ref err) => write!(f, "{}", err),
+ Error::Parse(ref err) => write!(f, "{}", err),
+ }
+ }
+}
+
+#[derive(PartialEq, Debug)]
+pub struct Date {
+ pub year: i32,
+ pub month: i32,
+ pub day: i32,
+}
+
+#[derive(PartialEq, Debug)]
+pub struct Time {
+ pub hour: i32,
+ pub minute: i32,
+ pub second: i32,
+ pub time_zone: i32,
+}
+
+#[derive(PartialEq, Debug)]
+pub struct DateTime {
+ pub date: Date,
+ pub time: Time,
+}
+
+fn two_digits<Input>() -> impl Parser<Input, Output = i32>
+where
+ Input: Stream<Token = char>,
+ // Necessary due to rust-lang/rust#24159
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ (digit(), digit()).map(|(x, y): (char, char)| {
+ let x = x.to_digit(10).expect("digit");
+ let y = y.to_digit(10).expect("digit");
+ (x * 10 + y) as i32
+ })
+}
+
+/// Parses a time zone
+/// +0012
+/// -06:30
+/// -01
+/// Z
+fn time_zone<Input>() -> impl Parser<Input, Output = i32>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ let utc = char('Z').map(|_| 0);
+ let offset = (
+ choice([char('-'), char('+')]),
+ two_digits(),
+ optional(optional(char(':')).with(two_digits())),
+ )
+ .map(|(sign, hour, minute)| {
+ let offset = hour * 60 + minute.unwrap_or(0);
+ if sign == '-' {
+ -offset
+ } else {
+ offset
+ }
+ });
+
+ utc.or(offset)
+}
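+// For example (illustrative, not part of the original file): `time_zone().parse("-06:30")`
+// evaluates to `Ok((-390, ""))` (6 * 60 + 30 minutes behind UTC), while `time_zone().parse("Z")`
+// evaluates to `Ok((0, ""))`.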
+
+/// Parses a date
+/// 2010-01-30
+fn date<Input>() -> impl Parser<Input, Output = Date>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ (
+ many::<String, _, _>(digit()),
+ char('-'),
+ two_digits(),
+ char('-'),
+ two_digits(),
+ )
+ .map(|(year, _, month, _, day)| {
+        // It's ok to just unwrap since we only parsed digits
+ Date {
+ year: year.parse().unwrap(),
+ month,
+ day,
+ }
+ })
+}
+
+/// Parses a time
+/// 12:30:02
+fn time<Input>() -> impl Parser<Input, Output = Time>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ (
+ two_digits(),
+ char(':'),
+ two_digits(),
+ char(':'),
+ two_digits(),
+ time_zone(),
+ )
+ .map(|(hour, _, minute, _, second, time_zone)| {
+        // It's ok to just unwrap since we only parsed digits
+ Time {
+ hour,
+ minute,
+ second,
+ time_zone,
+ }
+ })
+}
+
+/// Parses a date time according to ISO8601
+/// 2015-08-02T18:54:42+02
+fn date_time<Input>() -> impl Parser<Input, Output = DateTime>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ (date(), char('T'), time()).map(|(date, _, time)| DateTime { date, time })
+}
+
+#[test]
+fn test() {
+    // A parser for an ISO8601 date time with a numeric time zone offset
+ let result = date_time().parse("2015-08-02T18:54:42+02");
+ let d = DateTime {
+ date: Date {
+ year: 2015,
+ month: 8,
+ day: 2,
+ },
+ time: Time {
+ hour: 18,
+ minute: 54,
+ second: 42,
+ time_zone: 2 * 60,
+ },
+ };
+ assert_eq!(result, Ok((d, "")));
+
+ let result = date_time().parse("50015-12-30T08:54:42Z");
+ let d = DateTime {
+ date: Date {
+ year: 50015,
+ month: 12,
+ day: 30,
+ },
+ time: Time {
+ hour: 8,
+ minute: 54,
+ second: 42,
+ time_zone: 0,
+ },
+ };
+ assert_eq!(result, Ok((d, "")));
+}
+
+fn main() {
+ let result = match env::args().nth(1) {
+ Some(file) => File::open(file).map_err(Error::Io).and_then(main_),
+ None => main_(io::stdin()),
+ };
+ match result {
+ Ok(_) => println!("OK"),
+ Err(err) => println!("{}", err),
+ }
+}
+
+#[cfg(feature = "std")]
+fn main_<R>(mut read: R) -> Result<(), Error<easy::Errors<char, String, SourcePosition>>>
+where
+ R: Read,
+{
+ let mut text = String::new();
+ read.read_to_string(&mut text).map_err(Error::Io)?;
+ date_time()
+ .easy_parse(position::Stream::new(&*text))
+ .map_err(|err| Error::Parse(err.map_range(|s| s.to_string())))?;
+ Ok(())
+}
+
+#[cfg(not(feature = "std"))]
+fn main_<R>(mut read: R) -> Result<(), Error<::combine::error::StringStreamError>>
+where
+ R: Read,
+{
+ let mut text = String::new();
+ read.read_to_string(&mut text).map_err(Error::Io)?;
+ date_time()
+ .parse(position::Stream::new(&*text))
+ .map_err(Error::Parse)?;
+ Ok(())
+}
diff --git a/examples/ini.rs b/examples/ini.rs
new file mode 100644
index 0000000..2d41619
--- /dev/null
+++ b/examples/ini.rs
@@ -0,0 +1,179 @@
+//! Parser example for INI files.
+
+use std::{
+ collections::HashMap,
+ env, fmt,
+ fs::File,
+ io::{self, Read},
+};
+
+use combine::{parser::char::space, stream::position, *};
+
+#[cfg(feature = "std")]
+use combine::stream::easy;
+
+#[cfg(feature = "std")]
+use combine::stream::position::SourcePosition;
+
+enum Error<E> {
+ Io(io::Error),
+ Parse(E),
+}
+
+impl<E> fmt::Display for Error<E>
+where
+ E: fmt::Display,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Error::Io(ref err) => write!(f, "{}", err),
+ Error::Parse(ref err) => write!(f, "{}", err),
+ }
+ }
+}
+
+#[derive(PartialEq, Debug)]
+pub struct Ini {
+ pub global: HashMap<String, String>,
+ pub sections: HashMap<String, HashMap<String, String>>,
+}
+
+fn property<Input>() -> impl Parser<Input, Output = (String, String)>
+where
+ Input: Stream<Token = char>,
+ // Necessary due to rust-lang/rust#24159
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ (
+ many1(satisfy(|c| c != '=' && c != '[' && c != ';')),
+ token('='),
+ many1(satisfy(|c| c != '\n' && c != ';')),
+ )
+ .map(|(key, _, value)| (key, value))
+ .message("while parsing property")
+}
+
+fn whitespace<Input>() -> impl Parser<Input>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ let comment = (token(';'), skip_many(satisfy(|c| c != '\n'))).map(|_| ());
+    // Wrap `skip_many1(space()).or(comment)` in `skip_many` so that it skips alternating runs of
+    // whitespace and comments
+ skip_many(skip_many1(space()).or(comment))
+}
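+// For example (illustrative, not part of the original file): on the input
+// "  ; first comment\n\n; second\n[section]" the parser above consumes everything up to
+// `[section]`, alternating between runs of whitespace and `;` comments.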
+
+fn properties<Input>() -> impl Parser<Input, Output = HashMap<String, String>>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ // After each property we skip any whitespace that followed it
+ many(property().skip(whitespace()))
+}
+
+fn section<Input>() -> impl Parser<Input, Output = (String, HashMap<String, String>)>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ (
+ between(token('['), token(']'), many(satisfy(|c| c != ']'))),
+ whitespace(),
+ properties(),
+ )
+ .map(|(name, _, properties)| (name, properties))
+ .message("while parsing section")
+}
+
+fn ini<Input>() -> impl Parser<Input, Output = Ini>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ (whitespace(), properties(), many(section()))
+ .map(|(_, global, sections)| Ini { global, sections })
+}
+
+#[test]
+fn ini_ok() {
+ let text = r#"
+language=rust
+
+[section]
+name=combine; Comment
+type=LL(1)
+
+"#;
+ let mut expected = Ini {
+ global: HashMap::new(),
+ sections: HashMap::new(),
+ };
+ expected
+ .global
+ .insert(String::from("language"), String::from("rust"));
+
+ let mut section = HashMap::new();
+ section.insert(String::from("name"), String::from("combine"));
+ section.insert(String::from("type"), String::from("LL(1)"));
+ expected.sections.insert(String::from("section"), section);
+
+ let result = ini().parse(text).map(|t| t.0);
+ assert_eq!(result, Ok(expected));
+}
+
+#[cfg(feature = "std")]
+#[test]
+fn ini_error() {
+ let text = "[error";
+ let result = ini().easy_parse(position::Stream::new(text)).map(|t| t.0);
+ assert_eq!(
+ result,
+ Err(easy::Errors {
+ position: SourcePosition { line: 1, column: 7 },
+ errors: vec![
+ easy::Error::end_of_input(),
+ easy::Error::Expected(']'.into()),
+ easy::Error::Message("while parsing section".into()),
+ ],
+ })
+ );
+}
+
+fn main() {
+ let result = match env::args().nth(1) {
+ Some(file) => File::open(file).map_err(Error::Io).and_then(main_),
+ None => main_(io::stdin()),
+ };
+ match result {
+ Ok(_) => println!("OK"),
+ Err(err) => println!("{}", err),
+ }
+}
+
+#[cfg(feature = "std")]
+fn main_<R>(mut read: R) -> Result<(), Error<easy::Errors<char, String, SourcePosition>>>
+where
+ R: Read,
+{
+ let mut text = String::new();
+ read.read_to_string(&mut text).map_err(Error::Io)?;
+ ini()
+ .easy_parse(position::Stream::new(&*text))
+ .map_err(|err| Error::Parse(err.map_range(|s| s.to_string())))?;
+ Ok(())
+}
+
+#[cfg(not(feature = "std"))]
+fn main_<R>(mut read: R) -> Result<(), Error<::combine::error::StringStreamError>>
+where
+ R: Read,
+{
+ let mut text = String::new();
+ read.read_to_string(&mut text).map_err(Error::Io)?;
+ ini()
+ .parse(position::Stream::new(&*text))
+ .map_err(Error::Parse)?;
+ Ok(())
+}
diff --git a/examples/number.rs b/examples/number.rs
new file mode 100644
index 0000000..6be4ab5
--- /dev/null
+++ b/examples/number.rs
@@ -0,0 +1,33 @@
+#![cfg_attr(not(feature = "std"), no_std)]
+
+#[cfg(not(feature = "std"))]
+use core::str;
+
+#[cfg(feature = "std")]
+use std::str;
+
+use combine::{
+ error::UnexpectedParse,
+ parser::{
+ byte::digit,
+ choice::optional,
+ range::recognize,
+ repeat::{skip_many, skip_many1},
+ token::token,
+ },
+ Parser,
+};
+
+fn main() {
+ let mut parser = recognize((
+ skip_many1(digit()),
+ optional((token(b'.'), skip_many(digit()))),
+ ))
+ .and_then(|bs: &[u8]| {
+        // `bs` only contains ASCII digits and possibly a `.`, so it is valid UTF-8
+ let s = unsafe { str::from_utf8_unchecked(bs) };
+ s.parse::<f64>().map_err(|_| UnexpectedParse::Unexpected)
+ });
+ let result = parser.parse(&b"123.45"[..]);
+ assert_eq!(result, Ok((123.45, &b""[..])));
+}
diff --git a/examples/readme.rs b/examples/readme.rs
new file mode 100644
index 0000000..148993b
--- /dev/null
+++ b/examples/readme.rs
@@ -0,0 +1,18 @@
+use combine::{
+ many1,
+ parser::char::{letter, space},
+ sep_by, Parser,
+};
+
+#[test]
+fn readme() {
+ main();
+}
+
+fn main() {
+ let word = many1(letter());
+
+ let mut parser = sep_by(word, space()).map(|mut words: Vec<String>| words.pop());
+ let result = parser.parse("Pick up that word!");
+ assert_eq!(result, Ok((Some("word".to_string()), "!")));
+}
diff --git a/release.sh b/release.sh
new file mode 100755
index 0000000..6b1e35c
--- /dev/null
+++ b/release.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+VERSION=$1
+if [ -z "$VERSION" ]; then
+ echo "Expected patch, minor or major"
+ exit 1
+fi
+
+clog --$VERSION && git add CHANGELOG.md && git commit -m "Updated changelog" && cargo release $VERSION
diff --git a/rustfmt.toml b/rustfmt.toml
new file mode 100644
index 0000000..32a9786
--- /dev/null
+++ b/rustfmt.toml
@@ -0,0 +1 @@
+edition = "2018"
diff --git a/src/error.rs b/src/error.rs
new file mode 100644
index 0000000..854af9d
--- /dev/null
+++ b/src/error.rs
@@ -0,0 +1,1100 @@
+use crate::lib::fmt;
+
+#[cfg(feature = "std")]
+use std::error::Error as StdError;
+
+use crate::{stream::StreamOnce, ErrorOffset};
+
+use self::ParseResult::*;
+
+pub(crate) trait ResultExt<E, T> {
+ fn committed(self) -> ParseResult<E, T>;
+}
+
+impl<E, T> ResultExt<E, T> for Result<E, T> {
+ fn committed(self) -> ParseResult<E, T> {
+ match self {
+ Ok(x) => CommitOk(x),
+ Err(x) => CommitErr(x),
+ }
+ }
+}
+
+#[macro_export]
+#[doc(hidden)]
+macro_rules! ctry {
+ ($result:expr) => {
+ match $result {
+ $crate::error::ParseResult::CommitOk(x) => (x, $crate::error::Commit::Commit(())),
+ $crate::error::ParseResult::PeekOk(x) => (x, $crate::error::Commit::Peek(())),
+ $crate::error::ParseResult::CommitErr(err) => {
+ return $crate::error::ParseResult::CommitErr(err.into())
+ }
+ $crate::error::ParseResult::PeekErr(err) => {
+ return $crate::error::ParseResult::PeekErr(err.into())
+ }
+ }
+ };
+}
+
+/// Trait for types which can be used to construct error information.
+///
+/// To call functions expecting this trait, use the wrapper types defined in this module:
+/// `Token`, `Range`, `Format`, or `Static`/`&'static str`.
+pub trait ErrorInfo<'s, T, R> {
+ type Format: fmt::Display;
+ fn into_info(&'s self) -> Info<T, R, Self::Format>;
+}
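+// Rough sketch (illustrative, not part of the original file), assuming a stream with `u8` tokens
+// and `&[u8]` ranges: any of these forms can be handed to the error-building helpers below, such
+// as `ParseError::add_expected`, or to combinators like `.expected(..)`/`.message(..)` used in
+// `benches/json.rs` and `examples/ini.rs` in this change:
+//
+//     errors.add_expected(Token(b'{'));           // a single expected token
+//     errors.add_expected(Range(&b"null"[..]));   // an expected range of input
+//     errors.add_expected("object");              // a plain `&'static str`
+//     errors.add_expected(Format(format!("one of {} variants", 3)));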
+
+impl<'s, 'a, T, R, F> ErrorInfo<'s, T, R> for &'a F
+where
+ F: ErrorInfo<'s, T, R>,
+{
+ type Format = F::Format;
+ fn into_info(&'s self) -> Info<T, R, Self::Format> {
+ (**self).into_info()
+ }
+}
+
+#[derive(Clone, Debug)]
+pub enum Info<T, R, F = &'static str> {
+ Token(T),
+ Range(R),
+ Static(&'static str),
+ Format(F),
+}
+
+impl<'s, T, R, F> ErrorInfo<'s, T, R> for Info<T, R, F>
+where
+ T: Clone,
+ R: Clone,
+ F: fmt::Display + 's,
+{
+ type Format = &'s F;
+ fn into_info(&'s self) -> Info<T, R, <Self as ErrorInfo<'_, T, R>>::Format> {
+ match self {
+ Info::Token(b) => Info::Token(b.clone()),
+ Info::Range(b) => Info::Range(b.clone()),
+ Info::Static(b) => Info::Static(*b),
+ Info::Format(b) => Info::Format(b),
+ }
+ }
+}
+
+impl<R, F> From<char> for Info<char, R, F> {
+ fn from(s: char) -> Self {
+ Info::Token(s)
+ }
+}
+
+impl<'s, R> ErrorInfo<'s, char, R> for char {
+ type Format = &'static str;
+ fn into_info(&self) -> Info<char, R, Self::Format> {
+ Info::Token(*self)
+ }
+}
+
+impl<T, R, F> From<&'static str> for Info<T, R, F> {
+ fn from(s: &'static str) -> Self {
+ Info::Static(s)
+ }
+}
+
+impl<'s, T, R> ErrorInfo<'s, T, R> for &'static str {
+ type Format = &'static str;
+ fn into_info(&self) -> Info<T, R, Self::Format> {
+ Info::Static(*self)
+ }
+}
+
+impl<R, F> From<u8> for Info<u8, R, F> {
+ fn from(s: u8) -> Self {
+ Info::Token(s)
+ }
+}
+
+impl<R> ErrorInfo<'_, Self, R> for u8 {
+ type Format = &'static str;
+ fn into_info(&self) -> Info<Self, R, Self::Format> {
+ Info::Token(*self)
+ }
+}
+
+/// Newtype which constructs an `Info::Token` through `ErrorInfo`
+pub struct Token<T>(pub T);
+
+impl<T, R> From<Token<T>> for Info<T, R, &'static str> {
+ fn from(s: Token<T>) -> Self {
+ Info::Token(s.0)
+ }
+}
+
+impl<'s, T, R> ErrorInfo<'s, T, R> for Token<T>
+where
+ T: Clone,
+{
+ type Format = &'static str;
+ fn into_info(&'s self) -> Info<T, R, Self::Format> {
+ Info::Token(self.0.clone())
+ }
+}
+
+/// Newtype which constructs an `Info::Range` through `ErrorInfo`
+pub struct Range<R>(pub R);
+
+impl<T, R> From<Range<R>> for Info<T, R, &'static str> {
+ fn from(s: Range<R>) -> Self {
+ Info::Range(s.0)
+ }
+}
+
+impl<'s, T, R> ErrorInfo<'s, T, R> for Range<R>
+where
+ R: Clone,
+{
+ type Format = &'static str;
+ fn into_info(&'s self) -> Info<T, R, Self::Format> {
+ Info::Range(self.0.clone())
+ }
+}
+
+/// Newtype which constructs an `Info::Static` through `ErrorInfo`
+/// A plain `&'static str` can also be used; this newtype exists for consistency.
+pub struct Static(&'static str);
+
+impl<T, R, F> From<Static> for Info<T, R, F>
+where
+ F: fmt::Display,
+{
+ fn from(s: Static) -> Self {
+ Info::Static(s.0)
+ }
+}
+
+impl<'s, T, R> ErrorInfo<'s, T, R> for Static {
+ type Format = &'static str;
+ fn into_info(&'s self) -> Info<T, R, Self::Format> {
+ Info::Static(self.0)
+ }
+}
+
+/// Newtype which constructs an `Info::Format` through `ErrorInfo`
+pub struct Format<F>(pub F)
+where
+ F: fmt::Display;
+
+impl<T, R, F> From<Format<F>> for Info<T, R, F>
+where
+ F: fmt::Display,
+{
+ fn from(s: Format<F>) -> Self {
+ Info::Format(s.0)
+ }
+}
+
+impl<'s, T, R, F> ErrorInfo<'s, T, R> for Format<F>
+where
+ F: fmt::Display + 's,
+{
+ type Format = &'s F;
+ fn into_info(&'s self) -> Info<T, R, Self::Format> {
+ Info::Format(&self.0)
+ }
+}
+
+/// Enum used to indicate if a parser committed any items of the stream it was given as an input.
+///
+/// This is used by parsers such as `or` and `choice` to determine if they should try to parse
+/// with another parser as they will only be able to provide good error reporting if the preceding
+/// parser did not commit to the parse.
+#[derive(Clone, PartialEq, Debug, Copy)]
+pub enum Commit<T> {
+ /// Constructor indicating that the parser has committed to this parse. If a parser after this fails,
+ /// other parser alternatives will not be attempted (`CommitErr` will be returned)
+ Commit(T),
+ /// Constructor indicating that the parser has not committed to this parse. If a parser after this fails,
+    /// other parser alternatives will be attempted (`PeekErr` will be returned)
+ Peek(T),
+}
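+// Illustrative sketch (not part of the original file): the commit state is what decides whether
+// `or`/`choice` try their remaining alternatives. With the input "ac",
+//
+//     (token('a'), token('b')).or((token('a'), token('c')))
+//
+// fails outright, because the first alternative commits as soon as it consumes 'a', so `or` does
+// not try `(token('a'), token('c'))`. Wrapping the first alternative in combine's `attempt(..)`
+// combinator converts its committed error back into a peek error, letting `or` fall through to
+// the second alternative.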
+
+impl<T> AsMut<T> for Commit<T> {
+ fn as_mut(&mut self) -> &mut T {
+ match *self {
+ Commit::Peek(ref mut t) | Commit::Commit(ref mut t) => t,
+ }
+ }
+}
+
+impl<T> AsRef<T> for Commit<T> {
+ fn as_ref(&self) -> &T {
+ match *self {
+ Commit::Peek(ref t) | Commit::Commit(ref t) => t,
+ }
+ }
+}
+
+impl<T> Commit<T> {
+ /// Returns true if `self` is peek.
+ pub fn is_peek(&self) -> bool {
+ match *self {
+ Commit::Peek(_) => true,
+ Commit::Commit(_) => false,
+ }
+ }
+
+ /// Extracts the contained value.
+ pub fn into_inner(self) -> T {
+ match self {
+ Commit::Peek(x) | Commit::Commit(x) => x,
+ }
+ }
+
+ /// Converts `self` into the `Commit` state.
+ pub fn into_commit(self) -> Commit<T> {
+ Commit::Commit(self.into_inner())
+ }
+
+ /// Converts `self` into the `Peek` state.
+ pub fn into_peek(self) -> Commit<T> {
+ Commit::Peek(self.into_inner())
+ }
+
+ /// Maps over the contained value without changing the committed state.
+ pub fn map<F, U>(self, f: F) -> Commit<U>
+ where
+ F: FnOnce(T) -> U,
+ {
+ match self {
+ Commit::Peek(x) => Commit::Peek(f(x)),
+ Commit::Commit(x) => Commit::Commit(f(x)),
+ }
+ }
+
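+    /// Merges the committed state of `self` with `current`: the result is `Commit` if either
+    /// `self` or `current` is `Commit`, and `Peek` only if both are `Peek`.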
+ pub fn merge(&self, current: Commit<T>) -> Commit<T> {
+ match *self {
+ Commit::Peek(_) => current,
+ Commit::Commit(_) => current.into_commit(),
+ }
+ }
+
+ /// Combines the `Commit` flags from `self` and the result of `f`.
+ ///
+ /// ```text
+ /// Peek <> Peek -> Peek
+ /// Commit <> Peek -> Commit
+ /// Peek <> Commit -> Commit
+ /// Commit <> Commit -> Commit
+ /// ```
+ ///
+ /// ```
+ /// # extern crate combine as pc;
+ /// # use pc::*;
+ /// # fn main() {
+ /// //Parses a character of string literal and handles the escaped characters \\ and \" as \
+ /// //and " respectively
+ /// fn char<Input>(input: &mut Input) -> StdParseResult<char, Input>
+ /// where Input: Stream<Token = char>,
+ /// Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+ /// {
+ /// let (c, committed) = satisfy(|c| c != '"').parse_stream(input).into_result()?;
+ /// match c {
+    ///             //Since the `char` parser has already committed some of the input `combine` is used to
+ /// //propagate the committed state to the next part of the parser
+ /// '\\' => committed.combine(|_| {
+ /// satisfy(|c| c == '"' || c == '\\')
+ /// .map(|c| {
+ /// match c {
+ /// '"' => '"',
+ /// '\\' => '\\',
+ /// c => c
+ /// }
+ /// })
+ /// .parse_stream(input)
+ /// .into_result()
+ /// }),
+ /// _ => Ok((c, committed))
+ /// }
+ /// }
+ /// let result = many(parser(char))
+ /// .easy_parse(r#"abc\"\\"#);
+ /// assert_eq!(result, Ok((r#"abc"\"#.to_string(), "")));
+ /// }
+ /// ```
+ pub fn combine<F, U, E>(self, f: F) -> StdParseResult2<U, E>
+ where
+ F: FnOnce(T) -> StdParseResult2<U, E>,
+ {
+ match self {
+ Commit::Commit(x) => match f(x) {
+ Ok((v, Commit::Peek(()))) => Ok((v, Commit::Commit(()))),
+ Err(Commit::Peek(err)) => Err(Commit::Commit(err)),
+ y => y,
+ },
+ Commit::Peek(x) => f(x),
+ }
+ }
+ pub fn combine_commit<F, U, E>(self, f: F) -> ParseResult<U, E>
+ where
+ F: FnOnce(T) -> ParseResult<U, E>,
+ {
+ use self::ParseResult::*;
+
+ match self {
+ Commit::Commit(x) => match f(x) {
+ PeekOk(v) => CommitOk(v),
+ PeekErr(err) => CommitErr(err.error),
+ y => y,
+ },
+ Commit::Peek(x) => f(x),
+ }
+ }
+}
+
+/// A type alias over the specific `Result` type used by parsers to indicate whether they were
+/// successful or not.
+/// `O` is the type that is output on success.
+/// `Input` is the specific stream type used in the parser.
+pub type StdParseResult<O, Input> =
+ Result<(O, Commit<()>), Commit<Tracked<<Input as StreamOnce>::Error>>>;
+pub type StdParseResult2<O, E> = Result<(O, Commit<()>), Commit<Tracked<E>>>;
+
+/// `StreamError` represents a single error returned from a `Stream` or a `Parser`.
+///
+/// Usually multiple instances of `StreamError` are composed into a `ParseError` to build the final
+/// error value.
+pub trait StreamError<Item, Range>: Sized {
+ fn unexpected_token(token: Item) -> Self;
+ fn unexpected_range(token: Range) -> Self;
+ fn unexpected_format<T>(msg: T) -> Self
+ where
+ T: fmt::Display;
+ fn unexpected<E>(info: E) -> Self
+ where
+ E: for<'s> ErrorInfo<'s, Item, Range>,
+ {
+ match info.into_info() {
+ Info::Token(b) => Self::unexpected_token(b),
+ Info::Range(b) => Self::unexpected_range(b),
+ Info::Static(b) => Self::unexpected_static_message(b),
+ Info::Format(b) => Self::unexpected_format(b),
+ }
+ }
+ fn unexpected_static_message(msg: &'static str) -> Self {
+ Self::unexpected_format(msg)
+ }
+
+ fn expected_token(token: Item) -> Self;
+ fn expected_range(token: Range) -> Self;
+ fn expected_format<T>(msg: T) -> Self
+ where
+ T: fmt::Display;
+ fn expected<E>(info: E) -> Self
+ where
+ E: for<'s> ErrorInfo<'s, Item, Range>,
+ {
+ match info.into_info() {
+ Info::Token(b) => Self::expected_token(b),
+ Info::Range(b) => Self::expected_range(b),
+ Info::Static(b) => Self::expected_static_message(b),
+ Info::Format(b) => Self::expected_format(b),
+ }
+ }
+ fn expected_static_message(msg: &'static str) -> Self {
+ Self::expected_format(msg)
+ }
+
+ fn message_token(token: Item) -> Self;
+ fn message_range(token: Range) -> Self;
+ fn message_format<T>(msg: T) -> Self
+ where
+ T: fmt::Display;
+ fn message_static_message(msg: &'static str) -> Self {
+ Self::message_format(msg)
+ }
+ fn message<E>(info: E) -> Self
+ where
+ E: for<'s> ErrorInfo<'s, Item, Range>,
+ {
+ match info.into_info() {
+ Info::Token(b) => Self::message_token(b),
+ Info::Range(b) => Self::message_range(b),
+ Info::Static(b) => Self::message_static_message(b),
+ Info::Format(b) => Self::message_format(b),
+ }
+ }
+
+ #[cfg(feature = "std")]
+ fn other<E>(err: E) -> Self
+ where
+ E: StdError + Send + Sync + 'static,
+ {
+ Self::message_format(err)
+ }
+
+ fn end_of_input() -> Self {
+ Self::unexpected_static_message("end of input")
+ }
+
+ fn is_unexpected_end_of_input(&self) -> bool;
+
+ /// Converts `self` into a different `StreamError` type.
+ ///
+    /// This should aim to preserve as much information as possible in the returned `T` value, but
+    /// if `Self` discards some of the information passed to it through one of the constructors,
+    /// that information is naturally lost.
+ fn into_other<T>(self) -> T
+ where
+ T: StreamError<Item, Range>;
+}
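+// Illustrative note (not part of the original file): parsers usually construct these errors
+// indirectly; for instance `examples/async.rs` in this change converts a `ParseIntError` into a
+// stream error with `.map_err(StreamErrorFor::<Input>::other)`, which bottoms out in the
+// `StreamError::other` constructor above.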
+
+/// Trait which defines a combine parse error.
+///
+/// A parse error is composed of zero or more `StreamError` instances which get added to it as
+/// errors are encountered during parsing.
+pub trait ParseError<Item, Range, Position>: Sized + PartialEq {
+ type StreamError: StreamError<Item, Range>;
+
+ /// Constructs an empty error.
+ ///
+ /// An empty error is expected to be cheap to create as it is frequently created and discarded.
+ fn empty(position: Position) -> Self;
+
+ /// Creates a `ParseError` from a single `Self::StreamError`
+ fn from_error(position: Position, err: Self::StreamError) -> Self {
+ let mut errors = Self::empty(position);
+ errors.add(err);
+ errors
+ }
+
+ fn position(&self) -> Position {
+ // TODO Remove the default implementation in a breaking release
+ unimplemented!()
+ }
+
+ /// Sets the position of this `ParseError`
+ fn set_position(&mut self, position: Position);
+
+ /// Merges two errors. If they exist at the same position the errors of `other` are
+ /// added to `self` (using the semantics of `add`). If they are not at the same
+ /// position the error furthest ahead is returned, ignoring the other `ParseError`.
+ fn merge(self, other: Self) -> Self {
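+ // The default implementation simply keeps `other`; implementations which actually
+ // track positions are expected to override this and compare the positions first.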
+ other
+ }
+
+ /// Adds a `StreamError` to `self`.
+ ///
+ /// It is up to each individual error type to define what adding an error does; some may push
+ /// it to a vector while others may only keep `self` or `err` to avoid allocation.
+ fn add(&mut self, err: Self::StreamError);
+
+ fn add_expected<E>(&mut self, info: E)
+ where
+ E: for<'s> ErrorInfo<'s, Item, Range>,
+ {
+ self.add(Self::StreamError::expected(info))
+ }
+
+ fn add_unexpected<E>(&mut self, info: E)
+ where
+ E: for<'s> ErrorInfo<'s, Item, Range>,
+ {
+ self.add(Self::StreamError::unexpected(info))
+ }
+
+ fn add_message<E>(&mut self, info: E)
+ where
+ E: for<'s> ErrorInfo<'s, Item, Range>,
+ {
+ self.add(Self::StreamError::message(info))
+ }
+
+ /// Sets `info` as the *only* `Expected` error of `self`
+ fn set_expected<F>(self_: &mut Tracked<Self>, info: Self::StreamError, f: F)
+ where
+ F: FnOnce(&mut Tracked<Self>);
+
+ /// Removes any expected errors currently in `self`
+ fn clear_expected(&mut self) {}
+
+ fn is_unexpected_end_of_input(&self) -> bool;
+
+ /// Does a best-effort conversion of `self` into another `ParseError`
+ fn into_other<T>(self) -> T
+ where
+ T: ParseError<Item, Range, Position>;
+}
+
+/// Defines a conversion between two parse error types.
+///
+/// Like `ParseError::into_other` but with a more general signature
+/// (This will take the place of `into_other` on breaking release of combine)
+pub trait ParseErrorInto<Item, Range, Position>: Sized {
+ fn into_other_error<T, Item2, Range2, Position2>(self) -> T
+ where
+ T: ParseError<Item2, Range2, Position2>,
+ Item2: From<Item>,
+ Range2: From<Range>,
+ Position2: From<Position>;
+}
+
+/// Defines a conversion between two stream error types.
+///
+/// Like `StreamError::into_other` but with a more general signature
+/// (This will take the place of `into_other` on breaking release of combine)
+pub trait StreamErrorInto<Item, Range>: Sized {
+ fn into_other_error<T, Item2, Range2>(self) -> T
+ where
+ T: StreamError<Item2, Range2>,
+ Item2: From<Item>,
+ Range2: From<Range>;
+}
+
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub enum UnexpectedParse {
+ Eoi,
+ Unexpected,
+}
+
+impl fmt::Display for UnexpectedParse {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self.as_str())
+ }
+}
+
+#[cfg(feature = "std")]
+impl StdError for UnexpectedParse {
+ fn description(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl UnexpectedParse {
+ fn as_str(&self) -> &str {
+ use self::UnexpectedParse::*;
+ match *self {
+ Unexpected => "unexpected parse",
+ Eoi => "unexpected end of input",
+ }
+ }
+}
+
+impl<Item, Range> StreamError<Item, Range> for UnexpectedParse {
+ #[inline]
+ fn unexpected_token(_: Item) -> Self {
+ UnexpectedParse::Unexpected
+ }
+ #[inline]
+ fn unexpected_range(_: Range) -> Self {
+ UnexpectedParse::Unexpected
+ }
+ #[inline]
+ fn unexpected_format<T>(_: T) -> Self
+ where
+ T: fmt::Display,
+ {
+ UnexpectedParse::Unexpected
+ }
+
+ #[inline]
+ fn expected_token(_: Item) -> Self {
+ UnexpectedParse::Unexpected
+ }
+ #[inline]
+ fn expected_range(_: Range) -> Self {
+ UnexpectedParse::Unexpected
+ }
+ #[inline]
+ fn expected_format<T>(_: T) -> Self
+ where
+ T: fmt::Display,
+ {
+ UnexpectedParse::Unexpected
+ }
+ #[inline]
+ fn message_format<T>(_: T) -> Self
+ where
+ T: fmt::Display,
+ {
+ UnexpectedParse::Unexpected
+ }
+ #[inline]
+ fn message_token(_: Item) -> Self {
+ UnexpectedParse::Unexpected
+ }
+ #[inline]
+ fn message_range(_: Range) -> Self {
+ UnexpectedParse::Unexpected
+ }
+
+ #[inline]
+ fn end_of_input() -> Self {
+ UnexpectedParse::Eoi
+ }
+
+ #[inline]
+ fn is_unexpected_end_of_input(&self) -> bool {
+ *self == UnexpectedParse::Eoi
+ }
+
+ #[inline]
+ fn into_other<T>(self) -> T
+ where
+ T: StreamError<Item, Range>,
+ {
+ match self {
+ UnexpectedParse::Unexpected => T::unexpected_static_message("parse"),
+ UnexpectedParse::Eoi => T::end_of_input(),
+ }
+ }
+}
+
+impl<Item, Range, Position> ParseError<Item, Range, Position> for UnexpectedParse
+where
+ Position: Default,
+{
+ type StreamError = Self;
+ #[inline]
+ fn empty(_position: Position) -> Self {
+ UnexpectedParse::Unexpected
+ }
+
+ #[inline]
+ fn from_error(_: Position, err: Self::StreamError) -> Self {
+ err
+ }
+
+ fn position(&self) -> Position {
+ Position::default()
+ }
+
+ #[inline]
+ fn set_position(&mut self, _position: Position) {}
+
+ #[inline]
+ fn add(&mut self, err: Self::StreamError) {
+ *self = match (*self, err) {
+ (UnexpectedParse::Eoi, _) => UnexpectedParse::Eoi,
+ (_, err) => err,
+ };
+ }
+
+ #[inline]
+ fn set_expected<F>(self_: &mut Tracked<Self>, info: Self::StreamError, f: F)
+ where
+ F: FnOnce(&mut Tracked<Self>),
+ {
+ f(self_);
+ self_.error = info;
+ }
+
+ fn is_unexpected_end_of_input(&self) -> bool {
+ *self == UnexpectedParse::Eoi
+ }
+
+ #[inline]
+ fn into_other<T>(self) -> T
+ where
+ T: ParseError<Item, Range, Position>,
+ {
+ T::from_error(Position::default(), StreamError::into_other(self))
+ }
+}
+
+impl<Item, Range, Position> ParseErrorInto<Item, Range, Position> for UnexpectedParse
+where
+ Position: Default,
+{
+ fn into_other_error<T, Item2, Range2, Position2>(self) -> T
+ where
+ T: ParseError<Item2, Range2, Position2>,
+ Item2: From<Item>,
+ Range2: From<Range>,
+ Position2: From<Position>,
+ {
+ T::from_error(
+ Position::default().into(),
+ StreamErrorInto::<Item, Range>::into_other_error(self),
+ )
+ }
+}
+
+impl<Item, Range> StreamErrorInto<Item, Range> for UnexpectedParse {
+ fn into_other_error<T, Item2, Range2>(self) -> T
+ where
+ T: StreamError<Item2, Range2>,
+ Item2: From<Item>,
+ Range2: From<Range>,
+ {
+ StreamError::into_other(self)
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub enum StringStreamError {
+ UnexpectedParse,
+ Eoi,
+ CharacterBoundary,
+}
+
+pub(crate) const CHAR_BOUNDARY_ERROR_MESSAGE: &str = "unexpected slice on character boundary";
+
+impl fmt::Display for StringStreamError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self.as_str())
+ }
+}
+
+#[cfg(feature = "std")]
+impl StdError for StringStreamError {
+ fn description(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl StringStreamError {
+ fn as_str(&self) -> &str {
+ use self::StringStreamError::*;
+ match *self {
+ UnexpectedParse => "unexpected parse",
+ Eoi => "unexpected end of input",
+ CharacterBoundary => CHAR_BOUNDARY_ERROR_MESSAGE,
+ }
+ }
+}
+
+impl<Item, Range> StreamError<Item, Range> for StringStreamError {
+ #[inline]
+ fn unexpected_token(_: Item) -> Self {
+ StringStreamError::UnexpectedParse
+ }
+ #[inline]
+ fn unexpected_range(_: Range) -> Self {
+ StringStreamError::UnexpectedParse
+ }
+ #[inline]
+ fn unexpected_format<T>(_msg: T) -> Self
+ where
+ T: fmt::Display,
+ {
+ StringStreamError::UnexpectedParse
+ }
+
+ #[inline]
+ fn expected_token(_: Item) -> Self {
+ StringStreamError::UnexpectedParse
+ }
+ #[inline]
+ fn expected_range(_: Range) -> Self {
+ StringStreamError::UnexpectedParse
+ }
+ #[inline]
+ fn expected_format<T>(_: T) -> Self
+ where
+ T: fmt::Display,
+ {
+ StringStreamError::UnexpectedParse
+ }
+ #[inline]
+ fn message_format<T>(_: T) -> Self
+ where
+ T: fmt::Display,
+ {
+ StringStreamError::UnexpectedParse
+ }
+ #[inline]
+ fn message_token(_: Item) -> Self {
+ StringStreamError::UnexpectedParse
+ }
+ #[inline]
+ fn message_range(_: Range) -> Self {
+ StringStreamError::UnexpectedParse
+ }
+ fn message_static_message(msg: &'static str) -> Self {
+ if msg == CHAR_BOUNDARY_ERROR_MESSAGE {
+ StringStreamError::CharacterBoundary
+ } else {
+ StringStreamError::UnexpectedParse
+ }
+ }
+ #[inline]
+ fn end_of_input() -> Self {
+ StringStreamError::Eoi
+ }
+ #[inline]
+ fn is_unexpected_end_of_input(&self) -> bool {
+ *self == StringStreamError::Eoi
+ }
+ #[inline]
+ fn into_other<T>(self) -> T
+ where
+ T: StreamError<Item, Range>,
+ {
+ let msg = match self {
+ StringStreamError::CharacterBoundary => CHAR_BOUNDARY_ERROR_MESSAGE,
+ StringStreamError::UnexpectedParse => "parse",
+ StringStreamError::Eoi => return T::end_of_input(),
+ };
+ T::unexpected_static_message(msg)
+ }
+}
+impl<Item, Range, Position> ParseError<Item, Range, Position> for StringStreamError
+where
+ Position: Default,
+{
+ type StreamError = Self;
+ #[inline]
+ fn empty(_position: Position) -> Self {
+ StringStreamError::UnexpectedParse
+ }
+ #[inline]
+ fn from_error(_: Position, err: Self::StreamError) -> Self {
+ err
+ }
+
+ fn position(&self) -> Position {
+ Position::default()
+ }
+
+ #[inline]
+ fn set_position(&mut self, _position: Position) {}
+
+ #[inline]
+ fn add(&mut self, err: Self::StreamError) {
+ *self = match (*self, err) {
+ (StringStreamError::Eoi, _) => StringStreamError::Eoi,
+ (_, err) => err,
+ };
+ }
+
+ #[inline]
+ fn set_expected<F>(self_: &mut Tracked<Self>, info: Self::StreamError, f: F)
+ where
+ F: FnOnce(&mut Tracked<Self>),
+ {
+ f(self_);
+ self_.error = info;
+ }
+
+ fn is_unexpected_end_of_input(&self) -> bool {
+ *self == StringStreamError::Eoi
+ }
+
+ #[inline]
+ fn into_other<T>(self) -> T
+ where
+ T: ParseError<Item, Range, Position>,
+ {
+ T::from_error(Position::default(), StreamError::into_other(self))
+ }
+}
+
+impl<Item, Range, Position> ParseErrorInto<Item, Range, Position> for StringStreamError
+where
+ Position: Default,
+{
+ fn into_other_error<T, Item2, Range2, Position2>(self) -> T
+ where
+ T: ParseError<Item2, Range2, Position2>,
+ Item2: From<Item>,
+ Range2: From<Range>,
+ Position2: From<Position>,
+ {
+ T::from_error(
+ Position::default().into(),
+ StreamErrorInto::<Item, Range>::into_other_error(self),
+ )
+ }
+}
+
+impl<Item, Range> StreamErrorInto<Item, Range> for StringStreamError {
+ fn into_other_error<T, Item2, Range2>(self) -> T
+ where
+ T: StreamError<Item2, Range2>,
+ Item2: From<Item>,
+ Range2: From<Range>,
+ {
+ StreamError::into_other(self)
+ }
+}
+
+/// Error wrapper which lets parsers track which parser in a sequence of sub-parsers has emitted
+/// the error. `Tracked::from` can be used to construct this and it should otherwise be
+/// ignored outside of combine.
+#[derive(Clone, PartialEq, Debug, Copy)]
+pub struct Tracked<E> {
+ /// The error returned
+ pub error: E,
+ #[doc(hidden)]
+ pub offset: ErrorOffset,
+}
+
+impl<E> From<E> for Tracked<E> {
+ fn from(error: E) -> Self {
+ Tracked {
+ error,
+ offset: ErrorOffset(1),
+ }
+ }
+}
+
+/// A `Result` type which has the committed status flattened into the result.
+/// Conversions to and from `std::result::Result` can be done using `result.into()` or
+/// `From::from(result)`.
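+///
+/// A small sketch of how the commit status is carried through the helper methods:
+///
+/// ```
+/// use combine::error::ParseResult;
+///
+/// // `map` transforms the successful value while preserving the commit status
+/// let res: ParseResult<i32, ()> = ParseResult::PeekOk(1);
+/// assert_eq!(res.map(|x| x + 1), ParseResult::PeekOk(2));
+///
+/// // Both error variants count as errors
+/// assert!(ParseResult::<i32, ()>::CommitErr(()).is_err());
+/// ```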
+#[derive(Clone, PartialEq, Debug, Copy)]
+pub enum ParseResult<T, E> {
+ /// The parser has succeeded and has committed to this parse. If a parser after this fails,
+ /// other parser alternatives will not be attempted (`CommitErr` will be returned)
+ CommitOk(T),
+ /// The parser has succeeded and has not committed to this parse. If a parser after this fails,
+ /// other parser alternatives will be attempted (`PeekErr` will be returned)
+ PeekOk(T),
+ /// The parser failed and other parse alternatives will not be attempted.
+ CommitErr(E),
+ /// The parser failed but other parse alternatives may be attempted.
+ PeekErr(Tracked<E>),
+}
+
+impl<T, E> ParseResult<T, E> {
+ #[inline]
+ pub fn is_ok(&self) -> bool {
+ match *self {
+ CommitOk(_) | PeekOk(_) => true,
+ CommitErr(_) | PeekErr(_) => false,
+ }
+ }
+
+ #[inline]
+ pub fn is_err(&self) -> bool {
+ !self.is_ok()
+ }
+
+ pub fn as_ref(&self) -> ParseResult<&T, &E> {
+ match *self {
+ CommitOk(ref t) => CommitOk(t),
+ PeekOk(ref t) => PeekOk(t),
+ CommitErr(ref e) => CommitErr(e),
+ PeekErr(ref e) => PeekErr(Tracked {
+ error: &e.error,
+ offset: e.offset,
+ }),
+ }
+ }
+
+ pub fn and_then<F, T2>(self, f: F) -> F::Output
+ where
+ F: FnOnce(T) -> ParseResult<T2, E>,
+ {
+ match self {
+ CommitOk(t) => match f(t) {
+ CommitOk(t2) | PeekOk(t2) => CommitOk(t2),
+ PeekErr(e) => CommitErr(e.error),
+ CommitErr(e) => CommitErr(e),
+ },
+ PeekOk(t) => f(t),
+ CommitErr(e) => CommitErr(e),
+ PeekErr(e) => PeekErr(e),
+ }
+ }
+
+ pub fn map_err<F, E2>(self, f: F) -> ParseResult<T, F::Output>
+ where
+ F: FnOnce(E) -> E2,
+ {
+ match self {
+ CommitOk(t) => CommitOk(t),
+ PeekOk(t) => PeekOk(t),
+ CommitErr(e) => CommitErr(f(e)),
+ PeekErr(e) => PeekErr(Tracked {
+ error: f(e.error),
+ offset: e.offset,
+ }),
+ }
+ }
+
+ pub fn map<F, T2>(self, f: F) -> ParseResult<F::Output, E>
+ where
+ F: FnOnce(T) -> T2,
+ {
+ match self {
+ CommitOk(t) => CommitOk(f(t)),
+ PeekOk(t) => PeekOk(f(t)),
+ CommitErr(e) => CommitErr(e),
+ PeekErr(e) => PeekErr(e),
+ }
+ }
+}
+
+impl<O, E> ParseResult<O, E> {
+ pub fn into_result(self) -> StdParseResult2<O, E> {
+ self.into()
+ }
+}
+
+impl<T, E> Into<Result<Commit<T>, Commit<Tracked<E>>>> for ParseResult<T, E> {
+ #[inline]
+ fn into(self) -> Result<Commit<T>, Commit<Tracked<E>>> {
+ match self {
+ CommitOk(t) => Ok(Commit::Commit(t)),
+ PeekOk(t) => Ok(Commit::Peek(t)),
+ CommitErr(e) => Err(Commit::Commit(e.into())),
+ PeekErr(e) => Err(Commit::Peek(e)),
+ }
+ }
+}
+
+impl<O, E> Into<StdParseResult2<O, E>> for ParseResult<O, E> {
+ #[inline]
+ fn into(self) -> StdParseResult2<O, E> {
+ use self::ParseResult::*;
+
+ match self {
+ CommitOk(t) => Ok((t, Commit::Commit(()))),
+ PeekOk(t) => Ok((t, Commit::Peek(()))),
+ CommitErr(e) => Err(Commit::Commit(e.into())),
+ PeekErr(e) => Err(Commit::Peek(e)),
+ }
+ }
+}
+
+impl<O, E> From<StdParseResult2<O, E>> for ParseResult<O, E> {
+ #[inline]
+ fn from(result: StdParseResult2<O, E>) -> ParseResult<O, E> {
+ use self::ParseResult::*;
+
+ match result {
+ Ok((t, Commit::Commit(()))) => CommitOk(t),
+ Ok((t, Commit::Peek(()))) => PeekOk(t),
+ Err(Commit::Commit(e)) => CommitErr(e.error),
+ Err(Commit::Peek(e)) => PeekErr(e),
+ }
+ }
+}
+
+#[cfg(all(feature = "std", test))]
+mod tests_std {
+
+ use crate::Parser;
+
+ #[derive(Clone, PartialEq, Debug)]
+ struct CloneOnly {
+ s: String,
+ }
+
+ #[test]
+ fn parse_clone_but_not_copy() {
+ // This verifies we can parse slice references with a token type that is Clone but not Copy.
+ let input = &[
+ CloneOnly { s: "x".to_string() },
+ CloneOnly { s: "y".to_string() },
+ ][..];
+ let result =
+ crate::parser::range::take_while(|c: CloneOnly| c.s == "x".to_string()).parse(input);
+ assert_eq!(
+ result,
+ Ok((
+ &[CloneOnly { s: "x".to_string() }][..],
+ &[CloneOnly { s: "y".to_string() }][..]
+ ))
+ );
+ }
+}
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644
index 0000000..af2b713
--- /dev/null
+++ b/src/lib.rs
@@ -0,0 +1,1006 @@
+//! This crate contains parser combinators, roughly based on the Haskell libraries
+//! [parsec](http://hackage.haskell.org/package/parsec) and
+//! [attoparsec](https://hackage.haskell.org/package/attoparsec).
+//!
+//! A parser in this library can be described as a function which takes some input and if it
+//! is successful, returns a value together with the remaining input.
+//! A parser combinator is a function which takes one or more parsers and returns a new parser.
+//! For instance the [`many`] parser can be used to convert a parser for single digits into one that
+//! parses multiple digits. By modeling parsers in this way it becomes easy to compose complex
+//! parsers in an almost declarative way.
+//!
+//! # Overview
+//!
+//! `combine` limits itself to creating [LL(1) parsers](https://en.wikipedia.org/wiki/LL_parser)
+//! (it is possible to opt-in to LL(k) parsing using the [`attempt`] combinator) which makes the
+//! parsers easy to reason about in both behaviour and performance while sacrificing
+//! some generality. In addition to making the parsers you construct easier to reason about,
+//! the library also uses the knowledge that it is an LL parser to automatically construct
+//! good error messages.
+//!
+//! ```rust
+//! extern crate combine;
+//! use combine::{Parser, EasyParser};
+//! use combine::stream::position;
+//! use combine::parser::char::{digit, letter};
+//! const MSG: &'static str = r#"Parse error at line: 1, column: 1
+//! Unexpected `|`
+//! Expected `digit` or `letter`
+//! "#;
+//!
+//! fn main() {
+//! // Wrapping a `&str` with `position::Stream` provides automatic line and column tracking. If
+//! // it was not used the positions would instead only be pointers into the `&str`
+//! if let Err(err) = digit().or(letter()).easy_parse(position::Stream::new("|")) {
+//! assert_eq!(MSG, format!("{}", err));
+//! }
+//! }
+//! ```
+//!
+//! This library is currently split into a few core modules:
+//!
+//! * [`parser`][mod parser] is where you will find all the parsers that combine provides. It contains the core
+//! [`Parser`] trait as well as several submodules such as `sequence` or `choice` which each
+//! contain several parsers aimed at a specific niche.
+//!
+//! * [`stream`] contains the second most important trait next to [`Parser`]. Streams represent the
+//! data source which is being parsed such as `&[u8]`, `&str` or iterators.
+//!
+//! * [`easy`] contains combine's default "easy" error and stream handling. If you use the
+//! `easy_parse` method to start your parsing these are the types that are used.
+//!
+//! * [`error`] contains the types and traits that make up combine's error handling. Unless you
+//! need to customize the errors your parsers return you should not need to use this module much.
+//!
+//!
+//! # Examples
+//!
+//! ```
+//! extern crate combine;
+//! use combine::parser::char::{spaces, digit, char};
+//! use combine::{many1, sep_by, Parser, EasyParser};
+//! use combine::stream::easy;
+//!
+//! fn main() {
+//! //Parse spaces first and use the with method to only keep the result of the next parser
+//! let integer = spaces()
+//! //parse a string of digits into an i32
+//! .with(many1(digit()).map(|string: String| string.parse::<i32>().unwrap()));
+//!
+//! //Parse integers separated by commas, skipping whitespace
+//! let mut integer_list = sep_by(integer, spaces().skip(char(',')));
+//!
+//! //Call parse with the input to execute the parser
+//! let input = "1234, 45,78";
+//! let result: Result<(Vec<i32>, &str), easy::ParseError<&str>> =
+//! integer_list.easy_parse(input);
+//! match result {
+//! Ok((value, _remaining_input)) => println!("{:?}", value),
+//! Err(err) => println!("{}", err)
+//! }
+//! }
+//! ```
+//!
+//! If we need a parser that is mutually recursive or if we want to export a reusable parser the
+//! [`parser!`] macro can be used. In effect it makes it possible to return a parser without naming
+//! the type of the parser (which can be very large due to combine's trait based approach). While
+//! it is possible to avoid naming the type without the macro, those solutions require either allocation
+//! (`Box<dyn Parser< Input, Output = O, PartialState = P>>`) or nightly rust via `impl Trait`. The
+//! macro thus threads the needle and makes it possible to have non-allocating, anonymous parsers
+//! on stable rust.
+//!
+//! ```
+//! #[macro_use]
+//! extern crate combine;
+//! use combine::parser::char::{char, letter, spaces};
+//! use combine::{between, choice, many1, parser, sep_by, Parser, EasyParser};
+//! use combine::error::{ParseError, StdParseResult};
+//! use combine::stream::{Stream, Positioned};
+//! use combine::stream::position;
+//!
+//! #[derive(Debug, PartialEq)]
+//! pub enum Expr {
+//! Id(String),
+//! Array(Vec<Expr>),
+//! Pair(Box<Expr>, Box<Expr>)
+//! }
+//!
+//! // `impl Parser` can be used to create reusable parsers with zero overhead
+//! fn expr_<Input>() -> impl Parser< Input, Output = Expr>
+//! where Input: Stream<Token = char>,
+//! // Necessary due to rust-lang/rust#24159
+//! Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+//! {
+//! let word = many1(letter());
+//!
+//! // A parser which skips past whitespace.
+//! // Since we aren't interested in knowing that our expression parser
+//! // could have accepted additional whitespace between the tokens, we also silence the error.
+//! let skip_spaces = || spaces().silent();
+//!
+//! //Creates a parser which parses a char and skips any trailing whitespace
+//! let lex_char = |c| char(c).skip(skip_spaces());
+//!
+//! let comma_list = sep_by(expr(), lex_char(','));
+//! let array = between(lex_char('['), lex_char(']'), comma_list);
+//!
+//! //We can use tuples to run several parsers in sequence
+//! //The resulting type is a tuple containing each parsers output
+//! let pair = (lex_char('('),
+//! expr(),
+//! lex_char(','),
+//! expr(),
+//! lex_char(')'))
+//! .map(|t| Expr::Pair(Box::new(t.1), Box::new(t.3)));
+//!
+//! choice((
+//! word.map(Expr::Id),
+//! array.map(Expr::Array),
+//! pair,
+//! ))
+//! .skip(skip_spaces())
+//! }
+//!
+//! // As this expression parser needs to be able to call itself recursively `impl Parser` can't
+//! // be used on its own as that would cause an infinitely large type. We can avoid this by using
+//! // the `parser!` macro which erases the inner type and the size of that type entirely which
+//! // lets it be used recursively.
+//! //
+//! // (This macro does not use `impl Trait` which means it can be used in rust < 1.26 as well to
+//! // emulate `impl Parser`)
+//! parser!{
+//! fn expr[Input]()(Input) -> Expr
+//! where [Input: Stream<Token = char>]
+//! {
+//! expr_()
+//! }
+//! }
+//!
+//! fn main() {
+//! let result = expr()
+//! .parse("[[], (hello, world), [rust]]");
+//! let expr = Expr::Array(vec![
+//! Expr::Array(Vec::new())
+//! , Expr::Pair(Box::new(Expr::Id("hello".to_string())),
+//! Box::new(Expr::Id("world".to_string())))
+//! , Expr::Array(vec![Expr::Id("rust".to_string())])
+//! ]);
+//! assert_eq!(result, Ok((expr, "")));
+//! }
+//! ```
+//!
+//! [`combinator`]: combinator/index.html
+//! [mod parser]: parser/index.html
+//! [`easy`]: easy/index.html
+//! [`error`]: error/index.html
+//! [`char`]: parser/char/index.html
+//! [`byte`]: parser/byte/index.html
+//! [`range`]: parser/range/index.html
+//! [`many`]: parser/repeat/fn.many.html
+//! [`attempt`]: parser/combinator/fn.attempt.html
+//! [`satisfy`]: parser/token/fn.satisfy.html
+//! [`or`]: parser/trait.Parser.html#method.or
+//! [`Stream`]: stream/trait.Stream.html
+//! [`RangeStream`]: stream/trait.RangeStream.html
+//! [`Parser`]: parser/trait.Parser.html
+//! [fn parser]: parser/function/fn.parser.html
+//! [`parser!`]: macro.parser.html
+// inline is only used on trivial functions returning parsers
+#![allow(
+ clippy::inline_always,
+ clippy::type_complexity,
+ clippy::too_many_arguments,
+ clippy::match_like_matches_macro
+)]
+#![cfg_attr(not(feature = "std"), no_std)]
+#![cfg_attr(docsrs, feature(doc_cfg))]
+
+#[doc(inline)]
+pub use crate::error::{ParseError, ParseResult, StdParseResult};
+
+#[cfg(feature = "std")]
+#[doc(inline)]
+pub use crate::parser::EasyParser;
+
+#[doc(inline)]
+pub use crate::parser::Parser;
+
+#[doc(inline)]
+pub use crate::stream::{Positioned, RangeStream, RangeStreamOnce, Stream, StreamOnce};
+
+#[doc(inline)]
+pub use crate::parser::{
+ choice::optional,
+ combinator::{attempt, look_ahead, not_followed_by},
+ error::{unexpected, unexpected_any},
+ function::parser,
+ repeat::{
+ chainl1, chainr1, count, count_min_max, many, many1, sep_by, sep_by1, sep_end_by,
+ sep_end_by1, skip_count, skip_count_min_max, skip_many, skip_many1,
+ },
+ sequence::between,
+ token::{
+ any, eof, none_of, one_of, position, produce, satisfy, satisfy_map, token, tokens, value,
+ },
+};
+
+#[doc(inline)]
+pub use crate::parser::choice::choice;
+
+#[doc(inline)]
+pub use crate::parser::combinator::from_str;
+
+#[doc(inline)]
+pub use crate::parser::token::tokens_cmp;
+
+/// Declares a named parser which can easily be reused.
+///
+/// The expression which creates the parser should have no side effects as it may be called
+/// multiple times even during a single parse attempt.
+///
+/// NOTE: If you are using rust nightly you can use `impl Trait` instead. See the [json parser][] for
+/// an example.
+///
+/// [json parser]:https://github.com/Marwes/combine/blob/master/benches/json.rs
+///
+/// ```
+/// #[macro_use]
+/// extern crate combine;
+/// use combine::parser::char::digit;
+/// use combine::{any, choice, from_str, many1, Parser, EasyParser, Stream};
+/// use combine::error::ParseError;
+///
+/// parser!{
+/// /// `[Input]` represents a normal type parameter and lifetime declaration for the function
+/// /// It gets expanded to `<Input>`
+/// fn integer[Input]()(Input) -> i32
+/// where [
+/// Input: Stream<Token = char>,
+/// Input::Error: ParseError<char, Input::Range, Input::Position>,
+/// <Input::Error as ParseError<Input::Token, Input::Range, Input::Position>>::StreamError:
+/// From<::std::num::ParseIntError>,
+/// ]
+/// {
+/// // The body must be a block body ( `{ <block body> }`) which ends with an expression
+/// // which evaluates to a parser
+/// from_str(many1::<String, _, _>(digit()))
+/// }
+/// }
+///
+/// #[derive(Debug, PartialEq)]
+/// pub enum IntOrString {
+/// Int(i32),
+/// String(String),
+/// }
+/// // prefix with `pub` to declare a public parser
+/// parser!{
+/// // Documentation comments work as well
+///
+/// /// Parses an integer or a string (any characters)
+/// pub fn integer_or_string[Input]()(Input) -> IntOrString
+/// where [
+/// Input: Stream<Token = char>,
+/// Input::Error: ParseError<char, Input::Range, Input::Position>,
+/// <Input::Error as ParseError<Input::Token, Input::Range, Input::Position>>::StreamError:
+/// From<::std::num::ParseIntError>,
+/// ]
+/// {
+/// choice!(
+/// integer().map(IntOrString::Int),
+/// many1(any()).map(IntOrString::String)
+/// )
+/// }
+/// }
+///
+/// parser!{
+/// // Give the created type a unique name
+/// #[derive(Clone)]
+/// pub struct Twice;
+/// pub fn twice[Input, F, P](f: F)(Input) -> (P::Output, P::Output)
+/// where [P: Parser<Input>,
+/// F: FnMut() -> P]
+/// {
+/// (f(), f())
+/// }
+/// }
+///
+/// fn main() {
+/// assert_eq!(integer().easy_parse("123"), Ok((123, "")));
+/// assert!(integer().easy_parse("!").is_err());
+///
+/// assert_eq!(
+/// integer_or_string().easy_parse("123"),
+/// Ok((IntOrString::Int(123), ""))
+/// );
+/// assert_eq!(
+/// integer_or_string().easy_parse("abc"),
+/// Ok((IntOrString::String("abc".to_string()), ""))
+/// );
+/// assert_eq!(twice(|| digit()).parse("123"), Ok((('1', '2'), "3")));
+/// }
+/// ```
+#[macro_export]
+macro_rules! parser {
+ (
+ type PartialState = $partial_state: ty;
+ $(#[$attr:meta])*
+ $fn_vis: vis fn $name: ident [$($type_params: tt)*]( $($arg: ident : $arg_type: ty),*)
+ ($input_type: ty) -> $output_type: ty
+ where [$($where_clause: tt)*]
+ $parser: block
+ ) => {
+ $crate::combine_parser_impl!{
+ #[allow(non_camel_case_types)]
+ #[doc(hidden)]
+ $fn_vis struct $name;
+ (type PartialState = ($partial_state);)
+ $(#[$attr])*
+ $fn_vis fn $name [$($type_params)*]($($arg : $arg_type),*)($input_type) -> $output_type
+ where [$($where_clause)*]
+ $parser
+ }
+ };
+ (
+ $(#[$derive:meta])*
+ $struct_vis: vis struct $type_name: ident;
+ type PartialState = $partial_state: ty;
+ $(#[$attr:meta])*
+ $fn_vis: vis fn $name: ident [$($type_params: tt)*]( $($arg: ident : $arg_type: ty),* )
+ ($input_type: ty) -> $output_type: ty
+ where [$($where_clause: tt)*]
+ $parser: block
+ ) => {
+ $crate::combine_parser_impl!{
+ $(#[$derive])*
+ $struct_vis struct $type_name;
+ (type PartialState = ($partial_state);)
+ $(#[$attr])*
+ $fn_vis fn $name [$($type_params)*]($($arg : $arg_type),*)($input_type) -> $output_type
+ where [$($where_clause)*]
+ $parser
+ }
+ };
+ (
+ $(#[$attr:meta])*
+ $fn_vis: vis fn $name: ident [$($type_params: tt)*]( $($arg: ident : $arg_type: ty),*)
+ ($input_type: ty) -> $output_type: ty
+ where [$($where_clause: tt)*]
+ $parser: block
+ ) => {
+ $crate::combine_parser_impl!{
+ #[allow(non_camel_case_types)]
+ #[doc(hidden)]
+ $fn_vis struct $name;
+ (type PartialState = (());)
+ $(#[$attr])*
+ $fn_vis fn $name [$($type_params)*]($($arg : $arg_type),*)($input_type) -> $output_type
+ where [$($where_clause)*]
+ $parser
+ }
+ };
+ (
+ $(#[$derive:meta])*
+ $struct_vis: vis struct $type_name: ident;
+ $(#[$attr:meta])*
+ $fn_vis: vis fn $name: ident [$($type_params: tt)*]( $($arg: ident : $arg_type: ty),* )
+ ($input_type: ty) -> $output_type: ty
+ where [$($where_clause: tt)*]
+ $parser: block
+ ) => {
+ $crate::combine_parser_impl!{
+ $(#[$derive])*
+ $struct_vis struct $type_name;
+ (type PartialState = (());)
+ $(#[$attr])*
+ $fn_vis fn $name [$($type_params)*]($($arg : $arg_type),*)($input_type) -> $output_type
+ where [$($where_clause)*]
+ $parser
+ }
+ };
+}
+
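+// Dispatches on the declared `PartialState` of a `parser!` definition: for the default
+// `()` state a fresh `Default::default()` state is created on every call, while an
+// explicit partial state type is threaded through from the caller so that the parser
+// can resume partial parses.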
+#[doc(hidden)]
+#[macro_export]
+macro_rules! combine_parse_partial {
+ ((()) $mode:ident $input:ident $state:ident $parser:block) => {{
+ let _ = $state;
+ let mut state = Default::default();
+ let state = &mut state;
+ $parser.parse_mode($mode, $input, state)
+ }};
+ (($ignored:ty) $mode:ident $input:ident $state:ident $parser:block) => {
+ $parser.parse_mode($mode, $input, $state)
+ };
+}
+
+#[doc(hidden)]
+#[macro_export]
+macro_rules! combine_parser_impl {
+ (
+ $(#[$derive:meta])*
+ $struct_vis: vis struct $type_name: ident;
+ (type PartialState = ($($partial_state: tt)*);)
+ $(#[$attr:meta])*
+ $fn_vis: vis fn $name: ident [$($type_params: tt)*]( $($arg: ident : $arg_type: ty),*)
+ ($input_type: ty) -> $output_type: ty
+ where [$($where_clause: tt)*]
+ $parser: block
+ ) => {
+
+ $(#[$derive])*
+ $struct_vis struct $type_name<$($type_params)*>
+ where <$input_type as $crate::stream::StreamOnce>::Error:
+ $crate::error::ParseError<
+ <$input_type as $crate::stream::StreamOnce>::Token,
+ <$input_type as $crate::stream::StreamOnce>::Range,
+ <$input_type as $crate::stream::StreamOnce>::Position
+ >,
+ $input_type: $crate::stream::Stream,
+ $($where_clause)*
+ {
+ $(pub $arg : $arg_type,)*
+ __marker: $crate::lib::marker::PhantomData<fn ($input_type) -> $output_type>
+ }
+
+ // We want this to work on older compilers, at least for a while
+ #[allow(non_shorthand_field_patterns)]
+ impl<$($type_params)*> $crate::Parser<$input_type> for $type_name<$($type_params)*>
+ where <$input_type as $crate::stream::StreamOnce>::Error:
+ $crate::error::ParseError<
+ <$input_type as $crate::stream::StreamOnce>::Token,
+ <$input_type as $crate::stream::StreamOnce>::Range,
+ <$input_type as $crate::stream::StreamOnce>::Position
+ >,
+ $input_type: $crate::stream::Stream,
+ $($where_clause)*
+ {
+
+ type Output = $output_type;
+ type PartialState = $($partial_state)*;
+
+ $crate::parse_mode!($input_type);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut $input_type,
+ state: &mut Self::PartialState,
+ ) -> $crate::error::ParseResult<$output_type, <$input_type as $crate::stream::StreamOnce>::Error>
+ where M: $crate::parser::ParseMode
+ {
+ let $type_name { $( $arg: ref mut $arg,)* .. } = *self;
+ $crate::combine_parse_partial!(($($partial_state)*) mode input state $parser)
+ }
+
+ #[inline]
+ fn add_error(
+ &mut self,
+ errors: &mut $crate::error::Tracked<
+ <$input_type as $crate::stream::StreamOnce>::Error
+ >)
+ {
+ let $type_name { $( $arg : ref mut $arg,)* .. } = *self;
+ let mut parser = $parser;
+ {
+ let _: &mut dyn $crate::Parser< $input_type, Output = $output_type, PartialState = _> = &mut parser;
+ }
+ parser.add_error(errors)
+ }
+
+ fn add_committed_expected_error(
+ &mut self,
+ errors: &mut $crate::error::Tracked<
+ <$input_type as $crate::stream::StreamOnce>::Error
+ >)
+ {
+ let $type_name { $( $arg : ref mut $arg,)* .. } = *self;
+ let mut parser = $parser;
+ {
+ let _: &mut dyn $crate::Parser< $input_type, Output = $output_type, PartialState = _> = &mut parser;
+ }
+ parser.add_committed_expected_error(errors)
+ }
+ }
+
+ $(#[$attr])*
+ #[inline]
+ $fn_vis fn $name< $($type_params)* >(
+ $($arg : $arg_type),*
+ ) -> $type_name<$($type_params)*>
+ where <$input_type as $crate::stream::StreamOnce>::Error:
+ $crate::error::ParseError<
+ <$input_type as $crate::stream::StreamOnce>::Token,
+ <$input_type as $crate::stream::StreamOnce>::Range,
+ <$input_type as $crate::stream::StreamOnce>::Position
+ >,
+ $input_type: $crate::stream::Stream,
+ $($where_clause)*
+ {
+ $type_name {
+ $($arg,)*
+ __marker: $crate::lib::marker::PhantomData
+ }
+ }
+ };
+}
+
+/// Internal API. May break without a semver bump
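+///
+/// Forwards the listed `Parser` trait methods to the given field of `self`, so that
+/// wrapper parsers can delegate to an inner parser without boilerplate. For example
+/// `forward_parser!(Input, add_error parser_count, 0)` generates `add_error` and
+/// `parser_count` implementations which call the same methods on `self.0`.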
+macro_rules! forward_parser {
+ ($input: ty, $method: ident $( $methods: ident)*, $($field: tt)*) => {
+ forward_parser!($input, $method $($field)+);
+ forward_parser!($input, $($methods)*, $($field)+);
+ };
+ ($input: ty, parse_mode $($field: tt)+) => {
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut $input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <$input as $crate::StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ self.$($field)+.parse_mode(mode, input, state).map(|(a, _)| a)
+ }
+ };
+ ($input: ty, parse_lazy $($field: tt)+) => {
+ fn parse_lazy(
+ &mut self,
+ input: &mut $input,
+ ) -> ParseResult<Self::Output, <$input as $crate::StreamOnce>::Error> {
+ self.$($field)+.parse_lazy(input)
+ }
+ };
+ ($input: ty, parse_first $($field: tt)+) => {
+ fn parse_first(
+ &mut self,
+ input: &mut $input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <$input as $crate::StreamOnce>::Error> {
+ self.$($field)+.parse_first(input, state)
+ }
+ };
+ ($input: ty, parse_partial $($field: tt)+) => {
+ fn parse_partial(
+ &mut self,
+ input: &mut $input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <$input as $crate::StreamOnce>::Error> {
+ self.$($field)+.parse_partial(input, state)
+ }
+ };
+ ($input: ty, add_error $($field: tt)+) => {
+
+ fn add_error(&mut self, error: &mut $crate::error::Tracked<<$input as $crate::StreamOnce>::Error>) {
+ self.$($field)+.add_error(error)
+ }
+ };
+ ($input: ty, add_committed_expected_error $($field: tt)+) => {
+ fn add_committed_expected_error(&mut self, error: &mut $crate::error::Tracked<<$input as $crate::StreamOnce>::Error>) {
+ self.$($field)+.add_committed_expected_error(error)
+ }
+ };
+ ($input: ty, parser_count $($field: tt)+) => {
+ fn parser_count(&self) -> $crate::ErrorOffset {
+ self.$($field)+.parser_count()
+ }
+ };
+ ($input: ty, $field: tt) => {
+ forward_parser!($input, parse_lazy parse_first parse_partial add_error add_committed_expected_error parser_count, $field);
+ };
+ ($input: ty, $($field: tt)+) => {
+ };
+}
+
+// Facade over the core types we need
+// Public but hidden to be accessible in macros
+#[doc(hidden)]
+pub mod lib {
+ #[cfg(not(feature = "std"))]
+ pub use core::*;
+
+ #[cfg(feature = "std")]
+ pub use std::*;
+}
+
+#[cfg(feature = "std")]
+#[doc(inline)]
+pub use crate::stream::easy;
+
+/// Error types and traits which define what kind of errors combine parsers may emit
+#[macro_use]
+pub mod error;
+#[macro_use]
+pub mod stream;
+#[macro_use]
+pub mod parser;
+
+#[doc(hidden)]
+#[derive(Clone, PartialOrd, PartialEq, Debug, Copy)]
+pub struct ErrorOffset(u8);
+
+#[cfg(test)]
+mod tests {
+
+ use crate::parser::char::{char, string};
+
+ use super::*;
+
+ #[test]
+ fn chainl1_error_consume() {
+ fn first<T, U>(t: T, _: U) -> T {
+ t
+ }
+ let mut p = chainl1(string("abc"), char(',').map(|_| first));
+ assert!(p.parse("abc,ab").is_err());
+ }
+
+ #[test]
+ fn choice_strings() {
+ let mut fruits = [
+ attempt(string("Apple")),
+ attempt(string("Banana")),
+ attempt(string("Cherry")),
+ attempt(string("Date")),
+ attempt(string("Fig")),
+ attempt(string("Grape")),
+ ];
+ let mut parser = choice(&mut fruits);
+ assert_eq!(parser.parse("Apple"), Ok(("Apple", "")));
+ assert_eq!(parser.parse("Banana"), Ok(("Banana", "")));
+ assert_eq!(parser.parse("Cherry"), Ok(("Cherry", "")));
+ assert_eq!(parser.parse("DateABC"), Ok(("Date", "ABC")));
+ assert_eq!(parser.parse("Fig123"), Ok(("Fig", "123")));
+ assert_eq!(parser.parse("GrapeApple"), Ok(("Grape", "Apple")));
+ }
+}
+
+#[cfg(all(feature = "std", test))]
+mod std_tests {
+
+ use crate::{
+ error::StdParseResult,
+ parser::char::{alpha_num, char, digit, letter, spaces, string},
+ stream::{
+ easy,
+ position::{self, SourcePosition},
+ },
+ };
+
+ use super::{easy::Error, error::Commit, stream::IteratorStream, *};
+
+ #[test]
+ fn optional_error_consume() {
+ let mut p = optional(string("abc"));
+ let err = p.easy_parse(position::Stream::new("ab")).unwrap_err();
+ assert_eq!(err.position, SourcePosition { line: 1, column: 1 });
+ }
+
+ fn follow<Input>(input: &mut Input) -> StdParseResult<(), Input>
+ where
+ Input: Stream<Token = char, Error = easy::ParseError<Input>>,
+ Input::Position: Default,
+ Input::Error: std::fmt::Debug,
+ Input::Token: PartialEq,
+ Input::Range: PartialEq,
+ {
+ let before = input.checkpoint();
+ match input.uncons() {
+ Ok(c) => {
+ if c.is_alphanumeric() {
+ input.reset(before).unwrap();
+ let e = Error::Unexpected(c.into());
+ Err(Commit::Peek(easy::Errors::new(input.position(), e).into()))
+ } else {
+ Ok(((), Commit::Peek(())))
+ }
+ }
+ Err(_) => Ok(((), Commit::Peek(()))),
+ }
+ }
+
+ fn integer<'a, Input>(input: &mut Input) -> StdParseResult<i64, Input>
+ where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+ {
+ let (s, input) = many1::<String, _, _>(digit())
+ .expected("integer")
+ .parse_stream(input)
+ .into_result()?;
+ let mut n = 0;
+ for c in s.chars() {
+ n = n * 10 + (c as i64 - '0' as i64);
+ }
+ Ok((n, input))
+ }
+
+ #[test]
+ fn test_integer() {
+ let result = parser(integer).parse("123");
+ assert_eq!(result, Ok((123i64, "")));
+ }
+ #[test]
+ fn list() {
+ let mut p = sep_by(parser(integer), char(','));
+ let result = p.parse("123,4,56");
+ assert_eq!(result, Ok((vec![123i64, 4, 56], "")));
+ }
+
+ #[test]
+ fn iterator() {
+ let result = parser(integer)
+ .parse(position::Stream::new(IteratorStream::new("123".chars())))
+ .map(|(i, mut input)| (i, input.uncons().is_err()));
+ assert_eq!(result, Ok((123i64, true)));
+ }
+
+ #[test]
+ fn field() {
+ let word = || many(alpha_num());
+ let c_decl = (word(), spaces(), char(':'), spaces(), word())
+ .map(|t| (t.0, t.4))
+ .parse("x: int");
+ assert_eq!(c_decl, Ok((("x".to_string(), "int".to_string()), "")));
+ }
+
+ #[test]
+ fn source_position() {
+ let source = r"
+123
+";
+ let mut parsed_state = position::Stream::with_positioner(source, SourcePosition::new());
+ let result = (spaces(), parser(integer), spaces())
+ .map(|t| t.1)
+ .parse_stream(&mut parsed_state)
+ .into_result();
+ let state = Commit::Commit(position::Stream {
+ positioner: SourcePosition { line: 3, column: 1 },
+ input: "",
+ });
+ assert_eq!(
+ result.map(|(x, c)| (x, c.map(|_| parsed_state))),
+ Ok((123i64, state))
+ );
+ }
+
+ #[derive(Debug, PartialEq)]
+ pub enum Expr {
+ Id(String),
+ Int(i64),
+ Array(Vec<Expr>),
+ Plus(Box<Expr>, Box<Expr>),
+ Times(Box<Expr>, Box<Expr>),
+ }
+
+ parser! {
+ fn expr[Input]()(Input) -> Expr
+ where
+ [Input: Stream<Token = char>,]
+ {
+ let word = many1(letter()).expected("identifier");
+ let integer = parser(integer);
+ let array = between(char('['), char(']'), sep_by(expr(), char(','))).expected("[");
+ let paren_expr = between(char('('), char(')'), parser(term)).expected("(");
+ spaces()
+ .silent()
+ .with(
+ word.map(Expr::Id)
+ .or(integer.map(Expr::Int))
+ .or(array.map(Expr::Array))
+ .or(paren_expr),
+ )
+ .skip(spaces().silent())
+ }
+ }
+
+ #[test]
+ fn expression_basic() {
+ let result = sep_by(expr(), char(',')).parse("int, 100, [[], 123]");
+ let exprs = vec![
+ Expr::Id("int".to_string()),
+ Expr::Int(100),
+ Expr::Array(vec![Expr::Array(vec![]), Expr::Int(123)]),
+ ];
+ assert_eq!(result, Ok((exprs, "")));
+ }
+
+ #[test]
+ fn expression_error() {
+ let input = r"
+,123
+";
+ let result = expr().easy_parse(position::Stream::new(input));
+ let err = easy::Errors {
+ position: SourcePosition { line: 2, column: 1 },
+ errors: vec![
+ Error::Unexpected(','.into()),
+ Error::Expected("integer".into()),
+ Error::Expected("identifier".into()),
+ Error::Expected("[".into()),
+ Error::Expected("(".into()),
+ ],
+ };
+ assert_eq!(result, Err(err));
+ }
+
+ fn term<Input>(input: &mut Input) -> StdParseResult<Expr, Input>
+ where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+ {
+ fn times(l: Expr, r: Expr) -> Expr {
+ Expr::Times(Box::new(l), Box::new(r))
+ }
+ fn plus(l: Expr, r: Expr) -> Expr {
+ Expr::Plus(Box::new(l), Box::new(r))
+ }
+ let mul = char('*').map(|_| times);
+ let add = char('+').map(|_| plus);
+ let factor = chainl1(expr(), mul);
+ chainl1(factor, add).parse_stream(input).into()
+ }
+
+ #[test]
+ fn operators() {
+ let input = r"
+1 * 2 + 3 * test
+";
+ let (result, _) = parser(term).parse(position::Stream::new(input)).unwrap();
+
+ let e1 = Expr::Times(Box::new(Expr::Int(1)), Box::new(Expr::Int(2)));
+ let e2 = Expr::Times(
+ Box::new(Expr::Int(3)),
+ Box::new(Expr::Id("test".to_string())),
+ );
+ assert_eq!(result, Expr::Plus(Box::new(e1), Box::new(e2)));
+ }
+
+ #[test]
+ fn error_position() {
+ let mut p = string("let")
+ .skip(parser(follow))
+ .map(|x| x.to_string())
+ .or(many1(digit()));
+ match p.easy_parse(position::Stream::new("le123")) {
+ Ok(_) => assert!(false),
+ Err(err) => assert_eq!(err.position, SourcePosition { line: 1, column: 1 }),
+ }
+ match p.easy_parse(position::Stream::new("let1")) {
+ Ok(_) => assert!(false),
+ Err(err) => assert_eq!(err.position, SourcePosition { line: 1, column: 4 }),
+ }
+ }
+
+ #[test]
+ fn sep_by_error_consume() {
+ let mut p = sep_by::<Vec<_>, _, _, _>(string("abc"), char(','));
+ let err = p.easy_parse(position::Stream::new("ab,abc")).unwrap_err();
+ assert_eq!(err.position, SourcePosition { line: 1, column: 1 });
+ }
+
+ #[test]
+ fn inner_error_consume() {
+ let mut p = many::<Vec<_>, _, _>(between(char('['), char(']'), digit()));
+ let result = p.easy_parse(position::Stream::new("[1][2][]"));
+ assert!(result.is_err(), "{:?}", result);
+ let error = result.map(|x| format!("{:?}", x)).unwrap_err();
+ assert_eq!(error.position, SourcePosition { line: 1, column: 8 });
+ }
+
+ #[test]
+ fn infinite_recursion_in_box_parser() {
+ let _: Result<(Vec<_>, _), _> = (many(Box::new(digit()))).parse("1");
+ }
+
+ #[test]
+ fn unsized_parser() {
+ let mut parser: Box<dyn Parser<_, Output = char, PartialState = _>> = Box::new(digit());
+ let borrow_parser = &mut *parser;
+ assert_eq!(borrow_parser.parse("1"), Ok(('1', "")));
+ }
+
+ #[test]
+ fn std_error() {
+ use std::error::Error as StdError;
+
+ use std::fmt;
+
+ #[derive(Debug)]
+ struct Error;
+ impl fmt::Display for Error {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "error")
+ }
+ }
+ impl StdError for Error {
+ fn description(&self) -> &str {
+ "error"
+ }
+ }
+ let result: Result<((), _), easy::Errors<char, &str, _>> =
+ EasyParser::easy_parse(&mut string("abc").and_then(|_| Err(Error)), "abc");
+ assert!(result.is_err());
+ // Test that ParseError can be coerced to a StdError
+ let _ = result.map_err(|err| {
+ let err: Box<dyn StdError> = Box::new(err);
+ err
+ });
+ }
+
+ #[test]
+ fn extract_std_error() {
+ // The previous test verified that we could map a ParseError to a StdError by dropping
+ // the internal error details.
+ // This test verifies that we can map a ParseError to a StdError
+ // without dropping the internal error details. Consumers using `error-chain` will
+ // appreciate this. For technical reasons this is pretty janky; see the discussion in
+ // https://github.com/Marwes/combine/issues/86, and excuse the test with significant
+ // boilerplate!
+ use std::error::Error as StdError;
+
+ use std::fmt;
+
+ #[derive(Clone, PartialEq, Debug)]
+ struct CloneOnly(String);
+
+ #[derive(Debug)]
+ struct DisplayVec<T>(Vec<T>);
+
+ #[derive(Debug)]
+ struct ExtractedError(usize, DisplayVec<Error<CloneOnly, DisplayVec<CloneOnly>>>);
+
+ impl StdError for ExtractedError {
+ fn description(&self) -> &str {
+ "extracted error"
+ }
+ }
+
+ impl fmt::Display for CloneOnly {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self.0)
+ }
+ }
+
+ impl<T: fmt::Debug> fmt::Display for DisplayVec<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "[{:?}]", self.0)
+ }
+ }
+
+ impl fmt::Display for ExtractedError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ writeln!(f, "Parse error at {}", self.0)?;
+ Error::fmt_errors(&(self.1).0, f)
+ }
+ }
+
+ let input = &[CloneOnly("x".to_string()), CloneOnly("y".to_string())][..];
+ let result = token(CloneOnly("z".to_string()))
+ .easy_parse(input)
+ .map_err(|e| e.map_position(|p| p.translate_position(input)))
+ .map_err(|e| {
+ ExtractedError(
+ e.position,
+ DisplayVec(
+ e.errors
+ .into_iter()
+ .map(|e| e.map_range(|r| DisplayVec(r.to_owned())))
+ .collect(),
+ ),
+ )
+ });
+
+ assert!(result.is_err());
+ // Test that the fresh ExtractedError is Display, so that the internal errors can be
+ // inspected by consuming code; and that the ExtractedError can be coerced to StdError.
+ let _ = result.map_err(|err| {
+ let s = format!("{}", err);
+ assert!(s.starts_with("Parse error at 0"));
+ assert!(s.contains("Expected"));
+ let err: Box<dyn StdError> = Box::new(err);
+ err
+ });
+ }
+}
diff --git a/src/parser/byte.rs b/src/parser/byte.rs
new file mode 100644
index 0000000..95fc4c1
--- /dev/null
+++ b/src/parser/byte.rs
@@ -0,0 +1,676 @@
+//! Module containing parsers specialized on byte streams.
+
+use crate::{
+ error::{self, ParseError, ParseResult::*},
+ parser::{
+ combinator::no_partial,
+ range::{take_fn, TakeRange},
+ repeat::skip_many,
+ token::{satisfy, token, tokens_cmp, Token},
+ },
+ stream::{RangeStream, Stream},
+ Parser,
+};
+
+/// Parses a byte and succeeds if the byte is equal to `c`.
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::byte::byte;
+/// assert_eq!(byte(b'!').parse(&b"!"[..]), Ok((b'!', &b""[..])));
+/// assert!(byte(b'A').parse(&b""[..]).is_err());
+/// assert!(byte(b'A').parse(&b"!"[..]).is_err());
+/// ```
+pub fn byte<Input>(c: u8) -> Token<Input>
+where
+ Input: Stream<Token = u8>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ token(c)
+}
+
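+// Helper for the ASCII classification parsers below: wraps `satisfy` around a boolean
+// `u8` method such as `is_ascii_digit` and attaches the parser's name as the expected
+// error message.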
+macro_rules! byte_parser {
+ ($name:ident, $ty:ident, $f: ident) => {{
+ satisfy(|c: u8| c.$f())
+ .expected(stringify!($name))
+ }};
+ ($name:ident, $ty:ident, $f: ident $($args:tt)+) => {{
+ satisfy(|c: u8| c.$f $($args)+)
+ .expected(stringify!($name))
+ }};
+}
+
+/// Parses a base-10 digit (0–9).
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::byte::digit;
+/// assert_eq!(digit().parse(&b"9"[..]), Ok((b'9', &b""[..])));
+/// assert!(digit().parse(&b"A"[..]).is_err());
+/// ```
+pub fn digit<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
+where
+ Input: Stream<Token = u8>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ byte_parser!(digit, Digit, is_ascii_digit())
+}
+
+/// Parses a `b' '`, `b'\t'`, `b'\n'` or `b'\r'`.
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::byte::space;
+/// assert_eq!(space().parse(&b" "[..]), Ok((b' ', &b""[..])));
+/// assert_eq!(space().parse(&b"  "[..]), Ok((b' ', &b" "[..])));
+/// assert!(space().parse(&b"!"[..]).is_err());
+/// assert!(space().parse(&b""[..]).is_err());
+/// ```
+pub fn space<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
+where
+ Input: Stream<Token = u8>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ byte_parser!(space, Space, is_ascii_whitespace)
+}
+
+/// Skips over [`space`] zero or more times
+///
+/// [`space`]: fn.space.html
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::byte::spaces;
+/// assert_eq!(spaces().parse(&b""[..]), Ok(((), &b""[..])));
+/// assert_eq!(spaces().parse(&b" "[..]), Ok(((), &b""[..])));
+/// ```
+pub fn spaces<Input>() -> impl Parser<Input, Output = ()>
+where
+ Input: Stream<Token = u8>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ skip_many(space()).expected("whitespaces")
+}
+
+/// Parses a newline byte (`b'\n'`).
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::byte::newline;
+/// assert_eq!(newline().parse(&b"\n"[..]), Ok((b'\n', &b""[..])));
+/// assert!(newline().parse(&b"\r"[..]).is_err());
+/// ```
+pub fn newline<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
+where
+ Input: Stream<Token = u8>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ satisfy(|ch: u8| ch == b'\n').expected("lf newline")
+}
+
+/// Parses carriage return and newline (`&b"\r\n"`), returning the newline byte.
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::byte::crlf;
+/// assert_eq!(crlf().parse(&b"\r\n"[..]), Ok((b'\n', &b""[..])));
+/// assert!(crlf().parse(&b"\r"[..]).is_err());
+/// assert!(crlf().parse(&b"\n"[..]).is_err());
+/// ```
+pub fn crlf<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
+where
+ Input: Stream<Token = u8>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ no_partial(satisfy(|ch: u8| ch == b'\r').with(newline())).expected("crlf newline")
+}
+
+/// Parses a tab byte (`b'\t'`).
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::byte::tab;
+/// assert_eq!(tab().parse(&b"\t"[..]), Ok((b'\t', &b""[..])));
+/// assert!(tab().parse(&b" "[..]).is_err());
+/// ```
+pub fn tab<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
+where
+ Input: Stream<Token = u8>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ satisfy(|ch| ch == b'\t').expected("tab")
+}
+
+/// Parses an uppercase ASCII letter (A–Z).
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::byte::upper;
+/// assert_eq!(upper().parse(&b"A"[..]), Ok((b'A', &b""[..])));
+/// assert!(upper().parse(&b"a"[..]).is_err());
+/// ```
+pub fn upper<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
+where
+ Input: Stream<Token = u8>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ byte_parser!(upper, Upper, is_ascii_uppercase)
+}
+
+/// Parses a lowercase ASCII letter (a–z).
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::byte::lower;
+/// assert_eq!(lower().parse(&b"a"[..]), Ok((b'a', &b""[..])));
+/// assert!(lower().parse(&b"A"[..]).is_err());
+/// ```
+pub fn lower<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
+where
+ Input: Stream<Token = u8>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ byte_parser!(lower, Lower, is_ascii_lowercase)
+}
+
+/// Parses either an ASCII alphabet letter or digit (a–z, A–Z, 0–9).
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::byte::alpha_num;
+/// assert_eq!(alpha_num().parse(&b"A"[..]), Ok((b'A', &b""[..])));
+/// assert_eq!(alpha_num().parse(&b"1"[..]), Ok((b'1', &b""[..])));
+/// assert!(alpha_num().parse(&b"!"[..]).is_err());
+/// ```
+pub fn alpha_num<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
+where
+ Input: Stream<Token = u8>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ byte_parser!(alpha_num, AlphaNum, is_ascii_alphanumeric)
+}
+
+/// Parses an ASCII alphabet letter (a–z, A–Z).
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::byte::letter;
+/// assert_eq!(letter().parse(&b"a"[..]), Ok((b'a', &b""[..])));
+/// assert_eq!(letter().parse(&b"A"[..]), Ok((b'A', &b""[..])));
+/// assert!(letter().parse(&b"9"[..]).is_err());
+/// ```
+pub fn letter<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
+where
+ Input: Stream<Token = u8>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ byte_parser!(letter, Letter, is_ascii_alphabetic)
+}
+
+/// Parses an octal digit.
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::byte::oct_digit;
+/// assert_eq!(oct_digit().parse(&b"7"[..]), Ok((b'7', &b""[..])));
+/// assert!(oct_digit().parse(&b"8"[..]).is_err());
+/// ```
+pub fn oct_digit<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
+where
+ Input: Stream<Token = u8>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ satisfy(|ch| ch >= b'0' && ch <= b'7').expected("octal digit")
+}
+
+/// Parses an ASCII hexadecimal digit (accepts both uppercase and lowercase).
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::byte::hex_digit;
+/// assert_eq!(hex_digit().parse(&b"F"[..]), Ok((b'F', &b""[..])));
+/// assert!(hex_digit().parse(&b"H"[..]).is_err());
+/// ```
+pub fn hex_digit<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>
+where
+ Input: Stream<Token = u8>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ byte_parser!(hex_digit, HexDigit, is_ascii_hexdigit())
+}
+
+parser! {
+/// Parses the bytes `s`.
+///
+/// If you have a stream implementing [`RangeStream`] such as `&[u8]` you can also use the
+/// [`range`] parser which may be more efficient.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::byte::bytes;
+/// # fn main() {
+/// let result = bytes(&b"rust"[..])
+/// .parse(&b"rust"[..])
+/// .map(|x| x.0);
+/// assert_eq!(result, Ok(&b"rust"[..]));
+/// # }
+/// ```
+///
+/// [`RangeStream`]: super::super::stream::RangeStream
+/// [`range`]: super::range::range
+pub fn bytes['a, 'b, Input](s: &'static [u8])(Input) -> &'a [u8]
+where [
+ Input: Stream<Token = u8, Range = &'b [u8]>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+]
+{
+ bytes_cmp(s, |l: u8, r: u8| l == r)
+}
+}
+
+parser! {
+/// Parses the bytes `s` using `cmp` to compare each token.
+///
+/// If you have a stream implementing [`RangeStream`] such as `&[u8]` you can also use the
+/// [`range`] parser which may be more efficient.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::byte::bytes_cmp;
+/// # use combine::stream::easy::Info;
+/// # fn main() {
+/// let result = bytes_cmp(&b"abc"[..], |l, r| l.eq_ignore_ascii_case(&r))
+/// .parse(&b"AbC"[..]);
+/// assert_eq!(result, Ok((&b"abc"[..], &b""[..])));
+/// # }
+/// ```
+///
+/// [`RangeStream`]: super::super::stream::RangeStream
+/// [`range`]: super::range::range
+pub fn bytes_cmp['a, 'b, C, Input](s: &'static [u8], cmp: C)(Input) -> &'a [u8]
+where [
+ C: FnMut(u8, u8) -> bool,
+ Input: Stream<Token = u8, Range = &'b [u8]>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+]
+{
+ let s = *s;
+ tokens_cmp(s.iter().cloned(), cmp)
+ .map(move |_| s)
+ .expected(error::Range(s))
+}
+}
+
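+// Generates the `take_until_byte*` parsers below: each one delegates to `take_fn` and
+// uses the corresponding `memchr` search function to locate the first occurrence of
+// any of its stop bytes.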
+macro_rules! take_until {
+ (
+ $(#[$attr:meta])*
+ $type_name: ident, $func_name: ident, $memchr: ident, $($param: ident),+
+ ) => {
+ parser!{
+ #[derive(Clone)]
+ pub struct $type_name;
+ $(#[$attr])*
+ pub fn $func_name[Input]($($param : u8),*)(Input) -> Input::Range
+ where [
+ Input: RangeStream,
+ Input::Range: AsRef<[u8]> + crate::stream::Range,
+ ]
+ {
+ take_fn(move |haystack: Input::Range| {
+ let haystack = haystack.as_ref();
+ match ::memchr::$memchr( $(*$param),+ , haystack) {
+ Some(i) => TakeRange::Found(i),
+ None => TakeRange::NotFound(haystack.len()),
+ }
+ })
+ }
+ }
+ }
+}
+
+take_until! {
+ /// Zero-copy parser which reads a range of 0 or more tokens until `a` is found.
+ ///
+ /// If `a` is not found, the parser will return an error.
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::parser::byte::take_until_byte;
+ /// # use combine::*;
+ /// # fn main() {
+ /// let mut parser = take_until_byte(b'\r');
+ /// let result = parser.parse("To: user@example.com\r\n");
+ /// assert_eq!(result, Ok(("To: user@example.com", "\r\n")));
+ /// let result = parser.parse("Hello, world\n");
+ /// assert!(result.is_err());
+ /// # }
+ /// ```
+ TakeUntilByte, take_until_byte, memchr, a
+}
+take_until! {
+ /// Zero-copy parser which reads a range of 0 or more tokens until `a` or `b` is found.
+ ///
+ /// If `a` or `b` is not found, the parser will return an error.
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::parser::byte::take_until_byte2;
+ /// # use combine::*;
+ /// # fn main() {
+ /// let mut parser = take_until_byte2(b'\r', b'\n');
+ /// let result = parser.parse("To: user@example.com\r\n");
+ /// assert_eq!(result, Ok(("To: user@example.com", "\r\n")));
+ /// let result = parser.parse("Hello, world\n");
+ /// assert_eq!(result, Ok(("Hello, world", "\n")));
+ /// # }
+ /// ```
+ TakeUntilByte2, take_until_byte2, memchr2, a, b
+}
+take_until! {
+ /// Zero-copy parser which reads a range of 0 or more tokens until `a`, `b` or `c` is found.
+ ///
+ /// If `a`, `b` or `c` is not found, the parser will return an error.
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::parser::byte::take_until_byte3;
+ /// # use combine::*;
+ /// # fn main() {
+ /// let mut parser = take_until_byte3(b'\r', b'\n', b' ');
+ /// let result = parser.parse("To: user@example.com\r\n");
+ /// assert_eq!(result, Ok(("To:", " user@example.com\r\n")));
+ /// let result = parser.parse("Helloworld");
+ /// assert!(result.is_err());
+ /// # }
+ /// ```
+ TakeUntilByte3, take_until_byte3, memchr3, a, b, c
+}
+
+parser! {
+/// Zero-copy parser which reads a range of 0 or more tokens until `needle` is found.
+///
+/// If `needle` is not found, the parser will return an error.
+///
+/// Optimized variant of [`take_until_range`](../range/fn.take_until_range.html).
+///
+/// ```
+/// use combine::*;
+/// use combine::parser::byte::take_until_bytes;
+/// assert_eq!(
+/// take_until_bytes(&b"\r\n"[..]).easy_parse(&b"abc\r\n"[..]).map(|(x, _)| x),
+///     Ok(&b"abc"[..])
+/// );
+/// // Also works on strings as long as `needle` is UTF-8
+/// assert_eq!(
+/// take_until_bytes("\r\n".as_bytes()).easy_parse("abc\r\n").map(|(x, _)| x),
+///     Ok("abc")
+/// );
+/// ```
+pub fn take_until_bytes['a, Input](needle: &'a [u8])(Input) -> Input::Range
+where [
+ Input: RangeStream,
+ Input::Range: AsRef<[u8]> + crate::stream::Range,
+]
+{
+ take_fn(move |haystack: Input::Range| {
+ let haystack = haystack.as_ref();
+ match memslice(needle, haystack) {
+ Some(i) => TakeRange::Found(i),
+ None => TakeRange::NotFound(haystack.len().saturating_sub(needle.len() - 1)),
+ }
+ })
+}
+
+}
+
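+/// Returns the index of the first occurrence of `needle` in `haystack`, or `None` if it does not
+/// occur. An empty `needle` matches at index 0.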
+fn memslice(needle: &[u8], haystack: &[u8]) -> Option<usize> {
+ let (&prefix, suffix) = match needle.split_first() {
+ Some(x) => x,
+ None => return Some(0),
+ };
+ for i in memchr::memchr_iter(prefix, haystack) {
+ if haystack[i + 1..].starts_with(suffix) {
+ return Some(i);
+ }
+ }
+ None
+}
+
+/// Parsers for decoding numbers in big-endian or little-endian order.
+pub mod num {
+
+ use crate::{error::ResultExt, lib::mem::size_of, parser::function::parser, stream::uncons};
+
+ use super::*;
+
+ macro_rules! integer_parser {
+ (
+ $(#[$attr:meta])*
+ pub $type_name: ident,
+ $output_type: ident, $be_name: ident, $le_name: ident, $read_name: ident
+ ) => {
+ $(#[$attr])*
+ pub fn $be_name<'a, Input>() -> impl Parser<Input, Output = $output_type, PartialState = ()>
+ where
+ Input: Stream<Token = u8>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+ {
+ parser(|input: &mut Input| {
+ let checkpoint = input.checkpoint();
+ let result = (|input: &mut Input| {
+ let mut buffer = [0u8; size_of::<$output_type>()];
+ for elem in &mut buffer[..] {
+ *elem = ctry!(uncons(input)).0;
+ }
+ CommitOk($output_type::from_be_bytes(buffer))
+ })(input);
+ if result.is_err() {
+ input.reset(checkpoint).committed().into_result()?;
+ }
+ result.into_result()
+ })
+ }
+
+ $(#[$attr])*
+ pub fn $le_name<'a, Input>() -> impl Parser<Input, Output = $output_type, PartialState = ()>
+ where
+ Input: Stream<Token = u8>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+ {
+ parser(|input: &mut Input| {
+ let checkpoint = input.checkpoint();
+ let result = (|input: &mut Input| {
+ let mut buffer = [0u8; size_of::<$output_type>()];
+ for elem in &mut buffer[..] {
+ *elem = ctry!(uncons(input)).0;
+ }
+ CommitOk($output_type::from_le_bytes(buffer))
+ })(input);
+ if result.is_err() {
+ input.reset(checkpoint).committed().into_result()?;
+ }
+ result.into_result()
+ })
+ }
+ }
+ }
+
+ integer_parser!(
+        /// Reads a u16 out of the byte stream with the specified endianness
+ ///
+ /// ```
+ /// use combine::Parser;
+ /// use combine::parser::byte::num::le_u16;
+ ///
+ /// assert_eq!(le_u16().parse(&b"\x01\0"[..]), Ok((1, &b""[..])));
+ /// assert!(le_u16().parse(&b"\0"[..]).is_err());
+ /// ```
+ pub U16, u16, be_u16, le_u16, read_u16
+ );
+ integer_parser!(
+        /// Reads a u32 out of the byte stream with the specified endianness
+ ///
+ /// ```
+ /// use combine::Parser;
+ /// use combine::parser::byte::num::le_u32;
+ ///
+ /// assert_eq!(le_u32().parse(&b"\x01\0\0\0"[..]), Ok((1, &b""[..])));
+ /// assert!(le_u32().parse(&b"\x01\0\0"[..]).is_err());
+ /// ```
+ pub U32, u32, be_u32, le_u32, read_u32
+ );
+ integer_parser!(
+        /// Reads a u64 out of the byte stream with the specified endianness
+ ///
+ /// ```
+ /// use combine::Parser;
+ /// use combine::parser::byte::num::le_u64;
+ ///
+ /// assert_eq!(le_u64().parse(&b"\x01\0\0\0\0\0\0\0"[..]), Ok((1, &b""[..])));
+ /// assert!(le_u64().parse(&b"\x01\0\0\0\0\0\0"[..]).is_err());
+ /// ```
+ pub U64, u64, be_u64, le_u64, read_u64
+ );
+
+ integer_parser!(
+        /// Reads an i16 out of the byte stream with the specified endianness
+ ///
+ /// ```
+ /// use combine::Parser;
+ /// use combine::parser::byte::num::le_i16;
+ ///
+ /// assert_eq!(le_i16().parse(&b"\x01\0"[..]), Ok((1, &b""[..])));
+ /// assert!(le_i16().parse(&b"\x01"[..]).is_err());
+ /// ```
+ pub I16, i16, be_i16, le_i16, read_i16
+ );
+
+ integer_parser!(
+        /// Reads an i32 out of the byte stream with the specified endianness
+ ///
+ /// ```
+ /// use combine::Parser;
+ /// use combine::parser::byte::num::le_i32;
+ ///
+ /// assert_eq!(le_i32().parse(&b"\x01\0\0\0"[..]), Ok((1, &b""[..])));
+ /// assert!(le_i32().parse(&b"\x01\0\0"[..]).is_err());
+ /// ```
+ pub I32, i32, be_i32, le_i32, read_i32
+ );
+ integer_parser!(
+        /// Reads an i64 out of the byte stream with the specified endianness
+ ///
+ /// ```
+ /// use combine::Parser;
+ /// use combine::parser::byte::num::le_i64;
+ ///
+ /// assert_eq!(le_i64().parse(&b"\x01\0\0\0\0\0\0\0"[..]), Ok((1, &b""[..])));
+ /// assert!(le_i64().parse(&b"\x01\0\0\0\0\0\0"[..]).is_err());
+ /// ```
+ pub I64, i64, be_i64, le_i64, read_i64
+ );
+
+ integer_parser!(
+        /// Reads an f32 out of the byte stream with the specified endianness
+ ///
+ /// ```
+ /// use combine::Parser;
+ /// use combine::parser::byte::num::le_f32;
+ ///
+ /// let buf = 123.45f32.to_le_bytes();
+ /// assert_eq!(le_f32().parse(&buf[..]), Ok((123.45, &b""[..])));
+ /// assert!(le_f32().parse(&b"\x01\0\0"[..]).is_err());
+ /// ```
+ pub F32, f32, be_f32, le_f32, read_f32
+ );
+ integer_parser!(
+        /// Reads an f64 out of the byte stream with the specified endianness
+ ///
+ /// ```
+ /// use combine::Parser;
+ /// use combine::parser::byte::num::le_f64;
+ ///
+ /// let buf = 123.45f64.to_le_bytes();
+ /// assert_eq!(le_f64().parse(&buf[..]), Ok((123.45, &b""[..])));
+ /// assert!(le_f64().parse(&b"\x01\0\0\0\0\0\0"[..]).is_err());
+ /// ```
+ pub F64, f64, be_f64, le_f64, read_f64
+ );
+
+ #[cfg(test)]
+ mod tests {
+
+ use crate::stream::{buffered, position, IteratorStream};
+
+ use super::*;
+
+ #[test]
+ fn no_rangestream() {
+ let buf = 123.45f64.to_le_bytes();
+ assert_eq!(
+ le_f64()
+ .parse(buffered::Stream::new(
+ position::Stream::new(IteratorStream::new(buf.iter().cloned())),
+ 1
+ ))
+ .map(|(t, _)| t),
+ Ok(123.45)
+ );
+ assert_eq!(
+ le_f64()
+ .parse(buffered::Stream::new(
+ position::Stream::new(IteratorStream::new(buf.iter().cloned())),
+ 1
+ ))
+ .map(|(t, _)| t),
+ Ok(123.45)
+ );
+ let buf = 123.45f64.to_be_bytes();
+ assert_eq!(
+ be_f64()
+ .parse(buffered::Stream::new(
+ position::Stream::new(IteratorStream::new(buf.iter().cloned())),
+ 1
+ ))
+ .map(|(t, _)| t),
+ Ok(123.45)
+ );
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+
+ use crate::stream::{buffered, position, read};
+
+ use super::*;
+
+ #[test]
+ fn memslice_basic() {
+ let haystack = b"abc123";
+ assert_eq!(memslice(b"", haystack), Some(0));
+ assert_eq!(memslice(b"a", haystack), Some(0));
+ assert_eq!(memslice(b"ab", haystack), Some(0));
+ assert_eq!(memslice(b"c12", haystack), Some(2));
+
+ let haystack2 = b"abcab2";
+ assert_eq!(memslice(b"abc", haystack2), Some(0));
+ assert_eq!(memslice(b"ab2", haystack2), Some(3));
+
+ let haystack3 = b"aaabaaaa";
+ assert_eq!(memslice(b"aaaa", haystack3), Some(4));
+ }
+
+ #[test]
+ fn bytes_read_stream() {
+ assert!(bytes(b"abc")
+ .parse(buffered::Stream::new(
+ position::Stream::new(read::Stream::new("abc".as_bytes())),
+ 1
+ ))
+ .is_ok());
+ }
+}
diff --git a/src/parser/char.rs b/src/parser/char.rs
new file mode 100644
index 0000000..b132910
--- /dev/null
+++ b/src/parser/char.rs
@@ -0,0 +1,335 @@
+//! Module containing parsers specialized on character streams.
+
+use crate::{
+ error::ParseError,
+ parser::{
+ combinator::no_partial,
+ repeat::skip_many,
+ token::{satisfy, token, tokens_cmp, Token},
+ },
+ stream::Stream,
+ Parser,
+};
+
+/// Parses a character and succeeds if the character is equal to `c`.
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::char::char;
+/// assert_eq!(char('!').parse("!"), Ok(('!', "")));
+/// assert!(char('A').parse("!").is_err());
+/// ```
+pub fn char<Input>(c: char) -> Token<Input>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ token(c)
+}
+
+parser! {
+ #[derive(Copy, Clone)]
+ pub struct Digit;
+ /// Parses a base-10 digit.
+ ///
+ /// ```
+ /// use combine::Parser;
+ /// use combine::parser::char::digit;
+ /// assert_eq!(digit().parse("9"), Ok(('9', "")));
+ /// assert!(digit().parse("A").is_err());
+ /// ```
+ pub fn digit[Input]()(Input) -> char
+ where
+ [Input: Stream<Token = char>,]
+ {
+ satisfy(|c: char| c.is_digit(10)).expected("digit")
+ }
+}
+
+/// Parses a single whitespace character according to [`std::char::is_whitespace`].
+///
+/// This includes space characters, tabs and newlines.
+///
+/// [`std::char::is_whitespace`]: https://doc.rust-lang.org/std/primitive.char.html#method.is_whitespace
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::char::space;
+/// assert_eq!(space().parse(" "), Ok((' ', "")));
+/// assert_eq!(space().parse(" "), Ok((' ', " ")));
+/// assert!(space().parse("!").is_err());
+/// assert!(space().parse("").is_err());
+/// ```
+pub fn space<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ let f: fn(char) -> bool = char::is_whitespace;
+ satisfy(f).expected("whitespace")
+}
+
+/// Skips over zero or more whitespace characters according to [`std::char::is_whitespace`].
+///
+/// This includes space characters, tabs and newlines.
+///
+/// [`std::char::is_whitespace`]: https://doc.rust-lang.org/std/primitive.char.html#method.is_whitespace
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::char::spaces;
+/// assert_eq!(spaces().parse(""), Ok(((), "")));
+/// assert_eq!(spaces().parse(" "), Ok(((), "")));
+/// ```
+pub fn spaces<Input>() -> impl Parser<Input, Output = ()>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ skip_many(space()).expected("whitespaces")
+}
+
+/// Parses a newline character (`'\n'`).
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::char::newline;
+/// assert_eq!(newline().parse("\n"), Ok(('\n', "")));
+/// assert!(newline().parse("\r").is_err());
+/// ```
+pub fn newline<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ satisfy(|ch: char| ch == '\n').expected("lf newline")
+}
+
+/// Parses carriage return and newline (`"\r\n"`), returning the newline character.
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::char::crlf;
+/// assert_eq!(crlf().parse("\r\n"), Ok(('\n', "")));
+/// assert!(crlf().parse("\r").is_err());
+/// assert!(crlf().parse("\n").is_err());
+/// ```
+pub fn crlf<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ no_partial(satisfy(|ch: char| ch == '\r').with(newline())).expected("crlf newline")
+}
+
+/// Parses a tab character (`'\t'`).
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::char::tab;
+/// assert_eq!(tab().parse("\t"), Ok(('\t', "")));
+/// assert!(tab().parse(" ").is_err());
+/// ```
+pub fn tab<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ satisfy(|ch: char| ch == '\t').expected("tab")
+}
+
+/// Parses an uppercase letter according to [`std::char::is_uppercase`].
+///
+/// [`std::char::is_uppercase`]: https://doc.rust-lang.org/std/primitive.char.html#method.is_uppercase
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::char::upper;
+/// assert_eq!(upper().parse("A"), Ok(('A', "")));
+/// assert!(upper().parse("a").is_err());
+/// ```
+pub fn upper<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ satisfy(|ch: char| ch.is_uppercase()).expected("uppercase letter")
+}
+
+/// Parses a lowercase letter according to [`std::char::is_lowercase`].
+///
+/// [`std::char::is_lowercase`]: https://doc.rust-lang.org/std/primitive.char.html#method.is_lowercase
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::char::lower;
+/// assert_eq!(lower().parse("a"), Ok(('a', "")));
+/// assert!(lower().parse("A").is_err());
+/// ```
+pub fn lower<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ satisfy(|ch: char| ch.is_lowercase()).expected("lowercase letter")
+}
+
+/// Parses either an alphabetic letter or a digit according to [`std::char::is_alphanumeric`].
+///
+/// [`std::char::is_alphanumeric`]: https://doc.rust-lang.org/std/primitive.char.html#method.is_alphanumeric
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::char::alpha_num;
+/// assert_eq!(alpha_num().parse("A"), Ok(('A', "")));
+/// assert_eq!(alpha_num().parse("1"), Ok(('1', "")));
+/// assert!(alpha_num().parse("!").is_err());
+/// ```
+pub fn alpha_num<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ satisfy(|ch: char| ch.is_alphanumeric()).expected("letter or digit")
+}
+
+/// Parses an alphabetic letter according to [`std::char::is_alphabetic`].
+///
+/// [`std::char::is_alphabetic`]: https://doc.rust-lang.org/std/primitive.char.html#method.is_alphabetic
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::char::letter;
+/// assert_eq!(letter().parse("a"), Ok(('a', "")));
+/// assert_eq!(letter().parse("A"), Ok(('A', "")));
+/// assert!(letter().parse("9").is_err());
+/// ```
+pub fn letter<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ satisfy(|ch: char| ch.is_alphabetic()).expected("letter")
+}
+
+/// Parses an octal digit.
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::char::oct_digit;
+/// assert_eq!(oct_digit().parse("7"), Ok(('7', "")));
+/// assert!(oct_digit().parse("8").is_err());
+/// ```
+pub fn oct_digit<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ satisfy(|ch: char| ch.is_digit(8)).expected("octal digit")
+}
+
+/// Parses a hexadecimal digit (uppercase or lowercase).
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::char::hex_digit;
+/// assert_eq!(hex_digit().parse("F"), Ok(('F', "")));
+/// assert!(hex_digit().parse("H").is_err());
+/// ```
+pub fn hex_digit<Input>() -> impl Parser<Input, Output = char, PartialState = ()>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ satisfy(|ch: char| ch.is_digit(0x10)).expected("hexadecimal digit")
+}
+
+/// Parses the string `s`.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::string;
+/// # fn main() {
+/// let result = string("rust")
+/// .parse("rust")
+/// .map(|x| x.0);
+/// assert_eq!(result, Ok("rust"));
+/// # }
+/// ```
+pub fn string<'a, Input>(s: &'static str) -> impl Parser<Input, Output = &'a str>
+where
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ string_cmp(s, |l, r| l == r)
+}
+
+/// Parses the string `s`, using `cmp` to compare each character.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::string_cmp;
+/// # fn main() {
+/// let result = string_cmp("rust", |l, r| l.eq_ignore_ascii_case(&r))
+/// .parse("RusT")
+/// .map(|x| x.0);
+/// assert_eq!(result, Ok("rust"));
+/// # }
+/// ```
+pub fn string_cmp<'a, C, Input>(s: &'static str, cmp: C) -> impl Parser<Input, Output = &'a str>
+where
+ C: FnMut(char, char) -> bool,
+ Input: Stream<Token = char>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ tokens_cmp(s.chars(), cmp).map(move |_| s).expected(s)
+}
+
+#[cfg(all(feature = "std", test))]
+mod tests {
+
+ use crate::{
+ parser::EasyParser,
+ stream::{
+ easy::{Error, Errors},
+ position::{self, SourcePosition},
+ },
+ };
+
+ use super::*;
+
+ #[test]
+ fn space_error() {
+ let result = space().easy_parse("");
+ assert!(result.is_err());
+ assert_eq!(
+ result.unwrap_err().errors,
+ vec![Error::end_of_input(), Error::Expected("whitespace".into())]
+ );
+ }
+
+ #[test]
+ fn string_committed() {
+ let result = string("a").easy_parse(position::Stream::new("b"));
+ assert!(result.is_err());
+ assert_eq!(
+ result.unwrap_err().position,
+ SourcePosition { line: 1, column: 1 }
+ );
+ }
+
+ #[test]
+ fn string_error() {
+ let result = string("abc").easy_parse(position::Stream::new("bc"));
+ assert_eq!(
+ result,
+ Err(Errors {
+ position: SourcePosition { line: 1, column: 1 },
+ errors: vec![Error::Unexpected('b'.into()), Error::Expected("abc".into())],
+ })
+ );
+ }
+}
diff --git a/src/parser/choice.rs b/src/parser/choice.rs
new file mode 100644
index 0000000..8efdb0c
--- /dev/null
+++ b/src/parser/choice.rs
@@ -0,0 +1,849 @@
+//! Combinators which take one or more parsers and attempt to parse successfully with at least one
+//! of them.
+
+use crate::{
+ error::{
+ ParseError,
+ ParseResult::{self, *},
+ ResultExt, StreamError, Tracked,
+ },
+ parser::ParseMode,
+ ErrorOffset, Parser, Stream, StreamOnce,
+};
+
+/// Takes a number of parsers and tries to apply them each in order.
+/// Fails if all the parsers fail or if an applied parser fails after it has committed to its
+/// parse.
+///
+/// ```
+/// # #[macro_use]
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::{digit, letter, string};
+/// # use combine::stream::easy::Error;
+/// # fn main() {
+/// let mut parser = choice!(
+/// many1(digit()),
+/// string("let").map(|s| s.to_string()),
+/// many1(letter()));
+/// assert_eq!(parser.parse("let"), Ok(("let".to_string(), "")));
+/// assert_eq!(parser.parse("123abc"), Ok(("123".to_string(), "abc")));
+/// assert!(parser.parse(":123").is_err());
+/// # }
+/// ```
+#[macro_export]
+macro_rules! choice {
+ ($first : expr) => {
+ $first
+ };
+ ($first : expr, $($rest : expr),+) => {
+ $first.or(choice!($($rest),+))
+ }
+}
+
+#[macro_export]
+#[doc(hidden)]
+macro_rules! parse_mode_choice {
+ (Input) => {
+ fn parse_partial(
+ &mut self,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ self.parse_mode_choice($crate::parser::PartialMode::default(), input, state)
+ }
+
+ fn parse_first(
+ &mut self,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, Input::Error> {
+ self.parse_mode_choice($crate::parser::FirstMode, input, state)
+ }
+ };
+}
+
+/// `ChoiceParser` represents a parser which may parse one of several different choices depending
+/// on the input.
+///
+/// This is an internal trait used to overload the `choice` function.
+pub trait ChoiceParser<Input: Stream> {
+ type Output;
+ type PartialState: Default;
+
+ fn parse_first(
+ &mut self,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>;
+
+ fn parse_partial(
+ &mut self,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>;
+
+ fn parse_mode_choice<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ Self: Sized;
+
+ fn add_error_choice(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>);
+}
+
+impl<'a, Input, P> ChoiceParser<Input> for &'a mut P
+where
+ Input: Stream,
+ P: ?Sized + ChoiceParser<Input>,
+{
+ type Output = P::Output;
+ type PartialState = P::PartialState;
+
+ parse_mode_choice!(Input);
+ #[inline]
+ fn parse_mode_choice<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ if mode.is_first() {
+ (**self).parse_first(input, state)
+ } else {
+ (**self).parse_partial(input, state)
+ }
+ }
+
+ fn add_error_choice(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
+ (**self).add_error_choice(error)
+ }
+}
+
+macro_rules! merge {
+ ($head: ident) => {
+ $head.error
+ };
+ ($head: ident $($tail: ident)+) => {
+ $head.error.merge(merge!($($tail)+))
+ };
+}
+
+macro_rules! do_choice {
+ (
+ $input: ident
+ $before_position: ident
+ $before: ident
+ $partial_state: ident
+ $state: ident
+ ( )
+ $($parser: ident $error: ident)+
+ ) => { {
+ let mut error = Tracked::from(merge!($($error)+));
+ // If offset != 1 then the nested parser is a sequence of parsers where 1 or
+ // more parsers returned `PeekOk` before the parser finally failed with
+ // `PeekErr`. Since we lose the offsets of the nested parsers when we merge
+ // the errors we must first extract the errors before we do the merge.
+        // If the offset == 1 on the other hand (which should be the common case) then
+ // we can delay the addition of the error since we know for certain that only
+        // the first parser in the sequence was tried
+ $(
+ if $error.offset != ErrorOffset(1) {
+ error.offset = $error.offset;
+ $parser.add_error(&mut error);
+ error.offset = ErrorOffset(0);
+ }
+ )+
+ PeekErr(error)
+ } };
+ (
+ $input: ident
+ $before_position: ident
+ $before: ident
+ $partial_state: ident
+ $state: ident
+ ( $head: ident $($tail: ident)* )
+ $($all: ident)*
+ ) => { {
+ let parser = $head;
+ let mut state = $head::PartialState::default();
+ match parser.parse_mode(crate::parser::FirstMode, $input, &mut state) {
+ CommitOk(x) => CommitOk(x),
+ PeekOk(x) => PeekOk(x),
+ CommitErr(err) => {
+                // If we get `CommitErr` but the input is the same, this is a partial parse we
+                // cannot commit to, so leave the state as `Peek` to retry all the parsers
+                // on the next call to `parse_partial`.
+ if $input.position() != $before_position {
+ *$state = self::$partial_state::$head(state);
+ }
+ CommitErr(err)
+ }
+ PeekErr($head) => {
+ ctry!($input.reset($before.clone()).committed());
+ do_choice!(
+ $input
+ $before_position
+ $before
+ $partial_state
+ $state
+ ( $($tail)* )
+ $($all)*
+ parser
+ $head
+ )
+ }
+ }
+ } }
+}
+
+macro_rules! tuple_choice_parser {
+ ($head: ident) => {
+ tuple_choice_parser_inner!($head; $head);
+ };
+ ($head: ident $($id: ident)+) => {
+ tuple_choice_parser_inner!($head; $head $($id)+);
+ tuple_choice_parser!($($id)+);
+ };
+}
+
+macro_rules! tuple_choice_parser_inner {
+ ($partial_state: ident; $($id: ident)+) => {
+ #[doc(hidden)]
+ pub enum $partial_state<$($id),+> {
+ Peek,
+ $(
+ $id($id),
+ )+
+ }
+
+ impl<$($id),+> Default for self::$partial_state<$($id),+> {
+ fn default() -> Self {
+ self::$partial_state::Peek
+ }
+ }
+
+ #[allow(non_snake_case)]
+ impl<Input, Output $(,$id)+> ChoiceParser<Input> for ($($id,)+)
+ where
+ Input: Stream,
+ $($id: Parser< Input, Output = Output>),+
+ {
+
+ type Output = Output;
+ type PartialState = self::$partial_state<$($id::PartialState),+>;
+
+ parse_mode_choice!(Input);
+ #[inline]
+ fn parse_mode_choice<Mode>(
+ &mut self,
+ mode: Mode,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ Mode: ParseMode,
+ {
+ let ($(ref mut $id,)+) = *self;
+ let empty = match *state {
+ self::$partial_state::Peek => true,
+ _ => false,
+ };
+ if mode.is_first() || empty {
+ let before_position = input.position();
+ let before = input.checkpoint();
+ do_choice!(input before_position before $partial_state state ( $($id)+ ) )
+ } else {
+ match *state {
+ self::$partial_state::Peek => unreachable!(),
+ $(
+ self::$partial_state::$id(_) => {
+ let result = match *state {
+ self::$partial_state::$id(ref mut state) => {
+ $id.parse_mode(mode, input, state)
+ }
+ _ => unreachable!()
+ };
+ if result.is_ok() {
+ *state = self::$partial_state::Peek;
+ }
+ result
+ }
+ )+
+ }
+ }
+ }
+
+ fn add_error_choice(
+ &mut self,
+ error: &mut Tracked<<Input as StreamOnce>::Error>
+ ) {
+ if error.offset != ErrorOffset(0) {
+ let ($(ref mut $id,)+) = *self;
+                    // Reset the offset to 1 on every add so that we always (and only) take the
+ // error of the first parser. If we don't do this the first parser will consume
+                    // the offset to the detriment of all the other parsers.
+ $(
+ error.offset = ErrorOffset(1);
+ $id.add_error(error);
+ )+
+ }
+ }
+ }
+ }
+}
+
+tuple_choice_parser!(A B C D E F G H I J K L M N O P Q R S T U V X Y Z);
+
+macro_rules! array_choice_parser {
+ ($($t: tt)+) => {
+ $(
+ impl<Input, P> ChoiceParser<Input> for [P; $t]
+ where
+ Input: Stream,
+ P: Parser<Input>,
+ {
+
+ type Output = P::Output;
+ type PartialState = <[P] as ChoiceParser<Input>>::PartialState;
+
+ parse_mode_choice!(Input);
+ #[inline]
+ fn parse_mode_choice<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ if mode.is_first() {
+ self[..].parse_first(input, state)
+ } else {
+ self[..].parse_partial(input, state)
+ }
+ }
+ fn add_error_choice(
+ &mut self,
+ error: &mut Tracked<<Input as StreamOnce>::Error>
+ ) {
+ self[..].add_error_choice(error)
+ }
+ }
+ )+
+ };
+}
+
+#[rustfmt::skip]
+array_choice_parser!(
+ 0 1 2 3 4 5 6 7 8 9
+ 10 11 12 13 14 15 16 17 18 19
+ 20 21 22 23 24 25 26 27 28 29
+ 30 31 32
+);
+
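+/// Parser returned by `choice`.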
+#[derive(Copy, Clone)]
+pub struct Choice<P>(P);
+
+impl<Input, P> Parser<Input> for Choice<P>
+where
+ Input: Stream,
+ P: ChoiceParser<Input>,
+{
+ type Output = P::Output;
+ type PartialState = P::PartialState;
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ self.0.parse_mode_choice(mode, input, state)
+ }
+
+ fn add_error(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
+ let before = error.offset.0;
+ self.0.add_error_choice(error);
+ error.offset.0 = before.saturating_sub(1);
+ }
+}
+
+fn slice_parse_mode<Input, P, M>(
+ self_: &mut [P],
+ mode: M,
+ input: &mut Input,
+ state: &mut (usize, P::PartialState),
+) -> ParseResult<P::Output, <Input as StreamOnce>::Error>
+where
+ P: Parser<Input>,
+ Input: Stream,
+ M: ParseMode,
+{
+ let mut prev_err = None;
+ let mut last_parser_having_non_1_offset = 0;
+ let before = input.checkpoint();
+
+ let (ref mut index_state, ref mut child_state) = *state;
+ if !mode.is_first() && *index_state != 0 {
+ return self_[*index_state - 1]
+ .parse_partial(input, child_state)
+ .map(|x| {
+ *index_state = 0;
+ x
+ });
+ }
+
+ for i in 0..self_.len() {
+ ctry!(input.reset(before.clone()).committed());
+
+ match self_[i].parse_mode(mode, input, child_state) {
+ committed_err @ CommitErr(_) => {
+ *index_state = i + 1;
+ return committed_err;
+ }
+ PeekErr(err) => {
+ prev_err = match prev_err {
+ None => Some(err),
+ Some(mut prev_err) => {
+ if prev_err.offset != ErrorOffset(1) {
+ // First add the errors of all the preceding parsers which did not
+ // have a sequence of parsers returning `PeekOk` before failing
+ // with `PeekErr`.
+ let offset = prev_err.offset;
+ for p in &mut self_[last_parser_having_non_1_offset..(i - 1)] {
+ prev_err.offset = ErrorOffset(1);
+ p.add_error(&mut prev_err);
+ }
+                            // Then add the errors of the current parser
+ prev_err.offset = offset;
+ self_[i - 1].add_error(&mut prev_err);
+ last_parser_having_non_1_offset = i;
+ }
+ Some(Tracked {
+ error: prev_err.error.merge(err.error),
+ offset: err.offset,
+ })
+ }
+ };
+ }
+ ok @ CommitOk(_) | ok @ PeekOk(_) => {
+ *index_state = 0;
+ return ok;
+ }
+ }
+ }
+ PeekErr(match prev_err {
+ None => Input::Error::from_error(
+ input.position(),
+ StreamError::message_static_message("parser choice is empty"),
+ )
+ .into(),
+ Some(mut prev_err) => {
+ if prev_err.offset != ErrorOffset(1) {
+ let offset = prev_err.offset;
+ let len = self_.len();
+ for p in &mut self_[last_parser_having_non_1_offset..(len - 1)] {
+ prev_err.offset = ErrorOffset(1);
+ p.add_error(&mut prev_err);
+ }
+ prev_err.offset = offset;
+ self_.last_mut().unwrap().add_error(&mut prev_err);
+ prev_err.offset = ErrorOffset(0);
+ }
+ prev_err
+ }
+ })
+}
+
+impl<Input, O, P> ChoiceParser<Input> for [P]
+where
+ Input: Stream,
+ P: Parser<Input, Output = O>,
+{
+ type Output = O;
+ type PartialState = (usize, P::PartialState);
+
+ #[inline]
+ fn parse_partial(
+ &mut self,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ slice_parse_mode(self, crate::parser::PartialMode::default(), input, state)
+ }
+
+ #[inline]
+ fn parse_first(
+ &mut self,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ slice_parse_mode(self, crate::parser::FirstMode, input, state)
+ }
+
+ #[inline]
+ fn parse_mode_choice<M>(
+ &mut self,
+ _mode: M,
+ _input: &mut Input,
+ _state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ unreachable!()
+ }
+
+ fn add_error_choice(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
+ if error.offset != ErrorOffset(0) {
+ for p in self {
+ error.offset = ErrorOffset(1);
+ p.add_error(error);
+ }
+ }
+ }
+}
+
+/// Takes a tuple, a slice or an array of parsers and tries to apply them each in order.
+/// Fails if all the parsers fail or if an applied parser consumes input before failing.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::{digit, string};
+/// # fn main() {
+/// // `choice` is overloaded on tuples so that different types of parsers can be used
+/// // (each parser must still have the same input and output types)
+/// let mut parser = choice((
+/// string("Apple").map(|s| s.to_string()),
+/// many1(digit()),
+/// string("Orange").map(|s| s.to_string()),
+/// ));
+/// assert_eq!(parser.parse("1234"), Ok(("1234".to_string(), "")));
+/// assert_eq!(parser.parse("Orangexx"), Ok(("Orange".to_string(), "xx")));
+/// assert!(parser.parse("Appl").is_err());
+/// assert!(parser.parse("Pear").is_err());
+///
+/// // If arrays or slices are used then all parsers must have the same type
+/// // (`string` in this case)
+/// let mut parser2 = choice([string("one"), string("two"), string("three")]);
+/// // Fails as the parser for "two" consumes the first 't' before failing
+/// assert!(parser2.parse("three").is_err());
+///
+/// // Use `attempt` to make failing parsers always act as if they have not committed any input
+/// let mut parser3 = choice([attempt(string("one")), attempt(string("two")), attempt(string("three"))]);
+/// assert_eq!(parser3.parse("three"), Ok(("three", "")));
+/// # }
+/// ```
+pub fn choice<Input, P>(ps: P) -> Choice<P>
+where
+ Input: Stream,
+ P: ChoiceParser<Input>,
+{
+ Choice(ps)
+}
+
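+/// Parser returned by `or`.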
+#[derive(Copy, Clone)]
+pub struct Or<P1, P2>(Choice<(P1, P2)>);
+impl<Input, O, P1, P2> Parser<Input> for Or<P1, P2>
+where
+ Input: Stream,
+ P1: Parser<Input, Output = O>,
+ P2: Parser<Input, Output = O>,
+{
+ type Output = O;
+ type PartialState = <Choice<(P1, P2)> as Parser<Input>>::PartialState;
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ self.0.parse_mode(mode, input, state)
+ }
+
+ #[inline]
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ if errors.offset != ErrorOffset(0) {
+ self.0.add_error(errors);
+ }
+ }
+}
+
+/// Equivalent to [`p1.or(p2)`].
+///
+/// If you are looking to chain 3 or more parsers using `or` you may consider using the
+/// [`choice!`] macro instead, which can be clearer and may result in a faster parser.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::choice::or;
+/// # use combine::parser::char::{digit, string};
+/// # fn main() {
+/// let mut parser = or(
+/// string("let"),
+/// or(digit().map(|_| "digit"), string("led")),
+/// );
+/// assert_eq!(parser.parse("let"), Ok(("let", "")));
+/// assert_eq!(parser.parse("1"), Ok(("digit", "")));
+/// assert!(parser.parse("led").is_err());
+///
+/// let mut parser2 = or(string("two"), string("three"));
+/// // Fails as the parser for "two" consumes the first 't' before failing
+/// assert!(parser2.parse("three").is_err());
+///
+/// // Use `attempt` to make failing parsers always act as if they have not committed any input
+/// let mut parser3 = or(attempt(string("two")), attempt(string("three")));
+/// assert_eq!(parser3.parse("three"), Ok(("three", "")));
+/// # }
+/// ```
+///
+/// [`choice!`]: ../../macro.choice.html
+/// [`p1.or(p2)`]: ../trait.Parser.html#method.or
+pub fn or<Input, P1, P2>(p1: P1, p2: P2) -> Or<P1, P2>
+where
+ Input: Stream,
+ P1: Parser<Input>,
+ P2: Parser<Input, Output = P1::Output>,
+{
+ Or(choice((p1, p2)))
+}
+
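+/// Parser returned by `optional`.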
+#[derive(Copy, Clone)]
+pub struct Optional<P>(P);
+impl<Input, P> Parser<Input> for Optional<P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+{
+ type Output = Option<P::Output>;
+ type PartialState = P::PartialState;
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ let before = input.checkpoint();
+ match self.0.parse_mode(mode, input, state) {
+ PeekOk(x) => PeekOk(Some(x)),
+ CommitOk(x) => CommitOk(Some(x)),
+ CommitErr(err) => CommitErr(err),
+ PeekErr(_) => {
+ ctry!(input.reset(before).committed());
+ PeekOk(None)
+ }
+ }
+ }
+
+ forward_parser!(Input, add_error parser_count, 0);
+}
+
+/// Parses `parser` and outputs `Some(value)` if it succeeds, `None` if it fails without
+/// consuming any input. Fails if `parser` fails after having committed some input.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::string;
+/// # fn main() {
+/// let mut parser = optional(string("hello"));
+/// assert_eq!(parser.parse("hello"), Ok((Some("hello"), "")));
+/// assert_eq!(parser.parse("world"), Ok((None, "world")));
+/// assert!(parser.parse("heya").is_err());
+/// # }
+/// ```
+pub fn optional<Input, P>(parser: P) -> Optional<P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+{
+ Optional(parser)
+}
+
+#[macro_export]
+#[doc(hidden)]
+macro_rules! parse_mode_dispatch {
+ () => {
+ fn parse_partial(
+ &mut self,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ self.parse_mode_dispatch($crate::parser::PartialMode::default(), input, state)
+ }
+
+ fn parse_first(
+ &mut self,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ self.parse_mode_dispatch($crate::parser::FirstMode, input, state)
+ }
+ };
+}
+
+#[macro_export]
+#[doc(hidden)]
+macro_rules! dispatch_parser_impl {
+ ($parser_name: ident [$first_ident: ident $($id: ident)*] [$($collected_idents: ident)*] $expr: expr, $($rest: expr,)*) => {
+ $crate::dispatch_parser_impl!{ $parser_name [ $($id)* ] [$($collected_idents)* $first_ident] $($rest,)*}
+ };
+ ($parser_name: ident [$($id: ident)*] [$($collected_idents: ident)*]) => {
+ $crate::dispatch_parser_impl!{ $parser_name; $($collected_idents)* }
+ };
+
+ ($parser_name: ident; $($id: ident)*) => {
+ pub enum $parser_name<$($id),*> {
+ $(
+ $id($id),
+ )*
+ }
+
+ #[allow(non_snake_case)]
+ impl<Input, Output, $($id),*> $crate::Parser<Input> for $parser_name<$($id),*>
+ where
+ $( $id: $crate::Parser<Input, Output = Output>, )*
+ Input: $crate::Stream,
+ {
+ type Output = Output;
+ type PartialState = Option<$parser_name<$($id::PartialState),*>>;
+
+ $crate::parse_mode!(Input);
+ fn parse_mode<Mode>(
+ &mut self,
+ mode: Mode,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> $crate::error::ParseResult<Self::Output, <Input as $crate::StreamOnce>::Error>
+ where
+ Mode: $crate::parser::ParseMode,
+ {
+ match self {
+ $(
+ $parser_name::$id($id) => {
+ let state = match state {
+ Some($parser_name::$id(s)) => s,
+ _ => {
+ *state = Some($parser_name::$id(Default::default()));
+ match state {
+ Some($parser_name::$id(s)) => s,
+ _ => unreachable!(),
+ }
+ }
+ };
+ $id.parse_mode(mode, input, state)
+ }
+ )*
+ }
+ }
+
+ fn add_error(&mut self, error: &mut $crate::error::Tracked<<Input as $crate::StreamOnce>::Error>) {
+ match self {
+ $(
+ $parser_name::$id($id) => $id.add_error(error),
+ )*
+ }
+ }
+ }
+ }
+}
+
+#[macro_export]
+#[doc(hidden)]
+macro_rules! dispatch_inner {
+ ($expr_ident: ident [$first_ident: ident $($id: ident)*] [$($collected: tt)*] $($pat: pat)|+ $(if $pred:expr)? => $expr: expr, $($rest_alt: tt)*) => {
+ $crate::dispatch_inner!{ $expr_ident [ $($id)* ] [$($collected)* $first_ident $($pat)|+ $(if $pred)? => $expr,] $($rest_alt)*}
+ };
+ ($expr_ident: ident [$($id: ident)*] [$($collected: tt)*]) => {
+ $crate::dispatch_inner!{ $expr_ident $($collected)* }
+ };
+ ($expr_ident: ident [$($ident_tt: tt)*]) => {
+ unreachable!()
+ };
+ ($expr_ident: ident $( $ident: ident $($pat: pat)|+ $(if $pred:expr)? => $expr: expr,)+ ) => {
+ match $expr_ident {
+ $(
+ $($pat)|+ $(if $pred)? => Dispatch::$ident(check_parser($expr)),
+ )+
+ }
+ }
+}
+
+/// `dispatch!` allows a parser to be constructed depending on earlier input, without forcing each
+/// branch to have the same type of parser.
+///
+/// ```
+/// use combine::{dispatch, any, token, satisfy, EasyParser, Parser};
+///
+/// let mut parser = any().then(|e| {
+/// dispatch!(e;
+/// 'a' => token('a'),
+/// 'b' => satisfy(|b| b == 'b'),
+/// t if t == 'c' => any(),
+/// _ => token('d')
+/// )
+/// });
+/// assert_eq!(parser.easy_parse("aa"), Ok(('a', "")));
+/// assert_eq!(parser.easy_parse("cc"), Ok(('c', "")));
+/// assert_eq!(parser.easy_parse("cd"), Ok(('d', "")));
+/// assert!(parser.easy_parse("ab").is_err());
+/// ```
+#[macro_export]
+macro_rules! dispatch {
+ ($match_expr: expr; $( $($pat: pat)|+ $(if $pred:expr)? => $expr: expr ),+ $(,)? ) => {
+ {
+ $crate::dispatch_parser_impl!{ Dispatch [A B C D E F G H I J K L M N O P Q R S T U V X Y Z] [] $($expr,)+ }
+
+ fn check_parser<Input, P>(p: P) -> P where P: $crate::Parser<Input>, Input: $crate::Stream { p }
+
+ let e = $match_expr;
+ let parser = $crate::dispatch_inner!(e [A B C D E F G H I J K L M N O P Q R S T U V X Y Z] []
+ $(
+ $($pat)|+ $(if $pred)? => $expr,
+ )*
+ );
+ parser
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+
+ use crate::parser::{token::any, EasyParser};
+
+ use super::*;
+
+ #[test]
+ fn choice_single_parser() {
+ assert!(choice((any(),),).easy_parse("a").is_ok());
+ }
+}
diff --git a/src/parser/combinator.rs b/src/parser/combinator.rs
new file mode 100644
index 0000000..d8f3f25
--- /dev/null
+++ b/src/parser/combinator.rs
@@ -0,0 +1,1556 @@
+//! Various combinators which do not fit anywhere else.
+
+use crate::{
+ error::{
+ Info, ParseError,
+ ParseResult::{self, *},
+ ResultExt, StreamError, Tracked,
+ },
+ lib::{fmt, marker::PhantomData, mem, str},
+ parser::ParseMode,
+ stream::{input_at_eof, span::Span, ResetStream, Stream, StreamErrorFor, StreamOnce},
+ Parser,
+};
+
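+/// Parser returned by `not_followed_by`.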
+#[derive(Copy, Clone)]
+pub struct NotFollowedBy<P>(P);
+impl<Input, O, P> Parser<Input> for NotFollowedBy<P>
+where
+ Input: Stream,
+ P: Parser<Input, Output = O>,
+{
+ type Output = ();
+ type PartialState = P::PartialState;
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ let checkpoint = input.checkpoint();
+ let result = self.0.parse_mode(mode, input, state);
+ ctry!(input.reset(checkpoint).committed());
+ match result {
+ CommitOk(_) | PeekOk(_) => PeekErr(Input::Error::empty(input.position()).into()),
+ CommitErr(_) | PeekErr(_) => PeekOk(()),
+ }
+ }
+
+ #[inline]
+ fn add_error(&mut self, _errors: &mut Tracked<<Input as StreamOnce>::Error>) {}
+
+ fn add_committed_expected_error(&mut self, _error: &mut Tracked<<Input as StreamOnce>::Error>) {
+ }
+
+ forward_parser!(Input, parser_count, 0);
+}
+
+/// Succeeds only if `parser` fails.
+/// Never consumes any input.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::{alpha_num, string};
+/// # fn main() {
+/// let result = string("let")
+/// .skip(not_followed_by(alpha_num()))
+/// .parse("letx")
+/// .map(|x| x.0);
+/// assert!(result.is_err());
+///
+/// # }
+/// ```
+pub fn not_followed_by<Input, P>(parser: P) -> NotFollowedBy<P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ P::Output: Into<Info<<Input as StreamOnce>::Token, <Input as StreamOnce>::Range, &'static str>>,
+{
+ NotFollowedBy(parser)
+}
+
+/*
+ * TODO :: Rename `Try` to `Attempt`
+ * Because this is public, its name cannot be changed without also making a breaking change.
+ */
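+/// Parser returned by `attempt`.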
+#[derive(Copy, Clone)]
+pub struct Try<P>(P);
+impl<Input, O, P> Parser<Input> for Try<P>
+where
+ Input: Stream,
+ P: Parser<Input, Output = O>,
+{
+ type Output = O;
+ type PartialState = P::PartialState;
+
+ #[inline]
+ fn parse_stream(&mut self, input: &mut Input) -> ParseResult<O, <Input as StreamOnce>::Error> {
+ self.parse_lazy(input)
+ }
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_committed_mode<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ self.parse_mode(mode, input, state)
+ }
+
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ match self.0.parse_committed_mode(mode, input, state) {
+ v @ CommitOk(_) | v @ PeekOk(_) | v @ PeekErr(_) => v,
+ CommitErr(err) => {
+ if input.is_partial() && err.is_unexpected_end_of_input() {
+ CommitErr(err)
+ } else {
+ PeekErr(err.into())
+ }
+ }
+ }
+ }
+
+ forward_parser!(Input, add_error add_committed_expected_error parser_count, 0);
+}
+
+/// `attempt(p)` behaves as `p`, except that when `p` fails it acts as if `p` had only peeked at
+/// the input instead of committing to its parse.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::string;
+/// # fn main() {
+/// let mut p = attempt(string("let"))
+/// .or(string("lex"));
+/// let result = p.parse("lex").map(|x| x.0);
+/// assert_eq!(result, Ok("lex"));
+/// let result = p.parse("aet").map(|x| x.0);
+/// assert!(result.is_err());
+/// # }
+/// ```
+pub fn attempt<Input, P>(p: P) -> Try<P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+{
+ Try(p)
+}
+
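+/// Parser returned by `look_ahead`.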
+#[derive(Copy, Clone)]
+pub struct LookAhead<P>(P);
+
+impl<Input, O, P> Parser<Input> for LookAhead<P>
+where
+ Input: Stream,
+ P: Parser<Input, Output = O>,
+{
+ type Output = O;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<O, <Input as StreamOnce>::Error> {
+ let before = input.checkpoint();
+ let result = self.0.parse_lazy(input);
+ ctry!(input.reset(before).committed());
+ let (o, _input) = ctry!(result);
+ PeekOk(o)
+ }
+
+ forward_parser!(Input, add_error add_committed_expected_error parser_count, 0);
+}
+
+/// `look_ahead(p)` acts as `p` but doesn't consume input on success.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::string;
+/// # fn main() {
+/// let mut p = look_ahead(string("test"));
+///
+/// let result = p.parse("test str");
+/// assert_eq!(result, Ok(("test", "test str")));
+///
+/// let result = p.parse("aet");
+/// assert!(result.is_err());
+/// # }
+/// ```
+pub fn look_ahead<Input, P>(p: P) -> LookAhead<P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+{
+ LookAhead(p)
+}
+
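+/// Parser returned by `map`.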
+#[derive(Copy, Clone)]
+pub struct Map<P, F>(P, F);
+impl<Input, A, B, P, F> Parser<Input> for Map<P, F>
+where
+ Input: Stream,
+ P: Parser<Input, Output = A>,
+ F: FnMut(A) -> B,
+{
+ type Output = B;
+ type PartialState = P::PartialState;
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ match self.0.parse_mode(mode, input, state) {
+ CommitOk(x) => CommitOk((self.1)(x)),
+ PeekOk(x) => PeekOk((self.1)(x)),
+ CommitErr(err) => CommitErr(err),
+ PeekErr(err) => PeekErr(err),
+ }
+ }
+
+ forward_parser!(Input, add_error add_committed_expected_error parser_count, 0);
+}
+
+/// Equivalent to [`p.map(f)`].
+///
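+/// For example (a minimal sketch; `digit` from `combine::parser::char` is used purely for
+/// illustration):
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::char::digit;
+/// use combine::parser::combinator::map;
+///
+/// // Convert the parsed character into its numeric value.
+/// let mut parser = map(digit(), |c: char| c.to_digit(10).unwrap());
+/// assert_eq!(parser.parse("9"), Ok((9, "")));
+/// ```
+///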
+/// [`p.map(f)`]: ../trait.Parser.html#method.map
+pub fn map<Input, P, F, B>(p: P, f: F) -> Map<P, F>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ F: FnMut(P::Output) -> B,
+{
+ Map(p, f)
+}
+
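+/// Parser returned by `map_input`.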
+#[derive(Copy, Clone)]
+pub struct MapInput<P, F>(P, F);
+impl<Input, A, B, P, F> Parser<Input> for MapInput<P, F>
+where
+ Input: Stream,
+ P: Parser<Input, Output = A>,
+ F: FnMut(A, &mut Input) -> B,
+{
+ type Output = B;
+ type PartialState = P::PartialState;
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ match self.0.parse_mode(mode, input, state) {
+ CommitOk(x) => CommitOk((self.1)(x, input)),
+ PeekOk(x) => PeekOk((self.1)(x, input)),
+ CommitErr(err) => CommitErr(err),
+ PeekErr(err) => PeekErr(err),
+ }
+ }
+
+ forward_parser!(Input, add_error add_committed_expected_error parser_count, 0);
+}
+
+/// Equivalent to [`p.map_input(f)`].
+///
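+/// For example (a rough sketch; the closure also receives the stream, used here only to report
+/// how much input remains after `digit` has run):
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::char::digit;
+/// use combine::parser::combinator::map_input;
+///
+/// // Pair the parsed digit with the number of bytes left in the input.
+/// let mut parser = map_input(digit(), |c: char, input: &mut &str| (c, input.len()));
+/// assert_eq!(parser.parse("9rest"), Ok((('9', 4), "rest")));
+/// ```
+///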
+/// [`p.map_input(f)`]: ../trait.Parser.html#method.map_input
+pub fn map_input<Input, P, F, B>(p: P, f: F) -> MapInput<P, F>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ F: FnMut(P::Output, &mut Input) -> B,
+{
+ MapInput(p, f)
+}
+
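+/// Parser returned by `flat_map`.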
+#[derive(Copy, Clone)]
+pub struct FlatMap<P, F>(P, F);
+impl<Input, A, B, P, F> Parser<Input> for FlatMap<P, F>
+where
+ Input: Stream,
+ P: Parser<Input, Output = A>,
+ F: FnMut(A) -> Result<B, Input::Error>,
+{
+ type Output = B;
+ type PartialState = P::PartialState;
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ match self.0.parse_mode(mode, input, state) {
+ PeekOk(o) => match (self.1)(o) {
+ Ok(x) => PeekOk(x),
+ Err(err) => PeekErr(err.into()),
+ },
+ CommitOk(o) => match (self.1)(o) {
+ Ok(x) => CommitOk(x),
+ Err(err) => CommitErr(err),
+ },
+ PeekErr(err) => PeekErr(err),
+ CommitErr(err) => CommitErr(err),
+ }
+ }
+
+ forward_parser!(Input, add_error add_committed_expected_error parser_count, 0);
+}
+
+/// Equivalent to [`p.flat_map(f)`].
+///
+/// [`p.flat_map(f)`]: ../trait.Parser.html#method.flat_map
+pub fn flat_map<Input, P, F, B>(p: P, f: F) -> FlatMap<P, F>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ F: FnMut(P::Output) -> Result<B, <Input as StreamOnce>::Error>,
+{
+ FlatMap(p, f)
+}
+
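+/// Parser returned by `and_then`.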
+#[derive(Copy, Clone)]
+pub struct AndThen<P, F>(P, F);
+impl<Input, P, F, O, E> Parser<Input> for AndThen<P, F>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ F: FnMut(P::Output) -> Result<O, E>,
+ E: Into<<Input::Error as ParseError<Input::Token, Input::Range, Input::Position>>::StreamError>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ type Output = O;
+ type PartialState = P::PartialState;
+
+ parse_mode!(Input);
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ let position = input.position();
+ let checkpoint = input.checkpoint();
+ match self.0.parse_mode(mode, input, state) {
+ PeekOk(o) => match (self.1)(o) {
+ Ok(o) => PeekOk(o),
+ Err(err) => {
+ let err = <Input as StreamOnce>::Error::from_error(position, err.into());
+
+ if input.is_partial() && input_at_eof(input) {
+ ctry!(input.reset(checkpoint).committed());
+ CommitErr(err)
+ } else {
+ PeekErr(err.into())
+ }
+ }
+ },
+ CommitOk(o) => match (self.1)(o) {
+ Ok(o) => CommitOk(o),
+ Err(err) => {
+ if input.is_partial() && input_at_eof(input) {
+ ctry!(input.reset(checkpoint).committed());
+ }
+ CommitErr(<Input as StreamOnce>::Error::from_error(
+ position,
+ err.into(),
+ ))
+ }
+ },
+ PeekErr(err) => PeekErr(err),
+ CommitErr(err) => CommitErr(err),
+ }
+ }
+
+ forward_parser!(Input, add_error add_committed_expected_error parser_count, 0);
+}
+
+/// Equivalent to [`p.and_then(f)`].
+///
+/// [`p.and_then(f)`]: ../trait.Parser.html#method.and_then
+pub fn and_then<Input, P, F, O, E>(p: P, f: F) -> AndThen<P, F>
+where
+ P: Parser<Input>,
+ F: FnMut(P::Output) -> Result<O, E>,
+ Input: Stream,
+ E: Into<<Input::Error as ParseError<Input::Token, Input::Range, Input::Position>>::StreamError>,
+{
+ AndThen(p, f)
+}
+
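+/// Parser returned by `recognize`.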
+#[derive(Copy, Clone)]
+pub struct Recognize<F, P>(P, PhantomData<fn() -> F>);
+
+impl<F, P> Recognize<F, P> {
+ #[inline]
+ fn recognize_result<Input>(
+ elements: &mut F,
+ before: <Input as ResetStream>::Checkpoint,
+ input: &mut Input,
+ result: ParseResult<P::Output, <Input as StreamOnce>::Error>,
+ ) -> ParseResult<F, <Input as StreamOnce>::Error>
+ where
+ P: Parser<Input>,
+ Input: Stream,
+ F: Default + Extend<Input::Token>,
+ {
+ match result {
+ PeekOk(_) => {
+ let last_position = input.position();
+ ctry!(input.reset(before).committed());
+
+ while input.position() != last_position {
+ match input.uncons() {
+ Ok(elem) => elements.extend(Some(elem)),
+ Err(err) => {
+ return PeekErr(
+ <Input as StreamOnce>::Error::from_error(input.position(), err)
+ .into(),
+ );
+ }
+ }
+ }
+ PeekOk(mem::take(elements))
+ }
+ CommitOk(_) => {
+ let last_position = input.position();
+ ctry!(input.reset(before).committed());
+
+ while input.position() != last_position {
+ match input.uncons() {
+ Ok(elem) => elements.extend(Some(elem)),
+ Err(err) => {
+ return CommitErr(<Input as StreamOnce>::Error::from_error(
+ input.position(),
+ err,
+ ));
+ }
+ }
+ }
+ CommitOk(mem::take(elements))
+ }
+ CommitErr(err) => {
+ let last_position = input.position();
+ ctry!(input.reset(before).committed());
+
+ while input.position() != last_position {
+ match input.uncons() {
+ Ok(elem) => elements.extend(Some(elem)),
+ Err(err) => {
+ return CommitErr(<Input as StreamOnce>::Error::from_error(
+ input.position(),
+ err,
+ ));
+ }
+ }
+ }
+ CommitErr(err)
+ }
+ PeekErr(err) => PeekErr(err),
+ }
+ }
+}
+
+impl<Input, P, F> Parser<Input> for Recognize<F, P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ F: Default + Extend<<Input as StreamOnce>::Token>,
+{
+ type Output = F;
+ type PartialState = (F, P::PartialState);
+
+ parse_mode!(Input);
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ let (ref mut elements, ref mut child_state) = *state;
+
+ let before = input.checkpoint();
+ let result = self.0.parse_mode(mode, input, child_state);
+ Self::recognize_result(elements, before, input, result)
+ }
+
+ #[inline]
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ self.0.add_error(errors)
+ }
+}
+
+/// Constructs a parser which returns the tokens parsed by `parser` accumulated in
+/// `F: Extend<Input::Token>` instead of `P::Output`.
+///
+/// ```
+/// use combine::Parser;
+/// use combine::parser::{repeat::skip_many1, token::token, combinator::recognize, char::digit};
+///
+/// let mut parser = recognize((skip_many1(digit()), token('.'), skip_many1(digit())));
+/// assert_eq!(parser.parse("123.45"), Ok(("123.45".to_string(), "")));
+/// assert_eq!(parser.parse("123.45"), Ok(("123.45".to_string(), "")));
+/// ```
+pub fn recognize<F, Input, P>(parser: P) -> Recognize<F, P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ F: Default + Extend<<Input as StreamOnce>::Token>,
+{
+ Recognize(parser, PhantomData)
+}
+
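+/// A parser that is one of two possible parsers sharing the same output type.
+///
+/// `Either` implements `Parser` by delegating to whichever variant it holds, which makes it
+/// possible to return one of two different parser types from a function without boxing.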
+pub enum Either<L, R> {
+ Left(L),
+ Right(R),
+}
+
+impl<Input, L, R> Parser<Input> for Either<L, R>
+where
+ Input: Stream,
+ L: Parser<Input>,
+ R: Parser<Input, Output = L::Output>,
+{
+ type Output = L::Output;
+ type PartialState = Option<Either<L::PartialState, R::PartialState>>;
+
+ #[inline]
+ fn parse_lazy(
+ &mut self,
+ input: &mut Input,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ match *self {
+ Either::Left(ref mut x) => x.parse_lazy(input),
+ Either::Right(ref mut x) => x.parse_lazy(input),
+ }
+ }
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ match *self {
+ Either::Left(ref mut x) => {
+ match *state {
+ None | Some(Either::Right(_)) => {
+ *state = Some(Either::Left(L::PartialState::default()))
+ }
+ Some(Either::Left(_)) => (),
+ }
+ x.parse_mode(
+ mode,
+ input,
+ match state {
+ Some(Either::Left(state)) => state,
+ _ => unreachable!(),
+ },
+ )
+ }
+ Either::Right(ref mut x) => {
+ match *state {
+ None | Some(Either::Left(_)) => {
+ *state = Some(Either::Right(R::PartialState::default()))
+ }
+ Some(Either::Right(_)) => (),
+ }
+ x.parse_mode(
+ mode,
+ input,
+ match state {
+ Some(Either::Right(state)) => state,
+ _ => unreachable!(),
+ },
+ )
+ }
+ }
+ }
+
+ #[inline]
+ fn add_error(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
+ match *self {
+ Either::Left(ref mut x) => x.add_error(error),
+ Either::Right(ref mut x) => x.add_error(error),
+ }
+ }
+}
+
+pub struct NoPartial<P>(P);
+
+impl<Input, P> Parser<Input> for NoPartial<P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+{
+ type Output = <P as Parser<Input>>::Output;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(
+ &mut self,
+ input: &mut Input,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ self.0.parse_lazy(input)
+ }
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ _mode: M,
+ input: &mut Input,
+ _state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ self.0.parse_lazy(input)
+ }
+
+ forward_parser!(Input, add_error add_committed_expected_error parser_count, 0);
+}
+
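+/// Wraps `p` so that its `PartialState` is `()`, opting the parser out of partial parsing;
+/// the wrapped parser is always run from the beginning instead of resuming from saved state.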
+pub fn no_partial<Input, P>(p: P) -> NoPartial<P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+{
+ NoPartial(p)
+}
+
+#[derive(Copy, Clone)]
+pub struct Ignore<P>(P);
+impl<Input, P> Parser<Input> for Ignore<P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+{
+ type Output = ();
+ type PartialState = P::PartialState;
+
+ #[inline]
+ fn parse_lazy(
+ &mut self,
+ input: &mut Input,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ self.0.parse_lazy(input).map(|_| ())
+ }
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ self.0.parse_mode(mode, input, state).map(|_| ())
+ }
+
+ forward_parser!(Input, add_error add_committed_expected_error parser_count, 0);
+}
+
+#[doc(hidden)]
+pub fn ignore<Input, P>(p: P) -> Ignore<P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+{
+ Ignore(p)
+}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+#[derive(Default)]
+pub struct AnyPartialState(Option<Box<dyn std::any::Any>>);
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+pub struct AnyPartialStateParser<P>(P);
+
+#[cfg(feature = "std")]
+impl<Input, P> Parser<Input> for AnyPartialStateParser<P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ P::PartialState: 'static,
+{
+ type Output = P::Output;
+ type PartialState = AnyPartialState;
+
+ #[inline]
+ fn parse_lazy(
+ &mut self,
+ input: &mut Input,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ self.0.parse_lazy(input)
+ }
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ let mut new_child_state;
+ let result = {
+ let child_state = if state.0.is_none() {
+ new_child_state = Some(Default::default());
+ new_child_state.as_mut().unwrap()
+ } else {
+ new_child_state = None;
+ state.0.as_mut().unwrap().downcast_mut().unwrap()
+ };
+
+ self.0.parse_mode(mode, input, child_state)
+ };
+
+ if let CommitErr(_) = result {
+ if state.0.is_none() {
+ // FIXME Make None unreachable for LLVM
+ state.0 = Some(Box::new(new_child_state.unwrap()));
+ }
+ }
+
+ result
+ }
+
+ forward_parser!(Input, add_error add_committed_expected_error parser_count, 0);
+}
+
+/// Returns a parser where `P::PartialState` is boxed. Useful as a way to avoid writing the type
+/// since it can get very large after combining a few parsers.
+///
+/// ```
+/// # #[macro_use]
+/// # extern crate combine;
+/// # use combine::parser::combinator::{AnyPartialState, any_partial_state};
+/// # use combine::parser::char::letter;
+/// # use combine::*;
+///
+/// # fn main() {
+///
+/// parser! {
+/// type PartialState = AnyPartialState;
+/// fn example[Input]()(Input) -> (char, char)
+/// where [ Input: Stream<Token = char> ]
+/// {
+/// any_partial_state((letter(), letter()))
+/// }
+/// }
+///
+/// assert_eq!(
+/// example().easy_parse("ab"),
+/// Ok((('a', 'b'), ""))
+/// );
+///
+/// # }
+/// ```
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+pub fn any_partial_state<Input, P>(p: P) -> AnyPartialStateParser<P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ P::PartialState: 'static,
+{
+ AnyPartialStateParser(p)
+}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+#[derive(Default)]
+pub struct AnySendPartialState(Option<Box<dyn std::any::Any + Send>>);
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+pub struct AnySendPartialStateParser<P>(P);
+
+#[cfg(feature = "std")]
+impl<Input, P> Parser<Input> for AnySendPartialStateParser<P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ P::PartialState: Send + 'static,
+{
+ type Output = P::Output;
+ type PartialState = AnySendPartialState;
+
+ #[inline]
+ fn parse_lazy(
+ &mut self,
+ input: &mut Input,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ self.0.parse_lazy(input)
+ }
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ let mut new_child_state;
+ let result = {
+ let child_state = if state.0.is_none() {
+ new_child_state = Some(Default::default());
+ new_child_state.as_mut().unwrap()
+ } else {
+ new_child_state = None;
+ state.0.as_mut().unwrap().downcast_mut().unwrap()
+ };
+
+ self.0.parse_mode(mode, input, child_state)
+ };
+
+ if let CommitErr(_) = result {
+ if state.0.is_none() {
+ // FIXME Make None unreachable for LLVM
+ state.0 = Some(Box::new(new_child_state.unwrap()));
+ }
+ }
+
+ result
+ }
+
+ forward_parser!(Input, add_error add_committed_expected_error parser_count, 0);
+}
+
+/// Returns a parser where `P::PartialState` is boxed. Useful as a way to avoid writing the type
+/// since it can get very large after combining a few parsers.
+///
+/// ```
+/// # #[macro_use]
+/// # extern crate combine;
+/// # use combine::parser::combinator::{AnySendPartialState, any_send_partial_state};
+/// # use combine::parser::char::letter;
+/// # use combine::*;
+///
+/// # fn main() {
+///
+/// parser! {
+/// type PartialState = AnySendPartialState;
+/// fn example[Input]()(Input) -> (char, char)
+/// where [ Input: Stream<Token = char> ]
+/// {
+/// any_send_partial_state((letter(), letter()))
+/// }
+/// }
+///
+/// assert_eq!(
+/// example().easy_parse("ab"),
+/// Ok((('a', 'b'), ""))
+/// );
+///
+/// # }
+/// ```
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+pub fn any_send_partial_state<Input, P>(p: P) -> AnySendPartialStateParser<P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ P::PartialState: Send + 'static,
+{
+ AnySendPartialStateParser(p)
+}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+#[derive(Default)]
+pub struct AnySendSyncPartialState(Option<Box<dyn std::any::Any + Send + Sync>>);
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+pub struct AnySendSyncPartialStateParser<P>(P);
+
+#[cfg(feature = "std")]
+impl<Input, P> Parser<Input> for AnySendSyncPartialStateParser<P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ P::PartialState: Send + Sync + 'static,
+{
+ type Output = P::Output;
+ type PartialState = AnySendSyncPartialState;
+
+ #[inline]
+ fn parse_lazy(
+ &mut self,
+ input: &mut Input,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ self.0.parse_lazy(input)
+ }
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ let mut new_child_state;
+ let result = {
+ let child_state = if state.0.is_none() {
+ new_child_state = Some(Default::default());
+ new_child_state.as_mut().unwrap()
+ } else {
+ new_child_state = None;
+ state.0.as_mut().unwrap().downcast_mut().unwrap()
+ };
+
+ self.0.parse_mode(mode, input, child_state)
+ };
+
+ if let CommitErr(_) = result {
+ if state.0.is_none() {
+ // FIXME Make None unreachable for LLVM
+ state.0 = Some(Box::new(new_child_state.unwrap()));
+ }
+ }
+
+ result
+ }
+
+ forward_parser!(Input, add_error add_committed_expected_error parser_count, 0);
+}
+
+/// Returns a parser where `P::PartialState` is boxed. Useful as a way to avoid writing the type
+/// since it can get very large after combining a few parsers.
+///
+/// ```
+/// # #[macro_use]
+/// # extern crate combine;
+/// # use combine::parser::combinator::{AnySendSyncPartialState, any_send_sync_partial_state};
+/// # use combine::parser::char::letter;
+/// # use combine::*;
+///
+/// # fn main() {
+///
+/// fn example<Input>() -> impl Parser<Input, Output = (char, char), PartialState = AnySendSyncPartialState>
+/// where
+/// Input: Stream<Token = char>,
+/// Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+/// {
+/// any_send_sync_partial_state((letter(), letter()))
+/// }
+///
+/// assert_eq!(
+/// example().easy_parse("ab"),
+/// Ok((('a', 'b'), ""))
+/// );
+///
+/// # }
+/// ```
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+pub fn any_send_sync_partial_state<Input, P>(p: P) -> AnySendSyncPartialStateParser<P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ P::PartialState: Send + Sync + 'static,
+{
+ AnySendSyncPartialStateParser(p)
+}
+
+#[derive(Copy, Clone)]
+pub struct Lazy<P>(P);
+impl<Input, O, P, R> Parser<Input> for Lazy<P>
+where
+ Input: Stream,
+ P: FnMut() -> R,
+ R: Parser<Input, Output = O>,
+{
+ type Output = O;
+ type PartialState = R::PartialState;
+
+ fn parse_stream(&mut self, input: &mut Input) -> ParseResult<O, <Input as StreamOnce>::Error> {
+ (self.0)().parse_stream(input)
+ }
+
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<O, <Input as StreamOnce>::Error> {
+ (self.0)().parse_lazy(input)
+ }
+
+ parse_mode!(Input);
+
+ fn parse_committed_mode<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ (self.0)().parse_mode(mode, input, state)
+ }
+
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ (self.0)().parse_mode_impl(mode, input, state)
+ }
+
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ (self.0)().add_error(errors);
+ }
+
+ fn add_committed_expected_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ (self.0)().add_committed_expected_error(errors);
+ }
+}
+
+/// Constructs the parser lazily on each `parse_*` call. Can be used to effectively reduce the
+/// size of deeply nested parsers as only the function producing the parser is stored.
+///
+/// NOTE: Expects that the parser returned is always the same one; if that is not the case, the
+/// reported error may be wrong. If different parsers may be returned, use the [`factory`][] parser
+/// instead.
+///
+/// [`factory`]: fn.factory.html
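+///
+/// A minimal usage sketch (the closure below could just as well build a large, deeply nested
+/// parser; only the closure itself is stored):
+///
+/// ```
+/// # use combine::*;
+/// # use combine::parser::char::digit;
+/// # use combine::parser::combinator::lazy;
+/// // The parser is rebuilt by the closure on every `parse_*` call
+/// let mut parser = lazy(|| many1::<String, _, _>(digit()));
+/// assert_eq!(parser.parse("123!"), Ok(("123".to_string(), "!")));
+/// ```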
+pub fn lazy<Input, P, R>(p: P) -> Lazy<P>
+where
+ Input: Stream,
+ P: FnMut() -> R,
+ R: Parser<Input>,
+{
+ Lazy(p)
+}
+
+#[derive(Copy, Clone)]
+pub struct Factory<P, R>(P, Option<R>);
+
+impl<P, R> Factory<P, R> {
+ fn parser<Input>(&mut self, input: &mut Input) -> &mut R
+ where
+ P: FnMut(&mut Input) -> R,
+ {
+ if let Some(ref mut r) = self.1 {
+ return r;
+ }
+ self.1 = Some((self.0)(input));
+ self.1.as_mut().unwrap()
+ }
+}
+
+impl<Input, O, P, R> Parser<Input> for Factory<P, R>
+where
+ Input: Stream,
+ P: FnMut(&mut Input) -> R,
+ R: Parser<Input, Output = O>,
+{
+ type Output = O;
+ type PartialState = R::PartialState;
+
+ parse_mode!(Input);
+
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+        // Always ask for a new parser, except when we are resuming a partial call, since we want
+        // to resume with the same parser in that case
+ if mode.is_first() {
+ self.1 = None;
+ }
+ self.parser(input).parse_mode_impl(mode, input, state)
+ }
+
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ if let Some(parser) = &mut self.1 {
+ parser.add_error(errors);
+ }
+ }
+
+ fn add_committed_expected_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ if let Some(parser) = &mut self.1 {
+ parser.add_committed_expected_error(errors);
+ }
+ }
+}
+
+/// Constructs the parser lazily on each `parse_*` call. This is similar to [`lazy`][] but it
+/// takes `Input` as an argument and allows different parsers to be returned on each call to
+/// `p` while still reporting the correct errors.
+///
+/// [`lazy`]: fn.lazy.html
+///
+/// ```
+/// # use combine::*;
+/// # use combine::parser::char::{digit, letter};
+/// # use combine::parser::combinator::{FnOpaque, opaque, factory};
+///
+/// let mut parsers: Vec<FnOpaque<_, _>> = vec![opaque(|f| f(&mut digit())), opaque(|f| f(&mut letter()))];
+/// let mut iter = parsers.into_iter().cycle();
+/// let mut parser = many(factory(move |_| iter.next().unwrap()));
+/// assert_eq!(parser.parse("1a2b3cd"), Ok(("1a2b3c".to_string(), "d")));
+/// ```
+pub fn factory<Input, P, R>(p: P) -> Factory<P, R>
+where
+ Input: Stream,
+ P: FnMut(&mut Input) -> R,
+ R: Parser<Input>,
+{
+ Factory(p, None)
+}
+
+mod internal {
+ pub trait Sealed {}
+}
+
+use self::internal::Sealed;
+
+pub trait StrLike: Sealed {
+ fn from_utf8(&self) -> Result<&str, ()>;
+}
+
+#[cfg(feature = "std")]
+impl Sealed for String {}
+#[cfg(feature = "std")]
+impl StrLike for String {
+ fn from_utf8(&self) -> Result<&str, ()> {
+ Ok(self)
+ }
+}
+
+impl<'a> Sealed for &'a str {}
+impl<'a> StrLike for &'a str {
+ fn from_utf8(&self) -> Result<&str, ()> {
+ Ok(*self)
+ }
+}
+
+impl Sealed for str {}
+impl StrLike for str {
+ fn from_utf8(&self) -> Result<&str, ()> {
+ Ok(self)
+ }
+}
+
+#[cfg(feature = "std")]
+impl Sealed for Vec<u8> {}
+#[cfg(feature = "std")]
+impl StrLike for Vec<u8> {
+ fn from_utf8(&self) -> Result<&str, ()> {
+ (**self).from_utf8()
+ }
+}
+
+impl<'a> Sealed for &'a [u8] {}
+impl<'a> StrLike for &'a [u8] {
+ fn from_utf8(&self) -> Result<&str, ()> {
+ (**self).from_utf8()
+ }
+}
+
+impl Sealed for [u8] {}
+impl StrLike for [u8] {
+ fn from_utf8(&self) -> Result<&str, ()> {
+ str::from_utf8(self).map_err(|_| ())
+ }
+}
+
+parser! {
+pub struct FromStr;
+type PartialState = P::PartialState;
+
+/// Takes a parser that outputs a string-like value (`&str`, `String`, `&[u8]` or `Vec<u8>`) and parses it
+/// using `std::str::FromStr`. Errors if the output of `parser` is not UTF-8 or if
+/// `FromStr::from_str` returns an error.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::parser::range;
+/// # use combine::parser::repeat::many1;
+/// # use combine::parser::combinator::from_str;
+/// # use combine::parser::char;
+/// # use combine::parser::byte;
+/// # use combine::*;
+/// # fn main() {
+/// let mut parser = from_str(many1::<String, _, _>(char::digit()));
+/// let result = parser.parse("12345\r\n");
+/// assert_eq!(result, Ok((12345i32, "\r\n")));
+///
+/// // Range parsers work as well
+/// let mut parser = from_str(range::take_while1(|c: char| c.is_digit(10)));
+/// let result = parser.parse("12345\r\n");
+/// assert_eq!(result, Ok((12345i32, "\r\n")));
+///
+/// // As do parsers that work with bytes
+/// let digits = || range::take_while1(|b: u8| b >= b'0' && b <= b'9');
+/// let mut parser = from_str(range::recognize((
+/// digits(),
+/// byte::byte(b'.'),
+/// digits(),
+/// )));
+/// let result = parser.parse(&b"123.45\r\n"[..]);
+/// assert_eq!(result, Ok((123.45f64, &b"\r\n"[..])));
+/// # }
+/// ```
+pub fn from_str[Input, O, P](parser: P)(Input) -> O
+where [
+ P: Parser<Input>,
+ P::Output: StrLike,
+ O: str::FromStr,
+ O::Err: fmt::Display,
+]
+{
+ parser.and_then(|r| {
+ r.from_utf8()
+ .map_err(|_| StreamErrorFor::<Input>::expected_static_message("UTF-8"))
+ .and_then(|s| s.parse().map_err(StreamErrorFor::<Input>::message_format))
+ })
+}
+}
+
+#[derive(Copy, Clone)]
+pub struct Opaque<F, Input, O, S>(F, PhantomData<fn(&mut Input, &mut S) -> O>);
+impl<Input, F, O, S> Parser<Input> for Opaque<F, Input, O, S>
+where
+ Input: Stream,
+ S: Default,
+ F: FnMut(&mut dyn FnMut(&mut dyn Parser<Input, Output = O, PartialState = S>)),
+{
+ type Output = O;
+ type PartialState = S;
+
+ fn parse_stream(&mut self, input: &mut Input) -> ParseResult<O, <Input as StreamOnce>::Error> {
+ let mut x = None;
+ (self.0)(&mut |parser| x = Some(parser.parse_stream(input)));
+ x.expect("Parser")
+ }
+
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<O, <Input as StreamOnce>::Error> {
+ let mut x = None;
+ (self.0)(&mut |parser| x = Some(parser.parse_lazy(input)));
+ x.expect("Parser")
+ }
+
+ parse_mode!(Input);
+
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ let mut x = None;
+ (self.0)(&mut |parser| {
+ x = Some(if mode.is_first() {
+ parser.parse_first(input, state)
+ } else {
+ parser.parse_partial(input, state)
+ })
+ });
+ x.expect("Parser")
+ }
+
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ (self.0)(&mut |parser| parser.add_error(errors));
+ }
+
+ fn add_committed_expected_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ (self.0)(&mut |parser| parser.add_committed_expected_error(errors));
+ }
+}
+
+/// Alias over `Opaque` where the function can be a plain function pointer (does not need to
+/// capture any values)
+pub type FnOpaque<Input, O, S = ()> =
+ Opaque<fn(&mut dyn FnMut(&mut dyn Parser<Input, Output = O, PartialState = S>)), Input, O, S>;
+
+/// Creates a parser from a function which takes a function that is given the actual parser.
+/// Though convoluted, this makes it possible to hide the concrete parser type without `Box` or
+/// losing the full information about the parser, as is the case with [`parser`][].
+///
+/// Since this hides the type, it can also be useful for writing mutually recursive `impl Parser`
+/// parsers to break the otherwise arbitrarily large type that rustc creates internally.
+///
+/// If you need a more general version (that does not need trait objects) try the [`parser!`][]
+/// macro.
+///
+/// ```
+/// # #[macro_use]
+/// # extern crate combine;
+/// # use combine::parser::combinator::{FnOpaque, no_partial};
+/// # use combine::parser::char::{char, digit};
+/// # use combine::*;
+///
+/// # fn main() {
+///
+/// #[derive(PartialEq, Debug)]
+/// enum Expr {
+/// Number(i64),
+/// Pair(Box<Expr>, Box<Expr>),
+/// }
+///
+/// fn expr<Input>() -> FnOpaque<Input, Expr>
+/// where
+/// Input: Stream<Token = char>,
+/// Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+/// {
+/// opaque!(
+/// // `no_partial` disables partial parsing and replaces the partial state with `()`,
+/// // letting us avoid naming that type
+/// no_partial(choice((
+/// from_str(many1::<String, _, _>(digit()))
+/// .map(Expr::Number),
+/// (char('('), expr(), char(','), expr(), char(')'))
+/// .map(|(_, l, _, r, _)| Expr::Pair(Box::new(l), Box::new(r)))
+/// ))),
+/// )
+/// }
+///
+/// assert_eq!(
+/// expr().easy_parse("123"),
+/// Ok((Expr::Number(123), ""))
+/// );
+///
+/// # }
+/// ```
+///
+/// [`parser`]: ../function/fn.parser.html
+/// [`parser!`]: ../../macro.parser.html
+pub fn opaque<Input, F, O, S>(f: F) -> Opaque<F, Input, O, S>
+where
+ Input: Stream,
+ S: Default,
+ F: FnMut(&mut dyn FnMut(&mut dyn Parser<Input, Output = O, PartialState = S>)),
+{
+ Opaque(f, PhantomData)
+}
+
+/// Convenience macro over [`opaque`][].
+///
+/// [`opaque`]: parser/combinator/fn.opaque.html
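+///
+/// A small sketch of hiding a concrete parser type behind the `FnOpaque` alias (the `digits`
+/// function here is just an illustrative helper):
+///
+/// ```
+/// # #[macro_use]
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::digit;
+/// # use combine::parser::combinator::{no_partial, FnOpaque};
+/// # fn main() {
+/// fn digits() -> FnOpaque<&'static str, String> {
+///     // `no_partial` keeps the partial state at `()`, matching `FnOpaque`'s default
+///     opaque!(no_partial(many1(digit())))
+/// }
+///
+/// assert_eq!(digits().parse("42!"), Ok(("42".to_string(), "!")));
+/// # }
+/// ```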
+#[macro_export]
+macro_rules! opaque {
+ ($e: expr) => {
+ $crate::opaque!($e,);
+ };
+ ($e: expr,) => {
+ $crate::parser::combinator::opaque(
+ move |f: &mut dyn FnMut(&mut $crate::Parser<_, Output = _, PartialState = _>)| {
+ f(&mut $e)
+ },
+ )
+ };
+}
+
+pub struct InputConverter<InputInner, P, C>
+where
+ InputInner: Stream,
+{
+ pub parser: P,
+ pub converter: C,
+ pub _marker: PhantomData<fn(InputInner)>,
+}
+impl<Input, InputInner, P, C> Parser<Input> for InputConverter<InputInner, P, C>
+where
+ Input: Stream,
+ InputInner: Stream,
+ P: Parser<InputInner>,
+ for<'c> C: Converter<'c, Input, InputInner = InputInner>,
+{
+ type Output = P::Output;
+ type PartialState = P::PartialState;
+
+ parse_mode!(Input);
+
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, Input::Error>
+ where
+ M: ParseMode,
+ {
+ let mut input_inner = match self.converter.convert(input) {
+ Ok(x) => x,
+ Err(err) => return PeekErr(err.into()),
+ };
+ self.parser
+ .parse_mode(mode, &mut input_inner, state)
+ .map_err(|err| self.converter.convert_error(input, err))
+ }
+}
+
+pub trait Converter<'a, Input>
+where
+ Input: Stream,
+{
+ type InputInner: Stream + 'a;
+ fn convert(&mut self, input: &'a mut Input) -> Result<Self::InputInner, Input::Error>;
+ fn convert_error(
+ &mut self,
+ input: &'a mut Input,
+ error: <Self::InputInner as StreamOnce>::Error,
+ ) -> Input::Error;
+}
+
+impl<'a, Input, InputInner> Converter<'a, Input>
+ for (
+ fn(&'a mut Input) -> Result<InputInner, Input::Error>,
+ fn(&'a mut Input, InputInner::Error) -> Input::Error,
+ )
+where
+ Input: Stream,
+ InputInner: Stream + 'a,
+{
+ type InputInner = InputInner;
+ fn convert(&mut self, input: &'a mut Input) -> Result<InputInner, Input::Error> {
+ (self.0)(input)
+ }
+ fn convert_error(&mut self, input: &'a mut Input, error: InputInner::Error) -> Input::Error {
+ (self.1)(input, error)
+ }
+}
+
+pub fn input_converter<Input, InputInner, P, C>(
+ parser: P,
+ converter: C,
+) -> InputConverter<InputInner, P, C>
+where
+ Input: Stream,
+ InputInner: Stream,
+ P: Parser<InputInner>,
+ for<'c> C: Converter<'c, Input, InputInner = InputInner>,
+{
+ InputConverter {
+ parser,
+ converter,
+ _marker: PhantomData,
+ }
+}
+
+#[derive(Clone)]
+pub struct Spanned<P>(P);
+impl<Input, P, Q> Parser<Input> for Spanned<P>
+where
+ P: Parser<Input>,
+ Input: Stream<Position = Span<Q>>,
+ Input::Error: ParseError<Input::Token, Input::Range, Span<Q>>,
+ Q: Ord + Clone,
+{
+ type Output = P::Output;
+ type PartialState = P::PartialState;
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ let start = input.position().start;
+ self.0.parse_mode(mode, input, state).map_err(|mut err| {
+ let error_span = err.position();
+ // If an inner `spanned` combinator has already attached its span that will be more
+ // specific so only set a span if the current error has a position, not a span
+ if error_span.start == error_span.end {
+ let end = input.position().end;
+ err.set_position(Span { start, end });
+ }
+ err
+ })
+ }
+
+ forward_parser!(Input, add_error, add_committed_expected_error, 0);
+}
+
+/// Equivalent to [`p.spanned()`].
+///
+/// [`p.spanned()`]: ../trait.Parser.html#method.spanned
+pub fn spanned<Input, P>(p: P) -> Spanned<P>
+where
+ P: Parser<Input>,
+ Input: Stream,
+{
+ Spanned(p)
+}
diff --git a/src/parser/error.rs b/src/parser/error.rs
new file mode 100644
index 0000000..4ef6af1
--- /dev/null
+++ b/src/parser/error.rs
@@ -0,0 +1,245 @@
+//! Parsers which cause errors or modify the returned error on parse failure.
+
+use crate::{
+ error::{
+ ErrorInfo, ParseError,
+ ParseResult::{self, *},
+ StreamError, Tracked,
+ },
+ lib::marker::PhantomData,
+ parser::ParseMode,
+ Parser, Stream, StreamOnce,
+};
+
+#[derive(Clone)]
+pub struct Unexpected<I, T, E>(E, PhantomData<fn(I) -> (I, T)>)
+where
+ I: Stream;
+impl<Input, T, E> Parser<Input> for Unexpected<Input, T, E>
+where
+ Input: Stream,
+ E: for<'s> ErrorInfo<'s, Input::Token, Input::Range>,
+{
+ type Output = T;
+ type PartialState = ();
+ #[inline]
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<T, <Input as StreamOnce>::Error> {
+ PeekErr(<Input as StreamOnce>::Error::empty(input.position()).into())
+ }
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ errors.error.add(StreamError::unexpected(&self.0));
+ }
+}
+/// Always fails with `message` as an unexpected error.
+/// Never consumes any input.
+///
+/// Has `()` as its output type.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::error::StreamError;
+/// # fn main() {
+/// let result = unexpected("token")
+/// .easy_parse("a");
+/// assert!(result.is_err());
+/// assert!(
+/// result.err()
+/// .unwrap()
+/// .errors
+/// .iter()
+/// .any(|m| *m == StreamError::unexpected("token"))
+/// );
+/// # }
+/// ```
+pub fn unexpected<Input, S>(message: S) -> Unexpected<Input, (), S>
+where
+ Input: Stream,
+ S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>,
+{
+ unexpected_any(message)
+}
+
+/// Always fails with `message` as an unexpected error.
+/// Never consumes any input.
+///
+/// May have anything as the output type but must be used such that the output type can be inferred.
+/// The `unexpected` parser can be used if the output type does not matter.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::error::unexpected_any;
+/// # use combine::error::StreamError;
+/// # fn main() {
+/// let result = token('b').or(unexpected_any("token"))
+/// .easy_parse("a");
+/// assert!(result.is_err());
+/// assert!(
+/// result.err()
+/// .unwrap()
+/// .errors
+/// .iter()
+/// .any(|m| *m == StreamError::unexpected("token"))
+/// );
+/// # }
+/// ```
+pub fn unexpected_any<Input, S, T>(message: S) -> Unexpected<Input, T, S>
+where
+ Input: Stream,
+ S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>,
+{
+ Unexpected(message, PhantomData)
+}
+
+#[derive(Clone)]
+pub struct Message<P, S>(P, S);
+impl<Input, P, S> Parser<Input> for Message<P, S>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>,
+{
+ type Output = P::Output;
+ type PartialState = P::PartialState;
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ match self.0.parse_mode(mode, input, state) {
+ CommitOk(x) => CommitOk(x),
+ PeekOk(x) => PeekOk(x),
+
+ // The message should always be added even if some input was committed before failing
+ CommitErr(mut err) => {
+ err.add_message(&self.1);
+ CommitErr(err)
+ }
+
+ // The message will be added in `add_error`
+ PeekErr(err) => PeekErr(err),
+ }
+ }
+
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ self.0.add_error(errors);
+ errors.error.add_message(&self.1);
+ }
+
+ forward_parser!(Input, parser_count add_committed_expected_error, 0);
+}
+
+/// Equivalent to [`p1.message(msg)`].
+///
+/// [`p1.message(msg)`]: ../trait.Parser.html#method.message
+pub fn message<Input, P, S>(p: P, msg: S) -> Message<P, S>
+where
+ P: Parser<Input>,
+ Input: Stream,
+ S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>,
+{
+ Message(p, msg)
+}
+
+#[derive(Clone)]
+pub struct Expected<P, S>(P, S);
+impl<Input, P, S> Parser<Input> for Expected<P, S>
+where
+ P: Parser<Input>,
+ Input: Stream,
+ S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>,
+{
+ type Output = P::Output;
+ type PartialState = P::PartialState;
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ self.0.parse_mode(mode, input, state)
+ }
+
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ ParseError::set_expected(errors, StreamError::expected(&self.1), |errors| {
+ self.0.add_error(errors);
+ })
+ }
+
+ forward_parser!(Input, parser_count add_committed_expected_error, 0);
+}
+
+/// Equivalent to [`p.expected(info)`].
+///
+/// [`p.expected(info)`]: ../trait.Parser.html#method.expected
+pub fn expected<Input, P, S>(p: P, info: S) -> Expected<P, S>
+where
+ P: Parser<Input>,
+ Input: Stream,
+ S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>,
+{
+ Expected(p, info)
+}
+
+#[derive(Clone)]
+pub struct Silent<P>(P);
+impl<Input, P> Parser<Input> for Silent<P>
+where
+ P: Parser<Input>,
+ Input: Stream,
+{
+ type Output = P::Output;
+ type PartialState = P::PartialState;
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ self.0.parse_mode(mode, input, state).map_err(|mut err| {
+ err.clear_expected();
+ err
+ })
+ }
+
+ fn add_error(&mut self, _errors: &mut Tracked<<Input as StreamOnce>::Error>) {}
+
+ fn add_committed_expected_error(
+ &mut self,
+ _errors: &mut Tracked<<Input as StreamOnce>::Error>,
+ ) {
+ }
+
+ forward_parser!(Input, parser_count, 0);
+}
+
+/// Equivalent to [`p.silent()`].
+///
+/// [`p.silent()`]: ../trait.Parser.html#method.silent
+pub fn silent<Input, P>(p: P) -> Silent<P>
+where
+ P: Parser<Input>,
+ Input: Stream,
+{
+ Silent(p)
+}
diff --git a/src/parser/function.rs b/src/parser/function.rs
new file mode 100644
index 0000000..45258d4
--- /dev/null
+++ b/src/parser/function.rs
@@ -0,0 +1,179 @@
+//! Parsers constructed from regular functions
+
+use crate::{
+ error::{ParseResult, StdParseResult},
+ lib::marker::PhantomData,
+ stream::Stream,
+ Parser,
+};
+
+impl<'a, Input: Stream, O> Parser<Input>
+ for dyn FnMut(&mut Input) -> StdParseResult<O, Input> + 'a
+{
+ type Output = O;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<O, Input::Error> {
+ self(input).into()
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct FnParser<Input, F>(F, PhantomData<fn(Input) -> Input>);
+
+/// Wraps a function, turning it into a parser.
+///
+/// Mainly needed to turn closures into parsers, since plain function types can be cast to
+/// function pointers, which are already usable as parsers.
+///
+/// ```
+/// extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::digit;
+/// # use combine::error::{Commit, StreamError};
+/// # use combine::stream::easy;
+/// # fn main() {
+/// let mut even_digit = parser(|input| {
+/// // Help type inference out
+/// let _: &mut easy::Stream<&str> = input;
+/// let position = input.position();
+/// let (char_digit, committed) = digit().parse_stream(input).into_result()?;
+/// let d = (char_digit as i32) - ('0' as i32);
+/// if d % 2 == 0 {
+/// Ok((d, committed))
+/// }
+/// else {
+/// //Return an empty error since we only tested the first token of the stream
+/// let errors = easy::Errors::new(
+/// position,
+/// StreamError::expected("even number")
+/// );
+/// Err(Commit::Peek(errors.into()))
+/// }
+/// });
+/// let result = even_digit
+/// .easy_parse("8")
+/// .map(|x| x.0);
+/// assert_eq!(result, Ok(8));
+/// # }
+/// ```
+pub fn parser<Input, O, F>(f: F) -> FnParser<Input, F>
+where
+ Input: Stream,
+ F: FnMut(&mut Input) -> StdParseResult<O, Input>,
+{
+ FnParser(f, PhantomData)
+}
+
+impl<Input, O, F> Parser<Input> for FnParser<Input, F>
+where
+ Input: Stream,
+ F: FnMut(&mut Input) -> StdParseResult<O, Input>,
+{
+ type Output = O;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<O, Input::Error> {
+ (self.0)(input).into()
+ }
+}
+
+impl<Input, O> Parser<Input> for fn(&mut Input) -> StdParseResult<O, Input>
+where
+ Input: Stream,
+{
+ type Output = O;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<O, Input::Error> {
+ self(input).into()
+ }
+}
+
+#[derive(Copy)]
+pub struct EnvParser<E, Input, T>
+where
+ Input: Stream,
+{
+ env: E,
+ parser: fn(E, &mut Input) -> StdParseResult<T, Input>,
+}
+
+impl<E, Input, T> Clone for EnvParser<E, Input, T>
+where
+ Input: Stream,
+ E: Clone,
+{
+ fn clone(&self) -> Self {
+ EnvParser {
+ env: self.env.clone(),
+ parser: self.parser,
+ }
+ }
+}
+
+impl<Input, E, O> Parser<Input> for EnvParser<E, Input, O>
+where
+ E: Clone,
+ Input: Stream,
+{
+ type Output = O;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<O, Input::Error> {
+ (self.parser)(self.env.clone(), input).into()
+ }
+}
+
+/// Constructs a parser out of an environment and a function which needs the given environment to
+/// do the parsing. This is commonly useful to allow multiple parsers to share some environment
+/// while still allowing the parsers to be written in separate functions.
+///
+/// ```
+/// # extern crate combine;
+/// # use std::collections::HashMap;
+/// # use combine::*;
+/// # use combine::parser::function::env_parser;
+/// # use combine::parser::char::letter;
+/// # fn main() {
+/// struct Interner(HashMap<String, u32>);
+/// impl Interner {
+/// fn string<Input>(&self, input: &mut Input) -> StdParseResult<u32, Input>
+/// where Input: Stream<Token = char>,
+/// Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+/// {
+/// many(letter())
+/// .map(|s: String| self.0.get(&s).cloned().unwrap_or(0))
+/// .parse_stream(input)
+/// .into_result()
+/// }
+/// }
+///
+/// let mut map = HashMap::new();
+/// map.insert("hello".into(), 1);
+/// map.insert("test".into(), 2);
+///
+/// let env = Interner(map);
+/// let mut parser = env_parser(&env, Interner::string);
+///
+/// let result = parser.parse("hello");
+/// assert_eq!(result, Ok((1, "")));
+///
+/// let result = parser.parse("world");
+/// assert_eq!(result, Ok((0, "")));
+/// # }
+/// ```
+pub fn env_parser<E, Input, O>(
+ env: E,
+ parser: fn(E, &mut Input) -> StdParseResult<O, Input>,
+) -> EnvParser<E, Input, O>
+where
+ E: Clone,
+ Input: Stream,
+{
+ EnvParser { env, parser }
+}
diff --git a/src/parser/mod.rs b/src/parser/mod.rs
new file mode 100644
index 0000000..98a1e03
--- /dev/null
+++ b/src/parser/mod.rs
@@ -0,0 +1,1204 @@
+//! A collection of both concrete parsers as well as parser combinators.
+//!
+//! Implements the [`Parser`] trait which is the core of `combine` and contains the submodules
+//! implementing all combine parsers.
+
+use crate::{
+ error::{
+ ErrorInfo, ParseError,
+ ParseResult::{self, *},
+ ResultExt, StreamError, Token, Tracked,
+ },
+ parser::{
+ combinator::{
+ and_then, flat_map, map, map_input, spanned, AndThen, Either, FlatMap, Map, MapInput,
+ Spanned,
+ },
+ error::{expected, message, silent, Expected, Message, Silent},
+ repeat::Iter,
+ sequence::{then, then_partial, then_ref, Then, ThenPartial, ThenRef},
+ },
+ stream::{Stream, StreamErrorFor, StreamOnce},
+ ErrorOffset,
+};
+
+use self::{
+ choice::{or, Or},
+ sequence::{skip, with, Skip, With},
+};
+
+/// Internal API. May break without a semver bump
+#[macro_export]
+#[doc(hidden)]
+macro_rules! parse_mode {
+ ($input_type: ty) => {
+ #[inline]
+ fn parse_partial(
+ &mut self,
+ input: &mut $input_type,
+ state: &mut Self::PartialState,
+ ) -> $crate::error::ParseResult<Self::Output, <$input_type as $crate::StreamOnce>::Error> {
+ self.parse_mode($crate::parser::PartialMode::default(), input, state)
+ }
+
+ #[inline]
+ fn parse_first(
+ &mut self,
+ input: &mut $input_type,
+ state: &mut Self::PartialState,
+ ) -> $crate::error::ParseResult<Self::Output, <$input_type as $crate::StreamOnce>::Error> {
+ self.parse_mode($crate::parser::FirstMode, input, state)
+ }
+ };
+}
+
+pub mod byte;
+pub mod char;
+pub mod choice;
+pub mod combinator;
+pub mod error;
+pub mod function;
+pub mod range;
+#[cfg(feature = "regex")]
+#[cfg_attr(docsrs, doc(cfg(feature = "regex")))]
+pub mod regex;
+pub mod repeat;
+pub mod sequence;
+pub mod token;
+
+/// By implementing the `Parser` trait a type says that it can be used to parse an input stream
+/// into the type `Output`.
+///
+/// All methods have a default implementation but there needs to be at least an implementation of
+/// [`parse_stream`] or [`parse_lazy`]. If the latter is implemented, an
+/// implementation of [`add_error`] may also be required. See the documentation for
+/// [`parse_lazy`] for details.
+///
+/// [`parse_stream`]: trait.Parser.html#method.parse_stream
+/// [`parse_lazy`]: trait.Parser.html#method.parse_lazy
+/// [`add_error`]: trait.Parser.html#method.add_error
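+///
+/// As a rough sketch, a handwritten parser only needs `Output`, `PartialState` and one of the
+/// parse methods; the `AnyToken` type below is purely illustrative and simply delegates to the
+/// built-in `any` parser to keep the example small:
+///
+/// ```
+/// # use combine::*;
+/// # use combine::error::ParseResult;
+/// struct AnyToken;
+///
+/// impl<Input> Parser<Input> for AnyToken
+/// where
+///     Input: Stream,
+/// {
+///     type Output = Input::Token;
+///     type PartialState = ();
+///
+///     fn parse_lazy(
+///         &mut self,
+///         input: &mut Input,
+///     ) -> ParseResult<Self::Output, Input::Error> {
+///         // Delegate to an existing parser; a real implementation would read from `input`
+///         any().parse_lazy(input)
+///     }
+/// }
+///
+/// let mut parser = AnyToken;
+/// assert_eq!(parser.parse("ab"), Ok(('a', "b")));
+/// ```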
+pub trait Parser<Input: Stream> {
+ /// The type which is returned if the parser is successful.
+ type Output;
+
+ /// Determines the state necessary to resume parsing after more input is supplied.
+ ///
+ /// If partial parsing is not supported this can be set to `()`.
+ type PartialState: Default;
+
+ /// Entry point of the parser. Takes some input and tries to parse it.
+ ///
+    /// Returns the parsed result and the remaining input if the parser succeeds, or an
+    /// error otherwise.
+ ///
+ /// This is the most straightforward entry point to a parser. Since it does not decorate the
+    /// input in any way you may find the error messages hard to read. If that is the case you
+ /// may want to try wrapping your input with an [`easy::Stream`] or call [`easy_parse`]
+ /// instead.
+ ///
+ /// [`easy::Stream`]: super::easy::Stream
+ /// [`easy_parse`]: super::parser::EasyParser::easy_parse
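+    ///
+    /// A minimal sketch, parsing directly from a `&str`:
+    ///
+    /// ```
+    /// # use combine::*;
+    /// # use combine::parser::char::letter;
+    /// let mut parser = many1::<String, _, _>(letter());
+    /// // On success the remaining, unparsed input is returned alongside the value
+    /// assert_eq!(parser.parse("abc123"), Ok(("abc".to_string(), "123")));
+    /// assert!(parser.parse("123").is_err());
+    /// ```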
+ fn parse(
+ &mut self,
+ mut input: Input,
+ ) -> Result<(Self::Output, Input), <Input as StreamOnce>::Error> {
+ match self.parse_stream(&mut input).into() {
+ Ok((v, _)) => Ok((v, input)),
+ Err(error) => Err(error.into_inner().error),
+ }
+ }
+
+ /// Entry point of the parser when using partial parsing.
+ /// Takes some input and tries to parse it.
+ ///
+    /// Returns the parsed result if the parser succeeds, or an error otherwise. The remaining,
+    /// unparsed input is left in `input`.
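+    ///
+    /// A minimal sketch with a complete (non-partial) input; the caller simply keeps the state
+    /// around between calls:
+    ///
+    /// ```
+    /// # use combine::*;
+    /// # use combine::parser::char::digit;
+    /// let mut parser = many1::<String, _, _>(digit());
+    /// let mut input = "123rest";
+    /// let mut state = Default::default();
+    /// assert_eq!(parser.parse_with_state(&mut input, &mut state), Ok("123".to_string()));
+    /// // The unparsed remainder stays in `input`
+    /// assert_eq!(input, "rest");
+    /// ```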
+ fn parse_with_state(
+ &mut self,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> Result<Self::Output, <Input as StreamOnce>::Error> {
+ match self.parse_stream_partial(input, state).into() {
+ Ok((v, _)) => Ok(v),
+ Err(error) => Err(error.into_inner().error),
+ }
+ }
+
+ /// Parses using the stream `input` by calling [`Stream::uncons`] one or more times.
+ ///
+    /// The result is returned as a single flattened [`ParseResult`], combining `Result` and
+    /// [`Commit`] into one type.
+    ///
+    /// [`Stream::uncons`]: super::stream::StreamOnce::uncons
+    /// [`Commit`]: super::error::Commit
+    /// [`ParseResult`]: super::error::ParseResult
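+    ///
+    /// A minimal sketch; `into_result` converts the flattened [`ParseResult`] back into a
+    /// `Result` when that is more convenient:
+    ///
+    /// ```
+    /// # use combine::*;
+    /// # use combine::parser::char::digit;
+    /// let mut input = "42";
+    /// let result = digit().parse_stream(&mut input).into_result();
+    /// assert!(result.is_ok());
+    /// // The stream itself has advanced past the parsed token
+    /// assert_eq!(input, "2");
+    /// ```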
+ #[inline]
+ fn parse_stream(
+ &mut self,
+ input: &mut Input,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ let before = input.checkpoint();
+ let mut state = Default::default();
+ let mut result = self.parse_first(input, &mut state);
+ if let ParseResult::PeekErr(ref mut error) = result {
+ ctry!(input.reset(before.clone()).committed());
+ if let Ok(t) = input.uncons() {
+ ctry!(input.reset(before).committed());
+ error.error.add_unexpected(Token(t));
+ } else {
+ error.error.add(StreamErrorFor::<Input>::end_of_input());
+ }
+ self.add_error(error);
+ }
+ result
+ }
+
+ /// Parses using the stream `input` by calling [`Stream::uncons`] one or more times.
+ ///
+ /// Specialized version of [`parse_stream`] which permits error value creation to be
+ /// skipped in the common case.
+ ///
+ /// When this parser returns `PeekErr`, this method is allowed to return an empty
+ /// [`Error`]. The error value that would have been returned can instead be obtained by
+ /// calling [`add_error`]. This allows a parent parser such as `choice` to skip the creation of
+ /// an unnecessary error value, if an alternative parser succeeds.
+ ///
+ /// Parsers should seek to implement this function instead of the above two if errors can be
+ /// encountered before consuming input. The default implementation always returns all errors,
+ /// with [`add_error`] being a no-op.
+ ///
+ /// [`Stream::uncons`]: super::stream::StreamOnce::uncons
+ /// [`parse_stream`]: Parser::parse_stream
+ /// [`Error`]: super::stream::StreamOnce::Error
+ /// [`add_error`]: trait.Parser.html#method.add_error
+ #[inline]
+ fn parse_lazy(
+ &mut self,
+ input: &mut Input,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ if input.is_partial() {
+            // If a partial parser is called from a non-partial parser (as it is here) we must
+            // reset the input to before the partial parser was called on errors that committed
+            // data, as that parser's partial state was just temporary and it will not be able to
+            // resume itself
+ let before = input.checkpoint();
+ let result = self.parse_first(input, &mut Default::default());
+ if let CommitErr(_) = result {
+ ctry!(input.reset(before).committed());
+ }
+ result
+ } else {
+ self.parse_first(input, &mut Default::default())
+ }
+ }
+
+ /// Adds the first error that would normally be returned by this parser if it failed with an
+ /// `PeekErr` result.
+ ///
+ /// See [`parse_lazy`] for details.
+ ///
+ /// [`parse_lazy`]: trait.Parser.html#method.parse_lazy
+ fn add_error(&mut self, _error: &mut Tracked<<Input as StreamOnce>::Error>) {}
+
+ /// Like `parse_stream` but supports partial parsing.
+ #[inline]
+ fn parse_stream_partial(
+ &mut self,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ let before = input.checkpoint();
+ let mut result = self.parse_partial(input, state);
+ if let ParseResult::PeekErr(ref mut error) = result {
+ ctry!(input.reset(before.clone()).committed());
+ if let Ok(t) = input.uncons() {
+ ctry!(input.reset(before).committed());
+ error.error.add_unexpected(Token(t));
+ } else {
+ error.error.add(StreamErrorFor::<Input>::end_of_input());
+ }
+ self.add_error(error);
+ }
+ result
+ }
+
+ /// Parses using the stream `input` and allows itself to be resumed at a later point using
+ /// `parse_partial` by storing the necessary intermediate state in `state`.
+ ///
+    /// Unlike the `parse_partial` function, this is allowed to assume that there is no partial
+    /// state to resume.
+ ///
+ /// Internal API. May break without a semver bump
+ /// Always overridden by the `parse_mode!` macro
+ #[inline]
+ #[doc(hidden)]
+ fn parse_first(
+ &mut self,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ self.parse_partial(input, state)
+ }
+
+ /// Parses using the stream `input` and allows itself to be resumed at a later point using
+ /// `parse_partial` by storing the necessary intermediate state in `state`
+ ///
+ /// Internal API. May break without a semver bump
+ /// Always overridden by the `parse_mode!` macro
+ #[inline]
+ #[doc(hidden)]
+ fn parse_partial(
+ &mut self,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ let _ = state;
+ self.parse_lazy(input)
+ }
+
+ /// Internal API. May break without a semver bump
+ #[doc(hidden)]
+ #[inline]
+ fn parse_mode<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ Self: Sized,
+ {
+ mode.parse(self, input, state)
+ }
+
+ /// Internal API. May break without a semver bump
+ #[doc(hidden)]
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ Self: Sized,
+ {
+ if mode.is_first() {
+ self.parse_first(input, state)
+ } else {
+ self.parse_partial(input, state)
+ }
+ }
+
+ /// Internal API. May break without a semver bump
+ #[doc(hidden)]
+ #[inline]
+ fn parse_committed_mode<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ Self: Sized,
+ {
+ if mode.is_first() {
+ FirstMode.parse_committed(self, input, state)
+ } else {
+ PartialMode::default().parse_committed(self, input, state)
+ }
+ }
+
+ /// Returns how many parsers this parser contains
+ ///
+ /// Internal API: This should not be implemented explicitly outside of combine.
+ #[doc(hidden)]
+ fn parser_count(&self) -> ErrorOffset {
+ ErrorOffset(1)
+ }
+
+ /// Internal API: This should not be implemented explicitly outside of combine.
+ #[doc(hidden)]
+ fn add_committed_expected_error(&mut self, _error: &mut Tracked<<Input as StreamOnce>::Error>) {
+ }
+
+ /// Borrows a parser instead of consuming it.
+ ///
+ /// Used to apply parser combinators on `self` without losing ownership.
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::error::Commit;
+ /// # use combine::parser::char::{digit, letter};
+ /// fn test(input: &mut &'static str) -> StdParseResult<(char, char), &'static str> {
+ /// let mut p = digit();
+ /// let ((d, _), committed) = (p.by_ref(), letter()).parse_stream(input).into_result()?;
+ /// let (d2, committed) = committed.combine(|_| p.parse_stream(input).into_result())?;
+ /// Ok(((d, d2), committed))
+ /// }
+ ///
+ /// fn main() {
+ /// let mut input = "1a23";
+ /// assert_eq!(
+ /// test(&mut input).map(|(t, c)| (t, c.map(|_| input))),
+ /// Ok((('1', '2'), Commit::Commit("3")))
+ /// );
+ /// }
+ /// ```
+ fn by_ref(&mut self) -> &mut Self
+ where
+ Self: Sized,
+ {
+ self
+ }
+
+ /// Discards the value of the `self` parser and returns the value of `p`.
+ /// Fails if any of the parsers fails.
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::parser::char::digit;
+ /// # fn main() {
+ /// let result = digit()
+ /// .with(token('i'))
+ /// .parse("9i")
+ /// .map(|x| x.0);
+ /// assert_eq!(result, Ok('i'));
+ /// # }
+ /// ```
+ fn with<P2>(self, p: P2) -> With<Self, P2>
+ where
+ Self: Sized,
+ P2: Parser<Input>,
+ {
+ with(self, p)
+ }
+
+ /// Discards the value of the `p` parser and returns the value of `self`.
+ /// Fails if any of the parsers fails.
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::parser::char::digit;
+ /// # fn main() {
+ /// let result = digit()
+ /// .skip(token('i'))
+ /// .parse("9i")
+ /// .map(|x| x.0);
+ /// assert_eq!(result, Ok('9'));
+ /// # }
+ /// ```
+ fn skip<P2>(self, p: P2) -> Skip<Self, P2>
+ where
+ Self: Sized,
+ P2: Parser<Input>,
+ {
+ skip(self, p)
+ }
+
+ /// Parses with `self` followed by `p`.
+ /// Succeeds if both parsers succeed, otherwise fails.
+ /// Returns a tuple with both values on success.
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::parser::char::digit;
+ /// # fn main() {
+ /// let result = digit()
+ /// .and(token('i'))
+ /// .parse("9i")
+ /// .map(|x| x.0);
+ /// assert_eq!(result, Ok(('9', 'i')));
+ /// # }
+ /// ```
+ fn and<P2>(self, p: P2) -> (Self, P2)
+ where
+ Self: Sized,
+ P2: Parser<Input>,
+ {
+ (self, p)
+ }
+
+ /// Returns a parser which attempts to parse using `self`. If `self` fails without committing
+ /// it tries to consume the same input using `p`.
+ ///
+ /// If you are looking to chain 3 or more parsers using `or` you may consider using the
+ /// [`choice!`] macro instead, which can be clearer and may result in a faster parser.
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::parser::char::{digit, string};
+ /// # fn main() {
+ /// let mut parser = string("let")
+ /// .or(digit().map(|_| "digit"))
+ /// .or(string("led"));
+ /// assert_eq!(parser.parse("let"), Ok(("let", "")));
+ /// assert_eq!(parser.parse("1"), Ok(("digit", "")));
+ /// assert!(parser.parse("led").is_err());
+ ///
+ /// let mut parser2 = string("two").or(string("three"));
+ /// // Fails as the parser for "two" consumes the first 't' before failing
+ /// assert!(parser2.parse("three").is_err());
+ ///
+ /// // Use 'attempt' to make failing parsers always act as if they have not committed any input
+ /// let mut parser3 = attempt(string("two")).or(attempt(string("three")));
+ /// assert_eq!(parser3.parse("three"), Ok(("three", "")));
+ /// # }
+ /// ```
+ ///
+ /// [`choice!`]: super::choice!
+ fn or<P2>(self, p: P2) -> Or<Self, P2>
+ where
+ Self: Sized,
+ P2: Parser<Input, Output = Self::Output>,
+ {
+ or(self, p)
+ }
+
+ /// Parses using `self` and then passes the value to `f` which returns a parser used to parse
+ /// the rest of the input.
+ ///
+ /// Since the parser returned from `f` must have a single type it can be useful to use the
+ /// [`left`](Parser::left) and [`right`](Parser::right) methods to merge parsers of differing types into one.
+ ///
+ /// If you are using partial parsing you may want to use [`then_partial`](Parser::then_partial) instead.
+ ///
+ /// ```
+ /// # #![cfg(feature = "std")]
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::parser::char::digit;
+ /// # use combine::error::Commit;
+ /// # use combine::stream::easy;
+ /// # fn main() {
+ /// let result = digit()
+ /// .then(|d| {
+ /// if d == '9' {
+ /// value(9).left()
+ /// }
+ /// else {
+ /// unexpected_any(d).message("Not a nine").right()
+ /// }
+ /// })
+ /// .easy_parse("9");
+ /// assert_eq!(result, Ok((9, "")));
+ /// # }
+ /// ```
+ fn then<N, F>(self, f: F) -> Then<Self, F>
+ where
+ Self: Sized,
+ F: FnMut(Self::Output) -> N,
+ N: Parser<Input>,
+ {
+ then(self, f)
+ }
+
+ /// Variant of [`then`](Parser::then) which parses using `self` and then passes the value to `f` as a `&mut` reference.
+ ///
+ /// Useful when doing partial parsing since it does not need to store the parser returned by
+    /// `f` in the partial state. Instead it will call `f` to request a new parser each time
+    /// parsing resumes and that parser is needed.
+ ///
+ /// Since the parser returned from `f` must have a single type it can be useful to use the
+ /// [`left`](Parser::left) and [`right`](Parser::right) methods to merge parsers of differing types into one.
+ ///
+ /// ```
+ /// # #![cfg(feature = "std")]
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::parser::char::digit;
+ /// # use combine::error::Commit;
+ /// # use combine::stream::easy;
+ /// # fn main() {
+ /// let result = digit()
+ /// .then_partial(|d| {
+ /// if *d == '9' {
+ /// value(9).left()
+ /// }
+ /// else {
+ /// unexpected_any(*d).message("Not a nine").right()
+ /// }
+ /// })
+ /// .easy_parse("9");
+ /// assert_eq!(result, Ok((9, "")));
+ /// # }
+ /// ```
+ fn then_partial<N, F>(self, f: F) -> ThenPartial<Self, F>
+ where
+ Self: Sized,
+ F: FnMut(&mut Self::Output) -> N,
+ N: Parser<Input>,
+ {
+ then_partial(self, f)
+ }
+
+ /// Parses using `self` and then passes a reference to the value to `f` which returns a parser
+ /// used to parse the rest of the input. The value is then combined with the output of `f`.
+ ///
+ /// Since the parser returned from `f` must have a single type it can be useful to use the
+ /// `left` and `right` methods to merge parsers of differing types into one.
+ ///
+ /// ```
+ /// # #![cfg(feature = "std")]
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::parser::char::digit;
+ /// # use combine::error::Commit;
+ /// # use combine::stream::easy;
+ /// # fn main() {
+ /// let result = digit()
+ /// .then_ref(|d| {
+ /// if *d == '9' {
+ /// digit().left()
+ /// }
+ /// else {
+ /// unexpected_any(*d).message("Not a nine").right()
+ /// }
+ /// })
+ /// .easy_parse("98");
+ /// assert_eq!(result, Ok((('9', '8'), "")));
+ /// # }
+ /// ```
+ fn then_ref<N, F>(self, f: F) -> ThenRef<Self, F>
+ where
+ Self: Sized,
+ F: FnMut(&Self::Output) -> N,
+ N: Parser<Input>,
+ {
+ then_ref(self, f)
+ }
+
+ /// Uses `f` to map over the parsed value.
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::parser::char::digit;
+ /// # fn main() {
+ /// let result = digit()
+ /// .map(|c| c == '9')
+ /// .parse("9")
+ /// .map(|x| x.0);
+ /// assert_eq!(result, Ok(true));
+ /// # }
+ /// ```
+ fn map<F, B>(self, f: F) -> Map<Self, F>
+ where
+ Self: Sized,
+ F: FnMut(Self::Output) -> B,
+ {
+ map(self, f)
+ }
+
+ fn map_input<F, B>(self, f: F) -> MapInput<Self, F>
+ where
+ Self: Sized,
+ F: FnMut(Self::Output, &mut Input) -> B,
+ {
+ map_input(self, f)
+ }
+
+ /// Uses `f` to map over the output of `self`. If `f` returns an error the parser fails.
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::parser::char::digit;
+ /// # use combine::parser::range::take;
+ /// # fn main() {
+ /// let result = take(4)
+ /// .flat_map(|bs| many(digit()).parse(bs).map(|t| t.0))
+ /// .parse("12abcd");
+ /// assert_eq!(result, Ok((String::from("12"), "cd")));
+ /// # }
+ /// ```
+ fn flat_map<F, B>(self, f: F) -> FlatMap<Self, F>
+ where
+ Self: Sized,
+ F: FnMut(Self::Output) -> Result<B, <Input as StreamOnce>::Error>,
+ {
+ flat_map(self, f)
+ }
+
+ /// Parses with `self` and if it fails, adds the message `msg` to the error.
+ ///
+ /// ```
+ /// # #![cfg(feature = "std")]
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::stream::easy;
+ /// # use combine::stream::position::{self, SourcePosition};
+ /// # fn main() {
+ /// let result = token('9')
+ /// .message("Not a nine")
+ /// .easy_parse(position::Stream::new("8"));
+ /// assert_eq!(result, Err(easy::Errors {
+ /// position: SourcePosition::default(),
+ /// errors: vec![
+ /// easy::Error::Unexpected('8'.into()),
+ /// easy::Error::Expected('9'.into()),
+ /// easy::Error::Message("Not a nine".into())
+ /// ]
+ /// }));
+ /// # }
+ /// ```
+ fn message<S>(self, msg: S) -> Message<Self, S>
+ where
+ Self: Sized,
+ S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>,
+ {
+ message(self, msg)
+ }
+
+    /// Parses with `self` and, if it fails without consuming any input, any expected errors are
+ /// replaced by `msg`. `msg` is then used in error messages as "Expected `msg`".
+ ///
+ /// ```
+ /// # #![cfg(feature = "std")]
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::error;
+ /// # use combine::stream::easy;
+ /// # use combine::stream::position::{self, SourcePosition};
+ /// # fn main() {
+ /// let result = token('9')
+ /// .expected("nine")
+ /// .easy_parse(position::Stream::new("8"));
+ /// assert_eq!(result, Err(easy::Errors {
+ /// position: SourcePosition::default(),
+ /// errors: vec![
+ /// easy::Error::Unexpected('8'.into()),
+ /// easy::Error::Expected("nine".into())
+ /// ]
+ /// }));
+ ///
+ /// let result = token('9')
+ /// .expected(error::Format(format_args!("That is not a nine!")))
+ /// .easy_parse(position::Stream::new("8"));
+ /// assert_eq!(result, Err(easy::Errors {
+ /// position: SourcePosition::default(),
+ /// errors: vec![
+ /// easy::Error::Unexpected('8'.into()),
+ /// easy::Error::Expected("That is not a nine!".to_string().into())
+ /// ]
+ /// }));
+ /// # }
+ /// ```
+ fn expected<S>(self, msg: S) -> Expected<Self, S>
+ where
+ Self: Sized,
+ S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>,
+ {
+ expected(self, msg)
+ }
+
+    /// Parses with `self`; if it fails without consuming any input, any expected errors that would
+    /// otherwise be emitted by `self` are suppressed.
+ ///
+ /// ```
+ /// # #![cfg(feature = "std")]
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::stream::easy;
+ /// # use combine::stream::position::{self, SourcePosition};
+ /// # fn main() {
+ /// let result = token('9')
+ /// .expected("nine")
+ /// .silent()
+ /// .easy_parse(position::Stream::new("8"));
+ /// assert_eq!(result, Err(easy::Errors {
+ /// position: SourcePosition::default(),
+ /// errors: vec![
+ /// easy::Error::Unexpected('8'.into()),
+ /// ]
+ /// }));
+ /// # }
+ /// ```
+ fn silent(self) -> Silent<Self>
+ where
+ Self: Sized,
+ {
+ silent(self)
+ }
+
+ /// Parses with `self` and applies `f` on the result if `self` parses successfully.
+ /// `f` may optionally fail with an error which is automatically converted to a `ParseError`.
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::stream::position::{self, SourcePosition};
+ /// # use combine::parser::char::digit;
+ /// # fn main() {
+ /// let mut parser = many1(digit())
+ /// .and_then(|s: String| s.parse::<i32>());
+ /// let result = parser.easy_parse(position::Stream::new("1234")).map(|(x, state)| (x, state.input));
+ /// assert_eq!(result, Ok((1234, "")));
+ /// let result = parser.easy_parse(position::Stream::new("999999999999999999999999"));
+ /// assert!(result.is_err());
+    /// // Errors are reported as if they occurred at the start of the parse
+ /// assert_eq!(result.unwrap_err().position, SourcePosition { line: 1, column: 1 });
+ /// # }
+ /// ```
+ fn and_then<F, O, E>(self, f: F) -> AndThen<Self, F>
+ where
+ Self: Parser<Input> + Sized,
+ F: FnMut(Self::Output) -> Result<O, E>,
+ E: Into<
+ <Input::Error as ParseError<Input::Token, Input::Range, Input::Position>>::StreamError,
+ >,
+ {
+ and_then(self, f)
+ }
+
+ /// Creates an iterator from a parser and a state. Can be used as an alternative to [`many`]
+    /// when collecting directly into an `Extend` type is not desirable.
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::parser::char::{char, digit};
+ /// # fn main() {
+ /// let mut buffer = String::new();
+ /// let number = parser(|input| {
+ /// buffer.clear();
+ /// let mut iter = digit().iter(input);
+ /// buffer.extend(&mut iter);
+ /// let i = buffer.parse::<i32>().unwrap();
+ /// iter.into_result(i)
+ /// });
+ /// let result = sep_by(number, char(','))
+ /// .parse("123,45,6");
+ /// assert_eq!(result, Ok((vec![123, 45, 6], "")));
+ /// # }
+ /// ```
+ ///
+ /// [`many`]: repeat::many
+ fn iter(self, input: &mut Input) -> Iter<'_, Input, Self, Self::PartialState, FirstMode>
+ where
+ Self: Parser<Input> + Sized,
+ {
+ Iter::new(self, FirstMode, input, Default::default())
+ }
+
+ /// Creates an iterator from a parser and a state. Can be used as an alternative to [`many`]
+    /// when collecting directly into an `Extend` type is not desirable.
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::parser::char::{char, digit};
+ /// # fn main() {
+ /// let mut buffer = String::new();
+ /// let number = parser(|input| {
+ /// buffer.clear();
+ /// let mut iter = digit().iter(input);
+ /// buffer.extend(&mut iter);
+ /// let i = buffer.parse::<i32>().unwrap();
+ /// iter.into_result(i)
+ /// });
+ /// let result = sep_by(number, char(','))
+ /// .parse("123,45,6");
+ /// assert_eq!(result, Ok((vec![123, 45, 6], "")));
+ /// # }
+ /// ```
+ ///
+ /// [`many`]: repeat::many
+ fn partial_iter<'a, 's, M>(
+ self,
+ mode: M,
+ input: &'a mut Input,
+ partial_state: &'s mut Self::PartialState,
+ ) -> Iter<'a, Input, Self, &'s mut Self::PartialState, M>
+ where
+ Self: Parser<Input> + Sized,
+ M: ParseMode,
+ {
+ Iter::new(self, mode, input, partial_state)
+ }
+
+ /// Turns the parser into a trait object by putting it in a `Box`. Can be used to easily
+ /// return parsers from functions without naming the type.
+ ///
+ /// ```
+ /// # use combine::*;
+ /// # fn main() {
+ /// fn test<'input, F>(
+ /// c: char,
+ /// f: F)
+ /// -> Box<dyn Parser<&'input str, Output = (char, char), PartialState = ()> + 'input>
+ /// where F: FnMut(char) -> bool + 'static
+ /// {
+ /// combine::parser::combinator::no_partial((token(c), satisfy(f))).boxed()
+ /// }
+ /// let result = test('a', |c| c >= 'a' && c <= 'f')
+ /// .parse("ac");
+ /// assert_eq!(result, Ok((('a', 'c'), "")));
+ /// # }
+ /// ```
+ #[cfg(feature = "std")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+ fn boxed<'a>(
+ self,
+ ) -> Box<dyn Parser<Input, Output = Self::Output, PartialState = Self::PartialState> + 'a>
+ where
+ Self: Sized + 'a,
+ {
+ Box::new(self)
+ }
+
+ /// Wraps the parser into the [`Either`](combinator::Either) enum which allows combinators such as [`then`](Parser::then) to return
+ /// multiple different parser types (merging them into one).
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::parser::char::{digit, letter};
+ /// # fn main() {
+ /// let mut parser = any().then(|c|
+ /// if c == '#' {
+ /// skip_many(satisfy(|c| c != '\n'))
+ /// .with(value("".to_string()))
+ /// .left()
+ /// } else {
+ /// many1(letter())
+ /// .map(move |mut s: String| { s.insert(0, c); s })
+ /// .right()
+ /// });
+ ///
+ /// let result = parser.parse("ac2");
+ /// assert_eq!(result, Ok(("ac".to_string(), "2")));
+ ///
+ /// let result = parser.parse("# ac2");
+ /// assert_eq!(result, Ok(("".to_string(), "")));
+ /// # }
+ /// ```
+ fn left<R>(self) -> Either<Self, R>
+ where
+ Self: Sized,
+ R: Parser<Input, Output = Self::Output>,
+ {
+ Either::Left(self)
+ }
+
+ /// Wraps the parser into the [`Either`](combinator::Either) enum which allows combinators such as [`then`](Parser::then) to return
+ /// multiple different parser types (merging them into one).
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::parser::char::{digit, letter};
+ /// # fn main() {
+ /// let mut parser = any().then(|c|
+ /// if c == '#' {
+ /// skip_many(satisfy(|c| c != '\n'))
+ /// .with(value("".to_string()))
+ /// .left()
+ /// } else {
+ /// many1(letter())
+ /// .map(move |mut s: String| { s.insert(0, c); s })
+ /// .right()
+ /// });
+ ///
+ /// let result = parser.parse("ac2");
+ /// assert_eq!(result, Ok(("ac".to_string(), "2")));
+ ///
+ /// let result = parser.parse("# ac2");
+ /// assert_eq!(result, Ok(("".to_string(), "")));
+ /// # }
+ /// ```
+ fn right<L>(self) -> Either<L, Self>
+ where
+ Self: Sized,
+ L: Parser<Input, Output = Self::Output>,
+ {
+ Either::Right(self)
+ }
+
+ /// Marks errors produced inside the `self` parser with the span from the start of the parse to
+ /// the end of it.
+ ///
+ /// [`p.spanned()`]: ../trait.Parser.html#method.spanned
+ ///
+ /// ```
+ /// use combine::{*, parser::{char::string, combinator::spanned}};
+ /// use combine::stream::{easy, span};
+ ///
+ /// let input = "hel";
+ /// let result = spanned(string("hello")).parse(
+ /// span::Stream::<_, easy::Errors<_, _, span::Span<_>>>::from(easy::Stream::from(input)),
+ /// );
+ /// assert!(result.is_err());
+ /// assert_eq!(
+ /// result.unwrap_err().position.map(|p| p.translate_position(input)),
+ /// span::Span { start: 0, end: 3 },
+ /// );
+ /// ```
+ fn spanned(self) -> Spanned<Self>
+ where
+ Self: Sized,
+ {
+ spanned(self)
+ }
+}
+
+/// Provides the `easy_parse` method, which gives good error messages by default
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+pub trait EasyParser<Input: Stream>: Parser<crate::easy::Stream<Input>>
+where
+ Input::Token: PartialEq,
+ Input::Range: PartialEq,
+{
+ /// Entry point of the parser. Takes some input and tries to parse it, returning an easy to use
+ /// and format error if parsing did not succeed.
+ ///
+ /// Returns the parsed result and the remaining input if the parser succeeds, or an error
+ /// otherwise. This function wraps the input in `easy::Stream<Input>`, which makes it
+ /// return `easy::Errors` if an error occurs. Due to this wrapping it is recommended that the
+ /// parser `Self` is written with a generic input type.
+ ///
+ /// ```
+ /// # #[macro_use]
+ /// # extern crate combine;
+ ///
+ /// use combine::*;
+ /// use combine::parser::repeat::many1;
+ /// use combine::parser::char::letter;
+ ///
+ /// // Good!
+ /// parser!{
+ /// fn my_parser[Input]()(Input) -> String
+ /// where [Input: Stream<Token = char>]
+ /// {
+ /// many1::<String, _, _>(letter())
+ /// }
+ /// }
+ ///
+ /// // Won't compile with `easy_parse` since it is specialized on `&str`
+ /// parser!{
+ /// fn my_parser2['a]()(&'a str) -> String
+ /// where [&'a str: Stream<Token = char, Range = &'a str>]
+ /// {
+ /// many1(letter())
+ /// }
+ /// }
+ ///
+ /// fn main() {
+ /// assert_eq!(my_parser().parse("abc"), Ok(("abc".to_string(), "")));
+ /// // Would fail to compile if uncommented
+ /// // my_parser2().parse("abc")
+ /// }
+ /// ```
+ ///
+ /// [`ParseError`]: struct.ParseError.html
+ fn easy_parse(
+ &mut self,
+ input: Input,
+ ) -> Result<
+ (<Self as Parser<crate::easy::Stream<Input>>>::Output, Input),
+ crate::easy::ParseError<Input>,
+ >
+ where
+ Input: Stream,
+ crate::easy::Stream<Input>: StreamOnce<
+ Token = Input::Token,
+ Range = Input::Range,
+ Error = crate::easy::ParseError<crate::easy::Stream<Input>>,
+ Position = Input::Position,
+ >,
+ Input::Position: Default,
+ Self: Sized + Parser<crate::easy::Stream<Input>>,
+ {
+ let input = crate::easy::Stream(input);
+ self.parse(input).map(|(v, input)| (v, input.0))
+ }
+}
+
+#[cfg(feature = "std")]
+impl<Input, P> EasyParser<Input> for P
+where
+ P: ?Sized + Parser<crate::easy::Stream<Input>>,
+ Input: Stream,
+ Input::Token: PartialEq,
+ Input::Range: PartialEq,
+{
+}
+
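+// Forwards a `Parser` implementation through one level of indirection; used below for
+// `&mut P` and `Box<P>`.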
+macro_rules! forward_deref {
+ (Input) => {
+ type Output = P::Output;
+ type PartialState = P::PartialState;
+
+ #[inline]
+ fn parse_first(
+ &mut self,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ (**self).parse_first(input, state)
+ }
+
+ #[inline]
+ fn parse_partial(
+ &mut self,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ (**self).parse_partial(input, state)
+ }
+
+ #[inline]
+ fn add_error(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
+ (**self).add_error(error)
+ }
+
+ #[inline]
+ fn add_committed_expected_error(
+ &mut self,
+ error: &mut Tracked<<Input as StreamOnce>::Error>,
+ ) {
+ (**self).add_committed_expected_error(error)
+ }
+
+ #[inline]
+ fn parser_count(&self) -> ErrorOffset {
+ (**self).parser_count()
+ }
+ };
+}
+
+impl<'a, P, Input> Parser<Input> for &'a mut P
+where
+ P: ?Sized + Parser<Input>,
+ Input: Stream,
+{
+ forward_deref!(Input);
+}
+
+#[cfg(feature = "std")]
+impl<P, Input> Parser<Input> for Box<P>
+where
+ P: ?Sized + Parser<Input>,
+ Input: Stream,
+{
+ forward_deref!(Input);
+}
+
+/// Internal API. May break without a semver bump
+#[doc(hidden)]
+/// Specifies whether the parser must check for partial state that must be resumed
+pub trait ParseMode: Copy {
+ /// If `true`, the parser has no previous state to resume; otherwise the parser *might* have
+ /// state to resume, which it must check.
+ fn is_first(self) -> bool;
+ /// Puts the mode into `first` parsing.
+ fn set_first(&mut self);
+
+ fn parse<P, Input>(
+ self,
+ parser: &mut P,
+ input: &mut Input,
+ state: &mut P::PartialState,
+ ) -> ParseResult<P::Output, Input::Error>
+ where
+ P: Parser<Input>,
+ Input: Stream;
+
+ #[inline]
+ fn parse_committed<P, Input>(
+ self,
+ parser: &mut P,
+ input: &mut Input,
+ state: &mut P::PartialState,
+ ) -> ParseResult<P::Output, <Input as StreamOnce>::Error>
+ where
+ P: Parser<Input>,
+ Input: Stream,
+ {
+ let before = input.checkpoint();
+ let mut result = parser.parse_mode_impl(self, input, state);
+ if let ParseResult::PeekErr(ref mut error) = result {
+ ctry!(input.reset(before.clone()).committed());
+ if let Ok(t) = input.uncons() {
+ ctry!(input.reset(before).committed());
+ error.error.add_unexpected(Token(t));
+ } else {
+ error.error.add(StreamErrorFor::<Input>::end_of_input());
+ }
+ parser.add_error(error);
+ }
+ result
+ }
+}
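+
+// A minimal usage sketch (illustrative only; `p`, `input` and `state` are hypothetical names):
+// a driver would first call `FirstMode.parse(&mut p, &mut input, &mut state)` and, once more
+// input has arrived, resume with `PartialMode { first: false }.parse(&mut p, &mut input, &mut state)`.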
+
+/// Internal API. May break without a semver bump
+#[doc(hidden)]
+#[derive(Copy, Clone)]
+pub struct FirstMode;
+impl ParseMode for FirstMode {
+ #[inline]
+ fn is_first(self) -> bool {
+ true
+ }
+ #[inline]
+ fn set_first(&mut self) {}
+
+ fn parse<P, Input>(
+ self,
+ parser: &mut P,
+ input: &mut Input,
+ state: &mut P::PartialState,
+ ) -> ParseResult<P::Output, Input::Error>
+ where
+ P: Parser<Input>,
+ Input: Stream,
+ {
+ parser.parse_mode_impl(FirstMode, input, state)
+ }
+}
+
+/// Internal API. May break without a semver bump
+#[doc(hidden)]
+#[derive(Copy, Clone, Default)]
+pub struct PartialMode {
+ pub first: bool,
+}
+impl ParseMode for PartialMode {
+ #[inline]
+ fn is_first(self) -> bool {
+ self.first
+ }
+
+ #[inline]
+ fn set_first(&mut self) {
+ self.first = true;
+ }
+
+ fn parse<P, Input>(
+ self,
+ parser: &mut P,
+ input: &mut Input,
+ state: &mut P::PartialState,
+ ) -> ParseResult<P::Output, Input::Error>
+ where
+ P: Parser<Input>,
+ Input: Stream,
+ {
+ if self.is_first() {
+ parser.parse_mode_impl(FirstMode, input, state)
+ } else {
+ parser.parse_mode_impl(self, input, state)
+ }
+ }
+}
diff --git a/src/parser/range.rs b/src/parser/range.rs
new file mode 100644
index 0000000..7b53fe6
--- /dev/null
+++ b/src/parser/range.rs
@@ -0,0 +1,768 @@
+//! Module containing zero-copy parsers.
+//!
+//! These parsers require the [`RangeStream`][] bound instead of a plain [`Stream`][].
+//!
+//! [`RangeStream`]: ../../stream/trait.RangeStream.html
+//! [`Stream`]: ../../stream/trait.Stream.html
+
+use crate::{
+ error::{
+ self, ParseError,
+ ParseResult::{self, *},
+ ResultExt, StreamError, Tracked,
+ },
+ lib::{convert::TryFrom, marker::PhantomData},
+ parser::ParseMode,
+};
+
+#[cfg(feature = "std")]
+use crate::lib::error::Error as StdError;
+
+#[cfg(not(feature = "std"))]
+use crate::lib::fmt;
+
+use crate::stream::{
+ uncons_range, uncons_while, uncons_while1, wrap_stream_error, Range as StreamRange,
+ RangeStream, StreamErrorFor, StreamOnce,
+};
+
+use crate::Parser;
+
+pub struct Range<Input>(Input::Range)
+where
+ Input: RangeStream;
+
+impl<Input> Parser<Input> for Range<Input>
+where
+ Input: RangeStream,
+ Input::Range: PartialEq + crate::stream::Range,
+{
+ type Output = Input::Range;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(
+ &mut self,
+ input: &mut Input,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ use crate::stream::Range;
+
+ let position = input.position();
+ match input.uncons_range(self.0.len()) {
+ Ok(other) => {
+ if other == self.0 {
+ CommitOk(other)
+ } else {
+ PeekErr(Input::Error::empty(position).into())
+ }
+ }
+ Err(err) => wrap_stream_error(input, err),
+ }
+ }
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ // TODO Add unexpected message?
+ errors.error.add_expected(error::Range(self.0.clone()));
+ }
+}
+
+parser! {
+ #[derive(Clone)]
+ pub struct Recognize;
+ type PartialState = <RecognizeWithValue<P> as Parser<Input>>::PartialState;
+ /// Zero-copy parser which returns the committed input range.
+ ///
+ /// [`combinator::recognize`][] is a non-`RangeStream` alternative.
+ ///
+ /// [`combinator::recognize`]: ../../parser/combinator/fn.recognize.html
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::parser::range::recognize;
+ /// # use combine::parser::char::letter;
+ /// # use combine::*;
+ /// # fn main() {
+ /// let mut parser = recognize(skip_many1(letter()));
+ /// assert_eq!(parser.parse("hello world"), Ok(("hello", " world")));
+ /// assert!(parser.parse("!").is_err());
+ /// # }
+ /// ```
+ #[inline]
+ pub fn recognize[Input, P](parser: P)(Input) -> <Input as StreamOnce>::Range
+ where [
+ P: Parser<Input>,
+ Input: RangeStream,
+ <Input as StreamOnce>::Range: crate::stream::Range,
+ ]
+ {
+ recognize_with_value(parser).map(|(range, _)| range)
+ }
+}
+
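+// Shared driver for the zero-copy `take_while*` parsers below: `first` is used on a fresh parse
+// attempt and `resume` when continuing a partial parse, while `distance_state` records how far a
+// previous attempt got so the input can be fast-forwarded before resuming.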
+#[inline]
+fn parse_partial_range<M, F, G, S, Input>(
+ mode: M,
+ input: &mut Input,
+ distance_state: &mut usize,
+ state: S,
+ first: F,
+ resume: G,
+) -> ParseResult<Input::Range, Input::Error>
+where
+ M: ParseMode,
+ F: FnOnce(&mut Input, S) -> ParseResult<Input::Range, <Input as StreamOnce>::Error>,
+ G: FnOnce(&mut Input, S) -> ParseResult<Input::Range, <Input as StreamOnce>::Error>,
+ Input: RangeStream,
+{
+ let before = input.checkpoint();
+
+ if !input.is_partial() {
+ first(input, state)
+ } else if mode.is_first() || *distance_state == 0 {
+ let result = first(input, state);
+ if let CommitErr(_) = result {
+ *distance_state = input.distance(&before);
+ ctry!(input.reset(before).committed());
+ }
+ result
+ } else {
+ if input.uncons_range(*distance_state).is_err() {
+ panic!("recognize errored when restoring the input stream to its expected state");
+ }
+
+ match resume(input, state) {
+ CommitOk(_) | PeekOk(_) => (),
+ PeekErr(err) => return PeekErr(err),
+ CommitErr(err) => {
+ *distance_state = input.distance(&before);
+ ctry!(input.reset(before).committed());
+ return CommitErr(err);
+ }
+ }
+
+ let distance = input.distance(&before);
+ ctry!(input.reset(before).committed());
+ take(distance).parse_lazy(input).map(|range| {
+ *distance_state = 0;
+ range
+ })
+ }
+}
+
+#[derive(Clone)]
+pub struct RecognizeWithValue<P>(P);
+
+impl<Input, P> Parser<Input> for RecognizeWithValue<P>
+where
+ P: Parser<Input>,
+ Input: RangeStream,
+ <Input as StreamOnce>::Range: crate::stream::Range,
+{
+ type Output = (<Input as StreamOnce>::Range, P::Output);
+ type PartialState = (usize, P::PartialState);
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ let (ref mut distance_state, ref mut child_state) = *state;
+
+ let before = input.checkpoint();
+ if !mode.is_first() && input.uncons_range(*distance_state).is_err() {
+ panic!("recognize errored when restoring the input stream to its expected state");
+ }
+
+ let value = match self.0.parse_mode(mode, input, child_state) {
+ CommitOk(x) | PeekOk(x) => x,
+ PeekErr(err) => return PeekErr(err),
+ CommitErr(err) => {
+ *distance_state = input.distance(&before);
+ ctry!(input.reset(before).committed());
+ return CommitErr(err);
+ }
+ };
+
+ let distance = input.distance(&before);
+ ctry!(input.reset(before).committed());
+ take(distance).parse_lazy(input).map(|range| {
+ *distance_state = 0;
+ (range, value)
+ })
+ }
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ self.0.add_error(errors)
+ }
+}
+
+/// Zero-copy parser which returns a pair: (committed input range, parsed value).
+///
+///
+/// [`combinator::recognize_with_value`] is a non-`RangeStream` alternative.
+///
+/// [`combinator::recognize_with_value`]: recognize_with_value
+/// ```
+/// # extern crate combine;
+/// # use combine::parser::range::recognize_with_value;
+/// # use combine::parser::char::{digit, char};
+/// # use combine::*;
+/// # fn main() {
+/// let mut parser = recognize_with_value((
+/// skip_many1(digit()),
+/// optional((attempt(char('.')), skip_many1(digit()))),
+/// ).map(|(_, opt)| opt.is_some()));
+///
+/// assert_eq!(parser.parse("1234!"), Ok((("1234", false), "!")));
+/// assert_eq!(parser.parse("1234.0001!"), Ok((("1234.0001", true), "!")));
+/// assert!(parser.parse("!").is_err());
+/// assert!(parser.parse("1234.").is_err());
+/// # }
+/// ```
+pub fn recognize_with_value<Input, P>(parser: P) -> RecognizeWithValue<P>
+where
+ P: Parser<Input>,
+ Input: RangeStream,
+ <Input as StreamOnce>::Range: crate::stream::Range,
+{
+ RecognizeWithValue(parser)
+}
+
+/// Zero-copy parser which reads a range of length `i.len()` and succeeds if `i` is equal to that
+/// range.
+///
+/// [`tokens`] is a non-`RangeStream` alternative.
+///
+/// [`tokens`]: super::token::tokens
+/// ```
+/// # extern crate combine;
+/// # use combine::parser::range::range;
+/// # use combine::*;
+/// # fn main() {
+/// let mut parser = range("hello");
+/// let result = parser.parse("hello world");
+/// assert_eq!(result, Ok(("hello", " world")));
+/// let result = parser.parse("hel world");
+/// assert!(result.is_err());
+/// # }
+/// ```
+pub fn range<Input>(i: Input::Range) -> Range<Input>
+where
+ Input: RangeStream,
+ Input::Range: PartialEq,
+{
+ Range(i)
+}
+
+pub struct Take<Input>(usize, PhantomData<fn(Input)>);
+impl<Input> Parser<Input> for Take<Input>
+where
+ Input: RangeStream,
+{
+ type Output = Input::Range;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(
+ &mut self,
+ input: &mut Input,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ uncons_range(input, self.0)
+ }
+}
+
+/// Zero-copy parser which reads a range of length `n`.
+///
+/// [`count_min_max`][] is a non-`RangeStream` alternative.
+///
+/// [`count_min_max`]: ../../parser/repeat/fn.count_min_max.html
+/// ```
+/// # extern crate combine;
+/// # use combine::parser::range::take;
+/// # use combine::*;
+/// # fn main() {
+/// let mut parser = take(1);
+/// let result = parser.parse("1");
+/// assert_eq!(result, Ok(("1", "")));
+/// let mut parser = take(4);
+/// let result = parser.parse("123abc");
+/// assert_eq!(result, Ok(("123a", "bc")));
+/// let result = parser.parse("abc");
+/// assert!(result.is_err());
+/// # }
+/// ```
+pub fn take<Input>(n: usize) -> Take<Input>
+where
+ Input: RangeStream,
+{
+ Take(n, PhantomData)
+}
+
+pub struct TakeWhile<Input, F>(F, PhantomData<fn(Input) -> Input>);
+impl<Input, F> Parser<Input> for TakeWhile<Input, F>
+where
+ Input: RangeStream,
+ Input::Range: crate::stream::Range,
+ F: FnMut(Input::Token) -> bool,
+{
+ type Output = Input::Range;
+ type PartialState = usize;
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ parse_partial_range(
+ mode,
+ input,
+ state,
+ &mut self.0,
+ |input, predicate| uncons_while(input, predicate),
+ |input, predicate| uncons_while(input, predicate),
+ )
+ }
+}
+
+/// Zero-copy parser which reads a range of 0 or more tokens which satisfy `f`.
+///
+/// [`many`][] is a non-`RangeStream` alternative.
+///
+/// [`many`]: ../../parser/repeat/fn.many.html
+/// ```
+/// # extern crate combine;
+/// # use combine::parser::range::take_while;
+/// # use combine::*;
+/// # fn main() {
+/// let mut parser = take_while(|c: char| c.is_digit(10));
+/// let result = parser.parse("123abc");
+/// assert_eq!(result, Ok(("123", "abc")));
+/// let result = parser.parse("abc");
+/// assert_eq!(result, Ok(("", "abc")));
+/// # }
+/// ```
+pub fn take_while<Input, F>(f: F) -> TakeWhile<Input, F>
+where
+ Input: RangeStream,
+ Input::Range: crate::stream::Range,
+ F: FnMut(Input::Token) -> bool,
+{
+ TakeWhile(f, PhantomData)
+}
+
+pub struct TakeWhile1<Input, F>(F, PhantomData<fn(Input) -> Input>);
+impl<Input, F> Parser<Input> for TakeWhile1<Input, F>
+where
+ Input: RangeStream,
+ Input::Range: crate::stream::Range,
+ F: FnMut(Input::Token) -> bool,
+{
+ type Output = Input::Range;
+ type PartialState = usize;
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ parse_partial_range(
+ mode,
+ input,
+ state,
+ &mut self.0,
+ |input, predicate| uncons_while1(input, predicate),
+ |input, predicate| uncons_while(input, predicate),
+ )
+ }
+}
+
+/// Zero-copy parser which reads a range of 1 or more tokens which satisfy `f`.
+///
+/// [`many1`][] is a non-`RangeStream` alternative.
+///
+/// [`many1`]: ../../parser/repeat/fn.many1.html
+/// ```
+/// # extern crate combine;
+/// # use combine::parser::range::take_while1;
+/// # use combine::*;
+/// # fn main() {
+/// let mut parser = take_while1(|c: char| c.is_digit(10));
+/// let result = parser.parse("123abc");
+/// assert_eq!(result, Ok(("123", "abc")));
+/// let result = parser.parse("abc");
+/// assert!(result.is_err());
+/// # }
+/// ```
+pub fn take_while1<Input, F>(f: F) -> TakeWhile1<Input, F>
+where
+ Input: RangeStream,
+ Input::Range: crate::stream::Range,
+ F: FnMut(Input::Token) -> bool,
+{
+ TakeWhile1(f, PhantomData)
+}
+
+pub struct TakeUntilRange<Input>(Input::Range)
+where
+ Input: RangeStream;
+impl<Input> Parser<Input> for TakeUntilRange<Input>
+where
+ Input: RangeStream,
+ Input::Range: PartialEq + crate::stream::Range,
+{
+ type Output = Input::Range;
+ type PartialState = usize;
+
+ #[inline]
+ fn parse_partial(
+ &mut self,
+ input: &mut Input,
+ to_consume: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ use crate::stream::Range;
+
+ let len = self.0.len();
+ let before = input.checkpoint();
+ let mut first_stream_error = None;
+
+ // Skip until the end of the last parse attempt
+ ctry!(uncons_range(input, *to_consume));
+
+ loop {
+ let look_ahead_input = input.checkpoint();
+
+ match input.uncons_range(len) {
+ Ok(xs) => {
+ if xs == self.0 {
+ let distance = input.distance(&before) - len;
+ ctry!(input.reset(before).committed());
+
+ if let Ok(committed) = input.uncons_range(distance) {
+ if distance == 0 {
+ return PeekOk(committed);
+ } else {
+ *to_consume = 0;
+ return CommitOk(committed);
+ }
+ }
+
+ // We are guaranteed to be able to uncons `distance` characters here
+ // because we have already consumed them while scanning for the range.
+ unreachable!();
+ } else {
+ // Reset the stream back to where it was when we entered the top of the loop
+ ctry!(input.reset(look_ahead_input).committed());
+
+ // Advance the stream by one token
+ if input.uncons().is_err() {
+ unreachable!();
+ }
+ }
+ }
+ Err(first_error) => {
+ // If we are unable to find a successful parse even after advancing with `uncons`
+ // below we must reset the stream to its state before the first error.
+ // If we don't, we may try to match the range `::` against `:<EOF>`, which would
+ // fail as only one `:` is present at this parse attempt. But when we later resume
+ // with more input we must start parsing again at the first time we errored so we
+ // can see the entire `::`.
+ if first_stream_error.is_none() {
+ first_stream_error = Some((first_error, input.distance(&before)));
+ }
+
+ // Reset the stream back to where it was when we entered the top of the loop
+ ctry!(input.reset(look_ahead_input).committed());
+
+ // See if we can advance anyway
+ if input.uncons().is_err() {
+ let (first_error, first_error_distance) = first_stream_error.unwrap();
+
+ // Reset the stream
+ ctry!(input.reset(before).committed());
+ *to_consume = first_error_distance;
+
+ // Return the original error if uncons failed
+ return wrap_stream_error(input, first_error);
+ }
+ }
+ };
+ }
+ }
+}
+
+/// Zero-copy parser which reads a range of 0 or more tokens until `r` is found.
+///
+/// The range `r` will not be committed. If `r` is not found, the parser will
+/// return an error.
+///
+/// [`repeat::take_until`][] is a non-`RangeStream` alternative.
+///
+/// [`repeat::take_until`]: ../../parser/repeat/fn.take_until.html
+/// ```
+/// # extern crate combine;
+/// # use combine::parser::range::{range, take_until_range};
+/// # use combine::*;
+/// # fn main() {
+/// let mut parser = take_until_range("\r\n");
+/// let result = parser.parse("To: user@example.com\r\n");
+/// assert_eq!(result, Ok(("To: user@example.com", "\r\n")));
+/// let result = parser.parse("Hello, world\n");
+/// assert!(result.is_err());
+/// # }
+/// ```
+pub fn take_until_range<Input>(r: Input::Range) -> TakeUntilRange<Input>
+where
+ Input: RangeStream,
+{
+ TakeUntilRange(r)
+}
+
+#[derive(Debug, PartialEq)]
+pub enum TakeRange {
+ /// Found the pattern at this offset
+ Found(usize),
+ /// Did not find the pattern but the parser can skip ahead to this offset.
+ NotFound(usize),
+}
+
+impl From<Option<usize>> for TakeRange {
+ fn from(opt: Option<usize>) -> TakeRange {
+ match opt {
+ Some(i) => TakeRange::Found(i),
+ None => TakeRange::NotFound(0),
+ }
+ }
+}
+
+pub struct TakeFn<F, Input> {
+ searcher: F,
+ _marker: PhantomData<fn(Input)>,
+}
+
+impl<Input, F, R> Parser<Input> for TakeFn<F, Input>
+where
+ F: FnMut(Input::Range) -> R,
+ R: Into<TakeRange>,
+ Input: RangeStream,
+ Input::Range: crate::stream::Range,
+{
+ type Output = Input::Range;
+ type PartialState = usize;
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ offset: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ let checkpoint = input.checkpoint();
+
+ if mode.is_first() {
+ *offset = 0;
+ } else {
+ let _ = input.uncons_range(*offset);
+ }
+
+ match (self.searcher)(input.range()).into() {
+ TakeRange::Found(i) => {
+ ctry!(input.reset(checkpoint).committed());
+ let result = uncons_range(input, *offset + i);
+ if result.is_ok() {
+ *offset = 0;
+ }
+ result
+ }
+ TakeRange::NotFound(next_offset) => {
+ *offset = next_offset;
+
+ let range = input.range();
+ let _ = input.uncons_range(range.len());
+ let position = input.position();
+ ctry!(input.reset(checkpoint).committed());
+
+ let err = Input::Error::from_error(position, StreamError::end_of_input());
+ if !input.is_partial() && range.is_empty() {
+ PeekErr(err.into())
+ } else {
+ CommitErr(err)
+ }
+ }
+ }
+ }
+}
+
+/// Searches the entire range using `searcher` and then consumes a range of length `n` if the
+/// searcher returns `Some(n)`/`Found(n)`. If `searcher` cannot find anything in the range it
+/// must return `None`/`NotFound`, which indicates an end of input error.
+///
+/// If partial parsing is used the `TakeRange` enum can be returned instead of `Option`. By
+/// returning `TakeRange::NotFound(n)` it indicates that the input can skip ahead until `n`
+/// when parsing is next resumed.
+///
+/// See [`take_until_bytes`](../byte/fn.take_until_bytes.html) for a usecase.
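+///
+/// A minimal sketch (assuming a plain `&str` input, so that `str::find` can act as the searcher
+/// by returning an `Option<usize>`):
+///
+/// ```
+/// # use combine::parser::range::take_fn;
+/// # use combine::*;
+/// let mut parser = take_fn(|haystack: &str| haystack.find(','));
+/// assert_eq!(parser.parse("abc,def"), Ok(("abc", ",def")));
+/// assert!(parser.parse("abcdef").is_err());
+/// ```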
+pub fn take_fn<F, R, Input>(searcher: F) -> TakeFn<F, Input>
+where
+ F: FnMut(Input::Range) -> R,
+ R: Into<TakeRange>,
+ Input: RangeStream,
+ Input::Range: crate::stream::Range,
+{
+ TakeFn {
+ searcher,
+ _marker: PhantomData,
+ }
+}
+
+#[cfg(feature = "std")]
+parser! {
+/// Takes a parser which parses a `length`, then extracts a range of that length and returns it.
+/// Commonly used in binary formats.
+///
+/// ```
+/// # use combine::parser::{byte::num::be_u16, range::length_prefix};
+/// # use combine::*;
+/// # fn main() {
+/// let mut input = Vec::new();
+/// input.extend_from_slice(&3u16.to_be_bytes());
+/// input.extend_from_slice(b"1234");
+///
+/// let mut parser = length_prefix(be_u16());
+/// let result = parser.parse(&input[..]);
+/// assert_eq!(result, Ok((&b"123"[..], &b"4"[..])));
+/// # }
+/// ```
+pub fn length_prefix[Input, P](len: P)(Input) -> Input::Range
+where [
+ Input: RangeStream,
+ P: Parser<Input>,
+ usize: TryFrom<P::Output>,
+ <usize as TryFrom<P::Output>>::Error: StdError + Send + Sync + 'static,
+]
+{
+ len
+ .and_then(|u| {
+ usize::try_from(u)
+ .map_err(StreamErrorFor::<Input>::other)
+ })
+ .then_partial(|&mut len| take(len))
+}
+}
+
+#[cfg(not(feature = "std"))]
+parser! {
+/// Takes a parser which parses a `length`, then extracts a range of that length and returns it.
+/// Commonly used in binary formats.
+///
+/// ```
+/// # use combine::parser::{byte::num::be_u16, range::length_prefix};
+/// # use combine::*;
+/// # fn main() {
+/// let mut input = Vec::new();
+/// input.extend_from_slice(&3u16.to_be_bytes());
+/// input.extend_from_slice(b"1234");
+///
+/// let mut parser = length_prefix(be_u16());
+/// let result = parser.parse(&input[..]);
+/// assert_eq!(result, Ok((&b"123"[..], &b"4"[..])));
+/// # }
+/// ```
+pub fn length_prefix[Input, P](len: P)(Input) -> Input::Range
+where [
+ Input: RangeStream,
+ P: Parser<Input>,
+ usize: TryFrom<P::Output>,
+ <usize as TryFrom<P::Output>>::Error: fmt::Display + Send + Sync + 'static,
+]
+{
+ len
+ .and_then(|u| {
+ usize::try_from(u)
+ .map_err(StreamErrorFor::<Input>::message_format)
+ })
+ .then_partial(|&mut len| take(len))
+}
+}
+
+#[cfg(test)]
+mod tests {
+
+ use crate::Parser;
+
+ use super::*;
+
+ #[test]
+ fn take_while_test() {
+ let result = take_while(|c: char| c.is_digit(10)).parse("123abc");
+ assert_eq!(result, Ok(("123", "abc")));
+ let result = take_while(|c: char| c.is_digit(10)).parse("abc");
+ assert_eq!(result, Ok(("", "abc")));
+ }
+
+ #[test]
+ fn take_while1_test() {
+ let result = take_while1(|c: char| c.is_digit(10)).parse("123abc");
+ assert_eq!(result, Ok(("123", "abc")));
+ let result = take_while1(|c: char| c.is_digit(10)).parse("abc");
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn range_string_no_char_boundary_error() {
+ let mut parser = range("hello");
+ let result = parser.parse("hell\u{00EE} world");
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn take_until_range_1() {
+ let result = take_until_range("\"").parse("Foo baz bar quux\"");
+ assert_eq!(result, Ok(("Foo baz bar quux", "\"")));
+ }
+
+ #[test]
+ fn take_until_range_2() {
+ let result = take_until_range("===").parse("if ((pointless_comparison == 3) === true) {");
+ assert_eq!(
+ result,
+ Ok(("if ((pointless_comparison == 3) ", "=== true) {"))
+ );
+ }
+
+ #[test]
+ fn take_until_range_unicode_1() {
+ let result = take_until_range("🦀")
+ .parse("😃 Ferris the friendly rustacean 🦀 and his snake friend 🐍");
+ assert_eq!(
+ result,
+ Ok((
+ "😃 Ferris the friendly rustacean ",
+ "🦀 and his snake friend 🐍"
+ ))
+ );
+ }
+
+ #[test]
+ fn take_until_range_unicode_2() {
+ let result = take_until_range("⁘⁙/⁘").parse("⚙️🛠️🦀=🏎️⁘⁙⁘⁘⁙/⁘⁘⁙/⁘");
+ assert_eq!(result, Ok(("⚙️🛠️🦀=🏎️⁘⁙⁘", "⁘⁙/⁘⁘⁙/⁘")));
+ }
+}
diff --git a/src/parser/regex.rs b/src/parser/regex.rs
new file mode 100644
index 0000000..6e47e44
--- /dev/null
+++ b/src/parser/regex.rs
@@ -0,0 +1,549 @@
+//! Module containing regex parsers on streams returning ranges of `&str` or `&[u8]`.
+//!
+//! All regex parsers are overloaded on `&str` and `&[u8]` ranges and can take a `Regex` by value
+//! or shared reference (`&`).
+//!
+//! Enabled using the `regex` feature (for `regex-0.2`) or the `regex-1` feature for `regex-1.0`.
+//!
+//! ```
+//! use once_cell::sync::Lazy;
+//! use regex::{bytes, Regex};
+//! use combine::Parser;
+//! use combine::parser::regex::{find_many, match_};
+//!
+//! fn main() {
+//! let regex = bytes::Regex::new("[0-9]+").unwrap();
+//! // Shared references to any regex works as well
+//! assert_eq!(
+//! find_many(&regex).parse(&b"123 456 "[..]),
+//! Ok((vec![&b"123"[..], &b"456"[..]], &b" "[..]))
+//! );
+//! assert_eq!(
+//! find_many(regex).parse(&b""[..]),
+//! Ok((vec![], &b""[..]))
+//! );
+//!
+//! static REGEX: Lazy<Regex> = Lazy::new(|| Regex::new("[:alpha:]+").unwrap());
+//! assert_eq!(
+//! match_(&*REGEX).parse("abc123"),
+//! Ok(("abc123", "abc123"))
+//! );
+//! }
+//! ```
+
+use std::{iter::FromIterator, marker::PhantomData};
+
+use crate::{
+ error::{
+ ParseError,
+ ParseResult::{self, *},
+ StreamError, Tracked,
+ },
+ parser::range::take,
+ stream::{RangeStream, StreamOnce},
+ Parser,
+};
+
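+// Collects only the first item of an iterator; used below to pick out just the first match or
+// capture group set returned by the regex.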
+struct First<T>(Option<T>);
+
+impl<A> FromIterator<A> for First<A> {
+ fn from_iter<T>(iter: T) -> Self
+ where
+ T: IntoIterator<Item = A>,
+ {
+ First(iter.into_iter().next())
+ }
+}
+
+pub trait MatchFind {
+ type Range;
+ fn end(&self) -> usize;
+ fn as_match(&self) -> Self::Range;
+}
+
+pub trait Regex<Range> {
+ fn is_match(&self, range: Range) -> bool;
+ fn find_iter<F>(&self, range: Range) -> (usize, F)
+ where
+ F: FromIterator<Range>;
+ fn captures<F, G>(&self, range: Range) -> (usize, G)
+ where
+ F: FromIterator<Range>,
+ G: FromIterator<F>;
+ fn as_str(&self) -> &str;
+}
+
+impl<'a, R, Range> Regex<Range> for &'a R
+where
+ R: Regex<Range>,
+{
+ fn is_match(&self, range: Range) -> bool {
+ (**self).is_match(range)
+ }
+ fn find_iter<F>(&self, range: Range) -> (usize, F)
+ where
+ F: FromIterator<Range>,
+ {
+ (**self).find_iter(range)
+ }
+ fn captures<F, G>(&self, range: Range) -> (usize, G)
+ where
+ F: FromIterator<Range>,
+ G: FromIterator<F>,
+ {
+ (**self).captures(range)
+ }
+ fn as_str(&self) -> &str {
+ (**self).as_str()
+ }
+}
+
+fn find_iter<'a, Input, F>(iterable: Input) -> (usize, F)
+where
+ Input: IntoIterator,
+ Input::Item: MatchFind,
+ F: FromIterator<<Input::Item as MatchFind>::Range>,
+{
+ let mut end = 0;
+ let value = iterable
+ .into_iter()
+ .map(|m| {
+ end = m.end();
+ m.as_match()
+ })
+ .collect();
+ (end, value)
+}
+
+#[cfg(feature = "regex")]
+mod regex {
+ pub extern crate regex;
+
+ use std::iter::FromIterator;
+
+ use super::{find_iter, MatchFind, Regex};
+
+ pub use self::regex::*;
+
+ impl<'t> MatchFind for regex::Match<'t> {
+ type Range = &'t str;
+ fn end(&self) -> usize {
+ regex::Match::end(self)
+ }
+ fn as_match(&self) -> Self::Range {
+ self.as_str()
+ }
+ }
+
+ impl<'t> MatchFind for regex::bytes::Match<'t> {
+ type Range = &'t [u8];
+ fn end(&self) -> usize {
+ regex::bytes::Match::end(self)
+ }
+ fn as_match(&self) -> Self::Range {
+ self.as_bytes()
+ }
+ }
+
+ impl<'a> Regex<&'a str> for regex::Regex {
+ fn is_match(&self, range: &'a str) -> bool {
+ regex::Regex::is_match(self, range)
+ }
+ fn find_iter<F>(&self, range: &'a str) -> (usize, F)
+ where
+ F: FromIterator<&'a str>,
+ {
+ find_iter(regex::Regex::find_iter(self, range))
+ }
+ fn captures<F, G>(&self, range: &'a str) -> (usize, G)
+ where
+ F: FromIterator<&'a str>,
+ G: FromIterator<F>,
+ {
+ let mut end = 0;
+ let value = regex::Regex::captures_iter(self, range)
+ .map(|captures| {
+ let mut captures_iter = captures.iter();
+ // The first group is the match on the entire regex
+ let first_match = captures_iter.next().unwrap().unwrap();
+ end = first_match.end();
+ Some(Some(first_match))
+ .into_iter()
+ .chain(captures_iter)
+ .filter_map(|match_| match_.map(|m| m.as_match()))
+ .collect()
+ })
+ .collect();
+ (end, value)
+ }
+ fn as_str(&self) -> &str {
+ regex::Regex::as_str(self)
+ }
+ }
+
+ impl<'a> Regex<&'a [u8]> for regex::bytes::Regex {
+ fn is_match(&self, range: &'a [u8]) -> bool {
+ regex::bytes::Regex::is_match(self, range)
+ }
+ fn find_iter<F>(&self, range: &'a [u8]) -> (usize, F)
+ where
+ F: FromIterator<&'a [u8]>,
+ {
+ find_iter(regex::bytes::Regex::find_iter(self, range))
+ }
+ fn captures<F, G>(&self, range: &'a [u8]) -> (usize, G)
+ where
+ F: FromIterator<&'a [u8]>,
+ G: FromIterator<F>,
+ {
+ let mut end = 0;
+ let value = regex::bytes::Regex::captures_iter(self, range)
+ .map(|captures| {
+ let mut captures_iter = captures.iter();
+ // The first group is the match on the entire regex
+ let first_match = captures_iter.next().unwrap().unwrap();
+ end = first_match.end();
+ Some(Some(first_match))
+ .into_iter()
+ .chain(captures_iter)
+ .filter_map(|match_| match_.map(|m| m.as_match()))
+ .collect()
+ })
+ .collect();
+ (end, value)
+ }
+ fn as_str(&self) -> &str {
+ regex::bytes::Regex::as_str(self)
+ }
+ }
+}
+
+pub struct Match<R, Input>(R, PhantomData<Input>);
+
+impl<'a, Input, R> Parser<Input> for Match<R, Input>
+where
+ R: Regex<Input::Range>,
+ Input: RangeStream,
+{
+ type Output = Input::Range;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(
+ &mut self,
+ input: &mut Input,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ if self.0.is_match(input.range()) {
+ PeekOk(input.range())
+ } else {
+ PeekErr(Input::Error::empty(input.position()).into())
+ }
+ }
+ fn add_error(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
+ error.error.add(StreamError::expected_format(format_args!(
+ "/{}/",
+ self.0.as_str()
+ )))
+ }
+}
+
+/// Matches `regex` on the input returning the entire input if it matches.
+/// Never consumes any input.
+///
+/// ```
+/// extern crate regex;
+/// extern crate combine;
+/// use regex::Regex;
+/// use combine::Parser;
+/// use combine::parser::regex::match_;
+///
+/// fn main() {
+/// let regex = Regex::new("[:alpha:]+").unwrap();
+/// assert_eq!(
+/// match_(&regex).parse("abc123"),
+/// Ok(("abc123", "abc123"))
+/// );
+/// }
+/// ```
+pub fn match_<R, Input>(regex: R) -> Match<R, Input>
+where
+ R: Regex<Input::Range>,
+ Input: RangeStream,
+{
+ Match(regex, PhantomData)
+}
+
+#[derive(Clone)]
+pub struct Find<R, Input>(R, PhantomData<fn() -> Input>);
+
+impl<'a, Input, R> Parser<Input> for Find<R, Input>
+where
+ R: Regex<Input::Range>,
+ Input: RangeStream,
+ Input::Range: crate::stream::Range,
+{
+ type Output = Input::Range;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(
+ &mut self,
+ input: &mut Input,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ let (end, First(value)) = self.0.find_iter(input.range());
+ match value {
+ Some(value) => take(end).parse_lazy(input).map(|_| value),
+ None => PeekErr(Input::Error::empty(input.position()).into()),
+ }
+ }
+ fn add_error(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
+ error.error.add(StreamError::expected_format(format_args!(
+ "/{}/",
+ self.0.as_str()
+ )))
+ }
+}
+
+/// Matches `regex` on the input by running `find` on the input and returns the first match.
+/// Consumes all input up until the end of the first match.
+///
+/// ```
+/// extern crate regex;
+/// extern crate combine;
+/// use regex::Regex;
+/// use combine::Parser;
+/// use combine::parser::regex::find;
+///
+/// fn main() {
+/// let mut digits = find(Regex::new("^[0-9]+").unwrap());
+/// assert_eq!(digits.parse("123 456 "), Ok(("123", " 456 ")));
+/// assert!(
+/// digits.parse("abc 123 456 ").is_err());
+///
+/// let mut digits2 = find(Regex::new("[0-9]+").unwrap());
+/// assert_eq!(digits2.parse("123 456 "), Ok(("123", " 456 ")));
+/// assert_eq!(digits2.parse("abc 123 456 "), Ok(("123", " 456 ")));
+/// }
+/// ```
+pub fn find<R, Input>(regex: R) -> Find<R, Input>
+where
+ R: Regex<Input::Range>,
+ Input: RangeStream,
+ Input::Range: crate::stream::Range,
+{
+ Find(regex, PhantomData)
+}
+
+#[derive(Clone)]
+pub struct FindMany<F, R, Input>(R, PhantomData<fn() -> (Input, F)>);
+
+impl<'a, Input, F, R> Parser<Input> for FindMany<F, R, Input>
+where
+ F: FromIterator<Input::Range>,
+ R: Regex<Input::Range>,
+ Input: RangeStream,
+ Input::Range: crate::stream::Range,
+{
+ type Output = F;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(
+ &mut self,
+ input: &mut Input,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ let (end, value) = self.0.find_iter(input.range());
+ take(end).parse_lazy(input).map(|_| value)
+ }
+ fn add_error(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
+ error.error.add(StreamError::expected_format(format_args!(
+ "/{}/",
+ self.0.as_str()
+ )))
+ }
+}
+
+/// Matches `regex` on the input by running `find_iter` on the input.
+/// Returns all matches in a `F: FromIterator<Input::Range>`.
+/// Consumes all input up until the end of the last match.
+///
+/// ```
+/// extern crate regex;
+/// extern crate combine;
+/// use regex::Regex;
+/// use regex::bytes;
+/// use combine::Parser;
+/// use combine::parser::regex::find_many;
+///
+/// fn main() {
+/// let mut digits = find_many(Regex::new("[0-9]+").unwrap());
+/// assert_eq!(digits.parse("123 456 "), Ok((vec!["123", "456"], " ")));
+/// assert_eq!(digits.parse("abc 123 456 "), Ok((vec!["123", "456"], " ")));
+/// assert_eq!(digits.parse("abc"), Ok((vec![], "abc")));
+/// }
+/// ```
+pub fn find_many<F, R, Input>(regex: R) -> FindMany<F, R, Input>
+where
+ F: FromIterator<Input::Range>,
+ R: Regex<Input::Range>,
+ Input: RangeStream,
+ Input::Range: crate::stream::Range,
+{
+ FindMany(regex, PhantomData)
+}
+
+#[derive(Clone)]
+pub struct Captures<F, R, Input>(R, PhantomData<fn() -> (Input, F)>);
+
+impl<'a, Input, F, R> Parser<Input> for Captures<F, R, Input>
+where
+ F: FromIterator<Input::Range>,
+ R: Regex<Input::Range>,
+ Input: RangeStream,
+ Input::Range: crate::stream::Range,
+{
+ type Output = F;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(
+ &mut self,
+ input: &mut Input,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ let (end, First(value)) = self.0.captures(input.range());
+ match value {
+ Some(value) => take(end).parse_lazy(input).map(|_| value),
+ None => PeekErr(Input::Error::empty(input.position()).into()),
+ }
+ }
+ fn add_error(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
+ error.error.add(StreamError::expected_format(format_args!(
+ "/{}/",
+ self.0.as_str()
+ )))
+ }
+}
+
+/// Matches `regex` on the input by running `captures_iter` on the input.
+/// Returns the captures of the first match and consumes the input up until the end of that match.
+///
+/// ```
+/// extern crate regex;
+/// extern crate combine;
+/// use regex::Regex;
+/// use combine::Parser;
+/// use combine::parser::regex::captures;
+///
+/// fn main() {
+/// let mut fields = captures(Regex::new("([a-z]+):([0-9]+)").unwrap());
+/// assert_eq!(
+/// fields.parse("test:123 field:456 "),
+/// Ok((vec!["test:123", "test", "123"],
+/// " field:456 "
+/// ))
+/// );
+/// assert_eq!(
+/// fields.parse("test:123 :456 "),
+/// Ok((vec!["test:123", "test", "123"],
+/// " :456 "
+/// ))
+/// );
+/// }
+/// ```
+pub fn captures<F, R, Input>(regex: R) -> Captures<F, R, Input>
+where
+ F: FromIterator<Input::Range>,
+ R: Regex<Input::Range>,
+ Input: RangeStream,
+ Input::Range: crate::stream::Range,
+{
+ Captures(regex, PhantomData)
+}
+
+#[derive(Clone)]
+pub struct CapturesMany<F, G, R, Input>(R, PhantomData<fn() -> (Input, F, G)>);
+
+impl<'a, Input, F, G, R> Parser<Input> for CapturesMany<F, G, R, Input>
+where
+ F: FromIterator<Input::Range>,
+ G: FromIterator<F>,
+ R: Regex<Input::Range>,
+ Input: RangeStream,
+ Input::Range: crate::stream::Range,
+{
+ type Output = G;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(
+ &mut self,
+ input: &mut Input,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ let (end, value) = self.0.captures(input.range());
+ take(end).parse_lazy(input).map(|_| value)
+ }
+ fn add_error(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
+ error.error.add(StreamError::expected_format(format_args!(
+ "/{}/",
+ self.0.as_str()
+ )))
+ }
+}
+
+/// Matches `regex` on the input by running `captures_iter` on the input.
+/// Returns all captures which are part of each match in a `G: FromIterator<F>` where
+/// `F: FromIterator<Input::Range>`.
+/// Consumes all input up until the end of the last match.
+///
+/// ```
+/// extern crate regex;
+/// extern crate combine;
+/// use regex::Regex;
+/// use combine::Parser;
+/// use combine::parser::regex::captures_many;
+///
+/// fn main() {
+/// let mut fields = captures_many(Regex::new("([a-z]+):([0-9]+)").unwrap());
+/// assert_eq!(
+/// fields.parse("test:123 field:456 "),
+/// Ok((vec![vec!["test:123", "test", "123"],
+/// vec!["field:456", "field", "456"]],
+/// " "
+/// ))
+/// );
+/// assert_eq!(
+/// fields.parse("test:123 :456 "),
+/// Ok((vec![vec!["test:123", "test", "123"]],
+/// " :456 "
+/// ))
+/// );
+/// }
+/// ```
+pub fn captures_many<F, G, R, Input>(regex: R) -> CapturesMany<F, G, R, Input>
+where
+ F: FromIterator<Input::Range>,
+ G: FromIterator<F>,
+ R: Regex<Input::Range>,
+ Input: RangeStream,
+ Input::Range: crate::stream::Range,
+{
+ CapturesMany(regex, PhantomData)
+}
+
+#[cfg(test)]
+mod tests {
+
+ use regex::Regex;
+
+ use crate::{parser::regex::find, Parser};
+
+ #[test]
+ fn test() {
+ let mut digits = find(Regex::new("^[0-9]+").unwrap());
+ assert_eq!(digits.parse("123 456 "), Ok(("123", " 456 ")));
+ assert!(digits.parse("abc 123 456 ").is_err());
+
+ let mut digits2 = find(Regex::new("[0-9]+").unwrap());
+ assert_eq!(digits2.parse("123 456 "), Ok(("123", " 456 ")));
+ assert_eq!(digits2.parse("abc 123 456 "), Ok(("123", " 456 ")));
+ }
+}
diff --git a/src/parser/repeat.rs b/src/parser/repeat.rs
new file mode 100644
index 0000000..27543c3
--- /dev/null
+++ b/src/parser/repeat.rs
@@ -0,0 +1,1620 @@
+//! Combinators which take one or more parsers and applies them repeatedly.
+
+use crate::{
+ error::{
+ Commit, ParseError,
+ ParseResult::{self, *},
+ ResultExt, StdParseResult, StreamError, Tracked,
+ },
+ lib::{borrow::BorrowMut, cmp, marker::PhantomData, mem},
+ parser::{
+ choice::{optional, Optional, Or},
+ combinator::{ignore, Ignore},
+ function::{parser, FnParser},
+ sequence::With,
+ token::{value, Value},
+ FirstMode, ParseMode,
+ },
+ stream::{uncons, Stream, StreamOnce},
+ ErrorOffset, Parser,
+};
+
+parser! {
+pub struct Count;
+
+/// Parses `parser` from zero up to `count` times.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::error::Info;
+/// # use combine::stream::easy::Error;
+/// # fn main() {
+/// let mut parser = count(2, token(b'a'));
+///
+/// let result = parser.parse(&b"aaab"[..]);
+/// assert_eq!(result, Ok((b"aa"[..].to_owned(), &b"ab"[..])));
+/// # }
+/// ```
+pub fn count[F, Input, P](count: usize, parser: P)(Input) -> F
+where [
+ Input: Stream,
+ P: Parser<Input>,
+ F: Extend<P::Output> + Default,
+]
+{
+ count_min_max(0, *count, parser)
+}
+}
+
+parser! {
+ pub struct SkipCount;
+ type PartialState = <With<Count<Sink, Input, P>, Value<Input, ()>> as Parser<Input>>::PartialState;
+ /// Parses `parser` from zero up to `count` times skipping the output of `parser`.
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::stream::easy::{Error, Info};
+ /// # fn main() {
+ /// let mut parser = skip_count(2, token(b'a'));
+ ///
+ /// let result = parser.parse(&b"aaab"[..]);
+ /// assert_eq!(result, Ok(((), &b"ab"[..])));
+ /// # }
+ /// ```
+ pub fn skip_count[Input, P](count: usize, parser: P)(Input) -> ()
+ where [
+ P: Parser<Input>
+ ]
+ {
+ self::count::<Sink, _, _>(*count, parser.map(|_| ())).with(value(()))
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct CountMinMax<F, P> {
+ parser: P,
+ min: usize,
+ max: usize,
+ _marker: PhantomData<fn() -> F>,
+}
+
+struct SuggestSizeHint<I> {
+ iterator: I,
+ min: usize,
+ max: Option<usize>,
+}
+
+impl<I> Iterator for SuggestSizeHint<I>
+where
+ I: Iterator,
+{
+ type Item = I::Item;
+
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ self.iterator.next()
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.min, self.max)
+ }
+}
+
+fn suggest_size_hint<I>(iterator: I, (min, max): (usize, Option<usize>)) -> SuggestSizeHint<I>
+where
+ I: Iterator,
+{
+ SuggestSizeHint {
+ iterator,
+ // Invalid input may report an extreme size so we guard against that (while still
+ // optimizing by preallocating for the expected case of success)
+ min: cmp::min(min, 4096),
+ max,
+ }
+}
+
+impl<Input, P, F> Parser<Input> for CountMinMax<F, P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ F: Extend<P::Output> + Default,
+{
+ type Output = F;
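+ // PartialState fields: (elements parsed so far, collected output, child parser state)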
+ type PartialState = (usize, F, P::PartialState);
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, Input::Error>
+ where
+ M: ParseMode,
+ {
+ let (count, elements, child_state) = state;
+
+ let mut iter = self.parser.by_ref().partial_iter(mode, input, child_state);
+ let remaining_min = self.min.saturating_sub(*count);
+ let remaining_max = self.max - *count;
+ elements.extend(suggest_size_hint(
+ iter.by_ref().take(remaining_max).inspect(|_| *count += 1),
+ (remaining_min, Some(remaining_max)),
+ ));
+ if *count < self.min {
+ let err = StreamError::message_format(format_args!(
+ "expected {} more elements",
+ self.min - *count
+ ));
+ iter.fail(err)
+ } else {
+ iter.into_result_fast(elements).map(|x| {
+ *count = 0;
+ x
+ })
+ }
+ }
+
+ fn add_error(&mut self, error: &mut Tracked<<Input as StreamOnce>::Error>) {
+ self.parser.add_error(error)
+ }
+}
+
+/// Parses `parser` from `min` to `max` times (including `min` and `max`).
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::stream::easy::{Error, Info};
+/// # fn main() {
+/// let mut parser = count_min_max(2, 2, token(b'a'));
+///
+/// let result = parser.parse(&b"aaab"[..]);
+/// assert_eq!(result, Ok((b"aa"[..].to_owned(), &b"ab"[..])));
+/// let result = parser.parse(&b"ab"[..]);
+/// assert!(result.is_err());
+/// # }
+/// ```
+///
+/// # Panics
+///
+/// If `min` > `max`.
+pub fn count_min_max<F, Input, P>(min: usize, max: usize, parser: P) -> CountMinMax<F, P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ F: Extend<P::Output> + Default,
+{
+ assert!(min <= max);
+
+ CountMinMax {
+ parser,
+ min,
+ max,
+ _marker: PhantomData,
+ }
+}
+
+parser! {
+ pub struct SkipCountMinMax;
+ type PartialState = <With<CountMinMax<Sink, P>, Value<Input, ()>> as Parser<Input>>::PartialState;
+ /// Parses `parser` from `min` to `max` times (including `min` and `max`)
+ /// skipping the output of `parser`.
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # fn main() {
+ /// let mut parser = skip_count_min_max(2, 2, token(b'a'));
+ ///
+ /// let result = parser.parse(&b"aaab"[..]);
+ /// assert_eq!(result, Ok(((), &b"ab"[..])));
+ /// let result = parser.parse(&b"ab"[..]);
+ /// assert!(result.is_err());
+ /// # }
+ /// ```
+ ///
+ /// # Panics
+ ///
+ /// If `min` > `max`.
+ pub fn skip_count_min_max[Input, P](min: usize, max: usize, parser: P)(Input) -> ()
+ where [
+ P: Parser<Input>,
+ ]
+ {
+ count_min_max::<Sink, _, _>(*min, *max, parser.map(|_| ())).with(value(()))
+ }
+}
+
+pub struct Iter<'a, Input, P, S, M>
+where
+ Input: Stream,
+ P: Parser<Input>,
+{
+ parser: P,
+ input: &'a mut Input,
+ committed: bool,
+ state: State<<Input as StreamOnce>::Error>,
+ partial_state: S,
+ mode: M,
+}
+
+enum State<E> {
+ Ok,
+ PeekErr(E),
+ CommitErr(E),
+}
+
+impl<'a, Input, P, S, M> Iter<'a, Input, P, S, M>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ S: BorrowMut<P::PartialState>,
+{
+ pub fn new(parser: P, mode: M, input: &'a mut Input, partial_state: S) -> Self {
+ Iter {
+ parser,
+ input,
+ committed: false,
+ state: State::Ok,
+ partial_state,
+ mode,
+ }
+ }
+ /// Converts the iterator to a `ParseResult`, returning `Ok` if the parsing so far has been done
+ /// without any errors which committed data.
+ pub fn into_result<O>(self, value: O) -> StdParseResult<O, Input> {
+ self.into_result_(value).into()
+ }
+
+ fn into_result_<O>(self, value: O) -> ParseResult<O, Input::Error> {
+ match self.state {
+ State::Ok | State::PeekErr(_) => {
+ if self.committed {
+ CommitOk(value)
+ } else {
+ PeekOk(value)
+ }
+ }
+ State::CommitErr(e) => CommitErr(e),
+ }
+ }
+
+ fn into_result_fast<O>(self, value: &mut O) -> ParseResult<O, Input::Error>
+ where
+ O: Default,
+ {
+ match self.state {
+ State::Ok | State::PeekErr(_) => {
+ let value = mem::take(value);
+ if self.committed {
+ CommitOk(value)
+ } else {
+ PeekOk(value)
+ }
+ }
+ State::CommitErr(e) => CommitErr(e),
+ }
+ }
+
+ fn fail<T>(
+ self,
+ err: <<Input as StreamOnce>::Error as ParseError<
+ <Input as StreamOnce>::Token,
+ <Input as StreamOnce>::Range,
+ <Input as StreamOnce>::Position,
+ >>::StreamError,
+ ) -> ParseResult<T, Input::Error> {
+ match self.state {
+ State::Ok => {
+ let err = <Input as StreamOnce>::Error::from_error(self.input.position(), err);
+ if self.committed {
+ CommitErr(err)
+ } else {
+ PeekErr(err.into())
+ }
+ }
+ State::PeekErr(mut e) => {
+ let err = <Input as StreamOnce>::Error::from_error(self.input.position(), err);
+ e = e.merge(err);
+ if self.committed {
+ CommitErr(e)
+ } else {
+ PeekErr(e.into())
+ }
+ }
+ State::CommitErr(mut e) => {
+ e.add(err);
+ CommitErr(e)
+ }
+ }
+ }
+}
+
+impl<'a, Input, P, S, M> Iterator for Iter<'a, Input, P, S, M>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ S: BorrowMut<P::PartialState>,
+ M: ParseMode,
+{
+ type Item = P::Output;
+
+ fn next(&mut self) -> Option<P::Output> {
+ let before = self.input.checkpoint();
+ match self
+ .parser
+ .parse_mode(self.mode, self.input, self.partial_state.borrow_mut())
+ {
+ PeekOk(v) => {
+ self.mode.set_first();
+ Some(v)
+ }
+ CommitOk(v) => {
+ self.mode.set_first();
+ self.committed = true;
+ Some(v)
+ }
+ PeekErr(e) => {
+ self.state = match self.input.reset(before) {
+ Err(err) => State::CommitErr(err),
+ Ok(_) => State::PeekErr(e.error),
+ };
+ None
+ }
+ CommitErr(e) => {
+ self.state = State::CommitErr(e);
+ None
+ }
+ }
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct Many<F, P>(P, PhantomData<F>);
+
+impl<F, Input, P> Parser<Input> for Many<F, P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ F: Extend<P::Output> + Default,
+{
+ type Output = F;
+ type PartialState = (F, P::PartialState);
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, Input::Error>
+ where
+ M: ParseMode,
+ {
+ // TODO
+ let (ref mut elements, ref mut child_state) = *state;
+
+ let mut iter = (&mut self.0).partial_iter(mode, input, child_state);
+ elements.extend(iter.by_ref());
+ iter.into_result_fast(elements)
+ }
+
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ self.0.add_error(errors)
+ }
+
+ fn add_committed_expected_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ self.add_error(errors);
+ }
+
+ fn parser_count(&self) -> ErrorOffset {
+ self.0.parser_count()
+ }
+}
+
+/// Parses `p` zero or more times returning a collection with the values from `p`.
+///
+/// If the returned collection cannot be inferred, type annotations must be supplied, either by
+/// annotating the resulting type binding `let collection: Vec<_> = ...` or by specializing when
+/// calling many, `many::<Vec<_>, _, _>(...)`.
+///
+/// NOTE: If `p` can succeed without consuming any input this may hang forever as `many` will
+/// repeatedly use `p` to parse the same location in the input every time
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::digit;
+/// # fn main() {
+/// let result = many(digit())
+/// .parse("123A")
+/// .map(|x| x.0);
+/// assert_eq!(result, Ok(vec!['1', '2', '3']));
+/// # }
+/// ```
+pub fn many<F, Input, P>(p: P) -> Many<F, P>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ F: Extend<P::Output> + Default,
+{
+ Many(p, PhantomData)
+}
+
+#[derive(Copy, Clone)]
+pub struct Many1<F, P>(P, PhantomData<fn() -> F>);
+impl<F, Input, P> Parser<Input> for Many1<F, P>
+where
+ Input: Stream,
+ F: Extend<P::Output> + Default,
+ P: Parser<Input>,
+{
+ type Output = F;
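+ // PartialState fields: (parsed_one, committed, collected elements, child parser state)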
+ type PartialState = (bool, bool, F, P::PartialState);
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mut mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<F, Input::Error>
+ where
+ M: ParseMode,
+ {
+ let (ref mut parsed_one, ref mut committed_state, ref mut elements, ref mut child_state) =
+ *state;
+
+ if mode.is_first() || !*parsed_one {
+ debug_assert!(!*parsed_one);
+
+ let (first, committed) = ctry!(self.0.parse_mode(mode, input, child_state));
+ elements.extend(Some(first));
+ // TODO Should PeekOk be an error?
+ *committed_state = !committed.is_peek();
+ *parsed_one = true;
+ mode.set_first();
+ }
+
+ let mut iter = Iter {
+ parser: &mut self.0,
+ committed: *committed_state,
+ input,
+ state: State::Ok,
+ partial_state: child_state,
+ mode,
+ };
+ elements.extend(iter.by_ref());
+
+ iter.into_result_fast(elements).map(|x| {
+ *parsed_one = false;
+ x
+ })
+ }
+
+ fn add_committed_expected_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ self.add_error(errors);
+ }
+
+ forward_parser!(Input, add_error parser_count, 0);
+}
+
+/// Parses `p` one or more times, returning a collection with the values from `p`.
+///
+/// If the returned collection cannot be inferred, type annotations must be supplied, either by
+/// annotating the resulting type binding `let collection: Vec<_> = ...` or by specializing when
+/// calling `many1`, `many1::<Vec<_>, _, _>(...)`.
+///
+/// NOTE: If `p` can succeed without consuming any input this may hang forever, as `many1` will
+/// repeatedly use `p` to parse the same location in the input.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::digit;
+/// # fn main() {
+/// let result = many1::<Vec<_>, _, _>(digit())
+/// .parse("A123");
+/// assert!(result.is_err());
+/// # }
+/// ```
+pub fn many1<F, Input, P>(p: P) -> Many1<F, P>
+where
+ Input: Stream,
+ F: Extend<P::Output> + Default,
+ P: Parser<Input>,
+{
+ Many1(p, PhantomData)
+}
+
+#[derive(Clone)]
+#[doc(hidden)]
+// FIXME Should not be public
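+// A collection that implements `Extend` by simply discarding every element. `skip_many`,
+// `skip_until` and the other `skip_*` combinators plug it in as the `F` type parameter so the
+// repetition combinators can be reused without allocating.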
+pub struct Sink;
+
+impl Default for Sink {
+ fn default() -> Self {
+ Sink
+ }
+}
+
+impl<A> Extend<A> for Sink {
+ fn extend<T>(&mut self, iter: T)
+ where
+ T: IntoIterator<Item = A>,
+ {
+ for _ in iter {}
+ }
+}
+
+parser! {
+ pub struct SkipMany;
+ type PartialState = <Ignore<Many<Sink, Ignore<P>>> as Parser<Input>>::PartialState;
+/// Parses `p` zero or more times ignoring the result.
+///
+/// NOTE: If `p` can succeed without consuming any input this may hang forever, as `skip_many` will
+/// repeatedly use `p` to parse the same location in the input.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::digit;
+/// # fn main() {
+/// let result = skip_many(digit())
+/// .parse("A");
+/// assert_eq!(result, Ok(((), "A")));
+/// # }
+/// ```
+pub fn skip_many[Input, P](p: P)(Input) -> ()
+where [
+ P: Parser<Input>,
+]
+{
+ ignore(many::<Sink, _, _>(ignore(p)))
+}
+}
+
+parser! {
+ pub struct SkipMany1;
+ type PartialState = <Ignore<Many1<Sink, Ignore<P>>> as Parser<Input>>::PartialState;
+/// Parses `p` one or more times ignoring the result.
+///
+/// NOTE: If `p` can succeed without consuming any input this may hang forever, as `skip_many1` will
+/// repeatedly use `p` to parse the same location in the input.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::digit;
+/// # fn main() {
+/// let result = skip_many1(digit())
+/// .parse("123A");
+/// assert_eq!(result, Ok(((), "A")));
+/// # }
+/// ```
+pub fn skip_many1[Input, P](p: P)(Input) -> ()
+where [
+ P: Parser<Input>,
+]
+{
+ ignore(many1::<Sink, _, _>(ignore(p)))
+}
+}
+
+#[derive(Copy, Clone)]
+pub struct SepBy<F, P, S> {
+ parser: P,
+ separator: S,
+ _marker: PhantomData<fn() -> F>,
+}
+impl<F, Input, P, S> Parser<Input> for SepBy<F, P, S>
+where
+ Input: Stream,
+ F: Extend<P::Output> + Default,
+ P: Parser<Input>,
+ S: Parser<Input>,
+{
+ type Output = F;
+ type PartialState = <Or<
+ SepBy1<F, P, S>,
+ FnParser<Input, fn(&mut Input) -> StdParseResult<F, Input>>,
+ > as Parser<Input>>::PartialState;
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<F, Input::Error>
+ where
+ M: ParseMode,
+ {
+ sep_by1(&mut self.parser, &mut self.separator)
+ .or(parser(|_| Ok((F::default(), Commit::Peek(())))))
+ .parse_mode(mode, input, state)
+ }
+
+ fn add_committed_expected_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ self.separator.add_error(errors)
+ }
+
+ forward_parser!(Input, add_error parser_count, parser);
+}
+
+/// Parses `parser` zero or more times separated by `separator`, returning a collection with the
+/// values from `parser`.
+///
+/// If the returned collection cannot be inferred, type annotations must be supplied, either by
+/// annotating the resulting type binding `let collection: Vec<_> = ...` or by specializing when
+/// calling `sep_by`, `sep_by::<Vec<_>, _, _>(...)`.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::digit;
+/// # fn main() {
+/// let mut parser = sep_by(digit(), token(','));
+/// let result_ok = parser.parse("1,2,3");
+/// assert_eq!(result_ok, Ok((vec!['1', '2', '3'], "")));
+/// let result_ok2 = parser.parse("");
+/// assert_eq!(result_ok2, Ok((vec![], "")));
+/// # }
+/// ```
+pub fn sep_by<F, Input, P, S>(parser: P, separator: S) -> SepBy<F, P, S>
+where
+ Input: Stream,
+ F: Extend<P::Output> + Default,
+ P: Parser<Input>,
+ S: Parser<Input>,
+{
+ SepBy {
+ parser,
+ separator,
+ _marker: PhantomData,
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct SepBy1<F, P, S> {
+ parser: P,
+ separator: S,
+ _marker: PhantomData<fn() -> F>,
+}
+impl<F, Input, P, S> Parser<Input> for SepBy1<F, P, S>
+where
+ Input: Stream,
+ F: Extend<P::Output> + Default,
+ P: Parser<Input>,
+ S: Parser<Input>,
+{
+ type Output = F;
+ type PartialState = (
+ Option<Commit<()>>,
+ F,
+ <With<S, P> as Parser<Input>>::PartialState,
+ );
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, Input::Error>
+ where
+ M: ParseMode,
+ {
+ let (ref mut parsed_one, ref mut elements, ref mut child_state) = *state;
+
+ let rest = match *parsed_one {
+ Some(rest) => rest,
+ None => {
+ let (first, rest) =
+ ctry!(self
+ .parser
+ .parse_mode(mode, input, &mut child_state.B.state));
+ elements.extend(Some(first));
+ rest
+ }
+ };
+
+ rest.combine_commit(move |_| {
+ let rest = (&mut self.separator).with(&mut self.parser);
+ let mut iter = Iter::new(rest, mode, input, child_state);
+
+ elements.extend(iter.by_ref());
+
+ iter.into_result_fast(elements).map(|x| {
+ *parsed_one = None;
+ x
+ })
+ })
+ }
+
+ fn add_committed_expected_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ self.separator.add_error(errors)
+ }
+
+ forward_parser!(Input, add_error parser_count, parser);
+}
+
+/// Parses `parser` one or more times separated by `separator`, returning a collection with the
+/// values from `parser`.
+///
+/// If the returned collection cannot be inferred, type annotations must be supplied, either by
+/// annotating the resulting type binding `let collection: Vec<_> = ...` or by specializing when
+/// calling `sep_by1`, `sep_by1::<Vec<_>, _, _>(...)`.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::digit;
+/// # use combine::stream::easy;
+/// # use combine::stream::position::{self, SourcePosition};
+/// # fn main() {
+/// let mut parser = sep_by1(digit(), token(','));
+/// let result_ok = parser.easy_parse(position::Stream::new("1,2,3"))
+/// .map(|(vec, state)| (vec, state.input));
+/// assert_eq!(result_ok, Ok((vec!['1', '2', '3'], "")));
+/// let result_err = parser.easy_parse(position::Stream::new(""));
+/// assert_eq!(result_err, Err(easy::Errors {
+/// position: SourcePosition::default(),
+/// errors: vec![
+/// easy::Error::end_of_input(),
+/// easy::Error::Expected("digit".into())
+/// ]
+/// }));
+/// # }
+/// ```
+pub fn sep_by1<F, Input, P, S>(parser: P, separator: S) -> SepBy1<F, P, S>
+where
+ Input: Stream,
+ F: Extend<P::Output> + Default,
+ P: Parser<Input>,
+ S: Parser<Input>,
+{
+ SepBy1 {
+ parser,
+ separator,
+ _marker: PhantomData,
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct SepEndBy<F, P, S> {
+ parser: P,
+ separator: S,
+ _marker: PhantomData<fn() -> F>,
+}
+
+impl<F, Input, P, S> Parser<Input> for SepEndBy<F, P, S>
+where
+ Input: Stream,
+ F: Extend<P::Output> + Default,
+ P: Parser<Input>,
+ S: Parser<Input>,
+{
+ type Output = F;
+ type PartialState = <Or<
+ SepEndBy1<F, P, S>,
+ FnParser<Input, fn(&mut Input) -> StdParseResult<F, Input>>,
+ > as Parser<Input>>::PartialState;
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, Input::Error>
+ where
+ M: ParseMode,
+ {
+ sep_end_by1(&mut self.parser, &mut self.separator)
+ .or(parser(|_| Ok((F::default(), Commit::Peek(())))))
+ .parse_mode(mode, input, state)
+ }
+
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ self.parser.add_error(errors)
+ }
+}
+
+/// Parses `parser` zero or more times separated and ended by `separator`, returning a collection
+/// with the values from `parser`.
+///
+/// If the returned collection cannot be inferred, type annotations must be supplied, either by
+/// annotating the resulting type binding `let collection: Vec<_> = ...` or by specializing when
+/// calling `sep_end_by`, `sep_end_by::<Vec<_>, _, _>(...)`.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::digit;
+/// # fn main() {
+/// let mut parser = sep_end_by(digit(), token(';'));
+/// let result_ok = parser.parse("1;2;3;");
+/// assert_eq!(result_ok, Ok((vec!['1', '2', '3'], "")));
+/// let result_ok2 = parser.parse("1;2;3");
+/// assert_eq!(result_ok2, Ok((vec!['1', '2', '3'], "")));
+/// # }
+/// ```
+pub fn sep_end_by<F, Input, P, S>(parser: P, separator: S) -> SepEndBy<F, P, S>
+where
+ Input: Stream,
+ F: Extend<P::Output> + Default,
+ P: Parser<Input>,
+ S: Parser<Input>,
+{
+ SepEndBy {
+ parser,
+ separator,
+ _marker: PhantomData,
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct SepEndBy1<F, P, S> {
+ parser: P,
+ separator: S,
+ _marker: PhantomData<fn() -> F>,
+}
+
+impl<F, Input, P, S> Parser<Input> for SepEndBy1<F, P, S>
+where
+ Input: Stream,
+ F: Extend<P::Output> + Default,
+ P: Parser<Input>,
+ S: Parser<Input>,
+{
+ type Output = F;
+ type PartialState = (
+ Option<Commit<()>>,
+ F,
+ <With<S, Optional<P>> as Parser<Input>>::PartialState,
+ );
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, Input::Error>
+ where
+ M: ParseMode,
+ {
+ let (ref mut parsed_one, ref mut elements, ref mut child_state) = *state;
+
+ let rest = match *parsed_one {
+ Some(rest) => rest,
+ None => {
+ let (first, rest) =
+ ctry!(self
+ .parser
+ .parse_mode(mode, input, &mut child_state.B.state));
+ *parsed_one = Some(rest);
+ elements.extend(Some(first));
+ rest
+ }
+ };
+
+ rest.combine_commit(|_| {
+ let rest = (&mut self.separator).with(optional(&mut self.parser));
+ let mut iter = Iter::new(rest, mode, input, child_state);
+
+ // Parse elements until `self.parser` returns `None`
+ elements.extend(iter.by_ref().scan((), |_, x| x));
+
+ if iter.committed {
+ *parsed_one = Some(Commit::Commit(()));
+ }
+
+ iter.into_result_fast(elements).map(|x| {
+ *parsed_one = None;
+ x
+ })
+ })
+ }
+
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ self.parser.add_error(errors)
+ }
+}
+
+/// Parses `parser` one or more times separated and ended by `separator`, returning a collection
+/// with the values from `parser`.
+///
+/// If the returned collection cannot be inferred, type annotations must be
+/// supplied, either by annotating the resulting type binding `let collection: Vec<_> = ...` or by
+/// specializing when calling `sep_end_by1`, `sep_end_by1::<Vec<_>, _, _>(...)`.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::digit;
+/// # use combine::stream::easy;
+/// # use combine::stream::position::{self, SourcePosition};
+/// # fn main() {
+/// let mut parser = sep_end_by1(digit(), token(';'));
+/// let result_ok = parser.easy_parse(position::Stream::new("1;2;3;"))
+/// .map(|(vec, state)| (vec, state.input));
+/// assert_eq!(result_ok, Ok((vec!['1', '2', '3'], "")));
+/// let result_err = parser.easy_parse(position::Stream::new(""));
+/// assert_eq!(result_err, Err(easy::Errors {
+/// position: SourcePosition::default(),
+/// errors: vec![
+/// easy::Error::end_of_input(),
+/// easy::Error::Expected("digit".into())
+/// ]
+/// }));
+/// # }
+/// ```
+pub fn sep_end_by1<F, Input, P, S>(parser: P, separator: S) -> SepEndBy1<F, P, S>
+where
+ Input: Stream,
+ F: Extend<P::Output> + Default,
+ P: Parser<Input>,
+ S: Parser<Input>,
+{
+ SepEndBy1 {
+ parser,
+ separator,
+ _marker: PhantomData,
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct Chainl1<P, Op>(P, Op);
+impl<Input, P, Op> Parser<Input> for Chainl1<P, Op>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ Op: Parser<Input>,
+ Op::Output: FnOnce(P::Output, P::Output) -> P::Output,
+{
+ type Output = P::Output;
+ type PartialState = (
+ Option<(P::Output, Commit<()>)>,
+ <(Op, P) as Parser<Input>>::PartialState,
+ );
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mut mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, Input::Error>
+ where
+ M: ParseMode,
+ {
+ let (ref mut l_state, ref mut child_state) = *state;
+
+ let (mut l, mut committed) = match l_state.take() {
+ Some(x) => x,
+ None => {
+ let x = ctry!(self.0.parse_partial(input, &mut child_state.B.state));
+ mode.set_first();
+ x
+ }
+ };
+
+ loop {
+ let before = input.checkpoint();
+ match (&mut self.1, &mut self.0)
+ .parse_mode(mode, input, child_state)
+ .into()
+ {
+ Ok(((op, r), rest)) => {
+ l = op(l, r);
+ committed = committed.merge(rest);
+ mode.set_first();
+ }
+ Err(Commit::Commit(err)) => {
+ *l_state = Some((l, committed));
+ return CommitErr(err.error);
+ }
+ Err(Commit::Peek(_)) => {
+ ctry!(input.reset(before).committed());
+ break;
+ }
+ }
+ }
+ Ok((l, committed)).into()
+ }
+
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ self.0.add_error(errors)
+ }
+}
+
+/// Parses `p` 1 or more times separated by `op`. The value returned is the one produced by the
+/// left associative application of the function returned by the parser `op`.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::digit;
+/// # fn main() {
+/// let number = digit().map(|c: char| c.to_digit(10).unwrap());
+/// let sub = token('-').map(|_| |l: u32, r: u32| l - r);
+/// let mut parser = chainl1(number, sub);
+/// assert_eq!(parser.parse("9-3-5"), Ok((1, "")));
+/// # }
+/// ```
+pub fn chainl1<Input, P, Op>(parser: P, op: Op) -> Chainl1<P, Op>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ Op: Parser<Input>,
+ Op::Output: FnOnce(P::Output, P::Output) -> P::Output,
+{
+ Chainl1(parser, op)
+}
+
+#[derive(Copy, Clone)]
+pub struct Chainr1<P, Op>(P, Op);
+impl<Input, P, Op> Parser<Input> for Chainr1<P, Op>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ Op: Parser<Input>,
+ Op::Output: FnOnce(P::Output, P::Output) -> P::Output,
+{
+ type Output = P::Output;
+ type PartialState = ();
+ #[inline]
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<P::Output, Input::Error> {
+ // FIXME FastResult
+ let (mut l, mut committed) = ctry!(self.0.parse_lazy(input));
+ loop {
+ let before = input.checkpoint();
+ let op = match self.1.parse_lazy(input).into() {
+ Ok((x, rest)) => {
+ committed = committed.merge(rest);
+ x
+ }
+ Err(Commit::Commit(err)) => return CommitErr(err.error),
+ Err(Commit::Peek(_)) => {
+ ctry!(input.reset(before).committed());
+ break;
+ }
+ };
+ let before = input.checkpoint();
+ match self.parse_lazy(input).into() {
+ Ok((r, rest)) => {
+ l = op(l, r);
+ committed = committed.merge(rest);
+ }
+ Err(Commit::Commit(err)) => return CommitErr(err.error),
+ Err(Commit::Peek(_)) => {
+ ctry!(input.reset(before).committed());
+ break;
+ }
+ }
+ }
+ Ok((l, committed)).into()
+ }
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ self.0.add_error(errors)
+ }
+}
+
+/// Parses `p` one or more times separated by `op`. The value returned is the one produced by the
+/// right associative application of the function returned by `op`.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::digit;
+/// # fn main() {
+/// let number = digit().map(|c: char| c.to_digit(10).unwrap());
+/// let pow = token('^').map(|_| |l: u32, r: u32| l.pow(r));
+/// let mut parser = chainr1(number, pow);
+/// assert_eq!(parser.parse("2^3^2"), Ok((512, "")));
+/// # }
+/// ```
+pub fn chainr1<Input, P, Op>(parser: P, op: Op) -> Chainr1<P, Op>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ Op: Parser<Input>,
+ Op::Output: FnOnce(P::Output, P::Output) -> P::Output,
+{
+ Chainr1(parser, op)
+}
+
+#[derive(Copy, Clone)]
+pub struct TakeUntil<F, P> {
+ end: P,
+ _marker: PhantomData<fn() -> F>,
+}
+impl<F, Input, P> Parser<Input> for TakeUntil<F, P>
+where
+ Input: Stream,
+ F: Extend<<Input as StreamOnce>::Token> + Default,
+ P: Parser<Input>,
+{
+ type Output = F;
+ type PartialState = (F, P::PartialState);
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, Input::Error>
+ where
+ M: ParseMode,
+ {
+ let (ref mut output, ref mut end_state) = *state;
+
+ let mut committed = Commit::Peek(());
+ loop {
+ let before = input.checkpoint();
+ match self.end.parse_mode(mode, input, end_state).into() {
+ Ok((_, rest)) => {
+ ctry!(input.reset(before).committed());
+ return match committed.merge(rest) {
+ Commit::Commit(()) => CommitOk(mem::take(output)),
+ Commit::Peek(()) => PeekOk(mem::take(output)),
+ };
+ }
+ Err(Commit::Peek(_)) => {
+ ctry!(input.reset(before).committed());
+ output.extend(Some(ctry!(uncons(input)).0));
+ committed = Commit::Commit(());
+ }
+ Err(Commit::Commit(e)) => {
+ ctry!(input.reset(before).committed());
+ return CommitErr(e.error);
+ }
+ };
+ }
+ }
+}
+
+/// Takes input until `end` is encountered or `end` indicates that it has committed input before
+/// failing (`attempt` can be used to make it look like it has not committed any input)
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char;
+/// # use combine::parser::byte;
+/// # use combine::parser::combinator::attempt;
+/// # use combine::parser::repeat::take_until;
+/// # fn main() {
+/// let mut char_parser = take_until(char::digit());
+/// assert_eq!(char_parser.parse("abc123"), Ok(("abc".to_string(), "123")));
+///
+/// let mut byte_parser = take_until(byte::bytes(&b"TAG"[..]));
+/// assert_eq!(byte_parser.parse(&b"123TAG"[..]), Ok((b"123".to_vec(), &b"TAG"[..])));
+/// assert!(byte_parser.parse(&b"123TATAG"[..]).is_err());
+///
+/// // `attempt` must be used if the `end` should consume input before failing
+/// let mut byte_parser = take_until(attempt(byte::bytes(&b"TAG"[..])));
+/// assert_eq!(byte_parser.parse(&b"123TATAG"[..]), Ok((b"123TA".to_vec(), &b"TAG"[..])));
+/// # }
+/// ```
+pub fn take_until<F, Input, P>(end: P) -> TakeUntil<F, P>
+where
+ Input: Stream,
+ F: Extend<<Input as StreamOnce>::Token> + Default,
+ P: Parser<Input>,
+{
+ TakeUntil {
+ end,
+ _marker: PhantomData,
+ }
+}
+
+parser! {
+ pub struct SkipUntil;
+ type PartialState = <With<TakeUntil<Sink, P>, Value<Input, ()>> as Parser<Input>>::PartialState;
+ /// Skips input until `end` is encountered or `end` indicates that it has committed input before
+ /// failing (`attempt` can be used to make it look like it has not committed any input)
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::parser::char;
+ /// # use combine::parser::byte;
+ /// # use combine::parser::combinator::attempt;
+ /// # use combine::parser::repeat::skip_until;
+ /// # fn main() {
+ /// let mut char_parser = skip_until(char::digit());
+ /// assert_eq!(char_parser.parse("abc123"), Ok(((), "123")));
+ ///
+ /// let mut byte_parser = skip_until(byte::bytes(&b"TAG"[..]));
+ /// assert_eq!(byte_parser.parse(&b"123TAG"[..]), Ok(((), &b"TAG"[..])));
+ /// assert!(byte_parser.parse(&b"123TATAG"[..]).is_err());
+ ///
+ /// // `attempt` must be used if the `end` should consume input before failing
+ /// let mut byte_parser = skip_until(attempt(byte::bytes(&b"TAG"[..])));
+ /// assert_eq!(byte_parser.parse(&b"123TATAG"[..]), Ok(((), &b"TAG"[..])));
+ /// # }
+ /// ```
+ pub fn skip_until[Input, P](end: P)(Input) -> ()
+ where [
+ P: Parser<Input>,
+ ]
+ {
+ take_until::<Sink, _, _>(end).with(value(()))
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct RepeatUntil<F, P, E> {
+ parser: P,
+ end: E,
+ _marker: PhantomData<fn() -> F>,
+}
+impl<F, Input, P, E> Parser<Input> for RepeatUntil<F, P, E>
+where
+ Input: Stream,
+ F: Extend<P::Output> + Default,
+ P: Parser<Input>,
+ E: Parser<Input>,
+{
+ type Output = F;
+ type PartialState = (F, bool, P::PartialState, E::PartialState);
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mut mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, Input::Error>
+ where
+ M: ParseMode,
+ {
+ let (output, is_parse, parse_state, end_state) = state;
+
+ let mut committed = Commit::Peek(());
+ loop {
+ if *is_parse {
+ let (token, c) = ctry!(self.parser.parse_mode(mode, input, parse_state));
+ output.extend(Some(token));
+ committed = committed.merge(c);
+ *is_parse = false;
+ } else {
+ let before = input.checkpoint();
+ match self.end.parse_mode(mode, input, end_state).into() {
+ Ok((_, rest)) => {
+ ctry!(input.reset(before).committed());
+ return match committed.merge(rest) {
+ Commit::Commit(()) => CommitOk(mem::take(output)),
+ Commit::Peek(()) => PeekOk(mem::take(output)),
+ };
+ }
+ Err(Commit::Peek(_)) => {
+ ctry!(input.reset(before).committed());
+ mode.set_first();
+ *is_parse = true;
+ }
+ Err(Commit::Commit(e)) => {
+ ctry!(input.reset(before).committed());
+ return CommitErr(e.error);
+ }
+ }
+ }
+ }
+ }
+}
+
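+/// Parses `parser` zero or more times until `end` succeeds, returning a collection with the
+/// values from `parser`. Like `take_until`, `end` is not consumed.
+///
+/// A minimal usage sketch (behavior inferred from the implementation above; the collection type
+/// is assumed to be `String` here):
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::digit;
+/// # use combine::parser::repeat::repeat_until;
+/// # fn main() {
+/// let mut parser = repeat_until::<String, _, _, _>(any(), digit());
+/// assert_eq!(parser.parse("abc123"), Ok(("abc".to_string(), "123")));
+/// # }
+/// ```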
+pub fn repeat_until<F, Input, P, E>(parser: P, end: E) -> RepeatUntil<F, P, E>
+where
+ Input: Stream,
+ F: Extend<P::Output> + Default,
+ P: Parser<Input>,
+ E: Parser<Input>,
+{
+ RepeatUntil {
+ parser,
+ end,
+ _marker: PhantomData,
+ }
+}
+
+parser! {
+ pub struct SkipRepeatUntil;
+ type PartialState = <With<RepeatUntil<Sink, P, E>, Value<Input, ()>> as Parser<Input>>::PartialState;
+ /// Skips input until `end` is encountered or `end` indicates that it has committed input before
+ /// failing (`attempt` can be used to continue skipping even if `end` has committed input)
+ ///
+ /// ```
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::parser::char;
+ /// # use combine::parser::byte;
+ /// # use combine::parser::combinator::attempt;
+    /// # use combine::parser::repeat::repeat_skip_until;
+    /// # fn main() {
+    /// let mut char_parser = repeat_skip_until(any(), char::digit());
+    /// assert_eq!(char_parser.parse("abc123"), Ok(((), "123")));
+    ///
+    /// let mut byte_parser = repeat_skip_until(any(), byte::bytes(&b"TAG"[..]));
+    /// assert_eq!(byte_parser.parse(&b"123TAG"[..]), Ok(((), &b"TAG"[..])));
+    /// assert!(byte_parser.parse(&b"123TATAG"[..]).is_err());
+    ///
+    /// // `attempt` must be used because the `end` will commit to `TA` before failing,
+    /// // but we want to continue skipping
+    /// let mut byte_parser = repeat_skip_until(any(), attempt(byte::bytes(&b"TAG"[..])));
+    /// assert_eq!(byte_parser.parse(&b"123TATAG"[..]), Ok(((), &b"TAG"[..])));
+    /// # }
+ /// ```
+ pub fn repeat_skip_until[Input, P, E](parser: P, end: E)(Input) -> ()
+ where [
+ P: Parser<Input>,
+ E: Parser<Input>,
+ ]
+ {
+ repeat_until::<Sink, _, _, _>(parser, end).with(value(()))
+ }
+}
+
+#[derive(Default)]
+pub struct EscapedState<T, U>(PhantomData<(T, U)>);
+
+pub struct Escaped<P, Q, I> {
+ parser: P,
+ escape: I,
+ escape_parser: Q,
+}
+impl<Input, P, Q> Parser<Input> for Escaped<P, Q, Input::Token>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ <Input as StreamOnce>::Token: PartialEq,
+ Q: Parser<Input>,
+{
+ type Output = ();
+ type PartialState = EscapedState<P::PartialState, Q::PartialState>;
+
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<Self::Output, Input::Error> {
+ let mut committed = Commit::Peek(());
+ loop {
+ match self.parser.parse_lazy(input) {
+ PeekOk(_) => {}
+ CommitOk(_) => {
+ committed = Commit::Commit(());
+ }
+ PeekErr(_) => {
+ let checkpoint = input.checkpoint();
+ match uncons(input) {
+ CommitOk(ref c) | PeekOk(ref c) if *c == self.escape => {
+ match self.escape_parser.parse_committed_mode(
+ FirstMode,
+ input,
+ &mut Default::default(),
+ ) {
+ PeekOk(_) => {}
+ CommitOk(_) => {
+ committed = Commit::Commit(());
+ }
+ CommitErr(err) => return CommitErr(err),
+ PeekErr(err) => {
+ return CommitErr(err.error);
+ }
+ }
+ }
+ CommitErr(err) => {
+ return CommitErr(err);
+ }
+ _ => {
+ ctry!(input.reset(checkpoint).committed());
+ return if committed.is_peek() {
+ PeekOk(())
+ } else {
+ CommitOk(())
+ };
+ }
+ }
+ }
+ CommitErr(err) => return CommitErr(err),
+ }
+ }
+ }
+
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ use crate::error;
+
+ self.parser.add_error(errors);
+
+ errors.error.add_expected(error::Token(self.escape.clone()));
+ }
+}
+
+/// Parses an escaped string by first applying `parser`, which accepts the normal characters that do
+/// not need escaping. Once `parser` cannot consume any more input, it checks whether the next token
+/// is `escape`. If it is, `escape_parser` is used to parse the escaped character, after which
+/// parsing resumes with `parser`. If `escape` is not found, the parser finishes
+/// successfully.
+///
+/// This returns `()` since there isn't a good way to collect the output of the parsers so it is
+/// best paired with one of the `recognize` parsers.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::repeat::escaped;
+/// # use combine::parser::char;
+/// # use combine::parser::range::{recognize, take_while1};
+/// # fn main() {
+/// let mut parser = recognize(
+/// escaped(take_while1(|c| c != '"' && c != '\\'), '\\', one_of(r#"nr"\"#.chars()))
+/// );
+/// assert_eq!(parser.parse(r#"ab\"12\n\rc""#), Ok((r#"ab\"12\n\rc"#, r#"""#)));
+/// assert!(parser.parse(r#"\"#).is_err());
+/// assert!(parser.parse(r#"\a"#).is_err());
+/// # }
+/// ```
+pub fn escaped<Input, P, Q>(
+ parser: P,
+ escape: <Input as StreamOnce>::Token,
+ escape_parser: Q,
+) -> Escaped<P, Q, Input::Token>
+where
+ Input: Stream,
+ P: Parser<Input>,
+ <Input as StreamOnce>::Token: PartialEq,
+ Q: Parser<Input>,
+{
+ Escaped {
+ parser,
+ escape,
+ escape_parser,
+ }
+}
+
+pub struct Iterate<F, I, P> {
+ parser: P,
+ iterable: I,
+ _marker: PhantomData<fn() -> F>,
+}
+impl<'s, 'a, P, Q, I, J, F> Parser<I> for Iterate<F, J, P>
+where
+ P: FnMut(&J::Item, &mut I) -> Q,
+ Q: Parser<I>,
+ I: Stream,
+ J: IntoIterator + Clone,
+ F: Extend<Q::Output> + Default,
+{
+ type Output = F;
+ type PartialState = (
+ Option<(J::IntoIter, Option<J::Item>)>,
+ bool,
+ F,
+ Q::PartialState,
+ );
+
+ parse_mode!(I);
+
+ fn parse_mode_impl<M>(
+ &mut self,
+ mut mode: M,
+ input: &mut I,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, I::Error>
+ where
+ M: ParseMode,
+ {
+ let (opt_iter, committed, buf, next) = state;
+ let (iter, next_item) = match opt_iter {
+ Some(iter) if !mode.is_first() => iter,
+ _ => {
+ *opt_iter = Some((self.iterable.clone().into_iter(), None));
+ opt_iter.as_mut().unwrap()
+ }
+ };
+
+ let mut consume = |item: J::Item| {
+ let mut parser = (self.parser)(&item, input);
+ let before = input.checkpoint();
+ match parser.parse_mode(mode, input, next) {
+ PeekOk(v) => {
+ mode.set_first();
+ Ok(v)
+ }
+ CommitOk(v) => {
+ mode.set_first();
+ *committed = true;
+ Ok(v)
+ }
+ PeekErr(err) => {
+ if let Err(err) = input.reset(before) {
+ return Err((item, CommitErr(err)));
+ }
+ Err((
+ item,
+ if *committed {
+ CommitErr(err.error)
+ } else {
+ PeekErr(err)
+ },
+ ))
+ }
+ CommitErr(err) => Err((item, CommitErr(err))),
+ }
+ };
+
+ let result = (|| {
+ if let Some(item) = next_item.take() {
+ buf.extend(Some(consume(item)?));
+ }
+ let mut result = Ok(());
+ let size_hint = iter.size_hint();
+ buf.extend(suggest_size_hint(
+ iter.scan((), |_, item| match consume(item) {
+ Ok(item) => Some(item),
+ Err(err) => {
+ result = Err(err);
+ None
+ }
+ }),
+ size_hint,
+ ));
+ result
+ })();
+
+ if let Err((item, err)) = result {
+ *next_item = Some(item);
+ return err;
+ }
+
+ opt_iter.take();
+
+ let value = mem::take(buf);
+ if *committed {
+ *committed = false;
+ CommitOk(value)
+ } else {
+ PeekOk(value)
+ }
+ }
+}
+
+/// Constructs a parser from each item produced by `iterable` (by calling `parser` with the item
+/// and the input) and applies them in sequence, collecting the outputs into `F`.
+///
+/// ```
+/// # use combine::parser::repeat::{count_min_max, iterate};
+/// # use combine::*;
+///
+/// assert_eq!(
+/// iterate(0..3, |&i, _| count_min_max(i, i, any())).parse("abbccc"),
+/// Ok((vec!["".to_string(), "a".to_string(), "bb".to_string()], "ccc")),
+/// );
+/// ```
+pub fn iterate<F, J, P, I, Q>(iterable: J, parser: P) -> Iterate<F, J, P>
+where
+ P: FnMut(&J::Item, &mut I) -> Q,
+ Q: Parser<I>,
+ I: Stream,
+ J: IntoIterator + Clone,
+ F: Extend<Q::Output> + Default,
+{
+ Iterate {
+ parser,
+ iterable,
+ _marker: PhantomData,
+ }
+}
diff --git a/src/parser/sequence.rs b/src/parser/sequence.rs
new file mode 100644
index 0000000..b296966
--- /dev/null
+++ b/src/parser/sequence.rs
@@ -0,0 +1,893 @@
+//! Combinators which take multiple parsers and apply them one after another.
+
+use crate::{
+ error::{
+ ParseError,
+ ParseResult::{self, *},
+ StreamError, Tracked,
+ },
+ lib::marker::PhantomData,
+ parser::{
+ combinator::{ignore, Ignore, Map},
+ ParseMode,
+ },
+ ErrorOffset, Parser, Stream, StreamOnce,
+};
+
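+// Calls `$f(index, parser)` for each parser in the list, with indices counting up from `$i`;
+// iteration stops as soon as `$f` returns `false`.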
+macro_rules! dispatch_on {
+ ($i: expr, $f: expr;) => {
+ };
+ ($i: expr, $f: expr; $first: ident $(, $id: ident)*) => { {
+ let b = $f($i, $first);
+ if b {
+ dispatch_on!($i + 1, $f; $($id),*);
+ }
+ } }
+}
+
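+// Expands to the number of identifiers passed in, e.g. `count!(A, B, C)` == 3.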
+macro_rules! count {
+ () => { 0 };
+ ($f: ident) => { 1 };
+ ($f: ident, $($rest: ident),+) => { 1 + count!($($rest),*) };
+}
+
+#[doc(hidden)]
+pub struct SequenceState<T, U> {
+ pub value: Option<T>,
+ pub state: U,
+}
+
+impl<T, U: Default> Default for SequenceState<T, U> {
+ fn default() -> Self {
+ SequenceState {
+ value: None,
+ state: U::default(),
+ }
+ }
+}
+
+impl<T, U> SequenceState<T, U>
+where
+ U: Default,
+{
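+    // SAFETY: callers must guarantee that `value` is `Some`. The tuple `Parser` impls below
+    // uphold this by only calling `unwrap_value` after every element has parsed successfully.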
+ unsafe fn unwrap_value(&mut self) -> T {
+ match self.value.take() {
+ Some(t) => t,
+ None => core::hint::unreachable_unchecked(),
+ }
+ }
+}
+
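+// Expands to the last identifier in the list; used to forward `add_committed_expected_error`
+// to the final parser of the tuple.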
+macro_rules! last_ident {
+ ($id: ident) => { $id };
+ ($id: ident, $($rest: ident),+) => { last_ident!($($rest),+) };
+}
+
+macro_rules! tuple_parser {
+ ($partial_state: ident; $h: ident $(, $id: ident)*) => {
+ #[allow(non_snake_case)]
+ #[derive(Default)]
+ pub struct $partial_state < $h $(, $id )* > {
+ pub $h: $h,
+ $(
+ pub $id: $id,
+ )*
+ #[allow(dead_code)]
+ offset: u8,
+ _marker: PhantomData <( $h, $( $id),* )>,
+ }
+
+
+ #[allow(non_snake_case)]
+ impl<$h $(, $id)*> $partial_state<$h $(, $id)*> {
+ #[allow(dead_code)]
+ fn add_errors<Input>(
+ input: &mut Input,
+ mut err: Tracked<Input::Error>,
+ first_empty_parser: usize,
+ offset: u8,
+ $h: &mut $h $(, $id : &mut $id )*
+ ) -> ParseResult<($h::Output, $($id::Output),*), <Input as StreamOnce>::Error>
+ where Input: Stream,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+ $h: Parser<Input>,
+ $($id: Parser<Input>),*
+ {
+ let inner_offset = err.offset;
+ err.offset = ErrorOffset(offset);
+ if first_empty_parser != 0 {
+ if let Ok(t) = input.uncons() {
+ err.error.add(StreamError::unexpected_token(t));
+ }
+ dispatch_on!(0, |i, mut p| {
+ if i + 1 == first_empty_parser {
+ Parser::add_committed_expected_error(&mut p, &mut err);
+ }
+ if i >= first_empty_parser {
+ if err.offset <= ErrorOffset(1) {
+ // We reached the last parser we need to add errors to (and the
+ // parser that actually returned the error), use the returned
+ // offset for that parser.
+ err.offset = inner_offset;
+ }
+ Parser::add_error(&mut p, &mut err);
+ if err.offset <= ErrorOffset(1) {
+ return false;
+ }
+ }
+ err.offset = ErrorOffset(
+ err.offset.0.saturating_sub(Parser::parser_count(&p).0)
+ );
+ true
+ }; $h $(, $id)*);
+ CommitErr(err.error)
+ } else {
+ PeekErr(err)
+ }
+ }
+ }
+
+ #[allow(non_snake_case)]
+ impl <Input: Stream, $h:, $($id:),*> Parser<Input> for ($h, $($id),*)
+ where Input: Stream,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+ $h: Parser<Input>,
+ $($id: Parser<Input>),*
+ {
+
+ type Output = ($h::Output, $($id::Output),*);
+ type PartialState = $partial_state<
+ SequenceState<$h::Output, $h::PartialState>
+ $(, SequenceState<$id::Output, $id::PartialState>)*
+ >;
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<MODE>(
+ &mut self,
+ mut mode: MODE,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ MODE: ParseMode,
+ {
+ let (ref mut $h, $(ref mut $id),*) = *self;
+ let mut first_empty_parser = 0;
+ #[allow(unused_mut)]
+ let mut current_parser = 0;
+
+ #[allow(unused_macros)]
+ macro_rules! add_errors {
+ ($err: ident, $offset: expr) => {
+ $partial_state::add_errors(
+ input, $err, first_empty_parser, $offset, $h, $($id),*
+ )
+ }
+ }
+
+ if mode.is_first() || state.$h.value.is_none() {
+ let temp = match $h.parse_mode(mode, input, &mut state.$h.state) {
+ CommitOk(x) => {
+ first_empty_parser = current_parser + 1;
+ x
+ }
+ PeekErr(err) => return PeekErr(err),
+ CommitErr(err) => return CommitErr(err),
+ PeekOk(x) => {
+ x
+ }
+ };
+ state.offset = $h.parser_count().0.saturating_add(1);
+ state.$h.value = Some(temp);
+
+ // Once we have successfully parsed the partial input we may resume parsing in
+ // "first mode"
+ mode.set_first();
+ }
+
+ $(
+ if mode.is_first() || state.$id.value.is_none() {
+ current_parser += 1;
+ let before = input.checkpoint();
+ let temp = match $id.parse_mode(mode, input, &mut state.$id.state) {
+ CommitOk(x) => {
+ first_empty_parser = current_parser + 1;
+ x
+ }
+ PeekErr(err) => {
+ if let Err(err) = input.reset(before) {
+ return if first_empty_parser != 0 {
+ CommitErr(err.into())
+ } else {
+ PeekErr(err.into())
+ };
+ }
+ return add_errors!(err, state.offset)
+ }
+ CommitErr(err) => return CommitErr(err),
+ PeekOk(x) => {
+ x
+ }
+ };
+ state.offset = state.offset.saturating_add($id.parser_count().0);
+ state.$id.value = Some(temp);
+
+ // Once we have successfully parsed the partial input we may resume parsing in
+ // "first mode"
+ mode.set_first();
+ }
+ )*
+
+ let value = unsafe { (state.$h.unwrap_value(), $(state.$id.unwrap_value()),*) };
+ if first_empty_parser != 0 {
+ CommitOk(value)
+ } else {
+ PeekOk(value)
+ }
+ }
+
+ #[inline]
+ fn parser_count(&self) -> ErrorOffset {
+ let (ref $h, $(ref $id),*) = *self;
+ ErrorOffset($h.parser_count().0 $( + $id.parser_count().0)*)
+ }
+
+ #[inline]
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ let (ref mut $h, $(ref mut $id),*) = *self;
+ let prev = errors.offset;
+ $h.add_error(errors);
+ if errors.offset <= ErrorOffset(1) {
+ errors.offset = ErrorOffset(
+ errors.offset.0.saturating_sub(1)
+ );
+ return;
+ }
+ if errors.offset == prev {
+ errors.offset = ErrorOffset(errors.offset.0.saturating_sub($h.parser_count().0));
+ }
+
+ #[allow(dead_code)]
+ const LAST: usize = count!($($id),*);
+ #[allow(unused_mut, unused_variables)]
+ let mut i = 0;
+ $(
+ i += 1;
+ let prev = errors.offset;
+ $id.add_error(errors);
+ if errors.offset <= ErrorOffset(1) {
+ errors.offset = ErrorOffset(
+ errors.offset.0.saturating_sub(1)
+ );
+ return;
+ }
+ if i != LAST && errors.offset == prev {
+ errors.offset = ErrorOffset(
+ errors.offset.0.saturating_sub($id.parser_count().0)
+ );
+ }
+ )*
+ }
+
+ fn add_committed_expected_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ #[allow(unused_variables)]
+ let (ref mut $h, $(ref mut $id),*) = *self;
+ last_ident!($h $(, $id)*).add_committed_expected_error(errors)
+ }
+ }
+ }
+}
+
+tuple_parser!(PartialState1; A);
+tuple_parser!(PartialState2; A, B);
+tuple_parser!(PartialState3; A, B, C);
+tuple_parser!(PartialState4; A, B, C, D);
+tuple_parser!(PartialState5; A, B, C, D, E);
+tuple_parser!(PartialState6; A, B, C, D, E, F);
+tuple_parser!(PartialState7; A, B, C, D, E, F, G);
+tuple_parser!(PartialState8; A, B, C, D, E, F, G, H);
+tuple_parser!(PartialState9; A, B, C, D, E, F, G, H, I);
+tuple_parser!(PartialState10; A, B, C, D, E, F, G, H, I, J);
+tuple_parser!(PartialState11; A, B, C, D, E, F, G, H, I, J, K);
+tuple_parser!(PartialState12; A, B, C, D, E, F, G, H, I, J, K, L);
+tuple_parser!(PartialState13; A, B, C, D, E, F, G, H, I, J, K, L, M);
+tuple_parser!(PartialState14; A, B, C, D, E, F, G, H, I, J, K, L, M, N);
+tuple_parser!(PartialState15; A, B, C, D, E, F, G, H, I, J, K, L, M, N, P);
+tuple_parser!(PartialState16; A, B, C, D, E, F, G, H, I, J, K, L, M, N, P, Q);
+tuple_parser!(PartialState17; A, B, C, D, E, F, G, H, I, J, K, L, M, N, P, Q, R);
+tuple_parser!(PartialState18; A, B, C, D, E, F, G, H, I, J, K, L, M, N, P, Q, R, S);
+tuple_parser!(PartialState19; A, B, C, D, E, F, G, H, I, J, K, L, M, N, P, Q, R, S, T);
+tuple_parser!(PartialState20; A, B, C, D, E, F, G, H, I, J, K, L, M, N, P, Q, R, S, T, U);
+
+#[macro_export]
+#[doc(hidden)]
+macro_rules! seq_parser_expr {
+ (; $($tt: tt)*) => {
+ ( $($tt)* )
+ };
+ ( (_ : $first_parser: expr, $($remaining: tt)+ ); $($tt: tt)*) => {
+ $crate::seq_parser_expr!( ( $($remaining)+ ) ; $($tt)* $first_parser, )
+ };
+ ( ($first_field: ident : $first_parser: expr, $($remaining: tt)+ ); $($tt: tt)*) => {
+ $crate::seq_parser_expr!( ( $($remaining)+ ) ; $($tt)* $first_parser, )
+ };
+ ( (_ : $first_parser: expr ); $($tt: tt)*) => {
+ ( $($tt)* $first_parser, )
+ };
+ ( ($first_field: ident : $first_parser: expr, ); $($tt: tt)*) => {
+ $crate::seq_parser_expr!(; $($tt)* $first_parser,)
+ };
+ ( (_ : $first_parser: expr, ); $($tt: tt)*) => {
+ ( $($tt)* $first_parser, )
+ };
+ ( ($first_field: ident : $first_parser: expr ); $($tt: tt)*) => {
+ $crate::seq_parser_expr!(; $($tt)* $first_parser,)
+ };
+}
+
+#[macro_export]
+#[doc(hidden)]
+macro_rules! seq_parser_pattern {
+ (; $($tt: tt)*) => {
+ ( $($tt)* )
+ };
+ ( (_ : $first_parser: expr, $($remaining: tt)+ ); $($tt: tt)*) => {
+ $crate::seq_parser_pattern!( ( $($remaining)+ ) ; $($tt)* _, )
+ };
+ ( ($first_field: ident : $first_parser: expr, $($remaining: tt)+ ); $($tt: tt)*) => {
+ $crate::seq_parser_pattern!( ( $($remaining)+ ) ; $($tt)* $first_field, )
+ };
+ ( ( _ : $first_parser: expr ); $($tt: tt)*) => {
+ $crate::seq_parser_pattern!(; $($tt)* _, )
+ };
+ ( ($first_field: ident : $first_parser: expr ); $($tt: tt)*) => {
+ $crate::seq_parser_pattern!(; $($tt)* $first_field,)
+ };
+ ( ( _ : $first_parser: expr, ); $($tt: tt)*) => {
+ $crate::seq_parser_pattern!(; $($tt)* _, )
+ };
+ ( ($first_field: ident : $first_parser: expr, ); $($tt: tt)*) => {
+ $crate::seq_parser_pattern!(; $($tt)* $first_field,)
+ };
+}
+
+#[macro_export]
+#[doc(hidden)]
+macro_rules! seq_parser_impl {
+ (; $name: ident $($tt: tt)*) => {
+ $name { $($tt)* }
+ };
+ ( (_ : $first_parser: expr, $($remaining: tt)+ ); $name: ident $($tt: tt)*) => {
+ $crate::seq_parser_impl!( ( $($remaining)+ ) ; $name $($tt)* )
+ };
+ ( ($first_field: ident : $first_parser: expr, $($remaining: tt)+ );
+ $name: ident $($tt: tt)*) =>
+ {
+ $crate::seq_parser_impl!( ( $($remaining)+ ) ; $name $($tt)* $first_field: $first_field, )
+ };
+ ( ( _ : $first_parser: expr ); $name: ident $($tt: tt)*) => {
+ $crate::seq_parser_impl!( ; $name $($tt)* )
+ };
+ ( ($first_field: ident : $first_parser: expr ); $name: ident $($tt: tt)*) => {
+ $crate::seq_parser_impl!(; $name $($tt)* $first_field: $first_field,)
+ };
+ ( ( _ : $first_parser: expr, ); $name: ident $($tt: tt)*) => {
+ $crate::seq_parser_impl!(; $name $($tt)*)
+ };
+ ( ($first_field: ident : $first_parser: expr, ); $name: ident $($tt: tt)*) => {
+ $crate::seq_parser_impl!(; $name $($tt)* $first_field: $first_field,)
+ };
+}
+
+#[macro_export]
+#[doc(hidden)]
+macro_rules! seq_tuple_extract {
+ (; ; $name: ident ; $($arg: expr),* $(,)? ) => {
+ $name( $($arg,)* )
+ };
+
+ ( (_ : $first_parser: expr, $($remaining: tt)+ ); ( $first_arg: expr, $($arg: expr),* ) ; $($tt: tt)*) => {
+ $crate::seq_tuple_extract!( ( $($remaining)+ ); ( $($arg),* ) ; $($tt)* )
+ };
+
+ ( ($first_parser: expr, $($remaining: tt)+ ); ( $first_arg: expr, $($arg: expr),* ) ; $($tt: tt)*) => {
+ $crate::seq_tuple_extract!( ( $($remaining)+ ) ; ( $($arg),* ) ; $($tt)* $first_arg, )
+ };
+
+ ( (_ : $first_parser: expr $(,)? ); ( $first_arg: expr, $($arg: expr),* ) ; $($tt: tt)*) => {
+ $crate::seq_tuple_extract!(; ; $($tt)*)
+ };
+
+ ( ($first_parser: expr $(,)? ); ( $first_arg: expr, $($arg: expr),* ) ; $($tt: tt)*) => {
+ $crate::seq_tuple_extract!(; ; $($tt)* $first_arg)
+ };
+}
+
+#[macro_export]
+#[doc(hidden)]
+macro_rules! seq_tuple_parser_impl {
+ (; $($tt: tt)*) => {
+ ($($tt)*)
+ };
+
+ ( (_ : $first_parser: expr, $($remaining: tt)+ ); $($tt: tt)*) => {
+ $crate::seq_tuple_parser_impl!( ( $($remaining)+ ) ; $($tt)* $first_parser, )
+ };
+
+ ( ($first_parser: expr, $($remaining: tt)+ ); $($tt: tt)*) => {
+ $crate::seq_tuple_parser_impl!( ( $($remaining)+ ) ; $($tt)* $first_parser, )
+ };
+
+ ( (_ : $first_parser: expr $(,)? ); $($tt: tt)*) => {
+ $crate::seq_tuple_parser_impl!(; $($tt)* $first_parser, )
+ };
+
+ ( ($first_parser: expr $(,)? ); $($tt: tt)*) => {
+ $crate::seq_tuple_parser_impl!(; $($tt)* $first_parser, )
+ };
+}
+
+/// Sequences multiple parsers and builds a struct out of them.
+///
+/// ```
+/// use combine::{Parser, between, from_str, many, struct_parser, token};
+/// use combine::parser::range::take_while1;
+/// use combine::parser::byte::{letter, spaces};
+///
+/// #[derive(Debug, PartialEq)]
+/// struct Point(u32, u32);
+///
+/// #[derive(Debug, PartialEq)]
+/// struct Field {
+/// name: Vec<u8>,
+/// value: Vec<u8>,
+/// point: Point,
+/// }
+/// fn main() {
+/// let num = || from_str(take_while1(|b: u8| b >= b'0' && b <= b'9'));
+/// let spaced = |b| between(spaces(), spaces(), token(b));
+/// let mut parser = struct_parser!{
+/// Field {
+/// name: many(letter()),
+/// // `_` fields are ignored when building the struct
+/// _: spaced(b':'),
+/// value: many(letter()),
+/// _: spaced(b':'),
+/// point: struct_parser!(Point(num(), _: spaced(b','), num())),
+/// }
+/// };
+/// assert_eq!(
+/// parser.parse(&b"test: data: 123 , 4"[..]),
+/// Ok((
+/// Field {
+/// name: b"test"[..].to_owned(),
+/// value: b"data"[..].to_owned(),
+/// point: Point(123, 4),
+/// },
+/// &b""[..]
+/// )),
+/// );
+/// }
+/// ```
+#[macro_export]
+macro_rules! struct_parser {
+ ($name: ident { $($tt: tt)* }) => {
+ $crate::seq_parser_expr!( ( $($tt)* ); )
+ .map(|$crate::seq_parser_pattern!( ( $($tt)* ); )|
+ $crate::seq_parser_impl!(( $($tt)* ); $name )
+ )
+ };
+
+ ($name: ident ( $($arg: tt)* )) => {
+ $crate::seq_tuple_parser_impl!( ( $($arg)* ) ; )
+ .map(|t|
+ $crate::seq_tuple_extract!(
+ ( $($arg)* );
+ (t.0, t.1, t.2, t.3, t.4, t.5, t.6, t.7, t.8, t.9, t.10, t.11, t.12, t.13, t.14);
+ $name ;
+ )
+ )
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct With<P1, P2>((Ignore<P1>, P2));
+impl<Input, P1, P2> Parser<Input> for With<P1, P2>
+where
+ Input: Stream,
+ P1: Parser<Input>,
+ P2: Parser<Input>,
+{
+ type Output = P2::Output;
+ type PartialState = <(Ignore<P1>, P2) as Parser<Input>>::PartialState;
+
+ #[inline]
+ fn parse_lazy(
+ &mut self,
+ input: &mut Input,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {
+ self.0.parse_lazy(input).map(|(_, b)| b)
+ }
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ self.0.parse_mode(mode, input, state).map(|(_, b)| b)
+ }
+
+ forward_parser!(Input, add_error add_committed_expected_error parser_count, 0);
+}
+
+/// Equivalent to [`p1.with(p2)`].
+///
+/// [`p1.with(p2)`]: ../trait.Parser.html#method.with
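+///
+/// A minimal sketch of the equivalence (the output of `p1` is discarded and only `p2`'s output
+/// is returned):
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::sequence::with;
+/// # fn main() {
+/// let mut parser = with(token('a'), token('b'));
+/// assert_eq!(parser.parse("ab"), Ok(('b', "")));
+/// # }
+/// ```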
+pub fn with<Input, P1, P2>(p1: P1, p2: P2) -> With<P1, P2>
+where
+ Input: Stream,
+ P1: Parser<Input>,
+ P2: Parser<Input>,
+{
+ With((ignore(p1), p2))
+}
+
+#[derive(Copy, Clone)]
+pub struct Skip<P1, P2>((P1, Ignore<P2>));
+impl<Input, P1, P2> Parser<Input> for Skip<P1, P2>
+where
+ Input: Stream,
+ P1: Parser<Input>,
+ P2: Parser<Input>,
+{
+ type Output = P1::Output;
+ type PartialState = <(P1, Ignore<P2>) as Parser<Input>>::PartialState;
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ self.0.parse_mode(mode, input, state).map(|(a, _)| a)
+ }
+
+ forward_parser!(Input, add_error add_committed_expected_error parser_count, 0);
+}
+
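+/// Equivalent to [`p1.skip(p2)`]: parses `p1` followed by `p2`, returning only the value of `p1`.
+///
+/// A minimal usage sketch mirroring the `with` example above:
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::sequence::skip;
+/// # fn main() {
+/// let mut parser = skip(token('a'), token('b'));
+/// assert_eq!(parser.parse("ab"), Ok(('a', "")));
+/// # }
+/// ```
+///
+/// [`p1.skip(p2)`]: ../trait.Parser.html#method.skip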
+pub fn skip<Input, P1, P2>(p1: P1, p2: P2) -> Skip<P1, P2>
+where
+ Input: Stream,
+ P1: Parser<Input>,
+ P2: Parser<Input>,
+{
+ Skip((p1, ignore(p2)))
+}
+
+parser! {
+ #[derive(Copy, Clone)]
+ pub struct Between;
+ type PartialState = <Map<(L, P, R), fn ((L::Output, P::Output, R::Output)) -> P::Output> as Parser<Input>>::PartialState;
+/// Parses `open` followed by `parser` followed by `close`.
+/// Returns the value of `parser`.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::char::string;
+/// # fn main() {
+/// let result = between(token('['), token(']'), string("rust"))
+/// .parse("[rust]")
+/// .map(|x| x.0);
+/// assert_eq!(result, Ok("rust"));
+/// # }
+/// ```
+pub fn between[Input, L, R, P](open: L, close: R, parser: P)(Input) -> P::Output
+where [
+ Input: Stream,
+    L: Parser<Input>,
+    R: Parser<Input>,
+    P: Parser<Input>,
+]
+{
+ fn middle<T, U, V>((_, x, _): (T, U, V)) -> U {
+ x
+ }
+ (open, parser, close).map(middle)
+}
+}
+
+#[derive(Copy, Clone)]
+pub struct Then<P, F>(P, F);
+impl<Input, P, N, F> Parser<Input> for Then<P, F>
+where
+ Input: Stream,
+ F: FnMut(P::Output) -> N,
+ P: Parser<Input>,
+ N: Parser<Input>,
+{
+ type Output = N::Output;
+ type PartialState = (P::PartialState, Option<(bool, N)>, N::PartialState);
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mut mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ let (ref mut p_state, ref mut n_parser_cache, ref mut n_state) = *state;
+
+ if mode.is_first() || n_parser_cache.is_none() {
+ debug_assert!(n_parser_cache.is_none());
+
+ let (value, committed) = match self.0.parse_mode(mode, input, p_state) {
+ PeekOk(value) => (value, false),
+ CommitOk(value) => (value, true),
+
+ PeekErr(err) => return PeekErr(err),
+ CommitErr(err) => return CommitErr(err),
+ };
+
+ *n_parser_cache = Some((committed, (self.1)(value)));
+ mode.set_first();
+ }
+
+ let result = n_parser_cache
+ .as_mut()
+ .unwrap()
+ .1
+ .parse_committed_mode(mode, input, n_state);
+ match result {
+ PeekOk(x) => {
+ let (committed, _) = *n_parser_cache.as_ref().unwrap();
+ *n_parser_cache = None;
+ if committed {
+ CommitOk(x)
+ } else {
+ PeekOk(x)
+ }
+ }
+ CommitOk(x) => {
+ *n_parser_cache = None;
+ CommitOk(x)
+ }
+ PeekErr(x) => {
+ let (committed, _) = *n_parser_cache.as_ref().unwrap();
+ *n_parser_cache = None;
+ if committed {
+ CommitErr(x.error)
+ } else {
+ PeekErr(x)
+ }
+ }
+ CommitErr(x) => CommitErr(x),
+ }
+ }
+
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ self.0.add_error(errors);
+ }
+}
+
+/// Equivalent to [`p.then(f)`].
+///
+/// [`p.then(f)`]: ../trait.Parser.html#method.then
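+///
+/// A minimal sketch: the closure builds the second parser from the first parser's output; here it
+/// requires the next token to equal the one just parsed:
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::sequence::then;
+/// # fn main() {
+/// let mut parser = then(any(), |c| token(c));
+/// assert_eq!(parser.parse("aa"), Ok(('a', "")));
+/// assert!(parser.parse("ab").is_err());
+/// # }
+/// ```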
+pub fn then<Input, P, F, N>(p: P, f: F) -> Then<P, F>
+where
+ Input: Stream,
+ F: FnMut(P::Output) -> N,
+ P: Parser<Input>,
+ N: Parser<Input>,
+{
+ Then(p, f)
+}
+
+#[derive(Copy, Clone)]
+pub struct ThenPartial<P, F>(P, F);
+impl<Input, P, N, F> Parser<Input> for ThenPartial<P, F>
+where
+ Input: Stream,
+ F: FnMut(&mut P::Output) -> N,
+ P: Parser<Input>,
+ N: Parser<Input>,
+{
+ type Output = N::Output;
+ type PartialState = (P::PartialState, Option<(bool, P::Output)>, N::PartialState);
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mut mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ let (ref mut p_state, ref mut n_parser_cache, ref mut n_state) = *state;
+
+ if mode.is_first() || n_parser_cache.is_none() {
+ debug_assert!(n_parser_cache.is_none());
+
+ match self.0.parse_mode(mode, input, p_state) {
+ PeekOk(value) => {
+ *n_parser_cache = Some((false, value));
+ }
+ CommitOk(value) => {
+ *n_parser_cache = Some((true, value));
+ }
+ PeekErr(err) => return PeekErr(err),
+ CommitErr(err) => return CommitErr(err),
+ }
+ mode.set_first();
+ }
+
+ let result = (self.1)(&mut n_parser_cache.as_mut().unwrap().1)
+ .parse_committed_mode(mode, input, n_state);
+ match result {
+ PeekOk(x) => {
+ let (committed, _) = n_parser_cache.take().unwrap();
+ if committed {
+ CommitOk(x)
+ } else {
+ PeekOk(x)
+ }
+ }
+ CommitOk(x) => {
+ *n_parser_cache = None;
+ CommitOk(x)
+ }
+ PeekErr(x) => {
+ let (committed, _) = n_parser_cache.take().unwrap();
+ if committed {
+ CommitErr(x.error)
+ } else {
+ PeekErr(x)
+ }
+ }
+ CommitErr(x) => CommitErr(x),
+ }
+ }
+
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ self.0.add_error(errors);
+ }
+}
+
+/// Equivalent to [`p.then_partial(f)`].
+///
+/// [`p.then_partial(f)`]: ../trait.Parser.html#method.then_partial
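+///
+/// A minimal sketch, analogous to `then` above but with the closure receiving a mutable
+/// reference to the first parser's output:
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::sequence::then_partial;
+/// # fn main() {
+/// let mut parser = then_partial(any(), |c: &mut char| token(*c));
+/// assert_eq!(parser.parse("aa"), Ok(('a', "")));
+/// # }
+/// ```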
+pub fn then_partial<Input, P, F, N>(p: P, f: F) -> ThenPartial<P, F>
+where
+ Input: Stream,
+ F: FnMut(&mut P::Output) -> N,
+ P: Parser<Input>,
+ N: Parser<Input>,
+{
+ ThenPartial(p, f)
+}
+
+#[cfg(test)]
+mod tests {
+
+ use crate::parser::{token::any, EasyParser};
+
+ #[test]
+ fn sequence_single_parser() {
+ assert!((any(),).easy_parse("a").is_ok());
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct ThenRef<P, F>(P, F);
+impl<Input, P, N, F> Parser<Input> for ThenRef<P, F>
+where
+ Input: Stream,
+ F: FnMut(&P::Output) -> N,
+ P: Parser<Input>,
+ N: Parser<Input>,
+{
+ type Output = (P::Output, N::Output);
+ type PartialState = (
+ P::PartialState,
+ Option<(bool, P::Output, N)>,
+ N::PartialState,
+ );
+
+ parse_mode!(Input);
+ #[inline]
+ fn parse_mode_impl<M>(
+ &mut self,
+ mut mode: M,
+ input: &mut Input,
+ state: &mut Self::PartialState,
+ ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>
+ where
+ M: ParseMode,
+ {
+ let (ref mut p_state, ref mut n_parser_cache, ref mut n_state) = *state;
+
+ if mode.is_first() || n_parser_cache.is_none() {
+ debug_assert!(n_parser_cache.is_none());
+
+ let (value, committed) = match self.0.parse_mode(mode, input, p_state) {
+ PeekOk(value) => (value, false),
+ CommitOk(value) => (value, true),
+
+ PeekErr(err) => return PeekErr(err),
+ CommitErr(err) => return CommitErr(err),
+ };
+
+ let parser = (self.1)(&value);
+ *n_parser_cache = Some((committed, value, parser));
+
+ mode.set_first();
+ }
+
+ let result = n_parser_cache
+ .as_mut()
+ .unwrap()
+ .2
+ .parse_committed_mode(mode, input, n_state);
+ match result {
+ PeekOk(x) => {
+ let (committed, in_value, _) = n_parser_cache.take().unwrap();
+ if committed {
+ CommitOk((in_value, x))
+ } else {
+ PeekOk((in_value, x))
+ }
+ }
+ CommitOk(x) => {
+ let (_, in_value, _) = n_parser_cache.take().unwrap();
+ *n_parser_cache = None;
+ CommitOk((in_value, x))
+ }
+ PeekErr(x) => {
+ let (committed, _, _) = n_parser_cache.take().unwrap();
+ *n_parser_cache = None;
+ if committed {
+ CommitErr(x.error)
+ } else {
+ PeekErr(x)
+ }
+ }
+ CommitErr(x) => CommitErr(x),
+ }
+ }
+
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ self.0.add_error(errors);
+ }
+}
+
+/// Equivalent to [`p.then_ref(f)`].
+///
+/// [`p.then_ref(f)`]: ../trait.Parser.html#method.then_ref
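+///
+/// A minimal sketch: unlike `then`, the first parser's output is kept and returned alongside the
+/// second parser's output:
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::parser::sequence::then_ref;
+/// # fn main() {
+/// let mut parser = then_ref(any(), |&c| token(c));
+/// assert_eq!(parser.parse("aa"), Ok((('a', 'a'), "")));
+/// # }
+/// ```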
+pub fn then_ref<Input, P, F, N>(p: P, f: F) -> ThenRef<P, F>
+where
+ Input: Stream,
+ F: FnMut(&P::Output) -> N,
+ P: Parser<Input>,
+ N: Parser<Input>,
+{
+ ThenRef(p, f)
+}
diff --git a/src/parser/token.rs b/src/parser/token.rs
new file mode 100644
index 0000000..fe41421
--- /dev/null
+++ b/src/parser/token.rs
@@ -0,0 +1,700 @@
+//! Parsers working with single stream items.
+
+use crate::{
+ error::{
+ self, ErrorInfo, ParseError,
+ ParseResult::{self, *},
+ ResultExt, StreamError, Tracked,
+ },
+ lib::marker::PhantomData,
+ stream::{uncons, Stream, StreamOnce},
+ Parser,
+};
+
+#[derive(Copy, Clone)]
+pub struct Any<Input>(PhantomData<fn(Input) -> Input>);
+
+impl<Input> Parser<Input> for Any<Input>
+where
+ Input: Stream,
+{
+ type Output = Input::Token;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<Input::Token, Input::Error> {
+ uncons(input)
+ }
+}
+
+/// Parses any token.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # fn main() {
+/// let mut char_parser = any();
+/// assert_eq!(char_parser.parse("!").map(|x| x.0), Ok('!'));
+/// assert!(char_parser.parse("").is_err());
+/// let mut byte_parser = any();
+/// assert_eq!(byte_parser.parse(&b"!"[..]).map(|x| x.0), Ok(b'!'));
+/// assert!(byte_parser.parse(&b""[..]).is_err());
+/// # }
+/// ```
+pub fn any<Input>() -> Any<Input>
+where
+ Input: Stream,
+{
+ Any(PhantomData)
+}
+
+#[derive(Copy, Clone)]
+pub struct Satisfy<Input, P> {
+ predicate: P,
+ _marker: PhantomData<Input>,
+}
+
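+// Shared helper for the `satisfy*` parsers: consumes one token, applies `predicate`, and reports
+// a peek error at the original position when the predicate rejects the token.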
+fn satisfy_impl<Input, P, R>(input: &mut Input, mut predicate: P) -> ParseResult<R, Input::Error>
+where
+ Input: Stream,
+ P: FnMut(Input::Token) -> Option<R>,
+{
+ let position = input.position();
+ match uncons(input) {
+ PeekOk(c) | CommitOk(c) => match predicate(c) {
+ Some(c) => CommitOk(c),
+ None => PeekErr(Input::Error::empty(position).into()),
+ },
+ PeekErr(err) => PeekErr(err),
+ CommitErr(err) => CommitErr(err),
+ }
+}
+
+impl<Input, P> Parser<Input> for Satisfy<Input, P>
+where
+ Input: Stream,
+ P: FnMut(Input::Token) -> bool,
+{
+ type Output = Input::Token;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<Self::Output, Input::Error> {
+ satisfy_impl(input, |c| {
+ if (self.predicate)(c.clone()) {
+ Some(c)
+ } else {
+ None
+ }
+ })
+ }
+}
+
+/// Parses a token and succeeds depending on the result of `predicate`.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # fn main() {
+/// let mut parser = satisfy(|c| c == '!' || c == '?');
+/// assert_eq!(parser.parse("!").map(|x| x.0), Ok('!'));
+/// assert_eq!(parser.parse("?").map(|x| x.0), Ok('?'));
+/// # }
+/// ```
+pub fn satisfy<Input, P>(predicate: P) -> Satisfy<Input, P>
+where
+ Input: Stream,
+ P: FnMut(Input::Token) -> bool,
+{
+ Satisfy {
+ predicate,
+ _marker: PhantomData,
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct SatisfyMap<Input, P> {
+ predicate: P,
+ _marker: PhantomData<Input>,
+}
+
+impl<Input, P, R> Parser<Input> for SatisfyMap<Input, P>
+where
+ Input: Stream,
+ P: FnMut(Input::Token) -> Option<R>,
+{
+ type Output = R;
+ type PartialState = ();
+ #[inline]
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<Self::Output, Input::Error> {
+ satisfy_impl(input, &mut self.predicate)
+ }
+}
+
+/// Parses a token and passes it to `predicate`. If `predicate` returns `Some` the parser succeeds
+/// and returns the value inside the `Option`. If `predicate` returns `None` the parser fails
+/// without consuming any input.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # fn main() {
+/// #[derive(Debug, PartialEq)]
+/// enum YesNo {
+/// Yes,
+/// No,
+/// }
+/// let mut parser = satisfy_map(|c| {
+/// match c {
+/// 'Y' => Some(YesNo::Yes),
+/// 'N' => Some(YesNo::No),
+/// _ => None,
+/// }
+/// });
+/// assert_eq!(parser.parse("Y").map(|x| x.0), Ok(YesNo::Yes));
+/// assert!(parser.parse("A").map(|x| x.0).is_err());
+/// # }
+/// ```
+pub fn satisfy_map<Input, P, R>(predicate: P) -> SatisfyMap<Input, P>
+where
+ Input: Stream,
+ P: FnMut(Input::Token) -> Option<R>,
+{
+ SatisfyMap {
+ predicate,
+ _marker: PhantomData,
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct Token<Input>
+where
+ Input: Stream,
+ Input::Token: PartialEq,
+{
+ c: Input::Token,
+ _marker: PhantomData<Input>,
+}
+
+impl<Input> Parser<Input> for Token<Input>
+where
+ Input: Stream,
+ Input::Token: PartialEq + Clone,
+{
+ type Output = Input::Token;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<Input::Token, Input::Error> {
+ satisfy_impl(input, |c| if c == self.c { Some(c) } else { None })
+ }
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ errors.error.add_expected(error::Token(self.c.clone()));
+ }
+}
+
+/// Parses a character and succeeds if the character is equal to `c`.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # fn main() {
+/// let result = token('!')
+/// .parse("!")
+/// .map(|x| x.0);
+/// assert_eq!(result, Ok('!'));
+/// # }
+/// ```
+pub fn token<Input>(c: Input::Token) -> Token<Input>
+where
+ Input: Stream,
+ Input::Token: PartialEq,
+{
+ Token {
+ c,
+ _marker: PhantomData,
+ }
+}
+
+#[derive(Clone)]
+pub struct Tokens<C, E, T, Input>
+where
+ Input: Stream,
+{
+ cmp: C,
+ expected: E,
+ tokens: T,
+ _marker: PhantomData<Input>,
+}
+
+impl<Input, C, E, T> Parser<Input> for Tokens<C, E, T, Input>
+where
+ C: FnMut(T::Item, Input::Token) -> bool,
+ E: for<'s> ErrorInfo<'s, Input::Token, Input::Range>,
+ T: Clone + IntoIterator,
+ Input: Stream,
+{
+ type Output = T;
+ type PartialState = ();
+ #[inline]
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<T, Input::Error> {
+ let start = input.position();
+ let mut committed = false;
+ for c in self.tokens.clone() {
+ match crate::stream::uncons(input) {
+ CommitOk(other) | PeekOk(other) => {
+ if !(self.cmp)(c, other.clone()) {
+ return if committed {
+ let mut errors = <Input as StreamOnce>::Error::from_error(
+ start,
+ StreamError::unexpected_token(other),
+ );
+ errors.add_expected(&self.expected);
+ CommitErr(errors)
+ } else {
+ PeekErr(<Input as StreamOnce>::Error::empty(start).into())
+ };
+ }
+ committed = true;
+ }
+ PeekErr(mut error) => {
+ error.error.set_position(start);
+ return if committed {
+ CommitErr(error.error)
+ } else {
+ PeekErr(error)
+ };
+ }
+ CommitErr(mut error) => {
+ error.set_position(start);
+ return CommitErr(error);
+ }
+ }
+ }
+ if committed {
+ CommitOk(self.tokens.clone())
+ } else {
+ PeekOk(self.tokens.clone())
+ }
+ }
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ errors.error.add_expected(&self.expected);
+ }
+}
+
+/// Parses multiple tokens.
+///
+/// Consumes items from the input and compares them to the values from `tokens` using the
+/// comparison function `cmp`. Succeeds if all the items from `tokens` are matched in the input
+/// stream and fails otherwise with `expected` used as part of the error.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::error;
+/// # fn main() {
+/// let result = tokens(|l, r| l.eq_ignore_ascii_case(&r), "abc", "abc".chars())
+/// .parse("AbC")
+/// .map(|x| x.0.as_str());
+/// assert_eq!(result, Ok("abc"));
+/// let result = tokens(
+/// |&l, r| (if l < r { r - l } else { l - r }) <= 2,
+/// error::Range(&b"025"[..]),
+/// &b"025"[..]
+/// )
+/// .parse(&b"123"[..])
+/// .map(|x| x.0);
+/// assert_eq!(result, Ok(&b"025"[..]));
+/// # }
+/// ```
+pub fn tokens<C, E, T, Input>(cmp: C, expected: E, tokens: T) -> Tokens<C, E, T, Input>
+where
+ C: FnMut(T::Item, Input::Token) -> bool,
+ T: Clone + IntoIterator,
+ Input: Stream,
+{
+ Tokens {
+ cmp,
+ expected,
+ tokens,
+ _marker: PhantomData,
+ }
+}
+
+#[derive(Clone)]
+pub struct TokensCmp<C, T, Input>
+where
+ Input: Stream,
+{
+ cmp: C,
+ tokens: T,
+ _marker: PhantomData<Input>,
+}
+
+impl<Input, C, T> Parser<Input> for TokensCmp<C, T, Input>
+where
+ C: FnMut(T::Item, Input::Token) -> bool,
+ T: Clone + IntoIterator,
+ Input: Stream,
+{
+ type Output = T;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<T, Input::Error> {
+ let start = input.position();
+ let mut committed = false;
+ for c in self.tokens.clone() {
+ match crate::stream::uncons(input) {
+ CommitOk(other) | PeekOk(other) => {
+ if !(self.cmp)(c, other.clone()) {
+ return if committed {
+ let errors = <Input as StreamOnce>::Error::from_error(
+ start,
+ StreamError::unexpected_token(other),
+ );
+ CommitErr(errors)
+ } else {
+ PeekErr(<Input as StreamOnce>::Error::empty(start).into())
+ };
+ }
+ committed = true;
+ }
+ PeekErr(mut error) => {
+ error.error.set_position(start);
+ return if committed {
+ CommitErr(error.error)
+ } else {
+ PeekErr(error)
+ };
+ }
+ CommitErr(mut error) => {
+ error.set_position(start);
+ return CommitErr(error);
+ }
+ }
+ }
+ if committed {
+ CommitOk(self.tokens.clone())
+ } else {
+ PeekOk(self.tokens.clone())
+ }
+ }
+}
+
+/// Parses multiple tokens.
+///
+/// Consumes items from the input and compares them to the values from `tokens` using the
+/// comparison function `cmp`. Succeeds if all the items from `tokens` are matched in the input
+/// stream and fails otherwise.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # fn main() {
+/// # #[allow(deprecated)]
+/// # use std::ascii::AsciiExt;
+/// let result = tokens_cmp("abc".chars(), |l, r| l.eq_ignore_ascii_case(&r))
+/// .parse("AbC")
+/// .map(|x| x.0.as_str());
+/// assert_eq!(result, Ok("abc"));
+/// let result = tokens_cmp(
+/// &b"025"[..],
+/// |&l, r| (if l < r { r - l } else { l - r }) <= 2,
+/// )
+/// .parse(&b"123"[..])
+/// .map(|x| x.0);
+/// assert_eq!(result, Ok(&b"025"[..]));
+/// # }
+/// ```
+pub fn tokens_cmp<C, T, I>(tokens: T, cmp: C) -> TokensCmp<C, T, I>
+where
+ C: FnMut(T::Item, I::Token) -> bool,
+ T: Clone + IntoIterator,
+ I: Stream,
+{
+ TokensCmp {
+ cmp,
+ tokens,
+ _marker: PhantomData,
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct Position<Input>
+where
+ Input: Stream,
+{
+ _marker: PhantomData<Input>,
+}
+
+impl<Input> Parser<Input> for Position<Input>
+where
+ Input: Stream,
+{
+ type Output = Input::Position;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<Input::Position, Input::Error> {
+ PeekOk(input.position())
+ }
+}
+
+/// Parser which just returns the current position in the stream.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::stream::position::{self, SourcePosition};
+/// # fn main() {
+/// let result = (position(), token('!'), position())
+/// .parse(position::Stream::new("!"))
+/// .map(|x| x.0);
+/// assert_eq!(result, Ok((SourcePosition { line: 1, column: 1 },
+/// '!',
+/// SourcePosition { line: 1, column: 2 })));
+/// # }
+/// ```
+pub fn position<Input>() -> Position<Input>
+where
+ Input: Stream,
+{
+ Position {
+ _marker: PhantomData,
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct OneOf<T, Input>
+where
+ Input: Stream,
+{
+ tokens: T,
+ _marker: PhantomData<Input>,
+}
+
+impl<Input, T> Parser<Input> for OneOf<T, Input>
+where
+ T: Clone + IntoIterator<Item = Input::Token>,
+ Input: Stream,
+ Input::Token: PartialEq,
+{
+ type Output = Input::Token;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<Input::Token, Input::Error> {
+ satisfy(|c| self.tokens.clone().into_iter().any(|t| t == c)).parse_lazy(input)
+ }
+
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ for expected in self.tokens.clone() {
+ errors.error.add_expected(error::Token(expected));
+ }
+ }
+}
+
+/// Extracts one token and succeeds if it is part of `tokens`.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # fn main() {
+/// let result = many(one_of("abc".chars()))
+/// .parse("abd");
+/// assert_eq!(result, Ok((String::from("ab"), "d")));
+/// # }
+/// ```
+pub fn one_of<T, Input>(tokens: T) -> OneOf<T, Input>
+where
+ T: Clone + IntoIterator,
+ Input: Stream,
+ Input::Token: PartialEq<T::Item>,
+{
+ OneOf {
+ tokens,
+ _marker: PhantomData,
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct NoneOf<T, Input>
+where
+ Input: Stream,
+{
+ tokens: T,
+ _marker: PhantomData<Input>,
+}
+
+impl<Input, T> Parser<Input> for NoneOf<T, Input>
+where
+ T: Clone + IntoIterator<Item = Input::Token>,
+ Input: Stream,
+ Input::Token: PartialEq,
+{
+ type Output = Input::Token;
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<Input::Token, Input::Error> {
+ satisfy(|c| self.tokens.clone().into_iter().all(|t| t != c)).parse_lazy(input)
+ }
+}
+
+/// Extracts one token and succeeds if it is not part of `tokens`.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::stream::easy;
+/// # use combine::stream::position;
+/// # fn main() {
+/// let mut parser = many1(none_of(b"abc".iter().cloned()));
+/// let result = parser.easy_parse(position::Stream::new(&b"xyb"[..]))
+/// .map(|(output, input)| (output, input.input));
+/// assert_eq!(result, Ok((b"xy"[..].to_owned(), &b"b"[..])));
+///
+/// let result = parser.easy_parse(position::Stream::new(&b"ab"[..]));
+/// assert_eq!(result, Err(easy::Errors {
+/// position: 0,
+/// errors: vec![
+/// easy::Error::Unexpected(easy::Info::Token(b'a')),
+/// ]
+/// }));
+/// # }
+/// ```
+pub fn none_of<T, Input>(tokens: T) -> NoneOf<T, Input>
+where
+ T: Clone + IntoIterator,
+ Input: Stream,
+ Input::Token: PartialEq<T::Item>,
+{
+ NoneOf {
+ tokens,
+ _marker: PhantomData,
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct Value<Input, T>(T, PhantomData<fn(Input) -> Input>);
+impl<Input, T> Parser<Input> for Value<Input, T>
+where
+ Input: Stream,
+ T: Clone,
+{
+ type Output = T;
+ type PartialState = ();
+ #[inline]
+ fn parse_lazy(&mut self, _: &mut Input) -> ParseResult<T, Input::Error> {
+ PeekOk(self.0.clone())
+ }
+}
+
+/// Always returns the value `v` without consuming any input.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # fn main() {
+/// let result = value(42)
+/// .parse("hello world")
+/// .map(|x| x.0);
+/// assert_eq!(result, Ok(42));
+/// # }
+/// ```
+pub fn value<Input, T>(v: T) -> Value<Input, T>
+where
+ Input: Stream,
+ T: Clone,
+{
+ Value(v, PhantomData)
+}
+
+#[derive(Copy, Clone)]
+pub struct Produce<Input, F>(F, PhantomData<fn(Input) -> Input>);
+impl<Input, F, R> Parser<Input> for Produce<Input, F>
+where
+ Input: Stream,
+ F: FnMut() -> R,
+{
+ type Output = R;
+ type PartialState = ();
+ #[inline]
+ fn parse_lazy(&mut self, _: &mut Input) -> ParseResult<R, Input::Error> {
+ PeekOk((self.0)())
+ }
+}
+
+/// Always returns the value produced by calling `f`.
+///
+/// Can be used when `value` cannot be used because the value lacks a `Clone` implementation.
+///
+/// ```
+/// # use combine::*;
+/// #[derive(Debug, PartialEq)]
+/// struct NoClone;
+/// let result = produce(|| vec![NoClone])
+/// .parse("hello world")
+/// .map(|x| x.0);
+/// assert_eq!(result, Ok(vec![NoClone]));
+/// ```
+pub fn produce<Input, F, R>(f: F) -> Produce<Input, F>
+where
+ Input: Stream,
+ F: FnMut() -> R,
+{
+ Produce(f, PhantomData)
+}
+
+#[derive(Copy, Clone)]
+pub struct Eof<Input>(PhantomData<Input>);
+impl<Input> Parser<Input> for Eof<Input>
+where
+ Input: Stream,
+{
+ type Output = ();
+ type PartialState = ();
+
+ #[inline]
+ fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<(), Input::Error> {
+ let before = input.checkpoint();
+ match input.uncons() {
+ Err(ref err) if err.is_unexpected_end_of_input() => PeekOk(()),
+ _ => {
+ ctry!(input.reset(before).committed());
+ PeekErr(<Input as StreamOnce>::Error::empty(input.position()).into())
+ }
+ }
+ }
+
+ fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) {
+ errors.error.add_expected("end of input");
+ }
+}
+
+/// Succeeds only if the stream is at end of input, fails otherwise.
+///
+/// ```
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::stream::easy;
+/// # use combine::stream::position::{self, SourcePosition};
+/// # fn main() {
+/// let mut parser = eof();
+/// assert_eq!(parser.easy_parse(position::Stream::new("")), Ok(((), position::Stream::new(""))));
+/// assert_eq!(parser.easy_parse(position::Stream::new("x")), Err(easy::Errors {
+/// position: SourcePosition::default(),
+/// errors: vec![
+/// easy::Error::Unexpected('x'.into()),
+/// easy::Error::Expected("end of input".into())
+/// ]
+/// }));
+/// # }
+/// ```
+pub fn eof<Input>() -> Eof<Input>
+where
+ Input: Stream,
+{
+ Eof(PhantomData)
+}
diff --git a/src/stream/buf_reader.rs b/src/stream/buf_reader.rs
new file mode 100644
index 0000000..1a10bc9
--- /dev/null
+++ b/src/stream/buf_reader.rs
@@ -0,0 +1,959 @@
+use std::io::{self, BufRead, Read};
+
+#[cfg(any(
+    feature = "futures-03",
+ feature = "tokio-02",
+ feature = "tokio-03",
+ feature = "tokio"
+))]
+use std::{mem::MaybeUninit, pin::Pin};
+
+#[cfg(feature = "futures-util-03")]
+use std::task::{Context, Poll};
+
+#[cfg(feature = "futures-03")]
+use std::future::Future;
+
+use bytes::{Buf, BufMut, BytesMut};
+
+#[cfg(feature = "pin-project-lite")]
+use pin_project_lite::pin_project;
+
+#[cfg(feature = "tokio-03")]
+use tokio_03_dep::io::AsyncBufRead as _;
+
+#[cfg(feature = "tokio")]
+use tokio_dep::io::AsyncBufRead as _;
+
+#[cfg(feature = "futures-util-03")]
+use futures_util_03::ready;
+
+#[cfg(feature = "pin-project-lite")]
+pin_project! {
+ /// `BufReader` used by `Decoder` when it is constructed with [`Decoder::new_bufferless`][]
+ ///
+ /// [`Decoder::new_bufferless`]: ../decoder/struct.Decoder.html#method.new_bufferless
+ #[derive(Debug)]
+ pub struct BufReader<R> {
+ #[pin]
+ inner: R,
+ buf: BytesMut
+ }
+}
+
+#[cfg(not(feature = "pin-project-lite"))]
+/// `BufReader` used by `Decoder` when it is constructed with [`Decoder::new_bufferless`][]
+///
+/// [`Decoder::new_bufferless`]: ../decoder/struct.Decoder.html#method.new_bufferless
+#[derive(Debug)]
+pub struct BufReader<R> {
+ inner: R,
+ buf: BytesMut,
+}
+
+impl<R> BufReader<R> {
+ /// Creates a new `BufReader` with a default buffer capacity. The default is currently 8 KB,
+ /// but may change in the future.
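+    ///
+    /// A minimal synchronous usage sketch (assuming the module is exposed as
+    /// `combine::stream::buf_reader`; any `std::io::Read` value works as the inner reader):
+    ///
+    /// ```ignore
+    /// use std::io::Read;
+    ///
+    /// use combine::stream::buf_reader::BufReader;
+    ///
+    /// let mut reader = BufReader::new(&b"combine"[..]);
+    /// let mut out = String::new();
+    /// reader.read_to_string(&mut out).unwrap();
+    /// assert_eq!(out, "combine");
+    /// ```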
+ pub fn new(inner: R) -> Self {
+ Self::with_capacity(8096, inner)
+ }
+
+ /// Creates a new `BufReader` with the specified buffer capacity.
+ pub fn with_capacity(capacity: usize, inner: R) -> Self {
+ let buf = BytesMut::with_capacity(capacity);
+
+ Self { inner, buf }
+ }
+
+ /// Gets a reference to the underlying reader.
+ ///
+ /// It is inadvisable to directly read from the underlying reader.
+ pub fn get_ref(&self) -> &R {
+ &self.inner
+ }
+
+ /// Gets a mutable reference to the underlying reader.
+ ///
+ /// It is inadvisable to directly read from the underlying reader.
+ pub fn get_mut(&mut self) -> &mut R {
+ &mut self.inner
+ }
+
+ #[cfg(feature = "pin-project-lite")]
+ /// Gets a pinned mutable reference to the underlying reader.
+ ///
+ /// It is inadvisable to directly read from the underlying reader.
+ pub fn get_pin_mut(self: Pin<&mut Self>) -> Pin<&mut R> {
+ self.project().inner
+ }
+
+    /// Consumes this `BufReader`, returning the underlying reader.
+ ///
+ /// Note that any leftover data in the internal buffer is lost.
+ pub fn into_inner(self) -> R {
+ self.inner
+ }
+
+ /// Returns a reference to the internally buffered data.
+ ///
+ /// Unlike `fill_buf`, this will not attempt to fill the buffer if it is empty.
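+    ///
+    /// A small sketch of the difference (assuming the `combine::stream::buf_reader` path):
+    ///
+    /// ```ignore
+    /// use std::io::BufRead;
+    ///
+    /// use combine::stream::buf_reader::BufReader;
+    ///
+    /// let mut reader = BufReader::new(&b"data"[..]);
+    /// assert!(reader.buffer().is_empty()); // nothing has been read yet
+    /// reader.fill_buf().unwrap(); // `fill_buf` does read from the inner reader
+    /// assert_eq!(reader.buffer(), &b"data"[..]);
+    /// ```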
+ pub fn buffer(&self) -> &[u8] {
+ &self.buf
+ }
+
+ /// Invalidates all data in the internal buffer.
+ #[inline]
+ #[cfg(any(feature = "tokio-02", feature = "tokio-03", feature = "tokio"))]
+ fn discard_buffer(self: Pin<&mut Self>) {
+ let me = self.project();
+ me.buf.clear();
+ }
+}
+
+mod sealed {
+ pub trait Sealed {}
+}
+
+#[doc(hidden)]
+pub trait CombineBuffer<R>: sealed::Sealed {
+ fn buffer<'a>(&'a self, read: &'a R) -> &'a [u8];
+
+ fn advance(&mut self, read: &mut R, len: usize);
+
+ #[cfg(feature = "pin-project-lite")]
+ fn advance_pin(&mut self, read: Pin<&mut R>, len: usize);
+}
+
+#[doc(hidden)]
+pub trait CombineSyncRead<R>: CombineBuffer<R> {
+ fn extend_buf_sync(&mut self, read: &mut R) -> io::Result<usize>;
+}
+
+#[cfg(any(feature = "tokio-02", feature = "tokio-03", feature = "tokio"))]
+#[doc(hidden)]
+pub trait CombineRead<R, T: ?Sized>: CombineBuffer<R> {
+ fn poll_extend_buf(
+ &mut self,
+ cx: &mut Context<'_>,
+ read: Pin<&mut R>,
+ ) -> Poll<io::Result<usize>>;
+}
+
+#[cfg(feature = "futures-03")]
+#[doc(hidden)]
+pub trait CombineAsyncRead<R>: CombineBuffer<R> {
+ fn poll_extend_buf(
+ &mut self,
+ cx: &mut Context<'_>,
+ read: Pin<&mut R>,
+ ) -> Poll<io::Result<usize>>;
+
+ fn extend_buf<'a>(&'a mut self, read: Pin<&'a mut R>) -> ExtendBuf<'a, Self, R>
+ where
+ Self: Sized;
+}
+
+#[cfg(feature = "futures-03")]
+pin_project_lite::pin_project! {
+ #[doc(hidden)]
+ pub struct ExtendBuf<'a, C, R> {
+ buffer: &'a mut C,
+ read: Pin<&'a mut R>
+ }
+}
+
+#[cfg(feature = "futures-03")]
+impl<'a, C, R> Future for ExtendBuf<'a, C, R>
+where
+ C: CombineAsyncRead<R>,
+{
+ type Output = io::Result<usize>;
+
+ fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
+ let me = self.project();
+ me.buffer.poll_extend_buf(cx, me.read.as_mut())
+ }
+}
+
+/// Marker used by `Decoder` for an internal buffer
+#[derive(Default)]
+pub struct Buffer(pub(crate) BytesMut);
+
+impl sealed::Sealed for Buffer {}
+
+impl<R> CombineBuffer<R> for Buffer {
+ fn buffer<'a>(&'a self, _read: &'a R) -> &'a [u8] {
+ &self.0
+ }
+
+ fn advance(&mut self, _read: &mut R, len: usize) {
+ self.0.advance(len);
+ }
+
+ #[cfg(feature = "pin-project-lite")]
+ fn advance_pin(&mut self, _read: Pin<&mut R>, len: usize) {
+ self.0.advance(len);
+ }
+}
+
+impl<R> CombineSyncRead<R> for Buffer
+where
+ R: Read,
+{
+ fn extend_buf_sync(&mut self, read: &mut R) -> io::Result<usize> {
+ extend_buf_sync(&mut self.0, read)
+ }
+}
+
+#[cfg(feature = "futures-03")]
+impl<R> CombineAsyncRead<R> for Buffer
+where
+ R: futures_util_03::io::AsyncRead,
+{
+ fn poll_extend_buf(
+ &mut self,
+ cx: &mut Context<'_>,
+ read: Pin<&mut R>,
+ ) -> Poll<io::Result<usize>> {
+ poll_extend_buf(&mut self.0, cx, read)
+ }
+
+ fn extend_buf<'a>(&'a mut self, read: Pin<&'a mut R>) -> ExtendBuf<'a, Self, R> {
+ if !self.0.has_remaining_mut() {
+ self.0.reserve(8 * 1024);
+ }
+        // Adapted from tokio's `read_buf`, except the uninitialized chunk is zero-filled
+        // up front so it can later be handed out as an initialized `&mut [u8]`.
+ let bs = self.0.chunk_mut();
+
+ for i in 0..bs.len() {
+ bs.write_byte(i, 0);
+ }
+ ExtendBuf { buffer: self, read }
+ }
+}
+
+#[cfg(feature = "tokio-02")]
+impl<R> CombineRead<R, dyn tokio_02_dep::io::AsyncRead> for Buffer
+where
+ R: tokio_02_dep::io::AsyncRead,
+{
+ fn poll_extend_buf(
+ &mut self,
+ cx: &mut Context<'_>,
+ read: Pin<&mut R>,
+ ) -> Poll<io::Result<usize>> {
+ if !self.0.has_remaining_mut() {
+ self.0.reserve(8 * 1024);
+ }
+ read.poll_read_buf(cx, &mut Bytes05(&mut self.0))
+ }
+}
+
+#[cfg(feature = "tokio-03")]
+impl<R> CombineRead<R, dyn tokio_03_dep::io::AsyncRead> for Buffer
+where
+ R: tokio_03_dep::io::AsyncRead,
+{
+ fn poll_extend_buf(
+ &mut self,
+ cx: &mut Context<'_>,
+ read: Pin<&mut R>,
+ ) -> Poll<io::Result<usize>> {
+ if !self.0.has_remaining_mut() {
+ self.0.reserve(8 * 1024);
+ }
+ let uninit = self.0.chunk_mut();
+ let mut buf = unsafe {
+ tokio_03_dep::io::ReadBuf::uninit(std::slice::from_raw_parts_mut(
+ uninit.as_mut_ptr() as *mut MaybeUninit<u8>,
+ uninit.len(),
+ ))
+ };
+ ready!(read.poll_read(cx, &mut buf))?;
+ let n = buf.filled().len();
+ unsafe {
+ self.0.advance_mut(n);
+ }
+ Poll::Ready(Ok(n))
+ }
+}
+
+#[cfg(feature = "tokio-03")]
+fn tokio_03_read_buf(
+ cx: &mut Context<'_>,
+ read: Pin<&mut impl tokio_03_dep::io::AsyncRead>,
+ bs: &mut bytes::BytesMut,
+) -> Poll<io::Result<usize>> {
+ if !bs.has_remaining_mut() {
+ bs.reserve(8 * 1024);
+ }
+
+ unsafe {
+ let uninit = bs.chunk_mut();
+ let mut buf = tokio_03_dep::io::ReadBuf::uninit(std::slice::from_raw_parts_mut(
+ uninit.as_mut_ptr() as *mut MaybeUninit<u8>,
+ uninit.len(),
+ ));
+ ready!(read.poll_read(cx, &mut buf))?;
+ let n = buf.filled().len();
+ bs.advance_mut(n);
+ Poll::Ready(Ok(n))
+ }
+}
+
+#[cfg(feature = "tokio")]
+impl<R> CombineRead<R, dyn tokio_dep::io::AsyncRead> for Buffer
+where
+ R: tokio_dep::io::AsyncRead,
+{
+ fn poll_extend_buf(
+ &mut self,
+ cx: &mut Context<'_>,
+ read: Pin<&mut R>,
+ ) -> Poll<io::Result<usize>> {
+ if !self.0.has_remaining_mut() {
+ self.0.reserve(8 * 1024);
+ }
+ let mut buf = unsafe {
+ tokio_dep::io::ReadBuf::uninit(
+ &mut *(self.0.chunk_mut() as *mut _ as *mut [MaybeUninit<u8>]),
+ )
+ };
+ ready!(read.poll_read(cx, &mut buf))?;
+ let n = buf.filled().len();
+ unsafe {
+ self.0.advance_mut(n);
+ }
+ Poll::Ready(Ok(n))
+ }
+}
+
+#[cfg(feature = "tokio")]
+fn tokio_read_buf(
+ cx: &mut Context<'_>,
+ read: Pin<&mut impl tokio_dep::io::AsyncRead>,
+ bs: &mut bytes::BytesMut,
+) -> Poll<io::Result<usize>> {
+ if !bs.has_remaining_mut() {
+ bs.reserve(8 * 1024);
+ }
+
+ unsafe {
+ let uninit = bs.chunk_mut();
+ let mut buf = tokio_dep::io::ReadBuf::uninit(std::slice::from_raw_parts_mut(
+ uninit.as_mut_ptr() as *mut MaybeUninit<u8>,
+ uninit.len(),
+ ));
+ ready!(read.poll_read(cx, &mut buf))?;
+ let n = buf.filled().len();
+ bs.advance_mut(n);
+ Poll::Ready(Ok(n))
+ }
+}
+
+/// Marker used by `Decoder` for an external buffer
+#[derive(Default)]
+pub struct Bufferless;
+
+impl sealed::Sealed for Bufferless {}
+
+impl<R> CombineBuffer<BufReader<R>> for Bufferless {
+ fn buffer<'a>(&'a self, read: &'a BufReader<R>) -> &'a [u8] {
+ &read.buf
+ }
+
+ fn advance(&mut self, read: &mut BufReader<R>, len: usize) {
+ read.buf.advance(len);
+ }
+
+ #[cfg(feature = "pin-project-lite")]
+ fn advance_pin(&mut self, read: Pin<&mut BufReader<R>>, len: usize) {
+ read.project().buf.advance(len);
+ }
+}
+
+impl<R> CombineSyncRead<BufReader<R>> for Bufferless
+where
+ R: Read,
+{
+ fn extend_buf_sync(&mut self, read: &mut BufReader<R>) -> io::Result<usize> {
+ extend_buf_sync(&mut read.buf, &mut read.inner)
+ }
+}
+
+fn extend_buf_sync<R>(buf: &mut BytesMut, read: &mut R) -> io::Result<usize>
+where
+ R: Read,
+{
+ if !buf.has_remaining_mut() {
+ buf.reserve(8 * 1024);
+ }
+
+    // Adapted from tokio's `read_buf`, except the uninitialized chunk is zero-filled
+    // up front since `Read::read` requires an initialized `&mut [u8]`.
+ let copied = unsafe {
+ let n = {
+ let bs = buf.chunk_mut();
+
+ for i in 0..bs.len() {
+ bs.write_byte(i, 0);
+ }
+
+ // Convert to `&mut [u8]`
+ let bs = &mut *(bs as *mut _ as *mut [u8]);
+
+ let n = read.read(bs)?;
+            assert!(n <= bs.len(), "Read reported that it read more than the number of bytes in the buffer");
+ n
+ };
+
+ buf.advance_mut(n);
+ n
+ };
+ Ok(copied)
+}
+
+#[cfg(feature = "tokio-02")]
+struct Bytes05<'a>(&'a mut BytesMut);
+
+#[cfg(feature = "tokio-02")]
+impl bytes_05::BufMut for Bytes05<'_> {
+ fn remaining_mut(&self) -> usize {
+ self.0.remaining_mut()
+ }
+ unsafe fn advance_mut(&mut self, cnt: usize) {
+ self.0.advance_mut(cnt)
+ }
+ fn bytes_mut(&mut self) -> &mut [MaybeUninit<u8>] {
+ unsafe { &mut *(self.0.chunk_mut() as *mut _ as *mut [MaybeUninit<u8>]) }
+ }
+}
+
+#[cfg(feature = "tokio-02")]
+impl<R> CombineRead<BufReader<R>, dyn tokio_02_dep::io::AsyncRead> for Bufferless
+where
+ R: tokio_02_dep::io::AsyncRead,
+{
+ fn poll_extend_buf(
+ &mut self,
+ cx: &mut Context<'_>,
+ read: Pin<&mut BufReader<R>>,
+ ) -> Poll<io::Result<usize>> {
+ let me = read.project();
+
+ if !me.buf.has_remaining_mut() {
+ me.buf.reserve(8 * 1024);
+ }
+ tokio_02_dep::io::AsyncRead::poll_read_buf(me.inner, cx, &mut Bytes05(me.buf))
+ }
+}
+
+#[cfg(feature = "tokio-03")]
+impl<R> CombineRead<BufReader<R>, dyn tokio_03_dep::io::AsyncRead> for Bufferless
+where
+ R: tokio_03_dep::io::AsyncRead,
+{
+ fn poll_extend_buf(
+ &mut self,
+ cx: &mut Context<'_>,
+ read: Pin<&mut BufReader<R>>,
+ ) -> Poll<io::Result<usize>> {
+ let me = read.project();
+
+ tokio_03_read_buf(cx, me.inner, me.buf)
+ }
+}
+
+#[cfg(feature = "tokio")]
+impl<R> CombineRead<BufReader<R>, dyn tokio_dep::io::AsyncRead> for Bufferless
+where
+ R: tokio_dep::io::AsyncRead,
+{
+ fn poll_extend_buf(
+ &mut self,
+ cx: &mut Context<'_>,
+ read: Pin<&mut BufReader<R>>,
+ ) -> Poll<io::Result<usize>> {
+ let me = read.project();
+
+ tokio_read_buf(cx, me.inner, me.buf)
+ }
+}
+
+#[cfg(feature = "futures-03")]
+impl<R> CombineAsyncRead<BufReader<R>> for Bufferless
+where
+ R: futures_util_03::io::AsyncRead,
+{
+ fn poll_extend_buf(
+ &mut self,
+ cx: &mut Context<'_>,
+ read: Pin<&mut BufReader<R>>,
+ ) -> Poll<io::Result<usize>> {
+ let me = read.project();
+
+ poll_extend_buf(me.buf, cx, me.inner)
+ }
+
+ fn extend_buf<'a>(
+ &'a mut self,
+ mut read: Pin<&'a mut BufReader<R>>,
+ ) -> ExtendBuf<'a, Self, BufReader<R>> {
+ let me = read.as_mut().project();
+
+ if !me.buf.has_remaining_mut() {
+ me.buf.reserve(8 * 1024);
+ }
+        // Adapted from tokio's `read_buf`, except the uninitialized chunk is zero-filled
+        // up front so it can later be handed out as an initialized `&mut [u8]`.
+ let bs = me.buf.chunk_mut();
+
+ for i in 0..bs.len() {
+ bs.write_byte(i, 0);
+ }
+ ExtendBuf { buffer: self, read }
+ }
+}
+
+#[cfg(feature = "futures-03")]
+fn poll_extend_buf<R>(
+ buf: &mut BytesMut,
+ cx: &mut Context<'_>,
+ read: Pin<&mut R>,
+) -> Poll<io::Result<usize>>
+where
+ R: futures_util_03::io::AsyncRead,
+{
+ // Copy of tokio's read_buf method (but it has to force initialize the buffer)
+ let copied = unsafe {
+ let n = {
+ let bs = buf.chunk_mut();
+ // Convert to `&mut [u8]`
+ let bs = &mut *(bs as *mut _ as *mut [u8]);
+
+ let n = ready!(read.poll_read(cx, bs))?;
+ assert!(n <= bs.len(), "AsyncRead reported that it initialized more than the number of bytes in the buffer");
+ n
+ };
+
+ buf.advance_mut(n);
+ n
+ };
+ Poll::Ready(Ok(copied))
+}
+
+#[cfg(feature = "tokio-02")]
+impl<R: tokio_02_dep::io::AsyncRead> tokio_02_dep::io::AsyncRead for BufReader<R> {
+ fn poll_read(
+ mut self: Pin<&mut Self>,
+ cx: &mut Context<'_>,
+ buf: &mut [u8],
+ ) -> Poll<io::Result<usize>> {
+ use tokio_02_dep::io::AsyncBufRead;
+
+ // If we don't have any buffered data and we're doing a massive read
+ // (larger than our internal buffer), bypass our internal buffer
+ // entirely.
+ if !self.buf.has_remaining_mut() && buf.len() >= self.buf.len() {
+ let res = ready!(self.as_mut().get_pin_mut().poll_read(cx, buf));
+ self.discard_buffer();
+ return Poll::Ready(res);
+ }
+ let mut rem = ready!(self.as_mut().poll_fill_buf(cx))?;
+ let nread = rem.read(buf)?;
+ self.consume(nread);
+ Poll::Ready(Ok(nread))
+ }
+
+ // we can't skip unconditionally because of the large buffer case in read.
+ unsafe fn prepare_uninitialized_buffer(&self, buf: &mut [MaybeUninit<u8>]) -> bool {
+ self.inner.prepare_uninitialized_buffer(buf)
+ }
+}
+
+#[cfg(feature = "tokio-02")]
+impl<R: tokio_02_dep::io::AsyncRead> tokio_02_dep::io::AsyncBufRead for BufReader<R> {
+ fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>> {
+ let me = self.project();
+
+        // If we've reached the end of our internal buffer then we need to fetch
+        // some more data from the underlying reader.
+
+ if me.buf.is_empty() {
+ ready!(me.inner.poll_read_buf(cx, &mut Bytes05(me.buf)))?;
+ }
+ Poll::Ready(Ok(&me.buf[..]))
+ }
+
+ fn consume(self: Pin<&mut Self>, amt: usize) {
+ let me = self.project();
+ me.buf.advance(amt);
+ }
+}
+
+#[cfg(feature = "tokio-02")]
+impl<R: tokio_02_dep::io::AsyncRead + tokio_02_dep::io::AsyncWrite> tokio_02_dep::io::AsyncWrite
+ for BufReader<R>
+{
+ fn poll_write(
+ self: Pin<&mut Self>,
+ cx: &mut Context<'_>,
+ buf: &[u8],
+ ) -> Poll<io::Result<usize>> {
+ self.get_pin_mut().poll_write(cx, buf)
+ }
+
+ fn poll_write_buf<B: bytes_05::Buf>(
+ self: Pin<&mut Self>,
+ cx: &mut Context<'_>,
+ buf: &mut B,
+ ) -> Poll<io::Result<usize>> {
+ self.get_pin_mut().poll_write_buf(cx, buf)
+ }
+
+ fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
+ self.get_pin_mut().poll_flush(cx)
+ }
+
+ fn poll_shutdown(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
+ self.get_pin_mut().poll_shutdown(cx)
+ }
+}
+
+#[cfg(feature = "tokio-03")]
+impl<R: tokio_03_dep::io::AsyncRead> tokio_03_dep::io::AsyncRead for BufReader<R> {
+ fn poll_read(
+ mut self: Pin<&mut Self>,
+ cx: &mut Context<'_>,
+ buf: &mut tokio_03_dep::io::ReadBuf<'_>,
+ ) -> Poll<io::Result<()>> {
+ // If we don't have any buffered data and we're doing a massive read
+ // (larger than our internal buffer), bypass our internal buffer
+ // entirely.
+ if !self.buf.has_remaining_mut() && buf.remaining() >= self.buf.len() {
+ let res = ready!(self.as_mut().get_pin_mut().poll_read(cx, buf));
+ self.discard_buffer();
+ return Poll::Ready(res);
+ }
+ let rem = ready!(self.as_mut().poll_fill_buf(cx))?;
+ let amt = std::cmp::min(rem.len(), buf.remaining());
+ buf.put_slice(&rem[..amt]);
+ self.consume(amt);
+ Poll::Ready(Ok(()))
+ }
+}
+
+#[cfg(feature = "tokio-03")]
+impl<R: tokio_03_dep::io::AsyncRead> tokio_03_dep::io::AsyncBufRead for BufReader<R> {
+ fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>> {
+ let me = self.project();
+
+ // If we've reached the end of our internal buffer then we need to fetch
+ // some more data from the underlying reader.
+ if me.buf.is_empty() {
+ ready!(tokio_03_read_buf(cx, me.inner, me.buf))?;
+ }
+ Poll::Ready(Ok(&me.buf[..]))
+ }
+
+ fn consume(self: Pin<&mut Self>, amt: usize) {
+ let me = self.project();
+ me.buf.advance(amt);
+ }
+}
+
+#[cfg(feature = "tokio-03")]
+impl<R: tokio_03_dep::io::AsyncRead + tokio_03_dep::io::AsyncWrite> tokio_03_dep::io::AsyncWrite
+ for BufReader<R>
+{
+ fn poll_write(
+ self: Pin<&mut Self>,
+ cx: &mut Context<'_>,
+ buf: &[u8],
+ ) -> Poll<io::Result<usize>> {
+ self.get_pin_mut().poll_write(cx, buf)
+ }
+
+ fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
+ self.get_pin_mut().poll_flush(cx)
+ }
+
+ fn poll_shutdown(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
+ self.get_pin_mut().poll_shutdown(cx)
+ }
+}
+
+#[cfg(feature = "tokio")]
+impl<R: tokio_dep::io::AsyncRead> tokio_dep::io::AsyncRead for BufReader<R> {
+ fn poll_read(
+ mut self: Pin<&mut Self>,
+ cx: &mut Context<'_>,
+ buf: &mut tokio_dep::io::ReadBuf<'_>,
+ ) -> Poll<io::Result<()>> {
+ // If we don't have any buffered data and we're doing a massive read
+ // (larger than our internal buffer), bypass our internal buffer
+ // entirely.
+ if !self.buf.has_remaining_mut() && buf.remaining() >= self.buf.len() {
+ let res = ready!(self.as_mut().get_pin_mut().poll_read(cx, buf));
+ self.discard_buffer();
+ return Poll::Ready(res);
+ }
+ let rem = ready!(self.as_mut().poll_fill_buf(cx))?;
+ let amt = std::cmp::min(rem.len(), buf.remaining());
+ buf.put_slice(&rem[..amt]);
+ self.consume(amt);
+ Poll::Ready(Ok(()))
+ }
+}
+
+#[cfg(feature = "tokio")]
+impl<R: tokio_dep::io::AsyncRead> tokio_dep::io::AsyncBufRead for BufReader<R> {
+ fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>> {
+ let me = self.project();
+
+ // If we've reached the end of our internal buffer then we need to fetch
+ // some more data from the underlying reader.
+ if me.buf.is_empty() {
+ ready!(tokio_read_buf(cx, me.inner, me.buf))?;
+ }
+ Poll::Ready(Ok(&me.buf[..]))
+ }
+
+ fn consume(self: Pin<&mut Self>, amt: usize) {
+ let me = self.project();
+ me.buf.advance(amt);
+ }
+}
+
+#[cfg(feature = "tokio")]
+impl<R: tokio_dep::io::AsyncRead + tokio_dep::io::AsyncWrite> tokio_dep::io::AsyncWrite
+ for BufReader<R>
+{
+ fn poll_write(
+ self: Pin<&mut Self>,
+ cx: &mut Context<'_>,
+ buf: &[u8],
+ ) -> Poll<io::Result<usize>> {
+ self.get_pin_mut().poll_write(cx, buf)
+ }
+
+ fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
+ self.get_pin_mut().poll_flush(cx)
+ }
+
+ fn poll_shutdown(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
+ self.get_pin_mut().poll_shutdown(cx)
+ }
+}
+
+impl<R: Read> Read for BufReader<R> {
+ fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+ // If we don't have any buffered data and we're doing a massive read
+ // (larger than our internal buffer), bypass our internal buffer
+ // entirely.
+ if !self.buf.has_remaining_mut() && buf.len() >= self.buf.len() {
+            let res = self.inner.read(buf);
+ self.buf.clear();
+ return res;
+ }
+ let nread = {
+ let mut rem = self.fill_buf()?;
+ rem.read(buf)?
+ };
+ self.consume(nread);
+ Ok(nread)
+ }
+}
+
+impl<R: Read> BufRead for BufReader<R> {
+ fn fill_buf(&mut self) -> io::Result<&[u8]> {
+        // If we've reached the end of our internal buffer then we need to fetch
+        // some more data from the underlying reader.
+
+ if self.buf.is_empty() {
+ Bufferless.extend_buf_sync(self)?;
+ }
+ Ok(&self.buf[..])
+ }
+
+ fn consume(&mut self, amt: usize) {
+ self.buf.advance(amt);
+ }
+}
+
+#[cfg(test)]
+#[cfg(feature = "tokio-02")]
+mod tests {
+ use super::{BufReader, Bufferless, CombineRead};
+
+ use std::{io, pin::Pin};
+
+ use {
+ bytes_05::BytesMut,
+ tokio_02_dep::{
+ self as tokio,
+ io::{AsyncRead, AsyncReadExt},
+ },
+ };
+
+ impl<R: AsyncRead> BufReader<R> {
+ async fn extend_buf_tokio_02(mut self: Pin<&mut Self>) -> io::Result<usize> {
+ futures_util_03::future::poll_fn(|cx| Bufferless.poll_extend_buf(cx, self.as_mut()))
+ .await
+ }
+ }
+
+ #[tokio::test]
+ async fn buf_reader() {
+ let mut read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);
+
+ let mut buf = [0u8; 3];
+ read.read(&mut buf).await.unwrap();
+ assert_eq!(buf, [1, 2, 3]);
+
+ let mut buf = [0u8; 3];
+ read.read(&mut buf).await.unwrap();
+ assert_eq!(buf, [4, 5, 6]);
+
+ let mut buf = [0u8; 3];
+ read.read(&mut buf).await.unwrap();
+ assert_eq!(buf, [7, 8, 9]);
+
+ let mut buf = [1u8; 3];
+ read.read(&mut buf).await.unwrap();
+ assert_eq!(buf, [0, 1, 1]);
+ }
+
+ #[tokio::test]
+ async fn buf_reader_buf() {
+ let mut read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);
+
+ let mut buf = BytesMut::with_capacity(3);
+ read.read_buf(&mut buf).await.unwrap();
+ assert_eq!(&buf[..], [1, 2, 3]);
+
+ read.read_buf(&mut buf).await.unwrap();
+ assert_eq!(&buf[..], [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]);
+ }
+
+ #[tokio::test]
+ async fn buf_reader_extend_buf() {
+ let read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);
+ futures_util_03::pin_mut!(read);
+
+ assert_eq!(read.as_mut().extend_buf_tokio_02().await.unwrap(), 3);
+ assert_eq!(read.buffer(), [1, 2, 3]);
+
+ assert_eq!(read.as_mut().extend_buf_tokio_02().await.unwrap(), 7);
+ assert_eq!(read.buffer(), [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]);
+ }
+}
+
+#[cfg(test)]
+#[cfg(feature = "tokio")]
+mod tests_tokio_1 {
+ use super::{BufReader, Bufferless, CombineRead};
+
+ use std::{io, pin::Pin};
+
+ use {
+ bytes::BytesMut,
+ tokio_dep::{
+ self as tokio,
+ io::{AsyncRead, AsyncReadExt},
+ },
+ };
+
+ impl<R: AsyncRead> BufReader<R> {
+ async fn extend_buf_tokio(mut self: Pin<&mut Self>) -> io::Result<usize> {
+ futures_util_03::future::poll_fn(|cx| Bufferless.poll_extend_buf(cx, self.as_mut()))
+ .await
+ }
+ }
+
+ #[tokio::test]
+ async fn buf_reader() {
+ let mut read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);
+
+ let mut buf = [0u8; 3];
+ read.read(&mut buf).await.unwrap();
+ assert_eq!(buf, [1, 2, 3]);
+
+ let mut buf = [0u8; 3];
+ read.read(&mut buf).await.unwrap();
+ assert_eq!(buf, [4, 5, 6]);
+
+ let mut buf = [0u8; 3];
+ read.read(&mut buf).await.unwrap();
+ assert_eq!(buf, [7, 8, 9]);
+
+ let mut buf = [1u8; 3];
+ read.read(&mut buf).await.unwrap();
+ assert_eq!(buf, [0, 1, 1]);
+ }
+
+ #[tokio::test]
+ async fn buf_reader_buf() {
+ let mut read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);
+
+ let mut buf = BytesMut::with_capacity(3);
+ read.read_buf(&mut buf).await.unwrap();
+ assert_eq!(&buf[..], [1, 2, 3]);
+
+ read.read_buf(&mut buf).await.unwrap();
+ assert_eq!(&buf[..], [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]);
+ }
+
+ #[tokio::test]
+ async fn buf_reader_extend_buf() {
+ let read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);
+ futures_util_03::pin_mut!(read);
+
+ assert_eq!(read.as_mut().extend_buf_tokio().await.unwrap(), 3);
+ assert_eq!(read.buffer(), [1, 2, 3]);
+
+ assert_eq!(read.as_mut().extend_buf_tokio().await.unwrap(), 7);
+ assert_eq!(read.buffer(), [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]);
+ }
+}
+
+#[cfg(test)]
+mod tests_sync {
+ use super::{BufReader, Bufferless, CombineSyncRead};
+
+ use std::io::Read;
+
+ #[test]
+ fn buf_reader() {
+ let mut read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);
+
+ let mut buf = [0u8; 3];
+ read.read(&mut buf).unwrap();
+ assert_eq!(buf, [1, 2, 3]);
+
+ let mut buf = [0u8; 3];
+ read.read(&mut buf).unwrap();
+ assert_eq!(buf, [4, 5, 6]);
+
+ let mut buf = [0u8; 3];
+ read.read(&mut buf).unwrap();
+ assert_eq!(buf, [7, 8, 9]);
+
+ let mut buf = [1u8; 3];
+ read.read(&mut buf).unwrap();
+ assert_eq!(buf, [0, 1, 1]);
+ }
+
+ #[test]
+ fn buf_reader_extend_buf() {
+ let mut read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);
+
+ assert_eq!(Bufferless.extend_buf_sync(&mut read).unwrap(), 3);
+ assert_eq!(read.buffer(), [1, 2, 3]);
+
+ assert_eq!(Bufferless.extend_buf_sync(&mut read).unwrap(), 7);
+ assert_eq!(read.buffer(), [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]);
+ }
+}
diff --git a/src/stream/buffered.rs b/src/stream/buffered.rs
new file mode 100644
index 0000000..93f7b82
--- /dev/null
+++ b/src/stream/buffered.rs
@@ -0,0 +1,141 @@
+use std::collections::VecDeque;
+
+use crate::{
+ error::StreamError,
+ stream::{ParseError, Positioned, ResetStream, StreamErrorFor, StreamOnce},
+};
+
+/// `Stream` which buffers items from an instance of `StreamOnce` into a ring buffer.
+/// Instances of `StreamOnce` which are not able to implement `ResetStream` (such as `ReadStream`)
+/// may use this as a way to implement `ResetStream` and become a full `Stream` instance.
+///
+/// The drawback is that the buffer only stores a limited number of items, which limits how many
+/// tokens can be reset and replayed. If a `buffered::Stream` is reset past this limit an error
+/// will be returned when `uncons` is next called.
+///
+/// NOTE: If this stream is used in conjunction with an error-enhancing stream such as
+/// `easy::Stream` (also used via the `easy_parse` method) it is recommended that the
+/// `buffered::Stream` instance wraps the `easy::Stream` instance instead of the other way around.
+///
+/// ```ignore
+/// // DO
+/// buffered::Stream::new(easy::Stream(..), ..)
+/// // DON'T
+/// easy::Stream(buffered::Stream::new(.., ..))
+/// parser.easy_parse(buffered::Stream::new(..));
+/// ```
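+///
+/// A concrete sketch using `&str` as the underlying stream (any `StreamOnce + Positioned`
+/// input with cloneable tokens behaves the same way):
+///
+/// ```ignore
+/// use combine::stream::{buffered, ResetStream, StreamOnce};
+///
+/// // Keep up to 8 already-read tokens around for backtracking.
+/// let mut stream = buffered::Stream::new("abc", 8);
+/// let checkpoint = stream.checkpoint();
+/// assert_eq!(stream.uncons(), Ok('a'));
+/// assert_eq!(stream.uncons(), Ok('b'));
+/// // Rewind to the checkpoint and replay the buffered tokens.
+/// stream.reset(checkpoint).unwrap();
+/// assert_eq!(stream.uncons(), Ok('a'));
+/// ```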
+#[derive(Debug, PartialEq)]
+pub struct Stream<Input>
+where
+ Input: StreamOnce + Positioned,
+{
+ offset: usize,
+ iter: Input,
+ buffer_offset: usize,
+ buffer: VecDeque<(Input::Token, Input::Position)>,
+}
+
+impl<Input> ResetStream for Stream<Input>
+where
+ Input: Positioned,
+{
+ type Checkpoint = usize;
+
+ fn checkpoint(&self) -> Self::Checkpoint {
+ self.offset
+ }
+
+ fn reset(&mut self, checkpoint: Self::Checkpoint) -> Result<(), Self::Error> {
+ if checkpoint < self.buffer_offset - self.buffer.len() {
+            // We have backtracked too far
+ Err(Self::Error::from_error(
+ self.position(),
+ StreamErrorFor::<Self>::message_static_message("Backtracked to far"),
+ ))
+ } else {
+ self.offset = checkpoint;
+ Ok(())
+ }
+ }
+}
+
+impl<Input> Stream<Input>
+where
+ Input: StreamOnce + Positioned,
+ Input::Position: Clone,
+ Input::Token: Clone,
+{
+    /// Constructs a new `buffered::Stream` from a `StreamOnce` instance, with room for
+    /// `lookahead` elements in the buffer.
+ pub fn new(iter: Input, lookahead: usize) -> Stream<Input> {
+ Stream {
+ offset: 0,
+ iter,
+ buffer_offset: 0,
+ buffer: VecDeque::with_capacity(lookahead),
+ }
+ }
+}
+
+impl<Input> Positioned for Stream<Input>
+where
+ Input: StreamOnce + Positioned,
+{
+ #[inline]
+ fn position(&self) -> Self::Position {
+ if self.offset >= self.buffer_offset {
+ self.iter.position()
+ } else if self.offset < self.buffer_offset - self.buffer.len() {
+ self.buffer
+ .front()
+ .expect("At least 1 element in the buffer")
+ .1
+ .clone()
+ } else {
+ self.buffer[self.buffer.len() - (self.buffer_offset - self.offset)]
+ .1
+ .clone()
+ }
+ }
+}
+
+impl<Input> StreamOnce for Stream<Input>
+where
+ Input: StreamOnce + Positioned,
+ Input::Token: Clone,
+{
+ type Token = Input::Token;
+ type Range = Input::Range;
+ type Position = Input::Position;
+ type Error = Input::Error;
+
+ #[inline]
+ fn uncons(&mut self) -> Result<Input::Token, StreamErrorFor<Self>> {
+ if self.offset >= self.buffer_offset {
+ let position = self.iter.position();
+ let token = self.iter.uncons()?;
+ self.buffer_offset += 1;
+            // We want the VecDeque to only keep the last .capacity() elements so we need to remove
+            // an element if it gets too large
+ if self.buffer.len() == self.buffer.capacity() {
+ self.buffer.pop_front();
+ }
+ self.buffer.push_back((token.clone(), position));
+ self.offset += 1;
+ Ok(token)
+ } else if self.offset < self.buffer_offset - self.buffer.len() {
+            // We have backtracked too far
+ Err(StreamError::message_static_message("Backtracked to far"))
+ } else {
+ let value = self.buffer[self.buffer.len() - (self.buffer_offset - self.offset)]
+ .0
+ .clone();
+ self.offset += 1;
+ Ok(value)
+ }
+ }
+
+ fn is_partial(&self) -> bool {
+ self.iter.is_partial()
+ }
+}
diff --git a/src/stream/decoder.rs b/src/stream/decoder.rs
new file mode 100644
index 0000000..9807d80
--- /dev/null
+++ b/src/stream/decoder.rs
@@ -0,0 +1,227 @@
+use crate::{
+ error::ParseError,
+ stream::buf_reader::{Buffer, Bufferless, CombineBuffer},
+};
+
+use std::{
+ fmt,
+ io::{self, Read},
+};
+
+#[cfg(feature = "pin-project-lite")]
+use std::pin::Pin;
+
+#[derive(Debug)]
+pub enum Error<E, P> {
+ Parse(E),
+ Io { position: P, error: io::Error },
+}
+
+impl<'a, P> From<Error<crate::easy::Errors<u8, &'a [u8], P>, P>>
+ for crate::easy::Errors<u8, &'a [u8], P>
+where
+ P: Ord + Clone,
+{
+ fn from(e: Error<crate::easy::Errors<u8, &'a [u8], P>, P>) -> Self {
+ match e {
+ Error::Parse(e) => e,
+ Error::Io { position, error } => {
+ crate::easy::Errors::from_error(position, crate::easy::Error::Other(error.into()))
+ }
+ }
+ }
+}
+
+impl<E, P> std::error::Error for Error<E, P>
+where
+ E: std::error::Error,
+ P: fmt::Display + fmt::Debug,
+{
+}
+
+impl<E: fmt::Display, P: fmt::Display> fmt::Display for Error<E, P> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Error::Parse(e) => e.fmt(f),
+ Error::Io { position: _, error } => error.fmt(f),
+ }
+ }
+}
+
+#[derive(Default)]
+/// Used together with the `decode!` macro
+pub struct Decoder<S, P, C = Buffer> {
+ position: P,
+ state: S,
+ buffer: C,
+ end_of_input: bool,
+}
+
+impl<S, P> Decoder<S, P, Buffer>
+where
+ P: Default,
+ S: Default,
+{
+ /// Constructs a new [`Decoder`] with an internal buffer. Allows any `AsyncRead/Read` instance to
+ /// be used when decoding but there may be data left in the internal buffer after decoding
+ /// (accessible with [`Decoder::buffer`])
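+    ///
+    /// A construction sketch (assuming the module is exposed as `combine::stream::decoder`;
+    /// `()` stands in for whatever partial-state type the chosen parser uses):
+    ///
+    /// ```ignore
+    /// use combine::stream::decoder::Decoder;
+    ///
+    /// let decoder: Decoder<(), usize> = Decoder::new();
+    /// // Nothing has been decoded yet, so the internal buffer is empty.
+    /// assert!(decoder.buffer().is_empty());
+    /// ```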
+ pub fn new() -> Self {
+ Decoder::default()
+ }
+
+ /// Constructs a new [`Decoder`] with an internal buffer. Allows any `AsyncRead/Read` instance to
+ /// be used when decoding but there may be data left in the internal buffer after decoding
+ /// (accessible with [`Decoder::buffer`])
+ pub fn new_buffer() -> Self {
+ Decoder::new()
+ }
+}
+
+impl<S, P> Decoder<S, P, Bufferless>
+where
+ P: Default,
+ S: Default,
+{
+    /// Constructs a new `Decoder` without an internal buffer. Requires the read instance to be
+    /// wrapped with combine's [`BufReader`] so that the buffered data lives in the reader instead.
+ ///
+ /// [`BufReader`]: super::buf_reader::BufReader
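+    ///
+    /// A pairing sketch (assuming the `combine::stream::{buf_reader, decoder}` paths; `()`
+    /// stands in for the parser's partial-state type):
+    ///
+    /// ```ignore
+    /// use combine::stream::buf_reader::{BufReader, Bufferless};
+    /// use combine::stream::decoder::Decoder;
+    ///
+    /// // The buffered data lives in the `BufReader`, not in the decoder itself.
+    /// let _decoder: Decoder<(), usize, Bufferless> = Decoder::new_bufferless();
+    /// let _reader = BufReader::new(&b"input"[..]);
+    /// ```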
+ pub fn new_bufferless() -> Self {
+ Decoder::default()
+ }
+}
+
+impl<S, P> Decoder<S, P> {
+ pub fn buffer(&self) -> &[u8] {
+ &self.buffer.0
+ }
+}
+
+impl<S, P, C> Decoder<S, P, C> {
+ #[doc(hidden)]
+ pub fn advance<R>(&mut self, read: &mut R, removed: usize)
+ where
+ C: CombineBuffer<R>,
+ {
+        // Remove the `removed` bytes of already parsed data from the buffer
+        // (or from the wrapped reader's buffer when using `Bufferless`)
+ self.buffer.advance(read, removed)
+ }
+
+ #[doc(hidden)]
+ #[cfg(feature = "pin-project-lite")]
+ pub fn advance_pin<R>(&mut self, read: Pin<&mut R>, removed: usize)
+ where
+ C: CombineBuffer<R>,
+ {
+        // Remove the `removed` bytes of already parsed data from the buffer
+        // (or from the wrapped reader's buffer when using `Bufferless`)
+ self.buffer.advance_pin(read, removed);
+ }
+
+ pub fn position(&self) -> &P {
+ &self.position
+ }
+
+ #[doc(hidden)]
+ pub fn __inner(&mut self) -> (&mut S, &mut P, &C, bool) {
+ (
+ &mut self.state,
+ &mut self.position,
+ &self.buffer,
+ self.end_of_input,
+ )
+ }
+}
+
+impl<S, P, C> Decoder<S, P, C> {
+ #[doc(hidden)]
+ pub fn __before_parse<R>(&mut self, mut reader: R) -> io::Result<()>
+ where
+ R: Read,
+ C: crate::stream::buf_reader::CombineSyncRead<R>,
+ {
+ if self.buffer.extend_buf_sync(&mut reader)? == 0 {
+ self.end_of_input = true;
+ }
+
+ Ok(())
+ }
+}
+
+#[cfg(feature = "tokio-02")]
+impl<S, P, C> Decoder<S, P, C> {
+ #[doc(hidden)]
+ pub async fn __before_parse_tokio_02<R>(&mut self, mut reader: Pin<&mut R>) -> io::Result<()>
+ where
+ R: tokio_02_dep::io::AsyncRead,
+ C: crate::stream::buf_reader::CombineRead<R, dyn tokio_02_dep::io::AsyncRead>,
+ {
+ let copied =
+ futures_util_03::future::poll_fn(|cx| self.buffer.poll_extend_buf(cx, reader.as_mut()))
+ .await?;
+ if copied == 0 {
+ self.end_of_input = true;
+ }
+
+ Ok(())
+ }
+}
+
+#[cfg(feature = "tokio-03")]
+impl<S, P, C> Decoder<S, P, C> {
+ #[doc(hidden)]
+ pub async fn __before_parse_tokio_03<R>(&mut self, mut reader: Pin<&mut R>) -> io::Result<()>
+ where
+ R: tokio_03_dep::io::AsyncRead,
+ C: crate::stream::buf_reader::CombineRead<R, dyn tokio_03_dep::io::AsyncRead>,
+ {
+ let copied =
+ futures_util_03::future::poll_fn(|cx| self.buffer.poll_extend_buf(cx, reader.as_mut()))
+ .await?;
+ if copied == 0 {
+ self.end_of_input = true;
+ }
+
+ Ok(())
+ }
+}
+
+#[cfg(feature = "tokio")]
+impl<S, P, C> Decoder<S, P, C> {
+ #[doc(hidden)]
+ pub async fn __before_parse_tokio<R>(&mut self, mut reader: Pin<&mut R>) -> io::Result<()>
+ where
+ R: tokio_dep::io::AsyncRead,
+ C: crate::stream::buf_reader::CombineRead<R, dyn tokio_dep::io::AsyncRead>,
+ {
+ let copied =
+ futures_util_03::future::poll_fn(|cx| self.buffer.poll_extend_buf(cx, reader.as_mut()))
+ .await?;
+ if copied == 0 {
+ self.end_of_input = true;
+ }
+
+ Ok(())
+ }
+}
+
+#[cfg(feature = "futures-03")]
+impl<S, P, C> Decoder<S, P, C> {
+ #[doc(hidden)]
+ pub async fn __before_parse_async<R>(&mut self, reader: Pin<&mut R>) -> io::Result<()>
+ where
+ R: futures_io_03::AsyncRead,
+ C: crate::stream::buf_reader::CombineAsyncRead<R>,
+ {
+ let copied = self.buffer.extend_buf(reader).await?;
+
+ if copied == 0 {
+ self.end_of_input = true;
+ }
+ Ok(())
+ }
+}
diff --git a/src/stream/easy.rs b/src/stream/easy.rs
new file mode 100644
index 0000000..a0c25f7
--- /dev/null
+++ b/src/stream/easy.rs
@@ -0,0 +1,897 @@
+//! Stream wrapper which provides an informative and easy-to-use error type.
+//!
+//! Unless you have specific constraints preventing you from using this error type (such as being
+//! in a `no_std` environment) you probably want to use this stream type. It can easily be used
+//! through the [`EasyParser::easy_parse`] method.
+//!
+//! The provided `Errors` type is roughly the same as `ParseError` in combine 1.x and 2.x.
+//!
+//! ```
+//! #[macro_use]
+//! extern crate combine;
+//! use combine::{easy, Parser, EasyParser, Stream, many1};
+//! use combine::parser::char::letter;
+//! use combine::stream::StreamErrorFor;
+//! use combine::error::{ParseError, StreamError};
+//!
+//! fn main() {
+//! parser!{
+//! fn parser[Input]()(Input) -> String
+//! where [
+//! Input: Stream<Token = char, Error = easy::ParseError<Input>>,
+//! Input::Range: PartialEq,
+//! // If we want to use the error type explicitly we need to help rustc infer
+//! // `StreamError` to `easy::Error` (rust-lang/rust#24159)
+//! Input::Error: ParseError<
+//! Input::Token,
+//! Input::Range,
+//! Input::Position,
+//! StreamError = easy::Error<Input::Token, Input::Range>
+//! >
+//! ]
+//! {
+//! many1(letter()).and_then(|word: String| {
+//! if word == "combine" {
+//! Ok(word)
+//! } else {
+//! Err(easy::Error::Expected(easy::Info::Static("combine")))
+//! }
+//! })
+//! }
+//! }
+//!
+//! parser!{
+//! fn parser2[Input]()(Input) -> String
+//! where [
+//! Input: Stream<Token = char>,
+//! ]
+//! {
+//! many1(letter()).and_then(|word: String| {
+//! if word == "combine" {
+//! Ok(word)
+//! } else {
+//! // Alternatively it is possible to only use the methods provided by the
+//! // `StreamError` trait.
+//! // In that case the extra bound is not necessary (and this method will work
+//! // for other errors than `easy::Errors`)
+//! Err(StreamErrorFor::<Input>::expected_static_message("combine"))
+//! }
+//! })
+//! }
+//! }
+//!
+//! let input = "combin";
+//! let expected_error = Err(easy::Errors {
+//! errors: vec![
+//! easy::Error::Expected("combine".into())
+//! ],
+//! position: 0,
+//! });
+//! assert_eq!(
+//! parser().easy_parse(input).map_err(|err| err.map_position(|p| p.translate_position(input))),
+//! expected_error
+//! );
+//! assert_eq!(
+//! parser2().easy_parse(input).map_err(|err| err.map_position(|p| p.translate_position(input))),
+//! expected_error
+//! );
+//! }
+//!
+//! ```
+//!
+//! [`EasyParser::easy_parse`]: super::super::parser::EasyParser::easy_parse
+use std::{error::Error as StdError, fmt};
+
+use crate::error::{Info as PrimitiveInfo, ParseResult, StreamError, Tracked};
+
+use crate::stream::{
+ Positioned, RangeStream, RangeStreamOnce, ResetStream, StreamErrorFor, StreamOnce,
+};
+
+/// Enum holding error information. Variants are defined for `Stream::Token` and `Stream::Range` as
+/// well as string variants holding plain descriptions.
+///
+/// As there are implementations of `From` for `String` and `&'static str`, the
+/// constructors need not be used directly; calling `msg.into()` turns a message into the
+/// correct `Info` variant.
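+///
+/// For instance, a `&'static str` converts into the `Static` variant:
+///
+/// ```
+/// use combine::easy::Info;
+///
+/// let info: Info<char, &str> = "end of input".into();
+/// assert_eq!(info, Info::Static("end of input"));
+/// ```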
+#[derive(Clone, Debug)]
+pub enum Info<T, R> {
+ Token(T),
+ Range(R),
+ Owned(String),
+ Static(&'static str),
+}
+
+impl<T, R, F> From<PrimitiveInfo<T, R, F>> for Info<T, R>
+where
+ F: fmt::Display,
+{
+ fn from(info: PrimitiveInfo<T, R, F>) -> Self {
+ match info {
+ PrimitiveInfo::Token(b) => Info::Token(b),
+ PrimitiveInfo::Range(b) => Info::Range(b),
+ PrimitiveInfo::Static(b) => Info::Static(b),
+ PrimitiveInfo::Format(b) => Info::Owned(b.to_string()),
+ }
+ }
+}
+
+impl<T, R> Info<T, R> {
+ pub fn map_token<F, U>(self, f: F) -> Info<U, R>
+ where
+ F: FnOnce(T) -> U,
+ {
+ use self::Info::*;
+
+ match self {
+ Token(t) => Token(f(t)),
+ Range(r) => Range(r),
+ Owned(s) => Owned(s),
+ Static(x) => Static(x),
+ }
+ }
+
+ pub fn map_range<F, S>(self, f: F) -> Info<T, S>
+ where
+ F: FnOnce(R) -> S,
+ {
+ use self::Info::*;
+
+ match self {
+ Token(t) => Token(t),
+ Range(r) => Range(f(r)),
+ Owned(s) => Owned(s),
+ Static(x) => Static(x),
+ }
+ }
+}
+
+impl<T: PartialEq, R: PartialEq> PartialEq for Info<T, R> {
+ fn eq(&self, other: &Info<T, R>) -> bool {
+ match (self, other) {
+ (&Info::Token(ref l), &Info::Token(ref r)) => l == r,
+ (&Info::Range(ref l), &Info::Range(ref r)) => l == r,
+ (&Info::Owned(ref l), &Info::Owned(ref r)) => l == r,
+ (&Info::Static(l), &Info::Owned(ref r)) => l == r,
+ (&Info::Owned(ref l), &Info::Static(r)) => l == r,
+ (&Info::Static(l), &Info::Static(r)) => l == r,
+ _ => false,
+ }
+ }
+}
+impl<T: fmt::Display, R: fmt::Display> fmt::Display for Info<T, R> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Info::Token(ref c) => write!(f, "{}", c),
+ Info::Range(ref c) => write!(f, "{}", c),
+ Info::Owned(ref s) => write!(f, "{}", s),
+ Info::Static(s) => write!(f, "{}", s),
+ }
+ }
+}
+
+impl<R> From<char> for Info<char, R> {
+ fn from(s: char) -> Info<char, R> {
+ Info::Token(s)
+ }
+}
+impl<T, R> From<String> for Info<T, R> {
+ fn from(s: String) -> Info<T, R> {
+ Info::Owned(s)
+ }
+}
+
+impl<T, R> From<&'static str> for Info<T, R> {
+ fn from(s: &'static str) -> Info<T, R> {
+ Info::Static(s)
+ }
+}
+
+impl<R> From<u8> for Info<u8, R> {
+ fn from(s: u8) -> Info<u8, R> {
+ Info::Token(s)
+ }
+}
+
+/// Enum used to store information about an error that has occurred during parsing.
+#[derive(Debug)]
+pub enum Error<T, R> {
+ /// Error indicating an unexpected token has been encountered in the stream
+ Unexpected(Info<T, R>),
+ /// Error indicating that the parser expected something else
+ Expected(Info<T, R>),
+ /// Generic message
+ Message(Info<T, R>),
+ /// Variant for containing other types of errors
+ Other(Box<dyn StdError + Send + Sync>),
+}
+
+impl<Item, Range> StreamError<Item, Range> for Error<Item, Range>
+where
+ Item: PartialEq,
+ Range: PartialEq,
+{
+ #[inline]
+ fn unexpected_token(token: Item) -> Self {
+ Error::Unexpected(Info::Token(token))
+ }
+ #[inline]
+ fn unexpected_range(token: Range) -> Self {
+ Error::Unexpected(Info::Range(token))
+ }
+ #[inline]
+ fn unexpected_format<T>(msg: T) -> Self
+ where
+ T: fmt::Display,
+ {
+ Error::Unexpected(Info::Owned(msg.to_string()))
+ }
+ #[inline]
+ fn unexpected_static_message(msg: &'static str) -> Self {
+ Error::Unexpected(Info::Static(msg))
+ }
+
+ #[inline]
+ fn expected_token(token: Item) -> Self {
+ Error::Expected(Info::Token(token))
+ }
+ #[inline]
+ fn expected_range(token: Range) -> Self {
+ Error::Expected(Info::Range(token))
+ }
+ #[inline]
+ fn expected_format<T>(msg: T) -> Self
+ where
+ T: fmt::Display,
+ {
+ Error::Expected(Info::Owned(msg.to_string()))
+ }
+ #[inline]
+ fn expected_static_message(msg: &'static str) -> Self {
+ Error::Expected(Info::Static(msg))
+ }
+
+ #[inline]
+ fn message_format<T>(msg: T) -> Self
+ where
+ T: fmt::Display,
+ {
+ Error::Message(Info::Owned(msg.to_string()))
+ }
+ #[inline]
+ fn message_static_message(msg: &'static str) -> Self {
+ Error::Message(Info::Static(msg))
+ }
+ #[inline]
+ fn message_token(token: Item) -> Self {
+ Error::Message(Info::Token(token))
+ }
+ #[inline]
+ fn message_range(token: Range) -> Self {
+ Error::Message(Info::Range(token))
+ }
+
+ fn is_unexpected_end_of_input(&self) -> bool {
+ *self == Self::end_of_input()
+ }
+
+ #[inline]
+ fn other<E>(err: E) -> Self
+ where
+ E: StdError + Send + Sync + 'static,
+ {
+ err.into()
+ }
+
+ #[inline]
+ fn into_other<T>(self) -> T
+ where
+ T: StreamError<Item, Range>,
+ {
+ match self {
+ Error::Unexpected(info) => match info {
+ Info::Token(x) => T::unexpected_token(x),
+ Info::Range(x) => T::unexpected_range(x),
+ Info::Static(x) => T::unexpected_static_message(x),
+ Info::Owned(x) => T::unexpected_format(x),
+ },
+ Error::Expected(info) => match info {
+ Info::Token(x) => T::expected_token(x),
+ Info::Range(x) => T::expected_range(x),
+ Info::Static(x) => T::expected_static_message(x),
+ Info::Owned(x) => T::expected_format(x),
+ },
+            Error::Message(info) => match info {
+                Info::Token(x) => T::message_token(x),
+                Info::Range(x) => T::message_range(x),
+                Info::Static(x) => T::message_static_message(x),
+                Info::Owned(x) => T::message_format(x),
+            },
+ Error::Other(err) => T::message_format(err),
+ }
+ }
+}
+
+impl<Item, Range, Position> crate::error::ParseError<Item, Range, Position> for Error<Item, Range>
+where
+ Item: PartialEq,
+ Range: PartialEq,
+ Position: Default,
+{
+ type StreamError = Self;
+ #[inline]
+ fn empty(_: Position) -> Self {
+ Self::message_static_message("")
+ }
+ #[inline]
+ fn from_error(_: Position, err: Self::StreamError) -> Self {
+ err
+ }
+
+ #[inline]
+ fn position(&self) -> Position {
+ Position::default()
+ }
+
+ #[inline]
+ fn set_position(&mut self, _position: Position) {}
+
+ #[inline]
+ fn add(&mut self, err: Self::StreamError) {
+ *self = err;
+ }
+
+ #[inline]
+ fn set_expected<F>(self_: &mut Tracked<Self>, info: Self::StreamError, f: F)
+ where
+ F: FnOnce(&mut Tracked<Self>),
+ {
+ f(self_);
+ self_.error = info;
+ }
+
+ fn is_unexpected_end_of_input(&self) -> bool {
+ *self == Self::end_of_input()
+ }
+
+ #[inline]
+ fn into_other<T>(self) -> T
+ where
+ T: crate::error::ParseError<Item, Range, Position>,
+ {
+ T::from_error(Position::default(), StreamError::into_other(self))
+ }
+}
+
+impl<Item, Range, Position> crate::error::ParseErrorInto<Item, Range, Position>
+ for Errors<Item, Range, Position>
+{
+ fn into_other_error<T, Item2, Range2, Position2>(self) -> T
+ where
+ T: crate::error::ParseError<Item2, Range2, Position2>,
+ Item2: From<Item>,
+ Range2: From<Range>,
+ Position2: From<Position>,
+ {
+ let mut error = T::empty(self.position.into());
+ for err in self.errors {
+ error.add(crate::error::StreamErrorInto::<Item, Range>::into_other_error(err));
+ }
+ error
+ }
+}
+
+impl<Item, Range> crate::error::StreamErrorInto<Item, Range> for Error<Item, Range> {
+ fn into_other_error<T, Item2, Range2>(self) -> T
+ where
+ T: crate::error::StreamError<Item2, Range2>,
+ Item2: From<Item>,
+ Range2: From<Range>,
+ {
+ match self {
+ Error::Unexpected(info) => match info {
+ Info::Token(x) => T::unexpected_token(x.into()),
+ Info::Range(x) => T::unexpected_range(x.into()),
+ Info::Static(x) => T::unexpected_static_message(x),
+ Info::Owned(x) => T::unexpected_format(x),
+ },
+ Error::Expected(info) => match info {
+ Info::Token(x) => T::expected_token(x.into()),
+ Info::Range(x) => T::expected_range(x.into()),
+ Info::Static(x) => T::expected_static_message(x),
+ Info::Owned(x) => T::expected_format(x),
+ },
+            Error::Message(info) => match info {
+                Info::Token(x) => T::message_token(x.into()),
+                Info::Range(x) => T::message_range(x.into()),
+                Info::Static(x) => T::message_static_message(x),
+                Info::Owned(x) => T::message_format(x),
+            },
+ Error::Other(err) => T::message_format(err),
+ }
+ }
+}
+
+impl<Item, Range, Position> crate::error::ParseError<Item, Range, Position>
+ for Errors<Item, Range, Position>
+where
+ Item: PartialEq,
+ Range: PartialEq,
+ Position: Ord + Clone,
+{
+ type StreamError = Error<Item, Range>;
+ #[inline]
+ fn empty(pos: Position) -> Self {
+ Errors::empty(pos)
+ }
+ #[inline]
+ fn from_error(position: Position, err: Self::StreamError) -> Self {
+ Self::new(position, err)
+ }
+
+ #[inline]
+ fn position(&self) -> Position {
+ self.position.clone()
+ }
+
+ #[inline]
+ fn set_position(&mut self, position: Position) {
+ self.position = position;
+ }
+
+ #[inline]
+ fn merge(self, other: Self) -> Self {
+ Errors::merge(self, other)
+ }
+
+ #[inline]
+ fn add(&mut self, err: Self::StreamError) {
+ self.add_error(err);
+ }
+
+ #[inline]
+ fn set_expected<F>(self_: &mut Tracked<Self>, info: Self::StreamError, f: F)
+ where
+ F: FnOnce(&mut Tracked<Self>),
+ {
+ let start = self_.error.errors.len();
+ f(self_);
+ // Replace all expected errors that were added from the previous add_error
+ // with this expected error
+ let mut i = 0;
+ self_.error.errors.retain(|e| {
+ if i < start {
+ i += 1;
+ true
+ } else {
+ match *e {
+ Error::Expected(_) => false,
+ _ => true,
+ }
+ }
+ });
+ self_.error.add(info);
+ }
+
+ fn clear_expected(&mut self) {
+ self.errors.retain(|e| match *e {
+ Error::Expected(_) => false,
+ _ => true,
+ })
+ }
+
+ fn is_unexpected_end_of_input(&self) -> bool {
+ self.errors
+ .iter()
+ .any(StreamError::is_unexpected_end_of_input)
+ }
+
+ #[inline]
+ fn into_other<T>(mut self) -> T
+ where
+ T: crate::error::ParseError<Item, Range, Position>,
+ {
+ match self.errors.pop() {
+ Some(err) => T::from_error(self.position, StreamError::into_other(err)),
+ None => T::empty(self.position),
+ }
+ }
+}
+
+impl<T, R> Error<T, R> {
+ pub fn map_token<F, U>(self, f: F) -> Error<U, R>
+ where
+ F: FnOnce(T) -> U,
+ {
+ use self::Error::*;
+
+ match self {
+ Unexpected(x) => Unexpected(x.map_token(f)),
+ Expected(x) => Expected(x.map_token(f)),
+ Message(x) => Message(x.map_token(f)),
+ Other(x) => Other(x),
+ }
+ }
+
+ pub fn map_range<F, S>(self, f: F) -> Error<T, S>
+ where
+ F: FnOnce(R) -> S,
+ {
+ use self::Error::*;
+
+ match self {
+ Unexpected(x) => Unexpected(x.map_range(f)),
+ Expected(x) => Expected(x.map_range(f)),
+ Message(x) => Message(x.map_range(f)),
+ Other(x) => Other(x),
+ }
+ }
+}
+
+impl<T: PartialEq, R: PartialEq> PartialEq for Error<T, R> {
+ fn eq(&self, other: &Error<T, R>) -> bool {
+ match (self, other) {
+ (&Error::Unexpected(ref l), &Error::Unexpected(ref r))
+ | (&Error::Expected(ref l), &Error::Expected(ref r))
+ | (&Error::Message(ref l), &Error::Message(ref r)) => l == r,
+ _ => false,
+ }
+ }
+}
+
+impl<T, R, E> From<E> for Error<T, R>
+where
+ E: StdError + 'static + Send + Sync,
+{
+ fn from(e: E) -> Error<T, R> {
+ Error::Other(Box::new(e))
+ }
+}
+
+impl<T, R> Error<T, R> {
+ /// Returns the `end_of_input` error.
+ pub fn end_of_input() -> Error<T, R> {
+ Error::Unexpected("end of input".into())
+ }
+
+ /// Formats a slice of errors in a human readable way.
+ ///
+ /// ```rust
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # use combine::parser::char::*;
+ /// # use combine::stream::position::{self, SourcePosition};
+ ///
+ /// # fn main() {
+ /// let input = r"
+ /// ,123
+ /// ";
+ /// let result = spaces().silent().with(char('.').or(char('a')).or(digit()))
+ /// .easy_parse(position::Stream::new(input));
+ /// let m = format!("{}", result.unwrap_err());
+ /// let expected = r"Parse error at line: 2, column: 3
+ /// Unexpected `,`
+ /// Expected `.`, `a` or `digit`
+ /// ";
+ /// assert_eq!(m, expected);
+ /// # }
+ /// ```
+ pub fn fmt_errors(errors: &[Error<T, R>], f: &mut fmt::Formatter<'_>) -> fmt::Result
+ where
+ T: fmt::Display,
+ R: fmt::Display,
+ {
+ // First print the token that we did not expect
+        // There should really just be one unexpected message at this point, though we print
+        // them all to be safe
+ let unexpected = errors.iter().filter(|e| match **e {
+ Error::Unexpected(_) => true,
+ _ => false,
+ });
+ for error in unexpected {
+ writeln!(f, "{}", error)?;
+ }
+
+ // Then we print out all the things that were expected in a comma separated list
+ // 'Expected 'a', 'expression' or 'let'
+ let iter = || {
+ errors.iter().filter_map(|e| match *e {
+ Error::Expected(ref err) => Some(err),
+ _ => None,
+ })
+ };
+ let expected_count = iter().count();
+ for (i, message) in iter().enumerate() {
+ let s = match i {
+ 0 => "Expected",
+ _ if i < expected_count - 1 => ",",
+ // Last expected message to be written
+ _ => " or",
+ };
+ write!(f, "{} `{}`", s, message)?;
+ }
+ if expected_count != 0 {
+ writeln!(f)?;
+ }
+ // If there are any generic messages we print them out last
+ let messages = errors.iter().filter(|e| match **e {
+ Error::Message(_) | Error::Other(_) => true,
+ _ => false,
+ });
+ for error in messages {
+ writeln!(f, "{}", error)?;
+ }
+ Ok(())
+ }
+}
+
+/// Convenience alias over `Errors` which makes it possible to name the error type of a
+/// `StreamOnce` input by writing `ParseError<Input>` instead of
+/// `Errors<Input::Token, Input::Range, Input::Position>`.
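+///
+/// A minimal illustration (sketch only): for a plain `&str` input the alias resolves to the
+/// concrete `Errors` type below.
+///
+/// ```
+/// use combine::easy::{Errors, ParseError};
+/// use combine::stream::PointerOffset;
+///
+/// fn assert_alias<'a>(err: ParseError<&'a str>) -> Errors<char, &'a str, PointerOffset<str>> {
+///     err
+/// }
+/// ```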
+pub type ParseError<S> =
+ Errors<<S as StreamOnce>::Token, <S as StreamOnce>::Range, <S as StreamOnce>::Position>;
+
+/// Struct which holds information about an error that occurred at a specific position.
+/// Can hold multiple instances of `Error` if more than one error occurred at the same position.
+#[derive(Debug, PartialEq)]
+pub struct Errors<T, R, P> {
+ /// The position where the error occurred
+ pub position: P,
+ /// A vector containing specific information on what errors occurred at `position`. Usually
+ /// a fully formed message contains one `Unexpected` error and one or more `Expected` errors.
+ /// `Message` and `Other` may also appear (`combine` never generates these errors on its own)
+ /// and may warrant custom handling.
+ pub errors: Vec<Error<T, R>>,
+}
+
+impl<T, R, P> Errors<T, R, P> {
+ /// Constructs a new `ParseError` which occurred at `position`.
+ #[inline]
+ pub fn new(position: P, error: Error<T, R>) -> Errors<T, R, P> {
+ Self::from_errors(position, vec![error])
+ }
+
+ /// Constructs an error with no other information than the position it occurred at.
+ #[inline]
+ pub fn empty(position: P) -> Errors<T, R, P> {
+ Self::from_errors(position, vec![])
+ }
+
+ /// Constructs a `ParseError` with multiple causes.
+ #[inline]
+ pub fn from_errors(position: P, errors: Vec<Error<T, R>>) -> Errors<T, R, P> {
+ Errors { position, errors }
+ }
+
+ /// Constructs an end of input error. Should be returned by parsers which encounter end of
+ /// input unexpectedly.
+ #[inline]
+ pub fn end_of_input(position: P) -> Errors<T, R, P> {
+ Self::new(position, Error::end_of_input())
+ }
+
+    /// Adds an error if `error` does not exist in this `ParseError` already (as determined by
+    /// `PartialEq`).
+ pub fn add_error(&mut self, error: Error<T, R>)
+ where
+ T: PartialEq,
+ R: PartialEq,
+ {
+ // Don't add duplicate errors
+ if self.errors.iter().all(|err| *err != error) {
+ self.errors.push(error);
+ }
+ }
+
+ /// Removes all `Expected` errors in `self` and adds `info` instead.
+ pub fn set_expected(&mut self, info: Info<T, R>) {
+ // Remove all other expected messages
+ self.errors.retain(|e| match *e {
+ Error::Expected(_) => false,
+ _ => true,
+ });
+ self.errors.push(Error::Expected(info));
+ }
+
+    /// Merges two `ParseError`s. If they are at the same position the errors of `other` are
+    /// added to `self` (using `add_error` to skip duplicates). If they are not at the same
+    /// position, the error furthest ahead is returned and the other `ParseError` is discarded.
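+    ///
+    /// A small illustration (sketch with arbitrary positions):
+    ///
+    /// ```
+    /// use combine::easy::{Error, Errors, Info};
+    ///
+    /// let a = Errors::new(1usize, Error::<char, &str>::Expected(Info::Token('a')));
+    /// let b = Errors::new(2usize, Error::Expected(Info::Token('b')));
+    /// // `b` is further ahead, so only its errors are kept.
+    /// assert_eq!(a.merge(b).position, 2);
+    /// ```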
+ pub fn merge(mut self, mut other: Errors<T, R, P>) -> Errors<T, R, P>
+ where
+ P: Ord,
+ T: PartialEq,
+ R: PartialEq,
+ {
+ use std::cmp::Ordering;
+
+ // Only keep the errors which occurred after consuming the most amount of data
+ match self.position.cmp(&other.position) {
+ Ordering::Less => other,
+ Ordering::Greater => self,
+ Ordering::Equal => {
+ for message in other.errors.drain(..) {
+ self.add_error(message);
+ }
+ self
+ }
+ }
+ }
+
+ /// Maps the position to a new value
+ pub fn map_position<F, Q>(self, f: F) -> Errors<T, R, Q>
+ where
+ F: FnOnce(P) -> Q,
+ {
+ Errors::from_errors(f(self.position), self.errors)
+ }
+
+ /// Maps all token variants to a new value
+ pub fn map_token<F, U>(self, mut f: F) -> Errors<U, R, P>
+ where
+ F: FnMut(T) -> U,
+ {
+ Errors::from_errors(
+ self.position,
+ self.errors
+ .into_iter()
+ .map(|error| error.map_token(&mut f))
+ .collect(),
+ )
+ }
+
+ /// Maps all range variants to a new value.
+ ///
+ /// ```
+ /// use combine::*;
+ /// use combine::parser::range::range;
+ /// println!(
+ /// "{}",
+ /// range(&"HTTP"[..])
+ /// .easy_parse("HTT")
+ /// .unwrap_err()
+ /// .map_range(|bytes| format!("{:?}", bytes))
+ /// );
+ /// ```
+ pub fn map_range<F, S>(self, mut f: F) -> Errors<T, S, P>
+ where
+ F: FnMut(R) -> S,
+ {
+ Errors::from_errors(
+ self.position,
+ self.errors
+ .into_iter()
+ .map(|error| error.map_range(&mut f))
+ .collect(),
+ )
+ }
+}
+
+impl<T, R, P> StdError for Errors<T, R, P>
+where
+ P: fmt::Display + fmt::Debug,
+ T: fmt::Display + fmt::Debug,
+ R: fmt::Display + fmt::Debug,
+{
+ fn description(&self) -> &str {
+ "parse error"
+ }
+}
+
+impl<T, R, P> fmt::Display for Errors<T, R, P>
+where
+ P: fmt::Display,
+ T: fmt::Display,
+ R: fmt::Display,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ writeln!(f, "Parse error at {}", self.position)?;
+ Error::fmt_errors(&self.errors, f)
+ }
+}
+
+impl<T: fmt::Display, R: fmt::Display> fmt::Display for Error<T, R> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Error::Unexpected(ref c) => write!(f, "Unexpected `{}`", c),
+ Error::Expected(ref s) => write!(f, "Expected `{}`", s),
+ Error::Message(ref msg) => msg.fmt(f),
+ Error::Other(ref err) => err.fmt(f),
+ }
+ }
+}
+
+#[derive(PartialEq, Eq, Copy, Clone, Debug)]
+pub struct Stream<S>(pub S);
+
+impl<S> From<S> for Stream<S> {
+ fn from(stream: S) -> Self {
+ Stream(stream)
+ }
+}
+
+impl<S> ResetStream for Stream<S>
+where
+ S: ResetStream + Positioned,
+ S::Token: PartialEq,
+ S::Range: PartialEq,
+{
+ type Checkpoint = S::Checkpoint;
+
+ fn checkpoint(&self) -> Self::Checkpoint {
+ self.0.checkpoint()
+ }
+ fn reset(&mut self, checkpoint: Self::Checkpoint) -> Result<(), Self::Error> {
+ self.0
+ .reset(checkpoint)
+ .map_err(crate::error::ParseError::into_other)
+ }
+}
+
+impl<S> StreamOnce for Stream<S>
+where
+ S: StreamOnce + Positioned,
+ S::Token: PartialEq,
+ S::Range: PartialEq,
+{
+ type Token = S::Token;
+ type Range = S::Range;
+ type Position = S::Position;
+ type Error = ParseError<S>;
+
+ #[inline]
+ fn uncons(&mut self) -> Result<Self::Token, StreamErrorFor<Self>> {
+ self.0.uncons().map_err(StreamError::into_other)
+ }
+
+ fn is_partial(&self) -> bool {
+ self.0.is_partial()
+ }
+}
+
+impl<S> RangeStreamOnce for Stream<S>
+where
+ S: RangeStream,
+ S::Token: PartialEq,
+ S::Range: PartialEq,
+{
+ #[inline]
+ fn uncons_range(&mut self, size: usize) -> Result<Self::Range, StreamErrorFor<Self>> {
+ self.0.uncons_range(size).map_err(StreamError::into_other)
+ }
+
+ #[inline]
+ fn uncons_while<F>(&mut self, f: F) -> Result<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ self.0.uncons_while(f).map_err(StreamError::into_other)
+ }
+
+ #[inline]
+ fn uncons_while1<F>(&mut self, f: F) -> ParseResult<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ self.0.uncons_while1(f).map_err(StreamError::into_other)
+ }
+
+ #[inline]
+ fn distance(&self, end: &Self::Checkpoint) -> usize {
+ self.0.distance(end)
+ }
+
+ fn range(&self) -> Self::Range {
+ self.0.range()
+ }
+}
+
+impl<S> Positioned for Stream<S>
+where
+ S: StreamOnce + Positioned,
+ S::Token: PartialEq,
+ S::Range: PartialEq,
+{
+ fn position(&self) -> S::Position {
+ self.0.position()
+ }
+}
diff --git a/src/stream/mod.rs b/src/stream/mod.rs
new file mode 100644
index 0000000..f38bd72
--- /dev/null
+++ b/src/stream/mod.rs
@@ -0,0 +1,1883 @@
+//! Traits and implementations of arbitrary data streams.
+//!
+//! Streams are similar to the `Iterator` trait in that they represent some sequential set of items
+//! which can be retrieved one by one. Where `Stream`s differ is that they are allowed to return
+//! errors instead of just `None` and if they implement the `RangeStreamOnce` trait they are also
+//! capable of returning multiple items at the same time, usually in the form of a slice.
+//!
+//! In addition to the functionality above, a proper `Stream` usable by a `Parser` must also have a
+//! position (marked by the `Positioned` trait) and must also be resettable (marked by the
+//! `ResetStream` trait). The former is used to ensure that errors at different points in the stream
+//! aren't combined and the latter is used in parsers such as `or` to try multiple alternative
+//! parses.
+
+use crate::lib::{cmp::Ordering, fmt, marker::PhantomData, str::Chars};
+
+use crate::{
+ error::{
+ ParseError,
+ ParseResult::{self, *},
+ StreamError, StringStreamError, Tracked, UnexpectedParse,
+ },
+ Parser,
+};
+
+#[cfg(feature = "std")]
+pub use self::decoder::Decoder;
+
+#[doc(hidden)]
+#[macro_export]
+macro_rules! clone_resetable {
+ (( $($params: tt)* ) $ty: ty) => {
+ impl<$($params)*> ResetStream for $ty
+ where Self: StreamOnce
+ {
+ type Checkpoint = Self;
+
+ fn checkpoint(&self) -> Self {
+ self.clone()
+ }
+ #[inline]
+ fn reset(&mut self, checkpoint: Self) -> Result<(), Self::Error> {
+ *self = checkpoint;
+ Ok(())
+ }
+ }
+ }
+}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+pub mod buf_reader;
+/// Stream wrapper which provides a `ResetStream` impl for `StreamOnce` impls which do not have
+/// one.
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+pub mod buffered;
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+pub mod easy;
+/// Stream wrapper which provides more detailed position information.
+pub mod position;
+/// Stream wrapper allowing `std::io::Read` to be used
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+pub mod read;
+pub mod span;
+/// Stream wrapper allowing custom state to be used.
+pub mod state;
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+pub mod decoder;
+
+/// A type which has a position.
+pub trait Positioned: StreamOnce {
+ /// Returns the current position of the stream.
+ fn position(&self) -> Self::Position;
+}
+
+/// Convenience alias over the `StreamError` for the input stream `Input`
+///
+/// ```
+/// #[macro_use]
+/// extern crate combine;
+/// use combine::{easy, Parser, Stream, many1};
+/// use combine::parser::char::letter;
+/// use combine::stream::StreamErrorFor;
+/// use combine::error::{ParseError, StreamError};
+///
+/// parser!{
+/// fn parser[Input]()(Input) -> String
+/// where [ Input: Stream<Token = char>, ]
+/// {
+/// many1(letter()).and_then(|word: String| {
+/// if word == "combine" {
+/// Ok(word)
+/// } else {
+/// // The alias makes it easy to refer to the `StreamError` type of `Input`
+/// Err(StreamErrorFor::<Input>::expected_static_message("combine"))
+/// }
+/// })
+/// }
+/// }
+///
+/// fn main() {
+/// }
+/// ```
+pub type StreamErrorFor<Input> = <<Input as StreamOnce>::Error as ParseError<
+ <Input as StreamOnce>::Token,
+ <Input as StreamOnce>::Range,
+ <Input as StreamOnce>::Position,
+>>::StreamError;
+
+/// `StreamOnce` represents a sequence of items that can be extracted one by one.
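+///
+/// A minimal example (sketch) using the `&str` implementation defined later in this module:
+///
+/// ```
+/// use combine::stream::StreamOnce;
+///
+/// let mut input = "ab";
+/// assert_eq!(input.uncons(), Ok('a'));
+/// assert_eq!(input, "b");
+/// ```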
+pub trait StreamOnce {
+ /// The type of items which is yielded from this stream.
+ type Token: Clone;
+
+ /// The type of a range of items yielded from this stream.
+    /// Types which do not have a way of yielding ranges of items should just use
+    /// `Self::Token` for this type.
+ type Range: Clone;
+
+ /// Type which represents the position in a stream.
+ /// `Ord` is required to allow parsers to determine which of two positions are further ahead.
+ type Position: Clone + Ord;
+
+ type Error: ParseError<Self::Token, Self::Range, Self::Position>;
+ /// Takes a stream and removes its first token, yielding the token and the rest of the elements.
+ /// Returns `Err` if no element could be retrieved.
+ fn uncons(&mut self) -> Result<Self::Token, StreamErrorFor<Self>>;
+
+ /// Returns `true` if this stream only contains partial input.
+ ///
+ /// See `PartialStream`.
+ fn is_partial(&self) -> bool {
+ false
+ }
+}
+
+/// A `StreamOnce` which can create checkpoints which the stream can be reset to
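+///
+/// A short example (sketch) using the `&str` implementation from this module:
+///
+/// ```
+/// use combine::stream::{ResetStream, StreamOnce};
+///
+/// let mut input = "abc";
+/// let checkpoint = input.checkpoint();
+/// input.uncons().unwrap();
+/// input.reset(checkpoint).unwrap();
+/// assert_eq!(input, "abc");
+/// ```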
+pub trait ResetStream: StreamOnce {
+ type Checkpoint: Clone;
+
+ /// Creates a `Checkpoint` at the current position which can be used to reset the stream
+ /// later to the current position
+ fn checkpoint(&self) -> Self::Checkpoint;
+ /// Attempts to reset the stream to an earlier position.
+ fn reset(&mut self, checkpoint: Self::Checkpoint) -> Result<(), Self::Error>;
+}
+
+clone_resetable! {('a) &'a str}
+clone_resetable! {('a, T) &'a [T]}
+clone_resetable! {('a, T) SliceStream<'a, T> }
+clone_resetable! {(T: Clone) IteratorStream<T>}
+
+/// A stream of tokens which can be duplicated
+///
+/// This is a trait over types which implement the `StreamOnce`, `ResetStream` and `Positioned`
+/// traits. If you need a custom `Stream` object then implement those traits and `Stream` is
+/// implemented automatically.
+pub trait Stream: StreamOnce + ResetStream + Positioned {}
+
+impl<Input> Stream for Input
+where
+ Input: StreamOnce + Positioned + ResetStream,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+}
+
+#[inline]
+pub fn uncons<Input>(input: &mut Input) -> ParseResult<Input::Token, Input::Error>
+where
+ Input: ?Sized + Stream,
+{
+ match input.uncons() {
+ Ok(x) => CommitOk(x),
+ Err(err) => wrap_stream_error(input, err),
+ }
+}
+
+/// A `RangeStreamOnce` is an extension of `StreamOnce` which allows for zero copy parsing.
+pub trait RangeStreamOnce: StreamOnce + ResetStream {
+ /// Takes `size` elements from the stream.
+ /// Fails if the length of the stream is less than `size`.
+ fn uncons_range(&mut self, size: usize) -> Result<Self::Range, StreamErrorFor<Self>>;
+
+    /// Takes items from the stream, testing each one with `predicate`.
+    /// Returns the range of items which passed `predicate`.
+ fn uncons_while<F>(&mut self, f: F) -> Result<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool;
+
+ #[inline]
+    /// Takes items from the stream, testing each one with `predicate`.
+    /// Returns a range of at least one item which passed `predicate`.
+ ///
+ /// # Note
+ ///
+ /// This may not return `PeekOk` as it should uncons at least one token.
+ fn uncons_while1<F>(&mut self, mut f: F) -> ParseResult<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ let mut committed = false;
+ let mut started_at_eoi = true;
+ let result = self.uncons_while(|c| {
+ let ok = f(c);
+ committed |= ok;
+ started_at_eoi = false;
+ ok
+ });
+ if committed {
+ match result {
+ Ok(x) => CommitOk(x),
+ Err(x) => CommitErr(x),
+ }
+ } else if started_at_eoi {
+ PeekErr(Tracked::from(StreamErrorFor::<Self>::end_of_input()))
+ } else {
+ PeekErr(Tracked::from(
+ StreamErrorFor::<Self>::unexpected_static_message(""),
+ ))
+ }
+ }
+
+    /// Returns the distance between `self` and `end`. The returned `usize` must be such that
+ ///
+ /// ```ignore
+ /// let start = stream.checkpoint();
+ /// stream.uncons_range(distance);
+ /// stream.distance(&start) == distance
+ /// ```
+ fn distance(&self, end: &Self::Checkpoint) -> usize;
+
+ /// Returns the entire range of `self`
+ fn range(&self) -> Self::Range;
+}
+
+/// A `RangeStream` is an extension of `Stream` which allows for zero copy parsing.
+pub trait RangeStream: Stream + RangeStreamOnce {}
+
+impl<Input> RangeStream for Input where Input: RangeStreamOnce + Stream {}
+
+#[doc(hidden)]
+pub fn wrap_stream_error<T, Input>(
+ input: &Input,
+ err: <Input::Error as ParseError<Input::Token, Input::Range, Input::Position>>::StreamError,
+) -> ParseResult<T, <Input as StreamOnce>::Error>
+where
+ Input: ?Sized + StreamOnce + Positioned,
+{
+ let err = Input::Error::from_error(input.position(), err);
+ if input.is_partial() {
+ CommitErr(err)
+ } else {
+ PeekErr(err.into())
+ }
+}
+
+#[inline]
+pub fn uncons_range<Input>(
+ input: &mut Input,
+ size: usize,
+) -> ParseResult<Input::Range, <Input as StreamOnce>::Error>
+where
+ Input: ?Sized + RangeStream,
+{
+ match input.uncons_range(size) {
+ Err(err) => wrap_stream_error(input, err),
+ Ok(x) => {
+ if size == 0 {
+ PeekOk(x)
+ } else {
+ CommitOk(x)
+ }
+ }
+ }
+}
+
+#[doc(hidden)]
+pub fn input_at_eof<Input>(input: &mut Input) -> bool
+where
+ Input: ?Sized + Stream,
+{
+ let before = input.checkpoint();
+ let x = input
+ .uncons()
+ .err()
+ .map_or(false, |err| err.is_unexpected_end_of_input());
+ input.reset(before).is_ok() && x
+}
+
+/// Removes items from the input while `predicate` returns `true`.
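+///
+/// A small sketch (assuming the `&str` `RangeStream` implementation in this module):
+///
+/// ```
+/// use combine::error::ParseResult;
+/// use combine::stream::uncons_while;
+///
+/// let mut input = "aaab";
+/// match uncons_while(&mut input, |c| c == 'a') {
+///     ParseResult::CommitOk(range) => assert_eq!(range, "aaa"),
+///     _ => unreachable!(),
+/// }
+/// assert_eq!(input, "b");
+/// ```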
+#[inline]
+pub fn uncons_while<Input, F>(
+ input: &mut Input,
+ predicate: F,
+) -> ParseResult<Input::Range, Input::Error>
+where
+ F: FnMut(Input::Token) -> bool,
+ Input: ?Sized + RangeStream,
+ Input::Range: Range,
+{
+ match input.uncons_while(predicate) {
+ Err(err) => wrap_stream_error(input, err),
+ Ok(x) => {
+ if input.is_partial() && input_at_eof(input) {
+ // Partial inputs which encounter end of file must fail to let more input be
+ // retrieved
+ CommitErr(Input::Error::from_error(
+ input.position(),
+ StreamError::end_of_input(),
+ ))
+ } else if x.len() == 0 {
+ PeekOk(x)
+ } else {
+ CommitOk(x)
+ }
+ }
+ }
+}
+
+#[inline]
+/// Takes items from the stream, testing each one with `predicate`.
+/// Returns a range of at least one item which passed `predicate`.
+///
+/// # Note
+///
+/// This may not return `PeekOk` as it should uncons at least one token.
+pub fn uncons_while1<Input, F>(
+ input: &mut Input,
+ predicate: F,
+) -> ParseResult<Input::Range, Input::Error>
+where
+ F: FnMut(Input::Token) -> bool,
+ Input: ?Sized + RangeStream,
+{
+ match input.uncons_while1(predicate) {
+ CommitOk(x) => {
+ if input.is_partial() && input_at_eof(input) {
+ // Partial inputs which encounter end of file must fail to let more input be
+ // retrieved
+ CommitErr(Input::Error::from_error(
+ input.position(),
+ StreamError::end_of_input(),
+ ))
+ } else {
+ CommitOk(x)
+ }
+ }
+ PeekErr(_) => {
+ if input.is_partial() && input_at_eof(input) {
+ // Partial inputs which encounter end of file must fail to let more input be
+ // retrieved
+ CommitErr(Input::Error::from_error(
+ input.position(),
+ StreamError::end_of_input(),
+ ))
+ } else {
+ PeekErr(Input::Error::empty(input.position()).into())
+ }
+ }
+ CommitErr(err) => {
+ if input.is_partial() && input_at_eof(input) {
+ // Partial inputs which encounter end of file must fail to let more input be
+ // retrieved
+ CommitErr(Input::Error::from_error(
+ input.position(),
+ StreamError::end_of_input(),
+ ))
+ } else {
+ wrap_stream_error(input, err)
+ }
+ }
+ PeekOk(_) => unreachable!(),
+ }
+}
+
+/// Trait representing a range of elements.
+pub trait Range {
+ /// Returns the remaining length of `self`.
+ /// The returned length need not be the same as the number of items left in the stream.
+ fn len(&self) -> usize;
+
+ /// Returns `true` if the range does not contain any elements (`Range::len() == 0`)
+ fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+
+impl<'a, I> StreamOnce for &'a mut I
+where
+ I: StreamOnce + ?Sized,
+{
+ type Token = I::Token;
+
+ type Range = I::Range;
+
+ type Position = I::Position;
+
+ type Error = I::Error;
+ fn uncons(&mut self) -> Result<Self::Token, StreamErrorFor<Self>> {
+ (**self).uncons()
+ }
+
+ fn is_partial(&self) -> bool {
+ (**self).is_partial()
+ }
+}
+
+impl<'a, I> Positioned for &'a mut I
+where
+ I: Positioned + ?Sized,
+{
+ #[inline]
+ fn position(&self) -> Self::Position {
+ (**self).position()
+ }
+}
+
+impl<'a, I> ResetStream for &'a mut I
+where
+ I: ResetStream + ?Sized,
+{
+ type Checkpoint = I::Checkpoint;
+
+ fn checkpoint(&self) -> Self::Checkpoint {
+ (**self).checkpoint()
+ }
+
+ fn reset(&mut self, checkpoint: Self::Checkpoint) -> Result<(), Self::Error> {
+ (**self).reset(checkpoint)
+ }
+}
+
+impl<'a, I> RangeStreamOnce for &'a mut I
+where
+ I: RangeStreamOnce + ?Sized,
+{
+ #[inline]
+ fn uncons_while<F>(&mut self, f: F) -> Result<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ (**self).uncons_while(f)
+ }
+
+ #[inline]
+ fn uncons_while1<F>(&mut self, f: F) -> ParseResult<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ (**self).uncons_while1(f)
+ }
+
+ #[inline]
+ fn uncons_range(&mut self, size: usize) -> Result<Self::Range, StreamErrorFor<Self>> {
+ (**self).uncons_range(size)
+ }
+
+ #[inline]
+ fn distance(&self, end: &Self::Checkpoint) -> usize {
+ (**self).distance(end)
+ }
+
+ fn range(&self) -> Self::Range {
+ (**self).range()
+ }
+}
+
+impl<'a, I> Range for &'a mut I
+where
+ I: Range + ?Sized,
+{
+ fn len(&self) -> usize {
+ (**self).len()
+ }
+}
+
+impl<'a> StreamOnce for &'a str {
+ type Token = char;
+ type Range = &'a str;
+ type Position = PointerOffset<str>;
+ type Error = StringStreamError;
+
+ #[inline]
+ fn uncons(&mut self) -> Result<char, StreamErrorFor<Self>> {
+ let mut chars = self.chars();
+ match chars.next() {
+ Some(c) => {
+ *self = chars.as_str();
+ Ok(c)
+ }
+ None => Err(StringStreamError::Eoi),
+ }
+ }
+}
+
+impl<'a> Positioned for &'a str {
+ #[inline]
+ fn position(&self) -> Self::Position {
+ PointerOffset::new(self.as_bytes().position().0)
+ }
+}
+
+#[allow(clippy::while_let_loop)]
+fn str_uncons_while<'a, F>(slice: &mut &'a str, mut chars: Chars<'a>, mut f: F) -> &'a str
+where
+ F: FnMut(char) -> bool,
+{
+ let mut last_char_size = 0;
+
+ macro_rules! test_next {
+ () => {
+ match chars.next() {
+ Some(c) => {
+ if !f(c) {
+ last_char_size = c.len_utf8();
+ break;
+ }
+ }
+ None => break,
+ }
+ };
+ }
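+    // The check below is repeated eight times per iteration, presumably as manual loop
+    // unrolling to reduce per-character loop overhead.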
+ loop {
+ test_next!();
+ test_next!();
+ test_next!();
+ test_next!();
+ test_next!();
+ test_next!();
+ test_next!();
+ test_next!();
+ }
+
+ let len = slice.len() - chars.as_str().len() - last_char_size;
+ let (result, rest) = slice.split_at(len);
+ *slice = rest;
+ result
+}
+
+impl<'a> RangeStreamOnce for &'a str {
+ fn uncons_while<F>(&mut self, f: F) -> Result<&'a str, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ Ok(str_uncons_while(self, self.chars(), f))
+ }
+
+ #[inline]
+ fn uncons_while1<F>(&mut self, mut f: F) -> ParseResult<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ let mut chars = self.chars();
+ match chars.next() {
+ Some(c) => {
+ if !f(c) {
+ return PeekErr(Tracked::from(StringStreamError::UnexpectedParse));
+ }
+ }
+ None => return PeekErr(Tracked::from(StringStreamError::Eoi)),
+ }
+
+ CommitOk(str_uncons_while(self, chars, f))
+ }
+
+ #[inline]
+ fn uncons_range(&mut self, size: usize) -> Result<&'a str, StreamErrorFor<Self>> {
+ fn is_char_boundary(s: &str, index: usize) -> bool {
+ if index == s.len() {
+ return true;
+ }
+ match s.as_bytes().get(index) {
+ None => false,
+ Some(&b) => b < 128 || b >= 192,
+ }
+ }
+ if size <= self.len() {
+ if is_char_boundary(self, size) {
+ let (result, remaining) = self.split_at(size);
+ *self = remaining;
+ Ok(result)
+ } else {
+ Err(StringStreamError::CharacterBoundary)
+ }
+ } else {
+ Err(StringStreamError::Eoi)
+ }
+ }
+
+ #[inline]
+ fn distance(&self, end: &Self) -> usize {
+ self.position().0 - end.position().0
+ }
+
+ fn range(&self) -> Self::Range {
+ self
+ }
+}
+
+impl<'a> Range for &'a str {
+ #[inline]
+ fn len(&self) -> usize {
+ str::len(self)
+ }
+}
+
+impl<'a, T> Range for &'a [T] {
+ #[inline]
+ fn len(&self) -> usize {
+ <[T]>::len(self)
+ }
+}
+
+fn slice_uncons_while<'a, T, F>(slice: &mut &'a [T], mut i: usize, mut f: F) -> &'a [T]
+where
+ F: FnMut(T) -> bool,
+ T: Clone,
+{
+ let len = slice.len();
+ let mut found = false;
+
+ macro_rules! check {
+ () => {
+ if !f(unsafe { slice.get_unchecked(i).clone() }) {
+ found = true;
+ break;
+ }
+ i += 1;
+ };
+ }
+
+ while len - i >= 8 {
+ check!();
+ check!();
+ check!();
+ check!();
+ check!();
+ check!();
+ check!();
+ check!();
+ }
+
+ if !found {
+ while let Some(c) = slice.get(i) {
+ if !f(c.clone()) {
+ break;
+ }
+ i += 1;
+ }
+ }
+
+ let (result, remaining) = slice.split_at(i);
+ *slice = remaining;
+ result
+}
+
+impl<'a, T> RangeStreamOnce for &'a [T]
+where
+ T: Clone + PartialEq,
+{
+ #[inline]
+ fn uncons_range(&mut self, size: usize) -> Result<&'a [T], StreamErrorFor<Self>> {
+ if size <= self.len() {
+ let (result, remaining) = self.split_at(size);
+ *self = remaining;
+ Ok(result)
+ } else {
+ Err(UnexpectedParse::Eoi)
+ }
+ }
+
+ #[inline]
+ fn uncons_while<F>(&mut self, f: F) -> Result<&'a [T], StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ Ok(slice_uncons_while(self, 0, f))
+ }
+
+ #[inline]
+ fn uncons_while1<F>(&mut self, mut f: F) -> ParseResult<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ match self.first() {
+ Some(c) => {
+ if !f(c.clone()) {
+ return PeekErr(Tracked::from(UnexpectedParse::Unexpected));
+ }
+ }
+ None => {
+ return PeekErr(Tracked::from(UnexpectedParse::Eoi));
+ }
+ }
+
+ CommitOk(slice_uncons_while(self, 1, f))
+ }
+
+ #[inline]
+ fn distance(&self, end: &Self) -> usize {
+ end.len() - self.len()
+ }
+
+ fn range(&self) -> Self::Range {
+ self
+ }
+}
+
+impl<'a, T> Positioned for &'a [T]
+where
+ T: Clone + PartialEq,
+{
+ #[inline]
+ fn position(&self) -> Self::Position {
+ PointerOffset::new(self.as_ptr() as usize)
+ }
+}
+
+impl<'a, T> StreamOnce for &'a [T]
+where
+ T: Clone + PartialEq,
+{
+ type Token = T;
+ type Range = &'a [T];
+ type Position = PointerOffset<[T]>;
+ type Error = UnexpectedParse;
+
+ #[inline]
+ fn uncons(&mut self) -> Result<T, StreamErrorFor<Self>> {
+ match self.split_first() {
+ Some((first, rest)) => {
+ *self = rest;
+ Ok(first.clone())
+ }
+ None => Err(UnexpectedParse::Eoi),
+ }
+ }
+}
+
+/// Stream type which indicates that the stream is partial if end of input is reached
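+///
+/// A tiny example (sketch): wrapping a stream only flips `is_partial`.
+///
+/// ```
+/// use combine::stream::{PartialStream, StreamOnce};
+///
+/// let complete: &[u8] = b"abc";
+/// assert!(!complete.is_partial());
+/// assert!(PartialStream(complete).is_partial());
+/// ```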
+#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)]
+pub struct PartialStream<S>(pub S);
+
+impl<S> From<S> for PartialStream<S> {
+ fn from(t: S) -> Self {
+ PartialStream(t)
+ }
+}
+
+impl<S> Positioned for PartialStream<S>
+where
+ S: Positioned,
+{
+ #[inline]
+ fn position(&self) -> Self::Position {
+ self.0.position()
+ }
+}
+
+impl<S> ResetStream for PartialStream<S>
+where
+ S: ResetStream,
+{
+ type Checkpoint = S::Checkpoint;
+
+ #[inline]
+ fn checkpoint(&self) -> Self::Checkpoint {
+ self.0.checkpoint()
+ }
+
+ #[inline]
+ fn reset(&mut self, checkpoint: Self::Checkpoint) -> Result<(), S::Error> {
+ self.0.reset(checkpoint)
+ }
+}
+
+impl<S> StreamOnce for PartialStream<S>
+where
+ S: StreamOnce,
+{
+ type Token = S::Token;
+ type Range = S::Range;
+ type Position = S::Position;
+ type Error = S::Error;
+
+ #[inline]
+ fn uncons(&mut self) -> Result<S::Token, StreamErrorFor<Self>> {
+ self.0.uncons()
+ }
+
+ fn is_partial(&self) -> bool {
+ true
+ }
+}
+
+impl<S> RangeStreamOnce for PartialStream<S>
+where
+ S: RangeStreamOnce,
+{
+ #[inline]
+ fn uncons_range(&mut self, size: usize) -> Result<Self::Range, StreamErrorFor<Self>> {
+ self.0.uncons_range(size)
+ }
+
+ #[inline]
+ fn uncons_while<F>(&mut self, f: F) -> Result<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ self.0.uncons_while(f)
+ }
+
+ fn uncons_while1<F>(&mut self, f: F) -> ParseResult<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ self.0.uncons_while1(f)
+ }
+
+ #[inline]
+ fn distance(&self, end: &Self::Checkpoint) -> usize {
+ self.0.distance(end)
+ }
+
+ #[inline]
+ fn range(&self) -> Self::Range {
+ self.0.range()
+ }
+}
+
+/// Stream type which indicates that the stream is complete if end of input is reached
+///
+/// For most streams this is already the default but this wrapper can be used to override a nested
+/// `PartialStream`
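+///
+/// A small sketch of overriding a nested `PartialStream`:
+///
+/// ```
+/// use combine::stream::{CompleteStream, PartialStream, StreamOnce};
+///
+/// let partial = PartialStream(&b"abc"[..]);
+/// assert!(partial.is_partial());
+/// assert!(!CompleteStream(partial).is_partial());
+/// ```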
+#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)]
+#[repr(transparent)]
+pub struct CompleteStream<S>(pub S);
+
+impl<S> From<S> for CompleteStream<S> {
+ fn from(t: S) -> Self {
+ CompleteStream(t)
+ }
+}
+
+impl<'s, S> From<&'s mut S> for &'s mut CompleteStream<S> {
+ fn from(t: &'s mut S) -> Self {
+        // SAFETY: `CompleteStream` is `repr(transparent)`, so `S` and
+        // `CompleteStream<S>` have the same layout.
+ unsafe { &mut *(t as *mut S as *mut CompleteStream<S>) }
+ }
+}
+
+impl<S> Positioned for CompleteStream<S>
+where
+ S: Positioned,
+{
+ #[inline]
+ fn position(&self) -> Self::Position {
+ self.0.position()
+ }
+}
+
+impl<S> ResetStream for CompleteStream<S>
+where
+ S: ResetStream,
+{
+ type Checkpoint = S::Checkpoint;
+
+ #[inline]
+ fn checkpoint(&self) -> Self::Checkpoint {
+ self.0.checkpoint()
+ }
+
+ #[inline]
+ fn reset(&mut self, checkpoint: Self::Checkpoint) -> Result<(), S::Error> {
+ self.0.reset(checkpoint)
+ }
+}
+
+impl<S> StreamOnce for CompleteStream<S>
+where
+ S: StreamOnce,
+{
+ type Token = S::Token;
+ type Range = S::Range;
+ type Position = S::Position;
+ type Error = S::Error;
+
+ #[inline]
+ fn uncons(&mut self) -> Result<S::Token, StreamErrorFor<Self>> {
+ self.0.uncons()
+ }
+
+ fn is_partial(&self) -> bool {
+ false
+ }
+}
+
+impl<S> RangeStreamOnce for CompleteStream<S>
+where
+ S: RangeStreamOnce,
+{
+ #[inline]
+ fn uncons_range(&mut self, size: usize) -> Result<Self::Range, StreamErrorFor<Self>> {
+ self.0.uncons_range(size)
+ }
+
+ #[inline]
+ fn uncons_while<F>(&mut self, f: F) -> Result<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ self.0.uncons_while(f)
+ }
+
+ fn uncons_while1<F>(&mut self, f: F) -> ParseResult<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ self.0.uncons_while1(f)
+ }
+
+ #[inline]
+ fn distance(&self, end: &Self::Checkpoint) -> usize {
+ self.0.distance(end)
+ }
+
+ #[inline]
+ fn range(&self) -> Self::Range {
+ self.0.range()
+ }
+}
+
+#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)]
+pub struct MaybePartialStream<S>(pub S, pub bool);
+
+impl<S> Positioned for MaybePartialStream<S>
+where
+ S: Positioned,
+{
+ #[inline]
+ fn position(&self) -> Self::Position {
+ self.0.position()
+ }
+}
+
+impl<S> ResetStream for MaybePartialStream<S>
+where
+ S: ResetStream,
+{
+ type Checkpoint = S::Checkpoint;
+
+ #[inline]
+ fn checkpoint(&self) -> Self::Checkpoint {
+ self.0.checkpoint()
+ }
+
+ #[inline]
+ fn reset(&mut self, checkpoint: Self::Checkpoint) -> Result<(), S::Error> {
+ self.0.reset(checkpoint)
+ }
+}
+
+impl<S> StreamOnce for MaybePartialStream<S>
+where
+ S: StreamOnce,
+{
+ type Token = S::Token;
+ type Range = S::Range;
+ type Position = S::Position;
+ type Error = S::Error;
+
+ #[inline]
+ fn uncons(&mut self) -> Result<S::Token, StreamErrorFor<Self>> {
+ self.0.uncons()
+ }
+
+ fn is_partial(&self) -> bool {
+ self.1
+ }
+}
+
+impl<S> RangeStreamOnce for MaybePartialStream<S>
+where
+ S: RangeStreamOnce,
+{
+ #[inline]
+ fn uncons_range(&mut self, size: usize) -> Result<Self::Range, StreamErrorFor<Self>> {
+ self.0.uncons_range(size)
+ }
+
+ #[inline]
+ fn uncons_while<F>(&mut self, f: F) -> Result<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ self.0.uncons_while(f)
+ }
+
+ fn uncons_while1<F>(&mut self, f: F) -> ParseResult<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ self.0.uncons_while1(f)
+ }
+
+ #[inline]
+ fn distance(&self, end: &Self::Checkpoint) -> usize {
+ self.0.distance(end)
+ }
+
+ #[inline]
+ fn range(&self) -> Self::Range {
+ self.0.range()
+ }
+}
+
+/// Newtype for constructing a stream from a slice where the items in the slice are not copyable.
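+///
+/// A brief example (sketch): tokens are yielded by reference, so `T` does not need `Clone`.
+///
+/// ```
+/// use combine::stream::{SliceStream, StreamOnce};
+///
+/// let items = [1, 2, 3];
+/// let mut stream = SliceStream(&items[..]);
+/// assert_eq!(stream.uncons(), Ok(&1));
+/// ```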
+#[derive(Copy, Eq, PartialEq, Ord, PartialOrd, Debug)]
+pub struct SliceStream<'a, T>(pub &'a [T]);
+
+impl<'a, T> Clone for SliceStream<'a, T> {
+ fn clone(&self) -> SliceStream<'a, T> {
+ SliceStream(self.0)
+ }
+}
+
+impl<'a, T> Positioned for SliceStream<'a, T>
+where
+ T: PartialEq + 'a,
+{
+ #[inline]
+ fn position(&self) -> Self::Position {
+ PointerOffset::new(self.0.as_ptr() as usize)
+ }
+}
+
+impl<'a, T> StreamOnce for SliceStream<'a, T>
+where
+ T: PartialEq + 'a,
+{
+ type Token = &'a T;
+ type Range = &'a [T];
+ type Position = PointerOffset<[T]>;
+ type Error = UnexpectedParse;
+
+ #[inline]
+ fn uncons(&mut self) -> Result<&'a T, StreamErrorFor<Self>> {
+ match self.0.split_first() {
+ Some((first, rest)) => {
+ self.0 = rest;
+ Ok(first)
+ }
+ None => Err(UnexpectedParse::Eoi),
+ }
+ }
+}
+
+fn slice_uncons_while_ref<'a, T, F>(slice: &mut &'a [T], mut i: usize, mut f: F) -> &'a [T]
+where
+ F: FnMut(&'a T) -> bool,
+{
+ let len = slice.len();
+ let mut found = false;
+
+ macro_rules! check {
+ () => {
+ if !f(unsafe { slice.get_unchecked(i) }) {
+ found = true;
+ break;
+ }
+ i += 1;
+ };
+ }
+
+ while len - i >= 8 {
+ check!();
+ check!();
+ check!();
+ check!();
+ check!();
+ check!();
+ check!();
+ check!();
+ }
+
+ if !found {
+ while let Some(c) = slice.get(i) {
+ if !f(c) {
+ break;
+ }
+ i += 1;
+ }
+ }
+
+ let (result, remaining) = slice.split_at(i);
+ *slice = remaining;
+ result
+}
+
+impl<'a, T> RangeStreamOnce for SliceStream<'a, T>
+where
+ T: PartialEq + 'a,
+{
+ #[inline]
+ fn uncons_range(&mut self, size: usize) -> Result<&'a [T], StreamErrorFor<Self>> {
+ if size <= self.0.len() {
+ let (range, rest) = self.0.split_at(size);
+ self.0 = rest;
+ Ok(range)
+ } else {
+ Err(UnexpectedParse::Eoi)
+ }
+ }
+
+ #[inline]
+ fn uncons_while<F>(&mut self, f: F) -> Result<&'a [T], StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ Ok(slice_uncons_while_ref(&mut self.0, 0, f))
+ }
+
+ #[inline]
+ fn uncons_while1<F>(&mut self, mut f: F) -> ParseResult<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ match self.0.first() {
+ Some(c) => {
+ if !f(c) {
+ return PeekErr(Tracked::from(UnexpectedParse::Unexpected));
+ }
+ }
+ None => return PeekErr(Tracked::from(UnexpectedParse::Eoi)),
+ }
+
+ CommitOk(slice_uncons_while_ref(&mut self.0, 1, f))
+ }
+
+ #[inline]
+ fn distance(&self, end: &Self) -> usize {
+ end.0.len() - self.0.len()
+ }
+
+ fn range(&self) -> Self::Range {
+ self.0
+ }
+}
+
+/// Wrapper around iterators which allows them to be treated as a stream.
+/// Returned by [`IteratorStream::new`].
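+///
+/// A minimal example (sketch) treating a `char` iterator as a bare `StreamOnce`:
+///
+/// ```
+/// use combine::stream::{IteratorStream, StreamOnce};
+///
+/// let mut stream = IteratorStream::new("ab".chars());
+/// assert_eq!(stream.uncons(), Ok('a'));
+/// ```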
+#[derive(Copy, Clone, Debug)]
+pub struct IteratorStream<Input>(Input);
+
+impl<Input> IteratorStream<Input>
+where
+ Input: Iterator,
+{
+ /// Converts an `Iterator` into a stream.
+ ///
+    /// NOTE: This type does not implement `Positioned` and `Clone` and must be wrapped with types
+    /// such as `BufferedStreamRef` and `State` to become a `Stream` which can be parsed.
+ pub fn new<T>(iter: T) -> IteratorStream<Input>
+ where
+ T: IntoIterator<IntoIter = Input, Item = Input::Item>,
+ {
+ IteratorStream(iter.into_iter())
+ }
+}
+
+impl<Input> Iterator for IteratorStream<Input>
+where
+ Input: Iterator,
+{
+ type Item = Input::Item;
+ fn next(&mut self) -> Option<Input::Item> {
+ self.0.next()
+ }
+}
+
+impl<Input: Iterator> StreamOnce for IteratorStream<Input>
+where
+ Input::Item: Clone + PartialEq,
+{
+ type Token = Input::Item;
+ type Range = Input::Item;
+ type Position = ();
+ type Error = UnexpectedParse;
+
+ #[inline]
+ fn uncons(&mut self) -> Result<Self::Token, StreamErrorFor<Self>> {
+ match self.next() {
+ Some(x) => Ok(x),
+ None => Err(UnexpectedParse::Eoi),
+ }
+ }
+}
+
+/// Newtype around a pointer offset into a slice stream (`&[T]`/`&str`).
+pub struct PointerOffset<T: ?Sized>(pub usize, PhantomData<T>);
+
+impl<T: ?Sized> Clone for PointerOffset<T> {
+ fn clone(&self) -> Self {
+ PointerOffset::new(self.0)
+ }
+}
+
+impl<T: ?Sized> Copy for PointerOffset<T> {}
+
+impl<T: ?Sized> Default for PointerOffset<T> {
+ fn default() -> Self {
+ PointerOffset::new(0)
+ }
+}
+
+impl<T: ?Sized> PartialEq for PointerOffset<T> {
+ fn eq(&self, other: &Self) -> bool {
+ self.0 == other.0
+ }
+}
+
+impl<T: ?Sized> Eq for PointerOffset<T> {}
+
+impl<T: ?Sized> PartialOrd for PointerOffset<T> {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ self.0.partial_cmp(&other.0)
+ }
+}
+
+impl<T: ?Sized> Ord for PointerOffset<T> {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.0.cmp(&other.0)
+ }
+}
+
+impl<T> fmt::Debug for PointerOffset<T>
+where
+ T: ?Sized,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self)
+ }
+}
+
+impl<T> fmt::Display for PointerOffset<T>
+where
+ T: ?Sized,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "PointerOffset({:?})", self.0 as *const ())
+ }
+}
+
+impl<T> PointerOffset<T>
+where
+ T: ?Sized,
+{
+ pub fn new(offset: usize) -> Self {
+ PointerOffset(offset, PhantomData)
+ }
+
+ /// Converts the pointer-based position into an indexed position.
+ ///
+ /// ```rust
+ /// # extern crate combine;
+ /// # use combine::*;
+ /// # fn main() {
+ /// let text = "b";
+ /// let err = token('a').easy_parse(text).unwrap_err();
+ /// assert_eq!(err.position.0, text.as_ptr() as usize);
+ /// assert_eq!(err.map_position(|p| p.translate_position(text)).position, 0);
+ /// # }
+ /// ```
+ pub fn translate_position(mut self, initial_slice: &T) -> usize {
+ self.0 -= initial_slice as *const T as *const () as usize;
+ self.0
+ }
+}
+
+/// Decodes `input` using `parser`.
+///
+/// Returns `Ok((Some(token), committed_data))` if there was enough data to finish parsing using
+/// `parser`.
+/// Returns `Ok((None, committed_data))` if `input` is partial and did not contain enough data to
+/// finish parsing using `parser`.
+///
+/// See `examples/async.rs` for example usage in a `tokio_io::codec::Decoder`
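+///
+/// A short example (sketch) decoding from a complete `&str`, so no partial input handling is
+/// exercised:
+///
+/// ```
+/// use combine::parser::range::take_while1;
+/// use combine::stream::decode;
+///
+/// let mut input = "123 rest";
+/// let mut state = Default::default();
+/// let result = decode(take_while1(|c: char| c.is_ascii_digit()), &mut input, &mut state);
+/// assert_eq!(result, Ok((Some("123"), 3)));
+/// ```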
+pub fn decode<Input, P>(
+ mut parser: P,
+ input: &mut Input,
+ partial_state: &mut P::PartialState,
+) -> Result<(Option<P::Output>, usize), <Input as StreamOnce>::Error>
+where
+ P: Parser<Input>,
+ Input: RangeStream,
+{
+ let start = input.checkpoint();
+ match parser.parse_with_state(input, partial_state) {
+ Ok(message) => Ok((Some(message), input.distance(&start))),
+ Err(err) => {
+ if err.is_unexpected_end_of_input() {
+ if input.is_partial() {
+ // The parser expected more input to parse and input is partial, return `None`
+ // as we did not finish and also return how much may be removed from the stream
+ Ok((None, input.distance(&start)))
+ } else {
+ Err(err)
+ }
+ } else {
+ Err(err)
+ }
+ }
+ }
+}
+
+/// Decodes `input` using `parser`. Like `decode` but works directly in both
+/// `tokio_util::Decoder::decode` and `tokio_util::Decoder::decode_eof`
+///
+/// Returns `Ok((Some(token), committed_data))` if there was enough data to finish parsing using
+/// `parser`.
+/// Returns `Ok((None, committed_data))` if `input` is partial and did not contain enough data to
+/// finish parsing using `parser`.
+/// Returns `Ok((None, 0))` if `input` is at end of input and no data was committed, indicating
+/// that decoding is finished.
+///
+/// See `examples/async.rs` for example usage in a `tokio_io::codec::Decoder`
+pub fn decode_tokio<Input, P>(
+ mut parser: P,
+ input: &mut Input,
+ partial_state: &mut P::PartialState,
+) -> Result<(Option<P::Output>, usize), <Input as StreamOnce>::Error>
+where
+ P: Parser<Input>,
+ Input: RangeStream,
+{
+ let start = input.checkpoint();
+ match parser.parse_with_state(input, partial_state) {
+ Ok(message) => Ok((Some(message), input.distance(&start))),
+ Err(err) => {
+ if err.is_unexpected_end_of_input() {
+ if input.is_partial() {
+ // The parser expected more input to parse and input is partial, return `None`
+ // as we did not finish and also return how much may be removed from the stream
+ Ok((None, input.distance(&start)))
+ } else if input_at_eof(input) && input.distance(&start) == 0 {
+ // We are at eof and the input is empty, return None to indicate that we are
+ // done
+ Ok((None, 0))
+ } else {
+ Err(err)
+ }
+ } else {
+ Err(err)
+ }
+ }
+ }
+}
+
+/// Parses an instance of `std::io::Read` as a `&[u8]` without reading the entire file into
+/// memory.
+///
+/// This is defined as a macro to work around the lack of Higher Ranked Types. See the
+/// example for how to pass a parser to the macro (constructing parts of the parser outside of
+/// the `decode!` call is unlikely to work).
+///
+/// ```
+/// use std::{
+/// fs::File,
+/// };
+/// use combine::{decode, satisfy, skip_many1, many1, sep_end_by, Parser, stream::Decoder};
+///
+/// let mut read = File::open("README.md").unwrap();
+/// let mut decoder = Decoder::new();
+/// let is_whitespace = |b: u8| b == b' ' || b == b'\r' || b == b'\n';
+/// assert_eq!(
+/// decode!(
+/// decoder,
+/// read,
+/// {
+/// let word = many1(satisfy(|b| !is_whitespace(b)));
+/// sep_end_by(word, skip_many1(satisfy(is_whitespace))).map(|words: Vec<Vec<u8>>| words.len())
+/// },
+/// |input, _position| combine::easy::Stream::from(input),
+/// ).map_err(combine::easy::Errors::<u8, &[u8], _>::from),
+/// Ok(819),
+/// );
+/// ```
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+#[macro_export]
+macro_rules! decode {
+ ($decoder: expr, $read: expr, $parser: expr $(,)?) => {
+ $crate::decode!($decoder, $read, $parser, |input, _position| input, |x| x)
+ };
+
+ ($decoder: expr, $read: expr, $parser: expr, $input_stream: expr $(,)?) => {
+ $crate::decode!($decoder, $read, $parser, $input_stream, |x| x)
+ };
+
+ ($decoder: expr, $read: expr, $parser: expr, $input_stream: expr, $post_decode: expr $(,)?) => {
+ match $decoder {
+ ref mut decoder => match $read {
+ ref mut read => 'outer: loop {
+ let (opt, removed) = {
+ let (state, position, buffer, end_of_input) = decoder.__inner();
+ let buffer =
+ $crate::stream::buf_reader::CombineBuffer::buffer(buffer, read);
+
+ let mut stream = $crate::stream::call_with2(
+ $crate::stream::MaybePartialStream(buffer, !end_of_input),
+ *position,
+ $input_stream,
+ );
+ let result = $crate::stream::decode($parser, &mut stream, state);
+ *position = $crate::stream::Positioned::position(&stream);
+ $crate::stream::call_with(stream, $post_decode);
+ match result {
+ Ok(x) => x,
+ Err(err) => {
+ break 'outer Err($crate::stream::decoder::Error::Parse(err))
+ }
+ }
+ };
+
+ decoder.advance(&mut *read, removed);
+
+ if let Some(v) = opt {
+ break 'outer Ok(v);
+ }
+
+ match decoder.__before_parse(&mut *read) {
+ Ok(x) => x,
+ Err(error) => {
+ break 'outer Err($crate::stream::decoder::Error::Io {
+ error,
+ position: Clone::clone(decoder.position()),
+ })
+ }
+ };
+ },
+ },
+ }
+ };
+}
+
+/// Parses an instance of `futures::io::AsyncRead` as a `&[u8]` without reading the entire file into
+/// memory.
+///
+/// This is defined as a macro to work around the lack of Higher Ranked Types. See the
+/// example for how to pass a parser to the macro (constructing parts of the parser outside of
+/// the `decode_futures_03!` call is unlikely to work).
+///
+/// ```
+/// # use futures_03_dep as futures;
+/// use futures::pin_mut;
+/// use async_std::{
+/// fs::File,
+/// task,
+/// };
+///
+/// use combine::{decode_futures_03, satisfy, skip_many1, many1, sep_end_by, Parser, stream::Decoder};
+///
+/// fn main() {
+/// task::block_on(main_());
+/// }
+///
+/// async fn main_() {
+/// let mut read = File::open("README.md").await.unwrap();
+/// let mut decoder = Decoder::new();
+/// let is_whitespace = |b: u8| b == b' ' || b == b'\r' || b == b'\n';
+/// assert_eq!(
+/// decode_futures_03!(
+/// decoder,
+/// read,
+/// {
+/// let word = many1(satisfy(|b| !is_whitespace(b)));
+/// sep_end_by(word, skip_many1(satisfy(is_whitespace))).map(|words: Vec<Vec<u8>>| words.len())
+/// },
+/// |input, _position| combine::easy::Stream::from(input),
+/// ).map_err(combine::easy::Errors::<u8, &[u8], _>::from),
+/// Ok(819),
+/// );
+/// }
+/// ```
+#[cfg(feature = "futures-io-03")]
+#[cfg_attr(docsrs, doc(cfg(feature = "futures-io-03")))]
+#[macro_export]
+macro_rules! decode_futures_03 {
+    ($decoder: expr, $read: expr, $parser: expr $(,)?) => {
+        $crate::decode_futures_03!($decoder, $read, $parser, |input, _position| input)
+    };
+
+
+ ($decoder: expr, $read: expr, $parser: expr, $input_stream: expr $(,)?) => {
+ $crate::decode_futures_03!($decoder, $read, $parser, $input_stream, |x| x)
+ };
+
+ ($decoder: expr, $read: expr, $parser: expr, $input_stream: expr, $post_decode: expr $(,)?) => {
+ match $decoder {
+ ref mut decoder => match $read {
+ ref mut read => 'outer: loop {
+ let (opt, removed) = {
+ let (state, position, buffer, end_of_input) = decoder.__inner();
+ let buffer =
+ $crate::stream::buf_reader::CombineBuffer::buffer(buffer, &*read);
+
+ let mut stream = $crate::stream::call_with2(
+ $crate::stream::MaybePartialStream(buffer, !end_of_input),
+ *position,
+ $input_stream,
+ );
+ let result = $crate::stream::decode($parser, &mut stream, state);
+ *position = $crate::stream::Positioned::position(&stream);
+ $crate::stream::call_with(stream, $post_decode);
+ match result {
+ Ok(x) => x,
+ Err(err) => break 'outer Err($crate::stream::decoder::Error::Parse(err)),
+ }
+ };
+
+ decoder.advance_pin(std::pin::Pin::new(&mut *read), removed);
+
+ if let Some(v) = opt {
+ break 'outer Ok(v);
+ }
+
+
+ match decoder.__before_parse_async(std::pin::Pin::new(&mut *read)).await {
+ Ok(_) => (),
+ Err(error) => {
+ break 'outer Err($crate::stream::decoder::Error::Io {
+ error,
+ position: Clone::clone(decoder.position()),
+ })
+ }
+ };
+ }
+ }
+ }
+ };
+}
+
+/// Parses an instance of `tokio::io::AsyncRead` as a `&[u8]` without reading the entire file into
+/// memory.
+///
+/// This is defined as a macro to work around the lack of Higher Ranked Types. See the
+/// example for how to pass a parser to the macro (constructing parts of the parser outside of
+/// the `decode_tokio_02!` call is unlikely to work).
+///
+/// ```
+/// # use tokio_02_dep as tokio;
+/// # use futures_03_dep as futures;
+/// use futures::pin_mut;
+/// use tokio::{
+/// fs::File,
+/// };
+///
+/// use combine::{decode_tokio_02, satisfy, skip_many1, many1, sep_end_by, Parser, stream::{Decoder, buf_reader::BufReader}};
+///
+/// #[tokio::main]
+/// async fn main() {
+/// let mut read = BufReader::new(File::open("README.md").await.unwrap());
+/// let mut decoder = Decoder::new_bufferless();
+/// let is_whitespace = |b: u8| b == b' ' || b == b'\r' || b == b'\n';
+/// assert_eq!(
+/// decode_tokio_02!(
+/// decoder,
+/// read,
+/// {
+/// let word = many1(satisfy(|b| !is_whitespace(b)));
+/// sep_end_by(word, skip_many1(satisfy(is_whitespace))).map(|words: Vec<Vec<u8>>| words.len())
+/// },
+/// |input, _position| combine::easy::Stream::from(input),
+/// ).map_err(combine::easy::Errors::<u8, &[u8], _>::from),
+/// Ok(819),
+/// );
+/// }
+/// ```
+#[cfg(feature = "tokio-02")]
+#[cfg_attr(docsrs, doc(cfg(feature = "tokio-02")))]
+#[macro_export]
+macro_rules! decode_tokio_02 {
+ ($decoder: expr, $read: expr, $parser: expr $(,)?) => {
+ $crate::decode_tokio_02!($decoder, $read, $parser, |input, _position| input)
+ };
+
+ ($decoder: expr, $read: expr, $parser: expr, $input_stream: expr $(,)?) => {
+ $crate::decode_tokio_02!($decoder, $read, $parser, $input_stream, |x| x)
+ };
+
+ ($decoder: expr, $read: expr, $parser: expr, $input_stream: expr, $post_decode: expr $(,)?) => {
+ match $decoder {
+ ref mut decoder => match $read {
+ ref mut read => 'outer: loop {
+ let (opt, removed) = {
+ let (state, position, buffer, end_of_input) = decoder.__inner();
+ let buffer =
+ $crate::stream::buf_reader::CombineBuffer::buffer(buffer, &*read);
+ let mut stream = $crate::stream::call_with2(
+ $crate::stream::MaybePartialStream(buffer, !end_of_input),
+ *position,
+ $input_stream,
+ );
+ let result = $crate::stream::decode($parser, &mut stream, state);
+ *position = $crate::stream::Positioned::position(&stream);
+ $crate::stream::call_with(stream, $post_decode);
+ match result {
+ Ok(x) => x,
+ Err(err) => {
+ break 'outer Err($crate::stream::decoder::Error::Parse(err))
+ }
+ }
+ };
+
+ decoder.advance_pin(std::pin::Pin::new(read), removed);
+
+ if let Some(v) = opt {
+ break 'outer Ok(v);
+ }
+
+ match decoder
+ .__before_parse_tokio_02(std::pin::Pin::new(&mut *read))
+ .await
+ {
+ Ok(x) => x,
+ Err(error) => {
+ break 'outer Err($crate::stream::decoder::Error::Io {
+ error,
+ position: Clone::clone(decoder.position()),
+ })
+ }
+ };
+ },
+ },
+ }
+ };
+}
+
+/// Parses an instance of `tokio::io::AsyncRead` as a `&[u8]` without reading the entire file into
+/// memory.
+///
+/// This is defined as a macro to work around the lack of Higher Ranked Types. See the
+/// example for how to pass a parser to the macro (constructing parts of the parser outside of
+/// the `decode_tokio_03!` call is unlikely to work).
+///
+/// ```
+/// # use tokio_03_dep as tokio;
+/// # use futures_03_dep as futures;
+/// use futures::pin_mut;
+/// use tokio::{
+/// fs::File,
+/// };
+///
+/// use combine::{decode_tokio_03, satisfy, skip_many1, many1, sep_end_by, Parser, stream::{Decoder, buf_reader::BufReader}};
+///
+/// #[tokio::main]
+/// async fn main() {
+/// let mut read = BufReader::new(File::open("README.md").await.unwrap());
+/// let mut decoder = Decoder::new_bufferless();
+/// let is_whitespace = |b: u8| b == b' ' || b == b'\r' || b == b'\n';
+/// assert_eq!(
+/// decode_tokio_03!(
+/// decoder,
+/// read,
+/// {
+/// let word = many1(satisfy(|b| !is_whitespace(b)));
+/// sep_end_by(word, skip_many1(satisfy(is_whitespace))).map(|words: Vec<Vec<u8>>| words.len())
+/// },
+/// |input, _position| combine::easy::Stream::from(input),
+/// ).map_err(combine::easy::Errors::<u8, &[u8], _>::from),
+/// Ok(819),
+/// );
+/// }
+/// ```
+#[cfg(feature = "tokio-03")]
+#[cfg_attr(docsrs, doc(cfg(feature = "tokio-03")))]
+#[macro_export]
+macro_rules! decode_tokio_03 {
+ ($decoder: expr, $read: expr, $parser: expr $(,)?) => {
+ $crate::decode_tokio_03!($decoder, $read, $parser, |input, _position| input)
+ };
+
+ ($decoder: expr, $read: expr, $parser: expr, $input_stream: expr $(,)?) => {
+ $crate::decode_tokio_03!($decoder, $read, $parser, $input_stream, |x| x)
+ };
+
+ ($decoder: expr, $read: expr, $parser: expr, $input_stream: expr, $post_decode: expr $(,)?) => {
+ match $decoder {
+ ref mut decoder => match $read {
+ ref mut read => 'outer: loop {
+ let (opt, removed) = {
+ let (state, position, buffer, end_of_input) = decoder.__inner();
+ let buffer =
+ $crate::stream::buf_reader::CombineBuffer::buffer(buffer, &*read);
+ let mut stream = $crate::stream::call_with2(
+ $crate::stream::MaybePartialStream(buffer, !end_of_input),
+ *position,
+ $input_stream,
+ );
+ let result = $crate::stream::decode($parser, &mut stream, state);
+ *position = $crate::stream::Positioned::position(&stream);
+ $crate::stream::call_with(stream, $post_decode);
+ match result {
+ Ok(x) => x,
+ Err(err) => {
+ break 'outer Err($crate::stream::decoder::Error::Parse(err))
+ }
+ }
+ };
+
+ decoder.advance_pin(std::pin::Pin::new(read), removed);
+
+ if let Some(v) = opt {
+ break 'outer Ok(v);
+ }
+
+ match decoder
+ .__before_parse_tokio_03(std::pin::Pin::new(&mut *read))
+ .await
+ {
+ Ok(x) => x,
+ Err(error) => {
+ break 'outer Err($crate::stream::decoder::Error::Io {
+ error,
+ position: Clone::clone(decoder.position()),
+ })
+ }
+ };
+ },
+ },
+ }
+ };
+}
+
+/// Parses an instance of `tokio::io::AsyncRead` as a `&[u8]` without reading the entire file into
+/// memory.
+///
+/// This is defined as a macro to work around the lack of Higher Ranked Types. See the
+/// example for how to pass a parser to the macro (constructing parts of the parser outside of
+/// the `decode_tokio!` call is unlikely to work).
+///
+/// ```
+/// # use tokio_dep as tokio;
+/// # use futures_03_dep as futures;
+/// use futures::pin_mut;
+/// use tokio::{
+/// fs::File,
+/// };
+///
+/// use combine::{decode_tokio, satisfy, skip_many1, many1, sep_end_by, Parser, stream::{Decoder, buf_reader::BufReader}};
+///
+/// #[tokio::main]
+/// async fn main() {
+/// let mut read = BufReader::new(File::open("README.md").await.unwrap());
+/// let mut decoder = Decoder::new_bufferless();
+/// let is_whitespace = |b: u8| b == b' ' || b == b'\r' || b == b'\n';
+/// assert_eq!(
+/// decode_tokio!(
+/// decoder,
+/// read,
+/// {
+/// let word = many1(satisfy(|b| !is_whitespace(b)));
+/// sep_end_by(word, skip_many1(satisfy(is_whitespace))).map(|words: Vec<Vec<u8>>| words.len())
+/// },
+/// |input, _position| combine::easy::Stream::from(input),
+/// ).map_err(combine::easy::Errors::<u8, &[u8], _>::from),
+/// Ok(819),
+/// );
+/// }
+/// ```
+#[cfg(feature = "tokio")]
+#[cfg_attr(docsrs, doc(cfg(feature = "tokio")))]
+#[macro_export]
+macro_rules! decode_tokio {
+ ($decoder: expr, $read: expr, $parser: expr $(,)?) => {
+ $crate::decode_tokio!($decoder, $read, $parser, |input, _position| input)
+ };
+
+ ($decoder: expr, $read: expr, $parser: expr, $input_stream: expr $(,)?) => {
+ $crate::decode_tokio!($decoder, $read, $parser, $input_stream, |x| x)
+ };
+
+ ($decoder: expr, $read: expr, $parser: expr, $input_stream: expr, $post_decode: expr $(,)?) => {
+ match $decoder {
+ ref mut decoder => match $read {
+ ref mut read => 'outer: loop {
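+                    // One decode cycle: run the parser over whatever is currently buffered,
+                    // advance the buffer past the bytes the parser committed, return the value
+                    // if one was produced, and otherwise read more bytes from `read` before
+                    // trying again.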
+ let (opt, removed) = {
+ let (state, position, buffer, end_of_input) = decoder.__inner();
+ let buffer =
+ $crate::stream::buf_reader::CombineBuffer::buffer(buffer, &*read);
+ let mut stream = $crate::stream::call_with2(
+ $crate::stream::MaybePartialStream(buffer, !end_of_input),
+ *position,
+ $input_stream,
+ );
+ let result = $crate::stream::decode($parser, &mut stream, state);
+ *position = $crate::stream::Positioned::position(&stream);
+ $crate::stream::call_with(stream, $post_decode);
+ match result {
+ Ok(x) => x,
+ Err(err) => {
+ break 'outer Err($crate::stream::decoder::Error::Parse(err))
+ }
+ }
+ };
+
+ decoder.advance_pin(std::pin::Pin::new(read), removed);
+
+ if let Some(v) = opt {
+ break 'outer Ok(v);
+ }
+
+ match decoder
+ .__before_parse_tokio(std::pin::Pin::new(&mut *read))
+ .await
+ {
+ Ok(x) => x,
+ Err(error) => {
+ break 'outer Err($crate::stream::decoder::Error::Io {
+ error,
+ position: Clone::clone(decoder.position()),
+ })
+ }
+ };
+ },
+ },
+ }
+ };
+}
+
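+// Helpers used by the decode macros above to apply the user supplied `$input_stream` and
+// `$post_decode` closures.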
+#[doc(hidden)]
+pub fn call_with2<F, A, B, R>(a: A, b: B, f: F) -> R
+where
+ F: FnOnce(A, B) -> R,
+{
+ f(a, b)
+}
+
+#[doc(hidden)]
+pub fn call_with<F, A, R>(a: A, f: F) -> R
+where
+ F: FnOnce(A) -> R,
+{
+ f(a)
+}
+
+#[cfg(test)]
+mod tests {
+
+ use super::*;
+
+ #[test]
+ #[inline]
+ fn uncons_range_at_end() {
+ assert_eq!("".uncons_range(0), Ok(""));
+ assert_eq!("123".uncons_range(3), Ok("123"));
+ assert_eq!((&[1][..]).uncons_range(1), Ok(&[1][..]));
+ let s: &[u8] = &[];
+ assert_eq!(SliceStream(s).uncons_range(0), Ok(&[][..]));
+ }
+
+ #[test]
+ fn larger_than_1_byte_items_return_correct_distance() {
+ let mut input = &[123i32, 0i32][..];
+
+ let before = input.checkpoint();
+ assert_eq!(input.distance(&before), 0);
+
+ input.uncons().unwrap();
+ assert_eq!(input.distance(&before), 1);
+
+ input.uncons().unwrap();
+ assert_eq!(input.distance(&before), 2);
+
+ input.reset(before.clone()).unwrap();
+ assert_eq!(input.distance(&before), 0);
+ }
+}
diff --git a/src/stream/position.rs b/src/stream/position.rs
new file mode 100644
index 0000000..137b05e
--- /dev/null
+++ b/src/stream/position.rs
@@ -0,0 +1,465 @@
+use crate::{
+ error::{ParseError, ParseResult, StreamError},
+ lib::fmt,
+ stream::{
+ IteratorStream, Positioned, RangeStreamOnce, ResetStream, SliceStream, StreamErrorFor,
+ StreamOnce,
+ },
+};
+
+#[cfg(feature = "std")]
+use crate::stream::read;
+
+/// Trait for tracking the current position of a `Stream`.
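+///
+/// A minimal sketch of implementing a custom positioner; the `ByteOffset` type below is
+/// hypothetical and only illustrates the shape of an implementation:
+///
+/// ```
+/// use combine::stream::position::Positioner;
+///
+/// // Hypothetical positioner which tracks a plain byte offset.
+/// #[derive(Clone, Default)]
+/// struct ByteOffset(u64);
+///
+/// impl Positioner<u8> for ByteOffset {
+///     type Position = u64;
+///     type Checkpoint = Self;
+///
+///     fn position(&self) -> u64 {
+///         self.0
+///     }
+///     fn update(&mut self, _token: &u8) {
+///         self.0 += 1;
+///     }
+///     fn checkpoint(&self) -> Self {
+///         self.clone()
+///     }
+///     fn reset(&mut self, checkpoint: Self) {
+///         *self = checkpoint;
+///     }
+/// }
+/// ```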
+pub trait Positioner<Item> {
+ /// The type which keeps track of the position
+ type Position: Clone + Ord;
+
+ type Checkpoint: Clone;
+
+ /// Returns the current position
+ fn position(&self) -> Self::Position;
+ /// Updates the position given that `token` has been taken from the stream
+ fn update(&mut self, token: &Item);
+
+ fn checkpoint(&self) -> Self::Checkpoint;
+ fn reset(&mut self, checkpoint: Self::Checkpoint);
+}
+
+/// Trait for tracking the current position of a `RangeStream`.
+pub trait RangePositioner<Item, Range>: Positioner<Item> {
+ /// Updates the position given that `range` has been taken from the stream
+ fn update_range(&mut self, range: &Range);
+}
+
+/// Defines a default `Positioner` type for a particular `Stream` type.
+pub trait DefaultPositioned {
+ type Positioner: Default;
+}
+
+impl<'a> DefaultPositioned for &'a str {
+ type Positioner = SourcePosition;
+}
+
+impl<'a, T> DefaultPositioned for &'a [T] {
+ type Positioner = IndexPositioner;
+}
+
+impl<'a, T> DefaultPositioned for SliceStream<'a, T> {
+ type Positioner = IndexPositioner;
+}
+
+impl<T> DefaultPositioned for IteratorStream<T> {
+ type Positioner = IndexPositioner;
+}
+
+#[cfg(feature = "std")]
+impl<R> DefaultPositioned for read::Stream<R> {
+ type Positioner = IndexPositioner;
+}
+
+/// The `Stream<Input, X>` struct maintains the current position in the stream `Input`, using
+/// the positioner `X` (an implementation of the `Positioner` trait) to track the position.
+///
+/// ```
+/// # #![cfg(feature = "std")]
+/// # extern crate combine;
+/// # use combine::*;
+/// # use combine::stream::easy;
+/// # use combine::stream::position;
+/// # fn main() {
+/// let result = token(b'9')
+/// .message("Not a nine")
+/// .easy_parse(position::Stream::new(&b"8"[..]));
+/// assert_eq!(result, Err(easy::Errors {
+/// position: 0,
+/// errors: vec![
+/// easy::Error::Unexpected(b'8'.into()),
+/// easy::Error::Expected(b'9'.into()),
+/// easy::Error::Message("Not a nine".into())
+/// ]
+/// }));
+/// # }
+/// ```
+#[derive(Clone, Debug, PartialEq)]
+pub struct Stream<Input, X> {
+ /// The input stream used when items are requested
+ pub input: Input,
+ /// The positioner used to update the current position
+ pub positioner: X,
+}
+
+impl<Input, X> Stream<Input, X>
+where
+ Input: StreamOnce,
+ X: Positioner<Input::Token>,
+{
+ /// Creates a new `Stream<Input, X>` from an input stream and a positioner.
+ pub fn with_positioner(input: Input, positioner: X) -> Stream<Input, X> {
+ Stream { input, positioner }
+ }
+}
+
+impl<Input> Stream<Input, Input::Positioner>
+where
+ Input: StreamOnce + DefaultPositioned,
+ Input::Positioner: Positioner<Input::Token>,
+{
+ /// Creates a new `Stream<Input, X>` from an input stream and its default positioner.
+ pub fn new(input: Input) -> Stream<Input, Input::Positioner> {
+ Stream::with_positioner(input, Input::Positioner::default())
+ }
+}
+
+impl<Input, X, E> Positioned for Stream<Input, X>
+where
+ Input: StreamOnce,
+ X: Positioner<Input::Token>,
+ E: StreamError<Input::Token, Input::Range>,
+ Input::Error: ParseError<Input::Token, Input::Range, X::Position, StreamError = E>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position, StreamError = E>,
+{
+ #[inline]
+ fn position(&self) -> Self::Position {
+ self.positioner.position()
+ }
+}
+
+impl<Input, X, S> StreamOnce for Stream<Input, X>
+where
+ Input: StreamOnce,
+ X: Positioner<Input::Token>,
+ S: StreamError<Input::Token, Input::Range>,
+ Input::Error: ParseError<Input::Token, Input::Range, X::Position, StreamError = S>
+ + ParseError<Input::Token, Input::Range, Input::Position, StreamError = S>,
+{
+ type Token = Input::Token;
+ type Range = Input::Range;
+ type Position = X::Position;
+ type Error = Input::Error;
+
+ #[inline]
+ fn uncons(&mut self) -> Result<Input::Token, StreamErrorFor<Self>> {
+ self.input.uncons().map(|c| {
+ self.positioner.update(&c);
+ c
+ })
+ }
+
+ fn is_partial(&self) -> bool {
+ self.input.is_partial()
+ }
+}
+
+impl<Item, T> Positioner<Item> for &'_ mut T
+where
+ Item: Clone,
+ T: ?Sized + Positioner<Item>,
+{
+ type Position = T::Position;
+ type Checkpoint = T::Checkpoint;
+
+ #[inline]
+ fn position(&self) -> T::Position {
+ (**self).position()
+ }
+
+ #[inline]
+ fn update(&mut self, item: &Item) {
+ (**self).update(item)
+ }
+
+ #[inline]
+ fn checkpoint(&self) -> Self::Checkpoint {
+ (**self).checkpoint()
+ }
+
+ #[inline]
+ fn reset(&mut self, checkpoint: Self::Checkpoint) {
+ (**self).reset(checkpoint)
+ }
+}
+
+impl<Item, Range, T> RangePositioner<Item, Range> for &'_ mut T
+where
+ Item: Clone,
+ Range: Clone + crate::stream::Range,
+ T: ?Sized + RangePositioner<Item, Range>,
+{
+ fn update_range(&mut self, range: &Range) {
+ (**self).update_range(range);
+ }
+}
+
+/// The `IndexPositioner` struct maintains the current index into the stream `Input`. The
+/// initial index is 0. Each committed token increments the index by 1; each committed range
+/// increments it by `range.len()`.
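+///
+/// A short sketch of how the index advances when parsing from a byte slice (which uses
+/// `IndexPositioner` by default):
+///
+/// ```
+/// use combine::{any, Parser, stream::{position, Positioned}};
+///
+/// let stream = position::Stream::new(&b"abc"[..]);
+/// let (token, rest) = any().parse(stream).unwrap();
+/// assert_eq!(token, b'a');
+/// // One token has been committed, so the index is now 1.
+/// assert_eq!(rest.position(), 1);
+/// ```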
+#[derive(Clone, Debug, Default, PartialEq)]
+pub struct IndexPositioner(usize);
+
+impl<Item> Positioner<Item> for IndexPositioner
+where
+ Item: Clone,
+{
+ type Position = usize;
+ type Checkpoint = Self;
+
+ #[inline]
+ fn position(&self) -> usize {
+ self.0
+ }
+
+ #[inline]
+ fn update(&mut self, _item: &Item) {
+ self.0 += 1
+ }
+
+ #[inline]
+ fn checkpoint(&self) -> Self::Checkpoint {
+ self.clone()
+ }
+
+ #[inline]
+ fn reset(&mut self, checkpoint: Self::Checkpoint) {
+ *self = checkpoint;
+ }
+}
+
+impl IndexPositioner {
+ pub fn new() -> IndexPositioner {
+ IndexPositioner::new_with_position(0)
+ }
+
+ pub fn new_with_position(position: usize) -> IndexPositioner {
+ IndexPositioner(position)
+ }
+}
+
+impl<Item, Range> RangePositioner<Item, Range> for IndexPositioner
+where
+ Item: Clone,
+ Range: Clone + crate::stream::Range,
+{
+ fn update_range(&mut self, range: &Range) {
+ self.0 += range.len()
+ }
+}
+
+/// Struct which represents a position in a source file.
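+///
+/// A brief sketch of how `update` moves the position (lines and columns are 1-based):
+///
+/// ```
+/// use combine::stream::position::{Positioner, SourcePosition};
+///
+/// let mut pos = SourcePosition::default();
+/// assert_eq!(pos, SourcePosition { line: 1, column: 1 });
+///
+/// pos.update(&'a');  // advances the column
+/// pos.update(&'\n'); // moves to the start of the next line
+/// assert_eq!(pos, SourcePosition { line: 2, column: 1 });
+/// ```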
+#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd)]
+pub struct SourcePosition {
+ /// Current line of the input
+ pub line: i32,
+ /// Current column of the input
+ pub column: i32,
+}
+
+impl Default for SourcePosition {
+ fn default() -> Self {
+ SourcePosition { line: 1, column: 1 }
+ }
+}
+
+impl fmt::Display for SourcePosition {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "line: {}, column: {}", self.line, self.column)
+ }
+}
+
+impl SourcePosition {
+ pub fn new() -> Self {
+ SourcePosition::default()
+ }
+}
+
+impl Positioner<char> for SourcePosition {
+ type Position = SourcePosition;
+ type Checkpoint = Self;
+
+ #[inline]
+ fn position(&self) -> SourcePosition {
+ *self
+ }
+
+ #[inline]
+ fn update(&mut self, token: &char) {
+ self.column += 1;
+ if *token == '\n' {
+ self.column = 1;
+ self.line += 1;
+ }
+ }
+
+ #[inline]
+ fn checkpoint(&self) -> Self::Checkpoint {
+ *self
+ }
+
+ #[inline]
+ fn reset(&mut self, checkpoint: Self::Checkpoint) {
+ *self = checkpoint;
+ }
+}
+
+impl Positioner<u8> for SourcePosition {
+ type Position = SourcePosition;
+ type Checkpoint = Self;
+
+ #[inline]
+ fn position(&self) -> SourcePosition {
+ *self
+ }
+
+ #[inline]
+ fn update(&mut self, token: &u8) {
+ self.column += 1;
+ if *token == b'\n' {
+ self.column = 1;
+ self.line += 1;
+ }
+ }
+
+ #[inline]
+ fn checkpoint(&self) -> Self::Checkpoint {
+ *self
+ }
+
+ #[inline]
+ fn reset(&mut self, checkpoint: Self::Checkpoint) {
+ *self = checkpoint;
+ }
+}
+
+impl<'a> RangePositioner<char, &'a str> for SourcePosition {
+ fn update_range(&mut self, range: &&'a str) {
+ for c in range.chars() {
+ self.update(&c);
+ }
+ }
+}
+
+impl<Input, X, S> RangeStreamOnce for Stream<Input, X>
+where
+ Input: RangeStreamOnce,
+ X: RangePositioner<Input::Token, Input::Range>,
+ S: StreamError<Input::Token, Input::Range>,
+ Input::Error: ParseError<Input::Token, Input::Range, X::Position, StreamError = S>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position, StreamError = S>,
+ Input::Position: Clone + Ord,
+{
+ #[inline]
+ fn uncons_range(&mut self, size: usize) -> Result<Input::Range, StreamErrorFor<Self>> {
+ self.input.uncons_range(size).map(|range| {
+ self.positioner.update_range(&range);
+ range
+ })
+ }
+
+ #[inline]
+ fn uncons_while<F>(&mut self, mut predicate: F) -> Result<Input::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Input::Token) -> bool,
+ {
+ let positioner = &mut self.positioner;
+ self.input.uncons_while(|t| {
+ if predicate(t.clone()) {
+ positioner.update(&t);
+ true
+ } else {
+ false
+ }
+ })
+ }
+
+ #[inline]
+ fn uncons_while1<F>(
+ &mut self,
+ mut predicate: F,
+ ) -> ParseResult<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ let positioner = &mut self.positioner;
+ self.input.uncons_while1(|t| {
+ if predicate(t.clone()) {
+ positioner.update(&t);
+ true
+ } else {
+ false
+ }
+ })
+ }
+
+ #[inline]
+ fn distance(&self, end: &Self::Checkpoint) -> usize {
+ self.input.distance(&end.input)
+ }
+
+ fn range(&self) -> Self::Range {
+ self.input.range()
+ }
+}
+
+impl<Input, X, S> ResetStream for Stream<Input, X>
+where
+ Input: ResetStream,
+ X: Positioner<Input::Token>,
+ S: StreamError<Input::Token, Input::Range>,
+ Input::Error: ParseError<Input::Token, Input::Range, X::Position, StreamError = S>,
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position, StreamError = S>,
+{
+ type Checkpoint = Stream<Input::Checkpoint, X::Checkpoint>;
+ fn checkpoint(&self) -> Self::Checkpoint {
+ Stream {
+ input: self.input.checkpoint(),
+ positioner: self.positioner.checkpoint(),
+ }
+ }
+ fn reset(&mut self, checkpoint: Self::Checkpoint) -> Result<(), Self::Error> {
+ self.input.reset(checkpoint.input)?;
+ self.positioner.reset(checkpoint.positioner);
+ Ok(())
+ }
+}
+
+#[cfg(all(feature = "std", test))]
+mod tests {
+
+ use crate::Parser;
+
+ use super::*;
+
+ #[test]
+ fn test_positioner() {
+ let input = ["a".to_string(), "b".to_string()];
+ let mut parser = crate::any();
+ let result = parser.parse(Stream::new(&input[..]));
+ assert_eq!(
+ result,
+ Ok((
+ "a".to_string(),
+ Stream::with_positioner(
+ &["b".to_string()][..],
+ IndexPositioner::new_with_position(1)
+ )
+ ))
+ );
+ }
+
+ #[test]
+ fn test_range_positioner() {
+ let input = ["a".to_string(), "b".to_string(), "c".to_string()];
+ let mut parser = crate::parser::range::take(2);
+ let result = parser.parse(Stream::new(&input[..]));
+ assert_eq!(
+ result,
+ Ok((
+ &["a".to_string(), "b".to_string()][..],
+ Stream::with_positioner(
+ &["c".to_string()][..],
+ IndexPositioner::new_with_position(2)
+ )
+ ))
+ );
+ }
+}
diff --git a/src/stream/read.rs b/src/stream/read.rs
new file mode 100644
index 0000000..6fa13b0
--- /dev/null
+++ b/src/stream/read.rs
@@ -0,0 +1,210 @@
+use std::{
+ fmt,
+ io::{self, Bytes, Read},
+};
+
+use crate::{
+ error::{ParseError, StreamError, Tracked},
+ stream::{StreamErrorFor, StreamOnce},
+};
+
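+/// Error type emitted by `read::Stream`. It only distinguishes I/O errors, end of input and
+/// "some parse error occurred".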
+#[derive(Debug)]
+pub enum Error {
+ Unexpected,
+ EndOfInput,
+ Io(io::Error),
+}
+
+impl fmt::Display for Error {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Error::Unexpected => write!(f, "unexpected parse"),
+ Error::EndOfInput => write!(f, "unexpected end of input"),
+ Error::Io(err) => write!(f, "{}", err),
+ }
+ }
+}
+
+impl PartialEq for Error {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (Error::Unexpected, Error::Unexpected) => true,
+ (Error::EndOfInput, Error::EndOfInput) => true,
+ _ => false,
+ }
+ }
+}
+
+impl<Item, Range> StreamError<Item, Range> for Error {
+ #[inline]
+ fn unexpected_token(_: Item) -> Self {
+ Error::Unexpected
+ }
+ #[inline]
+ fn unexpected_range(_: Range) -> Self {
+ Error::Unexpected
+ }
+ #[inline]
+ fn unexpected_format<T>(_: T) -> Self
+ where
+ T: fmt::Display,
+ {
+ Error::Unexpected
+ }
+
+ #[inline]
+ fn expected_token(_: Item) -> Self {
+ Error::Unexpected
+ }
+ #[inline]
+ fn expected_range(_: Range) -> Self {
+ Error::Unexpected
+ }
+ #[inline]
+ fn expected_format<T>(_: T) -> Self
+ where
+ T: fmt::Display,
+ {
+ Error::Unexpected
+ }
+ #[inline]
+ fn message_format<T>(_: T) -> Self
+ where
+ T: fmt::Display,
+ {
+ Error::Unexpected
+ }
+ #[inline]
+ fn message_token(_: Item) -> Self {
+ Error::Unexpected
+ }
+ #[inline]
+ fn message_range(_: Range) -> Self {
+ Error::Unexpected
+ }
+
+ #[inline]
+ fn end_of_input() -> Self {
+ Error::EndOfInput
+ }
+
+ #[inline]
+ fn is_unexpected_end_of_input(&self) -> bool {
+ *self == Error::EndOfInput
+ }
+
+ #[inline]
+ fn into_other<T>(self) -> T
+ where
+ T: StreamError<Item, Range>,
+ {
+ match self {
+ Error::Unexpected => T::unexpected_static_message("parse"),
+ Error::EndOfInput => T::end_of_input(),
+ Error::Io(err) => T::other(err),
+ }
+ }
+}
+
+impl<Item, Range, Position> ParseError<Item, Range, Position> for Error
+where
+ Position: Default,
+{
+ type StreamError = Self;
+ #[inline]
+ fn empty(_position: Position) -> Self {
+ Error::Unexpected
+ }
+
+ #[inline]
+ fn from_error(_: Position, err: Self::StreamError) -> Self {
+ err
+ }
+
+ #[inline]
+ fn set_position(&mut self, _position: Position) {}
+
+ #[inline]
+ fn add(&mut self, err: Self::StreamError) {
+ *self = match (&*self, err) {
+ (Error::EndOfInput, _) => Error::EndOfInput,
+ (_, err) => err,
+ };
+ }
+
+ #[inline]
+ fn set_expected<F>(self_: &mut Tracked<Self>, info: Self::StreamError, f: F)
+ where
+ F: FnOnce(&mut Tracked<Self>),
+ {
+ f(self_);
+ self_.error = info;
+ }
+
+ fn is_unexpected_end_of_input(&self) -> bool {
+ *self == Error::EndOfInput
+ }
+
+ #[inline]
+ fn into_other<T>(self) -> T
+ where
+ T: ParseError<Item, Range, Position>,
+ {
+ T::from_error(Position::default(), StreamError::into_other(self))
+ }
+}
+
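+/// Stream which pulls bytes one at a time from the wrapped `std::io::Read` instance.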
+pub struct Stream<R> {
+ bytes: Bytes<R>,
+}
+
+impl<R: Read> StreamOnce for Stream<R> {
+ type Token = u8;
+ type Range = &'static [u8];
+ type Position = usize;
+ type Error = Error;
+
+ #[inline]
+ fn uncons(&mut self) -> Result<u8, StreamErrorFor<Self>> {
+ match self.bytes.next() {
+ Some(Ok(b)) => Ok(b),
+ Some(Err(err)) => Err(Error::Io(err)),
+ None => Err(Error::EndOfInput),
+ }
+ }
+}
+
+impl<R> Stream<R>
+where
+ R: Read,
+{
+ /// Creates a `StreamOnce` instance from a value implementing `std::io::Read`.
+ ///
+    /// NOTE: This type does not implement `Positioned` or `Clone` and must be wrapped with types
+    /// such as `buffered::Stream` and `position::Stream` (as in the example below) to become a
+    /// `Stream` which can be parsed.
+ ///
+ /// ```rust
+ /// # #![cfg(feature = "std")]
+ /// # extern crate combine;
+ /// use combine::*;
+ /// use combine::parser::byte::*;
+ /// use combine::stream::read;
+ /// use combine::stream::buffered;
+ /// use combine::stream::position;
+ /// use std::io::Read;
+ ///
+ /// # fn main() {
+ /// let input: &[u8] = b"123,";
+ /// let stream = buffered::Stream::new(position::Stream::new(read::Stream::new(input)), 1);
+ /// let result = (many(digit()), byte(b','))
+ /// .parse(stream)
+ /// .map(|t| t.0);
+ /// assert_eq!(result, Ok((vec![b'1', b'2', b'3'], b',')));
+ /// # }
+ /// ```
+ pub fn new(read: R) -> Stream<R> {
+ Stream {
+ bytes: read.bytes(),
+ }
+ }
+}
diff --git a/src/stream/span.rs b/src/stream/span.rs
new file mode 100644
index 0000000..c615678
--- /dev/null
+++ b/src/stream/span.rs
@@ -0,0 +1,157 @@
+use crate::lib::marker::PhantomData;
+
+use crate::{
+ error::{ParseErrorInto, ParseResult, StreamErrorInto},
+ stream::{ResetStream, StreamErrorFor},
+ Positioned, RangeStream, RangeStreamOnce, StreamOnce,
+};
+
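+/// A pair of positions which delimits a range (`start..end`) in the input.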
+#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, PartialOrd, Ord)]
+pub struct Span<P> {
+ pub start: P,
+ pub end: P,
+}
+
+impl<P> From<P> for Span<P>
+where
+ P: Clone,
+{
+ #[inline]
+ fn from(p: P) -> Self {
+ Self {
+ start: p.clone(),
+ end: p,
+ }
+ }
+}
+
+impl<P> Span<P> {
+ pub fn map<Q>(self, mut f: impl FnMut(P) -> Q) -> Span<Q> {
+ Span {
+ start: f(self.start),
+ end: f(self.end),
+ }
+ }
+}
+
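+/// Stream wrapper whose `Position` is a `Span` over the wrapped stream's positions and whose
+/// error type is the caller supplied `E`.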
+#[derive(PartialEq, Eq, Copy, Clone, Debug)]
+pub struct Stream<S, E>(pub S, PhantomData<fn(E) -> E>);
+
+impl<S, E> From<S> for Stream<S, E> {
+ fn from(stream: S) -> Self {
+ Stream(stream, PhantomData)
+ }
+}
+
+impl<S, E> ResetStream for Stream<S, E>
+where
+ S: ResetStream + Positioned,
+ S::Token: PartialEq,
+ S::Range: PartialEq,
+ E: crate::error::ParseError<S::Token, S::Range, Span<S::Position>>,
+ S::Error: ParseErrorInto<S::Token, S::Range, S::Position>,
+ <S::Error as crate::error::ParseError<S::Token, S::Range, S::Position>>::StreamError:
+ StreamErrorInto<S::Token, S::Range>,
+{
+ type Checkpoint = S::Checkpoint;
+
+ #[inline]
+ fn checkpoint(&self) -> Self::Checkpoint {
+ self.0.checkpoint()
+ }
+
+ #[inline]
+ fn reset(&mut self, checkpoint: Self::Checkpoint) -> Result<(), Self::Error> {
+ self.0
+ .reset(checkpoint)
+ .map_err(ParseErrorInto::into_other_error)
+ }
+}
+
+impl<S, E> StreamOnce for Stream<S, E>
+where
+ S: StreamOnce + Positioned,
+ S::Token: PartialEq,
+ S::Range: PartialEq,
+ E: crate::error::ParseError<S::Token, S::Range, Span<S::Position>>,
+ S::Error: ParseErrorInto<S::Token, S::Range, S::Position>,
+ <S::Error as crate::error::ParseError<S::Token, S::Range, S::Position>>::StreamError:
+ StreamErrorInto<S::Token, S::Range>,
+{
+ type Token = S::Token;
+ type Range = S::Range;
+ type Position = Span<S::Position>;
+ type Error = E;
+
+ #[inline]
+ fn uncons(&mut self) -> Result<Self::Token, StreamErrorFor<Self>> {
+ self.0.uncons().map_err(StreamErrorInto::into_other_error)
+ }
+
+ #[inline]
+ fn is_partial(&self) -> bool {
+ self.0.is_partial()
+ }
+}
+
+impl<S, E> RangeStreamOnce for Stream<S, E>
+where
+ S: RangeStream,
+ S::Token: PartialEq,
+ S::Range: PartialEq,
+ E: crate::error::ParseError<S::Token, S::Range, Span<S::Position>>,
+ S::Error: ParseErrorInto<S::Token, S::Range, S::Position>,
+ <S::Error as crate::error::ParseError<S::Token, S::Range, S::Position>>::StreamError:
+ StreamErrorInto<S::Token, S::Range>,
+{
+ #[inline]
+ fn uncons_range(&mut self, size: usize) -> Result<Self::Range, StreamErrorFor<Self>> {
+ self.0
+ .uncons_range(size)
+ .map_err(StreamErrorInto::into_other_error)
+ }
+
+ #[inline]
+ fn uncons_while<F>(&mut self, f: F) -> Result<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ self.0
+ .uncons_while(f)
+ .map_err(StreamErrorInto::into_other_error)
+ }
+
+ #[inline]
+ fn uncons_while1<F>(&mut self, f: F) -> ParseResult<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ self.0
+ .uncons_while1(f)
+ .map_err(StreamErrorInto::into_other_error)
+ }
+
+ #[inline]
+ fn distance(&self, end: &Self::Checkpoint) -> usize {
+ self.0.distance(end)
+ }
+
+ fn range(&self) -> Self::Range {
+ self.0.range()
+ }
+}
+
+impl<S, E> Positioned for Stream<S, E>
+where
+ S: StreamOnce + Positioned,
+ S::Token: PartialEq,
+ S::Range: PartialEq,
+ E: crate::error::ParseError<S::Token, S::Range, Span<S::Position>>,
+ S::Error: ParseErrorInto<S::Token, S::Range, S::Position>,
+ <S::Error as crate::error::ParseError<S::Token, S::Range, S::Position>>::StreamError:
+ StreamErrorInto<S::Token, S::Range>,
+{
+ fn position(&self) -> Span<S::Position> {
+ Span::from(self.0.position())
+ }
+}
diff --git a/src/stream/state.rs b/src/stream/state.rs
new file mode 100644
index 0000000..dde5f83
--- /dev/null
+++ b/src/stream/state.rs
@@ -0,0 +1,91 @@
+use crate::{
+ error::ParseResult,
+ stream::{Positioned, RangeStreamOnce, ResetStream, StreamErrorFor, StreamOnce},
+};
+
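+/// Stream wrapper which carries a piece of user supplied `state` alongside the wrapped `stream`;
+/// all stream operations are forwarded to `stream`.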
+#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)]
+pub struct Stream<S, U> {
+ pub stream: S,
+ pub state: U,
+}
+
+impl<S, U> Positioned for Stream<S, U>
+where
+ S: Positioned,
+{
+ #[inline]
+ fn position(&self) -> Self::Position {
+ self.stream.position()
+ }
+}
+
+impl<S, U> ResetStream for Stream<S, U>
+where
+ S: ResetStream,
+{
+ type Checkpoint = S::Checkpoint;
+
+ #[inline]
+ fn checkpoint(&self) -> Self::Checkpoint {
+ self.stream.checkpoint()
+ }
+
+ #[inline]
+ fn reset(&mut self, checkpoint: Self::Checkpoint) -> Result<(), Self::Error> {
+ self.stream.reset(checkpoint)
+ }
+}
+
+impl<S, U> StreamOnce for Stream<S, U>
+where
+ S: StreamOnce,
+{
+ type Token = S::Token;
+ type Range = S::Range;
+ type Position = S::Position;
+ type Error = S::Error;
+
+ #[inline]
+ fn uncons(&mut self) -> Result<S::Token, StreamErrorFor<Self>> {
+ self.stream.uncons()
+ }
+
+ fn is_partial(&self) -> bool {
+ self.stream.is_partial()
+ }
+}
+
+impl<S, U> RangeStreamOnce for Stream<S, U>
+where
+ S: RangeStreamOnce,
+{
+ #[inline]
+ fn uncons_range(&mut self, size: usize) -> Result<Self::Range, StreamErrorFor<Self>> {
+ self.stream.uncons_range(size)
+ }
+
+ #[inline]
+ fn uncons_while<F>(&mut self, f: F) -> Result<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ self.stream.uncons_while(f)
+ }
+
+ fn uncons_while1<F>(&mut self, f: F) -> ParseResult<Self::Range, StreamErrorFor<Self>>
+ where
+ F: FnMut(Self::Token) -> bool,
+ {
+ self.stream.uncons_while1(f)
+ }
+
+ #[inline]
+ fn distance(&self, end: &Self::Checkpoint) -> usize {
+ self.stream.distance(end)
+ }
+
+ #[inline]
+ fn range(&self) -> Self::Range {
+ self.stream.range()
+ }
+}
diff --git a/tests/async.rs b/tests/async.rs
new file mode 100644
index 0000000..f5fb063
--- /dev/null
+++ b/tests/async.rs
@@ -0,0 +1,854 @@
+#![allow(renamed_and_removed_lints)]
+
+use std::{
+ cell::Cell,
+ io::{self, Cursor},
+ rc::Rc,
+ str,
+};
+
+use {
+ bytes::{Buf, BytesMut},
+ combine::{
+ any, count_min_max,
+ error::{ParseError, StreamError},
+ many1, parser,
+ parser::{
+ byte::{num, take_until_bytes},
+ char::{char, digit, letter, string},
+ choice::optional,
+ combinator::{
+ any_partial_state, any_send_partial_state, attempt, from_str, no_partial,
+ recognize, AnyPartialState, AnySendPartialState,
+ },
+ range::{
+ self, range, recognize_with_value, take, take_fn, take_until_range, take_while,
+ take_while1,
+ },
+ repeat,
+ },
+ satisfy, sep_end_by, skip_many, skip_many1,
+ stream::{easy, RangeStream, StreamErrorFor},
+ token, Parser,
+ },
+ futures::prelude::*,
+ futures_03_dep as futures,
+ partial_io::PartialRead,
+ quick_error::quick_error,
+ quickcheck::quickcheck,
+ tokio_dep as tokio,
+ tokio_util::codec::{Decoder, FramedRead},
+};
+
+// Workaround for partial_io not working with tokio-0.2
+mod support;
+use support::*;
+
+quick_error! {
+ #[derive(Debug)]
+ enum Error {
+ Io(err: io::Error) {
+ display("{}", err)
+ from()
+ }
+ Parse(err: easy::Errors<char, String, usize>) {
+ display("{}", err)
+ from()
+ }
+ Utf8(err: std::str::Utf8Error) {
+ display("{}", err)
+ from()
+ }
+ Message(err: String) {
+ display("{}", err)
+ from()
+ }
+ }
+}
+
+macro_rules! mk_parser {
+ ($parser:expr, $self_:expr,()) => {
+ $parser
+ };
+ ($parser:expr, $self_:expr,($custom_state:ty)) => {
+ $parser($self_.1.clone())
+ };
+}
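+// Generates a type implementing `tokio_util::codec::Decoder` which drives the given combine
+// parser over partial input, optionally threading extra state into the parser constructor.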
+macro_rules! impl_decoder {
+ ($typ: ident, $token: ty, $parser: expr, $custom_state: ty) => {
+ #[derive(Default)]
+ struct $typ(AnyPartialState, $custom_state);
+ impl_decoder!{$typ, $token, $parser; ($custom_state)}
+ };
+ ($typ: ident, $token: ty, $parser: expr) => {
+ #[derive(Default)]
+ struct $typ(AnyPartialState);
+ impl_decoder!{$typ, $token, $parser; ()}
+ };
+ ($typ: ident, $token: ty, $parser: expr; ( $($custom_state: tt)* )) => {
+ impl Decoder for $typ {
+ type Item = $token;
+ type Error = Error;
+
+ fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
+ (&mut &mut *self).decode(src)
+ }
+ fn decode_eof(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
+ (&mut &mut *self).decode_eof(src)
+ }
+ }
+
+ impl<'a> Decoder for &'a mut $typ {
+ type Item = $token;
+ type Error = Error;
+
+ fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
+ self.decode_stream(src, false)
+ }
+ fn decode_eof(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
+ self.decode_stream(src, true)
+ }
+ }
+
+ impl<'a> $typ {
+ fn decode_stream(&mut self, src: &mut BytesMut, eof: bool) -> Result<Option<$token>, Error> {
+ let (opt, removed_len) = {
+ let str_src = str::from_utf8(&src[..])?;
+ println!("Decoding `{}`", str_src);
+ combine::stream::decode_tokio(
+ any_partial_state(mk_parser!($parser, self, ($($custom_state)*))),
+ &mut easy::Stream(combine::stream::MaybePartialStream(str_src, !eof)),
+ &mut self.0,
+ ).map_err(|err| {
+ // Since err contains references into `src` we must remove these before
+ // returning the error and before we call `advance` to remove the input we
+ // just committed
+ let err = err.map_range(|r| r.to_string())
+ .map_position(|p| p.translate_position(&str_src[..]));
+ format!("{}\nIn input: `{}`", err, str_src)
+ })?
+ };
+
+ src.advance(removed_len);
+ match opt {
+ None => println!("Need more input!"),
+ Some(_) => (),
+ }
+ Ok(opt)
+ }
+ }
+ }
+}
+
+macro_rules! impl_byte_decoder {
+ ($typ: ident, $token: ty, $parser: expr, $custom_state: ty) => {
+ #[derive(Default)]
+ struct $typ(AnyPartialState, $custom_state);
+ impl_byte_decoder!{$typ, $token, $parser; ($custom_state)}
+ };
+ ($typ: ident, $token: ty, $parser: expr) => {
+ #[derive(Default)]
+ struct $typ(AnyPartialState);
+ impl_byte_decoder!{$typ, $token, $parser; ()}
+ };
+ ($typ: ident, $token: ty, $parser: expr; ( $($custom_state: tt)* )) => {
+ impl Decoder for $typ {
+ type Item = $token;
+ type Error = Error;
+
+ fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
+ (&mut &mut *self).decode(src)
+ }
+ }
+
+ impl<'a> Decoder for &'a mut $typ {
+ type Item = $token;
+ type Error = Error;
+
+ fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
+ let (opt, removed_len) = {
+ let str_src = &src[..];
+ println!("Decoding `{:?}`", str_src);
+ combine::stream::decode(
+ any_partial_state(mk_parser!($parser, self, ($($custom_state)*))),
+ &mut easy::Stream(combine::stream::PartialStream(str_src)),
+ &mut self.0,
+ ).map_err(|err| {
+ // Since err contains references into `src` we must remove these before
+ // returning the error and before we call `advance` to remove the input we
+ // just committed
+ let err = err.map_range(|r| format!("{:?}", r))
+ .map_position(|p| p.translate_position(&str_src[..]));
+ format!("{}\nIn input: `{:?}`", err, str_src)
+ })?
+ };
+
+ src.advance(removed_len);
+ match opt {
+ None => println!("Need more input!"),
+ Some(_) => (),
+ }
+ Ok(opt)
+ }
+ }
+ }
+}
+
+use partial_io::{GenNoErrors, GenWouldBlock, PartialOp, PartialWithErrors};
+
+fn run_decoder<B, D, S>(input: &B, seq: S, decoder: D) -> Result<Vec<D::Item>, D::Error>
+where
+ D: Decoder<Error = Error>,
+ D::Item: ::std::fmt::Debug,
+ S: IntoIterator<Item = PartialOp> + 'static,
+ S::IntoIter: Send,
+ B: ?Sized + AsRef<[u8]>,
+{
+ let ref mut reader = Cursor::new(input.as_ref());
+ let partial_reader = PartialAsyncRead::new(reader, seq);
+
+ tokio_02_dep::runtime::Builder::new()
+ .basic_scheduler()
+ .build()
+ .unwrap()
+ .block_on(
+ FramedRead::new(partial_reader, decoder)
+ .map_ok(|x| {
+ println!("Decoded `{:?}`", x);
+ x
+ })
+ .try_collect(),
+ )
+}
+
+parser! {
+ type PartialState = AnyPartialState;
+ fn basic_parser['a, Input]()(Input) -> String
+ where [ Input: RangeStream<Token = char, Range = &'a str> ]
+ {
+ any_partial_state(
+ many1(digit()).skip(range(&"\r\n"[..])),
+ )
+ }
+}
+
+impl_decoder! { Basic, String, basic_parser() }
+
+#[test]
+fn many1_skip_no_errors() {
+ let input = "123\r\n\
+ 456\r\n";
+
+ let result = run_decoder(input, vec![], Basic::default());
+
+ assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
+ assert_eq!(result.unwrap(), vec!["123".to_string(), "456".to_string()]);
+}
+
+parser! {
+ type PartialState = AnyPartialState;
+ fn prefix_many_then_parser['a, Input]()(Input) -> String
+ where [ Input: RangeStream<Token = char, Range = &'a str> ]
+ {
+ let integer = from_str(many1::<String, _, _>(digit()));
+ any_partial_state((char('#'), skip_many(char(' ')), integer)
+ .then_partial(|t| {
+ let c = t.2;
+ count_min_max(c, c, any())
+ })
+ )
+ }
+}
+
+parser! {
+ type PartialState = AnyPartialState;
+ fn choice_parser['a, Input]()(Input) -> String
+ where [ Input: RangeStream<Token = char, Range = &'a str> ]
+ {
+ any_partial_state(
+ many1(digit())
+ .or(many1(letter()))
+ .skip(range(&"\r\n"[..]))
+ )
+ }
+}
+
+fn content_length<'a, Input>(
+) -> impl Parser<Input, Output = String, PartialState = AnySendPartialState> + 'a
+where
+ Input: RangeStream<Token = char, Range = &'a str> + 'a,
+ // Necessary due to rust-lang/rust#24159
+ Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
+{
+ let content_length = range("Content-Length: ").with(
+ range::recognize(skip_many1(digit())).and_then(|digits: &str| {
+ // Convert the error from `.parse` into an error combine understands
+ digits
+ .parse::<usize>()
+ .map_err(StreamErrorFor::<Input>::other)
+ }),
+ );
+
+ any_send_partial_state(
+ (
+ skip_many(range("\r\n")),
+ content_length,
+ range("\r\n\r\n").map(|_| ()),
+ )
+ .then_partial(|&mut (_, message_length, _)| {
+ take(message_length).map(|bytes: &str| bytes.to_owned())
+ }),
+ )
+}
+
+quickcheck! {
+ fn many1_skip_test(seq: PartialWithErrors<GenWouldBlock>) -> () {
+
+ let input = "123\r\n\
+ 456\r\n\
+ 1\r\n\
+ 5\r\n\
+ 666666\r\n";
+
+ let result = run_decoder(input, seq, Basic::default());
+
+ assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
+ assert_eq!(
+ result.unwrap(),
+ vec!["123".to_string(), "456".to_string(), "1".to_string(), "5".to_string(), "666666".to_string()]
+ );
+ }
+
+ fn prefix_many_then_test(seq: PartialWithErrors<GenWouldBlock>) -> () {
+ impl_decoder!{ TestParser, String, prefix_many_then_parser() }
+
+ let input = "# 1a\
+ # 4abcd\
+ #0\
+ #3:?a\
+ #10abcdefghij";
+
+ let result = run_decoder(input, seq, TestParser::default());
+
+ assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
+ assert_eq!(
+ result.unwrap(),
+ ["a", "abcd", "", ":?a", "abcdefghij"]
+ );
+ }
+
+ fn choice_test(seq: PartialWithErrors<GenWouldBlock>) -> () {
+ impl_decoder!{ TestParser, String, choice_parser() }
+
+ let input = "1\r\n\
+ abcd\r\n\
+ 123\r\n\
+ abc\r\n\
+ 1232751\r\n";
+
+ let result = run_decoder(input, seq, TestParser::default());
+
+ assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
+ assert_eq!(
+ result.unwrap(),
+ ["1", "abcd", "123", "abc", "1232751"]
+ );
+ }
+
+ fn recognize_test(seq: PartialWithErrors<GenWouldBlock>) -> () {
+ impl_decoder!{ TestParser, String,
+ recognize(
+ (skip_many1(digit()), optional((char('.'), skip_many(digit()))))
+ )
+ .skip(range(&"\r\n"[..]))
+ }
+
+ let input = "1.0\r\n\
+ 123.123\r\n\
+ 17824\r\n\
+ 3.14\r\n\
+ 1.\r\n\
+ 2\r\n";
+
+ let result = run_decoder(input, seq, TestParser::default());
+
+ assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
+ assert_eq!(
+ result.unwrap(),
+ ["1.0", "123.123", "17824", "3.14", "1.", "2"]
+ );
+ }
+
+ fn recognize_range_test(seq: PartialWithErrors<GenWouldBlock>) -> () {
+ impl_decoder!{ TestParser, String,
+ recognize_with_value(
+ (skip_many1(digit()), optional((char('.'), skip_many(digit()))))
+ )
+ .map(|(r, _)| String::from(r))
+ .skip(range(&"\r\n"[..]))
+ }
+
+ let input = "1.0\r\n\
+ 123.123\r\n\
+ 17824\r\n\
+ 3.14\r\n\
+ 1.\r\n\
+ 2\r\n";
+
+ let result = run_decoder(input, seq, TestParser::default());
+
+ assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
+ assert_eq!(
+ result.unwrap(),
+ ["1.0", "123.123", "17824", "3.14", "1.", "2"]
+ );
+ }
+
+ fn take_while_test(seq: PartialWithErrors<GenWouldBlock>) -> () {
+ impl_decoder!{ TestParser, String,
+ |counter: Rc<Cell<i32>>|
+ take_while(move |c| { counter.set(counter.get() + 1); c != '\r' })
+ .map(String::from)
+ .skip(range("\r\n")),
+ Rc<Cell<i32>>
+ }
+
+ let input = "1.0\r\n\
+ 123.123\r\n\
+ 17824\r\n\
+ 3.14\r\n\
+ \r\n\
+ 2\r\n";
+
+ let counter = Rc::new(Cell::new(0));
+ let result = run_decoder(input, seq, TestParser(Default::default(), counter.clone()));
+
+ assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
+ assert_eq!(
+ result.unwrap(),
+ ["1.0", "123.123", "17824", "3.14", "", "2"]
+ );
+
+ assert_eq!(counter.get(), 26);
+ }
+
+ fn take_while1_test(seq: PartialWithErrors<GenWouldBlock>) -> () {
+ impl_decoder!{ TestParser, String,
+ |count: Rc<Cell<i32>>|
+ take_while1(move |c| { count.set(count.get() + 1); c != '\r' })
+ .map(String::from)
+ .skip(range("\r\n")),
+ Rc<Cell<i32>>
+ }
+
+ let input = "1.0\r\n\
+ 123.123\r\n\
+ 17824\r\n\
+ 3.14\r\n\
+ 1.\r\n\
+ 2\r\n";
+
+ let counter = Rc::new(Cell::new(0));
+ let result = run_decoder(input, seq, TestParser(Default::default(), counter.clone()));
+
+ assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
+ assert_eq!(
+ result.unwrap(),
+ ["1.0", "123.123", "17824", "3.14", "1.", "2"]
+ );
+
+ assert_eq!(counter.get(), 28);
+ }
+
+ fn take_until(seq: PartialWithErrors<GenWouldBlock>) -> () {
+ impl_decoder!{ TestParser, String,
+ |count: Rc<Cell<i32>>|
+ repeat::take_until(token(',').map(move |_| count.set(count.get() + 1))).skip(token(',')),
+ Rc<Cell<i32>>
+ }
+
+ let input = "123,456,789,";
+
+ let counter = Rc::new(Cell::new(0));
+ let result = run_decoder(input, seq, TestParser(Default::default(), counter.clone()));
+
+ assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
+ assert_eq!(
+ result.unwrap(),
+ ["123", "456", "789"]
+ );
+
+ assert_eq!(counter.get(), 3);
+ }
+
+ fn take_until_committed(seq: PartialWithErrors<GenWouldBlock>) -> () {
+ impl_decoder!{ TestParser, String,
+ |count: Rc<Cell<i32>>| {
+ let end = attempt((token(':').map(move |_| count.set(count.get() + 1)), token(':')));
+ repeat::take_until(end).skip((token(':'), token(':')))
+ },
+ Rc<Cell<i32>>
+ }
+
+ let input = "123::456::789::";
+
+ let counter = Rc::new(Cell::new(0));
+ let result = run_decoder(input, seq, TestParser(Default::default(), counter.clone()));
+
+ assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
+ assert_eq!(
+ result.unwrap(),
+ ["123", "456", "789"]
+ );
+
+ assert_eq!(counter.get(), 3);
+ }
+
+ fn take_until_range_committed(seq: PartialWithErrors<GenWouldBlock>) -> () {
+ impl_decoder!{ TestParser, String,
+ take_until_range("::").map(String::from).skip((token(':'), token(':')))
+ }
+
+ let input = "123::456::789::";
+
+ let result = run_decoder(input, seq, TestParser(Default::default()));
+
+ assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
+ assert_eq!(result.unwrap(), ["123", "456", "789"]);
+ }
+
+ fn any_send_partial_state_do_not_forget_state(sizes: Vec<usize>, seq: PartialWithErrors<GenWouldBlock>) -> () {
+ impl_decoder!{ TestParser, usize,
+ any_send_partial_state(content_length().map(|bytes| bytes.len()))
+ }
+
+ let input : String = sizes
+ .iter()
+ .map(|s| {
+ format!(
+ "Content-Length: {}\r\n\r\n{}\r\n",
+ s,
+ ::std::iter::repeat('a').take(*s).collect::<String>()
+ )
+ })
+ .collect();
+
+ let result = run_decoder(input.as_bytes(), seq, TestParser(Default::default()));
+
+ assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
+ assert_eq!(result.unwrap(), sizes);
+ }
+
+ fn take_fn_test(sizes: Vec<usize>, seq: PartialWithErrors<GenWouldBlock>) -> () {
+ impl_decoder!{ TestParser, usize,
+ take_fn(|s: &str| s.find("\r\n")).map(|bytes: &str| bytes.parse::<usize>().unwrap()).skip(take(2))
+ }
+
+ let input : String = sizes
+ .iter()
+ .map(|s| {
+ format!(
+ "{}\r\n",
+ s,
+ )
+ })
+ .collect();
+
+ let result = run_decoder(input.as_bytes(), seq, TestParser(Default::default()));
+
+ assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
+ assert_eq!(result.unwrap(), sizes);
+ }
+
+ fn take_until_bytes_test(sizes: Vec<usize>, seq: PartialWithErrors<GenWouldBlock>) -> () {
+ impl_decoder!{ TestParser, usize,
+ take_until_bytes("\r\n".as_bytes())
+ .map(|bytes: &str| bytes.parse::<usize>().unwrap())
+ .skip(take(2))
+ }
+
+ let input : String = sizes
+ .iter()
+ .map(|s| {
+ format!(
+ "{}\r\n",
+ s,
+ )
+ })
+ .collect();
+
+ let result = run_decoder(input.as_bytes(), seq, TestParser(Default::default()));
+
+ assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
+ assert_eq!(result.unwrap(), sizes);
+ }
+
+ fn num_test(ints: Vec<u16>, seq: PartialWithErrors<GenWouldBlock>) -> () {
+ impl_byte_decoder!{ TestParser, u16,
+ num::be_u16()
+ .skip(take(2))
+ }
+
+ let input: Vec<u8> = ints.iter()
+ .flat_map(|i| {
+ let mut v = Vec::new();
+ v.extend_from_slice(&i.to_be_bytes());
+ v.extend_from_slice(b"\r\n");
+ v
+ })
+ .collect();
+
+ let result = run_decoder(&input, seq, TestParser(Default::default()));
+
+ assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
+ assert_eq!(result.unwrap(), ints);
+ }
+
+ fn sep_end_by_test(seq: PartialWithErrors<GenWouldBlock>) -> () {
+ impl_decoder!{ TestParser, Vec<String>,
+ repeat::sep_end_by((digit(), digit(), digit()).map(|(a, b, c)| vec![a, b, c].into_iter().collect()), no_partial(string("::")))
+ .skip(no_partial(string("\r\n")))
+ }
+
+ let input = "123::456::789::\r\n";
+
+ let result = run_decoder(&input, seq, TestParser(Default::default()));
+
+ assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
+ assert_eq!(result.unwrap(), vec![vec!["123".to_string(), "456".to_string(), "789".to_string()]]);
+ }
+}
+
+#[test]
+fn skip_count_min_max_test() {
+ let seq = vec![PartialOp::Limited(1)];
+ impl_decoder! { TestParser, String,
+ repeat::skip_count_min_max(1, 2, char('_')).skip(char('.')).map(|_| "".to_string())
+ }
+
+ let input = "_.";
+
+ let result = run_decoder(input, seq, TestParser::default());
+
+ assert!(result.as_ref().is_ok(), "{}", result.unwrap_err());
+ assert_eq!(result.unwrap(), [""]);
+}
+
+const WORDS_IN_README: usize = 819;
+
+#[test]
+fn decode_std() {
+ quickcheck(
+ (|ops: PartialWithErrors<GenNoErrors>| {
+ let buf = include_bytes!("../README.md");
+
+ let mut read = PartialRead::new(&buf[..], ops);
+ let mut decoder =
+ combine::stream::Decoder::<_, combine::stream::PointerOffset<_>>::new();
+ let is_whitespace = |b: u8| b == b' ' || b == b'\r' || b == b'\n';
+ assert_eq!(
+ combine::decode!(
+ decoder,
+ read,
+ {
+ let word = many1(satisfy(|b| !is_whitespace(b)));
+ sep_end_by(word, skip_many1(satisfy(is_whitespace)))
+ .map(|words: Vec<Vec<u8>>| words.len())
+ },
+ |input, _| combine::easy::Stream::from(input)
+ )
+ .map_err(From::from)
+ .map_err(
+ |err: combine::easy::Errors<u8, &[u8], combine::stream::PointerOffset<_>>| err
+ .map_position(|p| p.0)
+ ),
+ Ok(WORDS_IN_README),
+ );
+ }) as fn(_) -> _,
+ )
+}
+
+#[test]
+fn decode_tokio_02() {
+ quickcheck(
+ (|ops: PartialWithErrors<GenWouldBlock>| {
+ let buf = include_bytes!("../README.md");
+ let runtime = tokio::runtime::Builder::new_current_thread()
+ .build()
+ .unwrap();
+ runtime.block_on(async {
+ let mut read = PartialAsyncRead::new(&buf[..], ops);
+ let mut decoder =
+ combine::stream::Decoder::<_, combine::stream::PointerOffset<[u8]>>::new();
+ let is_whitespace = |b: u8| b == b' ' || b == b'\r' || b == b'\n';
+ assert_eq!(
+ combine::decode_tokio_02!(
+ decoder,
+ read,
+ {
+ let word = many1(satisfy(|b| !is_whitespace(b)));
+ sep_end_by(word, skip_many1(satisfy(is_whitespace)))
+ .map(|words: Vec<Vec<u8>>| words.len())
+ },
+ |input, _| combine::easy::Stream::from(input)
+ )
+ .map_err(From::from)
+ .map_err(
+ |err: combine::easy::Errors<u8, &[u8], _>| err.map_range(|r| r.to_owned())
+ )
+ .map_err(|err| err.map_position(|p| p.translate_position(&decoder.buffer()))),
+ Ok(WORDS_IN_README),
+ );
+ })
+ }) as fn(_) -> _,
+ )
+}
+
+#[test]
+fn decode_tokio_03() {
+ quickcheck(
+ (|ops: PartialWithErrors<GenWouldBlock>| {
+ let buf = include_bytes!("../README.md");
+ let runtime = tokio::runtime::Builder::new_current_thread()
+ .build()
+ .unwrap();
+ runtime.block_on(async {
+ let mut read = PartialAsyncRead::new(&buf[..], ops);
+ let mut decoder =
+ combine::stream::Decoder::<_, combine::stream::PointerOffset<[u8]>>::new();
+ let is_whitespace = |b: u8| b == b' ' || b == b'\r' || b == b'\n';
+ assert_eq!(
+ combine::decode_tokio_03!(
+ decoder,
+ read,
+ {
+ let word = many1(satisfy(|b| !is_whitespace(b)));
+ sep_end_by(word, skip_many1(satisfy(is_whitespace)))
+ .map(|words: Vec<Vec<u8>>| words.len())
+ },
+ |input, _| combine::easy::Stream::from(input)
+ )
+ .map_err(From::from)
+ .map_err(
+ |err: combine::easy::Errors<u8, &[u8], _>| err.map_range(|r| r.to_owned())
+ )
+ .map_err(|err| err.map_position(|p| p.translate_position(&decoder.buffer()))),
+ Ok(WORDS_IN_README),
+ );
+ })
+ }) as fn(_) -> _,
+ )
+}
+
+#[test]
+fn decode_tokio() {
+ quickcheck(
+ (|ops: PartialWithErrors<GenWouldBlock>| {
+ let buf = include_bytes!("../README.md");
+ let runtime = tokio::runtime::Builder::new_current_thread()
+ .build()
+ .unwrap();
+ runtime.block_on(async {
+ let mut read = PartialAsyncRead::new(&buf[..], ops);
+ let mut decoder =
+ combine::stream::Decoder::<_, combine::stream::PointerOffset<[u8]>>::new();
+ let is_whitespace = |b: u8| b == b' ' || b == b'\r' || b == b'\n';
+ assert_eq!(
+ combine::decode_tokio!(
+ decoder,
+ read,
+ {
+ let word = many1(satisfy(|b| !is_whitespace(b)));
+ sep_end_by(word, skip_many1(satisfy(is_whitespace)))
+ .map(|words: Vec<Vec<u8>>| words.len())
+ },
+ |input, _| combine::easy::Stream::from(input)
+ )
+ .map_err(From::from)
+ .map_err(
+ |err: combine::easy::Errors<u8, &[u8], _>| err.map_range(|r| r.to_owned())
+ )
+ .map_err(|err| err.map_position(|p| p.translate_position(&decoder.buffer()))),
+ Ok(WORDS_IN_README),
+ );
+ })
+ }) as fn(_) -> _,
+ )
+}
+
+#[test]
+fn decode_async_std() {
+ quickcheck(
+ (|ops: PartialWithErrors<GenWouldBlock>| {
+ let buf = include_bytes!("../README.md");
+ async_std::task::block_on(async {
+ let mut read = FuturesPartialAsyncRead::new(&buf[..], ops);
+ let mut decoder =
+ combine::stream::Decoder::<_, combine::stream::PointerOffset<[u8]>>::new();
+ let is_whitespace = |b: u8| b == b' ' || b == b'\r' || b == b'\n';
+ assert_eq!(
+ combine::decode_futures_03!(
+ decoder,
+ read,
+ {
+ let word = many1(satisfy(|b| !is_whitespace(b)));
+ sep_end_by(word, skip_many1(satisfy(is_whitespace)))
+ .map(|words: Vec<Vec<u8>>| words.len())
+ },
+ |input, _| combine::easy::Stream::from(input),
+ )
+ .map_err(From::from)
+ .map_err(|err: combine::easy::Errors<u8, &[u8], _>| err),
+ Ok(WORDS_IN_README),
+ );
+ })
+ }) as fn(_) -> _,
+ )
+}
+
+#[tokio::test]
+async fn decode_loop() {
+ use tokio::fs::File;
+
+ use combine::{
+ decode_tokio, many1, satisfy, skip_many1,
+ stream::{buf_reader::BufReader, Decoder},
+ };
+ let mut read = BufReader::new(File::open("README.md").await.unwrap());
+ let mut decoder = Decoder::new_bufferless();
+ let is_whitespace = |b: u8| b == b' ' || b == b'\r' || b == b'\n';
+
+ let mut count = 0;
+ loop {
+ // async block suppresses a warning about duplicate label
+ if async {
+ decode_tokio!(
+ decoder,
+ read,
+ many1(satisfy(|b| !is_whitespace(b))),
+ |input, _position| combine::easy::Stream::from(input),
+ )
+ .is_err()
+ }
+ .await
+ {
+ break;
+ }
+
+ count += 1;
+
+ {
+ if decode_tokio!(
+ decoder,
+ read,
+ skip_many1(satisfy(is_whitespace)),
+ |input, _position| combine::easy::Stream::from(input),
+ )
+ .is_err()
+ {
+ break;
+ }
+ }
+ }
+ assert_eq!(819, count);
+}
diff --git a/tests/buffered_stream.rs b/tests/buffered_stream.rs
new file mode 100644
index 0000000..8872a69
--- /dev/null
+++ b/tests/buffered_stream.rs
@@ -0,0 +1,116 @@
+#![cfg(feature = "std")]
+
+use combine::{
+ attempt, choice, many, many1,
+ parser::{
+ char::{char, digit, spaces, string},
+ combinator::recognize,
+ },
+ sep_by, skip_many1,
+ stream::{
+ buffered,
+ easy::{self, Error, Errors},
+ position, IteratorStream,
+ },
+ Parser, Positioned,
+};
+
+#[test]
+fn shared_stream_buffer() {
+ // Iterator that can't be cloned
+ let text = "10,222,3,44".chars().map(|c| {
+ if c.is_digit(10) {
+ (c as u8 + 1) as char
+ } else {
+ c
+ }
+ });
+ let buffer = buffered::Stream::new(position::Stream::new(IteratorStream::new(text)), 1);
+ let int: &mut dyn Parser<_, Output = _, PartialState = _> =
+ &mut many(digit()).map(|s: String| s.parse::<i64>().unwrap());
+ let result = sep_by(int, char(',')).parse(buffer).map(|t| t.0);
+ assert_eq!(result, Ok(vec![21, 333, 4, 55]));
+}
+
+#[test]
+fn shared_stream_backtrack() {
+ let text = "apple,apple,ananas,orangeblah";
+ let mut iter = text.chars();
+ // Iterator that can't be cloned
+ let stream = buffered::Stream::new(position::Stream::new(IteratorStream::new(&mut iter)), 2);
+
+ let value: &mut dyn Parser<_, Output = _, PartialState = _> = &mut choice([
+ attempt(string("apple")),
+ attempt(string("orange")),
+ attempt(string("ananas")),
+ ]);
+ let mut parser = sep_by(value, char(','));
+ let result = parser.parse(stream).map(|t| t.0);
+ assert_eq!(result, Ok(vec!["apple", "apple", "ananas", "orange"]));
+}
+
+#[test]
+fn shared_stream_insufficient_backtrack() {
+ let text = "apple,apple,ananas,orangeblah";
+ let mut iter = text.chars();
+ // Iterator that can't be cloned
+ let stream = buffered::Stream::new(
+ easy::Stream(position::Stream::new(IteratorStream::new(&mut iter))),
+ 1,
+ );
+
+ let value: &mut dyn Parser<_, Output = _, PartialState = _> = &mut choice([
+ attempt(string("apple")),
+ attempt(string("orange")),
+ attempt(string("ananas")),
+ ]);
+ let mut parser = sep_by(value, char(','));
+ let result: Result<Vec<&str>, _> = parser.parse(stream).map(|t| t.0);
+ assert!(result.is_err());
+ assert!(
+ result
+ .as_ref()
+ .unwrap_err()
+ .errors
+ .iter()
+ .any(|err| *err == Error::Message("Backtracked to far".into())),
+ "{}",
+ result.unwrap_err()
+ );
+}
+
+/// Test which checks that a stream which has ended does not repeat the last token; in the cases
+/// where it did, this test would loop forever.
+#[test]
+fn always_output_end_of_input_after_end_of_input() {
+ let text = "10".chars();
+ let buffer = buffered::Stream::new(position::Stream::new(IteratorStream::new(text)), 1);
+ let int = many1(digit()).map(|s: String| s.parse::<i64>().unwrap());
+ let result = many(spaces().with(int)).parse(buffer).map(|t| t.0);
+ assert_eq!(result, Ok(vec![10]));
+}
+
+#[test]
+fn position() {
+ let text = "10abc".chars();
+ let stream = buffered::Stream::new(position::Stream::new(IteratorStream::new(text)), 3);
+ assert_eq!(stream.position(), 0);
+ let result = many1::<Vec<_>, _, _>(digit()).parse(stream);
+ assert!(result.is_ok());
+ assert_eq!(result.unwrap().1.position(), 2);
+}
+
+#[test]
+fn buffered_stream_recognize_issue_256() {
+ let mut parser = recognize::<String, _, _>(skip_many1(digit()));
+ let input = "12 ";
+ assert_eq!(
+ parser
+ .parse(buffered::Stream::new(easy::Stream(input), 1))
+ .map_err(|err| err.map_position(|pos| pos.translate_position(input))),
+ Err(Errors {
+ position: 2,
+ errors: vec![easy::Error::Message("Backtracked to far".into())]
+ })
+ );
+}
diff --git a/tests/parser.rs b/tests/parser.rs
new file mode 100644
index 0000000..c2ab07c
--- /dev/null
+++ b/tests/parser.rs
@@ -0,0 +1,670 @@
+use combine::{
+ parser::{
+ byte::bytes_cmp,
+ char::{digit, letter, string, string_cmp},
+ choice::{choice, optional},
+ combinator::{attempt, no_partial, not_followed_by},
+ error::unexpected,
+ range::{self, range},
+ repeat::{count, count_min_max, many, sep_by, sep_end_by1, skip_until, take_until},
+ token::{any, eof, position, token, value, Token},
+ },
+ EasyParser, Parser,
+};
+
+#[test]
+fn choice_empty() {
+ let mut parser = choice::<_, &mut [Token<&str>]>(&mut []);
+ let result_err = parser.parse("a");
+ assert!(result_err.is_err());
+}
+
+#[test]
+fn tuple() {
+ let mut parser = (digit(), token(','), digit(), token(','), letter());
+ assert_eq!(parser.parse("1,2,z"), Ok((('1', ',', '2', ',', 'z'), "")));
+}
+
+#[test]
+fn issue_99() {
+ let result = any().map(|_| ()).or(eof()).parse("");
+ assert!(result.is_ok(), "{:?}", result);
+}
+
+#[test]
+fn not_followed_by_does_not_consume_any_input() {
+ let mut parser = not_followed_by(range("a")).map(|_| "").or(range("a"));
+
+ assert_eq!(parser.parse("a"), Ok(("a", "")));
+
+ let mut parser = range("a").skip(not_followed_by(range("aa")));
+
+ assert_eq!(parser.parse("aa"), Ok(("a", "a")));
+ assert!(parser.parse("aaa").is_err());
+}
+
+#[cfg(feature = "std")]
+mod tests_std {
+
+ use combine::{
+ parser::{
+ byte::{alpha_num, bytes, num::be_u32},
+ char::{char, digit, letter},
+ },
+ stream::{
+ easy::{self, Error, Errors},
+ position::{self, SourcePosition},
+ },
+ Parser,
+ };
+
+ use super::*;
+
+ #[derive(Clone, PartialEq, Debug)]
+ struct CloneOnly {
+ s: String,
+ }
+
+ #[test]
+ fn token_clone_but_not_copy() {
+        // Verify we can use token() with a slice stream whose token type is Clone but not
+        // Copy.
+ let input = &[
+ CloneOnly { s: "x".to_string() },
+ CloneOnly { s: "y".to_string() },
+ ][..];
+ let result = token(CloneOnly { s: "x".to_string() }).easy_parse(input);
+ assert_eq!(
+ result,
+ Ok((
+ CloneOnly { s: "x".to_string() },
+ &[CloneOnly { s: "y".to_string() }][..]
+ ))
+ );
+ }
+
+ #[test]
+ fn sep_by_committed_error() {
+ let mut parser2 = sep_by((letter(), letter()), token(','));
+ let result_err: Result<(Vec<(char, char)>, &str), easy::ParseError<&str>> =
+ parser2.easy_parse("a,bc");
+ assert!(result_err.is_err());
+ }
+
+ /// The expected combinator should retain only errors that are not `Expected`
+ #[test]
+ fn expected_retain_errors() {
+ let mut parser = digit()
+ .message("message")
+ .expected("N/A")
+ .expected("my expected digit");
+ assert_eq!(
+ parser.easy_parse(position::Stream::new("a")),
+ Err(Errors {
+ position: SourcePosition::default(),
+ errors: vec![
+ Error::Unexpected('a'.into()),
+ Error::Message("message".into()),
+ Error::Expected("my expected digit".into()),
+ ],
+ })
+ );
+ }
+
+ #[test]
+ fn tuple_parse_error() {
+ let mut parser = (digit(), digit());
+ let result = parser.easy_parse(position::Stream::new("a"));
+ assert_eq!(
+ result,
+ Err(Errors {
+ position: SourcePosition::default(),
+ errors: vec![
+ Error::Unexpected('a'.into()),
+ Error::Expected("digit".into()),
+ ],
+ })
+ );
+ }
+
+ #[test]
+ fn message_tests() {
+ // Ensure message adds to both committed and empty errors, interacting with parse_lazy and
+ // parse_stream correctly on either side
+ let input = "hi";
+
+ let mut ok = char('h').message("not expected");
+ let mut empty0 = char('o').message("expected message");
+ let mut empty1 = char('o').message("expected message").map(|x| x);
+ let mut empty2 = char('o').map(|x| x).message("expected message");
+ let mut committed0 = char('h').with(char('o')).message("expected message");
+ let mut committed1 = char('h')
+ .with(char('o'))
+ .message("expected message")
+ .map(|x| x);
+ let mut committed2 = char('h')
+ .with(char('o'))
+ .map(|x| x)
+ .message("expected message");
+
+ assert!(ok.easy_parse(position::Stream::new(input)).is_ok());
+
+ let empty_expected = Err(Errors {
+ position: SourcePosition { line: 1, column: 1 },
+ errors: vec![
+ Error::Unexpected('h'.into()),
+ Error::Expected('o'.into()),
+ Error::Message("expected message".into()),
+ ],
+ });
+
+ let committed_expected = Err(Errors {
+ position: SourcePosition { line: 1, column: 2 },
+ errors: vec![
+ Error::Unexpected('i'.into()),
+ Error::Expected('o'.into()),
+ Error::Message("expected message".into()),
+ ],
+ });
+
+ assert_eq!(
+ empty0.easy_parse(position::Stream::new(input)),
+ empty_expected
+ );
+ assert_eq!(
+ empty1.easy_parse(position::Stream::new(input)),
+ empty_expected
+ );
+ assert_eq!(
+ empty2.easy_parse(position::Stream::new(input)),
+ empty_expected
+ );
+
+ assert_eq!(
+ committed0.easy_parse(position::Stream::new(input)),
+ committed_expected
+ );
+ assert_eq!(
+ committed1.easy_parse(position::Stream::new(input)),
+ committed_expected
+ );
+ assert_eq!(
+ committed2.easy_parse(position::Stream::new(input)),
+ committed_expected
+ );
+ }
+
+ #[test]
+ fn expected_tests() {
+ // Ensure `expected` replaces only empty errors, interacting with parse_lazy and
+ // parse_stream correctly on either side
+ let input = "hi";
+
+ let mut ok = char('h').expected("not expected");
+ let mut empty0 = char('o').expected("expected message");
+ let mut empty1 = char('o').expected("expected message").map(|x| x);
+ let mut empty2 = char('o').map(|x| x).expected("expected message");
+ let mut committed0 = char('h').with(char('o')).expected("expected message");
+ let mut committed1 = char('h')
+ .with(char('o'))
+ .expected("expected message")
+ .map(|x| x);
+ let mut committed2 = char('h')
+ .with(char('o'))
+ .map(|x| x)
+ .expected("expected message");
+
+ assert!(ok.easy_parse(position::Stream::new(input)).is_ok());
+
+ let empty_expected = Err(Errors {
+ position: SourcePosition { line: 1, column: 1 },
+ errors: vec![
+ Error::Unexpected('h'.into()),
+ Error::Expected("expected message".into()),
+ ],
+ });
+
+ let committed_expected = Err(Errors {
+ position: SourcePosition { line: 1, column: 2 },
+ errors: vec![Error::Unexpected('i'.into()), Error::Expected('o'.into())],
+ });
+
+ assert_eq!(
+ empty0.easy_parse(position::Stream::new(input)),
+ empty_expected
+ );
+ assert_eq!(
+ empty1.easy_parse(position::Stream::new(input)),
+ empty_expected
+ );
+ assert_eq!(
+ empty2.easy_parse(position::Stream::new(input)),
+ empty_expected
+ );
+
+ assert_eq!(
+ committed0.easy_parse(position::Stream::new(input)),
+ committed_expected
+ );
+ assert_eq!(
+ committed1.easy_parse(position::Stream::new(input)),
+ committed_expected
+ );
+ assert_eq!(
+ committed2.easy_parse(position::Stream::new(input)),
+ committed_expected
+ );
+ }
+
+ #[test]
+ fn try_tests() {
+ // Ensure attempt adds error messages exactly once
+ assert_eq!(
+ attempt(unexpected("test")).easy_parse(position::Stream::new("hi")),
+ Err(Errors {
+ position: SourcePosition { line: 1, column: 1 },
+ errors: vec![
+ Error::Unexpected('h'.into()),
+ Error::Unexpected("test".into()),
+ ],
+ })
+ );
+ assert_eq!(
+ attempt(char('h').with(unexpected("test"))).easy_parse(position::Stream::new("hi")),
+ Err(Errors {
+ position: SourcePosition { line: 1, column: 2 },
+ errors: vec![
+ Error::Unexpected('i'.into()),
+ Error::Unexpected("test".into()),
+ ],
+ })
+ );
+ }
+
+ #[test]
+ fn sequence_error() {
+ let mut parser = (char('a'), char('b'), char('c'));
+
+ assert_eq!(
+ parser.easy_parse(position::Stream::new("c")),
+ Err(Errors {
+ position: SourcePosition { line: 1, column: 1 },
+ errors: vec![Error::Unexpected('c'.into()), Error::Expected('a'.into())],
+ })
+ );
+
+ assert_eq!(
+ parser.easy_parse(position::Stream::new("ac")),
+ Err(Errors {
+ position: SourcePosition { line: 1, column: 2 },
+ errors: vec![Error::Unexpected('c'.into()), Error::Expected('b'.into())],
+ })
+ );
+ }
+
+ #[test]
+ fn optional_empty_ok_then_error() {
+ let mut parser = (optional(char('a')), char('b'));
+
+ assert_eq!(
+ parser.easy_parse(position::Stream::new("c")),
+ Err(Errors {
+ position: SourcePosition { line: 1, column: 1 },
+ errors: vec![
+ Error::Unexpected('c'.into()),
+ Error::Expected('a'.into()),
+ Error::Expected('b'.into()),
+ ],
+ })
+ );
+ }
+
+ #[test]
+ fn nested_optional_empty_ok_then_error() {
+ let mut parser = ((optional(char('a')), char('b')), char('c'));
+
+ assert_eq!(
+ parser.easy_parse(position::Stream::new("c")),
+ Err(Errors {
+ position: SourcePosition { line: 1, column: 1 },
+ errors: vec![
+ Error::Unexpected('c'.into()),
+ Error::Expected('a'.into()),
+ Error::Expected('b'.into()),
+ ],
+ })
+ );
+ }
+
+ #[test]
+ fn committed_then_optional_empty_ok_then_error() {
+ let mut parser = (char('b'), optional(char('a')), char('b'));
+
+ assert_eq!(
+ parser.easy_parse(position::Stream::new("bc")),
+ Err(Errors {
+ position: SourcePosition { line: 1, column: 2 },
+ errors: vec![
+ Error::Unexpected('c'.into()),
+ Error::Expected('a'.into()),
+ Error::Expected('b'.into()),
+ ],
+ })
+ );
+ }
+
+ #[test]
+ fn sequence_in_choice_parser_empty_err() {
+ let mut parser = choice((
+ (optional(char('a')), char('1')),
+ (optional(char('b')), char('2')).skip(char('d')),
+ ));
+
+ assert_eq!(
+ parser.easy_parse(position::Stream::new("c")),
+ Err(Errors {
+ position: SourcePosition { line: 1, column: 1 },
+ errors: vec![
+ Error::Expected('a'.into()),
+ Error::Expected('1'.into()),
+ Error::Expected('b'.into()),
+ Error::Expected('2'.into()),
+ Error::Unexpected('c'.into()),
+ ],
+ })
+ );
+ }
+
+ #[test]
+ fn sequence_in_choice_array_parser_empty_err() {
+ let mut parser = choice([
+ (optional(char('a')), char('1')),
+ (optional(char('b')), char('2')),
+ ]);
+
+ assert_eq!(
+ parser.easy_parse(position::Stream::new("c")),
+ Err(Errors {
+ position: SourcePosition { line: 1, column: 1 },
+ errors: vec![
+ Error::Expected('a'.into()),
+ Error::Expected('1'.into()),
+ Error::Expected('b'.into()),
+ Error::Expected('2'.into()),
+ Error::Unexpected('c'.into()),
+ ],
+ })
+ );
+ }
+
+ #[test]
+ fn sequence_in_choice_array_parser_empty_err_where_first_parser_delay_errors() {
+ let mut p1 = char('1');
+ let mut p2 = no_partial((optional(char('b')), char('2')).map(|t| t.1));
+ let mut parser =
+ choice::<_, [&mut dyn Parser<_, Output = _, PartialState = _>; 2]>([&mut p1, &mut p2]);
+
+ assert_eq!(
+ parser.easy_parse(position::Stream::new("c")),
+ Err(Errors {
+ position: SourcePosition { line: 1, column: 1 },
+ errors: vec![
+ Error::Expected('1'.into()),
+ Error::Expected('b'.into()),
+ Error::Expected('2'.into()),
+ Error::Unexpected('c'.into()),
+ ],
+ })
+ );
+ }
+
+ #[test]
+ fn sep_end_by1_dont_eat_separator_twice() {
+ let mut parser = sep_end_by1(digit(), token(';'));
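+        // '1' and its trailing ';' are consumed; the second ';' stays in the input rather
+        // than being eaten as a separator in front of a missing element.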
+ assert_eq!(parser.parse("1;;"), Ok((vec!['1'], ";")));
+ }
+
+ #[test]
+ fn count_min_max_empty_error() {
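+        // count_min_max(1, 1, ..) fails on "b" without consuming input, so `.or` may fall
+        // back to `value(vec![])`.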
+ assert_eq!(
+ count_min_max(1, 1, char('a')).or(value(vec![])).parse("b"),
+ Ok((vec![], "b"))
+ );
+ }
+
+ #[test]
+ fn sequence_parser_resets_partial_state_issue_168() {
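+        // Regression test for #168: the partial state of the inner (char('a'), char('b'))
+        // sequence must be reset after every failed attempt, so "aa" is collected and "ab"
+        // is left unparsed.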
+ assert_eq!(
+ take_until::<String, _, _>(attempt((char('a'), char('b')))).parse("aaab"),
+ Ok((String::from("aa"), "ab"))
+ );
+ }
+
+ #[test]
+ fn parser_macro_must_impl_parse_mode_issue_168() {
+ assert_eq!(
+ skip_until(attempt((char('a'), char('b')))).parse("aaab"),
+ Ok(((), "ab"))
+ );
+ }
+
+ #[test]
+ fn recognize_parser_issue_168() {
+ assert_eq!(
+ range::recognize(skip_until(attempt((char('a'), char('b'))))).parse("aaab"),
+ Ok(("aa", "ab"))
+ );
+ }
+
+ #[test]
+ fn sequence_in_optional_report_delayed_error() {
+ assert_eq!(
+ optional(position().with(char('a')))
+ .skip(char('}'))
+ .easy_parse("b")
+ .map_err(|e| e.errors),
+ Err(vec![
+ Error::Unexpected('b'.into()),
+ Error::Expected('a'.into()),
+ Error::Expected('}'.into()),
+ ]),
+ );
+ }
+
+ #[test]
+ fn sequence_in_optional_nested_report_delayed_error() {
+ assert_eq!(
+ optional(position().with(char('a')))
+ .skip(optional(position().with(char('c'))))
+ .skip(char('}'))
+ .easy_parse("b")
+ .map_err(|e| e.errors),
+ Err(vec![
+ Error::Unexpected('b'.into()),
+ Error::Expected('a'.into()),
+ Error::Expected('c'.into()),
+ Error::Expected('}'.into()),
+ ]),
+ );
+ }
+
+ #[test]
+ fn sequence_in_optional_nested_2_report_delayed_error() {
+ assert_eq!(
+ (
+ char('{'),
+ optional(position().with(char('a')))
+ .skip(optional(position().with(char('c'))))
+ .skip(char('}'))
+ )
+ .easy_parse("{b")
+ .map_err(|e| e.errors),
+ Err(vec![
+ Error::Unexpected('b'.into()),
+ Error::Expected('a'.into()),
+ Error::Expected('c'.into()),
+ Error::Expected('}'.into()),
+ ]),
+ );
+ }
+
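+    // Shared body for sequence_in_many_report_delayed_error below: combine a repeated
+    // `position().with(char('a'))` with `char('}')` via `$seq` and check that the delayed
+    // error from the repeated element is reported together with the outer expectation.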
+ macro_rules! sequence_many_test {
+ ($many:expr, $seq:expr) => {
+ let mut parser = $seq($many(position().with(char('a'))), char('}'));
+ let expected_error = Err(vec![
+ Error::Unexpected('b'.into()),
+ Error::Expected('a'.into()),
+ Error::Expected('}'.into()),
+ ]);
+ assert_eq!(
+ parser.easy_parse("ab").map_err(|e| e.errors),
+ expected_error,
+ );
+ };
+ }
+
+ #[test]
+ fn sequence_in_many_report_delayed_error() {
+ use combine::parser::{repeat, sequence};
+
+ sequence_many_test!(repeat::many::<Vec<_>, _, _>, sequence::skip);
+ sequence_many_test!(repeat::many1::<Vec<_>, _, _>, sequence::skip);
+ sequence_many_test!(repeat::many::<Vec<_>, _, _>, sequence::with);
+ sequence_many_test!(repeat::many1::<Vec<_>, _, _>, sequence::with);
+ sequence_many_test!(repeat::many::<Vec<_>, _, _>, |l, x| sequence::between(
+ l,
+ char('|'),
+ x,
+ ));
+ sequence_many_test!(repeat::many1::<Vec<_>, _, _>, |l, x| sequence::between(
+ l,
+ char('|'),
+ x,
+ ));
+ }
+
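+    // Same shape as sequence_many_test, but with a ',' separator, so the delayed error
+    // mentions the separator instead of the repeated element.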
+ macro_rules! sequence_sep_by_test {
+ ($many:expr, $seq:expr) => {
+ let mut parser = $seq($many(position().with(char('a')), char(',')), char('}'));
+ let expected_error = Err(vec![
+ Error::Unexpected('b'.into()),
+ Error::Expected(','.into()),
+ Error::Expected('}'.into()),
+ ]);
+ assert_eq!(
+ parser.easy_parse("a,ab").map_err(|e| e.errors),
+ expected_error,
+ );
+ };
+ }
+
+ #[test]
+ fn sequence_in_sep_by_report_delayed_error() {
+ use combine::parser::{repeat, sequence};
+
+ sequence_sep_by_test!(repeat::sep_by::<Vec<_>, _, _, _>, sequence::skip);
+ sequence_sep_by_test!(repeat::sep_by1::<Vec<_>, _, _, _>, sequence::skip);
+ sequence_sep_by_test!(repeat::sep_by::<Vec<_>, _, _, _>, sequence::with);
+ sequence_sep_by_test!(repeat::sep_by1::<Vec<_>, _, _, _>, sequence::with);
+ }
+
+ #[test]
+ fn choice_compose_on_error() {
+ let ident = |s| attempt(string(s));
+ let mut parser = choice((ident("aa").skip(string(";")), choice((ident("cc"),))));
+
+ assert_eq!(
+ parser.easy_parse("c").map_err(|err| err.errors),
+ Err(vec![
+ Error::Unexpected('c'.into()),
+ Error::Expected("aa".into()),
+ Error::Unexpected("end of input".into()),
+ Error::Expected("cc".into()),
+ ]),
+ );
+ }
+
+ #[test]
+ fn choice_compose_issue_175() {
+ let ident = |s| attempt(string(s));
+ let mut parser = many::<Vec<_>, _, _>(position().and(choice((
+ ident("aa").skip(string(";")),
+ choice((ident("bb"), ident("cc"))),
+ ))))
+ .skip(string("."));
+
+ assert_eq!(
+ parser.easy_parse("c").map_err(|err| err.errors),
+ Err(vec![
+ Error::Unexpected('c'.into()),
+ Error::Expected("aa".into()),
+ Error::Expected("bb".into()),
+ Error::Expected("cc".into()),
+ ]),
+ );
+ }
+
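+    // A tuple of parsers reports the expectation of the element that failed: here
+    // `letter()` rejects the second '1'.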
+ #[test]
+ fn test() {
+ let mut parser = (digit(), letter());
+
+ assert_eq!(
+ parser.easy_parse("11").map_err(|err| err.errors),
+ Err(vec![
+ Error::Unexpected('1'.into()),
+ Error::Expected("letter".into()),
+ ]),
+ );
+ }
+
+ #[test]
+ fn lifetime_inference() {
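+        // Compile-only checks (the inner functions are never called): the output of
+        // string/string_cmp/bytes/bytes_cmp must borrow from the static pattern rather
+        // than from `source`.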
+ fn _string<'a>(source: &'a str) {
+ range::take(1).or(string("a")).parse(source).ok();
+ range::take(1)
+ .or(string_cmp("a", |x, y| x == y))
+ .parse(source)
+ .ok();
+ let _: &'static str = string("a").parse(source).unwrap().0;
+ let _: &'static str = string_cmp("a", |x, y| x == y).parse(source).unwrap().0;
+ }
+ fn _bytes<'a>(source: &'a [u8]) {
+ range::take(1).or(bytes(&[0u8])).parse(source).ok();
+ range::take(1)
+ .or(bytes_cmp(&[0u8], |x, y| x == y))
+ .parse(source)
+ .ok();
+ let _: &'static [u8] = bytes(&[0u8]).parse(source).unwrap().0;
+ let _: &'static [u8] = bytes_cmp(&[0u8], |x, y| x == y).parse(source).unwrap().0;
+ }
+ }
+
+ #[test]
+ fn test_nested_count_overflow() {
+ let key = || count::<Vec<_>, _, _>(64, alpha_num());
+ let value_bytes =
+ || be_u32().then_partial(|&mut size| count::<Vec<_>, _, _>(size as usize, any()));
+ let value_messages =
+ (be_u32(), be_u32()).then_partial(|&mut (_body_size, message_count)| {
+ count::<Vec<_>, _, _>(message_count as usize, value_bytes())
+ });
+ let put = (bytes(b"PUT"), key())
+ .map(|(_, key)| key)
+ .and(value_messages);
+
+ let parser = || put.map(|(_, messages)| messages);
+
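+        // Wire layout: "PUT", key "key" (count(64, alpha_num()) stops at the first
+        // non-alphanumeric byte), body size 0x12, message count 2, then two
+        // length-prefixed payloads of 4 and 2 bytes.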
+ let command = &b"PUTkey\x00\x00\x00\x12\x00\x00\x00\x02\x00\x00\x00\x04\xDE\xAD\xBE\xEF\x00\x00\x00\x02\xBE\xEF"[..];
+ let result = parser().parse(command).unwrap();
+ assert_eq!(2, result.0.len());
+ }
+
+ #[test]
+ fn not_followed_by_empty_error_issue_220() {
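+        // Regression test for #220: this expected failure carries an empty error list.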
+ let mut parser = string("let").skip(not_followed_by(eof().map(|_| "EOF")));
+ assert_eq!(
+ parser.easy_parse("let").map_err(|err| err.errors),
+ Err(vec![]),
+ );
+ }
+}
diff --git a/tests/parser_macro.rs b/tests/parser_macro.rs
new file mode 100644
index 0000000..e6d8dfc
--- /dev/null
+++ b/tests/parser_macro.rs
@@ -0,0 +1,36 @@
+#[macro_use]
+extern crate combine;
+
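+// These invocations only need to expand and type-check; the bodies exercise imports,
+// items and statements inside `parser!`, and the test below confirms that no extra
+// imports are required at the call site.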
+parser! {
+ pub fn test[Input]()(Input) -> ()
+ where [Input: ::combine::Stream<Token = char>]
+    {
+
+        use combine::parser::token::value;
+
+        let _ = ();
+        fn _test() {}
+        match Some(1) {
+            Some(_) => (),
+            None => (),
+        }
+        value(())
+    }
+}
+
+parser! {
+ pub fn test_that_parsers_with_unnamed_types_can_be_in_same_scope[Input]()(Input) -> ()
+ where [Input: ::combine::Stream<Token = char>]
+    {
+
+        use combine::parser::token::value;
+
+        value(())
+    }
+}
+
+#[test]
+fn test_that_we_dont_need_imports_for_this_macro_to_work() {
+ test::<&str>();
+ test_that_parsers_with_unnamed_types_can_be_in_same_scope::<&str>();
+}
diff --git a/tests/support/mod.rs b/tests/support/mod.rs
new file mode 100644
index 0000000..e603c4c
--- /dev/null
+++ b/tests/support/mod.rs
@@ -0,0 +1,186 @@
+#![allow(dead_code)]
+
+use std::{
+ io,
+ marker::Unpin,
+ pin::Pin,
+ task::{self, Poll},
+};
+
+use {futures_03_dep::ready, partial_io::PartialOp};
+
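+/// Wraps an `AsyncRead` and applies a scripted sequence of `PartialOp`s to each read,
+/// letting the tests exercise short reads and injected I/O errors.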
+pub struct PartialAsyncRead<R> {
+ inner: R,
+ ops: Box<dyn Iterator<Item = PartialOp> + Send>,
+}
+
+impl<R> PartialAsyncRead<R>
+where
+ R: Unpin,
+{
+ pub fn new<I>(inner: R, ops: I) -> Self
+ where
+ I: IntoIterator<Item = PartialOp>,
+ I::IntoIter: Send + 'static,
+ {
+ PartialAsyncRead {
+ inner,
+ ops: Box::new(ops.into_iter()),
+ }
+ }
+}
+
+impl<R> tokio_02_dep::io::AsyncRead for PartialAsyncRead<R>
+where
+ R: tokio_02_dep::io::AsyncRead + Unpin,
+{
+ fn poll_read(
+ mut self: Pin<&mut Self>,
+ cx: &mut task::Context<'_>,
+ buf: &mut [u8],
+ ) -> Poll<io::Result<usize>> {
+ match self.ops.next() {
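+            // Cap this read at `n` bytes so the consumer sees partial input.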
+ Some(PartialOp::Limited(n)) => {
+ let len = std::cmp::min(n, buf.len());
+ Pin::new(&mut self.inner).poll_read(cx, &mut buf[..len])
+ }
+ Some(PartialOp::Err(err)) => {
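+                // WouldBlock is surfaced as Pending (with a self-wake) instead of an error.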
+ if err == io::ErrorKind::WouldBlock {
+ cx.waker().wake_by_ref();
+ Poll::Pending
+ } else {
+ Err(io::Error::new(
+ err,
+ "error during read, generated by partial-io",
+ ))
+ .into()
+ }
+ }
+ Some(PartialOp::Unlimited) | None => Pin::new(&mut self.inner).poll_read(cx, buf),
+ }
+ }
+}
+
+impl<R> tokio_03_dep::io::AsyncRead for PartialAsyncRead<R>
+where
+ R: tokio_03_dep::io::AsyncRead + Unpin,
+{
+ fn poll_read(
+ mut self: Pin<&mut Self>,
+ cx: &mut task::Context<'_>,
+ buf: &mut tokio_03_dep::io::ReadBuf<'_>,
+ ) -> Poll<io::Result<()>> {
+ match self.ops.next() {
+ Some(PartialOp::Limited(n)) => {
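+                    // Read into at most `len` bytes of the unfilled portion, then advance
+                    // the outer buffer by however much the inner reader filled.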
+ let len = std::cmp::min(n, buf.remaining());
+ buf.initialize_unfilled();
+ let mut sub_buf = buf.take(len);
+ ready!(Pin::new(&mut self.inner).poll_read(cx, &mut sub_buf))?;
+ let filled = sub_buf.filled().len();
+ buf.advance(filled);
+ Poll::Ready(Ok(()))
+ }
+ Some(PartialOp::Err(err)) => {
+ if err == io::ErrorKind::WouldBlock {
+ cx.waker().wake_by_ref();
+ Poll::Pending
+ } else {
+ Err(io::Error::new(
+ err,
+ "error during read, generated by partial-io",
+ ))
+ .into()
+ }
+ }
+ Some(PartialOp::Unlimited) | None => Pin::new(&mut self.inner).poll_read(cx, buf),
+ }
+ }
+}
+
+impl<R> tokio_dep::io::AsyncRead for PartialAsyncRead<R>
+where
+ R: tokio_dep::io::AsyncRead + Unpin,
+{
+ fn poll_read(
+ mut self: Pin<&mut Self>,
+ cx: &mut task::Context<'_>,
+ buf: &mut tokio_dep::io::ReadBuf<'_>,
+ ) -> Poll<io::Result<()>> {
+ match self.ops.next() {
+ Some(PartialOp::Limited(n)) => {
+ let len = std::cmp::min(n, buf.remaining());
+ buf.initialize_unfilled();
+ let mut sub_buf = buf.take(len);
+ ready!(Pin::new(&mut self.inner).poll_read(cx, &mut sub_buf))?;
+ let filled = sub_buf.filled().len();
+ buf.advance(filled);
+ Poll::Ready(Ok(()))
+ }
+ Some(PartialOp::Err(err)) => {
+ if err == io::ErrorKind::WouldBlock {
+ cx.waker().wake_by_ref();
+ Poll::Pending
+ } else {
+ Err(io::Error::new(
+ err,
+ "error during read, generated by partial-io",
+ ))
+ .into()
+ }
+ }
+ Some(PartialOp::Unlimited) | None => Pin::new(&mut self.inner).poll_read(cx, buf),
+ }
+ }
+}
+
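+/// Like `PartialAsyncRead`, but implements the `futures` crate's `io::AsyncRead` trait.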
+pub struct FuturesPartialAsyncRead<R> {
+ inner: R,
+ ops: Box<dyn Iterator<Item = PartialOp> + Send>,
+}
+
+impl<R> FuturesPartialAsyncRead<R>
+where
+ R: crate::futures::io::AsyncRead + Unpin,
+{
+ pub fn new<I>(inner: R, ops: I) -> Self
+ where
+ I: IntoIterator<Item = PartialOp>,
+ I::IntoIter: Send + 'static,
+ {
+ FuturesPartialAsyncRead {
+ inner,
+ ops: Box::new(ops.into_iter()),
+ }
+ }
+}
+
+impl<R> crate::futures::io::AsyncRead for FuturesPartialAsyncRead<R>
+where
+ R: crate::futures::io::AsyncRead + Unpin,
+{
+ fn poll_read(
+ mut self: Pin<&mut Self>,
+ cx: &mut task::Context<'_>,
+ buf: &mut [u8],
+ ) -> Poll<io::Result<usize>> {
+ match self.ops.next() {
+ Some(PartialOp::Limited(n)) => {
+ let len = std::cmp::min(n, buf.len());
+ Pin::new(&mut self.inner).poll_read(cx, &mut buf[..len])
+ }
+ Some(PartialOp::Err(err)) => {
+ if err == io::ErrorKind::WouldBlock {
+ cx.waker().wake_by_ref();
+ Poll::Pending
+ } else {
+ Err(io::Error::new(
+ err,
+ "error during read, generated by partial-io",
+ ))
+ .into()
+ }
+ }
+ Some(PartialOp::Unlimited) | None => Pin::new(&mut self.inner).poll_read(cx, buf),
+ }
+ }
+}
diff --git a/travis.sh b/travis.sh
new file mode 100755
index 0000000..6d83c38
--- /dev/null
+++ b/travis.sh
@@ -0,0 +1,24 @@
+#!/bin/bash -x
+set -ex
+
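+# On Rust 1.40.0 (presumably the minimum supported toolchain) only type-check;
+# newer toolchains run the full build, test and benchmark matrix below.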
+if [[ "$TRAVIS_RUST_VERSION" == "1.40.0" ]]; then
+ cargo "$@" check
+ cargo "$@" check --no-default-features
+else
+ cargo "$@" build
+ cargo "$@" test --all-features
+ cargo "$@" test --all-features --examples
+
+ cargo "$@" test --bench json --bench http -- --test
+ cargo "$@" check --bench mp4 --features mp4
+
+ cargo "$@" build --no-default-features
+ cargo "$@" test --no-default-features --examples
+
+ cargo "$@" check --no-default-features --features tokio-02
+ cargo "$@" check --no-default-features --features tokio-03
+fi
+
+if [[ "$TRAVIS_RUST_VERSION" == "stable" ]]; then
+ cargo doc
+fi