author     Joel Galenson <jgalenson@google.com>  2021-09-22 10:53:16 -0700
committer  Joel Galenson <jgalenson@google.com>  2021-09-22 10:53:16 -0700
commit     057d758bdc55bd6af377c100e3955e310da314d4 (patch)
tree       7ade8f157fe658eb53a78fba4a6f2d289e33c66b
parent     eac22ba160becf195253d0fbac0c241c69271b2d (diff)
download   combine-057d758bdc55bd6af377c100e3955e310da314d4.tar.gz

Upgrade rust/crates/combine to 4.6.1

Test: make
Change-Id: I2154f778e17a26eb25fde1d738162d257f3d9613
-rw-r--r--  .cargo_vcs_info.json      |   5
-rw-r--r--  .clog.toml                |   6
-rw-r--r--  .gitignore                |   8
-rw-r--r--  .travis.yml               |  37
-rw-r--r--  Android.bp                |   5
-rw-r--r--  CHANGELOG.md              |  11
-rw-r--r--  Cargo.lock                |   6
-rw-r--r--  Cargo.toml                |  38
-rw-r--r--  Cargo.toml.orig           |  23
-rw-r--r--  METADATA                  |   8
-rw-r--r--  benches/http.rs           |  20
-rw-r--r--  benches/json.rs           |  20
-rw-r--r--  examples/async.rs         |   1
-rw-r--r--  src/error.rs              |   5
-rw-r--r--  src/future_ext.rs         |  29
-rw-r--r--  src/lib.rs                |  12
-rw-r--r--  src/parser/byte.rs        |   6
-rw-r--r--  src/parser/choice.rs      |   2
-rw-r--r--  src/parser/combinator.rs  |  68
-rw-r--r--  src/parser/mod.rs         |   7
-rw-r--r--  src/parser/sequence.rs    |   5
-rw-r--r--  src/stream/buf_reader.rs  | 157
-rw-r--r--  src/stream/buffered.rs    |   2
-rw-r--r--  src/stream/decoder.rs     |   6
-rw-r--r--  src/stream/mod.rs         |  32
-rw-r--r--  tests/parser.rs           |  49
-rw-r--r--  tests/parser_macro.rs     |   2
-rwxr-xr-x  travis.sh                 |   3
28 files changed, 350 insertions, 223 deletions
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json
new file mode 100644
index 0000000..8ace92c
--- /dev/null
+++ b/.cargo_vcs_info.json
@@ -0,0 +1,5 @@
+{
+ "git": {
+ "sha1": "cd35e54a4e9c555da87984fa665542e31f04e306"
+ }
+}
diff --git a/.clog.toml b/.clog.toml
new file mode 100644
index 0000000..2206c2f
--- /dev/null
+++ b/.clog.toml
@@ -0,0 +1,6 @@
+[clog]
+repository = "https://github.com/Marwes/combine"
+
+changelog = "CHANGELOG.md"
+
+from-latest-tag = true
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..2d66f83
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,8 @@
+/target
+
+
+#vim temporary files
+*.swp
+*.swo
+/.vscode
+/benches/small.mp4
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..fa632ea
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,37 @@
+language: rust
+cache: cargo
+sudo: required
+dist: trusty
+addons:
+ apt:
+ packages:
+ - libcurl4-openssl-dev
+ - libelf-dev
+ - libdw-dev
+ - libssl-dev
+rust:
+- nightly
+- beta
+- stable
+- 1.40.0
+before_script:
+- |
+ export PATH=$HOME/.local/bin:$PATH
+script:
+- ./travis.sh
+after_success: |
+ if [[ "$TRAVIS_RUST_VERSION" == stable ]]; then
+ bash <(curl https://raw.githubusercontent.com/xd009642/tarpaulin/master/travis-install.sh)
+ cargo tarpaulin --run-type Tests --run-type DocTests --ciserver travis-ci --coveralls $TRAVIS_JOB_ID
+ fi
+env:
+ global:
+ - TRAVIS_CARGO_NIGHTLY_FEATURE=""
+ - secure: Z0JCbroitF6pKdImGLcar9UcXDFUoggvEwYsksoGX16/28iBXLmBX6DDWN1brVdasx/i5M5aEy8xbzcV680+HEbbUgz5uLAMp3xQFzu5FJ276PM9ZFZZgb02EJuYz9THfrC9ajlc+CirYF91i/yMZbpBGajmAzp61puRph/CgI8=
+notifications:
+ webhooks:
+ urls:
+ - https://webhooks.gitter.im/e/ee4400ef3d920e51415e
+ on_success: change # options: [always|never|change] default: always
+ on_failure: always # options: [always|never|change] default: always
+ on_start: never # options: [always|never|change] default: always
diff --git a/Android.bp b/Android.bp
index 04f06c3..444ea9f 100644
--- a/Android.bp
+++ b/Android.bp
@@ -1,8 +1,6 @@
// This file is generated by cargo2android.py --run --device.
// Do not modify this file as changes will be overridden on upgrade.
-
-
package {
default_applicable_licenses: ["external_rust_crates_combine_license"],
}
@@ -25,10 +23,11 @@ rust_library {
host_supported: true,
crate_name: "combine",
cargo_env_compat: true,
- cargo_pkg_version: "4.6.0",
+ cargo_pkg_version: "4.6.1",
srcs: ["src/lib.rs"],
edition: "2018",
features: [
+ "alloc",
"bytes",
"default",
"std",
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a71baa1..7005dbb 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,14 @@
+<a name="v4.6.1"></a>
+### v4.6.1 (2021-08-25)
+
+
+#### Performance
+
+* Avoid a saturating add in slice_uncons_while ([7f330b0c](https://github.com/Marwes/combine/commit/7f330b0cacd61131df88c919074ffa8136100299))
+* Avoid a saturating add in slice_uncons_while ([ad4180dd](https://github.com/Marwes/combine/commit/ad4180dd7d3530d47502795ead21e13b7816aed7))
+
+
+
<a name="v4.6.0"></a>
## v4.6.0 (2021-06-16)
diff --git a/Cargo.lock b/Cargo.lock
index deacbb0..843b88f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1,5 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
+version = 3
+
[[package]]
name = "aho-corasick"
version = "0.7.15"
@@ -266,15 +268,15 @@ dependencies = [
[[package]]
name = "combine"
-version = "4.6.0"
+version = "4.6.1"
dependencies = [
"async-std",
"bytes 0.5.6",
"bytes 1.0.0",
"criterion",
"futures 0.3.8",
+ "futures-core",
"futures-io",
- "futures-util",
"memchr",
"once_cell",
"partial-io",
diff --git a/Cargo.toml b/Cargo.toml
index a3d9dd8..71df3f5 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -13,7 +13,7 @@
[package]
edition = "2018"
name = "combine"
-version = "4.6.0"
+version = "4.6.1"
authors = ["Markus Westerlind <marwes91@gmail.com>"]
description = "Fast parser combinators on arbitrary streams with zero-copy support."
documentation = "https://docs.rs/combine"
@@ -35,7 +35,7 @@ path = "src/lib.rs"
[[example]]
name = "async"
-required-features = ["std"]
+required-features = ["std", "tokio"]
[[example]]
name = "date"
@@ -51,15 +51,17 @@ name = "ini"
[[test]]
name = "async"
-required-features = ["tokio-02", "futures-util-03"]
+required-features = ["tokio-02", "futures-io-03"]
[[bench]]
name = "json"
harness = false
+required-features = ["std"]
[[bench]]
name = "http"
harness = false
+required-features = ["std"]
[[bench]]
name = "mp4"
@@ -74,18 +76,17 @@ version = "0.5"
optional = true
package = "bytes"
-[dependencies.futures-io-03]
+[dependencies.futures-core-03]
version = "0.3.1"
optional = true
default-features = false
-package = "futures-io"
+package = "futures-core"
-[dependencies.futures-util-03]
+[dependencies.futures-io-03]
version = "0.3.1"
-features = ["io", "std"]
optional = true
default-features = false
-package = "futures-util"
+package = "futures-io"
[dependencies.memchr]
version = "2.2"
@@ -117,6 +118,12 @@ version = "1"
optional = true
default-features = false
package = "tokio"
+
+[dependencies.tokio-util]
+version = "0.6"
+features = ["codec"]
+optional = true
+default-features = false
[dev-dependencies.async-std]
version = "1"
@@ -163,16 +170,13 @@ version = "1"
features = ["fs", "macros", "rt", "rt-multi-thread", "io-util"]
package = "tokio"
-[dev-dependencies.tokio-util]
-version = "0.6"
-features = ["codec"]
-
[features]
+alloc = []
default = ["std"]
-futures-03 = ["pin-project", "std", "futures-io-03", "futures-util-03", "pin-project-lite"]
+futures-03 = ["pin-project", "std", "futures-core-03", "futures-io-03", "pin-project-lite"]
mp4 = []
pin-project = ["pin-project-lite"]
-std = ["memchr/use_std", "bytes"]
-tokio = ["tokio-dep", "futures-util-03", "pin-project-lite"]
-tokio-02 = ["pin-project", "std", "tokio-02-dep", "futures-util-03", "pin-project-lite", "bytes_05"]
-tokio-03 = ["pin-project", "std", "tokio-03-dep", "futures-util-03", "pin-project-lite"]
+std = ["memchr/use_std", "bytes", "alloc"]
+tokio = ["tokio-dep", "tokio-util/io", "futures-core-03", "pin-project-lite"]
+tokio-02 = ["pin-project", "std", "tokio-02-dep", "futures-core-03", "pin-project-lite", "bytes_05"]
+tokio-03 = ["pin-project", "std", "tokio-03-dep", "futures-core-03", "pin-project-lite"]
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
index a4448e1..f2517e6 100644
--- a/Cargo.toml.orig
+++ b/Cargo.toml.orig
@@ -1,6 +1,6 @@
[package]
name = "combine"
-version = "4.6.0"
+version = "4.6.1"
authors = ["Markus Westerlind <marwes91@gmail.com>"]
description = "Fast parser combinators on arbitrary streams with zero-copy support."
@@ -34,8 +34,9 @@ pin-project-lite = { version = "0.2", optional = true }
tokio-02-dep = { version = "0.2.3", package = "tokio", features = ["io-util"], default-features = false, optional = true }
tokio-03-dep = { version = "0.3", package = "tokio", default-features = false, optional = true }
tokio-dep = { version = "1", package = "tokio", default-features = false, optional = true }
+tokio-util = { version = "0.6", features = ["codec"], default-features = false, optional = true }
+futures-core-03 = { version = "0.3.1", package = "futures-core", default-features = false, optional = true }
futures-io-03 = { version = "0.3.1", package = "futures-io", default-features = false, optional = true }
-futures-util-03 = { version = "0.3.1", package = "futures-util", features = ["io", "std"], default-features = false, optional = true }
bytes_05 = { version = "0.5", package = "bytes", optional = true }
bytes = { version = "1", optional = true }
@@ -49,7 +50,6 @@ futures-03-dep = { version = "0.3.1", package = "futures" }
tokio-02-dep = { version = "0.2", features = ["fs", "io-driver", "io-util", "macros"], package = "tokio" }
tokio-03-dep = { version = "0.3", features = ["fs", "macros", "rt-multi-thread"], package = "tokio" }
tokio-dep = { version = "1", features = ["fs", "macros", "rt", "rt-multi-thread", "io-util"], package = "tokio" }
-tokio-util = { version = "0.6", features = ["codec"] }
partial-io = { version = "0.3", features = ["tokio", "quickcheck"] }
quickcheck = "0.6"
quick-error = "1.0"
@@ -60,23 +60,26 @@ default = ["std"]
# Run the mp4 benchmark, requires a mp4 file named `small.mp4` in the benches directory
mp4 = []
pin-project = ["pin-project-lite"]
-tokio-02 = ["pin-project", "std", "tokio-02-dep", "futures-util-03", "pin-project-lite", "bytes_05"]
-tokio-03 = ["pin-project", "std", "tokio-03-dep", "futures-util-03", "pin-project-lite"]
-tokio = ["tokio-dep", "futures-util-03", "pin-project-lite"]
-futures-03 = ["pin-project", "std", "futures-io-03", "futures-util-03", "pin-project-lite"]
-std = ["memchr/use_std", "bytes"]
+tokio-02 = ["pin-project", "std", "tokio-02-dep", "futures-core-03", "pin-project-lite", "bytes_05"]
+tokio-03 = ["pin-project", "std", "tokio-03-dep", "futures-core-03", "pin-project-lite"]
+tokio = ["tokio-dep", "tokio-util/io", "futures-core-03", "pin-project-lite"]
+futures-03 = ["pin-project", "std", "futures-core-03", "futures-io-03", "pin-project-lite"]
+std = ["memchr/use_std", "bytes", "alloc"]
+alloc = []
[[test]]
name = "async"
-required-features = ["tokio-02", "futures-util-03"]
+required-features = ["tokio-02", "futures-io-03"]
[[bench]]
name = "json"
harness = false
+required-features = ["std"]
[[bench]]
name = "http"
harness = false
+required-features = ["std"]
[[bench]]
name = "mp4"
@@ -85,7 +88,7 @@ required-features = ["mp4"]
[[example]]
name = "async"
-required-features = ["std"]
+required-features = ["std", "tokio"]
[[example]]
name = "date"
diff --git a/METADATA b/METADATA
index 378608f..63cbd25 100644
--- a/METADATA
+++ b/METADATA
@@ -7,13 +7,13 @@ third_party {
}
url {
type: ARCHIVE
- value: "https://static.crates.io/crates/combine/combine-4.6.0.crate"
+ value: "https://static.crates.io/crates/combine/combine-4.6.1.crate"
}
- version: "4.6.0"
+ version: "4.6.1"
license_type: NOTICE
last_upgrade_date {
year: 2021
- month: 7
- day: 30
+ month: 9
+ day: 22
}
}
diff --git a/benches/http.rs b/benches/http.rs
index 4330985..68414a5 100644
--- a/benches/http.rs
+++ b/benches/http.rs
@@ -1,3 +1,5 @@
+#![cfg(feature = "std")]
+
#[macro_use]
extern crate criterion;
#[macro_use]
@@ -29,7 +31,8 @@ struct Header<'a> {
}
fn is_token(c: u8) -> bool {
- match c {
+ !matches!(
+ c,
128..=255
| 0..=31
| b'('
@@ -49,9 +52,8 @@ fn is_token(c: u8) -> bool {
| b'='
| b'{'
| b'}'
- | b' ' => false,
- _ => true,
- }
+ | b' '
+ )
}
fn is_horizontal_space(c: u8) -> bool {
@@ -64,7 +66,7 @@ fn is_not_space(c: u8) -> bool {
c != b' '
}
fn is_http_version(c: u8) -> bool {
- c >= b'0' && c <= b'9' || c == b'.'
+ (b'0'..=b'9').contains(&c) || c == b'.'
}
fn end_of_line<'a, Input>() -> impl Parser<Input, Output = u8>
@@ -94,9 +96,9 @@ where
})
}
-fn parse_http_request<'a, Input>(
- input: Input,
-) -> Result<((Request<'a>, Vec<Header<'a>>), Input), Input::Error>
+type HttpRequest<'a> = (Request<'a>, Vec<Header<'a>>);
+
+fn parse_http_request<'a, Input>(input: Input) -> Result<(HttpRequest<'a>, Input), Input::Error>
where
Input: RangeStream<Token = u8, Range = &'a [u8]>,
Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
@@ -122,7 +124,7 @@ where
request.parse(input)
}
-static REQUESTS: &'static [u8] = include_bytes!("http-requests.txt");
+static REQUESTS: &[u8] = include_bytes!("http-requests.txt");
fn http_requests_small(b: &mut Bencher<'_>) {
http_requests_bench(b, easy::Stream(REQUESTS))
diff --git a/benches/json.rs b/benches/json.rs
index 8a44b97..56add8e 100644
--- a/benches/json.rs
+++ b/benches/json.rs
@@ -1,6 +1,8 @@
// `impl Trait` is not required for this parser but we use to to show that it can be used to
// significantly simplify things
+#![cfg(feature = "std")]
+
#[macro_use]
extern crate criterion;
@@ -223,7 +225,7 @@ fn json_test() {
Ok(result) => assert_eq!(result, (expected, "")),
Err(e) => {
println!("{}", e);
- assert!(false);
+ panic!();
}
}
}
@@ -241,10 +243,10 @@ fn bench_json(bencher: &mut Bencher<'_>) {
let mut parser = json_value();
match parser.easy_parse(position::Stream::new(&data[..])) {
Ok((Value::Array(_), _)) => (),
- Ok(_) => assert!(false),
+ Ok(_) => panic!(),
Err(err) => {
println!("{}", err);
- assert!(false);
+ panic!();
}
}
bencher.iter(|| {
@@ -258,10 +260,10 @@ fn bench_json_core_error(bencher: &mut Bencher<'_>) {
let mut parser = json_value();
match parser.parse(position::Stream::new(&data[..])) {
Ok((Value::Array(_), _)) => (),
- Ok(_) => assert!(false),
+ Ok(_) => panic!(),
Err(err) => {
println!("{}", err);
- assert!(false);
+ panic!();
}
}
bencher.iter(|| {
@@ -275,10 +277,10 @@ fn bench_json_core_error_no_position(bencher: &mut Bencher<'_>) {
let mut parser = json_value();
match parser.parse(&data[..]) {
Ok((Value::Array(_), _)) => (),
- Ok(_) => assert!(false),
+ Ok(_) => panic!(),
Err(err) => {
println!("{}", err);
- assert!(false);
+ panic!();
}
}
bencher.iter(|| {
@@ -300,10 +302,10 @@ fn bench_buffered_json(bencher: &mut Bencher<'_>) {
Ok((Value::Array(v), _)) => {
black_box(v);
}
- Ok(_) => assert!(false),
+ Ok(_) => panic!(),
Err(err) => {
println!("{}", err);
- assert!(false);
+ panic!();
}
}
});
diff --git a/examples/async.rs b/examples/async.rs
index 4ebc8e9..909ca2c 100644
--- a/examples/async.rs
+++ b/examples/async.rs
@@ -1,4 +1,5 @@
#![cfg(feature = "std")]
+#![cfg(feature = "tokio")]
use std::{cell::Cell, io::Cursor, rc::Rc, str};
diff --git a/src/error.rs b/src/error.rs
index 854af9d..23326c7 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -43,6 +43,8 @@ macro_rules! ctry {
/// `Token`, `Range`, `Format` or `Static`/`&'static str`
pub trait ErrorInfo<'s, T, R> {
type Format: fmt::Display;
+
+ #[allow(clippy::wrong_self_convention)]
fn into_info(&'s self) -> Info<T, R, Self::Format>;
}
@@ -1087,8 +1089,7 @@ mod tests_std {
CloneOnly { s: "x".to_string() },
CloneOnly { s: "y".to_string() },
][..];
- let result =
- crate::parser::range::take_while(|c: CloneOnly| c.s == "x".to_string()).parse(input);
+ let result = crate::parser::range::take_while(|c: CloneOnly| c.s == "x").parse(input);
assert_eq!(
result,
Ok((
diff --git a/src/future_ext.rs b/src/future_ext.rs
new file mode 100644
index 0000000..f6168a3
--- /dev/null
+++ b/src/future_ext.rs
@@ -0,0 +1,29 @@
+use crate::lib::future::Future;
+use crate::lib::marker::Unpin;
+use crate::lib::pin::Pin;
+use crate::lib::task::{Context, Poll};
+
+// Replace usage of this with std::future::poll_fn once it stabilizes
+pub struct PollFn<F> {
+ f: F,
+}
+
+impl<F> Unpin for PollFn<F> {}
+
+pub fn poll_fn<T, F>(f: F) -> PollFn<F>
+where
+ F: FnMut(&mut Context<'_>) -> Poll<T>,
+{
+ PollFn { f }
+}
+
+impl<T, F> Future for PollFn<F>
+where
+ F: FnMut(&mut Context<'_>) -> Poll<T>,
+{
+ type Output = T;
+
+ fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<T> {
+ (&mut self.f)(cx)
+ }
+}
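
The new future_ext::poll_fn mirrors the then-unstable std::future::poll_fn: it turns any FnMut(&mut Context<'_>) -> Poll<T> closure into a Future, which is what lets the decoder and buf_reader code further down drop their futures-util dependency. Below is a minimal, self-contained sketch of how such a helper behaves when polled by hand; the no-op waker and the countdown closure are illustrative assumptions, not part of the crate.

    use std::future::Future;
    use std::pin::Pin;
    use std::ptr;
    use std::task::{Context, Poll, RawWaker, RawWakerVTable, Waker};

    // Closure-backed future, same shape as the helper added in src/future_ext.rs.
    struct PollFn<F> {
        f: F,
    }

    impl<F> Unpin for PollFn<F> {}

    fn poll_fn<T, F>(f: F) -> PollFn<F>
    where
        F: FnMut(&mut Context<'_>) -> Poll<T>,
    {
        PollFn { f }
    }

    impl<T, F> Future for PollFn<F>
    where
        F: FnMut(&mut Context<'_>) -> Poll<T>,
    {
        type Output = T;

        fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<T> {
            (&mut self.f)(cx)
        }
    }

    // A waker that does nothing, so the example can poll without a real executor.
    fn noop_waker() -> Waker {
        unsafe fn noop(_: *const ()) {}
        unsafe fn clone_waker(_: *const ()) -> RawWaker {
            RawWaker::new(ptr::null(), &VTABLE)
        }
        static VTABLE: RawWakerVTable = RawWakerVTable::new(clone_waker, noop, noop, noop);
        unsafe { Waker::from_raw(RawWaker::new(ptr::null(), &VTABLE)) }
    }

    fn main() {
        // Becomes ready on the third poll; real callers poll an underlying resource here.
        let mut polls_left = 2;
        let mut fut = poll_fn(move |_cx| {
            if polls_left == 0 {
                Poll::Ready("done")
            } else {
                polls_left -= 1;
                Poll::Pending
            }
        });

        let waker = noop_waker();
        let mut cx = Context::from_waker(&waker);
        loop {
            // PollFn is Unpin, so Pin::new is enough here.
            match Pin::new(&mut fut).poll(&mut cx) {
                Poll::Ready(v) => {
                    println!("{}", v);
                    break;
                }
                Poll::Pending => {} // an executor would wait for a wakeup instead of spinning
            }
        }
    }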
diff --git a/src/lib.rs b/src/lib.rs
index af2b713..ab46d81 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -195,6 +195,9 @@
#![cfg_attr(not(feature = "std"), no_std)]
#![cfg_attr(docsrs, feature(doc_cfg))]
+#[cfg(feature = "alloc")]
+extern crate alloc;
+
#[doc(inline)]
pub use crate::error::{ParseError, ParseResult, StdParseResult};
@@ -616,6 +619,9 @@ pub mod stream;
#[macro_use]
pub mod parser;
+#[cfg(feature = "futures-core-03")]
+pub mod future_ext;
+
#[doc(hidden)]
#[derive(Clone, PartialOrd, PartialEq, Debug, Copy)]
pub struct ErrorOffset(u8);
@@ -700,7 +706,7 @@ mod std_tests {
}
}
- fn integer<'a, Input>(input: &mut Input) -> StdParseResult<i64, Input>
+ fn integer<Input>(input: &mut Input) -> StdParseResult<i64, Input>
where
Input: Stream<Token = char>,
Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
@@ -864,11 +870,11 @@ mod std_tests {
.map(|x| x.to_string())
.or(many1(digit()));
match p.easy_parse(position::Stream::new("le123")) {
- Ok(_) => assert!(false),
+ Ok(_) => panic!(),
Err(err) => assert_eq!(err.position, SourcePosition { line: 1, column: 1 }),
}
match p.easy_parse(position::Stream::new("let1")) {
- Ok(_) => assert!(false),
+ Ok(_) => panic!(),
Err(err) => assert_eq!(err.position, SourcePosition { line: 1, column: 4 }),
}
}
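
For background, the new alloc feature follows the usual no_std-plus-allocator pattern: without std the crate stays #![no_std], but enabling alloc pulls in the alloc crate so that Box-, String- and Vec-based combinators remain available. The following is only a generic sketch of that pattern under an assumed "alloc" Cargo feature, with invented functions; it is not combine's actual module layout.

    // Crate root of a hypothetical no_std-capable library crate.
    #![cfg_attr(not(feature = "std"), no_std)]

    #[cfg(feature = "alloc")]
    extern crate alloc;

    #[cfg(feature = "alloc")]
    use alloc::{string::String, vec::Vec};

    // Available on no_std targets as long as an allocator is present.
    #[cfg(feature = "alloc")]
    pub fn split_words(input: &str) -> Vec<String> {
        input.split_whitespace().map(String::from).collect()
    }

    // Always available: needs neither std nor an allocator.
    pub fn count_words(input: &str) -> usize {
        input.split_whitespace().count()
    }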
diff --git a/src/parser/byte.rs b/src/parser/byte.rs
index 95fc4c1..b28d362 100644
--- a/src/parser/byte.rs
+++ b/src/parser/byte.rs
@@ -220,7 +220,7 @@ where
Input: Stream<Token = u8>,
Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
- satisfy(|ch| ch >= b'0' && ch <= b'7').expected("octal digit")
+ satisfy(|ch| (b'0'..=b'7').contains(&ch)).expected("octal digit")
}
/// Parses an ASCII hexdecimal digit (accepts both uppercase and lowercase).
@@ -599,7 +599,7 @@ pub mod num {
pub F64, f64, be_f64, le_f64, read_f64
);
- #[cfg(test)]
+ #[cfg(all(feature = "std", test))]
mod tests {
use crate::stream::{buffered, position, IteratorStream};
@@ -641,7 +641,7 @@ pub mod num {
}
}
-#[cfg(test)]
+#[cfg(all(feature = "std", test))]
mod tests {
use crate::stream::{buffered, position, read};
diff --git a/src/parser/choice.rs b/src/parser/choice.rs
index 8efdb0c..ef4e5a8 100644
--- a/src/parser/choice.rs
+++ b/src/parser/choice.rs
@@ -835,7 +835,7 @@ macro_rules! dispatch {
}
}
-#[cfg(test)]
+#[cfg(all(feature = "std", test))]
mod tests {
use crate::parser::{token::any, EasyParser};
diff --git a/src/parser/combinator.rs b/src/parser/combinator.rs
index d8f3f25..bb06b89 100644
--- a/src/parser/combinator.rs
+++ b/src/parser/combinator.rs
@@ -12,6 +12,12 @@ use crate::{
Parser,
};
+#[cfg(feature = "alloc")]
+use alloc::{boxed::Box, string::String, vec::Vec};
+
+#[cfg(feature = "alloc")]
+use crate::lib::any::Any;
+
#[derive(Copy, Clone)]
pub struct NotFollowedBy<P>(P);
impl<Input, O, P> Parser<Input> for NotFollowedBy<P>
@@ -711,16 +717,16 @@ where
Ignore(p)
}
-#[cfg(feature = "std")]
+#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
#[derive(Default)]
-pub struct AnyPartialState(Option<Box<dyn std::any::Any>>);
+pub struct AnyPartialState(Option<Box<dyn Any>>);
-#[cfg(feature = "std")]
+#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
pub struct AnyPartialStateParser<P>(P);
-#[cfg(feature = "std")]
+#[cfg(feature = "alloc")]
impl<Input, P> Parser<Input> for AnyPartialStateParser<P>
where
Input: Stream,
@@ -803,7 +809,7 @@ where
///
/// # }
/// ```
-#[cfg(feature = "std")]
+#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
pub fn any_partial_state<Input, P>(p: P) -> AnyPartialStateParser<P>
where
@@ -814,16 +820,16 @@ where
AnyPartialStateParser(p)
}
-#[cfg(feature = "std")]
+#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
#[derive(Default)]
-pub struct AnySendPartialState(Option<Box<dyn std::any::Any + Send>>);
+pub struct AnySendPartialState(Option<Box<dyn Any + Send>>);
-#[cfg(feature = "std")]
+#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
pub struct AnySendPartialStateParser<P>(P);
-#[cfg(feature = "std")]
+#[cfg(feature = "alloc")]
impl<Input, P> Parser<Input> for AnySendPartialStateParser<P>
where
Input: Stream,
@@ -906,7 +912,7 @@ where
///
/// # }
/// ```
-#[cfg(feature = "std")]
+#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
pub fn any_send_partial_state<Input, P>(p: P) -> AnySendPartialStateParser<P>
where
@@ -917,16 +923,16 @@ where
AnySendPartialStateParser(p)
}
-#[cfg(feature = "std")]
+#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
#[derive(Default)]
-pub struct AnySendSyncPartialState(Option<Box<dyn std::any::Any + Send + Sync>>);
+pub struct AnySendSyncPartialState(Option<Box<dyn Any + Send + Sync>>);
-#[cfg(feature = "std")]
+#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
pub struct AnySendSyncPartialStateParser<P>(P);
-#[cfg(feature = "std")]
+#[cfg(feature = "alloc")]
impl<Input, P> Parser<Input> for AnySendSyncPartialStateParser<P>
where
Input: Stream,
@@ -1008,7 +1014,7 @@ where
///
/// # }
/// ```
-#[cfg(feature = "std")]
+#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
pub fn any_send_sync_partial_state<Input, P>(p: P) -> AnySendSyncPartialStateParser<P>
where
@@ -1179,52 +1185,52 @@ mod internal {
use self::internal::Sealed;
pub trait StrLike: Sealed {
- fn from_utf8(&self) -> Result<&str, ()>;
+ fn from_utf8(&self) -> Option<&str>;
}
-#[cfg(feature = "std")]
+#[cfg(feature = "alloc")]
impl Sealed for String {}
-#[cfg(feature = "std")]
+#[cfg(feature = "alloc")]
impl StrLike for String {
- fn from_utf8(&self) -> Result<&str, ()> {
- Ok(self)
+ fn from_utf8(&self) -> Option<&str> {
+ Some(self)
}
}
impl<'a> Sealed for &'a str {}
impl<'a> StrLike for &'a str {
- fn from_utf8(&self) -> Result<&str, ()> {
- Ok(*self)
+ fn from_utf8(&self) -> Option<&str> {
+ Some(*self)
}
}
impl Sealed for str {}
impl StrLike for str {
- fn from_utf8(&self) -> Result<&str, ()> {
- Ok(self)
+ fn from_utf8(&self) -> Option<&str> {
+ Some(self)
}
}
-#[cfg(feature = "std")]
+#[cfg(feature = "alloc")]
impl Sealed for Vec<u8> {}
-#[cfg(feature = "std")]
+#[cfg(feature = "alloc")]
impl StrLike for Vec<u8> {
- fn from_utf8(&self) -> Result<&str, ()> {
+ fn from_utf8(&self) -> Option<&str> {
(**self).from_utf8()
}
}
impl<'a> Sealed for &'a [u8] {}
impl<'a> StrLike for &'a [u8] {
- fn from_utf8(&self) -> Result<&str, ()> {
+ fn from_utf8(&self) -> Option<&str> {
(**self).from_utf8()
}
}
impl Sealed for [u8] {}
impl StrLike for [u8] {
- fn from_utf8(&self) -> Result<&str, ()> {
- str::from_utf8(self).map_err(|_| ())
+ fn from_utf8(&self) -> Option<&str> {
+ str::from_utf8(self).ok()
}
}
@@ -1275,7 +1281,7 @@ where [
{
parser.and_then(|r| {
r.from_utf8()
- .map_err(|_| StreamErrorFor::<Input>::expected_static_message("UTF-8"))
+ .ok_or_else(|| StreamErrorFor::<Input>::expected_static_message("UTF-8"))
.and_then(|s| s.parse().map_err(StreamErrorFor::<Input>::message_format))
})
}
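
The StrLike change above is internal plumbing for combinators such as from_str, whose observable behaviour is unchanged: collect the matched input, view it as UTF-8, then convert it with str::parse, reporting failures as parser errors. A small usage sketch against combine's public API, assuming the 4.x module paths and an i32 target type chosen purely for illustration:

    use combine::parser::char::digit;
    use combine::parser::combinator::from_str;
    use combine::parser::repeat::many1;
    use combine::Parser;

    fn main() {
        // Collect one or more digits into a String, then parse that String as an i32.
        let mut parser = from_str(many1::<String, _, _>(digit()));
        let result: Result<(i32, &str), _> = parser.parse("1234 rest");
        assert_eq!(result, Ok((1234, " rest")));
        println!("{:?}", result);
    }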
diff --git a/src/parser/mod.rs b/src/parser/mod.rs
index 98a1e03..17a88b7 100644
--- a/src/parser/mod.rs
+++ b/src/parser/mod.rs
@@ -27,6 +27,9 @@ use self::{
sequence::{skip, with, Skip, With},
};
+#[cfg(feature = "alloc")]
+use alloc::boxed::Box;
+
/// Internal API. May break without a semver bump
#[macro_export]
#[doc(hidden)]
@@ -846,7 +849,7 @@ pub trait Parser<Input: Stream> {
/// assert_eq!(result, Ok((('a', 'c'), "")));
/// # }
/// ```
- #[cfg(feature = "std")]
+ #[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
fn boxed<'a>(
self,
@@ -1086,7 +1089,7 @@ where
forward_deref!(Input);
}
-#[cfg(feature = "std")]
+#[cfg(feature = "alloc")]
impl<P, Input> Parser<Input> for Box<P>
where
P: ?Sized + Parser<Input>,
diff --git a/src/parser/sequence.rs b/src/parser/sequence.rs
index b296966..1c8bdcb 100644
--- a/src/parser/sequence.rs
+++ b/src/parser/sequence.rs
@@ -179,6 +179,7 @@ macro_rules! tuple_parser {
}
};
state.offset = $h.parser_count().0.saturating_add(1);
+ // SAFETY: must be set to avoid UB below when unwrapping
state.$h.value = Some(temp);
// Once we have successfully parsed the partial input we may resume parsing in
@@ -211,6 +212,7 @@ macro_rules! tuple_parser {
}
};
state.offset = state.offset.saturating_add($id.parser_count().0);
+ // SAFETY: must be set to avoid UB below when unwrapping
state.$id.value = Some(temp);
// Once we have successfully parsed the partial input we may resume parsing in
@@ -219,6 +221,7 @@ macro_rules! tuple_parser {
}
)*
+ // SAFETY: requires both $h and $id to be set, see previous SAFETY comments
let value = unsafe { (state.$h.unwrap_value(), $(state.$id.unwrap_value()),*) };
if first_empty_parser != 0 {
CommitOk(value)
@@ -785,7 +788,7 @@ where
ThenPartial(p, f)
}
-#[cfg(test)]
+#[cfg(all(feature = "std", test))]
mod tests {
use crate::parser::{token::any, EasyParser};
diff --git a/src/stream/buf_reader.rs b/src/stream/buf_reader.rs
index 1a10bc9..512833d 100644
--- a/src/stream/buf_reader.rs
+++ b/src/stream/buf_reader.rs
@@ -8,7 +8,7 @@ use std::io::{self, BufRead, Read};
))]
use std::{mem::MaybeUninit, pin::Pin};
-#[cfg(feature = "futures-util-03")]
+#[cfg(feature = "futures-core-03")]
use std::task::{Context, Poll};
#[cfg(feature = "futures-03")]
@@ -25,8 +25,8 @@ use tokio_03_dep::io::AsyncBufRead as _;
#[cfg(feature = "tokio")]
use tokio_dep::io::AsyncBufRead as _;
-#[cfg(feature = "futures-util-03")]
-use futures_util_03::ready;
+#[cfg(feature = "futures-core-03")]
+use futures_core_03::ready;
#[cfg(feature = "pin-project-lite")]
pin_project! {
@@ -208,7 +208,7 @@ where
#[cfg(feature = "futures-03")]
impl<R> CombineAsyncRead<R> for Buffer
where
- R: futures_util_03::io::AsyncRead,
+ R: futures_io_03::AsyncRead,
{
fn poll_extend_buf(
&mut self,
@@ -250,6 +250,17 @@ where
}
#[cfg(feature = "tokio-03")]
+fn tokio_03_to_read_buf(bs: &mut BytesMut) -> tokio_03_dep::io::ReadBuf<'_> {
+ let uninit = bs.chunk_mut();
+ unsafe {
+ tokio_03_dep::io::ReadBuf::uninit(std::slice::from_raw_parts_mut(
+ uninit.as_mut_ptr() as *mut MaybeUninit<u8>,
+ uninit.len(),
+ ))
+ }
+}
+
+#[cfg(feature = "tokio-03")]
impl<R> CombineRead<R, dyn tokio_03_dep::io::AsyncRead> for Buffer
where
R: tokio_03_dep::io::AsyncRead,
@@ -259,22 +270,7 @@ where
cx: &mut Context<'_>,
read: Pin<&mut R>,
) -> Poll<io::Result<usize>> {
- if !self.0.has_remaining_mut() {
- self.0.reserve(8 * 1024);
- }
- let uninit = self.0.chunk_mut();
- let mut buf = unsafe {
- tokio_03_dep::io::ReadBuf::uninit(std::slice::from_raw_parts_mut(
- uninit.as_mut_ptr() as *mut MaybeUninit<u8>,
- uninit.len(),
- ))
- };
- ready!(read.poll_read(cx, &mut buf))?;
- let n = buf.filled().len();
- unsafe {
- self.0.advance_mut(n);
- }
- Poll::Ready(Ok(n))
+ tokio_03_read_buf(cx, read, &mut self.0)
}
}
@@ -288,13 +284,9 @@ fn tokio_03_read_buf(
bs.reserve(8 * 1024);
}
+ let mut buf = tokio_03_to_read_buf(bs);
+ ready!(read.poll_read(cx, &mut buf))?;
unsafe {
- let uninit = bs.chunk_mut();
- let mut buf = tokio_03_dep::io::ReadBuf::uninit(std::slice::from_raw_parts_mut(
- uninit.as_mut_ptr() as *mut MaybeUninit<u8>,
- uninit.len(),
- ));
- ready!(read.poll_read(cx, &mut buf))?;
let n = buf.filled().len();
bs.advance_mut(n);
Poll::Ready(Ok(n))
@@ -311,44 +303,21 @@ where
cx: &mut Context<'_>,
read: Pin<&mut R>,
) -> Poll<io::Result<usize>> {
- if !self.0.has_remaining_mut() {
- self.0.reserve(8 * 1024);
- }
- let mut buf = unsafe {
- tokio_dep::io::ReadBuf::uninit(
- &mut *(self.0.chunk_mut() as *mut _ as *mut [MaybeUninit<u8>]),
- )
- };
- ready!(read.poll_read(cx, &mut buf))?;
- let n = buf.filled().len();
- unsafe {
- self.0.advance_mut(n);
- }
- Poll::Ready(Ok(n))
+ tokio_read_buf(read, cx, &mut self.0)
}
}
#[cfg(feature = "tokio")]
fn tokio_read_buf(
- cx: &mut Context<'_>,
read: Pin<&mut impl tokio_dep::io::AsyncRead>,
+ cx: &mut Context<'_>,
bs: &mut bytes::BytesMut,
) -> Poll<io::Result<usize>> {
if !bs.has_remaining_mut() {
bs.reserve(8 * 1024);
}
- unsafe {
- let uninit = bs.chunk_mut();
- let mut buf = tokio_dep::io::ReadBuf::uninit(std::slice::from_raw_parts_mut(
- uninit.as_mut_ptr() as *mut MaybeUninit<u8>,
- uninit.len(),
- ));
- ready!(read.poll_read(cx, &mut buf))?;
- let n = buf.filled().len();
- bs.advance_mut(n);
- Poll::Ready(Ok(n))
- }
+ tokio_util::io::poll_read_buf(read, cx, bs)
}
/// Marker used by `Decoder` for an external buffer
@@ -389,27 +358,30 @@ where
buf.reserve(8 * 1024);
}
- // Copy of tokio's read_buf method (but it has to force initialize the buffer)
- let copied = unsafe {
- let n = {
- let bs = buf.chunk_mut();
-
- for i in 0..bs.len() {
- bs.write_byte(i, 0);
- }
+ // Copy of tokio's poll_read_buf method (but it has to force initialize the buffer)
+ let n = {
+ let bs = buf.chunk_mut();
- // Convert to `&mut [u8]`
- let bs = &mut *(bs as *mut _ as *mut [u8]);
+ for i in 0..bs.len() {
+ bs.write_byte(i, 0);
+ }
- let n = read.read(bs)?;
- assert!(n <= bs.len(), "AsyncRead reported that it initialized more than the number of bytes in the buffer");
- n
- };
+ // Convert to `&mut [u8]`
+ // SAFETY: the entire buffer is preinitialized above
+ let bs = unsafe { &mut *(bs as *mut _ as *mut [u8]) };
- buf.advance_mut(n);
+ let n = read.read(bs)?;
+ assert!(
+ n <= bs.len(),
+ "AsyncRead reported that it initialized more than the number of bytes in the buffer"
+ );
n
};
- Ok(copied)
+
+ // SAFETY: the entire buffer has been preinitialized
+ unsafe { buf.advance_mut(n) };
+
+ Ok(n)
}
#[cfg(feature = "tokio-02")]
@@ -475,14 +447,14 @@ where
) -> Poll<io::Result<usize>> {
let me = read.project();
- tokio_read_buf(cx, me.inner, me.buf)
+ tokio_read_buf(me.inner, cx, me.buf)
}
}
#[cfg(feature = "futures-03")]
impl<R> CombineAsyncRead<BufReader<R>> for Bufferless
where
- R: futures_util_03::io::AsyncRead,
+ R: futures_io_03::AsyncRead,
{
fn poll_extend_buf(
&mut self,
@@ -520,24 +492,30 @@ fn poll_extend_buf<R>(
read: Pin<&mut R>,
) -> Poll<io::Result<usize>>
where
- R: futures_util_03::io::AsyncRead,
+ R: futures_io_03::AsyncRead,
{
// Copy of tokio's read_buf method (but it has to force initialize the buffer)
- let copied = unsafe {
- let n = {
- let bs = buf.chunk_mut();
- // Convert to `&mut [u8]`
- let bs = &mut *(bs as *mut _ as *mut [u8]);
-
- let n = ready!(read.poll_read(cx, bs))?;
- assert!(n <= bs.len(), "AsyncRead reported that it initialized more than the number of bytes in the buffer");
- n
- };
+ let n = {
+ let bs = buf.chunk_mut();
+ // preinit the buffer
+ for i in 0..bs.len() {
+ bs.write_byte(i, 0);
+ }
+
+ // Convert to `&mut [u8]`
+ // SAFETY: preinitialize the buffer
+ let bs = unsafe { &mut *(bs as *mut _ as *mut [u8]) };
- buf.advance_mut(n);
+ let n = ready!(read.poll_read(cx, bs))?;
+ assert!(
+ n <= bs.len(),
+ "AsyncRead reported that it initialized more than the number of bytes in the buffer"
+ );
n
};
- Poll::Ready(Ok(copied))
+ // SAFETY: the buffer was preinitialized
+ unsafe { buf.advance_mut(n) };
+ Poll::Ready(Ok(n))
}
#[cfg(feature = "tokio-02")]
@@ -714,7 +692,7 @@ impl<R: tokio_dep::io::AsyncRead> tokio_dep::io::AsyncBufRead for BufReader<R> {
// If we've reached the end of our internal buffer then we need to fetch
// some more data from the underlying reader.
if me.buf.is_empty() {
- ready!(tokio_read_buf(cx, me.inner, me.buf))?;
+ ready!(tokio_read_buf(me.inner, cx, me.buf))?;
}
Poll::Ready(Ok(&me.buf[..]))
}
@@ -800,8 +778,7 @@ mod tests {
impl<R: AsyncRead> BufReader<R> {
async fn extend_buf_tokio_02(mut self: Pin<&mut Self>) -> io::Result<usize> {
- futures_util_03::future::poll_fn(|cx| Bufferless.poll_extend_buf(cx, self.as_mut()))
- .await
+ crate::future_ext::poll_fn(|cx| Bufferless.poll_extend_buf(cx, self.as_mut())).await
}
}
@@ -841,7 +818,7 @@ mod tests {
#[tokio::test]
async fn buf_reader_extend_buf() {
let read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);
- futures_util_03::pin_mut!(read);
+ futures_03_dep::pin_mut!(read);
assert_eq!(read.as_mut().extend_buf_tokio_02().await.unwrap(), 3);
assert_eq!(read.buffer(), [1, 2, 3]);
@@ -868,8 +845,7 @@ mod tests_tokio_1 {
impl<R: AsyncRead> BufReader<R> {
async fn extend_buf_tokio(mut self: Pin<&mut Self>) -> io::Result<usize> {
- futures_util_03::future::poll_fn(|cx| Bufferless.poll_extend_buf(cx, self.as_mut()))
- .await
+ crate::future_ext::poll_fn(|cx| Bufferless.poll_extend_buf(cx, self.as_mut())).await
}
}
@@ -909,7 +885,7 @@ mod tests_tokio_1 {
#[tokio::test]
async fn buf_reader_extend_buf() {
let read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);
- futures_util_03::pin_mut!(read);
+ futures_03_dep::pin_mut!(read);
assert_eq!(read.as_mut().extend_buf_tokio().await.unwrap(), 3);
assert_eq!(read.buffer(), [1, 2, 3]);
@@ -926,6 +902,7 @@ mod tests_sync {
use std::io::Read;
#[test]
+ #[allow(clippy::unused_io_amount)]
fn buf_reader() {
let mut read = BufReader::with_capacity(3, &[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 0][..]);
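
The refactor above centralises one recurring pattern: for the plain std::io::Read and futures-io paths, which have no ReadBuf abstraction, the spare capacity of the BytesMut is zeroed first, handed to the reader as &mut [u8], and only then is the buffer advanced by the number of bytes actually read. A standalone sketch of that pattern for a blocking reader follows; read_into is a made-up name for illustration, using the bytes 1.x API.

    use bytes::{BufMut, BytesMut};
    use std::io::Read;

    // Zero the spare capacity, let the reader fill it, then advance the buffer.
    fn read_into(buf: &mut BytesMut, mut read: impl Read) -> std::io::Result<usize> {
        if !buf.has_remaining_mut() {
            buf.reserve(8 * 1024);
        }
        let n = {
            let bs = buf.chunk_mut();
            // Force-initialise the chunk so the cast below never exposes uninitialised bytes.
            for i in 0..bs.len() {
                bs.write_byte(i, 0);
            }
            // SAFETY: every byte of `bs` was just written above.
            let bs = unsafe { &mut *(bs as *mut _ as *mut [u8]) };
            let n = read.read(bs)?;
            assert!(n <= bs.len(), "reader reported more bytes than the buffer holds");
            n
        };
        // SAFETY: the first `n` bytes of the spare capacity are initialised.
        unsafe { buf.advance_mut(n) };
        Ok(n)
    }

    fn main() -> std::io::Result<()> {
        let mut buf = BytesMut::new();
        let n = read_into(&mut buf, &b"hello"[..])?;
        assert_eq!(&buf[..n], &b"hello"[..]);
        println!("read {} bytes", n);
        Ok(())
    }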
diff --git a/src/stream/buffered.rs b/src/stream/buffered.rs
index 93f7b82..32caba2 100644
--- a/src/stream/buffered.rs
+++ b/src/stream/buffered.rs
@@ -1,4 +1,4 @@
-use std::collections::VecDeque;
+use alloc::collections::VecDeque;
use crate::{
error::StreamError,
diff --git a/src/stream/decoder.rs b/src/stream/decoder.rs
index 9807d80..ad034f8 100644
--- a/src/stream/decoder.rs
+++ b/src/stream/decoder.rs
@@ -161,7 +161,7 @@ impl<S, P, C> Decoder<S, P, C> {
C: crate::stream::buf_reader::CombineRead<R, dyn tokio_02_dep::io::AsyncRead>,
{
let copied =
- futures_util_03::future::poll_fn(|cx| self.buffer.poll_extend_buf(cx, reader.as_mut()))
+ crate::future_ext::poll_fn(|cx| self.buffer.poll_extend_buf(cx, reader.as_mut()))
.await?;
if copied == 0 {
self.end_of_input = true;
@@ -180,7 +180,7 @@ impl<S, P, C> Decoder<S, P, C> {
C: crate::stream::buf_reader::CombineRead<R, dyn tokio_03_dep::io::AsyncRead>,
{
let copied =
- futures_util_03::future::poll_fn(|cx| self.buffer.poll_extend_buf(cx, reader.as_mut()))
+ crate::future_ext::poll_fn(|cx| self.buffer.poll_extend_buf(cx, reader.as_mut()))
.await?;
if copied == 0 {
self.end_of_input = true;
@@ -199,7 +199,7 @@ impl<S, P, C> Decoder<S, P, C> {
C: crate::stream::buf_reader::CombineRead<R, dyn tokio_dep::io::AsyncRead>,
{
let copied =
- futures_util_03::future::poll_fn(|cx| self.buffer.poll_extend_buf(cx, reader.as_mut()))
+ crate::future_ext::poll_fn(|cx| self.buffer.poll_extend_buf(cx, reader.as_mut()))
.await?;
if copied == 0 {
self.end_of_input = true;
diff --git a/src/stream/mod.rs b/src/stream/mod.rs
index f38bd72..803e9a8 100644
--- a/src/stream/mod.rs
+++ b/src/stream/mod.rs
@@ -52,7 +52,7 @@ macro_rules! clone_resetable {
pub mod buf_reader;
/// Stream wrapper which provides a `ResetStream` impl for `StreamOnce` impls which do not have
/// one.
-#[cfg(feature = "std")]
+#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
pub mod buffered;
#[cfg(feature = "std")]
@@ -576,7 +576,7 @@ impl<'a> RangeStreamOnce for &'a str {
}
match s.as_bytes().get(index) {
None => false,
- Some(&b) => b < 128 || b >= 192,
+ Some(b) => !(128..=192).contains(b),
}
}
if size <= self.len() {
@@ -616,12 +616,21 @@ impl<'a, T> Range for &'a [T] {
}
}
-fn slice_uncons_while<'a, T, F>(slice: &mut &'a [T], mut i: usize, mut f: F) -> &'a [T]
+#[repr(usize)]
+enum UnconsStart {
+ Zero = 0,
+ One = 1,
+}
+
+fn slice_uncons_while<'a, T, F>(slice: &mut &'a [T], start: UnconsStart, mut f: F) -> &'a [T]
where
F: FnMut(T) -> bool,
T: Clone,
{
+ let mut i = start as usize;
let len = slice.len();
+ // SAFETY: We only call this function with `One` if the slice has length >= 1
+ debug_assert!(len >= i, "");
let mut found = false;
macro_rules! check {
@@ -634,6 +643,7 @@ where
};
}
+ // SAFETY: ensures we can access at least 8 elements starting at i, making get_unchecked sound.
while len - i >= 8 {
check!();
check!();
@@ -679,7 +689,7 @@ where
where
F: FnMut(Self::Token) -> bool,
{
- Ok(slice_uncons_while(self, 0, f))
+ Ok(slice_uncons_while(self, UnconsStart::Zero, f))
}
#[inline]
@@ -698,7 +708,7 @@ where
}
}
- CommitOk(slice_uncons_while(self, 1, f))
+ CommitOk(slice_uncons_while(self, UnconsStart::One, f))
}
#[inline]
@@ -1060,11 +1070,14 @@ where
}
}
-fn slice_uncons_while_ref<'a, T, F>(slice: &mut &'a [T], mut i: usize, mut f: F) -> &'a [T]
+fn slice_uncons_while_ref<'a, T, F>(slice: &mut &'a [T], start: UnconsStart, mut f: F) -> &'a [T]
where
F: FnMut(&'a T) -> bool,
{
+ let mut i = start as usize;
let len = slice.len();
+ // SAFETY: We only call this function with `One` if the slice has length >= 1
+ debug_assert!(len >= i, "");
let mut found = false;
macro_rules! check {
@@ -1077,6 +1090,7 @@ where
};
}
+ // SAFETY: ensures we can access at least 8 elements starting at i, making get_unchecked sound.
while len - i >= 8 {
check!();
check!();
@@ -1122,7 +1136,7 @@ where
where
F: FnMut(Self::Token) -> bool,
{
- Ok(slice_uncons_while_ref(&mut self.0, 0, f))
+ Ok(slice_uncons_while_ref(&mut self.0, UnconsStart::Zero, f))
}
#[inline]
@@ -1139,7 +1153,7 @@ where
None => return PeekErr(Tracked::from(UnexpectedParse::Eoi)),
}
- CommitOk(slice_uncons_while_ref(&mut self.0, 1, f))
+ CommitOk(slice_uncons_while_ref(&mut self.0, UnconsStart::One, f))
}
#[inline]
@@ -1877,7 +1891,7 @@ mod tests {
input.uncons().unwrap();
assert_eq!(input.distance(&before), 2);
- input.reset(before.clone()).unwrap();
+ input.reset(before).unwrap();
assert_eq!(input.distance(&before), 0);
}
}
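
The UnconsStart enum above is the change behind the changelog's "avoid a saturating add in slice_uncons_while" entries: the start offset can only ever be 0 (uncons_while) or 1 (uncons_while1, which has already checked the first token), so encoding it as an enum documents that invariant and lets the hot loop drop defensive arithmetic. A rough standalone sketch of the same idea, with invented names and a simplified loop:

    // Restrict a scan's start offset to the only two values callers ever pass.
    #[repr(usize)]
    enum Start {
        Zero = 0,
        // Callers must have checked (and accepted) the first element already.
        One = 1,
    }

    fn scan_while<T: Copy>(slice: &[T], start: Start, mut f: impl FnMut(T) -> bool) -> usize {
        let mut i = start as usize;
        // Mirrors the debug_assert! in the diff: `One` implies a non-empty slice.
        debug_assert!(slice.len() >= i);
        while i < slice.len() && f(slice[i]) {
            i += 1;
        }
        i
    }

    fn main() {
        let data = [1, 2, 3, 10, 4];
        assert_eq!(scan_while(&data, Start::Zero, |x| x < 5), 3);
        // uncons_while1-style call: element 0 was already consumed by the caller.
        assert_eq!(scan_while(&data, Start::One, |x| x < 5), 3);
        println!("ok");
    }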
diff --git a/tests/parser.rs b/tests/parser.rs
index c2ab07c..4721825 100644
--- a/tests/parser.rs
+++ b/tests/parser.rs
@@ -1,15 +1,12 @@
use combine::{
parser::{
- byte::bytes_cmp,
- char::{digit, letter, string, string_cmp},
- choice::{choice, optional},
- combinator::{attempt, no_partial, not_followed_by},
- error::unexpected,
- range::{self, range},
- repeat::{count, count_min_max, many, sep_by, sep_end_by1, skip_until, take_until},
- token::{any, eof, position, token, value, Token},
+ char::{digit, letter},
+ choice::choice,
+ combinator::not_followed_by,
+ range::range,
+ token::{any, eof, token, Token},
},
- EasyParser, Parser,
+ Parser,
};
#[test]
@@ -45,21 +42,25 @@ fn not_followed_by_does_not_consume_any_input() {
#[cfg(feature = "std")]
mod tests_std {
+ use super::*;
+ use combine::easy::{Error, Errors};
+ use combine::parser::byte::alpha_num;
+ use combine::parser::byte::bytes;
+ use combine::parser::byte::bytes_cmp;
+ use combine::parser::byte::num::be_u32;
+ use combine::parser::char::char;
+ use combine::parser::char::{string, string_cmp};
+ use combine::parser::combinator::no_partial;
+ use combine::parser::range;
+ use combine::parser::repeat::{skip_until, take_until};
+ use combine::stream::position;
+ use combine::stream::position::SourcePosition;
use combine::{
- parser::{
- byte::{alpha_num, bytes, num::be_u32},
- char::{char, digit, letter},
- },
- stream::{
- easy::{self, Error, Errors},
- position::{self, SourcePosition},
- },
- Parser,
+ attempt, count, count_min_max, easy, many, optional, position, sep_by, sep_end_by1,
+ unexpected, value, EasyParser,
};
- use super::*;
-
#[derive(Clone, PartialEq, Debug)]
struct CloneOnly {
s: String,
@@ -85,8 +86,10 @@ mod tests_std {
#[test]
fn sep_by_committed_error() {
+ type TwoLettersList = Vec<(char, char)>;
+
let mut parser2 = sep_by((letter(), letter()), token(','));
- let result_err: Result<(Vec<(char, char)>, &str), easy::ParseError<&str>> =
+ let result_err: Result<(TwoLettersList, &str), easy::ParseError<&str>> =
parser2.easy_parse("a,bc");
assert!(result_err.is_err());
}
@@ -619,7 +622,7 @@ mod tests_std {
#[test]
fn lifetime_inference() {
- fn _string<'a>(source: &'a str) {
+ fn _string(source: &str) {
range::take(1).or(string("a")).parse(source).ok();
range::take(1)
.or(string_cmp("a", |x, y| x == y))
@@ -628,7 +631,7 @@ mod tests_std {
let _: &'static str = string("a").parse(source).unwrap().0;
let _: &'static str = string_cmp("a", |x, y| x == y).parse(source).unwrap().0;
}
- fn _bytes<'a>(source: &'a [u8]) {
+ fn _bytes(source: &[u8]) {
range::take(1).or(bytes(&[0u8])).parse(source).ok();
range::take(1)
.or(bytes_cmp(&[0u8], |x, y| x == y))
diff --git a/tests/parser_macro.rs b/tests/parser_macro.rs
index e6d8dfc..2832286 100644
--- a/tests/parser_macro.rs
+++ b/tests/parser_macro.rs
@@ -1,3 +1,5 @@
+#![allow(clippy::single_match)]
+
#[macro_use]
extern crate combine;
diff --git a/travis.sh b/travis.sh
index 6d83c38..6397319 100755
--- a/travis.sh
+++ b/travis.sh
@@ -11,6 +11,9 @@ else
cargo "$@" test --bench json --bench http -- --test
cargo "$@" check --bench mp4 --features mp4
+
+ cargo "$@" build --no-default-features --features alloc
+ cargo "$@" test --no-default-features --features alloc --examples
cargo "$@" build --no-default-features
cargo "$@" test --no-default-features --examples