diff options
author | Joel Galenson <jgalenson@google.com> | 2021-05-19 15:39:13 -0700 |
---|---|---|
committer | Joel Galenson <jgalenson@google.com> | 2021-05-19 15:39:13 -0700 |
commit | e30d42e8fe4b64ad02e14bb18767f26f53fc1f07 (patch) | |
tree | 88390f1a9f0ab00e0ed8b47c06a1b2bf73d448d3 | |
parent | 25b6e21c78c960133408b16b25378e892ce9f096 (diff) | |
download | hashlink-e30d42e8fe4b64ad02e14bb18767f26f53fc1f07.tar.gz |
Upgrade rust/crates/hashlink to 0.7.0
Test: make
Change-Id: Icb994f873166a9e8b6ebe569eb171cf90e0a0778
-rw-r--r-- | .cargo_vcs_info.json | 2 | ||||
-rw-r--r-- | .circleci/config.yml | 10 | ||||
-rw-r--r-- | Android.bp | 10 | ||||
-rw-r--r-- | CHANGELOG.md | 8 | ||||
-rw-r--r-- | Cargo.toml | 7 | ||||
-rw-r--r-- | Cargo.toml.orig | 5 | ||||
-rw-r--r-- | METADATA | 10 | ||||
-rw-r--r-- | TEST_MAPPING | 11 | ||||
-rw-r--r-- | src/linked_hash_map.rs | 34 | ||||
-rw-r--r-- | src/linked_hash_set.rs | 10 | ||||
-rw-r--r-- | src/serde.rs | 164 | ||||
-rw-r--r-- | tests/linked_hash_map.rs | 15 | ||||
-rw-r--r-- | tests/linked_hash_set.rs | 15 | ||||
-rw-r--r-- | tests/serde.rs | 49 |
14 files changed, 249 insertions(+), 101 deletions(-)
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json index 89ad255..3f828db 100644 --- a/.cargo_vcs_info.json +++ b/.cargo_vcs_info.json @@ -1,5 +1,5 @@ { "git": { - "sha1": "a244c741ac60333e3c37298fffdbfc1aa5f3c0d6" + "sha1": "491feb0b3f9805f7548a459ac32ab24914e12db2" } } diff --git a/.circleci/config.yml b/.circleci/config.yml index 9116b38..12fde18 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -35,17 +35,17 @@ jobs: command: cargo build --all --all-targets - run: name: Run all tests - command: cargo test --all + command: cargo test --all --all-features - run: name: Run all tests under miri command: | - cargo +nightly miri test + cargo +nightly miri test --all-features - run: name: Run all tests under sanitizers command: | - RUSTFLAGS="-Z sanitizer=address" cargo +nightly -Z build-std test --target x86_64-unknown-linux-gnu - RUSTFLAGS="-Z sanitizer=leak" cargo +nightly test -Z build-std --target x86_64-unknown-linux-gnu - RUSTFLAGS="-Z sanitizer=memory" cargo +nightly test -Z build-std --target x86_64-unknown-linux-gnu + RUSTFLAGS="-Z sanitizer=address" cargo +nightly -Z build-std test --target x86_64-unknown-linux-gnu --all-features + RUSTFLAGS="-Z sanitizer=leak" cargo +nightly test -Z build-std --target x86_64-unknown-linux-gnu --all-features + RUSTFLAGS="-Z sanitizer=memory" cargo +nightly test -Z build-std --target x86_64-unknown-linux-gnu --all-features - save_cache: paths: - /usr/local/cargo/registry @@ -1,4 +1,5 @@ // This file is generated by cargo2android.py --device --run --dependencies. +// Do not modify this file as changes will be overridden on upgrade. 
package { default_applicable_licenses: ["external_rust_crates_hashlink_license"], @@ -48,5 +49,10 @@ rust_library { } // dependent_library ["feature_list"] -// ahash-0.4.6 -// hashbrown-0.9.1 "ahash,default,inline-more" +// ahash-0.7.2 "folded_multiply,runtime-rng,specialize" +// cfg-if-1.0.0 +// getrandom-0.2.2 +// hashbrown-0.11.2 "ahash,default,inline-more" +// libc-0.2.94 +// once_cell-1.7.2 "alloc,race,unstable" +// version_check-0.9.3 diff --git a/CHANGELOG.md b/CHANGELOG.md index 12963ea..54c2bbe 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,11 @@ +## [0.7.0] +- API incompatible change: depend on hashbrown 0.11, changes re-exported types. +- Fix `LinkedHashSet::back` to take `&self` not `&mut self`. +- API incompatible change: equality tests on `LinkedHashSet` are now *ordered*, + similar to `LinkedHashMap`. +- Make the serde `Deserialize` implementations on `LinkedHashMap` and + `LinkedHashSet` generic on the `BuildHasher` type. + ## [0.6.0] - API incompatible change: depend on hashbrown 0.9, re-export renamed hashbrown::TryReserveError type. 
@@ -13,7 +13,7 @@ [package] edition = "2018" name = "hashlink" -version = "0.6.0" +version = "0.7.0" authors = ["kyren <kerriganw@gmail.com>"] description = "HashMap-like containers that hold their key-value pairs in a user controllable order" documentation = "https://docs.rs/hashlink" @@ -22,11 +22,14 @@ keywords = ["data-structures"] license = "MIT OR Apache-2.0" repository = "https://github.com/kyren/hashlink" [dependencies.hashbrown] -version = "0.9.0" +version = "0.11.0" [dependencies.serde] version = "1.0" optional = true +[dev-dependencies.fxhash] +version = "0.2.1" + [dev-dependencies.serde_test] version = "1.0" diff --git a/Cargo.toml.orig b/Cargo.toml.orig index 6ad661b..c6de186 100644 --- a/Cargo.toml.orig +++ b/Cargo.toml.orig @@ -1,6 +1,6 @@ [package] name = "hashlink" -version = "0.6.0" +version = "0.7.0" authors = ["kyren <kerriganw@gmail.com>"] edition = "2018" description = "HashMap-like containers that hold their key-value pairs in a user controllable order" @@ -17,8 +17,9 @@ circle-ci = { repository = "kyren/hashlink", branch = "master" } serde_impl = ["serde"] [dependencies] -hashbrown = "0.9.0" +hashbrown = "0.11.0" serde = { version = "1.0", optional = true } [dev-dependencies] serde_test = "1.0" +fxhash = "0.2.1" @@ -7,13 +7,13 @@ third_party { } url { type: ARCHIVE - value: "https://static.crates.io/crates/hashlink/hashlink-0.6.0.crate" + value: "https://static.crates.io/crates/hashlink/hashlink-0.7.0.crate" } - version: "0.6.0" + version: "0.7.0" license_type: NOTICE last_upgrade_date { - year: 2020 - month: 11 - day: 9 + year: 2021 + month: 5 + day: 19 } } diff --git a/TEST_MAPPING b/TEST_MAPPING new file mode 100644 index 0000000..e707449 --- /dev/null +++ b/TEST_MAPPING @@ -0,0 +1,11 @@ +// Generated by update_crate_tests.py for tests that depend on this crate. 
+{ + "presubmit": [ + { + "name": "keystore2_test" + }, + { + "name": "vpnprofilestore_test" + } + ] +} diff --git a/src/linked_hash_map.rs b/src/linked_hash_map.rs index 32733ea..191844c 100644 --- a/src/linked_hash_map.rs +++ b/src/linked_hash_map.rs @@ -441,6 +441,40 @@ where } } } + + /// If an entry with this key exists, move it to the front of the list and return a reference to + /// the value. + #[inline] + pub fn to_front<Q>(&mut self, k: &Q) -> Option<&mut V> + where + K: Borrow<Q>, + Q: Hash + Eq + ?Sized, + { + match self.raw_entry_mut().from_key(k) { + RawEntryMut::Occupied(mut occupied) => { + occupied.to_front(); + Some(occupied.into_mut()) + } + RawEntryMut::Vacant(_) => None, + } + } + + /// If an entry with this key exists, move it to the back of the list and return a reference to + /// the value. + #[inline] + pub fn to_back<Q>(&mut self, k: &Q) -> Option<&mut V> + where + K: Borrow<Q>, + Q: Hash + Eq + ?Sized, + { + match self.raw_entry_mut().from_key(k) { + RawEntryMut::Occupied(mut occupied) => { + occupied.to_back(); + Some(occupied.into_mut()) + } + RawEntryMut::Vacant(_) => None, + } + } } impl<K, V, S> LinkedHashMap<K, V, S> diff --git a/src/linked_hash_set.rs b/src/linked_hash_set.rs index 1ab7dbb..f55f6c5 100644 --- a/src/linked_hash_set.rs +++ b/src/linked_hash_set.rs @@ -250,7 +250,7 @@ where } #[inline] - pub fn back(&mut self) -> Option<&T> { + pub fn back(&self) -> Option<&T> { self.map.back().map(|(k, _)| k) } @@ -304,12 +304,8 @@ where S: BuildHasher, { #[inline] - fn eq(&self, other: &LinkedHashSet<T, S>) -> bool { - if self.len() != other.len() { - return false; - } - - self.iter().all(|key| other.contains(key)) + fn eq(&self, other: &Self) -> bool { + self.len() == other.len() && self.iter().eq(other) } } diff --git a/src/serde.rs b/src/serde.rs index b8e307c..f44ebb3 100644 --- a/src/serde.rs +++ b/src/serde.rs @@ -31,54 +31,59 @@ where } } -#[derive(Debug)] -pub struct LinkedHashMapVisitor<K, V> { - marker: 
PhantomData<LinkedHashMap<K, V>>, -} - -impl<K, V> LinkedHashMapVisitor<K, V> { - fn new() -> Self { - LinkedHashMapVisitor { - marker: PhantomData, - } - } -} - -impl<K, V> Default for LinkedHashMapVisitor<K, V> { - fn default() -> Self { - Self::new() - } -} - -impl<'de, K, V> Visitor<'de> for LinkedHashMapVisitor<K, V> +impl<'de, K, V, S> Deserialize<'de> for LinkedHashMap<K, V, S> where K: Deserialize<'de> + Eq + Hash, V: Deserialize<'de>, + S: BuildHasher + Default, { - type Value = LinkedHashMap<K, V>; - - fn expecting(&self, formatter: &mut Formatter) -> fmt::Result { - write!(formatter, "a map") - } + fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { + #[derive(Debug)] + pub struct LinkedHashMapVisitor<K, V, S> { + marker: PhantomData<LinkedHashMap<K, V, S>>, + } - #[inline] - fn visit_map<M: MapAccess<'de>>(self, mut map: M) -> Result<Self::Value, M::Error> { - let mut values = LinkedHashMap::with_capacity(map.size_hint().unwrap_or(0)); + impl<K, V, S> LinkedHashMapVisitor<K, V, S> { + fn new() -> Self { + LinkedHashMapVisitor { + marker: PhantomData, + } + } + } - while let Some((k, v)) = map.next_entry()? { - values.insert(k, v); + impl<K, V, S> Default for LinkedHashMapVisitor<K, V, S> { + fn default() -> Self { + Self::new() + } } - Ok(values) - } -} + impl<'de, K, V, S> Visitor<'de> for LinkedHashMapVisitor<K, V, S> + where + K: Deserialize<'de> + Eq + Hash, + V: Deserialize<'de>, + S: BuildHasher + Default, + { + type Value = LinkedHashMap<K, V, S>; + + fn expecting(&self, formatter: &mut Formatter) -> fmt::Result { + write!(formatter, "a map") + } + + #[inline] + fn visit_map<M: MapAccess<'de>>(self, mut map: M) -> Result<Self::Value, M::Error> { + let mut values = LinkedHashMap::with_capacity_and_hasher( + map.size_hint().unwrap_or(0), + S::default(), + ); + + while let Some((k, v)) = map.next_entry()? 
{ + values.insert(k, v); + } + + Ok(values) + } + } -impl<'de, K, V> Deserialize<'de> for LinkedHashMap<K, V> -where - K: Deserialize<'de> + Eq + Hash, - V: Deserialize<'de>, -{ - fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { deserializer.deserialize_map(LinkedHashMapVisitor::default()) } } @@ -100,52 +105,57 @@ where } } -#[derive(Debug)] -pub struct LinkedHashSetVisitor<T> { - marker: PhantomData<LinkedHashSet<T>>, -} - -impl<T> LinkedHashSetVisitor<T> { - fn new() -> Self { - LinkedHashSetVisitor { - marker: PhantomData, - } - } -} - -impl<T> Default for LinkedHashSetVisitor<T> { - fn default() -> Self { - Self::new() - } -} - -impl<'de, T> Visitor<'de> for LinkedHashSetVisitor<T> +impl<'de, T, S> Deserialize<'de> for LinkedHashSet<T, S> where T: Deserialize<'de> + Eq + Hash, + S: BuildHasher + Default, { - type Value = LinkedHashSet<T>; - - fn expecting(&self, formatter: &mut Formatter) -> fmt::Result { - write!(formatter, "a sequence") - } + fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { + #[derive(Debug)] + pub struct LinkedHashSetVisitor<T, S> { + marker: PhantomData<LinkedHashSet<T, S>>, + } - #[inline] - fn visit_seq<S: SeqAccess<'de>>(self, mut seq: S) -> Result<Self::Value, S::Error> { - let mut values = LinkedHashSet::with_capacity(seq.size_hint().unwrap_or(0)); + impl<T, S> LinkedHashSetVisitor<T, S> { + fn new() -> Self { + LinkedHashSetVisitor { + marker: PhantomData, + } + } + } - while let Some(v) = seq.next_element()? 
{ - values.insert(v); + impl<T, S> Default for LinkedHashSetVisitor<T, S> { + fn default() -> Self { + Self::new() + } } - Ok(values) - } -} + impl<'de, T, S> Visitor<'de> for LinkedHashSetVisitor<T, S> + where + T: Deserialize<'de> + Eq + Hash, + S: BuildHasher + Default, + { + type Value = LinkedHashSet<T, S>; + + fn expecting(&self, formatter: &mut Formatter) -> fmt::Result { + write!(formatter, "a sequence") + } + + #[inline] + fn visit_seq<SA: SeqAccess<'de>>(self, mut seq: SA) -> Result<Self::Value, SA::Error> { + let mut values = LinkedHashSet::with_capacity_and_hasher( + seq.size_hint().unwrap_or(0), + S::default(), + ); + + while let Some(v) = seq.next_element()? { + values.insert(v); + } + + Ok(values) + } + } -impl<'de, T> Deserialize<'de> for LinkedHashSet<T> -where - T: Deserialize<'de> + Eq + Hash, -{ - fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { deserializer.deserialize_seq(LinkedHashSetVisitor::default()) } } diff --git a/tests/linked_hash_map.rs b/tests/linked_hash_map.rs index 19dcc00..fbd3d2e 100644 --- a/tests/linked_hash_map.rs +++ b/tests/linked_hash_map.rs @@ -496,3 +496,18 @@ fn test_retain() { drop(map); assert!(c.get() == 4); } + +#[test] +fn test_order_equality() { + let xs = [1, 2, 3, 4, 5, 6]; + let mut map1: LinkedHashMap<String, i32> = xs.iter().map(|i| (i.to_string(), *i)).collect(); + let mut map2: LinkedHashMap<String, i32> = xs.iter().map(|i| (i.to_string(), *i)).collect(); + + assert_eq!(map1, map2); + + map1.to_front("4"); + assert_ne!(map1, map2); + + map2.to_front("4"); + assert_eq!(map1, map2); +} diff --git a/tests/linked_hash_set.rs b/tests/linked_hash_set.rs index 13cceae..cb75887 100644 --- a/tests/linked_hash_set.rs +++ b/tests/linked_hash_set.rs @@ -510,3 +510,18 @@ fn to_back_front_order() { set.to_front(&3); assert_eq!(set.front().copied(), Some(3)); } + +#[test] +fn test_order_equality() { + let xs = [1, 2, 3, 4, 5, 6]; + let mut set1: LinkedHashSet<i32> = 
xs.iter().copied().collect(); + let mut set2: LinkedHashSet<i32> = xs.iter().copied().collect(); + + assert_eq!(set1, set2); + + set1.to_front(&4); + assert_ne!(set1, set2); + + set2.to_front(&4); + assert_eq!(set1, set2); +} diff --git a/tests/serde.rs b/tests/serde.rs index fce3108..d397a24 100644 --- a/tests/serde.rs +++ b/tests/serde.rs @@ -1,5 +1,6 @@ #![cfg(feature = "serde_impl")] +use fxhash::FxBuildHasher; use hashlink::{LinkedHashMap, LinkedHashSet}; use serde_test::{assert_tokens, Token}; @@ -33,6 +34,35 @@ fn map_serde_tokens() { } #[test] +fn map_serde_tokens_empty_generic() { + let map = LinkedHashMap::<char, u32, FxBuildHasher>::with_hasher(FxBuildHasher::default()); + + assert_tokens(&map, &[Token::Map { len: Some(0) }, Token::MapEnd]); +} + +#[test] +fn map_serde_tokens_generic() { + let mut map = LinkedHashMap::with_hasher(FxBuildHasher::default()); + map.insert('a', 10); + map.insert('b', 20); + map.insert('c', 30); + + assert_tokens( + &map, + &[ + Token::Map { len: Some(3) }, + Token::Char('a'), + Token::I32(10), + Token::Char('b'), + Token::I32(20), + Token::Char('c'), + Token::I32(30), + Token::MapEnd, + ], + ); +} + +#[test] fn set_serde_tokens_empty() { let set = LinkedHashSet::<u32>::new(); @@ -57,3 +87,22 @@ fn set_serde_tokens() { ], ); } + +#[test] +fn set_serde_tokens_generic() { + let mut set = LinkedHashSet::with_hasher(FxBuildHasher::default()); + set.insert('a'); + set.insert('b'); + set.insert('c'); + + assert_tokens( + &set, + &[ + Token::Seq { len: Some(3) }, + Token::Char('a'), + Token::Char('b'), + Token::Char('c'), + Token::SeqEnd, + ], + ); +} |