From 4cf345b8cefebfe3148d859704f6f70a56825e0d Mon Sep 17 00:00:00 2001 From: Jeff Vander Stoep Date: Mon, 12 Dec 2022 12:39:18 +0100 Subject: Upgrade hashlink to 0.8.1 This project was upgraded with external_updater. Usage: tools/external_updater/updater.sh update rust/crates/hashlink For more info, check https://cs.android.com/android/platform/superproject/+/master:tools/external_updater/README.md Test: TreeHugger Change-Id: Ie542866f332f29703f6ccc7869593dadfd925101 --- .cargo_vcs_info.json | 7 +- .gitignore | 3 +- Android.bp | 24 +----- CHANGELOG.md | 15 +++- Cargo.toml | 16 ++-- Cargo.toml.orig | 4 +- METADATA | 14 ++-- README.md | 2 +- src/linked_hash_map.rs | 192 +++++++++++++++++++++++++++++++++++------------ src/linked_hash_set.rs | 17 +++++ tests/linked_hash_map.rs | 50 ++++++++++++ tests/linked_hash_set.rs | 16 ++++ 12 files changed, 268 insertions(+), 92 deletions(-) diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json index 3f828db..3339e30 100644 --- a/.cargo_vcs_info.json +++ b/.cargo_vcs_info.json @@ -1,5 +1,6 @@ { "git": { - "sha1": "491feb0b3f9805f7548a459ac32ab24914e12db2" - } -} + "sha1": "684ce98553c07c773af062605a5f4cbd9aa4dcd7" + }, + "path_in_vcs": "" +} \ No newline at end of file diff --git a/.gitignore b/.gitignore index 256af81..a3a73ce 100644 --- a/.gitignore +++ b/.gitignore @@ -3,5 +3,6 @@ Cargo.lock .DS_Store .envrc +.direnv shell.nix -.dir-locals.el +.dir-locals.el \ No newline at end of file diff --git a/Android.bp b/Android.bp index 8f1d565..3af9fd2 100644 --- a/Android.bp +++ b/Android.bp @@ -37,31 +37,11 @@ license { ], } -rust_test { - name: "hashlink_test_src_lib", - host_supported: true, - crate_name: "hashlink", - cargo_env_compat: true, - cargo_pkg_version: "0.7.0", - srcs: ["src/lib.rs"], - test_suites: ["general-tests"], - auto_gen_config: true, - test_options: { - unit_test: true, - }, - edition: "2018", - rustlibs: [ - "libfxhash", - "libhashbrown", - "libserde_test", - ], -} - rust_defaults { name: "hashlink_test_defaults", crate_name: "hashlink", cargo_env_compat: true, - cargo_pkg_version: "0.7.0", + cargo_pkg_version: "0.8.1", test_suites: ["general-tests"], auto_gen_config: true, edition: "2018", @@ -108,7 +88,7 @@ rust_library { host_supported: true, crate_name: "hashlink", cargo_env_compat: true, - cargo_pkg_version: "0.7.0", + cargo_pkg_version: "0.8.1", srcs: ["src/lib.rs"], edition: "2018", rustlibs: [ diff --git a/CHANGELOG.md b/CHANGELOG.md index 54c2bbe..0cec7be 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,23 @@ -## [0.7.0 +## [0.8.1] +- Add `retain_with_order` methods, equivalent to `retain` but which iterate + through the map in the proper linked list order + +## [0.8.0] +- API incompatible change: No longer re-export hashbrown types so that bumping + hashbrown is no longer an API compatible change. +- bump hashbrown to 0.12 +- Fix implementation of `shrink_to_fit` to not panic when called on non-empty + containers. + +## [0.7.0] - API incompatible change: depend on hashbrown 0.11, changes re-exported types. - Fix `LinkedHashSet::back` to take `&self` not `&mut self`. - API incompatible change: equality tests on `LinkedHashSet` are now *ordered*, similar to `LinkedHashMap`. - Make the serde `Deserialize` implementations on `LinkedHashMap` and `LinkedHashSet` generic on the `BuildHasher` type. +- Add `to_back` and `to_front` methods for `LinkedHashMap` to control entry + order. 
## [0.6.0] - API incompatible change: depend on hashbrown 0.9, re-export renamed diff --git a/Cargo.toml b/Cargo.toml index 92db926..308f9f7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,17 +3,16 @@ # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies +# to registry (e.g., crates.io) dependencies. # -# If you believe there's an error in this file please file an -# issue against the rust-lang/cargo repository. If you're -# editing this file be aware that the upstream Cargo.toml -# will likely look very different (and much more reasonable) +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. [package] edition = "2018" name = "hashlink" -version = "0.7.0" +version = "0.8.1" authors = ["kyren "] description = "HashMap-like containers that hold their key-value pairs in a user controllable order" documentation = "https://docs.rs/hashlink" @@ -21,12 +20,14 @@ readme = "README.md" keywords = ["data-structures"] license = "MIT OR Apache-2.0" repository = "https://github.com/kyren/hashlink" + [dependencies.hashbrown] -version = "0.11.0" +version = "0.12.0" [dependencies.serde] version = "1.0" optional = true + [dev-dependencies.fxhash] version = "0.2.1" @@ -35,6 +36,7 @@ version = "1.0" [features] serde_impl = ["serde"] + [badges.circle-ci] branch = "master" repository = "kyren/hashlink" diff --git a/Cargo.toml.orig b/Cargo.toml.orig index c6de186..afab59a 100644 --- a/Cargo.toml.orig +++ b/Cargo.toml.orig @@ -1,6 +1,6 @@ [package] name = "hashlink" -version = "0.7.0" +version = "0.8.1" authors = ["kyren "] edition = "2018" description = "HashMap-like containers that hold their key-value pairs in a user controllable order" @@ -17,7 +17,7 @@ circle-ci = { repository = "kyren/hashlink", branch = "master" } serde_impl = ["serde"] [dependencies] -hashbrown = "0.11.0" +hashbrown = "0.12.0" serde = { version = "1.0", optional = true } [dev-dependencies] diff --git a/METADATA b/METADATA index 1aeaa1e..30b5508 100644 --- a/METADATA +++ b/METADATA @@ -1,3 +1,7 @@ +# This project was upgraded with external_updater. 
+# Usage: tools/external_updater/updater.sh update rust/crates/hashlink +# For more info, check https://cs.android.com/android/platform/superproject/+/master:tools/external_updater/README.md + name: "hashlink" description: "HashMap-like containers that hold their key-value pairs in a user controllable order" third_party { @@ -7,13 +11,13 @@ third_party { } url { type: ARCHIVE - value: "https://static.crates.io/crates/hashlink/hashlink-0.7.0.crate" + value: "https://static.crates.io/crates/hashlink/hashlink-0.8.1.crate" } - version: "0.7.0" + version: "0.8.1" license_type: NOTICE last_upgrade_date { - year: 2021 - month: 5 - day: 19 + year: 2022 + month: 12 + day: 12 } } diff --git a/README.md b/README.md index 9272b0d..9270cc3 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # hashlink -- HashMap-like containers that hold their key-value pairs in a user controllable order -[![Build Status](https://img.shields.io/circleci/project/github/kyren/hashlink.svg)](https://circleci.com/gh/kyren/hashlink) +[![Build Status](https://img.shields.io/circleci/project/github/triplehex/hashlink.svg)](https://circleci.com/gh/triplehex/hashlink) [![Latest Version](https://img.shields.io/crates/v/hashlink.svg)](https://crates.io/crates/hashlink) [![API Documentation](https://docs.rs/hashlink/badge.svg)](https://docs.rs/hashlink) diff --git a/src/linked_hash_map.rs b/src/linked_hash_map.rs index 191844c..b27c98b 100644 --- a/src/linked_hash_map.rs +++ b/src/linked_hash_map.rs @@ -1,4 +1,5 @@ use std::{ + alloc::Layout, borrow::Borrow, cmp::Ordering, fmt, @@ -12,7 +13,10 @@ use std::{ use hashbrown::{hash_map, HashMap}; -pub type TryReserveError = hashbrown::TryReserveError; +pub enum TryReserveError { + CapacityOverflow, + AllocError { layout: Layout }, +} /// A version of `HashMap` that has a user controllable order for its entries. /// @@ -93,14 +97,12 @@ impl LinkedHashMap { #[inline] pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> { - self.map.try_reserve(additional) - } - - #[inline] - pub fn shrink_to_fit(&mut self) { - self.map.shrink_to_fit(); - unsafe { drop_free_nodes(self.free) }; - self.free = None; + self.map.try_reserve(additional).map_err(|e| match e { + hashbrown::TryReserveError::CapacityOverflow => TryReserveError::CapacityOverflow, + hashbrown::TryReserveError::AllocError { layout } => { + TryReserveError::AllocError { layout } + } + }) } #[inline] @@ -238,42 +240,6 @@ impl LinkedHashMap { where F: FnMut(&K, &mut V) -> bool, { - // We do not drop the key and value when a value is filtered from the map during the call to - // `retain`. We need to be very careful not to have a live `HashMap` entry pointing to - // either a dangling `Node` or a `Node` with dropped keys / values. Since the key and value - // types may panic on drop, they may short-circuit the entry in the map actually being - // removed. Instead, we push the removed nodes onto the free list eagerly, then try and - // drop the keys and values for any newly freed nodes *after* `HashMap::retain` has - // completely finished. 
- struct DropFilteredValues<'a, K, V> { - free: &'a mut Option>>, - cur_free: Option>>, - } - - impl<'a, K, V> DropFilteredValues<'a, K, V> { - #[inline] - fn drop_later(&mut self, node: NonNull>) { - unsafe { - detach_node(node); - push_free(&mut self.cur_free, node); - } - } - } - - impl<'a, K, V> Drop for DropFilteredValues<'a, K, V> { - fn drop(&mut self) { - unsafe { - let end_free = self.cur_free; - while self.cur_free != *self.free { - let cur_free = self.cur_free.as_ptr(); - (*cur_free).take_entry(); - self.cur_free = (*cur_free).links.free.next; - } - *self.free = end_free; - } - } - } - let free = self.free; let mut drop_filtered_values = DropFilteredValues { free: &mut self.free, @@ -378,6 +344,21 @@ where } } + /// If the given key is not in this map, inserts the key / value pair at the *back* of the + /// internal linked list and returns `None`, otherwise, replaces the existing value with the + /// given value *without* moving the entry in the internal linked list and returns the previous + /// value. + #[inline] + pub fn replace(&mut self, k: K, v: V) -> Option { + match self.raw_entry_mut().from_key(&k) { + RawEntryMut::Occupied(mut occupied) => Some(occupied.replace_value(v)), + RawEntryMut::Vacant(vacant) => { + vacant.insert(k, v); + None + } + } + } + #[inline] pub fn remove(&mut self, k: &Q) -> Option where @@ -475,6 +456,81 @@ where RawEntryMut::Vacant(_) => None, } } + + #[inline] + pub fn shrink_to_fit(&mut self) { + unsafe { + let len = self.map.len(); + if len != self.map.capacity() { + self.map = HashMap::with_hasher(NullHasher); + self.map.reserve(len); + + if let Some(guard) = self.values { + let mut cur = guard.as_ref().links.value.next; + while cur != guard { + let hash = hash_key(&self.hash_builder, cur.as_ref().key_ref()); + match self + .map + .raw_entry_mut() + .from_hash(hash, |k| (*k).as_ref().key_ref().eq(cur.as_ref().key_ref())) + { + hash_map::RawEntryMut::Occupied(_) => unreachable!(), + hash_map::RawEntryMut::Vacant(vacant) => { + let hash_builder = &self.hash_builder; + vacant.insert_with_hasher(hash, cur, (), |k| { + hash_key(hash_builder, (*k).as_ref().key_ref()) + }); + } + } + cur = cur.as_ref().links.value.next; + } + } + } + + drop_free_nodes(self.free); + self.free = None; + } + } + + pub fn retain_with_order(&mut self, mut f: F) + where + F: FnMut(&K, &mut V) -> bool, + { + let free = self.free; + let mut drop_filtered_values = DropFilteredValues { + free: &mut self.free, + cur_free: free, + }; + + if let Some(values) = self.values { + unsafe { + let mut cur = values.as_ref().links.value.next; + while cur != values { + let next = cur.as_ref().links.value.next; + let filter = { + let (k, v) = (*cur.as_ptr()).entry_mut(); + !f(k, v) + }; + if filter { + let k = (*cur.as_ptr()).key_ref(); + let hash = hash_key(&self.hash_builder, k); + match self + .map + .raw_entry_mut() + .from_hash(hash, |o| (*o).as_ref().key_ref().eq(k)) + { + hash_map::RawEntryMut::Occupied(entry) => { + entry.remove(); + drop_filtered_values.drop_later(cur); + } + hash_map::RawEntryMut::Vacant(_) => unreachable!(), + } + } + cur = next; + } + } + } + } } impl LinkedHashMap @@ -591,7 +647,7 @@ impl Drop for LinkedHashMap { unsafe { if let Some(values) = self.values { drop_value_nodes(values); - Box::from_raw(values.as_ptr()); + let _ = Box::from_raw(values.as_ptr()); } drop_free_nodes(self.free); } @@ -1641,7 +1697,7 @@ impl Drop for IntoIter { let tail = self.tail.as_ptr(); self.tail = Some((*tail).links.value.prev); (*tail).take_entry(); - Box::from_raw(tail); + let _ = 
Box::from_raw(tail); } } } @@ -1833,7 +1889,7 @@ impl IntoIterator for LinkedHashMap { prev: tail, } = values.as_ref().links.value; - Box::from_raw(self.values.as_ptr()); + let _ = Box::from_raw(self.values.as_ptr()); self.values = None; (Some(head), Some(tail)) @@ -2049,7 +2105,7 @@ unsafe fn drop_value_nodes(guard: NonNull>) { while cur != guard { let prev = cur.as_ref().links.value.prev; cur.as_mut().take_entry(); - Box::from_raw(cur.as_ptr()); + let _ = Box::from_raw(cur.as_ptr()); cur = prev; } } @@ -2060,7 +2116,7 @@ unsafe fn drop_value_nodes(guard: NonNull>) { unsafe fn drop_free_nodes(mut free: Option>>) { while let Some(some_free) = free { let next_free = some_free.as_ref().links.free.next; - Box::from_raw(some_free.as_ptr()); + let _ = Box::from_raw(some_free.as_ptr()); free = next_free; } } @@ -2085,3 +2141,39 @@ where k.hash(&mut hasher); hasher.finish() } + +// We do not drop the key and value when a value is filtered from the map during the call to +// `retain`. We need to be very careful not to have a live `HashMap` entry pointing to +// either a dangling `Node` or a `Node` with dropped keys / values. Since the key and value +// types may panic on drop, they may short-circuit the entry in the map actually being +// removed. Instead, we push the removed nodes onto the free list eagerly, then try and +// drop the keys and values for any newly freed nodes *after* `HashMap::retain` has +// completely finished. +struct DropFilteredValues<'a, K, V> { + free: &'a mut Option>>, + cur_free: Option>>, +} + +impl<'a, K, V> DropFilteredValues<'a, K, V> { + #[inline] + fn drop_later(&mut self, node: NonNull>) { + unsafe { + detach_node(node); + push_free(&mut self.cur_free, node); + } + } +} + +impl<'a, K, V> Drop for DropFilteredValues<'a, K, V> { + fn drop(&mut self) { + unsafe { + let end_free = self.cur_free; + while self.cur_free != *self.free { + let cur_free = self.cur_free.as_ptr(); + (*cur_free).take_entry(); + self.cur_free = (*cur_free).links.free.next; + } + *self.free = end_free; + } + } +} diff --git a/src/linked_hash_set.rs b/src/linked_hash_set.rs index f55f6c5..5a89875 100644 --- a/src/linked_hash_set.rs +++ b/src/linked_hash_set.rs @@ -202,11 +202,20 @@ where other.is_subset(self) } + /// Inserts the given value into the set. + /// + /// If the set did not have this value present, inserts it at the *back* of the internal linked + /// list and returns true, otherwise it moves the existing value to the *back* of the internal + /// linked list and returns false. #[inline] pub fn insert(&mut self, value: T) -> bool { self.map.insert(value, ()).is_none() } + /// Adds the given value to the set, replacing the existing value. + /// + /// If a previous value existed, returns the replaced value. In this case, the value's position + /// in the internal linked list is *not* changed. 
#[inline] pub fn replace(&mut self, value: T) -> Option { match self.map.entry(value) { @@ -288,6 +297,14 @@ where linked_hash_map::RawEntryMut::Vacant(_) => false, } } + + #[inline] + pub fn retain_with_order(&mut self, mut f: F) + where + F: FnMut(&T) -> bool, + { + self.map.retain_with_order(|k, _| f(k)); + } } impl Clone for LinkedHashSet { diff --git a/tests/linked_hash_map.rs b/tests/linked_hash_map.rs index fbd3d2e..e046292 100644 --- a/tests/linked_hash_map.rs +++ b/tests/linked_hash_map.rs @@ -511,3 +511,53 @@ fn test_order_equality() { map2.to_front("4"); assert_eq!(map1, map2); } + +#[test] +fn test_replace() { + let mut map = LinkedHashMap::new(); + + map.insert(1, 1); + map.insert(2, 2); + map.insert(3, 3); + map.insert(4, 4); + + assert!(map + .iter() + .map(|(k, v)| (*k, *v)) + .eq([(1, 1), (2, 2), (3, 3), (4, 4)].iter().copied())); + + map.insert(3, 5); + + assert!(map + .iter() + .map(|(k, v)| (*k, *v)) + .eq([(1, 1), (2, 2), (4, 4), (3, 5)].iter().copied())); + + map.replace(2, 6); + + assert!(map + .iter() + .map(|(k, v)| (*k, *v)) + .eq([(1, 1), (2, 6), (4, 4), (3, 5)].iter().copied())); +} + +#[test] +fn test_shrink_to_fit_resize() { + let mut map = LinkedHashMap::new(); + map.shrink_to_fit(); + + for i in 0..100 { + map.insert(i, i); + } + map.shrink_to_fit(); + + for _ in 0..50 { + map.pop_front(); + map.shrink_to_fit(); + } + + assert_eq!(map.len(), 50); + for i in 50..100 { + assert_eq!(map.get(&i).unwrap(), &i); + } +} diff --git a/tests/linked_hash_set.rs b/tests/linked_hash_set.rs index cb75887..7a9e33f 100644 --- a/tests/linked_hash_set.rs +++ b/tests/linked_hash_set.rs @@ -423,6 +423,22 @@ fn test_retain() { assert!(set.contains(&6)); } +#[test] +fn test_retain_with_order() { + let xs = [1, 2, 3, 4, 5, 6]; + let mut set: LinkedHashSet = xs.iter().cloned().collect(); + let mut vec = Vec::new(); + set.retain_with_order(|&k| { + if k % 2 == 0 { + true + } else { + vec.push(k); + false + } + }); + assert_eq!(vec![1, 3, 5], vec); +} + #[test] fn insert_order() { let mut set = LinkedHashSet::new(); -- cgit v1.2.3
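The `replace` method added to `LinkedHashMap` in this release (see the new doc comment and `test_replace` in the diff above) differs from `insert` only in how it treats entry order. A minimal usage sketch, not part of the imported sources, assuming the hashlink 0.8.1 crate as pulled in by this change:

    use hashlink::LinkedHashMap;

    fn main() {
        let mut map = LinkedHashMap::new();
        map.insert(1, "a");
        map.insert(2, "b");
        map.insert(3, "c");

        // `insert` on an existing key updates the value and moves the entry
        // to the back of the internal linked list.
        map.insert(1, "a2");
        assert_eq!(map.keys().copied().collect::<Vec<_>>(), vec![2, 3, 1]);

        // `replace` updates the value but leaves the entry where it is.
        map.replace(2, "b2");
        assert_eq!(map.keys().copied().collect::<Vec<_>>(), vec![2, 3, 1]);
        assert_eq!(map.get(&2), Some(&"b2"));
    }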
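The CHANGELOG hunk above introduces `retain_with_order` (new in 0.8.1), which, unlike `retain`, visits entries in linked-list order. A short sketch under the same assumption (hashlink 0.8.1), mirroring the new tests in the diff:

    use hashlink::{LinkedHashMap, LinkedHashSet};

    fn main() {
        let mut map: LinkedHashMap<i32, i32> = (1..=6).map(|i| (i, i * 10)).collect();
        let mut visited = Vec::new();
        map.retain_with_order(|&k, _v| {
            visited.push(k); // closure sees keys front-to-back
            k % 2 == 0       // keep even keys only
        });
        assert_eq!(visited, vec![1, 2, 3, 4, 5, 6]);
        assert_eq!(map.keys().copied().collect::<Vec<_>>(), vec![2, 4, 6]);

        // The set version simply forwards to the map implementation.
        let mut set: LinkedHashSet<i32> = (1..=6).collect();
        set.retain_with_order(|&v| v > 3);
        assert_eq!(set.iter().copied().collect::<Vec<_>>(), vec![4, 5, 6]);
    }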
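The 0.8.0 entry notes that `shrink_to_fit` no longer panics when called on non-empty containers, and the new `test_shrink_to_fit_resize` exercises exactly that. A sketch of the pattern this enables (again assuming hashlink 0.8.1; the values are arbitrary):

    use hashlink::LinkedHashMap;

    fn main() {
        let mut map: LinkedHashMap<i32, i32> = (0..100).map(|i| (i, i)).collect();
        for _ in 0..50 {
            map.pop_front(); // drop the 50 oldest entries
        }
        // Safe on a non-empty map with this upgrade; the table is rebuilt
        // at the smaller size instead of panicking.
        map.shrink_to_fit();
        assert_eq!(map.len(), 50);
        assert_eq!(map.get(&75), Some(&75));
    }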