author      Chih-Hung Hsieh <chh@google.com>   2020-11-24 01:47:06 +0000
committer   Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>   2020-11-24 01:47:06 +0000
commit      1c858da4ae69b422f92107f6fd4a5a784a2a4063 (patch)
tree        50069180fea404c5247eb6b933c6836cda6f2857
parent      f9571b675554e0cc4c2a37e68df0b78a40f5c027 (diff)
parent      0543f0dc8c49e4fdeb57e3c5f4ba921f8c5a1b9a (diff)
download    hashlink-1c858da4ae69b422f92107f6fd4a5a784a2a4063.tar.gz
Import hashlink-0.6.0 am: e93fbab9e5 am: 0543f0dc8c
Original change: https://android-review.googlesource.com/c/platform/external/rust/crates/hashlink/+/1493280

Change-Id: I09415f0842e511509490afd702b688be4c33d266
-rw-r--r--   .cargo_vcs_info.json        5
-rw-r--r--   .circleci/config.yml       55
-rw-r--r--   .gitignore                  7
-rw-r--r--   Android.bp                 16
-rw-r--r--   CHANGELOG.md               51
-rw-r--r--   Cargo.toml                 37
-rw-r--r--   Cargo.toml.orig            24
l---------   LICENSE                     1
-rw-r--r--   LICENSE-APACHE            201
-rw-r--r--   LICENSE-MIT                26
-rw-r--r--   METADATA                   19
-rw-r--r--   MODULE_LICENSE_APACHE2      0
-rw-r--r--   OWNERS                      1
-rw-r--r--   README.md                  67
-rw-r--r--   src/lib.rs                  9
-rw-r--r--   src/linked_hash_map.rs   2053
-rw-r--r--   src/linked_hash_set.rs    753
-rw-r--r--   src/lru_cache.rs          292
-rw-r--r--   src/serde.rs              151
-rw-r--r--   tests/linked_hash_map.rs  498
-rw-r--r--   tests/linked_hash_set.rs  512
-rw-r--r--   tests/lru_cache.rs        166
-rw-r--r--   tests/serde.rs             59
23 files changed, 5003 insertions, 0 deletions
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json
new file mode 100644
index 0000000..89ad255
--- /dev/null
+++ b/.cargo_vcs_info.json
@@ -0,0 +1,5 @@
+{
+ "git": {
+ "sha1": "a244c741ac60333e3c37298fffdbfc1aa5f3c0d6"
+ }
+}
diff --git a/.circleci/config.yml b/.circleci/config.yml
new file mode 100644
index 0000000..9116b38
--- /dev/null
+++ b/.circleci/config.yml
@@ -0,0 +1,55 @@
+version: 2
+
+jobs:
+ build:
+ docker:
+ - image: circleci/rust:latest
+ steps:
+ - checkout
+ - run:
+ name: Setup Rust
+ command: |
+ rustup toolchain uninstall nightly
+ rustup toolchain install nightly -c miri rust-src rustfmt
+ - run:
+ name: Version information
+ command: |
+ rustc --version
+ cargo --version
+ rustc +nightly --version
+ cargo +nightly --version
+ rustup --version
+ - run:
+ name: Calculate dependencies
+ command: cargo generate-lockfile
+ - restore_cache:
+ keys:
+ - cargo-cache-{{ arch }}-{{ checksum "Cargo.lock" }}
+ - run:
+ name: Check Formatting
+ command: |
+ rustfmt --version
+ cargo fmt --all -- --check --color=auto
+ - run:
+ name: Build all targets
+ command: cargo build --all --all-targets
+ - run:
+ name: Run all tests
+ command: cargo test --all
+ - run:
+ name: Run all tests under miri
+ command: |
+ cargo +nightly miri test
+ - run:
+ name: Run all tests under sanitizers
+ command: |
+ RUSTFLAGS="-Z sanitizer=address" cargo +nightly -Z build-std test --target x86_64-unknown-linux-gnu
+ RUSTFLAGS="-Z sanitizer=leak" cargo +nightly test -Z build-std --target x86_64-unknown-linux-gnu
+ RUSTFLAGS="-Z sanitizer=memory" cargo +nightly test -Z build-std --target x86_64-unknown-linux-gnu
+ - save_cache:
+ paths:
+ - /usr/local/cargo/registry
+ - target/debug/.fingerprint
+ - target/debug/build
+ - target/debug/deps
+ key: cargo-cache-{{ arch }}-{{ checksum "Cargo.lock" }}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..256af81
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,7 @@
+/target
+**/*.rs.bk
+Cargo.lock
+.DS_Store
+.envrc
+shell.nix
+.dir-locals.el
diff --git a/Android.bp b/Android.bp
new file mode 100644
index 0000000..e61e79d
--- /dev/null
+++ b/Android.bp
@@ -0,0 +1,16 @@
+// This file is generated by cargo2android.py --device --run --dependencies.
+
+rust_library {
+ name: "libhashlink",
+ host_supported: true,
+ crate_name: "hashlink",
+ srcs: ["src/lib.rs"],
+ edition: "2018",
+ rustlibs: [
+ "libhashbrown",
+ ],
+}
+
+// dependent_library ["feature_list"]
+// ahash-0.4.6
+// hashbrown-0.9.1 "ahash,default,inline-more"
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..12963ea
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,51 @@
+## [0.6.0]
+- API incompatible change: depend on hashbrown 0.9, re-export renamed
+ hashbrown::TryReserveError type.
+- Add a `Debug` impl to `LruCache` (thanks @thomcc!)
+- Adjust trait bounds for `LinkedHashMap::retain`, `LinkedHashSet::default` to
+ be less strict (to match hashbrown)
+- Adjust trait bounds for all `Debug` impls to be less strict (to match
+ hashbrown).
+- Adjust trait bounds for all `IntoIterator` impls to be less strict (to match
+ hashbrown).
+- Adjust trait bounds for `LruCache::with_hasher`, `LruCache::capacity`,
+ `LruCache::len`, `LruCache::is_empty`, `LruCache::clear`, `LruCache::iter`,
+ `LruCache::iter_mut`, and `LruCache::drain` to be less strict
+- Add optional serde support for `LinkedHashMap` and `LinkedHashSet`.
+- Add `to_back` and `to_front` methods for LinkedHashSet to control entry order.
+
+## [0.5.1]
+- Add `LinkedHashMap::remove_entry` and `LruCache::remove_entry`
+- Add `LruCache::new_unbounded` constructor that sets capacity to usize::MAX
+- Add `LruCache::get` method to go with `LruCache::get_mut`
+- Add `LruCache::peek` and `LruCache::peek_mut` to access the cache without
+ moving the entry in the LRU list
+
+## [0.5.0]
+- API incompatible change: depend on hashbrown 0.7
+
+## [0.4.0]
+- API incompatible change: depend on hashbrown 0.6
+- Passes miri
+
+## [0.3.0]
+- Add some *minimal* documentation for methods that change the internal ordering.
+- Decide on a pattern for methods that change the internal ordering: the word
+ "insert" means that it will move an existing entry to the back.
+- Some methods have been renamed to conform to the above system.
+
+## [0.2.1]
+- Fix variance for LinkedHashMap (now covariant where appropriate)
+- Add Debug impls to many more associated types
+- Add LinkedHashSet
+- Add `LinkedHashMap::retain`
+
+## [0.2.0]
+- Move `linked_hash_map` into its own module
+- Add `LruCache` type ported from `lru-cache` crate into its own module
+- Add `LruCache` entry and raw-entry API
+- Add `linked_hash_map` `IntoIter` iterator that is different from `Drain` iterator
+- Make `Drain` iterator recycle freed linked list nodes
+
+## [0.1.0]
+- Initial release
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000..6a5a2d4
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,37 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+edition = "2018"
+name = "hashlink"
+version = "0.6.0"
+authors = ["kyren <kerriganw@gmail.com>"]
+description = "HashMap-like containers that hold their key-value pairs in a user controllable order"
+documentation = "https://docs.rs/hashlink"
+readme = "README.md"
+keywords = ["data-structures"]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/kyren/hashlink"
+[dependencies.hashbrown]
+version = "0.9.0"
+
+[dependencies.serde]
+version = "1.0"
+optional = true
+[dev-dependencies.serde_test]
+version = "1.0"
+
+[features]
+serde_impl = ["serde"]
+[badges.circle-ci]
+branch = "master"
+repository = "kyren/hashlink"
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
new file mode 100644
index 0000000..6ad661b
--- /dev/null
+++ b/Cargo.toml.orig
@@ -0,0 +1,24 @@
+[package]
+name = "hashlink"
+version = "0.6.0"
+authors = ["kyren <kerriganw@gmail.com>"]
+edition = "2018"
+description = "HashMap-like containers that hold their key-value pairs in a user controllable order"
+repository = "https://github.com/kyren/hashlink"
+documentation = "https://docs.rs/hashlink"
+readme = "README.md"
+keywords = ["data-structures"]
+license = "MIT OR Apache-2.0"
+
+[badges]
+circle-ci = { repository = "kyren/hashlink", branch = "master" }
+
+[features]
+serde_impl = ["serde"]
+
+[dependencies]
+hashbrown = "0.9.0"
+serde = { version = "1.0", optional = true }
+
+[dev-dependencies]
+serde_test = "1.0"
diff --git a/LICENSE b/LICENSE
new file mode 120000
index 0000000..6b579aa
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1 @@
+LICENSE-APACHE
\ No newline at end of file
diff --git a/LICENSE-APACHE b/LICENSE-APACHE
new file mode 100644
index 0000000..1b22bef
--- /dev/null
+++ b/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
\ No newline at end of file
diff --git a/LICENSE-MIT b/LICENSE-MIT
new file mode 100644
index 0000000..b3cad87
--- /dev/null
+++ b/LICENSE-MIT
@@ -0,0 +1,26 @@
+This work is derived in part from the `linked-hash-map` crate, Copyright (c)
+2015 The Rust Project Developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..d38b627
--- /dev/null
+++ b/METADATA
@@ -0,0 +1,19 @@
+name: "hashlink"
+description: "HashMap-like containers that hold their key-value pairs in a user controllable order"
+third_party {
+ url {
+ type: HOMEPAGE
+ value: "https://crates.io/crates/hashlink"
+ }
+ url {
+ type: ARCHIVE
+ value: "https://static.crates.io/crates/hashlink/hashlink-0.6.0.crate"
+ }
+ version: "0.6.0"
+ license_type: NOTICE
+ last_upgrade_date {
+ year: 2020
+ month: 11
+ day: 9
+ }
+}
diff --git a/MODULE_LICENSE_APACHE2 b/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/MODULE_LICENSE_APACHE2
diff --git a/OWNERS b/OWNERS
new file mode 100644
index 0000000..46fc303
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1 @@
+include platform/prebuilts/rust:/OWNERS
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..9272b0d
--- /dev/null
+++ b/README.md
@@ -0,0 +1,67 @@
+# hashlink -- HashMap-like containers that hold their key-value pairs in a user controllable order
+
+[![Build Status](https://img.shields.io/circleci/project/github/kyren/hashlink.svg)](https://circleci.com/gh/kyren/hashlink)
+[![Latest Version](https://img.shields.io/crates/v/hashlink.svg)](https://crates.io/crates/hashlink)
+[![API Documentation](https://docs.rs/hashlink/badge.svg)](https://docs.rs/hashlink)
+
+This crate is a fork of
+[linked-hash-map](https://github.com/contain-rs/linked-hash-map) that builds on
+top of [hashbrown](https://github.com/rust-lang/hashbrown) to implement more
+up-to-date versions of `LinkedHashMap`, `LinkedHashSet`, and `LruCache`.
+
+One important API change is that when a `LinkedHashMap` is used as an LRU cache,
+it allows you to easily retrieve an entry and move it to the back OR produce a
+new entry at the back without needlessly repeating key hashing and lookups:
+
+``` rust
+let mut lru_cache = LinkedHashMap::new();
+let key = "key".to_owned();
+// Try to find my expensive to construct and hash key
+let _cached_val = match lru_cache.raw_entry_mut().from_key(&key) {
+ RawEntryMut::Occupied(mut occupied) => {
+ // Cache hit, move entry to the back.
+ occupied.to_back();
+ occupied.into_mut()
+ }
+ RawEntryMut::Vacant(vacant) => {
+ // Insert expensive to construct key and expensive to compute value,
+ // automatically inserted at the back.
+ vacant.insert(key.clone(), 42).1
+ }
+};
+```
+
+Or, a simpler way to do the same thing:
+
+``` rust
+let mut lru_cache = LinkedHashMap::new();
+let key = "key".to_owned();
+let _cached_val = lru_cache
+ .raw_entry_mut()
+ .from_key(&key)
+ .or_insert_with(|| (key.clone(), 42));
+```
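+
+The crate also ships a standalone `LruCache` type built on `LinkedHashMap`. As a
+minimal, illustrative sketch (the capacity and keys below are made up for the
+example, not taken from the upstream documentation):
+
+``` rust
+use hashlink::LruCache;
+
+let mut cache = LruCache::new(2);
+cache.insert("a", 1);
+cache.insert("b", 2);
+// The capacity is 2, so inserting a third entry evicts the least-recently-used one ("a").
+cache.insert("c", 3);
+assert!(cache.get(&"a").is_none());
+assert_eq!(cache.get(&"b"), Some(&2));
+```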
+
+This crate contains a decent amount of unsafe code for handling its internal
+linked list, and the unsafe code has diverged quite a lot from the original
+`linked-hash-map` implementation. It currently passes tests under miri and
+sanitizers, but it should probably still receive more review, more testing, and
+a check of test code coverage.
+
+## Credit
+
+There is a huge amount of code in this crate that is copied verbatim from
+`linked-hash-map` and `hashbrown`, especially tests, associated types like
+iterators, and things like `Debug` impls.
+
+## License
+
+This library is licensed the same as
+[linked-hash-map](https://github.com/contain-rs/linked-hash-map) and
+[hashbrown](https://github.com/rust-lang/hashbrown); it is licensed under either
+of:
+
+* MIT license [LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT
+* Apache License 2.0 [LICENSE-APACHE](LICENSE-APACHE) or https://opensource.org/licenses/Apache-2.0
+
+at your option.
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644
index 0000000..55bdcd2
--- /dev/null
+++ b/src/lib.rs
@@ -0,0 +1,9 @@
+pub mod linked_hash_map;
+pub mod linked_hash_set;
+pub mod lru_cache;
+#[cfg(feature = "serde_impl")]
+pub mod serde;
+
+pub use linked_hash_map::LinkedHashMap;
+pub use linked_hash_set::LinkedHashSet;
+pub use lru_cache::LruCache;
diff --git a/src/linked_hash_map.rs b/src/linked_hash_map.rs
new file mode 100644
index 0000000..32733ea
--- /dev/null
+++ b/src/linked_hash_map.rs
@@ -0,0 +1,2053 @@
+use std::{
+ borrow::Borrow,
+ cmp::Ordering,
+ fmt,
+ hash::{BuildHasher, Hash, Hasher},
+ iter::FromIterator,
+ marker::PhantomData,
+ mem::{self, MaybeUninit},
+ ops::{Index, IndexMut},
+ ptr::{self, NonNull},
+};
+
+use hashbrown::{hash_map, HashMap};
+
+pub type TryReserveError = hashbrown::TryReserveError;
+
+/// A version of `HashMap` that has a user controllable order for its entries.
+///
+/// It achieves this by keeping its entries in an internal linked list and using a `HashMap` to
+/// point at nodes in this linked list.
+///
+/// The order of entries defaults to "insertion order", but the user can also modify the order of
+/// existing entries by manually moving them to the front or back.
+///
+/// There are two kinds of methods that modify the order of the internal list:
+///
+/// * Methods that have names like `to_front` and `to_back` will unsurprisingly move an existing
+/// entry to the front or back
+/// * Methods that have the word `insert` will insert a new entry at the back of the list, and if
+/// that method might replace an entry, that method will *also move that existing entry to the
+/// back*.
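+///
+/// An illustrative sketch of these rules (an added example, not from the upstream docs):
+///
+/// ```
+/// use hashlink::LinkedHashMap;
+///
+/// let mut map = LinkedHashMap::new();
+/// map.insert("a", 1);
+/// map.insert("b", 2);
+/// // Re-inserting an existing key replaces its value *and* moves the entry to the back.
+/// map.insert("a", 3);
+/// let keys: Vec<_> = map.iter().map(|(k, _)| *k).collect();
+/// assert_eq!(keys, vec!["b", "a"]);
+/// ```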
+pub struct LinkedHashMap<K, V, S = hash_map::DefaultHashBuilder> {
+ map: HashMap<NonNull<Node<K, V>>, (), NullHasher>,
+ // We need to keep any custom hash builder outside of the HashMap so we can access it alongside
+ // the entry API without mutable aliasing.
+ hash_builder: S,
+ // Circular linked list of nodes. If `values` is non-null, it will point to a "guard node"
+ // which will never have an initialized key or value, `values.prev` will contain the last key /
+ // value in the list, `values.next` will contain the first key / value in the list.
+ values: Option<NonNull<Node<K, V>>>,
+ // *Singly* linked list of free nodes. The `prev` pointers in the free list should be assumed
+ // invalid.
+ free: Option<NonNull<Node<K, V>>>,
+}
+
+impl<K, V> LinkedHashMap<K, V> {
+ #[inline]
+ pub fn new() -> Self {
+ Self {
+ hash_builder: hash_map::DefaultHashBuilder::default(),
+ map: HashMap::with_hasher(NullHasher),
+ values: None,
+ free: None,
+ }
+ }
+
+ #[inline]
+ pub fn with_capacity(capacity: usize) -> Self {
+ Self {
+ hash_builder: hash_map::DefaultHashBuilder::default(),
+ map: HashMap::with_capacity_and_hasher(capacity, NullHasher),
+ values: None,
+ free: None,
+ }
+ }
+}
+
+impl<K, V, S> LinkedHashMap<K, V, S> {
+ #[inline]
+ pub fn with_hasher(hash_builder: S) -> Self {
+ Self {
+ hash_builder,
+ map: HashMap::with_hasher(NullHasher),
+ values: None,
+ free: None,
+ }
+ }
+
+ #[inline]
+ pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Self {
+ Self {
+ hash_builder,
+ map: HashMap::with_capacity_and_hasher(capacity, NullHasher),
+ values: None,
+ free: None,
+ }
+ }
+
+ #[inline]
+ pub fn reserve(&mut self, additional: usize) {
+ self.map.reserve(additional);
+ }
+
+ #[inline]
+ pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
+ self.map.try_reserve(additional)
+ }
+
+ #[inline]
+ pub fn shrink_to_fit(&mut self) {
+ self.map.shrink_to_fit();
+ unsafe { drop_free_nodes(self.free) };
+ self.free = None;
+ }
+
+ #[inline]
+ pub fn len(&self) -> usize {
+ self.map.len()
+ }
+
+ #[inline]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ #[inline]
+ pub fn clear(&mut self) {
+ self.map.clear();
+ if let Some(mut values) = self.values {
+ unsafe {
+ drop_value_nodes(values);
+ values.as_mut().links.value = ValueLinks {
+ prev: values,
+ next: values,
+ };
+ }
+ }
+ }
+
+ #[inline]
+ pub fn iter(&self) -> Iter<K, V> {
+ let (head, tail) = if let Some(values) = self.values {
+ unsafe {
+ let ValueLinks { next, prev } = values.as_ref().links.value;
+ (next.as_ptr(), prev.as_ptr())
+ }
+ } else {
+ (ptr::null_mut(), ptr::null_mut())
+ };
+
+ Iter {
+ head,
+ tail,
+ remaining: self.len(),
+ marker: PhantomData,
+ }
+ }
+
+ #[inline]
+ pub fn iter_mut(&mut self) -> IterMut<K, V> {
+ let (head, tail) = if let Some(values) = self.values {
+ unsafe {
+ let ValueLinks { next, prev } = values.as_ref().links.value;
+ (Some(next), Some(prev))
+ }
+ } else {
+ (None, None)
+ };
+
+ IterMut {
+ head,
+ tail,
+ remaining: self.len(),
+ marker: PhantomData,
+ }
+ }
+
+ #[inline]
+ pub fn drain(&mut self) -> Drain<'_, K, V> {
+ unsafe {
+ let (head, tail) = if let Some(mut values) = self.values {
+ let ValueLinks { next, prev } = values.as_ref().links.value;
+ values.as_mut().links.value = ValueLinks {
+ next: values,
+ prev: values,
+ };
+ (Some(next), Some(prev))
+ } else {
+ (None, None)
+ };
+ let len = self.len();
+
+ self.map.clear();
+
+ Drain {
+ free: (&mut self.free).into(),
+ head,
+ tail,
+ remaining: len,
+ marker: PhantomData,
+ }
+ }
+ }
+
+ #[inline]
+ pub fn keys(&self) -> Keys<K, V> {
+ Keys { inner: self.iter() }
+ }
+
+ #[inline]
+ pub fn values(&self) -> Values<K, V> {
+ Values { inner: self.iter() }
+ }
+
+ #[inline]
+ pub fn values_mut(&mut self) -> ValuesMut<K, V> {
+ ValuesMut {
+ inner: self.iter_mut(),
+ }
+ }
+
+ #[inline]
+ pub fn front(&self) -> Option<(&K, &V)> {
+ if self.is_empty() {
+ return None;
+ }
+ unsafe {
+ let front = (*self.values.as_ptr()).links.value.next.as_ptr();
+ let (key, value) = (*front).entry_ref();
+ Some((key, value))
+ }
+ }
+
+ #[inline]
+ pub fn back(&self) -> Option<(&K, &V)> {
+ if self.is_empty() {
+ return None;
+ }
+ unsafe {
+ let back = &*(*self.values.as_ptr()).links.value.prev.as_ptr();
+ let (key, value) = (*back).entry_ref();
+ Some((key, value))
+ }
+ }
+
+ #[inline]
+ pub fn retain<F>(&mut self, mut f: F)
+ where
+ F: FnMut(&K, &mut V) -> bool,
+ {
+ // We do not drop the key and value when a value is filtered from the map during the call to
+ // `retain`. We need to be very careful not to have a live `HashMap` entry pointing to
+ // either a dangling `Node` or a `Node` with dropped keys / values. Since the key and value
+ // types may panic on drop, a panic there could prevent the entry in the map from actually being
+ // removed. Instead, we push the removed nodes onto the free list eagerly, then try and
+ // drop the keys and values for any newly freed nodes *after* `HashMap::retain` has
+ // completely finished.
+ struct DropFilteredValues<'a, K, V> {
+ free: &'a mut Option<NonNull<Node<K, V>>>,
+ cur_free: Option<NonNull<Node<K, V>>>,
+ }
+
+ impl<'a, K, V> DropFilteredValues<'a, K, V> {
+ #[inline]
+ fn drop_later(&mut self, node: NonNull<Node<K, V>>) {
+ unsafe {
+ detach_node(node);
+ push_free(&mut self.cur_free, node);
+ }
+ }
+ }
+
+ impl<'a, K, V> Drop for DropFilteredValues<'a, K, V> {
+ fn drop(&mut self) {
+ unsafe {
+ let end_free = self.cur_free;
+ while self.cur_free != *self.free {
+ let cur_free = self.cur_free.as_ptr();
+ (*cur_free).take_entry();
+ self.cur_free = (*cur_free).links.free.next;
+ }
+ *self.free = end_free;
+ }
+ }
+ }
+
+ let free = self.free;
+ let mut drop_filtered_values = DropFilteredValues {
+ free: &mut self.free,
+ cur_free: free,
+ };
+
+ self.map.retain(|&node, _| unsafe {
+ let (k, v) = (*node.as_ptr()).entry_mut();
+ if f(k, v) {
+ true
+ } else {
+ drop_filtered_values.drop_later(node);
+ false
+ }
+ });
+ }
+
+ #[inline]
+ pub fn hasher(&self) -> &S {
+ &self.hash_builder
+ }
+
+ #[inline]
+ pub fn capacity(&self) -> usize {
+ self.map.capacity()
+ }
+}
+
+impl<K, V, S> LinkedHashMap<K, V, S>
+where
+ K: Eq + Hash,
+ S: BuildHasher,
+{
+ #[inline]
+ pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S> {
+ match self.raw_entry_mut().from_key(&key) {
+ RawEntryMut::Occupied(occupied) => Entry::Occupied(OccupiedEntry {
+ key,
+ raw_entry: occupied,
+ }),
+ RawEntryMut::Vacant(vacant) => Entry::Vacant(VacantEntry {
+ key,
+ raw_entry: vacant,
+ }),
+ }
+ }
+
+ #[inline]
+ pub fn get<Q>(&self, k: &Q) -> Option<&V>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ self.raw_entry().from_key(k).map(|(_, v)| v)
+ }
+
+ #[inline]
+ pub fn get_key_value<Q>(&self, k: &Q) -> Option<(&K, &V)>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ self.raw_entry().from_key(k)
+ }
+
+ #[inline]
+ pub fn contains_key<Q>(&self, k: &Q) -> bool
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ self.get(k).is_some()
+ }
+
+ #[inline]
+ pub fn get_mut<Q>(&mut self, k: &Q) -> Option<&mut V>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ match self.raw_entry_mut().from_key(k) {
+ RawEntryMut::Occupied(occupied) => Some(occupied.into_mut()),
+ RawEntryMut::Vacant(_) => None,
+ }
+ }
+
+ /// Inserts the given key / value pair at the *back* of the internal linked list.
+ ///
+ /// Returns the previously set value, if one existed prior to this call. After this call,
+ /// calling `LinkedHashMap::back` will return a reference to this key / value pair.
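+ ///
+ /// Illustrative sketch (an added example, not from the upstream docs):
+ ///
+ /// ```
+ /// use hashlink::LinkedHashMap;
+ ///
+ /// let mut map = LinkedHashMap::new();
+ /// assert_eq!(map.insert("a", 1), None);
+ /// // The key already exists, so the old value is returned and "a" moves to the back.
+ /// assert_eq!(map.insert("a", 2), Some(1));
+ /// assert_eq!(map.back(), Some((&"a", &2)));
+ /// ```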
+ #[inline]
+ pub fn insert(&mut self, k: K, v: V) -> Option<V> {
+ match self.raw_entry_mut().from_key(&k) {
+ RawEntryMut::Occupied(mut occupied) => {
+ occupied.to_back();
+ Some(occupied.replace_value(v))
+ }
+ RawEntryMut::Vacant(vacant) => {
+ vacant.insert(k, v);
+ None
+ }
+ }
+ }
+
+ #[inline]
+ pub fn remove<Q>(&mut self, k: &Q) -> Option<V>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ match self.raw_entry_mut().from_key(&k) {
+ RawEntryMut::Occupied(occupied) => Some(occupied.remove()),
+ RawEntryMut::Vacant(_) => None,
+ }
+ }
+
+ #[inline]
+ pub fn remove_entry<Q>(&mut self, k: &Q) -> Option<(K, V)>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ match self.raw_entry_mut().from_key(&k) {
+ RawEntryMut::Occupied(occupied) => Some(occupied.remove_entry()),
+ RawEntryMut::Vacant(_) => None,
+ }
+ }
+
+ #[inline]
+ pub fn pop_front(&mut self) -> Option<(K, V)> {
+ if self.is_empty() {
+ return None;
+ }
+ unsafe {
+ let front = (*self.values.as_ptr()).links.value.next;
+ match self.map.raw_entry_mut().from_hash(
+ hash_key(&self.hash_builder, front.as_ref().key_ref()),
+ |k| (*k).as_ref().key_ref().eq(front.as_ref().key_ref()),
+ ) {
+ hash_map::RawEntryMut::Occupied(occupied) => {
+ Some(remove_node(&mut self.free, occupied.remove_entry().0))
+ }
+ hash_map::RawEntryMut::Vacant(_) => None,
+ }
+ }
+ }
+
+ #[inline]
+ pub fn pop_back(&mut self) -> Option<(K, V)> {
+ if self.is_empty() {
+ return None;
+ }
+ unsafe {
+ let back = (*self.values.as_ptr()).links.value.prev;
+ match self
+ .map
+ .raw_entry_mut()
+ .from_hash(hash_key(&self.hash_builder, back.as_ref().key_ref()), |k| {
+ (*k).as_ref().key_ref().eq(back.as_ref().key_ref())
+ }) {
+ hash_map::RawEntryMut::Occupied(occupied) => {
+ Some(remove_node(&mut self.free, occupied.remove_entry().0))
+ }
+ hash_map::RawEntryMut::Vacant(_) => None,
+ }
+ }
+ }
+}
+
+impl<K, V, S> LinkedHashMap<K, V, S>
+where
+ S: BuildHasher,
+{
+ #[inline]
+ pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S> {
+ RawEntryBuilder {
+ hash_builder: &self.hash_builder,
+ entry: self.map.raw_entry(),
+ }
+ }
+
+ #[inline]
+ pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S> {
+ RawEntryBuilderMut {
+ hash_builder: &self.hash_builder,
+ values: &mut self.values,
+ free: &mut self.free,
+ entry: self.map.raw_entry_mut(),
+ }
+ }
+}
+
+impl<K, V, S> Default for LinkedHashMap<K, V, S>
+where
+ S: Default,
+{
+ #[inline]
+ fn default() -> Self {
+ Self::with_hasher(S::default())
+ }
+}
+
+impl<K: Hash + Eq, V, S: BuildHasher + Default> FromIterator<(K, V)> for LinkedHashMap<K, V, S> {
+ #[inline]
+ fn from_iter<I: IntoIterator<Item = (K, V)>>(iter: I) -> Self {
+ let iter = iter.into_iter();
+ let mut map = Self::with_capacity_and_hasher(iter.size_hint().0, S::default());
+ map.extend(iter);
+ map
+ }
+}
+
+impl<K, V, S> fmt::Debug for LinkedHashMap<K, V, S>
+where
+ K: fmt::Debug,
+ V: fmt::Debug,
+{
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_map().entries(self).finish()
+ }
+}
+
+impl<K: Hash + Eq, V: PartialEq, S: BuildHasher> PartialEq for LinkedHashMap<K, V, S> {
+ #[inline]
+ fn eq(&self, other: &Self) -> bool {
+ self.len() == other.len() && self.iter().eq(other)
+ }
+}
+
+impl<K: Hash + Eq, V: Eq, S: BuildHasher> Eq for LinkedHashMap<K, V, S> {}
+
+impl<K: Hash + Eq + PartialOrd, V: PartialOrd, S: BuildHasher> PartialOrd
+ for LinkedHashMap<K, V, S>
+{
+ #[inline]
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ self.iter().partial_cmp(other)
+ }
+
+ #[inline]
+ fn lt(&self, other: &Self) -> bool {
+ self.iter().lt(other)
+ }
+
+ #[inline]
+ fn le(&self, other: &Self) -> bool {
+ self.iter().le(other)
+ }
+
+ #[inline]
+ fn ge(&self, other: &Self) -> bool {
+ self.iter().ge(other)
+ }
+
+ #[inline]
+ fn gt(&self, other: &Self) -> bool {
+ self.iter().gt(other)
+ }
+}
+
+impl<K: Hash + Eq + Ord, V: Ord, S: BuildHasher> Ord for LinkedHashMap<K, V, S> {
+ #[inline]
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.iter().cmp(other)
+ }
+}
+
+impl<K: Hash + Eq, V: Hash, S: BuildHasher> Hash for LinkedHashMap<K, V, S> {
+ #[inline]
+ fn hash<H: Hasher>(&self, h: &mut H) {
+ for e in self.iter() {
+ e.hash(h);
+ }
+ }
+}
+
+impl<K, V, S> Drop for LinkedHashMap<K, V, S> {
+ #[inline]
+ fn drop(&mut self) {
+ unsafe {
+ if let Some(values) = self.values {
+ drop_value_nodes(values);
+ Box::from_raw(values.as_ptr());
+ }
+ drop_free_nodes(self.free);
+ }
+ }
+}
+
+unsafe impl<K: Send, V: Send, S: Send> Send for LinkedHashMap<K, V, S> {}
+unsafe impl<K: Sync, V: Sync, S: Sync> Sync for LinkedHashMap<K, V, S> {}
+
+impl<'a, K, V, S, Q> Index<&'a Q> for LinkedHashMap<K, V, S>
+where
+ K: Hash + Eq + Borrow<Q>,
+ S: BuildHasher,
+ Q: Eq + Hash + ?Sized,
+{
+ type Output = V;
+
+ #[inline]
+ fn index(&self, index: &'a Q) -> &V {
+ self.get(index).expect("no entry found for key")
+ }
+}
+
+impl<'a, K, V, S, Q> IndexMut<&'a Q> for LinkedHashMap<K, V, S>
+where
+ K: Hash + Eq + Borrow<Q>,
+ S: BuildHasher,
+ Q: Eq + Hash + ?Sized,
+{
+ #[inline]
+ fn index_mut(&mut self, index: &'a Q) -> &mut V {
+ self.get_mut(index).expect("no entry found for key")
+ }
+}
+
+impl<K: Hash + Eq + Clone, V: Clone, S: BuildHasher + Clone> Clone for LinkedHashMap<K, V, S> {
+ #[inline]
+ fn clone(&self) -> Self {
+ let mut map = Self::with_hasher(self.hash_builder.clone());
+ map.extend(self.iter().map(|(k, v)| (k.clone(), v.clone())));
+ map
+ }
+}
+
+impl<K: Hash + Eq, V, S: BuildHasher> Extend<(K, V)> for LinkedHashMap<K, V, S> {
+ #[inline]
+ fn extend<I: IntoIterator<Item = (K, V)>>(&mut self, iter: I) {
+ for (k, v) in iter {
+ self.insert(k, v);
+ }
+ }
+}
+
+impl<'a, K, V, S> Extend<(&'a K, &'a V)> for LinkedHashMap<K, V, S>
+where
+ K: 'a + Hash + Eq + Copy,
+ V: 'a + Copy,
+ S: BuildHasher,
+{
+ #[inline]
+ fn extend<I: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: I) {
+ for (&k, &v) in iter {
+ self.insert(k, v);
+ }
+ }
+}
+
+pub enum Entry<'a, K, V, S> {
+ Occupied(OccupiedEntry<'a, K, V>),
+ Vacant(VacantEntry<'a, K, V, S>),
+}
+
+impl<K: fmt::Debug, V: fmt::Debug, S> fmt::Debug for Entry<'_, K, V, S> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Entry::Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
+ Entry::Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
+ }
+ }
+}
+
+impl<'a, K, V, S> Entry<'a, K, V, S> {
+ /// If this entry is vacant, inserts a new entry with the given value and returns a reference to
+ /// it.
+ ///
+ /// If this entry is occupied, this method *moves the occupied entry to the back of the internal
+ /// linked list* and returns a reference to the existing value.
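+ ///
+ /// A minimal illustrative sketch (an added example, not from the upstream docs):
+ ///
+ /// ```
+ /// use hashlink::LinkedHashMap;
+ ///
+ /// let mut map: LinkedHashMap<&str, u32> = LinkedHashMap::new();
+ /// // "a" is vacant, so 1 is inserted and a mutable reference to it is returned.
+ /// *map.entry("a").or_insert(1) += 10;
+ /// assert_eq!(map.get("a"), Some(&11));
+ /// ```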
+ #[inline]
+ pub fn or_insert(self, default: V) -> &'a mut V
+ where
+ K: Hash,
+ S: BuildHasher,
+ {
+ match self {
+ Entry::Occupied(mut entry) => {
+ entry.to_back();
+ entry.into_mut()
+ }
+ Entry::Vacant(entry) => entry.insert(default),
+ }
+ }
+
+ /// Similar to `Entry::or_insert`, but accepts a function to construct a new value if this entry
+ /// is vacant.
+ #[inline]
+ pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V
+ where
+ K: Hash,
+ S: BuildHasher,
+ {
+ match self {
+ Entry::Occupied(mut entry) => {
+ entry.to_back();
+ entry.into_mut()
+ }
+ Entry::Vacant(entry) => entry.insert(default()),
+ }
+ }
+
+ #[inline]
+ pub fn key(&self) -> &K {
+ match *self {
+ Entry::Occupied(ref entry) => entry.key(),
+ Entry::Vacant(ref entry) => entry.key(),
+ }
+ }
+
+ #[inline]
+ pub fn and_modify<F>(self, f: F) -> Self
+ where
+ F: FnOnce(&mut V),
+ {
+ match self {
+ Entry::Occupied(mut entry) => {
+ f(entry.get_mut());
+ Entry::Occupied(entry)
+ }
+ Entry::Vacant(entry) => Entry::Vacant(entry),
+ }
+ }
+}
+
+pub struct OccupiedEntry<'a, K, V> {
+ key: K,
+ raw_entry: RawOccupiedEntryMut<'a, K, V>,
+}
+
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for OccupiedEntry<'_, K, V> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("OccupiedEntry")
+ .field("key", self.key())
+ .field("value", self.get())
+ .finish()
+ }
+}
+
+impl<'a, K, V> OccupiedEntry<'a, K, V> {
+ #[inline]
+ pub fn key(&self) -> &K {
+ self.raw_entry.key()
+ }
+
+ #[inline]
+ pub fn remove_entry(self) -> (K, V) {
+ self.raw_entry.remove_entry()
+ }
+
+ #[inline]
+ pub fn get(&self) -> &V {
+ self.raw_entry.get()
+ }
+
+ #[inline]
+ pub fn get_mut(&mut self) -> &mut V {
+ self.raw_entry.get_mut()
+ }
+
+ #[inline]
+ pub fn into_mut(self) -> &'a mut V {
+ self.raw_entry.into_mut()
+ }
+
+ #[inline]
+ pub fn to_back(&mut self) {
+ self.raw_entry.to_back()
+ }
+
+ #[inline]
+ pub fn to_front(&mut self) {
+ self.raw_entry.to_front()
+ }
+
+ /// Replaces this entry's value with the provided value.
+ ///
+ /// Similarly to `LinkedHashMap::insert`, this moves the existing entry to the back of the
+ /// internal linked list.
+ #[inline]
+ pub fn insert(&mut self, value: V) -> V {
+ self.raw_entry.to_back();
+ self.raw_entry.replace_value(value)
+ }
+
+ #[inline]
+ pub fn remove(self) -> V {
+ self.raw_entry.remove()
+ }
+
+ /// Similar to `OccupiedEntry::replace_entry`, but *does* move the entry to the back of the
+ /// internal linked list.
+ #[inline]
+ pub fn insert_entry(mut self, value: V) -> (K, V) {
+ self.raw_entry.to_back();
+ self.replace_entry(value)
+ }
+
+ /// Replaces the entry's key with the key provided to `LinkedHashMap::entry`, and replaces the
+ /// entry's value with the given `value` parameter.
+ ///
+ /// Does *not* move the entry to the back of the internal linked list.
+ pub fn replace_entry(mut self, value: V) -> (K, V) {
+ let old_key = mem::replace(self.raw_entry.key_mut(), self.key);
+ let old_value = mem::replace(self.raw_entry.get_mut(), value);
+ (old_key, old_value)
+ }
+
+ /// Replaces this entry's key with the key provided to `LinkedHashMap::entry`.
+ ///
+ /// Does *not* move the entry to the back of the internal linked list.
+ #[inline]
+ pub fn replace_key(mut self) -> K {
+ mem::replace(self.raw_entry.key_mut(), self.key)
+ }
+}
+
+pub struct VacantEntry<'a, K, V, S> {
+ key: K,
+ raw_entry: RawVacantEntryMut<'a, K, V, S>,
+}
+
+impl<K: fmt::Debug, V, S> fmt::Debug for VacantEntry<'_, K, V, S> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("VacantEntry").field(self.key()).finish()
+ }
+}
+
+impl<'a, K, V, S> VacantEntry<'a, K, V, S> {
+ #[inline]
+ pub fn key(&self) -> &K {
+ &self.key
+ }
+
+ #[inline]
+ pub fn into_key(self) -> K {
+ self.key
+ }
+
+ /// Inserts the key for this vacant entry paired with the given value as a new entry at the
+ /// *back* of the internal linked list.
+ #[inline]
+ pub fn insert(self, value: V) -> &'a mut V
+ where
+ K: Hash,
+ S: BuildHasher,
+ {
+ self.raw_entry.insert(self.key, value).1
+ }
+}
+
+pub struct RawEntryBuilder<'a, K, V, S> {
+ hash_builder: &'a S,
+ entry: hash_map::RawEntryBuilder<'a, NonNull<Node<K, V>>, (), NullHasher>,
+}
+
+impl<'a, K, V, S> RawEntryBuilder<'a, K, V, S>
+where
+ S: BuildHasher,
+{
+ #[inline]
+ pub fn from_key<Q>(self, k: &Q) -> Option<(&'a K, &'a V)>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ let hash = hash_key(self.hash_builder, k);
+ self.from_key_hashed_nocheck(hash, k)
+ }
+
+ #[inline]
+ pub fn from_key_hashed_nocheck<Q>(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ self.from_hash(hash, move |o| k.eq(o.borrow()))
+ }
+
+ #[inline]
+ pub fn from_hash(
+ self,
+ hash: u64,
+ mut is_match: impl FnMut(&K) -> bool,
+ ) -> Option<(&'a K, &'a V)> {
+ unsafe {
+ let node = *self
+ .entry
+ .from_hash(hash, move |k| is_match((*k).as_ref().key_ref()))?
+ .0;
+
+ let (key, value) = (*node.as_ptr()).entry_ref();
+ Some((key, value))
+ }
+ }
+}
+
+unsafe impl<'a, K, V, S> Send for RawEntryBuilder<'a, K, V, S>
+where
+ K: Send,
+ V: Send,
+ S: Send,
+{
+}
+
+unsafe impl<'a, K, V, S> Sync for RawEntryBuilder<'a, K, V, S>
+where
+ K: Sync,
+ V: Sync,
+ S: Sync,
+{
+}
+
+pub struct RawEntryBuilderMut<'a, K, V, S> {
+ hash_builder: &'a S,
+ values: &'a mut Option<NonNull<Node<K, V>>>,
+ free: &'a mut Option<NonNull<Node<K, V>>>,
+ entry: hash_map::RawEntryBuilderMut<'a, NonNull<Node<K, V>>, (), NullHasher>,
+}
+
+impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S>
+where
+ S: BuildHasher,
+{
+ #[inline]
+ pub fn from_key<Q>(self, k: &Q) -> RawEntryMut<'a, K, V, S>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ let hash = hash_key(self.hash_builder, k);
+ self.from_key_hashed_nocheck(hash, k)
+ }
+
+ #[inline]
+ pub fn from_key_hashed_nocheck<Q>(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ self.from_hash(hash, move |o| k.eq(o.borrow()))
+ }
+
+ #[inline]
+ pub fn from_hash(
+ self,
+ hash: u64,
+ mut is_match: impl FnMut(&K) -> bool,
+ ) -> RawEntryMut<'a, K, V, S> {
+ let entry = self
+ .entry
+ .from_hash(hash, move |k| is_match(unsafe { (*k).as_ref().key_ref() }));
+
+ match entry {
+ hash_map::RawEntryMut::Occupied(occupied) => {
+ RawEntryMut::Occupied(RawOccupiedEntryMut {
+ free: self.free,
+ values: self.values,
+ entry: occupied,
+ })
+ }
+ hash_map::RawEntryMut::Vacant(vacant) => RawEntryMut::Vacant(RawVacantEntryMut {
+ hash_builder: self.hash_builder,
+ values: self.values,
+ free: self.free,
+ entry: vacant,
+ }),
+ }
+ }
+}
+
+unsafe impl<'a, K, V, S> Send for RawEntryBuilderMut<'a, K, V, S>
+where
+ K: Send,
+ V: Send,
+ S: Send,
+{
+}
+
+unsafe impl<'a, K, V, S> Sync for RawEntryBuilderMut<'a, K, V, S>
+where
+ K: Sync,
+ V: Sync,
+ S: Sync,
+{
+}
+
+pub enum RawEntryMut<'a, K, V, S> {
+ Occupied(RawOccupiedEntryMut<'a, K, V>),
+ Vacant(RawVacantEntryMut<'a, K, V, S>),
+}
+
+impl<'a, K, V, S> RawEntryMut<'a, K, V, S> {
+ /// Similarly to `Entry::or_insert`, if this entry is occupied, it will move the existing entry
+ /// to the back of the internal linked list.
+ #[inline]
+ pub fn or_insert(self, default_key: K, default_val: V) -> (&'a mut K, &'a mut V)
+ where
+ K: Hash,
+ S: BuildHasher,
+ {
+ match self {
+ RawEntryMut::Occupied(mut entry) => {
+ entry.to_back();
+ entry.into_key_value()
+ }
+ RawEntryMut::Vacant(entry) => entry.insert(default_key, default_val),
+ }
+ }
+
+ /// Similarly to `Entry::or_insert_with`, if this entry is occupied, it will move the existing
+ /// entry to the back of the internal linked list.
+ #[inline]
+ pub fn or_insert_with<F>(self, default: F) -> (&'a mut K, &'a mut V)
+ where
+ F: FnOnce() -> (K, V),
+ K: Hash,
+ S: BuildHasher,
+ {
+ match self {
+ RawEntryMut::Occupied(mut entry) => {
+ entry.to_back();
+ entry.into_key_value()
+ }
+ RawEntryMut::Vacant(entry) => {
+ let (k, v) = default();
+ entry.insert(k, v)
+ }
+ }
+ }
+
+ #[inline]
+ pub fn and_modify<F>(self, f: F) -> Self
+ where
+ F: FnOnce(&mut K, &mut V),
+ {
+ match self {
+ RawEntryMut::Occupied(mut entry) => {
+ {
+ let (k, v) = entry.get_key_value_mut();
+ f(k, v);
+ }
+ RawEntryMut::Occupied(entry)
+ }
+ RawEntryMut::Vacant(entry) => RawEntryMut::Vacant(entry),
+ }
+ }
+}
+
+pub struct RawOccupiedEntryMut<'a, K, V> {
+ free: &'a mut Option<NonNull<Node<K, V>>>,
+ values: &'a mut Option<NonNull<Node<K, V>>>,
+ entry: hash_map::RawOccupiedEntryMut<'a, NonNull<Node<K, V>>, (), NullHasher>,
+}
+
+impl<'a, K, V> RawOccupiedEntryMut<'a, K, V> {
+ #[inline]
+ pub fn key(&self) -> &K {
+ self.get_key_value().0
+ }
+
+ #[inline]
+ pub fn key_mut(&mut self) -> &mut K {
+ self.get_key_value_mut().0
+ }
+
+ #[inline]
+ pub fn into_key(self) -> &'a mut K {
+ self.into_key_value().0
+ }
+
+ #[inline]
+ pub fn get(&self) -> &V {
+ self.get_key_value().1
+ }
+
+ #[inline]
+ pub fn get_mut(&mut self) -> &mut V {
+ self.get_key_value_mut().1
+ }
+
+ #[inline]
+ pub fn into_mut(self) -> &'a mut V {
+ self.into_key_value().1
+ }
+
+ #[inline]
+ pub fn get_key_value(&self) -> (&K, &V) {
+ unsafe {
+ let node = *self.entry.key();
+ let (key, value) = (*node.as_ptr()).entry_ref();
+ (key, value)
+ }
+ }
+
+ #[inline]
+ pub fn get_key_value_mut(&mut self) -> (&mut K, &mut V) {
+ unsafe {
+ let node = *self.entry.key_mut();
+ let (key, value) = (*node.as_ptr()).entry_mut();
+ (key, value)
+ }
+ }
+
+ #[inline]
+ pub fn into_key_value(self) -> (&'a mut K, &'a mut V) {
+ unsafe {
+ let node = *self.entry.into_key();
+ let (key, value) = (*node.as_ptr()).entry_mut();
+ (key, value)
+ }
+ }
+
+ #[inline]
+ pub fn to_back(&mut self) {
+ unsafe {
+ let node = *self.entry.key_mut();
+ detach_node(node);
+ attach_before(node, NonNull::new_unchecked(self.values.as_ptr()));
+ }
+ }
+
+ #[inline]
+ pub fn to_front(&mut self) {
+ unsafe {
+ let node = *self.entry.key_mut();
+ detach_node(node);
+ attach_before(node, (*self.values.as_ptr()).links.value.next);
+ }
+ }
+
+ #[inline]
+ pub fn replace_value(&mut self, value: V) -> V {
+ unsafe {
+ let mut node = *self.entry.key_mut();
+ mem::replace(&mut node.as_mut().entry_mut().1, value)
+ }
+ }
+
+ #[inline]
+ pub fn replace_key(&mut self, key: K) -> K {
+ unsafe {
+ let mut node = *self.entry.key_mut();
+ mem::replace(&mut node.as_mut().entry_mut().0, key)
+ }
+ }
+
+ #[inline]
+ pub fn remove(self) -> V {
+ self.remove_entry().1
+ }
+
+ #[inline]
+ pub fn remove_entry(self) -> (K, V) {
+ let node = self.entry.remove_entry().0;
+ unsafe { remove_node(self.free, node) }
+ }
+}
+
+pub struct RawVacantEntryMut<'a, K, V, S> {
+ hash_builder: &'a S,
+ values: &'a mut Option<NonNull<Node<K, V>>>,
+ free: &'a mut Option<NonNull<Node<K, V>>>,
+ entry: hash_map::RawVacantEntryMut<'a, NonNull<Node<K, V>>, (), NullHasher>,
+}
+
+impl<'a, K, V, S> RawVacantEntryMut<'a, K, V, S> {
+ #[inline]
+ pub fn insert(self, key: K, value: V) -> (&'a mut K, &'a mut V)
+ where
+ K: Hash,
+ S: BuildHasher,
+ {
+ let hash = hash_key(self.hash_builder, &key);
+ self.insert_hashed_nocheck(hash, key, value)
+ }
+
+ #[inline]
+ pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V)
+ where
+ K: Hash,
+ S: BuildHasher,
+ {
+ let hash_builder = self.hash_builder;
+ self.insert_with_hasher(hash, key, value, |k| hash_key(hash_builder, k))
+ }
+
+ #[inline]
+ pub fn insert_with_hasher(
+ self,
+ hash: u64,
+ key: K,
+ value: V,
+ hasher: impl Fn(&K) -> u64,
+ ) -> (&'a mut K, &'a mut V)
+ where
+ S: BuildHasher,
+ {
+ unsafe {
+ ensure_guard_node(self.values);
+ let mut new_node = allocate_node(self.free);
+ new_node.as_mut().put_entry((key, value));
+ attach_before(new_node, NonNull::new_unchecked(self.values.as_ptr()));
+
+ let node = *self
+ .entry
+ .insert_with_hasher(hash, new_node, (), move |k| hasher((*k).as_ref().key_ref()))
+ .0;
+
+ let (key, value) = (*node.as_ptr()).entry_mut();
+ (key, value)
+ }
+ }
+}
+
+impl<K, V, S> fmt::Debug for RawEntryBuilderMut<'_, K, V, S> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("RawEntryBuilder").finish()
+ }
+}
+
+impl<K: fmt::Debug, V: fmt::Debug, S> fmt::Debug for RawEntryMut<'_, K, V, S> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ RawEntryMut::Vacant(ref v) => f.debug_tuple("RawEntry").field(v).finish(),
+ RawEntryMut::Occupied(ref o) => f.debug_tuple("RawEntry").field(o).finish(),
+ }
+ }
+}
+
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for RawOccupiedEntryMut<'_, K, V> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("RawOccupiedEntryMut")
+ .field("key", self.key())
+ .field("value", self.get())
+ .finish()
+ }
+}
+
+impl<K, V, S> fmt::Debug for RawVacantEntryMut<'_, K, V, S> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("RawVacantEntryMut").finish()
+ }
+}
+
+impl<K, V, S> fmt::Debug for RawEntryBuilder<'_, K, V, S> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("RawEntryBuilder").finish()
+ }
+}
+
+unsafe impl<'a, K, V> Send for RawOccupiedEntryMut<'a, K, V>
+where
+ K: Send,
+ V: Send,
+{
+}
+
+unsafe impl<'a, K, V> Sync for RawOccupiedEntryMut<'a, K, V>
+where
+ K: Sync,
+ V: Sync,
+{
+}
+
+unsafe impl<'a, K, V, S> Send for RawVacantEntryMut<'a, K, V, S>
+where
+ K: Send,
+ V: Send,
+ S: Send,
+{
+}
+
+unsafe impl<'a, K, V, S> Sync for RawVacantEntryMut<'a, K, V, S>
+where
+ K: Sync,
+ V: Sync,
+ S: Sync,
+{
+}
+
+pub struct Iter<'a, K, V> {
+ head: *const Node<K, V>,
+ tail: *const Node<K, V>,
+ remaining: usize,
+ marker: PhantomData<(&'a K, &'a V)>,
+}
+
+pub struct IterMut<'a, K, V> {
+ head: Option<NonNull<Node<K, V>>>,
+ tail: Option<NonNull<Node<K, V>>>,
+ remaining: usize,
+ marker: PhantomData<(&'a K, &'a mut V)>,
+}
+
+pub struct IntoIter<K, V> {
+ head: Option<NonNull<Node<K, V>>>,
+ tail: Option<NonNull<Node<K, V>>>,
+ remaining: usize,
+ marker: PhantomData<(K, V)>,
+}
+
+pub struct Drain<'a, K, V> {
+ free: NonNull<Option<NonNull<Node<K, V>>>>,
+ head: Option<NonNull<Node<K, V>>>,
+ tail: Option<NonNull<Node<K, V>>>,
+ remaining: usize,
+ // We want `Drain` to be covariant
+ marker: PhantomData<(K, V, &'a LinkedHashMap<K, V>)>,
+}
+
+impl<K, V> IterMut<'_, K, V> {
+ #[inline]
+ pub(crate) fn iter(&self) -> Iter<'_, K, V> {
+ Iter {
+ head: self.head.as_ptr(),
+ tail: self.tail.as_ptr(),
+ remaining: self.remaining,
+ marker: PhantomData,
+ }
+ }
+}
+
+impl<K, V> IntoIter<K, V> {
+ #[inline]
+ pub(crate) fn iter(&self) -> Iter<'_, K, V> {
+ Iter {
+ head: self.head.as_ptr(),
+ tail: self.tail.as_ptr(),
+ remaining: self.remaining,
+ marker: PhantomData,
+ }
+ }
+}
+
+impl<K, V> Drain<'_, K, V> {
+ #[inline]
+ pub(crate) fn iter(&self) -> Iter<'_, K, V> {
+ Iter {
+ head: self.head.as_ptr(),
+ tail: self.tail.as_ptr(),
+ remaining: self.remaining,
+ marker: PhantomData,
+ }
+ }
+}
+
+unsafe impl<'a, K, V> Send for Iter<'a, K, V>
+where
+ K: Send,
+ V: Send,
+{
+}
+
+unsafe impl<'a, K, V> Send for IterMut<'a, K, V>
+where
+ K: Send,
+ V: Send,
+{
+}
+
+unsafe impl<K, V> Send for IntoIter<K, V>
+where
+ K: Send,
+ V: Send,
+{
+}
+
+unsafe impl<'a, K, V> Send for Drain<'a, K, V>
+where
+ K: Send,
+ V: Send,
+{
+}
+
+unsafe impl<'a, K, V> Sync for Iter<'a, K, V>
+where
+ K: Sync,
+ V: Sync,
+{
+}
+
+unsafe impl<'a, K, V> Sync for IterMut<'a, K, V>
+where
+ K: Sync,
+ V: Sync,
+{
+}
+
+unsafe impl<K, V> Sync for IntoIter<K, V>
+where
+ K: Sync,
+ V: Sync,
+{
+}
+
+unsafe impl<'a, K, V> Sync for Drain<'a, K, V>
+where
+ K: Sync,
+ V: Sync,
+{
+}
+
+impl<'a, K, V> Clone for Iter<'a, K, V> {
+ #[inline]
+ fn clone(&self) -> Self {
+ Iter { ..*self }
+ }
+}
+
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for Iter<'_, K, V> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+impl<K, V> fmt::Debug for IterMut<'_, K, V>
+where
+ K: fmt::Debug,
+ V: fmt::Debug,
+{
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.iter()).finish()
+ }
+}
+
+impl<K, V> fmt::Debug for IntoIter<K, V>
+where
+ K: fmt::Debug,
+ V: fmt::Debug,
+{
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.iter()).finish()
+ }
+}
+
+impl<K, V> fmt::Debug for Drain<'_, K, V>
+where
+ K: fmt::Debug,
+ V: fmt::Debug,
+{
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.iter()).finish()
+ }
+}
+
+impl<'a, K, V> Iterator for Iter<'a, K, V> {
+ type Item = (&'a K, &'a V);
+
+ #[inline]
+ fn next(&mut self) -> Option<(&'a K, &'a V)> {
+ if self.remaining == 0 {
+ None
+ } else {
+ self.remaining -= 1;
+ unsafe {
+ let (key, value) = (*self.head).entry_ref();
+ self.head = (*self.head).links.value.next.as_ptr();
+ Some((key, value))
+ }
+ }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.remaining, Some(self.remaining))
+ }
+}
+
+impl<'a, K, V> Iterator for IterMut<'a, K, V> {
+ type Item = (&'a K, &'a mut V);
+
+ #[inline]
+ fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
+ if self.remaining == 0 {
+ None
+ } else {
+ self.remaining -= 1;
+ unsafe {
+ let head = self.head.as_ptr();
+ let (key, value) = (*head).entry_mut();
+ self.head = Some((*head).links.value.next);
+ Some((key, value))
+ }
+ }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.remaining, Some(self.remaining))
+ }
+}
+
+impl<K, V> Iterator for IntoIter<K, V> {
+ type Item = (K, V);
+
+ #[inline]
+ fn next(&mut self) -> Option<(K, V)> {
+ if self.remaining == 0 {
+ return None;
+ }
+ self.remaining -= 1;
+ unsafe {
+ let head = self.head.as_ptr();
+ self.head = Some((*head).links.value.next);
+ let mut e = Box::from_raw(head);
+ Some(e.take_entry())
+ }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.remaining, Some(self.remaining))
+ }
+}
+
+impl<'a, K, V> Iterator for Drain<'a, K, V> {
+ type Item = (K, V);
+
+ #[inline]
+ fn next(&mut self) -> Option<(K, V)> {
+ if self.remaining == 0 {
+ return None;
+ }
+ self.remaining -= 1;
+ unsafe {
+ let mut head = NonNull::new_unchecked(self.head.as_ptr());
+ self.head = Some(head.as_ref().links.value.next);
+ let entry = head.as_mut().take_entry();
+ push_free(self.free.as_mut(), head);
+ Some(entry)
+ }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.remaining, Some(self.remaining))
+ }
+}
+
+impl<'a, K, V> DoubleEndedIterator for Iter<'a, K, V> {
+ #[inline]
+ fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
+ if self.remaining == 0 {
+ None
+ } else {
+ self.remaining -= 1;
+ unsafe {
+ let tail = self.tail;
+ self.tail = (*tail).links.value.prev.as_ptr();
+ let (key, value) = (*tail).entry_ref();
+ Some((key, value))
+ }
+ }
+ }
+}
+
+impl<'a, K, V> DoubleEndedIterator for IterMut<'a, K, V> {
+ #[inline]
+ fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
+ if self.remaining == 0 {
+ None
+ } else {
+ self.remaining -= 1;
+ unsafe {
+ let tail = self.tail.as_ptr();
+ self.tail = Some((*tail).links.value.prev);
+ let (key, value) = (*tail).entry_mut();
+ Some((key, value))
+ }
+ }
+ }
+}
+
+impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
+ #[inline]
+ fn next_back(&mut self) -> Option<(K, V)> {
+ if self.remaining == 0 {
+ return None;
+ }
+ self.remaining -= 1;
+ unsafe {
+ let mut e = *Box::from_raw(self.tail.as_ptr());
+ self.tail = Some(e.links.value.prev);
+ Some(e.take_entry())
+ }
+ }
+}
+
+impl<'a, K, V> DoubleEndedIterator for Drain<'a, K, V> {
+ #[inline]
+ fn next_back(&mut self) -> Option<(K, V)> {
+ if self.remaining == 0 {
+ return None;
+ }
+ self.remaining -= 1;
+ unsafe {
+ let mut tail = NonNull::new_unchecked(self.tail.as_ptr());
+ self.tail = Some(tail.as_ref().links.value.prev);
+ let entry = tail.as_mut().take_entry();
+ push_free(&mut *self.free.as_ptr(), tail);
+ Some(entry)
+ }
+ }
+}
+
+impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> {}
+
+impl<'a, K, V> ExactSizeIterator for IterMut<'a, K, V> {}
+
+impl<K, V> ExactSizeIterator for IntoIter<K, V> {}
+
+impl<K, V> Drop for IntoIter<K, V> {
+ #[inline]
+ fn drop(&mut self) {
+ for _ in 0..self.remaining {
+ unsafe {
+ let tail = self.tail.as_ptr();
+ self.tail = Some((*tail).links.value.prev);
+ (*tail).take_entry();
+ drop(Box::from_raw(tail));
+ }
+ }
+ }
+}
+
+impl<'a, K, V> Drop for Drain<'a, K, V> {
+ #[inline]
+ fn drop(&mut self) {
+ for _ in 0..self.remaining {
+ unsafe {
+ let mut tail = NonNull::new_unchecked(self.tail.as_ptr());
+ self.tail = Some(tail.as_ref().links.value.prev);
+ tail.as_mut().take_entry();
+ push_free(&mut *self.free.as_ptr(), tail);
+ }
+ }
+ }
+}
+
+pub struct Keys<'a, K, V> {
+ inner: Iter<'a, K, V>,
+}
+
+impl<K: fmt::Debug, V> fmt::Debug for Keys<'_, K, V> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+impl<'a, K, V> Clone for Keys<'a, K, V> {
+ #[inline]
+ fn clone(&self) -> Keys<'a, K, V> {
+ Keys {
+ inner: self.inner.clone(),
+ }
+ }
+}
+
+impl<'a, K, V> Iterator for Keys<'a, K, V> {
+ type Item = &'a K;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a K> {
+ self.inner.next().map(|e| e.0)
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a K> {
+ self.inner.next_back().map(|e| e.0)
+ }
+}
+
+impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> {
+ #[inline]
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+pub struct Values<'a, K, V> {
+ inner: Iter<'a, K, V>,
+}
+
+impl<K, V> Clone for Values<'_, K, V> {
+ #[inline]
+ fn clone(&self) -> Self {
+ Values {
+ inner: self.inner.clone(),
+ }
+ }
+}
+
+impl<K, V: fmt::Debug> fmt::Debug for Values<'_, K, V> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+impl<'a, K, V> Iterator for Values<'a, K, V> {
+ type Item = &'a V;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a V> {
+ self.inner.next().map(|e| e.1)
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a V> {
+ self.inner.next_back().map(|e| e.1)
+ }
+}
+
+impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> {
+ #[inline]
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+pub struct ValuesMut<'a, K, V> {
+ inner: IterMut<'a, K, V>,
+}
+
+impl<K, V> fmt::Debug for ValuesMut<'_, K, V>
+where
+ K: fmt::Debug,
+ V: fmt::Debug,
+{
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.inner.iter()).finish()
+ }
+}
+
+impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
+ type Item = &'a mut V;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a mut V> {
+ self.inner.next().map(|e| e.1)
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+impl<'a, K, V> DoubleEndedIterator for ValuesMut<'a, K, V> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a mut V> {
+ self.inner.next_back().map(|e| e.1)
+ }
+}
+
+impl<'a, K, V> ExactSizeIterator for ValuesMut<'a, K, V> {
+ #[inline]
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+impl<'a, K, V, S> IntoIterator for &'a LinkedHashMap<K, V, S> {
+ type Item = (&'a K, &'a V);
+ type IntoIter = Iter<'a, K, V>;
+
+ #[inline]
+ fn into_iter(self) -> Iter<'a, K, V> {
+ self.iter()
+ }
+}
+
+impl<'a, K, V, S> IntoIterator for &'a mut LinkedHashMap<K, V, S> {
+ type Item = (&'a K, &'a mut V);
+ type IntoIter = IterMut<'a, K, V>;
+
+ #[inline]
+ fn into_iter(self) -> IterMut<'a, K, V> {
+ self.iter_mut()
+ }
+}
+
+impl<K, V, S> IntoIterator for LinkedHashMap<K, V, S> {
+ type Item = (K, V);
+ type IntoIter = IntoIter<K, V>;
+
+ #[inline]
+ fn into_iter(mut self) -> IntoIter<K, V> {
+ unsafe {
+ let (head, tail) = if let Some(values) = self.values {
+ let ValueLinks {
+ next: head,
+ prev: tail,
+ } = values.as_ref().links.value;
+
+ drop(Box::from_raw(self.values.as_ptr()));
+ self.values = None;
+
+ (Some(head), Some(tail))
+ } else {
+ (None, None)
+ };
+ let len = self.len();
+
+ drop_free_nodes(self.free);
+ self.free = None;
+
+ self.map.clear();
+
+ IntoIter {
+ head,
+ tail,
+ remaining: len,
+ marker: PhantomData,
+ }
+ }
+ }
+}
+
+// A ZST that asserts that the inner HashMap will not do its own key hashing
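+// (Keys are hashed up front with `hash_key` below, so the inner map's own hasher is never
+// invoked; reaching either of these methods indicates a bug.)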
+struct NullHasher;
+
+impl BuildHasher for NullHasher {
+ type Hasher = Self;
+
+ #[inline]
+ fn build_hasher(&self) -> Self {
+ Self
+ }
+}
+
+impl Hasher for NullHasher {
+ #[inline]
+ fn write(&mut self, _bytes: &[u8]) {
+ unreachable!("inner map should not be using its built-in hasher")
+ }
+
+ #[inline]
+ fn finish(&self) -> u64 {
+ unreachable!("inner map should not be using its built-in hasher")
+ }
+}
+
+struct ValueLinks<K, V> {
+ next: NonNull<Node<K, V>>,
+ prev: NonNull<Node<K, V>>,
+}
+
+impl<K, V> Clone for ValueLinks<K, V> {
+ #[inline]
+ fn clone(&self) -> Self {
+ ValueLinks {
+ next: self.next,
+ prev: self.prev,
+ }
+ }
+}
+
+impl<K, V> Copy for ValueLinks<K, V> {}
+
+struct FreeLink<K, V> {
+ next: Option<NonNull<Node<K, V>>>,
+}
+
+impl<K, V> Clone for FreeLink<K, V> {
+ #[inline]
+ fn clone(&self) -> Self {
+ FreeLink { next: self.next }
+ }
+}
+
+impl<K, V> Copy for FreeLink<K, V> {}
+
+union Links<K, V> {
+ value: ValueLinks<K, V>,
+ free: FreeLink<K, V>,
+}
+
+struct Node<K, V> {
+ entry: MaybeUninit<(K, V)>,
+ links: Links<K, V>,
+}
+
+impl<K, V> Node<K, V> {
+ #[inline]
+ unsafe fn put_entry(&mut self, entry: (K, V)) {
+ self.entry.as_mut_ptr().write(entry)
+ }
+
+ #[inline]
+ unsafe fn entry_ref(&self) -> &(K, V) {
+ &*self.entry.as_ptr()
+ }
+
+ #[inline]
+ unsafe fn key_ref(&self) -> &K {
+ &(*self.entry.as_ptr()).0
+ }
+
+ #[inline]
+ unsafe fn entry_mut(&mut self) -> &mut (K, V) {
+ &mut *self.entry.as_mut_ptr()
+ }
+
+ #[inline]
+ unsafe fn take_entry(&mut self) -> (K, V) {
+ self.entry.as_ptr().read()
+ }
+}
+
+trait OptNonNullExt<T> {
+ fn as_ptr(self) -> *mut T;
+}
+
+impl<T> OptNonNullExt<T> for Option<NonNull<T>> {
+ #[inline]
+ fn as_ptr(self) -> *mut T {
+ match self {
+ Some(ptr) => ptr.as_ptr(),
+ None => ptr::null_mut(),
+ }
+ }
+}
+
+// Allocate a circular list guard node if not present.
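+//
+// The guard node's `next` pointer is the logical front of the list and its `prev` pointer the
+// logical back; an empty list is represented by the guard pointing at itself.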
+#[inline]
+unsafe fn ensure_guard_node<K, V>(head: &mut Option<NonNull<Node<K, V>>>) {
+ if head.is_none() {
+ let mut p = NonNull::new_unchecked(Box::into_raw(Box::new(Node {
+ entry: MaybeUninit::uninit(),
+ links: Links {
+ value: ValueLinks {
+ next: NonNull::dangling(),
+ prev: NonNull::dangling(),
+ },
+ },
+ })));
+ p.as_mut().links.value = ValueLinks { next: p, prev: p };
+ *head = Some(p);
+ }
+}
+
+// Attach the `to_attach` node to the existing circular list *before* `node`.
+#[inline]
+unsafe fn attach_before<K, V>(mut to_attach: NonNull<Node<K, V>>, mut node: NonNull<Node<K, V>>) {
+ to_attach.as_mut().links.value = ValueLinks {
+ prev: node.as_ref().links.value.prev,
+ next: node,
+ };
+ node.as_mut().links.value.prev = to_attach;
+ (*to_attach.as_mut().links.value.prev.as_ptr())
+ .links
+ .value
+ .next = to_attach;
+}
+
+#[inline]
+unsafe fn detach_node<K, V>(mut node: NonNull<Node<K, V>>) {
+ node.as_mut().links.value.prev.as_mut().links.value.next = node.as_ref().links.value.next;
+ node.as_mut().links.value.next.as_mut().links.value.prev = node.as_ref().links.value.prev;
+}
+
+#[inline]
+unsafe fn push_free<K, V>(
+ free_list: &mut Option<NonNull<Node<K, V>>>,
+ mut node: NonNull<Node<K, V>>,
+) {
+ node.as_mut().links.free.next = *free_list;
+ *free_list = Some(node);
+}
+
+#[inline]
+unsafe fn pop_free<K, V>(
+ free_list: &mut Option<NonNull<Node<K, V>>>,
+) -> Option<NonNull<Node<K, V>>> {
+ if let Some(free) = *free_list {
+ *free_list = free.as_ref().links.free.next;
+ Some(free)
+ } else {
+ None
+ }
+}
+
+#[inline]
+unsafe fn allocate_node<K, V>(free_list: &mut Option<NonNull<Node<K, V>>>) -> NonNull<Node<K, V>> {
+ if let Some(mut free) = pop_free(free_list) {
+ free.as_mut().links.value = ValueLinks {
+ next: NonNull::dangling(),
+ prev: NonNull::dangling(),
+ };
+ free
+ } else {
+ NonNull::new_unchecked(Box::into_raw(Box::new(Node {
+ entry: MaybeUninit::uninit(),
+ links: Links {
+ value: ValueLinks {
+ next: NonNull::dangling(),
+ prev: NonNull::dangling(),
+ },
+ },
+ })))
+ }
+}
+
+// Given node is assumed to be the guard node and is *not* dropped.
+#[inline]
+unsafe fn drop_value_nodes<K, V>(guard: NonNull<Node<K, V>>) {
+ let mut cur = guard.as_ref().links.value.prev;
+ while cur != guard {
+ let prev = cur.as_ref().links.value.prev;
+ cur.as_mut().take_entry();
+ drop(Box::from_raw(cur.as_ptr()));
+ cur = prev;
+ }
+}
+
+// Drops all linked free nodes starting with the given node. Free nodes are only singly linked
+// (non-circular), and should have uninitialized keys / values.
+#[inline]
+unsafe fn drop_free_nodes<K, V>(mut free: Option<NonNull<Node<K, V>>>) {
+ while let Some(some_free) = free {
+ let next_free = some_free.as_ref().links.free.next;
+ drop(Box::from_raw(some_free.as_ptr()));
+ free = next_free;
+ }
+}
+
+#[inline]
+unsafe fn remove_node<K, V>(
+ free_list: &mut Option<NonNull<Node<K, V>>>,
+ mut node: NonNull<Node<K, V>>,
+) -> (K, V) {
+ detach_node(node);
+ push_free(free_list, node);
+ node.as_mut().take_entry()
+}
+
+#[inline]
+fn hash_key<S, Q>(s: &S, k: &Q) -> u64
+where
+ S: BuildHasher,
+ Q: Hash + ?Sized,
+{
+ let mut hasher = s.build_hasher();
+ k.hash(&mut hasher);
+ hasher.finish()
+}
diff --git a/src/linked_hash_set.rs b/src/linked_hash_set.rs
new file mode 100644
index 0000000..1ab7dbb
--- /dev/null
+++ b/src/linked_hash_set.rs
@@ -0,0 +1,753 @@
+use std::{
+ borrow::Borrow,
+ fmt,
+ hash::{BuildHasher, Hash, Hasher},
+ iter::{Chain, FromIterator},
+ ops::{BitAnd, BitOr, BitXor, Sub},
+};
+
+use hashbrown::hash_map::DefaultHashBuilder;
+
+use crate::linked_hash_map::{self, LinkedHashMap, TryReserveError};
+
+pub struct LinkedHashSet<T, S = DefaultHashBuilder> {
+ map: LinkedHashMap<T, (), S>,
+}
+
+impl<T: Hash + Eq> LinkedHashSet<T, DefaultHashBuilder> {
+ #[inline]
+ pub fn new() -> LinkedHashSet<T, DefaultHashBuilder> {
+ LinkedHashSet {
+ map: LinkedHashMap::new(),
+ }
+ }
+
+ #[inline]
+ pub fn with_capacity(capacity: usize) -> LinkedHashSet<T, DefaultHashBuilder> {
+ LinkedHashSet {
+ map: LinkedHashMap::with_capacity(capacity),
+ }
+ }
+}
+
+impl<T, S> LinkedHashSet<T, S> {
+ #[inline]
+ pub fn capacity(&self) -> usize {
+ self.map.capacity()
+ }
+
+ #[inline]
+ pub fn iter(&self) -> Iter<'_, T> {
+ Iter {
+ iter: self.map.keys(),
+ }
+ }
+
+ #[inline]
+ pub fn len(&self) -> usize {
+ self.map.len()
+ }
+
+ #[inline]
+ pub fn is_empty(&self) -> bool {
+ self.map.is_empty()
+ }
+
+ #[inline]
+ pub fn drain(&mut self) -> Drain<T> {
+ Drain {
+ iter: self.map.drain(),
+ }
+ }
+
+ #[inline]
+ pub fn clear(&mut self) {
+ self.map.clear()
+ }
+
+ #[inline]
+ pub fn retain<F>(&mut self, mut f: F)
+ where
+ F: FnMut(&T) -> bool,
+ {
+ self.map.retain(|k, _| f(k));
+ }
+}
+
+impl<T, S> LinkedHashSet<T, S>
+where
+ T: Eq + Hash,
+ S: BuildHasher,
+{
+ #[inline]
+ pub fn with_hasher(hasher: S) -> LinkedHashSet<T, S> {
+ LinkedHashSet {
+ map: LinkedHashMap::with_hasher(hasher),
+ }
+ }
+
+ #[inline]
+ pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> LinkedHashSet<T, S> {
+ LinkedHashSet {
+ map: LinkedHashMap::with_capacity_and_hasher(capacity, hasher),
+ }
+ }
+
+ #[inline]
+ pub fn hasher(&self) -> &S {
+ self.map.hasher()
+ }
+
+ #[inline]
+ pub fn reserve(&mut self, additional: usize) {
+ self.map.reserve(additional)
+ }
+
+ #[inline]
+ pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
+ self.map.try_reserve(additional)
+ }
+
+ #[inline]
+ pub fn shrink_to_fit(&mut self) {
+ self.map.shrink_to_fit()
+ }
+
+ #[inline]
+ pub fn difference<'a>(&'a self, other: &'a LinkedHashSet<T, S>) -> Difference<'a, T, S> {
+ Difference {
+ iter: self.iter(),
+ other,
+ }
+ }
+
+ #[inline]
+ pub fn symmetric_difference<'a>(
+ &'a self,
+ other: &'a LinkedHashSet<T, S>,
+ ) -> SymmetricDifference<'a, T, S> {
+ SymmetricDifference {
+ iter: self.difference(other).chain(other.difference(self)),
+ }
+ }
+
+ #[inline]
+ pub fn intersection<'a>(&'a self, other: &'a LinkedHashSet<T, S>) -> Intersection<'a, T, S> {
+ Intersection {
+ iter: self.iter(),
+ other,
+ }
+ }
+
+ #[inline]
+ pub fn union<'a>(&'a self, other: &'a LinkedHashSet<T, S>) -> Union<'a, T, S> {
+ Union {
+ iter: self.iter().chain(other.difference(self)),
+ }
+ }
+
+ #[inline]
+ pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool
+ where
+ T: Borrow<Q>,
+ Q: Hash + Eq,
+ {
+ self.map.contains_key(value)
+ }
+
+ #[inline]
+ pub fn get<Q: ?Sized>(&self, value: &Q) -> Option<&T>
+ where
+ T: Borrow<Q>,
+ Q: Hash + Eq,
+ {
+ self.map.raw_entry().from_key(value).map(|p| p.0)
+ }
+
+ #[inline]
+ pub fn get_or_insert(&mut self, value: T) -> &T {
+ self.map
+ .raw_entry_mut()
+ .from_key(&value)
+ .or_insert(value, ())
+ .0
+ }
+
+ #[inline]
+ pub fn get_or_insert_with<Q: ?Sized, F>(&mut self, value: &Q, f: F) -> &T
+ where
+ T: Borrow<Q>,
+ Q: Hash + Eq,
+ F: FnOnce(&Q) -> T,
+ {
+ self.map
+ .raw_entry_mut()
+ .from_key(value)
+ .or_insert_with(|| (f(value), ()))
+ .0
+ }
+
+ #[inline]
+ pub fn is_disjoint(&self, other: &LinkedHashSet<T, S>) -> bool {
+ self.iter().all(|v| !other.contains(v))
+ }
+
+ #[inline]
+ pub fn is_subset(&self, other: &LinkedHashSet<T, S>) -> bool {
+ self.iter().all(|v| other.contains(v))
+ }
+
+ #[inline]
+ pub fn is_superset(&self, other: &LinkedHashSet<T, S>) -> bool {
+ other.is_subset(self)
+ }
+
+ #[inline]
+ pub fn insert(&mut self, value: T) -> bool {
+ self.map.insert(value, ()).is_none()
+ }
+
+ #[inline]
+ pub fn replace(&mut self, value: T) -> Option<T> {
+ match self.map.entry(value) {
+ linked_hash_map::Entry::Occupied(occupied) => Some(occupied.replace_key()),
+ linked_hash_map::Entry::Vacant(vacant) => {
+ vacant.insert(());
+ None
+ }
+ }
+ }
+
+ #[inline]
+ pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool
+ where
+ T: Borrow<Q>,
+ Q: Hash + Eq,
+ {
+ self.map.remove(value).is_some()
+ }
+
+ #[inline]
+ pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T>
+ where
+ T: Borrow<Q>,
+ Q: Hash + Eq,
+ {
+ match self.map.raw_entry_mut().from_key(value) {
+ linked_hash_map::RawEntryMut::Occupied(occupied) => Some(occupied.remove_entry().0),
+ linked_hash_map::RawEntryMut::Vacant(_) => None,
+ }
+ }
+
+ #[inline]
+ pub fn front(&self) -> Option<&T> {
+ self.map.front().map(|(k, _)| k)
+ }
+
+ #[inline]
+ pub fn pop_front(&mut self) -> Option<T> {
+ self.map.pop_front().map(|(k, _)| k)
+ }
+
+ #[inline]
+ pub fn back(&mut self) -> Option<&T> {
+ self.map.back().map(|(k, _)| k)
+ }
+
+ #[inline]
+ pub fn pop_back(&mut self) -> Option<T> {
+ self.map.pop_back().map(|(k, _)| k)
+ }
+
+ #[inline]
+ pub fn to_front<Q: ?Sized>(&mut self, value: &Q) -> bool
+ where
+ T: Borrow<Q>,
+ Q: Hash + Eq,
+ {
+ match self.map.raw_entry_mut().from_key(value) {
+ linked_hash_map::RawEntryMut::Occupied(mut occupied) => {
+ occupied.to_front();
+ true
+ }
+ linked_hash_map::RawEntryMut::Vacant(_) => false,
+ }
+ }
+
+ #[inline]
+ pub fn to_back<Q: ?Sized>(&mut self, value: &Q) -> bool
+ where
+ T: Borrow<Q>,
+ Q: Hash + Eq,
+ {
+ match self.map.raw_entry_mut().from_key(value) {
+ linked_hash_map::RawEntryMut::Occupied(mut occupied) => {
+ occupied.to_back();
+ true
+ }
+ linked_hash_map::RawEntryMut::Vacant(_) => false,
+ }
+ }
+}
+
+impl<T: Hash + Eq + Clone, S: BuildHasher + Clone> Clone for LinkedHashSet<T, S> {
+ #[inline]
+ fn clone(&self) -> Self {
+ let map = self.map.clone();
+ Self { map }
+ }
+}
+
+impl<T, S> PartialEq for LinkedHashSet<T, S>
+where
+ T: Eq + Hash,
+ S: BuildHasher,
+{
+ #[inline]
+ fn eq(&self, other: &LinkedHashSet<T, S>) -> bool {
+ if self.len() != other.len() {
+ return false;
+ }
+
+ self.iter().all(|key| other.contains(key))
+ }
+}
+
+impl<T, S> Hash for LinkedHashSet<T, S>
+where
+ T: Eq + Hash,
+ S: BuildHasher,
+{
+ #[inline]
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ for e in self {
+ e.hash(state);
+ }
+ }
+}
+
+impl<T, S> Eq for LinkedHashSet<T, S>
+where
+ T: Eq + Hash,
+ S: BuildHasher,
+{
+}
+
+impl<T, S> fmt::Debug for LinkedHashSet<T, S>
+where
+ T: fmt::Debug,
+{
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_set().entries(self.iter()).finish()
+ }
+}
+
+impl<T, S> FromIterator<T> for LinkedHashSet<T, S>
+where
+ T: Eq + Hash,
+ S: BuildHasher + Default,
+{
+ #[inline]
+ fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> LinkedHashSet<T, S> {
+ let mut set = LinkedHashSet::with_hasher(Default::default());
+ set.extend(iter);
+ set
+ }
+}
+
+impl<T, S> Extend<T> for LinkedHashSet<T, S>
+where
+ T: Eq + Hash,
+ S: BuildHasher,
+{
+ #[inline]
+ fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+ self.map.extend(iter.into_iter().map(|k| (k, ())));
+ }
+}
+
+impl<'a, T, S> Extend<&'a T> for LinkedHashSet<T, S>
+where
+ T: 'a + Eq + Hash + Copy,
+ S: BuildHasher,
+{
+ #[inline]
+ fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+ self.extend(iter.into_iter().cloned());
+ }
+}
+
+impl<T, S> Default for LinkedHashSet<T, S>
+where
+ S: Default,
+{
+ #[inline]
+ fn default() -> LinkedHashSet<T, S> {
+ LinkedHashSet {
+ map: LinkedHashMap::default(),
+ }
+ }
+}
+
+impl<'a, 'b, T, S> BitOr<&'b LinkedHashSet<T, S>> for &'a LinkedHashSet<T, S>
+where
+ T: Eq + Hash + Clone,
+ S: BuildHasher + Default,
+{
+ type Output = LinkedHashSet<T, S>;
+
+ #[inline]
+ fn bitor(self, rhs: &LinkedHashSet<T, S>) -> LinkedHashSet<T, S> {
+ self.union(rhs).cloned().collect()
+ }
+}
+
+impl<'a, 'b, T, S> BitAnd<&'b LinkedHashSet<T, S>> for &'a LinkedHashSet<T, S>
+where
+ T: Eq + Hash + Clone,
+ S: BuildHasher + Default,
+{
+ type Output = LinkedHashSet<T, S>;
+
+ #[inline]
+ fn bitand(self, rhs: &LinkedHashSet<T, S>) -> LinkedHashSet<T, S> {
+ self.intersection(rhs).cloned().collect()
+ }
+}
+
+impl<'a, 'b, T, S> BitXor<&'b LinkedHashSet<T, S>> for &'a LinkedHashSet<T, S>
+where
+ T: Eq + Hash + Clone,
+ S: BuildHasher + Default,
+{
+ type Output = LinkedHashSet<T, S>;
+
+ #[inline]
+ fn bitxor(self, rhs: &LinkedHashSet<T, S>) -> LinkedHashSet<T, S> {
+ self.symmetric_difference(rhs).cloned().collect()
+ }
+}
+
+impl<'a, 'b, T, S> Sub<&'b LinkedHashSet<T, S>> for &'a LinkedHashSet<T, S>
+where
+ T: Eq + Hash + Clone,
+ S: BuildHasher + Default,
+{
+ type Output = LinkedHashSet<T, S>;
+
+ #[inline]
+ fn sub(self, rhs: &LinkedHashSet<T, S>) -> LinkedHashSet<T, S> {
+ self.difference(rhs).cloned().collect()
+ }
+}
+
+pub struct Iter<'a, K> {
+ iter: linked_hash_map::Keys<'a, K, ()>,
+}
+
+pub struct IntoIter<K> {
+ iter: linked_hash_map::IntoIter<K, ()>,
+}
+
+pub struct Drain<'a, K: 'a> {
+ iter: linked_hash_map::Drain<'a, K, ()>,
+}
+
+pub struct Intersection<'a, T, S> {
+ iter: Iter<'a, T>,
+ other: &'a LinkedHashSet<T, S>,
+}
+
+pub struct Difference<'a, T, S> {
+ iter: Iter<'a, T>,
+ other: &'a LinkedHashSet<T, S>,
+}
+
+pub struct SymmetricDifference<'a, T, S> {
+ iter: Chain<Difference<'a, T, S>, Difference<'a, T, S>>,
+}
+
+pub struct Union<'a, T, S> {
+ iter: Chain<Iter<'a, T>, Difference<'a, T, S>>,
+}
+
+impl<'a, T, S> IntoIterator for &'a LinkedHashSet<T, S> {
+ type Item = &'a T;
+ type IntoIter = Iter<'a, T>;
+
+ #[inline]
+ fn into_iter(self) -> Iter<'a, T> {
+ self.iter()
+ }
+}
+
+impl<T, S> IntoIterator for LinkedHashSet<T, S> {
+ type Item = T;
+ type IntoIter = IntoIter<T>;
+
+ #[inline]
+ fn into_iter(self) -> IntoIter<T> {
+ IntoIter {
+ iter: self.map.into_iter(),
+ }
+ }
+}
+
+impl<'a, K> Clone for Iter<'a, K> {
+ #[inline]
+ fn clone(&self) -> Iter<'a, K> {
+ Iter {
+ iter: self.iter.clone(),
+ }
+ }
+}
+impl<'a, K> Iterator for Iter<'a, K> {
+ type Item = &'a K;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a K> {
+ self.iter.next()
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+}
+
+impl<'a, K> ExactSizeIterator for Iter<'a, K> {}
+
+impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a T> {
+ self.iter.next_back()
+ }
+}
+
+impl<'a, K: fmt::Debug> fmt::Debug for Iter<'a, K> {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+impl<K> Iterator for IntoIter<K> {
+ type Item = K;
+
+ #[inline]
+ fn next(&mut self) -> Option<K> {
+ self.iter.next().map(|(k, _)| k)
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+}
+
+impl<K> ExactSizeIterator for IntoIter<K> {}
+
+impl<K> DoubleEndedIterator for IntoIter<K> {
+ #[inline]
+ fn next_back(&mut self) -> Option<K> {
+ self.iter.next_back().map(|(k, _)| k)
+ }
+}
+
+impl<'a, K> Iterator for Drain<'a, K> {
+ type Item = K;
+
+ #[inline]
+ fn next(&mut self) -> Option<K> {
+ self.iter.next().map(|(k, _)| k)
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+}
+
+impl<'a, K> DoubleEndedIterator for Drain<'a, K> {
+ #[inline]
+ fn next_back(&mut self) -> Option<K> {
+ self.iter.next_back().map(|(k, _)| k)
+ }
+}
+
+impl<'a, K> ExactSizeIterator for Drain<'a, K> {}
+
+impl<'a, T, S> Clone for Intersection<'a, T, S> {
+ #[inline]
+ fn clone(&self) -> Intersection<'a, T, S> {
+ Intersection {
+ iter: self.iter.clone(),
+ ..*self
+ }
+ }
+}
+
+impl<'a, T, S> Iterator for Intersection<'a, T, S>
+where
+ T: Eq + Hash,
+ S: BuildHasher,
+{
+ type Item = &'a T;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a T> {
+ loop {
+ match self.iter.next() {
+ None => return None,
+ Some(elt) => {
+ if self.other.contains(elt) {
+ return Some(elt);
+ }
+ }
+ }
+ }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let (_, upper) = self.iter.size_hint();
+ (0, upper)
+ }
+}
+
+impl<'a, T, S> fmt::Debug for Intersection<'a, T, S>
+where
+ T: fmt::Debug + Eq + Hash,
+ S: BuildHasher,
+{
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+impl<'a, T, S> Clone for Difference<'a, T, S> {
+ #[inline]
+ fn clone(&self) -> Difference<'a, T, S> {
+ Difference {
+ iter: self.iter.clone(),
+ ..*self
+ }
+ }
+}
+
+impl<'a, T, S> Iterator for Difference<'a, T, S>
+where
+ T: Eq + Hash,
+ S: BuildHasher,
+{
+ type Item = &'a T;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a T> {
+ loop {
+ match self.iter.next() {
+ None => return None,
+ Some(elt) => {
+ if !self.other.contains(elt) {
+ return Some(elt);
+ }
+ }
+ }
+ }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let (_, upper) = self.iter.size_hint();
+ (0, upper)
+ }
+}
+
+impl<'a, T, S> fmt::Debug for Difference<'a, T, S>
+where
+ T: fmt::Debug + Eq + Hash,
+ S: BuildHasher,
+{
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+impl<'a, T, S> Clone for SymmetricDifference<'a, T, S> {
+ #[inline]
+ fn clone(&self) -> SymmetricDifference<'a, T, S> {
+ SymmetricDifference {
+ iter: self.iter.clone(),
+ }
+ }
+}
+
+impl<'a, T, S> Iterator for SymmetricDifference<'a, T, S>
+where
+ T: Eq + Hash,
+ S: BuildHasher,
+{
+ type Item = &'a T;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a T> {
+ self.iter.next()
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+}
+
+impl<'a, T, S> fmt::Debug for SymmetricDifference<'a, T, S>
+where
+ T: fmt::Debug + Eq + Hash,
+ S: BuildHasher,
+{
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+impl<'a, T, S> Clone for Union<'a, T, S> {
+ #[inline]
+ fn clone(&self) -> Union<'a, T, S> {
+ Union {
+ iter: self.iter.clone(),
+ }
+ }
+}
+
+impl<'a, T, S> fmt::Debug for Union<'a, T, S>
+where
+ T: fmt::Debug + Eq + Hash,
+ S: BuildHasher,
+{
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+impl<'a, T, S> Iterator for Union<'a, T, S>
+where
+ T: Eq + Hash,
+ S: BuildHasher,
+{
+ type Item = &'a T;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a T> {
+ self.iter.next()
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+}
diff --git a/src/lru_cache.rs b/src/lru_cache.rs
new file mode 100644
index 0000000..9e5740e
--- /dev/null
+++ b/src/lru_cache.rs
@@ -0,0 +1,292 @@
+use std::{
+ borrow::Borrow,
+ fmt,
+ hash::{BuildHasher, Hash},
+ usize,
+};
+
+use hashbrown::hash_map;
+
+use crate::linked_hash_map::{self, LinkedHashMap};
+
+pub use crate::linked_hash_map::{
+ Drain, Entry, IntoIter, Iter, IterMut, OccupiedEntry, RawEntryBuilder, RawEntryBuilderMut,
+ RawOccupiedEntryMut, RawVacantEntryMut, VacantEntry,
+};
+
+pub struct LruCache<K, V, S = hash_map::DefaultHashBuilder> {
+ map: LinkedHashMap<K, V, S>,
+ max_size: usize,
+}
+
+impl<K: Eq + Hash, V> LruCache<K, V> {
+ #[inline]
+ pub fn new(capacity: usize) -> Self {
+ LruCache {
+ map: LinkedHashMap::new(),
+ max_size: capacity,
+ }
+ }
+
+ /// Create a new unbounded `LruCache` that does not automatically evict entries.
+ ///
+ /// A simple convenience method that is equivalent to `LruCache::new(usize::MAX)`.
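+ ///
+ /// A minimal usage sketch (illustrative only; assumes this module is reachable as
+ /// `hashlink::lru_cache`, matching the file's location):
+ ///
+ /// ```
+ /// use hashlink::lru_cache::LruCache;
+ ///
+ /// let mut cache = LruCache::new_unbounded();
+ /// for i in 0..1_000 {
+ ///     cache.insert(i, i);
+ /// }
+ /// // Nothing is ever evicted automatically.
+ /// assert_eq!(cache.len(), 1_000);
+ /// ```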
+ #[inline]
+ pub fn new_unbounded() -> Self {
+ LruCache::new(usize::MAX)
+ }
+}
+
+impl<K, V, S> LruCache<K, V, S> {
+ #[inline]
+ pub fn with_hasher(capacity: usize, hash_builder: S) -> Self {
+ LruCache {
+ map: LinkedHashMap::with_hasher(hash_builder),
+ max_size: capacity,
+ }
+ }
+
+ #[inline]
+ pub fn capacity(&self) -> usize {
+ self.max_size
+ }
+
+ #[inline]
+ pub fn len(&self) -> usize {
+ self.map.len()
+ }
+
+ #[inline]
+ pub fn is_empty(&self) -> bool {
+ self.map.is_empty()
+ }
+
+ #[inline]
+ pub fn clear(&mut self) {
+ self.map.clear();
+ }
+
+ #[inline]
+ pub fn iter(&self) -> Iter<K, V> {
+ self.map.iter()
+ }
+
+ #[inline]
+ pub fn iter_mut(&mut self) -> IterMut<K, V> {
+ self.map.iter_mut()
+ }
+
+ #[inline]
+ pub fn drain(&mut self) -> Drain<K, V> {
+ self.map.drain()
+ }
+}
+
+impl<K: Eq + Hash, V, S> LruCache<K, V, S>
+where
+ S: BuildHasher,
+{
+ #[inline]
+ pub fn contains_key<Q>(&mut self, key: &Q) -> bool
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ self.get_mut(key).is_some()
+ }
+
+ /// Insert a new value into the `LruCache`.
+ ///
+ /// If necessary, the least recently used value at the front of the LRU list is removed to
+ /// make room.
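+ ///
+ /// A minimal sketch of the eviction behaviour (illustrative only):
+ ///
+ /// ```
+ /// use hashlink::lru_cache::LruCache;
+ ///
+ /// let mut cache = LruCache::new(2);
+ /// cache.insert("a", 1);
+ /// cache.insert("b", 2);
+ /// cache.insert("c", 3);
+ /// // "a" was the least recently used entry, so it was evicted to stay within capacity.
+ /// assert_eq!(cache.peek(&"a"), None);
+ /// assert_eq!(cache.len(), 2);
+ /// ```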
+ #[inline]
+ pub fn insert(&mut self, k: K, v: V) -> Option<V> {
+ let old_val = self.map.insert(k, v);
+ if self.len() > self.capacity() {
+ self.remove_lru();
+ }
+ old_val
+ }
+
+ /// Get the value for the given key, *without* marking the value as recently used and moving it
+ /// to the back of the LRU list.
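+ ///
+ /// A minimal sketch (illustrative only):
+ ///
+ /// ```
+ /// use hashlink::lru_cache::LruCache;
+ ///
+ /// let mut cache = LruCache::new(2);
+ /// cache.insert("a", 1);
+ /// cache.insert("b", 2);
+ /// assert_eq!(cache.peek(&"a"), Some(&1));
+ /// // Peeking did not refresh "a", so it is still the least recently used entry.
+ /// assert_eq!(cache.remove_lru(), Some(("a", 1)));
+ /// ```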
+ #[inline]
+ pub fn peek<Q>(&self, k: &Q) -> Option<&V>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ self.map.get(k)
+ }
+
+ /// Get the value for the given key mutably, *without* marking the value as recently used and
+ /// moving it to the back of the LRU list.
+ #[inline]
+ pub fn peek_mut<Q>(&mut self, k: &Q) -> Option<&mut V>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ self.map.get_mut(k)
+ }
+
+ /// Retrieve the given key, marking it as recently used and moving it to the back of the LRU
+ /// list.
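+ ///
+ /// A minimal sketch (illustrative only):
+ ///
+ /// ```
+ /// use hashlink::lru_cache::LruCache;
+ ///
+ /// let mut cache = LruCache::new(2);
+ /// cache.insert("a", 1);
+ /// cache.insert("b", 2);
+ /// assert_eq!(cache.get(&"a"), Some(&1));
+ /// // "a" was refreshed, so "b" is now the least recently used entry.
+ /// assert_eq!(cache.remove_lru(), Some(("b", 2)));
+ /// ```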
+ #[inline]
+ pub fn get<Q>(&mut self, k: &Q) -> Option<&V>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ self.get_mut(k).map(|v| &*v)
+ }
+
+ /// Retrieve the given key, marking it as recently used and moving it to the back of the LRU
+ /// list.
+ #[inline]
+ pub fn get_mut<Q>(&mut self, k: &Q) -> Option<&mut V>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ match self.map.raw_entry_mut().from_key(k) {
+ linked_hash_map::RawEntryMut::Occupied(mut occupied) => {
+ occupied.to_back();
+ Some(occupied.into_mut())
+ }
+ linked_hash_map::RawEntryMut::Vacant(_) => None,
+ }
+ }
+
+ /// If the returned entry is vacant, it will always have room to insert a single value. By
+ /// using the entry API, you can exceed the configured capacity by 1.
+ ///
+ /// The returned entry is not automatically moved to the back of the LRU list. By calling
+ /// `Entry::to_back` / `Entry::to_front` you can manually control the position of this entry in
+ /// the LRU list.
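+ ///
+ /// A minimal counting sketch (illustrative only; `Entry` is the re-export from the map module
+ /// shown above):
+ ///
+ /// ```
+ /// use hashlink::lru_cache::{Entry, LruCache};
+ ///
+ /// let mut cache = LruCache::new(4);
+ /// for &word in ["a", "b", "a"].iter() {
+ ///     match cache.entry(word) {
+ ///         Entry::Occupied(mut occupied) => {
+ ///             let count = *occupied.get() + 1;
+ ///             occupied.insert(count);
+ ///         }
+ ///         Entry::Vacant(vacant) => {
+ ///             vacant.insert(1);
+ ///         }
+ ///     }
+ /// }
+ /// assert_eq!(cache.peek(&"a"), Some(&2));
+ /// assert_eq!(cache.peek(&"b"), Some(&1));
+ /// ```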
+ #[inline]
+ pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S> {
+ if self.len() > self.capacity() {
+ self.remove_lru();
+ }
+ self.map.entry(key)
+ }
+
+ /// The constructed raw entry is never automatically moved to the back of the LRU list. By
+ /// calling `Entry::to_back` / `Entry::to_front` you can manually control the position of this
+ /// entry in the LRU list.
+ #[inline]
+ pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S> {
+ self.map.raw_entry()
+ }
+
+ /// If the constructed raw entry is vacant, it will always have room to insert a single value.
+ /// By using the raw entry API, you can exceed the configured capacity by 1.
+ ///
+ /// The constructed raw entry is never automatically moved to the back of the LRU list. By
+ /// calling `Entry::to_back` / `Entry::to_front` you can manually control the position of this
+ /// entry in the LRU list.
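+ ///
+ /// A minimal sketch (illustrative only; `RawEntryMut` comes from the map module):
+ ///
+ /// ```
+ /// use hashlink::linked_hash_map::RawEntryMut;
+ /// use hashlink::lru_cache::LruCache;
+ ///
+ /// let mut cache = LruCache::new(2);
+ /// cache.insert("a", 1);
+ /// match cache.raw_entry_mut().from_key(&"a") {
+ ///     RawEntryMut::Occupied(mut occupied) => {
+ ///         // Manually mark the entry as most recently used, then update it.
+ ///         occupied.to_back();
+ ///         *occupied.into_mut() += 1;
+ ///     }
+ ///     RawEntryMut::Vacant(_) => unreachable!(),
+ /// }
+ /// assert_eq!(cache.peek(&"a"), Some(&2));
+ /// ```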
+ #[inline]
+ pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S> {
+ if self.len() > self.capacity() {
+ self.remove_lru();
+ }
+ self.map.raw_entry_mut()
+ }
+
+ #[inline]
+ pub fn remove<Q>(&mut self, k: &Q) -> Option<V>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ self.map.remove(k)
+ }
+
+ #[inline]
+ pub fn remove_entry<Q>(&mut self, k: &Q) -> Option<(K, V)>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ self.map.remove_entry(k)
+ }
+
+ /// Set the new cache capacity for the `LruCache`.
+ ///
+ /// If there are more entries in the `LruCache` than the new capacity will allow, they are
+ /// removed.
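+ ///
+ /// A minimal sketch (illustrative only):
+ ///
+ /// ```
+ /// use hashlink::lru_cache::LruCache;
+ ///
+ /// let mut cache = LruCache::new(10);
+ /// cache.insert(1, "one");
+ /// cache.insert(2, "two");
+ /// cache.insert(3, "three");
+ /// cache.set_capacity(1);
+ /// // The two least recently used entries were dropped to fit the new capacity.
+ /// assert_eq!(cache.len(), 1);
+ /// assert_eq!(cache.peek(&3), Some(&"three"));
+ /// ```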
+ #[inline]
+ pub fn set_capacity(&mut self, capacity: usize) {
+ for _ in capacity..self.len() {
+ self.remove_lru();
+ }
+ self.max_size = capacity;
+ }
+
+ /// Remove the least recently used entry and return it.
+ ///
+ /// If the `LruCache` is empty this will return None.
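+ ///
+ /// A minimal sketch (illustrative only):
+ ///
+ /// ```
+ /// use hashlink::lru_cache::LruCache;
+ ///
+ /// let mut cache = LruCache::new(2);
+ /// cache.insert("a", 1);
+ /// cache.insert("b", 2);
+ /// assert_eq!(cache.remove_lru(), Some(("a", 1)));
+ /// assert_eq!(cache.remove_lru(), Some(("b", 2)));
+ /// assert_eq!(cache.remove_lru(), None);
+ /// ```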
+ #[inline]
+ pub fn remove_lru(&mut self) -> Option<(K, V)> {
+ self.map.pop_front()
+ }
+}
+
+impl<K: Hash + Eq + Clone, V: Clone, S: BuildHasher + Clone> Clone for LruCache<K, V, S> {
+ #[inline]
+ fn clone(&self) -> Self {
+ LruCache {
+ map: self.map.clone(),
+ max_size: self.max_size,
+ }
+ }
+}
+
+impl<K: Eq + Hash, V, S: BuildHasher> Extend<(K, V)> for LruCache<K, V, S> {
+ #[inline]
+ fn extend<I: IntoIterator<Item = (K, V)>>(&mut self, iter: I) {
+ for (k, v) in iter {
+ self.insert(k, v);
+ }
+ }
+}
+
+impl<K, V, S> IntoIterator for LruCache<K, V, S> {
+ type Item = (K, V);
+ type IntoIter = IntoIter<K, V>;
+
+ #[inline]
+ fn into_iter(self) -> IntoIter<K, V> {
+ self.map.into_iter()
+ }
+}
+
+impl<'a, K, V, S> IntoIterator for &'a LruCache<K, V, S> {
+ type Item = (&'a K, &'a V);
+ type IntoIter = Iter<'a, K, V>;
+
+ #[inline]
+ fn into_iter(self) -> Iter<'a, K, V> {
+ self.iter()
+ }
+}
+
+impl<'a, K, V, S> IntoIterator for &'a mut LruCache<K, V, S> {
+ type Item = (&'a K, &'a mut V);
+ type IntoIter = IterMut<'a, K, V>;
+
+ #[inline]
+ fn into_iter(self) -> IterMut<'a, K, V> {
+ self.iter_mut()
+ }
+}
+
+impl<K, V, S> fmt::Debug for LruCache<K, V, S>
+where
+ K: fmt::Debug,
+ V: fmt::Debug,
+{
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_map().entries(self.iter().rev()).finish()
+ }
+}
diff --git a/src/serde.rs b/src/serde.rs
new file mode 100644
index 0000000..b8e307c
--- /dev/null
+++ b/src/serde.rs
@@ -0,0 +1,151 @@
+use std::{
+ fmt::{self, Formatter},
+ hash::{BuildHasher, Hash},
+ marker::PhantomData,
+};
+
+use serde::{
+ de::{MapAccess, SeqAccess, Visitor},
+ ser::{SerializeMap, SerializeSeq},
+ Deserialize, Deserializer, Serialize, Serializer,
+};
+
+use crate::{LinkedHashMap, LinkedHashSet};
+
+// LinkedHashMap impls
+
+impl<K, V, S> Serialize for LinkedHashMap<K, V, S>
+where
+ K: Serialize + Eq + Hash,
+ V: Serialize,
+ S: BuildHasher,
+{
+ #[inline]
+ fn serialize<T: Serializer>(&self, serializer: T) -> Result<T::Ok, T::Error> {
+ let mut map_serializer = serializer.serialize_map(Some(self.len()))?;
+ for (k, v) in self {
+ map_serializer.serialize_key(k)?;
+ map_serializer.serialize_value(v)?;
+ }
+ map_serializer.end()
+ }
+}
+
+#[derive(Debug)]
+pub struct LinkedHashMapVisitor<K, V> {
+ marker: PhantomData<LinkedHashMap<K, V>>,
+}
+
+impl<K, V> LinkedHashMapVisitor<K, V> {
+ fn new() -> Self {
+ LinkedHashMapVisitor {
+ marker: PhantomData,
+ }
+ }
+}
+
+impl<K, V> Default for LinkedHashMapVisitor<K, V> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl<'de, K, V> Visitor<'de> for LinkedHashMapVisitor<K, V>
+where
+ K: Deserialize<'de> + Eq + Hash,
+ V: Deserialize<'de>,
+{
+ type Value = LinkedHashMap<K, V>;
+
+ fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
+ write!(formatter, "a map")
+ }
+
+ #[inline]
+ fn visit_map<M: MapAccess<'de>>(self, mut map: M) -> Result<Self::Value, M::Error> {
+ let mut values = LinkedHashMap::with_capacity(map.size_hint().unwrap_or(0));
+
+ while let Some((k, v)) = map.next_entry()? {
+ values.insert(k, v);
+ }
+
+ Ok(values)
+ }
+}
+
+impl<'de, K, V> Deserialize<'de> for LinkedHashMap<K, V>
+where
+ K: Deserialize<'de> + Eq + Hash,
+ V: Deserialize<'de>,
+{
+ fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
+ deserializer.deserialize_map(LinkedHashMapVisitor::default())
+ }
+}
+
+// LinkedHashSet impls
+
+impl<T, S> Serialize for LinkedHashSet<T, S>
+where
+ T: Serialize + Eq + Hash,
+ S: BuildHasher,
+{
+ #[inline]
+ fn serialize<U: Serializer>(&self, serializer: U) -> Result<U::Ok, U::Error> {
+ let mut seq_serializer = serializer.serialize_seq(Some(self.len()))?;
+ for v in self {
+ seq_serializer.serialize_element(v)?;
+ }
+ seq_serializer.end()
+ }
+}
+
+#[derive(Debug)]
+pub struct LinkedHashSetVisitor<T> {
+ marker: PhantomData<LinkedHashSet<T>>,
+}
+
+impl<T> LinkedHashSetVisitor<T> {
+ fn new() -> Self {
+ LinkedHashSetVisitor {
+ marker: PhantomData,
+ }
+ }
+}
+
+impl<T> Default for LinkedHashSetVisitor<T> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl<'de, T> Visitor<'de> for LinkedHashSetVisitor<T>
+where
+ T: Deserialize<'de> + Eq + Hash,
+{
+ type Value = LinkedHashSet<T>;
+
+ fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
+ write!(formatter, "a sequence")
+ }
+
+ #[inline]
+ fn visit_seq<S: SeqAccess<'de>>(self, mut seq: S) -> Result<Self::Value, S::Error> {
+ let mut values = LinkedHashSet::with_capacity(seq.size_hint().unwrap_or(0));
+
+ while let Some(v) = seq.next_element()? {
+ values.insert(v);
+ }
+
+ Ok(values)
+ }
+}
+
+impl<'de, T> Deserialize<'de> for LinkedHashSet<T>
+where
+ T: Deserialize<'de> + Eq + Hash,
+{
+ fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
+ deserializer.deserialize_seq(LinkedHashSetVisitor::default())
+ }
+}
diff --git a/tests/linked_hash_map.rs b/tests/linked_hash_map.rs
new file mode 100644
index 0000000..19dcc00
--- /dev/null
+++ b/tests/linked_hash_map.rs
@@ -0,0 +1,498 @@
+use hashlink::{linked_hash_map, LinkedHashMap};
+
+#[allow(dead_code)]
+fn assert_covariance() {
+ fn set<'new>(v: LinkedHashMap<&'static str, ()>) -> LinkedHashMap<&'new str, ()> {
+ v
+ }
+
+ fn iter<'a, 'new>(
+ v: linked_hash_map::Iter<'a, &'static str, &'static str>,
+ ) -> linked_hash_map::Iter<'a, &'new str, &'new str> {
+ v
+ }
+
+ fn iter_mut<'a, 'new>(
+ v: linked_hash_map::Iter<'a, &'static str, ()>,
+ ) -> linked_hash_map::Iter<'a, &'new str, ()> {
+ v
+ }
+
+ fn into_iter<'new>(
+ v: linked_hash_map::IntoIter<&'static str, &'static str>,
+ ) -> linked_hash_map::IntoIter<&'new str, &'new str> {
+ v
+ }
+
+ fn drain<'new>(
+ d: linked_hash_map::Drain<'static, &'static str, &'static str>,
+ ) -> linked_hash_map::Drain<'new, &'new str, &'new str> {
+ d
+ }
+
+ fn raw_entry_builder<'a, 'new>(
+ v: linked_hash_map::RawEntryBuilder<'a, &'static str, &'static str, ()>,
+ ) -> linked_hash_map::RawEntryBuilder<'a, &'new str, &'new str, ()> {
+ v
+ }
+}
+
+#[test]
+fn test_index() {
+ let mut map = LinkedHashMap::new();
+ map.insert(1, 10);
+ map.insert(2, 20);
+ assert_eq!(10, map[&1]);
+ map[&2] = 22;
+ assert_eq!(22, map[&2]);
+}
+
+#[test]
+fn test_insert_and_get() {
+ let mut map = LinkedHashMap::new();
+ map.insert(1, 10);
+ map.insert(2, 20);
+ assert_eq!(map.get(&1), Some(&10));
+ assert_eq!(map.get(&2), Some(&20));
+ assert_eq!(map.len(), 2);
+}
+
+#[test]
+fn test_insert_update() {
+ let mut map = LinkedHashMap::new();
+ map.insert("1".to_string(), vec![10, 10]);
+ map.insert("1".to_string(), vec![10, 19]);
+ assert_eq!(map.get(&"1".to_string()), Some(&vec![10, 19]));
+ assert_eq!(map.len(), 1);
+}
+
+#[test]
+fn test_entry_insert_vacant() {
+ let mut map = LinkedHashMap::new();
+ match map.entry("1".to_string()) {
+ linked_hash_map::Entry::Vacant(e) => {
+ assert_eq!(*e.insert(vec![10, 10]), vec![10, 10]);
+ }
+ _ => panic!("fail"),
+ }
+ assert!(map.contains_key("1"));
+ assert_eq!(map["1"], vec![10, 10]);
+
+ match map.entry("1".to_string()) {
+ linked_hash_map::Entry::Occupied(mut e) => {
+ assert_eq!(*e.get(), vec![10, 10]);
+ assert_eq!(e.insert(vec![10, 16]), vec![10, 10]);
+ }
+ _ => panic!("fail"),
+ }
+
+ assert!(map.contains_key("1"));
+ assert_eq!(map["1"], vec![10, 16]);
+
+ match map.entry("1".to_string()) {
+ linked_hash_map::Entry::Occupied(e) => {
+ assert_eq!(e.remove(), vec![10, 16]);
+ }
+ _ => panic!("fail"),
+ }
+}
+
+#[test]
+fn test_remove() {
+ let mut map = LinkedHashMap::new();
+ map.insert(1, 10);
+ map.insert(2, 20);
+ map.insert(3, 30);
+ map.insert(4, 40);
+ map.insert(5, 50);
+ map.remove(&3);
+ map.remove(&4);
+ assert!(map.get(&3).is_none());
+ assert!(map.get(&4).is_none());
+ map.insert(6, 60);
+ map.insert(7, 70);
+ map.insert(8, 80);
+ assert_eq!(map.get(&6), Some(&60));
+ assert_eq!(map.get(&7), Some(&70));
+ assert_eq!(map.get(&8), Some(&80));
+}
+
+#[test]
+fn test_pop() {
+ let mut map = LinkedHashMap::new();
+ map.insert(1, 10);
+ map.insert(2, 20);
+ map.insert(3, 30);
+ map.insert(4, 40);
+ map.insert(5, 50);
+ assert_eq!(map.pop_front(), Some((1, 10)));
+ assert!(map.get(&1).is_none());
+ assert_eq!(map.pop_back(), Some((5, 50)));
+ assert!(map.get(&5).is_none());
+ map.insert(6, 60);
+ map.insert(7, 70);
+ map.insert(8, 80);
+ assert_eq!(map.pop_front(), Some((2, 20)));
+ assert!(map.get(&2).is_none());
+ assert_eq!(map.pop_back(), Some((8, 80)));
+ assert!(map.get(&8).is_none());
+ map.insert(3, 30);
+ assert_eq!(map.pop_front(), Some((4, 40)));
+ assert!(map.get(&4).is_none());
+ assert_eq!(map.pop_back(), Some((3, 30)));
+ assert!(map.get(&3).is_none());
+}
+
+#[test]
+fn test_move() {
+ let to_back = |map: &mut LinkedHashMap<_, _>, key| match map.entry(key) {
+ linked_hash_map::Entry::Occupied(mut occupied) => occupied.to_back(),
+ linked_hash_map::Entry::Vacant(_) => panic!(),
+ };
+
+ let to_front = |map: &mut LinkedHashMap<_, _>, key| match map.entry(key) {
+ linked_hash_map::Entry::Occupied(mut occupied) => occupied.to_front(),
+ linked_hash_map::Entry::Vacant(_) => panic!(),
+ };
+
+ let mut map = LinkedHashMap::new();
+ map.insert(1, 10);
+ map.insert(2, 20);
+ map.insert(3, 30);
+ map.insert(4, 40);
+ map.insert(5, 50);
+
+ to_back(&mut map, 1);
+ assert_eq!(map.keys().copied().collect::<Vec<_>>(), vec![2, 3, 4, 5, 1]);
+
+ to_front(&mut map, 4);
+ assert_eq!(map.keys().copied().collect::<Vec<_>>(), vec![4, 2, 3, 5, 1]);
+
+ to_back(&mut map, 3);
+ assert_eq!(map.keys().copied().collect::<Vec<_>>(), vec![4, 2, 5, 1, 3]);
+
+ to_front(&mut map, 2);
+ assert_eq!(map.keys().copied().collect::<Vec<_>>(), vec![2, 4, 5, 1, 3]);
+
+ to_back(&mut map, 3);
+ assert_eq!(map.keys().copied().collect::<Vec<_>>(), vec![2, 4, 5, 1, 3]);
+
+ to_front(&mut map, 2);
+ assert_eq!(map.keys().copied().collect::<Vec<_>>(), vec![2, 4, 5, 1, 3]);
+}
+
+#[test]
+fn test_clear() {
+ let mut map = LinkedHashMap::new();
+ map.insert(1, 10);
+ map.insert(2, 20);
+ map.clear();
+ assert!(map.get(&1).is_none());
+ assert!(map.get(&2).is_none());
+ assert!(map.is_empty());
+}
+
+#[test]
+fn test_iter() {
+ let mut map = LinkedHashMap::new();
+
+ // empty iter
+ assert_eq!(None, map.iter().next());
+
+ map.insert("a", 10);
+ map.insert("b", 20);
+ map.insert("c", 30);
+
+ // regular iter
+ let mut iter = map.iter();
+ assert_eq!((&"a", &10), iter.next().unwrap());
+ assert_eq!((&"b", &20), iter.next().unwrap());
+ assert_eq!((&"c", &30), iter.next().unwrap());
+ assert_eq!(None, iter.next());
+ assert_eq!(None, iter.next());
+
+ let mut iter = map.iter();
+ assert_eq!((&"a", &10), iter.next().unwrap());
+ let mut iclone = iter.clone();
+ assert_eq!((&"b", &20), iter.next().unwrap());
+ assert_eq!((&"b", &20), iclone.next().unwrap());
+ assert_eq!((&"c", &30), iter.next().unwrap());
+ assert_eq!((&"c", &30), iclone.next().unwrap());
+
+ // reversed iter
+ let mut rev_iter = map.iter().rev();
+ assert_eq!((&"c", &30), rev_iter.next().unwrap());
+ assert_eq!((&"b", &20), rev_iter.next().unwrap());
+ assert_eq!((&"a", &10), rev_iter.next().unwrap());
+ assert_eq!(None, rev_iter.next());
+ assert_eq!(None, rev_iter.next());
+
+ // mixed
+ let mut mixed_iter = map.iter();
+ assert_eq!((&"a", &10), mixed_iter.next().unwrap());
+ assert_eq!((&"c", &30), mixed_iter.next_back().unwrap());
+ assert_eq!((&"b", &20), mixed_iter.next().unwrap());
+ assert_eq!(None, mixed_iter.next());
+ assert_eq!(None, mixed_iter.next_back());
+}
+
+#[test]
+fn test_borrow() {
+ #[derive(PartialEq, Eq, Hash)]
+ struct Foo(Bar);
+ #[derive(PartialEq, Eq, Hash)]
+ struct Bar(i32);
+
+ impl ::std::borrow::Borrow<Bar> for Foo {
+ fn borrow(&self) -> &Bar {
+ &self.0
+ }
+ }
+
+ let mut map = LinkedHashMap::new();
+ map.insert(Foo(Bar(1)), "a");
+ map.insert(Foo(Bar(2)), "b");
+
+ assert!(map.contains_key(&Bar(1)));
+ assert!(map.contains_key(&Bar(2)));
+ assert!(map.contains_key(&Foo(Bar(1))));
+ assert!(map.contains_key(&Foo(Bar(2))));
+
+ assert_eq!(map.get(&Bar(1)), Some(&"a"));
+ assert_eq!(map.get(&Bar(2)), Some(&"b"));
+ assert_eq!(map.get(&Foo(Bar(1))), Some(&"a"));
+ assert_eq!(map.get(&Foo(Bar(2))), Some(&"b"));
+
+ assert_eq!(map.get_mut(&Bar(1)), Some(&mut "a"));
+ assert_eq!(map.get_mut(&Bar(2)), Some(&mut "b"));
+ assert_eq!(map.get_mut(&Foo(Bar(1))), Some(&mut "a"));
+ assert_eq!(map.get_mut(&Foo(Bar(2))), Some(&mut "b"));
+
+ assert_eq!(map[&Bar(1)], "a");
+ assert_eq!(map[&Bar(2)], "b");
+ assert_eq!(map[&Foo(Bar(1))], "a");
+ assert_eq!(map[&Foo(Bar(2))], "b");
+
+ assert_eq!(map.remove(&Bar(1)), Some("a"));
+ assert_eq!(map.remove(&Bar(2)), Some("b"));
+ assert_eq!(map.remove(&Foo(Bar(1))), None);
+ assert_eq!(map.remove(&Foo(Bar(2))), None);
+}
+
+#[test]
+fn test_iter_mut() {
+ let mut map = LinkedHashMap::new();
+ map.insert("a", 10);
+ map.insert("c", 30);
+ map.insert("b", 20);
+
+ {
+ let mut iter = map.iter_mut();
+ let entry = iter.next().unwrap();
+ assert_eq!("a", *entry.0);
+ *entry.1 = 17;
+
+ assert_eq!(format!("{:?}", iter), "[(\"c\", 30), (\"b\", 20)]");
+
+ // reverse iterator
+ let mut iter = iter.rev();
+ let entry = iter.next().unwrap();
+ assert_eq!("b", *entry.0);
+ *entry.1 = 23;
+
+ let entry = iter.next().unwrap();
+ assert_eq!("c", *entry.0);
+ assert_eq!(None, iter.next());
+ assert_eq!(None, iter.next());
+ }
+
+ assert_eq!(17, map[&"a"]);
+ assert_eq!(23, map[&"b"]);
+}
+
+#[test]
+fn test_consuming_iter() {
+ let map = {
+ let mut map = LinkedHashMap::new();
+ map.insert("a", 10);
+ map.insert("c", 30);
+ map.insert("b", 20);
+ map
+ };
+
+ let mut iter = map.into_iter();
+ assert_eq!(Some(("a", 10)), iter.next());
+ assert_eq!(Some(("b", 20)), iter.next_back());
+ assert_eq!(iter.len(), 1);
+ assert_eq!(format!("{:?}", iter), "[(\"c\", 30)]");
+ assert_eq!(Some(("c", 30)), iter.next());
+ assert_eq!(None, iter.next());
+}
+
+#[test]
+fn test_consuming_iter_empty() {
+ let map = LinkedHashMap::<&str, i32>::new();
+ let mut iter = map.into_iter();
+ assert_eq!(None, iter.next());
+}
+
+#[test]
+fn test_consuming_iter_with_free_list() {
+ let mut map = LinkedHashMap::new();
+ map.insert("a", 10);
+ map.insert("c", 30);
+ map.insert("b", 20);
+ map.remove("a");
+ map.remove("b");
+
+ let mut iter = map.into_iter();
+ assert_eq!(Some(("c", 30)), iter.next());
+ assert_eq!(None, iter.next());
+}
+
+#[test]
+fn test_into_iter_drop() {
+ struct Counter<'a>(&'a mut usize);
+
+ impl<'a> Drop for Counter<'a> {
+ fn drop(&mut self) {
+ *self.0 += 1;
+ }
+ }
+
+ let mut a = 0;
+ let mut b = 0;
+ let mut c = 0;
+
+ {
+ let mut map = LinkedHashMap::new();
+ map.insert("a", Counter(&mut a));
+ map.insert("b", Counter(&mut b));
+ map.insert("c", Counter(&mut c));
+
+ let mut iter = map.into_iter();
+ assert_eq!(iter.next().map(|p| p.0), Some("a"));
+ assert_eq!(iter.next_back().map(|p| p.0), Some("c"));
+ }
+
+ assert_eq!(a, 1);
+ assert_eq!(b, 1);
+ assert_eq!(c, 1);
+}
+
+#[test]
+fn test_drain() {
+ use std::{cell::Cell, rc::Rc};
+
+ struct Counter(Rc<Cell<u32>>);
+
+ impl Drop for Counter {
+ fn drop(&mut self) {
+ self.0.set(self.0.get() + 1);
+ }
+ }
+
+ let mut map = LinkedHashMap::new();
+
+ let a = Rc::new(Cell::new(0));
+ let b = Rc::new(Cell::new(0));
+ let c = Rc::new(Cell::new(0));
+
+ map.insert("a", Counter(a.clone()));
+ map.insert("b", Counter(b.clone()));
+ map.insert("c", Counter(c.clone()));
+
+ let mut iter = map.drain();
+ assert_eq!(iter.next().map(|p| p.0), Some("a"));
+ assert_eq!(iter.next_back().map(|p| p.0), Some("c"));
+ assert_eq!(iter.next_back().map(|p| p.0), Some("b"));
+ assert!(iter.next().is_none());
+ assert!(iter.next_back().is_none());
+
+ drop(iter);
+ assert_eq!(map.len(), 0);
+
+ assert_eq!(a.get(), 1);
+ assert_eq!(b.get(), 1);
+ assert_eq!(c.get(), 1);
+
+ map.insert("a", Counter(a.clone()));
+ map.insert("b", Counter(b.clone()));
+ map.insert("c", Counter(c.clone()));
+
+ let mut iter = map.drain();
+ assert_eq!(iter.next().map(|p| p.0), Some("a"));
+ assert_eq!(iter.next().map(|p| p.0), Some("b"));
+ assert_eq!(iter.next_back().map(|p| p.0), Some("c"));
+ assert!(iter.next().is_none());
+ assert!(iter.next_back().is_none());
+
+ drop(iter);
+ assert_eq!(map.len(), 0);
+
+ assert_eq!(a.get(), 2);
+ assert_eq!(b.get(), 2);
+ assert_eq!(c.get(), 2);
+
+ map.insert("a", Counter(a.clone()));
+ map.insert("b", Counter(b.clone()));
+ map.insert("c", Counter(c.clone()));
+
+ map.drain();
+ assert_eq!(map.len(), 0);
+
+ assert_eq!(a.get(), 3);
+ assert_eq!(b.get(), 3);
+ assert_eq!(c.get(), 3);
+}
+
+#[test]
+fn test_send_sync() {
+ fn is_send_sync<T: Send + Sync>() {}
+
+ is_send_sync::<LinkedHashMap<u32, i32>>();
+ is_send_sync::<linked_hash_map::Entry<u32, i32, ()>>();
+ is_send_sync::<linked_hash_map::RawEntryBuilder<u32, i32, ()>>();
+ is_send_sync::<linked_hash_map::RawEntryBuilderMut<u32, i32, ()>>();
+ is_send_sync::<linked_hash_map::RawEntryMut<u32, i32, ()>>();
+ is_send_sync::<linked_hash_map::Iter<u32, i32>>();
+ is_send_sync::<linked_hash_map::IterMut<u32, i32>>();
+ is_send_sync::<linked_hash_map::Drain<u32, i32>>();
+ is_send_sync::<linked_hash_map::Keys<u32, i32>>();
+ is_send_sync::<linked_hash_map::Values<u32, i32>>();
+}
+
+#[test]
+fn test_retain() {
+ use std::{cell::Cell, rc::Rc};
+
+ let xs = [1, 2, 3, 4, 5, 6];
+ let mut map: LinkedHashMap<String, i32> = xs.iter().map(|i| (i.to_string(), *i)).collect();
+ map.retain(|_, v| *v % 2 == 0);
+ assert_eq!(map.len(), 3);
+ assert!(map.contains_key("2"));
+ assert!(map.contains_key("4"));
+ assert!(map.contains_key("6"));
+
+ struct Counter(Rc<Cell<u32>>);
+
+ impl Drop for Counter {
+ fn drop(&mut self) {
+ self.0.set(self.0.get() + 1);
+ }
+ }
+
+ let c = Rc::new(Cell::new(0));
+
+ let mut map = LinkedHashMap::new();
+ map.insert(1, Counter(Rc::clone(&c)));
+ map.insert(2, Counter(Rc::clone(&c)));
+ map.insert(3, Counter(Rc::clone(&c)));
+ map.insert(4, Counter(Rc::clone(&c)));
+
+ map.retain(|k, _| *k % 2 == 0);
+
+ assert!(c.get() == 2);
+ drop(map);
+ assert!(c.get() == 4);
+}
diff --git a/tests/linked_hash_set.rs b/tests/linked_hash_set.rs
new file mode 100644
index 0000000..13cceae
--- /dev/null
+++ b/tests/linked_hash_set.rs
@@ -0,0 +1,512 @@
+use hashbrown::hash_map::DefaultHashBuilder;
+use hashlink::linked_hash_set::{self, LinkedHashSet};
+
+#[allow(dead_code)]
+fn assert_covariance() {
+ fn set<'new>(v: LinkedHashSet<&'static str>) -> LinkedHashSet<&'new str> {
+ v
+ }
+
+ fn iter<'a, 'new>(
+ v: linked_hash_set::Iter<'a, &'static str>,
+ ) -> linked_hash_set::Iter<'a, &'new str> {
+ v
+ }
+
+ fn into_iter<'new>(
+ v: linked_hash_set::IntoIter<&'static str>,
+ ) -> linked_hash_set::IntoIter<&'new str> {
+ v
+ }
+
+ fn difference<'a, 'new>(
+ v: linked_hash_set::Difference<'a, &'static str, DefaultHashBuilder>,
+ ) -> linked_hash_set::Difference<'a, &'new str, DefaultHashBuilder> {
+ v
+ }
+
+ fn symmetric_difference<'a, 'new>(
+ v: linked_hash_set::SymmetricDifference<'a, &'static str, DefaultHashBuilder>,
+ ) -> linked_hash_set::SymmetricDifference<'a, &'new str, DefaultHashBuilder> {
+ v
+ }
+
+ fn intersection<'a, 'new>(
+ v: linked_hash_set::Intersection<'a, &'static str, DefaultHashBuilder>,
+ ) -> linked_hash_set::Intersection<'a, &'new str, DefaultHashBuilder> {
+ v
+ }
+
+ fn union<'a, 'new>(
+ v: linked_hash_set::Union<'a, &'static str, DefaultHashBuilder>,
+ ) -> linked_hash_set::Union<'a, &'new str, DefaultHashBuilder> {
+ v
+ }
+
+ fn drain<'new>(
+ d: linked_hash_set::Drain<'static, &'static str>,
+ ) -> linked_hash_set::Drain<'new, &'new str> {
+ d
+ }
+}
+
+#[test]
+fn test_zero_capacities() {
+ type HS = LinkedHashSet<i32>;
+
+ let s = HS::new();
+ assert_eq!(s.capacity(), 0);
+
+ let s = HS::default();
+ assert_eq!(s.capacity(), 0);
+
+ let s = HS::with_hasher(DefaultHashBuilder::default());
+ assert_eq!(s.capacity(), 0);
+
+ let s = HS::with_capacity(0);
+ assert_eq!(s.capacity(), 0);
+
+ let s = HS::with_capacity_and_hasher(0, DefaultHashBuilder::default());
+ assert_eq!(s.capacity(), 0);
+
+ let mut s = HS::new();
+ s.insert(1);
+ s.insert(2);
+ s.remove(&1);
+ s.remove(&2);
+ s.shrink_to_fit();
+ assert_eq!(s.capacity(), 0);
+
+ let mut s = HS::new();
+ s.reserve(0);
+ assert_eq!(s.capacity(), 0);
+}
+
+#[test]
+fn test_disjoint() {
+ let mut xs = LinkedHashSet::new();
+ let mut ys = LinkedHashSet::new();
+ assert!(xs.is_disjoint(&ys));
+ assert!(ys.is_disjoint(&xs));
+ assert!(xs.insert(5));
+ assert!(ys.insert(11));
+ assert!(xs.is_disjoint(&ys));
+ assert!(ys.is_disjoint(&xs));
+ assert!(xs.insert(7));
+ assert!(xs.insert(19));
+ assert!(xs.insert(4));
+ assert!(ys.insert(2));
+ assert!(ys.insert(-11));
+ assert!(xs.is_disjoint(&ys));
+ assert!(ys.is_disjoint(&xs));
+ assert!(ys.insert(7));
+ assert!(!xs.is_disjoint(&ys));
+ assert!(!ys.is_disjoint(&xs));
+}
+
+#[test]
+fn test_subset_and_superset() {
+ let mut a = LinkedHashSet::new();
+ assert!(a.insert(0));
+ assert!(a.insert(5));
+ assert!(a.insert(11));
+ assert!(a.insert(7));
+
+ let mut b = LinkedHashSet::new();
+ assert!(b.insert(0));
+ assert!(b.insert(7));
+ assert!(b.insert(19));
+ assert!(b.insert(250));
+ assert!(b.insert(11));
+ assert!(b.insert(200));
+
+ assert!(!a.is_subset(&b));
+ assert!(!a.is_superset(&b));
+ assert!(!b.is_subset(&a));
+ assert!(!b.is_superset(&a));
+
+ assert!(b.insert(5));
+
+ assert!(a.is_subset(&b));
+ assert!(!a.is_superset(&b));
+ assert!(!b.is_subset(&a));
+ assert!(b.is_superset(&a));
+}
+
+#[test]
+fn test_iterate() {
+ let mut a = LinkedHashSet::new();
+ for i in 0..32 {
+ assert!(a.insert(i));
+ }
+ let mut observed: u32 = 0;
+ for k in &a {
+ observed |= 1 << *k;
+ }
+ assert_eq!(observed, 0xFFFF_FFFF);
+}
+
+#[test]
+fn test_intersection() {
+ let mut a = LinkedHashSet::new();
+ let mut b = LinkedHashSet::new();
+
+ assert!(a.insert(11));
+ assert!(a.insert(1));
+ assert!(a.insert(3));
+ assert!(a.insert(77));
+ assert!(a.insert(103));
+ assert!(a.insert(5));
+ assert!(a.insert(-5));
+
+ assert!(b.insert(2));
+ assert!(b.insert(11));
+ assert!(b.insert(77));
+ assert!(b.insert(-9));
+ assert!(b.insert(-42));
+ assert!(b.insert(5));
+ assert!(b.insert(3));
+
+ let mut i = 0;
+ let expected = [3, 5, 11, 77];
+ for x in a.intersection(&b) {
+ assert!(expected.contains(x));
+ i += 1
+ }
+ assert_eq!(i, expected.len());
+}
+
+#[test]
+fn test_difference() {
+ let mut a = LinkedHashSet::new();
+ let mut b = LinkedHashSet::new();
+
+ assert!(a.insert(1));
+ assert!(a.insert(3));
+ assert!(a.insert(5));
+ assert!(a.insert(9));
+ assert!(a.insert(11));
+
+ assert!(b.insert(3));
+ assert!(b.insert(9));
+
+ let mut i = 0;
+ let expected = [1, 5, 11];
+ for x in a.difference(&b) {
+ assert!(expected.contains(x));
+ i += 1
+ }
+ assert_eq!(i, expected.len());
+}
+
+#[test]
+fn test_symmetric_difference() {
+ let mut a = LinkedHashSet::new();
+ let mut b = LinkedHashSet::new();
+
+ assert!(a.insert(1));
+ assert!(a.insert(3));
+ assert!(a.insert(5));
+ assert!(a.insert(9));
+ assert!(a.insert(11));
+
+ assert!(b.insert(-2));
+ assert!(b.insert(3));
+ assert!(b.insert(9));
+ assert!(b.insert(14));
+ assert!(b.insert(22));
+
+ let mut i = 0;
+ let expected = [-2, 1, 5, 11, 14, 22];
+ for x in a.symmetric_difference(&b) {
+ assert!(expected.contains(x));
+ i += 1
+ }
+ assert_eq!(i, expected.len());
+}
+
+#[test]
+fn test_union() {
+ let mut a = LinkedHashSet::new();
+ let mut b = LinkedHashSet::new();
+
+ assert!(a.insert(1));
+ assert!(a.insert(3));
+ assert!(a.insert(5));
+ assert!(a.insert(9));
+ assert!(a.insert(11));
+ assert!(a.insert(16));
+ assert!(a.insert(19));
+ assert!(a.insert(24));
+
+ assert!(b.insert(-2));
+ assert!(b.insert(1));
+ assert!(b.insert(5));
+ assert!(b.insert(9));
+ assert!(b.insert(13));
+ assert!(b.insert(19));
+
+ let mut i = 0;
+ let expected = [-2, 1, 3, 5, 9, 11, 13, 16, 19, 24];
+ for x in a.union(&b) {
+ assert!(expected.contains(x));
+ i += 1
+ }
+ assert_eq!(i, expected.len());
+}
+
+#[test]
+fn test_from_iter() {
+ let xs = [1, 2, 3, 4, 5, 6, 7, 8, 9];
+
+ let set: LinkedHashSet<_> = xs.iter().cloned().collect();
+
+ for x in &xs {
+ assert!(set.contains(x));
+ }
+}
+
+#[test]
+fn test_move_iter() {
+ let hs = {
+ let mut hs = LinkedHashSet::new();
+
+ hs.insert('a');
+ hs.insert('b');
+
+ hs
+ };
+
+ let v = hs.into_iter().collect::<Vec<char>>();
+ assert!(v == ['a', 'b'] || v == ['b', 'a']);
+}
+
+#[test]
+fn test_eq() {
+ let mut s1 = LinkedHashSet::new();
+
+ s1.insert(1);
+ s1.insert(2);
+ s1.insert(3);
+
+ let mut s2 = LinkedHashSet::new();
+
+ s2.insert(1);
+ s2.insert(2);
+
+ assert!(s1 != s2);
+
+ s2.insert(3);
+
+ assert_eq!(s1, s2);
+}
+
+#[test]
+fn test_show() {
+ let mut set = LinkedHashSet::new();
+ let empty = LinkedHashSet::<i32>::new();
+
+ set.insert(1);
+ set.insert(2);
+
+ let set_str = format!("{:?}", set);
+
+ assert!(set_str == "{1, 2}" || set_str == "{2, 1}");
+ assert_eq!(format!("{:?}", empty), "{}");
+}
+
+#[test]
+fn test_trivial_drain() {
+ let mut s = LinkedHashSet::<i32>::new();
+ for _ in s.drain() {}
+ assert!(s.is_empty());
+ drop(s);
+
+ let mut s = LinkedHashSet::<i32>::new();
+ drop(s.drain());
+ assert!(s.is_empty());
+}
+
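+// The scoped drain below yields only 50 of the 99 elements; the test then relies on
+// the un-yielded elements being removed when the `Drain` iterator is dropped.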
+#[test]
+fn test_drain() {
+ let mut s: LinkedHashSet<_> = (1..100).collect();
+
+ for _ in 0..20 {
+ assert_eq!(s.len(), 99);
+
+ {
+ let mut last_i = 0;
+ let mut d = s.drain();
+ for (i, x) in d.by_ref().take(50).enumerate() {
+ last_i = i;
+ assert!(x != 0);
+ }
+ assert_eq!(last_i, 49);
+ }
+
+ for _ in &s {
+ panic!("s should be empty!");
+ }
+
+ s.extend(1..100);
+ }
+}
+
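+// `Foo` hashes and compares only on its first field, so `replace` treats the new
+// `Foo("a", 2)` as equal to the stored `Foo("a", 1)`, returns the old value, and
+// keeps the replacement's payload.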
+#[test]
+fn test_replace() {
+ use core::hash;
+
+ #[derive(Debug)]
+ struct Foo(&'static str, i32);
+
+ impl PartialEq for Foo {
+ fn eq(&self, other: &Self) -> bool {
+ self.0 == other.0
+ }
+ }
+
+ impl Eq for Foo {}
+
+ impl hash::Hash for Foo {
+ fn hash<H: hash::Hasher>(&self, h: &mut H) {
+ self.0.hash(h);
+ }
+ }
+
+ let mut s = LinkedHashSet::new();
+ assert_eq!(s.replace(Foo("a", 1)), None);
+ assert_eq!(s.len(), 1);
+ assert_eq!(s.replace(Foo("a", 2)), Some(Foo("a", 1)));
+ assert_eq!(s.len(), 1);
+
+ let mut it = s.iter();
+ assert_eq!(it.next(), Some(&Foo("a", 2)));
+ assert_eq!(it.next(), None);
+}
+
+#[test]
+fn test_extend_ref() {
+ let mut a = LinkedHashSet::new();
+ a.insert(1);
+
+ a.extend(&[2, 3, 4]);
+
+ assert_eq!(a.len(), 4);
+ assert!(a.contains(&1));
+ assert!(a.contains(&2));
+ assert!(a.contains(&3));
+ assert!(a.contains(&4));
+
+ let mut b = LinkedHashSet::new();
+ b.insert(5);
+ b.insert(6);
+
+ a.extend(&b);
+
+ assert_eq!(a.len(), 6);
+ assert!(a.contains(&1));
+ assert!(a.contains(&2));
+ assert!(a.contains(&3));
+ assert!(a.contains(&4));
+ assert!(a.contains(&5));
+ assert!(a.contains(&6));
+}
+
+#[test]
+fn test_retain() {
+ let xs = [1, 2, 3, 4, 5, 6];
+ let mut set: LinkedHashSet<i32> = xs.iter().cloned().collect();
+ set.retain(|&k| k % 2 == 0);
+ assert_eq!(set.len(), 3);
+ assert!(set.contains(&2));
+ assert!(set.contains(&4));
+ assert!(set.contains(&6));
+}
+
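+// Iteration order should match insertion order, both for a clone and for the original set.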
+#[test]
+fn insert_order() {
+ let mut set = LinkedHashSet::new();
+ set.insert(1);
+ set.insert(2);
+ set.insert(3);
+ set.insert(4);
+ assert_eq!(
+ set.clone().into_iter().collect::<Vec<_>>(),
+ vec![1, 2, 3, 4]
+ );
+ assert_eq!(set.into_iter().collect::<Vec<_>>(), vec![1, 2, 3, 4]);
+}
+
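+// `front`/`back` expose the oldest and newest elements; `pop_front`/`pop_back` remove them.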
+#[test]
+fn front_back() {
+ let mut set = LinkedHashSet::new();
+ set.insert(1);
+ set.insert(2);
+ set.insert(3);
+ set.insert(4);
+ assert_eq!(set.front(), Some(&1));
+ assert_eq!(set.back(), Some(&4));
+ assert_eq!(set.pop_back(), Some(4));
+ assert_eq!(set.back(), Some(&3));
+ assert_eq!(set.pop_front(), Some(1));
+ assert_eq!(set.front(), Some(&2));
+}
+
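+// `iter`, `drain`, and `into_iter` are each expected to walk the insertion order from
+// both ends as double-ended iterators.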
+#[test]
+fn double_ended_iter() {
+ let mut set = LinkedHashSet::new();
+ set.insert(1);
+ set.insert(2);
+ set.insert(3);
+ set.insert(4);
+
+ let mut iter = set.iter();
+ assert_eq!(iter.next(), Some(&1));
+ assert_eq!(iter.next(), Some(&2));
+ assert_eq!(iter.next_back(), Some(&4));
+ assert_eq!(iter.next_back(), Some(&3));
+ assert_eq!(iter.next_back(), None);
+ assert_eq!(iter.next(), None);
+ assert_eq!(iter.next_back(), None);
+ drop(iter);
+
+ let mut iter = set.drain();
+ assert_eq!(iter.next(), Some(1));
+ assert_eq!(iter.next(), Some(2));
+ assert_eq!(iter.next_back(), Some(4));
+ assert_eq!(iter.next_back(), Some(3));
+ assert_eq!(iter.next_back(), None);
+ assert_eq!(iter.next(), None);
+ assert_eq!(iter.next_back(), None);
+ drop(iter);
+
+ set.insert(1);
+ set.insert(2);
+ set.insert(3);
+ set.insert(4);
+
+ let mut iter = set.into_iter();
+ assert_eq!(iter.next(), Some(1));
+ assert_eq!(iter.next(), Some(2));
+ assert_eq!(iter.next_back(), Some(4));
+ assert_eq!(iter.next_back(), Some(3));
+ assert_eq!(iter.next_back(), None);
+ assert_eq!(iter.next(), None);
+ assert_eq!(iter.next_back(), None);
+}
+
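+// `to_back`/`to_front` move an already-present element to the end or the start of the
+// iteration order.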
+#[test]
+fn to_back_front_order() {
+ let mut set = LinkedHashSet::new();
+ set.insert(1);
+ set.insert(2);
+ set.insert(3);
+ set.insert(4);
+
+ assert_eq!(set.back().copied(), Some(4));
+ assert_eq!(set.front().copied(), Some(1));
+ set.to_back(&2);
+ assert_eq!(set.back().copied(), Some(2));
+ set.to_front(&3);
+ assert_eq!(set.front().copied(), Some(3));
+}
diff --git a/tests/lru_cache.rs b/tests/lru_cache.rs
new file mode 100644
index 0000000..f863c70
--- /dev/null
+++ b/tests/lru_cache.rs
@@ -0,0 +1,166 @@
+use hashlink::LruCache;
+
+#[test]
+fn test_put_and_get() {
+ let mut cache = LruCache::new(2);
+ cache.insert(1, 10);
+ cache.insert(2, 20);
+ assert_eq!(cache.get_mut(&1), Some(&mut 10));
+ assert_eq!(cache.get_mut(&2), Some(&mut 20));
+ assert_eq!(cache.len(), 2);
+}
+
+#[test]
+fn test_put_update() {
+ let mut cache = LruCache::new(1);
+ cache.insert("1", 10);
+ cache.insert("1", 19);
+ assert_eq!(cache.get_mut("1"), Some(&mut 19));
+ assert_eq!(cache.len(), 1);
+}
+
+#[test]
+fn test_contains_key() {
+ let mut cache = LruCache::new(1);
+ cache.insert("1", 10);
+ assert_eq!(cache.contains_key("1"), true);
+}
+
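+// With capacity 2, inserting "foo3" evicts the least-recently-used "foo1"; re-inserting
+// "foo2" refreshes it, so the later insert of "foo4" evicts "foo3" instead.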
+#[test]
+fn test_expire_lru() {
+ let mut cache = LruCache::new(2);
+ cache.insert("foo1", "bar1");
+ cache.insert("foo2", "bar2");
+ cache.insert("foo3", "bar3");
+ assert!(cache.get_mut("foo1").is_none());
+ cache.insert("foo2", "bar2update");
+ cache.insert("foo4", "bar4");
+ assert!(cache.get_mut("foo3").is_none());
+}
+
+#[test]
+fn test_pop() {
+ let mut cache = LruCache::new(2);
+ cache.insert(1, 10);
+ cache.insert(2, 20);
+ assert_eq!(cache.len(), 2);
+ let opt1 = cache.remove(&1);
+ assert!(opt1.is_some());
+ assert_eq!(opt1.unwrap(), 10);
+ assert!(cache.get_mut(&1).is_none());
+ assert_eq!(cache.len(), 1);
+}
+
+#[test]
+fn test_change_capacity() {
+ let mut cache = LruCache::new(2);
+ assert_eq!(cache.capacity(), 2);
+ cache.insert(1, 10);
+ cache.insert(2, 20);
+ cache.set_capacity(1);
+ assert!(cache.get_mut(&1).is_none());
+ assert_eq!(cache.capacity(), 1);
+}
+
+#[test]
+fn test_remove() {
+ let mut cache = LruCache::new(3);
+ cache.insert(1, 10);
+ cache.insert(2, 20);
+ cache.insert(3, 30);
+ cache.insert(4, 40);
+ cache.insert(5, 50);
+ cache.remove(&3);
+ cache.remove(&4);
+ assert!(cache.get_mut(&3).is_none());
+ assert!(cache.get_mut(&4).is_none());
+ cache.insert(6, 60);
+ cache.insert(7, 70);
+ cache.insert(8, 80);
+ assert!(cache.get_mut(&5).is_none());
+ assert_eq!(cache.get_mut(&6), Some(&mut 60));
+ assert_eq!(cache.get_mut(&7), Some(&mut 70));
+ assert_eq!(cache.get_mut(&8), Some(&mut 80));
+}
+
+#[test]
+fn test_clear() {
+ let mut cache = LruCache::new(2);
+ cache.insert(1, 10);
+ cache.insert(2, 20);
+ cache.clear();
+ assert!(cache.get_mut(&1).is_none());
+ assert!(cache.get_mut(&2).is_none());
+ assert!(cache.is_empty())
+}
+
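+// Iteration runs from least- to most-recently-used, so only the three newest entries of
+// this capacity-3 cache are visited.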
+#[test]
+fn test_iter() {
+ let mut cache = LruCache::new(3);
+ cache.insert(1, 10);
+ cache.insert(2, 20);
+ cache.insert(3, 30);
+ cache.insert(4, 40);
+ cache.insert(5, 50);
+ assert_eq!(
+ cache.iter().collect::<Vec<_>>(),
+ [(&3, &30), (&4, &40), (&5, &50)]
+ );
+ assert_eq!(
+ cache.iter_mut().collect::<Vec<_>>(),
+ [(&3, &mut 30), (&4, &mut 40), (&5, &mut 50)]
+ );
+ assert_eq!(
+ cache.iter().rev().collect::<Vec<_>>(),
+ [(&5, &50), (&4, &40), (&3, &30)]
+ );
+ assert_eq!(
+ cache.iter_mut().rev().collect::<Vec<_>>(),
+ [(&5, &mut 50), (&4, &mut 40), (&3, &mut 30)]
+ );
+}
+
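+// `peek`/`peek_mut` read a value without refreshing its LRU position, while `get` does
+// refresh it, which is why 4 outlives 5 in the eviction sequence below.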
+#[test]
+fn test_peek() {
+ let mut cache = LruCache::new_unbounded();
+ cache.insert(1, 10);
+ cache.insert(2, 20);
+ cache.insert(3, 30);
+ cache.insert(4, 40);
+ cache.insert(5, 50);
+ cache.insert(6, 60);
+
+ assert_eq!(cache.remove_lru(), Some((1, 10)));
+ assert_eq!(cache.peek(&2), Some(&20));
+ assert_eq!(cache.remove_lru(), Some((2, 20)));
+ assert_eq!(cache.peek_mut(&3), Some(&mut 30));
+ assert_eq!(cache.remove_lru(), Some((3, 30)));
+ assert_eq!(cache.get(&4), Some(&40));
+ assert_eq!(cache.remove_lru(), Some((5, 50)));
+}
+
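+// The `entry` and `raw_entry_mut` insertion paths are only asserted to keep the cache
+// bounded (at most one entry over the configured capacity of 4).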
+#[test]
+fn test_entry() {
+ let mut cache = LruCache::new(4);
+
+ cache.insert(1, 10);
+ cache.insert(2, 20);
+ cache.insert(3, 30);
+ cache.insert(4, 40);
+ cache.insert(5, 50);
+ cache.insert(6, 60);
+
+ assert_eq!(cache.len(), 4);
+
+ cache.entry(7).or_insert(70);
+ cache.entry(8).or_insert(80);
+ cache.entry(9).or_insert(90);
+
+ assert!(cache.len() <= 5);
+
+ cache.raw_entry_mut().from_key(&10).or_insert(10, 100);
+ cache.raw_entry_mut().from_key(&11).or_insert(11, 110);
+ cache.raw_entry_mut().from_key(&12).or_insert(12, 120);
+
+ assert!(cache.len() <= 5);
+}
diff --git a/tests/serde.rs b/tests/serde.rs
new file mode 100644
index 0000000..fce3108
--- /dev/null
+++ b/tests/serde.rs
@@ -0,0 +1,59 @@
+#![cfg(feature = "serde_impl")]
+
+use hashlink::{LinkedHashMap, LinkedHashSet};
+use serde_test::{assert_tokens, Token};
+
+#[test]
+fn map_serde_tokens_empty() {
+ let map = LinkedHashMap::<char, u32>::new();
+
+ assert_tokens(&map, &[Token::Map { len: Some(0) }, Token::MapEnd]);
+}
+
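+// Serialization is expected to emit entries in insertion order, which the exact token
+// sequence below checks.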
+#[test]
+fn map_serde_tokens() {
+ let mut map = LinkedHashMap::new();
+ map.insert('a', 10);
+ map.insert('b', 20);
+ map.insert('c', 30);
+
+ assert_tokens(
+ &map,
+ &[
+ Token::Map { len: Some(3) },
+ Token::Char('a'),
+ Token::I32(10),
+ Token::Char('b'),
+ Token::I32(20),
+ Token::Char('c'),
+ Token::I32(30),
+ Token::MapEnd,
+ ],
+ );
+}
+
+#[test]
+fn set_serde_tokens_empty() {
+ let set = LinkedHashSet::<u32>::new();
+
+ assert_tokens(&set, &[Token::Seq { len: Some(0) }, Token::SeqEnd]);
+}
+
+#[test]
+fn set_serde_tokens() {
+ let mut set = LinkedHashSet::new();
+ set.insert(10);
+ set.insert(20);
+ set.insert(30);
+
+ assert_tokens(
+ &set,
+ &[
+ Token::Seq { len: Some(3) },
+ Token::I32(10),
+ Token::I32(20),
+ Token::I32(30),
+ Token::SeqEnd,
+ ],
+ );
+}