author     Hasini Gunasinghe <hasinitg@google.com>  2022-09-10 01:20:24 +0000
committer  Hasini Gunasinghe <hasinitg@google.com>  2022-10-03 23:03:42 +0000
commit     ccc9f2fcad36ede3902a2f2e4b667dd3eb6c2091 (patch)
tree       1f7f2d342cf2a0129e9ff319c64ef0d0674b38cb
parent     2af36d5ef11fef8197efd1785cf0ccb53509b31b (diff)
download   der-ccc9f2fcad36ede3902a2f2e4b667dd3eb6c2091.tar.gz

Import platform/external/rust/crates/der

Bug: 239549209
Test: N/A
Change-Id: Ia0e4b2cc1a9ee7b9bbbfae73c7498fda8e9f8924
-rw-r--r--  .cargo_vcs_info.json  6
-rw-r--r--  Android.bp  33
-rw-r--r--  CHANGELOG.md  327
-rw-r--r--  Cargo.toml  91
-rw-r--r--  Cargo.toml.orig  40
l---------  LICENSE  1
-rw-r--r--  LICENSE-APACHE  201
-rw-r--r--  METADATA  20
-rw-r--r--  MODULE_LICENSE_APACHE2  0
-rw-r--r--  OWNERS  1
-rw-r--r--  README.md  96
-rw-r--r--  cargo2android.json  9
-rw-r--r--  patches/std.diff  15
-rw-r--r--  src/arrayvec.rs  148
-rw-r--r--  src/asn1.rs  50
-rw-r--r--  src/asn1/any.rs  274
-rw-r--r--  src/asn1/bit_string.rs  493
-rw-r--r--  src/asn1/boolean.rs  93
-rw-r--r--  src/asn1/choice.rs  26
-rw-r--r--  src/asn1/context_specific.rs  354
-rw-r--r--  src/asn1/generalized_time.rs  348
-rw-r--r--  src/asn1/ia5_string.rs  150
-rw-r--r--  src/asn1/integer.rs  276
-rw-r--r--  src/asn1/integer/bigint.rs  150
-rw-r--r--  src/asn1/integer/int.rs  55
-rw-r--r--  src/asn1/integer/uint.rs  116
-rw-r--r--  src/asn1/null.rs  108
-rw-r--r--  src/asn1/octet_string.rs  179
-rw-r--r--  src/asn1/oid.rs  90
-rw-r--r--  src/asn1/optional.rs  66
-rw-r--r--  src/asn1/printable_string.rs  186
-rw-r--r--  src/asn1/real.rs  993
-rw-r--r--  src/asn1/sequence.rs  84
-rw-r--r--  src/asn1/sequence_of.rs  234
-rw-r--r--  src/asn1/set_of.rs  451
-rw-r--r--  src/asn1/utc_time.rs  215
-rw-r--r--  src/asn1/utf8_string.rs  227
-rw-r--r--  src/byte_slice.rs  116
-rw-r--r--  src/datetime.rs  423
-rw-r--r--  src/decode.rs  76
-rw-r--r--  src/document.rs  369
-rw-r--r--  src/encode.rs  133
-rw-r--r--  src/encode_ref.rs  71
-rw-r--r--  src/error.rs  366
-rw-r--r--  src/header.rs  60
-rw-r--r--  src/length.rs  375
-rw-r--r--  src/lib.rs  409
-rw-r--r--  src/ord.rs  71
-rw-r--r--  src/reader.rs  168
-rw-r--r--  src/reader/nested.rs  96
-rw-r--r--  src/reader/pem.rs  83
-rw-r--r--  src/reader/slice.rs  214
-rw-r--r--  src/str_slice.rs  79
-rw-r--r--  src/tag.rs  444
-rw-r--r--  src/tag/class.rs  50
-rw-r--r--  src/tag/mode.rs  45
-rw-r--r--  src/tag/number.rs  188
-rw-r--r--  src/writer.rs  30
-rw-r--r--  src/writer/pem.rs  42
-rw-r--r--  src/writer/slice.rs  149
-rw-r--r--  tests/datetime.proptest-regressions  8
-rw-r--r--  tests/datetime.rs  64
-rw-r--r--  tests/derive.rs  459
-rw-r--r--  tests/examples/spki.der  bin 0 -> 44 bytes
-rw-r--r--  tests/examples/spki.pem  3
-rw-r--r--  tests/pem.rs  67
-rw-r--r--  tests/set_of.rs  59
67 files changed, 10923 insertions, 0 deletions
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json
new file mode 100644
index 0000000..fd618ae
--- /dev/null
+++ b/.cargo_vcs_info.json
@@ -0,0 +1,6 @@
+{
+ "git": {
+ "sha1": "00f6c926a7aaed60441e17b93cd3e3fe5b93650e"
+ },
+ "path_in_vcs": "der"
+}
\ No newline at end of file
diff --git a/Android.bp b/Android.bp
new file mode 100644
index 0000000..a438f57
--- /dev/null
+++ b/Android.bp
@@ -0,0 +1,33 @@
+// This file is generated by cargo2android.py --config cargo2android.json.
+// Do not modify this file as changes will be overridden on upgrade.
+
+
+
+rust_library_host {
+ name: "libder",
+ crate_name: "der",
+ cargo_env_compat: true,
+ cargo_pkg_version: "0.6.0",
+ srcs: ["src/lib.rs"],
+ edition: "2021",
+ features: [
+ "alloc",
+ "const-oid",
+ "der_derive",
+ "derive",
+ "flagset",
+ "oid",
+ "zeroize",
+ ],
+ rustlibs: [
+ "libconst_oid",
+ "libflagset",
+ "libzeroize",
+ ],
+ proc_macros: ["libder_derive"],
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.virt",
+ ],
+ vendor_available: true,
+}
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..451056d
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,327 @@
+# Changelog
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## 0.6.0 (2022-05-08)
+### Added
+- Impl `ValueOrd` for `SetOf` and `SetOfVec` ([#362])
+- `SequenceRef` type ([#374])
+- Support for `SetOf` sorting on heapless `no_std` targets ([#401])
+- Support for mapping `BitString` to/from a `FlagSet` ([#412])
+- `DecodeOwned` marker trait ([#529])
+- Support for the ASN.1 `REAL` type ([#346])
+- `DecodePem` and `EncodePem` traits ([#571])
+- `Document` and `SecretDocument` types ([#571])
+- `EncodeRef`/`EncodeValueRef` wrapper types ([#604])
+- `Writer` trait ([#605])
+- `Reader` trait ([#606])
+- Streaming on-the-fly `PemReader` and `PemWriter` ([#618], [#636])
+- Owned `BitString` ([#636])
+- Owned `Any` and `OctetString` types ([#640])
+
+### Changed
+- Pass `Header` to `DecodeValue` ([#392])
+- Bump `const-oid` dependency to v0.9 ([#507])
+- Renamed `Decodable`/`Encodable` => `Decode`/`Encode` ([#523])
+- Enable arithmetic, casting, and panic `clippy` lints ([#556], [#579])
+- Use `&mut dyn Writer` as output for `Encode::encode` and `EncodeValue::encode_value` ([#611])
+- Bump `pem-rfc7468` dependency to v0.6 ([#620])
+- Use `Reader<'a>` as input for `Decode::decode` and `DecodeValue::decode_value` ([#633])
+- Renamed `Any` => `AnyRef` ([#637])
+- Renamed `BitString` => `BitStringRef` ([#637])
+- Renamed `Ia5String` => `Ia5StringRef` ([#637])
+- Renamed `OctetString` => `OctetStringRef` ([#637])
+- Renamed `PrintableString` => `PrintableStringRef` ([#637])
+- Renamed `Utf8String` => `Utf8StringRef` ([#637])
+- Renamed `UIntBytes` => `UIntRef` ([#637])
+- Renamed `Decoder` => `SliceReader` ([#651])
+- Renamed `Encoder` => `SliceWriter` ([#651])
+
+### Fixed
+- Handling of oversized unsigned `INTEGER` inputs ([#447])
+
+### Removed
+- `bigint` feature ([#344])
+- `OrdIsValueOrd` trait ([#359])
+- `Document` trait ([#571])
+- `OptionalRef` ([#604])
+- Decode-time SET OF ordering checks ([#625])
+
+[#344]: https://github.com/RustCrypto/formats/pull/344
+[#346]: https://github.com/RustCrypto/formats/pull/346
+[#359]: https://github.com/RustCrypto/formats/pull/359
+[#362]: https://github.com/RustCrypto/formats/pull/362
+[#374]: https://github.com/RustCrypto/formats/pull/374
+[#392]: https://github.com/RustCrypto/formats/pull/392
+[#401]: https://github.com/RustCrypto/formats/pull/401
+[#412]: https://github.com/RustCrypto/formats/pull/412
+[#447]: https://github.com/RustCrypto/formats/pull/447
+[#507]: https://github.com/RustCrypto/formats/pull/507
+[#523]: https://github.com/RustCrypto/formats/pull/523
+[#529]: https://github.com/RustCrypto/formats/pull/529
+[#556]: https://github.com/RustCrypto/formats/pull/556
+[#571]: https://github.com/RustCrypto/formats/pull/571
+[#579]: https://github.com/RustCrypto/formats/pull/579
+[#604]: https://github.com/RustCrypto/formats/pull/604
+[#605]: https://github.com/RustCrypto/formats/pull/605
+[#606]: https://github.com/RustCrypto/formats/pull/606
+[#611]: https://github.com/RustCrypto/formats/pull/611
+[#618]: https://github.com/RustCrypto/formats/pull/618
+[#620]: https://github.com/RustCrypto/formats/pull/620
+[#625]: https://github.com/RustCrypto/formats/pull/625
+[#633]: https://github.com/RustCrypto/formats/pull/633
+[#636]: https://github.com/RustCrypto/formats/pull/636
+[#637]: https://github.com/RustCrypto/formats/pull/637
+[#640]: https://github.com/RustCrypto/formats/pull/640
+[#651]: https://github.com/RustCrypto/formats/pull/651
+
+## 0.5.1 (2021-11-17)
+### Added
+- `Any::NULL` constant ([#226])
+
+[#226]: https://github.com/RustCrypto/formats/pull/226
+
+## 0.5.0 (2021-11-15) [YANKED]
+### Added
+- Support for `IMPLICIT` mode `CONTEXT-SPECIFIC` fields ([#61])
+- `DecodeValue`/`EncodeValue` traits ([#63])
+- Expose `DateTime` through public API ([#75])
+- `SEQUENCE OF` support for `[T; N]` ([#90])
+- `SequenceOf` type ([#95])
+- `SEQUENCE OF` support for `Vec` ([#96])
+- `Document` trait ([#117])
+- Basic integration with `time` crate ([#129])
+- `Tag::NumericString` ([#132])
+- Support for unused bits to `BitString` ([#141])
+- `Decoder::{peek_tag, peek_header}` ([#142])
+- Type hint in encoder `sequence` method ([#147])
+- `Tag::Enumerated` ([#153])
+- `ErrorKind::TagNumberInvalid` ([#156])
+- `Tag::VisibleString` and `Tag::BmpString` ([#160])
+- Inherent constants for all valid `TagNumber`s ([#165])
+- `DerOrd` and `ValueOrd` traits ([#190])
+- `ContextSpecificRef` type ([#199])
+
+### Changed
+- Make `ContextSpecific` generic around an inner type ([#60])
+- Removed `SetOf` trait; rename `SetOfArray` => `SetOf` ([#97])
+- Rename `Message` trait to `Sequence` ([#99])
+- Make `GeneralizedTime`/`UtcTime` into `DateTime` newtypes ([#102])
+- Rust 2021 edition upgrade; MSRV 1.56 ([#136])
+- Replace `ErrorKind::Truncated` with `ErrorKind::Incomplete` ([#143])
+- Rename `ErrorKind::UnknownTagMode` => `ErrorKind::TagModeUnknown` ([#155])
+- Rename `ErrorKind::UnexpectedTag` => `ErrorKind::TagUnexpected` ([#155])
+- Rename `ErrorKind::UnknownTag` => `ErrorKind::TagUnknown` ([#155])
+- Consolidate `ErrorKind::{Incomplete, Underlength}` ([#157])
+- Rename `Tagged` => `FixedTag`; add new `Tagged` trait ([#189])
+- Use `DerOrd` for `SetOf*` types ([#200])
+- Switch `impl From<BitString> for &[u8]` to `TryFrom` ([#203])
+- Bump `crypto-bigint` dependency to v0.3 ([#215])
+- Bump `const-oid` dependency to v0.7 ([#216])
+- Bump `pem-rfc7468` dependency to v0.3 ([#217])
+- Bump `der_derive` dependency to v0.5 ([#221])
+
+### Removed
+- `Sequence` struct ([#98])
+- `Tagged` bound on `ContextSpecific::decode_implicit` ([#161])
+- `ErrorKind::DuplicateField` ([#162])
+
+[#60]: https://github.com/RustCrypto/formats/pull/60
+[#61]: https://github.com/RustCrypto/formats/pull/61
+[#63]: https://github.com/RustCrypto/formats/pull/63
+[#75]: https://github.com/RustCrypto/formats/pull/75
+[#90]: https://github.com/RustCrypto/formats/pull/90
+[#95]: https://github.com/RustCrypto/formats/pull/95
+[#96]: https://github.com/RustCrypto/formats/pull/96
+[#97]: https://github.com/RustCrypto/formats/pull/97
+[#98]: https://github.com/RustCrypto/formats/pull/98
+[#99]: https://github.com/RustCrypto/formats/pull/99
+[#102]: https://github.com/RustCrypto/formats/pull/102
+[#117]: https://github.com/RustCrypto/formats/pull/117
+[#129]: https://github.com/RustCrypto/formats/pull/129
+[#132]: https://github.com/RustCrypto/formats/pull/132
+[#136]: https://github.com/RustCrypto/formats/pull/136
+[#141]: https://github.com/RustCrypto/formats/pull/141
+[#142]: https://github.com/RustCrypto/formats/pull/142
+[#143]: https://github.com/RustCrypto/formats/pull/143
+[#147]: https://github.com/RustCrypto/formats/pull/147
+[#153]: https://github.com/RustCrypto/formats/pull/153
+[#155]: https://github.com/RustCrypto/formats/pull/155
+[#156]: https://github.com/RustCrypto/formats/pull/156
+[#157]: https://github.com/RustCrypto/formats/pull/157
+[#160]: https://github.com/RustCrypto/formats/pull/160
+[#161]: https://github.com/RustCrypto/formats/pull/161
+[#162]: https://github.com/RustCrypto/formats/pull/162
+[#165]: https://github.com/RustCrypto/formats/pull/165
+[#189]: https://github.com/RustCrypto/formats/pull/189
+[#190]: https://github.com/RustCrypto/formats/pull/190
+[#199]: https://github.com/RustCrypto/formats/pull/199
+[#200]: https://github.com/RustCrypto/formats/pull/200
+[#203]: https://github.com/RustCrypto/formats/pull/203
+[#215]: https://github.com/RustCrypto/formats/pull/215
+[#216]: https://github.com/RustCrypto/formats/pull/216
+[#217]: https://github.com/RustCrypto/formats/pull/217
+[#221]: https://github.com/RustCrypto/formats/pull/221
+
+## 0.4.5 (2021-12-01)
+### Fixed
+- Backport [#147] type hint fix for WASM platforms to 0.4.x
+
+## 0.4.4 (2021-10-06)
+### Removed
+- Accidentally checked-in `target/` directory ([#66])
+
+[#66]: https://github.com/RustCrypto/formats/pull/66
+
+## 0.4.3 (2021-09-15)
+### Added
+- `Tag::unexpected_error` ([#33])
+
+[#33]: https://github.com/RustCrypto/formats/pull/33
+
+## 0.4.2 (2021-09-14)
+### Changed
+- Moved to `formats` repo ([#2])
+
+### Fixed
+- ASN.1 `SET` type now flagged with the constructed bit
+
+[#2]: https://github.com/RustCrypto/formats/pull/2
+
+## 0.4.1 (2021-08-08)
+### Fixed
+- Encoding `UTCTime` for dates with `20xx` years
+
+## 0.4.0 (2021-06-07)
+### Added
+- `TagNumber` type
+- Const generic integer de/encoders with support for all of Rust's integer
+ primitives
+- `crypto-bigint` support
+- `Tag` number helpers
+- `Tag::octet`
+- `ErrorKind::Value` helpers
+- `SequenceIter`
+
+### Changed
+- Bump `const-oid` crate dependency to v0.6
+- Make `Tag` structured
+- Namespace ASN.1 types in `asn1` module
+- Refactor context-specific field decoding
+- MSRV 1.51
+- Rename `big-uint` crate feature to `bigint`
+- Rename `BigUInt` to `UIntBytes`
+- Have `Decoder::error()` return an `Error`
+
+### Removed
+- Deprecated methods replaced by associated constants
+
+## 0.3.5 (2021-05-24)
+### Added
+- Helper methods for context-specific fields
+- `ContextSpecific` field wrapper
+- Decoder position tracking for errors during `Any<'a>` decoding
+
+### Fixed
+- `From` conversion for `BitString` into `Any`
+
+## 0.3.4 (2021-05-16)
+### Changed
+- Support `Length` of up to 1 MiB
+
+## 0.3.3 (2021-04-15)
+### Added
+- `Length` constants
+
+### Changed
+- Deprecate `const fn` methods replaced by `Length` constants
+
+## 0.3.2 (2021-04-15)
+### Fixed
+- Non-critical bug allowing `Length` to exceed the max invariant
+
+## 0.3.1 (2021-04-01) [YANKED]
+### Added
+- `PartialOrd` + `Ord` impls to all ASN.1 types
+
+## 0.3.0 (2021-03-22) [YANKED]
+### Added
+- Impl `Decode`/`Encoded`/`Tagged` for `String`
+- `Length::one` and `Length::for_tlv`
+- `SET OF` support with `SetOf` trait and `SetOfRef`
+
+### Changed
+- Rename `Decodable::from_bytes` => `Decodable::from_der`
+- Separate `sequence` and `message`
+- Rename `ErrorKind::Oid` => `ErrorKind::MalformedOid`
+- Auto-derive `From` impls for variants when deriving `Choice`
+- Make `Length` use `u32` internally
+- Make `Sequence` constructor private
+- Bump `const_oid` to v0.5
+- Bump `der_derive` to v0.3
+
+### Removed
+- Deprecated methods
+- `BigUIntSize`
+
+## 0.2.10 (2021-02-28)
+### Added
+- Impl `From<ObjectIdentifier>` for `Any`
+
+### Changed
+- Bump minimum `const-oid` dependency to v0.4.4
+
+## 0.2.9 (2021-02-24)
+### Added
+- Support for `IA5String`
+
+## 0.2.8 (2021-02-22)
+### Added
+- `Choice` trait
+
+## 0.2.7 (2021-02-20)
+### Added
+- Export `Header` publicly
+- Make `Encoder::reserve` public
+
+## 0.2.6 (2021-02-19)
+### Added
+- Make the unit type an encoding of `NULL`
+
+## 0.2.5 (2021-02-18)
+### Added
+- `ErrorKind::UnknownOid` variant
+
+## 0.2.4 (2021-02-16)
+### Added
+- `Any::is_null` method
+
+### Changed
+- Deprecate `Any::null` method
+
+## 0.2.3 (2021-02-15)
+### Added
+- Additional `rustdoc` documentation
+
+## 0.2.2 (2021-02-12)
+### Added
+- Support for `UTCTime` and `GeneralizedTime`
+
+## 0.2.1 (2021-02-02)
+### Added
+- Support for `PrintableString` and `Utf8String`
+
+## 0.2.0 (2021-01-22)
+### Added
+- `BigUInt` type
+- `i16` support
+- `u8` and `u16` support
+- Integer decoder helper methods
+
+### Fixed
+- Handle leading byte of `BIT STRING`s
+
+## 0.1.0 (2020-12-21)
+- Initial release
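
The 0.6.0 entries above are dominated by renames (`Decodable`/`Encodable` => `Decode`/`Encode`, `Any` => `AnyRef`, `Decoder` => `SliceReader`, and so on). As a hedged, illustrative sketch of what those renames look like at a call site — a hypothetical caller of the slice-based API, assuming the 0.6 crate as imported in this change and the `alloc` feature for `to_vec`:

    // Hypothetical call site updated for der 0.6; pre-rename 0.5 names are noted in comments.
    use der::{
        asn1::AnyRef,   // 0.5: der::asn1::Any
        Decode, Encode, // 0.5: der::{Decodable, Encodable}
        SliceReader,    // 0.5: der::Decoder
    };

    fn roundtrip(input: &[u8]) -> der::Result<Vec<u8>> {
        // `Decode::from_der` replaces `Decodable::from_der`.
        let any = AnyRef::from_der(input)?;

        // The renamed slice-based reader can also be driven manually.
        let mut reader = SliceReader::new(input)?;
        let _same = AnyRef::decode(&mut reader)?;

        // Re-encode with `Encode::to_vec` (requires the `alloc` feature).
        any.to_vec()
    }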
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000..f7822e0
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,91 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+rust-version = "1.57"
+name = "der"
+version = "0.6.0"
+authors = ["RustCrypto Developers"]
+description = """
+Pure Rust embedded-friendly implementation of the Distinguished Encoding Rules
+(DER) for Abstract Syntax Notation One (ASN.1) as described in ITU X.690 with
+full support for heapless no_std targets
+"""
+readme = "README.md"
+keywords = [
+ "asn1",
+ "crypto",
+ "itu",
+ "pkcs",
+]
+categories = [
+ "cryptography",
+ "data-structures",
+ "encoding",
+ "no-std",
+ "parser-implementations",
+]
+license = "Apache-2.0 OR MIT"
+repository = "https://github.com/RustCrypto/formats/tree/master/der"
+resolver = "2"
+
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = [
+ "--cfg",
+ "docsrs",
+]
+
+[dependencies.const-oid]
+version = "0.9"
+optional = true
+
+[dependencies.der_derive]
+version = "0.6"
+optional = true
+
+[dependencies.flagset]
+version = "0.4.3"
+optional = true
+
+[dependencies.pem-rfc7468]
+version = "0.6"
+optional = true
+
+[dependencies.time]
+version = "0.3.4"
+optional = true
+default-features = false
+
+[dependencies.zeroize]
+version = "1.5"
+features = ["alloc"]
+optional = true
+default-features = false
+
+[dev-dependencies.hex-literal]
+version = "0.3.3"
+
+[dev-dependencies.proptest]
+version = "1"
+
+[features]
+alloc = []
+derive = ["der_derive"]
+oid = ["const-oid"]
+pem = [
+ "alloc",
+ "pem-rfc7468/alloc",
+ "zeroize",
+]
+real = []
+std = ["alloc"]
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
new file mode 100644
index 0000000..7c34417
--- /dev/null
+++ b/Cargo.toml.orig
@@ -0,0 +1,40 @@
+[package]
+name = "der"
+version = "0.6.0"
+description = """
+Pure Rust embedded-friendly implementation of the Distinguished Encoding Rules
+(DER) for Abstract Syntax Notation One (ASN.1) as described in ITU X.690 with
+full support for heapless no_std targets
+"""
+authors = ["RustCrypto Developers"]
+license = "Apache-2.0 OR MIT"
+repository = "https://github.com/RustCrypto/formats/tree/master/der"
+categories = ["cryptography", "data-structures", "encoding", "no-std", "parser-implementations"]
+keywords = ["asn1", "crypto", "itu", "pkcs"]
+readme = "README.md"
+edition = "2021"
+rust-version = "1.57"
+
+[dependencies]
+const-oid = { version = "0.9", optional = true, path = "../const-oid" }
+der_derive = { version = "0.6", optional = true, path = "derive" }
+flagset = { version = "0.4.3", optional = true }
+pem-rfc7468 = { version = "0.6", optional = true, path = "../pem-rfc7468" }
+time = { version = "0.3.4", optional = true, default-features = false }
+zeroize = { version = "1.5", optional = true, default-features = false, features = ["alloc"] }
+
+[dev-dependencies]
+hex-literal = "0.3.3"
+proptest = "1"
+
+[features]
+alloc = []
+derive = ["der_derive"]
+oid = ["const-oid"]
+pem = ["alloc", "pem-rfc7468/alloc", "zeroize"]
+real = []
+std = ["alloc"]
+
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--cfg", "docsrs"]
diff --git a/LICENSE b/LICENSE
new file mode 120000
index 0000000..6b579aa
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1 @@
+LICENSE-APACHE
\ No newline at end of file
diff --git a/LICENSE-APACHE b/LICENSE-APACHE
new file mode 100644
index 0000000..78173fa
--- /dev/null
+++ b/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..427d79a
--- /dev/null
+++ b/METADATA
@@ -0,0 +1,20 @@
+name: "der"
+description: "Pure Rust embedded-friendly implementation of the Distinguished Encoding Rules (DER) for Abstract Syntax Notation One (ASN.1) as described in ITU X.690."
+third_party {
+ url {
+ type: HOMEPAGE
+ value: "https://crates.io/crates/der"
+ }
+ url {
+ type: ARCHIVE
+ value: "https://static.crates.io/crates/der/der-0.6.0.crate"
+ }
+ version: "0.6.0"
+ # Dual-licensed, using the least restrictive per go/thirdpartylicenses#same.
+ license_type: NOTICE
+ last_upgrade_date {
+ year: 2022
+ month: 8
+ day: 31
+ }
+}
diff --git a/MODULE_LICENSE_APACHE2 b/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/MODULE_LICENSE_APACHE2
diff --git a/OWNERS b/OWNERS
new file mode 100644
index 0000000..45dc4dd
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1 @@
+include platform/prebuilts/rust:master:/OWNERS
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..f7bb015
--- /dev/null
+++ b/README.md
@@ -0,0 +1,96 @@
+# [RustCrypto]: ASN.1 DER
+
+[![Crate][crate-image]][crate-link]
+[![Docs][docs-image]][docs-link]
+[![Build Status][build-image]][build-link]
+![Apache2/MIT licensed][license-image]
+![Rust Version][rustc-image]
+[![Project Chat][chat-image]][chat-link]
+
+Pure Rust embedded-friendly implementation of the Distinguished Encoding Rules (DER)
+for Abstract Syntax Notation One (ASN.1) as described in ITU X.690.
+
+[Documentation][docs-link]
+
+## About
+
+This crate provides a `no_std`-friendly implementation of a subset of ASN.1 DER
+necessary for decoding/encoding the following cryptography-related formats
+implemented as crates maintained by the [RustCrypto] project:
+
+- [`pkcs1`]: RSA Cryptography Specifications
+- [`pkcs5`]: Password-Based Cryptography Specification
+- [`pkcs7`]: Cryptographic Message Syntax
+- [`pkcs8`]: Private-Key Information Syntax Specification
+- [`pkcs10`]: Certification Request Syntax Specification
+- [`sec1`]: Elliptic Curve Cryptography
+- [`spki`]: X.509 Subject Public Key Info
+- [`x501`]: Directory Services Types
+- [`x509`]: Public Key Infrastructure Certificate
+
+The core implementation avoids any heap usage (with convenience methods
+that allocate gated under the off-by-default `alloc` feature).
+
+The DER decoder in this crate performs checks to ensure that the input document
+is in canonical form, and will return errors if non-canonical productions are
+encountered. There is currently no way to disable these checks.
+
+### Features
+
+- Rich support for ASN.1 types used by PKCS/PKIX documents
+- Performs DER canonicalization checks at decoding time
+- `no_std` friendly: supports "heapless" usage
+- Optionally supports `alloc` and `std` if desired
+- No hard dependencies! Self-contained implementation with optional
+ integrations with the following crates, all of which are `no_std` friendly:
+ - `const-oid`: const-friendly OID implementation
+ - `pem-rfc7468`: PKCS/PKIX-flavored PEM library with constant-time decoder/encoders
+ - `time` crate: date/time library
+
+## Minimum Supported Rust Version
+
+This crate requires **Rust 1.57** at a minimum.
+
+We may change the MSRV in the future, but it will be accompanied by a minor
+version bump.
+
+## License
+
+Licensed under either of:
+
+ * [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0)
+ * [MIT license](http://opensource.org/licenses/MIT)
+
+at your option.
+
+### Contribution
+
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in the work by you, as defined in the Apache-2.0 license, shall be
+dual licensed as above, without any additional terms or conditions.
+
+[//]: # (badges)
+
+[crate-image]: https://buildstats.info/crate/der
+[crate-link]: https://crates.io/crates/der
+[docs-image]: https://docs.rs/der/badge.svg
+[docs-link]: https://docs.rs/der/
+[build-image]: https://github.com/RustCrypto/formats/actions/workflows/der.yml/badge.svg
+[build-link]: https://github.com/RustCrypto/formats/actions/workflows/der.yml
+[license-image]: https://img.shields.io/badge/license-Apache2.0/MIT-blue.svg
+[rustc-image]: https://img.shields.io/badge/rustc-1.57+-blue.svg
+[chat-image]: https://img.shields.io/badge/zulip-join_chat-blue.svg
+[chat-link]: https://rustcrypto.zulipchat.com/#narrow/stream/300570-formats
+
+[//]: # (links)
+
+[RustCrypto]: https://github.com/rustcrypto
+[`pkcs1`]: https://github.com/RustCrypto/formats/tree/master/pkcs1
+[`pkcs5`]: https://github.com/RustCrypto/formats/tree/master/pkcs5
+[`pkcs7`]: https://github.com/RustCrypto/formats/tree/master/pkcs7
+[`pkcs8`]: https://github.com/RustCrypto/formats/tree/master/pkcs8
+[`pkcs10`]: https://github.com/RustCrypto/formats/tree/master/pkcs10
+[`sec1`]: https://github.com/RustCrypto/formats/tree/master/sec1
+[`spki`]: https://github.com/RustCrypto/formats/tree/master/spki
+[`x501`]: https://github.com/RustCrypto/formats/tree/master/x501
+[`x509`]: https://github.com/RustCrypto/formats/tree/master/x509
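
The README's feature list maps onto the optional dependencies declared in `Cargo.toml`: `derive` pulls in `der_derive`, `oid` pulls in `const-oid`, and `alloc` gates the heap-backed conveniences (all three are enabled in the `Android.bp` build above). As a rough sketch of intended usage under those features — modeled on the crate's own `AlgorithmIdentifier`-style documentation, so treat the exact field layout as illustrative — a PKIX `SEQUENCE` can be derived and round-tripped like this:

    use der::{
        asn1::{AnyRef, ObjectIdentifier},
        Decode, Encode, Sequence,
    };

    /// X.509-style `AlgorithmIdentifier` (illustrative field layout).
    #[derive(Sequence)]
    struct AlgorithmIdentifier<'a> {
        /// OBJECT IDENTIFIER naming the algorithm.
        algorithm: ObjectIdentifier,

        /// Optional, algorithm-specific parameters left as `ANY`.
        parameters: Option<AnyRef<'a>>,
    }

    fn reencode(der_bytes: &[u8]) -> der::Result<Vec<u8>> {
        // `from_der` performs the DER canonicalization checks described above.
        let alg = AlgorithmIdentifier::from_der(der_bytes)?;

        // `to_vec` re-encodes the value; it is gated on the `alloc` feature.
        alg.to_vec()
    }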
diff --git a/cargo2android.json b/cargo2android.json
new file mode 100644
index 0000000..f708fb4
--- /dev/null
+++ b/cargo2android.json
@@ -0,0 +1,9 @@
+{
+ "apex-available": [
+ "//apex_available:platform",
+ "com.android.virt"
+ ],
+ "run": true,
+ "vendor-available": true,
+ "features": "alloc,derive,flagset,oid,zeroize"
+}
diff --git a/patches/std.diff b/patches/std.diff
new file mode 100644
index 0000000..1cedb25
--- /dev/null
+++ b/patches/std.diff
@@ -0,0 +1,15 @@
+diff --git a/src/lib.rs b/src/lib.rs
+index b657ea1..f16857b 100644
+--- a/src/lib.rs
++++ b/src/lib.rs
+@@ -328,6 +328,10 @@
+ //! [`UtcTime`]: asn1::UtcTime
+ //! [`Utf8StringRef`]: asn1::Utf8StringRef
+
++/// Local Android change: Use std to allow building as a dylib.
++#[cfg(android_dylib)]
++extern crate std;
++
+ #[cfg(feature = "alloc")]
+ #[allow(unused_imports)]
+ #[macro_use]
diff --git a/src/arrayvec.rs b/src/arrayvec.rs
new file mode 100644
index 0000000..21f1341
--- /dev/null
+++ b/src/arrayvec.rs
@@ -0,0 +1,148 @@
+//! Array-backed append-only vector type.
+// TODO(tarcieri): use `core` impl of `ArrayVec`
+// See: https://github.com/rust-lang/rfcs/pull/2990
+
+use crate::{ErrorKind, Result};
+
+/// Array-backed append-only vector type.
+#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub(crate) struct ArrayVec<T, const N: usize> {
+ /// Elements of the set.
+ elements: [Option<T>; N],
+
+ /// Last populated element.
+ length: usize,
+}
+
+impl<T, const N: usize> ArrayVec<T, N> {
+ /// Create a new [`ArrayVec`].
+ pub fn new() -> Self {
+ Self {
+ elements: [(); N].map(|_| None),
+ length: 0,
+ }
+ }
+
+ /// Add an element to this [`ArrayVec`].
+ ///
+ /// Items MUST be added in lexicographical order according to the `Ord`
+ /// impl on `T`.
+ pub fn add(&mut self, element: T) -> Result<()> {
+ match self.length.checked_add(1) {
+ Some(n) if n <= N => {
+ self.elements[self.length] = Some(element);
+ self.length = n;
+ Ok(())
+ }
+ _ => Err(ErrorKind::Overlength.into()),
+ }
+ }
+
+ /// Get an element from this [`ArrayVec`].
+ pub fn get(&self, index: usize) -> Option<&T> {
+ match self.elements.get(index) {
+ Some(Some(ref item)) => Some(item),
+ _ => None,
+ }
+ }
+
+ /// Iterate over the elements in this [`ArrayVec`].
+ pub fn iter(&self) -> Iter<'_, T> {
+ Iter::new(&self.elements)
+ }
+
+ /// Is this [`ArrayVec`] empty?
+ pub fn is_empty(&self) -> bool {
+ self.length == 0
+ }
+
+ /// Get the number of elements in this [`ArrayVec`].
+ pub fn len(&self) -> usize {
+ self.length
+ }
+
+ /// Get the last item from this [`ArrayVec`].
+ pub fn last(&self) -> Option<&T> {
+ self.length.checked_sub(1).and_then(|n| self.get(n))
+ }
+
+ /// Extract the inner array.
+ pub fn into_array(self) -> [Option<T>; N] {
+ self.elements
+ }
+}
+
+impl<T, const N: usize> AsRef<[Option<T>]> for ArrayVec<T, N> {
+ fn as_ref(&self) -> &[Option<T>] {
+ &self.elements[..self.length]
+ }
+}
+
+impl<T, const N: usize> AsMut<[Option<T>]> for ArrayVec<T, N> {
+ fn as_mut(&mut self) -> &mut [Option<T>] {
+ &mut self.elements[..self.length]
+ }
+}
+
+impl<T, const N: usize> Default for ArrayVec<T, N> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+/// Iterator over the elements of an [`ArrayVec`].
+#[derive(Clone, Debug)]
+pub struct Iter<'a, T> {
+ /// Slice of elements being iterated over.
+ elements: &'a [Option<T>],
+
+ /// Position within the iterator.
+ position: usize,
+}
+
+impl<'a, T> Iter<'a, T> {
+ pub(crate) fn new(elements: &'a [Option<T>]) -> Self {
+ Self {
+ elements,
+ position: 0,
+ }
+ }
+}
+
+impl<'a, T> Iterator for Iter<'a, T> {
+ type Item = &'a T;
+
+ fn next(&mut self) -> Option<&'a T> {
+ match self.elements.get(self.position) {
+ Some(Some(res)) => {
+ self.position = self.position.checked_add(1)?;
+ Some(res)
+ }
+ _ => None,
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let len = self.elements.len().saturating_sub(self.position);
+ (len, Some(len))
+ }
+}
+
+impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
+
+#[cfg(test)]
+mod tests {
+ use super::ArrayVec;
+ use crate::ErrorKind;
+
+ #[test]
+ fn add() {
+ let mut vec = ArrayVec::<u8, 3>::new();
+ vec.add(1).unwrap();
+ vec.add(2).unwrap();
+ vec.add(3).unwrap();
+
+ assert_eq!(vec.add(4).err().unwrap(), ErrorKind::Overlength.into());
+ assert_eq!(vec.len(), 3);
+ }
+}
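
`ArrayVec` is `pub(crate)`, so the sketch below is crate-internal only; it restates the append-only semantics exercised by the `add` test above (fixed capacity `N`, in-order insertion, read access via `get`/`last`/`iter`) and is illustrative rather than part of the imported sources.

    // Crate-internal sketch; mirrors the imports used by the test module above.
    use super::ArrayVec;
    use crate::Result;

    fn append_only_semantics() -> Result<()> {
        let mut vec = ArrayVec::<u16, 2>::new();
        assert!(vec.is_empty());

        vec.add(10)?;
        vec.add(20)?;

        assert_eq!(vec.len(), 2);
        assert_eq!(vec.get(0), Some(&10));
        assert_eq!(vec.last(), Some(&20));
        assert_eq!(vec.iter().count(), 2);

        // Exceeding the capacity (N = 2) fails with `ErrorKind::Overlength`.
        assert!(vec.add(30).is_err());
        Ok(())
    }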
diff --git a/src/asn1.rs b/src/asn1.rs
new file mode 100644
index 0000000..0f7290c
--- /dev/null
+++ b/src/asn1.rs
@@ -0,0 +1,50 @@
+//! Module containing all of the various ASN.1 built-in types supported by
+//! this library.
+
+mod any;
+mod bit_string;
+mod boolean;
+mod choice;
+mod context_specific;
+mod generalized_time;
+mod ia5_string;
+mod integer;
+mod null;
+mod octet_string;
+#[cfg(feature = "oid")]
+mod oid;
+mod optional;
+mod printable_string;
+#[cfg(feature = "real")]
+mod real;
+mod sequence;
+mod sequence_of;
+mod set_of;
+mod utc_time;
+mod utf8_string;
+
+pub use self::{
+ any::AnyRef,
+ bit_string::{BitStringIter, BitStringRef},
+ choice::Choice,
+ context_specific::{ContextSpecific, ContextSpecificRef},
+ generalized_time::GeneralizedTime,
+ ia5_string::Ia5StringRef,
+ integer::bigint::UIntRef,
+ null::Null,
+ octet_string::OctetStringRef,
+ printable_string::PrintableStringRef,
+ sequence::{Sequence, SequenceRef},
+ sequence_of::{SequenceOf, SequenceOfIter},
+ set_of::{SetOf, SetOfIter},
+ utc_time::UtcTime,
+ utf8_string::Utf8StringRef,
+};
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+pub use self::{any::Any, bit_string::BitString, octet_string::OctetString, set_of::SetOfVec};
+
+#[cfg(feature = "oid")]
+#[cfg_attr(docsrs, doc(cfg(feature = "oid")))]
+pub use const_oid::ObjectIdentifier;
diff --git a/src/asn1/any.rs b/src/asn1/any.rs
new file mode 100644
index 0000000..717d666
--- /dev/null
+++ b/src/asn1/any.rs
@@ -0,0 +1,274 @@
+//! ASN.1 `ANY` type.
+
+use crate::{
+ asn1::*, ByteSlice, Choice, Decode, DecodeValue, DerOrd, EncodeValue, Error, ErrorKind,
+ FixedTag, Header, Length, Reader, Result, SliceReader, Tag, Tagged, ValueOrd, Writer,
+};
+use core::cmp::Ordering;
+
+#[cfg(feature = "alloc")]
+use alloc::vec::Vec;
+
+#[cfg(feature = "oid")]
+use crate::asn1::ObjectIdentifier;
+
+/// ASN.1 `ANY`: represents any explicitly tagged ASN.1 value.
+///
+/// This is a zero-copy reference type which borrows from the input data.
+///
+/// Technically `ANY` hasn't been a recommended part of ASN.1 since the X.209
+/// revision from 1988. It was deprecated and replaced by Information Object
+/// Classes in X.680 in 1994, and X.690 no longer refers to it whatsoever.
+///
+/// Nevertheless, this crate defines an `ANY` type as it remains a familiar
+/// and useful concept which is still extensively used in things like
+/// PKI-related RFCs.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub struct AnyRef<'a> {
+ /// Tag representing the type of the encoded value.
+ tag: Tag,
+
+ /// Inner value encoded as bytes.
+ value: ByteSlice<'a>,
+}
+
+impl<'a> AnyRef<'a> {
+ /// [`AnyRef`] representation of the ASN.1 `NULL` type.
+ pub const NULL: Self = Self {
+ tag: Tag::Null,
+ value: ByteSlice::EMPTY,
+ };
+
+ /// Create a new [`AnyRef`] from the provided [`Tag`] and DER bytes.
+ pub fn new(tag: Tag, bytes: &'a [u8]) -> Result<Self> {
+ let value = ByteSlice::new(bytes).map_err(|_| ErrorKind::Length { tag })?;
+ Ok(Self { tag, value })
+ }
+
+ /// Infallible creation of an [`AnyRef`] from a [`ByteSlice`].
+ pub(crate) fn from_tag_and_value(tag: Tag, value: ByteSlice<'a>) -> Self {
+ Self { tag, value }
+ }
+
+ /// Get the raw value for this [`AnyRef`] type as a byte slice.
+ pub fn value(self) -> &'a [u8] {
+ self.value.as_slice()
+ }
+
+ /// Attempt to decode this [`AnyRef`] type into the inner value.
+ pub fn decode_into<T>(self) -> Result<T>
+ where
+ T: DecodeValue<'a> + FixedTag,
+ {
+ self.tag.assert_eq(T::TAG)?;
+ let header = Header {
+ tag: self.tag,
+ length: self.value.len(),
+ };
+
+ let mut decoder = SliceReader::new(self.value())?;
+ let result = T::decode_value(&mut decoder, header)?;
+ decoder.finish(result)
+ }
+
+ /// Is this value an ASN.1 `NULL` value?
+ pub fn is_null(self) -> bool {
+ self == Self::NULL
+ }
+
+ /// Attempt to decode an ASN.1 `BIT STRING`.
+ pub fn bit_string(self) -> Result<BitStringRef<'a>> {
+ self.try_into()
+ }
+
+ /// Attempt to decode an ASN.1 `CONTEXT-SPECIFIC` field.
+ pub fn context_specific<T>(self) -> Result<ContextSpecific<T>>
+ where
+ T: Decode<'a>,
+ {
+ self.try_into()
+ }
+
+ /// Attempt to decode an ASN.1 `GeneralizedTime`.
+ pub fn generalized_time(self) -> Result<GeneralizedTime> {
+ self.try_into()
+ }
+
+ /// Attempt to decode an ASN.1 `IA5String`.
+ pub fn ia5_string(self) -> Result<Ia5StringRef<'a>> {
+ self.try_into()
+ }
+
+ /// Attempt to decode an ASN.1 `OCTET STRING`.
+ pub fn octet_string(self) -> Result<OctetStringRef<'a>> {
+ self.try_into()
+ }
+
+ /// Attempt to decode an ASN.1 `OBJECT IDENTIFIER`.
+ #[cfg(feature = "oid")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "oid")))]
+ pub fn oid(self) -> Result<ObjectIdentifier> {
+ self.try_into()
+ }
+
+ /// Attempt to decode an ASN.1 `OPTIONAL` value.
+ pub fn optional<T>(self) -> Result<Option<T>>
+ where
+ T: Choice<'a> + TryFrom<Self, Error = Error>,
+ {
+ if T::can_decode(self.tag) {
+ T::try_from(self).map(Some)
+ } else {
+ Ok(None)
+ }
+ }
+
+ /// Attempt to decode an ASN.1 `PrintableString`.
+ pub fn printable_string(self) -> Result<PrintableStringRef<'a>> {
+ self.try_into()
+ }
+
+ /// Attempt to decode this value as an ASN.1 `SEQUENCE`, creating a new
+ /// nested reader and calling the provided closure with it.
+ pub fn sequence<F, T>(self, f: F) -> Result<T>
+ where
+ F: FnOnce(&mut SliceReader<'a>) -> Result<T>,
+ {
+ self.tag.assert_eq(Tag::Sequence)?;
+ let mut reader = SliceReader::new(self.value.as_slice())?;
+ let result = f(&mut reader)?;
+ reader.finish(result)
+ }
+
+ /// Attempt to decode an ASN.1 `UTCTime`.
+ pub fn utc_time(self) -> Result<UtcTime> {
+ self.try_into()
+ }
+
+ /// Attempt to decode an ASN.1 `UTF8String`.
+ pub fn utf8_string(self) -> Result<Utf8StringRef<'a>> {
+ self.try_into()
+ }
+}
+
+impl<'a> Choice<'a> for AnyRef<'a> {
+ fn can_decode(_: Tag) -> bool {
+ true
+ }
+}
+
+impl<'a> Decode<'a> for AnyRef<'a> {
+ fn decode<R: Reader<'a>>(reader: &mut R) -> Result<AnyRef<'a>> {
+ let header = Header::decode(reader)?;
+
+ Ok(Self {
+ tag: header.tag,
+ value: ByteSlice::decode_value(reader, header)?,
+ })
+ }
+}
+
+impl EncodeValue for AnyRef<'_> {
+ fn value_len(&self) -> Result<Length> {
+ Ok(self.value.len())
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ writer.write(self.value())
+ }
+}
+
+impl Tagged for AnyRef<'_> {
+ fn tag(&self) -> Tag {
+ self.tag
+ }
+}
+
+impl ValueOrd for AnyRef<'_> {
+ fn value_cmp(&self, other: &Self) -> Result<Ordering> {
+ self.value.der_cmp(&other.value)
+ }
+}
+
+impl<'a> From<AnyRef<'a>> for ByteSlice<'a> {
+ fn from(any: AnyRef<'a>) -> ByteSlice<'a> {
+ any.value
+ }
+}
+
+impl<'a> TryFrom<&'a [u8]> for AnyRef<'a> {
+ type Error = Error;
+
+ fn try_from(bytes: &'a [u8]) -> Result<AnyRef<'a>> {
+ AnyRef::from_der(bytes)
+ }
+}
+
+/// ASN.1 `ANY`: represents any explicitly tagged ASN.1 value.
+///
+/// This type provides the same functionality as [`AnyRef`] but owns the
+/// backing data.
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub struct Any {
+ /// Tag representing the type of the encoded value.
+ tag: Tag,
+
+ /// Inner value encoded as bytes.
+ value: Vec<u8>,
+}
+
+#[cfg(feature = "alloc")]
+impl Any {
+ /// Create a new [`Any`] from the provided [`Tag`] and DER bytes.
+ pub fn new(tag: Tag, bytes: impl Into<Vec<u8>>) -> Result<Self> {
+ let value = bytes.into();
+
+ // Ensure the tag and value are a valid `AnyRef`.
+ AnyRef::new(tag, &value)?;
+ Ok(Self { tag, value })
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl Choice<'_> for Any {
+ fn can_decode(_: Tag) -> bool {
+ true
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl<'a> Decode<'a> for Any {
+ fn decode<R: Reader<'a>>(reader: &mut R) -> Result<Self> {
+ let header = Header::decode(reader)?;
+ let value = reader.read_vec(header.length)?;
+ Self::new(header.tag, value)
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl EncodeValue for Any {
+ fn value_len(&self) -> Result<Length> {
+ self.value.len().try_into()
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ writer.write(&self.value)
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl<'a> From<&'a Any> for AnyRef<'a> {
+ fn from(any: &'a Any) -> AnyRef<'a> {
+ // Ensured to parse successfully in constructor
+ AnyRef::new(any.tag, &any.value).expect("invalid ANY")
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl Tagged for Any {
+ fn tag(&self) -> Tag {
+ self.tag
+ }
+}
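
A short, hedged sketch of how `AnyRef` is meant to be used, relying only on the methods defined above (`from_der` via `Decode`, `tag()` via `Tagged`, and the per-type conversion helpers) plus the `BitStringRef` type from the following file; the tag dispatch is illustrative, not a prescribed pattern:

    use der::{
        asn1::{AnyRef, BitStringRef},
        Decode, Tag, Tagged,
    };

    fn inspect(der_bytes: &[u8]) -> der::Result<()> {
        // `AnyRef` borrows the input; nothing is copied or allocated.
        let any = AnyRef::from_der(der_bytes)?;

        match any.tag() {
            // `NULL` has an empty value.
            Tag::Null => assert!(any.is_null()),

            // Convert into a concrete type once the tag is known.
            Tag::BitString => {
                let bits: BitStringRef<'_> = any.bit_string()?;
                let _raw = bits.raw_bytes();
            }

            // The raw, undecoded value bytes are always available.
            _ => {
                let _value: &[u8] = any.value();
            }
        }

        Ok(())
    }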
diff --git a/src/asn1/bit_string.rs b/src/asn1/bit_string.rs
new file mode 100644
index 0000000..7eea5a5
--- /dev/null
+++ b/src/asn1/bit_string.rs
@@ -0,0 +1,493 @@
+//! ASN.1 `BIT STRING` support.
+
+use crate::{
+ asn1::AnyRef, ByteSlice, DecodeValue, DerOrd, EncodeValue, Error, ErrorKind, FixedTag, Header,
+ Length, Reader, Result, Tag, ValueOrd, Writer,
+};
+use core::{cmp::Ordering, iter::FusedIterator};
+
+#[cfg(feature = "alloc")]
+use alloc::vec::Vec;
+
+/// ASN.1 `BIT STRING` type.
+///
+/// This type contains a sequence of any number of bits, modeled internally as
+/// a sequence of bytes with a known number of "unused bits".
+///
+/// This is a zero-copy reference type which borrows from the input data.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub struct BitStringRef<'a> {
+ /// Number of unused bits in the final octet.
+ unused_bits: u8,
+
+ /// Length of this `BIT STRING` in bits.
+ bit_length: usize,
+
+ /// Bitstring represented as a slice of bytes.
+ inner: ByteSlice<'a>,
+}
+
+impl<'a> BitStringRef<'a> {
+ /// Maximum number of unused bits allowed.
+ pub const MAX_UNUSED_BITS: u8 = 7;
+
+ /// Create a new ASN.1 `BIT STRING` from a byte slice.
+ ///
+ /// Accepts an optional number of "unused bits" (0-7) which are omitted
+ /// from the final octet. This number is 0 if the value is octet-aligned.
+ pub fn new(unused_bits: u8, bytes: &'a [u8]) -> Result<Self> {
+ if (unused_bits > Self::MAX_UNUSED_BITS) || (unused_bits != 0 && bytes.is_empty()) {
+ return Err(Self::TAG.value_error());
+ }
+
+ let inner = ByteSlice::new(bytes).map_err(|_| Self::TAG.length_error())?;
+
+ let bit_length = usize::try_from(inner.len())?
+ .checked_mul(8)
+ .and_then(|n| n.checked_sub(usize::from(unused_bits)))
+ .ok_or(ErrorKind::Overflow)?;
+
+ Ok(Self {
+ unused_bits,
+ bit_length,
+ inner,
+ })
+ }
+
+ /// Create a new ASN.1 `BIT STRING` from the given bytes.
+ ///
+ /// The "unused bits" are set to 0.
+ pub fn from_bytes(bytes: &'a [u8]) -> Result<Self> {
+ Self::new(0, bytes)
+ }
+
+ /// Get the number of unused bits in this byte slice.
+ pub fn unused_bits(&self) -> u8 {
+ self.unused_bits
+ }
+
+ /// Is the number of unused bits a value other than 0?
+ pub fn has_unused_bits(&self) -> bool {
+ self.unused_bits != 0
+ }
+
+ /// Get the length of this `BIT STRING` in bits.
+ pub fn bit_len(&self) -> usize {
+ self.bit_length
+ }
+
+ /// Get the number of bytes/octets needed to represent this `BIT STRING`
+ /// when serialized in an octet-aligned manner.
+ pub fn byte_len(&self) -> Length {
+ self.inner.len()
+ }
+
+ /// Is the inner byte slice empty?
+ pub fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+
+ /// Borrow the inner byte slice.
+ ///
+ /// Returns `None` if the number of unused bits is *not* equal to zero,
+ /// i.e. if the `BIT STRING` is not octet aligned.
+ ///
+ /// Use [`BitString::raw_bytes`] to obtain access to the raw value
+ /// regardless of the presence of unused bits.
+ pub fn as_bytes(&self) -> Option<&'a [u8]> {
+ if self.has_unused_bits() {
+ None
+ } else {
+ Some(self.raw_bytes())
+ }
+ }
+
+ /// Borrow the raw bytes of this `BIT STRING`.
+ ///
+ /// Note that the byte string may contain extra unused bits in the final
+ /// octet. If the number of unused bits is expected to be 0, the
+ /// [`BitStringRef::as_bytes`] function can be used instead.
+ pub fn raw_bytes(&self) -> &'a [u8] {
+ self.inner.as_slice()
+ }
+
+ /// Iterator over the bits of this `BIT STRING`.
+ pub fn bits(self) -> BitStringIter<'a> {
+ BitStringIter {
+ bit_string: self,
+ position: 0,
+ }
+ }
+}
+
+impl<'a> DecodeValue<'a> for BitStringRef<'a> {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ let header = Header {
+ tag: header.tag,
+ length: (header.length - Length::ONE)?,
+ };
+
+ let unused_bits = reader.read_byte()?;
+ let inner = ByteSlice::decode_value(reader, header)?;
+ Self::new(unused_bits, inner.as_slice())
+ }
+}
+
+impl EncodeValue for BitStringRef<'_> {
+ fn value_len(&self) -> Result<Length> {
+ self.byte_len() + Length::ONE
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ writer.write_byte(self.unused_bits)?;
+ writer.write(self.raw_bytes())
+ }
+}
+
+impl ValueOrd for BitStringRef<'_> {
+ fn value_cmp(&self, other: &Self) -> Result<Ordering> {
+ match self.unused_bits.cmp(&other.unused_bits) {
+ Ordering::Equal => self.inner.der_cmp(&other.inner),
+ ordering => Ok(ordering),
+ }
+ }
+}
+
+impl<'a> From<&BitStringRef<'a>> for BitStringRef<'a> {
+ fn from(value: &BitStringRef<'a>) -> BitStringRef<'a> {
+ *value
+ }
+}
+
+impl<'a> TryFrom<AnyRef<'a>> for BitStringRef<'a> {
+ type Error = Error;
+
+ fn try_from(any: AnyRef<'a>) -> Result<BitStringRef<'a>> {
+ any.decode_into()
+ }
+}
+
+impl<'a> TryFrom<&'a [u8]> for BitStringRef<'a> {
+ type Error = Error;
+
+ fn try_from(bytes: &'a [u8]) -> Result<BitStringRef<'a>> {
+ BitStringRef::from_bytes(bytes)
+ }
+}
+
+/// Hack for simplifying the custom derive use case.
+impl<'a> TryFrom<&&'a [u8]> for BitStringRef<'a> {
+ type Error = Error;
+
+ fn try_from(bytes: &&'a [u8]) -> Result<BitStringRef<'a>> {
+ BitStringRef::from_bytes(*bytes)
+ }
+}
+
+impl<'a> TryFrom<BitStringRef<'a>> for &'a [u8] {
+ type Error = Error;
+
+ fn try_from(bit_string: BitStringRef<'a>) -> Result<&'a [u8]> {
+ bit_string
+ .as_bytes()
+ .ok_or_else(|| Tag::BitString.value_error())
+ }
+}
+
+impl<'a> FixedTag for BitStringRef<'a> {
+ const TAG: Tag = Tag::BitString;
+}
+
+/// Owned form of ASN.1 `BIT STRING` type.
+///
+/// This type provides the same functionality as [`BitStringRef`] but owns the
+/// backing data.
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub struct BitString {
+ /// Number of unused bits in the final octet.
+ unused_bits: u8,
+
+ /// Length of this `BIT STRING` in bits.
+ bit_length: usize,
+
+ /// Bitstring represented as a slice of bytes.
+ inner: Vec<u8>,
+}
+
+#[cfg(feature = "alloc")]
+impl BitString {
+ /// Maximum number of unused bits allowed.
+ pub const MAX_UNUSED_BITS: u8 = 7;
+
+ /// Create a new ASN.1 `BIT STRING` from a byte slice.
+ ///
+ /// Accepts an optional number of "unused bits" (0-7) which are omitted
+ /// from the final octet. This number is 0 if the value is octet-aligned.
+ pub fn new(unused_bits: u8, bytes: impl Into<Vec<u8>>) -> Result<Self> {
+ let inner = bytes.into();
+
+ // Ensure parameters parse successfully as a `BitStringRef`.
+ let bit_length = BitStringRef::new(unused_bits, &inner)?.bit_length;
+
+ Ok(BitString {
+ unused_bits,
+ bit_length,
+ inner,
+ })
+ }
+
+ /// Create a new ASN.1 `BIT STRING` from the given bytes.
+ ///
+ /// The "unused bits" are set to 0.
+ pub fn from_bytes(bytes: &[u8]) -> Result<Self> {
+ Self::new(0, bytes)
+ }
+
+ /// Get the number of unused bits in the octet serialization of this
+ /// `BIT STRING`.
+ pub fn unused_bits(&self) -> u8 {
+ self.unused_bits
+ }
+
+ /// Is the number of unused bits a value other than 0?
+ pub fn has_unused_bits(&self) -> bool {
+ self.unused_bits != 0
+ }
+
+ /// Get the length of this `BIT STRING` in bits.
+ pub fn bit_len(&self) -> usize {
+ self.bit_length
+ }
+
+ /// Is the inner byte slice empty?
+ pub fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+
+ /// Borrow the inner byte slice.
+ ///
+ /// Returns `None` if the number of unused bits is *not* equal to zero,
+ /// i.e. if the `BIT STRING` is not octet aligned.
+ ///
+ /// Use [`BitString::raw_bytes`] to obtain access to the raw value
+ /// regardless of the presence of unused bits.
+ pub fn as_bytes(&self) -> Option<&[u8]> {
+ if self.has_unused_bits() {
+ None
+ } else {
+ Some(self.raw_bytes())
+ }
+ }
+
+ /// Borrow the raw bytes of this `BIT STRING`.
+ pub fn raw_bytes(&self) -> &[u8] {
+ self.inner.as_slice()
+ }
+
+ /// Iterator over the bits of this `BIT STRING`.
+ pub fn bits(&self) -> BitStringIter<'_> {
+ BitStringRef::from(self).bits()
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl<'a> DecodeValue<'a> for BitString {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ let inner_len = (header.length - Length::ONE)?;
+ let unused_bits = reader.read_byte()?;
+ let inner = reader.read_vec(inner_len)?;
+ Self::new(unused_bits, inner)
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl EncodeValue for BitString {
+ fn value_len(&self) -> Result<Length> {
+ Length::ONE + Length::try_from(self.inner.len())?
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ writer.write_byte(self.unused_bits)?;
+ writer.write(&self.inner)
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl FixedTag for BitString {
+ const TAG: Tag = Tag::BitString;
+}
+
+#[cfg(feature = "alloc")]
+impl<'a> From<&'a BitString> for BitStringRef<'a> {
+ fn from(bit_string: &'a BitString) -> BitStringRef<'a> {
+ // Ensured to parse successfully in constructor
+ BitStringRef::new(bit_string.unused_bits, &bit_string.inner).expect("invalid BIT STRING")
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl ValueOrd for BitString {
+ fn value_cmp(&self, other: &Self) -> Result<Ordering> {
+ match self.unused_bits.cmp(&other.unused_bits) {
+ Ordering::Equal => self.inner.der_cmp(&other.inner),
+ ordering => Ok(ordering),
+ }
+ }
+}
+
+/// Iterator over the bits of a [`BitString`].
+pub struct BitStringIter<'a> {
+ /// [`BitStringRef`] being iterated over.
+ bit_string: BitStringRef<'a>,
+
+ /// Current bit position within the iterator.
+ position: usize,
+}
+
+impl<'a> Iterator for BitStringIter<'a> {
+ type Item = bool;
+
+ #[allow(clippy::integer_arithmetic)]
+ fn next(&mut self) -> Option<bool> {
+ if self.position >= self.bit_string.bit_len() {
+ return None;
+ }
+
+ let byte = self.bit_string.raw_bytes().get(self.position / 8)?;
+ let bit = 1u8 << (7 - (self.position % 8));
+ self.position = self.position.checked_add(1)?;
+ Some(byte & bit != 0)
+ }
+}
+
+impl<'a> ExactSizeIterator for BitStringIter<'a> {
+ fn len(&self) -> usize {
+ self.bit_string.bit_len()
+ }
+}
+
+impl<'a> FusedIterator for BitStringIter<'a> {}
+
+#[cfg(feature = "flagset")]
+impl<T: flagset::Flags> FixedTag for flagset::FlagSet<T> {
+ const TAG: Tag = BitStringRef::TAG;
+}
+
+#[cfg(feature = "flagset")]
+#[allow(clippy::integer_arithmetic)]
+impl<'a, T> DecodeValue<'a> for flagset::FlagSet<T>
+where
+ T: flagset::Flags,
+ T::Type: From<bool>,
+ T::Type: core::ops::Shl<usize, Output = T::Type>,
+{
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ let position = reader.position();
+ let bits = BitStringRef::decode_value(reader, header)?;
+
+ let mut flags = T::none().bits();
+
+ if bits.bit_len() > core::mem::size_of_val(&flags) * 8 {
+ return Err(Error::new(ErrorKind::Overlength, position));
+ }
+
+ for (i, bit) in bits.bits().enumerate() {
+ flags |= T::Type::from(bit) << i;
+ }
+
+ Ok(Self::new_truncated(flags))
+ }
+}
+
+#[cfg(feature = "flagset")]
+#[allow(clippy::integer_arithmetic)]
+#[inline(always)]
+fn encode_flagset<T>(set: &flagset::FlagSet<T>) -> (usize, [u8; 16])
+where
+ T: flagset::Flags,
+ u128: From<T::Type>,
+{
+ let bits: u128 = set.bits().into();
+ let mut swap = 0u128;
+
+ for i in 0..128 {
+ let on = bits & (1 << i);
+ swap |= on >> i << (128 - i - 1);
+ }
+
+ (bits.leading_zeros() as usize, swap.to_be_bytes())
+}
+
+#[cfg(feature = "flagset")]
+#[allow(clippy::cast_possible_truncation, clippy::integer_arithmetic)]
+impl<T: flagset::Flags> EncodeValue for flagset::FlagSet<T>
+where
+ T::Type: From<bool>,
+ T::Type: core::ops::Shl<usize, Output = T::Type>,
+ u128: From<T::Type>,
+{
+ fn value_len(&self) -> Result<Length> {
+ let (lead, buff) = encode_flagset(self);
+ let buff = &buff[..buff.len() - lead / 8];
+ BitStringRef::new((lead % 8) as u8, buff)?.value_len()
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ let (lead, buff) = encode_flagset(self);
+ let buff = &buff[..buff.len() - lead / 8];
+ BitStringRef::new((lead % 8) as u8, buff)?.encode_value(writer)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::{BitStringRef, Result, Tag};
+ use crate::asn1::AnyRef;
+ use hex_literal::hex;
+
+ /// Parse a `BitStringRef` from an ASN.1 `AnyRef` value to test decoding behavior.
+ fn parse_bitstring(bytes: &[u8]) -> Result<BitStringRef<'_>> {
+ AnyRef::new(Tag::BitString, bytes)?.try_into()
+ }
+
+ #[test]
+ fn decode_empty_bitstring() {
+ let bs = parse_bitstring(&hex!("00")).unwrap();
+ assert_eq!(bs.as_bytes().unwrap(), &[]);
+ }
+
+ #[test]
+ fn decode_non_empty_bitstring() {
+ let bs = parse_bitstring(&hex!("00010203")).unwrap();
+ assert_eq!(bs.as_bytes().unwrap(), &[0x01, 0x02, 0x03]);
+ }
+
+ #[test]
+ fn decode_bitstring_with_unused_bits() {
+ let bs = parse_bitstring(&hex!("066e5dc0")).unwrap();
+ assert_eq!(bs.unused_bits(), 6);
+ assert_eq!(bs.raw_bytes(), &hex!("6e5dc0"));
+
+ // Expected: 011011100101110111
+ let mut bits = bs.bits();
+ assert_eq!(bits.len(), 18);
+
+ for bit in [0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1] {
+ assert_eq!(bits.next().unwrap() as u8, bit)
+ }
+
+ // Ensure `None` is returned on successive calls
+ assert_eq!(bits.next(), None);
+ assert_eq!(bits.next(), None);
+ }
+
+ #[test]
+ fn reject_unused_bits_in_empty_string() {
+ assert_eq!(
+ parse_bitstring(&[0x03]).err().unwrap().kind(),
+ Tag::BitString.value_error().kind()
+ )
+ }
+}
diff --git a/src/asn1/boolean.rs b/src/asn1/boolean.rs
new file mode 100644
index 0000000..e032181
--- /dev/null
+++ b/src/asn1/boolean.rs
@@ -0,0 +1,93 @@
+//! ASN.1 `BOOLEAN` support.
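+//!
+//! A round-trip sketch of the single-octet encoding described below:
+//!
+//! ```
+//! use der::{Decode, Encode};
+//!
+//! let mut buf = [0u8; 3];
+//! assert_eq!(&[0x01, 0x01, 0xFF], true.encode_to_slice(&mut buf).unwrap());
+//! assert_eq!(false, bool::from_der(&[0x01, 0x01, 0x00]).unwrap());
+//! ```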
+
+use crate::{
+ asn1::AnyRef, ord::OrdIsValueOrd, ByteSlice, DecodeValue, EncodeValue, Error, ErrorKind,
+ FixedTag, Header, Length, Reader, Result, Tag, Writer,
+};
+
+/// Byte used to encode `true` in ASN.1 DER. From X.690 Section 11.1:
+///
+/// > If the encoding represents the boolean value TRUE, its single contents
+/// > octet shall have all eight bits set to one.
+const TRUE_OCTET: u8 = 0b11111111;
+
+/// Byte used to encode `false` in ASN.1 DER.
+const FALSE_OCTET: u8 = 0b00000000;
+
+impl<'a> DecodeValue<'a> for bool {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ if header.length != Length::ONE {
+ return Err(reader.error(ErrorKind::Length { tag: Self::TAG }));
+ }
+
+ match reader.read_byte()? {
+ FALSE_OCTET => Ok(false),
+ TRUE_OCTET => Ok(true),
+ _ => Err(Self::TAG.non_canonical_error()),
+ }
+ }
+}
+
+impl EncodeValue for bool {
+ fn value_len(&self) -> Result<Length> {
+ Ok(Length::ONE)
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ writer.write_byte(if *self { TRUE_OCTET } else { FALSE_OCTET })
+ }
+}
+
+impl FixedTag for bool {
+ const TAG: Tag = Tag::Boolean;
+}
+
+impl OrdIsValueOrd for bool {}
+
+impl From<bool> for AnyRef<'static> {
+ fn from(value: bool) -> AnyRef<'static> {
+ let value = ByteSlice::from(match value {
+ false => &[FALSE_OCTET],
+ true => &[TRUE_OCTET],
+ });
+
+ AnyRef::from_tag_and_value(Tag::Boolean, value)
+ }
+}
+
+impl TryFrom<AnyRef<'_>> for bool {
+ type Error = Error;
+
+ fn try_from(any: AnyRef<'_>) -> Result<bool> {
+ any.try_into()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{Decode, Encode};
+
+ #[test]
+ fn decode() {
+ assert_eq!(true, bool::from_der(&[0x01, 0x01, 0xFF]).unwrap());
+ assert_eq!(false, bool::from_der(&[0x01, 0x01, 0x00]).unwrap());
+ }
+
+ #[test]
+ fn encode() {
+ let mut buffer = [0u8; 3];
+ assert_eq!(
+ &[0x01, 0x01, 0xFF],
+ true.encode_to_slice(&mut buffer).unwrap()
+ );
+ assert_eq!(
+ &[0x01, 0x01, 0x00],
+ false.encode_to_slice(&mut buffer).unwrap()
+ );
+ }
+
+ #[test]
+ fn reject_non_canonical() {
+ assert!(bool::from_der(&[0x01, 0x01, 0x01]).is_err());
+ }
+}
diff --git a/src/asn1/choice.rs b/src/asn1/choice.rs
new file mode 100644
index 0000000..40c7720
--- /dev/null
+++ b/src/asn1/choice.rs
@@ -0,0 +1,26 @@
+//! ASN.1 `CHOICE` support.
+
+use crate::{Decode, FixedTag, Tag, Tagged};
+
+/// ASN.1 `CHOICE` denotes a union of one or more possible alternatives.
+///
+/// The types MUST have distinct tags.
+///
+/// This crate models choice as a trait, with a blanket impl for all types
+/// which impl `Decode + FixedTag` (i.e. they are modeled as a `CHOICE`
+/// with only one possible variant).
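+///
+/// A sketch of a hand-written two-variant `CHOICE` (the `MyString` enum and its
+/// `Decode`/`Tagged` impls are hypothetical and not part of this crate):
+///
+/// ```ignore
+/// impl<'a> Choice<'a> for MyString<'a> {
+///     fn can_decode(tag: Tag) -> bool {
+///         matches!(tag, Tag::Utf8String | Tag::PrintableString)
+///     }
+/// }
+/// ```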
+pub trait Choice<'a>: Decode<'a> + Tagged {
+ /// Is the provided [`Tag`] decodable as a variant of this `CHOICE`?
+ fn can_decode(tag: Tag) -> bool;
+}
+
+/// This blanket impl allows any [`Tagged`] type to function as a [`Choice`]
+/// with a single alternative.
+impl<'a, T> Choice<'a> for T
+where
+ T: Decode<'a> + FixedTag,
+{
+ fn can_decode(tag: Tag) -> bool {
+ T::TAG == tag
+ }
+}
diff --git a/src/asn1/context_specific.rs b/src/asn1/context_specific.rs
new file mode 100644
index 0000000..311b5fe
--- /dev/null
+++ b/src/asn1/context_specific.rs
@@ -0,0 +1,354 @@
+//! Context-specific field.
+
+use crate::{
+ asn1::AnyRef, Choice, Decode, DecodeValue, DerOrd, Encode, EncodeValue, EncodeValueRef, Error,
+ Header, Length, Reader, Result, Tag, TagMode, TagNumber, Tagged, ValueOrd, Writer,
+};
+use core::cmp::Ordering;
+
+/// Context-specific field which wraps an owned inner value.
+///
+/// This type decodes/encodes a field which is specific to a particular context
+/// and is identified by a [`TagNumber`].
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub struct ContextSpecific<T> {
+ /// Context-specific tag number sans the leading `0b10000000` class
+ /// identifier bit and `0b100000` constructed flag.
+ pub tag_number: TagNumber,
+
+ /// Tag mode: `EXPLICIT` vs. `IMPLICIT`.
+ pub tag_mode: TagMode,
+
+ /// Value of the field.
+ pub value: T,
+}
+
+impl<T> ContextSpecific<T> {
+ /// Attempt to decode an `EXPLICIT` ASN.1 `CONTEXT-SPECIFIC` field with the
+ /// provided [`TagNumber`].
+ ///
+ /// This method has the following behavior which is designed to simplify
+ /// handling of extension fields, which are denoted in an ASN.1 schema
+ /// using the `...` ellipsis extension marker (see the example below):
+ ///
+ /// - Skips over [`ContextSpecific`] fields with a tag number lower than
+ /// the current one, consuming and ignoring them.
+ /// - Returns `Ok(None)` if a [`ContextSpecific`] field with a higher tag
+ /// number is encountered. These fields are not consumed in this case,
+ /// allowing a field with a lower tag number to be omitted, then the
+ /// higher numbered field consumed as a follow-up.
+ /// - Returns `Ok(None)` if anything other than a [`ContextSpecific`] field
+ /// is encountered.
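+ ///
+ /// A decoding sketch for an `[0] EXPLICIT INTEGER` field:
+ ///
+ /// ```
+ /// use der::{asn1::ContextSpecific, SliceReader, TagNumber};
+ ///
+ /// let mut reader = SliceReader::new(&[0xA0, 0x03, 0x02, 0x01, 0x2A]).unwrap();
+ /// let field = ContextSpecific::<u8>::decode_explicit(&mut reader, TagNumber::new(0))
+ ///     .unwrap()
+ ///     .unwrap();
+ /// assert_eq!(field.value, 42);
+ /// ```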
+ pub fn decode_explicit<'a, R: Reader<'a>>(
+ reader: &mut R,
+ tag_number: TagNumber,
+ ) -> Result<Option<Self>>
+ where
+ T: Decode<'a>,
+ {
+ Self::decode_with(reader, tag_number, |reader| Self::decode(reader))
+ }
+
+ /// Attempt to decode an `IMPLICIT` ASN.1 `CONTEXT-SPECIFIC` field with the
+ /// provided [`TagNumber`].
+ ///
+ /// This method otherwise behaves the same as `decode_explicit`,
+ /// but should be used in cases where the particular fields are `IMPLICIT`
+ /// as opposed to `EXPLICIT`.
+ pub fn decode_implicit<'a, R: Reader<'a>>(
+ reader: &mut R,
+ tag_number: TagNumber,
+ ) -> Result<Option<Self>>
+ where
+ T: DecodeValue<'a> + Tagged,
+ {
+ Self::decode_with(reader, tag_number, |reader| {
+ let header = Header::decode(reader)?;
+ let value = T::decode_value(reader, header)?;
+
+ if header.tag.is_constructed() != value.tag().is_constructed() {
+ return Err(header.tag.non_canonical_error());
+ }
+
+ Ok(Self {
+ tag_number,
+ tag_mode: TagMode::Implicit,
+ value,
+ })
+ })
+ }
+
+ /// Attempt to decode a context-specific field with the given
+ /// helper callback.
+ fn decode_with<'a, F, R: Reader<'a>>(
+ reader: &mut R,
+ tag_number: TagNumber,
+ f: F,
+ ) -> Result<Option<Self>>
+ where
+ F: FnOnce(&mut R) -> Result<Self>,
+ {
+ while let Some(octet) = reader.peek_byte() {
+ let tag = Tag::try_from(octet)?;
+
+ if !tag.is_context_specific() || (tag.number() > tag_number) {
+ break;
+ } else if tag.number() == tag_number {
+ return Some(f(reader)).transpose();
+ } else {
+ AnyRef::decode(reader)?;
+ }
+ }
+
+ Ok(None)
+ }
+}
+
+impl<'a, T> Choice<'a> for ContextSpecific<T>
+where
+ T: Decode<'a> + Tagged,
+{
+ fn can_decode(tag: Tag) -> bool {
+ tag.is_context_specific()
+ }
+}
+
+impl<'a, T> Decode<'a> for ContextSpecific<T>
+where
+ T: Decode<'a>,
+{
+ fn decode<R: Reader<'a>>(reader: &mut R) -> Result<Self> {
+ let header = Header::decode(reader)?;
+
+ match header.tag {
+ Tag::ContextSpecific {
+ number,
+ constructed: true,
+ } => Ok(Self {
+ tag_number: number,
+ tag_mode: TagMode::default(),
+ value: reader.read_nested(header.length, |reader| T::decode(reader))?,
+ }),
+ tag => Err(tag.unexpected_error(None)),
+ }
+ }
+}
+
+impl<T> EncodeValue for ContextSpecific<T>
+where
+ T: EncodeValue + Tagged,
+{
+ fn value_len(&self) -> Result<Length> {
+ match self.tag_mode {
+ TagMode::Explicit => self.value.encoded_len(),
+ TagMode::Implicit => self.value.value_len(),
+ }
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ match self.tag_mode {
+ TagMode::Explicit => self.value.encode(writer),
+ TagMode::Implicit => self.value.encode_value(writer),
+ }
+ }
+}
+
+impl<T> Tagged for ContextSpecific<T>
+where
+ T: Tagged,
+{
+ fn tag(&self) -> Tag {
+ let constructed = match self.tag_mode {
+ TagMode::Explicit => true,
+ TagMode::Implicit => self.value.tag().is_constructed(),
+ };
+
+ Tag::ContextSpecific {
+ number: self.tag_number,
+ constructed,
+ }
+ }
+}
+
+impl<'a, T> TryFrom<AnyRef<'a>> for ContextSpecific<T>
+where
+ T: Decode<'a>,
+{
+ type Error = Error;
+
+ fn try_from(any: AnyRef<'a>) -> Result<ContextSpecific<T>> {
+ match any.tag() {
+ Tag::ContextSpecific {
+ number,
+ constructed: true,
+ } => Ok(Self {
+ tag_number: number,
+ tag_mode: TagMode::default(),
+ value: T::from_der(any.value())?,
+ }),
+ tag => Err(tag.unexpected_error(None)),
+ }
+ }
+}
+
+impl<T> ValueOrd for ContextSpecific<T>
+where
+ T: EncodeValue + ValueOrd + Tagged,
+{
+ fn value_cmp(&self, other: &Self) -> Result<Ordering> {
+ match self.tag_mode {
+ TagMode::Explicit => self.der_cmp(other),
+ TagMode::Implicit => self.value_cmp(other),
+ }
+ }
+}
+
+/// Context-specific field reference.
+///
+/// This type encodes a field which is specific to a particular context
+/// and is identified by a [`TagNumber`].
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub struct ContextSpecificRef<'a, T> {
+ /// Context-specific tag number sans the leading `0b10000000` class
+ /// identifier bit and `0b100000` constructed flag.
+ pub tag_number: TagNumber,
+
+ /// Tag mode: `EXPLICIT` vs. `IMPLICIT`.
+ pub tag_mode: TagMode,
+
+ /// Value of the field.
+ pub value: &'a T,
+}
+
+impl<'a, T> ContextSpecificRef<'a, T> {
+ /// Convert to a [`ContextSpecific`].
+ fn encoder(&self) -> ContextSpecific<EncodeValueRef<'a, T>> {
+ ContextSpecific {
+ tag_number: self.tag_number,
+ tag_mode: self.tag_mode,
+ value: EncodeValueRef(self.value),
+ }
+ }
+}
+
+impl<'a, T> EncodeValue for ContextSpecificRef<'a, T>
+where
+ T: EncodeValue + Tagged,
+{
+ fn value_len(&self) -> Result<Length> {
+ self.encoder().value_len()
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ self.encoder().encode_value(writer)
+ }
+}
+
+impl<'a, T> Tagged for ContextSpecificRef<'a, T>
+where
+ T: Tagged,
+{
+ fn tag(&self) -> Tag {
+ self.encoder().tag()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::ContextSpecific;
+ use crate::{asn1::BitStringRef, Decode, Encode, SliceReader, TagMode, TagNumber};
+ use hex_literal::hex;
+
+ // Public key data from `pkcs8` crate's `ed25519-pkcs8-v2.der`
+ const EXAMPLE_BYTES: &[u8] =
+ &hex!("A123032100A3A7EAE3A8373830BC47E1167BC50E1DB551999651E0E2DC587623438EAC3F31");
+
+ #[test]
+ fn round_trip() {
+ let field = ContextSpecific::<BitStringRef<'_>>::from_der(EXAMPLE_BYTES).unwrap();
+ assert_eq!(field.tag_number.value(), 1);
+ assert_eq!(
+ field.value,
+ BitStringRef::from_bytes(&EXAMPLE_BYTES[5..]).unwrap()
+ );
+
+ let mut buf = [0u8; 128];
+ let encoded = field.encode_to_slice(&mut buf).unwrap();
+ assert_eq!(encoded, EXAMPLE_BYTES);
+ }
+
+ #[test]
+ fn context_specific_with_explicit_field() {
+ let tag_number = TagNumber::new(0);
+
+ // Empty message
+ let mut reader = SliceReader::new(&[]).unwrap();
+ assert_eq!(
+ ContextSpecific::<u8>::decode_explicit(&mut reader, tag_number).unwrap(),
+ None
+ );
+
+ // Message containing a non-context-specific type
+ let mut reader = SliceReader::new(&hex!("020100")).unwrap();
+ assert_eq!(
+ ContextSpecific::<u8>::decode_explicit(&mut reader, tag_number).unwrap(),
+ None
+ );
+
+ // Message containing an EXPLICIT context-specific field
+ let mut reader = SliceReader::new(&hex!("A003020100")).unwrap();
+ let field = ContextSpecific::<u8>::decode_explicit(&mut reader, tag_number)
+ .unwrap()
+ .unwrap();
+
+ assert_eq!(field.tag_number, tag_number);
+ assert_eq!(field.tag_mode, TagMode::Explicit);
+ assert_eq!(field.value, 0);
+ }
+
+ #[test]
+ fn context_specific_with_implicit_field() {
+ // From RFC8410 Section 10.3:
+ // <https://datatracker.ietf.org/doc/html/rfc8410#section-10.3>
+ //
+ // 81 33: [1] 00 19 BF 44 09 69 84 CD FE 85 41 BA C1 67 DC 3B
+ // 96 C8 50 86 AA 30 B6 B6 CB 0C 5C 38 AD 70 31 66
+ // E1
+ let context_specific_implicit_bytes =
+ hex!("81210019BF44096984CDFE8541BAC167DC3B96C85086AA30B6B6CB0C5C38AD703166E1");
+
+ let tag_number = TagNumber::new(1);
+
+ let mut reader = SliceReader::new(&context_specific_implicit_bytes).unwrap();
+ let field = ContextSpecific::<BitStringRef<'_>>::decode_implicit(&mut reader, tag_number)
+ .unwrap()
+ .unwrap();
+
+ assert_eq!(field.tag_number, tag_number);
+ assert_eq!(field.tag_mode, TagMode::Implicit);
+ assert_eq!(
+ field.value.as_bytes().unwrap(),
+ &context_specific_implicit_bytes[3..]
+ );
+ }
+
+ #[test]
+ fn context_specific_skipping_unknown_field() {
+ let tag = TagNumber::new(1);
+ let mut reader = SliceReader::new(&hex!("A003020100A103020101")).unwrap();
+ let field = ContextSpecific::<u8>::decode_explicit(&mut reader, tag)
+ .unwrap()
+ .unwrap();
+ assert_eq!(field.value, 1);
+ }
+
+ #[test]
+ fn context_specific_returns_none_on_greater_tag_number() {
+ let tag = TagNumber::new(0);
+ let mut reader = SliceReader::new(&hex!("A103020101")).unwrap();
+ assert_eq!(
+ ContextSpecific::<u8>::decode_explicit(&mut reader, tag).unwrap(),
+ None
+ );
+ }
+}
diff --git a/src/asn1/generalized_time.rs b/src/asn1/generalized_time.rs
new file mode 100644
index 0000000..9950e36
--- /dev/null
+++ b/src/asn1/generalized_time.rs
@@ -0,0 +1,348 @@
+//! ASN.1 `GeneralizedTime` support.
+
+use crate::{
+ asn1::AnyRef,
+ datetime::{self, DateTime},
+ ord::OrdIsValueOrd,
+ DecodeValue, EncodeValue, Error, ErrorKind, FixedTag, Header, Length, Reader, Result, Tag,
+ Writer,
+};
+use core::time::Duration;
+
+#[cfg(feature = "std")]
+use std::time::SystemTime;
+
+#[cfg(feature = "time")]
+use time::PrimitiveDateTime;
+
+/// ASN.1 `GeneralizedTime` type.
+///
+/// This type implements the validity requirements specified in
+/// [RFC 5280 Section 4.1.2.5.2][1], namely:
+///
+/// > For the purposes of this profile, GeneralizedTime values MUST be
+/// > expressed in Greenwich Mean Time (Zulu) and MUST include seconds
+/// > (i.e., times are `YYYYMMDDHHMMSSZ`), even where the number of seconds
+/// > is zero. GeneralizedTime values MUST NOT include fractional seconds.
+///
+/// [1]: https://tools.ietf.org/html/rfc5280#section-4.1.2.5.2
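+///
+/// A decoding sketch using the DER encoding of `19910506234540Z`:
+///
+/// ```
+/// use der::{asn1::GeneralizedTime, Decode};
+///
+/// let der_bytes = [
+///     0x18, 0x0f, 0x31, 0x39, 0x39, 0x31, 0x30, 0x35, 0x30, 0x36,
+///     0x32, 0x33, 0x34, 0x35, 0x34, 0x30, 0x5a,
+/// ];
+/// let time = GeneralizedTime::from_der(&der_bytes).unwrap();
+/// assert_eq!(time.to_unix_duration().as_secs(), 673_573_540);
+/// ```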
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub struct GeneralizedTime(DateTime);
+
+impl GeneralizedTime {
+ /// Length of an RFC 5280-flavored ASN.1 DER-encoded [`GeneralizedTime`].
+ const LENGTH: usize = 15;
+
+ /// Create a [`GeneralizedTime`] from a [`DateTime`].
+ pub fn from_date_time(datetime: DateTime) -> Self {
+ Self(datetime)
+ }
+
+ /// Convert this [`GeneralizedTime`] into a [`DateTime`].
+ pub fn to_date_time(&self) -> DateTime {
+ self.0
+ }
+
+ /// Create a new [`GeneralizedTime`] given a [`Duration`] since `UNIX_EPOCH`
+ /// (a.k.a. "Unix time")
+ pub fn from_unix_duration(unix_duration: Duration) -> Result<Self> {
+ DateTime::from_unix_duration(unix_duration)
+ .map(Into::into)
+ .map_err(|_| Self::TAG.value_error())
+ }
+
+ /// Get the duration of this timestamp since `UNIX_EPOCH`.
+ pub fn to_unix_duration(&self) -> Duration {
+ self.0.unix_duration()
+ }
+
+ /// Instantiate from [`SystemTime`].
+ #[cfg(feature = "std")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+ pub fn from_system_time(time: SystemTime) -> Result<Self> {
+ DateTime::try_from(time)
+ .map(Into::into)
+ .map_err(|_| Self::TAG.value_error())
+ }
+
+ /// Convert to [`SystemTime`].
+ #[cfg(feature = "std")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+ pub fn to_system_time(&self) -> SystemTime {
+ self.0.to_system_time()
+ }
+}
+
+impl<'a> DecodeValue<'a> for GeneralizedTime {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ if Self::LENGTH != usize::try_from(header.length)? {
+ return Err(Self::TAG.value_error());
+ }
+
+ let mut bytes = [0u8; Self::LENGTH];
+ reader.read_into(&mut bytes)?;
+
+ match bytes {
+ // RFC 5280 requires mandatory seconds and Z-normalized time zone
+ [y1, y2, y3, y4, mon1, mon2, day1, day2, hour1, hour2, min1, min2, sec1, sec2, b'Z'] => {
+ let year = u16::from(datetime::decode_decimal(Self::TAG, y1, y2)?)
+ .checked_mul(100)
+ .and_then(|y| {
+ y.checked_add(datetime::decode_decimal(Self::TAG, y3, y4).ok()?.into())
+ })
+ .ok_or(ErrorKind::DateTime)?;
+ let month = datetime::decode_decimal(Self::TAG, mon1, mon2)?;
+ let day = datetime::decode_decimal(Self::TAG, day1, day2)?;
+ let hour = datetime::decode_decimal(Self::TAG, hour1, hour2)?;
+ let minute = datetime::decode_decimal(Self::TAG, min1, min2)?;
+ let second = datetime::decode_decimal(Self::TAG, sec1, sec2)?;
+
+ DateTime::new(year, month, day, hour, minute, second)
+ .map_err(|_| Self::TAG.value_error())
+ .and_then(|dt| Self::from_unix_duration(dt.unix_duration()))
+ }
+ _ => Err(Self::TAG.value_error()),
+ }
+ }
+}
+
+impl EncodeValue for GeneralizedTime {
+ fn value_len(&self) -> Result<Length> {
+ Self::LENGTH.try_into()
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ let year_hi = u8::try_from(self.0.year() / 100)?;
+ let year_lo = u8::try_from(self.0.year() % 100)?;
+
+ datetime::encode_decimal(writer, Self::TAG, year_hi)?;
+ datetime::encode_decimal(writer, Self::TAG, year_lo)?;
+ datetime::encode_decimal(writer, Self::TAG, self.0.month())?;
+ datetime::encode_decimal(writer, Self::TAG, self.0.day())?;
+ datetime::encode_decimal(writer, Self::TAG, self.0.hour())?;
+ datetime::encode_decimal(writer, Self::TAG, self.0.minutes())?;
+ datetime::encode_decimal(writer, Self::TAG, self.0.seconds())?;
+ writer.write_byte(b'Z')
+ }
+}
+
+impl FixedTag for GeneralizedTime {
+ const TAG: Tag = Tag::GeneralizedTime;
+}
+
+impl OrdIsValueOrd for GeneralizedTime {}
+
+impl From<&GeneralizedTime> for GeneralizedTime {
+ fn from(value: &GeneralizedTime) -> GeneralizedTime {
+ *value
+ }
+}
+
+impl From<GeneralizedTime> for DateTime {
+ fn from(utc_time: GeneralizedTime) -> DateTime {
+ utc_time.0
+ }
+}
+
+impl From<&GeneralizedTime> for DateTime {
+ fn from(utc_time: &GeneralizedTime) -> DateTime {
+ utc_time.0
+ }
+}
+
+impl From<DateTime> for GeneralizedTime {
+ fn from(datetime: DateTime) -> Self {
+ Self::from_date_time(datetime)
+ }
+}
+
+impl From<&DateTime> for GeneralizedTime {
+ fn from(datetime: &DateTime) -> Self {
+ Self::from_date_time(*datetime)
+ }
+}
+
+impl TryFrom<AnyRef<'_>> for GeneralizedTime {
+ type Error = Error;
+
+ fn try_from(any: AnyRef<'_>) -> Result<GeneralizedTime> {
+ any.decode_into()
+ }
+}
+
+impl<'a> DecodeValue<'a> for DateTime {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ Ok(GeneralizedTime::decode_value(reader, header)?.into())
+ }
+}
+
+impl EncodeValue for DateTime {
+ fn value_len(&self) -> Result<Length> {
+ GeneralizedTime::from(self).value_len()
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ GeneralizedTime::from(self).encode_value(writer)
+ }
+}
+
+impl FixedTag for DateTime {
+ const TAG: Tag = Tag::GeneralizedTime;
+}
+
+impl OrdIsValueOrd for DateTime {}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+impl<'a> DecodeValue<'a> for SystemTime {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ Ok(GeneralizedTime::decode_value(reader, header)?.into())
+ }
+}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+impl EncodeValue for SystemTime {
+ fn value_len(&self) -> Result<Length> {
+ GeneralizedTime::try_from(self)?.value_len()
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ GeneralizedTime::try_from(self)?.encode_value(writer)
+ }
+}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+impl From<GeneralizedTime> for SystemTime {
+ fn from(time: GeneralizedTime) -> SystemTime {
+ time.to_system_time()
+ }
+}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+impl From<&GeneralizedTime> for SystemTime {
+ fn from(time: &GeneralizedTime) -> SystemTime {
+ time.to_system_time()
+ }
+}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+impl TryFrom<SystemTime> for GeneralizedTime {
+ type Error = Error;
+
+ fn try_from(time: SystemTime) -> Result<GeneralizedTime> {
+ GeneralizedTime::from_system_time(time)
+ }
+}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+impl TryFrom<&SystemTime> for GeneralizedTime {
+ type Error = Error;
+
+ fn try_from(time: &SystemTime) -> Result<GeneralizedTime> {
+ GeneralizedTime::from_system_time(*time)
+ }
+}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+impl<'a> TryFrom<AnyRef<'a>> for SystemTime {
+ type Error = Error;
+
+ fn try_from(any: AnyRef<'a>) -> Result<SystemTime> {
+ GeneralizedTime::try_from(any).map(|s| s.to_system_time())
+ }
+}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+impl FixedTag for SystemTime {
+ const TAG: Tag = Tag::GeneralizedTime;
+}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+impl OrdIsValueOrd for SystemTime {}
+
+#[cfg(feature = "time")]
+#[cfg_attr(docsrs, doc(cfg(feature = "time")))]
+impl<'a> DecodeValue<'a> for PrimitiveDateTime {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ GeneralizedTime::decode_value(reader, header)?.try_into()
+ }
+}
+
+#[cfg(feature = "time")]
+#[cfg_attr(docsrs, doc(cfg(feature = "time")))]
+impl EncodeValue for PrimitiveDateTime {
+ fn value_len(&self) -> Result<Length> {
+ GeneralizedTime::try_from(self)?.value_len()
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ GeneralizedTime::try_from(self)?.encode_value(writer)
+ }
+}
+
+#[cfg(feature = "time")]
+#[cfg_attr(docsrs, doc(cfg(feature = "time")))]
+impl FixedTag for PrimitiveDateTime {
+ const TAG: Tag = Tag::GeneralizedTime;
+}
+
+#[cfg(feature = "time")]
+#[cfg_attr(docsrs, doc(cfg(feature = "time")))]
+impl OrdIsValueOrd for PrimitiveDateTime {}
+
+#[cfg(feature = "time")]
+#[cfg_attr(docsrs, doc(cfg(feature = "time")))]
+impl TryFrom<PrimitiveDateTime> for GeneralizedTime {
+ type Error = Error;
+
+ fn try_from(time: PrimitiveDateTime) -> Result<GeneralizedTime> {
+ Ok(GeneralizedTime::from_date_time(DateTime::try_from(time)?))
+ }
+}
+
+#[cfg(feature = "time")]
+#[cfg_attr(docsrs, doc(cfg(feature = "time")))]
+impl TryFrom<&PrimitiveDateTime> for GeneralizedTime {
+ type Error = Error;
+
+ fn try_from(time: &PrimitiveDateTime) -> Result<GeneralizedTime> {
+ Self::try_from(*time)
+ }
+}
+
+#[cfg(feature = "time")]
+#[cfg_attr(docsrs, doc(cfg(feature = "time")))]
+impl TryFrom<GeneralizedTime> for PrimitiveDateTime {
+ type Error = Error;
+
+ fn try_from(time: GeneralizedTime) -> Result<PrimitiveDateTime> {
+ time.to_date_time().try_into()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::GeneralizedTime;
+ use crate::{Decode, Encode, SliceWriter};
+ use hex_literal::hex;
+
+ #[test]
+ fn round_trip() {
+ let example_bytes = hex!("18 0f 31 39 39 31 30 35 30 36 32 33 34 35 34 30 5a");
+ let utc_time = GeneralizedTime::from_der(&example_bytes).unwrap();
+ assert_eq!(utc_time.to_unix_duration().as_secs(), 673573540);
+
+ let mut buf = [0u8; 128];
+ let mut encoder = SliceWriter::new(&mut buf);
+ utc_time.encode(&mut encoder).unwrap();
+ assert_eq!(example_bytes, encoder.finish().unwrap());
+ }
+}
diff --git a/src/asn1/ia5_string.rs b/src/asn1/ia5_string.rs
new file mode 100644
index 0000000..c1dbfaa
--- /dev/null
+++ b/src/asn1/ia5_string.rs
@@ -0,0 +1,150 @@
+//! ASN.1 `IA5String` support.
+
+use crate::{
+ asn1::AnyRef, ord::OrdIsValueOrd, ByteSlice, DecodeValue, EncodeValue, Error, FixedTag, Header,
+ Length, Reader, Result, StrSlice, Tag, Writer,
+};
+use core::{fmt, str};
+
+/// ASN.1 `IA5String` type.
+///
+/// Supports the [International Alphabet No. 5 (IA5)] character encoding, i.e.
+/// the lower 128 characters of the ASCII alphabet. (Note: IA5 is now
+/// technically known as the International Reference Alphabet or IRA as
+/// specified in the ITU-T's T.50 recommendation).
+///
+/// For UTF-8, use [`Utf8StringRef`][`crate::asn1::Utf8StringRef`].
+///
+/// This is a zero-copy reference type which borrows from the input data.
+///
+/// [International Alphabet No. 5 (IA5)]: https://en.wikipedia.org/wiki/T.50_%28standard%29
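+///
+/// A construction sketch:
+///
+/// ```
+/// use der::asn1::Ia5StringRef;
+///
+/// let email = Ia5StringRef::new("test1@rsa.com").unwrap();
+/// assert_eq!(email.as_str(), "test1@rsa.com");
+/// ```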
+#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord)]
+pub struct Ia5StringRef<'a> {
+ /// Inner value
+ inner: StrSlice<'a>,
+}
+
+impl<'a> Ia5StringRef<'a> {
+ /// Create a new `IA5String`.
+ pub fn new<T>(input: &'a T) -> Result<Self>
+ where
+ T: AsRef<[u8]> + ?Sized,
+ {
+ let input = input.as_ref();
+
+ // Validate all characters are within IA5String's allowed set
+ if input.iter().any(|&c| c > 0x7F) {
+ return Err(Self::TAG.value_error());
+ }
+
+ StrSlice::from_bytes(input)
+ .map(|inner| Self { inner })
+ .map_err(|_| Self::TAG.value_error())
+ }
+
+ /// Borrow the string as a `str`.
+ pub fn as_str(&self) -> &'a str {
+ self.inner.as_str()
+ }
+
+ /// Borrow the string as bytes.
+ pub fn as_bytes(&self) -> &'a [u8] {
+ self.inner.as_bytes()
+ }
+
+ /// Get the length of the inner byte slice.
+ pub fn len(&self) -> Length {
+ self.inner.len()
+ }
+
+ /// Is the inner string empty?
+ pub fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+}
+
+impl AsRef<str> for Ia5StringRef<'_> {
+ fn as_ref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl AsRef<[u8]> for Ia5StringRef<'_> {
+ fn as_ref(&self) -> &[u8] {
+ self.as_bytes()
+ }
+}
+
+impl<'a> DecodeValue<'a> for Ia5StringRef<'a> {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ Self::new(ByteSlice::decode_value(reader, header)?.as_slice())
+ }
+}
+
+impl EncodeValue for Ia5StringRef<'_> {
+ fn value_len(&self) -> Result<Length> {
+ self.inner.value_len()
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ self.inner.encode_value(writer)
+ }
+}
+
+impl<'a> FixedTag for Ia5StringRef<'a> {
+ const TAG: Tag = Tag::Ia5String;
+}
+
+impl OrdIsValueOrd for Ia5StringRef<'_> {}
+
+impl<'a> From<&Ia5StringRef<'a>> for Ia5StringRef<'a> {
+ fn from(value: &Ia5StringRef<'a>) -> Ia5StringRef<'a> {
+ *value
+ }
+}
+
+impl<'a> TryFrom<AnyRef<'a>> for Ia5StringRef<'a> {
+ type Error = Error;
+
+ fn try_from(any: AnyRef<'a>) -> Result<Ia5StringRef<'a>> {
+ any.decode_into()
+ }
+}
+
+impl<'a> From<Ia5StringRef<'a>> for AnyRef<'a> {
+ fn from(ia5_string: Ia5StringRef<'a>) -> AnyRef<'a> {
+ AnyRef::from_tag_and_value(Tag::Ia5String, ia5_string.inner.into())
+ }
+}
+
+impl<'a> From<Ia5StringRef<'a>> for &'a [u8] {
+ fn from(ia5_string: Ia5StringRef<'a>) -> &'a [u8] {
+ ia5_string.as_bytes()
+ }
+}
+
+impl<'a> fmt::Display for Ia5StringRef<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(self.as_str())
+ }
+}
+
+impl<'a> fmt::Debug for Ia5StringRef<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "Ia5String({:?})", self.as_str())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::Ia5StringRef;
+ use crate::Decode;
+ use hex_literal::hex;
+
+ #[test]
+ fn parse_bytes() {
+ let example_bytes = hex!("16 0d 74 65 73 74 31 40 72 73 61 2e 63 6f 6d");
+ let ia5_string = Ia5StringRef::from_der(&example_bytes).unwrap();
+ assert_eq!(ia5_string.as_str(), "test1@rsa.com");
+ }
+}
diff --git a/src/asn1/integer.rs b/src/asn1/integer.rs
new file mode 100644
index 0000000..20e2f01
--- /dev/null
+++ b/src/asn1/integer.rs
@@ -0,0 +1,276 @@
+//! ASN.1 `INTEGER` support.
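+//!
+//! A round-trip sketch for a primitive integer type:
+//!
+//! ```
+//! use der::{Decode, Encode};
+//!
+//! let mut buf = [0u8; 4];
+//! assert_eq!(&[0x02, 0x02, 0x00, 0x80], 128i16.encode_to_slice(&mut buf).unwrap());
+//! assert_eq!(128i16, i16::from_der(&[0x02, 0x02, 0x00, 0x80]).unwrap());
+//! ```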
+
+pub(super) mod bigint;
+pub(super) mod int;
+pub(super) mod uint;
+
+use crate::{
+ asn1::AnyRef, ByteSlice, DecodeValue, EncodeValue, Error, FixedTag, Header, Length, Reader,
+ Result, SliceWriter, Tag, ValueOrd, Writer,
+};
+use core::{cmp::Ordering, mem};
+
+macro_rules! impl_int_encoding {
+ ($($int:ty => $uint:ty),+) => {
+ $(
+ impl<'a> DecodeValue<'a> for $int {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ let bytes = ByteSlice::decode_value(reader, header)?.as_slice();
+
+ let result = if is_highest_bit_set(bytes) {
+ <$uint>::from_be_bytes(int::decode_to_array(bytes)?) as $int
+ } else {
+ Self::from_be_bytes(uint::decode_to_array(bytes)?)
+ };
+
+ // Ensure we compute the same encoded length as the original any value
+ if header.length != result.value_len()? {
+ return Err(Self::TAG.non_canonical_error());
+ }
+
+ Ok(result)
+ }
+ }
+
+ impl EncodeValue for $int {
+ fn value_len(&self) -> Result<Length> {
+ if *self < 0 {
+ int::encoded_len(&(*self as $uint).to_be_bytes())
+ } else {
+ uint::encoded_len(&self.to_be_bytes())
+ }
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ if *self < 0 {
+ int::encode_bytes(writer, &(*self as $uint).to_be_bytes())
+ } else {
+ uint::encode_bytes(writer, &self.to_be_bytes())
+ }
+ }
+ }
+
+ impl FixedTag for $int {
+ const TAG: Tag = Tag::Integer;
+ }
+
+ impl ValueOrd for $int {
+ fn value_cmp(&self, other: &Self) -> Result<Ordering> {
+ value_cmp(*self, *other)
+ }
+ }
+
+ impl TryFrom<AnyRef<'_>> for $int {
+ type Error = Error;
+
+ fn try_from(any: AnyRef<'_>) -> Result<Self> {
+ any.decode_into()
+ }
+ }
+ )+
+ };
+}
+
+macro_rules! impl_uint_encoding {
+ ($($uint:ty),+) => {
+ $(
+ impl<'a> DecodeValue<'a> for $uint {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ let bytes = ByteSlice::decode_value(reader, header)?.as_slice();
+ let result = Self::from_be_bytes(uint::decode_to_array(bytes)?);
+
+ // Ensure we compute the same encoded length as the original any value
+ if header.length != result.value_len()? {
+ return Err(Self::TAG.non_canonical_error());
+ }
+
+ Ok(result)
+ }
+ }
+
+ impl EncodeValue for $uint {
+ fn value_len(&self) -> Result<Length> {
+ uint::encoded_len(&self.to_be_bytes())
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ uint::encode_bytes(writer, &self.to_be_bytes())
+ }
+ }
+
+ impl FixedTag for $uint {
+ const TAG: Tag = Tag::Integer;
+ }
+
+ impl ValueOrd for $uint {
+ fn value_cmp(&self, other: &Self) -> Result<Ordering> {
+ value_cmp(*self, *other)
+ }
+ }
+
+ impl TryFrom<AnyRef<'_>> for $uint {
+ type Error = Error;
+
+ fn try_from(any: AnyRef<'_>) -> Result<Self> {
+ any.decode_into()
+ }
+ }
+ )+
+ };
+}
+
+impl_int_encoding!(i8 => u8, i16 => u16, i32 => u32, i64 => u64, i128 => u128);
+impl_uint_encoding!(u8, u16, u32, u64, u128);
+
+/// Is the highest bit of the first byte in the slice set?
+/// (Returns `false` if the slice is empty.)
+#[inline]
+fn is_highest_bit_set(bytes: &[u8]) -> bool {
+ bytes
+ .get(0)
+ .map(|byte| byte & 0b10000000 != 0)
+ .unwrap_or(false)
+}
+
+/// Compare two integer values
+fn value_cmp<T>(a: T, b: T) -> Result<Ordering>
+where
+ T: Copy + EncodeValue + Sized,
+{
+ const MAX_INT_SIZE: usize = 16;
+ debug_assert!(mem::size_of::<T>() <= MAX_INT_SIZE);
+
+ let mut buf1 = [0u8; MAX_INT_SIZE];
+ let mut encoder1 = SliceWriter::new(&mut buf1);
+ a.encode_value(&mut encoder1)?;
+
+ let mut buf2 = [0u8; MAX_INT_SIZE];
+ let mut encoder2 = SliceWriter::new(&mut buf2);
+ b.encode_value(&mut encoder2)?;
+
+ Ok(encoder1.finish()?.cmp(encoder2.finish()?))
+}
+
+#[cfg(test)]
+pub(crate) mod tests {
+ use crate::{Decode, Encode};
+
+ // Vectors from Section 5.7 of:
+ // https://luca.ntop.org/Teaching/Appunti/asn1.html
+ pub(crate) const I0_BYTES: &[u8] = &[0x02, 0x01, 0x00];
+ pub(crate) const I127_BYTES: &[u8] = &[0x02, 0x01, 0x7F];
+ pub(crate) const I128_BYTES: &[u8] = &[0x02, 0x02, 0x00, 0x80];
+ pub(crate) const I256_BYTES: &[u8] = &[0x02, 0x02, 0x01, 0x00];
+ pub(crate) const INEG128_BYTES: &[u8] = &[0x02, 0x01, 0x80];
+ pub(crate) const INEG129_BYTES: &[u8] = &[0x02, 0x02, 0xFF, 0x7F];
+
+ // Additional vectors
+ pub(crate) const I255_BYTES: &[u8] = &[0x02, 0x02, 0x00, 0xFF];
+ pub(crate) const I32767_BYTES: &[u8] = &[0x02, 0x02, 0x7F, 0xFF];
+ pub(crate) const I65535_BYTES: &[u8] = &[0x02, 0x03, 0x00, 0xFF, 0xFF];
+ pub(crate) const INEG32768_BYTES: &[u8] = &[0x02, 0x02, 0x80, 0x00];
+
+ #[test]
+ fn decode_i8() {
+ assert_eq!(0, i8::from_der(I0_BYTES).unwrap());
+ assert_eq!(127, i8::from_der(I127_BYTES).unwrap());
+ assert_eq!(-128, i8::from_der(INEG128_BYTES).unwrap());
+ }
+
+ #[test]
+ fn decode_i16() {
+ assert_eq!(0, i16::from_der(I0_BYTES).unwrap());
+ assert_eq!(127, i16::from_der(I127_BYTES).unwrap());
+ assert_eq!(128, i16::from_der(I128_BYTES).unwrap());
+ assert_eq!(255, i16::from_der(I255_BYTES).unwrap());
+ assert_eq!(256, i16::from_der(I256_BYTES).unwrap());
+ assert_eq!(32767, i16::from_der(I32767_BYTES).unwrap());
+ assert_eq!(-128, i16::from_der(INEG128_BYTES).unwrap());
+ assert_eq!(-129, i16::from_der(INEG129_BYTES).unwrap());
+ assert_eq!(-32768, i16::from_der(INEG32768_BYTES).unwrap());
+ }
+
+ #[test]
+ fn decode_u8() {
+ assert_eq!(0, u8::from_der(I0_BYTES).unwrap());
+ assert_eq!(127, u8::from_der(I127_BYTES).unwrap());
+ assert_eq!(255, u8::from_der(I255_BYTES).unwrap());
+ }
+
+ #[test]
+ fn decode_u16() {
+ assert_eq!(0, u16::from_der(I0_BYTES).unwrap());
+ assert_eq!(127, u16::from_der(I127_BYTES).unwrap());
+ assert_eq!(255, u16::from_der(I255_BYTES).unwrap());
+ assert_eq!(256, u16::from_der(I256_BYTES).unwrap());
+ assert_eq!(32767, u16::from_der(I32767_BYTES).unwrap());
+ assert_eq!(65535, u16::from_der(I65535_BYTES).unwrap());
+ }
+
+ #[test]
+ fn encode_i8() {
+ let mut buffer = [0u8; 3];
+
+ assert_eq!(I0_BYTES, 0i8.encode_to_slice(&mut buffer).unwrap());
+ assert_eq!(I127_BYTES, 127i8.encode_to_slice(&mut buffer).unwrap());
+
+ assert_eq!(
+ INEG128_BYTES,
+ (-128i8).encode_to_slice(&mut buffer).unwrap()
+ );
+ }
+
+ #[test]
+ fn encode_i16() {
+ let mut buffer = [0u8; 4];
+ assert_eq!(I0_BYTES, 0i16.encode_to_slice(&mut buffer).unwrap());
+ assert_eq!(I127_BYTES, 127i16.encode_to_slice(&mut buffer).unwrap());
+ assert_eq!(I128_BYTES, 128i16.encode_to_slice(&mut buffer).unwrap());
+ assert_eq!(I255_BYTES, 255i16.encode_to_slice(&mut buffer).unwrap());
+ assert_eq!(I256_BYTES, 256i16.encode_to_slice(&mut buffer).unwrap());
+ assert_eq!(I32767_BYTES, 32767i16.encode_to_slice(&mut buffer).unwrap());
+
+ assert_eq!(
+ INEG128_BYTES,
+ (-128i16).encode_to_slice(&mut buffer).unwrap()
+ );
+
+ assert_eq!(
+ INEG129_BYTES,
+ (-129i16).encode_to_slice(&mut buffer).unwrap()
+ );
+
+ assert_eq!(
+ INEG32768_BYTES,
+ (-32768i16).encode_to_slice(&mut buffer).unwrap()
+ );
+ }
+
+ #[test]
+ fn encode_u8() {
+ let mut buffer = [0u8; 4];
+ assert_eq!(I0_BYTES, 0u8.encode_to_slice(&mut buffer).unwrap());
+ assert_eq!(I127_BYTES, 127u8.encode_to_slice(&mut buffer).unwrap());
+ assert_eq!(I255_BYTES, 255u8.encode_to_slice(&mut buffer).unwrap());
+ }
+
+ #[test]
+ fn encode_u16() {
+ let mut buffer = [0u8; 5];
+ assert_eq!(I0_BYTES, 0u16.encode_to_slice(&mut buffer).unwrap());
+ assert_eq!(I127_BYTES, 127u16.encode_to_slice(&mut buffer).unwrap());
+ assert_eq!(I128_BYTES, 128u16.encode_to_slice(&mut buffer).unwrap());
+ assert_eq!(I255_BYTES, 255u16.encode_to_slice(&mut buffer).unwrap());
+ assert_eq!(I256_BYTES, 256u16.encode_to_slice(&mut buffer).unwrap());
+ assert_eq!(I32767_BYTES, 32767u16.encode_to_slice(&mut buffer).unwrap());
+ assert_eq!(I65535_BYTES, 65535u16.encode_to_slice(&mut buffer).unwrap());
+ }
+
+ /// Integers must be encoded with a minimum number of octets
+ #[test]
+ fn reject_non_canonical() {
+ assert!(i8::from_der(&[0x02, 0x02, 0x00, 0x00]).is_err());
+ assert!(i16::from_der(&[0x02, 0x02, 0x00, 0x00]).is_err());
+ assert!(u8::from_der(&[0x02, 0x02, 0x00, 0x00]).is_err());
+ assert!(u16::from_der(&[0x02, 0x02, 0x00, 0x00]).is_err());
+ }
+}
diff --git a/src/asn1/integer/bigint.rs b/src/asn1/integer/bigint.rs
new file mode 100644
index 0000000..7a73c48
--- /dev/null
+++ b/src/asn1/integer/bigint.rs
@@ -0,0 +1,150 @@
+//! "Big" ASN.1 `INTEGER` types.
+
+use super::uint;
+use crate::{
+ asn1::AnyRef, ByteSlice, DecodeValue, EncodeValue, Error, ErrorKind, FixedTag, Header, Length,
+ Reader, Result, Tag, Writer,
+};
+
+/// "Big" unsigned ASN.1 `INTEGER` type.
+///
+/// Provides direct access to the underlying big endian bytes which comprise an
+/// unsigned integer value.
+///
+/// Intended for use cases like very large integers that are used in
+/// cryptographic applications (e.g. keys, signatures).
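+///
+/// A construction sketch showing leading-zero stripping:
+///
+/// ```
+/// use der::asn1::UIntRef;
+///
+/// let uint = UIntRef::new(&[0x00, 0x01, 0x00]).unwrap();
+/// assert_eq!(uint.as_bytes(), &[0x01, 0x00]);
+/// ```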
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd)]
+pub struct UIntRef<'a> {
+ /// Inner value
+ inner: ByteSlice<'a>,
+}
+
+impl<'a> UIntRef<'a> {
+ /// Create a new [`UIntRef`] from a byte slice.
+ pub fn new(bytes: &'a [u8]) -> Result<Self> {
+ let inner = ByteSlice::new(uint::strip_leading_zeroes(bytes))
+ .map_err(|_| ErrorKind::Length { tag: Self::TAG })?;
+
+ Ok(Self { inner })
+ }
+
+ /// Borrow the inner byte slice, which contains the big endian bytes of the
+ /// integer value with all leading zeros stripped.
+ pub fn as_bytes(&self) -> &'a [u8] {
+ self.inner.as_slice()
+ }
+
+ /// Get the length of this [`UIntRef`] in bytes.
+ pub fn len(&self) -> Length {
+ self.inner.len()
+ }
+
+ /// Is the inner byte slice empty?
+ pub fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+}
+
+impl<'a> DecodeValue<'a> for UIntRef<'a> {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ let bytes = ByteSlice::decode_value(reader, header)?.as_slice();
+ let result = Self::new(uint::decode_to_slice(bytes)?)?;
+
+ // Ensure we compute the same encoded length as the original any value.
+ if result.value_len()? != header.length {
+ return Err(Self::TAG.non_canonical_error());
+ }
+
+ Ok(result)
+ }
+}
+
+impl<'a> EncodeValue for UIntRef<'a> {
+ fn value_len(&self) -> Result<Length> {
+ uint::encoded_len(self.inner.as_slice())
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ // Add leading `0x00` byte if required
+ if self.value_len()? > self.len() {
+ writer.write_byte(0)?;
+ }
+
+ writer.write(self.as_bytes())
+ }
+}
+
+impl<'a> From<&UIntRef<'a>> for UIntRef<'a> {
+ fn from(value: &UIntRef<'a>) -> UIntRef<'a> {
+ *value
+ }
+}
+
+impl<'a> TryFrom<AnyRef<'a>> for UIntRef<'a> {
+ type Error = Error;
+
+ fn try_from(any: AnyRef<'a>) -> Result<UIntRef<'a>> {
+ any.decode_into()
+ }
+}
+
+impl<'a> FixedTag for UIntRef<'a> {
+ const TAG: Tag = Tag::Integer;
+}
+
+#[cfg(test)]
+mod tests {
+ use super::UIntRef;
+ use crate::{
+ asn1::{integer::tests::*, AnyRef},
+ Decode, Encode, ErrorKind, SliceWriter, Tag,
+ };
+
+ #[test]
+ fn decode_uint_bytes() {
+ assert_eq!(&[0], UIntRef::from_der(I0_BYTES).unwrap().as_bytes());
+ assert_eq!(&[127], UIntRef::from_der(I127_BYTES).unwrap().as_bytes());
+ assert_eq!(&[128], UIntRef::from_der(I128_BYTES).unwrap().as_bytes());
+ assert_eq!(&[255], UIntRef::from_der(I255_BYTES).unwrap().as_bytes());
+
+ assert_eq!(
+ &[0x01, 0x00],
+ UIntRef::from_der(I256_BYTES).unwrap().as_bytes()
+ );
+
+ assert_eq!(
+ &[0x7F, 0xFF],
+ UIntRef::from_der(I32767_BYTES).unwrap().as_bytes()
+ );
+ }
+
+ #[test]
+ fn encode_uint_bytes() {
+ for &example in &[
+ I0_BYTES,
+ I127_BYTES,
+ I128_BYTES,
+ I255_BYTES,
+ I256_BYTES,
+ I32767_BYTES,
+ ] {
+ let uint = UIntRef::from_der(example).unwrap();
+
+ let mut buf = [0u8; 128];
+ let mut encoder = SliceWriter::new(&mut buf);
+ uint.encode(&mut encoder).unwrap();
+
+ let result = encoder.finish().unwrap();
+ assert_eq!(example, result);
+ }
+ }
+
+ #[test]
+ fn reject_oversize_without_extra_zero() {
+ let err = UIntRef::try_from(AnyRef::new(Tag::Integer, &[0x81]).unwrap())
+ .err()
+ .unwrap();
+
+ assert_eq!(err.kind(), ErrorKind::Value { tag: Tag::Integer });
+ }
+}
diff --git a/src/asn1/integer/int.rs b/src/asn1/integer/int.rs
new file mode 100644
index 0000000..a9fe438
--- /dev/null
+++ b/src/asn1/integer/int.rs
@@ -0,0 +1,55 @@
+//! Support for encoding negative integers
+
+use super::is_highest_bit_set;
+use crate::{ErrorKind, Length, Result, Writer};
+
+/// Decode a negative integer of the specified size, filling any unused leading
+/// bytes with `0xFF` (i.e. sign extension).
+///
+/// Returns a byte array of the requested size containing a big endian integer.
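+///
+/// For example, decoding the single byte `0x80` into a 2-byte array yields
+/// `[0xFF, 0x80]`, i.e. `-128` sign-extended to 16 bits.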
+pub(super) fn decode_to_array<const N: usize>(bytes: &[u8]) -> Result<[u8; N]> {
+ match N.checked_sub(bytes.len()) {
+ Some(offset) => {
+ let mut output = [0xFFu8; N];
+ output[offset..].copy_from_slice(bytes);
+ Ok(output)
+ }
+ None => {
+ let expected_len = Length::try_from(N)?;
+ let actual_len = Length::try_from(bytes.len())?;
+
+ Err(ErrorKind::Incomplete {
+ expected_len,
+ actual_len,
+ }
+ .into())
+ }
+ }
+}
+
+/// Encode the given big endian bytes representing a negative integer as ASN.1 DER.
+pub(super) fn encode_bytes<W>(writer: &mut W, bytes: &[u8]) -> Result<()>
+where
+ W: Writer + ?Sized,
+{
+ writer.write(strip_leading_ones(bytes))
+}
+
+/// Get the encoded length for the given negative integer serialized as big endian bytes.
+#[inline]
+pub(super) fn encoded_len(bytes: &[u8]) -> Result<Length> {
+ Length::try_from(strip_leading_ones(bytes).len())
+}
+
+/// Strip the leading all-ones bytes from the given byte slice.
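+///
+/// A leading `0xFF` octet is only redundant (and therefore stripped) when the
+/// following byte also has its highest bit set, so the value stays negative.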
+fn strip_leading_ones(mut bytes: &[u8]) -> &[u8] {
+ while let Some((byte, rest)) = bytes.split_first() {
+ if *byte == 0xFF && is_highest_bit_set(rest) {
+ bytes = rest;
+ continue;
+ }
+
+ break;
+ }
+
+ bytes
+}
diff --git a/src/asn1/integer/uint.rs b/src/asn1/integer/uint.rs
new file mode 100644
index 0000000..e45a72f
--- /dev/null
+++ b/src/asn1/integer/uint.rs
@@ -0,0 +1,116 @@
+//! Unsigned integer decoders/encoders.
+
+use crate::{Length, Result, Tag, Writer};
+
+/// Decode an unsigned integer into a big endian byte slice with all leading
+/// zeroes removed.
+///
+/// Returns a borrowed slice of the value's big endian bytes with any leading
+/// zero sign octet removed.
+pub(crate) fn decode_to_slice(bytes: &[u8]) -> Result<&[u8]> {
+ // The `INTEGER` type always encodes a signed value, so for unsigned
+ // values the leading `0x00` byte may need to be removed.
+ //
+ // We also disallow a leading byte which would overflow a signed ASN.1
+ // integer (since we're decoding an unsigned integer).
+ // We expect all such cases to have a leading `0x00` byte.
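+ //
+ // Worked examples of the rules above:
+ //
+ // [0x00, 0xFF] => Ok([0xFF])  (leading zero sign octet stripped)
+ // [0xFF]       => value error (would be negative without a leading zero)
+ // [0x00, 0x7F] => non-canonical error (the leading zero is redundant)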
+ match bytes {
+ [] => Err(Tag::Integer.non_canonical_error()),
+ [0] => Ok(bytes),
+ [0, byte, ..] if *byte < 0x80 => Err(Tag::Integer.non_canonical_error()),
+ [0, rest @ ..] => Ok(rest),
+ [byte, ..] if *byte >= 0x80 => Err(Tag::Integer.value_error()),
+ _ => Ok(bytes),
+ }
+}
+
+/// Decode an unsigned integer into a byte array of the requested size
+/// containing a big endian integer.
+pub(super) fn decode_to_array<const N: usize>(bytes: &[u8]) -> Result<[u8; N]> {
+ let input = decode_to_slice(bytes)?;
+
+ // Compute number of leading zeroes to add
+ let num_zeroes = N
+ .checked_sub(input.len())
+ .ok_or_else(|| Tag::Integer.length_error())?;
+
+ // Copy input into `N`-sized output buffer with leading zeroes
+ let mut output = [0u8; N];
+ output[num_zeroes..].copy_from_slice(input);
+ Ok(output)
+}
+
+/// Encode the given big endian bytes representing an integer as ASN.1 DER.
+pub(crate) fn encode_bytes<W>(encoder: &mut W, bytes: &[u8]) -> Result<()>
+where
+ W: Writer + ?Sized,
+{
+ let bytes = strip_leading_zeroes(bytes);
+
+ if needs_leading_zero(bytes) {
+ encoder.write_byte(0)?;
+ }
+
+ encoder.write(bytes)
+}
+
+/// Get the encoded length for the given unsigned integer serialized as bytes.
+#[inline]
+pub(crate) fn encoded_len(bytes: &[u8]) -> Result<Length> {
+ let bytes = strip_leading_zeroes(bytes);
+ Length::try_from(bytes.len())? + u8::from(needs_leading_zero(bytes))
+}
+
+/// Strip the leading zeroes from the given byte slice
+pub(crate) fn strip_leading_zeroes(mut bytes: &[u8]) -> &[u8] {
+ while let Some((byte, rest)) = bytes.split_first() {
+ if *byte == 0 && !rest.is_empty() {
+ bytes = rest;
+ } else {
+ break;
+ }
+ }
+
+ bytes
+}
+
+/// Does the given integer need a leading zero?
+fn needs_leading_zero(bytes: &[u8]) -> bool {
+ matches!(bytes.get(0), Some(byte) if *byte >= 0x80)
+}
+
+#[cfg(test)]
+mod tests {
+ use super::decode_to_array;
+ use crate::{ErrorKind, Tag};
+
+ #[test]
+ fn decode_to_array_no_leading_zero() {
+ let arr = decode_to_array::<4>(&[1, 2]).unwrap();
+ assert_eq!(arr, [0, 0, 1, 2]);
+ }
+
+ #[test]
+ fn decode_to_array_leading_zero() {
+ let arr = decode_to_array::<4>(&[0x00, 0xFF, 0xFE]).unwrap();
+ assert_eq!(arr, [0x00, 0x00, 0xFF, 0xFE]);
+ }
+
+ #[test]
+ fn decode_to_array_extra_zero() {
+ let err = decode_to_array::<4>(&[0, 1, 2]).err().unwrap();
+ assert_eq!(err.kind(), ErrorKind::Noncanonical { tag: Tag::Integer });
+ }
+
+ #[test]
+ fn decode_to_array_missing_zero() {
+ // We're decoding an unsigned integer, but this value would be signed
+ let err = decode_to_array::<4>(&[0xFF, 0xFE]).err().unwrap();
+ assert_eq!(err.kind(), ErrorKind::Value { tag: Tag::Integer });
+ }
+
+ #[test]
+ fn decode_to_array_oversized_input() {
+ let err = decode_to_array::<1>(&[1, 2, 3]).err().unwrap();
+ assert_eq!(err.kind(), ErrorKind::Length { tag: Tag::Integer });
+ }
+}
diff --git a/src/asn1/null.rs b/src/asn1/null.rs
new file mode 100644
index 0000000..e87729f
--- /dev/null
+++ b/src/asn1/null.rs
@@ -0,0 +1,108 @@
+//! ASN.1 `NULL` support.
+
+use crate::{
+ asn1::AnyRef, ord::OrdIsValueOrd, ByteSlice, DecodeValue, EncodeValue, Error, ErrorKind,
+ FixedTag, Header, Length, Reader, Result, Tag, Writer,
+};
+
+/// ASN.1 `NULL` type.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub struct Null;
+
+impl<'a> DecodeValue<'a> for Null {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ if header.length.is_zero() {
+ Ok(Null)
+ } else {
+ Err(reader.error(ErrorKind::Length { tag: Self::TAG }))
+ }
+ }
+}
+
+impl EncodeValue for Null {
+ fn value_len(&self) -> Result<Length> {
+ Ok(Length::ZERO)
+ }
+
+ fn encode_value(&self, _writer: &mut dyn Writer) -> Result<()> {
+ Ok(())
+ }
+}
+
+impl FixedTag for Null {
+ const TAG: Tag = Tag::Null;
+}
+
+impl OrdIsValueOrd for Null {}
+
+impl<'a> From<Null> for AnyRef<'a> {
+ fn from(_: Null) -> AnyRef<'a> {
+ AnyRef::from_tag_and_value(Tag::Null, ByteSlice::default())
+ }
+}
+
+impl TryFrom<AnyRef<'_>> for Null {
+ type Error = Error;
+
+ fn try_from(any: AnyRef<'_>) -> Result<Null> {
+ any.decode_into()
+ }
+}
+
+impl TryFrom<AnyRef<'_>> for () {
+ type Error = Error;
+
+ fn try_from(any: AnyRef<'_>) -> Result<()> {
+ Null::try_from(any).map(|_| ())
+ }
+}
+
+impl<'a> From<()> for AnyRef<'a> {
+ fn from(_: ()) -> AnyRef<'a> {
+ Null.into()
+ }
+}
+
+impl<'a> DecodeValue<'a> for () {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ Null::decode_value(reader, header)?;
+ Ok(())
+ }
+}
+
+impl EncodeValue for () {
+ fn value_len(&self) -> Result<Length> {
+ Ok(Length::ZERO)
+ }
+
+ fn encode_value(&self, _writer: &mut dyn Writer) -> Result<()> {
+ Ok(())
+ }
+}
+
+impl FixedTag for () {
+ const TAG: Tag = Tag::Null;
+}
+
+#[cfg(test)]
+mod tests {
+ use super::Null;
+ use crate::{Decode, Encode};
+
+ #[test]
+ fn decode() {
+ Null::from_der(&[0x05, 0x00]).unwrap();
+ }
+
+ #[test]
+ fn encode() {
+ let mut buffer = [0u8; 2];
+ assert_eq!(&[0x05, 0x00], Null.encode_to_slice(&mut buffer).unwrap());
+ assert_eq!(&[0x05, 0x00], ().encode_to_slice(&mut buffer).unwrap());
+ }
+
+ #[test]
+ fn reject_non_canonical() {
+ assert!(Null::from_der(&[0x05, 0x81, 0x00]).is_err());
+ }
+}
diff --git a/src/asn1/octet_string.rs b/src/asn1/octet_string.rs
new file mode 100644
index 0000000..9f78c96
--- /dev/null
+++ b/src/asn1/octet_string.rs
@@ -0,0 +1,179 @@
+//! ASN.1 `OCTET STRING` support.
+
+use crate::{
+ asn1::AnyRef, ord::OrdIsValueOrd, ByteSlice, DecodeValue, EncodeValue, Error, ErrorKind,
+ FixedTag, Header, Length, Reader, Result, Tag, Writer,
+};
+
+#[cfg(feature = "alloc")]
+use alloc::vec::Vec;
+
+/// ASN.1 `OCTET STRING` type: borrowed form.
+///
+/// Octet strings represent contiguous sequences of octets, a.k.a. bytes.
+///
+/// This is a zero-copy reference type which borrows from the input data.
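+///
+/// A borrowing sketch:
+///
+/// ```
+/// use der::asn1::OctetStringRef;
+///
+/// let octets = OctetStringRef::new(&[0x01, 0x02, 0x03]).unwrap();
+/// assert_eq!(octets.as_bytes(), &[0x01, 0x02, 0x03]);
+/// ```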
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub struct OctetStringRef<'a> {
+ /// Inner value
+ inner: ByteSlice<'a>,
+}
+
+impl<'a> OctetStringRef<'a> {
+ /// Create a new ASN.1 `OCTET STRING` from a byte slice.
+ pub fn new(slice: &'a [u8]) -> Result<Self> {
+ ByteSlice::new(slice)
+ .map(|inner| Self { inner })
+ .map_err(|_| ErrorKind::Length { tag: Self::TAG }.into())
+ }
+
+ /// Borrow the inner byte slice.
+ pub fn as_bytes(&self) -> &'a [u8] {
+ self.inner.as_slice()
+ }
+
+ /// Get the length of the inner byte slice.
+ pub fn len(&self) -> Length {
+ self.inner.len()
+ }
+
+ /// Is the inner byte slice empty?
+ pub fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+}
+
+impl AsRef<[u8]> for OctetStringRef<'_> {
+ fn as_ref(&self) -> &[u8] {
+ self.as_bytes()
+ }
+}
+
+impl<'a> DecodeValue<'a> for OctetStringRef<'a> {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ let inner = ByteSlice::decode_value(reader, header)?;
+ Ok(Self { inner })
+ }
+}
+
+impl EncodeValue for OctetStringRef<'_> {
+ fn value_len(&self) -> Result<Length> {
+ self.inner.value_len()
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ self.inner.encode_value(writer)
+ }
+}
+
+impl FixedTag for OctetStringRef<'_> {
+ const TAG: Tag = Tag::OctetString;
+}
+
+impl OrdIsValueOrd for OctetStringRef<'_> {}
+
+impl<'a> From<&OctetStringRef<'a>> for OctetStringRef<'a> {
+ fn from(value: &OctetStringRef<'a>) -> OctetStringRef<'a> {
+ *value
+ }
+}
+
+impl<'a> TryFrom<AnyRef<'a>> for OctetStringRef<'a> {
+ type Error = Error;
+
+ fn try_from(any: AnyRef<'a>) -> Result<OctetStringRef<'a>> {
+ any.decode_into()
+ }
+}
+
+impl<'a> From<OctetStringRef<'a>> for AnyRef<'a> {
+ fn from(octet_string: OctetStringRef<'a>) -> AnyRef<'a> {
+ AnyRef::from_tag_and_value(Tag::OctetString, octet_string.inner)
+ }
+}
+
+impl<'a> From<OctetStringRef<'a>> for &'a [u8] {
+ fn from(octet_string: OctetStringRef<'a>) -> &'a [u8] {
+ octet_string.as_bytes()
+ }
+}
+
+/// ASN.1 `OCTET STRING` type: owned form.
+///
+/// Octet strings represent contiguous sequences of octets, a.k.a. bytes.
+///
+/// This type provides the same functionality as [`OctetStringRef`] but owns
+/// the backing data.
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub struct OctetString {
+ /// Octet string represented as an owned vector of bytes.
+ inner: Vec<u8>,
+}
+
+#[cfg(feature = "alloc")]
+impl OctetString {
+ /// Create a new ASN.1 `OCTET STRING`.
+ pub fn new(bytes: impl Into<Vec<u8>>) -> Result<Self> {
+ let inner = bytes.into();
+
+ // Ensure the bytes parse successfully as an `OctetStringRef`
+ OctetStringRef::new(&inner)?;
+
+ Ok(Self { inner })
+ }
+
+ /// Borrow the inner byte slice.
+ pub fn as_bytes(&self) -> &[u8] {
+ self.inner.as_slice()
+ }
+
+ /// Get the length of the inner byte slice.
+ pub fn len(&self) -> Length {
+ self.value_len().expect("invalid OCTET STRING length")
+ }
+
+ /// Is the inner byte slice empty?
+ pub fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl AsRef<[u8]> for OctetString {
+ fn as_ref(&self) -> &[u8] {
+ self.as_bytes()
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl<'a> DecodeValue<'a> for OctetString {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ Self::new(reader.read_vec(header.length)?)
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl EncodeValue for OctetString {
+ fn value_len(&self) -> Result<Length> {
+ self.inner.len().try_into()
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ writer.write(&self.inner)
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl FixedTag for OctetString {
+ const TAG: Tag = Tag::OctetString;
+}
+
+#[cfg(feature = "alloc")]
+impl<'a> From<&'a OctetString> for OctetStringRef<'a> {
+ fn from(octet_string: &'a OctetString) -> OctetStringRef<'a> {
+ // Ensured to parse successfully in constructor
+ OctetStringRef::new(&octet_string.inner).expect("invalid OCTET STRING")
+ }
+}
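+ // DER serializes BIT STRING bits most-significant-bit first, while `FlagSet`
+ // numbers its flags from the least significant bit, so reverse the bit order
+ // into a big endian buffer before encoding.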
diff --git a/src/asn1/oid.rs b/src/asn1/oid.rs
new file mode 100644
index 0000000..8b28718
--- /dev/null
+++ b/src/asn1/oid.rs
@@ -0,0 +1,90 @@
+//! ASN.1 `OBJECT IDENTIFIER`
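+//!
+//! A decoding sketch (assumes the `oid` feature, which re-exports `const-oid`'s
+//! `ObjectIdentifier` as `der::asn1::ObjectIdentifier`):
+//!
+//! ```
+//! use der::{asn1::ObjectIdentifier, Decode};
+//!
+//! let oid = ObjectIdentifier::from_der(&[0x06, 0x06, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d]).unwrap();
+//! assert_eq!(oid, ObjectIdentifier::new_unwrap("1.2.840.113549"));
+//! ```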
+
+use crate::{
+ asn1::AnyRef, ord::OrdIsValueOrd, DecodeValue, EncodeValue, Error, FixedTag, Header, Length,
+ Reader, Result, Tag, Tagged, Writer,
+};
+use const_oid::ObjectIdentifier;
+
+impl<'a> DecodeValue<'a> for ObjectIdentifier {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ let mut buf = [0u8; ObjectIdentifier::MAX_SIZE];
+ let slice = buf
+ .get_mut(..header.length.try_into()?)
+ .ok_or_else(|| Self::TAG.length_error())?;
+
+ let actual_len = reader.read_into(slice)?.len();
+ debug_assert_eq!(actual_len, header.length.try_into()?);
+ Ok(Self::from_bytes(slice)?)
+ }
+}
+
+impl EncodeValue for ObjectIdentifier {
+ fn value_len(&self) -> Result<Length> {
+ Length::try_from(self.as_bytes().len())
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ writer.write(self.as_bytes())
+ }
+}
+
+impl FixedTag for ObjectIdentifier {
+ const TAG: Tag = Tag::ObjectIdentifier;
+}
+
+impl OrdIsValueOrd for ObjectIdentifier {}
+
+impl<'a> From<&'a ObjectIdentifier> for AnyRef<'a> {
+ fn from(oid: &'a ObjectIdentifier) -> AnyRef<'a> {
+ // Note: ensuring an infallible conversion is possible relies on the
+ // invariant that `ObjectIdentifier::MAX_SIZE <= Length::MAX`.
+ //
+ // The `length()` test below ensures this is the case.
+ let value = oid
+ .as_bytes()
+ .try_into()
+ .expect("OID length invariant violated");
+
+ AnyRef::from_tag_and_value(Tag::ObjectIdentifier, value)
+ }
+}
+
+impl TryFrom<AnyRef<'_>> for ObjectIdentifier {
+ type Error = Error;
+
+ fn try_from(any: AnyRef<'_>) -> Result<ObjectIdentifier> {
+ any.tag().assert_eq(Tag::ObjectIdentifier)?;
+ Ok(ObjectIdentifier::from_bytes(any.value())?)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::ObjectIdentifier;
+ use crate::{Decode, Encode, Length};
+
+ const EXAMPLE_OID: ObjectIdentifier = ObjectIdentifier::new_unwrap("1.2.840.113549");
+ const EXAMPLE_OID_BYTES: &[u8; 8] = &[0x06, 0x06, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d];
+
+ #[test]
+ fn decode() {
+ let oid = ObjectIdentifier::from_der(EXAMPLE_OID_BYTES).unwrap();
+ assert_eq!(EXAMPLE_OID, oid);
+ }
+
+ #[test]
+ fn encode() {
+ let mut buffer = [0u8; 8];
+ assert_eq!(
+ EXAMPLE_OID_BYTES,
+ EXAMPLE_OID.encode_to_slice(&mut buffer).unwrap()
+ );
+ }
+
+ #[test]
+ fn length() {
+ // Ensure the infallible `From` conversion to `AnyRef` will never panic
+ assert!(ObjectIdentifier::MAX_SIZE <= Length::MAX.try_into().unwrap());
+ }
+}
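A short sketch of the infallible AnyRef conversion implemented above (illustrative only; it assumes the crate's oid feature so const_oid is available, and uses the well-known rsaEncryption OID 1.2.840.113549.1.1.1):

use const_oid::ObjectIdentifier;
use der::{asn1::AnyRef, Tag, Tagged};

fn oid_to_any() {
    let oid: ObjectIdentifier = ObjectIdentifier::new_unwrap("1.2.840.113549.1.1.1");

    // `From<&ObjectIdentifier>` cannot fail thanks to the
    // MAX_SIZE <= Length::MAX invariant checked in the test above
    let any = AnyRef::from(&oid);
    assert_eq!(any.tag(), Tag::ObjectIdentifier);
    assert_eq!(any.value(), oid.as_bytes());
}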
diff --git a/src/asn1/optional.rs b/src/asn1/optional.rs
new file mode 100644
index 0000000..a9b923c
--- /dev/null
+++ b/src/asn1/optional.rs
@@ -0,0 +1,66 @@
+//! ASN.1 `OPTIONAL` as mapped to Rust's `Option` type
+
+use crate::{Choice, Decode, DerOrd, Encode, Length, Reader, Result, Tag, Writer};
+use core::cmp::Ordering;
+
+impl<'a, T> Decode<'a> for Option<T>
+where
+ T: Choice<'a>, // NOTE: all `Decode + Tagged` types receive a blanket `Choice` impl
+{
+ fn decode<R: Reader<'a>>(reader: &mut R) -> Result<Option<T>> {
+ if let Some(byte) = reader.peek_byte() {
+ if T::can_decode(Tag::try_from(byte)?) {
+ return T::decode(reader).map(Some);
+ }
+ }
+
+ Ok(None)
+ }
+}
+
+impl<T> DerOrd for Option<T>
+where
+ T: DerOrd,
+{
+ fn der_cmp(&self, other: &Self) -> Result<Ordering> {
+ match self {
+ Some(a) => match other {
+ Some(b) => a.der_cmp(b),
+ None => Ok(Ordering::Greater),
+ },
+ None => Ok(Ordering::Less),
+ }
+ }
+}
+
+impl<T> Encode for Option<T>
+where
+ T: Encode,
+{
+ fn encoded_len(&self) -> Result<Length> {
+ (&self).encoded_len()
+ }
+
+ fn encode(&self, writer: &mut dyn Writer) -> Result<()> {
+ (&self).encode(writer)
+ }
+}
+
+impl<T> Encode for &Option<T>
+where
+ T: Encode,
+{
+ fn encoded_len(&self) -> Result<Length> {
+ match self {
+ Some(encodable) => encodable.encoded_len(),
+ None => Ok(0u8.into()),
+ }
+ }
+
+ fn encode(&self, encoder: &mut dyn Writer) -> Result<()> {
+ match self {
+ Some(encodable) => encodable.encode(encoder),
+ None => Ok(()),
+ }
+ }
+}
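A small sketch of how the Option impls above behave when decoding an OPTIONAL field (illustrative; the OCTET STRING payload is arbitrary):

use der::{asn1::OctetStringRef, Decode};

fn decode_optional() -> der::Result<()> {
    // A present TLV decodes as Some(..)
    let der_bytes = [0x04u8, 0x01, 0xAA];
    let present: Option<OctetStringRef<'_>> = Option::from_der(&der_bytes)?;
    assert!(present.is_some());

    // Running out of input yields None rather than an error
    let no_bytes: [u8; 0] = [];
    let absent: Option<OctetStringRef<'_>> = Option::from_der(&no_bytes)?;
    assert!(absent.is_none());
    Ok(())
}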
diff --git a/src/asn1/printable_string.rs b/src/asn1/printable_string.rs
new file mode 100644
index 0000000..c5560a0
--- /dev/null
+++ b/src/asn1/printable_string.rs
@@ -0,0 +1,186 @@
+//! ASN.1 `PrintableString` support.
+
+use crate::{
+ asn1::AnyRef, ord::OrdIsValueOrd, ByteSlice, DecodeValue, EncodeValue, Error, FixedTag, Header,
+ Length, Reader, Result, StrSlice, Tag, Writer,
+};
+use core::{fmt, str};
+
+/// ASN.1 `PrintableString` type.
+///
+/// Supports a subset of the ASCII character set (described below).
+///
+/// For UTF-8, use [`Utf8StringRef`][`crate::asn1::Utf8StringRef`] instead.
+/// For the full ASCII character set, use
+/// [`Ia5StringRef`][`crate::asn1::Ia5StringRef`].
+///
+/// This is a zero-copy reference type which borrows from the input data.
+///
+/// # Supported characters
+///
+/// The following ASCII characters/ranges are supported:
+///
+/// - `A..Z`
+/// - `a..z`
+/// - `0..9`
+/// - "` `" (i.e. space)
+/// - `'` (apostrophe)
+/// - `(`
+/// - `)`
+/// - `+`
+/// - `,`
+/// - `-`
+/// - `.`
+/// - `/`
+/// - `:`
+/// - `=`
+/// - `?`
+#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord)]
+pub struct PrintableStringRef<'a> {
+ /// Inner value
+ inner: StrSlice<'a>,
+}
+
+impl<'a> PrintableStringRef<'a> {
+ /// Create a new ASN.1 `PrintableString`.
+ pub fn new<T>(input: &'a T) -> Result<Self>
+ where
+ T: AsRef<[u8]> + ?Sized,
+ {
+ let input = input.as_ref();
+
+ // Validate all characters are within PrintableString's allowed set
+ for &c in input.iter() {
+ match c {
+ b'A'..=b'Z'
+ | b'a'..=b'z'
+ | b'0'..=b'9'
+ | b' '
+ | b'\''
+ | b'('
+ | b')'
+ | b'+'
+ | b','
+ | b'-'
+ | b'.'
+ | b'/'
+ | b':'
+ | b'='
+ | b'?' => (),
+ _ => return Err(Self::TAG.value_error()),
+ }
+ }
+
+ StrSlice::from_bytes(input)
+ .map(|inner| Self { inner })
+ .map_err(|_| Self::TAG.value_error())
+ }
+
+ /// Borrow the string as a `str`.
+ pub fn as_str(&self) -> &'a str {
+ self.inner.as_str()
+ }
+
+ /// Borrow the string as bytes.
+ pub fn as_bytes(&self) -> &'a [u8] {
+ self.inner.as_bytes()
+ }
+
+ /// Get the length of the inner byte slice.
+ pub fn len(&self) -> Length {
+ self.inner.len()
+ }
+
+ /// Is the inner string empty?
+ pub fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+}
+
+impl AsRef<str> for PrintableStringRef<'_> {
+ fn as_ref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl AsRef<[u8]> for PrintableStringRef<'_> {
+ fn as_ref(&self) -> &[u8] {
+ self.as_bytes()
+ }
+}
+
+impl<'a> DecodeValue<'a> for PrintableStringRef<'a> {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ Self::new(ByteSlice::decode_value(reader, header)?.as_slice())
+ }
+}
+
+impl<'a> EncodeValue for PrintableStringRef<'a> {
+ fn value_len(&self) -> Result<Length> {
+ self.inner.value_len()
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ self.inner.encode_value(writer)
+ }
+}
+
+impl FixedTag for PrintableStringRef<'_> {
+ const TAG: Tag = Tag::PrintableString;
+}
+
+impl OrdIsValueOrd for PrintableStringRef<'_> {}
+
+impl<'a> From<&PrintableStringRef<'a>> for PrintableStringRef<'a> {
+ fn from(value: &PrintableStringRef<'a>) -> PrintableStringRef<'a> {
+ *value
+ }
+}
+
+impl<'a> TryFrom<AnyRef<'a>> for PrintableStringRef<'a> {
+ type Error = Error;
+
+ fn try_from(any: AnyRef<'a>) -> Result<PrintableStringRef<'a>> {
+ any.decode_into()
+ }
+}
+
+impl<'a> From<PrintableStringRef<'a>> for AnyRef<'a> {
+ fn from(printable_string: PrintableStringRef<'a>) -> AnyRef<'a> {
+ AnyRef::from_tag_and_value(Tag::PrintableString, printable_string.inner.into())
+ }
+}
+
+impl<'a> From<PrintableStringRef<'a>> for &'a [u8] {
+ fn from(printable_string: PrintableStringRef<'a>) -> &'a [u8] {
+ printable_string.as_bytes()
+ }
+}
+
+impl<'a> fmt::Display for PrintableStringRef<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(self.as_str())
+ }
+}
+
+impl<'a> fmt::Debug for PrintableStringRef<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "PrintableString({:?})", self.as_str())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::PrintableStringRef;
+ use crate::Decode;
+
+ #[test]
+ fn parse_bytes() {
+ let example_bytes = &[
+ 0x13, 0x0b, 0x54, 0x65, 0x73, 0x74, 0x20, 0x55, 0x73, 0x65, 0x72, 0x20, 0x31,
+ ];
+
+ let printable_string = PrintableStringRef::from_der(example_bytes).unwrap();
+ assert_eq!(printable_string.as_str(), "Test User 1");
+ }
+}
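A brief sketch of the character-set validation above (illustrative only; the example strings are arbitrary):

use der::asn1::PrintableStringRef;

fn printable_charset() {
    // Letters, digits, and space are all in the PrintableString subset
    assert!(PrintableStringRef::new("Test User 1").is_ok());

    // '@' is outside the subset, so construction fails with a value error
    assert!(PrintableStringRef::new("user@example.com").is_err());
}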
diff --git a/src/asn1/real.rs b/src/asn1/real.rs
new file mode 100644
index 0000000..f872d2d
--- /dev/null
+++ b/src/asn1/real.rs
@@ -0,0 +1,993 @@
+//! ASN.1 `REAL` support.
+
+// TODO(tarcieri): checked arithmetic
+#![allow(
+ clippy::cast_lossless,
+ clippy::cast_sign_loss,
+ clippy::integer_arithmetic
+)]
+
+use crate::{
+ str_slice::StrSlice, ByteSlice, DecodeValue, EncodeValue, FixedTag, Header, Length, Reader,
+ Result, Tag, Writer,
+};
+
+use super::integer::uint::strip_leading_zeroes;
+
+#[cfg_attr(docsrs, doc(cfg(feature = "real")))]
+impl<'a> DecodeValue<'a> for f64 {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ let bytes = ByteSlice::decode_value(reader, header)?.as_slice();
+
+ if header.length == Length::ZERO {
+ Ok(0.0)
+ } else if is_nth_bit_one::<7>(bytes) {
+ // Binary encoding from section 8.5.7 applies
+ let sign: u64 = if is_nth_bit_one::<6>(bytes) { 1 } else { 0 };
+
+ // Section 8.5.7.2: check the base -- DER only supports base 2
+ let base = mnth_bits_to_u8::<5, 4>(bytes);
+
+ if base != 0 {
+ // Real related error: base is not DER compliant (base encoded in enum)
+ return Err(Tag::Real.value_error());
+ }
+
+ // Section 8.5.7.3
+ let scaling_factor = mnth_bits_to_u8::<3, 2>(bytes);
+
+ // Section 8.5.7.4
+ let mantissa_start;
+ let exponent = match mnth_bits_to_u8::<1, 0>(bytes) {
+ 0 => {
+ mantissa_start = 2;
+ let ebytes = (i16::from_be_bytes([0x0, bytes[1]])).to_be_bytes();
+ u64::from_be_bytes([0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ebytes[0], ebytes[1]])
+ }
+ 1 => {
+ mantissa_start = 3;
+ let ebytes = (i16::from_be_bytes([bytes[1], bytes[2]])).to_be_bytes();
+ u64::from_be_bytes([0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ebytes[0], ebytes[1]])
+ }
+ _ => {
+ // Real related error: encoded exponent cannot be represented in an IEEE 754 double
+ return Err(Tag::Real.value_error());
+ }
+ };
+ // Section 8.5.7.5: Read the remaining bytes for the mantissa
+ let mut n_bytes = [0x0; 8];
+ for (pos, byte) in bytes[mantissa_start..].iter().rev().enumerate() {
+ n_bytes[7 - pos] = *byte;
+ }
+ let n = u64::from_be_bytes(n_bytes);
+ // Multiplying by 2^F corresponds to a left shift
+ let mantissa = n << scaling_factor;
+ // Create the f64
+ Ok(encode_f64(sign, exponent, mantissa))
+ } else if is_nth_bit_one::<6>(bytes) {
+ // This is either a special value or minus zero; section 8.5.9 applies
+ match mnth_bits_to_u8::<1, 0>(bytes) {
+ 0 => Ok(f64::INFINITY),
+ 1 => Ok(f64::NEG_INFINITY),
+ 2 => Ok(f64::NAN),
+ 3 => Ok(-0.0_f64),
+ _ => Err(Tag::Real.value_error()),
+ }
+ } else {
+ let astr = StrSlice::from_bytes(&bytes[1..])?;
+ match astr.inner.parse::<f64>() {
+ Ok(val) => Ok(val),
+ // Real related error: encoding not supported or malformed
+ Err(_) => Err(Tag::Real.value_error()),
+ }
+ }
+ }
+}
+
+#[cfg_attr(docsrs, doc(cfg(feature = "real")))]
+impl EncodeValue for f64 {
+ fn value_len(&self) -> Result<Length> {
+ if self.is_sign_positive() && (*self) < f64::MIN_POSITIVE {
+ // Zero: positive yet smaller than the minimum positive number
+ Ok(Length::ZERO)
+ } else if self.is_nan()
+ || self.is_infinite()
+ || (self.is_sign_negative() && -self < f64::MIN_POSITIVE)
+ {
+ // NaN, infinite (positive or negative), or negative zero (negative but its negative is less than the min positive number)
+ Ok(Length::ONE)
+ } else {
+ // The length is that of the first octets plus those needed for the exponent plus those needed for the mantissa
+ let (_sign, exponent, mantissa) = decode_f64(*self);
+
+ let exponent_len = if exponent == 0 {
+ // Section 8.5.7.4: there must be at least one octet for exponent encoding
+ // But if the exponent is zero, its bytes would all be stripped, so force the length to 1
+ Length::ONE
+ } else {
+ let ebytes = exponent.to_be_bytes();
+ Length::try_from(strip_leading_zeroes(&ebytes).len())?
+ };
+
+ let mantissa_len = if mantissa == 0 {
+ Length::ONE
+ } else {
+ let mbytes = mantissa.to_be_bytes();
+ Length::try_from(strip_leading_zeroes(&mbytes).len())?
+ };
+
+ exponent_len + mantissa_len + Length::ONE
+ }
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ // Check if special value
+ // Encode zero first, if it's zero
+ // Special value from section 8.5.9 if non zero
+ if self.is_nan()
+ || self.is_infinite()
+ || (self.is_sign_negative() && -self < f64::MIN_POSITIVE)
+ || (self.is_sign_positive() && (*self) < f64::MIN_POSITIVE)
+ {
+ if self.is_sign_positive() && (*self) < f64::MIN_POSITIVE {
+ // Zero
+ return Ok(());
+ } else if self.is_nan() {
+ // Not a number
+ writer.write_byte(0b0100_0010)?;
+ } else if self.is_infinite() {
+ if self.is_sign_negative() {
+ // Negative infinity
+ writer.write_byte(0b0100_0001)?;
+ } else {
+ // Plus infinity
+ writer.write_byte(0b0100_0000)?;
+ }
+ } else {
+ // Minus zero
+ writer.write_byte(0b0100_0011)?;
+ }
+ } else {
+ // Always use binary encoding, set bit 8 to 1
+ let mut first_byte = 0b1000_0000;
+
+ if self.is_sign_negative() {
+ // Section 8.5.7.1: set bit 7 to 1 if negative
+ first_byte |= 0b0100_0000;
+ }
+
+ // Bits 6 and 5 are set to 0 to specify that base 2 is used
+ //
+ // NOTE: the scaling factor is only used to align the implicit point of the mantissa.
+ // This is unnecessary in DER because the base is 2, and therefore necessarily aligned.
+ // Therefore, we do not modify the mantissa in any way after this function call, which
+ // already adds the implicit one of the IEEE 754 representation.
+ let (_sign, exponent, mantissa) = decode_f64(*self);
+
+ // Encode the exponent as a 16-bit two's complement value (bias already removed by decode_f64)
+ let exponent_bytes = exponent.to_be_bytes();
+ let ebytes = strip_leading_zeroes(&exponent_bytes);
+
+ match ebytes.len() {
+ 0 | 1 => {}
+ 2 => first_byte |= 0b0000_0001,
+ 3 => first_byte |= 0b0000_0010,
+ _ => {
+ // TODO: support multi octet exponent encoding?
+ return Err(Tag::Real.value_error());
+ }
+ }
+
+ writer.write_byte(first_byte)?;
+
+ // Encode both bytes or just the last one, handled by encode_bytes directly
+ // Rust already encodes the data as two's complement, so no further processing is needed
+ writer.write(ebytes)?;
+
+ // Now, encode the mantissa as unsigned binary number
+ let mantissa_bytes = mantissa.to_be_bytes();
+ let mbytes = strip_leading_zeroes(&mantissa_bytes);
+ writer.write(mbytes)?;
+ }
+
+ Ok(())
+ }
+}
+
+#[cfg_attr(docsrs, doc(cfg(feature = "real")))]
+impl FixedTag for f64 {
+ const TAG: Tag = Tag::Real;
+}
+
+/// Is the N-th bit 1 in the first octet?
+/// NOTE: this function is zero indexed
+pub(crate) fn is_nth_bit_one<const N: usize>(bytes: &[u8]) -> bool {
+ if N < 8 {
+ bytes
+ .get(0)
+ .map(|byte| byte & (1 << N) != 0)
+ .unwrap_or(false)
+ } else {
+ false
+ }
+}
+
+/// Convert bits M, N into a u8, in the first octet only
+pub(crate) fn mnth_bits_to_u8<const M: usize, const N: usize>(bytes: &[u8]) -> u8 {
+ let bit_m = is_nth_bit_one::<M>(bytes);
+ let bit_n = is_nth_bit_one::<N>(bytes);
+ (bit_m as u8) << 1 | bit_n as u8
+}
+
+/// Decode an f64 into its sign, exponent, and mantissa (as u64 values, in that order) using bit shifts and masks.
+/// Note: this function **removes** the 1023 bias from the exponent and adds the implicit 1
+#[allow(clippy::cast_possible_truncation)]
+pub(crate) fn decode_f64(f: f64) -> (u64, u64, u64) {
+ let bits = f.to_bits();
+ let sign = bits >> 63;
+ let exponent = bits >> 52 & 0x7ff;
+ let exponent_bytes_no_bias = (exponent as i16 - 1023).to_be_bytes();
+ let exponent_no_bias = u64::from_be_bytes([
+ 0x0,
+ 0x0,
+ 0x0,
+ 0x0,
+ 0x0,
+ 0x0,
+ exponent_bytes_no_bias[0],
+ exponent_bytes_no_bias[1],
+ ]);
+ let mantissa = bits & 0xfffffffffffff;
+ (sign, exponent_no_bias, mantissa + 1)
+}
+
+/// Encode an f64 from its sign, exponent (**without** the 1023 bias), and (mantissa - 1), as received from the ASN.1 decoder, using bit shifts
+pub(crate) fn encode_f64(sign: u64, exponent: u64, mantissa: u64) -> f64 {
+ // Add the bias to the exponent
+ let exponent_with_bias =
+ (i16::from_be_bytes([exponent.to_be_bytes()[6], exponent.to_be_bytes()[7]]) + 1023) as u64;
+ let bits = sign << 63 | exponent_with_bias << 52 | (mantissa - 1);
+ f64::from_bits(bits)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{Decode, Encode};
+
+ #[test]
+ fn decode_subnormal() {
+ assert!(f64::from_der(&[0x09, 0x01, 0b0100_0010]).unwrap().is_nan());
+ let plus_infty = f64::from_der(&[0x09, 0x01, 0b0100_0000]).unwrap();
+ assert!(plus_infty.is_infinite() && plus_infty.is_sign_positive());
+ let neg_infty = f64::from_der(&[0x09, 0x01, 0b0100_0001]).unwrap();
+ assert!(neg_infty.is_infinite() && neg_infty.is_sign_negative());
+ let neg_zero = f64::from_der(&[0x09, 0x01, 0b0100_0011]).unwrap();
+ assert!(neg_zero.is_sign_negative() && neg_zero.abs() < f64::EPSILON);
+ }
+
+ #[test]
+ fn encode_subnormal() {
+ // All of these special values fit in three bytes
+ let mut buffer = [0u8; 3];
+ assert_eq!(
+ &[0x09, 0x01, 0b0100_0010],
+ f64::NAN.encode_to_slice(&mut buffer).unwrap()
+ );
+ assert_eq!(
+ &[0x09, 0x01, 0b0100_0000],
+ f64::INFINITY.encode_to_slice(&mut buffer).unwrap()
+ );
+ assert_eq!(
+ &[0x09, 0x01, 0b0100_0001],
+ f64::NEG_INFINITY.encode_to_slice(&mut buffer).unwrap()
+ );
+ assert_eq!(
+ &[0x09, 0x01, 0b0100_0011],
+ (-0.0_f64).encode_to_slice(&mut buffer).unwrap()
+ );
+ }
+
+ #[test]
+ fn encdec_normal() {
+ // The comments correspond to the decoded value from the ASN.1 playground when the bytes are entered.
+ {
+ // rec1value R ::= 0
+ let val = 0.0;
+ let expected = &[0x09, 0x0];
+ let mut buffer = [0u8; 2];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ // rec1value R ::= { mantissa 1, base 2, exponent 0 }
+ let val = 1.0;
+ let expected = &[0x09, 0x03, 0x80, 0x00, 0x01];
+ let mut buffer = [0u8; 5];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ // rec1value R ::= { mantissa -1, base 2, exponent 0 }
+ let val = -1.0;
+ let expected = &[0x09, 0x03, 0xc0, 0x00, 0x01];
+ let mut buffer = [0u8; 5];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ // rec1value R ::= { mantissa -1, base 2, exponent 1 }
+ let val = -1.0000000000000002;
+ let expected = &[0x09, 0x03, 0xc0, 0x00, 0x02];
+ let mut buffer = [0u8; 5];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ // rec1value R ::= { mantissa 1, base 2, exponent -1022 }
+ // NOTE: f64::MIN_EXP == -1021 so the exponent decoded by ASN.1 is what we expect
+ let val = f64::MIN_POSITIVE;
+ let expected = &[0x09, 0x04, 0x81, 0xfc, 0x02, 0x01];
+ let mut buffer = [0u8; 7];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ // rec4value R ::= { mantissa 1, base 2, exponent 3 }
+ let val = 1.0000000000000016;
+ let expected = &[0x09, 0x03, 0x80, 0x00, 0x08];
+ let mut buffer = [0u8; 5];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ // rec5value R ::= { mantissa 4222124650659841, base 2, exponent 4 }
+ let val = 31.0;
+ let expected = &[
+ 0x9, 0x9, 0x80, 0x04, 0x0f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
+ ];
+ let mut buffer = [0u8; 11];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+ }
+
+ #[test]
+ fn encdec_irrationals() {
+ {
+ let val = core::f64::consts::PI;
+ let expected = &[
+ 0x09, 0x09, 0x80, 0x01, 0x09, 0x21, 0xfb, 0x54, 0x44, 0x2d, 0x19,
+ ];
+ let mut buffer = [0u8; 11];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ let val = core::f64::consts::E;
+ let expected = &[
+ 0x09, 0x09, 0x80, 0x01, 0x05, 0xbf, 0x0a, 0x8b, 0x14, 0x57, 0x6a,
+ ];
+ let mut buffer = [0u8; 12];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+ {
+ let val = core::f64::consts::LN_2;
+ let expected = &[
+ 0x09, 0x0a, 0x81, 0xff, 0xff, 0x6, 0x2e, 0x42, 0xfe, 0xfa, 0x39, 0xf0,
+ ];
+ let mut buffer = [0u8; 12];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+ }
+
+ #[test]
+ fn encdec_reasonable_f64() {
+ // Tests the encoding and decoding of reals with some arbitrary numbers
+ {
+ // rec1value R ::= { mantissa 2414341043715239, base 2, exponent 21 }
+ let val = 3221417.1584163485;
+ let expected = &[
+ 0x9, 0x9, 0x80, 0x15, 0x8, 0x93, 0xd4, 0x94, 0x46, 0xfc, 0xa7,
+ ];
+ let mut buffer = [0u8; 11];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ // rec1value R ::= { mantissa 2671155248072715, base 2, exponent 23 }
+ let val = 13364022.365665454;
+ let expected = &[
+ 0x09, 0x09, 0x80, 0x17, 0x09, 0x7d, 0x66, 0xcb, 0xb3, 0x88, 0x0b,
+ ];
+ let mut buffer = [0u8; 12];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ // rec1value R ::= { mantissa -4386812962460287, base 2, exponent 14 }
+ let val = -32343.132588105735;
+ let expected = &[
+ 0x09, 0x09, 0xc0, 0x0e, 0x0f, 0x95, 0xc8, 0x7c, 0x52, 0xd2, 0x7f,
+ ];
+ let mut buffer = [0u8; 12];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ let val = -27084.866751869475;
+ let expected = &[
+ 0x09, 0x09, 0xc0, 0x0e, 0x0a, 0x73, 0x37, 0x78, 0xdc, 0xd5, 0x4a,
+ ];
+ let mut buffer = [0u8; 12];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ // rec1value R ::= { mantissa -4372913134428149, base 2, exponent 7 }
+ let val = -252.28566647111404;
+ let expected = &[
+ 0x09, 0x09, 0xc0, 0x07, 0x0f, 0x89, 0x24, 0x2e, 0x02, 0xdf, 0xf5,
+ ];
+ let mut buffer = [0u8; 12];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ let val = -14.399709612928548;
+ let expected = &[
+ 0x09, 0x09, 0xc0, 0x03, 0x0c, 0xcc, 0xa6, 0xbd, 0x06, 0xd9, 0x92,
+ ];
+ let mut buffer = [0u8; 12];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ let val = -0.08340570261832964;
+ let expected = &[
+ 0x09, 0x0a, 0xc1, 0xff, 0xfc, 0x05, 0x5a, 0x13, 0x7d, 0x0b, 0xae, 0x3d,
+ ];
+ let mut buffer = [0u8; 12];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ let val = 0.00536851453803701;
+ let expected = &[
+ 0x09, 0x0a, 0x81, 0xff, 0xf8, 0x05, 0xfd, 0x4b, 0xa5, 0xe7, 0x4c, 0x93,
+ ];
+ let mut buffer = [0u8; 12];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ let val = 0.00045183525648866433;
+ let expected = &[
+ 0x09, 0x0a, 0x81, 0xff, 0xf4, 0x0d, 0x9c, 0x89, 0xa6, 0x59, 0x33, 0x39,
+ ];
+ let mut buffer = [0u8; 12];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ let val = 0.000033869092002682955;
+ let expected = &[
+ 0x09, 0x0a, 0x81, 0xff, 0xf1, 0x01, 0xc1, 0xd5, 0x23, 0xd5, 0x54, 0x7c,
+ ];
+ let mut buffer = [0u8; 12];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ let val = 0.0000011770891033600088;
+ let expected = &[
+ 0x09, 0x0a, 0x81, 0xff, 0xec, 0x03, 0xbf, 0x8f, 0x27, 0xf4, 0x62, 0x56,
+ ];
+ let mut buffer = [0u8; 12];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ let val = 0.00000005549514041997082;
+ let expected = &[
+ 0x09, 0x0a, 0x81, 0xff, 0xe7, 0x0d, 0xcb, 0x31, 0xab, 0x6e, 0xb8, 0xd7,
+ ];
+ let mut buffer = [0u8; 12];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ let val = 0.0000000012707044685547803;
+ let expected = &[
+ 0x09, 0x0a, 0x81, 0xff, 0xe2, 0x05, 0xd4, 0x9e, 0x0a, 0xf2, 0xff, 0x1f,
+ ];
+ let mut buffer = [0u8; 12];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+
+ {
+ let val = 0.00000000002969611878378562;
+ let expected = &[
+ 0x09, 0x09, 0x81, 0xff, 0xdd, 0x53, 0x5b, 0x6f, 0x97, 0xee, 0xb6,
+ ];
+ let mut buffer = [0u8; 11];
+ let encoded = val.encode_to_slice(&mut buffer).unwrap();
+ assert_eq!(
+ expected, encoded,
+ "invalid encoding of {}:\ngot {:x?}\nwant: {:x?}",
+ val, encoded, expected
+ );
+ let decoded = f64::from_der(encoded).unwrap();
+ assert!(
+ (decoded - val).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ val,
+ decoded
+ );
+ }
+ }
+
+ #[test]
+ fn reject_non_canonical() {
+ assert!(f64::from_der(&[0x09, 0x81, 0x00]).is_err());
+ }
+
+ #[test]
+ fn encdec_f64() {
+ use super::{decode_f64, encode_f64};
+ // Test that the extraction and recreation works
+ for val in [
+ 1.0,
+ 0.1,
+ -0.1,
+ -1.0,
+ 0.0,
+ f64::MIN_POSITIVE,
+ f64::MAX,
+ f64::MIN,
+ 3.1415,
+ 951.2357864,
+ -3.1415,
+ -951.2357864,
+ ] {
+ let (s, e, m) = decode_f64(val);
+ let val2 = encode_f64(s, e, m);
+ assert!(
+ (val - val2).abs() < f64::EPSILON,
+ "fail - want {}\tgot {}",
+ val,
+ val2
+ );
+ }
+ }
+
+ #[test]
+ fn validation_cases() {
+ // Caveat: these test cases were validated on the ASN.1 playground: https://asn1.io/asn1playground/ .
+ // Each test case consists of entering the bytes in the "decode" field and checking that the decoded
+ // value matches the one expected here.
+ // That tool encodes _all_ non-zero values in the ISO 6093 NR3 representation,
+ // which does not seem to adhere perfectly to the Special Cases section of the ITU specification.
+ // The implementation of this crate correctly supports decoding such values. It will, however,
+ // systematically encode REALs in their base 2 form, with a scaling factor where needed to
+ // ensure that the mantissa is either odd or zero (as per section 11.3.1).
+
+ // Positive trivial numbers
+ {
+ let expect = 10.0;
+ let testcase = &[0x09, 0x05, 0x03, 0x31, 0x2E, 0x45, 0x31];
+ let decoded = f64::from_der(testcase).unwrap();
+ assert!(
+ (decoded - expect).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ expect,
+ decoded
+ );
+ }
+ {
+ let expect = 100.0;
+ let testcase = &[0x09, 0x05, 0x03, 0x31, 0x2E, 0x45, 0x32];
+ let decoded = f64::from_der(testcase).unwrap();
+ assert!(
+ (decoded - expect).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ expect,
+ decoded
+ );
+ }
+ {
+ let expect = 101.0;
+ let testcase = &[0x09, 0x08, 0x03, 0x31, 0x30, 0x31, 0x2E, 0x45, 0x2B, 0x30];
+ let decoded = f64::from_der(testcase).unwrap();
+ assert!(
+ (decoded - expect).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ expect,
+ decoded
+ );
+ }
+ {
+ let expect = 101.0;
+ let testcase = &[0x09, 0x08, 0x03, 0x31, 0x30, 0x31, 0x2E, 0x45, 0x2B, 0x30];
+ let decoded = f64::from_der(testcase).unwrap();
+ assert!(
+ (decoded - expect).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ expect,
+ decoded
+ );
+ }
+ {
+ let expect = 0.0;
+ let testcase = &[0x09, 0x00];
+ let decoded = f64::from_der(testcase).unwrap();
+ assert!(
+ (decoded - expect).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ expect,
+ decoded
+ );
+ }
+ {
+ let expect = 951.2357864;
+ let testcase = &[
+ 0x09, 0x0F, 0x03, 0x39, 0x35, 0x31, 0x32, 0x33, 0x35, 0x37, 0x38, 0x36, 0x34, 0x2E,
+ 0x45, 0x2D, 0x37,
+ ];
+ let decoded = f64::from_der(testcase).unwrap();
+ assert!(
+ (decoded - expect).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ expect,
+ decoded
+ );
+ }
+ // Negative trivial numbers
+ {
+ let expect = -10.0;
+ let testcase = &[0x09, 0x06, 0x03, 0x2D, 0x31, 0x2E, 0x45, 0x31];
+ let decoded = f64::from_der(testcase).unwrap();
+ assert!(
+ (decoded - expect).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ expect,
+ decoded
+ );
+ }
+ {
+ let expect = -100.0;
+ let testcase = &[0x09, 0x06, 0x03, 0x2D, 0x31, 0x2E, 0x45, 0x32];
+ let decoded = f64::from_der(testcase).unwrap();
+ assert!(
+ (decoded - expect).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ expect,
+ decoded
+ );
+ }
+ {
+ let expect = -101.0;
+ let testcase = &[
+ 0x09, 0x09, 0x03, 0x2D, 0x31, 0x30, 0x31, 0x2E, 0x45, 0x2B, 0x30,
+ ];
+ let decoded = f64::from_der(testcase).unwrap();
+ assert!(
+ (decoded - expect).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ expect,
+ decoded
+ );
+ }
+ {
+ let expect = -0.5;
+ let testcase = &[0x09, 0x07, 0x03, 0x2D, 0x35, 0x2E, 0x45, 0x2D, 0x31];
+ let decoded = f64::from_der(testcase).unwrap();
+ assert!(
+ (decoded - expect).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ expect,
+ decoded
+ );
+ }
+ {
+ let expect = -0.0;
+ let testcase = &[0x09, 0x03, 0x01, 0x2D, 0x30];
+ let decoded = f64::from_der(testcase).unwrap();
+ assert!(
+ (decoded - expect).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ expect,
+ decoded
+ );
+ }
+ {
+ // Test NR3 decoding
+ let expect = -951.2357864;
+ let testcase = &[
+ 0x09, 0x10, 0x03, 0x2D, 0x39, 0x35, 0x31, 0x32, 0x33, 0x35, 0x37, 0x38, 0x36, 0x34,
+ 0x2E, 0x45, 0x2D, 0x37,
+ ];
+ let decoded = f64::from_der(testcase).unwrap();
+ assert!(
+ (decoded - expect).abs() < f64::EPSILON,
+ "wanted: {}\tgot: {}",
+ expect,
+ decoded
+ );
+ }
+ }
+}
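A round-trip sketch for the REAL support above (illustrative; the f64 impls appear to be gated behind the crate's optional "real" feature per the doc(cfg) attributes, so that feature is assumed here):

use der::{Decode, Encode};

fn real_roundtrip() -> der::Result<()> {
    // 1.0 encodes as 09 03 80 00 01: binary form, exponent 0, mantissa 1
    let mut buf = [0u8; 16];
    let encoded = 1.0f64.encode_to_slice(&mut buf)?;
    assert_eq!(encoded, &[0x09, 0x03, 0x80, 0x00, 0x01]);

    let decoded = f64::from_der(encoded)?;
    assert!((decoded - 1.0).abs() < f64::EPSILON);
    Ok(())
}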
diff --git a/src/asn1/sequence.rs b/src/asn1/sequence.rs
new file mode 100644
index 0000000..d2f6bc5
--- /dev/null
+++ b/src/asn1/sequence.rs
@@ -0,0 +1,84 @@
+//! The [`Sequence`] trait simplifies writing decoders/encoders which map ASN.1
+//! `SEQUENCE`s to Rust structs.
+
+use crate::{
+ ByteSlice, Decode, DecodeValue, Encode, EncodeValue, FixedTag, Header, Length, Reader, Result,
+ Tag, Writer,
+};
+
+/// ASN.1 `SEQUENCE` trait.
+///
+/// Types which impl this trait receive blanket impls for the [`Decode`],
+/// [`Encode`], and [`FixedTag`] traits.
+pub trait Sequence<'a>: Decode<'a> {
+ /// Call the provided function with a slice of [`Encode`] trait objects
+ /// representing the fields of this `SEQUENCE`.
+ ///
+ /// This method uses a callback because structs whose fields don't
+ /// directly impl [`Encode`] may need to construct temporary values from
+ /// their fields prior to encoding.
+ fn fields<F, T>(&self, f: F) -> Result<T>
+ where
+ F: FnOnce(&[&dyn Encode]) -> Result<T>;
+}
+
+impl<'a, M> EncodeValue for M
+where
+ M: Sequence<'a>,
+{
+ fn value_len(&self) -> Result<Length> {
+ self.fields(|fields| {
+ fields
+ .iter()
+ .try_fold(Length::ZERO, |acc, field| acc + field.encoded_len()?)
+ })
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ self.fields(|fields| {
+ for &field in fields {
+ field.encode(writer)?;
+ }
+
+ Ok(())
+ })
+ }
+}
+
+impl<'a, M> FixedTag for M
+where
+ M: Sequence<'a>,
+{
+ const TAG: Tag = Tag::Sequence;
+}
+
+/// The [`SequenceRef`] type provides raw access to the octets which comprise a
+/// DER-encoded `SEQUENCE`.
+///
+/// This is a zero-copy reference type which borrows from the input data.
+pub struct SequenceRef<'a> {
+ /// Body of the `SEQUENCE`.
+ body: ByteSlice<'a>,
+}
+
+impl<'a> DecodeValue<'a> for SequenceRef<'a> {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ Ok(Self {
+ body: ByteSlice::decode_value(reader, header)?,
+ })
+ }
+}
+
+impl EncodeValue for SequenceRef<'_> {
+ fn value_len(&self) -> Result<Length> {
+ Ok(self.body.len())
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ self.body.encode_value(writer)
+ }
+}
+
+impl<'a> FixedTag for SequenceRef<'a> {
+ const TAG: Tag = Tag::Sequence;
+}
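A hand-written sketch of plugging a struct into the Sequence trait above (illustrative; in practice the der_derive proc macro behind the "derive" feature generates equivalent impls, and the field types here are simply the string types defined elsewhere in this import):

use der::{
    asn1::{OctetStringRef, PrintableStringRef},
    Decode, DecodeValue, Encode, Header, Reader, Result, Sequence,
};

/// SampleEntry ::= SEQUENCE { name PrintableString, data OCTET STRING }
struct SampleEntry<'a> {
    name: PrintableStringRef<'a>,
    data: OctetStringRef<'a>,
}

impl<'a> DecodeValue<'a> for SampleEntry<'a> {
    fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
        // Decode the two fields from a reader bounded to the SEQUENCE body
        reader.read_nested(header.length, |reader| {
            Ok(Self {
                name: PrintableStringRef::decode(reader)?,
                data: OctetStringRef::decode(reader)?,
            })
        })
    }
}

impl<'a> Sequence<'a> for SampleEntry<'a> {
    // The blanket impls above supply EncodeValue and FixedTag, which in
    // turn provide Encode and Decode for this type.
    fn fields<F, T>(&self, f: F) -> Result<T>
    where
        F: FnOnce(&[&dyn Encode]) -> Result<T>,
    {
        f(&[&self.name, &self.data])
    }
}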
diff --git a/src/asn1/sequence_of.rs b/src/asn1/sequence_of.rs
new file mode 100644
index 0000000..6334d71
--- /dev/null
+++ b/src/asn1/sequence_of.rs
@@ -0,0 +1,234 @@
+//! ASN.1 `SEQUENCE OF` support.
+
+use crate::{
+ arrayvec, ord::iter_cmp, ArrayVec, Decode, DecodeValue, DerOrd, Encode, EncodeValue, FixedTag,
+ Header, Length, Reader, Result, Tag, ValueOrd, Writer,
+};
+use core::cmp::Ordering;
+
+#[cfg(feature = "alloc")]
+use alloc::vec::Vec;
+
+/// ASN.1 `SEQUENCE OF` backed by an array.
+///
+/// This type implements an append-only `SEQUENCE OF` type which is stack-based
+/// and does not depend on `alloc` support.
+// TODO(tarcieri): use `ArrayVec` when/if it's merged into `core`
+// See: https://github.com/rust-lang/rfcs/pull/2990
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct SequenceOf<T, const N: usize> {
+ inner: ArrayVec<T, N>,
+}
+
+impl<T, const N: usize> SequenceOf<T, N> {
+ /// Create a new [`SequenceOf`].
+ pub fn new() -> Self {
+ Self {
+ inner: ArrayVec::new(),
+ }
+ }
+
+ /// Add an element to this [`SequenceOf`].
+ pub fn add(&mut self, element: T) -> Result<()> {
+ self.inner.add(element)
+ }
+
+ /// Get an element of this [`SequenceOf`].
+ pub fn get(&self, index: usize) -> Option<&T> {
+ self.inner.get(index)
+ }
+
+ /// Iterate over the elements in this [`SequenceOf`].
+ pub fn iter(&self) -> SequenceOfIter<'_, T> {
+ SequenceOfIter {
+ inner: self.inner.iter(),
+ }
+ }
+
+ /// Is this [`SequenceOf`] empty?
+ pub fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+
+ /// Number of elements in this [`SequenceOf`].
+ pub fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+impl<T, const N: usize> Default for SequenceOf<T, N> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl<'a, T, const N: usize> DecodeValue<'a> for SequenceOf<T, N>
+where
+ T: Decode<'a>,
+{
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ reader.read_nested(header.length, |reader| {
+ let mut sequence_of = Self::new();
+
+ while !reader.is_finished() {
+ sequence_of.add(T::decode(reader)?)?;
+ }
+
+ Ok(sequence_of)
+ })
+ }
+}
+
+impl<T, const N: usize> EncodeValue for SequenceOf<T, N>
+where
+ T: Encode,
+{
+ fn value_len(&self) -> Result<Length> {
+ self.iter()
+ .fold(Ok(Length::ZERO), |len, elem| len + elem.encoded_len()?)
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ for elem in self.iter() {
+ elem.encode(writer)?;
+ }
+
+ Ok(())
+ }
+}
+
+impl<T, const N: usize> FixedTag for SequenceOf<T, N> {
+ const TAG: Tag = Tag::Sequence;
+}
+
+impl<T, const N: usize> ValueOrd for SequenceOf<T, N>
+where
+ T: DerOrd,
+{
+ fn value_cmp(&self, other: &Self) -> Result<Ordering> {
+ iter_cmp(self.iter(), other.iter())
+ }
+}
+
+/// Iterator over the elements of a [`SequenceOf`].
+#[derive(Clone, Debug)]
+pub struct SequenceOfIter<'a, T> {
+ /// Inner iterator.
+ inner: arrayvec::Iter<'a, T>,
+}
+
+impl<'a, T> Iterator for SequenceOfIter<'a, T> {
+ type Item = &'a T;
+
+ fn next(&mut self) -> Option<&'a T> {
+ self.inner.next()
+ }
+}
+
+impl<'a, T> ExactSizeIterator for SequenceOfIter<'a, T> {}
+
+impl<'a, T, const N: usize> DecodeValue<'a> for [T; N]
+where
+ T: Decode<'a>,
+{
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ let sequence_of = SequenceOf::<T, N>::decode_value(reader, header)?;
+
+ // TODO(tarcieri): use `[T; N]::try_map` instead of `expect` when stable
+ if sequence_of.inner.len() == N {
+ Ok(sequence_of
+ .inner
+ .into_array()
+ .map(|elem| elem.expect("arrayvec length mismatch")))
+ } else {
+ Err(Self::TAG.length_error())
+ }
+ }
+}
+
+impl<T, const N: usize> EncodeValue for [T; N]
+where
+ T: Encode,
+{
+ fn value_len(&self) -> Result<Length> {
+ self.iter()
+ .fold(Ok(Length::ZERO), |len, elem| len + elem.encoded_len()?)
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ for elem in self {
+ elem.encode(writer)?;
+ }
+
+ Ok(())
+ }
+}
+
+impl<T, const N: usize> FixedTag for [T; N] {
+ const TAG: Tag = Tag::Sequence;
+}
+
+impl<T, const N: usize> ValueOrd for [T; N]
+where
+ T: DerOrd,
+{
+ fn value_cmp(&self, other: &Self) -> Result<Ordering> {
+ iter_cmp(self.iter(), other.iter())
+ }
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl<'a, T> DecodeValue<'a> for Vec<T>
+where
+ T: Decode<'a>,
+{
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ reader.read_nested(header.length, |reader| {
+ let mut sequence_of = Self::new();
+
+ while !reader.is_finished() {
+ sequence_of.push(T::decode(reader)?);
+ }
+
+ Ok(sequence_of)
+ })
+ }
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl<T> EncodeValue for Vec<T>
+where
+ T: Encode,
+{
+ fn value_len(&self) -> Result<Length> {
+ self.iter()
+ .fold(Ok(Length::ZERO), |len, elem| len + elem.encoded_len()?)
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ for elem in self {
+ elem.encode(writer)?;
+ }
+
+ Ok(())
+ }
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl<T> FixedTag for Vec<T> {
+ const TAG: Tag = Tag::Sequence;
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl<T> ValueOrd for Vec<T>
+where
+ T: DerOrd,
+{
+ fn value_cmp(&self, other: &Self) -> Result<Ordering> {
+ iter_cmp(self.iter(), other.iter())
+ }
+}
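A round-trip sketch for the array-backed SEQUENCE OF impls above (illustrative; it assumes the fixed-size integer support in this crate's integer module, with u8 elements encoding as ASN.1 INTEGERs):

use der::{Decode, Encode};

fn sequence_of_roundtrip() -> der::Result<()> {
    // [1, 2, 3] encodes as 30 09 (02 01 01) (02 01 02) (02 01 03)
    let mut buf = [0u8; 16];
    let encoded = [1u8, 2, 3].encode_to_slice(&mut buf)?;
    assert_eq!(<[u8; 3]>::from_der(encoded)?, [1, 2, 3]);
    Ok(())
}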
diff --git a/src/asn1/set_of.rs b/src/asn1/set_of.rs
new file mode 100644
index 0000000..b8c4b0d
--- /dev/null
+++ b/src/asn1/set_of.rs
@@ -0,0 +1,451 @@
+//! ASN.1 `SET OF` support.
+//!
+//! # Ordering Notes
+//!
+//! Some DER serializer implementations fail to properly sort elements of a
+//! `SET OF`. This is technically non-canonical, but occurs frequently
+//! enough that most DER decoders tolerate it. Unfortunately because
+//! of that, we must also follow suit.
+//!
+//! However, all types in this module sort elements of a set at decode-time,
+//! ensuring they'll be in the proper order if reserialized.
+
+use crate::{
+ arrayvec, ord::iter_cmp, ArrayVec, Decode, DecodeValue, DerOrd, Encode, EncodeValue, Error,
+ ErrorKind, FixedTag, Header, Length, Reader, Result, Tag, ValueOrd, Writer,
+};
+use core::cmp::Ordering;
+
+#[cfg(feature = "alloc")]
+use {alloc::vec::Vec, core::slice};
+
+/// ASN.1 `SET OF` backed by an array.
+///
+/// This type implements an append-only `SET OF` type which is stack-based
+/// and does not depend on `alloc` support.
+// TODO(tarcieri): use `ArrayVec` when/if it's merged into `core`
+// See: https://github.com/rust-lang/rfcs/pull/2990
+#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub struct SetOf<T, const N: usize>
+where
+ T: DerOrd,
+{
+ inner: ArrayVec<T, N>,
+}
+
+impl<T, const N: usize> SetOf<T, N>
+where
+ T: DerOrd,
+{
+ /// Create a new [`SetOf`].
+ pub fn new() -> Self {
+ Self {
+ inner: ArrayVec::default(),
+ }
+ }
+
+ /// Add an element to this [`SetOf`].
+ ///
+ /// Items MUST be added in lexicographical order according to the
+ /// [`DerOrd`] impl on `T`.
+ pub fn add(&mut self, new_elem: T) -> Result<()> {
+ // Ensure set elements are lexicographically ordered
+ if let Some(last_elem) = self.inner.last() {
+ if new_elem.der_cmp(last_elem)? != Ordering::Greater {
+ return Err(ErrorKind::SetOrdering.into());
+ }
+ }
+
+ self.inner.add(new_elem)
+ }
+
+ /// Get the nth element from this [`SetOf`].
+ pub fn get(&self, index: usize) -> Option<&T> {
+ self.inner.get(index)
+ }
+
+ /// Iterate over the elements of this [`SetOf`].
+ pub fn iter(&self) -> SetOfIter<'_, T> {
+ SetOfIter {
+ inner: self.inner.iter(),
+ }
+ }
+
+ /// Is this [`SetOf`] empty?
+ pub fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+
+ /// Number of elements in this [`SetOf`].
+ pub fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+impl<T, const N: usize> Default for SetOf<T, N>
+where
+ T: DerOrd,
+{
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl<'a, T, const N: usize> DecodeValue<'a> for SetOf<T, N>
+where
+ T: Decode<'a> + DerOrd,
+{
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ reader.read_nested(header.length, |reader| {
+ let mut result = Self::new();
+
+ while !reader.is_finished() {
+ result.inner.add(T::decode(reader)?)?;
+ }
+
+ der_sort(result.inner.as_mut())?;
+ validate(result.inner.as_ref())?;
+ Ok(result)
+ })
+ }
+}
+
+impl<'a, T, const N: usize> EncodeValue for SetOf<T, N>
+where
+ T: 'a + Decode<'a> + Encode + DerOrd,
+{
+ fn value_len(&self) -> Result<Length> {
+ self.iter()
+ .fold(Ok(Length::ZERO), |len, elem| len + elem.encoded_len()?)
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ for elem in self.iter() {
+ elem.encode(writer)?;
+ }
+
+ Ok(())
+ }
+}
+
+impl<'a, T, const N: usize> FixedTag for SetOf<T, N>
+where
+ T: Decode<'a> + DerOrd,
+{
+ const TAG: Tag = Tag::Set;
+}
+
+impl<T, const N: usize> TryFrom<[T; N]> for SetOf<T, N>
+where
+ T: DerOrd,
+{
+ type Error = Error;
+
+ fn try_from(mut arr: [T; N]) -> Result<SetOf<T, N>> {
+ der_sort(&mut arr)?;
+
+ let mut result = SetOf::new();
+
+ for elem in arr {
+ result.add(elem)?;
+ }
+
+ Ok(result)
+ }
+}
+
+impl<T, const N: usize> ValueOrd for SetOf<T, N>
+where
+ T: DerOrd,
+{
+ fn value_cmp(&self, other: &Self) -> Result<Ordering> {
+ iter_cmp(self.iter(), other.iter())
+ }
+}
+
+/// Iterator over the elements of a [`SetOf`].
+#[derive(Clone, Debug)]
+pub struct SetOfIter<'a, T> {
+ /// Inner iterator.
+ inner: arrayvec::Iter<'a, T>,
+}
+
+impl<'a, T> Iterator for SetOfIter<'a, T> {
+ type Item = &'a T;
+
+ fn next(&mut self) -> Option<&'a T> {
+ self.inner.next()
+ }
+}
+
+impl<'a, T> ExactSizeIterator for SetOfIter<'a, T> {}
+
+/// ASN.1 `SET OF` backed by a [`Vec`].
+///
+/// This type implements an append-only `SET OF` type which is heap-backed
+/// and depends on `alloc` support.
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub struct SetOfVec<T>
+where
+ T: DerOrd,
+{
+ inner: Vec<T>,
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl<T: DerOrd> Default for SetOfVec<T> {
+ fn default() -> Self {
+ Self {
+ inner: Default::default(),
+ }
+ }
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl<T> SetOfVec<T>
+where
+ T: DerOrd,
+{
+ /// Create a new [`SetOfVec`].
+ pub fn new() -> Self {
+ Self {
+ inner: Vec::default(),
+ }
+ }
+
+ /// Add an element to this [`SetOfVec`].
+ ///
+ /// Items MUST be added in lexicographical order according to the
+ /// [`DerOrd`] impl on `T`.
+ pub fn add(&mut self, new_elem: T) -> Result<()> {
+ // Ensure set elements are lexicographically ordered
+ if let Some(last_elem) = self.inner.last() {
+ if new_elem.der_cmp(last_elem)? != Ordering::Greater {
+ return Err(ErrorKind::SetOrdering.into());
+ }
+ }
+
+ self.inner.push(new_elem);
+ Ok(())
+ }
+
+ /// Borrow the elements of this [`SetOfVec`] as a slice.
+ pub fn as_slice(&self) -> &[T] {
+ self.inner.as_slice()
+ }
+
+ /// Get the nth element from this [`SetOfVec`].
+ pub fn get(&self, index: usize) -> Option<&T> {
+ self.inner.get(index)
+ }
+
+ /// Convert this [`SetOfVec`] into the inner [`Vec`].
+ pub fn into_vec(self) -> Vec<T> {
+ self.inner
+ }
+
+ /// Iterate over the elements of this [`SetOfVec`].
+ pub fn iter(&self) -> slice::Iter<'_, T> {
+ self.inner.iter()
+ }
+
+ /// Is this [`SetOfVec`] empty?
+ pub fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+
+ /// Number of elements in this [`SetOfVec`].
+ pub fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl<T> AsRef<[T]> for SetOfVec<T>
+where
+ T: DerOrd,
+{
+ fn as_ref(&self) -> &[T] {
+ self.as_slice()
+ }
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl<'a, T> DecodeValue<'a> for SetOfVec<T>
+where
+ T: Decode<'a> + DerOrd,
+{
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ reader.read_nested(header.length, |reader| {
+ let mut inner = Vec::new();
+
+ while !reader.is_finished() {
+ inner.push(T::decode(reader)?);
+ }
+
+ der_sort(inner.as_mut())?;
+ validate(inner.as_ref())?;
+ Ok(Self { inner })
+ })
+ }
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl<'a, T> EncodeValue for SetOfVec<T>
+where
+ T: 'a + Decode<'a> + Encode + DerOrd,
+{
+ fn value_len(&self) -> Result<Length> {
+ self.iter()
+ .fold(Ok(Length::ZERO), |len, elem| len + elem.encoded_len()?)
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ for elem in self.iter() {
+ elem.encode(writer)?;
+ }
+
+ Ok(())
+ }
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl<T> FixedTag for SetOfVec<T>
+where
+ T: DerOrd,
+{
+ const TAG: Tag = Tag::Set;
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl<T> From<SetOfVec<T>> for Vec<T>
+where
+ T: DerOrd,
+{
+ fn from(set: SetOfVec<T>) -> Vec<T> {
+ set.into_vec()
+ }
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl<T> TryFrom<Vec<T>> for SetOfVec<T>
+where
+ T: DerOrd,
+{
+ type Error = Error;
+
+ fn try_from(mut vec: Vec<T>) -> Result<SetOfVec<T>> {
+ // TODO(tarcieri): use `[T]::sort_by` here?
+ der_sort(vec.as_mut_slice())?;
+ Ok(SetOfVec { inner: vec })
+ }
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl<T, const N: usize> TryFrom<[T; N]> for SetOfVec<T>
+where
+ T: DerOrd,
+{
+ type Error = Error;
+
+ fn try_from(arr: [T; N]) -> Result<SetOfVec<T>> {
+ Vec::from(arr).try_into()
+ }
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl<T> ValueOrd for SetOfVec<T>
+where
+ T: DerOrd,
+{
+ fn value_cmp(&self, other: &Self) -> Result<Ordering> {
+ iter_cmp(self.iter(), other.iter())
+ }
+}
+
+/// Sort a mut slice according to its [`DerOrd`], returning any errors which
+/// might occur during the comparison.
+///
+/// The algorithm is insertion sort, which should perform well when the input
+/// is mostly sorted to begin with.
+///
+/// This function is used rather than Rust's built-in `[T]::sort_by` in order
+/// to support heapless `no_std` targets as well as to enable bubbling up
+/// sorting errors.
+#[allow(clippy::integer_arithmetic)]
+fn der_sort<T: DerOrd>(slice: &mut [T]) -> Result<()> {
+ for i in 0..slice.len() {
+ let mut j = i;
+
+ while j > 0 && slice[j - 1].der_cmp(&slice[j])? == Ordering::Greater {
+ slice.swap(j - 1, j);
+ j -= 1;
+ }
+ }
+
+ Ok(())
+}
+
+/// Validate the elements of a `SET OF`, ensuring that they are all in order
+/// and that there are no duplicates.
+fn validate<T: DerOrd>(slice: &[T]) -> Result<()> {
+ if let Some(len) = slice.len().checked_sub(1) {
+ for i in 0..len {
+ let j = i.checked_add(1).ok_or(ErrorKind::Overflow)?;
+
+ match slice.get(i..=j) {
+ Some([a, b]) => {
+ if a.der_cmp(b)? != Ordering::Less {
+ return Err(ErrorKind::SetOrdering.into());
+ }
+ }
+ _ => return Err(Tag::Set.value_error()),
+ }
+ }
+ }
+
+ Ok(())
+}
+
+#[cfg(all(test, feature = "alloc"))]
+mod tests {
+ use super::{SetOf, SetOfVec};
+ use alloc::vec::Vec;
+
+ #[test]
+ fn setof_tryfrom_array() {
+ let arr = [3u16, 2, 1, 65535, 0];
+ let set = SetOf::try_from(arr).unwrap();
+ assert_eq!(
+ set.iter().cloned().collect::<Vec<u16>>(),
+ &[0, 1, 2, 3, 65535]
+ );
+ }
+
+ #[test]
+ fn setofvec_tryfrom_array() {
+ let arr = [3u16, 2, 1, 65535, 0];
+ let set = SetOfVec::try_from(arr).unwrap();
+ assert_eq!(set.as_ref(), &[0, 1, 2, 3, 65535]);
+ }
+
+ #[cfg(feature = "alloc")]
+ #[test]
+ fn setofvec_tryfrom_vec() {
+ let vec = vec![3u16, 2, 1, 65535, 0];
+ let set = SetOfVec::try_from(vec).unwrap();
+ assert_eq!(set.as_ref(), &[0, 1, 2, 3, 65535]);
+ }
+}
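A brief sketch of the ordering rules enforced above (illustrative; SetOfVec requires the alloc feature, and u16 is used because this file's own tests rely on its DerOrd impl):

use der::asn1::SetOfVec;

fn set_ordering() -> der::Result<()> {
    // TryFrom sorts the input into DER lexicographic order
    let set = SetOfVec::try_from(vec![3u16, 1, 2])?;
    assert_eq!(set.as_slice(), &[1, 2, 3]);

    // add only accepts elements that keep the set strictly increasing
    let mut set = SetOfVec::new();
    set.add(1u16)?;
    assert!(set.add(1u16).is_err()); // duplicates are rejected
    Ok(())
}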
diff --git a/src/asn1/utc_time.rs b/src/asn1/utc_time.rs
new file mode 100644
index 0000000..7c23811
--- /dev/null
+++ b/src/asn1/utc_time.rs
@@ -0,0 +1,215 @@
+//! ASN.1 `UTCTime` support.
+
+use crate::{
+ asn1::AnyRef,
+ datetime::{self, DateTime},
+ ord::OrdIsValueOrd,
+ DecodeValue, EncodeValue, Error, ErrorKind, FixedTag, Header, Length, Reader, Result, Tag,
+ Writer,
+};
+use core::time::Duration;
+
+#[cfg(feature = "std")]
+use std::time::SystemTime;
+
+/// Maximum year that can be represented as a `UTCTime`.
+pub const MAX_YEAR: u16 = 2049;
+
+/// ASN.1 `UTCTime` type.
+///
+/// This type implements the validity requirements specified in
+/// [RFC 5280 Section 4.1.2.5.1][1], namely:
+///
+/// > For the purposes of this profile, UTCTime values MUST be expressed in
+/// > Greenwich Mean Time (Zulu) and MUST include seconds (i.e., times are
+/// > `YYMMDDHHMMSSZ`), even where the number of seconds is zero. Conforming
+/// > systems MUST interpret the year field (`YY`) as follows:
+/// >
+/// > - Where `YY` is greater than or equal to 50, the year SHALL be
+/// > interpreted as `19YY`; and
+/// > - Where `YY` is less than 50, the year SHALL be interpreted as `20YY`.
+///
+/// [1]: https://tools.ietf.org/html/rfc5280#section-4.1.2.5.1
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub struct UtcTime(DateTime);
+
+impl UtcTime {
+ /// Length of an RFC 5280-flavored ASN.1 DER-encoded [`UtcTime`].
+ pub const LENGTH: usize = 13;
+
+ /// Create a [`UtcTime`] from a [`DateTime`].
+ pub fn from_date_time(datetime: DateTime) -> Result<Self> {
+ if datetime.year() <= MAX_YEAR {
+ Ok(Self(datetime))
+ } else {
+ Err(Self::TAG.value_error())
+ }
+ }
+
+ /// Convert this [`UtcTime`] into a [`DateTime`].
+ pub fn to_date_time(&self) -> DateTime {
+ self.0
+ }
+
+ /// Create a new [`UtcTime`] given a [`Duration`] since `UNIX_EPOCH`
+ /// (a.k.a. "Unix time")
+ pub fn from_unix_duration(unix_duration: Duration) -> Result<Self> {
+ DateTime::from_unix_duration(unix_duration)?.try_into()
+ }
+
+ /// Get the duration of this timestamp since `UNIX_EPOCH`.
+ pub fn to_unix_duration(&self) -> Duration {
+ self.0.unix_duration()
+ }
+
+ /// Instantiate from [`SystemTime`].
+ #[cfg(feature = "std")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+ pub fn from_system_time(time: SystemTime) -> Result<Self> {
+ DateTime::try_from(time)
+ .map_err(|_| Self::TAG.value_error())?
+ .try_into()
+ }
+
+ /// Convert to [`SystemTime`].
+ #[cfg(feature = "std")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+ pub fn to_system_time(&self) -> SystemTime {
+ self.0.to_system_time()
+ }
+}
+
+impl<'a> DecodeValue<'a> for UtcTime {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ if Self::LENGTH != usize::try_from(header.length)? {
+ return Err(Self::TAG.value_error());
+ }
+
+ let mut bytes = [0u8; Self::LENGTH];
+ reader.read_into(&mut bytes)?;
+
+ match bytes {
+ // RFC 5280 requires mandatory seconds and Z-normalized time zone
+ [year1, year2, mon1, mon2, day1, day2, hour1, hour2, min1, min2, sec1, sec2, b'Z'] => {
+ let year = u16::from(datetime::decode_decimal(Self::TAG, year1, year2)?);
+ let month = datetime::decode_decimal(Self::TAG, mon1, mon2)?;
+ let day = datetime::decode_decimal(Self::TAG, day1, day2)?;
+ let hour = datetime::decode_decimal(Self::TAG, hour1, hour2)?;
+ let minute = datetime::decode_decimal(Self::TAG, min1, min2)?;
+ let second = datetime::decode_decimal(Self::TAG, sec1, sec2)?;
+
+ // RFC 5280 rules for interpreting the year
+ let year = if year >= 50 {
+ year.checked_add(1900)
+ } else {
+ year.checked_add(2000)
+ }
+ .ok_or(ErrorKind::DateTime)?;
+
+ DateTime::new(year, month, day, hour, minute, second)
+ .map_err(|_| Self::TAG.value_error())
+ .and_then(|dt| Self::from_unix_duration(dt.unix_duration()))
+ }
+ _ => Err(Self::TAG.value_error()),
+ }
+ }
+}
+
+impl EncodeValue for UtcTime {
+ fn value_len(&self) -> Result<Length> {
+ Self::LENGTH.try_into()
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ let year = match self.0.year() {
+ y @ 1950..=1999 => y.checked_sub(1900),
+ y @ 2000..=2049 => y.checked_sub(2000),
+ _ => return Err(Self::TAG.value_error()),
+ }
+ .and_then(|y| u8::try_from(y).ok())
+ .ok_or(ErrorKind::DateTime)?;
+
+ datetime::encode_decimal(writer, Self::TAG, year)?;
+ datetime::encode_decimal(writer, Self::TAG, self.0.month())?;
+ datetime::encode_decimal(writer, Self::TAG, self.0.day())?;
+ datetime::encode_decimal(writer, Self::TAG, self.0.hour())?;
+ datetime::encode_decimal(writer, Self::TAG, self.0.minutes())?;
+ datetime::encode_decimal(writer, Self::TAG, self.0.seconds())?;
+ writer.write_byte(b'Z')
+ }
+}
+
+impl FixedTag for UtcTime {
+ const TAG: Tag = Tag::UtcTime;
+}
+
+impl OrdIsValueOrd for UtcTime {}
+
+impl From<&UtcTime> for UtcTime {
+ fn from(value: &UtcTime) -> UtcTime {
+ *value
+ }
+}
+
+impl From<UtcTime> for DateTime {
+ fn from(utc_time: UtcTime) -> DateTime {
+ utc_time.0
+ }
+}
+
+impl From<&UtcTime> for DateTime {
+ fn from(utc_time: &UtcTime) -> DateTime {
+ utc_time.0
+ }
+}
+
+impl TryFrom<DateTime> for UtcTime {
+ type Error = Error;
+
+ fn try_from(datetime: DateTime) -> Result<Self> {
+ Self::from_date_time(datetime)
+ }
+}
+
+impl TryFrom<&DateTime> for UtcTime {
+ type Error = Error;
+
+ fn try_from(datetime: &DateTime) -> Result<Self> {
+ Self::from_date_time(*datetime)
+ }
+}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+impl From<UtcTime> for SystemTime {
+ fn from(utc_time: UtcTime) -> SystemTime {
+ utc_time.to_system_time()
+ }
+}
+
+impl TryFrom<AnyRef<'_>> for UtcTime {
+ type Error = Error;
+
+ fn try_from(any: AnyRef<'_>) -> Result<UtcTime> {
+ any.decode_into()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::UtcTime;
+ use crate::{Decode, Encode, SliceWriter};
+ use hex_literal::hex;
+
+ #[test]
+ fn round_trip_vector() {
+ let example_bytes = hex!("17 0d 39 31 30 35 30 36 32 33 34 35 34 30 5a");
+ let utc_time = UtcTime::from_der(&example_bytes).unwrap();
+ assert_eq!(utc_time.to_unix_duration().as_secs(), 673573540);
+
+ let mut buf = [0u8; 128];
+ let mut encoder = SliceWriter::new(&mut buf);
+ utc_time.encode(&mut encoder).unwrap();
+ assert_eq!(example_bytes, encoder.finish().unwrap());
+ }
+}
diff --git a/src/asn1/utf8_string.rs b/src/asn1/utf8_string.rs
new file mode 100644
index 0000000..9f7a1bc
--- /dev/null
+++ b/src/asn1/utf8_string.rs
@@ -0,0 +1,227 @@
+//! ASN.1 `UTF8String` support.
+
+use crate::{
+ asn1::AnyRef, ord::OrdIsValueOrd, ByteSlice, DecodeValue, EncodeValue, Error, FixedTag, Header,
+ Length, Reader, Result, StrSlice, Tag, Writer,
+};
+use core::{fmt, str};
+
+#[cfg(feature = "alloc")]
+use alloc::{borrow::ToOwned, string::String};
+
+/// ASN.1 `UTF8String` type.
+///
+/// Supports the full UTF-8 encoding.
+///
+/// Note that the [`Decode`][`crate::Decode`] and [`Encode`][`crate::Encode`]
+/// traits are impl'd for Rust's [`str`][`prim@str`] primitive, which
+/// decodes/encodes as a [`Utf8StringRef`].
+///
+/// You are free to use [`str`][`prim@str`] instead of this type; however, it's
+/// still provided for explicitness in cases where it might be ambiguous with
+/// other ASN.1 string encodings such as
+/// [`PrintableStringRef`][`crate::asn1::PrintableStringRef`].
+///
+/// This is a zero-copy reference type which borrows from the input data.
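+///
+/// # Example
+///
+/// A minimal decoding sketch, reusing the ASCII test vector from this
+/// module's unit tests:
+///
+/// ```
+/// use der::{asn1::Utf8StringRef, Decode};
+///
+/// let der_bytes = [0x0c, 0x0b, 0x54, 0x65, 0x73, 0x74, 0x20, 0x55, 0x73, 0x65, 0x72, 0x20, 0x31];
+/// let utf8_string = Utf8StringRef::from_der(&der_bytes).unwrap();
+/// assert_eq!(utf8_string.as_str(), "Test User 1");
+/// ```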
+#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord)]
+pub struct Utf8StringRef<'a> {
+ /// Inner value
+ inner: StrSlice<'a>,
+}
+
+impl<'a> Utf8StringRef<'a> {
+ /// Create a new ASN.1 `UTF8String`.
+ pub fn new<T>(input: &'a T) -> Result<Self>
+ where
+ T: AsRef<[u8]> + ?Sized,
+ {
+ StrSlice::from_bytes(input.as_ref()).map(|inner| Self { inner })
+ }
+
+ /// Borrow the string as a `str`.
+ pub fn as_str(&self) -> &'a str {
+ self.inner.as_str()
+ }
+
+ /// Borrow the string as bytes.
+ pub fn as_bytes(&self) -> &'a [u8] {
+ self.inner.as_bytes()
+ }
+
+ /// Get the length of the inner byte slice.
+ pub fn len(&self) -> Length {
+ self.inner.len()
+ }
+
+ /// Is the inner string empty?
+ pub fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+}
+
+impl AsRef<str> for Utf8StringRef<'_> {
+ fn as_ref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl AsRef<[u8]> for Utf8StringRef<'_> {
+ fn as_ref(&self) -> &[u8] {
+ self.as_bytes()
+ }
+}
+
+impl<'a> DecodeValue<'a> for Utf8StringRef<'a> {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ Self::new(ByteSlice::decode_value(reader, header)?.as_slice())
+ }
+}
+
+impl EncodeValue for Utf8StringRef<'_> {
+ fn value_len(&self) -> Result<Length> {
+ self.inner.value_len()
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ self.inner.encode_value(writer)
+ }
+}
+
+impl FixedTag for Utf8StringRef<'_> {
+ const TAG: Tag = Tag::Utf8String;
+}
+
+impl OrdIsValueOrd for Utf8StringRef<'_> {}
+
+impl<'a> From<&Utf8StringRef<'a>> for Utf8StringRef<'a> {
+ fn from(value: &Utf8StringRef<'a>) -> Utf8StringRef<'a> {
+ *value
+ }
+}
+
+impl<'a> TryFrom<AnyRef<'a>> for Utf8StringRef<'a> {
+ type Error = Error;
+
+ fn try_from(any: AnyRef<'a>) -> Result<Utf8StringRef<'a>> {
+ any.decode_into()
+ }
+}
+
+impl<'a> From<Utf8StringRef<'a>> for AnyRef<'a> {
+ fn from(utf8_string: Utf8StringRef<'a>) -> AnyRef<'a> {
+ AnyRef::from_tag_and_value(Tag::Utf8String, utf8_string.inner.into())
+ }
+}
+
+impl<'a> From<Utf8StringRef<'a>> for &'a [u8] {
+ fn from(utf8_string: Utf8StringRef<'a>) -> &'a [u8] {
+ utf8_string.as_bytes()
+ }
+}
+
+impl<'a> fmt::Display for Utf8StringRef<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(self.as_str())
+ }
+}
+
+impl<'a> fmt::Debug for Utf8StringRef<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "Utf8String({:?})", self.as_str())
+ }
+}
+
+impl<'a> TryFrom<AnyRef<'a>> for &'a str {
+ type Error = Error;
+
+ fn try_from(any: AnyRef<'a>) -> Result<&'a str> {
+ Utf8StringRef::try_from(any).map(|s| s.as_str())
+ }
+}
+
+impl EncodeValue for str {
+ fn value_len(&self) -> Result<Length> {
+ Utf8StringRef::new(self)?.value_len()
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ Utf8StringRef::new(self)?.encode_value(writer)
+ }
+}
+
+impl FixedTag for str {
+ const TAG: Tag = Tag::Utf8String;
+}
+
+impl OrdIsValueOrd for str {}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl<'a> From<Utf8StringRef<'a>> for String {
+ fn from(s: Utf8StringRef<'a>) -> String {
+ s.as_str().to_owned()
+ }
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl<'a> TryFrom<AnyRef<'a>> for String {
+ type Error = Error;
+
+ fn try_from(any: AnyRef<'a>) -> Result<String> {
+ Utf8StringRef::try_from(any).map(|s| s.as_str().to_owned())
+ }
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl<'a> DecodeValue<'a> for String {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ Ok(String::from_utf8(reader.read_vec(header.length)?)?)
+ }
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl EncodeValue for String {
+ fn value_len(&self) -> Result<Length> {
+ Utf8StringRef::new(self)?.value_len()
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ Utf8StringRef::new(self)?.encode_value(writer)
+ }
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl FixedTag for String {
+ const TAG: Tag = Tag::Utf8String;
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+impl OrdIsValueOrd for String {}
+
+#[cfg(test)]
+mod tests {
+ use super::Utf8StringRef;
+ use crate::Decode;
+
+ #[test]
+ fn parse_ascii_bytes() {
+ let example_bytes = &[
+ 0x0c, 0x0b, 0x54, 0x65, 0x73, 0x74, 0x20, 0x55, 0x73, 0x65, 0x72, 0x20, 0x31,
+ ];
+
+ let utf8_string = Utf8StringRef::from_der(example_bytes).unwrap();
+ assert_eq!(utf8_string.as_str(), "Test User 1");
+ }
+
+ #[test]
+ fn parse_utf8_bytes() {
+ let example_bytes = &[0x0c, 0x06, 0x48, 0x65, 0x6c, 0x6c, 0xc3, 0xb3];
+ let utf8_string = Utf8StringRef::from_der(example_bytes).unwrap();
+ assert_eq!(utf8_string.as_str(), "Helló");
+ }
+}
diff --git a/src/byte_slice.rs b/src/byte_slice.rs
new file mode 100644
index 0000000..00d46d0
--- /dev/null
+++ b/src/byte_slice.rs
@@ -0,0 +1,116 @@
+//! Common handling for types backed by byte slices with enforcement of the
+//! library-level length limit, i.e. `Length::MAX`.
+
+use crate::{
+ str_slice::StrSlice, DecodeValue, DerOrd, EncodeValue, Error, Header, Length, Reader, Result,
+ Writer,
+};
+use core::cmp::Ordering;
+
+/// Byte slice newtype which respects the `Length::MAX` limit.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub(crate) struct ByteSlice<'a> {
+ /// Precomputed `Length` (avoids possible panicking conversions)
+ length: Length,
+
+ /// Inner value
+ inner: &'a [u8],
+}
+
+impl<'a> ByteSlice<'a> {
+ /// Constant value representing an empty byte slice.
+ pub const EMPTY: Self = Self {
+ length: Length::ZERO,
+ inner: &[],
+ };
+
+ /// Create a new [`ByteSlice`], ensuring that the length of the provided
+ /// `slice` value does not exceed `Length::MAX`.
+ pub fn new(slice: &'a [u8]) -> Result<Self> {
+ Ok(Self {
+ length: Length::try_from(slice.len())?,
+ inner: slice,
+ })
+ }
+
+ /// Borrow the inner byte slice
+ pub fn as_slice(&self) -> &'a [u8] {
+ self.inner
+ }
+
+ /// Get the [`Length`] of this [`ByteSlice`]
+ pub fn len(self) -> Length {
+ self.length
+ }
+
+ /// Is this [`ByteSlice`] empty?
+ pub fn is_empty(self) -> bool {
+ self.len() == Length::ZERO
+ }
+}
+
+impl AsRef<[u8]> for ByteSlice<'_> {
+ fn as_ref(&self) -> &[u8] {
+ self.as_slice()
+ }
+}
+
+impl<'a> DecodeValue<'a> for ByteSlice<'a> {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ reader.read_slice(header.length).and_then(Self::new)
+ }
+}
+
+impl EncodeValue for ByteSlice<'_> {
+ fn value_len(&self) -> Result<Length> {
+ Ok(self.length)
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ writer.write(self.as_ref())
+ }
+}
+
+impl Default for ByteSlice<'_> {
+ fn default() -> Self {
+ Self {
+ length: Length::ZERO,
+ inner: &[],
+ }
+ }
+}
+
+impl DerOrd for ByteSlice<'_> {
+ fn der_cmp(&self, other: &Self) -> Result<Ordering> {
+ Ok(self.as_slice().cmp(other.as_slice()))
+ }
+}
+
+impl<'a> From<&'a [u8; 1]> for ByteSlice<'a> {
+ fn from(byte: &'a [u8; 1]) -> ByteSlice<'a> {
+ Self {
+ length: Length::ONE,
+ inner: byte,
+ }
+ }
+}
+
+impl<'a> From<StrSlice<'a>> for ByteSlice<'a> {
+ fn from(s: StrSlice<'a>) -> ByteSlice<'a> {
+ let bytes = s.as_bytes();
+ debug_assert_eq!(bytes.len(), usize::try_from(s.length).expect("overflow"));
+
+ ByteSlice {
+ inner: bytes,
+ length: s.length,
+ }
+ }
+}
+
+impl<'a> TryFrom<&'a [u8]> for ByteSlice<'a> {
+ type Error = Error;
+
+ fn try_from(slice: &'a [u8]) -> Result<Self> {
+ Self::new(slice)
+ }
+}
diff --git a/src/datetime.rs b/src/datetime.rs
new file mode 100644
index 0000000..2b4c504
--- /dev/null
+++ b/src/datetime.rs
@@ -0,0 +1,423 @@
+//! Date and time functionality shared between various ASN.1 types
+//! (e.g. `GeneralizedTime`, `UTCTime`)
+
+// Adapted from the `humantime` crate.
+// Copyright (c) 2016 The humantime Developers
+// Released under the MIT OR Apache 2.0 licenses
+
+use crate::{Error, ErrorKind, Result, Tag, Writer};
+use core::{fmt, str::FromStr, time::Duration};
+
+#[cfg(feature = "std")]
+use std::time::{SystemTime, UNIX_EPOCH};
+
+#[cfg(feature = "time")]
+use time::PrimitiveDateTime;
+
+/// Minimum year allowed in [`DateTime`] values.
+const MIN_YEAR: u16 = 1970;
+
+/// Maximum duration since `UNIX_EPOCH` which can be represented as a
+/// [`DateTime`] (inclusive).
+///
+/// This corresponds to: 9999-12-31T23:59:59Z
+const MAX_UNIX_DURATION: Duration = Duration::from_secs(253_402_300_799);
+
+/// Date-and-time type shared by multiple ASN.1 types
+/// (e.g. `GeneralizedTime`, `UTCTime`).
+///
+/// Following conventions from RFC 5280, this type is always Z-normalized
+/// (i.e. represents a UTC time). However, it isn't named "UTC time" in order
+/// to prevent confusion with ASN.1 `UTCTime`.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub struct DateTime {
+ /// Full year (e.g. 2000).
+ ///
+ /// Must be >=1970 to permit positive conversions to Unix time.
+ year: u16,
+
+ /// Month (1-12)
+ month: u8,
+
+ /// Day of the month (1-31)
+ day: u8,
+
+ /// Hour (0-23)
+ hour: u8,
+
+ /// Minutes (0-59)
+ minutes: u8,
+
+ /// Seconds (0-59)
+ seconds: u8,
+
+ /// [`Duration`] since the Unix epoch.
+ unix_duration: Duration,
+}
+
+impl DateTime {
+ /// Create a new [`DateTime`] from the given UTC time components.
+ // TODO(tarcieri): checked arithmetic
+ #[allow(clippy::integer_arithmetic)]
+ pub fn new(year: u16, month: u8, day: u8, hour: u8, minutes: u8, seconds: u8) -> Result<Self> {
+ // Basic validation of the components.
+ if year < MIN_YEAR
+ || !(1..=12).contains(&month)
+ || !(1..=31).contains(&day)
+ || !(0..=23).contains(&hour)
+ || !(0..=59).contains(&minutes)
+ || !(0..=59).contains(&seconds)
+ {
+ return Err(ErrorKind::DateTime.into());
+ }
+
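+ // Number of leap days between 1970-01-01 and January 1st of `year`.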
+ let leap_years =
+ ((year - 1) - 1968) / 4 - ((year - 1) - 1900) / 100 + ((year - 1) - 1600) / 400;
+
+ let is_leap_year = year % 4 == 0 && (year % 100 != 0 || year % 400 == 0);
+
+ let (mut ydays, mdays): (u16, u8) = match month {
+ 1 => (0, 31),
+ 2 if is_leap_year => (31, 29),
+ 2 => (31, 28),
+ 3 => (59, 31),
+ 4 => (90, 30),
+ 5 => (120, 31),
+ 6 => (151, 30),
+ 7 => (181, 31),
+ 8 => (212, 31),
+ 9 => (243, 30),
+ 10 => (273, 31),
+ 11 => (304, 30),
+ 12 => (334, 31),
+ _ => return Err(ErrorKind::DateTime.into()),
+ };
+
+ if day > mdays || day == 0 {
+ return Err(ErrorKind::DateTime.into());
+ }
+
+ ydays += u16::from(day) - 1;
+
+ if is_leap_year && month > 2 {
+ ydays += 1;
+ }
+
+ let days = u64::from(year - 1970) * 365 + u64::from(leap_years) + u64::from(ydays);
+ let time = u64::from(seconds) + (u64::from(minutes) * 60) + (u64::from(hour) * 3600);
+ let unix_duration = Duration::from_secs(time + days * 86400);
+
+ if unix_duration > MAX_UNIX_DURATION {
+ return Err(ErrorKind::DateTime.into());
+ }
+
+ Ok(Self {
+ year,
+ month,
+ day,
+ hour,
+ minutes,
+ seconds,
+ unix_duration,
+ })
+ }
+
+ /// Compute a [`DateTime`] from the given [`Duration`] since the `UNIX_EPOCH`.
+ ///
+ /// Returns an error if the value is outside the supported date range.
+ // TODO(tarcieri): checked arithmetic
+ #[allow(clippy::integer_arithmetic)]
+ pub fn from_unix_duration(unix_duration: Duration) -> Result<Self> {
+ if unix_duration > MAX_UNIX_DURATION {
+ return Err(ErrorKind::DateTime.into());
+ }
+
+ let secs_since_epoch = unix_duration.as_secs();
+
+ /// 2000-03-01 (mod 400 year, immediately after Feb 29)
+ const LEAPOCH: i64 = 11017;
+ const DAYS_PER_400Y: i64 = 365 * 400 + 97;
+ const DAYS_PER_100Y: i64 = 365 * 100 + 24;
+ const DAYS_PER_4Y: i64 = 365 * 4 + 1;
+
+ let days = i64::try_from(secs_since_epoch / 86400)? - LEAPOCH;
+ let secs_of_day = secs_since_epoch % 86400;
+
+ let mut qc_cycles = days / DAYS_PER_400Y;
+ let mut remdays = days % DAYS_PER_400Y;
+
+ if remdays < 0 {
+ remdays += DAYS_PER_400Y;
+ qc_cycles -= 1;
+ }
+
+ let mut c_cycles = remdays / DAYS_PER_100Y;
+ if c_cycles == 4 {
+ c_cycles -= 1;
+ }
+ remdays -= c_cycles * DAYS_PER_100Y;
+
+ let mut q_cycles = remdays / DAYS_PER_4Y;
+ if q_cycles == 25 {
+ q_cycles -= 1;
+ }
+ remdays -= q_cycles * DAYS_PER_4Y;
+
+ let mut remyears = remdays / 365;
+ if remyears == 4 {
+ remyears -= 1;
+ }
+ remdays -= remyears * 365;
+
+ let mut year = 2000 + remyears + 4 * q_cycles + 100 * c_cycles + 400 * qc_cycles;
+
+ let months = [31, 30, 31, 30, 31, 31, 30, 31, 30, 31, 31, 29];
+ let mut mon = 0;
+ for mon_len in months.iter() {
+ mon += 1;
+ if remdays < *mon_len {
+ break;
+ }
+ remdays -= *mon_len;
+ }
+ let mday = remdays + 1;
+ let mon = if mon + 2 > 12 {
+ year += 1;
+ mon - 10
+ } else {
+ mon + 2
+ };
+
+ let second = secs_of_day % 60;
+ let mins_of_day = secs_of_day / 60;
+ let minute = mins_of_day % 60;
+ let hour = mins_of_day / 60;
+
+ Self::new(
+ year.try_into()?,
+ mon,
+ mday.try_into()?,
+ hour.try_into()?,
+ minute.try_into()?,
+ second.try_into()?,
+ )
+ }
+
+ /// Get the year.
+ pub fn year(&self) -> u16 {
+ self.year
+ }
+
+ /// Get the month.
+ pub fn month(&self) -> u8 {
+ self.month
+ }
+
+ /// Get the day.
+ pub fn day(&self) -> u8 {
+ self.day
+ }
+
+ /// Get the hour.
+ pub fn hour(&self) -> u8 {
+ self.hour
+ }
+
+ /// Get the minutes.
+ pub fn minutes(&self) -> u8 {
+ self.minutes
+ }
+
+ /// Get the seconds.
+ pub fn seconds(&self) -> u8 {
+ self.seconds
+ }
+
+ /// Get the [`Duration`] of this date/time since `UNIX_EPOCH`.
+ pub fn unix_duration(&self) -> Duration {
+ self.unix_duration
+ }
+
+ /// Instantiate from [`SystemTime`].
+ #[cfg(feature = "std")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+ pub fn from_system_time(time: SystemTime) -> Result<Self> {
+ time.duration_since(UNIX_EPOCH)
+ .map_err(|_| ErrorKind::DateTime.into())
+ .and_then(Self::from_unix_duration)
+ }
+
+ /// Convert to [`SystemTime`].
+ #[cfg(feature = "std")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+ pub fn to_system_time(&self) -> SystemTime {
+ UNIX_EPOCH + self.unix_duration()
+ }
+}
+
+impl FromStr for DateTime {
+ type Err = Error;
+
+ // TODO(tarcieri): checked arithmetic
+ #[allow(clippy::integer_arithmetic)]
+ fn from_str(s: &str) -> Result<Self> {
+ match *s.as_bytes() {
+ [year1, year2, year3, year4, b'-', month1, month2, b'-', day1, day2, b'T', hour1, hour2, b':', min1, min2, b':', sec1, sec2, b'Z'] =>
+ {
+ let tag = Tag::GeneralizedTime;
+ let year =
+ u16::from(decode_decimal(tag, year1, year2).map_err(|_| ErrorKind::DateTime)?)
+ * 100
+ + u16::from(
+ decode_decimal(tag, year3, year4).map_err(|_| ErrorKind::DateTime)?,
+ );
+ let month = decode_decimal(tag, month1, month2).map_err(|_| ErrorKind::DateTime)?;
+ let day = decode_decimal(tag, day1, day2).map_err(|_| ErrorKind::DateTime)?;
+ let hour = decode_decimal(tag, hour1, hour2).map_err(|_| ErrorKind::DateTime)?;
+ let minutes = decode_decimal(tag, min1, min2).map_err(|_| ErrorKind::DateTime)?;
+ let seconds = decode_decimal(tag, sec1, sec2).map_err(|_| ErrorKind::DateTime)?;
+ Self::new(year, month, day, hour, minutes, seconds)
+ }
+ _ => Err(ErrorKind::DateTime.into()),
+ }
+ }
+}
+
+impl fmt::Display for DateTime {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(
+ f,
+ "{:02}-{:02}-{:02}T{:02}:{:02}:{:02}Z",
+ self.year, self.month, self.day, self.hour, self.minutes, self.seconds
+ )
+ }
+}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+impl From<DateTime> for SystemTime {
+ fn from(time: DateTime) -> SystemTime {
+ time.to_system_time()
+ }
+}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+impl From<&DateTime> for SystemTime {
+ fn from(time: &DateTime) -> SystemTime {
+ time.to_system_time()
+ }
+}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+impl TryFrom<SystemTime> for DateTime {
+ type Error = Error;
+
+ fn try_from(time: SystemTime) -> Result<DateTime> {
+ DateTime::from_system_time(time)
+ }
+}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+impl TryFrom<&SystemTime> for DateTime {
+ type Error = Error;
+
+ fn try_from(time: &SystemTime) -> Result<DateTime> {
+ DateTime::from_system_time(*time)
+ }
+}
+
+#[cfg(feature = "time")]
+#[cfg_attr(docsrs, doc(cfg(feature = "time")))]
+impl TryFrom<DateTime> for PrimitiveDateTime {
+ type Error = Error;
+
+ fn try_from(time: DateTime) -> Result<PrimitiveDateTime> {
+ let month = (time.month() as u8).try_into()?;
+ let date = time::Date::from_calendar_date(i32::from(time.year()), month, time.day())?;
+ let time = time::Time::from_hms(time.hour(), time.minutes(), time.seconds())?;
+
+ Ok(PrimitiveDateTime::new(date, time))
+ }
+}
+
+#[cfg(feature = "time")]
+#[cfg_attr(docsrs, doc(cfg(feature = "time")))]
+impl TryFrom<PrimitiveDateTime> for DateTime {
+ type Error = Error;
+
+ fn try_from(time: PrimitiveDateTime) -> Result<DateTime> {
+ DateTime::new(
+ time.year().try_into().map_err(|_| ErrorKind::DateTime)?,
+ time.month().into(),
+ time.day(),
+ time.hour(),
+ time.minute(),
+ time.second(),
+ )
+ }
+}
+
+/// Decode 2-digit decimal value
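+///
+/// For example, the ASCII digit bytes `b'4'` and `b'2'` decode to `42`.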
+// TODO(tarcieri): checked arithmetic
+#[allow(clippy::integer_arithmetic)]
+pub(crate) fn decode_decimal(tag: Tag, hi: u8, lo: u8) -> Result<u8> {
+ if (b'0'..=b'9').contains(&hi) && (b'0'..=b'9').contains(&lo) {
+ Ok((hi - b'0') * 10 + (lo - b'0'))
+ } else {
+ Err(tag.value_error())
+ }
+}
+
+/// Encode 2-digit decimal value
+pub(crate) fn encode_decimal<W>(writer: &mut W, tag: Tag, value: u8) -> Result<()>
+where
+ W: Writer + ?Sized,
+{
+ let hi_val = value / 10;
+
+ if hi_val >= 10 {
+ return Err(tag.value_error());
+ }
+
+ writer.write_byte(b'0'.checked_add(hi_val).ok_or(ErrorKind::Overflow)?)?;
+ writer.write_byte(b'0'.checked_add(value % 10).ok_or(ErrorKind::Overflow)?)
+}
+
+#[cfg(test)]
+mod tests {
+ use super::DateTime;
+
+ /// Check whether the given components form a valid [`DateTime`].
+ fn is_date_valid(year: u16, month: u8, day: u8, hour: u8, minute: u8, second: u8) -> bool {
+ DateTime::new(year, month, day, hour, minute, second).is_ok()
+ }
+
+ #[test]
+ fn feb_leap_year_handling() {
+ assert!(is_date_valid(2000, 2, 29, 0, 0, 0));
+ assert!(!is_date_valid(2001, 2, 29, 0, 0, 0));
+ assert!(!is_date_valid(2100, 2, 29, 0, 0, 0));
+ }
+
+ #[test]
+ fn from_str() {
+ let datetime = "2001-01-02T12:13:14Z".parse::<DateTime>().unwrap();
+ assert_eq!(datetime.year(), 2001);
+ assert_eq!(datetime.month(), 1);
+ assert_eq!(datetime.day(), 2);
+ assert_eq!(datetime.hour(), 12);
+ assert_eq!(datetime.minutes(), 13);
+ assert_eq!(datetime.seconds(), 14);
+ }
+
+ #[cfg(feature = "alloc")]
+ #[test]
+ fn display() {
+ use alloc::string::ToString;
+ let datetime = DateTime::new(2001, 01, 02, 12, 13, 14).unwrap();
+ assert_eq!(&datetime.to_string(), "2001-01-02T12:13:14Z");
+ }
+}
diff --git a/src/decode.rs b/src/decode.rs
new file mode 100644
index 0000000..1c63b32
--- /dev/null
+++ b/src/decode.rs
@@ -0,0 +1,76 @@
+//! Trait definition for [`Decode`].
+
+use crate::{FixedTag, Header, Reader, Result, SliceReader};
+
+#[cfg(feature = "pem")]
+use crate::{pem::PemLabel, PemReader};
+
+#[cfg(doc)]
+use crate::{Length, Tag};
+
+/// Decoding trait.
+///
+/// This trait provides the core abstraction upon which all decoding operations
+/// are based.
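+///
+/// # Example
+///
+/// A minimal sketch using the blanket impl for `bool` (DER encodes the
+/// `BOOLEAN` value `TRUE` as `01 01 FF`):
+///
+/// ```
+/// use der::Decode;
+///
+/// let value = bool::from_der(&[0x01, 0x01, 0xFF]).unwrap();
+/// assert!(value);
+/// ```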
+pub trait Decode<'a>: Sized {
+ /// Attempt to decode this message using the provided decoder.
+ fn decode<R: Reader<'a>>(decoder: &mut R) -> Result<Self>;
+
+ /// Parse `Self` from the provided DER-encoded byte slice.
+ fn from_der(bytes: &'a [u8]) -> Result<Self> {
+ let mut reader = SliceReader::new(bytes)?;
+ let result = Self::decode(&mut reader)?;
+ reader.finish(result)
+ }
+}
+
+impl<'a, T> Decode<'a> for T
+where
+ T: DecodeValue<'a> + FixedTag,
+{
+ fn decode<R: Reader<'a>>(reader: &mut R) -> Result<T> {
+ let header = Header::decode(reader)?;
+ header.tag.assert_eq(T::TAG)?;
+ T::decode_value(reader, header)
+ }
+}
+
+/// Marker trait for data structures that can be decoded from DER without
+/// borrowing any data from the decoder.
+///
+/// This is primarily useful for trait bounds on functions which require that
+/// no data is borrowed from the decoder, for example a PEM decoder which needs
+/// to first decode data from Base64.
+///
+/// This trait is inspired by the [`DeserializeOwned` trait from `serde`](https://docs.rs/serde/latest/serde/de/trait.DeserializeOwned.html).
+pub trait DecodeOwned: for<'a> Decode<'a> {}
+
+impl<T> DecodeOwned for T where T: for<'a> Decode<'a> {}
+
+/// PEM decoding trait.
+///
+/// This trait is automatically impl'd for any type which impls both
+/// [`DecodeOwned`] and [`PemLabel`].
+#[cfg(feature = "pem")]
+#[cfg_attr(docsrs, doc(cfg(feature = "pem")))]
+pub trait DecodePem: DecodeOwned + PemLabel {
+ /// Try to decode this type from PEM.
+ fn from_pem(pem: impl AsRef<[u8]>) -> Result<Self>;
+}
+
+#[cfg(feature = "pem")]
+#[cfg_attr(docsrs, doc(cfg(feature = "pem")))]
+impl<T: DecodeOwned + PemLabel> DecodePem for T {
+ fn from_pem(pem: impl AsRef<[u8]>) -> Result<Self> {
+ let mut reader = PemReader::new(pem.as_ref())?;
+ Self::validate_pem_label(reader.type_label())?;
+ T::decode(&mut reader)
+ }
+}
+
+/// Decode the value part of a Tag-Length-Value encoded field, sans the [`Tag`]
+/// and [`Length`].
+pub trait DecodeValue<'a>: Sized {
+ /// Attempt to decode this message using the provided [`Reader`].
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self>;
+}
diff --git a/src/document.rs b/src/document.rs
new file mode 100644
index 0000000..aa953cd
--- /dev/null
+++ b/src/document.rs
@@ -0,0 +1,369 @@
+//! ASN.1 DER-encoded documents stored on the heap.
+
+use crate::{Decode, Encode, Error, FixedTag, Length, Reader, Result, SliceReader, Tag, Writer};
+use alloc::vec::Vec;
+use core::fmt::{self, Debug};
+
+#[cfg(feature = "pem")]
+use {crate::pem, alloc::string::String};
+
+#[cfg(feature = "std")]
+use std::{fs, path::Path};
+
+#[cfg(all(feature = "pem", feature = "std"))]
+use alloc::borrow::ToOwned;
+
+#[cfg(feature = "zeroize")]
+use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
+
+/// ASN.1 DER-encoded document.
+///
+/// This type wraps an encoded ASN.1 DER message. The document is checked to
+/// ensure it contains a valid DER-encoded `SEQUENCE`.
+///
+/// It implements common functionality related to encoding/decoding such
+/// documents, such as PEM encapsulation as well as reading/writing documents
+/// from/to the filesystem.
+///
+/// The [`SecretDocument`] provides a wrapper for this type with additional
+/// hardening applied.
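+///
+/// # Example
+///
+/// A minimal sketch; the bytes below are an illustrative DER `SEQUENCE`
+/// containing a single `INTEGER` (42):
+///
+/// ```
+/// use der::Document;
+///
+/// let der_bytes = [0x30, 0x03, 0x02, 0x01, 0x2A];
+/// let doc = Document::try_from(&der_bytes[..]).unwrap();
+/// assert_eq!(doc.as_bytes(), &der_bytes[..]);
+/// ```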
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+#[derive(Clone, Eq, PartialEq)]
+pub struct Document {
+ /// ASN.1 DER encoded bytes.
+ der_bytes: Vec<u8>,
+
+ /// Length of this document.
+ length: Length,
+}
+
+impl Document {
+ /// Get the ASN.1 DER-encoded bytes of this document.
+ pub fn as_bytes(&self) -> &[u8] {
+ self.der_bytes.as_slice()
+ }
+
+ /// Convert to a [`SecretDocument`].
+ #[cfg(feature = "zeroize")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "zeroize")))]
+ pub fn into_secret(self) -> SecretDocument {
+ SecretDocument(self)
+ }
+
+ /// Convert to an ASN.1 DER-encoded byte vector.
+ pub fn into_vec(self) -> Vec<u8> {
+ self.der_bytes
+ }
+
+ /// Return an ASN.1 DER-encoded byte vector.
+ pub fn to_vec(&self) -> Vec<u8> {
+ self.der_bytes.clone()
+ }
+
+ /// Get the length of the encoded ASN.1 DER in bytes.
+ pub fn len(&self) -> Length {
+ self.length
+ }
+
+ /// Try to decode the inner ASN.1 DER message contained in this
+ /// [`Document`] as the given type.
+ pub fn decode_msg<'a, T: Decode<'a>>(&'a self) -> Result<T> {
+ T::from_der(self.as_bytes())
+ }
+
+ /// Encode the provided type as ASN.1 DER, storing the resulting encoded DER
+ /// as a [`Document`].
+ pub fn encode_msg<T: Encode>(msg: &T) -> Result<Self> {
+ msg.to_vec()?.try_into()
+ }
+
+ /// Decode ASN.1 DER document from PEM.
+ ///
+ /// Returns the PEM label and decoded [`Document`] on success.
+ #[cfg(feature = "pem")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "pem")))]
+ pub fn from_pem(pem: &str) -> Result<(&str, Self)> {
+ let (label, der_bytes) = pem::decode_vec(pem.as_bytes())?;
+ Ok((label, der_bytes.try_into()?))
+ }
+
+ /// Encode ASN.1 DER document as a PEM string with encapsulation boundaries
+ /// containing the provided PEM type `label` (e.g. `CERTIFICATE`).
+ #[cfg(feature = "pem")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "pem")))]
+ pub fn to_pem(&self, label: &'static str, line_ending: pem::LineEnding) -> Result<String> {
+ Ok(pem::encode_string(label, line_ending, self.as_bytes())?)
+ }
+
+ /// Read ASN.1 DER document from a file.
+ #[cfg(feature = "std")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+ pub fn read_der_file(path: impl AsRef<Path>) -> Result<Self> {
+ fs::read(path)?.try_into()
+ }
+
+ /// Write ASN.1 DER document to a file.
+ #[cfg(feature = "std")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+ pub fn write_der_file(&self, path: impl AsRef<Path>) -> Result<()> {
+ Ok(fs::write(path, self.as_bytes())?)
+ }
+
+ /// Read PEM-encoded ASN.1 DER document from a file.
+ #[cfg(all(feature = "pem", feature = "std"))]
+ #[cfg_attr(docsrs, doc(cfg(all(feature = "pem", feature = "std"))))]
+ pub fn read_pem_file(path: impl AsRef<Path>) -> Result<(String, Self)> {
+ Self::from_pem(&fs::read_to_string(path)?).map(|(label, doc)| (label.to_owned(), doc))
+ }
+
+ /// Write PEM-encoded ASN.1 DER document to a file.
+ #[cfg(all(feature = "pem", feature = "std"))]
+ #[cfg_attr(docsrs, doc(cfg(all(feature = "pem", feature = "std"))))]
+ pub fn write_pem_file(
+ &self,
+ path: impl AsRef<Path>,
+ label: &'static str,
+ line_ending: pem::LineEnding,
+ ) -> Result<()> {
+ let pem = self.to_pem(label, line_ending)?;
+ Ok(fs::write(path, pem.as_bytes())?)
+ }
+}
+
+impl AsRef<[u8]> for Document {
+ fn as_ref(&self) -> &[u8] {
+ self.as_bytes()
+ }
+}
+
+impl Debug for Document {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("Document(")?;
+
+ for byte in self.as_bytes() {
+ write!(f, "{:02X}", byte)?;
+ }
+
+ f.write_str(")")
+ }
+}
+
+impl<'a> Decode<'a> for Document {
+ fn decode<R: Reader<'a>>(reader: &mut R) -> Result<Document> {
+ let header = reader.peek_header()?;
+ let length = (header.encoded_len()? + header.length)?;
+ let bytes = reader.read_slice(length)?;
+
+ Ok(Self {
+ der_bytes: bytes.into(),
+ length,
+ })
+ }
+}
+
+impl Encode for Document {
+ fn encoded_len(&self) -> Result<Length> {
+ Ok(self.len())
+ }
+
+ fn encode(&self, writer: &mut dyn Writer) -> Result<()> {
+ writer.write(self.as_bytes())
+ }
+}
+
+impl FixedTag for Document {
+ const TAG: Tag = Tag::Sequence;
+}
+
+impl TryFrom<&[u8]> for Document {
+ type Error = Error;
+
+ fn try_from(der_bytes: &[u8]) -> Result<Self> {
+ Self::from_der(der_bytes)
+ }
+}
+
+impl TryFrom<Vec<u8>> for Document {
+ type Error = Error;
+
+ fn try_from(der_bytes: Vec<u8>) -> Result<Self> {
+ let mut decoder = SliceReader::new(&der_bytes)?;
+ decode_sequence(&mut decoder)?;
+ decoder.finish(())?;
+
+ let length = der_bytes.len().try_into()?;
+ Ok(Self { der_bytes, length })
+ }
+}
+
+/// Secret [`Document`] type.
+///
+/// Useful for formats which represent potentially secret data, such as
+/// cryptographic keys.
+///
+/// This type provides additional hardening such as ensuring that the contents
+/// are zeroized-on-drop, and also using more restrictive file permissions when
+/// writing files to disk.
+#[cfg(feature = "zeroize")]
+#[cfg_attr(docsrs, doc(cfg(all(feature = "alloc", feature = "zeroize"))))]
+#[derive(Clone)]
+pub struct SecretDocument(Document);
+
+#[cfg(feature = "zeroize")]
+impl SecretDocument {
+ /// Borrow the inner serialized bytes of this document.
+ pub fn as_bytes(&self) -> &[u8] {
+ self.0.as_bytes()
+ }
+
+ /// Return an allocated ASN.1 DER serialization as a byte vector.
+ pub fn to_bytes(&self) -> Zeroizing<Vec<u8>> {
+ Zeroizing::new(self.0.to_vec())
+ }
+
+ /// Get the length of the encoded ASN.1 DER in bytes.
+ pub fn len(&self) -> Length {
+ self.0.len()
+ }
+
+ /// Try to decode the inner ASN.1 DER message as the given type.
+ pub fn decode_msg<'a, T: Decode<'a>>(&'a self) -> Result<T> {
+ self.0.decode_msg()
+ }
+
+ /// Encode the provided type as ASN.1 DER.
+ pub fn encode_msg<T: Encode>(msg: &T) -> Result<Self> {
+ Document::encode_msg(msg).map(Self)
+ }
+
+ /// Decode ASN.1 DER document from PEM.
+ #[cfg(feature = "pem")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "pem")))]
+ pub fn from_pem(pem: &str) -> Result<(&str, Self)> {
+ Document::from_pem(pem).map(|(label, doc)| (label, Self(doc)))
+ }
+
+ /// Encode ASN.1 DER document as a PEM string.
+ #[cfg(feature = "pem")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "pem")))]
+ pub fn to_pem(
+ &self,
+ label: &'static str,
+ line_ending: pem::LineEnding,
+ ) -> Result<Zeroizing<String>> {
+ self.0.to_pem(label, line_ending).map(Zeroizing::new)
+ }
+
+ /// Read ASN.1 DER document from a file.
+ #[cfg(feature = "std")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+ pub fn read_der_file(path: impl AsRef<Path>) -> Result<Self> {
+ Document::read_der_file(path).map(Self)
+ }
+
+ /// Write ASN.1 DER document to a file.
+ #[cfg(feature = "std")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+ pub fn write_der_file(&self, path: impl AsRef<Path>) -> Result<()> {
+ write_secret_file(path, self.as_bytes())
+ }
+
+ /// Read PEM-encoded ASN.1 DER document from a file.
+ #[cfg(all(feature = "pem", feature = "std"))]
+ #[cfg_attr(docsrs, doc(cfg(all(feature = "pem", feature = "std"))))]
+ pub fn read_pem_file(path: impl AsRef<Path>) -> Result<(String, Self)> {
+ Document::read_pem_file(path).map(|(label, doc)| (label, Self(doc)))
+ }
+
+ /// Write PEM-encoded ASN.1 DER document to a file.
+ #[cfg(all(feature = "pem", feature = "std"))]
+ #[cfg_attr(docsrs, doc(cfg(all(feature = "pem", feature = "std"))))]
+ pub fn write_pem_file(
+ &self,
+ path: impl AsRef<Path>,
+ label: &'static str,
+ line_ending: pem::LineEnding,
+ ) -> Result<()> {
+ write_secret_file(path, self.to_pem(label, line_ending)?.as_bytes())
+ }
+}
+#[cfg(feature = "zeroize")]
+impl Debug for SecretDocument {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt.debug_struct("SecretDocument").finish_non_exhaustive()
+ }
+}
+
+#[cfg(feature = "zeroize")]
+impl Drop for SecretDocument {
+ fn drop(&mut self) {
+ self.0.der_bytes.zeroize();
+ }
+}
+
+#[cfg(feature = "zeroize")]
+impl From<Document> for SecretDocument {
+ fn from(doc: Document) -> SecretDocument {
+ SecretDocument(doc)
+ }
+}
+
+#[cfg(feature = "zeroize")]
+impl TryFrom<&[u8]> for SecretDocument {
+ type Error = Error;
+
+ fn try_from(der_bytes: &[u8]) -> Result<Self> {
+ Document::try_from(der_bytes).map(Self)
+ }
+}
+
+#[cfg(feature = "zeroize")]
+impl TryFrom<Vec<u8>> for SecretDocument {
+ type Error = Error;
+
+ fn try_from(der_bytes: Vec<u8>) -> Result<Self> {
+ Document::try_from(der_bytes).map(Self)
+ }
+}
+
+#[cfg(feature = "zeroize")]
+impl ZeroizeOnDrop for SecretDocument {}
+
+/// Attempt to decode an ASN.1 `SEQUENCE` from the given decoder, returning the
+/// entire sequence including the header.
+fn decode_sequence<'a>(decoder: &mut SliceReader<'a>) -> Result<&'a [u8]> {
+ let header = decoder.peek_header()?;
+ header.tag.assert_eq(Tag::Sequence)?;
+
+ let len = (header.encoded_len()? + header.length)?;
+ decoder.read_slice(len)
+}
+
+/// Write a file containing secret data to the filesystem, restricting the
+/// file permissions so it's only readable by the owner.
+#[cfg(all(unix, feature = "std", feature = "zeroize"))]
+fn write_secret_file(path: impl AsRef<Path>, data: &[u8]) -> Result<()> {
+ use std::{io::Write, os::unix::fs::OpenOptionsExt};
+
+ /// File permissions for secret data
+ #[cfg(unix)]
+ const SECRET_FILE_PERMS: u32 = 0o600;
+
+ fs::OpenOptions::new()
+ .create(true)
+ .write(true)
+ .truncate(true)
+ .mode(SECRET_FILE_PERMS)
+ .open(path)
+ .and_then(|mut file| file.write_all(data))?;
+
+ Ok(())
+}
+
+/// Write a file containing secret data to the filesystem
+// TODO(tarcieri): permissions hardening on Windows
+#[cfg(all(not(unix), feature = "std", feature = "zeroize"))]
+fn write_secret_file(path: impl AsRef<Path>, data: &[u8]) -> Result<()> {
+ fs::write(path, data)?;
+ Ok(())
+}
diff --git a/src/encode.rs b/src/encode.rs
new file mode 100644
index 0000000..51fc13d
--- /dev/null
+++ b/src/encode.rs
@@ -0,0 +1,133 @@
+//! Trait definition for [`Encode`].
+
+use crate::{Header, Length, Result, SliceWriter, Tagged, Writer};
+
+#[cfg(feature = "alloc")]
+use {alloc::vec::Vec, core::iter};
+
+#[cfg(feature = "pem")]
+use {
+ crate::PemWriter,
+ alloc::string::String,
+ pem_rfc7468::{self as pem, LineEnding, PemLabel},
+};
+
+#[cfg(any(feature = "alloc", feature = "pem"))]
+use crate::ErrorKind;
+
+#[cfg(doc)]
+use crate::Tag;
+
+/// Encoding trait.
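+///
+/// # Example
+///
+/// A minimal sketch using the impl for `bool`, which encodes `true` as the
+/// DER `BOOLEAN` `01 01 FF`:
+///
+/// ```
+/// use der::Encode;
+///
+/// let mut buf = [0u8; 3];
+/// let der_bytes = true.encode_to_slice(&mut buf).unwrap();
+/// assert_eq!(der_bytes, &[0x01, 0x01, 0xFF]);
+/// ```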
+pub trait Encode {
+ /// Compute the length of this value in bytes when encoded as ASN.1 DER.
+ fn encoded_len(&self) -> Result<Length>;
+
+ /// Encode this value as ASN.1 DER using the provided [`Writer`].
+ fn encode(&self, encoder: &mut dyn Writer) -> Result<()>;
+
+ /// Encode this value to the provided byte slice, returning a sub-slice
+ /// containing the encoded message.
+ fn encode_to_slice<'a>(&self, buf: &'a mut [u8]) -> Result<&'a [u8]> {
+ let mut writer = SliceWriter::new(buf);
+ self.encode(&mut writer)?;
+ writer.finish()
+ }
+
+ /// Encode this message as ASN.1 DER, appending it to the provided
+ /// byte vector.
+ #[cfg(feature = "alloc")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+ fn encode_to_vec(&self, buf: &mut Vec<u8>) -> Result<Length> {
+ let expected_len = usize::try_from(self.encoded_len()?)?;
+ buf.reserve(expected_len);
+ buf.extend(iter::repeat(0).take(expected_len));
+
+ let mut writer = SliceWriter::new(buf);
+ self.encode(&mut writer)?;
+ let actual_len = writer.finish()?.len();
+
+ if expected_len != actual_len {
+ return Err(ErrorKind::Incomplete {
+ expected_len: expected_len.try_into()?,
+ actual_len: actual_len.try_into()?,
+ }
+ .into());
+ }
+
+ actual_len.try_into()
+ }
+
+ /// Serialize this message as a byte vector.
+ #[cfg(feature = "alloc")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+ fn to_vec(&self) -> Result<Vec<u8>> {
+ let mut buf = Vec::new();
+ self.encode_to_vec(&mut buf)?;
+ Ok(buf)
+ }
+}
+
+impl<T> Encode for T
+where
+ T: EncodeValue + Tagged,
+{
+ /// Compute the length of this value in bytes when encoded as ASN.1 DER.
+ fn encoded_len(&self) -> Result<Length> {
+ self.value_len().and_then(|len| len.for_tlv())
+ }
+
+ /// Encode this value as ASN.1 DER using the provided [`Writer`].
+ fn encode(&self, writer: &mut dyn Writer) -> Result<()> {
+ self.header()?.encode(writer)?;
+ self.encode_value(writer)
+ }
+}
+
+/// PEM encoding trait.
+///
+/// This trait is automatically impl'd for any type which impls both
+/// [`Encode`] and [`PemLabel`].
+#[cfg(feature = "pem")]
+#[cfg_attr(docsrs, doc(cfg(feature = "pem")))]
+pub trait EncodePem: Encode + PemLabel {
+ /// Try to encode this type as PEM.
+ fn to_pem(&self, line_ending: LineEnding) -> Result<String>;
+}
+
+#[cfg(feature = "pem")]
+#[cfg_attr(docsrs, doc(cfg(feature = "pem")))]
+impl<T: Encode + PemLabel> EncodePem for T {
+ fn to_pem(&self, line_ending: LineEnding) -> Result<String> {
+ let der_len = usize::try_from(self.encoded_len()?)?;
+ let pem_len = pem::encapsulated_len(Self::PEM_LABEL, line_ending, der_len)?;
+
+ let mut buf = vec![0u8; pem_len];
+ let mut writer = PemWriter::new(Self::PEM_LABEL, line_ending, &mut buf)?;
+ self.encode(&mut writer)?;
+
+ let actual_len = writer.finish()?;
+ buf.truncate(actual_len);
+ Ok(String::from_utf8(buf)?)
+ }
+}
+
+/// Encode the value part of a Tag-Length-Value encoded field, sans the [`Tag`]
+/// and [`Length`].
+pub trait EncodeValue {
+ /// Get the [`Header`] used to encode this value.
+ fn header(&self) -> Result<Header>
+ where
+ Self: Tagged,
+ {
+ Header::new(self.tag(), self.value_len()?)
+ }
+
+ /// Compute the length of this value (sans [`Tag`]+[`Length`] header) when
+ /// encoded as ASN.1 DER.
+ fn value_len(&self) -> Result<Length>;
+
+ /// Encode value (sans [`Tag`]+[`Length`] header) as ASN.1 DER using the
+ /// provided [`Writer`].
+ fn encode_value(&self, encoder: &mut dyn Writer) -> Result<()>;
+}
diff --git a/src/encode_ref.rs b/src/encode_ref.rs
new file mode 100644
index 0000000..c1e4f03
--- /dev/null
+++ b/src/encode_ref.rs
@@ -0,0 +1,71 @@
+//! Wrapper object for encoding reference types.
+// TODO(tarcieri): replace with blanket impls of `Encode(Value)` for reference types?
+
+use crate::{Encode, EncodeValue, Length, Result, Tag, Tagged, ValueOrd, Writer};
+use core::cmp::Ordering;
+
+/// Reference encoder: wrapper type which impls `Encode` for any reference to a
+/// type which impls the same.
+pub struct EncodeRef<'a, T>(pub &'a T);
+
+impl<'a, T> AsRef<T> for EncodeRef<'a, T> {
+ fn as_ref(&self) -> &T {
+ self.0
+ }
+}
+
+impl<'a, T> Encode for EncodeRef<'a, T>
+where
+ T: Encode,
+{
+ fn encoded_len(&self) -> Result<Length> {
+ self.0.encoded_len()
+ }
+
+ fn encode(&self, writer: &mut dyn Writer) -> Result<()> {
+ self.0.encode(writer)
+ }
+}
+
+/// Reference value encoder: wrapper type which impls `EncodeValue` and `Tagged`
+/// for any reference type which impls the same.
+///
+/// By virtue of the blanket impl, this type also impls `Encode`.
+pub struct EncodeValueRef<'a, T>(pub &'a T);
+
+impl<'a, T> AsRef<T> for EncodeValueRef<'a, T> {
+ fn as_ref(&self) -> &T {
+ self.0
+ }
+}
+
+impl<'a, T> EncodeValue for EncodeValueRef<'a, T>
+where
+ T: EncodeValue,
+{
+ fn value_len(&self) -> Result<Length> {
+ self.0.value_len()
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ self.0.encode_value(writer)
+ }
+}
+
+impl<'a, T> Tagged for EncodeValueRef<'a, T>
+where
+ T: Tagged,
+{
+ fn tag(&self) -> Tag {
+ self.0.tag()
+ }
+}
+
+impl<'a, T> ValueOrd for EncodeValueRef<'a, T>
+where
+ T: ValueOrd,
+{
+ fn value_cmp(&self, other: &Self) -> Result<Ordering> {
+ self.0.value_cmp(other.0)
+ }
+}
diff --git a/src/error.rs b/src/error.rs
new file mode 100644
index 0000000..5e492a4
--- /dev/null
+++ b/src/error.rs
@@ -0,0 +1,366 @@
+//! Error types.
+
+pub use core::str::Utf8Error;
+
+use crate::{Length, Tag};
+use core::{convert::Infallible, fmt, num::TryFromIntError};
+
+#[cfg(feature = "oid")]
+use crate::asn1::ObjectIdentifier;
+
+#[cfg(feature = "pem")]
+use crate::pem;
+
+/// Result type.
+pub type Result<T> = core::result::Result<T, Error>;
+
+/// Error type.
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub struct Error {
+ /// Kind of error.
+ kind: ErrorKind,
+
+ /// Position inside of message where error occurred.
+ position: Option<Length>,
+}
+
+impl Error {
+ /// Create a new [`Error`].
+ pub fn new(kind: ErrorKind, position: Length) -> Error {
+ Error {
+ kind,
+ position: Some(position),
+ }
+ }
+
+ /// Create a new [`ErrorKind::Incomplete`] for the given length.
+ ///
+ /// Computes the expected len as being one greater than `actual_len`.
+ pub fn incomplete(actual_len: Length) -> Self {
+ match actual_len + Length::ONE {
+ Ok(expected_len) => ErrorKind::Incomplete {
+ expected_len,
+ actual_len,
+ }
+ .at(actual_len),
+ Err(err) => err.kind().at(actual_len),
+ }
+ }
+
+ /// Get the [`ErrorKind`] which occurred.
+ pub fn kind(self) -> ErrorKind {
+ self.kind
+ }
+
+ /// Get the position inside of the message where the error occurred.
+ pub fn position(self) -> Option<Length> {
+ self.position
+ }
+
+ /// For errors occurring inside of a nested message, extend the position
+ /// count by the location where the nested message occurs.
+ pub(crate) fn nested(self, nested_position: Length) -> Self {
+ // TODO(tarcieri): better handle length overflows occurring in this calculation?
+ let position = (nested_position + self.position.unwrap_or_default()).ok();
+
+ Self {
+ kind: self.kind,
+ position,
+ }
+ }
+}
+
+#[cfg(feature = "std")]
+impl std::error::Error for Error {}
+
+impl fmt::Display for Error {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self.kind)?;
+
+ if let Some(pos) = self.position {
+ write!(f, " at DER byte {}", pos)?;
+ }
+
+ Ok(())
+ }
+}
+
+impl From<ErrorKind> for Error {
+ fn from(kind: ErrorKind) -> Error {
+ Error {
+ kind,
+ position: None,
+ }
+ }
+}
+
+impl From<Infallible> for Error {
+ fn from(_: Infallible) -> Error {
+ unreachable!()
+ }
+}
+
+impl From<TryFromIntError> for Error {
+ fn from(_: TryFromIntError) -> Error {
+ Error {
+ kind: ErrorKind::Overflow,
+ position: None,
+ }
+ }
+}
+
+impl From<Utf8Error> for Error {
+ fn from(err: Utf8Error) -> Error {
+ Error {
+ kind: ErrorKind::Utf8(err),
+ position: None,
+ }
+ }
+}
+
+#[cfg(feature = "alloc")]
+impl From<alloc::string::FromUtf8Error> for Error {
+ fn from(err: alloc::string::FromUtf8Error) -> Error {
+ ErrorKind::Utf8(err.utf8_error()).into()
+ }
+}
+
+#[cfg(feature = "oid")]
+impl From<const_oid::Error> for Error {
+ fn from(_: const_oid::Error) -> Error {
+ ErrorKind::OidMalformed.into()
+ }
+}
+
+#[cfg(feature = "pem")]
+impl From<pem::Error> for Error {
+ fn from(err: pem::Error) -> Error {
+ ErrorKind::Pem(err).into()
+ }
+}
+
+#[cfg(feature = "std")]
+impl From<std::io::Error> for Error {
+ fn from(err: std::io::Error) -> Error {
+ match err.kind() {
+ std::io::ErrorKind::NotFound => ErrorKind::FileNotFound,
+ std::io::ErrorKind::PermissionDenied => ErrorKind::PermissionDenied,
+ other => ErrorKind::Io(other),
+ }
+ .into()
+ }
+}
+
+#[cfg(feature = "time")]
+impl From<time::error::ComponentRange> for Error {
+ fn from(_: time::error::ComponentRange) -> Error {
+ ErrorKind::DateTime.into()
+ }
+}
+
+/// Kinds of errors.
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+#[non_exhaustive]
+pub enum ErrorKind {
+ /// Date-and-time related errors.
+ DateTime,
+
+ /// This error indicates a previous DER parsing operation resulted in
+ /// an error and tainted the state of a `Decoder` or `Encoder`.
+ ///
+ /// Once this occurs, the overall operation has failed and cannot be
+ /// subsequently resumed.
+ Failed,
+
+ /// File not found error.
+ #[cfg(feature = "std")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+ FileNotFound,
+
+ /// Message is incomplete and does not contain all of the expected data.
+ Incomplete {
+ /// Expected message length.
+ ///
+ /// Note that this length represents a *minimum* lower bound on how
+ /// much additional data is needed to continue parsing the message.
+ ///
+ /// It's possible upon subsequent message parsing that the parser will
+ /// discover even more data is needed.
+ expected_len: Length,
+
+ /// Actual length of the message buffer currently being processed.
+ actual_len: Length,
+ },
+
+ /// I/O errors.
+ #[cfg(feature = "std")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+ Io(std::io::ErrorKind),
+
+ /// Incorrect length for a given field.
+ Length {
+ /// Tag of the value being decoded.
+ tag: Tag,
+ },
+
+ /// Message is not canonically encoded.
+ Noncanonical {
+ /// Tag of the value which is not canonically encoded.
+ tag: Tag,
+ },
+
+ /// OID is improperly encoded.
+ OidMalformed,
+
+ /// Unknown OID.
+ ///
+ /// This error is intended to be used by libraries which parse DER-based
+ /// formats which encounter unknown or unsupported OID libraries.
+ ///
+ /// It enables passing back the OID value to the caller, which allows them
+ /// to determine which OID(s) are causing the error (and then potentially
+ /// contribute upstream support for algorithms they care about).
+ #[cfg(feature = "oid")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "oid")))]
+ OidUnknown {
+ /// OID value that was unrecognized by a parser for a DER-based format.
+ oid: ObjectIdentifier,
+ },
+
+ /// `SET` ordering error: items not in canonical order.
+ SetOrdering,
+
+ /// Integer overflow occurred (library bug!).
+ Overflow,
+
+ /// Message is longer than this library's internal limits support.
+ Overlength,
+
+ /// PEM encoding errors.
+ #[cfg(feature = "pem")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "pem")))]
+ Pem(pem::Error),
+
+ /// Permission denied reading file.
+ #[cfg(feature = "std")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+ PermissionDenied,
+
+ /// Reader does not support the requested operation.
+ Reader,
+
+ /// Unknown tag mode.
+ TagModeUnknown,
+
+ /// Invalid tag number.
+ ///
+ /// The "tag number" is the lower 5-bits of a tag's octet.
+ /// This error occurs in the case that all 5-bits are set to `1`,
+ /// which indicates a multi-byte tag which is unsupported by this library.
+ TagNumberInvalid,
+
+ /// Unexpected tag.
+ TagUnexpected {
+ /// Tag the decoder was expecting (if there is a single such tag).
+ ///
+ /// `None` if multiple tags are expected/allowed, but the `actual` tag
+ /// does not match any of them.
+ expected: Option<Tag>,
+
+ /// Actual tag encountered in the message.
+ actual: Tag,
+ },
+
+ /// Unknown/unsupported tag.
+ TagUnknown {
+ /// Raw byte value of the tag.
+ byte: u8,
+ },
+
+ /// Undecoded trailing data at end of message.
+ TrailingData {
+ /// Length of the decoded data.
+ decoded: Length,
+
+ /// Total length of the remaining data left in the buffer.
+ remaining: Length,
+ },
+
+ /// UTF-8 errors.
+ Utf8(Utf8Error),
+
+ /// Unexpected value.
+ Value {
+ /// Tag of the unexpected value.
+ tag: Tag,
+ },
+}
+
+impl ErrorKind {
+ /// Annotate an [`ErrorKind`] with context about where it occurred,
+ /// returning an error.
+ pub fn at(self, position: Length) -> Error {
+ Error::new(self, position)
+ }
+}
+
+impl fmt::Display for ErrorKind {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ErrorKind::DateTime => write!(f, "date/time error"),
+ ErrorKind::Failed => write!(f, "operation failed"),
+ #[cfg(feature = "std")]
+ ErrorKind::FileNotFound => write!(f, "file not found"),
+ ErrorKind::Incomplete {
+ expected_len,
+ actual_len,
+ } => write!(
+ f,
+ "ASN.1 DER message is incomplete: expected {}, actual {}",
+ expected_len, actual_len
+ ),
+ #[cfg(feature = "std")]
+ ErrorKind::Io(err) => write!(f, "I/O error: {:?}", err),
+ ErrorKind::Length { tag } => write!(f, "incorrect length for {}", tag),
+ ErrorKind::Noncanonical { tag } => {
+ write!(f, "ASN.1 {} not canonically encoded as DER", tag)
+ }
+ ErrorKind::OidMalformed => write!(f, "malformed OID"),
+ #[cfg(feature = "oid")]
+ ErrorKind::OidUnknown { oid } => {
+ write!(f, "unknown/unsupported OID: {}", oid)
+ }
+ ErrorKind::SetOrdering => write!(f, "SET OF ordering error"),
+ ErrorKind::Overflow => write!(f, "integer overflow"),
+ ErrorKind::Overlength => write!(f, "ASN.1 DER message is too long"),
+ #[cfg(feature = "pem")]
+ ErrorKind::Pem(e) => write!(f, "PEM error: {}", e),
+ #[cfg(feature = "std")]
+ ErrorKind::PermissionDenied => write!(f, "permission denied"),
+ ErrorKind::Reader => write!(f, "reader does not support the requested operation"),
+ ErrorKind::TagModeUnknown => write!(f, "unknown tag mode"),
+ ErrorKind::TagNumberInvalid => write!(f, "invalid tag number"),
+ ErrorKind::TagUnexpected { expected, actual } => {
+ write!(f, "unexpected ASN.1 DER tag: ")?;
+
+ if let Some(tag) = expected {
+ write!(f, "expected {}, ", tag)?;
+ }
+
+ write!(f, "got {}", actual)
+ }
+ ErrorKind::TagUnknown { byte } => {
+ write!(f, "unknown/unsupported ASN.1 DER tag: 0x{:02x}", byte)
+ }
+ ErrorKind::TrailingData { decoded, remaining } => {
+ write!(
+ f,
+ "trailing data at end of DER message: decoded {} bytes, {} bytes remaining",
+ decoded, remaining
+ )
+ }
+ ErrorKind::Utf8(e) => write!(f, "{}", e),
+ ErrorKind::Value { tag } => write!(f, "malformed ASN.1 DER value for {}", tag),
+ }
+ }
+}
diff --git a/src/header.rs b/src/header.rs
new file mode 100644
index 0000000..ddb484e
--- /dev/null
+++ b/src/header.rs
@@ -0,0 +1,60 @@
+//! ASN.1 DER headers.
+
+use crate::{Decode, DerOrd, Encode, ErrorKind, Length, Reader, Result, Tag, Writer};
+use core::cmp::Ordering;
+
+/// ASN.1 DER headers: tag + length component of TLV-encoded values
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub struct Header {
+ /// Tag representing the type of the encoded value
+ pub tag: Tag,
+
+ /// Length of the encoded value
+ pub length: Length,
+}
+
+impl Header {
+ /// Create a new [`Header`] from a [`Tag`] and a specified length.
+ ///
+ /// Returns an error if the length exceeds the limits of [`Length`].
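+ ///
+ /// A minimal sketch:
+ ///
+ /// ```
+ /// use der::{Header, Length, Tag};
+ ///
+ /// let header = Header::new(Tag::Integer, 1u8).unwrap();
+ /// assert_eq!(header.tag, Tag::Integer);
+ /// assert_eq!(header.length, Length::ONE);
+ /// ```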
+ pub fn new(tag: Tag, length: impl TryInto<Length>) -> Result<Self> {
+ let length = length.try_into().map_err(|_| ErrorKind::Overflow)?;
+ Ok(Self { tag, length })
+ }
+}
+
+impl<'a> Decode<'a> for Header {
+ fn decode<R: Reader<'a>>(reader: &mut R) -> Result<Header> {
+ let tag = Tag::decode(reader)?;
+
+ let length = Length::decode(reader).map_err(|e| {
+ if e.kind() == ErrorKind::Overlength {
+ ErrorKind::Length { tag }.into()
+ } else {
+ e
+ }
+ })?;
+
+ Ok(Self { tag, length })
+ }
+}
+
+impl Encode for Header {
+ fn encoded_len(&self) -> Result<Length> {
+ self.tag.encoded_len()? + self.length.encoded_len()?
+ }
+
+ fn encode(&self, writer: &mut dyn Writer) -> Result<()> {
+ self.tag.encode(writer)?;
+ self.length.encode(writer)
+ }
+}
+
+impl DerOrd for Header {
+ fn der_cmp(&self, other: &Self) -> Result<Ordering> {
+ match self.tag.der_cmp(&other.tag)? {
+ Ordering::Equal => self.length.der_cmp(&other.length),
+ ordering => Ok(ordering),
+ }
+ }
+}
diff --git a/src/length.rs b/src/length.rs
new file mode 100644
index 0000000..76ee0e9
--- /dev/null
+++ b/src/length.rs
@@ -0,0 +1,375 @@
+//! Length calculations for encoded ASN.1 DER values
+
+use crate::{Decode, DerOrd, Encode, Error, ErrorKind, Reader, Result, SliceWriter, Writer};
+use core::{
+ cmp::Ordering,
+ fmt,
+ ops::{Add, Sub},
+};
+
+/// Maximum number of octets in a DER encoding of a [`Length`] using the
+/// rules implemented by this crate.
+const MAX_DER_OCTETS: usize = 5;
+
+/// Maximum length as a `u32` (256 MiB).
+const MAX_U32: u32 = 0xfff_ffff;
+
+/// ASN.1-encoded length.
+///
+/// Maximum length is defined by the [`Length::MAX`] constant (256 MiB).
+#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord)]
+pub struct Length(u32);
+
+impl Length {
+ /// Length of `0`
+ pub const ZERO: Self = Self(0);
+
+ /// Length of `1`
+ pub const ONE: Self = Self(1);
+
+ /// Maximum length currently supported: 256 MiB
+ pub const MAX: Self = Self(MAX_U32);
+
+ /// Create a new [`Length`] for any value which fits inside of a [`u16`].
+ ///
+ /// This function is const-safe and therefore useful for [`Length`] constants.
+ pub const fn new(value: u16) -> Self {
+ Self(value as u32)
+ }
+
+ /// Is this length equal to zero?
+ pub fn is_zero(self) -> bool {
+ self == Self::ZERO
+ }
+
+ /// Get the length of DER Tag-Length-Value (TLV) encoded data if `self`
+ /// is the length of the inner "value" portion of the message.
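+ ///
+ /// For example, a 3-byte value needs one tag octet and one length octet,
+ /// so its complete TLV encoding is 5 bytes long:
+ ///
+ /// ```
+ /// use der::Length;
+ ///
+ /// assert_eq!(Length::from(3u8).for_tlv(), Ok(Length::from(5u8)));
+ /// ```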
+ pub fn for_tlv(self) -> Result<Self> {
+ Self::ONE + self.encoded_len()? + self
+ }
+
+ /// Perform saturating addition of two lengths.
+ pub fn saturating_add(self, rhs: Self) -> Self {
+ Self(self.0.saturating_add(rhs.0))
+ }
+
+ /// Perform saturating subtraction of two lengths.
+ pub fn saturating_sub(self, rhs: Self) -> Self {
+ Self(self.0.saturating_sub(rhs.0))
+ }
+
+ /// Get initial octet of the encoded length (if one is required).
+ ///
+ /// From X.690 Section 8.1.3.5:
+ /// > In the long form, the length octets shall consist of an initial octet
+ /// > and one or more subsequent octets. The initial octet shall be encoded
+ /// > as follows:
+ /// >
+ /// > a) bit 8 shall be one;
+ /// > b) bits 7 to 1 shall encode the number of subsequent octets in the
+ /// > length octets, as an unsigned binary integer with bit 7 as the
+ /// > most significant bit;
+ /// > c) the value 11111111₂ shall not be used.
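+ ///
+ /// For example, a length of 0x100 uses the initial octet `0x82` (two
+ /// subsequent octets) and is encoded as `82 01 00`, as exercised by the
+ /// tests at the bottom of this module.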
+ fn initial_octet(self) -> Option<u8> {
+ match self.0 {
+ 0x80..=0xFF => Some(0x81),
+ 0x100..=0xFFFF => Some(0x82),
+ 0x10000..=0xFFFFFF => Some(0x83),
+ 0x1000000..=MAX_U32 => Some(0x84),
+ _ => None,
+ }
+ }
+}
+
+impl Add for Length {
+ type Output = Result<Self>;
+
+ fn add(self, other: Self) -> Result<Self> {
+ self.0
+ .checked_add(other.0)
+ .ok_or_else(|| ErrorKind::Overflow.into())
+ .and_then(TryInto::try_into)
+ }
+}
+
+impl Add<u8> for Length {
+ type Output = Result<Self>;
+
+ fn add(self, other: u8) -> Result<Self> {
+ self + Length::from(other)
+ }
+}
+
+impl Add<u16> for Length {
+ type Output = Result<Self>;
+
+ fn add(self, other: u16) -> Result<Self> {
+ self + Length::from(other)
+ }
+}
+
+impl Add<u32> for Length {
+ type Output = Result<Self>;
+
+ fn add(self, other: u32) -> Result<Self> {
+ self + Length::try_from(other)?
+ }
+}
+
+impl Add<usize> for Length {
+ type Output = Result<Self>;
+
+ fn add(self, other: usize) -> Result<Self> {
+ self + Length::try_from(other)?
+ }
+}
+
+impl Add<Length> for Result<Length> {
+ type Output = Self;
+
+ fn add(self, other: Length) -> Self {
+ self? + other
+ }
+}
+
+impl Sub for Length {
+ type Output = Result<Self>;
+
+ fn sub(self, other: Length) -> Result<Self> {
+ self.0
+ .checked_sub(other.0)
+ .ok_or_else(|| ErrorKind::Overflow.into())
+ .and_then(TryInto::try_into)
+ }
+}
+
+impl Sub<Length> for Result<Length> {
+ type Output = Self;
+
+ fn sub(self, other: Length) -> Self {
+ self? - other
+ }
+}
+
+impl From<u8> for Length {
+ fn from(len: u8) -> Length {
+ Length(len.into())
+ }
+}
+
+impl From<u16> for Length {
+ fn from(len: u16) -> Length {
+ Length(len.into())
+ }
+}
+
+impl From<Length> for u32 {
+ fn from(length: Length) -> u32 {
+ length.0
+ }
+}
+
+impl TryFrom<u32> for Length {
+ type Error = Error;
+
+ fn try_from(len: u32) -> Result<Length> {
+ if len <= Self::MAX.0 {
+ Ok(Length(len))
+ } else {
+ Err(ErrorKind::Overflow.into())
+ }
+ }
+}
+
+impl TryFrom<usize> for Length {
+ type Error = Error;
+
+ fn try_from(len: usize) -> Result<Length> {
+ u32::try_from(len)
+ .map_err(|_| ErrorKind::Overflow)?
+ .try_into()
+ }
+}
+
+impl TryFrom<Length> for usize {
+ type Error = Error;
+
+ fn try_from(len: Length) -> Result<usize> {
+ len.0.try_into().map_err(|_| ErrorKind::Overflow.into())
+ }
+}
+
+impl<'a> Decode<'a> for Length {
+ fn decode<R: Reader<'a>>(reader: &mut R) -> Result<Length> {
+ match reader.read_byte()? {
+ // Note: per X.690 Section 8.1.3.6.1 the byte 0x80 encodes indefinite
+ // lengths, which are not allowed in DER, so disallow that byte.
+ len if len < 0x80 => Ok(len.into()),
+ // 1-4 byte variable-sized length prefix
+ tag @ 0x81..=0x84 => {
+ let nbytes = tag.checked_sub(0x80).ok_or(ErrorKind::Overlength)? as usize;
+ debug_assert!(nbytes <= 4);
+
+ let mut decoded_len = 0u32;
+ for _ in 0..nbytes {
+ decoded_len = decoded_len.checked_shl(8).ok_or(ErrorKind::Overflow)?
+ | u32::from(reader.read_byte()?);
+ }
+
+ let length = Length::try_from(decoded_len)?;
+
+ // X.690 Section 10.1: DER lengths must be encoded with a minimum
+ // number of octets
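+ // (e.g. the overlong encoding `81 7F` is rejected here, since 0x7F
+ // must be encoded using the single-octet short form `7F`)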
+ if length.initial_octet() == Some(tag) {
+ Ok(length)
+ } else {
+ Err(ErrorKind::Overlength.into())
+ }
+ }
+ _ => {
+ // We specialize to length values of at most 4 bytes (i.e. at most
+ // 5 octets including the initial octet)
+ Err(ErrorKind::Overlength.into())
+ }
+ }
+ }
+}
+
+impl Encode for Length {
+ fn encoded_len(&self) -> Result<Length> {
+ match self.0 {
+ 0..=0x7F => Ok(Length(1)),
+ 0x80..=0xFF => Ok(Length(2)),
+ 0x100..=0xFFFF => Ok(Length(3)),
+ 0x10000..=0xFFFFFF => Ok(Length(4)),
+ 0x1000000..=MAX_U32 => Ok(Length(5)),
+ _ => Err(ErrorKind::Overflow.into()),
+ }
+ }
+
+ fn encode(&self, writer: &mut dyn Writer) -> Result<()> {
+ match self.initial_octet() {
+ Some(tag_byte) => {
+ writer.write_byte(tag_byte)?;
+
+ // Strip leading zeroes
+ match self.0.to_be_bytes() {
+ [0, 0, 0, byte] => writer.write_byte(byte),
+ [0, 0, bytes @ ..] => writer.write(&bytes),
+ [0, bytes @ ..] => writer.write(&bytes),
+ bytes => writer.write(&bytes),
+ }
+ }
+ #[allow(clippy::cast_possible_truncation)]
+ None => writer.write_byte(self.0 as u8),
+ }
+ }
+}
+
+impl DerOrd for Length {
+ fn der_cmp(&self, other: &Self) -> Result<Ordering> {
+ let mut buf1 = [0u8; MAX_DER_OCTETS];
+ let mut buf2 = [0u8; MAX_DER_OCTETS];
+
+ let mut encoder1 = SliceWriter::new(&mut buf1);
+ encoder1.encode(self)?;
+
+ let mut encoder2 = SliceWriter::new(&mut buf2);
+ encoder2.encode(other)?;
+
+ Ok(encoder1.finish()?.cmp(encoder2.finish()?))
+ }
+}
+
+impl fmt::Display for Length {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::Length;
+ use crate::{Decode, DerOrd, Encode, ErrorKind};
+ use core::cmp::Ordering;
+
+ #[test]
+ fn decode() {
+ assert_eq!(Length::ZERO, Length::from_der(&[0x00]).unwrap());
+
+ assert_eq!(Length::from(0x7Fu8), Length::from_der(&[0x7F]).unwrap());
+
+ assert_eq!(
+ Length::from(0x80u8),
+ Length::from_der(&[0x81, 0x80]).unwrap()
+ );
+
+ assert_eq!(
+ Length::from(0xFFu8),
+ Length::from_der(&[0x81, 0xFF]).unwrap()
+ );
+
+ assert_eq!(
+ Length::from(0x100u16),
+ Length::from_der(&[0x82, 0x01, 0x00]).unwrap()
+ );
+
+ assert_eq!(
+ Length::try_from(0x10000u32).unwrap(),
+ Length::from_der(&[0x83, 0x01, 0x00, 0x00]).unwrap()
+ );
+ }
+
+ #[test]
+ fn encode() {
+ let mut buffer = [0u8; 4];
+
+ assert_eq!(&[0x00], Length::ZERO.encode_to_slice(&mut buffer).unwrap());
+
+ assert_eq!(
+ &[0x7F],
+ Length::from(0x7Fu8).encode_to_slice(&mut buffer).unwrap()
+ );
+
+ assert_eq!(
+ &[0x81, 0x80],
+ Length::from(0x80u8).encode_to_slice(&mut buffer).unwrap()
+ );
+
+ assert_eq!(
+ &[0x81, 0xFF],
+ Length::from(0xFFu8).encode_to_slice(&mut buffer).unwrap()
+ );
+
+ assert_eq!(
+ &[0x82, 0x01, 0x00],
+ Length::from(0x100u16).encode_to_slice(&mut buffer).unwrap()
+ );
+
+ assert_eq!(
+ &[0x83, 0x01, 0x00, 0x00],
+ Length::try_from(0x10000u32)
+ .unwrap()
+ .encode_to_slice(&mut buffer)
+ .unwrap()
+ );
+ }
+
+ #[test]
+ fn reject_indefinite_lengths() {
+ assert!(Length::from_der(&[0x80]).is_err());
+ }
+
+ #[test]
+ fn add_overflows_when_max_length_exceeded() {
+ let result = Length::MAX + Length::ONE;
+ assert_eq!(
+ result.err().map(|err| err.kind()),
+ Some(ErrorKind::Overflow)
+ );
+ }
+
+ #[test]
+ fn der_ord() {
+ assert_eq!(Length::ONE.der_cmp(&Length::MAX).unwrap(), Ordering::Less);
+ }
+}
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644
index 0000000..f16857b
--- /dev/null
+++ b/src/lib.rs
@@ -0,0 +1,409 @@
+#![no_std]
+#![cfg_attr(docsrs, feature(doc_cfg))]
+#![doc = include_str!("../README.md")]
+#![doc(
+ html_logo_url = "https://raw.githubusercontent.com/RustCrypto/meta/master/logo.svg",
+ html_favicon_url = "https://raw.githubusercontent.com/RustCrypto/meta/master/logo.svg"
+)]
+#![forbid(unsafe_code)]
+#![warn(
+ clippy::cast_lossless,
+ clippy::cast_possible_truncation,
+ clippy::cast_possible_wrap,
+ clippy::cast_precision_loss,
+ clippy::cast_sign_loss,
+ clippy::checked_conversions,
+ clippy::implicit_saturating_sub,
+ clippy::integer_arithmetic,
+ clippy::panic,
+ clippy::panic_in_result_fn,
+ clippy::unwrap_used,
+ missing_docs,
+ rust_2018_idioms,
+ unused_lifetimes,
+ unused_qualifications
+)]
+
+//! # Usage
+//! ## [`Decode`] and [`Encode`] traits
+//! The [`Decode`] and [`Encode`] traits provide the decoding/encoding API
+//! respectively, and are designed to work in conjunction with concrete ASN.1
+//! types, including all types which impl the [`Sequence`] trait.
+//!
+//! The traits are impl'd for the following Rust core types:
+//! - `()`: ASN.1 `NULL`. See also [`Null`].
+//! - [`bool`]: ASN.1 `BOOLEAN`.
+//! - [`i8`], [`i16`], [`i32`], [`i64`], [`i128`]: ASN.1 `INTEGER`.
+//! - [`u8`], [`u16`], [`u32`], [`u64`], [`u128`]: ASN.1 `INTEGER`.
+//! - [`f64`]: ASN.1 `REAL` (gated on `real` crate feature)
+//! - [`str`], [`String`][`alloc::string::String`]: ASN.1 `UTF8String`.
+//! `String` requires `alloc` feature. See also [`Utf8StringRef`].
+//! - [`Option`]: ASN.1 `OPTIONAL`.
+//! - [`SystemTime`][`std::time::SystemTime`]: ASN.1 `GeneralizedTime`. Requires `std` feature.
+//! - [`Vec`][`alloc::vec::Vec`]: ASN.1 `SEQUENCE OF`. Requires `alloc` feature.
+//! - `[T; N]`: ASN.1 `SEQUENCE OF`. See also [`SequenceOf`].
+//!
+//! The following ASN.1 types provided by this crate also impl these traits:
+//! - [`Any`], [`AnyRef`]: ASN.1 `ANY`.
+//! - [`BitString`], [`BitStringRef`]: ASN.1 `BIT STRING`
+//! - [`GeneralizedTime`]: ASN.1 `GeneralizedTime`.
+//! - [`Ia5StringRef`]: ASN.1 `IA5String`.
+//! - [`Null`]: ASN.1 `NULL`.
+//! - [`ObjectIdentifier`]: ASN.1 `OBJECT IDENTIFIER`.
+//! - [`OctetString`], [`OctetStringRef`]: ASN.1 `OCTET STRING`.
+//! - [`PrintableStringRef`]: ASN.1 `PrintableString` (ASCII subset).
+//! - [`SequenceOf`]: ASN.1 `SEQUENCE OF`.
+//! - [`SetOf`], [`SetOfVec`]: ASN.1 `SET OF`.
+//! - [`UIntRef`]: ASN.1 unsigned `INTEGER` with raw access to encoded bytes.
+//! - [`UtcTime`]: ASN.1 `UTCTime`.
+//! - [`Utf8StringRef`]: ASN.1 `UTF8String`.
+//!
+//! Context specific fields can be modeled using these generic types:
+//! - [`ContextSpecific`]: decoder/encoder for owned context-specific fields
+//! - [`ContextSpecificRef`]: encode-only type for references to context-specific fields
+//!
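+//! As a minimal sketch of the [`Decode`]/[`Encode`] API, primitive types from
+//! the list above can be round-tripped directly (this assumes the `alloc`
+//! feature for `Encode::to_vec`):
+//!
+//! ```
+//! # #[cfg(feature = "alloc")]
+//! # {
+//! use der::{Decode, Encode};
+//!
+//! let der_bytes = 42u32.to_vec().unwrap(); // INTEGER 42 => `02 01 2A`
+//! assert_eq!(u32::from_der(&der_bytes).unwrap(), 42);
+//! # }
+//! ```
+//!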
+//! ## Example
+//! The following example implements X.509's `AlgorithmIdentifier` message type
+//! as defined in [RFC 5280 Section 4.1.1.2].
+//!
+//! The ASN.1 schema for this message type is as follows:
+//!
+//! ```text
+//! AlgorithmIdentifier ::= SEQUENCE {
+//! algorithm OBJECT IDENTIFIER,
+//! parameters ANY DEFINED BY algorithm OPTIONAL }
+//! ```
+//!
+//! Structured ASN.1 messages are typically encoded as a `SEQUENCE`, which
+//! this crate maps to a Rust struct using the [`Sequence`] trait. This
+//! trait is bounded on the [`Decode`] trait and provides a blanket impl
+//! of the [`Encode`] trait, so any type which impls [`Sequence`] can be
+//! used for both decoding and encoding.
+//!
+//! The following code example shows how to define a struct which maps to the
+//! above schema, as well as impl the [`Sequence`] trait for that struct:
+//!
+//! ```
+//! # #[cfg(all(feature = "alloc", feature = "oid"))]
+//! # {
+//! // Note: the following example does not require the `std` feature at all.
+//! // It does leverage the `alloc` feature, but also provides instructions for
+//! // "heapless" usage when the `alloc` feature is disabled.
+//! use der::{
+//! asn1::{AnyRef, ObjectIdentifier},
+//! DecodeValue, Decode, SliceReader, Encode, Header, Reader, Sequence
+//! };
+//!
+//! /// X.509 `AlgorithmIdentifier`.
+//! #[derive(Copy, Clone, Debug, Eq, PartialEq)]
+//! pub struct AlgorithmIdentifier<'a> {
+//! /// This field contains an ASN.1 `OBJECT IDENTIFIER`, a.k.a. OID.
+//! pub algorithm: ObjectIdentifier,
+//!
+//! /// This field is `OPTIONAL` and contains the ASN.1 `ANY` type, which
+//! /// in this example allows arbitrary algorithm-defined parameters.
+//! pub parameters: Option<AnyRef<'a>>
+//! }
+//!
+//! impl<'a> DecodeValue<'a> for AlgorithmIdentifier<'a> {
+//! fn decode_value<R: Reader<'a>>(reader: &mut R, _header: Header) -> der::Result<Self> {
+//! // The `Reader::decode` method can be used to decode any
+//! // type which impls the `Decode` trait, which is impl'd for
+//! // all of the ASN.1 built-in types in the `der` crate.
+//! //
+//! // The type to decode is normally selected by type inference
+//! // from the field the result is assigned to, but it can also
+//! // be named explicitly.
+//! //
+//! // For example, another way of decoding this particular field,
+//! // which contains an ASN.1 `OBJECT IDENTIFIER`, is by calling
+//! // `ObjectIdentifier::decode(reader)`.
+//! let algorithm = reader.decode()?;
+//!
+//! // This field contains an ASN.1 `OPTIONAL` type. The `der` crate
+//! // maps this directly to Rust's `Option` type and provides
+//! // impls of the `Decode` and `Encode` traits for `Option`.
+//! // To explicitly request an `OPTIONAL` type be decoded, decode
+//! // the field into an `Option<T>`.
+//! let parameters = reader.decode()?;
+//!
+//! // Note that when decoding a `SEQUENCE`, the entire sequence
+//! // body *MUST* be consumed by `decode_value` or an error will
+//! // be returned.
+//! Ok(Self { algorithm, parameters })
+//! }
+//! }
+//!
+//! impl<'a> Sequence<'a> for AlgorithmIdentifier<'a> {
+//! // The `Sequence::fields` method is used for encoding and functions as
+//! // a visitor for all of the fields in a message.
+//! //
+//! // To implement it, you must define a slice containing `Encode`
+//! // trait objects, then pass it to the provided `field_encoder`
+//! // function, which is implemented by the `der` crate and handles
+//! // message serialization.
+//! //
+//! // Trait objects are used because they allow for slices containing
+//! // heterogeneous field types, and a callback is used to allow for the
+//! // construction of temporary field encoder types. The latter means
+//! // that the fields of your Rust struct don't necessarily need to
+//! // impl the `Encode` trait, but if they don't you must construct
+//! // a temporary wrapper value which does.
+//! //
+//! // Types which impl the `Sequence` trait receive blanket impls of both
+//! // the `Encode` and `Tagged` traits (where the latter reports
+//! // `der::Tag::Sequence` as the tag).
+//! fn fields<F, T>(&self, field_encoder: F) -> der::Result<T>
+//! where
+//! F: FnOnce(&[&dyn Encode]) -> der::Result<T>,
+//! {
+//! field_encoder(&[&self.algorithm, &self.parameters])
+//! }
+//! }
+//!
+//! // Example parameters value: OID for the NIST P-256 elliptic curve.
+//! let parameters = "1.2.840.10045.3.1.7".parse::<ObjectIdentifier>().unwrap();
+//!
+//! // We need to convert `parameters` into an `AnyRef<'a>` type, which wraps a
+//! // `&'a [u8]` byte slice.
+//! //
+//! // To do that, we need owned DER-encoded data so that we can have
+//! // `AnyRef` borrow a reference to it, so we have to serialize the OID.
+//! //
+//! // When the `alloc` feature of this crate is enabled, any type that impls
+//! // the `Encode` trait (including all ASN.1 built-in types and any type
+//! // which impls `Sequence`) can be serialized by calling `Encode::to_vec()`.
+//! //
+//! // If you would prefer to avoid allocations, you can create a byte array
+//! // as backing storage instead, pass that to `der::SliceWriter::new`, and then
+//! // encode the `parameters` value using `writer.encode(&parameters)`.
+//! let der_encoded_parameters = parameters.to_vec().unwrap();
+//!
+//! let algorithm_identifier = AlgorithmIdentifier {
+//! // OID for `id-ecPublicKey`, if you're curious
+//! algorithm: "1.2.840.10045.2.1".parse().unwrap(),
+//!
+//! // `AnyRef<'a>` impls `TryFrom<&'a [u8]>`, which parses the provided
+//! // slice as an ASN.1 DER-encoded message.
+//! parameters: Some(der_encoded_parameters.as_slice().try_into().unwrap())
+//! };
+//!
+//! // Serialize the `AlgorithmIdentifier` created above as ASN.1 DER,
+//! // allocating a `Vec<u8>` for storage.
+//! //
+//! // As mentioned earlier, if you don't have the `alloc` feature enabled you
+//! // can create a fixed-size array instead, call `SliceWriter::new` with a
+//! // mutable reference to it, encode the message using
+//! // `writer.encode(&algorithm_identifier)`, and finally call `writer.finish()`
+//! // to obtain a byte slice containing the encoded message.
+//! let der_encoded_algorithm_identifier = algorithm_identifier.to_vec().unwrap();
+//!
+//! // Deserialize the `AlgorithmIdentifier` we just serialized from ASN.1 DER
+//! // using `der::Decode::from_der`.
+//! let decoded_algorithm_identifier = AlgorithmIdentifier::from_der(
+//! &der_encoded_algorithm_identifier
+//! ).unwrap();
+//!
+//! // Ensure the original `AlgorithmIdentifier` is the same as the one we just
+//! // decoded from ASN.1 DER.
+//! assert_eq!(algorithm_identifier, decoded_algorithm_identifier);
+//! # }
+//! ```
+//!
+//! ## Custom derive support
+//! When the `derive` feature of this crate is enabled, the following custom
+//! derive macros are available:
+//!
+//! - [`Choice`]: derive for `CHOICE` enum (see [`der_derive::Choice`])
+//! - [`Enumerated`]: derive for `ENUMERATED` enum (see [`der_derive::Enumerated`])
+//! - [`Sequence`]: derive for `SEQUENCE` struct (see [`der_derive::Sequence`])
+//!
+//! ### Derive [`Sequence`] for struct
+//! The following is a code example of how to use the [`Sequence`] custom derive:
+//!
+//! ```
+//! # #[cfg(all(feature = "alloc", feature = "derive", feature = "oid"))]
+//! # {
+//! use der::{asn1::{AnyRef, ObjectIdentifier}, Encode, Decode, Sequence};
+//!
+//! /// X.509 `AlgorithmIdentifier` (same as above)
+//! #[derive(Copy, Clone, Debug, Eq, PartialEq, Sequence)] // NOTE: added `Sequence`
+//! pub struct AlgorithmIdentifier<'a> {
+//! /// This field contains an ASN.1 `OBJECT IDENTIFIER`, a.k.a. OID.
+//! pub algorithm: ObjectIdentifier,
+//!
+//! /// This field is `OPTIONAL` and contains the ASN.1 `ANY` type, which
+//! /// in this example allows arbitrary algorithm-defined parameters.
+//! pub parameters: Option<AnyRef<'a>>
+//! }
+//!
+//! // Example parameters value: OID for the NIST P-256 elliptic curve.
+//! let parameters_oid = "1.2.840.10045.3.1.7".parse::<ObjectIdentifier>().unwrap();
+//!
+//! let algorithm_identifier = AlgorithmIdentifier {
+//! // OID for `id-ecPublicKey`, if you're curious
+//! algorithm: "1.2.840.10045.2.1".parse().unwrap(),
+//!
+//! // `AnyRef<'a>` impls `From<&'a ObjectIdentifier>`, allowing OID constants to
+//! // be directly converted to an `AnyRef` type for this use case.
+//! parameters: Some(AnyRef::from(&parameters_oid))
+//! };
+//!
+//! // Encode
+//! let der_encoded_algorithm_identifier = algorithm_identifier.to_vec().unwrap();
+//!
+//! // Decode
+//! let decoded_algorithm_identifier = AlgorithmIdentifier::from_der(
+//! &der_encoded_algorithm_identifier
+//! ).unwrap();
+//!
+//! assert_eq!(algorithm_identifier, decoded_algorithm_identifier);
+//! # }
+//! ```
+//!
+//! For fields which don't directly impl [`Decode`] and [`Encode`],
+//! you can add annotations to convert to an intermediate ASN.1 type
+//! first, so long as that type impls `TryFrom` and `Into` for the
+//! ASN.1 type.
+//!
+//! For example, structs containing `&'a [u8]` fields may want them encoded
+//! as either a `BIT STRING` or `OCTET STRING`. By using the
+//! `#[asn1(type = "BIT STRING")]` annotation it's possible to select which
+//! ASN.1 type should be used.
+//!
+//! Building off the above example:
+//!
+//! ```rust
+//! # #[cfg(all(feature = "alloc", feature = "derive", feature = "oid"))]
+//! # {
+//! # use der::{asn1::{AnyRef, BitStringRef, ObjectIdentifier}, Sequence};
+//! #
+//! # #[derive(Copy, Clone, Debug, Eq, PartialEq, Sequence)]
+//! # pub struct AlgorithmIdentifier<'a> {
+//! # pub algorithm: ObjectIdentifier,
+//! # pub parameters: Option<AnyRef<'a>>
+//! # }
+//! /// X.509 `SubjectPublicKeyInfo` (SPKI)
+//! #[derive(Copy, Clone, Debug, Eq, PartialEq, Sequence)]
+//! pub struct SubjectPublicKeyInfo<'a> {
+//! /// X.509 `AlgorithmIdentifier`
+//! pub algorithm: AlgorithmIdentifier<'a>,
+//!
+//! /// Public key data
+//! pub subject_public_key: BitStringRef<'a>,
+//! }
+//! # }
+//! ```
+//!
+//! # See also
+//! For more information about ASN.1 DER we recommend the following guides:
+//!
+//! - [A Layman's Guide to a Subset of ASN.1, BER, and DER] (RSA Laboratories)
+//! - [A Warm Welcome to ASN.1 and DER] (Let's Encrypt)
+//!
+//! [RFC 5280 Section 4.1.1.2]: https://tools.ietf.org/html/rfc5280#section-4.1.1.2
+//! [A Layman's Guide to a Subset of ASN.1, BER, and DER]: https://luca.ntop.org/Teaching/Appunti/asn1.html
+//! [A Warm Welcome to ASN.1 and DER]: https://letsencrypt.org/docs/a-warm-welcome-to-asn1-and-der/
+//!
+//! [`Any`]: asn1::AnyRef
+//! [`AnyRef`]: asn1::AnyRef
+//! [`ContextSpecific`]: asn1::ContextSpecific
+//! [`ContextSpecificRef`]: asn1::ContextSpecificRef
+//! [`BitString`]: asn1::BitStringRef
+//! [`BitStringRef`]: asn1::BitStringRef
+//! [`GeneralizedTime`]: asn1::GeneralizedTime
+//! [`Ia5StringRef`]: asn1::Ia5StringRef
+//! [`Null`]: asn1::Null
+//! [`ObjectIdentifier`]: asn1::ObjectIdentifier
+//! [`OctetString`]: asn1::OctetStringRef
+//! [`OctetStringRef`]: asn1::OctetStringRef
+//! [`PrintableStringRef`]: asn1::PrintableStringRef
+//! [`SequenceOf`]: asn1::SequenceOf
+//! [`SetOf`]: asn1::SetOf
+//! [`SetOfVec`]: asn1::SetOfVec
+//! [`UIntRef`]: asn1::UIntRef
+//! [`UtcTime`]: asn1::UtcTime
+//! [`Utf8StringRef`]: asn1::Utf8StringRef
+
+/// Local Android change: Use std to allow building as a dylib.
+#[cfg(android_dylib)]
+extern crate std;
+
+#[cfg(feature = "alloc")]
+#[allow(unused_imports)]
+#[macro_use]
+extern crate alloc;
+#[cfg(feature = "std")]
+extern crate std;
+
+pub mod asn1;
+
+pub(crate) mod arrayvec;
+mod byte_slice;
+mod datetime;
+mod decode;
+mod encode;
+mod encode_ref;
+mod error;
+mod header;
+mod length;
+mod ord;
+mod reader;
+mod str_slice;
+mod tag;
+mod writer;
+
+#[cfg(feature = "alloc")]
+mod document;
+
+pub use crate::{
+ asn1::{AnyRef, Choice, Sequence},
+ datetime::DateTime,
+ decode::{Decode, DecodeOwned, DecodeValue},
+ encode::{Encode, EncodeValue},
+ encode_ref::{EncodeRef, EncodeValueRef},
+ error::{Error, ErrorKind, Result},
+ header::Header,
+ length::Length,
+ ord::{DerOrd, ValueOrd},
+ reader::{slice::SliceReader, Reader},
+ tag::{Class, FixedTag, Tag, TagMode, TagNumber, Tagged},
+ writer::{slice::SliceWriter, Writer},
+};
+
+#[cfg(feature = "alloc")]
+pub use crate::document::Document;
+
+#[cfg(feature = "bigint")]
+#[cfg_attr(docsrs, doc(cfg(feature = "bigint")))]
+pub use crypto_bigint as bigint;
+
+#[cfg(feature = "derive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
+pub use der_derive::{Choice, Enumerated, Sequence, ValueOrd};
+
+#[cfg(feature = "oid")]
+#[cfg_attr(docsrs, doc(cfg(feature = "oid")))]
+pub use const_oid as oid;
+
+#[cfg(feature = "pem")]
+#[cfg_attr(docsrs, doc(cfg(feature = "pem")))]
+pub use {
+ crate::{decode::DecodePem, encode::EncodePem, reader::pem::PemReader, writer::pem::PemWriter},
+ pem_rfc7468 as pem,
+};
+
+#[cfg(feature = "time")]
+#[cfg_attr(docsrs, doc(cfg(feature = "time")))]
+pub use time;
+
+#[cfg(feature = "zeroize")]
+pub use zeroize;
+
+#[cfg(all(feature = "alloc", feature = "zeroize"))]
+pub use crate::document::SecretDocument;
+
+pub(crate) use crate::{arrayvec::ArrayVec, byte_slice::ByteSlice, str_slice::StrSlice};
diff --git a/src/ord.rs b/src/ord.rs
new file mode 100644
index 0000000..fd967bf
--- /dev/null
+++ b/src/ord.rs
@@ -0,0 +1,71 @@
+//! Ordering trait.
+
+use crate::{EncodeValue, Result, Tagged};
+use core::cmp::Ordering;
+
+/// DER ordering trait.
+///
+/// Compares the ordering of two values based on their ASN.1 DER
+/// serializations.
+///
+/// This is used by the DER encoding for `SET OF` in order to establish an
+/// ordering for the elements of sets.
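+///
+/// A minimal sketch using [`Length`](crate::Length), which implements this
+/// trait (the same comparison appears in this crate's tests):
+///
+/// ```
+/// use core::cmp::Ordering;
+/// use der::{DerOrd, Length};
+///
+/// assert_eq!(Length::ONE.der_cmp(&Length::MAX).unwrap(), Ordering::Less);
+/// ```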
+pub trait DerOrd {
+ /// Return an [`Ordering`] between `self` and `other` when serialized as
+ /// ASN.1 DER.
+ fn der_cmp(&self, other: &Self) -> Result<Ordering>;
+}
+
+/// DER value ordering trait.
+///
+/// Compares the ordering of the value portion of TLV-encoded DER productions.
+pub trait ValueOrd {
+ /// Return an [`Ordering`] between value portion of TLV-encoded `self` and
+ /// `other` when serialized as ASN.1 DER.
+ fn value_cmp(&self, other: &Self) -> Result<Ordering>;
+}
+
+impl<T> DerOrd for T
+where
+ T: EncodeValue + ValueOrd + Tagged,
+{
+ fn der_cmp(&self, other: &Self) -> Result<Ordering> {
+ match self.header()?.der_cmp(&other.header()?)? {
+ Ordering::Equal => self.value_cmp(other),
+ ordering => Ok(ordering),
+ }
+ }
+}
+
+/// Marker trait for types whose `Ord` impl can be used as `ValueOrd`.
+///
+/// This means the `Ord` impl will sort values in the same order as their DER
+/// encodings.
+pub trait OrdIsValueOrd: Ord {}
+
+impl<T> ValueOrd for T
+where
+ T: OrdIsValueOrd,
+{
+ fn value_cmp(&self, other: &Self) -> Result<Ordering> {
+ Ok(self.cmp(other))
+ }
+}
+
+/// Compare the order of two iterators using [`DerCmp`] on the values.
+pub(crate) fn iter_cmp<'a, I, T: 'a>(a: I, b: I) -> Result<Ordering>
+where
+ I: Iterator<Item = &'a T> + ExactSizeIterator,
+ T: DerOrd,
+{
+ let length_ord = a.len().cmp(&b.len());
+
+ for (value1, value2) in a.zip(b) {
+ match value1.der_cmp(value2)? {
+ Ordering::Equal => (),
+ other => return Ok(other),
+ }
+ }
+
+ Ok(length_ord)
+}
diff --git a/src/reader.rs b/src/reader.rs
new file mode 100644
index 0000000..b917323
--- /dev/null
+++ b/src/reader.rs
@@ -0,0 +1,168 @@
+//! Reader trait.
+
+mod nested;
+#[cfg(feature = "pem")]
+pub(crate) mod pem;
+pub(crate) mod slice;
+
+pub(crate) use nested::NestedReader;
+
+use crate::{
+ asn1::ContextSpecific, Decode, DecodeValue, Encode, Error, ErrorKind, FixedTag, Header, Length,
+ Result, Tag, TagMode, TagNumber,
+};
+
+#[cfg(feature = "alloc")]
+use alloc::vec::Vec;
+
+/// Reader trait which reads DER-encoded input.
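+///
+/// A minimal usage sketch with the slice-backed [`SliceReader`](crate::SliceReader)
+/// (the input here is the DER encoding of `INTEGER` 42, i.e. `02 01 2A`):
+///
+/// ```
+/// use der::{Decode, Reader, SliceReader};
+///
+/// let mut reader = SliceReader::new(&[0x02, 0x01, 0x2A]).unwrap();
+/// assert_eq!(i8::decode(&mut reader).unwrap(), 42i8);
+/// assert!(reader.is_finished());
+/// ```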
+pub trait Reader<'r>: Sized {
+ /// Get the length of the input.
+ fn input_len(&self) -> Length;
+
+ /// Peek at the next byte of input without modifying the cursor.
+ fn peek_byte(&self) -> Option<u8>;
+
+ /// Peek forward in the input data, attempting to decode a [`Header`] from
+ /// the data at the current position in the decoder.
+ ///
+ /// Does not modify the decoder's state.
+ fn peek_header(&self) -> Result<Header>;
+
+ /// Get the position within the buffer.
+ fn position(&self) -> Length;
+
+ /// Attempt to read data borrowed directly from the input as a slice,
+ /// updating the internal cursor position.
+ ///
+ /// # Returns
+ /// - `Ok(slice)` on success
+ /// - `Err(ErrorKind::Incomplete)` if there is not enough data
+ /// - `Err(ErrorKind::Reader)` if the reader can't borrow from the input
+ fn read_slice(&mut self, len: Length) -> Result<&'r [u8]>;
+
+ /// Attempt to decode an ASN.1 `CONTEXT-SPECIFIC` field with the
+ /// provided [`TagNumber`].
+ fn context_specific<T>(&mut self, tag_number: TagNumber, tag_mode: TagMode) -> Result<Option<T>>
+ where
+ T: DecodeValue<'r> + FixedTag,
+ {
+ Ok(match tag_mode {
+ TagMode::Explicit => ContextSpecific::<T>::decode_explicit(self, tag_number)?,
+ TagMode::Implicit => ContextSpecific::<T>::decode_implicit(self, tag_number)?,
+ }
+ .map(|field| field.value))
+ }
+
+ /// Decode a value which impls the [`Decode`] trait.
+ fn decode<T: Decode<'r>>(&mut self) -> Result<T> {
+ T::decode(self).map_err(|e| e.nested(self.position()))
+ }
+
+ /// Return an error with the given [`ErrorKind`], annotating it with
+ /// context about where the error occurred.
+ fn error(&mut self, kind: ErrorKind) -> Error {
+ kind.at(self.position())
+ }
+
+ /// Finish decoding, returning the given value if there is no
+ /// remaining data, or an error otherwise.
+ fn finish<T>(self, value: T) -> Result<T> {
+ if !self.is_finished() {
+ Err(ErrorKind::TrailingData {
+ decoded: self.position(),
+ remaining: self.remaining_len(),
+ }
+ .at(self.position()))
+ } else {
+ Ok(value)
+ }
+ }
+
+ /// Have we read all of the input data?
+ fn is_finished(&self) -> bool {
+ self.remaining_len().is_zero()
+ }
+
+ /// Offset within the original input stream.
+ ///
+ /// This is used for error reporting, and doesn't need to be overridden
+ /// by any reader implementations (except for the built-in `NestedReader`,
+ /// which consumes nested input messages).
+ fn offset(&self) -> Length {
+ self.position()
+ }
+
+ /// Peek at the next byte in the decoder and attempt to decode it as a
+ /// [`Tag`] value.
+ ///
+ /// Does not modify the decoder's state.
+ fn peek_tag(&self) -> Result<Tag> {
+ match self.peek_byte() {
+ Some(byte) => byte.try_into(),
+ None => Err(Error::incomplete(self.input_len())),
+ }
+ }
+
+ /// Read a single byte.
+ fn read_byte(&mut self) -> Result<u8> {
+ let mut buf = [0];
+ self.read_into(&mut buf)?;
+ Ok(buf[0])
+ }
+
+ /// Attempt to read input data, writing it into the provided buffer, and
+ /// returning a slice on success.
+ ///
+ /// # Returns
+ /// - `Ok(slice)` if there is sufficient data
+ /// - `Err(ErrorKind::Incomplete)` if there is not enough data
+ fn read_into<'o>(&mut self, buf: &'o mut [u8]) -> Result<&'o [u8]> {
+ let input = self.read_slice(buf.len().try_into()?)?;
+ buf.copy_from_slice(input);
+ Ok(buf)
+ }
+
+ /// Read nested data of the given length.
+ fn read_nested<'n, T, F>(&'n mut self, len: Length, f: F) -> Result<T>
+ where
+ F: FnOnce(&mut NestedReader<'n, Self>) -> Result<T>,
+ {
+ let mut reader = NestedReader::new(self, len)?;
+ let ret = f(&mut reader)?;
+ reader.finish(ret)
+ }
+
+ /// Read a byte vector of the given length.
+ #[cfg(feature = "alloc")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+ fn read_vec(&mut self, len: Length) -> Result<Vec<u8>> {
+ let mut bytes = vec![0u8; usize::try_from(len)?];
+ self.read_into(&mut bytes)?;
+ Ok(bytes)
+ }
+
+ /// Get the number of bytes still remaining in the buffer.
+ fn remaining_len(&self) -> Length {
+ debug_assert!(self.position() <= self.input_len());
+ self.input_len().saturating_sub(self.position())
+ }
+
+ /// Read an ASN.1 `SEQUENCE`, creating a nested [`Reader`] for the body and
+ /// calling the provided closure with it.
+ fn sequence<'n, F, T>(&'n mut self, f: F) -> Result<T>
+ where
+ F: FnOnce(&mut NestedReader<'n, Self>) -> Result<T>,
+ {
+ let header = Header::decode(self)?;
+ header.tag.assert_eq(Tag::Sequence)?;
+ self.read_nested(header.length, f)
+ }
+
+ /// Obtain a slice of bytes containing a complete TLV production suitable for later parsing.
+ fn tlv_bytes(&mut self) -> Result<&'r [u8]> {
+ let header = self.peek_header()?;
+ let header_len = header.encoded_len()?;
+ self.read_slice((header_len + header.length)?)
+ }
+}
diff --git a/src/reader/nested.rs b/src/reader/nested.rs
new file mode 100644
index 0000000..40ede69
--- /dev/null
+++ b/src/reader/nested.rs
@@ -0,0 +1,96 @@
+//! Reader type for consuming nested TLV records within a DER document.
+
+use crate::{reader::Reader, Error, ErrorKind, Header, Length, Result};
+
+/// Reader type used by [`Reader::read_nested`].
+pub struct NestedReader<'i, R> {
+ /// Inner reader type.
+ inner: &'i mut R,
+
+ /// Nested input length.
+ input_len: Length,
+
+ /// Position within the nested input.
+ position: Length,
+}
+
+impl<'i, 'r, R: Reader<'r>> NestedReader<'i, R> {
+ /// Create a new nested reader which can read the given [`Length`].
+ pub(crate) fn new(inner: &'i mut R, len: Length) -> Result<Self> {
+ if len <= inner.remaining_len() {
+ Ok(Self {
+ inner,
+ input_len: len,
+ position: Length::ZERO,
+ })
+ } else {
+ Err(ErrorKind::Incomplete {
+ expected_len: (inner.offset() + len)?,
+ actual_len: (inner.offset() + inner.remaining_len())?,
+ }
+ .at(inner.offset()))
+ }
+ }
+
+ /// Move the position cursor the given length, returning an error if there
+ /// isn't enough remaining data in the nested input.
+ fn advance_position(&mut self, len: Length) -> Result<()> {
+ let new_position = (self.position + len)?;
+
+ if new_position <= self.input_len {
+ self.position = new_position;
+ Ok(())
+ } else {
+ Err(ErrorKind::Incomplete {
+ expected_len: (self.inner.offset() + len)?,
+ actual_len: (self.inner.offset() + self.remaining_len())?,
+ }
+ .at(self.inner.offset()))
+ }
+ }
+}
+
+impl<'i, 'r, R: Reader<'r>> Reader<'r> for NestedReader<'i, R> {
+ fn input_len(&self) -> Length {
+ self.input_len
+ }
+
+ fn peek_byte(&self) -> Option<u8> {
+ if self.is_finished() {
+ None
+ } else {
+ self.inner.peek_byte()
+ }
+ }
+
+ fn peek_header(&self) -> Result<Header> {
+ if self.is_finished() {
+ Err(Error::incomplete(self.offset()))
+ } else {
+ // TODO(tarcieri): handle peeking past nested length
+ self.inner.peek_header()
+ }
+ }
+
+ fn position(&self) -> Length {
+ self.position
+ }
+
+ fn read_slice(&mut self, len: Length) -> Result<&'r [u8]> {
+ self.advance_position(len)?;
+ self.inner.read_slice(len)
+ }
+
+ fn error(&mut self, kind: ErrorKind) -> Error {
+ self.inner.error(kind)
+ }
+
+ fn offset(&self) -> Length {
+ self.inner.offset()
+ }
+
+ fn read_into<'o>(&mut self, out: &'o mut [u8]) -> Result<&'o [u8]> {
+ self.advance_position(Length::try_from(out.len())?)?;
+ self.inner.read_into(out)
+ }
+}
diff --git a/src/reader/pem.rs b/src/reader/pem.rs
new file mode 100644
index 0000000..01bb4f2
--- /dev/null
+++ b/src/reader/pem.rs
@@ -0,0 +1,83 @@
+//! Streaming PEM reader.
+
+use super::Reader;
+use crate::{ErrorKind, Header, Length, Result};
+use pem_rfc7468::Decoder;
+
+/// `Reader` type which decodes PEM on-the-fly.
+#[cfg(feature = "pem")]
+#[cfg_attr(docsrs, doc(cfg(feature = "pem")))]
+#[derive(Clone)]
+pub struct PemReader<'i> {
+ /// Inner PEM decoder.
+ decoder: Decoder<'i>,
+
+ /// Input length (in bytes after Base64 decoding).
+ input_len: Length,
+
+ /// Position in the input buffer (in bytes after Base64 decoding).
+ position: Length,
+}
+
+#[cfg(feature = "pem")]
+#[cfg_attr(docsrs, doc(cfg(feature = "pem")))]
+impl<'i> PemReader<'i> {
+ /// Create a new PEM reader which decodes data on-the-fly.
+ ///
+ /// Uses the default 64-character line wrapping.
+ pub fn new(pem: &'i [u8]) -> Result<Self> {
+ let decoder = Decoder::new(pem)?;
+ let input_len = Length::try_from(decoder.remaining_len())?;
+
+ Ok(Self {
+ decoder,
+ input_len,
+ position: Length::ZERO,
+ })
+ }
+
+ /// Get the PEM type label parsed from the encapsulation boundaries
+ /// of this document.
+ pub fn type_label(&self) -> &'i str {
+ self.decoder.type_label()
+ }
+}
+
+#[cfg(feature = "pem")]
+#[cfg_attr(docsrs, doc(cfg(feature = "pem")))]
+impl<'i> Reader<'i> for PemReader<'i> {
+ fn input_len(&self) -> Length {
+ self.input_len
+ }
+
+ fn peek_byte(&self) -> Option<u8> {
+ // TODO(tarcieri): lookahead buffer
+ None
+ }
+
+ fn peek_header(&self) -> Result<Header> {
+ // TODO(tarcieri): lookahead buffer
+ Err(ErrorKind::Reader.into())
+ }
+
+ fn position(&self) -> Length {
+ self.position
+ }
+
+ fn read_slice(&mut self, _len: Length) -> Result<&'i [u8]> {
+ // Can't borrow from PEM because it requires decoding
+ Err(ErrorKind::Reader.into())
+ }
+
+ fn read_into<'o>(&mut self, buf: &'o mut [u8]) -> Result<&'o [u8]> {
+ let bytes = self.decoder.decode(buf)?;
+ self.position = (self.position + bytes.len())?;
+
+ debug_assert_eq!(
+ self.position,
+ (self.input_len - Length::try_from(self.decoder.remaining_len())?)?
+ );
+
+ Ok(bytes)
+ }
+}
diff --git a/src/reader/slice.rs b/src/reader/slice.rs
new file mode 100644
index 0000000..6bab091
--- /dev/null
+++ b/src/reader/slice.rs
@@ -0,0 +1,214 @@
+//! Slice reader.
+
+use crate::{ByteSlice, Decode, Error, ErrorKind, Header, Length, Reader, Result, Tag};
+
+/// [`Reader`] which consumes an input byte slice.
+#[derive(Clone, Debug)]
+pub struct SliceReader<'a> {
+ /// Byte slice being decoded.
+ bytes: ByteSlice<'a>,
+
+ /// Did the decoding operation fail?
+ failed: bool,
+
+ /// Position within the decoded slice.
+ position: Length,
+}
+
+impl<'a> SliceReader<'a> {
+ /// Create a new slice reader for the given byte slice.
+ pub fn new(bytes: &'a [u8]) -> Result<Self> {
+ Ok(Self {
+ bytes: ByteSlice::new(bytes)?,
+ failed: false,
+ position: Length::ZERO,
+ })
+ }
+
+ /// Return an error with the given [`ErrorKind`], annotating it with
+ /// context about where the error occurred.
+ pub fn error(&mut self, kind: ErrorKind) -> Error {
+ self.failed = true;
+ kind.at(self.position)
+ }
+
+ /// Return an error for an invalid value with the given tag.
+ pub fn value_error(&mut self, tag: Tag) -> Error {
+ self.error(tag.value_error().kind())
+ }
+
+ /// Did the decoding operation fail due to an error?
+ pub fn is_failed(&self) -> bool {
+ self.failed
+ }
+
+ /// Obtain the remaining bytes in this slice reader from the current cursor
+ /// position.
+ fn remaining(&self) -> Result<&'a [u8]> {
+ if self.is_failed() {
+ Err(ErrorKind::Failed.at(self.position))
+ } else {
+ self.bytes
+ .as_slice()
+ .get(self.position.try_into()?..)
+ .ok_or_else(|| Error::incomplete(self.input_len()))
+ }
+ }
+}
+
+impl<'a> Reader<'a> for SliceReader<'a> {
+ fn input_len(&self) -> Length {
+ self.bytes.len()
+ }
+
+ fn peek_byte(&self) -> Option<u8> {
+ self.remaining()
+ .ok()
+ .and_then(|bytes| bytes.get(0).cloned())
+ }
+
+ fn peek_header(&self) -> Result<Header> {
+ Header::decode(&mut self.clone())
+ }
+
+ fn position(&self) -> Length {
+ self.position
+ }
+
+ fn read_slice(&mut self, len: Length) -> Result<&'a [u8]> {
+ if self.is_failed() {
+ return Err(self.error(ErrorKind::Failed));
+ }
+
+ match self.remaining()?.get(..len.try_into()?) {
+ Some(result) => {
+ self.position = (self.position + len)?;
+ Ok(result)
+ }
+ None => Err(self.error(ErrorKind::Incomplete {
+ expected_len: (self.position + len)?,
+ actual_len: self.input_len(),
+ })),
+ }
+ }
+
+ fn decode<T: Decode<'a>>(&mut self) -> Result<T> {
+ if self.is_failed() {
+ return Err(self.error(ErrorKind::Failed));
+ }
+
+ T::decode(self).map_err(|e| {
+ self.failed = true;
+ e.nested(self.position)
+ })
+ }
+
+ fn error(&mut self, kind: ErrorKind) -> Error {
+ self.failed = true;
+ kind.at(self.position)
+ }
+
+ fn finish<T>(self, value: T) -> Result<T> {
+ if self.is_failed() {
+ Err(ErrorKind::Failed.at(self.position))
+ } else if !self.is_finished() {
+ Err(ErrorKind::TrailingData {
+ decoded: self.position,
+ remaining: self.remaining_len(),
+ }
+ .at(self.position))
+ } else {
+ Ok(value)
+ }
+ }
+
+ fn remaining_len(&self) -> Length {
+ debug_assert!(self.position <= self.input_len());
+ self.input_len().saturating_sub(self.position)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::SliceReader;
+ use crate::{Decode, ErrorKind, Length, Reader, Tag};
+ use hex_literal::hex;
+
+ // INTEGER: 42
+ const EXAMPLE_MSG: &[u8] = &hex!("02012A00");
+
+ #[test]
+ fn empty_message() {
+ let mut reader = SliceReader::new(&[]).unwrap();
+ let err = bool::decode(&mut reader).err().unwrap();
+ assert_eq!(Some(Length::ZERO), err.position());
+
+ match err.kind() {
+ ErrorKind::Incomplete {
+ expected_len,
+ actual_len,
+ } => {
+ assert_eq!(actual_len, 0u8.into());
+ assert_eq!(expected_len, 1u8.into());
+ }
+ other => panic!("unexpected error kind: {:?}", other),
+ }
+ }
+
+ #[test]
+ fn invalid_field_length() {
+ const MSG_LEN: usize = 2;
+
+ let mut reader = SliceReader::new(&EXAMPLE_MSG[..MSG_LEN]).unwrap();
+ let err = i8::decode(&mut reader).err().unwrap();
+ assert_eq!(Some(Length::from(2u8)), err.position());
+
+ match err.kind() {
+ ErrorKind::Incomplete {
+ expected_len,
+ actual_len,
+ } => {
+ assert_eq!(actual_len, MSG_LEN.try_into().unwrap());
+ assert_eq!(expected_len, (MSG_LEN + 1).try_into().unwrap());
+ }
+ other => panic!("unexpected error kind: {:?}", other),
+ }
+ }
+
+ #[test]
+ fn trailing_data() {
+ let mut reader = SliceReader::new(EXAMPLE_MSG).unwrap();
+ let x = i8::decode(&mut reader).unwrap();
+ assert_eq!(42i8, x);
+
+ let err = reader.finish(x).err().unwrap();
+ assert_eq!(Some(Length::from(3u8)), err.position());
+
+ assert_eq!(
+ ErrorKind::TrailingData {
+ decoded: 3u8.into(),
+ remaining: 1u8.into()
+ },
+ err.kind()
+ );
+ }
+
+ #[test]
+ fn peek_tag() {
+ let reader = SliceReader::new(EXAMPLE_MSG).unwrap();
+ assert_eq!(reader.position(), Length::ZERO);
+ assert_eq!(reader.peek_tag().unwrap(), Tag::Integer);
+ assert_eq!(reader.position(), Length::ZERO); // Position unchanged
+ }
+
+ #[test]
+ fn peek_header() {
+ let reader = SliceReader::new(EXAMPLE_MSG).unwrap();
+ assert_eq!(reader.position(), Length::ZERO);
+
+ let header = reader.peek_header().unwrap();
+ assert_eq!(header.tag, Tag::Integer);
+ assert_eq!(header.length, Length::ONE);
+ assert_eq!(reader.position(), Length::ZERO); // Position unchanged
+ }
+}
diff --git a/src/str_slice.rs b/src/str_slice.rs
new file mode 100644
index 0000000..0016e95
--- /dev/null
+++ b/src/str_slice.rs
@@ -0,0 +1,79 @@
+//! Common handling for types backed by `str` slices with enforcement of a
+//! library-level length limitation, i.e. `Length::MAX`.
+
+use crate::{ByteSlice, DecodeValue, EncodeValue, Header, Length, Reader, Result, Writer};
+use core::str;
+
+/// String slice newtype which respects the [`Length::MAX`] limit.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub(crate) struct StrSlice<'a> {
+ /// Inner value
+ pub(crate) inner: &'a str,
+
+ /// Precomputed `Length` (avoids possible panicking conversions)
+ pub(crate) length: Length,
+}
+
+impl<'a> StrSlice<'a> {
+ /// Create a new [`StrSlice`], ensuring that the byte representation of
+ /// the provided `str` value does not exceed `Length::MAX`.
+ pub fn new(s: &'a str) -> Result<Self> {
+ Ok(Self {
+ inner: s,
+ length: Length::try_from(s.as_bytes().len())?,
+ })
+ }
+
+ /// Parse a [`StrSlice`] from UTF-8 encoded bytes.
+ pub fn from_bytes(bytes: &'a [u8]) -> Result<Self> {
+ Self::new(str::from_utf8(bytes)?)
+ }
+
+ /// Borrow the inner `str`
+ pub fn as_str(&self) -> &'a str {
+ self.inner
+ }
+
+ /// Borrow the inner byte slice
+ pub fn as_bytes(&self) -> &'a [u8] {
+ self.inner.as_bytes()
+ }
+
+ /// Get the [`Length`] of this [`StrSlice`]
+ pub fn len(self) -> Length {
+ self.length
+ }
+
+ /// Is this [`StrSlice`] empty?
+ pub fn is_empty(self) -> bool {
+ self.len() == Length::ZERO
+ }
+}
+
+impl AsRef<str> for StrSlice<'_> {
+ fn as_ref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl AsRef<[u8]> for StrSlice<'_> {
+ fn as_ref(&self) -> &[u8] {
+ self.as_bytes()
+ }
+}
+
+impl<'a> DecodeValue<'a> for StrSlice<'a> {
+ fn decode_value<R: Reader<'a>>(reader: &mut R, header: Header) -> Result<Self> {
+ Self::from_bytes(ByteSlice::decode_value(reader, header)?.as_slice())
+ }
+}
+
+impl<'a> EncodeValue for StrSlice<'a> {
+ fn value_len(&self) -> Result<Length> {
+ Ok(self.length)
+ }
+
+ fn encode_value(&self, writer: &mut dyn Writer) -> Result<()> {
+ writer.write(self.as_ref())
+ }
+}
diff --git a/src/tag.rs b/src/tag.rs
new file mode 100644
index 0000000..4906abb
--- /dev/null
+++ b/src/tag.rs
@@ -0,0 +1,444 @@
+//! ASN.1 tags.
+
+mod class;
+mod mode;
+mod number;
+
+pub use self::{class::Class, mode::TagMode, number::TagNumber};
+
+use crate::{Decode, DerOrd, Encode, Error, ErrorKind, Length, Reader, Result, Writer};
+use core::{cmp::Ordering, fmt};
+
+/// Indicator bit for constructed form encoding (i.e. vs primitive form)
+const CONSTRUCTED_FLAG: u8 = 0b100000;
+
+/// Types which have a constant ASN.1 [`Tag`].
+pub trait FixedTag {
+ /// ASN.1 tag
+ const TAG: Tag;
+}
+
+/// Types which have an ASN.1 [`Tag`].
+pub trait Tagged {
+ /// Get the ASN.1 tag that this type is encoded with.
+ fn tag(&self) -> Tag;
+}
+
+/// Types which are [`FixedTag`] always have a known [`Tag`] type.
+impl<T: FixedTag> Tagged for T {
+ fn tag(&self) -> Tag {
+ T::TAG
+ }
+}
+
+/// ASN.1 tags.
+///
+/// Tags are the leading identifier octet of the Tag-Length-Value encoding
+/// used by ASN.1 DER and identify the type of the subsequent value.
+///
+/// They are described in X.690 Section 8.1.2: Identifier octets, and
+/// structured as follows:
+///
+/// ```text
+/// | Class | P/C | Tag Number |
+/// ```
+///
+/// - Bits 8/7: [`Class`]
+/// - Bit 6: primitive (0) or constructed (1)
+/// - Bits 5-1: tag number
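+///
+/// For example, a small sketch of the identifier octet layout described above:
+///
+/// ```
+/// use der::Tag;
+///
+/// // `SEQUENCE` is universal class, constructed, tag number 16 => octet 0x30
+/// assert_eq!(Tag::Sequence.octet(), 0x30);
+/// assert_eq!(Tag::try_from(0x30u8).unwrap(), Tag::Sequence);
+/// ```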
+#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord)]
+#[non_exhaustive]
+pub enum Tag {
+ /// `BOOLEAN` tag: `1`.
+ Boolean,
+
+ /// `INTEGER` tag: `2`.
+ Integer,
+
+ /// `BIT STRING` tag: `3`.
+ BitString,
+
+ /// `OCTET STRING` tag: `4`.
+ OctetString,
+
+ /// `NULL` tag: `5`.
+ Null,
+
+ /// `OBJECT IDENTIFIER` tag: `6`.
+ ObjectIdentifier,
+
+ /// `REAL` tag: `9`.
+ Real,
+
+ /// `ENUMERATED` tag: `10`.
+ Enumerated,
+
+ /// `UTF8String` tag: `12`.
+ Utf8String,
+
+ /// `SEQUENCE` tag: `16`.
+ Sequence,
+
+ /// `SET` and `SET OF` tag: `17`.
+ Set,
+
+ /// `NumericString` tag: `18`.
+ NumericString,
+
+ /// `PrintableString` tag: `19`.
+ PrintableString,
+
+ /// `IA5String` tag: `22`.
+ Ia5String,
+
+ /// `UTCTime` tag: `23`.
+ UtcTime,
+
+ /// `GeneralizedTime` tag: `24`.
+ GeneralizedTime,
+
+ /// `VisibleString` tag: `26`.
+ VisibleString,
+
+ /// `BMPString` tag: `30`.
+ BmpString,
+
+ /// Application tag.
+ Application {
+ /// Is this tag constructed? (vs primitive).
+ constructed: bool,
+
+ /// Tag number.
+ number: TagNumber,
+ },
+
+ /// Context-specific tag.
+ ContextSpecific {
+ /// Is this tag constructed? (vs primitive).
+ constructed: bool,
+
+ /// Tag number.
+ number: TagNumber,
+ },
+
+ /// Private tag number.
+ Private {
+ /// Is this tag constructed? (vs primitive).
+ constructed: bool,
+
+ /// Tag number.
+ number: TagNumber,
+ },
+}
+
+impl Tag {
+ /// Assert that this [`Tag`] matches the provided expected tag.
+ ///
+ /// On mismatch, returns an [`Error`] with [`ErrorKind::TagUnexpected`].
+ pub fn assert_eq(self, expected: Tag) -> Result<Tag> {
+ if self == expected {
+ Ok(self)
+ } else {
+ Err(self.unexpected_error(Some(expected)))
+ }
+ }
+
+ /// Get the [`Class`] that corresponds to this [`Tag`].
+ pub fn class(self) -> Class {
+ match self {
+ Tag::Application { .. } => Class::Application,
+ Tag::ContextSpecific { .. } => Class::ContextSpecific,
+ Tag::Private { .. } => Class::Private,
+ _ => Class::Universal,
+ }
+ }
+
+ /// Get the [`TagNumber`] (lower 5 bits) for this tag.
+ pub fn number(self) -> TagNumber {
+ TagNumber(self.octet() & TagNumber::MASK)
+ }
+
+ /// Does this tag represent a constructed (as opposed to primitive) field?
+ pub fn is_constructed(self) -> bool {
+ self.octet() & CONSTRUCTED_FLAG != 0
+ }
+
+ /// Is this an application tag?
+ pub fn is_application(self) -> bool {
+ self.class() == Class::Application
+ }
+
+ /// Is this a context-specific tag?
+ pub fn is_context_specific(self) -> bool {
+ self.class() == Class::ContextSpecific
+ }
+
+ /// Is this a private tag?
+ pub fn is_private(self) -> bool {
+ self.class() == Class::Private
+ }
+
+ /// Is this a universal tag?
+ pub fn is_universal(self) -> bool {
+ self.class() == Class::Universal
+ }
+
+ /// Get the octet encoding for this [`Tag`].
+ pub fn octet(self) -> u8 {
+ match self {
+ Tag::Boolean => 0x01,
+ Tag::Integer => 0x02,
+ Tag::BitString => 0x03,
+ Tag::OctetString => 0x04,
+ Tag::Null => 0x05,
+ Tag::ObjectIdentifier => 0x06,
+ Tag::Real => 0x09,
+ Tag::Enumerated => 0x0A,
+ Tag::Utf8String => 0x0C,
+ Tag::Sequence => 0x10 | CONSTRUCTED_FLAG,
+ Tag::Set => 0x11 | CONSTRUCTED_FLAG,
+ Tag::NumericString => 0x12,
+ Tag::PrintableString => 0x13,
+ Tag::Ia5String => 0x16,
+ Tag::UtcTime => 0x17,
+ Tag::GeneralizedTime => 0x18,
+ Tag::VisibleString => 0x1A,
+ Tag::BmpString => 0x1D,
+ Tag::Application {
+ constructed,
+ number,
+ }
+ | Tag::ContextSpecific {
+ constructed,
+ number,
+ }
+ | Tag::Private {
+ constructed,
+ number,
+ } => self.class().octet(constructed, number),
+ }
+ }
+
+ /// Create an [`Error`] for an invalid [`Length`].
+ pub fn length_error(self) -> Error {
+ ErrorKind::Length { tag: self }.into()
+ }
+
+ /// Create an [`Error`] for a non-canonical value with the ASN.1 type
+ /// identified by this tag.
+ pub fn non_canonical_error(self) -> Error {
+ ErrorKind::Noncanonical { tag: self }.into()
+ }
+
+ /// Create an [`Error`] because the current tag was unexpected, with an
+ /// optional expected tag.
+ pub fn unexpected_error(self, expected: Option<Self>) -> Error {
+ ErrorKind::TagUnexpected {
+ expected,
+ actual: self,
+ }
+ .into()
+ }
+
+ /// Create an [`Error`] for an invalid value with the ASN.1 type identified
+ /// by this tag.
+ pub fn value_error(self) -> Error {
+ ErrorKind::Value { tag: self }.into()
+ }
+}
+
+impl TryFrom<u8> for Tag {
+ type Error = Error;
+
+ fn try_from(byte: u8) -> Result<Tag> {
+ let constructed = byte & CONSTRUCTED_FLAG != 0;
+ let number = TagNumber::try_from(byte & TagNumber::MASK)?;
+
+ match byte {
+ 0x01 => Ok(Tag::Boolean),
+ 0x02 => Ok(Tag::Integer),
+ 0x03 => Ok(Tag::BitString),
+ 0x04 => Ok(Tag::OctetString),
+ 0x05 => Ok(Tag::Null),
+ 0x06 => Ok(Tag::ObjectIdentifier),
+ 0x09 => Ok(Tag::Real),
+ 0x0A => Ok(Tag::Enumerated),
+ 0x0C => Ok(Tag::Utf8String),
+ 0x12 => Ok(Tag::NumericString),
+ 0x13 => Ok(Tag::PrintableString),
+ 0x16 => Ok(Tag::Ia5String),
+ 0x17 => Ok(Tag::UtcTime),
+ 0x18 => Ok(Tag::GeneralizedTime),
+ 0x1A => Ok(Tag::VisibleString),
+ 0x1d => Ok(Tag::BmpString),
+ 0x30 => Ok(Tag::Sequence), // constructed
+ 0x31 => Ok(Tag::Set), // constructed
+ 0x40..=0x7E => Ok(Tag::Application {
+ constructed,
+ number,
+ }),
+ 0x80..=0xBE => Ok(Tag::ContextSpecific {
+ constructed,
+ number,
+ }),
+ 0xC0..=0xFE => Ok(Tag::Private {
+ constructed,
+ number,
+ }),
+ _ => Err(ErrorKind::TagUnknown { byte }.into()),
+ }
+ }
+}
+
+impl From<Tag> for u8 {
+ fn from(tag: Tag) -> u8 {
+ tag.octet()
+ }
+}
+
+impl From<&Tag> for u8 {
+ fn from(tag: &Tag) -> u8 {
+ u8::from(*tag)
+ }
+}
+
+impl<'a> Decode<'a> for Tag {
+ fn decode<R: Reader<'a>>(reader: &mut R) -> Result<Self> {
+ reader.read_byte().and_then(Self::try_from)
+ }
+}
+
+impl Encode for Tag {
+ fn encoded_len(&self) -> Result<Length> {
+ Ok(Length::ONE)
+ }
+
+ fn encode(&self, writer: &mut dyn Writer) -> Result<()> {
+ writer.write_byte(self.into())
+ }
+}
+
+impl DerOrd for Tag {
+ fn der_cmp(&self, other: &Self) -> Result<Ordering> {
+ Ok(self.octet().cmp(&other.octet()))
+ }
+}
+
+impl fmt::Display for Tag {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ const FIELD_TYPE: [&str; 2] = ["primitive", "constructed"];
+
+ match *self {
+ Tag::Boolean => f.write_str("BOOLEAN"),
+ Tag::Integer => f.write_str("INTEGER"),
+ Tag::BitString => f.write_str("BIT STRING"),
+ Tag::OctetString => f.write_str("OCTET STRING"),
+ Tag::Null => f.write_str("NULL"),
+ Tag::ObjectIdentifier => f.write_str("OBJECT IDENTIFIER"),
+ Tag::Real => f.write_str("REAL"),
+ Tag::Enumerated => f.write_str("ENUMERATED"),
+ Tag::Utf8String => f.write_str("UTF8String"),
+ Tag::Set => f.write_str("SET"),
+ Tag::NumericString => f.write_str("NumericString"),
+ Tag::PrintableString => f.write_str("PrintableString"),
+ Tag::Ia5String => f.write_str("IA5String"),
+ Tag::UtcTime => f.write_str("UTCTime"),
+ Tag::GeneralizedTime => f.write_str("GeneralizedTime"),
+ Tag::VisibleString => f.write_str("VisibleString"),
+ Tag::BmpString => f.write_str("BMPString"),
+ Tag::Sequence => f.write_str("SEQUENCE"),
+ Tag::Application {
+ constructed,
+ number,
+ } => write!(
+ f,
+ "APPLICATION [{}] ({})",
+ number,
+ FIELD_TYPE[usize::from(constructed)]
+ ),
+ Tag::ContextSpecific {
+ constructed,
+ number,
+ } => write!(
+ f,
+ "CONTEXT-SPECIFIC [{}] ({})",
+ number,
+ FIELD_TYPE[usize::from(constructed)]
+ ),
+ Tag::Private {
+ constructed,
+ number,
+ } => write!(
+ f,
+ "PRIVATE [{}] ({})",
+ number,
+ FIELD_TYPE[usize::from(constructed)]
+ ),
+ }
+ }
+}
+
+impl fmt::Debug for Tag {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "Tag(0x{:02x}: {})", u8::from(*self), self)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::TagNumber;
+ use super::{Class, Tag};
+
+ #[test]
+ fn tag_class() {
+ assert_eq!(Tag::Boolean.class(), Class::Universal);
+ assert_eq!(Tag::Integer.class(), Class::Universal);
+ assert_eq!(Tag::BitString.class(), Class::Universal);
+ assert_eq!(Tag::OctetString.class(), Class::Universal);
+ assert_eq!(Tag::Null.class(), Class::Universal);
+ assert_eq!(Tag::ObjectIdentifier.class(), Class::Universal);
+ assert_eq!(Tag::Real.class(), Class::Universal);
+ assert_eq!(Tag::Enumerated.class(), Class::Universal);
+ assert_eq!(Tag::Utf8String.class(), Class::Universal);
+ assert_eq!(Tag::Set.class(), Class::Universal);
+ assert_eq!(Tag::NumericString.class(), Class::Universal);
+ assert_eq!(Tag::PrintableString.class(), Class::Universal);
+ assert_eq!(Tag::Ia5String.class(), Class::Universal);
+ assert_eq!(Tag::UtcTime.class(), Class::Universal);
+ assert_eq!(Tag::GeneralizedTime.class(), Class::Universal);
+ assert_eq!(Tag::Sequence.class(), Class::Universal);
+
+ for num in 0..=30 {
+ for &constructed in &[false, true] {
+ let number = TagNumber::new(num);
+
+ assert_eq!(
+ Tag::Application {
+ constructed,
+ number
+ }
+ .class(),
+ Class::Application
+ );
+
+ assert_eq!(
+ Tag::ContextSpecific {
+ constructed,
+ number
+ }
+ .class(),
+ Class::ContextSpecific
+ );
+
+ assert_eq!(
+ Tag::Private {
+ constructed,
+ number
+ }
+ .class(),
+ Class::Private
+ );
+ }
+ }
+ }
+}
diff --git a/src/tag/class.rs b/src/tag/class.rs
new file mode 100644
index 0000000..8a3e2ed
--- /dev/null
+++ b/src/tag/class.rs
@@ -0,0 +1,50 @@
+//! Class of an ASN.1 tag.
+
+use super::{TagNumber, CONSTRUCTED_FLAG};
+use core::fmt;
+
+/// Class of an ASN.1 tag.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+#[repr(u8)]
+pub enum Class {
+ /// `UNIVERSAL`: built-in types whose meaning is the same in all
+ /// applications.
+ Universal = 0b00000000,
+
+ /// `APPLICATION`: types whose meaning is specific to an application.
+ ///
+ /// Types in two different applications may have the same
+ /// application-specific tag and different meanings.
+ Application = 0b01000000,
+
+ /// `CONTEXT-SPECIFIC`: types whose meaning is specific to a given
+ /// structured type.
+ ///
+ /// Context-specific tags are used to distinguish between component types
+ /// with the same underlying tag within the context of a given structured
+ /// type, and component types in two different structured types may have
+ /// the same tag and different meanings.
+ ContextSpecific = 0b10000000,
+
+ /// `PRIVATE`: types whose meaning is specific to a given enterprise.
+ Private = 0b11000000,
+}
+
+impl Class {
+ /// Compute the identifier octet for a tag number of this class.
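+ ///
+ /// For example, a constructed `CONTEXT-SPECIFIC [0]` tag computes to
+ /// `0b1000_0000 | 0b0010_0000 | 0 = 0xA0`.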
+ #[allow(clippy::integer_arithmetic)]
+ pub(super) fn octet(self, constructed: bool, number: TagNumber) -> u8 {
+ self as u8 | number.value() | (u8::from(constructed) * CONSTRUCTED_FLAG)
+ }
+}
+
+impl fmt::Display for Class {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(match self {
+ Class::Universal => "UNIVERSAL",
+ Class::Application => "APPLICATION",
+ Class::ContextSpecific => "CONTEXT-SPECIFIC",
+ Class::Private => "PRIVATE",
+ })
+ }
+}
diff --git a/src/tag/mode.rs b/src/tag/mode.rs
new file mode 100644
index 0000000..892ce3d
--- /dev/null
+++ b/src/tag/mode.rs
@@ -0,0 +1,45 @@
+//! Tag modes.
+
+use crate::{Error, ErrorKind, Result};
+use core::{fmt, str::FromStr};
+
+/// Tagging modes: `EXPLICIT` versus `IMPLICIT`.
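+///
+/// A small usage sketch of the [`FromStr`] impl defined below (only the
+/// `"EXPLICIT"`/`"IMPLICIT"` spellings, all upper- or all lower-case, are
+/// accepted):
+///
+/// ```
+/// use der::TagMode;
+///
+/// let mode: TagMode = "IMPLICIT".parse().unwrap();
+/// assert_eq!(mode, TagMode::Implicit);
+/// ```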
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub enum TagMode {
+ /// `EXPLICIT` tagging.
+ ///
+ /// Tag is added in addition to the inner tag of the type.
+ Explicit,
+
+ /// `IMPLICIT` tagging.
+ ///
+ /// Tag replaces the existing tag of the inner type.
+ Implicit,
+}
+
+impl Default for TagMode {
+ fn default() -> TagMode {
+ TagMode::Explicit
+ }
+}
+
+impl FromStr for TagMode {
+ type Err = Error;
+
+ fn from_str(s: &str) -> Result<Self> {
+ match s {
+ "EXPLICIT" | "explicit" => Ok(TagMode::Explicit),
+ "IMPLICIT" | "implicit" => Ok(TagMode::Implicit),
+ _ => Err(ErrorKind::TagModeUnknown.into()),
+ }
+ }
+}
+
+impl fmt::Display for TagMode {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ TagMode::Explicit => f.write_str("EXPLICIT"),
+ TagMode::Implicit => f.write_str("IMPLICIT"),
+ }
+ }
+}
diff --git a/src/tag/number.rs b/src/tag/number.rs
new file mode 100644
index 0000000..1930e94
--- /dev/null
+++ b/src/tag/number.rs
@@ -0,0 +1,188 @@
+//! ASN.1 tag numbers
+
+use super::Tag;
+use crate::{Error, ErrorKind, Result};
+use core::fmt;
+
+/// ASN.1 tag numbers (i.e. lower 5 bits of a [`Tag`]).
+///
+/// From X.690 Section 8.1.2.2:
+///
+/// > bits 5 to 1 shall encode the number of the tag as a binary integer with
+/// > bit 5 as the most significant bit.
+///
+/// This library supports tag numbers ranging from zero to 30 (inclusive),
+/// which can be represented as a single identifier octet.
+///
+/// Section 8.1.2.4 describes how to support multi-byte tag numbers, which are
+/// encoded by using a leading tag number of 31 (`0b11111`). This library
+/// deliberately does not support this: tag numbers greater than 30 are
+/// disallowed.
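+///
+/// A brief sketch of constructing and validating tag numbers:
+///
+/// ```
+/// use der::TagNumber;
+///
+/// let number = TagNumber::new(0);
+/// assert_eq!(number.value(), 0);
+///
+/// // Tag numbers above 30 are rejected by the fallible conversion.
+/// assert!(TagNumber::try_from(31u8).is_err());
+/// ```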
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)]
+pub struct TagNumber(pub(super) u8);
+
+impl TagNumber {
+ /// Tag number `0`
+ pub const N0: Self = Self(0);
+
+ /// Tag number `1`
+ pub const N1: Self = Self(1);
+
+ /// Tag number `2`
+ pub const N2: Self = Self(2);
+
+ /// Tag number `3`
+ pub const N3: Self = Self(3);
+
+ /// Tag number `4`
+ pub const N4: Self = Self(4);
+
+ /// Tag number `5`
+ pub const N5: Self = Self(5);
+
+ /// Tag number `6`
+ pub const N6: Self = Self(6);
+
+ /// Tag number `7`
+ pub const N7: Self = Self(7);
+
+ /// Tag number `8`
+ pub const N8: Self = Self(8);
+
+ /// Tag number `9`
+ pub const N9: Self = Self(9);
+
+ /// Tag number `10`
+ pub const N10: Self = Self(10);
+
+ /// Tag number `11`
+ pub const N11: Self = Self(11);
+
+ /// Tag number `12`
+ pub const N12: Self = Self(12);
+
+ /// Tag number `13`
+ pub const N13: Self = Self(13);
+
+ /// Tag number `14`
+ pub const N14: Self = Self(14);
+
+ /// Tag number `15`
+ pub const N15: Self = Self(15);
+
+ /// Tag number `16`
+ pub const N16: Self = Self(16);
+
+ /// Tag number `17`
+ pub const N17: Self = Self(17);
+
+ /// Tag number `18`
+ pub const N18: Self = Self(18);
+
+ /// Tag number `19`
+ pub const N19: Self = Self(19);
+
+ /// Tag number `20`
+ pub const N20: Self = Self(20);
+
+ /// Tag number `21`
+ pub const N21: Self = Self(21);
+
+ /// Tag number `22`
+ pub const N22: Self = Self(22);
+
+ /// Tag number `23`
+ pub const N23: Self = Self(23);
+
+ /// Tag number `24`
+ pub const N24: Self = Self(24);
+
+ /// Tag number `25`
+ pub const N25: Self = Self(25);
+
+ /// Tag number `26`
+ pub const N26: Self = Self(26);
+
+ /// Tag number `27`
+ pub const N27: Self = Self(27);
+
+ /// Tag number `28`
+ pub const N28: Self = Self(28);
+
+ /// Tag number `29`
+ pub const N29: Self = Self(29);
+
+ /// Tag number `30`
+ pub const N30: Self = Self(30);
+
+ /// Mask value used to obtain the tag number from a tag octet.
+ pub(super) const MASK: u8 = 0b11111;
+
+ /// Maximum tag number supported (inclusive).
+ const MAX: u8 = 30;
+
+ /// Create a new tag number (const-friendly).
+ ///
+ /// Panics if the tag number is greater than `30`.
+ /// For a fallible conversion, use [`TryFrom`] instead.
+ pub const fn new(byte: u8) -> Self {
+ #[allow(clippy::panic)]
+ if byte > Self::MAX {
+ panic!("tag number out of range");
+ }
+
+ Self(byte)
+ }
+
+ /// Create an `APPLICATION` tag with this tag number.
+ pub fn application(self, constructed: bool) -> Tag {
+ Tag::Application {
+ constructed,
+ number: self,
+ }
+ }
+
+ /// Create a `CONTEXT-SPECIFIC` tag with this tag number.
+ pub fn context_specific(self, constructed: bool) -> Tag {
+ Tag::ContextSpecific {
+ constructed,
+ number: self,
+ }
+ }
+
+ /// Create a `PRIVATE` tag with this tag number.
+ pub fn private(self, constructed: bool) -> Tag {
+ Tag::Private {
+ constructed,
+ number: self,
+ }
+ }
+
+ /// Get the inner value.
+ pub fn value(self) -> u8 {
+ self.0
+ }
+}
+
+impl TryFrom<u8> for TagNumber {
+ type Error = Error;
+
+ fn try_from(byte: u8) -> Result<Self> {
+ match byte {
+ 0..=Self::MAX => Ok(Self(byte)),
+ _ => Err(ErrorKind::TagNumberInvalid.into()),
+ }
+ }
+}
+
+impl From<TagNumber> for u8 {
+ fn from(tag_number: TagNumber) -> u8 {
+ tag_number.0
+ }
+}
+
+impl fmt::Display for TagNumber {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self.0)
+ }
+}
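For orientation, a minimal usage sketch of the `TagNumber` API added above. Illustrative only; it assumes `Tag` and `TagNumber` are publicly re-exported at the crate root, as the writer module's imports later in this change suggest.

```rust
use der::{Tag, TagNumber};

// Const-friendly constructor: panics if the number exceeds 30.
const N1: TagNumber = TagNumber::new(1);

fn tag_number_demo() {
    // Fallible conversion for untrusted input; numbers above 30 are rejected.
    assert!(TagNumber::try_from(31u8).is_err());

    // Build a constructed CONTEXT-SPECIFIC tag such as `[1]`.
    let tag = N1.context_specific(true);
    assert_eq!(tag, Tag::ContextSpecific { constructed: true, number: N1 });
    assert_eq!(N1.value(), 1);
}
```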
diff --git a/src/writer.rs b/src/writer.rs
new file mode 100644
index 0000000..5e97744
--- /dev/null
+++ b/src/writer.rs
@@ -0,0 +1,30 @@
+//! Writer trait.
+
+#[cfg(feature = "pem")]
+pub(crate) mod pem;
+pub(crate) mod slice;
+
+use crate::Result;
+
+#[cfg(feature = "std")]
+use std::io;
+
+/// Writer trait which outputs encoded DER.
+pub trait Writer {
+ /// Write the given DER-encoded bytes as output.
+ fn write(&mut self, slice: &[u8]) -> Result<()>;
+
+ /// Write a single byte.
+ fn write_byte(&mut self, byte: u8) -> Result<()> {
+ self.write(&[byte])
+ }
+}
+
+#[cfg(feature = "std")]
+#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
+impl<W: io::Write> Writer for W {
+ fn write(&mut self, slice: &[u8]) -> Result<()> {
+ <Self as io::Write>::write_all(self, slice)?;
+ Ok(())
+ }
+}
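As a quick illustration of the trait above: with the crate's `std` feature enabled, any `io::Write` implementor (here a `Vec<u8>`) can serve as a DER `Writer` through the blanket impl. A sketch, not part of the change itself:

```rust
use der::Writer; // brings `write` and the default `write_byte` into scope

fn writer_demo() -> der::Result<()> {
    let mut out: Vec<u8> = Vec::new();
    out.write(&[0x01, 0x01])?; // BOOLEAN header: tag + length
    out.write_byte(0xff)?;     // value TRUE, via the trait's default method
    assert_eq!(out, [0x01, 0x01, 0xff]);
    Ok(())
}
```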
diff --git a/src/writer/pem.rs b/src/writer/pem.rs
new file mode 100644
index 0000000..c554632
--- /dev/null
+++ b/src/writer/pem.rs
@@ -0,0 +1,42 @@
+//! Streaming PEM writer.
+
+use super::Writer;
+use crate::Result;
+use pem_rfc7468::{Encoder, LineEnding};
+
+/// `Writer` type which outputs PEM-encoded data.
+#[cfg_attr(docsrs, doc(cfg(feature = "pem")))]
+pub struct PemWriter<'w>(Encoder<'static, 'w>);
+
+impl<'w> PemWriter<'w> {
+ /// Create a new PEM writer which outputs into the provided buffer.
+ ///
+ /// Uses the default 64-character line wrapping.
+ pub fn new(
+ type_label: &'static str,
+ line_ending: LineEnding,
+ out: &'w mut [u8],
+ ) -> Result<Self> {
+ Ok(Self(Encoder::new(type_label, line_ending, out)?))
+ }
+
+ /// Get the PEM label which will be used in the encapsulation boundaries
+ /// for this document.
+ pub fn type_label(&self) -> &'static str {
+ self.0.type_label()
+ }
+
+ /// Finish encoding PEM, writing the post-encapsulation boundary.
+ ///
+ /// On success, returns the total number of bytes written to the output buffer.
+ pub fn finish(self) -> Result<usize> {
+ Ok(self.0.finish()?)
+ }
+}
+
+impl Writer for PemWriter<'_> {
+ fn write(&mut self, slice: &[u8]) -> Result<()> {
+ self.0.encode(slice)?;
+ Ok(())
+ }
+}
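A hedged usage sketch of `PemWriter`: it assumes the type and `LineEnding` are re-exported as `der::PemWriter` and `der::pem::LineEnding` when the `pem` feature is enabled (the PEM tests later in this change import `der::pem::LineEnding` the same way).

```rust
use der::{pem::LineEnding, PemWriter, Writer};

fn pem_demo() -> der::Result<()> {
    let mut buf = [0u8; 256];
    let mut pem = PemWriter::new("EXAMPLE", LineEnding::LF, &mut buf)?;
    assert_eq!(pem.type_label(), "EXAMPLE");

    // DER bytes fed through `Writer::write` are Base64-encoded between the
    // encapsulation boundaries; `finish` writes the trailing boundary.
    pem.write(&[0x30, 0x03, 0x02, 0x01, 0x2a])?; // SEQUENCE { INTEGER 42 }
    let written = pem.finish()?;
    assert!(written > 0);
    Ok(())
}
```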
diff --git a/src/writer/slice.rs b/src/writer/slice.rs
new file mode 100644
index 0000000..5dafe99
--- /dev/null
+++ b/src/writer/slice.rs
@@ -0,0 +1,149 @@
+//! Slice writer.
+
+use crate::{
+ asn1::*, Encode, EncodeValue, ErrorKind, Header, Length, Result, Tag, TagMode, TagNumber,
+ Tagged, Writer,
+};
+
+/// [`Writer`] which encodes DER into a mutable output byte slice.
+#[derive(Debug)]
+pub struct SliceWriter<'a> {
+ /// Buffer into which DER-encoded message is written
+ bytes: &'a mut [u8],
+
+ /// Has the encoding operation failed?
+ failed: bool,
+
+ /// Total number of bytes written to buffer so far
+ position: Length,
+}
+
+impl<'a> SliceWriter<'a> {
+ /// Create a new encoder with the given byte slice as a backing buffer.
+ pub fn new(bytes: &'a mut [u8]) -> Self {
+ Self {
+ bytes,
+ failed: false,
+ position: Length::ZERO,
+ }
+ }
+
+ /// Encode a value which impls the [`Encode`] trait.
+ pub fn encode<T: Encode>(&mut self, encodable: &T) -> Result<()> {
+ if self.is_failed() {
+ self.error(ErrorKind::Failed)?;
+ }
+
+ encodable.encode(self).map_err(|e| {
+ self.failed = true;
+ e.nested(self.position)
+ })
+ }
+
+ /// Return an error with the given [`ErrorKind`], annotating it with
+ /// context about where the error occurred.
+ pub fn error<T>(&mut self, kind: ErrorKind) -> Result<T> {
+ self.failed = true;
+ Err(kind.at(self.position))
+ }
+
+ /// Did the encoding operation fail due to an error?
+ pub fn is_failed(&self) -> bool {
+ self.failed
+ }
+
+ /// Finish encoding to the buffer, returning a slice containing the data
+ /// written to the buffer.
+ pub fn finish(self) -> Result<&'a [u8]> {
+ let position = self.position;
+
+ if self.is_failed() {
+ return Err(ErrorKind::Failed.at(position));
+ }
+
+ self.bytes
+ .get(..usize::try_from(position)?)
+ .ok_or_else(|| ErrorKind::Overlength.at(position))
+ }
+
+ /// Encode a `CONTEXT-SPECIFIC` field with the provided tag number and mode.
+ pub fn context_specific<T>(
+ &mut self,
+ tag_number: TagNumber,
+ tag_mode: TagMode,
+ value: &T,
+ ) -> Result<()>
+ where
+ T: EncodeValue + Tagged,
+ {
+ ContextSpecificRef {
+ tag_number,
+ tag_mode,
+ value,
+ }
+ .encode(self)
+ }
+
+ /// Encode an ASN.1 `SEQUENCE` of the given length.
+ ///
+ /// Spawns a nested slice writer whose output is checked to be exactly the
+ /// specified length upon completion.
+ pub fn sequence<F>(&mut self, length: Length, f: F) -> Result<()>
+ where
+ F: FnOnce(&mut SliceWriter<'_>) -> Result<()>,
+ {
+ Header::new(Tag::Sequence, length).and_then(|header| header.encode(self))?;
+
+ let mut nested_encoder = SliceWriter::new(self.reserve(length)?);
+ f(&mut nested_encoder)?;
+
+ if nested_encoder.finish()?.len() == usize::try_from(length)? {
+ Ok(())
+ } else {
+ self.error(ErrorKind::Length { tag: Tag::Sequence })
+ }
+ }
+
+ /// Reserve a portion of the internal buffer, updating the internal cursor
+ /// position and returning a mutable slice.
+ fn reserve(&mut self, len: impl TryInto<Length>) -> Result<&mut [u8]> {
+ if self.is_failed() {
+ return Err(ErrorKind::Failed.at(self.position));
+ }
+
+ let len = len
+ .try_into()
+ .or_else(|_| self.error(ErrorKind::Overflow))?;
+
+ let end = (self.position + len).or_else(|e| self.error(e.kind()))?;
+ let slice = self
+ .bytes
+ .get_mut(self.position.try_into()?..end.try_into()?)
+ .ok_or_else(|| ErrorKind::Overlength.at(end))?;
+
+ self.position = end;
+ Ok(slice)
+ }
+}
+
+impl<'a> Writer for SliceWriter<'a> {
+ fn write(&mut self, slice: &[u8]) -> Result<()> {
+ self.reserve(slice.len())?.copy_from_slice(slice);
+ Ok(())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::SliceWriter;
+ use crate::{Encode, ErrorKind, Length};
+
+ #[test]
+ fn overlength_message() {
+ let mut buffer = [];
+ let mut writer = SliceWriter::new(&mut buffer);
+ let err = false.encode(&mut writer).err().unwrap();
+ assert_eq!(err.kind(), ErrorKind::Overlength);
+ assert_eq!(err.position(), Some(Length::ONE));
+ }
+}
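To tie the pieces together, here is a small sketch of driving `SliceWriter` by hand, including the nested `sequence` helper; in application code the higher-level `Encode` helpers such as `to_vec` (used in the tests below) are usually more convenient. `encoded_len` is assumed to be provided by the `Encode` trait.

```rust
use der::{Encode, SliceWriter};

fn slice_writer_demo() -> der::Result<()> {
    let mut buf = [0u8; 16];
    let mut writer = SliceWriter::new(&mut buf);

    // Wrap a single BOOLEAN in a SEQUENCE; the nested writer must emit
    // exactly the declared number of bytes or `sequence` reports an error.
    let inner_len = true.encoded_len()?;
    writer.sequence(inner_len, |nested| nested.encode(&true))?;

    let der = writer.finish()?;
    assert_eq!(der, &[0x30, 0x03, 0x01, 0x01, 0xff]);
    Ok(())
}
```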
diff --git a/tests/datetime.proptest-regressions b/tests/datetime.proptest-regressions
new file mode 100644
index 0000000..f280ac4
--- /dev/null
+++ b/tests/datetime.proptest-regressions
@@ -0,0 +1,8 @@
+# Seeds for failure cases proptest has generated in the past. It is
+# automatically read and these particular cases re-run before any
+# novel cases are generated.
+#
+# It is recommended to check this file in to source control so that
+# everyone who runs the test benefits from these saved cases.
+cc 00dbea7e90761c16aa20e2fbf7ffad420da0c84d4ed4e6df123de03c9b4567e5 # shrinks to year = 1970, month = 1, day = 1, hour = 0, min = 60, sec = 0
+cc 3b0bd01ef4cad6bea0a287f9cdcd56bad186125ec388d204f6afcd193ca12c39 # shrinks to year = 1970, month = 1, day = 1, hour = 0, min = 0, sec = 60
diff --git a/tests/datetime.rs b/tests/datetime.rs
new file mode 100644
index 0000000..454c1f0
--- /dev/null
+++ b/tests/datetime.rs
@@ -0,0 +1,64 @@
+//! Tests for the [`DateTime`] type.
+
+use der::{asn1::UtcTime, DateTime, Decode, Encode};
+use proptest::prelude::*;
+
+proptest! {
+ #[test]
+ fn roundtrip_datetime(
+ year in 1970u16..=9999,
+ month in 1u8..=12,
+ day in 1u8..=31,
+ hour in 0u8..=23,
+ min in 0u8..=59,
+ sec in 0u8..=59,
+ ) {
+ let datetime1 = make_datetime(year, month, day, hour, min, sec);
+ let datetime2 = DateTime::from_unix_duration(datetime1.unix_duration()).unwrap();
+ prop_assert_eq!(datetime1, datetime2);
+ }
+
+ #[test]
+ fn roundtrip_utctime(
+ year in 1970u16..=2049,
+ month in 1u8..=12,
+ day in 1u8..=31,
+ hour in 0u8..=23,
+ min in 0u8..=59,
+ sec in 0u8..=59,
+ ) {
+ let datetime = make_datetime(year, month, day, hour, min, sec);
+ let utc_time1 = UtcTime::try_from(datetime).unwrap();
+
+ let mut buf = [0u8; 128];
+ let mut encoder = der::SliceWriter::new(&mut buf);
+ utc_time1.encode(&mut encoder).unwrap();
+ let der_bytes = encoder.finish().unwrap();
+
+ let utc_time2 = UtcTime::from_der(der_bytes).unwrap();
+ prop_assert_eq!(utc_time1, utc_time2);
+ }
+}
+
+fn make_datetime(year: u16, month: u8, day: u8, hour: u8, min: u8, sec: u8) -> DateTime {
+ let max_day = if month == 2 {
+ let is_leap_year = year % 4 == 0 && (year % 100 != 0 || year % 400 == 0);
+
+ if is_leap_year {
+ 29
+ } else {
+ 28
+ }
+ } else {
+ 30
+ };
+
+ let day = if day > max_day { max_day } else { day };
+
+ DateTime::new(year, month, day, hour, min, sec).unwrap_or_else(|e| {
+ panic!(
+ "invalid DateTime: {:02}-{:02}-{:02}T{:02}:{:02}:{:02}: {}",
+ year, month, day, hour, min, sec, e
+ );
+ })
+}
diff --git a/tests/derive.rs b/tests/derive.rs
new file mode 100644
index 0000000..dac14f8
--- /dev/null
+++ b/tests/derive.rs
@@ -0,0 +1,459 @@
+//! Tests for custom derive support.
+//!
+//! # Debugging with `cargo expand`
+//!
+//! To expand the Rust code generated by the proc macro when debugging
+//! issues related to these tests, run:
+//!
+//! $ cargo expand --test derive --all-features
+
+#![cfg(all(feature = "derive", feature = "alloc"))]
+
+/// Custom derive test cases for the `Choice` macro.
+mod choice {
+ /// `Choice` with `EXPLICIT` tagging.
+ mod explicit {
+ use der::{
+ asn1::{GeneralizedTime, UtcTime},
+ Choice, Decode, Encode, SliceWriter,
+ };
+ use hex_literal::hex;
+ use std::time::Duration;
+
+ /// Custom derive test case for the `Choice` macro.
+ ///
+ /// Based on `Time` as defined in RFC 5280:
+ /// <https://tools.ietf.org/html/rfc5280#page-117>
+ ///
+ /// ```text
+ /// Time ::= CHOICE {
+ /// utcTime UTCTime,
+ /// generalTime GeneralizedTime }
+ /// ```
+ #[derive(Choice)]
+ pub enum Time {
+ #[asn1(type = "UTCTime")]
+ UtcTime(UtcTime),
+
+ #[asn1(type = "GeneralizedTime")]
+ GeneralTime(GeneralizedTime),
+ }
+
+ impl Time {
+ fn to_unix_duration(self) -> Duration {
+ match self {
+ Time::UtcTime(t) => t.to_unix_duration(),
+ Time::GeneralTime(t) => t.to_unix_duration(),
+ }
+ }
+ }
+
+ const UTC_TIMESTAMP_DER: &[u8] =
+ &hex!("17 0d 39 31 30 35 30 36 32 33 34 35 34 30 5a");
+ const GENERAL_TIMESTAMP_DER: &[u8] =
+ &hex!("18 0f 31 39 39 31 30 35 30 36 32 33 34 35 34 30 5a");
+
+ #[test]
+ fn decode() {
+ let utc_time = Time::from_der(UTC_TIMESTAMP_DER).unwrap();
+ assert_eq!(utc_time.to_unix_duration().as_secs(), 673573540);
+
+ let general_time = Time::from_der(GENERAL_TIMESTAMP_DER).unwrap();
+ assert_eq!(general_time.to_unix_duration().as_secs(), 673573540);
+ }
+
+ #[test]
+ fn encode() {
+ let mut buf = [0u8; 128];
+
+ let utc_time = Time::from_der(UTC_TIMESTAMP_DER).unwrap();
+ let mut encoder = SliceWriter::new(&mut buf);
+ utc_time.encode(&mut encoder).unwrap();
+ assert_eq!(UTC_TIMESTAMP_DER, encoder.finish().unwrap());
+
+ let general_time = Time::from_der(GENERAL_TIMESTAMP_DER).unwrap();
+ let mut encoder = SliceWriter::new(&mut buf);
+ general_time.encode(&mut encoder).unwrap();
+ assert_eq!(GENERAL_TIMESTAMP_DER, encoder.finish().unwrap());
+ }
+ }
+
+ /// `Choice` with `IMPLICIT` tagging.
+ mod implicit {
+ use der::{
+ asn1::{BitStringRef, GeneralizedTime},
+ Choice, Decode, Encode, SliceWriter,
+ };
+ use hex_literal::hex;
+
+ /// `Choice` macro test case for `IMPLICIT` tagging.
+ #[derive(Choice, Debug, Eq, PartialEq)]
+ #[asn1(tag_mode = "IMPLICIT")]
+ pub enum ImplicitChoice<'a> {
+ #[asn1(context_specific = "0", type = "BIT STRING")]
+ BitString(BitStringRef<'a>),
+
+ #[asn1(context_specific = "1", type = "GeneralizedTime")]
+ Time(GeneralizedTime),
+
+ #[asn1(context_specific = "2", type = "UTF8String")]
+ Utf8String(String),
+ }
+
+ impl<'a> ImplicitChoice<'a> {
+ pub fn bit_string(&self) -> Option<BitStringRef<'a>> {
+ match self {
+ Self::BitString(bs) => Some(*bs),
+ _ => None,
+ }
+ }
+
+ pub fn time(&self) -> Option<GeneralizedTime> {
+ match self {
+ Self::Time(time) => Some(*time),
+ _ => None,
+ }
+ }
+ }
+
+ const BITSTRING_DER: &[u8] = &hex!("80 04 00 01 02 03");
+ const TIME_DER: &[u8] = &hex!("81 0f 31 39 39 31 30 35 30 36 32 33 34 35 34 30 5a");
+
+ #[test]
+ fn decode() {
+ let cs_bit_string = ImplicitChoice::from_der(BITSTRING_DER).unwrap();
+ assert_eq!(
+ cs_bit_string.bit_string().unwrap().as_bytes().unwrap(),
+ &[1, 2, 3]
+ );
+
+ let cs_time = ImplicitChoice::from_der(TIME_DER).unwrap();
+ assert_eq!(
+ cs_time.time().unwrap().to_unix_duration().as_secs(),
+ 673573540
+ );
+ }
+
+ #[test]
+ fn encode() {
+ let mut buf = [0u8; 128];
+
+ let cs_bit_string = ImplicitChoice::from_der(BITSTRING_DER).unwrap();
+ let mut encoder = SliceWriter::new(&mut buf);
+ cs_bit_string.encode(&mut encoder).unwrap();
+ assert_eq!(BITSTRING_DER, encoder.finish().unwrap());
+
+ let cs_time = ImplicitChoice::from_der(TIME_DER).unwrap();
+ let mut encoder = SliceWriter::new(&mut buf);
+ cs_time.encode(&mut encoder).unwrap();
+ assert_eq!(TIME_DER, encoder.finish().unwrap());
+ }
+ }
+}
+
+/// Custom derive test cases for the `Enumerated` macro.
+mod enumerated {
+ use der::{Decode, Encode, Enumerated, SliceWriter};
+ use hex_literal::hex;
+
+ /// X.509 `CRLReason`.
+ #[derive(Enumerated, Copy, Clone, Debug, Eq, PartialEq)]
+ #[repr(u32)]
+ pub enum CrlReason {
+ Unspecified = 0,
+ KeyCompromise = 1,
+ CaCompromise = 2,
+ AffiliationChanged = 3,
+ Superseded = 4,
+ CessationOfOperation = 5,
+ CertificateHold = 6,
+ RemoveFromCrl = 8,
+ PrivilegeWithdrawn = 9,
+ AaCompromised = 10,
+ }
+
+ const UNSPECIFIED_DER: &[u8] = &hex!("0a 01 00");
+ const KEY_COMPROMISE_DER: &[u8] = &hex!("0a 01 01");
+
+ #[test]
+ fn decode() {
+ let unspecified = CrlReason::from_der(UNSPECIFIED_DER).unwrap();
+ assert_eq!(CrlReason::Unspecified, unspecified);
+
+ let key_compromise = CrlReason::from_der(KEY_COMPROMISE_DER).unwrap();
+ assert_eq!(CrlReason::KeyCompromise, key_compromise);
+ }
+
+ #[test]
+ fn encode() {
+ let mut buf = [0u8; 128];
+
+ let mut encoder = SliceWriter::new(&mut buf);
+ CrlReason::Unspecified.encode(&mut encoder).unwrap();
+ assert_eq!(UNSPECIFIED_DER, encoder.finish().unwrap());
+
+ let mut encoder = SliceWriter::new(&mut buf);
+ CrlReason::KeyCompromise.encode(&mut encoder).unwrap();
+ assert_eq!(KEY_COMPROMISE_DER, encoder.finish().unwrap());
+ }
+}
+
+/// Custom derive test cases for the `Sequence` macro.
+#[cfg(feature = "oid")]
+mod sequence {
+ use der::{
+ asn1::{AnyRef, ObjectIdentifier, SetOf},
+ Decode, Encode, Sequence, ValueOrd,
+ };
+ use hex_literal::hex;
+
+ pub fn default_false_example() -> bool {
+ false
+ }
+
+ // Issuing distribution point extension as defined in [RFC 5280 Section 5.2.5] and identified by the id-ce-issuingDistributionPoint OID (2.5.29.28).
+ //
+ // ```text
+ // IssuingDistributionPoint ::= SEQUENCE {
+ // distributionPoint [0] DistributionPointName OPTIONAL,
+ // onlyContainsUserCerts [1] BOOLEAN DEFAULT FALSE,
+ // onlyContainsCACerts [2] BOOLEAN DEFAULT FALSE,
+ // onlySomeReasons [3] ReasonFlags OPTIONAL,
+ // indirectCRL [4] BOOLEAN DEFAULT FALSE,
+ // onlyContainsAttributeCerts [5] BOOLEAN DEFAULT FALSE }
+ // -- at most one of onlyContainsUserCerts, onlyContainsCACerts,
+ // -- and onlyContainsAttributeCerts may be set to TRUE.
+ // ```
+ //
+ // [RFC 5280 Section 5.2.5]: https://datatracker.ietf.org/doc/html/rfc5280#section-5.2.5
+ #[derive(Sequence)]
+ pub struct IssuingDistributionPointExample {
+ // Omit distributionPoint and only_some_reasons because corresponding structs are not
+ // available here and are not germane to the example
+ // distributionPoint [0] DistributionPointName OPTIONAL,
+ //#[asn1(context_specific="0", optional="true", tag_mode="IMPLICIT")]
+ //pub distribution_point: Option<DistributionPointName<'a>>,
+ /// onlyContainsUserCerts [1] BOOLEAN DEFAULT FALSE,
+ #[asn1(
+ context_specific = "1",
+ default = "default_false_example",
+ tag_mode = "IMPLICIT"
+ )]
+ pub only_contains_user_certs: bool,
+
+ /// onlyContainsCACerts [2] BOOLEAN DEFAULT FALSE,
+ #[asn1(
+ context_specific = "2",
+ default = "default_false_example",
+ tag_mode = "IMPLICIT"
+ )]
+ pub only_contains_cacerts: bool,
+
+ // onlySomeReasons [3] ReasonFlags OPTIONAL,
+ //#[asn1(context_specific="3", optional="true", tag_mode="IMPLICIT")]
+ //pub only_some_reasons: Option<ReasonFlags<'a>>,
+ /// indirectCRL [4] BOOLEAN DEFAULT FALSE,
+ #[asn1(
+ context_specific = "4",
+ default = "default_false_example",
+ tag_mode = "IMPLICIT"
+ )]
+ pub indirect_crl: bool,
+
+ /// onlyContainsAttributeCerts [5] BOOLEAN DEFAULT FALSE
+ #[asn1(
+ context_specific = "5",
+ default = "default_false_example",
+ tag_mode = "IMPLICIT"
+ )]
+ pub only_contains_attribute_certs: bool,
+ }
+
+ // Extension as defined in [RFC 5280 Section 4.1.2.9].
+ //
+ // The ASN.1 definition for Extension objects is below. The extnValue type may be further parsed using a decoder corresponding to the extnID value.
+ //
+ // ```text
+ // Extension ::= SEQUENCE {
+ // extnID OBJECT IDENTIFIER,
+ // critical BOOLEAN DEFAULT FALSE,
+ // extnValue OCTET STRING
+ // -- contains the DER encoding of an ASN.1 value
+ // -- corresponding to the extension type identified
+ // -- by extnID
+ // }
+ // ```
+ //
+ // [RFC 5280 Section 4.1.2.9]: https://datatracker.ietf.org/doc/html/rfc5280#section-4.1.2.9
+ #[derive(Clone, Debug, Eq, PartialEq, Sequence)]
+ pub struct ExtensionExample<'a> {
+ /// extnID OBJECT IDENTIFIER,
+ pub extn_id: ObjectIdentifier,
+
+ /// critical BOOLEAN DEFAULT FALSE,
+ #[asn1(default = "default_false_example")]
+ pub critical: bool,
+
+ /// extnValue OCTET STRING
+ #[asn1(type = "OCTET STRING")]
+ pub extn_value: &'a [u8],
+ }
+
+ /// X.509 `AlgorithmIdentifier`
+ #[derive(Copy, Clone, Debug, Eq, PartialEq, Sequence, ValueOrd)]
+ pub struct AlgorithmIdentifier<'a> {
+ pub algorithm: ObjectIdentifier,
+ pub parameters: Option<AnyRef<'a>>,
+ }
+
+ /// X.509 `SubjectPublicKeyInfo` (SPKI)
+ #[derive(Copy, Clone, Debug, Eq, PartialEq, Sequence, ValueOrd)]
+ pub struct SubjectPublicKeyInfo<'a> {
+ pub algorithm: AlgorithmIdentifier<'a>,
+ #[asn1(type = "BIT STRING")]
+ pub subject_public_key: &'a [u8],
+ }
+
+ /// PKCS#8v2 `OneAsymmetricKey`
+ #[derive(Sequence)]
+ pub struct OneAsymmetricKey<'a> {
+ pub version: u8,
+ pub private_key_algorithm: AlgorithmIdentifier<'a>,
+ #[asn1(type = "OCTET STRING")]
+ pub private_key: &'a [u8],
+ #[asn1(context_specific = "0", extensible = "true", optional = "true")]
+ pub attributes: Option<SetOf<AnyRef<'a>, 1>>,
+ #[asn1(
+ context_specific = "1",
+ extensible = "true",
+ optional = "true",
+ type = "BIT STRING"
+ )]
+ pub public_key: Option<&'a [u8]>,
+ }
+
+ /// X.509 extension
+ // TODO(tarcieri): tests for code derived with the `default` attribute
+ #[derive(Clone, Debug, Eq, PartialEq, Sequence, ValueOrd)]
+ pub struct Extension<'a> {
+ extn_id: ObjectIdentifier,
+ #[asn1(default = "critical_default")]
+ critical: bool,
+ #[asn1(type = "OCTET STRING")]
+ extn_value: &'a [u8],
+ }
+
+ /// Default value of the `critical` bit
+ fn critical_default() -> bool {
+ false
+ }
+
+ const ID_EC_PUBLIC_KEY_OID: ObjectIdentifier =
+ ObjectIdentifier::new_unwrap("1.2.840.10045.2.1");
+
+ const PRIME256V1_OID: ObjectIdentifier = ObjectIdentifier::new_unwrap("1.2.840.10045.3.1.7");
+
+ const ALGORITHM_IDENTIFIER_DER: &[u8] =
+ &hex!("30 13 06 07 2a 86 48 ce 3d 02 01 06 08 2a 86 48 ce 3d 03 01 07");
+
+ #[derive(Sequence)]
+ #[asn1(tag_mode = "IMPLICIT")]
+ pub struct TypeCheckExpandedSequenceFieldAttributeCombinations<'a> {
+ pub simple: bool,
+ #[asn1(type = "BIT STRING")]
+ pub typed: &'a [u8],
+ #[asn1(context_specific = "0")]
+ pub context_specific: bool,
+ #[asn1(optional = "true")]
+ pub optional: Option<bool>,
+ #[asn1(default = "default_false_example")]
+ pub default: bool,
+ #[asn1(type = "BIT STRING", context_specific = "1")]
+ pub typed_context_specific: &'a [u8],
+ #[asn1(context_specific = "2", optional = "true")]
+ pub context_specific_optional: Option<bool>,
+ #[asn1(context_specific = "3", default = "default_false_example")]
+ pub context_specific_default: bool,
+ #[asn1(type = "BIT STRING", context_specific = "4", optional = "true")]
+ pub typed_context_specific_optional: Option<&'a [u8]>,
+ }
+
+ #[test]
+ fn idp_test() {
+ let idp = IssuingDistributionPointExample::from_der(&hex!("30038101FF")).unwrap();
+ assert_eq!(idp.only_contains_user_certs, true);
+ assert_eq!(idp.only_contains_cacerts, false);
+ assert_eq!(idp.indirect_crl, false);
+ assert_eq!(idp.only_contains_attribute_certs, false);
+
+ let idp = IssuingDistributionPointExample::from_der(&hex!("30038201FF")).unwrap();
+ assert_eq!(idp.only_contains_user_certs, false);
+ assert_eq!(idp.only_contains_cacerts, true);
+ assert_eq!(idp.indirect_crl, false);
+ assert_eq!(idp.only_contains_attribute_certs, false);
+
+ let idp = IssuingDistributionPointExample::from_der(&hex!("30038401FF")).unwrap();
+ assert_eq!(idp.only_contains_user_certs, false);
+ assert_eq!(idp.only_contains_cacerts, false);
+ assert_eq!(idp.indirect_crl, true);
+ assert_eq!(idp.only_contains_attribute_certs, false);
+
+ let idp = IssuingDistributionPointExample::from_der(&hex!("30038501FF")).unwrap();
+ assert_eq!(idp.only_contains_user_certs, false);
+ assert_eq!(idp.only_contains_cacerts, false);
+ assert_eq!(idp.indirect_crl, false);
+ assert_eq!(idp.only_contains_attribute_certs, true);
+ }
+
+ // Demonstrates a `default` field that is not context-specific.
+ #[test]
+ fn extension_test() {
+ let ext1 = ExtensionExample::from_der(&hex!(
+ "300F" // 0 15: SEQUENCE {
+ "0603551D13" // 2 3: OBJECT IDENTIFIER basicConstraints (2 5 29 19)
+ "0101FF" // 7 1: BOOLEAN TRUE
+ "0405" // 10 5: OCTET STRING, encapsulates {
+ "3003" // 12 3: SEQUENCE {
+ "0101FF" // 14 1: BOOLEAN TRUE
+ ))
+ .unwrap();
+ assert_eq!(ext1.critical, true);
+
+ let ext2 = ExtensionExample::from_der(&hex!(
+ "301F" // 0 31: SEQUENCE {
+ "0603551D23" // 2 3: OBJECT IDENTIFIER authorityKeyIdentifier (2 5 29 35)
+ "0418" // 7 24: OCTET STRING, encapsulates {
+ "3016" // 9 22: SEQUENCE {
+ "8014E47D5FD15C9586082C05AEBE75B665A7D95DA866" // 11 20: [0] E4 7D 5F D1 5C 95 86 08 2C 05 AE BE 75 B6 65 A7 D9 5D A8 66
+ ))
+ .unwrap();
+ assert_eq!(ext2.critical, false);
+ }
+
+ #[test]
+ fn decode() {
+ let algorithm_identifier =
+ AlgorithmIdentifier::from_der(&ALGORITHM_IDENTIFIER_DER).unwrap();
+
+ assert_eq!(ID_EC_PUBLIC_KEY_OID, algorithm_identifier.algorithm);
+ assert_eq!(
+ PRIME256V1_OID,
+ ObjectIdentifier::try_from(algorithm_identifier.parameters.unwrap()).unwrap()
+ );
+ }
+
+ #[test]
+ fn encode() {
+ let parameters_oid = PRIME256V1_OID;
+
+ let algorithm_identifier = AlgorithmIdentifier {
+ algorithm: ID_EC_PUBLIC_KEY_OID,
+ parameters: Some(AnyRef::from(&parameters_oid)),
+ };
+
+ assert_eq!(
+ ALGORITHM_IDENTIFIER_DER,
+ algorithm_identifier.to_vec().unwrap()
+ );
+ }
+}
diff --git a/tests/examples/spki.der b/tests/examples/spki.der
new file mode 100644
index 0000000..1b602ee
--- /dev/null
+++ b/tests/examples/spki.der
Binary files differ
diff --git a/tests/examples/spki.pem b/tests/examples/spki.pem
new file mode 100644
index 0000000..6891701
--- /dev/null
+++ b/tests/examples/spki.pem
@@ -0,0 +1,3 @@
+-----BEGIN PUBLIC KEY-----
+MCowBQYDK2VwAyEATSkWfz8ZEqb3rfopOgUaFcBexnuPFyZ7HFVQ3OhTvQ0=
+-----END PUBLIC KEY-----
diff --git a/tests/pem.rs b/tests/pem.rs
new file mode 100644
index 0000000..d2c8654
--- /dev/null
+++ b/tests/pem.rs
@@ -0,0 +1,67 @@
+//! PEM decoding and encoding tests.
+
+#![cfg(all(feature = "derive", feature = "oid", feature = "pem"))]
+
+use der::{
+ asn1::{BitString, ObjectIdentifier},
+ pem::{LineEnding, PemLabel},
+ Decode, DecodePem, EncodePem, Sequence,
+};
+
+/// Example SPKI document encoded as DER.
+const SPKI_DER: &[u8] = include_bytes!("examples/spki.der");
+
+/// Example SPKI document encoded as PEM.
+const SPKI_PEM: &str = include_str!("examples/spki.pem");
+
+/// X.509 `AlgorithmIdentifier`
+#[derive(Copy, Clone, Debug, Eq, PartialEq, Sequence)]
+pub struct AlgorithmIdentifier {
+ pub algorithm: ObjectIdentifier,
+ // pub parameters: ... (not used in spki.pem)
+}
+
+/// X.509 `SubjectPublicKeyInfo` (SPKI) in borrowed form
+#[derive(Copy, Clone, Debug, Eq, PartialEq, Sequence)]
+pub struct SpkiBorrowed<'a> {
+ pub algorithm: AlgorithmIdentifier,
+ #[asn1(type = "BIT STRING")]
+ pub subject_public_key: &'a [u8],
+}
+
+impl PemLabel for SpkiBorrowed<'_> {
+ const PEM_LABEL: &'static str = "PUBLIC KEY";
+}
+
+/// X.509 `SubjectPublicKeyInfo` (SPKI) in owned form
+#[derive(Clone, Debug, Eq, PartialEq, Sequence)]
+pub struct SpkiOwned {
+ pub algorithm: AlgorithmIdentifier,
+ pub subject_public_key: BitString,
+}
+
+impl PemLabel for SpkiOwned {
+ const PEM_LABEL: &'static str = "PUBLIC KEY";
+}
+
+#[test]
+fn from_pem() {
+ // Decode PEM to owned form.
+ let pem_spki = SpkiOwned::from_pem(SPKI_PEM).unwrap();
+
+ // Decode DER to borrowed form.
+ let der_spki = SpkiBorrowed::from_der(SPKI_DER).unwrap();
+
+ assert_eq!(pem_spki.algorithm, der_spki.algorithm);
+ assert_eq!(
+ pem_spki.subject_public_key.raw_bytes(),
+ der_spki.subject_public_key
+ );
+}
+
+#[test]
+fn to_pem() {
+ let spki = SpkiBorrowed::from_der(SPKI_DER).unwrap();
+ let pem = spki.to_pem(LineEnding::LF).unwrap();
+ assert_eq!(&pem, SPKI_PEM);
+}
diff --git a/tests/set_of.rs b/tests/set_of.rs
new file mode 100644
index 0000000..ba43d80
--- /dev/null
+++ b/tests/set_of.rs
@@ -0,0 +1,59 @@
+//! `SetOf` tests.
+
+#![cfg(feature = "alloc")]
+
+use der::{asn1::SetOfVec, DerOrd};
+use proptest::{prelude::*, string::*};
+
+proptest! {
+ #[test]
+ fn sort_equiv(bytes in bytes_regex(".{0,64}").unwrap()) {
+ let mut expected = bytes.clone();
+ expected.sort_by(|a, b| a.der_cmp(b).unwrap());
+
+ let set = SetOfVec::try_from(bytes).unwrap();
+ prop_assert_eq!(expected.as_slice(), set.as_slice());
+ }
+}
+
+/// Set ordering tests.
+#[cfg(all(feature = "derive", feature = "oid"))]
+mod ordering {
+ use der::{
+ asn1::{AnyRef, ObjectIdentifier, SetOf, SetOfVec},
+ Decode, Sequence, ValueOrd,
+ };
+ use hex_literal::hex;
+
+ /// X.501 `AttributeTypeAndValue`
+ #[derive(Copy, Clone, Debug, Eq, PartialEq, Sequence, ValueOrd)]
+ pub struct AttributeTypeAndValue<'a> {
+ pub oid: ObjectIdentifier,
+ pub value: AnyRef<'a>,
+ }
+
+ const OUT_OF_ORDER_RDN_EXAMPLE: &[u8] =
+ &hex!("311F301106035504030C0A4A4F484E20534D495448300A060355040A0C03313233");
+
+ /// For compatibility reasons, we allow non-canonical DER with out-of-order
+ /// sets in order to match the behavior of other implementations.
+ #[test]
+ fn allow_out_of_order_setof() {
+ assert!(SetOf::<AttributeTypeAndValue<'_>, 2>::from_der(OUT_OF_ORDER_RDN_EXAMPLE).is_ok());
+ }
+
+ /// Same as above, with `SetOfVec` instead of `SetOf`.
+ #[test]
+ fn allow_out_of_order_setofvec() {
+ assert!(SetOfVec::<AttributeTypeAndValue<'_>>::from_der(OUT_OF_ORDER_RDN_EXAMPLE).is_ok());
+ }
+
+ /// Test to ensure ordering is handled correctly.
+ #[test]
+ fn ordering_regression() {
+ let der_bytes = hex!("3139301906035504030C12546573742055736572393031353734333830301C060A0992268993F22C640101130E3437303031303030303134373333");
+ let set = SetOf::<AttributeTypeAndValue<'_>, 3>::from_der(&der_bytes).unwrap();
+ let attr1 = set.get(0).unwrap();
+ assert_eq!(ObjectIdentifier::new("2.5.4.3").unwrap(), attr1.oid);
+ }
+}