diff options
Diffstat (limited to 'src/error.rs')
-rw-r--r-- | src/error.rs | 192 |
1 file changed, 110 insertions, 82 deletions
diff --git a/src/error.rs b/src/error.rs index 01402d4..f24c4a6 100644 --- a/src/error.rs +++ b/src/error.rs @@ -11,7 +11,7 @@ use core::mem::ManuallyDrop; #[cfg(not(anyhow_no_ptr_addr_of))] use core::ptr; use core::ptr::NonNull; -#[cfg(backtrace)] +#[cfg(error_generic_member_access)] use std::error::{self, Request}; #[cfg(feature = "std")] @@ -99,7 +99,10 @@ impl Error { #[cfg(anyhow_no_ptr_addr_of)] object_downcast_mut: object_downcast_mut::<E>, object_drop_rest: object_drop_front::<E>, - #[cfg(all(not(backtrace), feature = "backtrace"))] + #[cfg(all( + not(error_generic_member_access), + any(std_backtrace, feature = "backtrace") + ))] object_backtrace: no_backtrace, }; @@ -124,7 +127,10 @@ impl Error { #[cfg(anyhow_no_ptr_addr_of)] object_downcast_mut: object_downcast_mut::<M>, object_drop_rest: object_drop_front::<M>, - #[cfg(all(not(backtrace), feature = "backtrace"))] + #[cfg(all( + not(error_generic_member_access), + any(std_backtrace, feature = "backtrace") + ))] object_backtrace: no_backtrace, }; @@ -150,7 +156,10 @@ impl Error { #[cfg(anyhow_no_ptr_addr_of)] object_downcast_mut: object_downcast_mut::<M>, object_drop_rest: object_drop_front::<M>, - #[cfg(all(not(backtrace), feature = "backtrace"))] + #[cfg(all( + not(error_generic_member_access), + any(std_backtrace, feature = "backtrace") + ))] object_backtrace: no_backtrace, }; @@ -178,7 +187,10 @@ impl Error { #[cfg(anyhow_no_ptr_addr_of)] object_downcast_mut: context_downcast_mut::<C, E>, object_drop_rest: context_drop_rest::<C, E>, - #[cfg(all(not(backtrace), feature = "backtrace"))] + #[cfg(all( + not(error_generic_member_access), + any(std_backtrace, feature = "backtrace") + ))] object_backtrace: no_backtrace, }; @@ -204,7 +216,10 @@ impl Error { #[cfg(anyhow_no_ptr_addr_of)] object_downcast_mut: object_downcast_mut::<Box<dyn StdError + Send + Sync>>, object_drop_rest: object_drop_front::<Box<dyn StdError + Send + Sync>>, - #[cfg(all(not(backtrace), feature = "backtrace"))] + #[cfg(all( + 
not(error_generic_member_access), + any(std_backtrace, feature = "backtrace") + ))] object_backtrace: no_backtrace, }; @@ -317,7 +332,10 @@ impl Error { #[cfg(anyhow_no_ptr_addr_of)] object_downcast_mut: context_chain_downcast_mut::<C>, object_drop_rest: context_chain_drop_rest::<C>, - #[cfg(all(not(backtrace), feature = "backtrace"))] + #[cfg(all( + not(error_generic_member_access), + any(std_backtrace, feature = "backtrace") + ))] object_backtrace: context_backtrace::<C>, }; @@ -345,21 +363,17 @@ impl Error { /// /// # Stability /// - /// Standard library backtraces are only available on the nightly channel. - /// Tracking issue: [rust-lang/rust#53487][tracking]. - /// - /// On stable compilers, this function is only available if the crate's + /// Standard library backtraces are only available when using Rust ≥ + /// 1.65. On older compilers, this function is only available if the crate's /// "backtrace" feature is enabled, and will use the `backtrace` crate as - /// the underlying backtrace implementation. + /// the underlying backtrace implementation. The return type of this + /// function on old compilers is `&(impl Debug + Display)`. 
/// /// ```toml /// [dependencies] /// anyhow = { version = "1.0", features = ["backtrace"] } /// ``` - /// - /// [tracking]: https://github.com/rust-lang/rust/issues/53487 - #[cfg(any(backtrace, feature = "backtrace"))] - #[cfg_attr(doc_cfg, doc(cfg(any(nightly, feature = "backtrace"))))] + #[cfg(any(std_backtrace, feature = "backtrace"))] pub fn backtrace(&self) -> &impl_backtrace!() { unsafe { ErrorImpl::backtrace(self.inner.by_ref()) } } @@ -523,7 +537,7 @@ impl Error { } } - #[cfg(backtrace)] + #[cfg(error_generic_member_access)] pub(crate) fn provide<'a>(&'a self, request: &mut Request<'a>) { unsafe { ErrorImpl::provide(self.inner.by_ref(), request) } } @@ -533,7 +547,7 @@ impl Error { // deref'ing to dyn Error where the provide implementation would include // only the original error's Backtrace from before it got wrapped into an // anyhow::Error. - #[cfg(backtrace)] + #[cfg(error_generic_member_access)] #[doc(hidden)] pub fn thiserror_provide<'a>(&'a self, request: &mut Request<'a>) { Self::provide(self, request); @@ -602,7 +616,10 @@ struct ErrorVTable { #[cfg(anyhow_no_ptr_addr_of)] object_downcast_mut: unsafe fn(Mut<ErrorImpl>, TypeId) -> Option<Mut<()>>, object_drop_rest: unsafe fn(Own<ErrorImpl>, TypeId), - #[cfg(all(not(backtrace), feature = "backtrace"))] + #[cfg(all( + not(error_generic_member_access), + any(std_backtrace, feature = "backtrace") + ))] object_backtrace: unsafe fn(Ref<ErrorImpl>) -> Option<&Backtrace>, } @@ -610,8 +627,8 @@ struct ErrorVTable { unsafe fn object_drop<E>(e: Own<ErrorImpl>) { // Cast back to ErrorImpl<E> so that the allocator receives the correct // Layout to deallocate the Box's memory. - let unerased = e.cast::<ErrorImpl<E>>().boxed(); - drop(unerased); + let unerased_own = e.cast::<ErrorImpl<E>>(); + drop(unsafe { unerased_own.boxed() }); } // Safety: requires layout of *e to match ErrorImpl<E>. @@ -620,8 +637,8 @@ unsafe fn object_drop_front<E>(e: Own<ErrorImpl>, target: TypeId) { // without dropping E itself. 
This is used by downcast after doing a // ptr::read to take ownership of the E. let _ = target; - let unerased = e.cast::<ErrorImpl<ManuallyDrop<E>>>().boxed(); - drop(unerased); + let unerased_own = e.cast::<ErrorImpl<ManuallyDrop<E>>>(); + drop(unsafe { unerased_own.boxed() }); } // Safety: requires layout of *e to match ErrorImpl<E>. @@ -631,15 +648,15 @@ where { // Attach E's native StdError vtable onto a pointer to self._object. - let unerased = e.cast::<ErrorImpl<E>>(); + let unerased_ref = e.cast::<ErrorImpl<E>>(); #[cfg(not(anyhow_no_ptr_addr_of))] - return Ref::from_raw(NonNull::new_unchecked( - ptr::addr_of!((*unerased.as_ptr())._object) as *mut E, - )); + return Ref::from_raw(unsafe { + NonNull::new_unchecked(ptr::addr_of!((*unerased_ref.as_ptr())._object) as *mut E) + }); #[cfg(anyhow_no_ptr_addr_of)] - return Ref::new(&unerased.deref()._object); + return Ref::new(unsafe { &unerased_ref.deref()._object }); } // Safety: requires layout of *e to match ErrorImpl<E>, and for `e` to be derived @@ -650,7 +667,8 @@ where E: StdError + Send + Sync + 'static, { // Attach E's native StdError vtable onto a pointer to self._object. - &mut e.cast::<ErrorImpl<E>>().deref_mut()._object + let unerased_mut = e.cast::<ErrorImpl<E>>(); + unsafe { &mut unerased_mut.deref_mut()._object } } // Safety: requires layout of *e to match ErrorImpl<E>. @@ -659,7 +677,8 @@ where E: StdError + Send + Sync + 'static, { // Attach ErrorImpl<E>'s native StdError vtable. The StdError impl is below. - e.cast::<ErrorImpl<E>>().boxed() + let unerased_own = e.cast::<ErrorImpl<E>>(); + unsafe { unerased_own.boxed() } } // Safety: requires layout of *e to match ErrorImpl<E>. @@ -671,18 +690,18 @@ where // Caller is looking for an E pointer and e is ErrorImpl<E>, take a // pointer to its E field. 
- let unerased = e.cast::<ErrorImpl<E>>(); + let unerased_ref = e.cast::<ErrorImpl<E>>(); #[cfg(not(anyhow_no_ptr_addr_of))] return Some( - Ref::from_raw(NonNull::new_unchecked( - ptr::addr_of!((*unerased.as_ptr())._object) as *mut E, - )) + Ref::from_raw(unsafe { + NonNull::new_unchecked(ptr::addr_of!((*unerased_ref.as_ptr())._object) as *mut E) + }) .cast::<()>(), ); #[cfg(anyhow_no_ptr_addr_of)] - return Some(Ref::new(&unerased.deref()._object).cast::<()>()); + return Some(Ref::new(unsafe { &unerased_ref.deref()._object }).cast::<()>()); } else { None } @@ -697,14 +716,18 @@ where if TypeId::of::<E>() == target { // Caller is looking for an E pointer and e is ErrorImpl<E>, take a // pointer to its E field. - let unerased = e.cast::<ErrorImpl<E>>().deref_mut(); + let unerased_mut = e.cast::<ErrorImpl<E>>(); + let unerased = unsafe { unerased_mut.deref_mut() }; Some(Mut::new(&mut unerased._object).cast::<()>()) } else { None } } -#[cfg(all(not(backtrace), feature = "backtrace"))] +#[cfg(all( + not(error_generic_member_access), + any(std_backtrace, feature = "backtrace") +))] fn no_backtrace(e: Ref<ErrorImpl>) -> Option<&Backtrace> { let _ = e; None @@ -718,10 +741,12 @@ where E: 'static, { if TypeId::of::<C>() == target { - let unerased = e.cast::<ErrorImpl<ContextError<C, E>>>().deref(); + let unerased_ref = e.cast::<ErrorImpl<ContextError<C, E>>>(); + let unerased = unsafe { unerased_ref.deref() }; Some(Ref::new(&unerased._object.context).cast::<()>()) } else if TypeId::of::<E>() == target { - let unerased = e.cast::<ErrorImpl<ContextError<C, E>>>().deref(); + let unerased_ref = e.cast::<ErrorImpl<ContextError<C, E>>>(); + let unerased = unsafe { unerased_ref.deref() }; Some(Ref::new(&unerased._object.error).cast::<()>()) } else { None @@ -736,10 +761,12 @@ where E: 'static, { if TypeId::of::<C>() == target { - let unerased = e.cast::<ErrorImpl<ContextError<C, E>>>().deref_mut(); + let unerased_mut = e.cast::<ErrorImpl<ContextError<C, E>>>(); + let unerased = 
unsafe { unerased_mut.deref_mut() }; Some(Mut::new(&mut unerased._object.context).cast::<()>()) } else if TypeId::of::<E>() == target { - let unerased = e.cast::<ErrorImpl<ContextError<C, E>>>().deref_mut(); + let unerased_mut = e.cast::<ErrorImpl<ContextError<C, E>>>(); + let unerased = unsafe { unerased_mut.deref_mut() }; Some(Mut::new(&mut unerased._object.error).cast::<()>()) } else { None @@ -756,15 +783,11 @@ where // Called after downcasting by value to either the C or the E and doing a // ptr::read to take ownership of that value. if TypeId::of::<C>() == target { - let unerased = e - .cast::<ErrorImpl<ContextError<ManuallyDrop<C>, E>>>() - .boxed(); - drop(unerased); + let unerased_own = e.cast::<ErrorImpl<ContextError<ManuallyDrop<C>, E>>>(); + drop(unsafe { unerased_own.boxed() }); } else { - let unerased = e - .cast::<ErrorImpl<ContextError<C, ManuallyDrop<E>>>>() - .boxed(); - drop(unerased); + let unerased_own = e.cast::<ErrorImpl<ContextError<C, ManuallyDrop<E>>>>(); + drop(unsafe { unerased_own.boxed() }); } } @@ -773,13 +796,14 @@ unsafe fn context_chain_downcast<C>(e: Ref<ErrorImpl>, target: TypeId) -> Option where C: 'static, { - let unerased = e.cast::<ErrorImpl<ContextError<C, Error>>>().deref(); + let unerased_ref = e.cast::<ErrorImpl<ContextError<C, Error>>>(); + let unerased = unsafe { unerased_ref.deref() }; if TypeId::of::<C>() == target { Some(Ref::new(&unerased._object.context).cast::<()>()) } else { // Recurse down the context chain per the inner error's vtable. 
let source = &unerased._object.error; - (vtable(source.inner.ptr).object_downcast)(source.inner.by_ref(), target) + unsafe { (vtable(source.inner.ptr).object_downcast)(source.inner.by_ref(), target) } } } @@ -789,13 +813,14 @@ unsafe fn context_chain_downcast_mut<C>(e: Mut<ErrorImpl>, target: TypeId) -> Op where C: 'static, { - let unerased = e.cast::<ErrorImpl<ContextError<C, Error>>>().deref_mut(); + let unerased_mut = e.cast::<ErrorImpl<ContextError<C, Error>>>(); + let unerased = unsafe { unerased_mut.deref_mut() }; if TypeId::of::<C>() == target { Some(Mut::new(&mut unerased._object.context).cast::<()>()) } else { // Recurse down the context chain per the inner error's vtable. let source = &mut unerased._object.error; - (vtable(source.inner.ptr).object_downcast_mut)(source.inner.by_mut(), target) + unsafe { (vtable(source.inner.ptr).object_downcast_mut)(source.inner.by_mut(), target) } } } @@ -807,33 +832,34 @@ where // Called after downcasting by value to either the C or one of the causes // and doing a ptr::read to take ownership of that value. if TypeId::of::<C>() == target { - let unerased = e - .cast::<ErrorImpl<ContextError<ManuallyDrop<C>, Error>>>() - .boxed(); + let unerased_own = e.cast::<ErrorImpl<ContextError<ManuallyDrop<C>, Error>>>(); // Drop the entire rest of the data structure rooted in the next Error. - drop(unerased); + drop(unsafe { unerased_own.boxed() }); } else { - let unerased = e - .cast::<ErrorImpl<ContextError<C, ManuallyDrop<Error>>>>() - .boxed(); + let unerased_own = e.cast::<ErrorImpl<ContextError<C, ManuallyDrop<Error>>>>(); + let unerased = unsafe { unerased_own.boxed() }; // Read the Own<ErrorImpl> from the next error. let inner = unerased._object.error.inner; drop(unerased); - let vtable = vtable(inner.ptr); + let vtable = unsafe { vtable(inner.ptr) }; // Recursively drop the next error using the same target typeid. 
- (vtable.object_drop_rest)(inner, target); + unsafe { (vtable.object_drop_rest)(inner, target) }; } } // Safety: requires layout of *e to match ErrorImpl<ContextError<C, Error>>. -#[cfg(all(not(backtrace), feature = "backtrace"))] +#[cfg(all( + not(error_generic_member_access), + any(std_backtrace, feature = "backtrace") +))] #[allow(clippy::unnecessary_wraps)] unsafe fn context_backtrace<C>(e: Ref<ErrorImpl>) -> Option<&Backtrace> where C: 'static, { - let unerased = e.cast::<ErrorImpl<ContextError<C, Error>>>().deref(); - let backtrace = ErrorImpl::backtrace(unerased._object.error.inner.by_ref()); + let unerased_ref = e.cast::<ErrorImpl<ContextError<C, Error>>>(); + let unerased = unsafe { unerased_ref.deref() }; + let backtrace = unsafe { ErrorImpl::backtrace(unerased._object.error.inner.by_ref()) }; Some(backtrace) } @@ -853,7 +879,7 @@ pub(crate) struct ErrorImpl<E = ()> { // avoids converting `p` into a reference. unsafe fn vtable(p: NonNull<ErrorImpl>) -> &'static ErrorVTable { // NOTE: This assumes that `ErrorVTable` is the first field of ErrorImpl. - *(p.as_ptr() as *const &'static ErrorVTable) + unsafe { *(p.as_ptr() as *const &'static ErrorVTable) } } // repr C to ensure that ContextError<C, E> has the same layout as @@ -877,7 +903,7 @@ impl ErrorImpl { pub(crate) unsafe fn error(this: Ref<Self>) -> &(dyn StdError + Send + Sync + 'static) { // Use vtable to attach E's native StdError vtable for the right // original type E. - (vtable(this.ptr).object_ref)(this).deref() + unsafe { (vtable(this.ptr).object_ref)(this).deref() } } #[cfg(feature = "std")] @@ -886,42 +912,44 @@ impl ErrorImpl { // original type E. 
#[cfg(not(anyhow_no_ptr_addr_of))] - return (vtable(this.ptr).object_ref)(this.by_ref()) - .by_mut() - .deref_mut(); + return unsafe { + (vtable(this.ptr).object_ref)(this.by_ref()) + .by_mut() + .deref_mut() + }; #[cfg(anyhow_no_ptr_addr_of)] - return (vtable(this.ptr).object_mut)(this); + return unsafe { (vtable(this.ptr).object_mut)(this) }; } - #[cfg(any(backtrace, feature = "backtrace"))] + #[cfg(any(std_backtrace, feature = "backtrace"))] pub(crate) unsafe fn backtrace(this: Ref<Self>) -> &Backtrace { // This unwrap can only panic if the underlying error's backtrace method // is nondeterministic, which would only happen in maliciously // constructed code. - this.deref() + unsafe { this.deref() } .backtrace .as_ref() .or_else(|| { - #[cfg(backtrace)] - return error::request_ref::<Backtrace>(Self::error(this)); - #[cfg(not(backtrace))] - return (vtable(this.ptr).object_backtrace)(this); + #[cfg(error_generic_member_access)] + return error::request_ref::<Backtrace>(unsafe { Self::error(this) }); + #[cfg(not(error_generic_member_access))] + return unsafe { (vtable(this.ptr).object_backtrace)(this) }; }) .expect("backtrace capture failed") } - #[cfg(backtrace)] + #[cfg(error_generic_member_access)] unsafe fn provide<'a>(this: Ref<'a, Self>, request: &mut Request<'a>) { - if let Some(backtrace) = &this.deref().backtrace { + if let Some(backtrace) = unsafe { &this.deref().backtrace } { request.provide_ref(backtrace); } - Self::error(this).provide(request); + unsafe { Self::error(this) }.provide(request); } #[cold] pub(crate) unsafe fn chain(this: Ref<Self>) -> Chain { - Chain::new(Self::error(this)) + Chain::new(unsafe { Self::error(this) }) } } @@ -933,7 +961,7 @@ where unsafe { ErrorImpl::error(self.erase()).source() } } - #[cfg(backtrace)] + #[cfg(error_generic_member_access)] fn provide<'a>(&'a self, request: &mut Request<'a>) { unsafe { ErrorImpl::provide(self.erase(), request) } } |