diff --git a/src/impls.rs b/src/impls.rs index e9c45732f2..4480dafe91 100644 --- a/src/impls.rs +++ b/src/impls.rs @@ -122,8 +122,14 @@ safety_comment! { /// bit pattern 0x01. /// /// [3] TODO(#429): Justify this claim. - unsafe_impl!(bool: TryFromBytes; |byte: MaybeAligned| *byte.unaligned_as_ref() < 2); + unsafe_impl!(=> TryFromBytes for bool; |byte| { + let byte = byte.transmute::(); + *byte.unaligned_as_ref() < 2 + }); } + +impl_size_eq!(bool, u8); + safety_comment! { /// SAFETY: /// - `Immutable`: `char` self-evidently does not contain any `UnsafeCell`s. @@ -159,11 +165,15 @@ safety_comment! { /// a `char`. /// /// [3] TODO(#429): Justify this claim. - unsafe_impl!(char: TryFromBytes; |candidate: MaybeAligned| { - let candidate = candidate.read_unaligned::(); - char::from_u32(candidate).is_some() + unsafe_impl!(=> TryFromBytes for char; |c| { + let c = c.transmute::, invariant::Valid, _>(); + let c = c.read_unaligned().into_inner(); + char::from_u32(c).is_some() }); } + +impl_size_eq!(char, Unalign); + safety_comment! { /// SAFETY: /// Per the Reference [1], `str` has the same layout as `[u8]`. @@ -200,12 +210,37 @@ safety_comment! { /// [2] Per https://doc.rust-lang.org/core/str/fn.from_utf8.html#errors: /// /// Returns `Err` if the slice is not UTF-8. - unsafe_impl!(str: TryFromBytes; |candidate: MaybeAligned<[u8]>| { - let candidate = candidate.unaligned_as_ref(); - core::str::from_utf8(candidate).is_ok() + unsafe_impl!(=> TryFromBytes for str; |c| { + let c = c.transmute::<[u8], invariant::Valid, _>(); + let c = c.unaligned_as_ref(); + core::str::from_utf8(c).is_ok() }); } +// SAFETY: `str` and `[u8]` have the same layout [1]. +// +// [1] Per https://doc.rust-lang.org/1.81.0/reference/type-layout.html#str-layout: +// +// String slices are a UTF-8 representation of characters that have the same +// layout as slices of type `[u8]`. +unsafe impl pointer::SizeEq for [u8] {} +// SAFETY: See previous safety comment. 
+unsafe impl pointer::SizeEq<[u8]> for str {} + +macro_rules! unsafe_impl_try_from_bytes_for_nonzero { + ($($nonzero:ident[$prim:ty]),*) => { + $( + unsafe_impl!(=> TryFromBytes for $nonzero; |n| { + unsafe impl pointer::SizeEq<$nonzero> for Unalign<$prim> {} + unsafe impl pointer::SizeEq> for $nonzero {} + + let n = n.transmute::, invariant::Valid, _>(); + $nonzero::new(n.read_unaligned().into_inner()).is_some() + }); + )* + } +} + safety_comment! { // `NonZeroXxx` is `IntoBytes`, but not `FromZeros` or `FromBytes`. // @@ -271,18 +306,20 @@ safety_comment! { /// /// [2] `NonZeroXxx` self-evidently does not contain `UnsafeCell`s. This is /// not a proof, but we are accepting this as a known risk per #1358. - unsafe_impl!(NonZeroU8: TryFromBytes; |n: MaybeAligned| NonZeroU8::new(n.read_unaligned::()).is_some()); - unsafe_impl!(NonZeroI8: TryFromBytes; |n: MaybeAligned| NonZeroI8::new(n.read_unaligned::()).is_some()); - unsafe_impl!(NonZeroU16: TryFromBytes; |n: MaybeAligned| NonZeroU16::new(n.read_unaligned::()).is_some()); - unsafe_impl!(NonZeroI16: TryFromBytes; |n: MaybeAligned| NonZeroI16::new(n.read_unaligned::()).is_some()); - unsafe_impl!(NonZeroU32: TryFromBytes; |n: MaybeAligned| NonZeroU32::new(n.read_unaligned::()).is_some()); - unsafe_impl!(NonZeroI32: TryFromBytes; |n: MaybeAligned| NonZeroI32::new(n.read_unaligned::()).is_some()); - unsafe_impl!(NonZeroU64: TryFromBytes; |n: MaybeAligned| NonZeroU64::new(n.read_unaligned::()).is_some()); - unsafe_impl!(NonZeroI64: TryFromBytes; |n: MaybeAligned| NonZeroI64::new(n.read_unaligned::()).is_some()); - unsafe_impl!(NonZeroU128: TryFromBytes; |n: MaybeAligned| NonZeroU128::new(n.read_unaligned::()).is_some()); - unsafe_impl!(NonZeroI128: TryFromBytes; |n: MaybeAligned| NonZeroI128::new(n.read_unaligned::()).is_some()); - unsafe_impl!(NonZeroUsize: TryFromBytes; |n: MaybeAligned| NonZeroUsize::new(n.read_unaligned::()).is_some()); - unsafe_impl!(NonZeroIsize: TryFromBytes; |n: MaybeAligned| 
NonZeroIsize::new(n.read_unaligned::()).is_some()); + unsafe_impl_try_from_bytes_for_nonzero!( + NonZeroU8[u8], + NonZeroI8[i8], + NonZeroU16[u16], + NonZeroI16[i16], + NonZeroU32[u32], + NonZeroI32[i32], + NonZeroU64[u64], + NonZeroI64[i64], + NonZeroU128[u128], + NonZeroI128[i128], + NonZeroUsize[usize], + NonZeroIsize[isize] + ); } safety_comment! { /// SAFETY: @@ -358,8 +395,7 @@ safety_comment! { #[cfg(feature = "alloc")] unsafe_impl!( #[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))] - T => TryFromBytes for Option>; - |c: Maybe>>| pointer::is_zeroed(c) + T => TryFromBytes for Option>; |c| pointer::is_zeroed(c) ); #[cfg(feature = "alloc")] unsafe_impl!( @@ -367,29 +403,26 @@ safety_comment! { T => FromZeros for Option> ); unsafe_impl!( - T => TryFromBytes for Option<&'_ T>; - |c: Maybe>| pointer::is_zeroed(c) + T => TryFromBytes for Option<&'_ T>; |c| pointer::is_zeroed(c) ); unsafe_impl!(T => FromZeros for Option<&'_ T>); unsafe_impl!( - T => TryFromBytes for Option<&'_ mut T>; - |c: Maybe>| pointer::is_zeroed(c) + T => TryFromBytes for Option<&'_ mut T>; |c| pointer::is_zeroed(c) ); unsafe_impl!(T => FromZeros for Option<&'_ mut T>); unsafe_impl!( - T => TryFromBytes for Option>; - |c: Maybe>>| pointer::is_zeroed(c) + T => TryFromBytes for Option>; |c| pointer::is_zeroed(c) ); unsafe_impl!(T => FromZeros for Option>); unsafe_impl_for_power_set!(A, B, C, D, E, F, G, H, I, J, K, L -> M => FromZeros for opt_fn!(...)); unsafe_impl_for_power_set!( A, B, C, D, E, F, G, H, I, J, K, L -> M => TryFromBytes for opt_fn!(...); - |c: Maybe| pointer::is_zeroed(c) + |c| pointer::is_zeroed(c) ); unsafe_impl_for_power_set!(A, B, C, D, E, F, G, H, I, J, K, L -> M => FromZeros for opt_extern_c_fn!(...)); unsafe_impl_for_power_set!( A, B, C, D, E, F, G, H, I, J, K, L -> M => TryFromBytes for opt_extern_c_fn!(...); - |c: Maybe| pointer::is_zeroed(c) + |c| pointer::is_zeroed(c) ); } @@ -417,17 +450,75 @@ mod atomics { use super::*; macro_rules! 
impl_traits_for_atomics { - ($($atomics:ident),* $(,)?) => { + ($($atomics:ident [$primitives:ident]),* $(,)?) => { $( impl_known_layout!($atomics); - impl_for_transparent_wrapper!(=> TryFromBytes for $atomics); - impl_for_transparent_wrapper!(=> FromZeros for $atomics); - impl_for_transparent_wrapper!(=> FromBytes for $atomics); - impl_for_transparent_wrapper!(=> IntoBytes for $atomics); + impl_for_transmute_from!(=> TryFromBytes for $atomics [UnsafeCell<$primitives>]); + impl_for_transmute_from!(=> FromZeros for $atomics [UnsafeCell<$primitives>]); + impl_for_transmute_from!(=> FromBytes for $atomics [UnsafeCell<$primitives>]); + impl_for_transmute_from!(=> IntoBytes for $atomics [UnsafeCell<$primitives>]); )* }; } + /// Implements `TransmuteFrom` for `$atomic`, `$prim`, and + /// `UnsafeCell<$prim>`. + /// + /// # Safety + /// + /// `$atomic` must have the same size and bit validity as `$prim`. + macro_rules! unsafe_impl_transmute_from_for_atomic { + ($($($tyvar:ident)? => $atomic:ty [$prim:ty]),*) => { + const _: () = { + use crate::pointer::{TransmuteFrom, SizeEq, invariant::Valid}; + + $( + #[allow(unused_unsafe)] // Force the caller to call this macro inside `safety_comment!`. + const _: () = unsafe {}; + + // SAFETY: The caller promised that `$atomic` and `$prim` have + // the same size and bit validity. + unsafe impl<$($tyvar)?> TransmuteFrom<$atomic, Valid, Valid> for $prim {} + // SAFETY: The caller promised that `$atomic` and `$prim` have + // the same size and bit validity. + unsafe impl<$($tyvar)?> TransmuteFrom<$prim, Valid, Valid> for $atomic {} + + // SAFETY: THe caller promised that `$atomic` and `$prim` + // have the same size. + unsafe impl<$($tyvar)?> SizeEq<$atomic> for $prim {} + // SAFETY: THe caller promised that `$atomic` and `$prim` + // have the same size. + unsafe impl<$($tyvar)?> SizeEq<$prim> for $atomic {} + // SAFETY: The caller promised that `$atomic` and `$prim` + // have the same size. 
`UnsafeCell` has the same size as + // `T` [1]. + // + // [1] Per https://doc.rust-lang.org/1.85.0/std/cell/struct.UnsafeCell.html#memory-layout: + // + // `UnsafeCell` has the same in-memory representation as + // its inner type `T`. A consequence of this guarantee is that + // it is possible to convert between `T` and `UnsafeCell`. + unsafe impl<$($tyvar)?> SizeEq<$atomic> for core::cell::UnsafeCell<$prim> {} + // SAFETY: See previous safety comment. + unsafe impl<$($tyvar)?> SizeEq> for $atomic {} + + // SAFETY: The caller promised that `$atomic` and `$prim` + // have the same bit validity. `UnsafeCell` has the same + // bit validity as `T` [1]. + // + // [1] Per https://doc.rust-lang.org/1.85.0/std/cell/struct.UnsafeCell.html#memory-layout: + // + // `UnsafeCell` has the same in-memory representation as + // its inner type `T`. A consequence of this guarantee is that + // it is possible to convert between `T` and `UnsafeCell`. + unsafe impl<$($tyvar)?> TransmuteFrom<$atomic, Valid, Valid> for core::cell::UnsafeCell<$prim> {} + // SAFETY: See previous safety comment. + unsafe impl<$($tyvar)?> TransmuteFrom, Valid, Valid> for $atomic {} + )* + }; + }; + } + #[cfg(target_has_atomic = "8")] #[cfg_attr(doc_cfg, doc(cfg(target_has_atomic = "8")))] mod atomic_8 { @@ -435,13 +526,13 @@ mod atomics { use super::*; - impl_traits_for_atomics!(AtomicU8, AtomicI8); + impl_traits_for_atomics!(AtomicU8[u8], AtomicI8[i8]); impl_known_layout!(AtomicBool); - impl_for_transparent_wrapper!(=> TryFromBytes for AtomicBool); - impl_for_transparent_wrapper!(=> FromZeros for AtomicBool); - impl_for_transparent_wrapper!(=> IntoBytes for AtomicBool); + impl_for_transmute_from!(=> TryFromBytes for AtomicBool [UnsafeCell]); + impl_for_transmute_from!(=> FromZeros for AtomicBool [UnsafeCell]); + impl_for_transmute_from!(=> IntoBytes for AtomicBool [UnsafeCell]); safety_comment! 
{ /// SAFETY: @@ -469,9 +560,28 @@ mod atomics { assert_unaligned!(AtomicBool, AtomicU8, AtomicI8); /// SAFETY: - /// All of these pass an atomic type and that type's native equivalent, as - /// required by the macro safety preconditions. - unsafe_impl_transparent_wrapper_for_atomic!(AtomicU8 [u8], AtomicI8 [i8], AtomicBool [bool]); + /// `AtomicU8`, `AtomicI8`, and `AtomicBool` have the same size and + /// bit validity as `u8`, `i8`, and `bool` respectively [1][2][3]. + /// + /// [1] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicU8.html: + /// + /// This type has the same size, alignment, and bit validity as + /// the underlying integer type, `u8`. + /// + /// [2] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicI8.html: + /// + /// This type has the same size, alignment, and bit validity as + /// the underlying integer type, `i8`. + /// + /// [3] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicBool.html: + /// + /// This type has the same size, alignment, and bit validity a + /// `bool`. + unsafe_impl_transmute_from_for_atomic!( + => AtomicU8 [u8], + => AtomicI8 [i8], + => AtomicBool [bool] + ); } } @@ -482,13 +592,23 @@ mod atomics { use super::*; - impl_traits_for_atomics!(AtomicU16, AtomicI16); + impl_traits_for_atomics!(AtomicU16[u16], AtomicI16[i16]); safety_comment! { /// SAFETY: - /// All of these pass an atomic type and that type's native equivalent, as - /// required by the macro safety preconditions. - unsafe_impl_transparent_wrapper_for_atomic!(AtomicU16 [u16], AtomicI16 [i16]); + /// `AtomicU16` and `AtomicI16` have the same size and bit validity + /// as `u16` and `i16` respectively [1][2]. + /// + /// [1] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicU16.html: + /// + /// This type has the same size and bit validity as the underlying + /// integer type, `u16`. 
+ /// + /// [2] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicI16.html: + /// + /// This type has the same size and bit validity as the underlying + /// integer type, `i16`. + unsafe_impl_transmute_from_for_atomic!(=> AtomicU16 [u16], => AtomicI16 [i16]); } } @@ -499,13 +619,23 @@ mod atomics { use super::*; - impl_traits_for_atomics!(AtomicU32, AtomicI32); + impl_traits_for_atomics!(AtomicU32[u32], AtomicI32[i32]); safety_comment! { /// SAFETY: - /// All of these pass an atomic type and that type's native equivalent, as - /// required by the macro safety preconditions. - unsafe_impl_transparent_wrapper_for_atomic!(AtomicU32 [u32], AtomicI32 [i32]); + /// `AtomicU32` and `AtomicI32` have the same size and bit validity + /// as `u32` and `i32` respectively [1][2]. + /// + /// [1] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicU32.html: + /// + /// This type has the same size and bit validity as the underlying + /// integer type, `u32`. + /// + /// [2] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicI32.html: + /// + /// This type has the same size and bit validity as the underlying + /// integer type, `i32`. + unsafe_impl_transmute_from_for_atomic!(=> AtomicU32 [u32], => AtomicI32 [i32]); } } @@ -516,13 +646,23 @@ mod atomics { use super::*; - impl_traits_for_atomics!(AtomicU64, AtomicI64); + impl_traits_for_atomics!(AtomicU64[u64], AtomicI64[i64]); safety_comment! { /// SAFETY: - /// All of these pass an atomic type and that type's native equivalent, as - /// required by the macro safety preconditions. - unsafe_impl_transparent_wrapper_for_atomic!(AtomicU64 [u64], AtomicI64 [i64]); + /// `AtomicU64` and `AtomicI64` have the same size and bit validity + /// as `u64` and `i64` respectively [1][2]. + /// + /// [1] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicU64.html: + /// + /// This type has the same size and bit validity as the underlying + /// integer type, `u64`. 
+ /// + /// [2] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicI64.html: + /// + /// This type has the same size and bit validity as the underlying + /// integer type, `i64`. + unsafe_impl_transmute_from_for_atomic!(=> AtomicU64 [u64], => AtomicI64 [i64]); } } @@ -533,21 +673,34 @@ mod atomics { use super::*; - impl_traits_for_atomics!(AtomicUsize, AtomicIsize); + impl_traits_for_atomics!(AtomicUsize[usize], AtomicIsize[isize]); impl_known_layout!(T => AtomicPtr); // TODO(#170): Implement `FromBytes` and `IntoBytes` once we implement // those traits for `*mut T`. - impl_for_transparent_wrapper!(T => TryFromBytes for AtomicPtr); - impl_for_transparent_wrapper!(T => FromZeros for AtomicPtr); + impl_for_transmute_from!(T => TryFromBytes for AtomicPtr [UnsafeCell<*mut T>]); safety_comment! { /// SAFETY: - /// This passes an atomic type and that type's native equivalent, as - /// required by the macro safety preconditions. - unsafe_impl_transparent_wrapper_for_atomic!(AtomicUsize [usize], AtomicIsize [isize]); - unsafe_impl_transparent_wrapper_for_atomic!(T => AtomicPtr [*mut T]); + /// `AtomicUsize` and `AtomicIsize` have the same size and bit + /// validity as `usize` and `isize` respectively [1][2]. + /// + /// [1] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicUsize.html: + /// + /// This type has the same size and bit validity as the underlying + /// integer type, `usize`. + /// + /// [2] Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicIsize.html: + /// + /// This type has the same size and bit validity as the underlying + /// integer type, `isize`. + unsafe_impl_transmute_from_for_atomic!(=> AtomicUsize [usize], => AtomicIsize [isize]); + /// SAFETY: + /// Per https://doc.rust-lang.org/1.85.0/std/sync/atomic/struct.AtomicPtr.html: + /// + /// This type has the same size and bit validity as a `*mut T`. 
+ unsafe_impl_transmute_from_for_atomic!(T => AtomicPtr [*mut T]); } } } @@ -577,14 +730,31 @@ safety_comment! { assert_unaligned!(PhantomData<()>, PhantomData, PhantomData); } -impl_for_transparent_wrapper!(T: Immutable => Immutable for Wrapping); -impl_for_transparent_wrapper!(T: TryFromBytes => TryFromBytes for Wrapping); -impl_for_transparent_wrapper!(T: FromZeros => FromZeros for Wrapping); -impl_for_transparent_wrapper!(T: FromBytes => FromBytes for Wrapping); -impl_for_transparent_wrapper!(T: IntoBytes => IntoBytes for Wrapping); -impl_for_transparent_wrapper!(T: Unaligned => Unaligned for Wrapping); +impl_for_transmute_from!(T: TryFromBytes => TryFromBytes for Wrapping[]); +impl_for_transmute_from!(T: FromZeros => FromZeros for Wrapping[]); +impl_for_transmute_from!(T: FromBytes => FromBytes for Wrapping[]); +impl_for_transmute_from!(T: IntoBytes => IntoBytes for Wrapping[]); assert_unaligned!(Wrapping<()>, Wrapping); +safety_comment! { + /// SAFETY: + /// Per [1], `Wrapping` has the same layout as `T`. Since its single + /// field (of type `T`) is public, it would be a breaking change to add or + /// remove fields. Thus, we know that `Wrapping` contains a `T` (as + /// opposed to just having the same size and alignment as `T`) with no pre- + /// or post-padding. Thus, `Wrapping` must have `UnsafeCell`s covering + /// the same byte ranges as `Inner = T`. + /// + /// [1] Per https://doc.rust-lang.org/1.81.0/std/num/struct.Wrapping.html#layout-1: + /// + /// `Wrapping` is guaranteed to have the same layout and ABI as `T` + unsafe_impl!(T: Immutable => Immutable for Wrapping); + /// SAFETY: + /// Per [1] in the preceding safety comment, `Wrapping` has the same + /// alignment as `T`. + unsafe_impl!(T: Unaligned => Unaligned for Wrapping); +} + safety_comment! { /// SAFETY: /// `TryFromBytes` (with no validator), `FromZeros`, `FromBytes`: @@ -592,24 +762,115 @@ safety_comment! 
{ unsafe_impl!(T => TryFromBytes for CoreMaybeUninit); unsafe_impl!(T => FromZeros for CoreMaybeUninit); unsafe_impl!(T => FromBytes for CoreMaybeUninit); + /// SAFETY: + /// `MaybeUninit` has `UnsafeCell`s covering the same byte ranges as + /// `Inner = T`. This is not explicitly documented, but it can be inferred. + /// Per [1], `MaybeUninit` has the same size as `T`. Further, note the + /// signature of `MaybeUninit::assume_init_ref` [2]: + /// + /// pub unsafe fn assume_init_ref(&self) -> &T + /// + /// If the argument `&MaybeUninit` and the returned `&T` had + /// `UnsafeCell`s at different offsets, this would be unsound. Its existence + /// is proof that this is not the case. + /// + /// [1] Per https://doc.rust-lang.org/1.81.0/std/mem/union.MaybeUninit.html#layout-1: + /// + /// `MaybeUninit` is guaranteed to have the same size, alignment, and ABI + /// as `T`. + /// + /// [2] https://doc.rust-lang.org/1.81.0/std/mem/union.MaybeUninit.html#method.assume_init_ref + unsafe_impl!(T: Immutable => Immutable for CoreMaybeUninit); + /// SAFETY: + /// Per [1] in the preceding safety comment, `MaybeUninit` has the same + /// alignment as `T`. + unsafe_impl!(T: Unaligned => Unaligned for CoreMaybeUninit); } - -impl_for_transparent_wrapper!(T: Immutable => Immutable for CoreMaybeUninit); -impl_for_transparent_wrapper!(T: Unaligned => Unaligned for CoreMaybeUninit); assert_unaligned!(CoreMaybeUninit<()>, CoreMaybeUninit); -impl_for_transparent_wrapper!(T: ?Sized + Immutable => Immutable for ManuallyDrop); -impl_for_transparent_wrapper!(T: ?Sized + TryFromBytes => TryFromBytes for ManuallyDrop); -impl_for_transparent_wrapper!(T: ?Sized + FromZeros => FromZeros for ManuallyDrop); -impl_for_transparent_wrapper!(T: ?Sized + FromBytes => FromBytes for ManuallyDrop); -impl_for_transparent_wrapper!(T: ?Sized + IntoBytes => IntoBytes for ManuallyDrop); -impl_for_transparent_wrapper!(T: ?Sized + Unaligned => Unaligned for ManuallyDrop); +safety_comment! 
{ + /// SAFETY: + /// `ManuallyDrop` has the same layout as `T` [1]. This strongly implies, + /// but does not guarantee, that it contains `UnsafeCell`s covering the same + /// byte ranges as in `T`. However, it also implements `Defer` + /// [2], which provides the ability to convert `&ManuallyDrop -> &T`. + /// This, combined with having the same size as `T`, implies that + /// `ManuallyDrop` exactly contains a `T` with the same fields and + /// `UnsafeCell`s covering the same byte ranges, or else the `Deref` impl + /// would permit safe code to obtain different shared references to the same + /// region of memory with different `UnsafeCell` coverage, which would in + /// turn permit interior mutation that would violate the invariants of a + /// shared reference. + /// + /// [1] Per https://doc.rust-lang.org/1.85.0/std/mem/struct.ManuallyDrop.html: + /// + /// `ManuallyDrop` is guaranteed to have the same layout and bit + /// validity as `T` + /// + /// [2] https://doc.rust-lang.org/1.85.0/std/mem/struct.ManuallyDrop.html#impl-Deref-for-ManuallyDrop%3CT%3E + unsafe_impl!(T: ?Sized + Immutable => Immutable for ManuallyDrop); +} + +// SAFETY: See inline safety comment justifying that the implementation of +// `is_bit_valid`is sound. +unsafe impl TryFromBytes for ManuallyDrop { + #[allow(clippy::missing_inline_in_public_items)] + fn only_derive_is_allowed_to_implement_this_trait() {} + + #[inline(always)] + fn is_bit_valid( + candidate: Maybe<'_, Self, A>, + ) -> bool { + // SAFETY: `ManuallyDrop` and `T` have the same size [1], so this + // cast preserves size. It also preserves provenance. 
+ // + // [1] Per https://doc.rust-lang.org/1.85.0/std/mem/struct.ManuallyDrop.html: + // + // `ManuallyDrop` is guaranteed to have the same layout and bit + // validity as `T` + let c: Maybe<'_, T, A> = unsafe { candidate.cast_unsized(cast!()) }; + + // SAFETY: `ManuallyDrop` and `T` have the same bit validity [1], so + // this is a sound implementation of `ManuallyDrop::is_bit_valid`. + // + // [1] Per https://doc.rust-lang.org/1.85.0/std/mem/struct.ManuallyDrop.html: + // + // `ManuallyDrop` is guaranteed to have the same layout and bit + // validity as `T` + ::is_bit_valid(c) + } +} + +impl_for_transmute_from!(T: ?Sized + FromZeros => FromZeros for ManuallyDrop[]); +impl_for_transmute_from!(T: ?Sized + FromBytes => FromBytes for ManuallyDrop[]); +impl_for_transmute_from!(T: ?Sized + IntoBytes => IntoBytes for ManuallyDrop[]); +safety_comment! { + /// SAFETY: + /// `ManuallyDrop` has the same layout as `T` [1], and thus has the same + /// alignment as `T`. + /// + /// [1] Per https://doc.rust-lang.org/nightly/core/mem/struct.ManuallyDrop.html: + /// + /// `ManuallyDrop` is guaranteed to have the same layout and bit + /// validity as `T` + unsafe_impl!(T: ?Sized + Unaligned => Unaligned for ManuallyDrop); +} assert_unaligned!(ManuallyDrop<()>, ManuallyDrop); -impl_for_transparent_wrapper!(T: ?Sized + FromZeros => FromZeros for UnsafeCell); -impl_for_transparent_wrapper!(T: ?Sized + FromBytes => FromBytes for UnsafeCell); -impl_for_transparent_wrapper!(T: ?Sized + IntoBytes => IntoBytes for UnsafeCell); -impl_for_transparent_wrapper!(T: ?Sized + Unaligned => Unaligned for UnsafeCell); +impl_for_transmute_from!(T: ?Sized + FromZeros => FromZeros for UnsafeCell[]); +impl_for_transmute_from!(T: ?Sized + FromBytes => FromBytes for UnsafeCell[]); +impl_for_transmute_from!(T: ?Sized + IntoBytes => IntoBytes for UnsafeCell[]); +safety_comment! 
{ + /// SAFETY: + /// `UnsafeCell` has the same in-memory representation as `T` [1], and + /// thus has the same alignment as `T`. + /// + /// [1] Per https://doc.rust-lang.org/1.81.0/core/cell/struct.UnsafeCell.html#memory-layout: + /// + /// `UnsafeCell` has the same in-memory representation as its inner + /// type `T`. + unsafe_impl!(T: ?Sized + Unaligned => Unaligned for UnsafeCell); +} assert_unaligned!(UnsafeCell<()>, UnsafeCell); // SAFETY: See safety comment in `is_bit_valid` impl. @@ -673,7 +934,7 @@ safety_comment! { /// /// [1] https://doc.rust-lang.org/1.81.0/reference/type-layout.html#array-layout unsafe_impl!(const N: usize, T: Immutable => Immutable for [T; N]); - unsafe_impl!(const N: usize, T: TryFromBytes => TryFromBytes for [T; N]; |c: Maybe<[T; N]>| { + unsafe_impl!(const N: usize, T: TryFromBytes => TryFromBytes for [T; N]; |c| { // Note that this call may panic, but it would still be sound even if it // did. `is_bit_valid` does not promise that it will not panic (in fact, // it explicitly warns that it's a possibility), and we have not @@ -686,7 +947,7 @@ safety_comment! { unsafe_impl!(const N: usize, T: Unaligned => Unaligned for [T; N]); assert_unaligned!([(); 0], [(); 1], [u8; 0], [u8; 1]); unsafe_impl!(T: Immutable => Immutable for [T]); - unsafe_impl!(T: TryFromBytes => TryFromBytes for [T]; |c: Maybe<[T]>| { + unsafe_impl!(T: TryFromBytes => TryFromBytes for [T]; |c| { // SAFETY: Per the reference [1]: // // An array of `[T; N]` has a size of `size_of::() * N` and the @@ -733,13 +994,9 @@ safety_comment! { /// documentation once this PR lands. 
unsafe_impl!(T: ?Sized => Immutable for *const T); unsafe_impl!(T: ?Sized => Immutable for *mut T); - unsafe_impl!(T => TryFromBytes for *const T; |c: Maybe<*const T>| { - pointer::is_zeroed(c) - }); + unsafe_impl!(T => TryFromBytes for *const T; |c| pointer::is_zeroed(c)); unsafe_impl!(T => FromZeros for *const T); - unsafe_impl!(T => TryFromBytes for *mut T; |c: Maybe<*const T>| { - pointer::is_zeroed(c) - }); + unsafe_impl!(T => TryFromBytes for *mut T; |c| pointer::is_zeroed(c)); unsafe_impl!(T => FromZeros for *mut T); } diff --git a/src/lib.rs b/src/lib.rs index 71193d661b..47b2c722df 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -387,7 +387,7 @@ use core::alloc::Layout; // Used by `TryFromBytes::is_bit_valid`. #[doc(hidden)] -pub use crate::pointer::{invariant::BecauseImmutable, Maybe, MaybeAligned, Ptr}; +pub use crate::pointer::{invariant::BecauseImmutable, Maybe, Ptr}; // Used by `KnownLayout`. #[doc(hidden)] pub use crate::layout::*; @@ -805,6 +805,17 @@ pub unsafe trait KnownLayout { // resulting size would not fit in a `usize`. meta.size_for_metadata(Self::LAYOUT) } + + #[doc(hidden)] + #[must_use] + #[inline(always)] + fn cast_from_raw + ?Sized>( + ptr: NonNull

, + ) -> NonNull { + let data = ptr.cast::(); + let meta = P::pointer_to_metadata(ptr.as_ptr()); + Self::raw_from_ptr_len(data, meta) + } } /// The metadata associated with a [`KnownLayout`] type. @@ -2843,29 +2854,43 @@ unsafe fn try_read_from( // We use `from_mut` despite not mutating via `c_ptr` so that we don't need // to add a `T: Immutable` bound. let c_ptr = Ptr::from_mut(&mut candidate); - let c_ptr = c_ptr.transparent_wrapper_into_inner(); // SAFETY: `c_ptr` has no uninitialized sub-ranges because it derived from // `candidate`, which the caller promises is entirely initialized. Since // `candidate` is a `MaybeUninit`, it has no validity requirements, and so - // no values written to `c_ptr` can violate its validity. Since `c_ptr` has - // `Exclusive` aliasing, no mutations may happen except via `c_ptr` so long - // as it is live, so we don't need to worry about the fact that `c_ptr` may - // have more restricted validity than `candidate`. + // no values written to an `Initialized` `c_ptr` can violate its validity. + // Since `c_ptr` has `Exclusive` aliasing, no mutations may happen except + // via `c_ptr` so long as it is live, so we don't need to worry about the + // fact that `c_ptr` may have more restricted validity than `candidate`. let c_ptr = unsafe { c_ptr.assume_validity::() }; + let c_ptr = c_ptr.transmute(); + // Since we don't have `T: KnownLayout`, we hack around that by using + // `Wrapping`, which implements `KnownLayout` even if `T` doesn't. + // // This call may panic. If that happens, it doesn't cause any soundness - // issues, as we have not generated any invalid state which we need to - // fix before returning. + // issues, as we have not generated any invalid state which we need to fix + // before returning. // - // Note that one panic or post-monomorphization error condition is - // calling `try_into_valid` (and thus `is_bit_valid`) with a shared - // pointer when `Self: !Immutable`. 
Since `Self: Immutable`, this panic - // condition will not happen. - if !T::is_bit_valid(c_ptr.forget_aligned()) { + // Note that one panic or post-monomorphization error condition is calling + // `try_into_valid` (and thus `is_bit_valid`) with a shared pointer when + // `Self: !Immutable`. Since `Self: Immutable`, this panic condition will + // not happen. + if !Wrapping::::is_bit_valid(c_ptr.forget_aligned()) { return Err(ValidityError::new(source).into()); } - // SAFETY: We just validated that `candidate` contains a valid `T`. + fn _assert_same_size_and_validity() + where + Wrapping: pointer::TransmuteFrom, + T: pointer::TransmuteFrom, invariant::Valid, invariant::Valid>, + { + } + + _assert_same_size_and_validity::(); + + // SAFETY: We just validated that `candidate` contains a valid + // `Wrapping`, which has the same size and bit validity as `T`, as + // guaranteed by the preceding type assertion. Ok(unsafe { candidate.assume_init() }) } @@ -3552,7 +3577,7 @@ pub unsafe trait FromBytes: FromZeros { { static_assert_dst_is_not_zst!(Self); match Ptr::from_ref(source).try_cast_into_no_leftover::<_, BecauseImmutable>(None) { - Ok(ptr) => Ok(ptr.bikeshed_recall_valid().as_ref()), + Ok(ptr) => Ok(ptr.recall_validity().as_ref()), Err(err) => Err(err.map_src(|src| src.as_ref())), } } @@ -3788,7 +3813,7 @@ pub unsafe trait FromBytes: FromZeros { { static_assert_dst_is_not_zst!(Self); match Ptr::from_mut(source).try_cast_into_no_leftover::<_, BecauseExclusive>(None) { - Ok(ptr) => Ok(ptr.bikeshed_recall_valid().as_mut()), + Ok(ptr) => Ok(ptr.recall_validity().as_mut()), Err(err) => Err(err.map_src(|src| src.as_mut())), } } @@ -4027,7 +4052,7 @@ pub unsafe trait FromBytes: FromZeros { let source = Ptr::from_ref(source); let maybe_slf = source.try_cast_into_no_leftover::<_, BecauseImmutable>(Some(count)); match maybe_slf { - Ok(slf) => Ok(slf.bikeshed_recall_valid().as_ref()), + Ok(slf) => Ok(slf.recall_validity().as_ref()), Err(err) => Err(err.map_src(|s| s.as_ref())), 
} } @@ -4258,7 +4283,9 @@ pub unsafe trait FromBytes: FromZeros { let source = Ptr::from_mut(source); let maybe_slf = source.try_cast_into_no_leftover::<_, BecauseImmutable>(Some(count)); match maybe_slf { - Ok(slf) => Ok(slf.bikeshed_recall_valid().as_mut()), + Ok(slf) => Ok(slf + .recall_validity::<_, (_, (_, (BecauseExclusive, BecauseExclusive)))>() + .as_mut()), Err(err) => Err(err.map_src(|s| s.as_mut())), } } @@ -4716,7 +4743,7 @@ fn ref_from_prefix_suffix( let (slf, prefix_suffix) = Ptr::from_ref(source) .try_cast_into::<_, BecauseImmutable>(cast_type, meta) .map_err(|err| err.map_src(|s| s.as_ref()))?; - Ok((slf.bikeshed_recall_valid().as_ref(), prefix_suffix.as_ref())) + Ok((slf.recall_validity().as_ref(), prefix_suffix.as_ref())) } /// Interprets the given affix of the given bytes as a `&mut Self` without @@ -4728,7 +4755,7 @@ fn ref_from_prefix_suffix( /// If there are insufficient bytes, or if that affix of `source` is not /// appropriately aligned, this returns `Err`. #[inline(always)] -fn mut_from_prefix_suffix( +fn mut_from_prefix_suffix( source: &mut [u8], meta: Option, cast_type: CastType, @@ -4736,7 +4763,7 @@ fn mut_from_prefix_suffix( let (slf, prefix_suffix) = Ptr::from_mut(source) .try_cast_into::<_, BecauseExclusive>(cast_type, meta) .map_err(|err| err.map_src(|s| s.as_mut()))?; - Ok((slf.bikeshed_recall_valid().as_mut(), prefix_suffix.as_mut())) + Ok((slf.recall_validity().as_mut(), prefix_suffix.as_mut())) } /// Analyzes whether a type is [`IntoBytes`]. diff --git a/src/pointer/inner.rs b/src/pointer/inner.rs index b58258da91..93dc7e3af2 100644 --- a/src/pointer/inner.rs +++ b/src/pointer/inner.rs @@ -488,34 +488,6 @@ impl<'a> PtrInner<'a, [u8]> { } } -#[allow(clippy::needless_lifetimes)] -impl<'a, T> PtrInner<'a, T> { - /// Performs an unaligned read of `self`'s referent. - /// - /// # Safety - /// - /// `self` must point to a properly initialized value of type `T`, and - /// reading a copy of `T` must not violate `T`'s safety invariants. 
- /// - /// `self`'s referent must not be concurrently modified during this call. - pub(crate) unsafe fn read_unaligned(self) -> T { - let raw = self.as_non_null().as_ptr(); - // SAFETY: The caller promises that `self` points to a bit-valid `T` and - // that reading a copy of it won't violate `T`'s safety invariants. The - // caller promises that `self`'s referent won't be concurrently modified - // during this operation. - // - // `raw` is valid for reads: - // - `self.as_non_null()` returns a `NonNull`, which is guaranteed to be - // non-null. - // - By invariant on `PtrInner`, `raw` is is either zero-sized or: - // - ...is within bounds of a single allocated object which lives for - // at least `'a`. - // - ...has valid provenance for that object. - unsafe { core::ptr::read_unaligned(raw) } - } -} - #[cfg(test)] mod tests { use super::*; @@ -530,9 +502,17 @@ mod tests { // SAFETY: `i` is in bounds by construction. let (l, r) = unsafe { ptr.split_at(i) }; // SAFETY: Points to a valid value by construction. - let l_sum: usize = l.iter().map(|ptr| unsafe { ptr.read_unaligned() }).sum(); + #[allow(clippy::undocumented_unsafe_blocks)] // Clippy false positive + let l_sum: usize = l + .iter() + .map(|ptr| unsafe { core::ptr::read_unaligned(ptr.as_non_null().as_ptr()) }) + .sum(); // SAFETY: Points to a valid value by construction. - let r_sum: usize = r.iter().map(|ptr| unsafe { ptr.read_unaligned() }).sum(); + #[allow(clippy::undocumented_unsafe_blocks)] // Clippy false positive + let r_sum: usize = r + .iter() + .map(|ptr| unsafe { core::ptr::read_unaligned(ptr.as_non_null().as_ptr()) }) + .sum(); assert_eq!(l_sum, i); assert_eq!(r_sum, N - i); assert_eq!(l_sum + r_sum, N); diff --git a/src/pointer/invariant.rs b/src/pointer/invariant.rs index b5bb3fd6c7..bbe38183b0 100644 --- a/src/pointer/invariant.rs +++ b/src/pointer/invariant.rs @@ -96,9 +96,11 @@ pub trait Reference: Aliasing + Sealed {} /// The `Ptr<'a, T>` adheres to the aliasing rules of a `&'a T`. 
/// /// The referent of a shared-aliased `Ptr` may be concurrently referenced by any -/// number of shared-aliased `Ptr` or `&T` references, and may not be -/// concurrently referenced by any exclusively-aliased `Ptr`s or `&mut T` -/// references. The referent must not be mutated, except via [`UnsafeCell`]s. +/// number of shared-aliased `Ptr` or `&T` references, or by any number of +/// `Ptr` or `&U` references as permitted by `T`'s library safety invariants, +/// and may not be concurrently referenced by any exclusively-aliased `Ptr`s or +/// `&mut` references. The referent must not be mutated, except via +/// [`UnsafeCell`]s, and only when permitted by `T`'s library safety invariants. /// /// [`UnsafeCell`]: core::cell::UnsafeCell pub enum Shared {} @@ -178,7 +180,8 @@ pub enum Initialized {} // required to uphold). unsafe impl Validity for Initialized {} -/// The referent of a `Ptr` is bit-valid for `T`. +/// The referent of a `Ptr` is valid for `T`, upholding bit validity and any +/// library safety invariants. pub enum Valid {} // SAFETY: `Valid`'s validity is well-defined for all `T: ?Sized`, and is not a // function of any property of `T` other than its bit validity. diff --git a/src/pointer/mod.rs b/src/pointer/mod.rs index d2bc8727ab..748ffbde87 100644 --- a/src/pointer/mod.rs +++ b/src/pointer/mod.rs @@ -12,13 +12,15 @@ mod inner; #[doc(hidden)] pub mod invariant; mod ptr; +mod transmute; #[doc(hidden)] -pub use invariant::{BecauseExclusive, BecauseImmutable, Read}; +pub(crate) use transmute::*; #[doc(hidden)] -pub use ptr::Ptr; - -use crate::Unaligned; +pub use { + invariant::{BecauseExclusive, BecauseImmutable, Read}, + ptr::Ptr, +}; /// A shorthand for a maybe-valid, maybe-aligned reference. Used as the argument /// to [`TryFromBytes::is_bit_valid`]. 
@@ -27,55 +29,6 @@ use crate::Unaligned; pub type Maybe<'a, T, Aliasing = invariant::Shared, Alignment = invariant::Unaligned> = Ptr<'a, T, (Aliasing, Alignment, invariant::Initialized)>; -/// A semi-user-facing wrapper type representing a maybe-aligned reference, for -/// use in [`TryFromBytes::is_bit_valid`]. -/// -/// [`TryFromBytes::is_bit_valid`]: crate::TryFromBytes::is_bit_valid -pub type MaybeAligned<'a, T, Aliasing = invariant::Shared, Alignment = invariant::Unaligned> = - Ptr<'a, T, (Aliasing, Alignment, invariant::Valid)>; - -// These methods are defined on the type alias, `MaybeAligned`, so as to bring -// them to the forefront of the rendered rustdoc for that type alias. -impl<'a, T, Aliasing, Alignment> MaybeAligned<'a, T, Aliasing, Alignment> -where - T: 'a + ?Sized, - Aliasing: invariant::Aliasing, - Alignment: invariant::Alignment, -{ - /// Reads the value from `MaybeAligned`. - #[must_use] - #[inline] - pub fn read_unaligned(self) -> T - where - T: Copy, - T: invariant::Read, - { - // SAFETY: By invariant on `MaybeAligned`, `self` contains - // validly-initialized data for `T`. By `T: Read`, we are - // permitted to perform a read of `self`'s referent. - unsafe { self.as_inner().read_unaligned() } - } -} - -impl<'a, T, Aliasing, Alignment> MaybeAligned<'a, T, Aliasing, Alignment> -where - T: 'a + ?Sized, - Aliasing: invariant::Reference, - Alignment: invariant::Alignment, -{ - /// Views the value as an aligned reference. - /// - /// This is only available if `T` is [`Unaligned`]. - #[must_use] - #[inline] - pub fn unaligned_as_ref(self) -> &'a T - where - T: Unaligned, - { - self.bikeshed_recall_aligned().as_ref() - } -} - /// Checks if the referent is zeroed. 
pub(crate) fn is_zeroed(ptr: Ptr<'_, T, I>) -> bool where diff --git a/src/pointer/ptr.rs b/src/pointer/ptr.rs index a3d487ce54..051d8fbd36 100644 --- a/src/pointer/ptr.rs +++ b/src/pointer/ptr.rs @@ -12,9 +12,12 @@ use core::{ ptr::NonNull, }; -use super::{inner::PtrInner, invariant::*}; use crate::{ - util::{AlignmentVariance, Covariant, TransparentWrapper, ValidityVariance}, + pointer::{ + inner::PtrInner, + invariant::*, + transmute::{MutationCompatible, SizeEq, TransmuteFromPtr}, + }, AlignmentError, CastError, CastType, KnownLayout, SizeError, TryFromBytes, ValidityError, }; @@ -383,53 +386,80 @@ mod _conversions { } } - /// `Ptr<'a, T = Wrapper>` → `Ptr<'a, U>` - impl<'a, T, I> Ptr<'a, T, I> + /// `Ptr<'a, T>` → `Ptr<'a, U>` + impl<'a, T: ?Sized, I> Ptr<'a, T, I> where - T: 'a + TransparentWrapper + ?Sized, I: Invariants, { - /// Converts `self` to a transparent wrapper type into a `Ptr` to the - /// wrapped inner type. - pub(crate) fn transparent_wrapper_into_inner( - self, - ) -> Ptr< - 'a, - T::Inner, - ( - I::Aliasing, - >::Applied, - >::Applied, - ), - > { + pub(crate) fn transmute(self) -> Ptr<'a, U, (I::Aliasing, Unaligned, V)> + where + T: KnownLayout, + V: Validity, + U: TransmuteFromPtr + + KnownLayout + + SizeEq + + ?Sized, + { // SAFETY: - // - By invariant on `TransparentWrapper::cast_into_inner`: - // - This cast preserves address and referent size, and thus the - // returned pointer addresses the same bytes as `p` - // - This cast preserves provenance - // - By invariant on `TransparentWrapper`, `T` and `T::Inner` have `UnsafeCell`s at the same - // byte ranges. Since `p` and the returned pointer address the - // same byte range, they refer to `UnsafeCell`s at the same byte - // ranges. - // - By invariant on `TransparentWrapper`, since `self` satisfies - // the validity invariant `I::Validity`, the returned pointer (of - // type `T::Inner`) satisfies the given "applied" validity - // invariant. 
- let ptr = unsafe { self.transmute_unchecked(|p| T::cast_into_inner(p)) }; - // SAFETY: By invariant on `TransparentWrapper`, since `self` - // satisfies the alignment invariant `I::Alignment`, the returned - // pointer (of type `T::Inner`) satisfies the given "applied" - // alignment invariant. - unsafe { ptr.assume_alignment() } + // - This cast preserves address and provenance + // - `U: SizeEq` guarantees that this cast preserves the number + // of bytes in the referent + // - If aliasing is `Shared`, then by `U: TransmuteFromPtr`, at + // least one of the following holds: + // - `T: Immutable` and `U: Immutable`, in which case it is + // trivially sound for shared code to operate on a `&T` and `&U` + // at the same time, as neither can perform interior mutation + // - It is directly guaranteed that it is sound for shared code to + // operate on these references simultaneously + // - By `U: TransmuteFromPtr`, it is + // sound to perform this transmute. + unsafe { self.transmute_unchecked(|t: NonNull| U::cast_from_raw(t)) } + } + + pub(crate) fn transmute_sized(self) -> Ptr<'a, U, (I::Aliasing, Unaligned, V)> + where + T: Sized, + V: Validity, + U: TransmuteFromPtr + SizeEq, + { + // SAFETY: + // - This cast preserves address and provenance + // - `U: SizeEq` guarantees that this cast preserves the number + // of bytes in the referent + // - If aliasing is `Shared`, then by `U: TransmuteFromPtr`, at + // least one of the following holds: + // - `T: Immutable` and `U: Immutable`, in which case it is + // trivially sound for shared code to operate on a `&T` and `&U` + // at the same time, as neither can perform interior mutation + // - It is directly guaranteed that it is sound for shared code to + // operate on these references simultaneously + // - By `U: TransmuteFromPtr`, it is + // sound to perform this transmute. 
+ unsafe { self.transmute_unchecked(cast!()) } + } + + #[doc(hidden)] + #[inline(always)] + #[must_use] + pub fn recall_validity(self) -> Ptr<'a, T, (I::Aliasing, I::Alignment, V)> + where + V: Validity, + T: TransmuteFromPtr, + { + // SAFETY: + // - This cast is a no-op, and so trivially preserves address, + // referent size, and provenance + // - It is trivially sound to have multiple `&T` referencing the same + // referent simultaneously + // - By `T: TransmuteFromPtr`, it is + // sound to perform this transmute. + let ptr = unsafe { self.transmute_unchecked(|t| t) }; + // SAFETY: `self` and `ptr` have the same address and referent type. + // Therefore, if `self` satisfies `I::Alignment`, then so does + // `ptr`. + unsafe { ptr.assume_alignment::() } } - } - /// `Ptr<'a, T>` → `Ptr<'a, U>` - impl<'a, T: ?Sized, I> Ptr<'a, T, I> - where - I: Invariants, - { /// Casts to a different (unsized) target type without checking interior /// mutability. /// @@ -443,8 +473,9 @@ mod _conversions { /// following properties: /// - `u` addresses a subset of the bytes addressed by `p` /// - `u` has the same provenance as `p` - /// - If `I::Aliasing` is [`Shared`], `UnsafeCell`s in `*u` must exist - /// at ranges identical to those at which `UnsafeCell`s exist in `*p` + /// - If `I::Aliasing` is [`Shared`], it must not be possible for safe + /// code, operating on a `&T` and `&U` with the same referent + /// simultaneously, to cause undefined behavior /// - It is sound to transmute a pointer of type `T` with aliasing /// `I::Aliasing` and validity `I::Validity` to a pointer of type `U` /// with aliasing `I::Aliasing` and validity `V`. This is a subtle @@ -452,6 +483,12 @@ mod _conversions { /// `I::Aliasing`, `I::Validity`, and `V`, and may depend upon the /// presence, absence, or specific location of `UnsafeCell`s in `T` /// and/or `U`. See [`Validity`] for more details. 
+ /// + /// `transmute_unchecked` guarantees that the pointer passed to `cast` + /// will reference a byte sequence which is either contained inside a + /// single allocated object or is zero sized. In either case, this means + /// that its size will fit in an `isize` and it will not wrap around the + /// address space. #[doc(hidden)] #[inline] pub unsafe fn transmute_unchecked( self, ) -> Ptr<'a, U, (I::Aliasing, Unaligned, V)> where V: Validity, - F: FnOnce(*mut T) -> *mut U, + F: FnOnce(NonNull) -> NonNull, { - let ptr = cast(self.as_inner().as_non_null().as_ptr()); - - // SAFETY: Caller promises that `cast` returns a pointer whose - // address is in the range of `self.as_inner().as_non_null()`'s referent. By - // invariant, none of these addresses are null. - let ptr = unsafe { NonNull::new_unchecked(ptr) }; + // SAFETY: By invariant on `self`, `self.as_inner().as_non_null()` + // either references a zero-sized byte range, or else it references + // a byte range contained inside of a single allocated object. + let ptr = cast(self.as_inner().as_non_null()); // SAFETY: // @@ -500,21 +535,14 @@ mod _conversions { // read or modify the referent for the lifetime `'a`. // - `Shared`: Since `self` has aliasing `Shared`, we know that // no other code may mutate the referent during the lifetime - // `'a`, except via `UnsafeCell`s. The caller promises that - // `UnsafeCell`s cover the same byte ranges in `*self` and - // `*ptr`. For each byte in the referent, there are two cases: - // - If the byte is not covered by an `UnsafeCell` in `*ptr`, - // then it is not covered in `*self`. By invariant on `self`, - // it will not be mutated during `'a`, as required by the - // constructed pointer. Similarly, the returned pointer will - // not permit any mutations to these locations, as required - // by the invariant on `self`. 
- // - If the byte is covered by an `UnsafeCell` in `*ptr`, then - // the returned pointer's invariants do not assume that the - // byte will not be mutated during `'a`. While the returned - // pointer will permit mutation of this byte during `'a`, by - // invariant on `self`, no other code assumes that this will - // not happen. + // `'a`, except via `UnsafeCell`s, and except as permitted by + // `T`'s library safety invariants. The caller promises that + // any safe operations which can be permitted on a `&T` and a + // `&U` simultaneously must be sound. Thus, no operations on a + // `&U` could violate `&T`'s library safety invariants, and + // vice-versa. Since any mutation via shared references outside + // of `UnsafeCell`s is unsound, this must be impossible using + // `&T` and `&U`. // - `Inaccessible`: There are no restrictions we need to uphold. // 7. `ptr` trivially satisfies the alignment invariant `Unaligned`. // 8. The caller promises that `ptr` conforms to the validity @@ -552,15 +580,47 @@ mod _conversions { // validity of the other. let ptr = unsafe { #[allow(clippy::as_conversions)] - self.transmute_unchecked(|p: *mut T| p as *mut crate::Unalign) + self.transmute_unchecked(NonNull::cast::>) }; ptr.bikeshed_recall_aligned() } } + + impl<'a, T, I> Ptr<'a, T, I> + where + T: ?Sized, + I: Invariants, + I::Aliasing: Reference, + { + /// Reads the referent. + #[must_use] + #[inline] + pub fn read_unaligned(self) -> T + where + T: Copy, + T: Read, + { + (*self.into_unalign().as_ref()).into_inner() + } + + /// Views the value as an aligned reference. + /// + /// This is only available if `T` is [`Unaligned`]. + #[must_use] + #[inline] + pub fn unaligned_as_ref(self) -> &'a T + where + T: crate::Unaligned, + { + self.bikeshed_recall_aligned().as_ref() + } + } } /// State transitions between invariants. 
mod _transitions { + use crate::pointer::transmute::TryTransmuteFromPtr; + use super::*; impl<'a, T, I> Ptr<'a, T, I> @@ -813,23 +873,6 @@ mod _transitions { unsafe { self.assume_initialized() } } - /// Recalls that `self`'s referent is bit-valid for `T`. - #[doc(hidden)] - #[must_use] - #[inline] - // TODO(#859): Reconsider the name of this method before making it - // public. - pub fn bikeshed_recall_valid(self) -> Ptr<'a, T, (I::Aliasing, I::Alignment, Valid)> - where - T: crate::FromBytes, - I: Invariants, - { - // TODO(#1866): Fix this unsoundness. - - // SAFETY: This is unsound! - unsafe { self.assume_valid() } - } - /// Checks that `self`'s referent is validly initialized for `T`, /// returning a `Ptr` with `Valid` on success. /// @@ -843,11 +886,13 @@ /// On error, unsafe code may rely on this method's returned /// `ValidityError` containing `self`. #[inline] - pub(crate) fn try_into_valid( + pub(crate) fn try_into_valid( mut self, ) -> Result, ValidityError> where - T: TryFromBytes + Read, + T: TryFromBytes + + Read + + TryTransmuteFromPtr, I::Aliasing: Reference, I: Invariants, { @@ -856,11 +901,11 @@ // fix before returning. if T::is_bit_valid(self.reborrow().forget_aligned()) { // SAFETY: If `T::is_bit_valid`, code may assume that `self` - // contains a bit-valid instance of `Self`. - // - // TODO(#1866): This is unsound! The returned `Ptr` may permit - // writing referents which do not satisfy the `Initialized` - // validity invariant of `self`. + // contains a bit-valid instance of `T`. By `T: + // TryTransmuteFromPtr`, so + // long as `self`'s referent conforms to the `Valid` validity + // for `T` (which we just confirmed), then this transmute is + // sound. 
Ok(unsafe { self.assume_valid() }) } else { Err(ValidityError::new(self)) @@ -900,11 +945,18 @@ mod _casts { /// following properties: /// - `u` addresses a subset of the bytes addressed by `p` /// - `u` has the same provenance as `p` - /// - If `I::Aliasing` is [`Shared`], `UnsafeCell`s in `*u` must exist - /// at ranges identical to those at which `UnsafeCell`s exist in `*p` + /// - If `I::Aliasing` is [`Shared`], it must not be possible for safe + /// code, operating on a `&T` and `&U` with the same referent + /// simultaneously, to cause undefined behavior + /// + /// `cast_unsized_unchecked` guarantees that the pointer passed to + /// `cast` will reference a byte sequence which is either contained + /// inside a single allocated object or is zero sized. In either case, + /// this means that its size will fit in an `isize` and it will not wrap + /// around the address space. #[doc(hidden)] #[inline] - pub unsafe fn cast_unsized_unchecked *mut U>( + pub unsafe fn cast_unsized_unchecked) -> NonNull>( self, cast: F, ) -> Ptr<'a, U, (I::Aliasing, Unaligned, I::Validity)> @@ -916,9 +968,9 @@ mod _casts { // satisfies: // - `u` addresses a subset of the bytes addressed by `p` // - `u` has the same provenance as `p` - // - If `I::Aliasing` is [`Shared`], `UnsafeCell`s in `*u` must - // exist at ranges identical to those at which `UnsafeCell`s - // exist in `*p` + // - If `I::Aliasing` is [`Shared`], it must not be possible for + // safe code, operating on a `&T` and `&U` with the same + // referent simultaneously, to cause undefined behavior // - By `U: CastableFrom`, // `I::Validity` is either `Uninit` or `Initialized`. In both // cases, the bit validity `I::Validity` has the same semantics @@ -927,6 +979,11 @@ mod _casts { // (_, _, I::Validity)>` are identical. As a consequence, neither // `self` nor the returned `Ptr` can be used to write values which // are invalid for the other. 
+ // + // `transmute_unchecked` guarantees that it will only pass pointers + // to `cast` which either reference a zero-sized byte range or + // reference a byte range which is entirely contained inside of an + // allocated object. unsafe { self.transmute_unchecked(cast) } } @@ -940,20 +997,23 @@ mod _casts { /// - `u` has the same provenance as `p` #[doc(hidden)] #[inline] - pub unsafe fn cast_unsized( + pub unsafe fn cast_unsized( self, cast: F, ) -> Ptr<'a, U, (I::Aliasing, Unaligned, I::Validity)> where - T: Read, - U: 'a + ?Sized + Read + CastableFrom, - F: FnOnce(*mut T) -> *mut U, + T: MutationCompatible, + U: 'a + ?Sized + CastableFrom, + F: FnOnce(NonNull) -> NonNull, { - // SAFETY: Because `T` and `U` both implement `Read`, - // either: - // - `I::Aliasing` is `Exclusive` - // - `T` and `U` are both `Immutable`, in which case they trivially - // contain `UnsafeCell`s at identical locations + // SAFETY: Because `T: MutationCompatible`, one + // of the following holds: + // - `T: Read` and `U: Read`, in which + // case one of the following holds: + // - `I::Aliasing` is `Exclusive` + // - `T` and `U` are both `Immutable` + // - It is sound for safe code to operate on `&T` and `&U` with the + // same referent simultaneously // // The caller promises all other safety preconditions. unsafe { self.cast_unsized_unchecked(cast) } @@ -988,14 +1048,16 @@ mod _casts { // returned pointer addresses the same bytes as `p` // - `slice_from_raw_parts_mut` and `.cast` both preserve provenance let ptr: Ptr<'a, [u8], _> = unsafe { - self.cast_unsized(|p: *mut T| { - #[allow(clippy::as_conversions)] - core::ptr::slice_from_raw_parts_mut(p.cast::(), bytes) + self.cast_unsized(|p: NonNull| { + let ptr = core::ptr::slice_from_raw_parts_mut(p.cast::().as_ptr(), bytes); + // SAFETY: `ptr` has the same address as `p`, which is + // non-null. 
+ core::ptr::NonNull::new_unchecked(ptr) }) }; let ptr = ptr.bikeshed_recall_aligned(); - ptr.bikeshed_recall_valid() + ptr.recall_validity() } } @@ -1214,7 +1276,7 @@ mod _casts { // inner type `T`. A consequence of this guarantee is that it is // possible to convert between `T` and `UnsafeCell`. #[allow(clippy::as_conversions)] - let ptr = unsafe { self.transmute_unchecked(|p| p as *mut T) }; + let ptr = unsafe { self.transmute_unchecked(cast!()) }; // SAFETY: `UnsafeCell` has the same alignment as `T` [1], // and so if `self` is guaranteed to be aligned, then so is the @@ -1321,10 +1383,12 @@ mod tests { }; // SAFETY: The bytes in `slf` must be initialized. - unsafe fn validate_and_get_len( + unsafe fn validate_and_get_len< + T: ?Sized + KnownLayout + FromBytes + Immutable, + >( slf: Ptr<'_, T, (Shared, Aligned, Initialized)>, ) -> usize { - let t = slf.bikeshed_recall_valid().as_ref(); + let t = slf.recall_validity().as_ref(); let bytes = { let len = mem::size_of_val(t); diff --git a/src/pointer/transmute.rs b/src/pointer/transmute.rs new file mode 100644 index 0000000000..d822f46300 --- /dev/null +++ b/src/pointer/transmute.rs @@ -0,0 +1,441 @@ +// Copyright 2025 The Fuchsia Authors +// +// Licensed under a BSD-style license , Apache License, Version 2.0 +// , or the MIT +// license , at your option. +// This file may not be copied, modified, or distributed except according to +// those terms. + +use core::{ + cell::UnsafeCell, + mem::{ManuallyDrop, MaybeUninit}, + num::Wrapping, +}; + +use crate::{pointer::invariant::*, FromBytes, Immutable, IntoBytes, Unalign}; + +/// Transmutations which are sound to attempt, conditional on validating the bit +/// validity of the destination type. +/// +/// If a `Ptr` transmutation is `TryTransmuteFromPtr`, then it is sound to +/// perform that transmutation so long as some additional mechanism is used to +/// validate that the referent is bit-valid for the destination type. 
That +/// validation mechanism could be a type bound (such as `TransmuteFrom`) or a +/// runtime validity check. +/// +/// # Safety +/// +/// ## Post-conditions +/// +/// Given `Dst: TryTransmuteFromPtr`, callers may assume the +/// following: +/// +/// Given `src: Ptr<'a, Src, (A, _, SV)>`, if the referent of `src` is +/// `DV`-valid for `Dst`, then it is sound to transmute `src` into `dst: Ptr<'a, +/// Dst, (A, Unaligned, DV)>` by preserving pointer address and metadata. +/// +/// ## Pre-conditions +/// +/// Given `src: Ptr` and `dst: Ptr`, +/// `Dst: TryTransmuteFromPtr` is sound if all of the +/// following hold: +/// - Forwards transmutation: Either of the following hold: +/// - So long as `dst` is active, no mutation of `dst`'s referent is allowed +/// except via `dst` itself +/// - The set of `DV`-valid `Dst`s is a superset of the set of `SV`-valid +/// `Src`s +/// - Reverse transmutation: Either of the following hold: +/// - `dst` does not permit mutation of its referent +/// - The set of `DV`-valid `Dst`s is a subset of the set of `SV`-valid `Src`s +/// - No safe code, given access to `src` and `dst`, can cause undefined +/// behavior: Any of the following hold: +/// - `A` is `Exclusive` +/// - `Src: Immutable` and `Dst: Immutable` +/// - It is sound for shared code to operate on a `&Src` and `&Dst` which +/// reference the same byte range at the same time +/// +/// ## Proof +/// +/// Given: +/// - `src: Ptr<'a, Src, (A, _, SV)>` +/// - `src`'s referent is `DV`-valid for `Dst` +/// - `Dst: SizeEq` +/// +/// We are trying to prove that it is sound to perform a pointer address- and +/// metadata-preserving transmute from `src` to a `dst: Ptr<'a, Dst, (A, +/// Unaligned, DV)>`. We need to prove that such a transmute does not violate +/// any of `src`'s invariants, and that it satisfies all invariants of the +/// destination `Ptr` type. +/// +/// First, all of `src`'s `PtrInner` invariants are upheld. 
`src`'s address and +/// metadata are unchanged, so: +/// - If its referent is not zero sized, then it is still derived from a valid +/// allocation, `A` +/// - If its referent is not zero sized, then it still has valid provenance for +/// `A` +/// - If its referent is not zero sized, then it addresses a byte range entirely +/// contained in `A` +/// - The length of its referent fits in an `isize` +/// - Its referent does not wrap around the address space +/// - If its referent is not zero sized, `A` is guaranteed to live for at least +/// `'a` +/// +/// Since `Dst: SizeEq`, and since `dst` has the same address and metadata +/// as `src`, `dst` addresses the same byte range as `src`. `dst` also has the +/// same lifetime as `src`. Therefore, all of the `PtrInner` invariants +/// mentioned above also hold for `dst`. +/// +/// Second, since `src`'s address is unchanged, it still satisfies its +/// alignment. Since `dst`'s alignment is `Unaligned`, it trivially satisfies +/// its alignment. +/// +/// Third, aliasing is either `Exclusive` or `Shared`: +/// - If it is `Exclusive`, then both `src` and `dst` satisfy `Exclusive` +/// aliasing trivially: since `src` and `dst` have the same lifetime, `src` is +/// inaccessible so long as `dst` is alive, and no other live `Ptr`s or +/// references may reference the same referent. +/// - If it is `Shared`, then either: +/// - `Src: Immutable` and `Dst: Immutable`, and so `UnsafeCell`s trivially +/// cover the same byte ranges in both types. +/// - It is explicitly sound for safe code to operate on a `&Src` and a `&Dst` +/// pointing to the same byte range at the same time. +/// +/// Fourth, `src`'s validity is satisfied. By invariant, `src`'s referent began +/// as an `SV`-valid `Src`. It is guaranteed to remain so, as either of the +/// following hold: +/// - `dst` does not permit mutation of its referent. +/// - The set of `DV`-valid `Dst`s is a superset of the set of `SV`-valid +/// `Src`s. 
Thus, any value written via `dst` is guaranteed to be `SV`-valid +/// for `Src`. +/// +/// Fifth, `dst`'s validity is satisfied. It is a given of this proof that the +/// referent is `DV`-valid for `Dst`. It is guaranteed to remain so, as either +/// of the following hold: +/// - So long as `dst` is active, no mutation of the referent is allowed except +/// via `dst` itself. +/// - The set of `DV`-valid `Dst`s is a superset of the set of `SV`-valid +/// `Src`s. Thus, any value written via `src` is guaranteed to be a `DV`-valid +/// `Dst`. +pub unsafe trait TryTransmuteFromPtr: + SizeEq +{ +} + +#[allow(missing_copy_implementations, missing_debug_implementations)] +pub enum BecauseMutationCompatible {} + +// SAFETY: +// - Forwards transmutation: By `Dst: MutationCompatible`, we +// know that at least one of the following holds: +// - So long as `dst: Ptr` is active, no mutation of its referent is +// allowed except via `dst` itself if either of the following hold: +// - Aliasing is `Exclusive`, in which case, so long as the `Dst` `Ptr` +// exists, no mutation is permitted except via that `Ptr` +// - Aliasing is `Shared`, `Src: Immutable`, and `Dst: Immutable`, in which +// case no mutation is possible via either `Ptr` +// - `Dst: TransmuteFrom`, and so the set of `DV`-valid `Dst`s is +// a superset of the set of `SV`-valid `Src`s +// - Reverse transmutation: `Src: TransmuteFrom`, and so the set of +// `DV`-valid `Dst`s is a subset of the set of `SV`-valid `Src`s +// - No safe code, given access to `src` and `dst`, can cause undefined +// behavior: By `Dst: MutationCompatible`, at least one of +// the following holds: +// - `A` is `Exclusive` +// - `Src: Immutable` and `Dst: Immutable` +// - `Dst: InvariantsEq`, which guarantees that `Src` and `Dst` have the +// same invariants, and have `UnsafeCell`s covering the same byte ranges +unsafe impl + TryTransmuteFromPtr for Dst +where + A: Aliasing, + SV: Validity, + DV: Validity, + Src: TransmuteFrom + ?Sized, + Dst: 
MutationCompatible + SizeEq + ?Sized, +{ +} + +// SAFETY: +// - Forwards transmutation: Since aliasing is `Shared` and `Src: Immutable`, +// `src` does not permit mutation of its referent. +// - Reverse transmutation: Since aliasing is `Shared` and `Dst: Immutable`, +// `dst` does not permit mutation of its referent. +// - No safe code, given access to `src` and `dst`, can cause undefined +// behavior: `Src: Immutable` and `Dst: Immutable` +unsafe impl TryTransmuteFromPtr for Dst +where + SV: Validity, + DV: Validity, + Src: Immutable + ?Sized, + Dst: Immutable + SizeEq + ?Sized, +{ +} + +/// Denotes that `src: Ptr` and `dst: Ptr`, +/// referencing the same referent at the same time, cannot be used by safe code +/// to break library safety invariants of `Src` or `Self`. +/// +/// # Safety +/// +/// At least one of the following must hold: +/// - `Src: Read` and `Self: Read` +/// - `Self: InvariantsEq`, and, for some `V`: +/// - `Dst: TransmuteFrom` +/// - `Src: TransmuteFrom` +pub unsafe trait MutationCompatible {} + +#[allow(missing_copy_implementations, missing_debug_implementations)] +pub enum BecauseRead {} + +// SAFETY: `Src: Read` and `Dst: Read`. +unsafe impl + MutationCompatible for Dst +where + Src: Read, + Dst: Read, +{ +} + +/// Denotes that two types have the same invariants. +/// +/// # Safety +/// +/// It is sound for safe code to operate on a `&T` and a `&Self` pointing to the +/// same referent at the same time - no such safe code can cause undefined +/// behavior. +pub unsafe trait InvariantsEq {} + +// SAFETY: Trivially sound to have multiple `&T` pointing to the same referent. +unsafe impl InvariantsEq for T {} + +// SAFETY: `Dst: InvariantsEq + TransmuteFrom`, and `Src: +// TransmuteFrom`. +unsafe impl + MutationCompatible for Dst +where + Src: TransmuteFrom, + Dst: TransmuteFrom + InvariantsEq, +{ +} + +pub(crate) enum BecauseInvariantsEq {} + +macro_rules! 
unsafe_impl_invariants_eq { + ($tyvar:ident => $t:ty, $u:ty) => { + unsafe impl<$tyvar> InvariantsEq<$t> for $u {} + unsafe impl<$tyvar> InvariantsEq<$u> for $t {} + }; +} + +// SAFETY: See bounds. +unsafe impl SizeEq> for Wrapping +where + T: SizeEq>, + T: SizeEq>, +{ +} +// SAFETY: See bounds. +unsafe impl SizeEq> for MaybeUninit +where + T: SizeEq>, + T: SizeEq>, +{ +} + +// SAFETY: `ManuallyDrop` has the same size and bit validity as `T` [1], and +// implements `Deref` [2]. Thus, it is already possible for safe +// code to obtain a `&T` and a `&ManuallyDrop` to the same referent at the +// same time. +// +// [1] Per https://doc.rust-lang.org/1.81.0/std/mem/struct.ManuallyDrop.html: +// +// `ManuallyDrop` is guaranteed to have the same layout and bit +// validity as `T` +// +// [2] https://doc.rust-lang.org/1.81.0/std/mem/struct.ManuallyDrop.html#impl-Deref-for-ManuallyDrop%3CT%3E +unsafe impl InvariantsEq for ManuallyDrop {} +// SAFETY: See previous safety comment. +unsafe impl InvariantsEq> for T {} + +/// Transmutations which are always sound. +/// +/// `TransmuteFromPtr` is a shorthand for [`TryTransmuteFromPtr`] and +/// [`TransmuteFrom`]. +/// +/// # Safety +/// +/// `Dst: TransmuteFromPtr` is equivalent to `Dst: +/// TryTransmuteFromPtr + TransmuteFrom`. +pub unsafe trait TransmuteFromPtr: + TryTransmuteFromPtr + TransmuteFrom +{ +} + +// SAFETY: The `where` bounds are equivalent to the safety invariant on +// `TransmuteFromPtr`. +unsafe impl + TransmuteFromPtr for Dst +where + Dst: TransmuteFrom + TryTransmuteFromPtr, +{ +} + +/// Denotes that any `SV`-valid `Src` may soundly be transmuted into a +/// `DV`-valid `Self`. +/// +/// # Safety +/// +/// The set of bit patterns allowed to appear in the referent of a `Ptr` must be a subset of the set allowed to appear in the referent of a +/// `Ptr`. 
+pub unsafe trait TransmuteFrom: SizeEq {} + +/// # Safety +/// +/// `T` and `Self` must have the same vtable kind (`Sized`, slice DST, `dyn`, +/// etc) and have the same size. In particular: +/// - If `T: Sized` and `Self: Sized`, then their sizes must be equal +/// - If `T: ?Sized` and `Self: ?Sized`, then it must be the case that, given +/// any `t: *mut T`, `t as *mut Self` produces a pointer which addresses the +/// same number of bytes as `t`. +pub unsafe trait SizeEq {} + +// SAFETY: `T` trivially has the same size and vtable kind as `T`, and since +// pointer `*mut T -> *mut T` pointer casts are no-ops, this cast trivially +// preserves referent size (when `T: ?Sized`). +unsafe impl SizeEq for T {} + +// SAFETY: Since `Src: IntoBytes`, the set of valid `Src`'s is the set of +// initialized bit patterns, which is exactly the set allowed in the referent of +// any `Initialized` `Ptr`. +unsafe impl TransmuteFrom for Dst +where + Src: IntoBytes + ?Sized, + Dst: SizeEq + ?Sized, +{ +} + +// SAFETY: Since `Dst: FromBytes`, any initialized bit pattern may appear in the +// referent of a `Ptr`. This is exactly equal to the set of +// bit patterns which may appear in the referent of any `Initialized` `Ptr`. +unsafe impl TransmuteFrom for Dst +where + Src: ?Sized, + Dst: FromBytes + SizeEq + ?Sized, +{ +} + +// TODO(#2354): This seems like a smell - the soundness of this bound has +// nothing to do with `Src` or `Dst` - we're basically just saying `[u8; N]` is +// transmutable into `[u8; N]`. + +// SAFETY: The set of allowed bit patterns in the referent of any `Initialized` +// `Ptr` is the same regardless of referent type. +unsafe impl TransmuteFrom for Dst +where + Src: ?Sized, + Dst: SizeEq + ?Sized, +{ +} + +// TODO(#2354): This seems like a smell - the soundness of this bound has +// nothing to do with `Dst` - we're basically just saying that any type is +// transmutable into `MaybeUninit<[u8; N]>`. 
+ +// SAFETY: A `Dst` with validity `Uninit` permits any byte sequence, and +// therefore can be transmuted from any value. +unsafe impl TransmuteFrom for Dst +where + Src: ?Sized, + Dst: SizeEq + ?Sized, + V: Validity, +{ +} + +safety_comment! { + /// SAFETY: + /// - `ManuallyDrop` has the same size as `T` [1] + /// - `ManuallyDrop` has the same validity as `T` [1] + /// + /// [1] Per https://doc.rust-lang.org/1.81.0/std/mem/struct.ManuallyDrop.html: + /// + /// `ManuallyDrop` is guaranteed to have the same layout and bit + /// validity as `T` + unsafe_impl_for_transparent_wrapper!(T: ?Sized => ManuallyDrop); + + /// SAFETY: + /// - `Unalign` promises to have the same size as `T`. + /// - `Unalign` promises to have the same validity as `T`. + unsafe_impl_for_transparent_wrapper!(T => Unalign); + /// SAFETY: + /// `Unalign` promises to have the same size and validity as `T`. Given + /// `u: &Unalign`, it is already possible to obtain `let t = + /// u.try_deref().unwrap()`. Because `Unalign` has the same size as `T`, + /// the returned `&T` must point to the same referent as `u`, and thus it + /// must be sound for these two references to exist at the same time since + /// it's already possible for safe code to get into this state. + unsafe_impl_invariants_eq!(T => T, Unalign); + + /// SAFETY: + /// - `Wrapping` has the same size as `T` [1]. + /// - `Wrapping` has only one field, which is `pub` [2]. We are also + /// guaranteed per that `Wrapping` has the same layout as `T` [1]. The + /// only way for both of these to be true simultaneously is for + /// `Wrapping` to have the same bit validity as `T`. In particular, in + /// order to change the bit validity, one of the following would need to + /// happen: + /// - `Wrapping` could change its `repr`, but this would violate the + /// layout guarantee. + /// - `Wrapping` could add or change its fields, but this would be a + /// stability-breaking change. 
+    ///
+    /// [1] Per https://doc.rust-lang.org/1.85.0/core/num/struct.Wrapping.html#layout-1:
+    ///
+    ///     `Wrapping` is guaranteed to have the same layout and ABI as `T`.
+    ///
+    /// [2] Definition from https://doc.rust-lang.org/1.85.0/core/num/struct.Wrapping.html:
+    ///
+    /// ```
+    /// #[repr(transparent)]
+    /// pub struct Wrapping(pub T);
+    /// ```
+    unsafe_impl_for_transparent_wrapper!(T => Wrapping);
+    /// SAFETY:
+    /// By the preceding safety proof, `Wrapping` and `T` have the same
+    /// layout and bit validity. Since a `Wrapping`'s `T` field is `pub`,
+    /// given `w: &Wrapping`, it's possible to do `let t = &w.t`, which means
+    /// that it's already possible for safe code to obtain a `&Wrapping` and
+    /// a `&T` pointing to the same referent at the same time. Thus, this must
+    /// be sound.
+    unsafe_impl_invariants_eq!(T => T, Wrapping);
+
+    /// SAFETY:
+    /// - `UnsafeCell` has the same size as `T` [1].
+    /// - Per [1], `UnsafeCell` has the same bit validity as `T`. Technically
+    ///   the term "representation" doesn't guarantee this, but the subsequent
+    ///   sentence in the documentation makes it clear that this is the
+    ///   intention.
+    ///
+    /// [1] Per https://doc.rust-lang.org/1.81.0/core/cell/struct.UnsafeCell.html#memory-layout:
+    ///
+    ///     `UnsafeCell` has the same in-memory representation as its inner type
+    ///     `T`. A consequence of this guarantee is that it is possible to convert
+    ///     between `T` and `UnsafeCell`.
+    unsafe_impl_for_transparent_wrapper!(T: ?Sized => UnsafeCell);
+}
+
+// SAFETY: `MaybeUninit` has no validity requirements. Currently this is not
+// explicitly guaranteed, but it's obvious from `MaybeUninit`'s documentation
+// that this is the intention:
+// https://doc.rust-lang.org/1.85.0/core/mem/union.MaybeUninit.html
+unsafe impl TransmuteFrom for MaybeUninit {}
+
+// SAFETY: `MaybeUninit` has the same size as `T` [1].
+// +// [1] Per https://doc.rust-lang.org/1.81.0/std/mem/union.MaybeUninit.html#layout-1: +// +// `MaybeUninit` is guaranteed to have the same size, alignment, and ABI as +// `T` +unsafe impl SizeEq for MaybeUninit {} +// SAFETY: See previous safety comment. +unsafe impl SizeEq> for T {} diff --git a/src/ref.rs b/src/ref.rs index 0f4ce00214..5ad76ff0cf 100644 --- a/src/ref.rs +++ b/src/ref.rs @@ -624,7 +624,7 @@ where let ptr = Ptr::from_ref(b.into_byte_slice()) .try_cast_into_no_leftover::(None) .expect("zerocopy internal error: into_ref should be infallible"); - let ptr = ptr.bikeshed_recall_valid(); + let ptr = ptr.recall_validity(); ptr.as_ref() } } @@ -658,7 +658,7 @@ where let ptr = Ptr::from_mut(b.into_byte_slice_mut()) .try_cast_into_no_leftover::(None) .expect("zerocopy internal error: into_ref should be infallible"); - let ptr = ptr.bikeshed_recall_valid(); + let ptr = ptr.recall_validity(); ptr.as_mut() } } @@ -770,7 +770,7 @@ where let ptr = Ptr::from_ref(b.deref()) .try_cast_into_no_leftover::(None) .expect("zerocopy internal error: Deref::deref should be infallible"); - let ptr = ptr.bikeshed_recall_valid(); + let ptr = ptr.recall_validity(); ptr.as_ref() } } @@ -799,7 +799,7 @@ where let ptr = Ptr::from_mut(b.deref_mut()) .try_cast_into_no_leftover::(None) .expect("zerocopy internal error: DerefMut::deref_mut should be infallible"); - let ptr = ptr.bikeshed_recall_valid(); + let ptr = ptr.recall_validity::<_, (_, (_, (BecauseExclusive, BecauseExclusive)))>(); ptr.as_mut() } } diff --git a/src/util/macro_util.rs b/src/util/macro_util.rs index 7db924fb59..3881d2bf45 100644 --- a/src/util/macro_util.rs +++ b/src/util/macro_util.rs @@ -17,15 +17,21 @@ #![allow(missing_debug_implementations)] -use core::mem::{self, ManuallyDrop}; +use core::{ + mem::{self, ManuallyDrop}, + ptr::NonNull, +}; // TODO(#29), TODO(https://github.com/rust-lang/rust/issues/69835): Remove this // `cfg` when `size_of_val_raw` is stabilized. 
#[cfg(__ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS)] -use core::ptr::{self, NonNull}; +use core::ptr; use crate::{ - pointer::invariant::{self, BecauseExclusive, BecauseImmutable, Invariants}, + pointer::{ + invariant::{self, BecauseExclusive, BecauseImmutable, Invariants}, + TryTransmuteFromPtr, + }, FromBytes, Immutable, IntoBytes, Ptr, TryFromBytes, ValidityError, }; @@ -548,7 +554,7 @@ pub unsafe fn transmute_mut<'dst, 'src: 'dst, Src: 'src, Dst: 'dst>( /// [`is_bit_valid`]: TryFromBytes::is_bit_valid #[doc(hidden)] #[inline] -fn try_cast_or_pme( +fn try_cast_or_pme( src: Ptr<'_, Src, I>, ) -> Result< Ptr<'_, Dst, (I::Aliasing, invariant::Unaligned, invariant::Valid)>, @@ -558,7 +564,9 @@ where // TODO(#2226): There should be a `Src: FromBytes` bound here, but doing so // requires deeper surgery. Src: invariant::Read, - Dst: TryFromBytes + invariant::Read, + Dst: TryFromBytes + + invariant::Read + + TryTransmuteFromPtr, I: Invariants, I::Aliasing: invariant::Reference, { @@ -570,7 +578,7 @@ where // `Src`. // - `p as *mut Dst` is a provenance-preserving cast #[allow(clippy::as_conversions)] - let c_ptr = unsafe { src.cast_unsized(|p| p as *mut Dst) }; + let c_ptr = unsafe { src.cast_unsized(NonNull::cast::) }; match c_ptr.try_into_valid() { Ok(ptr) => Ok(ptr), @@ -584,7 +592,7 @@ where // to the size of `Src`. // - `p as *mut Src` is a provenance-preserving cast #[allow(clippy::as_conversions)] - let ptr = unsafe { ptr.cast_unsized(|p| p as *mut Src) }; + let ptr = unsafe { ptr.cast_unsized(NonNull::cast::) }; // SAFETY: `ptr` is `src`, and has the same alignment invariant. let ptr = unsafe { ptr.assume_alignment::() }; // SAFETY: `ptr` is `src` and has the same validity invariant. 
@@ -637,8 +645,7 @@ where // // `MaybeUninit` is guaranteed to have the same size, alignment, and // ABI as `T` - let ptr: Ptr<'_, Dst, _> = - unsafe { ptr.cast_unsized(|mu: *mut mem::MaybeUninit| mu.cast()) }; + let ptr: Ptr<'_, Dst, _> = unsafe { ptr.cast_unsized(NonNull::>::cast) }; if Dst::is_bit_valid(ptr.forget_aligned()) { // SAFETY: Since `Dst::is_bit_valid`, we know that `ptr`'s referent is @@ -672,7 +679,7 @@ where { let ptr = Ptr::from_ref(src); let ptr = ptr.bikeshed_recall_initialized_immutable(); - match try_cast_or_pme::(ptr) { + match try_cast_or_pme::(ptr) { Ok(ptr) => { static_assert!(Src, Dst => mem::align_of::() <= mem::align_of::()); // SAFETY: We have checked that `Dst` does not have a stricter @@ -716,7 +723,7 @@ where { let ptr = Ptr::from_mut(src); let ptr = ptr.bikeshed_recall_initialized_from_bytes(); - match try_cast_or_pme::(ptr) { + match try_cast_or_pme::(ptr) { Ok(ptr) => { static_assert!(Src, Dst => mem::align_of::() <= mem::align_of::()); // SAFETY: We have checked that `Dst` does not have a stricter @@ -724,7 +731,7 @@ where let ptr = unsafe { ptr.assume_alignment::() }; Ok(ptr.as_mut()) } - Err(err) => Err(err.map_src(|ptr| ptr.bikeshed_recall_valid().as_mut())), + Err(err) => Err(err.map_src(|ptr| ptr.recall_validity().as_mut())), } } diff --git a/src/util/macros.rs b/src/util/macros.rs index c98711926e..cce7bae64b 100644 --- a/src/util/macros.rs +++ b/src/util/macros.rs @@ -43,23 +43,14 @@ macro_rules! safety_comment { /// `is_bit_valid` to unconditionally return `true`. In other words, it must /// be the case that any initialized sequence of bytes constitutes a valid /// instance of `$ty`. 
-/// - If an `is_bit_valid` impl is provided, then: -/// - Regardless of whether the provided closure takes a `Ptr<$repr>` or -/// `&$repr` argument, if `$ty` and `$repr` are different types, then it -/// must be the case that, given `t: *mut $ty` and `let r = t as *mut -/// $repr`: -/// - `r` refers to an object of equal or lesser size than the object -/// referred to by `t`. -/// - `r` refers to an object with `UnsafeCell`s at the same byte ranges as -/// the object referred to by `t`. -/// - The impl of `is_bit_valid` must only return `true` for its argument -/// `Ptr<$repr>` if the original `Ptr<$ty>` refers to a valid `$ty`. +/// - If an `is_bit_valid` impl is provided, then the impl of `is_bit_valid` +/// must only return `true` if its argument refers to a valid `$ty`. macro_rules! unsafe_impl { // Implement `$trait` for `$ty` with no bounds. - ($(#[$attr:meta])* $ty:ty: $trait:ident $(; |$candidate:ident: MaybeAligned<$repr:ty>| $is_bit_valid:expr)?) => { + ($(#[$attr:meta])* $ty:ty: $trait:ident $(; |$candidate:ident| $is_bit_valid:expr)?) => { $(#[$attr])* unsafe impl $trait for $ty { - unsafe_impl!(@method $trait $(; |$candidate: MaybeAligned<$repr>| $is_bit_valid)?); + unsafe_impl!(@method $trait $(; |$candidate| $is_bit_valid)?); } }; @@ -122,26 +113,26 @@ macro_rules! unsafe_impl { $(#[$attr:meta])* const $constname:ident : $constty:ident $(,)? $($tyvar:ident $(: $(? $optbound:ident $(+)?)* $($bound:ident $(+)?)* )?),* - => $trait:ident for $ty:ty $(; |$candidate:ident $(: MaybeAligned<$ref_repr:ty>)? $(: Maybe<$ptr_repr:ty>)?| $is_bit_valid:expr)? + => $trait:ident for $ty:ty $(; |$candidate:ident| $is_bit_valid:expr)? ) => { unsafe_impl!( @inner $(#[$attr])* @const $constname: $constty, $($tyvar $(: $(? $optbound +)* + $($bound +)*)?,)* - => $trait for $ty $(; |$candidate $(: MaybeAligned<$ref_repr>)? $(: Maybe<$ptr_repr>)?| $is_bit_valid)? + => $trait for $ty $(; |$candidate| $is_bit_valid)? ); }; ( $(#[$attr:meta])* $($tyvar:ident $(: $(? 
$optbound:ident $(+)?)* $($bound:ident $(+)?)* )?),* - => $trait:ident for $ty:ty $(; |$candidate:ident $(: MaybeAligned<$ref_repr:ty>)? $(: Maybe<$ptr_repr:ty>)?| $is_bit_valid:expr)? + => $trait:ident for $ty:ty $(; |$candidate:ident| $is_bit_valid:expr)? ) => { unsafe_impl!( @inner $(#[$attr])* $($tyvar $(: $(? $optbound +)* + $($bound +)*)?,)* - => $trait for $ty $(; |$candidate $(: MaybeAligned<$ref_repr>)? $(: Maybe<$ptr_repr>)?| $is_bit_valid)? + => $trait for $ty $(; |$candidate| $is_bit_valid)? ); }; ( @@ -149,63 +140,22 @@ macro_rules! unsafe_impl { $(#[$attr:meta])* $(@const $constname:ident : $constty:ident,)* $($tyvar:ident $(: $(? $optbound:ident +)* + $($bound:ident +)* )?,)* - => $trait:ident for $ty:ty $(; |$candidate:ident $(: MaybeAligned<$ref_repr:ty>)? $(: Maybe<$ptr_repr:ty>)?| $is_bit_valid:expr)? + => $trait:ident for $ty:ty $(; |$candidate:ident| $is_bit_valid:expr)? ) => { $(#[$attr])* #[allow(non_local_definitions)] unsafe impl<$($tyvar $(: $(? $optbound +)* $($bound +)*)?),* $(, const $constname: $constty,)*> $trait for $ty { - unsafe_impl!(@method $trait $(; |$candidate: $(MaybeAligned<$ref_repr>)? $(Maybe<$ptr_repr>)?| $is_bit_valid)?); + unsafe_impl!(@method $trait $(; |$candidate| $is_bit_valid)?); } }; - (@method TryFromBytes ; |$candidate:ident: MaybeAligned<$repr:ty>| $is_bit_valid:expr) => { + (@method TryFromBytes ; |$candidate:ident| $is_bit_valid:expr) => { #[allow(clippy::missing_inline_in_public_items)] #[cfg_attr(all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), coverage(off))] fn only_derive_is_allowed_to_implement_this_trait() {} #[inline] - fn is_bit_valid(candidate: Maybe<'_, Self, AA>) -> bool { - // SAFETY: - // - The cast preserves address. The caller has promised that the - // cast results in an object of equal or lesser size, and so the - // cast returns a pointer which references a subset of the bytes - // of `p`. - // - The cast preserves provenance. 
- // - The caller has promised that the destination type has - // `UnsafeCell`s at the same byte ranges as the source type. - #[allow(clippy::as_conversions)] - let candidate = unsafe { candidate.cast_unsized_unchecked::<$repr, _>(|p| p as *mut _) }; - - // TODO(#1866): Currently, `bikeshed_recall_valid` has a known - // soundness hole. Eventually this will need to be fixed by - // requiring that `T: FromBytes + IntoBytes`. This bound ensures - // that this already holds of `$repr` so that we're guaranteed to be - // forwards-compatible with that change. - #[inline(always)] fn is_from_bytes_into_bytes() {} - is_from_bytes_into_bytes::<$repr>(); - - let $candidate = candidate.bikeshed_recall_valid(); - $is_bit_valid - } - }; - (@method TryFromBytes ; |$candidate:ident: Maybe<$repr:ty>| $is_bit_valid:expr) => { - #[allow(clippy::missing_inline_in_public_items)] - #[cfg_attr(all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), coverage(off))] - fn only_derive_is_allowed_to_implement_this_trait() {} - - #[inline] - fn is_bit_valid(candidate: Maybe<'_, Self, AA>) -> bool { - // SAFETY: - // - The cast preserves address. The caller has promised that the - // cast results in an object of equal or lesser size, and so the - // cast returns a pointer which references a subset of the bytes - // of `p`. - // - The cast preserves provenance. - // - The caller has promised that the destination type has - // `UnsafeCell`s at the same byte ranges as the source type. - #[allow(clippy::as_conversions)] - let $candidate = unsafe { candidate.cast_unsized_unchecked::<$repr, _>(|p| p as *mut _) }; - + fn is_bit_valid($candidate: Maybe<'_, Self, AA>) -> bool { $is_bit_valid } }; @@ -220,159 +170,97 @@ macro_rules! unsafe_impl { #[cfg_attr(all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), coverage(off))] fn only_derive_is_allowed_to_implement_this_trait() {} }; - (@method $trait:ident; |$_candidate:ident $(: &$_ref_repr:ty)? 
$(: NonNull<$_ptr_repr:ty>)?| $_is_bit_valid:expr) => { + (@method $trait:ident; |$_candidate:ident| $_is_bit_valid:expr) => { compile_error!("Can't provide `is_bit_valid` impl for trait other than `TryFromBytes`"); }; } -/// Implements `$trait` for a type which implements `TransparentWrapper`. +/// Implements `$trait` for `$ty` where `$ty: TransmuteFrom<$repr>` (and +/// vice-versa). /// /// Calling this macro is safe; the internals of the macro emit appropriate /// trait bounds which ensure that the given impl is sound. -macro_rules! impl_for_transparent_wrapper { +macro_rules! impl_for_transmute_from { ( $(#[$attr:meta])* $($tyvar:ident $(: $(? $optbound:ident $(+)?)* $($bound:ident $(+)?)* )?)? - => $trait:ident for $ty:ty $(; |$candidate:ident $(: MaybeAligned<$ref_repr:ty>)? $(: Maybe<$ptr_repr:ty>)?| $is_bit_valid:expr)? + => $trait:ident for $ty:ty [$($unsafe_cell:ident)? <$repr:ty>] ) => { $(#[$attr])* #[allow(non_local_definitions)] - // This block implements `$trait` for `$ty` under the following - // conditions: - // - `$ty: TransparentWrapper` - // - `$ty::Inner: $trait` - // - For some `Xxx`, `$ty::XxxVariance = Covariant` (`Xxx` is determined - // by the `@define_is_transparent_wrapper` macro arms). This bound - // ensures that some layout property is the same between `$ty` and - // `$ty::Inner`. Which layout property this is depends on the trait - // being implemented (for example, `FromBytes` is not concerned with - // alignment, but is concerned with bit validity). - // - // In other words, `$ty` is guaranteed to soundly implement `$trait` - // because some property of its layout is the same as `$ty::Inner`, - // which implements `$trait`. Most of the complexity in this macro is to - // ensure that the above-mentioned conditions are actually met, and that - // the proper variance (ie, the proper layout property) is chosen. 
- - // SAFETY: - // - `is_transparent_wrapper` requires: - // - `W: TransparentWrapper` - // - `W::Inner: $trait` - // - `f` is generic over `I: Invariants`, and in its body, calls - // `is_transparent_wrapper::()`. Thus, this code will only - // compile if, for all `I: Invariants`: - // - `$ty: TransparentWrapper` - // - `$ty::Inner: $trait` - // - // These two facts - that `$ty: TransparentWrapper` and that - // `$ty::Inner: $trait` - are the preconditions to the full safety - // proofs, which are completed below in the - // `@define_is_transparent_wrapper` macro arms. The safety proof is - // slightly different for each trait. + // SAFETY: `is_trait` (defined and used below) requires `T: + // TransmuteFrom`, `R: TransmuteFrom`, and `R: $trait`. It is + // called using `$ty` and `$repr`, ensuring that `$ty` and `$repr` have + // equivalent bit validity, and ensuring that `$repr: $trait`. The + // supported traits - `TryFromBytes`, `FromZeros`, `FromBytes`, and + // `IntoBytes` - are defined only in terms of the bit validity of a + // type. Therefore, `$repr: $trait` ensures that `$ty: $trait` is sound. unsafe impl<$($tyvar $(: $(? $optbound +)* $($bound +)*)?)?> $trait for $ty { #[allow(dead_code, clippy::missing_inline_in_public_items)] #[cfg_attr(all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), coverage(off))] fn only_derive_is_allowed_to_implement_this_trait() { - use crate::{pointer::invariant::Invariants, util::*}; + use crate::pointer::{*, invariant::Valid}; - impl_for_transparent_wrapper!(@define_is_transparent_wrapper $trait); + impl_for_transmute_from!(@assert_is_supported_trait $trait); + + fn is_trait() + where + T: TransmuteFrom + ?Sized, + R: TransmuteFrom + ?Sized, + R: $trait, + { + } #[cfg_attr(all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), coverage(off))] - fn f() { - is_transparent_wrapper::(); + fn f<$($tyvar $(: $(? 
$optbound +)* $($bound +)*)?)?>() { + is_trait::<$ty, $repr>(); } } - impl_for_transparent_wrapper!( + impl_for_transmute_from!( @is_bit_valid $(<$tyvar $(: $(? $optbound +)* $($bound +)*)?>)? - $trait for $ty + $trait for $ty [$($unsafe_cell)? <$repr>] ); } }; - (@define_is_transparent_wrapper Immutable) => { - // SAFETY: `W: TransparentWrapper` - // requires that `W` has `UnsafeCell`s at the same byte offsets as - // `W::Inner`. `W::Inner: Immutable` implies that `W::Inner` does not - // contain any `UnsafeCell`s, and so `W` does not contain any - // `UnsafeCell`s. Since `W = $ty`, `$ty` can soundly implement - // `Immutable`. - impl_for_transparent_wrapper!(@define_is_transparent_wrapper Immutable, UnsafeCellVariance) - }; - (@define_is_transparent_wrapper FromZeros) => { - // SAFETY: `W: TransparentWrapper` - // requires that `W` has the same bit validity as `W::Inner`. `W::Inner: - // FromZeros` implies that the all-zeros bit pattern is a bit-valid - // instance of `W::Inner`, and so the all-zeros bit pattern is a - // bit-valid instance of `W`. Since `W = $ty`, `$ty` can soundly - // implement `FromZeros`. - impl_for_transparent_wrapper!(@define_is_transparent_wrapper FromZeros, ValidityVariance) - }; - (@define_is_transparent_wrapper FromBytes) => { - // SAFETY: `W: TransparentWrapper` - // requires that `W` has the same bit validity as `W::Inner`. `W::Inner: - // FromBytes` implies that any initialized bit pattern is a bit-valid - // instance of `W::Inner`, and so any initialized bit pattern is a - // bit-valid instance of `W`. Since `W = $ty`, `$ty` can soundly - // implement `FromBytes`. - impl_for_transparent_wrapper!(@define_is_transparent_wrapper FromBytes, ValidityVariance) - }; - (@define_is_transparent_wrapper IntoBytes) => { - // SAFETY: `W: TransparentWrapper` - // requires that `W` has the same bit validity as `W::Inner`. 
`W::Inner: - // IntoBytes` implies that no bit-valid instance of `W::Inner` contains - // uninitialized bytes, and so no bit-valid instance of `W` contains - // uninitialized bytes. Since `W = $ty`, `$ty` can soundly implement - // `IntoBytes`. - impl_for_transparent_wrapper!(@define_is_transparent_wrapper IntoBytes, ValidityVariance) - }; - (@define_is_transparent_wrapper Unaligned) => { - // SAFETY: `W: TransparentWrapper` - // requires that `W` has the same alignment as `W::Inner`. `W::Inner: - // Unaligned` implies `W::Inner`'s alignment is 1, and so `W`'s - // alignment is 1. Since `W = $ty`, `W` can soundly implement - // `Unaligned`. - impl_for_transparent_wrapper!(@define_is_transparent_wrapper Unaligned, AlignmentVariance) - }; - (@define_is_transparent_wrapper TryFromBytes) => { - // SAFETY: `W: TransparentWrapper` - // requires that `W` has the same bit validity as `W::Inner`. `W::Inner: - // TryFromBytes` implies that `::is_bit_valid(c)` only returns `true` if `c` references - // a bit-valid instance of `W::Inner`. Thus, `::is_bit_valid(c)` only returns `true` if `c` references - // a bit-valid instance of `W`. Below, we implement `::is_bit_valid` by deferring to `::is_bit_valid`. Since `W = $ty`, it is sound for `$ty` - // to implement `TryFromBytes` with this implementation of - // `is_bit_valid`. - impl_for_transparent_wrapper!(@define_is_transparent_wrapper TryFromBytes, ValidityVariance) - }; - (@define_is_transparent_wrapper $trait:ident, $variance:ident) => { - #[cfg_attr(all(coverage_nightly, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS), coverage(off))] - fn is_transparent_wrapper + ?Sized>() - where - W::Inner: $trait, - {} + (@assert_is_supported_trait TryFromBytes) => {}; + (@assert_is_supported_trait FromZeros) => {}; + (@assert_is_supported_trait FromBytes) => {}; + (@assert_is_supported_trait IntoBytes) => {}; + ( + @is_bit_valid + $(<$tyvar:ident $(: $(? $optbound:ident $(+)?)* $($bound:ident $(+)?)* )?>)? 
+ TryFromBytes for $ty:ty [UnsafeCell<$repr:ty>] + ) => { + #[inline] + fn is_bit_valid(candidate: Maybe<'_, Self, A>) -> bool { + let c: Maybe<'_, Self, crate::pointer::invariant::Exclusive> = candidate.into_exclusive_or_pme(); + let c: Maybe<'_, $repr, _> = c.transmute::<_, _, (_, (_, (BecauseExclusive, BecauseExclusive)))>(); + // SAFETY: This macro ensures that `$repr` and `Self` have the same + // size and bit validity. Thus, a bit-valid instance of `$repr` is + // also a bit-valid instance of `Self`. + <$repr as TryFromBytes>::is_bit_valid(c) + } }; ( @is_bit_valid $(<$tyvar:ident $(: $(? $optbound:ident $(+)?)* $($bound:ident $(+)?)* )?>)? - TryFromBytes for $ty:ty + TryFromBytes for $ty:ty [<$repr:ty>] ) => { - // SAFETY: See safety comment in `(@define_is_transparent_wrapper - // TryFromBytes)` macro arm for an explanation of why this is a sound - // implementation of `is_bit_valid`. #[inline] fn is_bit_valid(candidate: Maybe<'_, Self, A>) -> bool { - TryFromBytes::is_bit_valid(candidate.transparent_wrapper_into_inner()) + // SAFETY: This macro ensures that `$repr` and `Self` have the same + // size and bit validity. Thus, a bit-valid instance of `$repr` is + // also a bit-valid instance of `Self`. + <$repr as TryFromBytes>::is_bit_valid(candidate.transmute_sized()) } }; ( @is_bit_valid $(<$tyvar:ident $(: $(? $optbound:ident $(+)?)* $($bound:ident $(+)?)* )?>)? - $trait:ident for $ty:ty + $trait:ident for $ty:ty [$($unsafe_cell:ident)? <$repr:ty>] ) => { // Trait other than `TryFromBytes`; no `is_bit_valid` impl. }; @@ -399,33 +287,33 @@ macro_rules! impl_for_transparent_wrapper { macro_rules! unsafe_impl_for_power_set { ( $first:ident $(, $rest:ident)* $(-> $ret:ident)? => $trait:ident for $macro:ident!(...) - $(; |$candidate:ident $(: MaybeAligned<$ref_repr:ty>)? $(: Maybe<$ptr_repr:ty>)?| $is_bit_valid:expr)? + $(; |$candidate:ident| $is_bit_valid:expr)? ) => { unsafe_impl_for_power_set!( $($rest),* $(-> $ret)? => $trait for $macro!(...) 
- $(; |$candidate $(: MaybeAligned<$ref_repr>)? $(: Maybe<$ptr_repr>)?| $is_bit_valid)? + $(; |$candidate| $is_bit_valid)? ); unsafe_impl_for_power_set!( @impl $first $(, $rest)* $(-> $ret)? => $trait for $macro!(...) - $(; |$candidate $(: MaybeAligned<$ref_repr>)? $(: Maybe<$ptr_repr>)?| $is_bit_valid)? + $(; |$candidate| $is_bit_valid)? ); }; ( $(-> $ret:ident)? => $trait:ident for $macro:ident!(...) - $(; |$candidate:ident $(: MaybeAligned<$ref_repr:ty>)? $(: Maybe<$ptr_repr:ty>)?| $is_bit_valid:expr)? + $(; |$candidate:ident| $is_bit_valid:expr)? ) => { unsafe_impl_for_power_set!( @impl $(-> $ret)? => $trait for $macro!(...) - $(; |$candidate $(: MaybeAligned<$ref_repr>)? $(: Maybe<$ptr_repr>)?| $is_bit_valid)? + $(; |$candidate| $is_bit_valid)? ); }; ( @impl $($vars:ident),* $(-> $ret:ident)? => $trait:ident for $macro:ident!(...) - $(; |$candidate:ident $(: MaybeAligned<$ref_repr:ty>)? $(: Maybe<$ptr_repr:ty>)?| $is_bit_valid:expr)? + $(; |$candidate:ident| $is_bit_valid:expr)? ) => { unsafe_impl!( $($vars,)* $($ret)? => $trait for $macro!($($vars),* $(-> $ret)?) - $(; |$candidate $(: MaybeAligned<$ref_repr>)? $(: Maybe<$ptr_repr>)?| $is_bit_valid)? + $(; |$candidate| $is_bit_valid)? ); }; } @@ -509,11 +397,11 @@ macro_rules! impl_or_verify { }; ( $($tyvar:ident $(: $(? $optbound:ident $(+)?)* $($bound:ident $(+)?)* )?),* - => $trait:ident for $ty:ty $(; |$candidate:ident $(: MaybeAligned<$ref_repr:ty>)? $(: Maybe<$ptr_repr:ty>)?| $is_bit_valid:expr)? + => $trait:ident for $ty:ty $(; |$candidate:ident| $is_bit_valid:expr)? ) => { impl_or_verify!(@impl { unsafe_impl!( $($tyvar $(: $(? $optbound +)* $($bound +)*)?),* => $trait for $ty - $(; |$candidate $(: MaybeAligned<$ref_repr>)? $(: Maybe<$ptr_repr>)?| $is_bit_valid)? + $(; |$candidate| $is_bit_valid)? ); }); impl_or_verify!(@verify $trait, { impl<$($tyvar $(: $(? $optbound +)* $($bound +)*)?),*> Subtrait for $ty {} @@ -803,3 +691,58 @@ macro_rules! 
static_assert_dst_is_not_zst { }, "cannot call this method on a dynamically-sized type whose trailing slice element is zero-sized"); }} } + +macro_rules! cast { + () => { + // SAFETY: `NonNull::as_ptr` returns a non-null pointer, so the argument + // to `NonNull::new_unchecked` is also non-null. + |p| { + #[allow(clippy::as_conversions)] + return core::ptr::NonNull::new_unchecked(core::ptr::NonNull::as_ptr(p) as *mut _); + } + }; +} + +/// Implements `TransmuteFrom` and `SizeEq` for `T` and `$wrapper`. +/// +/// # Safety +/// +/// `T` and `$wrapper` must have the same bit validity, and must have the +/// same size in the sense of `SizeEq`. +macro_rules! unsafe_impl_for_transparent_wrapper { + (T $(: ?$optbound:ident)? => $wrapper:ident) => { + const _: () = { + use crate::pointer::{TransmuteFrom, SizeEq, invariant::Valid}; + // SAFETY: The caller promises that `T` and `$wrapper` have the + // same bit validity. + unsafe impl TransmuteFrom for $wrapper {} + // SAFETY: See previous safety comment. + unsafe impl TransmuteFrom<$wrapper, Valid, Valid> for T {} + // SAFETY: The caller promises that `T` and `$wrapper` satisfy + // `SizeEq`. + unsafe impl SizeEq for $wrapper {} + // SAFETY: See previous safety comment. + unsafe impl SizeEq<$wrapper> for T {} + }; + + // So that this macro must be invoked inside `safety_comment!` or else + // it will generate a `clippy::undocumented_unsafe_blocks` warning. + #[allow(unused_unsafe)] + const _: () = unsafe {}; + }; +} + +macro_rules! impl_size_eq { + ($t:ty, $u:ty) => { + const _: () = { + use crate::pointer::SizeEq; + + static_assert!(=> mem::size_of::<$t>() == mem::size_of::<$u>()); + + // SAFETY: We've asserted that their sizes are equal. + unsafe impl SizeEq<$t> for $u {} + // SAFETY: We've asserted that their sizes are equal. 
+ unsafe impl SizeEq<$u> for $t {} + }; + }; +} diff --git a/src/util/mod.rs b/src/util/mod.rs index e3302aa1e8..f368ce6e24 100644 --- a/src/util/mod.rs +++ b/src/util/mod.rs @@ -13,465 +13,13 @@ mod macros; pub mod macro_util; use core::{ - cell::UnsafeCell, marker::PhantomData, - mem::{self, ManuallyDrop, MaybeUninit}, - num::{NonZeroUsize, Wrapping}, + mem::{self, ManuallyDrop}, + num::NonZeroUsize, ptr::NonNull, }; -use crate::{ - error::AlignmentError, - pointer::invariant::{self, Invariants}, - Unalign, -}; - -/// A type which has the same layout as the type it wraps. -/// -/// # Safety -/// -/// `T: TransparentWrapper` implies that `T` has the same size as [`T::Inner`]. -/// Further, `T: TransparentWrapper` implies that: -/// - If `T::UnsafeCellVariance = Covariant`, then `T` has `UnsafeCell`s -/// covering the same byte ranges as `T::Inner`. -/// - If a `T` pointer satisfies the alignment invariant `I::Alignment`, then -/// that same pointer, cast to `T::Inner`, satisfies the alignment invariant -/// `>::Applied`. -/// - If a `T` pointer satisfies the validity invariant `I::Validity`, then that -/// same pointer, cast to `T::Inner`, satisfies the validity invariant -/// `>::Applied`. -/// -/// [`T::Inner`]: TransparentWrapper::Inner -/// [`UnsafeCell`]: core::cell::UnsafeCell -/// [`T::AlignmentVariance`]: TransparentWrapper::AlignmentVariance -/// [`T::ValidityVariance`]: TransparentWrapper::ValidityVariance -#[doc(hidden)] -pub unsafe trait TransparentWrapper { - type Inner: ?Sized; - - type UnsafeCellVariance; - type AlignmentVariance: AlignmentVariance; - type ValidityVariance: ValidityVariance; - - /// Casts a wrapper pointer to an inner pointer. - /// - /// # Safety - /// - /// The resulting pointer has the same address and provenance as `ptr`, and - /// addresses the same number of bytes. - fn cast_into_inner(ptr: *mut Self) -> *mut Self::Inner; - - /// Casts an inner pointer to a wrapper pointer. 
- /// - /// # Safety - /// - /// The resulting pointer has the same address and provenance as `ptr`, and - /// addresses the same number of bytes. - fn cast_from_inner(ptr: *mut Self::Inner) -> *mut Self; -} - -#[allow(unreachable_pub)] -#[doc(hidden)] -pub trait AlignmentVariance { - type Applied: invariant::Alignment; -} - -#[allow(unreachable_pub)] -#[doc(hidden)] -pub trait ValidityVariance { - type Applied: invariant::Validity; -} - -#[doc(hidden)] -#[allow(missing_copy_implementations, missing_debug_implementations)] -pub enum Covariant {} - -impl AlignmentVariance for Covariant { - type Applied = I; -} - -impl ValidityVariance for Covariant { - type Applied = I; -} - -#[doc(hidden)] -#[allow(missing_copy_implementations, missing_debug_implementations)] -pub enum Invariant {} - -impl AlignmentVariance for Invariant { - type Applied = invariant::Unaligned; -} - -impl ValidityVariance for Invariant { - type Applied = invariant::Uninit; -} - -// SAFETY: -// - Per [1], `MaybeUninit` has the same size as `T`. -// - See inline comments for other safety justifications. -// -// [1] Per https://doc.rust-lang.org/1.81.0/std/mem/union.MaybeUninit.html#layout-1: -// -// `MaybeUninit` is guaranteed to have the same size, alignment, and ABI as -// `T` -unsafe impl TransparentWrapper for MaybeUninit { - type Inner = T; - - // SAFETY: `MaybeUninit` has `UnsafeCell`s covering the same byte ranges - // as `Inner = T`. This is not explicitly documented, but it can be - // inferred. Per [1] in the preceding safety comment, `MaybeUninit` has - // the same size as `T`. Further, note the signature of - // `MaybeUninit::assume_init_ref` [2]: - // - // pub unsafe fn assume_init_ref(&self) -> &T - // - // If the argument `&MaybeUninit` and the returned `&T` had `UnsafeCell`s - // at different offsets, this would be unsound. Its existence is proof that - // this is not the case. 
- // - // [2] https://doc.rust-lang.org/1.81.0/std/mem/union.MaybeUninit.html#method.assume_init_ref - type UnsafeCellVariance = Covariant; - // SAFETY: Per [1], `MaybeUninit` has the same layout as `T`, and thus - // has the same alignment as `T`. - // - // [1] Per https://doc.rust-lang.org/std/mem/union.MaybeUninit.html#layout-1: - // - // `MaybeUninit` is guaranteed to have the same size, alignment, and - // ABI as `T`. - type AlignmentVariance = Covariant; - // SAFETY: `MaybeUninit` has no validity invariants. Thus, a valid - // `MaybeUninit` is not necessarily a valid `T`. - type ValidityVariance = Invariant; - - #[inline(always)] - fn cast_into_inner(ptr: *mut MaybeUninit) -> *mut T { - // SAFETY: Per [1] (from comment above), `MaybeUninit` has the same - // layout as `T`. Thus, this cast preserves size. - // - // This cast trivially preserves provenance. - ptr.cast::() - } - - #[inline(always)] - fn cast_from_inner(ptr: *mut T) -> *mut MaybeUninit { - // SAFETY: Per [1] (from comment above), `MaybeUninit` has the same - // layout as `T`. Thus, this cast preserves size. - // - // This cast trivially preserves provenance. - ptr.cast::>() - } -} - -// SAFETY: -// - Per [1], `ManuallyDrop` has the same size as `T`. -// - See inline comments for other safety justifications. -// -// [1] Per https://doc.rust-lang.org/1.81.0/std/mem/struct.ManuallyDrop.html: -// -// `ManuallyDrop` is guaranteed to have the same layout and bit validity as -// `T` -unsafe impl TransparentWrapper for ManuallyDrop { - type Inner = T; - - // SAFETY: Per [1], `ManuallyDrop` has `UnsafeCell`s covering the same - // byte ranges as `Inner = T`. - // - // [1] Per https://doc.rust-lang.org/1.81.0/std/mem/struct.ManuallyDrop.html: - // - // `ManuallyDrop` is guaranteed to have the same layout and bit - // validity as `T`, and is subject to the same layout optimizations as - // `T`. As a consequence, it has no effect on the assumptions that the - // compiler makes about its contents. 
- type UnsafeCellVariance = Covariant; - // SAFETY: Per [1], `ManuallyDrop` has the same layout as `T`, and thus - // has the same alignment as `T`. - // - // [1] Per https://doc.rust-lang.org/nightly/core/mem/struct.ManuallyDrop.html: - // - // `ManuallyDrop` is guaranteed to have the same layout and bit - // validity as `T` - type AlignmentVariance = Covariant; - - // SAFETY: Per [1] (from comment above), `ManuallyDrop` has the same bit - // validity as `T`. - type ValidityVariance = Covariant; - - #[inline(always)] - fn cast_into_inner(ptr: *mut ManuallyDrop) -> *mut T { - // SAFETY: Per [1] (from comment above), `ManuallyDrop` has the same - // layout as `T`. Thus, this cast preserves size even if `T` is unsized. - // - // This cast trivially preserves provenance. - #[allow(clippy::as_conversions)] - return ptr as *mut T; - } - - #[inline(always)] - fn cast_from_inner(ptr: *mut T) -> *mut ManuallyDrop { - // SAFETY: Per [1] (from comment above), `ManuallyDrop` has the same - // layout as `T`. Thus, this cast preserves size even if `T` is unsized. - // - // This cast trivially preserves provenance. - #[allow(clippy::as_conversions)] - return ptr as *mut ManuallyDrop; - } -} - -// SAFETY: -// - Per [1], `Wrapping` has the same size as `T`. -// - See inline comments for other safety justifications. -// -// [1] Per https://doc.rust-lang.org/1.81.0/std/num/struct.Wrapping.html#layout-1: -// -// `Wrapping` is guaranteed to have the same layout and ABI as `T`. -unsafe impl TransparentWrapper for Wrapping { - type Inner = T; - - // SAFETY: Per [1], `Wrapping` has the same layout as `T`. Since its - // single field (of type `T`) is public, it would be a breaking change to - // add or remove fields. Thus, we know that `Wrapping` contains a `T` (as - // opposed to just having the same size and alignment as `T`) with no pre- - // or post-padding. Thus, `Wrapping` must have `UnsafeCell`s covering the - // same byte ranges as `Inner = T`. 
- // - // [1] Per https://doc.rust-lang.org/1.81.0/std/num/struct.Wrapping.html#layout-1: - // - // `Wrapping` is guaranteed to have the same layout and ABI as `T`. - type UnsafeCellVariance = Covariant; - // SAFETY: Per [1], `Wrapping` has the same layout as `T`, and thus has - // the same alignment as `T`. - // - // [1] Per https://doc.rust-lang.org/core/num/struct.Wrapping.html#layout-1: - // - // `Wrapping` is guaranteed to have the same layout and ABI as `T`. - type AlignmentVariance = Covariant; - - // SAFETY: `Wrapping` has only one field, which is `pub` [2]. We are also - // guaranteed per [1] (from the comment above) that `Wrapping` has the - // same layout as `T`. The only way for both of these to be true - // simultaneously is for `Wrapping` to have the same bit validity as `T`. - // In particular, in order to change the bit validity, one of the following - // would need to happen: - // - `Wrapping` could change its `repr`, but this would violate the layout - // guarantee. - // - `Wrapping` could add or change its fields, but this would be a - // stability-breaking change. - // - // [2] https://doc.rust-lang.org/core/num/struct.Wrapping.html - type ValidityVariance = Covariant; - - #[inline(always)] - fn cast_into_inner(ptr: *mut Wrapping) -> *mut T { - // SAFETY: Per [1] (from comment above), `Wrapping` has the same - // layout as `T`. Thus, this cast preserves size. - // - // This cast trivially preserves provenance. - ptr.cast::() - } - - #[inline(always)] - fn cast_from_inner(ptr: *mut T) -> *mut Wrapping { - // SAFETY: Per [1] (from comment above), `Wrapping` has the same - // layout as `T`. Thus, this cast preserves size. - // - // This cast trivially preserves provenance. - ptr.cast::>() - } -} - -// SAFETY: -// - Per [1], `UnsafeCell` has the same size as `T`. -// - See inline comments for other safety justifications. 
-// -// [1] Per https://doc.rust-lang.org/1.81.0/core/cell/struct.UnsafeCell.html#memory-layout: -// -// `UnsafeCell` has the same in-memory representation as its inner type -// `T`. -unsafe impl TransparentWrapper for UnsafeCell { - type Inner = T; - - // SAFETY: Since we set this to `Invariant`, we make no safety claims. - type UnsafeCellVariance = Invariant; - - // SAFETY: Per [1] (from comment on impl), `Unalign` has the same - // representation as `T`, and thus has the same alignment as `T`. - type AlignmentVariance = Covariant; - - // SAFETY: Per [1], `Unalign` has the same bit validity as `T`. - // Technically the term "representation" doesn't guarantee this, but the - // subsequent sentence in the documentation makes it clear that this is the - // intention. - // - // [1] Per https://doc.rust-lang.org/1.81.0/core/cell/struct.UnsafeCell.html#memory-layout: - // - // `UnsafeCell` has the same in-memory representation as its inner type - // `T`. A consequence of this guarantee is that it is possible to convert - // between `T` and `UnsafeCell`. - type ValidityVariance = Covariant; - - #[inline(always)] - fn cast_into_inner(ptr: *mut UnsafeCell) -> *mut T { - // SAFETY: Per [1] (from comment above), `UnsafeCell` has the same - // representation as `T`. Thus, this cast preserves size. - // - // This cast trivially preserves provenance. - #[allow(clippy::as_conversions)] - return ptr as *mut T; - } - - #[inline(always)] - fn cast_from_inner(ptr: *mut T) -> *mut UnsafeCell { - // SAFETY: Per [1] (from comment above), `UnsafeCell` has the same - // representation as `T`. Thus, this cast preserves size. - // - // This cast trivially preserves provenance. - #[allow(clippy::as_conversions)] - return ptr as *mut UnsafeCell; - } -} - -// SAFETY: `Unalign` promises to have the same size as `T`. -// -// See inline comments for other safety justifications. 
-unsafe impl TransparentWrapper for Unalign { - type Inner = T; - - // SAFETY: `Unalign` promises to have `UnsafeCell`s covering the same - // byte ranges as `Inner = T`. - type UnsafeCellVariance = Covariant; - - // SAFETY: Since `Unalign` promises to have alignment 1 regardless of - // `T`'s alignment. Thus, an aligned pointer to `Unalign` is not - // necessarily an aligned pointer to `T`. - type AlignmentVariance = Invariant; - - // SAFETY: `Unalign` promises to have the same validity as `T`. - type ValidityVariance = Covariant; - - #[inline(always)] - fn cast_into_inner(ptr: *mut Unalign) -> *mut T { - // SAFETY: Per the safety comment on the impl block, `Unalign` has - // the size as `T`. Thus, this cast preserves size. - // - // This cast trivially preserves provenance. - ptr.cast::() - } - - #[inline(always)] - fn cast_from_inner(ptr: *mut T) -> *mut Unalign { - // SAFETY: Per the safety comment on the impl block, `Unalign` has - // the size as `T`. Thus, this cast preserves size. - // - // This cast trivially preserves provenance. - ptr.cast::>() - } -} - -/// Implements `TransparentWrapper` for an atomic type. -/// -/// # Safety -/// -/// The caller promises that `$atomic` is an atomic type whose natie equivalent -/// is `$native`. -#[cfg(all( - zerocopy_target_has_atomics_1_60_0, - any( - target_has_atomic = "8", - target_has_atomic = "16", - target_has_atomic = "32", - target_has_atomic = "64", - target_has_atomic = "ptr" - ) -))] -macro_rules! unsafe_impl_transparent_wrapper_for_atomic { - ($(#[$attr:meta])* $(,)?) => {}; - ($(#[$attr:meta])* $atomic:ty [$native:ty], $($atomics:ty [$natives:ty]),* $(,)?) => { - $(#[$attr])* - // SAFETY: See safety comment in next match arm. 
- unsafe impl crate::util::TransparentWrapper for $atomic { - unsafe_impl_transparent_wrapper_for_atomic!(@inner $atomic [$native]); - } - unsafe_impl_transparent_wrapper_for_atomic!($(#[$attr])* $($atomics [$natives],)*); - }; - ($(#[$attr:meta])* $tyvar:ident => $atomic:ty [$native:ty]) => { - // We implement for `$atomic` and set `Inner = $native`. The caller has - // promised that `$atomic` and `$native` are an atomic type and its - // native counterpart, respectively. Per [1], `$atomic` and `$native` - // have the same size. - // - // [1] Per (for example) https://doc.rust-lang.org/1.81.0/std/sync/atomic/struct.AtomicU64.html: - // - // This type has the same size and bit validity as the underlying - // integer type - $(#[$attr])* - unsafe impl<$tyvar, I: crate::invariant::Invariants> crate::util::TransparentWrapper for $atomic { - unsafe_impl_transparent_wrapper_for_atomic!(@inner $atomic [$native]); - } - }; - (@inner $atomic:ty [$native:ty]) => { - type Inner = UnsafeCell<$native>; - - // SAFETY: It is "obvious" that each atomic type contains a single - // `UnsafeCell` that covers all bytes of the type, but we can also prove - // it: - // - Since `$atomic` provides an API which permits loading and storing - // values of type `$native` via a `&self` (shared) reference, *some* - // interior mutation must be happening, and interior mutation can only - // happen via `UnsafeCell`. Further, there must be enough bytes in - // `$atomic` covered by an `UnsafeCell` to hold every possible value - // of `$native`. - // - Per [1], `$atomic` has the same size as `$native`. This on its own - // isn't enough: it would still be possible for `$atomic` to store - // `$native` using a compact representation (for `$native` types for - // which some bit patterns are illegal). However, this is ruled out by - // the fact that `$atomic` has the same bit validity as `$native` [1]. - // Thus, we can conclude that every byte of `$atomic` must be covered - // by an `UnsafeCell`. 
- // - // Thus, every byte of `$atomic` is covered by an `UnsafeCell`, and we - // set `type Inner = UnsafeCell<$native>`. Thus, `Self` and - // `Self::Inner` have `UnsafeCell`s covering the same byte ranges. - // - // [1] Per (for example) https://doc.rust-lang.org/1.81.0/std/sync/atomic/struct.AtomicU64.html: - // - // This type has the same size and bit validity as the underlying - // integer type - type UnsafeCellVariance = crate::util::Covariant; - - // SAFETY: No safety justification is required for an invariant - // variance. - type AlignmentVariance = crate::util::Invariant; - - // SAFETY: Per [1], all atomic types have the same bit validity as their - // native counterparts. The caller has promised that `$atomic` and - // `$native` are an atomic type and its native counterpart, - // respectively. - // - // [1] Per (for example) https://doc.rust-lang.org/1.81.0/std/sync/atomic/struct.AtomicU64.html: - // - // This type has the same size and bit validity as the underlying - // integer type - type ValidityVariance = crate::util::Covariant; - - #[inline(always)] - fn cast_into_inner(ptr: *mut $atomic) -> *mut UnsafeCell<$native> { - // SAFETY: Per [1] (from comment on impl block), `$atomic` has the - // same size as `$native`. Thus, this cast preserves size. - // - // This cast trivially preserves provenance. - ptr.cast::>() - } - - #[inline(always)] - fn cast_from_inner(ptr: *mut UnsafeCell<$native>) -> *mut $atomic { - // SAFETY: Per [1] (from comment on impl block), `$atomic` has the - // same size as `$native`. Thus, this cast preserves size. - // - // This cast trivially preserves provenance. - ptr.cast::<$atomic>() - } - }; -} +use super::*; /// Like [`PhantomData`], but [`Send`] and [`Sync`] regardless of whether the /// wrapped `T` is. 
@@ -739,14 +287,10 @@ pub(crate) const unsafe fn transmute_unchecked(src: Src) -> Dst { pub(crate) unsafe fn new_box( meta: T::PointerMetadata, allocate: unsafe fn(core::alloc::Layout) -> *mut u8, -) -> Result, crate::error::AllocError> +) -> Result, AllocError> where T: ?Sized + crate::KnownLayout, { - use crate::error::AllocError; - use crate::PointerMetadata; - use core::alloc::Layout; - let size = match meta.size_for_metadata(T::LAYOUT) { Some(size) => size, None => return Err(AllocError), diff --git a/src/wrappers.rs b/src/wrappers.rs index fd48236ae4..e425f623e5 100644 --- a/src/wrappers.rs +++ b/src/wrappers.rs @@ -139,7 +139,7 @@ safety_comment! { impl_or_verify!(T: Immutable => Immutable for Unalign); impl_or_verify!( T: TryFromBytes => TryFromBytes for Unalign; - |c: Maybe| T::is_bit_valid(c) + |c| T::is_bit_valid(c.transmute_sized()) ); impl_or_verify!(T: FromZeros => FromZeros for Unalign); impl_or_verify!(T: FromBytes => FromBytes for Unalign); @@ -188,7 +188,7 @@ impl Unalign { /// may prefer [`Deref::deref`], which is infallible. #[inline(always)] pub fn try_deref(&self) -> Result<&T, AlignmentError<&Self, T>> { - let inner = Ptr::from_ref(self).transparent_wrapper_into_inner(); + let inner = Ptr::from_ref(self).transmute_sized(); match inner.bikeshed_try_into_aligned() { Ok(aligned) => Ok(aligned.as_ref()), Err(err) => Err(err.map_src(|src| src.into_unalign().as_ref())), @@ -205,7 +205,7 @@ impl Unalign { /// callers may prefer [`DerefMut::deref_mut`], which is infallible. 
#[inline(always)] pub fn try_deref_mut(&mut self) -> Result<&mut T, AlignmentError<&mut Self, T>> { - let inner = Ptr::from_mut(self).transparent_wrapper_into_inner(); + let inner = Ptr::from_mut(self).transmute_sized::<_, _, (_, (_, _))>(); match inner.bikeshed_try_into_aligned() { Ok(aligned) => Ok(aligned.as_mut()), Err(err) => Err(err.map_src(|src| src.into_unalign().as_mut())), @@ -394,14 +394,17 @@ impl Deref for Unalign { #[inline(always)] fn deref(&self) -> &T { - Ptr::from_ref(self).transparent_wrapper_into_inner().bikeshed_recall_aligned().as_ref() + Ptr::from_ref(self).transmute_sized().bikeshed_recall_aligned().as_ref() } } impl DerefMut for Unalign { #[inline(always)] fn deref_mut(&mut self) -> &mut T { - Ptr::from_mut(self).transparent_wrapper_into_inner().bikeshed_recall_aligned().as_mut() + Ptr::from_mut(self) + .transmute_sized::<_, _, (_, (_, _))>() + .bikeshed_recall_aligned() + .as_mut() } } diff --git a/zerocopy-derive/src/enum.rs b/zerocopy-derive/src/enum.rs index 1be9235906..39d7ebb977 100644 --- a/zerocopy-derive/src/enum.rs +++ b/zerocopy-derive/src/enum.rs @@ -260,8 +260,8 @@ pub(crate) fn derive_is_bit_valid( // original type. let variant = unsafe { variants.cast_unsized_unchecked( - |p: *mut ___ZerocopyVariants #ty_generics| { - p as *mut #variant_struct_ident #ty_generics + |p: core_reexport::ptr::NonNull<___ZerocopyVariants #ty_generics>| { + p.cast::<#variant_struct_ident #ty_generics>() } ) }; @@ -321,15 +321,15 @@ pub(crate) fn derive_is_bit_valid( // - There are no `UnsafeCell`s in the tag because it is a // primitive integer. let tag_ptr = unsafe { - candidate.reborrow().cast_unsized_unchecked(|p: *mut Self| { - p as *mut ___ZerocopyTagPrimitive + candidate.reborrow().cast_unsized_unchecked(|p: core_reexport::ptr::NonNull| { + p.cast::<___ZerocopyTagPrimitive>() }) }; // SAFETY: `tag_ptr` is casted from `candidate`, whose referent // is `Initialized`. 
Since we have not written uninitialized // bytes into the referent, `tag_ptr` is also `Initialized`. let tag_ptr = unsafe { tag_ptr.assume_initialized() }; - tag_ptr.bikeshed_recall_valid().read_unaligned::<::zerocopy::BecauseImmutable>() + tag_ptr.recall_validity().read_unaligned::<::zerocopy::BecauseImmutable>() }; // SAFETY: @@ -343,8 +343,8 @@ pub(crate) fn derive_is_bit_valid( // original enum, and so preserves the locations of any // `UnsafeCell`s. let raw_enum = unsafe { - candidate.cast_unsized_unchecked(|p: *mut Self| { - p as *mut ___ZerocopyRawEnum #ty_generics + candidate.cast_unsized_unchecked(|p: core_reexport::ptr::NonNull| { + p.cast::<___ZerocopyRawEnum #ty_generics>() }) }; // SAFETY: `cast_unsized_unchecked` removes the initialization @@ -361,8 +361,13 @@ pub(crate) fn derive_is_bit_valid( // subfield pointer just points to a smaller portion of the // overall struct. let variants = unsafe { - raw_enum.cast_unsized_unchecked(|p: *mut ___ZerocopyRawEnum #ty_generics| { - core_reexport::ptr::addr_of_mut!((*p).variants) + raw_enum.cast_unsized_unchecked(|p: core_reexport::ptr::NonNull<___ZerocopyRawEnum #ty_generics>| { + let p = p.as_ptr(); + let ptr = core_reexport::ptr::addr_of_mut!((*p).variants); + // SAFETY: `ptr` is a projection into `p`, which is + // `NonNull`, and guaranteed not to wrap around the address + // space. Thus, `ptr` cannot be null. 
+ unsafe { core_reexport::ptr::NonNull::new_unchecked(ptr) } }) }; diff --git a/zerocopy-derive/src/lib.rs b/zerocopy-derive/src/lib.rs index 6da4fcffce..9768a3741b 100644 --- a/zerocopy-derive/src/lib.rs +++ b/zerocopy-derive/src/lib.rs @@ -653,6 +653,8 @@ fn derive_try_from_bytes_struct( where ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, { + use ::zerocopy::util::macro_util::core_reexport; + true #(&& { // SAFETY: // - `project` is a field projection, and so it addresses a @@ -662,8 +664,18 @@ fn derive_try_from_bytes_struct( // the same byte ranges in the returned pointer's referent // as they do in `*slf` let field_candidate = unsafe { - let project = |slf: *mut Self| - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).#field_names); + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).#field_names); + // SAFETY: `cast_unsized_unchecked` promises that + // `slf` will either reference a zero-sized byte + // range, or else will reference a byte range that + // is entirely contained withing an allocated + // object. In either case, this guarantees that + // field projection will not wrap around the address + // space, and so `field` will be non-null. 
+ unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } + }; candidate.reborrow().cast_unsized_unchecked(project) }; @@ -711,6 +723,8 @@ fn derive_try_from_bytes_union( where ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, { + use ::zerocopy::util::macro_util::core_reexport; + false #(|| { // SAFETY: // - `project` is a field projection, and so it addresses a @@ -720,8 +734,18 @@ fn derive_try_from_bytes_union( // `self_type_trait_bounds`, neither `*slf` nor the // returned pointer's referent contain any `UnsafeCell`s let field_candidate = unsafe { - let project = |slf: *mut Self| - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).#field_names); + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).#field_names); + // SAFETY: `cast_unsized_unchecked` promises that + // `slf` will either reference a zero-sized byte + // range, or else will reference a byte range that + // is entirely contained withing an allocated + // object. In either case, this guarantees that + // field projection will not wrap around the address + // space, and so `field` will be non-null. 
+ unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } + }; candidate.reborrow().cast_unsized_unchecked(project) }; diff --git a/zerocopy-derive/src/output_tests.rs b/zerocopy-derive/src/output_tests.rs index b8aabdb518..bbeff5cef0 100644 --- a/zerocopy-derive/src/output_tests.rs +++ b/zerocopy-derive/src/output_tests.rs @@ -302,6 +302,7 @@ fn test_try_from_bytes() { where ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, { + use ::zerocopy::util::macro_util::core_reexport; true } } @@ -326,6 +327,7 @@ fn test_from_zeros() { where ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, { + use ::zerocopy::util::macro_util::core_reexport; true } } @@ -589,10 +591,14 @@ fn test_try_from_bytes_enum() { where ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, { + use ::zerocopy::util::macro_util::core_reexport; + true && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).0) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).0); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; @@ -601,48 +607,60 @@ fn test_try_from_bytes_enum() { > as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).1) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).1); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; ::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).2) + let project = |slf: 
core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).2); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; ::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).3) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).3); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; ::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).4) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).4); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; ::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).5) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).5); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; <[(X, Y); N] as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).6) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).6); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; 
candidate.reborrow().cast_unsized_unchecked(project) }; @@ -683,10 +701,14 @@ fn test_try_from_bytes_enum() { where ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, { + use ::zerocopy::util::macro_util::core_reexport; + true && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).0) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).0); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; @@ -695,24 +717,30 @@ fn test_try_from_bytes_enum() { > as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).1) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).1); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; ::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).2) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).2); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; ::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).3) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).3); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; 
candidate.reborrow().cast_unsized_unchecked(project) }; @@ -721,8 +749,10 @@ fn test_try_from_bytes_enum() { ) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).4) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).4); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; @@ -748,18 +778,20 @@ fn test_try_from_bytes_enum() { } let tag = { let tag_ptr = unsafe { - candidate.reborrow().cast_unsized_unchecked(|p: *mut Self| { p as *mut ___ZerocopyTagPrimitive }) + candidate.reborrow().cast_unsized_unchecked(|p: core_reexport::ptr::NonNull| { p.cast::<___ZerocopyTagPrimitive>() }) }; let tag_ptr = unsafe { tag_ptr.assume_initialized() }; - tag_ptr.bikeshed_recall_valid().read_unaligned::<::zerocopy::BecauseImmutable>() + tag_ptr.recall_validity().read_unaligned::<::zerocopy::BecauseImmutable>() }; let raw_enum = unsafe { - candidate.cast_unsized_unchecked(|p: *mut Self| { p as *mut ___ZerocopyRawEnum<'a, N, X, Y> }) + candidate.cast_unsized_unchecked(|p: core_reexport::ptr::NonNull| { p.cast::<___ZerocopyRawEnum<'a, N, X, Y>>() }) }; let raw_enum = unsafe { raw_enum.assume_initialized() }; let variants = unsafe { - raw_enum.cast_unsized_unchecked(|p: *mut ___ZerocopyRawEnum<'a, N, X, Y>| { - core_reexport::ptr::addr_of_mut!((*p).variants) + raw_enum.cast_unsized_unchecked(|p: core_reexport::ptr::NonNull<___ZerocopyRawEnum<'a, N, X, Y>>| { + let p = p.as_ptr(); + let ptr = core_reexport::ptr::addr_of_mut!((*p).variants); + unsafe { core_reexport::ptr::NonNull::new_unchecked(ptr) } }) }; #[allow(non_upper_case_globals)] @@ -767,8 +799,8 @@ fn test_try_from_bytes_enum() { ___ZEROCOPY_TAG_UnitLike => true, ___ZEROCOPY_TAG_StructLike => { let variant = unsafe { - variants.cast_unsized_unchecked(|p: *mut ___ZerocopyVariants<'a, N, X, 
Y>| { - p as *mut ___ZerocopyVariantStruct_StructLike<'a, N, X, Y> + variants.cast_unsized_unchecked(|p: core_reexport::ptr::NonNull<___ZerocopyVariants<'a, N, X, Y>>| { + p.cast::<___ZerocopyVariantStruct_StructLike<'a, N, X, Y>>() }) }; let variant = unsafe { variant.assume_initialized() }; @@ -777,8 +809,8 @@ fn test_try_from_bytes_enum() { } ___ZEROCOPY_TAG_TupleLike => { let variant = unsafe { - variants.cast_unsized_unchecked(|p: *mut ___ZerocopyVariants<'a, N, X, Y>| { - p as *mut ___ZerocopyVariantStruct_TupleLike<'a, N, X, Y> + variants.cast_unsized_unchecked(|p: core_reexport::ptr::NonNull<___ZerocopyVariants<'a, N, X, Y>>| { + p.cast::<___ZerocopyVariantStruct_TupleLike<'a, N, X, Y>>() }) }; let variant = unsafe { variant.assume_initialized() }; @@ -883,10 +915,14 @@ fn test_try_from_bytes_enum() { where ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, { + use ::zerocopy::util::macro_util::core_reexport; + true && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).0) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).0); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; @@ -895,48 +931,60 @@ fn test_try_from_bytes_enum() { > as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).1) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).1); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; ::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - 
::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).2) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).2); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; ::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).3) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).3); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; ::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).4) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).4); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; ::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).5) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).5); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; <[(X, Y); N] as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).6) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = 
core_reexport::ptr::addr_of_mut!((*slf).6); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; @@ -977,10 +1025,14 @@ fn test_try_from_bytes_enum() { where ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, { + use ::zerocopy::util::macro_util::core_reexport; + true && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).0) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).0); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; @@ -989,24 +1041,30 @@ fn test_try_from_bytes_enum() { > as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).1) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).1); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; ::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).2) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).2); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; ::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).3) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = 
core_reexport::ptr::addr_of_mut!((*slf).3); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; @@ -1015,8 +1073,10 @@ fn test_try_from_bytes_enum() { ) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).4) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).4); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; @@ -1042,18 +1102,20 @@ fn test_try_from_bytes_enum() { } let tag = { let tag_ptr = unsafe { - candidate.reborrow().cast_unsized_unchecked(|p: *mut Self| { p as *mut ___ZerocopyTagPrimitive }) + candidate.reborrow().cast_unsized_unchecked(|p: core_reexport::ptr::NonNull| { p.cast::<___ZerocopyTagPrimitive> ()}) }; let tag_ptr = unsafe { tag_ptr.assume_initialized() }; - tag_ptr.bikeshed_recall_valid().read_unaligned::<::zerocopy::BecauseImmutable>() + tag_ptr.recall_validity().read_unaligned::<::zerocopy::BecauseImmutable>() }; let raw_enum = unsafe { - candidate.cast_unsized_unchecked(|p: *mut Self| { p as *mut ___ZerocopyRawEnum<'a, N, X, Y> }) + candidate.cast_unsized_unchecked(|p: core_reexport::ptr::NonNull| { p.cast::<___ZerocopyRawEnum<'a, N, X, Y>> ()}) }; let raw_enum = unsafe { raw_enum.assume_initialized() }; let variants = unsafe { - raw_enum.cast_unsized_unchecked(|p: *mut ___ZerocopyRawEnum<'a, N, X, Y>| { - core_reexport::ptr::addr_of_mut!((*p).variants) + raw_enum.cast_unsized_unchecked(|p: core_reexport::ptr::NonNull<___ZerocopyRawEnum<'a, N, X, Y>>| { + let p = p.as_ptr(); + let ptr = core_reexport::ptr::addr_of_mut!((*p).variants); + unsafe { core_reexport::ptr::NonNull::new_unchecked(ptr) } }) }; #[allow(non_upper_case_globals)] @@ -1061,8 +1123,8 @@ fn test_try_from_bytes_enum() { ___ZEROCOPY_TAG_UnitLike => true, 
___ZEROCOPY_TAG_StructLike => { let variant = unsafe { - variants.cast_unsized_unchecked(|p: *mut ___ZerocopyVariants<'a, N, X, Y>| { - p as *mut ___ZerocopyVariantStruct_StructLike<'a, N, X, Y> + variants.cast_unsized_unchecked(|p: core_reexport::ptr::NonNull<___ZerocopyVariants<'a, N, X, Y>>| { + p.cast::<___ZerocopyVariantStruct_StructLike<'a, N, X, Y>>() }) }; let variant = unsafe { variant.assume_initialized() }; @@ -1071,8 +1133,8 @@ fn test_try_from_bytes_enum() { } ___ZEROCOPY_TAG_TupleLike => { let variant = unsafe { - variants.cast_unsized_unchecked(|p: *mut ___ZerocopyVariants<'a, N, X, Y>| { - p as *mut ___ZerocopyVariantStruct_TupleLike<'a, N, X, Y> + variants.cast_unsized_unchecked(|p: core_reexport::ptr::NonNull<___ZerocopyVariants<'a, N, X, Y>>| { + p.cast::<___ZerocopyVariantStruct_TupleLike<'a, N, X, Y>>() }) }; let variant = unsafe { variant.assume_initialized() }; @@ -1177,10 +1239,14 @@ fn test_try_from_bytes_enum() { where ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, { + use ::zerocopy::util::macro_util::core_reexport; + true && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).0) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).0); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; @@ -1189,48 +1255,60 @@ fn test_try_from_bytes_enum() { > as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).1) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).1); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; 
candidate.reborrow().cast_unsized_unchecked(project) }; ::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).2) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).2); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; ::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).3) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).3); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; ::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).4) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).4); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; ::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).5) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).5); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; <[(X, Y); N] as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - 
::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).6) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).6); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; @@ -1271,10 +1349,14 @@ fn test_try_from_bytes_enum() { where ___ZerocopyAliasing: ::zerocopy::pointer::invariant::Reference, { + use ::zerocopy::util::macro_util::core_reexport; + true && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).0) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).0); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; @@ -1283,24 +1365,30 @@ fn test_try_from_bytes_enum() { > as ::zerocopy::TryFromBytes>::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).1) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).1); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; ::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).2) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).2); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; ::is_bit_valid(field_candidate) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - 
::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).3) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).3); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; @@ -1309,8 +1397,10 @@ fn test_try_from_bytes_enum() { ) } && { let field_candidate = unsafe { - let project = |slf: *mut Self| { - ::zerocopy::util::macro_util::core_reexport::ptr::addr_of_mut!((*slf).4) + let project = |slf: core_reexport::ptr::NonNull| { + let slf = slf.as_ptr(); + let field = core_reexport::ptr::addr_of_mut!((*slf).4); + unsafe { core_reexport::ptr::NonNull::new_unchecked(field) } }; candidate.reborrow().cast_unsized_unchecked(project) }; @@ -1336,18 +1426,20 @@ fn test_try_from_bytes_enum() { } let tag = { let tag_ptr = unsafe { - candidate.reborrow().cast_unsized_unchecked(|p: *mut Self| { p as *mut ___ZerocopyTagPrimitive }) + candidate.reborrow().cast_unsized_unchecked(|p: core_reexport::ptr::NonNull| { p.cast::<___ZerocopyTagPrimitive> ()}) }; let tag_ptr = unsafe { tag_ptr.assume_initialized() }; - tag_ptr.bikeshed_recall_valid().read_unaligned::<::zerocopy::BecauseImmutable>() + tag_ptr.recall_validity().read_unaligned::<::zerocopy::BecauseImmutable>() }; let raw_enum = unsafe { - candidate.cast_unsized_unchecked(|p: *mut Self| { p as *mut ___ZerocopyRawEnum<'a, N, X, Y> }) + candidate.cast_unsized_unchecked(|p: core_reexport::ptr::NonNull| { p.cast::<___ZerocopyRawEnum<'a, N, X, Y>> ()}) }; let raw_enum = unsafe { raw_enum.assume_initialized() }; let variants = unsafe { - raw_enum.cast_unsized_unchecked(|p: *mut ___ZerocopyRawEnum<'a, N, X, Y>| { - core_reexport::ptr::addr_of_mut!((*p).variants) + raw_enum.cast_unsized_unchecked(|p: core_reexport::ptr::NonNull<___ZerocopyRawEnum<'a, N, X, Y>>| { + let p = p.as_ptr(); + let ptr = core_reexport::ptr::addr_of_mut!((*p).variants); + unsafe { 
core_reexport::ptr::NonNull::new_unchecked(ptr) } }) }; #[allow(non_upper_case_globals)] @@ -1355,8 +1447,8 @@ fn test_try_from_bytes_enum() { ___ZEROCOPY_TAG_UnitLike => true, ___ZEROCOPY_TAG_StructLike => { let variant = unsafe { - variants.cast_unsized_unchecked(|p: *mut ___ZerocopyVariants<'a, N, X, Y>| { - p as *mut ___ZerocopyVariantStruct_StructLike<'a, N, X, Y> + variants.cast_unsized_unchecked(|p: core_reexport::ptr::NonNull<___ZerocopyVariants<'a, N, X, Y>>| { + p.cast::<___ZerocopyVariantStruct_StructLike<'a, N, X, Y>>() }) }; let variant = unsafe { variant.assume_initialized() }; @@ -1365,8 +1457,8 @@ fn test_try_from_bytes_enum() { } ___ZEROCOPY_TAG_TupleLike => { let variant = unsafe { - variants.cast_unsized_unchecked(|p: *mut ___ZerocopyVariants<'a, N, X, Y>| { - p as *mut ___ZerocopyVariantStruct_TupleLike<'a, N, X, Y> + variants.cast_unsized_unchecked(|p: core_reexport::ptr::NonNull<___ZerocopyVariants<'a, N, X, Y>>| { + p.cast::<___ZerocopyVariantStruct_TupleLike<'a, N, X, Y>>() }) }; let variant = unsafe { variant.assume_initialized() }; diff --git a/zerocopy-derive/tests/include.rs b/zerocopy-derive/tests/include.rs index 3a6cfc9e40..a65d61fc48 100644 --- a/zerocopy-derive/tests/include.rs +++ b/zerocopy-derive/tests/include.rs @@ -26,7 +26,7 @@ mod imp { #[allow(unused)] pub use { ::core::{ - assert_eq, assert_ne, + self, assert_eq, assert_ne, cell::UnsafeCell, convert::TryFrom, hash, @@ -123,7 +123,7 @@ pub mod util { // SAFETY: `T` and `MaybeUninit` have the same layout, so this is a // size-preserving cast. It is also a provenance-preserving cast. 
- let ptr = unsafe { ptr.cast_unsized_unchecked(|p| p as *mut T) }; + let ptr = unsafe { ptr.cast_unsized_unchecked(|p| p.cast()) }; assert!(::is_bit_valid(ptr)); } } diff --git a/zerocopy-derive/tests/struct_try_from_bytes.rs b/zerocopy-derive/tests/struct_try_from_bytes.rs index b483604031..f3cf12c48c 100644 --- a/zerocopy-derive/tests/struct_try_from_bytes.rs +++ b/zerocopy-derive/tests/struct_try_from_bytes.rs @@ -78,7 +78,7 @@ fn two_bad() { // the same bytes as `c`. // - The cast preserves provenance. // - Neither the input nor output types contain any `UnsafeCell`s. - let candidate = unsafe { candidate.cast_unsized_unchecked(|p| p as *mut Two) }; + let candidate = unsafe { candidate.cast_unsized_unchecked(|p| p.cast::()) }; // SAFETY: `candidate`'s referent is as-initialized as `Two`. let candidate = unsafe { candidate.assume_initialized() }; @@ -108,7 +108,11 @@ fn un_sized() { // the same bytes as `c`. // - The cast preserves provenance. // - Neither the input nor output types contain any `UnsafeCell`s. - let candidate = unsafe { candidate.cast_unsized_unchecked(|p| p as *mut Unsized) }; + let candidate = unsafe { + candidate.cast_unsized_unchecked(|p| { + imp::core::ptr::NonNull::new_unchecked(p.as_ptr() as *mut Unsized) + }) + }; // SAFETY: `candidate`'s referent is as-initialized as `Two`. let candidate = unsafe { candidate.assume_initialized() }; @@ -164,7 +168,8 @@ fn test_maybe_from_bytes() { // the same bytes as `c`. // - The cast preserves provenance. // - Neither the input nor output types contain any `UnsafeCell`s. - let candidate = unsafe { candidate.cast_unsized_unchecked(|p| p as *mut MaybeFromBytes) }; + let candidate = + unsafe { candidate.cast_unsized_unchecked(|p| p.cast::>()) }; // SAFETY: `[u8]` consists entirely of initialized bytes. 
let candidate = unsafe { candidate.assume_initialized() }; diff --git a/zerocopy-derive/tests/union_try_from_bytes.rs b/zerocopy-derive/tests/union_try_from_bytes.rs index 35f1816533..869ffccad5 100644 --- a/zerocopy-derive/tests/union_try_from_bytes.rs +++ b/zerocopy-derive/tests/union_try_from_bytes.rs @@ -73,7 +73,7 @@ fn two_bad() { // the same bytes as `c`. // - The cast preserves provenance. // - Neither the input nor output types contain any `UnsafeCell`s. - let candidate = unsafe { candidate.cast_unsized_unchecked(|p| p as *mut Two) }; + let candidate = unsafe { candidate.cast_unsized_unchecked(|p| p.cast::()) }; // SAFETY: `candidate`'s referent is as-initialized as `Two`. let candidate = unsafe { candidate.assume_initialized() }; @@ -102,7 +102,7 @@ fn bool_and_zst() { // the same bytes as `c`. // - The cast preserves provenance. // - Neither the input nor output types contain any `UnsafeCell`s. - let candidate = unsafe { candidate.cast_unsized_unchecked(|p| p as *mut BoolAndZst) }; + let candidate = unsafe { candidate.cast_unsized_unchecked(|p| p.cast::()) }; // SAFETY: `candidate`'s referent is fully initialized. let candidate = unsafe { candidate.assume_initialized() }; @@ -131,7 +131,8 @@ fn test_maybe_from_bytes() { // the same bytes as `c`. // - The cast preserves provenance. // - Neither the input nor output types contain any `UnsafeCell`s. - let candidate = unsafe { candidate.cast_unsized_unchecked(|p| p as *mut MaybeFromBytes) }; + let candidate = + unsafe { candidate.cast_unsized_unchecked(|p| p.cast::>()) }; // SAFETY: `[u8]` consists entirely of initialized bytes. let candidate = unsafe { candidate.assume_initialized() };