diff --git a/compiler/rustc_const_eval/src/interpret/place.rs b/compiler/rustc_const_eval/src/interpret/place.rs
index a409c7fad4175..ad4355476109f 100644
--- a/compiler/rustc_const_eval/src/interpret/place.rs
+++ b/compiler/rustc_const_eval/src/interpret/place.rs
@@ -6,8 +6,8 @@ use std::assert_matches::assert_matches;
 
 use either::{Either, Left, Right};
 use rustc_abi::{BackendRepr, HasDataLayout, Size};
-use rustc_middle::ty::Ty;
 use rustc_middle::ty::layout::TyAndLayout;
+use rustc_middle::ty::{self, Ty};
 use rustc_middle::{bug, mir, span_bug};
 use tracing::field::Empty;
 use tracing::{instrument, trace};
@@ -885,10 +885,36 @@ where
                 dest.layout().ty,
             );
         }
+        // If the source has padding, we want to always do a mem-to-mem copy to ensure consistent
+        // padding in the target independent of layout choices.
+        let src_has_padding = match src.layout().backend_repr {
+            BackendRepr::Scalar(_) => false,
+            BackendRepr::ScalarPair(left, right)
+                if matches!(src.layout().ty.kind(), ty::Ref(..) | ty::RawPtr(..)) =>
+            {
+                // Wide pointers never have padding, so we can avoid calling `size()`.
+                debug_assert_eq!(left.size(self) + right.size(self), src.layout().size);
+                false
+            }
+            BackendRepr::ScalarPair(left, right) => {
+                let left_size = left.size(self);
+                let right_size = right.size(self);
+                // We have padding if the sizes don't add up to the total.
+                left_size + right_size != src.layout().size
+            }
+            // Everything else can only exist in memory anyway, so it doesn't matter.
+            BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => true,
+        };
 
-        // Let us see if the layout is simple so we take a shortcut,
-        // avoid force_allocation.
-        let src = match self.read_immediate_raw(src)? {
+        let src_val = if src_has_padding {
+            // Do our best to get an mplace. If there's no mplace, then this is stored as an
+            // "optimized" local, so its padding is definitely uninitialized and we are fine.
+            src.to_op(self)?.as_mplace_or_imm()
+        } else {
+            // Do our best to get an immediate, to avoid having to force_allocate the destination.
+            self.read_immediate_raw(src)?
+        };
+        let src = match src_val {
             Right(src_val) => {
                 assert!(!src.layout().is_unsized());
                 assert!(!dest.layout().is_unsized());
diff --git a/compiler/rustc_const_eval/src/interpret/projection.rs b/compiler/rustc_const_eval/src/interpret/projection.rs
index 027e634ef7f7b..db72c02e308c0 100644
--- a/compiler/rustc_const_eval/src/interpret/projection.rs
+++ b/compiler/rustc_const_eval/src/interpret/projection.rs
@@ -97,7 +97,7 @@ pub trait Projectable<'tcx, Prov: Provenance>: Sized + std::fmt::Debug {
     }
 
     /// Convert this to an `OpTy`. This might be an irreversible transformation, but is useful for
-    /// reading from this thing.
+    /// reading from this thing. This will never actually do a read from memory!
     fn to_op<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        ecx: &InterpCx<'tcx, M>,
diff --git a/tests/ui/consts/const-eval/ptr_fragments.rs b/tests/ui/consts/const-eval/ptr_fragments.rs
index c251eea8add54..7e802677a91c8 100644
--- a/tests/ui/consts/const-eval/ptr_fragments.rs
+++ b/tests/ui/consts/const-eval/ptr_fragments.rs
@@ -69,6 +69,7 @@ const _PARTIAL_OVERWRITE: () = {
 
 #[allow(dead_code)]
 fn fragment_in_dst_padding_gets_overwritten() {
+    // We can't use `repr(align)` here as that would make this not a `ScalarPair` any more.
     #[repr(C)]
     struct Pair {
         x: u128,
diff --git a/tests/ui/consts/const-eval/ptr_fragments_in_final.rs b/tests/ui/consts/const-eval/ptr_fragments_in_final.rs
index 4037a2d237221..76f292c3f8b52 100644
--- a/tests/ui/consts/const-eval/ptr_fragments_in_final.rs
+++ b/tests/ui/consts/const-eval/ptr_fragments_in_final.rs
@@ -37,4 +37,40 @@ const MIXED_PTR: MaybeUninit<*const u8> = { //~ERROR: partial pointer in final v
     }
 };
 
+/// This has pointer bytes in the padding of the memory that the final value is read from.
+/// To ensure consistent behavior, we want to *always* copy that padding, even if the value
+/// could be represented as a more efficient ScalarPair. Hence this must fail to compile.
+fn fragment_in_padding() -> impl Copy {
+    // We can't use `repr(align)` here as that would make this not a `ScalarPair` any more.
+    #[repr(C)]
+    #[derive(Clone, Copy)]
+    struct Thing {
+        x: u128,
+        y: usize,
+        // at least one pointer worth of padding
+    }
+    // Ensure there is indeed padding.
+    const _: () = assert!(mem::size_of::<Thing>() > 16 + mem::size_of::<usize>());
+
+    #[derive(Clone, Copy)]
+    union PreservePad {
+        thing: Thing,
+        bytes: [u8; mem::size_of::<Thing>()],
+    }
+
+    const A: Thing = unsafe { //~ERROR: partial pointer in final value
+        let mut buffer = [PreservePad { bytes: [0u8; mem::size_of::<Thing>()] }; 2];
+        // The offset half a pointer from the end, so that copying a `Thing` copies exactly
+        // half the pointer.
+        let offset = mem::size_of::<Thing>() - mem::size_of::<usize>() / 2;
+        // Ensure this is inside the padding.
+        assert!(offset >= std::mem::offset_of!(Thing, y) + mem::size_of::<usize>());
+
+        (&raw mut buffer).cast::<&i32>().byte_add(offset).write_unaligned(&1);
+        buffer[0].thing
+    };
+
+    A
+}
+
 fn main() {}
diff --git a/tests/ui/consts/const-eval/ptr_fragments_in_final.stderr b/tests/ui/consts/const-eval/ptr_fragments_in_final.stderr
index 41a8224165811..de0cd4db7e155 100644
--- a/tests/ui/consts/const-eval/ptr_fragments_in_final.stderr
+++ b/tests/ui/consts/const-eval/ptr_fragments_in_final.stderr
@@ -14,5 +14,13 @@ LL | const MIXED_PTR: MaybeUninit<*const u8> = {
    |
    = note: while pointers can be broken apart into individual bytes during const-evaluation, only complete pointers (with all their bytes in the right order) are supported in the final value
 
-error: aborting due to 2 previous errors
+error: encountered partial pointer in final value of constant
+  --> $DIR/ptr_fragments_in_final.rs:61:5
+   |
+LL |     const A: Thing = unsafe {
+   |     ^^^^^^^^^^^^^^
+   |
+   = note: while pointers can be broken apart into individual bytes during const-evaluation, only complete pointers (with all their bytes in the right order) are supported in the final value
+
+error: aborting due to 3 previous errors
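
Note on the `src_has_padding` check in the place.rs hunk: the condition is easier to see stated over plain byte sizes. The sketch below is illustrative only, not rustc API; `scalar_pair_has_padding` and its arguments are made-up stand-ins for the `Scalar` sizes and `layout().size` the patch actually consults. A `ScalarPair` layout has padding exactly when the two scalar sizes do not add up to the total size.

```rust
// Illustrative sketch only (not rustc API): the padding condition from the
// `src_has_padding` computation, restated over plain byte sizes.
fn scalar_pair_has_padding(left_size: u64, right_size: u64, total_size: u64) -> bool {
    // There is padding iff the two scalars don't account for every byte.
    left_size + right_size != total_size
}

fn main() {
    // A wide pointer like `&[u8]` is (data pointer, length): 8 + 8 == 16, no padding,
    // which is why the patch short-circuits `ty::Ref`/`ty::RawPtr` without calling `size()`.
    assert!(!scalar_pair_has_padding(8, 8, 16));
    // A ScalarPair of (u128, usize) occupying 32 bytes (typical 64-bit target):
    // 16 + 8 != 32, so 8 trailing bytes of padding could carry pointer fragments.
    assert!(scalar_pair_has_padding(16, 8, 32));
    println!("padding checks passed");
}
```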
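
Similarly, the offset arithmetic in the new `fragment_in_padding` test is easier to follow with concrete numbers. Assuming the 64-bit layout the test's asserts imply (`size_of::<Thing>() == 32`, 8-byte pointers, `y` at offset 16), this sketch just replays the arithmetic; all constants here are those assumed values, not computed by rustc:

```rust
// Worked numbers for the test's offset computation (assumed 64-bit layout).
fn main() {
    let size_of_thing = 32u64; // 16 (u128) + 8 (usize) + 8 bytes trailing padding
    let size_of_ptr = 8u64;
    let offset_of_y = 16u64;

    // `offset` sits half a pointer before the end of the first `Thing`.
    let offset = size_of_thing - size_of_ptr / 2;
    assert_eq!(offset, 28);

    // The write lands entirely inside padding, past the end of `y`.
    assert!(offset >= offset_of_y + 8);
    // The pointer written at bytes 28..36 straddles the two array elements,
    // so copying `buffer[0].thing` (bytes 0..32) keeps exactly half of it:
    // the partial pointer that the final-value check must reject.
    println!("pointer occupies bytes {}..{}", offset, offset + size_of_ptr);
}
```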