Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
34 changes: 30 additions & 4 deletions compiler/rustc_const_eval/src/interpret/place.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@ use std::assert_matches::assert_matches;

use either::{Either, Left, Right};
use rustc_abi::{BackendRepr, HasDataLayout, Size};
use rustc_middle::ty::Ty;
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::ty::{self, Ty};
use rustc_middle::{bug, mir, span_bug};
use tracing::field::Empty;
use tracing::{instrument, trace};
Expand Down Expand Up @@ -885,10 +885,36 @@ where
dest.layout().ty,
);
}
// If the source has padding, we want to always do a mem-to-mem copy to ensure consistent
// padding in the target independent of layout choices.
let src_has_padding = match src.layout().backend_repr {
BackendRepr::Scalar(_) => false,
BackendRepr::ScalarPair(left, right)
if matches!(src.layout().ty.kind(), ty::Ref(..) | ty::RawPtr(..)) =>
{
// Wide pointers never have padding, so we can avoid calling `size()`.
debug_assert_eq!(left.size(self) + right.size(self), src.layout().size);
false
}
BackendRepr::ScalarPair(left, right) => {
let left_size = left.size(self);
let right_size = right.size(self);
// We have padding if the sizes don't add up to the total.
left_size + right_size != src.layout().size
}
// Everything else can only exist in memory anyway, so it doesn't matter.
BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => true,
};

// Let us see if the layout is simple so we take a shortcut,
// avoid force_allocation.
let src = match self.read_immediate_raw(src)? {
let src_val = if src_has_padding {
// Do our best to get an mplace. If there's no mplace, then this is stored as an
// "optimized" local, so its padding is definitely uninitialized and we are fine.
src.to_op(self)?.as_mplace_or_imm()
} else {
// Do our best to get an immediate, to avoid having to force_allocate the destination.
self.read_immediate_raw(src)?
};
let src = match src_val {
Right(src_val) => {
assert!(!src.layout().is_unsized());
assert!(!dest.layout().is_unsized());
Expand Down
2 changes: 1 addition & 1 deletion compiler/rustc_const_eval/src/interpret/projection.rs
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ pub trait Projectable<'tcx, Prov: Provenance>: Sized + std::fmt::Debug {
}

/// Convert this to an `OpTy`. This might be an irreversible transformation, but is useful for
/// reading from this thing.
/// reading from this thing. This will never actually do a read from memory!
fn to_op<M: Machine<'tcx, Provenance = Prov>>(
&self,
ecx: &InterpCx<'tcx, M>,
Expand Down
1 change: 1 addition & 0 deletions tests/ui/consts/const-eval/ptr_fragments.rs
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,7 @@ const _PARTIAL_OVERWRITE: () = {

#[allow(dead_code)]
fn fragment_in_dst_padding_gets_overwritten() {
// We can't use `repr(align)` here as that would make this not a `ScalarPair` any more.
#[repr(C)]
struct Pair {
x: u128,
Expand Down
36 changes: 36 additions & 0 deletions tests/ui/consts/const-eval/ptr_fragments_in_final.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,4 +37,40 @@ const MIXED_PTR: MaybeUninit<*const u8> = { //~ERROR: partial pointer in final v
}
};

/// This has pointer bytes in the padding of the memory that the final value is read from.
/// To ensure consistent behavior, we want to *always* copy that padding, even if the value
/// could be represented as a more efficient ScalarPair. Hence this must fail to compile.
fn fragment_in_padding() -> impl Copy {
    // We can't use `repr(align)` here as that would make this not a `ScalarPair` any more.
    #[repr(C)]
    #[derive(Clone, Copy)]
    struct Thing {
        x: u128,
        y: usize,
        // Trailing padding after `y`: the const assert below checks there is at
        // least one pointer's worth of it on the current target.
    }
    // Ensure there is indeed padding (i.e. the struct is strictly larger than its fields).
    const _: () = assert!(mem::size_of::<Thing>() > 16 + mem::size_of::<usize>());

    // Wrapping `Thing` in a union with a byte array keeps the padding bytes
    // initialized when the buffer below is created and copied.
    #[derive(Clone, Copy)]
    union PreservePad {
        thing: Thing,
        bytes: [u8; mem::size_of::<Thing>()],
    }

    const A: Thing = unsafe { //~ERROR: partial pointer in final value
        let mut buffer = [PreservePad { bytes: [0u8; mem::size_of::<Thing>()] }; 2];
        // The offset is half a pointer before the end of the first `Thing`, so that
        // copying a `Thing` copies exactly half of the pointer written below.
        let offset = mem::size_of::<Thing>() - mem::size_of::<usize>()/2;
        // Ensure this write lands entirely inside the padding (past the end of `y`).
        assert!(offset >= std::mem::offset_of!(Thing, y) + mem::size_of::<usize>());

        // Write a pointer straddling the padding boundary of `buffer[0]` and `buffer[1]`.
        (&raw mut buffer).cast::<&i32>().byte_add(offset).write_unaligned(&1);
        // Reading out `buffer[0].thing` copies only the first half of that pointer
        // into the final value of `A`, which must be rejected.
        buffer[0].thing
    };

    A
}

fn main() {}
10 changes: 9 additions & 1 deletion tests/ui/consts/const-eval/ptr_fragments_in_final.stderr
Original file line number Diff line number Diff line change
Expand Up @@ -14,5 +14,13 @@ LL | const MIXED_PTR: MaybeUninit<*const u8> = {
|
= note: while pointers can be broken apart into individual bytes during const-evaluation, only complete pointers (with all their bytes in the right order) are supported in the final value

error: aborting due to 2 previous errors
error: encountered partial pointer in final value of constant
--> $DIR/ptr_fragments_in_final.rs:61:5
|
LL | const A: Thing = unsafe {
| ^^^^^^^^^^^^^^
|
= note: while pointers can be broken apart into individual bytes during const-evaluation, only complete pointers (with all their bytes in the right order) are supported in the final value

error: aborting due to 3 previous errors

Loading