29 changes: 28 additions & 1 deletion compiler/rustc_abi/src/lib.rs
@@ -43,7 +43,7 @@ use std::fmt;
#[cfg(feature = "nightly")]
use std::iter::Step;
use std::num::{NonZeroUsize, ParseIntError};
use std::ops::{Add, AddAssign, Deref, Mul, RangeFull, RangeInclusive, Sub};
use std::ops::{Add, AddAssign, Deref, Div, Mul, RangeFull, RangeInclusive, Sub};
use std::str::FromStr;

use bitflags::bitflags;
@@ -819,6 +819,14 @@ impl Size {
if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
}

#[inline]
pub fn checked_div<C: HasDataLayout>(self, count: u64, cx: &C) -> Option<Size> {
let dl = cx.data_layout();

let bytes = self.bytes().checked_div(count)?;
if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
}

/// Truncates `value` to `self` bits and then sign-extends it to 128 bits
/// (i.e., if it is negative, fill with 1's on the left).
#[inline]
@@ -906,6 +914,25 @@ impl Mul<u64> for Size {
}
}

impl Div<Size> for u64 {
type Output = Size;
#[inline]
fn div(self, size: Size) -> Size {
size / self
}
}

impl Div<u64> for Size {
type Output = Size;
#[inline]
fn div(self, count: u64) -> Size {
match self.bytes().checked_div(count) {
Some(bytes) => Size::from_bytes(bytes),
None => panic!("Size::div: {} / {}: division by zero", self.bytes(), count),
}
}
}

impl AddAssign for Size {
#[inline]
fn add_assign(&mut self, other: Size) {
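For context, a usage sketch of the new `Size` division (valid inside the rustc workspace, where `rustc_abi` is available; the operator form panics on a zero divisor, while `checked_div` returns `None` on a zero divisor or a result beyond the target's object-size bound):

use rustc_abi::Size;

// Split a 32-byte table into four equal slots with the new operator.
fn slot_size(total: Size) -> Size {
    // Equivalent to Size::from_bytes(total.bytes() / 4); panics if the
    // divisor is zero. Use `checked_div` with a layout context to get an
    // Option instead.
    total / 4
}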
4 changes: 4 additions & 0 deletions compiler/rustc_codegen_llvm/src/builder.rs
@@ -614,6 +614,10 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
}
}

fn load_relative(&mut self, ptr: &'ll Value, byte_offset: &'ll Value) -> &'ll Value {
unsafe { llvm::LLVMBuildLoadRelative(self.llbuilder, ptr, byte_offset) }
}

fn volatile_load(&mut self, ty: &'ll Type, ptr: &'ll Value) -> &'ll Value {
unsafe {
let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
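`LLVMBuildLoadRelative` presumably lowers to LLVM's `llvm.load.relative` intrinsic. A plain-Rust model of those semantics (a sketch of the behavior, not the actual codegen):

// Model of `llvm.load.relative(ptr, offset)`: read a 32-bit relative
// offset at `ptr + offset`, then rebase it on `ptr` to recover a pointer.
unsafe fn load_relative_model(ptr: *const u8, byte_offset: isize) -> *const u8 {
    let slot = ptr.offset(byte_offset).cast::<i32>();
    ptr.offset(slot.read_unaligned() as isize)
}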
47 changes: 43 additions & 4 deletions compiler/rustc_codegen_llvm/src/common.rs
@@ -287,8 +287,12 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> {
self.const_bitcast(llval, llty)
};
} else {
let init =
const_alloc_to_llvm(self, alloc.inner(), /*static*/ false);
let init = const_alloc_to_llvm(
self,
alloc.inner(),
/*static*/ false,
/*vtable_base*/ None,
);
let alloc = alloc.inner();
let value = match alloc.mutability {
Mutability::Mut => self.static_addr_of_mut(init, alloc.align, None),
@@ -320,7 +324,12 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> {
}),
)))
.unwrap_memory();
let init = const_alloc_to_llvm(self, alloc.inner(), /*static*/ false);
let init = const_alloc_to_llvm(
self,
alloc.inner(),
/*static*/ false,
/*vtable_base*/ None,
);
self.static_addr_of_impl(init, alloc.inner().align, None)
}
GlobalAlloc::Static(def_id) => {
@@ -354,7 +363,37 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> {
}

fn const_data_from_alloc(&self, alloc: ConstAllocation<'_>) -> Self::Value {
const_alloc_to_llvm(self, alloc.inner(), /*static*/ false)
const_alloc_to_llvm(self, alloc.inner(), /*static*/ false, /*vtable_base*/ None)
}

fn construct_vtable(
&self,
vtable_allocation: ConstAllocation<'_>,
num_entries: u64,
) -> Self::Value {
// When constructing a relative vtable, we must create the global before building
// its initializer, since the initializer refers back to the global it will be
// bound to. Regular vtables aren't self-referential, so their initializer can be
// created on its own.
if self.sess().opts.unstable_opts.experimental_relative_rust_abi_vtables {
let llty = self.type_array(self.type_i32(), num_entries);
let vtable = self.static_addr_of_mut_from_type(
llty,
self.data_layout().i32_align.abi,
Some("vtable"),
);
let init = const_alloc_to_llvm(
self,
vtable_allocation.inner(),
/*static*/ false,
Some(vtable),
);
self.static_addr_of_impl_for_gv(init, vtable)
} else {
let vtable_const = self.const_data_from_alloc(vtable_allocation);
let align = self.data_layout().pointer_align().abi;
self.static_addr_of(vtable_const, align, Some("vtable"))
}
}

fn const_ptr_byte_offset(&self, base_addr: Self::Value, offset: abi::Size) -> Self::Value {
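The comment in `construct_vtable` is the heart of the scheme: each 4-byte entry records the distance from the vtable's own address to its target, so the global must exist before its initializer can be computed. A model with plain integers standing in for link-time addresses (illustrative only, not compiler code):

// Build entries relative to the vtable's own base address.
fn build_entries(vtable_addr: u64, targets: &[u64]) -> Vec<i32> {
    targets.iter().map(|&t| t.wrapping_sub(vtable_addr) as i32).collect()
}

// Resolution is the inverse: sign-extend the entry and rebase it on the vtable.
fn resolve_entry(vtable_addr: u64, entries: &[i32], i: usize) -> u64 {
    vtable_addr.wrapping_add(entries[i] as i64 as u64)
}

The round trip holds whenever each target lies within i32 range of the vtable, which is what lets slots shrink from pointer size to 4 bytes.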
128 changes: 111 additions & 17 deletions compiler/rustc_codegen_llvm/src/consts.rs
@@ -1,6 +1,6 @@
use std::ops::Range;

use rustc_abi::{Align, HasDataLayout, Primitive, Scalar, Size, WrappingRange};
use rustc_abi::{Align, Endian, HasDataLayout, Primitive, Scalar, Size, WrappingRange};
use rustc_codegen_ssa::common;
use rustc_codegen_ssa::traits::*;
use rustc_hir::LangItem;
@@ -28,6 +28,7 @@ pub(crate) fn const_alloc_to_llvm<'ll>(
cx: &CodegenCx<'ll, '_>,
alloc: &Allocation,
is_static: bool,
vtable_base: Option<&'ll Value>,
) -> &'ll Value {
// We expect that callers of const_alloc_to_llvm will instead directly codegen a pointer or
// integer for any &ZST where the ZST is a constant (i.e. not a static). We should never be
Expand All @@ -43,6 +44,8 @@ pub(crate) fn const_alloc_to_llvm<'ll>(
let dl = cx.data_layout();
let pointer_size = dl.pointer_size();
let pointer_size_bytes = pointer_size.bytes() as usize;
let use_relative_layout = cx.sess().opts.unstable_opts.experimental_relative_rust_abi_vtables
&& vtable_base.is_some();

// Note: this function may call `inspect_with_uninit_and_ptr_outside_interpreter`, so `range`
// must be within the bounds of `alloc` and not contain or overlap a pointer provenance.
@@ -51,7 +54,11 @@
cx: &'a CodegenCx<'ll, 'b>,
alloc: &'a Allocation,
range: Range<usize>,
use_relative_layout: bool,
) {
let dl = cx.data_layout();
let pointer_size = dl.pointer_size();
let pointer_size_bytes = pointer_size.bytes() as usize;
let chunks = alloc.init_mask().range_as_init_chunks(range.clone().into());

let chunk_to_llval = move |chunk| match chunk {
@@ -74,7 +81,43 @@ pub(crate) fn const_alloc_to_llvm<'ll>(
let allow_uninit_chunks = chunks.clone().take(max.saturating_add(1)).count() <= max;

if allow_uninit_chunks {
llvals.extend(chunks.map(chunk_to_llval));
if use_relative_layout {
// Rather than being stored as a struct of pointers or byte arrays, a relative
// vtable is a flat i32 array, so its components must be emitted as i32 chunks.
// Here we explicitly regroup the raw bytes into i32s.
//
// This only works if each pointer-sized constant actually fits into 4 bytes,
// which the assertion below enforces.
for chunk in chunks {
match chunk {
InitChunk::Init(range) => {
let range =
(range.start.bytes() as usize)..(range.end.bytes() as usize);
let bytes =
alloc.inspect_with_uninit_and_ptr_outside_interpreter(range);
for bytes in bytes.chunks_exact(pointer_size_bytes) {
assert!(
bytes[4..pointer_size_bytes].iter().all(|&x| x == 0),
"Cannot fit constant into 4-bytes: {:?}",
bytes
);
let bytes: [u8; 4] = bytes[0..4].try_into().unwrap();
let val: u32 = match dl.endian {
Endian::Big => u32::from_be_bytes(bytes),
Endian::Little => u32::from_le_bytes(bytes),
};
llvals.push(cx.const_u32(val));
}
}
InitChunk::Uninit(range) => {
let len = range.end.bytes() - range.start.bytes();
let val = cx.const_undef(cx.type_array(cx.type_i8(), len / 2));
llvals.push(val);
}
};
}
} else {
llvals.extend(chunks.map(chunk_to_llval));
}
} else {
// If this allocation contains any uninit bytes, codegen as if it was initialized
// (using some arbitrary value for uninit bytes).
@@ -92,7 +135,13 @@ pub(crate) fn const_alloc_to_llvm<'ll>(
// This `inspect` is okay since we have checked that there is no provenance, it
// is within the bounds of the allocation, and it doesn't affect interpreter execution
// (we inspect the result after interpreter execution).
append_chunks_of_init_and_uninit_bytes(&mut llvals, cx, alloc, next_offset..offset);
append_chunks_of_init_and_uninit_bytes(
&mut llvals,
cx,
alloc,
next_offset..offset,
use_relative_layout,
);
}
let ptr_offset = read_target_uint(
dl.endian,
@@ -108,38 +157,64 @@ pub(crate) fn const_alloc_to_llvm<'ll>(

let address_space = cx.tcx.global_alloc(prov.alloc_id()).address_space(cx);

llvals.push(cx.scalar_to_backend(
InterpScalar::from_pointer(Pointer::new(prov, Size::from_bytes(ptr_offset)), &cx.tcx),
Scalar::Initialized {
value: Primitive::Pointer(address_space),
valid_range: WrappingRange::full(pointer_size),
},
cx.type_ptr_ext(address_space),
));
let s = {
let scalar = cx.scalar_to_backend(
InterpScalar::from_pointer(
Pointer::new(prov, Size::from_bytes(ptr_offset)),
&cx.tcx,
),
Scalar::Initialized {
value: Primitive::Pointer(address_space),
valid_range: WrappingRange::full(pointer_size),
},
cx.type_ptr_ext(address_space),
);

if use_relative_layout {
unsafe {
let fptr = llvm::LLVMDSOLocalEquivalent(scalar);
let sub = llvm::LLVMConstSub(
llvm::LLVMConstPtrToInt(fptr, cx.type_i64()),
llvm::LLVMConstPtrToInt(vtable_base.unwrap(), cx.type_i64()),
);
llvm::LLVMConstTrunc(sub, cx.type_i32())
}
} else {
scalar
}
};

llvals.push(s);
next_offset = offset + pointer_size_bytes;
}
if alloc.len() >= next_offset {
let range = next_offset..alloc.len();
// This `inspect` is okay since we have checked that it is after all provenance, it is
// within the bounds of the allocation, and it doesn't affect interpreter execution (we
// inspect the result after interpreter execution).
append_chunks_of_init_and_uninit_bytes(&mut llvals, cx, alloc, range);
append_chunks_of_init_and_uninit_bytes(&mut llvals, cx, alloc, range, use_relative_layout);
}

// Avoid wrapping in a struct if there is only a single value. This ensures
// that LLVM is able to perform the string merging optimization if the constant
// is a valid C string. LLVM only considers bare arrays for this optimization,
// not arrays wrapped in a struct. LLVM handles this at:
// https://github.com/rust-lang/llvm-project/blob/acaea3d2bb8f351b740db7ebce7d7a40b9e21488/llvm/lib/Target/TargetLoweringObjectFile.cpp#L249-L280
if let &[data] = &*llvals { data } else { cx.const_struct(&llvals, true) }
if let &[data] = &*llvals {
data
} else if use_relative_layout {
cx.const_array(cx.type_i32(), &llvals)
} else {
cx.const_struct(&llvals, true)
}
}

fn codegen_static_initializer<'ll, 'tcx>(
cx: &CodegenCx<'ll, 'tcx>,
def_id: DefId,
) -> Result<(&'ll Value, ConstAllocation<'tcx>), ErrorHandled> {
let alloc = cx.tcx.eval_static_initializer(def_id)?;
Ok((const_alloc_to_llvm(cx, alloc.inner(), /*static*/ true), alloc))
Ok((const_alloc_to_llvm(cx, alloc.inner(), /*static*/ true, /*vtable_base*/ None), alloc))
}

fn set_global_alignment<'ll>(cx: &CodegenCx<'ll, '_>, gv: &'ll Value, mut align: Align) {
@@ -232,19 +307,29 @@ impl<'ll> CodegenCx<'ll, '_> {
cv: &'ll Value,
align: Align,
kind: Option<&str>,
) -> &'ll Value {
let gv = self.static_addr_of_mut_from_type(self.val_ty(cv), align, kind);
llvm::set_initializer(gv, cv);
gv
}

pub(crate) fn static_addr_of_mut_from_type(
&self,
ty: &'ll Type,
align: Align,
kind: Option<&str>,
) -> &'ll Value {
let gv = match kind {
Some(kind) if !self.tcx.sess.fewer_names() => {
let name = self.generate_local_symbol_name(kind);
let gv = self.define_global(&name, self.val_ty(cv)).unwrap_or_else(|| {
let gv = self.define_global(&name, ty).unwrap_or_else(|| {
bug!("symbol `{}` is already defined", name);
});
llvm::set_linkage(gv, llvm::Linkage::PrivateLinkage);
gv
}
_ => self.define_private_global(self.val_ty(cv)),
_ => self.define_private_global(ty),
};
llvm::set_initializer(gv, cv);
set_global_alignment(self, gv, align);
llvm::set_unnamed_address(gv, llvm::UnnamedAddr::Global);
gv
@@ -277,6 +362,15 @@ impl<'ll> CodegenCx<'ll, '_> {
gv
}

pub(crate) fn static_addr_of_impl_for_gv(&self, cv: &'ll Value, gv: &'ll Value) -> &'ll Value {
assert!(!self.const_globals.borrow().contains_key(&cv));
let mut binding = self.const_globals.borrow_mut();
binding.insert(cv, gv);
llvm::set_initializer(gv, cv);
llvm::set_global_constant(gv, true);
gv
}

#[instrument(level = "debug", skip(self))]
pub(crate) fn get_static(&self, def_id: DefId) -> &'ll Value {
let instance = Instance::mono(self.tcx, def_id);
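Two details of the repacking in `const_alloc_to_llvm` are worth isolating: each pointer-sized slot must already fit in 4 bytes, and uninit ranges are emitted at half length (`len / 2`), which appears to hardcode the 8-to-4-byte shrink of a 64-bit target. A sketch of the little-endian slot repacking (hypothetical helper, mirroring the assert-and-truncate loop):

// Repack one 8-byte relative-vtable slot into a single u32.
fn pack_slot_le(slot: [u8; 8]) -> u32 {
    // The upper half must be zero, as the codegen loop asserts.
    assert!(slot[4..].iter().all(|&b| b == 0), "cannot fit constant into 4 bytes: {slot:?}");
    u32::from_le_bytes(slot[..4].try_into().unwrap())
}

// e.g. pack_slot_le(24u64.to_le_bytes()) == 24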
9 changes: 9 additions & 0 deletions compiler/rustc_codegen_llvm/src/llvm/ffi.rs
@@ -1169,6 +1169,8 @@ unsafe extern "C" {
pub(crate) fn LLVMGetAggregateElement(ConstantVal: &Value, Idx: c_uint) -> Option<&Value>;
pub(crate) fn LLVMGetConstOpcode(ConstantVal: &Value) -> Opcode;
pub(crate) fn LLVMIsAConstantExpr(Val: &Value) -> Option<&Value>;
pub(crate) fn LLVMConstSub<'a>(LHS: &'a Value, RHS: &'a Value) -> &'a Value;
pub(crate) fn LLVMConstTrunc<'a>(ConstantVal: &'a Value, ToType: &'a Type) -> &'a Value;

// Operations on global variables, functions, and aliases (globals)
pub(crate) fn LLVMIsDeclaration(Global: &Value) -> Bool;
@@ -1198,6 +1200,13 @@ unsafe extern "C" {
pub(crate) safe fn LLVMSetTailCall(CallInst: &Value, IsTailCall: Bool);
pub(crate) safe fn LLVMRustSetTailCallKind(CallInst: &Value, Kind: TailCallKind);

pub(crate) fn LLVMDSOLocalEquivalent(GlobalVar: &Value) -> &Value;
pub(crate) fn LLVMBuildLoadRelative<'a>(
Builder: &Builder<'a>,
Ptr: &'a Value,
ByteOffset: &'a Value,
) -> &'a Value;

// Operations on attributes
pub(crate) fn LLVMCreateStringAttribute(
C: &Context,
7 changes: 7 additions & 0 deletions compiler/rustc_codegen_llvm/src/type_.rs
@@ -292,6 +292,13 @@ impl<'ll, 'tcx> LayoutTypeCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
fn fn_ptr_backend_type(&self, fn_abi: &FnAbi<'tcx, Ty<'tcx>>) -> &'ll Type {
fn_abi.ptr_to_llvm_type(self)
}
fn vtable_component_type(&self, fn_abi: &FnAbi<'tcx, Ty<'tcx>>) -> &'ll Type {
if self.sess().opts.unstable_opts.experimental_relative_rust_abi_vtables {
self.type_i32()
} else {
fn_abi.ptr_to_llvm_type(self)
}
}
fn reg_backend_type(&self, ty: &Reg) -> &'ll Type {
ty.llvm_type(self)
}
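`vtable_component_type` is where each slot's type shrinks from a pointer to `i32` under the flag. That is only sound while every target stays within `i32` range of its vtable, the +/- 2 GiB window typical small code models provide; a sketch of the invariant:

// Can `target` be encoded as a relative entry of this vtable?
fn fits_relative_entry(target: u64, vtable: u64) -> bool {
    i32::try_from(target.wrapping_sub(vtable) as i64).is_ok()
}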