diff --git a/llvm/lib/Transforms/Utils/CallPromotionUtils.cpp b/llvm/lib/Transforms/Utils/CallPromotionUtils.cpp
index 17cba2e642a19..f8ebf962bd4aa 100644
--- a/llvm/lib/Transforms/Utils/CallPromotionUtils.cpp
+++ b/llvm/lib/Transforms/Utils/CallPromotionUtils.cpp
@@ -692,14 +692,14 @@ bool llvm::tryPromoteCall(CallBase &CB) {
   if (!VTableEntryLoad)
     return false; // Not a vtable entry load.
   Value *VTableEntryPtr = VTableEntryLoad->getPointerOperand();
-  APInt VTableOffset(DL.getTypeSizeInBits(VTableEntryPtr->getType()), 0);
+  APInt VTableOffset(DL.getIndexTypeSizeInBits(VTableEntryPtr->getType()), 0);
   Value *VTableBasePtr = VTableEntryPtr->stripAndAccumulateConstantOffsets(
       DL, VTableOffset, /* AllowNonInbounds */ true);
   LoadInst *VTablePtrLoad = dyn_cast<LoadInst>(VTableBasePtr);
   if (!VTablePtrLoad)
     return false; // Not a vtable load.
   Value *Object = VTablePtrLoad->getPointerOperand();
-  APInt ObjectOffset(DL.getTypeSizeInBits(Object->getType()), 0);
+  APInt ObjectOffset(DL.getIndexTypeSizeInBits(Object->getType()), 0);
   Value *ObjectBase = Object->stripAndAccumulateConstantOffsets(
       DL, ObjectOffset, /* AllowNonInbounds */ true);
   if (!(isa<AllocaInst>(ObjectBase) && ObjectOffset == 0))
@@ -712,7 +712,7 @@ bool llvm::tryPromoteCall(CallBase &CB) {
       VTablePtrLoad, VTablePtrLoad->getParent(), BBI, 0, nullptr, nullptr);
   if (!VTablePtr)
     return false; // No vtable found.
-  APInt VTableOffsetGVBase(DL.getTypeSizeInBits(VTablePtr->getType()), 0);
+  APInt VTableOffsetGVBase(DL.getIndexTypeSizeInBits(VTablePtr->getType()), 0);
   Value *VTableGVBase = VTablePtr->stripAndAccumulateConstantOffsets(
       DL, VTableOffsetGVBase, /* AllowNonInbounds */ true);
   GlobalVariable *GV = dyn_cast<GlobalVariable>(VTableGVBase);
diff --git a/llvm/test/Transforms/Inline/promote-call-bitwidth.ll b/llvm/test/Transforms/Inline/promote-call-bitwidth.ll
new file mode 100644
index 0000000000000..6a0ddb5601253
--- /dev/null
+++ b/llvm/test/Transforms/Inline/promote-call-bitwidth.ll
@@ -0,0 +1,48 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --function-signature --scrub-attributes
+; RUN: opt -S -passes=inline < %s | FileCheck %s
+
+;; Check that we correctly use the index size when accumulating offsets during CallPromotion
+
+target datalayout = "p200:128:128:128:64-A200-P200-G200"
+
+define void @test(ptr addrspace(200) %arg1, ptr addrspace(200) %arg2) local_unnamed_addr addrspace(200) {
+; CHECK-LABEL: define {{[^@]+}}@test
+; CHECK-SAME: (ptr addrspace(200) [[ARG1:%.*]], ptr addrspace(200) [[ARG2:%.*]]) local_unnamed_addr addrspace(200) {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[TMP0:%.*]] = load ptr addrspace(200), ptr addrspace(200) [[ARG2]], align 16
+; CHECK-NEXT:    call addrspace(200) void [[TMP0]](ptr addrspace(200) [[ARG1]])
+; CHECK-NEXT:    ret void
+;
+entry:
+  call void @call_fnptr(ptr addrspace(200) %arg1, ptr addrspace(200) %arg2)
+  ret void
+}
+
+define internal void @call_fnptr(ptr addrspace(200) %this, ptr addrspace(200) %arg) unnamed_addr addrspace(200) align 2 {
+entry:
+  %0 = load ptr addrspace(200), ptr addrspace(200) %arg, align 16
+  call void %0(ptr addrspace(200) %this)
+  ret void
+}
+
+define void @test2(ptr addrspace(200) %this) local_unnamed_addr addrspace(200) {
+; CHECK-LABEL: define {{[^@]+}}@test2
+; CHECK-SAME: (ptr addrspace(200) [[THIS:%.*]]) local_unnamed_addr addrspace(200) {
+; CHECK-NEXT:  entry:
+; CHECK-NEXT:    [[VTABLE_I:%.*]] = load ptr addrspace(200), ptr addrspace(200) [[THIS]], align 16
+; CHECK-NEXT:    [[FN_I:%.*]] = load ptr addrspace(200), ptr addrspace(200) [[VTABLE_I]], align 16
+; CHECK-NEXT:    call addrspace(200) void [[FN_I]](ptr addrspace(200) [[THIS]])
+; CHECK-NEXT:    ret void
+;
+entry:
+  call void @call_via_vtable(ptr addrspace(200) %this)
+  ret void
+}
+
+define internal void @call_via_vtable(ptr addrspace(200) %this) unnamed_addr addrspace(200) {
+entry:
+  %vtable = load ptr addrspace(200), ptr addrspace(200) %this, align 16
+  %fn = load ptr addrspace(200), ptr addrspace(200) %vtable, align 16
+  call void %fn(ptr addrspace(200) %this)
+  ret void
+}
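
Note: stripAndAccumulateConstantOffsets asserts that the bit width of the
offset APInt matches the index width of the pointer's address space. With a
datalayout such as "p200:128:128:128:64", pointers in AS200 are 128 bits wide
but the index width is 64 bits, so seeding the accumulator via
getTypeSizeInBits trips that assertion. A minimal sketch of the corrected
pattern applied in all three places (stripWithOffset is a hypothetical helper,
not part of this patch):

    #include "llvm/ADT/APInt.h"
    #include "llvm/IR/DataLayout.h"
    #include "llvm/IR/Value.h"
    using namespace llvm;

    // Hypothetical helper: seed the offset accumulator with the *index*
    // width of the pointer type, not its storage width, so the APInt bit
    // width matches what stripAndAccumulateConstantOffsets expects.
    static Value *stripWithOffset(const DataLayout &DL, Value *Ptr,
                                  APInt &Offset) {
      Offset = APInt(DL.getIndexTypeSizeInBits(Ptr->getType()), 0);
      return Ptr->stripAndAccumulateConstantOffsets(
          DL, Offset, /*AllowNonInbounds=*/true);
    }

For the common case where pointer width and index width agree, the two calls
return the same bit width, which is why the test needs the fat-pointer
datalayout above to exercise the difference.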