@@ -3277,12 +3277,13 @@ static bool isRemovableWrite(CallBase &CB, Value *UsedV,
   return Dest && Dest->Ptr == UsedV;
 }
 
-static bool isAllocSiteRemovable(Instruction *AI,
-                                 SmallVectorImpl<WeakTrackingVH> &Users,
-                                 const TargetLibraryInfo &TLI) {
+static std::optional<ModRefInfo>
+isAllocSiteRemovable(Instruction *AI, SmallVectorImpl<WeakTrackingVH> &Users,
+                     const TargetLibraryInfo &TLI, bool KnowInit) {
   SmallVector<Instruction*, 4> Worklist;
   const std::optional<StringRef> Family = getAllocationFamily(AI, &TLI);
   Worklist.push_back(AI);
+  ModRefInfo Access = KnowInit ? ModRefInfo::NoModRef : ModRefInfo::Mod;
 
   do {
     Instruction *PI = Worklist.pop_back_val();
@@ -3291,7 +3292,7 @@ static bool isAllocSiteRemovable(Instruction *AI,
       switch (I->getOpcode()) {
       default:
         // Give up the moment we see something we can't handle.
-        return false;
+        return std::nullopt;
 
       case Instruction::AddrSpaceCast:
       case Instruction::BitCast:
@@ -3306,10 +3307,10 @@ static bool isAllocSiteRemovable(Instruction *AI,
         // We also fold comparisons in some conditions provided the alloc has
         // not escaped (see isNeverEqualToUnescapedAlloc).
         if (!ICI->isEquality())
-          return false;
+          return std::nullopt;
         unsigned OtherIndex = (ICI->getOperand(0) == PI) ? 1 : 0;
         if (!isNeverEqualToUnescapedAlloc(ICI->getOperand(OtherIndex), TLI, AI))
-          return false;
+          return std::nullopt;
 
         // Do not fold compares to aligned_alloc calls, as they may have to
         // return null in case the required alignment cannot be satisfied,
@@ -3329,7 +3330,7 @@ static bool isAllocSiteRemovable(Instruction *AI,
         if (CB && TLI.getLibFunc(*CB->getCalledFunction(), TheLibFunc) &&
             TLI.has(TheLibFunc) && TheLibFunc == LibFunc_aligned_alloc &&
             !AlignmentAndSizeKnownValid(CB))
-          return false;
+          return std::nullopt;
         Users.emplace_back(I);
         continue;
       }
@@ -3339,14 +3340,21 @@ static bool isAllocSiteRemovable(Instruction *AI,
         if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
           switch (II->getIntrinsicID()) {
           default:
-            return false;
+            return std::nullopt;
 
           case Intrinsic::memmove:
           case Intrinsic::memcpy:
           case Intrinsic::memset: {
             MemIntrinsic *MI = cast<MemIntrinsic>(II);
-            if (MI->isVolatile() || MI->getRawDest() != PI)
-              return false;
+            if (MI->isVolatile())
+              return std::nullopt;
+            // Note: this could also be ModRef, but we can still interpret that
+            // as just Mod in that case.
+            ModRefInfo NewAccess =
+                MI->getRawDest() == PI ? ModRefInfo::Mod : ModRefInfo::Ref;
+            if ((Access & ~NewAccess) != ModRefInfo::NoModRef)
+              return std::nullopt;
+            Access |= NewAccess;
             [[fallthrough]];
           }
           case Intrinsic::assume:
@@ -3365,11 +3373,6 @@ static bool isAllocSiteRemovable(Instruction *AI,
           }
         }
 
-        if (isRemovableWrite(*cast<CallBase>(I), PI, TLI)) {
-          Users.emplace_back(I);
-          continue;
-        }
-
         if (Family && getFreedOperand(cast<CallBase>(I), &TLI) == PI &&
             getAllocationFamily(I, &TLI) == Family) {
           Users.emplace_back(I);
@@ -3383,20 +3386,43 @@ static bool isAllocSiteRemovable(Instruction *AI,
           continue;
         }
 
-        return false;
+        if (!isRefSet(Access) &&
+            isRemovableWrite(*cast<CallBase>(I), PI, TLI)) {
+          Access |= ModRefInfo::Mod;
+          Users.emplace_back(I);
+          continue;
+        }
+
+        return std::nullopt;
 
       case Instruction::Store: {
         StoreInst *SI = cast<StoreInst>(I);
         if (SI->isVolatile() || SI->getPointerOperand() != PI)
-          return false;
+          return std::nullopt;
+        if (isRefSet(Access))
+          return std::nullopt;
+        Access |= ModRefInfo::Mod;
+        Users.emplace_back(I);
+        continue;
+      }
+
+      case Instruction::Load: {
+        LoadInst *LI = cast<LoadInst>(I);
+        if (LI->isVolatile() || LI->getPointerOperand() != PI)
+          return std::nullopt;
+        if (isModSet(Access))
+          return std::nullopt;
+        Access |= ModRefInfo::Ref;
         Users.emplace_back(I);
         continue;
       }
       }
       llvm_unreachable("missing a return?");
     }
   } while (!Worklist.empty());
-  return true;
+
+  assert(Access != ModRefInfo::ModRef);
+  return Access;
 }
 
 Instruction *InstCombinerImpl::visitAllocSite(Instruction &MI) {
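The new isAllocSiteRemovable summarizes every user of the allocation as a single ModRefInfo value: it starts from NoModRef when the initial contents are known (KnowInit) and from Mod otherwise, adds Mod for writes it can drop and Ref for reads it can later satisfy from the known initial value, and gives up as soon as the summary would mix the two. A minimal standalone sketch of that accumulation rule, not part of the patch and using a local MRI enum and fold() helper as stand-ins for llvm::ModRefInfo, could look like this:

#include <cassert>
#include <cstdio>
#include <optional>

// Stand-in for llvm::ModRefInfo: same two-bit Ref/Mod layout.
enum class MRI : unsigned { NoModRef = 0, Ref = 1, Mod = 2, ModRef = 3 };
constexpr MRI operator|(MRI A, MRI B) { return MRI(unsigned(A) | unsigned(B)); }
constexpr MRI operator&(MRI A, MRI B) { return MRI(unsigned(A) & unsigned(B)); }
constexpr MRI operator~(MRI A) { return MRI(~unsigned(A) & 3u); }

// Fold one user's access into the running summary, refusing to mix reads of
// the allocation with writes whose effect we cannot reproduce.
std::optional<MRI> fold(MRI Access, MRI NewAccess) {
  if ((Access & ~NewAccess) != MRI::NoModRef)
    return std::nullopt;
  return Access | NewAccess;
}

int main() {
  // KnowInit == true: nothing has touched the allocation yet.
  MRI Access = MRI::NoModRef;
  auto AfterLoad = fold(Access, MRI::Ref);      // a load: summary becomes Ref
  assert(AfterLoad && *AfterLoad == MRI::Ref);
  auto AfterStore = fold(*AfterLoad, MRI::Mod); // a later store would give ModRef
  assert(!AfterStore);                          // -> allocation not removable
  std::printf("summary never reaches ModRef\n");
  return 0;
}

The same invariant is what the assert(Access != ModRefInfo::ModRef) at the end of the rewritten function checks.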
@@ -3424,10 +3450,31 @@ Instruction *InstCombinerImpl::visitAllocSite(Instruction &MI) {
     DIB.reset(new DIBuilder(*MI.getModule(), /*AllowUnresolved=*/false));
   }
 
-  if (isAllocSiteRemovable(&MI, Users, TLI)) {
+  // Determine what getInitialValueOfAllocation would return without actually
+  // allocating the result.
+  bool KnowInitUndef = false;
+  bool KnowInitZero = false;
+  Constant *Init =
+      getInitialValueOfAllocation(&MI, &TLI, Type::getInt8Ty(MI.getContext()));
+  if (Init) {
+    if (isa<UndefValue>(Init))
+      KnowInitUndef = true;
+    else if (Init->isNullValue())
+      KnowInitZero = true;
+  }
+  // The various sanitizers don't actually return undef memory, but rather
+  // memory initialized with special forms of runtime poison
+  auto &F = *MI.getFunction();
+  if (F.hasFnAttribute(Attribute::SanitizeMemory) ||
+      F.hasFnAttribute(Attribute::SanitizeAddress))
+    KnowInitUndef = false;
+
+  auto Removable =
+      isAllocSiteRemovable(&MI, Users, TLI, KnowInitZero | KnowInitUndef);
+  if (Removable) {
     for (unsigned i = 0, e = Users.size(); i != e; ++i) {
-      // Lowering all @llvm.objectsize calls first because they may
-      // use a bitcast/GEP of the alloca we are removing.
+      // Lowering all @llvm.objectsize and MTI calls first because they may use
+      // a bitcast/GEP of the alloca we are removing.
       if (!Users[i])
         continue;
 
@@ -3444,6 +3491,17 @@ Instruction *InstCombinerImpl::visitAllocSite(Instruction &MI) {
           eraseInstFromFunction(*I);
           Users[i] = nullptr; // Skip examining in the next loop.
         }
+        if (auto *MTI = dyn_cast<MemTransferInst>(I)) {
+          if (KnowInitZero && isRefSet(*Removable)) {
+            IRBuilderBase::InsertPointGuard Guard(Builder);
+            Builder.SetInsertPoint(MTI);
+            auto *M = Builder.CreateMemSet(
+                MTI->getRawDest(),
+                ConstantInt::get(Type::getInt8Ty(MI.getContext()), 0),
+                MTI->getLength(), MTI->getDestAlign());
+            M->copyMetadata(*MTI);
+          }
+        }
       }
     }
     for (unsigned i = 0, e = Users.size(); i != e; ++i) {
@@ -3466,7 +3524,14 @@ Instruction *InstCombinerImpl::visitAllocSite(Instruction &MI) {
       } else {
         // Casts, GEP, or anything else: we're about to delete this instruction,
         // so it can not have any valid uses.
-        replaceInstUsesWith(*I, PoisonValue::get(I->getType()));
+        Constant *Replace;
+        if (isa<LoadInst>(I)) {
+          assert(KnowInitZero || KnowInitUndef);
+          Replace = KnowInitUndef ? UndefValue::get(I->getType())
+                                  : Constant::getNullValue(I->getType());
+        } else
+          Replace = PoisonValue::get(I->getType());
+        replaceInstUsesWith(*I, Replace);
       }
       eraseInstFromFunction(*I);
     }
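For a calloc-style allocation (KnowInitZero) whose contents are only read by memcpy/memmove calls copying out of it, the net effect of the patch is that the allocation is deleted, each such copy is rewritten as a zero memset of its destination, and direct loads fold to zero (undef for allocations with undefined initial contents). A rough source-level illustration of that outcome, assuming the buffer is otherwise dead and ignoring the null check on calloc, is:

#include <cstddef>
#include <cstdlib>
#include <cstring>

// Before: the memcpy reads from the calloc'd buffer, which previously blocked
// removal of the allocation because the walk only accepted removable writes.
void before(char *Out, std::size_t N) {
  char *Tmp = static_cast<char *>(std::calloc(N, 1)); // known zero-initialized
  std::memcpy(Out, Tmp, N);                           // only reads known-zero bytes
  std::free(Tmp);
}

// After: roughly what the rewritten IR corresponds to: the calloc/free pair is
// gone and the copy has become a zero memset (the CreateMemSet call above).
void after(char *Out, std::size_t N) {
  std::memset(Out, 0, N);
}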