[LSR] Don't count conditional loads/store as enabling pre/post-index #159573
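
Summary: LSR's cost model (in LoopStrengthReduce.cpp) previously zeroed the per-iteration addressing cost whenever a use's address recurrence looked foldable into a pre- or post-indexed access, even if the load/store itself only runs on some iterations. This patch records, per LSRUse, whether all of its fixups are unconditional (executed on every iteration of the loop) and only grants the pre/post-index discount when that holds.

A source-level sketch of the kind of loop affected (not taken from the patch; the function and variable names are invented for illustration):

```c++
// Hypothetical example: the store only executes when the element is non-zero,
// so it is a *conditional* fixup. With this change, LSR no longer assumes the
// store enables pre/post-indexed addressing when costing formulae.
void copy_nonzero(int *dst, const int *src, int n) {
  for (int i = 0; i < n; ++i) {
    int v = src[i];
    if (v != 0)     // conditional guard: the store is skipped for zero values
      dst[i] = v;   // not guaranteed to execute on every iteration
  }
}
```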
@@ -1278,6 +1278,7 @@ struct LSRFixup {
   LSRFixup() = default;
 
   bool isUseFullyOutsideLoop(const Loop *L) const;
+  bool isUseUnconditional(const Loop *L) const;
 
   void print(raw_ostream &OS) const;
   void dump() const;
@@ -1318,6 +1319,11 @@ class LSRUse {
   /// the loop, in which case some special-case heuristics may be used.
   bool AllFixupsOutsideLoop = true;
 
+  /// This records whether all of the fixups using this LSRUse are unconditional
+  /// within the loop, meaning they will be executed in every iteration of the
+  /// loop.
+  bool AllFixupsUnconditional = true;
+
   /// RigidFormula is set to true to guarantee that this use will be associated
   /// with a single formula--the one that initially matched. Some SCEV
   /// expressions cannot be expanded. This allows LSR to consider the registers
@@ -1422,15 +1428,19 @@ void Cost::RateRegister(const Formula &F, const SCEV *Reg,
         TTI->isIndexedStoreLegal(TTI->MIM_PostInc, AR->getType())) {
       const SCEV *Start;
       const SCEVConstant *Step;
-      if (match(AR, m_scev_AffineAddRec(m_SCEV(Start), m_SCEVConstant(Step))))
+      if (match(AR, m_scev_AffineAddRec(m_SCEV(Start), m_SCEVConstant(Step)))) {
         // If the step size matches the base offset, we could use pre-indexed
         // addressing.
-        if (((AMK & TTI::AMK_PreIndexed) && F.BaseOffset.isFixed() &&
-             Step->getAPInt() == F.BaseOffset.getFixedValue()) ||
-            ((AMK & TTI::AMK_PostIndexed) && !isa<SCEVConstant>(Start) &&
-             SE->isLoopInvariant(Start, L)))
+        bool CanPreIndex = (AMK & TTI::AMK_PreIndexed) && F.BaseOffset.isFixed() &&
+                           Step->getAPInt() == F.BaseOffset.getFixedValue();
+        bool CanPostIndex = (AMK & TTI::AMK_PostIndexed) && !isa<SCEVConstant>(Start) &&
+                            SE->isLoopInvariant(Start, L);
+        // We can only pre or post index when the load/store is unconditional.
+        if ((CanPreIndex || CanPostIndex) && LU.AllFixupsUnconditional)
           LoopCost = 0;
+      }
     }
 
     // If the loop counts down to zero and we'll be using a hardware loop then
     // the addrec will be combined into the hardware loop instruction.
     if (LU.Kind == LSRUse::ICmpZero && F.countsDownToZero() &&
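
This hunk is the behavioural change: the old code set LoopCost = 0 whenever pre- or post-indexed addressing looked structurally possible; the new code additionally requires LU.AllFixupsUnconditional. The reason conditionality matters is that pre/post-indexed addressing (on AArch64/ARM, for example) folds the pointer increment into the memory instruction itself, so the increment only happens when the access happens. A rough C++ rendering of the semantics, with invented names, is below; it is an illustration of the addressing modes, not code from LLVM.

```c++
#include <cstddef>
#include <cstdint>

// Post-indexed load, roughly what AArch64's "ldr w0, [x1], #4" does: read
// through Ptr, then advance Ptr by Step elements as a side effect of the
// same instruction.
inline int32_t load_post_indexed(const int32_t *&Ptr, std::ptrdiff_t Step) {
  int32_t Value = *Ptr;
  Ptr += Step;
  return Value;
}

// Pre-indexed load, roughly "ldr w0, [x1, #4]!": advance Ptr first, then
// read from the updated address.
inline int32_t load_pre_indexed(const int32_t *&Ptr, std::ptrdiff_t Step) {
  Ptr += Step;
  return *Ptr;
}
```

If the load/store is skipped on some iterations, the embedded increment is skipped too, so the induction variable still needs a separate update on those iterations and the discount in the cost model is not justified.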
@@ -1647,6 +1657,12 @@ bool LSRFixup::isUseFullyOutsideLoop(const Loop *L) const {
   return !L->contains(UserInst);
 }
 
+/// Test whether this fixup is for an instruction that's unconditional, i.e.
+/// it's executed in every loop iteration.
+bool LSRFixup::isUseUnconditional(const Loop *L) const {
+  return isGuaranteedToExecuteForEveryIteration(UserInst, L);
+}
+
 #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
 void LSRFixup::print(raw_ostream &OS) const {
   OS << "UserInst=";
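
isUseUnconditional defers to isGuaranteedToExecuteForEveryIteration, which is conservative: an instruction qualifies only when nothing on the path through an iteration can skip it, so an access inside an if, or one that follows a possible early exit, is treated as conditional. A source-level contrast (hypothetical functions, not from the patch):

```c++
// Both loops store through dst, but only the first store is guaranteed to
// execute on every iteration of its loop.
void always_stores(int *dst, const int *src, int n) {
  for (int i = 0; i < n; ++i)
    dst[i] = src[i] * 2;   // unconditional: runs on every iteration
}

int finds_then_stores(int *dst, const int *src, int n, int key) {
  for (int i = 0; i < n; ++i) {
    if (src[i] == key)
      return i;            // possible early exit in this iteration...
    dst[i] = src[i];       // ...so this store is not guaranteed to run
  }
  return -1;
}
```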
@@ -1783,6 +1799,9 @@ void LSRUse::print(raw_ostream &OS) const {
   if (AllFixupsOutsideLoop)
     OS << ", all-fixups-outside-loop";
 
+  if (AllFixupsUnconditional)
+    OS << ", all-fixups-unconditional";
+
   if (WidestFixupType)
     OS << ", widest fixup type: " << *WidestFixupType;
 }
@@ -3607,6 +3626,7 @@ void LSRInstance::CollectFixupsAndInitialFormulae() {
     LF.PostIncLoops = TmpPostIncLoops;
     LF.Offset = Offset;
     LU.AllFixupsOutsideLoop &= LF.isUseFullyOutsideLoop(L);
+    LU.AllFixupsUnconditional &= LF.isUseUnconditional(L);
 
     // Create SCEV as Formula for calculating baseline cost
     if (!VisitedLSRUse.count(LUIdx) && !LF.isUseFullyOutsideLoop(L)) {
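
An LSRUse aggregates many fixups, so the new flag is folded in with &= as each fixup is collected (and again below when loop-invariant fixups are collected and when uses are merged): the use stays "all unconditional" only while every fixup seen so far is unconditional. A tiny standalone illustration of that accumulation pattern, with invented names:

```c++
#include <cassert>
#include <initializer_list>

// Fold per-fixup facts into a per-use summary, mirroring
// AllFixupsUnconditional &= isUseUnconditional(...): a single conditional
// fixup permanently clears the flag.
static bool allUnconditional(std::initializer_list<bool> FixupIsUnconditional) {
  bool All = true;
  for (bool U : FixupIsUnconditional)
    All &= U;
  return All;
}

int main() {
  assert(allUnconditional({true, true, true}));
  assert(!allUnconditional({true, false, true}));
  return 0;
}
```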
@@ -3803,6 +3823,7 @@ LSRInstance::CollectLoopInvariantFixupsAndFormulae() {
       LF.OperandValToReplace = U;
       LF.Offset = Offset;
       LU.AllFixupsOutsideLoop &= LF.isUseFullyOutsideLoop(L);
+      LU.AllFixupsUnconditional &= LF.isUseUnconditional(L);
       if (!LU.WidestFixupType ||
           SE.getTypeSizeInBits(LU.WidestFixupType) <
               SE.getTypeSizeInBits(LF.OperandValToReplace->getType()))
@@ -4940,6 +4961,7 @@ void LSRInstance::NarrowSearchSpaceByCollapsingUnrolledCode() {
       LLVM_DEBUG(dbgs() << " Deleting use "; LU.print(dbgs()); dbgs() << '\n');
 
       LUThatHas->AllFixupsOutsideLoop &= LU.AllFixupsOutsideLoop;
+      LUThatHas->AllFixupsUnconditional &= LU.AllFixupsUnconditional;
 
       // Transfer the fixups of LU to LUThatHas.
       for (LSRFixup &Fixup : LU.Fixups) {