@@ -2664,15 +2664,15 @@ LSRInstance::OptimizeLoopTermCond() {
     // Conservatively avoid trying to use the post-inc value in non-latch
     // exits if there may be pre-inc users in intervening blocks.
     if (LatchBlock != ExitingBlock)
-      for (IVUsers::const_iterator UI = IU.begin(), E = IU.end(); UI != E; ++UI)
+      for (const IVStrideUse &UI : IU)
         // Test if the use is reachable from the exiting block. This dominator
         // query is a conservative approximation of reachability.
-        if (&*UI != CondUse &&
-            !DT.properlyDominates(UI->getUser()->getParent(), ExitingBlock)) {
+        if (&UI != CondUse &&
+            !DT.properlyDominates(UI.getUser()->getParent(), ExitingBlock)) {
           // Conservatively assume there may be reuse if the quotient of their
           // strides could be a legal scale.
           const SCEV *A = IU.getStride(*CondUse, L);
-          const SCEV *B = IU.getStride(*UI, L);
+          const SCEV *B = IU.getStride(UI, L);
           if (!A || !B) continue;
           if (SE.getTypeSizeInBits(A->getType()) !=
               SE.getTypeSizeInBits(B->getType())) {
@@ -2693,9 +2693,9 @@ LSRInstance::OptimizeLoopTermCond() {
               C->getValue().isMinSignedValue())
             goto decline_post_inc;
           // Check for possible scaled-address reuse.
-          if (isAddressUse(TTI, UI->getUser(), UI->getOperandValToReplace())) {
-            MemAccessTy AccessTy = getAccessType(
-                TTI, UI->getUser(), UI->getOperandValToReplace());
+          if (isAddressUse(TTI, UI.getUser(), UI.getOperandValToReplace())) {
+            MemAccessTy AccessTy =
+                getAccessType(TTI, UI.getUser(), UI.getOperandValToReplace());
             int64_t Scale = C->getSExtValue();
             if (TTI.isLegalAddressingMode(AccessTy.MemTy, /*BaseGV=*/nullptr,
                                           /*BaseOffset=*/0,
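
The change above is a mechanical modernization: an explicit IVUsers::const_iterator walk over IU becomes a range-based for binding a const IVStrideUse &, so the iterator idioms (&*UI for the element's address, UI-> for member access) become plain reference idioms (&UI, UI.). A minimal standalone sketch of the same pattern follows; the Use type and Skip pointer are hypothetical stand-ins for LLVM's IVStrideUse and CondUse, not the real API.

#include <iostream>
#include <vector>

// Hypothetical stand-in for an IV-use record, for illustration only.
struct Use {
  int Id;
  int getId() const { return Id; }
};

int main() {
  std::vector<Use> Uses = {{1}, {2}, {3}};
  const Use *Skip = &Uses[1]; // plays the role of CondUse

  // Old style: explicit const_iterator loop; identity check needs &*UI and
  // member access goes through UI->.
  for (std::vector<Use>::const_iterator UI = Uses.begin(), E = Uses.end();
       UI != E; ++UI)
    if (&*UI != Skip)
      std::cout << UI->getId() << '\n';

  // New style: range-based for binds a const reference, so the identity
  // check becomes &UI and member access becomes UI. — same behavior.
  for (const Use &UI : Uses)
    if (&UI != Skip)
      std::cout << UI.getId() << '\n';
}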