@@ -2663,15 +2663,15 @@ LSRInstance::OptimizeLoopTermCond() {
     // Conservatively avoid trying to use the post-inc value in non-latch
     // exits if there may be pre-inc users in intervening blocks.
     if (LatchBlock != ExitingBlock)
-      for (IVUsers::const_iterator UI = IU.begin(), E = IU.end(); UI != E; ++UI)
+      for (const IVStrideUse &UI : IU)
         // Test if the use is reachable from the exiting block. This dominator
         // query is a conservative approximation of reachability.
-        if (&*UI != CondUse &&
-            !DT.properlyDominates(UI->getUser()->getParent(), ExitingBlock)) {
+        if (&UI != CondUse &&
+            !DT.properlyDominates(UI.getUser()->getParent(), ExitingBlock)) {
           // Conservatively assume there may be reuse if the quotient of their
           // strides could be a legal scale.
           const SCEV *A = IU.getStride(*CondUse, L);
-          const SCEV *B = IU.getStride(*UI, L);
+          const SCEV *B = IU.getStride(UI, L);
           if (!A || !B) continue;
           if (SE.getTypeSizeInBits(A->getType()) !=
               SE.getTypeSizeInBits(B->getType())) {
@@ -2692,9 +2692,9 @@ LSRInstance::OptimizeLoopTermCond() {
                 C->getValue().isMinSignedValue())
               goto decline_post_inc;
             // Check for possible scaled-address reuse.
-            if (isAddressUse(TTI, UI->getUser(), UI->getOperandValToReplace())) {
-              MemAccessTy AccessTy = getAccessType(
-                  TTI, UI->getUser(), UI->getOperandValToReplace());
+            if (isAddressUse(TTI, UI.getUser(), UI.getOperandValToReplace())) {
+              MemAccessTy AccessTy =
+                  getAccessType(TTI, UI.getUser(), UI.getOperandValToReplace());
               int64_t Scale = C->getSExtValue();
               if (TTI.isLegalAddressingMode(AccessTy.MemTy, /*BaseGV=*/nullptr,
                                             /*BaseOffset=*/0,
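
Not part of the commit: a minimal, self-contained sketch of the pattern the change applies, for readers unfamiliar with it. An explicit `const_iterator` walk is replaced by a range-based for over the container's elements, so the identity test `&*UI != CondUse` becomes `&UI != CondUse` on the reference. The sketch uses `std::list<int>` as a stand-in for the IVUsers list of `IVStrideUse` (a node-based container with stable element addresses); names like `Uses`, `CondUse`, `SumOld`, and `SumNew` are illustrative only.

```cpp
#include <cassert>
#include <iostream>
#include <iterator>
#include <list>

int main() {
  // Stand-in for IVUsers: a node-based container whose elements have
  // stable addresses, like the ilist of IVStrideUse.
  std::list<int> Uses = {1, 2, 3, 4};

  // The "condition use" to skip, identified by address
  // (analogous to CondUse in OptimizeLoopTermCond).
  const int *CondUse = &*std::next(Uses.begin(), 2); // the element '3'

  // Old style: explicit const_iterator loop; identity test via &*UI.
  int SumOld = 0;
  for (std::list<int>::const_iterator UI = Uses.begin(), E = Uses.end();
       UI != E; ++UI)
    if (&*UI != CondUse)
      SumOld += *UI;

  // New style: range-based for; the reference binds to the same element,
  // so &UI is the same pointer that &*UI was in the iterator form.
  int SumNew = 0;
  for (const int &UI : Uses)
    if (&UI != CondUse)
      SumNew += UI;

  assert(SumOld == SumNew && SumOld == 1 + 2 + 4);
  std::cout << "both forms skip the same element: " << SumOld << "\n";
}
```

Because the elements live in a node-based list, the range-based form visits the same objects as the iterator form, which is why the address comparison against `CondUse` behaves identically.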