diff --git a/llvm/lib/Analysis/ValueTracking.cpp b/llvm/lib/Analysis/ValueTracking.cpp
index 6b61a3546e8b7..55feb15dfb152 100644
--- a/llvm/lib/Analysis/ValueTracking.cpp
+++ b/llvm/lib/Analysis/ValueTracking.cpp
@@ -1426,7 +1426,22 @@ static void computeKnownBitsFromOperator(const Operator *I,
     computeKnownBits(I->getOperand(0), Known, Depth + 1, Q);
     // Accumulate the constant indices in a separate variable
     // to minimize the number of calls to computeForAddSub.
-    APInt AccConstIndices(BitWidth, 0, /*IsSigned*/ true);
+    unsigned IndexWidth = Q.DL.getIndexTypeSizeInBits(I->getType());
+    APInt AccConstIndices(IndexWidth, 0);
+
+    auto AddIndexToKnown = [&](KnownBits IndexBits) {
+      if (IndexWidth == BitWidth) {
+        // Note that inbounds does *not* guarantee nsw for the addition, as only
+        // the offset is signed, while the base address is unsigned.
+        Known = KnownBits::add(Known, IndexBits);
+      } else {
+        // If the index width is smaller than the pointer width, only add the
+        // value to the low bits.
+        assert(IndexWidth < BitWidth &&
+               "Index width can't be larger than pointer width");
+        Known.insertBits(KnownBits::add(Known.trunc(IndexWidth), IndexBits), 0);
+      }
+    };
 
     gep_type_iterator GTI = gep_type_begin(I);
     for (unsigned i = 1, e = I->getNumOperands(); i != e; ++i, ++GTI) {
@@ -1464,43 +1479,34 @@ static void computeKnownBitsFromOperator(const Operator *I,
         break;
       }
 
-      unsigned IndexBitWidth = Index->getType()->getScalarSizeInBits();
-      KnownBits IndexBits(IndexBitWidth);
-      computeKnownBits(Index, IndexBits, Depth + 1, Q);
-      TypeSize IndexTypeSize = GTI.getSequentialElementStride(Q.DL);
-      uint64_t TypeSizeInBytes = IndexTypeSize.getKnownMinValue();
-      KnownBits ScalingFactor(IndexBitWidth);
+      TypeSize Stride = GTI.getSequentialElementStride(Q.DL);
+      uint64_t StrideInBytes = Stride.getKnownMinValue();
+      if (!Stride.isScalable()) {
+        // Fast path for constant offset.
+        if (auto *CI = dyn_cast<ConstantInt>(Index)) {
+          AccConstIndices +=
+              CI->getValue().sextOrTrunc(IndexWidth) * StrideInBytes;
+          continue;
+        }
+      }
+
+      KnownBits IndexBits =
+          computeKnownBits(Index, Depth + 1, Q).sextOrTrunc(IndexWidth);
+      KnownBits ScalingFactor(IndexWidth);
       // Multiply by current sizeof type.
       // &A[i] == A + i * sizeof(*A[i]).
-      if (IndexTypeSize.isScalable()) {
+      if (Stride.isScalable()) {
         // For scalable types the only thing we know about sizeof is
         // that this is a multiple of the minimum size.
-        ScalingFactor.Zero.setLowBits(llvm::countr_zero(TypeSizeInBytes));
-      } else if (IndexBits.isConstant()) {
-        APInt IndexConst = IndexBits.getConstant();
-        APInt ScalingFactor(IndexBitWidth, TypeSizeInBytes);
-        IndexConst *= ScalingFactor;
-        AccConstIndices += IndexConst.sextOrTrunc(BitWidth);
-        continue;
+        ScalingFactor.Zero.setLowBits(llvm::countr_zero(StrideInBytes));
       } else {
         ScalingFactor =
-            KnownBits::makeConstant(APInt(IndexBitWidth, TypeSizeInBytes));
+            KnownBits::makeConstant(APInt(IndexWidth, StrideInBytes));
       }
-      IndexBits = KnownBits::mul(IndexBits, ScalingFactor);
-
-      // If the offsets have a different width from the pointer, according
-      // to the language reference we need to sign-extend or truncate them
-      // to the width of the pointer.
-      IndexBits = IndexBits.sextOrTrunc(BitWidth);
-
-      // Note that inbounds does *not* guarantee nsw for the addition, as only
-      // the offset is signed, while the base address is unsigned.
-      Known = KnownBits::add(Known, IndexBits);
-    }
-    if (!Known.isUnknown() && !AccConstIndices.isZero()) {
-      KnownBits Index = KnownBits::makeConstant(AccConstIndices);
-      Known = KnownBits::add(Known, Index);
+      AddIndexToKnown(KnownBits::mul(IndexBits, ScalingFactor));
     }
+    if (!Known.isUnknown() && !AccConstIndices.isZero())
+      AddIndexToKnown(KnownBits::makeConstant(AccConstIndices));
     break;
   }
   case Instruction::PHI: {
diff --git a/llvm/unittests/Analysis/ValueTrackingTest.cpp b/llvm/unittests/Analysis/ValueTrackingTest.cpp
index 39865fa195cf7..50e5e0e6b2ff5 100644
--- a/llvm/unittests/Analysis/ValueTrackingTest.cpp
+++ b/llvm/unittests/Analysis/ValueTrackingTest.cpp
@@ -2680,7 +2680,7 @@ TEST_F(ComputeKnownBitsTest, ComputeKnownBitsAbsoluteSymbol) {
 }
 
 TEST_F(ComputeKnownBitsTest, ComputeKnownBitsGEPExtendBeforeMul) {
-  // FIXME: The index should be extended before multiplying with the scale.
+  // The index should be extended before multiplying with the scale.
   parseAssembly(R"(
       target datalayout = "p:16:16:16"
 
@@ -2692,12 +2692,12 @@ TEST_F(ComputeKnownBitsTest, ComputeKnownBitsGEPExtendBeforeMul) {
     }
   )");
   KnownBits Known = computeKnownBits(A, M->getDataLayout());
-  EXPECT_EQ(~64 & 0x7fff, Known.Zero);
-  EXPECT_EQ(64, Known.One);
+  EXPECT_EQ(~320 & 0x7fff, Known.Zero);
+  EXPECT_EQ(320, Known.One);
 }
 
 TEST_F(ComputeKnownBitsTest, ComputeKnownBitsGEPOnlyIndexBits) {
-  // FIXME: GEP should only affect the index width.
+  // GEP should only affect the index width.
   parseAssembly(R"(
       target datalayout = "p:16:16:16:8"
 
@@ -2710,8 +2710,8 @@ TEST_F(ComputeKnownBitsTest, ComputeKnownBitsGEPOnlyIndexBits) {
     }
   )");
   KnownBits Known = computeKnownBits(A, M->getDataLayout());
-  EXPECT_EQ(0x7eff, Known.Zero);
-  EXPECT_EQ(0x100, Known.One);
+  EXPECT_EQ(0x7fff, Known.Zero);
+  EXPECT_EQ(0, Known.One);
 }
 
 TEST_F(ValueTrackingTest, HaveNoCommonBitsSet) {
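
For context, here is a minimal standalone sketch (not part of the patch) of what the new `AddIndexToKnown` lambda does in the narrow-index case, i.e. when the index width from the data layout is smaller than the pointer width. The 16-bit pointer / 8-bit index widths and the constant values are made up for illustration; the `KnownBits` calls are the same ones the patch uses.

```cpp
#include "llvm/ADT/APInt.h"
#include "llvm/Support/KnownBits.h"
#include <cassert>

using namespace llvm;

int main() {
  const unsigned BitWidth = 16;  // pointer width, e.g. datalayout "p:16:16:16:8"
  const unsigned IndexWidth = 8; // index width from the same datalayout

  // Base pointer known to be exactly 0x1234.
  KnownBits Known = KnownBits::makeConstant(APInt(BitWidth, 0x1234));
  // Already-scaled index known to be exactly 6 (IndexWidth bits wide).
  KnownBits IndexBits = KnownBits::makeConstant(APInt(IndexWidth, 6));

  // Mirror of AddIndexToKnown's else-branch: add into the low IndexWidth bits
  // only, leaving the high pointer bits untouched.
  Known.insertBits(KnownBits::add(Known.trunc(IndexWidth), IndexBits), 0);

  // Low byte: 0x34 + 6 == 0x3a; high byte stays 0x12.
  assert(Known.getConstant() == APInt(BitWidth, 0x123a));
  return 0;
}
```

Because the addition happens in the truncated index-width domain, any carry out of the low IndexWidth bits is dropped rather than propagated into the high pointer bits, which is the property the ComputeKnownBitsGEPOnlyIndexBits test exercises.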