Skip to content

Commit 9204779

Browse files
committed
[RISCV][IA] Factor out code for extracting operands from mem insts [nfc]
We're going to end up repeating the operand extraction four times once all of the routines have been updated to support both plain load/store and vp.load/vp.store. I plan to add masked.load/masked.store in the near future, and we'd need to add that to each of the four cases. Instead, factor out a single copy of the operand normalization.
1 parent 8f18dde commit 9204779

File tree

1 file changed

+52
-56
lines changed

1 file changed

+52
-56
lines changed

llvm/lib/Target/RISCV/RISCVInterleavedAccess.cpp

Lines changed: 52 additions & 56 deletions
Original file line numberDiff line numberDiff line change
@@ -102,6 +102,52 @@ static bool isMultipleOfN(const Value *V, const DataLayout &DL, unsigned N) {
102102
return false;
103103
}
104104

105+
/// Extract the common memory operands (pointer, mask, EVL, alignment) from a
/// plain load/store or a vp.load/vp.store instruction \p I, normalizing them
/// so callers can treat all four forms uniformly.
///
/// \p VTy is the (wide) vector type being accessed and \p Factor the
/// interleave factor. On success the results are written through \p Ptr,
/// \p Mask, \p VL and \p Alignment and true is returned. Returns false when
/// the transformation must be abandoned (EVL not provably a multiple of
/// \p Factor). For plain load/store, \p Mask must be null on entry and is
/// synthesized as all-ones; for vp intrinsics it must already be set.
static bool getMemOperands(IRBuilderBase &Builder, unsigned Factor,
                           VectorType *VTy, const DataLayout &DL, Type *XLenTy,
                           Instruction *I, Value *&Ptr, Value *&Mask,
                           Value *&VL, Align &Alignment) {
  ElementCount EC = VTy->getElementCount();
  if (auto *LI = dyn_cast<LoadInst>(I)) {
    assert(LI->isSimple());
    Ptr = LI->getPointerOperand();
    Alignment = LI->getAlign();
    // Fixed: dropped stray "\n" so the message matches the store path.
    assert(!Mask && "Unexpected mask on a load");
    Mask = Builder.getAllOnesMask(EC);
    // For scalable vectors VLMAX is expressed as an all-ones VL operand.
    VL = isa<FixedVectorType>(VTy) ? Builder.CreateElementCount(XLenTy, EC)
                                   : Constant::getAllOnesValue(XLenTy);
    return true;
  }
  if (auto *SI = dyn_cast<StoreInst>(I)) {
    assert(SI->isSimple());
    Ptr = SI->getPointerOperand();
    Alignment = SI->getAlign();
    assert(!Mask && "Unexpected mask on a store");
    Mask = Builder.getAllOnesMask(EC);
    VL = isa<FixedVectorType>(VTy) ? Builder.CreateElementCount(XLenTy, EC)
                                   : Constant::getAllOnesValue(XLenTy);
    return true;
  }

  auto *VPLdSt = cast<VPIntrinsic>(I);
  assert((VPLdSt->getIntrinsicID() == Intrinsic::vp_load ||
          VPLdSt->getIntrinsicID() == Intrinsic::vp_store) &&
         "Unexpected intrinsic");
  Ptr = VPLdSt->getMemoryPointerParam();
  // vp intrinsics may omit an explicit alignment; fall back to the ABI
  // alignment of the element type.
  Alignment = VPLdSt->getPointerAlignment().value_or(
      DL.getABITypeAlign(VTy->getElementType()));

  // Fixed: message now reflects that this path handles both intrinsics.
  assert(Mask && "vp.load/vp.store needs a mask!");

  Value *WideEVL = VPLdSt->getVectorLengthParam();
  // Conservatively check if EVL is a multiple of factor, otherwise some
  // (trailing) elements might be lost after the transformation.
  if (!isMultipleOfN(WideEVL, I->getDataLayout(), Factor))
    return false;

  // Each segment sees EVL/Factor elements; zero-extend to XLen for the VL
  // operand of the segment intrinsic.
  auto *FactorC = ConstantInt::get(WideEVL->getType(), Factor);
  VL = Builder.CreateZExt(Builder.CreateExactUDiv(WideEVL, FactorC), XLenTy);
  return true;
}
150+
105151
/// Lower an interleaved load into a vlsegN intrinsic.
106152
///
107153
/// E.g. Lower an interleaved load (Factor = 2):
@@ -271,34 +317,9 @@ bool RISCVTargetLowering::lowerDeinterleaveIntrinsicToLoad(
271317

272318
Value *Ptr, *VL;
273319
Align Alignment;
274-
if (auto *LI = dyn_cast<LoadInst>(Load)) {
275-
assert(LI->isSimple());
276-
Ptr = LI->getPointerOperand();
277-
Alignment = LI->getAlign();
278-
assert(!Mask && "Unexpected mask on a load\n");
279-
Mask = Builder.getAllOnesMask(ResVTy->getElementCount());
280-
VL = isa<FixedVectorType>(ResVTy)
281-
? Builder.CreateElementCount(XLenTy, ResVTy->getElementCount())
282-
: Constant::getAllOnesValue(XLenTy);
283-
} else {
284-
auto *VPLoad = cast<VPIntrinsic>(Load);
285-
assert(VPLoad->getIntrinsicID() == Intrinsic::vp_load &&
286-
"Unexpected intrinsic");
287-
Ptr = VPLoad->getMemoryPointerParam();
288-
Alignment = VPLoad->getPointerAlignment().value_or(
289-
DL.getABITypeAlign(ResVTy->getElementType()));
290-
291-
assert(Mask && "vp.load needs a mask!");
292-
293-
Value *WideEVL = VPLoad->getVectorLengthParam();
294-
// Conservatively check if EVL is a multiple of factor, otherwise some
295-
// (trailing) elements might be lost after the transformation.
296-
if (!isMultipleOfN(WideEVL, Load->getDataLayout(), Factor))
297-
return false;
298-
299-
auto *FactorC = ConstantInt::get(WideEVL->getType(), Factor);
300-
VL = Builder.CreateZExt(Builder.CreateExactUDiv(WideEVL, FactorC), XLenTy);
301-
}
320+
if (!getMemOperands(Builder, Factor, ResVTy, DL, XLenTy, Load, Ptr, Mask, VL,
321+
Alignment))
322+
return false;
302323

303324
Type *PtrTy = Ptr->getType();
304325
unsigned AS = PtrTy->getPointerAddressSpace();
@@ -360,34 +381,9 @@ bool RISCVTargetLowering::lowerInterleaveIntrinsicToStore(
360381

361382
Value *Ptr, *VL;
362383
Align Alignment;
363-
if (auto *SI = dyn_cast<StoreInst>(Store)) {
364-
assert(SI->isSimple());
365-
Ptr = SI->getPointerOperand();
366-
Alignment = SI->getAlign();
367-
assert(!Mask && "Unexpected mask on a store");
368-
Mask = Builder.getAllOnesMask(InVTy->getElementCount());
369-
VL = isa<FixedVectorType>(InVTy)
370-
? Builder.CreateElementCount(XLenTy, InVTy->getElementCount())
371-
: Constant::getAllOnesValue(XLenTy);
372-
} else {
373-
auto *VPStore = cast<VPIntrinsic>(Store);
374-
assert(VPStore->getIntrinsicID() == Intrinsic::vp_store &&
375-
"Unexpected intrinsic");
376-
Ptr = VPStore->getMemoryPointerParam();
377-
Alignment = VPStore->getPointerAlignment().value_or(
378-
DL.getABITypeAlign(InVTy->getElementType()));
379-
380-
assert(Mask && "vp.store needs a mask!");
381-
382-
Value *WideEVL = VPStore->getVectorLengthParam();
383-
// Conservatively check if EVL is a multiple of factor, otherwise some
384-
// (trailing) elements might be lost after the transformation.
385-
if (!isMultipleOfN(WideEVL, DL, Factor))
386-
return false;
387-
388-
auto *FactorC = ConstantInt::get(WideEVL->getType(), Factor);
389-
VL = Builder.CreateZExt(Builder.CreateExactUDiv(WideEVL, FactorC), XLenTy);
390-
}
384+
if (!getMemOperands(Builder, Factor, InVTy, DL, XLenTy, Store, Ptr, Mask, VL,
385+
Alignment))
386+
return false;
391387
Type *PtrTy = Ptr->getType();
392388
unsigned AS = Ptr->getType()->getPointerAddressSpace();
393389
if (!isLegalInterleavedAccessType(InVTy, Factor, Alignment, AS, DL))

0 commit comments

Comments
 (0)