Commit f6641e2

[RISCV][IA] Factor out code for extracting operands from mem insts [nfc] (#149344)
We're going to end up repeating the operand extraction four times once all of the routines have been updated to support both plain load/store and vp.load/vp.store. I plan to add masked.load/masked.store in the near future, and we'd need to add that to each of the four cases. Instead, factor out a single copy of the operand normalization.
1 parent: b5e71d7
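For orientation, the call pattern introduced by this refactor looks roughly like the sketch below. It is condensed from the diff that follows and is not a complete function: `Factor`, `VTy`, `XLenTy`, `Load`, and `Mask` stand for locals already available in each lowering routine.

  // Sketch only: each lowering routine now delegates operand extraction to
  // the shared getMemOperands helper instead of open-coding the
  // load/store vs. vp.load/vp.store cases.
  Value *Ptr, *VL;
  Align Alignment;
  // Mask is null for a plain load/store and carries the vp.* mask otherwise;
  // the helper fills in Ptr, Mask, VL, and Alignment, or rejects the access.
  if (!getMemOperands(Factor, VTy, XLenTy, Load, Ptr, Mask, VL, Alignment))
    return false;
  // ... continue building the segment load/store from Ptr, Mask, VL, Alignment.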

llvm/lib/Target/RISCV/RISCVInterleavedAccess.cpp

Lines changed: 56 additions & 82 deletions
@@ -102,6 +102,56 @@ static bool isMultipleOfN(const Value *V, const DataLayout &DL, unsigned N) {
   return false;
 }
 
+/// Do the common operand retrieval and validation required by the
+/// routines below.
+static bool getMemOperands(unsigned Factor, VectorType *VTy, Type *XLenTy,
+                           Instruction *I, Value *&Ptr, Value *&Mask,
+                           Value *&VL, Align &Alignment) {
+
+  IRBuilder<> Builder(I);
+  const DataLayout &DL = I->getDataLayout();
+  ElementCount EC = VTy->getElementCount();
+  if (auto *LI = dyn_cast<LoadInst>(I)) {
+    assert(LI->isSimple());
+    Ptr = LI->getPointerOperand();
+    Alignment = LI->getAlign();
+    assert(!Mask && "Unexpected mask on a load");
+    Mask = Builder.getAllOnesMask(EC);
+    VL = isa<FixedVectorType>(VTy) ? Builder.CreateElementCount(XLenTy, EC)
+                                   : Constant::getAllOnesValue(XLenTy);
+    return true;
+  }
+  if (auto *SI = dyn_cast<StoreInst>(I)) {
+    assert(SI->isSimple());
+    Ptr = SI->getPointerOperand();
+    Alignment = SI->getAlign();
+    assert(!Mask && "Unexpected mask on a store");
+    Mask = Builder.getAllOnesMask(EC);
+    VL = isa<FixedVectorType>(VTy) ? Builder.CreateElementCount(XLenTy, EC)
+                                   : Constant::getAllOnesValue(XLenTy);
+    return true;
+  }
+  auto *VPLdSt = cast<VPIntrinsic>(I);
+  assert((VPLdSt->getIntrinsicID() == Intrinsic::vp_load ||
+          VPLdSt->getIntrinsicID() == Intrinsic::vp_store) &&
+         "Unexpected intrinsic");
+  Ptr = VPLdSt->getMemoryPointerParam();
+  Alignment = VPLdSt->getPointerAlignment().value_or(
+      DL.getABITypeAlign(VTy->getElementType()));
+
+  assert(Mask && "vp.load and vp.store needs a mask!");
+
+  Value *WideEVL = VPLdSt->getVectorLengthParam();
+  // Conservatively check if EVL is a multiple of factor, otherwise some
+  // (trailing) elements might be lost after the transformation.
+  if (!isMultipleOfN(WideEVL, I->getDataLayout(), Factor))
+    return false;
+
+  auto *FactorC = ConstantInt::get(WideEVL->getType(), Factor);
+  VL = Builder.CreateZExt(Builder.CreateExactUDiv(WideEVL, FactorC), XLenTy);
+  return true;
+}
+
 /// Lower an interleaved load into a vlsegN intrinsic.
 ///
 /// E.g. Lower an interleaved load (Factor = 2):
@@ -127,32 +177,8 @@ bool RISCVTargetLowering::lowerInterleavedLoad(
 
   Value *Ptr, *VL;
   Align Alignment;
-  if (auto *LI = dyn_cast<LoadInst>(Load)) {
-    assert(LI->isSimple());
-    Ptr = LI->getPointerOperand();
-    Alignment = LI->getAlign();
-    assert(!Mask && "Unexpected mask on a load\n");
-    Mask = Builder.getAllOnesMask(VTy->getElementCount());
-    VL = Builder.CreateElementCount(XLenTy, VTy->getElementCount());
-  } else {
-    auto *VPLoad = cast<VPIntrinsic>(Load);
-    assert(VPLoad->getIntrinsicID() == Intrinsic::vp_load &&
-           "Unexpected intrinsic");
-    Ptr = VPLoad->getMemoryPointerParam();
-    Alignment = VPLoad->getPointerAlignment().value_or(
-        DL.getABITypeAlign(VTy->getElementType()));
-
-    assert(Mask && "vp.load needs a mask!");
-
-    Value *WideEVL = VPLoad->getVectorLengthParam();
-    // Conservatively check if EVL is a multiple of factor, otherwise some
-    // (trailing) elements might be lost after the transformation.
-    if (!isMultipleOfN(WideEVL, DL, Factor))
-      return false;
-
-    auto *FactorC = ConstantInt::get(WideEVL->getType(), Factor);
-    VL = Builder.CreateZExt(Builder.CreateExactUDiv(WideEVL, FactorC), XLenTy);
-  }
+  if (!getMemOperands(Factor, VTy, XLenTy, Load, Ptr, Mask, VL, Alignment))
+    return false;
 
   Type *PtrTy = Ptr->getType();
   unsigned AS = PtrTy->getPointerAddressSpace();
@@ -296,34 +322,8 @@ bool RISCVTargetLowering::lowerDeinterleaveIntrinsicToLoad(
 
   Value *Ptr, *VL;
   Align Alignment;
-  if (auto *LI = dyn_cast<LoadInst>(Load)) {
-    assert(LI->isSimple());
-    Ptr = LI->getPointerOperand();
-    Alignment = LI->getAlign();
-    assert(!Mask && "Unexpected mask on a load\n");
-    Mask = Builder.getAllOnesMask(ResVTy->getElementCount());
-    VL = isa<FixedVectorType>(ResVTy)
-             ? Builder.CreateElementCount(XLenTy, ResVTy->getElementCount())
-             : Constant::getAllOnesValue(XLenTy);
-  } else {
-    auto *VPLoad = cast<VPIntrinsic>(Load);
-    assert(VPLoad->getIntrinsicID() == Intrinsic::vp_load &&
-           "Unexpected intrinsic");
-    Ptr = VPLoad->getMemoryPointerParam();
-    Alignment = VPLoad->getPointerAlignment().value_or(
-        DL.getABITypeAlign(ResVTy->getElementType()));
-
-    assert(Mask && "vp.load needs a mask!");
-
-    Value *WideEVL = VPLoad->getVectorLengthParam();
-    // Conservatively check if EVL is a multiple of factor, otherwise some
-    // (trailing) elements might be lost after the transformation.
-    if (!isMultipleOfN(WideEVL, Load->getDataLayout(), Factor))
-      return false;
-
-    auto *FactorC = ConstantInt::get(WideEVL->getType(), Factor);
-    VL = Builder.CreateZExt(Builder.CreateExactUDiv(WideEVL, FactorC), XLenTy);
-  }
+  if (!getMemOperands(Factor, ResVTy, XLenTy, Load, Ptr, Mask, VL, Alignment))
+    return false;
 
   Type *PtrTy = Ptr->getType();
   unsigned AS = PtrTy->getPointerAddressSpace();
@@ -385,34 +385,8 @@ bool RISCVTargetLowering::lowerInterleaveIntrinsicToStore(
 
   Value *Ptr, *VL;
   Align Alignment;
-  if (auto *SI = dyn_cast<StoreInst>(Store)) {
-    assert(SI->isSimple());
-    Ptr = SI->getPointerOperand();
-    Alignment = SI->getAlign();
-    assert(!Mask && "Unexpected mask on a store");
-    Mask = Builder.getAllOnesMask(InVTy->getElementCount());
-    VL = isa<FixedVectorType>(InVTy)
-             ? Builder.CreateElementCount(XLenTy, InVTy->getElementCount())
-             : Constant::getAllOnesValue(XLenTy);
-  } else {
-    auto *VPStore = cast<VPIntrinsic>(Store);
-    assert(VPStore->getIntrinsicID() == Intrinsic::vp_store &&
-           "Unexpected intrinsic");
-    Ptr = VPStore->getMemoryPointerParam();
-    Alignment = VPStore->getPointerAlignment().value_or(
-        DL.getABITypeAlign(InVTy->getElementType()));
-
-    assert(Mask && "vp.store needs a mask!");
-
-    Value *WideEVL = VPStore->getVectorLengthParam();
-    // Conservatively check if EVL is a multiple of factor, otherwise some
-    // (trailing) elements might be lost after the transformation.
-    if (!isMultipleOfN(WideEVL, DL, Factor))
-      return false;
-
-    auto *FactorC = ConstantInt::get(WideEVL->getType(), Factor);
-    VL = Builder.CreateZExt(Builder.CreateExactUDiv(WideEVL, FactorC), XLenTy);
-  }
+  if (!getMemOperands(Factor, InVTy, XLenTy, Store, Ptr, Mask, VL, Alignment))
+    return false;
   Type *PtrTy = Ptr->getType();
   unsigned AS = Ptr->getType()->getPointerAddressSpace();
   if (!isLegalInterleavedAccessType(InVTy, Factor, Alignment, AS, DL))
