@@ -131,10 +131,14 @@ static bool getMemOperands(unsigned Factor, VectorType *VTy, Type *XLenTy,
              : Constant::getAllOnesValue(XLenTy);
     return true;
   }
-  if (auto *VPLdSt = dyn_cast<VPIntrinsic>(I)) {
-    assert((VPLdSt->getIntrinsicID() == Intrinsic::vp_load ||
-            VPLdSt->getIntrinsicID() == Intrinsic::vp_store) &&
-           "Unexpected intrinsic");
+
+  auto *II = cast<IntrinsicInst>(I);
+  switch (II->getIntrinsicID()) {
+  default:
+    llvm_unreachable("Unsupported intrinsic type");
+  case Intrinsic::vp_load:
+  case Intrinsic::vp_store: {
+    auto *VPLdSt = cast<VPIntrinsic>(I);
     Ptr = VPLdSt->getMemoryPointerParam();
     Alignment = VPLdSt->getPointerAlignment().value_or(
         DL.getABITypeAlign(VTy->getElementType()));
@@ -151,21 +155,21 @@ static bool getMemOperands(unsigned Factor, VectorType *VTy, Type *XLenTy,
     VL = Builder.CreateZExt(Builder.CreateExactUDiv(WideEVL, FactorC), XLenTy);
     return true;
   }
-  auto *II = cast<IntrinsicInst>(I);
-  assert(II->getIntrinsicID() == Intrinsic::masked_load &&
-         "Unexpected intrinsic");
-  Ptr = II->getOperand(0);
-  Alignment = cast<ConstantInt>(II->getArgOperand(1))->getAlignValue();
+  case Intrinsic::masked_load: {
+    Ptr = II->getOperand(0);
+    Alignment = cast<ConstantInt>(II->getArgOperand(1))->getAlignValue();
 
-  if (!isa<UndefValue>(II->getOperand(3)))
-    return false;
+    if (!isa<UndefValue>(II->getOperand(3)))
+      return false;
 
-  assert(Mask && "masked.load needs a mask!");
+    assert(Mask && "masked.load needs a mask!");
 
-  VL = isa<FixedVectorType>(VTy)
-           ? Builder.CreateElementCount(XLenTy, VTy->getElementCount())
-           : Constant::getAllOnesValue(XLenTy);
-  return true;
+    VL = isa<FixedVectorType>(VTy)
+             ? Builder.CreateElementCount(XLenTy, VTy->getElementCount())
+             : Constant::getAllOnesValue(XLenTy);
+    return true;
+  }
+  }
 }
 
 /// Lower an interleaved load into a vlsegN intrinsic.
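For context, a minimal IR sketch (illustrative vector types and value names, not taken from this patch) of the intrinsic forms the reworked `getMemOperands` switch dispatches on. The operand indices mirror what the code above reads: for `masked.load`, operand 0 is the pointer, operand 1 the alignment, operand 2 the mask, and operand 3 the passthru, which must be `undef` for the lowering to proceed.

```llvm
; Hypothetical examples of the three intrinsics handled above.
declare <8 x i32> @llvm.vp.load.v8i32.p0(ptr, <8 x i1>, i32)
declare void @llvm.vp.store.v8i32.p0(<8 x i32>, ptr, <8 x i1>, i32)
declare <8 x i32> @llvm.masked.load.v8i32.p0(ptr, i32 immarg, <8 x i1>, <8 x i32>)

define void @sketch(ptr %p, ptr %q, <8 x i1> %m, i32 %evl) {
  ; vp.load/vp.store carry the pointer, mask, and explicit vector length (EVL).
  %v = call <8 x i32> @llvm.vp.load.v8i32.p0(ptr %p, <8 x i1> %m, i32 %evl)
  call void @llvm.vp.store.v8i32.p0(<8 x i32> %v, ptr %q, <8 x i1> %m, i32 %evl)
  ; masked.load: (pointer, alignment, mask, passthru); passthru is undef here.
  %w = call <8 x i32> @llvm.masked.load.v8i32.p0(ptr %p, i32 4, <8 x i1> %m, <8 x i32> undef)
  ret void
}
```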