@@ -285,8 +285,15 @@ RISCVLegalizerInfo::RISCVLegalizerInfo(const RISCVSubtarget &ST)
       .clampScalar(0, s32, (XLen == 64 || ST.hasStdExtD()) ? s64 : s32)
       .clampScalar(1, sXLen, sXLen);
 
-  auto &LoadStoreActions =
-      getActionDefinitionsBuilder({G_LOAD, G_STORE})
+  auto &LoadActions = getActionDefinitionsBuilder(G_LOAD);
+  auto &StoreActions = getActionDefinitionsBuilder(G_STORE);
+
+  LoadActions
+      .legalForTypesWithMemDesc({{s32, p0, s8, 8},
+                                 {s32, p0, s16, 16},
+                                 {s32, p0, s32, 32},
+                                 {p0, p0, sXLen, XLen}});
+  StoreActions
       .legalForTypesWithMemDesc({{s32, p0, s8, 8},
                                  {s32, p0, s16, 16},
                                  {s32, p0, s32, 32},
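For readers outside GlobalISel: each `legalForTypesWithMemDesc` entry is a {value type, pointer type, memory type, minimum alignment in bits} tuple, so {s32, p0, s16, 16} marks a 16-bit-aligned access of an s16 in memory paired with an s32 value as legal. Below is a minimal sketch of the pattern this commit adopts, splitting one shared builder into per-opcode rule sets. It assumes the enclosing scope of a target's LegalizerInfo constructor (where `getActionDefinitionsBuilder` is available and `XLen` is defined); the include path for `LLT` varies across LLVM versions.

```cpp
// Sketch only: this lives inside a target LegalizerInfo constructor.
const LLT s16 = LLT::scalar(16);
const LLT s32 = LLT::scalar(32);
const LLT p0 = LLT::pointer(/*AddressSpace=*/0, /*SizeInBits=*/XLen);

// Separate builders per opcode: each chain can now diverge instead of
// sharing one rule set for both G_LOAD and G_STORE.
auto &LoadActions = getActionDefinitionsBuilder(TargetOpcode::G_LOAD);
auto &StoreActions = getActionDefinitionsBuilder(TargetOpcode::G_STORE);

// {result, pointer, memory type, min align in bits}: an anyextending load
// of s16 into s32 (or, for G_STORE, the matching truncating store), legal
// when the access is at least 16-bit aligned.
LoadActions.legalForTypesWithMemDesc({{s32, p0, s16, 16}});
StoreActions.legalForTypesWithMemDesc({{s32, p0, s16, 16}});
```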
@@ -295,58 +302,94 @@ RISCVLegalizerInfo::RISCVLegalizerInfo(const RISCVSubtarget &ST)
       getActionDefinitionsBuilder({G_SEXTLOAD, G_ZEXTLOAD})
           .legalForTypesWithMemDesc({{s32, p0, s8, 8}, {s32, p0, s16, 16}});
   if (XLen == 64) {
-    LoadStoreActions.legalForTypesWithMemDesc({{s64, p0, s8, 8},
-                                               {s64, p0, s16, 16},
-                                               {s64, p0, s32, 32},
-                                               {s64, p0, s64, 64}});
+    LoadActions.legalForTypesWithMemDesc({{s64, p0, s8, 8},
+                                          {s64, p0, s16, 16},
+                                          {s64, p0, s32, 32},
+                                          {s64, p0, s64, 64}});
+    StoreActions.legalForTypesWithMemDesc({{s64, p0, s8, 8},
+                                           {s64, p0, s16, 16},
+                                           {s64, p0, s32, 32},
+                                           {s64, p0, s64, 64}});
     ExtLoadActions.legalForTypesWithMemDesc(
         {{s64, p0, s8, 8}, {s64, p0, s16, 16}, {s64, p0, s32, 32}});
   } else if (ST.hasStdExtD()) {
-    LoadStoreActions.legalForTypesWithMemDesc({{s64, p0, s64, 64}});
+    LoadActions.legalForTypesWithMemDesc({{s64, p0, s64, 64}});
+    StoreActions.legalForTypesWithMemDesc({{s64, p0, s64, 64}});
   }
 
   // Vector loads/stores.
   if (ST.hasVInstructions()) {
-    LoadStoreActions.legalForTypesWithMemDesc({{nxv2s8, p0, nxv2s8, 8},
-                                               {nxv4s8, p0, nxv4s8, 8},
-                                               {nxv8s8, p0, nxv8s8, 8},
-                                               {nxv16s8, p0, nxv16s8, 8},
-                                               {nxv32s8, p0, nxv32s8, 8},
-                                               {nxv64s8, p0, nxv64s8, 8},
-                                               {nxv2s16, p0, nxv2s16, 16},
-                                               {nxv4s16, p0, nxv4s16, 16},
-                                               {nxv8s16, p0, nxv8s16, 16},
-                                               {nxv16s16, p0, nxv16s16, 16},
-                                               {nxv32s16, p0, nxv32s16, 16},
-                                               {nxv2s32, p0, nxv2s32, 32},
-                                               {nxv4s32, p0, nxv4s32, 32},
-                                               {nxv8s32, p0, nxv8s32, 32},
-                                               {nxv16s32, p0, nxv16s32, 32}});
-
-    if (ST.getELen() == 64)
-      LoadStoreActions.legalForTypesWithMemDesc({{nxv1s8, p0, nxv1s8, 8},
-                                                 {nxv1s16, p0, nxv1s16, 16},
-                                                 {nxv1s32, p0, nxv1s32, 32}});
-
-    if (ST.hasVInstructionsI64())
-      LoadStoreActions.legalForTypesWithMemDesc({{nxv1s64, p0, nxv1s64, 64},
-                                                 {nxv2s64, p0, nxv2s64, 64},
-                                                 {nxv4s64, p0, nxv4s64, 64},
-                                                 {nxv8s64, p0, nxv8s64, 64}});
+    LoadActions.legalForTypesWithMemDesc({{nxv2s8, p0, nxv2s8, 8},
+                                          {nxv4s8, p0, nxv4s8, 8},
+                                          {nxv8s8, p0, nxv8s8, 8},
+                                          {nxv16s8, p0, nxv16s8, 8},
+                                          {nxv32s8, p0, nxv32s8, 8},
+                                          {nxv64s8, p0, nxv64s8, 8},
+                                          {nxv2s16, p0, nxv2s16, 16},
+                                          {nxv4s16, p0, nxv4s16, 16},
+                                          {nxv8s16, p0, nxv8s16, 16},
+                                          {nxv16s16, p0, nxv16s16, 16},
+                                          {nxv32s16, p0, nxv32s16, 16},
+                                          {nxv2s32, p0, nxv2s32, 32},
+                                          {nxv4s32, p0, nxv4s32, 32},
+                                          {nxv8s32, p0, nxv8s32, 32},
+                                          {nxv16s32, p0, nxv16s32, 32}});
+    StoreActions.legalForTypesWithMemDesc({{nxv2s8, p0, nxv2s8, 8},
+                                           {nxv4s8, p0, nxv4s8, 8},
+                                           {nxv8s8, p0, nxv8s8, 8},
+                                           {nxv16s8, p0, nxv16s8, 8},
+                                           {nxv32s8, p0, nxv32s8, 8},
+                                           {nxv64s8, p0, nxv64s8, 8},
+                                           {nxv2s16, p0, nxv2s16, 16},
+                                           {nxv4s16, p0, nxv4s16, 16},
+                                           {nxv8s16, p0, nxv8s16, 16},
+                                           {nxv16s16, p0, nxv16s16, 16},
+                                           {nxv32s16, p0, nxv32s16, 16},
+                                           {nxv2s32, p0, nxv2s32, 32},
+                                           {nxv4s32, p0, nxv4s32, 32},
+                                           {nxv8s32, p0, nxv8s32, 32},
+                                           {nxv16s32, p0, nxv16s32, 32}});
+
+    if (ST.getELen() == 64) {
+      LoadActions.legalForTypesWithMemDesc({{nxv1s8, p0, nxv1s8, 8},
+                                            {nxv1s16, p0, nxv1s16, 16},
+                                            {nxv1s32, p0, nxv1s32, 32}});
+      StoreActions.legalForTypesWithMemDesc({{nxv1s8, p0, nxv1s8, 8},
+                                             {nxv1s16, p0, nxv1s16, 16},
+                                             {nxv1s32, p0, nxv1s32, 32}});
+    }
+
+    if (ST.hasVInstructionsI64()) {
+      LoadActions.legalForTypesWithMemDesc({{nxv1s64, p0, nxv1s64, 64},
+                                            {nxv2s64, p0, nxv2s64, 64},
+                                            {nxv4s64, p0, nxv4s64, 64},
+                                            {nxv8s64, p0, nxv8s64, 64}});
+      StoreActions.legalForTypesWithMemDesc({{nxv1s64, p0, nxv1s64, 64},
+                                             {nxv2s64, p0, nxv2s64, 64},
+                                             {nxv4s64, p0, nxv4s64, 64},
+                                             {nxv8s64, p0, nxv8s64, 64}});
+    }
 
     // we will take the custom lowering logic if we have scalable vector types
     // with non-standard alignments
-    LoadStoreActions.customIf(typeIsLegalIntOrFPVec(0, IntOrFPVecTys, ST));
+    LoadActions.customIf(typeIsLegalIntOrFPVec(0, IntOrFPVecTys, ST));
+    StoreActions.customIf(typeIsLegalIntOrFPVec(0, IntOrFPVecTys, ST));
 
     // Pointers require that XLen sized elements are legal.
-    if (XLen <= ST.getELen())
-      LoadStoreActions.customIf(typeIsLegalPtrVec(0, PtrVecTys, ST));
+    if (XLen <= ST.getELen()) {
+      LoadActions.customIf(typeIsLegalPtrVec(0, PtrVecTys, ST));
+      StoreActions.customIf(typeIsLegalPtrVec(0, PtrVecTys, ST));
+    }
   }
 
-  LoadStoreActions.widenScalarToNextPow2(0, /*MinSize=*/8)
+  LoadActions.widenScalarToNextPow2(0, /*MinSize=*/8)
       .lowerIfMemSizeNotByteSizePow2()
       .clampScalar(0, s32, sXLen)
       .lower();
+  StoreActions
+      .clampScalar(0, s32, sXLen)
+      .lowerIfMemSizeNotByteSizePow2()
+      .lower();
 
   ExtLoadActions.widenScalarToNextPow2(0).clampScalar(0, s32, sXLen).lower();
 
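The tail of the diff is where the two opcodes genuinely diverge: a `LegalizeRuleSet` applies the first rule whose predicate matches, in declaration order, so G_LOAD still widens odd-sized scalars to the next power of two before its memory-size check, while G_STORE now clamps the value type first. A commented restatement of the two chains follows; the s24 trace is a hypothetical illustration, not output from a test.

```cpp
// Rules fire in declaration order; the first matching rule wins.
LoadActions
    .widenScalarToNextPow2(0, /*MinSize=*/8) // an s24 value widens to s32
    .lowerIfMemSizeNotByteSizePow2()         // a 3-byte access then lowers
                                             // into byte-size-pow2 pieces
    .clampScalar(0, s32, sXLen)
    .lower();
StoreActions
    .clampScalar(0, s32, sXLen)              // stores clamp the value first,
    .lowerIfMemSizeNotByteSizePow2()         // then odd memory sizes lower
    .lower();
```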