@@ -351,7 +351,7 @@ bool X86FastISel::X86FastEmitLoad(EVT VT, X86AddressMode &AM,
   case MVT::f32:
     if (X86ScalarSSEf32) {
       Opc = HasAVX512 ? X86::VMOVSSZrm : HasAVX ? X86::VMOVSSrm : X86::MOVSSrm;
-      RC = &X86::FR32RegClass;
+      RC = HasAVX512 ? &X86::FR32XRegClass : &X86::FR32RegClass;
     } else {
       Opc = X86::LD_Fp32m;
       RC = &X86::RFP32RegClass;
@@ -360,7 +360,7 @@ bool X86FastISel::X86FastEmitLoad(EVT VT, X86AddressMode &AM,
   case MVT::f64:
     if (X86ScalarSSEf64) {
       Opc = HasAVX512 ? X86::VMOVSDZrm : HasAVX ? X86::VMOVSDrm : X86::MOVSDrm;
-      RC = &X86::FR64RegClass;
+      RC = HasAVX512 ? &X86::FR64XRegClass : &X86::FR64RegClass;
     } else {
       Opc = X86::LD_Fp64m;
       RC = &X86::RFP64RegClass;
@@ -379,7 +379,7 @@ bool X86FastISel::X86FastEmitLoad(EVT VT, X86AddressMode &AM,
     else
       Opc = HasVLX ? X86::VMOVUPSZ128rm :
             HasAVX ? X86::VMOVUPSrm : X86::MOVUPSrm;
-    RC = &X86::VR128RegClass;
+    RC = HasVLX ? &X86::VR128XRegClass : &X86::VR128RegClass;
     break;
   case MVT::v2f64:
     if (IsNonTemporal && Alignment >= 16 && HasSSE41)
@@ -391,7 +391,7 @@ bool X86FastISel::X86FastEmitLoad(EVT VT, X86AddressMode &AM,
     else
       Opc = HasVLX ? X86::VMOVUPDZ128rm :
             HasAVX ? X86::VMOVUPDrm : X86::MOVUPDrm;
-    RC = &X86::VR128RegClass;
+    RC = HasVLX ? &X86::VR128XRegClass : &X86::VR128RegClass;
     break;
   case MVT::v4i32:
   case MVT::v2i64:
@@ -406,7 +406,7 @@ bool X86FastISel::X86FastEmitLoad(EVT VT, X86AddressMode &AM,
     else
      Opc = HasVLX ? X86::VMOVDQU64Z128rm :
            HasAVX ? X86::VMOVDQUrm : X86::MOVDQUrm;
-    RC = &X86::VR128RegClass;
+    RC = HasVLX ? &X86::VR128XRegClass : &X86::VR128RegClass;
     break;
   case MVT::v8f32:
     assert(HasAVX);
@@ -418,7 +418,7 @@ bool X86FastISel::X86FastEmitLoad(EVT VT, X86AddressMode &AM,
       Opc = HasVLX ? X86::VMOVAPSZ256rm : X86::VMOVAPSYrm;
     else
       Opc = HasVLX ? X86::VMOVUPSZ256rm : X86::VMOVUPSYrm;
-    RC = &X86::VR256RegClass;
+    RC = HasVLX ? &X86::VR256XRegClass : &X86::VR256RegClass;
     break;
   case MVT::v4f64:
     assert(HasAVX);
@@ -430,7 +430,7 @@ bool X86FastISel::X86FastEmitLoad(EVT VT, X86AddressMode &AM,
       Opc = HasVLX ? X86::VMOVAPDZ256rm : X86::VMOVAPDYrm;
     else
       Opc = HasVLX ? X86::VMOVUPDZ256rm : X86::VMOVUPDYrm;
-    RC = &X86::VR256RegClass;
+    RC = HasVLX ? &X86::VR256XRegClass : &X86::VR256RegClass;
     break;
   case MVT::v8i32:
   case MVT::v4i64:
@@ -445,7 +445,7 @@ bool X86FastISel::X86FastEmitLoad(EVT VT, X86AddressMode &AM,
       Opc = HasVLX ? X86::VMOVDQA64Z256rm : X86::VMOVDQAYrm;
     else
       Opc = HasVLX ? X86::VMOVDQU64Z256rm : X86::VMOVDQUYrm;
-    RC = &X86::VR256RegClass;
+    RC = HasVLX ? &X86::VR256XRegClass : &X86::VR256RegClass;
     break;
   case MVT::v16f32:
     assert(HasAVX512);
@@ -3723,7 +3723,7 @@ unsigned X86FastISel::X86MaterializeFP(const ConstantFP *CFP, MVT VT) {
       Opc = Subtarget->hasAVX512()
                 ? X86::VMOVSSZrm
                 : Subtarget->hasAVX() ? X86::VMOVSSrm : X86::MOVSSrm;
-      RC = &X86::FR32RegClass;
+      RC = Subtarget->hasAVX512() ? &X86::FR32XRegClass : &X86::FR32RegClass;
     } else {
       Opc = X86::LD_Fp32m;
       RC = &X86::RFP32RegClass;
@@ -3734,7 +3734,7 @@ unsigned X86FastISel::X86MaterializeFP(const ConstantFP *CFP, MVT VT) {
      Opc = Subtarget->hasAVX512()
                ? X86::VMOVSDZrm
                : Subtarget->hasAVX() ? X86::VMOVSDrm : X86::MOVSDrm;
-      RC = &X86::FR64RegClass;
+      RC = Subtarget->hasAVX512() ? &X86::FR64XRegClass : &X86::FR64RegClass;
     } else {
       Opc = X86::LD_Fp64m;
       RC = &X86::RFP64RegClass;