@@ -236,7 +236,6 @@ void FastISel::flushLocalValueMap() {
   LastLocalValue = EmitStartPt;
   recomputeInsertPt();
   SavedInsertPt = FuncInfo.InsertPt;
-  LastFlushPoint = FuncInfo.InsertPt;
 }
 
 bool FastISel::hasTrivialKill(const Value *V) {
@@ -458,8 +457,6 @@ void FastISel::removeDeadCode(MachineBasicBlock::iterator I,
   assert(I.isValid() && E.isValid() && std::distance(I, E) > 0 &&
          "Invalid iterator!");
   while (I != E) {
-    if (LastFlushPoint == I)
-      LastFlushPoint = E;
     if (SavedInsertPt == I)
       SavedInsertPt = E;
     if (EmitStartPt == I)
@@ -1210,11 +1207,6 @@ bool FastISel::selectCall(const User *I) {
 
   // Handle simple inline asms.
   if (const InlineAsm *IA = dyn_cast<InlineAsm>(Call->getCalledOperand())) {
-    // If the inline asm has side effects, then make sure that no local value
-    // lives across by flushing the local value map.
-    if (IA->hasSideEffects())
-      flushLocalValueMap();
-
     // Don't attempt to handle constraints.
     if (!IA->getConstraintString().empty())
       return false;
@@ -1244,15 +1236,6 @@ bool FastISel::selectCall(const User *I) {
   if (const auto *II = dyn_cast<IntrinsicInst>(Call))
     return selectIntrinsicCall(II);
 
-  // Usually, it does not make sense to initialize a value,
-  // make an unrelated function call and use the value, because
-  // it tends to be spilled on the stack. So, we move the pointer
-  // to the last local value to the beginning of the block, so that
-  // all the values which have already been materialized,
-  // appear after the call. It also makes sense to skip intrinsics
-  // since they tend to be inlined.
-  flushLocalValueMap();
-
   return lowerCall(Call);
 }
 
@@ -1409,20 +1392,6 @@ bool FastISel::selectIntrinsicCall(const IntrinsicInst *II) {
     return selectXRayCustomEvent(II);
   case Intrinsic::xray_typedevent:
     return selectXRayTypedEvent(II);
-
-  case Intrinsic::memcpy:
-  case Intrinsic::memcpy_element_unordered_atomic:
-  case Intrinsic::memcpy_inline:
-  case Intrinsic::memmove:
-  case Intrinsic::memmove_element_unordered_atomic:
-  case Intrinsic::memset:
-  case Intrinsic::memset_element_unordered_atomic:
-    // Flush the local value map just like we do for regular calls,
-    // to avoid excessive spills and reloads.
-    // These intrinsics mostly turn into library calls at O0; and
-    // even memcpy_inline should be treated like one for this purpose.
-    flushLocalValueMap();
-    break;
   }
 
   return fastLowerIntrinsicCall(II);