@@ -421,9 +421,18 @@ ModRefInfo AAResults::getModRefInfo(const LoadInst *L,
                                     const MemoryLocation &Loc,
                                     AAQueryInfo &AAQI) {
   // Be conservative in the face of atomic.
-  if (isStrongerThan(L->getOrdering(), AtomicOrdering::Unordered))
+  if (isStrongerThan(L->getOrdering(), AtomicOrdering::Monotonic))
     return ModRefInfo::ModRef;
 
+  // For monotonic and unordered atomic loads, if the locations are not
+  // NoAlias, we must be conservative and return ModRef to prevent unsafe
+  // reordering of accesses to the same memory.
+  if (L->isAtomic()) {
+    if (Loc.Ptr &&
+        alias(MemoryLocation::get(L), Loc, AAQI, L) != AliasResult::NoAlias)
+      return ModRefInfo::ModRef;
+  }
+
   // If the load address doesn't alias the given address, it doesn't read
   // or write the specified memory.
   if (Loc.Ptr) {
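
A sketch of what the patched load path computes, written as standalone C++ with simplified stand-ins for llvm::AtomicOrdering, llvm::AliasResult, and llvm::ModRefInfo (the real function's remaining Loc.Ptr handling is compressed into the final branch, so this is a model of the decision table, not LLVM code):

// Not LLVM code: simplified model of the patched load path above.
#include <cassert>

enum class AtomicOrdering { NotAtomic, Unordered, Monotonic, Acquire };
enum class AliasResult { NoAlias, MayAlias, MustAlias };
enum class ModRefInfo { NoModRef, Ref, ModRef };

// Mirrors llvm::isStrongerThan for the orderings modeled here; the enum
// is listed from weakest to strongest.
static bool isStrongerThan(AtomicOrdering A, AtomicOrdering B) {
  return static_cast<int>(A) > static_cast<int>(B);
}

static ModRefInfo loadModRef(AtomicOrdering Ord, AliasResult AR) {
  // Acquire or stronger: ModRef regardless of aliasing (first change).
  if (isStrongerThan(Ord, AtomicOrdering::Monotonic))
    return ModRefInfo::ModRef;
  // Monotonic/unordered atomic loads: ModRef unless provably NoAlias
  // (the newly added block in the hunk above).
  if (Ord != AtomicOrdering::NotAtomic && AR != AliasResult::NoAlias)
    return ModRefInfo::ModRef;
  // Ordinary load: NoModRef if it cannot touch Loc, otherwise it only reads.
  return AR == AliasResult::NoAlias ? ModRefInfo::NoModRef : ModRefInfo::Ref;
}

int main() {
  // An unordered atomic load that may alias Loc is now ModRef, not Ref...
  assert(loadModRef(AtomicOrdering::Unordered, AliasResult::MayAlias) ==
         ModRefInfo::ModRef);
  // ...while a provably non-aliasing monotonic load drops from ModRef
  // (the unconditional pre-patch answer) to NoModRef.
  assert(loadModRef(AtomicOrdering::Monotonic, AliasResult::NoAlias) ==
         ModRefInfo::NoModRef);
}
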
@@ -439,7 +448,7 @@ ModRefInfo AAResults::getModRefInfo(const StoreInst *S,
                                     const MemoryLocation &Loc,
                                     AAQueryInfo &AAQI) {
   // Be conservative in the face of atomic.
-  if (isStrongerThan(S->getOrdering(), AtomicOrdering::Unordered))
+  if (isStrongerThan(S->getOrdering(), AtomicOrdering::Monotonic))
     return ModRefInfo::ModRef;
 
   if (Loc.Ptr) {
@@ -458,7 +467,7 @@ ModRefInfo AAResults::getModRefInfo(const StoreInst *S,
   }
 
-  // Otherwise, a store just writes.
-  return ModRefInfo::Mod;
+  // Otherwise, conservatively assume the store may be read and written.
+  return ModRefInfo::ModRef;
 }
 
 ModRefInfo AAResults::getModRefInfo(const FenceInst *S,
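
The store path admits the same kind of sketch. Again this is simplified stand-in C++ rather than LLVM code, and it omits the extra checks the real function performs inside its Loc.Ptr branch; the point is the fallback, which after the patch is ModRef rather than Mod:

// Not LLVM code: simplified model of the patched store path above.
#include <cassert>

enum class AtomicOrdering { NotAtomic, Unordered, Monotonic, Release };
enum class AliasResult { NoAlias, MayAlias, MustAlias };
enum class ModRefInfo { NoModRef, Mod, ModRef };

static ModRefInfo storeModRef(AtomicOrdering Ord, AliasResult AR) {
  // Release or stronger: ModRef regardless of aliasing (second hunk).
  if (static_cast<int>(Ord) > static_cast<int>(AtomicOrdering::Monotonic))
    return ModRefInfo::ModRef;
  // A store that provably cannot touch Loc does not affect it.
  if (AR == AliasResult::NoAlias)
    return ModRefInfo::NoModRef;
  // Fallback (third hunk): ModRef instead of Mod, so clients may no longer
  // treat the store as write-only when aliasing cannot be disproved.
  return ModRefInfo::ModRef;
}

int main() {
  // A may-aliasing monotonic store is treated as both reading and writing...
  assert(storeModRef(AtomicOrdering::Monotonic, AliasResult::MayAlias) ==
         ModRefInfo::ModRef);
  // ...while a provably disjoint store remains NoModRef.
  assert(storeModRef(AtomicOrdering::NotAtomic, AliasResult::NoAlias) ==
         ModRefInfo::NoModRef);
}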