@@ -433,9 +433,18 @@ ModRefInfo AAResults::getModRefInfo(const LoadInst *L,
433433 const MemoryLocation &Loc,
434434 AAQueryInfo &AAQI) {
435435 // Be conservative in the face of atomic.
436- if (isStrongerThan (L->getOrdering (), AtomicOrdering::Unordered ))
436+ if (isStrongerThan (L->getOrdering (), AtomicOrdering::Monotonic ))
437437 return ModRefInfo::ModRef;
438438
439+ // For Monotonic and unordered atomic loads, if the locations are not NoAlias,
440+ // we must be conservative and return ModRef to prevent unsafe reordering of
441+ // accesses to the same memory.
442+ if (L->isAtomic ()){
443+ if (Loc.Ptr &&
444+ alias (MemoryLocation::get (L), Loc, AAQI, L) != AliasResult::NoAlias)
445+ return ModRefInfo::ModRef;
446+ }
447+
439448 // If the load address doesn't alias the given address, it doesn't read
440449 // or write the specified memory.
441450 if (Loc.Ptr ) {
@@ -451,7 +460,7 @@ ModRefInfo AAResults::getModRefInfo(const StoreInst *S,
451460 const MemoryLocation &Loc,
452461 AAQueryInfo &AAQI) {
453462 // Be conservative in the face of atomic.
454- if (isStrongerThan (S->getOrdering (), AtomicOrdering::Unordered ))
463+ if (isStrongerThan (S->getOrdering (), AtomicOrdering::Monotonic ))
455464 return ModRefInfo::ModRef;
456465
457466 if (Loc.Ptr ) {
@@ -470,7 +479,7 @@ ModRefInfo AAResults::getModRefInfo(const StoreInst *S,
470479 }
471480
472481 // Otherwise, a store just writes.
473- return ModRefInfo::Mod ;
482+ return ModRefInfo::ModRef ;
474483}
475484
476485ModRefInfo AAResults::getModRefInfo (const FenceInst *S,