Commit 1c1df58

Merge branch 'main' into p/libc-hdrgen-auto-types
2 parents: 5aae702 + 0301bf9

38 files changed: +855 -279 lines

llvm/include/llvm/SandboxIR/Context.h

Lines changed: 14 additions & 1 deletion

@@ -26,6 +26,7 @@ class BBIterator;
 class Constant;
 class Module;
 class Value;
+class Use;
 
 class Context {
 public:
@@ -37,6 +38,8 @@ class Context {
   // destination BB and an iterator pointing to the insertion position.
   using MoveInstrCallback =
       std::function<void(Instruction *, const BBIterator &)>;
+  // A SetUseCallback receives the Use that is about to get its source set.
+  using SetUseCallback = std::function<void(const Use &, Value *)>;
 
   /// An ID for a registered callback. Used for deregistration. A dedicated type
   /// is employed so as to keep IDs opaque to the end user; only Context should
@@ -98,6 +101,9 @@ class Context {
   /// Callbacks called when an IR instruction is about to get moved. Keys are
   /// used as IDs for deregistration.
   MapVector<CallbackID, MoveInstrCallback> MoveInstrCallbacks;
+  /// Callbacks called when a Use gets its source set. Keys are used as IDs for
+  /// deregistration.
+  MapVector<CallbackID, SetUseCallback> SetUseCallbacks;
 
   /// A counter used for assigning callback IDs during registration. The same
   /// counter is used for all kinds of callbacks so we can detect mismatched
@@ -129,6 +135,10 @@ class Context {
   void runEraseInstrCallbacks(Instruction *I);
   void runCreateInstrCallbacks(Instruction *I);
   void runMoveInstrCallbacks(Instruction *I, const BBIterator &Where);
+  void runSetUseCallbacks(const Use &U, Value *NewSrc);
+
+  friend class User;  // For runSetUseCallbacks().
+  friend class Value; // For runSetUseCallbacks().
 
   // Friends for getOrCreateConstant().
 #define DEF_CONST(ID, CLASS) friend class CLASS;
@@ -281,7 +291,10 @@ class Context {
   CallbackID registerMoveInstrCallback(MoveInstrCallback CB);
   void unregisterMoveInstrCallback(CallbackID ID);
 
-  // TODO: Add callbacks for instructions inserted/removed if needed.
+  /// Register a callback that gets called when a Use gets set.
+  /// \Returns a callback ID for later deregistration.
+  CallbackID registerSetUseCallback(SetUseCallback CB);
+  void unregisterSetUseCallback(CallbackID ID);
 };
 
 } // namespace sandboxir
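
For orientation, here is a minimal client-side sketch (not part of this commit) of the new hook, assuming only the registerSetUseCallback / unregisterSetUseCallback API declared above; the function name and lambda body are hypothetical:

// Hypothetical client code; only the Context API declared above is assumed.
#include "llvm/SandboxIR/Context.h"

void watchUseEdits(llvm::sandboxir::Context &Ctx) {
  using namespace llvm::sandboxir;
  // Fires just before a Use gets its source set to NewSrc.
  Context::CallbackID ID = Ctx.registerSetUseCallback(
      [](const Use &U, Value *NewSrc) {
        // Inspect U and NewSrc here, e.g. to invalidate cached analysis state.
      });
  // ... run transformations that call setOperand() / RAUW ...
  Ctx.unregisterSetUseCallback(ID);
}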

llvm/include/llvm/Transforms/Vectorize/SandboxVectorizer/DependencyGraph.h

Lines changed: 7 additions & 0 deletions

@@ -313,6 +313,7 @@ class DependencyGraph {
   std::optional<Context::CallbackID> CreateInstrCB;
   std::optional<Context::CallbackID> EraseInstrCB;
   std::optional<Context::CallbackID> MoveInstrCB;
+  std::optional<Context::CallbackID> SetUseCB;
 
   std::unique_ptr<BatchAAResults> BatchAA;
 
@@ -368,6 +369,8 @@ class DependencyGraph {
   /// Called by the callbacks when instruction \p I is about to be moved to
   /// \p To.
   void notifyMoveInstr(Instruction *I, const BBIterator &To);
+  /// Called by the callbacks when \p U's source is about to be set to \p NewSrc
+  void notifySetUse(const Use &U, Value *NewSrc);
 
 public:
   /// This constructor also registers callbacks.
@@ -381,6 +384,8 @@ class DependencyGraph {
         [this](Instruction *I, const BBIterator &To) {
           notifyMoveInstr(I, To);
         });
+    SetUseCB = Ctx.registerSetUseCallback(
+        [this](const Use &U, Value *NewSrc) { notifySetUse(U, NewSrc); });
   }
   ~DependencyGraph() {
     if (CreateInstrCB)
@@ -389,6 +394,8 @@ class DependencyGraph {
       Ctx->unregisterEraseInstrCallback(*EraseInstrCB);
     if (MoveInstrCB)
       Ctx->unregisterMoveInstrCallback(*MoveInstrCB);
+    if (SetUseCB)
+      Ctx->unregisterSetUseCallback(*SetUseCB);
   }
 
   DGNode *getNode(Instruction *I) const {
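
The constructor registers the new callback alongside the existing ones, and the destructor deregisters it behind the same if-guard pattern. Purely as an aside (not code from this commit), the register/unregister pairing could also be wrapped in a small RAII helper; everything below except the Context API is hypothetical:

// Hypothetical RAII helper; assumes only the Context API added in this commit.
#include "llvm/SandboxIR/Context.h"
#include <utility>

class SetUseCallbackGuard {
  llvm::sandboxir::Context &Ctx;
  llvm::sandboxir::Context::CallbackID ID;

public:
  SetUseCallbackGuard(llvm::sandboxir::Context &Ctx,
                      llvm::sandboxir::Context::SetUseCallback CB)
      : Ctx(Ctx), ID(Ctx.registerSetUseCallback(std::move(CB))) {}
  ~SetUseCallbackGuard() { Ctx.unregisterSetUseCallback(ID); }
};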

llvm/lib/Analysis/LoopAccessAnalysis.cpp

Lines changed: 50 additions & 32 deletions

@@ -855,16 +855,61 @@ getStrideFromAddRec(const SCEVAddRecExpr *AR, const Loop *Lp, Type *AccessTy,
   return Stride;
 }
 
+static bool isNoWrapAddRec(Value *Ptr, const SCEVAddRecExpr *AR,
+                           PredicatedScalarEvolution &PSE, const Loop *L);
+
 /// Check whether a pointer address cannot wrap.
 static bool isNoWrap(PredicatedScalarEvolution &PSE,
                      const DenseMap<Value *, const SCEV *> &Strides, Value *Ptr,
-                     Type *AccessTy, Loop *L, bool Assume) {
-  const SCEV *PtrScev = PSE.getSCEV(Ptr);
+                     Type *AccessTy, const Loop *L, bool Assume,
+                     std::optional<int64_t> Stride = std::nullopt) {
+  const SCEV *PtrScev = replaceSymbolicStrideSCEV(PSE, Strides, Ptr);
   if (PSE.getSE()->isLoopInvariant(PtrScev, L))
     return true;
 
-  return getPtrStride(PSE, AccessTy, Ptr, L, Strides, Assume).has_value() ||
-         PSE.hasNoOverflow(Ptr, SCEVWrapPredicate::IncrementNUSW);
+  const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(PtrScev);
+  if (!AR) {
+    if (!Assume)
+      return false;
+    AR = PSE.getAsAddRec(Ptr);
+  }
+
+  // The address calculation must not wrap. Otherwise, a dependence could be
+  // inverted.
+  if (isNoWrapAddRec(Ptr, AR, PSE, L))
+    return true;
+
+  // An nusw getelementptr that is an AddRec cannot wrap. If it would wrap,
+  // the distance between the previously accessed location and the wrapped
+  // location will be larger than half the pointer index type space. In that
+  // case, the GEP would be poison and any memory access dependent on it would
+  // be immediate UB when executed.
+  if (auto *GEP = dyn_cast<GetElementPtrInst>(Ptr);
+      GEP && GEP->hasNoUnsignedSignedWrap())
+    return true;
+
+  if (!Stride)
+    Stride = getStrideFromAddRec(AR, L, AccessTy, Ptr, PSE);
+  if (Stride) {
+    // If the null pointer is undefined, then a access sequence which would
+    // otherwise access it can be assumed not to unsigned wrap. Note that this
+    // assumes the object in memory is aligned to the natural alignment.
+    unsigned AddrSpace = AR->getType()->getPointerAddressSpace();
+    if (!NullPointerIsDefined(L->getHeader()->getParent(), AddrSpace) &&
+        (Stride == 1 || Stride == -1))
+      return true;
+  }
+
+  if (Assume) {
+    PSE.setNoOverflow(Ptr, SCEVWrapPredicate::IncrementNUSW);
+    LLVM_DEBUG(dbgs() << "LAA: Pointer may wrap:\n"
+                      << "LAA: Pointer: " << *Ptr << "\n"
+                      << "LAA: SCEV: " << *AR << "\n"
+                      << "LAA: Added an overflow assumption\n");
+    return true;
+  }
+
+  return PSE.hasNoOverflow(Ptr, SCEVWrapPredicate::IncrementNUSW);
 }
 
 static void visitPointers(Value *StartPtr, const Loop &InnermostLoop,
@@ -1505,36 +1550,9 @@ llvm::getPtrStride(PredicatedScalarEvolution &PSE, Type *AccessTy, Value *Ptr,
   if (!ShouldCheckWrap || !Stride)
     return Stride;
 
-  // The address calculation must not wrap. Otherwise, a dependence could be
-  // inverted.
-  if (isNoWrapAddRec(Ptr, AR, PSE, Lp))
-    return Stride;
-
-  // An nusw getelementptr that is an AddRec cannot wrap. If it would wrap,
-  // the distance between the previously accessed location and the wrapped
-  // location will be larger than half the pointer index type space. In that
-  // case, the GEP would be poison and any memory access dependent on it would
-  // be immediate UB when executed.
-  if (auto *GEP = dyn_cast<GetElementPtrInst>(Ptr);
-      GEP && GEP->hasNoUnsignedSignedWrap())
-    return Stride;
-
-  // If the null pointer is undefined, then a access sequence which would
-  // otherwise access it can be assumed not to unsigned wrap. Note that this
-  // assumes the object in memory is aligned to the natural alignment.
-  unsigned AddrSpace = Ty->getPointerAddressSpace();
-  if (!NullPointerIsDefined(Lp->getHeader()->getParent(), AddrSpace) &&
-      (Stride == 1 || Stride == -1))
+  if (isNoWrap(PSE, StridesMap, Ptr, AccessTy, Lp, Assume, Stride))
     return Stride;
 
-  if (Assume) {
-    PSE.setNoOverflow(Ptr, SCEVWrapPredicate::IncrementNUSW);
-    LLVM_DEBUG(dbgs() << "LAA: Pointer may wrap:\n"
-                      << "LAA: Pointer: " << *Ptr << "\n"
-                      << "LAA: SCEV: " << *AR << "\n"
-                      << "LAA: Added an overflow assumption\n");
-    return Stride;
-  }
   LLVM_DEBUG(
       dbgs() << "LAA: Bad stride - Pointer may wrap in the address space "
              << *Ptr << " SCEV: " << *AR << "\n");
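
The net effect of this refactor is that getPtrStride() delegates all of its wrap checking to isNoWrap(), which now also accepts a stride the caller has already computed. A minimal sketch of the two call shapes, assuming only the file-local signature shown above; the surrounding variables are placeholders:

// Hypothetical call sites inside LoopAccessAnalysis.cpp; isNoWrap is the
// static helper shown above, and all variables are placeholders.
// No precomputed stride: isNoWrap derives it from the pointer's AddRec.
bool NoWrapA = isNoWrap(PSE, Strides, Ptr, AccessTy, L, /*Assume=*/false);

// Stride already known (e.g. computed by getPtrStride): pass it along so it
// is not recomputed from the SCEV.
bool NoWrapB =
    isNoWrap(PSE, Strides, Ptr, AccessTy, L, /*Assume=*/true, /*Stride=*/1);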

llvm/lib/SandboxIR/Context.cpp

Lines changed: 18 additions & 0 deletions

@@ -687,6 +687,11 @@ void Context::runMoveInstrCallbacks(Instruction *I, const BBIterator &WhereIt) {
     CBEntry.second(I, WhereIt);
 }
 
+void Context::runSetUseCallbacks(const Use &U, Value *NewSrc) {
+  for (auto &CBEntry : SetUseCallbacks)
+    CBEntry.second(U, NewSrc);
+}
+
 // An arbitrary limit, to check for accidental misuse. We expect a small number
 // of callbacks to be registered at a time, but we can increase this number if
 // we discover we needed more.
@@ -732,4 +737,17 @@ void Context::unregisterMoveInstrCallback(CallbackID ID) {
          "Callback ID not found in MoveInstrCallbacks during deregistration");
 }
 
+Context::CallbackID Context::registerSetUseCallback(SetUseCallback CB) {
+  assert(SetUseCallbacks.size() <= MaxRegisteredCallbacks &&
+         "SetUseCallbacks size limit exceeded");
+  CallbackID ID{NextCallbackID++};
+  SetUseCallbacks[ID] = CB;
+  return ID;
+}
+void Context::unregisterSetUseCallback(CallbackID ID) {
+  [[maybe_unused]] bool Erased = SetUseCallbacks.erase(ID);
+  assert(Erased &&
+         "Callback ID not found in SetUseCallbacks during deregistration");
+}
+
 } // namespace llvm::sandboxir
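
SetUseCallbacks is a MapVector keyed by CallbackID, so runSetUseCallbacks() invokes the callbacks in registration order and unregisterSetUseCallback() is an erase-by-key. A small hypothetical sketch of that behavior (the function name and Log are placeholders; only the API above is assumed):

// Hypothetical sketch; Log is a placeholder, only the API above is assumed.
#include "llvm/ADT/SmallVector.h"
#include "llvm/SandboxIR/Context.h"

void demoCallbackOrder(llvm::sandboxir::Context &Ctx,
                       llvm::SmallVectorImpl<int> &Log) {
  using namespace llvm::sandboxir;
  auto A = Ctx.registerSetUseCallback(
      [&Log](const Use &, Value *) { Log.push_back(1); });
  auto B = Ctx.registerSetUseCallback(
      [&Log](const Use &, Value *) { Log.push_back(2); });
  // Any operand change made through SandboxIR (setOperand, RAUW, ...) now
  // appends 1 then 2: MapVector iteration follows registration order.
  Ctx.unregisterSetUseCallback(A);
  Ctx.unregisterSetUseCallback(B);
}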

llvm/lib/SandboxIR/User.cpp

Lines changed: 8 additions & 5 deletions

@@ -90,17 +90,20 @@ bool User::classof(const Value *From) {
 
 void User::setOperand(unsigned OperandIdx, Value *Operand) {
   assert(isa<llvm::User>(Val) && "No operands!");
-  Ctx.getTracker().emplaceIfTracking<UseSet>(getOperandUse(OperandIdx));
+  const auto &U = getOperandUse(OperandIdx);
+  Ctx.getTracker().emplaceIfTracking<UseSet>(U);
+  Ctx.runSetUseCallbacks(U, Operand);
   // We are delegating to llvm::User::setOperand().
   cast<llvm::User>(Val)->setOperand(OperandIdx, Operand->Val);
 }
 
 bool User::replaceUsesOfWith(Value *FromV, Value *ToV) {
   auto &Tracker = Ctx.getTracker();
-  if (Tracker.isTracking()) {
-    for (auto OpIdx : seq<unsigned>(0, getNumOperands())) {
-      auto Use = getOperandUse(OpIdx);
-      if (Use.get() == FromV)
+  for (auto OpIdx : seq<unsigned>(0, getNumOperands())) {
+    auto Use = getOperandUse(OpIdx);
+    if (Use.get() == FromV) {
+      Ctx.runSetUseCallbacks(Use, ToV);
+      if (Tracker.isTracking())
         Tracker.emplaceIfTracking<UseSet>(Use);
     }
   }
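
With this change, both setOperand() and replaceUsesOfWith() notify registered callbacks before delegating to the underlying llvm::User, and replaceUsesOfWith() now walks the operands even when change tracking is disabled. A hypothetical sketch of what a caller observes; Usr, OldOp and NewOp stand for existing SandboxIR objects, and the include paths are assumed:

// Hypothetical sketch; only the API touched by this commit is relied upon.
#include "llvm/SandboxIR/Context.h"
#include "llvm/SandboxIR/User.h"

void demoUserNotifications(llvm::sandboxir::Context &Ctx,
                           llvm::sandboxir::User *Usr,
                           llvm::sandboxir::Value *OldOp,
                           llvm::sandboxir::Value *NewOp) {
  using namespace llvm::sandboxir;
  unsigned NotifyCount = 0;
  auto CB = Ctx.registerSetUseCallback(
      [&NotifyCount](const Use &, Value *) { ++NotifyCount; });
  Usr->setOperand(0, NewOp);            // Notifies once, before the operand changes.
  Usr->replaceUsesOfWith(OldOp, NewOp); // Notifies once per operand equal to OldOp.
  Ctx.unregisterSetUseCallback(CB);
}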

llvm/lib/SandboxIR/Value.cpp

Lines changed: 5 additions & 3 deletions

@@ -51,14 +51,15 @@ void Value::replaceUsesWithIf(
   llvm::Value *OtherVal = OtherV->Val;
   // We are delegating RUWIf to LLVM IR's RUWIf.
   Val->replaceUsesWithIf(
-      OtherVal, [&ShouldReplace, this](llvm::Use &LLVMUse) -> bool {
+      OtherVal, [&ShouldReplace, this, OtherV](llvm::Use &LLVMUse) -> bool {
        User *DstU = cast_or_null<User>(Ctx.getValue(LLVMUse.getUser()));
        if (DstU == nullptr)
          return false;
        Use UseToReplace(&LLVMUse, DstU, Ctx);
        if (!ShouldReplace(UseToReplace))
          return false;
        Ctx.getTracker().emplaceIfTracking<UseSet>(UseToReplace);
+        Ctx.runSetUseCallbacks(UseToReplace, OtherV);
        return true;
      });
 }
@@ -67,8 +68,9 @@ void Value::replaceAllUsesWith(Value *Other) {
   assert(getType() == Other->getType() &&
          "Replacing with Value of different type!");
   auto &Tracker = Ctx.getTracker();
-  if (Tracker.isTracking()) {
-    for (auto Use : uses())
+  for (auto Use : uses()) {
+    Ctx.runSetUseCallbacks(Use, Other);
+    if (Tracker.isTracking())
       Tracker.track(std::make_unique<UseSet>(Use));
   }
   // We are delegating RAUW to LLVM IR's RAUW.
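
Similarly, replaceAllUsesWith() and replaceUsesWithIf() now notify once per affected use before handing the replacement off to LLVM IR, regardless of whether tracking is active. A hypothetical sketch along the same lines; V and Replacement are placeholders and the include paths are assumed:

// Hypothetical sketch; V and Replacement stand for existing SandboxIR values.
#include "llvm/SandboxIR/Context.h"
#include "llvm/SandboxIR/Value.h"

void demoRAUWNotifications(llvm::sandboxir::Context &Ctx,
                           llvm::sandboxir::Value *V,
                           llvm::sandboxir::Value *Replacement) {
  using namespace llvm::sandboxir;
  auto CB = Ctx.registerSetUseCallback([](const Use &, Value *) {
    // Called once for every use of V, just before it is rewired to Replacement.
  });
  V->replaceAllUsesWith(Replacement);
  Ctx.unregisterSetUseCallback(CB);
}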
