Skip to content

Commit 76fa821

Browse files
andykaylorLukacma
authored and committed
[CIR] Handle return with cleanups (llvm#163849)
This adds support for branching through a cleanup block when a return statement is encountered while we're in a scope with cleanups.
1 parent 51022bd commit 76fa821

File tree

10 files changed

+522
-81
lines changed

10 files changed

+522
-81
lines changed

clang/include/clang/CIR/MissingFeatures.h

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -219,6 +219,9 @@ struct MissingFeatures {
219219
static bool checkBitfieldClipping() { return false; }
220220
static bool cirgenABIInfo() { return false; }
221221
static bool cleanupAfterErrorDiags() { return false; }
222+
static bool cleanupAppendInsts() { return false; }
223+
static bool cleanupBranchThrough() { return false; }
224+
static bool cleanupIndexAndBIAdjustment() { return false; }
222225
static bool cleanupsToDeactivate() { return false; }
223226
static bool constEmitterAggILE() { return false; }
224227
static bool constEmitterArrayILE() { return false; }
@@ -239,6 +242,7 @@ struct MissingFeatures {
239242
static bool deleteArray() { return false; }
240243
static bool devirtualizeMemberFunction() { return false; }
241244
static bool ehCleanupFlags() { return false; }
245+
static bool ehCleanupHasPrebranchedFallthrough() { return false; }
242246
static bool ehCleanupScope() { return false; }
243247
static bool ehCleanupScopeRequiresEHCleanup() { return false; }
244248
static bool ehCleanupBranchFixups() { return false; }
@@ -296,6 +300,7 @@ struct MissingFeatures {
296300
static bool setNonGC() { return false; }
297301
static bool setObjCGCLValueClass() { return false; }
298302
static bool setTargetAttributes() { return false; }
303+
static bool simplifyCleanupEntry() { return false; }
299304
static bool sourceLanguageCases() { return false; }
300305
static bool stackBase() { return false; }
301306
static bool stackSaveOp() { return false; }

clang/lib/CIR/CodeGen/CIRGenCleanup.cpp

Lines changed: 213 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,46 @@ using namespace clang::CIRGen;
2828
// CIRGenFunction cleanup related
2929
//===----------------------------------------------------------------------===//
3030

31+
/// Build an unconditional branch to the lexical scope cleanup block,
/// or to the already-materialized labeled block if the destination has
/// been resolved.
///
/// Goto's that cannot be resolved yet are tracked on a per-scope basis
/// (as branch fixups) so they can be fixed up later.
cir::BrOp CIRGenFunction::emitBranchThroughCleanup(mlir::Location loc,
                                                   JumpDest dest) {
  // Insert a branch: to the cleanup block (unsolved) or to the already
  // materialized label. Keep track of unsolved goto's.
  assert(dest.getBlock() && "assumes incoming valid dest");
  auto brOp = cir::BrOp::create(builder, loc, dest.getBlock());

  // Calculate the innermost active normal cleanup.
  EHScopeStack::stable_iterator topCleanup =
      ehStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (topCleanup == ehStack.stable_end() ||
      topCleanup.encloses(dest.getScopeDepth())) { // works for invalid
    // FIXME(cir): should we clear insertion point here?
    return brOp;
  }

  // If we can't resolve the destination cleanup scope, just add this
  // to the current cleanup scope as a branch fixup.
  if (!dest.getScopeDepth().isValid()) {
    BranchFixup &fixup = ehStack.addBranchFixup();
    fixup.destination = dest.getBlock();
    fixup.destinationIndex = dest.getDestIndex();
    fixup.initialBranch = brOp;
    // No cleanup has rerouted this branch yet; threading through
    // cleanups happens when the enclosing scopes are popped.
    fixup.optimisticBranchBlock = nullptr;
    // FIXME(cir): should we clear insertion point here?
    return brOp;
  }

  // Threading through a known enclosing cleanup scope is not yet
  // implemented in CIRGen.
  cgm.errorNYI(loc, "emitBranchThroughCleanup: valid destination scope depth");
  return brOp;
}
70+
3171
/// Emits all the code to cause the given temporary to be cleaned up.
3272
void CIRGenFunction::emitCXXTemporary(const CXXTemporary *temporary,
3373
QualType tempType, Address ptr) {
@@ -40,6 +80,19 @@ void CIRGenFunction::emitCXXTemporary(const CXXTemporary *temporary,
4080

4181
void EHScopeStack::Cleanup::anchor() {}
4282

83+
/// Walk the chain of normal cleanups from the innermost one outward and
/// return the first that is still active; returns stable_end() when no
/// active normal cleanup exists.
EHScopeStack::stable_iterator
EHScopeStack::getInnermostActiveNormalCleanup() const {
  const stable_iterator last = stable_end();
  for (stable_iterator it = getInnermostNormalCleanup(); it != last;) {
    EHCleanupScope &scope = llvm::cast<EHCleanupScope>(*find(it));
    if (scope.isActive())
      return it;
    // Inactive: keep searching in the next enclosing normal cleanup.
    it = scope.getEnclosingNormalCleanup();
  }
  return last;
}
95+
4396
/// Push an entry of the given size onto this protected-scope stack.
4497
char *EHScopeStack::allocate(size_t size) {
4598
size = llvm::alignTo(size, ScopeStackAlignment);
@@ -75,14 +128,30 @@ void EHScopeStack::deallocate(size_t size) {
75128
startOfData += llvm::alignTo(size, ScopeStackAlignment);
76129
}
77130

131+
/// Remove any 'null' fixups on the stack. However, we can't pop more
/// fixups than the fixup depth on the innermost normal cleanup, or
/// else fixups that we try to add to that cleanup will end up in the
/// wrong place. We *could* try to shrink fixup depths, but that's
/// actually a lot of work for little benefit.
void EHScopeStack::popNullFixups() {
  // We expect this to only be called when there's still an innermost
  // normal cleanup; otherwise there really shouldn't be any fixups.
  // Not yet implemented in CIRGen: report NYI rather than silently
  // leaving stale fixups on the stack.
  cgf->cgm.errorNYI("popNullFixups");
}
141+
78142
void *EHScopeStack::pushCleanup(CleanupKind kind, size_t size) {
79143
char *buffer = allocate(EHCleanupScope::getSizeForCleanupSize(size));
144+
bool isNormalCleanup = kind & NormalCleanup;
80145
bool isEHCleanup = kind & EHCleanup;
81146
bool isLifetimeMarker = kind & LifetimeMarker;
82147

83148
assert(!cir::MissingFeatures::innermostEHScope());
84149

85-
EHCleanupScope *scope = new (buffer) EHCleanupScope(size);
150+
EHCleanupScope *scope = new (buffer)
151+
EHCleanupScope(size, branchFixups.size(), innermostNormalCleanup);
152+
153+
if (isNormalCleanup)
154+
innermostNormalCleanup = stable_begin();
86155

87156
if (isLifetimeMarker)
88157
cgf->cgm.errorNYI("push lifetime marker cleanup");
@@ -100,12 +169,23 @@ void EHScopeStack::popCleanup() {
100169

101170
assert(isa<EHCleanupScope>(*begin()));
102171
EHCleanupScope &cleanup = cast<EHCleanupScope>(*begin());
172+
innermostNormalCleanup = cleanup.getEnclosingNormalCleanup();
103173
deallocate(cleanup.getAllocatedSize());
104174

105175
// Destroy the cleanup.
106176
cleanup.destroy();
107177

108-
assert(!cir::MissingFeatures::ehCleanupBranchFixups());
178+
// Check whether we can shrink the branch-fixups stack.
179+
if (!branchFixups.empty()) {
180+
// If we no longer have any normal cleanups, all the fixups are
181+
// complete.
182+
if (!hasNormalCleanups()) {
183+
branchFixups.clear();
184+
} else {
185+
// Otherwise we can still trim out unnecessary nulls.
186+
popNullFixups();
187+
}
188+
}
109189
}
110190

111191
EHCatchScope *EHScopeStack::pushCatch(unsigned numHandlers) {
@@ -123,24 +203,40 @@ static void emitCleanup(CIRGenFunction &cgf, EHScopeStack::Cleanup *cleanup) {
123203
assert(cgf.haveInsertPoint() && "cleanup ended with no insertion point?");
124204
}
125205

206+
/// Return the normal-edge entry block for \p scope, lazily creating and
/// caching it on the scope the first time it is requested.
static mlir::Block *createNormalEntry(CIRGenFunction &cgf,
                                      EHCleanupScope &scope) {
  assert(scope.isNormalCleanup());
  if (mlir::Block *existing = scope.getNormalBlock())
    return existing;

  // Create the block without disturbing the caller's insertion point.
  mlir::OpBuilder::InsertionGuard guard(cgf.getBuilder());
  mlir::Block *created =
      cgf.curLexScope->getOrCreateCleanupBlock(cgf.getBuilder());
  scope.setNormalBlock(created);
  return created;
}
217+
126218
/// Pops a cleanup block. If the block includes a normal cleanup, the
127219
/// current insertion point is threaded through the cleanup, as are
128220
/// any branch fixups on the cleanup.
129221
void CIRGenFunction::popCleanupBlock() {
130222
assert(!ehStack.empty() && "cleanup stack is empty!");
131223
assert(isa<EHCleanupScope>(*ehStack.begin()) && "top not a cleanup!");
132224
EHCleanupScope &scope = cast<EHCleanupScope>(*ehStack.begin());
225+
assert(scope.getFixupDepth() <= ehStack.getNumBranchFixups());
133226

134227
// Remember activation information.
135228
bool isActive = scope.isActive();
136229

137-
assert(!cir::MissingFeatures::ehCleanupBranchFixups());
230+
// - whether there are branch fix-ups through this cleanup
231+
unsigned fixupDepth = scope.getFixupDepth();
232+
bool hasFixups = ehStack.getNumBranchFixups() != fixupDepth;
138233

139234
// - whether there's a fallthrough
140235
mlir::Block *fallthroughSource = builder.getInsertionBlock();
141236
bool hasFallthrough = fallthroughSource != nullptr && isActive;
142237

143-
bool requiresNormalCleanup = scope.isNormalCleanup() && hasFallthrough;
238+
bool requiresNormalCleanup =
239+
scope.isNormalCleanup() && (hasFixups || hasFallthrough);
144240

145241
// If we don't need the cleanup at all, we're done.
146242
assert(!cir::MissingFeatures::ehCleanupScopeRequiresEHCleanup());
@@ -175,9 +271,119 @@ void CIRGenFunction::popCleanupBlock() {
175271

176272
assert(!cir::MissingFeatures::ehCleanupFlags());
177273

178-
ehStack.popCleanup();
179-
scope.markEmitted();
180-
emitCleanup(*this, cleanup);
274+
// If we have a fallthrough and no other need for the cleanup,
275+
// emit it directly.
276+
if (hasFallthrough && !hasFixups) {
277+
assert(!cir::MissingFeatures::ehCleanupScopeRequiresEHCleanup());
278+
ehStack.popCleanup();
279+
scope.markEmitted();
280+
emitCleanup(*this, cleanup);
281+
} else {
282+
// Otherwise, the best approach is to thread everything through
283+
// the cleanup block and then try to clean up after ourselves.
284+
285+
// Force the entry block to exist.
286+
mlir::Block *normalEntry = createNormalEntry(*this, scope);
287+
288+
// I. Set up the fallthrough edge in.
289+
mlir::OpBuilder::InsertPoint savedInactiveFallthroughIP;
290+
291+
// If there's a fallthrough, we need to store the cleanup
292+
// destination index. For fall-throughs this is always zero.
293+
if (hasFallthrough) {
294+
assert(!cir::MissingFeatures::ehCleanupHasPrebranchedFallthrough());
295+
296+
} else if (fallthroughSource) {
297+
// Otherwise, save and clear the IP if we don't have fallthrough
298+
// because the cleanup is inactive.
299+
assert(!isActive && "source without fallthrough for active cleanup");
300+
savedInactiveFallthroughIP = builder.saveInsertionPoint();
301+
}
302+
303+
// II. Emit the entry block. This implicitly branches to it if
304+
// we have fallthrough. All the fixups and existing branches
305+
// should already be branched to it.
306+
builder.setInsertionPointToEnd(normalEntry);
307+
308+
// intercept normal cleanup to mark SEH scope end
309+
assert(!cir::MissingFeatures::ehCleanupScopeRequiresEHCleanup());
310+
311+
// III. Figure out where we're going and build the cleanup
312+
// epilogue.
313+
bool hasEnclosingCleanups =
314+
(scope.getEnclosingNormalCleanup() != ehStack.stable_end());
315+
316+
// Compute the branch-through dest if we need it:
317+
// - if there are branch-throughs threaded through the scope
318+
// - if fall-through is a branch-through
319+
// - if there are fixups that will be optimistically forwarded
320+
// to the enclosing cleanup
321+
assert(!cir::MissingFeatures::cleanupBranchThrough());
322+
if (hasFixups && hasEnclosingCleanups)
323+
cgm.errorNYI("cleanup branch-through dest");
324+
325+
mlir::Block *fallthroughDest = nullptr;
326+
327+
// If there's exactly one branch-after and no other threads,
328+
// we can route it without a switch.
329+
// Skip for SEH, since ExitSwitch is used to generate code to indicate
330+
// abnormal termination. (SEH: Except _leave and fall-through at
331+
// the end, all other exits in a _try (return/goto/continue/break)
332+
// are considered as abnormal terminations, using NormalCleanupDestSlot
333+
// to indicate abnormal termination)
334+
assert(!cir::MissingFeatures::cleanupBranchThrough());
335+
assert(!cir::MissingFeatures::ehCleanupScopeRequiresEHCleanup());
336+
337+
// IV. Pop the cleanup and emit it.
338+
scope.markEmitted();
339+
ehStack.popCleanup();
340+
assert(ehStack.hasNormalCleanups() == hasEnclosingCleanups);
341+
342+
emitCleanup(*this, cleanup);
343+
344+
// Append the prepared cleanup prologue from above.
345+
assert(!cir::MissingFeatures::cleanupAppendInsts());
346+
347+
// Optimistically hope that any fixups will continue falling through.
348+
if (fixupDepth != ehStack.getNumBranchFixups())
349+
cgm.errorNYI("cleanup fixup depth mismatch");
350+
351+
// V. Set up the fallthrough edge out.
352+
353+
// Case 1: a fallthrough source exists but doesn't branch to the
354+
// cleanup because the cleanup is inactive.
355+
if (!hasFallthrough && fallthroughSource) {
356+
// Prebranched fallthrough was forwarded earlier.
357+
// Non-prebranched fallthrough doesn't need to be forwarded.
358+
// Either way, all we need to do is restore the IP we cleared before.
359+
assert(!isActive);
360+
cgm.errorNYI("cleanup inactive fallthrough");
361+
362+
// Case 2: a fallthrough source exists and should branch to the
363+
// cleanup, but we're not supposed to branch through to the next
364+
// cleanup.
365+
} else if (hasFallthrough && fallthroughDest) {
366+
cgm.errorNYI("cleanup fallthrough destination");
367+
368+
// Case 3: a fallthrough source exists and should branch to the
369+
// cleanup and then through to the next.
370+
} else if (hasFallthrough) {
371+
// Everything is already set up for this.
372+
373+
// Case 4: no fallthrough source exists.
374+
} else {
375+
// FIXME(cir): should we clear insertion point here?
376+
}
377+
378+
// VI. Assorted cleaning.
379+
380+
// Check whether we can merge NormalEntry into a single predecessor.
381+
// This might invalidate (non-IR) pointers to NormalEntry.
382+
//
383+
// If it did invalidate those pointers, and normalEntry was the same
384+
// as NormalExit, go back and patch up the fixups.
385+
assert(!cir::MissingFeatures::simplifyCleanupEntry());
386+
}
181387
}
182388

183389
/// Pops cleanup blocks until the given savepoint is reached.

clang/lib/CIR/CodeGen/CIRGenCleanup.h

Lines changed: 24 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -151,6 +151,18 @@ class EHCatchScope : public EHScope {
151151
/// A cleanup scope which generates the cleanup blocks lazily.
152152
class alignas(EHScopeStack::ScopeStackAlignment) EHCleanupScope
153153
: public EHScope {
154+
/// The nearest normal cleanup scope enclosing this one.
155+
EHScopeStack::stable_iterator enclosingNormal;
156+
157+
/// The dual entry/exit block along the normal edge. This is lazily
158+
/// created if needed before the cleanup is popped.
159+
mlir::Block *normalBlock = nullptr;
160+
161+
/// The number of fixups required by enclosing scopes (not including
162+
/// this one). If this is the top cleanup scope, all the fixups
163+
/// from this index onwards belong to this scope.
164+
unsigned fixupDepth = 0;
165+
154166
public:
155167
/// Gets the size required for a lazy cleanup scope with the given
156168
/// cleanup-data requirements.
@@ -162,7 +174,10 @@ class alignas(EHScopeStack::ScopeStackAlignment) EHCleanupScope
162174
return sizeof(EHCleanupScope) + cleanupBits.cleanupSize;
163175
}
164176

165-
EHCleanupScope(unsigned cleanupSize) : EHScope(EHScope::Cleanup) {
177+
EHCleanupScope(unsigned cleanupSize, unsigned fixupDepth,
178+
EHScopeStack::stable_iterator enclosingNormal)
179+
: EHScope(EHScope::Cleanup), enclosingNormal(enclosingNormal),
180+
fixupDepth(fixupDepth) {
166181
// TODO(cir): When exception handling is upstreamed, isNormalCleanup and
167182
// isEHCleanup will be arguments to the constructor.
168183
cleanupBits.isNormalCleanup = true;
@@ -180,11 +195,19 @@ class alignas(EHScopeStack::ScopeStackAlignment) EHCleanupScope
180195
// Objects of EHCleanupScope are not destructed. Use destroy().
181196
~EHCleanupScope() = delete;
182197

198+
mlir::Block *getNormalBlock() const { return normalBlock; }
199+
void setNormalBlock(mlir::Block *bb) { normalBlock = bb; }
200+
183201
bool isNormalCleanup() const { return cleanupBits.isNormalCleanup; }
184202

185203
bool isActive() const { return cleanupBits.isActive; }
186204
void setActive(bool isActive) { cleanupBits.isActive = isActive; }
187205

206+
unsigned getFixupDepth() const { return fixupDepth; }
207+
EHScopeStack::stable_iterator getEnclosingNormalCleanup() const {
208+
return enclosingNormal;
209+
}
210+
188211
size_t getCleanupSize() const { return cleanupBits.cleanupSize; }
189212
void *getCleanupBuffer() { return this + 1; }
190213

0 commit comments

Comments
 (0)