@@ -1,4 +1,4 @@
-//===--- LoadBorrowInvalidationChecker.cpp --------------------------------===//
+//===--- LoadBorrowImmutabilityChecker.cpp --------------------------------===//
 //
 // This source file is part of the Swift.org open source project
 //
@@ -13,12 +13,12 @@
 /// \file
 ///
 /// This file defines a verifier that exhaustively validates that there aren't
-/// any load_borrows in a SIL module that are invalidated by a write to their
+/// any load_borrows in a SIL module that have in-scope writes to their
 /// underlying storage.
 ///
 //===----------------------------------------------------------------------===//

-#define DEBUG_TYPE "sil-load-borrow-invalidation-checker"
+#define DEBUG_TYPE "sil-load-borrow-immutability-checker"
 #include "VerifierPrivate.h"
 #include "swift/Basic/Debug.h"
 #include "swift/Basic/LLVM.h"
@@ -39,26 +39,26 @@ using namespace swift::silverifier;
 // Write Gatherer
 //===----------------------------------------------------------------------===//

-static bool constructValuesForBuiltinKey(
-    Operand *op, BuiltinInst *bi,
-    SmallVectorImpl<Operand *> &wellBehavedWriteAccumulator) {
+// Helper for gatherAddressWrites.
+static bool gatherBuiltinWrites(Operand *op, BuiltinInst *bi,
+                                SmallVectorImpl<Operand *> &writeAccumulator) {
   // If we definitely do not write to memory, just return true early.
   if (!bi->mayWriteToMemory()) {
     return true;
   }

   // TODO: Should we make this an exhaustive list so that when new builtins are
   // added, they need to actually update this code?
-  wellBehavedWriteAccumulator.push_back(op);
+  writeAccumulator.push_back(op);
   return true;
 }

 /// Returns true if we were able to ascertain that either the initialValue has
 /// no write uses or all of the write uses were writes that we could understand.
 static bool
-constructValuesForKey(SILValue initialValue,
-                      SmallVectorImpl<Operand *> &wellBehavedWriteAccumulator) {
-  SmallVector<Operand *, 8> worklist(initialValue->getUses());
+gatherAddressWrites(SILValue address,
+                    SmallVectorImpl<Operand *> &writeAccumulator) {
+  SmallVector<Operand *, 8> worklist(address->getUses());

   while (!worklist.empty()) {
     auto *op = worklist.pop_back_val();
@@ -81,7 +81,7 @@ constructValuesForKey(SILValue initialValue,
     if (auto *oeai = dyn_cast<OpenExistentialAddrInst>(user)) {
       // Mutable access!
       if (oeai->getAccessKind() != OpenedExistentialAccess::Immutable) {
-        wellBehavedWriteAccumulator.push_back(op);
+        writeAccumulator.push_back(op);
       }

       // Otherwise, look through it and continue.
@@ -90,8 +90,8 @@ constructValuesForKey(SILValue initialValue,
     }

     // Add any destroy_addrs to the resultAccumulator.
-    if (isa<DestroyAddrInst>(user)) {
-      wellBehavedWriteAccumulator.push_back(op);
+    if (isa<DestroyAddrInst>(user) || isa<DestroyValueInst>(user)) {
+      writeAccumulator.push_back(op);
       continue;
     }

@@ -103,13 +103,13 @@ constructValuesForKey(SILValue initialValue,

     if (auto *mdi = dyn_cast<MarkDependenceInst>(user)) {
       if (mdi->getValue() == op->get()) {
-        wellBehavedWriteAccumulator.push_back(op);
+        writeAccumulator.push_back(op);
       }
       continue;
     }

     if (isa<InjectEnumAddrInst>(user)) {
-      wellBehavedWriteAccumulator.push_back(op);
+      writeAccumulator.push_back(op);
       continue;
     }

@@ -121,7 +121,7 @@ constructValuesForKey(SILValue initialValue,
     if (auto *ccbi = dyn_cast<CheckedCastAddrBranchInst>(user)) {
       if (ccbi->getConsumptionKind() == CastConsumptionKind::TakeAlways ||
           ccbi->getConsumptionKind() == CastConsumptionKind::TakeOnSuccess) {
-        wellBehavedWriteAccumulator.push_back(op);
+        writeAccumulator.push_back(op);
         continue;
       }
     }
@@ -139,7 +139,7 @@ constructValuesForKey(SILValue initialValue,
     if (auto *bai = dyn_cast<BeginAccessInst>(user)) {
       // If we do not have a read, mark this as a write.
       if (bai->getAccessKind() != SILAccessKind::Read) {
-        wellBehavedWriteAccumulator.push_back(op);
+        writeAccumulator.push_back(op);
       }

       // Otherwise, add the users to the worklist and continue.
@@ -150,20 +150,20 @@ constructValuesForKey(SILValue initialValue,
     // If we have a load, we just need to mark the load [take] as a write.
     if (auto *li = dyn_cast<LoadInst>(user)) {
       if (li->getOwnershipQualifier() == LoadOwnershipQualifier::Take) {
-        wellBehavedWriteAccumulator.push_back(op);
+        writeAccumulator.push_back(op);
       }
       continue;
     }

 #define NEVER_OR_SOMETIMES_LOADABLE_CHECKED_REF_STORAGE(Name, name, NAME) \
   if (auto *li = dyn_cast<Load##Name##Inst>(user)) { \
     if (li->isTake() == IsTake) { \
-      wellBehavedWriteAccumulator.push_back(op); \
+      writeAccumulator.push_back(op); \
     } \
     continue; \
   } \
   if (isa<Store##Name##Inst>(user)) { \
-    wellBehavedWriteAccumulator.push_back(op); \
+    writeAccumulator.push_back(op); \
    continue; \
   }
 #include "swift/AST/ReferenceStorage.def"
@@ -173,7 +173,7 @@ constructValuesForKey(SILValue initialValue,
     // interprocedural analysis that we do not perform here.
     if (auto fas = FullApplySite::isa(user)) {
       if (fas.isIndirectResultOperand(*op)) {
-        wellBehavedWriteAccumulator.push_back(op);
+        writeAccumulator.push_back(op);
         continue;
       }

@@ -191,12 +191,12 @@ constructValuesForKey(SILValue initialValue,
       }

       if (argConv.isInoutConvention()) {
-        wellBehavedWriteAccumulator.push_back(op);
+        writeAccumulator.push_back(op);
         continue;
       }

       if (argConv.isOwnedConvention()) {
-        wellBehavedWriteAccumulator.push_back(op);
+        writeAccumulator.push_back(op);
         continue;
       }

@@ -207,22 +207,22 @@ constructValuesForKey(SILValue initialValue,
     }

     if (auto as = ApplySite::isa(user)) {
-      wellBehavedWriteAccumulator.push_back(op);
+      writeAccumulator.push_back(op);
       continue;
     }

     // Copy addr that read are just loads.
     if (auto *cai = dyn_cast<CopyAddrInst>(user)) {
       // If our value is the destination, this is a write.
       if (cai->getDest() == op->get()) {
-        wellBehavedWriteAccumulator.push_back(op);
+        writeAccumulator.push_back(op);
         continue;
       }

       // Ok, so we are Src by process of elimination. Make sure we are not being
       // taken.
       if (cai->isTakeOfSrc()) {
-        wellBehavedWriteAccumulator.push_back(op);
+        writeAccumulator.push_back(op);
         continue;
       }

@@ -231,7 +231,7 @@ constructValuesForKey(SILValue initialValue,
     }

     if (isa<StoreInst>(user) || isa<AssignInst>(user)) {
-      wellBehavedWriteAccumulator.push_back(op);
+      writeAccumulator.push_back(op);
       continue;
     }

@@ -261,7 +261,7 @@ constructValuesForKey(SILValue initialValue,
       }

       if (info.isIndirectMutating() || info.isConsumed()) {
-        wellBehavedWriteAccumulator.push_back(op);
+        writeAccumulator.push_back(op);
         continue;
       }
     }
@@ -273,19 +273,19 @@ constructValuesForKey(SILValue initialValue,

     // unconditional_checked_cast_addr does a take on its input memory.
     if (isa<UnconditionalCheckedCastAddrInst>(user)) {
-      wellBehavedWriteAccumulator.push_back(op);
+      writeAccumulator.push_back(op);
       continue;
     }

     if (auto *ccabi = dyn_cast<CheckedCastAddrBranchInst>(user)) {
       if (ccabi->getConsumptionKind() != CastConsumptionKind::CopyOnSuccess) {
-        wellBehavedWriteAccumulator.push_back(op);
+        writeAccumulator.push_back(op);
       }
       continue;
     }

     if (auto *bi = dyn_cast<BuiltinInst>(user)) {
-      if (constructValuesForBuiltinKey(op, bi, wellBehavedWriteAccumulator)) {
+      if (gatherBuiltinWrites(op, bi, writeAccumulator)) {
         continue;
       }
     }
@@ -304,14 +304,15 @@ constructValuesForKey(SILValue initialValue,
 }

 //===----------------------------------------------------------------------===//
-// Load Borrow Never Invalidated Analysis
+// Load Borrow Immutability Analysis
 //===----------------------------------------------------------------------===//

-LoadBorrowNeverInvalidatedAnalysis::LoadBorrowNeverInvalidatedAnalysis(
+LoadBorrowImmutabilityAnalysis::LoadBorrowImmutabilityAnalysis(
     DeadEndBlocks &deadEndBlocks)
-    : cache(constructValuesForKey), deadEndBlocks(deadEndBlocks) {}
+    : cache(gatherAddressWrites), deadEndBlocks(deadEndBlocks) {}

-bool LoadBorrowNeverInvalidatedAnalysis::
+// \p address may be an address, pointer, or box type.
+bool LoadBorrowImmutabilityAnalysis::
     doesAddressHaveWriteThatInvalidatesLoadBorrow(
         LoadBorrowInst *lbi, ArrayRef<Operand *> endBorrowUses,
         SILValue address) {
@@ -379,7 +380,7 @@ bool LoadBorrowNeverInvalidatedAnalysis::
 // Top Level Entrypoint
 //===----------------------------------------------------------------------===//

-bool LoadBorrowNeverInvalidatedAnalysis::
+bool LoadBorrowImmutabilityAnalysis::
     doesBoxHaveWritesThatInvalidateLoadBorrow(LoadBorrowInst *lbi,
                                               ArrayRef<Operand *> endBorrowUses,
                                               SILValue originalBox) {
@@ -450,7 +451,7 @@ bool LoadBorrowNeverInvalidatedAnalysis::

   return false;
 }
-bool LoadBorrowNeverInvalidatedAnalysis::isInvalidated(
+bool LoadBorrowImmutabilityAnalysis::isInvalidated(
     LoadBorrowInst *lbi) {

   // FIXME: To be reenabled separately in a follow-on commit.