Commit 1a16de2

Merge pull request #2068 from swiftwasm/main
[pull] swiftwasm from main
2 parents: deddadc + a353176

File tree

8 files changed, +444 -603 lines changed

lib/SIL/Verifier/CMakeLists.txt

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 target_sources(swiftSIL PRIVATE
-  LoadBorrowInvalidationChecker.cpp
+  LoadBorrowImmutabilityChecker.cpp
   LinearLifetimeChecker.cpp
   MemoryLifetime.cpp
   SILOwnershipVerifier.cpp
lib/SIL/Verifier/LoadBorrowImmutabilityChecker.cpp

Lines changed: 369 additions & 0 deletions

@@ -0,0 +1,369 @@
//===--- LoadBorrowImmutabilityChecker.cpp --------------------------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2020 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
///
/// \file
///
/// This file defines a verifier that exhaustively validates that there aren't
/// any load_borrows in a SIL module that have in-scope writes to their
/// underlying storage.
///
//===----------------------------------------------------------------------===//
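// For orientation, a sketch of the pattern this verifier rejects
// (hypothetical OSSA SIL; %addr stands for any mutable storage):
//
//   %v = load_borrow %addr : $*Klass
//   store %new to [assign] %addr : $*Klass // in-scope write -> error
//   end_borrow %v : $Klass
//
// The same store placed after the end_borrow would be accepted.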
#define DEBUG_TYPE "sil-load-borrow-immutability-checker"
#include "VerifierPrivate.h"
#include "swift/Basic/Debug.h"
#include "swift/Basic/LLVM.h"
#include "swift/Basic/MultiMapCache.h"
#include "swift/SIL/BasicBlockUtils.h"
#include "swift/SIL/LinearLifetimeChecker.h"
#include "swift/SIL/MemAccessUtils.h"
#include "swift/SIL/OwnershipUtils.h"
#include "swift/SIL/Projection.h"
#include "swift/SIL/SILInstruction.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"

using namespace swift;
using namespace swift::silverifier;

//===----------------------------------------------------------------------===//
//                               Write Gatherer
//===----------------------------------------------------------------------===//

namespace {

// Visitor for visitAccessPathUses().
class GatherWritesVisitor : public AccessUseVisitor {
  // Result: writes to the AccessPath being visited.
  SmallVectorImpl<Operand *> &writeAccumulator;

public:
  GatherWritesVisitor(SmallVectorImpl<Operand *> &writes)
      : AccessUseVisitor(AccessUseType::Overlapping,
                         NestedAccessType::StopAtAccessBegin),
        writeAccumulator(writes) {}

  bool visitUse(Operand *op, AccessUseType useTy);
};

// Functor for MultiMapCache construction.
struct GatherWrites {
  const SILFunction *function;
  GatherWrites(const SILFunction *function) : function(function) {}

  bool operator()(const AccessPath &accessPath,
                  SmallVectorImpl<Operand *> &writeAccumulator) {
    GatherWritesVisitor visitor(writeAccumulator);
    return visitAccessPathUses(visitor, accessPath,
                               const_cast<SILFunction *>(function));
  }
};

} // end anonymous namespace
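// Usage sketch (inferred from the analysis below): GatherWrites serves as the
// compute functor of a MultiMapCache keyed on AccessPath, so the writes for a
// given access path are gathered once per function and then memoized:
//
//   auto writes = cache.get(accessPath); // None => gathering failed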
// Filter out recognized uses that do not write to memory.
//
// TODO: Ensure that all of the conditional-write logic below is encapsulated
// in mayWriteToMemory and just call that instead. Possibly add additional
// verification that visitAccessPathUses recognizes all instructions that may
// propagate pointers (even though they don't write).
bool GatherWritesVisitor::visitUse(Operand *op, AccessUseType useTy) {
  // If this operand is for a dependent type, then it does not actually access
  // the operand's address value. It only uses the metatype defined by the
  // operation (e.g. open_existential).
  if (op->isTypeDependent()) {
    return true;
  }
  SILInstruction *user = op->getUser();
  if (isIncidentalUse(user)) {
    return true;
  }
  switch (user->getKind()) {

  // Known reads...
  case SILInstructionKind::LoadBorrowInst:
  case SILInstructionKind::SelectEnumAddrInst:
  case SILInstructionKind::SwitchEnumAddrInst:
  case SILInstructionKind::DeallocStackInst:
  case SILInstructionKind::DeallocBoxInst:
  case SILInstructionKind::WitnessMethodInst:
  case SILInstructionKind::ExistentialMetatypeInst:
    return true;

  // Known writes...
  case SILInstructionKind::DestroyAddrInst:
  case SILInstructionKind::DestroyValueInst:
  case SILInstructionKind::InjectEnumAddrInst:
  case SILInstructionKind::StoreInst:
  case SILInstructionKind::AssignInst:
  case SILInstructionKind::UncheckedTakeEnumDataAddrInst:
  case SILInstructionKind::MarkFunctionEscapeInst:
    writeAccumulator.push_back(op);
    return true;

  // Load/Store variations...
#define NEVER_OR_SOMETIMES_LOADABLE_CHECKED_REF_STORAGE(Name, name, NAME) \
  case SILInstructionKind::Load##Name##Inst: \
    if (cast<Load##Name##Inst>(user)->isTake() == IsTake) { \
      writeAccumulator.push_back(op); \
    } \
    return true; \
 \
  case SILInstructionKind::Store##Name##Inst: \
    writeAccumulator.push_back(op); \
    return true;
#include "swift/AST/ReferenceStorage.def"
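  // For reference, one illustrative expansion of the macro above, with
  // Name = Weak (assuming the usual ReferenceStorage.def entries):
  //
  //   case SILInstructionKind::LoadWeakInst:
  //     if (cast<LoadWeakInst>(user)->isTake() == IsTake) {
  //       writeAccumulator.push_back(op);
  //     }
  //     return true;
  //
  //   case SILInstructionKind::StoreWeakInst:
  //     writeAccumulator.push_back(op);
  //     return true;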
  // Ignored pointer uses...

  // Allow store_borrow within the load_borrow scope.
  // FIXME: explain why.
  case SILInstructionKind::StoreBorrowInst:
  // Returns are never in scope.
  case SILInstructionKind::ReturnInst:
    return true;

  // Reads that may perform a "take"...

  case SILInstructionKind::LoadInst:
    if (cast<LoadInst>(user)->getOwnershipQualifier() ==
        LoadOwnershipQualifier::Take) {
      writeAccumulator.push_back(op);
    }
    return true;
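  // Illustrative distinction for the load case above (hypothetical SIL):
  //
  //   %x = load [take] %addr : $*Klass // moves the value out of memory,
  //                                    // leaving it uninitialized: a write
  //   %y = load [copy] %addr : $*Klass // pure read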
  case SILInstructionKind::UnconditionalCheckedCastAddrInst:
    return true;

  case SILInstructionKind::CheckedCastAddrBranchInst: {
    auto *ccbi = cast<CheckedCastAddrBranchInst>(user);
    if (ccbi->getConsumptionKind() != CastConsumptionKind::CopyOnSuccess) {
      writeAccumulator.push_back(op);
    }
    return true;
  }

  // Conditional writes...

  case SILInstructionKind::CopyAddrInst:
    if (cast<CopyAddrInst>(user)->getDest() == op->get()) {
      writeAccumulator.push_back(op);
      return true;
    }
    // This operand is the copy source. Check if it is taken.
    if (cast<CopyAddrInst>(user)->isTakeOfSrc()) {
      writeAccumulator.push_back(op);
    }
    return true;
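  // Illustrative copy_addr forms and how the case above classifies this
  // operand (hypothetical SIL):
  //
  //   copy_addr %src to %dest        // as %src: read; as %dest: write
  //   copy_addr [take] %src to %dest // as %src: write (take); as %dest: write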
  // If this value is dependent on another, conservatively consider it a write.
  //
  // FIXME: explain why a mark_dependence effectively writes to storage.
  case SILInstructionKind::MarkDependenceInst:
    if (cast<MarkDependenceInst>(user)->getValue() == op->get()) {
      writeAccumulator.push_back(op);
    }
    return true;

  // Check for mutable existentials.
  case SILInstructionKind::OpenExistentialAddrInst:
    if (cast<OpenExistentialAddrInst>(user)->getAccessKind() !=
        OpenedExistentialAccess::Immutable) {
      writeAccumulator.push_back(op);
    }
    return true;

  case SILInstructionKind::BeginAccessInst:
    if (cast<BeginAccessInst>(user)->getAccessKind() != SILAccessKind::Read) {
      writeAccumulator.push_back(op);
    }
    return true;
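  // Illustrative begin_access forms for the case above (hypothetical SIL):
  //
  //   %a = begin_access [read] [static] %addr : $*T   // ignored
  //   %a = begin_access [modify] [static] %addr : $*T // recorded as a write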
  case SILInstructionKind::BuiltinInst:
    if (!cast<BuiltinInst>(user)->mayWriteToMemory()) {
      return true;
    }
    writeAccumulator.push_back(op);
    return true;

  case SILInstructionKind::YieldInst: {
    SILYieldInfo info = cast<YieldInst>(user)->getYieldInfoForOperand(*op);
    if (info.isIndirectInGuaranteed()) {
      return true;
    }
    if (info.isIndirectMutating() || info.isConsumed()) {
      writeAccumulator.push_back(op);
      return true;
    }
    break; // unknown yield convention
  }

  default:
    break;
  } // end switch(user->getKind())

  // If we have a FullApplySite, see if we use the value as an
  // indirect_guaranteed parameter. If we use it as inout, we need
  // interprocedural analysis that we do not perform here.
  if (auto fas = FullApplySite::isa(user)) {
    if (fas.isIndirectResultOperand(*op)) {
      writeAccumulator.push_back(op);
      return true;
    }
    auto argConv = fas.getArgumentConvention(*op);

    // A box or pointer value may be passed directly. Consider that a write.
    if (!argConv.isIndirectConvention()) {
      writeAccumulator.push_back(op);
      return true;
    }
    if (argConv == SILArgumentConvention::Indirect_In_Guaranteed) {
      return true;
    }
    if (argConv.isInoutConvention()) {
      writeAccumulator.push_back(op);
      return true;
    }
    if (argConv.isOwnedConvention()) {
      writeAccumulator.push_back(op);
      return true;
    }
    // Otherwise, be conservative and report a write that we did not
    // understand.
    llvm::errs() << "Full apply site not understood: " << *user;
    return false;
  }
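  // Summary of the convention handling above, for a call such as
  // `apply %f(%addr)` (illustrative):
  //
  //   @in_guaranteed            -> read-only borrow, ignored
  //   @inout / @inout_aliasable -> recorded as a write
  //   @in (owned, consumed)     -> recorded as a write
  //   direct box/pointer value  -> conservatively recorded as a write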
  // Handle a capture-by-address like a write.
  if (auto as = ApplySite::isa(user)) {
    writeAccumulator.push_back(op);
    return true;
  }
  // We don't have an exhaustive list of all use patterns for non-address
  // values. References and pointers can be passed to almost anything that
  // takes a value. We assume that visitAccessPathUses has already looked past
  // operations that can propagate a reference or pointer, and simply check
  // that the leaf use that it returned cannot itself write to memory.
  if (!op->get()->getType().isAddress() && !user->mayWriteToMemory()) {
    return true;
  }
  // If we did not recognize the user, just return conservatively that it was
  // written to in a way we did not understand.
  llvm::errs() << "Function: " << user->getFunction()->getName() << "\n";
  llvm::errs() << "Value: " << op->get();
  llvm::errs() << "Unknown instruction: " << *user;
  llvm::report_fatal_error("Unexpected instruction using borrowed address?!");
  return false;
}

//===----------------------------------------------------------------------===//
//                     Load Borrow Immutability Analysis
//===----------------------------------------------------------------------===//

LoadBorrowImmutabilityAnalysis::LoadBorrowImmutabilityAnalysis(
    DeadEndBlocks &deadEndBlocks, const SILFunction *f)
    : cache(GatherWrites(f)), deadEndBlocks(deadEndBlocks) {}

// The base address of \p accessPath may be an address, pointer, or box type.
bool LoadBorrowImmutabilityAnalysis::isImmutableInScope(
    LoadBorrowInst *lbi, ArrayRef<Operand *> endBorrowUses,
    AccessPath accessPath) {

  SmallPtrSet<SILBasicBlock *, 8> visitedBlocks;
  LinearLifetimeChecker checker(visitedBlocks, deadEndBlocks);
  auto writes = cache.get(accessPath);

  // Treat None as a write.
  if (!writes) {
    llvm::errs() << "Failed to find cached writes for: ";
    accessPath.getStorage().print(llvm::errs());
    return false;
  }
  // Then for each write...
  for (auto *op : *writes) {
    visitedBlocks.clear();

    // First see if the write is in a dead-end block. In such a case, just
    // skip it.
    if (deadEndBlocks.isDeadEnd(op->getUser()->getParent())) {
      continue;
    }
    // See if the write is within the load borrow's lifetime. If it isn't, we
    // don't have to worry about it.
    if (!checker.validateLifetime(lbi, endBorrowUses, op)) {
      continue;
    }
    llvm::errs() << "Write: " << *op->getUser();
    return false;
  }
  // Ok, we are good.
  return true;
}
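// Reading the loop above: validateLifetime() treats the end_borrow operands
// as the ends of lbi's borrow scope and reports whether the write `op` falls
// inside that scope; writes outside the scope, or in dead-end blocks, are
// harmless and skipped.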
//===----------------------------------------------------------------------===//
//                            Top Level Entrypoint
//===----------------------------------------------------------------------===//

bool LoadBorrowImmutabilityAnalysis::isImmutable(LoadBorrowInst *lbi) {
  AccessPath accessPath = AccessPath::computeInScope(lbi->getOperand());
  // Bail on an invalid AccessPath. AccessPath completeness is verified
  // independently--it may be invalid in extraordinary situations. When
  // AccessPath is valid, we know all its uses are recognizable.
  if (!accessPath.isValid()) {
    return true;
  }
  // If we have a let address, then we are already done.
  if (accessPath.getStorage().isLetAccess()) {
    return true;
  }
  // At this point, we know that we /may/ have writes. Now we go through
  // various cases to try and exhaustively identify if those writes overlap
  // with our load_borrow.
  SmallVector<Operand *, 8> endBorrowUses;
  transform(lbi->getUsersOfType<EndBorrowInst>(),
            std::back_inserter(endBorrowUses),
            [](EndBorrowInst *ebi) { return &ebi->getAllOperands()[0]; });

  switch (accessPath.getStorage().getKind()) {
  case AccessedStorage::Nested: {
    // If we have a begin_access and it is not a modify, assume we are
    // correct.
    auto *bai = cast<BeginAccessInst>(accessPath.getStorage().getValue());
    if (bai->getAccessKind() != SILAccessKind::Modify) {
      return true;
    }
    // Otherwise, validate that no writes to our begin_access occur while the
    // load_borrow's result is live.
    //
    // TODO: As a separate analysis, verify that the load_borrow scope is
    // always nested within the begin_access scope (to ensure no aliasing
    // access).
    return isImmutableInScope(lbi, endBorrowUses, accessPath);
  }
  case AccessedStorage::Argument: {
    auto *arg =
        cast<SILFunctionArgument>(accessPath.getStorage().getArgument());
    if (arg->hasConvention(SILArgumentConvention::Indirect_In_Guaranteed)) {
      return true;
    }
    return isImmutableInScope(lbi, endBorrowUses, accessPath);
  }
  // FIXME: A yielded address could overlap with another in this function.
  case AccessedStorage::Yield:
  case AccessedStorage::Stack:
  case AccessedStorage::Box:
  case AccessedStorage::Class:
  case AccessedStorage::Tail:
  case AccessedStorage::Global:
  case AccessedStorage::Unidentified:
    return isImmutableInScope(lbi, endBorrowUses, accessPath);
  }
  llvm_unreachable("Covered switch isn't covered?!");
}

0 commit comments
