Commit 6b1af3d

Merge pull request swiftlang#24458 from atrick/clean-accessed-storage
Remove macros and visitors from AccessedStorage.
2 parents: 7c49272 + 5cd187d

4 files changed: +53, -111 lines


include/swift/SIL/AccessedStorage.def

Lines changed: 0 additions & 39 deletions
This file was deleted.
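The deleted file's contents are not reproduced in this diff, but the ACCESSED_STORAGE and ACCESSED_STORAGE_RANGE hooks visible in the header changes below show it was a standard X-macro (.def) file. A minimal, hypothetical sketch of that pattern, not the verbatim deleted file:

    // Hypothetical X-macro file (illustrative only; the real
    // AccessedStorage.def is deleted by this commit and not shown here).
    #ifndef ACCESSED_STORAGE
    #define ACCESSED_STORAGE(Name)
    #endif

    ACCESSED_STORAGE(Box)
    ACCESSED_STORAGE(Stack)
    // ... one entry per storage kind ...

    #undef ACCESSED_STORAGE

Each includer defines ACCESSED_STORAGE to expand every entry into whatever it needs, e.g. #define ACCESSED_STORAGE(Name) Name, to emit enum cases, which is exactly what the removed lines in MemAccessUtils.h below did.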

include/swift/SIL/MemAccessUtils.h

Lines changed: 9 additions & 25 deletions
@@ -106,11 +106,15 @@ class AccessedStorage {
   /// Enumerate over all valid begin_access bases. Clients can use a covered
   /// switch to warn if findAccessedAddressBase ever adds a case.
   enum Kind : uint8_t {
-#define ACCESSED_STORAGE(Name) Name,
-#define ACCESSED_STORAGE_RANGE(Name, Start, End) \
-  First_##Name = Start, Last_##Name = End,
-#include "swift/SIL/AccessedStorage.def"
-    NumKindBits = countBitsUsed(unsigned(Last_AccessedStorageKind))
+    Box,
+    Stack,
+    Global,
+    Class,
+    Argument,
+    Yield,
+    Nested,
+    Unidentified,
+    NumKindBits = countBitsUsed(static_cast<unsigned>(Unidentified))
   };

   static const char *getKindName(Kind k);

@@ -322,26 +326,6 @@ class AccessedStorage {
   bool operator==(const AccessedStorage &) const = delete;
   bool operator!=(const AccessedStorage &) const = delete;
 };
-
-template <class ImplTy, class ResultTy = void, typename... ArgTys>
-class AccessedStorageVisitor {
-  ImplTy &asImpl() { return static_cast<ImplTy &>(*this); }
-
-public:
-#define ACCESSED_STORAGE(Name) \
-  ResultTy visit##Name(const AccessedStorage &storage, ArgTys &&... args);
-#include "swift/SIL/AccessedStorage.def"
-
-  ResultTy visit(const AccessedStorage &storage, ArgTys &&... args) {
-    switch (storage.getKind()) {
-#define ACCESSED_STORAGE(Name) \
-  case AccessedStorage::Name: \
-    return asImpl().visit##Name(storage, std::forward<ArgTys>(args)...);
-#include "swift/SIL/AccessedStorage.def"
-    }
-  }
-};
-
 } // end namespace swift

 namespace llvm {
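Note: the explicit enumerators keep the exhaustiveness guarantee the macro and visitor machinery used to provide, via the "covered switch" idiom mentioned in the doc comment: a switch over every Kind with no default case draws a compiler warning (clang's -Wswitch family) whenever a new kind is added. A minimal self-contained sketch, using a stand-in enum and function names that are illustrative rather than taken from the Swift sources:

    #include <cstdio>

    // Stand-in for AccessedStorage::Kind (illustrative only).
    enum class Kind : unsigned char {
      Box, Stack, Global, Class, Argument, Yield, Nested, Unidentified
    };

    const char *describe(Kind k) {
      // Covered switch: no default case, so adding a new enumerator
      // makes the compiler flag this switch as incomplete.
      switch (k) {
      case Kind::Box:          return "box storage";
      case Kind::Stack:        return "stack storage";
      case Kind::Global:       return "global storage";
      case Kind::Class:        return "class property storage";
      case Kind::Argument:     return "argument storage";
      case Kind::Yield:        return "yield storage";
      case Kind::Nested:       return "nested access";
      case Kind::Unidentified: return "unidentified storage";
      }
      return "unreachable"; // quiets -Wreturn-type; LLVM code uses llvm_unreachable
    }

    int main() { std::printf("%s\n", describe(Kind::Box)); }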

lib/SIL/MemAccessUtils.cpp

Lines changed: 16 additions & 4 deletions
@@ -140,10 +140,22 @@ const ValueDecl *AccessedStorage::getDecl(SILFunction *F) const {

 const char *AccessedStorage::getKindName(AccessedStorage::Kind k) {
   switch (k) {
-#define ACCESSED_STORAGE(NAME) \
-  case AccessedStorage::NAME: \
-    return #NAME;
-#include "swift/SIL/AccessedStorage.def"
+  case Box:
+    return "Box";
+  case Stack:
+    return "Stack";
+  case Nested:
+    return "Nested";
+  case Unidentified:
+    return "Unidentified";
+  case Argument:
+    return "Argument";
+  case Yield:
+    return "Yield";
+  case Global:
+    return "Global";
+  case Class:
+    return "Class";
   }
   llvm_unreachable("unhandled kind");
 }
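Note: the removed macro generated each case label and its string from a single token via the preprocessor's stringizing operator; the new code simply spells both out. A minimal sketch of that stringizing trick in isolation (Fruit, fruitName, and KIND_CASE are illustrative names, not from the Swift sources):

    #include <cstdio>

    enum Fruit { Apple, Pear };

    // #NAME stringizes the argument: KIND_CASE(Apple) expands to
    //   case Apple: return "Apple";
    #define KIND_CASE(NAME) case NAME: return #NAME;

    const char *fruitName(Fruit f) {
      switch (f) {
        KIND_CASE(Apple)
        KIND_CASE(Pear)
      }
      return "unknown";
    }

    int main() { std::printf("%s\n", fruitName(Pear)); } // prints "Pear"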

lib/SILOptimizer/Mandatory/SemanticARCOpts.cpp

Lines changed: 28 additions & 43 deletions
@@ -339,56 +339,29 @@ bool SemanticARCOptVisitor::visitCopyValueInst(CopyValueInst *cvi) {
 // load [copy] Optimizations
 //===----------------------------------------------------------------------===//

-/// A flow insensitive analysis that tells the load [copy] analysis if the
-/// storage has 0, 1, >1 writes to it.
-///
-/// In the case of 0 writes, we return CanOptimizeLoadCopyResult::Always.
-///
-/// In the case of 1 write, we return OnlyIfStorageIsLocal. We are taking
-/// advantage of definite initialization implying that an alloc_stack must be
-/// written to once before any loads from the memory location. Thus if we are
-/// local and see 1 write, we can still change to load_borrow if all other uses
-/// check out.
-///
-/// If there is 2+ writes, we can not optimize = (.
-namespace {
-
-struct CanOptimizeLoadCopyFromAccessVisitor
-    : AccessedStorageVisitor<CanOptimizeLoadCopyFromAccessVisitor, bool> {
-  SILFunction &f;
-
-  CanOptimizeLoadCopyFromAccessVisitor(SILFunction &f) : f(f) {}
-
-  // Stubs
-  bool visitBox(const AccessedStorage &boxStorage) { return false; }
-  bool visitStack(const AccessedStorage &stackStorage) { return false; }
-  bool visitGlobal(const AccessedStorage &globalStorage) { return false; }
-  bool visitClass(const AccessedStorage &classStorage) { return false; }
-  bool visitYield(const AccessedStorage &yieldStorage) { return false; }
-  bool visitUnidentified(const AccessedStorage &unidentifiedStorage) {
-    return false;
-  }
-  bool visitNested(const AccessedStorage &nested) {
-    llvm_unreachable("Visitor should never see nested since we lookup our "
-                     "address storage using lookup non nested");
-  }
-
-  bool visitArgument(const AccessedStorage &argumentStorage);
-};
-
-} // namespace
+// A flow insensitive analysis that tells the load [copy] analysis if the
+// storage has 0, 1, >1 writes to it.
+//
+// In the case of 0 writes, we return CanOptimizeLoadCopyResult::Always.
+//
+// In the case of 1 write, we return OnlyIfStorageIsLocal. We are taking
+// advantage of definite initialization implying that an alloc_stack must be
+// written to once before any loads from the memory location. Thus if we are
+// local and see 1 write, we can still change to load_borrow if all other uses
+// check out.
+//
+// If there is 2+ writes, we can not optimize = (.

-bool CanOptimizeLoadCopyFromAccessVisitor::visitArgument(
-    const AccessedStorage &storage) {
+bool mayFunctionMutateArgument(const AccessedStorage &storage, SILFunction &f) {
   auto *arg = cast<SILFunctionArgument>(storage.getArgument(&f));

   // Then check if we have an in_guaranteed argument. In this case, we can
   // always optimize load [copy] from this.
   if (arg->hasConvention(SILArgumentConvention::Indirect_In_Guaranteed))
-    return true;
+    return false;

   // For now just return false.
-  return false;
+  return true;
 }

 static bool isWrittenTo(SILFunction &f, SILValue value) {

@@ -402,7 +375,19 @@ static bool isWrittenTo(SILFunction &f, SILValue value) {
   // way (ignoring stores that are obviously the only initializer to
   // memory). We have to do this since load_borrow assumes that the
   // underlying memory is never written to.
-  return !CanOptimizeLoadCopyFromAccessVisitor(f).visit(storage);
+  switch (storage.getKind()) {
+  case AccessedStorage::Box:
+  case AccessedStorage::Stack:
+  case AccessedStorage::Global:
+  case AccessedStorage::Class:
+  case AccessedStorage::Yield:
+  case AccessedStorage::Nested:
+  case AccessedStorage::Unidentified:
+    return true;
+
+  case AccessedStorage::Argument:
+    return mayFunctionMutateArgument(storage, f);
+  }
 }

 // Convert a load [copy] from unique storage [read] that has all uses that can
