Skip to content

Commit e527d8b

Browse files
committed
[AddressLowering] Fix indirect coro yield handling
The begin_apply instruction introduces storage which is only valid until the coroutine is ended via end_apply/abort_apply. Previously, AddressLowering assumed the memory was valid "as long as it needed to be": either as an argument to the function being lowered (so valid for the whole lifetime) or as a new alloc_stack (whose lifetime is determined by AddressLowering itself). The result was that the storage made available by the begin_apply was used after the end_apply/abort_apply when there were uses of [a copy of] [a projection of] the yield after the end_apply/abort_apply. Here, the behavior is fixed. Now, the storage is used "as soon as possible". There are two cases: (1) If an indirect value's ownership is transferred into the caller (i.e. its convention is `@in` or `@in_constant`), the value is `copy_addr [take]`n into caller-local storage when lowering the begin_apply. (2) If an indirect value's ownership is only lent to the caller (i.e. its convention is `@in_guaranteed`), no equivalent operation could be done: there's no `copy_addr [borrow]`; on the other hand, there's no need to do it--uses of the value must have occurred within the coroutine's range. Instead, it's necessary to disable the store-copy optimization in this case: storage must be allocated for a copy of [a projection of] the yielded value.
1 parent 8617a5d commit e527d8b

File tree

2 files changed

+175
-6
lines changed

2 files changed

+175
-6
lines changed

lib/SILOptimizer/Mandatory/AddressLowering.cpp

Lines changed: 57 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -141,6 +141,7 @@
141141
#include "swift/SIL/BasicBlockUtils.h"
142142
#include "swift/SIL/DebugUtils.h"
143143
#include "swift/SIL/DynamicCasts.h"
144+
#include "swift/SIL/MemAccessUtils.h"
144145
#include "swift/SIL/OwnershipUtils.h"
145146
#include "swift/SIL/PrettyStackTrace.h"
146147
#include "swift/SIL/PrunedLiveness.h"
@@ -156,6 +157,7 @@
156157
#include "swift/SILOptimizer/Utils/InstructionDeleter.h"
157158
#include "swift/SILOptimizer/Utils/StackNesting.h"
158159
#include "llvm/ADT/DenseMap.h"
160+
#include "llvm/ADT/STLExtras.h"
159161
#include "llvm/ADT/SetVector.h"
160162
#include "llvm/Support/CommandLine.h"
161163
#include "llvm/Support/Debug.h"
@@ -344,6 +346,22 @@ static bool isStoreCopy(SILValue value) {
344346
if (!copyInst->hasOneUse())
345347
return false;
346348

349+
auto source = copyInst->getOperand();
350+
if (source->getOwnershipKind() == OwnershipKind::Guaranteed) {
351+
// [in_guaranteed_begin_apply_results] If any root of the source is a
352+
// begin_apply, we can't rely on projecting from the (rewritten) source:
353+
// The store may not be in the coroutine's range. The result would be
354+
// attempting to access invalid storage.
355+
SmallVector<SILValue, 4> roots;
356+
findGuaranteedReferenceRoots(source, /*lookThroughNestedBorrows=*/true,
357+
roots);
358+
if (llvm::any_of(roots, [](SILValue root) {
359+
return isa<BeginApplyInst>(root->getDefiningInstruction());
360+
})) {
361+
return false;
362+
}
363+
}
364+
347365
auto *user = value->getSingleUse()->getUser();
348366
return isa<StoreInst>(user);
349367
}
@@ -928,8 +946,19 @@ static bool doesNotNeedStackAllocation(SILValue value) {
928946
auto *defInst = value->getDefiningInstruction();
929947
if (!defInst)
930948
return false;
931-
932-
if (isa<LoadBorrowInst>(defInst) || isa<BeginApplyInst>(defInst))
949+
// [in_guaranteed_begin_apply_results] OSSA ensures that every use of a
950+
// guaranteed value resulting from a begin_apply will occur in the
951+
// coroutine's range (i.e. "before" the end_apply/abort_apply).
952+
// AddressLowering takes advantage of this lack of uses outside of the
953+
// coroutine's range to directly use the storage that is yielded by the
954+
// coroutine rather than moving it to local storage.
955+
//
956+
// It is, however, valid in OSSA to have uses of an owned value produced by a
957+
// begin_apply outside of the coroutine range. So in that case, it is
958+
// necessary to introduce new storage and move to it.
959+
if (isa<LoadBorrowInst>(defInst) ||
960+
(isa<BeginApplyInst>(defInst) &&
961+
value.getOwnershipKind() == OwnershipKind::Guaranteed))
933962
return true;
934963

935964
return false;
@@ -2223,9 +2252,32 @@ void ApplyRewriter::convertBeginApplyWithOpaqueYield() {
22232252
continue;
22242253
}
22252254
if (oldResult.getType().isAddressOnly(*pass.function)) {
2226-
// Remap storage when an address-only type is yielded as an opaque value.
2227-
pass.valueStorageMap.setStorageAddress(&oldResult, &newResult);
2228-
pass.valueStorageMap.getStorage(&oldResult).markRewritten();
2255+
auto info = newCall->getSubstCalleeConv().getYieldInfoForOperandIndex(i);
2256+
assert(info.isFormalIndirect());
2257+
if (info.isConsumed()) {
2258+
// Because it is legal to have uses of an owned value produced by a
2259+
// begin_apply after a coroutine's range, AddressLowering must move the
2260+
// value into local storage so that such out-of-coroutine-range uses can
2261+
// be rewritten in terms of that address (instead of being rewritten in
2262+
// terms of the yielded owned storage which is no longer valid beyond
2263+
// the coroutine's range).
2264+
auto &storage = pass.valueStorageMap.getStorage(&oldResult);
2265+
auto destAddr = addrMat.materializeAddress(&oldResult);
2266+
storage.storageAddress = destAddr;
2267+
storage.markRewritten();
2268+
resultBuilder.createCopyAddr(callLoc, &newResult, destAddr,
2269+
info.isConsumed() ? IsTake : IsNotTake,
2270+
IsInitialization);
2271+
} else {
2272+
// [in_guaranteed_begin_apply_results] Because OSSA ensures that all uses
2273+
// of a guaranteed value produced by a begin_apply are used within the
2274+
// coroutine's range, AddressLowering will not introduce uses of
2275+
// invalid memory by rewriting the uses of a yielded guaranteed opaque
2276+
// value as uses of yielded guaranteed storage. However, it must
2277+
// allocate storage for copies of [projections of] such values.
2278+
pass.valueStorageMap.setStorageAddress(&oldResult, &newResult);
2279+
pass.valueStorageMap.getStorage(&oldResult).markRewritten();
2280+
}
22292281
continue;
22302282
}
22312283
assert(oldResult.getType().isObject());

test/SILOptimizer/address_lowering.sil

Lines changed: 118 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1648,8 +1648,8 @@ entry:
16481648
// CHECK: {{bb[0-9]+}}({{%[^,]+}} : $*Klass, [[OPAQUE_OUT:%[^,]+]] : $*T):
16491649
// CHECK: ([[ADDR_1:%[^,]+]], [[ADDR_2:%[^,]+]], {{%[^,]+}}) = begin_apply
16501650
// CHECK: [[INSTANCE_1:%[^,]+]] = load [take] [[ADDR_1]]
1651-
// CHECK: begin_borrow [[INSTANCE_1]]
16521651
// CHECK: copy_addr [take] [[ADDR_2]] to [init] [[OPAQUE_OUT]]
1652+
// CHECK: begin_borrow [[INSTANCE_1]]
16531653
// CHECK-LABEL: } // end sil function 'testBeginApplyCYield1LoadableNontrivialOwned1OpaqueOwned'
16541654
sil [ossa] @testBeginApplyCYield1LoadableNontrivialOwned1OpaqueOwned : $@convention(thin) <T> () -> (@out Klass, @out T) {
16551655
entry:
@@ -1710,6 +1710,123 @@ entry:
17101710
return %retval : $T
17111711
}
17121712

1713+
// Verify that a copy_value of a @in_guaranteed yield generates storage.
1714+
// CHECK-LABEL: sil [ossa] @testBeginApplyGCopyConsumeInGuaranteedValue : {{.*}} {
1715+
// CHECK: bb0:
1716+
// CHECK: [[COPY_STORAGE:%[^,]+]] = alloc_stack $T
1717+
// CHECK: ([[YIELD_STORAGE:%[^,]+]], [[TOKEN:%[^,]+]]) = begin_apply
1718+
// CHECK: copy_addr [[YIELD_STORAGE]] to [init] [[COPY_STORAGE]]
1719+
// CHECK: end_apply [[TOKEN]]
1720+
// CHECK: apply undef<T>([[COPY_STORAGE]])
1721+
// CHECK: dealloc_stack [[COPY_STORAGE]]
1722+
// CHECK-LABEL: } // end sil function 'testBeginApplyGCopyConsumeInGuaranteedValue'
1723+
sil [ossa] @testBeginApplyGCopyConsumeInGuaranteedValue : $@convention(thin) <T> () -> () {
1724+
entry:
1725+
(%yield, %token) = begin_apply undef<T>() : $@yield_once @convention(thin) <τ_0_0> () -> @yields @in_guaranteed τ_0_0
1726+
%copy = copy_value %yield : $T
1727+
end_apply %token
1728+
apply undef<T>(%copy) : $@convention(thin) <T> (@in T) -> ()
1729+
%retval = tuple ()
1730+
return %retval : $()
1731+
}
1732+
1733+
// Verify that a copy_value that is a "copy-store" whose source is an
1734+
// @in_guaranteed storage generates storage.
1735+
// CHECK-LABEL: sil [ossa] @testBeginApplyH1CopyStoreInGuaranteedValue : {{.*}} {
1736+
// CHECK: [[COPY_STORAGE:%[^,]+]] = alloc_stack $T
1737+
// CHECK: ([[YIELD_STORAGE:%[^,]+]], [[TOKEN:%[^,]+]]) = begin_apply
1738+
// CHECK: copy_addr [[YIELD_STORAGE]] to [init] [[COPY_STORAGE]]
1739+
// CHECK: end_apply [[TOKEN]]
1740+
// CHECK: [[PTR:%[^,]+]] = apply undef
1741+
// CHECK: [[ADDR:%[^,]+]] = pointer_to_address [[PTR:%[^,]+]]
1742+
// CHECK: copy_addr [take] [[COPY_STORAGE]] to [[ADDR]]
1743+
// CHECK-LABEL: } // end sil function 'testBeginApplyH1CopyStoreInGuaranteedValue'
1744+
sil [ossa] @testBeginApplyH1CopyStoreInGuaranteedValue : $@convention(thin) <T> () -> () {
1745+
entry:
1746+
(%yield, %token) = begin_apply undef<T>() : $@yield_once @convention(thin) <τ_0_0> () -> @yields @in_guaranteed τ_0_0
1747+
%copy = copy_value %yield : $T
1748+
end_apply %token
1749+
%ptr = apply undef<T>() : $@convention(method) <τ_0_0> () -> Builtin.RawPointer
1750+
%addr = pointer_to_address %ptr : $Builtin.RawPointer to [strict] $*T
1751+
store %copy to [assign] %addr : $*T
1752+
%retval = tuple ()
1753+
return %retval : $()
1754+
}
1755+
1756+
// sil [ossa] @testBeginApplyIConsumeInValue : {{.*}} {
1757+
// bb0:
1758+
// [[IN_STORAGE:%[^,]+]] = alloc_stack $T
1759+
// ([[YIELD_STORAGE:%[^,]+]], [[TOKEN:%[^,]+]]) = begin_apply undef<T>()
1760+
// copy_addr [take] [[YIELD_STORAGE]] to [init] [[IN_STORAGE]]
1761+
// end_apply [[TOKEN]]
1762+
// apply undef<T>([[IN_STORAGE]])
1763+
// } // end sil function 'testBeginApplyIConsumeInValue'
1764+
sil [ossa] @testBeginApplyIConsumeInValue : $@convention(thin) <T> () -> () {
1765+
entry:
1766+
(%yield, %token) = begin_apply undef<T>() : $@yield_once @convention(thin) <τ_0_0> () -> @yields @in τ_0_0
1767+
end_apply %token
1768+
apply undef<T>(%yield) : $@convention(thin) <T> (@in T) -> ()
1769+
%retval = tuple ()
1770+
return %retval : $()
1771+
}
1772+
1773+
// sil [ossa] @testBeginApplyJStoreInValue : {{.*}} {
1774+
// [[IN_STORAGE:%[^,]+]] = alloc_stack $T
1775+
// ([[YIELD_STORAGE:%[^,]+]], [[TOKEN:%[^,]+]]) = begin_apply
1776+
// copy_addr [take] [[YIELD_STORAGE]] to [init] [[IN_STORAGE]]
1777+
// end_apply [[TOKEN]]
1778+
// [[PTR:%[^,]+]] = apply
1779+
// [[ADDR:%[^,]+]] = pointer_to_address [[PTR:%[^,]+]]
1780+
// copy_addr [take] [[IN_STORAGE]] to [[ADDR]]
1781+
// } // end sil function 'testBeginApplyJStoreInValue'
1782+
sil [ossa] @testBeginApplyJStoreInValue : $@convention(thin) <T> () -> () {
1783+
entry:
1784+
(%yield, %token) = begin_apply undef<T>() : $@yield_once @convention(thin) <τ_0_0> () -> @yields @in τ_0_0
1785+
end_apply %token
1786+
%ptr = apply undef<T>() : $@convention(method) <τ_0_0> () -> Builtin.RawPointer
1787+
%addr = pointer_to_address %ptr : $Builtin.RawPointer to [strict] $*T
1788+
store %yield to [assign] %addr : $*T
1789+
%retval = tuple ()
1790+
return %retval : $()
1791+
}
1792+
1793+
// sil [ossa] @testBeginApplyKConsumeInConstantValue : {{.*}} {
1794+
// bb0:
1795+
// [[IN_CONSTANT_STORAGE:%[^,]+]] = alloc_stack $T
1796+
// ([[YIELD_STORAGE:%[^,]+]], [[TOKEN:%[^,]+]]) = begin_apply undef<T>()
1797+
// copy_addr [take] [[YIELD_STORAGE]] to [init] [[IN_CONSTANT_STORAGE]]
1798+
// end_apply [[TOKEN]]
1799+
// apply undef<T>([[IN_CONSTANT_STORAGE]])
1800+
// } // end sil function 'testBeginApplyKConsumeInConstantValue'
1801+
sil [ossa] @testBeginApplyKConsumeInConstantValue : $@convention(thin) <T> () -> () {
1802+
entry:
1803+
(%yield, %token) = begin_apply undef<T>() : $@yield_once @convention(thin) <τ_0_0> () -> @yields @in_constant τ_0_0
1804+
end_apply %token
1805+
apply undef<T>(%yield) : $@convention(thin) <T> (@in_constant T) -> ()
1806+
%retval = tuple ()
1807+
return %retval : $()
1808+
}
1809+
1810+
// sil [ossa] @testBeginApplyLStoreInConstantValue : {{.*}} {
1811+
// [[IN_CONSTANT_STORAGE:%[^,]+]] = alloc_stack $T
1812+
// ([[YIELD_STORAGE:%[^,]+]], [[TOKEN:%[^,]+]]) = begin_apply
1813+
// copy_addr [take] [[YIELD_STORAGE]] to [init] [[IN_CONSTANT_STORAGE]]
1814+
// end_apply [[TOKEN]]
1815+
// [[PTR:%[^,]+]] = apply
1816+
// [[ADDR:%[^,]+]] = pointer_to_address [[PTR:%[^,]+]]
1817+
// copy_addr [take] [[IN_CONSTANT_STORAGE]] to [[ADDR]]
1818+
// } // end sil function 'testBeginApplyLStoreInConstantValue'
1819+
sil [ossa] @testBeginApplyLStoreInConstantValue : $@convention(thin) <T> () -> () {
1820+
entry:
1821+
(%yield, %token) = begin_apply undef<T>() : $@yield_once @convention(thin) <τ_0_0> () -> @yields @in_constant τ_0_0
1822+
end_apply %token
1823+
%ptr = apply undef<T>() : $@convention(method) <τ_0_0> () -> Builtin.RawPointer
1824+
%addr = pointer_to_address %ptr : $Builtin.RawPointer to [strict] $*T
1825+
store %yield to [assign] %addr : $*T
1826+
%retval = tuple ()
1827+
return %retval : $()
1828+
}
1829+
17131830
// CHECK-LABEL: sil hidden [ossa] @testOpaqueYield :
17141831
// CHECK: bb0(%0 : @guaranteed $TestGeneric<T>):
17151832
// CHECK: [[REF:%.*]] = ref_element_addr %0 : $TestGeneric<T>, #TestGeneric.borrowedGeneric

0 commit comments

Comments
 (0)