|
110 | 110 |
|
111 | 111 | #define DEBUG_TYPE "irgensil" |
112 | 112 |
|
// NOTE(review): removed stray `#pragma clang optimize off` that this commit
// introduced after the DEBUG_TYPE definition. It is a local debugging aid
// (forces -O0 codegen for every function in the rest of this translation
// unit) and must not be merged — it would silently pessimize all of
// IRGenSIL.cpp with no functional benefit to the change being made here.
113 | 115 | using namespace swift; |
114 | 116 | using namespace irgen; |
115 | 117 |
|
@@ -882,6 +884,14 @@ class IRGenSILFunction : |
882 | 884 | return isTaskAlloc; |
883 | 885 | } |
884 | 886 |
|
| 887 | + static bool isCallToMalloc(llvm::Value *val) { |
| 888 | + auto *call = dyn_cast<llvm::CallInst>(val); |
| 889 | + if (!call) |
| 890 | + return false; |
| 891 | + auto *callee = call->getCalledFunction(); |
| 892 | + return callee && callee->getName() == "malloc"; |
| 893 | + } |
| 894 | + |
885 | 895 | static bool isTaskAlloc(llvm::Value *Storage) { |
886 | 896 | while (Storage) { |
887 | 897 | if (auto *LdInst = dyn_cast<llvm::LoadInst>(Storage)) |
@@ -6518,7 +6528,8 @@ void IRGenSILFunction::emitDebugInfoAfterAllocStack(AllocStackInst *i, |
6518 | 6528 |
|
6519 | 6529 | // At this point addr must be an alloca or an undef. |
6520 | 6530 | assert(isa<llvm::AllocaInst>(addr) || isa<llvm::UndefValue>(addr) || |
6521 | | - isa<llvm::IntrinsicInst>(addr) || isCallToSwiftTaskAlloc(addr)); |
| 6531 | + isa<llvm::IntrinsicInst>(addr) || isCallToSwiftTaskAlloc(addr) || |
| 6532 | + isCallToMalloc(addr)); |
6522 | 6533 |
|
6523 | 6534 | auto Indirection = DirectValue; |
6524 | 6535 | if (InCoroContext(*CurSILFn, *i)) |
@@ -6575,7 +6586,8 @@ void IRGenSILFunction::visitAllocStackInst(swift::AllocStackInst *i) { |
6575 | 6586 | DebugTypeInfo DbgTy; |
6576 | 6587 | emitDebugInfoBeforeAllocStack(i, type, DbgTy); |
6577 | 6588 |
|
6578 | | - auto stackAddr = type.allocateStack(*this, i->getElementType(), dbgname); |
| 6589 | + auto stackAddr = type.allocateStack(*this, i->getElementType(), dbgname, |
| 6590 | + i->isStackAllocationNested()); |
6579 | 6591 | setLoweredStackAddress(i, stackAddr); |
6580 | 6592 | Address addr = stackAddr.getAddress(); |
6581 | 6593 |
|
@@ -6668,23 +6680,27 @@ void IRGenSILFunction::visitDeallocStackInst(swift::DeallocStackInst *i) { |
6668 | 6680 | if (auto *closure = dyn_cast<PartialApplyInst>(i->getOperand())) { |
6669 | 6681 | assert(closure->isOnStack()); |
6670 | 6682 | auto stackAddr = LoweredPartialApplyAllocations[i->getOperand()]; |
6671 | | - emitDeallocateDynamicAlloca(stackAddr); |
| 6683 | + emitDeallocateDynamicAlloca(stackAddr, closure->isStackAllocationNested()); |
6672 | 6684 | return; |
6673 | 6685 | } |
6674 | 6686 | if (isaResultOf<BeginApplyInst>(i->getOperand())) { |
6675 | 6687 | auto *mvi = getAsResultOf<BeginApplyInst>(i->getOperand()); |
6676 | 6688 | auto *bai = cast<BeginApplyInst>(mvi->getParent()); |
| 6689 | + // FIXME: [non_nested] |
6677 | 6690 | const auto &coroutine = getLoweredCoroutine(bai->getTokenResult()); |
6678 | 6691 | emitDeallocYieldOnce2CoroutineFrame(*this, |
6679 | 6692 | coroutine.getCalleeAllocatedFrame()); |
6680 | 6693 | return; |
6681 | 6694 | } |
6682 | 6695 |
|
6683 | | - auto allocatedType = i->getOperand()->getType(); |
| 6696 | + auto *asi = cast<AllocStackInst>(i->getOperand()); |
| 6697 | + |
| 6698 | + auto allocatedType = asi->getType(); |
6684 | 6699 | const TypeInfo &allocatedTI = getTypeInfo(allocatedType); |
6685 | | - StackAddress stackAddr = getLoweredStackAddress(i->getOperand()); |
| 6700 | + StackAddress stackAddr = getLoweredStackAddress(asi); |
| 6701 | + auto isNested = asi->isStackAllocationNested(); |
6686 | 6702 |
|
6687 | | - allocatedTI.deallocateStack(*this, stackAddr, allocatedType); |
| 6703 | + allocatedTI.deallocateStack(*this, stackAddr, allocatedType, isNested); |
6688 | 6704 | } |
6689 | 6705 |
|
6690 | 6706 | void IRGenSILFunction::visitDeallocStackRefInst(DeallocStackRefInst *i) { |
|
0 commit comments