diff --git a/include/swift/AST/SILLayout.h b/include/swift/AST/SILLayout.h
index 63c486e579263..c19a07461f86b 100644
--- a/include/swift/AST/SILLayout.h
+++ b/include/swift/AST/SILLayout.h
@@ -79,6 +79,11 @@ class SILField final {
   /// destroying the old value and emplacing a new value with the modified
   /// field in the same place.
   bool isMutable() const { return LoweredTypeAndFlags.getInt() & IsMutable; }
+
+  /// Change the current SILField's mutability flag.
+  void setIsMutable(bool newValue) {
+    LoweredTypeAndFlags = {getLoweredType(), getFlagsValue(newValue)};
+  }
 };
 
 /// A layout.
@@ -133,14 +138,25 @@ class SILLayout final : public llvm::FoldingSetNode,
   bool isMutable() const { return GenericSigAndFlags.getInt() & IsMutable; }
-
+
+  /// Returns a SILLayout that is the same as the current layout, except that
+  /// each field whose index appears in \p fieldIndexMutabilityUpdatePairs has
+  /// its mutability set to the bool paired with that index.
+  SILLayout *withMutable(ASTContext &ctx,
+                         std::initializer_list<std::pair<unsigned, bool>>
+                             fieldIndexMutabilityUpdatePairs) const;
+
   /// True if the layout captures the generic arguments it is substituted with
   /// and can provide generic bindings when passed as a closure argument.
   bool capturesGenericEnvironment() const {
     return GenericSigAndFlags.getInt() & CapturesGenericEnvironment;
   }
-
+  /// Get the fields inside the layout.
+  ///
+  /// NOTE: The types inside the fields have not been specialized for the given
+  /// environment.
   ArrayRef<SILField> getFields() const { return getTrailingObjects(NumFields); }
 
   /// Produce a profile of this layout, for use in a folding set.
diff --git a/include/swift/AST/Types.h b/include/swift/AST/Types.h
index a990ebe24b952..f2836a60647e9 100644
--- a/include/swift/AST/Types.h
+++ b/include/swift/AST/Types.h
@@ -6167,6 +6167,7 @@ DEFINE_EMPTY_CAN_TYPE_WRAPPER(SILFunctionType, Type)
 class SILBoxType;
 class SILLayout; // From SIL
 class SILModule; // From SIL
+class SILField; // From SIL
 typedef CanTypeWrapper<SILBoxType> CanSILBoxType;
 
 /// The SIL-only type for boxes, which represent a reference to a (non-class)
@@ -6187,6 +6188,52 @@ class SILBoxType final : public TypeBase, public llvm::FoldingSetNode
   SILLayout *getLayout() const { return Layout; }
   SubstitutionMap getSubstitutions() const { return Substitutions; }
 
+  /// Return the fields of this type from its layout.
+  ///
+  /// NOTE: These field types have not been specialized for the SILFunction in
+  /// which the box appears or for the substitutions stored in this type's
+  /// substitution map. To get SILTypes for the fields that can be used
+  /// directly in SIL, call getFieldType below, which performs the relevant
+  /// specialization for you.
+  ArrayRef<SILField> getFields() const;
+
+  /// Return the SILType of the field of the layout of the SILBoxType.
+  ///
+  /// NOTE: This ensures that the type is properly specialized both for the
+  /// substitutions of this type and the relevant SILFunction.
+  ///
+  /// Defined in SILType.cpp.
+  SILType getFieldType(SILFunction &fn, unsigned index);
+
+  /// Returns the number of fields in the box type's layout.
+  unsigned getNumFields() const { return getFields().size(); }
+
+  /// Returns true if the given field in the box is mutable. Returns false
+  /// otherwise.
+  bool isFieldMutable(unsigned index) const;
+
+  /// Returns a SILBoxType that is the same as the current box type, except
+  /// that each field whose index appears in \p fieldIndexMutabilityUpdatePairs
+  /// has its mutability set to the bool paired with that index.
+  CanSILBoxType withMutable(ASTContext &ctx,
+                            std::initializer_list<std::pair<unsigned, bool>>
+                                fieldIndexMutabilityUpdatePairs) const;
+
+  using SILFieldIndexToSILTypeTransform = std::function<SILType(unsigned)>;
+  using SILFieldToSILTypeRange =
+      iterator_range<llvm::mapped_iterator<IntRange<unsigned>::iterator,
+                                           SILFieldIndexToSILTypeTransform>>;
+
+  /// Returns a range of SILTypes that have been specialized correctly for use
+  /// in the passed-in SILFunction.
+  ///
+  /// DISCUSSION: The inner range is an IntRange since the inner API that we
+  /// use to transform fields is defined in terms of indices.
+  ///
+  /// Defined in SILType.cpp.
+  SILFieldToSILTypeRange getSILFieldTypes(SILFunction &fn);
+
   // TODO: SILBoxTypes should be explicitly constructed in terms of specific
   // layouts. As a staging mechanism, we expose the old single-boxed-type
   // interface.
diff --git a/include/swift/SIL/OperandDatastructures.h b/include/swift/SIL/OperandDatastructures.h
index 91e74ecbdd997..18b1fd99b2093 100644
--- a/include/swift/SIL/OperandDatastructures.h
+++ b/include/swift/SIL/OperandDatastructures.h
@@ -102,6 +102,19 @@ class OperandWorklist {
     return false;
   }
 
+  /// Pushes the operands of all uses of \p value onto the worklist if the
+  /// operands have never been pushed before. Returns \p true if we inserted
+  /// /any/ values.
+  ///
+  /// This is a bulk convenience API.
+  bool pushResultOperandsIfNotVisited(SILValue value) {
+    bool insertedOperand = false;
+    for (auto *use : value->getUses()) {
+      insertedOperand |= pushIfNotVisited(use);
+    }
+    return insertedOperand;
+  }
+
   /// Pushes the operands of all uses of \p instruction onto the worklist if the
   /// operands have never been pushed before. Returns \p true if we inserted
   /// /any/ values.
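As a rough usage sketch for the SILBoxType accessors added in Types.h above (this sketch is not part of the patch; it assumes an ASTContext ctx, the enclosing SILFunction fn, and a CanSILBoxType boxTy are already in hand):

    // Assumed to be available: ASTContext &ctx, SILFunction &fn, CanSILBoxType boxTy.
    if (boxTy->getNumFields() == 1 && boxTy->isFieldMutable(0)) {
      // The field's SILType, specialized for fn's type expansion context and
      // for the box's substitutions.
      SILType fieldTy = boxTy->getFieldType(fn, 0);
      (void)fieldTy;

      // An equivalent box type whose single field is marked immutable.
      CanSILBoxType immutableBoxTy =
          boxTy->withMutable(ctx, {{0, /*isMutable=*/false}});
      (void)immutableBoxTy;
    }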
@@ -110,9 +123,7 @@ class OperandWorklist { bool pushResultOperandsIfNotVisited(SILInstruction *inst) { bool insertedOperand = false; for (auto result : inst->getResults()) { - for (auto *use : result->getUses()) { - insertedOperand |= pushIfNotVisited(use); - } + insertedOperand |= pushResultOperandsIfNotVisited(result); } return insertedOperand; } diff --git a/include/swift/SIL/SILArgument.h b/include/swift/SIL/SILArgument.h index d64e8d41206b4..9a70417c92243 100644 --- a/include/swift/SIL/SILArgument.h +++ b/include/swift/SIL/SILArgument.h @@ -366,13 +366,15 @@ class SILFunctionArgument : public SILArgument { ValueOwnershipKind ownershipKind, ValueDecl *decl = nullptr, bool isNoImplicitCopy = false, LifetimeAnnotation lifetimeAnnotation = LifetimeAnnotation::None, - bool isCapture = false, bool isParameterPack = false) + bool isCapture = false, bool isParameterPack = false, + bool isInferredImmutable = false) : SILArgument(ValueKind::SILFunctionArgument, parentBlock, type, ownershipKind, decl) { sharedUInt32().SILFunctionArgument.noImplicitCopy = isNoImplicitCopy; sharedUInt32().SILFunctionArgument.lifetimeAnnotation = lifetimeAnnotation; sharedUInt32().SILFunctionArgument.closureCapture = isCapture; sharedUInt32().SILFunctionArgument.parameterPack = isParameterPack; + sharedUInt32().SILFunctionArgument.inferredImmutable = isInferredImmutable; } // A special constructor, only intended for use in @@ -415,6 +417,16 @@ class SILFunctionArgument : public SILArgument { sharedUInt32().SILFunctionArgument.parameterPack = isPack; } + /// Returns true if this argument is inferred to be immutable. + bool isInferredImmutable() const { + return sharedUInt32().SILFunctionArgument.inferredImmutable; + } + + /// Set whether this argument is inferred to be immutable. 
+ void setInferredImmutable(bool newValue) { + sharedUInt32().SILFunctionArgument.inferredImmutable = newValue; + } + LifetimeAnnotation getLifetimeAnnotation() const { return LifetimeAnnotation::Case( sharedUInt32().SILFunctionArgument.lifetimeAnnotation); @@ -472,6 +484,7 @@ class SILFunctionArgument : public SILArgument { setLifetimeAnnotation(arg->getLifetimeAnnotation()); setClosureCapture(arg->isClosureCapture()); setFormalParameterPack(arg->isFormalParameterPack()); + setInferredImmutable(arg->isInferredImmutable()); } static bool classof(const SILInstruction *) = delete; diff --git a/include/swift/SIL/SILBuilder.h b/include/swift/SIL/SILBuilder.h index dde860ce12daa..bc642627565ab 100644 --- a/include/swift/SIL/SILBuilder.h +++ b/include/swift/SIL/SILBuilder.h @@ -490,12 +490,12 @@ class SILBuilder { bool reflection = false, UsesMoveableValueDebugInfo_t usesMoveableValueDebugInfo = DoesNotUseMoveableValueDebugInfo, - HasPointerEscape_t hasPointerEscape = DoesNotHavePointerEscape) { - return createAllocBox(loc, SILBoxType::get(fieldType.getASTType()), Var, - hasDynamicLifetime, reflection, - usesMoveableValueDebugInfo, - /*skipVarDeclAssert*/ false, - hasPointerEscape); + HasPointerEscape_t hasPointerEscape = DoesNotHavePointerEscape, + bool inferredImmutable = false) { + return createAllocBox( + loc, SILBoxType::get(fieldType.getASTType()), Var, hasDynamicLifetime, + reflection, usesMoveableValueDebugInfo, + /*skipVarDeclAssert*/ false, hasPointerEscape, inferredImmutable); } AllocBoxInst *createAllocBox( @@ -506,7 +506,8 @@ class SILBuilder { UsesMoveableValueDebugInfo_t usesMoveableValueDebugInfo = DoesNotUseMoveableValueDebugInfo, bool skipVarDeclAssert = false, - HasPointerEscape_t hasPointerEscape = DoesNotHavePointerEscape) { + HasPointerEscape_t hasPointerEscape = DoesNotHavePointerEscape, + bool inferredImmutable = false) { #if NDEBUG (void)skipVarDeclAssert; #endif @@ -521,7 +522,7 @@ class SILBuilder { return insert(AllocBoxInst::create( getSILDebugLocation(Loc, true), BoxType, *F, substituteAnonymousArgs(Name, Var, Loc), hasDynamicLifetime, reflection, - usesMoveableValueDebugInfo, hasPointerEscape)); + usesMoveableValueDebugInfo, hasPointerEscape, inferredImmutable)); } AllocExistentialBoxInst * diff --git a/include/swift/SIL/SILCloner.h b/include/swift/SIL/SILCloner.h index ee8eff68168b8..95249db57d086 100644 --- a/include/swift/SIL/SILCloner.h +++ b/include/swift/SIL/SILCloner.h @@ -1156,7 +1156,8 @@ SILCloner::visitAllocBoxInst(AllocBoxInst *Inst) { Loc, this->getOpType(Inst->getType()).template castTo(), VarInfo, Inst->hasDynamicLifetime(), Inst->emitReflectionMetadata(), Inst->usesMoveableValueDebugInfo(), - /*skipVarDeclAssert*/ true, Inst->hasPointerEscape())); + /*skipVarDeclAssert*/ true, Inst->hasPointerEscape(), + Inst->isInferredImmutable())); } template diff --git a/include/swift/SIL/SILInstruction.h b/include/swift/SIL/SILInstruction.h index a24c08d0e3fd9..3024232cb2144 100644 --- a/include/swift/SIL/SILInstruction.h +++ b/include/swift/SIL/SILInstruction.h @@ -2527,14 +2527,16 @@ class AllocBoxInst final HasDynamicLifetime_t hasDynamicLifetime, bool reflection = false, UsesMoveableValueDebugInfo_t usesMoveableValueDebugInfo = DoesNotUseMoveableValueDebugInfo, - HasPointerEscape_t hasPointerEscape = DoesNotHavePointerEscape); + HasPointerEscape_t hasPointerEscape = DoesNotHavePointerEscape, + bool inferredImmutable = false); static AllocBoxInst *create( SILDebugLocation Loc, CanSILBoxType boxType, SILFunction &F, std::optional Var, HasDynamicLifetime_t 
hasDynamicLifetime, bool reflection = false, UsesMoveableValueDebugInfo_t wasMoved = DoesNotUseMoveableValueDebugInfo, - HasPointerEscape_t hasPointerEscape = DoesNotHavePointerEscape); + HasPointerEscape_t hasPointerEscape = DoesNotHavePointerEscape, + bool inferredImmutable = false); public: CanSILBoxType getBoxType() const { @@ -2557,6 +2559,14 @@ class AllocBoxInst final return HasPointerEscape_t(sharedUInt8().AllocBoxInst.pointerEscape); } + void setInferredImmutable(bool value) { + sharedUInt8().AllocBoxInst.inferredImmutable = value; + } + + bool isInferredImmutable() const { + return sharedUInt8().AllocBoxInst.inferredImmutable; + } + /// True if the box should be emitted with reflection metadata for its /// contents. bool emitReflectionMetadata() const { diff --git a/include/swift/SIL/SILNode.h b/include/swift/SIL/SILNode.h index 8cf66fbf9902c..3087115b40948 100644 --- a/include/swift/SIL/SILNode.h +++ b/include/swift/SIL/SILNode.h @@ -236,7 +236,8 @@ class alignas(8) SILNode : dynamicLifetime : 1, reflection : 1, usesMoveableValueDebugInfo : 1, - pointerEscape : 1); + pointerEscape : 1, + inferredImmutable : 1); SHARED_FIELD(AllocRefInstBase, uint8_t objC : 1, @@ -318,7 +319,7 @@ class alignas(8) SILNode : SHARED_FIELD(PointerToAddressInst, uint32_t alignment); SHARED_FIELD(SILFunctionArgument, uint32_t noImplicitCopy : 1, lifetimeAnnotation : 2, closureCapture : 1, - parameterPack : 1); + parameterPack : 1, inferredImmutable : 1); SHARED_FIELD(MergeRegionIsolationInst, uint32_t numOperands); // Do not use `_sharedUInt32_private` outside of SILNode. diff --git a/include/swift/SILOptimizer/PassManager/Passes.def b/include/swift/SILOptimizer/PassManager/Passes.def index 75c46153e5473..4933c2923c83a 100644 --- a/include/swift/SILOptimizer/PassManager/Passes.def +++ b/include/swift/SILOptimizer/PassManager/Passes.def @@ -485,6 +485,8 @@ LEGACY_PASS(DiagnoseUnnecessaryPreconcurrencyImports, "sil-diagnose-unnecessary- "Diagnose any preconcurrency imports that Sema and TransferNonSendable did not use") LEGACY_PASS(ThunkLowering, "sil-thunk-lowering", "Lower thunk instructions to actual thunks") +LEGACY_PASS(MarkNeverWrittenMutableClosureBoxesAsImmutable, "mark-never-written-mutable-closure-boxes-as-immutable", + "Mark never written mutable closure boxes as immutable") LEGACY_PASS(PruneVTables, "prune-vtables", "Mark class methods that do not require vtable dispatch") diff --git a/include/swift/SILOptimizer/Utils/SILIsolationInfo.h b/include/swift/SILOptimizer/Utils/SILIsolationInfo.h index ecf4338b0e0ed..0cc7aea0d828f 100644 --- a/include/swift/SILOptimizer/Utils/SILIsolationInfo.h +++ b/include/swift/SILOptimizer/Utils/SILIsolationInfo.h @@ -568,6 +568,7 @@ class SILIsolationInfo { /// SILIsolationInfo. static SILIsolationInfo getFunctionIsolation(SILFunction *fn); +private: /// A helper that is used to ensure that we treat certain builtin values as /// non-Sendable that the AST level otherwise thinks are non-Sendable. 
   ///
@@ -580,12 +581,20 @@ class SILIsolationInfo {
     return !isNonSendableType(type, fn);
   }
 
-  static bool isNonSendableType(SILValue value) {
-    return isNonSendableType(value->getType(), value->getFunction());
+public:
+  static bool isSendable(SILValue value);
+
+  static bool isNonSendable(SILValue value) { return !isSendable(value); }
+
+  static bool boxContainsOnlySendableFields(AllocBoxInst *abi) {
+    return boxTypeContainsOnlySendableFields(abi->getBoxType(),
+                                             abi->getFunction());
   }
 
-  static bool isSendableType(SILValue value) {
-    return !isNonSendableType(value);
+  static bool boxTypeContainsOnlySendableFields(CanSILBoxType boxType,
+                                                SILFunction *fn) {
+    return llvm::all_of(boxType->getSILFieldTypes(*fn),
+                        [&](SILType type) { return isSendableType(type, fn); });
   }
 
   bool hasSameIsolation(ActorIsolation actorIsolation) const;
diff --git a/lib/AST/ASTContext.cpp b/lib/AST/ASTContext.cpp
index e0096a775485c..4cf718487ff4d 100644
--- a/lib/AST/ASTContext.cpp
+++ b/lib/AST/ASTContext.cpp
@@ -6984,6 +6984,21 @@ SILLayout *SILLayout::get(ASTContext &C,
   return newLayout;
 }
 
+SILLayout *
+SILLayout::withMutable(ASTContext &ctx,
+                       std::initializer_list<std::pair<unsigned, bool>>
+                           fieldIndexMutabilityUpdatePairs) const {
+  // Copy the fields, then update the mutability of each requested field.
+  SmallVector<SILField> newFields;
+  llvm::copy(getFields(), std::back_inserter(newFields));
+  for (auto p : fieldIndexMutabilityUpdatePairs) {
+    newFields[p.first].setIsMutable(p.second);
+  }
+
+  return SILLayout::get(ctx, getGenericSignature(), newFields,
+                        capturesGenericEnvironment());
+}
+
 CanSILBoxType SILBoxType::get(ASTContext &C,
                               SILLayout *Layout,
                               SubstitutionMap Substitutions) {
@@ -7020,6 +7035,23 @@ CanSILBoxType SILBoxType::get(CanType boxedType) {
   return get(boxedType->getASTContext(), layout, subMap);
 }
 
+CanSILBoxType
+SILBoxType::withMutable(ASTContext &ctx,
+                        std::initializer_list<std::pair<unsigned, bool>>
+                            fieldIndexMutabilityUpdatePairs) const {
+  return SILBoxType::get(
+      ctx, getLayout()->withMutable(ctx, fieldIndexMutabilityUpdatePairs),
+      getSubstitutions());
+}
+
+ArrayRef<SILField> SILBoxType::getFields() const {
+  return getLayout()->getFields();
+}
+
+bool SILBoxType::isFieldMutable(unsigned index) const {
+  return getFields()[index].isMutable();
+}
+
 LayoutConstraint
 LayoutConstraint::getLayoutConstraint(LayoutConstraintKind Kind,
                                       ASTContext &C) {
diff --git a/lib/SIL/IR/SILInstructions.cpp b/lib/SIL/IR/SILInstructions.cpp
index f6abf477dbcfb..3674d2e45be0c 100644
--- a/lib/SIL/IR/SILInstructions.cpp
+++ b/lib/SIL/IR/SILInstructions.cpp
@@ -379,7 +379,7 @@ AllocBoxInst::AllocBoxInst(
     std::optional<SILDebugVariable> Var, HasDynamicLifetime_t hasDynamicLifetime,
     bool reflection, UsesMoveableValueDebugInfo_t usesMoveableValueDebugInfo,
-    HasPointerEscape_t hasPointerEscape)
+    HasPointerEscape_t hasPointerEscape, bool inferredImmutable)
     : NullaryInstructionWithTypeDependentOperandsBase(
           Loc, TypeDependentOperands, SILType::getPrimitiveObjectType(BoxType)),
       VarInfo(Var, getTrailingObjects<char>()) {
@@ -397,6 +397,7 @@ AllocBoxInst::AllocBoxInst(
       (bool)usesMoveableValueDebugInfo;
 
   sharedUInt8().AllocBoxInst.pointerEscape = (bool)hasPointerEscape;
+  sharedUInt8().AllocBoxInst.inferredImmutable = inferredImmutable;
 }
 
 AllocBoxInst *
@@ -404,15 +405,17 @@ AllocBoxInst::create(SILDebugLocation Loc, CanSILBoxType BoxType,
                      SILFunction &F, std::optional<SILDebugVariable> Var,
                      HasDynamicLifetime_t hasDynamicLifetime, bool reflection,
                      UsesMoveableValueDebugInfo_t usesMoveableValueDebugInfo,
-                     HasPointerEscape_t hasPointerEscape) {
+                     HasPointerEscape_t hasPointerEscape,
+                     bool inferredImmutable) {
   SmallVector<SILValue, 8>
TypeDependentOperands; collectTypeDependentOperands(TypeDependentOperands, F, BoxType); auto Sz = totalSizeToAlloc(TypeDependentOperands.size(), Var ? Var->Name.size() : 0); auto Buf = F.getModule().allocateInst(Sz, alignof(AllocBoxInst)); - return ::new (Buf) AllocBoxInst(Loc, BoxType, TypeDependentOperands, F, Var, - hasDynamicLifetime, reflection, - usesMoveableValueDebugInfo, hasPointerEscape); + return ::new (Buf) + AllocBoxInst(Loc, BoxType, TypeDependentOperands, F, Var, + hasDynamicLifetime, reflection, usesMoveableValueDebugInfo, + hasPointerEscape, inferredImmutable); } SILType AllocBoxInst::getAddressType() const { diff --git a/lib/SIL/IR/SILPrinter.cpp b/lib/SIL/IR/SILPrinter.cpp index a64b010ad502c..2ad1c3d5681f1 100644 --- a/lib/SIL/IR/SILPrinter.cpp +++ b/lib/SIL/IR/SILPrinter.cpp @@ -187,6 +187,7 @@ struct SILValuePrinterInfo { bool IsCapture = false; bool IsReborrow = false; bool IsEscaping = false; + bool IsInferredImmutable = false; bool needPrintType = false; SILValuePrinterInfo(ID ValueID) : ValueID(ValueID), Type(), OwnershipKind() {} @@ -198,18 +199,22 @@ struct SILValuePrinterInfo { SILValuePrinterInfo(ID ValueID, SILType Type, ValueOwnershipKind OwnershipKind, bool IsNoImplicitCopy, LifetimeAnnotation Lifetime, bool IsCapture, - bool IsReborrow, bool IsEscaping, bool needPrintType) + bool IsReborrow, bool IsEscaping, + bool IsInferredImmutable, bool needPrintType) : ValueID(ValueID), Type(Type), OwnershipKind(OwnershipKind), IsNoImplicitCopy(IsNoImplicitCopy), Lifetime(Lifetime), IsCapture(IsCapture), IsReborrow(IsReborrow), IsEscaping(IsEscaping), - needPrintType(needPrintType){} + IsInferredImmutable(IsInferredImmutable), needPrintType(needPrintType) { + } SILValuePrinterInfo(ID ValueID, SILType Type, bool IsNoImplicitCopy, LifetimeAnnotation Lifetime, bool IsCapture, - bool IsReborrow, bool IsEscaping, bool needPrintType) + bool IsReborrow, bool IsEscaping, + bool IsInferredImmutable, bool needPrintType) : ValueID(ValueID), Type(Type), OwnershipKind(), IsNoImplicitCopy(IsNoImplicitCopy), Lifetime(Lifetime), IsCapture(IsCapture), IsReborrow(IsReborrow), IsEscaping(IsEscaping), - needPrintType(needPrintType) {} + IsInferredImmutable(IsInferredImmutable), needPrintType(needPrintType) { + } SILValuePrinterInfo(ID ValueID, SILType Type, ValueOwnershipKind OwnershipKind, bool IsReborrow, bool IsEscaping, bool needPrintType) @@ -798,6 +803,10 @@ class SILPrinter : public SILInstructionVisitor { *this << separator << "@pointer_escape"; separator = " "; } + if (i.IsInferredImmutable) { + *this << separator << "@inferredImmutable"; + separator = " "; + } if (!i.IsReborrow && i.OwnershipKind && *i.OwnershipKind != OwnershipKind::None) { *this << separator << "@" << i.OwnershipKind.value(); separator = " "; @@ -856,7 +865,8 @@ class SILPrinter : public SILInstructionVisitor { return {Ctx.getID(arg), arg->getType(), arg->isNoImplicitCopy(), arg->getLifetimeAnnotation(), arg->isClosureCapture(), arg->isReborrow(), - arg->hasPointerEscape(), /*needPrintType=*/true}; + arg->hasPointerEscape(), arg->isInferredImmutable(), + /*needPrintType=*/true}; } SILValuePrinterInfo getIDAndType(SILArgument *arg) { return {Ctx.getID(arg), arg->getType(), /*needPrintType=*/true}; @@ -874,6 +884,7 @@ class SILPrinter : public SILInstructionVisitor { arg->isClosureCapture(), arg->isReborrow(), arg->hasPointerEscape(), + arg->isInferredImmutable(), /*needPrintType=*/true}; } SILValuePrinterInfo getIDAndTypeAndOwnership(SILArgument *arg) { @@ -1684,7 +1695,7 @@ class SILPrinter : public 
SILInstructionVisitor { void visitAllocBoxInst(AllocBoxInst *ABI) { if (ABI->hasDynamicLifetime()) *this << "[dynamic_lifetime] "; - + if (ABI->emitReflectionMetadata()) { *this << "[reflection] "; } @@ -1693,6 +1704,10 @@ class SILPrinter : public SILInstructionVisitor { *this << "[pointer_escape] "; } + if (ABI->isInferredImmutable()) { + *this << "[inferred_immutable] "; + } + if (ABI->usesMoveableValueDebugInfo() && !ABI->getAddressType().isMoveOnly()) { *this << "[moveable_value_debuginfo] "; diff --git a/lib/SIL/IR/SILType.cpp b/lib/SIL/IR/SILType.cpp index ca07179612f0f..6baf78bd8582a 100644 --- a/lib/SIL/IR/SILType.cpp +++ b/lib/SIL/IR/SILType.cpp @@ -1033,6 +1033,20 @@ SILType SILType::getSILBoxFieldType(const SILFunction *f, unsigned field) const f->getModule().Types, field); } +SILType SILBoxType::getFieldType(SILFunction &fn, unsigned index) { + return ::getSILBoxFieldType(fn.getTypeExpansionContext(), this, + fn.getModule().Types, index); +} + +SILBoxType::SILFieldToSILTypeRange +SILBoxType::getSILFieldTypes(SILFunction &fn) { + auto transform = [this, &fn](unsigned index) { + return getFieldType(fn, index); + }; + return llvm::map_range(range(getNumFields()), + SILFieldIndexToSILTypeTransform(transform)); +} + SILType SILType::getSingletonAggregateFieldType(SILModule &M, ResilienceExpansion expansion) const { diff --git a/lib/SIL/Parser/ParseSIL.cpp b/lib/SIL/Parser/ParseSIL.cpp index 8143816df7cd5..a82bbec5c2dcb 100644 --- a/lib/SIL/Parser/ParseSIL.cpp +++ b/lib/SIL/Parser/ParseSIL.cpp @@ -2692,6 +2692,7 @@ bool SILParser::parseSpecificSILInstruction(SILBuilder &B, UsesMoveableValueDebugInfo_t usesMoveableValueDebugInfo = DoesNotUseMoveableValueDebugInfo; auto hasPointerEscape = DoesNotHavePointerEscape; + bool inferredImmutable = false; StringRef attrName; SourceLoc attrLoc; while (parseSILOptional(attrName, attrLoc, *this)) { @@ -2703,10 +2704,12 @@ bool SILParser::parseSpecificSILInstruction(SILBuilder &B, usesMoveableValueDebugInfo = UsesMoveableValueDebugInfo; } else if (attrName == "pointer_escape") { hasPointerEscape = HasPointerEscape; + } else if (attrName == "inferred_immutable") { + inferredImmutable = true; } else { P.diagnose(attrLoc, diag::sil_invalid_attribute_for_expected, attrName, - "dynamic_lifetime, reflection, pointer_escape or " - "usesMoveableValueDebugInfo"); + "dynamic_lifetime, reflection, pointer_escape, " + "inferred_immutable or usesMoveableValueDebugInfo"); } } @@ -2722,10 +2725,10 @@ bool SILParser::parseSpecificSILInstruction(SILBuilder &B, if (Ty.isMoveOnly()) usesMoveableValueDebugInfo = UsesMoveableValueDebugInfo; - ResultVal = B.createAllocBox(InstLoc, Ty.castTo(), VarInfo, - hasDynamicLifetime, hasReflection, - usesMoveableValueDebugInfo, - /*skipVarDeclAssert*/ false, hasPointerEscape); + ResultVal = B.createAllocBox( + InstLoc, Ty.castTo(), VarInfo, hasDynamicLifetime, + hasReflection, usesMoveableValueDebugInfo, + /*skipVarDeclAssert*/ false, hasPointerEscape, inferredImmutable); break; } case SILInstructionKind::ApplyInst: @@ -7236,9 +7239,11 @@ bool SILParser::parseSILBasicBlock(SILBuilder &B) { bool foundEagerMove = false; bool foundReborrow = false; bool hasPointerEscape = false; - while (auto attributeName = parseOptionalAttribute( - {"noImplicitCopy", "_lexical", "_eagerMove", - "closureCapture", "reborrow", "pointer_escape"})) { + bool foundInferredImmutable = false; + while ( + auto attributeName = parseOptionalAttribute( + {"noImplicitCopy", "_lexical", "_eagerMove", "closureCapture", + "reborrow", "pointer_escape", 
"inferredImmutable"})) { if (*attributeName == "noImplicitCopy") foundNoImplicitCopy = true; else if (*attributeName == "_lexical") @@ -7251,6 +7256,8 @@ bool SILParser::parseSILBasicBlock(SILBuilder &B) { foundReborrow = true; else if (*attributeName == "pointer_escape") hasPointerEscape = true; + else if (*attributeName == "inferredImmutable") + foundInferredImmutable = true; else { llvm_unreachable("Unexpected attribute!"); } @@ -7288,6 +7295,7 @@ bool SILParser::parseSILBasicBlock(SILBuilder &B) { fArg->setLifetimeAnnotation(lifetime); fArg->setReborrow(foundReborrow); fArg->setHasPointerEscape(hasPointerEscape); + fArg->setInferredImmutable(foundInferredImmutable); Arg = fArg; // Today, we construct the ownership kind straight from the function diff --git a/lib/SILGen/SILGenDecl.cpp b/lib/SILGen/SILGenDecl.cpp index 0efd5c0e56c69..b6b73b68c4561 100644 --- a/lib/SILGen/SILGenDecl.cpp +++ b/lib/SILGen/SILGenDecl.cpp @@ -608,6 +608,16 @@ class LocalVariableInitialization : public SingleBufferInitialization { Box = SGF.B.createMarkUnresolvedReferenceBindingInst( decl, Box, MarkUnresolvedReferenceBindingInst::Kind::InOut); + // If we are from a capture list, then the variable that we are creating is + // just a temporary used to initialize the value in the closure caller. We + // want to treat that as a temporary. The actual var decl is represented in + // the closure using a function parameter. So leave the value as a + // temporary. + auto isFromVarDecl = IsFromVarDecl_t::IsFromVarDecl; + if (decl->isCaptureList()) { + isFromVarDecl = IsNotFromVarDecl; + } + if (SGF.getASTContext().SILOpts.supportsLexicalLifetimes(SGF.getModule())) { auto loweredType = SGF.getTypeLowering(decl->getTypeInContext()).getLoweredType(); auto lifetime = SGF.F.getLifetime(decl, loweredType); @@ -619,7 +629,7 @@ class LocalVariableInitialization : public SingleBufferInitialization { // requires one. Box = SGF.B.createBeginBorrow(decl, Box, IsLexical_t(lifetime.isLexical()), - DoesNotHavePointerEscape, IsFromVarDecl); + DoesNotHavePointerEscape, isFromVarDecl); } Addr = SGF.B.createProjectBox(decl, Box, 0); diff --git a/lib/SILOptimizer/Analysis/RegionAnalysis.cpp b/lib/SILOptimizer/Analysis/RegionAnalysis.cpp index 92bacbac7a567..e2eaa9256c3f7 100644 --- a/lib/SILOptimizer/Analysis/RegionAnalysis.cpp +++ b/lib/SILOptimizer/Analysis/RegionAnalysis.cpp @@ -117,7 +117,7 @@ struct AddressBaseComputingVisitor SILValue visitAll(SILValue sourceAddr) { // If our initial value is Sendable, then it is our "value". - if (SILIsolationInfo::isSendableType(sourceAddr)) + if (SILIsolationInfo::isSendable(sourceAddr)) value = sourceAddr; SILValue result = visit(sourceAddr); @@ -167,8 +167,7 @@ struct AddressBaseComputingVisitor // If this is a type case, see if the result of the cast is sendable. In // such a case, we do not want to look through this cast. if (castType == AccessStorageCast::Type && - !SILIsolationInfo::isNonSendableType(cast->getType(), - cast->getFunction())) + !SILIsolationInfo::isNonSendable(cast)) return SILValue(); // Do not look through begin_borrow [var_decl]. They are start new semantic @@ -213,14 +212,12 @@ struct AddressBaseComputingVisitor case ProjectionKind::Enum: { auto op = cast(projInst)->getOperand(); - bool isOperandSendable = !SILIsolationInfo::isNonSendableType( - op->getType(), op->getFunction()); + bool isOperandSendable = !SILIsolationInfo::isNonSendable(op); // If our operand is Sendable and our field is non-Sendable and we have // not stashed a value yet, stash value. 
if (!value && isOperandSendable && - SILIsolationInfo::isNonSendableType(projInst->getType(), - projInst->getFunction())) { + SILIsolationInfo::isNonSendable(projInst)) { value = projInst; } @@ -230,15 +227,13 @@ struct AddressBaseComputingVisitor // These are merges if we have multiple fields. auto op = cast(projInst)->getOperand(); - bool isOperandSendable = !SILIsolationInfo::isNonSendableType( - op->getType(), op->getFunction()); + bool isOperandSendable = !SILIsolationInfo::isNonSendable(op); // If our operand is Sendable and our field is non-Sendable, we need to // bail since we want to root the non-Sendable type in the Sendable // type. if (!value && isOperandSendable && - SILIsolationInfo::isNonSendableType(projInst->getType(), - projInst->getFunction())) + SILIsolationInfo::isNonSendable(projInst)) value = projInst; isProjectedFromAggregate |= op->getType().getNumTupleElements() > 1; @@ -247,15 +242,13 @@ struct AddressBaseComputingVisitor case ProjectionKind::Struct: auto op = cast(projInst)->getOperand(); - bool isOperandSendable = !SILIsolationInfo::isNonSendableType( - op->getType(), op->getFunction()); + bool isOperandSendable = !SILIsolationInfo::isNonSendable(op); // If our operand is Sendable and our field is non-Sendable, we need to // bail since we want to root the non-Sendable type in the Sendable // type. if (!value && isOperandSendable && - SILIsolationInfo::isNonSendableType(projInst->getType(), - projInst->getFunction())) + SILIsolationInfo::isNonSendable(projInst)) value = projInst; // These are merges if we have multiple fields. @@ -637,7 +630,7 @@ RegionAnalysisValueMap::initializeTrackableValue( self->stateIndexToEquivalenceClass[iter.first->second.getID()] = value; // Before we do anything, see if we have a Sendable value. - if (!SILIsolationInfo::isNonSendableType(value->getType(), fn)) { + if (!SILIsolationInfo::isNonSendable(value)) { iter.first->getSecond().addFlag(TrackableValueFlag::isSendable); return {{iter.first->first, iter.first->second}, true}; } @@ -680,7 +673,7 @@ TrackableValue RegionAnalysisValueMap::getTrackableValueHelper( // Then check our oracle to see if the value is actually sendable. If we have // a Sendable value, just return early. - if (!SILIsolationInfo::isNonSendableType(value->getType(), fn)) { + if (!SILIsolationInfo::isNonSendable(value)) { iter.first->getSecond().addFlag(TrackableValueFlag::isSendable); return {iter.first->first, iter.first->second}; } @@ -889,11 +882,9 @@ struct UnderlyingTrackedObjectValueVisitor { /// Visit \p sourceValue returning a load base if we find one. The actual /// underlying object is value. SILValue visit(SILValue sourceValue) { - auto *fn = sourceValue->getFunction(); - // If our result is ever Sendable, we record that as our value if we do // not have a value yet. We always want to take the first one. 
- if (SILIsolationInfo::isSendableType(sourceValue->getType(), fn)) { + if (SILIsolationInfo::isSendable(sourceValue)) { if (!value) { value = sourceValue; } @@ -901,8 +892,8 @@ struct UnderlyingTrackedObjectValueVisitor { if (auto *svi = dyn_cast(sourceValue)) { if (isStaticallyLookThroughInst(svi)) { - if (!value && SILIsolationInfo::isSendableType(svi->getOperand(0)) && - SILIsolationInfo::isNonSendableType(svi)) { + if (!value && SILIsolationInfo::isSendable(svi->getOperand(0)) && + SILIsolationInfo::isNonSendable(svi)) { value = svi; } @@ -912,14 +903,13 @@ struct UnderlyingTrackedObjectValueVisitor { // If we have a cast and our operand and result are non-Sendable, treat it // as a look through. if (isLookThroughIfOperandAndResultNonSendable(svi)) { - if (SILIsolationInfo::isNonSendableType(svi->getType(), fn) && - SILIsolationInfo::isNonSendableType(svi->getOperand(0)->getType(), - fn)) { + if (SILIsolationInfo::isNonSendable(svi) && + SILIsolationInfo::isNonSendable(svi->getOperand(0))) { return svi->getOperand(0); } - if (!value && SILIsolationInfo::isSendableType(svi->getOperand(0)) && - SILIsolationInfo::isNonSendableType(svi)) { + if (!value && SILIsolationInfo::isSendable(svi->getOperand(0)) && + SILIsolationInfo::isNonSendable(svi)) { value = svi; } } @@ -927,8 +917,8 @@ struct UnderlyingTrackedObjectValueVisitor { if (auto *inst = sourceValue->getDefiningInstruction()) { if (isStaticallyLookThroughInst(inst)) { - if (!value && SILIsolationInfo::isSendableType(inst->getOperand(0)) && - SILIsolationInfo::isNonSendableType(sourceValue)) { + if (!value && SILIsolationInfo::isSendable(inst->getOperand(0)) && + SILIsolationInfo::isNonSendable(sourceValue)) { value = sourceValue; } @@ -942,7 +932,7 @@ struct UnderlyingTrackedObjectValueVisitor { public: SILValue visitAll(SILValue sourceValue) { // Before we do anything, - if (SILIsolationInfo::isSendableType(sourceValue)) + if (SILIsolationInfo::isSendable(sourceValue)) value = sourceValue; SILValue result = visit(sourceValue); @@ -2010,7 +2000,7 @@ class PartitionOpTranslator { // being uniquely identified and captured. SILValue val = op.get(); if (val->getType().isAddress() && - isNonSendableType(val->getType())) { + SILIsolationInfo::isNonSendable(val)) { auto trackVal = getTrackableValue(val, true); (void)trackVal; REGIONBASEDISOLATION_LOG( @@ -2018,7 +2008,7 @@ class PartitionOpTranslator { continue; } if (auto *pbi = dyn_cast(val)) { - if (isNonSendableType(pbi->getType())) { + if (SILIsolationInfo::isNonSendable(pbi)) { auto trackVal = getTrackableValue(val, true); (void)trackVal; continue; @@ -2116,14 +2106,6 @@ class PartitionOpTranslator { RegionAnalysisValueMap &getValueMap() const { return valueMap; } private: - /// Check if the passed in type is NonSendable. - /// - /// NOTE: We special case RawPointer and NativeObject to ensure they are - /// treated as non-Sendable and strict checking is applied to it. - bool isNonSendableType(SILType type) const { - return SILIsolationInfo::isNonSendableType(type, function); - } - TrackableValueLookupResult getTrackableValue(SILValue value, bool isAddressCapturedByPartialApply = false) { @@ -3222,8 +3204,7 @@ class PartitionOpTranslator { case TranslationSemantics::AssertingIfNonSendable: // Do not error if all of our operands are sendable. 
if (llvm::none_of(inst->getOperandValues(), [&](SILValue value) { - return ::SILIsolationInfo::isNonSendableType(value->getType(), - inst->getFunction()); + return ::SILIsolationInfo::isNonSendable(value); })) return; llvm::errs() << "BadInst: " << *inst; @@ -3721,8 +3702,8 @@ IGNORE_IF_SENDABLE_RESULT_ASSIGN_OTHERWISE(StructExtractInst) TranslationSemantics PartitionOpTranslator::visit##INST(INST *cast) { \ assert(isLookThroughIfOperandAndResultNonSendable(cast) && "Out of sync"); \ bool isOperandNonSendable = \ - isNonSendableType(cast->getOperand()->getType()); \ - bool isResultNonSendable = isNonSendableType(cast->getType()); \ + SILIsolationInfo::isNonSendable(cast->getOperand()); \ + bool isResultNonSendable = SILIsolationInfo::isNonSendable(cast); \ \ if (isOperandNonSendable) { \ if (isResultNonSendable) { \ @@ -3802,7 +3783,7 @@ PartitionOpTranslator::visitAllocStackInst(AllocStackInst *asi) { // Before we do anything, see if asi is Sendable or if it is non-Sendable, // that it is from a var decl. In both cases, we can just return assign fresh // and exit early. - if (!SILIsolationInfo::isNonSendableType(asi) || asi->isFromVarDecl()) + if (!SILIsolationInfo::isNonSendable(asi) || asi->isFromVarDecl()) return TranslationSemantics::AssignFresh; // Ok at this point we know that our value is a non-Sendable temporary. @@ -3855,7 +3836,7 @@ PartitionOpTranslator::visitBeginBorrowInst(BeginBorrowInst *bbi) { /// the address so that if we have a load from a non-Sendable base, we properly /// require the base. TranslationSemantics PartitionOpTranslator::visitLoadInst(LoadInst *li) { - if (SILIsolationInfo::isSendableType(li->getOperand())) { + if (SILIsolationInfo::isSendable(li->getOperand())) { translateSILRequire(li->getOperand()); } @@ -3875,7 +3856,7 @@ TranslationSemantics PartitionOpTranslator::visitLoadInst(LoadInst *li) { /// require the base. 
TranslationSemantics PartitionOpTranslator::visitLoadBorrowInst(LoadBorrowInst *lbi) { - if (SILIsolationInfo::isSendableType(lbi->getOperand())) { + if (SILIsolationInfo::isSendable(lbi->getOperand())) { translateSILRequire(lbi->getOperand()); } @@ -3908,7 +3889,7 @@ PartitionOpTranslator::visitRefToBridgeObjectInst(RefToBridgeObjectInst *r) { TranslationSemantics PartitionOpTranslator::visitPackElementGetInst(PackElementGetInst *r) { - if (!isNonSendableType(r->getType())) + if (!SILIsolationInfo::isNonSendable(r)) return TranslationSemantics::Require; translateSILAssign(SILValue(r), r->getPackOperand()); return TranslationSemantics::Special; @@ -3916,7 +3897,7 @@ PartitionOpTranslator::visitPackElementGetInst(PackElementGetInst *r) { TranslationSemantics PartitionOpTranslator::visitTuplePackElementAddrInst( TuplePackElementAddrInst *r) { - if (!isNonSendableType(r->getType())) { + if (!SILIsolationInfo::isNonSendable(r)) { translateSILRequire(r->getTuple()); } else { translateSILAssign(SILValue(r), r->getTupleOperand()); @@ -3926,7 +3907,7 @@ TranslationSemantics PartitionOpTranslator::visitTuplePackElementAddrInst( TranslationSemantics PartitionOpTranslator::visitTuplePackExtractInst(TuplePackExtractInst *r) { - if (!isNonSendableType(r->getType())) { + if (!SILIsolationInfo::isNonSendable(r)) { translateSILRequire(r->getTuple()); } else { translateSILAssign(SILValue(r), r->getTupleOperand()); @@ -3937,7 +3918,7 @@ PartitionOpTranslator::visitTuplePackExtractInst(TuplePackExtractInst *r) { TranslationSemantics PartitionOpTranslator::visitPackElementSetInst(PackElementSetInst *r) { // If the value we are storing is sendable, treat this as a require. - if (!isNonSendableType(r->getValue()->getType())) { + if (!SILIsolationInfo::isNonSendable(r->getValue())) { return TranslationSemantics::Require; } @@ -3953,7 +3934,7 @@ PartitionOpTranslator::visitRawPointerToRefInst(RawPointerToRefInst *r) { // // NOTE: From RBI perspective, RawPointer is non-Sendable, so this is really // just look through if operand and result non-Sendable. - if (isNonSendableType(r->getType())) + if (SILIsolationInfo::isNonSendable(r)) return TranslationSemantics::LookThrough; // Otherwise to be conservative, we need to treat this as a require. @@ -3968,7 +3949,7 @@ PartitionOpTranslator::visitRefToRawPointerInst(RefToRawPointerInst *r) { // // NOTE: From RBI perspective, RawPointer is non-Sendable, so this is really // just look through if operand and result non-Sendable. - if (isNonSendableType(r->getOperand()->getType())) + if (SILIsolationInfo::isNonSendable(r->getOperand())) return TranslationSemantics::LookThrough; // Otherwise to be conservative, we need to treat the raw pointer as a fresh @@ -3993,7 +3974,7 @@ TranslationSemantics PartitionOpTranslator::visitMergeIsolationRegionInst( TranslationSemantics PartitionOpTranslator::visitPointerToAddressInst(PointerToAddressInst *ptai) { - if (!isNonSendableType(ptai->getType())) { + if (!SILIsolationInfo::isNonSendable(ptai)) { return TranslationSemantics::Require; } return TranslationSemantics::Assign; @@ -4022,7 +4003,7 @@ TranslationSemantics PartitionOpTranslator::visitUnconditionalCheckedCastInst( TranslationSemantics PartitionOpTranslator::visitRefElementAddrInst(RefElementAddrInst *reai) { // If our field is a NonSendableType... - if (!isNonSendableType(reai->getType())) { + if (!SILIsolationInfo::isNonSendable(reai)) { // And the field is a let... then ignore it. We know that we cannot race on // any writes to the field. 
       if (reai->getField()->isLet()) {
@@ -4042,7 +4023,7 @@ PartitionOpTranslator::visitRefElementAddrInst(RefElementAddrInst *reai) {
 
 TranslationSemantics
 PartitionOpTranslator::visitRefTailAddrInst(RefTailAddrInst *reai) {
   // If our trailing type is Sendable...
-  if (!isNonSendableType(reai->getType())) {
+  if (!SILIsolationInfo::isNonSendable(reai)) {
     // And our ref_tail_addr is immutable... we can ignore the access since we
     // cannot race against a write to any of these fields.
     if (reai->isImmutable()) {
diff --git a/lib/SILOptimizer/Mandatory/CMakeLists.txt b/lib/SILOptimizer/Mandatory/CMakeLists.txt
index 4a93bd3152ba4..78c4024efa51b 100644
--- a/lib/SILOptimizer/Mandatory/CMakeLists.txt
+++ b/lib/SILOptimizer/Mandatory/CMakeLists.txt
@@ -50,6 +50,7 @@ target_sources(swiftSILOptimizer PRIVATE
   YieldOnceCheck.cpp
   OSLogOptimization.cpp
   MoveOnlyWrappedTypeEliminator.cpp
+  MarkNeverWrittenMutableClosureBoxesAsImmutable.cpp
   RegionAnalysisInvalidationTransform.cpp
   DiagnosticDeadFunctionElimination.cpp
   OwnershipModelEliminator.cpp)
diff --git a/lib/SILOptimizer/Mandatory/MarkNeverWrittenMutableClosureBoxesAsImmutable.cpp b/lib/SILOptimizer/Mandatory/MarkNeverWrittenMutableClosureBoxesAsImmutable.cpp
new file mode 100644
index 0000000000000..e778fb7eb073e
--- /dev/null
+++ b/lib/SILOptimizer/Mandatory/MarkNeverWrittenMutableClosureBoxesAsImmutable.cpp
@@ -0,0 +1,273 @@
+//===--- MarkNeverWrittenMutableClosureBoxesAsImmutable.cpp ---------------===//
+//
+// This source file is part of the Swift.org open source project
+//
+// Copyright (c) 2014 - 2026 Apple Inc. and the Swift project authors
+// Licensed under Apache License v2.0 with Runtime Library Exception
+//
+// See https://swift.org/LICENSE.txt for license information
+// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
+//
+//===----------------------------------------------------------------------===//
+
+#define DEBUG_TYPE "sil-mark-never-written-mutable-closure-boxes-as-immutable"
+
+#include "swift/SIL/ApplySite.h"
+#include "swift/SIL/OperandDatastructures.h"
+#include "swift/SILOptimizer/PassManager/Passes.h"
+#include "swift/SILOptimizer/PassManager/Transforms.h"
+#include "swift/SILOptimizer/Utils/SILIsolationInfo.h"
+
+using namespace swift;
+
+//===----------------------------------------------------------------------===//
+// MARK: Implementation
+//===----------------------------------------------------------------------===//
+
+static bool isImmutable(SILValue start, StoreWeakInst *allowableWeakStore,
+                        llvm::DenseSet<SILFunctionArgument *> &visitedArgs) {
+  LLVM_DEBUG(llvm::dbgs() << "Checking in function "
+                          << start->getFunction()->getName() << ": " << *start);
+  // We remember the callee function arguments of any partial_apply we find so
+  // that we can visit them serially after we finish processing this value's
+  // uses. This way we do not create too many OperandWorklists at once; only a
+  // limited number of them can exist at a time.
+  SmallVector<SILFunctionArgument *> funcArgsToVisit;
+
+  {
+    // Walk the uses searching for partial_applies, skipping or looking through
+    // benign uses such as debug_value, move_value [lexical], and begin_borrow
+    // [lexical].
+    OperandWorklist worklist(start->getFunction());
+    worklist.pushResultOperandsIfNotVisited(start);
+
+    while (auto *use = worklist.pop()) {
+      auto *user = use->getUser();
+      LLVM_DEBUG(llvm::dbgs() << "    Visiting User: " << *user);
+
+      // Uses to skip.
+      if (isa(user) || isa(user) ||
+          isa(user) || isa(user) ||
+          isa(user) || isa(user)) {
+        LLVM_DEBUG(llvm::dbgs() << "    Ignoring!\n");
+        continue;
+      }
+
+      // Uses to look through.
+      if (isa(user) || isa(user) ||
+          isa(user) || isa(user) ||
+          isa(user)) {
+        LLVM_DEBUG(llvm::dbgs() << "    Looking through!\n");
+        worklist.pushResultOperandsIfNotVisited(user);
+        continue;
+      }
+
+      // If we have a store_weak, continue if it is the store_weak that we are
+      // ok with.
+      if (auto *swi = dyn_cast<StoreWeakInst>(user);
+          swi && swi == allowableWeakStore) {
+        LLVM_DEBUG(llvm::dbgs() << "    Ignoring allowable store_weak!\n");
+        continue;
+      }
+
+      // Visit partial_apply uses and check:
+      //
+      // 1. That we can look up the referenced function.
+      //
+      // 2. Whether we already know that the corresponding function argument is
+      // inferred immutable. In that case, we can just continue.
+      //
+      // 3. Whether we have already visited the function argument. If we have
+      // visited it and it still has not been marked immutable, we know it is
+      // not immutable, so just return false.
+      //
+      // 4. Otherwise, we add the function argument to a worklist to process
+      // after we finish walking uses in this function. We do this to ensure we
+      // do not create too many OperandWorklists at the same time, since only a
+      // limited number of them can exist at once.
+      if (auto *pai = dyn_cast<PartialApplyInst>(user)) {
+        if (auto *calleeFunc = pai->getReferencedFunctionOrNull()) {
+          auto calleeArgIndex = ApplySite(pai).getCalleeArgIndex(*use);
+          auto *fArg = cast<SILFunctionArgument>(
+              calleeFunc->getArgument(calleeArgIndex));
+          if (fArg->isInferredImmutable()) {
+            LLVM_DEBUG(llvm::dbgs()
+                       << "    Found partial_apply with inferred immutable "
+                          "function arg. Can ignore it!\n");
+            continue;
+          }
+          if (visitedArgs.count(fArg)) {
+            LLVM_DEBUG(llvm::dbgs()
+                       << "    Found mutable function arg user!\n");
+
+            return false;
+          }
+          LLVM_DEBUG(llvm::dbgs()
+                     << "    Found partial apply to check later!\n");
+          funcArgsToVisit.push_back(fArg);
+          continue;
+        }
+      }
+
+      // Unrecognized user. Bail.
+      LLVM_DEBUG(llvm::dbgs()
+                 << "    Not transforming due to unhandled user!\n");
+      return false;
+    }
+  }
+
+  // Now check recursively whether our function argument users are immutable.
+  // We do this after the walk to avoid creating too many OperandWorklists.
+  bool allFArgUsersImmutable = true;
+  for (auto *fArg : funcArgsToVisit) {
+    assert(!fArg->isInferredImmutable() && "Should have been checked earlier");
+    visitedArgs.insert(fArg);
+    if (isImmutable(fArg, nullptr, visitedArgs)) {
+      fArg->setInferredImmutable(true);
+      continue;
+    }
+    allFArgUsersImmutable = false;
+  }
+
+  return allFArgUsersImmutable;
+}
+
+/// Make sure that the given box fits our pattern-matching conditions and
+/// return its single initializing begin_borrow scope and store_weak so that we
+/// can do a more intensive recursive check later.
+///
+/// The conditions are:
+///
+/// 1. The box must be mutable.
+///
+/// 2. The box must contain a weak reference to a Sendable type.
+///
+/// 3. The box must have a single begin_borrow user, through which all
+/// initialization occurs.
+///
+/// 4. There must be a single store_weak that initializes the box from a
+/// project_box from the single begin_borrow.
+///
+/// 5. The box should not have a debug_value use.
+///
+/// This is safe since later we are going to recursively look at uses of the
+/// begin_borrow and if we find any memory uses that are a load_weak or a
+/// different store_weak besides the one we found, we fail the box.
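For reference, the Swift-level shape that this pattern match is aimed at looks roughly like the following (modeled on the tests added at the end of this patch; the class and function names here are illustrative):

    final class KlassSendable: Sendable {}

    func capturesWeakSendable() {
      let x = KlassSendable()
      // [weak x] boxes a weak reference to a Sendable class. The box is written
      // exactly once, by the capture's initializing store_weak, and the closure
      // only reads it, so the box can be inferred immutable.
      let _ = { [weak x] in
        _ = x
      }
    }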
+static StoreWeakInst *isPatternMatchableBox(AllocBoxInst *abi) {
+  LLVM_DEBUG(llvm::dbgs() << "Checking if box can be matched: " << *abi);
+
+  CanSILBoxType boxType = abi->getType().castTo<SILBoxType>();
+  if (boxType->getNumFields() != 1 ||
+      !SILIsolationInfo::boxContainsOnlySendableFields(abi)) {
+    LLVM_DEBUG(llvm::dbgs() << "    Cannot match since either has multiple "
+                               "fields or a non-Sendable field\n");
+    return nullptr;
+  }
+
+  // For now, to be conservative, only do this if the box's field is a weak
+  // reference.
+  if (auto ownership = boxType->getFieldType(*abi->getFunction(), 0)
+                           .getReferenceStorageOwnership();
+      !ownership || *ownership != ReferenceOwnership::Weak) {
+    LLVM_DEBUG(llvm::dbgs()
+               << "    Cannot match since field is not a weak reference\n");
+    return nullptr;
+  }
+
+  BeginBorrowInst *singleBBI = nullptr;
+  for (auto *use : abi->getUses()) {
+    if (isa(use->getUser()) ||
+        isa(use->getUser()) ||
+        isa(use->getUser()))
+      continue;
+
+    auto *bbi = dyn_cast<BeginBorrowInst>(use->getUser());
+    if (!bbi) {
+      LLVM_DEBUG(llvm::dbgs()
+                 << "    Cannot match since has a non-begin_borrow, "
+                    "destroy_value, dealloc_box immediate user: "
+                 << *use->getUser());
+      return nullptr;
+    }
+
+    if (bbi->isFromVarDecl()) {
+      LLVM_DEBUG(llvm::dbgs()
+                 << "    Cannot match since begin_borrow from var_decl\n");
+      return nullptr;
+    }
+
+    if (singleBBI) {
+      LLVM_DEBUG(llvm::dbgs() << "    Cannot match since found multiple "
+                                 "begin_borrow initializations\n");
+      return nullptr;
+    }
+    singleBBI = bbi;
+  }
+
+  if (!singleBBI) {
+    LLVM_DEBUG(llvm::dbgs() << "    Cannot match since did not find "
+                               "begin_borrow for initialization\n");
+    return nullptr;
+  }
+
+  // Now look for a single store_weak from a project_box from our singleBBI.
+  //
+  // DISCUSSION: We could be lazier here and leave the checking of multiple
+  // store_weak to the later recursive check... but why not just check now and
+  // end earlier.
+  StoreWeakInst *singleStoreWeak = nullptr;
+  for (auto *use : singleBBI->getUsersOfType<ProjectBoxInst>()) {
+    if (auto *swi = use->getSingleUserOfType<StoreWeakInst>()) {
+      if (singleStoreWeak) {
+        LLVM_DEBUG(llvm::dbgs()
+                   << "    Cannot match since found multiple store_weak\n");
+        return nullptr;
+      }
+      singleStoreWeak = swi;
+    }
+  }
+  if (!singleStoreWeak) {
+    LLVM_DEBUG(llvm::dbgs() << "    Cannot match since did not find a single "
+                               "store_weak initialization\n");
+    return nullptr;
+  }
+
+  return singleStoreWeak;
+}
+
+namespace {
+
+class MarkNeverWrittenMutableClosureBoxesAsImmutable
+    : public SILModuleTransform {
+  void run() override {
+    bool madeChange = false;
+    llvm::DenseSet<SILFunctionArgument *> visitedArgs;
+    for (auto &fn : *getModule()) {
+      for (auto &block : fn) {
+        for (auto &inst : block) {
+          auto *abi = dyn_cast<AllocBoxInst>(&inst);
+          if (!abi)
+            continue;
+          auto *singleInitialization = isPatternMatchableBox(abi);
+          if (!singleInitialization ||
+              !isImmutable(abi, singleInitialization, visitedArgs))
+            continue;
+
+          abi->setInferredImmutable(true);
+          LLVM_DEBUG(llvm::dbgs() << "Marking Box as Inferred Immutable!\n");
+          madeChange = true;
+        }
+      }
+    }
+
+    if (madeChange)
+      invalidateAll();
+  }
+};
+
+} // namespace
+
+//===----------------------------------------------------------------------===//
+// MARK: Top Level Entrypoint
+//===----------------------------------------------------------------------===//
+
+SILTransform *swift::createMarkNeverWrittenMutableClosureBoxesAsImmutable() {
+  return new MarkNeverWrittenMutableClosureBoxesAsImmutable();
+}
diff --git a/lib/SILOptimizer/Mandatory/SendNonSendable.cpp b/lib/SILOptimizer/Mandatory/SendNonSendable.cpp
index f64ca367d4540..2a4bb8b0c9d26 100644
--- a/lib/SILOptimizer/Mandatory/SendNonSendable.cpp
+++ b/lib/SILOptimizer/Mandatory/SendNonSendable.cpp
@@ -2128,16 +2128,9 @@ bool SentNeverSendableDiagnosticInferrer::initForIsolatedPartialApply(
   // diagnostic that mentions that the reason we are emitting an error is b/c
   // the value is mutable.
   if (auto boxTy = op->get()->getType().getAs<SILBoxType>();
-      boxTy && boxTy->getLayout()->isMutable() &&
-      llvm::all_of(boxTy->getLayout()->getFields(),
-                   [&op](const SILField &field) -> bool {
-                     auto fieldTy = field.getAddressType();
-                     if (fieldTy.hasTypeParameter())
-                       fieldTy =
-                           op->getFunction()->mapTypeIntoEnvironment(fieldTy);
-                     return SILIsolationInfo::isSendableType(
-                         fieldTy, op->getFunction());
-                   })) {
+      boxTy && boxTy->getNumFields() == 1 && boxTy->isFieldMutable(0) &&
+      SILIsolationInfo::boxTypeContainsOnlySendableFields(
+          boxTy, op->getFunction())) {
     diagnosticEmitter.emitNamedFunctionArgumentClosureMutable(
         diagnosticOp->getUser()->getLoc(), rootValueAndName->first, crossing);
     return true;
diff --git a/lib/SILOptimizer/PassManager/PassPipeline.cpp b/lib/SILOptimizer/PassManager/PassPipeline.cpp
index 0d6d0de776fbe..00a19cf7a5fff 100644
--- a/lib/SILOptimizer/PassManager/PassPipeline.cpp
+++ b/lib/SILOptimizer/PassManager/PassPipeline.cpp
@@ -115,6 +115,8 @@ static void addDefiniteInitialization(SILPassPipelinePlan &P) {
 // should be in the -Onone pass pipeline and the prepare optimizations pipeline.
 static void addMandatoryDiagnosticOptPipeline(SILPassPipelinePlan &P) {
   P.startPipeline("Mandatory Diagnostic Passes + Enabling Optimization Passes");
+
+  P.addMarkNeverWrittenMutableClosureBoxesAsImmutable();
   P.addDiagnoseInvalidEscapingCaptures();
   P.addReferenceBindingTransform();
   P.addNestedSemanticFunctionCheck();
diff --git a/lib/SILOptimizer/Utils/SILIsolationInfo.cpp b/lib/SILOptimizer/Utils/SILIsolationInfo.cpp
index 69a7490801a25..4ad5e81e7b403 100644
--- a/lib/SILOptimizer/Utils/SILIsolationInfo.cpp
+++ b/lib/SILOptimizer/Utils/SILIsolationInfo.cpp
@@ -302,7 +302,7 @@ static SILValue lookThroughNonVarDeclOwnershipInsts(SILValue v) {
 static bool isPartialApplyNonisolatedUnsafe(PartialApplyInst *pai) {
   bool foundOneNonIsolatedUnsafe = false;
   for (auto &op : pai->getArgumentOperands()) {
-    if (SILIsolationInfo::isSendableType(op.get()))
+    if (SILIsolationInfo::isSendable(op.get()))
       continue;
 
     // Normally we would not look through copy_value, begin_borrow, or
@@ -852,7 +852,7 @@ SILIsolationInfo SILIsolationInfo::get(SILInstruction *inst) {
       }
     } else {
       // Ok, we have a temporary. If it is non-Sendable...
-      if (SILIsolationInfo::isNonSendableType(asi)) {
+      if (SILIsolationInfo::isNonSendable(asi)) {
         if (auto isolation = inferIsolationInfoForTempAllocStack(asi))
           return isolation;
       }
@@ -922,7 +922,7 @@ SILIsolationInfo SILIsolationInfo::get(SILInstruction *inst) {
 
 SILIsolationInfo SILIsolationInfo::get(SILArgument *arg) {
   // Return early if we do not have a non-Sendable type.
-  if (!SILIsolationInfo::isNonSendableType(arg->getType(), arg->getFunction()))
+  if (!SILIsolationInfo::isNonSendable(arg))
     return {};
 
   // Handle a switch_enum from a global-actor-isolated type.
@@ -1580,6 +1580,49 @@ void SILIsolationInfo::printForOneLineLogging(SILFunction *fn,
   }
 }
 
+bool SILIsolationInfo::isSendable(SILValue value) {
+  // If the type system says we are sendable, then we are always sendable.
+  if (isSendableType(value->getType(), value->getFunction()))
+    return true;
+
+  if (auto *fArg = dyn_cast<SILFunctionArgument>(value);
+      fArg && fArg->isClosureCapture() && fArg->isInferredImmutable()) {
+    CanSILBoxType boxType = fArg->getType().getAs<SILBoxType>();
+    if (!boxType || boxType->getNumFields() != 1)
+      return false;
+    auto innerType = boxType->getFieldType(*fArg->getFunction(), 0);
+    // We can only do this if the underlying type is Sendable.
+    if (isNonSendableType(innerType, fArg->getFunction()))
+      return false;
+    // For now, to be conservative, only do this if the field is a weak
+    // reference.
+    if (auto ownership = innerType.getReferenceStorageOwnership();
+        !ownership || *ownership != ReferenceOwnership::Weak)
+      return false;
+    // Ok, we can treat this as Sendable.
+    return true;
+  }
+
+  if (auto *abi = dyn_cast<AllocBoxInst>(lookThroughOwnershipInsts(value));
+      abi && abi->isInferredImmutable()) {
+    CanSILBoxType boxType = abi->getType().castTo<SILBoxType>();
+    if (boxType->getNumFields() != 1)
+      return false;
+
+    auto innerType = boxType->getFieldType(*abi->getFunction(), 0);
+    if (isNonSendableType(innerType, abi->getFunction()))
+      return false;
+
+    // For now, to be conservative, only do this if the field is a weak
+    // reference.
+    if (auto ownership = innerType.getReferenceStorageOwnership();
+        !ownership || *ownership != ReferenceOwnership::Weak)
+      return false;
+
+    return true;
+  }
+
+  return false;
+}
+
 // Check if the passed in type is NonSendable.
// // NOTE: We special case RawPointer and NativeObject to ensure they are diff --git a/lib/Serialization/DeserializeSIL.cpp b/lib/Serialization/DeserializeSIL.cpp index eeefad61ed033..528f681b7bcc5 100644 --- a/lib/Serialization/DeserializeSIL.cpp +++ b/lib/Serialization/DeserializeSIL.cpp @@ -1329,6 +1329,8 @@ SILBasicBlock *SILDeserializer::readSILBasicBlock(SILFunction *Fn, fArg->setClosureCapture(isClosureCapture); bool isFormalParameterPack = (Args[I + 1] >> 17) & 0x1; fArg->setFormalParameterPack(isFormalParameterPack); + bool isInferredImmutable = (Args[I + 1] >> 18) & 0x1; + fArg->setInferredImmutable(isInferredImmutable); Arg = fArg; } else { Arg = CurrentBB->createPhiArgument(SILArgTy, OwnershipKind, @@ -1805,11 +1807,12 @@ bool SILDeserializer::readSILInstruction(SILFunction *Fn, auto usesMoveableValueDebugInfo = UsesMoveableValueDebugInfo_t((Attr >> 2) & 0x1); auto pointerEscape = HasPointerEscape_t((Attr >> 3) & 0x1); + auto inferredImmutable = (Attr >> 4) & 0x1; ResultInst = Builder.createAllocBox( Loc, cast(MF->getType(TyID)->getCanonicalType()), std::nullopt, hasDynamicLifetime, reflection, usesMoveableValueDebugInfo, - /*skipVarDeclAssert*/ false, pointerEscape); + /*skipVarDeclAssert*/ false, pointerEscape, inferredImmutable); break; } case SILInstructionKind::AllocStackInst: { diff --git a/lib/Serialization/ModuleFormat.h b/lib/Serialization/ModuleFormat.h index 37209eff5af6f..694d06031e754 100644 --- a/lib/Serialization/ModuleFormat.h +++ b/lib/Serialization/ModuleFormat.h @@ -58,7 +58,7 @@ const uint16_t SWIFTMODULE_VERSION_MAJOR = 0; /// describe what change you made. The content of this comment isn't important; /// it just ensures a conflict if two people change the module format. /// Don't worry about adhering to the 80-column limit for this line. -const uint16_t SWIFTMODULE_VERSION_MINOR = 978; // @warn attribute +const uint16_t SWIFTMODULE_VERSION_MINOR = 979; // inferred_immutable /// A standard hash seed used for all string hashes in a serialized module. /// diff --git a/lib/Serialization/SerializeSIL.cpp b/lib/Serialization/SerializeSIL.cpp index c285a40304611..83277be7f5438 100644 --- a/lib/Serialization/SerializeSIL.cpp +++ b/lib/Serialization/SerializeSIL.cpp @@ -764,8 +764,9 @@ void SILSerializer::writeSILBasicBlock(const SILBasicBlock &BB) { packedMetadata |= unsigned(SFA->getLifetimeAnnotation()) << 14; // 2 bits packedMetadata |= unsigned(SFA->isClosureCapture()) << 16; // 1 bit packedMetadata |= unsigned(SFA->isFormalParameterPack()) << 17; // 1 bit + packedMetadata |= unsigned(SFA->isInferredImmutable()) << 18; // 1 bit } - // Used: 17 bits. Free: 15. + // Used: 18 bits. Free: 14. // // TODO: We should be able to shrink the packed metadata of the first two. 
Args.push_back(packedMetadata); @@ -1293,6 +1294,7 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { flags |= unsigned(ABI->emitReflectionMetadata()) << 1; flags |= unsigned(ABI->usesMoveableValueDebugInfo()) << 2; flags |= unsigned(ABI->hasPointerEscape()) << 3; + flags |= unsigned(ABI->isInferredImmutable()) << 4; writeOneTypeLayout(ABI->getKind(), flags, ABI->getType()); diff --git a/test/Concurrency/transfernonsendable_closure_captures.swift b/test/Concurrency/transfernonsendable_closure_captures.swift index 1ae7131dcdd59..f129086bb5501 100644 --- a/test/Concurrency/transfernonsendable_closure_captures.swift +++ b/test/Concurrency/transfernonsendable_closure_captures.swift @@ -43,7 +43,6 @@ func testCopyableSendableStructWithEscapingMainActorAsync() { } } -// TODO: We should say that it is due to the box being mutable. func testMutableCopyableSendableStructWithEscapingMainActorAsync() { var x = CopyableStructSendable() x = CopyableStructSendable() @@ -80,8 +79,6 @@ func testMutableCopyableNonsendableStructWithEscapingMainActorAsync() { // Nonescaping // -// TODO: Study these. - func testCopyableSendableStructWithNonescapingMainActorAsync() { let x = CopyableStructSendable() let _ = { @@ -381,8 +378,7 @@ func testCopyableSendableClassWithEscapingMainActorAsyncWeakCapture() { let x = KlassSendable() let _ = { [weak x] in escapingAsyncUse { @MainActor in - useValue(x) // expected-error {{sending 'x' risks causing data races}} - // expected-note @-1 {{main actor-isolated closure captures reference to mutable 'x' which remains modifiable by code in the current task}} + useValue(x) } } } @@ -392,8 +388,7 @@ func testMutableCopyableSendableClassWithEscapingMainActorAsyncWeakCapture() { x = KlassSendable() let _ = { [weak x] in escapingAsyncUse { @MainActor in - useValue(x) // expected-error {{sending 'x' risks causing data races}} - // expected-note @-1 {{main actor-isolated closure captures reference to mutable 'x' which remains modifiable by code in the current task}} + useValue(x) } } } @@ -423,8 +418,7 @@ func testCopyableSendableClassWithNonescapingMainActorAsyncWeakCapture() { let x = KlassSendable() let _ = { [weak x] in nonescapingAsyncUse { @MainActor in - useValue(x) // expected-error {{sending 'x' risks causing data races}} - // expected-note @-1 {{main actor-isolated closure captures reference to mutable 'x' which remains modifiable by code in the current task}} + useValue(x) } } } @@ -434,8 +428,7 @@ func testMutableCopyableSendableClassWithNonescapingMainActorAsyncWeakCapture() x = KlassSendable() let _ = { [weak x] in nonescapingAsyncUse { @MainActor in - useValue(x) // expected-error {{sending 'x' risks causing data races}} - // expected-note @-1 {{main actor-isolated closure captures reference to mutable 'x' which remains modifiable by code in the current task}} + useValue(x) } } } @@ -461,6 +454,608 @@ func testMutableCopyableNonsendableClassWithNonescapingMainActorAsyncWeakCapture } } +//////////////////////////////////////////// +// MARK: Advanced Weak Capture Patterns // +//////////////////////////////////////////// + +// Test: Chained closures with weak captures (single weak reference passed through chain) +// This exercises the ConvertWeakVarCaptureToWeakLet optimization for multi-level closures +func testChainedClosuresReadOnlyWeakCaptureSendable() { + let obj = KlassSendable() + + // Outer closure captures obj weakly + let outer = { [weak obj] in + // Inner closure also captures the weak reference + let inner = { + escapingAsyncUse { @MainActor in 
+ // Only reading from weak capture - should be promotable to 'let' + if let obj = obj { + useValue(obj) + } + } + } + inner() + } + outer() +} + +func testChainedClosuresReadOnlyWeakCaptureNonsendable() { + let obj = KlassNonsendable() + + let outer = { [weak obj] in + let inner = { + escapingAsyncUse { @MainActor in + if let obj = obj { // expected-error {{sending 'obj' risks causing data races}} + // expected-note @-1 {{task-isolated 'obj' is captured by a main actor-isolated closure. main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(obj) + } + } + } + inner() + } + outer() +} + +// Test: Multiple weak captures in same closure +// Verifies that optimization handles multiple weak boxes independently +func testMultipleWeakCapturesSendable() { + let obj1 = KlassSendable() + let obj2 = KlassSendable() + let obj3 = KlassSendable() + + let _ = { [weak obj1, weak obj2, weak obj3] in + escapingAsyncUse { @MainActor in + // All three are read-only - all should be promotable + if let o1 = obj1 { useValue(o1) } + if let o2 = obj2 { useValue(o2) } + if let o3 = obj3 { useValue(o3) } + } + } +} + +func testMultipleWeakCapturesNonsendable() { + let obj1 = KlassNonsendable() + let obj2 = KlassNonsendable() + let obj3 = KlassNonsendable() + + let _ = { [weak obj1, weak obj2, weak obj3] in + escapingAsyncUse { @MainActor in + if let o1 = obj1 { // expected-error {{sending 'obj1' risks causing data races}} + useValue(o1) // expected-note @-1 {{task-isolated 'obj1' is captured by a main actor-isolated closure. main actor-isolated uses in closure may race against later nonisolated uses}} + } + if let o2 = obj2 { // expected-error {{sending 'obj2' risks causing data races}} + // expected-note @-1 {{task-isolated 'obj2' is captured by a main actor-isolated closure. main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(o2) + } + if let o3 = obj3 { // expected-error {{sending 'obj3' risks causing data races}} + // expected-note @-1 {{task-isolated 'obj3' is captured by a main actor-isolated closure. main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(o3) + } + } + } +} + +// In this case, even though we are writing to the outside mutableObj, the inner +// mutableObj is actually a separate capture variable that shadows +// mutableObj. So we perform the transformation on that. +func testMixedWeakCapturesReadAndWriteSendable() { + var mutableObj: KlassSendable? = KlassSendable() + let immutableObj = KlassSendable() + + let _ = { [weak mutableObj, weak immutableObj] in + escapingAsyncUse { @MainActor in + // Read from immutable (should promote to let) + if let obj = immutableObj { + useValue(obj) + } + + // Read from immutable. The binding here is just shadowing the outside + // mutable object. + if let obj = mutableObj { + useValue(obj) + } + } + } + + mutableObj = nil +} + +// Test: Weak self pattern (common in delegate/callback scenarios) +class DelegateSendable: @unchecked Sendable { + func setupCallback() { + let _ = { [weak self] in + escapingAsyncUse { @MainActor in + // Common pattern: weak self to avoid retain cycles + guard let self = self else { return } + useValue(self) + } + } + } +} + +class DelegateNonsendable { + func setupCallback() { + let _ = { [weak self] in + escapingAsyncUse { @MainActor in + guard let self = self else { return } // expected-error {{sending 'self' risks causing data races}} + // expected-note @-1 {{task-isolated 'self' is captured by a main actor-isolated closure. 
main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(self) + } + } + } +} + +// Test: Weak capture with control flow (loops) +func testWeakCaptureInLoopSendable() { + let obj = KlassSendable() + + let _ = { [weak obj] in + escapingAsyncUse { @MainActor in + // Accessing weak capture multiple times in a loop + for _ in 0..<10 { + if let obj = obj { + useValue(obj) + } + } + } + } +} + +func testWeakCaptureInLoopNonsendable() { + let obj = KlassNonsendable() + + let _ = { [weak obj] in + escapingAsyncUse { @MainActor in + for _ in 0..<10 { + if let obj = obj { // expected-error {{sending 'obj' risks causing data races}} + // expected-note @-1 {{task-isolated 'obj' is captured by a main actor-isolated closure. main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(obj) + } + } + } + } +} + +// Test: Weak capture with conditional access +func testWeakCaptureConditionalAccessSendable() { + let obj = KlassSendable() + let condition = true + + let _ = { [weak obj] in + escapingAsyncUse { @MainActor in + if condition { + if let obj = obj { + useValue(obj) + } + } else { + // Not accessing weak capture in this branch + } + } + } +} + +// Test: Nested weak captures (closure within closure, both capture weakly) +func testNestedWeakCapturesSendable() { + let outer = KlassSendable() + + let _ = { [weak outer] in + let inner = KlassSendable() + + // TODO: We should be able to handle multiple. + let _ = { [weak outer, weak inner] in + escapingAsyncUse { @MainActor in + if let o = outer { useValue(o) } + if let i = inner { useValue(i) } + } + } + } +} + +func testNestedWeakCapturesNonsendable() { + let outer = KlassNonsendable() + + let _ = { [weak outer] in + let inner = KlassNonsendable() + + let _ = { [weak outer, weak inner] in + escapingAsyncUse { @MainActor in + if let o = outer { // expected-error {{sending 'outer' risks causing data races}} + // expected-note @-1 {{task-isolated 'outer' is captured by a main actor-isolated closure. main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(o) + } + if let i = inner { // expected-error {{sending 'inner' risks causing data races}} + // expected-note @-1 {{task-isolated 'inner' is captured by a main actor-isolated closure. main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(i) + } + } + } + } +} + +// Test: Escaping closure that returns weak reference +func testEscapingClosureReturningWeakCaptureSendable() -> (() -> KlassSendable?) { + let obj = KlassSendable() + + // Closure escapes and holds weak reference + return { [weak obj] in + return obj + } +} + +func testEscapingClosureReturningWeakCaptureNonsendable() -> (() -> KlassNonsendable?) 
{ + let obj = KlassNonsendable() + + return { [weak obj] in + return obj + } +} + +// Test: Weak capture with immediate deallocation (edge case) +func testWeakCaptureImmediateDeallocSendable() { + do { + let obj = KlassSendable() + let _ = { [weak obj] in + escapingAsyncUse { @MainActor in + // obj might already be deallocated + if let obj = obj { + useValue(obj) + } + } + } + // obj deallocates here + } +} + +let globalSendable = KlassSendable() + +// Test: Chained closures where intermediate closure modifies different captures +// This matches the SIL test pattern: callee_1 writes to one box, callee_2 reads all +func testChainedClosuresPartialWriteSendable() { + let readOnly1 = KlassSendable() + var writable = KlassSendable() + writable = KlassSendable() + let readOnly2 = KlassSendable() + + let _ = { [weak readOnly1, weak writable, weak readOnly2] in + // First level: modify writable, read others + if let obj = readOnly1 { useValue(obj) } + writable = globalSendable // Modify this one + if let obj = readOnly2 { useValue(obj) } + + // Second level: read all including the modified one + let _ = { + escapingAsyncUse { @MainActor in + if let obj = readOnly1 { useValue(obj) } // Read-only throughout + if let obj = writable { // expected-error {{sending 'writable' risks causing data races}} + // expected-note @-1 {{closure captures reference to mutable 'writable' which remains modifiable by code in the current task}} + useValue(obj) + } + if let obj = readOnly2 { useValue(obj) } // Read-only throughout + } + } + } +} + +// Test: Weak capture list with transformation +func testWeakCaptureWithTransformationSendable() { + let obj = KlassSendable() + + let _ = { [weak obj] in + // Transform weak optional to strong optional + let strongRef = obj + + escapingAsyncUse { @MainActor in + if let strong = strongRef { + useValue(strong) + } + } + } +} + +// Test: Multiple closures capturing same weak reference (but separately) +func testMultipleClosuresSameWeakCaptureSendable() { + let obj = KlassSendable() + + let closure1 = { [weak obj] in + escapingAsyncUse { @MainActor in + if let o = obj { useValue(o) } + } + } + + let closure2 = { [weak obj] in + escapingAsyncUse { @MainActor in + if let o = obj { useValue(o) } + } + } + + closure1() + closure2() +} + +func testMultipleClosuresSameWeakCaptureNonsendable() { + let obj = KlassNonsendable() + + let closure1 = { [weak obj] in + escapingAsyncUse { @MainActor in + if let o = obj { // expected-error {{sending 'obj' risks causing data races}} + // expected-note @-1 {{task-isolated 'obj' is captured by a main actor-isolated closure. main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(o) + } + } + } + + let closure2 = { [weak obj] in + escapingAsyncUse { @MainActor in + if let o = obj { // expected-error {{sending 'obj' risks causing data races}} + // expected-note @-1 {{task-isolated 'obj' is captured by a main actor-isolated closure. 
main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(o) + } + } + } + + closure1() + closure2() +} + +// Test: Empty closure with weak capture (should still optimize) +func testEmptyClosureWeakCaptureSendable() { + let obj = KlassSendable() + + let _ = { [weak obj] in + escapingAsyncUse { @MainActor in + // Capture exists but unused - still promotable to let + _ = obj + } + } +} + +/////////////////////////////////////////////////// +// MARK: Protocol-Based Weak Capture Tests // +// (Tests with class-bound protocol constraints) // +/////////////////////////////////////////////////// + +// Protocol declarations for testing +protocol ProtocolSendable: AnyObject, Sendable { + func doSomething() +} + +protocol ProtocolNonsendable: AnyObject { + func doSomething() +} + +// Test: Simple weak capture of protocol type (Sendable) +func testWeakCaptureProtocolSendable(_ obj: T) { + let _ = { [weak obj] in + escapingAsyncUse { @MainActor in + if let obj = obj { + useValue(obj) + } + } + } +} + +// Test: Simple weak capture of protocol type (Nonsendable) +func testWeakCaptureProtocolNonsendable(_ obj: T) { + let _ = { [weak obj] in + escapingAsyncUse { @MainActor in + if let obj = obj { // expected-error {{sending 'obj' risks causing data races}} + // expected-note @-1 {{task-isolated 'obj' is captured by a main actor-isolated closure. main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(obj) + } + } + } +} + +// Test: Chained closures with protocol weak captures +func testChainedClosuresProtocolSendable(_ obj: T) { + let outer = { [weak obj] in + let inner = { + escapingAsyncUse { @MainActor in + if let obj = obj { + useValue(obj) + } + } + } + inner() + } + outer() +} + +func testChainedClosuresProtocolNonsendable(_ obj: T) { + let outer = { [weak obj] in + let inner = { + escapingAsyncUse { @MainActor in + if let obj = obj { // expected-error {{sending 'obj' risks causing data races}} + // expected-note @-1 {{task-isolated 'obj' is captured by a main actor-isolated closure. main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(obj) + } + } + } + inner() + } + outer() +} + +// Test: Multiple protocol weak captures +func testMultipleProtocolWeakCapturesSendable(_ obj1: T, _ obj2: U, _ obj3: V) { + let _ = { [weak obj1, weak obj2, weak obj3] in + escapingAsyncUse { @MainActor in + if let o1 = obj1 { useValue(o1) } + if let o2 = obj2 { useValue(o2) } + if let o3 = obj3 { useValue(o3) } + } + } +} + +func testMultipleProtocolWeakCapturesNonsendable(_ obj1: T, _ obj2: U, _ obj3: V) { + let _ = { [weak obj1, weak obj2, weak obj3] in + escapingAsyncUse { @MainActor in + if let o1 = obj1 { // expected-error {{sending 'obj1' risks causing data races}} + // expected-note @-1 {{task-isolated 'obj1' is captured by a main actor-isolated closure. main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(o1) + } + if let o2 = obj2 { // expected-error {{sending 'obj2' risks causing data races}} + // expected-note @-1 {{task-isolated 'obj2' is captured by a main actor-isolated closure. main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(o2) + } + if let o3 = obj3 { // expected-error {{sending 'obj3' risks causing data races}} + // expected-note @-1 {{task-isolated 'obj3' is captured by a main actor-isolated closure. 
main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(o3) + } + } + } +} + +// Test: Protocol conforming to itself with weak self pattern +protocol DelegateProtocolSendable: AnyObject, Sendable { + func setupCallback() +} + +protocol DelegateProtocolNonsendable: AnyObject { + func setupCallback() +} + +class ConcreteDelegateSendable: DelegateProtocolSendable, @unchecked Sendable { + func setupCallback() { + let _ = { [weak self] in + escapingAsyncUse { @MainActor in + guard let self = self else { return } + useValue(self) + } + } + } +} + +class ConcreteDelegateNonsendable: DelegateProtocolNonsendable { + func setupCallback() { + let _ = { [weak self] in + escapingAsyncUse { @MainActor in + guard let self = self else { return } // expected-error {{sending 'self' risks causing data races}} + // expected-note @-1 {{task-isolated 'self' is captured by a main actor-isolated closure. main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(self) + } + } + } +} + +// Test: Nested weak captures with protocols +func testNestedProtocolWeakCapturesSendable(_ outer: T, _ inner: U) { + let _ = { [weak outer] in + let _ = { [weak outer, weak inner] in + escapingAsyncUse { @MainActor in + if let o = outer { useValue(o) } + if let i = inner { useValue(i) } + } + } + } +} + +func testNestedProtocolWeakCapturesNonsendable(_ outer: T, _ inner: U) { + let _ = { [weak outer] in + let _ = { [weak outer, weak inner] in + escapingAsyncUse { @MainActor in + if let o = outer { // expected-error {{sending 'outer' risks causing data races}} + // expected-note @-1 {{task-isolated 'outer' is captured by a main actor-isolated closure. main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(o) + } + if let i = inner { // expected-error {{sending 'inner' risks causing data races}} + // expected-note @-1 {{task-isolated 'inner' is captured by a main actor-isolated closure. main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(i) + } + } + } + } +} + +// Test: Protocol with associated type constraints (more complex generic case) +protocol DataSourceProtocol: AnyObject, Sendable { + associatedtype Item + func fetchItem() -> Item +} + +class ConcreteDataSource: DataSourceProtocol, @unchecked Sendable { + func fetchItem() -> T { + fatalError() + } +} + +func testWeakCaptureProtocolWithAssociatedTypeSendable(_ dataSource: DS) { + let _ = { [weak dataSource] in + escapingAsyncUse { @MainActor in + if let ds = dataSource { + useValue(ds) + } + } + } +} + +// Test: Weak capture in protocol extension +extension ProtocolSendable { + func createWeakCapturingClosure() -> () -> Void { + return { [weak self] in + escapingAsyncUse { @MainActor in + guard let self = self else { return } + useValue(self) + } + } + } +} + +extension ProtocolNonsendable { + func createWeakCapturingClosure() -> () -> Void { + return { [weak self] in + escapingAsyncUse { @MainActor in + guard let self = self else { return } // expected-error {{sending 'self' risks causing data races}} + // expected-note @-1 {{task-isolated 'self' is captured by a main actor-isolated closure. 
main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(self) + } + } + } +} + +// Test: Existential vs generic - both should work +func testExistentialWeakCaptureSendable(_ obj: any ProtocolSendable) { + let _ = { [weak obj] in + escapingAsyncUse { @MainActor in + if let obj = obj { + useValue(obj) + } + } + } +} + +func testExistentialWeakCaptureNonsendable(_ obj: any ProtocolNonsendable) { + let _ = { [weak obj] in + escapingAsyncUse { @MainActor in + if let obj = obj { // expected-error {{sending 'obj' risks causing data races}} + // expected-note @-1 {{task-isolated 'obj' is captured by a main actor-isolated closure. main actor-isolated uses in closure may race against later nonisolated uses}} + useValue(obj) + } + } + } +} + +// Test: Mixed protocol and class weak captures +func testMixedProtocolClassWeakCapturesSendable(_ protocolObj: T, _ classObj: KlassSendable) { + let _ = { [weak protocolObj, weak classObj] in + escapingAsyncUse { @MainActor in + if let p = protocolObj { useValue(p) } + if let c = classObj { useValue(c) } + } + } +} + +// Test: Protocol composition with weak capture +func testProtocolCompositionWeakCapture(_ obj: any (ProtocolSendable & Sendable)) { + let _ = { [weak obj] in + escapingAsyncUse { @MainActor in + if let obj = obj { + useValue(obj) + } + } + } +} + ////////////////////////////////// // MARK: Unowned Capture Lists // ////////////////////////////////// diff --git a/test/SIL/Parser/basic2.sil b/test/SIL/Parser/basic2.sil index 833beb6c32073..99d3727c103b4 100644 --- a/test/SIL/Parser/basic2.sil +++ b/test/SIL/Parser/basic2.sil @@ -504,4 +504,13 @@ bb0: dealloc_stack %0 %9999 = tuple () return %9999 : $() +} + +// CHECK-LABEL: sil [ossa] @test_inferred_immutable : $@convention(thin) (@guaranteed Klass) -> () { +// CHECK: bb0(%0 : @inferredImmutable @guaranteed $Klass): +// CHECK: } // end sil function 'test_inferred_immutable' +sil [ossa] @test_inferred_immutable : $@convention(thin) (@guaranteed Klass) -> () { +bb0(%0 : @inferredImmutable @guaranteed $Klass): + %9999 = tuple () + return %9999 : $() } \ No newline at end of file diff --git a/test/SIL/Serialization/basic2.sil b/test/SIL/Serialization/basic2.sil index 8746bad59ddc8..f31801d5087a1 100644 --- a/test/SIL/Serialization/basic2.sil +++ b/test/SIL/Serialization/basic2.sil @@ -34,6 +34,15 @@ bb0(%0 : @guaranteed $Klass): return %9999 : $() } +// CHECK-LABEL: sil [ossa] @inferred_immutable : $@convention(thin) (@guaranteed Klass) -> () { +// CHECK: bb0(%0 : @inferredImmutable @guaranteed $Klass): +// CHECK: } // end sil function 'inferred_immutable' +sil [ossa] @inferred_immutable : $@convention(thin) (@guaranteed Klass) -> () { +bb0(%0 : @inferredImmutable @guaranteed $Klass): + %9999 = tuple () + return %9999 : $() +} + // CHECK-LABEL: sil [ossa] @merge_isolation_region : $@convention(thin) (@guaranteed Klass) -> () { // CHECK: merge_isolation_region %0 : $Klass, %1 : $*C // CHECK: merge_isolation_region %1 : $*C, %0 : $Klass, %0 : $Klass diff --git a/test/SILGen/closures.swift b/test/SILGen/closures.swift index 803a6980ed17f..2bad053afd5b9 100644 --- a/test/SILGen/closures.swift +++ b/test/SILGen/closures.swift @@ -665,7 +665,7 @@ class SuperSub : SuperBase { // -- We enter with an assumed strong +1. 
// CHECK: bb0([[SELF:%.*]] : @guaranteed $UnownedSelfNestedCapture): // CHECK: [[OUTER_SELF_CAPTURE:%.*]] = alloc_box ${ var @sil_unowned UnownedSelfNestedCapture } -// CHECK: [[OUTER_SELF_LIFETIME:%.*]] = begin_borrow [lexical] [var_decl] [[OUTER_SELF_CAPTURE]] +// CHECK: [[OUTER_SELF_LIFETIME:%.*]] = begin_borrow [lexical] [[OUTER_SELF_CAPTURE]] // CHECK: [[PB:%.*]] = project_box [[OUTER_SELF_LIFETIME]] // -- strong +2 // CHECK: [[SELF_COPY:%.*]] = copy_value [[SELF]] diff --git a/test/SILGen/dynamic_self.swift b/test/SILGen/dynamic_self.swift index f69ec350f9511..db7eb96e72d49 100644 --- a/test/SILGen/dynamic_self.swift +++ b/test/SILGen/dynamic_self.swift @@ -240,7 +240,7 @@ class Z { // so that IRGen can recover metadata. // CHECK: [[WEAK_SELF:%.*]] = alloc_box ${ var @sil_weak Optional } - // CHECK: [[WEAK_SELF_LIFETIME:%.*]] = begin_borrow [lexical] [var_decl] [[WEAK_SELF]] + // CHECK: [[WEAK_SELF_LIFETIME:%.*]] = begin_borrow [lexical] [[WEAK_SELF]] // CHECK: [[FN:%.*]] = function_ref @$s12dynamic_self1ZC23testDynamicSelfCaptures1xACXDSi_tFyycfU1_ : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @thick @dynamic_self Z.Type) -> () // CHECK: [[WEAK_SELF_COPY:%.*]] = copy_value [[WEAK_SELF_LIFETIME]] : ${ var @sil_weak Optional } // CHECK-NEXT: [[DYNAMIC_SELF:%.*]] = metatype $@thick @dynamic_self Z.Type diff --git a/test/SILOptimizer/mark_weak_var_as_sendable.sil b/test/SILOptimizer/mark_weak_var_as_sendable.sil new file mode 100644 index 0000000000000..b808da57c9979 --- /dev/null +++ b/test/SILOptimizer/mark_weak_var_as_sendable.sil @@ -0,0 +1,1270 @@ +// RUN: %target-sil-opt -swift-version 6 -enable-sil-verify-all %s -mark-never-written-mutable-closure-boxes-as-immutable | %FileCheck %s + +import Swift +import SwiftShims +import Builtin + +//////////////////////// +// MARK: Declarations // +//////////////////////// + +class C : @unchecked Sendable {} +sil @use_value : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () +class NonSendable {} + +//////////////////////// +// MARK: Simple Tests // +//////////////////////// + +// CHECK-LABEL: sil private [ossa] @simple_callee : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> () { +// CHECK: bb0(%0 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional }): +// CHECK: } // end sil function 'simple_callee' +sil private [ossa] @simple_callee : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> () { +bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional }): + %1 = project_box %0, 0 + debug_value %1, var, name "c", argno 1, expr op_deref + %3 = begin_access [read] [unknown] %1 + %4 = load_weak %3 + end_access %3 + %6 = alloc_stack $Optional + store %4 to [init] %6 + %8 = function_ref @use_value : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + %9 = apply %8(%6) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + destroy_addr %6 + dealloc_stack %6 + %12 = tuple () + return %12 +} + +// CHECK-LABEL: sil [ossa] @simple_caller : $@convention(thin) (@owned C) -> () { +// CHECK: alloc_box [inferred_immutable] ${ var @sil_weak Optional } +// CHECK: } // end sil function 'simple_caller' +sil [ossa] @simple_caller : $@convention(thin) (@owned C) -> () { +bb0(%arg : @owned $C): + %0 = alloc_box ${ var @sil_weak Optional } + %1 = begin_borrow [lexical] %0 + %2 = project_box %1, 0 + %3 = enum $Optional, #Optional.some!enumelt, %arg + store_weak %3 to [init] %2 + destroy_value %3 + + %f = function_ref @simple_callee : $@convention(thin) (@guaranteed { var @sil_weak 
Optional }) -> () + %0c = copy_value %0 + %pa = partial_apply [callee_guaranteed] %f(%0c) : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> () + destroy_value %pa + end_borrow %1 + destroy_value %0 + %9999 = tuple () + return %9999 : $() +} + +// CHECK-LABEL: sil private [ossa] @simple_callee_two : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () { +// CHECK: bb0(%0 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional }, %1 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional }): +// CHECK: } // end sil function 'simple_callee_two' +sil private [ossa] @simple_callee_two : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () { +bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional }, %0a : @closureCapture @guaranteed ${ var @sil_weak Optional }): + %1 = project_box %0, 0 + debug_value %1, var, name "c", argno 1, expr op_deref + %3 = begin_access [read] [unknown] %1 + %4 = load_weak %3 + end_access %3 + %6 = alloc_stack $Optional + store %4 to [init] %6 + %8 = function_ref @use_value : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + %9 = apply %8(%6) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + destroy_addr %6 + dealloc_stack %6 + + %1a = project_box %0a, 0 + debug_value %1a, var, name "c", argno 1, expr op_deref + %3a = begin_access [read] [unknown] %1a + %4a = load_weak %3a + end_access %3a + %6a = alloc_stack $Optional + store %4a to [init] %6a + %8a = function_ref @use_value : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + %9a = apply %8a(%6a) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + destroy_addr %6a + dealloc_stack %6a + + %12 = tuple () + return %12 +} + +// CHECK-LABEL: sil [ossa] @simple_caller_two_same : $@convention(thin) (@owned C) -> () { +// CHECK: alloc_box [inferred_immutable] ${ var @sil_weak Optional } +// CHECK: alloc_box [inferred_immutable] ${ var @sil_weak Optional } +// CHECK: } // end sil function 'simple_caller_two_same' +sil [ossa] @simple_caller_two_same : $@convention(thin) (@owned C) -> () { +bb0(%arg : @owned $C): + %0 = alloc_box ${ var @sil_weak Optional } + %1 = begin_borrow [lexical] %0 + %2 = project_box %1, 0 + %3 = enum $Optional, #Optional.some!enumelt, %arg + store_weak %3 to [init] %2 + %0a = alloc_box ${ var @sil_weak Optional } + %1a = begin_borrow [lexical] %0a + %2a = project_box %1a, 0 + store_weak %3 to [init] %2a + destroy_value %3 + + %f = function_ref @simple_callee_two : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () + %0c = copy_value %0 + %0ac = copy_value %0a + %pa = partial_apply [callee_guaranteed] %f(%0c, %0ac) : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () + destroy_value %pa + end_borrow %1a + destroy_value %0a + end_borrow %1 + destroy_value %0 + %9999 = tuple () + return %9999 : $() +} + +// CHECK-LABEL: sil private [ossa] @simple_callee_two_one_write : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () { +// CHECK: bb0(%0 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional }): +// CHECK: } // end sil function 'simple_callee_two_one_write' +sil private [ossa] @simple_callee_two_one_write : $@convention(thin) (@guaranteed { var @sil_weak Optional }, 
@guaranteed { var @sil_weak Optional }) -> () { +bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional }, %0a : @closureCapture @guaranteed ${ var @sil_weak Optional }): + %1 = project_box %0, 0 + debug_value %1, var, name "c", argno 1, expr op_deref + %3 = begin_access [read] [unknown] %1 + %4 = load_weak %3 + end_access %3 + %6 = alloc_stack $Optional + store %4 to [init] %6 + %8 = function_ref @use_value : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + %9 = apply %8(%6) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + destroy_addr %6 + dealloc_stack %6 + + %1a = project_box %0a, 0 + debug_value %1a, var, name "c", argno 1, expr op_deref + %3a = begin_access [read] [unknown] %1a + %4a = load_weak %3a + end_access %3a + %4w = begin_access [modify] [unknown] %1a + %enum = enum $Optional, #Optional.none + store_weak %enum to %4w + end_access %4w + %6a = alloc_stack $Optional + store %4a to [init] %6a + %8a = function_ref @use_value : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + %9a = apply %8a(%6a) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + destroy_addr %6a + dealloc_stack %6a + + %12 = tuple () + return %12 +} + +// CHECK-LABEL: sil [ossa] @simple_caller_two_one_write : $@convention(thin) (@owned C) -> () { +// CHECK: alloc_box [inferred_immutable] ${ var @sil_weak Optional } +// CHECK: alloc_box ${ var @sil_weak Optional } +// CHECK-NOT: alloc_box [inferred_immutable] ${ var @sil_weak Optional } +// CHECK: } // end sil function 'simple_caller_two_one_write' +sil [ossa] @simple_caller_two_one_write : $@convention(thin) (@owned C) -> () { +bb0(%arg : @owned $C): + %0 = alloc_box ${ var @sil_weak Optional } + %1 = begin_borrow [lexical] %0 + %2 = project_box %1, 0 + %3 = enum $Optional, #Optional.some!enumelt, %arg + store_weak %3 to [init] %2 + %0a = alloc_box ${ var @sil_weak Optional } + %1a = begin_borrow [lexical] %0a + %2a = project_box %1a, 0 + store_weak %3 to [init] %2a + destroy_value %3 + + %f = function_ref @simple_callee_two_one_write : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () + %0c = copy_value %0 + %0ac = copy_value %0a + %pa = partial_apply [callee_guaranteed] %f(%0c, %0ac) : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () + destroy_value %pa + end_borrow %1a + destroy_value %0a + end_borrow %1 + destroy_value %0 + %9999 = tuple () + return %9999 : $() +} + +// CHECK-LABEL: sil private [ossa] @simple_callee_two_two_write : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () { +// CHECK: bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional }, %1 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional }): +// CHECK: } // end sil function 'simple_callee_two_two_write' +sil private [ossa] @simple_callee_two_two_write : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () { +bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional }, %0a : @closureCapture @guaranteed ${ var @sil_weak Optional }): + %1 = project_box %0, 0 + debug_value %1, var, name "c", argno 1, expr op_deref + %3 = begin_access [read] [unknown] %1 + %4 = load_weak %3 + end_access %3 + %4w = begin_access [modify] [unknown] %1 + %enum = enum $Optional, #Optional.none + store_weak %enum to %4w + end_access %4w + + %6 = alloc_stack $Optional + store %4 to [init] %6 + %8 = function_ref @use_value : 
$@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + %9 = apply %8(%6) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + destroy_addr %6 + dealloc_stack %6 + + %1a = project_box %0a, 0 + debug_value %1a, var, name "c", argno 1, expr op_deref + %3a = begin_access [read] [unknown] %1a + %4a = load_weak %3a + end_access %3a + %6a = alloc_stack $Optional + store %4a to [init] %6a + %8a = function_ref @use_value : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + %9a = apply %8a(%6a) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + destroy_addr %6a + dealloc_stack %6a + + %12 = tuple () + return %12 +} + +// CHECK-LABEL: sil [ossa] @simple_caller_two_two_write : $@convention(thin) (@owned C) -> () { +// CHECK: alloc_box ${ var @sil_weak Optional } +// CHECK: alloc_box [inferred_immutable] ${ var @sil_weak Optional } +// CHECK: } // end sil function 'simple_caller_two_two_write' +sil [ossa] @simple_caller_two_two_write : $@convention(thin) (@owned C) -> () { +bb0(%arg : @owned $C): + %0 = alloc_box ${ var @sil_weak Optional } + %1 = begin_borrow [lexical] %0 + %2 = project_box %1, 0 + %3 = enum $Optional, #Optional.some!enumelt, %arg + store_weak %3 to [init] %2 + %0a = alloc_box ${ var @sil_weak Optional } + %1a = begin_borrow [lexical] %0a + %2a = project_box %1a, 0 + store_weak %3 to [init] %2a + destroy_value %3 + + %f = function_ref @simple_callee_two_two_write : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () + %0c = copy_value %0 + %0ac = copy_value %0a + %pa = partial_apply [callee_guaranteed] %f(%0c, %0ac) : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () + destroy_value %pa + end_borrow %1a + destroy_value %0a + end_borrow %1 + destroy_value %0 + %9999 = tuple () + return %9999 : $() +} + +/// We do not specialize in this case since our value is non-Sendable. We only transform if our value is actually Sendable. 
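Illustrative aside (not part of the patch): in Swift terms, the distinction this test pins down looks roughly like the sketch below. The class declarations mirror C and NonSendable from the top of this test file; only the box whose payload class is Sendable is eligible to be marked immutable.

class C: @unchecked Sendable {}
class NonSendable {}

func useValue<T>(_ t: T) {}

func weakCapturesOfSendableAndNot() {
  let c = C()
  let n = NonSendable()
  let _ = { [weak c, weak n] in
    // Both boxes are only ever read, but only the Optional<C> box can be
    // marked [inferred_immutable]; the Optional<NonSendable> box is left
    // untouched, matching simple_caller_nonsendable below.
    if let c { useValue(c) }
    if let n { useValue(n) }
  }
}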
+// CHECK-LABEL: sil private [ossa] @simple_callee_nonsendable : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> () { +// CHECK: bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional }): +// CHECK: } // end sil function 'simple_callee_nonsendable' +sil private [ossa] @simple_callee_nonsendable : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> () { +bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional }): + %1 = project_box %0, 0 + debug_value %1, var, name "c", argno 1, expr op_deref + %3 = begin_access [read] [unknown] %1 + %4 = load_weak %3 + end_access %3 + %6 = alloc_stack $Optional + store %4 to [init] %6 + %8 = function_ref @use_value : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + %9 = apply %8(%6) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + destroy_addr %6 + dealloc_stack %6 + %12 = tuple () + return %12 +} + +// CHECK-LABEL: sil [ossa] @simple_caller_nonsendable : $@convention(thin) (@owned NonSendable) -> () { +// CHECK: alloc_box ${ var @sil_weak Optional } +// CHECK-NOT: [inferred_immutable] +// CHECK: } // end sil function 'simple_caller_nonsendable' +sil [ossa] @simple_caller_nonsendable : $@convention(thin) (@owned NonSendable) -> () { +bb0(%arg : @owned $NonSendable): + %0 = alloc_box ${ var @sil_weak Optional } + %1 = begin_borrow [lexical] %0 + %2 = project_box %1, 0 + %3 = enum $Optional, #Optional.some!enumelt, %arg + store_weak %3 to [init] %2 + destroy_value %3 + + %f = function_ref @simple_callee_nonsendable : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> () + %0c = copy_value %0 + %pa = partial_apply [callee_guaranteed] %f(%0c) : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> () + destroy_value %pa + end_borrow %1 + destroy_value %0 + %9999 = tuple () + return %9999 : $() +} + +////////////////////////////// +// MARK: Indirect Out Tests // +////////////////////////////// + +// These tests validate that we pattern match known patterns of indirect out +// parameters. We do not need to consider full generics since only class types +// and class bound archetypes can be made weak. 
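Illustrative aside (not part of the patch): one Swift-level shape that can give rise to this kind of indirect result is generic code over a class-bound protocol whose closure hands back the weak capture, roughly as sketched below. The protocol and function names are made up for the sketch.

protocol Ref: AnyObject, Sendable {}

// A closure that returns its weak capture. Depending on how the result is
// lowered, the SIL closure may return it through an indirect @out parameter,
// which is the pattern these tests match; the capture box itself is never
// written after initialization, so it can still be inferred immutable.
func makeAccessor<T: Ref>(_ value: T) -> () -> T? {
  return { [weak value] in
    value
  }
}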
+ +// CHECK-LABEL: sil private [ossa] @indirect_out_callee : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> @out Optional { +// CHECK: bb0(%0 : $*Optional, %1 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional }): +// CHECK: } // end sil function 'indirect_out_callee' +sil private [ossa] @indirect_out_callee : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> @out Optional { +bb0(%result : $*Optional, %0 : @closureCapture @guaranteed ${ var @sil_weak Optional }): + %1 = project_box %0, 0 + debug_value %1, var, name "c", argno 1, expr op_deref + %3 = begin_access [read] [unknown] %1 + %4 = load_weak %3 + end_access %3 + %6 = alloc_stack $Optional + store %4 to [init] %6 + %8 = function_ref @use_value : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + %9 = apply %8(%6) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + copy_addr [take] %6 to [init] %result + dealloc_stack %6 + %12 = tuple () + return %12 +} + +// CHECK-LABEL: sil [ossa] @indirect_out_caller : $@convention(thin) (@owned C) -> () { +// CHECK: alloc_box [inferred_immutable] ${ var @sil_weak Optional } +// CHECK: } // end sil function 'indirect_out_caller' +sil [ossa] @indirect_out_caller : $@convention(thin) (@owned C) -> () { +bb0(%arg : @owned $C): + %0 = alloc_box ${ var @sil_weak Optional } + %1 = begin_borrow [lexical] %0 + %2 = project_box %1, 0 + %3 = enum $Optional, #Optional.some!enumelt, %arg + store_weak %3 to [init] %2 + destroy_value %3 + + %f = function_ref @indirect_out_callee : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> @out Optional + %0c = copy_value %0 + %pa = partial_apply [callee_guaranteed] %f(%0c) : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> @out Optional + destroy_value %pa + end_borrow %1 + destroy_value %0 + %9999 = tuple () + return %9999 : $() +} + +// CHECK-LABEL: sil [ossa] @closure_escapes_caller : $@convention(thin) (@owned C) -> @owned @callee_guaranteed () -> () { +// CHECK: alloc_box [inferred_immutable] ${ var @sil_weak Optional } +// CHECK: } // end sil function 'closure_escapes_caller' +sil [ossa] @closure_escapes_caller : $@convention(thin) (@owned C) -> @owned @callee_guaranteed () -> () { +bb0(%arg : @owned $C): + %box = alloc_box ${ var @sil_weak Optional } + %box_b = begin_borrow [lexical] %box + %box_p = project_box %box_b, 0 + %arg_opt = enum $Optional, #Optional.some!enumelt, %arg + store_weak %arg_opt to [init] %box_p + destroy_value %arg_opt + + %fn = function_ref @simple_callee : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> () + %box_c = copy_value %box + %closure = partial_apply [callee_guaranteed] %fn(%box_c) : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> () + + end_borrow %box_b + destroy_value %box + + return %closure : $@callee_guaranteed () -> () +} + +///////////////////////////////// +// MARK: Interesting CallGraph // +///////////////////////////////// + +// CHECK-LABEL: sil [ossa] @chained_callee : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> () { +// CHECK: bb0(%0 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional }): +// CHECK: } // end sil function 'chained_callee' +sil [ossa] @chained_callee : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> () { +bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional }): + debug_value %0, var, name "c", argno 1, expr op_deref + %2 = function_ref @simple_callee : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> 
() + %1 = copy_value %0 + %3 = partial_apply [callee_guaranteed] %2(%1) : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> () + destroy_value %3 + %4 = tuple () + return %4 +} + +// CHECK-LABEL: sil [ossa] @simple_chained_caller : $@convention(thin) (@owned C) -> () { +// CHECK: alloc_box [inferred_immutable] ${ var @sil_weak Optional } +// CHECK: } // end sil function 'simple_chained_caller' +sil [ossa] @simple_chained_caller : $@convention(thin) (@owned C) -> () { +bb0(%arg : @owned $C): + %0 = alloc_box ${ var @sil_weak Optional } + %1 = begin_borrow [lexical] %0 + %2 = project_box %1, 0 + %3 = enum $Optional, #Optional.some!enumelt, %arg + store_weak %3 to [init] %2 + destroy_value %3 + + %f = function_ref @chained_callee : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> () + %0c = copy_value %0 + %pa = partial_apply [callee_guaranteed] %f(%0c) : $@convention(thin) (@guaranteed { var @sil_weak Optional }) -> () + destroy_value %pa + end_borrow %1 + destroy_value %0 + %9999 = tuple () + return %9999 : $() +} + +// Test 1: chained_read_all_with_intermediate_write +// callee_1 writes to box #2, callee_2 reads from all boxes +// Expected: box #1 and #3 convert to let, box #2 stays var + +// CHECK-LABEL: sil private [ossa] @chained_callee_2_read_all_with_intermediate_write : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () { +// CHECK: bb0(%0 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional }, %2 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional }): +// CHECK: } // end sil function 'chained_callee_2_read_all_with_intermediate_write' +sil private [ossa] @chained_callee_2_read_all_with_intermediate_write : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () { +bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional }, %2 : @closureCapture @guaranteed ${ var @sil_weak Optional }): + // Read from box #1 + %3 = project_box %0, 0 + %4 = begin_access [read] [unknown] %3 + %5 = load_weak %4 + end_access %4 + %7 = alloc_stack $Optional + store %5 to [init] %7 + %9 = function_ref @use_value : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + %10 = apply %9(%7) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + destroy_addr %7 + dealloc_stack %7 + + // Read from box #2 + %13 = project_box %1, 0 + %14 = begin_access [read] [unknown] %13 + %15 = load_weak %14 + end_access %14 + %17 = alloc_stack $Optional + store %15 to [init] %17 + %19 = function_ref @use_value : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + %20 = apply %19(%17) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + destroy_addr %17 + dealloc_stack %17 + + // Read from box #3 + %23 = project_box %2, 0 + %24 = begin_access [read] [unknown] %23 + %25 = load_weak %24 + end_access %24 + %27 = alloc_stack $Optional + store %25 to [init] %27 + %29 = function_ref @use_value : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + %30 = apply %29(%27) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + destroy_addr %27 + dealloc_stack %27 + + %33 = tuple () + return %33 : $() +} + +// CHECK-LABEL: sil private [ossa] @chained_callee_1_read_all_with_intermediate_write : $@convention(thin) (@guaranteed { var 
@sil_weak Optional }, @guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () { +// CHECK: bb0(%0 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional }, %2 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional }): +// CHECK: } // end sil function 'chained_callee_1_read_all_with_intermediate_write' +sil private [ossa] @chained_callee_1_read_all_with_intermediate_write : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () { +bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional }, %2 : @closureCapture @guaranteed ${ var @sil_weak Optional }): + // Read from box #1 + %3 = project_box %0, 0 + %4 = begin_access [read] [unknown] %3 + %5 = load_weak %4 + end_access %4 + %7 = alloc_stack $Optional + store %5 to [init] %7 + %9 = function_ref @use_value : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + %10 = apply %9(%7) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + destroy_addr %7 + dealloc_stack %7 + + // Write to box #2 + %13 = project_box %1, 0 + %14 = begin_access [modify] [unknown] %13 + %15 = enum $Optional, #Optional.none + store_weak %15 to %14 + end_access %14 + + // Read from box #3 + %18 = project_box %2, 0 + %19 = begin_access [read] [unknown] %18 + %20 = load_weak %19 + end_access %19 + %22 = alloc_stack $Optional + store %20 to [init] %22 + %24 = function_ref @use_value : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + %25 = apply %24(%22) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> () + destroy_addr %22 + dealloc_stack %22 + + // Form closure to callee_2 + %28 = function_ref @chained_callee_2_read_all_with_intermediate_write : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () + %29 = copy_value %0 + %30 = copy_value %1 + %31 = copy_value %2 + %32 = partial_apply [callee_guaranteed] %28(%29, %30, %31) : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () + destroy_value %32 + %34 = tuple () + return %34 : $() +} + +// CHECK-LABEL: sil [ossa] @chained_caller_read_all_with_intermediate_write : $@convention(thin) (@owned C) -> () { +// CHECK: alloc_box [inferred_immutable] ${ var @sil_weak Optional } +// CHECK: alloc_box ${ var @sil_weak Optional } +// CHECK: alloc_box [inferred_immutable] ${ var @sil_weak Optional } +// CHECK: } // end sil function 'chained_caller_read_all_with_intermediate_write' +sil [ossa] @chained_caller_read_all_with_intermediate_write : $@convention(thin) (@owned C) -> () { +bb0(%arg : @owned $C): + // Create 3 boxes + %0 = alloc_box ${ var @sil_weak Optional } + %1 = begin_borrow [lexical] %0 + %2 = project_box %1, 0 + %3 = enum $Optional, #Optional.some!enumelt, %arg + store_weak %3 to [init] %2 + + %0a = alloc_box ${ var @sil_weak Optional } + %1a = begin_borrow [lexical] %0a + %2a = project_box %1a, 0 + store_weak %3 to [init] %2a + + %0b = alloc_box ${ var @sil_weak Optional } + %1b = begin_borrow [lexical] %0b + %2b = project_box %1b, 0 + store_weak %3 to [init] %2b + + destroy_value %3 + + // Form closure + %f = function_ref @chained_callee_1_read_all_with_intermediate_write : $@convention(thin) (@guaranteed { var @sil_weak 
Optional }, @guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () + %0c = copy_value %0 + %0ac = copy_value %0a + %0bc = copy_value %0b + %pa = partial_apply [callee_guaranteed] %f(%0c, %0ac, %0bc) : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () + destroy_value %pa + + end_borrow %1b + destroy_value %0b + end_borrow %1a + destroy_value %0a + end_borrow %1 + destroy_value %0 + %9999 = tuple () + return %9999 : $() +} + +// Test 2: chained_write_to_first_param +// callee_1 only forms closure, callee_2 writes to box #1 +// Expected: box #1 stays var, boxes #2 and #3 convert to let + +// CHECK-LABEL: sil private [ossa] @chained_callee_2_write_to_first_param : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () { +// CHECK: bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional }, %1 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional }, %2 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional }): +// CHECK: } // end sil function 'chained_callee_2_write_to_first_param' +sil private [ossa] @chained_callee_2_write_to_first_param : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () { +bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional }, %2 : @closureCapture @guaranteed ${ var @sil_weak Optional }): + // Write to box #1 + %3 = project_box %0, 0 + %4 = begin_access [modify] [unknown] %3 + %5 = enum $Optional, #Optional.none + store_weak %5 to %4 + end_access %4 + + %8 = tuple () + return %8 : $() +} + +// CHECK-LABEL: sil private [ossa] @chained_callee_1_write_to_first_param : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () { +// CHECK: bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional }, %1 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional }, %2 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional }): +// CHECK: } // end sil function 'chained_callee_1_write_to_first_param' +sil private [ossa] @chained_callee_1_write_to_first_param : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () { +bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional }, %2 : @closureCapture @guaranteed ${ var @sil_weak Optional }): + // Form closure to callee_2 + %3 = function_ref @chained_callee_2_write_to_first_param : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () + %4 = copy_value %0 + %5 = copy_value %1 + %6 = copy_value %2 + %7 = partial_apply [callee_guaranteed] %3(%4, %5, %6) : $@convention(thin) (@guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }, @guaranteed { var @sil_weak Optional }) -> () + destroy_value %7 + %9 = tuple () + return %9 : $() +} + +// CHECK-LABEL: sil [ossa] @chained_caller_write_to_first_param : $@convention(thin) (@owned C) -> () { +// CHECK: alloc_box ${ var @sil_weak Optional } +// CHECK: alloc_box 
[inferred_immutable] ${ var @sil_weak Optional<C> }
+// CHECK: alloc_box [inferred_immutable] ${ var @sil_weak Optional<C> }
+// CHECK: } // end sil function 'chained_caller_write_to_first_param'
+sil [ossa] @chained_caller_write_to_first_param : $@convention(thin) (@owned C) -> () {
+bb0(%arg : @owned $C):
+  %0 = alloc_box ${ var @sil_weak Optional<C> }
+  %1 = begin_borrow [lexical] %0
+  %2 = project_box %1, 0
+  %3 = enum $Optional<C>, #Optional.some!enumelt, %arg
+  store_weak %3 to [init] %2
+
+  %0a = alloc_box ${ var @sil_weak Optional<C> }
+  %1a = begin_borrow [lexical] %0a
+  %2a = project_box %1a, 0
+  store_weak %3 to [init] %2a
+
+  %0b = alloc_box ${ var @sil_weak Optional<C> }
+  %1b = begin_borrow [lexical] %0b
+  %2b = project_box %1b, 0
+  store_weak %3 to [init] %2b
+
+  destroy_value %3
+
+  %f = function_ref @chained_callee_1_write_to_first_param : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  %0c = copy_value %0
+  %0ac = copy_value %0a
+  %0bc = copy_value %0b
+  %pa = partial_apply [callee_guaranteed] %f(%0c, %0ac, %0bc) : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  destroy_value %pa
+
+  end_borrow %1b
+  destroy_value %0b
+  end_borrow %1a
+  destroy_value %0a
+  end_borrow %1
+  destroy_value %0
+  %9999 = tuple ()
+  return %9999 : $()
+}
+
+// Test 3: chained_write_to_second_param
+// callee_1 only forms closure, callee_2 writes to box #2
+// Expected: boxes #1 and #3 convert to let, box #2 stays var
+
+// CHECK-LABEL: sil private [ossa] @chained_callee_2_write_to_second_param : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> () {
+// CHECK: bb0(%0 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional<C> }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %2 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional<C> }):
+// CHECK: } // end sil function 'chained_callee_2_write_to_second_param'
+sil private [ossa] @chained_callee_2_write_to_second_param : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> () {
+bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %2 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }):
+  // Write to box #2
+  %3 = project_box %1, 0
+  %4 = begin_access [modify] [unknown] %3
+  %5 = enum $Optional<C>, #Optional.none
+  store_weak %5 to %4
+  end_access %4
+
+  %8 = tuple ()
+  return %8 : $()
+}
+
+// CHECK-LABEL: sil private [ossa] @chained_callee_1_write_to_second_param : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> () {
+// CHECK: bb0(%0 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional<C> }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %2 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional<C> }):
+// CHECK: } // end sil function 'chained_callee_1_write_to_second_param'
+sil private [ossa] @chained_callee_1_write_to_second_param : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> () {
+bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %2 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }):
+  // Form closure to callee_2
+  %3 = function_ref @chained_callee_2_write_to_second_param : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  %4 = copy_value %0
+  %5 = copy_value %1
+  %6 = copy_value %2
+  %7 = partial_apply [callee_guaranteed] %3(%4, %5, %6) : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  destroy_value %7
+  %9 = tuple ()
+  return %9 : $()
+}
+
+// CHECK-LABEL: sil [ossa] @chained_caller_write_to_second_param : $@convention(thin) (@owned C) -> () {
+// CHECK: alloc_box [inferred_immutable] ${ var @sil_weak Optional<C> }
+// CHECK: alloc_box ${ var @sil_weak Optional<C> }
+// CHECK: alloc_box [inferred_immutable] ${ var @sil_weak Optional<C> }
+// CHECK: } // end sil function 'chained_caller_write_to_second_param'
+sil [ossa] @chained_caller_write_to_second_param : $@convention(thin) (@owned C) -> () {
+bb0(%arg : @owned $C):
+  %0 = alloc_box ${ var @sil_weak Optional<C> }
+  %1 = begin_borrow [lexical] %0
+  %2 = project_box %1, 0
+  %3 = enum $Optional<C>, #Optional.some!enumelt, %arg
+  store_weak %3 to [init] %2
+
+  %0a = alloc_box ${ var @sil_weak Optional<C> }
+  %1a = begin_borrow [lexical] %0a
+  %2a = project_box %1a, 0
+  store_weak %3 to [init] %2a
+
+  %0b = alloc_box ${ var @sil_weak Optional<C> }
+  %1b = begin_borrow [lexical] %0b
+  %2b = project_box %1b, 0
+  store_weak %3 to [init] %2b
+
+  destroy_value %3
+
+  %f = function_ref @chained_callee_1_write_to_second_param : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  %0c = copy_value %0
+  %0ac = copy_value %0a
+  %0bc = copy_value %0b
+  %pa = partial_apply [callee_guaranteed] %f(%0c, %0ac, %0bc) : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  destroy_value %pa
+
+  end_borrow %1b
+  destroy_value %0b
+  end_borrow %1a
+  destroy_value %0a
+  end_borrow %1
+  destroy_value %0
+  %9999 = tuple ()
+  return %9999 : $()
+}
+
+// Test 4: chained_write_to_third_param
+// callee_1 only forms closure, callee_2 writes to box #3
+// Expected: boxes #1 and #2 convert to let, box #3 stays var
+
+// CHECK-LABEL: sil private [ossa] @chained_callee_2_write_to_third_param : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> () {
+// CHECK: bb0(%0 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional<C> }, %1 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional<C> }, %2 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }):
+// CHECK: } // end sil function 'chained_callee_2_write_to_third_param'
+sil private [ossa] @chained_callee_2_write_to_third_param : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> () {
+bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %2 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }):
+  // Write to box #3
+  %3 = project_box %2, 0
+  %4 = begin_access [modify] [unknown] %3
+  %5 = enum $Optional<C>, #Optional.none
+  store_weak %5 to %4
+  end_access %4
+
+  %8 = tuple ()
+  return %8 : $()
+}
+
+// CHECK-LABEL: sil private [ossa] @chained_callee_1_write_to_third_param : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> () {
+// CHECK: bb0(%0 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional<C> }, %1 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional<C> }, %2 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }):
+// CHECK: } // end sil function 'chained_callee_1_write_to_third_param'
+sil private [ossa] @chained_callee_1_write_to_third_param : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> () {
+bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %2 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }):
+  // Form closure to callee_2
+  %3 = function_ref @chained_callee_2_write_to_third_param : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  %4 = copy_value %0
+  %5 = copy_value %1
+  %6 = copy_value %2
+  %7 = partial_apply [callee_guaranteed] %3(%4, %5, %6) : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  destroy_value %7
+  %9 = tuple ()
+  return %9 : $()
+}
+
+// CHECK-LABEL: sil [ossa] @chained_caller_write_to_third_param : $@convention(thin) (@owned C) -> () {
+// CHECK: alloc_box [inferred_immutable] ${ var @sil_weak Optional<C> }
+// CHECK: alloc_box [inferred_immutable] ${ var @sil_weak Optional<C> }
+// CHECK: alloc_box ${ var @sil_weak Optional<C> }
+// CHECK: } // end sil function 'chained_caller_write_to_third_param'
+sil [ossa] @chained_caller_write_to_third_param : $@convention(thin) (@owned C) -> () {
+bb0(%arg : @owned $C):
+  %0 = alloc_box ${ var @sil_weak Optional<C> }
+  %1 = begin_borrow [lexical] %0
+  %2 = project_box %1, 0
+  %3 = enum $Optional<C>, #Optional.some!enumelt, %arg
+  store_weak %3 to [init] %2
+
+  %0a = alloc_box ${ var @sil_weak Optional<C> }
+  %1a = begin_borrow [lexical] %0a
+  %2a = project_box %1a, 0
+  store_weak %3 to [init] %2a
+
+  %0b = alloc_box ${ var @sil_weak Optional<C> }
+  %1b = begin_borrow [lexical] %0b
+  %2b = project_box %1b, 0
+  store_weak %3 to [init] %2b
+
+  destroy_value %3
+
+  %f = function_ref @chained_callee_1_write_to_third_param : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  %0c = copy_value %0
+  %0ac = copy_value %0a
+  %0bc = copy_value %0b
+  %pa = partial_apply [callee_guaranteed] %f(%0c, %0ac, %0bc) : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  destroy_value %pa
+
+  end_borrow %1b
+  destroy_value %0b
+  end_borrow %1a
+  destroy_value %0a
+  end_borrow %1
+  destroy_value %0
+  %9999 = tuple ()
+  return %9999 : $()
+}
+
+// Test 5: chained_write_to_first_and_second
+// callee_1 only forms closure, callee_2 writes to boxes #1 and #2
+// Expected: boxes #1 and #2 stay var, box #3 converts to let
+
+// CHECK-LABEL: sil private [ossa] @chained_callee_2_write_to_first_and_second : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> () {
+// CHECK: bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %2 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional<C> }):
+// CHECK: } // end sil function 'chained_callee_2_write_to_first_and_second'
+sil private [ossa] @chained_callee_2_write_to_first_and_second : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> () {
+bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %2 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }):
+  // Write to box #1
+  %3 = project_box %0, 0
+  %4 = begin_access [modify] [unknown] %3
+  %5 = enum $Optional<C>, #Optional.none
+  store_weak %5 to %4
+  end_access %4
+
+  // Write to box #2
+  %8 = project_box %1, 0
+  %9 = begin_access [modify] [unknown] %8
+  %10 = enum $Optional<C>, #Optional.none
+  store_weak %10 to %9
+  end_access %9
+
+  %13 = tuple ()
+  return %13 : $()
+}
+
+// CHECK-LABEL: sil private [ossa] @chained_callee_1_write_to_first_and_second : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> () {
+// CHECK: bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %2 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional<C> }):
+// CHECK: } // end sil function 'chained_callee_1_write_to_first_and_second'
+sil private [ossa] @chained_callee_1_write_to_first_and_second : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> () {
+bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %2 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }):
+  // Form closure to callee_2
+  %3 = function_ref @chained_callee_2_write_to_first_and_second : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  %4 = copy_value %0
+  %5 = copy_value %1
+  %6 = copy_value %2
+  %7 = partial_apply [callee_guaranteed] %3(%4, %5, %6) : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  destroy_value %7
+  %9 = tuple ()
+  return %9 : $()
+}
+
+// CHECK-LABEL: sil [ossa] @chained_caller_write_to_first_and_second : $@convention(thin) (@owned C) -> () {
+// CHECK: alloc_box ${ var @sil_weak Optional<C> }
+// CHECK: alloc_box ${ var @sil_weak Optional<C> }
+// CHECK: alloc_box [inferred_immutable] ${ var @sil_weak Optional<C> }
+// CHECK: } // end sil function 'chained_caller_write_to_first_and_second'
+sil [ossa] @chained_caller_write_to_first_and_second : $@convention(thin) (@owned C) -> () {
+bb0(%arg : @owned $C):
+  %0 = alloc_box ${ var @sil_weak Optional<C> }
+  %1 = begin_borrow [lexical] %0
+  %2 = project_box %1, 0
+  %3 = enum $Optional<C>, #Optional.some!enumelt, %arg
+  store_weak %3 to [init] %2
+
+  %0a = alloc_box ${ var @sil_weak Optional<C> }
+  %1a = begin_borrow [lexical] %0a
+  %2a = project_box %1a, 0
+  store_weak %3 to [init] %2a
+
+  %0b = alloc_box ${ var @sil_weak Optional<C> }
+  %1b = begin_borrow [lexical] %0b
+  %2b = project_box %1b, 0
+  store_weak %3 to [init] %2b
+
+  destroy_value %3
+
+  %f = function_ref @chained_callee_1_write_to_first_and_second : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  %0c = copy_value %0
+  %0ac = copy_value %0a
+  %0bc = copy_value %0b
+  %pa = partial_apply [callee_guaranteed] %f(%0c, %0ac, %0bc) : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  destroy_value %pa
+
+  end_borrow %1b
+  destroy_value %0b
+  end_borrow %1a
+  destroy_value %0a
+  end_borrow %1
+  destroy_value %0
+  %9999 = tuple ()
+  return %9999 : $()
+}
+
+// Test 6: chained_write_to_all_params
+// callee_1 only forms closure, callee_2 writes to all boxes
+// Expected: all boxes stay var (no conversion)
+
+// CHECK-LABEL: sil private [ossa] @chained_callee_2_write_to_all_params : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> () {
+// CHECK: bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %2 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }):
+// CHECK: } // end sil function 'chained_callee_2_write_to_all_params'
+sil private [ossa] @chained_callee_2_write_to_all_params : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> () {
+bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %2 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }):
+  // Write to box #1
+  %3 = project_box %0, 0
+  %4 = begin_access [modify] [unknown] %3
+  %5 = enum $Optional<C>, #Optional.none
+  store_weak %5 to %4
+  end_access %4
+
+  // Write to box #2
+  %8 = project_box %1, 0
+  %9 = begin_access [modify] [unknown] %8
+  %10 = enum $Optional<C>, #Optional.none
+  store_weak %10 to %9
+  end_access %9
+
+  // Write to box #3
+  %13 = project_box %2, 0
+  %14 = begin_access [modify] [unknown] %13
+  %15 = enum $Optional<C>, #Optional.none
+  store_weak %15 to %14
+  end_access %14
+
+  %18 = tuple ()
+  return %18 : $()
+}
+
+// CHECK-LABEL: sil private [ossa] @chained_callee_1_write_to_all_params : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> () {
+// CHECK: bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %2 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }):
+// CHECK: } // end sil function 'chained_callee_1_write_to_all_params'
+sil private [ossa] @chained_callee_1_write_to_all_params : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> () {
+bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %2 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }):
+  // Form closure to callee_2
+  %3 = function_ref @chained_callee_2_write_to_all_params : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  %4 = copy_value %0
+  %5 = copy_value %1
+  %6 = copy_value %2
+  %7 = partial_apply [callee_guaranteed] %3(%4, %5, %6) : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  destroy_value %7
+  %9 = tuple ()
+  return %9 : $()
+}
+
+// CHECK-LABEL: sil [ossa] @chained_caller_write_to_all_params : $@convention(thin) (@owned C) -> () {
+// CHECK: alloc_box ${ var @sil_weak Optional<C> }
+// CHECK: alloc_box ${ var @sil_weak Optional<C> }
+// CHECK: alloc_box ${ var @sil_weak Optional<C> }
+// CHECK-NOT: [inferred_immutable]
+// CHECK: } // end sil function 'chained_caller_write_to_all_params'
+sil [ossa] @chained_caller_write_to_all_params : $@convention(thin) (@owned C) -> () {
+bb0(%arg : @owned $C):
+  %0 = alloc_box ${ var @sil_weak Optional<C> }
+  %1 = begin_borrow [lexical] %0
+  %2 = project_box %1, 0
+  %3 = enum $Optional<C>, #Optional.some!enumelt, %arg
+  store_weak %3 to [init] %2
+
+  %0a = alloc_box ${ var @sil_weak Optional<C> }
+  %1a = begin_borrow [lexical] %0a
+  %2a = project_box %1a, 0
+  store_weak %3 to [init] %2a
+
+  %0b = alloc_box ${ var @sil_weak Optional<C> }
+  %1b = begin_borrow [lexical] %0b
+  %2b = project_box %1b, 0
+  store_weak %3 to [init] %2b
+
+  destroy_value %3
+
+  %f = function_ref @chained_callee_1_write_to_all_params : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  %0c = copy_value %0
+  %0ac = copy_value %0a
+  %0bc = copy_value %0b
+  %pa = partial_apply [callee_guaranteed] %f(%0c, %0ac, %0bc) : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  destroy_value %pa
+
+  end_borrow %1b
+  destroy_value %0b
+  end_borrow %1a
+  destroy_value %0a
+  end_borrow %1
+  destroy_value %0
+  %9999 = tuple ()
+  return %9999 : $()
+}
+
+/////////////////////////////////////////////
+// MARK: Caller Writes Multiple Times //
+/////////////////////////////////////////////
+
+// Test that we do NOT mark a box as immutable when the caller writes to it multiple times.
+// This ensures we don't accidentally pattern match unexpected cases.
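+//
+// Roughly the source-level shape exercised below (an illustrative sketch only;
+// the local names and the takeClosure helper are not taken from this test):
+// reassigning a weak var before the closure is formed means the box receives
+// more than one store_weak, so it must stay var even though the closure itself
+// never writes through its capture:
+//
+//   weak var w: C? = c        // first store_weak (initialization)
+//   w = nil                   // second store_weak: box may not become a let
+//   takeClosure { _ = w }     // the closure only reads the capture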
+
+// CHECK-LABEL: sil [ossa] @simple_caller_write_twice : $@convention(thin) (@owned C) -> () {
+// CHECK: alloc_box ${ var @sil_weak Optional<C> }
+// CHECK-NOT: [inferred_immutable]
+// CHECK: } // end sil function 'simple_caller_write_twice'
+sil [ossa] @simple_caller_write_twice : $@convention(thin) (@owned C) -> () {
+bb0(%arg : @owned $C):
+  %0 = alloc_box ${ var @sil_weak Optional<C> }
+  %1 = begin_borrow [lexical] %0
+  %2 = project_box %1, 0
+
+  // First write
+  %3 = enum $Optional<C>, #Optional.some!enumelt, %arg
+  store_weak %3 to [init] %2
+
+  // Second write
+  %none = enum $Optional<C>, #Optional.none
+  store_weak %none to %2
+
+  destroy_value %3
+
+  %f = function_ref @simple_callee : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }) -> ()
+  %0c = copy_value %0
+  %pa = partial_apply [callee_guaranteed] %f(%0c) : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }) -> ()
+  destroy_value %pa
+  end_borrow %1
+  destroy_value %0
+  %9999 = tuple ()
+  return %9999 : $()
+}
+
+// CHECK-LABEL: sil [ossa] @simple_caller_write_three_times : $@convention(thin) (@owned C, @owned C) -> () {
+// CHECK: alloc_box ${ var @sil_weak Optional<C> }
+// CHECK-NOT: [inferred_immutable]
+// CHECK: } // end sil function 'simple_caller_write_three_times'
+sil [ossa] @simple_caller_write_three_times : $@convention(thin) (@owned C, @owned C) -> () {
+bb0(%arg1 : @owned $C, %arg2 : @owned $C):
+  %0 = alloc_box ${ var @sil_weak Optional<C> }
+  %1 = begin_borrow [lexical] %0
+  %2 = project_box %1, 0
+
+  // First write
+  %3 = enum $Optional<C>, #Optional.some!enumelt, %arg1
+  store_weak %3 to [init] %2
+  destroy_value %3
+
+  // Second write
+  %4 = enum $Optional<C>, #Optional.some!enumelt, %arg2
+  store_weak %4 to %2
+  destroy_value %4
+
+  // Third write
+  %none = enum $Optional<C>, #Optional.none
+  store_weak %none to %2
+
+  %f = function_ref @simple_callee : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }) -> ()
+  %0c = copy_value %0
+  %pa = partial_apply [callee_guaranteed] %f(%0c) : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }) -> ()
+  destroy_value %pa
+  end_borrow %1
+  destroy_value %0
+  %9999 = tuple ()
+  return %9999 : $()
+}
+
+// CHECK-LABEL: sil [ossa] @simple_caller_conditional_write : $@convention(thin) (@owned C, @owned C) -> () {
+// CHECK: alloc_box ${ var @sil_weak Optional<C> }
+// CHECK-NOT: [inferred_immutable]
+// CHECK: } // end sil function 'simple_caller_conditional_write'
+sil [ossa] @simple_caller_conditional_write : $@convention(thin) (@owned C, @owned C) -> () {
+bb0(%arg1 : @owned $C, %arg2 : @owned $C):
+  %0 = alloc_box ${ var @sil_weak Optional<C> }
+  %1 = begin_borrow [lexical] %0
+  %2 = project_box %1, 0
+
+  // First write
+  %3 = enum $Optional<C>, #Optional.some!enumelt, %arg1
+  store_weak %3 to [init] %2
+  destroy_value %3
+
+  // Conditional write
+  cond_br undef, bb_write, bb_no_write
+
+bb_write:
+  %4 = enum $Optional<C>, #Optional.some!enumelt, %arg2
+  store_weak %4 to %2
+  destroy_value %4
+  br bb_cont
+
+bb_no_write:
+  destroy_value %arg2
+  br bb_cont
+
+bb_cont:
+  %f = function_ref @simple_callee : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }) -> ()
+  %0c = copy_value %0
+  %pa = partial_apply [callee_guaranteed] %f(%0c) : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }) -> ()
+  destroy_value %pa
+  end_borrow %1
+  destroy_value %0
+  %9999 = tuple ()
+  return %9999 : $()
+}
+
+// Test with multiple boxes where one is written multiple times
+// CHECK-LABEL: sil [ossa] @caller_two_boxes_one_multiple_writes : $@convention(thin) (@owned C) -> () {
+// CHECK: alloc_box ${ var @sil_weak Optional<C> }
+// CHECK-NOT: [inferred_immutable]
+// CHECK: alloc_box [inferred_immutable] ${ var @sil_weak Optional<C> }
+// CHECK: } // end sil function 'caller_two_boxes_one_multiple_writes'
+sil [ossa] @caller_two_boxes_one_multiple_writes : $@convention(thin) (@owned C) -> () {
+bb0(%arg : @owned $C):
+  // Box 1: written multiple times (should NOT be marked immutable)
+  %0 = alloc_box ${ var @sil_weak Optional<C> }
+  %1 = begin_borrow [lexical] %0
+  %2 = project_box %1, 0
+  %3 = enum $Optional<C>, #Optional.some!enumelt, %arg
+  store_weak %3 to [init] %2
+
+  // Second write to box 1
+  %none = enum $Optional<C>, #Optional.none
+  store_weak %none to %2
+
+  // Box 2: written only once (should be marked immutable)
+  %0a = alloc_box ${ var @sil_weak Optional<C> }
+  %1a = begin_borrow [lexical] %0a
+  %2a = project_box %1a, 0
+  store_weak %3 to [init] %2a
+  destroy_value %3
+
+  %f = function_ref @simple_callee_two : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  %0c = copy_value %0
+  %0ac = copy_value %0a
+  %pa = partial_apply [callee_guaranteed] %f(%0c, %0ac) : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  destroy_value %pa
+  end_borrow %1a
+  destroy_value %0a
+  end_borrow %1
+  destroy_value %0
+  %9999 = tuple ()
+  return %9999 : $()
+}
+
+// Test with loop containing multiple writes
+// CHECK-LABEL: sil [ossa] @simple_caller_write_in_loop : $@convention(thin) (@owned C) -> () {
+// CHECK: alloc_box ${ var @sil_weak Optional<C> }
+// CHECK-NOT: [inferred_immutable]
+// CHECK: } // end sil function 'simple_caller_write_in_loop'
+sil [ossa] @simple_caller_write_in_loop : $@convention(thin) (@owned C) -> () {
+bb0(%arg : @owned $C):
+  %0 = alloc_box ${ var @sil_weak Optional<C> }
+  %1 = begin_borrow [lexical] %0
+  %2 = project_box %1, 0
+  %3 = enum $Optional<C>, #Optional.some!enumelt, %arg
+  store_weak %3 to [init] %2
+
+  %zero = integer_literal $Builtin.Int64, 0
+  %ten = integer_literal $Builtin.Int64, 10
+  br bb_loop(%zero : $Builtin.Int64)
+
+bb_loop(%i : $Builtin.Int64):
+  // Write in loop
+  store_weak %3 to %2
+
+  %one = integer_literal $Builtin.Int64, 1
+  %next = builtin "add_Int64"(%i : $Builtin.Int64, %one : $Builtin.Int64) : $Builtin.Int64
+  %cmp = builtin "cmp_slt_Int64"(%next : $Builtin.Int64, %ten : $Builtin.Int64) : $Builtin.Int1
+  cond_br %cmp, bb_latch, bb_exit
+
+bb_latch:
+  br bb_loop(%next : $Builtin.Int64)
+
+bb_exit:
+  destroy_value %3
+
+  %f = function_ref @simple_callee : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }) -> ()
+  %0c = copy_value %0
+  %pa = partial_apply [callee_guaranteed] %f(%0c) : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }) -> ()
+  destroy_value %pa
+  end_borrow %1
+  destroy_value %0
+  %9999 = tuple ()
+  return %9999 : $()
+}
+
+/////////////////
+// MARK: Loops //
+/////////////////
+
+// CHECK-LABEL: sil [ossa] @callee_with_closure_in_loop : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }) -> () {
+// CHECK: bb0(%0 : @closureCapture @inferredImmutable @guaranteed ${ var @sil_weak Optional<C> }):
+// CHECK: } // end sil function 'callee_with_closure_in_loop'
+sil [ossa] @callee_with_closure_in_loop : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }) -> () {
+bb0(%box : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }):
+  %zero = integer_literal $Builtin.Int64, 0
+  %ten = integer_literal $Builtin.Int64, 10
+  br bb_loop(%zero : $Builtin.Int64)
+
+bb_loop(%i : $Builtin.Int64):
+  // Read weak box in loop
+  %addr = project_box %box, 0
+  %val = load_weak %addr : $*@sil_weak Optional<C>
+  destroy_value %val
+
+  %one = integer_literal $Builtin.Int64, 1
+  %next = builtin "add_Int64"(%i : $Builtin.Int64, %one : $Builtin.Int64) : $Builtin.Int64
+  %cmp = builtin "cmp_slt_Int64"(%next : $Builtin.Int64, %ten : $Builtin.Int64) : $Builtin.Int1
+  cond_br %cmp, bb_latch, bb_exit
+
+bb_latch:
+  br bb_loop(%next : $Builtin.Int64)
+
+bb_exit:
+  %r = tuple ()
+  return %r : $()
+}
+
+// CHECK-LABEL: sil [ossa] @caller_callee_with_closure_in_loop : $@convention(thin) (@owned C) -> () {
+// CHECK: alloc_box [inferred_immutable] ${ var @sil_weak Optional<C> }
+// CHECK: } // end sil function 'caller_callee_with_closure_in_loop'
+sil [ossa] @caller_callee_with_closure_in_loop : $@convention(thin) (@owned C) -> () {
+bb0(%arg : @owned $C):
+  %0 = alloc_box ${ var @sil_weak Optional<C> }
+  %1 = begin_borrow [lexical] %0
+  %2 = project_box %1, 0
+  %3 = enum $Optional<C>, #Optional.some!enumelt, %arg
+  store_weak %3 to [init] %2
+  destroy_value %3
+
+  %f = function_ref @callee_with_closure_in_loop : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }) -> ()
+  %0c = copy_value %0
+  %pa = partial_apply [callee_guaranteed] %f(%0c) : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }) -> ()
+  destroy_value %pa
+  end_borrow %1
+  destroy_value %0
+  %9999 = tuple ()
+  return %9999 : $()
+}
+
+///////////////////////////////
+// MARK: Caller With Diamond //
+///////////////////////////////
+
+// We do not handle this case since whenever we would generate a diamond, SILGen
+// should load_weak the value. We do not generally phi the values themselves.
+// CHECK-LABEL: sil [ossa] @callee_with_diamond_1 : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> () {
+// CHECK: bb0(%0 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %1 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }):
+// CHECK: } // end sil function 'callee_with_diamond_1'
+sil [ossa] @callee_with_diamond_1 : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> () {
+bb0(%box1 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }, %box2 : @closureCapture @guaranteed ${ var @sil_weak Optional<C> }):
+  cond_br undef, bb_lhs, bb_rhs
+
+bb_lhs:
+  br bb_cont(%box1 : ${ var @sil_weak Optional<C> })
+
+bb_rhs:
+  br bb_cont(%box2 : ${ var @sil_weak Optional<C> })
+
+bb_cont(%box : @guaranteed ${ var @sil_weak Optional<C> }):
+  %boxBorrow = borrowed %box from (%box1, %box2)
+  %addr = project_box %boxBorrow, 0
+  %val = load_weak %addr : $*@sil_weak Optional<C>
+  destroy_value %val
+  %r = tuple ()
+  return %r : $()
+}
+
+// CHECK-LABEL: sil [ossa] @caller_callee_with_diamond_1 : $@convention(thin) (@owned C) -> () {
+// CHECK: alloc_box ${ var @sil_weak Optional<C> }
+// CHECK: alloc_box ${ var @sil_weak Optional<C> }
+// CHECK-NOT: [inferred_immutable]
+// CHECK: } // end sil function 'caller_callee_with_diamond_1'
+sil [ossa] @caller_callee_with_diamond_1 : $@convention(thin) (@owned C) -> () {
+bb0(%arg : @owned $C):
+  %0 = alloc_box ${ var @sil_weak Optional<C> }
+  %1 = begin_borrow [lexical] %0
+  %2 = project_box %1, 0
+  %3 = enum $Optional<C>, #Optional.some!enumelt, %arg
+  store_weak %3 to [init] %2
+  %0a = alloc_box ${ var @sil_weak Optional<C> }
+  %1a = begin_borrow [lexical] %0a
+  %2a = project_box %1a, 0
+  store_weak %3 to [init] %2a
+  destroy_value %3
+
+  %f = function_ref @callee_with_diamond_1 : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  %0c = copy_value %0
+  %0ac = copy_value %0a
+  %pa = partial_apply [callee_guaranteed] %f(%0c, %0ac) : $@convention(thin) (@guaranteed { var @sil_weak Optional<C> }, @guaranteed { var @sil_weak Optional<C> }) -> ()
+  destroy_value %pa
+  end_borrow %1a
+  destroy_value %0a
+  end_borrow %1
+  destroy_value %0
+  %9999 = tuple ()
+  return %9999 : $()
+}
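+
+// Illustrative note (not checked by FileCheck): at the source level a merge of
+// two weak locals, e.g. `let merged = cond ? a : b`, is lowered by reading each
+// box with load_weak and merging the loaded values, not the boxes, so box
+// values are not normally expected to appear as block arguments. The
+// hand-written diamond above constructs that unusual shape directly to verify
+// that the pass conservatively leaves such SIL alone.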