@@ -898,3 +898,205 @@ bb0( %0 : $Int):
898
898
%7 = tuple ()
899
899
return %7 : $()
900
900
}
901
+
902
// 4.2 fails to diagnose this conflict.
//
// Nonescaping closure body: %1 is the @inout_aliasable capture; it is read
// under a [read] [unknown] access, which conflicts with the caller's open
// [modify] access on the same box address.
sil private @closureForDirectPartialApplyTakingInout : $@convention(thin) (@inout Int, @inout_aliasable Int) -> () {
bb0(%0 : $*Int, %1 : $*Int):
  %access = begin_access [read] [unknown] %1 : $*Int
  %val = load [trivial] %access : $*Int
  end_access %access : $*Int
  %v = tuple ()
  return %v : $()
}
911
+
912
// 4.2 fails to diagnose this conflict.
//
// Directly applies a converted-to-noescape partial_apply that captures the
// box address %boxadr while a [modify] access to that same address is open.
sil hidden @directPartialApplyTakingInout : $@convention(thin) (Int) -> () {
bb0(%0 : $Int):
  %box = alloc_box ${ var Int }, var, name "i"
  %boxadr = project_box %box : ${ var Int }, 0
  store %0 to [trivial] %boxadr : $*Int
  %f = function_ref @closureForDirectPartialApplyTakingInout : $@convention(thin) (@inout Int, @inout_aliasable Int) -> ()
  %pa = partial_apply [callee_guaranteed] %f(%boxadr) : $@convention(thin) (@inout Int, @inout_aliasable Int) -> ()
  %nepa = convert_escape_to_noescape %pa : $@callee_guaranteed (@inout Int) -> () to $@noescape @callee_guaranteed (@inout Int) -> ()
  %access = begin_access [modify] [unknown] %boxadr : $*Int
  %call = apply %nepa(%access) : $@noescape @callee_guaranteed (@inout Int) -> ()
  end_access %access : $*Int
  destroy_value %box : ${ var Int }
  %v = tuple ()
  return %v : $()
}
928
+
929
// 4.2 fails to diagnose this conflict.
//
// Closure body for the two-level partial_apply chain below: %2 is the
// @inout_aliasable capture, read under a [read] [unknown] access.
sil private @closureForDirectPartialApplyChain : $@convention(thin) (@inout Int, Int, @inout_aliasable Int) -> () {
bb0(%0 : $*Int, %1 : $Int, %2 : $*Int):
  %access = begin_access [read] [unknown] %2 : $*Int
  %val = load [trivial] %access : $*Int
  end_access %access : $*Int
  %v = tuple ()
  return %v : $()
}
938
+
939
// 4.2 fails to diagnose this conflict.
//
// Like directPartialApplyTakingInout, but the box capture is buried under a
// chain of two partial_applys before the convert_escape_to_noescape; the
// apply of the noescape closure still overlaps the [modify] access on %boxadr.
sil hidden @directPartialApplyChain : $@convention(thin) (Int) -> () {
bb0(%0 : $Int):
  %box = alloc_box ${ var Int }, var, name "i"
  %boxadr = project_box %box : ${ var Int }, 0
  store %0 to [trivial] %boxadr : $*Int
  %f = function_ref @closureForDirectPartialApplyChain : $@convention(thin) (@inout Int, Int, @inout_aliasable Int) -> ()
  %pa1 = partial_apply [callee_guaranteed] %f(%boxadr) : $@convention(thin) (@inout Int, Int, @inout_aliasable Int) -> ()
  %pa2 = partial_apply [callee_guaranteed] %pa1(%0) : $@callee_guaranteed (@inout Int, Int) -> ()
  %nepa = convert_escape_to_noescape %pa2 : $@callee_guaranteed (@inout Int) -> () to $@noescape @callee_guaranteed (@inout Int) -> ()
  %access = begin_access [modify] [unknown] %boxadr : $*Int
  %call = apply %nepa(%access) : $@noescape @callee_guaranteed (@inout Int) -> ()
  end_access %access : $*Int
  destroy_value %box : ${ var Int }
  %v = tuple ()
  return %v : $()
}
956
+
957
/*
// This SIL pattern is unsupported on the 4.2 branch. It will assert in checkForViolationWithCall.
// It is handled post-4.2. We haven't seen a source-level test that exposes this pre-4.2.
sil private @closureForPartialApplyArgChain : $@convention(thin) (Int, @inout_aliasable Int) -> () {
bb0(%0 : $Int, %1 : $*Int):
  %access = begin_access [read] [unknown] %1 : $*Int
  %val = load [trivial] %access : $*Int
  end_access %access : $*Int
  %v = tuple ()
  return %v : $()
}

sil hidden @partialApplyArgChain : $@convention(thin) (Int) -> () {
bb0(%0 : $Int):
  %2 = alloc_box ${ var Int }
  %3 = project_box %2 : ${ var Int }, 0
  store %0 to [trivial] %3 : $*Int
  %4 = function_ref @takesInoutAndNoEscapeClosure : $@convention(thin) (@inout Int, @noescape @owned @callee_owned () -> ()) -> ()
  %5 = function_ref @closureForPartialApplyArgChain : $@convention(thin) (Int, @inout_aliasable Int) -> ()
  %6 = partial_apply %5(%3) : $@convention(thin) (Int, @inout_aliasable Int) -> ()
  %7 = partial_apply %6(%0) : $@callee_owned (Int) -> ()
  %conv = convert_escape_to_noescape %7 : $@callee_owned () -> () to $@callee_owned @noescape () -> ()
  %8 = begin_access [modify] [unknown] %3 : $*Int
  %9 = apply %4(%8, %conv) : $@convention(thin) (@inout Int, @noescape @owned @callee_owned () -> ()) -> ()
  end_access %8 : $*Int
  destroy_value %2 : ${ var Int }
  %v = tuple ()
  return %v : $()
}
*/
987
+
988
// Dummy class; the LLDB test below materializes a $@thick SomeClass.Type metatype.
class SomeClass {}
989
+
990
// LLDB uses mark_uninitialized [var] in an unsupported way via an address to
// pointer. LLDB shouldn't do this, but until it is removed and validated we
// need a test for this.
//
// Check that this doesn't trigger the DiagnoseStaticExclusivity
// assert that all accesses have a valid AccessedStorage source.
sil @lldb_unsupported_markuninitialized_testcase : $@convention(thin) (UnsafeMutablePointer<Any>) -> () {
bb0(%0 : $UnsafeMutablePointer<Any>):
  %2 = struct_extract %0 : $UnsafeMutablePointer<Any>, #UnsafeMutablePointer._rawValue
  %3 = integer_literal $Builtin.Int64, 8
  %4 = index_raw_pointer %2 : $Builtin.RawPointer, %3 : $Builtin.Int64
  %5 = pointer_to_address %4 : $Builtin.RawPointer to [strict] $*Builtin.RawPointer
  %6 = load [trivial] %5 : $*Builtin.RawPointer
  %7 = pointer_to_address %6 : $Builtin.RawPointer to [strict] $*Error
  %8 = mark_uninitialized [var] %7 : $*Error
  %9 = struct_extract %0 : $UnsafeMutablePointer<Any>, #UnsafeMutablePointer._rawValue
  %10 = integer_literal $Builtin.Int64, 0
  %11 = index_raw_pointer %9 : $Builtin.RawPointer, %10 : $Builtin.Int64
  %12 = pointer_to_address %11 : $Builtin.RawPointer to [strict] $*Builtin.RawPointer
  %13 = load [trivial] %12 : $*Builtin.RawPointer
  %14 = pointer_to_address %13 : $Builtin.RawPointer to [strict] $*@thick SomeClass.Type
  %15 = mark_uninitialized [var] %14 : $*@thick SomeClass.Type
  %16 = metatype $@thick SomeClass.Type
  %17 = begin_access [modify] [unsafe] %15 : $*@thick SomeClass.Type
  assign %16 to %17 : $*@thick SomeClass.Type
  end_access %17 : $*@thick SomeClass.Type
  %9999 = tuple()
  return %9999 : $()
}
1019
+
1020
// External addressor returning an unsafely-accessed pointer (see addressorAccess below).
sil @addressor : $@convention(thin) () -> UnsafeMutablePointer<Int32>
1021
+
1022
// An addressor produces an unsafely accessed RawPointer. Passing the
// address to an inout can result in an enforced (unknown) access on
// the unsafe address. We need to handle this case without asserting.
sil @addressorAccess : $@convention(thin) (@guaranteed ClassWithStoredProperty, Int32) -> () {
bb0(%0 : $ClassWithStoredProperty, %1 : $Int32):
  %f = function_ref @addressor : $@convention(thin) () -> UnsafeMutablePointer<Int32>
  %up = apply %f() : $@convention(thin) () -> UnsafeMutablePointer<Int32>
  %o = unchecked_ref_cast %0 : $ClassWithStoredProperty to $Builtin.NativeObject
  %ptr = struct_extract %up : $UnsafeMutablePointer<Int32>, #UnsafeMutablePointer._rawValue
  %adr = pointer_to_address %ptr : $Builtin.RawPointer to [strict] $*Int32
  %dep = mark_dependence %adr : $*Int32 on %o : $Builtin.NativeObject
  %a1 = begin_access [modify] [unsafe] %dep : $*Int32
  %a2 = begin_access [modify] [static] %a1 : $*Int32
  store %1 to [trivial] %a2 : $*Int32
  end_access %a2 : $*Int32
  end_access %a1 : $*Int32
  %v = tuple ()
  return %v : $()
}
1041
+
1042
// <rdar://problem/41976355> [SR-8201]: Swift 4.2 Crash in DiagnoseStaticExclusivity (llvm_unreachable)
// mutatingNoescapeWithThunk and mutatingNoescapeConflictWithThunk.
// Test that noescape closure verification allows a closure with @inout_aliasable argument convention,
// which may only be used as a nonescaping closure, can be passed to a reabstraction thunk
// without the @noescape function-type argument convention.

// Mutating-closure body: writes through the @inout_aliasable capture %1
// under a [modify] [unknown] access.
sil @mutatingNoescapeHelper : $@convention(thin) (Optional<UnsafePointer<Int32>>, @inout_aliasable Int32) -> () {
bb0(%0 : $Optional<UnsafePointer<Int32>>, %1 : $*Int32):
  %up = unchecked_enum_data %0 : $Optional<UnsafePointer<Int32>>, #Optional.some!enumelt.1
  %p = struct_extract %up : $UnsafePointer<Int32>, #UnsafePointer._rawValue
  %adr = pointer_to_address %p : $Builtin.RawPointer to [strict] $*Int32
  %val = load [trivial] %adr : $*Int32
  %access = begin_access [modify] [unknown] %1 : $*Int32 // expected-note {{conflicting access is here}}
  store %val to [trivial] %access : $*Int32
  end_access %access : $*Int32
  %v = tuple ()
  return %v : $()
}
1060
+
1061
// Reabstraction thunk wrapping the UnsafePointer argument in an Optional
// before forwarding to the captured (escaping-typed) closure.
sil shared [transparent] [serializable] [reabstraction_thunk] @mutatingNoescapeThunk : $@convention(thin) (UnsafePointer<Int32>, @guaranteed @callee_guaranteed (Optional<UnsafePointer<Int32>>) -> ()) -> () {
bb0(%0 : $UnsafePointer<Int32>, %1 : $@callee_guaranteed (Optional<UnsafePointer<Int32>>) -> ()):
  %e = enum $Optional<UnsafePointer<Int32>>, #Optional.some!enumelt.1, %0 : $UnsafePointer<Int32>
  %c = apply %1(%e) : $@callee_guaranteed (Optional<UnsafePointer<Int32>>) -> ()
  %v = tuple ()
  return %v : $()
}
1068
+
1069
// External consumer of a @noescape closure.
sil @takeMutatingNoescapeClosure : $@convention(thin) <τ_0_0> (UnsafePointer<τ_0_0>, @noescape @callee_guaranteed (UnsafePointer<Int32>) -> ()) -> ()
1070
+
1071
// A (mutating) closure taking an @inout_aliasable argument may be
// passed to a reabstraction_thunk as an escaping function type. This
// is valid as long as the thunk is only used as a @noescape type.
// No conflict here: no access to %1 is open across the call.
sil @mutatingNoescapeWithThunk : $@convention(method) (UnsafePointer<Int32>, @inout Int32) -> () {
bb0(%0 : $UnsafePointer<Int32>, %1 : $*Int32):
  %f1 = function_ref @mutatingNoescapeHelper : $@convention(thin) (Optional<UnsafePointer<Int32>>, @inout_aliasable Int32) -> ()
  %pa1 = partial_apply [callee_guaranteed] %f1(%1) : $@convention(thin) (Optional<UnsafePointer<Int32>>, @inout_aliasable Int32) -> ()
  %thunk = function_ref @mutatingNoescapeThunk : $@convention(thin) (UnsafePointer<Int32>, @guaranteed @callee_guaranteed (Optional<UnsafePointer<Int32>>) -> ()) -> ()
  %pa2 = partial_apply [callee_guaranteed] %thunk(%pa1) : $@convention(thin) (UnsafePointer<Int32>, @guaranteed @callee_guaranteed (Optional<UnsafePointer<Int32>>) -> ()) -> ()
  %closure = convert_escape_to_noescape [not_guaranteed] %pa2 : $@callee_guaranteed (UnsafePointer<Int32>) -> () to $@noescape @callee_guaranteed (UnsafePointer<Int32>) -> ()
  %f2 = function_ref @takeMutatingNoescapeClosure : $@convention(thin) <τ_0_0> (UnsafePointer<τ_0_0>, @noescape @callee_guaranteed (UnsafePointer<Int32>) -> ()) -> ()
  %call = apply %f2<Int32>(%0, %closure) : $@convention(thin) <τ_0_0> (UnsafePointer<τ_0_0>, @noescape @callee_guaranteed (UnsafePointer<Int32>) -> ()) -> ()
  %v = tuple ()
  return %v : $()
}
1086
+
1087
// External consumer taking both an @inout Int32 and a @noescape closure.
sil @takeInoutAndMutatingNoescapeClosure : $@convention(thin) <τ_0_0> (@inout Int32, UnsafePointer<τ_0_0>, @noescape @callee_guaranteed (UnsafePointer<Int32>) -> ()) -> ()
1088
+
1089
// Same thunk pattern as mutatingNoescapeWithThunk, but here a [modify]
// access on %1 is open across the call while the closure also modifies
// %1 via its @inout_aliasable capture — an exclusivity conflict.
sil @mutatingNoescapeConflictWithThunk : $@convention(method) (UnsafePointer<Int32>, @inout Int32) -> () {
bb0(%0 : $UnsafePointer<Int32>, %1 : $*Int32):
  %f1 = function_ref @mutatingNoescapeHelper : $@convention(thin) (Optional<UnsafePointer<Int32>>, @inout_aliasable Int32) -> ()
  %pa1 = partial_apply [callee_guaranteed] %f1(%1) : $@convention(thin) (Optional<UnsafePointer<Int32>>, @inout_aliasable Int32) -> ()
  %thunk = function_ref @mutatingNoescapeThunk : $@convention(thin) (UnsafePointer<Int32>, @guaranteed @callee_guaranteed (Optional<UnsafePointer<Int32>>) -> ()) -> ()
  %pa2 = partial_apply [callee_guaranteed] %thunk(%pa1) : $@convention(thin) (UnsafePointer<Int32>, @guaranteed @callee_guaranteed (Optional<UnsafePointer<Int32>>) -> ()) -> ()
  %closure = convert_escape_to_noescape [not_guaranteed] %pa2 : $@callee_guaranteed (UnsafePointer<Int32>) -> () to $@noescape @callee_guaranteed (UnsafePointer<Int32>) -> ()
  %f2 = function_ref @takeInoutAndMutatingNoescapeClosure : $@convention(thin) <τ_0_0> (@inout Int32, UnsafePointer<τ_0_0>, @noescape @callee_guaranteed (UnsafePointer<Int32>) -> ()) -> ()
  %access = begin_access [modify] [unknown] %1 : $*Int32 // expected-error {{overlapping accesses, but modification requires exclusive access; consider copying to a local variable}}
  %call = apply %f2<Int32>(%access, %0, %closure) : $@convention(thin) <τ_0_0> (@inout Int32, UnsafePointer<τ_0_0>, @noescape @callee_guaranteed (UnsafePointer<Int32>) -> ()) -> ()
  end_access %access : $*Int32
  %v = tuple ()
  return %v : $()
}
0 commit comments