@@ -189,6 +189,87 @@ public func resilientAny(s : ResilientWeakRef) {
// CHECK: call void @__swift_destroy_boxed_opaque_existential_0(%Any* [[ANY]])
// CHECK: ret void
+ // Make sure that MemoryLayout properties access resilient types' metadata
+ // instead of hardcoding sizes based on compile-time layouts.
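+ //
+ // Each function below is expected to call the metadata accessor for the
+ // resilient Size struct, load its value witness table from the word at
+ // offset -1 before the metadata, and read the size, stride, or flags
+ // witness from that table instead of a constant baked in at compile time.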
+
+ // CHECK-LABEL: define{{.*}} swiftcc {{i32|i64}} @"$s17struct_resilience38memoryLayoutDotSizeWithResilientStructSiyF"()
+ public func memoryLayoutDotSizeWithResilientStruct() -> Int {
+ // CHECK: entry:
+ // CHECK: [[TMP:%.*]] = call swiftcc %swift.metadata_response @"$s16resilient_struct4SizeVMa"([[INT]] 0)
+ // CHECK: [[METADATA:%.*]] = extractvalue %swift.metadata_response [[TMP]], 0
+ // CHECK: [[METADATA_ADDR:%.*]] = bitcast %swift.type* [[METADATA]] to i8***
+ // CHECK: [[VWT_ADDR:%.*]] = getelementptr inbounds i8**, i8*** [[METADATA_ADDR]], [[INT]] -1
+ // CHECK: [[VWT:%.*]] = load i8**, i8*** [[VWT_ADDR]]
+
+ // CHECK-NEXT: [[VWT_CAST:%.*]] = bitcast i8** [[VWT]] to %swift.vwtable*
+ // CHECK-NEXT: [[WITNESS_ADDR:%.*]] = getelementptr inbounds %swift.vwtable, %swift.vwtable* [[VWT_CAST]], i32 0, i32 8
+ // CHECK: [[WITNESS_FOR_SIZE:%.*]] = load [[INT]], [[INT]]* [[WITNESS_ADDR]]
+
+ // CHECK: ret [[INT]] [[WITNESS_FOR_SIZE]]
+ return MemoryLayout<Size>.size
+ }
+
+ // CHECK-LABEL: define{{.*}} swiftcc {{i32|i64}} @"$s17struct_resilience40memoryLayoutDotStrideWithResilientStructSiyF"()
+ public func memoryLayoutDotStrideWithResilientStruct() -> Int {
+ // CHECK: entry:
+ // CHECK: [[TMP:%.*]] = call swiftcc %swift.metadata_response @"$s16resilient_struct4SizeVMa"([[INT]] 0)
+ // CHECK: [[METADATA:%.*]] = extractvalue %swift.metadata_response [[TMP]], 0
+ // CHECK: [[METADATA_ADDR:%.*]] = bitcast %swift.type* [[METADATA]] to i8***
+ // CHECK: [[VWT_ADDR:%.*]] = getelementptr inbounds i8**, i8*** [[METADATA_ADDR]], [[INT]] -1
+ // CHECK: [[VWT:%.*]] = load i8**, i8*** [[VWT_ADDR]]
+
+ // CHECK-NEXT: [[VWT_CAST:%.*]] = bitcast i8** [[VWT]] to %swift.vwtable*
+ // CHECK-NEXT: [[WITNESS_ADDR:%.*]] = getelementptr inbounds %swift.vwtable, %swift.vwtable* [[VWT_CAST]], i32 0, i32 9
+ // CHECK: [[WITNESS_FOR_STRIDE:%.*]] = load [[INT]], [[INT]]* [[WITNESS_ADDR]]
+
+ // CHECK: ret [[INT]] [[WITNESS_FOR_STRIDE]]
+ return MemoryLayout<Size>.stride
+ }
+
+ // CHECK-LABEL: define{{.*}} swiftcc {{i32|i64}} @"$s17struct_resilience43memoryLayoutDotAlignmentWithResilientStructSiyF"()
+ public func memoryLayoutDotAlignmentWithResilientStruct() -> Int {
+ // CHECK: entry:
+ // CHECK: [[TMP:%.*]] = call swiftcc %swift.metadata_response @"$s16resilient_struct4SizeVMa"([[INT]] 0)
+ // CHECK: [[METADATA:%.*]] = extractvalue %swift.metadata_response [[TMP]], 0
+ // CHECK: [[METADATA_ADDR:%.*]] = bitcast %swift.type* [[METADATA]] to i8***
+ // CHECK: [[VWT_ADDR:%.*]] = getelementptr inbounds i8**, i8*** [[METADATA_ADDR]], [[INT]] -1
+ // CHECK: [[VWT:%.*]] = load i8**, i8*** [[VWT_ADDR]]
+
+ // CHECK-NEXT: [[VWT_CAST:%.*]] = bitcast i8** [[VWT]] to %swift.vwtable*
+ // CHECK-NEXT: [[WITNESS_ADDR:%.*]] = getelementptr inbounds %swift.vwtable, %swift.vwtable* [[VWT_CAST]], i32 0, i32 10
+ // CHECK: [[WITNESS_FOR_FLAGS:%.*]] = load i32, i32* [[WITNESS_ADDR]]
+
+ // Not checked because it only exists on 64-bit: [[EXTENDED_FLAGS:%.*]] = zext i32 [[WITNESS_FOR_FLAGS]] to [[INT]]
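+ // The alignment is derived from the flags witness: the low byte holds the
+ // alignment mask, so the value computed below is (flags & 255) + 1.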
+
+ // CHECK: [[ALIGNMENT_MASK:%.*]] = and [[INT]] {{%.*}}, 255
+ // CHECK: [[ALIGNMENT:%.*]] = add [[INT]] [[ALIGNMENT_MASK]], 1
+
+ // CHECK: ret [[INT]] [[ALIGNMENT]]
+ return MemoryLayout<Size>.alignment
+ }
+
+
+ // Make sure that MemoryLayout.offset(of:) on a resilient type uses the accessor
+ // in the key path instead of hardcoding offsets based on compile-time layouts.
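+ //
+ // The offset is expected to be obtained by forming a key path at runtime
+ // (swift_getKeyPath) and asking AnyKeyPath for its stored inline offset,
+ // rather than by emitting a hardcoded field offset.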
+
+ // CHECK-LABEL: define{{.*}} swiftcc { {{i32|i64}}, i8 } @"$s17struct_resilience42memoryLayoutDotOffsetOfWithResilientStructSiSgyF"()
+ public func memoryLayoutDotOffsetOfWithResilientStruct() -> Int? {
+ // CHECK-NEXT: entry:
+ // CHECK: [[RAW_KEY_PATH:%.*]] = call %swift.refcounted* @swift_getKeyPath
+ // CHECK: [[WRITABLE_KEY_PATH:%.*]] = bitcast %swift.refcounted* [[RAW_KEY_PATH]] to %Ts15WritableKeyPathCy16resilient_struct4SizeVSiG*
+ // CHECK: [[PARTIAL_KEY_PATH:%.*]] = bitcast %Ts15WritableKeyPathCy16resilient_struct4SizeVSiG* [[WRITABLE_KEY_PATH]] to %Ts14PartialKeyPathCy16resilient_struct4SizeVG*
+ // CHECK: [[ANY_KEY_PATH:%.*]] = bitcast %Ts14PartialKeyPathCy16resilient_struct4SizeVG* [[PARTIAL_KEY_PATH]] to %Ts10AnyKeyPathC*
+
+ // CHECK: [[STORED_INLINE_OFFSET:%.*]] = call swiftcc { [[INT]], i8 } @"$ss10AnyKeyPathC19_storedInlineOffsetSiSgvgTj"(%Ts10AnyKeyPathC* swiftself [[ANY_KEY_PATH]])
+ // CHECK: [[VALUE:%.*]] = extractvalue { [[INT]], i8 } [[STORED_INLINE_OFFSET]], 0
+
+ // CHECK: [[RET_PARTIAL:%.*]] = insertvalue { [[INT]], i8 } undef, [[INT]] [[VALUE]], 0
+ // CHECK: [[RET:%.*]] = insertvalue { [[INT]], i8 } [[RET_PARTIAL]]
+ // CHECK: ret { [[INT]], i8 } [[RET]]
+ return MemoryLayout<Size>.offset(of: \Size.w)
+ }
+
+
// Public metadata accessor for our resilient struct
// CHECK-LABEL: define{{( dllexport)?}}{{( protected)?}} swiftcc %swift.metadata_response @"$s17struct_resilience6MySizeVMa"