@@ -254,3 +254,147 @@ entry(%instance : @none $TrivialStruct):
   %retval = tuple ()
   return %retval : $()
 }
+
+// Fold destroy_addr and a load [copy] into a load [take] even when that
+// load [take] is guarded by an access scope.
+//
+// CHECK-LABEL: sil [ossa] @fold_scoped_load : {{.*}} {
+// CHECK: load [take]
+// CHECK-LABEL: // end sil function 'fold_scoped_load'
+sil [ossa] @fold_scoped_load : $@convention(thin) (@owned S) -> (@owned S) {
+entry(%instance : @owned $S):
+  %addr = alloc_stack $S
+  %store_scope = begin_access [modify] [static] %addr : $*S
+  store %instance to [init] %store_scope : $*S
+  end_access %store_scope : $*S
+  %load_scope = begin_access [read] [static] %addr : $*S
+  %value = load [copy] %load_scope : $*S
+  end_access %load_scope : $*S
+  destroy_addr %addr : $*S
+  dealloc_stack %addr : $*S
+  return %value : $S
+}
+
+// Don't fold when there's a deinit barrier in the way.
+//
+// CHECK-LABEL: sil [ossa] @nofold_scoped_load_barrier : {{.*}} {
+// CHECK: load [copy]
+// CHECK-LABEL: // end sil function 'nofold_scoped_load_barrier'
+sil [ossa] @nofold_scoped_load_barrier : $@convention(thin) (@owned S) -> (@owned S) {
+entry(%instance : @owned $S):
+  %addr = alloc_stack $S
+  %store_scope = begin_access [modify] [static] %addr : $*S
+  store %instance to [init] %store_scope : $*S
+  end_access %store_scope : $*S
+  %load_scope = begin_access [read] [static] %addr : $*S
+  %value = load [copy] %load_scope : $*S
+  %unknown = function_ref @unknown : $@convention(thin) () -> ()
+  apply %unknown() : $@convention(thin) () -> ()
+  end_access %load_scope : $*S
+  destroy_addr %addr : $*S
+  dealloc_stack %addr : $*S
+  return %value : $S
+}
+
+// Don't fold when the load [copy] is taken from a struct_element_addr
+// projection rather than from the whole object.
+//
+// CHECK-LABEL: sil [ossa] @nofold_scoped_copy_addr_projection : {{.*}} {
+// CHECK: load [copy]
+// CHECK-LABEL: // end sil function 'nofold_scoped_copy_addr_projection'
+sil [ossa] @nofold_scoped_copy_addr_projection : $@convention(thin) (@owned S) -> (@owned S) {
+entry(%instance : @owned $S):
+  %addr = alloc_stack $S
+  %store_scope = begin_access [modify] [static] %addr : $*S
+  store %instance to [init] %store_scope : $*S
+  end_access %store_scope : $*S
+  %load_scope = begin_access [read] [static] %addr : $*S
+  %field_addr = struct_element_addr %load_scope : $*S, #S.x
+  %field = load [copy] %field_addr : $*X
+  end_access %load_scope : $*S
+  destroy_addr %addr : $*S
+  dealloc_stack %addr : $*S
+  %value = struct $S (%field : $X)
+  return %value : $S
+}
+
+// Don't fold the destroy_addr of the outer scope with a copy_addr of a
+// struct_element_addr projection in the inner scope.
+//
+// CHECK-LABEL: sil [ossa] @nofold_with_copy_addr_projection : {{.*}} {
+// CHECK: copy_addr {{%[^,]+}}
+// CHECK-LABEL: // end sil function 'nofold_with_copy_addr_projection'
+sil [ossa] @nofold_with_copy_addr_projection : $@convention(thin) (@owned S) -> (@owned S) {
+entry(%instance : @owned $S):
+  %addr = alloc_stack $S
+  %addr_2 = alloc_stack $S
+  %store_scope = begin_access [modify] [static] %addr : $*S
+  store %instance to [init] %store_scope : $*S
+  end_access %store_scope : $*S
+  %outer = begin_access [read] [static] %addr : $*S
+  apply undef(%outer) : $@convention(thin) (@inout S) -> ()
+  %inner = begin_access [read] [static] %outer : $*S
+  %field_addr = struct_element_addr %inner : $*S, #S.x
+  %field_addr_2 = struct_element_addr %addr_2 : $*S, #S.x
+  copy_addr %field_addr to [initialization] %field_addr_2 : $*X
+  end_access %inner : $*S
+  destroy_addr %outer : $*S
+  end_access %outer : $*S
+  %value = load [take] %addr_2 : $*S
+  dealloc_stack %addr_2 : $*S
+  dealloc_stack %addr : $*S
+  return %value : $S
+}
+
+// Fold the destroy_addr of the outer scope with a copy_addr of the whole
+// value in the inner scope into a copy_addr [take].
+//
+// CHECK-LABEL: sil [ossa] @fold_scoped_destroy_with_scoped_copy_addr : {{.*}} {
+// CHECK: copy_addr [take] {{%[^,]+}}
+// CHECK-LABEL: // end sil function 'fold_scoped_destroy_with_scoped_copy_addr'
+sil [ossa] @fold_scoped_destroy_with_scoped_copy_addr : $@convention(thin) (@owned S) -> (@owned S) {
+entry(%instance : @owned $S):
+  %addr = alloc_stack $S
+  %addr_2 = alloc_stack $S
+  %store_scope = begin_access [modify] [static] %addr : $*S
+  store %instance to [init] %store_scope : $*S
+  end_access %store_scope : $*S
+  %outer = begin_access [read] [static] %addr : $*S
+  apply undef(%outer) : $@convention(thin) (@inout S) -> ()
+  %inner = begin_access [read] [static] %outer : $*S
+  copy_addr %inner to [initialization] %addr_2 : $*S
+  end_access %inner : $*S
+  destroy_addr %outer : $*S
+  end_access %outer : $*S
+  %value = load [take] %addr_2 : $*S
+  dealloc_stack %addr_2 : $*S
+  dealloc_stack %addr : $*S
+  return %value : $S
+}
+
+// Don't fold a destroy_addr with a load [copy] from an unrelated address.
+// CHECK-LABEL: sil [ossa] @nofold_unrelated_scoped_load_copy : {{.*}} {
+// CHECK: load [copy]
+// CHECK: destroy_addr
+// CHECK: destroy_addr
+// CHECK-LABEL: // end sil function 'nofold_unrelated_scoped_load_copy'
+sil [ossa] @nofold_unrelated_scoped_load_copy : $@convention(thin) (@owned X) -> (@owned X) {
+entry(%instance : @owned $X):
+  %copy = copy_value %instance : $X
+  %addr_1 = alloc_stack $X
+  %addr_2 = alloc_stack $X
+  store %instance to [init] %addr_1 : $*X
+  store %copy to [init] %addr_2 : $*X
+
+  %access = begin_access [read] [static] %addr_1 : $*X
+  %loaded = load [copy] %access : $*X
+  end_access %access : $*X
+  destroy_addr %addr_2 : $*X
+
+  %barrier = function_ref @unknown : $@convention(thin) () -> ()
+  apply %barrier() : $@convention(thin) () -> ()
+
+  destroy_addr %addr_1 : $*X
+
+  dealloc_stack %addr_2 : $*X
+  dealloc_stack %addr_1 : $*X
+
+  return %loaded : $X
+}
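
For reference, a minimal before/after sketch of the rewrite that @fold_scoped_load exercises, assuming the pass folds the load [copy]/destroy_addr pair into a single load [take] inside the access scope (only the presence of load [take] is pinned down by the CHECK lines above):

  // Before, as written in the test:
  %load_scope = begin_access [read] [static] %addr : $*S
  %value = load [copy] %load_scope : $*S
  end_access %load_scope : $*S
  destroy_addr %addr : $*S

  // After the fold (illustrative sketch, not checked verbatim):
  %load_scope = begin_access [read] [static] %addr : $*S
  %value = load [take] %load_scope : $*S
  end_access %load_scope : $*S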