@@ -292,8 +292,8 @@ LoadBorrowImmutabilityAnalysis::LoadBorrowImmutabilityAnalysis(
 // \p address may be an address, pointer, or box type.
 bool LoadBorrowImmutabilityAnalysis::isImmutableInScope(
     LoadBorrowInst *lbi, ArrayRef<Operand *> endBorrowUses,
-    AccessPath accessPath) {
-
+    AccessPathWithBase accessPathWithBase) {
+  auto accessPath = accessPathWithBase.accessPath;
   LinearLifetimeChecker checker(deadEndBlocks);
   auto writes = cache.get(accessPath);
 
@@ -303,12 +303,21 @@ bool LoadBorrowImmutabilityAnalysis::isImmutableInScope(
     accessPath.getStorage().print(llvm::errs());
     return false;
   }
+  auto ownershipRoot = accessPath.getStorage().isReference()
+                           ? findOwnershipReferenceRoot(accessPathWithBase.base)
+                           : SILValue();
   // Then for each write...
   for (auto *op : *writes) {
     // First see if the write is a dead end block. In such a case, just skip it.
     if (deadEndBlocks.isDeadEnd(op->getUser()->getParent())) {
       continue;
     }
+    // A destroy_value will be a definite write only when the destroy is on the
+    // ownershipRoot
+    if (isa<DestroyValueInst>(op->getUser())) {
+      if (op->get() != ownershipRoot)
+        continue;
+    }
     // See if the write is within the load borrow's lifetime. If it isn't, we
     // don't have to worry about it.
     if (!checker.validateLifetime(lbi, endBorrowUses, op)) {
@@ -326,7 +335,9 @@ bool LoadBorrowImmutabilityAnalysis::isImmutableInScope(
 //===----------------------------------------------------------------------===//
 
 bool LoadBorrowImmutabilityAnalysis::isImmutable(LoadBorrowInst *lbi) {
-  AccessPath accessPath = AccessPath::computeInScope(lbi->getOperand());
+  auto accessPathWithBase = AccessPathWithBase::compute(lbi->getOperand());
+  auto accessPath = accessPathWithBase.accessPath;
+
   // Bail on an invalid AccessPath. AccessPath completeness is verified
   // independently--it may be invalid in extraordinary situations. When
   // AccessPath is valid, we know all its uses are recognizable.
@@ -358,15 +369,15 @@ bool LoadBorrowImmutabilityAnalysis::isImmutable(LoadBorrowInst *lbi) {
     //
     // TODO: As a separate analysis, verify that the load_borrow scope is always
     // nested within the begin_access scope (to ensure no aliasing access).
-    return isImmutableInScope(lbi, endBorrowUses, accessPath);
+    return isImmutableInScope(lbi, endBorrowUses, accessPathWithBase);
   }
   case AccessedStorage::Argument: {
     auto *arg =
         cast<SILFunctionArgument>(accessPath.getStorage().getArgument());
     if (arg->hasConvention(SILArgumentConvention::Indirect_In_Guaranteed)) {
       return true;
     }
-    return isImmutableInScope(lbi, endBorrowUses, accessPath);
+    return isImmutableInScope(lbi, endBorrowUses, accessPathWithBase);
   }
   // FIXME: A yielded address could overlap with another in this function.
   case AccessedStorage::Yield:
@@ -376,7 +387,7 @@ bool LoadBorrowImmutabilityAnalysis::isImmutable(LoadBorrowInst *lbi) {
   case AccessedStorage::Tail:
   case AccessedStorage::Global:
   case AccessedStorage::Unidentified:
-    return isImmutableInScope(lbi, endBorrowUses, accessPath);
+    return isImmutableInScope(lbi, endBorrowUses, accessPathWithBase);
   }
   llvm_unreachable("Covered switch isn't covered?!");
 }
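
Restated outside the diff for clarity: the new check treats a destroy_value as a write only when it destroys the ownership root of the borrowed storage. Below is a minimal sketch of that rule as a standalone helper; the helper name is illustrative and not part of the patch, and it assumes the usual Swift SIL headers.

// Sketch only: restates the destroy_value filter added above.
#include "swift/SIL/SILInstruction.h"
#include "swift/SIL/SILValue.h"

using namespace swift;

// Returns true if `write` must be treated as a definite write to the
// storage whose owning reference is `ownershipRoot`.
static bool isDefiniteWrite(Operand *write, SILValue ownershipRoot) {
  // Destroying the ownership root may deinitialize the borrowed memory, so
  // only that destroy_value counts; destroys of other values are skipped,
  // which matches the continue in the loop above.
  if (isa<DestroyValueInst>(write->getUser()))
    return write->get() == ownershipRoot;
  // Every other cached write keeps its previous meaning.
  return true;
}

Note that when the storage is not a reference, ownershipRoot stays a null SILValue, so the comparison fails for every destroy_value and all of them are skipped.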