|
13 | 13 | #define DEBUG_TYPE "sil-semantic-arc-opts"
|
14 | 14 | #include "swift/Basic/STLExtras.h"
|
15 | 15 | #include "swift/SIL/BasicBlockUtils.h"
|
| 16 | +#include "swift/SIL/MemAccessUtils.h" |
16 | 17 | #include "swift/SIL/OwnershipUtils.h"
|
17 | 18 | #include "swift/SIL/SILArgument.h"
|
| 19 | +#include "swift/SIL/SILBuilder.h" |
18 | 20 | #include "swift/SIL/SILInstruction.h"
|
19 | 21 | #include "swift/SIL/SILVisitor.h"
|
20 | 22 | #include "swift/SILOptimizer/Analysis/PostOrderAnalysis.h"
|
|
27 | 29 | using namespace swift;
|
28 | 30 |
|
29 | 31 | STATISTIC(NumEliminatedInsts, "number of removed instructions");
|
| 32 | +STATISTIC(NumLoadCopyConvertedToLoadBorrow, |
| 33 | + "number of load_copy converted to load_borrow"); |
30 | 34 |
|
31 | 35 | //===----------------------------------------------------------------------===//
|
32 | 36 | // Utility
|
@@ -79,6 +83,7 @@ struct SemanticARCOptVisitor
|
79 | 83 | bool visitSILInstruction(SILInstruction *i) { return false; }
|
80 | 84 | bool visitCopyValueInst(CopyValueInst *cvi);
|
81 | 85 | bool visitBeginBorrowInst(BeginBorrowInst *bbi);
|
| 86 | + bool visitLoadInst(LoadInst *li); |
82 | 87 | };
|
83 | 88 |
|
84 | 89 | } // end anonymous namespace
|
@@ -249,6 +254,116 @@ bool SemanticARCOptVisitor::visitCopyValueInst(CopyValueInst *cvi) {
|
249 | 254 | return false;
|
250 | 255 | }
|
251 | 256 |
|
| 257 | +//===----------------------------------------------------------------------===// |
| 258 | +// load [copy] Optimizations |
| 259 | +//===----------------------------------------------------------------------===// |
| 260 | + |
| 261 | +/// A flow insensitive analysis that tells the load [copy] analysis if the |
| 262 | +/// storage has 0, 1, >1 writes to it. |
| 263 | +/// |
| 264 | +/// In the case of 0 writes, we return CanOptimizeLoadCopyResult::Always. |
| 265 | +/// |
| 266 | +/// In the case of 1 write, we return OnlyIfStorageIsLocal. We are taking |
| 267 | +/// advantage of definite initialization implying that an alloc_stack must be |
| 268 | +/// written to once before any loads from the memory location. Thus if we are |
| 269 | +/// local and see 1 write, we can still change to load_borrow if all other uses |
| 270 | +/// check out. |
| 271 | +/// |
| 272 | +/// If there is 2+ writes, we can not optimize = (. |
| 273 | +namespace { |
| 274 | + |
| 275 | +struct CanOptimizeLoadCopyFromAccessVisitor |
| 276 | + : AccessedStorageVisitor<CanOptimizeLoadCopyFromAccessVisitor, bool> { |
| 277 | + SILFunction &f; |
| 278 | + |
| 279 | + CanOptimizeLoadCopyFromAccessVisitor(SILFunction &f) : f(f) {} |
| 280 | + |
| 281 | + // Stubs |
| 282 | + bool visitBox(const AccessedStorage &boxStorage) { return false; } |
| 283 | + bool visitStack(const AccessedStorage &stackStorage) { return false; } |
| 284 | + bool visitGlobal(const AccessedStorage &globalStorage) { return false; } |
| 285 | + bool visitClass(const AccessedStorage &classStorage) { return false; } |
| 286 | + bool visitYield(const AccessedStorage &yieldStorage) { return false; } |
| 287 | + bool visitUnidentified(const AccessedStorage &unidentifiedStorage) { |
| 288 | + return false; |
| 289 | + } |
| 290 | + bool visitNested(const AccessedStorage &nested) { |
| 291 | + llvm_unreachable("Visitor should never see nested since we lookup our " |
| 292 | + "address storage using lookup non nested"); |
| 293 | + } |
| 294 | + |
| 295 | + bool visitArgument(const AccessedStorage &argumentStorage); |
| 296 | +}; |
| 297 | + |
| 298 | +} // namespace |
| 299 | + |
| 300 | +bool CanOptimizeLoadCopyFromAccessVisitor::visitArgument( |
| 301 | + const AccessedStorage &storage) { |
| 302 | + auto *arg = cast<SILFunctionArgument>(storage.getArgument(&f)); |
| 303 | + |
| 304 | + // Then check if we have an in_guaranteed argument. In this case, we can |
| 305 | + // always optimize load [copy] from this. |
| 306 | + if (arg->hasConvention(SILArgumentConvention::Indirect_In_Guaranteed)) |
| 307 | + return true; |
| 308 | + |
| 309 | + // For now just return false. |
| 310 | + return false; |
| 311 | +} |
| 312 | + |
| 313 | +static bool isWrittenTo(SILFunction &f, SILValue value) { |
| 314 | + // Then find our accessed storage. If we can not find anything, be |
| 315 | + // conservative and assume that the value is written to. |
| 316 | + const auto &storage = findAccessedStorageNonNested(value); |
| 317 | + if (!storage) |
| 318 | + return false; |
| 319 | + |
| 320 | + // Then see if we ever write to this address in a flow insensitive |
| 321 | + // way (ignoring stores that are obviously the only initializer to |
| 322 | + // memory). We have to do this since load_borrow assumes that the |
| 323 | + // underlying memory is never written to. |
| 324 | + return !CanOptimizeLoadCopyFromAccessVisitor(f).visit(storage); |
| 325 | +} |
| 326 | + |
// Convert a load [copy] from unique storage [read] that has all uses that can
// accept a guaranteed parameter to a load_borrow.
bool SemanticARCOptVisitor::visitLoadInst(LoadInst *li) {
  // Only load [copy] is a candidate; [take]/[trivial]/unqualified loads are
  // left untouched.
  if (li->getOwnershipQualifier() != LoadOwnershipQualifier::Copy)
    return false;

  // Ok, we have our load [copy]. Make sure its value is never
  // consumed. If it is consumed, we need to pass off a +1 value, so
  // bail.
  //
  // FIXME: We should consider if it is worth promoting a load [copy]
  // -> load_borrow if we can put a copy_value on a cold path and thus
  // eliminate RR traffic on a hot path.
  //
  // NOTE: isConsumed also collects the destroy_value users of the load into
  // destroyValues; those are rewritten into end_borrows below.
  SmallVector<DestroyValueInst *, 32> destroyValues;
  if (isConsumed(li, destroyValues))
    return false;

  // Then check if our address is ever written to. If it is, then we
  // can not use the load_borrow.
  if (isWrittenTo(*li->getFunction(), li->getOperand()))
    return false;

  // Ok, we can perform our optimization. Convert the load [copy] into a
  // load_borrow.
  auto *lbi =
      SILBuilderWithScope(li).createLoadBorrow(li->getLoc(), li->getOperand());
  // Each destroy_value of the old +1 value marks the end of the value's
  // lifetime, so replace it with an end_borrow of the new borrow at the same
  // point. The end_borrow must be created before the destroy is erased so the
  // builder can anchor on it.
  while (!destroyValues.empty()) {
    auto *dvi = destroyValues.pop_back_val();
    SILBuilderWithScope(dvi).createEndBorrow(dvi->getLoc(), lbi);
    dvi->eraseFromParent();
    ++NumEliminatedInsts;
  }

  // RAUW must happen before erasing the load so its remaining uses are
  // redirected to the borrowed value.
  li->replaceAllUsesWith(lbi);
  li->eraseFromParent();
  ++NumEliminatedInsts;
  ++NumLoadCopyConvertedToLoadBorrow;
  return true;
}
| 366 | + |
252 | 367 | //===----------------------------------------------------------------------===//
|
253 | 368 | // Top Level Entrypoint
|
254 | 369 | //===----------------------------------------------------------------------===//
|
|
0 commit comments