Skip to content

Commit 1b984ba

Browse files
committed
[BOLT][NFC] Add sink block to flow CFG in profile inference
Test Plan: tbd
Reviewers:
Subscribers:
Tasks:
Tags:
Differential Revision: https://phabricator.intern.facebook.com/D58380996
1 parent 3c8e0b8 commit 1b984ba

File tree

2 files changed: +34 additions, −7 deletions

bolt/lib/Profile/StaleProfileMatching.cpp

Lines changed: 32 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -309,22 +309,33 @@ createFlowFunction(const BinaryFunction::BasicBlockOrderType &BlockOrder) {
309309
FlowFunction Func;
310310

311311
// Add a special "dummy" source so that there is always a unique entry point.
312-
// Because of the extra source, for all other blocks in FlowFunction it holds
313-
// that Block.Index == BB->getIndex() + 1
314312
FlowBlock EntryBlock;
315313
EntryBlock.Index = 0;
316314
Func.Blocks.push_back(EntryBlock);
317315

316+
auto BinaryBlockIsExit = [&](const BinaryBasicBlock &BB) {
317+
if (BB.successors().empty())
318+
return true;
319+
return false;
320+
};
321+
318322
// Create FlowBlock for every basic block in the binary function
319323
for (const BinaryBasicBlock *BB : BlockOrder) {
320324
Func.Blocks.emplace_back();
321325
FlowBlock &Block = Func.Blocks.back();
322326
Block.Index = Func.Blocks.size() - 1;
327+
Block.HasSuccessors = BinaryBlockIsExit(*BB);
323328
(void)BB;
324329
assert(Block.Index == BB->getIndex() + 1 &&
325330
"incorrectly assigned basic block index");
326331
}
327332

333+
// Add a special "dummy" sink block so there is always a unique sink
334+
FlowBlock SinkBlock;
335+
SinkBlock.Index = Func.Blocks.size();
336+
Func.Blocks.push_back(SinkBlock);
337+
Func.Sink = SinkBlock.Index;
338+
328339
// Create FlowJump for each jump between basic blocks in the binary function
329340
std::vector<uint64_t> InDegree(Func.Blocks.size(), 0);
330341
for (const BinaryBasicBlock *SrcBB : BlockOrder) {
@@ -360,25 +371,37 @@ createFlowFunction(const BinaryFunction::BasicBlockOrderType &BlockOrder) {
360371
// Add dummy edges to the extra sources. If there are multiple entry blocks,
361372
// add an unlikely edge from 0 to the subsequent ones
362373
assert(InDegree[0] == 0 && "dummy entry blocks shouldn't have predecessors");
363-
for (uint64_t I = 1; I < Func.Blocks.size(); I++) {
374+
for (uint64_t I = 1; I < BlockOrder.size() + 1; I++) {
364375
const BinaryBasicBlock *BB = BlockOrder[I - 1];
365376
if (BB->isEntryPoint() || InDegree[I] == 0) {
366377
Func.Jumps.emplace_back();
367378
FlowJump &Jump = Func.Jumps.back();
368-
Jump.Source = 0;
379+
Jump.Source = Func.Entry;
369380
Jump.Target = I;
370381
if (!BB->isEntryPoint())
371382
Jump.IsUnlikely = true;
372383
}
373384
}
374385

386+
// Add dummy edges from the exit blocks to the sink block.
387+
for (uint64_t I = 1; I < BlockOrder.size() + 1; I++) {
388+
FlowBlock &Block = Func.Blocks[I];
389+
if (Block.HasSuccessors) {
390+
Func.Jumps.emplace_back();
391+
FlowJump &Jump = Func.Jumps.back();
392+
Jump.Source = I;
393+
Jump.Target = Func.Sink;
394+
}
395+
}
396+
375397
// Create necessary metadata for the flow function
376398
for (FlowJump &Jump : Func.Jumps) {
377399
assert(Jump.Source < Func.Blocks.size());
378400
Func.Blocks[Jump.Source].SuccJumps.push_back(&Jump);
379401
assert(Jump.Target < Func.Blocks.size());
380402
Func.Blocks[Jump.Target].PredJumps.push_back(&Jump);
381403
}
404+
382405
return Func;
383406
}
384407

@@ -395,7 +418,7 @@ void matchWeightsByHashes(BinaryContext &BC,
395418
const BinaryFunction::BasicBlockOrderType &BlockOrder,
396419
const yaml::bolt::BinaryFunctionProfile &YamlBF,
397420
FlowFunction &Func) {
398-
assert(Func.Blocks.size() == BlockOrder.size() + 1);
421+
assert(Func.Blocks.size() == BlockOrder.size() + 2);
399422

400423
std::vector<FlowBlock *> Blocks;
401424
std::vector<BlendedBlockHash> BlendedHashes;
@@ -618,7 +641,7 @@ void assignProfile(BinaryFunction &BF,
618641
FlowFunction &Func) {
619642
BinaryContext &BC = BF.getBinaryContext();
620643

621-
assert(Func.Blocks.size() == BlockOrder.size() + 1);
644+
assert(Func.Blocks.size() == BlockOrder.size() + 2);
622645
for (uint64_t I = 0; I < BlockOrder.size(); I++) {
623646
FlowBlock &Block = Func.Blocks[I + 1];
624647
BinaryBasicBlock *BB = BlockOrder[I];
@@ -640,6 +663,9 @@ void assignProfile(BinaryFunction &BF,
640663
if (Jump->Flow == 0)
641664
continue;
642665

666+
// Skip the artificial sink block
667+
if (Jump->Target == Func.Sink)
668+
continue;
643669
BinaryBasicBlock &SuccBB = *BlockOrder[Jump->Target - 1];
644670
// Check if the edge corresponds to a regular jump or a landing pad
645671
if (BB->getSuccessor(SuccBB.getLabel())) {

llvm/include/llvm/Transforms/Utils/SampleProfileInference.h

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,10 +31,10 @@ struct FlowBlock {
3131
uint64_t Flow{0};
3232
std::vector<FlowJump *> SuccJumps;
3333
std::vector<FlowJump *> PredJumps;
34+
bool HasSuccessors{false};
3435

3536
/// Check if it is the entry block in the function.
3637
bool isEntry() const { return PredJumps.empty(); }
37-
3838
/// Check if it is an exit block in the function.
3939
bool isExit() const { return SuccJumps.empty(); }
4040
};
@@ -57,6 +57,7 @@ struct FlowFunction {
5757
std::vector<FlowJump> Jumps;
5858
/// The index of the entry block.
5959
uint64_t Entry{0};
60+
uint64_t Sink{0};
6061
};
6162

6263
/// Various thresholds and options controlling the behavior of the profile

0 commit comments

Comments (0)