@@ -415,37 +415,36 @@ enum BasicBlockFlags : uint64_t
    BBF_CLONED_FINALLY_BEGIN           = MAKE_BBFLAG( 7), // First block of a cloned finally region
    BBF_CLONED_FINALLY_END             = MAKE_BBFLAG( 8), // Last block of a cloned finally region
    BBF_HAS_SUPPRESSGC_CALL            = MAKE_BBFLAG( 9), // BB contains a call to a method with SuppressGCTransitionAttribute
-   BBF_RUN_RARELY                     = MAKE_BBFLAG(10), // BB is rarely run (catch clauses, blocks with throws etc)
-   BBF_HAS_LABEL                      = MAKE_BBFLAG(11), // BB needs a label
-   BBF_LOOP_ALIGN                     = MAKE_BBFLAG(12), // Block is lexically the first block in a loop we intend to align.
-   BBF_HAS_ALIGN                      = MAKE_BBFLAG(13), // BB ends with 'align' instruction
-   BBF_HAS_JMP                        = MAKE_BBFLAG(14), // BB executes a JMP instruction (instead of return)
-   BBF_GC_SAFE_POINT                  = MAKE_BBFLAG(15), // BB has a GC safe point (e.g. a call)
-   BBF_HAS_MDARRAYREF                 = MAKE_BBFLAG(16), // Block has a multi-dimensional array reference
-   BBF_HAS_NEWOBJ                     = MAKE_BBFLAG(17), // BB contains 'new' of an object type.
-
-   BBF_RETLESS_CALL                   = MAKE_BBFLAG(18), // BBJ_CALLFINALLY that will never return (and therefore, won't need a paired
+   BBF_HAS_LABEL                      = MAKE_BBFLAG(10), // BB needs a label
+   BBF_LOOP_ALIGN                     = MAKE_BBFLAG(11), // Block is lexically the first block in a loop we intend to align.
+   BBF_HAS_ALIGN                      = MAKE_BBFLAG(12), // BB ends with 'align' instruction
+   BBF_HAS_JMP                        = MAKE_BBFLAG(13), // BB executes a JMP instruction (instead of return)
+   BBF_GC_SAFE_POINT                  = MAKE_BBFLAG(14), // BB has a GC safe point (e.g. a call)
+   BBF_HAS_MDARRAYREF                 = MAKE_BBFLAG(15), // Block has a multi-dimensional array reference
+   BBF_HAS_NEWOBJ                     = MAKE_BBFLAG(16), // BB contains 'new' of an object type.
+
+   BBF_RETLESS_CALL                   = MAKE_BBFLAG(17), // BBJ_CALLFINALLY that will never return (and therefore, won't need a paired
                                                          // BBJ_CALLFINALLYRET); see isBBCallFinallyPair().
-   BBF_COLD                           = MAKE_BBFLAG(19), // BB is cold
-   BBF_PROF_WEIGHT                    = MAKE_BBFLAG(20), // BB weight is computed from profile data
-   BBF_KEEP_BBJ_ALWAYS                = MAKE_BBFLAG(21), // A special BBJ_ALWAYS block, used by EH code generation. Keep the jump kind
+   BBF_COLD                           = MAKE_BBFLAG(18), // BB is cold
+   BBF_PROF_WEIGHT                    = MAKE_BBFLAG(19), // BB weight is computed from profile data
+   BBF_KEEP_BBJ_ALWAYS                = MAKE_BBFLAG(20), // A special BBJ_ALWAYS block, used by EH code generation. Keep the jump kind
                                                          // as BBJ_ALWAYS. Used on x86 for the final step block out of a finally.
-   BBF_HAS_CALL                       = MAKE_BBFLAG(22), // BB contains a call
-   BBF_DOMINATED_BY_EXCEPTIONAL_ENTRY = MAKE_BBFLAG(23), // Block is dominated by exceptional entry.
-   BBF_BACKWARD_JUMP                  = MAKE_BBFLAG(24), // BB is surrounded by a backward jump/switch arc
-   BBF_BACKWARD_JUMP_SOURCE           = MAKE_BBFLAG(25), // Block is a source of a backward jump
-   BBF_BACKWARD_JUMP_TARGET           = MAKE_BBFLAG(26), // Block is a target of a backward jump
-   BBF_PATCHPOINT                     = MAKE_BBFLAG(27), // Block is a patchpoint
-   BBF_PARTIAL_COMPILATION_PATCHPOINT = MAKE_BBFLAG(28), // Block is a partial compilation patchpoint
-   BBF_HAS_HISTOGRAM_PROFILE          = MAKE_BBFLAG(29), // BB contains a call needing a histogram profile
-   BBF_TAILCALL_SUCCESSOR             = MAKE_BBFLAG(30), // BB has pred that has potential tail call
-   BBF_RECURSIVE_TAILCALL             = MAKE_BBFLAG(31), // Block has recursive tailcall that may turn into a loop
-   BBF_NO_CSE_IN                      = MAKE_BBFLAG(32), // Block should kill off any incoming CSE
-   BBF_CAN_ADD_PRED                   = MAKE_BBFLAG(33), // Ok to add pred edge to this block, even when "safe" edge creation disabled
-   BBF_HAS_VALUE_PROFILE              = MAKE_BBFLAG(34), // Block has a node that needs a value probing
-   BBF_HAS_NEWARR                     = MAKE_BBFLAG(35), // BB contains 'new' of an array type.
-   BBF_MAY_HAVE_BOUNDS_CHECKS         = MAKE_BBFLAG(36), // BB *likely* has a bounds check (after rangecheck phase).
-   BBF_ASYNC_RESUMPTION               = MAKE_BBFLAG(37), // Block is a resumption block in an async method
+   BBF_HAS_CALL                       = MAKE_BBFLAG(21), // BB contains a call
+   BBF_DOMINATED_BY_EXCEPTIONAL_ENTRY = MAKE_BBFLAG(22), // Block is dominated by exceptional entry.
+   BBF_BACKWARD_JUMP                  = MAKE_BBFLAG(23), // BB is surrounded by a backward jump/switch arc
+   BBF_BACKWARD_JUMP_SOURCE           = MAKE_BBFLAG(24), // Block is a source of a backward jump
+   BBF_BACKWARD_JUMP_TARGET           = MAKE_BBFLAG(25), // Block is a target of a backward jump
+   BBF_PATCHPOINT                     = MAKE_BBFLAG(26), // Block is a patchpoint
+   BBF_PARTIAL_COMPILATION_PATCHPOINT = MAKE_BBFLAG(27), // Block is a partial compilation patchpoint
+   BBF_HAS_HISTOGRAM_PROFILE          = MAKE_BBFLAG(28), // BB contains a call needing a histogram profile
+   BBF_TAILCALL_SUCCESSOR             = MAKE_BBFLAG(29), // BB has pred that has potential tail call
+   BBF_RECURSIVE_TAILCALL             = MAKE_BBFLAG(30), // Block has recursive tailcall that may turn into a loop
+   BBF_NO_CSE_IN                      = MAKE_BBFLAG(31), // Block should kill off any incoming CSE
+   BBF_CAN_ADD_PRED                   = MAKE_BBFLAG(32), // Ok to add pred edge to this block, even when "safe" edge creation disabled
+   BBF_HAS_VALUE_PROFILE              = MAKE_BBFLAG(33), // Block has a node that needs a value probing
+   BBF_HAS_NEWARR                     = MAKE_BBFLAG(34), // BB contains 'new' of an array type.
+   BBF_MAY_HAVE_BOUNDS_CHECKS         = MAKE_BBFLAG(35), // BB *likely* has a bounds check (after rangecheck phase).
+   BBF_ASYNC_RESUMPTION               = MAKE_BBFLAG(36), // Block is a resumption block in an async method

    // The following are sets of flags.

@@ -468,7 +467,6 @@ enum BasicBlockFlags : uint64_t
    // Flags gained by the bottom block when a block is split.
    // Note, this is a conservative guess.
    // For example, the bottom block might or might not have BBF_HAS_NEWARR, but we assume it has BBF_HAS_NEWARR.
-   // TODO: Should BBF_RUN_RARELY be added to BBF_SPLIT_GAINED?

    BBF_SPLIT_GAINED = BBF_DONT_REMOVE | BBF_HAS_JMP | BBF_BACKWARD_JUMP | BBF_PROF_WEIGHT | BBF_HAS_NEWARR | \
                       BBF_HAS_NEWOBJ | BBF_KEEP_BBJ_ALWAYS | BBF_CLONED_FINALLY_END | BBF_HAS_HISTOGRAM_PROFILE | BBF_HAS_VALUE_PROFILE | BBF_HAS_MDARRAYREF | BBF_NEEDS_GCPOLL | BBF_MAY_HAVE_BOUNDS_CHECKS | BBF_ASYNC_RESUMPTION,
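Each `BBF_*` value occupies a single bit of the 64-bit flag word, which is why deleting `BBF_RUN_RARELY` shifts every later flag down by one bit and why composite sets such as `BBF_SPLIT_GAINED` are plain bitwise ORs. A minimal standalone sketch of the idiom, assuming `MAKE_BBFLAG` is a simple left shift (the macro body is not shown in this hunk):

```cpp
#include <cstdint>
#include <cstdio>

// Assumption: MAKE_BBFLAG maps a bit index to a single set bit in a 64-bit mask.
#define MAKE_BBFLAG(bit) (1ULL << (bit))

enum BasicBlockFlags : uint64_t
{
    BBF_EMPTY         = 0,
    BBF_HAS_LABEL     = MAKE_BBFLAG(10), // was bit 11 before BBF_RUN_RARELY was removed
    BBF_GC_SAFE_POINT = MAKE_BBFLAG(14),
};

int main()
{
    // Flag sets compose with bitwise OR and are queried with bitwise AND,
    // exactly like the composite BBF_SPLIT_GAINED mask above.
    uint64_t flags = BBF_HAS_LABEL | BBF_GC_SAFE_POINT;
    std::printf("has label: %d\n", (flags & BBF_HAS_LABEL) != 0);
    return 0;
}
```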
@@ -1169,9 +1167,16 @@ struct BasicBlock : private LIR::Range
    unsigned bbRefs; // number of blocks that can reach here, either by fall-through or a branch. If this falls to zero,
                     // the block is unreachable.

+#define BB_UNITY_WEIGHT          100.0   // how much a normal execute once block weighs
+#define BB_UNITY_WEIGHT_UNSIGNED 100     // how much a normal execute once block weighs
+#define BB_LOOP_WEIGHT_SCALE     8.0     // synthetic profile scale factor for loops
+#define BB_ZERO_WEIGHT           0.0
+#define BB_COLD_WEIGHT           0.01    // Upper bound for cold weights; used during block layout
+#define BB_MAX_WEIGHT            FLT_MAX // maximum finite weight -- needs rethinking.
+
    bool isRunRarely() const
    {
-       return HasFlag(BBF_RUN_RARELY);
+       return (bbWeight == BB_ZERO_WEIGHT);
    }

    bool isLoopAlign() const
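With `BBF_RUN_RARELY` gone, `isRunRarely()` is derived directly from `bbWeight`, so the rarely-run property can no longer drift out of sync with the weight it summarizes. A standalone sketch of the new invariant, using a hypothetical `MiniBlock` stand-in (the real `BasicBlock` carries far more state, and `weight_t` is assumed here to be `double`):

```cpp
#include <cassert>

// Hypothetical stand-in for the JIT's BasicBlock; weight_t assumed to be double.
typedef double weight_t;

#define BB_UNITY_WEIGHT 100.0 // weight of a block that executes exactly once
#define BB_ZERO_WEIGHT  0.0

struct MiniBlock
{
    weight_t bbWeight = BB_UNITY_WEIGHT;

    // "Rarely run" is derived from the weight instead of being cached in a flag,
    // so it cannot get out of sync when the weight is later updated.
    bool isRunRarely() const
    {
        return bbWeight == BB_ZERO_WEIGHT;
    }

    // Zeroing the weight is now the only step needed to mark a block rarely run.
    void bbSetRunRarely()
    {
        bbWeight = BB_ZERO_WEIGHT;
    }
};

int main()
{
    MiniBlock block;
    assert(!block.isRunRarely());

    block.bbSetRunRarely();
    assert(block.isRunRarely());
    return 0;
}
```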
@@ -1196,13 +1201,6 @@ struct BasicBlock : private LIR::Range
    const char* dspToString(int blockNumPadding = 0) const;
#endif // DEBUG

-#define BB_UNITY_WEIGHT          100.0   // how much a normal execute once block weighs
-#define BB_UNITY_WEIGHT_UNSIGNED 100     // how much a normal execute once block weighs
-#define BB_LOOP_WEIGHT_SCALE     8.0     // synthetic profile scale factor for loops
-#define BB_ZERO_WEIGHT           0.0
-#define BB_COLD_WEIGHT           0.01    // Upper bound for cold weights; used during block layout
-#define BB_MAX_WEIGHT            FLT_MAX // maximum finite weight -- needs rethinking.
-
    weight_t bbWeight; // The dynamic execution weight of this block

    // getCalledCount -- get the value used to normalize weights for this method
@@ -1235,15 +1233,6 @@ struct BasicBlock : private LIR::Range
    {
        this->SetFlags(BBF_PROF_WEIGHT);
        this->bbWeight = weight;
-
-       if (weight == BB_ZERO_WEIGHT)
-       {
-           this->SetFlags(BBF_RUN_RARELY);
-       }
-       else
-       {
-           this->RemoveFlags(BBF_RUN_RARELY);
-       }
    }

    // increaseBBProfileWeight -- Increase the profile-derived weight for a basic block
@@ -1278,44 +1267,20 @@ struct BasicBlock : private LIR::Range
    {
        assert(0 <= percentage && percentage <= 100);

-       this->bbWeight = (bSrc->bbWeight * percentage) / 100;
-
-       if (bSrc->hasProfileWeight())
-       {
-           this->SetFlags(BBF_PROF_WEIGHT);
-       }
-       else
-       {
-           this->RemoveFlags(BBF_PROF_WEIGHT);
-       }
-
-       if (this->bbWeight == BB_ZERO_WEIGHT)
-       {
-           this->SetFlags(BBF_RUN_RARELY);
-       }
-       else
-       {
-           this->RemoveFlags(BBF_RUN_RARELY);
-       }
+       this->bbWeight = (bSrc->bbWeight * percentage) / 100;
+       const BasicBlockFlags hasProfileWeight = bSrc->GetFlagsRaw() & BBF_PROF_WEIGHT;
+       this->RemoveFlags(BBF_PROF_WEIGHT);
+       this->SetFlags(hasProfileWeight);
    }

    // Scale a blocks' weight by some factor.
    //
    void scaleBBWeight(weight_t scale)
    {
        this->bbWeight = this->bbWeight * scale;
-
-       if (this->bbWeight == BB_ZERO_WEIGHT)
-       {
-           this->SetFlags(BBF_RUN_RARELY);
-       }
-       else
-       {
-           this->RemoveFlags(BBF_RUN_RARELY);
-       }
    }

-   // Set block weight to zero, and set run rarely flag.
+   // Set block weight to zero.
    //
    void bbSetRunRarely()
    {
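The rewritten `inheritWeightPercentage` also drops the `if/else` that mirrored `bSrc`'s profile bit: it masks `BBF_PROF_WEIGHT` out of the source block's raw flags and ORs it back in, and it needs no `BBF_RUN_RARELY` bookkeeping because a zero weight already means rarely run. A sketch of that flag-copy idiom on the same hypothetical `MiniBlock` model (the bit position and field names are illustrative, not the JIT's actual layout):

```cpp
#include <cassert>
#include <cstdint>

// Hypothetical model of the flag-copy idiom in inheritWeightPercentage().
typedef double   weight_t;
typedef uint64_t BlockFlags;

const BlockFlags BBF_PROF_WEIGHT = 1ULL << 19; // illustrative bit position
const weight_t   BB_ZERO_WEIGHT  = 0.0;

struct MiniBlock
{
    weight_t   bbWeight = 0.0;
    BlockFlags bbFlags  = 0;

    bool isRunRarely() const
    {
        return bbWeight == BB_ZERO_WEIGHT;
    }

    void inheritWeightPercentage(const MiniBlock* bSrc, unsigned percentage)
    {
        assert(percentage <= 100);
        bbWeight = (bSrc->bbWeight * percentage) / 100;

        // Branch-free transfer of the profile bit: mask it out of the source
        // flags, clear it locally, then OR whatever the source had back in.
        const BlockFlags hasProfileWeight = bSrc->bbFlags & BBF_PROF_WEIGHT;
        bbFlags &= ~BBF_PROF_WEIGHT;
        bbFlags |= hasProfileWeight;

        // No BBF_RUN_RARELY maintenance: a zero weight already makes
        // isRunRarely() return true.
    }
};

int main()
{
    MiniBlock src;
    src.bbWeight = 100.0;
    src.bbFlags  = BBF_PROF_WEIGHT;

    MiniBlock dst;
    dst.inheritWeightPercentage(&src, 0); // inherit 0% of the source weight

    assert(dst.isRunRarely());
    assert((dst.bbFlags & BBF_PROF_WEIGHT) != 0);
    return 0;
}
```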
@@ -1808,71 +1773,6 @@ struct BasicBlock : private LIR::Range
    {
    }

-   // Iteratable collection of successors of a block.
-   template <typename TPosition>
-   class Successors
-   {
-       Compiler*   m_comp;
-       BasicBlock* m_block;
-
-   public:
-       Successors(Compiler* comp, BasicBlock* block)
-           : m_comp(comp)
-           , m_block(block)
-       {
-       }
-
-       class iterator
-       {
-           Compiler*   m_comp;
-           BasicBlock* m_block;
-           TPosition   m_pos;
-
-       public:
-           iterator(Compiler* comp, BasicBlock* block)
-               : m_comp(comp)
-               , m_block(block)
-               , m_pos(comp, block)
-           {
-           }
-
-           iterator()
-               : m_pos()
-           {
-           }
-
-           void operator++(void)
-           {
-               m_pos.Advance(m_comp, m_block);
-           }
-
-           BasicBlock* operator*()
-           {
-               return m_pos.Current(m_comp, m_block);
-           }
-
-           bool operator==(const iterator& other)
-           {
-               return m_pos == other.m_pos;
-           }
-
-           bool operator!=(const iterator& other)
-           {
-               return m_pos != other.m_pos;
-           }
-       };
-
-       iterator begin()
-       {
-           return iterator(m_comp, m_block);
-       }
-
-       iterator end()
-       {
-           return iterator();
-       }
-   };
-
    template <typename TFunc>
    BasicBlockVisit VisitEHEnclosedHandlerSecondPassSuccs(Compiler* comp, TFunc func);

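With the iterator-style `Successors` wrapper deleted, successor walks rely on the visitor-style `Visit*` templates, like the `VisitEHEnclosedHandlerSecondPassSuccs` declaration that survives above. The sketch below models that callback pattern on hypothetical types; the `VisitAllSuccs` name and `BlockVisit` enum are stand-ins and should not be read as the JIT's exact signatures:

```cpp
#include <cstdio>
#include <vector>

// Hypothetical stand-ins; the JIT's visitor returns a BasicBlockVisit value so
// a walk can stop early, which is modeled here as BlockVisit::Abort.
enum class BlockVisit { Continue, Abort };

struct MiniBlock
{
    int                     num;
    std::vector<MiniBlock*> succs;

    // Visitor-style traversal: the callback runs once per successor and can
    // abort the walk, removing the need for a begin()/end() iterator class.
    template <typename TFunc>
    BlockVisit VisitAllSuccs(TFunc func)
    {
        for (MiniBlock* succ : succs)
        {
            if (func(succ) == BlockVisit::Abort)
            {
                return BlockVisit::Abort;
            }
        }
        return BlockVisit::Continue;
    }
};

int main()
{
    MiniBlock b1{1, {}};
    MiniBlock b2{2, {}};
    MiniBlock b0{0, {&b1, &b2}};

    b0.VisitAllSuccs([](MiniBlock* succ) {
        std::printf("successor BB%02d\n", succ->num);
        return BlockVisit::Continue;
    });
    return 0;
}
```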