@@ -255,7 +255,7 @@ static bool ir_is_dead_load_ex(ir_ctx *ctx, ir_ref ref, uint32_t flags, ir_insn
 {
 	if ((flags & (IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_MASK)) == (IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_LOAD)) {
 		return ctx->use_lists[ref].count == 1;
-	} else if (insn->op == IR_ALLOCA) {
+	} else if (insn->op == IR_ALLOCA || insn->op == IR_BLOCK_BEGIN) {
 		return ctx->use_lists[ref].count == 1;
 	}
 	return 0;
@@ -644,8 +644,13 @@ static void ir_sccp_remove_unfeasible_merge_inputs(ir_ctx *ctx, ir_insn *_values
 				next_insn = use_insn;
 			} else if (use_insn->op != IR_NOP) {
 				IR_ASSERT(use_insn->op1 == ref);
-				use_insn->op1 = prev;
-				ir_use_list_add(ctx, prev, use);
+				IR_ASSERT(use_insn->op == IR_VAR);
+				ir_ref region = prev;
+				while (!IR_IS_BB_START(ctx->ir_base[region].op)) {
+					region = ctx->ir_base[region].op1;
+				}
+				use_insn->op1 = region;
+				ir_use_list_add(ctx, region, use);
 				p = &ctx->use_edges[use_list->refs + k];
 			}
 		}
@@ -1240,6 +1245,22 @@ static void ir_merge_blocks(ir_ctx *ctx, ir_ref end, ir_ref begin, ir_bitqueue *
 	}
 }
 
+static void ir_remove_unused_vars(ir_ctx *ctx, ir_ref start, ir_ref end)
+{
+	ir_use_list *use_list = &ctx->use_lists[start];
+	ir_ref *p, use, n = use_list->count;
+
+	for (p = &ctx->use_edges[use_list->refs]; n > 0; p++, n--) {
+		use = *p;
+		if (use != end) {
+			ir_insn *use_insn = &ctx->ir_base[use];
+			IR_ASSERT(use_insn->op == IR_VAR);
+			IR_ASSERT(ctx->use_lists[use].count == 0);
+			MAKE_NOP(use_insn);
+		}
+	}
+}
+
 static bool ir_try_remove_empty_diamond(ir_ctx *ctx, ir_ref ref, ir_insn *insn, ir_bitqueue *worklist)
 {
 	if (insn->inputs_count == 2) {
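The helper introduced above walks a use list, so it is worth spelling out the layout it relies on: ctx->use_lists[ref] holds a {refs, count} pair whose refs field indexes into the flat ctx->use_edges[] array of users. Below is a minimal sketch of that traversal, assuming only what the hunk itself shows (the exact ir_use_list definition lives elsewhere in the project, and the fragment presumes the surrounding ir_ctx context):

	/* Sketch: enumerate every user of `start`, assuming ir_use_list is a
	 * {refs, count} pair that indexes into ctx->use_edges[]. */
	ir_use_list *use_list = &ctx->use_lists[start];
	ir_ref *p = &ctx->use_edges[use_list->refs];	/* first user */
	ir_ref n = use_list->count;			/* number of users */

	while (n-- > 0) {
		ir_insn *use_insn = &ctx->ir_base[*p++];
		/* in ir_remove_unused_vars() every user other than the block's END
		 * is expected to be an IR_VAR with an empty use list of its own,
		 * and is turned into a NOP so the region can be deleted safely */
	}

In effect, the callers below no longer assert that a to-be-removed region has exactly one user (its END); they first sweep any dead IR_VAR nodes still attached to it.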
@@ -1289,8 +1310,12 @@ static bool ir_try_remove_empty_diamond(ir_ctx *ctx, ir_ref ref, ir_insn *insn,
 		ir_ref next_ref = ctx->use_edges[ctx->use_lists[ref].refs];
 		ir_insn *next = &ctx->ir_base[next_ref];
 
-		IR_ASSERT(ctx->use_lists[start1_ref].count == 1);
-		IR_ASSERT(ctx->use_lists[start2_ref].count == 1);
+		if (ctx->use_lists[start1_ref].count != 1) {
+			ir_remove_unused_vars(ctx, start1_ref, end1_ref);
+		}
+		if (ctx->use_lists[start2_ref].count != 1) {
+			ir_remove_unused_vars(ctx, start2_ref, end2_ref);
+		}
 
 		next->op1 = root->op1;
 		ir_use_list_replace_one(ctx, root->op1, root_ref, next_ref);
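The variable names in this hunk suggest the control-flow shape being collapsed; a hedged sketch, inferred from the surrounding code rather than taken from it:

	/*
	 *        root (IF)
	 *       /         \
	 *   start1       start2     <- empty blocks, but they may still own dead IR_VARs
	 *     |            |
	 *    end1         end2
	 *       \         /
	 *      ref (MERGE, no PHIs)
	 *           |
	 *         next
	 */

Rewiring next->op1 to root->op1 bypasses the whole diamond; the change above first NOPs any leftover IR_VAR nodes bound to start1/start2 via ir_remove_unused_vars() instead of asserting that those blocks have a single user.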
@@ -1331,7 +1356,9 @@ static bool ir_try_remove_empty_diamond(ir_ctx *ctx, ir_ref ref, ir_insn *insn,
 			if (start->op != IR_CASE_VAL && start->op != IR_CASE_DEFAULT) {
 				return 0;
 			}
-			IR_ASSERT(ctx->use_lists[start_ref].count == 1);
+			if (ctx->use_lists[start_ref].count != 1) {
+				ir_remove_unused_vars(ctx, start_ref, end_ref);
+			}
 			if (!root_ref) {
 				root_ref = start->op1;
 				if (ctx->use_lists[root_ref].count != count) {
@@ -1454,8 +1481,12 @@ static bool ir_optimize_phi(ir_ctx *ctx, ir_ref merge_ref, ir_insn *merge, ir_re
 		}
 		next = &ctx->ir_base[next_ref];
 
-		IR_ASSERT(ctx->use_lists[start1_ref].count == 1);
-		IR_ASSERT(ctx->use_lists[start2_ref].count == 1);
+		if (ctx->use_lists[start1_ref].count != 1) {
+			ir_remove_unused_vars(ctx, start1_ref, end1_ref);
+		}
+		if (ctx->use_lists[start2_ref].count != 1) {
+			ir_remove_unused_vars(ctx, start2_ref, end2_ref);
+		}
 
 		insn->op = (
 			(is_less ? cond->op1 : cond->op2)
@@ -1540,8 +1571,12 @@ static bool ir_optimize_phi(ir_ctx *ctx, ir_ref merge_ref, ir_insn *merge, ir_re
 		}
 		next = &ctx->ir_base[next_ref];
 
-		IR_ASSERT(ctx->use_lists[start1_ref].count == 1);
-		IR_ASSERT(ctx->use_lists[start2_ref].count == 1);
+		if (ctx->use_lists[start1_ref].count != 1) {
+			ir_remove_unused_vars(ctx, start1_ref, end1_ref);
+		}
+		if (ctx->use_lists[start2_ref].count != 1) {
+			ir_remove_unused_vars(ctx, start2_ref, end2_ref);
+		}
 
 		insn->op = IR_ABS;
 		insn->inputs_count = 1;
@@ -1605,8 +1640,12 @@ static bool ir_optimize_phi(ir_ctx *ctx, ir_ref merge_ref, ir_insn *merge, ir_re
 		}
 		next = &ctx->ir_base[next_ref];
 
-		IR_ASSERT(ctx->use_lists[start1_ref].count == 1);
-		IR_ASSERT(ctx->use_lists[start2_ref].count == 1);
+		if (ctx->use_lists[start1_ref].count != 1) {
+			ir_remove_unused_vars(ctx, start1_ref, end1_ref);
+		}
+		if (ctx->use_lists[start2_ref].count != 1) {
+			ir_remove_unused_vars(ctx, start2_ref, end2_ref);
+		}
 
 		insn->op = IR_COND;
 		insn->inputs_count = 3;
@@ -2126,9 +2165,13 @@ static void ir_optimize_merge(ir_ctx *ctx, ir_ref merge_ref, ir_insn *merge, ir_
 
 	ir_ref next_ref = ctx->use_edges[use_list->refs + 1];
 	ir_insn *next = &ctx->ir_base[next_ref];
-	IR_ASSERT(next->op != IR_PHI);
 
-	if (phi->op == IR_PHI) {
+	if (next->op == IR_PHI) {
+		SWAP_REFS(phi_ref, next_ref);
+		SWAP_INSNS(phi, next);
+	}
+
+	if (phi->op == IR_PHI && next->op != IR_PHI) {
 		if (next->op == IR_IF && next->op1 == merge_ref && ctx->use_lists[phi_ref].count == 1) {
 			if (next->op2 == phi_ref) {
 				if (ir_try_split_if(ctx, next_ref, next, worklist)) {
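The final hunk stops assuming that the PHI is always the first of the MERGE's two users: if the second user turns out to be the PHI, the refs and insn pointers are swapped so the existing single-PHI fast path still applies, while the case of two PHIs is skipped by the `next->op != IR_PHI` guard. SWAP_REFS and SWAP_INSNS are presumably plain swap macros; an assumed expansion, for illustration only:

	/* Assumption: illustrative only, not the project's actual definitions */
	#define SWAP_REFS(_r1, _r2) do { \
			ir_ref _tmp = (_r1); (_r1) = (_r2); (_r2) = _tmp; \
		} while (0)

	#define SWAP_INSNS(_i1, _i2) do { \
			ir_insn *_tmp = (_i1); (_i1) = (_i2); (_i2) = _tmp; \
		} while (0)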