@@ -97,6 +97,8 @@ pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
     /// A cold block is a block that is unlikely to be executed at runtime.
     cold_blocks: IndexVec<mir::BasicBlock, bool>,
 
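+    /// Landing pads, as found by `find_noop_landing_pads`, that do nothing but
+    /// resume unwinding; cleanup edges into them are skipped during codegen.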
+    nop_landing_pads: DenseBitSet<mir::BasicBlock>,
+
     /// The location where each MIR arg/var/tmp/ret is stored. This is
     /// usually an `PlaceRef` representing an alloca, but not always:
     /// sometimes we can skip the alloca and just store the value
@@ -181,8 +183,14 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
 
     let mut mir = tcx.instance_mir(instance.def);
 
-    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
-    debug!("fn_abi: {:?}", fn_abi);
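+    // Identify the landing pads that only resume unwinding, so that codegen
+    // below can skip them along with the cleanup edges leading into them.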
+    let nop_landing_pads = rustc_mir_transform::remove_noop_landing_pads::find_noop_landing_pads(
+        mir,
+        Some(rustc_mir_transform::remove_noop_landing_pads::ExtraInfo {
+            tcx,
+            instance,
+            typing_env: cx.typing_env(),
+        }),
+    );
 
     if tcx.features().ergonomic_clones() {
         let monomorphized_mir = instance.instantiate_mir_and_normalize_erasing_regions(
@@ -193,19 +201,23 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         mir = tcx.arena.alloc(optimize_use_clone::<Bx>(cx, monomorphized_mir));
     }
 
+    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
+    debug!("fn_abi: {:?}", fn_abi);
+
     let debug_context = cx.create_function_debug_context(instance, fn_abi, llfn, &mir);
 
     let start_llbb = Bx::append_block(cx, llfn, "start");
     let mut start_bx = Bx::build(cx, start_llbb);
 
-    if mir.basic_blocks.iter().any(|bb| {
-        bb.is_cleanup || matches!(bb.terminator().unwind(), Some(mir::UnwindAction::Terminate(_)))
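+    // Only count cleanup blocks that will actually be codegen'd as landing
+    // pads: a function whose only cleanup blocks are no-op landing pads does
+    // not need a personality function.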
+    if mir::traversal::mono_reachable(&mir, tcx, instance).any(|(bb, block)| {
+        (block.is_cleanup && !nop_landing_pads.contains(bb))
+            || matches!(block.terminator().unwind(), Some(mir::UnwindAction::Terminate(_)))
     }) {
         start_bx.set_personality_fn(cx.eh_personality());
     }
 
-    let cleanup_kinds =
-        base::wants_new_eh_instructions(tcx.sess).then(|| analyze::cleanup_kinds(&mir));
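+    // `cleanup_kinds` now also receives the no-op landing pads, so the funclet
+    // analysis can ignore blocks that codegen will skip.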
+    let cleanup_kinds = base::wants_new_eh_instructions(tcx.sess)
+        .then(|| analyze::cleanup_kinds(&mir, &nop_landing_pads));
 
     let cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>> =
         mir.basic_blocks
@@ -233,6 +245,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         debug_context,
         per_local_var_debug_info: None,
         caller_location: None,
+        nop_landing_pads,
     };
 
     // It may seem like we should iterate over `required_consts` to ensure they all successfully
@@ -244,7 +257,36 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         fx.compute_per_local_var_debug_info(&mut start_bx).unzip();
     fx.per_local_var_debug_info = per_local_var_debug_info;
 
-    let traversal_order = traversal::mono_reachable_reverse_postorder(mir, tcx, instance);
+    let mut traversal_order = traversal::mono_reachable_reverse_postorder(mir, tcx, instance);
+
+    // Filter out blocks that won't be codegen'd because of the nop_landing_pads
+    // optimization.
+    // FIXME: We might want to integrate the nop_landing_pads analysis into mono
+    // reachability.
+    {
+        let mut reachable = DenseBitSet::new_empty(mir.basic_blocks.len());
+        let mut to_visit = vec![mir::START_BLOCK];
+        while let Some(next) = to_visit.pop() {
+            if !reachable.insert(next) {
+                continue;
+            }
+
+            let block = &mir.basic_blocks[next];
+            if let Some(mir::UnwindAction::Cleanup(target)) = block.terminator().unwind()
+                && fx.nop_landing_pads.contains(*target)
+            {
+                // This edge will not be followed when we actually codegen, so skip
+                // generating it here.
+                //
+                // It's guaranteed that the cleanup block (`target`) occurs only in
+                // UnwindAction::Cleanup(...) -- i.e., we can't incorrectly filter too
+                // much here -- because cleanup transitions must happen via
+                // UnwindAction::Cleanup.
+                to_visit.extend(block.terminator().successors().filter(|s| s != target));
+            } else {
+                to_visit.extend(block.terminator().successors());
+            }
+        }
+
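+        // `retain` only drops blocks, it never reorders them, so the remaining
+        // blocks still form a reverse postorder.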
+        traversal_order.retain(|bb| reachable.contains(*bb));
+    }
+
 
     let memory_locals = analyze::non_ssa_locals(&fx, &traversal_order);
     // Allocate variable and temp allocas