@@ -97,6 +97,8 @@ pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
     /// A cold block is a block that is unlikely to be executed at runtime.
     cold_blocks: IndexVec<mir::BasicBlock, bool>,
 
+    nop_landing_pads: DenseBitSet<mir::BasicBlock>,
+
     /// The location where each MIR arg/var/tmp/ret is stored. This is
     /// usually an `PlaceRef` representing an alloca, but not always:
     /// sometimes we can skip the alloca and just store the value
@@ -176,8 +178,14 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
 
     let mut mir = tcx.instance_mir(instance.def);
 
-    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
-    debug!("fn_abi: {:?}", fn_abi);
+    let nop_landing_pads = rustc_mir_transform::remove_noop_landing_pads::find_noop_landing_pads(
+        mir,
+        Some(rustc_mir_transform::remove_noop_landing_pads::ExtraInfo {
+            tcx,
+            instance,
+            typing_env: cx.typing_env(),
+        }),
+    );
 
     if tcx.features().ergonomic_clones() {
         let monomorphized_mir = instance.instantiate_mir_and_normalize_erasing_regions(
@@ -188,19 +196,23 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         mir = tcx.arena.alloc(optimize_use_clone::<Bx>(cx, monomorphized_mir));
     }
 
+    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
+    debug!("fn_abi: {:?}", fn_abi);
+
     let debug_context = cx.create_function_debug_context(instance, fn_abi, llfn, &mir);
 
     let start_llbb = Bx::append_block(cx, llfn, "start");
     let mut start_bx = Bx::build(cx, start_llbb);
 
-    if mir.basic_blocks.iter().any(|bb| {
-        bb.is_cleanup || matches!(bb.terminator().unwind(), Some(mir::UnwindAction::Terminate(_)))
+    if mir::traversal::mono_reachable(&mir, tcx, instance).any(|(bb, block)| {
+        (block.is_cleanup && !nop_landing_pads.contains(bb))
+            || matches!(block.terminator().unwind(), Some(mir::UnwindAction::Terminate(_)))
     }) {
         start_bx.set_personality_fn(cx.eh_personality());
     }
 
-    let cleanup_kinds =
-        base::wants_new_eh_instructions(tcx.sess).then(|| analyze::cleanup_kinds(&mir));
+    let cleanup_kinds = base::wants_new_eh_instructions(tcx.sess)
+        .then(|| analyze::cleanup_kinds(&mir, &nop_landing_pads));
 
     let cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>> =
         mir.basic_blocks
@@ -228,6 +240,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         debug_context,
         per_local_var_debug_info: None,
         caller_location: None,
+        nop_landing_pads,
     };
 
     // It may seem like we should iterate over `required_consts` to ensure they all successfully
@@ -239,7 +252,36 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         fx.compute_per_local_var_debug_info(&mut start_bx).unzip();
     fx.per_local_var_debug_info = per_local_var_debug_info;
 
-    let traversal_order = traversal::mono_reachable_reverse_postorder(mir, tcx, instance);
+    let mut traversal_order = traversal::mono_reachable_reverse_postorder(mir, tcx, instance);
+
+    // Filter out blocks that won't be codegen'd because of the nop_landing_pads optimization.
+    // FIXME: We might want to integrate the nop_landing_pads analysis into mono reachability.
+    {
+        let mut reachable = DenseBitSet::new_empty(mir.basic_blocks.len());
+        let mut to_visit = vec![mir::START_BLOCK];
+        while let Some(next) = to_visit.pop() {
+            if !reachable.insert(next) {
+                continue;
+            }
+
+            let block = &mir.basic_blocks[next];
+            if let Some(mir::UnwindAction::Cleanup(target)) = block.terminator().unwind()
+                && fx.nop_landing_pads.contains(*target)
+            {
+                // This edge will not be followed when we actually codegen, so skip generating it here.
+                //
+                // It's guaranteed that the cleanup block (`target`) occurs only in
+                // UnwindAction::Cleanup(...) -- i.e., we can't incorrectly filter too much here --
+                // because cleanup transitions must happen via UnwindAction::Cleanup.
+                to_visit.extend(block.terminator().successors().filter(|s| s != target));
+            } else {
+                to_visit.extend(block.terminator().successors());
+            }
+        }
+
+        traversal_order.retain(|bb| reachable.contains(*bb));
+    }
+
     let memory_locals = analyze::non_ssa_locals(&fx, &traversal_order);
 
     // Allocate variable and temp allocas
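For context on what the new `find_noop_landing_pads` call computes: a landing pad is a "no-op" when the cleanup block does no real work before continuing to unwind, so codegen can behave as if the unwind edge into it did not exist. The sketch below is a toy model of that idea only, not rustc's implementation: the `Stmt`/`Term` enums and the `is_noop_pad` helper are invented stand-ins for MIR statements, terminators, and the analysis in `rustc_mir_transform::remove_noop_landing_pads`.

```rust
// Toy model (hypothetical types, not rustc MIR): a cleanup block is a
// no-op landing pad if all of its statements are effect-free and its
// terminator either resumes unwinding or forwards to another no-op pad.

#[derive(Clone, Copy)]
enum Stmt {
    Nop,         // no runtime effect
    StorageDead, // bookkeeping only: still a no-op at runtime
    CallDrop,    // runs a destructor: NOT a no-op
}

enum Term {
    Resume,      // continue unwinding to the caller
    Goto(usize), // forward to another cleanup block
}

struct CleanupBlock {
    stmts: Vec<Stmt>,
    term: Term,
}

fn is_noop_pad(blocks: &[CleanupBlock], bb: usize, visiting: &mut Vec<bool>) -> bool {
    if visiting[bb] {
        return false; // conservatively treat cycles as doing real work
    }
    visiting[bb] = true;
    let block = &blocks[bb];
    let stmts_ok = block.stmts.iter().all(|&s| matches!(s, Stmt::Nop | Stmt::StorageDead));
    stmts_ok
        && match block.term {
            Term::Resume => true,
            Term::Goto(next) => is_noop_pad(blocks, next, visiting),
        }
}

fn main() {
    let blocks = vec![
        // bb0: only bookkeeping, then resumes -> no-op pad.
        CleanupBlock { stmts: vec![Stmt::StorageDead], term: Term::Resume },
        // bb1: actually runs a destructor -> real landing pad.
        CleanupBlock { stmts: vec![Stmt::CallDrop], term: Term::Resume },
    ];
    assert!(is_noop_pad(&blocks, 0, &mut vec![false; 2]));
    assert!(!is_noop_pad(&blocks, 1, &mut vec![false; 2]));
}
```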
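The worklist added in the final hunk is self-contained enough to restate outside rustc. Below is a minimal sketch of the same edge-filtering reachability walk, assuming plain `usize` block ids, a `HashSet` in place of `DenseBitSet`, and an invented `Block` type standing in for MIR basic blocks.

```rust
use std::collections::HashSet;

// Toy CFG node (hypothetical, standing in for a MIR basic block):
// `successors` lists every terminator successor, including the unwind
// target; `unwind_cleanup` is Some(bb) when the terminator has an
// UnwindAction::Cleanup(bb) edge.
struct Block {
    successors: Vec<usize>,
    unwind_cleanup: Option<usize>,
}

// Walk the CFG from the start block, refusing to follow an unwind edge
// whose target is a known no-op landing pad: codegen will never emit
// that edge, so the pad is dead unless some other edge reaches it.
fn reachable_blocks(blocks: &[Block], nop_pads: &HashSet<usize>) -> HashSet<usize> {
    let mut reachable = HashSet::new();
    let mut to_visit = vec![0]; // block 0 plays the role of START_BLOCK
    while let Some(next) = to_visit.pop() {
        if !reachable.insert(next) {
            continue; // already visited
        }
        let block = &blocks[next];
        match block.unwind_cleanup {
            Some(target) if nop_pads.contains(&target) => {
                // Skip only the unwind edge; keep all other successors.
                to_visit.extend(block.successors.iter().copied().filter(|&s| s != target));
            }
            _ => to_visit.extend(block.successors.iter().copied()),
        }
    }
    reachable
}

fn main() {
    // bb0 calls something: normal return to bb1, unwind to bb2 (a no-op pad).
    let blocks = [
        Block { successors: vec![1, 2], unwind_cleanup: Some(2) },
        Block { successors: vec![], unwind_cleanup: None },
        Block { successors: vec![], unwind_cleanup: None },
    ];
    let nop_pads = HashSet::from([2]);
    let reachable = reachable_blocks(&blocks, &nop_pads);
    assert!(reachable.contains(&0) && reachable.contains(&1));
    assert!(!reachable.contains(&2)); // the no-op pad is filtered out
    println!("reachable: {reachable:?}");
}
```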