@@ -3,10 +3,9 @@
 use rustc::middle::codegen_fn_attrs::CodegenFnAttrFlags;
 use rustc::mir::visit::*;
 use rustc::mir::*;
-use rustc::ty::subst::{InternalSubsts, Subst, SubstsRef};
+use rustc::ty::subst::{InternalSubsts, Subst};
 use rustc::ty::{self, Instance, InstanceDef, ParamEnv, Ty, TyCtxt, TypeFoldable};
 use rustc_attr as attr;
-use rustc_hir::def_id::DefId;
 use rustc_index::bit_set::BitSet;
 use rustc_index::vec::{Idx, IndexVec};
 use rustc_session::config::Sanitizer;
@@ -29,8 +28,7 @@ pub struct Inline;

 #[derive(Copy, Clone, Debug)]
 struct CallSite<'tcx> {
-    callee: DefId,
-    substs: SubstsRef<'tcx>,
+    callee: Instance<'tcx>,
     bb: BasicBlock,
     source_info: SourceInfo,
 }
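
The `CallSite` struct now carries one fully resolved `Instance<'tcx>` instead of a separate `DefId` plus `SubstsRef<'tcx>`, so the callee's identity and its substitutions can no longer drift apart. A minimal, self-contained sketch of the shape change; the types here are hypothetical stand-ins, not rustc's:

```rust
// Stand-ins for rustc's DefId / SubstsRef, just to show the struct shape.
#[derive(Copy, Clone, Debug)]
struct DefId(u32);

#[derive(Copy, Clone, Debug)]
struct Instance<'tcx> {
    def_id: DefId,
    substs: &'tcx [&'tcx str], // stand-in for SubstsRef<'tcx>
}

#[derive(Copy, Clone, Debug)]
struct CallSite<'tcx> {
    // Before: `callee: DefId` and `substs: SubstsRef<'tcx>` as two fields.
    callee: Instance<'tcx>,
    bb: usize, // stand-in for BasicBlock
}

fn main() {
    let substs: &[&str] = &["i32"];
    let callsite = CallSite { callee: Instance { def_id: DefId(7), substs }, bb: 0 };
    println!("{:?} uses substs {:?}", callsite, callsite.callee.substs);
}
```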
@@ -94,13 +92,19 @@ impl Inliner<'tcx> {
             local_change = false;
             while let Some(callsite) = callsites.pop_front() {
                 debug!("checking whether to inline callsite {:?}", callsite);
-                if !self.tcx.is_mir_available(callsite.callee) {
-                    debug!("checking whether to inline callsite {:?} - MIR unavailable", callsite);
-                    continue;
+
+                if let InstanceDef::Item(callee_def_id) = callsite.callee.def {
+                    if !self.tcx.is_mir_available(callee_def_id) {
+                        debug!(
+                            "checking whether to inline callsite {:?} - MIR unavailable",
+                            callsite,
+                        );
+                        continue;
+                    }
                 }

                 let self_node_id = self.tcx.hir().as_local_node_id(self.source.def_id()).unwrap();
-                let callee_node_id = self.tcx.hir().as_local_node_id(callsite.callee);
+                let callee_node_id = self.tcx.hir().as_local_node_id(callsite.callee.def_id());

                 let callee_body = if let Some(callee_node_id) = callee_node_id {
                     // Avoid a cycle here by only using `optimized_mir` only if we have
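
`is_mir_available` takes a `DefId`, so the check only makes sense for plain `InstanceDef::Item` callees; other instance kinds (drop glue, clone shims, and the like) get compiler-synthesized MIR and are never "unavailable". A hedged sketch of that gating with a toy enum, not rustc's real one:

```rust
// Toy InstanceDef: only Item maps to a def-id whose MIR may be missing.
#[derive(Debug)]
enum InstanceDef {
    Item(u32),  // ordinary fn item, identified by a def-id
    DropGlue,   // synthesized shim: MIR is always constructible
    CloneShim,  // likewise
}

// Mirrors the diff's shape: skip the availability check unless this is an Item.
fn mir_available(def: &InstanceDef, is_mir_available: impl Fn(u32) -> bool) -> bool {
    match def {
        InstanceDef::Item(def_id) => is_mir_available(*def_id),
        _ => true, // shims never hit the "MIR unavailable" path
    }
}

fn main() {
    let no_mir = |_def_id: u32| false;
    assert!(mir_available(&InstanceDef::DropGlue, no_mir));
    assert!(mir_available(&InstanceDef::CloneShim, no_mir));
    assert!(!mir_available(&InstanceDef::Item(3), no_mir));
}
```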
@@ -110,19 +114,21 @@ impl Inliner<'tcx> {
                     if !self.tcx.dep_graph.is_fully_enabled()
                         && self_node_id.as_u32() < callee_node_id.as_u32()
                     {
-                        self.tcx.optimized_mir(callsite.callee)
+                        self.tcx.instance_mir(callsite.callee.def)
                     } else {
                         continue;
                     }
                 } else {
                     // This cannot result in a cycle since the callee MIR is from another crate
                     // and is already optimized.
-                    self.tcx.optimized_mir(callsite.callee)
+                    self.tcx.instance_mir(callsite.callee.def)
                 };

+                let callee_body: &Body<'tcx> = &*callee_body;
+
                 let callee_body = if self.consider_optimizing(callsite, callee_body) {
                     self.tcx.subst_and_normalize_erasing_regions(
-                        &callsite.substs,
+                        &callsite.callee.substs,
                         param_env,
                         callee_body,
                     )
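
Two things happen here: `optimized_mir(def_id)` becomes `instance_mir(instance.def)`, which can also hand back shim MIR, and the explicit `let callee_body: &Body<'tcx> = &*callee_body;` pins down the `Deref` target so the rest of the function works with a plain `&Body`. A minimal sketch of that reborrow pattern, with toy types standing in for rustc's:

```rust
use std::ops::Deref;

// Toy stand-ins: the cache wrapper derefs to the underlying body.
struct Body {
    blocks: usize,
}

struct BodyAndCache {
    body: Body,
}

impl Deref for BodyAndCache {
    type Target = Body;
    fn deref(&self) -> &Body {
        &self.body
    }
}

fn main() {
    let cached = BodyAndCache { body: Body { blocks: 3 } };
    // Same move as in the diff: `&*` goes through Deref exactly once, so the
    // binding has type `&Body` rather than `&BodyAndCache`.
    let body: &Body = &*cached;
    println!("callee body has {} blocks", body.blocks);
}
```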
@@ -183,18 +189,13 @@ impl Inliner<'tcx> {
         let terminator = bb_data.terminator();
         if let TerminatorKind::Call { func: ref op, .. } = terminator.kind {
             if let ty::FnDef(callee_def_id, substs) = op.ty(caller_body, self.tcx).kind {
-                let instance = Instance::resolve(self.tcx, param_env, callee_def_id, substs)?;
+                let callee = Instance::resolve(self.tcx, param_env, callee_def_id, substs)?;

-                if let InstanceDef::Virtual(..) = instance.def {
+                if let InstanceDef::Virtual(..) | InstanceDef::Intrinsic(_) = callee.def {
                     return None;
                 }

-                return Some(CallSite {
-                    callee: instance.def_id(),
-                    substs: instance.substs,
-                    bb,
-                    source_info: terminator.source_info,
-                });
+                return Some(CallSite { callee, bb, source_info: terminator.source_info });
             }
         }

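
Callsite discovery now rejects intrinsics alongside virtual calls, since neither has a MIR body the inliner could splice in; the two cases share one `if let` arm via an or-pattern. A toy sketch (hypothetical enum, and the top-level or-pattern in `if let` needs Rust 1.65+ on stable):

```rust
#[derive(Debug)]
enum InstanceDef {
    Item(u32),
    Virtual(u32),
    Intrinsic(u32),
}

// Mirrors the diff: bail out on both instance kinds with a single or-pattern.
fn inline_candidate(def: InstanceDef) -> Option<InstanceDef> {
    if let InstanceDef::Virtual(..) | InstanceDef::Intrinsic(_) = def {
        return None; // no MIR body exists for these, so nothing to inline
    }
    Some(def)
}

fn main() {
    assert!(inline_candidate(InstanceDef::Virtual(0)).is_none());
    assert!(inline_candidate(InstanceDef::Intrinsic(1)).is_none());
    assert!(inline_candidate(InstanceDef::Item(2)).is_some());
}
```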
@@ -219,7 +220,7 @@ impl Inliner<'tcx> {
             return false;
         }

-        let codegen_fn_attrs = tcx.codegen_fn_attrs(callsite.callee);
+        let codegen_fn_attrs = tcx.codegen_fn_attrs(callsite.callee.def_id());

         if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::TRACK_CALLER) {
             debug!("`#[track_caller]` present - not inlining");
@@ -264,8 +265,8 @@ impl Inliner<'tcx> {
         // Only inline local functions if they would be eligible for cross-crate
         // inlining. This is to ensure that the final crate doesn't have MIR that
         // reference unexported symbols
-        if callsite.callee.is_local() {
-            if callsite.substs.non_erasable_generics().count() == 0 && !hinted {
+        if callsite.callee.def_id().is_local() {
+            if callsite.callee.substs.non_erasable_generics().count() == 0 && !hinted {
                 debug!("    callee is an exported function - not inlining");
                 return false;
             }
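
The policy in this hunk is unchanged, only the field accesses move: a local callee that is fully monomorphic (zero non-erasable generics) is an exported symbol, so it is skipped unless an inline hint was seen. A boolean restatement of the decision (toy inputs, not rustc's queries):

```rust
// Toy restatement of the check: local + non-generic + unhinted => don't inline.
fn consider_local_callee(is_local: bool, non_erasable_generics: usize, hinted: bool) -> bool {
    if is_local && non_erasable_generics == 0 && !hinted {
        // "callee is an exported function - not inlining"
        return false;
    }
    true
}

fn main() {
    assert!(!consider_local_callee(true, 0, false)); // exported, no hint: skip
    assert!(consider_local_callee(true, 1, false));  // generic: instantiated per use
    assert!(consider_local_callee(true, 0, true));   // #[inline]-style hint present
    assert!(consider_local_callee(false, 0, false)); // cross-crate MIR, checked elsewhere
}
```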
@@ -321,7 +322,7 @@ impl Inliner<'tcx> {
                     work_list.push(target);
                     // If the location doesn't actually need dropping, treat it like
                     // a regular goto.
-                    let ty = location.ty(callee_body, tcx).subst(tcx, callsite.substs).ty;
+                    let ty = location.ty(callee_body, tcx).subst(tcx, callsite.callee.substs).ty;
                     if ty.needs_drop(tcx, param_env) {
                         cost += CALL_PENALTY;
                         if let Some(unwind) = unwind {
@@ -371,7 +372,7 @@ impl Inliner<'tcx> {

         for v in callee_body.vars_and_temps_iter() {
             let v = &callee_body.local_decls[v];
-            let ty = v.ty.subst(tcx, callsite.substs);
+            let ty = v.ty.subst(tcx, callsite.callee.substs);
             // Cost of the var is the size in machine-words, if we know
             // it.
             if let Some(size) = type_size_of(tcx, param_env, ty) {
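
Both of these hunks feed the inlining cost model: a `Drop` of a type that actually needs dropping costs `CALL_PENALTY`, and each local adds its size in machine words when the size is known. A simplified, self-contained version of that accounting; the constant values and the unknown-size fallback here are illustrative assumptions, not quoted from rustc:

```rust
// Illustrative constants; rustc defines its own CALL_PENALTY and uses the
// target's pointer width instead of a hardcoded 8 bytes.
const CALL_PENALTY: usize = 25;
const BYTES_PER_WORD: usize = 8;

// Size of one local in machine words, rounded up; unknown sizes count as 1.
fn local_cost(size_in_bytes: Option<usize>) -> usize {
    match size_in_bytes {
        Some(size) => (size + BYTES_PER_WORD - 1) / BYTES_PER_WORD,
        None => 1,
    }
}

fn main() {
    let mut cost = 0;
    cost += CALL_PENALTY;         // a Drop whose type needs dropping
    cost += local_cost(Some(16)); // a 16-byte temporary = 2 words
    cost += local_cost(None);     // a local of unknown size
    assert_eq!(cost, 28);
    println!("estimated inline cost: {}", cost);
}
```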
@@ -399,7 +400,7 @@ impl Inliner<'tcx> {
         &self,
         callsite: CallSite<'tcx>,
         caller_body: &mut BodyAndCache<'tcx>,
-        mut callee_body: BodyAndCache<'tcx>,
+        mut callee_body: Body<'tcx>,
     ) -> bool {
         let terminator = caller_body[callsite.bb].terminator.take().unwrap();
         match terminator.kind {
@@ -501,6 +502,13 @@ impl Inliner<'tcx> {
                 caller_body.var_debug_info.push(var_debug_info);
             }

+            // HACK(eddyb) work around the `basic_blocks` field of `mir::Body`
+            // being private, due to `BodyAndCache` implementing `DerefMut`
+            // to `mir::Body` (which would allow bypassing `basic_blocks_mut`).
+            // The only way to make `basic_blocks` public again would be to
+            // remove that `DerefMut` impl and add more `*_mut` accessors.
+            let mut callee_body = BodyAndCache::new(callee_body);
+
             for (bb, mut block) in callee_body.basic_blocks_mut().drain_enumerated(..) {
                 integrator.visit_basic_block_data(bb, &mut block);
                 caller_body.basic_blocks_mut().push(block);
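
The HACK comment is about encapsulation: if `basic_blocks` were public (or reachable through a `DerefMut` impl), callers could mutate blocks without invalidating the cached control-flow information. A toy model of why routing all mutation through `basic_blocks_mut` matters; these are hypothetical types, not rustc's:

```rust
// Toy cache wrapper: any mutable access to the blocks invalidates the cache.
struct Body {
    basic_blocks: Vec<String>, // private in rustc's mir::Body for this reason
}

struct BodyAndCache {
    body: Body,
    predecessors_valid: bool, // stand-in for the cached predecessor graph
}

impl BodyAndCache {
    fn new(body: Body) -> Self {
        BodyAndCache { body, predecessors_valid: true }
    }

    // The only mutable path to the blocks, so invalidation can't be bypassed.
    fn basic_blocks_mut(&mut self) -> &mut Vec<String> {
        self.predecessors_valid = false;
        &mut self.body.basic_blocks
    }
}

fn main() {
    let mut cached = BodyAndCache::new(Body { basic_blocks: vec!["bb0".into()] });
    cached.basic_blocks_mut().push("bb1".into());
    assert!(!cached.predecessors_valid); // mutation tracked, as intended
}
```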
@@ -554,7 +562,9 @@ impl Inliner<'tcx> {
                 //     tmp2 = tuple_tmp.2
                 //
                 // and the vector is `[closure_ref, tmp0, tmp1, tmp2]`.
-                if tcx.is_closure(callsite.callee) {
+                // FIXME(eddyb) make this check for `"rust-call"` ABI combined with
+                // `callee_body.spread_arg == None`, instead of special-casing closures.
+                if tcx.is_closure(callsite.callee.def_id()) {
                     let mut args = args.into_iter();
                     let self_ = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
                     let tuple = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
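
The surrounding comment describes the `"rust-call"` calling convention: a closure call arrives as `(closure_ref, (tmp0, tmp1, tmp2))` and the inliner flattens the argument tuple into individual operands. A plain-Rust illustration of that reshaping, with strings standing in for MIR operands:

```rust
// Flatten (closure_ref, tuple) into [closure_ref, tmp0, tmp1, tmp2],
// mirroring the comment above the changed line.
fn flatten_args(closure_ref: &str, tuple_tmp: (&str, &str, &str)) -> Vec<String> {
    vec![
        closure_ref.to_string(),
        tuple_tmp.0.to_string(), // tmp0 = tuple_tmp.0
        tuple_tmp.1.to_string(), // tmp1 = tuple_tmp.1
        tuple_tmp.2.to_string(), // tmp2 = tuple_tmp.2
    ]
}

fn main() {
    let args = flatten_args("closure_ref", ("tmp0", "tmp1", "tmp2"));
    assert_eq!(args, ["closure_ref", "tmp0", "tmp1", "tmp2"]);
}
```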