@@ -1,6 +1,6 @@
use std::cmp;

-use rustc_abi::{BackendRepr, ExternAbi, HasDataLayout, Reg, WrappingRange};
+use rustc_abi::{BackendRepr, ExternAbi, HasDataLayout, Reg, Size, WrappingRange};
use rustc_ast as ast;
use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
use rustc_data_structures::packed::Pu128;
@@ -158,7 +158,7 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
        llargs: &[Bx::Value],
        destination: Option<(ReturnDest<'tcx, Bx::Value>, mir::BasicBlock)>,
        mut unwind: mir::UnwindAction,
-        copied_constant_arguments: &[PlaceRef<'tcx, <Bx as BackendTypes>::Value>],
+        lifetime_ends_after_call: &[(Bx::Value, Size)],
        instance: Option<Instance<'tcx>>,
        mergeable_succ: bool,
    ) -> MergingSucc {
@@ -245,8 +245,8 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
            if let Some((ret_dest, target)) = destination {
                bx.switch_to_block(fx.llbb(target));
                fx.set_debug_loc(bx, self.terminator.source_info);
-                for tmp in copied_constant_arguments {
-                    bx.lifetime_end(tmp.val.llval, tmp.layout.size);
+                for &(tmp, size) in lifetime_ends_after_call {
+                    bx.lifetime_end(tmp, size);
                }
                fx.store_return(bx, ret_dest, &fn_abi.ret, invokeret);
            }
@@ -259,8 +259,8 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
            }

            if let Some((ret_dest, target)) = destination {
-                for tmp in copied_constant_arguments {
-                    bx.lifetime_end(tmp.val.llval, tmp.layout.size);
+                for &(tmp, size) in lifetime_ends_after_call {
+                    bx.lifetime_end(tmp, size);
                }
                fx.store_return(bx, ret_dest, &fn_abi.ret, llret);
                self.funclet_br(fx, bx, target, mergeable_succ)
@@ -1049,7 +1049,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
            (args, None)
        };

-        let mut copied_constant_arguments = vec![];
+        // Keeps track of temporary allocas whose lifetimes need to be ended after the call.
+        let mut lifetime_ends_after_call: Vec<(Bx::Value, Size)> = Vec::new();

        'make_args: for (i, arg) in first_args.iter().enumerate() {
            let mut op = self.codegen_operand(bx, &arg.node);
@@ -1137,19 +1138,26 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                    bx.lifetime_start(tmp.val.llval, tmp.layout.size);
                    op.val.store(bx, tmp);
                    op.val = Ref(tmp.val);
-                    copied_constant_arguments.push(tmp);
+                    lifetime_ends_after_call.push((tmp.val.llval, tmp.layout.size));
                }
                _ => {}
            }

-            self.codegen_argument(bx, op, &mut llargs, &fn_abi.args[i]);
+            self.codegen_argument(
+                bx,
+                op,
+                &mut llargs,
+                &fn_abi.args[i],
+                &mut lifetime_ends_after_call,
+            );
        }
        let num_untupled = untuple.map(|tup| {
            self.codegen_arguments_untupled(
                bx,
                &tup.node,
                &mut llargs,
                &fn_abi.args[first_args.len()..],
+                &mut lifetime_ends_after_call,
            )
        });

@@ -1174,7 +1182,13 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
            );

            let last_arg = fn_abi.args.last().unwrap();
-            self.codegen_argument(bx, location, &mut llargs, last_arg);
+            self.codegen_argument(
+                bx,
+                location,
+                &mut llargs,
+                last_arg,
+                &mut lifetime_ends_after_call,
+            );
        }

        let fn_ptr = match (instance, llfn) {
@@ -1190,7 +1204,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
            &llargs,
            destination,
            unwind,
-            &copied_constant_arguments,
+            &lifetime_ends_after_call,
            instance,
            mergeable_succ,
        )
@@ -1475,6 +1489,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
        op: OperandRef<'tcx, Bx::Value>,
        llargs: &mut Vec<Bx::Value>,
        arg: &ArgAbi<'tcx, Ty<'tcx>>,
+        lifetime_ends_after_call: &mut Vec<(Bx::Value, Size)>,
    ) {
        match arg.mode {
            PassMode::Ignore => return,
@@ -1513,7 +1528,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                        None => arg.layout.align.abi,
                    };
                    let scratch = PlaceValue::alloca(bx, arg.layout.size, required_align);
+                    bx.lifetime_start(scratch.llval, arg.layout.size);
                    op.val.store(bx, scratch.with_type(arg.layout));
+                    lifetime_ends_after_call.push((scratch.llval, arg.layout.size));
                    (scratch.llval, scratch.align, true)
                }
                PassMode::Cast { .. } => {
@@ -1534,7 +1551,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                    // alignment requirements may be higher than the type's alignment, so copy
                    // to a higher-aligned alloca.
                    let scratch = PlaceValue::alloca(bx, arg.layout.size, required_align);
+                    bx.lifetime_start(scratch.llval, arg.layout.size);
                    bx.typed_place_copy(scratch, op_place_val, op.layout);
+                    lifetime_ends_after_call.push((scratch.llval, arg.layout.size));
                    (scratch.llval, scratch.align, true)
                } else {
                    (op_place_val.llval, op_place_val.align, true)
@@ -1616,6 +1635,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
        operand: &mir::Operand<'tcx>,
        llargs: &mut Vec<Bx::Value>,
        args: &[ArgAbi<'tcx, Ty<'tcx>>],
+        lifetime_ends_after_call: &mut Vec<(Bx::Value, Size)>,
    ) -> usize {
        let tuple = self.codegen_operand(bx, operand);

@@ -1628,13 +1648,13 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
            for i in 0..tuple.layout.fields.count() {
                let field_ptr = tuple_ptr.project_field(bx, i);
                let field = bx.load_operand(field_ptr);
-                self.codegen_argument(bx, field, llargs, &args[i]);
+                self.codegen_argument(bx, field, llargs, &args[i], lifetime_ends_after_call);
            }
        } else {
            // If the tuple is immediate, the elements are as well.
            for i in 0..tuple.layout.fields.count() {
                let op = tuple.extract_field(self, bx, i);
-                self.codegen_argument(bx, op, llargs, &args[i]);
+                self.codegen_argument(bx, op, llargs, &args[i], lifetime_ends_after_call);
            }
        }
        tuple.layout.fields.count()
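Taken together, the diff threads a single `lifetime_ends_after_call: Vec<(Bx::Value, Size)>` through argument lowering: each scratch alloca created to pass an argument indirectly gets a `lifetime_start` when it is filled, is recorded as a (pointer, size) pair, and has its `lifetime_end` emitted only after the call has returned. A rough, self-contained sketch of that pattern, using toy stand-in types rather than rustc's actual `BuilderMethods` API, is shown below; it is illustrative only.

// Toy stand-ins for Bx::Value and rustc_abi::Size; purely illustrative.
#[derive(Clone, Copy, Debug)]
struct Value(usize);
#[derive(Clone, Copy, Debug)]
struct Size(u64);

// A pretend IR builder that just records the instructions it "emits".
#[derive(Default)]
struct Builder {
    ir: Vec<String>,
}

impl Builder {
    fn alloca(&mut self, size: Size) -> Value {
        let v = Value(self.ir.len());
        self.ir.push(format!("%{} = alloca {} bytes", v.0, size.0));
        v
    }
    fn lifetime_start(&mut self, v: Value, size: Size) {
        self.ir.push(format!("lifetime_start(%{}, {})", v.0, size.0));
    }
    fn lifetime_end(&mut self, v: Value, size: Size) {
        self.ir.push(format!("lifetime_end(%{}, {})", v.0, size.0));
    }
    fn call(&mut self, args: &[Value]) {
        self.ir.push(format!("call callee({:?})", args));
    }
}

fn main() {
    let mut bx = Builder::default();
    // Same shape as the vector threaded through codegen_argument in the diff.
    let mut lifetime_ends_after_call: Vec<(Value, Size)> = Vec::new();

    // Lowering one indirectly-passed argument: copy it into a scratch slot,
    // start its lifetime, and remember to end it after the call.
    let size = Size(64);
    let scratch = bx.alloca(size);
    bx.lifetime_start(scratch, size);
    lifetime_ends_after_call.push((scratch, size));

    bx.call(&[scratch]);

    // On the normal-return path, end every recorded lifetime so the backend
    // can reuse the stack space of the scratch slots.
    for &(tmp, size) in &lifetime_ends_after_call {
        bx.lifetime_end(tmp, size);
    }

    for line in &bx.ir {
        println!("{line}");
    }
}

Note that, as in the diff itself, the lifetime ends are emitted only on the normal-return path (after the invoke's destination block, or straight after a plain call).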