use std::borrow::Cow;
use std::convert::TryFrom;

use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::ty::layout::{self, LayoutOf as _, TyAndLayout};
use rustc_middle::ty::Instance;
use rustc_middle::{
    mir,
    ty::{self, Ty},
};
use rustc_target::abi;
use rustc_target::spec::abi::Abi;

use super::{
    FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy, Scalar,
    StackPopCleanup, StackPopUnwind,
};

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
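    /// Determines whether a function with the given attributes and ABI can unwind.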
    fn fn_can_unwind(&self, attrs: CodegenFnAttrFlags, abi: Abi) -> bool {
        layout::fn_can_unwind(*self.tcx, attrs, abi)
    }

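    /// Evaluates the given terminator, advancing the interpreter state accordingly
    /// (jumping to another block, pushing or popping a stack frame, etc.).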
    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> InterpResult<'tcx> {
        use rustc_middle::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.pop_stack_frame(/* unwinding */ false)?
            }

            Goto { target } => self.go_to_block(target),

            SwitchInt { ref discr, ref targets, switch_ty } => {
                let discr = self.read_immediate(&self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);
                assert_eq!(discr.layout.ty, switch_ty);

                // Branch to the `otherwise` case by default, if no match is found.
                assert!(!targets.iter().is_empty());
                let mut target_block = targets.otherwise();

                for (const_int, target) in targets.iter() {
                    // Compare using `binary_op`, to also support pointer values.
                    let res = self
                        .overflowing_binary_op(
                            mir::BinOp::Eq,
                            &discr,
                            &ImmTy::from_uint(const_int, discr.layout),
                        )?
                        .0;
                    if res.to_bool()? {
                        target_block = target;
                        break;
                    }
                }

                self.go_to_block(target_block);
            }

            Call { ref func, ref args, destination, ref cleanup, from_hir_call: _, fn_span: _ } => {
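                // Remember where we are, so we can check below that evaluating the
                // call made progress (changed the frame or the location).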
                let old_stack = self.frame_idx();
                let old_loc = self.frame().loc;
                let func = self.eval_operand(func, None)?;
                let (fn_val, abi, caller_can_unwind) = match *func.layout.ty.kind() {
                    ty::FnPtr(sig) => {
                        let caller_abi = sig.abi();
                        let fn_ptr = self.read_pointer(&func)?;
                        let fn_val = self.memory.get_fn(fn_ptr)?;
                        (
                            fn_val,
                            caller_abi,
                            self.fn_can_unwind(CodegenFnAttrFlags::empty(), caller_abi),
                        )
                    }
                    ty::FnDef(def_id, substs) => {
                        let sig = func.layout.ty.fn_sig(*self.tcx);
                        (
                            FnVal::Instance(
                                self.resolve(ty::WithOptConstParam::unknown(def_id), substs)?,
                            ),
                            sig.abi(),
                            self.fn_can_unwind(self.tcx.codegen_fn_attrs(def_id).flags, sig.abi()),
                        )
                    }
                    _ => span_bug!(
                        terminator.source_info.span,
                        "invalid callee of type {:?}",
                        func.layout.ty
                    ),
                };
                let args = self.eval_operands(args)?;
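                // Evaluate the destination place, if there is one (a diverging call
                // has `destination == None`).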
                let dest_place;
                let ret = match destination {
                    Some((dest, ret)) => {
                        dest_place = self.eval_place(dest)?;
                        Some((&dest_place, ret))
                    }
                    None => None,
                };
                self.eval_fn_call(
                    fn_val,
                    abi,
                    &args[..],
                    ret,
                    match (cleanup, caller_can_unwind) {
                        (Some(cleanup), true) => StackPopUnwind::Cleanup(*cleanup),
                        (None, true) => StackPopUnwind::Skip,
                        (_, false) => StackPopUnwind::NotAllowed,
                    },
                )?;
                // Sanity-check that `eval_fn_call` either pushed a new frame or
                // did a jump to another block.
                if self.frame_idx() == old_stack && self.frame().loc == old_loc {
                    span_bug!(terminator.source_info.span, "evaluating this call made no progress");
                }
            }

            Drop { place, target, unwind } => {
                let place = self.eval_place(place)?;
                let ty = place.layout.ty;
                trace!("TerminatorKind::drop: {:?}, type {}", place, ty);

                let instance = Instance::resolve_drop_in_place(*self.tcx, ty);
                self.drop_in_place(&place, instance, target, unwind)?;
            }

            Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond_val =
                    self.read_immediate(&self.eval_operand(cond, None)?)?.to_scalar()?.to_bool()?;
                if expected == cond_val {
                    self.go_to_block(target);
                } else {
                    M::assert_panic(self, msg, cleanup)?;
                }
            }

            Abort => {
                M::abort(self, "the program aborted execution".to_owned())?;
            }

            // When we encounter Resume, we've finished unwinding
            // cleanup for the current stack frame. We pop it in order
            // to continue unwinding the next frame.
            Resume => {
                trace!("unwinding: resuming from cleanup");
                // By definition, a Resume terminator means
                // that we're unwinding.
                self.pop_stack_frame(/* unwinding */ true)?;
                return Ok(());
            }

            // It is UB to ever encounter this.
            Unreachable => throw_ub!(Unreachable),

            // These should never occur for MIR we actually run.
            DropAndReplace { .. }
            | FalseEdge { .. }
            | FalseUnwind { .. }
            | Yield { .. }
            | GeneratorDrop => span_bug!(
                terminator.source_info.span,
                "{:#?} should have been eliminated by a MIR pass",
                terminator.kind
            ),

            // Inline assembly can't be interpreted.
            InlineAsm { .. } => throw_unsup_format!("inline assembly is not supported"),
        }

        Ok(())
    }

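    /// Checks if the two layouts are compatible for argument passing, i.e., whether
    /// data of the caller's type can be passed where the callee's type is expected.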
    fn check_argument_compat(
        rust_abi: bool,
        caller: TyAndLayout<'tcx>,
        callee: TyAndLayout<'tcx>,
    ) -> bool {
        if caller.ty == callee.ty {
            // No question.
            return true;
        }
        if !rust_abi {
            // Don't risk anything.
            return false;
        }
        // Compare layouts.
        match (caller.abi, callee.abi) {
            // Different valid ranges are okay (once we enforce validity,
            // that will take care to make it UB to leave the range, just
            // like for transmute).
            (abi::Abi::Scalar(caller), abi::Abi::Scalar(callee)) => caller.value == callee.value,
            (abi::Abi::ScalarPair(caller1, caller2), abi::Abi::ScalarPair(callee1, callee2)) => {
                caller1.value == callee1.value && caller2.value == callee2.value
            }
            // Be conservative.
            _ => false,
        }
    }

    /// Pass a single argument, checking the types for compatibility.
    fn pass_argument(
        &mut self,
        rust_abi: bool,
        caller_arg: &mut impl Iterator<Item = OpTy<'tcx, M::PointerTag>>,
        callee_arg: &PlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx> {
        if rust_abi && callee_arg.layout.is_zst() {
            // Nothing to do.
            trace!("Skipping callee ZST");
            return Ok(());
        }
        let caller_arg = caller_arg.next().ok_or_else(|| {
            err_ub_format!("calling a function with fewer arguments than it requires")
        })?;
        if rust_abi {
            assert!(!caller_arg.layout.is_zst(), "ZSTs must have been already filtered out");
        }
        // Now, check the layouts for compatibility.
        if !Self::check_argument_compat(rust_abi, caller_arg.layout, callee_arg.layout) {
            throw_ub_format!(
                "calling a function with argument of type {:?} passing data of type {:?}",
                callee_arg.layout.ty,
                caller_arg.layout.ty
            )
        }
        // We allow some transmutes here.
        self.copy_op_transmute(&caller_arg, callee_arg)
    }

    /// Call this function -- pushing the stack frame and initializing the arguments.
    pub(crate) fn eval_fn_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        caller_abi: Abi,
        args: &[OpTy<'tcx, M::PointerTag>],
        ret: Option<(&PlaceTy<'tcx, M::PointerTag>, mir::BasicBlock)>,
        mut unwind: StackPopUnwind,
    ) -> InterpResult<'tcx> {
        trace!("eval_fn_call: {:#?}", fn_val);

        let instance = match fn_val {
            FnVal::Instance(instance) => instance,
            FnVal::Other(extra) => {
                return M::call_extra_fn(self, extra, caller_abi, args, ret, unwind);
            }
        };

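        // Determine the ABI from the callee's type: closures use the `rust-call`
        // ABI, and generators are resumed via the plain Rust ABI.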
        let get_abi = |this: &Self, instance_ty: Ty<'tcx>| match instance_ty.kind() {
            ty::FnDef(..) => instance_ty.fn_sig(*this.tcx).abi(),
            ty::Closure(..) => Abi::RustCall,
            ty::Generator(..) => Abi::Rust,
            _ => span_bug!(this.cur_span(), "unexpected callee ty: {:?}", instance_ty),
        };

        // ABI check
        let check_abi = |callee_abi: Abi| -> InterpResult<'tcx> {
            let normalize_abi = |abi| match abi {
                // These are all the same ABI, really.
                Abi::Rust | Abi::RustCall | Abi::RustIntrinsic | Abi::PlatformIntrinsic => {
                    Abi::Rust
                }
                abi => abi,
            };
            if normalize_abi(caller_abi) != normalize_abi(callee_abi) {
                throw_ub_format!(
                    "calling a function with ABI {} using caller ABI {}",
                    callee_abi.name(),
                    caller_abi.name()
                )
            }
            Ok(())
        };

        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
                if M::enforce_abi(self) {
                    check_abi(get_abi(self, instance.ty(*self.tcx, self.param_env)))?;
                }
                assert!(caller_abi == Abi::RustIntrinsic || caller_abi == Abi::PlatformIntrinsic);
                M::call_intrinsic(self, instance, args, ret, unwind)
            }
            ty::InstanceDef::VtableShim(..)
            | ty::InstanceDef::ReifyShim(..)
            | ty::InstanceDef::ClosureOnceShim { .. }
            | ty::InstanceDef::FnPtrShim(..)
            | ty::InstanceDef::DropGlue(..)
            | ty::InstanceDef::CloneShim(..)
            | ty::InstanceDef::Item(_) => {
                // We need MIR for this fn.
                let body =
                    match M::find_mir_or_eval_fn(self, instance, caller_abi, args, ret, unwind)? {
                        Some(body) => body,
                        None => return Ok(()),
                    };

                // Check against the ABI of the MIR body we are calling (not the ABI of `instance`;
                // these can differ when `find_mir_or_eval_fn` does something clever like resolve
                // exported symbol names).
                let callee_def_id = body.source.def_id();
                let callee_abi = get_abi(self, self.tcx.type_of(callee_def_id));

                if M::enforce_abi(self) {
                    check_abi(callee_abi)?;
                }

                if !matches!(unwind, StackPopUnwind::NotAllowed)
                    && !self
                        .fn_can_unwind(self.tcx.codegen_fn_attrs(callee_def_id).flags, callee_abi)
                {
                    // The callee cannot unwind.
                    unwind = StackPopUnwind::NotAllowed;
                }

                self.push_stack_frame(
                    instance,
                    body,
                    ret.map(|p| p.0),
                    StackPopCleanup::Goto { ret: ret.map(|p| p.1), unwind },
                )?;

                // If an error is raised here, pop the frame again to get an accurate backtrace.
                // To this end, we wrap it all in a `try` block.
                let res: InterpResult<'tcx> = try {
                    trace!(
                        "caller ABI: {:?}, args: {:#?}",
                        caller_abi,
                        args.iter()
                            .map(|arg| (arg.layout.ty, format!("{:?}", **arg)))
                            .collect::<Vec<_>>()
                    );
                    trace!(
                        "spread_arg: {:?}, locals: {:#?}",
                        body.spread_arg,
                        body.args_iter()
                            .map(|local| (
                                local,
                                self.layout_of_local(self.frame(), local, None).unwrap().ty
                            ))
                            .collect::<Vec<_>>()
                    );

                    // Figure out how to pass which arguments.
                    // The Rust ABI is special: ZSTs get skipped.
                    let rust_abi = matches!(caller_abi, Abi::Rust | Abi::RustCall);

                    // We have two iterators: where the arguments come from,
                    // and where they go to.

                    // For where they come from: if the ABI is RustCall, we untuple the
                    // last incoming argument.  These two iterators do not have the same type,
                    // so to keep the code paths uniform we accept an allocation
                    // (for the RustCall ABI only).
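                    // E.g., a `rust-call` function invoked with the arguments `x, (b, c)`
                    // actually receives the three arguments `x, b, c`.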
                    let caller_args: Cow<'_, [OpTy<'tcx, M::PointerTag>]> =
                        if caller_abi == Abi::RustCall && !args.is_empty() {
                            // Untuple.
                            let (untuple_arg, args) = args.split_last().unwrap();
                            trace!("eval_fn_call: Will pass last argument by untupling");
                            Cow::from(
                                args.iter()
                                    .map(|&a| Ok(a))
                                    .chain(
                                        (0..untuple_arg.layout.fields.count())
                                            .map(|i| self.operand_field(untuple_arg, i)),
                                    )
                                    .collect::<InterpResult<'_, Vec<OpTy<'tcx, M::PointerTag>>>>(
                                    )?,
                            )
                        } else {
                            // Plain argument passing.
                            Cow::from(args)
                        };
                    // Skip ZSTs.
                    let mut caller_iter =
                        caller_args.iter().filter(|op| !rust_abi || !op.layout.is_zst()).copied();

                    // Now we have to spread them out across the callee's locals,
                    // taking into account the `spread_arg`.  If we could write
                    // this as a single iterator (that handles `spread_arg`), then
                    // `pass_argument` would be the loop body. It takes care to
                    // not advance `caller_iter` for ZSTs.
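                    // (`spread_arg` is the inverse of untupling: the callee's tuple
                    // parameter gets filled field-by-field from separate arguments.)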
                    for local in body.args_iter() {
                        let dest = self.eval_place(mir::Place::from(local))?;
                        if Some(local) == body.spread_arg {
                            // Must be a tuple.
                            for i in 0..dest.layout.fields.count() {
                                let dest = self.place_field(&dest, i)?;
                                self.pass_argument(rust_abi, &mut caller_iter, &dest)?;
                            }
                        } else {
                            // Normal argument.
                            self.pass_argument(rust_abi, &mut caller_iter, &dest)?;
                        }
                    }
                    // Now we should have no more caller args.
                    if caller_iter.next().is_some() {
                        throw_ub_format!("calling a function with more arguments than it expected")
                    }
                    // Don't forget to check the return type!
                    if let Some((caller_ret, _)) = ret {
                        let callee_ret = self.eval_place(mir::Place::return_place())?;
                        if !Self::check_argument_compat(
                            rust_abi,
                            caller_ret.layout,
                            callee_ret.layout,
                        ) {
                            throw_ub_format!(
                                "calling a function with return type {:?} passing \
                                     return place of type {:?}",
                                callee_ret.layout.ty,
                                caller_ret.layout.ty
                            )
                        }
                    } else {
                        let local = mir::RETURN_PLACE;
                        let callee_layout = self.layout_of_local(self.frame(), local, None)?;
                        if !callee_layout.abi.is_uninhabited() {
                            throw_ub_format!("calling a returning function without a return place")
                        }
                    }
                };
                match res {
                    Err(err) => {
                        self.stack_mut().pop();
                        Err(err)
                    }
                    Ok(()) => Ok(()),
                }
            }
            // We cannot use a shim here, because that would only result in infinite recursion.
            ty::InstanceDef::Virtual(_, idx) => {
                let mut args = args.to_vec();
                // We have to implement all "object safe receivers".  Currently we
                // support built-in pointers (`&`, `&mut`, `Box`) as well as unsized-self.  We do
                // not yet support custom self types.
                // Also see `compiler/rustc_codegen_llvm/src/abi.rs` and `compiler/rustc_codegen_ssa/src/mir/block.rs`.
                let receiver_place = match args[0].layout.ty.builtin_deref(true) {
                    Some(_) => {
                        // Built-in pointer.
                        self.deref_operand(&args[0])?
                    }
                    None => {
                        // Unsized self.
                        args[0].assert_mem_place()
                    }
                };
                // Find and consult the vtable.
                let vtable = self.scalar_to_ptr(receiver_place.vtable());
                let fn_val = self.get_vtable_slot(vtable, u64::try_from(idx).unwrap())?;

                // `*mut receiver_place.layout.ty` is almost the layout that we
                // want for args[0]: we have to project to field 0 because we want
                // a thin pointer.
                assert!(receiver_place.layout.is_unsized());
                let receiver_ptr_ty = self.tcx.mk_mut_ptr(receiver_place.layout.ty);
                let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0);
                // Adjust the receiver argument.
                args[0] = OpTy::from(ImmTy::from_immediate(
                    Scalar::from_maybe_pointer(receiver_place.ptr, self).into(),
                    this_receiver_ptr,
                ));
                trace!("Patched self operand to {:#?}", args[0]);
                // Recurse with the concrete function.
                self.eval_fn_call(fn_val, caller_abi, &args, ret, unwind)
            }
        }
    }

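    /// Runs the drop glue `instance` on `place` and continues execution at `target`
    /// (or at `unwind` if the drop implementation panics).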
    fn drop_in_place(
        &mut self,
        place: &PlaceTy<'tcx, M::PointerTag>,
        instance: ty::Instance<'tcx>,
        target: mir::BasicBlock,
        unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        trace!("drop_in_place: {:?},\n  {:?}, {:?}", *place, place.layout.ty, instance);
        // We take the address of the object.  This may well be unaligned, which is fine
        // for us here.  However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        let (instance, place) = match place.layout.ty.kind() {
            ty::Dynamic(..) => {
                // Dropping a trait object.
                self.unpack_dyn_trait(&place)?
            }
            _ => (instance, place),
        };

        let arg = ImmTy::from_immediate(
            place.to_ref(self),
            self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
        );

        let ty = self.tcx.mk_unit(); // return type is ()
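        // The return type `()` is a ZST, so a dangling (but aligned) destination is
        // fine: no memory is ever actually written for the return value.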
        let dest = MPlaceTy::dangling(self.layout_of(ty)?);

        self.eval_fn_call(
            FnVal::Instance(instance),
            Abi::Rust,
            &[arg.into()],
            Some((&dest.into(), target)),
            match unwind {
                Some(cleanup) => StackPopUnwind::Cleanup(cleanup),
                None => StackPopUnwind::Skip,
            },
        )
    }
}