use std::convert::{TryFrom, TryInto};
use std::mem;
use std::num::NonZeroUsize;
use std::time::Duration;

use log::trace;

use rustc_hir::def_id::{DefId, CRATE_DEF_INDEX};
use rustc_middle::mir;
use rustc_middle::ty::{
    self,
    layout::{LayoutOf, TyAndLayout},
    List, TyCtxt,
};
use rustc_span::Symbol;
use rustc_target::abi::{Align, FieldsShape, Size, Variants};
use rustc_target::spec::abi::Abi;

use rand::RngCore;

use crate::*;

impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}

/// Gets the `DefId` of an item for a path, if the path can be resolved.
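///
/// The first path segment names the crate; the remaining segments are looked up in that
/// crate's items, e.g. `&["libc", "EINVAL"]` or `&["std", "sys", "windows", "c", name]`
/// (see the helpers below).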
fn try_resolve_did<'mir, 'tcx>(tcx: TyCtxt<'tcx>, path: &[&str]) -> Option<DefId> {
    tcx.crates(()).iter().find(|&&krate| tcx.crate_name(krate).as_str() == path[0]).and_then(
        |krate| {
            let krate = DefId { krate: *krate, index: CRATE_DEF_INDEX };
            let mut items = tcx.item_children(krate);
            let mut path_it = path.iter().skip(1).peekable();

            while let Some(segment) = path_it.next() {
                for item in mem::replace(&mut items, Default::default()).iter() {
                    if item.ident.name.as_str() == *segment {
                        if path_it.peek().is_none() {
                            return Some(item.res.def_id());
                        }

                        items = tcx.item_children(item.res.def_id());
                        break;
                    }
                }
            }
            None
        },
    )
}

pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
    /// Gets an instance for a path.
    fn resolve_path(&self, path: &[&str]) -> ty::Instance<'tcx> {
        let did = try_resolve_did(self.eval_context_ref().tcx.tcx, path)
            .unwrap_or_else(|| panic!("failed to find required Rust item: {:?}", path));
        ty::Instance::mono(self.eval_context_ref().tcx.tcx, did)
    }

    /// Evaluates the scalar at the specified path. Errors if the scalar cannot be read;
    /// panics (in `resolve_path`) if the path does not resolve to an item.
    fn eval_path_scalar(&mut self, path: &[&str]) -> InterpResult<'tcx, Scalar<Tag>> {
        let this = self.eval_context_mut();
        let instance = this.resolve_path(path);
        let cid = GlobalId { instance, promoted: None };
        let const_val = this.eval_to_allocation(cid)?;
        let const_val = this.read_scalar(&const_val.into())?;
        const_val.check_init()
    }

    /// Helper function to get a `libc` constant as a `Scalar`.
    fn eval_libc(&mut self, name: &str) -> InterpResult<'tcx, Scalar<Tag>> {
        self.eval_context_mut().eval_path_scalar(&["libc", name])
    }

    /// Helper function to get a `libc` constant as an `i32`.
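    ///
    /// Illustrative usage from inside a shim, where `this` is the `MiriEvalContext`
    /// (a sketch, not compiled as a doctest):
    /// ```ignore
    /// let einval = this.eval_libc_i32("EINVAL")?;
    /// ```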
    fn eval_libc_i32(&mut self, name: &str) -> InterpResult<'tcx, i32> {
        // TODO: Cache the result.
        self.eval_libc(name)?.to_i32()
    }

    /// Helper function to get a `windows` constant as a `Scalar`.
    fn eval_windows(&mut self, module: &str, name: &str) -> InterpResult<'tcx, Scalar<Tag>> {
        self.eval_context_mut().eval_path_scalar(&["std", "sys", "windows", module, name])
    }

    /// Helper function to get a `windows` constant as a `u64`.
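    ///
    /// Illustrative usage (a sketch, not compiled as a doctest):
    /// ```ignore
    /// let not_found = this.eval_windows_u64("c", "ERROR_FILE_NOT_FOUND")?;
    /// ```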
    fn eval_windows_u64(&mut self, module: &str, name: &str) -> InterpResult<'tcx, u64> {
        // TODO: Cache the result.
        self.eval_windows(module, name)?.to_u64()
    }

    /// Helper function to get the `TyAndLayout` of a `libc` type
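    ///
    /// For example (a sketch; assumes the interpreted program links a `libc` crate that
    /// defines this type):
    /// ```ignore
    /// let timespec_layout = this.libc_ty_layout("timespec")?;
    /// ```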
    fn libc_ty_layout(&mut self, name: &str) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
        let this = self.eval_context_mut();
        let ty = this.resolve_path(&["libc", name]).ty(*this.tcx, ty::ParamEnv::reveal_all());
        this.layout_of(ty)
    }

    /// Helper function to get the `TyAndLayout` of a `windows` type
    fn windows_ty_layout(&mut self, name: &str) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
        let this = self.eval_context_mut();
        let ty = this
            .resolve_path(&["std", "sys", "windows", "c", name])
            .ty(*this.tcx, ty::ParamEnv::reveal_all());
        this.layout_of(ty)
    }

    /// Write a 0 of the appropriate size to `dest`.
    fn write_null(&mut self, dest: &PlaceTy<'tcx, Tag>) -> InterpResult<'tcx> {
        self.eval_context_mut().write_scalar(Scalar::from_int(0, dest.layout.size), dest)
    }

    /// Test if this pointer equals 0.
    fn ptr_is_null(&self, ptr: Pointer<Option<Tag>>) -> InterpResult<'tcx, bool> {
        let this = self.eval_context_ref();
        let null = Scalar::null_ptr(this);
        this.ptr_eq(Scalar::from_maybe_pointer(ptr, this), null)
    }

    /// Get the `Place` for a local
    fn local_place(&mut self, local: mir::Local) -> InterpResult<'tcx, PlaceTy<'tcx, Tag>> {
        let this = self.eval_context_mut();
        let place = mir::Place { local, projection: List::empty() };
        this.eval_place(place)
    }

    /// Generate some random bytes, and write them to `ptr`.
    fn gen_random(&mut self, ptr: Pointer<Option<Tag>>, len: u64) -> InterpResult<'tcx> {
        // Some programs pass in a null pointer and a length of 0
        // to their platform's random-generation function (e.g. getrandom())
        // on Linux. For compatibility with these programs, we don't perform
        // any additional checks - it's okay if the pointer is invalid,
        // since we wouldn't actually be writing to it.
        if len == 0 {
            return Ok(());
        }
        let this = self.eval_context_mut();

        let mut data = vec![0; usize::try_from(len).unwrap()];

        if this.machine.communicate() {
            // Fill the buffer using the host's rng.
            getrandom::getrandom(&mut data)
                .map_err(|err| err_unsup_format!("host getrandom failed: {}", err))?;
        } else {
            let rng = this.memory.extra.rng.get_mut();
            rng.fill_bytes(&mut data);
        }

        this.memory.write_bytes(ptr, data.iter().copied())
    }

    /// Call a function: Push the stack frame and pass the arguments.
    /// For now, arguments must be scalars (so that the caller does not have to know the layout).
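    ///
    /// A sketch of how a shim might invoke this (the argument value, the missing return
    /// place, and the chosen `StackPopCleanup` variant are all illustrative assumptions):
    /// ```ignore
    /// this.call_function(
    ///     f,
    ///     Abi::Rust,
    ///     &[Scalar::from_u32(0).into()],
    ///     None,
    ///     StackPopCleanup::None { cleanup: true },
    /// )?;
    /// ```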
    fn call_function(
        &mut self,
        f: ty::Instance<'tcx>,
        caller_abi: Abi,
        args: &[Immediate<Tag>],
        dest: Option<&PlaceTy<'tcx, Tag>>,
        stack_pop: StackPopCleanup,
    ) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();
        let param_env = ty::ParamEnv::reveal_all(); // in Miri this is always the param_env we use... and this.param_env is private.
        let callee_abi = f.ty(*this.tcx, param_env).fn_sig(*this.tcx).abi();
        if this.machine.enforce_abi && callee_abi != caller_abi {
            throw_ub_format!(
                "calling a function with ABI {} using caller ABI {}",
                callee_abi.name(),
                caller_abi.name()
            )
        }

        // Push frame.
        let mir = &*this.load_mir(f.def, None)?;
        this.push_stack_frame(f, mir, dest, stack_pop)?;

        // Initialize arguments.
        let mut callee_args = this.frame().body.args_iter();
        for arg in args {
            let callee_arg = this.local_place(
                callee_args
                    .next()
                    .ok_or_else(|| err_ub_format!("callee has fewer arguments than expected"))?,
            )?;
            this.write_immediate(*arg, &callee_arg)?;
        }
        if callee_args.next().is_some() {
            throw_ub_format!("callee has more arguments than expected");
        }

        Ok(())
    }

    /// Visits the memory covered by `place`, sensitive to freezing: the 2nd parameter
    /// of `action` will be true if this is frozen, false if this is in an `UnsafeCell`.
    /// The range is relative to `place`.
    ///
    /// Assumes that the `place` has a proper pointer in it.
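    ///
    /// A sketch of a caller (the body of the closure is an illustrative assumption):
    /// ```ignore
    /// this.visit_freeze_sensitive(&place, size, |range, frozen| {
    ///     // `range` is relative to `place`; `frozen` is `false` inside `UnsafeCell`s.
    ///     if frozen { /* e.g. treat this range as read-only */ }
    ///     Ok(())
    /// })?;
    /// ```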
    fn visit_freeze_sensitive(
        &self,
        place: &MPlaceTy<'tcx, Tag>,
        size: Size,
        mut action: impl FnMut(AllocRange, bool) -> InterpResult<'tcx>,
    ) -> InterpResult<'tcx> {
        let this = self.eval_context_ref();
        trace!("visit_frozen(place={:?}, size={:?})", *place, size);
        debug_assert_eq!(
            size,
            this.size_and_align_of_mplace(place)?
                .map(|(size, _)| size)
                .unwrap_or_else(|| place.layout.size)
        );
        // Store how far we proceeded into the place so far. Everything to the left of
        // this offset has already been handled, in the sense that the frozen parts
        // have had `action` called on them.
        let ptr = place.ptr.into_pointer_or_addr().unwrap();
        let start_offset = ptr.into_parts().1 as Size; // we just compare offsets, the abs. value never matters
        let mut cur_offset = start_offset;
        // Called when we detected an `UnsafeCell` at the given offset and size.
        // Calls `action` and advances `cur_ptr`.
        let mut unsafe_cell_action = |unsafe_cell_ptr: Pointer<Option<Tag>>,
                                      unsafe_cell_size: Size| {
            let unsafe_cell_ptr = unsafe_cell_ptr.into_pointer_or_addr().unwrap();
            debug_assert_eq!(unsafe_cell_ptr.provenance, ptr.provenance);
            // We assume that we are given the fields in increasing offset order,
            // and nothing else changes.
            let unsafe_cell_offset = unsafe_cell_ptr.into_parts().1 as Size; // we just compare offsets, the abs. value never matters
            assert!(unsafe_cell_offset >= cur_offset);
            let frozen_size = unsafe_cell_offset - cur_offset;
            // Everything between the cur_ptr and this `UnsafeCell` is frozen.
            if frozen_size != Size::ZERO {
                action(alloc_range(cur_offset - start_offset, frozen_size), /*frozen*/ true)?;
            }
            cur_offset += frozen_size;
            // This `UnsafeCell` is NOT frozen.
            if unsafe_cell_size != Size::ZERO {
                action(
                    alloc_range(cur_offset - start_offset, unsafe_cell_size),
                    /*frozen*/ false,
                )?;
            }
            cur_offset += unsafe_cell_size;
            // Done
            Ok(())
        };
        // Run a visitor
        {
            let mut visitor = UnsafeCellVisitor {
                ecx: this,
                unsafe_cell_action: |place| {
                    trace!("unsafe_cell_action on {:?}", place.ptr);
                    // We need a size to go on.
                    let unsafe_cell_size = this
                        .size_and_align_of_mplace(&place)?
                        .map(|(size, _)| size)
                        // for extern types, just cover what we can
                        .unwrap_or_else(|| place.layout.size);
                    // Now handle this `UnsafeCell`, unless it is empty.
                    if unsafe_cell_size != Size::ZERO {
                        unsafe_cell_action(place.ptr, unsafe_cell_size)
                    } else {
                        Ok(())
                    }
                },
            };
            visitor.visit_value(place)?;
        }
        // The part between the end_ptr and the end of the place is also frozen.
        // So pretend there is a 0-sized `UnsafeCell` at the end.
        unsafe_cell_action(place.ptr.wrapping_offset(size, this), Size::ZERO)?;
        // Done!
        return Ok(());

        /// Visiting the memory covered by a `MemPlace`, being aware of
        /// whether we are inside an `UnsafeCell` or not.
        struct UnsafeCellVisitor<'ecx, 'mir, 'tcx, F>
        where
            F: FnMut(&MPlaceTy<'tcx, Tag>) -> InterpResult<'tcx>,
        {
            ecx: &'ecx MiriEvalContext<'mir, 'tcx>,
            unsafe_cell_action: F,
        }

        impl<'ecx, 'mir, 'tcx: 'mir, F> ValueVisitor<'mir, 'tcx, Evaluator<'mir, 'tcx>>
            for UnsafeCellVisitor<'ecx, 'mir, 'tcx, F>
        where
            F: FnMut(&MPlaceTy<'tcx, Tag>) -> InterpResult<'tcx>,
        {
            type V = MPlaceTy<'tcx, Tag>;

            #[inline(always)]
            fn ecx(&self) -> &MiriEvalContext<'mir, 'tcx> {
                &self.ecx
            }

            // Hook to detect `UnsafeCell`.
            fn visit_value(&mut self, v: &MPlaceTy<'tcx, Tag>) -> InterpResult<'tcx> {
                trace!("UnsafeCellVisitor: {:?} {:?}", *v, v.layout.ty);
                let is_unsafe_cell = match v.layout.ty.kind() {
                    ty::Adt(adt, _) =>
                        Some(adt.did) == self.ecx.tcx.lang_items().unsafe_cell_type(),
                    _ => false,
                };
                if is_unsafe_cell {
                    // We do not have to recurse further, this is an `UnsafeCell`.
                    (self.unsafe_cell_action)(v)
                } else if self.ecx.type_is_freeze(v.layout.ty) {
                    // This is `Freeze`, there cannot be an `UnsafeCell`
                    Ok(())
                } else if matches!(v.layout.fields, FieldsShape::Union(..)) {
                    // A (non-frozen) union. We fall back to whatever the type says.
                    (self.unsafe_cell_action)(v)
                } else {
                    // We want to not actually read from memory for this visit. So, before
                    // walking this value, we have to make sure it is not a
                    // `Variants::Multiple`.
                    match v.layout.variants {
                        Variants::Multiple { .. } => {
                            // A multi-variant enum, or generator, or so.
                            // Treat this like a union: without reading from memory,
                            // we cannot determine the variant we are in. Reading from
                            // memory would be subject to Stacked Borrows rules, leading
                            // to all sorts of "funny" recursion.
                            // We only end up here if the type is *not* freeze, so we just call the
                            // `UnsafeCell` action.
                            (self.unsafe_cell_action)(v)
                        }
                        Variants::Single { .. } => {
                            // Proceed further, try to find where exactly that `UnsafeCell`
                            // is hiding.
                            self.walk_value(v)
                        }
                    }
                }
            }

            // Make sure we visit aggregates in increasing offset order.
            fn visit_aggregate(
                &mut self,
                place: &MPlaceTy<'tcx, Tag>,
                fields: impl Iterator<Item = InterpResult<'tcx, MPlaceTy<'tcx, Tag>>>,
            ) -> InterpResult<'tcx> {
                match place.layout.fields {
                    FieldsShape::Array { .. } => {
                        // For the array layout, we know the iterator will yield sorted elements so
                        // we can avoid the allocation.
                        self.walk_aggregate(place, fields)
                    }
                    FieldsShape::Arbitrary { .. } => {
                        // Gather the subplaces and sort them before visiting.
                        let mut places =
                            fields.collect::<InterpResult<'tcx, Vec<MPlaceTy<'tcx, Tag>>>>()?;
                        // we just compare offsets, the abs. value never matters
                        places.sort_by_key(|place| {
                            place.ptr.into_pointer_or_addr().unwrap().into_parts().1 as Size
                        });
                        self.walk_aggregate(place, places.into_iter().map(Ok))
                    }
                    FieldsShape::Union { .. } | FieldsShape::Primitive => {
                        // Uh, what?
                        bug!("unions/primitives are not aggregates we should ever visit")
                    }
                }
            }

            fn visit_union(
                &mut self,
                _v: &MPlaceTy<'tcx, Tag>,
                _fields: NonZeroUsize,
            ) -> InterpResult<'tcx> {
                bug!("we should have already handled unions in `visit_value`")
            }
        }
    }

    /// Writes several `ImmTy`s contiguously into memory. This is useful when you have to pack
    /// different values into a struct.
    fn write_packed_immediates(
        &mut self,
        place: &MPlaceTy<'tcx, Tag>,
        imms: &[ImmTy<'tcx, Tag>],
    ) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();

        let mut offset = Size::from_bytes(0);

        for &imm in imms {
            this.write_immediate(
                *imm,
                &place.offset(offset, MemPlaceMeta::None, imm.layout, &*this.tcx)?.into(),
            )?;
            offset += imm.layout.size;
        }
        Ok(())
    }

    /// Helper function used inside the shims of foreign functions to check that isolation is
    /// disabled. It returns an error using the `name` of the foreign function if this is not the
    /// case.
    fn check_no_isolation(&self, name: &str) -> InterpResult<'tcx> {
        if !self.eval_context_ref().machine.communicate() {
            self.reject_in_isolation(name, RejectOpWith::Abort)?;
        }
        Ok(())
    }

    /// Helper function used inside the shims of foreign functions which reject the op
    /// when isolation is enabled. It is used to print a warning/backtrace about the rejection.
    fn reject_in_isolation(&self, op_name: &str, reject_with: RejectOpWith) -> InterpResult<'tcx> {
        let this = self.eval_context_ref();
        match reject_with {
            RejectOpWith::Abort => isolation_abort_error(op_name),
            RejectOpWith::WarningWithoutBacktrace => {
                this.tcx
                    .sess
                    .warn(&format!("{} was made to return an error due to isolation", op_name));
                Ok(())
            }
            RejectOpWith::Warning => {
                register_diagnostic(NonHaltingDiagnostic::RejectedIsolatedOp(op_name.to_string()));
                Ok(())
            }
            RejectOpWith::NoWarning => Ok(()), // no warning
        }
    }

    /// Helper function used inside the shims of foreign functions to assert that the target OS
    /// is `target_os`. It panics showing a message with the `name` of the foreign function
    /// if this is not the case.
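    ///
    /// For example (illustrative):
    /// ```ignore
    /// this.assert_target_os("linux", "getrandom");
    /// ```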
    fn assert_target_os(&self, target_os: &str, name: &str) {
        assert_eq!(
            self.eval_context_ref().tcx.sess.target.os,
            target_os,
            "`{}` is only available on the `{}` target OS",
            name,
            target_os,
        )
    }

    /// Get last error variable as a place, lazily allocating thread-local storage for it if
    /// necessary.
    fn last_error_place(&mut self) -> InterpResult<'tcx, MPlaceTy<'tcx, Tag>> {
        let this = self.eval_context_mut();
        if let Some(errno_place) = this.active_thread_ref().last_error {
            Ok(errno_place)
        } else {
            // Allocate new place, set initial value to 0.
            let errno_layout = this.machine.layouts.u32;
            let errno_place = this.allocate(errno_layout, MiriMemoryKind::Machine.into())?;
            this.write_scalar(Scalar::from_u32(0), &errno_place.into())?;
            this.active_thread_mut().last_error = Some(errno_place);
            Ok(errno_place)
        }
    }

    /// Sets the last error variable.
    fn set_last_error(&mut self, scalar: Scalar<Tag>) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();
        let errno_place = this.last_error_place()?;
        this.write_scalar(scalar, &errno_place.into())
    }

    /// Gets the last error variable.
    fn get_last_error(&mut self) -> InterpResult<'tcx, Scalar<Tag>> {
        let this = self.eval_context_mut();
        let errno_place = this.last_error_place()?;
        this.read_scalar(&errno_place.into())?.check_init()
    }

    /// Sets the last OS error using a `std::io::ErrorKind`. This function tries to produce the most
    /// similar OS error from the `std::io::ErrorKind` and sets it as the last OS error.
    fn set_last_error_from_io_error(&mut self, err_kind: std::io::ErrorKind) -> InterpResult<'tcx> {
        use std::io::ErrorKind::*;
        let this = self.eval_context_mut();
        let target = &this.tcx.sess.target;
        let target_os = &target.os;
        let last_error = if target.families.contains(&"unix".to_owned()) {
            this.eval_libc(match err_kind {
                ConnectionRefused => "ECONNREFUSED",
                ConnectionReset => "ECONNRESET",
                PermissionDenied => "EPERM",
                BrokenPipe => "EPIPE",
                NotConnected => "ENOTCONN",
                ConnectionAborted => "ECONNABORTED",
                AddrNotAvailable => "EADDRNOTAVAIL",
                AddrInUse => "EADDRINUSE",
                NotFound => "ENOENT",
                Interrupted => "EINTR",
                InvalidInput => "EINVAL",
                TimedOut => "ETIMEDOUT",
                AlreadyExists => "EEXIST",
                WouldBlock => "EWOULDBLOCK",
                _ => {
                    throw_unsup_format!(
                        "io error {:?} cannot be translated into a raw os error",
                        err_kind
                    )
                }
            })?
        } else if target.families.contains(&"windows".to_owned()) {
            // FIXME: we have to finish implementing the Windows equivalent of this.
            this.eval_windows(
                "c",
                match err_kind {
                    NotFound => "ERROR_FILE_NOT_FOUND",
                    PermissionDenied => "ERROR_ACCESS_DENIED",
                    _ =>
                        throw_unsup_format!(
                            "io error {:?} cannot be translated into a raw os error",
                            err_kind
                        ),
                },
            )?
        } else {
            throw_unsup_format!(
                "setting the last OS error from an io::Error is unsupported for {}.",
                target_os
            )
        };
        this.set_last_error(last_error)
    }

    /// Helper function that consumes an `std::io::Result<T>` and returns an
    /// `InterpResult<'tcx, T>::Ok` instead. In case the result is an error, this function returns
    /// `Ok(-1)` and sets the last OS error accordingly.
    ///
    /// This function uses `T: From<i32>` instead of `i32` directly because some IO related
    /// functions return different integer types (like `read`, that returns an `i64`).
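    ///
    /// Illustrative usage, where `host_result` is some `std::io::Result<i64>` obtained
    /// from a host operation (a sketch, not compiled as a doctest):
    /// ```ignore
    /// let ret: i64 = this.try_unwrap_io_result(host_result)?;
    /// ```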
    fn try_unwrap_io_result<T: From<i32>>(
        &mut self,
        result: std::io::Result<T>,
    ) -> InterpResult<'tcx, T> {
        match result {
            Ok(ok) => Ok(ok),
            Err(e) => {
                self.eval_context_mut().set_last_error_from_io_error(e.kind())?;
                Ok((-1).into())
            }
        }
    }

    fn read_scalar_at_offset(
        &self,
        op: &OpTy<'tcx, Tag>,
        offset: u64,
        layout: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ScalarMaybeUninit<Tag>> {
        let this = self.eval_context_ref();
        let op_place = this.deref_operand(op)?;
        let offset = Size::from_bytes(offset);
        // Ensure that the following read at an offset is within bounds
        assert!(op_place.layout.size >= offset + layout.size);
        let value_place = op_place.offset(offset, MemPlaceMeta::None, layout, this)?;
        this.read_scalar(&value_place.into())
    }

    fn write_scalar_at_offset(
        &mut self,
        op: &OpTy<'tcx, Tag>,
        offset: u64,
        value: impl Into<ScalarMaybeUninit<Tag>>,
        layout: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ()> {
        let this = self.eval_context_mut();
        let op_place = this.deref_operand(op)?;
        let offset = Size::from_bytes(offset);
        // Ensure that the following write at an offset is within bounds
        assert!(op_place.layout.size >= offset + layout.size);
        let value_place = op_place.offset(offset, MemPlaceMeta::None, layout, this)?;
        this.write_scalar(value, &value_place.into())
    }

    /// Parse a `timespec` struct and return it as a `std::time::Duration`. It returns `None`
    /// if the value in the `timespec` struct is invalid. Some libc functions will return
    /// `EINVAL` in this case.
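    ///
    /// A sketch of the usual caller pattern, rejecting invalid values with `EINVAL`
    /// (the surrounding return value is illustrative):
    /// ```ignore
    /// let duration = match this.read_timespec(&tp)? {
    ///     Some(duration) => duration,
    ///     None => {
    ///         let einval = this.eval_libc("EINVAL")?;
    ///         this.set_last_error(einval)?;
    ///         return Ok(-1);
    ///     }
    /// };
    /// ```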
    fn read_timespec(&mut self, tp: &MPlaceTy<'tcx, Tag>) -> InterpResult<'tcx, Option<Duration>> {
        let this = self.eval_context_mut();
        let seconds_place = this.mplace_field(&tp, 0)?;
        let seconds_scalar = this.read_scalar(&seconds_place.into())?;
        let seconds = seconds_scalar.to_machine_isize(this)?;
        let nanoseconds_place = this.mplace_field(&tp, 1)?;
        let nanoseconds_scalar = this.read_scalar(&nanoseconds_place.into())?;
        let nanoseconds = nanoseconds_scalar.to_machine_isize(this)?;

        Ok(try {
            // tv_sec must be non-negative.
            let seconds: u64 = seconds.try_into().ok()?;
            // tv_nsec must be non-negative.
            let nanoseconds: u32 = nanoseconds.try_into().ok()?;
            if nanoseconds >= 1_000_000_000 {
                // tv_nsec must not be greater than 999,999,999.
                None?
            }
            Duration::new(seconds, nanoseconds)
        })
    }

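    /// Read a sequence of bytes until the first null terminator, starting at `ptr`.
    /// The returned slice does not include the terminating null byte.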
    fn read_c_str<'a>(&'a self, ptr: Pointer<Option<Tag>>) -> InterpResult<'tcx, &'a [u8]>
    where
        'tcx: 'a,
        'mir: 'a,
    {
        let this = self.eval_context_ref();
        let size1 = Size::from_bytes(1);

        // Step 1: determine the length.
        let mut len = Size::ZERO;
        loop {
            // FIXME: We are re-getting the allocation each time around the loop.
            // Would be nice if we could somehow "extend" an existing AllocRange.
            let alloc = this.memory.get(ptr.offset(len, this)?.into(), size1, Align::ONE)?.unwrap(); // not a ZST, so we will get a result
            let byte = alloc.read_scalar(alloc_range(Size::ZERO, size1))?.to_u8()?;
            if byte == 0 {
                break;
            } else {
                len = len + size1;
            }
        }

        // Step 2: get the bytes.
        this.memory.read_bytes(ptr.into(), len)
    }

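    /// Read a sequence of `u16`s until the first null terminator, starting at `ptr`.
    /// The returned vector does not include the terminating null.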
    fn read_wide_str(&self, mut ptr: Pointer<Option<Tag>>) -> InterpResult<'tcx, Vec<u16>> {
        let this = self.eval_context_ref();
        let size2 = Size::from_bytes(2);
        let align2 = Align::from_bytes(2).unwrap();

        let mut wchars = Vec::new();
        loop {
            // FIXME: We are re-getting the allocation each time around the loop.
            // Would be nice if we could somehow "extend" an existing AllocRange.
            let alloc = this.memory.get(ptr.into(), size2, align2)?.unwrap(); // not a ZST, so we will get a result
            let wchar = alloc.read_scalar(alloc_range(Size::ZERO, size2))?.to_u16()?;
            if wchar == 0 {
                break;
            } else {
                wchars.push(wchar);
                ptr = ptr.offset(size2, this)?;
            }
        }

        Ok(wchars)
    }

    /// Check that the ABI is what we expect.
    fn check_abi<'a>(&self, abi: Abi, exp_abi: Abi) -> InterpResult<'a, ()> {
        if self.eval_context_ref().machine.enforce_abi && abi != exp_abi {
            throw_ub_format!(
                "calling a function with ABI {} using caller ABI {}",
                exp_abi.name(),
                abi.name()
            )
        }
        Ok(())
    }

    fn frame_in_std(&self) -> bool {
        let this = self.eval_context_ref();
        this.tcx.lang_items().start_fn().map_or(false, |start_fn| {
            this.tcx.def_path(this.frame().instance.def_id()).krate
                == this.tcx.def_path(start_fn).krate
        })
    }

    /// Handler that should be called when unsupported functionality is encountered.
    /// Depending on the `panic_on_unsupported` setting, this either raises a panic within
    /// the context of the emulated application, or returns an error in the Miri process
    /// context.
    ///
    /// If it returns `Ok(())`, the caller should continue executing the emulated program.
    fn handle_unsupported<S: AsRef<str>>(&mut self, error_msg: S) -> InterpResult<'tcx, ()> {
        let this = self.eval_context_mut();
        if this.machine.panic_on_unsupported {
            // message is slightly different here to make automated analysis easier
            let error_msg = format!("unsupported Miri functionality: {}", error_msg.as_ref());
            this.start_panic(error_msg.as_ref(), StackPopUnwind::Skip)?;
            return Ok(());
        } else {
            throw_unsup_format!("{}", error_msg.as_ref());
        }
    }

    fn check_abi_and_shim_symbol_clash(
        &mut self,
        abi: Abi,
        exp_abi: Abi,
        link_name: Symbol,
    ) -> InterpResult<'tcx, ()> {
        self.check_abi(abi, exp_abi)?;
        if let Some(body) = self.eval_context_mut().lookup_exported_symbol(link_name)? {
            throw_machine_stop!(TerminationInfo::SymbolShimClashing {
                link_name,
                span: body.span.data(),
            })
        }
        Ok(())
    }

    fn check_shim<'a, const N: usize>(
        &mut self,
        abi: Abi,
        exp_abi: Abi,
        link_name: Symbol,
        args: &'a [OpTy<'tcx, Tag>],
    ) -> InterpResult<'tcx, &'a [OpTy<'tcx, Tag>; N]>
    where
        &'a [OpTy<'tcx, Tag>; N]: TryFrom<&'a [OpTy<'tcx, Tag>]>,
    {
        self.check_abi_and_shim_symbol_clash(abi, exp_abi, link_name)?;
        check_arg_count(args)
    }

    /// Mark a machine allocation that was just created as immutable.
    fn mark_immutable(&mut self, mplace: &MemPlace<Tag>) {
        let this = self.eval_context_mut();
        this.memory
            .mark_immutable(mplace.ptr.into_pointer_or_addr().unwrap().provenance.alloc_id)
            .unwrap();
    }
}

/// Check that the number of args is what we expect.
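///
/// Illustrative usage from a shim (the binding names are an assumption):
/// ```ignore
/// let &[ref fd, ref buf, ref count] = check_arg_count(args)?;
/// ```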
pub fn check_arg_count<'a, 'tcx, const N: usize>(
    args: &'a [OpTy<'tcx, Tag>],
) -> InterpResult<'tcx, &'a [OpTy<'tcx, Tag>; N]>
where
    &'a [OpTy<'tcx, Tag>; N]: TryFrom<&'a [OpTy<'tcx, Tag>]>,
{
    if let Ok(ops) = args.try_into() {
        return Ok(ops);
    }
    throw_ub_format!("incorrect number of arguments: got {}, expected {}", args.len(), N)
}

pub fn isolation_abort_error(name: &str) -> InterpResult<'static> {
    throw_machine_stop!(TerminationInfo::UnsupportedInIsolation(format!(
        "{} not available when isolation is enabled",
        name,
    )))
}

pub fn immty_from_int_checked<'tcx>(
    int: impl Into<i128>,
    layout: TyAndLayout<'tcx>,
) -> InterpResult<'tcx, ImmTy<'tcx, Tag>> {
    let int = int.into();
    Ok(ImmTy::try_from_int(int, layout).ok_or_else(|| {
        err_unsup_format!("signed value {:#x} does not fit in {} bits", int, layout.size.bits())
    })?)
}

pub fn immty_from_uint_checked<'tcx>(
    int: impl Into<u128>,
    layout: TyAndLayout<'tcx>,
) -> InterpResult<'tcx, ImmTy<'tcx, Tag>> {
    let int = int.into();
    Ok(ImmTy::try_from_uint(int, layout).ok_or_else(|| {
        err_unsup_format!("unsigned value {:#x} does not fit in {} bits", int, layout.size.bits())
    })?)
}