diff options
| author | Oliver Scherer <github35764891676564198441@oli-obk.de> | 2019-12-25 01:04:32 +0100 |
|---|---|---|
| committer | Oliver Scherer <github35764891676564198441@oli-obk.de> | 2019-12-26 12:10:36 +0100 |
| commit | 3e7fa3c6f22798d4228a0dc3ec50bdf185084d40 (patch) | |
| tree | d2c24117e045845de1ac866fb85abc3a73d19eda | |
| parent | c0b16b4e6aa94cd83fd2c029356ba537dc4502c6 (diff) | |
| download | rust-3e7fa3c6f22798d4228a0dc3ec50bdf185084d40.tar.gz rust-3e7fa3c6f22798d4228a0dc3ec50bdf185084d40.zip | |
Move const eval machine into its own module
| -rw-r--r-- | src/librustc_mir/const_eval.rs | 355 | ||||
| -rw-r--r-- | src/librustc_mir/const_eval/error.rs | 30 | ||||
| -rw-r--r-- | src/librustc_mir/const_eval/machine.rs | 328 |
3 files changed, 369 insertions, 344 deletions
diff --git a/src/librustc_mir/const_eval.rs b/src/librustc_mir/const_eval.rs index 8a3bab2a282..833547d4d05 100644 --- a/src/librustc_mir/const_eval.rs +++ b/src/librustc_mir/const_eval.rs @@ -1,21 +1,18 @@ // Not in interpret to make sure we do not use private implementation details -use std::borrow::{Borrow, Cow}; -use std::collections::hash_map::Entry; use std::convert::TryInto; use std::error::Error; use std::fmt; use std::hash::Hash; -use crate::interpret::eval_nullary_intrinsic; +use crate::interpret::eval_nullary_intrinsic; use rustc::hir::def::DefKind; -use rustc::hir::def_id::DefId; use rustc::mir; use rustc::mir::interpret::{ConstEvalErr, ErrorHandled, ScalarMaybeUndef}; use rustc::traits::Reveal; -use rustc::ty::layout::{self, HasTyCtxt, LayoutOf, VariantIdx}; -use rustc::ty::{self, subst::Subst, Ty, TyCtxt}; -use rustc_data_structures::fx::FxHashMap; +use rustc::ty::layout::{self, LayoutOf, VariantIdx}; +use rustc::ty::{self, subst::Subst, TyCtxt}; use syntax::{ @@ -23,17 +20,16 @@ use syntax::{ }; use crate::interpret::{ - self, intern_const_alloc_recursive, snapshot, AllocId, Allocation, AssertMessage, ConstValue, - GlobalId, ImmTy, Immediate, InterpCx, InterpErrorInfo, InterpResult, MPlaceTy, Machine, Memory, - MemoryKind, OpTy, PlaceTy, Pointer, RawConst, RefTracking, Scalar, StackPopCleanup, + intern_const_alloc_recursive, Allocation, ConstValue, GlobalId, ImmTy, Immediate, InterpCx, + InterpErrorInfo, InterpResult, MPlaceTy, Machine, MemoryKind, OpTy, RawConst, RefTracking, + Scalar, StackPopCleanup, }; -/// Number of steps until the detector even starts doing anything. -/// Also, a warning is shown to the user when this number is reached. -const STEPS_UNTIL_DETECTOR_ENABLED: isize = 1_000_000; -/// The number of steps between loop detector snapshots. -/// Should be a power of two for performance reasons. 
-const DETECTOR_SNAPSHOT_PERIOD: isize = 256; +mod error; +mod machine; + +pub use error::*; +pub use machine::*; /// The `InterpCx` is only meant to be used to do field and index projections into constants for /// `simd_shuffle` and const patterns in match arms. @@ -173,335 +169,6 @@ fn eval_body_using_ecx<'mir, 'tcx>( Ok(ret) } -#[derive(Clone, Debug)] -pub enum ConstEvalError { - NeedsRfc(String), - ConstAccessesStatic, -} - -impl<'tcx> Into<InterpErrorInfo<'tcx>> for ConstEvalError { - fn into(self) -> InterpErrorInfo<'tcx> { - err_unsup!(Unsupported(self.to_string())).into() - } -} - -impl fmt::Display for ConstEvalError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - use self::ConstEvalError::*; - match *self { - NeedsRfc(ref msg) => { - write!(f, "\"{}\" needs an rfc before being allowed inside constants", msg) - } - ConstAccessesStatic => write!(f, "constant accesses static"), - } - } -} - -impl Error for ConstEvalError {} - -// Extra machine state for CTFE, and the Machine instance -pub struct CompileTimeInterpreter<'mir, 'tcx> { - /// When this value is negative, it indicates the number of interpreter - /// steps *until* the loop detector is enabled. When it is positive, it is - /// the number of steps after the detector has been enabled modulo the loop - /// detector period. - pub(super) steps_since_detector_enabled: isize, - - /// Extra state to detect loops. 
- pub(super) loop_detector: snapshot::InfiniteLoopDetector<'mir, 'tcx>, -} - -#[derive(Copy, Clone, Debug)] -pub struct MemoryExtra { - /// Whether this machine may read from statics - can_access_statics: bool, -} - -impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> { - fn new() -> Self { - CompileTimeInterpreter { - loop_detector: Default::default(), - steps_since_detector_enabled: -STEPS_UNTIL_DETECTOR_ENABLED, - } - } -} - -impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> { - #[inline(always)] - fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool - where - K: Borrow<Q>, - { - FxHashMap::contains_key(self, k) - } - - #[inline(always)] - fn insert(&mut self, k: K, v: V) -> Option<V> { - FxHashMap::insert(self, k, v) - } - - #[inline(always)] - fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V> - where - K: Borrow<Q>, - { - FxHashMap::remove(self, k) - } - - #[inline(always)] - fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> { - self.iter().filter_map(move |(k, v)| f(k, &*v)).collect() - } - - #[inline(always)] - fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> { - match self.get(&k) { - Some(v) => Ok(v), - None => { - vacant()?; - bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading") - } - } - } - - #[inline(always)] - fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> { - match self.entry(k) { - Entry::Occupied(e) => Ok(e.into_mut()), - Entry::Vacant(e) => { - let v = vacant()?; - Ok(e.insert(v)) - } - } - } -} - -crate type CompileTimeEvalContext<'mir, 'tcx> = - InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>; - -impl interpret::MayLeak for ! 
{ - #[inline(always)] - fn may_leak(self) -> bool { - // `self` is uninhabited - self - } -} - -impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> { - type MemoryKinds = !; - type PointerTag = (); - type ExtraFnVal = !; - - type FrameExtra = (); - type MemoryExtra = MemoryExtra; - type AllocExtra = (); - - type MemoryMap = FxHashMap<AllocId, (MemoryKind<!>, Allocation)>; - - const STATIC_KIND: Option<!> = None; // no copying of statics allowed - - // We do not check for alignment to avoid having to carry an `Align` - // in `ConstValue::ByRef`. - const CHECK_ALIGN: bool = false; - - #[inline(always)] - fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool { - false // for now, we don't enforce validity - } - - fn find_mir_or_eval_fn( - ecx: &mut InterpCx<'mir, 'tcx, Self>, - instance: ty::Instance<'tcx>, - args: &[OpTy<'tcx>], - ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>, - _unwind: Option<mir::BasicBlock>, // unwinding is not supported in consts - ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> { - debug!("find_mir_or_eval_fn: {:?}", instance); - - // Only check non-glue functions - if let ty::InstanceDef::Item(def_id) = instance.def { - // Execution might have wandered off into other crates, so we cannot do a stability- - // sensitive check here. But we can at least rule out functions that are not const - // at all. - if ecx.tcx.is_const_fn_raw(def_id) { - // If this function is a `const fn` then as an optimization we can query this - // evaluation immediately. - // - // For the moment we only do this for functions which take no arguments - // (or all arguments are ZSTs) so that we don't memoize too much. - // - // Because `#[track_caller]` adds an implicit non-ZST argument, we also cannot - // perform this optimization on items tagged with it. 
- let no_implicit_args = !instance.def.requires_caller_location(ecx.tcx()); - if args.iter().all(|a| a.layout.is_zst()) && no_implicit_args { - let gid = GlobalId { instance, promoted: None }; - ecx.eval_const_fn_call(gid, ret)?; - return Ok(None); - } - } else { - // Some functions we support even if they are non-const -- but avoid testing - // that for const fn! We certainly do *not* want to actually call the fn - // though, so be sure we return here. - return if ecx.hook_panic_fn(instance, args, ret)? { - Ok(None) - } else { - throw_unsup_format!("calling non-const function `{}`", instance) - }; - } - } - // This is a const fn. Call it. - Ok(Some(match ecx.load_mir(instance.def, None) { - Ok(body) => *body, - Err(err) => { - if let err_unsup!(NoMirFor(ref path)) = err.kind { - return Err(ConstEvalError::NeedsRfc(format!( - "calling extern function `{}`", - path - )) - .into()); - } - return Err(err); - } - })) - } - - fn call_extra_fn( - _ecx: &mut InterpCx<'mir, 'tcx, Self>, - fn_val: !, - _args: &[OpTy<'tcx>], - _ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>, - _unwind: Option<mir::BasicBlock>, - ) -> InterpResult<'tcx> { - match fn_val {} - } - - fn call_intrinsic( - ecx: &mut InterpCx<'mir, 'tcx, Self>, - span: Span, - instance: ty::Instance<'tcx>, - args: &[OpTy<'tcx>], - ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>, - _unwind: Option<mir::BasicBlock>, - ) -> InterpResult<'tcx> { - if ecx.emulate_intrinsic(span, instance, args, ret)? 
{ - return Ok(()); - } - // An intrinsic that we do not support - let intrinsic_name = ecx.tcx.item_name(instance.def_id()); - Err(ConstEvalError::NeedsRfc(format!("calling intrinsic `{}`", intrinsic_name)).into()) - } - - fn assert_panic( - ecx: &mut InterpCx<'mir, 'tcx, Self>, - _span: Span, - msg: &AssertMessage<'tcx>, - _unwind: Option<mir::BasicBlock>, - ) -> InterpResult<'tcx> { - use rustc::mir::interpret::PanicInfo::*; - Err(match msg { - BoundsCheck { ref len, ref index } => { - let len = ecx - .read_immediate(ecx.eval_operand(len, None)?) - .expect("can't eval len") - .to_scalar()? - .to_machine_usize(&*ecx)?; - let index = ecx - .read_immediate(ecx.eval_operand(index, None)?) - .expect("can't eval index") - .to_scalar()? - .to_machine_usize(&*ecx)?; - err_panic!(BoundsCheck { len, index }) - } - Overflow(op) => err_panic!(Overflow(*op)), - OverflowNeg => err_panic!(OverflowNeg), - DivisionByZero => err_panic!(DivisionByZero), - RemainderByZero => err_panic!(RemainderByZero), - ResumedAfterReturn(generator_kind) => err_panic!(ResumedAfterReturn(*generator_kind)), - ResumedAfterPanic(generator_kind) => err_panic!(ResumedAfterPanic(*generator_kind)), - Panic { .. 
} => bug!("`Panic` variant cannot occur in MIR"), - } - .into()) - } - - fn ptr_to_int(_mem: &Memory<'mir, 'tcx, Self>, _ptr: Pointer) -> InterpResult<'tcx, u64> { - Err(ConstEvalError::NeedsRfc("pointer-to-integer cast".to_string()).into()) - } - - fn binary_ptr_op( - _ecx: &InterpCx<'mir, 'tcx, Self>, - _bin_op: mir::BinOp, - _left: ImmTy<'tcx>, - _right: ImmTy<'tcx>, - ) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> { - Err(ConstEvalError::NeedsRfc("pointer arithmetic or comparison".to_string()).into()) - } - - fn find_foreign_static( - _tcx: TyCtxt<'tcx>, - _def_id: DefId, - ) -> InterpResult<'tcx, Cow<'tcx, Allocation<Self::PointerTag>>> { - throw_unsup!(ReadForeignStatic) - } - - #[inline(always)] - fn init_allocation_extra<'b>( - _memory_extra: &MemoryExtra, - _id: AllocId, - alloc: Cow<'b, Allocation>, - _kind: Option<MemoryKind<!>>, - ) -> (Cow<'b, Allocation<Self::PointerTag>>, Self::PointerTag) { - // We do not use a tag so we can just cheaply forward the allocation - (alloc, ()) - } - - #[inline(always)] - fn tag_static_base_pointer(_memory_extra: &MemoryExtra, _id: AllocId) -> Self::PointerTag { - () - } - - fn box_alloc( - _ecx: &mut InterpCx<'mir, 'tcx, Self>, - _dest: PlaceTy<'tcx>, - ) -> InterpResult<'tcx> { - Err(ConstEvalError::NeedsRfc("heap allocations via `box` keyword".to_string()).into()) - } - - fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> { - { - let steps = &mut ecx.machine.steps_since_detector_enabled; - - *steps += 1; - if *steps < 0 { - return Ok(()); - } - - *steps %= DETECTOR_SNAPSHOT_PERIOD; - if *steps != 0 { - return Ok(()); - } - } - - let span = ecx.frame().span; - ecx.machine.loop_detector.observe_and_analyze(*ecx.tcx, span, &ecx.memory, &ecx.stack[..]) - } - - #[inline(always)] - fn stack_push(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> { - Ok(()) - } - - fn before_access_static( - memory_extra: &MemoryExtra, - _allocation: &Allocation, - ) -> InterpResult<'tcx> { - if 
memory_extra.can_access_statics { - Ok(()) - } else { - Err(ConstEvalError::ConstAccessesStatic.into()) - } - } -} - /// Extracts a field of a (variant of a) const. // this function uses `unwrap` copiously, because an already validated constant must have valid // fields and can thus never fail outside of compiler bugs diff --git a/src/librustc_mir/const_eval/error.rs b/src/librustc_mir/const_eval/error.rs new file mode 100644 index 00000000000..8948cc0fc3e --- /dev/null +++ b/src/librustc_mir/const_eval/error.rs @@ -0,0 +1,30 @@ +use std::error::Error; +use std::fmt; + +use crate::interpret::InterpErrorInfo; + +#[derive(Clone, Debug)] +pub enum ConstEvalError { + NeedsRfc(String), + ConstAccessesStatic, +} + +impl<'tcx> Into<InterpErrorInfo<'tcx>> for ConstEvalError { + fn into(self) -> InterpErrorInfo<'tcx> { + err_unsup!(Unsupported(self.to_string())).into() + } +} + +impl fmt::Display for ConstEvalError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use self::ConstEvalError::*; + match *self { + NeedsRfc(ref msg) => { + write!(f, "\"{}\" needs an rfc before being allowed inside constants", msg) + } + ConstAccessesStatic => write!(f, "constant accesses static"), + } + } +} + +impl Error for ConstEvalError {} diff --git a/src/librustc_mir/const_eval/machine.rs b/src/librustc_mir/const_eval/machine.rs new file mode 100644 index 00000000000..0e3794ee3ab --- /dev/null +++ b/src/librustc_mir/const_eval/machine.rs @@ -0,0 +1,328 @@ +use rustc::hir::def_id::DefId; +use rustc::mir; +use rustc::ty::layout::HasTyCtxt; +use rustc::ty::{self, Ty, TyCtxt}; +use std::borrow::{Borrow, Cow}; +use std::collections::hash_map::Entry; +use std::hash::Hash; + +use rustc_data_structures::fx::FxHashMap; + +use syntax::source_map::Span; + +use crate::interpret::{ + self, snapshot, AllocId, Allocation, AssertMessage, GlobalId, ImmTy, InterpCx, InterpResult, + Memory, MemoryKind, OpTy, PlaceTy, Pointer, Scalar, +}; + +use super::error::*; + +/// Number of steps until the 
detector even starts doing anything. +/// Also, a warning is shown to the user when this number is reached. +const STEPS_UNTIL_DETECTOR_ENABLED: isize = 1_000_000; +/// The number of steps between loop detector snapshots. +/// Should be a power of two for performance reasons. +const DETECTOR_SNAPSHOT_PERIOD: isize = 256; + +// Extra machine state for CTFE, and the Machine instance +pub struct CompileTimeInterpreter<'mir, 'tcx> { + /// When this value is negative, it indicates the number of interpreter + /// steps *until* the loop detector is enabled. When it is positive, it is + /// the number of steps after the detector has been enabled modulo the loop + /// detector period. + pub(super) steps_since_detector_enabled: isize, + + /// Extra state to detect loops. + pub(super) loop_detector: snapshot::InfiniteLoopDetector<'mir, 'tcx>, +} + +#[derive(Copy, Clone, Debug)] +pub struct MemoryExtra { + /// Whether this machine may read from statics + can_access_statics: bool, +} + +impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> { + fn new() -> Self { + CompileTimeInterpreter { + loop_detector: Default::default(), + steps_since_detector_enabled: -STEPS_UNTIL_DETECTOR_ENABLED, + } + } +} + +impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> { + #[inline(always)] + fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool + where + K: Borrow<Q>, + { + FxHashMap::contains_key(self, k) + } + + #[inline(always)] + fn insert(&mut self, k: K, v: V) -> Option<V> { + FxHashMap::insert(self, k, v) + } + + #[inline(always)] + fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V> + where + K: Borrow<Q>, + { + FxHashMap::remove(self, k) + } + + #[inline(always)] + fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> { + self.iter().filter_map(move |(k, v)| f(k, &*v)).collect() + } + + #[inline(always)] + fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> { + match self.get(&k) { + Some(v) 
=> Ok(v), + None => { + vacant()?; + bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading") + } + } + } + + #[inline(always)] + fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> { + match self.entry(k) { + Entry::Occupied(e) => Ok(e.into_mut()), + Entry::Vacant(e) => { + let v = vacant()?; + Ok(e.insert(v)) + } + } + } +} + +crate type CompileTimeEvalContext<'mir, 'tcx> = + InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>; + +impl interpret::MayLeak for ! { + #[inline(always)] + fn may_leak(self) -> bool { + // `self` is uninhabited + self + } +} + +impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> { + type MemoryKinds = !; + type PointerTag = (); + type ExtraFnVal = !; + + type FrameExtra = (); + type MemoryExtra = MemoryExtra; + type AllocExtra = (); + + type MemoryMap = FxHashMap<AllocId, (MemoryKind<!>, Allocation)>; + + const STATIC_KIND: Option<!> = None; // no copying of statics allowed + + // We do not check for alignment to avoid having to carry an `Align` + // in `ConstValue::ByRef`. + const CHECK_ALIGN: bool = false; + + #[inline(always)] + fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool { + false // for now, we don't enforce validity + } + + fn find_mir_or_eval_fn( + ecx: &mut InterpCx<'mir, 'tcx, Self>, + instance: ty::Instance<'tcx>, + args: &[OpTy<'tcx>], + ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>, + _unwind: Option<mir::BasicBlock>, // unwinding is not supported in consts + ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> { + debug!("find_mir_or_eval_fn: {:?}", instance); + + // Only check non-glue functions + if let ty::InstanceDef::Item(def_id) = instance.def { + // Execution might have wandered off into other crates, so we cannot do a stability- + // sensitive check here. But we can at least rule out functions that are not const + // at all. 
+ if ecx.tcx.is_const_fn_raw(def_id) { + // If this function is a `const fn` then as an optimization we can query this + // evaluation immediately. + // + // For the moment we only do this for functions which take no arguments + // (or all arguments are ZSTs) so that we don't memoize too much. + // + // Because `#[track_caller]` adds an implicit non-ZST argument, we also cannot + // perform this optimization on items tagged with it. + let no_implicit_args = !instance.def.requires_caller_location(ecx.tcx()); + if args.iter().all(|a| a.layout.is_zst()) && no_implicit_args { + let gid = GlobalId { instance, promoted: None }; + ecx.eval_const_fn_call(gid, ret)?; + return Ok(None); + } + } else { + // Some functions we support even if they are non-const -- but avoid testing + // that for const fn! We certainly do *not* want to actually call the fn + // though, so be sure we return here. + return if ecx.hook_panic_fn(instance, args, ret)? { + Ok(None) + } else { + throw_unsup_format!("calling non-const function `{}`", instance) + }; + } + } + // This is a const fn. Call it. + Ok(Some(match ecx.load_mir(instance.def, None) { + Ok(body) => *body, + Err(err) => { + if let err_unsup!(NoMirFor(ref path)) = err.kind { + return Err(ConstEvalError::NeedsRfc(format!( + "calling extern function `{}`", + path + )) + .into()); + } + return Err(err); + } + })) + } + + fn call_extra_fn( + _ecx: &mut InterpCx<'mir, 'tcx, Self>, + fn_val: !, + _args: &[OpTy<'tcx>], + _ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>, + _unwind: Option<mir::BasicBlock>, + ) -> InterpResult<'tcx> { + match fn_val {} + } + + fn call_intrinsic( + ecx: &mut InterpCx<'mir, 'tcx, Self>, + span: Span, + instance: ty::Instance<'tcx>, + args: &[OpTy<'tcx>], + ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>, + _unwind: Option<mir::BasicBlock>, + ) -> InterpResult<'tcx> { + if ecx.emulate_intrinsic(span, instance, args, ret)? 
{ + return Ok(()); + } + // An intrinsic that we do not support + let intrinsic_name = ecx.tcx.item_name(instance.def_id()); + Err(ConstEvalError::NeedsRfc(format!("calling intrinsic `{}`", intrinsic_name)).into()) + } + + fn assert_panic( + ecx: &mut InterpCx<'mir, 'tcx, Self>, + _span: Span, + msg: &AssertMessage<'tcx>, + _unwind: Option<mir::BasicBlock>, + ) -> InterpResult<'tcx> { + use rustc::mir::interpret::PanicInfo::*; + Err(match msg { + BoundsCheck { ref len, ref index } => { + let len = ecx + .read_immediate(ecx.eval_operand(len, None)?) + .expect("can't eval len") + .to_scalar()? + .to_machine_usize(&*ecx)?; + let index = ecx + .read_immediate(ecx.eval_operand(index, None)?) + .expect("can't eval index") + .to_scalar()? + .to_machine_usize(&*ecx)?; + err_panic!(BoundsCheck { len, index }) + } + Overflow(op) => err_panic!(Overflow(*op)), + OverflowNeg => err_panic!(OverflowNeg), + DivisionByZero => err_panic!(DivisionByZero), + RemainderByZero => err_panic!(RemainderByZero), + ResumedAfterReturn(generator_kind) => err_panic!(ResumedAfterReturn(*generator_kind)), + ResumedAfterPanic(generator_kind) => err_panic!(ResumedAfterPanic(*generator_kind)), + Panic { .. 
} => bug!("`Panic` variant cannot occur in MIR"), + } + .into()) + } + + fn ptr_to_int(_mem: &Memory<'mir, 'tcx, Self>, _ptr: Pointer) -> InterpResult<'tcx, u64> { + Err(ConstEvalError::NeedsRfc("pointer-to-integer cast".to_string()).into()) + } + + fn binary_ptr_op( + _ecx: &InterpCx<'mir, 'tcx, Self>, + _bin_op: mir::BinOp, + _left: ImmTy<'tcx>, + _right: ImmTy<'tcx>, + ) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> { + Err(ConstEvalError::NeedsRfc("pointer arithmetic or comparison".to_string()).into()) + } + + fn find_foreign_static( + _tcx: TyCtxt<'tcx>, + _def_id: DefId, + ) -> InterpResult<'tcx, Cow<'tcx, Allocation<Self::PointerTag>>> { + throw_unsup!(ReadForeignStatic) + } + + #[inline(always)] + fn init_allocation_extra<'b>( + _memory_extra: &MemoryExtra, + _id: AllocId, + alloc: Cow<'b, Allocation>, + _kind: Option<MemoryKind<!>>, + ) -> (Cow<'b, Allocation<Self::PointerTag>>, Self::PointerTag) { + // We do not use a tag so we can just cheaply forward the allocation + (alloc, ()) + } + + #[inline(always)] + fn tag_static_base_pointer(_memory_extra: &MemoryExtra, _id: AllocId) -> Self::PointerTag { + () + } + + fn box_alloc( + _ecx: &mut InterpCx<'mir, 'tcx, Self>, + _dest: PlaceTy<'tcx>, + ) -> InterpResult<'tcx> { + Err(ConstEvalError::NeedsRfc("heap allocations via `box` keyword".to_string()).into()) + } + + fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> { + { + let steps = &mut ecx.machine.steps_since_detector_enabled; + + *steps += 1; + if *steps < 0 { + return Ok(()); + } + + *steps %= DETECTOR_SNAPSHOT_PERIOD; + if *steps != 0 { + return Ok(()); + } + } + + let span = ecx.frame().span; + ecx.machine.loop_detector.observe_and_analyze(*ecx.tcx, span, &ecx.memory, &ecx.stack[..]) + } + + #[inline(always)] + fn stack_push(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> { + Ok(()) + } + + fn before_access_static( + memory_extra: &MemoryExtra, + _allocation: &Allocation, + ) -> InterpResult<'tcx> { + if 
memory_extra.can_access_statics { + Ok(()) + } else { + Err(ConstEvalError::ConstAccessesStatic.into()) + } + } +} |
