pub struct CompileTimeInterpreter<'mir, 'tcx> {
pub(super) num_evaluated_steps: usize,
pub(super) stack: Vec<Frame<'mir, 'tcx, AllocId, ()>>,
pub(super) can_access_statics: CanAccessStatics,
pub(super) check_alignment: CheckAlignment,
}
Extra machine state for CTFE, and the Machine instance.
Fields

num_evaluated_steps: usize
The number of terminators that have been evaluated. This is used to produce lints informing the user that the compiler is not stuck. Set to usize::MAX to never report anything.
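The reporting policy is easiest to see in isolation. Below is a minimal, self-contained sketch (not rustc code; the names and warning schedule are made up for illustration) of a counter where usize::MAX means "never report":

    // Stand-alone sketch of the counting policy: saturating at usize::MAX
    // disables reporting entirely.
    struct StepCounter {
        num_evaluated_steps: usize,
    }

    impl StepCounter {
        /// Count one evaluated terminator; returns true when a "const eval is
        /// still running" diagnostic should be emitted.
        fn on_step(&mut self) -> bool {
            match self.num_evaluated_steps.checked_add(1) {
                None => false, // counter pinned at usize::MAX: never report
                Some(n) => {
                    self.num_evaluated_steps = n;
                    // Hypothetical schedule: report at every power of two past 2^20.
                    n >= (1 << 20) && n.is_power_of_two()
                }
            }
        }
    }

    fn main() {
        let mut silent = StepCounter { num_evaluated_steps: usize::MAX };
        assert!(!silent.on_step()); // never reports

        let mut counting = StepCounter { num_evaluated_steps: (1 << 20) - 1 };
        assert!(counting.on_step()); // crosses the hypothetical threshold
    }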
stack: Vec<Frame<'mir, 'tcx, AllocId, ()>>
The virtual call stack.
can_access_statics: CanAccessStatics
We need to make sure consts never point to anything mutable, even recursively. That is relied on for pattern matching on consts with references. To achieve this, two pieces have to work together:
- Interning makes everything outside of statics immutable.
- Pointers to allocations inside of statics can never leak outside, to a non-static global. This boolean here controls the second part.
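To illustrate the invariant these two pieces enforce, here is the kind of user code they rule out (a compile-fail sketch; the exact diagnostic differs between compiler versions):

    static mut COUNTER: i32 = 0;

    // Rejected: the final value of a constant must not point to mutable memory.
    // If it were accepted, pattern matching against BROKEN could observe COUNTER
    // changing between uses of the constant.
    const BROKEN: &i32 = unsafe { &COUNTER };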
check_alignment: CheckAlignment
Whether to check alignment during evaluation.
Implementations

impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx>

pub(crate) fn new(can_access_statics: CanAccessStatics, check_alignment: CheckAlignment) -> Self
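A hedged construction sketch; the variant names CanAccessStatics::No and CheckAlignment::Error, and the surrounding InterpCx setup, are assumptions about this rustc version rather than verbatim source:

    // Hypothetical caller inside rustc_const_eval: build the CTFE machine for a
    // constant that must not read from statics and that treats misalignment as
    // a hard error.
    let machine = CompileTimeInterpreter::new(
        CanAccessStatics::No,  // assumed variant: consts may not access statics
        CheckAlignment::Error, // assumed variant: misaligned accesses are errors
    );
    // The machine is then handed to an InterpCx together with tcx, a root span,
    // and a ParamEnv, which drives the actual evaluation (details elided).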
Trait Implementations

impl<'mir, 'tcx> Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx>
type Provenance = AllocId
The provenance attached to pointers: here, simply the AllocId they belong to.

type ProvenanceExtra = ()

type ExtraFnVal = !
Extra (non-instance) values that function pointers can represent; Miri, for example, uses this for dlsym results that can later be called to execute the right thing. CTFE has none, hence the never type.

type MemoryMap = IndexMap<AllocId, (MemoryKind<MemoryKind>, Allocation<AllocId, (), Box<[u8], Global>>), BuildHasherDefault<FxHasher>>
const GLOBAL_KIND: Option<Self::MemoryKind> = None
The memory kind to use for copied global memory (held in tcx), or None if such memory should not be mutated, in which case any such attempt causes a ModifiedStatic error to be raised. Statics are copied under two circumstances: when they are mutated, and when adjust_allocation (see below) returns an owned allocation that is added to the memory so that the work is not done twice.
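Because GLOBAL_KIND is None here, global (tcx-held) memory is never copied into a mutable allocation during CTFE, so user code along these lines is refused at compile time (a compile-fail sketch; exact error text varies by version):

    static mut GLOBAL: i32 = 0;

    // The write below cannot be performed: there is no mutable copy of the
    // static's memory to write to, so const evaluation reports an error.
    const WRITE: () = unsafe { GLOBAL = 1 };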
type AllocExtra = ()
type FrameExtra = ()
fn use_addr_for_alignment_check(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool

fn ignore_optional_overflow_checks(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool

fn unwind_terminate(_ecx: &mut InterpCx<'mir, 'tcx, Self>, _reason: UnwindTerminateReason) -> InterpResult<'tcx>
fn call_extra_fn(_ecx: &mut InterpCx<'mir, 'tcx, Self>, fn_val: !, _abi: CallAbi, _args: &[FnArg<'tcx>], _destination: &PlaceTy<'tcx, Self::Provenance>, _target: Option<BasicBlock>, _unwind: UnwindAction) -> InterpResult<'tcx>
Execute fn_val. It is the hook's responsibility to advance the instruction pointer as appropriate.

fn adjust_allocation<'b>(_ecx: &InterpCx<'mir, 'tcx, Self>, _id: AllocId, alloc: Cow<'b, Allocation>, _kind: Option<MemoryKind<Self::MemoryKind>>) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance>>>
fn extern_static_base_pointer(ecx: &InterpCx<'mir, 'tcx, Self>, def_id: DefId) -> InterpResult<'tcx, Pointer>
Return the base pointer for the given extern static.

fn adjust_alloc_base_pointer(_ecx: &InterpCx<'mir, 'tcx, Self>, ptr: Pointer<AllocId>) -> InterpResult<'tcx, Pointer<AllocId>>
fn ptr_from_addr_cast(_ecx: &InterpCx<'mir, 'tcx, Self>, addr: u64) -> InterpResult<'tcx, Pointer<Option<AllocId>>>

fn ptr_get_alloc(_ecx: &InterpCx<'mir, 'tcx, Self>, ptr: Pointer<AllocId>) -> Option<(AllocId, Size, Self::ProvenanceExtra)>
type MemoryKind = MemoryKind

const PANIC_ON_ALLOC_FAIL: bool = false

fn enforce_alignment(ecx: &InterpCx<'mir, 'tcx, Self>) -> CheckAlignment

fn enforce_validity(ecx: &InterpCx<'mir, 'tcx, Self>, layout: TyAndLayout<'tcx>) -> bool

fn alignment_check_failed(ecx: &InterpCx<'mir, 'tcx, Self>, has: Align, required: Align, check: CheckAlignment) -> InterpResult<'tcx, ()>
fn load_mir(ecx: &InterpCx<'mir, 'tcx, Self>, instance: InstanceDef<'tcx>) -> InterpResult<'tcx, &'tcx Body<'tcx>>

fn find_mir_or_eval_fn(ecx: &mut InterpCx<'mir, 'tcx, Self>, orig_instance: Instance<'tcx>, _abi: CallAbi, args: &[FnArg<'tcx>], dest: &PlaceTy<'tcx>, ret: Option<BasicBlock>, _unwind: UnwindAction) -> InterpResult<'tcx, Option<(&'mir Body<'tcx>, Instance<'tcx>)>>

fn panic_nounwind(ecx: &mut InterpCx<'mir, 'tcx, Self>, msg: &str) -> InterpResult<'tcx>

fn call_intrinsic(ecx: &mut InterpCx<'mir, 'tcx, Self>, instance: Instance<'tcx>, args: &[OpTy<'tcx>], dest: &PlaceTy<'tcx, Self::Provenance>, target: Option<BasicBlock>, _unwind: UnwindAction) -> InterpResult<'tcx>
fn assert_panic(ecx: &mut InterpCx<'mir, 'tcx, Self>, msg: &AssertMessage<'tcx>, _unwind: UnwindAction) -> InterpResult<'tcx>
Handles Assert MIR terminators that trigger a panic.

fn binary_ptr_op(_ecx: &InterpCx<'mir, 'tcx, Self>, _bin_op: BinOp, _left: &ImmTy<'tcx>, _right: &ImmTy<'tcx>) -> InterpResult<'tcx, (ImmTy<'tcx>, bool)>
fn increment_const_eval_counter(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx>
Called on each StatementKind::ConstEvalCounter instruction. You can use this to detect long or endlessly running programs.
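For example, a non-terminating const initializer keeps hitting these counter statements, which is how the compiler can report that evaluation is still running (the deny-by-default long_running_const_eval lint in current rustc) instead of hanging silently:

    // Never terminates; each loop iteration passes a ConstEvalCounter statement,
    // so the machine eventually reports the long-running evaluation.
    const SPIN: () = loop {};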
fn expose_ptr(_ecx: &mut InterpCx<'mir, 'tcx, Self>, _ptr: Pointer<AllocId>) -> InterpResult<'tcx>
fn init_frame_extra(ecx: &mut InterpCx<'mir, 'tcx, Self>, frame: Frame<'mir, 'tcx>) -> InterpResult<'tcx, Frame<'mir, 'tcx>>

fn stack<'a>(ecx: &'a InterpCx<'mir, 'tcx, Self>) -> &'a [Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>]

fn stack_mut<'a>(ecx: &'a mut InterpCx<'mir, 'tcx, Self>) -> &'a mut Vec<Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>>
fn before_access_global(_tcx: TyCtxt<'tcx>, machine: &Self, alloc_id: AllocId, alloc: ConstAllocation<'tcx>, static_def_id: Option<DefId>, is_write: bool) -> InterpResult<'tcx>
Called before a global allocation is accessed; static_def_id is Some if this is the "lazy" allocation of a static.

const POST_MONO_CHECKS: bool = true

fn enforce_abi(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool
fn before_access_local_mut<'a>(_ecx: &'a mut InterpCx<'mir, 'tcx, Self>, _frame: usize, _local: Local) -> InterpResult<'tcx> where 'tcx: 'mir
Called before writing the given local of the frame. Since writing a ZST is not actually accessing memory or locals, this is never invoked for ZST reads.

fn before_terminator(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx>
fn thread_local_static_base_pointer(_ecx: &mut InterpCx<'mir, 'tcx, Self>, def_id: DefId) -> InterpResult<'tcx, Pointer<Self::Provenance>>
Return the AllocId for the given thread-local static in the current thread.

fn eval_inline_asm(_ecx: &mut InterpCx<'mir, 'tcx, Self>, _template: &'tcx [InlineAsmTemplatePiece], _operands: &[InlineAsmOperand<'tcx>], _options: InlineAsmOptions) -> InterpResult<'tcx>

fn before_memory_read(_tcx: TyCtxt<'tcx>, _machine: &Self, _alloc_extra: &Self::AllocExtra, _prov: (AllocId, Self::ProvenanceExtra), _range: AllocRange) -> InterpResult<'tcx>
fn before_memory_write(_tcx: TyCtxt<'tcx>, _machine: &mut Self, _alloc_extra: &mut Self::AllocExtra, _prov: (AllocId, Self::ProvenanceExtra), _range: AllocRange) -> InterpResult<'tcx>

fn before_memory_deallocation(_tcx: TyCtxt<'tcx>, _machine: &mut Self, _alloc_extra: &mut Self::AllocExtra, _prov: (AllocId, Self::ProvenanceExtra), _range: AllocRange) -> InterpResult<'tcx>
fn retag_ptr_value(_ecx: &mut InterpCx<'mir, 'tcx, Self>, _kind: RetagKind, val: &ImmTy<'tcx, Self::Provenance>) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>>

fn retag_place_contents(_ecx: &mut InterpCx<'mir, 'tcx, Self>, _kind: RetagKind, _place: &PlaceTy<'tcx, Self::Provenance>) -> InterpResult<'tcx>

fn protect_in_place_function_argument(ecx: &mut InterpCx<'mir, 'tcx, Self>, place: &PlaceTy<'tcx, Self::Provenance>) -> InterpResult<'tcx>
fn after_stack_push(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx>

fn before_stack_pop(_ecx: &InterpCx<'mir, 'tcx, Self>, _frame: &Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>) -> InterpResult<'tcx>

fn after_stack_pop(_ecx: &mut InterpCx<'mir, 'tcx, Self>, _frame: Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>, unwinding: bool) -> InterpResult<'tcx, StackPopJump>
Called immediately after a stack frame has been popped; the locals have already been destroyed!

fn after_local_allocated(_ecx: &mut InterpCx<'mir, 'tcx, Self>, _frame: usize, _local: Local, _mplace: &MPlaceTy<'tcx, Self::Provenance>) -> InterpResult<'tcx>
Auto Trait Implementations
impl<'mir, 'tcx> !RefUnwindSafe for CompileTimeInterpreter<'mir, 'tcx>
impl<'mir, 'tcx> !Send for CompileTimeInterpreter<'mir, 'tcx>
impl<'mir, 'tcx> !Sync for CompileTimeInterpreter<'mir, 'tcx>
impl<'mir, 'tcx> Unpin for CompileTimeInterpreter<'mir, 'tcx>
impl<'mir, 'tcx> !UnwindSafe for CompileTimeInterpreter<'mir, 'tcx>
Blanket Implementations
impl<T> BorrowMut<T> for T where T: ?Sized

fn borrow_mut(&mut self) -> &mut T
Layout

Note: Most layout information is completely unstable and may even differ between compilations. The only exception is types with certain repr(...) attributes. Please see the Rust Reference's "Type Layout" chapter for details on type layout guarantees.

Size: 40 bytes
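As a back-of-the-envelope check of the 40-byte figure on a typical 64-bit target (a sketch with stand-in field types; as noted above, the real layout is unstable):

    use std::mem::size_of;

    // Stand-ins for the two fieldless enums; each occupies one byte.
    #[allow(dead_code)] #[derive(Clone, Copy)] enum CanAccessStaticsLike { No, Yes }
    #[allow(dead_code)] #[derive(Clone, Copy)] enum CheckAlignmentLike { No, Error }

    #[allow(dead_code)]
    struct Shape {
        num_evaluated_steps: usize,                // 8 bytes
        stack: Vec<u64>,                           // 24 bytes: pointer + capacity + length
        can_access_statics: CanAccessStaticsLike,  // 1 byte
        check_alignment: CheckAlignmentLike,       // 1 byte, then 6 bytes of padding
    }

    fn main() {
        assert_eq!(size_of::<Shape>(), 40); // 8 + 24 + 1 + 1, rounded up to alignment 8
    }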