//! Operands: immediate values (`Immediate`, `ImmTy`) and possibly-indirect
//! operands (`Operand`, `OpTy`), plus the functions for reading them.

use std::assert_matches::assert_matches;

use either::{Either, Left, Right};
use rustc_abi as abi;
use rustc_abi::{BackendRepr, HasDataLayout, Size};
use rustc_hir::def::Namespace;
use rustc_middle::mir::interpret::ScalarSizeMismatch;
use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, TyAndLayout};
use rustc_middle::ty::print::{FmtPrinter, PrettyPrinter};
use rustc_middle::ty::{ConstInt, ScalarInt, Ty, TyCtxt};
use rustc_middle::{bug, mir, span_bug, ty};
use rustc_span::DUMMY_SP;
use tracing::field::Empty;
use tracing::trace;

use super::{
    CtfeProvenance, Frame, InterpCx, InterpResult, MPlaceTy, Machine, MemPlace, MemPlaceMeta,
    OffsetMode, PlaceTy, Pointer, Projectable, Provenance, Scalar, alloc_range, err_ub,
    from_known_layout, interp_ok, mir_assign_valid_types, throw_ub,
};
use crate::enter_trace_span;

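/// An `Immediate` represents a single immediate self-contained Rust value.
///
/// For optimization of a few very common cases, there is also a representation for a pair of
/// primitive values (`ScalarPair`). It allows the interpreter to avoid making allocations for
/// checked binary operations and wide pointers. Thanks to `ScalarPair`, arithmetic operations
/// and casts can be entirely defined on `Immediate` and do not have to work with a `Place`.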
#[derive(Copy, Clone, Debug)]
pub enum Immediate<Prov: Provenance = CtfeProvenance> {
    /// A single scalar value (must have *initialized* `Scalar` ABI).
    Scalar(Scalar<Prov>),
    /// A pair of two scalar values (must have `ScalarPair` ABI where both fields are
    /// `Scalar::Initialized`).
    ScalarPair(Scalar<Prov>, Scalar<Prov>),
    /// A value of fully uninitialized memory. Can have arbitrary size and layout, but must be sized.
    Uninit,
}

impl<Prov: Provenance> From<Scalar<Prov>> for Immediate<Prov> {
    #[inline(always)]
    fn from(val: Scalar<Prov>) -> Self {
        Immediate::Scalar(val)
    }
}

impl<Prov: Provenance> Immediate<Prov> {
    pub fn new_pointer_with_meta(
        ptr: Pointer<Option<Prov>>,
        meta: MemPlaceMeta<Prov>,
        cx: &impl HasDataLayout,
    ) -> Self {
        let ptr = Scalar::from_maybe_pointer(ptr, cx);
        match meta {
            MemPlaceMeta::None => Immediate::from(ptr),
            MemPlaceMeta::Meta(meta) => Immediate::ScalarPair(ptr, meta),
        }
    }

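    /// Creates an immediate for a slice (a wide pointer) from its data pointer and length.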
    pub fn new_slice(ptr: Pointer<Option<Prov>>, len: u64, cx: &impl HasDataLayout) -> Self {
        Immediate::ScalarPair(
            Scalar::from_maybe_pointer(ptr, cx),
            Scalar::from_target_usize(len, cx),
        )
    }

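    /// Creates an immediate for a trait object (a wide pointer) from its data pointer and vtable.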
    pub fn new_dyn_trait(
        val: Pointer<Option<Prov>>,
        vtable: Pointer<Option<Prov>>,
        cx: &impl HasDataLayout,
    ) -> Self {
        Immediate::ScalarPair(
            Scalar::from_maybe_pointer(val, cx),
            Scalar::from_maybe_pointer(vtable, cx),
        )
    }

    /// Extracts the scalar; panics if this is not a single scalar.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)] // only in debug builds, for performance
    pub fn to_scalar(self) -> Scalar<Prov> {
        match self {
            Immediate::Scalar(val) => val,
            Immediate::ScalarPair(..) => bug!("Got a scalar pair where a scalar was expected"),
            Immediate::Uninit => bug!("Got uninit where a scalar was expected"),
        }
    }

    /// Extracts the scalar as a `ScalarInt`; panics if it is a pointer or uninit.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)] // only in debug builds, for performance
    pub fn to_scalar_int(self) -> ScalarInt {
        self.to_scalar().try_to_scalar_int().unwrap()
    }

    /// Extracts the two scalars of a pair; panics if this is not a scalar pair.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)] // only in debug builds, for performance
    pub fn to_scalar_pair(self) -> (Scalar<Prov>, Scalar<Prov>) {
        match self {
            Immediate::ScalarPair(val1, val2) => (val1, val2),
            Immediate::Scalar(..) => bug!("Got a scalar where a scalar pair was expected"),
            Immediate::Uninit => bug!("Got uninit where a scalar pair was expected"),
        }
    }

    /// Returns the scalar from the first component and optionally the 2nd component as metadata.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)] // only in debug builds, for performance
    pub fn to_scalar_and_meta(self) -> (Scalar<Prov>, MemPlaceMeta<Prov>) {
        match self {
            Immediate::ScalarPair(val1, val2) => (val1, MemPlaceMeta::Meta(val2)),
            Immediate::Scalar(val) => (val, MemPlaceMeta::None),
            Immediate::Uninit => bug!("Got uninit where a scalar or scalar pair was expected"),
        }
    }

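    /// Asserts that this immediate is a valid value for the given ABI.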
    pub fn assert_matches_abi(self, abi: BackendRepr, msg: &str, cx: &impl HasDataLayout) {
        match (self, abi) {
            (Immediate::Scalar(scalar), BackendRepr::Scalar(s)) => {
                assert_eq!(scalar.size(), s.size(cx), "{msg}: scalar value has wrong size");
                if !matches!(s.primitive(), abi::Primitive::Pointer(..)) {
                    // This is not a pointer, so it should not carry provenance.
                    assert!(
                        matches!(scalar, Scalar::Int(..)),
                        "{msg}: scalar value should be an integer, but has provenance"
                    );
                }
            }
            (Immediate::ScalarPair(a_val, b_val), BackendRepr::ScalarPair(a, b)) => {
                assert_eq!(
                    a_val.size(),
                    a.size(cx),
                    "{msg}: first component of scalar pair has wrong size"
                );
                if !matches!(a.primitive(), abi::Primitive::Pointer(..)) {
                    assert!(
                        matches!(a_val, Scalar::Int(..)),
                        "{msg}: first component of scalar pair should be an integer, but has provenance"
                    );
                }
                assert_eq!(
                    b_val.size(),
                    b.size(cx),
                    "{msg}: second component of scalar pair has wrong size"
                );
                if !matches!(b.primitive(), abi::Primitive::Pointer(..)) {
                    assert!(
                        matches!(b_val, Scalar::Int(..)),
                        "{msg}: second component of scalar pair should be an integer, but has provenance"
                    );
                }
            }
            (Immediate::Uninit, _) => {
                assert!(abi.is_sized(), "{msg}: unsized immediates are not a thing");
            }
            _ => {
                bug!("{msg}: value {self:?} does not match ABI {abi:?}")
            }
        }
    }

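    /// Strips provenance from all scalars in this immediate, turning pointers into plain
    /// integers. Errors if that is not possible.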
    pub fn clear_provenance<'tcx>(&mut self) -> InterpResult<'tcx> {
        match self {
            Immediate::Scalar(s) => {
                s.clear_provenance()?;
            }
            Immediate::ScalarPair(a, b) => {
                a.clear_provenance()?;
                b.clear_provenance()?;
            }
            Immediate::Uninit => {}
        }
        interp_ok(())
    }
}

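/// An `Immediate` object together with its type and layout.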
#[derive(Clone)]
pub struct ImmTy<'tcx, Prov: Provenance = CtfeProvenance> {
    imm: Immediate<Prov>,
    pub layout: TyAndLayout<'tcx>,
}

impl<Prov: Provenance> std::fmt::Display for ImmTy<'_, Prov> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        /// Helper function for printing a scalar to a FmtPrinter.
        fn print_scalar<'a, 'tcx, Prov: Provenance>(
            p: &mut FmtPrinter<'a, 'tcx>,
            s: Scalar<Prov>,
            ty: Ty<'tcx>,
        ) -> Result<(), std::fmt::Error> {
            match s {
                Scalar::Int(int) => p.pretty_print_const_scalar_int(int, ty, true),
                Scalar::Ptr(ptr, _sz) => {
                    // Just print the ptr value. `pretty_print_const_scalar_ptr` would also try to
                    // print what it points to, which would fail since it has no access to the
                    // local memory.
                    p.pretty_print_const_pointer(ptr, ty)
                }
            }
        }
        ty::tls::with(|tcx| {
            match self.imm {
                Immediate::Scalar(s) => {
                    if let Some(ty) = tcx.lift(self.layout.ty) {
                        let s = FmtPrinter::print_string(tcx, Namespace::ValueNS, |p| {
                            print_scalar(p, s, ty)
                        })?;
                        f.write_str(&s)?;
                        return Ok(());
                    }
                    write!(f, "{:x}: {}", s, self.layout.ty)
                }
                Immediate::ScalarPair(a, b) => {
                    // FIXME: tuples and slices could be printed more nicely here.
                    write!(f, "({:x}, {:x}): {}", a, b, self.layout.ty)
                }
                Immediate::Uninit => {
                    write!(f, "uninit: {}", self.layout.ty)
                }
            }
        })
    }
}

impl<Prov: Provenance> std::fmt::Debug for ImmTy<'_, Prov> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ImmTy")
            .field("imm", &self.imm)
            .field("ty", &format_args!("{}", self.layout.ty))
            .finish()
    }
}

impl<'tcx, Prov: Provenance> std::ops::Deref for ImmTy<'tcx, Prov> {
    type Target = Immediate<Prov>;
    #[inline(always)]
    fn deref(&self) -> &Immediate<Prov> {
        &self.imm
    }
}

impl<'tcx, Prov: Provenance> ImmTy<'tcx, Prov> {
    #[inline]
    pub fn from_scalar(val: Scalar<Prov>, layout: TyAndLayout<'tcx>) -> Self {
        debug_assert!(layout.backend_repr.is_scalar(), "`ImmTy::from_scalar` on non-scalar layout");
        debug_assert_eq!(val.size(), layout.size);
        ImmTy { imm: val.into(), layout }
    }

    #[inline]
    pub fn from_scalar_pair(a: Scalar<Prov>, b: Scalar<Prov>, layout: TyAndLayout<'tcx>) -> Self {
        debug_assert!(
            matches!(layout.backend_repr, BackendRepr::ScalarPair(..)),
            "`ImmTy::from_scalar_pair` on non-scalar-pair layout"
        );
        let imm = Immediate::ScalarPair(a, b);
        ImmTy { imm, layout }
    }

    #[inline(always)]
    pub fn from_immediate(imm: Immediate<Prov>, layout: TyAndLayout<'tcx>) -> Self {
        // Without a `cx` we cannot call `assert_matches_abi`, so only check the ABI kinds.
        debug_assert!(
            match (imm, layout.backend_repr) {
                (Immediate::Scalar(..), BackendRepr::Scalar(..)) => true,
                (Immediate::ScalarPair(..), BackendRepr::ScalarPair(..)) => true,
                (Immediate::Uninit, _) if layout.is_sized() => true,
                _ => false,
            },
            "immediate {imm:?} does not fit to layout {layout:?}",
        );
        ImmTy { imm, layout }
    }

    #[inline]
    pub fn uninit(layout: TyAndLayout<'tcx>) -> Self {
        debug_assert!(layout.is_sized(), "immediates must be sized");
        ImmTy { imm: Immediate::Uninit, layout }
    }

    #[inline]
    pub fn from_scalar_int(s: ScalarInt, layout: TyAndLayout<'tcx>) -> Self {
        Self::from_scalar(Scalar::from(s), layout)
    }

    #[inline]
    pub fn from_uint(i: impl Into<u128>, layout: TyAndLayout<'tcx>) -> Self {
        Self::from_scalar(Scalar::from_uint(i, layout.size), layout)
    }

    #[inline]
    pub fn from_int(i: impl Into<i128>, layout: TyAndLayout<'tcx>) -> Self {
        Self::from_scalar(Scalar::from_int(i, layout.size), layout)
    }

    #[inline]
    pub fn from_bool(b: bool, tcx: TyCtxt<'tcx>) -> Self {
        // Can use any typing env, since `bool` is always monomorphic.
        let layout = tcx
            .layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(tcx.types.bool))
            .unwrap();
        Self::from_scalar(Scalar::from_bool(b), layout)
    }

    #[inline]
    pub fn from_ordering(c: std::cmp::Ordering, tcx: TyCtxt<'tcx>) -> Self {
        // Can use any typing env, since `Ordering` is always monomorphic.
        let ty = tcx.ty_ordering_enum(DUMMY_SP);
        let layout =
            tcx.layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(ty)).unwrap();
        Self::from_scalar(Scalar::Int(c.into()), layout)
    }

    pub fn from_pair(a: Self, b: Self, cx: &(impl HasTypingEnv<'tcx> + HasTyCtxt<'tcx>)) -> Self {
        let layout = cx
            .tcx()
            .layout_of(
                cx.typing_env().as_query_input(Ty::new_tup(cx.tcx(), &[a.layout.ty, b.layout.ty])),
            )
            .unwrap();
        Self::from_scalar_pair(a.to_scalar(), b.to_scalar(), layout)
    }

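    /// Returns the immediate as a `ScalarInt`, ensuring it has the size that the layout of the
    /// immediate indicates.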
    #[inline]
    pub fn to_scalar_int(&self) -> InterpResult<'tcx, ScalarInt> {
        let s = self.to_scalar().to_scalar_int()?;
        if s.size() != self.layout.size {
            throw_ub!(ScalarSizeMismatch(ScalarSizeMismatch {
                target_size: self.layout.size.bytes(),
                data_size: s.size().bytes(),
            }));
        }
        interp_ok(s)
    }

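    /// Turns this immediate into a `ConstInt`; the type must be integral.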
    #[inline]
    pub fn to_const_int(self) -> ConstInt {
        assert!(self.layout.ty.is_integral());
        let int = self.imm.to_scalar_int();
        assert_eq!(int.size(), self.layout.size);
        ConstInt::new(int, self.layout.ty.is_signed(), self.layout.ty.is_ptr_sized_integral())
    }

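    /// Splits an immediate with `ScalarPair` ABI into two `ImmTy`s, one per field.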
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)] // only in debug builds, for performance
    pub fn to_pair(self, cx: &(impl HasTyCtxt<'tcx> + HasTypingEnv<'tcx>)) -> (Self, Self) {
        let layout = self.layout;
        let (val0, val1) = self.to_scalar_pair();
        (
            ImmTy::from_scalar(val0, layout.field(cx, 0)),
            ImmTy::from_scalar(val1, layout.field(cx, 1)),
        )
    }

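    /// Computes the "sub-immediate" that is located within `self` at the given offset with the
    /// given layout.
    // Not called `offset` to avoid confusion with the trait method.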
    fn offset_(&self, offset: Size, layout: TyAndLayout<'tcx>, cx: &impl HasDataLayout) -> Self {
        // Verify that the input matches its type.
        if cfg!(debug_assertions) {
            self.assert_matches_abi(
                self.layout.backend_repr,
                "invalid input to Immediate::offset",
                cx,
            );
        }
        // Projections into fields must stay in-bounds of the surrounding value.
        assert!(
            offset + layout.size <= self.layout.size,
            "attempting to project to field at offset {} with size {} into immediate with layout {:#?}",
            offset.bytes(),
            layout.size.bytes(),
            self.layout,
        );
        let inner_val: Immediate<_> = match (**self, self.layout.backend_repr) {
            // If the entire value is uninit, then so is the field.
            (Immediate::Uninit, _) => Immediate::Uninit,
            // If the field is uninhabited, we can forget the data.
            _ if layout.is_uninhabited() => Immediate::Uninit,
            // If the field is a ZST, it carries no data; return uninit.
            _ if layout.is_zst() => Immediate::Uninit,
            // A struct with no fields has `Memory` ABI even though it carries no data;
            // treat it like a ZST.
            _ if matches!(layout.backend_repr, BackendRepr::Memory { .. })
                && matches!(layout.variants, abi::Variants::Single { .. })
                && matches!(&layout.fields, abi::FieldsShape::Arbitrary { offsets, .. } if offsets.len() == 0) =>
            {
                Immediate::Uninit
            }
            // The field covers the entire value, so this is a "transparent" projection.
            _ if layout.size == self.layout.size => {
                assert_eq!(offset.bytes(), 0);
                **self
            }
            // Extract one component of a scalar pair; the offset tells us which one.
            (Immediate::ScalarPair(a_val, b_val), BackendRepr::ScalarPair(a, b)) => {
                Immediate::from(if offset.bytes() == 0 {
                    a_val
                } else {
                    assert_eq!(offset, a.size(cx).align_to(b.align(cx).abi));
                    b_val
                })
            }
            _ => bug!(
                "invalid field access on immediate {} at offset {}, original layout {:#?}",
                self,
                offset.bytes(),
                self.layout
            ),
        };
        // Ensure the new value matches the expected field ABI.
        inner_val.assert_matches_abi(
            layout.backend_repr,
            "invalid field type in Immediate::offset",
            cx,
        );

        ImmTy::from_immediate(inner_val, layout)
    }
}

impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for ImmTy<'tcx, Prov> {
    #[inline(always)]
    fn layout(&self) -> TyAndLayout<'tcx> {
        self.layout
    }

    #[inline(always)]
    fn meta(&self) -> MemPlaceMeta<Prov> {
        debug_assert!(self.layout.is_sized()); // unsized `ImmTy` should never reach this point
        MemPlaceMeta::None
    }

    fn offset_with_meta<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        offset: Size,
        _mode: OffsetMode,
        meta: MemPlaceMeta<Prov>,
        layout: TyAndLayout<'tcx>,
        ecx: &InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, Self> {
        assert_matches!(meta, MemPlaceMeta::None); // an immediate has nowhere to store metadata
        interp_ok(self.offset_(offset, layout, ecx))
    }

    #[inline(always)]
    fn to_op<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        _ecx: &InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        interp_ok(self.clone().into())
    }
}

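/// An `Operand` is the result of computing a `mir::Operand`. It can be immediate,
/// or still in memory. The latter is an optimization, to delay reading that chunk of
/// memory and to avoid having to store arbitrary-sized data here.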
#[derive(Copy, Clone, Debug)]
pub(super) enum Operand<Prov: Provenance = CtfeProvenance> {
    Immediate(Immediate<Prov>),
    Indirect(MemPlace<Prov>),
}

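/// An `Operand` together with its type and layout.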
#[derive(Clone)]
pub struct OpTy<'tcx, Prov: Provenance = CtfeProvenance> {
    op: Operand<Prov>, // Keep this private; it helps enforce invariants.
    pub layout: TyAndLayout<'tcx>,
}

impl<Prov: Provenance> std::fmt::Debug for OpTy<'_, Prov> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("OpTy")
            .field("op", &self.op)
            .field("ty", &format_args!("{}", self.layout.ty))
            .finish()
    }
}

impl<'tcx, Prov: Provenance> From<ImmTy<'tcx, Prov>> for OpTy<'tcx, Prov> {
    #[inline(always)]
    fn from(val: ImmTy<'tcx, Prov>) -> Self {
        OpTy { op: Operand::Immediate(val.imm), layout: val.layout }
    }
}

impl<'tcx, Prov: Provenance> From<MPlaceTy<'tcx, Prov>> for OpTy<'tcx, Prov> {
    #[inline(always)]
    fn from(mplace: MPlaceTy<'tcx, Prov>) -> Self {
        OpTy { op: Operand::Indirect(*mplace.mplace()), layout: mplace.layout }
    }
}

impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
    #[inline(always)]
    pub(super) fn op(&self) -> &Operand<Prov> {
        &self.op
    }
}

impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for OpTy<'tcx, Prov> {
    #[inline(always)]
    fn layout(&self) -> TyAndLayout<'tcx> {
        self.layout
    }

    #[inline]
    fn meta(&self) -> MemPlaceMeta<Prov> {
        match self.as_mplace_or_imm() {
            Left(mplace) => mplace.meta(),
            Right(_) => {
                debug_assert!(self.layout.is_sized(), "unsized immediates are not a thing");
                MemPlaceMeta::None
            }
        }
    }

    fn offset_with_meta<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        offset: Size,
        mode: OffsetMode,
        meta: MemPlaceMeta<Prov>,
        layout: TyAndLayout<'tcx>,
        ecx: &InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, Self> {
        match self.as_mplace_or_imm() {
            Left(mplace) => {
                interp_ok(mplace.offset_with_meta(offset, mode, meta, layout, ecx)?.into())
            }
            Right(imm) => {
                assert_matches!(meta, MemPlaceMeta::None); // an immediate has nowhere to store metadata
                interp_ok(imm.offset_(offset, layout, ecx).into())
            }
        }
    }

    #[inline(always)]
    fn to_op<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        _ecx: &InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        interp_ok(self.clone())
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
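    /// Try reading an immediate in memory; this is interesting particularly for `ScalarPair`.
    /// Returns `None` if the layout does not permit loading this as a value.
    ///
    /// This is an internal function; call `read_immediate` instead.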
    fn read_immediate_from_mplace_raw(
        &self,
        mplace: &MPlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, Option<ImmTy<'tcx, M::Provenance>>> {
        if mplace.layout.is_unsized() {
            // Don't touch unsized values.
            return interp_ok(None);
        }

        let Some(alloc) = self.get_place_alloc(mplace)? else {
            // zero-sized types can be left uninit
            return interp_ok(Some(ImmTy::uninit(mplace.layout)));
        };

        // It may seem like all types with `Scalar` or `ScalarPair` ABI are fair game at this
        // point. However, `MaybeUninit<u64>` is considered a `Scalar` as far as its layout is
        // concerned -- and yet cannot be represented by an interpreter `Scalar`, since we have
        // to handle the case where some of the bytes are initialized and others are not.
        interp_ok(match mplace.layout.backend_repr {
            BackendRepr::Scalar(abi::Scalar::Initialized { value: s, .. }) => {
                let size = s.size(self);
                assert_eq!(size, mplace.layout.size, "abi::Scalar size does not match layout size");
                let scalar = alloc.read_scalar(
                    alloc_range(Size::ZERO, size),
                    /*read_provenance*/ matches!(s, abi::Primitive::Pointer(_)),
                )?;
                Some(ImmTy::from_scalar(scalar, mplace.layout))
            }
            BackendRepr::ScalarPair(
                abi::Scalar::Initialized { value: a, .. },
                abi::Scalar::Initialized { value: b, .. },
            ) => {
                let (a_size, b_size) = (a.size(self), b.size(self));
                let b_offset = a_size.align_to(b.align(self).abi);
                assert!(b_offset.bytes() > 0); // `offset_` relies on the offset to tell the fields apart
                let a_val = alloc.read_scalar(
                    alloc_range(Size::ZERO, a_size),
                    /*read_provenance*/ matches!(a, abi::Primitive::Pointer(_)),
                )?;
                let b_val = alloc.read_scalar(
                    alloc_range(b_offset, b_size),
                    /*read_provenance*/ matches!(b, abi::Primitive::Pointer(_)),
                )?;
                Some(ImmTy::from_immediate(Immediate::ScalarPair(a_val, b_val), mplace.layout))
            }
            _ => {
                // Neither a scalar nor scalar pair.
                None
            }
        })
    }

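    /// Try returning an immediate for the operand. If the layout does not permit loading this as
    /// an immediate, return where in memory we can find the data.
    /// Note that for a given layout, this operation will either always return Left or always
    /// return Right: whether the value can be an immediate depends only on its layout, not on
    /// the data currently stored there.
    ///
    /// This is an internal function that should not usually be used; call `read_immediate`
    /// instead. ConstProp needs it, though.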
    pub fn read_immediate_raw(
        &self,
        src: &impl Projectable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, Either<MPlaceTy<'tcx, M::Provenance>, ImmTy<'tcx, M::Provenance>>> {
        interp_ok(match src.to_op(self)?.as_mplace_or_imm() {
            Left(ref mplace) => {
                if let Some(val) = self.read_immediate_from_mplace_raw(mplace)? {
                    Right(val)
                } else {
                    Left(mplace.clone())
                }
            }
            Right(val) => Right(val),
        })
    }

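    /// Read an immediate from a place, asserting that that is possible with the given layout.
    ///
    /// If this succeeds, the `ImmTy` is never `Uninit`.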
    #[inline(always)]
    pub fn read_immediate(
        &self,
        op: &impl Projectable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        if !matches!(
            op.layout().backend_repr,
            BackendRepr::Scalar(abi::Scalar::Initialized { .. })
                | BackendRepr::ScalarPair(
                    abi::Scalar::Initialized { .. },
                    abi::Scalar::Initialized { .. }
                )
        ) {
            span_bug!(self.cur_span(), "primitive read not possible for type: {}", op.layout().ty);
        }
        let imm = self.read_immediate_raw(op)?.right().unwrap();
        if matches!(*imm, Immediate::Uninit) {
            throw_ub!(InvalidUninitBytes(None));
        }
        interp_ok(imm)
    }

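    /// Read a scalar from a place.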
    pub fn read_scalar(
        &self,
        op: &impl Projectable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
        interp_ok(self.read_immediate(op)?.to_scalar())
    }

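    /// Read a pointer from a place.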
    pub fn read_pointer(
        &self,
        op: &impl Projectable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, Pointer<Option<M::Provenance>>> {
        self.read_scalar(op)?.to_pointer(self)
    }
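
    /// Read a pointer-sized unsigned integer from a place.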
    pub fn read_target_usize(
        &self,
        op: &impl Projectable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, u64> {
        self.read_scalar(op)?.to_target_usize(self)
    }
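
    /// Read a pointer-sized signed integer from a place.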
699 pub fn read_target_isize(
701 &self,
702 op: &impl Projectable<'tcx, M::Provenance>,
703 ) -> InterpResult<'tcx, i64> {
704 self.read_scalar(op)?.to_target_isize(self)
705 }
706
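    /// Turn the wide MPlace into a string (must already be dereferenced).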
    pub fn read_str(&self, mplace: &MPlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx, &str> {
        let len = mplace.len(self)?;
        let bytes = self.read_bytes_ptr_strip_provenance(mplace.ptr(), Size::from_bytes(len))?;
        let s = std::str::from_utf8(bytes).map_err(|err| err_ub!(InvalidStr(err)))?;
        interp_ok(s)
    }

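    /// Read from a local of the current frame. Convenience wrapper around
    /// `local_at_frame_to_op`.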
    pub fn local_to_op(
        &self,
        local: mir::Local,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        self.local_at_frame_to_op(self.frame(), local, layout)
    }

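    /// Read from a local of a given frame. Will not access memory; for memory-backed locals,
    /// an indirect `Operand` is returned instead.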
    pub fn local_at_frame_to_op(
        &self,
        frame: &Frame<'tcx, M::Provenance, M::FrameExtra>,
        local: mir::Local,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        let layout = self.layout_of_local(frame, local, layout)?;
        let op = *frame.locals[local].access()?;
        if matches!(op, Operand::Immediate(_)) {
            // Immediates can only exist for sized types.
            assert!(!layout.is_unsized());
        }
        M::after_local_read(self, frame, local)?;
        interp_ok(OpTy { op, layout })
    }

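    /// Every place can be read from, so we can turn them into an operand.
    /// This will definitely return `Indirect` if the place is a `Ptr`, i.e., this
    /// will never actually read from memory.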
    pub fn place_to_op(
        &self,
        place: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        match place.as_mplace_or_local() {
            Left(mplace) => interp_ok(mplace.into()),
            Right((local, offset, locals_addr, _)) => {
                debug_assert!(place.layout.is_sized()); // only sized locals can ever be `Place::Local`
                debug_assert_eq!(locals_addr, self.frame().locals_addr());
                let base = self.local_to_op(local, None)?;
                interp_ok(match offset {
                    Some(offset) => base.offset(offset, place.layout, self)?,
                    None => {
                        // In the common case there is no projection at all.
                        debug_assert_eq!(place.layout, base.layout);
                        base
                    }
                })
            }
        }
    }

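    /// Evaluate a place with the goal of reading from it. This lets us sometimes
    /// avoid allocations.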
    pub fn eval_place_to_op(
        &self,
        mir_place: mir::Place<'tcx>,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        let _trace = enter_trace_span!(
            M,
            step::eval_place_to_op,
            ?mir_place,
            tracing_separate_thread = Empty
        );

        // Do not use the layout passed in as argument if the base we are looking at
        // here is not the entire place.
        let layout = if mir_place.projection.is_empty() { layout } else { None };

        let mut op = self.local_to_op(mir_place.local, layout)?;
        for elem in mir_place.projection.iter() {
            op = self.project(&op, elem)?
        }

        trace!("eval_place_to_op: got {:?}", op);
        // Sanity-check the type we ended up with.
        if cfg!(debug_assertions) {
            let normalized_place_ty = self
                .instantiate_from_current_frame_and_normalize_erasing_regions(
                    mir_place.ty(&self.frame().body.local_decls, *self.tcx).ty,
                )?;
            if !mir_assign_valid_types(
                *self.tcx,
                self.typing_env(),
                self.layout_of(normalized_place_ty)?,
                op.layout,
            ) {
                span_bug!(
                    self.cur_span(),
                    "eval_place of a MIR place with type {} produced an interpreter operand with type {}",
                    normalized_place_ty,
                    op.layout.ty,
                )
            }
        }
        interp_ok(op)
    }

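    /// Evaluate the operand, returning a place where you can then find the data.
    /// If you already know the layout, you can save two table lookups
    /// by passing it in here.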
    #[inline]
    pub fn eval_operand(
        &self,
        mir_op: &mir::Operand<'tcx>,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        let _trace =
            enter_trace_span!(M, step::eval_operand, ?mir_op, tracing_separate_thread = Empty);

        use rustc_middle::mir::Operand::*;
        let op = match mir_op {
            &Copy(place) | &Move(place) => self.eval_place_to_op(place, layout)?,

            Constant(constant) => {
                let c = self.instantiate_from_current_frame_and_normalize_erasing_regions(
                    constant.const_,
                )?;

                // This can still fail, e.g. during ConstProp (where not every constant is
                // guaranteed to evaluate) or for promoteds in `const`/`static` initializer
                // bodies that fail at CTFE time.
                self.eval_mir_constant(&c, constant.span, layout)?
            }
        };
        trace!("{:?}: {:?}", mir_op, op);
        interp_ok(op)
    }

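    /// Turn a `mir::ConstValue` into an interpreter operand of the given type and layout.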
    pub(crate) fn const_val_to_op(
        &self,
        val_val: mir::ConstValue,
        ty: Ty<'tcx>,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        // Adjust any pointers in scalars so they are valid in this interpreter instance.
        let adjust_scalar = |scalar| -> InterpResult<'tcx, _> {
            interp_ok(match scalar {
                Scalar::Ptr(ptr, size) => Scalar::Ptr(self.global_root_pointer(ptr)?, size),
                Scalar::Int(int) => Scalar::Int(int),
            })
        };
        let layout =
            from_known_layout(self.tcx, self.typing_env(), layout, || self.layout_of(ty).into())?;
        let imm = match val_val {
            mir::ConstValue::Indirect { alloc_id, offset } => {
                // This is const data, no mutation allowed.
                let ptr = self.global_root_pointer(Pointer::new(
                    CtfeProvenance::from(alloc_id).as_immutable(),
                    offset,
                ))?;
                return interp_ok(self.ptr_to_mplace(ptr.into(), layout).into());
            }
            mir::ConstValue::Scalar(x) => adjust_scalar(x)?.into(),
            mir::ConstValue::ZeroSized => Immediate::Uninit,
            mir::ConstValue::Slice { alloc_id, meta } => {
                // This is const data, no mutation allowed.
                let ptr = Pointer::new(CtfeProvenance::from(alloc_id).as_immutable(), Size::ZERO);
                Immediate::new_slice(self.global_root_pointer(ptr)?.into(), meta, self)
            }
        };
        interp_ok(OpTy { op: Operand::Immediate(imm), layout })
    }
}

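// Some nodes are used a lot. Make sure they don't unintentionally get bigger.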
#[cfg(target_pointer_width = "64")]
mod size_asserts {
    use rustc_data_structures::static_assert_size;

    use super::*;
    static_assert_size!(ImmTy<'_>, 64);
    static_assert_size!(Immediate, 48);
    static_assert_size!(OpTy<'_>, 72);
    static_assert_size!(Operand, 56);
}