rustc_mir_transform/
gvn.rs

//! Global value numbering.
//!
//! MIR may contain repeated and/or redundant computations. The objective of this pass is to detect
//! such redundancies and re-use the already-computed result when possible.
//!
//! We traverse all assignments `x = rvalue` and operands.
//!
//! For each SSA one, we compute a symbolic representation of the value that is assigned to the
//! SSA local. This symbolic representation is defined by the `Value` enum. Each produced instance
//! of `Value` is interned as a `VnIndex`, which allows us to cheaply detect identical values.
//!
//! From those assignments, we construct a mapping `VnIndex -> Vec<(Local, Location)>` of available
//! values, the locals in which they are stored, and the assignment location.
//!
//! For each non-SSA one, we compute the `VnIndex` of the rvalue. If this `VnIndex` is associated
//! with a constant, we replace the rvalue/operand by that constant. Otherwise, if there is an SSA
//! local `y` associated with this `VnIndex`, and if its definition location strictly dominates
//! the assignment to `x`, we replace the assignment by `x = y`.
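//!
//! As an illustrative sketch (not actual output of the pass), the replacement looks like:
//! ```ignore (MIR)
//! _a = add(_x, 3) // `_a` is SSA, the rvalue gets VnIndex i
//! // some MIR
//! _b = add(_x, 3) // `_b` is not SSA, same rvalue, also VnIndex i
//! // since the definition of `_a` strictly dominates this assignment, it becomes
//! _b = _a
//! ```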
//!
//! Opportunistically, this pass also simplifies some `Rvalue`s based on the accumulated knowledge.
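//!
//! For instance (an illustrative sketch of simplifications this pass performs):
//! ```ignore (MIR)
//! _a = Not(Not(_x))        // simplified to `_a = _x`
//! _b = Sub(_y, _y)         // simplified to `_b = const 0`
//! _c = BitAnd(_z, const 0) // simplified to `_c = const 0`
//! ```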
//!
//! # Operational semantics
//!
//! Operationally, this pass attempts to prove bitwise equality between locals. Given this MIR:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _b = some other value // also has VnIndex i
//! ```
//!
//! We consider it to be replaceable by:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _c = some other value // also has VnIndex i
//! assume(_a bitwise equal to _c) // follows from having the same VnIndex
//! _b = _a // follows from the `assume`
//! ```
//!
//! Which is simplifiable to:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _b = _a
//! ```
//!
//! # Handling of references
//!
//! We handle references by assigning a different "provenance" index to each Ref/RawPtr rvalue.
//! This ensures that we do not spuriously merge borrows that should not be merged. Meanwhile, we
//! consider all the derefs of an immutable reference to a freeze type to give the same value:
//! ```ignore (MIR)
//! _a = *_b // _b is &Freeze
//! _c = *_b // replaced by _c = _a
//! ```
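//!
//! Conversely (an illustrative sketch), two borrows of the same place are never merged with each
//! other, since each `Ref`/`RawPtr` rvalue receives a fresh provenance index:
//! ```ignore (MIR)
//! _a = &_1 // provenance p
//! _c = &_1 // provenance q != p, so this is not rewritten to `_c = _a`
//! ```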
//!
//! # Determinism of constant propagation
//!
//! When registering a new `Value`, we attempt to opportunistically evaluate it as a constant.
//! The evaluated form is inserted into `evaluated` as an `OpTy`, or `None` if evaluation failed.
//!
//! The difficulty is non-deterministic evaluation of MIR constants. Some `Const`s can have
//! different runtime values each time they are evaluated. This is the case with `Const::Slice`,
//! which yields a new pointer each time it is evaluated, and with constants that contain a fn
//! pointer (an `AllocId` pointing to a `GlobalAlloc::Function`), which may point to a different
//! symbol in each codegen unit.
//!
//! Meanwhile, we want to be able to read indirect constants. For instance:
//! ```
//! static A: &'static &'static u8 = &&63;
//! fn foo() -> u8 {
//!     **A // We want to replace this by 63.
//! }
//! fn bar() -> u8 {
//!     b"abc"[1] // We want to replace this by b'b'.
//! }
//! ```
//!
//! The `Value::Constant` variant stores a possibly unevaluated constant. Evaluating that constant
//! may be non-deterministic. When that happens, we assign a disambiguator to ensure that we do not
//! merge the constants. See the `duplicate_slice` test in `gvn.rs`.
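//!
//! As an illustrative sketch, two mentions of the same slice constant may evaluate to different
//! pointers, so they receive different disambiguators and hence different `VnIndex`es:
//! ```ignore (MIR)
//! _a = const "foo" // Value::Constant { disambiguator: n, .. }
//! _b = const "foo" // Value::Constant { disambiguator: m, .. }, with m != n, so not merged
//! ```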
//!
//! Second, when writing constants back into MIR, we do not write `Const::Slice` or `Const`s
//! that contain `AllocId`s.

87use std::borrow::Cow;
88
89use either::Either;
90use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
91use rustc_const_eval::const_eval::DummyMachine;
92use rustc_const_eval::interpret::{
93    ImmTy, Immediate, InterpCx, MemPlaceMeta, MemoryKind, OpTy, Projectable, Scalar,
94    intern_const_alloc_for_constprop,
95};
96use rustc_data_structures::fx::{FxIndexSet, MutableValues};
97use rustc_data_structures::graph::dominators::Dominators;
98use rustc_hir::def::DefKind;
99use rustc_index::bit_set::DenseBitSet;
100use rustc_index::{IndexVec, newtype_index};
101use rustc_middle::bug;
102use rustc_middle::mir::interpret::GlobalAlloc;
103use rustc_middle::mir::visit::*;
104use rustc_middle::mir::*;
105use rustc_middle::ty::layout::HasTypingEnv;
106use rustc_middle::ty::{self, Ty, TyCtxt};
107use rustc_span::DUMMY_SP;
108use smallvec::SmallVec;
109use tracing::{debug, instrument, trace};
110
111use crate::ssa::SsaLocals;
112
113pub(super) struct GVN;
114
115impl<'tcx> crate::MirPass<'tcx> for GVN {
116    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
117        sess.mir_opt_level() >= 2
118    }
119
120    #[instrument(level = "trace", skip(self, tcx, body))]
121    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
122        debug!(def_id = ?body.source.def_id());
123
124        let typing_env = body.typing_env(tcx);
125        let ssa = SsaLocals::new(tcx, body, typing_env);
126        // Clone dominators because we need them while mutating the body.
127        let dominators = body.basic_blocks.dominators().clone();
128
129        let mut state = VnState::new(tcx, body, typing_env, &ssa, dominators, &body.local_decls);
130
131        for local in body.args_iter().filter(|&local| ssa.is_ssa(local)) {
132            let opaque = state.new_opaque(body.local_decls[local].ty);
133            state.assign(local, opaque);
134        }
135
136        let reverse_postorder = body.basic_blocks.reverse_postorder().to_vec();
137        for bb in reverse_postorder {
138            let data = &mut body.basic_blocks.as_mut_preserves_cfg()[bb];
139            state.visit_basic_block_data(bb, data);
140        }
141
        // For each local that is reused (`y` above), we remove its storage statements to avoid any
        // difficulty. Those locals are SSA, so they should be easy for LLVM to optimize even
        // without storage statements.
145        StorageRemover { tcx, reused_locals: state.reused_locals }.visit_body_preserves_cfg(body);
146    }
147
148    fn is_required(&self) -> bool {
149        false
150    }
151}
152
153newtype_index! {
154    struct VnIndex {}
155}
156
157#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
158enum AddressKind {
159    Ref(BorrowKind),
160    Address(RawPtrKind),
161}
162
163#[derive(Debug, PartialEq, Eq, Hash)]
164enum Value<'tcx> {
165    // Root values.
166    /// Used to represent values we know nothing about.
167    /// The `usize` is a counter incremented by `new_opaque`.
168    Opaque(usize),
169    /// Evaluated or unevaluated constant value.
170    Constant {
171        value: Const<'tcx>,
172        /// Some constants do not have a deterministic value. To avoid merging two instances of the
173        /// same `Const`, we assign them an additional integer index.
174        // `disambiguator` is 0 iff the constant is deterministic.
175        disambiguator: usize,
176    },
177    /// An aggregate value, either tuple/closure/struct/enum.
178    /// This does not contain unions, as we cannot reason with the value.
179    Aggregate(VariantIdx, Vec<VnIndex>),
180    /// A raw pointer aggregate built from a thin pointer and metadata.
181    RawPtr {
182        /// Thin pointer component. This is field 0 in MIR.
183        pointer: VnIndex,
184        /// Metadata component. This is field 1 in MIR.
185        metadata: VnIndex,
186    },
187    /// This corresponds to a `[value; count]` expression.
188    Repeat(VnIndex, ty::Const<'tcx>),
189    /// The address of a place.
190    Address {
191        place: Place<'tcx>,
192        kind: AddressKind,
193        /// Give each borrow and pointer a different provenance, so we don't merge them.
194        provenance: usize,
195    },
196
197    // Extractions.
198    /// This is the *value* obtained by projecting another value.
199    Projection(VnIndex, ProjectionElem<VnIndex, ()>),
200    /// Discriminant of the given value.
201    Discriminant(VnIndex),
202    /// Length of an array or slice.
203    Len(VnIndex),
204
205    // Operations.
206    NullaryOp(NullOp<'tcx>, Ty<'tcx>),
207    UnaryOp(UnOp, VnIndex),
208    BinaryOp(BinOp, VnIndex, VnIndex),
209    Cast {
210        kind: CastKind,
211        value: VnIndex,
212    },
213}
214
215struct VnState<'body, 'tcx> {
216    tcx: TyCtxt<'tcx>,
217    ecx: InterpCx<'tcx, DummyMachine>,
218    local_decls: &'body LocalDecls<'tcx>,
219    is_coroutine: bool,
220    /// Value stored in each local.
221    locals: IndexVec<Local, Option<VnIndex>>,
222    /// Locals that are assigned that value.
223    // This vector does not hold all the values of `VnIndex` that we create.
224    rev_locals: IndexVec<VnIndex, SmallVec<[Local; 1]>>,
225    values: FxIndexSet<(Value<'tcx>, Ty<'tcx>)>,
226    /// Values evaluated as constants if possible.
227    evaluated: IndexVec<VnIndex, Option<OpTy<'tcx>>>,
228    /// Counter to generate different values.
229    next_opaque: usize,
230    /// Cache the deref values.
231    derefs: Vec<VnIndex>,
232    ssa: &'body SsaLocals,
233    dominators: Dominators<BasicBlock>,
234    reused_locals: DenseBitSet<Local>,
235}
236
237impl<'body, 'tcx> VnState<'body, 'tcx> {
238    fn new(
239        tcx: TyCtxt<'tcx>,
240        body: &Body<'tcx>,
241        typing_env: ty::TypingEnv<'tcx>,
242        ssa: &'body SsaLocals,
243        dominators: Dominators<BasicBlock>,
244        local_decls: &'body LocalDecls<'tcx>,
245    ) -> Self {
246        // Compute a rough estimate of the number of values in the body from the number of
247        // statements. This is meant to reduce the number of allocations, but it's all right if
248        // we miss the exact amount. We estimate based on 2 values per statement (one in LHS and
249        // one in RHS) and 4 values per terminator (for call operands).
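        // For instance, a body with 3 blocks containing 10 statements in total would reserve
        // 2 * 10 + 4 * 3 = 32 value slots up front.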
250        let num_values =
251            2 * body.basic_blocks.iter().map(|bbdata| bbdata.statements.len()).sum::<usize>()
252                + 4 * body.basic_blocks.len();
253        VnState {
254            tcx,
255            ecx: InterpCx::new(tcx, DUMMY_SP, typing_env, DummyMachine),
256            local_decls,
257            is_coroutine: body.coroutine.is_some(),
258            locals: IndexVec::from_elem(None, local_decls),
259            rev_locals: IndexVec::with_capacity(num_values),
260            values: FxIndexSet::with_capacity_and_hasher(num_values, Default::default()),
261            evaluated: IndexVec::with_capacity(num_values),
262            next_opaque: 1,
263            derefs: Vec::new(),
264            ssa,
265            dominators,
266            reused_locals: DenseBitSet::new_empty(local_decls.len()),
267        }
268    }
269
270    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
271        self.ecx.typing_env()
272    }
273
274    #[instrument(level = "trace", skip(self), ret)]
275    fn insert(&mut self, ty: Ty<'tcx>, value: Value<'tcx>) -> VnIndex {
276        let (index, new) = self.values.insert_full((value, ty));
277        let index = VnIndex::from_usize(index);
278        if new {
279            // Grow `evaluated` and `rev_locals` here to amortize the allocations.
280            let evaluated = self.eval_to_const(index);
281            let _index = self.evaluated.push(evaluated);
282            debug_assert_eq!(index, _index);
283            let _index = self.rev_locals.push(SmallVec::new());
284            debug_assert_eq!(index, _index);
285        }
286        index
287    }
288
289    fn next_opaque(&mut self) -> usize {
290        let next_opaque = self.next_opaque;
291        self.next_opaque += 1;
292        next_opaque
293    }
294
295    /// Create a new `Value` for which we have no information at all, except that it is distinct
296    /// from all the others.
297    #[instrument(level = "trace", skip(self), ret)]
298    fn new_opaque(&mut self, ty: Ty<'tcx>) -> VnIndex {
299        let value = Value::Opaque(self.next_opaque());
300        self.insert(ty, value)
301    }
302
303    /// Create a new `Value::Address` distinct from all the others.
304    #[instrument(level = "trace", skip(self), ret)]
305    fn new_pointer(&mut self, place: Place<'tcx>, kind: AddressKind) -> VnIndex {
306        let pty = place.ty(self.local_decls, self.tcx).ty;
307        let ty = match kind {
308            AddressKind::Ref(bk) => {
309                Ty::new_ref(self.tcx, self.tcx.lifetimes.re_erased, pty, bk.to_mutbl_lossy())
310            }
311            AddressKind::Address(mutbl) => Ty::new_ptr(self.tcx, pty, mutbl.to_mutbl_lossy()),
312        };
313        let value = Value::Address { place, kind, provenance: self.next_opaque() };
314        self.insert(ty, value)
315    }
316
317    #[inline]
318    fn get(&self, index: VnIndex) -> &Value<'tcx> {
319        &self.values.get_index(index.as_usize()).unwrap().0
320    }
321
322    #[inline]
323    fn ty(&self, index: VnIndex) -> Ty<'tcx> {
324        self.values.get_index(index.as_usize()).unwrap().1
325    }
326
327    /// Record that `local` is assigned `value`. `local` must be SSA.
328    #[instrument(level = "trace", skip(self))]
329    fn assign(&mut self, local: Local, value: VnIndex) {
330        debug_assert!(self.ssa.is_ssa(local));
331        self.locals[local] = Some(value);
332        self.rev_locals[value].push(local);
333    }
334
335    fn insert_constant(&mut self, value: Const<'tcx>) -> VnIndex {
336        let disambiguator = if value.is_deterministic() {
337            // The constant is deterministic, no need to disambiguate.
338            0
339        } else {
340            // Multiple mentions of this constant will yield different values,
341            // so assign a different `disambiguator` to ensure they do not get the same `VnIndex`.
342            let disambiguator = self.next_opaque();
343            // `disambiguator: 0` means deterministic.
344            debug_assert_ne!(disambiguator, 0);
345            disambiguator
346        };
347        self.insert(value.ty(), Value::Constant { value, disambiguator })
348    }
349
350    fn insert_bool(&mut self, flag: bool) -> VnIndex {
351        // Booleans are deterministic.
352        let value = Const::from_bool(self.tcx, flag);
353        debug_assert!(value.is_deterministic());
354        self.insert(self.tcx.types.bool, Value::Constant { value, disambiguator: 0 })
355    }
356
357    fn insert_scalar(&mut self, ty: Ty<'tcx>, scalar: Scalar) -> VnIndex {
358        // Scalars are deterministic.
359        let value = Const::from_scalar(self.tcx, scalar, ty);
360        debug_assert!(value.is_deterministic());
361        self.insert(ty, Value::Constant { value, disambiguator: 0 })
362    }
363
364    fn insert_tuple(&mut self, ty: Ty<'tcx>, values: Vec<VnIndex>) -> VnIndex {
365        self.insert(ty, Value::Aggregate(VariantIdx::ZERO, values))
366    }
367
368    fn insert_deref(&mut self, ty: Ty<'tcx>, value: VnIndex) -> VnIndex {
369        let value = self.insert(ty, Value::Projection(value, ProjectionElem::Deref));
370        self.derefs.push(value);
371        value
372    }
373
374    fn invalidate_derefs(&mut self) {
375        for deref in std::mem::take(&mut self.derefs) {
376            let opaque = self.next_opaque();
377            self.values.get_index_mut2(deref.index()).unwrap().0 = Value::Opaque(opaque);
378        }
379    }
380
381    #[instrument(level = "trace", skip(self), ret)]
382    fn eval_to_const(&mut self, value: VnIndex) -> Option<OpTy<'tcx>> {
383        use Value::*;
384        let ty = self.ty(value);
385        // Avoid computing layouts inside a coroutine, as that can cause cycles.
386        let ty = if !self.is_coroutine || ty.is_scalar() {
387            self.ecx.layout_of(ty).ok()?
388        } else {
389            return None;
390        };
391        let op = match *self.get(value) {
392            _ if ty.is_zst() => ImmTy::uninit(ty).into(),
393
394            Opaque(_) => return None,
395            // Do not bother evaluating repeat expressions. This would uselessly consume memory.
396            Repeat(..) => return None,
397
398            Constant { ref value, disambiguator: _ } => {
399                self.ecx.eval_mir_constant(value, DUMMY_SP, None).discard_err()?
400            }
401            Aggregate(variant, ref fields) => {
402                let fields = fields
403                    .iter()
404                    .map(|&f| self.evaluated[f].as_ref())
405                    .collect::<Option<Vec<_>>>()?;
406                let variant = if ty.ty.is_enum() { Some(variant) } else { None };
407                if matches!(ty.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..))
408                {
409                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
410                    let variant_dest = if let Some(variant) = variant {
411                        self.ecx.project_downcast(&dest, variant).discard_err()?
412                    } else {
413                        dest.clone()
414                    };
415                    for (field_index, op) in fields.into_iter().enumerate() {
416                        let field_dest = self
417                            .ecx
418                            .project_field(&variant_dest, FieldIdx::from_usize(field_index))
419                            .discard_err()?;
420                        self.ecx.copy_op(op, &field_dest).discard_err()?;
421                    }
422                    self.ecx
423                        .write_discriminant(variant.unwrap_or(FIRST_VARIANT), &dest)
424                        .discard_err()?;
425                    self.ecx
426                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
427                        .discard_err()?;
428                    dest.into()
429                } else {
430                    return None;
431                }
432            }
433            RawPtr { pointer, metadata } => {
434                let pointer = self.evaluated[pointer].as_ref()?;
435                let metadata = self.evaluated[metadata].as_ref()?;
436
437                // Pointers don't have fields, so don't `project_field` them.
438                let data = self.ecx.read_pointer(pointer).discard_err()?;
439                let meta = if metadata.layout.is_zst() {
440                    MemPlaceMeta::None
441                } else {
442                    MemPlaceMeta::Meta(self.ecx.read_scalar(metadata).discard_err()?)
443                };
444                let ptr_imm = Immediate::new_pointer_with_meta(data, meta, &self.ecx);
445                ImmTy::from_immediate(ptr_imm, ty).into()
446            }
447
448            Projection(base, elem) => {
449                let base = self.evaluated[base].as_ref()?;
450                // `Index` by constants should have been replaced by `ConstantIndex` by
451                // `simplify_place_projection`.
452                let elem = elem.try_map(|_| None, |()| ty.ty)?;
453                self.ecx.project(base, elem).discard_err()?
454            }
455            Address { place, kind: _, provenance: _ } => {
456                if !place.is_indirect_first_projection() {
457                    return None;
458                }
459                let local = self.locals[place.local]?;
460                let pointer = self.evaluated[local].as_ref()?;
461                let mut mplace = self.ecx.deref_pointer(pointer).discard_err()?;
462                for elem in place.projection.iter().skip(1) {
463                    // `Index` by constants should have been replaced by `ConstantIndex` by
464                    // `simplify_place_projection`.
465                    let elem = elem.try_map(|_| None, |ty| ty)?;
466                    mplace = self.ecx.project(&mplace, elem).discard_err()?;
467                }
468                let pointer = mplace.to_ref(&self.ecx);
469                ImmTy::from_immediate(pointer, ty).into()
470            }
471
472            Discriminant(base) => {
473                let base = self.evaluated[base].as_ref()?;
474                let variant = self.ecx.read_discriminant(base).discard_err()?;
475                let discr_value =
476                    self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?;
477                discr_value.into()
478            }
479            Len(slice) => {
480                let slice = self.evaluated[slice].as_ref()?;
481                let len = slice.len(&self.ecx).discard_err()?;
482                ImmTy::from_uint(len, ty).into()
483            }
484            NullaryOp(null_op, arg_ty) => {
485                let arg_layout = self.ecx.layout_of(arg_ty).ok()?;
486                if let NullOp::SizeOf | NullOp::AlignOf = null_op
487                    && arg_layout.is_unsized()
488                {
489                    return None;
490                }
491                let val = match null_op {
492                    NullOp::SizeOf => arg_layout.size.bytes(),
493                    NullOp::AlignOf => arg_layout.align.abi.bytes(),
494                    NullOp::OffsetOf(fields) => self
495                        .ecx
496                        .tcx
497                        .offset_of_subfield(self.typing_env(), arg_layout, fields.iter())
498                        .bytes(),
499                    NullOp::UbChecks => return None,
500                    NullOp::ContractChecks => return None,
501                };
502                ImmTy::from_uint(val, ty).into()
503            }
504            UnaryOp(un_op, operand) => {
505                let operand = self.evaluated[operand].as_ref()?;
506                let operand = self.ecx.read_immediate(operand).discard_err()?;
507                let val = self.ecx.unary_op(un_op, &operand).discard_err()?;
508                val.into()
509            }
510            BinaryOp(bin_op, lhs, rhs) => {
511                let lhs = self.evaluated[lhs].as_ref()?;
512                let lhs = self.ecx.read_immediate(lhs).discard_err()?;
513                let rhs = self.evaluated[rhs].as_ref()?;
514                let rhs = self.ecx.read_immediate(rhs).discard_err()?;
515                let val = self.ecx.binary_op(bin_op, &lhs, &rhs).discard_err()?;
516                val.into()
517            }
518            Cast { kind, value } => match kind {
519                CastKind::IntToInt | CastKind::IntToFloat => {
520                    let value = self.evaluated[value].as_ref()?;
521                    let value = self.ecx.read_immediate(value).discard_err()?;
522                    let res = self.ecx.int_to_int_or_float(&value, ty).discard_err()?;
523                    res.into()
524                }
525                CastKind::FloatToFloat | CastKind::FloatToInt => {
526                    let value = self.evaluated[value].as_ref()?;
527                    let value = self.ecx.read_immediate(value).discard_err()?;
528                    let res = self.ecx.float_to_float_or_int(&value, ty).discard_err()?;
529                    res.into()
530                }
531                CastKind::Transmute => {
532                    let value = self.evaluated[value].as_ref()?;
533                    // `offset` for immediates generally only supports projections that match the
534                    // type of the immediate. However, as a HACK, we exploit that it can also do
535                    // limited transmutes: it only works between types with the same layout, and
536                    // cannot transmute pointers to integers.
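                    // For instance, a `u32` -> `f32` bit-transmute can be handled through `offset`
                    // below, but a `*const T` -> `usize` transmute is rejected because the source
                    // scalar is a pointer.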
537                    if value.as_mplace_or_imm().is_right() {
538                        let can_transmute = match (value.layout.backend_repr, ty.backend_repr) {
539                            (BackendRepr::Scalar(s1), BackendRepr::Scalar(s2)) => {
540                                s1.size(&self.ecx) == s2.size(&self.ecx)
541                                    && !matches!(s1.primitive(), Primitive::Pointer(..))
542                            }
543                            (BackendRepr::ScalarPair(a1, b1), BackendRepr::ScalarPair(a2, b2)) => {
544                                a1.size(&self.ecx) == a2.size(&self.ecx) &&
545                                b1.size(&self.ecx) == b2.size(&self.ecx) &&
546                                // The alignment of the second component determines its offset, so that also needs to match.
547                                b1.align(&self.ecx) == b2.align(&self.ecx) &&
548                                // None of the inputs may be a pointer.
549                                !matches!(a1.primitive(), Primitive::Pointer(..))
550                                    && !matches!(b1.primitive(), Primitive::Pointer(..))
551                            }
552                            _ => false,
553                        };
554                        if !can_transmute {
555                            return None;
556                        }
557                    }
558                    value.offset(Size::ZERO, ty, &self.ecx).discard_err()?
559                }
560                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) => {
561                    let src = self.evaluated[value].as_ref()?;
562                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
563                    self.ecx.unsize_into(src, ty, &dest).discard_err()?;
564                    self.ecx
565                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
566                        .discard_err()?;
567                    dest.into()
568                }
569                CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
570                    let src = self.evaluated[value].as_ref()?;
571                    let src = self.ecx.read_immediate(src).discard_err()?;
572                    let ret = self.ecx.ptr_to_ptr(&src, ty).discard_err()?;
573                    ret.into()
574                }
575                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::UnsafeFnPointer, _) => {
576                    let src = self.evaluated[value].as_ref()?;
577                    let src = self.ecx.read_immediate(src).discard_err()?;
578                    ImmTy::from_immediate(*src, ty).into()
579                }
580                _ => return None,
581            },
582        };
583        Some(op)
584    }
585
586    fn project(
587        &mut self,
588        place_ty: PlaceTy<'tcx>,
589        value: VnIndex,
590        proj: PlaceElem<'tcx>,
591        from_non_ssa_index: &mut bool,
592    ) -> Option<(PlaceTy<'tcx>, VnIndex)> {
593        let projection_ty = place_ty.projection_ty(self.tcx, proj);
594        let proj = match proj {
595            ProjectionElem::Deref => {
596                if let Some(Mutability::Not) = place_ty.ty.ref_mutability()
597                    && projection_ty.ty.is_freeze(self.tcx, self.typing_env())
598                {
599                    // An immutable borrow `_x` always points to the same value for the
600                    // lifetime of the borrow, so we can merge all instances of `*_x`.
601                    return Some((projection_ty, self.insert_deref(projection_ty.ty, value)));
602                } else {
603                    return None;
604                }
605            }
606            ProjectionElem::Downcast(name, index) => ProjectionElem::Downcast(name, index),
607            ProjectionElem::Field(f, _) => {
608                if let Value::Aggregate(_, fields) = self.get(value) {
609                    return Some((projection_ty, fields[f.as_usize()]));
610                } else if let Value::Projection(outer_value, ProjectionElem::Downcast(_, read_variant)) = self.get(value)
611                    && let Value::Aggregate(written_variant, fields) = self.get(*outer_value)
612                    // This pass is not aware of control-flow, so we do not know whether the
613                    // replacement we are doing is actually reachable. We could be in any arm of
614                    // ```
615                    // match Some(x) {
616                    //     Some(y) => /* stuff */,
617                    //     None => /* other */,
618                    // }
619                    // ```
620                    //
621                    // In surface rust, the current statement would be unreachable.
622                    //
623                    // However, from the reference chapter on enums and RFC 2195,
624                    // accessing the wrong variant is not UB if the enum has repr.
625                    // So it's not impossible for a series of MIR opts to generate
626                    // a downcast to an inactive variant.
627                    && written_variant == read_variant
628                {
629                    return Some((projection_ty, fields[f.as_usize()]));
630                }
631                ProjectionElem::Field(f, ())
632            }
633            ProjectionElem::Index(idx) => {
634                if let Value::Repeat(inner, _) = self.get(value) {
635                    *from_non_ssa_index |= self.locals[idx].is_none();
636                    return Some((projection_ty, *inner));
637                }
638                let idx = self.locals[idx]?;
639                ProjectionElem::Index(idx)
640            }
641            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
642                match self.get(value) {
643                    Value::Repeat(inner, _) => {
644                        return Some((projection_ty, *inner));
645                    }
646                    Value::Aggregate(_, operands) => {
647                        let offset = if from_end {
648                            operands.len() - offset as usize
649                        } else {
650                            offset as usize
651                        };
652                        let value = operands.get(offset).copied()?;
653                        return Some((projection_ty, value));
654                    }
655                    _ => {}
656                };
657                ProjectionElem::ConstantIndex { offset, min_length, from_end }
658            }
659            ProjectionElem::Subslice { from, to, from_end } => {
660                ProjectionElem::Subslice { from, to, from_end }
661            }
662            ProjectionElem::OpaqueCast(_) => ProjectionElem::OpaqueCast(()),
663            ProjectionElem::Subtype(_) => ProjectionElem::Subtype(()),
664            ProjectionElem::UnwrapUnsafeBinder(_) => ProjectionElem::UnwrapUnsafeBinder(()),
665        };
666
667        let value = self.insert(projection_ty.ty, Value::Projection(value, proj));
668        Some((projection_ty, value))
669    }
670
671    /// Simplify the projection chain if we know better.
672    #[instrument(level = "trace", skip(self))]
673    fn simplify_place_projection(&mut self, place: &mut Place<'tcx>, location: Location) {
        // If the projection is indirect, we treat the local as a value, so we can replace it with
        // another local.
676        if place.is_indirect_first_projection()
677            && let Some(base) = self.locals[place.local]
678            && let Some(new_local) = self.try_as_local(base, location)
679            && place.local != new_local
680        {
681            place.local = new_local;
682            self.reused_locals.insert(new_local);
683        }
684
685        let mut projection = Cow::Borrowed(&place.projection[..]);
686
687        for i in 0..projection.len() {
688            let elem = projection[i];
689            if let ProjectionElem::Index(idx_local) = elem
690                && let Some(idx) = self.locals[idx_local]
691            {
692                if let Some(offset) = self.evaluated[idx].as_ref()
693                    && let Some(offset) = self.ecx.read_target_usize(offset).discard_err()
694                    && let Some(min_length) = offset.checked_add(1)
695                {
696                    projection.to_mut()[i] =
697                        ProjectionElem::ConstantIndex { offset, min_length, from_end: false };
698                } else if let Some(new_idx_local) = self.try_as_local(idx, location)
699                    && idx_local != new_idx_local
700                {
701                    projection.to_mut()[i] = ProjectionElem::Index(new_idx_local);
702                    self.reused_locals.insert(new_idx_local);
703                }
704            }
705        }
706
707        if projection.is_owned() {
708            place.projection = self.tcx.mk_place_elems(&projection);
709        }
710
711        trace!(?place);
712    }
713
714    /// Represent the *value* which would be read from `place`, and point `place` to a preexisting
715    /// place with the same value (if that already exists).
716    #[instrument(level = "trace", skip(self), ret)]
717    fn simplify_place_value(
718        &mut self,
719        place: &mut Place<'tcx>,
720        location: Location,
721    ) -> Option<VnIndex> {
722        self.simplify_place_projection(place, location);
723
724        // Invariant: `place` and `place_ref` point to the same value, even if they point to
725        // different memory locations.
726        let mut place_ref = place.as_ref();
727
        // Invariant: `value` holds the value up to the `index`th projection, excluded.
729        let mut value = self.locals[place.local]?;
730        // Invariant: `value` has type `place_ty`, with optional downcast variant if needed.
731        let mut place_ty = PlaceTy::from_ty(self.local_decls[place.local].ty);
732        let mut from_non_ssa_index = false;
733        for (index, proj) in place.projection.iter().enumerate() {
734            if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
735                && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
736                && let AddressKind::Ref(BorrowKind::Shared) = kind
737                && let Some(v) = self.simplify_place_value(&mut pointee, location)
738            {
739                value = v;
740                // `pointee` holds a `Place`, so `ProjectionElem::Index` holds a `Local`.
741                // That local is SSA, but we otherwise have no guarantee on that local's value at
742                // the current location compared to its value where `pointee` was borrowed.
743                if pointee.projection.iter().all(|elem| !matches!(elem, ProjectionElem::Index(_))) {
744                    place_ref =
745                        pointee.project_deeper(&place.projection[index..], self.tcx).as_ref();
746                }
747            }
748            if let Some(local) = self.try_as_local(value, location) {
            // Both `local` and `Place { local: place.local, projection: projection[..index] }`
            // hold the same value. Therefore, the following place holds the value in the original
            // `place`.
752                place_ref = PlaceRef { local, projection: &place.projection[index..] };
753            }
754
755            (place_ty, value) = self.project(place_ty, value, proj, &mut from_non_ssa_index)?;
756        }
757
758        if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
759            && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
760            && let AddressKind::Ref(BorrowKind::Shared) = kind
761            && let Some(v) = self.simplify_place_value(&mut pointee, location)
762        {
763            value = v;
764            // `pointee` holds a `Place`, so `ProjectionElem::Index` holds a `Local`.
765            // That local is SSA, but we otherwise have no guarantee on that local's value at
766            // the current location compared to its value where `pointee` was borrowed.
767            if pointee.projection.iter().all(|elem| !matches!(elem, ProjectionElem::Index(_))) {
768                place_ref = pointee.project_deeper(&[], self.tcx).as_ref();
769            }
770        }
771        if let Some(new_local) = self.try_as_local(value, location) {
772            place_ref = PlaceRef { local: new_local, projection: &[] };
773        } else if from_non_ssa_index {
774            // If access to non-SSA locals is unavoidable, bail out.
775            return None;
776        }
777
778        if place_ref.local != place.local || place_ref.projection.len() < place.projection.len() {
779            // By the invariant on `place_ref`.
780            *place = place_ref.project_deeper(&[], self.tcx);
781            self.reused_locals.insert(place_ref.local);
782        }
783
784        Some(value)
785    }
786
787    #[instrument(level = "trace", skip(self), ret)]
788    fn simplify_operand(
789        &mut self,
790        operand: &mut Operand<'tcx>,
791        location: Location,
792    ) -> Option<VnIndex> {
793        match *operand {
794            Operand::Constant(ref constant) => Some(self.insert_constant(constant.const_)),
795            Operand::Copy(ref mut place) | Operand::Move(ref mut place) => {
796                let value = self.simplify_place_value(place, location)?;
797                if let Some(const_) = self.try_as_constant(value) {
798                    *operand = Operand::Constant(Box::new(const_));
799                }
800                Some(value)
801            }
802        }
803    }
804
805    #[instrument(level = "trace", skip(self), ret)]
806    fn simplify_rvalue(
807        &mut self,
808        lhs: &Place<'tcx>,
809        rvalue: &mut Rvalue<'tcx>,
810        location: Location,
811    ) -> Option<VnIndex> {
812        let value = match *rvalue {
813            // Forward values.
814            Rvalue::Use(ref mut operand) => return self.simplify_operand(operand, location),
815            Rvalue::CopyForDeref(place) => {
816                let mut operand = Operand::Copy(place);
817                let val = self.simplify_operand(&mut operand, location);
818                *rvalue = Rvalue::Use(operand);
819                return val;
820            }
821
822            // Roots.
823            Rvalue::Repeat(ref mut op, amount) => {
824                let op = self.simplify_operand(op, location)?;
825                Value::Repeat(op, amount)
826            }
827            Rvalue::NullaryOp(op, ty) => Value::NullaryOp(op, ty),
828            Rvalue::Aggregate(..) => return self.simplify_aggregate(lhs, rvalue, location),
829            Rvalue::Ref(_, borrow_kind, ref mut place) => {
830                self.simplify_place_projection(place, location);
831                return Some(self.new_pointer(*place, AddressKind::Ref(borrow_kind)));
832            }
833            Rvalue::RawPtr(mutbl, ref mut place) => {
834                self.simplify_place_projection(place, location);
835                return Some(self.new_pointer(*place, AddressKind::Address(mutbl)));
836            }
837            Rvalue::WrapUnsafeBinder(ref mut op, _) => {
838                let value = self.simplify_operand(op, location)?;
839                Value::Cast { kind: CastKind::Transmute, value }
840            }
841
842            // Operations.
843            Rvalue::Len(ref mut place) => return self.simplify_len(place, location),
844            Rvalue::Cast(ref mut kind, ref mut value, to) => {
845                return self.simplify_cast(kind, value, to, location);
846            }
847            Rvalue::BinaryOp(op, box (ref mut lhs, ref mut rhs)) => {
848                return self.simplify_binary(op, lhs, rhs, location);
849            }
850            Rvalue::UnaryOp(op, ref mut arg_op) => {
851                return self.simplify_unary(op, arg_op, location);
852            }
853            Rvalue::Discriminant(ref mut place) => {
854                let place = self.simplify_place_value(place, location)?;
855                if let Some(discr) = self.simplify_discriminant(place) {
856                    return Some(discr);
857                }
858                Value::Discriminant(place)
859            }
860
861            // Unsupported values.
862            Rvalue::ThreadLocalRef(..) | Rvalue::ShallowInitBox(..) => return None,
863        };
864        let ty = rvalue.ty(self.local_decls, self.tcx);
865        Some(self.insert(ty, value))
866    }
867
868    fn simplify_discriminant(&mut self, place: VnIndex) -> Option<VnIndex> {
869        let enum_ty = self.ty(place);
870        if enum_ty.is_enum()
871            && let Value::Aggregate(variant, _) = *self.get(place)
872        {
873            let discr = self.ecx.discriminant_for_variant(enum_ty, variant).discard_err()?;
874            return Some(self.insert_scalar(discr.layout.ty, discr.to_scalar()));
875        }
876
877        None
878    }
879
880    fn try_as_place_elem(
881        &mut self,
882        ty: Ty<'tcx>,
883        proj: ProjectionElem<VnIndex, ()>,
884        loc: Location,
885    ) -> Option<PlaceElem<'tcx>> {
886        proj.try_map(
887            |value| {
888                let local = self.try_as_local(value, loc)?;
889                self.reused_locals.insert(local);
890                Some(local)
891            },
892            |()| ty,
893        )
894    }
895
896    fn simplify_aggregate_to_copy(
897        &mut self,
898        lhs: &Place<'tcx>,
899        rvalue: &mut Rvalue<'tcx>,
900        location: Location,
901        fields: &[VnIndex],
902        variant_index: VariantIdx,
903    ) -> Option<VnIndex> {
904        let Some(&first_field) = fields.first() else {
905            return None;
906        };
907        let Value::Projection(copy_from_value, _) = *self.get(first_field) else {
908            return None;
909        };
910        // All fields must correspond one-to-one and come from the same aggregate value.
911        if fields.iter().enumerate().any(|(index, &v)| {
912            if let Value::Projection(pointer, ProjectionElem::Field(from_index, _)) = *self.get(v)
913                && copy_from_value == pointer
914                && from_index.index() == index
915            {
916                return false;
917            }
918            true
919        }) {
920            return None;
921        }
922
923        let mut copy_from_local_value = copy_from_value;
924        if let Value::Projection(pointer, proj) = *self.get(copy_from_value)
925            && let ProjectionElem::Downcast(_, read_variant) = proj
926        {
927            if variant_index == read_variant {
928                // When copying a variant, there is no need to downcast.
929                copy_from_local_value = pointer;
930            } else {
931                // The copied variant must be identical.
932                return None;
933            }
934        }
935
936        // Allow introducing places with non-constant offsets, as those are still better than
937        // reconstructing an aggregate.
938        if self.ty(copy_from_local_value) == rvalue.ty(self.local_decls, self.tcx)
939            && let Some(place) = self.try_as_place(copy_from_local_value, location, true)
940        {
941            // Avoid creating `*a = copy (*b)`, as they might be aliases resulting in overlapping assignments.
942            // FIXME: This also avoids any kind of projection, not just derefs. We can add allowed projections.
943            if lhs.as_local().is_some() {
944                self.reused_locals.insert(place.local);
945                *rvalue = Rvalue::Use(Operand::Copy(place));
946            }
947            return Some(copy_from_local_value);
948        }
949
950        None
951    }
952
953    fn simplify_aggregate(
954        &mut self,
955        lhs: &Place<'tcx>,
956        rvalue: &mut Rvalue<'tcx>,
957        location: Location,
958    ) -> Option<VnIndex> {
959        let tcx = self.tcx;
960        let ty = rvalue.ty(self.local_decls, tcx);
961
962        let Rvalue::Aggregate(box ref kind, ref mut field_ops) = *rvalue else { bug!() };
963
964        if field_ops.is_empty() {
965            let is_zst = match *kind {
966                AggregateKind::Array(..)
967                | AggregateKind::Tuple
968                | AggregateKind::Closure(..)
969                | AggregateKind::CoroutineClosure(..) => true,
970                // Only enums can be non-ZST.
971                AggregateKind::Adt(did, ..) => tcx.def_kind(did) != DefKind::Enum,
972                // Coroutines are never ZST, as they at least contain the implicit states.
973                AggregateKind::Coroutine(..) => false,
974                AggregateKind::RawPtr(..) => bug!("MIR for RawPtr aggregate must have 2 fields"),
975            };
976
977            if is_zst {
978                return Some(self.insert_constant(Const::zero_sized(ty)));
979            }
980        }
981
982        let fields: Vec<_> = field_ops
983            .iter_mut()
984            .map(|op| {
985                self.simplify_operand(op, location)
986                    .unwrap_or_else(|| self.new_opaque(op.ty(self.local_decls, self.tcx)))
987            })
988            .collect();
989
990        let variant_index = match *kind {
991            AggregateKind::Array(..) | AggregateKind::Tuple => {
992                assert!(!field_ops.is_empty());
993                FIRST_VARIANT
994            }
995            AggregateKind::Closure(..)
996            | AggregateKind::CoroutineClosure(..)
997            | AggregateKind::Coroutine(..) => FIRST_VARIANT,
998            AggregateKind::Adt(_, variant_index, _, _, None) => variant_index,
999            // Do not track unions.
1000            AggregateKind::Adt(_, _, _, _, Some(_)) => return None,
1001            AggregateKind::RawPtr(..) => {
1002                assert_eq!(field_ops.len(), 2);
1003                let [mut pointer, metadata] = fields.try_into().unwrap();
1004
1005                // Any thin pointer of matching mutability is fine as the data pointer.
1006                let mut was_updated = false;
1007                while let Value::Cast { kind: CastKind::PtrToPtr, value: cast_value } =
1008                    self.get(pointer)
1009                    && let ty::RawPtr(from_pointee_ty, from_mtbl) = self.ty(*cast_value).kind()
1010                    && let ty::RawPtr(_, output_mtbl) = ty.kind()
1011                    && from_mtbl == output_mtbl
1012                    && from_pointee_ty.is_sized(self.tcx, self.typing_env())
1013                {
1014                    pointer = *cast_value;
1015                    was_updated = true;
1016                }
1017
1018                if was_updated && let Some(op) = self.try_as_operand(pointer, location) {
1019                    field_ops[FieldIdx::ZERO] = op;
1020                }
1021
1022                return Some(self.insert(ty, Value::RawPtr { pointer, metadata }));
1023            }
1024        };
1025
1026        if ty.is_array() && fields.len() > 4 {
1027            let first = fields[0];
1028            if fields.iter().all(|&v| v == first) {
1029                let len = ty::Const::from_target_usize(self.tcx, fields.len().try_into().unwrap());
1030                if let Some(op) = self.try_as_operand(first, location) {
1031                    *rvalue = Rvalue::Repeat(op, len);
1032                }
1033                return Some(self.insert(ty, Value::Repeat(first, len)));
1034            }
1035        }
1036
1037        if let Some(value) =
1038            self.simplify_aggregate_to_copy(lhs, rvalue, location, &fields, variant_index)
1039        {
1040            return Some(value);
1041        }
1042
1043        Some(self.insert(ty, Value::Aggregate(variant_index, fields)))
1044    }
1045
1046    #[instrument(level = "trace", skip(self), ret)]
1047    fn simplify_unary(
1048        &mut self,
1049        op: UnOp,
1050        arg_op: &mut Operand<'tcx>,
1051        location: Location,
1052    ) -> Option<VnIndex> {
1053        let mut arg_index = self.simplify_operand(arg_op, location)?;
1054        let arg_ty = self.ty(arg_index);
1055        let ret_ty = op.ty(self.tcx, arg_ty);
1056
        // PtrMetadata doesn't care about *const vs *mut vs & vs &mut,
        // so start by removing those distinctions so we can update the `Operand`.
1059        if op == UnOp::PtrMetadata {
1060            let mut was_updated = false;
1061            loop {
1062                match self.get(arg_index) {
1063                    // Pointer casts that preserve metadata, such as
1064                    // `*const [i32]` <-> `*mut [i32]` <-> `*mut [f32]`.
1065                    // It's critical that this not eliminate cases like
1066                    // `*const [T]` -> `*const T` which remove metadata.
1067                    // We run on potentially-generic MIR, though, so unlike codegen
1068                    // we can't always know exactly what the metadata are.
1069                    // To allow things like `*mut (?A, ?T)` <-> `*mut (?B, ?T)`,
1070                    // it's fine to get a projection as the type.
1071                    Value::Cast { kind: CastKind::PtrToPtr, value: inner }
1072                        if self.pointers_have_same_metadata(self.ty(*inner), arg_ty) =>
1073                    {
1074                        arg_index = *inner;
1075                        was_updated = true;
1076                        continue;
1077                    }
1078
1079                    // `&mut *p`, `&raw *p`, etc don't change metadata.
1080                    Value::Address { place, kind: _, provenance: _ }
1081                        if let PlaceRef { local, projection: [PlaceElem::Deref] } =
1082                            place.as_ref()
1083                            && let Some(local_index) = self.locals[local] =>
1084                    {
1085                        arg_index = local_index;
1086                        was_updated = true;
1087                        continue;
1088                    }
1089
1090                    _ => {
1091                        if was_updated && let Some(op) = self.try_as_operand(arg_index, location) {
1092                            *arg_op = op;
1093                        }
1094                        break;
1095                    }
1096                }
1097            }
1098        }
1099
1100        let value = match (op, self.get(arg_index)) {
1101            (UnOp::Not, Value::UnaryOp(UnOp::Not, inner)) => return Some(*inner),
1102            (UnOp::Neg, Value::UnaryOp(UnOp::Neg, inner)) => return Some(*inner),
1103            (UnOp::Not, Value::BinaryOp(BinOp::Eq, lhs, rhs)) => {
1104                Value::BinaryOp(BinOp::Ne, *lhs, *rhs)
1105            }
1106            (UnOp::Not, Value::BinaryOp(BinOp::Ne, lhs, rhs)) => {
1107                Value::BinaryOp(BinOp::Eq, *lhs, *rhs)
1108            }
1109            (UnOp::PtrMetadata, Value::RawPtr { metadata, .. }) => return Some(*metadata),
1110            // We have an unsizing cast, which assigns the length to wide pointer metadata.
1111            (
1112                UnOp::PtrMetadata,
1113                Value::Cast {
1114                    kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
1115                    value: inner,
1116                },
1117            ) if let ty::Slice(..) = arg_ty.builtin_deref(true).unwrap().kind()
1118                && let ty::Array(_, len) = self.ty(*inner).builtin_deref(true).unwrap().kind() =>
1119            {
1120                return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
1121            }
1122            _ => Value::UnaryOp(op, arg_index),
1123        };
1124        Some(self.insert(ret_ty, value))
1125    }
1126
1127    #[instrument(level = "trace", skip(self), ret)]
1128    fn simplify_binary(
1129        &mut self,
1130        op: BinOp,
1131        lhs_operand: &mut Operand<'tcx>,
1132        rhs_operand: &mut Operand<'tcx>,
1133        location: Location,
1134    ) -> Option<VnIndex> {
1135        let lhs = self.simplify_operand(lhs_operand, location);
1136        let rhs = self.simplify_operand(rhs_operand, location);
1137
        // Only short-circuit the `Option`s after `simplify_operand` has been called
        // on both operands, for its side effects.
1140        let mut lhs = lhs?;
1141        let mut rhs = rhs?;
1142
1143        let lhs_ty = self.ty(lhs);
1144
1145        // If we're comparing pointers, remove `PtrToPtr` casts if the from
1146        // types of both casts and the metadata all match.
1147        if let BinOp::Eq | BinOp::Ne | BinOp::Lt | BinOp::Le | BinOp::Gt | BinOp::Ge = op
1148            && lhs_ty.is_any_ptr()
1149            && let Value::Cast { kind: CastKind::PtrToPtr, value: lhs_value } = self.get(lhs)
1150            && let Value::Cast { kind: CastKind::PtrToPtr, value: rhs_value } = self.get(rhs)
1151            && let lhs_from = self.ty(*lhs_value)
1152            && lhs_from == self.ty(*rhs_value)
1153            && self.pointers_have_same_metadata(lhs_from, lhs_ty)
1154        {
1155            lhs = *lhs_value;
1156            rhs = *rhs_value;
1157            if let Some(lhs_op) = self.try_as_operand(lhs, location)
1158                && let Some(rhs_op) = self.try_as_operand(rhs, location)
1159            {
1160                *lhs_operand = lhs_op;
1161                *rhs_operand = rhs_op;
1162            }
1163        }
1164
1165        if let Some(value) = self.simplify_binary_inner(op, lhs_ty, lhs, rhs) {
1166            return Some(value);
1167        }
1168        let ty = op.ty(self.tcx, lhs_ty, self.ty(rhs));
1169        let value = Value::BinaryOp(op, lhs, rhs);
1170        Some(self.insert(ty, value))
1171    }
1172
1173    fn simplify_binary_inner(
1174        &mut self,
1175        op: BinOp,
1176        lhs_ty: Ty<'tcx>,
1177        lhs: VnIndex,
1178        rhs: VnIndex,
1179    ) -> Option<VnIndex> {
1180        // Floats are weird enough that none of the logic below applies.
1181        let reasonable_ty =
1182            lhs_ty.is_integral() || lhs_ty.is_bool() || lhs_ty.is_char() || lhs_ty.is_any_ptr();
1183        if !reasonable_ty {
1184            return None;
1185        }
1186
1187        let layout = self.ecx.layout_of(lhs_ty).ok()?;
1188
1189        let as_bits = |value: VnIndex| {
1190            let constant = self.evaluated[value].as_ref()?;
1191            if layout.backend_repr.is_scalar() {
1192                let scalar = self.ecx.read_scalar(constant).discard_err()?;
1193                scalar.to_bits(constant.layout.size).discard_err()
1194            } else {
1195                // `constant` is a wide pointer. Do not evaluate to bits.
1196                None
1197            }
1198        };
1199
1200        // Represent the values as `Left(bits)` or `Right(VnIndex)`.
1201        use Either::{Left, Right};
1202        let a = as_bits(lhs).map_or(Right(lhs), Left);
1203        let b = as_bits(rhs).map_or(Right(rhs), Left);
1204
1205        let result = match (op, a, b) {
1206            // Neutral elements.
1207            (
1208                BinOp::Add
1209                | BinOp::AddWithOverflow
1210                | BinOp::AddUnchecked
1211                | BinOp::BitOr
1212                | BinOp::BitXor,
1213                Left(0),
1214                Right(p),
1215            )
1216            | (
1217                BinOp::Add
1218                | BinOp::AddWithOverflow
1219                | BinOp::AddUnchecked
1220                | BinOp::BitOr
1221                | BinOp::BitXor
1222                | BinOp::Sub
1223                | BinOp::SubWithOverflow
1224                | BinOp::SubUnchecked
1225                | BinOp::Offset
1226                | BinOp::Shl
1227                | BinOp::Shr,
1228                Right(p),
1229                Left(0),
1230            )
1231            | (BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked, Left(1), Right(p))
1232            | (
1233                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::Div,
1234                Right(p),
1235                Left(1),
1236            ) => p,
1237            // Attempt to simplify `x & ALL_ONES` to `x`, with `ALL_ONES` depending on type size.
1238            (BinOp::BitAnd, Right(p), Left(ones)) | (BinOp::BitAnd, Left(ones), Right(p))
1239                if ones == layout.size.truncate(u128::MAX)
1240                    || (layout.ty.is_bool() && ones == 1) =>
1241            {
1242                p
1243            }
1244            // Absorbing elements.
1245            (
1246                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::BitAnd,
1247                _,
1248                Left(0),
1249            )
1250            | (BinOp::Rem, _, Left(1))
1251            | (
1252                BinOp::Mul
1253                | BinOp::MulWithOverflow
1254                | BinOp::MulUnchecked
1255                | BinOp::Div
1256                | BinOp::Rem
1257                | BinOp::BitAnd
1258                | BinOp::Shl
1259                | BinOp::Shr,
1260                Left(0),
1261                _,
1262            ) => self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size)),
1263            // Attempt to simplify `x | ALL_ONES` to `ALL_ONES`.
1264            (BinOp::BitOr, _, Left(ones)) | (BinOp::BitOr, Left(ones), _)
1265                if ones == layout.size.truncate(u128::MAX)
1266                    || (layout.ty.is_bool() && ones == 1) =>
1267            {
1268                self.insert_scalar(lhs_ty, Scalar::from_uint(ones, layout.size))
1269            }
1270            // Sub/Xor with itself.
1271            (BinOp::Sub | BinOp::SubWithOverflow | BinOp::SubUnchecked | BinOp::BitXor, a, b)
1272                if a == b =>
1273            {
1274                self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size))
1275            }
1276            // Comparison:
1277            // - if both operands can be computed as bits, just compare the bits;
1278            // - if we proved that both operands have the same value, we can insert true/false;
1279            // - otherwise, do nothing, as we do not try to prove inequality.
1280            (BinOp::Eq, Left(a), Left(b)) => self.insert_bool(a == b),
1281            (BinOp::Eq, a, b) if a == b => self.insert_bool(true),
1282            (BinOp::Ne, Left(a), Left(b)) => self.insert_bool(a != b),
1283            (BinOp::Ne, a, b) if a == b => self.insert_bool(false),
1284            _ => return None,
1285        };
1286
1287        if op.is_overflowing() {
1288            let ty = Ty::new_tup(self.tcx, &[self.ty(result), self.tcx.types.bool]);
1289            let false_val = self.insert_bool(false);
1290            Some(self.insert_tuple(ty, vec![result, false_val]))
1291        } else {
1292            Some(result)
1293        }
1294    }
1295
1296    fn simplify_cast(
1297        &mut self,
1298        initial_kind: &mut CastKind,
1299        initial_operand: &mut Operand<'tcx>,
1300        to: Ty<'tcx>,
1301        location: Location,
1302    ) -> Option<VnIndex> {
1303        use CastKind::*;
1304        use rustc_middle::ty::adjustment::PointerCoercion::*;
1305
1306        let mut kind = *initial_kind;
1307        let mut value = self.simplify_operand(initial_operand, location)?;
1308        let mut from = self.ty(value);
1309        if from == to {
1310            return Some(value);
1311        }
1312
1313        if let CastKind::PointerCoercion(ReifyFnPointer | ClosureFnPointer(_), _) = kind {
1314            // Each reification of a generic fn may get a different pointer.
1315            // Do not try to merge them.
1316            return Some(self.new_opaque(to));
1317        }
1318
1319        let mut was_ever_updated = false;
1320        loop {
1321            let mut was_updated_this_iteration = false;
1322
1323            // Transmuting between raw pointers is just a pointer cast so long as
1324            // they have the same metadata type (like `*const i32` <=> `*mut u64`
1325            // or `*mut [i32]` <=> `*const [u64]`), including the common special
1326            // case of `*const T` <=> `*mut T`.
1327            if let Transmute = kind
1328                && from.is_raw_ptr()
1329                && to.is_raw_ptr()
1330                && self.pointers_have_same_metadata(from, to)
1331            {
1332                kind = PtrToPtr;
1333                was_updated_this_iteration = true;
1334            }
1335
1336            // If a cast just casts away the metadata again, then we can get it by
1337            // casting the original thin pointer passed to `from_raw_parts`.
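                // For instance (illustrative): a pointer built by `ptr::from_raw_parts(thin, meta)`
                // and then cast to a thin raw pointer yields the same value as casting `thin`
                // directly, since the cast discards `meta` anyway.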
1338            if let PtrToPtr = kind
1339                && let Value::RawPtr { pointer, .. } = self.get(value)
1340                && let ty::RawPtr(to_pointee, _) = to.kind()
1341                && to_pointee.is_sized(self.tcx, self.typing_env())
1342            {
1343                from = self.ty(*pointer);
1344                value = *pointer;
1345                was_updated_this_iteration = true;
1346                if from == to {
1347                    return Some(*pointer);
1348                }
1349            }
1350
1351            // Aggregate-then-Transmute can just transmute the original field value,
1352            // so long as all the bytes of the value come from a single field.
1353            if let Transmute = kind
1354                && let Value::Aggregate(variant_idx, field_values) = self.get(value)
1355                && let Some((field_idx, field_ty)) =
1356                    self.value_is_all_in_one_field(from, *variant_idx)
1357            {
1358                from = field_ty;
1359                value = field_values[field_idx.as_usize()];
1360                was_updated_this_iteration = true;
1361                if field_ty == to {
1362                    return Some(value);
1363                }
1364            }
1365
1366            // Various cast-then-cast cases can be simplified.
1367            if let Value::Cast { kind: inner_kind, value: inner_value } = *self.get(value) {
1368                let inner_from = self.ty(inner_value);
1369                let new_kind = match (inner_kind, kind) {
1370                    // Even if there's a narrowing cast in here, that's fine, because
1371                    // things like `*mut [i32] -> *mut i32 -> *const i32` and
1372                    // `*mut [i32] -> *const [i32] -> *const i32` can skip the middle in MIR.
1373                    (PtrToPtr, PtrToPtr) => Some(PtrToPtr),
1374                    // PtrToPtr-then-Transmute is fine so long as the pointer cast is identity:
1375                    // `*const T -> *mut T -> NonNull<T>` is fine, but we need to check for narrowing
1376                    // to skip things like `*const [i32] -> *const i32 -> NonNull<T>`.
1377                    (PtrToPtr, Transmute) if self.pointers_have_same_metadata(inner_from, from) => {
1378                        Some(Transmute)
1379                    }
1380                    // Similarly, for Transmute-then-PtrToPtr. Note that we need to check different
1381                    // variables for their metadata, and thus this can't merge with the previous arm.
1382                    (Transmute, PtrToPtr) if self.pointers_have_same_metadata(from, to) => {
1383                        Some(Transmute)
1384                    }
1385                    // It would be legal to always do this, but we don't want to hide information
1386                    // from the backend that it'd otherwise be able to use for optimizations.
1387                    (Transmute, Transmute)
1388                        if !self.type_may_have_niche_of_interest_to_backend(from) =>
1389                    {
1390                        Some(Transmute)
1391                    }
1392                    _ => None,
1393                };
1394                if let Some(new_kind) = new_kind {
1395                    kind = new_kind;
1396                    from = inner_from;
1397                    value = inner_value;
1398                    was_updated_this_iteration = true;
1399                    if inner_from == to {
1400                        return Some(inner_value);
1401                    }
1402                }
1403            }
1404
1405            if was_updated_this_iteration {
1406                was_ever_updated = true;
1407            } else {
1408                break;
1409            }
1410        }
1411
1412        if was_ever_updated && let Some(op) = self.try_as_operand(value, location) {
1413            *initial_operand = op;
1414            *initial_kind = kind;
1415        }
1416
1417        Some(self.insert(to, Value::Cast { kind, value }))
1418    }
1419
1420    fn simplify_len(&mut self, place: &mut Place<'tcx>, location: Location) -> Option<VnIndex> {
1421        // Trivial case: we are fetching a statically known length.
1422        let place_ty = place.ty(self.local_decls, self.tcx).ty;
1423        if let ty::Array(_, len) = place_ty.kind() {
1424            return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
1425        }
1426
1427        let mut inner = self.simplify_place_value(place, location)?;
1428
1429        // The length information is stored in the wide pointer.
1430        // Reborrowing copies length information from one pointer to the other.
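            // For example (illustrative): after `_2 = &(*_1)`, a length read through `_2` is the
            // same as one read through `_1`, so we look through such reborrows.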
1431        while let Value::Address { place: borrowed, .. } = self.get(inner)
1432            && let [PlaceElem::Deref] = borrowed.projection[..]
1433            && let Some(borrowed) = self.locals[borrowed.local]
1434        {
1435            inner = borrowed;
1436        }
1437
1438        // We have an unsizing cast, which assigns the length to wide pointer metadata.
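            // For example (illustrative): if the wide pointer was produced by unsizing a
            // `&[u8; 3]` into a `&[u8]`, the length is statically known to be 3.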
1439        if let Value::Cast { kind, value: from } = self.get(inner)
1440            && let CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) = kind
1441            && let Some(from) = self.ty(*from).builtin_deref(true)
1442            && let ty::Array(_, len) = from.kind()
1443            && let Some(to) = self.ty(inner).builtin_deref(true)
1444            && let ty::Slice(..) = to.kind()
1445        {
1446            return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
1447        }
1448
1449        // Fallback: a symbolic `Len`.
1450        Some(self.insert(self.tcx.types.usize, Value::Len(inner)))
1451    }
1452
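        // For example (illustrative): `*const [i32]` and `*mut [u64]` both carry `usize` metadata,
        // so they compare equal here, whereas `*const i32` (a thin pointer) and `*const [i32]`
        // do not.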
1453    fn pointers_have_same_metadata(&self, left_ptr_ty: Ty<'tcx>, right_ptr_ty: Ty<'tcx>) -> bool {
1454        let left_meta_ty = left_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1455        let right_meta_ty = right_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1456        if left_meta_ty == right_meta_ty {
1457            true
1458        } else if let Ok(left) =
1459            self.tcx.try_normalize_erasing_regions(self.typing_env(), left_meta_ty)
1460            && let Ok(right) =
1461                self.tcx.try_normalize_erasing_regions(self.typing_env(), right_meta_ty)
1462        {
1463            left == right
1464        } else {
1465            false
1466        }
1467    }
1468
1469    /// Returns `false` if we know for sure that this type has no interesting niche,
1470    /// and thus we can skip transmuting through it without worrying.
1471    ///
1472    /// The backend will emit `assume`s when transmuting between types with niches,
1473    /// so we want to preserve `i32 -> char -> u32` so that this information is still around,
1474    /// but it's fine to skip whole-range-is-value steps like `A -> u32 -> B`.
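        /// For example (illustrative): `char` has a niche (not every bit pattern of a `u32` is a
        /// valid `char`), so this returns `true` for it, while it returns `false` for `u32`.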
1475    fn type_may_have_niche_of_interest_to_backend(&self, ty: Ty<'tcx>) -> bool {
1476        let Ok(layout) = self.ecx.layout_of(ty) else {
1477            // If it's too generic or something, then assume it might be interesting later.
1478            return true;
1479        };
1480
1481        if layout.uninhabited {
1482            return true;
1483        }
1484
1485        match layout.backend_repr {
1486            BackendRepr::Scalar(a) => !a.is_always_valid(&self.ecx),
1487            BackendRepr::ScalarPair(a, b) => {
1488                !a.is_always_valid(&self.ecx) || !b.is_always_valid(&self.ecx)
1489            }
1490            BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => false,
1491        }
1492    }
1493
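        /// Returns the single field (and its type) that provides all the bytes of a `ty` value in
        /// the given `variant`, if any. For example (illustrative), for
        /// `#[repr(transparent)] struct Wrapper(u32)` this is field 0 of type `u32`, which lets a
        /// transmute of a `Wrapper` aggregate be applied directly to that field's value.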
1494    fn value_is_all_in_one_field(
1495        &self,
1496        ty: Ty<'tcx>,
1497        variant: VariantIdx,
1498    ) -> Option<(FieldIdx, Ty<'tcx>)> {
1499        if let Ok(layout) = self.ecx.layout_of(ty)
1500            && let abi::Variants::Single { index } = layout.variants
1501            && index == variant
1502            && let Some((field_idx, field_layout)) = layout.non_1zst_field(&self.ecx)
1503            && layout.size == field_layout.size
1504        {
1505            // We needed to check the variant to avoid trying to read the tag
1506            // field from an enum where no variants have fields, since that tag
1507            // field isn't in the `Aggregate` from which we're getting values.
1508            Some((field_idx, field_layout.ty))
1509        } else if let ty::Adt(adt, args) = ty.kind()
1510            && adt.is_struct()
1511            && adt.repr().transparent()
1512            && let [single_field] = adt.non_enum_variant().fields.raw.as_slice()
1513        {
1514            Some((FieldIdx::ZERO, single_field.ty(self.tcx, args)))
1515        } else {
1516            None
1517        }
1518    }
1519}
1520
1521fn op_to_prop_const<'tcx>(
1522    ecx: &mut InterpCx<'tcx, DummyMachine>,
1523    op: &OpTy<'tcx>,
1524) -> Option<ConstValue> {
1525    // Do not attempt to propagate unsized locals.
1526    if op.layout.is_unsized() {
1527        return None;
1528    }
1529
1530    // This constant is a ZST, just return an empty value.
1531    if op.layout.is_zst() {
1532        return Some(ConstValue::ZeroSized);
1533    }
1534
1535    // Do not synthesize too large constants. Codegen will just memcpy them, which we'd like to
1536    // avoid.
1537    if !matches!(op.layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
1538        return None;
1539    }
1540
1541    // If this constant has scalar ABI, return it as a `ConstValue::Scalar`.
1542    if let BackendRepr::Scalar(abi::Scalar::Initialized { .. }) = op.layout.backend_repr
1543        && let Some(scalar) = ecx.read_scalar(op).discard_err()
1544    {
1545        if scalar.try_to_scalar_int().is_err() {
1546            // Check that we do not leak a pointer.
1547            // Those pointers may lose part of their identity in codegen.
1548            // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1549            return None;
1550        }
1551        return Some(ConstValue::Scalar(scalar));
1552    }
1553
1554    // If this constant is already represented as an `Allocation`,
1555    // try putting it into global memory to return it.
1556    if let Either::Left(mplace) = op.as_mplace_or_imm() {
1557        let (size, _align) = ecx.size_and_align_of_val(&mplace).discard_err()??;
1558
1559        // Do not try interning a value that contains provenance.
1560        // Due to https://github.com/rust-lang/rust/issues/79738, doing so could lead to bugs.
1561        // FIXME: remove this hack once that issue is fixed.
1562        let alloc_ref = ecx.get_ptr_alloc(mplace.ptr(), size).discard_err()??;
1563        if alloc_ref.has_provenance() {
1564            return None;
1565        }
1566
1567        let pointer = mplace.ptr().into_pointer_or_addr().ok()?;
1568        let (prov, offset) = pointer.prov_and_relative_offset();
1569        let alloc_id = prov.alloc_id();
1570        intern_const_alloc_for_constprop(ecx, alloc_id).discard_err()?;
1571
1572        // `alloc_id` may point to a static. Codegen will choke on an `Indirect` with anything
1573        // but `GlobalAlloc::Memory`, so fall through to copying if needed.
1574        // FIXME: find a way to treat this more uniformly (probably by fixing codegen)
1575        if let GlobalAlloc::Memory(alloc) = ecx.tcx.global_alloc(alloc_id)
1576            // Transmuting a constant is just an offset in the allocation. If the alignment of the
1577            // allocation is not enough, fall back to copying into a properly aligned value.
1578            && alloc.inner().align >= op.layout.align.abi
1579        {
1580            return Some(ConstValue::Indirect { alloc_id, offset });
1581        }
1582    }
1583
1584    // Everything failed: create a new allocation to hold the data.
1585    let alloc_id =
1586        ecx.intern_with_temp_alloc(op.layout, |ecx, dest| ecx.copy_op(op, dest)).discard_err()?;
1587    let value = ConstValue::Indirect { alloc_id, offset: Size::ZERO };
1588
1589    // Check that we do not leak a pointer.
1590    // Those pointers may lose part of their identity in codegen.
1591    // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1592    if ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner().provenance().ptrs().is_empty() {
1593        return Some(value);
1594    }
1595
1596    None
1597}
1598
1599impl<'tcx> VnState<'_, 'tcx> {
1600    /// If either [`Self::try_as_constant`] or [`Self::try_as_place`] succeeds,
1601    /// returns that result as an [`Operand`].
1602    fn try_as_operand(&mut self, index: VnIndex, location: Location) -> Option<Operand<'tcx>> {
1603        if let Some(const_) = self.try_as_constant(index) {
1604            Some(Operand::Constant(Box::new(const_)))
1605        } else if let Some(place) = self.try_as_place(index, location, false) {
1606            self.reused_locals.insert(place.local);
1607            Some(Operand::Copy(place))
1608        } else {
1609            None
1610        }
1611    }
1612
1613    /// If `index` is a `Value::Constant`, return the `Constant` to be put in the MIR.
1614    fn try_as_constant(&mut self, index: VnIndex) -> Option<ConstOperand<'tcx>> {
1615        // This was already constant in MIR, do not change it. If the constant is not
1616        // deterministic, adding an additional mention of it in MIR will not give the same value as
1617        // the former mention.
1618        if let Value::Constant { value, disambiguator: 0 } = *self.get(index) {
1619            debug_assert!(value.is_deterministic());
1620            return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
1621        }
1622
1623        let op = self.evaluated[index].as_ref()?;
1624        if op.layout.is_unsized() {
1625            // Do not attempt to propagate unsized locals.
1626            return None;
1627        }
1628
1629        let value = op_to_prop_const(&mut self.ecx, op)?;
1630
1631        // Check that we do not leak a pointer.
1632        // Those pointers may lose part of their identity in codegen.
1633        // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1634        assert!(!value.may_have_provenance(self.tcx, op.layout.size));
1635
1636        let const_ = Const::Val(value, op.layout.ty);
1637        Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_ })
1638    }
1639
1640    /// Construct a place which holds the same value as `index` and for which all locals strictly
1641    /// dominate `loc`. If you used this place, add its base local to `reused_locals` to remove
1642    /// storage statements.
1643    #[instrument(level = "trace", skip(self), ret)]
1644    fn try_as_place(
1645        &mut self,
1646        mut index: VnIndex,
1647        loc: Location,
1648        allow_complex_projection: bool,
1649    ) -> Option<Place<'tcx>> {
1650        let mut projection = SmallVec::<[PlaceElem<'tcx>; 1]>::new();
1651        loop {
1652            if let Some(local) = self.try_as_local(index, loc) {
1653                projection.reverse();
1654                let place =
1655                    Place { local, projection: self.tcx.mk_place_elems(projection.as_slice()) };
1656                return Some(place);
1657            } else if let Value::Projection(pointer, proj) = *self.get(index)
1658                && (allow_complex_projection || proj.is_stable_offset())
1659                && let Some(proj) = self.try_as_place_elem(self.ty(index), proj, loc)
1660            {
1661                projection.push(proj);
1662                index = pointer;
1663            } else {
1664                return None;
1665            }
1666        }
1667    }
1668
1669    /// If there is a local which is assigned `index`, and its assignment strictly dominates `loc`,
1670    /// return it. If you used this local, add it to `reused_locals` to remove storage statements.
1671    fn try_as_local(&mut self, index: VnIndex, loc: Location) -> Option<Local> {
1672        let other = self.rev_locals.get(index)?;
1673        other
1674            .iter()
1675            .find(|&&other| self.ssa.assignment_dominates(&self.dominators, other, loc))
1676            .copied()
1677    }
1678}
1679
1680impl<'tcx> MutVisitor<'tcx> for VnState<'_, 'tcx> {
1681    fn tcx(&self) -> TyCtxt<'tcx> {
1682        self.tcx
1683    }
1684
1685    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
1686        self.simplify_place_projection(place, location);
1687        if context.is_mutating_use() && place.is_indirect() {
1688            // Non-local mutation may invalidate derefs.
1689            self.invalidate_derefs();
1690        }
1691        self.super_place(place, context, location);
1692    }
1693
1694    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
1695        self.simplify_operand(operand, location);
1696        self.super_operand(operand, location);
1697    }
1698
1699    fn visit_assign(
1700        &mut self,
1701        lhs: &mut Place<'tcx>,
1702        rvalue: &mut Rvalue<'tcx>,
1703        location: Location,
1704    ) {
1705        self.simplify_place_projection(lhs, location);
1706
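            // If the rvalue's value is already available, rewrite the rvalue itself. For example
            // (illustrative): `_3 = Add(copy _1, copy _2)` can become `_3 = const N` when the sum
            // is a known constant, or `_3 = copy _4` when a dominating local `_4` already holds
            // the same value.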
1707        let value = self.simplify_rvalue(lhs, rvalue, location);
1708        if let Some(value) = value {
1709            if let Some(const_) = self.try_as_constant(value) {
1710                *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_)));
1711            } else if let Some(place) = self.try_as_place(value, location, false)
1712                && *rvalue != Rvalue::Use(Operand::Move(place))
1713                && *rvalue != Rvalue::Use(Operand::Copy(place))
1714            {
1715                *rvalue = Rvalue::Use(Operand::Copy(place));
1716                self.reused_locals.insert(place.local);
1717            }
1718        }
1719
1720        if lhs.is_indirect() {
1721            // Non-local mutation may invalidate derefs.
1722            self.invalidate_derefs();
1723        }
1724
1725        if let Some(local) = lhs.as_local()
1726            && self.ssa.is_ssa(local)
1727            && let rvalue_ty = rvalue.ty(self.local_decls, self.tcx)
1728            // FIXME(#112651) the type of `rvalue` may be a subtype of `local`'s type. We can
1729            // only mark `local` as reusable if we have an exact type match.
1730            && self.local_decls[local].ty == rvalue_ty
1731        {
1732            let value = value.unwrap_or_else(|| self.new_opaque(rvalue_ty));
1733            self.assign(local, value);
1734        }
1735    }
1736
1737    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) {
1738        if let Terminator { kind: TerminatorKind::Call { destination, .. }, .. } = terminator {
1739            if let Some(local) = destination.as_local()
1740                && self.ssa.is_ssa(local)
1741            {
1742                let ty = self.local_decls[local].ty;
1743                let opaque = self.new_opaque(ty);
1744                self.assign(local, opaque);
1745            }
1746        }
1747        // Function calls and ASM may invalidate (nested) derefs, so we must handle them carefully.
1748        // Currently, we only preserve derefs across trivial terminators like SwitchInt and Goto.
1749        let safe_to_preserve_derefs = matches!(
1750            terminator.kind,
1751            TerminatorKind::SwitchInt { .. } | TerminatorKind::Goto { .. }
1752        );
1753        if !safe_to_preserve_derefs {
1754            self.invalidate_derefs();
1755        }
1756        self.super_terminator(terminator, location);
1757    }
1758}
1759
1760struct StorageRemover<'tcx> {
1761    tcx: TyCtxt<'tcx>,
1762    reused_locals: DenseBitSet<Local>,
1763}
1764
1765impl<'tcx> MutVisitor<'tcx> for StorageRemover<'tcx> {
1766    fn tcx(&self) -> TyCtxt<'tcx> {
1767        self.tcx
1768    }
1769
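        // A local whose value was reused may still be read from later on, so moving out of it is
        // no longer allowed. For example (illustrative): `f(move _1)` must become `f(copy _1)` if
        // GVN introduced another `copy _1` after this call.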
1770    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _: Location) {
1771        if let Operand::Move(place) = *operand
1772            && !place.is_indirect_first_projection()
1773            && self.reused_locals.contains(place.local)
1774        {
1775            *operand = Operand::Copy(place);
1776        }
1777    }
1778
1779    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, loc: Location) {
1780        match stmt.kind {
1781            // When removing storage statements, we need to remove both (#107511).
1782            StatementKind::StorageLive(l) | StatementKind::StorageDead(l)
1783                if self.reused_locals.contains(l) =>
1784            {
1785                stmt.make_nop()
1786            }
1787            _ => self.super_statement(stmt, loc),
1788        }
1789    }
1790}