rustc_mir_transform/gvn.rs

//! Global value numbering.
//!
//! MIR may contain repeated and/or redundant computations. The objective of this pass is to detect
//! such redundancies and re-use the already-computed result when possible.
//!
//! We traverse all assignments `x = rvalue` and operands.
//!
//! For each SSA one, we compute a symbolic representation of the values that are assigned to SSA
//! locals. This symbolic representation is defined by the `Value` enum. Each produced instance of
//! `Value` is interned as a `VnIndex`, which allows us to cheaply detect identical values.
//!
//! From those assignments, we construct a mapping `VnIndex -> Vec<(Local, Location)>` of available
//! values, the locals in which they are stored, and the assignment location.
//!
//! For each non-SSA one, we compute the `VnIndex` of the rvalue. If this `VnIndex` is associated
//! to a constant, we replace the rvalue/operand by that constant. Otherwise, if there is an SSA
//! local `y` associated to this `VnIndex`, and if its definition location strictly dominates the
//! assignment to `x`, we replace the assignment by `x = y`.
//!
//! Opportunistically, this pass simplifies some `Rvalue`s based on the accumulated knowledge.
//!
//! # Operational semantics
//!
//! Operationally, this pass attempts to prove bitwise equality between locals. Given this MIR:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _b = some other value // also has VnIndex i
//! ```
//!
//! We consider it to be replaceable by:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _c = some other value // also has VnIndex i
//! assume(_a bitwise equal to _c) // follows from having the same VnIndex
//! _b = _a // follows from the `assume`
//! ```
//!
//! Which is simplifiable to:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _b = _a
//! ```
//!
//! # Handling of references
//!
//! We handle references by assigning a different "provenance" index to each Ref/RawPtr rvalue.
//! This ensures that we do not spuriously merge borrows that should not be merged. Meanwhile, we
//! consider all the derefs of an immutable reference to a freeze type to give the same value:
//! ```ignore (MIR)
//! _a = *_b // _b is &Freeze
//! _c = *_b // replaced by _c = _a
//! ```
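//!
//! By contrast, we do not merge derefs through a mutable reference, or through a shared
//! reference to a non-`Freeze` type such as `&Cell<u8>`, as the pointee may change between the
//! two reads:
//! ```ignore (MIR)
//! _a = *_b // _b is &mut u8 or &Cell<u8>
//! _c = *_b // not replaced
//! ```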
//!
//! # Determinism of constant propagation
//!
//! When registering a new `Value`, we attempt to opportunistically evaluate it as a constant.
//! The evaluated form is inserted into `evaluated` as an `OpTy`, or as `None` if evaluation failed.
//!
//! The difficulty is non-deterministic evaluation of MIR constants. Some `Const`s can have
//! different runtime values each time they are evaluated. This is the case for
//! `Const::Slice`, which gets a new pointer each time it is evaluated, and for constants that
//! contain a fn pointer (an `AllocId` pointing to a `GlobalAlloc::Function`) which may point to a
//! different symbol in each codegen unit.
//!
//! Meanwhile, we want to be able to read indirect constants. For instance:
//! ```
//! static A: &'static &'static u8 = &&63;
//! fn foo() -> u8 {
//!     **A // We want to replace this by 63.
//! }
//! fn bar() -> u8 {
//!     b"abc"[1] // We want to replace this by 'b'.
//! }
//! ```
//!
//! The `Value::Constant` variant stores a possibly unevaluated constant. Evaluating that constant
//! may be non-deterministic. When that happens, we assign a disambiguator to ensure that we do not
//! merge the constants. See `duplicate_slice` test in `gvn.rs`.
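//!
//! For instance (a sketch: the concrete disambiguator values depend on the counter state), two
//! mentions of the same slice constant are interned separately and hence never merged:
//! ```ignore (MIR)
//! _a = const "foo"; // Value::Constant { value, disambiguator: 1 }
//! _b = const "foo"; // Value::Constant { value, disambiguator: 2 }
//! ```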
//!
//! Additionally, when writing constants in MIR, we do not write `Const::Slice` or `Const`s
//! that contain `AllocId`s.
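//!
//! As an end-to-end sketch (exact MIR shapes vary; this assumes `_2` is SSA and its assignment
//! dominates that of `_3`), the pass turns:
//! ```ignore (MIR)
//! _2 = Add(copy _1, const 1_i32);
//! _3 = Add(copy _1, const 1_i32);
//! ```
//! into:
//! ```ignore (MIR)
//! _2 = Add(copy _1, const 1_i32);
//! _3 = copy _2;
//! ```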

use std::borrow::Cow;

use either::Either;
use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
use rustc_const_eval::const_eval::DummyMachine;
use rustc_const_eval::interpret::{
    ImmTy, Immediate, InterpCx, MemPlaceMeta, MemoryKind, OpTy, Projectable, Scalar,
    intern_const_alloc_for_constprop,
};
use rustc_data_structures::fx::{FxIndexSet, MutableValues};
use rustc_data_structures::graph::dominators::Dominators;
use rustc_hir::def::DefKind;
use rustc_index::bit_set::DenseBitSet;
use rustc_index::{IndexVec, newtype_index};
use rustc_middle::bug;
use rustc_middle::mir::interpret::GlobalAlloc;
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::layout::HasTypingEnv;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::DUMMY_SP;
use smallvec::SmallVec;
use tracing::{debug, instrument, trace};

use crate::ssa::SsaLocals;

pub(super) struct GVN;

impl<'tcx> crate::MirPass<'tcx> for GVN {
    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
        sess.mir_opt_level() >= 2
    }

    #[instrument(level = "trace", skip(self, tcx, body))]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!(def_id = ?body.source.def_id());

        let typing_env = body.typing_env(tcx);
        let ssa = SsaLocals::new(tcx, body, typing_env);
        // Clone dominators because we need them while mutating the body.
        let dominators = body.basic_blocks.dominators().clone();

        let mut state = VnState::new(tcx, body, typing_env, &ssa, dominators, &body.local_decls);

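        // Function arguments are set by the caller, so we know nothing about their values:
        // give each SSA argument a fresh opaque value.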
        for local in body.args_iter().filter(|&local| ssa.is_ssa(local)) {
            let opaque = state.new_opaque(body.local_decls[local].ty);
            state.assign(local, opaque);
        }

        let reverse_postorder = body.basic_blocks.reverse_postorder().to_vec();
        for bb in reverse_postorder {
            let data = &mut body.basic_blocks.as_mut_preserves_cfg()[bb];
            state.visit_basic_block_data(bb, data);
        }

        // For each local that is reused (`y` above), we remove its storage statements to avoid any
        // difficulty. Those locals are SSA, so they should be easy for LLVM to optimize without
        // storage statements.
        StorageRemover { tcx, reused_locals: state.reused_locals }.visit_body_preserves_cfg(body);
    }

    fn is_required(&self) -> bool {
        false
    }
}

newtype_index! {
    struct VnIndex {}
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum AddressKind {
    Ref(BorrowKind),
    Address(RawPtrKind),
}

#[derive(Debug, PartialEq, Eq, Hash)]
enum Value<'tcx> {
    // Root values.
    /// Used to represent values we know nothing about.
    /// The `usize` is a counter incremented by `new_opaque`.
    Opaque(usize),
    /// Evaluated or unevaluated constant value.
    Constant {
        value: Const<'tcx>,
        /// Some constants do not have a deterministic value. To avoid merging two instances of the
        /// same `Const`, we assign them an additional integer index.
        // `disambiguator` is 0 iff the constant is deterministic.
        disambiguator: usize,
    },
    /// An aggregate value, either tuple/closure/struct/enum.
    /// This does not contain unions, as we cannot reason about their value.
    Aggregate(VariantIdx, Vec<VnIndex>),
    /// A raw pointer aggregate built from a thin pointer and metadata.
    RawPtr {
        /// Thin pointer component. This is field 0 in MIR.
        pointer: VnIndex,
        /// Metadata component. This is field 1 in MIR.
        metadata: VnIndex,
    },
    /// This corresponds to a `[value; count]` expression.
    Repeat(VnIndex, ty::Const<'tcx>),
    /// The address of a place.
    Address {
        place: Place<'tcx>,
        kind: AddressKind,
        /// Give each borrow and pointer a different provenance, so we don't merge them.
        provenance: usize,
    },

    // Extractions.
    /// This is the *value* obtained by projecting another value.
    Projection(VnIndex, ProjectionElem<VnIndex, ()>),
    /// Discriminant of the given value.
    Discriminant(VnIndex),
    /// Length of an array or slice.
    Len(VnIndex),

    // Operations.
    NullaryOp(NullOp<'tcx>, Ty<'tcx>),
    UnaryOp(UnOp, VnIndex),
    BinaryOp(BinOp, VnIndex, VnIndex),
    Cast {
        kind: CastKind,
        value: VnIndex,
    },
}

struct VnState<'body, 'tcx> {
    tcx: TyCtxt<'tcx>,
    ecx: InterpCx<'tcx, DummyMachine>,
    local_decls: &'body LocalDecls<'tcx>,
    is_coroutine: bool,
    /// Value stored in each local.
    locals: IndexVec<Local, Option<VnIndex>>,
    /// Locals that are assigned that value.
    // This vector does not hold all the values of `VnIndex` that we create.
    rev_locals: IndexVec<VnIndex, SmallVec<[Local; 1]>>,
    values: FxIndexSet<(Value<'tcx>, Ty<'tcx>)>,
    /// Values evaluated as constants if possible.
    evaluated: IndexVec<VnIndex, Option<OpTy<'tcx>>>,
    /// Counter to generate different values.
    next_opaque: usize,
    /// Cache the deref values.
    derefs: Vec<VnIndex>,
    ssa: &'body SsaLocals,
    dominators: Dominators<BasicBlock>,
    reused_locals: DenseBitSet<Local>,
}

impl<'body, 'tcx> VnState<'body, 'tcx> {
    fn new(
        tcx: TyCtxt<'tcx>,
        body: &Body<'tcx>,
        typing_env: ty::TypingEnv<'tcx>,
        ssa: &'body SsaLocals,
        dominators: Dominators<BasicBlock>,
        local_decls: &'body LocalDecls<'tcx>,
    ) -> Self {
        // Compute a rough estimate of the number of values in the body from the number of
        // statements. This is meant to reduce the number of allocations, but it's all right if
        // we miss the exact amount. We estimate based on 2 values per statement (one in LHS and
        // one in RHS) and 4 values per terminator (for call operands).
        let num_values =
            2 * body.basic_blocks.iter().map(|bbdata| bbdata.statements.len()).sum::<usize>()
                + 4 * body.basic_blocks.len();
        VnState {
            tcx,
            ecx: InterpCx::new(tcx, DUMMY_SP, typing_env, DummyMachine),
            local_decls,
            is_coroutine: body.coroutine.is_some(),
            locals: IndexVec::from_elem(None, local_decls),
            rev_locals: IndexVec::with_capacity(num_values),
            values: FxIndexSet::with_capacity_and_hasher(num_values, Default::default()),
            evaluated: IndexVec::with_capacity(num_values),
            next_opaque: 1,
            derefs: Vec::new(),
            ssa,
            dominators,
            reused_locals: DenseBitSet::new_empty(local_decls.len()),
        }
    }

    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
        self.ecx.typing_env()
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn insert(&mut self, ty: Ty<'tcx>, value: Value<'tcx>) -> VnIndex {
        let (index, new) = self.values.insert_full((value, ty));
        let index = VnIndex::from_usize(index);
        if new {
            // Grow `evaluated` and `rev_locals` here to amortize the allocations.
            let evaluated = self.eval_to_const(index);
            let _index = self.evaluated.push(evaluated);
            debug_assert_eq!(index, _index);
            let _index = self.rev_locals.push(SmallVec::new());
            debug_assert_eq!(index, _index);
        }
        index
    }

    fn next_opaque(&mut self) -> usize {
        let next_opaque = self.next_opaque;
        self.next_opaque += 1;
        next_opaque
    }

    /// Create a new `Value` for which we have no information at all, except that it is distinct
    /// from all the others.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_opaque(&mut self, ty: Ty<'tcx>) -> VnIndex {
        let value = Value::Opaque(self.next_opaque());
        self.insert(ty, value)
    }

    /// Create a new `Value::Address` distinct from all the others.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_pointer(&mut self, place: Place<'tcx>, kind: AddressKind) -> VnIndex {
        let pty = place.ty(self.local_decls, self.tcx).ty;
        let ty = match kind {
            AddressKind::Ref(bk) => {
                Ty::new_ref(self.tcx, self.tcx.lifetimes.re_erased, pty, bk.to_mutbl_lossy())
            }
            AddressKind::Address(mutbl) => Ty::new_ptr(self.tcx, pty, mutbl.to_mutbl_lossy()),
        };
        let value = Value::Address { place, kind, provenance: self.next_opaque() };
        self.insert(ty, value)
    }

    #[inline]
    fn get(&self, index: VnIndex) -> &Value<'tcx> {
        &self.values.get_index(index.as_usize()).unwrap().0
    }

    #[inline]
    fn ty(&self, index: VnIndex) -> Ty<'tcx> {
        self.values.get_index(index.as_usize()).unwrap().1
    }

    /// Record that `local` is assigned `value`. `local` must be SSA.
    #[instrument(level = "trace", skip(self))]
    fn assign(&mut self, local: Local, value: VnIndex) {
        debug_assert!(self.ssa.is_ssa(local));
        self.locals[local] = Some(value);
        self.rev_locals[value].push(local);
    }

    fn insert_constant(&mut self, value: Const<'tcx>) -> VnIndex {
        let disambiguator = if value.is_deterministic() {
            // The constant is deterministic, no need to disambiguate.
            0
        } else {
            // Multiple mentions of this constant will yield different values,
            // so assign a different `disambiguator` to ensure they do not get the same `VnIndex`.
            let disambiguator = self.next_opaque();
            // `disambiguator: 0` means deterministic.
            debug_assert_ne!(disambiguator, 0);
            disambiguator
        };
        self.insert(value.ty(), Value::Constant { value, disambiguator })
    }

    fn insert_bool(&mut self, flag: bool) -> VnIndex {
        // Booleans are deterministic.
        let value = Const::from_bool(self.tcx, flag);
        debug_assert!(value.is_deterministic());
        self.insert(self.tcx.types.bool, Value::Constant { value, disambiguator: 0 })
    }

    fn insert_scalar(&mut self, ty: Ty<'tcx>, scalar: Scalar) -> VnIndex {
        // Scalars are deterministic.
        let value = Const::from_scalar(self.tcx, scalar, ty);
        debug_assert!(value.is_deterministic());
        self.insert(ty, Value::Constant { value, disambiguator: 0 })
    }

    fn insert_tuple(&mut self, ty: Ty<'tcx>, values: Vec<VnIndex>) -> VnIndex {
        self.insert(ty, Value::Aggregate(VariantIdx::ZERO, values))
    }

    fn insert_deref(&mut self, ty: Ty<'tcx>, value: VnIndex) -> VnIndex {
        let value = self.insert(ty, Value::Projection(value, ProjectionElem::Deref));
        self.derefs.push(value);
        value
    }

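    /// Flush the cached `Deref` values. A write to memory may have changed the pointees, so the
    /// previously recorded `*_x` values can no longer be assumed to hold: replace each of them
    /// with a fresh opaque value.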
    fn invalidate_derefs(&mut self) {
        for deref in std::mem::take(&mut self.derefs) {
            let opaque = self.next_opaque();
            self.values.get_index_mut2(deref.index()).unwrap().0 = Value::Opaque(opaque);
        }
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn eval_to_const(&mut self, value: VnIndex) -> Option<OpTy<'tcx>> {
        use Value::*;
        let ty = self.ty(value);
        // Avoid computing layouts inside a coroutine, as that can cause cycles.
        let ty = if !self.is_coroutine || ty.is_scalar() {
            self.ecx.layout_of(ty).ok()?
        } else {
            return None;
        };
        let op = match *self.get(value) {
            _ if ty.is_zst() => ImmTy::uninit(ty).into(),

            Opaque(_) => return None,
            // Do not bother evaluating repeat expressions. This would uselessly consume memory.
            Repeat(..) => return None,

            Constant { ref value, disambiguator: _ } => {
                self.ecx.eval_mir_constant(value, DUMMY_SP, None).discard_err()?
            }
            Aggregate(variant, ref fields) => {
                let fields = fields
                    .iter()
                    .map(|&f| self.evaluated[f].as_ref())
                    .collect::<Option<Vec<_>>>()?;
                let variant = if ty.ty.is_enum() { Some(variant) } else { None };
                if matches!(ty.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..))
                {
                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                    let variant_dest = if let Some(variant) = variant {
                        self.ecx.project_downcast(&dest, variant).discard_err()?
                    } else {
                        dest.clone()
                    };
                    for (field_index, op) in fields.into_iter().enumerate() {
                        let field_dest = self
                            .ecx
                            .project_field(&variant_dest, FieldIdx::from_usize(field_index))
                            .discard_err()?;
                        self.ecx.copy_op(op, &field_dest).discard_err()?;
                    }
                    self.ecx
                        .write_discriminant(variant.unwrap_or(FIRST_VARIANT), &dest)
                        .discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                } else {
                    return None;
                }
            }
            RawPtr { pointer, metadata } => {
                let pointer = self.evaluated[pointer].as_ref()?;
                let metadata = self.evaluated[metadata].as_ref()?;

                // Pointers don't have fields, so don't `project_field` them.
                let data = self.ecx.read_pointer(pointer).discard_err()?;
                let meta = if metadata.layout.is_zst() {
                    MemPlaceMeta::None
                } else {
                    MemPlaceMeta::Meta(self.ecx.read_scalar(metadata).discard_err()?)
                };
                let ptr_imm = Immediate::new_pointer_with_meta(data, meta, &self.ecx);
                ImmTy::from_immediate(ptr_imm, ty).into()
            }

            Projection(base, elem) => {
                let base = self.evaluated[base].as_ref()?;
                let elem = match elem {
                    ProjectionElem::Deref => ProjectionElem::Deref,
                    ProjectionElem::Downcast(name, read_variant) => {
                        ProjectionElem::Downcast(name, read_variant)
                    }
                    ProjectionElem::Field(f, ()) => ProjectionElem::Field(f, ty.ty),
                    ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                        ProjectionElem::ConstantIndex { offset, min_length, from_end }
                    }
                    ProjectionElem::Subslice { from, to, from_end } => {
                        ProjectionElem::Subslice { from, to, from_end }
                    }
                    ProjectionElem::OpaqueCast(()) => ProjectionElem::OpaqueCast(ty.ty),
                    ProjectionElem::Subtype(()) => ProjectionElem::Subtype(ty.ty),
                    ProjectionElem::UnwrapUnsafeBinder(()) => {
                        ProjectionElem::UnwrapUnsafeBinder(ty.ty)
                    }
                    // This should have been replaced by a `ConstantIndex` earlier.
                    ProjectionElem::Index(_) => return None,
                };
                self.ecx.project(base, elem).discard_err()?
            }
            Address { place, kind: _, provenance: _ } => {
                if !place.is_indirect_first_projection() {
                    return None;
                }
                let local = self.locals[place.local]?;
                let pointer = self.evaluated[local].as_ref()?;
                let mut mplace = self.ecx.deref_pointer(pointer).discard_err()?;
                for proj in place.projection.iter().skip(1) {
                    // We have no call stack to associate a local with a value, so we cannot
                    // interpret indexing.
                    if matches!(proj, ProjectionElem::Index(_)) {
                        return None;
                    }
                    mplace = self.ecx.project(&mplace, proj).discard_err()?;
                }
                let pointer = mplace.to_ref(&self.ecx);
                ImmTy::from_immediate(pointer, ty).into()
            }

            Discriminant(base) => {
                let base = self.evaluated[base].as_ref()?;
                let variant = self.ecx.read_discriminant(base).discard_err()?;
                let discr_value =
                    self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?;
                discr_value.into()
            }
            Len(slice) => {
                let slice = self.evaluated[slice].as_ref()?;
                let len = slice.len(&self.ecx).discard_err()?;
                ImmTy::from_uint(len, ty).into()
            }
            NullaryOp(null_op, arg_ty) => {
                let arg_layout = self.ecx.layout_of(arg_ty).ok()?;
                if let NullOp::SizeOf | NullOp::AlignOf = null_op
                    && arg_layout.is_unsized()
                {
                    return None;
                }
                let val = match null_op {
                    NullOp::SizeOf => arg_layout.size.bytes(),
                    NullOp::AlignOf => arg_layout.align.abi.bytes(),
                    NullOp::OffsetOf(fields) => self
                        .ecx
                        .tcx
                        .offset_of_subfield(self.typing_env(), arg_layout, fields.iter())
                        .bytes(),
                    NullOp::UbChecks => return None,
                    NullOp::ContractChecks => return None,
                };
                ImmTy::from_uint(val, ty).into()
            }
            UnaryOp(un_op, operand) => {
                let operand = self.evaluated[operand].as_ref()?;
                let operand = self.ecx.read_immediate(operand).discard_err()?;
                let val = self.ecx.unary_op(un_op, &operand).discard_err()?;
                val.into()
            }
            BinaryOp(bin_op, lhs, rhs) => {
                let lhs = self.evaluated[lhs].as_ref()?;
                let lhs = self.ecx.read_immediate(lhs).discard_err()?;
                let rhs = self.evaluated[rhs].as_ref()?;
                let rhs = self.ecx.read_immediate(rhs).discard_err()?;
                let val = self.ecx.binary_op(bin_op, &lhs, &rhs).discard_err()?;
                val.into()
            }
            Cast { kind, value } => match kind {
                CastKind::IntToInt | CastKind::IntToFloat => {
                    let value = self.evaluated[value].as_ref()?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let res = self.ecx.int_to_int_or_float(&value, ty).discard_err()?;
                    res.into()
                }
                CastKind::FloatToFloat | CastKind::FloatToInt => {
                    let value = self.evaluated[value].as_ref()?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let res = self.ecx.float_to_float_or_int(&value, ty).discard_err()?;
                    res.into()
                }
                CastKind::Transmute => {
                    let value = self.evaluated[value].as_ref()?;
                    // `offset` for immediates generally only supports projections that match the
                    // type of the immediate. However, as a HACK, we exploit that it can also do
                    // limited transmutes: it only works between types with the same layout, and
                    // cannot transmute pointers to integers.
                    if value.as_mplace_or_imm().is_right() {
                        let can_transmute = match (value.layout.backend_repr, ty.backend_repr) {
                            (BackendRepr::Scalar(s1), BackendRepr::Scalar(s2)) => {
                                s1.size(&self.ecx) == s2.size(&self.ecx)
                                    && !matches!(s1.primitive(), Primitive::Pointer(..))
                            }
                            (BackendRepr::ScalarPair(a1, b1), BackendRepr::ScalarPair(a2, b2)) => {
                                a1.size(&self.ecx) == a2.size(&self.ecx) &&
                                b1.size(&self.ecx) == b2.size(&self.ecx) &&
                                // The alignment of the second component determines its offset, so that also needs to match.
                                b1.align(&self.ecx) == b2.align(&self.ecx) &&
                                // None of the inputs may be a pointer.
                                !matches!(a1.primitive(), Primitive::Pointer(..))
                                    && !matches!(b1.primitive(), Primitive::Pointer(..))
                            }
                            _ => false,
                        };
                        if !can_transmute {
                            return None;
                        }
                    }
                    value.offset(Size::ZERO, ty, &self.ecx).discard_err()?
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) => {
                    let src = self.evaluated[value].as_ref()?;
                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                    self.ecx.unsize_into(src, ty, &dest).discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                }
                CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
                    let src = self.evaluated[value].as_ref()?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    let ret = self.ecx.ptr_to_ptr(&src, ty).discard_err()?;
                    ret.into()
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::UnsafeFnPointer, _) => {
                    let src = self.evaluated[value].as_ref()?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    ImmTy::from_immediate(*src, ty).into()
                }
                _ => return None,
            },
        };
        Some(op)
    }

    fn project(
        &mut self,
        place_ty: PlaceTy<'tcx>,
        value: VnIndex,
        proj: PlaceElem<'tcx>,
        from_non_ssa_index: &mut bool,
    ) -> Option<(PlaceTy<'tcx>, VnIndex)> {
        let projection_ty = place_ty.projection_ty(self.tcx, proj);
        let proj = match proj {
            ProjectionElem::Deref => {
                if let Some(Mutability::Not) = place_ty.ty.ref_mutability()
                    && projection_ty.ty.is_freeze(self.tcx, self.typing_env())
                {
                    // An immutable borrow `_x` always points to the same value for the
                    // lifetime of the borrow, so we can merge all instances of `*_x`.
                    return Some((projection_ty, self.insert_deref(projection_ty.ty, value)));
                } else {
                    return None;
                }
            }
            ProjectionElem::Downcast(name, index) => ProjectionElem::Downcast(name, index),
            ProjectionElem::Field(f, _) => {
                if let Value::Aggregate(_, fields) = self.get(value) {
                    return Some((projection_ty, fields[f.as_usize()]));
                } else if let Value::Projection(outer_value, ProjectionElem::Downcast(_, read_variant)) = self.get(value)
                    && let Value::Aggregate(written_variant, fields) = self.get(*outer_value)
                    // This pass is not aware of control-flow, so we do not know whether the
                    // replacement we are doing is actually reachable. We could be in any arm of
                    // ```
                    // match Some(x) {
                    //     Some(y) => /* stuff */,
                    //     None => /* other */,
                    // }
                    // ```
                    //
                    // In surface Rust, the current statement would be unreachable.
                    //
                    // However, per the Reference chapter on enums and RFC 2195,
                    // accessing the wrong variant is not UB if the enum has a repr.
                    // So it's not impossible for a series of MIR opts to generate
                    // a downcast to an inactive variant.
                    && written_variant == read_variant
                {
                    return Some((projection_ty, fields[f.as_usize()]));
                }
                ProjectionElem::Field(f, ())
            }
            ProjectionElem::Index(idx) => {
                if let Value::Repeat(inner, _) = self.get(value) {
                    *from_non_ssa_index |= self.locals[idx].is_none();
                    return Some((projection_ty, *inner));
                }
                let idx = self.locals[idx]?;
                ProjectionElem::Index(idx)
            }
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                match self.get(value) {
                    Value::Repeat(inner, _) => {
                        return Some((projection_ty, *inner));
                    }
                    Value::Aggregate(_, operands) => {
                        let offset = if from_end {
                            operands.len() - offset as usize
                        } else {
                            offset as usize
                        };
                        let value = operands.get(offset).copied()?;
                        return Some((projection_ty, value));
                    }
                    _ => {}
                };
                ProjectionElem::ConstantIndex { offset, min_length, from_end }
            }
            ProjectionElem::Subslice { from, to, from_end } => {
                ProjectionElem::Subslice { from, to, from_end }
            }
            ProjectionElem::OpaqueCast(_) => ProjectionElem::OpaqueCast(()),
            ProjectionElem::Subtype(_) => ProjectionElem::Subtype(()),
            ProjectionElem::UnwrapUnsafeBinder(_) => ProjectionElem::UnwrapUnsafeBinder(()),
        };

        let value = self.insert(projection_ty.ty, Value::Projection(value, proj));
        Some((projection_ty, value))
    }

    /// Simplify the projection chain if we know better.
    #[instrument(level = "trace", skip(self))]
    fn simplify_place_projection(&mut self, place: &mut Place<'tcx>, location: Location) {
        // If the projection is indirect, we treat the local as a value, so we can replace it with
        // another local.
        if place.is_indirect_first_projection()
            && let Some(base) = self.locals[place.local]
            && let Some(new_local) = self.try_as_local(base, location)
            && place.local != new_local
        {
            place.local = new_local;
            self.reused_locals.insert(new_local);
        }

        let mut projection = Cow::Borrowed(&place.projection[..]);

        for i in 0..projection.len() {
            let elem = projection[i];
            if let ProjectionElem::Index(idx_local) = elem
                && let Some(idx) = self.locals[idx_local]
            {
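                // If the index is a known constant `N`, replace `place[idx_local]` by the
                // equivalent `ConstantIndex` projection (rendered `place[N of N+1]` in MIR
                // dumps), removing the dependency on `idx_local`.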
                if let Some(offset) = self.evaluated[idx].as_ref()
                    && let Some(offset) = self.ecx.read_target_usize(offset).discard_err()
                    && let Some(min_length) = offset.checked_add(1)
                {
                    projection.to_mut()[i] =
                        ProjectionElem::ConstantIndex { offset, min_length, from_end: false };
                } else if let Some(new_idx_local) = self.try_as_local(idx, location)
                    && idx_local != new_idx_local
                {
                    projection.to_mut()[i] = ProjectionElem::Index(new_idx_local);
                    self.reused_locals.insert(new_idx_local);
                }
            }
        }

        if projection.is_owned() {
            place.projection = self.tcx.mk_place_elems(&projection);
        }

        trace!(?place);
    }

    /// Represent the *value* which would be read from `place`, and point `place` to a preexisting
    /// place with the same value (if that already exists).
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_place_value(
        &mut self,
        place: &mut Place<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        self.simplify_place_projection(place, location);

        // Invariant: `place` and `place_ref` point to the same value, even if they point to
        // different memory locations.
        let mut place_ref = place.as_ref();

        // Invariant: `value` holds the value up to, but excluding, the `index`th projection.
        let mut value = self.locals[place.local]?;
        // Invariant: `value` has type `place_ty`, with optional downcast variant if needed.
        let mut place_ty = PlaceTy::from_ty(self.local_decls[place.local].ty);
        let mut from_non_ssa_index = false;
        for (index, proj) in place.projection.iter().enumerate() {
            if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
                && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
                && let AddressKind::Ref(BorrowKind::Shared) = kind
                && let Some(v) = self.simplify_place_value(&mut pointee, location)
            {
                value = v;
                // `pointee` holds a `Place`, so `ProjectionElem::Index` holds a `Local`.
                // That local is SSA, but we otherwise have no guarantee on that local's value at
                // the current location compared to its value where `pointee` was borrowed.
                if pointee.projection.iter().all(|elem| !matches!(elem, ProjectionElem::Index(_))) {
                    place_ref =
                        pointee.project_deeper(&place.projection[index..], self.tcx).as_ref();
                }
            }
            if let Some(local) = self.try_as_local(value, location) {
                // Both `local` and `Place { local: place.local, projection: projection[..index] }`
                // hold the same value. Therefore, the following place holds the value of the
                // original `place`.
                place_ref = PlaceRef { local, projection: &place.projection[index..] };
            }

            (place_ty, value) = self.project(place_ty, value, proj, &mut from_non_ssa_index)?;
        }

        if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
            && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
            && let AddressKind::Ref(BorrowKind::Shared) = kind
            && let Some(v) = self.simplify_place_value(&mut pointee, location)
        {
            value = v;
            // `pointee` holds a `Place`, so `ProjectionElem::Index` holds a `Local`.
            // That local is SSA, but we otherwise have no guarantee on that local's value at
            // the current location compared to its value where `pointee` was borrowed.
            if pointee.projection.iter().all(|elem| !matches!(elem, ProjectionElem::Index(_))) {
                place_ref = pointee.project_deeper(&[], self.tcx).as_ref();
            }
        }
        if let Some(new_local) = self.try_as_local(value, location) {
            place_ref = PlaceRef { local: new_local, projection: &[] };
        } else if from_non_ssa_index {
            // If access to non-SSA locals is unavoidable, bail out.
            return None;
        }

        if place_ref.local != place.local || place_ref.projection.len() < place.projection.len() {
            // By the invariant on `place_ref`.
            *place = place_ref.project_deeper(&[], self.tcx);
            self.reused_locals.insert(place_ref.local);
        }

        Some(value)
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_operand(
        &mut self,
        operand: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        match *operand {
            Operand::Constant(ref constant) => Some(self.insert_constant(constant.const_)),
            Operand::Copy(ref mut place) | Operand::Move(ref mut place) => {
                let value = self.simplify_place_value(place, location)?;
                if let Some(const_) = self.try_as_constant(value) {
                    *operand = Operand::Constant(Box::new(const_));
                }
                Some(value)
            }
        }
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_rvalue(
        &mut self,
        lhs: &Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let value = match *rvalue {
            // Forward values.
            Rvalue::Use(ref mut operand) => return self.simplify_operand(operand, location),
            Rvalue::CopyForDeref(place) => {
                let mut operand = Operand::Copy(place);
                let val = self.simplify_operand(&mut operand, location);
                *rvalue = Rvalue::Use(operand);
                return val;
            }

            // Roots.
            Rvalue::Repeat(ref mut op, amount) => {
                let op = self.simplify_operand(op, location)?;
                Value::Repeat(op, amount)
            }
            Rvalue::NullaryOp(op, ty) => Value::NullaryOp(op, ty),
            Rvalue::Aggregate(..) => return self.simplify_aggregate(lhs, rvalue, location),
            Rvalue::Ref(_, borrow_kind, ref mut place) => {
                self.simplify_place_projection(place, location);
                return Some(self.new_pointer(*place, AddressKind::Ref(borrow_kind)));
            }
            Rvalue::RawPtr(mutbl, ref mut place) => {
                self.simplify_place_projection(place, location);
                return Some(self.new_pointer(*place, AddressKind::Address(mutbl)));
            }
            Rvalue::WrapUnsafeBinder(ref mut op, _) => {
                let value = self.simplify_operand(op, location)?;
                Value::Cast { kind: CastKind::Transmute, value }
            }

            // Operations.
            Rvalue::Len(ref mut place) => return self.simplify_len(place, location),
            Rvalue::Cast(ref mut kind, ref mut value, to) => {
                return self.simplify_cast(kind, value, to, location);
            }
            Rvalue::BinaryOp(op, box (ref mut lhs, ref mut rhs)) => {
                return self.simplify_binary(op, lhs, rhs, location);
            }
            Rvalue::UnaryOp(op, ref mut arg_op) => {
                return self.simplify_unary(op, arg_op, location);
            }
            Rvalue::Discriminant(ref mut place) => {
                let place = self.simplify_place_value(place, location)?;
                if let Some(discr) = self.simplify_discriminant(place) {
                    return Some(discr);
                }
                Value::Discriminant(place)
            }

            // Unsupported values.
            Rvalue::ThreadLocalRef(..) | Rvalue::ShallowInitBox(..) => return None,
        };
        let ty = rvalue.ty(self.local_decls, self.tcx);
        Some(self.insert(ty, value))
    }

    fn simplify_discriminant(&mut self, place: VnIndex) -> Option<VnIndex> {
        let enum_ty = self.ty(place);
        if enum_ty.is_enum()
            && let Value::Aggregate(variant, _) = *self.get(place)
        {
            let discr = self.ecx.discriminant_for_variant(enum_ty, variant).discard_err()?;
            return Some(self.insert_scalar(discr.layout.ty, discr.to_scalar()));
        }

        None
    }

    fn try_as_place_elem(
        &mut self,
        ty: Ty<'tcx>,
        proj: ProjectionElem<VnIndex, ()>,
        loc: Location,
    ) -> Option<PlaceElem<'tcx>> {
        Some(match proj {
            ProjectionElem::Deref => ProjectionElem::Deref,
            ProjectionElem::Field(idx, ()) => ProjectionElem::Field(idx, ty),
            ProjectionElem::Index(idx) => {
                let Some(local) = self.try_as_local(idx, loc) else {
                    return None;
                };
                self.reused_locals.insert(local);
                ProjectionElem::Index(local)
            }
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                ProjectionElem::ConstantIndex { offset, min_length, from_end }
            }
            ProjectionElem::Subslice { from, to, from_end } => {
                ProjectionElem::Subslice { from, to, from_end }
            }
            ProjectionElem::Downcast(symbol, idx) => ProjectionElem::Downcast(symbol, idx),
            ProjectionElem::OpaqueCast(()) => ProjectionElem::OpaqueCast(ty),
            ProjectionElem::Subtype(()) => ProjectionElem::Subtype(ty),
            ProjectionElem::UnwrapUnsafeBinder(()) => ProjectionElem::UnwrapUnsafeBinder(ty),
        })
    }

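    /// Try to simplify a freshly built aggregate into a copy of an existing value: if every
    /// field is the corresponding field projection, in order, of one and the same value of the
    /// same type (e.g. `S { a: x.a, b: x.b }` where `x: S`), the whole rvalue can be replaced by
    /// a copy of `x`.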
    fn simplify_aggregate_to_copy(
        &mut self,
        lhs: &Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
        fields: &[VnIndex],
        variant_index: VariantIdx,
    ) -> Option<VnIndex> {
        let Some(&first_field) = fields.first() else {
            return None;
        };
        let Value::Projection(copy_from_value, _) = *self.get(first_field) else {
            return None;
        };
        // All fields must correspond one-to-one and come from the same aggregate value.
        if fields.iter().enumerate().any(|(index, &v)| {
            if let Value::Projection(pointer, ProjectionElem::Field(from_index, _)) = *self.get(v)
                && copy_from_value == pointer
                && from_index.index() == index
            {
                return false;
            }
            true
        }) {
            return None;
        }

        let mut copy_from_local_value = copy_from_value;
        if let Value::Projection(pointer, proj) = *self.get(copy_from_value)
            && let ProjectionElem::Downcast(_, read_variant) = proj
        {
            if variant_index == read_variant {
                // When copying a variant, there is no need to downcast.
                copy_from_local_value = pointer;
            } else {
                // The copied variant must be identical.
                return None;
            }
        }

        // Allow introducing places with non-constant offsets, as those are still better than
        // reconstructing an aggregate.
        if self.ty(copy_from_local_value) == rvalue.ty(self.local_decls, self.tcx)
            && let Some(place) = self.try_as_place(copy_from_local_value, location, true)
        {
            // Avoid creating `*a = copy (*b)`, as they might be aliases resulting in overlapping assignments.
            // FIXME: This also avoids any kind of projection, not just derefs. We can add allowed projections.
            if lhs.as_local().is_some() {
                self.reused_locals.insert(place.local);
                *rvalue = Rvalue::Use(Operand::Copy(place));
            }
            return Some(copy_from_local_value);
        }

        None
    }

    fn simplify_aggregate(
        &mut self,
        lhs: &Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let tcx = self.tcx;
        let ty = rvalue.ty(self.local_decls, tcx);

        let Rvalue::Aggregate(box ref kind, ref mut field_ops) = *rvalue else { bug!() };

        if field_ops.is_empty() {
            let is_zst = match *kind {
                AggregateKind::Array(..)
                | AggregateKind::Tuple
                | AggregateKind::Closure(..)
                | AggregateKind::CoroutineClosure(..) => true,
                // Only enums can be non-ZST.
                AggregateKind::Adt(did, ..) => tcx.def_kind(did) != DefKind::Enum,
                // Coroutines are never ZST, as they at least contain the implicit states.
                AggregateKind::Coroutine(..) => false,
                AggregateKind::RawPtr(..) => bug!("MIR for RawPtr aggregate must have 2 fields"),
            };

            if is_zst {
                return Some(self.insert_constant(Const::zero_sized(ty)));
            }
        }

        let fields: Vec<_> = field_ops
            .iter_mut()
            .map(|op| {
                self.simplify_operand(op, location)
                    .unwrap_or_else(|| self.new_opaque(op.ty(self.local_decls, self.tcx)))
            })
            .collect();

        let variant_index = match *kind {
            AggregateKind::Array(..) | AggregateKind::Tuple => {
                assert!(!field_ops.is_empty());
                FIRST_VARIANT
            }
            AggregateKind::Closure(..)
            | AggregateKind::CoroutineClosure(..)
            | AggregateKind::Coroutine(..) => FIRST_VARIANT,
            AggregateKind::Adt(_, variant_index, _, _, None) => variant_index,
            // Do not track unions.
            AggregateKind::Adt(_, _, _, _, Some(_)) => return None,
            AggregateKind::RawPtr(..) => {
                assert_eq!(field_ops.len(), 2);
                let [mut pointer, metadata] = fields.try_into().unwrap();

                // Any thin pointer of matching mutability is fine as the data pointer.
                let mut was_updated = false;
                while let Value::Cast { kind: CastKind::PtrToPtr, value: cast_value } =
                    self.get(pointer)
                    && let ty::RawPtr(from_pointee_ty, from_mtbl) = self.ty(*cast_value).kind()
                    && let ty::RawPtr(_, output_mtbl) = ty.kind()
                    && from_mtbl == output_mtbl
                    && from_pointee_ty.is_sized(self.tcx, self.typing_env())
                {
                    pointer = *cast_value;
                    was_updated = true;
                }

                if was_updated && let Some(op) = self.try_as_operand(pointer, location) {
                    field_ops[FieldIdx::ZERO] = op;
                }

                return Some(self.insert(ty, Value::RawPtr { pointer, metadata }));
            }
        };
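        // An array of identical elements is cheaper to represent (and to write back into MIR) as
        // a `[value; N]` repeat rvalue; only bother doing so for arrays of more than 4 elements.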
        if ty.is_array() && fields.len() > 4 {
            let first = fields[0];
            if fields.iter().all(|&v| v == first) {
                let len = ty::Const::from_target_usize(self.tcx, fields.len().try_into().unwrap());
                if let Some(op) = self.try_as_operand(first, location) {
                    *rvalue = Rvalue::Repeat(op, len);
                }
                return Some(self.insert(ty, Value::Repeat(first, len)));
            }
        }

        if let Some(value) =
            self.simplify_aggregate_to_copy(lhs, rvalue, location, &fields, variant_index)
        {
            return Some(value);
        }

        Some(self.insert(ty, Value::Aggregate(variant_index, fields)))
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_unary(
        &mut self,
        op: UnOp,
        arg_op: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let mut arg_index = self.simplify_operand(arg_op, location)?;
        let arg_ty = self.ty(arg_index);
        let ret_ty = op.ty(self.tcx, arg_ty);

        // PtrMetadata doesn't care about `*const` vs `*mut` vs `&` vs `&mut`,
        // so start by removing those distinctions so we can update the `Operand`.
        if op == UnOp::PtrMetadata {
            let mut was_updated = false;
            loop {
                match self.get(arg_index) {
                    // Pointer casts that preserve metadata, such as
                    // `*const [i32]` <-> `*mut [i32]` <-> `*mut [f32]`.
                    // It's critical that this not eliminate cases like
                    // `*const [T]` -> `*const T` which remove metadata.
                    // We run on potentially-generic MIR, though, so unlike codegen
                    // we can't always know exactly what the metadata are.
                    // To allow things like `*mut (?A, ?T)` <-> `*mut (?B, ?T)`,
                    // it's fine to get a projection as the type.
                    Value::Cast { kind: CastKind::PtrToPtr, value: inner }
                        if self.pointers_have_same_metadata(self.ty(*inner), arg_ty) =>
                    {
                        arg_index = *inner;
                        was_updated = true;
                        continue;
                    }

                    // `&mut *p`, `&raw *p`, etc don't change metadata.
                    Value::Address { place, kind: _, provenance: _ }
                        if let PlaceRef { local, projection: [PlaceElem::Deref] } =
                            place.as_ref()
                            && let Some(local_index) = self.locals[local] =>
                    {
                        arg_index = local_index;
                        was_updated = true;
                        continue;
                    }

                    _ => {
                        if was_updated && let Some(op) = self.try_as_operand(arg_index, location) {
                            *arg_op = op;
                        }
                        break;
                    }
                }
            }
        }

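        // Apply algebraic identities: `!!x` => `x`, `--x` => `x`, `!(a == b)` => `a != b`,
        // `!(a != b)` => `a == b`, and compute `PtrMetadata` directly when the metadata is known.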
        let value = match (op, self.get(arg_index)) {
            (UnOp::Not, Value::UnaryOp(UnOp::Not, inner)) => return Some(*inner),
            (UnOp::Neg, Value::UnaryOp(UnOp::Neg, inner)) => return Some(*inner),
            (UnOp::Not, Value::BinaryOp(BinOp::Eq, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Ne, *lhs, *rhs)
            }
            (UnOp::Not, Value::BinaryOp(BinOp::Ne, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Eq, *lhs, *rhs)
            }
            (UnOp::PtrMetadata, Value::RawPtr { metadata, .. }) => return Some(*metadata),
            // We have an unsizing cast, which assigns the length to wide pointer metadata.
            (
                UnOp::PtrMetadata,
                Value::Cast {
                    kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
                    value: inner,
                },
            ) if let ty::Slice(..) = arg_ty.builtin_deref(true).unwrap().kind()
                && let ty::Array(_, len) = self.ty(*inner).builtin_deref(true).unwrap().kind() =>
            {
                return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
            }
            _ => Value::UnaryOp(op, arg_index),
        };
        Some(self.insert(ret_ty, value))
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_binary(
        &mut self,
        op: BinOp,
        lhs_operand: &mut Operand<'tcx>,
        rhs_operand: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let lhs = self.simplify_operand(lhs_operand, location);
        let rhs = self.simplify_operand(rhs_operand, location);

        // Only short-circuit after having called `simplify_operand` on both operands,
        // for its side effects.
        let mut lhs = lhs?;
        let mut rhs = rhs?;

        let lhs_ty = self.ty(lhs);

        // If we're comparing pointers, remove `PtrToPtr` casts if the from
        // types of both casts and the metadata all match.
        if let BinOp::Eq | BinOp::Ne | BinOp::Lt | BinOp::Le | BinOp::Gt | BinOp::Ge = op
            && lhs_ty.is_any_ptr()
            && let Value::Cast { kind: CastKind::PtrToPtr, value: lhs_value } = self.get(lhs)
            && let Value::Cast { kind: CastKind::PtrToPtr, value: rhs_value } = self.get(rhs)
            && let lhs_from = self.ty(*lhs_value)
            && lhs_from == self.ty(*rhs_value)
            && self.pointers_have_same_metadata(lhs_from, lhs_ty)
        {
            lhs = *lhs_value;
            rhs = *rhs_value;
            if let Some(lhs_op) = self.try_as_operand(lhs, location)
                && let Some(rhs_op) = self.try_as_operand(rhs, location)
            {
                *lhs_operand = lhs_op;
                *rhs_operand = rhs_op;
            }
        }

        if let Some(value) = self.simplify_binary_inner(op, lhs_ty, lhs, rhs) {
            return Some(value);
        }
        let ty = op.ty(self.tcx, lhs_ty, self.ty(rhs));
        let value = Value::BinaryOp(op, lhs, rhs);
        Some(self.insert(ty, value))
    }
1204
1205    fn simplify_binary_inner(
1206        &mut self,
1207        op: BinOp,
1208        lhs_ty: Ty<'tcx>,
1209        lhs: VnIndex,
1210        rhs: VnIndex,
1211    ) -> Option<VnIndex> {
1212        // Floats are weird enough that none of the logic below applies.
1213        let reasonable_ty =
1214            lhs_ty.is_integral() || lhs_ty.is_bool() || lhs_ty.is_char() || lhs_ty.is_any_ptr();
1215        if !reasonable_ty {
1216            return None;
1217        }
1218
1219        let layout = self.ecx.layout_of(lhs_ty).ok()?;
1220
1221        let as_bits = |value: VnIndex| {
1222            let constant = self.evaluated[value].as_ref()?;
1223            if layout.backend_repr.is_scalar() {
1224                let scalar = self.ecx.read_scalar(constant).discard_err()?;
1225                scalar.to_bits(constant.layout.size).discard_err()
1226            } else {
1227                // `constant` is a wide pointer. Do not evaluate to bits.
1228                None
1229            }
1230        };
1231
1232        // Represent the values as `Left(bits)` or `Right(VnIndex)`.
1233        use Either::{Left, Right};
1234        let a = as_bits(lhs).map_or(Right(lhs), Left);
1235        let b = as_bits(rhs).map_or(Right(rhs), Left);
1236
1237        let result = match (op, a, b) {
1238            // Neutral elements.
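            // e.g. `x + 0`, `x | 0`, `x ^ 0`, `x << 0`, and `1 * x` all simplify to `x`.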
1239            (
1240                BinOp::Add
1241                | BinOp::AddWithOverflow
1242                | BinOp::AddUnchecked
1243                | BinOp::BitOr
1244                | BinOp::BitXor,
1245                Left(0),
1246                Right(p),
1247            )
1248            | (
1249                BinOp::Add
1250                | BinOp::AddWithOverflow
1251                | BinOp::AddUnchecked
1252                | BinOp::BitOr
1253                | BinOp::BitXor
1254                | BinOp::Sub
1255                | BinOp::SubWithOverflow
1256                | BinOp::SubUnchecked
1257                | BinOp::Offset
1258                | BinOp::Shl
1259                | BinOp::Shr,
1260                Right(p),
1261                Left(0),
1262            )
1263            | (BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked, Left(1), Right(p))
1264            | (
1265                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::Div,
1266                Right(p),
1267                Left(1),
1268            ) => p,
1269            // Attempt to simplify `x & ALL_ONES` to `x`, with `ALL_ONES` depending on type size.
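            // e.g. for `u8` this folds `x & 0xFF` to `x`, and for `bool` it folds
            // `x & true` to `x`.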
1270            (BinOp::BitAnd, Right(p), Left(ones)) | (BinOp::BitAnd, Left(ones), Right(p))
1271                if ones == layout.size.truncate(u128::MAX)
1272                    || (layout.ty.is_bool() && ones == 1) =>
1273            {
1274                p
1275            }
1276            // Absorbing elements.
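            // e.g. `x * 0`, `x & 0`, `x % 1`, and `0 << x` all fold to the constant 0.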
1277            (
1278                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::BitAnd,
1279                _,
1280                Left(0),
1281            )
1282            | (BinOp::Rem, _, Left(1))
1283            | (
1284                BinOp::Mul
1285                | BinOp::MulWithOverflow
1286                | BinOp::MulUnchecked
1287                | BinOp::Div
1288                | BinOp::Rem
1289                | BinOp::BitAnd
1290                | BinOp::Shl
1291                | BinOp::Shr,
1292                Left(0),
1293                _,
1294            ) => self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size)),
1295            // Attempt to simplify `x | ALL_ONES` to `ALL_ONES`.
1296            (BinOp::BitOr, _, Left(ones)) | (BinOp::BitOr, Left(ones), _)
1297                if ones == layout.size.truncate(u128::MAX)
1298                    || (layout.ty.is_bool() && ones == 1) =>
1299            {
1300                self.insert_scalar(lhs_ty, Scalar::from_uint(ones, layout.size))
1301            }
1302            // Sub/Xor with itself.
1303            (BinOp::Sub | BinOp::SubWithOverflow | BinOp::SubUnchecked | BinOp::BitXor, a, b)
1304                if a == b =>
1305            {
1306                self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size))
1307            }
1308            // Comparison:
1309            // - if both operands can be computed as bits, just compare the bits;
1310            // - if we proved that both operands have the same value, we can insert true/false;
1311            // - otherwise, do nothing, as we do not try to prove inequality.
1312            (BinOp::Eq, Left(a), Left(b)) => self.insert_bool(a == b),
1313            (BinOp::Eq, a, b) if a == b => self.insert_bool(true),
1314            (BinOp::Ne, Left(a), Left(b)) => self.insert_bool(a != b),
1315            (BinOp::Ne, a, b) if a == b => self.insert_bool(false),
1316            _ => return None,
1317        };
1318
1319        if op.is_overflowing() {
1320            let ty = Ty::new_tup(self.tcx, &[self.ty(result), self.tcx.types.bool]);
1321            let false_val = self.insert_bool(false);
1322            Some(self.insert_tuple(ty, vec![result, false_val]))
1323        } else {
1324            Some(result)
1325        }
1326    }
1327
1328    fn simplify_cast(
1329        &mut self,
1330        initial_kind: &mut CastKind,
1331        initial_operand: &mut Operand<'tcx>,
1332        to: Ty<'tcx>,
1333        location: Location,
1334    ) -> Option<VnIndex> {
1335        use CastKind::*;
1336        use rustc_middle::ty::adjustment::PointerCoercion::*;
1337
1338        let mut kind = *initial_kind;
1339        let mut value = self.simplify_operand(initial_operand, location)?;
1340        let mut from = self.ty(value);
1341        if from == to {
1342            return Some(value);
1343        }
1344
1345        if let CastKind::PointerCoercion(ReifyFnPointer | ClosureFnPointer(_), _) = kind {
1346            // Each reification of a generic fn may get a different pointer.
1347            // Do not try to merge them.
1348            return Some(self.new_opaque(to));
1349        }
1350
1351        let mut was_ever_updated = false;
1352        loop {
1353            let mut was_updated_this_iteration = false;
1354
1355            // Transmuting between raw pointers is just a pointer cast so long as
1356            // they have the same metadata type (like `*const i32` <=> `*mut u64`
1357            // or `*mut [i32]` <=> `*const [u64]`), including the common special
1358            // case of `*const T` <=> `*mut T`.
1359            if let Transmute = kind
1360                && from.is_raw_ptr()
1361                && to.is_raw_ptr()
1362                && self.pointers_have_same_metadata(from, to)
1363            {
1364                kind = PtrToPtr;
1365                was_updated_this_iteration = true;
1366            }
1367
1368            // If a cast just casts away the metadata again, then we can get it by
1369            // casting the original thin pointer passed to `from_raw_parts`.
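            // (Illustrative: `ptr::from_raw_parts(_p, _meta) as *const u8` can be
            // computed as `_p as *const u8`, dropping the metadata entirely.)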
1370            if let PtrToPtr = kind
1371                && let Value::RawPtr { pointer, .. } = self.get(value)
1372                && let ty::RawPtr(to_pointee, _) = to.kind()
1373                && to_pointee.is_sized(self.tcx, self.typing_env())
1374            {
1375                from = self.ty(*pointer);
1376                value = *pointer;
1377                was_updated_this_iteration = true;
1378                if from == to {
1379                    return Some(*pointer);
1380                }
1381            }
1382
1383            // Aggregate-then-Transmute can just transmute the original field value,
1384            // so long as all the bytes of the value come from a single field.
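            // (Illustrative: transmuting a freshly built `Wrapping(x)` back to `u32`
            // can reuse `x` directly, since that single field supplies all the bytes.)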
1385            if let Transmute = kind
1386                && let Value::Aggregate(variant_idx, field_values) = self.get(value)
1387                && let Some((field_idx, field_ty)) =
1388                    self.value_is_all_in_one_field(from, *variant_idx)
1389            {
1390                from = field_ty;
1391                value = field_values[field_idx.as_usize()];
1392                was_updated_this_iteration = true;
1393                if field_ty == to {
1394                    return Some(value);
1395                }
1396            }
1397
1398            // Various cast-then-cast cases can be simplified.
1399            if let Value::Cast { kind: inner_kind, value: inner_value } = *self.get(value) {
1400                let inner_from = self.ty(inner_value);
1401                let new_kind = match (inner_kind, kind) {
1402                    // Even if there's a narrowing cast in here that's fine, because
1403                    // things like `*mut [i32] -> *mut i32 -> *const i32` and
1404                    // `*mut [i32] -> *const [i32] -> *const i32` can skip the middle in MIR.
1405                    (PtrToPtr, PtrToPtr) => Some(PtrToPtr),
1406                    // PtrToPtr-then-Transmute is fine so long as the pointer cast is identity:
1407                    // `*const T -> *mut T -> NonNull<T>` is fine, but we need to check for narrowing
1408                    // to skip things like `*const [i32] -> *const i32 -> NonNull<T>`.
1409                    (PtrToPtr, Transmute) if self.pointers_have_same_metadata(inner_from, from) => {
1410                        Some(Transmute)
1411                    }
1412                    // Similarly, for Transmute-then-PtrToPtr. Note that we need to check different
1413                    // variables for their metadata, and thus this can't merge with the previous arm.
1414                    (Transmute, PtrToPtr) if self.pointers_have_same_metadata(from, to) => {
1415                        Some(Transmute)
1416                    }
1417                    // It would be legal to always do this, but we don't want to hide information
1418                    // from the backend that it'd otherwise be able to use for optimizations.
1419                    (Transmute, Transmute)
1420                        if !self.type_may_have_niche_of_interest_to_backend(from) =>
1421                    {
1422                        Some(Transmute)
1423                    }
1424                    _ => None,
1425                };
1426                if let Some(new_kind) = new_kind {
1427                    kind = new_kind;
1428                    from = inner_from;
1429                    value = inner_value;
1430                    was_updated_this_iteration = true;
1431                    if inner_from == to {
1432                        return Some(inner_value);
1433                    }
1434                }
1435            }
1436
1437            if was_updated_this_iteration {
1438                was_ever_updated = true;
1439            } else {
1440                break;
1441            }
1442        }
1443
1444        if was_ever_updated && let Some(op) = self.try_as_operand(value, location) {
1445            *initial_operand = op;
1446            *initial_kind = kind;
1447        }
1448
1449        Some(self.insert(to, Value::Cast { kind, value }))
1450    }
1451
1452    fn simplify_len(&mut self, place: &mut Place<'tcx>, location: Location) -> Option<VnIndex> {
1453        // Trivial case: we are fetching a statically known length.
1454        let place_ty = place.ty(self.local_decls, self.tcx).ty;
1455        if let ty::Array(_, len) = place_ty.kind() {
1456            return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
1457        }
1458
1459        let mut inner = self.simplify_place_value(place, location)?;
1460
1461        // The length information is stored in the wide pointer.
1462        // Reborrowing copies length information from one pointer to the other.
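        // (Illustrative: after a reborrow `_b = &*_a`, the length of `*_b` can be
        // computed from the value already known for `_a`.)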
1463        while let Value::Address { place: borrowed, .. } = self.get(inner)
1464            && let [PlaceElem::Deref] = borrowed.projection[..]
1465            && let Some(borrowed) = self.locals[borrowed.local]
1466        {
1467            inner = borrowed;
1468        }
1469
1470        // We have an unsizing cast, which assigns the length to wide pointer metadata.
1471        if let Value::Cast { kind, value: from } = self.get(inner)
1472            && let CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) = kind
1473            && let Some(from) = self.ty(*from).builtin_deref(true)
1474            && let ty::Array(_, len) = from.kind()
1475            && let Some(to) = self.ty(inner).builtin_deref(true)
1476            && let ty::Slice(..) = to.kind()
1477        {
1478            return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
1479        }
1480
1481        // Fallback: a symbolic `Len`.
1482        Some(self.insert(self.tcx.types.usize, Value::Len(inner)))
1483    }
1484
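    /// Returns whether the two pointer types carry the same metadata type, e.g. both
    /// thin, or both slice-like with `usize` metadata. Metadata types that differ
    /// syntactically are compared again after normalization.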
1485    fn pointers_have_same_metadata(&self, left_ptr_ty: Ty<'tcx>, right_ptr_ty: Ty<'tcx>) -> bool {
1486        let left_meta_ty = left_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1487        let right_meta_ty = right_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1488        if left_meta_ty == right_meta_ty {
1489            true
1490        } else if let Ok(left) =
1491            self.tcx.try_normalize_erasing_regions(self.typing_env(), left_meta_ty)
1492            && let Ok(right) =
1493                self.tcx.try_normalize_erasing_regions(self.typing_env(), right_meta_ty)
1494        {
1495            left == right
1496        } else {
1497            false
1498        }
1499    }
1500
1501    /// Returns `false` if we know for sure that this type has no interesting niche,
1502    /// and thus we can skip transmuting through it without worrying.
1503    ///
1504    /// The backend will emit `assume`s when transmuting between types with niches,
1505    /// so we want to preserve `i32 -> char -> u32` so that this information stays
1506    /// available, but it's fine to skip whole-range-is-valid steps like `A -> u32 -> B`.
1507    fn type_may_have_niche_of_interest_to_backend(&self, ty: Ty<'tcx>) -> bool {
1508        let Ok(layout) = self.ecx.layout_of(ty) else {
1509            // If it's too generic or something, then assume it might be interesting later.
1510            return true;
1511        };
1512
1513        if layout.uninhabited {
1514            return true;
1515        }
1516
1517        match layout.backend_repr {
1518            BackendRepr::Scalar(a) => !a.is_always_valid(&self.ecx),
1519            BackendRepr::ScalarPair(a, b) => {
1520                !a.is_always_valid(&self.ecx) || !b.is_always_valid(&self.ecx)
1521            }
1522            BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => false,
1523        }
1524    }
1525
1526    fn value_is_all_in_one_field(
1527        &self,
1528        ty: Ty<'tcx>,
1529        variant: VariantIdx,
1530    ) -> Option<(FieldIdx, Ty<'tcx>)> {
1531        if let Ok(layout) = self.ecx.layout_of(ty)
1532            && let abi::Variants::Single { index } = layout.variants
1533            && index == variant
1534            && let Some((field_idx, field_layout)) = layout.non_1zst_field(&self.ecx)
1535            && layout.size == field_layout.size
1536        {
1537            // We needed to check the variant to avoid trying to read the tag
1538            // field from an enum whose variants have no fields, since that tag
1539            // field isn't in the `Aggregate` from which we're getting values.
1540            Some((field_idx, field_layout.ty))
1541        } else if let ty::Adt(adt, args) = ty.kind()
1542            && adt.is_struct()
1543            && adt.repr().transparent()
1544            && let [single_field] = adt.non_enum_variant().fields.raw.as_slice()
1545        {
1546            Some((FieldIdx::ZERO, single_field.ty(self.tcx, args)))
1547        } else {
1548            None
1549        }
1550    }
1551}
1552
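/// Attempts to convert the interpreted value `op` into a `ConstValue` that can be
/// propagated into MIR. Returns `None` for unsized values, for values without a
/// scalar or scalar-pair ABI, and for values that would leak pointer provenance.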
1553fn op_to_prop_const<'tcx>(
1554    ecx: &mut InterpCx<'tcx, DummyMachine>,
1555    op: &OpTy<'tcx>,
1556) -> Option<ConstValue> {
1557    // Do not attempt to propagate unsized locals.
1558    if op.layout.is_unsized() {
1559        return None;
1560    }
1561
1562    // If this constant is a ZST, just return an empty value.
1563    if op.layout.is_zst() {
1564        return Some(ConstValue::ZeroSized);
1565    }
1566
1567    // Do not synthesize too large constants. Codegen will just memcpy them, which we'd like to
1568    // avoid.
1569    if !matches!(op.layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
1570        return None;
1571    }
1572
1573    // If this constant has scalar ABI, return it as a `ConstValue::Scalar`.
1574    if let BackendRepr::Scalar(abi::Scalar::Initialized { .. }) = op.layout.backend_repr
1575        && let Some(scalar) = ecx.read_scalar(op).discard_err()
1576    {
1577        if scalar.try_to_scalar_int().is_err() {
1578            // Check that we do not leak a pointer.
1579            // Those pointers may lose part of their identity in codegen.
1580            // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1581            return None;
1582        }
1583        return Some(ConstValue::Scalar(scalar));
1584    }
1585
1586    // If this constant is already represented as an `Allocation`,
1587    // try putting it into global memory to return it.
1588    if let Either::Left(mplace) = op.as_mplace_or_imm() {
1589        let (size, _align) = ecx.size_and_align_of_val(&mplace).discard_err()??;
1590
1591        // Do not try interning a value that contains provenance.
1592        // Due to https://github.com/rust-lang/rust/issues/79738, doing so could lead to bugs.
1593        // FIXME: remove this hack once that issue is fixed.
1594        let alloc_ref = ecx.get_ptr_alloc(mplace.ptr(), size).discard_err()??;
1595        if alloc_ref.has_provenance() {
1596            return None;
1597        }
1598
1599        let pointer = mplace.ptr().into_pointer_or_addr().ok()?;
1600        let (prov, offset) = pointer.prov_and_relative_offset();
1601        let alloc_id = prov.alloc_id();
1602        intern_const_alloc_for_constprop(ecx, alloc_id).discard_err()?;
1603
1604        // `alloc_id` may point to a static. Codegen will choke on an `Indirect` with anything
1605        // but `GlobalAlloc::Memory`, so fall through to copying if needed.
1606        // FIXME: find a way to treat this more uniformly (probably by fixing codegen)
1607        if let GlobalAlloc::Memory(alloc) = ecx.tcx.global_alloc(alloc_id)
1608            // Transmuting a constant is just an offset in the allocation. If the alignment of the
1609            // allocation is not enough, fall back to copying into a properly aligned value.
1610            && alloc.inner().align >= op.layout.align.abi
1611        {
1612            return Some(ConstValue::Indirect { alloc_id, offset });
1613        }
1614    }
1615
1616    // Everything failed: create a new allocation to hold the data.
1617    let alloc_id =
1618        ecx.intern_with_temp_alloc(op.layout, |ecx, dest| ecx.copy_op(op, dest)).discard_err()?;
1619    let value = ConstValue::Indirect { alloc_id, offset: Size::ZERO };
1620
1621    // Check that we do not leak a pointer.
1622    // Those pointers may lose part of their identity in codegen.
1623    // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1624    if ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner().provenance().ptrs().is_empty() {
1625        return Some(value);
1626    }
1627
1628    None
1629}
1630
1631impl<'tcx> VnState<'_, 'tcx> {
1632    /// If either [`Self::try_as_constant`] or [`Self::try_as_place`] succeeds,
1633    /// returns that result as an [`Operand`].
1634    fn try_as_operand(&mut self, index: VnIndex, location: Location) -> Option<Operand<'tcx>> {
1635        if let Some(const_) = self.try_as_constant(index) {
1636            Some(Operand::Constant(Box::new(const_)))
1637        } else if let Some(place) = self.try_as_place(index, location, false) {
1638            self.reused_locals.insert(place.local);
1639            Some(Operand::Copy(place))
1640        } else {
1641            None
1642        }
1643    }
1644
1645    /// If `index` is a `Value::Constant`, return the `Constant` to be put in the MIR.
1646    fn try_as_constant(&mut self, index: VnIndex) -> Option<ConstOperand<'tcx>> {
1647        // This was already constant in MIR, do not change it. If the constant is not
1648        // deterministic, adding an additional mention of it in MIR will not give the same value as
1649        // the former mention.
1650        if let Value::Constant { value, disambiguator: 0 } = *self.get(index) {
1651            debug_assert!(value.is_deterministic());
1652            return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
1653        }
1654
1655        let op = self.evaluated[index].as_ref()?;
1656        if op.layout.is_unsized() {
1657            // Do not attempt to propagate unsized locals.
1658            return None;
1659        }
1660
1661        let value = op_to_prop_const(&mut self.ecx, op)?;
1662
1663        // Check that we do not leak a pointer.
1664        // Those pointers may lose part of their identity in codegen.
1665        // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1666        assert!(!value.may_have_provenance(self.tcx, op.layout.size));
1667
1668        let const_ = Const::Val(value, op.layout.ty);
1669        Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_ })
1670    }
1671
1672    /// Construct a place which holds the same value as `index` and for which all locals strictly
1673    /// dominate `loc`. If you used this place, add its base local to `reused_locals` to remove
1674    /// storage statements.
1675    #[instrument(level = "trace", skip(self), ret)]
1676    fn try_as_place(
1677        &mut self,
1678        mut index: VnIndex,
1679        loc: Location,
1680        allow_complex_projection: bool,
1681    ) -> Option<Place<'tcx>> {
1682        let mut projection = SmallVec::<[PlaceElem<'tcx>; 1]>::new();
1683        loop {
1684            if let Some(local) = self.try_as_local(index, loc) {
1685                projection.reverse();
1686                let place =
1687                    Place { local, projection: self.tcx.mk_place_elems(projection.as_slice()) };
1688                return Some(place);
1689            } else if let Value::Projection(pointer, proj) = *self.get(index)
1690                && (allow_complex_projection || proj.is_stable_offset())
1691                && let Some(proj) = self.try_as_place_elem(self.ty(index), proj, loc)
1692            {
1693                projection.push(proj);
1694                index = pointer;
1695            } else {
1696                return None;
1697            }
1698        }
1699    }
1700
1701    /// If there is a local which is assigned `index`, and its assignment strictly dominates `loc`,
1702    /// return it. If you used this local, add it to `reused_locals` to remove storage statements.
1703    fn try_as_local(&mut self, index: VnIndex, loc: Location) -> Option<Local> {
1704        let other = self.rev_locals.get(index)?;
1705        other
1706            .iter()
1707            .find(|&&other| self.ssa.assignment_dominates(&self.dominators, other, loc))
1708            .copied()
1709    }
1710}
1711
1712impl<'tcx> MutVisitor<'tcx> for VnState<'_, 'tcx> {
1713    fn tcx(&self) -> TyCtxt<'tcx> {
1714        self.tcx
1715    }
1716
1717    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
1718        self.simplify_place_projection(place, location);
1719        if context.is_mutating_use() && place.is_indirect() {
1720            // Non-local mutation may invalidate the tracked derefs.
1721            self.invalidate_derefs();
1722        }
1723        self.super_place(place, context, location);
1724    }
1725
1726    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
1727        self.simplify_operand(operand, location);
1728        self.super_operand(operand, location);
1729    }
1730
1731    fn visit_assign(
1732        &mut self,
1733        lhs: &mut Place<'tcx>,
1734        rvalue: &mut Rvalue<'tcx>,
1735        location: Location,
1736    ) {
1737        self.simplify_place_projection(lhs, location);
1738
1739        let value = self.simplify_rvalue(lhs, rvalue, location);
1740        if let Some(value) = value {
1741            if let Some(const_) = self.try_as_constant(value) {
1742                *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_)));
1743            } else if let Some(place) = self.try_as_place(value, location, false)
1744                && *rvalue != Rvalue::Use(Operand::Move(place))
1745                && *rvalue != Rvalue::Use(Operand::Copy(place))
1746            {
1747                *rvalue = Rvalue::Use(Operand::Copy(place));
1748                self.reused_locals.insert(place.local);
1749            }
1750        }
1751
1752        if lhs.is_indirect() {
1753            // Non-local mutation may invalidate the tracked derefs.
1754            self.invalidate_derefs();
1755        }
1756
1757        if let Some(local) = lhs.as_local()
1758            && self.ssa.is_ssa(local)
1759            && let rvalue_ty = rvalue.ty(self.local_decls, self.tcx)
1760            // FIXME(#112651) `rvalue`'s type may be a subtype of `local`'s type. We can
1761            // only mark `local` as reusable if we have an exact type match.
1762            && self.local_decls[local].ty == rvalue_ty
1763        {
1764            let value = value.unwrap_or_else(|| self.new_opaque(rvalue_ty));
1765            self.assign(local, value);
1766        }
1767    }
1768
1769    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) {
1770        if let Terminator { kind: TerminatorKind::Call { destination, .. }, .. } = terminator {
1771            if let Some(local) = destination.as_local()
1772                && self.ssa.is_ssa(local)
1773            {
1774                let ty = self.local_decls[local].ty;
1775                let opaque = self.new_opaque(ty);
1776                self.assign(local, opaque);
1777            }
1778        }
1779        // Function calls and ASM may invalidate (nested) derefs, so handle them carefully.
1780        // Currently, we only preserve derefs across trivial terminators like SwitchInt and Goto.
1781        let safe_to_preserve_derefs = matches!(
1782            terminator.kind,
1783            TerminatorKind::SwitchInt { .. } | TerminatorKind::Goto { .. }
1784        );
1785        if !safe_to_preserve_derefs {
1786            self.invalidate_derefs();
1787        }
1788        self.super_terminator(terminator, location);
1789    }
1790}
1791
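/// Post-processing visitor: for locals that GVN reused, demotes `Move` operands to
/// `Copy` and strips their storage statements, since reuse may extend a local's
/// live range.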
1792struct StorageRemover<'tcx> {
1793    tcx: TyCtxt<'tcx>,
1794    reused_locals: DenseBitSet<Local>,
1795}
1796
1797impl<'tcx> MutVisitor<'tcx> for StorageRemover<'tcx> {
1798    fn tcx(&self) -> TyCtxt<'tcx> {
1799        self.tcx
1800    }
1801
1802    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _: Location) {
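        // A reused local may have gained extra uses, so a `Move` out of it is no
        // longer guaranteed to be its last use; downgrade it to a `Copy`.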
1803        if let Operand::Move(place) = *operand
1804            && !place.is_indirect_first_projection()
1805            && self.reused_locals.contains(place.local)
1806        {
1807            *operand = Operand::Copy(place);
1808        }
1809    }
1810
1811    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, loc: Location) {
1812        match stmt.kind {
1813            // When removing storage statements, we need to remove both (#107511).
1814            StatementKind::StorageLive(l) | StatementKind::StorageDead(l)
1815                if self.reused_locals.contains(l) =>
1816            {
1817                stmt.make_nop()
1818            }
1819            _ => self.super_statement(stmt, loc),
1820        }
1821    }
1822}