// rustc_middle/mir/interpret/allocation/provenance_map.rs

use std::cmp;
use std::ops::Range;

use rustc_abi::{HasDataLayout, Size};
use rustc_data_structures::sorted_map::SortedMap;
use rustc_macros::HashStable;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use tracing::trace;

use super::{AllocRange, CtfeProvenance, Provenance, alloc_range};

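/// Stores the provenance of pointers held in memory, at two granularities: `ptrs` records
/// provenance that applies to an entire pointer-size range starting at the given offset,
/// while `bytes` records provenance of individual bytes (e.g. after a pointer has been
/// partially overwritten or copied at a sub-pointer size). The two maps are disjoint, and
/// `bytes` is only populated during interpretation; it is `None` for interned allocations.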
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
#[derive(HashStable)]
pub struct ProvenanceMap<Prov = CtfeProvenance> {
    ptrs: SortedMap<Size, Prov>,
    bytes: Option<Box<SortedMap<Size, (Prov, u8)>>>,
}

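// Only the `ptrs` map is (de)serialized: by the time an allocation is encoded, byte-level
// provenance is expected to be gone, as the assert in `encode` checks.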
impl<D: Decoder, Prov: Provenance + Decodable<D>> Decodable<D> for ProvenanceMap<Prov> {
    fn decode(d: &mut D) -> Self {
        Self { ptrs: Decodable::decode(d), bytes: None }
    }
}
impl<S: Encoder, Prov: Provenance + Encodable<S>> Encodable<S> for ProvenanceMap<Prov> {
    fn encode(&self, s: &mut S) {
        let Self { ptrs, bytes } = self;
        assert!(bytes.is_none());
        ptrs.encode(s)
    }
}

impl<Prov> ProvenanceMap<Prov> {
    pub fn new() -> Self {
        ProvenanceMap { ptrs: SortedMap::new(), bytes: None }
    }

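    /// Builds a map from a list of whole-pointer provenance entries. The caller must ensure
    /// the entries are already sorted by offset, as `from_presorted_elements` expects.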
    pub fn from_presorted_ptrs(r: Vec<(Size, Prov)>) -> Self {
        ProvenanceMap { ptrs: SortedMap::from_presorted_elements(r), bytes: None }
    }
}

impl ProvenanceMap {
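    /// Gives access to the ptr-sized provenance entries. Only valid on interned allocations,
    /// where all byte-level provenance has been merged back into whole pointers.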
    #[inline]
    pub fn ptrs(&self) -> &SortedMap<Size, CtfeProvenance> {
        assert!(self.bytes.is_none(), "`ptrs()` called on non-interned allocation");
        &self.ptrs
    }
}

impl<Prov: Provenance> ProvenanceMap<Prov> {
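    /// Computes the range of offsets at which a pointer-sized entry could start and still
    /// overlap the given range: such an entry may begin up to `pointer_size - 1` bytes
    /// before `range.start`.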
    fn adjusted_range_ptrs(range: AllocRange, cx: &impl HasDataLayout) -> Range<Size> {
        let adjusted_start = Size::from_bytes(
            range.start.bytes().saturating_sub(cx.data_layout().pointer_size().bytes() - 1),
        );
        adjusted_start..range.end()
    }

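    /// Returns all ptr-sized provenance entries that overlap the given range.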
    pub(super) fn range_ptrs_get(
        &self,
        range: AllocRange,
        cx: &impl HasDataLayout,
    ) -> &[(Size, Prov)] {
        self.ptrs.range(Self::adjusted_range_ptrs(range, cx))
    }

    fn range_ptrs_is_empty(&self, range: AllocRange, cx: &impl HasDataLayout) -> bool {
        self.ptrs.range_is_empty(Self::adjusted_range_ptrs(range, cx))
    }

    fn range_bytes_get(&self, range: AllocRange) -> &[(Size, (Prov, u8))] {
        if let Some(bytes) = self.bytes.as_ref() {
            bytes.range(range.start..range.end())
        } else {
            &[]
        }
    }

    fn range_bytes_is_empty(&self, range: AllocRange) -> bool {
        self.bytes.as_ref().is_none_or(|bytes| bytes.range_is_empty(range.start..range.end()))
    }

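    /// Returns the provenance of the byte at `offset`, together with the index of that byte
    /// within the pointer it came from, if there is any provenance for it.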
    pub fn get_byte(&self, offset: Size, cx: &impl HasDataLayout) -> Option<(Prov, u8)> {
        let prov = self.range_ptrs_get(alloc_range(offset, Size::from_bytes(1)), cx);
        debug_assert!(prov.len() <= 1);
        if let Some(entry) = prov.first() {
            // The `ptrs` and `bytes` maps are disjoint, so this byte cannot also appear in `bytes`.
            debug_assert!(self.bytes.as_ref().is_none_or(|b| !b.contains_key(&offset)));
            Some((entry.1, (offset - entry.0).bytes() as u8))
        } else {
            self.bytes.as_ref().and_then(|b| b.get(&offset).copied())
        }
    }

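    /// Returns all provenance (whole-pointer and byte-level) overlapping the given range.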
    pub fn get_range(
        &self,
        cx: &impl HasDataLayout,
        range: AllocRange,
    ) -> impl Iterator<Item = Prov> {
        let ptr_provs = self.range_ptrs_get(range, cx).iter().map(|(_, p)| *p);
        let byte_provs = self.range_bytes_get(range).iter().map(|(_, (p, _))| *p);
        ptr_provs.chain(byte_provs)
    }

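    /// Attempts to merge all byte-level provenance back into whole-pointer entries.
    /// Returns `true` on success (afterwards `bytes` is `None`); returns `false` if some
    /// fragments could not be reassembled into a complete, contiguous pointer.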
    pub fn merge_bytes(&mut self, cx: &impl HasDataLayout) -> bool {
        let Some(bytes) = self.bytes.as_deref_mut() else {
            return true;
        };
        let ptr_size = cx.data_layout().pointer_size();
        while let Some((offset, (prov, _))) = bytes.iter().next().copied() {
            // Check if this fragment starts a complete pointer: the next `ptr_size` bytes must
            // all carry the same provenance, with consecutive fragment indices.
            let range = offset..offset + ptr_size;
            let frags = bytes.range(range.clone());
            if frags.len() != ptr_size.bytes_usize() {
                return false;
            }
            for (idx, (_offset, (frag_prov, frag_idx))) in frags.iter().copied().enumerate() {
                if frag_prov != prov || frag_idx != idx as u8 {
                    return false;
                }
            }
            // All fragments line up; replace them by a single whole-pointer entry.
            bytes.remove_range(range);
            self.ptrs.insert(offset, prov);
        }
        self.bytes = None;
        true
    }

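    /// Returns the whole-pointer provenance that starts exactly at `offset`, if any.
    /// Byte-level fragments are not considered.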
    pub fn get_ptr(&self, offset: Size) -> Option<Prov> {
        self.ptrs.get(&offset).copied()
    }

    pub fn range_empty(&self, range: AllocRange, cx: &impl HasDataLayout) -> bool {
        self.range_ptrs_is_empty(range, cx) && self.range_bytes_is_empty(range)
    }

    pub fn provenances(&self) -> impl Iterator<Item = Prov> {
        let bytes = self.bytes.iter().flat_map(|b| b.values().map(|(p, _i)| p));
        self.ptrs.values().chain(bytes).copied()
    }

    pub fn insert_ptr(&mut self, offset: Size, prov: Prov, cx: &impl HasDataLayout) {
        debug_assert!(self.range_empty(alloc_range(offset, cx.data_layout().pointer_size()), cx));
        self.ptrs.insert(offset, prov);
    }

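    /// Removes all provenance inside the given range. If a whole-pointer entry straddles an
    /// edge of the range, the part that remains outside is kept as byte-level fragments.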
    pub fn clear(&mut self, range: AllocRange, cx: &impl HasDataLayout) {
        let start = range.start;
        let end = range.end();
        // Byte-level provenance never extends past a single byte, so it can be removed bluntly.
        if let Some(bytes) = self.bytes.as_mut() {
            bytes.remove_range(start..end);
        }

        let pointer_size = cx.data_layout().pointer_size();

        // Find the start of the first and the end of the last pointer-sized entry overlapping
        // this range.
        let (first, last) = {
            if self.range_ptrs_is_empty(range, cx) {
                // No whole-pointer provenance overlaps this range, so we are done.
                return;
            }

            let provenance = self.range_ptrs_get(range, cx);
            (provenance.first().unwrap().0, provenance.last().unwrap().0 + pointer_size)
        };

        // If a pointer sticks out on either side, keep its bytes outside the cleared range as
        // byte-level fragments.
        if first < start {
            let prov = self.ptrs[&first];
            let bytes = self.bytes.get_or_insert_with(Box::default);
            for offset in first..start {
                bytes.insert(offset, (prov, (offset - first).bytes() as u8));
            }
        }
        if last > end {
            let begin_of_last = last - pointer_size;
            let prov = self.ptrs[&begin_of_last];
            let bytes = self.bytes.get_or_insert_with(Box::default);
            for offset in end..last {
                bytes.insert(offset, (prov, (offset - begin_of_last).bytes() as u8));
            }
        }

        // Forget all whole-pointer entries that overlapped the range.
        self.ptrs.remove_range(first..last);
    }

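    /// Overwrites all provenance in the given range with wildcard provenance. Pointers that
    /// only partially overlap the range are turned into byte-level fragments so that their
    /// provenance outside the range is preserved.
    /// Requires `Prov::WILDCARD` to be available; panics otherwise.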
    pub fn write_wildcards(&mut self, cx: &impl HasDataLayout, range: AllocRange) {
        let wildcard = Prov::WILDCARD.unwrap();

        let bytes = self.bytes.get_or_insert_with(Box::default);

        // For pointers overlapping the start or end of the range, keep the parts that lie
        // outside the range as byte-level fragments.
        let ptr_range = Self::adjusted_range_ptrs(range, cx);
        let ptrs = self.ptrs.range(ptr_range.clone());
        if let Some((offset, prov)) = ptrs.first().copied() {
            for byte_ofs in offset..range.start {
                bytes.insert(byte_ofs, (prov, (byte_ofs - offset).bytes() as u8));
            }
        }
        if let Some((offset, prov)) = ptrs.last().copied() {
            for byte_ofs in range.end()..offset + cx.data_layout().pointer_size() {
                bytes.insert(byte_ofs, (prov, (byte_ofs - offset).bytes() as u8));
            }
        }
        self.ptrs.remove_range(ptr_range);

        // Every byte inside the range gets wildcard provenance.
        for offset in range.start..range.end() {
            bytes.insert(offset, (wildcard, 0));
        }
    }
}

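/// A partial, owned list of provenance prepared for being transferred into another allocation.
/// Offsets are already shifted to the destination.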
pub struct ProvenanceCopy<Prov> {
    dest_ptrs: Option<Box<[(Size, Prov)]>>,
    dest_bytes: Option<Box<[(Size, (Prov, u8))]>>,
}

impl<Prov: Provenance> ProvenanceMap<Prov> {
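    /// Prepares to copy the provenance of the `src` range into `count` consecutive copies
    /// starting at offset `dest` in the destination. Pointers that only partially overlap
    /// `src` are copied as byte-level fragments. The result is applied with `apply_copy`.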
    pub fn prepare_copy(
        &self,
        src: AllocRange,
        dest: Size,
        count: u64,
        cx: &impl HasDataLayout,
    ) -> ProvenanceCopy<Prov> {
        // Maps an offset inside `src` to the corresponding offset in the `idx`-th copy at `dest`.
        let shift_offset = move |idx, offset| {
            let dest_offset = dest + src.size * idx;
            (offset - src.start) + dest_offset
        };
        let ptr_size = cx.data_layout().pointer_size();

        // Collect the whole-pointer entries that are fully contained in `src`.
        let mut dest_ptrs_box = None;
        if src.size >= ptr_size {
            let adjusted_end = Size::from_bytes(src.end().bytes() - (ptr_size.bytes() - 1));
            let ptrs = self.ptrs.range(src.start..adjusted_end);
            let mut dest_ptrs = Vec::with_capacity(ptrs.len() * (count as usize));
            for i in 0..count {
                dest_ptrs
                    .extend(ptrs.iter().map(|&(offset, reloc)| (shift_offset(i, offset), reloc)));
            }
            debug_assert_eq!(dest_ptrs.len(), dest_ptrs.capacity());
            dest_ptrs_box = Some(dest_ptrs.into_boxed_slice());
        };

        // Collect byte-level provenance: existing fragments inside `src`, plus fragments for
        // pointers that only partially overlap the start or end of `src`.
        let mut dest_bytes_box = None;
        let begin_overlap = self.range_ptrs_get(alloc_range(src.start, Size::ZERO), cx).first();
        let end_overlap = self.range_ptrs_get(alloc_range(src.end(), Size::ZERO), cx).first();
        if begin_overlap.is_some() || end_overlap.is_some() || self.bytes.is_some() {
            let mut bytes: Vec<(Size, (Prov, u8))> = Vec::new();
            if let Some(entry) = begin_overlap {
                trace!("start overlapping entry: {entry:?}");
                let entry_end = cmp::min(entry.0 + ptr_size, src.end());
                for offset in src.start..entry_end {
                    bytes.push((offset, (entry.1, (offset - entry.0).bytes() as u8)));
                }
            } else {
                trace!("no start overlapping entry");
            }

            bytes.extend(self.range_bytes_get(src));

            if let Some(entry) = end_overlap {
                trace!("end overlapping entry: {entry:?}");
                let entry_start = cmp::max(entry.0, src.start);
                for offset in entry_start..src.end() {
                    if bytes.last().is_none_or(|bytes_entry| bytes_entry.0 < offset) {
                        bytes.push((offset, (entry.1, (offset - entry.0).bytes() as u8)));
                    } else {
                        // This byte was already pushed by the begin-overlap loop above, so the two
                        // overlapping pointers must be one and the same.
                        assert!(entry.0 <= src.start);
                    }
                }
            } else {
                trace!("no end overlapping entry");
            }
            trace!("byte provenances: {bytes:?}");

            let mut dest_bytes = Vec::with_capacity(bytes.len() * (count as usize));
            for i in 0..count {
                dest_bytes
                    .extend(bytes.iter().map(|&(offset, reloc)| (shift_offset(i, offset), reloc)));
            }
            debug_assert_eq!(dest_bytes.len(), dest_bytes.capacity());
            dest_bytes_box = Some(dest_bytes.into_boxed_slice());
        }

        ProvenanceCopy { dest_ptrs: dest_ptrs_box, dest_bytes: dest_bytes_box }
    }

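    /// Applies a provenance copy prepared by `prepare_copy`, inserting the precomputed
    /// whole-pointer and byte-level entries into this map.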
    pub fn apply_copy(&mut self, copy: ProvenanceCopy<Prov>) {
        if let Some(dest_ptrs) = copy.dest_ptrs {
            self.ptrs.insert_presorted(dest_ptrs.into());
        }
        if let Some(dest_bytes) = copy.dest_bytes
            && !dest_bytes.is_empty()
        {
            self.bytes.get_or_insert_with(Box::default).insert_presorted(dest_bytes.into());
        }
    }
}