core/array/iter/iter_inner.rs

//! Defines the `IntoIter` owned iterator for arrays.
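//
// Example (editorial illustration, not part of the original source): the public
// `array::IntoIter` built on these internals is what `IntoIterator for [T; N]`
// yields, so code like
//
//     let mut it = [1, 2, 3].into_iter();
//     assert_eq!(it.next(), Some(1));
//     assert_eq!(it.as_slice(), &[2, 3]);
//
// ends up driving the `PolymorphicIter` defined below.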

use crate::mem::MaybeUninit;
use crate::num::NonZero;
use crate::ops::{IndexRange, NeverShortCircuit, Try};
#[cfg(not(feature = "ferrocene_subset"))]
use crate::{fmt, iter};

// Ferrocene addition: imports for certified subset
#[cfg(feature = "ferrocene_subset")]
#[rustfmt::skip]
use crate::iter;

#[allow(private_bounds)]
trait PartialDrop {
    /// # Safety
    /// `self[alive]` are all initialized before the call,
    /// then are never used (without reinitializing them) after it.
    unsafe fn partial_drop(&mut self, alive: IndexRange);
}
impl<T> PartialDrop for [MaybeUninit<T>] {
    unsafe fn partial_drop(&mut self, alive: IndexRange) {
        // SAFETY: We know that all elements within `alive` are properly initialized.
        unsafe { self.get_unchecked_mut(alive).assume_init_drop() }
    }
}
impl<T, const N: usize> PartialDrop for [MaybeUninit<T>; N] {
    unsafe fn partial_drop(&mut self, alive: IndexRange) {
        let slice: &mut [MaybeUninit<T>] = self;
        // SAFETY: Initialized elements in the array are also initialized in the slice.
        unsafe { slice.partial_drop(alive) }
    }
}
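
// Illustrative sketch (editorial addition): with `data: [MaybeUninit<String>; 4]`
// in which only indices 1..3 hold initialized values, a `partial_drop` over the
// range `1..3` drops exactly `data[1]` and `data[2]` and never touches the
// uninitialized slots.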

/// The internals of a by-value array iterator.
///
/// The real `array::IntoIter<T, N>` stores a `PolymorphicIter<[MaybeUninit<T>; N]>`
/// which it unsizes to `PolymorphicIter<[MaybeUninit<T>]>` to iterate.
#[allow(private_bounds)]
pub(super) struct PolymorphicIter<DATA: ?Sized>
where
    DATA: PartialDrop,
{
    /// The elements in `data` that have not been yielded yet.
    ///
    /// Invariants:
    /// - `alive.end <= N`
    ///
    /// (And the `IndexRange` type requires `alive.start <= alive.end`.)
    alive: IndexRange,

    /// This is the array we are iterating over.
    ///
    /// Elements with index `i` where `alive.start <= i < alive.end` have not
    /// been yielded yet and are valid array entries. Elements with indices `i
    /// < alive.start` or `i >= alive.end` have been yielded already and must
    /// not be accessed anymore! Those dead elements might even be in a
    /// completely uninitialized state!
    ///
    /// So the invariants are:
    /// - `data[alive]` is alive (i.e. contains valid elements)
    /// - `data[..alive.start]` and `data[alive.end..]` are dead (i.e. the
    ///   elements were already read and must not be touched anymore!)
    data: DATA,
}
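
// Worked example of the invariant above (editorial illustration, assuming a
// hypothetical iterator over `[10, 20, 30]`):
//
//   fresh:                alive = 0..3, data = [10, 20, 30]   (all alive)
//   after one `next()`:   alive = 1..3, data = [__, 20, 30]   (index 0 is dead)
//   after `next_back()`:  alive = 1..2, data = [__, 20, __]   (indices 0 and 2 dead)
//
// `Drop` then only needs to drop `data[alive]`, i.e. `data[1]` here.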

#[allow(private_bounds)]
impl<DATA: ?Sized> PolymorphicIter<DATA>
where
    DATA: PartialDrop,
{
    #[inline]
    pub(super) const fn len(&self) -> usize {
        self.alive.len()
    }
}

#[allow(private_bounds)]
impl<DATA: ?Sized> Drop for PolymorphicIter<DATA>
where
    DATA: PartialDrop,
{
    #[inline]
    fn drop(&mut self) {
        // SAFETY: by our type invariant `self.alive` is exactly the initialized
        // items, and this is drop so nothing can use the items afterwards.
        unsafe { self.data.partial_drop(self.alive.clone()) }
    }
}

impl<T, const N: usize> PolymorphicIter<[MaybeUninit<T>; N]> {
    #[inline]
    pub(super) const fn empty() -> Self {
        Self { alive: IndexRange::zero_to(0), data: [const { MaybeUninit::uninit() }; N] }
    }

    /// # Safety
    /// `data[alive]` are all initialized.
    #[inline]
    pub(super) const unsafe fn new_unchecked(alive: IndexRange, data: [MaybeUninit<T>; N]) -> Self {
        Self { alive, data }
    }
}
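
// Hedged construction sketch (editorial illustration): the parent `iter` module
// that defines the public `array::IntoIter` is the expected caller of
// `new_unchecked` (hence the `pub(super)` visibility). Conceptually it does
// something along the lines of:
//
//     // SAFETY (sketch): every element of a plain `[T; N]` is initialized, so
//     // reinterpreting it as `[MaybeUninit<T>; N]` makes `0..N` a valid alive range.
//     let inner = unsafe {
//         PolymorphicIter::new_unchecked(IndexRange::zero_to(N), mem::transmute_copy(&array))
//     };
//
// The exact call site may differ; this is only meant to show how the safety
// contract of `new_unchecked` is discharged.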

impl<T: Clone, const N: usize> Clone for PolymorphicIter<[MaybeUninit<T>; N]> {
    #[inline]
    fn clone(&self) -> Self {
        // Note, we don't really need to match the exact same alive range, so
        // we can just clone into offset 0 regardless of where `self` is.
        let mut new = Self::empty();

        fn clone_into_new<U: Clone>(
            source: &PolymorphicIter<[MaybeUninit<U>]>,
            target: &mut PolymorphicIter<[MaybeUninit<U>]>,
        ) {
            // Clone all alive elements.
            for (src, dst) in iter::zip(source.as_slice(), &mut target.data) {
                // Write a clone into the new array, then update its alive range.
                // If cloning panics, we'll correctly drop the previous items.
                dst.write(src.clone());
                // This addition cannot overflow as we're iterating a slice,
                // the length of which always fits in usize.
                target.alive = IndexRange::zero_to(target.alive.end() + 1);
            }
        }

        clone_into_new(self, &mut new);
        new
    }
}
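
// Design note (editorial, hedged): routing the copy through `clone_into_new`,
// which works on the unsized `PolymorphicIter<[MaybeUninit<U>]>`, plausibly lets
// one instantiation of the cloning loop be shared across every array length `N`.
// The panic safety follows from the ordering above: `target.alive` is only
// extended *after* `dst.write(...)` succeeds, so if a `clone()` panics the
// partially filled `target` still upholds its invariant and its `Drop` impl
// frees exactly the clones made so far.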

impl<T> PolymorphicIter<[MaybeUninit<T>]> {
    #[inline]
    pub(super) fn as_slice(&self) -> &[T] {
        // SAFETY: We know that all elements within `alive` are properly initialized.
        unsafe {
            let slice = self.data.get_unchecked(self.alive.clone());
            slice.assume_init_ref()
        }
    }

    #[inline]
    #[cfg(not(feature = "ferrocene_subset"))]
    pub(super) fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: We know that all elements within `alive` are properly initialized.
        unsafe {
            let slice = self.data.get_unchecked_mut(self.alive.clone());
            slice.assume_init_mut()
        }
    }
}

#[cfg(not(feature = "ferrocene_subset"))]
impl<T: fmt::Debug> fmt::Debug for PolymorphicIter<[MaybeUninit<T>]> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Only print the elements that were not yielded yet: we cannot
        // access the yielded elements anymore.
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

/// Iterator-equivalent methods.
///
/// We don't implement the actual iterator traits because we want to implement
/// things like `try_fold` that require `Self: Sized` (which we're not).
impl<T> PolymorphicIter<[MaybeUninit<T>]> {
    #[inline]
    pub(super) fn next(&mut self) -> Option<T> {
        // Get the next index from the front.
        //
        // Increasing `alive.start` by 1 maintains the invariant regarding
        // `alive`. However, due to this change, for a short time, the alive
        // zone is not `data[alive]` anymore, but `data[idx..alive.end]`.
        self.alive.next().map(|idx| {
            // Read the element from the array.
            // SAFETY: `idx` is an index into the former "alive" region of the
            // array. Reading this element means that `data[idx]` is regarded as
            // dead now (i.e. do not touch). As `idx` was the start of the
            // alive-zone, the alive zone is now `data[alive]` again, restoring
            // all invariants.
            unsafe { self.data.get_unchecked(idx).assume_init_read() }
        })
    }

    #[inline]
    pub(super) fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.len();
        (len, Some(len))
    }

    #[inline]
    pub(super) fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        // This also moves the start, which marks them as conceptually "dropped",
        // so if anything goes bad then our drop impl won't double-free them.
        let range_to_drop = self.alive.take_prefix(n);
        let remaining = n - range_to_drop.len();

        // SAFETY: These elements are currently initialized, so it's fine to drop them.
        unsafe {
            let slice = self.data.get_unchecked_mut(range_to_drop);
            slice.assume_init_drop();
        }

        NonZero::new(remaining).map_or(Ok(()), Err)
    }
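
    // Worked example (editorial illustration): if only 2 elements are still
    // alive and `advance_by(5)` is called, `take_prefix(5)` yields a range of
    // length 2, those 2 elements are dropped, and `remaining` is 3, so the
    // method returns `Err(NonZero::new(3).unwrap())`. With `n <= len()` the
    // subtraction is 0 and the result is `Ok(())`.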

    #[inline]
    pub(super) fn fold<B>(&mut self, init: B, f: impl FnMut(B, T) -> B) -> B {
        self.try_fold(init, NeverShortCircuit::wrap_mut_2(f)).0
    }

    #[inline]
    pub(super) fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R
    where
        F: FnMut(B, T) -> R,
        R: Try<Output = B>,
    {
        // `alive` is an `IndexRange`, not an arbitrary iterator, so we can
        // trust that its `try_fold` isn't going to do something weird like
        // call the fold-er multiple times for the same index.
        let data = &mut self.data;
        self.alive.try_fold(init, move |accum, idx| {
            // SAFETY: `idx` has been removed from the alive range, so we're not
            // going to drop it (even if `f` panics) and thus it's ok to give
            // out ownership of that item to `f` to handle.
            let elem = unsafe { data.get_unchecked(idx).assume_init_read() };
            f(accum, elem)
        })
    }

    #[inline]
    pub(super) fn next_back(&mut self) -> Option<T> {
        // Get the next index from the back.
        //
        // Decreasing `alive.end` by 1 maintains the invariant regarding
        // `alive`. However, due to this change, for a short time, the alive
        // zone is not `data[alive]` anymore, but `data[alive.start..=idx]`.
        self.alive.next_back().map(|idx| {
            // Read the element from the array.
            // SAFETY: `idx` is an index into the former "alive" region of the
            // array. Reading this element means that `data[idx]` is regarded as
            // dead now (i.e. do not touch). As `idx` was the end of the
            // alive-zone, the alive zone is now `data[alive]` again, restoring
            // all invariants.
            unsafe { self.data.get_unchecked(idx).assume_init_read() }
        })
    }

    #[inline]
    pub(super) fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        // This also moves the end, which marks them as conceptually "dropped",
        // so if anything goes bad then our drop impl won't double-free them.
        let range_to_drop = self.alive.take_suffix(n);
        let remaining = n - range_to_drop.len();

        // SAFETY: These elements are currently initialized, so it's fine to drop them.
        unsafe {
            let slice = self.data.get_unchecked_mut(range_to_drop);
            slice.assume_init_drop();
        }

        NonZero::new(remaining).map_or(Ok(()), Err)
    }

    #[inline]
    pub(super) fn rfold<B>(&mut self, init: B, f: impl FnMut(B, T) -> B) -> B {
        self.try_rfold(init, NeverShortCircuit::wrap_mut_2(f)).0
    }

    #[inline]
    pub(super) fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R
    where
        F: FnMut(B, T) -> R,
        R: Try<Output = B>,
    {
        // `alive` is an `IndexRange`, not an arbitrary iterator, so we can
        // trust that its `try_rfold` isn't going to do something weird like
        // call the fold-er multiple times for the same index.
        let data = &mut self.data;
        self.alive.try_rfold(init, move |accum, idx| {
            // SAFETY: `idx` has been removed from the alive range, so we're not
            // going to drop it (even if `f` panics) and thus it's ok to give
            // out ownership of that item to `f` to handle.
            let elem = unsafe { data.get_unchecked(idx).assume_init_read() };
            f(accum, elem)
        })
    }
}