// alloc/vec/into_iter.rs — `vec::IntoIter`, the by-value iterator over a `Vec`.

1use core::iter::{
2    FusedIterator, InPlaceIterable, SourceIter, TrustedFused, TrustedLen,
3    TrustedRandomAccessNoCoerce,
4};
5use core::marker::PhantomData;
6use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
7use core::num::NonZero;
8#[cfg(not(no_global_oom_handling))]
9use core::ops::Deref;
10use core::panic::UnwindSafe;
11use core::ptr::{self, NonNull};
12use core::{array, fmt, slice};
13
14#[cfg(not(no_global_oom_handling))]
15use super::AsVecIntoIter;
16use crate::alloc::{Allocator, Global};
17#[cfg(not(no_global_oom_handling))]
18use crate::collections::VecDeque;
19use crate::raw_vec::RawVec;
20
/// Reinterprets `$place` (a raw-pointer field known to be non-null) as a
/// `NonNull<$t>`, either by value or as a mutable reference, by casting a
/// pointer to the place itself. Used to treat `IntoIter::end` as `NonNull<T>`
/// in the non-ZST case without changing the field's declared type.
macro non_null {
    (mut $place:expr, $t:ident) => {{
        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
        unsafe { &mut *((&raw mut $place) as *mut NonNull<$t>) }
    }},
    ($place:expr, $t:ident) => {{
        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
        unsafe { *((&raw const $place) as *const NonNull<$t>) }
    }},
}
31
/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // Start of the backing allocation; `Drop` rebuilds a `RawVec` from this.
    pub(super) buf: NonNull<T>,
    // Declares ownership of `T` for drop-check purposes.
    pub(super) phantom: PhantomData<T>,
    // Capacity of the backing allocation, in elements.
    pub(super) cap: usize,
    // the drop impl reconstructs a RawVec from buf, cap and alloc
    // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop
    pub(super) alloc: ManuallyDrop<A>,
    // First not-yet-yielded element.
    pub(super) ptr: NonNull<T>,
    /// If T is a ZST, this is actually ptr+len. This encoding is picked so that
    /// ptr == end is a quick test for the Iterator being empty, that works
    /// for both ZST and non-ZST.
    /// For non-ZSTs the pointer is treated as `NonNull<T>`
    pub(super) end: *const T,
}
62
// Manually mirroring what `Vec` has,
// because otherwise we get `T: RefUnwindSafe` from `NonNull`.
// (The `NonNull` fields would otherwise make the auto-derived bound stricter
// than `Vec`'s, which requires only `T: UnwindSafe`.)
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: UnwindSafe, A: Allocator + UnwindSafe> UnwindSafe for IntoIter<T, A> {}
67
68#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
69impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
70    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
71        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
72    }
73}
74
impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: `ptr` points at the first remaining element and `len()`
        // counts exactly the initialized, not-yet-yielded elements.
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len()) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// into_iter.as_mut_slice()[2] = 'z';
    /// assert_eq!(into_iter.next().unwrap(), 'a');
    /// assert_eq!(into_iter.next().unwrap(), 'b');
    /// assert_eq!(into_iter.next().unwrap(), 'z');
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: the raw slice covers only the remaining elements, to which
        // `&mut self` grants exclusive access.
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    /// Raw-pointer form of [`Self::as_mut_slice`]: the remaining elements as
    /// a `*mut [T]`, without creating a reference.
    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len())
    }

    /// Drops remaining elements and relinquishes the backing allocation.
    ///
    /// This method guarantees it won't panic before relinquishing the backing
    /// allocation.
    ///
    /// This is roughly equivalent to the following, but more efficient
    ///
    /// ```
    /// # let mut vec = Vec::<u8>::with_capacity(10);
    /// # let ptr = vec.as_mut_ptr();
    /// # let mut into_iter = vec.into_iter();
    /// let mut into_iter = std::mem::replace(&mut into_iter, Vec::new().into_iter());
    /// (&mut into_iter).for_each(drop);
    /// std::mem::forget(into_iter);
    /// # // FIXME(https://github.com/rust-lang/miri/issues/3670):
    /// # // use -Zmiri-disable-leak-check instead of unleaking in tests meant to leak.
    /// # drop(unsafe { Vec::<u8>::from_raw_parts(ptr, 0, 10) });
    /// ```
    ///
    /// This method is used by in-place iteration, refer to the vec::in_place_collect
    /// documentation for an overview.
    #[cfg(not(no_global_oom_handling))]
    pub(super) fn forget_allocation_drop_remaining(&mut self) {
        let remaining = self.as_raw_mut_slice();

        // overwrite the individual fields instead of creating a new
        // struct and then overwriting &mut self.
        // this creates less assembly
        self.cap = 0;
        self.buf = RawVec::new().non_null();
        self.ptr = self.buf;
        self.end = self.buf.as_ptr();

        // Dropping the remaining elements can panic, so this needs to be
        // done only after updating the other fields.
        unsafe {
            ptr::drop_in_place(remaining);
        }
    }

    /// Forgets to Drop the remaining elements while still allowing the backing allocation to be freed.
    ///
    /// This method does not consume `self`, and leaves deallocation to `impl Drop for IntoIter`.
    /// If consuming `self` is possible, consider calling
    /// [`Self::forget_remaining_elements_and_dealloc()`] instead.
    pub(crate) fn forget_remaining_elements(&mut self) {
        // For the ZST case, it is crucial that we mutate `end` here, not `ptr`.
        // `ptr` must stay aligned, while `end` may be unaligned.
        self.end = self.ptr.as_ptr();
    }

    /// Forgets to Drop the remaining elements and frees the backing allocation.
    /// Consuming version of [`Self::forget_remaining_elements()`].
    ///
    /// This can be used in place of `drop(self)` when `self` is known to be exhausted,
    /// to avoid producing a needless `drop_in_place::<[T]>()`.
    #[inline]
    pub(crate) fn forget_remaining_elements_and_dealloc(self) {
        // Suppress our own `Drop` so only the allocation (not the elements or
        // the allocator) is released, exactly once, below.
        let mut this = ManuallyDrop::new(self);
        // SAFETY: `this` is in ManuallyDrop, so it will not be double-freed.
        unsafe {
            this.dealloc_only();
        }
    }

    /// Frees the allocation, without checking or dropping anything else.
    ///
    /// The safe version of this method is [`Self::forget_remaining_elements_and_dealloc()`].
    /// This function exists only to share code between that method and the `impl Drop`.
    ///
    /// # Safety
    ///
    /// This function must only be called with an [`IntoIter`] that is not going to be dropped
    /// or otherwise used in any way, either because it is being forgotten or because its `Drop`
    /// is already executing; otherwise a double-free will occur, and possibly a read from freed
    /// memory if there are any remaining elements.
    #[inline]
    unsafe fn dealloc_only(&mut self) {
        unsafe {
            // SAFETY: our caller promises not to touch `*self` again
            let alloc = ManuallyDrop::take(&mut self.alloc);
            // RawVec handles deallocation
            let _ = RawVec::from_nonnull_in(self.buf, self.cap, alloc);
        }
    }

    /// Converts the remaining elements into a `VecDeque`, reusing the backing
    /// allocation rather than copying.
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    pub(crate) fn into_vecdeque(self) -> VecDeque<T, A> {
        // Keep our `Drop` impl from dropping the elements and the allocator
        let mut this = ManuallyDrop::new(self);

        // SAFETY: This allocation originally came from a `Vec`, so it passes
        // all those checks. We have `this.buf` ≤ `this.ptr` ≤ `this.end`,
        // so the `offset_from_unsigned`s below cannot wrap, and will produce a well-formed
        // range. `end` ≤ `buf + cap`, so the range will be in-bounds.
        // Taking `alloc` is ok because nothing else is going to look at it,
        // since our `Drop` impl isn't going to run so there's no more code.
        unsafe {
            let buf = this.buf.as_ptr();
            let initialized = if T::IS_ZST {
                // All the pointers are the same for ZSTs, so it's fine to
                // say that they're all at the beginning of the "allocation".
                0..this.len()
            } else {
                this.ptr.offset_from_unsigned(this.buf)..this.end.offset_from_unsigned(buf)
            };
            let cap = this.cap;
            let alloc = ManuallyDrop::take(&mut this.alloc);
            VecDeque::from_contiguous_raw_parts_in(buf, initialized, cap, alloc)
        }
    }
}
234
235#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
236impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
237    fn as_ref(&self) -> &[T] {
238        self.as_slice()
239    }
240}
241
#[stable(feature = "rust1", since = "1.0.0")]
// SAFETY: `IntoIter` owns its elements and its allocator (the `NonNull`
// fields point into an allocation it owns), so sending it is sending `T`s
// and an `A`.
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
// SAFETY: shared access to an `IntoIter` only exposes `&T` (via `as_slice`)
// and `&A` (via `allocator`), so it is `Sync` when `T` and `A` are.
unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}
246
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        let ptr = if T::IS_ZST {
            if self.ptr.as_ptr() == self.end as *mut T {
                return None;
            }
            // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
            // reducing the `end`.
            self.end = self.end.wrapping_byte_sub(1);
            self.ptr
        } else {
            if self.ptr == non_null!(self.end, T) {
                return None;
            }
            let old = self.ptr;
            // SAFETY: `old < end`, so advancing by one stays within (one past)
            // the allocation.
            self.ptr = unsafe { old.add(1) };
            old
        };
        // SAFETY: the emptiness checks above guarantee `ptr` addresses an
        // initialized element that has not been yielded yet; we move it out.
        Some(unsafe { ptr.read() })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if T::IS_ZST {
            // For ZSTs the remaining length is encoded as the raw address
            // difference between `end` and `ptr` (see the field docs).
            self.end.addr().wrapping_sub(self.ptr.as_ptr().addr())
        } else {
            unsafe { non_null!(self.end, T).offset_from_unsigned(self.ptr) }
        };
        (exact, Some(exact))
    }

    #[inline]
    fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        let step_size = self.len().min(n);
        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), step_size);
        if T::IS_ZST {
            // See `next` for why we sub `end` here.
            self.end = self.end.wrapping_byte_sub(step_size);
        } else {
            // SAFETY: the min() above ensures that step_size is in bounds
            self.ptr = unsafe { self.ptr.add(step_size) };
        }
        // The pointers are updated *before* dropping, so a panicking
        // destructor cannot cause these elements to be dropped again by
        // our `Drop` impl.
        // SAFETY: the min() above ensures that step_size is in bounds
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        NonZero::new(n - step_size).map_or(Ok(()), Err)
    }

    #[inline]
    fn count(self) -> usize {
        // The exact remaining length is always known.
        self.len()
    }

    #[inline]
    fn last(mut self) -> Option<T> {
        self.next_back()
    }

    #[inline]
    fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> {
        let mut raw_ary = [const { MaybeUninit::uninit() }; N];

        let len = self.len();

        if T::IS_ZST {
            if len < N {
                self.forget_remaining_elements();
                // Safety: ZSTs can be conjured ex nihilo, only the amount has to be correct
                return Err(unsafe { array::IntoIter::new_unchecked(raw_ary, 0..len) });
            }

            self.end = self.end.wrapping_byte_sub(N);
            // Safety: ditto
            return Ok(unsafe { raw_ary.transpose().assume_init() });
        }

        if len < N {
            // Safety: `len` indicates that this many elements are available and we just checked that
            // it fits into the array.
            unsafe {
                ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, len);
                self.forget_remaining_elements();
                return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
            }
        }

        // Safety: `len` is larger than the array size. Copy a fixed amount here to fully initialize
        // the array.
        unsafe {
            ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, N);
            self.ptr = self.ptr.add(N);
            Ok(raw_ary.transpose().assume_init())
        }
    }

    fn fold<B, F>(mut self, mut accum: B, mut f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        if T::IS_ZST {
            while self.ptr.as_ptr() != self.end.cast_mut() {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // See `next` for why we subtract from `end` here.
                self.end = self.end.wrapping_byte_sub(1);
                accum = f(accum, tmp);
            }
        } else {
            // SAFETY: `self.end` can only be null if `T` is a ZST.
            while self.ptr != non_null!(self.end, T) {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // SAFETY: the maximum this can be is `self.end`.
                // Increment `self.ptr` first to avoid double dropping in the event of a panic.
                self.ptr = unsafe { self.ptr.add(1) };
                accum = f(accum, tmp);
            }
        }

        // There are in fact no remaining elements to forget, but by doing this we can avoid
        // potentially generating a needless loop to drop the elements that cannot exist at
        // this point.
        self.forget_remaining_elements_and_dealloc();

        accum
    }

    fn try_fold<B, F, R>(&mut self, mut accum: B, mut f: F) -> R
    where
        Self: Sized,
        F: FnMut(B, Self::Item) -> R,
        R: core::ops::Try<Output = B>,
    {
        if T::IS_ZST {
            while self.ptr.as_ptr() != self.end.cast_mut() {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // See `next` for why we subtract from `end` here.
                self.end = self.end.wrapping_byte_sub(1);
                accum = f(accum, tmp)?;
            }
        } else {
            // SAFETY: `self.end` can only be null if `T` is a ZST.
            while self.ptr != non_null!(self.end, T) {
                // SAFETY: we just checked that `self.ptr` is in bounds.
                let tmp = unsafe { self.ptr.read() };
                // SAFETY: the maximum this can be is `self.end`.
                // Increment `self.ptr` first to avoid double dropping in the event of a panic.
                self.ptr = unsafe { self.ptr.add(1) };
                accum = f(accum, tmp)?;
            }
        }
        R::from_output(accum)
    }

    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
    where
        Self: TrustedRandomAccessNoCoerce,
    {
        // SAFETY: the caller must guarantee that `i` is in bounds of the
        // `Vec<T>`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)`
        // is guaranteed to pointer to an element of the `Vec<T>` and
        // thus guaranteed to be valid to dereference.
        //
        // Also note the implementation of `Self: TrustedRandomAccess` requires
        // that `T: Copy` so reading elements from the buffer doesn't invalidate
        // them for `Drop`.
        unsafe { self.ptr.add(i).read() }
    }
}
422
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if T::IS_ZST {
            if self.ptr.as_ptr() == self.end as *mut _ {
                return None;
            }
            // See above for why 'ptr.offset' isn't used
            self.end = self.end.wrapping_byte_sub(1);
            // Note that even though this is next_back() we're reading from `self.ptr`, not
            // `self.end`. We track our length using the byte offset from `self.ptr` to `self.end`,
            // so the end pointer may not be suitably aligned for T.
            Some(unsafe { ptr::read(self.ptr.as_ptr()) })
        } else {
            if self.ptr == non_null!(self.end, T) {
                return None;
            }
            // SAFETY: `ptr < end`, so `end - 1` is a valid, initialized,
            // not-yet-yielded element that we move out of.
            unsafe {
                self.end = self.end.sub(1);
                Some(ptr::read(self.end))
            }
        }
    }

    #[inline]
    fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        let step_size = self.len().min(n);
        if T::IS_ZST {
            // SAFETY: same as for advance_by()
            self.end = self.end.wrapping_byte_sub(step_size);
        } else {
            // SAFETY: same as for advance_by()
            self.end = unsafe { self.end.sub(step_size) };
        }
        let to_drop = if T::IS_ZST {
            // ZST may cause unalignment
            ptr::slice_from_raw_parts_mut(ptr::NonNull::<T>::dangling().as_ptr(), step_size)
        } else {
            ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size)
        };
        // `end` was moved back before dropping, so a panicking destructor
        // cannot cause these elements to be dropped a second time.
        // SAFETY: same as for advance_by()
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        NonZero::new(n - step_size).map_or(Ok(()), Err)
    }
}
471
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    fn is_empty(&self) -> bool {
        // `ptr == end` means empty in both encodings (see the `end` field docs);
        // for ZSTs `end` may be unaligned, so compare raw pointers only.
        if T::IS_ZST {
            self.ptr.as_ptr() == self.end as *mut _
        } else {
            self.ptr == non_null!(self.end, T)
        }
    }
}
482
#[stable(feature = "fused", since = "1.26.0")]
// Once `ptr == end`, `next`/`next_back` keep returning `None`, so the
// iterator is trivially fused.
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}
485
#[doc(hidden)]
#[unstable(issue = "none", feature = "trusted_fused")]
// SAFETY: the `FusedIterator` behavior above holds unconditionally.
unsafe impl<T, A: Allocator> TrustedFused for IntoIter<T, A> {}
489
#[unstable(feature = "trusted_len", issue = "37572")]
// SAFETY: `size_hint` returns `(exact, Some(exact))`, so the reported
// length is precise.
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}
492
493#[stable(feature = "default_iters", since = "1.70.0")]
494impl<T, A> Default for IntoIter<T, A>
495where
496    A: Allocator + Default,
497{
498    /// Creates an empty `vec::IntoIter`.
499    ///
500    /// ```
501    /// # use std::vec;
502    /// let iter: vec::IntoIter<u8> = Default::default();
503    /// assert_eq!(iter.len(), 0);
504    /// assert_eq!(iter.as_slice(), &[]);
505    /// ```
506    fn default() -> Self {
507        super::Vec::new_in(Default::default()).into_iter()
508    }
509}
510
/// Marker for element types whose values can be duplicated by raw reads
/// without needing drop coordination (approximated below by `T: Copy`).
#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
#[rustc_unsafe_specialization_marker]
pub trait NonDrop {}

// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
// and thus we can't implement drop-handling
#[unstable(issue = "none", feature = "std_internals")]
impl<T: Copy> NonDrop for T {}
520
#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// TrustedRandomAccess (without NoCoerce) must not be implemented because
// subtypes/supertypes of `T` might not be `NonDrop`
unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A>
where
    T: NonDrop,
{
    // Reading a `NonDrop` (i.e. `Copy`) element has no observable side effect.
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}
531
532#[cfg(not(no_global_oom_handling))]
533#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
534impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
535    fn clone(&self) -> Self {
536        self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter()
537    }
538}
539
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        // Guard ensuring the backing allocation is freed even if dropping
        // one of the remaining elements panics.
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                // SAFETY: the outer `IntoIter::drop` is already running and
                // never touches `*self.0` after this guard fires.
                unsafe {
                    self.0.dealloc_only();
                }
            }
        }

        let guard = DropGuard(self);
        // destroy the remaining elements
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // now `guard` will be dropped and do the rest
    }
}
561
// In addition to the SAFETY invariants of the following three unsafe traits
// also refer to the vec::in_place_collect module documentation to get an overview
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {
    // 1:1 — each source element occupies exactly one slot, so in-place
    // collection neither expands nor merges items.
    const EXPAND_BY: Option<NonZero<usize>> = NonZero::new(1);
    const MERGE_BY: Option<NonZero<usize>> = NonZero::new(1);
}
570
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    // `IntoIter` is its own source: in-place collection reuses its buffer.
    type Source = Self;

    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}
581
#[cfg(not(no_global_oom_handling))]
// Identity projection used by in-place collection to reach the underlying
// `vec::IntoIter` (only for the global allocator).
unsafe impl<T> AsVecIntoIter for IntoIter<T> {
    type Item = T;

    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
        self
    }
}