1use core::iter::{
2 FusedIterator, InPlaceIterable, SourceIter, TrustedFused, TrustedLen,
3 TrustedRandomAccessNoCoerce,
4};
5use core::marker::PhantomData;
6use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
7use core::num::NonZero;
8#[cfg(not(no_global_oom_handling))]
9use core::ops::Deref;
10use core::panic::UnwindSafe;
11use core::ptr::{self, NonNull};
12use core::{array, fmt, slice};
13
14#[cfg(not(no_global_oom_handling))]
15use super::AsVecIntoIter;
16use crate::alloc::{Allocator, Global};
17#[cfg(not(no_global_oom_handling))]
18use crate::collections::VecDeque;
19use crate::raw_vec::RawVec;
20
// Reinterprets a raw-pointer place (`*const T` / `*mut T`) as a `NonNull<T>`
// place without a run-time null check. The caller must uphold that the
// stored pointer is actually non-null (e.g. `end` when `T` is not a ZST).
macro non_null {
    // `mut` form: a mutable reference to the place, viewed as `NonNull<T>`.
    (mut $place:expr, $t:ident) => {{
        #![allow(unused_unsafe)] unsafe { &mut *((&raw mut $place) as *mut NonNull<$t>) }
    }},
    // By-value form: read the place as a `NonNull<T>`.
    ($place:expr, $t:ident) => {{
        #![allow(unused_unsafe)] unsafe { *((&raw const $place) as *const NonNull<$t>) }
    }},
}
31
/// An iterator that moves out of a vector.
///
/// Created by `Vec::into_iter` (see `Default` and `Clone` impls below for the
/// other ways an instance can come to exist).
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // Start of the backing allocation; kept so `dealloc_only` can rebuild
    // the `RawVec` and free the memory.
    pub(super) buf: NonNull<T>,
    pub(super) phantom: PhantomData<T>,
    // Capacity of the backing allocation, in elements.
    pub(super) cap: usize,
    // `ManuallyDrop` so the allocator can be moved out exactly once during
    // deallocation (see `dealloc_only`).
    pub(super) alloc: ManuallyDrop<A>,
    // First not-yet-yielded element. For ZSTs this pointer is never advanced.
    pub(super) ptr: NonNull<T>,
    // If `T` is not a ZST: one past the last remaining element (may equal
    // `ptr` when exhausted). If `T` is a ZST: the *address* encodes the
    // remaining length — `end.addr() - ptr.addr()` elements are left
    // (see `size_hint`), so `end` may not be a valid pointer.
    pub(super) end: *const T,
}
62
// `IntoIter` is unwind-safe exactly when its element type and allocator are.
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: UnwindSafe, A: Allocator + UnwindSafe> UnwindSafe for IntoIter<T, A> {}
67
68#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
69impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
70 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
71 f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
72 }
73}
74
impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns the remaining items of this iterator as a slice.
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: `ptr..ptr + len` is exactly the range of initialized,
        // not-yet-yielded elements maintained by `next`/`next_back`.
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len()) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: same range as `as_slice`; `&mut self` gives exclusivity.
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    // Raw-pointer form of `as_mut_slice`; returning `*mut [T]` avoids
    // holding a borrow of `self` across later field updates.
    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len())
    }

    // Drops the remaining elements but *leaks* the backing allocation by
    // first resetting the iterator onto a fresh zero-capacity buffer, so
    // its own `Drop` will not free the original memory (which has been
    // handed off elsewhere, e.g. by in-place collection).
    #[cfg(not(no_global_oom_handling))]
    pub(super) fn forget_allocation_drop_remaining(&mut self) {
        let remaining = self.as_raw_mut_slice();

        // Detach from the old allocation *before* running destructors, so a
        // panicking `drop` cannot cause a double free via our own `Drop`.
        self.cap = 0;
        self.buf = RawVec::new().non_null();
        self.ptr = self.buf;
        self.end = self.buf.as_ptr();

        unsafe {
            // SAFETY: `remaining` was captured from the old `ptr..end`
            // range of initialized elements before the reset above.
            ptr::drop_in_place(remaining);
        }
    }

    // Forgets (leaks) the remaining elements without running destructors,
    // by declaring the iterator empty.
    pub(crate) fn forget_remaining_elements(&mut self) {
        // For non-ZSTs emptiness is `ptr == end`; for ZSTs the remaining
        // length is `end.addr() - ptr.addr()`, so `end = ptr` means empty
        // under both interpretations.
        self.end = self.ptr.as_ptr();
    }

    // Leaks any remaining elements and frees only the backing allocation.
    #[inline]
    pub(crate) fn forget_remaining_elements_and_dealloc(self) {
        // `ManuallyDrop` suppresses the normal `Drop`, which would also
        // drop the remaining elements.
        let mut this = ManuallyDrop::new(self);
        unsafe {
            // SAFETY: `this` is never touched again, so the allocator and
            // buffer are each consumed exactly once.
            this.dealloc_only();
        }
    }

    // Frees the backing allocation without dropping any elements.
    //
    // SAFETY (caller contract): must be called at most once, and `self`
    // must not be used or dropped afterwards — both the allocator and the
    // buffer are moved out here.
    #[inline]
    unsafe fn dealloc_only(&mut self) {
        unsafe {
            let alloc = ManuallyDrop::take(&mut self.alloc);
            // Rebuild the `RawVec` so that dropping it deallocates.
            let _ = RawVec::from_nonnull_in(self.buf, self.cap, alloc);
        }
    }

    // Converts the remaining elements and the allocation into a `VecDeque`
    // without copying the buffer.
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    pub(crate) fn into_vecdeque(self) -> VecDeque<T, A> {
        // Suppress `Drop` while the fields are disassembled by hand.
        let mut this = ManuallyDrop::new(self);

        unsafe {
            let buf = this.buf.as_ptr();
            let initialized = if T::IS_ZST {
                // ZSTs occupy no memory, so all `len()` remaining elements
                // count as initialized starting at index 0.
                0..this.len()
            } else {
                // Already-yielded elements leave a gap at the front: the
                // initialized range is `ptr..end`, expressed relative to `buf`.
                this.ptr.offset_from_unsigned(this.buf)..this.end.offset_from_unsigned(buf)
            };
            let cap = this.cap;
            let alloc = ManuallyDrop::take(&mut this.alloc);
            // SAFETY: `initialized` lies within the `cap`-element allocation
            // starting at `buf`, which is owned by `alloc`.
            VecDeque::from_contiguous_raw_parts_in(buf, initialized, cap, alloc)
        }
    }
}
234
235#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
236impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
237 fn as_ref(&self) -> &[T] {
238 self.as_slice()
239 }
240}
241
// SAFETY: the raw pointers only ever refer to the heap buffer the iterator
// itself owns (see `buf`/`ptr`/`end` and `Drop`), so thread-safety is
// exactly that of owning `T` values plus the allocator `A`.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}
246
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        let ptr = if T::IS_ZST {
            if self.ptr.as_ptr() == self.end as *mut T {
                return None;
            }
            // For ZSTs the remaining length is encoded in `end`'s address,
            // so decrement that instead of advancing `ptr`. `wrapping_`
            // because `end` need not be a valid pointer.
            self.end = self.end.wrapping_byte_sub(1);
            self.ptr
        } else {
            if self.ptr == non_null!(self.end, T) {
                return None;
            }
            let old = self.ptr;
            // SAFETY: `ptr < end`, so `ptr + 1` stays within the allocation.
            self.ptr = unsafe { old.add(1) };
            old
        };
        // SAFETY: `ptr` addressed an initialized element within `ptr..end`
        // (for ZSTs, reading through the dangling `ptr` is fine).
        Some(unsafe { ptr.read() })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if T::IS_ZST {
            // Remaining length is the address difference (see `next`).
            self.end.addr().wrapping_sub(self.ptr.as_ptr().addr())
        } else {
            // SAFETY: `ptr` and `end` delimit one live range of the same
            // allocation with `ptr <= end`.
            unsafe { non_null!(self.end, T).offset_from_unsigned(self.ptr) }
        };
        (exact, Some(exact))
    }

    #[inline]
    fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        // Clamp to the number of elements actually remaining.
        let step_size = self.len().min(n);
        // Slice over the skipped elements — they still need their
        // destructors run.
        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), step_size);
        if T::IS_ZST {
            // See `next`: shrink the length encoded in `end`'s address.
            self.end = self.end.wrapping_byte_sub(step_size);
        } else {
            // SAFETY: `step_size <= len`, so this stays within `ptr..=end`.
            self.ptr = unsafe { self.ptr.add(step_size) };
        }
        // State is updated *before* dropping, so a panicking destructor
        // cannot make our `Drop` free the same elements again.
        unsafe {
            // SAFETY: `to_drop` covers the initialized elements just skipped.
            ptr::drop_in_place(to_drop);
        }
        // Report the shortfall, if any, per the `advance_by` contract.
        NonZero::new(n - step_size).map_or(Ok(()), Err)
    }

    #[inline]
    fn count(self) -> usize {
        // The length is known exactly; dropping `self` disposes of the
        // remaining elements.
        self.len()
    }

    #[inline]
    fn last(mut self) -> Option<T> {
        // Take the back element; everything before it is dropped when
        // `self` goes out of scope.
        self.next_back()
    }

    #[inline]
    fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> {
        let mut raw_ary = [const { MaybeUninit::uninit() }; N];

        let len = self.len();

        if T::IS_ZST {
            if len < N {
                // Not enough elements: yield the partial chunk as an error.
                self.forget_remaining_elements();
                // SAFETY: ZSTs need no storage; the first `len` slots count
                // as initialized.
                return Err(unsafe { array::IntoIter::new_unchecked(raw_ary, 0..len) });
            }

            // See `next`: shrink the encoded remaining length by `N`.
            self.end = self.end.wrapping_byte_sub(N);
            // SAFETY: an array of ZSTs is trivially initialized.
            return Ok(unsafe { raw_ary.transpose().assume_init() });
        }

        if len < N {
            unsafe {
                // SAFETY: exactly `len` initialized elements remain and the
                // destination has room for `N >= len`. The moved-out
                // elements are then forgotten so they are not dropped again
                // by this iterator.
                ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, len);
                self.forget_remaining_elements();
                return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
            }
        }

        unsafe {
            // SAFETY: at least `N` initialized elements remain, so both the
            // copy and the `ptr + N` advance stay in bounds.
            ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, N);
            self.ptr = self.ptr.add(N);
            Ok(raw_ary.transpose().assume_init())
        }
    }

    fn fold<B, F>(mut self, mut accum: B, mut f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        if T::IS_ZST {
            while self.ptr.as_ptr() != self.end.cast_mut() {
                // SAFETY: reading a ZST through the dangling `ptr` is fine;
                // `end`'s address tracks how many remain.
                let tmp = unsafe { self.ptr.read() };
                self.end = self.end.wrapping_byte_sub(1);
                accum = f(accum, tmp);
            }
        } else {
            while self.ptr != non_null!(self.end, T) {
                // SAFETY: `ptr < end`, so the element is initialized.
                let tmp = unsafe { self.ptr.read() };
                // Advance before calling `f`: if `f` panics, `tmp` is no
                // longer considered owned by the iterator.
                self.ptr = unsafe { self.ptr.add(1) };
                accum = f(accum, tmp);
            }
        }

        // Every element was moved out above; free just the allocation.
        self.forget_remaining_elements_and_dealloc();

        accum
    }

    fn try_fold<B, F, R>(&mut self, mut accum: B, mut f: F) -> R
    where
        Self: Sized,
        F: FnMut(B, Self::Item) -> R,
        R: core::ops::Try<Output = B>,
    {
        // Same loop shape as `fold`, but `?` may exit early and `self` is
        // only borrowed, so no deallocation happens here.
        if T::IS_ZST {
            while self.ptr.as_ptr() != self.end.cast_mut() {
                // SAFETY: see the ZST branch of `fold`.
                let tmp = unsafe { self.ptr.read() };
                self.end = self.end.wrapping_byte_sub(1);
                accum = f(accum, tmp)?;
            }
        } else {
            while self.ptr != non_null!(self.end, T) {
                // SAFETY: `ptr < end`, so the element is initialized.
                let tmp = unsafe { self.ptr.read() };
                // Advance before calling `f` so a panic or early return
                // leaves `tmp` outside the iterator's owned range.
                self.ptr = unsafe { self.ptr.add(1) };
                accum = f(accum, tmp)?;
            }
        }
        R::from_output(accum)
    }

    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
    where
        Self: TrustedRandomAccessNoCoerce,
    {
        // SAFETY: per the TrustedRandomAccess contract the caller passes
        // each in-bounds `i` at most once; `T: NonDrop` (required by the
        // impl below) means the duplicated ownership from reading without
        // advancing `ptr` cannot cause double drops.
        unsafe { self.ptr.add(i).read() }
    }
}
422
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if T::IS_ZST {
            if self.ptr.as_ptr() == self.end as *mut _ {
                return None;
            }
            // ZSTs are indistinguishable: "the back element" just means one
            // fewer remaining, so shrink the length encoded in `end`'s
            // address (`wrapping_` since `end` may be dangling).
            self.end = self.end.wrapping_byte_sub(1);
            // SAFETY: reading a ZST through the dangling `ptr` is fine.
            Some(unsafe { ptr::read(self.ptr.as_ptr()) })
        } else {
            if self.ptr == non_null!(self.end, T) {
                return None;
            }
            unsafe {
                // SAFETY: non-empty, so `end - 1` is the last initialized
                // element; moving `end` down first also removes it from the
                // iterator's owned range before it is read out.
                self.end = self.end.sub(1);
                Some(ptr::read(self.end))
            }
        }
    }

    #[inline]
    fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        // Clamp to the number of elements actually remaining.
        let step_size = self.len().min(n);
        if T::IS_ZST {
            // `end` may be dangling for ZSTs, hence wrapping arithmetic on
            // the encoded length.
            self.end = self.end.wrapping_byte_sub(step_size);
        } else {
            // SAFETY: `step_size <= len`, so `end - step_size >= ptr`.
            self.end = unsafe { self.end.sub(step_size) };
        }
        // Build the to-drop slice *after* updating `end`, so a panicking
        // destructor cannot double-drop these elements via our `Drop`.
        let to_drop = if T::IS_ZST {
            // Any aligned dangling pointer is valid for a slice of ZSTs;
            // the (possibly invalid) `end` must not be used here.
            ptr::slice_from_raw_parts_mut(ptr::NonNull::<T>::dangling().as_ptr(), step_size)
        } else {
            ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size)
        };
        unsafe {
            // SAFETY: `to_drop` covers the `step_size` initialized elements
            // just removed from the back.
            ptr::drop_in_place(to_drop);
        }
        // Report the shortfall, if any.
        NonZero::new(n - step_size).map_or(Ok(()), Err)
    }
}
471
472#[stable(feature = "rust1", since = "1.0.0")]
473impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
474 fn is_empty(&self) -> bool {
475 if T::IS_ZST {
476 self.ptr.as_ptr() == self.end as *mut _
477 } else {
478 self.ptr == non_null!(self.end, T)
479 }
480 }
481}
482
// Once `ptr == end` (or the encoded ZST length hits zero), `next` keeps
// returning `None`, so the iterator is fused.
#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}
485
// SAFETY: exhaustion is a stable state (see `FusedIterator` above), which
// is the stronger, unsafe-to-rely-on form of fusedness this trait demands.
#[doc(hidden)]
#[unstable(issue = "none", feature = "trusted_fused")]
unsafe impl<T, A: Allocator> TrustedFused for IntoIter<T, A> {}
489
// SAFETY: `size_hint` returns the exact remaining length as both bounds,
// derived directly from the `ptr`/`end` cursors.
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}
492
493#[stable(feature = "default_iters", since = "1.70.0")]
494impl<T, A> Default for IntoIter<T, A>
495where
496 A: Allocator + Default,
497{
498 fn default() -> Self {
507 super::Vec::new_in(Default::default()).into_iter()
508 }
509}
510
// Specialization marker for element types whose values may safely be read
// more than once (see `__iterator_get_unchecked`) without risking double
// drops, because they have no destructor to run.
#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
#[rustc_unsafe_specialization_marker]
pub trait NonDrop {}
515
// `Copy` types cannot implement `Drop`, so duplicated reads are harmless.
#[unstable(issue = "none", feature = "std_internals")]
impl<T: Copy> NonDrop for T {}
520
// SAFETY: `__iterator_get_unchecked` reads straight from the buffer without
// advancing the cursor; the `T: NonDrop` bound ensures the resulting
// duplicate ownership cannot cause double drops.
#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A>
where
    T: NonDrop,
{
    // Indexed access is a plain memory read, so skipping elements has no
    // observable side effect.
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}
531
532#[cfg(not(no_global_oom_handling))]
533#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
534impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
535 fn clone(&self) -> Self {
536 self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter()
537 }
538}
539
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        // Guard that frees the backing allocation when it drops — including
        // during unwinding if an element's destructor panics below.
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // SAFETY: this runs exactly once, and the `IntoIter`
                    // (already mid-drop) is never used afterwards.
                    self.0.dealloc_only();
                }
            }
        }

        let guard = DropGuard(self);
        unsafe {
            // SAFETY: the raw slice covers the initialized, not-yet-yielded
            // `ptr..end` range.
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // `guard` drops here and deallocates the buffer.
    }
}
561
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {
    // 1:1 — each source element corresponds to exactly one yielded item,
    // so in-place collect neither expands nor merges the buffer contents.
    const EXPAND_BY: Option<NonZero<usize>> = NonZero::new(1);
    const MERGE_BY: Option<NonZero<usize>> = NonZero::new(1);
}
570
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    type Source = Self;

    // This iterator is itself the root source of an in-place iteration
    // pipeline, so "the inner source" is just `self`.
    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}
581
// Lets in-place-collection code recover the concrete `vec::IntoIter` at the
// bottom of an iterator chain. Note this is only implemented for the
// default `Global` allocator (the `A` parameter is left at its default).
#[cfg(not(no_global_oom_handling))]
unsafe impl<T> AsVecIntoIter for IntoIter<T> {
    type Item = T;

    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
        self
    }
}