#![unstable(feature = "raw_vec_internals", reason = "unstable const warnings", issue = "none")]
#![cfg_attr(test, allow(dead_code))]

use core::marker::PhantomData;
use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
use core::ptr::{self, Alignment, NonNull, Unique};
use core::{cmp, hint};

#[cfg(not(no_global_oom_handling))]
use crate::alloc::handle_alloc_error;
use crate::alloc::{Allocator, Global, Layout};
use crate::boxed::Box;
use crate::collections::TryReserveError;
use crate::collections::TryReserveErrorKind::*;

#[cfg(test)]
mod tests;

// One central function responsible for reporting capacity overflows. This keeps
// the panic machinery out of line and ensures there is a single location that
// panics rather than one per call site.
#[cfg(not(no_global_oom_handling))]
#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
#[track_caller]
fn capacity_overflow() -> ! {
    panic!("capacity overflow");
}

enum AllocInit {
    /// The contents of the new memory are left uninitialized.
    Uninitialized,
    /// The new memory is guaranteed to be zeroed.
    #[cfg(not(no_global_oom_handling))]
    Zeroed,
}

/// A `usize` capacity stored with the high bit as a niche, guaranteeing the
/// value is always in `0..=isize::MAX`.
type Cap = core::num::niche_types::UsizeNoHighBit;

const ZERO_CAP: Cap = unsafe { Cap::new_unchecked(0) };

/// `Cap(cap)`, except if `T` is a ZST then `ZERO_CAP`.
///
/// # Safety
///
/// `cap` must not exceed `isize::MAX`.
unsafe fn new_cap<T>(cap: usize) -> Cap {
    if T::IS_ZST { ZERO_CAP } else { unsafe { Cap::new_unchecked(cap) } }
}

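// Illustrative, cfg(test)-gated sketch (not part of the original module; the
// module name is made up here): a quick check of the `new_cap` contract
// described above, namely that the requested capacity is stored as-is for
// sized types and that zero is stored for ZSTs.
#[cfg(test)]
mod new_cap_sketch {
    #[test]
    fn sized_and_zst_caps() {
        // SAFETY: 100 is well below `isize::MAX`.
        let c = unsafe { super::new_cap::<u32>(100) };
        assert_eq!(c.as_inner(), 100);
        // For a ZST the requested capacity is ignored and zero is stored.
        let z = unsafe { super::new_cap::<()>(100) };
        assert_eq!(z.as_inner(), 0);
    }
}
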
/// A low-level utility for more ergonomically allocating, reallocating, and
/// deallocating a buffer of memory on the heap without having to worry about
/// all the corner cases involved (zero-sized types, capacity overflow,
/// allocation failure, and so on). When dropped it *will* free its memory, but
/// it *won't* try to drop its contents.
#[allow(missing_debug_implementations)]
pub(crate) struct RawVec<T, A: Allocator = Global> {
    inner: RawVecInner<A>,
    _marker: PhantomData<T>,
}

/// Like `RawVec`, but only generic over the allocator, not the element type.
/// Methods take the element layout as a run-time parameter instead, so the
/// bulk of the logic is not monomorphized once per element type.
#[allow(missing_debug_implementations)]
struct RawVecInner<A: Allocator = Global> {
    ptr: Unique<u8>,
    /// The capacity in *elements*, always `0` for zero-sized element types.
    cap: Cap,
    alloc: A,
}

impl<T> RawVec<T, Global> {
    /// Creates the biggest possible `RawVec` (on the system heap) without
    /// allocating. If `T` has positive size, then this makes a `RawVec` with
    /// capacity `0`. If `T` is zero-sized, then it makes a `RawVec` with
    /// capacity `usize::MAX`. Useful for implementing delayed allocation.
    #[must_use]
    pub(crate) const fn new() -> Self {
        Self::new_in(Global)
    }

    /// Creates a `RawVec` (on the system heap) with exactly the capacity and
    /// alignment requirements for a `[T; capacity]`. This is equivalent to
    /// calling `RawVec::new` when `capacity` is `0` or `T` is zero-sized.
    ///
    /// # Panics
    ///
    /// Panics if the requested capacity exceeds `isize::MAX` bytes.
    ///
    /// # Aborts
    ///
    /// Aborts on OOM.
    #[cfg(not(any(no_global_oom_handling, test)))]
    #[must_use]
    #[inline]
    #[track_caller]
    pub(crate) fn with_capacity(capacity: usize) -> Self {
        Self { inner: RawVecInner::with_capacity(capacity, T::LAYOUT), _marker: PhantomData }
    }

    /// Like `with_capacity`, but guarantees the buffer is zeroed.
    #[cfg(not(any(no_global_oom_handling, test)))]
    #[must_use]
    #[inline]
    #[track_caller]
    pub(crate) fn with_capacity_zeroed(capacity: usize) -> Self {
        Self {
            inner: RawVecInner::with_capacity_zeroed_in(capacity, Global, T::LAYOUT),
            _marker: PhantomData,
        }
    }
}

impl RawVecInner<Global> {
    #[cfg(not(any(no_global_oom_handling, test)))]
    #[must_use]
    #[inline]
    #[track_caller]
    fn with_capacity(capacity: usize, elem_layout: Layout) -> Self {
        match Self::try_allocate_in(capacity, AllocInit::Uninitialized, Global, elem_layout) {
            Ok(res) => res,
            Err(err) => handle_error(err),
        }
    }
}

// Tiny Vecs are dumb. Skip to:
// - 8 if the element size is 1, because any heap allocator is likely to round
//   up a request of less than 8 bytes to at least 8 bytes.
// - 4 if elements are moderate-sized (<= 1 KiB).
// - 1 otherwise, to avoid wasting too much space for very short Vecs.
const fn min_non_zero_cap(size: usize) -> usize {
    if size == 1 {
        8
    } else if size <= 1024 {
        4
    } else {
        1
    }
}

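// Illustrative, cfg(test)-gated sketch (not part of the original module; the
// module name is made up here): exercises the three size classes handled by
// `min_non_zero_cap` above, including the 1 KiB boundary.
#[cfg(test)]
mod min_non_zero_cap_sketch {
    #[test]
    fn thresholds() {
        assert_eq!(super::min_non_zero_cap(1), 8); // byte-sized elements
        assert_eq!(super::min_non_zero_cap(16), 4); // moderate-sized elements
        assert_eq!(super::min_non_zero_cap(1024), 4); // boundary of "moderate"
        assert_eq!(super::min_non_zero_cap(1025), 1); // large elements
    }
}
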
impl<T, A: Allocator> RawVec<T, A> {
    #[cfg(not(no_global_oom_handling))]
    pub(crate) const MIN_NON_ZERO_CAP: usize = min_non_zero_cap(size_of::<T>());

    /// Like `new`, but parameterized over the choice of allocator for the
    /// returned `RawVec`.
    #[inline]
    pub(crate) const fn new_in(alloc: A) -> Self {
        Self { inner: RawVecInner::new_in(alloc, Alignment::of::<T>()), _marker: PhantomData }
    }

    /// Like `with_capacity`, but parameterized over the choice of allocator
    /// for the returned `RawVec`.
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    #[track_caller]
    pub(crate) fn with_capacity_in(capacity: usize, alloc: A) -> Self {
        Self {
            inner: RawVecInner::with_capacity_in(capacity, alloc, T::LAYOUT),
            _marker: PhantomData,
        }
    }

    /// Like `with_capacity_in`, but returns an error on allocation failure or
    /// capacity overflow instead of panicking or aborting.
    #[inline]
    pub(crate) fn try_with_capacity_in(capacity: usize, alloc: A) -> Result<Self, TryReserveError> {
        match RawVecInner::try_with_capacity_in(capacity, alloc, T::LAYOUT) {
            Ok(inner) => Ok(Self { inner, _marker: PhantomData }),
            Err(e) => Err(e),
        }
    }

    /// Like `with_capacity_zeroed`, but parameterized over the choice of
    /// allocator for the returned `RawVec`.
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    #[track_caller]
    pub(crate) fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self {
        Self {
            inner: RawVecInner::with_capacity_zeroed_in(capacity, alloc, T::LAYOUT),
            _marker: PhantomData,
        }
    }

    /// Converts the entire buffer into `Box<[MaybeUninit<T>]>` with the specified `len`.
    ///
    /// # Safety
    ///
    /// * `len` must be greater than or equal to the most recently requested capacity, and
    /// * `len` must be less than or equal to `self.capacity()`.
    ///
    /// Note that the requested capacity and `self.capacity()` could differ, as
    /// an allocator could overallocate and return a greater memory block than requested.
    pub(crate) unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit<T>], A> {
        // Sanity-check one half of the safety requirement (we cannot check the other half).
        debug_assert!(
            len <= self.capacity(),
            "`len` must be smaller than or equal to `self.capacity()`"
        );

        let me = ManuallyDrop::new(self);
        unsafe {
            let slice = ptr::slice_from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len);
            Box::from_raw_in(slice, ptr::read(&me.inner.alloc))
        }
    }

    /// Reconstitutes a `RawVec` from a pointer, capacity, and allocator.
    ///
    /// # Safety
    ///
    /// The `ptr` must be allocated (via the given allocator `alloc`), and with the
    /// given `capacity`. The `capacity` cannot exceed `isize::MAX` for sized types.
    /// For ZSTs, `capacity` is ignored. If the `ptr` and `capacity` come from a
    /// `RawVec` created via `alloc`, then this is guaranteed.
    #[inline]
    pub(crate) unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self {
        // SAFETY: Precondition passed to the caller.
        unsafe {
            let ptr = ptr.cast();
            let capacity = new_cap::<T>(capacity);
            Self {
                inner: RawVecInner::from_raw_parts_in(ptr, capacity, alloc),
                _marker: PhantomData,
            }
        }
    }

    /// A convenience method for hoisting the non-null precondition out of
    /// [`RawVec::from_raw_parts_in`].
    ///
    /// # Safety
    ///
    /// See [`RawVec::from_raw_parts_in`].
    #[inline]
    pub(crate) unsafe fn from_nonnull_in(ptr: NonNull<T>, capacity: usize, alloc: A) -> Self {
        // SAFETY: Precondition passed to the caller.
        unsafe {
            let ptr = ptr.cast();
            let capacity = new_cap::<T>(capacity);
            Self { inner: RawVecInner::from_nonnull_in(ptr, capacity, alloc), _marker: PhantomData }
        }
    }

    /// Gets a raw pointer to the start of the allocation. Note that this is
    /// `Unique::dangling()` if `capacity == 0` or `T` is zero-sized. In the
    /// former case, you must be careful.
    #[inline]
    pub(crate) const fn ptr(&self) -> *mut T {
        self.inner.ptr()
    }

    #[inline]
    pub(crate) const fn non_null(&self) -> NonNull<T> {
        self.inner.non_null()
    }

    /// Gets the capacity of the allocation.
    ///
    /// This will always be `usize::MAX` if `T` is zero-sized.
    #[inline]
    pub(crate) const fn capacity(&self) -> usize {
        self.inner.capacity(size_of::<T>())
    }

    /// Returns a shared reference to the allocator backing this `RawVec`.
    #[inline]
    pub(crate) fn allocator(&self) -> &A {
        self.inner.allocator()
    }

    /// Ensures that the buffer contains at least enough space to hold
    /// `len + additional` elements. If it doesn't already have enough
    /// capacity, will reallocate enough space plus comfortable slack space to
    /// get amortized *O*(1) behavior.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity exceeds `isize::MAX` _bytes_.
    ///
    /// # Aborts
    ///
    /// Aborts on OOM.
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    #[track_caller]
    pub(crate) fn reserve(&mut self, len: usize, additional: usize) {
        self.inner.reserve(len, additional, T::LAYOUT)
    }

    /// A specialized version of `self.reserve(len, 1)` which requires the
    /// caller to ensure `len == self.capacity()`.
    #[cfg(not(no_global_oom_handling))]
    #[inline(never)]
    #[track_caller]
    pub(crate) fn grow_one(&mut self) {
        self.inner.grow_one(T::LAYOUT)
    }

    /// The same as `reserve`, but returns on errors instead of panicking or
    /// aborting.
    pub(crate) fn try_reserve(
        &mut self,
        len: usize,
        additional: usize,
    ) -> Result<(), TryReserveError> {
        self.inner.try_reserve(len, additional, T::LAYOUT)
    }

    /// Ensures that the buffer contains at least enough space to hold
    /// `len + additional` elements. If it doesn't already, will reallocate the
    /// minimum possible amount of memory necessary. Generally this will be
    /// exactly the amount of memory necessary, but in principle the allocator
    /// is free to give back more than we asked for.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity exceeds `isize::MAX` _bytes_.
    ///
    /// # Aborts
    ///
    /// Aborts on OOM.
    #[cfg(not(no_global_oom_handling))]
    #[track_caller]
    pub(crate) fn reserve_exact(&mut self, len: usize, additional: usize) {
        self.inner.reserve_exact(len, additional, T::LAYOUT)
    }

    /// The same as `reserve_exact`, but returns on errors instead of panicking
    /// or aborting.
    pub(crate) fn try_reserve_exact(
        &mut self,
        len: usize,
        additional: usize,
    ) -> Result<(), TryReserveError> {
        self.inner.try_reserve_exact(len, additional, T::LAYOUT)
    }

    /// Shrinks the buffer down to the specified capacity. If the given amount
    /// is 0, actually completely deallocates.
    ///
    /// # Panics
    ///
    /// Panics if the given amount is *larger* than the current capacity.
    ///
    /// # Aborts
    ///
    /// Aborts on OOM.
    #[cfg(not(no_global_oom_handling))]
    #[track_caller]
    #[inline]
    pub(crate) fn shrink_to_fit(&mut self, cap: usize) {
        self.inner.shrink_to_fit(cap, T::LAYOUT)
    }
}

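// Illustrative, cfg(test)-gated sketch (not part of the original module; the
// module name is made up here): shows the amortized growth policy from the
// outside. The exact capacities hold because `grow_amortized` stores the
// *requested* capacity, not whatever larger block the allocator returned.
#[cfg(test)]
mod reserve_sketch {
    use super::*;

    #[test]
    fn amortized_growth_at_least_doubles() {
        let mut buf: RawVec<u8> = RawVec::new();
        // The first reservation jumps straight to `min_non_zero_cap` (8 for u8).
        buf.try_reserve(0, 1).unwrap();
        assert_eq!(buf.capacity(), 8);
        // A full buffer grows to `max(2 * cap, required)`, i.e. it doubles here.
        let cap = buf.capacity();
        buf.try_reserve(cap, 1).unwrap();
        assert_eq!(buf.capacity(), 2 * cap);
    }
}
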
unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec<T, A> {
    /// Frees the memory owned by the `RawVec` *without* trying to drop its contents.
    fn drop(&mut self) {
        // SAFETY: We are in a Drop impl, `self.inner` will not be used again.
        unsafe { self.inner.deallocate(T::LAYOUT) }
    }
}

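// Illustrative, cfg(test)-gated sketch (not part of the original module; the
// module name is made up here): zero-sized types never allocate and report
// `usize::MAX` capacity, and the dangling pointer produced by `new` is
// aligned for the element type.
#[cfg(test)]
mod zst_and_dangling_sketch {
    use super::*;

    #[test]
    fn zst_capacity_is_max_and_new_is_aligned() {
        let zst: RawVec<()> = RawVec::new();
        assert_eq!(zst.capacity(), usize::MAX);

        let buf: RawVec<u64> = RawVec::new();
        assert_eq!(buf.capacity(), 0);
        // `new_in` stores a dangling pointer whose address is the alignment.
        assert_eq!(buf.ptr().addr(), core::mem::align_of::<u64>());
    }
}
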
impl<A: Allocator> RawVecInner<A> {
    #[inline]
    const fn new_in(alloc: A, align: Alignment) -> Self {
        let ptr = Unique::from_non_null(NonNull::without_provenance(align.as_nonzero()));
        // `cap: 0` means "unallocated". zero-sized types are ignored.
        Self { ptr, cap: ZERO_CAP, alloc }
    }

    #[cfg(not(no_global_oom_handling))]
    #[inline]
    #[track_caller]
    fn with_capacity_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
        match Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout) {
            Ok(this) => {
                unsafe {
                    // Make it more obvious that a subsequent Vec::reserve(capacity)
                    // will not allocate.
                    hint::assert_unchecked(!this.needs_to_grow(0, capacity, elem_layout));
                }
                this
            }
            Err(err) => handle_error(err),
        }
    }

    #[inline]
    fn try_with_capacity_in(
        capacity: usize,
        alloc: A,
        elem_layout: Layout,
    ) -> Result<Self, TryReserveError> {
        Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout)
    }

    #[cfg(not(no_global_oom_handling))]
    #[inline]
    #[track_caller]
    fn with_capacity_zeroed_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
        match Self::try_allocate_in(capacity, AllocInit::Zeroed, alloc, elem_layout) {
            Ok(res) => res,
            Err(err) => handle_error(err),
        }
    }

    fn try_allocate_in(
        capacity: usize,
        init: AllocInit,
        alloc: A,
        elem_layout: Layout,
    ) -> Result<Self, TryReserveError> {
        // We avoid `unwrap_or_else` here because it bloats the amount of LLVM IR generated.
        let layout = match layout_array(capacity, elem_layout) {
            Ok(layout) => layout,
            Err(_) => return Err(CapacityOverflow.into()),
        };

        if layout.size() == 0 {
            // Don't allocate here because `Drop` will not deallocate when `capacity` is 0.
            return Ok(Self::new_in(alloc, elem_layout.alignment()));
        }

        let result = match init {
            AllocInit::Uninitialized => alloc.allocate(layout),
            #[cfg(not(no_global_oom_handling))]
            AllocInit::Zeroed => alloc.allocate_zeroed(layout),
        };
        let ptr = match result {
            Ok(ptr) => ptr,
            Err(_) => return Err(AllocError { layout, non_exhaustive: () }.into()),
        };

        // Allocators currently return a `NonNull<[u8]>` whose length matches
        // the size requested. If that ever changes, the capacity here should
        // change to `ptr.len() / size_of::<T>()`.
        Ok(Self {
            ptr: Unique::from(ptr.cast()),
            cap: unsafe { Cap::new_unchecked(capacity) },
            alloc,
        })
    }

    #[inline]
    unsafe fn from_raw_parts_in(ptr: *mut u8, cap: Cap, alloc: A) -> Self {
        Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap, alloc }
    }

    #[inline]
    unsafe fn from_nonnull_in(ptr: NonNull<u8>, cap: Cap, alloc: A) -> Self {
        Self { ptr: Unique::from(ptr), cap, alloc }
    }

    #[inline]
    const fn ptr<T>(&self) -> *mut T {
        self.non_null::<T>().as_ptr()
    }

    #[inline]
    const fn non_null<T>(&self) -> NonNull<T> {
        self.ptr.cast().as_non_null_ptr()
    }

    #[inline]
    const fn capacity(&self, elem_size: usize) -> usize {
        if elem_size == 0 { usize::MAX } else { self.cap.as_inner() }
    }

    #[inline]
    fn allocator(&self) -> &A {
        &self.alloc
    }

    #[inline]
    fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull<u8>, Layout)> {
        if elem_layout.size() == 0 || self.cap.as_inner() == 0 {
            None
        } else {
            // We could use `Layout::array` here, which ensures the absence of
            // `isize` and `usize` overflows, but this memory has already been
            // allocated so we know the size can't overflow. Skipping those
            // checks avoids an unwrap.
            unsafe {
                let alloc_size = elem_layout.size().unchecked_mul(self.cap.as_inner());
                let layout = Layout::from_size_align_unchecked(alloc_size, elem_layout.align());
                Some((self.ptr.into(), layout))
            }
        }
    }

    #[cfg(not(no_global_oom_handling))]
    #[inline]
    #[track_caller]
    fn reserve(&mut self, len: usize, additional: usize, elem_layout: Layout) {
        // Callers expect this function to be very cheap when there is already
        // sufficient capacity. Therefore, we move all the resizing and
        // error-handling logic behind a cold call, while making sure this
        // function is likely to be inlined as just a comparison and a call if
        // the comparison fails.
        #[cold]
        fn do_reserve_and_handle<A: Allocator>(
            slf: &mut RawVecInner<A>,
            len: usize,
            additional: usize,
            elem_layout: Layout,
        ) {
            if let Err(err) = slf.grow_amortized(len, additional, elem_layout) {
                handle_error(err);
            }
        }

        if self.needs_to_grow(len, additional, elem_layout) {
            do_reserve_and_handle(self, len, additional, elem_layout);
        }
    }

    #[cfg(not(no_global_oom_handling))]
    #[inline]
    #[track_caller]
    fn grow_one(&mut self, elem_layout: Layout) {
        if let Err(err) = self.grow_amortized(self.cap.as_inner(), 1, elem_layout) {
            handle_error(err);
        }
    }

    fn try_reserve(
        &mut self,
        len: usize,
        additional: usize,
        elem_layout: Layout,
    ) -> Result<(), TryReserveError> {
        if self.needs_to_grow(len, additional, elem_layout) {
            self.grow_amortized(len, additional, elem_layout)?;
        }
        unsafe {
            // Inform the optimizer that the reservation has succeeded or wasn't needed.
            hint::assert_unchecked(!self.needs_to_grow(len, additional, elem_layout));
        }
        Ok(())
    }

    #[cfg(not(no_global_oom_handling))]
    #[track_caller]
    fn reserve_exact(&mut self, len: usize, additional: usize, elem_layout: Layout) {
        if let Err(err) = self.try_reserve_exact(len, additional, elem_layout) {
            handle_error(err);
        }
    }

    fn try_reserve_exact(
        &mut self,
        len: usize,
        additional: usize,
        elem_layout: Layout,
    ) -> Result<(), TryReserveError> {
        if self.needs_to_grow(len, additional, elem_layout) {
            self.grow_exact(len, additional, elem_layout)?;
        }
        unsafe {
            // Inform the optimizer that the reservation has succeeded or wasn't needed.
            hint::assert_unchecked(!self.needs_to_grow(len, additional, elem_layout));
        }
        Ok(())
    }

    #[cfg(not(no_global_oom_handling))]
    #[inline]
    #[track_caller]
    fn shrink_to_fit(&mut self, cap: usize, elem_layout: Layout) {
        if let Err(err) = self.shrink(cap, elem_layout) {
            handle_error(err);
        }
    }

    #[inline]
    fn needs_to_grow(&self, len: usize, additional: usize, elem_layout: Layout) -> bool {
        additional > self.capacity(elem_layout.size()).wrapping_sub(len)
    }

    /// # Safety
    ///
    /// `cap` must not exceed `isize::MAX`.
    #[inline]
    unsafe fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
        // Allocators currently return a `NonNull<[u8]>` whose length matches
        // the size requested. If that ever changes, the capacity here should
        // change to `ptr.len() / size_of::<T>()`.
        self.ptr = Unique::from(ptr.cast());
        self.cap = unsafe { Cap::new_unchecked(cap) };
    }

    fn grow_amortized(
        &mut self,
        len: usize,
        additional: usize,
        elem_layout: Layout,
    ) -> Result<(), TryReserveError> {
        // This is ensured by the calling contexts.
        debug_assert!(additional > 0);

        if elem_layout.size() == 0 {
            // Since we return a capacity of `usize::MAX` when `elem_size` is
            // 0, getting to here necessarily means the buffer is overfull.
            return Err(CapacityOverflow.into());
        }

        // Nothing we can really do about these checks, sadly.
        let required_cap = len.checked_add(additional).ok_or(CapacityOverflow)?;

        // This guarantees exponential growth. The doubling cannot overflow
        // because `cap <= isize::MAX` and the type of `cap` is `usize`.
        let cap = cmp::max(self.cap.as_inner() * 2, required_cap);
        let cap = cmp::max(min_non_zero_cap(elem_layout.size()), cap);

        let new_layout = layout_array(cap, elem_layout)?;

        let ptr = finish_grow(new_layout, self.current_memory(elem_layout), &mut self.alloc)?;
        // SAFETY: `finish_grow` would have resulted in a capacity overflow if
        // we tried to allocate more than `isize::MAX` items.
        unsafe { self.set_ptr_and_cap(ptr, cap) };
        Ok(())
    }

    fn grow_exact(
        &mut self,
        len: usize,
        additional: usize,
        elem_layout: Layout,
    ) -> Result<(), TryReserveError> {
        if elem_layout.size() == 0 {
            // Since we return a capacity of `usize::MAX` when the element size
            // is 0, getting to here necessarily means the buffer is overfull.
            return Err(CapacityOverflow.into());
        }

        let cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
        let new_layout = layout_array(cap, elem_layout)?;

        let ptr = finish_grow(new_layout, self.current_memory(elem_layout), &mut self.alloc)?;
        // SAFETY: `finish_grow` would have resulted in a capacity overflow if
        // we tried to allocate more than `isize::MAX` items.
        unsafe {
            self.set_ptr_and_cap(ptr, cap);
        }
        Ok(())
    }

    #[cfg(not(no_global_oom_handling))]
    #[inline]
    fn shrink(&mut self, cap: usize, elem_layout: Layout) -> Result<(), TryReserveError> {
        assert!(cap <= self.capacity(elem_layout.size()), "Tried to shrink to a larger capacity");
        // SAFETY: Just checked this isn't trying to grow.
        unsafe { self.shrink_unchecked(cap, elem_layout) }
    }

    /// `shrink`, but without the capacity check.
    ///
    /// This is split out so that `shrink` can inline the check, since it
    /// usually folds away.
    ///
    /// # Safety
    ///
    /// `cap <= self.capacity(elem_layout.size())`
    #[cfg(not(no_global_oom_handling))]
    unsafe fn shrink_unchecked(
        &mut self,
        cap: usize,
        elem_layout: Layout,
    ) -> Result<(), TryReserveError> {
        let (ptr, layout) =
            if let Some(mem) = self.current_memory(elem_layout) { mem } else { return Ok(()) };

        // If shrinking to 0, deallocate the buffer. We don't reach this point
        // for zero-sized element types, since `current_memory()` will have
        // returned `None`.
        if cap == 0 {
            unsafe { self.alloc.deallocate(ptr, layout) };
            self.ptr =
                unsafe { Unique::new_unchecked(ptr::without_provenance_mut(elem_layout.align())) };
            self.cap = ZERO_CAP;
        } else {
            let ptr = unsafe {
                // The new size cannot overflow because it would have
                // overflowed earlier when the capacity was larger.
                let new_size = elem_layout.size().unchecked_mul(cap);
                let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
                self.alloc
                    .shrink(ptr, layout, new_layout)
                    .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
            };
            // SAFETY: if the allocation is valid, then the capacity is too.
            unsafe {
                self.set_ptr_and_cap(ptr, cap);
            }
        }
        Ok(())
    }

    /// # Safety
    ///
    /// This function deallocates the owned allocation, but does not update
    /// `ptr` or `cap` to prevent double-free or use-after-free. Essentially,
    /// do not use `self` at all after calling this.
    unsafe fn deallocate(&mut self, elem_layout: Layout) {
        if let Some((ptr, layout)) = self.current_memory(elem_layout) {
            unsafe {
                self.alloc.deallocate(ptr, layout);
            }
        }
    }
}

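// Illustrative, cfg(test)-gated sketch (not part of the original module; the
// module name is made up here): end-to-end shrink behavior through the
// crate-internal `RawVec` API. The capacity values are exact because the code
// above stores requested, not returned, sizes.
#[cfg(test)]
mod shrink_sketch {
    use super::*;

    #[test]
    fn shrink_to_fit_reduces_capacity_and_zero_deallocates() {
        let mut buf: RawVec<u32> = RawVec::try_with_capacity_in(8, Global).unwrap();
        assert_eq!(buf.capacity(), 8);

        buf.shrink_to_fit(2);
        assert_eq!(buf.capacity(), 2);

        // Shrinking to zero deallocates and resets to a dangling pointer.
        buf.shrink_to_fit(0);
        assert_eq!(buf.capacity(), 0);
    }
}
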
// Outlined (`#[cold]`) from the growth methods, but deliberately not marked
// `inline(never)`, so that optimizers can still observe its specifics.
#[cold]
fn finish_grow<A>(
    new_layout: Layout,
    current_memory: Option<(NonNull<u8>, Layout)>,
    alloc: &mut A,
) -> Result<NonNull<[u8]>, TryReserveError>
where
    A: Allocator,
{
    let memory = if let Some((ptr, old_layout)) = current_memory {
        debug_assert_eq!(old_layout.align(), new_layout.align());
        unsafe {
            // The allocator checks for alignment equality.
            hint::assert_unchecked(old_layout.align() == new_layout.align());
            alloc.grow(ptr, old_layout, new_layout)
        }
    } else {
        alloc.allocate(new_layout)
    };

    memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () }.into())
}

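// Illustrative, cfg(test)-gated sketch (not part of the original module; the
// module name is made up here): the `current_memory == None` path of
// `finish_grow` is a plain allocation.
#[cfg(test)]
mod finish_grow_sketch {
    use super::*;

    #[test]
    fn fresh_allocation_when_there_is_no_current_block() {
        let layout = Layout::array::<u8>(16).unwrap();
        let mut alloc = Global;
        let ptr = finish_grow(layout, None, &mut alloc).unwrap();
        // The allocator may return more than requested, never less.
        assert!(ptr.len() >= 16);
        // SAFETY: `ptr` was just allocated with `layout` by this allocator.
        unsafe { alloc.deallocate(ptr.cast(), layout) };
    }
}
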
// Central function for reserve error handling.
#[cfg(not(no_global_oom_handling))]
#[cold]
#[optimize(size)]
#[track_caller]
fn handle_error(e: TryReserveError) -> ! {
    match e.kind() {
        CapacityOverflow => capacity_overflow(),
        AllocError { layout, .. } => handle_alloc_error(layout),
    }
}

#[inline]
fn layout_array(cap: usize, elem_layout: Layout) -> Result<Layout, TryReserveError> {
    elem_layout.repeat(cap).map(|(layout, _pad)| layout).map_err(|_| CapacityOverflow.into())
}
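
// Illustrative, cfg(test)-gated sketch (not part of the original module; the
// module name is made up here): `layout_array` multiplies out the element
// layout and reports overflow as `CapacityOverflow`.
#[cfg(test)]
mod layout_array_sketch {
    use super::*;

    #[test]
    fn scales_size_and_catches_overflow() {
        let elem = Layout::new::<u16>();
        let arr = layout_array(8, elem).unwrap();
        assert_eq!(arr.size(), 16);
        assert_eq!(arr.align(), 2);
        assert!(layout_array(usize::MAX, elem).is_err());
    }
}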