//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
//! Counted'.
//!
//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
//! given allocation is destroyed, the value stored in that allocation (often
//! referred to as "inner value") is also dropped.
//!
//! Shared references in Rust disallow mutation by default, and [`Rc`]
//! is no exception: you cannot generally obtain a mutable reference to
//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
//! inside an `Rc`][mutability].
//!
//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
//! does not implement [`Send`]. As a result, the Rust compiler
//! will check *at compile time* that you are not sending [`Rc`]s between
//! threads. If you need multi-threaded, atomic reference counting, use
//! [`sync::Arc`][arc].
//!
//! The [`downgrade`][downgrade] method can be used to create a non-owning
//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
//! already been dropped. In other words, `Weak` pointers do not keep the value
//! inside the allocation alive; however, they *do* keep the allocation
//! (the backing store for the inner value) alive.
//!
//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
//! [`Weak`] is used to break cycles. For example, a tree could have strong
//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
//! children back to their parents.
//!
//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
//! functions, called using [fully qualified syntax]:
//!
//! ```
//! use std::rc::Rc;
//!
//! let my_rc = Rc::new(());
//! let my_weak = Rc::downgrade(&my_rc);
//! ```
//!
//! `Rc<T>`'s implementations of traits like `Clone` may also be called using
//! fully qualified syntax. Some people prefer to use fully qualified syntax,
//! while others prefer using method-call syntax.
//!
//! ```
//! use std::rc::Rc;
//!
//! let rc = Rc::new(());
//! // Method-call syntax
//! let rc2 = rc.clone();
//! // Fully qualified syntax
//! let rc3 = Rc::clone(&rc);
//! ```
//!
//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
//! already been dropped.
//!
//! # Cloning references
//!
//! Creating a new reference to the same allocation as an existing reference counted pointer
//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
//!
//! ```
//! use std::rc::Rc;
//!
//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
//! // The two syntaxes below are equivalent.
//! let a = foo.clone();
//! let b = Rc::clone(&foo);
//! // a and b both point to the same memory location as foo.
//! ```
//!
//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
//! the meaning of the code. In the example above, this syntax makes it easier to see that
//! this code is creating a new reference rather than copying the whole content of foo.
//!
//! # Examples
//!
//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
//! unique ownership, because more than one gadget may belong to the same
//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
//!
//! ```
//! use std::rc::Rc;
//!
//! struct Owner {
//!     name: String,
//!     // ...other fields
//! }
//!
//! struct Gadget {
//!     id: i32,
//!     owner: Rc<Owner>,
//!     // ...other fields
//! }
//!
//! fn main() {
//!     // Create a reference-counted `Owner`.
//!     let gadget_owner: Rc<Owner> = Rc::new(
//!         Owner {
//!             name: "Gadget Man".to_string(),
//!         }
//!     );
//!
//!     // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
//!     // gives us a new pointer to the same `Owner` allocation, incrementing
//!     // the reference count in the process.
//!     let gadget1 = Gadget {
//!         id: 1,
//!         owner: Rc::clone(&gadget_owner),
//!     };
//!     let gadget2 = Gadget {
//!         id: 2,
//!         owner: Rc::clone(&gadget_owner),
//!     };
//!
//!     // Dispose of our local variable `gadget_owner`.
//!     drop(gadget_owner);
//!
//!     // Despite dropping `gadget_owner`, we're still able to print out the name
//!     // of the `Owner` of the `Gadget`s. This is because we've only dropped a
//!     // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
//!     // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
//!     // live. The field projection `gadget1.owner.name` works because
//!     // `Rc<Owner>` automatically dereferences to `Owner`.
//!     println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
//!     println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
//!
//!     // At the end of the function, `gadget1` and `gadget2` are destroyed, and
//!     // with them the last counted references to our `Owner`. Gadget Man now
//!     // gets destroyed as well.
//! }
//! ```
//!
//! If our requirements change, and we also need to be able to traverse from
//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
//! to `Gadget` introduces a cycle. This means that their
//! reference counts can never reach 0, and the allocation will never be destroyed:
//! a memory leak. In order to get around this, we can use [`Weak`]
//! pointers.
//!
//! Rust actually makes it somewhat difficult to produce this loop in the first
//! place. In order to end up with two values that point at each other, one of
//! them needs to be mutable. This is difficult because [`Rc`] enforces
//! memory safety by only giving out shared references to the value it wraps,
//! and these don't allow direct mutation. We need to wrap the part of the
//! value we wish to mutate in a [`RefCell`], which provides *interior
//! mutability*: a method to achieve mutability through a shared reference.
//! [`RefCell`] enforces Rust's borrowing rules at runtime.
//!
//! ```
//! use std::rc::Rc;
//! use std::rc::Weak;
//! use std::cell::RefCell;
//!
//! struct Owner {
//!     name: String,
//!     gadgets: RefCell<Vec<Weak<Gadget>>>,
//!     // ...other fields
//! }
//!
//! struct Gadget {
//!     id: i32,
//!     owner: Rc<Owner>,
//!     // ...other fields
//! }
//!
//! fn main() {
//!     // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
//!     // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
//!     // a shared reference.
//!     let gadget_owner: Rc<Owner> = Rc::new(
//!         Owner {
//!             name: "Gadget Man".to_string(),
//!             gadgets: RefCell::new(vec![]),
//!         }
//!     );
//!
//!     // Create `Gadget`s belonging to `gadget_owner`, as before.
//!     let gadget1 = Rc::new(
//!         Gadget {
//!             id: 1,
//!             owner: Rc::clone(&gadget_owner),
//!         }
//!     );
//!     let gadget2 = Rc::new(
//!         Gadget {
//!             id: 2,
//!             owner: Rc::clone(&gadget_owner),
//!         }
//!     );
//!
//!     // Add the `Gadget`s to their `Owner`.
//!     {
//!         let mut gadgets = gadget_owner.gadgets.borrow_mut();
//!         gadgets.push(Rc::downgrade(&gadget1));
//!         gadgets.push(Rc::downgrade(&gadget2));
//!
//!         // `RefCell` dynamic borrow ends here.
//!     }
//!
//!     // Iterate over our `Gadget`s, printing their details out.
//!     for gadget_weak in gadget_owner.gadgets.borrow().iter() {
//!
//!         // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
//!         // guarantee the allocation still exists, we need to call
//!         // `upgrade`, which returns an `Option<Rc<Gadget>>`.
//!         //
//!         // In this case we know the allocation still exists, so we simply
//!         // `unwrap` the `Option`. In a more complicated program, you might
//!         // need graceful error handling for a `None` result.
//!
//!         let gadget = gadget_weak.upgrade().unwrap();
//!         println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
//!     }
//!
//!     // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
//!     // are destroyed. There are now no strong (`Rc`) pointers to the
//!     // gadgets, so they are destroyed. This zeroes the reference count on
//!     // Gadget Man, so he gets destroyed as well.
//! }
//! ```
//!
//! [clone]: Clone::clone
//! [`Cell`]: core::cell::Cell
//! [`RefCell`]: core::cell::RefCell
//! [arc]: crate::sync::Arc
//! [`Deref`]: core::ops::Deref
//! [downgrade]: Rc::downgrade
//! [upgrade]: Weak::upgrade
//! [mutability]: core::cell#introducing-mutability-inside-of-something-immutable
//! [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name

#![stable(feature = "rust1", since = "1.0.0")]

use core::any::Any;
use core::cell::{Cell, CloneFromCell};
#[cfg(not(no_global_oom_handling))]
use core::clone::TrivialClone;
use core::clone::{CloneToUninit, UseCloned};
use core::cmp::Ordering;
use core::hash::{Hash, Hasher};
use core::intrinsics::abort;
#[cfg(not(no_global_oom_handling))]
use core::iter;
use core::marker::{PhantomData, Unsize};
use core::mem::{self, Alignment, ManuallyDrop};
use core::num::NonZeroUsize;
use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver};
#[cfg(not(no_global_oom_handling))]
use core::ops::{Residual, Try};
use core::panic::{RefUnwindSafe, UnwindSafe};
#[cfg(not(no_global_oom_handling))]
use core::pin::Pin;
use core::pin::PinCoerceUnsized;
use core::ptr::{self, NonNull, drop_in_place};
#[cfg(not(no_global_oom_handling))]
use core::slice::from_raw_parts_mut;
use core::{borrow, fmt, hint};

#[cfg(not(no_global_oom_handling))]
use crate::alloc::handle_alloc_error;
use crate::alloc::{AllocError, Allocator, Global, Layout};
use crate::borrow::{Cow, ToOwned};
use crate::boxed::Box;
#[cfg(not(no_global_oom_handling))]
use crate::string::String;
#[cfg(not(no_global_oom_handling))]
use crate::vec::Vec;

// This is repr(C) to future-proof against possible field-reordering, which
// would interfere with otherwise safe [into|from]_raw() of transmutable
// inner types.
// repr(align(2)) (forcing alignment to at least 2) is required because usize
// has 1-byte alignment on AVR.
#[repr(C, align(2))]
struct RcInner<T: ?Sized> {
    strong: Cell<usize>,
    weak: Cell<usize>,
    value: T,
}

291/// Calculate layout for `RcInner<T>` using the inner value's layout
292fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout {
293    // Calculate layout using the given value layout.
294    // Previously, layout was calculated on the expression
295    // `&*(ptr as *const RcInner<T>)`, but this created a misaligned
296    // reference (see #54908).
297    Layout::new::<RcInner<()>>().extend(layout).unwrap().0.pad_to_align()
298}
299
300/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
301/// Counted'.
302///
303/// See the [module-level documentation](./index.html) for more details.
304///
305/// The inherent methods of `Rc` are all associated functions, which means
306/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
307/// `value.get_mut()`. This avoids conflicts with methods of the inner type `T`.
308///
309/// [get_mut]: Rc::get_mut
310#[doc(search_unbox)]
311#[rustc_diagnostic_item = "Rc"]
312#[stable(feature = "rust1", since = "1.0.0")]
313#[rustc_insignificant_dtor]
314#[diagnostic::on_move(
315    message = "the type `{Self}` does not implement `Copy`",
316    label = "this move could be avoided by cloning the original `{Self}`, which is inexpensive",
317    note = "consider using `Rc::clone`"
318)]
319
320pub struct Rc<
321    T: ?Sized,
322    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
323> {
324    ptr: NonNull<RcInner<T>>,
325    phantom: PhantomData<RcInner<T>>,
326    alloc: A,
327}
328
329#[stable(feature = "rust1", since = "1.0.0")]
330impl<T: ?Sized, A: Allocator> !Send for Rc<T, A> {}
331
332// Note that this negative impl isn't strictly necessary for correctness,
333// as `Rc` transitively contains a `Cell`, which is itself `!Sync`.
334// However, given how important `Rc`'s `!Sync`-ness is,
335// having an explicit negative impl is nice for documentation purposes
336// and results in nicer error messages.
337#[stable(feature = "rust1", since = "1.0.0")]
338impl<T: ?Sized, A: Allocator> !Sync for Rc<T, A> {}
339
340#[stable(feature = "catch_unwind", since = "1.9.0")]
341impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> UnwindSafe for Rc<T, A> {}
342#[stable(feature = "rc_ref_unwind_safe", since = "1.58.0")]
343impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> RefUnwindSafe for Rc<T, A> {}
344
345#[unstable(feature = "coerce_unsized", issue = "18598")]
346impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Rc<U, A>> for Rc<T, A> {}
347
348#[unstable(feature = "dispatch_from_dyn", issue = "none")]
349impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}
350
351// SAFETY: `Rc::clone` doesn't access any `Cell`s which could contain the `Rc` being cloned.
352#[unstable(feature = "cell_get_cloned", issue = "145329")]
353unsafe impl<T: ?Sized> CloneFromCell for Rc<T> {}
354
355impl<T: ?Sized> Rc<T> {
356    #[inline]
357    unsafe fn from_inner(ptr: NonNull<RcInner<T>>) -> Self {
358        unsafe { Self::from_inner_in(ptr, Global) }
359    }
360
361    #[inline]
362    unsafe fn from_ptr(ptr: *mut RcInner<T>) -> Self {
363        unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
364    }
365}
366
367impl<T: ?Sized, A: Allocator> Rc<T, A> {
368    #[inline(always)]
369    fn inner(&self) -> &RcInner<T> {
370        // This unsafety is ok because while this Rc is alive we're guaranteed
371        // that the inner pointer is valid.
372        unsafe { self.ptr.as_ref() }
373    }
374
375    #[inline]
376    fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
377        let this = mem::ManuallyDrop::new(this);
378        (this.ptr, unsafe { ptr::read(&this.alloc) })
379    }
380
381    #[inline]
382    unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
383        Self { ptr, phantom: PhantomData, alloc }
384    }
385
386    #[inline]
387    unsafe fn from_ptr_in(ptr: *mut RcInner<T>, alloc: A) -> Self {
388        unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) }
389    }
390
391    // Non-inlined part of `drop`.
392    #[inline(never)]
393    unsafe fn drop_slow(&mut self) {
394        // Reconstruct the "strong weak" pointer and drop it when this
395        // variable goes out of scope. This ensures that the memory is
396        // deallocated even if the destructor of `T` panics.
397        let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };
398
399        // Destroy the contained object.
400        // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
401        unsafe {
402            ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value);
403        }
404    }
405}
406
407impl<T> Rc<T> {
408    /// Constructs a new `Rc<T>`.
409    ///
410    /// # Examples
411    ///
412    /// ```
413    /// use std::rc::Rc;
414    ///
415    /// let five = Rc::new(5);
416    /// ```
417    #[cfg(not(no_global_oom_handling))]
418    #[stable(feature = "rust1", since = "1.0.0")]
419    pub fn new(value: T) -> Rc<T> {
420        // There is an implicit weak pointer owned by all the strong
421        // pointers, which ensures that the weak destructor never frees
422        // the allocation while the strong destructor is running, even
423        // if the weak pointer is stored inside the strong one.
424        unsafe {
425            Self::from_inner(
426                Box::leak(Box::new(RcInner { strong: Cell::new(1), weak: Cell::new(1), value }))
427                    .into(),
428            )
429        }
430    }
431
432    /// Constructs a new `Rc<T>` while giving you a `Weak<T>` to the allocation,
433    /// to allow you to construct a `T` which holds a weak pointer to itself.
434    ///
435    /// Generally, a structure circularly referencing itself, either directly or
436    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
437    /// Using this function, you get access to the weak pointer during the
438    /// initialization of `T`, before the `Rc<T>` is created, such that you can
439    /// clone and store it inside the `T`.
440    ///
441    /// `new_cyclic` first allocates the managed allocation for the `Rc<T>`,
442    /// then calls your closure, giving it a `Weak<T>` to this allocation,
443    /// and only afterwards completes the construction of the `Rc<T>` by placing
444    /// the `T` returned from your closure into the allocation.
445    ///
446    /// Since the new `Rc<T>` is not fully-constructed until `Rc<T>::new_cyclic`
447    /// returns, calling [`upgrade`] on the weak reference inside your closure will
448    /// fail and result in a `None` value.
449    ///
450    /// # Panics
451    ///
452    /// If `data_fn` panics, the panic is propagated to the caller, and the
453    /// temporary [`Weak<T>`] is dropped normally.
454    ///
455    /// # Examples
456    ///
457    /// ```
458    /// # #![allow(dead_code)]
459    /// use std::rc::{Rc, Weak};
460    ///
461    /// struct Gadget {
462    ///     me: Weak<Gadget>,
463    /// }
464    ///
465    /// impl Gadget {
466    ///     /// Constructs a reference counted Gadget.
467    ///     fn new() -> Rc<Self> {
468    ///         // `me` is a `Weak<Gadget>` pointing at the new allocation of the
469    ///         // `Rc` we're constructing.
470    ///         Rc::new_cyclic(|me| {
471    ///             // Create the actual struct here.
472    ///             Gadget { me: me.clone() }
473    ///         })
474    ///     }
475    ///
476    ///     /// Returns a reference counted pointer to Self.
477    ///     fn me(&self) -> Rc<Self> {
478    ///         self.me.upgrade().unwrap()
479    ///     }
480    /// }
481    /// ```
482    /// [`upgrade`]: Weak::upgrade
483    #[cfg(not(no_global_oom_handling))]
484    #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
485    pub fn new_cyclic<F>(data_fn: F) -> Rc<T>
486    where
487        F: FnOnce(&Weak<T>) -> T,
488    {
489        Self::new_cyclic_in(data_fn, Global)
490    }
491
492    /// Constructs a new `Rc` with uninitialized contents.
493    ///
494    /// # Examples
495    ///
496    /// ```
497    /// use std::rc::Rc;
498    ///
499    /// let mut five = Rc::<u32>::new_uninit();
500    ///
501    /// // Deferred initialization:
502    /// Rc::get_mut(&mut five).unwrap().write(5);
503    ///
504    /// let five = unsafe { five.assume_init() };
505    ///
506    /// assert_eq!(*five, 5)
507    /// ```
508    #[cfg(not(no_global_oom_handling))]
509    #[stable(feature = "new_uninit", since = "1.82.0")]
510    #[must_use]
511    pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
512        unsafe {
513            Rc::from_ptr(Rc::allocate_for_layout(
514                Layout::new::<T>(),
515                |layout| Global.allocate(layout),
516                <*mut u8>::cast,
517            ))
518        }
519    }
520
521    /// Constructs a new `Rc` with uninitialized contents, with the memory
522    /// being filled with `0` bytes.
523    ///
524    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
525    /// incorrect usage of this method.
526    ///
527    /// # Examples
528    ///
529    /// ```
530    /// use std::rc::Rc;
531    ///
532    /// let zero = Rc::<u32>::new_zeroed();
533    /// let zero = unsafe { zero.assume_init() };
534    ///
535    /// assert_eq!(*zero, 0)
536    /// ```
537    ///
538    /// [zeroed]: mem::MaybeUninit::zeroed
539    #[cfg(not(no_global_oom_handling))]
540    #[stable(feature = "new_zeroed_alloc", since = "1.92.0")]
541    #[must_use]
542    pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
543        unsafe {
544            Rc::from_ptr(Rc::allocate_for_layout(
545                Layout::new::<T>(),
546                |layout| Global.allocate_zeroed(layout),
547                <*mut u8>::cast,
548            ))
549        }
550    }
551
552    /// Constructs a new `Rc<T>`, returning an error if the allocation fails
553    ///
554    /// # Examples
555    ///
556    /// ```
557    /// #![feature(allocator_api)]
558    /// use std::rc::Rc;
559    ///
560    /// let five = Rc::try_new(5);
561    /// # Ok::<(), std::alloc::AllocError>(())
562    /// ```
563    #[unstable(feature = "allocator_api", issue = "32838")]
564    pub fn try_new(value: T) -> Result<Rc<T>, AllocError> {
565        // There is an implicit weak pointer owned by all the strong
566        // pointers, which ensures that the weak destructor never frees
567        // the allocation while the strong destructor is running, even
568        // if the weak pointer is stored inside the strong one.
569        unsafe {
570            Ok(Self::from_inner(
571                Box::leak(Box::try_new(RcInner {
572                    strong: Cell::new(1),
573                    weak: Cell::new(1),
574                    value,
575                })?)
576                .into(),
577            ))
578        }
579    }
580
581    /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails
582    ///
583    /// # Examples
584    ///
585    /// ```
586    /// #![feature(allocator_api)]
587    ///
588    /// use std::rc::Rc;
589    ///
590    /// let mut five = Rc::<u32>::try_new_uninit()?;
591    ///
592    /// // Deferred initialization:
593    /// Rc::get_mut(&mut five).unwrap().write(5);
594    ///
595    /// let five = unsafe { five.assume_init() };
596    ///
597    /// assert_eq!(*five, 5);
598    /// # Ok::<(), std::alloc::AllocError>(())
599    /// ```
600    #[unstable(feature = "allocator_api", issue = "32838")]
601    pub fn try_new_uninit() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
602        unsafe {
603            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
604                Layout::new::<T>(),
605                |layout| Global.allocate(layout),
606                <*mut u8>::cast,
607            )?))
608        }
609    }
610
611    /// Constructs a new `Rc` with uninitialized contents, with the memory
612    /// being filled with `0` bytes, returning an error if the allocation fails
613    ///
614    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
615    /// incorrect usage of this method.
616    ///
617    /// # Examples
618    ///
619    /// ```
620    /// #![feature(allocator_api)]
621    ///
622    /// use std::rc::Rc;
623    ///
624    /// let zero = Rc::<u32>::try_new_zeroed()?;
625    /// let zero = unsafe { zero.assume_init() };
626    ///
627    /// assert_eq!(*zero, 0);
628    /// # Ok::<(), std::alloc::AllocError>(())
629    /// ```
630    ///
631    /// [zeroed]: mem::MaybeUninit::zeroed
632    #[unstable(feature = "allocator_api", issue = "32838")]
633    pub fn try_new_zeroed() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
634        unsafe {
635            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
636                Layout::new::<T>(),
637                |layout| Global.allocate_zeroed(layout),
638                <*mut u8>::cast,
639            )?))
640        }
641    }
642    /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
643    /// `value` will be pinned in memory and unable to be moved.
644    #[cfg(not(no_global_oom_handling))]
645    #[stable(feature = "pin", since = "1.33.0")]
646    #[must_use]
647    pub fn pin(value: T) -> Pin<Rc<T>> {
648        unsafe { Pin::new_unchecked(Rc::new(value)) }
649    }
650
651    /// Maps the value in an `Rc`, reusing the allocation if possible.
652    ///
653    /// `f` is called on a reference to the value in the `Rc`, and the result is returned, also in
654    /// an `Rc`.
655    ///
656    /// Note: this is an associated function, which means that you have
657    /// to call it as `Rc::map(r, f)` instead of `r.map(f)`. This
658    /// is so that there is no conflict with a method on the inner type.
659    ///
660    /// # Examples
661    ///
662    /// ```
663    /// #![feature(smart_pointer_try_map)]
664    ///
665    /// use std::rc::Rc;
666    ///
667    /// let r = Rc::new(7);
668    /// let new = Rc::map(r, |i| i + 7);
669    /// assert_eq!(*new, 14);
670    /// ```
671    #[cfg(not(no_global_oom_handling))]
672    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
673    pub fn map<U>(this: Self, f: impl FnOnce(&T) -> U) -> Rc<U> {
674        if size_of::<T>() == size_of::<U>()
675            && align_of::<T>() == align_of::<U>()
676            && Rc::is_unique(&this)
677        {
678            unsafe {
679                let ptr = Rc::into_raw(this);
680                let value = ptr.read();
681                let mut allocation = Rc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());
682
683                Rc::get_mut_unchecked(&mut allocation).write(f(&value));
684                allocation.assume_init()
685            }
686        } else {
687            Rc::new(f(&*this))
688        }
689    }
690
691    /// Attempts to map the value in an `Rc`, reusing the allocation if possible.
692    ///
693    /// `f` is called on a reference to the value in the `Rc`, and if the operation succeeds, the
694    /// result is returned, also in an `Rc`.
695    ///
696    /// Note: this is an associated function, which means that you have
697    /// to call it as `Rc::try_map(r, f)` instead of `r.try_map(f)`. This
698    /// is so that there is no conflict with a method on the inner type.
699    ///
700    /// # Examples
701    ///
702    /// ```
703    /// #![feature(smart_pointer_try_map)]
704    ///
705    /// use std::rc::Rc;
706    ///
707    /// let b = Rc::new(7);
708    /// let new = Rc::try_map(b, |&i| u32::try_from(i)).unwrap();
709    /// assert_eq!(*new, 7);
710    /// ```
711    #[cfg(not(no_global_oom_handling))]
712    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
713    pub fn try_map<R>(
714        this: Self,
715        f: impl FnOnce(&T) -> R,
716    ) -> <R::Residual as Residual<Rc<R::Output>>>::TryType
717    where
718        R: Try,
719        R::Residual: Residual<Rc<R::Output>>,
720    {
721        if size_of::<T>() == size_of::<R::Output>()
722            && align_of::<T>() == align_of::<R::Output>()
723            && Rc::is_unique(&this)
724        {
725            unsafe {
726                let ptr = Rc::into_raw(this);
727                let value = ptr.read();
728                let mut allocation = Rc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());
729
730                Rc::get_mut_unchecked(&mut allocation).write(f(&value)?);
731                try { allocation.assume_init() }
732            }
733        } else {
734            try { Rc::new(f(&*this)?) }
735        }
736    }
737}
738
impl<T, A: Allocator> Rc<T, A> {
    /// Constructs a new `Rc` in the provided allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::new_in(5, System);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_in(value: T, alloc: A) -> Rc<T, A> {
        // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
        // That would make code size bigger.
        match Self::try_new_in(value, alloc) {
            Ok(m) => m,
            Err(_) => handle_alloc_error(Layout::new::<RcInner<T>>()),
        }
    }

764    /// Constructs a new `Rc` with uninitialized contents in the provided allocator.
765    ///
766    /// # Examples
767    ///
768    /// ```
769    /// #![feature(get_mut_unchecked)]
770    /// #![feature(allocator_api)]
771    ///
772    /// use std::rc::Rc;
773    /// use std::alloc::System;
774    ///
775    /// let mut five = Rc::<u32, _>::new_uninit_in(System);
776    ///
777    /// let five = unsafe {
778    ///     // Deferred initialization:
779    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
780    ///
781    ///     five.assume_init()
782    /// };
783    ///
784    /// assert_eq!(*five, 5)
785    /// ```
786    #[cfg(not(no_global_oom_handling))]
787    #[unstable(feature = "allocator_api", issue = "32838")]
788    #[inline]
789    pub fn new_uninit_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
790        unsafe {
791            Rc::from_ptr_in(
792                Rc::allocate_for_layout(
793                    Layout::new::<T>(),
794                    |layout| alloc.allocate(layout),
795                    <*mut u8>::cast,
796                ),
797                alloc,
798            )
799        }
800    }
801
802    /// Constructs a new `Rc` with uninitialized contents, with the memory
803    /// being filled with `0` bytes, in the provided allocator.
804    ///
805    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
806    /// incorrect usage of this method.
807    ///
808    /// # Examples
809    ///
810    /// ```
811    /// #![feature(allocator_api)]
812    ///
813    /// use std::rc::Rc;
814    /// use std::alloc::System;
815    ///
816    /// let zero = Rc::<u32, _>::new_zeroed_in(System);
817    /// let zero = unsafe { zero.assume_init() };
818    ///
819    /// assert_eq!(*zero, 0)
820    /// ```
821    ///
822    /// [zeroed]: mem::MaybeUninit::zeroed
823    #[cfg(not(no_global_oom_handling))]
824    #[unstable(feature = "allocator_api", issue = "32838")]
825    #[inline]
826    pub fn new_zeroed_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
827        unsafe {
828            Rc::from_ptr_in(
829                Rc::allocate_for_layout(
830                    Layout::new::<T>(),
831                    |layout| alloc.allocate_zeroed(layout),
832                    <*mut u8>::cast,
833                ),
834                alloc,
835            )
836        }
837    }
838
    /// Constructs a new `Rc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
    /// to allow you to construct a `T` which holds a weak pointer to itself.
    ///
    /// Generally, a structure circularly referencing itself, either directly or
    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
    /// Using this function, you get access to the weak pointer during the
    /// initialization of `T`, before the `Rc<T, A>` is created, such that you can
    /// clone and store it inside the `T`.
    ///
    /// `new_cyclic_in` first allocates the managed allocation for the `Rc<T, A>`,
    /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
    /// and only afterwards completes the construction of the `Rc<T, A>` by placing
    /// the `T` returned from your closure into the allocation.
    ///
    /// Since the new `Rc<T, A>` is not fully-constructed until `Rc<T, A>::new_cyclic_in`
    /// returns, calling [`upgrade`] on the weak reference inside your closure will
    /// fail and result in a `None` value.
    ///
    /// # Panics
    ///
    /// If `data_fn` panics, the panic is propagated to the caller, and the
    /// temporary [`Weak<T, A>`] is dropped normally.
    ///
    /// # Examples
    ///
    /// See [`new_cyclic`].
    ///
    /// [`new_cyclic`]: Rc::new_cyclic
    /// [`upgrade`]: Weak::upgrade
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Rc<T, A>
    where
        F: FnOnce(&Weak<T, A>) -> T,
    {
        // Construct the inner in the "uninitialized" state with a single
        // weak reference.
        let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
            RcInner {
                strong: Cell::new(0),
                weak: Cell::new(1),
                value: mem::MaybeUninit::<T>::uninit(),
            },
            alloc,
        ));
        let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into();
        // Same allocation reinterpreted with `T` (rather than `MaybeUninit<T>`)
        // as the value type; the two layouts are identical.
        let init_ptr: NonNull<RcInner<T>> = uninit_ptr.cast();

        let weak = Weak { ptr: init_ptr, alloc };

        // It's important we don't give up ownership of the weak pointer, or
        // else the memory might be freed by the time `data_fn` returns. If
        // we really wanted to pass ownership, we could create an additional
        // weak pointer for ourselves, but this would result in additional
        // updates to the weak reference count which might not be necessary
        // otherwise.
        let data = data_fn(&weak);

        let strong = unsafe {
            let inner = init_ptr.as_ptr();
            // `data_fn` has returned without panicking: initialize the value
            // slot and only then make the allocation reachable as strong.
            ptr::write(&raw mut (*inner).value, data);

            let prev_value = (*inner).strong.get();
            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
            (*inner).strong.set(1);

            // Strong references should collectively own a shared weak reference,
            // so don't run the destructor for our old weak reference.
            // Calling into_raw_with_allocator has the double effect of giving us back the allocator,
            // and forgetting the weak reference.
            let alloc = weak.into_raw_with_allocator().1;

            Rc::from_inner_in(init_ptr, alloc)
        };

        strong
    }
916
917    /// Constructs a new `Rc<T>` in the provided allocator, returning an error if the allocation
918    /// fails
919    ///
920    /// # Examples
921    ///
922    /// ```
923    /// #![feature(allocator_api)]
924    /// use std::rc::Rc;
925    /// use std::alloc::System;
926    ///
927    /// let five = Rc::try_new_in(5, System);
928    /// # Ok::<(), std::alloc::AllocError>(())
929    /// ```
930    #[unstable(feature = "allocator_api", issue = "32838")]
931    #[inline]
932    pub fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError> {
933        // There is an implicit weak pointer owned by all the strong
934        // pointers, which ensures that the weak destructor never frees
935        // the allocation while the strong destructor is running, even
936        // if the weak pointer is stored inside the strong one.
937        let (ptr, alloc) = Box::into_unique(Box::try_new_in(
938            RcInner { strong: Cell::new(1), weak: Cell::new(1), value },
939            alloc,
940        )?);
941        Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) })
942    }
943
944    /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an
945    /// error if the allocation fails
946    ///
947    /// # Examples
948    ///
949    /// ```
950    /// #![feature(allocator_api)]
951    /// #![feature(get_mut_unchecked)]
952    ///
953    /// use std::rc::Rc;
954    /// use std::alloc::System;
955    ///
956    /// let mut five = Rc::<u32, _>::try_new_uninit_in(System)?;
957    ///
958    /// let five = unsafe {
959    ///     // Deferred initialization:
960    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
961    ///
962    ///     five.assume_init()
963    /// };
964    ///
965    /// assert_eq!(*five, 5);
966    /// # Ok::<(), std::alloc::AllocError>(())
967    /// ```
968    #[unstable(feature = "allocator_api", issue = "32838")]
969    #[inline]
970    pub fn try_new_uninit_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
971        unsafe {
972            Ok(Rc::from_ptr_in(
973                Rc::try_allocate_for_layout(
974                    Layout::new::<T>(),
975                    |layout| alloc.allocate(layout),
976                    <*mut u8>::cast,
977                )?,
978                alloc,
979            ))
980        }
981    }
982
983    /// Constructs a new `Rc` with uninitialized contents, with the memory
984    /// being filled with `0` bytes, in the provided allocator, returning an error if the allocation
985    /// fails
986    ///
987    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
988    /// incorrect usage of this method.
989    ///
990    /// # Examples
991    ///
992    /// ```
993    /// #![feature(allocator_api)]
994    ///
995    /// use std::rc::Rc;
996    /// use std::alloc::System;
997    ///
998    /// let zero = Rc::<u32, _>::try_new_zeroed_in(System)?;
999    /// let zero = unsafe { zero.assume_init() };
1000    ///
1001    /// assert_eq!(*zero, 0);
1002    /// # Ok::<(), std::alloc::AllocError>(())
1003    /// ```
1004    ///
1005    /// [zeroed]: mem::MaybeUninit::zeroed
1006    #[unstable(feature = "allocator_api", issue = "32838")]
1007    #[inline]
1008    pub fn try_new_zeroed_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
1009        unsafe {
1010            Ok(Rc::from_ptr_in(
1011                Rc::try_allocate_for_layout(
1012                    Layout::new::<T>(),
1013                    |layout| alloc.allocate_zeroed(layout),
1014                    <*mut u8>::cast,
1015                )?,
1016                alloc,
1017            ))
1018        }
1019    }
1020
1021    /// Constructs a new `Pin<Rc<T>>` in the provided allocator. If `T` does not implement `Unpin`, then
1022    /// `value` will be pinned in memory and unable to be moved.
1023    #[cfg(not(no_global_oom_handling))]
1024    #[unstable(feature = "allocator_api", issue = "32838")]
1025    #[inline]
1026    pub fn pin_in(value: T, alloc: A) -> Pin<Self>
1027    where
1028        A: 'static,
1029    {
1030        unsafe { Pin::new_unchecked(Rc::new_in(value, alloc)) }
1031    }
1032
    /// Returns the inner value, if the `Rc` has exactly one strong reference.
    ///
    /// Otherwise, an [`Err`] is returned with the same `Rc` that was
    /// passed in.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(3);
    /// assert_eq!(Rc::try_unwrap(x), Ok(3));
    ///
    /// let x = Rc::new(4);
    /// let _y = Rc::clone(&x);
    /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
    /// ```
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn try_unwrap(this: Self) -> Result<T, Self> {
        if Rc::strong_count(&this) == 1 {
            // Suppress `this`'s destructor: we are about to move the value
            // and the allocator out by raw reads, so dropping `this` normally
            // would double-drop them.
            let this = ManuallyDrop::new(this);

            let val: T = unsafe { ptr::read(&**this) }; // copy the contained object
            let alloc: A = unsafe { ptr::read(&this.alloc) }; // copy the allocator

            // Indicate to Weaks that they can't be promoted by decrementing
            // the strong count, and then remove the implicit "strong weak"
            // pointer while also handling drop logic by just crafting a
            // fake Weak.
            this.inner().dec_strong();
            let _weak = Weak { ptr: this.ptr, alloc };
            Ok(val)
        } else {
            Err(this)
        }
    }
1072
    /// Returns the inner value, if the `Rc` has exactly one strong reference.
    ///
    /// Otherwise, [`None`] is returned and the `Rc` is dropped.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// If `Rc::into_inner` is called on every clone of this `Rc`,
    /// it is guaranteed that exactly one of the calls returns the inner value.
    /// This means in particular that the inner value is not dropped.
    ///
    /// [`Rc::try_unwrap`] is conceptually similar to `Rc::into_inner`.
    /// And while they are meant for different use-cases, `Rc::into_inner(this)`
    /// is in fact equivalent to <code>[Rc::try_unwrap]\(this).[ok][Result::ok]()</code>.
    /// (Note that the same kind of equivalence does **not** hold true for
    /// [`Arc`](crate::sync::Arc), due to race conditions that do not apply to `Rc`!)
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(3);
    /// assert_eq!(Rc::into_inner(x), Some(3));
    ///
    /// let x = Rc::new(4);
    /// let y = Rc::clone(&x);
    ///
    /// assert_eq!(Rc::into_inner(y), None);
    /// assert_eq!(Rc::into_inner(x), Some(4));
    /// ```
    #[inline]
    #[stable(feature = "rc_into_inner", since = "1.70.0")]
    pub fn into_inner(this: Self) -> Option<T> {
        // Delegate to `try_unwrap`; `.ok()` drops the `Err(this)` Rc here
        // when other strong references still exist.
        Rc::try_unwrap(this).ok()
    }
1108}
1109
impl<T> Rc<[T]> {
    /// Constructs a new reference-counted slice with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
    ///
    /// // Deferred initialization:
    /// let data = Rc::get_mut(&mut values).unwrap();
    /// data[0].write(1);
    /// data[1].write(2);
    /// data[2].write(3);
    ///
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[must_use]
    pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
        unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
    }

    /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
    /// filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let values = Rc::<[u32]>::new_zeroed_slice(3);
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [0, 0, 0])
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "new_zeroed_alloc", since = "1.92.0")]
    #[must_use]
    pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::array::<T>(len).unwrap(),
                |layout| Global.allocate_zeroed(layout),
                // Build a slice pointer first so `len` is recorded in the fat
                // pointer's metadata, then cast to the inner-box type.
                |mem| {
                    ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
                        as *mut RcInner<[mem::MaybeUninit<T>]>
                },
            ))
        }
    }

    /// Converts the reference-counted slice into a reference-counted array.
    ///
    /// This operation does not reallocate; the underlying array of the slice is simply reinterpreted as an array type.
    ///
    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
    #[unstable(feature = "alloc_slice_into_array", issue = "148082")]
    #[inline]
    #[must_use]
    pub fn into_array<const N: usize>(self) -> Option<Rc<[T; N]>> {
        if self.len() == N {
            // Round-trip through raw pointers so the refcounts are untouched.
            let ptr = Self::into_raw(self) as *const [T; N];

            // SAFETY: The underlying array of a slice has the exact same layout as an actual array `[T; N]` if `N` is equal to the slice's length.
            let me = unsafe { Rc::from_raw(ptr) };
            Some(me)
        } else {
            None
        }
    }
}
1191
impl<T, A: Allocator> Rc<[T], A> {
    /// Constructs a new reference-counted slice with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let mut values = Rc::<[u32], _>::new_uninit_slice_in(3, System);
    ///
    /// let values = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
    ///     Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
    ///     Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
    ///
    ///     values.assume_init()
    /// };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
        unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) }
    }

    /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
    /// filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let values = Rc::<[u32], _>::new_zeroed_slice_in(3, System);
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [0, 0, 0])
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
        unsafe {
            Rc::from_ptr_in(
                Rc::allocate_for_layout(
                    Layout::array::<T>(len).unwrap(),
                    |layout| alloc.allocate_zeroed(layout),
                    // Build a slice pointer first so `len` is recorded in the
                    // fat pointer's metadata, then cast to the inner-box type.
                    |mem| {
                        ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
                            as *mut RcInner<[mem::MaybeUninit<T>]>
                    },
                ),
                alloc,
            )
        }
    }
}
1264
impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
    /// Converts to `Rc<T>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[inline]
    pub unsafe fn assume_init(self) -> Rc<T, A> {
        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
        // SAFETY: `RcInner<MaybeUninit<T>>` and `RcInner<T>` have the same
        // layout, and the caller guarantees the value is initialized.
        unsafe { Rc::from_inner_in(ptr.cast(), alloc) }
    }
}
1299
impl<T: ?Sized + CloneToUninit> Rc<T> {
    /// Constructs a new `Rc<T>` with a clone of `value`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(clone_from_ref)]
    /// use std::rc::Rc;
    ///
    /// let hello: Rc<str> = Rc::clone_from_ref("hello");
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "clone_from_ref", issue = "149075")]
    pub fn clone_from_ref(value: &T) -> Rc<T> {
        // Delegates to the allocator-aware variant using the global allocator.
        Rc::clone_from_ref_in(value, Global)
    }

    /// Constructs a new `Rc<T>` with a clone of `value`, returning an error if allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(clone_from_ref)]
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    ///
    /// let hello: Rc<str> = Rc::try_clone_from_ref("hello")?;
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "clone_from_ref", issue = "149075")]
    //#[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_clone_from_ref(value: &T) -> Result<Rc<T>, AllocError> {
        // Delegates to the allocator-aware variant using the global allocator.
        Rc::try_clone_from_ref_in(value, Global)
    }
}
1335
impl<T: ?Sized + CloneToUninit, A: Allocator> Rc<T, A> {
    /// Constructs a new `Rc<T>` with a clone of `value` in the provided allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(clone_from_ref)]
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let hello: Rc<str, System> = Rc::clone_from_ref_in("hello", System);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "clone_from_ref", issue = "149075")]
    //#[unstable(feature = "allocator_api", issue = "32838")]
    pub fn clone_from_ref_in(value: &T, alloc: A) -> Rc<T, A> {
        // `in_progress` drops the allocation if we panic before finishing initializing it.
        let mut in_progress: UniqueRcUninit<T, A> = UniqueRcUninit::new(value, alloc);

        // Initialize with clone of value.
        let initialized_clone = unsafe {
            // Clone. If the clone panics, `in_progress` will be dropped and clean up.
            value.clone_to_uninit(in_progress.data_ptr().cast());
            // Cast type of pointer, now that it is initialized.
            in_progress.into_rc()
        };

        initialized_clone
    }

    /// Constructs a new `Rc<T>` with a clone of `value` in the provided allocator, returning an error if allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(clone_from_ref)]
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let hello: Rc<str, System> = Rc::try_clone_from_ref_in("hello", System)?;
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "clone_from_ref", issue = "149075")]
    //#[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_clone_from_ref_in(value: &T, alloc: A) -> Result<Rc<T, A>, AllocError> {
        // `in_progress` drops the allocation if we panic before finishing initializing it.
        // Unlike `clone_from_ref_in`, the initial allocation itself is fallible here.
        let mut in_progress: UniqueRcUninit<T, A> = UniqueRcUninit::try_new(value, alloc)?;

        // Initialize with clone of value.
        let initialized_clone = unsafe {
            // Clone. If the clone panics, `in_progress` will be dropped and clean up.
            value.clone_to_uninit(in_progress.data_ptr().cast());
            // Cast type of pointer, now that it is initialized.
            in_progress.into_rc()
        };

        Ok(initialized_clone)
    }
}
1397
impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
    /// Converts to `Rc<[T]>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
    ///
    /// // Deferred initialization:
    /// let data = Rc::get_mut(&mut values).unwrap();
    /// data[0].write(1);
    /// data[1].write(2);
    /// data[2].write(3);
    ///
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[inline]
    pub unsafe fn assume_init(self) -> Rc<[T], A> {
        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
        // SAFETY: `[MaybeUninit<T>]` and `[T]` have identical layout, and the
        // caller guarantees every element is initialized.
        unsafe { Rc::from_ptr_in(ptr.as_ptr() as _, alloc) }
    }
}
1435
impl<T: ?Sized> Rc<T> {
    /// Constructs an `Rc<T>` from a raw pointer.
    ///
    /// The raw pointer must have been previously returned by a call to
    /// [`Rc<U>::into_raw`][into_raw] or [`Rc<U>::into_raw_with_allocator`][into_raw_with_allocator].
    ///
    /// # Safety
    ///
    /// * Creating a `Rc<T>` from a pointer other than one returned from
    ///   [`Rc<U>::into_raw`][into_raw] or [`Rc<U>::into_raw_with_allocator`][into_raw_with_allocator]
    ///   is undefined behavior.
    /// * If `U` is sized, it must have the same size and alignment as `T`. This
    ///   is trivially true if `U` is `T`.
    /// * If `U` is unsized, its data pointer must have the same size and
    ///   alignment as `T`. This is trivially true if `Rc<U>` was constructed
    ///   through `Rc<T>` and then converted to `Rc<U>` through an [unsized
    ///   coercion].
    /// * Note that if `U` or `U`'s data pointer is not `T` but has the same size
    ///   and alignment, this is basically like transmuting references of
    ///   different types. See [`mem::transmute`][transmute] for more information
    ///   on what restrictions apply in this case.
    /// * The raw pointer must point to a block of memory allocated by the global allocator
    /// * The user of `from_raw` has to make sure a specific value of `T` is only
    ///   dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `Rc<T>` is never accessed.
    ///
    /// [into_raw]: Rc::into_raw
    /// [into_raw_with_allocator]: Rc::into_raw_with_allocator
    /// [transmute]: core::mem::transmute
    /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Rc` to prevent leak.
    ///     let x = Rc::from_raw(x_ptr);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// ```
    ///
    /// Convert a slice back into its original array:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x: Rc<[u32]> = Rc::new([1, 2, 3]);
    /// let x_ptr: *const [u32] = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     let x: Rc<[u32; 3]> = Rc::from_raw(x_ptr.cast::<[u32; 3]>());
    ///     assert_eq!(&*x, &[1, 2, 3]);
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_raw", since = "1.17.0")]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        // Specialization of `from_raw_in` for the global allocator.
        unsafe { Self::from_raw_in(ptr, Global) }
    }

    /// Consumes the `Rc`, returning the wrapped pointer.
    ///
    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
    /// [`Rc::from_raw`].
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// # // Prevent leaks for Miri.
    /// # drop(unsafe { Rc::from_raw(x_ptr) });
    /// ```
    #[must_use = "losing the pointer will leak memory"]
    #[stable(feature = "rc_raw", since = "1.17.0")]
    #[rustc_never_returns_null_ptr]
    pub fn into_raw(this: Self) -> *const T {
        // `ManuallyDrop` keeps the strong count intact while we escape with
        // the data pointer.
        let this = ManuallyDrop::new(this);
        Self::as_ptr(&*this)
    }

    /// Increments the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
    /// allocated by the global allocator.
    ///
    /// [from_raw_in]: Rc::from_raw_in
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count(ptr);
    ///
    ///     let five = Rc::from_raw(ptr);
    ///     assert_eq!(2, Rc::strong_count(&five));
    /// #   // Prevent leaks for Miri.
    /// #   Rc::decrement_strong_count(ptr);
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
    pub unsafe fn increment_strong_count(ptr: *const T) {
        unsafe { Self::increment_strong_count_in(ptr, Global) }
    }

    /// Decrements the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) when invoking this method, and `ptr` must point to a block of memory
    /// allocated by the global allocator. This method can be used to release the final `Rc` and
    /// backing storage, but **should not** be called after the final `Rc` has been released.
    ///
    /// [from_raw_in]: Rc::from_raw_in
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count(ptr);
    ///
    ///     let five = Rc::from_raw(ptr);
    ///     assert_eq!(2, Rc::strong_count(&five));
    ///     Rc::decrement_strong_count(ptr);
    ///     assert_eq!(1, Rc::strong_count(&five));
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
    pub unsafe fn decrement_strong_count(ptr: *const T) {
        unsafe { Self::decrement_strong_count_in(ptr, Global) }
    }
}
1604
1605impl<T: ?Sized, A: Allocator> Rc<T, A> {
    /// Returns a reference to the underlying allocator.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `Rc::allocator(&r)` instead of `r.allocator()`. This
    /// is so that there is no conflict with a method on the inner type.
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(this: &Self) -> &A {
        &this.alloc
    }
1616
    /// Consumes the `Rc`, returning the wrapped pointer and allocator.
    ///
    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
    /// [`Rc::from_raw_in`].
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let x = Rc::new_in("hello".to_owned(), System);
    /// let (ptr, alloc) = Rc::into_raw_with_allocator(x);
    /// assert_eq!(unsafe { &*ptr }, "hello");
    /// let x = unsafe { Rc::from_raw_in(ptr, alloc) };
    /// assert_eq!(&*x, "hello");
    /// ```
    #[must_use = "losing the pointer will leak memory"]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
        // Suppress the destructor so the strong count survives the move-out.
        let this = mem::ManuallyDrop::new(this);
        let ptr = Self::as_ptr(&this);
        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
        let alloc = unsafe { ptr::read(&this.alloc) };
        (ptr, alloc)
    }
1644
    /// Provides a raw pointer to the data.
    ///
    /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
    /// for as long as there are strong counts in the `Rc`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(0);
    /// let y = Rc::clone(&x);
    /// let x_ptr = Rc::as_ptr(&x);
    /// assert_eq!(x_ptr, Rc::as_ptr(&y));
    /// assert_eq!(unsafe { *x_ptr }, 0);
    /// ```
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    #[rustc_never_returns_null_ptr]
    pub fn as_ptr(this: &Self) -> *const T {
        let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);

        // SAFETY: This cannot go through Deref::deref or Rc::inner because
        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
        // write through the pointer after the Rc is recovered through `from_raw`.
        unsafe { &raw mut (*ptr).value }
    }
1671
    /// Constructs an `Rc<T, A>` from a raw pointer in the provided allocator.
    ///
    /// The raw pointer must have been previously returned by a call to [`Rc<U,
    /// A>::into_raw`][into_raw] or [`Rc<U, A>::into_raw_with_allocator`][into_raw_with_allocator].
    ///
    /// # Safety
    ///
    /// * Creating a `Rc<T, A>` from a pointer other than one returned from
    ///   [`Rc<U, A>::into_raw`][into_raw] or [`Rc<U, A>::into_raw_with_allocator`][into_raw_with_allocator]
    ///   is undefined behavior.
    /// * If `U` is sized, it must have the same size and alignment as `T`. This
    ///   is trivially true if `U` is `T`.
    /// * If `U` is unsized, its data pointer must have the same size and
    ///   alignment as `T`. This is trivially true if `Rc<U, A>` was constructed
    ///   through `Rc<T, A>` and then converted to `Rc<U, A>` through an [unsized
    ///   coercion].
    /// * Note that if `U` or `U`'s data pointer is not `T` but has the same size
    ///   and alignment, this is basically like transmuting references of
    ///   different types. See [`mem::transmute`][transmute] for more information
    ///   on what restrictions apply in this case.
    /// * The raw pointer must point to a block of memory allocated by `alloc`
    /// * The user of `from_raw` has to make sure a specific value of `T` is only
    ///   dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `Rc<T, A>` is never accessed.
    ///
    /// [into_raw]: Rc::into_raw
    /// [into_raw_with_allocator]: Rc::into_raw_with_allocator
    /// [transmute]: core::mem::transmute
    /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let x = Rc::new_in("hello".to_owned(), System);
    /// let (x_ptr, _alloc) = Rc::into_raw_with_allocator(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Rc` to prevent leak.
    ///     let x = Rc::from_raw_in(x_ptr, System);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// ```
    ///
    /// Convert a slice back into its original array:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let x: Rc<[u32], _> = Rc::new_in([1, 2, 3], System);
    /// let x_ptr: *const [u32] = Rc::into_raw_with_allocator(x).0;
    ///
    /// unsafe {
    ///     let x: Rc<[u32; 3], _> = Rc::from_raw_in(x_ptr.cast::<[u32; 3]>(), System);
    ///     assert_eq!(&*x, &[1, 2, 3]);
    /// }
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
        // Byte offset of the value inside the `RcInner` allocation, as
        // computed by `data_offset` for this (possibly unsized) value.
        let offset = unsafe { data_offset(ptr) };

        // Reverse the offset to find the original RcInner.
        let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };

        unsafe { Self::from_ptr_in(rc_ptr, alloc) }
    }
1751
    /// Creates a new [`Weak`] pointer to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// let weak_five = Rc::downgrade(&five);
    /// ```
    #[must_use = "this returns a new `Weak` pointer, \
                  without modifying the original `Rc`"]
    #[stable(feature = "rc_weak", since = "1.4.0")]
    pub fn downgrade(this: &Self) -> Weak<T, A>
    where
        A: Clone,
    {
        // Reserve the weak reference before constructing the `Weak`, so the
        // count always covers the pointer we are about to hand out.
        this.inner().inc_weak();
        // Make sure we do not create a dangling Weak
        debug_assert!(!is_dangling(this.ptr.as_ptr()));
        Weak { ptr: this.ptr, alloc: this.alloc.clone() }
    }
1775
1776    /// Gets the number of [`Weak`] pointers to this allocation.
1777    ///
1778    /// # Examples
1779    ///
1780    /// ```
1781    /// use std::rc::Rc;
1782    ///
1783    /// let five = Rc::new(5);
1784    /// let _weak_five = Rc::downgrade(&five);
1785    ///
1786    /// assert_eq!(1, Rc::weak_count(&five));
1787    /// ```
1788    #[inline]
1789    #[stable(feature = "rc_counts", since = "1.15.0")]
1790    pub fn weak_count(this: &Self) -> usize {
1791        this.inner().weak() - 1
1792    }
1793
    /// Gets the number of strong (`Rc`) pointers to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let _also_five = Rc::clone(&five);
    ///
    /// assert_eq!(2, Rc::strong_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "rc_counts", since = "1.15.0")]
    pub fn strong_count(this: &Self) -> usize {
        // Read straight from the shared counter; unlike `weak_count` there is
        // no offset to correct for.
        this.inner().strong()
    }
1811
    /// Increments the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
    /// allocated by `alloc`.
    ///
    /// [from_raw_in]: Rc::from_raw_in
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::new_in(5, System);
    ///
    /// unsafe {
    ///     let (ptr, _alloc) = Rc::into_raw_with_allocator(five);
    ///     Rc::increment_strong_count_in(ptr, System);
    ///
    ///     let five = Rc::from_raw_in(ptr, System);
    ///     assert_eq!(2, Rc::strong_count(&five));
    /// #   // Prevent leaks for Miri.
    /// #   Rc::decrement_strong_count_in(ptr, System);
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A)
    where
        A: Clone,
    {
        // SAFETY: the caller guarantees `ptr` came from `into_raw` and that the
        // associated `Rc` is still live.
        // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
        let rc = unsafe { mem::ManuallyDrop::new(Rc::<T, A>::from_raw_in(ptr, alloc)) };
        // Now increase refcount, but don't drop new refcount either
        let _rc_clone: mem::ManuallyDrop<_> = rc.clone();
    }
1856
    /// Decrements the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) when invoking this method, and `ptr` must point to a block of memory
    /// allocated by `alloc`. This method can be used to release the final `Rc` and
    /// backing storage, but **should not** be called after the final `Rc` has been released.
    ///
    /// [from_raw_in]: Rc::from_raw_in
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::new_in(5, System);
    ///
    /// unsafe {
    ///     let (ptr, _alloc) = Rc::into_raw_with_allocator(five);
    ///     Rc::increment_strong_count_in(ptr, System);
    ///
    ///     let five = Rc::from_raw_in(ptr, System);
    ///     assert_eq!(2, Rc::strong_count(&five));
    ///     Rc::decrement_strong_count_in(ptr, System);
    ///     assert_eq!(1, Rc::strong_count(&five));
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) {
        // Rematerialize an owned `Rc` and let its `Drop` impl perform the
        // decrement (and the final teardown if this was the last strong ref).
        unsafe { drop(Rc::from_raw_in(ptr, alloc)) };
    }
1896
1897    /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
1898    /// this allocation.
1899    #[inline]
1900    fn is_unique(this: &Self) -> bool {
1901        Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
1902    }
1903
1904    /// Returns a mutable reference into the given `Rc`, if there are
1905    /// no other `Rc` or [`Weak`] pointers to the same allocation.
1906    ///
1907    /// Returns [`None`] otherwise, because it is not safe to
1908    /// mutate a shared value.
1909    ///
1910    /// See also [`make_mut`][make_mut], which will [`clone`][clone]
1911    /// the inner value when there are other `Rc` pointers.
1912    ///
1913    /// [make_mut]: Rc::make_mut
1914    /// [clone]: Clone::clone
1915    ///
1916    /// # Examples
1917    ///
1918    /// ```
1919    /// use std::rc::Rc;
1920    ///
1921    /// let mut x = Rc::new(3);
1922    /// *Rc::get_mut(&mut x).unwrap() = 4;
1923    /// assert_eq!(*x, 4);
1924    ///
1925    /// let _y = Rc::clone(&x);
1926    /// assert!(Rc::get_mut(&mut x).is_none());
1927    /// ```
1928    #[inline]
1929    #[stable(feature = "rc_unique", since = "1.4.0")]
1930    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
1931        if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
1932    }
1933
    /// Returns a mutable reference into the given `Rc`,
    /// without any check.
    ///
    /// See also [`get_mut`], which is safe and does appropriate checks.
    ///
    /// [`get_mut`]: Rc::get_mut
    ///
    /// # Safety
    ///
    /// If any other `Rc` or [`Weak`] pointers to the same allocation exist, then
    /// they must not be dereferenced or have active borrows for the duration
    /// of the returned borrow, and their inner type must be exactly the same as the
    /// inner type of this Rc (including lifetimes). This is trivially the case if no
    /// such pointers exist, for example immediately after `Rc::new`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut x = Rc::new(String::new());
    /// unsafe {
    ///     Rc::get_mut_unchecked(&mut x).push_str("foo")
    /// }
    /// assert_eq!(*x, "foo");
    /// ```
    /// Other `Rc` pointers to the same allocation must be to the same type.
    /// ```no_run
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let x: Rc<str> = Rc::from("Hello, world!");
    /// let mut y: Rc<[u8]> = x.clone().into();
    /// unsafe {
    ///     // this is Undefined Behavior, because x's inner type is str, not [u8]
    ///     Rc::get_mut_unchecked(&mut y).fill(0xff); // 0xff is invalid in UTF-8
    /// }
    /// println!("{}", &*x); // Invalid UTF-8 in a str
    /// ```
    /// Other `Rc` pointers to the same allocation must be to the exact same type, including lifetimes.
    /// ```no_run
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let x: Rc<&str> = Rc::new("Hello, world!");
    /// {
    ///     let s = String::from("Oh, no!");
    ///     let mut y: Rc<&str> = x.clone();
    ///     unsafe {
    ///         // this is Undefined Behavior, because x's inner type
    ///         // is &'long str, not &'short str
    ///         *Rc::get_mut_unchecked(&mut y) = &s;
    ///     }
    /// }
    /// println!("{}", &*x); // Use-after-free
    /// ```
    #[inline]
    #[unstable(feature = "get_mut_unchecked", issue = "63292")]
    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
        // We are careful to *not* create a reference covering the "count" fields, as
        // this would conflict with accesses to the reference counts (e.g. by `Weak`).
        // Hence the raw-pointer projection straight to `value` below.
        unsafe { &mut (*this.ptr.as_ptr()).value }
    }
2001
2002    #[inline]
2003    #[stable(feature = "ptr_eq", since = "1.17.0")]
2004    /// Returns `true` if the two `Rc`s point to the same allocation in a vein similar to
2005    /// [`ptr::eq`]. This function ignores the metadata of  `dyn Trait` pointers.
2006    ///
2007    /// # Examples
2008    ///
2009    /// ```
2010    /// use std::rc::Rc;
2011    ///
2012    /// let five = Rc::new(5);
2013    /// let same_five = Rc::clone(&five);
2014    /// let other_five = Rc::new(5);
2015    ///
2016    /// assert!(Rc::ptr_eq(&five, &same_five));
2017    /// assert!(!Rc::ptr_eq(&five, &other_five));
2018    /// ```
2019    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
2020        ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr())
2021    }
2022}
2023
#[cfg(not(no_global_oom_handling))]
impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Rc<T, A> {
    /// Makes a mutable reference into the given `Rc`.
    ///
    /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
    /// [`clone`] the inner value to a new allocation to ensure unique ownership.  This is also
    /// referred to as clone-on-write.
    ///
    /// However, if there are no other `Rc` pointers to this allocation, but some [`Weak`]
    /// pointers, then the [`Weak`] pointers will be disassociated and the inner value will not
    /// be cloned.
    ///
    /// See also [`get_mut`], which will fail rather than cloning the inner value
    /// or disassociating [`Weak`] pointers.
    ///
    /// [`clone`]: Clone::clone
    /// [`get_mut`]: Rc::get_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut data = Rc::new(5);
    ///
    /// *Rc::make_mut(&mut data) += 1;         // Won't clone anything
    /// let mut other_data = Rc::clone(&data); // Won't clone inner data
    /// *Rc::make_mut(&mut data) += 1;         // Clones inner data
    /// *Rc::make_mut(&mut data) += 1;         // Won't clone anything
    /// *Rc::make_mut(&mut other_data) *= 2;   // Won't clone anything
    ///
    /// // Now `data` and `other_data` point to different allocations.
    /// assert_eq!(*data, 8);
    /// assert_eq!(*other_data, 12);
    /// ```
    ///
    /// [`Weak`] pointers will be disassociated:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut data = Rc::new(75);
    /// let weak = Rc::downgrade(&data);
    ///
    /// assert!(75 == *data);
    /// assert!(75 == *weak.upgrade().unwrap());
    ///
    /// *Rc::make_mut(&mut data) += 1;
    ///
    /// assert!(76 == *data);
    /// assert!(weak.upgrade().is_none());
    /// ```
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn make_mut(this: &mut Self) -> &mut T {
        // `T` may be unsized, so capture the dynamic size up front for the
        // byte-wise move in the weak-only branch below.
        let size_of_val = size_of_val::<T>(&**this);

        if Rc::strong_count(this) != 1 {
            // Gotta clone the data, there are other Rcs.
            *this = Rc::clone_from_ref_in(&**this, this.alloc.clone());
        } else if Rc::weak_count(this) != 0 {
            // Can just steal the data, all that's left is Weaks
            // (they will observe the value as dropped once we release
            // the strong count below).

            // We don't need panic-protection like the above branch does, but we might as well
            // use the same mechanism.
            let mut in_progress: UniqueRcUninit<T, A> =
                UniqueRcUninit::new(&**this, this.alloc.clone());
            unsafe {
                // Initialize `in_progress` with move of **this.
                // We have to express this in terms of bytes because `T: ?Sized`; there is no
                // operation that just copies a value based on its `size_of_val()`.
                ptr::copy_nonoverlapping(
                    ptr::from_ref(&**this).cast::<u8>(),
                    in_progress.data_ptr().cast::<u8>(),
                    size_of_val,
                );

                this.inner().dec_strong();
                // Remove implicit strong-weak ref (no need to craft a fake
                // Weak here -- we know other Weaks can clean up for us)
                this.inner().dec_weak();
                // Replace `this` with newly constructed Rc that has the moved data.
                ptr::write(this, in_progress.into_rc());
            }
        }
        // This unsafety is ok because we're guaranteed that the pointer
        // returned is the *only* pointer that will ever be returned to T. Our
        // reference count is guaranteed to be 1 at this point, and we required
        // the `Rc<T>` itself to be `mut`, so we're returning the only possible
        // reference to the allocation.
        unsafe { &mut this.ptr.as_mut().value }
    }
}
2117
2118impl<T: Clone, A: Allocator> Rc<T, A> {
2119    /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
2120    /// clone.
2121    ///
2122    /// Assuming `rc_t` is of type `Rc<T>`, this function is functionally equivalent to
2123    /// `(*rc_t).clone()`, but will avoid cloning the inner value where possible.
2124    ///
2125    /// # Examples
2126    ///
2127    /// ```
2128    /// # use std::{ptr, rc::Rc};
2129    /// let inner = String::from("test");
2130    /// let ptr = inner.as_ptr();
2131    ///
2132    /// let rc = Rc::new(inner);
2133    /// let inner = Rc::unwrap_or_clone(rc);
2134    /// // The inner value was not cloned
2135    /// assert!(ptr::eq(ptr, inner.as_ptr()));
2136    ///
2137    /// let rc = Rc::new(inner);
2138    /// let rc2 = rc.clone();
2139    /// let inner = Rc::unwrap_or_clone(rc);
2140    /// // Because there were 2 references, we had to clone the inner value.
2141    /// assert!(!ptr::eq(ptr, inner.as_ptr()));
2142    /// // `rc2` is the last reference, so when we unwrap it we get back
2143    /// // the original `String`.
2144    /// let inner = Rc::unwrap_or_clone(rc2);
2145    /// assert!(ptr::eq(ptr, inner.as_ptr()));
2146    /// ```
2147    #[inline]
2148    #[stable(feature = "arc_unwrap_or_clone", since = "1.76.0")]
2149    pub fn unwrap_or_clone(this: Self) -> T {
2150        Rc::try_unwrap(this).unwrap_or_else(|rc| (*rc).clone())
2151    }
2152}
2153
2154impl<A: Allocator> Rc<dyn Any, A> {
2155    /// Attempts to downcast the `Rc<dyn Any>` to a concrete type.
2156    ///
2157    /// # Examples
2158    ///
2159    /// ```
2160    /// use std::any::Any;
2161    /// use std::rc::Rc;
2162    ///
2163    /// fn print_if_string(value: Rc<dyn Any>) {
2164    ///     if let Ok(string) = value.downcast::<String>() {
2165    ///         println!("String ({}): {}", string.len(), string);
2166    ///     }
2167    /// }
2168    ///
2169    /// let my_string = "Hello World".to_string();
2170    /// print_if_string(Rc::new(my_string));
2171    /// print_if_string(Rc::new(0i8));
2172    /// ```
2173    #[inline]
2174    #[stable(feature = "rc_downcast", since = "1.29.0")]
2175    pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> {
2176        if (*self).is::<T>() {
2177            unsafe {
2178                let (ptr, alloc) = Rc::into_inner_with_allocator(self);
2179                Ok(Rc::from_inner_in(ptr.cast(), alloc))
2180            }
2181        } else {
2182            Err(self)
2183        }
2184    }
2185
2186    /// Downcasts the `Rc<dyn Any>` to a concrete type.
2187    ///
2188    /// For a safe alternative see [`downcast`].
2189    ///
2190    /// # Examples
2191    ///
2192    /// ```
2193    /// #![feature(downcast_unchecked)]
2194    ///
2195    /// use std::any::Any;
2196    /// use std::rc::Rc;
2197    ///
2198    /// let x: Rc<dyn Any> = Rc::new(1_usize);
2199    ///
2200    /// unsafe {
2201    ///     assert_eq!(*x.downcast_unchecked::<usize>(), 1);
2202    /// }
2203    /// ```
2204    ///
2205    /// # Safety
2206    ///
2207    /// The contained value must be of type `T`. Calling this method
2208    /// with the incorrect type is *undefined behavior*.
2209    ///
2210    ///
2211    /// [`downcast`]: Self::downcast
2212    #[inline]
2213    #[unstable(feature = "downcast_unchecked", issue = "90850")]
2214    pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> {
2215        unsafe {
2216            let (ptr, alloc) = Rc::into_inner_with_allocator(self);
2217            Rc::from_inner_in(ptr.cast(), alloc)
2218        }
2219    }
2220}
2221
impl<T: ?Sized> Rc<T> {
    /// Allocates an `RcInner<T>` with sufficient space for
    /// a possibly-unsized inner value where the value has the layout provided.
    ///
    /// The function `mem_to_rc_inner` is called with the data pointer
    /// and must return back a (potentially fat)-pointer for the `RcInner<T>`.
    #[cfg(not(no_global_oom_handling))]
    unsafe fn allocate_for_layout(
        value_layout: Layout,
        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
        mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
    ) -> *mut RcInner<T> {
        let layout = rc_inner_layout_for_value_layout(value_layout);
        unsafe {
            // Delegate to the fallible version and abort via the global
            // OOM handler on failure.
            Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rc_inner)
                .unwrap_or_else(|_| handle_alloc_error(layout))
        }
    }

    /// Allocates an `RcInner<T>` with sufficient space for
    /// a possibly-unsized inner value where the value has the layout provided,
    /// returning an error if allocation fails.
    ///
    /// The function `mem_to_rc_inner` is called with the data pointer
    /// and must return back a (potentially fat)-pointer for the `RcInner<T>`.
    #[inline]
    unsafe fn try_allocate_for_layout(
        value_layout: Layout,
        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
        mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
    ) -> Result<*mut RcInner<T>, AllocError> {
        let layout = rc_inner_layout_for_value_layout(value_layout);

        // Allocate for the layout.
        let ptr = allocate(layout)?;

        // Initialize the RcInner: both counts start at one — the new strong
        // handle, plus the weak reference the strong handles implicitly hold.
        let inner = mem_to_rc_inner(ptr.as_non_null_ptr().as_ptr());
        unsafe {
            debug_assert_eq!(Layout::for_value_raw(inner), layout);

            (&raw mut (*inner).strong).write(Cell::new(1));
            (&raw mut (*inner).weak).write(Cell::new(1));
        }

        Ok(inner)
    }
}
2270
impl<T: ?Sized, A: Allocator> Rc<T, A> {
    /// Allocates an `RcInner<T>` with sufficient space for an unsized inner value
    #[cfg(not(no_global_oom_handling))]
    unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcInner<T> {
        // Allocate for the `RcInner<T>` using the given value.
        unsafe {
            Rc::<T>::allocate_for_layout(
                Layout::for_value_raw(ptr),
                |layout| alloc.allocate(layout),
                // Copy `ptr`'s (possibly fat) metadata onto the fresh allocation.
                |mem| mem.with_metadata_of(ptr as *const RcInner<T>),
            )
        }
    }

    #[cfg(not(no_global_oom_handling))]
    fn from_box_in(src: Box<T, A>) -> Rc<T, A> {
        unsafe {
            let value_size = size_of_val(&*src);
            let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src));

            // Copy value as bytes
            ptr::copy_nonoverlapping(
                (&raw const *src) as *const u8,
                (&raw mut (*ptr).value) as *mut u8,
                value_size,
            );

            // Free the allocation without dropping its contents: the box is
            // retyped to `ManuallyDrop<T>` so dropping it releases only the
            // memory, not the (moved-out) value.
            let (bptr, alloc) = Box::into_raw_with_allocator(src);
            let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop<T>, alloc.by_ref());
            drop(src);

            Self::from_ptr_in(ptr, alloc)
        }
    }
}
2307
impl<T> Rc<[T]> {
    /// Allocates an `RcInner<[T]>` with the given length.
    #[cfg(not(no_global_oom_handling))]
    unsafe fn allocate_for_slice(len: usize) -> *mut RcInner<[T]> {
        unsafe {
            Self::allocate_for_layout(
                Layout::array::<T>(len).unwrap(),
                |layout| Global.allocate(layout),
                // Attach the slice length as pointer metadata.
                |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
            )
        }
    }

    /// Copy elements from slice into newly allocated `Rc<[T]>`
    ///
    /// Unsafe because the caller must either take ownership, bind `T: Copy` or
    /// bind `T: TrivialClone`.
    #[cfg(not(no_global_oom_handling))]
    unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
        unsafe {
            let ptr = Self::allocate_for_slice(v.len());
            // Bulk byte copy of the elements into the value slot.
            ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).value) as *mut T, v.len());
            Self::from_ptr(ptr)
        }
    }

    /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
    ///
    /// Behavior is undefined should the size be wrong.
    #[cfg(not(no_global_oom_handling))]
    unsafe fn from_iter_exact(iter: impl Iterator<Item = T>, len: usize) -> Rc<[T]> {
        // Panic guard while cloning T elements.
        // In the event of a panic, elements that have been written
        // into the new RcInner will be dropped, then the memory freed.
        struct Guard<T> {
            mem: NonNull<u8>, // start of the whole RcInner allocation
            elems: *mut T,    // first element of the value slice
            layout: Layout,   // layout the allocation was made with
            n_elems: usize,   // number of elements initialized so far
        }

        impl<T> Drop for Guard<T> {
            fn drop(&mut self) {
                unsafe {
                    // Drop only the elements written so far, then free the block.
                    let slice = from_raw_parts_mut(self.elems, self.n_elems);
                    ptr::drop_in_place(slice);

                    Global.deallocate(self.mem, self.layout);
                }
            }
        }

        unsafe {
            let ptr = Self::allocate_for_slice(len);

            let mem = ptr as *mut _ as *mut u8;
            let layout = Layout::for_value_raw(ptr);

            // Pointer to first element
            let elems = (&raw mut (*ptr).value) as *mut T;

            let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };

            for (i, item) in iter.enumerate() {
                ptr::write(elems.add(i), item);
                // Keep the guard in sync so a panic in `iter` drops exactly
                // the elements written so far.
                guard.n_elems += 1;
            }

            // All clear. Forget the guard so it doesn't free the new RcInner.
            mem::forget(guard);

            Self::from_ptr(ptr)
        }
    }
}
2383
impl<T, A: Allocator> Rc<[T], A> {
    /// Allocates an `RcInner<[T]>` with the given length.
    ///
    /// Allocator-aware counterpart of `Rc::<[T]>::allocate_for_slice`.
    #[inline]
    #[cfg(not(no_global_oom_handling))]
    unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcInner<[T]> {
        unsafe {
            Rc::<[T]>::allocate_for_layout(
                Layout::array::<T>(len).unwrap(),
                |layout| alloc.allocate(layout),
                // Attach the slice length as pointer metadata.
                |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
            )
        }
    }
}
2398
#[cfg(not(no_global_oom_handling))]
/// Specialization trait used for `From<&[T]>`.
///
/// The default impl clones element by element; `TrivialClone` types get a
/// bulk-copy fast path (see the two impls below).
trait RcFromSlice<T> {
    fn from_slice(slice: &[T]) -> Self;
}
2404
#[cfg(not(no_global_oom_handling))]
impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
    #[inline]
    default fn from_slice(v: &[T]) -> Self {
        // SAFETY: the iterator's length is exactly `v.len()`, as required.
        unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
    }
}
2412
#[cfg(not(no_global_oom_handling))]
impl<T: TrivialClone> RcFromSlice<T> for Rc<[T]> {
    #[inline]
    fn from_slice(v: &[T]) -> Self {
        // SAFETY: `T` implements `TrivialClone`, so this is sound and equivalent
        // to the above.
        unsafe { Rc::copy_from_slice(v) }
    }
}
2422
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> Deref for Rc<T, A> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &T {
        // Borrow the `value` field of the shared `RcInner`.
        &self.inner().value
    }
}
2432
// Marker-trait impls for `Rc` and `UniqueRc`; no methods, only opt-ins.
#[unstable(feature = "pin_coerce_unsized_trait", issue = "150112")]
unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for Rc<T, A> {}

//#[unstable(feature = "unique_rc_arc", issue = "112566")]
#[unstable(feature = "pin_coerce_unsized_trait", issue = "150112")]
unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for UniqueRc<T, A> {}

#[unstable(feature = "deref_pure_trait", issue = "87121")]
unsafe impl<T: ?Sized, A: Allocator> DerefPure for Rc<T, A> {}

//#[unstable(feature = "unique_rc_arc", issue = "112566")]
#[unstable(feature = "deref_pure_trait", issue = "87121")]
unsafe impl<T: ?Sized, A: Allocator> DerefPure for UniqueRc<T, A> {}

#[unstable(feature = "legacy_receiver_trait", issue = "none")]
impl<T: ?Sized> LegacyReceiver for Rc<T> {}
2449
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
    /// Drops the `Rc`.
    ///
    /// This will decrement the strong reference count. If the strong reference
    /// count reaches zero then the only other references (if any) are
    /// [`Weak`], so we `drop` the inner value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo  = Rc::new(Foo);
    /// let foo2 = Rc::clone(&foo);
    ///
    /// drop(foo);    // Doesn't print anything
    /// drop(foo2);   // Prints "dropped!"
    /// ```
    #[inline]
    fn drop(&mut self) {
        unsafe {
            // Decrement first; only the handle that brings the count to zero
            // performs the (cold) teardown in `drop_slow`.
            self.inner().dec_strong();
            if self.inner().strong() == 0 {
                self.drop_slow();
            }
        }
    }
}
2487
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> {
    /// Makes a clone of the `Rc` pointer.
    ///
    /// This creates another pointer to the same allocation, increasing the
    /// strong reference count.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// let _ = Rc::clone(&five);
    /// ```
    #[inline]
    fn clone(&self) -> Self {
        // SAFETY: we hold a strong handle, so the allocation is live; bumping
        // the strong count first keeps it live for the new handle built from
        // the same pointer. Only the allocator value itself is cloned.
        unsafe {
            self.inner().inc_strong();
            Self::from_inner_in(self.ptr, self.alloc.clone())
        }
    }
}
2512
// Cloning an `Rc` is cheap (a non-atomic reference-count increment, see
// `Clone` above), so it opts into ergonomic `use`-cloning.
#[unstable(feature = "ergonomic_clones", issue = "132290")]
impl<T: ?Sized, A: Allocator + Clone> UseCloned for Rc<T, A> {}
2515
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Default> Default for Rc<T> {
    /// Creates a new `Rc<T>`, with the `Default` value for `T`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x: Rc<i32> = Default::default();
    /// assert_eq!(*x, 0);
    /// ```
    #[inline]
    fn default() -> Self {
        unsafe {
            Self::from_inner(
                // Allocate uninitialized memory and write the `RcInner` into
                // it in place. The counts start at strong = 1 (this handle)
                // and weak = 1 (the implicit weak reference collectively held
                // by all strong handles).
                Box::leak(Box::write(
                    Box::new_uninit(),
                    RcInner { strong: Cell::new(1), weak: Cell::new(1), value: T::default() },
                ))
                .into(),
            )
        }
    }
}
2542
2543#[cfg(not(no_global_oom_handling))]
2544#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
2545impl Default for Rc<str> {
2546    /// Creates an empty `str` inside an `Rc`.
2547    ///
2548    /// This may or may not share an allocation with other Rcs on the same thread.
2549    #[inline]
2550    fn default() -> Self {
2551        let rc = Rc::<[u8]>::default();
2552        // `[u8]` has the same layout as `str`.
2553        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
2554    }
2555}
2556
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
impl<T> Default for Rc<[T]> {
    /// Creates an empty `[T]` inside an `Rc`.
    ///
    /// This may or may not share an allocation with other Rcs on the same thread.
    #[inline]
    fn default() -> Self {
        // A zero-length array requires no `T: Default` or `T: Clone`;
        // converting it goes through `From<[T; 0]>` and unsizes to `Rc<[T]>`.
        let arr: [T; 0] = [];
        Rc::from(arr)
    }
}
2569
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "pin_default_impls", since = "1.91.0")]
impl<T> Default for Pin<Rc<T>>
where
    T: ?Sized,
    Rc<T>: Default,
{
    /// Creates a pinned, default `Rc<T>`.
    #[inline]
    fn default() -> Self {
        // SAFETY: the value lives in the `Rc`'s heap allocation and this
        // fresh `Rc` is immediately pinned, so the pointee cannot be moved
        // out from behind the `Pin` through safe code.
        unsafe { Pin::new_unchecked(Rc::<T>::default()) }
    }
}
2582
// Internal helper trait that lets the `PartialEq for Rc` impl below be
// specialized for `T: Eq` (via `MarkerEq`) without changing its public shape.
#[stable(feature = "rust1", since = "1.0.0")]
trait RcEqIdent<T: ?Sized + PartialEq, A: Allocator> {
    fn eq(&self, other: &Rc<T, A>) -> bool;
    fn ne(&self, other: &Rc<T, A>) -> bool;
}
2588
// Fallback used for all `T: PartialEq`: always compare the inner values.
// The `default fn`s allow the `T: Eq` impl below to specialize them.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
    #[inline]
    default fn eq(&self, other: &Rc<T, A>) -> bool {
        **self == **other
    }

    #[inline]
    default fn ne(&self, other: &Rc<T, A>) -> bool {
        **self != **other
    }
}
2601
// Hack to allow specializing on `Eq` even though `Eq` has a method.
// (Specialization cannot key directly on `Eq`; this empty marker trait with a
// blanket impl stands in for it.)
#[rustc_unsafe_specialization_marker]
pub(crate) trait MarkerEq: PartialEq<Self> {}

impl<T: Eq> MarkerEq for T {}
2607
/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
/// store large values, that are slow to clone, but also heavy to check for equality, causing this
/// cost to pay off more easily. It's also more likely to have two `Rc` clones, that point to
/// the same value, than two `&T`s.
///
/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + MarkerEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
    #[inline]
    fn eq(&self, other: &Rc<T, A>) -> bool {
        // Pointer equality implies value equality because `Eq` guarantees
        // reflexivity (`a == a` holds for every `a`).
        Rc::ptr_eq(self, other) || **self == **other
    }

    #[inline]
    fn ne(&self, other: &Rc<T, A>) -> bool {
        // Dually: two handles to the same allocation can never be unequal.
        !Rc::ptr_eq(self, other) && **self != **other
    }
}
2627
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Rc<T, A> {
    /// Equality for two `Rc`s.
    ///
    /// Two `Rc`s are equal if their inner values are equal, even if they are
    /// stored in different allocations.
    ///
    /// If `T` also implements `Eq` (implying reflexivity of equality),
    /// two `Rc`s that point to the same allocation are
    /// always equal.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert!(five == Rc::new(5));
    /// ```
    #[inline]
    fn eq(&self, other: &Rc<T, A>) -> bool {
        // Dispatches through `RcEqIdent` so that `T: Eq` gets the
        // pointer-identity fast path via specialization.
        RcEqIdent::eq(self, other)
    }

    /// Inequality for two `Rc`s.
    ///
    /// Two `Rc`s are not equal if their inner values are not equal.
    ///
    /// If `T` also implements `Eq` (implying reflexivity of equality),
    /// two `Rc`s that point to the same allocation are
    /// never unequal.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// assert!(five != Rc::new(6));
    /// ```
    #[inline]
    fn ne(&self, other: &Rc<T, A>) -> bool {
        RcEqIdent::ne(self, other)
    }
}
2675
// `Rc<T>` equality is exactly `T` equality, so `Eq` lifts through unchanged.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Eq, A: Allocator> Eq for Rc<T, A> {}
2678
2679#[stable(feature = "rust1", since = "1.0.0")]
2680impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
2681    /// Partial comparison for two `Rc`s.
2682    ///
2683    /// The two are compared by calling `partial_cmp()` on their inner values.
2684    ///
2685    /// # Examples
2686    ///
2687    /// ```
2688    /// use std::rc::Rc;
2689    /// use std::cmp::Ordering;
2690    ///
2691    /// let five = Rc::new(5);
2692    ///
2693    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
2694    /// ```
2695    #[inline(always)]
2696    fn partial_cmp(&self, other: &Rc<T, A>) -> Option<Ordering> {
2697        (**self).partial_cmp(&**other)
2698    }
2699
2700    /// Less-than comparison for two `Rc`s.
2701    ///
2702    /// The two are compared by calling `<` on their inner values.
2703    ///
2704    /// # Examples
2705    ///
2706    /// ```
2707    /// use std::rc::Rc;
2708    ///
2709    /// let five = Rc::new(5);
2710    ///
2711    /// assert!(five < Rc::new(6));
2712    /// ```
2713    #[inline(always)]
2714    fn lt(&self, other: &Rc<T, A>) -> bool {
2715        **self < **other
2716    }
2717
2718    /// 'Less than or equal to' comparison for two `Rc`s.
2719    ///
2720    /// The two are compared by calling `<=` on their inner values.
2721    ///
2722    /// # Examples
2723    ///
2724    /// ```
2725    /// use std::rc::Rc;
2726    ///
2727    /// let five = Rc::new(5);
2728    ///
2729    /// assert!(five <= Rc::new(5));
2730    /// ```
2731    #[inline(always)]
2732    fn le(&self, other: &Rc<T, A>) -> bool {
2733        **self <= **other
2734    }
2735
2736    /// Greater-than comparison for two `Rc`s.
2737    ///
2738    /// The two are compared by calling `>` on their inner values.
2739    ///
2740    /// # Examples
2741    ///
2742    /// ```
2743    /// use std::rc::Rc;
2744    ///
2745    /// let five = Rc::new(5);
2746    ///
2747    /// assert!(five > Rc::new(4));
2748    /// ```
2749    #[inline(always)]
2750    fn gt(&self, other: &Rc<T, A>) -> bool {
2751        **self > **other
2752    }
2753
2754    /// 'Greater than or equal to' comparison for two `Rc`s.
2755    ///
2756    /// The two are compared by calling `>=` on their inner values.
2757    ///
2758    /// # Examples
2759    ///
2760    /// ```
2761    /// use std::rc::Rc;
2762    ///
2763    /// let five = Rc::new(5);
2764    ///
2765    /// assert!(five >= Rc::new(5));
2766    /// ```
2767    #[inline(always)]
2768    fn ge(&self, other: &Rc<T, A>) -> bool {
2769        **self >= **other
2770    }
2771}
2772
2773#[stable(feature = "rust1", since = "1.0.0")]
2774impl<T: ?Sized + Ord, A: Allocator> Ord for Rc<T, A> {
2775    /// Comparison for two `Rc`s.
2776    ///
2777    /// The two are compared by calling `cmp()` on their inner values.
2778    ///
2779    /// # Examples
2780    ///
2781    /// ```
2782    /// use std::rc::Rc;
2783    /// use std::cmp::Ordering;
2784    ///
2785    /// let five = Rc::new(5);
2786    ///
2787    /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
2788    /// ```
2789    #[inline]
2790    fn cmp(&self, other: &Rc<T, A>) -> Ordering {
2791        (**self).cmp(&**other)
2792    }
2793}
2794
2795#[stable(feature = "rust1", since = "1.0.0")]
2796impl<T: ?Sized + Hash, A: Allocator> Hash for Rc<T, A> {
2797    fn hash<H: Hasher>(&self, state: &mut H) {
2798        (**self).hash(state);
2799    }
2800}
2801
2802#[stable(feature = "rust1", since = "1.0.0")]
2803impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for Rc<T, A> {
2804    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2805        fmt::Display::fmt(&**self, f)
2806    }
2807}
2808
2809#[stable(feature = "rust1", since = "1.0.0")]
2810impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Rc<T, A> {
2811    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2812        fmt::Debug::fmt(&**self, f)
2813    }
2814}
2815
2816#[stable(feature = "rust1", since = "1.0.0")]
2817impl<T: ?Sized, A: Allocator> fmt::Pointer for Rc<T, A> {
2818    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2819        fmt::Pointer::fmt(&(&raw const **self), f)
2820    }
2821}
2822
2823#[cfg(not(no_global_oom_handling))]
2824#[stable(feature = "from_for_ptrs", since = "1.6.0")]
2825impl<T> From<T> for Rc<T> {
2826    /// Converts a generic type `T` into an `Rc<T>`
2827    ///
2828    /// The conversion allocates on the heap and moves `t`
2829    /// from the stack into it.
2830    ///
2831    /// # Example
2832    /// ```rust
2833    /// # use std::rc::Rc;
2834    /// let x = 5;
2835    /// let rc = Rc::new(5);
2836    ///
2837    /// assert_eq!(Rc::from(x), rc);
2838    /// ```
2839    fn from(t: T) -> Self {
2840        Rc::new(t)
2841    }
2842}
2843
2844#[cfg(not(no_global_oom_handling))]
2845#[stable(feature = "shared_from_array", since = "1.74.0")]
2846impl<T, const N: usize> From<[T; N]> for Rc<[T]> {
2847    /// Converts a [`[T; N]`](prim@array) into an `Rc<[T]>`.
2848    ///
2849    /// The conversion moves the array into a newly allocated `Rc`.
2850    ///
2851    /// # Example
2852    ///
2853    /// ```
2854    /// # use std::rc::Rc;
2855    /// let original: [i32; 3] = [1, 2, 3];
2856    /// let shared: Rc<[i32]> = Rc::from(original);
2857    /// assert_eq!(&[1, 2, 3], &shared[..]);
2858    /// ```
2859    #[inline]
2860    fn from(v: [T; N]) -> Rc<[T]> {
2861        Rc::<[T; N]>::from(v)
2862    }
2863}
2864
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T: Clone> From<&[T]> for Rc<[T]> {
    /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let original: &[i32] = &[1, 2, 3];
    /// let shared: Rc<[i32]> = Rc::from(original);
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// ```
    #[inline]
    fn from(v: &[T]) -> Rc<[T]> {
        // Dispatches through the internal `RcFromSlice` helper trait, which
        // presumably specializes the copy strategy (e.g. for `T: Copy`) —
        // see its definition elsewhere in this file.
        <Self as RcFromSlice<T>>::from_slice(v)
    }
}
2883
2884#[cfg(not(no_global_oom_handling))]
2885#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
2886impl<T: Clone> From<&mut [T]> for Rc<[T]> {
2887    /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
2888    ///
2889    /// # Example
2890    ///
2891    /// ```
2892    /// # use std::rc::Rc;
2893    /// let mut original = [1, 2, 3];
2894    /// let original: &mut [i32] = &mut original;
2895    /// let shared: Rc<[i32]> = Rc::from(original);
2896    /// assert_eq!(&[1, 2, 3], &shared[..]);
2897    /// ```
2898    #[inline]
2899    fn from(v: &mut [T]) -> Rc<[T]> {
2900        Rc::from(&*v)
2901    }
2902}
2903
2904#[cfg(not(no_global_oom_handling))]
2905#[stable(feature = "shared_from_slice", since = "1.21.0")]
2906impl From<&str> for Rc<str> {
2907    /// Allocates a reference-counted string slice and copies `v` into it.
2908    ///
2909    /// # Example
2910    ///
2911    /// ```
2912    /// # use std::rc::Rc;
2913    /// let shared: Rc<str> = Rc::from("statue");
2914    /// assert_eq!("statue", &shared[..]);
2915    /// ```
2916    #[inline]
2917    fn from(v: &str) -> Rc<str> {
2918        let rc = Rc::<[u8]>::from(v.as_bytes());
2919        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
2920    }
2921}
2922
2923#[cfg(not(no_global_oom_handling))]
2924#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
2925impl From<&mut str> for Rc<str> {
2926    /// Allocates a reference-counted string slice and copies `v` into it.
2927    ///
2928    /// # Example
2929    ///
2930    /// ```
2931    /// # use std::rc::Rc;
2932    /// let mut original = String::from("statue");
2933    /// let original: &mut str = &mut original;
2934    /// let shared: Rc<str> = Rc::from(original);
2935    /// assert_eq!("statue", &shared[..]);
2936    /// ```
2937    #[inline]
2938    fn from(v: &mut str) -> Rc<str> {
2939        Rc::from(&*v)
2940    }
2941}
2942
2943#[cfg(not(no_global_oom_handling))]
2944#[stable(feature = "shared_from_slice", since = "1.21.0")]
2945impl From<String> for Rc<str> {
2946    /// Allocates a reference-counted string slice and copies `v` into it.
2947    ///
2948    /// # Example
2949    ///
2950    /// ```
2951    /// # use std::rc::Rc;
2952    /// let original: String = "statue".to_owned();
2953    /// let shared: Rc<str> = Rc::from(original);
2954    /// assert_eq!("statue", &shared[..]);
2955    /// ```
2956    #[inline]
2957    fn from(v: String) -> Rc<str> {
2958        Rc::from(&v[..])
2959    }
2960}
2961
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Rc<T, A> {
    /// Move a boxed object to a new, reference counted, allocation.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let original: Box<i32> = Box::new(1);
    /// let shared: Rc<i32> = Rc::from(original);
    /// assert_eq!(1, *shared);
    /// ```
    #[inline]
    fn from(v: Box<T, A>) -> Rc<T, A> {
        // Internal helper that moves the boxed value into a fresh
        // reference-counted allocation (defined elsewhere in this file).
        Rc::from_box_in(v)
    }
}
2980
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T, A: Allocator> From<Vec<T, A>> for Rc<[T], A> {
    /// Allocates a reference-counted slice and moves `v`'s items into it.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let unique: Vec<i32> = vec![1, 2, 3];
    /// let shared: Rc<[i32]> = Rc::from(unique);
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// ```
    #[inline]
    fn from(v: Vec<T, A>) -> Rc<[T], A> {
        unsafe {
            // Take the vector apart without dropping anything.
            let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();

            // Allocate the `RcInner<[T]>` and move the elements over with a
            // bitwise copy; the originals in the old buffer are deliberately
            // not dropped (see below), so each value is owned exactly once.
            let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
            ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).value) as *mut T, len);

            // Create a `Vec<T, &A>` with length 0, to deallocate the buffer
            // without dropping its contents or the allocator
            let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc);

            Self::from_ptr_in(rc_ptr, alloc)
        }
    }
}
3010
#[stable(feature = "shared_from_cow", since = "1.45.0")]
impl<'a, B> From<Cow<'a, B>> for Rc<B>
where
    B: ToOwned + ?Sized,
    Rc<B>: From<&'a B> + From<B::Owned>,
{
    /// Creates a reference-counted pointer from a clone-on-write pointer by
    /// copying its content.
    ///
    /// # Example
    ///
    /// ```rust
    /// # use std::rc::Rc;
    /// # use std::borrow::Cow;
    /// let cow: Cow<'_, str> = Cow::Borrowed("eggplant");
    /// let shared: Rc<str> = Rc::from(cow);
    /// assert_eq!("eggplant", &shared[..]);
    /// ```
    #[inline]
    fn from(cow: Cow<'a, B>) -> Rc<B> {
        // Forward each variant to the matching `From` impl named in the
        // `where` clause: borrowed data is copied, owned data is consumed.
        match cow {
            Cow::Borrowed(s) => Rc::from(s),
            Cow::Owned(s) => Rc::from(s),
        }
    }
}
3037
3038#[stable(feature = "shared_from_str", since = "1.62.0")]
3039impl From<Rc<str>> for Rc<[u8]> {
3040    /// Converts a reference-counted string slice into a byte slice.
3041    ///
3042    /// # Example
3043    ///
3044    /// ```
3045    /// # use std::rc::Rc;
3046    /// let string: Rc<str> = Rc::from("eggplant");
3047    /// let bytes: Rc<[u8]> = Rc::from(string);
3048    /// assert_eq!("eggplant".as_bytes(), bytes.as_ref());
3049    /// ```
3050    #[inline]
3051    fn from(rc: Rc<str>) -> Self {
3052        // SAFETY: `str` has the same layout as `[u8]`.
3053        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const [u8]) }
3054    }
3055}
3056
#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
impl<T, A: Allocator, const N: usize> TryFrom<Rc<[T], A>> for Rc<[T; N], A> {
    type Error = Rc<[T], A>;

    /// Converts the slice into an array `Rc` if its length is exactly `N`,
    /// returning the original `Rc` unchanged otherwise. No data is copied;
    /// only the pointer type changes.
    fn try_from(boxed_slice: Rc<[T], A>) -> Result<Self, Self::Error> {
        if boxed_slice.len() == N {
            let (ptr, alloc) = Rc::into_inner_with_allocator(boxed_slice);
            // SAFETY: the length check above guarantees the allocation's
            // layout matches `[T; N]`, so the pointer cast is valid.
            Ok(unsafe { Rc::from_inner_in(ptr.cast(), alloc) })
        } else {
            Err(boxed_slice)
        }
    }
}
3070
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_iter", since = "1.37.0")]
impl<T> FromIterator<T> for Rc<[T]> {
    /// Takes each element in the `Iterator` and collects it into an `Rc<[T]>`.
    ///
    /// # Performance characteristics
    ///
    /// ## The general case
    ///
    /// In the general case, collecting into `Rc<[T]>` is done by first
    /// collecting into a `Vec<T>`. That is, when writing the following:
    ///
    /// ```rust
    /// # use std::rc::Rc;
    /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
    /// ```
    ///
    /// this behaves as if we wrote:
    ///
    /// ```rust
    /// # use std::rc::Rc;
    /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
    ///     .collect::<Vec<_>>() // The first set of allocations happens here.
    ///     .into(); // A second allocation for `Rc<[T]>` happens here.
    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
    /// ```
    ///
    /// This will allocate as many times as needed for constructing the `Vec<T>`
    /// and then it will allocate once for turning the `Vec<T>` into the `Rc<[T]>`.
    ///
    /// ## Iterators of known length
    ///
    /// When your `Iterator` implements `TrustedLen` and is of an exact size,
    /// a single allocation will be made for the `Rc<[T]>`. For example:
    ///
    /// ```rust
    /// # use std::rc::Rc;
    /// let evens: Rc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
    /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
    /// ```
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        // Dispatches through the `ToRcSlice` specialization trait below;
        // `TrustedLen` iterators take the single-allocation fast path.
        ToRcSlice::to_rc_slice(iter.into_iter())
    }
}
3116
/// Specialization trait used for collecting into `Rc<[T]>`.
///
/// The blanket impl below is the general (two-allocation) path; a second impl
/// specializes it for `TrustedLen` iterators.
#[cfg(not(no_global_oom_handling))]
trait ToRcSlice<T>: Iterator<Item = T> + Sized {
    fn to_rc_slice(self) -> Rc<[T]>;
}
3122
#[cfg(not(no_global_oom_handling))]
impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
    // Fallback path: buffer everything in a `Vec` first, then convert.
    // `default fn` allows the `TrustedLen` impl below to specialize this.
    default fn to_rc_slice(self) -> Rc<[T]> {
        self.collect::<Vec<T>>().into()
    }
}
3129
#[cfg(not(no_global_oom_handling))]
impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
    fn to_rc_slice(self) -> Rc<[T]> {
        // This is the case for a `TrustedLen` iterator.
        let (low, high) = self.size_hint();
        if let Some(high) = high {
            // `TrustedLen` promises an exact size hint; check it in debug
            // builds since `from_iter_exact` relies on it for soundness.
            debug_assert_eq!(
                low,
                high,
                "TrustedLen iterator's size hint is not exact: {:?}",
                (low, high)
            );

            unsafe {
                // SAFETY: We need to ensure that the iterator has an exact length and we have.
                Rc::from_iter_exact(self, low)
            }
        } else {
            // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
            // length exceeding `usize::MAX`.
            // The default implementation would collect into a vec which would panic.
            // Thus we panic here immediately without invoking `Vec` code.
            panic!("capacity overflow");
        }
    }
}
3156
/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
/// managed allocation.
///
/// The allocation is accessed by calling [`upgrade`] on the `Weak`
/// pointer, which returns an <code>[Option]<[Rc]\<T>></code>.
///
/// Since a `Weak` reference does not count towards ownership, it will not
/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
/// guarantees about the value still being present. Thus it may return [`None`]
/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
/// itself (the backing store) from being deallocated.
///
/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
/// prevent circular references between [`Rc`] pointers, since mutual owning references
/// would never allow either [`Rc`] to be dropped. For example, a tree could
/// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
/// pointers from children back to their parents.
///
/// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`].
///
/// [`upgrade`]: Weak::upgrade
#[stable(feature = "rc_weak", since = "1.4.0")]
#[rustc_diagnostic_item = "RcWeak"]
pub struct Weak<
    T: ?Sized,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // This is a `NonNull` to allow optimizing the size of this type in enums,
    // but it is not necessarily a valid pointer.
    // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
    // to allocate space on the heap. That's not a value a real pointer
    // will ever have because RcInner has alignment at least 2.
    ptr: NonNull<RcInner<T>>,
    // The allocator the backing allocation was (or would be) created with.
    alloc: A,
}
3193
// Like `Rc`, `Weak` uses non-atomic reference counts, so it can be neither
// sent nor shared across threads.
#[stable(feature = "rc_weak", since = "1.4.0")]
impl<T: ?Sized, A: Allocator> !Send for Weak<T, A> {}
#[stable(feature = "rc_weak", since = "1.4.0")]
impl<T: ?Sized, A: Allocator> !Sync for Weak<T, A> {}

// Allows `Weak<T> -> Weak<dyn Trait>`/`Weak<[T; N]> -> Weak<[T]>` coercions.
#[unstable(feature = "coerce_unsized", issue = "18598")]
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Weak<U, A>> for Weak<T, A> {}

// Allows `Weak<Self>` to be used as a dispatchable method receiver.
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}

// SAFETY: `Weak::clone` doesn't access any `Cell`s which could contain the `Weak` being cloned.
#[unstable(feature = "cell_get_cloned", issue = "145329")]
unsafe impl<T: ?Sized> CloneFromCell for Weak<T> {}
3208
impl<T> Weak<T> {
    /// Constructs a new `Weak<T>`, without allocating any memory.
    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`upgrade`]: Weak::upgrade
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Weak;
    ///
    /// let empty: Weak<i64> = Weak::new();
    /// assert!(empty.upgrade().is_none());
    /// ```
    #[inline]
    #[stable(feature = "downgraded_weak", since = "1.10.0")]
    #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")]
    #[must_use]
    pub const fn new() -> Weak<T> {
        // The address `usize::MAX` is a sentinel marking a dangling `Weak`
        // (see `is_dangling`); it cannot collide with a real allocation
        // because `RcInner` has alignment of at least 2.
        Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc: Global }
    }
}
3231
impl<T, A: Allocator> Weak<T, A> {
    /// Constructs a new `Weak<T>`, without allocating any memory, technically in the provided
    /// allocator.
    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`upgrade`]: Weak::upgrade
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Weak;
    ///
    /// let empty: Weak<i64> = Weak::new();
    /// assert!(empty.upgrade().is_none());
    /// ```
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn new_in(alloc: A) -> Weak<T, A> {
        // Same dangling sentinel as `Weak::new`; the allocator is stored but
        // nothing is allocated.
        Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc }
    }
}
3253
/// Returns `true` if `ptr` is the sentinel address (`usize::MAX`) used by
/// `Weak::new` to represent a `Weak` with no backing allocation.
pub(crate) fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
    // Compare only the address; the pointee type and metadata are irrelevant.
    let addr = ptr.cast::<()>().addr();
    addr == usize::MAX
}
3257
/// Helper type to allow accessing the reference counts without
/// making any assertions about the data field.
struct WeakInner<'a> {
    // Weak reference count (strong handles collectively hold one weak
    // reference — see `Default for Rc`, which starts both counts at 1).
    weak: &'a Cell<usize>,
    // Strong reference count; the value is dropped when this reaches zero.
    strong: &'a Cell<usize>,
}
3264
impl<T: ?Sized> Weak<T> {
    /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
    ///
    /// This can be used to safely get a strong reference (by calling [`upgrade`]
    /// later) or to deallocate the weak count by dropping the `Weak<T>`.
    ///
    /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
    /// as these don't own anything; the method still works on them).
    ///
    /// # Safety
    ///
    /// The pointer must have originated from the [`into_raw`] and must still own its potential
    /// weak reference, and `ptr` must point to a block of memory allocated by the global allocator.
    ///
    /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
    /// takes ownership of one weak reference currently represented as a raw pointer (the weak
    /// count is not modified by this operation) and therefore it must be paired with a previous
    /// call to [`into_raw`].
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let strong = Rc::new("hello".to_owned());
    ///
    /// let raw_1 = Rc::downgrade(&strong).into_raw();
    /// let raw_2 = Rc::downgrade(&strong).into_raw();
    ///
    /// assert_eq!(2, Rc::weak_count(&strong));
    ///
    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
    /// assert_eq!(1, Rc::weak_count(&strong));
    ///
    /// drop(strong);
    ///
    /// // Decrement the last weak count.
    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
    /// ```
    ///
    /// [`into_raw`]: Weak::into_raw
    /// [`upgrade`]: Weak::upgrade
    /// [`new`]: Weak::new
    #[inline]
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        // SAFETY: forwarded to `from_raw_in` with the global allocator, which
        // is the allocator this impl's `into_raw` documented above requires.
        unsafe { Self::from_raw_in(ptr, Global) }
    }

    /// Consumes the `Weak<T>` and turns it into a raw pointer.
    ///
    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
    /// one weak reference (the weak count is not modified by this operation). It can be turned
    /// back into the `Weak<T>` with [`from_raw`].
    ///
    /// The same restrictions of accessing the target of the pointer as with
    /// [`as_ptr`] apply.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let strong = Rc::new("hello".to_owned());
    /// let weak = Rc::downgrade(&strong);
    /// let raw = weak.into_raw();
    ///
    /// assert_eq!(1, Rc::weak_count(&strong));
    /// assert_eq!("hello", unsafe { &*raw });
    ///
    /// drop(unsafe { Weak::from_raw(raw) });
    /// assert_eq!(0, Rc::weak_count(&strong));
    /// ```
    ///
    /// [`from_raw`]: Weak::from_raw
    /// [`as_ptr`]: Weak::as_ptr
    #[must_use = "losing the pointer will leak memory"]
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    pub fn into_raw(self) -> *const T {
        // `ManuallyDrop` suppresses the destructor so the weak count is not
        // decremented: ownership of this weak reference moves into the
        // returned pointer.
        mem::ManuallyDrop::new(self).as_ptr()
    }
}
3347
3348impl<T: ?Sized, A: Allocator> Weak<T, A> {
    /// Returns a reference to the underlying allocator.
    ///
    /// This is the allocator the `Weak` was created with (or `Global` for a
    /// dangling `Weak` from `Weak::new`); no allocation is accessed.
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }
3355
    /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
    ///
    /// The pointer is valid only if there are some strong references. The pointer may be dangling,
    /// unaligned or even [`null`] otherwise.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    /// use std::ptr;
    ///
    /// let strong = Rc::new("hello".to_owned());
    /// let weak = Rc::downgrade(&strong);
    /// // Both point to the same object
    /// assert!(ptr::eq(&*strong, weak.as_ptr()));
    /// // The strong here keeps it alive, so we can still access the object.
    /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
    ///
    /// drop(strong);
    /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
    /// // undefined behavior.
    /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
    /// ```
    ///
    /// [`null`]: ptr::null
    #[must_use]
    #[stable(feature = "rc_as_ptr", since = "1.45.0")]
    pub fn as_ptr(&self) -> *const T {
        let ptr: *mut RcInner<T> = NonNull::as_ptr(self.ptr);

        if is_dangling(ptr) {
            // If the pointer is dangling, we return the sentinel directly. This cannot be
            // a valid payload address, as the payload is at least as aligned as RcInner (usize).
            ptr as *const T
        } else {
            // SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
            // The payload may be dropped at this point, and we have to maintain provenance,
            // so use raw pointer manipulation.
            // (No reference to the payload is ever created here.)
            unsafe { &raw mut (*ptr).value }
        }
    }
3397
3398    /// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
3399    ///
3400    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
3401    /// one weak reference (the weak count is not modified by this operation). It can be turned
3402    /// back into the `Weak<T>` with [`from_raw_in`].
3403    ///
3404    /// The same restrictions of accessing the target of the pointer as with
3405    /// [`as_ptr`] apply.
3406    ///
3407    /// # Examples
3408    ///
3409    /// ```
3410    /// #![feature(allocator_api)]
3411    /// use std::rc::{Rc, Weak};
3412    /// use std::alloc::System;
3413    ///
3414    /// let strong = Rc::new_in("hello".to_owned(), System);
3415    /// let weak = Rc::downgrade(&strong);
3416    /// let (raw, alloc) = weak.into_raw_with_allocator();
3417    ///
3418    /// assert_eq!(1, Rc::weak_count(&strong));
3419    /// assert_eq!("hello", unsafe { &*raw });
3420    ///
3421    /// drop(unsafe { Weak::from_raw_in(raw, alloc) });
3422    /// assert_eq!(0, Rc::weak_count(&strong));
3423    /// ```
3424    ///
3425    /// [`from_raw_in`]: Weak::from_raw_in
3426    /// [`as_ptr`]: Weak::as_ptr
3427    #[must_use = "losing the pointer will leak memory"]
3428    #[inline]
3429    #[unstable(feature = "allocator_api", issue = "32838")]
3430    pub fn into_raw_with_allocator(self) -> (*const T, A) {
3431        let this = mem::ManuallyDrop::new(self);
3432        let result = this.as_ptr();
3433        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
3434        let alloc = unsafe { ptr::read(&this.alloc) };
3435        (result, alloc)
3436    }
3437
3438    /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
3439    ///
3440    /// This can be used to safely get a strong reference (by calling [`upgrade`]
3441    /// later) or to deallocate the weak count by dropping the `Weak<T>`.
3442    ///
3443    /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
3444    /// as these don't own anything; the method still works on them).
3445    ///
3446    /// # Safety
3447    ///
3448    /// The pointer must have originated from the [`into_raw`] and must still own its potential
3449    /// weak reference, and `ptr` must point to a block of memory allocated by `alloc`.
3450    ///
3451    /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
3452    /// takes ownership of one weak reference currently represented as a raw pointer (the weak
3453    /// count is not modified by this operation) and therefore it must be paired with a previous
3454    /// call to [`into_raw`].
3455    ///
3456    /// # Examples
3457    ///
3458    /// ```
3459    /// use std::rc::{Rc, Weak};
3460    ///
3461    /// let strong = Rc::new("hello".to_owned());
3462    ///
3463    /// let raw_1 = Rc::downgrade(&strong).into_raw();
3464    /// let raw_2 = Rc::downgrade(&strong).into_raw();
3465    ///
3466    /// assert_eq!(2, Rc::weak_count(&strong));
3467    ///
3468    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
3469    /// assert_eq!(1, Rc::weak_count(&strong));
3470    ///
3471    /// drop(strong);
3472    ///
3473    /// // Decrement the last weak count.
3474    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
3475    /// ```
3476    ///
3477    /// [`into_raw`]: Weak::into_raw
3478    /// [`upgrade`]: Weak::upgrade
3479    /// [`new`]: Weak::new
3480    #[inline]
3481    #[unstable(feature = "allocator_api", issue = "32838")]
3482    pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
3483        // See Weak::as_ptr for context on how the input pointer is derived.
3484
3485        let ptr = if is_dangling(ptr) {
3486            // This is a dangling Weak.
3487            ptr as *mut RcInner<T>
3488        } else {
3489            // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
3490            // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
3491            let offset = unsafe { data_offset(ptr) };
3492            // Thus, we reverse the offset to get the whole RcInner.
3493            // SAFETY: the pointer originated from a Weak, so this offset is safe.
3494            unsafe { ptr.byte_sub(offset) as *mut RcInner<T> }
3495        };
3496
3497        // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
3498        Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc }
3499    }
3500
3501    /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
3502    /// dropping of the inner value if successful.
3503    ///
3504    /// Returns [`None`] if the inner value has since been dropped.
3505    ///
3506    /// # Examples
3507    ///
3508    /// ```
3509    /// use std::rc::Rc;
3510    ///
3511    /// let five = Rc::new(5);
3512    ///
3513    /// let weak_five = Rc::downgrade(&five);
3514    ///
3515    /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
3516    /// assert!(strong_five.is_some());
3517    ///
3518    /// // Destroy all strong pointers.
3519    /// drop(strong_five);
3520    /// drop(five);
3521    ///
3522    /// assert!(weak_five.upgrade().is_none());
3523    /// ```
3524    #[must_use = "this returns a new `Rc`, \
3525                  without modifying the original weak pointer"]
3526    #[stable(feature = "rc_weak", since = "1.4.0")]
3527    pub fn upgrade(&self) -> Option<Rc<T, A>>
3528    where
3529        A: Clone,
3530    {
3531        let inner = self.inner()?;
3532
3533        if inner.strong() == 0 {
3534            None
3535        } else {
3536            unsafe {
3537                inner.inc_strong();
3538                Some(Rc::from_inner_in(self.ptr, self.alloc.clone()))
3539            }
3540        }
3541    }
3542
3543    /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
3544    ///
3545    /// If `self` was created using [`Weak::new`], this will return 0.
3546    #[must_use]
3547    #[stable(feature = "weak_counts", since = "1.41.0")]
3548    pub fn strong_count(&self) -> usize {
3549        if let Some(inner) = self.inner() { inner.strong() } else { 0 }
3550    }
3551
3552    /// Gets the number of `Weak` pointers pointing to this allocation.
3553    ///
3554    /// If no strong pointers remain, this will return zero.
3555    #[must_use]
3556    #[stable(feature = "weak_counts", since = "1.41.0")]
3557    pub fn weak_count(&self) -> usize {
3558        if let Some(inner) = self.inner() {
3559            if inner.strong() > 0 {
3560                inner.weak() - 1 // subtract the implicit weak ptr
3561            } else {
3562                0
3563            }
3564        } else {
3565            0
3566        }
3567    }
3568
3569    /// Returns `None` when the pointer is dangling and there is no allocated `RcInner`,
3570    /// (i.e., when this `Weak` was created by `Weak::new`).
3571    #[inline]
3572    fn inner(&self) -> Option<WeakInner<'_>> {
3573        if is_dangling(self.ptr.as_ptr()) {
3574            None
3575        } else {
3576            // We are careful to *not* create a reference covering the "data" field, as
3577            // the field may be mutated concurrently (for example, if the last `Rc`
3578            // is dropped, the data field will be dropped in-place).
3579            Some(unsafe {
3580                let ptr = self.ptr.as_ptr();
3581                WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
3582            })
3583        }
3584    }
3585
3586    /// Returns `true` if the two `Weak`s point to the same allocation similar to [`ptr::eq`], or if
3587    /// both don't point to any allocation (because they were created with `Weak::new()`). However,
3588    /// this function ignores the metadata of  `dyn Trait` pointers.
3589    ///
3590    /// # Notes
3591    ///
3592    /// Since this compares pointers it means that `Weak::new()` will equal each
3593    /// other, even though they don't point to any allocation.
3594    ///
3595    /// # Examples
3596    ///
3597    /// ```
3598    /// use std::rc::Rc;
3599    ///
3600    /// let first_rc = Rc::new(5);
3601    /// let first = Rc::downgrade(&first_rc);
3602    /// let second = Rc::downgrade(&first_rc);
3603    ///
3604    /// assert!(first.ptr_eq(&second));
3605    ///
3606    /// let third_rc = Rc::new(5);
3607    /// let third = Rc::downgrade(&third_rc);
3608    ///
3609    /// assert!(!first.ptr_eq(&third));
3610    /// ```
3611    ///
3612    /// Comparing `Weak::new`.
3613    ///
3614    /// ```
3615    /// use std::rc::{Rc, Weak};
3616    ///
3617    /// let first = Weak::new();
3618    /// let second = Weak::new();
3619    /// assert!(first.ptr_eq(&second));
3620    ///
3621    /// let third_rc = Rc::new(());
3622    /// let third = Rc::downgrade(&third_rc);
3623    /// assert!(!first.ptr_eq(&third));
3624    /// ```
3625    #[inline]
3626    #[must_use]
3627    #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
3628    pub fn ptr_eq(&self, other: &Self) -> bool {
3629        ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
3630    }
3631}
3632
#[stable(feature = "rc_weak", since = "1.4.0")]
unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> {
    /// Drops the `Weak` pointer.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo = Rc::new(Foo);
    /// let weak_foo = Rc::downgrade(&foo);
    /// let other_weak_foo = Weak::clone(&weak_foo);
    ///
    /// drop(weak_foo);   // Doesn't print anything
    /// drop(foo);        // Prints "dropped!"
    ///
    /// assert!(other_weak_foo.upgrade().is_none());
    /// ```
    fn drop(&mut self) {
        // A dangling `Weak` (from `Weak::new`) owns no allocation: there are
        // no counts to update and nothing to free.
        let inner = if let Some(inner) = self.inner() { inner } else { return };

        inner.dec_weak();
        // the weak count starts at 1, and will only go to zero if all
        // the strong pointers have disappeared.
        if inner.weak() == 0 {
            unsafe {
                self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
            }
        }
    }
}
3672
3673#[stable(feature = "rc_weak", since = "1.4.0")]
3674impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> {
3675    /// Makes a clone of the `Weak` pointer that points to the same allocation.
3676    ///
3677    /// # Examples
3678    ///
3679    /// ```
3680    /// use std::rc::{Rc, Weak};
3681    ///
3682    /// let weak_five = Rc::downgrade(&Rc::new(5));
3683    ///
3684    /// let _ = Weak::clone(&weak_five);
3685    /// ```
3686    #[inline]
3687    fn clone(&self) -> Weak<T, A> {
3688        if let Some(inner) = self.inner() {
3689            inner.inc_weak()
3690        }
3691        Weak { ptr: self.ptr, alloc: self.alloc.clone() }
3692    }
3693}
3694
#[unstable(feature = "ergonomic_clones", issue = "32838")]
// Marker impl: opts `Weak` into ergonomic clones, since cloning it is cheap
// (a counter bump plus an allocator clone; see the `Clone` impl above).
impl<T: ?Sized, A: Allocator + Clone> UseCloned for Weak<T, A> {}
3697
3698#[stable(feature = "rc_weak", since = "1.4.0")]
3699impl<T: ?Sized, A: Allocator> fmt::Debug for Weak<T, A> {
3700    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3701        write!(f, "(Weak)")
3702    }
3703}
3704
3705#[stable(feature = "downgraded_weak", since = "1.10.0")]
3706impl<T> Default for Weak<T> {
3707    /// Constructs a new `Weak<T>`, without allocating any memory.
3708    /// Calling [`upgrade`] on the return value always gives [`None`].
3709    ///
3710    /// [`upgrade`]: Weak::upgrade
3711    ///
3712    /// # Examples
3713    ///
3714    /// ```
3715    /// use std::rc::Weak;
3716    ///
3717    /// let empty: Weak<i64> = Default::default();
3718    /// assert!(empty.upgrade().is_none());
3719    /// ```
3720    fn default() -> Weak<T> {
3721        Weak::new()
3722    }
3723}
3724
3725// NOTE: If you mem::forget Rcs (or Weaks), drop is skipped and the ref-count
3726// is not decremented, meaning the ref-count can overflow, and then you can
3727// free the allocation while outstanding Rcs (or Weaks) exist, which would be
3728// unsound. We abort because this is such a degenerate scenario that we don't
3729// care about what happens -- no real program should ever experience this.
3730//
3731// This should have negligible overhead since you don't actually need to
3732// clone these much in Rust thanks to ownership and move-semantics.
3733
/// Shared implementation of the strong/weak counter operations, used both by
/// `RcInner<T>` (counters stored inline) and `WeakInner<'_>` (borrowed counters).
#[doc(hidden)]
trait RcInnerPtr {
    fn weak_ref(&self) -> &Cell<usize>;
    fn strong_ref(&self) -> &Cell<usize>;

    #[inline]
    fn strong(&self) -> usize {
        self.strong_ref().get()
    }

    #[inline]
    fn inc_strong(&self) {
        let strong = self.strong();

        // We insert an `assume` here to hint LLVM at an otherwise
        // missed optimization.
        // SAFETY: The reference count will never be zero when this is
        // called.
        unsafe {
            hint::assert_unchecked(strong != 0);
        }

        let strong = strong.wrapping_add(1);
        self.strong_ref().set(strong);

        // We want to abort on overflow instead of dropping the value.
        // Checking for overflow after the store instead of before
        // allows for slightly better code generation.
        if core::intrinsics::unlikely(strong == 0) {
            abort();
        }
    }

    #[inline]
    fn dec_strong(&self) {
        // Callers guarantee the count is nonzero here, so plain subtraction
        // cannot underflow (a zero count would be a bug in this module).
        self.strong_ref().set(self.strong() - 1);
    }

    #[inline]
    fn weak(&self) -> usize {
        self.weak_ref().get()
    }

    #[inline]
    fn inc_weak(&self) {
        let weak = self.weak();

        // We insert an `assume` here to hint LLVM at an otherwise
        // missed optimization.
        // SAFETY: The reference count will never be zero when this is
        // called.
        unsafe {
            hint::assert_unchecked(weak != 0);
        }

        let weak = weak.wrapping_add(1);
        self.weak_ref().set(weak);

        // We want to abort on overflow instead of dropping the value.
        // Checking for overflow after the store instead of before
        // allows for slightly better code generation.
        if core::intrinsics::unlikely(weak == 0) {
            abort();
        }
    }

    #[inline]
    fn dec_weak(&self) {
        // As with `dec_strong`: the implicit weak reference keeps this >= 1
        // while any strong pointer exists, so this cannot underflow.
        self.weak_ref().set(self.weak() - 1);
    }
}
3805
// The counters live directly in the heap allocation's header fields.
impl<T: ?Sized> RcInnerPtr for RcInner<T> {
    #[inline(always)]
    fn weak_ref(&self) -> &Cell<usize> {
        &self.weak
    }

    #[inline(always)]
    fn strong_ref(&self) -> &Cell<usize> {
        &self.strong
    }
}
3817
// `WeakInner` already holds borrowed references to the counters (it must not
// create a reference over the payload; see `Weak::inner`), so just forward them.
impl<'a> RcInnerPtr for WeakInner<'a> {
    #[inline(always)]
    fn weak_ref(&self) -> &Cell<usize> {
        self.weak
    }

    #[inline(always)]
    fn strong_ref(&self) -> &Cell<usize> {
        self.strong
    }
}
3829
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Rc<T, A> {
    fn borrow(&self) -> &T {
        // Borrowing an `Rc<T>` as `&T` is simply a deref of the shared value.
        &**self
    }
}
3836
#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
impl<T: ?Sized, A: Allocator> AsRef<T> for Rc<T, A> {
    fn as_ref(&self) -> &T {
        // Same as `Borrow`: expose the pointee via deref.
        &**self
    }
}
3843
#[stable(feature = "pin", since = "1.33.0")]
// Moving the `Rc` handle never moves the heap value it points to, so `Rc` is `Unpin`.
impl<T: ?Sized, A: Allocator> Unpin for Rc<T, A> {}
3846
/// Gets the offset within an `RcInner` for the payload behind a pointer.
///
/// The pointer's metadata (for slices/trait objects) determines the payload's
/// alignment, which in turn determines where it starts after the header.
///
/// # Safety
///
/// The pointer must point to (and have valid metadata for) a previously
/// valid instance of T, but the T is allowed to be dropped.
unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> usize {
    // Align the unsized value to the end of the RcInner.
    // Because RcInner is repr(C), it will always be the last field in memory.
    // SAFETY: since the only unsized types possible are slices, trait objects,
    // and extern types, the input safety requirement is currently enough to
    // satisfy the requirements of Alignment::of_val_raw; this is an implementation
    // detail of the language that must not be relied upon outside of std.
    unsafe { data_offset_alignment(Alignment::of_val_raw(ptr)) }
}
3862
#[inline]
fn data_offset_alignment(alignment: Alignment) -> usize {
    // The payload follows the `RcInner` header (strong/weak counters), rounded
    // up so the payload itself lands at the requested alignment.
    let layout = Layout::new::<RcInner<()>>();
    layout.size() + layout.padding_needed_for(alignment)
}
3868
/// A uniquely owned [`Rc`].
///
/// This represents an `Rc` that is known to be uniquely owned -- that is, have exactly one strong
/// reference. Multiple weak pointers can be created, but attempts to upgrade those to strong
/// references will fail unless the `UniqueRc` they point to has been converted into a regular `Rc`.
///
/// Because they are uniquely owned, the contents of a `UniqueRc` can be freely mutated. A common
/// use case is to have an object be mutable during its initialization phase but then have it become
/// immutable and converted to a normal `Rc`.
///
/// This can be used as a flexible way to create cyclic data structures, as in the example below.
///
/// ```
/// #![feature(unique_rc_arc)]
/// use std::rc::{Rc, Weak, UniqueRc};
///
/// struct Gadget {
///     #[allow(dead_code)]
///     me: Weak<Gadget>,
/// }
///
/// fn create_gadget() -> Option<Rc<Gadget>> {
///     let mut rc = UniqueRc::new(Gadget {
///         me: Weak::new(),
///     });
///     rc.me = UniqueRc::downgrade(&rc);
///     Some(UniqueRc::into_rc(rc))
/// }
///
/// create_gadget().unwrap();
/// ```
///
/// An advantage of using `UniqueRc` over [`Rc::new_cyclic`] to build cyclic data structures is that
/// [`Rc::new_cyclic`]'s `data_fn` parameter cannot be async or return a [`Result`]. As shown in the
/// previous example, `UniqueRc` allows for more flexibility in the construction of cyclic data,
/// including fallible or async constructors.
#[unstable(feature = "unique_rc_arc", issue = "112566")]
pub struct UniqueRc<
    T: ?Sized,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // Pointer to the shared `RcInner` allocation (see `new_in`: the strong
    // count stays 0 while the value is uniquely owned).
    ptr: NonNull<RcInner<T>>,
    // Define the ownership of `RcInner<T>` for drop-check
    _marker: PhantomData<RcInner<T>>,
    // Invariance is necessary for soundness: once other `Weak`
    // references exist, we already have a form of shared mutability!
    _marker2: PhantomData<*mut T>,
    alloc: A,
}
3918
// Not necessary for correctness since `UniqueRc` contains `NonNull`,
// but having an explicit negative impl is nice for documentation purposes
// and results in nicer error messages. (Like `Rc`, the non-atomic counts
// make cross-thread use unsound.)
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> !Send for UniqueRc<T, A> {}
3924
// Not necessary for correctness since `UniqueRc` contains `NonNull`,
// but having an explicit negative impl is nice for documentation purposes
// and results in nicer error messages. (See the `!Send` impl above.)
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> !Sync for UniqueRc<T, A> {}
3930
#[unstable(feature = "unique_rc_arc", issue = "112566")]
// Allows unsizing coercions such as `UniqueRc<[T; N]>` -> `UniqueRc<[T]>`,
// mirroring the corresponding impl on `Rc` itself.
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<UniqueRc<U, A>>
    for UniqueRc<T, A>
{
}
3936
//#[unstable(feature = "unique_rc_arc", issue = "112566")]
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
// Lets `UniqueRc<Self>` act as a method receiver for `dyn Trait` dispatch.
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<UniqueRc<U>> for UniqueRc<T> {}
3940
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for UniqueRc<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Format as the inner value.
        fmt::Display::fmt(&**self, f)
    }
}
3947
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for UniqueRc<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Debug-format as the inner value (unlike `Weak`, the payload is
        // always alive while a `UniqueRc` exists).
        fmt::Debug::fmt(&**self, f)
    }
}
3954
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> fmt::Pointer for UniqueRc<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Print the address of the payload, not of the `UniqueRc` handle.
        fmt::Pointer::fmt(&(&raw const **self), f)
    }
}
3961
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for UniqueRc<T, A> {
    fn borrow(&self) -> &T {
        // Expose the uniquely-owned payload via deref.
        &**self
    }
}
3968
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> borrow::BorrowMut<T> for UniqueRc<T, A> {
    fn borrow_mut(&mut self) -> &mut T {
        // Unique ownership makes a mutable borrow sound (unlike plain `Rc`).
        &mut **self
    }
}
3975
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> AsRef<T> for UniqueRc<T, A> {
    fn as_ref(&self) -> &T {
        // Same as `Borrow`: a shared view of the payload.
        &**self
    }
}
3982
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> AsMut<T> for UniqueRc<T, A> {
    fn as_mut(&mut self) -> &mut T {
        // Same as `BorrowMut`: exclusive access is sound while unique.
        &mut **self
    }
}
3989
#[unstable(feature = "unique_rc_arc", issue = "112566")]
// As with `Rc`: moving the handle never moves the heap payload.
impl<T: ?Sized, A: Allocator> Unpin for UniqueRc<T, A> {}
3992
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T> From<T> for UniqueRc<T> {
    /// Moves `value` into a new, uniquely owned heap allocation.
    #[inline(always)]
    fn from(value: T) -> Self {
        Self::new(value)
    }
}
4001
4002#[unstable(feature = "unique_rc_arc", issue = "112566")]
4003impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for UniqueRc<T, A> {
4004    /// Equality for two `UniqueRc`s.
4005    ///
4006    /// Two `UniqueRc`s are equal if their inner values are equal.
4007    ///
4008    /// # Examples
4009    ///
4010    /// ```
4011    /// #![feature(unique_rc_arc)]
4012    /// use std::rc::UniqueRc;
4013    ///
4014    /// let five = UniqueRc::new(5);
4015    ///
4016    /// assert!(five == UniqueRc::new(5));
4017    /// ```
4018    #[inline]
4019    fn eq(&self, other: &Self) -> bool {
4020        PartialEq::eq(&**self, &**other)
4021    }
4022
4023    /// Inequality for two `UniqueRc`s.
4024    ///
4025    /// Two `UniqueRc`s are not equal if their inner values are not equal.
4026    ///
4027    /// # Examples
4028    ///
4029    /// ```
4030    /// #![feature(unique_rc_arc)]
4031    /// use std::rc::UniqueRc;
4032    ///
4033    /// let five = UniqueRc::new(5);
4034    ///
4035    /// assert!(five != UniqueRc::new(6));
4036    /// ```
4037    #[inline]
4038    fn ne(&self, other: &Self) -> bool {
4039        PartialEq::ne(&**self, &**other)
4040    }
4041}
4042
4043#[unstable(feature = "unique_rc_arc", issue = "112566")]
4044impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for UniqueRc<T, A> {
4045    /// Partial comparison for two `UniqueRc`s.
4046    ///
4047    /// The two are compared by calling `partial_cmp()` on their inner values.
4048    ///
4049    /// # Examples
4050    ///
4051    /// ```
4052    /// #![feature(unique_rc_arc)]
4053    /// use std::rc::UniqueRc;
4054    /// use std::cmp::Ordering;
4055    ///
4056    /// let five = UniqueRc::new(5);
4057    ///
4058    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&UniqueRc::new(6)));
4059    /// ```
4060    #[inline(always)]
4061    fn partial_cmp(&self, other: &UniqueRc<T, A>) -> Option<Ordering> {
4062        (**self).partial_cmp(&**other)
4063    }
4064
4065    /// Less-than comparison for two `UniqueRc`s.
4066    ///
4067    /// The two are compared by calling `<` on their inner values.
4068    ///
4069    /// # Examples
4070    ///
4071    /// ```
4072    /// #![feature(unique_rc_arc)]
4073    /// use std::rc::UniqueRc;
4074    ///
4075    /// let five = UniqueRc::new(5);
4076    ///
4077    /// assert!(five < UniqueRc::new(6));
4078    /// ```
4079    #[inline(always)]
4080    fn lt(&self, other: &UniqueRc<T, A>) -> bool {
4081        **self < **other
4082    }
4083
4084    /// 'Less than or equal to' comparison for two `UniqueRc`s.
4085    ///
4086    /// The two are compared by calling `<=` on their inner values.
4087    ///
4088    /// # Examples
4089    ///
4090    /// ```
4091    /// #![feature(unique_rc_arc)]
4092    /// use std::rc::UniqueRc;
4093    ///
4094    /// let five = UniqueRc::new(5);
4095    ///
4096    /// assert!(five <= UniqueRc::new(5));
4097    /// ```
4098    #[inline(always)]
4099    fn le(&self, other: &UniqueRc<T, A>) -> bool {
4100        **self <= **other
4101    }
4102
4103    /// Greater-than comparison for two `UniqueRc`s.
4104    ///
4105    /// The two are compared by calling `>` on their inner values.
4106    ///
4107    /// # Examples
4108    ///
4109    /// ```
4110    /// #![feature(unique_rc_arc)]
4111    /// use std::rc::UniqueRc;
4112    ///
4113    /// let five = UniqueRc::new(5);
4114    ///
4115    /// assert!(five > UniqueRc::new(4));
4116    /// ```
4117    #[inline(always)]
4118    fn gt(&self, other: &UniqueRc<T, A>) -> bool {
4119        **self > **other
4120    }
4121
4122    /// 'Greater than or equal to' comparison for two `UniqueRc`s.
4123    ///
4124    /// The two are compared by calling `>=` on their inner values.
4125    ///
4126    /// # Examples
4127    ///
4128    /// ```
4129    /// #![feature(unique_rc_arc)]
4130    /// use std::rc::UniqueRc;
4131    ///
4132    /// let five = UniqueRc::new(5);
4133    ///
4134    /// assert!(five >= UniqueRc::new(5));
4135    /// ```
4136    #[inline(always)]
4137    fn ge(&self, other: &UniqueRc<T, A>) -> bool {
4138        **self >= **other
4139    }
4140}
4141
4142#[unstable(feature = "unique_rc_arc", issue = "112566")]
4143impl<T: ?Sized + Ord, A: Allocator> Ord for UniqueRc<T, A> {
4144    /// Comparison for two `UniqueRc`s.
4145    ///
4146    /// The two are compared by calling `cmp()` on their inner values.
4147    ///
4148    /// # Examples
4149    ///
4150    /// ```
4151    /// #![feature(unique_rc_arc)]
4152    /// use std::rc::UniqueRc;
4153    /// use std::cmp::Ordering;
4154    ///
4155    /// let five = UniqueRc::new(5);
4156    ///
4157    /// assert_eq!(Ordering::Less, five.cmp(&UniqueRc::new(6)));
4158    /// ```
4159    #[inline]
4160    fn cmp(&self, other: &UniqueRc<T, A>) -> Ordering {
4161        (**self).cmp(&**other)
4162    }
4163}
4164
#[unstable(feature = "unique_rc_arc", issue = "112566")]
// Marker impl: equality via the inner `PartialEq` impl is total when `T: Eq`.
impl<T: ?Sized + Eq, A: Allocator> Eq for UniqueRc<T, A> {}
4167
4168#[unstable(feature = "unique_rc_arc", issue = "112566")]
4169impl<T: ?Sized + Hash, A: Allocator> Hash for UniqueRc<T, A> {
4170    fn hash<H: Hasher>(&self, state: &mut H) {
4171        (**self).hash(state);
4172    }
4173}
4174
// Depends on A = Global
impl<T> UniqueRc<T> {
    /// Creates a new `UniqueRc`.
    ///
    /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
    /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
    /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
    /// point to the new [`Rc`].
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "unique_rc_arc", issue = "112566")]
    pub fn new(value: T) -> Self {
        Self::new_in(value, Global)
    }

    /// Maps the value in a `UniqueRc`, reusing the allocation if possible.
    ///
    /// `f` is called on the value in the `UniqueRc` (taken by value), and the result is returned,
    /// also in a `UniqueRc`.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `UniqueRc::map(u, f)` instead of `u.map(f)`. This
    /// is so that there is no conflict with a method on the inner type.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(smart_pointer_try_map)]
    /// #![feature(unique_rc_arc)]
    ///
    /// use std::rc::UniqueRc;
    ///
    /// let r = UniqueRc::new(7);
    /// let new = UniqueRc::map(r, |i| i + 7);
    /// assert_eq!(*new, 14);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
    pub fn map<U>(this: Self, f: impl FnOnce(T) -> U) -> UniqueRc<U> {
        // Reuse the allocation only when the payload layout matches and no
        // `Weak`s observe it; otherwise fall back to a fresh `UniqueRc`.
        if size_of::<T>() == size_of::<U>()
            && align_of::<T>() == align_of::<U>()
            && UniqueRc::weak_count(&this) == 0
        {
            unsafe {
                // Move the value out of the allocation, reinterpret the
                // allocation as holding an uninitialized `U`, then write the
                // mapped value back in place.
                let ptr = UniqueRc::into_raw(this);
                let value = ptr.read();
                let mut allocation = UniqueRc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());

                allocation.write(f(value));
                allocation.assume_init()
            }
        } else {
            UniqueRc::new(f(UniqueRc::unwrap(this)))
        }
    }

    /// Attempts to map the value in a `UniqueRc`, reusing the allocation if possible.
    ///
    /// `f` is called on the value in the `UniqueRc` (taken by value), and if the operation
    /// succeeds, the result is returned, also in a `UniqueRc`.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `UniqueRc::try_map(u, f)` instead of `u.try_map(f)`. This
    /// is so that there is no conflict with a method on the inner type.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(smart_pointer_try_map)]
    /// #![feature(unique_rc_arc)]
    ///
    /// use std::rc::UniqueRc;
    ///
    /// let b = UniqueRc::new(7);
    /// let new = UniqueRc::try_map(b, u32::try_from).unwrap();
    /// assert_eq!(*new, 7);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
    pub fn try_map<R>(
        this: Self,
        f: impl FnOnce(T) -> R,
    ) -> <R::Residual as Residual<UniqueRc<R::Output>>>::TryType
    where
        R: Try,
        R::Residual: Residual<UniqueRc<R::Output>>,
    {
        // Same allocation-reuse strategy as `map`; on early exit via `?` the
        // allocation holds a `MaybeUninit` (dropping it is a no-op for the payload).
        if size_of::<T>() == size_of::<R::Output>()
            && align_of::<T>() == align_of::<R::Output>()
            && UniqueRc::weak_count(&this) == 0
        {
            unsafe {
                let ptr = UniqueRc::into_raw(this);
                let value = ptr.read();
                let mut allocation = UniqueRc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());

                allocation.write(f(value)?);
                try { allocation.assume_init() }
            }
        } else {
            try { UniqueRc::new(f(UniqueRc::unwrap(this))?) }
        }
    }

    // Takes the value out, releasing the allocation via the implicit weak
    // reference (freed immediately if no other `Weak`s remain).
    #[cfg(not(no_global_oom_handling))]
    fn unwrap(this: Self) -> T {
        let this = ManuallyDrop::new(this);
        // Move the payload out; `this` is ManuallyDrop so it won't be dropped again.
        let val: T = unsafe { ptr::read(&**this) };

        // Hand the implicit weak reference over to a `Weak`, whose `Drop`
        // decrements the count and deallocates if it was the last one.
        let _weak = Weak { ptr: this.ptr, alloc: Global };

        val
    }
}
4288
impl<T: ?Sized> UniqueRc<T> {
    /// Reconstructs a `UniqueRc` from a raw pointer previously produced by
    /// [`UniqueRc::into_raw`], taking back ownership of the allocation.
    ///
    /// # Safety
    ///
    /// `ptr` must have been obtained from `UniqueRc::into_raw` and not already reclaimed.
    #[cfg(not(no_global_oom_handling))]
    unsafe fn from_raw(ptr: *const T) -> Self {
        // SAFETY: caller guarantees `ptr` points at the `value` field of a live `RcInner`.
        let offset = unsafe { data_offset(ptr) };

        // Reverse the offset to find the original RcInner.
        let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };

        Self {
            // SAFETY: `rc_ptr` is derived from a pointer into a live allocation, so it is
            // necessarily non-null.
            ptr: unsafe { NonNull::new_unchecked(rc_ptr) },
            _marker: PhantomData,
            _marker2: PhantomData,
            alloc: Global,
        }
    }

    /// Consumes the `UniqueRc`, returning a raw pointer to the contained value.
    ///
    /// The caller becomes responsible for the allocation; it can be reclaimed with
    /// [`UniqueRc::from_raw`].
    #[cfg(not(no_global_oom_handling))]
    fn into_raw(this: Self) -> *const T {
        // Suppress the destructor so ownership transfers into the returned pointer.
        let this = ManuallyDrop::new(this);
        Self::as_ptr(&*this)
    }
}
4311
impl<T, A: Allocator> UniqueRc<T, A> {
    /// Creates a new `UniqueRc` in the provided allocator.
    ///
    /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
    /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
    /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
    /// point to the new [`Rc`].
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "unique_rc_arc", issue = "112566")]
    pub fn new_in(value: T, alloc: A) -> Self {
        let (ptr, alloc) = Box::into_unique(Box::new_in(
            RcInner {
                // A strong count of 0 is what makes `Weak::upgrade` fail until this
                // `UniqueRc` is converted into a regular `Rc`.
                strong: Cell::new(0),
                // keep one weak reference so if all the weak pointers that are created are dropped
                // the UniqueRc still stays valid.
                weak: Cell::new(1),
                value,
            },
            alloc,
        ));
        Self { ptr: ptr.into(), _marker: PhantomData, _marker2: PhantomData, alloc }
    }
}
4335
impl<T: ?Sized, A: Allocator> UniqueRc<T, A> {
    /// Converts the `UniqueRc` into a regular [`Rc`].
    ///
    /// This consumes the `UniqueRc` and returns a regular [`Rc`] that contains the `value` that
    /// is passed to `into_rc`.
    ///
    /// Any weak references created before this method is called can now be upgraded to strong
    /// references.
    #[unstable(feature = "unique_rc_arc", issue = "112566")]
    pub fn into_rc(this: Self) -> Rc<T, A> {
        // Suppress our own destructor; ownership of the allocation moves into the `Rc`.
        let mut this = ManuallyDrop::new(this);

        // Move the allocator out.
        // SAFETY: `this.alloc` will not be accessed again, nor dropped because it is in
        // a `ManuallyDrop`.
        let alloc: A = unsafe { ptr::read(&this.alloc) };

        // SAFETY: This pointer was allocated at creation time so we know it is valid.
        unsafe {
            // Convert our weak reference into a strong reference
            this.ptr.as_mut().strong.set(1);
            Rc::from_inner_in(this.ptr, alloc)
        }
    }

    /// Returns the number of `Weak` pointers to this allocation, excluding the implicit
    /// weak reference the `UniqueRc` itself holds.
    #[cfg(not(no_global_oom_handling))]
    fn weak_count(this: &Self) -> usize {
        // Subtract the implicit "strong weak" created in `new_in`.
        this.inner().weak() - 1
    }

    /// Shared access to the reference-count header and value.
    #[cfg(not(no_global_oom_handling))]
    fn inner(&self) -> &RcInner<T> {
        // SAFETY: while this UniqueRc is alive we're guaranteed that the inner pointer is valid.
        unsafe { self.ptr.as_ref() }
    }

    /// Returns a raw pointer to the contained value without touching the reference counts.
    #[cfg(not(no_global_oom_handling))]
    fn as_ptr(this: &Self) -> *const T {
        let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);

        // SAFETY: This cannot go through Deref::deref or UniqueRc::inner because
        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
        // write through the pointer after the Rc is recovered through `from_raw`.
        unsafe { &raw mut (*ptr).value }
    }

    /// Disassembles the `UniqueRc` into its inner pointer and allocator without running `Drop`.
    #[inline]
    #[cfg(not(no_global_oom_handling))]
    fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
        let this = mem::ManuallyDrop::new(this);
        // SAFETY: `this` is wrapped in `ManuallyDrop`, so `alloc` is not dropped twice.
        (this.ptr, unsafe { ptr::read(&this.alloc) })
    }

    /// Reassembles a `UniqueRc` from parts produced by `into_inner_with_allocator`.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a live `RcInner` allocated by `alloc` over which the returned
    /// `UniqueRc` may assume unique ownership.
    #[inline]
    #[cfg(not(no_global_oom_handling))]
    unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
        Self { ptr, _marker: PhantomData, _marker2: PhantomData, alloc }
    }
}
4395
impl<T: ?Sized, A: Allocator + Clone> UniqueRc<T, A> {
    /// Creates a new weak reference to the `UniqueRc`.
    ///
    /// Attempting to upgrade this weak reference will fail before the `UniqueRc` has been converted
    /// to a [`Rc`] using [`UniqueRc::into_rc`].
    #[unstable(feature = "unique_rc_arc", issue = "112566")]
    pub fn downgrade(this: &Self) -> Weak<T, A> {
        // SAFETY: This pointer was allocated at creation time and we guarantee that we only have
        // one strong reference before converting to a regular Rc.
        unsafe {
            // Account for the new `Weak`; its drop will decrement the count again.
            this.ptr.as_ref().inc_weak();
        }
        Weak { ptr: this.ptr, alloc: this.alloc.clone() }
    }
}
4411
#[cfg(not(no_global_oom_handling))]
impl<T, A: Allocator> UniqueRc<mem::MaybeUninit<T>, A> {
    /// Converts to `UniqueRc<T, A>`, assuming the contained value is initialized.
    ///
    /// # Safety
    ///
    /// The `MaybeUninit<T>` must hold a fully initialized `T`.
    unsafe fn assume_init(self) -> UniqueRc<T, A> {
        let (ptr, alloc) = UniqueRc::into_inner_with_allocator(self);
        // SAFETY: `MaybeUninit<T>` has the same layout as `T`, and the caller guarantees
        // initialization, so the cast pointer refers to a valid `RcInner<T>`.
        unsafe { UniqueRc::from_inner_in(ptr.cast(), alloc) }
    }
}
4419
4420#[unstable(feature = "unique_rc_arc", issue = "112566")]
4421impl<T: ?Sized, A: Allocator> Deref for UniqueRc<T, A> {
4422    type Target = T;
4423
4424    fn deref(&self) -> &T {
4425        // SAFETY: This pointer was allocated at creation time so we know it is valid.
4426        unsafe { &self.ptr.as_ref().value }
4427    }
4428}
4429
4430#[unstable(feature = "unique_rc_arc", issue = "112566")]
4431impl<T: ?Sized, A: Allocator> DerefMut for UniqueRc<T, A> {
4432    fn deref_mut(&mut self) -> &mut T {
4433        // SAFETY: This pointer was allocated at creation time so we know it is valid. We know we
4434        // have unique ownership and therefore it's safe to make a mutable reference because
4435        // `UniqueRc` owns the only strong reference to itself.
4436        unsafe { &mut (*self.ptr.as_ptr()).value }
4437    }
4438}
4439
#[unstable(feature = "unique_rc_arc", issue = "112566")]
unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc<T, A> {
    // `#[may_dangle]` asserts that dropping a `UniqueRc` only (possibly) drops a `T`,
    // never otherwise accesses it, relaxing the drop-check on borrowed contents.
    fn drop(&mut self) {
        unsafe {
            // destroy the contained object
            drop_in_place(DerefMut::deref_mut(self));

            // remove the implicit "strong weak" pointer now that we've destroyed the contents.
            self.ptr.as_ref().dec_weak();

            // Deallocate only when no outstanding `Weak` remains; otherwise the last
            // `Weak` to drop is responsible for freeing the allocation.
            if self.ptr.as_ref().weak() == 0 {
                self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
            }
        }
    }
}
4456
/// A unique owning pointer to a [`RcInner`] **that does not imply the contents are initialized,**
/// but will deallocate it (without dropping the value) when dropped.
///
/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic.
/// It is nearly a duplicate of `UniqueRc<MaybeUninit<T>, A>` except that it allows `T: !Sized`,
/// which `MaybeUninit` does not.
struct UniqueRcUninit<T: ?Sized, A: Allocator> {
    // Pointer to the allocated, possibly-uninitialized `RcInner`.
    ptr: NonNull<RcInner<T>>,
    // Layout of the *value* (not the whole `RcInner`); kept so the allocation layout
    // can be recomputed for deallocation, since `T` may be unsized.
    layout_for_value: Layout,
    // Always `Some`; an `Option` only so `Drop` and `into_rc` can move the allocator out.
    alloc: Option<A>,
}
4468
impl<T: ?Sized, A: Allocator> UniqueRcUninit<T, A> {
    /// Allocates a RcInner with layout suitable to contain `for_value` or a clone of it.
    #[cfg(not(no_global_oom_handling))]
    fn new(for_value: &T, alloc: A) -> UniqueRcUninit<T, A> {
        let layout = Layout::for_value(for_value);
        // SAFETY: `allocate_for_layout` receives a valid value layout; `with_metadata_of`
        // copies the (possibly wide) pointer metadata of `for_value` onto the new block.
        let ptr = unsafe {
            Rc::allocate_for_layout(
                layout,
                |layout_for_rc_inner| alloc.allocate(layout_for_rc_inner),
                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcInner<T>),
            )
        };
        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
    }

    /// Allocates a RcInner with layout suitable to contain `for_value` or a clone of it,
    /// returning an error if allocation fails.
    fn try_new(for_value: &T, alloc: A) -> Result<UniqueRcUninit<T, A>, AllocError> {
        let layout = Layout::for_value(for_value);
        // SAFETY: same contract as `new` above; `?` propagates allocation failure.
        let ptr = unsafe {
            Rc::try_allocate_for_layout(
                layout,
                |layout_for_rc_inner| alloc.allocate(layout_for_rc_inner),
                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcInner<T>),
            )?
        };
        Ok(Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) })
    }

    /// Returns the pointer to be written into to initialize the [`Rc`].
    fn data_ptr(&mut self) -> *mut T {
        // The value's offset inside `RcInner` depends only on the value's alignment.
        let offset = data_offset_alignment(self.layout_for_value.alignment());
        // SAFETY: the allocation made in `new`/`try_new` is large enough that `offset`
        // stays within it.
        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
    }

    /// Upgrade this into a normal [`Rc`].
    ///
    /// # Safety
    ///
    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
    unsafe fn into_rc(self) -> Rc<T, A> {
        // `ManuallyDrop` prevents our `Drop` from deallocating the now-owned memory.
        let mut this = ManuallyDrop::new(self);
        let ptr = this.ptr;
        let alloc = this.alloc.take().unwrap();

        // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible
        // for having initialized the data.
        unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
    }
}
4519
impl<T: ?Sized, A: Allocator> Drop for UniqueRcUninit<T, A> {
    // Deallocates without dropping the (possibly uninitialized) value; this is the
    // panic-cleanup path for `Rc::make_mut`.
    fn drop(&mut self) {
        // SAFETY:
        // * new() produced a pointer safe to deallocate.
        // * We own the pointer unless into_rc() was called, which forgets us.
        unsafe {
            // `alloc` is always `Some` here: only `into_rc` takes it, and that path
            // suppresses this destructor via `ManuallyDrop`.
            self.alloc.take().unwrap().deallocate(
                self.ptr.cast(),
                rc_inner_layout_for_value_layout(self.layout_for_value),
            );
        }
    }
}
4533
4534#[unstable(feature = "allocator_api", issue = "32838")]
4535unsafe impl<T: ?Sized + Allocator, A: Allocator> Allocator for Rc<T, A> {
4536    #[inline]
4537    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
4538        (**self).allocate(layout)
4539    }
4540
4541    #[inline]
4542    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
4543        (**self).allocate_zeroed(layout)
4544    }
4545
4546    #[inline]
4547    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
4548        // SAFETY: the safety contract must be upheld by the caller
4549        unsafe { (**self).deallocate(ptr, layout) }
4550    }
4551
4552    #[inline]
4553    unsafe fn grow(
4554        &self,
4555        ptr: NonNull<u8>,
4556        old_layout: Layout,
4557        new_layout: Layout,
4558    ) -> Result<NonNull<[u8]>, AllocError> {
4559        // SAFETY: the safety contract must be upheld by the caller
4560        unsafe { (**self).grow(ptr, old_layout, new_layout) }
4561    }
4562
4563    #[inline]
4564    unsafe fn grow_zeroed(
4565        &self,
4566        ptr: NonNull<u8>,
4567        old_layout: Layout,
4568        new_layout: Layout,
4569    ) -> Result<NonNull<[u8]>, AllocError> {
4570        // SAFETY: the safety contract must be upheld by the caller
4571        unsafe { (**self).grow_zeroed(ptr, old_layout, new_layout) }
4572    }
4573
4574    #[inline]
4575    unsafe fn shrink(
4576        &self,
4577        ptr: NonNull<u8>,
4578        old_layout: Layout,
4579        new_layout: Layout,
4580    ) -> Result<NonNull<[u8]>, AllocError> {
4581        // SAFETY: the safety contract must be upheld by the caller
4582        unsafe { (**self).shrink(ptr, old_layout, new_layout) }
4583    }
4584}