// core/ptr/non_null.rs
1use crate::clone::TrivialClone;
2use crate::cmp::Ordering;
3use crate::marker::{Destruct, PointeeSized, Unsize};
4use crate::mem::{MaybeUninit, SizedTypeProperties, transmute};
5use crate::num::NonZero;
6use crate::ops::{CoerceUnsized, DispatchFromDyn};
7use crate::pin::PinCoerceUnsized;
8use crate::ptr::Unique;
9use crate::slice::{self, SliceIndex};
10use crate::ub_checks::assert_unsafe_precondition;
11use crate::{fmt, hash, intrinsics, mem, ptr};
12
/// `*mut T` but non-zero and [covariant].
///
/// This is often the correct thing to use when building data structures using
/// raw pointers, but is ultimately more dangerous to use because of its additional
/// properties. If you're not sure if you should use `NonNull<T>`, just use `*mut T`!
///
/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
/// is never dereferenced. This is so that enums may use this forbidden value
/// as a discriminant -- `Option<NonNull<T>>` has the same size as `*mut T`.
/// However the pointer may still dangle if it isn't dereferenced.
///
/// Unlike `*mut T`, `NonNull<T>` is covariant over `T`. This is usually the correct
/// choice for most data structures and safe abstractions, such as `Box`, `Rc`, `Arc`, `Vec`,
/// and `LinkedList`.
///
/// In rare cases, if your type exposes a way to mutate the value of `T` through a `NonNull<T>`,
/// and you need to prevent unsoundness from variance (for example, if `T` could be a reference
/// with a shorter lifetime), you should add a field to make your type invariant, such as
/// `PhantomData<Cell<T>>` or `PhantomData<&'a mut T>`.
///
/// Example of a type that must be invariant:
/// ```rust
/// use std::cell::Cell;
/// use std::marker::PhantomData;
/// struct Invariant<T> {
///     ptr: std::ptr::NonNull<T>,
///     _invariant: PhantomData<Cell<T>>,
/// }
/// ```
///
/// Notice that `NonNull<T>` has a `From` instance for `&T`. However, this does
/// not change the fact that mutating through a (pointer derived from a) shared
/// reference is undefined behavior unless the mutation happens inside an
/// [`UnsafeCell<T>`]. The same goes for creating a mutable reference from a shared
/// reference. When using this `From` instance without an `UnsafeCell<T>`,
/// it is your responsibility to ensure that `as_mut` is never called, and `as_ptr`
/// is never used for mutation.
///
/// # Representation
///
/// Thanks to the [null pointer optimization],
/// `NonNull<T>` and `Option<NonNull<T>>`
/// are guaranteed to have the same size and alignment:
///
/// ```
/// use std::ptr::NonNull;
///
/// assert_eq!(size_of::<NonNull<i16>>(), size_of::<Option<NonNull<i16>>>());
/// assert_eq!(align_of::<NonNull<i16>>(), align_of::<Option<NonNull<i16>>>());
///
/// assert_eq!(size_of::<NonNull<str>>(), size_of::<Option<NonNull<str>>>());
/// assert_eq!(align_of::<NonNull<str>>(), align_of::<Option<NonNull<str>>>());
/// ```
///
/// [covariant]: https://doc.rust-lang.org/reference/subtyping.html
/// [`PhantomData`]: crate::marker::PhantomData
/// [`UnsafeCell<T>`]: crate::cell::UnsafeCell
/// [null pointer optimization]: crate::option#representation
#[stable(feature = "nonnull", since = "1.25.0")]
#[repr(transparent)]
// The two attributes below declare address 0 as an invalid value, which is
// what provides the `Option<NonNull<T>>` niche documented above.
#[rustc_layout_scalar_valid_range_start(1)]
#[rustc_nonnull_optimization_guaranteed]
#[rustc_diagnostic_item = "NonNull"]
#[ferrocene::prevalidated]
pub struct NonNull<T: PointeeSized> {
    // Remember to use `.as_ptr()` instead of `.pointer`, as field projecting to
    // this is banned by <https://github.com/rust-lang/compiler-team/issues/807>.
    pointer: *const T,
}
82
/// `NonNull` pointers are not `Send` because the data they reference may be aliased.
// N.B., this impl is unnecessary, but should provide better error messages.
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> !Send for NonNull<T> {}
87
/// `NonNull` pointers are not `Sync` because the data they reference may be aliased.
// N.B., this impl is unnecessary, but should provide better error messages.
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> !Sync for NonNull<T> {}
92
impl<T: Sized> NonNull<T> {
    /// Creates a pointer with the given address and no [provenance][crate::ptr#provenance].
    ///
    /// For more details, see the equivalent method on a raw pointer, [`ptr::without_provenance_mut`].
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
    #[stable(feature = "nonnull_provenance", since = "1.89.0")]
    #[rustc_const_stable(feature = "nonnull_provenance", since = "1.89.0")]
    #[must_use]
    #[inline]
    pub const fn without_provenance(addr: NonZero<usize>) -> Self {
        // SAFETY: we know `addr` is non-zero and all nonzero integers are valid raw pointers.
        // (`NonZero<usize>` and `NonNull` are both transparent over a pointer-sized value.)
        unsafe { transmute(addr) }
    }

    /// Creates a new `NonNull` that is dangling, but well-aligned.
    ///
    /// This is useful for initializing types which lazily allocate, like
    /// `Vec::new` does.
    ///
    /// Note that the address of the returned pointer may potentially
    /// be that of a valid pointer, which means this must not be used
    /// as a "not yet initialized" sentinel value.
    /// Types that lazily allocate must track initialization by some other means.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let ptr = NonNull::<u32>::dangling();
    /// // Important: don't try to access the value of `ptr` without
    /// // initializing it first! The pointer is not null but isn't valid either!
    /// ```
    #[stable(feature = "nonnull", since = "1.25.0")]
    #[rustc_const_stable(feature = "const_nonnull_dangling", since = "1.36.0")]
    #[must_use]
    #[inline]
    pub const fn dangling() -> Self {
        // `T`'s alignment is always non-zero, so it doubles as a well-aligned
        // non-null address.
        let align = crate::ptr::Alignment::of::<T>();
        NonNull::without_provenance(align.as_nonzero())
    }

    /// Converts an address back to a mutable pointer, picking up some previously 'exposed'
    /// [provenance][crate::ptr#provenance].
    ///
    /// For more details, see the equivalent method on a raw pointer, [`ptr::with_exposed_provenance_mut`].
    ///
    /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
    #[stable(feature = "nonnull_provenance", since = "1.89.0")]
    #[inline]
    pub fn with_exposed_provenance(addr: NonZero<usize>) -> Self {
        // SAFETY: we know `addr` is non-zero.
        unsafe {
            let ptr = crate::ptr::with_exposed_provenance_mut(addr.get());
            NonNull::new_unchecked(ptr)
        }
    }

    /// Returns a shared reference to the value. In contrast to [`as_ref`], this does not require
    /// that the value has to be initialized.
    ///
    /// For the mutable counterpart see [`as_uninit_mut`].
    ///
    /// [`as_ref`]: NonNull::as_ref
    /// [`as_uninit_mut`]: NonNull::as_uninit_mut
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    /// Note that because the created reference is to `MaybeUninit<T>`, the
    /// source pointer can point to uninitialized memory.
    #[inline]
    #[must_use]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_ref<'a>(self) -> &'a MaybeUninit<T> {
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a reference.
        // (`cast` reinterprets the pointee as `MaybeUninit<T>`, which has the
        // same layout as `T` but no initialization requirement.)
        unsafe { &*self.cast().as_ptr() }
    }

    /// Returns a unique reference to the value. In contrast to [`as_mut`], this does not require
    /// that the value has to be initialized.
    ///
    /// For the shared counterpart see [`as_uninit_ref`].
    ///
    /// [`as_mut`]: NonNull::as_mut
    /// [`as_uninit_ref`]: NonNull::as_uninit_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    /// Note that because the created reference is to `MaybeUninit<T>`, the
    /// source pointer can point to uninitialized memory.
    #[inline]
    #[must_use]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_mut<'a>(self) -> &'a mut MaybeUninit<T> {
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a reference.
        unsafe { &mut *self.cast().as_ptr() }
    }

    /// Casts from a pointer-to-`T` to a pointer-to-`[T; N]`.
    #[inline]
    #[unstable(feature = "ptr_cast_array", issue = "144514")]
    #[ferrocene::prevalidated]
    pub const fn cast_array<const N: usize>(self) -> NonNull<[T; N]> {
        self.cast()
    }
}
206
207impl<T: PointeeSized> NonNull<T> {
    /// Creates a new `NonNull`.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = unsafe { NonNull::new_unchecked(&mut x as *mut _) };
    /// ```
    ///
    /// *Incorrect* usage of this function:
    ///
    /// ```rust,no_run
    /// use std::ptr::NonNull;
    ///
    /// // NEVER DO THAT!!! This is undefined behavior. ⚠️
    /// let ptr = unsafe { NonNull::<u32>::new_unchecked(std::ptr::null_mut()) };
    /// ```
    #[stable(feature = "nonnull", since = "1.25.0")]
    #[rustc_const_stable(feature = "const_nonnull_new_unchecked", since = "1.25.0")]
    #[inline]
    #[track_caller]
    #[ferrocene::prevalidated]
    pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
        // SAFETY: the caller must guarantee that `ptr` is non-null.
        unsafe {
            // Precondition check for UB-checking builds; the thin `*mut ()`
            // cast lets this work for wide (unsized) pointees too.
            assert_unsafe_precondition!(
                check_language_ub,
                "NonNull::new_unchecked requires that the pointer is non-null",
                (ptr: *mut () = ptr as *mut ()) => !ptr.is_null()
            );
            // Relies on `NonNull` being `repr(transparent)` over `*const T`.
            transmute(ptr)
        }
    }
247
248 /// Creates a new `NonNull` if `ptr` is non-null.
249 ///
250 /// # Panics during const evaluation
251 ///
252 /// This method will panic during const evaluation if the pointer cannot be
253 /// determined to be null or not. See [`is_null`] for more information.
254 ///
255 /// [`is_null`]: ../primitive.pointer.html#method.is_null-1
256 ///
257 /// # Examples
258 ///
259 /// ```
260 /// use std::ptr::NonNull;
261 ///
262 /// let mut x = 0u32;
263 /// let ptr = NonNull::<u32>::new(&mut x as *mut _).expect("ptr is null!");
264 ///
265 /// if let Some(ptr) = NonNull::<u32>::new(std::ptr::null_mut()) {
266 /// unreachable!();
267 /// }
268 /// ```
269 #[stable(feature = "nonnull", since = "1.25.0")]
270 #[rustc_const_stable(feature = "const_nonnull_new", since = "1.85.0")]
271 #[inline]
272 pub const fn new(ptr: *mut T) -> Option<Self> {
273 if !ptr.is_null() {
274 // SAFETY: The pointer is already checked and is not null
275 Some(unsafe { Self::new_unchecked(ptr) })
276 } else {
277 None
278 }
279 }
280
    /// Converts a reference to a `NonNull` pointer.
    #[stable(feature = "non_null_from_ref", since = "1.89.0")]
    #[rustc_const_stable(feature = "non_null_from_ref", since = "1.89.0")]
    #[inline]
    #[ferrocene::prevalidated]
    pub const fn from_ref(r: &T) -> Self {
        // SAFETY: A reference cannot be null.
        // Relies on `NonNull` being `repr(transparent)` over `*const T`.
        unsafe { transmute(r as *const T) }
    }
290
    /// Converts a mutable reference to a `NonNull` pointer.
    #[stable(feature = "non_null_from_ref", since = "1.89.0")]
    #[rustc_const_stable(feature = "non_null_from_ref", since = "1.89.0")]
    #[inline]
    #[ferrocene::prevalidated]
    pub const fn from_mut(r: &mut T) -> Self {
        // SAFETY: A mutable reference cannot be null.
        // Relies on `NonNull` being `repr(transparent)` over `*const T`.
        unsafe { transmute(r as *mut T) }
    }
300
    /// Performs the same functionality as [`std::ptr::from_raw_parts`], except that a
    /// `NonNull` pointer is returned, as opposed to a raw `*const` pointer.
    ///
    /// See the documentation of [`std::ptr::from_raw_parts`] for more details.
    ///
    /// [`std::ptr::from_raw_parts`]: crate::ptr::from_raw_parts
    #[unstable(feature = "ptr_metadata", issue = "81513")]
    #[inline]
    pub const fn from_raw_parts(
        data_pointer: NonNull<impl super::Thin>,
        metadata: <T as super::Pointee>::Metadata,
    ) -> NonNull<T> {
        // SAFETY: The result of `ptr::from_raw_parts_mut` is non-null because `data_pointer` is.
        unsafe {
            NonNull::new_unchecked(super::from_raw_parts_mut(data_pointer.as_ptr(), metadata))
        }
    }
318
    /// Decompose a (possibly wide) pointer into its data pointer and metadata components.
    ///
    /// The pointer can be later reconstructed with [`NonNull::from_raw_parts`].
    #[unstable(feature = "ptr_metadata", issue = "81513")]
    #[must_use = "this returns the result of the operation, \
                  without modifying the original"]
    #[inline]
    pub const fn to_raw_parts(self) -> (NonNull<()>, <T as super::Pointee>::Metadata) {
        // `cast` drops the metadata, yielding the thin data pointer;
        // `metadata` extracts the wide-pointer metadata separately.
        (self.cast(), super::metadata(self.as_ptr()))
    }
329
    /// Gets the "address" portion of the pointer.
    ///
    /// For more details, see the equivalent method on a raw pointer, [`pointer::addr`].
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
    #[must_use]
    #[inline]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn addr(self) -> NonZero<usize> {
        // SAFETY: The pointer is guaranteed by the type to be non-null,
        // meaning that the address will be non-zero.
        unsafe { NonZero::new_unchecked(self.as_ptr().addr()) }
    }
343
    /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
    /// [`with_exposed_provenance`][NonNull::with_exposed_provenance] and returns the "address" portion.
    ///
    /// For more details, see the equivalent method on a raw pointer, [`pointer::expose_provenance`].
    ///
    /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
    #[stable(feature = "nonnull_provenance", since = "1.89.0")]
    pub fn expose_provenance(self) -> NonZero<usize> {
        // SAFETY: The pointer is guaranteed by the type to be non-null,
        // meaning that the address will be non-zero.
        unsafe { NonZero::new_unchecked(self.as_ptr().expose_provenance()) }
    }
356
    /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
    /// `self`.
    ///
    /// For more details, see the equivalent method on a raw pointer, [`pointer::with_addr`].
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
    #[must_use]
    #[inline]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn with_addr(self, addr: NonZero<usize>) -> Self {
        // SAFETY: The result of `pointer::with_addr` is non-null because `addr` is guaranteed to be non-zero.
        unsafe { NonNull::new_unchecked(self.as_ptr().with_addr(addr.get()) as *mut _) }
    }
370
371 /// Creates a new pointer by mapping `self`'s address to a new one, preserving the
372 /// [provenance][crate::ptr#provenance] of `self`.
373 ///
374 /// For more details, see the equivalent method on a raw pointer, [`pointer::map_addr`].
375 ///
376 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
377 #[must_use]
378 #[inline]
379 #[stable(feature = "strict_provenance", since = "1.84.0")]
380 pub fn map_addr(self, f: impl FnOnce(NonZero<usize>) -> NonZero<usize>) -> Self {
381 self.with_addr(f(self.addr()))
382 }
383
    /// Acquires the underlying `*mut` pointer.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::new(&mut x).expect("ptr is null!");
    ///
    /// let x_value = unsafe { *ptr.as_ptr() };
    /// assert_eq!(x_value, 0);
    ///
    /// unsafe { *ptr.as_ptr() += 2; }
    /// let x_value = unsafe { *ptr.as_ptr() };
    /// assert_eq!(x_value, 2);
    /// ```
    #[stable(feature = "nonnull", since = "1.25.0")]
    #[rustc_const_stable(feature = "const_nonnull_as_ptr", since = "1.32.0")]
    #[rustc_never_returns_null_ptr]
    #[must_use]
    #[inline(always)]
    #[ferrocene::prevalidated]
    pub const fn as_ptr(self) -> *mut T {
        // This is a transmute for the same reasons as `NonZero::get`.
        // (Reading the `pointer` field directly is banned; see the struct definition.)

        // SAFETY: `NonNull` is `transparent` over a `*const T`, and `*const T`
        // and `*mut T` have the same layout, so transitively we can transmute
        // our `NonNull` to a `*mut T` directly.
        unsafe { mem::transmute::<Self, *mut T>(self) }
    }
415
    /// Returns a shared reference to the value. If the value may be uninitialized, [`as_uninit_ref`]
    /// must be used instead.
    ///
    /// For the mutable counterpart see [`as_mut`].
    ///
    /// [`as_uninit_ref`]: NonNull::as_uninit_ref
    /// [`as_mut`]: NonNull::as_mut
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::new(&mut x as *mut _).expect("ptr is null!");
    ///
    /// let ref_x = unsafe { ptr.as_ref() };
    /// println!("{ref_x}");
    /// ```
    ///
    /// [the module documentation]: crate::ptr#safety
    #[stable(feature = "nonnull", since = "1.25.0")]
    #[rustc_const_stable(feature = "const_nonnull_as_ref", since = "1.73.0")]
    #[must_use]
    #[inline(always)]
    #[ferrocene::prevalidated]
    pub const unsafe fn as_ref<'a>(&self) -> &'a T {
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a reference.
        // `cast_const` avoids a mutable raw pointer deref.
        unsafe { &*self.as_ptr().cast_const() }
    }
453
    /// Returns a unique reference to the value. If the value may be uninitialized, [`as_uninit_mut`]
    /// must be used instead.
    ///
    /// For the shared counterpart see [`as_ref`].
    ///
    /// [`as_uninit_mut`]: NonNull::as_uninit_mut
    /// [`as_ref`]: NonNull::as_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let mut ptr = NonNull::new(&mut x).expect("null pointer");
    ///
    /// let x_ref = unsafe { ptr.as_mut() };
    /// assert_eq!(*x_ref, 0);
    /// *x_ref += 2;
    /// assert_eq!(*x_ref, 2);
    /// ```
    ///
    /// [the module documentation]: crate::ptr#safety
    #[stable(feature = "nonnull", since = "1.25.0")]
    #[rustc_const_stable(feature = "const_ptr_as_ref", since = "1.83.0")]
    #[must_use]
    #[inline(always)]
    #[ferrocene::prevalidated]
    pub const unsafe fn as_mut<'a>(&mut self) -> &'a mut T {
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a mutable reference.
        unsafe { &mut *self.as_ptr() }
    }
491
    /// Casts to a pointer of another type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::new(&mut x as *mut _).expect("null pointer");
    ///
    /// let casted_ptr = ptr.cast::<i8>();
    /// let raw_ptr: *mut i8 = casted_ptr.as_ptr();
    /// ```
    #[stable(feature = "nonnull_cast", since = "1.27.0")]
    #[rustc_const_stable(feature = "const_nonnull_cast", since = "1.36.0")]
    #[must_use = "this returns the result of the operation, \
                  without modifying the original"]
    #[inline]
    #[ferrocene::prevalidated]
    pub const fn cast<U>(self) -> NonNull<U> {
        // SAFETY: `self` is a `NonNull` pointer which is necessarily non-null,
        // and the `as` cast does not change the address.
        unsafe { transmute(self.as_ptr() as *mut U) }
    }
515
516 /// Try to cast to a pointer of another type by checking alignment.
517 ///
518 /// If the pointer is properly aligned to the target type, it will be
519 /// cast to the target type. Otherwise, `None` is returned.
520 ///
521 /// # Examples
522 ///
523 /// ```rust
524 /// #![feature(pointer_try_cast_aligned)]
525 /// use std::ptr::NonNull;
526 ///
527 /// let mut x = 0u64;
528 ///
529 /// let aligned = NonNull::from_mut(&mut x);
530 /// let unaligned = unsafe { aligned.byte_add(1) };
531 ///
532 /// assert!(aligned.try_cast_aligned::<u32>().is_some());
533 /// assert!(unaligned.try_cast_aligned::<u32>().is_none());
534 /// ```
535 #[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
536 #[must_use = "this returns the result of the operation, \
537 without modifying the original"]
538 #[inline]
539 pub fn try_cast_aligned<U>(self) -> Option<NonNull<U>> {
540 if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
541 }
542
    /// Adds an offset to a pointer.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
    ///   [allocation], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocation. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut s = [1, 2, 3];
    /// let ptr: NonNull<u32> = NonNull::new(s.as_mut_ptr()).unwrap();
    ///
    /// unsafe {
    ///     println!("{}", ptr.offset(1).read());
    ///     println!("{}", ptr.offset(2).read());
    /// }
    /// ```
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[stable(feature = "non_null_convenience", since = "1.80.0")]
    #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
    #[ferrocene::prevalidated]
    pub const unsafe fn offset(self, count: isize) -> Self
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        // Additionally safety contract of `offset` guarantees that the resulting pointer is
        // pointing to an allocation, there can't be an allocation at null, thus it's safe to
        // construct `NonNull`.
        unsafe { transmute(intrinsics::offset(self.as_ptr(), count)) }
    }
595
    /// Calculates the offset from a pointer in bytes.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [offset][pointer::offset] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[stable(feature = "non_null_convenience", since = "1.80.0")]
    #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
    pub const unsafe fn byte_offset(self, count: isize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `offset` and `byte_offset` has
        // the same safety contract.
        // Additionally safety contract of `offset` guarantees that the resulting pointer is
        // pointing to an allocation, there can't be an allocation at null, thus it's safe to
        // construct `NonNull`.
        unsafe { transmute(self.as_ptr().byte_offset(count)) }
    }
619
    /// Adds an offset to a pointer (convenience for `.offset(count as isize)`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
    ///   [allocation], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocation. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let s: &str = "123";
    /// let ptr: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap();
    ///
    /// unsafe {
    ///     println!("{}", ptr.add(1).read() as char);
    ///     println!("{}", ptr.add(2).read() as char);
    /// }
    /// ```
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[stable(feature = "non_null_convenience", since = "1.80.0")]
    #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
    #[ferrocene::prevalidated]
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        // Additionally safety contract of `offset` guarantees that the resulting pointer is
        // pointing to an allocation, there can't be an allocation at null, thus it's safe to
        // construct `NonNull`.
        // (`intrinsics::offset` accepts an unsigned `count` directly.)
        unsafe { transmute(intrinsics::offset(self.as_ptr(), count)) }
    }
672
    /// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`).
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`add`][NonNull::add] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[stable(feature = "non_null_convenience", since = "1.80.0")]
    #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
    pub const unsafe fn byte_add(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `add` and `byte_add` has the same
        // safety contract.
        // Additionally safety contract of `add` guarantees that the resulting pointer is pointing
        // to an allocation, there can't be an allocation at null, thus it's safe to construct
        // `NonNull`.
        unsafe { transmute(self.as_ptr().byte_add(count)) }
    }
696
    /// Subtracts an offset from a pointer (convenience for
    /// `.offset((count as isize).wrapping_neg())`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
    ///   [allocation], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocation. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let s: &str = "123";
    ///
    /// unsafe {
    ///     let end: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap().add(3);
    ///     println!("{}", end.sub(1).read() as char);
    ///     println!("{}", end.sub(2).read() as char);
    /// }
    /// ```
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[stable(feature = "non_null_convenience", since = "1.80.0")]
    #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
    #[ferrocene::prevalidated]
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        if T::IS_ZST {
            // Pointer arithmetic does nothing when the pointee is a ZST.
            // (Also sidesteps the negation below, which could overflow for
            // `count > isize::MAX` — only reachable when `size_of::<T>() == 0`.)
            self
        } else {
            // SAFETY: the caller must uphold the safety contract for `offset`.
            // Because the pointee is *not* a ZST, that means that `count` is
            // at most `isize::MAX`, and thus the negation cannot overflow.
            unsafe { self.offset((count as isize).unchecked_neg()) }
        }
    }
754
    /// Calculates the offset from a pointer in bytes (convenience for
    /// `.byte_offset((count as isize).wrapping_neg())`).
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`sub`][NonNull::sub] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[stable(feature = "non_null_convenience", since = "1.80.0")]
    #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
    pub const unsafe fn byte_sub(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `sub` and `byte_sub` has the same
        // safety contract.
        // Additionally safety contract of `sub` guarantees that the resulting pointer is pointing
        // to an allocation, there can't be an allocation at null, thus it's safe to construct
        // `NonNull`.
        unsafe { transmute(self.as_ptr().byte_sub(count)) }
    }
779
    /// Calculates the distance between two pointers within the same allocation. The returned value is in
    /// units of T: the distance in bytes divided by `size_of::<T>()`.
    ///
    /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
    /// except that it has a lot more opportunities for UB, in exchange for the compiler
    /// better understanding what you are doing.
    ///
    /// The primary motivation of this method is for computing the `len` of an array/slice
    /// of `T` that you are currently representing as a "start" and "end" pointer
    /// (and "end" is "one past the end" of the array).
    /// In that case, `end.offset_from(start)` gets you the length of the array.
    ///
    /// All of the following safety requirements are trivially satisfied for this use case.
    ///
    /// [`offset`]: #method.offset
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * `self` and `origin` must either
    ///
    ///   * point to the same address, or
    ///   * both be *derived from* a pointer to the same [allocation], and the memory range between
    ///     the two pointers must be in bounds of that object. (See below for an example.)
    ///
    /// * The distance between the pointers, in bytes, must be an exact multiple
    ///   of the size of `T`.
    ///
    /// As a consequence, the absolute distance between the pointers, in bytes, computed on
    /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
    /// implied by the in-bounds requirement, and the fact that no allocation can be larger
    /// than `isize::MAX` bytes.
    ///
    /// The requirement for pointers to be derived from the same allocation is primarily
    /// needed for `const`-compatibility: the distance between pointers into *different* allocated
    /// objects is not known at compile-time. However, the requirement also exists at
    /// runtime and may be exploited by optimizations. If you wish to compute the difference between
    /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
    /// origin as isize) / size_of::<T>()`.
    // FIXME: recommend `addr()` instead of `as usize` once that is stable.
    ///
    /// [`add`]: #method.add
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let a = [0; 5];
    /// let ptr1: NonNull<u32> = NonNull::from(&a[1]);
    /// let ptr2: NonNull<u32> = NonNull::from(&a[3]);
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
    ///     assert_eq!(ptr1.offset(2), ptr2);
    ///     assert_eq!(ptr2.offset(-2), ptr1);
    /// }
    /// ```
    ///
    /// *Incorrect* usage:
    ///
    /// ```rust,no_run
    /// use std::ptr::NonNull;
    ///
    /// let ptr1 = NonNull::new(Box::into_raw(Box::new(0u8))).unwrap();
    /// let ptr2 = NonNull::new(Box::into_raw(Box::new(1u8))).unwrap();
    /// let diff = (ptr2.addr().get() as isize).wrapping_sub(ptr1.addr().get() as isize);
    /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
    /// let diff_plus_1 = diff.wrapping_add(1);
    /// let ptr2_other = NonNull::new(ptr1.as_ptr().wrapping_byte_offset(diff_plus_1)).unwrap();
    /// assert_eq!(ptr2.addr(), ptr2_other.addr());
    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
    /// // computing their offset is undefined behavior, even though
    /// // they point to addresses that are in-bounds of the same object!
    ///
    /// let one = unsafe { ptr2_other.offset_from(ptr2) }; // Undefined Behavior! ⚠️
    /// ```
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[stable(feature = "non_null_convenience", since = "1.80.0")]
    #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
    pub const unsafe fn offset_from(self, origin: NonNull<T>) -> isize
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset_from`.
        unsafe { self.as_ptr().offset_from(origin.as_ptr()) }
    }
876
877 /// Calculates the distance between two pointers within the same allocation. The returned value is in
878 /// units of **bytes**.
879 ///
880 /// This is purely a convenience for casting to a `u8` pointer and
881 /// using [`offset_from`][NonNull::offset_from] on it. See that method for
882 /// documentation and safety requirements.
883 ///
884 /// For non-`Sized` pointees this operation considers only the data pointers,
885 /// ignoring the metadata.
886 #[inline(always)]
887 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
888 #[stable(feature = "non_null_convenience", since = "1.80.0")]
889 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
890 pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: NonNull<U>) -> isize {
891 // SAFETY: the caller must uphold the safety contract for `byte_offset_from`.
892 unsafe { self.as_ptr().byte_offset_from(origin.as_ptr()) }
893 }
894
    // N.B. `wrapping_offset`, `wrapping_add`, etc. are not implemented because they can wrap to null
896
897 /// Calculates the distance between two pointers within the same allocation, *where it's known that
898 /// `self` is equal to or greater than `origin`*. The returned value is in
899 /// units of T: the distance in bytes is divided by `size_of::<T>()`.
900 ///
901 /// This computes the same value that [`offset_from`](#method.offset_from)
902 /// would compute, but with the added precondition that the offset is
903 /// guaranteed to be non-negative. This method is equivalent to
904 /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
905 /// but it provides slightly more information to the optimizer, which can
906 /// sometimes allow it to optimize slightly better with some backends.
907 ///
908 /// This method can be though of as recovering the `count` that was passed
909 /// to [`add`](#method.add) (or, with the parameters in the other order,
910 /// to [`sub`](#method.sub)). The following are all equivalent, assuming
911 /// that their safety preconditions are met:
912 /// ```rust
913 /// # unsafe fn blah(ptr: std::ptr::NonNull<u32>, origin: std::ptr::NonNull<u32>, count: usize) -> bool { unsafe {
914 /// ptr.offset_from_unsigned(origin) == count
915 /// # &&
916 /// origin.add(count) == ptr
917 /// # &&
918 /// ptr.sub(count) == origin
919 /// # } }
920 /// ```
921 ///
922 /// # Safety
923 ///
924 /// - The distance between the pointers must be non-negative (`self >= origin`)
925 ///
926 /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
927 /// apply to this method as well; see it for the full details.
928 ///
929 /// Importantly, despite the return type of this method being able to represent
930 /// a larger offset, it's still *not permitted* to pass pointers which differ
931 /// by more than `isize::MAX` *bytes*. As such, the result of this method will
932 /// always be less than or equal to `isize::MAX as usize`.
933 ///
934 /// # Panics
935 ///
936 /// This function panics if `T` is a Zero-Sized Type ("ZST").
937 ///
938 /// # Examples
939 ///
940 /// ```
941 /// use std::ptr::NonNull;
942 ///
943 /// let a = [0; 5];
944 /// let ptr1: NonNull<u32> = NonNull::from(&a[1]);
945 /// let ptr2: NonNull<u32> = NonNull::from(&a[3]);
946 /// unsafe {
947 /// assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
948 /// assert_eq!(ptr1.add(2), ptr2);
949 /// assert_eq!(ptr2.sub(2), ptr1);
950 /// assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
951 /// }
952 ///
953 /// // This would be incorrect, as the pointers are not correctly ordered:
954 /// // ptr1.offset_from_unsigned(ptr2)
955 /// ```
956 #[inline]
957 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
958 #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
959 #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
960 #[ferrocene::prevalidated]
961 pub const unsafe fn offset_from_unsigned(self, subtracted: NonNull<T>) -> usize
962 where
963 T: Sized,
964 {
965 // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
966 unsafe { self.as_ptr().offset_from_unsigned(subtracted.as_ptr()) }
967 }
968
969 /// Calculates the distance between two pointers within the same allocation, *where it's known that
970 /// `self` is equal to or greater than `origin`*. The returned value is in
971 /// units of **bytes**.
972 ///
973 /// This is purely a convenience for casting to a `u8` pointer and
974 /// using [`offset_from_unsigned`][NonNull::offset_from_unsigned] on it.
975 /// See that method for documentation and safety requirements.
976 ///
977 /// For non-`Sized` pointees this operation considers only the data pointers,
978 /// ignoring the metadata.
979 #[inline(always)]
980 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
981 #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
982 #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
983 pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: NonNull<U>) -> usize {
984 // SAFETY: the caller must uphold the safety contract for `byte_offset_from_unsigned`.
985 unsafe { self.as_ptr().byte_offset_from_unsigned(origin.as_ptr()) }
986 }
987
988 /// Reads the value from `self` without moving it. This leaves the
989 /// memory in `self` unchanged.
990 ///
991 /// See [`ptr::read`] for safety concerns and examples.
992 ///
993 /// [`ptr::read`]: crate::ptr::read()
994 #[inline]
995 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
996 #[stable(feature = "non_null_convenience", since = "1.80.0")]
997 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
998 #[ferrocene::prevalidated]
999 pub const unsafe fn read(self) -> T
1000 where
1001 T: Sized,
1002 {
1003 // SAFETY: the caller must uphold the safety contract for `read`.
1004 unsafe { ptr::read(self.as_ptr()) }
1005 }
1006
1007 /// Performs a volatile read of the value from `self` without moving it. This
1008 /// leaves the memory in `self` unchanged.
1009 ///
1010 /// Volatile operations are intended to act on I/O memory, and are guaranteed
1011 /// to not be elided or reordered by the compiler across other volatile
1012 /// operations.
1013 ///
1014 /// See [`ptr::read_volatile`] for safety concerns and examples.
1015 ///
1016 /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
1017 #[inline]
1018 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1019 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1020 pub unsafe fn read_volatile(self) -> T
1021 where
1022 T: Sized,
1023 {
1024 // SAFETY: the caller must uphold the safety contract for `read_volatile`.
1025 unsafe { ptr::read_volatile(self.as_ptr()) }
1026 }
1027
1028 /// Reads the value from `self` without moving it. This leaves the
1029 /// memory in `self` unchanged.
1030 ///
1031 /// Unlike `read`, the pointer may be unaligned.
1032 ///
1033 /// See [`ptr::read_unaligned`] for safety concerns and examples.
1034 ///
1035 /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
1036 #[inline]
1037 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1038 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1039 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
1040 pub const unsafe fn read_unaligned(self) -> T
1041 where
1042 T: Sized,
1043 {
1044 // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
1045 unsafe { ptr::read_unaligned(self.as_ptr()) }
1046 }
1047
1048 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1049 /// and destination may overlap.
1050 ///
1051 /// NOTE: this has the *same* argument order as [`ptr::copy`].
1052 ///
1053 /// See [`ptr::copy`] for safety concerns and examples.
1054 ///
1055 /// [`ptr::copy`]: crate::ptr::copy()
1056 #[inline(always)]
1057 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1058 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1059 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1060 pub const unsafe fn copy_to(self, dest: NonNull<T>, count: usize)
1061 where
1062 T: Sized,
1063 {
1064 // SAFETY: the caller must uphold the safety contract for `copy`.
1065 unsafe { ptr::copy(self.as_ptr(), dest.as_ptr(), count) }
1066 }
1067
1068 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1069 /// and destination may *not* overlap.
1070 ///
1071 /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
1072 ///
1073 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1074 ///
1075 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1076 #[inline(always)]
1077 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1078 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1079 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1080 pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNull<T>, count: usize)
1081 where
1082 T: Sized,
1083 {
1084 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1085 unsafe { ptr::copy_nonoverlapping(self.as_ptr(), dest.as_ptr(), count) }
1086 }
1087
1088 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1089 /// and destination may overlap.
1090 ///
1091 /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
1092 ///
1093 /// See [`ptr::copy`] for safety concerns and examples.
1094 ///
1095 /// [`ptr::copy`]: crate::ptr::copy()
1096 #[inline(always)]
1097 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1098 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1099 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1100 pub const unsafe fn copy_from(self, src: NonNull<T>, count: usize)
1101 where
1102 T: Sized,
1103 {
1104 // SAFETY: the caller must uphold the safety contract for `copy`.
1105 unsafe { ptr::copy(src.as_ptr(), self.as_ptr(), count) }
1106 }
1107
1108 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1109 /// and destination may *not* overlap.
1110 ///
1111 /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
1112 ///
1113 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1114 ///
1115 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1116 #[inline(always)]
1117 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1118 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1119 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1120 pub const unsafe fn copy_from_nonoverlapping(self, src: NonNull<T>, count: usize)
1121 where
1122 T: Sized,
1123 {
1124 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1125 unsafe { ptr::copy_nonoverlapping(src.as_ptr(), self.as_ptr(), count) }
1126 }
1127
    /// Executes the destructor (if any) of the pointed-to value.
    ///
    /// See [`ptr::drop_in_place`] for safety concerns and examples.
    ///
    /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
    #[inline(always)]
    #[stable(feature = "non_null_convenience", since = "1.80.0")]
    #[rustc_const_unstable(feature = "const_drop_in_place", issue = "109342")]
    pub const unsafe fn drop_in_place(self)
    where
        // The `[const] Destruct` bound restricts const-context callers to pointees whose
        // destructor is itself const-evaluable; at runtime the bound is trivially met.
        T: [const] Destruct,
    {
        // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
        unsafe { ptr::drop_in_place(self.as_ptr()) }
    }
1143
1144 /// Overwrites a memory location with the given value without reading or
1145 /// dropping the old value.
1146 ///
1147 /// See [`ptr::write`] for safety concerns and examples.
1148 ///
1149 /// [`ptr::write`]: crate::ptr::write()
1150 #[inline(always)]
1151 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1152 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1153 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1154 pub const unsafe fn write(self, val: T)
1155 where
1156 T: Sized,
1157 {
1158 // SAFETY: the caller must uphold the safety contract for `write`.
1159 unsafe { ptr::write(self.as_ptr(), val) }
1160 }
1161
1162 /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
1163 /// bytes of memory starting at `self` to `val`.
1164 ///
1165 /// See [`ptr::write_bytes`] for safety concerns and examples.
1166 ///
1167 /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
1168 #[inline(always)]
1169 #[doc(alias = "memset")]
1170 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1171 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1172 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1173 pub const unsafe fn write_bytes(self, val: u8, count: usize)
1174 where
1175 T: Sized,
1176 {
1177 // SAFETY: the caller must uphold the safety contract for `write_bytes`.
1178 unsafe { ptr::write_bytes(self.as_ptr(), val, count) }
1179 }
1180
1181 /// Performs a volatile write of a memory location with the given value without
1182 /// reading or dropping the old value.
1183 ///
1184 /// Volatile operations are intended to act on I/O memory, and are guaranteed
1185 /// to not be elided or reordered by the compiler across other volatile
1186 /// operations.
1187 ///
1188 /// See [`ptr::write_volatile`] for safety concerns and examples.
1189 ///
1190 /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
1191 #[inline(always)]
1192 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1193 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1194 pub unsafe fn write_volatile(self, val: T)
1195 where
1196 T: Sized,
1197 {
1198 // SAFETY: the caller must uphold the safety contract for `write_volatile`.
1199 unsafe { ptr::write_volatile(self.as_ptr(), val) }
1200 }
1201
1202 /// Overwrites a memory location with the given value without reading or
1203 /// dropping the old value.
1204 ///
1205 /// Unlike `write`, the pointer may be unaligned.
1206 ///
1207 /// See [`ptr::write_unaligned`] for safety concerns and examples.
1208 ///
1209 /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
1210 #[inline(always)]
1211 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1212 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1213 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1214 pub const unsafe fn write_unaligned(self, val: T)
1215 where
1216 T: Sized,
1217 {
1218 // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
1219 unsafe { ptr::write_unaligned(self.as_ptr(), val) }
1220 }
1221
1222 /// Replaces the value at `self` with `src`, returning the old
1223 /// value, without dropping either.
1224 ///
1225 /// See [`ptr::replace`] for safety concerns and examples.
1226 ///
1227 /// [`ptr::replace`]: crate::ptr::replace()
1228 #[inline(always)]
1229 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1230 #[rustc_const_stable(feature = "const_inherent_ptr_replace", since = "1.88.0")]
1231 pub const unsafe fn replace(self, src: T) -> T
1232 where
1233 T: Sized,
1234 {
1235 // SAFETY: the caller must uphold the safety contract for `replace`.
1236 unsafe { ptr::replace(self.as_ptr(), src) }
1237 }
1238
1239 /// Swaps the values at two mutable locations of the same type, without
1240 /// deinitializing either. They may overlap, unlike `mem::swap` which is
1241 /// otherwise equivalent.
1242 ///
1243 /// See [`ptr::swap`] for safety concerns and examples.
1244 ///
1245 /// [`ptr::swap`]: crate::ptr::swap()
1246 #[inline(always)]
1247 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1248 #[rustc_const_stable(feature = "const_swap", since = "1.85.0")]
1249 pub const unsafe fn swap(self, with: NonNull<T>)
1250 where
1251 T: Sized,
1252 {
1253 // SAFETY: the caller must uphold the safety contract for `swap`.
1254 unsafe { ptr::swap(self.as_ptr(), with.as_ptr()) }
1255 }
1256
1257 /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
1258 /// `align`.
1259 ///
1260 /// If it is not possible to align the pointer, the implementation returns
1261 /// `usize::MAX`.
1262 ///
1263 /// The offset is expressed in number of `T` elements, and not bytes.
1264 ///
1265 /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
1266 /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
1267 /// the returned offset is correct in all terms other than alignment.
1268 ///
1269 /// When this is called during compile-time evaluation (which is unstable), the implementation
1270 /// may return `usize::MAX` in cases where that can never happen at runtime. This is because the
1271 /// actual alignment of pointers is not known yet during compile-time, so an offset with
1272 /// guaranteed alignment can sometimes not be computed. For example, a buffer declared as `[u8;
1273 /// N]` might be allocated at an odd or an even address, but at compile-time this is not yet
1274 /// known, so the execution has to be correct for either choice. It is therefore impossible to
1275 /// find an offset that is guaranteed to be 2-aligned. (This behavior is subject to change, as usual
1276 /// for unstable APIs.)
1277 ///
1278 /// # Panics
1279 ///
1280 /// The function panics if `align` is not a power-of-two.
1281 ///
1282 /// # Examples
1283 ///
1284 /// Accessing adjacent `u8` as `u16`
1285 ///
1286 /// ```
1287 /// use std::ptr::NonNull;
1288 ///
1289 /// # unsafe {
1290 /// let x = [5_u8, 6, 7, 8, 9];
1291 /// let ptr = NonNull::new(x.as_ptr() as *mut u8).unwrap();
1292 /// let offset = ptr.align_offset(align_of::<u16>());
1293 ///
1294 /// if offset < x.len() - 1 {
1295 /// let u16_ptr = ptr.add(offset).cast::<u16>();
1296 /// assert!(u16_ptr.read() == u16::from_ne_bytes([5, 6]) || u16_ptr.read() == u16::from_ne_bytes([6, 7]));
1297 /// } else {
1298 /// // while the pointer can be aligned via `offset`, it would point
1299 /// // outside the allocation
1300 /// }
1301 /// # }
1302 /// ```
1303 #[inline]
1304 #[must_use]
1305 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1306 pub fn align_offset(self, align: usize) -> usize
1307 where
1308 T: Sized,
1309 {
1310 if !align.is_power_of_two() {
1311 panic!("align_offset: align is not a power-of-two");
1312 }
1313
1314 {
1315 // SAFETY: `align` has been checked to be a power of 2 above.
1316 unsafe { ptr::align_offset(self.as_ptr(), align) }
1317 }
1318 }
1319
1320 /// Returns whether the pointer is properly aligned for `T`.
1321 ///
1322 /// # Examples
1323 ///
1324 /// ```
1325 /// use std::ptr::NonNull;
1326 ///
1327 /// // On some platforms, the alignment of i32 is less than 4.
1328 /// #[repr(align(4))]
1329 /// struct AlignedI32(i32);
1330 ///
1331 /// let data = AlignedI32(42);
1332 /// let ptr = NonNull::<AlignedI32>::from(&data);
1333 ///
1334 /// assert!(ptr.is_aligned());
1335 /// assert!(!NonNull::new(ptr.as_ptr().wrapping_byte_add(1)).unwrap().is_aligned());
1336 /// ```
1337 #[inline]
1338 #[must_use]
1339 #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
1340 pub fn is_aligned(self) -> bool
1341 where
1342 T: Sized,
1343 {
1344 self.as_ptr().is_aligned()
1345 }
1346
1347 /// Returns whether the pointer is aligned to `align`.
1348 ///
1349 /// For non-`Sized` pointees this operation considers only the data pointer,
1350 /// ignoring the metadata.
1351 ///
1352 /// # Panics
1353 ///
1354 /// The function panics if `align` is not a power-of-two (this includes 0).
1355 ///
1356 /// # Examples
1357 ///
1358 /// ```
1359 /// #![feature(pointer_is_aligned_to)]
1360 ///
1361 /// // On some platforms, the alignment of i32 is less than 4.
1362 /// #[repr(align(4))]
1363 /// struct AlignedI32(i32);
1364 ///
1365 /// let data = AlignedI32(42);
1366 /// let ptr = &data as *const AlignedI32;
1367 ///
1368 /// assert!(ptr.is_aligned_to(1));
1369 /// assert!(ptr.is_aligned_to(2));
1370 /// assert!(ptr.is_aligned_to(4));
1371 ///
1372 /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
1373 /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
1374 ///
1375 /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
1376 /// ```
1377 #[inline]
1378 #[must_use]
1379 #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
1380 pub fn is_aligned_to(self, align: usize) -> bool {
1381 self.as_ptr().is_aligned_to(align)
1382 }
1383}
1384
1385impl<T> NonNull<T> {
1386 /// Casts from a type to its maybe-uninitialized version.
1387 #[must_use]
1388 #[inline(always)]
1389 #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
1390 pub const fn cast_uninit(self) -> NonNull<MaybeUninit<T>> {
1391 self.cast()
1392 }
1393
1394 /// Creates a non-null raw slice from a thin pointer and a length.
1395 ///
1396 /// The `len` argument is the number of **elements**, not the number of bytes.
1397 ///
1398 /// This function is safe, but dereferencing the return value is unsafe.
1399 /// See the documentation of [`slice::from_raw_parts`] for slice safety requirements.
1400 ///
1401 /// # Examples
1402 ///
1403 /// ```rust
1404 /// #![feature(ptr_cast_slice)]
1405 /// use std::ptr::NonNull;
1406 ///
1407 /// // create a slice pointer when starting out with a pointer to the first element
1408 /// let mut x = [5, 6, 7];
1409 /// let nonnull_pointer = NonNull::new(x.as_mut_ptr()).unwrap();
1410 /// let slice = nonnull_pointer.cast_slice(3);
1411 /// assert_eq!(unsafe { slice.as_ref()[2] }, 7);
1412 /// ```
1413 ///
1414 /// (Note that this example artificially demonstrates a use of this method,
1415 /// but `let slice = NonNull::from(&x[..]);` would be a better way to write code like this.)
1416 #[inline]
1417 #[must_use]
1418 #[unstable(feature = "ptr_cast_slice", issue = "149103")]
1419 pub const fn cast_slice(self, len: usize) -> NonNull<[T]> {
1420 NonNull::slice_from_raw_parts(self, len)
1421 }
1422}
1423impl<T> NonNull<MaybeUninit<T>> {
1424 /// Casts from a maybe-uninitialized type to its initialized version.
1425 ///
1426 /// This is always safe, since UB can only occur if the pointer is read
1427 /// before being initialized.
1428 #[must_use]
1429 #[inline(always)]
1430 #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
1431 pub const fn cast_init(self) -> NonNull<T> {
1432 self.cast()
1433 }
1434}
1435
1436impl<T> NonNull<[T]> {
1437 /// Creates a non-null raw slice from a thin pointer and a length.
1438 ///
1439 /// The `len` argument is the number of **elements**, not the number of bytes.
1440 ///
1441 /// This function is safe, but dereferencing the return value is unsafe.
1442 /// See the documentation of [`slice::from_raw_parts`] for slice safety requirements.
1443 ///
1444 /// # Examples
1445 ///
1446 /// ```rust
1447 /// use std::ptr::NonNull;
1448 ///
1449 /// // create a slice pointer when starting out with a pointer to the first element
1450 /// let mut x = [5, 6, 7];
1451 /// let nonnull_pointer = NonNull::new(x.as_mut_ptr()).unwrap();
1452 /// let slice = NonNull::slice_from_raw_parts(nonnull_pointer, 3);
1453 /// assert_eq!(unsafe { slice.as_ref()[2] }, 7);
1454 /// ```
1455 ///
1456 /// (Note that this example artificially demonstrates a use of this method,
1457 /// but `let slice = NonNull::from(&x[..]);` would be a better way to write code like this.)
1458 #[stable(feature = "nonnull_slice_from_raw_parts", since = "1.70.0")]
1459 #[rustc_const_stable(feature = "const_slice_from_raw_parts_mut", since = "1.83.0")]
1460 #[must_use]
1461 #[inline]
1462 pub const fn slice_from_raw_parts(data: NonNull<T>, len: usize) -> Self {
1463 // SAFETY: `data` is a `NonNull` pointer which is necessarily non-null
1464 unsafe { Self::new_unchecked(super::slice_from_raw_parts_mut(data.as_ptr(), len)) }
1465 }
1466
1467 /// Returns the length of a non-null raw slice.
1468 ///
1469 /// The returned value is the number of **elements**, not the number of bytes.
1470 ///
1471 /// This function is safe, even when the non-null raw slice cannot be dereferenced to a slice
1472 /// because the pointer does not have a valid address.
1473 ///
1474 /// # Examples
1475 ///
1476 /// ```rust
1477 /// use std::ptr::NonNull;
1478 ///
1479 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1480 /// assert_eq!(slice.len(), 3);
1481 /// ```
1482 #[stable(feature = "slice_ptr_len_nonnull", since = "1.63.0")]
1483 #[rustc_const_stable(feature = "const_slice_ptr_len_nonnull", since = "1.63.0")]
1484 #[must_use]
1485 #[inline]
1486 pub const fn len(self) -> usize {
1487 self.as_ptr().len()
1488 }
1489
1490 /// Returns `true` if the non-null raw slice has a length of 0.
1491 ///
1492 /// # Examples
1493 ///
1494 /// ```rust
1495 /// use std::ptr::NonNull;
1496 ///
1497 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1498 /// assert!(!slice.is_empty());
1499 /// ```
1500 #[stable(feature = "slice_ptr_is_empty_nonnull", since = "1.79.0")]
1501 #[rustc_const_stable(feature = "const_slice_ptr_is_empty_nonnull", since = "1.79.0")]
1502 #[must_use]
1503 #[inline]
1504 pub const fn is_empty(self) -> bool {
1505 self.len() == 0
1506 }
1507
1508 /// Returns a non-null pointer to the slice's buffer.
1509 ///
1510 /// # Examples
1511 ///
1512 /// ```rust
1513 /// #![feature(slice_ptr_get)]
1514 /// use std::ptr::NonNull;
1515 ///
1516 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1517 /// assert_eq!(slice.as_non_null_ptr(), NonNull::<i8>::dangling());
1518 /// ```
1519 #[inline]
1520 #[must_use]
1521 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1522 pub const fn as_non_null_ptr(self) -> NonNull<T> {
1523 self.cast()
1524 }
1525
1526 /// Returns a raw pointer to the slice's buffer.
1527 ///
1528 /// # Examples
1529 ///
1530 /// ```rust
1531 /// #![feature(slice_ptr_get)]
1532 /// use std::ptr::NonNull;
1533 ///
1534 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1535 /// assert_eq!(slice.as_mut_ptr(), NonNull::<i8>::dangling().as_ptr());
1536 /// ```
1537 #[inline]
1538 #[must_use]
1539 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1540 #[rustc_never_returns_null_ptr]
1541 pub const fn as_mut_ptr(self) -> *mut T {
1542 self.as_non_null_ptr().as_ptr()
1543 }
1544
    /// Returns a shared reference to a slice of possibly uninitialized values. In contrast to
    /// [`as_ref`], this does not require that the value has to be initialized.
    ///
    /// For the mutable counterpart see [`as_uninit_slice_mut`].
    ///
    /// [`as_ref`]: NonNull::as_ref
    /// [`as_uninit_slice_mut`]: NonNull::as_uninit_slice_mut
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that all of the following is true:
    ///
    /// * The pointer must be [valid] for reads for `ptr.len() * size_of::<T>()` many bytes,
    ///   and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single allocation!
    ///       Slices can never span across multiple allocations.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts`].
    ///
    /// [valid]: crate::ptr#safety
    #[inline]
    #[must_use]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_slice<'a>(self) -> &'a [MaybeUninit<T>] {
        // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
        // `self.cast()` reuses the data pointer and `self.len()` is the slice metadata,
        // so the resulting slice covers exactly the memory range vouched for above.
        unsafe { slice::from_raw_parts(self.cast().as_ptr(), self.len()) }
    }
1589
1590 /// Returns a unique reference to a slice of possibly uninitialized values. In contrast to
1591 /// [`as_mut`], this does not require that the value has to be initialized.
1592 ///
1593 /// For the shared counterpart see [`as_uninit_slice`].
1594 ///
1595 /// [`as_mut`]: NonNull::as_mut
1596 /// [`as_uninit_slice`]: NonNull::as_uninit_slice
1597 ///
1598 /// # Safety
1599 ///
1600 /// When calling this method, you have to ensure that all of the following is true:
1601 ///
1602 /// * The pointer must be [valid] for reads and writes for `ptr.len() * size_of::<T>()`
1603 /// many bytes, and it must be properly aligned. This means in particular:
1604 ///
1605 /// * The entire memory range of this slice must be contained within a single allocation!
1606 /// Slices can never span across multiple allocations.
1607 ///
1608 /// * The pointer must be aligned even for zero-length slices. One
1609 /// reason for this is that enum layout optimizations may rely on references
1610 /// (including slices of any length) being aligned and non-null to distinguish
1611 /// them from other data. You can obtain a pointer that is usable as `data`
1612 /// for zero-length slices using [`NonNull::dangling()`].
1613 ///
1614 /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1615 /// See the safety documentation of [`pointer::offset`].
1616 ///
1617 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1618 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1619 /// In particular, while this reference exists, the memory the pointer points to must
1620 /// not get accessed (read or written) through any other pointer.
1621 ///
1622 /// This applies even if the result of this method is unused!
1623 ///
1624 /// See also [`slice::from_raw_parts_mut`].
1625 ///
1626 /// [valid]: crate::ptr#safety
1627 ///
1628 /// # Examples
1629 ///
1630 /// ```rust
1631 /// #![feature(allocator_api, ptr_as_uninit)]
1632 ///
1633 /// use std::alloc::{Allocator, Layout, Global};
1634 /// use std::mem::MaybeUninit;
1635 /// use std::ptr::NonNull;
1636 ///
1637 /// let memory: NonNull<[u8]> = Global.allocate(Layout::new::<[u8; 32]>())?;
1638 /// // This is safe as `memory` is valid for reads and writes for `memory.len()` many bytes.
1639 /// // Note that calling `memory.as_mut()` is not allowed here as the content may be uninitialized.
1640 /// # #[allow(unused_variables)]
1641 /// let slice: &mut [MaybeUninit<u8>] = unsafe { memory.as_uninit_slice_mut() };
1642 /// # // Prevent leaks for Miri.
1643 /// # unsafe { Global.deallocate(memory.cast(), Layout::new::<[u8; 32]>()); }
1644 /// # Ok::<_, std::alloc::AllocError>(())
1645 /// ```
1646 #[inline]
1647 #[must_use]
1648 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1649 pub const unsafe fn as_uninit_slice_mut<'a>(self) -> &'a mut [MaybeUninit<T>] {
1650 // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
1651 unsafe { slice::from_raw_parts_mut(self.cast().as_ptr(), self.len()) }
1652 }
1653
1654 /// Returns a raw pointer to an element or subslice, without doing bounds
1655 /// checking.
1656 ///
1657 /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
1658 /// is *[undefined behavior]* even if the resulting pointer is not used.
1659 ///
1660 /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1661 ///
1662 /// # Examples
1663 ///
1664 /// ```
1665 /// #![feature(slice_ptr_get)]
1666 /// use std::ptr::NonNull;
1667 ///
1668 /// let x = &mut [1, 2, 4];
1669 /// let x = NonNull::slice_from_raw_parts(NonNull::new(x.as_mut_ptr()).unwrap(), x.len());
1670 ///
1671 /// unsafe {
1672 /// assert_eq!(x.get_unchecked_mut(1).as_ptr(), x.as_non_null_ptr().as_ptr().add(1));
1673 /// }
1674 /// ```
1675 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1676 #[rustc_const_unstable(feature = "const_index", issue = "143775")]
1677 #[inline]
1678 pub const unsafe fn get_unchecked_mut<I>(self, index: I) -> NonNull<I::Output>
1679 where
1680 I: [const] SliceIndex<[T]>,
1681 {
1682 // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
1683 // As a consequence, the resulting pointer cannot be null.
1684 unsafe { NonNull::new_unchecked(self.as_ptr().get_unchecked_mut(index)) }
1685 }
1686}
1687
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> Clone for NonNull<T> {
    #[inline(always)]
    #[ferrocene::prevalidated]
    fn clone(&self) -> Self {
        // `NonNull` is `Copy`, so cloning is a plain bitwise copy. This must
        // remain exactly equivalent to `Copy`: the file also provides an
        // (unsafe) `TrivialClone` impl for `NonNull`, which asserts precisely
        // that property.
        *self
    }
}
1696
// `NonNull` is just a non-null raw pointer, so a bitwise copy is always valid.
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> Copy for NonNull<T> {}
1699
#[doc(hidden)]
#[unstable(feature = "trivial_clone", issue = "none")]
// SAFETY: the `Clone` impl above returns `*self`, i.e. it is exactly
// equivalent to copying the value, which is what `TrivialClone` requires.
unsafe impl<T: PointeeSized> TrivialClone for NonNull<T> {}
1703
// Enable unsizing coercions on `NonNull` (e.g. `NonNull<[T; N]> -> NonNull<[T]>`),
// mirroring the coercions available on raw pointers.
#[unstable(feature = "coerce_unsized", issue = "18598")]
impl<T: PointeeSized, U: PointeeSized> CoerceUnsized<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
1706
// Allow `NonNull<Self>` to be used as a method receiver for dynamic dispatch
// on trait objects.
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: PointeeSized, U: PointeeSized> DispatchFromDyn<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
1709
#[stable(feature = "pin", since = "1.33.0")]
// SAFETY: `NonNull` has no `Deref`/`DerefMut` impls, so unsizing a pinned
// `NonNull` cannot be abused to move or otherwise invalidate the pointee.
unsafe impl<T: PointeeSized> PinCoerceUnsized for NonNull<T> {}
1712
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> fmt::Debug for NonNull<T> {
    #[ferrocene::prevalidated]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Debug output is deliberately the pointer's address, identical to the
        // `fmt::Pointer` impl below; there is no pointee to print.
        fmt::Pointer::fmt(&self.as_ptr(), f)
    }
}
1720
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> fmt::Pointer for NonNull<T> {
    #[ferrocene::prevalidated]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Format the underlying raw pointer's address.
        fmt::Pointer::fmt(&self.as_ptr(), f)
    }
}
1728
// Raw-pointer equality is a full equivalence relation, so `Eq` holds.
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> Eq for NonNull<T> {}
1731
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> PartialEq for NonNull<T> {
    #[inline]
    // Comparing wide pointers compares both address and metadata; that is the
    // intended semantics here, so the lint is silenced.
    #[allow(ambiguous_wide_pointer_comparisons)]
    #[ferrocene::prevalidated]
    fn eq(&self, other: &Self) -> bool {
        // Equality is defined by the underlying raw pointers.
        self.as_ptr() == other.as_ptr()
    }
}
1741
1742#[stable(feature = "nonnull", since = "1.25.0")]
1743impl<T: PointeeSized> Ord for NonNull<T> {
1744 #[inline]
1745 #[allow(ambiguous_wide_pointer_comparisons)]
1746 fn cmp(&self, other: &Self) -> Ordering {
1747 self.as_ptr().cmp(&other.as_ptr())
1748 }
1749}
1750
1751#[stable(feature = "nonnull", since = "1.25.0")]
1752impl<T: PointeeSized> PartialOrd for NonNull<T> {
1753 #[inline]
1754 #[allow(ambiguous_wide_pointer_comparisons)]
1755 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
1756 self.as_ptr().partial_cmp(&other.as_ptr())
1757 }
1758}
1759
1760#[stable(feature = "nonnull", since = "1.25.0")]
1761impl<T: PointeeSized> hash::Hash for NonNull<T> {
1762 #[inline]
1763 fn hash<H: hash::Hasher>(&self, state: &mut H) {
1764 self.as_ptr().hash(state)
1765 }
1766}
1767
#[unstable(feature = "ptr_internals", issue = "none")]
#[rustc_const_unstable(feature = "const_convert", issue = "143773")]
impl<T: PointeeSized> const From<Unique<T>> for NonNull<T> {
    /// Converts a `Unique<T>` to a `NonNull<T>`.
    ///
    /// This is infallible: `Unique` already guarantees non-nullness, so the
    /// conversion merely forgets the uniqueness invariant.
    #[inline]
    fn from(unique: Unique<T>) -> Self {
        unique.as_non_null_ptr()
    }
}
1776
#[stable(feature = "nonnull", since = "1.25.0")]
#[rustc_const_unstable(feature = "const_convert", issue = "143773")]
impl<T: PointeeSized> const From<&mut T> for NonNull<T> {
    /// Converts a `&mut T` to a `NonNull<T>`.
    ///
    /// This conversion is safe and infallible since references cannot be null.
    #[inline]
    #[ferrocene::prevalidated]
    fn from(r: &mut T) -> Self {
        // Delegate to the inherent constructor for mutable references.
        NonNull::from_mut(r)
    }
}
1789
#[stable(feature = "nonnull", since = "1.25.0")]
#[rustc_const_unstable(feature = "const_convert", issue = "143773")]
impl<T: PointeeSized> const From<&T> for NonNull<T> {
    /// Converts a `&T` to a `NonNull<T>`.
    ///
    /// This conversion is safe and infallible since references cannot be null.
    #[inline]
    fn from(r: &T) -> Self {
        // Delegate to the inherent constructor for shared references.
        NonNull::from_ref(r)
    }
}