core/ptr/
mut_ptr.rs

1use super::*;
2#[cfg(not(feature = "ferrocene_certified"))]
3use crate::cmp::Ordering::{Equal, Greater, Less};
4use crate::intrinsics::const_eval_select;
5#[cfg(not(feature = "ferrocene_certified"))]
6use crate::marker::{Destruct, PointeeSized};
7#[cfg(not(feature = "ferrocene_certified"))]
8use crate::mem::{self, SizedTypeProperties};
9#[cfg(not(feature = "ferrocene_certified"))]
10use crate::slice::{self, SliceIndex};
11
12// Ferrocene addition: imports for certified subset
13#[cfg(feature = "ferrocene_certified")]
14#[rustfmt::skip]
15use crate::marker::PointeeSized;
16
17impl<T: PointeeSized> *mut T {
18    #[doc = include_str!("docs/is_null.md")]
19    ///
20    /// # Examples
21    ///
22    /// ```
23    /// let mut s = [1, 2, 3];
24    /// let ptr: *mut u32 = s.as_mut_ptr();
25    /// assert!(!ptr.is_null());
26    /// ```
27    #[stable(feature = "rust1", since = "1.0.0")]
28    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
29    #[rustc_diagnostic_item = "ptr_is_null"]
30    #[inline]
31    #[cfg(not(feature = "ferrocene_certified"))]
32    pub const fn is_null(self) -> bool {
33        self.cast_const().is_null()
34    }
35
36    /// Casts to a pointer of another type.
37    #[stable(feature = "ptr_cast", since = "1.38.0")]
38    #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
39    #[rustc_diagnostic_item = "ptr_cast"]
40    #[inline(always)]
41    pub const fn cast<U>(self) -> *mut U {
42        self as _
43    }
44
45    /// Try to cast to a pointer of another type by checking alignment.
46    ///
47    /// If the pointer is properly aligned to the target type, it will be
48    /// cast to the target type. Otherwise, `None` is returned.
49    ///
50    /// # Examples
51    ///
52    /// ```rust
53    /// #![feature(pointer_try_cast_aligned)]
54    ///
55    /// let mut x = 0u64;
56    ///
57    /// let aligned: *mut u64 = &mut x;
58    /// let unaligned = unsafe { aligned.byte_add(1) };
59    ///
60    /// assert!(aligned.try_cast_aligned::<u32>().is_some());
61    /// assert!(unaligned.try_cast_aligned::<u32>().is_none());
62    /// ```
63    #[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
64    #[must_use = "this returns the result of the operation, \
65                  without modifying the original"]
66    #[inline]
67    #[cfg(not(feature = "ferrocene_certified"))]
68    pub fn try_cast_aligned<U>(self) -> Option<*mut U> {
69        if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
70    }
71
72    /// Uses the address value in a new pointer of another type.
73    ///
74    /// This operation will ignore the address part of its `meta` operand and discard existing
75    /// metadata of `self`. For pointers to a sized types (thin pointers), this has the same effect
76    /// as a simple cast. For pointers to an unsized type (fat pointers) this recombines the address
77    /// with new metadata such as slice lengths or `dyn`-vtable.
78    ///
79    /// The resulting pointer will have provenance of `self`. This operation is semantically the
80    /// same as creating a new pointer with the data pointer value of `self` but the metadata of
81    /// `meta`, being fat or thin depending on the `meta` operand.
82    ///
83    /// # Examples
84    ///
85    /// This function is primarily useful for enabling pointer arithmetic on potentially fat
86    /// pointers. The pointer is cast to a sized pointee to utilize offset operations and then
87    /// recombined with its own original metadata.
88    ///
89    /// ```
90    /// #![feature(set_ptr_value)]
91    /// # use core::fmt::Debug;
92    /// let mut arr: [i32; 3] = [1, 2, 3];
93    /// let mut ptr = arr.as_mut_ptr() as *mut dyn Debug;
94    /// let thin = ptr as *mut u8;
95    /// unsafe {
96    ///     ptr = thin.add(8).with_metadata_of(ptr);
97    ///     # assert_eq!(*(ptr as *mut i32), 3);
98    ///     println!("{:?}", &*ptr); // will print "3"
99    /// }
100    /// ```
101    ///
102    /// # *Incorrect* usage
103    ///
104    /// The provenance from pointers is *not* combined. The result must only be used to refer to the
105    /// address allowed by `self`.
106    ///
107    /// ```rust,no_run
108    /// #![feature(set_ptr_value)]
109    /// let mut x = 0u32;
110    /// let mut y = 1u32;
111    ///
112    /// let x = (&mut x) as *mut u32;
113    /// let y = (&mut y) as *mut u32;
114    ///
115    /// let offset = (x as usize - y as usize) / 4;
116    /// let bad = x.wrapping_add(offset).with_metadata_of(y);
117    ///
118    /// // This dereference is UB. The pointer only has provenance for `x` but points to `y`.
119    /// println!("{:?}", unsafe { &*bad });
120    /// ```
121    #[unstable(feature = "set_ptr_value", issue = "75091")]
122    #[must_use = "returns a new pointer rather than modifying its argument"]
123    #[inline]
124    #[cfg(not(feature = "ferrocene_certified"))]
125    pub const fn with_metadata_of<U>(self, meta: *const U) -> *mut U
126    where
127        U: PointeeSized,
128    {
129        from_raw_parts_mut::<U>(self as *mut (), metadata(meta))
130    }
131
132    /// Changes constness without changing the type.
133    ///
134    /// This is a bit safer than `as` because it wouldn't silently change the type if the code is
135    /// refactored.
136    ///
137    /// While not strictly required (`*mut T` coerces to `*const T`), this is provided for symmetry
138    /// with [`cast_mut`] on `*const T` and may have documentation value if used instead of implicit
139    /// coercion.
140    ///
141    /// [`cast_mut`]: pointer::cast_mut
142    #[stable(feature = "ptr_const_cast", since = "1.65.0")]
143    #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")]
144    #[rustc_diagnostic_item = "ptr_cast_const"]
145    #[inline(always)]
146    pub const fn cast_const(self) -> *const T {
147        self as _
148    }
149
    #[doc = include_str!("./docs/addr.md")]
    ///
    /// [without_provenance]: without_provenance_mut
    #[must_use]
    #[inline(always)]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn addr(self) -> usize {
        // A pointer-to-integer transmute currently has exactly the right semantics: it returns the
        // address without exposing the provenance. Note that this is *not* a stable guarantee about
        // transmute semantics, it relies on sysroot crates having special status.
        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
        // provenance).
        // The `cast::<()>()` first discards any metadata, so only the thin data pointer
        // (a `usize`-sized value) is handed to `transmute`.
        unsafe { mem::transmute(self.cast::<()>()) }
    }
164
165    /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
166    /// [`with_exposed_provenance_mut`] and returns the "address" portion.
167    ///
168    /// This is equivalent to `self as usize`, which semantically discards provenance information.
169    /// Furthermore, this (like the `as` cast) has the implicit side-effect of marking the
170    /// provenance as 'exposed', so on platforms that support it you can later call
171    /// [`with_exposed_provenance_mut`] to reconstitute the original pointer including its provenance.
172    ///
173    /// Due to its inherent ambiguity, [`with_exposed_provenance_mut`] may not be supported by tools
174    /// that help you to stay conformant with the Rust memory model. It is recommended to use
175    /// [Strict Provenance][crate::ptr#strict-provenance] APIs such as [`with_addr`][pointer::with_addr]
176    /// wherever possible, in which case [`addr`][pointer::addr] should be used instead of `expose_provenance`.
177    ///
178    /// On most platforms this will produce a value with the same bytes as the original pointer,
179    /// because all the bytes are dedicated to describing the address. Platforms which need to store
180    /// additional information in the pointer may not support this operation, since the 'expose'
181    /// side-effect which is required for [`with_exposed_provenance_mut`] to work is typically not
182    /// available.
183    ///
184    /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
185    ///
186    /// [`with_exposed_provenance_mut`]: with_exposed_provenance_mut
187    #[inline(always)]
188    #[stable(feature = "exposed_provenance", since = "1.84.0")]
189    #[cfg(not(feature = "ferrocene_certified"))]
190    pub fn expose_provenance(self) -> usize {
191        self.cast::<()>() as usize
192    }
193
194    /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
195    /// `self`.
196    ///
197    /// This is similar to a `addr as *mut T` cast, but copies
198    /// the *provenance* of `self` to the new pointer.
199    /// This avoids the inherent ambiguity of the unary cast.
200    ///
201    /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset
202    /// `self` to the given address, and therefore has all the same capabilities and restrictions.
203    ///
204    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
205    #[must_use]
206    #[inline]
207    #[stable(feature = "strict_provenance", since = "1.84.0")]
208    #[cfg(not(feature = "ferrocene_certified"))]
209    pub fn with_addr(self, addr: usize) -> Self {
210        // This should probably be an intrinsic to avoid doing any sort of arithmetic, but
211        // meanwhile, we can implement it with `wrapping_offset`, which preserves the pointer's
212        // provenance.
213        let self_addr = self.addr() as isize;
214        let dest_addr = addr as isize;
215        let offset = dest_addr.wrapping_sub(self_addr);
216        self.wrapping_byte_offset(offset)
217    }
218
219    /// Creates a new pointer by mapping `self`'s address to a new one, preserving the original
220    /// pointer's [provenance][crate::ptr#provenance].
221    ///
222    /// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details.
223    ///
224    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
225    #[must_use]
226    #[inline]
227    #[stable(feature = "strict_provenance", since = "1.84.0")]
228    #[cfg(not(feature = "ferrocene_certified"))]
229    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
230        self.with_addr(f(self.addr()))
231    }
232
233    /// Decompose a (possibly wide) pointer into its data pointer and metadata components.
234    ///
235    /// The pointer can be later reconstructed with [`from_raw_parts_mut`].
236    #[unstable(feature = "ptr_metadata", issue = "81513")]
237    #[inline]
238    #[cfg(not(feature = "ferrocene_certified"))]
239    pub const fn to_raw_parts(self) -> (*mut (), <T as super::Pointee>::Metadata) {
240        (self.cast(), super::metadata(self))
241    }
242
243    #[doc = include_str!("./docs/as_ref.md")]
244    ///
245    /// ```
246    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
247    ///
248    /// unsafe {
249    ///     let val_back = &*ptr;
250    ///     println!("We got back the value: {val_back}!");
251    /// }
252    /// ```
253    ///
254    /// # Examples
255    ///
256    /// ```
257    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
258    ///
259    /// unsafe {
260    ///     if let Some(val_back) = ptr.as_ref() {
261    ///         println!("We got back the value: {val_back}!");
262    ///     }
263    /// }
264    /// ```
265    ///
266    /// # See Also
267    ///
268    /// For the mutable counterpart see [`as_mut`].
269    ///
270    /// [`is_null`]: #method.is_null-1
271    /// [`as_uninit_ref`]: pointer#method.as_uninit_ref-1
272    /// [`as_mut`]: #method.as_mut
273
274    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
275    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
276    #[inline]
277    #[cfg(not(feature = "ferrocene_certified"))]
278    pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
279        // SAFETY: the caller must guarantee that `self` is valid for a
280        // reference if it isn't null.
281        if self.is_null() { None } else { unsafe { Some(&*self) } }
282    }
283
    /// Returns a shared reference to the value behind the pointer.
    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_ref`] must be used instead.
    /// If the pointer may be null, but the value is known to have been initialized, [`as_ref`] must be used instead.
    ///
    /// For the mutable counterpart see [`as_mut_unchecked`].
    ///
    /// [`as_ref`]: #method.as_ref
    /// [`as_uninit_ref`]: #method.as_uninit_ref
    /// [`as_mut_unchecked`]: #method.as_mut_unchecked
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_ref_unchecked)]
    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
    ///
    /// unsafe {
    ///     println!("We got back the value: {}!", ptr.as_ref_unchecked());
    /// }
    /// ```
    // FIXME: mention it in the docs for `as_ref` and `as_uninit_ref` once stabilized.
    #[unstable(feature = "ptr_as_ref_unchecked", issue = "122034")]
    #[inline]
    #[must_use]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn as_ref_unchecked<'a>(self) -> &'a T {
        // Unlike `as_ref`, no null check is performed here; non-null-ness is
        // part of the caller's contract.
        // SAFETY: the caller must guarantee that `self` is valid for a reference
        unsafe { &*self }
    }
317
318    #[doc = include_str!("./docs/as_uninit_ref.md")]
319    ///
320    /// [`is_null`]: #method.is_null-1
321    /// [`as_ref`]: pointer#method.as_ref-1
322    ///
323    /// # See Also
324    /// For the mutable counterpart see [`as_uninit_mut`].
325    ///
326    /// [`as_uninit_mut`]: #method.as_uninit_mut
327    ///
328    /// # Examples
329    ///
330    /// ```
331    /// #![feature(ptr_as_uninit)]
332    ///
333    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
334    ///
335    /// unsafe {
336    ///     if let Some(val_back) = ptr.as_uninit_ref() {
337    ///         println!("We got back the value: {}!", val_back.assume_init());
338    ///     }
339    /// }
340    /// ```
341    #[inline]
342    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
343    #[cfg(not(feature = "ferrocene_certified"))]
344    pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
345    where
346        T: Sized,
347    {
348        // SAFETY: the caller must guarantee that `self` meets all the
349        // requirements for a reference.
350        if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
351    }
352
    #[doc = include_str!("./docs/offset.md")]
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    ///
    /// unsafe {
    ///     assert_eq!(2, *ptr.offset(1));
    ///     assert_eq!(3, *ptr.offset(2));
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn offset(self, count: isize) -> *mut T
    where
        T: Sized,
    {
        // Helper for the precondition check below: does `this + count * size` stay
        // within the address space at runtime? In const contexts it returns `true`
        // (no failure reported here) since the const interpreter checks this itself.
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
            // We can use const_eval_select here because this is only for UB checks.
            const_eval_select!(
                @capture { this: *const (), count: isize, size: usize } -> bool:
                if const {
                    true
                } else {
                    // `size` is the size of a Rust type, so we know that
                    // `size <= isize::MAX` and thus `as` cast here is not lossy.
                    let Some(byte_offset) = count.checked_mul(size as isize) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
                    !overflow
                }
            )
        }

        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::offset requires the address calculation to not overflow",
            (
                this: *const () = self as *const (),
                count: isize = count,
                size: usize = size_of::<T>(),
            ) => runtime_offset_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        // The obtained pointer is valid for writes since the caller must
        // guarantee that it points to the same allocation as `self`.
        unsafe { intrinsics::offset(self, count) }
    }
411
412    /// Adds a signed offset in bytes to a pointer.
413    ///
414    /// `count` is in units of **bytes**.
415    ///
416    /// This is purely a convenience for casting to a `u8` pointer and
417    /// using [offset][pointer::offset] on it. See that method for documentation
418    /// and safety requirements.
419    ///
420    /// For non-`Sized` pointees this operation changes only the data pointer,
421    /// leaving the metadata untouched.
422    #[must_use]
423    #[inline(always)]
424    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
425    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
426    #[track_caller]
427    #[cfg(not(feature = "ferrocene_certified"))]
428    pub const unsafe fn byte_offset(self, count: isize) -> Self {
429        // SAFETY: the caller must uphold the safety contract for `offset`.
430        unsafe { self.cast::<u8>().offset(count).with_metadata_of(self) }
431    }
432
    /// Adds a signed offset to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocation] that `self` points to
    /// (this is called "[Provenance](ptr/index.html#provenance)").
    /// The pointer must not be used to read or write other allocations.
    ///
    /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocation.
    ///
    /// Compared to [`offset`], this method basically delays the requirement of staying within the
    /// same allocation: [`offset`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
    /// words, leaving the allocation and then re-entering it later is permitted.
    ///
    /// [`offset`]: #method.offset
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements
    /// let mut data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *mut u8 = data.as_mut_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_offset(6);
    ///
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         *ptr = 0;
    ///     }
    ///     ptr = ptr.wrapping_offset(step);
    /// }
    /// assert_eq!(&data, &[0, 2, 0, 4, 0]);
    /// ```
    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn wrapping_offset(self, count: isize) -> *mut T
    where
        T: Sized,
    {
        // Unlike `offset`, this lowers to the wrapping `arith_offset` intrinsic,
        // so no in-bounds precondition check is needed here.
        // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
        unsafe { intrinsics::arith_offset(self, count) as *mut T }
    }
494
495    /// Adds a signed offset in bytes to a pointer using wrapping arithmetic.
496    ///
497    /// `count` is in units of **bytes**.
498    ///
499    /// This is purely a convenience for casting to a `u8` pointer and
500    /// using [wrapping_offset][pointer::wrapping_offset] on it. See that method
501    /// for documentation.
502    ///
503    /// For non-`Sized` pointees this operation changes only the data pointer,
504    /// leaving the metadata untouched.
505    #[must_use]
506    #[inline(always)]
507    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
508    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
509    #[cfg(not(feature = "ferrocene_certified"))]
510    pub const fn wrapping_byte_offset(self, count: isize) -> Self {
511        self.cast::<u8>().wrapping_offset(count).with_metadata_of(self)
512    }
513
514    /// Masks out bits of the pointer according to a mask.
515    ///
516    /// This is convenience for `ptr.map_addr(|a| a & mask)`.
517    ///
518    /// For non-`Sized` pointees this operation changes only the data pointer,
519    /// leaving the metadata untouched.
520    ///
521    /// ## Examples
522    ///
523    /// ```
524    /// #![feature(ptr_mask)]
525    /// let mut v = 17_u32;
526    /// let ptr: *mut u32 = &mut v;
527    ///
528    /// // `u32` is 4 bytes aligned,
529    /// // which means that lower 2 bits are always 0.
530    /// let tag_mask = 0b11;
531    /// let ptr_mask = !tag_mask;
532    ///
533    /// // We can store something in these lower bits
534    /// let tagged_ptr = ptr.map_addr(|a| a | 0b10);
535    ///
536    /// // Get the "tag" back
537    /// let tag = tagged_ptr.addr() & tag_mask;
538    /// assert_eq!(tag, 0b10);
539    ///
540    /// // Note that `tagged_ptr` is unaligned, it's UB to read from/write to it.
541    /// // To get original pointer `mask` can be used:
542    /// let masked_ptr = tagged_ptr.mask(ptr_mask);
543    /// assert_eq!(unsafe { *masked_ptr }, 17);
544    ///
545    /// unsafe { *masked_ptr = 0 };
546    /// assert_eq!(v, 0);
547    /// ```
548    #[unstable(feature = "ptr_mask", issue = "98290")]
549    #[must_use = "returns a new pointer rather than modifying its argument"]
550    #[inline(always)]
551    #[cfg(not(feature = "ferrocene_certified"))]
552    pub fn mask(self, mask: usize) -> *mut T {
553        intrinsics::ptr_mask(self.cast::<()>(), mask).cast_mut().with_metadata_of(self)
554    }
555
    /// Returns `None` if the pointer is null, or else returns a unique reference to
    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_mut`]
    /// must be used instead.
    ///
    /// For the shared counterpart see [`as_ref`].
    ///
    /// [`as_uninit_mut`]: #method.as_uninit_mut
    /// [`as_ref`]: pointer#method.as_ref-1
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either*
    /// the pointer is null *or*
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null-1
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// let first_value = unsafe { ptr.as_mut().unwrap() };
    /// *first_value = 4;
    /// # assert_eq!(s, [4, 2, 3]);
    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
    /// ```
    ///
    /// # Null-unchecked version
    ///
    /// If you are sure the pointer can never be null and are looking for some kind of
    /// `as_mut_unchecked` that returns the `&mut T` instead of `Option<&mut T>`, know that
    /// you can dereference the pointer directly.
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// let first_value = unsafe { &mut *ptr };
    /// *first_value = 4;
    /// # assert_eq!(s, [4, 2, 3]);
    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
    /// ```
    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
    #[inline]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn as_mut<'a>(self) -> Option<&'a mut T> {
        // SAFETY: the caller must guarantee that `self` is valid for
        // a mutable reference if it isn't null.
        if self.is_null() { None } else { unsafe { Some(&mut *self) } }
    }
612
    /// Returns a unique reference to the value behind the pointer.
    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_mut`] must be used instead.
    /// If the pointer may be null, but the value is known to have been initialized, [`as_mut`] must be used instead.
    ///
    /// For the shared counterpart see [`as_ref_unchecked`].
    ///
    /// [`as_mut`]: #method.as_mut
    /// [`as_uninit_mut`]: #method.as_uninit_mut
    /// [`as_ref_unchecked`]: #method.as_ref_unchecked
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_ref_unchecked)]
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// let first_value = unsafe { ptr.as_mut_unchecked() };
    /// *first_value = 4;
    /// # assert_eq!(s, [4, 2, 3]);
    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
    /// ```
    // FIXME: mention it in the docs for `as_mut` and `as_uninit_mut` once stabilized.
    #[unstable(feature = "ptr_as_ref_unchecked", issue = "122034")]
    #[inline]
    #[must_use]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn as_mut_unchecked<'a>(self) -> &'a mut T {
        // No null check here; non-null-ness is part of the caller's contract.
        // SAFETY: the caller must guarantee that `self` is valid for a reference
        unsafe { &mut *self }
    }
648
649    /// Returns `None` if the pointer is null, or else returns a unique reference to
650    /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
651    /// that the value has to be initialized.
652    ///
653    /// For the shared counterpart see [`as_uninit_ref`].
654    ///
655    /// [`as_mut`]: #method.as_mut
656    /// [`as_uninit_ref`]: pointer#method.as_uninit_ref-1
657    ///
658    /// # Safety
659    ///
660    /// When calling this method, you have to ensure that *either* the pointer is null *or*
661    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
662    ///
663    /// # Panics during const evaluation
664    ///
665    /// This method will panic during const evaluation if the pointer cannot be
666    /// determined to be null or not. See [`is_null`] for more information.
667    ///
668    /// [`is_null`]: #method.is_null-1
669    #[inline]
670    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
671    #[cfg(not(feature = "ferrocene_certified"))]
672    pub const unsafe fn as_uninit_mut<'a>(self) -> Option<&'a mut MaybeUninit<T>>
673    where
674        T: Sized,
675    {
676        // SAFETY: the caller must guarantee that `self` meets all the
677        // requirements for a reference.
678        if self.is_null() { None } else { Some(unsafe { &mut *(self as *mut MaybeUninit<T>) }) }
679    }
680
681    /// Returns whether two pointers are guaranteed to be equal.
682    ///
683    /// At runtime this function behaves like `Some(self == other)`.
684    /// However, in some contexts (e.g., compile-time evaluation),
685    /// it is not always possible to determine equality of two pointers, so this function may
686    /// spuriously return `None` for pointers that later actually turn out to have its equality known.
687    /// But when it returns `Some`, the pointers' equality is guaranteed to be known.
688    ///
689    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
690    /// version and unsafe code must not
691    /// rely on the result of this function for soundness. It is suggested to only use this function
692    /// for performance optimizations where spurious `None` return values by this function do not
693    /// affect the outcome, but just the performance.
694    /// The consequences of using this method to make runtime and compile-time code behave
695    /// differently have not been explored. This method should not be used to introduce such
696    /// differences, and it should also not be stabilized before we have a better understanding
697    /// of this issue.
698    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
699    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
700    #[inline]
701    #[cfg(not(feature = "ferrocene_certified"))]
702    pub const fn guaranteed_eq(self, other: *mut T) -> Option<bool>
703    where
704        T: Sized,
705    {
706        (self as *const T).guaranteed_eq(other as _)
707    }
708
    /// Returns whether two pointers are guaranteed to be inequal.
    ///
    /// At runtime this function behaves like `Some(self != other)`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine inequality of two pointers, so this function may
    /// spuriously return `None` for pointers that later actually turn out to have their inequality
    /// known. But when it returns `Some`, the pointers' inequality is guaranteed to be known.
    ///
    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
    /// version, and unsafe code must not rely on the result of this function for soundness. It is
    /// suggested to only use this function for performance optimizations where spurious `None`
    /// return values by this function do not affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[inline]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn guaranteed_ne(self, other: *mut T) -> Option<bool>
    where
        T: Sized,
    {
        (self as *const T).guaranteed_ne(other as _)
    }
736
    /// Calculates the distance between two pointers within the same allocation. The returned value is in
    /// units of T: the distance in bytes divided by `size_of::<T>()`.
    ///
    /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
    /// except that it has a lot more opportunities for UB, in exchange for the compiler
    /// better understanding what you are doing.
    ///
    /// The primary motivation of this method is for computing the `len` of an array/slice
    /// of `T` that you are currently representing as a "start" and "end" pointer
    /// (and "end" is "one past the end" of the array).
    /// In that case, `end.offset_from(start)` gets you the length of the array.
    ///
    /// All of the following safety requirements are trivially satisfied for this use case.
    ///
    /// [`offset`]: pointer#method.offset-1
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * `self` and `origin` must either
    ///
    ///   * point to the same address, or
    ///   * both be [derived from][crate::ptr#provenance] a pointer to the same [allocation], and the memory range between
    ///     the two pointers must be in bounds of that object. (See below for an example.)
    ///
    /// * The distance between the pointers, in bytes, must be an exact multiple
    ///   of the size of `T`.
    ///
    /// As a consequence, the absolute distance between the pointers, in bytes, computed on
    /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
    /// implied by the in-bounds requirement, and the fact that no allocation can be larger
    /// than `isize::MAX` bytes.
    ///
    /// The requirement for pointers to be derived from the same allocation is primarily
    /// needed for `const`-compatibility: the distance between pointers into *different* allocated
    /// objects is not known at compile-time. However, the requirement also exists at
    /// runtime and may be exploited by optimizations. If you wish to compute the difference between
    /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
    /// origin as isize) / size_of::<T>()`.
    // FIXME: recommend `addr()` instead of `as usize` once that is stable.
    ///
    /// [`add`]: #method.add
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let mut a = [0; 5];
    /// let ptr1: *mut i32 = &mut a[1];
    /// let ptr2: *mut i32 = &mut a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
    ///     assert_eq!(ptr1.offset(2), ptr2);
    ///     assert_eq!(ptr2.offset(-2), ptr1);
    /// }
    /// ```
    ///
    /// *Incorrect* usage:
    ///
    /// ```rust,no_run
    /// let ptr1 = Box::into_raw(Box::new(0u8));
    /// let ptr2 = Box::into_raw(Box::new(1u8));
    /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
    /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
    /// let ptr2_other = (ptr1 as *mut u8).wrapping_offset(diff).wrapping_offset(1);
    /// assert_eq!(ptr2 as usize, ptr2_other as usize);
    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
    /// // computing their offset is undefined behavior, even though
    /// // they point to addresses that are in-bounds of the same object!
    /// unsafe {
    ///     let one = ptr2_other.offset_from(ptr2); // Undefined Behavior! ⚠️
    /// }
    /// ```
    #[stable(feature = "ptr_offset_from", since = "1.47.0")]
    #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn offset_from(self, origin: *const T) -> isize
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset_from`;
        // the cast to `*const T` changes neither address nor provenance.
        unsafe { (self as *const T).offset_from(origin) }
    }
830
831    /// Calculates the distance between two pointers within the same allocation. The returned value is in
832    /// units of **bytes**.
833    ///
834    /// This is purely a convenience for casting to a `u8` pointer and
835    /// using [`offset_from`][pointer::offset_from] on it. See that method for
836    /// documentation and safety requirements.
837    ///
838    /// For non-`Sized` pointees this operation considers only the data pointers,
839    /// ignoring the metadata.
840    #[inline(always)]
841    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
842    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
843    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
844    #[cfg(not(feature = "ferrocene_certified"))]
845    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: *const U) -> isize {
846        // SAFETY: the caller must uphold the safety contract for `offset_from`.
847        unsafe { self.cast::<u8>().offset_from(origin.cast::<u8>()) }
848    }
849
    /// Calculates the distance between two pointers within the same allocation, *where it's known that
    /// `self` is equal to or greater than `origin`*. The returned value is in
    /// units of T: the distance in bytes is divided by `size_of::<T>()`.
    ///
    /// This computes the same value that [`offset_from`](#method.offset_from)
    /// would compute, but with the added precondition that the offset is
    /// guaranteed to be non-negative. This method is equivalent to
    /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
    /// but it provides slightly more information to the optimizer, which can
    /// sometimes allow it to optimize slightly better with some backends.
    ///
    /// This method can be thought of as recovering the `count` that was passed
    /// to [`add`](#method.add) (or, with the parameters in the other order,
    /// to [`sub`](#method.sub)). The following are all equivalent, assuming
    /// that their safety preconditions are met:
    /// ```rust
    /// # unsafe fn blah(ptr: *mut i32, origin: *mut i32, count: usize) -> bool { unsafe {
    /// ptr.offset_from_unsigned(origin) == count
    /// # &&
    /// origin.add(count) == ptr
    /// # &&
    /// ptr.sub(count) == origin
    /// # } }
    /// ```
    ///
    /// # Safety
    ///
    /// - The distance between the pointers must be non-negative (`self >= origin`)
    ///
    /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
    ///   apply to this method as well; see it for the full details.
    ///
    /// Importantly, despite the return type of this method being able to represent
    /// a larger offset, it's still *not permitted* to pass pointers which differ
    /// by more than `isize::MAX` *bytes*. As such, the result of this method will
    /// always be less than or equal to `isize::MAX as usize`.
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// ```
    /// let mut a = [0; 5];
    /// let p: *mut i32 = a.as_mut_ptr();
    /// unsafe {
    ///     let ptr1: *mut i32 = p.add(1);
    ///     let ptr2: *mut i32 = p.add(3);
    ///
    ///     assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
    ///     assert_eq!(ptr1.add(2), ptr2);
    ///     assert_eq!(ptr2.sub(2), ptr1);
    ///     assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
    /// }
    ///
    /// // This would be incorrect, as the pointers are not correctly ordered:
    /// // ptr1.offset_from(ptr2)
    /// ```
    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
    #[inline]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn offset_from_unsigned(self, origin: *const T) -> usize
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`;
        // the cast to `*const T` changes neither address nor provenance.
        unsafe { (self as *const T).offset_from_unsigned(origin) }
    }
921
922    /// Calculates the distance between two pointers within the same allocation, *where it's known that
923    /// `self` is equal to or greater than `origin`*. The returned value is in
924    /// units of **bytes**.
925    ///
926    /// This is purely a convenience for casting to a `u8` pointer and
927    /// using [`offset_from_unsigned`][pointer::offset_from_unsigned] on it.
928    /// See that method for documentation and safety requirements.
929    ///
930    /// For non-`Sized` pointees this operation considers only the data pointers,
931    /// ignoring the metadata.
932    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
933    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
934    #[inline]
935    #[track_caller]
936    #[cfg(not(feature = "ferrocene_certified"))]
937    pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: *mut U) -> usize {
938        // SAFETY: the caller must uphold the safety contract for `byte_offset_from_unsigned`.
939        unsafe { (self as *const T).byte_offset_from_unsigned(origin) }
940    }
941
    #[doc = include_str!("./docs/add.md")]
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s: String = "123".to_string();
    /// let ptr: *mut u8 = s.as_mut_ptr();
    ///
    /// unsafe {
    ///     assert_eq!('2', *ptr.add(1) as char);
    ///     assert_eq!('3', *ptr.add(2) as char);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[track_caller]
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // Returns `true` iff `this + count * size` neither exceeds `isize::MAX`
        // bytes nor wraps around the address space; only used to gate the
        // debug-only UB precondition check below.
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    // Vacuously pass at compile time; this predicate only backs a
                    // runtime debug check.
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add(byte_offset);
                    byte_offset <= (isize::MAX as usize) && !overflow
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::add requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_add_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }
996
997    /// Adds an unsigned offset in bytes to a pointer.
998    ///
999    /// `count` is in units of bytes.
1000    ///
1001    /// This is purely a convenience for casting to a `u8` pointer and
1002    /// using [add][pointer::add] on it. See that method for documentation
1003    /// and safety requirements.
1004    ///
1005    /// For non-`Sized` pointees this operation changes only the data pointer,
1006    /// leaving the metadata untouched.
1007    #[must_use]
1008    #[inline(always)]
1009    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1010    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1011    #[track_caller]
1012    #[cfg(not(feature = "ferrocene_certified"))]
1013    pub const unsafe fn byte_add(self, count: usize) -> Self {
1014        // SAFETY: the caller must uphold the safety contract for `add`.
1015        unsafe { self.cast::<u8>().add(count).with_metadata_of(self) }
1016    }
1017
    /// Subtracts an unsigned offset from a pointer.
    ///
    /// This can only move the pointer backward (or not move it). If you need to move forward or
    /// backward depending on the value, then you might want [`offset`](#method.offset) instead
    /// which takes a signed offset.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
    ///   "wrapping around"), must fit in an `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
    ///   [allocation], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocation. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// Consider using [`wrapping_sub`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_sub`]: #method.wrapping_sub
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    ///
    /// unsafe {
    ///     let end: *const u8 = s.as_ptr().add(3);
    ///     assert_eq!('3', *end.sub(1) as char);
    ///     assert_eq!('2', *end.sub(2) as char);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        // Returns `true` iff `count * size` bytes neither exceeds `isize::MAX`
        // nor underflows below address zero; only used to gate the debug-only
        // UB precondition check below.
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    // Vacuously pass at compile time; this predicate only backs a
                    // runtime debug check.
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::sub requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_sub_nowrap(this, count, size)
        );

        if T::IS_ZST {
            // Pointer arithmetic does nothing when the pointee is a ZST.
            self
        } else {
            // SAFETY: the caller must uphold the safety contract for `offset`.
            // Because the pointee is *not* a ZST, that means that `count` is
            // at most `isize::MAX`, and thus the negation cannot overflow.
            unsafe { intrinsics::offset(self, intrinsics::unchecked_sub(0, count as isize)) }
        }
    }
1110
1111    /// Subtracts an unsigned offset in bytes from a pointer.
1112    ///
1113    /// `count` is in units of bytes.
1114    ///
1115    /// This is purely a convenience for casting to a `u8` pointer and
1116    /// using [sub][pointer::sub] on it. See that method for documentation
1117    /// and safety requirements.
1118    ///
1119    /// For non-`Sized` pointees this operation changes only the data pointer,
1120    /// leaving the metadata untouched.
1121    #[must_use]
1122    #[inline(always)]
1123    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1124    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1125    #[track_caller]
1126    #[cfg(not(feature = "ferrocene_certified"))]
1127    pub const unsafe fn byte_sub(self, count: usize) -> Self {
1128        // SAFETY: the caller must uphold the safety contract for `sub`.
1129        unsafe { self.cast::<u8>().sub(count).with_metadata_of(self) }
1130    }
1131
    /// Adds an unsigned offset to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocation] that `self` points to; it must not
    /// be used to read or write other allocations.
    ///
    /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocation.
    ///
    /// Compared to [`add`], this method basically delays the requirement of staying within the
    /// same allocation: [`add`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocation and then re-entering it later is permitted.
    ///
    /// [`add`]: #method.add
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_add(6);
    ///
    /// // This loop prints "1, 3, 5, "
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         print!("{}, ", *ptr);
    ///     }
    ///     ptr = ptr.wrapping_add(step);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn wrapping_add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // `count as isize` may wrap to a negative value for huge counts, but all
        // arithmetic here is modulo the address space, so the wrapped result is
        // the same either way.
        self.wrapping_offset(count as isize)
    }
1191
1192    /// Adds an unsigned offset in bytes to a pointer using wrapping arithmetic.
1193    ///
1194    /// `count` is in units of bytes.
1195    ///
1196    /// This is purely a convenience for casting to a `u8` pointer and
1197    /// using [wrapping_add][pointer::wrapping_add] on it. See that method for documentation.
1198    ///
1199    /// For non-`Sized` pointees this operation changes only the data pointer,
1200    /// leaving the metadata untouched.
1201    #[must_use]
1202    #[inline(always)]
1203    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1204    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1205    #[cfg(not(feature = "ferrocene_certified"))]
1206    pub const fn wrapping_byte_add(self, count: usize) -> Self {
1207        self.cast::<u8>().wrapping_add(count).with_metadata_of(self)
1208    }
1209
    /// Subtracts an unsigned offset from a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocation] that `self` points to; it must not
    /// be used to read or write other allocations.
    ///
    /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocation.
    ///
    /// Compared to [`sub`], this method basically delays the requirement of staying within the
    /// same allocation: [`sub`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocation and then re-entering it later is permitted.
    ///
    /// [`sub`]: #method.sub
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements (backwards)
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let start_rounded_down = ptr.wrapping_sub(2);
    /// ptr = ptr.wrapping_add(4);
    /// let step = 2;
    /// // This loop prints "5, 3, 1, "
    /// while ptr != start_rounded_down {
    ///     unsafe {
    ///         print!("{}, ", *ptr);
    ///     }
    ///     ptr = ptr.wrapping_sub(step);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn wrapping_sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        // `wrapping_neg` maps `isize::MIN` to itself, but since offset arithmetic
        // here is modulo the address space, the result is still the correct
        // wrapped address.
        self.wrapping_offset((count as isize).wrapping_neg())
    }
1269
1270    /// Subtracts an unsigned offset in bytes from a pointer using wrapping arithmetic.
1271    ///
1272    /// `count` is in units of bytes.
1273    ///
1274    /// This is purely a convenience for casting to a `u8` pointer and
1275    /// using [wrapping_sub][pointer::wrapping_sub] on it. See that method for documentation.
1276    ///
1277    /// For non-`Sized` pointees this operation changes only the data pointer,
1278    /// leaving the metadata untouched.
1279    #[must_use]
1280    #[inline(always)]
1281    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1282    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1283    #[cfg(not(feature = "ferrocene_certified"))]
1284    pub const fn wrapping_byte_sub(self, count: usize) -> Self {
1285        self.cast::<u8>().wrapping_sub(count).with_metadata_of(self)
1286    }
1287
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// See [`ptr::read`] for safety concerns and examples.
    ///
    /// [`ptr::read`]: crate::ptr::read()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline(always)]
    #[track_caller]
    pub const unsafe fn read(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read`.
        unsafe { read(self) }
    }
1305
    /// Performs a volatile read of the value from `self` without moving it. This
    /// leaves the memory in `self` unchanged.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::read_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_volatile`
        // (the same requirements as the free function).
        unsafe { read_volatile(self) }
    }
1327
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// Unlike `read`, the pointer may be unaligned.
    ///
    /// See [`ptr::read_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline(always)]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_unaligned`
        // (notably, `self` must be valid for reads but need not be aligned).
        unsafe { read_unaligned(self) }
    }
1348
    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: crate::ptr::copy()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy`;
        // `self` is the source here and `dest` is the destination.
        unsafe { copy(self, dest, count) }
    }
1369
    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`;
        // `self` is the source here and `dest` is the destination.
        unsafe { copy_nonoverlapping(self, dest, count) }
    }
1390
    /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: crate::ptr::copy()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn copy_from(self, src: *const T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy`;
        // `self` is the destination here.
        unsafe { copy(src, self, count) }
    }
1411
    /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn copy_from_nonoverlapping(self, src: *const T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`;
        // `self` is the destination here.
        unsafe { copy_nonoverlapping(src, self, count) }
    }
1432
    /// Executes the destructor (if any) of the pointed-to value.
    ///
    /// See [`ptr::drop_in_place`] for safety concerns and examples.
    ///
    /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_unstable(feature = "const_drop_in_place", issue = "109342")]
    #[inline(always)]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn drop_in_place(self)
    where
        // The experimental `[const] Destruct` bound permits calling this in
        // const contexts (tracking issue #109342 above).
        T: [const] Destruct,
    {
        // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
        unsafe { drop_in_place(self) }
    }
1449
    /// Overwrites a memory location with the given value without reading or
    /// dropping the old value.
    ///
    /// See [`ptr::write`] for safety concerns and examples.
    ///
    /// [`ptr::write`]: crate::ptr::write()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
    #[inline(always)]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn write(self, val: T)
    where
        T: Sized,
    {
        // Thin delegating wrapper; `val` is moved into the pointee.
        // SAFETY: the caller must uphold the safety contract for `write`.
        unsafe { write(self, val) }
    }
1468
    /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
    /// bytes of memory starting at `self` to `val`.
    ///
    /// See [`ptr::write_bytes`] for safety concerns and examples.
    ///
    /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
    #[doc(alias = "memset")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
    #[inline(always)]
    #[track_caller]
    // Note: unlike the surrounding methods, this one carries no
    // `ferrocene_certified` cfg-gate, so it is also compiled as part of the
    // certified subset.
    pub const unsafe fn write_bytes(self, val: u8, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `write_bytes`.
        unsafe { write_bytes(self, val, count) }
    }
1487
    /// Performs a volatile write of a memory location with the given value without
    /// reading or dropping the old value.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::write_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    // Note: unlike `write`, this is deliberately not a `const fn`.
    pub unsafe fn write_volatile(self, val: T)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `write_volatile`.
        unsafe { write_volatile(self, val) }
    }
1509
    /// Overwrites a memory location with the given value without reading or
    /// dropping the old value.
    ///
    /// Unlike `write`, the pointer may be unaligned.
    ///
    /// See [`ptr::write_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
    #[inline(always)]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn write_unaligned(self, val: T)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
        unsafe { write_unaligned(self, val) }
    }
1530
    /// Replaces the value at `self` with `src`, returning the old
    /// value, without dropping either.
    ///
    /// See [`ptr::replace`] for safety concerns and examples.
    ///
    /// [`ptr::replace`]: crate::ptr::replace()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_inherent_ptr_replace", since = "1.88.0")]
    #[inline(always)]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn replace(self, src: T) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `replace`.
        unsafe { replace(self, src) }
    }
1548
    /// Swaps the values at two mutable locations of the same type, without
    /// deinitializing either. They may overlap, unlike `mem::swap` which is
    /// otherwise equivalent.
    ///
    /// See [`ptr::swap`] for safety concerns and examples.
    ///
    /// [`ptr::swap`]: crate::ptr::swap()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_swap", since = "1.85.0")]
    #[inline(always)]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn swap(self, with: *mut T)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `swap`.
        unsafe { swap(self, with) }
    }
1567
    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
    /// `align`.
    ///
    /// If it is not possible to align the pointer, the implementation returns
    /// `usize::MAX`.
    ///
    /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
    /// used with the `wrapping_add` method.
    ///
    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
    /// the returned offset is correct in all terms other than alignment.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two.
    ///
    /// # Examples
    ///
    /// Accessing adjacent `u8` as `u16`
    ///
    /// ```
    /// # unsafe {
    /// let mut x = [5_u8, 6, 7, 8, 9];
    /// let ptr = x.as_mut_ptr();
    /// let offset = ptr.align_offset(align_of::<u16>());
    ///
    /// if offset < x.len() - 1 {
    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
    ///     *u16_ptr = 0;
    ///
    ///     assert!(x == [0, 0, 7, 8, 9] || x == [5, 0, 0, 8, 9]);
    /// } else {
    ///     // while the pointer can be aligned via `offset`, it would point
    ///     // outside the allocation
    /// }
    /// # }
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "align_offset", since = "1.36.0")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub fn align_offset(self, align: usize) -> usize
    where
        T: Sized,
    {
        // Validate eagerly so the panic is attributed to the caller's bad
        // `align`, not to the internal helper.
        if !align.is_power_of_two() {
            panic!("align_offset: align is not a power-of-two");
        }

        // SAFETY: `align` has been checked to be a power of 2 above
        let ret = unsafe { align_offset(self, align) };

        // Inform Miri that we want to consider the resulting pointer to be suitably aligned.
        // (`usize::MAX` is the "cannot align" sentinel, so no promise is made for it.)
        #[cfg(miri)]
        if ret != usize::MAX {
            intrinsics::miri_promise_symbolic_alignment(
                self.wrapping_add(ret).cast_const().cast(),
                align,
            );
        }

        ret
    }
1632
    /// Returns whether the pointer is properly aligned for `T`.
    ///
    /// # Examples
    ///
    /// ```
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let mut data = AlignedI32(42);
    /// let ptr = &mut data as *mut AlignedI32;
    ///
    /// assert!(ptr.is_aligned());
    /// assert!(!ptr.wrapping_byte_add(1).is_aligned());
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub fn is_aligned(self) -> bool
    where
        T: Sized,
    {
        // `align_of::<T>()` is always a power of two, so the delegated call
        // cannot hit `is_aligned_to`'s panic path.
        self.is_aligned_to(align_of::<T>())
    }
1658
    /// Returns whether the pointer is aligned to `align`.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointer,
    /// ignoring the metadata.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two (this includes 0).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(pointer_is_aligned_to)]
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let mut data = AlignedI32(42);
    /// let ptr = &mut data as *mut AlignedI32;
    ///
    /// assert!(ptr.is_aligned_to(1));
    /// assert!(ptr.is_aligned_to(2));
    /// assert!(ptr.is_aligned_to(4));
    ///
    /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
    /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
    ///
    /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
    /// ```
    #[must_use]
    #[inline]
    #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub fn is_aligned_to(self, align: usize) -> bool {
        if !align.is_power_of_two() {
            panic!("is_aligned_to: align is not a power-of-two");
        }

        // Since `align` is a power of two, `align - 1` is a mask of the low
        // bits; the address is aligned iff all of those bits are zero.
        self.addr() & (align - 1) == 0
    }
1700}
1701
1702impl<T> *mut T {
1703    /// Casts from a type to its maybe-uninitialized version.
1704    ///
1705    /// This is always safe, since UB can only occur if the pointer is read
1706    /// before being initialized.
1707    #[must_use]
1708    #[inline(always)]
1709    #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
1710    #[cfg(not(feature = "ferrocene_certified"))]
1711    pub const fn cast_uninit(self) -> *mut MaybeUninit<T> {
1712        self as _
1713    }
1714}
1715#[cfg(not(feature = "ferrocene_certified"))]
1716impl<T> *mut MaybeUninit<T> {
1717    /// Casts from a maybe-uninitialized type to its initialized version.
1718    ///
1719    /// This is always safe, since UB can only occur if the pointer is read
1720    /// before being initialized.
1721    #[must_use]
1722    #[inline(always)]
1723    #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
1724    pub const fn cast_init(self) -> *mut T {
1725        self as _
1726    }
1727}
1728
impl<T> *mut [T] {
    /// Returns the length of a raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the raw slice cannot be cast to a slice
    /// reference because the pointer is null or unaligned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr;
    ///
    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline(always)]
    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
    pub const fn len(self) -> usize {
        // The element count lives in the fat-pointer metadata, so this never
        // dereferences the pointer.
        metadata(self)
    }

    /// Returns `true` if the raw slice has a length of 0.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr;
    ///
    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
    /// assert!(!slice.is_empty());
    /// ```
    #[inline(always)]
    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
    pub const fn is_empty(self) -> bool {
        self.len() == 0
    }

    /// Gets a raw, mutable pointer to the underlying array.
    ///
    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
    #[unstable(feature = "slice_as_array", issue = "133508")]
    #[inline]
    #[must_use]
    pub const fn as_mut_array<const N: usize>(self) -> Option<*mut [T; N]> {
        if self.len() == N {
            // Length matches, so reinterpreting the data pointer as an array
            // pointer (dropping the metadata) is coherent.
            let me = self.as_mut_ptr() as *mut [T; N];
            Some(me)
        } else {
            None
        }
    }

    /// Divides one mutable raw slice into two at an index.
    ///
    /// The first will contain all indices from `[0, mid)` (excluding
    /// the index `mid` itself) and the second will contain all
    /// indices from `[mid, len)` (excluding the index `len` itself).
    ///
    /// # Panics
    ///
    /// Panics if `mid > len`.
    ///
    /// # Safety
    ///
    /// `mid` must be [in-bounds] of the underlying [allocation].
    /// Which means `self` must be dereferenceable and span a single allocation
    /// that is at least `mid * size_of::<T>()` bytes long. Not upholding these
    /// requirements is *[undefined behavior]* even if the resulting pointers are not used.
    ///
    /// Since `len` being in-bounds is not a safety invariant of `*mut [T]`, the
    /// safety requirements of this method are the same as for [`split_at_mut_unchecked`].
    /// The explicit bounds check is only as useful as `len` is correct.
    ///
    /// [`split_at_mut_unchecked`]: #method.split_at_mut_unchecked
    /// [in-bounds]: #method.add
    /// [allocation]: crate::ptr#allocation
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(raw_slice_split)]
    /// #![feature(slice_ptr_get)]
    ///
    /// let mut v = [1, 0, 3, 0, 5, 6];
    /// let ptr = &mut v as *mut [_];
    /// unsafe {
    ///     let (left, right) = ptr.split_at_mut(2);
    ///     assert_eq!(&*left, [1, 0]);
    ///     assert_eq!(&*right, [3, 0, 5, 6]);
    /// }
    /// ```
    #[inline(always)]
    #[track_caller]
    #[unstable(feature = "raw_slice_split", issue = "95595")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub unsafe fn split_at_mut(self, mid: usize) -> (*mut [T], *mut [T]) {
        assert!(mid <= self.len());
        // SAFETY: The assert above is only a safety-net as long as `self.len()` is correct
        // The actual safety requirements of this function are the same as for `split_at_mut_unchecked`
        unsafe { self.split_at_mut_unchecked(mid) }
    }

    /// Divides one mutable raw slice into two at an index, without doing bounds checking.
    ///
    /// The first will contain all indices from `[0, mid)` (excluding
    /// the index `mid` itself) and the second will contain all
    /// indices from `[mid, len)` (excluding the index `len` itself).
    ///
    /// # Safety
    ///
    /// `mid` must be [in-bounds] of the underlying [allocation].
    /// Which means `self` must be dereferenceable and span a single allocation
    /// that is at least `mid * size_of::<T>()` bytes long. Not upholding these
    /// requirements is *[undefined behavior]* even if the resulting pointers are not used.
    ///
    /// [in-bounds]: #method.add
    /// [out-of-bounds index]: #method.add
    /// [allocation]: crate::ptr#allocation
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(raw_slice_split)]
    ///
    /// let mut v = [1, 0, 3, 0, 5, 6];
    /// // scoped to restrict the lifetime of the borrows
    /// unsafe {
    ///     let ptr = &mut v as *mut [_];
    ///     let (left, right) = ptr.split_at_mut_unchecked(2);
    ///     assert_eq!(&*left, [1, 0]);
    ///     assert_eq!(&*right, [3, 0, 5, 6]);
    ///     (&mut *left)[1] = 2;
    ///     (&mut *right)[1] = 4;
    /// }
    /// assert_eq!(v, [1, 2, 3, 4, 5, 6]);
    /// ```
    #[inline(always)]
    #[unstable(feature = "raw_slice_split", issue = "95595")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub unsafe fn split_at_mut_unchecked(self, mid: usize) -> (*mut [T], *mut [T]) {
        let len = self.len();
        let ptr = self.as_mut_ptr();

        // SAFETY: Caller must pass a valid pointer and an index that is in-bounds.
        let tail = unsafe { ptr.add(mid) };
        // Rebuild two fat pointers: `[0, mid)` from the original data pointer
        // and `[mid, len)` from the offset one.
        (
            crate::ptr::slice_from_raw_parts_mut(ptr, mid),
            crate::ptr::slice_from_raw_parts_mut(tail, len - mid),
        )
    }

    /// Returns a raw pointer to the slice's buffer.
    ///
    /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr;
    ///
    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
    /// assert_eq!(slice.as_mut_ptr(), ptr::null_mut());
    /// ```
    #[inline(always)]
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    pub const fn as_mut_ptr(self) -> *mut T {
        // Fat-to-thin cast: keeps the data pointer, discards the length.
        self as *mut T
    }

    /// Returns a raw pointer to an element or subslice, without doing bounds
    /// checking.
    ///
    /// Calling this method with an [out-of-bounds index] or when `self` is not dereferenceable
    /// is *[undefined behavior]* even if the resulting pointer is not used.
    ///
    /// [out-of-bounds index]: #method.add
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(slice_ptr_get)]
    ///
    /// let x = &mut [1, 2, 4] as *mut [i32];
    ///
    /// unsafe {
    ///     assert_eq!(x.get_unchecked_mut(1), x.as_mut_ptr().add(1));
    /// }
    /// ```
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[rustc_const_unstable(feature = "const_index", issue = "143775")]
    #[inline(always)]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn get_unchecked_mut<I>(self, index: I) -> *mut I::Output
    where
        I: [const] SliceIndex<[T]>,
    {
        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
        unsafe { index.get_unchecked_mut(self) }
    }

    #[doc = include_str!("docs/as_uninit_slice.md")]
    ///
    /// # See Also
    /// For the mutable counterpart see [`as_uninit_slice_mut`](pointer::as_uninit_slice_mut).
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
        if self.is_null() {
            None
        } else {
            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
            Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
        }
    }

    /// Returns `None` if the pointer is null, or else returns a unique slice to
    /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
    /// that the value has to be initialized.
    ///
    /// For the shared counterpart see [`as_uninit_slice`].
    ///
    /// [`as_mut`]: #method.as_mut
    /// [`as_uninit_slice`]: #method.as_uninit_slice-1
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be [valid] for reads and writes for `ptr.len() * size_of::<T>()`
    ///   many bytes, and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single [allocation]!
    ///       Slices can never span across multiple allocations.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get accessed (read or written) through any other pointer.
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts_mut`][].
    ///
    /// [valid]: crate::ptr#safety
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null-1
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn as_uninit_slice_mut<'a>(self) -> Option<&'a mut [MaybeUninit<T>]> {
        if self.is_null() {
            None
        } else {
            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
            Some(unsafe { slice::from_raw_parts_mut(self as *mut MaybeUninit<T>, self.len()) })
        }
    }
}
2011
2012impl<T> *mut T {
2013    /// Casts from a pointer-to-`T` to a pointer-to-`[T; N]`.
2014    #[inline]
2015    #[unstable(feature = "ptr_cast_array", issue = "144514")]
2016    pub const fn cast_array<const N: usize>(self) -> *mut [T; N] {
2017        self.cast()
2018    }
2019}
2020
#[cfg(not(feature = "ferrocene_certified"))]
impl<T, const N: usize> *mut [T; N] {
    /// Returns a raw pointer to the array's buffer.
    ///
    /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(array_ptr_get)]
    /// use std::ptr;
    ///
    /// let arr: *mut [i8; 3] = ptr::null_mut();
    /// assert_eq!(arr.as_mut_ptr(), ptr::null_mut());
    /// ```
    #[inline]
    #[unstable(feature = "array_ptr_get", issue = "119834")]
    pub const fn as_mut_ptr(self) -> *mut T {
        // Array pointers are thin; this cast keeps the address unchanged.
        self as *mut T
    }

    /// Returns a raw pointer to a mutable slice containing the entire array.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(array_ptr_get)]
    ///
    /// let mut arr = [1, 2, 5];
    /// let ptr: *mut [i32; 3] = &mut arr;
    /// unsafe {
    ///     (&mut *ptr.as_mut_slice())[..2].copy_from_slice(&[3, 4]);
    /// }
    /// assert_eq!(arr, [3, 4, 5]);
    /// ```
    #[inline]
    #[unstable(feature = "array_ptr_get", issue = "119834")]
    pub const fn as_mut_slice(self) -> *mut [T] {
        // Implicit unsize coercion `*mut [T; N] -> *mut [T]`: attaches `N` as
        // the slice length metadata.
        self
    }
}
2062
/// Pointer equality is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PointeeSized> PartialEq for *mut T {
    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn eq(&self, other: &*mut T) -> bool {
        // Built-in raw-pointer comparison (no trait dispatch, so no recursion
        // into this impl). The `allow` acknowledges that comparing wide
        // pointers also involves their metadata.
        *self == *other
    }
}
2072
/// Pointer equality is an equivalence relation.
// Marker impl only: `PartialEq` above is total for raw pointers.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PointeeSized> Eq for *mut T {}
2076
/// Pointer comparison is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method.
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(feature = "ferrocene_certified"))]
impl<T: PointeeSized> Ord for *mut T {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn cmp(&self, other: &*mut T) -> Ordering {
        // Three-way comparison built from the built-in `<` / `==` pointer
        // operators; the `allow` acknowledges that wide-pointer comparison
        // also involves metadata.
        if self < other {
            Less
        } else if self == other {
            Equal
        } else {
            Greater
        }
    }
}
2093
/// Pointer comparison is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method.
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(feature = "ferrocene_certified"))]
impl<T: PointeeSized> PartialOrd for *mut T {
    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn partial_cmp(&self, other: &*mut T) -> Option<Ordering> {
        // Raw-pointer ordering is total, so this always returns `Some`.
        Some(self.cmp(other))
    }

    // The individual operators are overridden to use the built-in pointer
    // comparisons directly instead of going through `partial_cmp`.

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn lt(&self, other: &*mut T) -> bool {
        *self < *other
    }

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn le(&self, other: &*mut T) -> bool {
        *self <= *other
    }

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn gt(&self, other: &*mut T) -> bool {
        *self > *other
    }

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn ge(&self, other: &*mut T) -> bool {
        *self >= *other
    }
}
2128
#[stable(feature = "raw_ptr_default", since = "1.88.0")]
#[cfg(not(feature = "ferrocene_certified"))]
// `Thin` bound: only thin pointers have an obvious default (no metadata to invent).
impl<T: ?Sized + Thin> Default for *mut T {
    /// Returns the default value of [`null_mut()`][crate::ptr::null_mut].
    fn default() -> Self {
        crate::ptr::null_mut()
    }
}