core/ptr/mut_ptr.rs

1use super::*;
2#[cfg(not(feature = "ferrocene_subset"))]
3use crate::cmp::Ordering::{Equal, Greater, Less};
4use crate::intrinsics::const_eval_select;
5#[cfg(not(feature = "ferrocene_subset"))]
6use crate::marker::{Destruct, PointeeSized};
7#[cfg(not(feature = "ferrocene_subset"))]
8use crate::mem::{self, SizedTypeProperties};
9#[cfg(not(feature = "ferrocene_subset"))]
10use crate::slice::{self, SliceIndex};
11
12// Ferrocene addition: imports for certified subset
13#[cfg(feature = "ferrocene_subset")]
14#[rustfmt::skip]
15use crate::{marker::PointeeSized, slice::SliceIndex};
16
17impl<T: PointeeSized> *mut T {
18    #[doc = include_str!("docs/is_null.md")]
19    ///
20    /// # Examples
21    ///
22    /// ```
23    /// let mut s = [1, 2, 3];
24    /// let ptr: *mut u32 = s.as_mut_ptr();
25    /// assert!(!ptr.is_null());
26    /// ```
27    #[stable(feature = "rust1", since = "1.0.0")]
28    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
29    #[rustc_diagnostic_item = "ptr_is_null"]
30    #[inline]
31    pub const fn is_null(self) -> bool {
32        self.cast_const().is_null()
33    }
34
35    /// Casts to a pointer of another type.
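    ///
    /// # Examples
    ///
    /// A minimal sketch; the cast reinterprets the pointee type while the address
    /// stays the same:
    ///
    /// ```
    /// let mut x = 0u32;
    /// let ptr: *mut u32 = &mut x;
    /// let byte_ptr: *mut u8 = ptr.cast::<u8>();
    /// assert_eq!(byte_ptr.addr(), ptr.addr());
    /// ```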
36    #[stable(feature = "ptr_cast", since = "1.38.0")]
37    #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
38    #[rustc_diagnostic_item = "ptr_cast"]
39    #[inline(always)]
40    pub const fn cast<U>(self) -> *mut U {
41        self as _
42    }
43
44    /// Try to cast to a pointer of another type by checking alignment.
45    ///
46    /// If the pointer is properly aligned to the target type, it will be
47    /// cast to the target type. Otherwise, `None` is returned.
48    ///
49    /// # Examples
50    ///
51    /// ```rust
52    /// #![feature(pointer_try_cast_aligned)]
53    ///
54    /// let mut x = 0u64;
55    ///
56    /// let aligned: *mut u64 = &mut x;
57    /// let unaligned = unsafe { aligned.byte_add(1) };
58    ///
59    /// assert!(aligned.try_cast_aligned::<u32>().is_some());
60    /// assert!(unaligned.try_cast_aligned::<u32>().is_none());
61    /// ```
62    #[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
63    #[must_use = "this returns the result of the operation, \
64                  without modifying the original"]
65    #[inline]
66    #[cfg(not(feature = "ferrocene_subset"))]
67    pub fn try_cast_aligned<U>(self) -> Option<*mut U> {
68        if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
69    }
70
71    /// Uses the address value in a new pointer of another type.
72    ///
73    /// This operation will ignore the address part of its `meta` operand and discard existing
74    /// metadata of `self`. For pointers to sized types (thin pointers), this has the same effect
75    /// as a simple cast. For pointers to an unsized type (fat pointers), this recombines the address
76    /// with new metadata such as slice lengths or `dyn`-vtable.
77    ///
78    /// The resulting pointer will have provenance of `self`. This operation is semantically the
79    /// same as creating a new pointer with the data pointer value of `self` but the metadata of
80    /// `meta`, being fat or thin depending on the `meta` operand.
81    ///
82    /// # Examples
83    ///
84    /// This function is primarily useful for enabling pointer arithmetic on potentially fat
85    /// pointers. The pointer is cast to a sized pointee to utilize offset operations and then
86    /// recombined with its own original metadata.
87    ///
88    /// ```
89    /// #![feature(set_ptr_value)]
90    /// # use core::fmt::Debug;
91    /// let mut arr: [i32; 3] = [1, 2, 3];
92    /// let mut ptr = arr.as_mut_ptr() as *mut dyn Debug;
93    /// let thin = ptr as *mut u8;
94    /// unsafe {
95    ///     ptr = thin.add(8).with_metadata_of(ptr);
96    ///     # assert_eq!(*(ptr as *mut i32), 3);
97    ///     println!("{:?}", &*ptr); // will print "3"
98    /// }
99    /// ```
100    ///
101    /// # *Incorrect* usage
102    ///
103    /// The provenance from pointers is *not* combined. The result must only be used to refer to the
104    /// address allowed by `self`.
105    ///
106    /// ```rust,no_run
107    /// #![feature(set_ptr_value)]
108    /// let mut x = 0u32;
109    /// let mut y = 1u32;
110    ///
111    /// let x = (&mut x) as *mut u32;
112    /// let y = (&mut y) as *mut u32;
113    ///
114    /// let offset = (x as usize - y as usize) / 4;
115    /// let bad = x.wrapping_add(offset).with_metadata_of(y);
116    ///
117    /// // This dereference is UB. The pointer only has provenance for `x` but points to `y`.
118    /// println!("{:?}", unsafe { &*bad });
119    /// ```
120    #[unstable(feature = "set_ptr_value", issue = "75091")]
121    #[must_use = "returns a new pointer rather than modifying its argument"]
122    #[inline]
123    #[cfg(not(feature = "ferrocene_subset"))]
124    pub const fn with_metadata_of<U>(self, meta: *const U) -> *mut U
125    where
126        U: PointeeSized,
127    {
128        from_raw_parts_mut::<U>(self as *mut (), metadata(meta))
129    }
130
131    /// Changes constness without changing the type.
132    ///
133    /// This is a bit safer than `as` because it wouldn't silently change the type if the code is
134    /// refactored.
135    ///
136    /// While not strictly required (`*mut T` coerces to `*const T`), this is provided for symmetry
137    /// with [`cast_mut`] on `*const T` and may have documentation value if used instead of implicit
138    /// coercion.
139    ///
140    /// [`cast_mut`]: pointer::cast_mut
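    ///
    /// # Examples
    ///
    /// A minimal sketch of the round trip between the two mutability flavours:
    ///
    /// ```
    /// let mut x = 7u32;
    /// let ptr: *mut u32 = &mut x;
    /// let const_ptr: *const u32 = ptr.cast_const();
    /// assert_eq!(const_ptr.cast_mut(), ptr);
    /// ```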
141    #[stable(feature = "ptr_const_cast", since = "1.65.0")]
142    #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")]
143    #[rustc_diagnostic_item = "ptr_cast_const"]
144    #[inline(always)]
145    pub const fn cast_const(self) -> *const T {
146        self as _
147    }
148
149    #[doc = include_str!("./docs/addr.md")]
150    ///
151    /// [without_provenance]: without_provenance_mut
152    #[must_use]
153    #[inline(always)]
154    #[stable(feature = "strict_provenance", since = "1.84.0")]
155    pub fn addr(self) -> usize {
156        // A pointer-to-integer transmute currently has exactly the right semantics: it returns the
157        // address without exposing the provenance. Note that this is *not* a stable guarantee about
158        // transmute semantics, it relies on sysroot crates having special status.
159        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
160        // provenance).
161        unsafe { mem::transmute(self.cast::<()>()) }
162    }
163
164    /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
165    /// [`with_exposed_provenance_mut`] and returns the "address" portion.
166    ///
167    /// This is equivalent to `self as usize`, which semantically discards provenance information.
168    /// Furthermore, this (like the `as` cast) has the implicit side-effect of marking the
169    /// provenance as 'exposed', so on platforms that support it you can later call
170    /// [`with_exposed_provenance_mut`] to reconstitute the original pointer including its provenance.
171    ///
172    /// Due to its inherent ambiguity, [`with_exposed_provenance_mut`] may not be supported by tools
173    /// that help you to stay conformant with the Rust memory model. It is recommended to use
174    /// [Strict Provenance][crate::ptr#strict-provenance] APIs such as [`with_addr`][pointer::with_addr]
175    /// wherever possible, in which case [`addr`][pointer::addr] should be used instead of `expose_provenance`.
176    ///
177    /// On most platforms this will produce a value with the same bytes as the original pointer,
178    /// because all the bytes are dedicated to describing the address. Platforms which need to store
179    /// additional information in the pointer may not support this operation, since the 'expose'
180    /// side-effect which is required for [`with_exposed_provenance_mut`] to work is typically not
181    /// available.
182    ///
183    /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
184    ///
185    /// [`with_exposed_provenance_mut`]: with_exposed_provenance_mut
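    ///
    /// # Examples
    ///
    /// A minimal sketch of the expose/reconstitute round trip; the write through the
    /// reconstituted pointer is allowed because its provenance was exposed first:
    ///
    /// ```
    /// let mut x = 5u32;
    /// let ptr: *mut u32 = &mut x;
    /// let addr = ptr.expose_provenance();
    /// // Later (e.g. after passing `addr` through FFI), reconstitute the pointer.
    /// let ptr2 = core::ptr::with_exposed_provenance_mut::<u32>(addr);
    /// unsafe { *ptr2 = 6 };
    /// assert_eq!(x, 6);
    /// ```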
186    #[inline(always)]
187    #[stable(feature = "exposed_provenance", since = "1.84.0")]
188    #[cfg(not(feature = "ferrocene_subset"))]
189    pub fn expose_provenance(self) -> usize {
190        self.cast::<()>() as usize
191    }
192
193    /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
194    /// `self`.
195    ///
196    /// This is similar to an `addr as *mut T` cast, but copies
197    /// the *provenance* of `self` to the new pointer.
198    /// This avoids the inherent ambiguity of the unary cast.
199    ///
200    /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset
201    /// `self` to the given address, and therefore has all the same capabilities and restrictions.
202    ///
203    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
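    ///
    /// # Examples
    ///
    /// A minimal sketch that rounds an address down while keeping the original
    /// provenance, so the result may still access the same allocation:
    ///
    /// ```
    /// let mut block = [0u8; 8];
    /// let ptr: *mut u8 = block.as_mut_ptr().wrapping_add(3);
    /// // Clear the lowest address bit; the result keeps the provenance of `ptr`.
    /// let aligned = ptr.with_addr(ptr.addr() & !1);
    /// assert_eq!(aligned.addr() % 2, 0);
    /// assert!(aligned.addr() <= ptr.addr());
    /// ```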
204    #[must_use]
205    #[inline]
206    #[stable(feature = "strict_provenance", since = "1.84.0")]
207    #[cfg(not(feature = "ferrocene_subset"))]
208    pub fn with_addr(self, addr: usize) -> Self {
209        // This should probably be an intrinsic to avoid doing any sort of arithmetic, but
210        // meanwhile, we can implement it with `wrapping_offset`, which preserves the pointer's
211        // provenance.
212        let self_addr = self.addr() as isize;
213        let dest_addr = addr as isize;
214        let offset = dest_addr.wrapping_sub(self_addr);
215        self.wrapping_byte_offset(offset)
216    }
217
218    /// Creates a new pointer by mapping `self`'s address to a new one, preserving the original
219    /// pointer's [provenance][crate::ptr#provenance].
220    ///
221    /// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details.
222    ///
223    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
224    #[must_use]
225    #[inline]
226    #[stable(feature = "strict_provenance", since = "1.84.0")]
227    #[cfg(not(feature = "ferrocene_subset"))]
228    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
229        self.with_addr(f(self.addr()))
230    }
231
232    /// Decompose a (possibly wide) pointer into its data pointer and metadata components.
233    ///
234    /// The pointer can be later reconstructed with [`from_raw_parts_mut`].
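    ///
    /// # Examples
    ///
    /// A minimal sketch with a slice pointer, whose metadata is its length:
    ///
    /// ```
    /// #![feature(ptr_metadata)]
    ///
    /// let mut arr = [1i32, 2, 3];
    /// let slice_ptr: *mut [i32] = &mut arr[..];
    /// let (data, len) = slice_ptr.to_raw_parts();
    /// assert_eq!(len, 3);
    /// assert_eq!(data, slice_ptr.cast::<()>());
    /// ```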
235    #[unstable(feature = "ptr_metadata", issue = "81513")]
236    #[inline]
237    pub const fn to_raw_parts(self) -> (*mut (), <T as super::Pointee>::Metadata) {
238        (self.cast(), super::metadata(self))
239    }
240
241    #[doc = include_str!("./docs/as_ref.md")]
242    ///
243    /// ```
244    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
245    ///
246    /// unsafe {
247    ///     let val_back = &*ptr;
248    ///     println!("We got back the value: {val_back}!");
249    /// }
250    /// ```
251    ///
252    /// # Examples
253    ///
254    /// ```
255    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
256    ///
257    /// unsafe {
258    ///     if let Some(val_back) = ptr.as_ref() {
259    ///         println!("We got back the value: {val_back}!");
260    ///     }
261    /// }
262    /// ```
263    ///
264    /// # See Also
265    ///
266    /// For the mutable counterpart see [`as_mut`].
267    ///
268    /// [`is_null`]: #method.is_null-1
269    /// [`as_uninit_ref`]: pointer#method.as_uninit_ref-1
270    /// [`as_mut`]: #method.as_mut
271
272    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
273    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
274    #[inline]
275    #[cfg(not(feature = "ferrocene_subset"))]
276    pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
277        // SAFETY: the caller must guarantee that `self` is valid for a
278        // reference if it isn't null.
279        if self.is_null() { None } else { unsafe { Some(&*self) } }
280    }
281
282    /// Returns a shared reference to the value behind the pointer.
283    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_ref`] must be used instead.
284    /// If the pointer may be null, but the value is known to have been initialized, [`as_ref`] must be used instead.
285    ///
286    /// For the mutable counterpart see [`as_mut_unchecked`].
287    ///
288    /// [`as_ref`]: #method.as_ref
289    /// [`as_uninit_ref`]: #method.as_uninit_ref
290    /// [`as_mut_unchecked`]: #method.as_mut_unchecked
291    ///
292    /// # Safety
293    ///
294    /// When calling this method, you have to ensure that the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
295    ///
296    /// # Examples
297    ///
298    /// ```
299    /// #![feature(ptr_as_ref_unchecked)]
300    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
301    ///
302    /// unsafe {
303    ///     println!("We got back the value: {}!", ptr.as_ref_unchecked());
304    /// }
305    /// ```
306    // FIXME: mention it in the docs for `as_ref` and `as_uninit_ref` once stabilized.
307    #[unstable(feature = "ptr_as_ref_unchecked", issue = "122034")]
308    #[inline]
309    #[must_use]
310    #[cfg(not(feature = "ferrocene_subset"))]
311    pub const unsafe fn as_ref_unchecked<'a>(self) -> &'a T {
312        // SAFETY: the caller must guarantee that `self` is valid for a reference
313        unsafe { &*self }
314    }
315
316    #[doc = include_str!("./docs/as_uninit_ref.md")]
317    ///
318    /// [`is_null`]: #method.is_null-1
319    /// [`as_ref`]: pointer#method.as_ref-1
320    ///
321    /// # See Also
322    /// For the mutable counterpart see [`as_uninit_mut`].
323    ///
324    /// [`as_uninit_mut`]: #method.as_uninit_mut
325    ///
326    /// # Examples
327    ///
328    /// ```
329    /// #![feature(ptr_as_uninit)]
330    ///
331    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
332    ///
333    /// unsafe {
334    ///     if let Some(val_back) = ptr.as_uninit_ref() {
335    ///         println!("We got back the value: {}!", val_back.assume_init());
336    ///     }
337    /// }
338    /// ```
339    #[inline]
340    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
341    #[cfg(not(feature = "ferrocene_subset"))]
342    pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
343    where
344        T: Sized,
345    {
346        // SAFETY: the caller must guarantee that `self` meets all the
347        // requirements for a reference.
348        if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
349    }
350
351    #[doc = include_str!("./docs/offset.md")]
352    ///
353    /// # Examples
354    ///
355    /// ```
356    /// let mut s = [1, 2, 3];
357    /// let ptr: *mut u32 = s.as_mut_ptr();
358    ///
359    /// unsafe {
360    ///     assert_eq!(2, *ptr.offset(1));
361    ///     assert_eq!(3, *ptr.offset(2));
362    /// }
363    /// ```
364    #[stable(feature = "rust1", since = "1.0.0")]
365    #[must_use = "returns a new pointer rather than modifying its argument"]
366    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
367    #[inline(always)]
368    #[track_caller]
369    pub const unsafe fn offset(self, count: isize) -> *mut T
370    where
371        T: Sized,
372    {
373        #[inline]
374        #[rustc_allow_const_fn_unstable(const_eval_select)]
375        const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
376            // We can use const_eval_select here because this is only for UB checks.
377            const_eval_select!(
378                @capture { this: *const (), count: isize, size: usize } -> bool:
379                if const {
380                    true
381                } else {
382                    // `size` is the size of a Rust type, so we know that
383                    // `size <= isize::MAX` and thus `as` cast here is not lossy.
384                    let Some(byte_offset) = count.checked_mul(size as isize) else {
385                        return false;
386                    };
387                    let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
388                    !overflow
389                }
390            )
391        }
392
393        ub_checks::assert_unsafe_precondition!(
394            check_language_ub,
395            "ptr::offset requires the address calculation to not overflow",
396            (
397                this: *const () = self as *const (),
398                count: isize = count,
399                size: usize = size_of::<T>(),
400            ) => runtime_offset_nowrap(this, count, size)
401        );
402
403        // SAFETY: the caller must uphold the safety contract for `offset`.
404        // The obtained pointer is valid for writes since the caller must
405        // guarantee that it points to the same allocation as `self`.
406        unsafe { intrinsics::offset(self, count) }
407    }
408
409    /// Adds a signed offset in bytes to a pointer.
410    ///
411    /// `count` is in units of **bytes**.
412    ///
413    /// This is purely a convenience for casting to a `u8` pointer and
414    /// using [offset][pointer::offset] on it. See that method for documentation
415    /// and safety requirements.
416    ///
417    /// For non-`Sized` pointees this operation changes only the data pointer,
418    /// leaving the metadata untouched.
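    ///
    /// # Examples
    ///
    /// A minimal sketch; moving by `size_of::<u32>()` bytes is the same as moving by
    /// one element:
    ///
    /// ```
    /// let mut v = [0u32; 2];
    /// let ptr: *mut u32 = v.as_mut_ptr();
    /// unsafe {
    ///     let next = ptr.byte_offset(4);
    ///     assert_eq!(next, ptr.offset(1));
    ///     assert_eq!(next.byte_offset(-4), ptr);
    /// }
    /// ```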
419    #[must_use]
420    #[inline(always)]
421    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
422    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
423    #[track_caller]
424    #[cfg(not(feature = "ferrocene_subset"))]
425    pub const unsafe fn byte_offset(self, count: isize) -> Self {
426        // SAFETY: the caller must uphold the safety contract for `offset`.
427        unsafe { self.cast::<u8>().offset(count).with_metadata_of(self) }
428    }
429
430    /// Adds a signed offset to a pointer using wrapping arithmetic.
431    ///
432    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
433    /// offset of `3 * size_of::<T>()` bytes.
434    ///
435    /// # Safety
436    ///
437    /// This operation itself is always safe, but using the resulting pointer is not.
438    ///
439    /// The resulting pointer "remembers" the [allocation] that `self` points to
440    /// (this is called "[Provenance](ptr/index.html#provenance)").
441    /// The pointer must not be used to read or write other allocations.
442    ///
443    /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
444    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
445    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
446    /// `x` and `y` point into the same allocation.
447    ///
448    /// Compared to [`offset`], this method basically delays the requirement of staying within the
449    /// same allocation: [`offset`] is immediate Undefined Behavior when crossing object
450    /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
451    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
452    /// can be optimized better and is thus preferable in performance-sensitive code.
453    ///
454    /// The delayed check only considers the value of the pointer that was dereferenced, not the
455    /// intermediate values used during the computation of the final result. For example,
456    /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
457    /// words, leaving the allocation and then re-entering it later is permitted.
458    ///
459    /// [`offset`]: #method.offset
460    /// [allocation]: crate::ptr#allocation
461    ///
462    /// # Examples
463    ///
464    /// ```
465    /// // Iterate using a raw pointer in increments of two elements
466    /// let mut data = [1u8, 2, 3, 4, 5];
467    /// let mut ptr: *mut u8 = data.as_mut_ptr();
468    /// let step = 2;
469    /// let end_rounded_up = ptr.wrapping_offset(6);
470    ///
471    /// while ptr != end_rounded_up {
472    ///     unsafe {
473    ///         *ptr = 0;
474    ///     }
475    ///     ptr = ptr.wrapping_offset(step);
476    /// }
477    /// assert_eq!(&data, &[0, 2, 0, 4, 0]);
478    /// ```
479    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
480    #[must_use = "returns a new pointer rather than modifying its argument"]
481    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
482    #[inline(always)]
483    pub const fn wrapping_offset(self, count: isize) -> *mut T
484    where
485        T: Sized,
486    {
487        // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
488        unsafe { intrinsics::arith_offset(self, count) as *mut T }
489    }
490
491    /// Adds a signed offset in bytes to a pointer using wrapping arithmetic.
492    ///
493    /// `count` is in units of **bytes**.
494    ///
495    /// This is purely a convenience for casting to a `u8` pointer and
496    /// using [wrapping_offset][pointer::wrapping_offset] on it. See that method
497    /// for documentation.
498    ///
499    /// For non-`Sized` pointees this operation changes only the data pointer,
500    /// leaving the metadata untouched.
501    #[must_use]
502    #[inline(always)]
503    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
504    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
505    #[cfg(not(feature = "ferrocene_subset"))]
506    pub const fn wrapping_byte_offset(self, count: isize) -> Self {
507        self.cast::<u8>().wrapping_offset(count).with_metadata_of(self)
508    }
509
510    /// Masks out bits of the pointer according to a mask.
511    ///
512    /// This is convenience for `ptr.map_addr(|a| a & mask)`.
513    ///
514    /// For non-`Sized` pointees this operation changes only the data pointer,
515    /// leaving the metadata untouched.
516    ///
517    /// ## Examples
518    ///
519    /// ```
520    /// #![feature(ptr_mask)]
521    /// let mut v = 17_u32;
522    /// let ptr: *mut u32 = &mut v;
523    ///
524    /// // `u32` is 4 bytes aligned,
525    /// // which means that lower 2 bits are always 0.
526    /// let tag_mask = 0b11;
527    /// let ptr_mask = !tag_mask;
528    ///
529    /// // We can store something in these lower bits
530    /// let tagged_ptr = ptr.map_addr(|a| a | 0b10);
531    ///
532    /// // Get the "tag" back
533    /// let tag = tagged_ptr.addr() & tag_mask;
534    /// assert_eq!(tag, 0b10);
535    ///
536    /// // Note that `tagged_ptr` is unaligned, it's UB to read from/write to it.
537    /// // To get original pointer `mask` can be used:
538    /// let masked_ptr = tagged_ptr.mask(ptr_mask);
539    /// assert_eq!(unsafe { *masked_ptr }, 17);
540    ///
541    /// unsafe { *masked_ptr = 0 };
542    /// assert_eq!(v, 0);
543    /// ```
544    #[unstable(feature = "ptr_mask", issue = "98290")]
545    #[must_use = "returns a new pointer rather than modifying its argument"]
546    #[inline(always)]
547    #[cfg(not(feature = "ferrocene_subset"))]
548    pub fn mask(self, mask: usize) -> *mut T {
549        intrinsics::ptr_mask(self.cast::<()>(), mask).cast_mut().with_metadata_of(self)
550    }
551
552    /// Returns `None` if the pointer is null, or else returns a unique reference to
553    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_mut`]
554    /// must be used instead.
555    ///
556    /// For the shared counterpart see [`as_ref`].
557    ///
558    /// [`as_uninit_mut`]: #method.as_uninit_mut
559    /// [`as_ref`]: pointer#method.as_ref-1
560    ///
561    /// # Safety
562    ///
563    /// When calling this method, you have to ensure that *either*
564    /// the pointer is null *or*
565    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
566    ///
567    /// # Panics during const evaluation
568    ///
569    /// This method will panic during const evaluation if the pointer cannot be
570    /// determined to be null or not. See [`is_null`] for more information.
571    ///
572    /// [`is_null`]: #method.is_null-1
573    ///
574    /// # Examples
575    ///
576    /// ```
577    /// let mut s = [1, 2, 3];
578    /// let ptr: *mut u32 = s.as_mut_ptr();
579    /// let first_value = unsafe { ptr.as_mut().unwrap() };
580    /// *first_value = 4;
581    /// # assert_eq!(s, [4, 2, 3]);
582    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
583    /// ```
584    ///
585    /// # Null-unchecked version
586    ///
587    /// If you are sure the pointer can never be null and are looking for some kind of
588    /// `as_mut_unchecked` that returns the `&mut T` instead of `Option<&mut T>`, know that
589    /// you can dereference the pointer directly.
590    ///
591    /// ```
592    /// let mut s = [1, 2, 3];
593    /// let ptr: *mut u32 = s.as_mut_ptr();
594    /// let first_value = unsafe { &mut *ptr };
595    /// *first_value = 4;
596    /// # assert_eq!(s, [4, 2, 3]);
597    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
598    /// ```
599    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
600    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
601    #[inline]
602    pub const unsafe fn as_mut<'a>(self) -> Option<&'a mut T> {
603        // SAFETY: the caller must guarantee that `self` is valid for
604        // a mutable reference if it isn't null.
605        if self.is_null() { None } else { unsafe { Some(&mut *self) } }
606    }
607
608    /// Returns a unique reference to the value behind the pointer.
609    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_mut`] must be used instead.
610    /// If the pointer may be null, but the value is known to have been initialized, [`as_mut`] must be used instead.
611    ///
612    /// For the shared counterpart see [`as_ref_unchecked`].
613    ///
614    /// [`as_mut`]: #method.as_mut
615    /// [`as_uninit_mut`]: #method.as_uninit_mut
616    /// [`as_ref_unchecked`]: #method.as_ref_unchecked
617    ///
618    /// # Safety
619    ///
620    /// When calling this method, you have to ensure that
621    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
622    ///
623    /// # Examples
624    ///
625    /// ```
626    /// #![feature(ptr_as_ref_unchecked)]
627    /// let mut s = [1, 2, 3];
628    /// let ptr: *mut u32 = s.as_mut_ptr();
629    /// let first_value = unsafe { ptr.as_mut_unchecked() };
630    /// *first_value = 4;
631    /// # assert_eq!(s, [4, 2, 3]);
632    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
633    /// ```
634    // FIXME: mention it in the docs for `as_mut` and `as_uninit_mut` once stabilized.
635    #[unstable(feature = "ptr_as_ref_unchecked", issue = "122034")]
636    #[inline]
637    #[must_use]
638    #[cfg(not(feature = "ferrocene_subset"))]
639    pub const unsafe fn as_mut_unchecked<'a>(self) -> &'a mut T {
640        // SAFETY: the caller must guarantee that `self` is valid for a reference
641        unsafe { &mut *self }
642    }
643
644    /// Returns `None` if the pointer is null, or else returns a unique reference to
645    /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
646    /// that the value has to be initialized.
647    ///
648    /// For the shared counterpart see [`as_uninit_ref`].
649    ///
650    /// [`as_mut`]: #method.as_mut
651    /// [`as_uninit_ref`]: pointer#method.as_uninit_ref-1
652    ///
653    /// # Safety
654    ///
655    /// When calling this method, you have to ensure that *either* the pointer is null *or*
656    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
657    ///
658    /// # Panics during const evaluation
659    ///
660    /// This method will panic during const evaluation if the pointer cannot be
661    /// determined to be null or not. See [`is_null`] for more information.
662    ///
663    /// [`is_null`]: #method.is_null-1
664    #[inline]
665    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
666    #[cfg(not(feature = "ferrocene_subset"))]
667    pub const unsafe fn as_uninit_mut<'a>(self) -> Option<&'a mut MaybeUninit<T>>
668    where
669        T: Sized,
670    {
671        // SAFETY: the caller must guarantee that `self` meets all the
672        // requirements for a reference.
673        if self.is_null() { None } else { Some(unsafe { &mut *(self as *mut MaybeUninit<T>) }) }
674    }
675
676    /// Returns whether two pointers are guaranteed to be equal.
677    ///
678    /// At runtime this function behaves like `Some(self == other)`.
679    /// However, in some contexts (e.g., compile-time evaluation),
680    /// it is not always possible to determine equality of two pointers, so this function may
681    /// spuriously return `None` for pointers that later actually turn out to have their equality known.
682    /// But when it returns `Some`, the pointers' equality is guaranteed to be known.
683    ///
684    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
685    /// version, and unsafe code must not
686    /// rely on the result of this function for soundness. It is suggested to only use this function
687    /// for performance optimizations where spurious `None` return values by this function do not
688    /// affect the outcome, but just the performance.
689    /// The consequences of using this method to make runtime and compile-time code behave
690    /// differently have not been explored. This method should not be used to introduce such
691    /// differences, and it should also not be stabilized before we have a better understanding
692    /// of this issue.
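    ///
    /// # Examples
    ///
    /// A minimal sketch; at runtime the comparison is always known, so this should
    /// return `Some`:
    ///
    /// ```
    /// #![feature(const_raw_ptr_comparison)]
    ///
    /// let mut x = 0u8;
    /// let ptr: *mut u8 = &mut x;
    /// assert_eq!(ptr.guaranteed_eq(ptr), Some(true));
    /// ```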
693    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
694    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
695    #[inline]
696    #[cfg(not(feature = "ferrocene_subset"))]
697    pub const fn guaranteed_eq(self, other: *mut T) -> Option<bool>
698    where
699        T: Sized,
700    {
701        (self as *const T).guaranteed_eq(other as _)
702    }
703
704    /// Returns whether two pointers are guaranteed to be unequal.
705    ///
706    /// At runtime this function behaves like `Some(self != other)`.
707    /// However, in some contexts (e.g., compile-time evaluation),
708    /// it is not always possible to determine inequality of two pointers, so this function may
709    /// spuriously return `None` for pointers that later actually turn out to have their inequality known.
710    /// But when it returns `Some`, the pointers' inequality is guaranteed to be known.
711    ///
712    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
713    /// version, and unsafe code must not
714    /// rely on the result of this function for soundness. It is suggested to only use this function
715    /// for performance optimizations where spurious `None` return values by this function do not
716    /// affect the outcome, but just the performance.
717    /// The consequences of using this method to make runtime and compile-time code behave
718    /// differently have not been explored. This method should not be used to introduce such
719    /// differences, and it should also not be stabilized before we have a better understanding
720    /// of this issue.
721    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
722    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
723    #[inline]
724    #[cfg(not(feature = "ferrocene_subset"))]
725    pub const fn guaranteed_ne(self, other: *mut T) -> Option<bool>
726    where
727        T: Sized,
728    {
729        (self as *const T).guaranteed_ne(other as _)
730    }
731
732    /// Calculates the distance between two pointers within the same allocation. The returned value is in
733    /// units of T: the distance in bytes divided by `size_of::<T>()`.
734    ///
735    /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
736    /// except that it has a lot more opportunities for UB, in exchange for the compiler
737    /// better understanding what you are doing.
738    ///
739    /// The primary motivation of this method is for computing the `len` of an array/slice
740    /// of `T` that you are currently representing as a "start" and "end" pointer
741    /// (and "end" is "one past the end" of the array).
742    /// In that case, `end.offset_from(start)` gets you the length of the array.
743    ///
744    /// All of the following safety requirements are trivially satisfied for this use case.
745    ///
746    /// [`offset`]: pointer#method.offset-1
747    ///
748    /// # Safety
749    ///
750    /// If any of the following conditions are violated, the result is Undefined Behavior:
751    ///
752    /// * `self` and `origin` must either
753    ///
754    ///   * point to the same address, or
755    ///   * both be [derived from][crate::ptr#provenance] a pointer to the same [allocation], and the memory range between
756    ///     the two pointers must be in bounds of that object. (See below for an example.)
757    ///
758    /// * The distance between the pointers, in bytes, must be an exact multiple
759    ///   of the size of `T`.
760    ///
761    /// As a consequence, the absolute distance between the pointers, in bytes, computed on
762    /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
763    /// implied by the in-bounds requirement, and the fact that no allocation can be larger
764    /// than `isize::MAX` bytes.
765    ///
766    /// The requirement for pointers to be derived from the same allocation is primarily
767    /// needed for `const`-compatibility: the distance between pointers into *different* allocated
768    /// objects is not known at compile-time. However, the requirement also exists at
769    /// runtime and may be exploited by optimizations. If you wish to compute the difference between
770    /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
771    /// origin as isize) / size_of::<T>()`.
772    // FIXME: recommend `addr()` instead of `as usize` once that is stable.
773    ///
774    /// [`add`]: #method.add
775    /// [allocation]: crate::ptr#allocation
776    ///
777    /// # Panics
778    ///
779    /// This function panics if `T` is a Zero-Sized Type ("ZST").
780    ///
781    /// # Examples
782    ///
783    /// Basic usage:
784    ///
785    /// ```
786    /// let mut a = [0; 5];
787    /// let ptr1: *mut i32 = &mut a[1];
788    /// let ptr2: *mut i32 = &mut a[3];
789    /// unsafe {
790    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
791    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
792    ///     assert_eq!(ptr1.offset(2), ptr2);
793    ///     assert_eq!(ptr2.offset(-2), ptr1);
794    /// }
795    /// ```
796    ///
797    /// *Incorrect* usage:
798    ///
799    /// ```rust,no_run
800    /// let ptr1 = Box::into_raw(Box::new(0u8));
801    /// let ptr2 = Box::into_raw(Box::new(1u8));
802    /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
803    /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
804    /// let ptr2_other = (ptr1 as *mut u8).wrapping_offset(diff).wrapping_offset(1);
805    /// assert_eq!(ptr2 as usize, ptr2_other as usize);
806    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
807    /// // computing their offset is undefined behavior, even though
808    /// // they point to addresses that are in-bounds of the same object!
809    /// unsafe {
810    ///     let one = ptr2_other.offset_from(ptr2); // Undefined Behavior! ⚠️
811    /// }
812    /// ```
813    #[stable(feature = "ptr_offset_from", since = "1.47.0")]
814    #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
815    #[inline(always)]
816    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
817    #[cfg(not(feature = "ferrocene_subset"))]
818    pub const unsafe fn offset_from(self, origin: *const T) -> isize
819    where
820        T: Sized,
821    {
822        // SAFETY: the caller must uphold the safety contract for `offset_from`.
823        unsafe { (self as *const T).offset_from(origin) }
824    }
825
826    /// Calculates the distance between two pointers within the same allocation. The returned value is in
827    /// units of **bytes**.
828    ///
829    /// This is purely a convenience for casting to a `u8` pointer and
830    /// using [`offset_from`][pointer::offset_from] on it. See that method for
831    /// documentation and safety requirements.
832    ///
833    /// For non-`Sized` pointees this operation considers only the data pointers,
834    /// ignoring the metadata.
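    ///
    /// # Examples
    ///
    /// A minimal sketch measuring the distance between two elements in bytes:
    ///
    /// ```
    /// let mut arr = [0u16; 4];
    /// let base: *mut u16 = arr.as_mut_ptr();
    /// let third: *mut u16 = base.wrapping_add(2);
    /// unsafe {
    ///     assert_eq!(third.byte_offset_from(base), 4);
    ///     assert_eq!(base.byte_offset_from(third), -4);
    /// }
    /// ```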
835    #[inline(always)]
836    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
837    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
838    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
839    #[cfg(not(feature = "ferrocene_subset"))]
840    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: *const U) -> isize {
841        // SAFETY: the caller must uphold the safety contract for `offset_from`.
842        unsafe { self.cast::<u8>().offset_from(origin.cast::<u8>()) }
843    }
844
845    /// Calculates the distance between two pointers within the same allocation, *where it's known that
846    /// `self` is equal to or greater than `origin`*. The returned value is in
847    /// units of T: the distance in bytes is divided by `size_of::<T>()`.
848    ///
849    /// This computes the same value that [`offset_from`](#method.offset_from)
850    /// would compute, but with the added precondition that the offset is
851    /// guaranteed to be non-negative.  This method is equivalent to
852    /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
853    /// but it provides slightly more information to the optimizer, which can
854    /// sometimes allow it to optimize slightly better with some backends.
855    ///
856    /// This method can be thought of as recovering the `count` that was passed
857    /// to [`add`](#method.add) (or, with the parameters in the other order,
858    /// to [`sub`](#method.sub)).  The following are all equivalent, assuming
859    /// that their safety preconditions are met:
860    /// ```rust
861    /// # unsafe fn blah(ptr: *mut i32, origin: *mut i32, count: usize) -> bool { unsafe {
862    /// ptr.offset_from_unsigned(origin) == count
863    /// # &&
864    /// origin.add(count) == ptr
865    /// # &&
866    /// ptr.sub(count) == origin
867    /// # } }
868    /// ```
869    ///
870    /// # Safety
871    ///
872    /// - The distance between the pointers must be non-negative (`self >= origin`)
873    ///
874    /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
875    ///   apply to this method as well; see it for the full details.
876    ///
877    /// Importantly, despite the return type of this method being able to represent
878    /// a larger offset, it's still *not permitted* to pass pointers which differ
879    /// by more than `isize::MAX` *bytes*.  As such, the result of this method will
880    /// always be less than or equal to `isize::MAX as usize`.
881    ///
882    /// # Panics
883    ///
884    /// This function panics if `T` is a Zero-Sized Type ("ZST").
885    ///
886    /// # Examples
887    ///
888    /// ```
889    /// let mut a = [0; 5];
890    /// let p: *mut i32 = a.as_mut_ptr();
891    /// unsafe {
892    ///     let ptr1: *mut i32 = p.add(1);
893    ///     let ptr2: *mut i32 = p.add(3);
894    ///
895    ///     assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
896    ///     assert_eq!(ptr1.add(2), ptr2);
897    ///     assert_eq!(ptr2.sub(2), ptr1);
898    ///     assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
899    /// }
900    ///
901    /// // This would be incorrect, as the pointers are not correctly ordered:
902    /// // ptr1.offset_from(ptr2)
903    /// ```
904    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
905    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
906    #[inline]
907    #[track_caller]
908    pub const unsafe fn offset_from_unsigned(self, origin: *const T) -> usize
909    where
910        T: Sized,
911    {
912        // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
913        unsafe { (self as *const T).offset_from_unsigned(origin) }
914    }
915
916    /// Calculates the distance between two pointers within the same allocation, *where it's known that
917    /// `self` is equal to or greater than `origin`*. The returned value is in
918    /// units of **bytes**.
919    ///
920    /// This is purely a convenience for casting to a `u8` pointer and
921    /// using [`offset_from_unsigned`][pointer::offset_from_unsigned] on it.
922    /// See that method for documentation and safety requirements.
923    ///
924    /// For non-`Sized` pointees this operation considers only the data pointers,
925    /// ignoring the metadata.
926    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
927    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
928    #[inline]
929    #[track_caller]
930    #[cfg(not(feature = "ferrocene_subset"))]
931    pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: *mut U) -> usize {
932        // SAFETY: the caller must uphold the safety contract for `byte_offset_from_unsigned`.
933        unsafe { (self as *const T).byte_offset_from_unsigned(origin) }
934    }
935
936    #[doc = include_str!("./docs/add.md")]
937    ///
938    /// # Examples
939    ///
940    /// ```
941    /// let mut s: String = "123".to_string();
942    /// let ptr: *mut u8 = s.as_mut_ptr();
943    ///
944    /// unsafe {
945    ///     assert_eq!('2', *ptr.add(1) as char);
946    ///     assert_eq!('3', *ptr.add(2) as char);
947    /// }
948    /// ```
949    #[stable(feature = "pointer_methods", since = "1.26.0")]
950    #[must_use = "returns a new pointer rather than modifying its argument"]
951    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
952    #[inline(always)]
953    #[track_caller]
954    pub const unsafe fn add(self, count: usize) -> Self
955    where
956        T: Sized,
957    {
958        #[cfg(debug_assertions)]
959        #[inline]
960        #[rustc_allow_const_fn_unstable(const_eval_select)]
961        const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
962            const_eval_select!(
963                @capture { this: *const (), count: usize, size: usize } -> bool:
964                if const {
965                    true
966                } else {
967                    let Some(byte_offset) = count.checked_mul(size) else {
968                        return false;
969                    };
970                    let (_, overflow) = this.addr().overflowing_add(byte_offset);
971                    byte_offset <= (isize::MAX as usize) && !overflow
972                }
973            )
974        }
975
976        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
977        ub_checks::assert_unsafe_precondition!(
978            check_language_ub,
979            "ptr::add requires that the address calculation does not overflow",
980            (
981                this: *const () = self as *const (),
982                count: usize = count,
983                size: usize = size_of::<T>(),
984            ) => runtime_add_nowrap(this, count, size)
985        );
986
987        // SAFETY: the caller must uphold the safety contract for `offset`.
988        unsafe { intrinsics::offset(self, count) }
989    }
990
991    /// Adds an unsigned offset in bytes to a pointer.
992    ///
993    /// `count` is in units of bytes.
994    ///
995    /// This is purely a convenience for casting to a `u8` pointer and
996    /// using [add][pointer::add] on it. See that method for documentation
997    /// and safety requirements.
998    ///
999    /// For non-`Sized` pointees this operation changes only the data pointer,
1000    /// leaving the metadata untouched.
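    ///
    /// # Examples
    ///
    /// A minimal sketch; adding `size_of::<u32>()` bytes is the same as `add(1)`:
    ///
    /// ```
    /// let mut pair = [1u32, 2];
    /// let ptr: *mut u32 = pair.as_mut_ptr();
    /// unsafe {
    ///     assert_eq!(*ptr.byte_add(4), 2);
    ///     assert_eq!(ptr.byte_add(4), ptr.add(1));
    /// }
    /// ```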
1001    #[must_use]
1002    #[inline(always)]
1003    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1004    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1005    #[track_caller]
1006    #[cfg(not(feature = "ferrocene_subset"))]
1007    pub const unsafe fn byte_add(self, count: usize) -> Self {
1008        // SAFETY: the caller must uphold the safety contract for `add`.
1009        unsafe { self.cast::<u8>().add(count).with_metadata_of(self) }
1010    }
1011
1012    /// Subtracts an unsigned offset from a pointer.
1013    ///
1014    /// This can only move the pointer backward (or not move it). If you need to move forward or
1015    /// backward depending on the value, then you might want [`offset`](#method.offset) instead
1016    /// which takes a signed offset.
1017    ///
1018    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1019    /// offset of `3 * size_of::<T>()` bytes.
1020    ///
1021    /// # Safety
1022    ///
1023    /// If any of the following conditions are violated, the result is Undefined Behavior:
1024    ///
1025    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
1026    ///   "wrapping around"), must fit in an `isize`.
1027    ///
1028    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
1029    ///   [allocation], and the entire memory range between `self` and the result must be in
1030    ///   bounds of that allocation. In particular, this range must not "wrap around" the edge
1031    ///   of the address space.
1032    ///
1033    /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
1034    /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
1035    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
1036    /// safe.
1037    ///
1038    /// Consider using [`wrapping_sub`] instead if these constraints are
1039    /// difficult to satisfy. The only advantage of this method is that it
1040    /// enables more aggressive compiler optimizations.
1041    ///
1042    /// [`wrapping_sub`]: #method.wrapping_sub
1043    /// [allocation]: crate::ptr#allocation
1044    ///
1045    /// # Examples
1046    ///
1047    /// ```
1048    /// let s: &str = "123";
1049    ///
1050    /// unsafe {
1051    ///     let end: *const u8 = s.as_ptr().add(3);
1052    ///     assert_eq!('3', *end.sub(1) as char);
1053    ///     assert_eq!('2', *end.sub(2) as char);
1054    /// }
1055    /// ```
1056    #[stable(feature = "pointer_methods", since = "1.26.0")]
1057    #[must_use = "returns a new pointer rather than modifying its argument"]
1058    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
1059    #[inline(always)]
1060    #[track_caller]
1061    pub const unsafe fn sub(self, count: usize) -> Self
1062    where
1063        T: Sized,
1064    {
1065        #[cfg(debug_assertions)]
1066        #[inline]
1067        #[rustc_allow_const_fn_unstable(const_eval_select)]
1068        const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
1069            const_eval_select!(
1070                @capture { this: *const (), count: usize, size: usize } -> bool:
1071                if const {
1072                    true
1073                } else {
1074                    let Some(byte_offset) = count.checked_mul(size) else {
1075                        return false;
1076                    };
1077                    byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
1078                }
1079            )
1080        }
1081
1082        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
1083        ub_checks::assert_unsafe_precondition!(
1084            check_language_ub,
1085            "ptr::sub requires that the address calculation does not overflow",
1086            (
1087                this: *const () = self as *const (),
1088                count: usize = count,
1089                size: usize = size_of::<T>(),
1090            ) => runtime_sub_nowrap(this, count, size)
1091        );
1092
1093        if T::IS_ZST {
1094            // Pointer arithmetic does nothing when the pointee is a ZST.
1095            self
1096        } else {
1097            // SAFETY: the caller must uphold the safety contract for `offset`.
1098            // Because the pointee is *not* a ZST, that means that `count` is
1099            // at most `isize::MAX`, and thus the negation cannot overflow.
1100            unsafe { intrinsics::offset(self, intrinsics::unchecked_sub(0, count as isize)) }
1101        }
1102    }
1103
1104    /// Subtracts an unsigned offset in bytes from a pointer.
1105    ///
1106    /// `count` is in units of bytes.
1107    ///
1108    /// This is purely a convenience for casting to a `u8` pointer and
1109    /// using [sub][pointer::sub] on it. See that method for documentation
1110    /// and safety requirements.
1111    ///
1112    /// For non-`Sized` pointees this operation changes only the data pointer,
1113    /// leaving the metadata untouched.
1114    #[must_use]
1115    #[inline(always)]
1116    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1117    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1118    #[track_caller]
1119    #[cfg(not(feature = "ferrocene_subset"))]
1120    pub const unsafe fn byte_sub(self, count: usize) -> Self {
1121        // SAFETY: the caller must uphold the safety contract for `sub`.
1122        unsafe { self.cast::<u8>().sub(count).with_metadata_of(self) }
1123    }
1124
1125    /// Adds an unsigned offset to a pointer using wrapping arithmetic.
1126    ///
1127    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1128    /// offset of `3 * size_of::<T>()` bytes.
1129    ///
1130    /// # Safety
1131    ///
1132    /// This operation itself is always safe, but using the resulting pointer is not.
1133    ///
1134    /// The resulting pointer "remembers" the [allocation] that `self` points to; it must not
1135    /// be used to read or write other allocations.
1136    ///
1137    /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
1138    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
1139    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
1140    /// `x` and `y` point into the same allocation.
1141    ///
1142    /// Compared to [`add`], this method basically delays the requirement of staying within the
1143    /// same allocation: [`add`] is immediate Undefined Behavior when crossing object
1144    /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
1145    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
1146    /// can be optimized better and is thus preferable in performance-sensitive code.
1147    ///
1148    /// The delayed check only considers the value of the pointer that was dereferenced, not the
1149    /// intermediate values used during the computation of the final result. For example,
1150    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
1151    /// allocation and then re-entering it later is permitted.
1152    ///
1153    /// [`add`]: #method.add
1154    /// [allocation]: crate::ptr#allocation
1155    ///
1156    /// # Examples
1157    ///
1158    /// ```
1159    /// // Iterate using a raw pointer in increments of two elements
1160    /// let data = [1u8, 2, 3, 4, 5];
1161    /// let mut ptr: *const u8 = data.as_ptr();
1162    /// let step = 2;
1163    /// let end_rounded_up = ptr.wrapping_add(6);
1164    ///
1165    /// // This loop prints "1, 3, 5, "
1166    /// while ptr != end_rounded_up {
1167    ///     unsafe {
1168    ///         print!("{}, ", *ptr);
1169    ///     }
1170    ///     ptr = ptr.wrapping_add(step);
1171    /// }
1172    /// ```
1173    #[stable(feature = "pointer_methods", since = "1.26.0")]
1174    #[must_use = "returns a new pointer rather than modifying its argument"]
1175    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
1176    #[inline(always)]
1177    pub const fn wrapping_add(self, count: usize) -> Self
1178    where
1179        T: Sized,
1180    {
1181        self.wrapping_offset(count as isize)
1182    }
1183
1184    /// Adds an unsigned offset in bytes to a pointer using wrapping arithmetic.
1185    ///
1186    /// `count` is in units of bytes.
1187    ///
1188    /// This is purely a convenience for casting to a `u8` pointer and
1189    /// using [wrapping_add][pointer::wrapping_add] on it. See that method for documentation.
1190    ///
1191    /// For non-`Sized` pointees this operation changes only the data pointer,
1192    /// leaving the metadata untouched.
1193    #[must_use]
1194    #[inline(always)]
1195    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1196    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1197    #[cfg(not(feature = "ferrocene_subset"))]
1198    pub const fn wrapping_byte_add(self, count: usize) -> Self {
1199        self.cast::<u8>().wrapping_add(count).with_metadata_of(self)
1200    }
1201
1202    /// Subtracts an unsigned offset from a pointer using wrapping arithmetic.
1203    ///
1204    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1205    /// offset of `3 * size_of::<T>()` bytes.
1206    ///
1207    /// # Safety
1208    ///
1209    /// This operation itself is always safe, but using the resulting pointer is not.
1210    ///
1211    /// The resulting pointer "remembers" the [allocation] that `self` points to; it must not
1212    /// be used to read or write other allocations.
1213    ///
1214    /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
1215    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
1216    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
1217    /// `x` and `y` point into the same allocation.
1218    ///
1219    /// Compared to [`sub`], this method basically delays the requirement of staying within the
1220    /// same allocation: [`sub`] is immediate Undefined Behavior when crossing object
1221    /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
1222    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
1223    /// can be optimized better and is thus preferable in performance-sensitive code.
1224    ///
1225    /// The delayed check only considers the value of the pointer that was dereferenced, not the
1226    /// intermediate values used during the computation of the final result. For example,
1227    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
1228    /// allocation and then re-entering it later is permitted.
1229    ///
1230    /// [`sub`]: #method.sub
1231    /// [allocation]: crate::ptr#allocation
1232    ///
1233    /// # Examples
1234    ///
1235    /// ```
1236    /// // Iterate using a raw pointer in increments of two elements (backwards)
1237    /// let data = [1u8, 2, 3, 4, 5];
1238    /// let mut ptr: *const u8 = data.as_ptr();
1239    /// let start_rounded_down = ptr.wrapping_sub(2);
1240    /// ptr = ptr.wrapping_add(4);
1241    /// let step = 2;
1242    /// // This loop prints "5, 3, 1, "
1243    /// while ptr != start_rounded_down {
1244    ///     unsafe {
1245    ///         print!("{}, ", *ptr);
1246    ///     }
1247    ///     ptr = ptr.wrapping_sub(step);
1248    /// }
1249    /// ```
1250    #[stable(feature = "pointer_methods", since = "1.26.0")]
1251    #[must_use = "returns a new pointer rather than modifying its argument"]
1252    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
1253    #[inline(always)]
1254    #[cfg(not(feature = "ferrocene_subset"))]
1255    pub const fn wrapping_sub(self, count: usize) -> Self
1256    where
1257        T: Sized,
1258    {
1259        self.wrapping_offset((count as isize).wrapping_neg())
1260    }
1261
1262    /// Subtracts an unsigned offset in bytes from a pointer using wrapping arithmetic.
1263    ///
1264    /// `count` is in units of bytes.
1265    ///
1266    /// This is purely a convenience for casting to a `u8` pointer and
1267    /// using [wrapping_sub][pointer::wrapping_sub] on it. See that method for documentation.
1268    ///
1269    /// For non-`Sized` pointees this operation changes only the data pointer,
1270    /// leaving the metadata untouched.
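    ///
    /// # Examples
    ///
    /// A minimal sketch of typical usage:
    ///
    /// ```
    /// let mut x = 0u32;
    /// let ptr: *mut u32 = &mut x;
    /// // Stepping forward and back by the same number of bytes returns the
    /// // original pointer.
    /// assert_eq!(ptr.wrapping_byte_add(4).wrapping_byte_sub(4), ptr);
    /// ```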
1271    #[must_use]
1272    #[inline(always)]
1273    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1274    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1275    #[cfg(not(feature = "ferrocene_subset"))]
1276    pub const fn wrapping_byte_sub(self, count: usize) -> Self {
1277        self.cast::<u8>().wrapping_sub(count).with_metadata_of(self)
1278    }
1279
1280    /// Reads the value from `self` without moving it. This leaves the
1281    /// memory in `self` unchanged.
1282    ///
1283    /// See [`ptr::read`] for safety concerns and examples.
1284    ///
1285    /// [`ptr::read`]: crate::ptr::read()
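    ///
    /// # Examples
    ///
    /// A minimal usage sketch:
    ///
    /// ```
    /// let mut x = 12u32;
    /// let ptr: *mut u32 = &mut x;
    /// // SAFETY: `ptr` is valid for reads, properly aligned, and points to an
    /// // initialized `u32`.
    /// let value = unsafe { ptr.read() };
    /// assert_eq!(value, 12);
    /// ```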
1286    #[stable(feature = "pointer_methods", since = "1.26.0")]
1287    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
1288    #[inline(always)]
1289    #[track_caller]
1290    pub const unsafe fn read(self) -> T
1291    where
1292        T: Sized,
1293    {
1294        // SAFETY: the caller must uphold the safety contract for `read`.
1295        unsafe { read(self) }
1296    }
1297
1298    /// Performs a volatile read of the value from `self` without moving it. This
1299    /// leaves the memory in `self` unchanged.
1300    ///
1301    /// Volatile operations are intended to act on I/O memory, and are guaranteed
1302    /// to not be elided or reordered by the compiler across other volatile
1303    /// operations.
1304    ///
1305    /// See [`ptr::read_volatile`] for safety concerns and examples.
1306    ///
1307    /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
1308    #[stable(feature = "pointer_methods", since = "1.26.0")]
1309    #[inline(always)]
1310    #[track_caller]
1311    #[cfg(not(feature = "ferrocene_subset"))]
1312    pub unsafe fn read_volatile(self) -> T
1313    where
1314        T: Sized,
1315    {
1316        // SAFETY: the caller must uphold the safety contract for `read_volatile`.
1317        unsafe { read_volatile(self) }
1318    }
1319
1320    /// Reads the value from `self` without moving it. This leaves the
1321    /// memory in `self` unchanged.
1322    ///
1323    /// Unlike `read`, the pointer may be unaligned.
1324    ///
1325    /// See [`ptr::read_unaligned`] for safety concerns and examples.
1326    ///
1327    /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
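    ///
    /// # Examples
    ///
    /// A minimal sketch, using a packed struct purely to obtain an unaligned pointer:
    ///
    /// ```
    /// #[repr(packed)]
    /// struct Packed {
    ///     _pad: u8,
    ///     field: u32,
    /// }
    ///
    /// let mut packed = Packed { _pad: 0, field: 0x0102_0304 };
    /// let ptr: *mut u32 = &raw mut packed.field;
    /// // SAFETY: `ptr` is valid for reads of an initialized `u32`, though unaligned.
    /// let value = unsafe { ptr.read_unaligned() };
    /// assert_eq!(value, 0x0102_0304);
    /// ```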
1328    #[stable(feature = "pointer_methods", since = "1.26.0")]
1329    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
1330    #[inline(always)]
1331    #[track_caller]
1332    #[cfg(not(feature = "ferrocene_subset"))]
1333    pub const unsafe fn read_unaligned(self) -> T
1334    where
1335        T: Sized,
1336    {
1337        // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
1338        unsafe { read_unaligned(self) }
1339    }
1340
1341    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1342    /// and destination may overlap.
1343    ///
1344    /// NOTE: this has the *same* argument order as [`ptr::copy`].
1345    ///
1346    /// See [`ptr::copy`] for safety concerns and examples.
1347    ///
1348    /// [`ptr::copy`]: crate::ptr::copy()
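    ///
    /// # Examples
    ///
    /// A minimal usage sketch:
    ///
    /// ```
    /// let mut src = [1u8, 2, 3];
    /// let mut dst = [0u8; 3];
    /// let src_ptr: *mut u8 = src.as_mut_ptr();
    /// // SAFETY: both pointers are valid and properly aligned for three `u8`s.
    /// unsafe { src_ptr.copy_to(dst.as_mut_ptr(), 3) };
    /// assert_eq!(dst, [1, 2, 3]);
    /// ```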
1349    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1350    #[stable(feature = "pointer_methods", since = "1.26.0")]
1351    #[inline(always)]
1352    #[track_caller]
1353    #[cfg(not(feature = "ferrocene_subset"))]
1354    pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
1355    where
1356        T: Sized,
1357    {
1358        // SAFETY: the caller must uphold the safety contract for `copy`.
1359        unsafe { copy(self, dest, count) }
1360    }
1361
1362    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1363    /// and destination may *not* overlap.
1364    ///
1365    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
1366    ///
1367    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1368    ///
1369    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1370    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1371    #[stable(feature = "pointer_methods", since = "1.26.0")]
1372    #[inline(always)]
1373    #[track_caller]
1374    #[cfg(not(feature = "ferrocene_subset"))]
1375    pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
1376    where
1377        T: Sized,
1378    {
1379        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1380        unsafe { copy_nonoverlapping(self, dest, count) }
1381    }
1382
1383    /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1384    /// and destination may overlap.
1385    ///
1386    /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
1387    ///
1388    /// See [`ptr::copy`] for safety concerns and examples.
1389    ///
1390    /// [`ptr::copy`]: crate::ptr::copy()
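    ///
    /// # Examples
    ///
    /// A minimal usage sketch:
    ///
    /// ```
    /// let src = [1u8, 2, 3];
    /// let mut dst = [0u8; 3];
    /// let dst_ptr: *mut u8 = dst.as_mut_ptr();
    /// // SAFETY: both pointers are valid and properly aligned for three `u8`s.
    /// unsafe { dst_ptr.copy_from(src.as_ptr(), 3) };
    /// assert_eq!(dst, [1, 2, 3]);
    /// ```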
1391    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1392    #[stable(feature = "pointer_methods", since = "1.26.0")]
1393    #[inline(always)]
1394    #[track_caller]
1395    #[cfg(not(feature = "ferrocene_subset"))]
1396    pub const unsafe fn copy_from(self, src: *const T, count: usize)
1397    where
1398        T: Sized,
1399    {
1400        // SAFETY: the caller must uphold the safety contract for `copy`.
1401        unsafe { copy(src, self, count) }
1402    }
1403
1404    /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1405    /// and destination may *not* overlap.
1406    ///
1407    /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
1408    ///
1409    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1410    ///
1411    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1412    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1413    #[stable(feature = "pointer_methods", since = "1.26.0")]
1414    #[inline(always)]
1415    #[track_caller]
1416    #[cfg(not(feature = "ferrocene_subset"))]
1417    pub const unsafe fn copy_from_nonoverlapping(self, src: *const T, count: usize)
1418    where
1419        T: Sized,
1420    {
1421        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1422        unsafe { copy_nonoverlapping(src, self, count) }
1423    }
1424
1425    /// Executes the destructor (if any) of the pointed-to value.
1426    ///
1427    /// See [`ptr::drop_in_place`] for safety concerns and examples.
1428    ///
1429    /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
1430    #[stable(feature = "pointer_methods", since = "1.26.0")]
1431    #[rustc_const_unstable(feature = "const_drop_in_place", issue = "109342")]
1432    #[inline(always)]
1433    #[cfg(not(feature = "ferrocene_subset"))]
1434    pub const unsafe fn drop_in_place(self)
1435    where
1436        T: [const] Destruct,
1437    {
1438        // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
1439        unsafe { drop_in_place(self) }
1440    }
1441
1442    /// Overwrites a memory location with the given value without reading or
1443    /// dropping the old value.
1444    ///
1445    /// See [`ptr::write`] for safety concerns and examples.
1446    ///
1447    /// [`ptr::write`]: crate::ptr::write()
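    ///
    /// # Examples
    ///
    /// A minimal usage sketch:
    ///
    /// ```
    /// let mut x = 0u32;
    /// let ptr: *mut u32 = &mut x;
    /// // SAFETY: `ptr` is valid for writes and properly aligned.
    /// unsafe { ptr.write(42) };
    /// assert_eq!(x, 42);
    /// ```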
1448    #[stable(feature = "pointer_methods", since = "1.26.0")]
1449    #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1450    #[inline(always)]
1451    #[track_caller]
1452    pub const unsafe fn write(self, val: T)
1453    where
1454        T: Sized,
1455    {
1456        // SAFETY: the caller must uphold the safety contract for `write`.
1457        unsafe { write(self, val) }
1458    }
1459
1460    /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
1461    /// bytes of memory starting at `self` to `val`.
1462    ///
1463    /// See [`ptr::write_bytes`] for safety concerns and examples.
1464    ///
1465    /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
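    ///
    /// # Examples
    ///
    /// A minimal usage sketch:
    ///
    /// ```
    /// let mut buf = [0u32; 4];
    /// let ptr: *mut u32 = buf.as_mut_ptr();
    /// // SAFETY: `ptr` is valid for writes of `4 * size_of::<u32>()` bytes.
    /// unsafe { ptr.write_bytes(0xff, 4) };
    /// assert_eq!(buf, [u32::MAX; 4]);
    /// ```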
1466    #[doc(alias = "memset")]
1467    #[stable(feature = "pointer_methods", since = "1.26.0")]
1468    #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1469    #[inline(always)]
1470    #[track_caller]
1471    pub const unsafe fn write_bytes(self, val: u8, count: usize)
1472    where
1473        T: Sized,
1474    {
1475        // SAFETY: the caller must uphold the safety contract for `write_bytes`.
1476        unsafe { write_bytes(self, val, count) }
1477    }
1478
1479    /// Performs a volatile write of a memory location with the given value without
1480    /// reading or dropping the old value.
1481    ///
1482    /// Volatile operations are intended to act on I/O memory, and are guaranteed
1483    /// to not be elided or reordered by the compiler across other volatile
1484    /// operations.
1485    ///
1486    /// See [`ptr::write_volatile`] for safety concerns and examples.
1487    ///
1488    /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
1489    #[stable(feature = "pointer_methods", since = "1.26.0")]
1490    #[inline(always)]
1491    #[track_caller]
1492    #[cfg(not(feature = "ferrocene_subset"))]
1493    pub unsafe fn write_volatile(self, val: T)
1494    where
1495        T: Sized,
1496    {
1497        // SAFETY: the caller must uphold the safety contract for `write_volatile`.
1498        unsafe { write_volatile(self, val) }
1499    }
1500
1501    /// Overwrites a memory location with the given value without reading or
1502    /// dropping the old value.
1503    ///
1504    /// Unlike `write`, the pointer may be unaligned.
1505    ///
1506    /// See [`ptr::write_unaligned`] for safety concerns and examples.
1507    ///
1508    /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
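    ///
    /// # Examples
    ///
    /// A minimal sketch, using a packed struct purely to obtain an unaligned pointer:
    ///
    /// ```
    /// #[repr(packed)]
    /// struct Packed {
    ///     _pad: u8,
    ///     field: u32,
    /// }
    ///
    /// let mut packed = Packed { _pad: 0, field: 0 };
    /// let ptr: *mut u32 = &raw mut packed.field;
    /// // SAFETY: `ptr` is valid for writes of a `u32`, though unaligned.
    /// unsafe { ptr.write_unaligned(42) };
    /// // Copy the field out (braces avoid taking a reference to the unaligned field).
    /// assert_eq!({ packed.field }, 42);
    /// ```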
1509    #[stable(feature = "pointer_methods", since = "1.26.0")]
1510    #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1511    #[inline(always)]
1512    #[track_caller]
1513    #[cfg(not(feature = "ferrocene_subset"))]
1514    pub const unsafe fn write_unaligned(self, val: T)
1515    where
1516        T: Sized,
1517    {
1518        // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
1519        unsafe { write_unaligned(self, val) }
1520    }
1521
1522    /// Replaces the value at `self` with `src`, returning the old
1523    /// value, without dropping either.
1524    ///
1525    /// See [`ptr::replace`] for safety concerns and examples.
1526    ///
1527    /// [`ptr::replace`]: crate::ptr::replace()
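    ///
    /// # Examples
    ///
    /// A minimal usage sketch:
    ///
    /// ```
    /// let mut x = 1u32;
    /// let ptr: *mut u32 = &mut x;
    /// // SAFETY: `ptr` is valid for reads and writes and properly aligned.
    /// let old = unsafe { ptr.replace(2) };
    /// assert_eq!(old, 1);
    /// assert_eq!(x, 2);
    /// ```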
1528    #[stable(feature = "pointer_methods", since = "1.26.0")]
1529    #[rustc_const_stable(feature = "const_inherent_ptr_replace", since = "1.88.0")]
1530    #[inline(always)]
1531    pub const unsafe fn replace(self, src: T) -> T
1532    where
1533        T: Sized,
1534    {
1535        // SAFETY: the caller must uphold the safety contract for `replace`.
1536        unsafe { replace(self, src) }
1537    }
1538
1539    /// Swaps the values at two mutable locations of the same type, without
1540    /// deinitializing either. They may overlap, unlike `mem::swap` which is
1541    /// otherwise equivalent.
1542    ///
1543    /// See [`ptr::swap`] for safety concerns and examples.
1544    ///
1545    /// [`ptr::swap`]: crate::ptr::swap()
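    ///
    /// # Examples
    ///
    /// A minimal usage sketch:
    ///
    /// ```
    /// let mut a = 1u32;
    /// let mut b = 2u32;
    /// let pa: *mut u32 = &mut a;
    /// let pb: *mut u32 = &mut b;
    /// // SAFETY: both pointers are valid for reads and writes and properly aligned.
    /// unsafe { pa.swap(pb) };
    /// assert_eq!(a, 2);
    /// assert_eq!(b, 1);
    /// ```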
1546    #[stable(feature = "pointer_methods", since = "1.26.0")]
1547    #[rustc_const_stable(feature = "const_swap", since = "1.85.0")]
1548    #[inline(always)]
1549    #[cfg(not(feature = "ferrocene_subset"))]
1550    pub const unsafe fn swap(self, with: *mut T)
1551    where
1552        T: Sized,
1553    {
1554        // SAFETY: the caller must uphold the safety contract for `swap`.
1555        unsafe { swap(self, with) }
1556    }
1557
1558    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
1559    /// `align`.
1560    ///
1561    /// If it is not possible to align the pointer, the implementation returns
1562    /// `usize::MAX`.
1563    ///
1564    /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
1565    /// used with the `wrapping_add` method.
1566    ///
1567    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
1568    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
1569    /// the returned offset is correct in all terms other than alignment.
1570    ///
1571    /// # Panics
1572    ///
1573    /// The function panics if `align` is not a power-of-two.
1574    ///
1575    /// # Examples
1576    ///
1577    /// Accessing adjacent `u8` as `u16`
1578    ///
1579    /// ```
1580    /// # unsafe {
1581    /// let mut x = [5_u8, 6, 7, 8, 9];
1582    /// let ptr = x.as_mut_ptr();
1583    /// let offset = ptr.align_offset(align_of::<u16>());
1584    ///
1585    /// if offset < x.len() - 1 {
1586    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
1587    ///     *u16_ptr = 0;
1588    ///
1589    ///     assert!(x == [0, 0, 7, 8, 9] || x == [5, 0, 0, 8, 9]);
1590    /// } else {
1591    ///     // while the pointer can be aligned via `offset`, it would point
1592    ///     // outside the allocation
1593    /// }
1594    /// # }
1595    /// ```
1596    #[must_use]
1597    #[inline]
1598    #[stable(feature = "align_offset", since = "1.36.0")]
1599    #[cfg(not(feature = "ferrocene_subset"))]
1600    pub fn align_offset(self, align: usize) -> usize
1601    where
1602        T: Sized,
1603    {
1604        if !align.is_power_of_two() {
1605            panic!("align_offset: align is not a power-of-two");
1606        }
1607
1608        // SAFETY: `align` has been checked to be a power of 2 above
1609        let ret = unsafe { align_offset(self, align) };
1610
1611        // Inform Miri that we want to consider the resulting pointer to be suitably aligned.
1612        #[cfg(miri)]
1613        if ret != usize::MAX {
1614            intrinsics::miri_promise_symbolic_alignment(
1615                self.wrapping_add(ret).cast_const().cast(),
1616                align,
1617            );
1618        }
1619
1620        ret
1621    }
1622
1623    /// Returns whether the pointer is properly aligned for `T`.
1624    ///
1625    /// # Examples
1626    ///
1627    /// ```
1628    /// // On some platforms, the alignment of i32 is less than 4.
1629    /// #[repr(align(4))]
1630    /// struct AlignedI32(i32);
1631    ///
1632    /// let mut data = AlignedI32(42);
1633    /// let ptr = &mut data as *mut AlignedI32;
1634    ///
1635    /// assert!(ptr.is_aligned());
1636    /// assert!(!ptr.wrapping_byte_add(1).is_aligned());
1637    /// ```
1638    #[must_use]
1639    #[inline]
1640    #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
1641    #[cfg(not(feature = "ferrocene_subset"))]
1642    pub fn is_aligned(self) -> bool
1643    where
1644        T: Sized,
1645    {
1646        self.is_aligned_to(align_of::<T>())
1647    }
1648
1649    /// Returns whether the pointer is aligned to `align`.
1650    ///
1651    /// For non-`Sized` pointees this operation considers only the data pointer,
1652    /// ignoring the metadata.
1653    ///
1654    /// # Panics
1655    ///
1656    /// The function panics if `align` is not a power-of-two (this includes 0).
1657    ///
1658    /// # Examples
1659    ///
1660    /// ```
1661    /// #![feature(pointer_is_aligned_to)]
1662    ///
1663    /// // On some platforms, the alignment of i32 is less than 4.
1664    /// #[repr(align(4))]
1665    /// struct AlignedI32(i32);
1666    ///
1667    /// let mut data = AlignedI32(42);
1668    /// let ptr = &mut data as *mut AlignedI32;
1669    ///
1670    /// assert!(ptr.is_aligned_to(1));
1671    /// assert!(ptr.is_aligned_to(2));
1672    /// assert!(ptr.is_aligned_to(4));
1673    ///
1674    /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
1675    /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
1676    ///
1677    /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
1678    /// ```
1679    #[must_use]
1680    #[inline]
1681    #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
1682    #[cfg(not(feature = "ferrocene_subset"))]
1683    pub fn is_aligned_to(self, align: usize) -> bool {
1684        if !align.is_power_of_two() {
1685            panic!("is_aligned_to: align is not a power-of-two");
1686        }
1687
1688        self.addr() & (align - 1) == 0
1689    }
1690}
1691
1692impl<T> *mut T {
1693    /// Casts from a type to its maybe-uninitialized version.
1694    ///
1695    /// This is always safe, since UB can only occur if the pointed-to value is read
1696    /// before being initialized.
1697    #[must_use]
1698    #[inline(always)]
1699    #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
1700    #[cfg(not(feature = "ferrocene_subset"))]
1701    pub const fn cast_uninit(self) -> *mut MaybeUninit<T> {
1702        self as _
1703    }
1704}
1705#[cfg(not(feature = "ferrocene_subset"))]
1706impl<T> *mut MaybeUninit<T> {
1707    /// Casts from a maybe-uninitialized type to its initialized version.
1708    ///
1709    /// This is always safe, since UB can only occur if the pointed-to value is read
1710    /// before being initialized.
1711    #[must_use]
1712    #[inline(always)]
1713    #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
1714    pub const fn cast_init(self) -> *mut T {
1715        self as _
1716    }
1717}
1718
1719impl<T> *mut [T] {
1720    /// Returns the length of a raw slice.
1721    ///
1722    /// The returned value is the number of **elements**, not the number of bytes.
1723    ///
1724    /// This function is safe, even when the raw slice cannot be cast to a slice
1725    /// reference because the pointer is null or unaligned.
1726    ///
1727    /// # Examples
1728    ///
1729    /// ```rust
1730    /// use std::ptr;
1731    ///
1732    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
1733    /// assert_eq!(slice.len(), 3);
1734    /// ```
1735    #[inline(always)]
1736    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
1737    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
1738    pub const fn len(self) -> usize {
1739        metadata(self)
1740    }
1741
1742    /// Returns `true` if the raw slice has a length of 0.
1743    ///
1744    /// # Examples
1745    ///
1746    /// ```
1747    /// use std::ptr;
1748    ///
1749    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
1750    /// assert!(!slice.is_empty());
1751    /// ```
1752    #[inline(always)]
1753    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
1754    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
1755    pub const fn is_empty(self) -> bool {
1756        self.len() == 0
1757    }
1758
1759    /// Gets a raw, mutable pointer to the underlying array.
1760    ///
1761    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
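    ///
    /// # Examples
    ///
    /// A minimal usage sketch:
    ///
    /// ```
    /// let mut v = [1u8, 2, 3];
    /// let slice_ptr = &mut v as *mut [u8];
    /// // The length matches `N = 3`, so the conversion succeeds.
    /// assert!(slice_ptr.as_mut_array::<3>().is_some());
    /// // A mismatched length yields `None`.
    /// assert!(slice_ptr.as_mut_array::<4>().is_none());
    /// ```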
1762    #[stable(feature = "core_slice_as_array", since = "CURRENT_RUSTC_VERSION")]
1763    #[rustc_const_stable(feature = "core_slice_as_array", since = "CURRENT_RUSTC_VERSION")]
1764    #[inline]
1765    #[must_use]
1766    pub const fn as_mut_array<const N: usize>(self) -> Option<*mut [T; N]> {
1767        if self.len() == N {
1768            let me = self.as_mut_ptr() as *mut [T; N];
1769            Some(me)
1770        } else {
1771            None
1772        }
1773    }
1774
1775    /// Divides one mutable raw slice into two at an index.
1776    ///
1777    /// The first will contain all indices from `[0, mid)` (excluding
1778    /// the index `mid` itself) and the second will contain all
1779    /// indices from `[mid, len)` (excluding the index `len` itself).
1780    ///
1781    /// # Panics
1782    ///
1783    /// Panics if `mid > len`.
1784    ///
1785    /// # Safety
1786    ///
1787    /// `mid` must be [in-bounds] of the underlying [allocation]; that is, `self` must be
1788    /// dereferenceable and span a single allocation that is at least
1789    /// `mid * size_of::<T>()` bytes long. Not upholding these
1790    /// requirements is *[undefined behavior]* even if the resulting pointers are not used.
1791    ///
1792    /// Since `len` being in-bounds is not a safety invariant of `*mut [T]`, the
1793    /// safety requirements of this method are the same as for [`split_at_mut_unchecked`].
1794    /// The explicit bounds check is only as useful as `len` is correct.
1795    ///
1796    /// [`split_at_mut_unchecked`]: #method.split_at_mut_unchecked
1797    /// [in-bounds]: #method.add
1798    /// [allocation]: crate::ptr#allocation
1799    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1800    ///
1801    /// # Examples
1802    ///
1803    /// ```
1804    /// #![feature(raw_slice_split)]
1805    /// #![feature(slice_ptr_get)]
1806    ///
1807    /// let mut v = [1, 0, 3, 0, 5, 6];
1808    /// let ptr = &mut v as *mut [_];
1809    /// unsafe {
1810    ///     let (left, right) = ptr.split_at_mut(2);
1811    ///     assert_eq!(&*left, [1, 0]);
1812    ///     assert_eq!(&*right, [3, 0, 5, 6]);
1813    /// }
1814    /// ```
1815    #[inline(always)]
1816    #[track_caller]
1817    #[unstable(feature = "raw_slice_split", issue = "95595")]
1818    pub unsafe fn split_at_mut(self, mid: usize) -> (*mut [T], *mut [T]) {
1819        assert!(mid <= self.len());
1820        // SAFETY: The assert above is only a safety-net as long as `self.len()` is correct
1821        // The actual safety requirements of this function are the same as for `split_at_mut_unchecked`
1822        unsafe { self.split_at_mut_unchecked(mid) }
1823    }
1824
1825    /// Divides one mutable raw slice into two at an index, without doing bounds checking.
1826    ///
1827    /// The first will contain all indices from `[0, mid)` (excluding
1828    /// the index `mid` itself) and the second will contain all
1829    /// indices from `[mid, len)` (excluding the index `len` itself).
1830    ///
1831    /// # Safety
1832    ///
1833    /// `mid` must be [in-bounds] of the underlying [allocation]; that is, `self` must be
1834    /// dereferenceable and span a single allocation that is at least
1835    /// `mid * size_of::<T>()` bytes long. Not upholding these
1836    /// requirements is *[undefined behavior]* even if the resulting pointers are not used.
1837    ///
1838    /// [in-bounds]: #method.add
1840    /// [allocation]: crate::ptr#allocation
1841    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1842    ///
1843    /// # Examples
1844    ///
1845    /// ```
1846    /// #![feature(raw_slice_split)]
1847    ///
1848    /// let mut v = [1, 0, 3, 0, 5, 6];
1849    /// // scoped to restrict the lifetime of the borrows
1850    /// unsafe {
1851    ///     let ptr = &mut v as *mut [_];
1852    ///     let (left, right) = ptr.split_at_mut_unchecked(2);
1853    ///     assert_eq!(&*left, [1, 0]);
1854    ///     assert_eq!(&*right, [3, 0, 5, 6]);
1855    ///     (&mut *left)[1] = 2;
1856    ///     (&mut *right)[1] = 4;
1857    /// }
1858    /// assert_eq!(v, [1, 2, 3, 4, 5, 6]);
1859    /// ```
1860    #[inline(always)]
1861    #[unstable(feature = "raw_slice_split", issue = "95595")]
1862    pub unsafe fn split_at_mut_unchecked(self, mid: usize) -> (*mut [T], *mut [T]) {
1863        let len = self.len();
1864        let ptr = self.as_mut_ptr();
1865
1866        // SAFETY: Caller must pass a valid pointer and an index that is in-bounds.
1867        let tail = unsafe { ptr.add(mid) };
1868        (
1869            crate::ptr::slice_from_raw_parts_mut(ptr, mid),
1870            crate::ptr::slice_from_raw_parts_mut(tail, len - mid),
1871        )
1872    }
1873
1874    /// Returns a raw pointer to the slice's buffer.
1875    ///
1876    /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
1877    ///
1878    /// # Examples
1879    ///
1880    /// ```rust
1881    /// #![feature(slice_ptr_get)]
1882    /// use std::ptr;
1883    ///
1884    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
1885    /// assert_eq!(slice.as_mut_ptr(), ptr::null_mut());
1886    /// ```
1887    #[inline(always)]
1888    #[unstable(feature = "slice_ptr_get", issue = "74265")]
1889    pub const fn as_mut_ptr(self) -> *mut T {
1890        self as *mut T
1891    }
1892
1893    /// Returns a raw pointer to an element or subslice, without doing bounds
1894    /// checking.
1895    ///
1896    /// Calling this method with an [out-of-bounds index] or when `self` is not dereferenceable
1897    /// is *[undefined behavior]* even if the resulting pointer is not used.
1898    ///
1899    /// [out-of-bounds index]: #method.add
1900    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1901    ///
1902    /// # Examples
1903    ///
1904    /// ```
1905    /// #![feature(slice_ptr_get)]
1906    ///
1907    /// let x = &mut [1, 2, 4] as *mut [i32];
1908    ///
1909    /// unsafe {
1910    ///     assert_eq!(x.get_unchecked_mut(1), x.as_mut_ptr().add(1));
1911    /// }
1912    /// ```
1913    #[unstable(feature = "slice_ptr_get", issue = "74265")]
1914    #[rustc_const_unstable(feature = "const_index", issue = "143775")]
1915    #[inline(always)]
1916    pub const unsafe fn get_unchecked_mut<I>(self, index: I) -> *mut I::Output
1917    where
1918        I: [const] SliceIndex<[T]>,
1919    {
1920        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
1921        unsafe { index.get_unchecked_mut(self) }
1922    }
1923
1924    #[doc = include_str!("docs/as_uninit_slice.md")]
1925    ///
1926    /// # See Also
1927    /// For the mutable counterpart see [`as_uninit_slice_mut`](pointer::as_uninit_slice_mut).
1928    #[inline]
1929    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1930    #[cfg(not(feature = "ferrocene_subset"))]
1931    pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
1932        if self.is_null() {
1933            None
1934        } else {
1935            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
1936            Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
1937        }
1938    }
1939
1940    /// Returns `None` if the pointer is null, or else returns a unique slice to
1941    /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
1942    /// the value to be initialized.
1943    ///
1944    /// For the shared counterpart see [`as_uninit_slice`].
1945    ///
1946    /// [`as_mut`]: #method.as_mut
1947    /// [`as_uninit_slice`]: #method.as_uninit_slice-1
1948    ///
1949    /// # Safety
1950    ///
1951    /// When calling this method, you have to ensure that *either* the pointer is null *or*
1952    /// all of the following is true:
1953    ///
1954    /// * The pointer must be [valid] for reads and writes for `ptr.len() * size_of::<T>()`
1955    ///   many bytes, and it must be properly aligned. This means in particular:
1956    ///
1957    ///     * The entire memory range of this slice must be contained within a single [allocation]!
1958    ///       Slices can never span across multiple allocations.
1959    ///
1960    ///     * The pointer must be aligned even for zero-length slices. One
1961    ///       reason for this is that enum layout optimizations may rely on references
1962    ///       (including slices of any length) being aligned and non-null to distinguish
1963    ///       them from other data. You can obtain a pointer that is usable as `data`
1964    ///       for zero-length slices using [`NonNull::dangling()`].
1965    ///
1966    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1967    ///   See the safety documentation of [`pointer::offset`].
1968    ///
1969    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1970    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1971    ///   In particular, while this reference exists, the memory the pointer points to must
1972    ///   not get accessed (read or written) through any other pointer.
1973    ///
1974    /// This applies even if the result of this method is unused!
1975    ///
1976    /// See also [`slice::from_raw_parts_mut`][].
1977    ///
1978    /// [valid]: crate::ptr#safety
1979    /// [allocation]: crate::ptr#allocation
1980    ///
1981    /// # Panics during const evaluation
1982    ///
1983    /// This method will panic during const evaluation if the pointer cannot be
1984    /// determined to be null or not. See [`is_null`] for more information.
1985    ///
1986    /// [`is_null`]: #method.is_null-1
1987    #[inline]
1988    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1989    #[cfg(not(feature = "ferrocene_subset"))]
1990    pub const unsafe fn as_uninit_slice_mut<'a>(self) -> Option<&'a mut [MaybeUninit<T>]> {
1991        if self.is_null() {
1992            None
1993        } else {
1994            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
1995            Some(unsafe { slice::from_raw_parts_mut(self as *mut MaybeUninit<T>, self.len()) })
1996        }
1997    }
1998}
1999
2000impl<T> *mut T {
2001    /// Casts from a pointer-to-`T` to a pointer-to-`[T; N]`.
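    ///
    /// # Examples
    ///
    /// A minimal usage sketch:
    ///
    /// ```
    /// #![feature(ptr_cast_array)]
    ///
    /// let mut v = [1u8, 2, 3, 4];
    /// let ptr: *mut u8 = v.as_mut_ptr();
    /// let array_ptr: *mut [u8; 4] = ptr.cast_array();
    /// // SAFETY: `array_ptr` covers exactly the four initialized elements of `v`.
    /// assert_eq!(unsafe { *array_ptr }, [1, 2, 3, 4]);
    /// ```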
2002    #[inline]
2003    #[unstable(feature = "ptr_cast_array", issue = "144514")]
2004    pub const fn cast_array<const N: usize>(self) -> *mut [T; N] {
2005        self.cast()
2006    }
2007}
2008
2009#[cfg(not(feature = "ferrocene_subset"))]
2010impl<T, const N: usize> *mut [T; N] {
2011    /// Returns a raw pointer to the array's buffer.
2012    ///
2013    /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
2014    ///
2015    /// # Examples
2016    ///
2017    /// ```rust
2018    /// #![feature(array_ptr_get)]
2019    /// use std::ptr;
2020    ///
2021    /// let arr: *mut [i8; 3] = ptr::null_mut();
2022    /// assert_eq!(arr.as_mut_ptr(), ptr::null_mut());
2023    /// ```
2024    #[inline]
2025    #[unstable(feature = "array_ptr_get", issue = "119834")]
2026    pub const fn as_mut_ptr(self) -> *mut T {
2027        self as *mut T
2028    }
2029
2030    /// Returns a raw pointer to a mutable slice containing the entire array.
2031    ///
2032    /// # Examples
2033    ///
2034    /// ```
2035    /// #![feature(array_ptr_get)]
2036    ///
2037    /// let mut arr = [1, 2, 5];
2038    /// let ptr: *mut [i32; 3] = &mut arr;
2039    /// unsafe {
2040    ///     (&mut *ptr.as_mut_slice())[..2].copy_from_slice(&[3, 4]);
2041    /// }
2042    /// assert_eq!(arr, [3, 4, 5]);
2043    /// ```
2044    #[inline]
2045    #[unstable(feature = "array_ptr_get", issue = "119834")]
2046    pub const fn as_mut_slice(self) -> *mut [T] {
2047        self
2048    }
2049}
2050
2051/// Pointer equality is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method.
2052#[stable(feature = "rust1", since = "1.0.0")]
2053#[diagnostic::on_const(
2054    message = "pointers cannot be reliably compared during const eval",
2055    note = "see issue #53020 <https://github.com/rust-lang/rust/issues/53020> for more information"
2056)]
2057impl<T: PointeeSized> PartialEq for *mut T {
2058    #[inline(always)]
2059    #[allow(ambiguous_wide_pointer_comparisons)]
2060    fn eq(&self, other: &*mut T) -> bool {
2061        *self == *other
2062    }
2063}
2064
2065/// Pointer equality is an equivalence relation.
2066#[stable(feature = "rust1", since = "1.0.0")]
2067#[diagnostic::on_const(
2068    message = "pointers cannot be reliably compared during const eval",
2069    note = "see issue #53020 <https://github.com/rust-lang/rust/issues/53020> for more information"
2070)]
2071impl<T: PointeeSized> Eq for *mut T {}
2072
2073/// Pointer comparison is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method.
2074#[cfg(not(feature = "ferrocene_subset"))]
2075#[stable(feature = "rust1", since = "1.0.0")]
2076#[diagnostic::on_const(
2077    message = "pointers cannot be reliably compared during const eval",
2078    note = "see issue #53020 <https://github.com/rust-lang/rust/issues/53020> for more information"
2079)]
2080impl<T: PointeeSized> Ord for *mut T {
2081    #[inline]
2082    #[allow(ambiguous_wide_pointer_comparisons)]
2083    fn cmp(&self, other: &*mut T) -> Ordering {
2084        if self < other {
2085            Less
2086        } else if self == other {
2087            Equal
2088        } else {
2089            Greater
2090        }
2091    }
2092}
2093
2094/// Pointer comparison is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method.
2095#[cfg(not(feature = "ferrocene_subset"))]
2096#[stable(feature = "rust1", since = "1.0.0")]
2097#[diagnostic::on_const(
2098    message = "pointers cannot be reliably compared during const eval",
2099    note = "see issue #53020 <https://github.com/rust-lang/rust/issues/53020> for more information"
2100)]
2101impl<T: PointeeSized> PartialOrd for *mut T {
2102    #[inline(always)]
2103    #[allow(ambiguous_wide_pointer_comparisons)]
2104    fn partial_cmp(&self, other: &*mut T) -> Option<Ordering> {
2105        Some(self.cmp(other))
2106    }
2107
2108    #[inline(always)]
2109    #[allow(ambiguous_wide_pointer_comparisons)]
2110    fn lt(&self, other: &*mut T) -> bool {
2111        *self < *other
2112    }
2113
2114    #[inline(always)]
2115    #[allow(ambiguous_wide_pointer_comparisons)]
2116    fn le(&self, other: &*mut T) -> bool {
2117        *self <= *other
2118    }
2119
2120    #[inline(always)]
2121    #[allow(ambiguous_wide_pointer_comparisons)]
2122    fn gt(&self, other: &*mut T) -> bool {
2123        *self > *other
2124    }
2125
2126    #[inline(always)]
2127    #[allow(ambiguous_wide_pointer_comparisons)]
2128    fn ge(&self, other: &*mut T) -> bool {
2129        *self >= *other
2130    }
2131}
2132
2133#[stable(feature = "raw_ptr_default", since = "1.88.0")]
2134#[cfg(not(feature = "ferrocene_subset"))]
2135impl<T: ?Sized + Thin> Default for *mut T {
2136    /// Returns the default value of [`null_mut()`][crate::ptr::null_mut].
2137    fn default() -> Self {
2138        crate::ptr::null_mut()
2139    }
2140}