core/ptr/
const_ptr.rs

1use super::*;
2#[cfg(not(feature = "ferrocene_certified"))]
3use crate::cmp::Ordering::{Equal, Greater, Less};
4use crate::intrinsics::const_eval_select;
5#[cfg(not(feature = "ferrocene_certified"))]
6use crate::mem::{self, SizedTypeProperties};
7#[cfg(not(feature = "ferrocene_certified"))]
8use crate::slice::{self, SliceIndex};
9
10// Ferrocene addition: imports for certified subset
11#[cfg(feature = "ferrocene_certified")]
12#[rustfmt::skip]
13use crate::mem;
14
15impl<T: PointeeSized> *const T {
    #[doc = include_str!("docs/is_null.md")]
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "Follow the rabbit";
    /// let ptr: *const u8 = s.as_ptr();
    /// assert!(!ptr.is_null());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
    #[rustc_diagnostic_item = "ptr_const_is_null"]
    #[inline]
    #[rustc_allow_const_fn_unstable(const_eval_select)]
    pub const fn is_null(self) -> bool {
        // Compare via a cast to a thin pointer, so fat pointers are only
        // considering their "data" part for null-ness.
        let ptr = self as *const u8;
        // Two paths: in const eval we may only answer when the result is
        // *guaranteed* either way; at runtime the address alone decides.
        const_eval_select!(
            @capture { ptr: *const u8 } -> bool:
            // This use of `const_raw_ptr_comparison` has been explicitly blessed by t-lang.
            if const #[rustc_allow_const_fn_unstable(const_raw_ptr_comparison)] {
                match (ptr).guaranteed_eq(null_mut()) {
                    Some(res) => res,
                    // To remain maximally conservative, we stop execution when we don't
                    // know whether the pointer is null or not.
                    // We can *not* return `false` here, that would be unsound in `NonNull::new`!
                    None => panic!("null-ness of this pointer cannot be determined in const context"),
                }
            } else {
                // At runtime, null is exactly address 0.
                ptr.addr() == 0
            }
        )
    }
50
    /// Casts to a pointer of another type.
    #[stable(feature = "ptr_cast", since = "1.38.0")]
    #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
    #[rustc_diagnostic_item = "const_ptr_cast"]
    #[inline(always)]
    pub const fn cast<U>(self) -> *const U {
        // A plain `as` cast: only the pointee type changes. The `_` lets the
        // compiler infer `*const U` from the return type.
        self as _
    }
59
60    /// Try to cast to a pointer of another type by checking alignment.
61    ///
62    /// If the pointer is properly aligned to the target type, it will be
63    /// cast to the target type. Otherwise, `None` is returned.
64    ///
65    /// # Examples
66    ///
67    /// ```rust
68    /// #![feature(pointer_try_cast_aligned)]
69    ///
70    /// let x = 0u64;
71    ///
72    /// let aligned: *const u64 = &x;
73    /// let unaligned = unsafe { aligned.byte_add(1) };
74    ///
75    /// assert!(aligned.try_cast_aligned::<u32>().is_some());
76    /// assert!(unaligned.try_cast_aligned::<u32>().is_none());
77    /// ```
78    #[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
79    #[must_use = "this returns the result of the operation, \
80                  without modifying the original"]
81    #[inline]
82    pub fn try_cast_aligned<U>(self) -> Option<*const U> {
83        if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
84    }
85
    /// Uses the address value in a new pointer of another type.
    ///
    /// This operation will ignore the address part of its `meta` operand and discard existing
    /// metadata of `self`. For pointers to a sized types (thin pointers), this has the same effect
    /// as a simple cast. For pointers to an unsized type (fat pointers) this recombines the address
    /// with new metadata such as slice lengths or `dyn`-vtable.
    ///
    /// The resulting pointer will have provenance of `self`. This operation is semantically the
    /// same as creating a new pointer with the data pointer value of `self` but the metadata of
    /// `meta`, being fat or thin depending on the `meta` operand.
    ///
    /// # Examples
    ///
    /// This function is primarily useful for enabling pointer arithmetic on potentially fat
    /// pointers. The pointer is cast to a sized pointee to utilize offset operations and then
    /// recombined with its own original metadata.
    ///
    /// ```
    /// #![feature(set_ptr_value)]
    /// # use core::fmt::Debug;
    /// let arr: [i32; 3] = [1, 2, 3];
    /// let mut ptr = arr.as_ptr() as *const dyn Debug;
    /// let thin = ptr as *const u8;
    /// unsafe {
    ///     ptr = thin.add(8).with_metadata_of(ptr);
    ///     # assert_eq!(*(ptr as *const i32), 3);
    ///     println!("{:?}", &*ptr); // will print "3"
    /// }
    /// ```
    ///
    /// # *Incorrect* usage
    ///
    /// The provenance from pointers is *not* combined. The result must only be used to refer to the
    /// address allowed by `self`.
    ///
    /// ```rust,no_run
    /// #![feature(set_ptr_value)]
    /// let x = 0u32;
    /// let y = 1u32;
    ///
    /// let x = (&x) as *const u32;
    /// let y = (&y) as *const u32;
    ///
    /// let offset = (x as usize - y as usize) / 4;
    /// let bad = x.wrapping_add(offset).with_metadata_of(y);
    ///
    /// // This dereference is UB. The pointer only has provenance for `x` but points to `y`.
    /// println!("{:?}", unsafe { &*bad });
    /// ```
    #[unstable(feature = "set_ptr_value", issue = "75091")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn with_metadata_of<U>(self, meta: *const U) -> *const U
    where
        U: PointeeSized,
    {
        // Rebuild a (possibly fat) pointer from `self`'s data pointer (which
        // carries the provenance) and only the metadata part of `meta`.
        from_raw_parts::<U>(self as *const (), metadata(meta))
    }
145
    /// Changes constness without changing the type.
    ///
    /// This is a bit safer than `as` because it wouldn't silently change the type if the code is
    /// refactored.
    #[stable(feature = "ptr_const_cast", since = "1.65.0")]
    #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")]
    #[rustc_diagnostic_item = "ptr_cast_mut"]
    #[inline(always)]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn cast_mut(self) -> *mut T {
        // Same address and pointee type; only the mutability marker changes.
        self as _
    }
158
    #[doc = include_str!("./docs/addr.md")]
    #[must_use]
    #[inline(always)]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn addr(self) -> usize {
        // A pointer-to-integer transmute currently has exactly the right semantics: it returns the
        // address without exposing the provenance. Note that this is *not* a stable guarantee about
        // transmute semantics, it relies on sysroot crates having special status.
        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
        // provenance).
        // The `()` cast first makes the pointer thin, so only the data pointer
        // (never any fat-pointer metadata) is transmuted to `usize`.
        unsafe { mem::transmute(self.cast::<()>()) }
    }
171
    /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
    /// [`with_exposed_provenance`] and returns the "address" portion.
    ///
    /// This is equivalent to `self as usize`, which semantically discards provenance information.
    /// Furthermore, this (like the `as` cast) has the implicit side-effect of marking the
    /// provenance as 'exposed', so on platforms that support it you can later call
    /// [`with_exposed_provenance`] to reconstitute the original pointer including its provenance.
    ///
    /// Due to its inherent ambiguity, [`with_exposed_provenance`] may not be supported by tools
    /// that help you to stay conformant with the Rust memory model. It is recommended to use
    /// [Strict Provenance][crate::ptr#strict-provenance] APIs such as [`with_addr`][pointer::with_addr]
    /// wherever possible, in which case [`addr`][pointer::addr] should be used instead of `expose_provenance`.
    ///
    /// On most platforms this will produce a value with the same bytes as the original pointer,
    /// because all the bytes are dedicated to describing the address. Platforms which need to store
    /// additional information in the pointer may not support this operation, since the 'expose'
    /// side-effect which is required for [`with_exposed_provenance`] to work is typically not
    /// available.
    ///
    /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
    ///
    /// [`with_exposed_provenance`]: with_exposed_provenance
    #[inline(always)]
    #[stable(feature = "exposed_provenance", since = "1.84.0")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub fn expose_provenance(self) -> usize {
        // The thin-pointer-to-usize `as` cast is what carries the "expose"
        // side effect (unlike `addr`, which transmutes to avoid it); casting
        // to `()` first drops any fat-pointer metadata.
        self.cast::<()>() as usize
    }
200
201    /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
202    /// `self`.
203    ///
204    /// This is similar to a `addr as *const T` cast, but copies
205    /// the *provenance* of `self` to the new pointer.
206    /// This avoids the inherent ambiguity of the unary cast.
207    ///
208    /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset
209    /// `self` to the given address, and therefore has all the same capabilities and restrictions.
210    ///
211    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
212    #[must_use]
213    #[inline]
214    #[stable(feature = "strict_provenance", since = "1.84.0")]
215    #[cfg(not(feature = "ferrocene_certified"))]
216    pub fn with_addr(self, addr: usize) -> Self {
217        // This should probably be an intrinsic to avoid doing any sort of arithmetic, but
218        // meanwhile, we can implement it with `wrapping_offset`, which preserves the pointer's
219        // provenance.
220        let self_addr = self.addr() as isize;
221        let dest_addr = addr as isize;
222        let offset = dest_addr.wrapping_sub(self_addr);
223        self.wrapping_byte_offset(offset)
224    }
225
226    /// Creates a new pointer by mapping `self`'s address to a new one, preserving the
227    /// [provenance][crate::ptr#provenance] of `self`.
228    ///
229    /// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details.
230    ///
231    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
232    #[must_use]
233    #[inline]
234    #[stable(feature = "strict_provenance", since = "1.84.0")]
235    #[cfg(not(feature = "ferrocene_certified"))]
236    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
237        self.with_addr(f(self.addr()))
238    }
239
    /// Decompose a (possibly wide) pointer into its data pointer and metadata components.
    ///
    /// The pointer can be later reconstructed with [`from_raw_parts`].
    #[unstable(feature = "ptr_metadata", issue = "81513")]
    #[inline]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn to_raw_parts(self) -> (*const (), <T as super::Pointee>::Metadata) {
        // The data pointer is just a thin cast of `self`; `metadata` extracts
        // the wide-pointer part (e.g. slice length or `dyn` vtable).
        (self.cast(), metadata(self))
    }
249
    #[doc = include_str!("./docs/as_ref.md")]
    ///
    /// ```
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     let val_back = &*ptr;
    ///     assert_eq!(val_back, &10);
    /// }
    /// ```
    ///
    /// # Examples
    ///
    /// ```
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_ref() {
    ///         assert_eq!(val_back, &10);
    ///     }
    /// }
    /// ```
    ///
    ///
    /// [`is_null`]: #method.is_null
    /// [`as_uninit_ref`]: #method.as_uninit_ref
    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
    #[inline]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
        // Null is the only case this method detects; a non-null but invalid
        // pointer still produces `Some` and is the caller's responsibility.
        // SAFETY: the caller must guarantee that `self` is valid
        // for a reference if it isn't null.
        if self.is_null() { None } else { unsafe { Some(&*self) } }
    }
285
    /// Returns a shared reference to the value behind the pointer.
    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_ref`] must be used instead.
    /// If the pointer may be null, but the value is known to have been initialized, [`as_ref`] must be used instead.
    ///
    /// [`as_ref`]: #method.as_ref
    /// [`as_uninit_ref`]: #method.as_uninit_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_ref_unchecked)]
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     assert_eq!(ptr.as_ref_unchecked(), &10);
    /// }
    /// ```
    // FIXME: mention it in the docs for `as_ref` and `as_uninit_ref` once stabilized.
    #[unstable(feature = "ptr_as_ref_unchecked", issue = "122034")]
    #[inline]
    #[must_use]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn as_ref_unchecked<'a>(self) -> &'a T {
        // Unlike `as_ref`, there is no null check: the conversion is unconditional.
        // SAFETY: the caller must guarantee that `self` is valid for a reference
        unsafe { &*self }
    }
317
    #[doc = include_str!("./docs/as_uninit_ref.md")]
    ///
    /// [`is_null`]: #method.is_null
    /// [`as_ref`]: #method.as_ref
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_uninit_ref() {
    ///         assert_eq!(val_back.assume_init(), 10);
    ///     }
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
    where
        T: Sized,
    {
        // Viewing the pointee as `MaybeUninit<T>` lets the caller obtain a
        // reference without asserting that the value is initialized.
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a reference.
        if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
    }
347
    #[doc = include_str!("./docs/offset.md")]
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     assert_eq!(*ptr.offset(1) as char, '2');
    ///     assert_eq!(*ptr.offset(2) as char, '3');
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn offset(self, count: isize) -> *const T
    where
        T: Sized,
    {
        // Helper backing the UB-check assertion below: does `count * size`
        // fit in an `isize`, and does adding it to the address avoid
        // wrapping? In const contexts the check is skipped (trivially `true`).
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
            // We can use const_eval_select here because this is only for UB checks.
            const_eval_select!(
                @capture { this: *const (), count: isize, size: usize } -> bool:
                if const {
                    true
                } else {
                    // `size` is the size of a Rust type, so we know that
                    // `size <= isize::MAX` and thus `as` cast here is not lossy.
                    let Some(byte_offset) = count.checked_mul(size as isize) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
                    !overflow
                }
            )
        }

        // When language UB checks are enabled, diagnose offsets whose address
        // calculation would overflow before reaching the intrinsic.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::offset requires the address calculation to not overflow",
            (
                this: *const () = self as *const (),
                count: isize = count,
                size: usize = size_of::<T>(),
            ) => runtime_offset_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }
404
    /// Adds a signed offset in bytes to a pointer.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [offset][pointer::offset] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn byte_offset(self, count: isize) -> Self {
        // Do the arithmetic on a `u8` pointer (so `count` is in bytes), then
        // reattach `self`'s original metadata.
        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { self.cast::<u8>().offset(count).with_metadata_of(self) }
    }
425
    /// Adds a signed offset to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocation] that `self` points to
    /// (this is called "[Provenance](ptr/index.html#provenance)").
    /// The pointer must not be used to read or write other allocations.
    ///
    /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocation.
    ///
    /// Compared to [`offset`], this method basically delays the requirement of staying within the
    /// same allocation: [`offset`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
    /// words, leaving the allocation and then re-entering it later is permitted.
    ///
    /// [`offset`]: #method.offset
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_offset(6);
    ///
    /// let mut out = String::new();
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_offset(step);
    /// }
    /// assert_eq!(out.as_str(), "1, 3, 5, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn wrapping_offset(self, count: isize) -> *const T
    where
        T: Sized,
    {
        // `arith_offset` is the wrapping counterpart of `intrinsics::offset`:
        // it never has immediate UB, matching the delayed-check semantics
        // documented above.
        // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
        unsafe { intrinsics::arith_offset(self, count) }
    }
490
    /// Adds a signed offset in bytes to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_offset][pointer::wrapping_offset] on it. See that method
    /// for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn wrapping_byte_offset(self, count: isize) -> Self {
        // Byte-wise wrapping arithmetic on a `u8` pointer, then reattach
        // `self`'s original metadata.
        self.cast::<u8>().wrapping_offset(count).with_metadata_of(self)
    }
509
    /// Masks out bits of the pointer according to a mask.
    ///
    /// This is convenience for `ptr.map_addr(|a| a & mask)`.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    ///
    /// ## Examples
    ///
    /// ```
    /// #![feature(ptr_mask)]
    /// let v = 17_u32;
    /// let ptr: *const u32 = &v;
    ///
    /// // `u32` is 4 bytes aligned,
    /// // which means that lower 2 bits are always 0.
    /// let tag_mask = 0b11;
    /// let ptr_mask = !tag_mask;
    ///
    /// // We can store something in these lower bits
    /// let tagged_ptr = ptr.map_addr(|a| a | 0b10);
    ///
    /// // Get the "tag" back
    /// let tag = tagged_ptr.addr() & tag_mask;
    /// assert_eq!(tag, 0b10);
    ///
    /// // Note that `tagged_ptr` is unaligned, it's UB to read from it.
    /// // To get original pointer `mask` can be used:
    /// let masked_ptr = tagged_ptr.mask(ptr_mask);
    /// assert_eq!(unsafe { *masked_ptr }, 17);
    /// ```
    #[unstable(feature = "ptr_mask", issue = "98290")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline(always)]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub fn mask(self, mask: usize) -> *const T {
        // The intrinsic operates on a thin pointer, so metadata is dropped by
        // the `()` cast and restored afterwards via `with_metadata_of`.
        intrinsics::ptr_mask(self.cast::<()>(), mask).with_metadata_of(self)
    }
548
    /// Calculates the distance between two pointers within the same allocation. The returned value is in
    /// units of T: the distance in bytes divided by `size_of::<T>()`.
    ///
    /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
    /// except that it has a lot more opportunities for UB, in exchange for the compiler
    /// better understanding what you are doing.
    ///
    /// The primary motivation of this method is for computing the `len` of an array/slice
    /// of `T` that you are currently representing as a "start" and "end" pointer
    /// (and "end" is "one past the end" of the array).
    /// In that case, `end.offset_from(start)` gets you the length of the array.
    ///
    /// All of the following safety requirements are trivially satisfied for this usecase.
    ///
    /// [`offset`]: #method.offset
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * `self` and `origin` must either
    ///
    ///   * point to the same address, or
    ///   * both be [derived from][crate::ptr#provenance] a pointer to the same [allocation], and the memory range between
    ///     the two pointers must be in bounds of that object. (See below for an example.)
    ///
    /// * The distance between the pointers, in bytes, must be an exact multiple
    ///   of the size of `T`.
    ///
    /// As a consequence, the absolute distance between the pointers, in bytes, computed on
    /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
    /// implied by the in-bounds requirement, and the fact that no allocation can be larger
    /// than `isize::MAX` bytes.
    ///
    /// The requirement for pointers to be derived from the same allocation is primarily
    /// needed for `const`-compatibility: the distance between pointers into *different* allocated
    /// objects is not known at compile-time. However, the requirement also exists at
    /// runtime and may be exploited by optimizations. If you wish to compute the difference between
    /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
    /// origin as isize) / size_of::<T>()`.
    // FIXME: recommend `addr()` instead of `as usize` once that is stable.
    ///
    /// [`add`]: #method.add
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let a = [0; 5];
    /// let ptr1: *const i32 = &a[1];
    /// let ptr2: *const i32 = &a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
    ///     assert_eq!(ptr1.offset(2), ptr2);
    ///     assert_eq!(ptr2.offset(-2), ptr1);
    /// }
    /// ```
    ///
    /// *Incorrect* usage:
    ///
    /// ```rust,no_run
    /// let ptr1 = Box::into_raw(Box::new(0u8)) as *const u8;
    /// let ptr2 = Box::into_raw(Box::new(1u8)) as *const u8;
    /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
    /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
    /// let ptr2_other = (ptr1 as *const u8).wrapping_offset(diff).wrapping_offset(1);
    /// assert_eq!(ptr2 as usize, ptr2_other as usize);
    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
    /// // computing their offset is undefined behavior, even though
    /// // they point to addresses that are in-bounds of the same object!
    /// unsafe {
    ///     let one = ptr2_other.offset_from(ptr2); // Undefined Behavior! ⚠️
    /// }
    /// ```
    #[stable(feature = "ptr_offset_from", since = "1.47.0")]
    #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn offset_from(self, origin: *const T) -> isize
    where
        T: Sized,
    {
        let pointee_size = size_of::<T>();
        // Reject ZSTs (the documented panic); the upper bound always holds
        // because no Rust type is larger than `isize::MAX` bytes.
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from`.
        unsafe { intrinsics::ptr_offset_from(self, origin) }
    }
644
645    /// Calculates the distance between two pointers within the same allocation. The returned value is in
646    /// units of **bytes**.
647    ///
648    /// This is purely a convenience for casting to a `u8` pointer and
649    /// using [`offset_from`][pointer::offset_from] on it. See that method for
650    /// documentation and safety requirements.
651    ///
652    /// For non-`Sized` pointees this operation considers only the data pointers,
653    /// ignoring the metadata.
654    #[inline(always)]
655    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
656    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
657    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
658    #[cfg(not(feature = "ferrocene_certified"))]
659    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: *const U) -> isize {
660        // SAFETY: the caller must uphold the safety contract for `offset_from`.
661        unsafe { self.cast::<u8>().offset_from(origin.cast::<u8>()) }
662    }
663
664    /// Calculates the distance between two pointers within the same allocation, *where it's known that
665    /// `self` is equal to or greater than `origin`*. The returned value is in
666    /// units of T: the distance in bytes is divided by `size_of::<T>()`.
667    ///
668    /// This computes the same value that [`offset_from`](#method.offset_from)
669    /// would compute, but with the added precondition that the offset is
670    /// guaranteed to be non-negative.  This method is equivalent to
671    /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
672    /// but it provides slightly more information to the optimizer, which can
673    /// sometimes allow it to optimize slightly better with some backends.
674    ///
675    /// This method can be thought of as recovering the `count` that was passed
676    /// to [`add`](#method.add) (or, with the parameters in the other order,
677    /// to [`sub`](#method.sub)).  The following are all equivalent, assuming
678    /// that their safety preconditions are met:
679    /// ```rust
680    /// # unsafe fn blah(ptr: *const i32, origin: *const i32, count: usize) -> bool { unsafe {
681    /// ptr.offset_from_unsigned(origin) == count
682    /// # &&
683    /// origin.add(count) == ptr
684    /// # &&
685    /// ptr.sub(count) == origin
686    /// # } }
687    /// ```
688    ///
689    /// # Safety
690    ///
691    /// - The distance between the pointers must be non-negative (`self >= origin`)
692    ///
693    /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
694    ///   apply to this method as well; see it for the full details.
695    ///
696    /// Importantly, despite the return type of this method being able to represent
697    /// a larger offset, it's still *not permitted* to pass pointers which differ
698    /// by more than `isize::MAX` *bytes*.  As such, the result of this method will
699    /// always be less than or equal to `isize::MAX as usize`.
700    ///
701    /// # Panics
702    ///
703    /// This function panics if `T` is a Zero-Sized Type ("ZST").
704    ///
705    /// # Examples
706    ///
707    /// ```
708    /// let a = [0; 5];
709    /// let ptr1: *const i32 = &a[1];
710    /// let ptr2: *const i32 = &a[3];
711    /// unsafe {
712    ///     assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
713    ///     assert_eq!(ptr1.add(2), ptr2);
714    ///     assert_eq!(ptr2.sub(2), ptr1);
715    ///     assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
716    /// }
717    ///
718    /// // This would be incorrect, as the pointers are not correctly ordered:
719    /// // ptr1.offset_from_unsigned(ptr2)
720    /// ```
    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
    #[inline]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn offset_from_unsigned(self, origin: *const T) -> usize
    where
        T: Sized,
    {
        // Precondition helper: pointer address ordering cannot be observed
        // during compile-time evaluation, so the check conservatively passes
        // there and is only enforced at runtime.
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_ptr_ge(this: *const (), origin: *const ()) -> bool {
            const_eval_select!(
                @capture { this: *const (), origin: *const () } -> bool:
                if const {
                    true
                } else {
                    this >= origin
                }
            )
        }

        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::offset_from_unsigned requires `self >= origin`",
            (
                this: *const () = self as *const (),
                origin: *const () = origin as *const (),
            ) => runtime_ptr_ge(this, origin)
        );

        // Enforce the documented panic: `T` must not be a ZST, and its size
        // must fit in an `isize` (always true for a real Rust type).
        let pointee_size = size_of::<T>();
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from_unsigned`.
        unsafe { intrinsics::ptr_offset_from_unsigned(self, origin) }
    }
756
    /// Calculates the distance between two pointers within the same allocation, *where it's known that
    /// `self` is equal to or greater than `origin`*. The returned value is in
    /// units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`offset_from_unsigned`][pointer::offset_from_unsigned] on it.
    /// See that method for documentation and safety requirements.
    ///
    /// Because only the addresses are compared, the pointee types of `self`
    /// and `origin` are allowed to differ.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointers,
    /// ignoring the metadata.
    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
    #[inline]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: *const U) -> usize {
        // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
        unsafe { self.cast::<u8>().offset_from_unsigned(origin.cast::<u8>()) }
    }
776
    /// Returns whether two pointers are guaranteed to be equal.
    ///
    /// At runtime this function behaves like `Some(self == other)`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine equality of two pointers, so this function may
    /// spuriously return `None` for pointers that later actually turn out to have their equality known.
    /// But when it returns `Some`, the pointers' equality is guaranteed to be known.
    ///
    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
    /// version and unsafe code must not
    /// rely on the result of this function for soundness. It is suggested to only use this function
    /// for performance optimizations where spurious `None` return values by this function do not
    /// affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[inline]
    pub const fn guaranteed_eq(self, other: *const T) -> Option<bool>
    where
        T: Sized,
    {
        match intrinsics::ptr_guaranteed_cmp(self, other) {
            // The intrinsic returns 2 when the answer is unknown (which can
            // only happen during compile-time evaluation); otherwise 1 means
            // "equal" and any other value means "not equal".
            #[ferrocene::annotation(
                "This cannot be reached in runtime code so it cannot be covered."
            )]
            2 => None,
            other => Some(other == 1),
        }
    }
809
810    /// Returns whether two pointers are guaranteed to be inequal.
811    ///
812    /// At runtime this function behaves like `Some(self != other)`.
813    /// However, in some contexts (e.g., compile-time evaluation),
814    /// it is not always possible to determine inequality of two pointers, so this function may
815    /// spuriously return `None` for pointers that later actually turn out to have its inequality known.
816    /// But when it returns `Some`, the pointers' inequality is guaranteed to be known.
817    ///
818    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
819    /// version and unsafe code must not
820    /// rely on the result of this function for soundness. It is suggested to only use this function
821    /// for performance optimizations where spurious `None` return values by this function do not
822    /// affect the outcome, but just the performance.
823    /// The consequences of using this method to make runtime and compile-time code behave
824    /// differently have not been explored. This method should not be used to introduce such
825    /// differences, and it should also not be stabilized before we have a better understanding
826    /// of this issue.
827    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
828    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
829    #[inline]
830    #[cfg(not(feature = "ferrocene_certified"))]
831    pub const fn guaranteed_ne(self, other: *const T) -> Option<bool>
832    where
833        T: Sized,
834    {
835        match self.guaranteed_eq(other) {
836            None => None,
837            Some(eq) => Some(!eq),
838        }
839    }
840
    #[doc = include_str!("./docs/add.md")]
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     assert_eq!(*ptr.add(1), b'2');
    ///     assert_eq!(*ptr.add(2), b'3');
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[track_caller]
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // Precondition helper: returns true iff `count * size` fits in an
        // `isize` and adding it to the address does not wrap. Overflow cannot
        // be observed at compile time, so the check conservatively passes there.
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add(byte_offset);
                    byte_offset <= (isize::MAX as usize) && !overflow
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::add requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_add_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }
895
    /// Adds an unsigned offset in bytes to a pointer.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [add][pointer::add] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn byte_add(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `add`.
        // `with_metadata_of` re-attaches `self`'s (possibly fat) metadata
        // to the offset thin pointer.
        unsafe { self.cast::<u8>().add(count).with_metadata_of(self) }
    }
916
    /// Subtracts an unsigned offset from a pointer.
    ///
    /// This can only move the pointer backward (or not move it). If you need to move forward or
    /// backward depending on the value, then you might want [`offset`](#method.offset) instead
    /// which takes a signed offset.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
    ///   "wrapping around"), must fit in an `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
    ///   [allocation], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocation. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// Consider using [`wrapping_sub`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_sub`]: #method.wrapping_sub
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    ///
    /// unsafe {
    ///     let end: *const u8 = s.as_ptr().add(3);
    ///     assert_eq!(*end.sub(1), b'3');
    ///     assert_eq!(*end.sub(2), b'2');
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        // Precondition helper: returns true iff `count * size` fits in an
        // `isize` and the address is large enough to subtract it from.
        // Overflow cannot be observed at compile time, so the check
        // conservatively passes there.
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::sub requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_sub_nowrap(this, count, size)
        );

        if T::IS_ZST {
            // Pointer arithmetic does nothing when the pointee is a ZST.
            self
        } else {
            // SAFETY: the caller must uphold the safety contract for `offset`.
            // Because the pointee is *not* a ZST, that means that `count` is
            // at most `isize::MAX`, and thus the negation cannot overflow.
            unsafe { intrinsics::offset(self, intrinsics::unchecked_sub(0, count as isize)) }
        }
    }
1009
    /// Subtracts an unsigned offset in bytes from a pointer.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [sub][pointer::sub] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn byte_sub(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `sub`.
        // `with_metadata_of` re-attaches `self`'s (possibly fat) metadata
        // to the offset thin pointer.
        unsafe { self.cast::<u8>().sub(count).with_metadata_of(self) }
    }
1030
    /// Adds an unsigned offset to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocation] that `self` points to; it must not
    /// be used to read or write other allocations.
    ///
    /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocation.
    ///
    /// Compared to [`add`], this method basically delays the requirement of staying within the
    /// same allocation: [`add`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocation and then re-entering it later is permitted.
    ///
    /// [`add`]: #method.add
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_add(6);
    ///
    /// let mut out = String::new();
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_add(step);
    /// }
    /// assert_eq!(out, "1, 3, 5, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn wrapping_add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // The cast may reinterpret a huge `count` as negative, but under
        // wrapping arithmetic that denotes the same resulting address.
        self.wrapping_offset(count as isize)
    }
1093
    /// Adds an unsigned offset in bytes to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_add][pointer::wrapping_add] on it. See that method for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn wrapping_byte_add(self, count: usize) -> Self {
        // `with_metadata_of` re-attaches `self`'s (possibly fat) metadata
        // to the offset thin pointer.
        self.cast::<u8>().wrapping_add(count).with_metadata_of(self)
    }
1111
    /// Subtracts an unsigned offset from a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocation] that `self` points to; it must not
    /// be used to read or write other allocations.
    ///
    /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocation.
    ///
    /// Compared to [`sub`], this method basically delays the requirement of staying within the
    /// same allocation: [`sub`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocation and then re-entering it later is permitted.
    ///
    /// [`sub`]: #method.sub
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements (backwards)
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let start_rounded_down = ptr.wrapping_sub(2);
    /// ptr = ptr.wrapping_add(4);
    /// let step = 2;
    /// let mut out = String::new();
    /// while ptr != start_rounded_down {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_sub(step);
    /// }
    /// assert_eq!(out, "5, 3, 1, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn wrapping_sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        // `wrapping_neg` avoids an overflow panic for `count == isize::MIN as
        // usize`; wrapping offset semantics keep the result well-defined.
        self.wrapping_offset((count as isize).wrapping_neg())
    }
1174
    /// Subtracts an unsigned offset in bytes from a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_sub][pointer::wrapping_sub] on it. See that method for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn wrapping_byte_sub(self, count: usize) -> Self {
        // `with_metadata_of` re-attaches `self`'s (possibly fat) metadata
        // to the offset thin pointer.
        self.cast::<u8>().wrapping_sub(count).with_metadata_of(self)
    }
1192
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// See [`ptr::read`] for safety concerns and examples.
    ///
    /// [`ptr::read`]: crate::ptr::read()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline]
    #[track_caller]
    pub const unsafe fn read(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read`;
        // see `ptr::read` for the full list of requirements.
        unsafe { read(self) }
    }
1210
    /// Performs a volatile read of the value from `self` without moving it. This
    /// leaves the memory in `self` unchanged.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::read_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_volatile`;
        // see `ptr::read_volatile` for the full list of requirements.
        unsafe { read_volatile(self) }
    }
1232
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// Unlike `read`, the pointer may be unaligned.
    ///
    /// See [`ptr::read_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_unaligned`;
        // see `ptr::read_unaligned` for the full list of requirements.
        unsafe { read_unaligned(self) }
    }
1253
    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: crate::ptr::copy()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy`;
        // see `ptr::copy` for the full list of requirements.
        unsafe { copy(self, dest, count) }
    }
1274
    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`;
        // see `ptr::copy_nonoverlapping` for the full list of requirements.
        unsafe { copy_nonoverlapping(self, dest, count) }
    }
1295
    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
    /// `align`.
    ///
    /// If it is not possible to align the pointer, the implementation returns
    /// `usize::MAX`.
    ///
    /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
    /// used with the `wrapping_add` method.
    ///
    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
    /// the returned offset is correct in all terms other than alignment.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two.
    ///
    /// # Examples
    ///
    /// Accessing adjacent `u8` as `u16`
    ///
    /// ```
    /// # unsafe {
    /// let x = [5_u8, 6, 7, 8, 9];
    /// let ptr = x.as_ptr();
    /// let offset = ptr.align_offset(align_of::<u16>());
    ///
    /// if offset < x.len() - 1 {
    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
    ///     assert!(*u16_ptr == u16::from_ne_bytes([5, 6]) || *u16_ptr == u16::from_ne_bytes([6, 7]));
    /// } else {
    ///     // while the pointer can be aligned via `offset`, it would point
    ///     // outside the allocation
    /// }
    /// # }
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "align_offset", since = "1.36.0")]
    pub fn align_offset(self, align: usize) -> usize
    where
        T: Sized,
    {
        // Documented panic; also establishes the precondition of the
        // `align_offset` helper called below.
        if !align.is_power_of_two() {
            panic!("align_offset: align is not a power-of-two");
        }

        // SAFETY: `align` has been checked to be a power of 2 above
        let ret = unsafe { align_offset(self, align) };

        // Inform Miri that we want to consider the resulting pointer to be suitably aligned.
        #[cfg(miri)]
        if ret != usize::MAX {
            intrinsics::miri_promise_symbolic_alignment(self.wrapping_add(ret).cast(), align);
        }

        ret
    }
1354
    /// Returns whether the pointer is properly aligned for `T`.
    ///
    /// # Examples
    ///
    /// ```
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let data = AlignedI32(42);
    /// let ptr = &data as *const AlignedI32;
    ///
    /// assert!(ptr.is_aligned());
    /// assert!(!ptr.wrapping_byte_add(1).is_aligned());
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub fn is_aligned(self) -> bool
    where
        T: Sized,
    {
        // Delegate to the general check using `T`'s ABI-required alignment
        // (always a power of two, so this cannot panic).
        self.is_aligned_to(align_of::<T>())
    }
1380
1381    /// Returns whether the pointer is aligned to `align`.
1382    ///
1383    /// For non-`Sized` pointees this operation considers only the data pointer,
1384    /// ignoring the metadata.
1385    ///
1386    /// # Panics
1387    ///
1388    /// The function panics if `align` is not a power-of-two (this includes 0).
1389    ///
1390    /// # Examples
1391    ///
1392    /// ```
1393    /// #![feature(pointer_is_aligned_to)]
1394    ///
1395    /// // On some platforms, the alignment of i32 is less than 4.
1396    /// #[repr(align(4))]
1397    /// struct AlignedI32(i32);
1398    ///
1399    /// let data = AlignedI32(42);
1400    /// let ptr = &data as *const AlignedI32;
1401    ///
1402    /// assert!(ptr.is_aligned_to(1));
1403    /// assert!(ptr.is_aligned_to(2));
1404    /// assert!(ptr.is_aligned_to(4));
1405    ///
1406    /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
1407    /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
1408    ///
1409    /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
1410    /// ```
1411    #[must_use]
1412    #[inline]
1413    #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
1414    pub fn is_aligned_to(self, align: usize) -> bool {
1415        if !align.is_power_of_two() {
1416            panic!("is_aligned_to: align is not a power-of-two");
1417        }
1418
1419        self.addr() & (align - 1) == 0
1420    }
1421}
1422
1423impl<T> *const T {
1424    /// Casts from a type to its maybe-uninitialized version.
1425    #[must_use]
1426    #[inline(always)]
1427    #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
1428    #[cfg(not(feature = "ferrocene_certified"))]
1429    pub const fn cast_uninit(self) -> *const MaybeUninit<T> {
1430        self as _
1431    }
1432}
1433#[cfg(not(feature = "ferrocene_certified"))]
1434impl<T> *const MaybeUninit<T> {
1435    /// Casts from a maybe-uninitialized type to its initialized version.
1436    ///
1437    /// This is always safe, since UB can only occur if the pointer is read
1438    /// before being initialized.
1439    #[must_use]
1440    #[inline(always)]
1441    #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
1442    pub const fn cast_init(self) -> *const T {
1443        self as _
1444    }
1445}
1446
1447impl<T> *const [T] {
    /// Returns the length of a raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the raw slice cannot be cast to a slice
    /// reference because the pointer is null or unaligned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline]
    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
    pub const fn len(self) -> usize {
        // The element count of a `*const [T]` is its pointer metadata.
        metadata(self)
    }
1469
1470    /// Returns `true` if the raw slice has a length of 0.
1471    ///
1472    /// # Examples
1473    ///
1474    /// ```
1475    /// use std::ptr;
1476    ///
1477    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
1478    /// assert!(!slice.is_empty());
1479    /// ```
1480    #[inline(always)]
1481    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
1482    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
1483    pub const fn is_empty(self) -> bool {
1484        self.len() == 0
1485    }
1486
1487    /// Returns a raw pointer to the slice's buffer.
1488    ///
1489    /// This is equivalent to casting `self` to `*const T`, but more type-safe.
1490    ///
1491    /// # Examples
1492    ///
1493    /// ```rust
1494    /// #![feature(slice_ptr_get)]
1495    /// use std::ptr;
1496    ///
1497    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
1498    /// assert_eq!(slice.as_ptr(), ptr::null());
1499    /// ```
1500    #[inline]
1501    #[unstable(feature = "slice_ptr_get", issue = "74265")]
1502    pub const fn as_ptr(self) -> *const T {
1503        self as *const T
1504    }
1505
1506    /// Gets a raw pointer to the underlying array.
1507    ///
1508    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
1509    #[unstable(feature = "slice_as_array", issue = "133508")]
1510    #[inline]
1511    #[must_use]
1512    pub const fn as_array<const N: usize>(self) -> Option<*const [T; N]> {
1513        if self.len() == N {
1514            let me = self.as_ptr() as *const [T; N];
1515            Some(me)
1516        } else {
1517            None
1518        }
1519    }
1520
1521    /// Returns a raw pointer to an element or subslice, without doing bounds
1522    /// checking.
1523    ///
1524    /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
1525    /// is *[undefined behavior]* even if the resulting pointer is not used.
1526    ///
1527    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1528    ///
1529    /// # Examples
1530    ///
1531    /// ```
1532    /// #![feature(slice_ptr_get)]
1533    ///
1534    /// let x = &[1, 2, 4] as *const [i32];
1535    ///
1536    /// unsafe {
1537    ///     assert_eq!(x.get_unchecked(1), x.as_ptr().add(1));
1538    /// }
1539    /// ```
1540    #[unstable(feature = "slice_ptr_get", issue = "74265")]
1541    #[rustc_const_unstable(feature = "const_index", issue = "143775")]
1542    #[inline]
1543    #[cfg(not(feature = "ferrocene_certified"))]
1544    pub const unsafe fn get_unchecked<I>(self, index: I) -> *const I::Output
1545    where
1546        I: [const] SliceIndex<[T]>,
1547    {
1548        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
1549        unsafe { index.get_unchecked(self) }
1550    }
1551
1552    #[doc = include_str!("docs/as_uninit_slice.md")]
1553    #[inline]
1554    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1555    #[cfg(not(feature = "ferrocene_certified"))]
1556    pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
1557        if self.is_null() {
1558            None
1559        } else {
1560            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
1561            Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
1562        }
1563    }
1564}
1565
1566impl<T> *const T {
1567    /// Casts from a pointer-to-`T` to a pointer-to-`[T; N]`.
1568    #[inline]
1569    #[unstable(feature = "ptr_cast_array", issue = "144514")]
1570    pub const fn cast_array<const N: usize>(self) -> *const [T; N] {
1571        self.cast()
1572    }
1573}
1574
1575#[cfg(not(feature = "ferrocene_certified"))]
1576impl<T, const N: usize> *const [T; N] {
1577    /// Returns a raw pointer to the array's buffer.
1578    ///
1579    /// This is equivalent to casting `self` to `*const T`, but more type-safe.
1580    ///
1581    /// # Examples
1582    ///
1583    /// ```rust
1584    /// #![feature(array_ptr_get)]
1585    /// use std::ptr;
1586    ///
1587    /// let arr: *const [i8; 3] = ptr::null();
1588    /// assert_eq!(arr.as_ptr(), ptr::null());
1589    /// ```
1590    #[inline]
1591    #[unstable(feature = "array_ptr_get", issue = "119834")]
1592    pub const fn as_ptr(self) -> *const T {
1593        self as *const T
1594    }
1595
1596    /// Returns a raw pointer to a slice containing the entire array.
1597    ///
1598    /// # Examples
1599    ///
1600    /// ```
1601    /// #![feature(array_ptr_get)]
1602    ///
1603    /// let arr: *const [i32; 3] = &[1, 2, 4] as *const [i32; 3];
1604    /// let slice: *const [i32] = arr.as_slice();
1605    /// assert_eq!(slice.len(), 3);
1606    /// ```
1607    #[inline]
1608    #[unstable(feature = "array_ptr_get", issue = "119834")]
1609    pub const fn as_slice(self) -> *const [T] {
1610        self
1611    }
1612}
1613
/// Pointer equality is by address, as produced by the [`<*const T>::addr`](pointer::addr) method.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PointeeSized> PartialEq for *const T {
    #[inline]
    // The allow silences the lint that fires on `==` between wide pointers,
    // where the comparison involves the metadata as well as the address.
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn eq(&self, other: &*const T) -> bool {
        // Primitive raw-pointer comparison.
        *self == *other
    }
}
1623
/// Pointer equality is an equivalence relation.
// Marker impl: `Eq` has no methods; it records that raw-pointer `==`
// is a total equality (reflexive, symmetric, transitive).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PointeeSized> Eq for *const T {}
1627
1628/// Pointer comparison is by address, as produced by the `[`<*const T>::addr`](pointer::addr)` method.
1629#[stable(feature = "rust1", since = "1.0.0")]
1630#[cfg(not(feature = "ferrocene_certified"))]
1631impl<T: PointeeSized> Ord for *const T {
1632    #[inline]
1633    #[allow(ambiguous_wide_pointer_comparisons)]
1634    fn cmp(&self, other: &*const T) -> Ordering {
1635        if self < other {
1636            Less
1637        } else if self == other {
1638            Equal
1639        } else {
1640            Greater
1641        }
1642    }
1643}
1644
1645/// Pointer comparison is by address, as produced by the `[`<*const T>::addr`](pointer::addr)` method.
1646#[stable(feature = "rust1", since = "1.0.0")]
1647#[cfg(not(feature = "ferrocene_certified"))]
1648impl<T: PointeeSized> PartialOrd for *const T {
1649    #[inline]
1650    #[allow(ambiguous_wide_pointer_comparisons)]
1651    fn partial_cmp(&self, other: &*const T) -> Option<Ordering> {
1652        Some(self.cmp(other))
1653    }
1654
1655    #[inline]
1656    #[allow(ambiguous_wide_pointer_comparisons)]
1657    fn lt(&self, other: &*const T) -> bool {
1658        *self < *other
1659    }
1660
1661    #[inline]
1662    #[allow(ambiguous_wide_pointer_comparisons)]
1663    fn le(&self, other: &*const T) -> bool {
1664        *self <= *other
1665    }
1666
1667    #[inline]
1668    #[allow(ambiguous_wide_pointer_comparisons)]
1669    fn gt(&self, other: &*const T) -> bool {
1670        *self > *other
1671    }
1672
1673    #[inline]
1674    #[allow(ambiguous_wide_pointer_comparisons)]
1675    fn ge(&self, other: &*const T) -> bool {
1676        *self >= *other
1677    }
1678}
1679
1680#[stable(feature = "raw_ptr_default", since = "1.88.0")]
1681#[cfg(not(feature = "ferrocene_certified"))]
1682impl<T: ?Sized + Thin> Default for *const T {
1683    /// Returns the default value of [`null()`][crate::ptr::null].
1684    fn default() -> Self {
1685        crate::ptr::null()
1686    }
1687}