// core/ptr/const_ptr.rs — inherent methods of the `*const T` primitive
// (Ferrocene-annotated copy of the Rust core library).
1use super::*;
2use crate::cmp::Ordering::{Equal, Greater, Less};
3use crate::intrinsics::const_eval_select;
4#[cfg(not(feature = "ferrocene_subset"))]
5use crate::mem::{self, SizedTypeProperties};
6#[cfg(not(feature = "ferrocene_subset"))]
7use crate::slice::{self, SliceIndex};
8
9// Ferrocene addition: imports for certified subset
10#[cfg(feature = "ferrocene_subset")]
11#[rustfmt::skip]
12use crate::mem;
13
14impl<T: PointeeSized> *const T {
    #[doc = include_str!("docs/is_null.md")]
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "Follow the rabbit";
    /// let ptr: *const u8 = s.as_ptr();
    /// assert!(!ptr.is_null());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
    #[rustc_diagnostic_item = "ptr_const_is_null"]
    #[inline]
    #[rustc_allow_const_fn_unstable(const_eval_select)]
    pub const fn is_null(self) -> bool {
        // Compare via a cast to a thin pointer, so fat pointers are only
        // considering their "data" part for null-ness.
        let ptr = self as *const u8;
        const_eval_select!(
            @capture { ptr: *const u8 } -> bool:
            // This use of `const_raw_ptr_comparison` has been explicitly blessed by t-lang.
            if const #[rustc_allow_const_fn_unstable(const_raw_ptr_comparison)] {
                match (ptr).guaranteed_eq(null_mut()) {
                    Some(res) => res,
                    // To remain maximally conservative, we stop execution when we don't
                    // know whether the pointer is null or not.
                    // We can *not* return `false` here, that would be unsound in `NonNull::new`!
                    None => panic!("null-ness of this pointer cannot be determined in const context"),
                }
            } else {
                // At runtime, "null" is exactly the pointer whose address is 0.
                ptr.addr() == 0
            }
        )
    }
49
50    /// Casts to a pointer of another type.
51    #[stable(feature = "ptr_cast", since = "1.38.0")]
52    #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
53    #[rustc_diagnostic_item = "const_ptr_cast"]
54    #[inline(always)]
55    pub const fn cast<U>(self) -> *const U {
56        self as _
57    }
58
59    /// Try to cast to a pointer of another type by checking alignment.
60    ///
61    /// If the pointer is properly aligned to the target type, it will be
62    /// cast to the target type. Otherwise, `None` is returned.
63    ///
64    /// # Examples
65    ///
66    /// ```rust
67    /// #![feature(pointer_try_cast_aligned)]
68    ///
69    /// let x = 0u64;
70    ///
71    /// let aligned: *const u64 = &x;
72    /// let unaligned = unsafe { aligned.byte_add(1) };
73    ///
74    /// assert!(aligned.try_cast_aligned::<u32>().is_some());
75    /// assert!(unaligned.try_cast_aligned::<u32>().is_none());
76    /// ```
77    #[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
78    #[must_use = "this returns the result of the operation, \
79                  without modifying the original"]
80    #[inline]
81    pub fn try_cast_aligned<U>(self) -> Option<*const U> {
82        if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
83    }
84
85    /// Uses the address value in a new pointer of another type.
86    ///
87    /// This operation will ignore the address part of its `meta` operand and discard existing
88    /// metadata of `self`. For pointers to a sized types (thin pointers), this has the same effect
89    /// as a simple cast. For pointers to an unsized type (fat pointers) this recombines the address
90    /// with new metadata such as slice lengths or `dyn`-vtable.
91    ///
92    /// The resulting pointer will have provenance of `self`. This operation is semantically the
93    /// same as creating a new pointer with the data pointer value of `self` but the metadata of
94    /// `meta`, being fat or thin depending on the `meta` operand.
95    ///
96    /// # Examples
97    ///
98    /// This function is primarily useful for enabling pointer arithmetic on potentially fat
99    /// pointers. The pointer is cast to a sized pointee to utilize offset operations and then
100    /// recombined with its own original metadata.
101    ///
102    /// ```
103    /// #![feature(set_ptr_value)]
104    /// # use core::fmt::Debug;
105    /// let arr: [i32; 3] = [1, 2, 3];
106    /// let mut ptr = arr.as_ptr() as *const dyn Debug;
107    /// let thin = ptr as *const u8;
108    /// unsafe {
109    ///     ptr = thin.add(8).with_metadata_of(ptr);
110    ///     # assert_eq!(*(ptr as *const i32), 3);
111    ///     println!("{:?}", &*ptr); // will print "3"
112    /// }
113    /// ```
114    ///
115    /// # *Incorrect* usage
116    ///
117    /// The provenance from pointers is *not* combined. The result must only be used to refer to the
118    /// address allowed by `self`.
119    ///
120    /// ```rust,no_run
121    /// #![feature(set_ptr_value)]
122    /// let x = 0u32;
123    /// let y = 1u32;
124    ///
125    /// let x = (&x) as *const u32;
126    /// let y = (&y) as *const u32;
127    ///
128    /// let offset = (x as usize - y as usize) / 4;
129    /// let bad = x.wrapping_add(offset).with_metadata_of(y);
130    ///
131    /// // This dereference is UB. The pointer only has provenance for `x` but points to `y`.
132    /// println!("{:?}", unsafe { &*bad });
133    /// ```
134    #[unstable(feature = "set_ptr_value", issue = "75091")]
135    #[must_use = "returns a new pointer rather than modifying its argument"]
136    #[inline]
137    pub const fn with_metadata_of<U>(self, meta: *const U) -> *const U
138    where
139        U: PointeeSized,
140    {
141        from_raw_parts::<U>(self as *const (), metadata(meta))
142    }
143
144    /// Changes constness without changing the type.
145    ///
146    /// This is a bit safer than `as` because it wouldn't silently change the type if the code is
147    /// refactored.
148    #[stable(feature = "ptr_const_cast", since = "1.65.0")]
149    #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")]
150    #[rustc_diagnostic_item = "ptr_cast_mut"]
151    #[inline(always)]
152    #[cfg(not(feature = "ferrocene_subset"))]
153    pub const fn cast_mut(self) -> *mut T {
154        self as _
155    }
156
    #[doc = include_str!("./docs/addr.md")]
    #[must_use]
    #[inline(always)]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn addr(self) -> usize {
        // A pointer-to-integer transmute currently has exactly the right semantics: it returns the
        // address without exposing the provenance. Note that this is *not* a stable guarantee about
        // transmute semantics, it relies on sysroot crates having special status.
        // The `cast::<()>()` first discards any fat-pointer metadata, so only the
        // data-pointer word is transmuted.
        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
        // provenance).
        unsafe { mem::transmute(self.cast::<()>()) }
    }
169
170    /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
171    /// [`with_exposed_provenance`] and returns the "address" portion.
172    ///
173    /// This is equivalent to `self as usize`, which semantically discards provenance information.
174    /// Furthermore, this (like the `as` cast) has the implicit side-effect of marking the
175    /// provenance as 'exposed', so on platforms that support it you can later call
176    /// [`with_exposed_provenance`] to reconstitute the original pointer including its provenance.
177    ///
178    /// Due to its inherent ambiguity, [`with_exposed_provenance`] may not be supported by tools
179    /// that help you to stay conformant with the Rust memory model. It is recommended to use
180    /// [Strict Provenance][crate::ptr#strict-provenance] APIs such as [`with_addr`][pointer::with_addr]
181    /// wherever possible, in which case [`addr`][pointer::addr] should be used instead of `expose_provenance`.
182    ///
183    /// On most platforms this will produce a value with the same bytes as the original pointer,
184    /// because all the bytes are dedicated to describing the address. Platforms which need to store
185    /// additional information in the pointer may not support this operation, since the 'expose'
186    /// side-effect which is required for [`with_exposed_provenance`] to work is typically not
187    /// available.
188    ///
189    /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
190    ///
191    /// [`with_exposed_provenance`]: with_exposed_provenance
192    #[inline(always)]
193    #[stable(feature = "exposed_provenance", since = "1.84.0")]
194    pub fn expose_provenance(self) -> usize {
195        self.cast::<()>() as usize
196    }
197
    /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
    /// `self`.
    ///
    /// This is similar to a `addr as *const T` cast, but copies
    /// the *provenance* of `self` to the new pointer.
    /// This avoids the inherent ambiguity of the unary cast.
    ///
    /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset
    /// `self` to the given address, and therefore has all the same capabilities and restrictions.
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
    #[must_use]
    #[inline]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    #[cfg(not(feature = "ferrocene_subset"))]
    pub fn with_addr(self, addr: usize) -> Self {
        // This should probably be an intrinsic to avoid doing any sort of arithmetic, but
        // meanwhile, we can implement it with `wrapping_offset`, which preserves the pointer's
        // provenance.
        let self_addr = self.addr() as isize;
        let dest_addr = addr as isize;
        // Two's-complement wrapping difference: the byte offset that moves `self_addr` to
        // `dest_addr`, regardless of which address is larger.
        let offset = dest_addr.wrapping_sub(self_addr);
        self.wrapping_byte_offset(offset)
    }
222
223    /// Creates a new pointer by mapping `self`'s address to a new one, preserving the
224    /// [provenance][crate::ptr#provenance] of `self`.
225    ///
226    /// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details.
227    ///
228    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
229    #[must_use]
230    #[inline]
231    #[stable(feature = "strict_provenance", since = "1.84.0")]
232    #[cfg(not(feature = "ferrocene_subset"))]
233    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
234        self.with_addr(f(self.addr()))
235    }
236
237    /// Decompose a (possibly wide) pointer into its data pointer and metadata components.
238    ///
239    /// The pointer can be later reconstructed with [`from_raw_parts`].
240    #[unstable(feature = "ptr_metadata", issue = "81513")]
241    #[inline]
242    pub const fn to_raw_parts(self) -> (*const (), <T as super::Pointee>::Metadata) {
243        (self.cast(), metadata(self))
244    }
245
246    #[doc = include_str!("./docs/as_ref.md")]
247    ///
248    /// ```
249    /// let ptr: *const u8 = &10u8 as *const u8;
250    ///
251    /// unsafe {
252    ///     let val_back = &*ptr;
253    ///     assert_eq!(val_back, &10);
254    /// }
255    /// ```
256    ///
257    /// # Examples
258    ///
259    /// ```
260    /// let ptr: *const u8 = &10u8 as *const u8;
261    ///
262    /// unsafe {
263    ///     if let Some(val_back) = ptr.as_ref() {
264    ///         assert_eq!(val_back, &10);
265    ///     }
266    /// }
267    /// ```
268    ///
269    ///
270    /// [`is_null`]: #method.is_null
271    /// [`as_uninit_ref`]: #method.as_uninit_ref
272    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
273    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
274    #[inline]
275    #[cfg(not(feature = "ferrocene_subset"))]
276    pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
277        // SAFETY: the caller must guarantee that `self` is valid
278        // for a reference if it isn't null.
279        if self.is_null() { None } else { unsafe { Some(&*self) } }
280    }
281
    /// Returns a shared reference to the value behind the pointer.
    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_ref`] must be used instead.
    /// If the pointer may be null, but the value is known to have been initialized, [`as_ref`] must be used instead.
    ///
    /// [`as_ref`]: #method.as_ref
    /// [`as_uninit_ref`]: #method.as_uninit_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_ref_unchecked)]
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     assert_eq!(ptr.as_ref_unchecked(), &10);
    /// }
    /// ```
    // FIXME: mention it in the docs for `as_ref` and `as_uninit_ref` once stabilized.
    #[unstable(feature = "ptr_as_ref_unchecked", issue = "122034")]
    #[inline]
    #[must_use]
    #[cfg(not(feature = "ferrocene_subset"))]
    pub const unsafe fn as_ref_unchecked<'a>(self) -> &'a T {
        // Unlike `as_ref`, there is no null check here: validity (including non-null-ness)
        // is entirely the caller's obligation.
        // SAFETY: the caller must guarantee that `self` is valid for a reference
        unsafe { &*self }
    }
313
314    #[doc = include_str!("./docs/as_uninit_ref.md")]
315    ///
316    /// [`is_null`]: #method.is_null
317    /// [`as_ref`]: #method.as_ref
318    ///
319    /// # Examples
320    ///
321    /// ```
322    /// #![feature(ptr_as_uninit)]
323    ///
324    /// let ptr: *const u8 = &10u8 as *const u8;
325    ///
326    /// unsafe {
327    ///     if let Some(val_back) = ptr.as_uninit_ref() {
328    ///         assert_eq!(val_back.assume_init(), 10);
329    ///     }
330    /// }
331    /// ```
332    #[inline]
333    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
334    #[cfg(not(feature = "ferrocene_subset"))]
335    pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
336    where
337        T: Sized,
338    {
339        // SAFETY: the caller must guarantee that `self` meets all the
340        // requirements for a reference.
341        if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
342    }
343
    #[doc = include_str!("./docs/offset.md")]
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     assert_eq!(*ptr.offset(1) as char, '2');
    ///     assert_eq!(*ptr.offset(2) as char, '3');
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[track_caller]
    pub const unsafe fn offset(self, count: isize) -> *const T
    where
        T: Sized,
    {
        // Returns `false` iff computing `this + count * size` would overflow, i.e. iff the
        // address-calculation precondition of `offset` is violated.
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
            // We can use const_eval_select here because this is only for UB checks.
            const_eval_select!(
                @capture { this: *const (), count: isize, size: usize } -> bool:
                if const {
                    // The check always passes in const contexts; it exists only to catch
                    // runtime UB.
                    true
                } else {
                    // `size` is the size of a Rust type, so we know that
                    // `size <= isize::MAX` and thus `as` cast here is not lossy.
                    let Some(byte_offset) = count.checked_mul(size as isize) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
                    !overflow
                }
            )
        }

        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::offset requires the address calculation to not overflow",
            (
                this: *const () = self as *const (),
                count: isize = count,
                size: usize = size_of::<T>(),
            ) => runtime_offset_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }
399
400    /// Adds a signed offset in bytes to a pointer.
401    ///
402    /// `count` is in units of **bytes**.
403    ///
404    /// This is purely a convenience for casting to a `u8` pointer and
405    /// using [offset][pointer::offset] on it. See that method for documentation
406    /// and safety requirements.
407    ///
408    /// For non-`Sized` pointees this operation changes only the data pointer,
409    /// leaving the metadata untouched.
410    #[must_use]
411    #[inline(always)]
412    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
413    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
414    #[track_caller]
415    #[cfg(not(feature = "ferrocene_subset"))]
416    pub const unsafe fn byte_offset(self, count: isize) -> Self {
417        // SAFETY: the caller must uphold the safety contract for `offset`.
418        unsafe { self.cast::<u8>().offset(count).with_metadata_of(self) }
419    }
420
    /// Adds a signed offset to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocation] that `self` points to
    /// (this is called "[Provenance](ptr/index.html#provenance)").
    /// The pointer must not be used to read or write other allocations.
    ///
    /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocation.
    ///
    /// Compared to [`offset`], this method basically delays the requirement of staying within the
    /// same allocation: [`offset`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
    /// words, leaving the allocation and then re-entering it later is permitted.
    ///
    /// [`offset`]: #method.offset
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_offset(6);
    ///
    /// let mut out = String::new();
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_offset(step);
    /// }
    /// assert_eq!(out.as_str(), "1, 3, 5, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_offset(self, count: isize) -> *const T
    where
        T: Sized,
    {
        // Unlike `offset`, the wrapping intrinsic never has immediate UB, matching the
        // "this operation itself is always safe" contract documented above.
        // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
        unsafe { intrinsics::arith_offset(self, count) }
    }
484
485    /// Adds a signed offset in bytes to a pointer using wrapping arithmetic.
486    ///
487    /// `count` is in units of **bytes**.
488    ///
489    /// This is purely a convenience for casting to a `u8` pointer and
490    /// using [wrapping_offset][pointer::wrapping_offset] on it. See that method
491    /// for documentation.
492    ///
493    /// For non-`Sized` pointees this operation changes only the data pointer,
494    /// leaving the metadata untouched.
495    #[must_use]
496    #[inline(always)]
497    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
498    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
499    #[cfg(not(feature = "ferrocene_subset"))]
500    pub const fn wrapping_byte_offset(self, count: isize) -> Self {
501        self.cast::<u8>().wrapping_offset(count).with_metadata_of(self)
502    }
503
504    /// Masks out bits of the pointer according to a mask.
505    ///
506    /// This is convenience for `ptr.map_addr(|a| a & mask)`.
507    ///
508    /// For non-`Sized` pointees this operation changes only the data pointer,
509    /// leaving the metadata untouched.
510    ///
511    /// ## Examples
512    ///
513    /// ```
514    /// #![feature(ptr_mask)]
515    /// let v = 17_u32;
516    /// let ptr: *const u32 = &v;
517    ///
518    /// // `u32` is 4 bytes aligned,
519    /// // which means that lower 2 bits are always 0.
520    /// let tag_mask = 0b11;
521    /// let ptr_mask = !tag_mask;
522    ///
523    /// // We can store something in these lower bits
524    /// let tagged_ptr = ptr.map_addr(|a| a | 0b10);
525    ///
526    /// // Get the "tag" back
527    /// let tag = tagged_ptr.addr() & tag_mask;
528    /// assert_eq!(tag, 0b10);
529    ///
530    /// // Note that `tagged_ptr` is unaligned, it's UB to read from it.
531    /// // To get original pointer `mask` can be used:
532    /// let masked_ptr = tagged_ptr.mask(ptr_mask);
533    /// assert_eq!(unsafe { *masked_ptr }, 17);
534    /// ```
535    #[unstable(feature = "ptr_mask", issue = "98290")]
536    #[must_use = "returns a new pointer rather than modifying its argument"]
537    #[inline(always)]
538    #[cfg(not(feature = "ferrocene_subset"))]
539    pub fn mask(self, mask: usize) -> *const T {
540        intrinsics::ptr_mask(self.cast::<()>(), mask).with_metadata_of(self)
541    }
542
    /// Calculates the distance between two pointers within the same allocation. The returned value is in
    /// units of T: the distance in bytes divided by `size_of::<T>()`.
    ///
    /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
    /// except that it has a lot more opportunities for UB, in exchange for the compiler
    /// better understanding what you are doing.
    ///
    /// The primary motivation of this method is for computing the `len` of an array/slice
    /// of `T` that you are currently representing as a "start" and "end" pointer
    /// (and "end" is "one past the end" of the array).
    /// In that case, `end.offset_from(start)` gets you the length of the array.
    ///
    /// All of the following safety requirements are trivially satisfied for this usecase.
    ///
    /// [`offset`]: #method.offset
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * `self` and `origin` must either
    ///
    ///   * point to the same address, or
    ///   * both be [derived from][crate::ptr#provenance] a pointer to the same [allocation], and the memory range between
    ///     the two pointers must be in bounds of that object. (See below for an example.)
    ///
    /// * The distance between the pointers, in bytes, must be an exact multiple
    ///   of the size of `T`.
    ///
    /// As a consequence, the absolute distance between the pointers, in bytes, computed on
    /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
    /// implied by the in-bounds requirement, and the fact that no allocation can be larger
    /// than `isize::MAX` bytes.
    ///
    /// The requirement for pointers to be derived from the same allocation is primarily
    /// needed for `const`-compatibility: the distance between pointers into *different* allocated
    /// objects is not known at compile-time. However, the requirement also exists at
    /// runtime and may be exploited by optimizations. If you wish to compute the difference between
    /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
    /// origin as isize) / size_of::<T>()`.
    // FIXME: recommend `addr()` instead of `as usize` once that is stable.
    ///
    /// [`add`]: #method.add
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let a = [0; 5];
    /// let ptr1: *const i32 = &a[1];
    /// let ptr2: *const i32 = &a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
    ///     assert_eq!(ptr1.offset(2), ptr2);
    ///     assert_eq!(ptr2.offset(-2), ptr1);
    /// }
    /// ```
    ///
    /// *Incorrect* usage:
    ///
    /// ```rust,no_run
    /// let ptr1 = Box::into_raw(Box::new(0u8)) as *const u8;
    /// let ptr2 = Box::into_raw(Box::new(1u8)) as *const u8;
    /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
    /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
    /// let ptr2_other = (ptr1 as *const u8).wrapping_offset(diff).wrapping_offset(1);
    /// assert_eq!(ptr2 as usize, ptr2_other as usize);
    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
    /// // computing their offset is undefined behavior, even though
    /// // they point to addresses that are in-bounds of the same object!
    /// unsafe {
    ///     let one = ptr2_other.offset_from(ptr2); // Undefined Behavior! ⚠️
    /// }
    /// ```
    #[stable(feature = "ptr_offset_from", since = "1.47.0")]
    #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[cfg(not(feature = "ferrocene_subset"))]
    pub const unsafe fn offset_from(self, origin: *const T) -> isize
    where
        T: Sized,
    {
        // Reject ZSTs (the byte distance cannot be divided by a size of zero); the upper
        // bound always holds for real Rust types, whose size never exceeds `isize::MAX`.
        let pointee_size = size_of::<T>();
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from`.
        unsafe { intrinsics::ptr_offset_from(self, origin) }
    }
638
639    /// Calculates the distance between two pointers within the same allocation. The returned value is in
640    /// units of **bytes**.
641    ///
642    /// This is purely a convenience for casting to a `u8` pointer and
643    /// using [`offset_from`][pointer::offset_from] on it. See that method for
644    /// documentation and safety requirements.
645    ///
646    /// For non-`Sized` pointees this operation considers only the data pointers,
647    /// ignoring the metadata.
648    #[inline(always)]
649    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
650    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
651    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
652    #[cfg(not(feature = "ferrocene_subset"))]
653    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: *const U) -> isize {
654        // SAFETY: the caller must uphold the safety contract for `offset_from`.
655        unsafe { self.cast::<u8>().offset_from(origin.cast::<u8>()) }
656    }
657
    /// Calculates the distance between two pointers within the same allocation, *where it's known that
    /// `self` is equal to or greater than `origin`*. The returned value is in
    /// units of T: the distance in bytes is divided by `size_of::<T>()`.
    ///
    /// This computes the same value that [`offset_from`](#method.offset_from)
    /// would compute, but with the added precondition that the offset is
    /// guaranteed to be non-negative.  This method is equivalent to
    /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
    /// but it provides slightly more information to the optimizer, which can
    /// sometimes allow it to optimize slightly better with some backends.
    ///
    /// This method can be thought of as recovering the `count` that was passed
    /// to [`add`](#method.add) (or, with the parameters in the other order,
    /// to [`sub`](#method.sub)).  The following are all equivalent, assuming
    /// that their safety preconditions are met:
    /// ```rust
    /// # unsafe fn blah(ptr: *const i32, origin: *const i32, count: usize) -> bool { unsafe {
    /// ptr.offset_from_unsigned(origin) == count
    /// # &&
    /// origin.add(count) == ptr
    /// # &&
    /// ptr.sub(count) == origin
    /// # } }
    /// ```
    ///
    /// # Safety
    ///
    /// - The distance between the pointers must be non-negative (`self >= origin`)
    ///
    /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
    ///   apply to this method as well; see it for the full details.
    ///
    /// Importantly, despite the return type of this method being able to represent
    /// a larger offset, it's still *not permitted* to pass pointers which differ
    /// by more than `isize::MAX` *bytes*.  As such, the result of this method will
    /// always be less than or equal to `isize::MAX as usize`.
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// ```
    /// let a = [0; 5];
    /// let ptr1: *const i32 = &a[1];
    /// let ptr2: *const i32 = &a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
    ///     assert_eq!(ptr1.add(2), ptr2);
    ///     assert_eq!(ptr2.sub(2), ptr1);
    ///     assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
    /// }
    ///
    /// // This would be incorrect, as the pointers are not correctly ordered:
    /// // ptr1.offset_from_unsigned(ptr2)
    /// ```
    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
    #[inline]
    #[track_caller]
    pub const unsafe fn offset_from_unsigned(self, origin: *const T) -> usize
    where
        T: Sized,
    {
        // Runtime check for the `self >= origin` precondition; the `const` arm
        // unconditionally passes (the check exists only to catch runtime UB).
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_ptr_ge(this: *const (), origin: *const ()) -> bool {
            const_eval_select!(
                @capture { this: *const (), origin: *const () } -> bool:
                if const {
                    true
                } else {
                    this >= origin
                }
            )
        }

        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::offset_from_unsigned requires `self >= origin`",
            (
                this: *const () = self as *const (),
                origin: *const () = origin as *const (),
            ) => runtime_ptr_ge(this, origin)
        );

        // Reject ZSTs (the byte distance cannot be divided by a size of zero); the upper
        // bound always holds for real Rust types, whose size never exceeds `isize::MAX`.
        let pointee_size = size_of::<T>();
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from_unsigned`.
        unsafe { intrinsics::ptr_offset_from_unsigned(self, origin) }
    }
749
750    /// Calculates the distance between two pointers within the same allocation, *where it's known that
751    /// `self` is equal to or greater than `origin`*. The returned value is in
752    /// units of **bytes**.
753    ///
754    /// This is purely a convenience for casting to a `u8` pointer and
755    /// using [`offset_from_unsigned`][pointer::offset_from_unsigned] on it.
756    /// See that method for documentation and safety requirements.
757    ///
758    /// For non-`Sized` pointees this operation considers only the data pointers,
759    /// ignoring the metadata.
760    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
761    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
762    #[inline]
763    #[track_caller]
764    #[cfg(not(feature = "ferrocene_subset"))]
765    pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: *const U) -> usize {
766        // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
767        unsafe { self.cast::<u8>().offset_from_unsigned(origin.cast::<u8>()) }
768    }
769
    /// Returns whether two pointers are guaranteed to be equal.
    ///
    /// At runtime this function behaves like `Some(self == other)`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine equality of two pointers, so this function may
    /// spuriously return `None` for pointers that later actually turn out to have its equality known.
    /// But when it returns `Some`, the pointers' equality is guaranteed to be known.
    ///
    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
    /// version and unsafe code must not
    /// rely on the result of this function for soundness. It is suggested to only use this function
    /// for performance optimizations where spurious `None` return values by this function do not
    /// affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[inline]
    pub const fn guaranteed_eq(self, other: *const T) -> Option<bool>
    where
        T: Sized,
    {
        // The intrinsic answers `1` for "guaranteed equal", `2` for
        // "undecidable" (only possible during compile-time evaluation),
        // and otherwise "guaranteed unequal".
        match intrinsics::ptr_guaranteed_cmp(self, other) {
            #[ferrocene::annotation(
                "This cannot be reached in runtime code so it cannot be covered."
            )]
            2 => None,
            other => Some(other == 1),
        }
    }
802
803    /// Returns whether two pointers are guaranteed to be inequal.
804    ///
805    /// At runtime this function behaves like `Some(self != other)`.
806    /// However, in some contexts (e.g., compile-time evaluation),
807    /// it is not always possible to determine inequality of two pointers, so this function may
808    /// spuriously return `None` for pointers that later actually turn out to have its inequality known.
809    /// But when it returns `Some`, the pointers' inequality is guaranteed to be known.
810    ///
811    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
812    /// version and unsafe code must not
813    /// rely on the result of this function for soundness. It is suggested to only use this function
814    /// for performance optimizations where spurious `None` return values by this function do not
815    /// affect the outcome, but just the performance.
816    /// The consequences of using this method to make runtime and compile-time code behave
817    /// differently have not been explored. This method should not be used to introduce such
818    /// differences, and it should also not be stabilized before we have a better understanding
819    /// of this issue.
820    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
821    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
822    #[inline]
823    #[cfg(not(feature = "ferrocene_subset"))]
824    pub const fn guaranteed_ne(self, other: *const T) -> Option<bool>
825    where
826        T: Sized,
827    {
828        match self.guaranteed_eq(other) {
829            None => None,
830            Some(eq) => Some(!eq),
831        }
832    }
833
    #[doc = include_str!("./docs/add.md")]
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     assert_eq!(*ptr.add(1), b'2');
    ///     assert_eq!(*ptr.add(2), b'3');
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[track_caller]
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // UB-check helper: at runtime, require that `count * size` does not
        // overflow, stays within `isize::MAX` bytes, and that adding it to the
        // address does not wrap. In const contexts the check is skipped
        // (vacuously `true`).
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add(byte_offset);
                    byte_offset <= (isize::MAX as usize) && !overflow
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::add requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_add_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }
888
889    /// Adds an unsigned offset in bytes to a pointer.
890    ///
891    /// `count` is in units of bytes.
892    ///
893    /// This is purely a convenience for casting to a `u8` pointer and
894    /// using [add][pointer::add] on it. See that method for documentation
895    /// and safety requirements.
896    ///
897    /// For non-`Sized` pointees this operation changes only the data pointer,
898    /// leaving the metadata untouched.
899    #[must_use]
900    #[inline(always)]
901    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
902    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
903    #[track_caller]
904    pub const unsafe fn byte_add(self, count: usize) -> Self {
905        // SAFETY: the caller must uphold the safety contract for `add`.
906        unsafe { self.cast::<u8>().add(count).with_metadata_of(self) }
907    }
908
    /// Subtracts an unsigned offset from a pointer.
    ///
    /// This can only move the pointer backward (or not move it). If you need to move forward or
    /// backward depending on the value, then you might want [`offset`](#method.offset) instead
    /// which takes a signed offset.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
    ///   "wrapping around"), must fit in an `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
    ///   [allocation], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocation. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// Consider using [`wrapping_sub`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_sub`]: #method.wrapping_sub
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    ///
    /// unsafe {
    ///     let end: *const u8 = s.as_ptr().add(3);
    ///     assert_eq!(*end.sub(1), b'3');
    ///     assert_eq!(*end.sub(2), b'2');
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_subset"))]
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        // UB-check helper: at runtime, require that `count * size` does not
        // overflow, stays within `isize::MAX` bytes, and does not move below
        // address zero. In const contexts the check is skipped (vacuously `true`).
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::sub requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_sub_nowrap(this, count, size)
        );

        if T::IS_ZST {
            // Pointer arithmetic does nothing when the pointee is a ZST.
            self
        } else {
            // SAFETY: the caller must uphold the safety contract for `offset`.
            // Because the pointee is *not* a ZST, that means that `count` is
            // at most `isize::MAX`, and thus the negation cannot overflow.
            unsafe { intrinsics::offset(self, intrinsics::unchecked_sub(0, count as isize)) }
        }
    }
1001
1002    /// Subtracts an unsigned offset in bytes from a pointer.
1003    ///
1004    /// `count` is in units of bytes.
1005    ///
1006    /// This is purely a convenience for casting to a `u8` pointer and
1007    /// using [sub][pointer::sub] on it. See that method for documentation
1008    /// and safety requirements.
1009    ///
1010    /// For non-`Sized` pointees this operation changes only the data pointer,
1011    /// leaving the metadata untouched.
1012    #[must_use]
1013    #[inline(always)]
1014    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1015    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1016    #[track_caller]
1017    #[cfg(not(feature = "ferrocene_subset"))]
1018    pub const unsafe fn byte_sub(self, count: usize) -> Self {
1019        // SAFETY: the caller must uphold the safety contract for `sub`.
1020        unsafe { self.cast::<u8>().sub(count).with_metadata_of(self) }
1021    }
1022
    /// Adds an unsigned offset to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocation] that `self` points to; it must not
    /// be used to read or write other allocations.
    ///
    /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocation.
    ///
    /// Compared to [`add`], this method basically delays the requirement of staying within the
    /// same allocation: [`add`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocation and then re-entering it later is permitted.
    ///
    /// [`add`]: #method.add
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_add(6);
    ///
    /// let mut out = String::new();
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_add(step);
    /// }
    /// assert_eq!(out, "1, 3, 5, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // Reinterpret the unsigned count as a signed offset; `wrapping_offset`
        // supplies the wrapping semantics either way.
        self.wrapping_offset(count as isize)
    }
1084
1085    /// Adds an unsigned offset in bytes to a pointer using wrapping arithmetic.
1086    ///
1087    /// `count` is in units of bytes.
1088    ///
1089    /// This is purely a convenience for casting to a `u8` pointer and
1090    /// using [wrapping_add][pointer::wrapping_add] on it. See that method for documentation.
1091    ///
1092    /// For non-`Sized` pointees this operation changes only the data pointer,
1093    /// leaving the metadata untouched.
1094    #[must_use]
1095    #[inline(always)]
1096    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1097    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1098    #[cfg(not(feature = "ferrocene_subset"))]
1099    pub const fn wrapping_byte_add(self, count: usize) -> Self {
1100        self.cast::<u8>().wrapping_add(count).with_metadata_of(self)
1101    }
1102
    /// Subtracts an unsigned offset from a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocation] that `self` points to; it must not
    /// be used to read or write other allocations.
    ///
    /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocation.
    ///
    /// Compared to [`sub`], this method basically delays the requirement of staying within the
    /// same allocation: [`sub`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocation and then re-entering it later is permitted.
    ///
    /// [`sub`]: #method.sub
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements (backwards)
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let start_rounded_down = ptr.wrapping_sub(2);
    /// ptr = ptr.wrapping_add(4);
    /// let step = 2;
    /// let mut out = String::new();
    /// while ptr != start_rounded_down {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_sub(step);
    /// }
    /// assert_eq!(out, "5, 3, 1, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg(not(feature = "ferrocene_subset"))]
    pub const fn wrapping_sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        // `wrapping_neg` keeps the conversion well-defined even when
        // `count as isize` is `isize::MIN`, whose plain negation would overflow.
        self.wrapping_offset((count as isize).wrapping_neg())
    }
1165
1166    /// Subtracts an unsigned offset in bytes from a pointer using wrapping arithmetic.
1167    ///
1168    /// `count` is in units of bytes.
1169    ///
1170    /// This is purely a convenience for casting to a `u8` pointer and
1171    /// using [wrapping_sub][pointer::wrapping_sub] on it. See that method for documentation.
1172    ///
1173    /// For non-`Sized` pointees this operation changes only the data pointer,
1174    /// leaving the metadata untouched.
1175    #[must_use]
1176    #[inline(always)]
1177    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1178    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1179    #[cfg(not(feature = "ferrocene_subset"))]
1180    pub const fn wrapping_byte_sub(self, count: usize) -> Self {
1181        self.cast::<u8>().wrapping_sub(count).with_metadata_of(self)
1182    }
1183
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// See [`ptr::read`] for safety concerns and examples.
    ///
    /// [`ptr::read`]: crate::ptr::read()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline]
    #[track_caller]
    pub const unsafe fn read(self) -> T
    where
        T: Sized,
    {
        // Thin wrapper around the free function; the full contract lives there.
        // SAFETY: the caller must uphold the safety contract for `read`.
        unsafe { read(self) }
    }
1201
    /// Performs a volatile read of the value from `self` without moving it. This
    /// leaves the memory in `self` unchanged.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::read_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_subset"))]
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        // Thin wrapper around the free function; the full contract lives there.
        // SAFETY: the caller must uphold the safety contract for `read_volatile`.
        unsafe { read_volatile(self) }
    }
1223
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// Unlike `read`, the pointer may be unaligned.
    ///
    /// See [`ptr::read_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline]
    #[track_caller]
    pub const unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        // Thin wrapper around the free function; the full contract lives there.
        // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
        unsafe { read_unaligned(self) }
    }
1243
    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: crate::ptr::copy()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_subset"))]
    pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // `self` is the *source* here; overlap with `dest` is allowed (see docs).
        // SAFETY: the caller must uphold the safety contract for `copy`.
        unsafe { copy(self, dest, count) }
    }
1264
    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_subset"))]
    pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // `self` is the *source* here; the ranges must not overlap (see docs).
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
        unsafe { copy_nonoverlapping(self, dest, count) }
    }
1285
    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
    /// `align`.
    ///
    /// If it is not possible to align the pointer, the implementation returns
    /// `usize::MAX`.
    ///
    /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
    /// used with the `wrapping_add` method.
    ///
    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
    /// the returned offset is correct in all terms other than alignment.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two.
    ///
    /// # Examples
    ///
    /// Accessing adjacent `u8` as `u16`
    ///
    /// ```
    /// # unsafe {
    /// let x = [5_u8, 6, 7, 8, 9];
    /// let ptr = x.as_ptr();
    /// let offset = ptr.align_offset(align_of::<u16>());
    ///
    /// if offset < x.len() - 1 {
    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
    ///     assert!(*u16_ptr == u16::from_ne_bytes([5, 6]) || *u16_ptr == u16::from_ne_bytes([6, 7]));
    /// } else {
    ///     // while the pointer can be aligned via `offset`, it would point
    ///     // outside the allocation
    /// }
    /// # }
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "align_offset", since = "1.36.0")]
    pub fn align_offset(self, align: usize) -> usize
    where
        T: Sized,
    {
        if !align.is_power_of_two() {
            panic!("align_offset: align is not a power-of-two");
        }

        // SAFETY: `align` has been checked to be a power of 2 above
        let ret = unsafe { align_offset(self, align) };

        // Inform Miri that we want to consider the resulting pointer to be suitably aligned.
        #[cfg(miri)]
        if ret != usize::MAX {
            intrinsics::miri_promise_symbolic_alignment(self.wrapping_add(ret).cast(), align);
        }

        // `usize::MAX` is the documented "cannot be aligned" sentinel.
        ret
    }
1344
1345    /// Returns whether the pointer is properly aligned for `T`.
1346    ///
1347    /// # Examples
1348    ///
1349    /// ```
1350    /// // On some platforms, the alignment of i32 is less than 4.
1351    /// #[repr(align(4))]
1352    /// struct AlignedI32(i32);
1353    ///
1354    /// let data = AlignedI32(42);
1355    /// let ptr = &data as *const AlignedI32;
1356    ///
1357    /// assert!(ptr.is_aligned());
1358    /// assert!(!ptr.wrapping_byte_add(1).is_aligned());
1359    /// ```
1360    #[must_use]
1361    #[inline]
1362    #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
1363    #[cfg(not(feature = "ferrocene_subset"))]
1364    pub fn is_aligned(self) -> bool
1365    where
1366        T: Sized,
1367    {
1368        self.is_aligned_to(align_of::<T>())
1369    }
1370
1371    /// Returns whether the pointer is aligned to `align`.
1372    ///
1373    /// For non-`Sized` pointees this operation considers only the data pointer,
1374    /// ignoring the metadata.
1375    ///
1376    /// # Panics
1377    ///
1378    /// The function panics if `align` is not a power-of-two (this includes 0).
1379    ///
1380    /// # Examples
1381    ///
1382    /// ```
1383    /// #![feature(pointer_is_aligned_to)]
1384    ///
1385    /// // On some platforms, the alignment of i32 is less than 4.
1386    /// #[repr(align(4))]
1387    /// struct AlignedI32(i32);
1388    ///
1389    /// let data = AlignedI32(42);
1390    /// let ptr = &data as *const AlignedI32;
1391    ///
1392    /// assert!(ptr.is_aligned_to(1));
1393    /// assert!(ptr.is_aligned_to(2));
1394    /// assert!(ptr.is_aligned_to(4));
1395    ///
1396    /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
1397    /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
1398    ///
1399    /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
1400    /// ```
1401    #[must_use]
1402    #[inline]
1403    #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
1404    pub fn is_aligned_to(self, align: usize) -> bool {
1405        if !align.is_power_of_two() {
1406            panic!("is_aligned_to: align is not a power-of-two");
1407        }
1408
1409        self.addr() & (align - 1) == 0
1410    }
1411}
1412
1413impl<T> *const T {
1414    /// Casts from a type to its maybe-uninitialized version.
1415    #[must_use]
1416    #[inline(always)]
1417    #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
1418    #[cfg(not(feature = "ferrocene_subset"))]
1419    pub const fn cast_uninit(self) -> *const MaybeUninit<T> {
1420        self as _
1421    }
1422}
1423#[cfg(not(feature = "ferrocene_subset"))]
1424impl<T> *const MaybeUninit<T> {
1425    /// Casts from a maybe-uninitialized type to its initialized version.
1426    ///
1427    /// This is always safe, since UB can only occur if the pointer is read
1428    /// before being initialized.
1429    #[must_use]
1430    #[inline(always)]
1431    #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
1432    pub const fn cast_init(self) -> *const T {
1433        self as _
1434    }
1435}
1436
1437impl<T> *const [T] {
    /// Returns the length of a raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the raw slice cannot be cast to a slice
    /// reference because the pointer is null or unaligned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline]
    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
    pub const fn len(self) -> usize {
        // The metadata of a slice pointer is its element count; the pointer is
        // never dereferenced, which is why this is safe for null/unaligned ones.
        metadata(self)
    }
1459
1460    /// Returns `true` if the raw slice has a length of 0.
1461    ///
1462    /// # Examples
1463    ///
1464    /// ```
1465    /// use std::ptr;
1466    ///
1467    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
1468    /// assert!(!slice.is_empty());
1469    /// ```
1470    #[inline(always)]
1471    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
1472    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
1473    pub const fn is_empty(self) -> bool {
1474        self.len() == 0
1475    }
1476
1477    /// Returns a raw pointer to the slice's buffer.
1478    ///
1479    /// This is equivalent to casting `self` to `*const T`, but more type-safe.
1480    ///
1481    /// # Examples
1482    ///
1483    /// ```rust
1484    /// #![feature(slice_ptr_get)]
1485    /// use std::ptr;
1486    ///
1487    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
1488    /// assert_eq!(slice.as_ptr(), ptr::null());
1489    /// ```
1490    #[inline]
1491    #[unstable(feature = "slice_ptr_get", issue = "74265")]
1492    pub const fn as_ptr(self) -> *const T {
1493        self as *const T
1494    }
1495
1496    /// Gets a raw pointer to the underlying array.
1497    ///
1498    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
1499    #[stable(feature = "core_slice_as_array", since = "1.93.0")]
1500    #[rustc_const_stable(feature = "core_slice_as_array", since = "1.93.0")]
1501    #[inline]
1502    #[must_use]
1503    pub const fn as_array<const N: usize>(self) -> Option<*const [T; N]> {
1504        if self.len() == N {
1505            let me = self.as_ptr() as *const [T; N];
1506            Some(me)
1507        } else {
1508            None
1509        }
1510    }
1511
1512    /// Returns a raw pointer to an element or subslice, without doing bounds
1513    /// checking.
1514    ///
1515    /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
1516    /// is *[undefined behavior]* even if the resulting pointer is not used.
1517    ///
1518    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1519    ///
1520    /// # Examples
1521    ///
1522    /// ```
1523    /// #![feature(slice_ptr_get)]
1524    ///
1525    /// let x = &[1, 2, 4] as *const [i32];
1526    ///
1527    /// unsafe {
1528    ///     assert_eq!(x.get_unchecked(1), x.as_ptr().add(1));
1529    /// }
1530    /// ```
1531    #[unstable(feature = "slice_ptr_get", issue = "74265")]
1532    #[rustc_const_unstable(feature = "const_index", issue = "143775")]
1533    #[inline]
1534    #[cfg(not(feature = "ferrocene_subset"))]
1535    pub const unsafe fn get_unchecked<I>(self, index: I) -> *const I::Output
1536    where
1537        I: [const] SliceIndex<[T]>,
1538    {
1539        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
1540        unsafe { index.get_unchecked(self) }
1541    }
1542
1543    #[doc = include_str!("docs/as_uninit_slice.md")]
1544    #[inline]
1545    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1546    #[cfg(not(feature = "ferrocene_subset"))]
1547    pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
1548        if self.is_null() {
1549            None
1550        } else {
1551            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
1552            Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
1553        }
1554    }
1555}
1556
1557impl<T> *const T {
1558    /// Casts from a pointer-to-`T` to a pointer-to-`[T; N]`.
1559    #[inline]
1560    #[unstable(feature = "ptr_cast_array", issue = "144514")]
1561    pub const fn cast_array<const N: usize>(self) -> *const [T; N] {
1562        self.cast()
1563    }
1564}
1565
1566#[cfg(not(feature = "ferrocene_subset"))]
1567impl<T, const N: usize> *const [T; N] {
1568    /// Returns a raw pointer to the array's buffer.
1569    ///
1570    /// This is equivalent to casting `self` to `*const T`, but more type-safe.
1571    ///
1572    /// # Examples
1573    ///
1574    /// ```rust
1575    /// #![feature(array_ptr_get)]
1576    /// use std::ptr;
1577    ///
1578    /// let arr: *const [i8; 3] = ptr::null();
1579    /// assert_eq!(arr.as_ptr(), ptr::null());
1580    /// ```
1581    #[inline]
1582    #[unstable(feature = "array_ptr_get", issue = "119834")]
1583    pub const fn as_ptr(self) -> *const T {
1584        self as *const T
1585    }
1586
1587    /// Returns a raw pointer to a slice containing the entire array.
1588    ///
1589    /// # Examples
1590    ///
1591    /// ```
1592    /// #![feature(array_ptr_get)]
1593    ///
1594    /// let arr: *const [i32; 3] = &[1, 2, 4] as *const [i32; 3];
1595    /// let slice: *const [i32] = arr.as_slice();
1596    /// assert_eq!(slice.len(), 3);
1597    /// ```
1598    #[inline]
1599    #[unstable(feature = "array_ptr_get", issue = "119834")]
1600    pub const fn as_slice(self) -> *const [T] {
1601        self
1602    }
1603}
1604
1605/// Pointer equality is by address, as produced by the [`<*const T>::addr`](pointer::addr) method.
1606#[stable(feature = "rust1", since = "1.0.0")]
1607#[diagnostic::on_const(
1608    message = "pointers cannot be reliably compared during const eval",
1609    note = "see issue #53020 <https://github.com/rust-lang/rust/issues/53020> for more information"
1610)]
1611impl<T: PointeeSized> PartialEq for *const T {
1612    #[inline]
1613    #[allow(ambiguous_wide_pointer_comparisons)]
1614    fn eq(&self, other: &*const T) -> bool {
1615        *self == *other
1616    }
1617}
1618
/// Pointer equality is an equivalence relation.
///
/// `Eq` adds no methods; this impl asserts that the `PartialEq` implementation
/// for raw pointers is a total equivalence (reflexive, symmetric, transitive).
#[stable(feature = "rust1", since = "1.0.0")]
#[diagnostic::on_const(
    message = "pointers cannot be reliably compared during const eval",
    note = "see issue #53020 <https://github.com/rust-lang/rust/issues/53020> for more information"
)]
impl<T: PointeeSized> Eq for *const T {}
1626
/// Pointer comparison is by address, as produced by the [`<*const T>::addr`](pointer::addr) method.
#[stable(feature = "rust1", since = "1.0.0")]
#[diagnostic::on_const(
    message = "pointers cannot be reliably compared during const eval",
    note = "see issue #53020 <https://github.com/rust-lang/rust/issues/53020> for more information"
)]
impl<T: PointeeSized> Ord for *const T {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn cmp(&self, other: &*const T) -> Ordering {
        // Three-way comparison built from the primitive pointer comparisons.
        if self < other {
            Less
        } else if self == other {
            Equal
        } else {
            Greater
        }
    }
}
1646
/// Pointer comparison is by address, as produced by the [`<*const T>::addr`](pointer::addr) method.
#[stable(feature = "rust1", since = "1.0.0")]
#[diagnostic::on_const(
    message = "pointers cannot be reliably compared during const eval",
    note = "see issue #53020 <https://github.com/rust-lang/rust/issues/53020> for more information"
)]
impl<T: PointeeSized> PartialOrd for *const T {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn partial_cmp(&self, other: &*const T) -> Option<Ordering> {
        // Raw-pointer ordering is total, so this always returns `Some`.
        Some(self.cmp(other))
    }

    // The individual comparison operators are overridden to use the primitive
    // pointer comparisons directly rather than going through `partial_cmp`.

    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn lt(&self, other: &*const T) -> bool {
        *self < *other
    }

    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn le(&self, other: &*const T) -> bool {
        *self <= *other
    }

    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn gt(&self, other: &*const T) -> bool {
        *self > *other
    }

    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn ge(&self, other: &*const T) -> bool {
        *self >= *other
    }
}
1684
1685#[stable(feature = "raw_ptr_default", since = "1.88.0")]
1686#[cfg(not(feature = "ferrocene_subset"))]
1687impl<T: ?Sized + Thin> Default for *const T {
1688    /// Returns the default value of [`null()`][crate::ptr::null].
1689    fn default() -> Self {
1690        crate::ptr::null()
1691    }
1692}