1
use super::*;
2
#[cfg(not(feature = "ferrocene_certified"))]
3
use crate::cmp::Ordering::{Equal, Greater, Less};
4
#[cfg(not(feature = "ferrocene_certified"))]
5
use crate::intrinsics::const_eval_select;
6
#[cfg(not(feature = "ferrocene_certified"))]
7
use crate::mem::{self, SizedTypeProperties};
8
#[cfg(not(feature = "ferrocene_certified"))]
9
use crate::slice::{self, SliceIndex};
10
// Ferrocene addition: imports used by certified subset
11
#[cfg(feature = "ferrocene_certified")]
12
use crate::{intrinsics::const_eval_select, mem};
13

            
14
impl<T: PointeeSized> *const T {
15
    #[doc = include_str!("docs/is_null.md")]
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "Follow the rabbit";
    /// let ptr: *const u8 = s.as_ptr();
    /// assert!(!ptr.is_null());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
    #[rustc_diagnostic_item = "ptr_const_is_null"]
    #[inline]
    #[rustc_allow_const_fn_unstable(const_eval_select)]
    pub const fn is_null(self) -> bool {
        // Compare via a cast to a thin pointer, so fat pointers are only
        // considering their "data" part for null-ness.
        let ptr = self as *const u8;
        const_eval_select!(
            @capture { ptr: *const u8 } -> bool:
            // This use of `const_raw_ptr_comparison` has been explicitly blessed by t-lang.
            if const #[rustc_allow_const_fn_unstable(const_raw_ptr_comparison)] {
                match (ptr).guaranteed_eq(null_mut()) {
                    Some(res) => res,
                    // To remain maximally conservative, we stop execution when we don't
                    // know whether the pointer is null or not.
                    // We can *not* return `false` here, that would be unsound in `NonNull::new`!
                    None => panic!("null-ness of this pointer cannot be determined in const context"),
                }
            } else {
                // At runtime the address can simply be inspected directly.
                ptr.addr() == 0
            }
        )
    }
49

            
50
    /// Casts to a pointer of another type.
51
    #[stable(feature = "ptr_cast", since = "1.38.0")]
52
    #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
53
    #[rustc_diagnostic_item = "const_ptr_cast"]
54
    #[inline(always)]
55
11164017
    pub const fn cast<U>(self) -> *const U {
56
11164017
        self as _
57
11164017
    }
58

            
59
    /// Try to cast to a pointer of another type by checking alignment.
60
    ///
61
    /// If the pointer is properly aligned to the target type, it will be
62
    /// cast to the target type. Otherwise, `None` is returned.
63
    ///
64
    /// # Examples
65
    ///
66
    /// ```rust
67
    /// #![feature(pointer_try_cast_aligned)]
68
    ///
69
    /// let x = 0u64;
70
    ///
71
    /// let aligned: *const u64 = &x;
72
    /// let unaligned = unsafe { aligned.byte_add(1) };
73
    ///
74
    /// assert!(aligned.try_cast_aligned::<u32>().is_some());
75
    /// assert!(unaligned.try_cast_aligned::<u32>().is_none());
76
    /// ```
77
    #[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
78
    #[must_use = "this returns the result of the operation, \
79
                  without modifying the original"]
80
    #[inline]
81
    pub fn try_cast_aligned<U>(self) -> Option<*const U> {
82
        if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
83
    }
84

            
85
    /// Uses the address value in a new pointer of another type.
    ///
    /// This operation will ignore the address part of its `meta` operand and discard existing
    /// metadata of `self`. For pointers to a sized types (thin pointers), this has the same effect
    /// as a simple cast. For pointers to an unsized type (fat pointers) this recombines the address
    /// with new metadata such as slice lengths or `dyn`-vtable.
    ///
    /// The resulting pointer will have provenance of `self`. This operation is semantically the
    /// same as creating a new pointer with the data pointer value of `self` but the metadata of
    /// `meta`, being fat or thin depending on the `meta` operand.
    ///
    /// # Examples
    ///
    /// This function is primarily useful for enabling pointer arithmetic on potentially fat
    /// pointers. The pointer is cast to a sized pointee to utilize offset operations and then
    /// recombined with its own original metadata.
    ///
    /// ```
    /// #![feature(set_ptr_value)]
    /// # use core::fmt::Debug;
    /// let arr: [i32; 3] = [1, 2, 3];
    /// let mut ptr = arr.as_ptr() as *const dyn Debug;
    /// let thin = ptr as *const u8;
    /// unsafe {
    ///     ptr = thin.add(8).with_metadata_of(ptr);
    ///     # assert_eq!(*(ptr as *const i32), 3);
    ///     println!("{:?}", &*ptr); // will print "3"
    /// }
    /// ```
    ///
    /// # *Incorrect* usage
    ///
    /// The provenance from pointers is *not* combined. The result must only be used to refer to the
    /// address allowed by `self`.
    ///
    /// ```rust,no_run
    /// #![feature(set_ptr_value)]
    /// let x = 0u32;
    /// let y = 1u32;
    ///
    /// let x = (&x) as *const u32;
    /// let y = (&y) as *const u32;
    ///
    /// let offset = (x as usize - y as usize) / 4;
    /// let bad = x.wrapping_add(offset).with_metadata_of(y);
    ///
    /// // This dereference is UB. The pointer only has provenance for `x` but points to `y`.
    /// println!("{:?}", unsafe { &*bad });
    /// ```
    #[unstable(feature = "set_ptr_value", issue = "75091")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn with_metadata_of<U>(self, meta: *const U) -> *const U
    where
        U: PointeeSized,
    {
        // Address (and provenance) come from `self`; metadata comes from `meta`.
        from_raw_parts::<U>(self as *const (), metadata(meta))
    }
144

            
145
    /// Changes constness without changing the type.
146
    ///
147
    /// This is a bit safer than `as` because it wouldn't silently change the type if the code is
148
    /// refactored.
149
    #[stable(feature = "ptr_const_cast", since = "1.65.0")]
150
    #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")]
151
    #[rustc_diagnostic_item = "ptr_cast_mut"]
152
    #[inline(always)]
153
    #[cfg(not(feature = "ferrocene_certified"))]
154
    pub const fn cast_mut(self) -> *mut T {
155
        self as _
156
    }
157

            
158
    /// Gets the "address" portion of the pointer.
    ///
    /// This is similar to `self as usize`, except that the [provenance][crate::ptr#provenance] of
    /// the pointer is discarded and not [exposed][crate::ptr#exposed-provenance]. This means that
    /// casting the returned address back to a pointer yields a [pointer without
    /// provenance][without_provenance], which is undefined behavior to dereference. To properly
    /// restore the lost information and obtain a dereferenceable pointer, use
    /// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr].
    ///
    /// If using those APIs is not possible because there is no way to preserve a pointer with the
    /// required provenance, then Strict Provenance might not be for you. Use pointer-integer casts
    /// or [`expose_provenance`][pointer::expose_provenance] and [`with_exposed_provenance`][with_exposed_provenance]
    /// instead. However, note that this makes your code less portable and less amenable to tools
    /// that check for compliance with the Rust memory model.
    ///
    /// On most platforms this will produce a value with the same bytes as the original
    /// pointer, because all the bytes are dedicated to describing the address.
    /// Platforms which need to store additional information in the pointer may
    /// perform a change of representation to produce a value containing only the address
    /// portion of the pointer. What that means is up to the platform to define.
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn addr(self) -> usize {
        // A pointer-to-integer transmute currently has exactly the right semantics: it returns the
        // address without exposing the provenance. Note that this is *not* a stable guarantee about
        // transmute semantics, it relies on sysroot crates having special status.
        // The `cast::<()>()` first makes the pointer thin, so a possibly-fat
        // pointer's metadata never reaches the transmute.
        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
        // provenance).
        unsafe { mem::transmute(self.cast::<()>()) }
    }
191

            
192
    /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
    /// [`with_exposed_provenance`] and returns the "address" portion.
    ///
    /// This is equivalent to `self as usize`, which semantically discards provenance information.
    /// Furthermore, this (like the `as` cast) has the implicit side-effect of marking the
    /// provenance as 'exposed', so on platforms that support it you can later call
    /// [`with_exposed_provenance`] to reconstitute the original pointer including its provenance.
    ///
    /// Due to its inherent ambiguity, [`with_exposed_provenance`] may not be supported by tools
    /// that help you to stay conformant with the Rust memory model. It is recommended to use
    /// [Strict Provenance][crate::ptr#strict-provenance] APIs such as [`with_addr`][pointer::with_addr]
    /// wherever possible, in which case [`addr`][pointer::addr] should be used instead of `expose_provenance`.
    ///
    /// On most platforms this will produce a value with the same bytes as the original pointer,
    /// because all the bytes are dedicated to describing the address. Platforms which need to store
    /// additional information in the pointer may not support this operation, since the 'expose'
    /// side-effect which is required for [`with_exposed_provenance`] to work is typically not
    /// available.
    ///
    /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
    ///
    /// [`with_exposed_provenance`]: with_exposed_provenance
    #[inline(always)]
    #[stable(feature = "exposed_provenance", since = "1.84.0")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub fn expose_provenance(self) -> usize {
        // The `as usize` cast on the thin pointer is what carries the
        // 'expose' side-effect described above; do not replace it with
        // `addr()`, which deliberately does not expose.
        self.cast::<()>() as usize
    }
220

            
221
    /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
222
    /// `self`.
223
    ///
224
    /// This is similar to a `addr as *const T` cast, but copies
225
    /// the *provenance* of `self` to the new pointer.
226
    /// This avoids the inherent ambiguity of the unary cast.
227
    ///
228
    /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset
229
    /// `self` to the given address, and therefore has all the same capabilities and restrictions.
230
    ///
231
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
232
    #[must_use]
233
    #[inline]
234
    #[stable(feature = "strict_provenance", since = "1.84.0")]
235
    #[cfg(not(feature = "ferrocene_certified"))]
236
    pub fn with_addr(self, addr: usize) -> Self {
237
        // This should probably be an intrinsic to avoid doing any sort of arithmetic, but
238
        // meanwhile, we can implement it with `wrapping_offset`, which preserves the pointer's
239
        // provenance.
240
        let self_addr = self.addr() as isize;
241
        let dest_addr = addr as isize;
242
        let offset = dest_addr.wrapping_sub(self_addr);
243
        self.wrapping_byte_offset(offset)
244
    }
245

            
246
    /// Creates a new pointer by mapping `self`'s address to a new one, preserving the
247
    /// [provenance][crate::ptr#provenance] of `self`.
248
    ///
249
    /// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details.
250
    ///
251
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
252
    #[must_use]
253
    #[inline]
254
    #[stable(feature = "strict_provenance", since = "1.84.0")]
255
    #[cfg(not(feature = "ferrocene_certified"))]
256
    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
257
        self.with_addr(f(self.addr()))
258
    }
259

            
260
    /// Decompose a (possibly wide) pointer into its data pointer and metadata components.
    ///
    /// The pointer can be later reconstructed with [`from_raw_parts`].
    #[unstable(feature = "ptr_metadata", issue = "81513")]
    #[inline]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn to_raw_parts(self) -> (*const (), <T as super::Pointee>::Metadata) {
        // The data half is just a thin cast of `self`; the metadata half is
        // extracted with `metadata`.
        (self.cast(), metadata(self))
    }
269

            
270
    /// Returns `None` if the pointer is null, or else returns a shared reference to
    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`]
    /// must be used instead.
    ///
    /// [`as_uninit_ref`]: #method.as_uninit_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null
    ///
    /// # Examples
    ///
    /// ```
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_ref() {
    ///         assert_eq!(val_back, &10);
    ///     }
    /// }
    /// ```
    ///
    /// # Null-unchecked version
    ///
    /// If you are sure the pointer can never be null and are looking for some kind of
    /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
    /// dereference the pointer directly.
    ///
    /// ```
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     let val_back = &*ptr;
    ///     assert_eq!(val_back, &10);
    /// }
    /// ```
    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
    #[inline]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
        // SAFETY: the caller must guarantee that `self` is valid
        // for a reference if it isn't null.
        // (The const-evaluation panic documented above originates in
        // `is_null`, not in the dereference.)
        if self.is_null() { None } else { unsafe { Some(&*self) } }
    }
323

            
324
    /// Returns a shared reference to the value behind the pointer.
    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_ref`] must be used instead.
    /// If the pointer may be null, but the value is known to have been initialized, [`as_ref`] must be used instead.
    ///
    /// [`as_ref`]: #method.as_ref
    /// [`as_uninit_ref`]: #method.as_uninit_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_ref_unchecked)]
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     assert_eq!(ptr.as_ref_unchecked(), &10);
    /// }
    /// ```
    // FIXME: mention it in the docs for `as_ref` and `as_uninit_ref` once stabilized.
    #[unstable(feature = "ptr_as_ref_unchecked", issue = "122034")]
    #[inline]
    #[must_use]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn as_ref_unchecked<'a>(self) -> &'a T {
        // SAFETY: the caller must guarantee that `self` is valid for a reference
        unsafe { &*self }
    }
355

            
356
    /// Returns `None` if the pointer is null, or else returns a shared reference to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
    /// that the value has to be initialized.
    ///
    /// [`as_ref`]: #method.as_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_uninit_ref() {
    ///         assert_eq!(val_back.assume_init(), 10);
    ///     }
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
    where
        T: Sized,
    {
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a reference.
        // Viewing the pointee as `MaybeUninit<T>` is what lifts the
        // initialization requirement compared to `as_ref`.
        if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
    }
398

            
399
    #[doc = include_str!("./docs/offset.md")]
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     assert_eq!(*ptr.offset(1) as char, '2');
    ///     assert_eq!(*ptr.offset(2) as char, '3');
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn offset(self, count: isize) -> *const T
    where
        T: Sized,
    {
        // Precondition helper: returns `true` when `count * size` added to
        // the pointer's address is known not to overflow.
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
            // We can use const_eval_select here because this is only for UB checks.
            const_eval_select!(
                @capture { this: *const (), count: isize, size: usize } -> bool:
                if const {
                    true
                } else {
                    // `size` is the size of a Rust type, so we know that
                    // `size <= isize::MAX` and thus `as` cast here is not lossy.
                    let Some(byte_offset) = count.checked_mul(size as isize) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
                    !overflow
                }
            )
        }

        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::offset requires the address calculation to not overflow",
            (
                this: *const () = self as *const (),
                count: isize = count,
                size: usize = size_of::<T>(),
            ) => runtime_offset_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }
455

            
456
    /// Adds a signed offset in bytes to a pointer.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [offset][pointer::offset] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn byte_offset(self, count: isize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        // `with_metadata_of` reattaches the original metadata after the
        // byte-wise offset on the thin `u8` pointer.
        unsafe { self.cast::<u8>().offset(count).with_metadata_of(self) }
    }
476

            
477
    /// Adds a signed offset to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocation] that `self` points to
    /// (this is called "[Provenance](ptr/index.html#provenance)").
    /// The pointer must not be used to read or write other allocations.
    ///
    /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocation.
    ///
    /// Compared to [`offset`], this method basically delays the requirement of staying within the
    /// same allocation: [`offset`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
    /// words, leaving the allocation and then re-entering it later is permitted.
    ///
    /// [`offset`]: #method.offset
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_offset(6);
    ///
    /// let mut out = String::new();
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_offset(step);
    /// }
    /// assert_eq!(out.as_str(), "1, 3, 5, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn wrapping_offset(self, count: isize) -> *const T
    where
        T: Sized,
    {
        // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
        unsafe { intrinsics::arith_offset(self, count) }
    }
541

            
542
    /// Adds a signed offset in bytes to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_offset][pointer::wrapping_offset] on it. See that method
    /// for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn wrapping_byte_offset(self, count: isize) -> Self {
        // Offset the thin `u8` view, then reattach `self`'s metadata.
        self.cast::<u8>().wrapping_offset(count).with_metadata_of(self)
    }
560

            
561
    /// Masks out bits of the pointer according to a mask.
    ///
    /// This is convenience for `ptr.map_addr(|a| a & mask)`.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    ///
    /// ## Examples
    ///
    /// ```
    /// #![feature(ptr_mask)]
    /// let v = 17_u32;
    /// let ptr: *const u32 = &v;
    ///
    /// // `u32` is 4 bytes aligned,
    /// // which means that lower 2 bits are always 0.
    /// let tag_mask = 0b11;
    /// let ptr_mask = !tag_mask;
    ///
    /// // We can store something in these lower bits
    /// let tagged_ptr = ptr.map_addr(|a| a | 0b10);
    ///
    /// // Get the "tag" back
    /// let tag = tagged_ptr.addr() & tag_mask;
    /// assert_eq!(tag, 0b10);
    ///
    /// // Note that `tagged_ptr` is unaligned, it's UB to read from it.
    /// // To get original pointer `mask` can be used:
    /// let masked_ptr = tagged_ptr.mask(ptr_mask);
    /// assert_eq!(unsafe { *masked_ptr }, 17);
    /// ```
    #[unstable(feature = "ptr_mask", issue = "98290")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline(always)]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub fn mask(self, mask: usize) -> *const T {
        // The intrinsic masks the thin data pointer; `with_metadata_of`
        // reattaches `self`'s metadata afterwards.
        intrinsics::ptr_mask(self.cast::<()>(), mask).with_metadata_of(self)
    }
599

            
600
    /// Calculates the distance between two pointers within the same allocation. The returned value is in
    /// units of T: the distance in bytes divided by `size_of::<T>()`.
    ///
    /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
    /// except that it has a lot more opportunities for UB, in exchange for the compiler
    /// better understanding what you are doing.
    ///
    /// The primary motivation of this method is for computing the `len` of an array/slice
    /// of `T` that you are currently representing as a "start" and "end" pointer
    /// (and "end" is "one past the end" of the array).
    /// In that case, `end.offset_from(start)` gets you the length of the array.
    ///
    /// All of the following safety requirements are trivially satisfied for this usecase.
    ///
    /// [`offset`]: #method.offset
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * `self` and `origin` must either
    ///
    ///   * point to the same address, or
    ///   * both be [derived from][crate::ptr#provenance] a pointer to the same [allocation], and the memory range between
    ///     the two pointers must be in bounds of that object. (See below for an example.)
    ///
    /// * The distance between the pointers, in bytes, must be an exact multiple
    ///   of the size of `T`.
    ///
    /// As a consequence, the absolute distance between the pointers, in bytes, computed on
    /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
    /// implied by the in-bounds requirement, and the fact that no allocation can be larger
    /// than `isize::MAX` bytes.
    ///
    /// The requirement for pointers to be derived from the same allocation is primarily
    /// needed for `const`-compatibility: the distance between pointers into *different* allocated
    /// objects is not known at compile-time. However, the requirement also exists at
    /// runtime and may be exploited by optimizations. If you wish to compute the difference between
    /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
    /// origin as isize) / size_of::<T>()`.
    // FIXME: recommend `addr()` instead of `as usize` once that is stable.
    ///
    /// [`add`]: #method.add
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let a = [0; 5];
    /// let ptr1: *const i32 = &a[1];
    /// let ptr2: *const i32 = &a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
    ///     assert_eq!(ptr1.offset(2), ptr2);
    ///     assert_eq!(ptr2.offset(-2), ptr1);
    /// }
    /// ```
    ///
    /// *Incorrect* usage:
    ///
    /// ```rust,no_run
    /// let ptr1 = Box::into_raw(Box::new(0u8)) as *const u8;
    /// let ptr2 = Box::into_raw(Box::new(1u8)) as *const u8;
    /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
    /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
    /// let ptr2_other = (ptr1 as *const u8).wrapping_offset(diff).wrapping_offset(1);
    /// assert_eq!(ptr2 as usize, ptr2_other as usize);
    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
    /// // computing their offset is undefined behavior, even though
    /// // they point to addresses that are in-bounds of the same object!
    /// unsafe {
    ///     let one = ptr2_other.offset_from(ptr2); // Undefined Behavior! ⚠️
    /// }
    /// ```
    #[stable(feature = "ptr_offset_from", since = "1.47.0")]
    #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn offset_from(self, origin: *const T) -> isize
    where
        T: Sized,
    {
        // Reject ZSTs (distance in units of a zero-sized `T` is meaningless)
        // and sizes beyond `isize::MAX`; see "# Panics" above.
        let pointee_size = size_of::<T>();
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from`.
        unsafe { intrinsics::ptr_offset_from(self, origin) }
    }
695

            
696
    /// Calculates the distance between two pointers within the same allocation. The returned value is in
697
    /// units of **bytes**.
698
    ///
699
    /// This is purely a convenience for casting to a `u8` pointer and
700
    /// using [`offset_from`][pointer::offset_from] on it. See that method for
701
    /// documentation and safety requirements.
702
    ///
703
    /// For non-`Sized` pointees this operation considers only the data pointers,
704
    /// ignoring the metadata.
705
    #[inline(always)]
706
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
707
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
708
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
709
    #[cfg(not(feature = "ferrocene_certified"))]
710
    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: *const U) -> isize {
711
        // SAFETY: the caller must uphold the safety contract for `offset_from`.
712
        unsafe { self.cast::<u8>().offset_from(origin.cast::<u8>()) }
713
    }
714

            
    /// Calculates the distance between two pointers within the same allocation, *where it's known that
    /// `self` is equal to or greater than `origin`*. The returned value is in
    /// units of T: the distance in bytes is divided by `size_of::<T>()`.
    ///
    /// This computes the same value that [`offset_from`](#method.offset_from)
    /// would compute, but with the added precondition that the offset is
    /// guaranteed to be non-negative.  This method is equivalent to
    /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
    /// but it provides slightly more information to the optimizer, which can
    /// sometimes allow it to optimize slightly better with some backends.
    ///
    /// This method can be thought of as recovering the `count` that was passed
    /// to [`add`](#method.add) (or, with the parameters in the other order,
    /// to [`sub`](#method.sub)).  The following are all equivalent, assuming
    /// that their safety preconditions are met:
    /// ```rust
    /// # unsafe fn blah(ptr: *const i32, origin: *const i32, count: usize) -> bool { unsafe {
    /// ptr.offset_from_unsigned(origin) == count
    /// # &&
    /// origin.add(count) == ptr
    /// # &&
    /// ptr.sub(count) == origin
    /// # } }
    /// ```
    ///
    /// # Safety
    ///
    /// - The distance between the pointers must be non-negative (`self >= origin`)
    ///
    /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
    ///   apply to this method as well; see it for the full details.
    ///
    /// Importantly, despite the return type of this method being able to represent
    /// a larger offset, it's still *not permitted* to pass pointers which differ
    /// by more than `isize::MAX` *bytes*.  As such, the result of this method will
    /// always be less than or equal to `isize::MAX as usize`.
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// ```
    /// let a = [0; 5];
    /// let ptr1: *const i32 = &a[1];
    /// let ptr2: *const i32 = &a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
    ///     assert_eq!(ptr1.add(2), ptr2);
    ///     assert_eq!(ptr2.sub(2), ptr1);
    ///     assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
    /// }
    ///
    /// // This would be incorrect, as the pointers are not correctly ordered:
    /// // ptr1.offset_from_unsigned(ptr2)
    /// ```
    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
    #[inline]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn offset_from_unsigned(self, origin: *const T) -> usize
    where
        T: Sized,
    {
        // Checks the `self >= origin` precondition. Raw-pointer ordering is not
        // observable during compile-time evaluation, so the const arm vacuously
        // accepts; the check only actually fires at runtime.
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_ptr_ge(this: *const (), origin: *const ()) -> bool {
            const_eval_select!(
                @capture { this: *const (), origin: *const () } -> bool:
                if const {
                    true
                } else {
                    this >= origin
                }
            )
        }

        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::offset_from_unsigned requires `self >= origin`",
            (
                this: *const () = self as *const (),
                origin: *const () = origin as *const (),
            ) => runtime_ptr_ge(this, origin)
        );

        // ZSTs have no meaningful element distance; rule them out before the
        // intrinsic divides the byte distance by `size_of::<T>()`.
        let pointee_size = size_of::<T>();
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from_unsigned`.
        unsafe { intrinsics::ptr_offset_from_unsigned(self, origin) }
    }

808
    /// Calculates the distance between two pointers within the same allocation, *where it's known that
809
    /// `self` is equal to or greater than `origin`*. The returned value is in
810
    /// units of **bytes**.
811
    ///
812
    /// This is purely a convenience for casting to a `u8` pointer and
813
    /// using [`offset_from_unsigned`][pointer::offset_from_unsigned] on it.
814
    /// See that method for documentation and safety requirements.
815
    ///
816
    /// For non-`Sized` pointees this operation considers only the data pointers,
817
    /// ignoring the metadata.
818
    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
819
    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
820
    #[inline]
821
    #[track_caller]
822
    #[cfg(not(feature = "ferrocene_certified"))]
823
    pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: *const U) -> usize {
824
        // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
825
        unsafe { self.cast::<u8>().offset_from_unsigned(origin.cast::<u8>()) }
826
    }
827

            
    /// Returns whether two pointers are guaranteed to be equal.
    ///
    /// At runtime this function behaves like `Some(self == other)`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine equality of two pointers, so this function may
    /// spuriously return `None` for pointers that later actually turn out to have its equality known.
    /// But when it returns `Some`, the pointers' equality is guaranteed to be known.
    ///
    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
    /// version and unsafe code must not
    /// rely on the result of this function for soundness. It is suggested to only use this function
    /// for performance optimizations where spurious `None` return values by this function do not
    /// affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
    // Note: not gated on `ferrocene_certified` — the certified `is_null` relies on it.
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[inline]
    pub const fn guaranteed_eq(self, other: *const T) -> Option<bool>
    where
        T: Sized,
    {
        // The intrinsic encodes its answer as an integer: 1 means "known
        // equal", 0 means "known inequal", and 2 means "cannot be determined"
        // (mapped to `None` here).
        match intrinsics::ptr_guaranteed_cmp(self, other) {
            2 => None,
            other => Some(other == 1),
        }
    }

858
    /// Returns whether two pointers are guaranteed to be inequal.
859
    ///
860
    /// At runtime this function behaves like `Some(self != other)`.
861
    /// However, in some contexts (e.g., compile-time evaluation),
862
    /// it is not always possible to determine inequality of two pointers, so this function may
863
    /// spuriously return `None` for pointers that later actually turn out to have its inequality known.
864
    /// But when it returns `Some`, the pointers' inequality is guaranteed to be known.
865
    ///
866
    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
867
    /// version and unsafe code must not
868
    /// rely on the result of this function for soundness. It is suggested to only use this function
869
    /// for performance optimizations where spurious `None` return values by this function do not
870
    /// affect the outcome, but just the performance.
871
    /// The consequences of using this method to make runtime and compile-time code behave
872
    /// differently have not been explored. This method should not be used to introduce such
873
    /// differences, and it should also not be stabilized before we have a better understanding
874
    /// of this issue.
875
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
876
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
877
    #[inline]
878
    #[cfg(not(feature = "ferrocene_certified"))]
879
    pub const fn guaranteed_ne(self, other: *const T) -> Option<bool>
880
    where
881
        T: Sized,
882
    {
883
        match self.guaranteed_eq(other) {
884
            None => None,
885
            Some(eq) => Some(!eq),
886
        }
887
    }
888

            
    #[doc = include_str!("./docs/add.md")]
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     assert_eq!(*ptr.add(1), b'2');
    ///     assert_eq!(*ptr.add(2), b'3');
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // Debug-only precondition helper. Addresses are not observable during
        // compile-time evaluation, so the const arm vacuously accepts; at
        // runtime it rejects offsets whose byte size overflows `usize`,
        // exceeds `isize::MAX`, or wraps the address space.
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add(byte_offset);
                    byte_offset <= (isize::MAX as usize) && !overflow
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::add requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_add_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }

945
    /// Adds an unsigned offset in bytes to a pointer.
946
    ///
947
    /// `count` is in units of bytes.
948
    ///
949
    /// This is purely a convenience for casting to a `u8` pointer and
950
    /// using [add][pointer::add] on it. See that method for documentation
951
    /// and safety requirements.
952
    ///
953
    /// For non-`Sized` pointees this operation changes only the data pointer,
954
    /// leaving the metadata untouched.
955
    #[must_use]
956
    #[inline(always)]
957
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
958
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
959
    #[track_caller]
960
    #[cfg(not(feature = "ferrocene_certified"))]
961
    pub const unsafe fn byte_add(self, count: usize) -> Self {
962
        // SAFETY: the caller must uphold the safety contract for `add`.
963
        unsafe { self.cast::<u8>().add(count).with_metadata_of(self) }
964
    }
965

            
    /// Subtracts an unsigned offset from a pointer.
    ///
    /// This can only move the pointer backward (or not move it). If you need to move forward or
    /// backward depending on the value, then you might want [`offset`](#method.offset) instead
    /// which takes a signed offset.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
    ///   "wrapping around"), must fit in an `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
    ///   [allocation], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocation. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// Consider using [`wrapping_sub`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_sub`]: #method.wrapping_sub
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    ///
    /// unsafe {
    ///     let end: *const u8 = s.as_ptr().add(3);
    ///     assert_eq!(*end.sub(1), b'3');
    ///     assert_eq!(*end.sub(2), b'2');
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        // Debug-only precondition helper. Addresses are not observable during
        // compile-time evaluation, so the const arm vacuously accepts; at
        // runtime it rejects byte offsets that overflow `usize`, exceed
        // `isize::MAX`, or would move the address below zero.
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
                }
            )
        }
        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::sub requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_sub_nowrap(this, count, size)
        );
        if T::IS_ZST {
            // Pointer arithmetic does nothing when the pointee is a ZST.
            self
        } else {
            // SAFETY: the caller must uphold the safety contract for `offset`.
            // Because the pointee is *not* a ZST, that means that `count` is
            // at most `isize::MAX`, and thus the negation cannot overflow.
            unsafe { intrinsics::offset(self, intrinsics::unchecked_sub(0, count as isize)) }
        }
    }
    /// Subtracts an unsigned offset in bytes from a pointer.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is a convenience wrapper that casts to a `u8` pointer and calls
    /// [sub][pointer::sub]; consult that method for the full documentation and
    /// safety requirements.
    ///
    /// For non-`Sized` pointees only the data pointer is changed; the metadata
    /// is carried over untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn byte_sub(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `sub`.
        let moved = unsafe { self.cast::<u8>().sub(count) };
        // Reattach `self`'s metadata to the moved data pointer.
        moved.with_metadata_of(self)
    }
    /// Adds an unsigned offset to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocation] that `self` points to; it must not
    /// be used to read or write other allocations.
    ///
    /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocation.
    ///
    /// Compared to [`add`], this method basically delays the requirement of staying within the
    /// same allocation: [`add`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocation and then re-entering it later is permitted.
    ///
    /// [`add`]: #method.add
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_add(6);
    ///
    /// let mut out = String::new();
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_add(step);
    /// }
    /// assert_eq!(out, "1, 3, 5, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn wrapping_add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // `count as isize` may wrap to a negative value for counts above
        // `isize::MAX`; that is fine here, since the address arithmetic in
        // `wrapping_offset` wraps modulo the address space either way.
        self.wrapping_offset(count as isize)
    }
    /// Adds an unsigned offset in bytes to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is a convenience wrapper that casts to a `u8` pointer and calls
    /// [wrapping_add][pointer::wrapping_add]; consult that method for the
    /// documentation.
    ///
    /// For non-`Sized` pointees only the data pointer is changed; the metadata
    /// is carried over untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn wrapping_byte_add(self, count: usize) -> Self {
        // Advance the thin data pointer, then reattach `self`'s metadata.
        let moved = self.cast::<u8>().wrapping_add(count);
        moved.with_metadata_of(self)
    }
    /// Subtracts an unsigned offset from a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocation] that `self` points to; it must not
    /// be used to read or write other allocations.
    ///
    /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocation.
    ///
    /// Compared to [`sub`], this method basically delays the requirement of staying within the
    /// same allocation: [`sub`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocation and then re-entering it later is permitted.
    ///
    /// [`sub`]: #method.sub
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements (backwards)
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let start_rounded_down = ptr.wrapping_sub(2);
    /// ptr = ptr.wrapping_add(4);
    /// let step = 2;
    /// let mut out = String::new();
    /// while ptr != start_rounded_down {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_sub(step);
    /// }
    /// assert_eq!(out, "5, 3, 1, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn wrapping_sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        // `wrapping_neg` avoids an overflow panic when `count as isize` is
        // `isize::MIN`; the address arithmetic wraps modulo the address space
        // either way, so the resulting pointer value is still the right one.
        self.wrapping_offset((count as isize).wrapping_neg())
    }
    /// Subtracts an unsigned offset in bytes from a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is a convenience wrapper that casts to a `u8` pointer and calls
    /// [wrapping_sub][pointer::wrapping_sub]; consult that method for the
    /// documentation.
    ///
    /// For non-`Sized` pointees only the data pointer is changed; the metadata
    /// is carried over untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn wrapping_byte_sub(self, count: usize) -> Self {
        // Move the thin data pointer back, then reattach `self`'s metadata.
        let moved = self.cast::<u8>().wrapping_sub(count);
        moved.with_metadata_of(self)
    }
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// See [`ptr::read`] for safety concerns and examples.
    ///
    /// [`ptr::read`]: crate::ptr::read()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn read(self) -> T
    where
        T: Sized,
    {
        // Thin wrapper: the free function `ptr::read` carries the full contract.
        // SAFETY: the caller must uphold the safety contract for `read`.
        unsafe { read(self) }
    }
    /// Performs a volatile read of the value from `self` without moving it. This
    /// leaves the memory in `self` unchanged.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::read_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        // Thin wrapper: the free function implements the volatile semantics.
        // SAFETY: the caller must uphold the safety contract for `read_volatile`.
        unsafe { read_volatile(self) }
    }
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// Unlike `read`, the pointer may be unaligned.
    ///
    /// See [`ptr::read_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        // Thin wrapper: alignment is handled by the free function itself.
        // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
        unsafe { read_unaligned(self) }
    }
    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: crate::ptr::copy()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // `self` is the source and `dest` the destination, matching `ptr::copy(src, dst, count)`.
        // SAFETY: the caller must uphold the safety contract for `copy`.
        unsafe { copy(self, dest, count) }
    }
    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[track_caller]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // `self` is the source and `dest` the destination, matching
        // `ptr::copy_nonoverlapping(src, dst, count)`.
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
        unsafe { copy_nonoverlapping(self, dest, count) }
    }
    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
    /// `align`.
    ///
    /// If it is not possible to align the pointer, the implementation returns
    /// `usize::MAX`.
    ///
    /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
    /// used with the `wrapping_add` method.
    ///
    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
    /// the returned offset is correct in all terms other than alignment.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two.
    ///
    /// # Examples
    ///
    /// Accessing adjacent `u8` as `u16`
    ///
    /// ```
    /// # unsafe {
    /// let x = [5_u8, 6, 7, 8, 9];
    /// let ptr = x.as_ptr();
    /// let offset = ptr.align_offset(align_of::<u16>());
    ///
    /// if offset < x.len() - 1 {
    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
    ///     assert!(*u16_ptr == u16::from_ne_bytes([5, 6]) || *u16_ptr == u16::from_ne_bytes([6, 7]));
    /// } else {
    ///     // while the pointer can be aligned via `offset`, it would point
    ///     // outside the allocation
    /// }
    /// # }
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "align_offset", since = "1.36.0")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub fn align_offset(self, align: usize) -> usize
    where
        T: Sized,
    {
        if !align.is_power_of_two() {
            panic!("align_offset: align is not a power-of-two");
        }
        // SAFETY: `align` has been checked to be a power of 2 above
        let ret = unsafe { align_offset(self, align) };
        // Inform Miri that we want to consider the resulting pointer to be suitably aligned.
        // (`usize::MAX` is the "cannot be aligned" sentinel — no promise in that case.)
        #[cfg(miri)]
        if ret != usize::MAX {
            intrinsics::miri_promise_symbolic_alignment(self.wrapping_add(ret).cast(), align);
        }
        ret
    }
    /// Returns whether the pointer is properly aligned for `T`.
    ///
    /// # Examples
    ///
    /// ```
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let data = AlignedI32(42);
    /// let ptr = &data as *const AlignedI32;
    ///
    /// assert!(ptr.is_aligned());
    /// assert!(!ptr.wrapping_byte_add(1).is_aligned());
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub fn is_aligned(self) -> bool
    where
        T: Sized,
    {
        // "Properly aligned for `T`" is just address alignment to `align_of::<T>()`.
        let required = align_of::<T>();
        self.is_aligned_to(required)
    }
    /// Returns whether the pointer is aligned to `align`.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointer,
    /// ignoring the metadata.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two (this includes 0).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(pointer_is_aligned_to)]
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let data = AlignedI32(42);
    /// let ptr = &data as *const AlignedI32;
    ///
    /// assert!(ptr.is_aligned_to(1));
    /// assert!(ptr.is_aligned_to(2));
    /// assert!(ptr.is_aligned_to(4));
    ///
    /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
    /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
    ///
    /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
    /// ```
    #[must_use]
    #[inline]
    #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
2952455
    pub fn is_aligned_to(self, align: usize) -> bool {
2952455
        if !align.is_power_of_two() {
            panic!("is_aligned_to: align is not a power-of-two");
2952455
        }
2952455
        self.addr() & (align - 1) == 0
2952455
    }
}
impl<T> *const T {
    /// Casts from a type to its maybe-uninitialized version.
    #[must_use]
    #[inline(always)]
    #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
    #[cfg(not(feature = "ferrocene_certified"))]
    pub const fn cast_uninit(self) -> *const MaybeUninit<T> {
        // Spell out the destination type rather than relying on `as _` inference.
        self as *const MaybeUninit<T>
    }
}
#[cfg(not(feature = "ferrocene_certified"))]
impl<T> *const MaybeUninit<T> {
    /// Casts from a maybe-uninitialized type to its initialized version.
    ///
    /// This is always safe, since UB can only occur if the pointer is read
    /// before being initialized.
    #[must_use]
    #[inline(always)]
    #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
    pub const fn cast_init(self) -> *const T {
        // Spell out the destination type rather than relying on `as _` inference.
        self as *const T
    }
}
#[cfg(not(feature = "ferrocene_certified"))]
impl<T> *const [T] {
    /// Returns the length of a raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the raw slice cannot be cast to a slice
    /// reference because the pointer is null or unaligned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline]
    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
303623
    pub const fn len(self) -> usize {
303623
        metadata(self)
303623
    }
    /// Returns `true` if the raw slice has a length of 0.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert!(!slice.is_empty());
    /// ```
    #[inline(always)]
    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
    pub const fn is_empty(self) -> bool {
        self.len() == 0
    }
    /// Returns a raw pointer to the slice's buffer.
    ///
    /// This is equivalent to casting `self` to `*const T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert_eq!(slice.as_ptr(), ptr::null());
    /// ```
    #[inline]
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
5623
    pub const fn as_ptr(self) -> *const T {
5623
        self as *const T
5623
    }
    /// Gets a raw pointer to the underlying array.
    ///
    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
    #[unstable(feature = "slice_as_array", issue = "133508")]
    #[inline]
    #[must_use]
    pub const fn as_array<const N: usize>(self) -> Option<*const [T; N]> {
        if self.len() == N {
            let me = self.as_ptr() as *const [T; N];
            Some(me)
        } else {
            None
        }
    }
    /// Returns a raw pointer to an element or subslice, without doing bounds
    /// checking.
    ///
    /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
    /// is *[undefined behavior]* even if the resulting pointer is not used.
    ///
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(slice_ptr_get)]
    ///
    /// let x = &[1, 2, 4] as *const [i32];
    ///
    /// unsafe {
    ///     assert_eq!(x.get_unchecked(1), x.as_ptr().add(1));
    /// }
    /// ```
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[rustc_const_unstable(feature = "const_index", issue = "143775")]
    #[inline]
    pub const unsafe fn get_unchecked<I>(self, index: I) -> *const I::Output
    where
        I: [const] SliceIndex<[T]>,
    {
        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
        unsafe { index.get_unchecked(self) }
    }
    #[doc = include_str!("docs/as_uninit_slice.md")]
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
        if self.is_null() {
            None
        } else {
            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
            Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
        }
    }
}
impl<T> *const T {
    /// Casts from a pointer-to-`T` to a pointer-to-`[T; N]`.
    #[inline]
    #[unstable(feature = "ptr_cast_array", issue = "144514")]
252
    pub const fn cast_array<const N: usize>(self) -> *const [T; N] {
252
        self.cast()
252
    }
}
#[cfg(not(feature = "ferrocene_certified"))]
impl<T, const N: usize> *const [T; N] {
    /// Returns a raw pointer to the array's buffer.
    ///
    /// This is equivalent to casting `self` to `*const T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(array_ptr_get)]
    /// use std::ptr;
    ///
    /// let arr: *const [i8; 3] = ptr::null();
    /// assert_eq!(arr.as_ptr(), ptr::null());
    /// ```
    #[inline]
    #[unstable(feature = "array_ptr_get", issue = "119834")]
    pub const fn as_ptr(self) -> *const T {
        // Same address, element type instead of array type.
        self.cast::<T>()
    }
    /// Returns a raw pointer to a slice containing the entire array.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(array_ptr_get)]
    ///
    /// let arr: *const [i32; 3] = &[1, 2, 4] as *const [i32; 3];
    /// let slice: *const [i32] = arr.as_slice();
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline]
    #[unstable(feature = "array_ptr_get", issue = "119834")]
    pub const fn as_slice(self) -> *const [T] {
        // Unsizing cast: the slice pointer carries `N` as its length metadata.
        self as *const [T]
    }
}
/// Pointer equality is by address, as produced by the [`<*const T>::addr`](pointer::addr) method.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PointeeSized> PartialEq for *const T {
    #[inline]
    // NOTE(review): the lint allow below suggests that for wide pointers `==`
    // involves more than the data address — confirm against the doc above,
    // which describes address-only comparison.
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn eq(&self, other: &*const T) -> bool {
        *self == *other
    }
}
/// Pointer equality is an equivalence relation.
#[stable(feature = "rust1", since = "1.0.0")]
// `Eq` has no methods; this impl merely asserts `==` is a total equivalence.
impl<T: PointeeSized> Eq for *const T {}
/// Pointer comparison is by address, as produced by the [`<*const T>::addr`](pointer::addr) method.
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(feature = "ferrocene_certified"))]
impl<T: PointeeSized> Ord for *const T {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn cmp(&self, other: &*const T) -> Ordering {
        // Map the primitive `<` / `==` operators onto the three `Ordering` variants.
        if self < other {
            Less
        } else if self == other {
            Equal
        } else {
            Greater
        }
    }
}
/// Pointer comparison is by address, as produced by the [`<*const T>::addr`](pointer::addr) method.
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(feature = "ferrocene_certified"))]
impl<T: PointeeSized> PartialOrd for *const T {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn partial_cmp(&self, other: &*const T) -> Option<Ordering> {
        // Pointer ordering is total, so this always yields `Some`.
        Some(self.cmp(other))
    }
    // The individual operators are overridden to forward directly to the
    // primitive pointer comparisons instead of going through `partial_cmp`.
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn lt(&self, other: &*const T) -> bool {
        *self < *other
    }
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn le(&self, other: &*const T) -> bool {
        *self <= *other
    }
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn gt(&self, other: &*const T) -> bool {
        *self > *other
    }
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn ge(&self, other: &*const T) -> bool {
        *self >= *other
    }
}
#[stable(feature = "raw_ptr_default", since = "1.88.0")]
#[cfg(not(feature = "ferrocene_certified"))]
impl<T: ?Sized + Thin> Default for *const T {
    /// Returns the default value of [`null()`][crate::ptr::null].
    fn default() -> Self {
        crate::ptr::null()
    }
}