core/ptr/const_ptr.rs

use super::*;
use crate::cmp::Ordering::{Equal, Greater, Less};
use crate::intrinsics::const_eval_select;
use crate::mem::{self, SizedTypeProperties};
use crate::slice::{self, SliceIndex};

impl<T: ?Sized> *const T {
    /// Returns `true` if the pointer is null.
    ///
    /// Note that unsized types have many possible null pointers, as only the
    /// raw data pointer is considered, not their length, vtable, etc.
    /// Therefore, two pointers that are null may still not compare equal to
    /// each other.
    ///
    /// # Panics during const evaluation
    ///
    /// If this method is used during const evaluation, and `self` is a pointer
    /// that is offset beyond the bounds of the memory it initially pointed to,
    /// then there might not be enough information to determine whether the
    /// pointer is null. This is because the absolute address in memory is not
    /// known at compile time. If the nullness of the pointer cannot be
    /// determined, this method will panic.
    ///
    /// In-bounds pointers are never null, so the method will never panic for
    /// such pointers.
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "Follow the rabbit";
    /// let ptr: *const u8 = s.as_ptr();
    /// assert!(!ptr.is_null());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
    #[rustc_diagnostic_item = "ptr_const_is_null"]
    #[inline]
    #[rustc_allow_const_fn_unstable(const_eval_select)]
    pub const fn is_null(self) -> bool {
        // Compare via a cast to a thin pointer, so fat pointers only consider
        // their "data" part for null-ness.
        let ptr = self as *const u8;
        const_eval_select!(
            @capture { ptr: *const u8 } -> bool:
            // This use of `const_raw_ptr_comparison` has been explicitly blessed by t-lang.
            if const #[rustc_allow_const_fn_unstable(const_raw_ptr_comparison)] {
                match (ptr).guaranteed_eq(null_mut()) {
                    Some(res) => res,
                    // To remain maximally conservative, we stop execution when we don't
                    // know whether the pointer is null or not.
                    // We can *not* return `false` here, that would be unsound in `NonNull::new`!
                    None => panic!("null-ness of this pointer cannot be determined in const context"),
                }
            } else {
                ptr.addr() == 0
            }
        )
    }

    /// Casts to a pointer of another type.
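    ///
    /// # Examples
    ///
    /// A small, illustrative cast from a `u16` pointer to a byte pointer:
    ///
    /// ```
    /// let val: u16 = 2;
    /// let ptr: *const u16 = &val;
    /// // The cast changes only the pointee type; address and provenance stay the same.
    /// let byte_ptr: *const u8 = ptr.cast();
    /// assert_eq!(byte_ptr.addr(), ptr.addr());
    /// ```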
    #[stable(feature = "ptr_cast", since = "1.38.0")]
    #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
    #[rustc_diagnostic_item = "const_ptr_cast"]
    #[inline(always)]
    pub const fn cast<U>(self) -> *const U {
        self as _
    }

    /// Tries to cast to a pointer of another type by checking alignment.
    ///
    /// If the pointer is properly aligned to the target type, it will be
    /// cast to the target type. Otherwise, `None` is returned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(pointer_try_cast_aligned)]
    ///
    /// let x = 0u64;
    ///
    /// let aligned: *const u64 = &x;
    /// let unaligned = unsafe { aligned.byte_add(1) };
    ///
    /// assert!(aligned.try_cast_aligned::<u32>().is_some());
    /// assert!(unaligned.try_cast_aligned::<u32>().is_none());
    /// ```
    #[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
    #[must_use = "this returns the result of the operation, \
                  without modifying the original"]
    #[inline]
    pub fn try_cast_aligned<U>(self) -> Option<*const U> {
        if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
    }

    /// Uses the address value in a new pointer of another type.
    ///
    /// This operation will ignore the address part of its `meta` operand and discard existing
    /// metadata of `self`. For pointers to a sized type (thin pointers), this has the same effect
    /// as a simple cast. For pointers to an unsized type (fat pointers) this recombines the address
    /// with new metadata such as slice lengths or `dyn`-vtable.
    ///
    /// The resulting pointer will have provenance of `self`. This operation is semantically the
    /// same as creating a new pointer with the data pointer value of `self` but the metadata of
    /// `meta`, being fat or thin depending on the `meta` operand.
    ///
    /// # Examples
    ///
    /// This function is primarily useful for enabling pointer arithmetic on potentially fat
    /// pointers. The pointer is cast to a sized pointee to utilize offset operations and then
    /// recombined with its own original metadata.
    ///
    /// ```
    /// #![feature(set_ptr_value)]
    /// # use core::fmt::Debug;
    /// let arr: [i32; 3] = [1, 2, 3];
    /// let mut ptr = arr.as_ptr() as *const dyn Debug;
    /// let thin = ptr as *const u8;
    /// unsafe {
    ///     ptr = thin.add(8).with_metadata_of(ptr);
    ///     # assert_eq!(*(ptr as *const i32), 3);
    ///     println!("{:?}", &*ptr); // will print "3"
    /// }
    /// ```
    ///
    /// # *Incorrect* usage
    ///
    /// The provenance from pointers is *not* combined. The result must only be used to refer to the
    /// address allowed by `self`.
    ///
    /// ```rust,no_run
    /// #![feature(set_ptr_value)]
    /// let x = 0u32;
    /// let y = 1u32;
    ///
    /// let x = (&x) as *const u32;
    /// let y = (&y) as *const u32;
    ///
    /// let offset = (x as usize - y as usize) / 4;
    /// let bad = x.wrapping_add(offset).with_metadata_of(y);
    ///
    /// // This dereference is UB. The pointer only has provenance for `x` but points to `y`.
    /// println!("{:?}", unsafe { &*bad });
    /// ```
    #[unstable(feature = "set_ptr_value", issue = "75091")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline]
    pub const fn with_metadata_of<U>(self, meta: *const U) -> *const U
    where
        U: ?Sized,
    {
        from_raw_parts::<U>(self as *const (), metadata(meta))
    }

    /// Changes constness without changing the type.
    ///
    /// This is a bit safer than `as` because it wouldn't silently change the type if the code is
    /// refactored.
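    ///
    /// # Examples
    ///
    /// An illustrative round-trip (note that writing through the result here would
    /// still be UB, since the pointer was derived from a shared reference):
    ///
    /// ```
    /// let val = 5u32;
    /// let ptr: *const u32 = &val;
    /// let mut_ptr: *mut u32 = ptr.cast_mut();
    /// assert_eq!(mut_ptr.cast_const(), ptr);
    /// ```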
    #[stable(feature = "ptr_const_cast", since = "1.65.0")]
    #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")]
    #[rustc_diagnostic_item = "ptr_cast_mut"]
    #[inline(always)]
    pub const fn cast_mut(self) -> *mut T {
        self as _
    }

    /// Gets the "address" portion of the pointer.
    ///
    /// This is similar to `self as usize`, except that the [provenance][crate::ptr#provenance] of
    /// the pointer is discarded and not [exposed][crate::ptr#exposed-provenance]. This means that
    /// casting the returned address back to a pointer yields a [pointer without
    /// provenance][without_provenance], which is undefined behavior to dereference. To properly
    /// restore the lost information and obtain a dereferenceable pointer, use
    /// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr].
    ///
    /// If using those APIs is not possible because there is no way to preserve a pointer with the
    /// required provenance, then Strict Provenance might not be for you. Use pointer-integer casts
    /// or [`expose_provenance`][pointer::expose_provenance] and [`with_exposed_provenance`][with_exposed_provenance]
    /// instead. However, note that this makes your code less portable and less amenable to tools
    /// that check for compliance with the Rust memory model.
    ///
    /// On most platforms this will produce a value with the same bytes as the original
    /// pointer, because all the bytes are dedicated to describing the address.
    /// Platforms which need to store additional information in the pointer may
    /// perform a change of representation to produce a value containing only the address
    /// portion of the pointer. What that means is up to the platform to define.
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
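    ///
    /// # Examples
    ///
    /// A small sketch showing that the address round-trips through
    /// [`with_addr`][pointer::with_addr]:
    ///
    /// ```
    /// let x = 5u8;
    /// let ptr: *const u8 = &x;
    /// // `addr` discards provenance; `with_addr` re-attaches the provenance of `ptr`.
    /// assert_eq!(ptr.with_addr(ptr.addr()), ptr);
    /// ```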
    #[must_use]
    #[inline(always)]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn addr(self) -> usize {
        // A pointer-to-integer transmute currently has exactly the right semantics: it returns the
        // address without exposing the provenance. Note that this is *not* a stable guarantee about
        // transmute semantics, it relies on sysroot crates having special status.
        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
        // provenance).
        unsafe { mem::transmute(self.cast::<()>()) }
    }

    /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
    /// [`with_exposed_provenance`] and returns the "address" portion.
    ///
    /// This is equivalent to `self as usize`, which semantically discards provenance information.
    /// Furthermore, this (like the `as` cast) has the implicit side-effect of marking the
    /// provenance as 'exposed', so on platforms that support it you can later call
    /// [`with_exposed_provenance`] to reconstitute the original pointer including its provenance.
    ///
    /// Due to its inherent ambiguity, [`with_exposed_provenance`] may not be supported by tools
    /// that help you to stay conformant with the Rust memory model. It is recommended to use
    /// [Strict Provenance][crate::ptr#strict-provenance] APIs such as [`with_addr`][pointer::with_addr]
    /// wherever possible, in which case [`addr`][pointer::addr] should be used instead of `expose_provenance`.
    ///
    /// On most platforms this will produce a value with the same bytes as the original pointer,
    /// because all the bytes are dedicated to describing the address. Platforms which need to store
    /// additional information in the pointer may not support this operation, since the 'expose'
    /// side-effect which is required for [`with_exposed_provenance`] to work is typically not
    /// available.
    ///
    /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
    ///
    /// [`with_exposed_provenance`]: with_exposed_provenance
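    ///
    /// # Examples
    ///
    /// An illustrative round-trip through [`with_exposed_provenance`]:
    ///
    /// ```
    /// use std::ptr;
    ///
    /// let x = 3u8;
    /// let ptr: *const u8 = &x;
    /// // The provenance of `ptr` is now exposed, so the returned address can be
    /// // turned back into a dereferenceable pointer.
    /// let addr = ptr.expose_provenance();
    /// let ptr2 = ptr::with_exposed_provenance::<u8>(addr);
    /// unsafe { assert_eq!(*ptr2, 3); }
    /// ```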
    #[inline(always)]
    #[stable(feature = "exposed_provenance", since = "1.84.0")]
    pub fn expose_provenance(self) -> usize {
        self.cast::<()>() as usize
    }

    /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
    /// `self`.
    ///
    /// This is similar to an `addr as *const T` cast, but copies
    /// the *provenance* of `self` to the new pointer.
    /// This avoids the inherent ambiguity of the unary cast.
    ///
    /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset
    /// `self` to the given address, and therefore has all the same capabilities and restrictions.
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
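    ///
    /// # Examples
    ///
    /// A minimal sketch, moving a pointer to the address of another element in the
    /// same allocation:
    ///
    /// ```
    /// let data = [1u8, 2, 3];
    /// let first = data.as_ptr();
    /// // Same provenance as `first`, so the new pointer may access `data`.
    /// let third = first.with_addr(first.addr() + 2);
    /// unsafe { assert_eq!(*third, 3); }
    /// ```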
    #[must_use]
    #[inline]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn with_addr(self, addr: usize) -> Self {
        // This should probably be an intrinsic to avoid doing any sort of arithmetic, but
        // meanwhile, we can implement it with `wrapping_offset`, which preserves the pointer's
        // provenance.
        let self_addr = self.addr() as isize;
        let dest_addr = addr as isize;
        let offset = dest_addr.wrapping_sub(self_addr);
        self.wrapping_byte_offset(offset)
    }

    /// Creates a new pointer by mapping `self`'s address to a new one, preserving the
    /// [provenance][crate::ptr#provenance] of `self`.
    ///
    /// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details.
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
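    ///
    /// # Examples
    ///
    /// An illustrative use, offsetting the address while keeping provenance:
    ///
    /// ```
    /// let data = [7u8, 8];
    /// let ptr = data.as_ptr();
    /// let next = ptr.map_addr(|a| a + 1);
    /// unsafe { assert_eq!(*next, 8); }
    /// ```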
    #[must_use]
    #[inline]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
        self.with_addr(f(self.addr()))
    }

    /// Decomposes a (possibly wide) pointer into its data pointer and metadata components.
    ///
    /// The pointer can be later reconstructed with [`from_raw_parts`].
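    ///
    /// # Examples
    ///
    /// A small sketch, decomposing a slice pointer into its data pointer and length:
    ///
    /// ```
    /// #![feature(ptr_metadata)]
    /// let arr: [i32; 3] = [1, 2, 3];
    /// let slice_ptr: *const [i32] = &arr[..];
    /// let (data, len) = slice_ptr.to_raw_parts();
    /// assert_eq!(len, 3);
    /// assert_eq!(data, arr.as_ptr().cast::<()>());
    /// ```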
    #[unstable(feature = "ptr_metadata", issue = "81513")]
    #[inline]
    pub const fn to_raw_parts(self) -> (*const (), <T as super::Pointee>::Metadata) {
        (self.cast(), metadata(self))
    }

    /// Returns `None` if the pointer is null, or else returns a shared reference to
    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`]
    /// must be used instead.
    ///
    /// [`as_uninit_ref`]: #method.as_uninit_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null
    ///
    /// # Examples
    ///
    /// ```
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_ref() {
    ///         assert_eq!(val_back, &10);
    ///     }
    /// }
    /// ```
    ///
    /// # Null-unchecked version
    ///
    /// If you are sure the pointer can never be null and are looking for some kind of
    /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
    /// dereference the pointer directly.
    ///
    /// ```
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     let val_back = &*ptr;
    ///     assert_eq!(val_back, &10);
    /// }
    /// ```
    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
    #[inline]
    pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
        // SAFETY: the caller must guarantee that `self` is valid
        // for a reference if it isn't null.
        if self.is_null() { None } else { unsafe { Some(&*self) } }
    }

    /// Returns a shared reference to the value behind the pointer.
    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_ref`] must be used instead.
    /// If the pointer may be null, but the value is known to have been initialized, [`as_ref`] must be used instead.
    ///
    /// [`as_ref`]: #method.as_ref
    /// [`as_uninit_ref`]: #method.as_uninit_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_ref_unchecked)]
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     assert_eq!(ptr.as_ref_unchecked(), &10);
    /// }
    /// ```
    // FIXME: mention it in the docs for `as_ref` and `as_uninit_ref` once stabilized.
    #[unstable(feature = "ptr_as_ref_unchecked", issue = "122034")]
    #[inline]
    #[must_use]
    pub const unsafe fn as_ref_unchecked<'a>(self) -> &'a T {
        // SAFETY: the caller must guarantee that `self` is valid for a reference
        unsafe { &*self }
    }

    /// Returns `None` if the pointer is null, or else returns a shared reference to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
    /// that the value has to be initialized.
    ///
    /// [`as_ref`]: #method.as_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_uninit_ref() {
    ///         assert_eq!(val_back.assume_init(), 10);
    ///     }
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
    where
        T: Sized,
    {
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a reference.
        if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
    }

    /// Adds a signed offset to a pointer.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
    ///   "wrapping around"), must fit in an `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
    ///   [allocated object], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocated object. In particular, this range must not "wrap around" the edge
    ///   of the address space. Note that "range" here refers to a half-open range as usual in Rust,
    ///   i.e., `self..result` for non-negative offsets and `result..self` for negative offsets.
    ///
    /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// Consider using [`wrapping_offset`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_offset`]: #method.wrapping_offset
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     assert_eq!(*ptr.offset(1) as char, '2');
    ///     assert_eq!(*ptr.offset(2) as char, '3');
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset(self, count: isize) -> *const T
    where
        T: Sized,
    {
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
            // We can use const_eval_select here because this is only for UB checks.
            const_eval_select!(
                @capture { this: *const (), count: isize, size: usize } -> bool:
                if const {
                    true
                } else {
                    // `size` is the size of a Rust type, so we know that
                    // `size <= isize::MAX` and thus `as` cast here is not lossy.
                    let Some(byte_offset) = count.checked_mul(size as isize) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
                    !overflow
                }
            )
        }

        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::offset requires the address calculation to not overflow",
            (
                this: *const () = self as *const (),
                count: isize = count,
                size: usize = size_of::<T>(),
            ) => runtime_offset_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }

    /// Adds a signed offset in bytes to a pointer.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [offset][pointer::offset] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
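    ///
    /// # Examples
    ///
    /// A small sketch, stepping one `u16` (two bytes) in either direction:
    ///
    /// ```
    /// let arr: [u16; 3] = [1, 2, 3];
    /// let mid: *const u16 = &arr[1];
    /// unsafe {
    ///     assert_eq!(*mid.byte_offset(2), 3);
    ///     assert_eq!(*mid.byte_offset(-2), 1);
    /// }
    /// ```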
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset(self, count: isize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { self.cast::<u8>().offset(count).with_metadata_of(self) }
    }

    /// Adds a signed offset to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to
    /// (this is called "[Provenance](ptr/index.html#provenance)").
    /// The pointer must not be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`offset`], this method basically delays the requirement of staying within the
    /// same allocated object: [`offset`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
    /// words, leaving the allocated object and then re-entering it later is permitted.
    ///
    /// [`offset`]: #method.offset
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_offset(6);
    ///
    /// let mut out = String::new();
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_offset(step);
    /// }
    /// assert_eq!(out.as_str(), "1, 3, 5, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_offset(self, count: isize) -> *const T
    where
        T: Sized,
    {
        // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
        unsafe { intrinsics::arith_offset(self, count) }
    }

    /// Adds a signed offset in bytes to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_offset][pointer::wrapping_offset] on it. See that method
    /// for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    pub const fn wrapping_byte_offset(self, count: isize) -> Self {
        self.cast::<u8>().wrapping_offset(count).with_metadata_of(self)
    }

    /// Masks out bits of the pointer according to a mask.
    ///
    /// This is convenience for `ptr.map_addr(|a| a & mask)`.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    ///
    /// ## Examples
    ///
    /// ```
    /// #![feature(ptr_mask)]
    /// let v = 17_u32;
    /// let ptr: *const u32 = &v;
    ///
    /// // `u32` is 4 bytes aligned,
    /// // which means that lower 2 bits are always 0.
    /// let tag_mask = 0b11;
    /// let ptr_mask = !tag_mask;
    ///
    /// // We can store something in these lower bits
    /// let tagged_ptr = ptr.map_addr(|a| a | 0b10);
    ///
    /// // Get the "tag" back
    /// let tag = tagged_ptr.addr() & tag_mask;
    /// assert_eq!(tag, 0b10);
    ///
    /// // Note that `tagged_ptr` is unaligned, it's UB to read from it.
    /// // To get original pointer `mask` can be used:
    /// let masked_ptr = tagged_ptr.mask(ptr_mask);
    /// assert_eq!(unsafe { *masked_ptr }, 17);
    /// ```
    #[unstable(feature = "ptr_mask", issue = "98290")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline(always)]
    pub fn mask(self, mask: usize) -> *const T {
        intrinsics::ptr_mask(self.cast::<()>(), mask).with_metadata_of(self)
    }

    /// Calculates the distance between two pointers within the same allocation. The returned value is in
    /// units of T: the distance in bytes divided by `size_of::<T>()`.
    ///
    /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
    /// except that it has a lot more opportunities for UB, in exchange for the compiler
    /// better understanding what you are doing.
    ///
    /// The primary motivation of this method is for computing the `len` of an array/slice
    /// of `T` that you are currently representing as a "start" and "end" pointer
    /// (and "end" is "one past the end" of the array).
    /// In that case, `end.offset_from(start)` gets you the length of the array.
    ///
    /// All of the following safety requirements are trivially satisfied for this use case.
    ///
    /// [`offset`]: #method.offset
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * `self` and `origin` must either
    ///
    ///   * point to the same address, or
    ///   * both be [derived from][crate::ptr#provenance] a pointer to the same [allocated object], and the memory range between
    ///     the two pointers must be in bounds of that object. (See below for an example.)
    ///
    /// * The distance between the pointers, in bytes, must be an exact multiple
    ///   of the size of `T`.
    ///
    /// As a consequence, the absolute distance between the pointers, in bytes, computed on
    /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
    /// implied by the in-bounds requirement, and the fact that no allocated object can be larger
    /// than `isize::MAX` bytes.
    ///
    /// The requirement for pointers to be derived from the same allocated object is primarily
    /// needed for `const`-compatibility: the distance between pointers into *different* allocated
    /// objects is not known at compile-time. However, the requirement also exists at
    /// runtime and may be exploited by optimizations. If you wish to compute the difference between
    /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
    /// origin as isize) / size_of::<T>()`.
    // FIXME: recommend `addr()` instead of `as usize` once that is stable.
    ///
    /// [`add`]: #method.add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let a = [0; 5];
    /// let ptr1: *const i32 = &a[1];
    /// let ptr2: *const i32 = &a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
    ///     assert_eq!(ptr1.offset(2), ptr2);
    ///     assert_eq!(ptr2.offset(-2), ptr1);
    /// }
    /// ```
    ///
    /// *Incorrect* usage:
    ///
    /// ```rust,no_run
    /// let ptr1 = Box::into_raw(Box::new(0u8)) as *const u8;
    /// let ptr2 = Box::into_raw(Box::new(1u8)) as *const u8;
    /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
    /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
    /// let ptr2_other = (ptr1 as *const u8).wrapping_offset(diff).wrapping_offset(1);
    /// assert_eq!(ptr2 as usize + 1, ptr2_other as usize);
    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
    /// // computing their offset is undefined behavior, even though
    /// // they point to addresses that are in-bounds of the same object!
    /// unsafe {
    ///     let one = ptr2_other.offset_from(ptr2); // Undefined Behavior! ⚠️
    /// }
    /// ```
    #[stable(feature = "ptr_offset_from", since = "1.47.0")]
    #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset_from(self, origin: *const T) -> isize
    where
        T: Sized,
    {
        let pointee_size = size_of::<T>();
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from`.
        unsafe { intrinsics::ptr_offset_from(self, origin) }
    }

    /// Calculates the distance between two pointers within the same allocation. The returned value is in
    /// units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`offset_from`][pointer::offset_from] on it. See that method for
    /// documentation and safety requirements.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointers,
    /// ignoring the metadata.
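    ///
    /// # Examples
    ///
    /// An illustrative distance computation (two `u16` elements = 4 bytes):
    ///
    /// ```
    /// let arr: [u16; 3] = [1, 2, 3];
    /// let start = arr.as_ptr();
    /// unsafe {
    ///     let end = start.add(2);
    ///     assert_eq!(end.byte_offset_from(start), 4);
    ///     assert_eq!(start.byte_offset_from(end), -4);
    /// }
    /// ```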
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: *const U) -> isize {
        // SAFETY: the caller must uphold the safety contract for `offset_from`.
        unsafe { self.cast::<u8>().offset_from(origin.cast::<u8>()) }
    }

    /// Calculates the distance between two pointers within the same allocation, *where it's known that
    /// `self` is equal to or greater than `origin`*. The returned value is in
    /// units of T: the distance in bytes is divided by `size_of::<T>()`.
    ///
    /// This computes the same value that [`offset_from`](#method.offset_from)
    /// would compute, but with the added precondition that the offset is
    /// guaranteed to be non-negative.  This method is equivalent to
    /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
    /// but it provides slightly more information to the optimizer, which can
    /// sometimes allow it to optimize slightly better with some backends.
    ///
    /// This method can be thought of as recovering the `count` that was passed
    /// to [`add`](#method.add) (or, with the parameters in the other order,
    /// to [`sub`](#method.sub)).  The following are all equivalent, assuming
    /// that their safety preconditions are met:
    /// ```rust
    /// # unsafe fn blah(ptr: *const i32, origin: *const i32, count: usize) -> bool { unsafe {
    /// ptr.offset_from_unsigned(origin) == count
    /// # &&
    /// origin.add(count) == ptr
    /// # &&
    /// ptr.sub(count) == origin
    /// # } }
    /// ```
    ///
    /// # Safety
    ///
    /// - The distance between the pointers must be non-negative (`self >= origin`)
    ///
    /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
    ///   apply to this method as well; see it for the full details.
    ///
    /// Importantly, despite the return type of this method being able to represent
    /// a larger offset, it's still *not permitted* to pass pointers which differ
    /// by more than `isize::MAX` *bytes*.  As such, the result of this method will
    /// always be less than or equal to `isize::MAX as usize`.
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// ```
    /// let a = [0; 5];
    /// let ptr1: *const i32 = &a[1];
    /// let ptr2: *const i32 = &a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
    ///     assert_eq!(ptr1.add(2), ptr2);
    ///     assert_eq!(ptr2.sub(2), ptr1);
    ///     assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
    /// }
    ///
    /// // This would be incorrect, as the pointers are not correctly ordered:
    /// // ptr1.offset_from_unsigned(ptr2)
    /// ```
    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset_from_unsigned(self, origin: *const T) -> usize
    where
        T: Sized,
    {
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_ptr_ge(this: *const (), origin: *const ()) -> bool {
            const_eval_select!(
                @capture { this: *const (), origin: *const () } -> bool:
                if const {
                    true
                } else {
                    this >= origin
                }
            )
        }

        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::offset_from_unsigned requires `self >= origin`",
            (
                this: *const () = self as *const (),
                origin: *const () = origin as *const (),
            ) => runtime_ptr_ge(this, origin)
        );

        let pointee_size = size_of::<T>();
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from_unsigned`.
        unsafe { intrinsics::ptr_offset_from_unsigned(self, origin) }
    }

    /// Calculates the distance between two pointers within the same allocation, *where it's known that
    /// `self` is equal to or greater than `origin`*. The returned value is in
    /// units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`offset_from_unsigned`][pointer::offset_from_unsigned] on it.
    /// See that method for documentation and safety requirements.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointers,
    /// ignoring the metadata.
    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: *const U) -> usize {
        // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
        unsafe { self.cast::<u8>().offset_from_unsigned(origin.cast::<u8>()) }
    }

    /// Returns whether two pointers are guaranteed to be equal.
    ///
    /// At runtime this function behaves like `Some(self == other)`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine equality of two pointers, so this function may
    /// spuriously return `None` for pointers whose equality later turns out to be known.
    /// But when it returns `Some`, the pointers' equality is guaranteed to be known.
    ///
    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
    /// version, and unsafe code must not
    /// rely on the result of this function for soundness. It is suggested to only use this function
    /// for performance optimizations where spurious `None` return values by this function do not
    /// affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
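    ///
    /// # Examples
    ///
    /// A small sketch; at runtime the comparison can always be resolved (do not
    /// rely on any particular return value for soundness):
    ///
    /// ```
    /// #![feature(const_raw_ptr_comparison)]
    /// let x = 0u8;
    /// let ptr: *const u8 = &x;
    /// assert_eq!(ptr.guaranteed_eq(ptr), Some(true));
    /// ```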
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[inline]
    pub const fn guaranteed_eq(self, other: *const T) -> Option<bool>
    where
        T: Sized,
    {
        match intrinsics::ptr_guaranteed_cmp(self, other) {
            2 => None,
            other => Some(other == 1),
        }
    }

    /// Returns whether two pointers are guaranteed to be unequal.
    ///
    /// At runtime this function behaves like `Some(self != other)`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine inequality of two pointers, so this function may
    /// spuriously return `None` for pointers whose inequality later turns out to be known.
    /// But when it returns `Some`, the pointers' inequality is guaranteed to be known.
    ///
    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
    /// version, and unsafe code must not
    /// rely on the result of this function for soundness. It is suggested to only use this function
    /// for performance optimizations where spurious `None` return values by this function do not
    /// affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[inline]
    pub const fn guaranteed_ne(self, other: *const T) -> Option<bool>
    where
        T: Sized,
    {
        match self.guaranteed_eq(other) {
            None => None,
            Some(eq) => Some(!eq),
        }
    }

    /// Adds an unsigned offset to a pointer.
    ///
    /// This can only move the pointer forward (or not move it). If you need to move forward or
    /// backward depending on the value, then you might want [`offset`](#method.offset) instead
    /// which takes a signed offset.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
    ///   "wrapping around"), must fit in an `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
    ///   [allocated object], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocated object. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// Consider using [`wrapping_add`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_add`]: #method.wrapping_add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     assert_eq!(*ptr.add(1), b'2');
    ///     assert_eq!(*ptr.add(2), b'3');
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add(byte_offset);
                    byte_offset <= (isize::MAX as usize) && !overflow
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::add requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_add_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }

    /// Adds an unsigned offset in bytes to a pointer.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [add][pointer::add] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
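    ///
    /// # Examples
    ///
    /// A small sketch, advancing by the size of one `u16` (two bytes):
    ///
    /// ```
    /// let arr: [u16; 2] = [1, 2];
    /// let ptr = arr.as_ptr();
    /// unsafe {
    ///     assert_eq!(*ptr.byte_add(2), 2);
    /// }
    /// ```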
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_add(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `add`.
        unsafe { self.cast::<u8>().add(count).with_metadata_of(self) }
    }

    /// Subtracts an unsigned offset from a pointer.
    ///
    /// This can only move the pointer backward (or not move it). If you need to move forward or
    /// backward depending on the value, then you might want [`offset`](#method.offset) instead
    /// which takes a signed offset.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
    ///   "wrapping around"), must fit in an `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
    ///   [allocated object], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocated object. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// Consider using [`wrapping_sub`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_sub`]: #method.wrapping_sub
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    ///
    /// unsafe {
    ///     let end: *const u8 = s.as_ptr().add(3);
    ///     assert_eq!(*end.sub(1), b'3');
    ///     assert_eq!(*end.sub(2), b'2');
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::sub requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_sub_nowrap(this, count, size)
        );

        if T::IS_ZST {
            // Pointer arithmetic does nothing when the pointee is a ZST.
            self
        } else {
            // SAFETY: the caller must uphold the safety contract for `offset`.
            // Because the pointee is *not* a ZST, that means that `count` is
            // at most `isize::MAX`, and thus the negation cannot overflow.
            unsafe { intrinsics::offset(self, intrinsics::unchecked_sub(0, count as isize)) }
        }
    }

    /// Subtracts an unsigned offset in bytes from a pointer.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [sub][pointer::sub] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
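    ///
    /// # Examples
    ///
    /// A small sketch, stepping back from a one-past-the-end pointer:
    ///
    /// ```
    /// let arr: [u16; 2] = [1, 2];
    /// unsafe {
    ///     let end = arr.as_ptr().add(2);
    ///     assert_eq!(*end.byte_sub(2), 2);
    ///     assert_eq!(*end.byte_sub(4), 1);
    /// }
    /// ```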
1116    #[must_use]
1117    #[inline(always)]
1118    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1119    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1120    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1121    pub const unsafe fn byte_sub(self, count: usize) -> Self {
1122        // SAFETY: the caller must uphold the safety contract for `sub`.
1123        unsafe { self.cast::<u8>().sub(count).with_metadata_of(self) }
1124    }
1125
1126    /// Adds an unsigned offset to a pointer using wrapping arithmetic.
1127    ///
1128    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1129    /// offset of `3 * size_of::<T>()` bytes.
1130    ///
1131    /// # Safety
1132    ///
1133    /// This operation itself is always safe, but using the resulting pointer is not.
1134    ///
1135    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
1136    /// be used to read or write other allocated objects.
1137    ///
1138    /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
1139    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
1140    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
1141    /// `x` and `y` point into the same allocated object.
1142    ///
1143    /// Compared to [`add`], this method basically delays the requirement of staying within the
1144    /// same allocated object: [`add`] is immediate Undefined Behavior when crossing object
1145    /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
1146    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
1147    /// can be optimized better and is thus preferable in performance-sensitive code.
1148    ///
1149    /// The delayed check only considers the value of the pointer that was dereferenced, not the
1150    /// intermediate values used during the computation of the final result. For example,
1151    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
1152    /// allocated object and then re-entering it later is permitted.
1153    ///
1154    /// [`add`]: #method.add
1155    /// [allocated object]: crate::ptr#allocated-object
1156    ///
1157    /// # Examples
1158    ///
1159    /// ```
1160    /// # use std::fmt::Write;
1161    /// // Iterate using a raw pointer in increments of two elements
1162    /// let data = [1u8, 2, 3, 4, 5];
1163    /// let mut ptr: *const u8 = data.as_ptr();
1164    /// let step = 2;
1165    /// let end_rounded_up = ptr.wrapping_add(6);
1166    ///
1167    /// let mut out = String::new();
1168    /// while ptr != end_rounded_up {
1169    ///     unsafe {
1170    ///         write!(&mut out, "{}, ", *ptr)?;
1171    ///     }
1172    ///     ptr = ptr.wrapping_add(step);
1173    /// }
1174    /// assert_eq!(out, "1, 3, 5, ");
1175    /// # std::fmt::Result::Ok(())
1176    /// ```
1177    #[stable(feature = "pointer_methods", since = "1.26.0")]
1178    #[must_use = "returns a new pointer rather than modifying its argument"]
1179    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
1180    #[inline(always)]
1181    pub const fn wrapping_add(self, count: usize) -> Self
1182    where
1183        T: Sized,
1184    {
1185        self.wrapping_offset(count as isize)
1186    }
1187
1188    /// Adds an unsigned offset in bytes to a pointer using wrapping arithmetic.
1189    ///
1190    /// `count` is in units of bytes.
1191    ///
1192    /// This is purely a convenience for casting to a `u8` pointer and
1193    /// using [wrapping_add][pointer::wrapping_add] on it. See that method for documentation.
1194    ///
1195    /// For non-`Sized` pointees this operation changes only the data pointer,
1196    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    pub const fn wrapping_byte_add(self, count: usize) -> Self {
        self.cast::<u8>().wrapping_add(count).with_metadata_of(self)
    }

    /// Subtracts an unsigned offset from a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`sub`], this method basically delays the requirement of staying within the
    /// same allocated object: [`sub`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocated object and then re-entering it later is permitted.
    ///
    /// [`sub`]: #method.sub
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements (backwards)
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let start_rounded_down = ptr.wrapping_sub(2);
    /// ptr = ptr.wrapping_add(4);
    /// let step = 2;
    /// let mut out = String::new();
    /// while ptr != start_rounded_down {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_sub(step);
    /// }
    /// assert_eq!(out, "5, 3, 1, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        self.wrapping_offset((count as isize).wrapping_neg())
    }

    /// Subtracts an unsigned offset in bytes from a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_sub][pointer::wrapping_sub] on it. See that method for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
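    ///
    /// # Examples
    ///
    /// A minimal sketch of the byte-wise behavior:
    ///
    /// ```
    /// let data = [1u16, 2, 3];
    /// // `add(1)` stays within `data`, so this setup is sound.
    /// let ptr: *const u16 = unsafe { data.as_ptr().add(1) };
    /// // One *byte* backward lands in the middle of the previous element.
    /// assert_eq!(ptr.wrapping_byte_sub(1).addr(), ptr.addr() - 1);
    /// ```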
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    pub const fn wrapping_byte_sub(self, count: usize) -> Self {
        self.cast::<u8>().wrapping_sub(count).with_metadata_of(self)
    }

    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// See [`ptr::read`] for safety concerns and examples.
    ///
    /// [`ptr::read`]: crate::ptr::read()
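    ///
    /// # Examples
    ///
    /// A minimal sketch; see [`ptr::read`] for the full safety requirements:
    ///
    /// ```
    /// let x = 12u32;
    /// let ptr: *const u32 = &x;
    /// // SAFETY: `ptr` is valid, aligned, and points to an initialized `u32`.
    /// assert_eq!(unsafe { ptr.read() }, 12);
    /// ```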
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn read(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read`.
        unsafe { read(self) }
    }

    /// Performs a volatile read of the value from `self` without moving it. This
    /// leaves the memory in `self` unchanged.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::read_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
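    ///
    /// # Examples
    ///
    /// A minimal sketch on ordinary memory (volatile reads are not restricted to
    /// I/O memory; they are merely never elided or reordered):
    ///
    /// ```
    /// let x = 7u8;
    /// let ptr: *const u8 = &x;
    /// // SAFETY: `ptr` is valid, aligned, and initialized.
    /// assert_eq!(unsafe { ptr.read_volatile() }, 7);
    /// ```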
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_volatile`.
        unsafe { read_volatile(self) }
    }

    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// Unlike `read`, the pointer may be unaligned.
    ///
    /// See [`ptr::read_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
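    ///
    /// # Examples
    ///
    /// A minimal sketch reading a `u32` from a deliberately misaligned position:
    ///
    /// ```
    /// let bytes = [0u8, 1, 0, 0, 0];
    /// // `add(1)` stays within `bytes`; the resulting pointer is generally
    /// // not 4-byte aligned, which `read_unaligned` tolerates.
    /// let ptr = unsafe { bytes.as_ptr().add(1) } as *const u32;
    /// // SAFETY: all 4 bytes read are within `bytes` and initialized.
    /// let v = unsafe { ptr.read_unaligned() };
    /// assert_eq!(v, u32::from_ne_bytes([1, 0, 0, 0]));
    /// ```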
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
        unsafe { read_unaligned(self) }
    }

    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: crate::ptr::copy()
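    ///
    /// # Examples
    ///
    /// A minimal sketch copying between two distinct buffers:
    ///
    /// ```
    /// let src = [1u8, 2, 3];
    /// let mut dst = [0u8; 3];
    /// // SAFETY: both pointers are valid and aligned for 3 `u8` elements.
    /// unsafe { src.as_ptr().copy_to(dst.as_mut_ptr(), 3) };
    /// assert_eq!(dst, [1, 2, 3]);
    /// ```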
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy`.
        unsafe { copy(self, dest, count) }
    }

    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
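    ///
    /// # Examples
    ///
    /// A minimal sketch; the two buffers are separate, so they cannot overlap:
    ///
    /// ```
    /// let src = [1u8, 2, 3];
    /// let mut dst = [0u8; 3];
    /// // SAFETY: the regions are valid, aligned, and non-overlapping.
    /// unsafe { src.as_ptr().copy_to_nonoverlapping(dst.as_mut_ptr(), 3) };
    /// assert_eq!(dst, [1, 2, 3]);
    /// ```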
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
        unsafe { copy_nonoverlapping(self, dest, count) }
    }

    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
    /// `align`.
    ///
    /// If it is not possible to align the pointer, the implementation returns
    /// `usize::MAX`.
    ///
    /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
    /// used with the `wrapping_add` method.
    ///
    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
    /// the returned offset is correct in all terms other than alignment.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two.
    ///
    /// # Examples
    ///
    /// Accessing adjacent `u8` as `u16`
    ///
    /// ```
    /// # unsafe {
    /// let x = [5_u8, 6, 7, 8, 9];
    /// let ptr = x.as_ptr();
    /// let offset = ptr.align_offset(align_of::<u16>());
    ///
    /// if offset < x.len() - 1 {
    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
    ///     assert!(*u16_ptr == u16::from_ne_bytes([5, 6]) || *u16_ptr == u16::from_ne_bytes([6, 7]));
    /// } else {
    ///     // while the pointer can be aligned via `offset`, it would point
    ///     // outside the allocation
    /// }
    /// # }
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "align_offset", since = "1.36.0")]
    pub fn align_offset(self, align: usize) -> usize
    where
        T: Sized,
    {
        if !align.is_power_of_two() {
            panic!("align_offset: align is not a power-of-two");
        }

        // SAFETY: `align` has been checked to be a power of 2 above
        let ret = unsafe { align_offset(self, align) };

        // Inform Miri that we want to consider the resulting pointer to be suitably aligned.
        #[cfg(miri)]
        if ret != usize::MAX {
            intrinsics::miri_promise_symbolic_alignment(self.wrapping_add(ret).cast(), align);
        }

        ret
    }

    /// Returns whether the pointer is properly aligned for `T`.
    ///
    /// # Examples
    ///
    /// ```
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let data = AlignedI32(42);
    /// let ptr = &data as *const AlignedI32;
    ///
    /// assert!(ptr.is_aligned());
    /// assert!(!ptr.wrapping_byte_add(1).is_aligned());
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
    pub fn is_aligned(self) -> bool
    where
        T: Sized,
    {
        self.is_aligned_to(align_of::<T>())
    }

    /// Returns whether the pointer is aligned to `align`.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointer,
    /// ignoring the metadata.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two (this includes 0).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(pointer_is_aligned_to)]
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let data = AlignedI32(42);
    /// let ptr = &data as *const AlignedI32;
    ///
    /// assert!(ptr.is_aligned_to(1));
    /// assert!(ptr.is_aligned_to(2));
    /// assert!(ptr.is_aligned_to(4));
    ///
    /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
    /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
    ///
    /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
    /// ```
    #[must_use]
    #[inline]
    #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
    pub fn is_aligned_to(self, align: usize) -> bool {
        if !align.is_power_of_two() {
            panic!("is_aligned_to: align is not a power-of-two");
        }

        self.addr() & (align - 1) == 0
    }
}

impl<T> *const [T] {
    /// Returns the length of a raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the raw slice cannot be cast to a slice
    /// reference because the pointer is null or unaligned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline]
    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
    pub const fn len(self) -> usize {
        metadata(self)
    }

    /// Returns `true` if the raw slice has a length of 0.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert!(!slice.is_empty());
    /// ```
    #[inline(always)]
    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
    pub const fn is_empty(self) -> bool {
        self.len() == 0
    }

    /// Returns a raw pointer to the slice's buffer.
    ///
    /// This is equivalent to casting `self` to `*const T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert_eq!(slice.as_ptr(), ptr::null());
    /// ```
    #[inline]
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    pub const fn as_ptr(self) -> *const T {
        self as *const T
    }

    /// Gets a raw pointer to the underlying array.
    ///
    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
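    ///
    /// # Examples
    ///
    /// A minimal sketch of the length check:
    ///
    /// ```
    /// #![feature(slice_as_array)]
    ///
    /// let slice = &[1, 2, 3] as *const [i32];
    /// assert!(slice.as_array::<3>().is_some());
    /// assert!(slice.as_array::<4>().is_none());
    /// ```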
    #[unstable(feature = "slice_as_array", issue = "133508")]
    #[inline]
    #[must_use]
    pub const fn as_array<const N: usize>(self) -> Option<*const [T; N]> {
        if self.len() == N {
            let me = self.as_ptr() as *const [T; N];
            Some(me)
        } else {
            None
        }
    }

    /// Returns a raw pointer to an element or subslice, without doing bounds
    /// checking.
    ///
    /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
    /// is *[undefined behavior]* even if the resulting pointer is not used.
    ///
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(slice_ptr_get)]
    ///
    /// let x = &[1, 2, 4] as *const [i32];
    ///
    /// unsafe {
    ///     assert_eq!(x.get_unchecked(1), x.as_ptr().add(1));
    /// }
    /// ```
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[inline]
    pub unsafe fn get_unchecked<I>(self, index: I) -> *const I::Output
    where
        I: SliceIndex<[T]>,
    {
        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
        unsafe { index.get_unchecked(self) }
    }

    /// Returns `None` if the pointer is null, or else returns a shared slice to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not
    /// require the value to be initialized.
    ///
    /// [`as_ref`]: #method.as_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be [valid] for reads for `ptr.len() * size_of::<T>()` many bytes,
    ///   and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single [allocated object]!
    ///       Slices can never span across multiple allocated objects.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts`][].
    ///
    /// [valid]: crate::ptr#safety
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null
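    ///
    /// # Examples
    ///
    /// A minimal sketch on an already-initialized slice:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let data = [1u8, 2, 3];
    /// let ptr: *const [u8] = &data;
    /// // SAFETY: `ptr` comes from a live, aligned, in-bounds slice reference.
    /// let uninit = unsafe { ptr.as_uninit_slice() }.unwrap();
    /// assert_eq!(uninit.len(), 3);
    /// ```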
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
        if self.is_null() {
            None
        } else {
            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
            Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
        }
    }
}

impl<T, const N: usize> *const [T; N] {
    /// Returns a raw pointer to the array's buffer.
    ///
    /// This is equivalent to casting `self` to `*const T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(array_ptr_get)]
    /// use std::ptr;
    ///
    /// let arr: *const [i8; 3] = ptr::null();
    /// assert_eq!(arr.as_ptr(), ptr::null());
    /// ```
    #[inline]
    #[unstable(feature = "array_ptr_get", issue = "119834")]
    pub const fn as_ptr(self) -> *const T {
        self as *const T
    }

    /// Returns a raw pointer to a slice containing the entire array.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(array_ptr_get)]
    ///
    /// let arr: *const [i32; 3] = &[1, 2, 4] as *const [i32; 3];
    /// let slice: *const [i32] = arr.as_slice();
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline]
    #[unstable(feature = "array_ptr_get", issue = "119834")]
    pub const fn as_slice(self) -> *const [T] {
        self
    }
}

/// Pointer equality is by address, as produced by the [`<*const T>::addr`](pointer::addr) method.
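///
/// # Examples
///
/// A minimal sketch of address-based equality:
///
/// ```
/// let a = [1u8, 2];
/// let x: *const u8 = &a[0];
/// let y: *const u8 = &a[1];
/// assert_eq!(x, x);
/// assert_ne!(x, y);
/// ```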
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialEq for *const T {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn eq(&self, other: &*const T) -> bool {
        *self == *other
    }
}

/// Pointer equality is an equivalence relation.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Eq for *const T {}

/// Pointer comparison is by address, as produced by the [`<*const T>::addr`](pointer::addr) method.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Ord for *const T {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn cmp(&self, other: &*const T) -> Ordering {
        if self < other {
            Less
        } else if self == other {
            Equal
        } else {
            Greater
        }
    }
}

/// Pointer comparison is by address, as produced by the [`<*const T>::addr`](pointer::addr) method.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialOrd for *const T {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn partial_cmp(&self, other: &*const T) -> Option<Ordering> {
        Some(self.cmp(other))
    }

    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn lt(&self, other: &*const T) -> bool {
        *self < *other
    }

    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn le(&self, other: &*const T) -> bool {
        *self <= *other
    }

    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn gt(&self, other: &*const T) -> bool {
        *self > *other
    }

    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn ge(&self, other: &*const T) -> bool {
        *self >= *other
    }
}

#[stable(feature = "raw_ptr_default", since = "1.88.0")]
impl<T: ?Sized + Thin> Default for *const T {
    /// Returns the default value of [`null()`][crate::ptr::null].
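    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// let ptr: *const u8 = Default::default();
    /// assert!(ptr.is_null());
    /// ```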
    fn default() -> Self {
        crate::ptr::null()
    }
}