core/ptr/mut_ptr.rs

1use super::*;
2use crate::cmp::Ordering::{Equal, Greater, Less};
3use crate::intrinsics::const_eval_select;
4use crate::marker::PointeeSized;
5use crate::mem::{self, SizedTypeProperties};
6use crate::slice::{self, SliceIndex};
7
8impl<T: PointeeSized> *mut T {
9    #[doc = include_str!("docs/is_null.md")]
10    ///
11    /// # Examples
12    ///
13    /// ```
14    /// let mut s = [1, 2, 3];
15    /// let ptr: *mut u32 = s.as_mut_ptr();
16    /// assert!(!ptr.is_null());
17    /// ```
18    #[stable(feature = "rust1", since = "1.0.0")]
19    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
20    #[rustc_diagnostic_item = "ptr_is_null"]
21    #[inline]
22    pub const fn is_null(self) -> bool {
23        self.cast_const().is_null()
24    }
25
26    /// Casts to a pointer of another type.
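    ///
    /// # Examples
    ///
    /// A minimal illustrative sketch (the local `x` is just a stand-in value):
    ///
    /// ```
    /// let mut x = 0u64;
    /// let p: *mut u64 = &mut x;
    /// let bytes: *mut u8 = p.cast::<u8>();
    /// assert_eq!(bytes.addr(), p.addr());
    /// ```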
27    #[stable(feature = "ptr_cast", since = "1.38.0")]
28    #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
29    #[rustc_diagnostic_item = "ptr_cast"]
30    #[inline(always)]
31    pub const fn cast<U>(self) -> *mut U {
32        self as _
33    }
34
35    /// Try to cast to a pointer of another type by checking alignment.
36    ///
37    /// If the pointer is properly aligned to the target type, it will be
38    /// cast to the target type. Otherwise, `None` is returned.
39    ///
40    /// # Examples
41    ///
42    /// ```rust
43    /// #![feature(pointer_try_cast_aligned)]
44    ///
45    /// let mut x = 0u64;
46    ///
47    /// let aligned: *mut u64 = &mut x;
48    /// let unaligned = unsafe { aligned.byte_add(1) };
49    ///
50    /// assert!(aligned.try_cast_aligned::<u32>().is_some());
51    /// assert!(unaligned.try_cast_aligned::<u32>().is_none());
52    /// ```
53    #[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
54    #[must_use = "this returns the result of the operation, \
55                  without modifying the original"]
56    #[inline]
57    pub fn try_cast_aligned<U>(self) -> Option<*mut U> {
58        if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
59    }
60
61    /// Uses the address value in a new pointer of another type.
62    ///
63    /// This operation will ignore the address part of its `meta` operand and discard existing
    /// metadata of `self`. For pointers to a sized type (thin pointers), this has the same effect
65    /// as a simple cast. For pointers to an unsized type (fat pointers) this recombines the address
66    /// with new metadata such as slice lengths or `dyn`-vtable.
67    ///
68    /// The resulting pointer will have provenance of `self`. This operation is semantically the
69    /// same as creating a new pointer with the data pointer value of `self` but the metadata of
70    /// `meta`, being fat or thin depending on the `meta` operand.
71    ///
72    /// # Examples
73    ///
74    /// This function is primarily useful for enabling pointer arithmetic on potentially fat
75    /// pointers. The pointer is cast to a sized pointee to utilize offset operations and then
76    /// recombined with its own original metadata.
77    ///
78    /// ```
79    /// #![feature(set_ptr_value)]
80    /// # use core::fmt::Debug;
81    /// let mut arr: [i32; 3] = [1, 2, 3];
82    /// let mut ptr = arr.as_mut_ptr() as *mut dyn Debug;
83    /// let thin = ptr as *mut u8;
84    /// unsafe {
85    ///     ptr = thin.add(8).with_metadata_of(ptr);
86    ///     # assert_eq!(*(ptr as *mut i32), 3);
87    ///     println!("{:?}", &*ptr); // will print "3"
88    /// }
89    /// ```
90    ///
91    /// # *Incorrect* usage
92    ///
93    /// The provenance from pointers is *not* combined. The result must only be used to refer to the
94    /// address allowed by `self`.
95    ///
96    /// ```rust,no_run
97    /// #![feature(set_ptr_value)]
98    /// let mut x = 0u32;
99    /// let mut y = 1u32;
100    ///
101    /// let x = (&mut x) as *mut u32;
102    /// let y = (&mut y) as *mut u32;
103    ///
104    /// let offset = (x as usize - y as usize) / 4;
105    /// let bad = x.wrapping_add(offset).with_metadata_of(y);
106    ///
107    /// // This dereference is UB. The pointer only has provenance for `x` but points to `y`.
    /// println!("{:?}", unsafe { &*bad });
    /// ```
109    #[unstable(feature = "set_ptr_value", issue = "75091")]
110    #[must_use = "returns a new pointer rather than modifying its argument"]
111    #[inline]
112    pub const fn with_metadata_of<U>(self, meta: *const U) -> *mut U
113    where
114        U: PointeeSized,
115    {
116        from_raw_parts_mut::<U>(self as *mut (), metadata(meta))
117    }
118
119    /// Changes constness without changing the type.
120    ///
121    /// This is a bit safer than `as` because it wouldn't silently change the type if the code is
122    /// refactored.
123    ///
124    /// While not strictly required (`*mut T` coerces to `*const T`), this is provided for symmetry
125    /// with [`cast_mut`] on `*const T` and may have documentation value if used instead of implicit
126    /// coercion.
127    ///
128    /// [`cast_mut`]: pointer::cast_mut
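    ///
    /// # Examples
    ///
    /// A minimal illustrative sketch:
    ///
    /// ```
    /// let mut x = 7u32;
    /// let m: *mut u32 = &mut x;
    /// let c: *const u32 = m.cast_const();
    /// assert_eq!(unsafe { *c }, 7);
    /// ```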
129    #[stable(feature = "ptr_const_cast", since = "1.65.0")]
130    #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")]
131    #[rustc_diagnostic_item = "ptr_cast_const"]
132    #[inline(always)]
133    pub const fn cast_const(self) -> *const T {
134        self as _
135    }
136
137    /// Gets the "address" portion of the pointer.
138    ///
139    /// This is similar to `self as usize`, except that the [provenance][crate::ptr#provenance] of
140    /// the pointer is discarded and not [exposed][crate::ptr#exposed-provenance]. This means that
141    /// casting the returned address back to a pointer yields a [pointer without
142    /// provenance][without_provenance_mut], which is undefined behavior to dereference. To properly
143    /// restore the lost information and obtain a dereferenceable pointer, use
144    /// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr].
145    ///
146    /// If using those APIs is not possible because there is no way to preserve a pointer with the
147    /// required provenance, then Strict Provenance might not be for you. Use pointer-integer casts
148    /// or [`expose_provenance`][pointer::expose_provenance] and [`with_exposed_provenance`][with_exposed_provenance]
149    /// instead. However, note that this makes your code less portable and less amenable to tools
150    /// that check for compliance with the Rust memory model.
151    ///
152    /// On most platforms this will produce a value with the same bytes as the original
153    /// pointer, because all the bytes are dedicated to describing the address.
154    /// Platforms which need to store additional information in the pointer may
155    /// perform a change of representation to produce a value containing only the address
156    /// portion of the pointer. What that means is up to the platform to define.
157    ///
158    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
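    ///
    /// # Examples
    ///
    /// An illustrative sketch pairing `addr` with [`with_addr`][pointer::with_addr]:
    ///
    /// ```
    /// let mut x = 7u8;
    /// let p: *mut u8 = &mut x;
    /// let a = p.addr();
    /// // Rebuilding a pointer from the same address (and `p`'s provenance) round-trips.
    /// assert_eq!(p.with_addr(a), p);
    /// ```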
159    #[must_use]
160    #[inline(always)]
161    #[stable(feature = "strict_provenance", since = "1.84.0")]
162    pub fn addr(self) -> usize {
163        // A pointer-to-integer transmute currently has exactly the right semantics: it returns the
164        // address without exposing the provenance. Note that this is *not* a stable guarantee about
165        // transmute semantics, it relies on sysroot crates having special status.
166        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
167        // provenance).
168        unsafe { mem::transmute(self.cast::<()>()) }
169    }
170
171    /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
172    /// [`with_exposed_provenance_mut`] and returns the "address" portion.
173    ///
174    /// This is equivalent to `self as usize`, which semantically discards provenance information.
175    /// Furthermore, this (like the `as` cast) has the implicit side-effect of marking the
176    /// provenance as 'exposed', so on platforms that support it you can later call
177    /// [`with_exposed_provenance_mut`] to reconstitute the original pointer including its provenance.
178    ///
179    /// Due to its inherent ambiguity, [`with_exposed_provenance_mut`] may not be supported by tools
180    /// that help you to stay conformant with the Rust memory model. It is recommended to use
181    /// [Strict Provenance][crate::ptr#strict-provenance] APIs such as [`with_addr`][pointer::with_addr]
182    /// wherever possible, in which case [`addr`][pointer::addr] should be used instead of `expose_provenance`.
183    ///
184    /// On most platforms this will produce a value with the same bytes as the original pointer,
185    /// because all the bytes are dedicated to describing the address. Platforms which need to store
186    /// additional information in the pointer may not support this operation, since the 'expose'
187    /// side-effect which is required for [`with_exposed_provenance_mut`] to work is typically not
188    /// available.
189    ///
190    /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
191    ///
192    /// [`with_exposed_provenance_mut`]: with_exposed_provenance_mut
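    ///
    /// # Examples
    ///
    /// An illustrative round trip through an exposed address (assuming a platform where the
    /// 'expose' side-effect is supported, as on all mainstream targets):
    ///
    /// ```
    /// use std::ptr;
    ///
    /// let mut x = 42u32;
    /// let p: *mut u32 = &mut x;
    /// let addr = p.expose_provenance();
    /// let q: *mut u32 = ptr::with_exposed_provenance_mut(addr);
    /// assert_eq!(unsafe { *q }, 42);
    /// ```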
193    #[inline(always)]
194    #[stable(feature = "exposed_provenance", since = "1.84.0")]
195    pub fn expose_provenance(self) -> usize {
196        self.cast::<()>() as usize
197    }
198
199    /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
200    /// `self`.
201    ///
202    /// This is similar to a `addr as *mut T` cast, but copies
203    /// the *provenance* of `self` to the new pointer.
204    /// This avoids the inherent ambiguity of the unary cast.
205    ///
206    /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset
207    /// `self` to the given address, and therefore has all the same capabilities and restrictions.
208    ///
209    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
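    ///
    /// # Examples
    ///
    /// A small sketch that moves a pointer within an array by manipulating its address only
    /// (`a` is just a stand-in buffer):
    ///
    /// ```
    /// let mut a = [5u8, 6, 7];
    /// let p: *mut u8 = a.as_mut_ptr();
    /// let q = p.with_addr(p.addr() + 1);
    /// // `q` keeps `p`'s provenance, so it may be used to access `a`.
    /// assert_eq!(unsafe { *q }, 6);
    /// ```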
210    #[must_use]
211    #[inline]
212    #[stable(feature = "strict_provenance", since = "1.84.0")]
213    pub fn with_addr(self, addr: usize) -> Self {
214        // This should probably be an intrinsic to avoid doing any sort of arithmetic, but
215        // meanwhile, we can implement it with `wrapping_offset`, which preserves the pointer's
216        // provenance.
217        let self_addr = self.addr() as isize;
218        let dest_addr = addr as isize;
219        let offset = dest_addr.wrapping_sub(self_addr);
220        self.wrapping_byte_offset(offset)
221    }
222
223    /// Creates a new pointer by mapping `self`'s address to a new one, preserving the original
224    /// pointer's [provenance][crate::ptr#provenance].
225    ///
226    /// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details.
227    ///
228    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
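    ///
    /// # Examples
    ///
    /// A minimal illustrative sketch:
    ///
    /// ```
    /// let mut a = [10u8, 20, 30, 40];
    /// let p: *mut u8 = a.as_mut_ptr();
    /// // Equivalent to `p.with_addr(p.addr() + 2)`.
    /// let q = p.map_addr(|addr| addr + 2);
    /// assert_eq!(unsafe { *q }, 30);
    /// ```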
229    #[must_use]
230    #[inline]
231    #[stable(feature = "strict_provenance", since = "1.84.0")]
232    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
233        self.with_addr(f(self.addr()))
234    }
235
236    /// Decompose a (possibly wide) pointer into its data pointer and metadata components.
237    ///
238    /// The pointer can be later reconstructed with [`from_raw_parts_mut`].
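    ///
    /// # Examples
    ///
    /// An illustrative sketch with a slice pointer, whose metadata is its length:
    ///
    /// ```
    /// #![feature(ptr_metadata)]
    ///
    /// let mut a = [1i32, 2, 3];
    /// let slice: *mut [i32] = &mut a[..];
    /// let (data, len) = slice.to_raw_parts();
    /// assert_eq!(len, 3);
    /// assert_eq!(data, slice.cast::<()>());
    /// ```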
239    #[unstable(feature = "ptr_metadata", issue = "81513")]
240    #[inline]
241    pub const fn to_raw_parts(self) -> (*mut (), <T as super::Pointee>::Metadata) {
242        (self.cast(), super::metadata(self))
243    }
244
245    /// Returns `None` if the pointer is null, or else returns a shared reference to
246    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`]
247    /// must be used instead.
248    ///
249    /// For the mutable counterpart see [`as_mut`].
250    ///
251    /// [`as_uninit_ref`]: pointer#method.as_uninit_ref-1
252    /// [`as_mut`]: #method.as_mut
253    ///
254    /// # Safety
255    ///
256    /// When calling this method, you have to ensure that *either* the pointer is null *or*
257    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
258    ///
259    /// # Panics during const evaluation
260    ///
261    /// This method will panic during const evaluation if the pointer cannot be
262    /// determined to be null or not. See [`is_null`] for more information.
263    ///
264    /// [`is_null`]: #method.is_null-1
265    ///
266    /// # Examples
267    ///
268    /// ```
269    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
270    ///
271    /// unsafe {
272    ///     if let Some(val_back) = ptr.as_ref() {
273    ///         println!("We got back the value: {val_back}!");
274    ///     }
275    /// }
276    /// ```
277    ///
278    /// # Null-unchecked version
279    ///
280    /// If you are sure the pointer can never be null and are looking for some kind of
281    /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
282    /// dereference the pointer directly.
283    ///
284    /// ```
285    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
286    ///
287    /// unsafe {
288    ///     let val_back = &*ptr;
289    ///     println!("We got back the value: {val_back}!");
290    /// }
291    /// ```
292    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
293    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
294    #[inline]
295    pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
296        // SAFETY: the caller must guarantee that `self` is valid for a
297        // reference if it isn't null.
298        if self.is_null() { None } else { unsafe { Some(&*self) } }
299    }
300
301    /// Returns a shared reference to the value behind the pointer.
302    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_ref`] must be used instead.
303    /// If the pointer may be null, but the value is known to have been initialized, [`as_ref`] must be used instead.
304    ///
305    /// For the mutable counterpart see [`as_mut_unchecked`].
306    ///
307    /// [`as_ref`]: #method.as_ref
308    /// [`as_uninit_ref`]: #method.as_uninit_ref
309    /// [`as_mut_unchecked`]: #method.as_mut_unchecked
310    ///
311    /// # Safety
312    ///
313    /// When calling this method, you have to ensure that the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
314    ///
315    /// # Examples
316    ///
317    /// ```
318    /// #![feature(ptr_as_ref_unchecked)]
319    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
320    ///
321    /// unsafe {
322    ///     println!("We got back the value: {}!", ptr.as_ref_unchecked());
323    /// }
324    /// ```
325    // FIXME: mention it in the docs for `as_ref` and `as_uninit_ref` once stabilized.
326    #[unstable(feature = "ptr_as_ref_unchecked", issue = "122034")]
327    #[inline]
328    #[must_use]
329    pub const unsafe fn as_ref_unchecked<'a>(self) -> &'a T {
330        // SAFETY: the caller must guarantee that `self` is valid for a reference
331        unsafe { &*self }
332    }
333
334    /// Returns `None` if the pointer is null, or else returns a shared reference to
335    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
336    /// that the value has to be initialized.
337    ///
338    /// For the mutable counterpart see [`as_uninit_mut`].
339    ///
340    /// [`as_ref`]: pointer#method.as_ref-1
341    /// [`as_uninit_mut`]: #method.as_uninit_mut
342    ///
343    /// # Safety
344    ///
345    /// When calling this method, you have to ensure that *either* the pointer is null *or*
346    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
347    /// Note that because the created reference is to `MaybeUninit<T>`, the
348    /// source pointer can point to uninitialized memory.
349    ///
350    /// # Panics during const evaluation
351    ///
352    /// This method will panic during const evaluation if the pointer cannot be
353    /// determined to be null or not. See [`is_null`] for more information.
354    ///
355    /// [`is_null`]: #method.is_null-1
356    ///
357    /// # Examples
358    ///
359    /// ```
360    /// #![feature(ptr_as_uninit)]
361    ///
362    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
363    ///
364    /// unsafe {
365    ///     if let Some(val_back) = ptr.as_uninit_ref() {
366    ///         println!("We got back the value: {}!", val_back.assume_init());
367    ///     }
368    /// }
369    /// ```
370    #[inline]
371    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
372    pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
373    where
374        T: Sized,
375    {
376        // SAFETY: the caller must guarantee that `self` meets all the
377        // requirements for a reference.
378        if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
379    }
380
381    #[doc = include_str!("./docs/offset.md")]
382    ///
383    /// # Examples
384    ///
385    /// ```
386    /// let mut s = [1, 2, 3];
387    /// let ptr: *mut u32 = s.as_mut_ptr();
388    ///
389    /// unsafe {
390    ///     assert_eq!(2, *ptr.offset(1));
391    ///     assert_eq!(3, *ptr.offset(2));
392    /// }
393    /// ```
394    #[stable(feature = "rust1", since = "1.0.0")]
395    #[must_use = "returns a new pointer rather than modifying its argument"]
396    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
397    #[inline(always)]
398    #[track_caller]
399    pub const unsafe fn offset(self, count: isize) -> *mut T
400    where
401        T: Sized,
402    {
403        #[inline]
404        #[rustc_allow_const_fn_unstable(const_eval_select)]
405        const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
406            // We can use const_eval_select here because this is only for UB checks.
407            const_eval_select!(
408                @capture { this: *const (), count: isize, size: usize } -> bool:
409                if const {
410                    true
411                } else {
412                    // `size` is the size of a Rust type, so we know that
413                    // `size <= isize::MAX` and thus `as` cast here is not lossy.
414                    let Some(byte_offset) = count.checked_mul(size as isize) else {
415                        return false;
416                    };
417                    let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
418                    !overflow
419                }
420            )
421        }
422
423        ub_checks::assert_unsafe_precondition!(
424            check_language_ub,
425            "ptr::offset requires the address calculation to not overflow",
426            (
427                this: *const () = self as *const (),
428                count: isize = count,
429                size: usize = size_of::<T>(),
430            ) => runtime_offset_nowrap(this, count, size)
431        );
432
433        // SAFETY: the caller must uphold the safety contract for `offset`.
434        // The obtained pointer is valid for writes since the caller must
435        // guarantee that it points to the same allocation as `self`.
436        unsafe { intrinsics::offset(self, count) }
437    }
438
439    /// Adds a signed offset in bytes to a pointer.
440    ///
441    /// `count` is in units of **bytes**.
442    ///
443    /// This is purely a convenience for casting to a `u8` pointer and
444    /// using [offset][pointer::offset] on it. See that method for documentation
445    /// and safety requirements.
446    ///
447    /// For non-`Sized` pointees this operation changes only the data pointer,
448    /// leaving the metadata untouched.
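    ///
    /// # Examples
    ///
    /// A small illustrative sketch over an array of `u16`:
    ///
    /// ```
    /// let mut a = [1u16, 2, 3];
    /// let p: *mut u16 = a.as_mut_ptr();
    /// unsafe {
    ///     // Two bytes forward is one `u16` element.
    ///     assert_eq!(*p.byte_offset(2), 2);
    /// }
    /// ```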
449    #[must_use]
450    #[inline(always)]
451    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
452    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
453    #[track_caller]
454    pub const unsafe fn byte_offset(self, count: isize) -> Self {
455        // SAFETY: the caller must uphold the safety contract for `offset`.
456        unsafe { self.cast::<u8>().offset(count).with_metadata_of(self) }
457    }
458
459    /// Adds a signed offset to a pointer using wrapping arithmetic.
460    ///
461    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
462    /// offset of `3 * size_of::<T>()` bytes.
463    ///
464    /// # Safety
465    ///
466    /// This operation itself is always safe, but using the resulting pointer is not.
467    ///
468    /// The resulting pointer "remembers" the [allocation] that `self` points to
469    /// (this is called "[Provenance](ptr/index.html#provenance)").
470    /// The pointer must not be used to read or write other allocations.
471    ///
472    /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
473    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
474    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
475    /// `x` and `y` point into the same allocation.
476    ///
477    /// Compared to [`offset`], this method basically delays the requirement of staying within the
478    /// same allocation: [`offset`] is immediate Undefined Behavior when crossing object
479    /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
480    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
481    /// can be optimized better and is thus preferable in performance-sensitive code.
482    ///
483    /// The delayed check only considers the value of the pointer that was dereferenced, not the
484    /// intermediate values used during the computation of the final result. For example,
485    /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
486    /// words, leaving the allocation and then re-entering it later is permitted.
487    ///
488    /// [`offset`]: #method.offset
489    /// [allocation]: crate::ptr#allocation
490    ///
491    /// # Examples
492    ///
493    /// ```
494    /// // Iterate using a raw pointer in increments of two elements
495    /// let mut data = [1u8, 2, 3, 4, 5];
496    /// let mut ptr: *mut u8 = data.as_mut_ptr();
497    /// let step = 2;
498    /// let end_rounded_up = ptr.wrapping_offset(6);
499    ///
500    /// while ptr != end_rounded_up {
501    ///     unsafe {
502    ///         *ptr = 0;
503    ///     }
504    ///     ptr = ptr.wrapping_offset(step);
505    /// }
506    /// assert_eq!(&data, &[0, 2, 0, 4, 0]);
507    /// ```
508    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
509    #[must_use = "returns a new pointer rather than modifying its argument"]
510    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
511    #[inline(always)]
512    pub const fn wrapping_offset(self, count: isize) -> *mut T
513    where
514        T: Sized,
515    {
516        // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
517        unsafe { intrinsics::arith_offset(self, count) as *mut T }
518    }
519
520    /// Adds a signed offset in bytes to a pointer using wrapping arithmetic.
521    ///
522    /// `count` is in units of **bytes**.
523    ///
524    /// This is purely a convenience for casting to a `u8` pointer and
525    /// using [wrapping_offset][pointer::wrapping_offset] on it. See that method
526    /// for documentation.
527    ///
528    /// For non-`Sized` pointees this operation changes only the data pointer,
529    /// leaving the metadata untouched.
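    ///
    /// # Examples
    ///
    /// An illustrative sketch; the arithmetic itself never needs `unsafe`, only the final read does:
    ///
    /// ```
    /// let mut a = [1u16, 2, 3];
    /// let p: *mut u16 = a.as_mut_ptr();
    /// let q = p.wrapping_byte_offset(4);
    /// assert_eq!(unsafe { *q }, 3);
    /// ```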
530    #[must_use]
531    #[inline(always)]
532    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
533    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
534    pub const fn wrapping_byte_offset(self, count: isize) -> Self {
535        self.cast::<u8>().wrapping_offset(count).with_metadata_of(self)
536    }
537
538    /// Masks out bits of the pointer according to a mask.
539    ///
    /// This is a convenience for `ptr.map_addr(|a| a & mask)`.
541    ///
542    /// For non-`Sized` pointees this operation changes only the data pointer,
543    /// leaving the metadata untouched.
544    ///
545    /// ## Examples
546    ///
547    /// ```
548    /// #![feature(ptr_mask)]
549    /// let mut v = 17_u32;
550    /// let ptr: *mut u32 = &mut v;
551    ///
552    /// // `u32` is 4 bytes aligned,
553    /// // which means that lower 2 bits are always 0.
554    /// let tag_mask = 0b11;
555    /// let ptr_mask = !tag_mask;
556    ///
557    /// // We can store something in these lower bits
558    /// let tagged_ptr = ptr.map_addr(|a| a | 0b10);
559    ///
560    /// // Get the "tag" back
561    /// let tag = tagged_ptr.addr() & tag_mask;
562    /// assert_eq!(tag, 0b10);
563    ///
564    /// // Note that `tagged_ptr` is unaligned, it's UB to read from/write to it.
565    /// // To get original pointer `mask` can be used:
566    /// let masked_ptr = tagged_ptr.mask(ptr_mask);
567    /// assert_eq!(unsafe { *masked_ptr }, 17);
568    ///
569    /// unsafe { *masked_ptr = 0 };
570    /// assert_eq!(v, 0);
571    /// ```
572    #[unstable(feature = "ptr_mask", issue = "98290")]
573    #[must_use = "returns a new pointer rather than modifying its argument"]
574    #[inline(always)]
575    pub fn mask(self, mask: usize) -> *mut T {
576        intrinsics::ptr_mask(self.cast::<()>(), mask).cast_mut().with_metadata_of(self)
577    }
578
579    /// Returns `None` if the pointer is null, or else returns a unique reference to
580    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_mut`]
581    /// must be used instead.
582    ///
583    /// For the shared counterpart see [`as_ref`].
584    ///
585    /// [`as_uninit_mut`]: #method.as_uninit_mut
586    /// [`as_ref`]: pointer#method.as_ref-1
587    ///
588    /// # Safety
589    ///
590    /// When calling this method, you have to ensure that *either*
591    /// the pointer is null *or*
592    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
593    ///
594    /// # Panics during const evaluation
595    ///
596    /// This method will panic during const evaluation if the pointer cannot be
597    /// determined to be null or not. See [`is_null`] for more information.
598    ///
599    /// [`is_null`]: #method.is_null-1
600    ///
601    /// # Examples
602    ///
603    /// ```
604    /// let mut s = [1, 2, 3];
605    /// let ptr: *mut u32 = s.as_mut_ptr();
606    /// let first_value = unsafe { ptr.as_mut().unwrap() };
607    /// *first_value = 4;
608    /// # assert_eq!(s, [4, 2, 3]);
609    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
610    /// ```
611    ///
612    /// # Null-unchecked version
613    ///
614    /// If you are sure the pointer can never be null and are looking for some kind of
615    /// `as_mut_unchecked` that returns the `&mut T` instead of `Option<&mut T>`, know that
616    /// you can dereference the pointer directly.
617    ///
618    /// ```
619    /// let mut s = [1, 2, 3];
620    /// let ptr: *mut u32 = s.as_mut_ptr();
621    /// let first_value = unsafe { &mut *ptr };
622    /// *first_value = 4;
623    /// # assert_eq!(s, [4, 2, 3]);
624    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
625    /// ```
626    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
627    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
628    #[inline]
629    pub const unsafe fn as_mut<'a>(self) -> Option<&'a mut T> {
        // SAFETY: the caller must guarantee that `self` is valid for
        // a mutable reference if it isn't null.
632        if self.is_null() { None } else { unsafe { Some(&mut *self) } }
633    }
634
635    /// Returns a unique reference to the value behind the pointer.
636    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_mut`] must be used instead.
637    /// If the pointer may be null, but the value is known to have been initialized, [`as_mut`] must be used instead.
638    ///
639    /// For the shared counterpart see [`as_ref_unchecked`].
640    ///
641    /// [`as_mut`]: #method.as_mut
642    /// [`as_uninit_mut`]: #method.as_uninit_mut
    /// [`as_ref_unchecked`]: pointer#method.as_ref_unchecked-1
644    ///
645    /// # Safety
646    ///
647    /// When calling this method, you have to ensure that
648    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
649    ///
650    /// # Examples
651    ///
652    /// ```
653    /// #![feature(ptr_as_ref_unchecked)]
654    /// let mut s = [1, 2, 3];
655    /// let ptr: *mut u32 = s.as_mut_ptr();
656    /// let first_value = unsafe { ptr.as_mut_unchecked() };
657    /// *first_value = 4;
658    /// # assert_eq!(s, [4, 2, 3]);
659    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
660    /// ```
661    // FIXME: mention it in the docs for `as_mut` and `as_uninit_mut` once stabilized.
662    #[unstable(feature = "ptr_as_ref_unchecked", issue = "122034")]
663    #[inline]
664    #[must_use]
665    pub const unsafe fn as_mut_unchecked<'a>(self) -> &'a mut T {
666        // SAFETY: the caller must guarantee that `self` is valid for a reference
667        unsafe { &mut *self }
668    }
669
670    /// Returns `None` if the pointer is null, or else returns a unique reference to
671    /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
672    /// that the value has to be initialized.
673    ///
674    /// For the shared counterpart see [`as_uninit_ref`].
675    ///
676    /// [`as_mut`]: #method.as_mut
677    /// [`as_uninit_ref`]: pointer#method.as_uninit_ref-1
678    ///
679    /// # Safety
680    ///
681    /// When calling this method, you have to ensure that *either* the pointer is null *or*
682    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
683    ///
684    /// # Panics during const evaluation
685    ///
686    /// This method will panic during const evaluation if the pointer cannot be
687    /// determined to be null or not. See [`is_null`] for more information.
688    ///
689    /// [`is_null`]: #method.is_null-1
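    ///
    /// # Examples
    ///
    /// A minimal sketch mirroring the [`as_uninit_ref`] example:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let mut x = 0u8;
    /// let ptr: *mut u8 = &mut x;
    ///
    /// unsafe {
    ///     if let Some(slot) = ptr.as_uninit_mut() {
    ///         slot.write(10);
    ///     }
    /// }
    /// assert_eq!(x, 10);
    /// ```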
690    #[inline]
691    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
692    pub const unsafe fn as_uninit_mut<'a>(self) -> Option<&'a mut MaybeUninit<T>>
693    where
694        T: Sized,
695    {
696        // SAFETY: the caller must guarantee that `self` meets all the
697        // requirements for a reference.
698        if self.is_null() { None } else { Some(unsafe { &mut *(self as *mut MaybeUninit<T>) }) }
699    }
700
701    /// Returns whether two pointers are guaranteed to be equal.
702    ///
703    /// At runtime this function behaves like `Some(self == other)`.
704    /// However, in some contexts (e.g., compile-time evaluation),
705    /// it is not always possible to determine equality of two pointers, so this function may
    /// spuriously return `None` for pointers that later actually turn out to have their equality known.
707    /// But when it returns `Some`, the pointers' equality is guaranteed to be known.
708    ///
709    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
710    /// version and unsafe code must not
711    /// rely on the result of this function for soundness. It is suggested to only use this function
712    /// for performance optimizations where spurious `None` return values by this function do not
713    /// affect the outcome, but just the performance.
714    /// The consequences of using this method to make runtime and compile-time code behave
715    /// differently have not been explored. This method should not be used to introduce such
716    /// differences, and it should also not be stabilized before we have a better understanding
717    /// of this issue.
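    ///
    /// # Examples
    ///
    /// An illustrative runtime sketch (at runtime the answer is always known):
    ///
    /// ```
    /// #![feature(const_raw_ptr_comparison)]
    ///
    /// let mut x = 0u8;
    /// let p: *mut u8 = &mut x;
    /// assert_eq!(p.guaranteed_eq(p), Some(true));
    /// ```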
718    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
719    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
720    #[inline]
721    pub const fn guaranteed_eq(self, other: *mut T) -> Option<bool>
722    where
723        T: Sized,
724    {
725        (self as *const T).guaranteed_eq(other as _)
726    }
727
    /// Returns whether two pointers are guaranteed to be unequal.
729    ///
730    /// At runtime this function behaves like `Some(self != other)`.
731    /// However, in some contexts (e.g., compile-time evaluation),
732    /// it is not always possible to determine inequality of two pointers, so this function may
    /// spuriously return `None` for pointers that later actually turn out to have their inequality known.
734    /// But when it returns `Some`, the pointers' inequality is guaranteed to be known.
735    ///
736    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
737    /// version and unsafe code must not
738    /// rely on the result of this function for soundness. It is suggested to only use this function
739    /// for performance optimizations where spurious `None` return values by this function do not
740    /// affect the outcome, but just the performance.
741    /// The consequences of using this method to make runtime and compile-time code behave
742    /// differently have not been explored. This method should not be used to introduce such
743    /// differences, and it should also not be stabilized before we have a better understanding
744    /// of this issue.
745    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
746    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
747    #[inline]
748    pub const fn guaranteed_ne(self, other: *mut T) -> Option<bool>
749    where
750        T: Sized,
751    {
752        (self as *const T).guaranteed_ne(other as _)
753    }
754
755    /// Calculates the distance between two pointers within the same allocation. The returned value is in
756    /// units of T: the distance in bytes divided by `size_of::<T>()`.
757    ///
758    /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
759    /// except that it has a lot more opportunities for UB, in exchange for the compiler
760    /// better understanding what you are doing.
761    ///
762    /// The primary motivation of this method is for computing the `len` of an array/slice
763    /// of `T` that you are currently representing as a "start" and "end" pointer
764    /// (and "end" is "one past the end" of the array).
765    /// In that case, `end.offset_from(start)` gets you the length of the array.
766    ///
767    /// All of the following safety requirements are trivially satisfied for this usecase.
768    ///
769    /// [`offset`]: pointer#method.offset-1
770    ///
771    /// # Safety
772    ///
773    /// If any of the following conditions are violated, the result is Undefined Behavior:
774    ///
775    /// * `self` and `origin` must either
776    ///
777    ///   * point to the same address, or
778    ///   * both be [derived from][crate::ptr#provenance] a pointer to the same [allocation], and the memory range between
779    ///     the two pointers must be in bounds of that object. (See below for an example.)
780    ///
781    /// * The distance between the pointers, in bytes, must be an exact multiple
782    ///   of the size of `T`.
783    ///
784    /// As a consequence, the absolute distance between the pointers, in bytes, computed on
785    /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
786    /// implied by the in-bounds requirement, and the fact that no allocation can be larger
787    /// than `isize::MAX` bytes.
788    ///
789    /// The requirement for pointers to be derived from the same allocation is primarily
790    /// needed for `const`-compatibility: the distance between pointers into *different* allocated
791    /// objects is not known at compile-time. However, the requirement also exists at
792    /// runtime and may be exploited by optimizations. If you wish to compute the difference between
793    /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
794    /// origin as isize) / size_of::<T>()`.
795    // FIXME: recommend `addr()` instead of `as usize` once that is stable.
796    ///
797    /// [`add`]: #method.add
798    /// [allocation]: crate::ptr#allocation
799    ///
800    /// # Panics
801    ///
802    /// This function panics if `T` is a Zero-Sized Type ("ZST").
803    ///
804    /// # Examples
805    ///
806    /// Basic usage:
807    ///
808    /// ```
809    /// let mut a = [0; 5];
810    /// let ptr1: *mut i32 = &mut a[1];
811    /// let ptr2: *mut i32 = &mut a[3];
812    /// unsafe {
813    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
814    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
815    ///     assert_eq!(ptr1.offset(2), ptr2);
816    ///     assert_eq!(ptr2.offset(-2), ptr1);
817    /// }
818    /// ```
819    ///
820    /// *Incorrect* usage:
821    ///
822    /// ```rust,no_run
823    /// let ptr1 = Box::into_raw(Box::new(0u8));
824    /// let ptr2 = Box::into_raw(Box::new(1u8));
825    /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
826    /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
827    /// let ptr2_other = (ptr1 as *mut u8).wrapping_offset(diff).wrapping_offset(1);
828    /// assert_eq!(ptr2 as usize, ptr2_other as usize);
829    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
830    /// // computing their offset is undefined behavior, even though
831    /// // they point to addresses that are in-bounds of the same object!
832    /// unsafe {
833    ///     let one = ptr2_other.offset_from(ptr2); // Undefined Behavior! ⚠️
834    /// }
835    /// ```
836    #[stable(feature = "ptr_offset_from", since = "1.47.0")]
837    #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
838    #[inline(always)]
839    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
840    pub const unsafe fn offset_from(self, origin: *const T) -> isize
841    where
842        T: Sized,
843    {
844        // SAFETY: the caller must uphold the safety contract for `offset_from`.
845        unsafe { (self as *const T).offset_from(origin) }
846    }
847
848    /// Calculates the distance between two pointers within the same allocation. The returned value is in
849    /// units of **bytes**.
850    ///
851    /// This is purely a convenience for casting to a `u8` pointer and
852    /// using [`offset_from`][pointer::offset_from] on it. See that method for
853    /// documentation and safety requirements.
854    ///
855    /// For non-`Sized` pointees this operation considers only the data pointers,
856    /// ignoring the metadata.
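    ///
    /// # Examples
    ///
    /// A small illustrative sketch:
    ///
    /// ```
    /// let mut a = [0u32; 4];
    /// let p: *mut u32 = a.as_mut_ptr();
    /// unsafe {
    ///     // Two `u32` elements apart is eight bytes.
    ///     assert_eq!(p.add(2).byte_offset_from(p), 8);
    /// }
    /// ```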
857    #[inline(always)]
858    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
859    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
860    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
861    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: *const U) -> isize {
862        // SAFETY: the caller must uphold the safety contract for `offset_from`.
863        unsafe { self.cast::<u8>().offset_from(origin.cast::<u8>()) }
864    }
865
866    /// Calculates the distance between two pointers within the same allocation, *where it's known that
867    /// `self` is equal to or greater than `origin`*. The returned value is in
868    /// units of T: the distance in bytes is divided by `size_of::<T>()`.
869    ///
870    /// This computes the same value that [`offset_from`](#method.offset_from)
871    /// would compute, but with the added precondition that the offset is
872    /// guaranteed to be non-negative.  This method is equivalent to
873    /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
874    /// but it provides slightly more information to the optimizer, which can
875    /// sometimes allow it to optimize slightly better with some backends.
876    ///
877    /// This method can be thought of as recovering the `count` that was passed
878    /// to [`add`](#method.add) (or, with the parameters in the other order,
879    /// to [`sub`](#method.sub)).  The following are all equivalent, assuming
880    /// that their safety preconditions are met:
881    /// ```rust
882    /// # unsafe fn blah(ptr: *mut i32, origin: *mut i32, count: usize) -> bool { unsafe {
883    /// ptr.offset_from_unsigned(origin) == count
884    /// # &&
885    /// origin.add(count) == ptr
886    /// # &&
887    /// ptr.sub(count) == origin
888    /// # } }
889    /// ```
890    ///
891    /// # Safety
892    ///
893    /// - The distance between the pointers must be non-negative (`self >= origin`)
894    ///
895    /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
896    ///   apply to this method as well; see it for the full details.
897    ///
898    /// Importantly, despite the return type of this method being able to represent
899    /// a larger offset, it's still *not permitted* to pass pointers which differ
900    /// by more than `isize::MAX` *bytes*.  As such, the result of this method will
901    /// always be less than or equal to `isize::MAX as usize`.
902    ///
903    /// # Panics
904    ///
905    /// This function panics if `T` is a Zero-Sized Type ("ZST").
906    ///
907    /// # Examples
908    ///
909    /// ```
910    /// let mut a = [0; 5];
911    /// let p: *mut i32 = a.as_mut_ptr();
912    /// unsafe {
913    ///     let ptr1: *mut i32 = p.add(1);
914    ///     let ptr2: *mut i32 = p.add(3);
915    ///
916    ///     assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
917    ///     assert_eq!(ptr1.add(2), ptr2);
918    ///     assert_eq!(ptr2.sub(2), ptr1);
919    ///     assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
920    /// }
921    ///
922    /// // This would be incorrect, as the pointers are not correctly ordered:
923    /// // ptr1.offset_from(ptr2)
924    /// ```
925    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
926    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
927    #[inline]
928    #[track_caller]
929    pub const unsafe fn offset_from_unsigned(self, origin: *const T) -> usize
930    where
931        T: Sized,
932    {
933        // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
934        unsafe { (self as *const T).offset_from_unsigned(origin) }
935    }
936
937    /// Calculates the distance between two pointers within the same allocation, *where it's known that
938    /// `self` is equal to or greater than `origin`*. The returned value is in
939    /// units of **bytes**.
940    ///
941    /// This is purely a convenience for casting to a `u8` pointer and
942    /// using [`offset_from_unsigned`][pointer::offset_from_unsigned] on it.
943    /// See that method for documentation and safety requirements.
944    ///
945    /// For non-`Sized` pointees this operation considers only the data pointers,
946    /// ignoring the metadata.
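    ///
    /// # Examples
    ///
    /// A small illustrative sketch:
    ///
    /// ```
    /// let mut a = [0u64; 3];
    /// let p: *mut u64 = a.as_mut_ptr();
    /// unsafe {
    ///     assert_eq!(p.add(2).byte_offset_from_unsigned(p), 16);
    /// }
    /// ```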
947    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
948    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
949    #[inline]
950    #[track_caller]
951    pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: *mut U) -> usize {
952        // SAFETY: the caller must uphold the safety contract for `byte_offset_from_unsigned`.
953        unsafe { (self as *const T).byte_offset_from_unsigned(origin) }
954    }
955
956    #[doc = include_str!("./docs/add.md")]
957    ///
958    /// # Examples
959    ///
960    /// ```
961    /// let mut s: String = "123".to_string();
962    /// let ptr: *mut u8 = s.as_mut_ptr();
963    ///
964    /// unsafe {
965    ///     assert_eq!('2', *ptr.add(1) as char);
966    ///     assert_eq!('3', *ptr.add(2) as char);
967    /// }
968    /// ```
969    #[stable(feature = "pointer_methods", since = "1.26.0")]
970    #[must_use = "returns a new pointer rather than modifying its argument"]
971    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
972    #[inline(always)]
973    #[track_caller]
974    pub const unsafe fn add(self, count: usize) -> Self
975    where
976        T: Sized,
977    {
978        #[cfg(debug_assertions)]
979        #[inline]
980        #[rustc_allow_const_fn_unstable(const_eval_select)]
981        const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
982            const_eval_select!(
983                @capture { this: *const (), count: usize, size: usize } -> bool:
984                if const {
985                    true
986                } else {
987                    let Some(byte_offset) = count.checked_mul(size) else {
988                        return false;
989                    };
990                    let (_, overflow) = this.addr().overflowing_add(byte_offset);
991                    byte_offset <= (isize::MAX as usize) && !overflow
992                }
993            )
994        }
995
996        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
997        ub_checks::assert_unsafe_precondition!(
998            check_language_ub,
999            "ptr::add requires that the address calculation does not overflow",
1000            (
1001                this: *const () = self as *const (),
1002                count: usize = count,
1003                size: usize = size_of::<T>(),
1004            ) => runtime_add_nowrap(this, count, size)
1005        );
1006
1007        // SAFETY: the caller must uphold the safety contract for `offset`.
1008        unsafe { intrinsics::offset(self, count) }
1009    }
1010
1011    /// Adds an unsigned offset in bytes to a pointer.
1012    ///
1013    /// `count` is in units of bytes.
1014    ///
1015    /// This is purely a convenience for casting to a `u8` pointer and
1016    /// using [add][pointer::add] on it. See that method for documentation
1017    /// and safety requirements.
1018    ///
1019    /// For non-`Sized` pointees this operation changes only the data pointer,
1020    /// leaving the metadata untouched.
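    ///
    /// # Examples
    ///
    /// A minimal illustrative sketch:
    ///
    /// ```
    /// let mut a = [1u16, 2, 3];
    /// let p: *mut u16 = a.as_mut_ptr();
    /// unsafe {
    ///     // Four bytes forward is two `u16` elements.
    ///     assert_eq!(*p.byte_add(4), 3);
    /// }
    /// ```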
1021    #[must_use]
1022    #[inline(always)]
1023    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1024    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1025    #[track_caller]
1026    pub const unsafe fn byte_add(self, count: usize) -> Self {
1027        // SAFETY: the caller must uphold the safety contract for `add`.
1028        unsafe { self.cast::<u8>().add(count).with_metadata_of(self) }
1029    }
1030
1031    /// Subtracts an unsigned offset from a pointer.
1032    ///
1033    /// This can only move the pointer backward (or not move it). If you need to move forward or
1034    /// backward depending on the value, then you might want [`offset`](#method.offset) instead
1035    /// which takes a signed offset.
1036    ///
1037    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1038    /// offset of `3 * size_of::<T>()` bytes.
1039    ///
1040    /// # Safety
1041    ///
1042    /// If any of the following conditions are violated, the result is Undefined Behavior:
1043    ///
1044    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
1045    ///   "wrapping around"), must fit in an `isize`.
1046    ///
1047    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
1048    ///   [allocation], and the entire memory range between `self` and the result must be in
1049    ///   bounds of that allocation. In particular, this range must not "wrap around" the edge
1050    ///   of the address space.
1051    ///
1052    /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
1053    /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len()).sub(vec.len())` (for
    /// `vec: Vec<T>`) is always safe.
1056    ///
1057    /// Consider using [`wrapping_sub`] instead if these constraints are
1058    /// difficult to satisfy. The only advantage of this method is that it
1059    /// enables more aggressive compiler optimizations.
1060    ///
1061    /// [`wrapping_sub`]: #method.wrapping_sub
1062    /// [allocation]: crate::ptr#allocation
1063    ///
1064    /// # Examples
1065    ///
1066    /// ```
1067    /// let s: &str = "123";
1068    ///
1069    /// unsafe {
1070    ///     let end: *const u8 = s.as_ptr().add(3);
1071    ///     assert_eq!('3', *end.sub(1) as char);
1072    ///     assert_eq!('2', *end.sub(2) as char);
1073    /// }
1074    /// ```
1075    #[stable(feature = "pointer_methods", since = "1.26.0")]
1076    #[must_use = "returns a new pointer rather than modifying its argument"]
1077    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
1078    #[inline(always)]
1079    #[track_caller]
1080    pub const unsafe fn sub(self, count: usize) -> Self
1081    where
1082        T: Sized,
1083    {
1084        #[cfg(debug_assertions)]
1085        #[inline]
1086        #[rustc_allow_const_fn_unstable(const_eval_select)]
1087        const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
1088            const_eval_select!(
1089                @capture { this: *const (), count: usize, size: usize } -> bool:
1090                if const {
1091                    true
1092                } else {
1093                    let Some(byte_offset) = count.checked_mul(size) else {
1094                        return false;
1095                    };
1096                    byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
1097                }
1098            )
1099        }
1100
1101        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
1102        ub_checks::assert_unsafe_precondition!(
1103            check_language_ub,
1104            "ptr::sub requires that the address calculation does not overflow",
1105            (
1106                this: *const () = self as *const (),
1107                count: usize = count,
1108                size: usize = size_of::<T>(),
1109            ) => runtime_sub_nowrap(this, count, size)
1110        );
1111
1112        if T::IS_ZST {
1113            // Pointer arithmetic does nothing when the pointee is a ZST.
1114            self
1115        } else {
1116            // SAFETY: the caller must uphold the safety contract for `offset`.
1117            // Because the pointee is *not* a ZST, that means that `count` is
1118            // at most `isize::MAX`, and thus the negation cannot overflow.
1119            unsafe { intrinsics::offset(self, intrinsics::unchecked_sub(0, count as isize)) }
1120        }
1121    }
1122
1123    /// Subtracts an unsigned offset in bytes from a pointer.
1124    ///
1125    /// `count` is in units of bytes.
1126    ///
1127    /// This is purely a convenience for casting to a `u8` pointer and
1128    /// using [sub][pointer::sub] on it. See that method for documentation
1129    /// and safety requirements.
1130    ///
1131    /// For non-`Sized` pointees this operation changes only the data pointer,
1132    /// leaving the metadata untouched.
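    ///
    /// # Examples
    ///
    /// A minimal illustrative sketch starting from a one-past-the-end pointer:
    ///
    /// ```
    /// let mut a = [1u16, 2, 3];
    /// unsafe {
    ///     let end: *mut u16 = a.as_mut_ptr().add(3);
    ///     assert_eq!(*end.byte_sub(2), 3);
    ///     assert_eq!(*end.byte_sub(4), 2);
    /// }
    /// ```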
1133    #[must_use]
1134    #[inline(always)]
1135    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1136    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1137    #[track_caller]
1138    pub const unsafe fn byte_sub(self, count: usize) -> Self {
1139        // SAFETY: the caller must uphold the safety contract for `sub`.
1140        unsafe { self.cast::<u8>().sub(count).with_metadata_of(self) }
1141    }
1142
1143    /// Adds an unsigned offset to a pointer using wrapping arithmetic.
1144    ///
1145    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1146    /// offset of `3 * size_of::<T>()` bytes.
1147    ///
1148    /// # Safety
1149    ///
1150    /// This operation itself is always safe, but using the resulting pointer is not.
1151    ///
1152    /// The resulting pointer "remembers" the [allocation] that `self` points to; it must not
1153    /// be used to read or write other allocations.
1154    ///
1155    /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
1156    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
1157    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
1158    /// `x` and `y` point into the same allocation.
1159    ///
1160    /// Compared to [`add`], this method basically delays the requirement of staying within the
1161    /// same allocation: [`add`] is immediate Undefined Behavior when crossing object
1162    /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
1163    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
1164    /// can be optimized better and is thus preferable in performance-sensitive code.
1165    ///
1166    /// The delayed check only considers the value of the pointer that was dereferenced, not the
1167    /// intermediate values used during the computation of the final result. For example,
1168    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
1169    /// allocation and then re-entering it later is permitted.
1170    ///
1171    /// [`add`]: #method.add
1172    /// [allocation]: crate::ptr#allocation
1173    ///
1174    /// # Examples
1175    ///
1176    /// ```
1177    /// // Iterate using a raw pointer in increments of two elements
1178    /// let data = [1u8, 2, 3, 4, 5];
1179    /// let mut ptr: *const u8 = data.as_ptr();
1180    /// let step = 2;
1181    /// let end_rounded_up = ptr.wrapping_add(6);
1182    ///
1183    /// // This loop prints "1, 3, 5, "
1184    /// while ptr != end_rounded_up {
1185    ///     unsafe {
1186    ///         print!("{}, ", *ptr);
1187    ///     }
1188    ///     ptr = ptr.wrapping_add(step);
1189    /// }
1190    /// ```
1191    #[stable(feature = "pointer_methods", since = "1.26.0")]
1192    #[must_use = "returns a new pointer rather than modifying its argument"]
1193    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
1194    #[inline(always)]
1195    pub const fn wrapping_add(self, count: usize) -> Self
1196    where
1197        T: Sized,
1198    {
1199        self.wrapping_offset(count as isize)
1200    }
1201
1202    /// Adds an unsigned offset in bytes to a pointer using wrapping arithmetic.
1203    ///
1204    /// `count` is in units of bytes.
1205    ///
1206    /// This is purely a convenience for casting to a `u8` pointer and
1207    /// using [wrapping_add][pointer::wrapping_add] on it. See that method for documentation.
1208    ///
1209    /// For non-`Sized` pointees this operation changes only the data pointer,
1210    /// leaving the metadata untouched.
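    ///
    /// # Examples
    ///
    /// An illustrative sketch:
    ///
    /// ```
    /// let mut a = [1u16, 2, 3];
    /// let p: *mut u16 = a.as_mut_ptr();
    /// let q = p.wrapping_byte_add(2);
    /// assert_eq!(unsafe { *q }, 2);
    /// ```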
1211    #[must_use]
1212    #[inline(always)]
1213    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1214    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1215    pub const fn wrapping_byte_add(self, count: usize) -> Self {
1216        self.cast::<u8>().wrapping_add(count).with_metadata_of(self)
1217    }
1218
1219    /// Subtracts an unsigned offset from a pointer using wrapping arithmetic.
1220    ///
1221    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1222    /// offset of `3 * size_of::<T>()` bytes.
1223    ///
1224    /// # Safety
1225    ///
1226    /// This operation itself is always safe, but using the resulting pointer is not.
1227    ///
1228    /// The resulting pointer "remembers" the [allocation] that `self` points to; it must not
1229    /// be used to read or write other allocations.
1230    ///
1231    /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
1232    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
1233    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
1234    /// `x` and `y` point into the same allocation.
1235    ///
1236    /// Compared to [`sub`], this method basically delays the requirement of staying within the
1237    /// same allocation: [`sub`] is immediate Undefined Behavior when crossing object
1238    /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
1239    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
1240    /// can be optimized better and is thus preferable in performance-sensitive code.
1241    ///
1242    /// The delayed check only considers the value of the pointer that was dereferenced, not the
1243    /// intermediate values used during the computation of the final result. For example,
1244    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
1245    /// allocation and then re-entering it later is permitted.
1246    ///
1247    /// [`sub`]: #method.sub
1248    /// [allocation]: crate::ptr#allocation
1249    ///
1250    /// # Examples
1251    ///
1252    /// ```
1253    /// // Iterate using a raw pointer in increments of two elements (backwards)
1254    /// let data = [1u8, 2, 3, 4, 5];
1255    /// let mut ptr: *const u8 = data.as_ptr();
1256    /// let start_rounded_down = ptr.wrapping_sub(2);
1257    /// ptr = ptr.wrapping_add(4);
1258    /// let step = 2;
1259    /// // This loop prints "5, 3, 1, "
1260    /// while ptr != start_rounded_down {
1261    ///     unsafe {
1262    ///         print!("{}, ", *ptr);
1263    ///     }
1264    ///     ptr = ptr.wrapping_sub(step);
1265    /// }
1266    /// ```
1267    #[stable(feature = "pointer_methods", since = "1.26.0")]
1268    #[must_use = "returns a new pointer rather than modifying its argument"]
1269    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
1270    #[inline(always)]
1271    pub const fn wrapping_sub(self, count: usize) -> Self
1272    where
1273        T: Sized,
1274    {
1275        self.wrapping_offset((count as isize).wrapping_neg())
1276    }
1277
1278    /// Subtracts an unsigned offset in bytes from a pointer using wrapping arithmetic.
1279    ///
1280    /// `count` is in units of bytes.
1281    ///
1282    /// This is purely a convenience for casting to a `u8` pointer and
1283    /// using [wrapping_sub][pointer::wrapping_sub] on it. See that method for documentation.
1284    ///
1285    /// For non-`Sized` pointees this operation changes only the data pointer,
1286    /// leaving the metadata untouched.
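    ///
    /// # Examples
    ///
    /// A minimal sketch, mirroring [`wrapping_byte_add`][pointer::wrapping_byte_add]:
    ///
    /// ```
    /// let mut x = 0u32;
    /// let ptr: *mut u32 = &mut x;
    /// // Stepping forward and then back by the same number of bytes yields
    /// // the original pointer.
    /// assert_eq!(ptr.wrapping_byte_add(2).wrapping_byte_sub(2), ptr);
    /// ```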
1287    #[must_use]
1288    #[inline(always)]
1289    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1290    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1291    pub const fn wrapping_byte_sub(self, count: usize) -> Self {
1292        self.cast::<u8>().wrapping_sub(count).with_metadata_of(self)
1293    }
1294
1295    /// Reads the value from `self` without moving it. This leaves the
1296    /// memory in `self` unchanged.
1297    ///
1298    /// See [`ptr::read`] for safety concerns and examples.
1299    ///
1300    /// [`ptr::read`]: crate::ptr::read()
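    ///
    /// # Examples
    ///
    /// A minimal sketch; the pointer here is valid, aligned, and points to an
    /// initialized value:
    ///
    /// ```
    /// let mut x = 12;
    /// let ptr: *mut i32 = &mut x;
    /// // SAFETY: `ptr` points to a live, initialized `i32`.
    /// assert_eq!(unsafe { ptr.read() }, 12);
    /// ```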
1301    #[stable(feature = "pointer_methods", since = "1.26.0")]
1302    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
1303    #[inline(always)]
1304    #[track_caller]
1305    pub const unsafe fn read(self) -> T
1306    where
1307        T: Sized,
1308    {
1309        // SAFETY: the caller must uphold the safety contract for `read`.
1310        unsafe { read(self) }
1311    }
1312
1313    /// Performs a volatile read of the value from `self` without moving it. This
1314    /// leaves the memory in `self` unchanged.
1315    ///
1316    /// Volatile operations are intended to act on I/O memory, and are guaranteed
1317    /// to not be elided or reordered by the compiler across other volatile
1318    /// operations.
1319    ///
1320    /// See [`ptr::read_volatile`] for safety concerns and examples.
1321    ///
1322    /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
1323    #[stable(feature = "pointer_methods", since = "1.26.0")]
1324    #[inline(always)]
1325    #[track_caller]
1326    pub unsafe fn read_volatile(self) -> T
1327    where
1328        T: Sized,
1329    {
1330        // SAFETY: the caller must uphold the safety contract for `read_volatile`.
1331        unsafe { read_volatile(self) }
1332    }
1333
1334    /// Reads the value from `self` without moving it. This leaves the
1335    /// memory in `self` unchanged.
1336    ///
1337    /// Unlike `read`, the pointer may be unaligned.
1338    ///
1339    /// See [`ptr::read_unaligned`] for safety concerns and examples.
1340    ///
1341    /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
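    ///
    /// # Examples
    ///
    /// A sketch reading a `u32` from an odd byte offset in a buffer, where no
    /// particular alignment can be assumed:
    ///
    /// ```
    /// let mut bytes = [0u8; 8];
    /// bytes[1..5].copy_from_slice(&0xdead_beef_u32.to_ne_bytes());
    /// // SAFETY: the read stays in-bounds of `bytes` and the four bytes are
    /// // initialized; `read_unaligned` tolerates the misaligned pointer.
    /// let v = unsafe { bytes.as_mut_ptr().add(1).cast::<u32>().read_unaligned() };
    /// assert_eq!(v, 0xdead_beef);
    /// ```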
1342    #[stable(feature = "pointer_methods", since = "1.26.0")]
1343    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
1344    #[inline(always)]
1345    #[track_caller]
1346    pub const unsafe fn read_unaligned(self) -> T
1347    where
1348        T: Sized,
1349    {
1350        // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
1351        unsafe { read_unaligned(self) }
1352    }
1353
1354    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1355    /// and destination may overlap.
1356    ///
1357    /// NOTE: this has the *same* argument order as [`ptr::copy`].
1358    ///
1359    /// See [`ptr::copy`] for safety concerns and examples.
1360    ///
1361    /// [`ptr::copy`]: crate::ptr::copy()
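    ///
    /// # Examples
    ///
    /// A minimal sketch copying the first two elements into another buffer:
    ///
    /// ```
    /// let mut src = [1, 2, 3];
    /// let mut dst = [0, 0, 0];
    /// // SAFETY: both pointers are valid and aligned for reads/writes of two `i32`s.
    /// unsafe { src.as_mut_ptr().copy_to(dst.as_mut_ptr(), 2) };
    /// assert_eq!(dst, [1, 2, 0]);
    /// ```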
1362    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1363    #[stable(feature = "pointer_methods", since = "1.26.0")]
1364    #[inline(always)]
1365    #[track_caller]
1366    pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
1367    where
1368        T: Sized,
1369    {
1370        // SAFETY: the caller must uphold the safety contract for `copy`.
1371        unsafe { copy(self, dest, count) }
1372    }
1373
1374    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1375    /// and destination may *not* overlap.
1376    ///
1377    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
1378    ///
1379    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1380    ///
1381    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
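    ///
    /// # Examples
    ///
    /// A minimal sketch; the two buffers are separate locals, so the regions
    /// cannot overlap:
    ///
    /// ```
    /// let mut src = [1u8, 2, 3, 4];
    /// let mut dst = [0u8; 4];
    /// // SAFETY: both regions are valid, aligned, and disjoint.
    /// unsafe { src.as_mut_ptr().copy_to_nonoverlapping(dst.as_mut_ptr(), 4) };
    /// assert_eq!(dst, [1, 2, 3, 4]);
    /// ```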
1382    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1383    #[stable(feature = "pointer_methods", since = "1.26.0")]
1384    #[inline(always)]
1385    #[track_caller]
1386    pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
1387    where
1388        T: Sized,
1389    {
1390        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1391        unsafe { copy_nonoverlapping(self, dest, count) }
1392    }
1393
1394    /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1395    /// and destination may overlap.
1396    ///
1397    /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
1398    ///
1399    /// See [`ptr::copy`] for safety concerns and examples.
1400    ///
1401    /// [`ptr::copy`]: crate::ptr::copy()
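    ///
    /// # Examples
    ///
    /// A minimal sketch filling a buffer from another slice:
    ///
    /// ```
    /// let src = [7, 8, 9];
    /// let mut dst = [0; 3];
    /// // SAFETY: both regions are valid and aligned for three `i32`s.
    /// unsafe { dst.as_mut_ptr().copy_from(src.as_ptr(), 3) };
    /// assert_eq!(dst, [7, 8, 9]);
    /// ```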
1402    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1403    #[stable(feature = "pointer_methods", since = "1.26.0")]
1404    #[inline(always)]
1405    #[track_caller]
1406    pub const unsafe fn copy_from(self, src: *const T, count: usize)
1407    where
1408        T: Sized,
1409    {
1410        // SAFETY: the caller must uphold the safety contract for `copy`.
1411        unsafe { copy(src, self, count) }
1412    }
1413
1414    /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1415    /// and destination may *not* overlap.
1416    ///
1417    /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
1418    ///
1419    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1420    ///
1421    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
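    ///
    /// # Examples
    ///
    /// A minimal sketch; `src` and `dst` are distinct allocations and therefore
    /// cannot overlap:
    ///
    /// ```
    /// let src = [10u16, 20, 30];
    /// let mut dst = [0u16; 3];
    /// // SAFETY: both regions are valid, aligned, and disjoint.
    /// unsafe { dst.as_mut_ptr().copy_from_nonoverlapping(src.as_ptr(), 3) };
    /// assert_eq!(dst, [10, 20, 30]);
    /// ```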
1422    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1423    #[stable(feature = "pointer_methods", since = "1.26.0")]
1424    #[inline(always)]
1425    #[track_caller]
1426    pub const unsafe fn copy_from_nonoverlapping(self, src: *const T, count: usize)
1427    where
1428        T: Sized,
1429    {
1430        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1431        unsafe { copy_nonoverlapping(src, self, count) }
1432    }
1433
1434    /// Executes the destructor (if any) of the pointed-to value.
1435    ///
1436    /// See [`ptr::drop_in_place`] for safety concerns and examples.
1437    ///
1438    /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
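    ///
    /// # Examples
    ///
    /// A sketch of running a destructor through a raw pointer; `ManuallyDrop`
    /// prevents a double drop when `s` later goes out of scope:
    ///
    /// ```
    /// use std::mem::ManuallyDrop;
    ///
    /// let mut s = ManuallyDrop::new(String::from("hello"));
    /// let ptr: *mut String = &mut *s;
    /// // SAFETY: the value is initialized and is not used or dropped again afterwards.
    /// unsafe { ptr.drop_in_place() };
    /// ```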
1439    #[stable(feature = "pointer_methods", since = "1.26.0")]
1440    #[inline(always)]
1441    pub unsafe fn drop_in_place(self) {
1442        // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
1443        unsafe { drop_in_place(self) }
1444    }
1445
1446    /// Overwrites a memory location with the given value without reading or
1447    /// dropping the old value.
1448    ///
1449    /// See [`ptr::write`] for safety concerns and examples.
1450    ///
1451    /// [`ptr::write`]: crate::ptr::write()
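    ///
    /// # Examples
    ///
    /// A minimal sketch; unlike a normal assignment, `write` does not read or
    /// drop the old value, which matters when the destination may be
    /// uninitialized:
    ///
    /// ```
    /// let mut x = 0u32;
    /// let ptr: *mut u32 = &mut x;
    /// // SAFETY: `ptr` is valid and properly aligned for a `u32`.
    /// unsafe { ptr.write(42) };
    /// assert_eq!(x, 42);
    /// ```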
1452    #[stable(feature = "pointer_methods", since = "1.26.0")]
1453    #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1454    #[inline(always)]
1455    #[track_caller]
1456    pub const unsafe fn write(self, val: T)
1457    where
1458        T: Sized,
1459    {
1460        // SAFETY: the caller must uphold the safety contract for `write`.
1461        unsafe { write(self, val) }
1462    }
1463
1464    /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
1465    /// bytes of memory starting at `self` to `val`.
1466    ///
1467    /// See [`ptr::write_bytes`] for safety concerns and examples.
1468    ///
1469    /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
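    ///
    /// # Examples
    ///
    /// A minimal sketch zeroing a small buffer:
    ///
    /// ```
    /// let mut buf = [1u8, 2, 3, 4];
    /// // SAFETY: the pointer is valid for writes of `buf.len()` bytes.
    /// unsafe { buf.as_mut_ptr().write_bytes(0, buf.len()) };
    /// assert_eq!(buf, [0; 4]);
    /// ```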
1470    #[doc(alias = "memset")]
1471    #[stable(feature = "pointer_methods", since = "1.26.0")]
1472    #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1473    #[inline(always)]
1474    #[track_caller]
1475    pub const unsafe fn write_bytes(self, val: u8, count: usize)
1476    where
1477        T: Sized,
1478    {
1479        // SAFETY: the caller must uphold the safety contract for `write_bytes`.
1480        unsafe { write_bytes(self, val, count) }
1481    }
1482
1483    /// Performs a volatile write of a memory location with the given value without
1484    /// reading or dropping the old value.
1485    ///
1486    /// Volatile operations are intended to act on I/O memory, and are guaranteed
1487    /// to not be elided or reordered by the compiler across other volatile
1488    /// operations.
1489    ///
1490    /// See [`ptr::write_volatile`] for safety concerns and examples.
1491    ///
1492    /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
1493    #[stable(feature = "pointer_methods", since = "1.26.0")]
1494    #[inline(always)]
1495    #[track_caller]
1496    pub unsafe fn write_volatile(self, val: T)
1497    where
1498        T: Sized,
1499    {
1500        // SAFETY: the caller must uphold the safety contract for `write_volatile`.
1501        unsafe { write_volatile(self, val) }
1502    }
1503
1504    /// Overwrites a memory location with the given value without reading or
1505    /// dropping the old value.
1506    ///
1507    /// Unlike `write`, the pointer may be unaligned.
1508    ///
1509    /// See [`ptr::write_unaligned`] for safety concerns and examples.
1510    ///
1511    /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
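    ///
    /// # Examples
    ///
    /// A sketch writing a `u32` at an odd byte offset, where the destination is
    /// not necessarily aligned:
    ///
    /// ```
    /// let mut bytes = [0u8; 8];
    /// let val = 0x0102_0304_u32;
    /// // SAFETY: the four bytes written stay in-bounds of `bytes`;
    /// // `write_unaligned` tolerates the misaligned destination.
    /// unsafe { bytes.as_mut_ptr().add(3).cast::<u32>().write_unaligned(val) };
    /// // SAFETY: same in-bounds region, now initialized.
    /// assert_eq!(unsafe { bytes.as_ptr().add(3).cast::<u32>().read_unaligned() }, val);
    /// ```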
1512    #[stable(feature = "pointer_methods", since = "1.26.0")]
1513    #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1514    #[inline(always)]
1515    #[track_caller]
1516    pub const unsafe fn write_unaligned(self, val: T)
1517    where
1518        T: Sized,
1519    {
1520        // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
1521        unsafe { write_unaligned(self, val) }
1522    }
1523
1524    /// Replaces the value at `self` with `src`, returning the old
1525    /// value, without dropping either.
1526    ///
1527    /// See [`ptr::replace`] for safety concerns and examples.
1528    ///
1529    /// [`ptr::replace`]: crate::ptr::replace()
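    ///
    /// # Examples
    ///
    /// A minimal sketch swapping in a new value and taking ownership of the old one:
    ///
    /// ```
    /// let mut v = vec![1, 2];
    /// let ptr: *mut Vec<i32> = &mut v;
    /// // SAFETY: `ptr` is valid, aligned, and points to an initialized value.
    /// let old = unsafe { ptr.replace(vec![3, 4, 5]) };
    /// assert_eq!(old, [1, 2]);
    /// assert_eq!(v, [3, 4, 5]);
    /// ```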
1530    #[stable(feature = "pointer_methods", since = "1.26.0")]
1531    #[rustc_const_stable(feature = "const_inherent_ptr_replace", since = "1.88.0")]
1532    #[inline(always)]
1533    pub const unsafe fn replace(self, src: T) -> T
1534    where
1535        T: Sized,
1536    {
1537        // SAFETY: the caller must uphold the safety contract for `replace`.
1538        unsafe { replace(self, src) }
1539    }
1540
1541    /// Swaps the values at two mutable locations of the same type, without
1542    /// deinitializing either. They may overlap, unlike `mem::swap` which is
1543    /// otherwise equivalent.
1544    ///
1545    /// See [`ptr::swap`] for safety concerns and examples.
1546    ///
1547    /// [`ptr::swap`]: crate::ptr::swap()
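    ///
    /// # Examples
    ///
    /// A minimal sketch swapping two distinct locals:
    ///
    /// ```
    /// let mut a = 1;
    /// let mut b = 2;
    /// let pa: *mut i32 = &mut a;
    /// let pb: *mut i32 = &mut b;
    /// // SAFETY: both pointers are valid and properly aligned.
    /// unsafe { pa.swap(pb) };
    /// assert_eq!((a, b), (2, 1));
    /// ```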
1548    #[stable(feature = "pointer_methods", since = "1.26.0")]
1549    #[rustc_const_stable(feature = "const_swap", since = "1.85.0")]
1550    #[inline(always)]
1551    pub const unsafe fn swap(self, with: *mut T)
1552    where
1553        T: Sized,
1554    {
1555        // SAFETY: the caller must uphold the safety contract for `swap`.
1556        unsafe { swap(self, with) }
1557    }
1558
1559    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
1560    /// `align`.
1561    ///
1562    /// If it is not possible to align the pointer, the implementation returns
1563    /// `usize::MAX`.
1564    ///
1565    /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
1566    /// used with the `wrapping_add` method.
1567    ///
1568    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
1569    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
1570    /// the returned offset is correct in all terms other than alignment.
1571    ///
1572    /// # Panics
1573    ///
1574    /// The function panics if `align` is not a power-of-two.
1575    ///
1576    /// # Examples
1577    ///
1578    /// Accessing adjacent `u8` as `u16`
1579    ///
1580    /// ```
1581    /// # unsafe {
1582    /// let mut x = [5_u8, 6, 7, 8, 9];
1583    /// let ptr = x.as_mut_ptr();
1584    /// let offset = ptr.align_offset(align_of::<u16>());
1585    ///
1586    /// if offset < x.len() - 1 {
1587    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
1588    ///     *u16_ptr = 0;
1589    ///
1590    ///     assert!(x == [0, 0, 7, 8, 9] || x == [5, 0, 0, 8, 9]);
1591    /// } else {
1592    ///     // while the pointer can be aligned via `offset`, it would point
1593    ///     // outside the allocation
1594    /// }
1595    /// # }
1596    /// ```
1597    #[must_use]
1598    #[inline]
1599    #[stable(feature = "align_offset", since = "1.36.0")]
1600    pub fn align_offset(self, align: usize) -> usize
1601    where
1602        T: Sized,
1603    {
1604        if !align.is_power_of_two() {
1605            panic!("align_offset: align is not a power-of-two");
1606        }
1607
1608        // SAFETY: `align` has been checked to be a power of 2 above
1609        let ret = unsafe { align_offset(self, align) };
1610
1611        // Inform Miri that we want to consider the resulting pointer to be suitably aligned.
1612        #[cfg(miri)]
1613        if ret != usize::MAX {
1614            intrinsics::miri_promise_symbolic_alignment(
1615                self.wrapping_add(ret).cast_const().cast(),
1616                align,
1617            );
1618        }
1619
1620        ret
1621    }
1622
1623    /// Returns whether the pointer is properly aligned for `T`.
1624    ///
1625    /// # Examples
1626    ///
1627    /// ```
1628    /// // On some platforms, the alignment of i32 is less than 4.
1629    /// #[repr(align(4))]
1630    /// struct AlignedI32(i32);
1631    ///
1632    /// let mut data = AlignedI32(42);
1633    /// let ptr = &mut data as *mut AlignedI32;
1634    ///
1635    /// assert!(ptr.is_aligned());
1636    /// assert!(!ptr.wrapping_byte_add(1).is_aligned());
1637    /// ```
1638    #[must_use]
1639    #[inline]
1640    #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
1641    pub fn is_aligned(self) -> bool
1642    where
1643        T: Sized,
1644    {
1645        self.is_aligned_to(align_of::<T>())
1646    }
1647
1648    /// Returns whether the pointer is aligned to `align`.
1649    ///
1650    /// For non-`Sized` pointees this operation considers only the data pointer,
1651    /// ignoring the metadata.
1652    ///
1653    /// # Panics
1654    ///
1655    /// The function panics if `align` is not a power-of-two (this includes 0).
1656    ///
1657    /// # Examples
1658    ///
1659    /// ```
1660    /// #![feature(pointer_is_aligned_to)]
1661    ///
1662    /// // On some platforms, the alignment of i32 is less than 4.
1663    /// #[repr(align(4))]
1664    /// struct AlignedI32(i32);
1665    ///
1666    /// let mut data = AlignedI32(42);
1667    /// let ptr = &mut data as *mut AlignedI32;
1668    ///
1669    /// assert!(ptr.is_aligned_to(1));
1670    /// assert!(ptr.is_aligned_to(2));
1671    /// assert!(ptr.is_aligned_to(4));
1672    ///
1673    /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
1674    /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
1675    ///
1676    /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
1677    /// ```
1678    #[must_use]
1679    #[inline]
1680    #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
1681    pub fn is_aligned_to(self, align: usize) -> bool {
1682        if !align.is_power_of_two() {
1683            panic!("is_aligned_to: align is not a power-of-two");
1684        }
1685
1686        self.addr() & (align - 1) == 0
1687    }
1688}
1689
1690impl<T> *mut [T] {
1691    /// Returns the length of a raw slice.
1692    ///
1693    /// The returned value is the number of **elements**, not the number of bytes.
1694    ///
1695    /// This function is safe, even when the raw slice cannot be cast to a slice
1696    /// reference because the pointer is null or unaligned.
1697    ///
1698    /// # Examples
1699    ///
1700    /// ```rust
1701    /// use std::ptr;
1702    ///
1703    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
1704    /// assert_eq!(slice.len(), 3);
1705    /// ```
1706    #[inline(always)]
1707    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
1708    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
1709    pub const fn len(self) -> usize {
1710        metadata(self)
1711    }
1712
1713    /// Returns `true` if the raw slice has a length of 0.
1714    ///
1715    /// # Examples
1716    ///
1717    /// ```
1718    /// use std::ptr;
1719    ///
1720    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
1721    /// assert!(!slice.is_empty());
1722    /// ```
1723    #[inline(always)]
1724    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
1725    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
1726    pub const fn is_empty(self) -> bool {
1727        self.len() == 0
1728    }
1729
1730    /// Gets a raw, mutable pointer to the underlying array.
1731    ///
1732    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
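    ///
    /// # Examples
    ///
    /// A sketch; the conversion succeeds only when the slice length matches `N`
    /// exactly:
    ///
    /// ```
    /// #![feature(slice_as_array)]
    ///
    /// let mut v = [1, 2, 3];
    /// let ptr = &mut v as *mut [_];
    ///
    /// assert!(ptr.as_mut_array::<3>().is_some());
    /// assert!(ptr.as_mut_array::<4>().is_none());
    /// ```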
1733    #[unstable(feature = "slice_as_array", issue = "133508")]
1734    #[inline]
1735    #[must_use]
1736    pub const fn as_mut_array<const N: usize>(self) -> Option<*mut [T; N]> {
1737        if self.len() == N {
1738            let me = self.as_mut_ptr() as *mut [T; N];
1739            Some(me)
1740        } else {
1741            None
1742        }
1743    }
1744
1745    /// Divides one mutable raw slice into two at an index.
1746    ///
1747    /// The first will contain all indices from `[0, mid)` (excluding
1748    /// the index `mid` itself) and the second will contain all
1749    /// indices from `[mid, len)` (excluding the index `len` itself).
1750    ///
1751    /// # Panics
1752    ///
1753    /// Panics if `mid > len`.
1754    ///
1755    /// # Safety
1756    ///
1757    /// `mid` must be [in-bounds] of the underlying [allocation].
1758    /// This means `self` must be dereferenceable and span a single allocation
1759    /// that is at least `mid * size_of::<T>()` bytes long. Not upholding these
1760    /// requirements is *[undefined behavior]* even if the resulting pointers are not used.
1761    ///
1762    /// Since `len` being in-bounds is not a safety invariant of `*mut [T]`, the
1763    /// safety requirements of this method are the same as for [`split_at_mut_unchecked`].
1764    /// The explicit bounds check is only as useful as `len` is correct.
1765    ///
1766    /// [`split_at_mut_unchecked`]: #method.split_at_mut_unchecked
1767    /// [in-bounds]: #method.add
1768    /// [allocation]: crate::ptr#allocation
1769    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1770    ///
1771    /// # Examples
1772    ///
1773    /// ```
1774    /// #![feature(raw_slice_split)]
1775    /// #![feature(slice_ptr_get)]
1776    ///
1777    /// let mut v = [1, 0, 3, 0, 5, 6];
1778    /// let ptr = &mut v as *mut [_];
1779    /// unsafe {
1780    ///     let (left, right) = ptr.split_at_mut(2);
1781    ///     assert_eq!(&*left, [1, 0]);
1782    ///     assert_eq!(&*right, [3, 0, 5, 6]);
1783    /// }
1784    /// ```
1785    #[inline(always)]
1786    #[track_caller]
1787    #[unstable(feature = "raw_slice_split", issue = "95595")]
1788    pub unsafe fn split_at_mut(self, mid: usize) -> (*mut [T], *mut [T]) {
1789        assert!(mid <= self.len());
1790        // SAFETY: The assert above is only a safety-net as long as `self.len()` is correct
1791        // The actual safety requirements of this function are the same as for `split_at_mut_unchecked`
1792        unsafe { self.split_at_mut_unchecked(mid) }
1793    }
1794
1795    /// Divides one mutable raw slice into two at an index, without doing bounds checking.
1796    ///
1797    /// The first will contain all indices from `[0, mid)` (excluding
1798    /// the index `mid` itself) and the second will contain all
1799    /// indices from `[mid, len)` (excluding the index `len` itself).
1800    ///
1801    /// # Safety
1802    ///
1803    /// `mid` must be [in-bounds] of the underlying [allocation].
1804    /// This means `self` must be dereferenceable and span a single allocation
1805    /// that is at least `mid * size_of::<T>()` bytes long. Not upholding these
1806    /// requirements is *[undefined behavior]* even if the resulting pointers are not used.
1807    ///
1808    /// [in-bounds]: #method.add
1810    /// [allocation]: crate::ptr#allocation
1811    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1812    ///
1813    /// # Examples
1814    ///
1815    /// ```
1816    /// #![feature(raw_slice_split)]
1817    ///
1818    /// let mut v = [1, 0, 3, 0, 5, 6];
1819    /// // scoped to restrict the lifetime of the borrows
1820    /// unsafe {
1821    ///     let ptr = &mut v as *mut [_];
1822    ///     let (left, right) = ptr.split_at_mut_unchecked(2);
1823    ///     assert_eq!(&*left, [1, 0]);
1824    ///     assert_eq!(&*right, [3, 0, 5, 6]);
1825    ///     (&mut *left)[1] = 2;
1826    ///     (&mut *right)[1] = 4;
1827    /// }
1828    /// assert_eq!(v, [1, 2, 3, 4, 5, 6]);
1829    /// ```
1830    #[inline(always)]
1831    #[unstable(feature = "raw_slice_split", issue = "95595")]
1832    pub unsafe fn split_at_mut_unchecked(self, mid: usize) -> (*mut [T], *mut [T]) {
1833        let len = self.len();
1834        let ptr = self.as_mut_ptr();
1835
1836        // SAFETY: Caller must pass a valid pointer and an index that is in-bounds.
1837        let tail = unsafe { ptr.add(mid) };
1838        (
1839            crate::ptr::slice_from_raw_parts_mut(ptr, mid),
1840            crate::ptr::slice_from_raw_parts_mut(tail, len - mid),
1841        )
1842    }
1843
1844    /// Returns a raw pointer to the slice's buffer.
1845    ///
1846    /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
1847    ///
1848    /// # Examples
1849    ///
1850    /// ```rust
1851    /// #![feature(slice_ptr_get)]
1852    /// use std::ptr;
1853    ///
1854    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
1855    /// assert_eq!(slice.as_mut_ptr(), ptr::null_mut());
1856    /// ```
1857    #[inline(always)]
1858    #[unstable(feature = "slice_ptr_get", issue = "74265")]
1859    pub const fn as_mut_ptr(self) -> *mut T {
1860        self as *mut T
1861    }
1862
1863    /// Returns a raw pointer to an element or subslice, without doing bounds
1864    /// checking.
1865    ///
1866    /// Calling this method with an [out-of-bounds index] or when `self` is not dereferenceable
1867    /// is *[undefined behavior]* even if the resulting pointer is not used.
1868    ///
1869    /// [out-of-bounds index]: #method.add
1870    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1871    ///
1872    /// # Examples
1873    ///
1874    /// ```
1875    /// #![feature(slice_ptr_get)]
1876    ///
1877    /// let x = &mut [1, 2, 4] as *mut [i32];
1878    ///
1879    /// unsafe {
1880    ///     assert_eq!(x.get_unchecked_mut(1), x.as_mut_ptr().add(1));
1881    /// }
1882    /// ```
1883    #[unstable(feature = "slice_ptr_get", issue = "74265")]
1884    #[inline(always)]
1885    pub unsafe fn get_unchecked_mut<I>(self, index: I) -> *mut I::Output
1886    where
1887        I: SliceIndex<[T]>,
1888    {
1889        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
1890        unsafe { index.get_unchecked_mut(self) }
1891    }
1892
1893    #[doc = include_str!("docs/as_uninit_slice.md")]
1894    ///
1895    /// # See Also
1896    /// For the mutable counterpart see [`as_uninit_slice_mut`](pointer::as_uninit_slice_mut).
1897    #[inline]
1898    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1899    pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
1900        if self.is_null() {
1901            None
1902        } else {
1903            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
1904            Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
1905        }
1906    }
1907
1908    /// Returns `None` if the pointer is null, or else returns a unique slice to
1909    /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
1910    /// the value to be initialized.
1911    ///
1912    /// For the shared counterpart see [`as_uninit_slice`].
1913    ///
1914    /// [`as_mut`]: #method.as_mut
1915    /// [`as_uninit_slice`]: #method.as_uninit_slice-1
1916    ///
1917    /// # Safety
1918    ///
1919    /// When calling this method, you have to ensure that *either* the pointer is null *or*
1920    /// all of the following is true:
1921    ///
1922    /// * The pointer must be [valid] for reads and writes for `ptr.len() * size_of::<T>()`
1923    ///   many bytes, and it must be properly aligned. This means in particular:
1924    ///
1925    ///     * The entire memory range of this slice must be contained within a single [allocation]!
1926    ///       Slices can never span across multiple allocations.
1927    ///
1928    ///     * The pointer must be aligned even for zero-length slices. One
1929    ///       reason for this is that enum layout optimizations may rely on references
1930    ///       (including slices of any length) being aligned and non-null to distinguish
1931    ///       them from other data. You can obtain a pointer that is usable as `data`
1932    ///       for zero-length slices using [`NonNull::dangling()`].
1933    ///
1934    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1935    ///   See the safety documentation of [`pointer::offset`].
1936    ///
1937    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1938    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1939    ///   In particular, while this reference exists, the memory the pointer points to must
1940    ///   not get accessed (read or written) through any other pointer.
1941    ///
1942    /// This applies even if the result of this method is unused!
1943    ///
1944    /// See also [`slice::from_raw_parts_mut`][].
1945    ///
1946    /// [valid]: crate::ptr#safety
1947    /// [allocation]: crate::ptr#allocation
1948    ///
1949    /// # Panics during const evaluation
1950    ///
1951    /// This method will panic during const evaluation if the pointer cannot be
1952    /// determined to be null or not. See [`is_null`] for more information.
1953    ///
1954    /// [`is_null`]: #method.is_null-1
1955    #[inline]
1956    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1957    pub const unsafe fn as_uninit_slice_mut<'a>(self) -> Option<&'a mut [MaybeUninit<T>]> {
1958        if self.is_null() {
1959            None
1960        } else {
1961            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
1962            Some(unsafe { slice::from_raw_parts_mut(self as *mut MaybeUninit<T>, self.len()) })
1963        }
1964    }
1965}
1966
1967impl<T, const N: usize> *mut [T; N] {
1968    /// Returns a raw pointer to the array's buffer.
1969    ///
1970    /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
1971    ///
1972    /// # Examples
1973    ///
1974    /// ```rust
1975    /// #![feature(array_ptr_get)]
1976    /// use std::ptr;
1977    ///
1978    /// let arr: *mut [i8; 3] = ptr::null_mut();
1979    /// assert_eq!(arr.as_mut_ptr(), ptr::null_mut());
1980    /// ```
1981    #[inline]
1982    #[unstable(feature = "array_ptr_get", issue = "119834")]
1983    pub const fn as_mut_ptr(self) -> *mut T {
1984        self as *mut T
1985    }
1986
1987    /// Returns a raw pointer to a mutable slice containing the entire array.
1988    ///
1989    /// # Examples
1990    ///
1991    /// ```
1992    /// #![feature(array_ptr_get)]
1993    ///
1994    /// let mut arr = [1, 2, 5];
1995    /// let ptr: *mut [i32; 3] = &mut arr;
1996    /// unsafe {
1997    ///     (&mut *ptr.as_mut_slice())[..2].copy_from_slice(&[3, 4]);
1998    /// }
1999    /// assert_eq!(arr, [3, 4, 5]);
2000    /// ```
2001    #[inline]
2002    #[unstable(feature = "array_ptr_get", issue = "119834")]
2003    pub const fn as_mut_slice(self) -> *mut [T] {
2004        self
2005    }
2006}
2007
2008/// Pointer equality is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method.
2009#[stable(feature = "rust1", since = "1.0.0")]
2010impl<T: PointeeSized> PartialEq for *mut T {
2011    #[inline(always)]
2012    #[allow(ambiguous_wide_pointer_comparisons)]
2013    fn eq(&self, other: &*mut T) -> bool {
2014        *self == *other
2015    }
2016}
2017
2018/// Pointer equality is an equivalence relation.
2019#[stable(feature = "rust1", since = "1.0.0")]
2020impl<T: PointeeSized> Eq for *mut T {}
2021
2022/// Pointer comparison is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method.
2023#[stable(feature = "rust1", since = "1.0.0")]
2024impl<T: PointeeSized> Ord for *mut T {
2025    #[inline]
2026    #[allow(ambiguous_wide_pointer_comparisons)]
2027    fn cmp(&self, other: &*mut T) -> Ordering {
2028        if self < other {
2029            Less
2030        } else if self == other {
2031            Equal
2032        } else {
2033            Greater
2034        }
2035    }
2036}
2037
2038/// Pointer comparison is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method.
2039#[stable(feature = "rust1", since = "1.0.0")]
2040impl<T: PointeeSized> PartialOrd for *mut T {
2041    #[inline(always)]
2042    #[allow(ambiguous_wide_pointer_comparisons)]
2043    fn partial_cmp(&self, other: &*mut T) -> Option<Ordering> {
2044        Some(self.cmp(other))
2045    }
2046
2047    #[inline(always)]
2048    #[allow(ambiguous_wide_pointer_comparisons)]
2049    fn lt(&self, other: &*mut T) -> bool {
2050        *self < *other
2051    }
2052
2053    #[inline(always)]
2054    #[allow(ambiguous_wide_pointer_comparisons)]
2055    fn le(&self, other: &*mut T) -> bool {
2056        *self <= *other
2057    }
2058
2059    #[inline(always)]
2060    #[allow(ambiguous_wide_pointer_comparisons)]
2061    fn gt(&self, other: &*mut T) -> bool {
2062        *self > *other
2063    }
2064
2065    #[inline(always)]
2066    #[allow(ambiguous_wide_pointer_comparisons)]
2067    fn ge(&self, other: &*mut T) -> bool {
2068        *self >= *other
2069    }
2070}
2071
2072#[stable(feature = "raw_ptr_default", since = "1.88.0")]
2073impl<T: ?Sized + Thin> Default for *mut T {
2074    /// Returns the default value of [`null_mut()`][crate::ptr::null_mut].
2075    fn default() -> Self {
2076        crate::ptr::null_mut()
2077    }
2078}