core/ptr/non_null.rs
1use crate::cmp::Ordering;
2use crate::marker::{PointeeSized, Unsize};
3use crate::mem::{MaybeUninit, SizedTypeProperties};
4use crate::num::NonZero;
5use crate::ops::{CoerceUnsized, DispatchFromDyn};
6use crate::pin::PinCoerceUnsized;
7use crate::ptr::Unique;
8use crate::slice::{self, SliceIndex};
9use crate::ub_checks::assert_unsafe_precondition;
10use crate::{fmt, hash, intrinsics, mem, ptr};
11
12/// `*mut T` but non-zero and [covariant].
13///
14/// This is often the correct thing to use when building data structures using
15/// raw pointers, but is ultimately more dangerous to use because of its additional
16/// properties. If you're not sure if you should use `NonNull<T>`, just use `*mut T`!
17///
18/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
19/// is never dereferenced. This is so that enums may use this forbidden value
20/// as a discriminant -- `Option<NonNull<T>>` has the same size as `*mut T`.
21/// However the pointer may still dangle if it isn't dereferenced.
22///
23/// Unlike `*mut T`, `NonNull<T>` is covariant over `T`. This is usually the correct
24/// choice for most data structures and safe abstractions, such as `Box`, `Rc`, `Arc`, `Vec`,
25/// and `LinkedList`.
26///
27/// In rare cases, if your type exposes a way to mutate the value of `T` through a `NonNull<T>`,
28/// and you need to prevent unsoundness from variance (for example, if `T` could be a reference
29/// with a shorter lifetime), you should add a field to make your type invariant, such as
30/// `PhantomData<Cell<T>>` or `PhantomData<&'a mut T>`.
31///
32/// Example of a type that must be invariant:
33/// ```rust
34/// use std::cell::Cell;
35/// use std::marker::PhantomData;
36/// struct Invariant<T> {
37/// ptr: std::ptr::NonNull<T>,
38/// _invariant: PhantomData<Cell<T>>,
39/// }
40/// ```
41///
42/// Notice that `NonNull<T>` has a `From` instance for `&T`. However, this does
43/// not change the fact that mutating through a (pointer derived from a) shared
44/// reference is undefined behavior unless the mutation happens inside an
45/// [`UnsafeCell<T>`]. The same goes for creating a mutable reference from a shared
46/// reference. When using this `From` instance without an `UnsafeCell<T>`,
47/// it is your responsibility to ensure that `as_mut` is never called, and `as_ptr`
48/// is never used for mutation.
49///
50/// # Representation
51///
52/// Thanks to the [null pointer optimization],
53/// `NonNull<T>` and `Option<NonNull<T>>`
54/// are guaranteed to have the same size and alignment:
55///
56/// ```
57/// use std::ptr::NonNull;
58///
59/// assert_eq!(size_of::<NonNull<i16>>(), size_of::<Option<NonNull<i16>>>());
60/// assert_eq!(align_of::<NonNull<i16>>(), align_of::<Option<NonNull<i16>>>());
61///
62/// assert_eq!(size_of::<NonNull<str>>(), size_of::<Option<NonNull<str>>>());
63/// assert_eq!(align_of::<NonNull<str>>(), align_of::<Option<NonNull<str>>>());
64/// ```
65///
66/// [covariant]: https://doc.rust-lang.org/reference/subtyping.html
67/// [`PhantomData`]: crate::marker::PhantomData
68/// [`UnsafeCell<T>`]: crate::cell::UnsafeCell
69/// [null pointer optimization]: crate::option#representation
70#[stable(feature = "nonnull", since = "1.25.0")]
71#[repr(transparent)]
72#[rustc_layout_scalar_valid_range_start(1)]
73#[rustc_nonnull_optimization_guaranteed]
74#[rustc_diagnostic_item = "NonNull"]
75pub struct NonNull<T: PointeeSized> {
76 // Remember to use `.as_ptr()` instead of `.pointer`, as field projecting to
77 // this is banned by <https://github.com/rust-lang/compiler-team/issues/807>.
78 pointer: *const T,
79}
80
81/// `NonNull` pointers are not `Send` because the data they reference may be aliased.
82// N.B., this impl is unnecessary, but should provide better error messages.
83#[stable(feature = "nonnull", since = "1.25.0")]
84impl<T: PointeeSized> !Send for NonNull<T> {}
85
86/// `NonNull` pointers are not `Sync` because the data they reference may be aliased.
87// N.B., this impl is unnecessary, but should provide better error messages.
88#[stable(feature = "nonnull", since = "1.25.0")]
89impl<T: PointeeSized> !Sync for NonNull<T> {}
90
91impl<T: Sized> NonNull<T> {
92 /// Creates a pointer with the given address and no [provenance][crate::ptr#provenance].
93 ///
94 /// For more details, see the equivalent method on a raw pointer, [`ptr::without_provenance_mut`].
95 ///
96 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
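    ///
    /// # Examples
    ///
    /// A minimal sketch; the address `0x1000` is arbitrary and only serves to show the
    /// round trip through [`addr`][NonNull::addr]:
    ///
    /// ```
    /// use std::num::NonZero;
    /// use std::ptr::NonNull;
    ///
    /// let addr = NonZero::new(0x1000usize).unwrap();
    /// let ptr = NonNull::<u8>::without_provenance(addr);
    /// assert_eq!(ptr.addr(), addr);
    /// // The pointer has no provenance, so it must never be dereferenced.
    /// ```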
97 #[stable(feature = "nonnull_provenance", since = "1.89.0")]
98 #[rustc_const_stable(feature = "nonnull_provenance", since = "1.89.0")]
99 #[must_use]
100 #[inline]
101 pub const fn without_provenance(addr: NonZero<usize>) -> Self {
102 let pointer = crate::ptr::without_provenance(addr.get());
103 // SAFETY: we know `addr` is non-zero.
104 unsafe { NonNull { pointer } }
105 }
106
107 /// Creates a new `NonNull` that is dangling, but well-aligned.
108 ///
109 /// This is useful for initializing types which lazily allocate, like
110 /// `Vec::new` does.
111 ///
112 /// Note that the pointer value may potentially represent a valid pointer to
113 /// a `T`, which means this must not be used as a "not yet initialized"
114 /// sentinel value. Types that lazily allocate must track initialization by
115 /// some other means.
116 ///
117 /// # Examples
118 ///
119 /// ```
120 /// use std::ptr::NonNull;
121 ///
122 /// let ptr = NonNull::<u32>::dangling();
123 /// // Important: don't try to access the value of `ptr` without
124 /// // initializing it first! The pointer is not null but isn't valid either!
125 /// ```
126 #[stable(feature = "nonnull", since = "1.25.0")]
127 #[rustc_const_stable(feature = "const_nonnull_dangling", since = "1.36.0")]
128 #[must_use]
129 #[inline]
130 pub const fn dangling() -> Self {
131 let align = crate::ptr::Alignment::of::<T>();
132 NonNull::without_provenance(align.as_nonzero())
133 }
134
135 /// Converts an address back to a mutable pointer, picking up some previously 'exposed'
136 /// [provenance][crate::ptr#provenance].
137 ///
138 /// For more details, see the equivalent method on a raw pointer, [`ptr::with_exposed_provenance_mut`].
139 ///
140 /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
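    ///
    /// # Examples
    ///
    /// A minimal round-trip sketch: expose a pointer's provenance, then rebuild a usable
    /// pointer from the bare address:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::from_mut(&mut x);
    /// let addr = ptr.expose_provenance();
    /// let ptr2 = NonNull::<u32>::with_exposed_provenance(addr);
    /// // SAFETY: `ptr2` picks up the previously exposed provenance, so it is valid for writes.
    /// unsafe { ptr2.write(5) };
    /// assert_eq!(x, 5);
    /// ```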
141 #[stable(feature = "nonnull_provenance", since = "1.89.0")]
142 #[inline]
143 pub fn with_exposed_provenance(addr: NonZero<usize>) -> Self {
144 // SAFETY: we know `addr` is non-zero.
145 unsafe {
146 let ptr = crate::ptr::with_exposed_provenance_mut(addr.get());
147 NonNull::new_unchecked(ptr)
148 }
149 }
150
    /// Returns a shared reference to the value. In contrast to [`as_ref`], this does not require
    /// that the value be initialized.
153 ///
154 /// For the mutable counterpart see [`as_uninit_mut`].
155 ///
156 /// [`as_ref`]: NonNull::as_ref
157 /// [`as_uninit_mut`]: NonNull::as_uninit_mut
158 ///
159 /// # Safety
160 ///
161 /// When calling this method, you have to ensure that
162 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
163 /// Note that because the created reference is to `MaybeUninit<T>`, the
164 /// source pointer can point to uninitialized memory.
165 #[inline]
166 #[must_use]
167 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
168 pub const unsafe fn as_uninit_ref<'a>(self) -> &'a MaybeUninit<T> {
169 // SAFETY: the caller must guarantee that `self` meets all the
170 // requirements for a reference.
171 unsafe { &*self.cast().as_ptr() }
172 }
173
    /// Returns a unique reference to the value. In contrast to [`as_mut`], this does not require
    /// that the value be initialized.
176 ///
177 /// For the shared counterpart see [`as_uninit_ref`].
178 ///
179 /// [`as_mut`]: NonNull::as_mut
180 /// [`as_uninit_ref`]: NonNull::as_uninit_ref
181 ///
182 /// # Safety
183 ///
184 /// When calling this method, you have to ensure that
185 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
186 /// Note that because the created reference is to `MaybeUninit<T>`, the
187 /// source pointer can point to uninitialized memory.
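    ///
    /// # Examples
    ///
    /// A minimal sketch using a local `MaybeUninit<u32>` as backing storage:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// use std::mem::MaybeUninit;
    /// use std::ptr::NonNull;
    ///
    /// let mut storage = MaybeUninit::<u32>::uninit();
    /// let ptr: NonNull<u32> = NonNull::from_mut(&mut storage).cast();
    /// // SAFETY: `ptr` is non-null, aligned, and dereferenceable; the pointee may be
    /// // uninitialized because the created reference is to `MaybeUninit<u32>`.
    /// unsafe { ptr.as_uninit_mut() }.write(7);
    /// // SAFETY: the value was just initialized through the pointer.
    /// assert_eq!(unsafe { storage.assume_init() }, 7);
    /// ```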
188 #[inline]
189 #[must_use]
190 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
191 pub const unsafe fn as_uninit_mut<'a>(self) -> &'a mut MaybeUninit<T> {
192 // SAFETY: the caller must guarantee that `self` meets all the
193 // requirements for a reference.
194 unsafe { &mut *self.cast().as_ptr() }
195 }
196}
197
198impl<T: PointeeSized> NonNull<T> {
199 /// Creates a new `NonNull`.
200 ///
201 /// # Safety
202 ///
203 /// `ptr` must be non-null.
204 ///
205 /// # Examples
206 ///
207 /// ```
208 /// use std::ptr::NonNull;
209 ///
210 /// let mut x = 0u32;
211 /// let ptr = unsafe { NonNull::new_unchecked(&mut x as *mut _) };
212 /// ```
213 ///
214 /// *Incorrect* usage of this function:
215 ///
216 /// ```rust,no_run
217 /// use std::ptr::NonNull;
218 ///
219 /// // NEVER DO THAT!!! This is undefined behavior. ⚠️
220 /// let ptr = unsafe { NonNull::<u32>::new_unchecked(std::ptr::null_mut()) };
221 /// ```
222 #[stable(feature = "nonnull", since = "1.25.0")]
223 #[rustc_const_stable(feature = "const_nonnull_new_unchecked", since = "1.25.0")]
224 #[inline]
225 #[track_caller]
226 pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
227 // SAFETY: the caller must guarantee that `ptr` is non-null.
228 unsafe {
229 assert_unsafe_precondition!(
230 check_language_ub,
231 "NonNull::new_unchecked requires that the pointer is non-null",
232 (ptr: *mut () = ptr as *mut ()) => !ptr.is_null()
233 );
234 NonNull { pointer: ptr as _ }
235 }
236 }
237
238 /// Creates a new `NonNull` if `ptr` is non-null.
239 ///
240 /// # Panics during const evaluation
241 ///
242 /// This method will panic during const evaluation if the pointer cannot be
243 /// determined to be null or not. See [`is_null`] for more information.
244 ///
245 /// [`is_null`]: ../primitive.pointer.html#method.is_null-1
246 ///
247 /// # Examples
248 ///
249 /// ```
250 /// use std::ptr::NonNull;
251 ///
252 /// let mut x = 0u32;
253 /// let ptr = NonNull::<u32>::new(&mut x as *mut _).expect("ptr is null!");
254 ///
255 /// if let Some(ptr) = NonNull::<u32>::new(std::ptr::null_mut()) {
256 /// unreachable!();
257 /// }
258 /// ```
259 #[stable(feature = "nonnull", since = "1.25.0")]
260 #[rustc_const_stable(feature = "const_nonnull_new", since = "1.85.0")]
261 #[inline]
262 pub const fn new(ptr: *mut T) -> Option<Self> {
263 if !ptr.is_null() {
264 // SAFETY: The pointer is already checked and is not null
265 Some(unsafe { Self::new_unchecked(ptr) })
266 } else {
267 None
268 }
269 }
270
271 /// Converts a reference to a `NonNull` pointer.
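    ///
    /// # Examples
    ///
    /// A minimal sketch; the pointer may be read through, but never written through:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let x = 7u32;
    /// let ptr = NonNull::from_ref(&x);
    /// // SAFETY: `ptr` was just derived from a live shared reference to `x`.
    /// assert_eq!(unsafe { ptr.read() }, 7);
    /// ```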
272 #[stable(feature = "non_null_from_ref", since = "1.89.0")]
273 #[rustc_const_stable(feature = "non_null_from_ref", since = "1.89.0")]
274 #[inline]
275 pub const fn from_ref(r: &T) -> Self {
276 // SAFETY: A reference cannot be null.
277 unsafe { NonNull { pointer: r as *const T } }
278 }
279
280 /// Converts a mutable reference to a `NonNull` pointer.
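    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::from_mut(&mut x);
    /// // SAFETY: `ptr` was just derived from a live mutable reference to `x`.
    /// unsafe { ptr.write(3) };
    /// assert_eq!(x, 3);
    /// ```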
281 #[stable(feature = "non_null_from_ref", since = "1.89.0")]
282 #[rustc_const_stable(feature = "non_null_from_ref", since = "1.89.0")]
283 #[inline]
284 pub const fn from_mut(r: &mut T) -> Self {
285 // SAFETY: A mutable reference cannot be null.
286 unsafe { NonNull { pointer: r as *mut T } }
287 }
288
289 /// Performs the same functionality as [`std::ptr::from_raw_parts`], except that a
290 /// `NonNull` pointer is returned, as opposed to a raw `*const` pointer.
291 ///
292 /// See the documentation of [`std::ptr::from_raw_parts`] for more details.
293 ///
294 /// [`std::ptr::from_raw_parts`]: crate::ptr::from_raw_parts
295 #[unstable(feature = "ptr_metadata", issue = "81513")]
296 #[inline]
297 pub const fn from_raw_parts(
298 data_pointer: NonNull<impl super::Thin>,
299 metadata: <T as super::Pointee>::Metadata,
300 ) -> NonNull<T> {
        // SAFETY: The result of `ptr::from_raw_parts_mut` is non-null because `data_pointer` is.
302 unsafe {
303 NonNull::new_unchecked(super::from_raw_parts_mut(data_pointer.as_ptr(), metadata))
304 }
305 }
306
307 /// Decompose a (possibly wide) pointer into its data pointer and metadata components.
308 ///
309 /// The pointer can be later reconstructed with [`NonNull::from_raw_parts`].
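    ///
    /// # Examples
    ///
    /// A minimal sketch that splits a slice pointer and reassembles it:
    ///
    /// ```
    /// #![feature(ptr_metadata)]
    ///
    /// use std::ptr::NonNull;
    ///
    /// let mut arr = [1, 2, 3];
    /// let slice_ptr: NonNull<[i32]> = NonNull::from_mut(&mut arr[..]);
    /// let (data, len) = slice_ptr.to_raw_parts();
    /// assert_eq!(len, 3);
    /// let rebuilt: NonNull<[i32]> = NonNull::from_raw_parts(data, len);
    /// assert_eq!(rebuilt, slice_ptr);
    /// ```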
310 #[unstable(feature = "ptr_metadata", issue = "81513")]
311 #[must_use = "this returns the result of the operation, \
312 without modifying the original"]
313 #[inline]
314 pub const fn to_raw_parts(self) -> (NonNull<()>, <T as super::Pointee>::Metadata) {
315 (self.cast(), super::metadata(self.as_ptr()))
316 }
317
318 /// Gets the "address" portion of the pointer.
319 ///
320 /// For more details, see the equivalent method on a raw pointer, [`pointer::addr`].
321 ///
322 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
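    ///
    /// # Examples
    ///
    /// A small sketch; the concrete address value does not matter, only that it is non-zero
    /// and can be re-attached with [`with_addr`][NonNull::with_addr]:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::from_mut(&mut x);
    /// assert_ne!(ptr.addr().get(), 0);
    /// assert_eq!(ptr.with_addr(ptr.addr()), ptr);
    /// ```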
323 #[must_use]
324 #[inline]
325 #[stable(feature = "strict_provenance", since = "1.84.0")]
326 pub fn addr(self) -> NonZero<usize> {
327 // SAFETY: The pointer is guaranteed by the type to be non-null,
328 // meaning that the address will be non-zero.
329 unsafe { NonZero::new_unchecked(self.as_ptr().addr()) }
330 }
331
332 /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
333 /// [`with_exposed_provenance`][NonNull::with_exposed_provenance] and returns the "address" portion.
334 ///
335 /// For more details, see the equivalent method on a raw pointer, [`pointer::expose_provenance`].
336 ///
337 /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
338 #[stable(feature = "nonnull_provenance", since = "1.89.0")]
339 pub fn expose_provenance(self) -> NonZero<usize> {
340 // SAFETY: The pointer is guaranteed by the type to be non-null,
341 // meaning that the address will be non-zero.
342 unsafe { NonZero::new_unchecked(self.as_ptr().expose_provenance()) }
343 }
344
345 /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
346 /// `self`.
347 ///
348 /// For more details, see the equivalent method on a raw pointer, [`pointer::with_addr`].
349 ///
350 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
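    ///
    /// # Examples
    ///
    /// A minimal sketch that steps one byte forward by recomputing the address while keeping
    /// the provenance of the original pointer:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut bytes = [10u8, 20];
    /// let first = NonNull::new(bytes.as_mut_ptr()).unwrap();
    /// let second = first.with_addr(first.addr().checked_add(1).unwrap());
    /// // SAFETY: `second` keeps the provenance over `bytes` and points at `bytes[1]`.
    /// assert_eq!(unsafe { second.read() }, 20);
    /// ```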
351 #[must_use]
352 #[inline]
353 #[stable(feature = "strict_provenance", since = "1.84.0")]
354 pub fn with_addr(self, addr: NonZero<usize>) -> Self {
        // SAFETY: The result of `pointer::with_addr` is non-null because `addr` is guaranteed to be non-zero.
356 unsafe { NonNull::new_unchecked(self.as_ptr().with_addr(addr.get()) as *mut _) }
357 }
358
359 /// Creates a new pointer by mapping `self`'s address to a new one, preserving the
360 /// [provenance][crate::ptr#provenance] of `self`.
361 ///
362 /// For more details, see the equivalent method on a raw pointer, [`pointer::map_addr`].
363 ///
364 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
365 #[must_use]
366 #[inline]
367 #[stable(feature = "strict_provenance", since = "1.84.0")]
368 pub fn map_addr(self, f: impl FnOnce(NonZero<usize>) -> NonZero<usize>) -> Self {
369 self.with_addr(f(self.addr()))
370 }
371
372 /// Acquires the underlying `*mut` pointer.
373 ///
374 /// # Examples
375 ///
376 /// ```
377 /// use std::ptr::NonNull;
378 ///
379 /// let mut x = 0u32;
380 /// let ptr = NonNull::new(&mut x).expect("ptr is null!");
381 ///
382 /// let x_value = unsafe { *ptr.as_ptr() };
383 /// assert_eq!(x_value, 0);
384 ///
385 /// unsafe { *ptr.as_ptr() += 2; }
386 /// let x_value = unsafe { *ptr.as_ptr() };
387 /// assert_eq!(x_value, 2);
388 /// ```
389 #[stable(feature = "nonnull", since = "1.25.0")]
390 #[rustc_const_stable(feature = "const_nonnull_as_ptr", since = "1.32.0")]
391 #[rustc_never_returns_null_ptr]
392 #[must_use]
393 #[inline(always)]
394 pub const fn as_ptr(self) -> *mut T {
395 // This is a transmute for the same reasons as `NonZero::get`.
396
397 // SAFETY: `NonNull` is `transparent` over a `*const T`, and `*const T`
398 // and `*mut T` have the same layout, so transitively we can transmute
399 // our `NonNull` to a `*mut T` directly.
400 unsafe { mem::transmute::<Self, *mut T>(self) }
401 }
402
403 /// Returns a shared reference to the value. If the value may be uninitialized, [`as_uninit_ref`]
404 /// must be used instead.
405 ///
406 /// For the mutable counterpart see [`as_mut`].
407 ///
408 /// [`as_uninit_ref`]: NonNull::as_uninit_ref
409 /// [`as_mut`]: NonNull::as_mut
410 ///
411 /// # Safety
412 ///
413 /// When calling this method, you have to ensure that
414 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
415 ///
416 /// # Examples
417 ///
418 /// ```
419 /// use std::ptr::NonNull;
420 ///
421 /// let mut x = 0u32;
422 /// let ptr = NonNull::new(&mut x as *mut _).expect("ptr is null!");
423 ///
424 /// let ref_x = unsafe { ptr.as_ref() };
425 /// println!("{ref_x}");
426 /// ```
427 ///
428 /// [the module documentation]: crate::ptr#safety
429 #[stable(feature = "nonnull", since = "1.25.0")]
430 #[rustc_const_stable(feature = "const_nonnull_as_ref", since = "1.73.0")]
431 #[must_use]
432 #[inline(always)]
433 pub const unsafe fn as_ref<'a>(&self) -> &'a T {
434 // SAFETY: the caller must guarantee that `self` meets all the
435 // requirements for a reference.
436 // `cast_const` avoids a mutable raw pointer deref.
437 unsafe { &*self.as_ptr().cast_const() }
438 }
439
440 /// Returns a unique reference to the value. If the value may be uninitialized, [`as_uninit_mut`]
441 /// must be used instead.
442 ///
443 /// For the shared counterpart see [`as_ref`].
444 ///
445 /// [`as_uninit_mut`]: NonNull::as_uninit_mut
446 /// [`as_ref`]: NonNull::as_ref
447 ///
448 /// # Safety
449 ///
450 /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
453 ///
454 /// ```
455 /// use std::ptr::NonNull;
456 ///
457 /// let mut x = 0u32;
458 /// let mut ptr = NonNull::new(&mut x).expect("null pointer");
459 ///
460 /// let x_ref = unsafe { ptr.as_mut() };
461 /// assert_eq!(*x_ref, 0);
462 /// *x_ref += 2;
463 /// assert_eq!(*x_ref, 2);
464 /// ```
465 ///
466 /// [the module documentation]: crate::ptr#safety
467 #[stable(feature = "nonnull", since = "1.25.0")]
468 #[rustc_const_stable(feature = "const_ptr_as_ref", since = "1.83.0")]
469 #[must_use]
470 #[inline(always)]
471 pub const unsafe fn as_mut<'a>(&mut self) -> &'a mut T {
472 // SAFETY: the caller must guarantee that `self` meets all the
473 // requirements for a mutable reference.
474 unsafe { &mut *self.as_ptr() }
475 }
476
477 /// Casts to a pointer of another type.
478 ///
479 /// # Examples
480 ///
481 /// ```
482 /// use std::ptr::NonNull;
483 ///
484 /// let mut x = 0u32;
485 /// let ptr = NonNull::new(&mut x as *mut _).expect("null pointer");
486 ///
487 /// let casted_ptr = ptr.cast::<i8>();
488 /// let raw_ptr: *mut i8 = casted_ptr.as_ptr();
489 /// ```
490 #[stable(feature = "nonnull_cast", since = "1.27.0")]
491 #[rustc_const_stable(feature = "const_nonnull_cast", since = "1.36.0")]
492 #[must_use = "this returns the result of the operation, \
493 without modifying the original"]
494 #[inline]
495 pub const fn cast<U>(self) -> NonNull<U> {
496 // SAFETY: `self` is a `NonNull` pointer which is necessarily non-null
497 unsafe { NonNull { pointer: self.as_ptr() as *mut U } }
498 }
499
500 /// Try to cast to a pointer of another type by checking alignment.
501 ///
502 /// If the pointer is properly aligned to the target type, it will be
503 /// cast to the target type. Otherwise, `None` is returned.
504 ///
505 /// # Examples
506 ///
507 /// ```rust
508 /// #![feature(pointer_try_cast_aligned)]
509 /// use std::ptr::NonNull;
510 ///
511 /// let mut x = 0u64;
512 ///
513 /// let aligned = NonNull::from_mut(&mut x);
514 /// let unaligned = unsafe { aligned.byte_add(1) };
515 ///
516 /// assert!(aligned.try_cast_aligned::<u32>().is_some());
517 /// assert!(unaligned.try_cast_aligned::<u32>().is_none());
518 /// ```
519 #[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
520 #[must_use = "this returns the result of the operation, \
521 without modifying the original"]
522 #[inline]
523 pub fn try_cast_aligned<U>(self) -> Option<NonNull<U>> {
524 if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
525 }
526
527 /// Adds an offset to a pointer.
528 ///
529 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
530 /// offset of `3 * size_of::<T>()` bytes.
531 ///
532 /// # Safety
533 ///
534 /// If any of the following conditions are violated, the result is Undefined Behavior:
535 ///
536 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
537 ///
538 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
539 /// [allocation], and the entire memory range between `self` and the result must be in
540 /// bounds of that allocation. In particular, this range must not "wrap around" the edge
541 /// of the address space.
542 ///
543 /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
544 /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
545 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
546 /// safe.
547 ///
548 /// [allocation]: crate::ptr#allocation
549 ///
550 /// # Examples
551 ///
552 /// ```
553 /// use std::ptr::NonNull;
554 ///
555 /// let mut s = [1, 2, 3];
556 /// let ptr: NonNull<u32> = NonNull::new(s.as_mut_ptr()).unwrap();
557 ///
558 /// unsafe {
559 /// println!("{}", ptr.offset(1).read());
560 /// println!("{}", ptr.offset(2).read());
561 /// }
562 /// ```
563 #[inline(always)]
564 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
565 #[must_use = "returns a new pointer rather than modifying its argument"]
566 #[stable(feature = "non_null_convenience", since = "1.80.0")]
567 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
568 pub const unsafe fn offset(self, count: isize) -> Self
569 where
570 T: Sized,
571 {
572 // SAFETY: the caller must uphold the safety contract for `offset`.
573 // Additionally safety contract of `offset` guarantees that the resulting pointer is
574 // pointing to an allocation, there can't be an allocation at null, thus it's safe to
575 // construct `NonNull`.
576 unsafe { NonNull { pointer: intrinsics::offset(self.as_ptr(), count) } }
577 }
578
579 /// Calculates the offset from a pointer in bytes.
580 ///
581 /// `count` is in units of **bytes**.
582 ///
583 /// This is purely a convenience for casting to a `u8` pointer and
584 /// using [offset][pointer::offset] on it. See that method for documentation
585 /// and safety requirements.
586 ///
587 /// For non-`Sized` pointees this operation changes only the data pointer,
588 /// leaving the metadata untouched.
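    ///
    /// # Examples
    ///
    /// A short sketch; one `u16` element occupies two bytes:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut arr = [1u16, 2];
    /// let ptr = NonNull::new(arr.as_mut_ptr()).unwrap();
    /// // Step forward by two *bytes*, i.e. one `u16` element.
    /// let second = unsafe { ptr.byte_offset(2) };
    /// assert_eq!(unsafe { second.read() }, 2);
    /// ```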
589 #[must_use]
590 #[inline(always)]
591 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
592 #[stable(feature = "non_null_convenience", since = "1.80.0")]
593 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
594 pub const unsafe fn byte_offset(self, count: isize) -> Self {
595 // SAFETY: the caller must uphold the safety contract for `offset` and `byte_offset` has
596 // the same safety contract.
597 // Additionally safety contract of `offset` guarantees that the resulting pointer is
598 // pointing to an allocation, there can't be an allocation at null, thus it's safe to
599 // construct `NonNull`.
600 unsafe { NonNull { pointer: self.as_ptr().byte_offset(count) } }
601 }
602
603 /// Adds an offset to a pointer (convenience for `.offset(count as isize)`).
604 ///
605 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
606 /// offset of `3 * size_of::<T>()` bytes.
607 ///
608 /// # Safety
609 ///
610 /// If any of the following conditions are violated, the result is Undefined Behavior:
611 ///
612 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
613 ///
614 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
615 /// [allocation], and the entire memory range between `self` and the result must be in
616 /// bounds of that allocation. In particular, this range must not "wrap around" the edge
617 /// of the address space.
618 ///
619 /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
620 /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
621 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
622 /// safe.
623 ///
624 /// [allocation]: crate::ptr#allocation
625 ///
626 /// # Examples
627 ///
628 /// ```
629 /// use std::ptr::NonNull;
630 ///
631 /// let s: &str = "123";
632 /// let ptr: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap();
633 ///
634 /// unsafe {
635 /// println!("{}", ptr.add(1).read() as char);
636 /// println!("{}", ptr.add(2).read() as char);
637 /// }
638 /// ```
639 #[inline(always)]
640 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
641 #[must_use = "returns a new pointer rather than modifying its argument"]
642 #[stable(feature = "non_null_convenience", since = "1.80.0")]
643 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
644 pub const unsafe fn add(self, count: usize) -> Self
645 where
646 T: Sized,
647 {
648 // SAFETY: the caller must uphold the safety contract for `offset`.
649 // Additionally safety contract of `offset` guarantees that the resulting pointer is
650 // pointing to an allocation, there can't be an allocation at null, thus it's safe to
651 // construct `NonNull`.
652 unsafe { NonNull { pointer: intrinsics::offset(self.as_ptr(), count) } }
653 }
654
655 /// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`).
656 ///
657 /// `count` is in units of bytes.
658 ///
659 /// This is purely a convenience for casting to a `u8` pointer and
660 /// using [`add`][NonNull::add] on it. See that method for documentation
661 /// and safety requirements.
662 ///
663 /// For non-`Sized` pointees this operation changes only the data pointer,
664 /// leaving the metadata untouched.
665 #[must_use]
666 #[inline(always)]
667 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
668 #[stable(feature = "non_null_convenience", since = "1.80.0")]
669 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
670 pub const unsafe fn byte_add(self, count: usize) -> Self {
671 // SAFETY: the caller must uphold the safety contract for `add` and `byte_add` has the same
672 // safety contract.
673 // Additionally safety contract of `add` guarantees that the resulting pointer is pointing
674 // to an allocation, there can't be an allocation at null, thus it's safe to construct
675 // `NonNull`.
676 unsafe { NonNull { pointer: self.as_ptr().byte_add(count) } }
677 }
678
679 /// Subtracts an offset from a pointer (convenience for
680 /// `.offset((count as isize).wrapping_neg())`).
681 ///
682 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
683 /// offset of `3 * size_of::<T>()` bytes.
684 ///
685 /// # Safety
686 ///
687 /// If any of the following conditions are violated, the result is Undefined Behavior:
688 ///
689 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
690 ///
691 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
692 /// [allocation], and the entire memory range between `self` and the result must be in
693 /// bounds of that allocation. In particular, this range must not "wrap around" the edge
694 /// of the address space.
695 ///
696 /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
697 /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
698 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
699 /// safe.
700 ///
701 /// [allocation]: crate::ptr#allocation
702 ///
703 /// # Examples
704 ///
705 /// ```
706 /// use std::ptr::NonNull;
707 ///
708 /// let s: &str = "123";
709 ///
710 /// unsafe {
711 /// let end: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap().add(3);
712 /// println!("{}", end.sub(1).read() as char);
713 /// println!("{}", end.sub(2).read() as char);
714 /// }
715 /// ```
716 #[inline(always)]
717 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
718 #[must_use = "returns a new pointer rather than modifying its argument"]
719 #[stable(feature = "non_null_convenience", since = "1.80.0")]
720 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
721 pub const unsafe fn sub(self, count: usize) -> Self
722 where
723 T: Sized,
724 {
725 if T::IS_ZST {
726 // Pointer arithmetic does nothing when the pointee is a ZST.
727 self
728 } else {
729 // SAFETY: the caller must uphold the safety contract for `offset`.
730 // Because the pointee is *not* a ZST, that means that `count` is
731 // at most `isize::MAX`, and thus the negation cannot overflow.
732 unsafe { self.offset((count as isize).unchecked_neg()) }
733 }
734 }
735
736 /// Calculates the offset from a pointer in bytes (convenience for
737 /// `.byte_offset((count as isize).wrapping_neg())`).
738 ///
739 /// `count` is in units of bytes.
740 ///
741 /// This is purely a convenience for casting to a `u8` pointer and
742 /// using [`sub`][NonNull::sub] on it. See that method for documentation
743 /// and safety requirements.
744 ///
745 /// For non-`Sized` pointees this operation changes only the data pointer,
746 /// leaving the metadata untouched.
747 #[must_use]
748 #[inline(always)]
749 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
750 #[stable(feature = "non_null_convenience", since = "1.80.0")]
751 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
752 pub const unsafe fn byte_sub(self, count: usize) -> Self {
753 // SAFETY: the caller must uphold the safety contract for `sub` and `byte_sub` has the same
754 // safety contract.
755 // Additionally safety contract of `sub` guarantees that the resulting pointer is pointing
756 // to an allocation, there can't be an allocation at null, thus it's safe to construct
757 // `NonNull`.
758 unsafe { NonNull { pointer: self.as_ptr().byte_sub(count) } }
759 }
760
761 /// Calculates the distance between two pointers within the same allocation. The returned value is in
762 /// units of T: the distance in bytes divided by `size_of::<T>()`.
763 ///
764 /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
765 /// except that it has a lot more opportunities for UB, in exchange for the compiler
766 /// better understanding what you are doing.
767 ///
768 /// The primary motivation of this method is for computing the `len` of an array/slice
769 /// of `T` that you are currently representing as a "start" and "end" pointer
770 /// (and "end" is "one past the end" of the array).
771 /// In that case, `end.offset_from(start)` gets you the length of the array.
772 ///
773 /// All of the following safety requirements are trivially satisfied for this usecase.
774 ///
775 /// [`offset`]: #method.offset
776 ///
777 /// # Safety
778 ///
779 /// If any of the following conditions are violated, the result is Undefined Behavior:
780 ///
781 /// * `self` and `origin` must either
782 ///
783 /// * point to the same address, or
784 /// * both be *derived from* a pointer to the same [allocation], and the memory range between
    ///   the two pointers must be in bounds of that allocation. (See below for an example.)
786 ///
787 /// * The distance between the pointers, in bytes, must be an exact multiple
788 /// of the size of `T`.
789 ///
790 /// As a consequence, the absolute distance between the pointers, in bytes, computed on
791 /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
792 /// implied by the in-bounds requirement, and the fact that no allocation can be larger
793 /// than `isize::MAX` bytes.
794 ///
795 /// The requirement for pointers to be derived from the same allocation is primarily
796 /// needed for `const`-compatibility: the distance between pointers into *different* allocated
797 /// objects is not known at compile-time. However, the requirement also exists at
798 /// runtime and may be exploited by optimizations. If you wish to compute the difference between
799 /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
800 /// origin as isize) / size_of::<T>()`.
801 // FIXME: recommend `addr()` instead of `as usize` once that is stable.
802 ///
803 /// [`add`]: #method.add
804 /// [allocation]: crate::ptr#allocation
805 ///
806 /// # Panics
807 ///
808 /// This function panics if `T` is a Zero-Sized Type ("ZST").
809 ///
810 /// # Examples
811 ///
812 /// Basic usage:
813 ///
814 /// ```
815 /// use std::ptr::NonNull;
816 ///
817 /// let a = [0; 5];
818 /// let ptr1: NonNull<u32> = NonNull::from(&a[1]);
819 /// let ptr2: NonNull<u32> = NonNull::from(&a[3]);
820 /// unsafe {
821 /// assert_eq!(ptr2.offset_from(ptr1), 2);
822 /// assert_eq!(ptr1.offset_from(ptr2), -2);
823 /// assert_eq!(ptr1.offset(2), ptr2);
824 /// assert_eq!(ptr2.offset(-2), ptr1);
825 /// }
826 /// ```
827 ///
828 /// *Incorrect* usage:
829 ///
830 /// ```rust,no_run
831 /// use std::ptr::NonNull;
832 ///
833 /// let ptr1 = NonNull::new(Box::into_raw(Box::new(0u8))).unwrap();
834 /// let ptr2 = NonNull::new(Box::into_raw(Box::new(1u8))).unwrap();
835 /// let diff = (ptr2.addr().get() as isize).wrapping_sub(ptr1.addr().get() as isize);
836 /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
837 /// let diff_plus_1 = diff.wrapping_add(1);
838 /// let ptr2_other = NonNull::new(ptr1.as_ptr().wrapping_byte_offset(diff_plus_1)).unwrap();
839 /// assert_eq!(ptr2.addr(), ptr2_other.addr());
840 /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
841 /// // computing their offset is undefined behavior, even though
842 /// // they point to addresses that are in-bounds of the same object!
843 ///
844 /// let one = unsafe { ptr2_other.offset_from(ptr2) }; // Undefined Behavior! ⚠️
845 /// ```
846 #[inline]
847 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
848 #[stable(feature = "non_null_convenience", since = "1.80.0")]
849 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
850 pub const unsafe fn offset_from(self, origin: NonNull<T>) -> isize
851 where
852 T: Sized,
853 {
854 // SAFETY: the caller must uphold the safety contract for `offset_from`.
855 unsafe { self.as_ptr().offset_from(origin.as_ptr()) }
856 }
857
858 /// Calculates the distance between two pointers within the same allocation. The returned value is in
859 /// units of **bytes**.
860 ///
861 /// This is purely a convenience for casting to a `u8` pointer and
862 /// using [`offset_from`][NonNull::offset_from] on it. See that method for
863 /// documentation and safety requirements.
864 ///
865 /// For non-`Sized` pointees this operation considers only the data pointers,
866 /// ignoring the metadata.
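    ///
    /// # Examples
    ///
    /// A small sketch; four `u32` elements are 16 bytes apart:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut arr = [0u32; 4];
    /// let start = NonNull::new(arr.as_mut_ptr()).unwrap();
    /// // One-past-the-end pointer of the array.
    /// let end = unsafe { start.add(4) };
    /// assert_eq!(unsafe { end.byte_offset_from(start) }, 16);
    /// ```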
867 #[inline(always)]
868 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
869 #[stable(feature = "non_null_convenience", since = "1.80.0")]
870 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
871 pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: NonNull<U>) -> isize {
872 // SAFETY: the caller must uphold the safety contract for `byte_offset_from`.
873 unsafe { self.as_ptr().byte_offset_from(origin.as_ptr()) }
874 }
875
    // N.B. `wrapping_offset`, `wrapping_add`, etc. are not implemented because they can wrap to null
877
878 /// Calculates the distance between two pointers within the same allocation, *where it's known that
879 /// `self` is equal to or greater than `origin`*. The returned value is in
880 /// units of T: the distance in bytes is divided by `size_of::<T>()`.
881 ///
882 /// This computes the same value that [`offset_from`](#method.offset_from)
883 /// would compute, but with the added precondition that the offset is
884 /// guaranteed to be non-negative. This method is equivalent to
885 /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
886 /// but it provides slightly more information to the optimizer, which can
887 /// sometimes allow it to optimize slightly better with some backends.
888 ///
    /// This method can be thought of as recovering the `count` that was passed
890 /// to [`add`](#method.add) (or, with the parameters in the other order,
891 /// to [`sub`](#method.sub)). The following are all equivalent, assuming
892 /// that their safety preconditions are met:
893 /// ```rust
894 /// # unsafe fn blah(ptr: std::ptr::NonNull<u32>, origin: std::ptr::NonNull<u32>, count: usize) -> bool { unsafe {
895 /// ptr.offset_from_unsigned(origin) == count
896 /// # &&
897 /// origin.add(count) == ptr
898 /// # &&
899 /// ptr.sub(count) == origin
900 /// # } }
901 /// ```
902 ///
903 /// # Safety
904 ///
905 /// - The distance between the pointers must be non-negative (`self >= origin`)
906 ///
907 /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
908 /// apply to this method as well; see it for the full details.
909 ///
910 /// Importantly, despite the return type of this method being able to represent
911 /// a larger offset, it's still *not permitted* to pass pointers which differ
912 /// by more than `isize::MAX` *bytes*. As such, the result of this method will
913 /// always be less than or equal to `isize::MAX as usize`.
914 ///
915 /// # Panics
916 ///
917 /// This function panics if `T` is a Zero-Sized Type ("ZST").
918 ///
919 /// # Examples
920 ///
921 /// ```
922 /// use std::ptr::NonNull;
923 ///
924 /// let a = [0; 5];
925 /// let ptr1: NonNull<u32> = NonNull::from(&a[1]);
926 /// let ptr2: NonNull<u32> = NonNull::from(&a[3]);
927 /// unsafe {
928 /// assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
929 /// assert_eq!(ptr1.add(2), ptr2);
930 /// assert_eq!(ptr2.sub(2), ptr1);
931 /// assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
932 /// }
933 ///
934 /// // This would be incorrect, as the pointers are not correctly ordered:
935 /// // ptr1.offset_from_unsigned(ptr2)
936 /// ```
937 #[inline]
938 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
939 #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
940 #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
941 pub const unsafe fn offset_from_unsigned(self, subtracted: NonNull<T>) -> usize
942 where
943 T: Sized,
944 {
945 // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
946 unsafe { self.as_ptr().offset_from_unsigned(subtracted.as_ptr()) }
947 }
948
949 /// Calculates the distance between two pointers within the same allocation, *where it's known that
950 /// `self` is equal to or greater than `origin`*. The returned value is in
951 /// units of **bytes**.
952 ///
953 /// This is purely a convenience for casting to a `u8` pointer and
954 /// using [`offset_from_unsigned`][NonNull::offset_from_unsigned] on it.
955 /// See that method for documentation and safety requirements.
956 ///
957 /// For non-`Sized` pointees this operation considers only the data pointers,
958 /// ignoring the metadata.
959 #[inline(always)]
960 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
961 #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
962 #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
963 pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: NonNull<U>) -> usize {
964 // SAFETY: the caller must uphold the safety contract for `byte_offset_from_unsigned`.
965 unsafe { self.as_ptr().byte_offset_from_unsigned(origin.as_ptr()) }
966 }
967
968 /// Reads the value from `self` without moving it. This leaves the
969 /// memory in `self` unchanged.
970 ///
971 /// See [`ptr::read`] for safety concerns and examples.
972 ///
973 /// [`ptr::read`]: crate::ptr::read()
974 #[inline]
975 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
976 #[stable(feature = "non_null_convenience", since = "1.80.0")]
977 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
978 pub const unsafe fn read(self) -> T
979 where
980 T: Sized,
981 {
982 // SAFETY: the caller must uphold the safety contract for `read`.
983 unsafe { ptr::read(self.as_ptr()) }
984 }
985
986 /// Performs a volatile read of the value from `self` without moving it. This
987 /// leaves the memory in `self` unchanged.
988 ///
989 /// Volatile operations are intended to act on I/O memory, and are guaranteed
990 /// to not be elided or reordered by the compiler across other volatile
991 /// operations.
992 ///
993 /// See [`ptr::read_volatile`] for safety concerns and examples.
994 ///
995 /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
996 #[inline]
997 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
998 #[stable(feature = "non_null_convenience", since = "1.80.0")]
999 pub unsafe fn read_volatile(self) -> T
1000 where
1001 T: Sized,
1002 {
1003 // SAFETY: the caller must uphold the safety contract for `read_volatile`.
1004 unsafe { ptr::read_volatile(self.as_ptr()) }
1005 }
1006
1007 /// Reads the value from `self` without moving it. This leaves the
1008 /// memory in `self` unchanged.
1009 ///
1010 /// Unlike `read`, the pointer may be unaligned.
1011 ///
1012 /// See [`ptr::read_unaligned`] for safety concerns and examples.
1013 ///
1014 /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
1015 #[inline]
1016 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1017 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1018 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
1019 pub const unsafe fn read_unaligned(self) -> T
1020 where
1021 T: Sized,
1022 {
1023 // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
1024 unsafe { ptr::read_unaligned(self.as_ptr()) }
1025 }
1026
1027 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1028 /// and destination may overlap.
1029 ///
1030 /// NOTE: this has the *same* argument order as [`ptr::copy`].
1031 ///
1032 /// See [`ptr::copy`] for safety concerns and examples.
1033 ///
1034 /// [`ptr::copy`]: crate::ptr::copy()
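    ///
    /// # Examples
    ///
    /// A minimal sketch copying three elements between two local buffers (which here do not
    /// overlap, although `copy_to` would also permit that):
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut src = [1u8, 2, 3];
    /// let mut dst = [0u8; 3];
    /// let src_ptr = NonNull::new(src.as_mut_ptr()).unwrap();
    /// let dst_ptr = NonNull::new(dst.as_mut_ptr()).unwrap();
    /// // SAFETY: both pointers are valid for reads/writes of three `u8`s.
    /// unsafe { src_ptr.copy_to(dst_ptr, 3) };
    /// assert_eq!(dst, [1, 2, 3]);
    /// ```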
1035 #[inline(always)]
1036 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1037 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1038 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1039 pub const unsafe fn copy_to(self, dest: NonNull<T>, count: usize)
1040 where
1041 T: Sized,
1042 {
1043 // SAFETY: the caller must uphold the safety contract for `copy`.
1044 unsafe { ptr::copy(self.as_ptr(), dest.as_ptr(), count) }
1045 }
1046
1047 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1048 /// and destination may *not* overlap.
1049 ///
1050 /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
1051 ///
1052 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1053 ///
1054 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1055 #[inline(always)]
1056 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1057 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1058 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1059 pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNull<T>, count: usize)
1060 where
1061 T: Sized,
1062 {
1063 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1064 unsafe { ptr::copy_nonoverlapping(self.as_ptr(), dest.as_ptr(), count) }
1065 }
1066
1067 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1068 /// and destination may overlap.
1069 ///
1070 /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
1071 ///
1072 /// See [`ptr::copy`] for safety concerns and examples.
1073 ///
1074 /// [`ptr::copy`]: crate::ptr::copy()
1075 #[inline(always)]
1076 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1077 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1078 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1079 pub const unsafe fn copy_from(self, src: NonNull<T>, count: usize)
1080 where
1081 T: Sized,
1082 {
1083 // SAFETY: the caller must uphold the safety contract for `copy`.
1084 unsafe { ptr::copy(src.as_ptr(), self.as_ptr(), count) }
1085 }
1086
1087 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1088 /// and destination may *not* overlap.
1089 ///
1090 /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
1091 ///
1092 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1093 ///
1094 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1095 #[inline(always)]
1096 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1097 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1098 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1099 pub const unsafe fn copy_from_nonoverlapping(self, src: NonNull<T>, count: usize)
1100 where
1101 T: Sized,
1102 {
1103 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1104 unsafe { ptr::copy_nonoverlapping(src.as_ptr(), self.as_ptr(), count) }
1105 }
1106
1107 /// Executes the destructor (if any) of the pointed-to value.
1108 ///
1109 /// See [`ptr::drop_in_place`] for safety concerns and examples.
1110 ///
1111 /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
1112 #[inline(always)]
1113 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1114 pub unsafe fn drop_in_place(self) {
1115 // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
1116 unsafe { ptr::drop_in_place(self.as_ptr()) }
1117 }
1118
1119 /// Overwrites a memory location with the given value without reading or
1120 /// dropping the old value.
1121 ///
1122 /// See [`ptr::write`] for safety concerns and examples.
1123 ///
1124 /// [`ptr::write`]: crate::ptr::write()
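    ///
    /// # Examples
    ///
    /// A minimal sketch writing into uninitialized storage, where the "old value" must not be
    /// dropped because it never existed:
    ///
    /// ```
    /// use std::mem::MaybeUninit;
    /// use std::ptr::NonNull;
    ///
    /// let mut slot = MaybeUninit::<String>::uninit();
    /// let ptr = NonNull::from_mut(&mut slot).cast::<String>();
    /// // SAFETY: `ptr` is valid for writes and properly aligned; `write` does not read or
    /// // drop the (uninitialized) old contents.
    /// unsafe { ptr.write(String::from("hello")) };
    /// // SAFETY: the slot was just initialized.
    /// assert_eq!(unsafe { slot.assume_init() }, "hello");
    /// ```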
1125 #[inline(always)]
1126 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1127 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1128 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1129 pub const unsafe fn write(self, val: T)
1130 where
1131 T: Sized,
1132 {
1133 // SAFETY: the caller must uphold the safety contract for `write`.
1134 unsafe { ptr::write(self.as_ptr(), val) }
1135 }
1136
1137 /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
1138 /// bytes of memory starting at `self` to `val`.
1139 ///
1140 /// See [`ptr::write_bytes`] for safety concerns and examples.
1141 ///
1142 /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
1143 #[inline(always)]
1144 #[doc(alias = "memset")]
1145 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1146 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1147 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1148 pub const unsafe fn write_bytes(self, val: u8, count: usize)
1149 where
1150 T: Sized,
1151 {
1152 // SAFETY: the caller must uphold the safety contract for `write_bytes`.
1153 unsafe { ptr::write_bytes(self.as_ptr(), val, count) }
1154 }
1155
1156 /// Performs a volatile write of a memory location with the given value without
1157 /// reading or dropping the old value.
1158 ///
1159 /// Volatile operations are intended to act on I/O memory, and are guaranteed
1160 /// to not be elided or reordered by the compiler across other volatile
1161 /// operations.
1162 ///
1163 /// See [`ptr::write_volatile`] for safety concerns and examples.
1164 ///
1165 /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
1166 #[inline(always)]
1167 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1168 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1169 pub unsafe fn write_volatile(self, val: T)
1170 where
1171 T: Sized,
1172 {
1173 // SAFETY: the caller must uphold the safety contract for `write_volatile`.
1174 unsafe { ptr::write_volatile(self.as_ptr(), val) }
1175 }
1176
1177 /// Overwrites a memory location with the given value without reading or
1178 /// dropping the old value.
1179 ///
1180 /// Unlike `write`, the pointer may be unaligned.
1181 ///
1182 /// See [`ptr::write_unaligned`] for safety concerns and examples.
1183 ///
1184 /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
1185 #[inline(always)]
1186 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1187 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1188 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1189 pub const unsafe fn write_unaligned(self, val: T)
1190 where
1191 T: Sized,
1192 {
1193 // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
1194 unsafe { ptr::write_unaligned(self.as_ptr(), val) }
1195 }
1196
1197 /// Replaces the value at `self` with `src`, returning the old
1198 /// value, without dropping either.
1199 ///
1200 /// See [`ptr::replace`] for safety concerns and examples.
1201 ///
1202 /// [`ptr::replace`]: crate::ptr::replace()
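    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 1u32;
    /// let ptr = NonNull::from_mut(&mut x);
    /// // SAFETY: `ptr` points to a live, initialized `u32`.
    /// let old = unsafe { ptr.replace(2) };
    /// assert_eq!(old, 1);
    /// assert_eq!(x, 2);
    /// ```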
1203 #[inline(always)]
1204 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1205 #[rustc_const_stable(feature = "const_inherent_ptr_replace", since = "1.88.0")]
1206 pub const unsafe fn replace(self, src: T) -> T
1207 where
1208 T: Sized,
1209 {
1210 // SAFETY: the caller must uphold the safety contract for `replace`.
1211 unsafe { ptr::replace(self.as_ptr(), src) }
1212 }
1213
1214 /// Swaps the values at two mutable locations of the same type, without
1215 /// deinitializing either. They may overlap, unlike `mem::swap` which is
1216 /// otherwise equivalent.
1217 ///
1218 /// See [`ptr::swap`] for safety concerns and examples.
1219 ///
1220 /// [`ptr::swap`]: crate::ptr::swap()
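    ///
    /// # Examples
    ///
    /// A minimal sketch swapping two non-overlapping values:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut a = 1u32;
    /// let mut b = 2u32;
    /// let pa = NonNull::from_mut(&mut a);
    /// let pb = NonNull::from_mut(&mut b);
    /// // SAFETY: both pointers are valid for reads and writes of a `u32`.
    /// unsafe { pa.swap(pb) };
    /// assert_eq!((a, b), (2, 1));
    /// ```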
1221 #[inline(always)]
1222 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1223 #[rustc_const_stable(feature = "const_swap", since = "1.85.0")]
1224 pub const unsafe fn swap(self, with: NonNull<T>)
1225 where
1226 T: Sized,
1227 {
1228 // SAFETY: the caller must uphold the safety contract for `swap`.
1229 unsafe { ptr::swap(self.as_ptr(), with.as_ptr()) }
1230 }
1231
1232 /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
1233 /// `align`.
1234 ///
1235 /// If it is not possible to align the pointer, the implementation returns
1236 /// `usize::MAX`.
1237 ///
1238 /// The offset is expressed in number of `T` elements, and not bytes.
1239 ///
1240 /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
1241 /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
1242 /// the returned offset is correct in all terms other than alignment.
1243 ///
1244 /// When this is called during compile-time evaluation (which is unstable), the implementation
1245 /// may return `usize::MAX` in cases where that can never happen at runtime. This is because the
1246 /// actual alignment of pointers is not known yet during compile-time, so an offset with
1247 /// guaranteed alignment can sometimes not be computed. For example, a buffer declared as `[u8;
1248 /// N]` might be allocated at an odd or an even address, but at compile-time this is not yet
1249 /// known, so the execution has to be correct for either choice. It is therefore impossible to
1250 /// find an offset that is guaranteed to be 2-aligned. (This behavior is subject to change, as usual
1251 /// for unstable APIs.)
1252 ///
1253 /// # Panics
1254 ///
1255 /// The function panics if `align` is not a power-of-two.
1256 ///
1257 /// # Examples
1258 ///
1259 /// Accessing adjacent `u8` as `u16`
1260 ///
1261 /// ```
1262 /// use std::ptr::NonNull;
1263 ///
1264 /// # unsafe {
1265 /// let x = [5_u8, 6, 7, 8, 9];
1266 /// let ptr = NonNull::new(x.as_ptr() as *mut u8).unwrap();
1267 /// let offset = ptr.align_offset(align_of::<u16>());
1268 ///
1269 /// if offset < x.len() - 1 {
1270 /// let u16_ptr = ptr.add(offset).cast::<u16>();
1271 /// assert!(u16_ptr.read() == u16::from_ne_bytes([5, 6]) || u16_ptr.read() == u16::from_ne_bytes([6, 7]));
1272 /// } else {
1273 /// // while the pointer can be aligned via `offset`, it would point
1274 /// // outside the allocation
1275 /// }
1276 /// # }
1277 /// ```
1278 #[inline]
1279 #[must_use]
1280 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1281 pub fn align_offset(self, align: usize) -> usize
1282 where
1283 T: Sized,
1284 {
1285 if !align.is_power_of_two() {
1286 panic!("align_offset: align is not a power-of-two");
1287 }
1288
1289 {
1290 // SAFETY: `align` has been checked to be a power of 2 above.
1291 unsafe { ptr::align_offset(self.as_ptr(), align) }
1292 }
1293 }
1294
1295 /// Returns whether the pointer is properly aligned for `T`.
1296 ///
1297 /// # Examples
1298 ///
1299 /// ```
1300 /// use std::ptr::NonNull;
1301 ///
1302 /// // On some platforms, the alignment of i32 is less than 4.
1303 /// #[repr(align(4))]
1304 /// struct AlignedI32(i32);
1305 ///
1306 /// let data = AlignedI32(42);
1307 /// let ptr = NonNull::<AlignedI32>::from(&data);
1308 ///
1309 /// assert!(ptr.is_aligned());
1310 /// assert!(!NonNull::new(ptr.as_ptr().wrapping_byte_add(1)).unwrap().is_aligned());
1311 /// ```
1312 #[inline]
1313 #[must_use]
1314 #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
1315 pub fn is_aligned(self) -> bool
1316 where
1317 T: Sized,
1318 {
1319 self.as_ptr().is_aligned()
1320 }
1321
1322 /// Returns whether the pointer is aligned to `align`.
1323 ///
1324 /// For non-`Sized` pointees this operation considers only the data pointer,
1325 /// ignoring the metadata.
1326 ///
1327 /// # Panics
1328 ///
1329 /// The function panics if `align` is not a power-of-two (this includes 0).
1330 ///
1331 /// # Examples
1332 ///
1333 /// ```
1334 /// #![feature(pointer_is_aligned_to)]
1335 ///
1336 /// // On some platforms, the alignment of i32 is less than 4.
1337 /// #[repr(align(4))]
1338 /// struct AlignedI32(i32);
1339 ///
1340 /// let data = AlignedI32(42);
1341 /// let ptr = &data as *const AlignedI32;
1342 ///
1343 /// assert!(ptr.is_aligned_to(1));
1344 /// assert!(ptr.is_aligned_to(2));
1345 /// assert!(ptr.is_aligned_to(4));
1346 ///
1347 /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
1348 /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
1349 ///
1350 /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
1351 /// ```
    #[inline]
    #[must_use]
    #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
    pub fn is_aligned_to(self, align: usize) -> bool {
        self.as_ptr().is_aligned_to(align)
    }
}

impl<T> NonNull<[T]> {
    /// Creates a non-null raw slice from a thin pointer and a length.
    ///
    /// The `len` argument is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, but dereferencing the return value is unsafe.
    /// See the documentation of [`slice::from_raw_parts`] for slice safety requirements.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr::NonNull;
    ///
    /// // create a slice pointer when starting out with a pointer to the first element
    /// let mut x = [5, 6, 7];
    /// let nonnull_pointer = NonNull::new(x.as_mut_ptr()).unwrap();
    /// let slice = NonNull::slice_from_raw_parts(nonnull_pointer, 3);
    /// assert_eq!(unsafe { slice.as_ref()[2] }, 7);
    /// ```
    ///
    /// (Note that this example artificially demonstrates a use of this method,
    /// but `let slice = NonNull::from(&x[..]);` would be a better way to write code like this.)
    #[stable(feature = "nonnull_slice_from_raw_parts", since = "1.70.0")]
    #[rustc_const_stable(feature = "const_slice_from_raw_parts_mut", since = "1.83.0")]
    #[must_use]
    #[inline]
    pub const fn slice_from_raw_parts(data: NonNull<T>, len: usize) -> Self {
        // SAFETY: `data` is a `NonNull` pointer which is necessarily non-null
        unsafe { Self::new_unchecked(super::slice_from_raw_parts_mut(data.as_ptr(), len)) }
    }

    /// Returns the length of a non-null raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the non-null raw slice cannot be dereferenced to a slice
    /// because the pointer does not have a valid address.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr::NonNull;
    ///
    /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[stable(feature = "slice_ptr_len_nonnull", since = "1.63.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len_nonnull", since = "1.63.0")]
    #[must_use]
    #[inline]
    pub const fn len(self) -> usize {
        self.as_ptr().len()
    }

    /// Returns `true` if the non-null raw slice has a length of 0.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr::NonNull;
    ///
    /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
    /// assert!(!slice.is_empty());
    /// ```
    #[stable(feature = "slice_ptr_is_empty_nonnull", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_is_empty_nonnull", since = "1.79.0")]
    #[must_use]
    #[inline]
    pub const fn is_empty(self) -> bool {
        self.len() == 0
    }

    /// Returns a non-null pointer to the slice's buffer.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr::NonNull;
    ///
    /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
    /// assert_eq!(slice.as_non_null_ptr(), NonNull::<i8>::dangling());
    /// ```
    #[inline]
    #[must_use]
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    pub const fn as_non_null_ptr(self) -> NonNull<T> {
        self.cast()
    }

    /// Returns a raw pointer to the slice's buffer.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr::NonNull;
    ///
    /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
    /// assert_eq!(slice.as_mut_ptr(), NonNull::<i8>::dangling().as_ptr());
    /// ```
    #[inline]
    #[must_use]
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[rustc_never_returns_null_ptr]
    pub const fn as_mut_ptr(self) -> *mut T {
        self.as_non_null_ptr().as_ptr()
    }

    /// Returns a shared reference to a slice of possibly uninitialized values. In contrast to
    /// [`as_ref`], this does not require the value to be initialized.
    ///
    /// For the mutable counterpart see [`as_uninit_slice_mut`].
    ///
    /// [`as_ref`]: NonNull::as_ref
    /// [`as_uninit_slice_mut`]: NonNull::as_uninit_slice_mut
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that all of the following is true:
    ///
    /// * The pointer must be [valid] for reads for `ptr.len() * size_of::<T>()` many bytes,
    ///   and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single allocation!
    ///       Slices can never span across multiple allocations.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts`].
    ///
    /// [valid]: crate::ptr#safety
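    ///
    /// # Examples
    ///
    /// A minimal sketch of the intended usage, viewing an already-initialized array
    /// through a `NonNull<[T]>` as possibly-uninitialized elements:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    /// use std::mem::MaybeUninit;
    /// use std::ptr::NonNull;
    ///
    /// let array = [1u8, 2, 3];
    /// let slice: NonNull<[u8]> = NonNull::from(&array[..]);
    /// // SAFETY: `slice` points to a live, properly aligned array of 3 bytes, and the
    /// // array is not mutated while the returned reference is alive.
    /// let uninit: &[MaybeUninit<u8>] = unsafe { slice.as_uninit_slice() };
    /// assert_eq!(uninit.len(), 3);
    /// // Reading an element is fine here because `array` is fully initialized.
    /// assert_eq!(unsafe { uninit[0].assume_init() }, 1);
    /// ```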
    #[inline]
    #[must_use]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_slice<'a>(self) -> &'a [MaybeUninit<T>] {
        // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
        unsafe { slice::from_raw_parts(self.cast().as_ptr(), self.len()) }
    }

    /// Returns a unique reference to a slice of possibly uninitialized values. In contrast to
    /// [`as_mut`], this does not require the value to be initialized.
    ///
    /// For the shared counterpart see [`as_uninit_slice`].
    ///
    /// [`as_mut`]: NonNull::as_mut
    /// [`as_uninit_slice`]: NonNull::as_uninit_slice
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that all of the following is true:
    ///
    /// * The pointer must be [valid] for reads and writes for `ptr.len() * size_of::<T>()`
    ///   many bytes, and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single allocation!
    ///       Slices can never span across multiple allocations.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get accessed (read or written) through any other pointer.
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts_mut`].
    ///
    /// [valid]: crate::ptr#safety
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(allocator_api, ptr_as_uninit)]
    ///
    /// use std::alloc::{Allocator, Layout, Global};
    /// use std::mem::MaybeUninit;
    /// use std::ptr::NonNull;
    ///
    /// let memory: NonNull<[u8]> = Global.allocate(Layout::new::<[u8; 32]>())?;
    /// // This is safe as `memory` is valid for reads and writes for `memory.len()` many bytes.
    /// // Note that calling `memory.as_mut()` is not allowed here as the content may be uninitialized.
    /// # #[allow(unused_variables)]
    /// let slice: &mut [MaybeUninit<u8>] = unsafe { memory.as_uninit_slice_mut() };
    /// # // Prevent leaks for Miri.
    /// # unsafe { Global.deallocate(memory.cast(), Layout::new::<[u8; 32]>()); }
    /// # Ok::<_, std::alloc::AllocError>(())
    /// ```
    #[inline]
    #[must_use]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_slice_mut<'a>(self) -> &'a mut [MaybeUninit<T>] {
        // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
        unsafe { slice::from_raw_parts_mut(self.cast().as_ptr(), self.len()) }
    }

    /// Returns a raw pointer to an element or subslice, without doing bounds
    /// checking.
    ///
    /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
    /// is *[undefined behavior]* even if the resulting pointer is not used.
    ///
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(slice_ptr_get)]
    /// use std::ptr::NonNull;
    ///
    /// let x = &mut [1, 2, 4];
    /// let x = NonNull::slice_from_raw_parts(NonNull::new(x.as_mut_ptr()).unwrap(), x.len());
    ///
    /// unsafe {
    ///     assert_eq!(x.get_unchecked_mut(1).as_ptr(), x.as_non_null_ptr().as_ptr().add(1));
    /// }
    /// ```
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[inline]
    pub unsafe fn get_unchecked_mut<I>(self, index: I) -> NonNull<I::Output>
    where
        I: SliceIndex<[T]>,
    {
        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
        // As a consequence, the resulting pointer cannot be null.
        unsafe { NonNull::new_unchecked(self.as_ptr().get_unchecked_mut(index)) }
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> Clone for NonNull<T> {
    #[inline(always)]
    fn clone(&self) -> Self {
        *self
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> Copy for NonNull<T> {}

#[unstable(feature = "coerce_unsized", issue = "18598")]
impl<T: PointeeSized, U: PointeeSized> CoerceUnsized<NonNull<U>> for NonNull<T> where T: Unsize<U> {}

#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: PointeeSized, U: PointeeSized> DispatchFromDyn<NonNull<U>> for NonNull<T> where T: Unsize<U> {}

#[stable(feature = "pin", since = "1.33.0")]
unsafe impl<T: PointeeSized> PinCoerceUnsized for NonNull<T> {}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> fmt::Debug for NonNull<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.as_ptr(), f)
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> fmt::Pointer for NonNull<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.as_ptr(), f)
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> Eq for NonNull<T> {}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> PartialEq for NonNull<T> {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn eq(&self, other: &Self) -> bool {
        self.as_ptr() == other.as_ptr()
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> Ord for NonNull<T> {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn cmp(&self, other: &Self) -> Ordering {
        self.as_ptr().cmp(&other.as_ptr())
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> PartialOrd for NonNull<T> {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.as_ptr().partial_cmp(&other.as_ptr())
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> hash::Hash for NonNull<T> {
    #[inline]
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        self.as_ptr().hash(state)
    }
}

#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: PointeeSized> From<Unique<T>> for NonNull<T> {
    #[inline]
    fn from(unique: Unique<T>) -> Self {
        unique.as_non_null_ptr()
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> From<&mut T> for NonNull<T> {
    /// Converts a `&mut T` to a `NonNull<T>`.
    ///
    /// This conversion is safe and infallible since references cannot be null.
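    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut value = 0u32;
    /// let ptr: NonNull<u32> = NonNull::from(&mut value);
    /// // Writing through the pointer is allowed because it was derived from a
    /// // mutable reference and no other reference to `value` is in use here.
    /// unsafe { *ptr.as_ptr() = 7 };
    /// assert_eq!(value, 7);
    /// ```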
    #[inline]
    fn from(r: &mut T) -> Self {
        NonNull::from_mut(r)
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> From<&T> for NonNull<T> {
    /// Converts a `&T` to a `NonNull<T>`.
    ///
    /// This conversion is safe and infallible since references cannot be null.
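    ///
    /// # Examples
    ///
    /// A minimal sketch; a pointer created from a shared reference must only be
    /// used for reads unless the pointee is inside an `UnsafeCell`:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let value = 42u32;
    /// let ptr: NonNull<u32> = NonNull::from(&value);
    /// // Reading through the pointer is fine; writing through it would be undefined behavior.
    /// assert_eq!(unsafe { *ptr.as_ptr() }, 42);
    /// ```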
    #[inline]
    fn from(r: &T) -> Self {
        NonNull::from_ref(r)
    }
}