core/ptr/non_null.rs
1use crate::cmp::Ordering;
2use crate::marker::{PointeeSized, Unsize};
3use crate::mem::{MaybeUninit, SizedTypeProperties};
4use crate::num::NonZero;
5use crate::ops::{CoerceUnsized, DispatchFromDyn};
6use crate::pin::PinCoerceUnsized;
7use crate::ptr::Unique;
8use crate::slice::{self, SliceIndex};
9use crate::ub_checks::assert_unsafe_precondition;
10use crate::{fmt, hash, intrinsics, mem, ptr};
11
12/// `*mut T` but non-zero and [covariant].
13///
14/// This is often the correct thing to use when building data structures using
15/// raw pointers, but is ultimately more dangerous to use because of its additional
16/// properties. If you're not sure if you should use `NonNull<T>`, just use `*mut T`!
17///
18/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
19/// is never dereferenced. This is so that enums may use this forbidden value
20/// as a discriminant -- `Option<NonNull<T>>` has the same size as `*mut T`.
21 /// However, the pointer may still dangle if it isn't dereferenced.
22///
23/// Unlike `*mut T`, `NonNull<T>` is covariant over `T`. This is usually the correct
24 /// choice for data structures and safe abstractions such as `Box`, `Rc`, `Arc`, `Vec`,
25/// and `LinkedList`.
26///
27/// In rare cases, if your type exposes a way to mutate the value of `T` through a `NonNull<T>`,
28/// and you need to prevent unsoundness from variance (for example, if `T` could be a reference
29/// with a shorter lifetime), you should add a field to make your type invariant, such as
30/// `PhantomData<Cell<T>>` or `PhantomData<&'a mut T>`.
31///
32/// Example of a type that must be invariant:
33/// ```rust
34/// use std::cell::Cell;
35/// use std::marker::PhantomData;
36/// struct Invariant<T> {
37/// ptr: std::ptr::NonNull<T>,
38/// _invariant: PhantomData<Cell<T>>,
39/// }
40/// ```
41///
42/// Notice that `NonNull<T>` has a `From` instance for `&T`. However, this does
43/// not change the fact that mutating through a (pointer derived from a) shared
44/// reference is undefined behavior unless the mutation happens inside an
45/// [`UnsafeCell<T>`]. The same goes for creating a mutable reference from a shared
46/// reference. When using this `From` instance without an `UnsafeCell<T>`,
47/// it is your responsibility to ensure that `as_mut` is never called, and `as_ptr`
48/// is never used for mutation.
49///
50/// # Representation
51///
52/// Thanks to the [null pointer optimization],
53/// `NonNull<T>` and `Option<NonNull<T>>`
54/// are guaranteed to have the same size and alignment:
55///
56/// ```
57/// use std::ptr::NonNull;
58///
59/// assert_eq!(size_of::<NonNull<i16>>(), size_of::<Option<NonNull<i16>>>());
60/// assert_eq!(align_of::<NonNull<i16>>(), align_of::<Option<NonNull<i16>>>());
61///
62/// assert_eq!(size_of::<NonNull<str>>(), size_of::<Option<NonNull<str>>>());
63/// assert_eq!(align_of::<NonNull<str>>(), align_of::<Option<NonNull<str>>>());
64/// ```
65///
66/// [covariant]: https://doc.rust-lang.org/reference/subtyping.html
67/// [`PhantomData`]: crate::marker::PhantomData
68/// [`UnsafeCell<T>`]: crate::cell::UnsafeCell
69/// [null pointer optimization]: crate::option#representation
70#[stable(feature = "nonnull", since = "1.25.0")]
71#[repr(transparent)]
72#[rustc_layout_scalar_valid_range_start(1)]
73#[rustc_nonnull_optimization_guaranteed]
74#[rustc_diagnostic_item = "NonNull"]
75pub struct NonNull<T: PointeeSized> {
76 // Remember to use `.as_ptr()` instead of `.pointer`, as field projecting to
77 // this is banned by <https://github.com/rust-lang/compiler-team/issues/807>.
78 pointer: *const T,
79}
80
81/// `NonNull` pointers are not `Send` because the data they reference may be aliased.
82// N.B., this impl is unnecessary, but should provide better error messages.
83#[stable(feature = "nonnull", since = "1.25.0")]
84impl<T: PointeeSized> !Send for NonNull<T> {}
85
86/// `NonNull` pointers are not `Sync` because the data they reference may be aliased.
87// N.B., this impl is unnecessary, but should provide better error messages.
88#[stable(feature = "nonnull", since = "1.25.0")]
89impl<T: PointeeSized> !Sync for NonNull<T> {}
90
91impl<T: Sized> NonNull<T> {
92 /// Creates a pointer with the given address and no [provenance][crate::ptr#provenance].
93 ///
94 /// For more details, see the equivalent method on a raw pointer, [`ptr::without_provenance_mut`].
95 ///
96 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
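    ///
    /// # Examples
    ///
    /// A minimal sketch: building a pointer from a fixed, nonzero address. The
    /// address `0x1000` is an arbitrary placeholder and the resulting pointer
    /// must not be dereferenced.
    ///
    /// ```
    /// use std::num::NonZero;
    /// use std::ptr::NonNull;
    ///
    /// let addr = NonZero::new(0x1000usize).unwrap();
    /// let ptr = NonNull::<u8>::without_provenance(addr);
    /// assert_eq!(ptr.addr(), addr);
    /// ```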
97 #[stable(feature = "nonnull_provenance", since = "1.89.0")]
98 #[rustc_const_stable(feature = "nonnull_provenance", since = "1.89.0")]
99 #[must_use]
100 #[inline]
101 pub const fn without_provenance(addr: NonZero<usize>) -> Self {
102 let pointer = crate::ptr::without_provenance(addr.get());
103 // SAFETY: we know `addr` is non-zero.
104 unsafe { NonNull { pointer } }
105 }
106
107 /// Creates a new `NonNull` that is dangling, but well-aligned.
108 ///
109 /// This is useful for initializing types which lazily allocate, like
110 /// `Vec::new` does.
111 ///
112 /// Note that the address of the returned pointer may coincide with
113 /// that of a valid pointer, which means this must not be used
114 /// as a "not yet initialized" sentinel value.
115 /// Types that lazily allocate must track initialization by some other means.
116 ///
117 /// # Examples
118 ///
119 /// ```
120 /// use std::ptr::NonNull;
121 ///
122 /// let ptr = NonNull::<u32>::dangling();
123 /// // Important: don't try to access the value of `ptr` without
124 /// // initializing it first! The pointer is not null but isn't valid either!
125 /// ```
126 #[stable(feature = "nonnull", since = "1.25.0")]
127 #[rustc_const_stable(feature = "const_nonnull_dangling", since = "1.36.0")]
128 #[must_use]
129 #[inline]
130 pub const fn dangling() -> Self {
131 let align = crate::ptr::Alignment::of::<T>();
132 NonNull::without_provenance(align.as_nonzero())
133 }
134
135 /// Converts an address back to a mutable pointer, picking up some previously 'exposed'
136 /// [provenance][crate::ptr#provenance].
137 ///
138 /// For more details, see the equivalent method on a raw pointer, [`ptr::with_exposed_provenance_mut`].
139 ///
140 /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
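    ///
    /// # Examples
    ///
    /// A small sketch of the expose/recover round trip, assuming the provenance
    /// was previously exposed via [`expose_provenance`][NonNull::expose_provenance]:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 5u32;
    /// let ptr = NonNull::from(&mut x);
    /// let addr = ptr.expose_provenance();
    ///
    /// let recovered = NonNull::<u32>::with_exposed_provenance(addr);
    /// assert_eq!(unsafe { recovered.read() }, 5);
    /// ```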
141 #[stable(feature = "nonnull_provenance", since = "1.89.0")]
142 #[inline]
143 pub fn with_exposed_provenance(addr: NonZero<usize>) -> Self {
144 // SAFETY: we know `addr` is non-zero.
145 unsafe {
146 let ptr = crate::ptr::with_exposed_provenance_mut(addr.get());
147 NonNull::new_unchecked(ptr)
148 }
149 }
150
151 /// Returns a shared reference to the value. In contrast to [`as_ref`], this does not require
152 /// that the value be initialized.
153 ///
154 /// For the mutable counterpart see [`as_uninit_mut`].
155 ///
156 /// [`as_ref`]: NonNull::as_ref
157 /// [`as_uninit_mut`]: NonNull::as_uninit_mut
158 ///
159 /// # Safety
160 ///
161 /// When calling this method, you have to ensure that
162 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
163 /// Note that because the created reference is to `MaybeUninit<T>`, the
164 /// source pointer can point to uninitialized memory.
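    ///
    /// # Examples
    ///
    /// A minimal sketch, viewing an already-initialized value through
    /// `MaybeUninit` (the feature gate below is required because this API is
    /// unstable):
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 7u32;
    /// let ptr = NonNull::from(&mut x);
    ///
    /// let uninit = unsafe { ptr.as_uninit_ref() };
    /// // The pointee is known to be initialized here, so this is fine.
    /// assert_eq!(unsafe { *uninit.assume_init_ref() }, 7);
    /// ```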
165 #[inline]
166 #[must_use]
167 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
168 pub const unsafe fn as_uninit_ref<'a>(self) -> &'a MaybeUninit<T> {
169 // SAFETY: the caller must guarantee that `self` meets all the
170 // requirements for a reference.
171 unsafe { &*self.cast().as_ptr() }
172 }
173
174 /// Returns a unique reference to the value. In contrast to [`as_mut`], this does not require
175 /// that the value be initialized.
176 ///
177 /// For the shared counterpart see [`as_uninit_ref`].
178 ///
179 /// [`as_mut`]: NonNull::as_mut
180 /// [`as_uninit_ref`]: NonNull::as_uninit_ref
181 ///
182 /// # Safety
183 ///
184 /// When calling this method, you have to ensure that
185 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
186 /// Note that because the created reference is to `MaybeUninit<T>`, the
187 /// source pointer can point to uninitialized memory.
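    ///
    /// # Examples
    ///
    /// A small sketch: initializing the pointee through the `MaybeUninit` view
    /// (the feature gate below is required because this API is unstable):
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    /// use std::mem::MaybeUninit;
    /// use std::ptr::NonNull;
    ///
    /// let mut slot = MaybeUninit::<u32>::uninit();
    /// let ptr = NonNull::from(&mut slot).cast::<u32>();
    ///
    /// unsafe { ptr.as_uninit_mut() }.write(9);
    /// assert_eq!(unsafe { ptr.read() }, 9);
    /// ```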
188 #[inline]
189 #[must_use]
190 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
191 pub const unsafe fn as_uninit_mut<'a>(self) -> &'a mut MaybeUninit<T> {
192 // SAFETY: the caller must guarantee that `self` meets all the
193 // requirements for a reference.
194 unsafe { &mut *self.cast().as_ptr() }
195 }
196
197 /// Casts from a pointer-to-`T` to a pointer-to-`[T; N]`.
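    ///
    /// # Examples
    ///
    /// A short sketch (the feature gate below is required because this API is
    /// unstable):
    ///
    /// ```
    /// #![feature(ptr_cast_array)]
    /// use std::ptr::NonNull;
    ///
    /// let mut buf = [1u8, 2, 3, 4];
    /// let elem_ptr = NonNull::from(&mut buf).cast::<u8>();
    ///
    /// let arr_ptr: NonNull<[u8; 4]> = elem_ptr.cast_array::<4>();
    /// assert_eq!(unsafe { arr_ptr.read() }, [1, 2, 3, 4]);
    /// ```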
198 #[inline]
199 #[unstable(feature = "ptr_cast_array", issue = "144514")]
200 pub const fn cast_array<const N: usize>(self) -> NonNull<[T; N]> {
201 self.cast()
202 }
203}
204
205impl<T: PointeeSized> NonNull<T> {
206 /// Creates a new `NonNull`.
207 ///
208 /// # Safety
209 ///
210 /// `ptr` must be non-null.
211 ///
212 /// # Examples
213 ///
214 /// ```
215 /// use std::ptr::NonNull;
216 ///
217 /// let mut x = 0u32;
218 /// let ptr = unsafe { NonNull::new_unchecked(&mut x as *mut _) };
219 /// ```
220 ///
221 /// *Incorrect* usage of this function:
222 ///
223 /// ```rust,no_run
224 /// use std::ptr::NonNull;
225 ///
226 /// // NEVER DO THAT!!! This is undefined behavior. ⚠️
227 /// let ptr = unsafe { NonNull::<u32>::new_unchecked(std::ptr::null_mut()) };
228 /// ```
229 #[stable(feature = "nonnull", since = "1.25.0")]
230 #[rustc_const_stable(feature = "const_nonnull_new_unchecked", since = "1.25.0")]
231 #[inline]
232 #[track_caller]
233 pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
234 // SAFETY: the caller must guarantee that `ptr` is non-null.
235 unsafe {
236 assert_unsafe_precondition!(
237 check_language_ub,
238 "NonNull::new_unchecked requires that the pointer is non-null",
239 (ptr: *mut () = ptr as *mut ()) => !ptr.is_null()
240 );
241 NonNull { pointer: ptr as _ }
242 }
243 }
244
245 /// Creates a new `NonNull` if `ptr` is non-null.
246 ///
247 /// # Panics during const evaluation
248 ///
249 /// This method will panic during const evaluation if the pointer cannot be
250 /// determined to be null or not. See [`is_null`] for more information.
251 ///
252 /// [`is_null`]: ../primitive.pointer.html#method.is_null-1
253 ///
254 /// # Examples
255 ///
256 /// ```
257 /// use std::ptr::NonNull;
258 ///
259 /// let mut x = 0u32;
260 /// let ptr = NonNull::<u32>::new(&mut x as *mut _).expect("ptr is null!");
261 ///
262 /// if let Some(ptr) = NonNull::<u32>::new(std::ptr::null_mut()) {
263 /// unreachable!();
264 /// }
265 /// ```
266 #[stable(feature = "nonnull", since = "1.25.0")]
267 #[rustc_const_stable(feature = "const_nonnull_new", since = "1.85.0")]
268 #[inline]
269 pub const fn new(ptr: *mut T) -> Option<Self> {
270 if !ptr.is_null() {
271 // SAFETY: The pointer is already checked and is not null
272 Some(unsafe { Self::new_unchecked(ptr) })
273 } else {
274 None
275 }
276 }
277
278 /// Converts a reference to a `NonNull` pointer.
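    ///
    /// # Examples
    ///
    /// A minimal usage sketch:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let x = 42u32;
    /// let ptr = NonNull::from_ref(&x);
    /// assert_eq!(unsafe { ptr.read() }, 42);
    /// ```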
279 #[stable(feature = "non_null_from_ref", since = "1.89.0")]
280 #[rustc_const_stable(feature = "non_null_from_ref", since = "1.89.0")]
281 #[inline]
282 pub const fn from_ref(r: &T) -> Self {
283 // SAFETY: A reference cannot be null.
284 unsafe { NonNull { pointer: r as *const T } }
285 }
286
287 /// Converts a mutable reference to a `NonNull` pointer.
288 #[stable(feature = "non_null_from_ref", since = "1.89.0")]
289 #[rustc_const_stable(feature = "non_null_from_ref", since = "1.89.0")]
290 #[inline]
291 pub const fn from_mut(r: &mut T) -> Self {
292 // SAFETY: A mutable reference cannot be null.
293 unsafe { NonNull { pointer: r as *mut T } }
294 }
295
296 /// Performs the same functionality as [`std::ptr::from_raw_parts`], except that a
297 /// `NonNull` pointer is returned, as opposed to a raw `*const` pointer.
298 ///
299 /// See the documentation of [`std::ptr::from_raw_parts`] for more details.
300 ///
301 /// [`std::ptr::from_raw_parts`]: crate::ptr::from_raw_parts
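    ///
    /// # Examples
    ///
    /// A sketch of splitting a slice pointer into its parts and rebuilding it
    /// (the feature gate below is required because this API is unstable):
    ///
    /// ```
    /// #![feature(ptr_metadata)]
    /// use std::ptr::NonNull;
    ///
    /// let mut v = [1i32, 2, 3];
    /// let slice_ptr: NonNull<[i32]> = NonNull::from(&mut v[..]);
    ///
    /// let (data, len) = slice_ptr.to_raw_parts();
    /// let rebuilt: NonNull<[i32]> = NonNull::from_raw_parts(data, len);
    /// assert_eq!(rebuilt.len(), 3);
    /// assert_eq!(unsafe { rebuilt.as_ref()[2] }, 3);
    /// ```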
302 #[unstable(feature = "ptr_metadata", issue = "81513")]
303 #[inline]
304 pub const fn from_raw_parts(
305 data_pointer: NonNull<impl super::Thin>,
306 metadata: <T as super::Pointee>::Metadata,
307 ) -> NonNull<T> {
308 // SAFETY: The result of `ptr::from_raw_parts_mut` is non-null because `data_pointer` is.
309 unsafe {
310 NonNull::new_unchecked(super::from_raw_parts_mut(data_pointer.as_ptr(), metadata))
311 }
312 }
313
314 /// Decomposes a (possibly wide) pointer into its data pointer and metadata components.
315 ///
316 /// The pointer can be later reconstructed with [`NonNull::from_raw_parts`].
317 #[unstable(feature = "ptr_metadata", issue = "81513")]
318 #[must_use = "this returns the result of the operation, \
319 without modifying the original"]
320 #[inline]
321 pub const fn to_raw_parts(self) -> (NonNull<()>, <T as super::Pointee>::Metadata) {
322 (self.cast(), super::metadata(self.as_ptr()))
323 }
324
325 /// Gets the "address" portion of the pointer.
326 ///
327 /// For more details, see the equivalent method on a raw pointer, [`pointer::addr`].
328 ///
329 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
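    ///
    /// # Examples
    ///
    /// A small sketch: the address is independent of the pointee type:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::from(&mut x);
    /// assert_eq!(ptr.addr(), ptr.cast::<u8>().addr());
    /// ```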
330 #[must_use]
331 #[inline]
332 #[stable(feature = "strict_provenance", since = "1.84.0")]
333 pub fn addr(self) -> NonZero<usize> {
334 // SAFETY: The pointer is guaranteed by the type to be non-null,
335 // meaning that the address will be non-zero.
336 unsafe { NonZero::new_unchecked(self.as_ptr().addr()) }
337 }
338
339 /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
340 /// [`with_exposed_provenance`][NonNull::with_exposed_provenance] and returns the "address" portion.
341 ///
342 /// For more details, see the equivalent method on a raw pointer, [`pointer::expose_provenance`].
343 ///
344 /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
345 #[stable(feature = "nonnull_provenance", since = "1.89.0")]
346 pub fn expose_provenance(self) -> NonZero<usize> {
347 // SAFETY: The pointer is guaranteed by the type to be non-null,
348 // meaning that the address will be non-zero.
349 unsafe { NonZero::new_unchecked(self.as_ptr().expose_provenance()) }
350 }
351
352 /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
353 /// `self`.
354 ///
355 /// For more details, see the equivalent method on a raw pointer, [`pointer::with_addr`].
356 ///
357 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
358 #[must_use]
359 #[inline]
360 #[stable(feature = "strict_provenance", since = "1.84.0")]
361 pub fn with_addr(self, addr: NonZero<usize>) -> Self {
362 // SAFETY: The result of `with_addr` is non-null because `addr` is guaranteed to be non-zero.
363 unsafe { NonNull::new_unchecked(self.as_ptr().with_addr(addr.get()) as *mut _) }
364 }
365
366 /// Creates a new pointer by mapping `self`'s address to a new one, preserving the
367 /// [provenance][crate::ptr#provenance] of `self`.
368 ///
369 /// For more details, see the equivalent method on a raw pointer, [`pointer::map_addr`].
370 ///
371 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
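    ///
    /// # Examples
    ///
    /// A sketch: rounding an address down to a multiple of 8 while keeping the
    /// provenance of `self` (the resulting pointer is never dereferenced here):
    ///
    /// ```
    /// use std::num::NonZero;
    /// use std::ptr::NonNull;
    ///
    /// let mut x = [0u8; 16];
    /// let ptr = NonNull::from(&mut x).cast::<u8>();
    ///
    /// let aligned = ptr.map_addr(|addr| NonZero::new(addr.get() & !7).unwrap());
    /// assert_eq!(aligned.addr().get() % 8, 0);
    /// ```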
372 #[must_use]
373 #[inline]
374 #[stable(feature = "strict_provenance", since = "1.84.0")]
375 pub fn map_addr(self, f: impl FnOnce(NonZero<usize>) -> NonZero<usize>) -> Self {
376 self.with_addr(f(self.addr()))
377 }
378
379 /// Acquires the underlying `*mut` pointer.
380 ///
381 /// # Examples
382 ///
383 /// ```
384 /// use std::ptr::NonNull;
385 ///
386 /// let mut x = 0u32;
387 /// let ptr = NonNull::new(&mut x).expect("ptr is null!");
388 ///
389 /// let x_value = unsafe { *ptr.as_ptr() };
390 /// assert_eq!(x_value, 0);
391 ///
392 /// unsafe { *ptr.as_ptr() += 2; }
393 /// let x_value = unsafe { *ptr.as_ptr() };
394 /// assert_eq!(x_value, 2);
395 /// ```
396 #[stable(feature = "nonnull", since = "1.25.0")]
397 #[rustc_const_stable(feature = "const_nonnull_as_ptr", since = "1.32.0")]
398 #[rustc_never_returns_null_ptr]
399 #[must_use]
400 #[inline(always)]
401 pub const fn as_ptr(self) -> *mut T {
402 // This is a transmute for the same reasons as `NonZero::get`.
403
404 // SAFETY: `NonNull` is `transparent` over a `*const T`, and `*const T`
405 // and `*mut T` have the same layout, so transitively we can transmute
406 // our `NonNull` to a `*mut T` directly.
407 unsafe { mem::transmute::<Self, *mut T>(self) }
408 }
409
410 /// Returns a shared reference to the value. If the value may be uninitialized, [`as_uninit_ref`]
411 /// must be used instead.
412 ///
413 /// For the mutable counterpart see [`as_mut`].
414 ///
415 /// [`as_uninit_ref`]: NonNull::as_uninit_ref
416 /// [`as_mut`]: NonNull::as_mut
417 ///
418 /// # Safety
419 ///
420 /// When calling this method, you have to ensure that
421 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
422 ///
423 /// # Examples
424 ///
425 /// ```
426 /// use std::ptr::NonNull;
427 ///
428 /// let mut x = 0u32;
429 /// let ptr = NonNull::new(&mut x as *mut _).expect("ptr is null!");
430 ///
431 /// let ref_x = unsafe { ptr.as_ref() };
432 /// println!("{ref_x}");
433 /// ```
434 ///
435 /// [the module documentation]: crate::ptr#safety
436 #[stable(feature = "nonnull", since = "1.25.0")]
437 #[rustc_const_stable(feature = "const_nonnull_as_ref", since = "1.73.0")]
438 #[must_use]
439 #[inline(always)]
440 pub const unsafe fn as_ref<'a>(&self) -> &'a T {
441 // SAFETY: the caller must guarantee that `self` meets all the
442 // requirements for a reference.
443 // `cast_const` avoids a mutable raw pointer deref.
444 unsafe { &*self.as_ptr().cast_const() }
445 }
446
447 /// Returns a unique reference to the value. If the value may be uninitialized, [`as_uninit_mut`]
448 /// must be used instead.
449 ///
450 /// For the shared counterpart see [`as_ref`].
451 ///
452 /// [`as_uninit_mut`]: NonNull::as_uninit_mut
453 /// [`as_ref`]: NonNull::as_ref
454 ///
455 /// # Safety
456 ///
457 /// When calling this method, you have to ensure that
458 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
459 /// # Examples
460 ///
461 /// ```
462 /// use std::ptr::NonNull;
463 ///
464 /// let mut x = 0u32;
465 /// let mut ptr = NonNull::new(&mut x).expect("null pointer");
466 ///
467 /// let x_ref = unsafe { ptr.as_mut() };
468 /// assert_eq!(*x_ref, 0);
469 /// *x_ref += 2;
470 /// assert_eq!(*x_ref, 2);
471 /// ```
472 ///
473 /// [the module documentation]: crate::ptr#safety
474 #[stable(feature = "nonnull", since = "1.25.0")]
475 #[rustc_const_stable(feature = "const_ptr_as_ref", since = "1.83.0")]
476 #[must_use]
477 #[inline(always)]
478 pub const unsafe fn as_mut<'a>(&mut self) -> &'a mut T {
479 // SAFETY: the caller must guarantee that `self` meets all the
480 // requirements for a mutable reference.
481 unsafe { &mut *self.as_ptr() }
482 }
483
484 /// Casts to a pointer of another type.
485 ///
486 /// # Examples
487 ///
488 /// ```
489 /// use std::ptr::NonNull;
490 ///
491 /// let mut x = 0u32;
492 /// let ptr = NonNull::new(&mut x as *mut _).expect("null pointer");
493 ///
494 /// let casted_ptr = ptr.cast::<i8>();
495 /// let raw_ptr: *mut i8 = casted_ptr.as_ptr();
496 /// ```
497 #[stable(feature = "nonnull_cast", since = "1.27.0")]
498 #[rustc_const_stable(feature = "const_nonnull_cast", since = "1.36.0")]
499 #[must_use = "this returns the result of the operation, \
500 without modifying the original"]
501 #[inline]
502 pub const fn cast<U>(self) -> NonNull<U> {
503 // SAFETY: `self` is a `NonNull` pointer which is necessarily non-null
504 unsafe { NonNull { pointer: self.as_ptr() as *mut U } }
505 }
506
507 /// Tries to cast to a pointer of another type by checking alignment.
508 ///
509 /// If the pointer is properly aligned to the target type, it will be
510 /// cast to the target type. Otherwise, `None` is returned.
511 ///
512 /// # Examples
513 ///
514 /// ```rust
515 /// #![feature(pointer_try_cast_aligned)]
516 /// use std::ptr::NonNull;
517 ///
518 /// let mut x = 0u64;
519 ///
520 /// let aligned = NonNull::from_mut(&mut x);
521 /// let unaligned = unsafe { aligned.byte_add(1) };
522 ///
523 /// assert!(aligned.try_cast_aligned::<u32>().is_some());
524 /// assert!(unaligned.try_cast_aligned::<u32>().is_none());
525 /// ```
526 #[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
527 #[must_use = "this returns the result of the operation, \
528 without modifying the original"]
529 #[inline]
530 pub fn try_cast_aligned<U>(self) -> Option<NonNull<U>> {
531 if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
532 }
533
534 /// Adds an offset to a pointer.
535 ///
536 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
537 /// offset of `3 * size_of::<T>()` bytes.
538 ///
539 /// # Safety
540 ///
541 /// If any of the following conditions are violated, the result is Undefined Behavior:
542 ///
543 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
544 ///
545 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
546 /// [allocation], and the entire memory range between `self` and the result must be in
547 /// bounds of that allocation. In particular, this range must not "wrap around" the edge
548 /// of the address space.
549 ///
550 /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
551 /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
552 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
553 /// safe.
554 ///
555 /// [allocation]: crate::ptr#allocation
556 ///
557 /// # Examples
558 ///
559 /// ```
560 /// use std::ptr::NonNull;
561 ///
562 /// let mut s = [1, 2, 3];
563 /// let ptr: NonNull<u32> = NonNull::new(s.as_mut_ptr()).unwrap();
564 ///
565 /// unsafe {
566 /// println!("{}", ptr.offset(1).read());
567 /// println!("{}", ptr.offset(2).read());
568 /// }
569 /// ```
570 #[inline(always)]
571 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
572 #[must_use = "returns a new pointer rather than modifying its argument"]
573 #[stable(feature = "non_null_convenience", since = "1.80.0")]
574 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
575 pub const unsafe fn offset(self, count: isize) -> Self
576 where
577 T: Sized,
578 {
579 // SAFETY: the caller must uphold the safety contract for `offset`.
580 // Additionally, the safety contract of `offset` guarantees that the resulting pointer
581 // points into an allocation; there can't be an allocation at null, so it's safe to
582 // construct `NonNull`.
583 unsafe { NonNull { pointer: intrinsics::offset(self.as_ptr(), count) } }
584 }
585
586 /// Calculates the offset from a pointer in bytes.
587 ///
588 /// `count` is in units of **bytes**.
589 ///
590 /// This is purely a convenience for casting to a `u8` pointer and
591 /// using [offset][pointer::offset] on it. See that method for documentation
592 /// and safety requirements.
593 ///
594 /// For non-`Sized` pointees this operation changes only the data pointer,
595 /// leaving the metadata untouched.
596 #[must_use]
597 #[inline(always)]
598 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
599 #[stable(feature = "non_null_convenience", since = "1.80.0")]
600 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
601 pub const unsafe fn byte_offset(self, count: isize) -> Self {
602 // SAFETY: the caller must uphold the safety contract for `offset` and `byte_offset` has
603 // the same safety contract.
604 // Additionally, the safety contract of `offset` guarantees that the resulting pointer
605 // points into an allocation; there can't be an allocation at null, so it's safe to
606 // construct `NonNull`.
607 unsafe { NonNull { pointer: self.as_ptr().byte_offset(count) } }
608 }
609
610 /// Adds an offset to a pointer (convenience for `.offset(count as isize)`).
611 ///
612 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
613 /// offset of `3 * size_of::<T>()` bytes.
614 ///
615 /// # Safety
616 ///
617 /// If any of the following conditions are violated, the result is Undefined Behavior:
618 ///
619 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
620 ///
621 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
622 /// [allocation], and the entire memory range between `self` and the result must be in
623 /// bounds of that allocation. In particular, this range must not "wrap around" the edge
624 /// of the address space.
625 ///
626 /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
627 /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
628 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
629 /// safe.
630 ///
631 /// [allocation]: crate::ptr#allocation
632 ///
633 /// # Examples
634 ///
635 /// ```
636 /// use std::ptr::NonNull;
637 ///
638 /// let s: &str = "123";
639 /// let ptr: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap();
640 ///
641 /// unsafe {
642 /// println!("{}", ptr.add(1).read() as char);
643 /// println!("{}", ptr.add(2).read() as char);
644 /// }
645 /// ```
646 #[inline(always)]
647 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
648 #[must_use = "returns a new pointer rather than modifying its argument"]
649 #[stable(feature = "non_null_convenience", since = "1.80.0")]
650 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
651 pub const unsafe fn add(self, count: usize) -> Self
652 where
653 T: Sized,
654 {
655 // SAFETY: the caller must uphold the safety contract for `offset`.
656 // Additionally, the safety contract of `offset` guarantees that the resulting pointer
657 // points into an allocation; there can't be an allocation at null, so it's safe to
658 // construct `NonNull`.
659 unsafe { NonNull { pointer: intrinsics::offset(self.as_ptr(), count) } }
660 }
661
662 /// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`).
663 ///
664 /// `count` is in units of bytes.
665 ///
666 /// This is purely a convenience for casting to a `u8` pointer and
667 /// using [`add`][NonNull::add] on it. See that method for documentation
668 /// and safety requirements.
669 ///
670 /// For non-`Sized` pointees this operation changes only the data pointer,
671 /// leaving the metadata untouched.
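    ///
    /// # Examples
    ///
    /// A small sketch: advancing by the size of one element, expressed in bytes:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = [5u16, 6, 7];
    /// let ptr = NonNull::from(&mut x).cast::<u16>();
    ///
    /// unsafe {
    ///     // Two bytes forward is the next `u16` element.
    ///     assert_eq!(ptr.byte_add(2).read(), 6);
    /// }
    /// ```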
672 #[must_use]
673 #[inline(always)]
674 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
675 #[stable(feature = "non_null_convenience", since = "1.80.0")]
676 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
677 pub const unsafe fn byte_add(self, count: usize) -> Self {
678 // SAFETY: the caller must uphold the safety contract for `add` and `byte_add` has the same
679 // safety contract.
680 // Additionally, the safety contract of `add` guarantees that the resulting pointer points
681 // into an allocation; there can't be an allocation at null, so it's safe to construct
682 // `NonNull`.
683 unsafe { NonNull { pointer: self.as_ptr().byte_add(count) } }
684 }
685
686 /// Subtracts an offset from a pointer (convenience for
687 /// `.offset((count as isize).wrapping_neg())`).
688 ///
689 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
690 /// offset of `3 * size_of::<T>()` bytes.
691 ///
692 /// # Safety
693 ///
694 /// If any of the following conditions are violated, the result is Undefined Behavior:
695 ///
696 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
697 ///
698 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
699 /// [allocation], and the entire memory range between `self` and the result must be in
700 /// bounds of that allocation. In particular, this range must not "wrap around" the edge
701 /// of the address space.
702 ///
703 /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
704 /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
705 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
706 /// safe.
707 ///
708 /// [allocation]: crate::ptr#allocation
709 ///
710 /// # Examples
711 ///
712 /// ```
713 /// use std::ptr::NonNull;
714 ///
715 /// let s: &str = "123";
716 ///
717 /// unsafe {
718 /// let end: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap().add(3);
719 /// println!("{}", end.sub(1).read() as char);
720 /// println!("{}", end.sub(2).read() as char);
721 /// }
722 /// ```
723 #[inline(always)]
724 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
725 #[must_use = "returns a new pointer rather than modifying its argument"]
726 #[stable(feature = "non_null_convenience", since = "1.80.0")]
727 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
728 pub const unsafe fn sub(self, count: usize) -> Self
729 where
730 T: Sized,
731 {
732 if T::IS_ZST {
733 // Pointer arithmetic does nothing when the pointee is a ZST.
734 self
735 } else {
736 // SAFETY: the caller must uphold the safety contract for `offset`.
737 // Because the pointee is *not* a ZST, that means that `count` is
738 // at most `isize::MAX`, and thus the negation cannot overflow.
739 unsafe { self.offset((count as isize).unchecked_neg()) }
740 }
741 }
742
743 /// Calculates the offset from a pointer in bytes (convenience for
744 /// `.byte_offset((count as isize).wrapping_neg())`).
745 ///
746 /// `count` is in units of bytes.
747 ///
748 /// This is purely a convenience for casting to a `u8` pointer and
749 /// using [`sub`][NonNull::sub] on it. See that method for documentation
750 /// and safety requirements.
751 ///
752 /// For non-`Sized` pointees this operation changes only the data pointer,
753 /// leaving the metadata untouched.
754 #[must_use]
755 #[inline(always)]
756 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
757 #[stable(feature = "non_null_convenience", since = "1.80.0")]
758 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
759 pub const unsafe fn byte_sub(self, count: usize) -> Self {
760 // SAFETY: the caller must uphold the safety contract for `sub` and `byte_sub` has the same
761 // safety contract.
762 // Additionally, the safety contract of `sub` guarantees that the resulting pointer points
763 // into an allocation; there can't be an allocation at null, so it's safe to construct
764 // `NonNull`.
765 unsafe { NonNull { pointer: self.as_ptr().byte_sub(count) } }
766 }
767
768 /// Calculates the distance between two pointers within the same allocation. The returned value is in
769 /// units of T: the distance in bytes divided by `size_of::<T>()`.
770 ///
771 /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
772 /// except that it has a lot more opportunities for UB, in exchange for the compiler
773 /// better understanding what you are doing.
774 ///
775 /// The primary motivation of this method is for computing the `len` of an array/slice
776 /// of `T` that you are currently representing as a "start" and "end" pointer
777 /// (and "end" is "one past the end" of the array).
778 /// In that case, `end.offset_from(start)` gets you the length of the array.
779 ///
780 /// All of the following safety requirements are trivially satisfied for this use case.
781 ///
782 /// [`offset`]: #method.offset
783 ///
784 /// # Safety
785 ///
786 /// If any of the following conditions are violated, the result is Undefined Behavior:
787 ///
788 /// * `self` and `origin` must either
789 ///
790 /// * point to the same address, or
791 /// * both be *derived from* a pointer to the same [allocation], and the memory range between
792 /// the two pointers must be in bounds of that allocation. (See below for an example.)
793 ///
794 /// * The distance between the pointers, in bytes, must be an exact multiple
795 /// of the size of `T`.
796 ///
797 /// As a consequence, the absolute distance between the pointers, in bytes, computed on
798 /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
799 /// implied by the in-bounds requirement, and the fact that no allocation can be larger
800 /// than `isize::MAX` bytes.
801 ///
802 /// The requirement for pointers to be derived from the same allocation is primarily
803 /// needed for `const`-compatibility: the distance between pointers into *different* allocated
804 /// objects is not known at compile-time. However, the requirement also exists at
805 /// runtime and may be exploited by optimizations. If you wish to compute the difference between
806 /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
807 /// origin as isize) / size_of::<T>()`.
808 // FIXME: recommend `addr()` instead of `as usize` once that is stable.
809 ///
810 /// [`add`]: #method.add
811 /// [allocation]: crate::ptr#allocation
812 ///
813 /// # Panics
814 ///
815 /// This function panics if `T` is a Zero-Sized Type ("ZST").
816 ///
817 /// # Examples
818 ///
819 /// Basic usage:
820 ///
821 /// ```
822 /// use std::ptr::NonNull;
823 ///
824 /// let a = [0; 5];
825 /// let ptr1: NonNull<u32> = NonNull::from(&a[1]);
826 /// let ptr2: NonNull<u32> = NonNull::from(&a[3]);
827 /// unsafe {
828 /// assert_eq!(ptr2.offset_from(ptr1), 2);
829 /// assert_eq!(ptr1.offset_from(ptr2), -2);
830 /// assert_eq!(ptr1.offset(2), ptr2);
831 /// assert_eq!(ptr2.offset(-2), ptr1);
832 /// }
833 /// ```
834 ///
835 /// *Incorrect* usage:
836 ///
837 /// ```rust,no_run
838 /// use std::ptr::NonNull;
839 ///
840 /// let ptr1 = NonNull::new(Box::into_raw(Box::new(0u8))).unwrap();
841 /// let ptr2 = NonNull::new(Box::into_raw(Box::new(1u8))).unwrap();
842 /// let diff = (ptr2.addr().get() as isize).wrapping_sub(ptr1.addr().get() as isize);
843 /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
844 /// let diff_plus_1 = diff.wrapping_add(1);
845 /// let ptr2_other = NonNull::new(ptr1.as_ptr().wrapping_byte_offset(diff_plus_1)).unwrap();
846 /// assert_eq!(ptr2.addr(), ptr2_other.addr());
847 /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
848 /// // computing their offset is undefined behavior, even though
849 /// // they point to addresses that are in-bounds of the same object!
850 ///
851 /// let one = unsafe { ptr2_other.offset_from(ptr2) }; // Undefined Behavior! ⚠️
852 /// ```
853 #[inline]
854 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
855 #[stable(feature = "non_null_convenience", since = "1.80.0")]
856 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
857 pub const unsafe fn offset_from(self, origin: NonNull<T>) -> isize
858 where
859 T: Sized,
860 {
861 // SAFETY: the caller must uphold the safety contract for `offset_from`.
862 unsafe { self.as_ptr().offset_from(origin.as_ptr()) }
863 }
864
865 /// Calculates the distance between two pointers within the same allocation. The returned value is in
866 /// units of **bytes**.
867 ///
868 /// This is purely a convenience for casting to a `u8` pointer and
869 /// using [`offset_from`][NonNull::offset_from] on it. See that method for
870 /// documentation and safety requirements.
871 ///
872 /// For non-`Sized` pointees this operation considers only the data pointers,
873 /// ignoring the metadata.
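    ///
    /// # Examples
    ///
    /// A short sketch, assuming both pointers point into the same array:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let a = [0u32; 4];
    /// let begin: NonNull<u32> = NonNull::from(&a[0]);
    /// let end: NonNull<u32> = NonNull::from(&a[3]);
    /// assert_eq!(unsafe { end.byte_offset_from(begin) }, 3 * size_of::<u32>() as isize);
    /// ```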
874 #[inline(always)]
875 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
876 #[stable(feature = "non_null_convenience", since = "1.80.0")]
877 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
878 pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: NonNull<U>) -> isize {
879 // SAFETY: the caller must uphold the safety contract for `byte_offset_from`.
880 unsafe { self.as_ptr().byte_offset_from(origin.as_ptr()) }
881 }
882
883 // N.B. `wrapping_offset`, `wrapping_add`, etc. are not implemented because they can wrap to null.
884
885 /// Calculates the distance between two pointers within the same allocation, *where it's known that
886 /// `self` is equal to or greater than `origin`*. The returned value is in
887 /// units of T: the distance in bytes is divided by `size_of::<T>()`.
888 ///
889 /// This computes the same value that [`offset_from`](#method.offset_from)
890 /// would compute, but with the added precondition that the offset is
891 /// guaranteed to be non-negative. This method is equivalent to
892 /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
893 /// but it provides slightly more information to the optimizer, which can
894 /// sometimes allow it to optimize slightly better with some backends.
895 ///
896 /// This method can be thought of as recovering the `count` that was passed
897 /// to [`add`](#method.add) (or, with the parameters in the other order,
898 /// to [`sub`](#method.sub)). The following are all equivalent, assuming
899 /// that their safety preconditions are met:
900 /// ```rust
901 /// # unsafe fn blah(ptr: std::ptr::NonNull<u32>, origin: std::ptr::NonNull<u32>, count: usize) -> bool { unsafe {
902 /// ptr.offset_from_unsigned(origin) == count
903 /// # &&
904 /// origin.add(count) == ptr
905 /// # &&
906 /// ptr.sub(count) == origin
907 /// # } }
908 /// ```
909 ///
910 /// # Safety
911 ///
912 /// - The distance between the pointers must be non-negative (`self >= origin`)
913 ///
914 /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
915 /// apply to this method as well; see it for the full details.
916 ///
917 /// Importantly, despite the return type of this method being able to represent
918 /// a larger offset, it's still *not permitted* to pass pointers which differ
919 /// by more than `isize::MAX` *bytes*. As such, the result of this method will
920 /// always be less than or equal to `isize::MAX as usize`.
921 ///
922 /// # Panics
923 ///
924 /// This function panics if `T` is a Zero-Sized Type ("ZST").
925 ///
926 /// # Examples
927 ///
928 /// ```
929 /// use std::ptr::NonNull;
930 ///
931 /// let a = [0; 5];
932 /// let ptr1: NonNull<u32> = NonNull::from(&a[1]);
933 /// let ptr2: NonNull<u32> = NonNull::from(&a[3]);
934 /// unsafe {
935 /// assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
936 /// assert_eq!(ptr1.add(2), ptr2);
937 /// assert_eq!(ptr2.sub(2), ptr1);
938 /// assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
939 /// }
940 ///
941 /// // This would be incorrect, as the pointers are not correctly ordered:
942 /// // ptr1.offset_from_unsigned(ptr2)
943 /// ```
944 #[inline]
945 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
946 #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
947 #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
948 pub const unsafe fn offset_from_unsigned(self, subtracted: NonNull<T>) -> usize
949 where
950 T: Sized,
951 {
952 // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
953 unsafe { self.as_ptr().offset_from_unsigned(subtracted.as_ptr()) }
954 }
955
956 /// Calculates the distance between two pointers within the same allocation, *where it's known that
957 /// `self` is equal to or greater than `origin`*. The returned value is in
958 /// units of **bytes**.
959 ///
960 /// This is purely a convenience for casting to a `u8` pointer and
961 /// using [`offset_from_unsigned`][NonNull::offset_from_unsigned] on it.
962 /// See that method for documentation and safety requirements.
963 ///
964 /// For non-`Sized` pointees this operation considers only the data pointers,
965 /// ignoring the metadata.
966 #[inline(always)]
967 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
968 #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
969 #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
970 pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: NonNull<U>) -> usize {
971 // SAFETY: the caller must uphold the safety contract for `byte_offset_from_unsigned`.
972 unsafe { self.as_ptr().byte_offset_from_unsigned(origin.as_ptr()) }
973 }
974
975 /// Reads the value from `self` without moving it. This leaves the
976 /// memory in `self` unchanged.
977 ///
978 /// See [`ptr::read`] for safety concerns and examples.
979 ///
980 /// [`ptr::read`]: crate::ptr::read()
981 #[inline]
982 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
983 #[stable(feature = "non_null_convenience", since = "1.80.0")]
984 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
985 pub const unsafe fn read(self) -> T
986 where
987 T: Sized,
988 {
989 // SAFETY: the caller must uphold the safety contract for `read`.
990 unsafe { ptr::read(self.as_ptr()) }
991 }
992
993 /// Performs a volatile read of the value from `self` without moving it. This
994 /// leaves the memory in `self` unchanged.
995 ///
996 /// Volatile operations are intended to act on I/O memory, and are guaranteed
997 /// to not be elided or reordered by the compiler across other volatile
998 /// operations.
999 ///
1000 /// See [`ptr::read_volatile`] for safety concerns and examples.
1001 ///
1002 /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
1003 #[inline]
1004 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1005 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1006 pub unsafe fn read_volatile(self) -> T
1007 where
1008 T: Sized,
1009 {
1010 // SAFETY: the caller must uphold the safety contract for `read_volatile`.
1011 unsafe { ptr::read_volatile(self.as_ptr()) }
1012 }
1013
1014 /// Reads the value from `self` without moving it. This leaves the
1015 /// memory in `self` unchanged.
1016 ///
1017 /// Unlike `read`, the pointer may be unaligned.
1018 ///
1019 /// See [`ptr::read_unaligned`] for safety concerns and examples.
1020 ///
1021 /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
1022 #[inline]
1023 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1024 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1025 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
1026 pub const unsafe fn read_unaligned(self) -> T
1027 where
1028 T: Sized,
1029 {
1030 // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
1031 unsafe { ptr::read_unaligned(self.as_ptr()) }
1032 }
1033
1034 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1035 /// and destination may overlap.
1036 ///
1037 /// NOTE: this has the *same* argument order as [`ptr::copy`].
1038 ///
1039 /// See [`ptr::copy`] for safety concerns and examples.
1040 ///
1041 /// [`ptr::copy`]: crate::ptr::copy()
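    ///
    /// # Examples
    ///
    /// A small sketch copying three elements into a separate buffer:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let src = [1u8, 2, 3];
    /// let mut dst = [0u8; 3];
    ///
    /// let src_ptr = NonNull::from(&src).cast::<u8>();
    /// let dst_ptr = NonNull::from(&mut dst).cast::<u8>();
    /// unsafe { src_ptr.copy_to(dst_ptr, 3) };
    /// assert_eq!(dst, [1, 2, 3]);
    /// ```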
1042 #[inline(always)]
1043 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1044 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1045 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1046 pub const unsafe fn copy_to(self, dest: NonNull<T>, count: usize)
1047 where
1048 T: Sized,
1049 {
1050 // SAFETY: the caller must uphold the safety contract for `copy`.
1051 unsafe { ptr::copy(self.as_ptr(), dest.as_ptr(), count) }
1052 }
1053
1054 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1055 /// and destination may *not* overlap.
1056 ///
1057 /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
1058 ///
1059 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1060 ///
1061 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1062 #[inline(always)]
1063 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1064 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1065 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1066 pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNull<T>, count: usize)
1067 where
1068 T: Sized,
1069 {
1070 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1071 unsafe { ptr::copy_nonoverlapping(self.as_ptr(), dest.as_ptr(), count) }
1072 }
1073
1074 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1075 /// and destination may overlap.
1076 ///
1077 /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
1078 ///
1079 /// See [`ptr::copy`] for safety concerns and examples.
1080 ///
1081 /// [`ptr::copy`]: crate::ptr::copy()
1082 #[inline(always)]
1083 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1084 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1085 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1086 pub const unsafe fn copy_from(self, src: NonNull<T>, count: usize)
1087 where
1088 T: Sized,
1089 {
1090 // SAFETY: the caller must uphold the safety contract for `copy`.
1091 unsafe { ptr::copy(src.as_ptr(), self.as_ptr(), count) }
1092 }
1093
1094 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1095 /// and destination may *not* overlap.
1096 ///
1097 /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
1098 ///
1099 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1100 ///
1101 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1102 #[inline(always)]
1103 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1104 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1105 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1106 pub const unsafe fn copy_from_nonoverlapping(self, src: NonNull<T>, count: usize)
1107 where
1108 T: Sized,
1109 {
1110 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1111 unsafe { ptr::copy_nonoverlapping(src.as_ptr(), self.as_ptr(), count) }
1112 }
1113
1114 /// Executes the destructor (if any) of the pointed-to value.
1115 ///
1116 /// See [`ptr::drop_in_place`] for safety concerns and examples.
1117 ///
1118 /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
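    ///
    /// # Examples
    ///
    /// A sketch using `ManuallyDrop` so that the destructor runs exactly once:
    ///
    /// ```
    /// use std::mem::ManuallyDrop;
    /// use std::ptr::NonNull;
    ///
    /// let mut s = ManuallyDrop::new(String::from("hello"));
    /// let ptr = NonNull::from(&mut *s);
    ///
    /// // Runs the `String` destructor; `s` must not be used afterwards.
    /// unsafe { ptr.drop_in_place() };
    /// ```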
1119 #[inline(always)]
1120 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1121 pub unsafe fn drop_in_place(self) {
1122 // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
1123 unsafe { ptr::drop_in_place(self.as_ptr()) }
1124 }
1125
1126 /// Overwrites a memory location with the given value without reading or
1127 /// dropping the old value.
1128 ///
1129 /// See [`ptr::write`] for safety concerns and examples.
1130 ///
1131 /// [`ptr::write`]: crate::ptr::write()
1132 #[inline(always)]
1133 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1134 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1135 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1136 pub const unsafe fn write(self, val: T)
1137 where
1138 T: Sized,
1139 {
1140 // SAFETY: the caller must uphold the safety contract for `write`.
1141 unsafe { ptr::write(self.as_ptr(), val) }
1142 }
1143
1144 /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
1145 /// bytes of memory starting at `self` to `val`.
1146 ///
1147 /// See [`ptr::write_bytes`] for safety concerns and examples.
1148 ///
1149 /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
1150 #[inline(always)]
1151 #[doc(alias = "memset")]
1152 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1153 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1154 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1155 pub const unsafe fn write_bytes(self, val: u8, count: usize)
1156 where
1157 T: Sized,
1158 {
1159 // SAFETY: the caller must uphold the safety contract for `write_bytes`.
1160 unsafe { ptr::write_bytes(self.as_ptr(), val, count) }
1161 }
1162
1163 /// Performs a volatile write of a memory location with the given value without
1164 /// reading or dropping the old value.
1165 ///
1166 /// Volatile operations are intended to act on I/O memory, and are guaranteed
1167 /// to not be elided or reordered by the compiler across other volatile
1168 /// operations.
1169 ///
1170 /// See [`ptr::write_volatile`] for safety concerns and examples.
1171 ///
1172 /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
1173 #[inline(always)]
1174 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1175 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1176 pub unsafe fn write_volatile(self, val: T)
1177 where
1178 T: Sized,
1179 {
1180 // SAFETY: the caller must uphold the safety contract for `write_volatile`.
1181 unsafe { ptr::write_volatile(self.as_ptr(), val) }
1182 }
1183
1184 /// Overwrites a memory location with the given value without reading or
1185 /// dropping the old value.
1186 ///
1187 /// Unlike `write`, the pointer may be unaligned.
1188 ///
1189 /// See [`ptr::write_unaligned`] for safety concerns and examples.
1190 ///
1191 /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
1192 #[inline(always)]
1193 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1194 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1195 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1196 pub const unsafe fn write_unaligned(self, val: T)
1197 where
1198 T: Sized,
1199 {
1200 // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
1201 unsafe { ptr::write_unaligned(self.as_ptr(), val) }
1202 }
1203
1204 /// Replaces the value at `self` with `src`, returning the old
1205 /// value, without dropping either.
1206 ///
1207 /// See [`ptr::replace`] for safety concerns and examples.
1208 ///
1209 /// [`ptr::replace`]: crate::ptr::replace()
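    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 1u32;
    /// let ptr = NonNull::from(&mut x);
    ///
    /// let old = unsafe { ptr.replace(2) };
    /// assert_eq!(old, 1);
    /// assert_eq!(x, 2);
    /// ```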
1210 #[inline(always)]
1211 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1212 #[rustc_const_stable(feature = "const_inherent_ptr_replace", since = "1.88.0")]
1213 pub const unsafe fn replace(self, src: T) -> T
1214 where
1215 T: Sized,
1216 {
1217 // SAFETY: the caller must uphold the safety contract for `replace`.
1218 unsafe { ptr::replace(self.as_ptr(), src) }
1219 }
1220
1221 /// Swaps the values at two mutable locations of the same type, without
1222 /// deinitializing either. They may overlap, unlike `mem::swap` which is
1223 /// otherwise equivalent.
1224 ///
1225 /// See [`ptr::swap`] for safety concerns and examples.
1226 ///
1227 /// [`ptr::swap`]: crate::ptr::swap()
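    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut a = 1u32;
    /// let mut b = 2u32;
    ///
    /// let pa = NonNull::from(&mut a);
    /// let pb = NonNull::from(&mut b);
    /// unsafe { pa.swap(pb) };
    /// assert_eq!((a, b), (2, 1));
    /// ```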
1228 #[inline(always)]
1229 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1230 #[rustc_const_stable(feature = "const_swap", since = "1.85.0")]
1231 pub const unsafe fn swap(self, with: NonNull<T>)
1232 where
1233 T: Sized,
1234 {
1235 // SAFETY: the caller must uphold the safety contract for `swap`.
1236 unsafe { ptr::swap(self.as_ptr(), with.as_ptr()) }
1237 }
1238
1239 /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
1240 /// `align`.
1241 ///
1242 /// If it is not possible to align the pointer, the implementation returns
1243 /// `usize::MAX`.
1244 ///
1245 /// The offset is expressed in number of `T` elements, and not bytes.
1246 ///
1247 /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
1248 /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
1249 /// the returned offset is correct in all terms other than alignment.
1250 ///
1251 /// When this is called during compile-time evaluation (which is unstable), the implementation
1252 /// may return `usize::MAX` in cases where that can never happen at runtime. This is because the
1253 /// actual alignment of pointers is not known yet during compile-time, so an offset with
1254 /// guaranteed alignment can sometimes not be computed. For example, a buffer declared as `[u8;
1255 /// N]` might be allocated at an odd or an even address, but at compile-time this is not yet
1256 /// known, so the execution has to be correct for either choice. It is therefore impossible to
1257 /// find an offset that is guaranteed to be 2-aligned. (This behavior is subject to change, as usual
1258 /// for unstable APIs.)
1259 ///
1260 /// # Panics
1261 ///
1262 /// The function panics if `align` is not a power-of-two.
1263 ///
1264 /// # Examples
1265 ///
1266 /// Accessing adjacent `u8` as `u16`
1267 ///
1268 /// ```
1269 /// use std::ptr::NonNull;
1270 ///
1271 /// # unsafe {
1272 /// let x = [5_u8, 6, 7, 8, 9];
1273 /// let ptr = NonNull::new(x.as_ptr() as *mut u8).unwrap();
1274 /// let offset = ptr.align_offset(align_of::<u16>());
1275 ///
1276 /// if offset < x.len() - 1 {
1277 /// let u16_ptr = ptr.add(offset).cast::<u16>();
1278 /// assert!(u16_ptr.read() == u16::from_ne_bytes([5, 6]) || u16_ptr.read() == u16::from_ne_bytes([6, 7]));
1279 /// } else {
1280 /// // while the pointer can be aligned via `offset`, it would point
1281 /// // outside the allocation
1282 /// }
1283 /// # }
1284 /// ```
1285 #[inline]
1286 #[must_use]
1287 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1288 pub fn align_offset(self, align: usize) -> usize
1289 where
1290 T: Sized,
1291 {
1292 if !align.is_power_of_two() {
1293 panic!("align_offset: align is not a power-of-two");
1294 }
1295
1296 {
1297 // SAFETY: `align` has been checked to be a power of 2 above.
1298 unsafe { ptr::align_offset(self.as_ptr(), align) }
1299 }
1300 }
1301
1302 /// Returns whether the pointer is properly aligned for `T`.
1303 ///
1304 /// # Examples
1305 ///
1306 /// ```
1307 /// use std::ptr::NonNull;
1308 ///
1309 /// // On some platforms, the alignment of i32 is less than 4.
1310 /// #[repr(align(4))]
1311 /// struct AlignedI32(i32);
1312 ///
1313 /// let data = AlignedI32(42);
1314 /// let ptr = NonNull::<AlignedI32>::from(&data);
1315 ///
1316 /// assert!(ptr.is_aligned());
1317 /// assert!(!NonNull::new(ptr.as_ptr().wrapping_byte_add(1)).unwrap().is_aligned());
1318 /// ```
1319 #[inline]
1320 #[must_use]
1321 #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
1322 pub fn is_aligned(self) -> bool
1323 where
1324 T: Sized,
1325 {
1326 self.as_ptr().is_aligned()
1327 }
1328
1329 /// Returns whether the pointer is aligned to `align`.
1330 ///
1331 /// For non-`Sized` pointees this operation considers only the data pointer,
1332 /// ignoring the metadata.
1333 ///
1334 /// # Panics
1335 ///
1336 /// The function panics if `align` is not a power-of-two (this includes 0).
1337 ///
1338 /// # Examples
1339 ///
1340 /// ```
1341 /// #![feature(pointer_is_aligned_to)]
1342 ///
1343 /// // On some platforms, the alignment of i32 is less than 4.
1344 /// #[repr(align(4))]
1345 /// struct AlignedI32(i32);
1346 ///
1347 /// let data = AlignedI32(42);
1348 /// let ptr = &data as *const AlignedI32;
1349 ///
1350 /// assert!(ptr.is_aligned_to(1));
1351 /// assert!(ptr.is_aligned_to(2));
1352 /// assert!(ptr.is_aligned_to(4));
1353 ///
1354 /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
1355 /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
1356 ///
1357 /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
1358 /// ```
1359 #[inline]
1360 #[must_use]
1361 #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
1362 pub fn is_aligned_to(self, align: usize) -> bool {
1363 self.as_ptr().is_aligned_to(align)
1364 }
1365}
1366
1367impl<T> NonNull<T> {
1368 /// Casts from a type to its maybe-uninitialized version.
1369 #[must_use]
1370 #[inline(always)]
1371 #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
1372 pub const fn cast_uninit(self) -> NonNull<MaybeUninit<T>> {
1373 self.cast()
1374 }
1375}
1376impl<T> NonNull<MaybeUninit<T>> {
1377 /// Casts from a maybe-uninitialized type to its initialized version.
1378 ///
1379 /// This is always safe, since UB can only occur if the pointed-to value is read
1380 /// before being initialized.
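    ///
    /// # Examples
    ///
    /// A sketch of the round trip through `MaybeUninit` (the feature gate below
    /// is required because this API is unstable):
    ///
    /// ```
    /// #![feature(cast_maybe_uninit)]
    /// use std::mem::MaybeUninit;
    /// use std::ptr::NonNull;
    ///
    /// let mut slot = MaybeUninit::<u32>::uninit();
    /// let uninit_ptr: NonNull<MaybeUninit<u32>> = NonNull::from(&mut slot);
    ///
    /// let init_ptr: NonNull<u32> = uninit_ptr.cast_init();
    /// unsafe { init_ptr.write(3) };
    /// assert_eq!(unsafe { init_ptr.read() }, 3);
    /// ```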
    #[must_use]
    #[inline(always)]
    #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
    pub const fn cast_init(self) -> NonNull<T> {
        self.cast()
    }
}

impl<T> NonNull<[T]> {
    /// Creates a non-null raw slice from a thin pointer and a length.
    ///
    /// The `len` argument is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, but dereferencing the return value is unsafe.
    /// See the documentation of [`slice::from_raw_parts`] for slice safety requirements.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr::NonNull;
    ///
    /// // create a slice pointer when starting out with a pointer to the first element
    /// let mut x = [5, 6, 7];
    /// let nonnull_pointer = NonNull::new(x.as_mut_ptr()).unwrap();
    /// let slice = NonNull::slice_from_raw_parts(nonnull_pointer, 3);
    /// assert_eq!(unsafe { slice.as_ref()[2] }, 7);
    /// ```
    ///
    /// (Note that this example artificially demonstrates a use of this method,
    /// but `let slice = NonNull::from(&x[..]);` would be a better way to write code like this.)
    #[stable(feature = "nonnull_slice_from_raw_parts", since = "1.70.0")]
    #[rustc_const_stable(feature = "const_slice_from_raw_parts_mut", since = "1.83.0")]
    #[must_use]
    #[inline]
    pub const fn slice_from_raw_parts(data: NonNull<T>, len: usize) -> Self {
        // SAFETY: `data` is a `NonNull` pointer which is necessarily non-null
        unsafe { Self::new_unchecked(super::slice_from_raw_parts_mut(data.as_ptr(), len)) }
    }

    /// Returns the length of a non-null raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the non-null raw slice cannot be dereferenced to a slice
    /// because the pointer does not have a valid address.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr::NonNull;
    ///
    /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[stable(feature = "slice_ptr_len_nonnull", since = "1.63.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len_nonnull", since = "1.63.0")]
    #[must_use]
    #[inline]
    pub const fn len(self) -> usize {
        self.as_ptr().len()
    }

    /// Returns `true` if the non-null raw slice has a length of 0.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr::NonNull;
    ///
    /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
    /// assert!(!slice.is_empty());
    /// ```
    #[stable(feature = "slice_ptr_is_empty_nonnull", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_is_empty_nonnull", since = "1.79.0")]
    #[must_use]
    #[inline]
    pub const fn is_empty(self) -> bool {
        self.len() == 0
    }

    /// Returns a non-null pointer to the slice's buffer.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr::NonNull;
    ///
    /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
    /// assert_eq!(slice.as_non_null_ptr(), NonNull::<i8>::dangling());
    /// ```
    #[inline]
    #[must_use]
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    pub const fn as_non_null_ptr(self) -> NonNull<T> {
        self.cast()
    }

    /// Returns a raw pointer to the slice's buffer.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr::NonNull;
    ///
    /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
    /// assert_eq!(slice.as_mut_ptr(), NonNull::<i8>::dangling().as_ptr());
    /// ```
    #[inline]
    #[must_use]
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[rustc_never_returns_null_ptr]
    pub const fn as_mut_ptr(self) -> *mut T {
        self.as_non_null_ptr().as_ptr()
    }

    /// Returns a shared reference to a slice of possibly uninitialized values. In contrast to
    /// [`as_ref`], this does not require the value to be initialized.
    ///
    /// For the mutable counterpart see [`as_uninit_slice_mut`].
    ///
    /// [`as_ref`]: NonNull::as_ref
    /// [`as_uninit_slice_mut`]: NonNull::as_uninit_slice_mut
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that all of the following is true:
    ///
    /// * The pointer must be [valid] for reads for `ptr.len() * size_of::<T>()` many bytes,
    ///   and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single allocation!
    ///       Slices can never span across multiple allocations.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts`].
    ///
    /// [valid]: crate::ptr#safety
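    ///
    /// # Examples
    ///
    /// A minimal sketch (the buffer and variable names are only illustrative):
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    /// use std::mem::MaybeUninit;
    /// use std::ptr::NonNull;
    ///
    /// let buf = [MaybeUninit::<u8>::uninit(); 4];
    /// // Build a `NonNull<[u8]>` covering the (possibly uninitialized) buffer.
    /// let first = NonNull::new(buf.as_ptr().cast_mut()).unwrap().cast::<u8>();
    /// let ptr: NonNull<[u8]> = NonNull::slice_from_raw_parts(first, buf.len());
    ///
    /// // SAFETY: `ptr` covers exactly the 4 bytes of `buf`, is properly aligned,
    /// // and `buf` is not mutated while the returned reference is alive.
    /// let uninit: &[MaybeUninit<u8>] = unsafe { ptr.as_uninit_slice() };
    /// assert_eq!(uninit.len(), 4);
    /// ```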
    #[inline]
    #[must_use]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_slice<'a>(self) -> &'a [MaybeUninit<T>] {
        // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
        unsafe { slice::from_raw_parts(self.cast().as_ptr(), self.len()) }
    }

    /// Returns a unique reference to a slice of possibly uninitialized values. In contrast to
    /// [`as_mut`], this does not require the value to be initialized.
    ///
    /// For the shared counterpart see [`as_uninit_slice`].
    ///
    /// [`as_mut`]: NonNull::as_mut
    /// [`as_uninit_slice`]: NonNull::as_uninit_slice
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that all of the following is true:
    ///
    /// * The pointer must be [valid] for reads and writes for `ptr.len() * size_of::<T>()`
    ///   many bytes, and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single allocation!
    ///       Slices can never span across multiple allocations.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get accessed (read or written) through any other pointer.
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts_mut`].
    ///
    /// [valid]: crate::ptr#safety
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(allocator_api, ptr_as_uninit)]
    ///
    /// use std::alloc::{Allocator, Layout, Global};
    /// use std::mem::MaybeUninit;
    /// use std::ptr::NonNull;
    ///
    /// let memory: NonNull<[u8]> = Global.allocate(Layout::new::<[u8; 32]>())?;
    /// // This is safe as `memory` is valid for reads and writes for `memory.len()` many bytes.
    /// // Note that calling `memory.as_mut()` is not allowed here as the content may be uninitialized.
    /// # #[allow(unused_variables)]
    /// let slice: &mut [MaybeUninit<u8>] = unsafe { memory.as_uninit_slice_mut() };
    /// # // Prevent leaks for Miri.
    /// # unsafe { Global.deallocate(memory.cast(), Layout::new::<[u8; 32]>()); }
    /// # Ok::<_, std::alloc::AllocError>(())
    /// ```
    #[inline]
    #[must_use]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_slice_mut<'a>(self) -> &'a mut [MaybeUninit<T>] {
        // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
        unsafe { slice::from_raw_parts_mut(self.cast().as_ptr(), self.len()) }
    }

    /// Returns a non-null pointer to an element or subslice, without doing
    /// bounds checking.
    ///
    /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
    /// is *[undefined behavior]* even if the resulting pointer is not used.
    ///
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(slice_ptr_get)]
    /// use std::ptr::NonNull;
    ///
    /// let x = &mut [1, 2, 4];
    /// let x = NonNull::slice_from_raw_parts(NonNull::new(x.as_mut_ptr()).unwrap(), x.len());
    ///
    /// unsafe {
    ///     assert_eq!(x.get_unchecked_mut(1).as_ptr(), x.as_non_null_ptr().as_ptr().add(1));
    /// }
    /// ```
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[rustc_const_unstable(feature = "const_index", issue = "143775")]
    #[inline]
    pub const unsafe fn get_unchecked_mut<I>(self, index: I) -> NonNull<I::Output>
    where
        I: [const] SliceIndex<[T]>,
    {
        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
        // As a consequence, the resulting pointer cannot be null.
        unsafe { NonNull::new_unchecked(self.as_ptr().get_unchecked_mut(index)) }
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> Clone for NonNull<T> {
    #[inline(always)]
    fn clone(&self) -> Self {
        *self
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> Copy for NonNull<T> {}

#[unstable(feature = "coerce_unsized", issue = "18598")]
impl<T: PointeeSized, U: PointeeSized> CoerceUnsized<NonNull<U>> for NonNull<T> where T: Unsize<U> {}

#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: PointeeSized, U: PointeeSized> DispatchFromDyn<NonNull<U>> for NonNull<T> where T: Unsize<U> {}

#[stable(feature = "pin", since = "1.33.0")]
unsafe impl<T: PointeeSized> PinCoerceUnsized for NonNull<T> {}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> fmt::Debug for NonNull<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.as_ptr(), f)
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> fmt::Pointer for NonNull<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.as_ptr(), f)
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> Eq for NonNull<T> {}

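/// Pointer equality: `NonNull`s compare the same way as the underlying raw
/// pointers; the pointed-to values are not read or compared.
///
/// # Examples
///
/// A small illustration (the variable names are only illustrative):
///
/// ```
/// use std::ptr::NonNull;
///
/// let a = 1_u8;
/// let b = 1_u8;
/// // Same address compares equal; equal values at different addresses do not.
/// assert_eq!(NonNull::from(&a), NonNull::from(&a));
/// assert_ne!(NonNull::from(&a), NonNull::from(&b));
/// ```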
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> PartialEq for NonNull<T> {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn eq(&self, other: &Self) -> bool {
        self.as_ptr() == other.as_ptr()
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> Ord for NonNull<T> {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn cmp(&self, other: &Self) -> Ordering {
        self.as_ptr().cmp(&other.as_ptr())
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> PartialOrd for NonNull<T> {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.as_ptr().partial_cmp(&other.as_ptr())
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> hash::Hash for NonNull<T> {
    #[inline]
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        self.as_ptr().hash(state)
    }
}

#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: PointeeSized> From<Unique<T>> for NonNull<T> {
    #[inline]
    fn from(unique: Unique<T>) -> Self {
        unique.as_non_null_ptr()
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> From<&mut T> for NonNull<T> {
    /// Converts a `&mut T` to a `NonNull<T>`.
    ///
    /// This conversion is safe and infallible since references cannot be null.
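    ///
    /// # Examples
    ///
    /// A small illustration (the variable names are only illustrative):
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut value = 0_u32;
    /// let ptr = NonNull::from(&mut value);
    /// // SAFETY: `ptr` was just created from a live mutable reference, and no
    /// // other reference to `value` is used while we write through it.
    /// unsafe { ptr.write(7) };
    /// assert_eq!(value, 7);
    /// ```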
    #[inline]
    fn from(r: &mut T) -> Self {
        NonNull::from_mut(r)
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: PointeeSized> From<&T> for NonNull<T> {
    /// Converts a `&T` to a `NonNull<T>`.
    ///
    /// This conversion is safe and infallible since references cannot be null.
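    ///
    /// # Examples
    ///
    /// A small illustration (the variable names are only illustrative):
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let value = 7_u32;
    /// let ptr = NonNull::from(&value);
    /// // SAFETY: `ptr` comes from a live shared reference, so it is valid for
    /// // reads, and we only read through it.
    /// assert_eq!(unsafe { ptr.read() }, 7);
    /// ```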
    #[inline]
    fn from(r: &T) -> Self {
        NonNull::from_ref(r)
    }
}