// core/ptr/mut_ptr.rs — inherent methods on raw mutable pointers (`*mut T`).
1use super::*;
2use crate::cmp::Ordering::{Equal, Greater, Less};
3use crate::intrinsics::const_eval_select;
4use crate::marker::{Destruct, PointeeSized};
5use crate::mem::{self, SizedTypeProperties};
6use crate::slice::{self, SliceIndex};
7
8impl<T: PointeeSized> *mut T {
9    #[doc = "Returns `true` if the pointer is null.\n\nNote that unsized types have many possible null pointers, as only the\nraw data pointer is considered, not their length, vtable, etc.\nTherefore, two pointers that are null may still not compare equal to\neach other.\n\n# Panics during const evaluation\n\nIf this method is used during const evaluation, and `self` is a pointer\nthat is offset beyond the bounds of the memory it initially pointed to,\nthen there might not be enough information to determine whether the\npointer is null. This is because the absolute address in memory is not\nknown at compile time. If the nullness of the pointer cannot be\ndetermined, this method will panic.\n\nIn-bounds pointers are never null, so the method will never panic for\nsuch pointers.\n"include_str!("docs/is_null.md")]
10    ///
11    /// # Examples
12    ///
13    /// ```
14    /// let mut s = [1, 2, 3];
15    /// let ptr: *mut u32 = s.as_mut_ptr();
16    /// assert!(!ptr.is_null());
17    /// ```
18    #[stable(feature = "rust1", since = "1.0.0")]
19    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
20    #[rustc_diagnostic_item = "ptr_is_null"]
21    #[inline]
22    pub const fn is_null(self) -> bool {
23        self.cast_const().is_null()
24    }
25
26    /// Casts to a pointer of another type.
27    #[stable(feature = "ptr_cast", since = "1.38.0")]
28    #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
29    #[rustc_diagnostic_item = "ptr_cast"]
30    #[inline(always)]
31    pub const fn cast<U>(self) -> *mut U {
32        self as _
33    }
34
35    /// Try to cast to a pointer of another type by checking alignment.
36    ///
37    /// If the pointer is properly aligned to the target type, it will be
38    /// cast to the target type. Otherwise, `None` is returned.
39    ///
40    /// # Examples
41    ///
42    /// ```rust
43    /// #![feature(pointer_try_cast_aligned)]
44    ///
45    /// let mut x = 0u64;
46    ///
47    /// let aligned: *mut u64 = &mut x;
48    /// let unaligned = unsafe { aligned.byte_add(1) };
49    ///
50    /// assert!(aligned.try_cast_aligned::<u32>().is_some());
51    /// assert!(unaligned.try_cast_aligned::<u32>().is_none());
52    /// ```
53    #[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
54    #[must_use = "this returns the result of the operation, \
55                  without modifying the original"]
56    #[inline]
57    pub fn try_cast_aligned<U>(self) -> Option<*mut U> {
58        if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
59    }
60
    /// Uses the address value in a new pointer of another type.
    ///
    /// This operation will ignore the address part of its `meta` operand and discard existing
    /// metadata of `self`. For pointers to a sized types (thin pointers), this has the same effect
    /// as a simple cast. For pointers to an unsized type (fat pointers) this recombines the address
    /// with new metadata such as slice lengths or `dyn`-vtable.
    ///
    /// The resulting pointer will have provenance of `self`. This operation is semantically the
    /// same as creating a new pointer with the data pointer value of `self` but the metadata of
    /// `meta`, being fat or thin depending on the `meta` operand.
    ///
    /// # Examples
    ///
    /// This function is primarily useful for enabling pointer arithmetic on potentially fat
    /// pointers. The pointer is cast to a sized pointee to utilize offset operations and then
    /// recombined with its own original metadata.
    ///
    /// ```
    /// #![feature(set_ptr_value)]
    /// # use core::fmt::Debug;
    /// let mut arr: [i32; 3] = [1, 2, 3];
    /// let mut ptr = arr.as_mut_ptr() as *mut dyn Debug;
    /// let thin = ptr as *mut u8;
    /// unsafe {
    ///     ptr = thin.add(8).with_metadata_of(ptr);
    ///     # assert_eq!(*(ptr as *mut i32), 3);
    ///     println!("{:?}", &*ptr); // will print "3"
    /// }
    /// ```
    ///
    /// # *Incorrect* usage
    ///
    /// The provenance from pointers is *not* combined. The result must only be used to refer to the
    /// address allowed by `self`.
    ///
    /// ```rust,no_run
    /// #![feature(set_ptr_value)]
    /// let mut x = 0u32;
    /// let mut y = 1u32;
    ///
    /// let x = (&mut x) as *mut u32;
    /// let y = (&mut y) as *mut u32;
    ///
    /// let offset = (x as usize - y as usize) / 4;
    /// let bad = x.wrapping_add(offset).with_metadata_of(y);
    ///
    /// // This dereference is UB. The pointer only has provenance for `x` but points to `y`.
    /// println!("{:?}", unsafe { &*bad });
    /// ```
    #[unstable(feature = "set_ptr_value", issue = "75091")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline]
    pub const fn with_metadata_of<U>(self, meta: *const U) -> *mut U
    where
        U: PointeeSized,
    {
        // Reassemble: data pointer (and provenance) from `self`, metadata taken from `meta`.
        from_raw_parts_mut::<U>(self as *mut (), metadata(meta))
    }
119
120    /// Changes constness without changing the type.
121    ///
122    /// This is a bit safer than `as` because it wouldn't silently change the type if the code is
123    /// refactored.
124    ///
125    /// While not strictly required (`*mut T` coerces to `*const T`), this is provided for symmetry
126    /// with [`cast_mut`] on `*const T` and may have documentation value if used instead of implicit
127    /// coercion.
128    ///
129    /// [`cast_mut`]: pointer::cast_mut
130    #[stable(feature = "ptr_const_cast", since = "1.65.0")]
131    #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")]
132    #[rustc_diagnostic_item = "ptr_cast_const"]
133    #[inline(always)]
134    pub const fn cast_const(self) -> *const T {
135        self as _
136    }
137
138    #[doc = "Gets the \"address\" portion of the pointer.\n\nThis is similar to `self as usize`, except that the [provenance][crate::ptr#provenance] of\nthe pointer is discarded and not [exposed][crate::ptr#exposed-provenance]. This means that\ncasting the returned address back to a pointer yields a [pointer without\nprovenance][without_provenance], which is undefined behavior to dereference. To properly\nrestore the lost information and obtain a dereferenceable pointer, use\n[`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr].\n\nIf using those APIs is not possible because there is no way to preserve a pointer with the\nrequired provenance, then Strict Provenance might not be for you. Use pointer-integer casts\nor [`expose_provenance`][pointer::expose_provenance] and [`with_exposed_provenance`][with_exposed_provenance]\ninstead. However, note that this makes your code less portable and less amenable to tools\nthat check for compliance with the Rust memory model.\n\nOn most platforms this will produce a value with the same bytes as the original\npointer, because all the bytes are dedicated to describing the address.\nPlatforms which need to store additional information in the pointer may\nperform a change of representation to produce a value containing only the address\nportion of the pointer. What that means is up to the platform to define.\n\nThis is a [Strict Provenance][crate::ptr#strict-provenance] API.\n"include_str!("./docs/addr.md")]
139    ///
140    /// [without_provenance]: without_provenance_mut
141    #[must_use]
142    #[inline(always)]
143    #[stable(feature = "strict_provenance", since = "1.84.0")]
144    pub fn addr(self) -> usize {
145        // A pointer-to-integer transmute currently has exactly the right semantics: it returns the
146        // address without exposing the provenance. Note that this is *not* a stable guarantee about
147        // transmute semantics, it relies on sysroot crates having special status.
148        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
149        // provenance).
150        unsafe { mem::transmute(self.cast::<()>()) }
151    }
152
153    /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
154    /// [`with_exposed_provenance_mut`] and returns the "address" portion.
155    ///
156    /// This is equivalent to `self as usize`, which semantically discards provenance information.
157    /// Furthermore, this (like the `as` cast) has the implicit side-effect of marking the
158    /// provenance as 'exposed', so on platforms that support it you can later call
159    /// [`with_exposed_provenance_mut`] to reconstitute the original pointer including its provenance.
160    ///
161    /// Due to its inherent ambiguity, [`with_exposed_provenance_mut`] may not be supported by tools
162    /// that help you to stay conformant with the Rust memory model. It is recommended to use
163    /// [Strict Provenance][crate::ptr#strict-provenance] APIs such as [`with_addr`][pointer::with_addr]
164    /// wherever possible, in which case [`addr`][pointer::addr] should be used instead of `expose_provenance`.
165    ///
166    /// On most platforms this will produce a value with the same bytes as the original pointer,
167    /// because all the bytes are dedicated to describing the address. Platforms which need to store
168    /// additional information in the pointer may not support this operation, since the 'expose'
169    /// side-effect which is required for [`with_exposed_provenance_mut`] to work is typically not
170    /// available.
171    ///
172    /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
173    ///
174    /// [`with_exposed_provenance_mut`]: with_exposed_provenance_mut
175    #[inline(always)]
176    #[stable(feature = "exposed_provenance", since = "1.84.0")]
177    pub fn expose_provenance(self) -> usize {
178        self.cast::<()>() as usize
179    }
180
181    /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
182    /// `self`.
183    ///
184    /// This is similar to a `addr as *mut T` cast, but copies
185    /// the *provenance* of `self` to the new pointer.
186    /// This avoids the inherent ambiguity of the unary cast.
187    ///
188    /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset
189    /// `self` to the given address, and therefore has all the same capabilities and restrictions.
190    ///
191    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
192    #[must_use]
193    #[inline]
194    #[stable(feature = "strict_provenance", since = "1.84.0")]
195    pub fn with_addr(self, addr: usize) -> Self {
196        // This should probably be an intrinsic to avoid doing any sort of arithmetic, but
197        // meanwhile, we can implement it with `wrapping_offset`, which preserves the pointer's
198        // provenance.
199        let self_addr = self.addr() as isize;
200        let dest_addr = addr as isize;
201        let offset = dest_addr.wrapping_sub(self_addr);
202        self.wrapping_byte_offset(offset)
203    }
204
205    /// Creates a new pointer by mapping `self`'s address to a new one, preserving the original
206    /// pointer's [provenance][crate::ptr#provenance].
207    ///
208    /// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details.
209    ///
210    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
211    #[must_use]
212    #[inline]
213    #[stable(feature = "strict_provenance", since = "1.84.0")]
214    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
215        self.with_addr(f(self.addr()))
216    }
217
218    /// Decompose a (possibly wide) pointer into its data pointer and metadata components.
219    ///
220    /// The pointer can be later reconstructed with [`from_raw_parts_mut`].
221    #[unstable(feature = "ptr_metadata", issue = "81513")]
222    #[inline]
223    pub const fn to_raw_parts(self) -> (*mut (), <T as super::Pointee>::Metadata) {
224        (self.cast(), super::metadata(self))
225    }
226
227    #[doc = "Returns `None` if the pointer is null, or else returns a shared reference to\nthe value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`]\nmust be used instead. If the value is known to be non-null, [`as_ref_unchecked`]\ncan be used instead.\n\n# Safety\n\nWhen calling this method, you have to ensure that *either* the pointer is null *or*\nthe pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).\n\n# Panics during const evaluation\n\nThis method will panic during const evaluation if the pointer cannot be\ndetermined to be null or not. See [`is_null`] for more information.\n\n# Null-unchecked version\n\nIf you are sure the pointer can never be null, you can use `as_ref_unchecked` which returns\n`&mut T` instead of `Option<&mut T>`.\n"include_str!("./docs/as_ref.md")]
228    ///
229    /// ```
230    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
231    ///
232    /// unsafe {
233    ///     let val_back = ptr.as_ref_unchecked();
234    ///     println!("We got back the value: {val_back}!");
235    /// }
236    /// ```
237    ///
238    /// # Examples
239    ///
240    /// ```
241    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
242    ///
243    /// unsafe {
244    ///     if let Some(val_back) = ptr.as_ref() {
245    ///         println!("We got back the value: {val_back}!");
246    ///     }
247    /// }
248    /// ```
249    ///
250    /// # See Also
251    ///
252    /// For the mutable counterpart see [`as_mut`].
253    ///
254    /// [`is_null`]: #method.is_null-1
255    /// [`as_uninit_ref`]: #method.as_uninit_ref-1
256    /// [`as_ref_unchecked`]: #method.as_ref_unchecked-1
257    /// [`as_mut`]: #method.as_mut
258
259    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
260    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
261    #[inline]
262    pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
263        // SAFETY: the caller must guarantee that `self` is valid for a
264        // reference if it isn't null.
265        if self.is_null() { None } else { unsafe { Some(&*self) } }
266    }
267
    /// Returns a shared reference to the value behind the pointer.
    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_ref`] must be used instead.
    /// If the pointer may be null, but the value is known to have been initialized, [`as_ref`] must be used instead.
    ///
    /// For the mutable counterpart see [`as_mut_unchecked`].
    ///
    /// [`as_ref`]: #method.as_ref
    /// [`as_uninit_ref`]: #method.as_uninit_ref
    /// [`as_mut_unchecked`]: #method.as_mut_unchecked
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
    ///
    /// ```
    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
    ///
    /// unsafe {
    ///     println!("We got back the value: {}!", ptr.as_ref_unchecked());
    /// }
    /// ```
    #[stable(feature = "ptr_as_ref_unchecked", since = "1.95.0")]
    #[rustc_const_stable(feature = "ptr_as_ref_unchecked", since = "1.95.0")]
    #[inline]
    #[must_use]
    pub const unsafe fn as_ref_unchecked<'a>(self) -> &'a T {
        // SAFETY: the caller must guarantee that `self` is valid for a (shared) reference;
        // unlike `as_ref`, no null check is performed here.
        unsafe { &*self }
    }
299
300    #[doc = "Returns `None` if the pointer is null, or else returns a shared reference to\nthe value wrapped in `Some`. In contrast to [`as_ref`], this does not require\nthat the value has to be initialized.\n\n# Safety\n\nWhen calling this method, you have to ensure that *either* the pointer is null *or*\nthe pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).\nNote that because the created reference is to `MaybeUninit<T>`, the\nsource pointer can point to uninitialized memory.\n\n# Panics during const evaluation\n\nThis method will panic during const evaluation if the pointer cannot be\ndetermined to be null or not. See [`is_null`] for more information.\n"include_str!("./docs/as_uninit_ref.md")]
301    ///
302    /// [`is_null`]: #method.is_null-1
303    /// [`as_ref`]: pointer#method.as_ref-1
304    ///
305    /// # See Also
306    /// For the mutable counterpart see [`as_uninit_mut`].
307    ///
308    /// [`as_uninit_mut`]: #method.as_uninit_mut
309    ///
310    /// # Examples
311    ///
312    /// ```
313    /// #![feature(ptr_as_uninit)]
314    ///
315    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
316    ///
317    /// unsafe {
318    ///     if let Some(val_back) = ptr.as_uninit_ref() {
319    ///         println!("We got back the value: {}!", val_back.assume_init());
320    ///     }
321    /// }
322    /// ```
323    #[inline]
324    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
325    pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
326    where
327        T: Sized,
328    {
329        // SAFETY: the caller must guarantee that `self` meets all the
330        // requirements for a reference.
331        if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
332    }
333
334    #[doc = "Adds a signed offset to a pointer.\n\n`count` is in units of T; e.g., a `count` of 3 represents a pointer\noffset of `3 * size_of::<T>()` bytes.\n\n# Safety\n\nIf any of the following conditions are violated, the result is Undefined Behavior:\n\n* The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without\n\"wrapping around\"), must fit in an `isize`.\n\n* If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some\n[allocation], and the entire memory range between `self` and the result must be in\nbounds of that allocation. In particular, this range must not \"wrap around\" the edge\nof the address space. Note that \"range\" here refers to a half-open range as usual in Rust,\ni.e., `self..result` for non-negative offsets and `result..self` for negative offsets.\n\nAllocations can never be larger than `isize::MAX` bytes, so if the computed offset\nstays in bounds of the allocation, it is guaranteed to satisfy the first requirement.\nThis implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always\nsafe.\n\nConsider using [`wrapping_offset`] instead if these constraints are\ndifficult to satisfy. The only advantage of this method is that it\nenables more aggressive compiler optimizations.\n\n[`wrapping_offset`]: #method.wrapping_offset\n[allocation]: crate::ptr#allocation\n"include_str!("./docs/offset.md")]
335    ///
336    /// # Examples
337    ///
338    /// ```
339    /// let mut s = [1, 2, 3];
340    /// let ptr: *mut u32 = s.as_mut_ptr();
341    ///
342    /// unsafe {
343    ///     assert_eq!(2, *ptr.offset(1));
344    ///     assert_eq!(3, *ptr.offset(2));
345    /// }
346    /// ```
347    #[stable(feature = "rust1", since = "1.0.0")]
348    #[must_use = "returns a new pointer rather than modifying its argument"]
349    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
350    #[inline(always)]
351    #[track_caller]
352    pub const unsafe fn offset(self, count: isize) -> *mut T
353    where
354        T: Sized,
355    {
356        #[inline]
357        #[rustc_allow_const_fn_unstable(const_eval_select)]
358        const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
359            // We can use const_eval_select here because this is only for UB checks.
360            {
    #[inline]
    fn runtime(this: *const (), count: isize, size: usize) -> bool {
        {
            let Some(byte_offset) =
                count.checked_mul(size as isize) else { return false; };
            let (_, overflow) =
                this.addr().overflowing_add_signed(byte_offset);
            !overflow
        }
    }
    #[inline]
    const fn compiletime(this: *const (), count: isize, size: usize) -> bool {
        let _ = this;
        let _ = count;
        let _ = size;
        { true }
    }
    const_eval_select((this, count, size), compiletime, runtime)
}const_eval_select!(
361                @capture { this: *const (), count: isize, size: usize } -> bool:
362                if const {
363                    true
364                } else {
365                    // `size` is the size of a Rust type, so we know that
366                    // `size <= isize::MAX` and thus `as` cast here is not lossy.
367                    let Some(byte_offset) = count.checked_mul(size as isize) else {
368                        return false;
369                    };
370                    let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
371                    !overflow
372                }
373            )
374        }
375
376        {
    #[rustc_no_mir_inline]
    #[inline]
    #[rustc_nounwind]
    #[track_caller]
    const fn precondition_check(this: *const (), count: isize, size: usize) {
        if !runtime_offset_nowrap(this, count, size) {
            let msg =
                "unsafe precondition(s) violated: ptr::offset requires the address calculation to not overflow\n\nThis indicates a bug in the program. This Undefined Behavior check is optional, and cannot be relied on for safety.";
            ::core::panicking::panic_nounwind_fmt(::core::fmt::Arguments::from_str(msg),
                false);
        }
    }
    if ::core::ub_checks::check_language_ub() {
        precondition_check(self as *const (), count, size_of::<T>());
    }
};ub_checks::assert_unsafe_precondition!(
377            check_language_ub,
378            "ptr::offset requires the address calculation to not overflow",
379            (
380                this: *const () = self as *const (),
381                count: isize = count,
382                size: usize = size_of::<T>(),
383            ) => runtime_offset_nowrap(this, count, size)
384        );
385
386        // SAFETY: the caller must uphold the safety contract for `offset`.
387        // The obtained pointer is valid for writes since the caller must
388        // guarantee that it points to the same allocation as `self`.
389        unsafe { intrinsics::offset(self, count) }
390    }
391
392    /// Adds a signed offset in bytes to a pointer.
393    ///
394    /// `count` is in units of **bytes**.
395    ///
396    /// This is purely a convenience for casting to a `u8` pointer and
397    /// using [offset][pointer::offset] on it. See that method for documentation
398    /// and safety requirements.
399    ///
400    /// For non-`Sized` pointees this operation changes only the data pointer,
401    /// leaving the metadata untouched.
402    #[must_use]
403    #[inline(always)]
404    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
405    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
406    #[track_caller]
407    pub const unsafe fn byte_offset(self, count: isize) -> Self {
408        // SAFETY: the caller must uphold the safety contract for `offset`.
409        unsafe { self.cast::<u8>().offset(count).with_metadata_of(self) }
410    }
411
    /// Adds a signed offset to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocation] that `self` points to
    /// (this is called "[Provenance](ptr/index.html#provenance)").
    /// The pointer must not be used to read or write other allocations.
    ///
    /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocation.
    ///
    /// Compared to [`offset`], this method basically delays the requirement of staying within the
    /// same allocation: [`offset`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
    /// words, leaving the allocation and then re-entering it later is permitted.
    ///
    /// [`offset`]: #method.offset
    /// [allocation]: crate::ptr#allocation
    ///
    /// # Examples
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements
    /// let mut data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *mut u8 = data.as_mut_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_offset(6);
    ///
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         *ptr = 0;
    ///     }
    ///     ptr = ptr.wrapping_offset(step);
    /// }
    /// assert_eq!(&data, &[0, 2, 0, 4, 0]);
    /// ```
    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_offset(self, count: isize) -> *mut T
    where
        T: Sized,
    {
        // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called;
        // it computes the offset with wrapping semantics, matching this method's contract.
        unsafe { intrinsics::arith_offset(self, count) as *mut T }
    }
472
473    /// Adds a signed offset in bytes to a pointer using wrapping arithmetic.
474    ///
475    /// `count` is in units of **bytes**.
476    ///
477    /// This is purely a convenience for casting to a `u8` pointer and
478    /// using [wrapping_offset][pointer::wrapping_offset] on it. See that method
479    /// for documentation.
480    ///
481    /// For non-`Sized` pointees this operation changes only the data pointer,
482    /// leaving the metadata untouched.
483    #[must_use]
484    #[inline(always)]
485    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
486    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
487    pub const fn wrapping_byte_offset(self, count: isize) -> Self {
488        self.cast::<u8>().wrapping_offset(count).with_metadata_of(self)
489    }
490
    /// Masks out bits of the pointer according to a mask.
    ///
    /// This is convenience for `ptr.map_addr(|a| a & mask)`.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    ///
    /// ## Examples
    ///
    /// ```
    /// #![feature(ptr_mask)]
    /// let mut v = 17_u32;
    /// let ptr: *mut u32 = &mut v;
    ///
    /// // `u32` is 4 bytes aligned,
    /// // which means that lower 2 bits are always 0.
    /// let tag_mask = 0b11;
    /// let ptr_mask = !tag_mask;
    ///
    /// // We can store something in these lower bits
    /// let tagged_ptr = ptr.map_addr(|a| a | 0b10);
    ///
    /// // Get the "tag" back
    /// let tag = tagged_ptr.addr() & tag_mask;
    /// assert_eq!(tag, 0b10);
    ///
    /// // Note that `tagged_ptr` is unaligned, it's UB to read from/write to it.
    /// // To get original pointer `mask` can be used:
    /// let masked_ptr = tagged_ptr.mask(ptr_mask);
    /// assert_eq!(unsafe { *masked_ptr }, 17);
    ///
    /// unsafe { *masked_ptr = 0 };
    /// assert_eq!(v, 0);
    /// ```
    #[unstable(feature = "ptr_mask", issue = "98290")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline(always)]
    pub fn mask(self, mask: usize) -> *mut T {
        // Mask the thin data pointer via the intrinsic, then reattach `self`'s
        // metadata and mutability with `cast_mut` + `with_metadata_of`.
        intrinsics::ptr_mask(self.cast::<()>(), mask).cast_mut().with_metadata_of(self)
    }
531
532    /// Returns `None` if the pointer is null, or else returns a unique reference to
533    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_mut`]
534    /// must be used instead. If the value is known to be non-null, [`as_mut_unchecked`]
535    /// can be used instead.
536    ///
537    /// For the shared counterpart see [`as_ref`].
538    ///
539    /// [`as_uninit_mut`]: #method.as_uninit_mut
540    /// [`as_mut_unchecked`]: #method.as_mut_unchecked
541    /// [`as_ref`]: pointer#method.as_ref-1
542    ///
543    /// # Safety
544    ///
545    /// When calling this method, you have to ensure that *either*
546    /// the pointer is null *or*
547    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
548    ///
549    /// # Panics during const evaluation
550    ///
551    /// This method will panic during const evaluation if the pointer cannot be
552    /// determined to be null or not. See [`is_null`] for more information.
553    ///
554    /// [`is_null`]: #method.is_null-1
555    ///
556    /// # Examples
557    ///
558    /// ```
559    /// let mut s = [1, 2, 3];
560    /// let ptr: *mut u32 = s.as_mut_ptr();
561    /// let first_value = unsafe { ptr.as_mut().unwrap() };
562    /// *first_value = 4;
563    /// # assert_eq!(s, [4, 2, 3]);
564    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
565    /// ```
566    ///
567    /// # Null-unchecked version
568    ///
569    /// If you are sure the pointer can never be null, you can use `as_mut_unchecked` which returns
570    /// `&mut T` instead of `Option<&mut T>`.
571    ///
572    /// ```
573    /// let mut s = [1, 2, 3];
574    /// let ptr: *mut u32 = s.as_mut_ptr();
575    /// let first_value = unsafe { ptr.as_mut_unchecked() };
576    /// *first_value = 4;
577    /// # assert_eq!(s, [4, 2, 3]);
578    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
579    /// ```
580    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
581    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
582    #[inline]
583    pub const unsafe fn as_mut<'a>(self) -> Option<&'a mut T> {
584        // SAFETY: the caller must guarantee that `self` is be valid for
585        // a mutable reference if it isn't null.
586        if self.is_null() { None } else { unsafe { Some(&mut *self) } }
587    }
588
    /// Returns a unique reference to the value behind the pointer.
    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_mut`] must be used instead.
    /// If the pointer may be null, but the value is known to have been initialized, [`as_mut`] must be used instead.
    ///
    /// For the shared counterpart see [`as_ref_unchecked`].
    ///
    /// [`as_mut`]: #method.as_mut
    /// [`as_uninit_mut`]: #method.as_uninit_mut
    /// [`as_ref_unchecked`]: #method.as_ref_unchecked-1
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// let first_value = unsafe { ptr.as_mut_unchecked() };
    /// *first_value = 4;
    /// # assert_eq!(s, [4, 2, 3]);
    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
    /// ```
    #[stable(feature = "ptr_as_ref_unchecked", since = "1.95.0")]
    #[rustc_const_stable(feature = "ptr_as_ref_unchecked", since = "1.95.0")]
    #[inline]
    #[must_use]
    pub const unsafe fn as_mut_unchecked<'a>(self) -> &'a mut T {
        // SAFETY: the caller must guarantee that `self` is valid for a reference;
        // unlike `as_mut`, no null check is performed here.
        unsafe { &mut *self }
    }
622
623    /// Returns `None` if the pointer is null, or else returns a unique reference to
624    /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
625    /// that the value has to be initialized.
626    ///
627    /// For the shared counterpart see [`as_uninit_ref`].
628    ///
629    /// [`as_mut`]: #method.as_mut
630    /// [`as_uninit_ref`]: pointer#method.as_uninit_ref-1
631    ///
632    /// # Safety
633    ///
634    /// When calling this method, you have to ensure that *either* the pointer is null *or*
635    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
636    ///
637    /// # Panics during const evaluation
638    ///
639    /// This method will panic during const evaluation if the pointer cannot be
640    /// determined to be null or not. See [`is_null`] for more information.
641    ///
642    /// [`is_null`]: #method.is_null-1
643    #[inline]
644    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
645    pub const unsafe fn as_uninit_mut<'a>(self) -> Option<&'a mut MaybeUninit<T>>
646    where
647        T: Sized,
648    {
649        // SAFETY: the caller must guarantee that `self` meets all the
650        // requirements for a reference.
651        if self.is_null() { None } else { Some(unsafe { &mut *(self as *mut MaybeUninit<T>) }) }
652    }
653
654    /// Returns whether two pointers are guaranteed to be equal.
655    ///
656    /// At runtime this function behaves like `Some(self == other)`.
657    /// However, in some contexts (e.g., compile-time evaluation),
658    /// it is not always possible to determine equality of two pointers, so this function may
659    /// spuriously return `None` for pointers that later actually turn out to have its equality known.
660    /// But when it returns `Some`, the pointers' equality is guaranteed to be known.
661    ///
662    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
663    /// version and unsafe code must not
664    /// rely on the result of this function for soundness. It is suggested to only use this function
665    /// for performance optimizations where spurious `None` return values by this function do not
666    /// affect the outcome, but just the performance.
667    /// The consequences of using this method to make runtime and compile-time code behave
668    /// differently have not been explored. This method should not be used to introduce such
669    /// differences, and it should also not be stabilized before we have a better understanding
670    /// of this issue.
671    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
672    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
673    #[inline]
674    pub const fn guaranteed_eq(self, other: *mut T) -> Option<bool>
675    where
676        T: Sized,
677    {
678        (self as *const T).guaranteed_eq(other as _)
679    }
680
681    /// Returns whether two pointers are guaranteed to be inequal.
682    ///
683    /// At runtime this function behaves like `Some(self != other)`.
684    /// However, in some contexts (e.g., compile-time evaluation),
685    /// it is not always possible to determine inequality of two pointers, so this function may
686    /// spuriously return `None` for pointers that later actually turn out to have its inequality known.
687    /// But when it returns `Some`, the pointers' inequality is guaranteed to be known.
688    ///
689    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
690    /// version and unsafe code must not
691    /// rely on the result of this function for soundness. It is suggested to only use this function
692    /// for performance optimizations where spurious `None` return values by this function do not
693    /// affect the outcome, but just the performance.
694    /// The consequences of using this method to make runtime and compile-time code behave
695    /// differently have not been explored. This method should not be used to introduce such
696    /// differences, and it should also not be stabilized before we have a better understanding
697    /// of this issue.
698    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
699    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
700    #[inline]
701    pub const fn guaranteed_ne(self, other: *mut T) -> Option<bool>
702    where
703        T: Sized,
704    {
705        (self as *const T).guaranteed_ne(other as _)
706    }
707
708    /// Calculates the distance between two pointers within the same allocation. The returned value is in
709    /// units of T: the distance in bytes divided by `size_of::<T>()`.
710    ///
711    /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
712    /// except that it has a lot more opportunities for UB, in exchange for the compiler
713    /// better understanding what you are doing.
714    ///
715    /// The primary motivation of this method is for computing the `len` of an array/slice
716    /// of `T` that you are currently representing as a "start" and "end" pointer
717    /// (and "end" is "one past the end" of the array).
718    /// In that case, `end.offset_from(start)` gets you the length of the array.
719    ///
720    /// All of the following safety requirements are trivially satisfied for this usecase.
721    ///
722    /// [`offset`]: pointer#method.offset-1
723    ///
724    /// # Safety
725    ///
726    /// If any of the following conditions are violated, the result is Undefined Behavior:
727    ///
728    /// * `self` and `origin` must either
729    ///
730    ///   * point to the same address, or
731    ///   * both be [derived from][crate::ptr#provenance] a pointer to the same [allocation], and the memory range between
732    ///     the two pointers must be in bounds of that object. (See below for an example.)
733    ///
734    /// * The distance between the pointers, in bytes, must be an exact multiple
735    ///   of the size of `T`.
736    ///
737    /// As a consequence, the absolute distance between the pointers, in bytes, computed on
738    /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
739    /// implied by the in-bounds requirement, and the fact that no allocation can be larger
740    /// than `isize::MAX` bytes.
741    ///
742    /// The requirement for pointers to be derived from the same allocation is primarily
743    /// needed for `const`-compatibility: the distance between pointers into *different* allocated
744    /// objects is not known at compile-time. However, the requirement also exists at
745    /// runtime and may be exploited by optimizations. If you wish to compute the difference between
746    /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
747    /// origin as isize) / size_of::<T>()`.
748    // FIXME: recommend `addr()` instead of `as usize` once that is stable.
749    ///
750    /// [`add`]: #method.add
751    /// [allocation]: crate::ptr#allocation
752    ///
753    /// # Panics
754    ///
755    /// This function panics if `T` is a Zero-Sized Type ("ZST").
756    ///
757    /// # Examples
758    ///
759    /// Basic usage:
760    ///
761    /// ```
762    /// let mut a = [0; 5];
763    /// let ptr1: *mut i32 = &mut a[1];
764    /// let ptr2: *mut i32 = &mut a[3];
765    /// unsafe {
766    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
767    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
768    ///     assert_eq!(ptr1.offset(2), ptr2);
769    ///     assert_eq!(ptr2.offset(-2), ptr1);
770    /// }
771    /// ```
772    ///
773    /// *Incorrect* usage:
774    ///
775    /// ```rust,no_run
776    /// let ptr1 = Box::into_raw(Box::new(0u8));
777    /// let ptr2 = Box::into_raw(Box::new(1u8));
778    /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
779    /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
780    /// let ptr2_other = (ptr1 as *mut u8).wrapping_offset(diff).wrapping_offset(1);
781    /// assert_eq!(ptr2 as usize, ptr2_other as usize);
782    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
783    /// // computing their offset is undefined behavior, even though
784    /// // they point to addresses that are in-bounds of the same object!
785    /// unsafe {
786    ///     let one = ptr2_other.offset_from(ptr2); // Undefined Behavior! ⚠️
787    /// }
788    /// ```
789    #[stable(feature = "ptr_offset_from", since = "1.47.0")]
790    #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
791    #[inline(always)]
792    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
793    pub const unsafe fn offset_from(self, origin: *const T) -> isize
794    where
795        T: Sized,
796    {
797        // SAFETY: the caller must uphold the safety contract for `offset_from`.
798        unsafe { (self as *const T).offset_from(origin) }
799    }
800
801    /// Calculates the distance between two pointers within the same allocation. The returned value is in
802    /// units of **bytes**.
803    ///
804    /// This is purely a convenience for casting to a `u8` pointer and
805    /// using [`offset_from`][pointer::offset_from] on it. See that method for
806    /// documentation and safety requirements.
807    ///
808    /// For non-`Sized` pointees this operation considers only the data pointers,
809    /// ignoring the metadata.
810    #[inline(always)]
811    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
812    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
813    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
814    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: *const U) -> isize {
815        // SAFETY: the caller must uphold the safety contract for `offset_from`.
816        unsafe { self.cast::<u8>().offset_from(origin.cast::<u8>()) }
817    }
818
819    /// Calculates the distance between two pointers within the same allocation, *where it's known that
820    /// `self` is equal to or greater than `origin`*. The returned value is in
821    /// units of T: the distance in bytes is divided by `size_of::<T>()`.
822    ///
823    /// This computes the same value that [`offset_from`](#method.offset_from)
824    /// would compute, but with the added precondition that the offset is
825    /// guaranteed to be non-negative.  This method is equivalent to
826    /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
827    /// but it provides slightly more information to the optimizer, which can
828    /// sometimes allow it to optimize slightly better with some backends.
829    ///
830    /// This method can be thought of as recovering the `count` that was passed
831    /// to [`add`](#method.add) (or, with the parameters in the other order,
832    /// to [`sub`](#method.sub)).  The following are all equivalent, assuming
833    /// that their safety preconditions are met:
834    /// ```rust
835    /// # unsafe fn blah(ptr: *mut i32, origin: *mut i32, count: usize) -> bool { unsafe {
836    /// ptr.offset_from_unsigned(origin) == count
837    /// # &&
838    /// origin.add(count) == ptr
839    /// # &&
840    /// ptr.sub(count) == origin
841    /// # } }
842    /// ```
843    ///
844    /// # Safety
845    ///
846    /// - The distance between the pointers must be non-negative (`self >= origin`)
847    ///
848    /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
849    ///   apply to this method as well; see it for the full details.
850    ///
851    /// Importantly, despite the return type of this method being able to represent
852    /// a larger offset, it's still *not permitted* to pass pointers which differ
853    /// by more than `isize::MAX` *bytes*.  As such, the result of this method will
854    /// always be less than or equal to `isize::MAX as usize`.
855    ///
856    /// # Panics
857    ///
858    /// This function panics if `T` is a Zero-Sized Type ("ZST").
859    ///
860    /// # Examples
861    ///
862    /// ```
863    /// let mut a = [0; 5];
864    /// let p: *mut i32 = a.as_mut_ptr();
865    /// unsafe {
866    ///     let ptr1: *mut i32 = p.add(1);
867    ///     let ptr2: *mut i32 = p.add(3);
868    ///
869    ///     assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
870    ///     assert_eq!(ptr1.add(2), ptr2);
871    ///     assert_eq!(ptr2.sub(2), ptr1);
872    ///     assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
873    /// }
874    ///
875    /// // This would be incorrect, as the pointers are not correctly ordered:
876    /// // ptr1.offset_from(ptr2)
877    /// ```
878    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
879    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
880    #[inline]
881    #[track_caller]
882    pub const unsafe fn offset_from_unsigned(self, origin: *const T) -> usize
883    where
884        T: Sized,
885    {
886        // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
887        unsafe { (self as *const T).offset_from_unsigned(origin) }
888    }
889
890    /// Calculates the distance between two pointers within the same allocation, *where it's known that
891    /// `self` is equal to or greater than `origin`*. The returned value is in
892    /// units of **bytes**.
893    ///
894    /// This is purely a convenience for casting to a `u8` pointer and
895    /// using [`offset_from_unsigned`][pointer::offset_from_unsigned] on it.
896    /// See that method for documentation and safety requirements.
897    ///
898    /// For non-`Sized` pointees this operation considers only the data pointers,
899    /// ignoring the metadata.
900    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
901    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
902    #[inline]
903    #[track_caller]
904    pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: *mut U) -> usize {
905        // SAFETY: the caller must uphold the safety contract for `byte_offset_from_unsigned`.
906        unsafe { (self as *const T).byte_offset_from_unsigned(origin) }
907    }
908
909    #[doc = "Adds an unsigned offset to a pointer.\n\nThis can only move the pointer forward (or not move it). If you need to move forward or\nbackward depending on the value, then you might want [`offset`](#method.offset) instead\nwhich takes a signed offset.\n\n`count` is in units of T; e.g., a `count` of 3 represents a pointer\noffset of `3 * size_of::<T>()` bytes.\n\n# Safety\n\nIf any of the following conditions are violated, the result is Undefined Behavior:\n\n* The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without\n\"wrapping around\"), must fit in an `isize`.\n\n* If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some\n[allocation], and the entire memory range between `self` and the result must be in\nbounds of that allocation. In particular, this range must not \"wrap around\" the edge\nof the address space.\n\nAllocations can never be larger than `isize::MAX` bytes, so if the computed offset\nstays in bounds of the allocation, it is guaranteed to satisfy the first requirement.\nThis implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always\nsafe.\n\nConsider using [`wrapping_add`] instead if these constraints are\ndifficult to satisfy. The only advantage of this method is that it\nenables more aggressive compiler optimizations.\n\n[`wrapping_add`]: #method.wrapping_add\n[allocation]: crate::ptr#allocation\n"include_str!("./docs/add.md")]
910    ///
911    /// # Examples
912    ///
913    /// ```
914    /// let mut s: String = "123".to_string();
915    /// let ptr: *mut u8 = s.as_mut_ptr();
916    ///
917    /// unsafe {
918    ///     assert_eq!('2', *ptr.add(1) as char);
919    ///     assert_eq!('3', *ptr.add(2) as char);
920    /// }
921    /// ```
922    #[stable(feature = "pointer_methods", since = "1.26.0")]
923    #[must_use = "returns a new pointer rather than modifying its argument"]
924    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
925    #[inline(always)]
926    #[track_caller]
927    pub const unsafe fn add(self, count: usize) -> Self
928    where
929        T: Sized,
930    {
931        #[cfg(debug_assertions)]
932        #[inline]
933        #[rustc_allow_const_fn_unstable(const_eval_select)]
934        const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
935            {
    #[inline]
    fn runtime(this: *const (), count: usize, size: usize) -> bool {
        {
            let Some(byte_offset) =
                count.checked_mul(size) else { return false; };
            let (_, overflow) = this.addr().overflowing_add(byte_offset);
            byte_offset <= (isize::MAX as usize) && !overflow
        }
    }
    #[inline]
    const fn compiletime(this: *const (), count: usize, size: usize) -> bool {
        let _ = this;
        let _ = count;
        let _ = size;
        { true }
    }
    const_eval_select((this, count, size), compiletime, runtime)
}const_eval_select!(
936                @capture { this: *const (), count: usize, size: usize } -> bool:
937                if const {
938                    true
939                } else {
940                    let Some(byte_offset) = count.checked_mul(size) else {
941                        return false;
942                    };
943                    let (_, overflow) = this.addr().overflowing_add(byte_offset);
944                    byte_offset <= (isize::MAX as usize) && !overflow
945                }
946            )
947        }
948
949        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
950        {
    #[rustc_no_mir_inline]
    #[inline]
    #[rustc_nounwind]
    #[track_caller]
    const fn precondition_check(this: *const (), count: usize, size: usize) {
        if !runtime_add_nowrap(this, count, size) {
            let msg =
                "unsafe precondition(s) violated: ptr::add requires that the address calculation does not overflow\n\nThis indicates a bug in the program. This Undefined Behavior check is optional, and cannot be relied on for safety.";
            ::core::panicking::panic_nounwind_fmt(::core::fmt::Arguments::from_str(msg),
                false);
        }
    }
    if ::core::ub_checks::check_language_ub() {
        precondition_check(self as *const (), count, size_of::<T>());
    }
};ub_checks::assert_unsafe_precondition!(
951            check_language_ub,
952            "ptr::add requires that the address calculation does not overflow",
953            (
954                this: *const () = self as *const (),
955                count: usize = count,
956                size: usize = size_of::<T>(),
957            ) => runtime_add_nowrap(this, count, size)
958        );
959
960        // SAFETY: the caller must uphold the safety contract for `offset`.
961        unsafe { intrinsics::offset(self, count) }
962    }
963
964    /// Adds an unsigned offset in bytes to a pointer.
965    ///
966    /// `count` is in units of bytes.
967    ///
968    /// This is purely a convenience for casting to a `u8` pointer and
969    /// using [add][pointer::add] on it. See that method for documentation
970    /// and safety requirements.
971    ///
972    /// For non-`Sized` pointees this operation changes only the data pointer,
973    /// leaving the metadata untouched.
974    #[must_use]
975    #[inline(always)]
976    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
977    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
978    #[track_caller]
979    pub const unsafe fn byte_add(self, count: usize) -> Self {
980        // SAFETY: the caller must uphold the safety contract for `add`.
981        unsafe { self.cast::<u8>().add(count).with_metadata_of(self) }
982    }
983
984    /// Subtracts an unsigned offset from a pointer.
985    ///
986    /// This can only move the pointer backward (or not move it). If you need to move forward or
987    /// backward depending on the value, then you might want [`offset`](#method.offset) instead
988    /// which takes a signed offset.
989    ///
990    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
991    /// offset of `3 * size_of::<T>()` bytes.
992    ///
993    /// # Safety
994    ///
995    /// If any of the following conditions are violated, the result is Undefined Behavior:
996    ///
997    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
998    ///   "wrapping around"), must fit in an `isize`.
999    ///
1000    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
1001    ///   [allocation], and the entire memory range between `self` and the result must be in
1002    ///   bounds of that allocation. In particular, this range must not "wrap around" the edge
1003    ///   of the address space.
1004    ///
1005    /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
1006    /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
1007    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
1008    /// safe.
1009    ///
1010    /// Consider using [`wrapping_sub`] instead if these constraints are
1011    /// difficult to satisfy. The only advantage of this method is that it
1012    /// enables more aggressive compiler optimizations.
1013    ///
1014    /// [`wrapping_sub`]: #method.wrapping_sub
1015    /// [allocation]: crate::ptr#allocation
1016    ///
1017    /// # Examples
1018    ///
1019    /// ```
1020    /// let s: &str = "123";
1021    ///
1022    /// unsafe {
1023    ///     let end: *const u8 = s.as_ptr().add(3);
1024    ///     assert_eq!('3', *end.sub(1) as char);
1025    ///     assert_eq!('2', *end.sub(2) as char);
1026    /// }
1027    /// ```
1028    #[stable(feature = "pointer_methods", since = "1.26.0")]
1029    #[must_use = "returns a new pointer rather than modifying its argument"]
1030    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
1031    #[inline(always)]
1032    #[track_caller]
1033    pub const unsafe fn sub(self, count: usize) -> Self
1034    where
1035        T: Sized,
1036    {
1037        #[cfg(debug_assertions)]
1038        #[inline]
1039        #[rustc_allow_const_fn_unstable(const_eval_select)]
1040        const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
1041            {
    #[inline]
    fn runtime(this: *const (), count: usize, size: usize) -> bool {
        {
            let Some(byte_offset) =
                count.checked_mul(size) else { return false; };
            byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
        }
    }
    #[inline]
    const fn compiletime(this: *const (), count: usize, size: usize) -> bool {
        let _ = this;
        let _ = count;
        let _ = size;
        { true }
    }
    const_eval_select((this, count, size), compiletime, runtime)
}const_eval_select!(
1042                @capture { this: *const (), count: usize, size: usize } -> bool:
1043                if const {
1044                    true
1045                } else {
1046                    let Some(byte_offset) = count.checked_mul(size) else {
1047                        return false;
1048                    };
1049                    byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
1050                }
1051            )
1052        }
1053
1054        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
1055        {
    #[rustc_no_mir_inline]
    #[inline]
    #[rustc_nounwind]
    #[track_caller]
    const fn precondition_check(this: *const (), count: usize, size: usize) {
        if !runtime_sub_nowrap(this, count, size) {
            let msg =
                "unsafe precondition(s) violated: ptr::sub requires that the address calculation does not overflow\n\nThis indicates a bug in the program. This Undefined Behavior check is optional, and cannot be relied on for safety.";
            ::core::panicking::panic_nounwind_fmt(::core::fmt::Arguments::from_str(msg),
                false);
        }
    }
    if ::core::ub_checks::check_language_ub() {
        precondition_check(self as *const (), count, size_of::<T>());
    }
};ub_checks::assert_unsafe_precondition!(
1056            check_language_ub,
1057            "ptr::sub requires that the address calculation does not overflow",
1058            (
1059                this: *const () = self as *const (),
1060                count: usize = count,
1061                size: usize = size_of::<T>(),
1062            ) => runtime_sub_nowrap(this, count, size)
1063        );
1064
1065        if T::IS_ZST {
1066            // Pointer arithmetic does nothing when the pointee is a ZST.
1067            self
1068        } else {
1069            // SAFETY: the caller must uphold the safety contract for `offset`.
1070            // Because the pointee is *not* a ZST, that means that `count` is
1071            // at most `isize::MAX`, and thus the negation cannot overflow.
1072            unsafe { intrinsics::offset(self, intrinsics::unchecked_sub(0, count as isize)) }
1073        }
1074    }
1075
1076    /// Subtracts an unsigned offset in bytes from a pointer.
1077    ///
1078    /// `count` is in units of bytes.
1079    ///
1080    /// This is purely a convenience for casting to a `u8` pointer and
1081    /// using [sub][pointer::sub] on it. See that method for documentation
1082    /// and safety requirements.
1083    ///
1084    /// For non-`Sized` pointees this operation changes only the data pointer,
1085    /// leaving the metadata untouched.
1086    #[must_use]
1087    #[inline(always)]
1088    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1089    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1090    #[track_caller]
1091    pub const unsafe fn byte_sub(self, count: usize) -> Self {
1092        // SAFETY: the caller must uphold the safety contract for `sub`.
1093        unsafe { self.cast::<u8>().sub(count).with_metadata_of(self) }
1094    }
1095
1096    /// Adds an unsigned offset to a pointer using wrapping arithmetic.
1097    ///
1098    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1099    /// offset of `3 * size_of::<T>()` bytes.
1100    ///
1101    /// # Safety
1102    ///
1103    /// This operation itself is always safe, but using the resulting pointer is not.
1104    ///
1105    /// The resulting pointer "remembers" the [allocation] that `self` points to; it must not
1106    /// be used to read or write other allocations.
1107    ///
1108    /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
1109    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
1110    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
1111    /// `x` and `y` point into the same allocation.
1112    ///
1113    /// Compared to [`add`], this method basically delays the requirement of staying within the
1114    /// same allocation: [`add`] is immediate Undefined Behavior when crossing object
1115    /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
1116    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
1117    /// can be optimized better and is thus preferable in performance-sensitive code.
1118    ///
1119    /// The delayed check only considers the value of the pointer that was dereferenced, not the
1120    /// intermediate values used during the computation of the final result. For example,
1121    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
1122    /// allocation and then re-entering it later is permitted.
1123    ///
1124    /// [`add`]: #method.add
1125    /// [allocation]: crate::ptr#allocation
1126    ///
1127    /// # Examples
1128    ///
1129    /// ```
1130    /// // Iterate using a raw pointer in increments of two elements
1131    /// let data = [1u8, 2, 3, 4, 5];
1132    /// let mut ptr: *const u8 = data.as_ptr();
1133    /// let step = 2;
1134    /// let end_rounded_up = ptr.wrapping_add(6);
1135    ///
1136    /// // This loop prints "1, 3, 5, "
1137    /// while ptr != end_rounded_up {
1138    ///     unsafe {
1139    ///         print!("{}, ", *ptr);
1140    ///     }
1141    ///     ptr = ptr.wrapping_add(step);
1142    /// }
1143    /// ```
1144    #[stable(feature = "pointer_methods", since = "1.26.0")]
1145    #[must_use = "returns a new pointer rather than modifying its argument"]
1146    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
1147    #[inline(always)]
1148    pub const fn wrapping_add(self, count: usize) -> Self
1149    where
1150        T: Sized,
1151    {
1152        self.wrapping_offset(count as isize)
1153    }
1154
1155    /// Adds an unsigned offset in bytes to a pointer using wrapping arithmetic.
1156    ///
1157    /// `count` is in units of bytes.
1158    ///
1159    /// This is purely a convenience for casting to a `u8` pointer and
1160    /// using [wrapping_add][pointer::wrapping_add] on it. See that method for documentation.
1161    ///
1162    /// For non-`Sized` pointees this operation changes only the data pointer,
1163    /// leaving the metadata untouched.
1164    #[must_use]
1165    #[inline(always)]
1166    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1167    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1168    pub const fn wrapping_byte_add(self, count: usize) -> Self {
1169        self.cast::<u8>().wrapping_add(count).with_metadata_of(self)
1170    }
1171
1172    /// Subtracts an unsigned offset from a pointer using wrapping arithmetic.
1173    ///
1174    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1175    /// offset of `3 * size_of::<T>()` bytes.
1176    ///
1177    /// # Safety
1178    ///
1179    /// This operation itself is always safe, but using the resulting pointer is not.
1180    ///
1181    /// The resulting pointer "remembers" the [allocation] that `self` points to; it must not
1182    /// be used to read or write other allocations.
1183    ///
1184    /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
1185    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
1186    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
1187    /// `x` and `y` point into the same allocation.
1188    ///
1189    /// Compared to [`sub`], this method basically delays the requirement of staying within the
1190    /// same allocation: [`sub`] is immediate Undefined Behavior when crossing object
1191    /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
1192    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
1193    /// can be optimized better and is thus preferable in performance-sensitive code.
1194    ///
1195    /// The delayed check only considers the value of the pointer that was dereferenced, not the
1196    /// intermediate values used during the computation of the final result. For example,
1197    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
1198    /// allocation and then re-entering it later is permitted.
1199    ///
1200    /// [`sub`]: #method.sub
1201    /// [allocation]: crate::ptr#allocation
1202    ///
1203    /// # Examples
1204    ///
1205    /// ```
1206    /// // Iterate using a raw pointer in increments of two elements (backwards)
1207    /// let data = [1u8, 2, 3, 4, 5];
1208    /// let mut ptr: *const u8 = data.as_ptr();
1209    /// let start_rounded_down = ptr.wrapping_sub(2);
1210    /// ptr = ptr.wrapping_add(4);
1211    /// let step = 2;
1212    /// // This loop prints "5, 3, 1, "
1213    /// while ptr != start_rounded_down {
1214    ///     unsafe {
1215    ///         print!("{}, ", *ptr);
1216    ///     }
1217    ///     ptr = ptr.wrapping_sub(step);
1218    /// }
1219    /// ```
1220    #[stable(feature = "pointer_methods", since = "1.26.0")]
1221    #[must_use = "returns a new pointer rather than modifying its argument"]
1222    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
1223    #[inline(always)]
1224    pub const fn wrapping_sub(self, count: usize) -> Self
1225    where
1226        T: Sized,
1227    {
1228        self.wrapping_offset((count as isize).wrapping_neg())
1229    }
1230
1231    /// Subtracts an unsigned offset in bytes from a pointer using wrapping arithmetic.
1232    ///
1233    /// `count` is in units of bytes.
1234    ///
1235    /// This is purely a convenience for casting to a `u8` pointer and
1236    /// using [wrapping_sub][pointer::wrapping_sub] on it. See that method for documentation.
1237    ///
1238    /// For non-`Sized` pointees this operation changes only the data pointer,
1239    /// leaving the metadata untouched.
1240    #[must_use]
1241    #[inline(always)]
1242    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1243    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1244    pub const fn wrapping_byte_sub(self, count: usize) -> Self {
1245        self.cast::<u8>().wrapping_sub(count).with_metadata_of(self)
1246    }
1247
1248    /// Reads the value from `self` without moving it. This leaves the
1249    /// memory in `self` unchanged.
1250    ///
1251    /// See [`ptr::read`] for safety concerns and examples.
1252    ///
1253    /// [`ptr::read`]: crate::ptr::read()
1254    #[stable(feature = "pointer_methods", since = "1.26.0")]
1255    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
1256    #[inline(always)]
1257    #[track_caller]
1258    pub const unsafe fn read(self) -> T
1259    where
1260        T: Sized,
1261    {
1262        // SAFETY: the caller must uphold the safety contract for ``.
1263        unsafe { read(self) }
1264    }
1265
1266    /// Performs a volatile read of the value from `self` without moving it. This
1267    /// leaves the memory in `self` unchanged.
1268    ///
1269    /// Volatile operations are intended to act on I/O memory, and are guaranteed
1270    /// to not be elided or reordered by the compiler across other volatile
1271    /// operations.
1272    ///
1273    /// See [`ptr::read_volatile`] for safety concerns and examples.
1274    ///
1275    /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
1276    #[stable(feature = "pointer_methods", since = "1.26.0")]
1277    #[inline(always)]
1278    #[track_caller]
1279    pub unsafe fn read_volatile(self) -> T
1280    where
1281        T: Sized,
1282    {
1283        // SAFETY: the caller must uphold the safety contract for `read_volatile`.
1284        unsafe { read_volatile(self) }
1285    }
1286
1287    /// Reads the value from `self` without moving it. This leaves the
1288    /// memory in `self` unchanged.
1289    ///
1290    /// Unlike `read`, the pointer may be unaligned.
1291    ///
1292    /// See [`ptr::read_unaligned`] for safety concerns and examples.
1293    ///
1294    /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
1295    #[stable(feature = "pointer_methods", since = "1.26.0")]
1296    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
1297    #[inline(always)]
1298    #[track_caller]
1299    pub const unsafe fn read_unaligned(self) -> T
1300    where
1301        T: Sized,
1302    {
1303        // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
1304        unsafe { read_unaligned(self) }
1305    }
1306
1307    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1308    /// and destination may overlap.
1309    ///
1310    /// NOTE: this has the *same* argument order as [`ptr::copy`].
1311    ///
1312    /// See [`ptr::copy`] for safety concerns and examples.
1313    ///
1314    /// [`ptr::copy`]: crate::ptr::copy()
1315    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1316    #[stable(feature = "pointer_methods", since = "1.26.0")]
1317    #[inline(always)]
1318    #[track_caller]
1319    pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
1320    where
1321        T: Sized,
1322    {
1323        // SAFETY: the caller must uphold the safety contract for `copy`.
1324        unsafe { copy(self, dest, count) }
1325    }
1326
1327    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1328    /// and destination may *not* overlap.
1329    ///
1330    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
1331    ///
1332    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1333    ///
1334    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1335    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1336    #[stable(feature = "pointer_methods", since = "1.26.0")]
1337    #[inline(always)]
1338    #[track_caller]
1339    pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
1340    where
1341        T: Sized,
1342    {
1343        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1344        unsafe { copy_nonoverlapping(self, dest, count) }
1345    }
1346
1347    /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1348    /// and destination may overlap.
1349    ///
1350    /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
1351    ///
1352    /// See [`ptr::copy`] for safety concerns and examples.
1353    ///
1354    /// [`ptr::copy`]: crate::ptr::copy()
1355    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1356    #[stable(feature = "pointer_methods", since = "1.26.0")]
1357    #[inline(always)]
1358    #[track_caller]
1359    pub const unsafe fn copy_from(self, src: *const T, count: usize)
1360    where
1361        T: Sized,
1362    {
1363        // SAFETY: the caller must uphold the safety contract for `copy`.
1364        unsafe { copy(src, self, count) }
1365    }
1366
1367    /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1368    /// and destination may *not* overlap.
1369    ///
1370    /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
1371    ///
1372    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1373    ///
1374    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1375    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1376    #[stable(feature = "pointer_methods", since = "1.26.0")]
1377    #[inline(always)]
1378    #[track_caller]
1379    pub const unsafe fn copy_from_nonoverlapping(self, src: *const T, count: usize)
1380    where
1381        T: Sized,
1382    {
1383        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1384        unsafe { copy_nonoverlapping(src, self, count) }
1385    }
1386
1387    /// Executes the destructor (if any) of the pointed-to value.
1388    ///
1389    /// See [`ptr::drop_in_place`] for safety concerns and examples.
1390    ///
1391    /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
1392    #[stable(feature = "pointer_methods", since = "1.26.0")]
1393    #[rustc_const_unstable(feature = "const_drop_in_place", issue = "109342")]
1394    #[inline(always)]
1395    pub const unsafe fn drop_in_place(self)
1396    where
1397        T: [const] Destruct,
1398    {
1399        // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
1400        unsafe { drop_in_place(self) }
1401    }
1402
1403    /// Overwrites a memory location with the given value without reading or
1404    /// dropping the old value.
1405    ///
1406    /// See [`ptr::write`] for safety concerns and examples.
1407    ///
1408    /// [`ptr::write`]: crate::ptr::write()
1409    #[stable(feature = "pointer_methods", since = "1.26.0")]
1410    #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1411    #[inline(always)]
1412    #[track_caller]
1413    pub const unsafe fn write(self, val: T)
1414    where
1415        T: Sized,
1416    {
1417        // SAFETY: the caller must uphold the safety contract for `write`.
1418        unsafe { write(self, val) }
1419    }
1420
1421    /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
1422    /// bytes of memory starting at `self` to `val`.
1423    ///
1424    /// See [`ptr::write_bytes`] for safety concerns and examples.
1425    ///
1426    /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
1427    #[doc(alias = "memset")]
1428    #[stable(feature = "pointer_methods", since = "1.26.0")]
1429    #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1430    #[inline(always)]
1431    #[track_caller]
1432    pub const unsafe fn write_bytes(self, val: u8, count: usize)
1433    where
1434        T: Sized,
1435    {
1436        // SAFETY: the caller must uphold the safety contract for `write_bytes`.
1437        unsafe { write_bytes(self, val, count) }
1438    }
1439
1440    /// Performs a volatile write of a memory location with the given value without
1441    /// reading or dropping the old value.
1442    ///
1443    /// Volatile operations are intended to act on I/O memory, and are guaranteed
1444    /// to not be elided or reordered by the compiler across other volatile
1445    /// operations.
1446    ///
1447    /// See [`ptr::write_volatile`] for safety concerns and examples.
1448    ///
1449    /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
1450    #[stable(feature = "pointer_methods", since = "1.26.0")]
1451    #[inline(always)]
1452    #[track_caller]
1453    pub unsafe fn write_volatile(self, val: T)
1454    where
1455        T: Sized,
1456    {
1457        // SAFETY: the caller must uphold the safety contract for `write_volatile`.
1458        unsafe { write_volatile(self, val) }
1459    }
1460
1461    /// Overwrites a memory location with the given value without reading or
1462    /// dropping the old value.
1463    ///
1464    /// Unlike `write`, the pointer may be unaligned.
1465    ///
1466    /// See [`ptr::write_unaligned`] for safety concerns and examples.
1467    ///
1468    /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
1469    #[stable(feature = "pointer_methods", since = "1.26.0")]
1470    #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1471    #[inline(always)]
1472    #[track_caller]
1473    pub const unsafe fn write_unaligned(self, val: T)
1474    where
1475        T: Sized,
1476    {
1477        // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
1478        unsafe { write_unaligned(self, val) }
1479    }
1480
1481    /// Replaces the value at `self` with `src`, returning the old
1482    /// value, without dropping either.
1483    ///
1484    /// See [`ptr::replace`] for safety concerns and examples.
1485    ///
1486    /// [`ptr::replace`]: crate::ptr::replace()
1487    #[stable(feature = "pointer_methods", since = "1.26.0")]
1488    #[rustc_const_stable(feature = "const_inherent_ptr_replace", since = "1.88.0")]
1489    #[inline(always)]
1490    pub const unsafe fn replace(self, src: T) -> T
1491    where
1492        T: Sized,
1493    {
1494        // SAFETY: the caller must uphold the safety contract for `replace`.
1495        unsafe { replace(self, src) }
1496    }
1497
1498    /// Swaps the values at two mutable locations of the same type, without
1499    /// deinitializing either. They may overlap, unlike `mem::swap` which is
1500    /// otherwise equivalent.
1501    ///
1502    /// See [`ptr::swap`] for safety concerns and examples.
1503    ///
1504    /// [`ptr::swap`]: crate::ptr::swap()
1505    #[stable(feature = "pointer_methods", since = "1.26.0")]
1506    #[rustc_const_stable(feature = "const_swap", since = "1.85.0")]
1507    #[inline(always)]
1508    pub const unsafe fn swap(self, with: *mut T)
1509    where
1510        T: Sized,
1511    {
1512        // SAFETY: the caller must uphold the safety contract for `swap`.
1513        unsafe { swap(self, with) }
1514    }
1515
1516    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
1517    /// `align`.
1518    ///
1519    /// If it is not possible to align the pointer, the implementation returns
1520    /// `usize::MAX`.
1521    ///
1522    /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
1523    /// used with the `wrapping_add` method.
1524    ///
1525    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
1526    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
1527    /// the returned offset is correct in all terms other than alignment.
1528    ///
1529    /// # Panics
1530    ///
1531    /// The function panics if `align` is not a power-of-two.
1532    ///
1533    /// # Examples
1534    ///
1535    /// Accessing adjacent `u8` as `u16`
1536    ///
1537    /// ```
1538    /// # unsafe {
1539    /// let mut x = [5_u8, 6, 7, 8, 9];
1540    /// let ptr = x.as_mut_ptr();
1541    /// let offset = ptr.align_offset(align_of::<u16>());
1542    ///
1543    /// if offset < x.len() - 1 {
1544    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
1545    ///     *u16_ptr = 0;
1546    ///
1547    ///     assert!(x == [0, 0, 7, 8, 9] || x == [5, 0, 0, 8, 9]);
1548    /// } else {
1549    ///     // while the pointer can be aligned via `offset`, it would point
1550    ///     // outside the allocation
1551    /// }
1552    /// # }
1553    /// ```
1554    #[must_use]
1555    #[inline]
1556    #[stable(feature = "align_offset", since = "1.36.0")]
1557    pub fn align_offset(self, align: usize) -> usize
1558    where
1559        T: Sized,
1560    {
1561        if !align.is_power_of_two() {
1562            {
    crate::panicking::panic_fmt(format_args!("align_offset: align is not a power-of-two"));
};panic!("align_offset: align is not a power-of-two");
1563        }
1564
1565        // SAFETY: `align` has been checked to be a power of 2 above
1566        let ret = unsafe { align_offset(self, align) };
1567
1568        // Inform Miri that we want to consider the resulting pointer to be suitably aligned.
1569        #[cfg(miri)]
1570        if ret != usize::MAX {
1571            intrinsics::miri_promise_symbolic_alignment(
1572                self.wrapping_add(ret).cast_const().cast(),
1573                align,
1574            );
1575        }
1576
1577        ret
1578    }
1579
1580    /// Returns whether the pointer is properly aligned for `T`.
1581    ///
1582    /// # Examples
1583    ///
1584    /// ```
1585    /// // On some platforms, the alignment of i32 is less than 4.
1586    /// #[repr(align(4))]
1587    /// struct AlignedI32(i32);
1588    ///
1589    /// let mut data = AlignedI32(42);
1590    /// let ptr = &mut data as *mut AlignedI32;
1591    ///
1592    /// assert!(ptr.is_aligned());
1593    /// assert!(!ptr.wrapping_byte_add(1).is_aligned());
1594    /// ```
1595    #[must_use]
1596    #[inline]
1597    #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
1598    pub fn is_aligned(self) -> bool
1599    where
1600        T: Sized,
1601    {
1602        self.is_aligned_to(align_of::<T>())
1603    }
1604
1605    /// Returns whether the pointer is aligned to `align`.
1606    ///
1607    /// For non-`Sized` pointees this operation considers only the data pointer,
1608    /// ignoring the metadata.
1609    ///
1610    /// # Panics
1611    ///
1612    /// The function panics if `align` is not a power-of-two (this includes 0).
1613    ///
1614    /// # Examples
1615    ///
1616    /// ```
1617    /// #![feature(pointer_is_aligned_to)]
1618    ///
1619    /// // On some platforms, the alignment of i32 is less than 4.
1620    /// #[repr(align(4))]
1621    /// struct AlignedI32(i32);
1622    ///
1623    /// let mut data = AlignedI32(42);
1624    /// let ptr = &mut data as *mut AlignedI32;
1625    ///
1626    /// assert!(ptr.is_aligned_to(1));
1627    /// assert!(ptr.is_aligned_to(2));
1628    /// assert!(ptr.is_aligned_to(4));
1629    ///
1630    /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
1631    /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
1632    ///
1633    /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
1634    /// ```
1635    #[must_use]
1636    #[inline]
1637    #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
1638    pub fn is_aligned_to(self, align: usize) -> bool {
1639        if !align.is_power_of_two() {
1640            {
    crate::panicking::panic_fmt(format_args!("is_aligned_to: align is not a power-of-two"));
};panic!("is_aligned_to: align is not a power-of-two");
1641        }
1642
1643        self.addr() & (align - 1) == 0
1644    }
1645}
1646
1647impl<T> *mut T {
1648    /// Casts from a type to its maybe-uninitialized version.
1649    ///
1650    /// This is always safe, since UB can only occur if the pointer is read
1651    /// before being initialized.
1652    #[must_use]
1653    #[inline(always)]
1654    #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
1655    pub const fn cast_uninit(self) -> *mut MaybeUninit<T> {
1656        self as _
1657    }
1658
1659    /// Forms a raw mutable slice from a pointer and a length.
1660    ///
1661    /// The `len` argument is the number of **elements**, not the number of bytes.
1662    ///
1663    /// Performs the same functionality as [`cast_slice`] on a `*const T`, except that a
1664    /// raw mutable slice is returned, as opposed to a raw immutable slice.
1665    ///
1666    /// This function is safe, but actually using the return value is unsafe.
1667    /// See the documentation of [`slice::from_raw_parts_mut`] for slice safety requirements.
1668    ///
1669    /// [`slice::from_raw_parts_mut`]: crate::slice::from_raw_parts_mut
1670    /// [`cast_slice`]: pointer::cast_slice
1671    ///
1672    /// # Examples
1673    ///
1674    /// ```rust
1675    /// #![feature(ptr_cast_slice)]
1676    ///
1677    /// let x = &mut [5, 6, 7];
1678    /// let slice = x.as_mut_ptr().cast_slice(3);
1679    ///
1680    /// unsafe {
1681    ///     (*slice)[2] = 99; // assign a value at an index in the slice
1682    /// };
1683    ///
1684    /// assert_eq!(unsafe { &*slice }[2], 99);
1685    /// ```
1686    ///
1687    /// You must ensure that the pointer is valid and not null before dereferencing
1688    /// the raw slice. A slice reference must never have a null pointer, even if it's empty.
1689    ///
1690    /// ```rust,should_panic
1691    /// #![feature(ptr_cast_slice)]
1692    /// use std::ptr;
1693    /// let danger: *mut [u8] = ptr::null_mut::<u8>().cast_slice(0);
1694    /// unsafe {
1695    ///     danger.as_mut().expect("references must not be null");
1696    /// }
1697    /// ```
1698    #[inline]
1699    #[unstable(feature = "ptr_cast_slice", issue = "149103")]
1700    pub const fn cast_slice(self, len: usize) -> *mut [T] {
1701        slice_from_raw_parts_mut(self, len)
1702    }
1703}
1704impl<T> *mut MaybeUninit<T> {
1705    /// Casts from a maybe-uninitialized type to its initialized version.
1706    ///
1707    /// This is always safe, since UB can only occur if the pointer is read
1708    /// before being initialized.
1709    #[must_use]
1710    #[inline(always)]
1711    #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
1712    pub const fn cast_init(self) -> *mut T {
1713        self as _
1714    }
1715}
1716
impl<T> *mut [T] {
    /// Returns the length of a raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the raw slice cannot be cast to a slice
    /// reference because the pointer is null or unaligned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr;
    ///
    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline(always)]
    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
    pub const fn len(self) -> usize {
        // The length of a `*mut [T]` is stored in its metadata.
        metadata(self)
    }
1739
1740    /// Returns `true` if the raw slice has a length of 0.
1741    ///
1742    /// # Examples
1743    ///
1744    /// ```
1745    /// use std::ptr;
1746    ///
1747    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
1748    /// assert!(!slice.is_empty());
1749    /// ```
1750    #[inline(always)]
1751    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
1752    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
1753    pub const fn is_empty(self) -> bool {
1754        self.len() == 0
1755    }
1756
1757    /// Gets a raw, mutable pointer to the underlying array.
1758    ///
1759    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
1760    #[stable(feature = "core_slice_as_array", since = "1.93.0")]
1761    #[rustc_const_stable(feature = "core_slice_as_array", since = "1.93.0")]
1762    #[inline]
1763    #[must_use]
1764    pub const fn as_mut_array<const N: usize>(self) -> Option<*mut [T; N]> {
1765        if self.len() == N {
1766            let me = self.as_mut_ptr() as *mut [T; N];
1767            Some(me)
1768        } else {
1769            None
1770        }
1771    }
1772
1773    /// Divides one mutable raw slice into two at an index.
1774    ///
1775    /// The first will contain all indices from `[0, mid)` (excluding
1776    /// the index `mid` itself) and the second will contain all
1777    /// indices from `[mid, len)` (excluding the index `len` itself).
1778    ///
1779    /// # Panics
1780    ///
1781    /// Panics if `mid > len`.
1782    ///
1783    /// # Safety
1784    ///
1785    /// `mid` must be [in-bounds] of the underlying [allocation].
1786    /// Which means `self` must be dereferenceable and span a single allocation
1787    /// that is at least `mid * size_of::<T>()` bytes long. Not upholding these
1788    /// requirements is *[undefined behavior]* even if the resulting pointers are not used.
1789    ///
1790    /// Since `len` being in-bounds is not a safety invariant of `*mut [T]` the
1791    /// safety requirements of this method are the same as for [`split_at_mut_unchecked`].
1792    /// The explicit bounds check is only as useful as `len` is correct.
1793    ///
1794    /// [`split_at_mut_unchecked`]: #method.split_at_mut_unchecked
1795    /// [in-bounds]: #method.add
1796    /// [allocation]: crate::ptr#allocation
1797    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1798    ///
1799    /// # Examples
1800    ///
1801    /// ```
1802    /// #![feature(raw_slice_split)]
1803    ///
1804    /// let mut v = [1, 0, 3, 0, 5, 6];
1805    /// let ptr = &mut v as *mut [_];
1806    /// unsafe {
1807    ///     let (left, right) = ptr.split_at_mut(2);
1808    ///     assert_eq!(&*left, [1, 0]);
1809    ///     assert_eq!(&*right, [3, 0, 5, 6]);
1810    /// }
1811    /// ```
1812    #[inline(always)]
1813    #[track_caller]
1814    #[unstable(feature = "raw_slice_split", issue = "95595")]
1815    pub unsafe fn split_at_mut(self, mid: usize) -> (*mut [T], *mut [T]) {
1816        if !(mid <= self.len()) {
    crate::panicking::panic("assertion failed: mid <= self.len()")
};assert!(mid <= self.len());
1817        // SAFETY: The assert above is only a safety-net as long as `self.len()` is correct
1818        // The actual safety requirements of this function are the same as for `split_at_mut_unchecked`
1819        unsafe { self.split_at_mut_unchecked(mid) }
1820    }
1821
1822    /// Divides one mutable raw slice into two at an index, without doing bounds checking.
1823    ///
1824    /// The first will contain all indices from `[0, mid)` (excluding
1825    /// the index `mid` itself) and the second will contain all
1826    /// indices from `[mid, len)` (excluding the index `len` itself).
1827    ///
1828    /// # Safety
1829    ///
1830    /// `mid` must be [in-bounds] of the underlying [allocation].
1831    /// Which means `self` must be dereferenceable and span a single allocation
1832    /// that is at least `mid * size_of::<T>()` bytes long. Not upholding these
1833    /// requirements is *[undefined behavior]* even if the resulting pointers are not used.
1834    ///
1835    /// [in-bounds]: #method.add
1836    /// [out-of-bounds index]: #method.add
1837    /// [allocation]: crate::ptr#allocation
1838    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1839    ///
1840    /// # Examples
1841    ///
1842    /// ```
1843    /// #![feature(raw_slice_split)]
1844    ///
1845    /// let mut v = [1, 0, 3, 0, 5, 6];
1846    /// // scoped to restrict the lifetime of the borrows
1847    /// unsafe {
1848    ///     let ptr = &mut v as *mut [_];
1849    ///     let (left, right) = ptr.split_at_mut_unchecked(2);
1850    ///     assert_eq!(&*left, [1, 0]);
1851    ///     assert_eq!(&*right, [3, 0, 5, 6]);
1852    ///     (&mut *left)[1] = 2;
1853    ///     (&mut *right)[1] = 4;
1854    /// }
1855    /// assert_eq!(v, [1, 2, 3, 4, 5, 6]);
1856    /// ```
1857    #[inline(always)]
1858    #[unstable(feature = "raw_slice_split", issue = "95595")]
1859    pub unsafe fn split_at_mut_unchecked(self, mid: usize) -> (*mut [T], *mut [T]) {
1860        let len = self.len();
1861        let ptr = self.as_mut_ptr();
1862
1863        // SAFETY: Caller must pass a valid pointer and an index that is in-bounds.
1864        let tail = unsafe { ptr.add(mid) };
1865        (
1866            crate::ptr::slice_from_raw_parts_mut(ptr, mid),
1867            crate::ptr::slice_from_raw_parts_mut(tail, len - mid),
1868        )
1869    }
1870
1871    /// Returns a raw pointer to the slice's buffer.
1872    ///
1873    /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
1874    ///
1875    /// # Examples
1876    ///
1877    /// ```rust
1878    /// #![feature(slice_ptr_get)]
1879    /// use std::ptr;
1880    ///
1881    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
1882    /// assert_eq!(slice.as_mut_ptr(), ptr::null_mut());
1883    /// ```
1884    #[inline(always)]
1885    #[unstable(feature = "slice_ptr_get", issue = "74265")]
1886    pub const fn as_mut_ptr(self) -> *mut T {
1887        self as *mut T
1888    }
1889
1890    /// Returns a raw pointer to an element or subslice, without doing bounds
1891    /// checking.
1892    ///
1893    /// Calling this method with an [out-of-bounds index] or when `self` is not dereferenceable
1894    /// is *[undefined behavior]* even if the resulting pointer is not used.
1895    ///
1896    /// [out-of-bounds index]: #method.add
1897    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1898    ///
1899    /// # Examples
1900    ///
1901    /// ```
1902    /// #![feature(slice_ptr_get)]
1903    ///
1904    /// let x = &mut [1, 2, 4] as *mut [i32];
1905    ///
1906    /// unsafe {
1907    ///     assert_eq!(x.get_unchecked_mut(1), x.as_mut_ptr().add(1));
1908    /// }
1909    /// ```
1910    #[unstable(feature = "slice_ptr_get", issue = "74265")]
1911    #[rustc_const_unstable(feature = "const_index", issue = "143775")]
1912    #[inline(always)]
1913    pub const unsafe fn get_unchecked_mut<I>(self, index: I) -> *mut I::Output
1914    where
1915        I: [const] SliceIndex<[T]>,
1916    {
1917        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
1918        unsafe { index.get_unchecked_mut(self) }
1919    }
1920
1921    #[doc = "Returns `None` if the pointer is null, or else returns a shared slice to\nthe value wrapped in `Some`. In contrast to [`as_ref`], this does not require\nthat the value has to be initialized.\n\n[`as_ref`]: #method.as_ref\n\n# Safety\n\nWhen calling this method, you have to ensure that *either* the pointer is null *or*\nall of the following is true:\n\n* The pointer must be [valid] for reads for `ptr.len() * size_of::<T>()` many bytes,\n  and it must be properly aligned. This means in particular:\n\n* The entire memory range of this slice must be contained within a single [allocation]!\n  Slices can never span across multiple allocations.\n\n* The pointer must be aligned even for zero-length slices. One\n  reason for this is that enum layout optimizations may rely on references\n  (including slices of any length) being aligned and non-null to distinguish\n  them from other data. You can obtain a pointer that is usable as `data`\n  for zero-length slices using [`NonNull::dangling()`].\n\n* The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.\n  See the safety documentation of [`pointer::offset`].\n\n* You must enforce Rust\'s aliasing rules, since the returned lifetime `\'a` is\n  arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.\n  In particular, while this reference exists, the memory the pointer points to must\n  not get mutated (except inside `UnsafeCell`).\n\nThis applies even if the result of this method is unused!\n\nSee also [`slice::from_raw_parts`][].\n\n[valid]: crate::ptr#safety\n[allocation]: crate::ptr#allocation\n\n# Panics during const evaluation\n\nThis method will panic during const evaluation if the pointer cannot be\ndetermined to be null or not. See [`is_null`] for more information.\n\n[`is_null`]: #method.is_null\n"include_str!("docs/as_uninit_slice.md")]
1922    ///
1923    /// # See Also
1924    /// For the mutable counterpart see [`as_uninit_slice_mut`](pointer::as_uninit_slice_mut).
1925    #[inline]
1926    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1927    pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
1928        if self.is_null() {
1929            None
1930        } else {
1931            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
1932            Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
1933        }
1934    }
1935
1936    /// Returns `None` if the pointer is null, or else returns a unique slice to
1937    /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
1938    /// that the value has to be initialized.
1939    ///
1940    /// For the shared counterpart see [`as_uninit_slice`].
1941    ///
1942    /// [`as_mut`]: #method.as_mut
1943    /// [`as_uninit_slice`]: #method.as_uninit_slice-1
1944    ///
1945    /// # Safety
1946    ///
1947    /// When calling this method, you have to ensure that *either* the pointer is null *or*
1948    /// all of the following is true:
1949    ///
1950    /// * The pointer must be [valid] for reads and writes for `ptr.len() * size_of::<T>()`
1951    ///   many bytes, and it must be properly aligned. This means in particular:
1952    ///
1953    ///     * The entire memory range of this slice must be contained within a single [allocation]!
1954    ///       Slices can never span across multiple allocations.
1955    ///
1956    ///     * The pointer must be aligned even for zero-length slices. One
1957    ///       reason for this is that enum layout optimizations may rely on references
1958    ///       (including slices of any length) being aligned and non-null to distinguish
1959    ///       them from other data. You can obtain a pointer that is usable as `data`
1960    ///       for zero-length slices using [`NonNull::dangling()`].
1961    ///
1962    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1963    ///   See the safety documentation of [`pointer::offset`].
1964    ///
1965    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1966    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1967    ///   In particular, while this reference exists, the memory the pointer points to must
1968    ///   not get accessed (read or written) through any other pointer.
1969    ///
1970    /// This applies even if the result of this method is unused!
1971    ///
1972    /// See also [`slice::from_raw_parts_mut`][].
1973    ///
1974    /// [valid]: crate::ptr#safety
1975    /// [allocation]: crate::ptr#allocation
1976    ///
1977    /// # Panics during const evaluation
1978    ///
1979    /// This method will panic during const evaluation if the pointer cannot be
1980    /// determined to be null or not. See [`is_null`] for more information.
1981    ///
1982    /// [`is_null`]: #method.is_null-1
1983    #[inline]
1984    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1985    pub const unsafe fn as_uninit_slice_mut<'a>(self) -> Option<&'a mut [MaybeUninit<T>]> {
1986        if self.is_null() {
1987            None
1988        } else {
1989            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
1990            Some(unsafe { slice::from_raw_parts_mut(self as *mut MaybeUninit<T>, self.len()) })
1991        }
1992    }
1993}
1994
1995impl<T> *mut T {
1996    /// Casts from a pointer-to-`T` to a pointer-to-`[T; N]`.
1997    #[inline]
1998    #[unstable(feature = "ptr_cast_array", issue = "144514")]
1999    pub const fn cast_array<const N: usize>(self) -> *mut [T; N] {
2000        self.cast()
2001    }
2002}
2003
2004impl<T, const N: usize> *mut [T; N] {
2005    /// Returns a raw pointer to the array's buffer.
2006    ///
2007    /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
2008    ///
2009    /// # Examples
2010    ///
2011    /// ```rust
2012    /// #![feature(array_ptr_get)]
2013    /// use std::ptr;
2014    ///
2015    /// let arr: *mut [i8; 3] = ptr::null_mut();
2016    /// assert_eq!(arr.as_mut_ptr(), ptr::null_mut());
2017    /// ```
2018    #[inline]
2019    #[unstable(feature = "array_ptr_get", issue = "119834")]
2020    pub const fn as_mut_ptr(self) -> *mut T {
2021        self as *mut T
2022    }
2023
2024    /// Returns a raw pointer to a mutable slice containing the entire array.
2025    ///
2026    /// # Examples
2027    ///
2028    /// ```
2029    /// #![feature(array_ptr_get)]
2030    ///
2031    /// let mut arr = [1, 2, 5];
2032    /// let ptr: *mut [i32; 3] = &mut arr;
2033    /// unsafe {
2034    ///     (&mut *ptr.as_mut_slice())[..2].copy_from_slice(&[3, 4]);
2035    /// }
2036    /// assert_eq!(arr, [3, 4, 5]);
2037    /// ```
2038    #[inline]
2039    #[unstable(feature = "array_ptr_get", issue = "119834")]
2040    pub const fn as_mut_slice(self) -> *mut [T] {
2041        self
2042    }
2043}
2044
2045/// Pointer equality is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method.
2046#[stable(feature = "rust1", since = "1.0.0")]
2047#[diagnostic::on_const(
2048    message = "pointers cannot be reliably compared during const eval",
2049    note = "see issue #53020 <https://github.com/rust-lang/rust/issues/53020> for more information"
2050)]
2051impl<T: PointeeSized> PartialEq for *mut T {
2052    #[inline(always)]
2053    #[allow(ambiguous_wide_pointer_comparisons)]
2054    fn eq(&self, other: &*mut T) -> bool {
2055        *self == *other
2056    }
2057}
2058
/// Pointer equality is an equivalence relation.
#[stable(feature = "rust1", since = "1.0.0")]
#[diagnostic::on_const(
    message = "pointers cannot be reliably compared during const eval",
    note = "see issue #53020 <https://github.com/rust-lang/rust/issues/53020> for more information"
)]
// Marker impl: raw-pointer `==` is a total equality (reflexive, symmetric, transitive),
// so upgrading `PartialEq` to `Eq` is sound and needs no methods.
impl<T: PointeeSized> Eq for *mut T {}
2066
2067/// Pointer comparison is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method.
2068#[stable(feature = "rust1", since = "1.0.0")]
2069#[diagnostic::on_const(
2070    message = "pointers cannot be reliably compared during const eval",
2071    note = "see issue #53020 <https://github.com/rust-lang/rust/issues/53020> for more information"
2072)]
2073impl<T: PointeeSized> Ord for *mut T {
2074    #[inline]
2075    #[allow(ambiguous_wide_pointer_comparisons)]
2076    fn cmp(&self, other: &*mut T) -> Ordering {
2077        if self < other {
2078            Less
2079        } else if self == other {
2080            Equal
2081        } else {
2082            Greater
2083        }
2084    }
2085}
2086
2087/// Pointer comparison is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method.
2088#[stable(feature = "rust1", since = "1.0.0")]
2089#[diagnostic::on_const(
2090    message = "pointers cannot be reliably compared during const eval",
2091    note = "see issue #53020 <https://github.com/rust-lang/rust/issues/53020> for more information"
2092)]
2093impl<T: PointeeSized> PartialOrd for *mut T {
2094    #[inline(always)]
2095    #[allow(ambiguous_wide_pointer_comparisons)]
2096    fn partial_cmp(&self, other: &*mut T) -> Option<Ordering> {
2097        Some(self.cmp(other))
2098    }
2099
2100    #[inline(always)]
2101    #[allow(ambiguous_wide_pointer_comparisons)]
2102    fn lt(&self, other: &*mut T) -> bool {
2103        *self < *other
2104    }
2105
2106    #[inline(always)]
2107    #[allow(ambiguous_wide_pointer_comparisons)]
2108    fn le(&self, other: &*mut T) -> bool {
2109        *self <= *other
2110    }
2111
2112    #[inline(always)]
2113    #[allow(ambiguous_wide_pointer_comparisons)]
2114    fn gt(&self, other: &*mut T) -> bool {
2115        *self > *other
2116    }
2117
2118    #[inline(always)]
2119    #[allow(ambiguous_wide_pointer_comparisons)]
2120    fn ge(&self, other: &*mut T) -> bool {
2121        *self >= *other
2122    }
2123}
2124
#[stable(feature = "raw_ptr_default", since = "1.88.0")]
// `Thin` bound: presumably mirrors the requirement of `null_mut`, which only
// exists for pointee types carrying no metadata — confirm in the `ptr` module.
impl<T: ?Sized + Thin> Default for *mut T {
    /// Returns the default value of [`null_mut()`][crate::ptr::null_mut].
    fn default() -> Self {
        crate::ptr::null_mut()
    }
}