//! Memory allocation APIs

#![stable(feature = "alloc_module", since = "1.28.0")]
45#[stable(feature = "alloc_module", since = "1.28.0")]
6#[doc(inline)]
7pub use core::alloc::*;
8use core::mem::Alignment;
9use core::ptr::{self, NonNull};
10use core::{cmp, hint};
unsafe extern "Rust" {
    // These are the magic symbols to call the global allocator. rustc generates
    // them to call the global allocator if there is a `#[global_allocator]` attribute
    // (the code expanding that attribute macro generates those functions), or to call
    // the default implementations in std (`__rdl_alloc` etc. in `library/std/src/alloc.rs`)
    // otherwise.
    #[rustc_allocator]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    #[rustc_allocator_zeroed_variant = "__rust_alloc_zeroed"]
    fn __rust_alloc(size: usize, align: Alignment) -> *mut u8;
    #[rustc_deallocator]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_dealloc(ptr: NonNull<u8>, size: usize, align: Alignment);
    #[rustc_reallocator]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_realloc(
        ptr: NonNull<u8>,
        old_size: usize,
        align: Alignment,
        new_size: usize,
    ) -> *mut u8;
    #[rustc_allocator_zeroed]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_alloc_zeroed(size: usize, align: Alignment) -> *mut u8;

    // Referenced by `alloc`/`alloc_zeroed` below so that omitting the allocator
    // shim cannot accidentally become possible in stable code before it is
    // actually stabilized (see the comments at the call sites).
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_no_alloc_shim_is_unstable_v2();
}
4546/// The global memory allocator.
47///
48/// This type implements the [`Allocator`] trait by forwarding calls
49/// to the allocator registered with the `#[global_allocator]` attribute
50/// if there is one, or the `std` crate’s default.
51///
52/// Note: while this type is unstable, the functionality it provides can be
53/// accessed through the [free functions in `alloc`](self#functions).
54#[unstable(feature = "allocator_api", issue = "32838")]
55#[derive(#[automatically_derived]
#[unstable(feature = "allocator_api", issue = "32838")]
impl ::core::marker::Copy for Global { }Copy, #[automatically_derived]
#[unstable(feature = "allocator_api", issue = "32838")]
impl ::core::clone::Clone for Global {
#[inline]
fn clone(&self) -> Global { *self }
}Clone, #[automatically_derived]
#[unstable(feature = "allocator_api", issue = "32838")]
impl ::core::default::Default for Global {
#[inline]
fn default() -> Global { Global {} }
}Default, #[automatically_derived]
#[unstable(feature = "allocator_api", issue = "32838")]
impl ::core::fmt::Debug for Global {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::write_str(f, "Global")
}
}Debug)]
56// the compiler needs to know when a Box uses the global allocator vs a custom one
57#[lang = "global_alloc_ty"]
58pub struct Global;
/// Allocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `allocate` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     *(ptr as *mut u16) = 42;
///     assert_eq!(*(ptr as *mut u16), 42);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn alloc(layout: Layout) -> *mut u8 {
    unsafe {
        // Make sure we don't accidentally allow omitting the allocator shim in
        // stable code until it is actually stabilized.
        __rust_no_alloc_shim_is_unstable_v2();

        __rust_alloc(layout.size(), layout.alignment())
    }
}
104105/// Deallocates memory with the global allocator.
106///
107/// This function forwards calls to the [`GlobalAlloc::dealloc`] method
108/// of the allocator registered with the `#[global_allocator]` attribute
109/// if there is one, or the `std` crate’s default.
110///
111/// This function is expected to be deprecated in favor of the `deallocate` method
112/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
113///
114/// # Safety
115///
116/// See [`GlobalAlloc::dealloc`].
117#[stable(feature = "global_alloc", since = "1.28.0")]
118#[inline]
119#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
120pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
121unsafe { dealloc_nonnull(NonNull::new_unchecked(ptr), layout) }
122}
/// Same as [`dealloc`] but when you already have a non-null pointer
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn dealloc_nonnull(ptr: NonNull<u8>, layout: Layout) {
    // SAFETY: preconditions are inherited from the caller (see `dealloc`).
    unsafe { __rust_dealloc(ptr, layout.size(), layout.alignment()) }
}
130131/// Reallocates memory with the global allocator.
132///
133/// This function forwards calls to the [`GlobalAlloc::realloc`] method
134/// of the allocator registered with the `#[global_allocator]` attribute
135/// if there is one, or the `std` crate’s default.
136///
137/// This function is expected to be deprecated in favor of the `grow` and `shrink` methods
138/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
139///
140/// # Safety
141///
142/// See [`GlobalAlloc::realloc`].
143#[stable(feature = "global_alloc", since = "1.28.0")]
144#[must_use = "losing the pointer will leak memory"]
145#[inline]
146#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
147pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
148unsafe { realloc_nonnull(NonNull::new_unchecked(ptr), layout, new_size) }
149}
/// Same as [`realloc`] but when you already have a non-null pointer
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn realloc_nonnull(ptr: NonNull<u8>, layout: Layout, new_size: usize) -> *mut u8 {
    // SAFETY: preconditions are inherited from the caller (see `realloc`).
    unsafe { __rust_realloc(ptr, layout.size(), layout.alignment(), new_size) }
}
/// Allocates zero-initialized memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc_zeroed`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `allocate_zeroed` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc_zeroed`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc_zeroed, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc_zeroed(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     assert_eq!(*(ptr as *mut u16), 0);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
    unsafe {
        // Make sure we don't accidentally allow omitting the allocator shim in
        // stable code until it is actually stabilized.
        __rust_no_alloc_shim_is_unstable_v2();

        __rust_alloc_zeroed(layout.size(), layout.alignment())
    }
}
201202impl Global {
203#[inline]
204 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
205fn alloc_impl_runtime(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
206match layout.size() {
2070 => Ok(NonNull::slice_from_raw_parts(layout.dangling_ptr(), 0)),
208// SAFETY: `layout` is non-zero in size,
209size => unsafe {
210let raw_ptr = if zeroed { alloc_zeroed(layout) } else { alloc(layout) };
211let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
212Ok(NonNull::slice_from_raw_parts(ptr, size))
213 },
214 }
215 }
216217#[inline]
218 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
219fn deallocate_impl_runtime(ptr: NonNull<u8>, layout: Layout) {
220if layout.size() != 0 {
221// SAFETY:
222 // * We have checked that `layout` is non-zero in size.
223 // * The caller is obligated to provide a layout that "fits", and in this case,
224 // "fit" always means a layout that is equal to the original, because our
225 // `allocate()`, `grow()`, and `shrink()` implementations never returns a larger
226 // allocation than requested.
227 // * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
228 // safety documentation.
229unsafe { dealloc_nonnull(ptr, layout) }
230 }
231 }
232233// SAFETY: Same as `Allocator::grow`
234#[inline]
235 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
236fn grow_impl_runtime(
237&self,
238 ptr: NonNull<u8>,
239 old_layout: Layout,
240 new_layout: Layout,
241 zeroed: bool,
242 ) -> Result<NonNull<[u8]>, AllocError> {
243if true {
if !(new_layout.size() >= old_layout.size()) {
{
::core::panicking::panic_fmt(format_args!("`new_layout.size()` must be greater than or equal to `old_layout.size()`"));
}
};
};debug_assert!(
244 new_layout.size() >= old_layout.size(),
245"`new_layout.size()` must be greater than or equal to `old_layout.size()`"
246);
247248match old_layout.size() {
2490 => self.alloc_impl(new_layout, zeroed),
250251// SAFETY: `new_size` is non-zero as `old_size` is greater than or equal to `new_size`
252 // as required by safety conditions. Other conditions must be upheld by the caller
253old_size if old_layout.align() == new_layout.align() => unsafe {
254let new_size = new_layout.size();
255256// `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
257hint::assert_unchecked(new_size >= old_layout.size());
258259let raw_ptr = realloc_nonnull(ptr, old_layout, new_size);
260let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
261if zeroed {
262raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
263 }
264Ok(NonNull::slice_from_raw_parts(ptr, new_size))
265 },
266267// SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
268 // both the old and new memory allocation are valid for reads and writes for `old_size`
269 // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
270 // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
271 // for `dealloc` must be upheld by the caller.
272old_size => unsafe {
273let new_ptr = self.alloc_impl(new_layout, zeroed)?;
274 ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
275self.deallocate(ptr, old_layout);
276Ok(new_ptr)
277 },
278 }
279 }
280281// SAFETY: Same as `Allocator::grow`
282#[inline]
283 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
284fn shrink_impl_runtime(
285&self,
286 ptr: NonNull<u8>,
287 old_layout: Layout,
288 new_layout: Layout,
289 _zeroed: bool,
290 ) -> Result<NonNull<[u8]>, AllocError> {
291if true {
if !(new_layout.size() <= old_layout.size()) {
{
::core::panicking::panic_fmt(format_args!("`new_layout.size()` must be smaller than or equal to `old_layout.size()`"));
}
};
};debug_assert!(
292 new_layout.size() <= old_layout.size(),
293"`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
294);
295296match new_layout.size() {
297// SAFETY: conditions must be upheld by the caller
2980 => unsafe {
299self.deallocate(ptr, old_layout);
300Ok(NonNull::slice_from_raw_parts(new_layout.dangling_ptr(), 0))
301 },
302303// SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
304new_size if old_layout.align() == new_layout.align() => unsafe {
305// `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
306hint::assert_unchecked(new_size <= old_layout.size());
307308let raw_ptr = realloc_nonnull(ptr, old_layout, new_size);
309let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
310Ok(NonNull::slice_from_raw_parts(ptr, new_size))
311 },
312313// SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
314 // both the old and new memory allocation are valid for reads and writes for `new_size`
315 // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
316 // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
317 // for `dealloc` must be upheld by the caller.
318new_size => unsafe {
319let new_ptr = self.allocate(new_layout)?;
320 ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
321self.deallocate(ptr, old_layout);
322Ok(new_ptr)
323 },
324 }
325 }
326327// SAFETY: Same as `Allocator::allocate`
328#[inline]
329 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
330#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
331const fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
332 core::intrinsics::const_eval_select(
333 (layout, zeroed),
334Global::alloc_impl_const,
335Global::alloc_impl_runtime,
336 )
337 }
338339// SAFETY: Same as `Allocator::deallocate`
340#[inline]
341 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
342#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
343const unsafe fn deallocate_impl(&self, ptr: NonNull<u8>, layout: Layout) {
344 core::intrinsics::const_eval_select(
345 (ptr, layout),
346Global::deallocate_impl_const,
347Global::deallocate_impl_runtime,
348 )
349 }
350351// SAFETY: Same as `Allocator::grow`
352#[inline]
353 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
354#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
355const unsafe fn grow_impl(
356&self,
357 ptr: NonNull<u8>,
358 old_layout: Layout,
359 new_layout: Layout,
360 zeroed: bool,
361 ) -> Result<NonNull<[u8]>, AllocError> {
362 core::intrinsics::const_eval_select(
363 (self, ptr, old_layout, new_layout, zeroed),
364Global::grow_shrink_impl_const,
365Global::grow_impl_runtime,
366 )
367 }
368369// SAFETY: Same as `Allocator::shrink`
370#[inline]
371 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
372#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
373const unsafe fn shrink_impl(
374&self,
375 ptr: NonNull<u8>,
376 old_layout: Layout,
377 new_layout: Layout,
378 ) -> Result<NonNull<[u8]>, AllocError> {
379 core::intrinsics::const_eval_select(
380 (self, ptr, old_layout, new_layout, false),
381Global::grow_shrink_impl_const,
382Global::shrink_impl_runtime,
383 )
384 }
385386#[inline]
387 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
388#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
389const fn alloc_impl_const(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
390match layout.size() {
3910 => Ok(NonNull::slice_from_raw_parts(layout.dangling_ptr(), 0)),
392// SAFETY: `layout` is non-zero in size,
393size => unsafe {
394let raw_ptr = core::intrinsics::const_allocate(layout.size(), layout.align());
395let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
396if zeroed {
397// SAFETY: the pointer returned by `const_allocate` is valid to write to.
398ptr.write_bytes(0, size);
399 }
400Ok(NonNull::slice_from_raw_parts(ptr, size))
401 },
402 }
403 }
404405#[inline]
406 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
407#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
408const fn deallocate_impl_const(ptr: NonNull<u8>, layout: Layout) {
409if layout.size() != 0 {
410// SAFETY: We checked for nonzero size; other preconditions must be upheld by caller.
411unsafe {
412 core::intrinsics::const_deallocate(ptr.as_ptr(), layout.size(), layout.align());
413 }
414 }
415 }
416417#[inline]
418 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
419#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
420const fn grow_shrink_impl_const(
421&self,
422 ptr: NonNull<u8>,
423 old_layout: Layout,
424 new_layout: Layout,
425 zeroed: bool,
426 ) -> Result<NonNull<[u8]>, AllocError> {
427let new_ptr = self.alloc_impl(new_layout, zeroed)?;
428// SAFETY: both pointers are valid and this operations is in bounds.
429unsafe {
430 ptr::copy_nonoverlapping(
431ptr.as_ptr(),
432new_ptr.as_mut_ptr(),
433 cmp::min(old_layout.size(), new_layout.size()),
434 );
435 }
436unsafe {
437self.deallocate_impl(ptr, old_layout);
438 }
439Ok(new_ptr)
440 }
441}
#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
unsafe impl const Allocator for Global {
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.deallocate_impl(ptr, layout) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.shrink_impl(ptr, old_layout, new_layout) }
    }
}
// # Allocation error handler
#[cfg(not(no_global_oom_handling))]
unsafe extern "Rust" {
    // This is the magic symbol to call the global alloc error handler. rustc generates
    // it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the
    // default implementations below (`__rdl_alloc_error_handler`) otherwise.
    #[rustc_std_internal_symbol]
    fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
}
512513/// Signals a memory allocation error.
514///
515/// Callers of memory allocation APIs wishing to cease execution
516/// in response to an allocation error are encouraged to call this function,
517/// rather than directly invoking [`panic!`] or similar.
518///
519/// This function is guaranteed to diverge (not return normally with a value), but depending on
520/// global configuration, it may either panic (resulting in unwinding or aborting as per
521/// configuration for all panics), or abort the process (with no unwinding).
522///
523/// The default behavior is:
524///
525/// * If the binary links against `std` (typically the case), then
526/// print a message to standard error and abort the process.
527/// This behavior can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
528/// Future versions of Rust may panic by default instead.
529///
530/// * If the binary does not link against `std` (all of its crates are marked
531/// [`#![no_std]`][no_std]), then call [`panic!`] with a message.
532/// [The panic handler] applies as to any panic.
533///
534/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
535/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
536/// [The panic handler]: https://doc.rust-lang.org/reference/runtime.html#the-panic_handler-attribute
537/// [no_std]: https://doc.rust-lang.org/reference/names/preludes.html#the-no_std-attribute
538#[stable(feature = "global_alloc", since = "1.28.0")]
539#[rustc_const_unstable(feature = "const_alloc_error", issue = "92523")]
540#[cfg(not(no_global_oom_handling))]
541#[cold]
542#[optimize(size)]
543pub const fn handle_alloc_error(layout: Layout) -> ! {
544const fn ct_error(_: Layout) -> ! {
545{ ::core::panicking::panic_fmt(format_args!("allocation failed")); };panic!("allocation failed");
546 }
547548#[inline]
549fn rt_error(layout: Layout) -> ! {
550unsafe {
551__rust_alloc_error_handler(layout.size(), layout.align());
552 }
553 }
554555#[cfg(not(panic = "immediate-abort"))]
556{
557 core::intrinsics::const_eval_select((layout,), ct_error, rt_error)
558 }
559560#[cfg(panic = "immediate-abort")]
561ct_error(layout)
562}
563564#[cfg(not(no_global_oom_handling))]
565#[doc(hidden)]
566#[allow(unused_attributes)]
567#[unstable(feature = "alloc_internals", issue = "none")]
568pub mod __alloc_error_handler {
569// called via generated `__rust_alloc_error_handler` if there is no
570 // `#[alloc_error_handler]`.
571#[rustc_std_internal_symbol]
572pub unsafe fn __rdl_alloc_error_handler(size: usize, _align: usize) -> ! {
573 core::panicking::panic_nounwind_fmt(
574format_args!("memory allocation of {0} bytes failed", size)format_args!("memory allocation of {size} bytes failed"),
575/* force_no_backtrace */ false,
576 )
577 }
578}