// alloc/raw_vec/mod.rs
1#![unstable(feature = "raw_vec_internals", reason = "unstable const warnings", issue = "none")]
2#![cfg_attr(test, allow(dead_code))]
3
4// Note: This module is also included in the alloctests crate using #[path] to
5// run the tests. See the comment there for an explanation why this is the case.
6
7use core::marker::{Destruct, PhantomData};
8use core::mem::{Alignment, ManuallyDrop, MaybeUninit, SizedTypeProperties};
9use core::ptr::{self, NonNull, Unique};
10use core::{cmp, hint};
11
12#[cfg(not(no_global_oom_handling))]
13use crate::alloc::handle_alloc_error;
14use crate::alloc::{Allocator, Global, Layout};
15use crate::boxed::Box;
16use crate::collections::TryReserveError;
17use crate::collections::TryReserveErrorKind::*;
18
19#[cfg(test)]
20mod tests;
21
// One central function responsible for reporting capacity overflows. This'll
// ensure that the code generation related to these panics is minimal as there's
// only one location which panics rather than a bunch throughout the module.
//
// Returns `!`: this never returns normally, it always panics.
#[cfg(not(no_global_oom_handling))]
#[cfg_attr(not(panic = "immediate-abort"), inline(never))]
const fn capacity_overflow() -> ! {
    panic!("capacity overflow");
}
30
/// How the freshly allocated memory should be initialized.
/// Passed to `RawVecInner::try_allocate_in` to select between
/// `Allocator::allocate` and `Allocator::allocate_zeroed`.
enum AllocInit {
    /// The contents of the new memory are uninitialized.
    Uninitialized,
    #[cfg(not(no_global_oom_handling))]
    /// The new memory is guaranteed to be zeroed.
    Zeroed,
}
38
/// Capacity type: a `usize` restricted to `0..=isize::MAX` (the high bit is
/// never set), which gives containers built on `RawVec` a layout niche.
type Cap = core::num::niche_types::UsizeNoHighBit;

// SAFETY: zero is trivially within `0..=isize::MAX`.
const ZERO_CAP: Cap = unsafe { Cap::new_unchecked(0) };

/// `Cap(cap)`, except if `T` is a ZST then `Cap::ZERO`.
///
/// ZSTs always store a zero `cap`; `capacity()` reports `usize::MAX` for them instead.
///
/// # Safety: cap must be <= `isize::MAX`.
const unsafe fn new_cap<T>(cap: usize) -> Cap {
    if T::IS_ZST { ZERO_CAP } else { unsafe { Cap::new_unchecked(cap) } }
}
49
/// A low-level utility for more ergonomically allocating, reallocating, and deallocating
/// a buffer of memory on the heap without having to worry about all the corner cases
/// involved. This type is excellent for building your own data structures like Vec and VecDeque.
/// In particular:
///
/// * Produces `Unique::dangling()` on zero-sized types.
/// * Produces `Unique::dangling()` on zero-length allocations.
/// * Avoids freeing `Unique::dangling()`.
/// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics).
/// * Guards against 32-bit systems allocating more than `isize::MAX` bytes.
/// * Guards against overflowing your length.
/// * Calls `handle_alloc_error` for fallible allocations.
/// * Contains a `ptr::Unique` and thus endows the user with all related benefits.
/// * Uses the excess returned from the allocator to use the largest available capacity.
///
/// This type does not in any way inspect the memory that it manages. When dropped it *will*
/// free its memory, but it *won't* try to drop its contents. It is up to the user of `RawVec`
/// to handle the actual things *stored* inside of a `RawVec`.
///
/// Note that the excess of a zero-sized types is always infinite, so `capacity()` always returns
/// `usize::MAX`. This means that you need to be careful when round-tripping this type with a
/// `Box<[T]>`, since `capacity()` won't yield the length.
#[allow(missing_debug_implementations)]
pub(crate) struct RawVec<T, A: Allocator = Global> {
    /// Type-erased buffer; all type-specific information is conveyed via `T::LAYOUT`.
    inner: RawVecInner<A>,
    /// Marks this type as logically owning `T`s (for variance and drop check).
    _marker: PhantomData<T>,
}
77
/// Like a `RawVec`, but only generic over the allocator, not the type.
///
/// As such, all the methods need the layout passed-in as a parameter.
///
/// Having this separation reduces the amount of code we need to monomorphize,
/// as most operations don't need the actual type, just its layout.
#[allow(missing_debug_implementations)]
struct RawVecInner<A: Allocator = Global> {
    /// Pointer to the buffer, or a dangling (well-aligned, no-provenance) pointer
    /// when nothing has been allocated (see `new_in`).
    ptr: Unique<u8>,
    /// Never used for ZSTs; it's `capacity()`'s responsibility to return usize::MAX in that case.
    ///
    /// # Safety
    ///
    /// `cap` must be in the `0..=isize::MAX` range.
    cap: Cap,
    /// The allocator the buffer was obtained from; also used to free it.
    alloc: A,
}
95
impl<T> RawVec<T, Global> {
    /// Creates the biggest possible `RawVec` (on the system heap)
    /// without allocating. If `T` has positive size, then this makes a
    /// `RawVec` with capacity `0`. If `T` is zero-sized, then it makes a
    /// `RawVec` with capacity `usize::MAX`. Useful for implementing
    /// delayed allocation.
    #[must_use]
    pub(crate) const fn new() -> Self {
        Self::new_in(Global)
    }

    /// Creates a `RawVec` (on the system heap) with exactly the
    /// capacity and alignment requirements for a `[T; capacity]`. This is
    /// equivalent to calling `RawVec::new` when `capacity` is `0` or `T` is
    /// zero-sized. Note that if `T` is zero-sized this means you will
    /// *not* get a `RawVec` with the requested capacity.
    ///
    /// Non-fallible version of `try_with_capacity`
    ///
    /// # Panics
    ///
    /// Panics if the requested capacity exceeds `isize::MAX` bytes.
    ///
    /// # Aborts
    ///
    /// Aborts on OOM.
    #[cfg(not(any(no_global_oom_handling, test)))]
    #[must_use]
    #[inline]
    pub(crate) fn with_capacity(capacity: usize) -> Self {
        // Delegates to the monomorphic `RawVecInner::with_capacity` to keep
        // the per-`T` generated code minimal.
        Self { inner: RawVecInner::with_capacity(capacity, T::LAYOUT), _marker: PhantomData }
    }

    /// Like `with_capacity`, but guarantees the buffer is zeroed.
    #[cfg(not(any(no_global_oom_handling, test)))]
    #[must_use]
    #[inline]
    pub(crate) fn with_capacity_zeroed(capacity: usize) -> Self {
        Self {
            inner: RawVecInner::with_capacity_zeroed_in(capacity, Global, T::LAYOUT),
            _marker: PhantomData,
        }
    }
}
140
impl RawVecInner<Global> {
    /// Allocates an uninitialized buffer for `capacity` elements of `elem_layout`
    /// in the global allocator; panics/aborts via `handle_error` on failure.
    ///
    /// Monomorphic backend of `RawVec::with_capacity`.
    #[cfg(not(any(no_global_oom_handling, test)))]
    #[must_use]
    #[inline]
    fn with_capacity(capacity: usize, elem_layout: Layout) -> Self {
        match Self::try_allocate_in(capacity, AllocInit::Uninitialized, Global, elem_layout) {
            Ok(res) => res,
            Err(err) => handle_error(err),
        }
    }
}
152
// Tiny Vecs are dumb. Skip to:
// - 8 if the element size is 1, because any heap allocator is likely
//   to round up a request of less than 8 bytes to at least 8 bytes.
// - 4 if elements are moderate-sized (<= 1 KiB).
// - 1 otherwise, to avoid wasting too much space for very short Vecs.
const fn min_non_zero_cap(size: usize) -> usize {
    match size {
        1 => 8,
        // Note: `1` is already taken by the arm above, so this covers 0 and 2..=1024.
        0..=1024 => 4,
        _ => 1,
    }
}
167
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
#[rustfmt::skip] // FIXME(fee1-dead): temporary measure before rustfmt is bumped
const impl<T, A: [const] Allocator + [const] Destruct> RawVec<T, A> {
    /// Like `with_capacity`, but parameterized over the choice of
    /// allocator for the returned `RawVec`.
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    pub(crate) fn with_capacity_in(capacity: usize, alloc: A) -> Self {
        Self {
            inner: RawVecInner::with_capacity_in(capacity, alloc, T::LAYOUT),
            _marker: PhantomData,
        }
    }

    /// A specialized version of `self.reserve(len, 1)` which requires the
    /// caller to ensure `len == self.capacity()`.
    // NOTE(review): `inline(never)` presumably keeps this growth path out of
    // `push`-like call sites — confirm against codegen tests before changing.
    #[cfg(not(no_global_oom_handling))]
    #[inline(never)]
    pub(crate) fn grow_one(&mut self) {
        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
        unsafe { self.inner.grow_one(T::LAYOUT) }
    }
}
191
impl<T, A: Allocator> RawVec<T, A> {
    /// Smallest capacity a first (non-zero) allocation will use for elements
    /// of `T`'s size; see `min_non_zero_cap` for the rationale.
    #[cfg(not(no_global_oom_handling))]
    pub(crate) const MIN_NON_ZERO_CAP: usize = min_non_zero_cap(size_of::<T>());

    /// Like `new`, but parameterized over the choice of allocator for
    /// the returned `RawVec`.
    #[inline]
    pub(crate) const fn new_in(alloc: A) -> Self {
        // Check assumption made in `current_memory`
        const { assert!(T::LAYOUT.size() % T::LAYOUT.align() == 0) };
        Self { inner: RawVecInner::new_in(alloc, Alignment::of::<T>()), _marker: PhantomData }
    }

    /// Like `try_with_capacity`, but parameterized over the choice of
    /// allocator for the returned `RawVec`.
    #[inline]
    pub(crate) fn try_with_capacity_in(capacity: usize, alloc: A) -> Result<Self, TryReserveError> {
        // Explicit match (rather than `map`) mirrors the module-wide pattern of
        // avoiding closure-based combinators to keep generated IR small.
        match RawVecInner::try_with_capacity_in(capacity, alloc, T::LAYOUT) {
            Ok(inner) => Ok(Self { inner, _marker: PhantomData }),
            Err(e) => Err(e),
        }
    }

    /// Like `with_capacity_zeroed`, but parameterized over the choice
    /// of allocator for the returned `RawVec`.
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    pub(crate) fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self {
        Self {
            inner: RawVecInner::with_capacity_zeroed_in(capacity, alloc, T::LAYOUT),
            _marker: PhantomData,
        }
    }

    /// Converts the entire buffer into `Box<[MaybeUninit<T>]>` with the specified `len`.
    ///
    /// Note that this will correctly reconstitute any `cap` changes
    /// that may have been performed. (See description of type for details.)
    ///
    /// # Safety
    ///
    /// * `len` must be greater than or equal to the most recently requested capacity, and
    /// * `len` must be less than or equal to `self.capacity()`.
    ///
    /// Note, that the requested capacity and `self.capacity()` could differ, as
    /// an allocator could overallocate and return a greater memory block than requested.
    pub(crate) unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit<T>], A> {
        // Sanity-check one half of the safety requirement (we cannot check the other half).
        debug_assert!(
            len <= self.capacity(),
            "`len` must be smaller than or equal to `self.capacity()`"
        );

        // `ManuallyDrop` prevents our `Drop` impl from freeing the buffer that
        // ownership is being transferred to the `Box`.
        let me = ManuallyDrop::new(self);
        unsafe {
            let slice = ptr::slice_from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len);
            // `ptr::read` moves the allocator out without dropping `me`.
            Box::from_raw_in(slice, ptr::read(&me.inner.alloc))
        }
    }

    /// Reconstitutes a `RawVec` from a pointer, capacity, and allocator.
    ///
    /// # Safety
    ///
    /// The `ptr` must be allocated (via the given allocator `alloc`), and with the given
    /// `capacity`.
    /// The `capacity` cannot exceed `isize::MAX` for sized types. (only a concern on 32-bit
    /// systems). For ZSTs capacity is ignored.
    /// If the `ptr` and `capacity` come from a `RawVec` created via `alloc`, then this is
    /// guaranteed.
    #[inline]
    pub(crate) const unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self {
        // SAFETY: Precondition passed to the caller
        unsafe {
            let ptr = ptr.cast();
            let capacity = new_cap::<T>(capacity);
            Self {
                inner: RawVecInner::from_raw_parts_in(ptr, capacity, alloc),
                _marker: PhantomData,
            }
        }
    }

    /// A convenience method for hoisting the non-null precondition out of [`RawVec::from_raw_parts_in`].
    ///
    /// # Safety
    ///
    /// See [`RawVec::from_raw_parts_in`].
    #[inline]
    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
    pub(crate) const unsafe fn from_nonnull_in(ptr: NonNull<T>, capacity: usize, alloc: A) -> Self {
        // SAFETY: Precondition passed to the caller
        unsafe {
            let ptr = ptr.cast();
            let capacity = new_cap::<T>(capacity);
            Self { inner: RawVecInner::from_nonnull_in(ptr, capacity, alloc), _marker: PhantomData }
        }
    }

    /// Gets a raw pointer to the start of the allocation. Note that this is
    /// `Unique::dangling()` if `capacity == 0` or `T` is zero-sized. In the former case, you must
    /// be careful.
    #[inline]
    pub(crate) const fn ptr(&self) -> *mut T {
        self.inner.ptr()
    }

    /// Gets a `NonNull` pointer to the start of the allocation; same caveats as [`Self::ptr`].
    #[inline]
    pub(crate) const fn non_null(&self) -> NonNull<T> {
        self.inner.non_null()
    }

    /// Gets the capacity of the allocation.
    ///
    /// This will always be `usize::MAX` if `T` is zero-sized.
    #[inline]
    pub(crate) const fn capacity(&self) -> usize {
        self.inner.capacity(size_of::<T>())
    }

    /// Returns a shared reference to the allocator backing this `RawVec`.
    #[inline]
    pub(crate) const fn allocator(&self) -> &A {
        self.inner.allocator()
    }

    /// Ensures that the buffer contains at least enough space to hold `len +
    /// additional` elements. If it doesn't already have enough capacity, will
    /// reallocate enough space plus comfortable slack space to get amortized
    /// *O*(1) behavior. Will limit this behavior if it would needlessly cause
    /// itself to panic.
    ///
    /// If `len` exceeds `self.capacity()`, this may fail to actually allocate
    /// the requested space. This is not really unsafe, but the unsafe
    /// code *you* write that relies on the behavior of this function may break.
    ///
    /// This is ideal for implementing a bulk-push operation like `extend`.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity exceeds `isize::MAX` _bytes_.
    ///
    /// # Aborts
    ///
    /// Aborts on OOM.
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    pub(crate) fn reserve(&mut self, len: usize, additional: usize) {
        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
        unsafe { self.inner.reserve(len, additional, T::LAYOUT) }
    }

    /// The same as `reserve`, but returns on errors instead of panicking or aborting.
    pub(crate) fn try_reserve(
        &mut self,
        len: usize,
        additional: usize,
    ) -> Result<(), TryReserveError> {
        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
        unsafe { self.inner.try_reserve(len, additional, T::LAYOUT) }
    }

    /// Ensures that the buffer contains at least enough space to hold `len +
    /// additional` elements. If it doesn't already, will reallocate the
    /// minimum possible amount of memory necessary. Generally this will be
    /// exactly the amount of memory necessary, but in principle the allocator
    /// is free to give back more than we asked for.
    ///
    /// If `len` exceeds `self.capacity()`, this may fail to actually allocate
    /// the requested space. This is not really unsafe, but the unsafe code
    /// *you* write that relies on the behavior of this function may break.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity exceeds `isize::MAX` _bytes_.
    ///
    /// # Aborts
    ///
    /// Aborts on OOM.
    #[cfg(not(no_global_oom_handling))]
    pub(crate) fn reserve_exact(&mut self, len: usize, additional: usize) {
        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
        unsafe { self.inner.reserve_exact(len, additional, T::LAYOUT) }
    }

    /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting.
    pub(crate) fn try_reserve_exact(
        &mut self,
        len: usize,
        additional: usize,
    ) -> Result<(), TryReserveError> {
        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
        unsafe { self.inner.try_reserve_exact(len, additional, T::LAYOUT) }
    }

    /// Shrinks the buffer down to the specified capacity. If the given amount
    /// is 0, actually completely deallocates.
    ///
    /// # Panics
    ///
    /// Panics if the given amount is *larger* than the current capacity.
    ///
    /// # Aborts
    ///
    /// Aborts on OOM.
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    pub(crate) fn shrink_to_fit(&mut self, cap: usize) {
        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
        unsafe { self.inner.shrink_to_fit(cap, T::LAYOUT) }
    }

    /// Shrinks the buffer down to the specified capacity. If the given amount
    /// is 0, actually completely deallocates.
    ///
    /// # Errors
    ///
    /// This function returns an error if the allocator cannot shrink the allocation.
    ///
    /// # Panics
    ///
    /// Panics if the given amount is *larger* than the current capacity.
    #[inline]
    pub(crate) fn try_shrink_to_fit(&mut self, cap: usize) -> Result<(), TryReserveError> {
        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
        unsafe { self.inner.try_shrink_to_fit(cap, T::LAYOUT) }
    }
}
419
// `#[may_dangle]` asserts to the drop checker that dropping a `RawVec` never
// accesses values of type `T` (we only free memory), so `T` may dangle.
unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec<T, A> {
    /// Frees the memory owned by the `RawVec` *without* trying to drop its contents.
    fn drop(&mut self) {
        // SAFETY: We are in a Drop impl, self.inner will not be used again.
        unsafe { self.inner.deallocate(T::LAYOUT) }
    }
}
427
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
#[rustfmt::skip] // FIXME(fee1-dead): temporary measure before rustfmt is bumped
const impl<A: [const] Allocator + [const] Destruct> RawVecInner<A> {
    /// Infallible allocation: delegates to `try_allocate_in` and diverges via
    /// `handle_error` on failure.
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    fn with_capacity_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
        match Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout) {
            Ok(this) => {
                unsafe {
                    // Make it more obvious that a subsequent Vec::reserve(capacity) will not allocate.
                    hint::assert_unchecked(!this.needs_to_grow(0, capacity, elem_layout));
                }
                this
            }
            Err(err) => handle_error(err),
        }
    }

    /// Allocates a buffer for `capacity` elements of `elem_layout`, initialized
    /// per `init`, returning `CapacityOverflow`/`AllocError` instead of panicking.
    fn try_allocate_in(
        capacity: usize,
        init: AllocInit,
        alloc: A,
        elem_layout: Layout,
    ) -> Result<Self, TryReserveError> {
        // We avoid `unwrap_or_else` here because it bloats the amount of
        // LLVM IR generated.
        let layout = match layout_array(capacity, elem_layout) {
            Ok(layout) => layout,
            Err(_) => return Err(CapacityOverflow.into()),
        };

        // Don't allocate here because `Drop` will not deallocate when `capacity` is 0.
        if layout.size() == 0 {
            return Ok(Self::new_in(alloc, elem_layout.alignment()));
        }

        let result = match init {
            AllocInit::Uninitialized => alloc.allocate(layout),
            #[cfg(not(no_global_oom_handling))]
            AllocInit::Zeroed => alloc.allocate_zeroed(layout),
        };
        let ptr = match result {
            Ok(ptr) => ptr,
            Err(_) => return Err(AllocError { layout, non_exhaustive: () }.into()),
        };

        // Allocators currently return a `NonNull<[u8]>` whose length
        // matches the size requested. If that ever changes, the capacity
        // here should change to `ptr.len() / size_of::<T>()`.
        Ok(Self {
            ptr: Unique::from(ptr.cast()),
            // SAFETY: `layout_array` above would have failed if `capacity`'s
            // byte size exceeded `isize::MAX`.
            cap: unsafe { Cap::new_unchecked(capacity) },
            alloc,
        })
    }

    /// # Safety
    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
    ///   initially construct `self`
    /// - `elem_layout`'s size must be a multiple of its alignment
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    unsafe fn grow_one(&mut self, elem_layout: Layout) {
        // SAFETY: Precondition passed to caller
        if let Err(err) = unsafe { self.grow_amortized(self.cap.as_inner(), 1, elem_layout) } {
            handle_error(err);
        }
    }

    /// # Safety
    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
    ///   initially construct `self`
    /// - `elem_layout`'s size must be a multiple of its alignment
    /// - The sum of `len` and `additional` must be greater than the current capacity
    unsafe fn grow_amortized(
        &mut self,
        len: usize,
        additional: usize,
        elem_layout: Layout,
    ) -> Result<(), TryReserveError> {
        // This is ensured by the calling contexts.
        debug_assert!(additional > 0);

        if elem_layout.size() == 0 {
            // Since we return a capacity of `usize::MAX` when `elem_size` is
            // 0, getting to here necessarily means the `RawVec` is overfull.
            return Err(CapacityOverflow.into());
        }

        // Nothing we can really do about these checks, sadly.
        let required_cap = len.checked_add(additional).ok_or(CapacityOverflow)?;

        // This guarantees exponential growth. The doubling cannot overflow
        // because `cap <= isize::MAX` and the type of `cap` is `usize`.
        let cap = cmp::max(self.cap.as_inner() * 2, required_cap);
        let cap = cmp::max(min_non_zero_cap(elem_layout.size()), cap);

        // SAFETY:
        // - cap >= len + additional
        // - other preconditions passed to caller
        let ptr = unsafe { self.finish_grow(cap, elem_layout)? };

        // SAFETY: `finish_grow` would have failed if `cap > isize::MAX`
        unsafe { self.set_ptr_and_cap(ptr, cap) };
        Ok(())
    }

    /// # Safety
    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
    ///   initially construct `self`
    /// - `elem_layout`'s size must be a multiple of its alignment
    /// - `cap` must be greater than the current capacity
    // not marked inline(never) since we want optimizers to be able to observe the specifics of this
    // function, see tests/codegen-llvm/vec-reserve-extend.rs.
    #[cold]
    unsafe fn finish_grow(
        &self,
        cap: usize,
        elem_layout: Layout,
    ) -> Result<NonNull<[u8]>, TryReserveError> {
        let new_layout = layout_array(cap, elem_layout)?;

        let memory = if let Some((ptr, old_layout)) = unsafe { self.current_memory(elem_layout) } {
            // FIXME(const-hack): switch to `debug_assert_eq`
            debug_assert!(old_layout.align() == new_layout.align());
            unsafe {
                // The allocator checks for alignment equality
                hint::assert_unchecked(old_layout.align() == new_layout.align());
                self.alloc.grow(ptr, old_layout, new_layout)
            }
        } else {
            // No existing allocation (capacity 0 or never allocated): fresh allocation.
            self.alloc.allocate(new_layout)
        };

        // FIXME(const-hack): switch back to `map_err`
        match memory {
            Ok(memory) => Ok(memory),
            Err(_) => Err(AllocError { layout: new_layout, non_exhaustive: () }.into()),
        }
    }
}
569
impl<A: Allocator> RawVecInner<A> {
    /// Creates an unallocated `RawVecInner`: a dangling, `align`-aligned pointer
    /// with capacity 0. Nothing is freed on drop in this state.
    #[inline]
    const fn new_in(alloc: A, align: Alignment) -> Self {
        let ptr = Unique::from_non_null(NonNull::without_provenance(align.as_nonzero_usize()));
        // `cap: 0` means "unallocated". zero-sized types are ignored.
        Self { ptr, cap: ZERO_CAP, alloc }
    }

    /// Fallible allocation of an uninitialized buffer for `capacity` elements.
    #[inline]
    fn try_with_capacity_in(
        capacity: usize,
        alloc: A,
        elem_layout: Layout,
    ) -> Result<Self, TryReserveError> {
        Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout)
    }

    /// Infallible allocation of a zeroed buffer; diverges via `handle_error` on failure.
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    fn with_capacity_zeroed_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
        match Self::try_allocate_in(capacity, AllocInit::Zeroed, alloc, elem_layout) {
            Ok(res) => res,
            Err(err) => handle_error(err),
        }
    }

    /// # Safety
    /// `ptr` must be non-null, and `ptr`/`cap` must describe an allocation made by `alloc`
    /// (see `RawVec::from_raw_parts_in`).
    #[inline]
    const unsafe fn from_raw_parts_in(ptr: *mut u8, cap: Cap, alloc: A) -> Self {
        Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap, alloc }
    }

    /// # Safety
    /// `ptr`/`cap` must describe an allocation made by `alloc` (see `RawVec::from_nonnull_in`).
    #[inline]
    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
    const unsafe fn from_nonnull_in(ptr: NonNull<u8>, cap: Cap, alloc: A) -> Self {
        Self { ptr: Unique::from(ptr), cap, alloc }
    }

    /// Raw pointer to the buffer, cast to the caller's element type.
    #[inline]
    const fn ptr<T>(&self) -> *mut T {
        self.non_null::<T>().as_ptr()
    }

    /// `NonNull` pointer to the buffer, cast to the caller's element type.
    #[inline]
    const fn non_null<T>(&self) -> NonNull<T> {
        self.ptr.cast().as_non_null_ptr()
    }

    /// Capacity in elements; reported as `usize::MAX` for zero-sized elements.
    #[inline]
    const fn capacity(&self, elem_size: usize) -> usize {
        if elem_size == 0 { usize::MAX } else { self.cap.as_inner() }
    }

    /// Shared reference to the backing allocator.
    #[inline]
    const fn allocator(&self) -> &A {
        &self.alloc
    }

    /// Returns the current allocation (pointer and layout), or `None` if
    /// nothing was allocated (ZST elements or capacity 0).
    ///
    /// # Safety
    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
    ///   initially construct `self`
    /// - `elem_layout`'s size must be a multiple of its alignment
    #[inline]
    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
    const unsafe fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull<u8>, Layout)> {
        if elem_layout.size() == 0 || self.cap.as_inner() == 0 {
            None
        } else {
            // We could use Layout::array here which ensures the absence of isize and usize overflows
            // and could hypothetically handle differences between stride and size, but this memory
            // has already been allocated so we know it can't overflow and currently Rust does not
            // support such types. So we can do better by skipping some checks and avoid an unwrap.
            unsafe {
                let alloc_size = elem_layout.size().unchecked_mul(self.cap.as_inner());
                let layout = Layout::from_size_align_unchecked(alloc_size, elem_layout.align());
                Some((self.ptr.into(), layout))
            }
        }
    }

    /// # Safety
    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
    ///   initially construct `self`
    /// - `elem_layout`'s size must be a multiple of its alignment
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    unsafe fn reserve(&mut self, len: usize, additional: usize, elem_layout: Layout) {
        // Callers expect this function to be very cheap when there is already sufficient capacity.
        // Therefore, we move all the resizing and error-handling logic from grow_amortized and
        // handle_reserve behind a call, while making sure that this function is likely to be
        // inlined as just a comparison and a call if the comparison fails.
        #[cold]
        unsafe fn do_reserve_and_handle<A: Allocator>(
            slf: &mut RawVecInner<A>,
            len: usize,
            additional: usize,
            elem_layout: Layout,
        ) {
            // SAFETY: Precondition passed to caller
            if let Err(err) = unsafe { slf.grow_amortized(len, additional, elem_layout) } {
                handle_error(err);
            }
        }

        if self.needs_to_grow(len, additional, elem_layout) {
            unsafe {
                do_reserve_and_handle(self, len, additional, elem_layout);
            }
        }
    }

    /// # Safety
    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
    ///   initially construct `self`
    /// - `elem_layout`'s size must be a multiple of its alignment
    unsafe fn try_reserve(
        &mut self,
        len: usize,
        additional: usize,
        elem_layout: Layout,
    ) -> Result<(), TryReserveError> {
        if self.needs_to_grow(len, additional, elem_layout) {
            // SAFETY: Precondition passed to caller
            unsafe {
                self.grow_amortized(len, additional, elem_layout)?;
            }
        }
        unsafe {
            // Inform the optimizer that the reservation has succeeded or wasn't needed
            hint::assert_unchecked(!self.needs_to_grow(len, additional, elem_layout));
        }
        Ok(())
    }

    /// # Safety
    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
    ///   initially construct `self`
    /// - `elem_layout`'s size must be a multiple of its alignment
    #[cfg(not(no_global_oom_handling))]
    unsafe fn reserve_exact(&mut self, len: usize, additional: usize, elem_layout: Layout) {
        // SAFETY: Precondition passed to caller
        if let Err(err) = unsafe { self.try_reserve_exact(len, additional, elem_layout) } {
            handle_error(err);
        }
    }

    /// # Safety
    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
    ///   initially construct `self`
    /// - `elem_layout`'s size must be a multiple of its alignment
    unsafe fn try_reserve_exact(
        &mut self,
        len: usize,
        additional: usize,
        elem_layout: Layout,
    ) -> Result<(), TryReserveError> {
        if self.needs_to_grow(len, additional, elem_layout) {
            // SAFETY: Precondition passed to caller
            unsafe {
                self.grow_exact(len, additional, elem_layout)?;
            }
        }
        unsafe {
            // Inform the optimizer that the reservation has succeeded or wasn't needed
            hint::assert_unchecked(!self.needs_to_grow(len, additional, elem_layout));
        }
        Ok(())
    }

    /// # Safety
    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
    ///   initially construct `self`
    /// - `elem_layout`'s size must be a multiple of its alignment
    /// - `cap` must be less than or equal to `self.capacity(elem_layout.size())`
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    unsafe fn shrink_to_fit(&mut self, cap: usize, elem_layout: Layout) {
        // SAFETY: Precondition passed to caller
        if let Err(err) = unsafe { self.shrink(cap, elem_layout) } {
            handle_error(err);
        }
    }

    /// # Safety
    ///
    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
    ///   initially construct `self`
    /// - `elem_layout`'s size must be a multiple of its alignment
    /// - `cap` must be less than or equal to `self.capacity(elem_layout.size())`
    unsafe fn try_shrink_to_fit(
        &mut self,
        cap: usize,
        elem_layout: Layout,
    ) -> Result<(), TryReserveError> {
        // SAFETY: Preconditions passed to caller
        unsafe { self.shrink(cap, elem_layout) }
    }

    /// Whether `len + additional` elements exceed the current capacity.
    ///
    /// Note the `wrapping_sub`: if `len` already exceeds the capacity, the
    /// subtraction wraps to a huge value and this returns `false` — matching
    /// the documented "may fail to actually allocate" caveat on `reserve`.
    #[inline]
    const fn needs_to_grow(&self, len: usize, additional: usize, elem_layout: Layout) -> bool {
        additional > self.capacity(elem_layout.size()).wrapping_sub(len)
    }

    /// # Safety
    /// `cap` must be `<= isize::MAX`, and `ptr` must point to an allocation of
    /// (at least) that many elements made by `self.alloc`.
    #[inline]
    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
    const unsafe fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
        // Allocators currently return a `NonNull<[u8]>` whose length matches
        // the size requested. If that ever changes, the capacity here should
        // change to `ptr.len() / size_of::<T>()`.
        self.ptr = Unique::from(ptr.cast());
        self.cap = unsafe { Cap::new_unchecked(cap) };
    }

    /// # Safety
    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
    ///   initially construct `self`
    /// - `elem_layout`'s size must be a multiple of its alignment
    /// - The sum of `len` and `additional` must be greater than the current capacity
    unsafe fn grow_exact(
        &mut self,
        len: usize,
        additional: usize,
        elem_layout: Layout,
    ) -> Result<(), TryReserveError> {
        if elem_layout.size() == 0 {
            // Since we return a capacity of `usize::MAX` when the type size is
            // 0, getting to here necessarily means the `RawVec` is overfull.
            return Err(CapacityOverflow.into());
        }

        let cap = len.checked_add(additional).ok_or(CapacityOverflow)?;

        // SAFETY: preconditions passed to caller
        let ptr = unsafe { self.finish_grow(cap, elem_layout)? };

        // SAFETY: `finish_grow` would have failed if `cap > isize::MAX`
        unsafe { self.set_ptr_and_cap(ptr, cap) };
        Ok(())
    }

    /// # Safety
    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
    ///   initially construct `self`
    /// - `elem_layout`'s size must be a multiple of its alignment
    /// - `cap` must be less than or equal to `self.capacity(elem_layout.size())`
    #[inline]
    unsafe fn shrink(&mut self, cap: usize, elem_layout: Layout) -> Result<(), TryReserveError> {
        assert!(cap <= self.capacity(elem_layout.size()), "Tried to shrink to a larger capacity");
        // SAFETY: Just checked this isn't trying to grow
        unsafe { self.shrink_unchecked(cap, elem_layout) }
    }

    /// `shrink`, but without the capacity check.
    ///
    /// This is split out so that `shrink` can inline the check, since it
    /// optimizes out in things like `shrink_to_fit`, without needing to
    /// also inline all this code, as doing that ends up failing the
    /// `vec-shrink-panic` codegen test when `shrink_to_fit` ends up being too
    /// big for LLVM to be willing to inline.
    ///
    /// # Safety
    /// `cap <= self.capacity()`
    unsafe fn shrink_unchecked(
        &mut self,
        cap: usize,
        elem_layout: Layout,
    ) -> Result<(), TryReserveError> {
        // SAFETY: Precondition passed to caller
        let Some((ptr, layout)) = (unsafe { self.current_memory(elem_layout) }) else {
            return Ok(());
        };

        // If shrinking to 0, deallocate the buffer. We don't reach this point
        // for the T::IS_ZST case since current_memory() will have returned
        // None.
        if cap == 0 {
            unsafe { self.alloc.deallocate(ptr, layout) };
            // Reset to the unallocated state: dangling but well-aligned pointer.
            self.ptr =
                unsafe { Unique::new_unchecked(ptr::without_provenance_mut(elem_layout.align())) };
            self.cap = ZERO_CAP;
        } else {
            let ptr = unsafe {
                // Layout cannot overflow here because it would have
                // overflowed earlier when capacity was larger.
                let new_size = elem_layout.size().unchecked_mul(cap);
                let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
                self.alloc
                    .shrink(ptr, layout, new_layout)
                    .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
            };
            // SAFETY: if the allocation is valid, then the capacity is too
            unsafe {
                self.set_ptr_and_cap(ptr, cap);
            }
        }
        Ok(())
    }

    /// # Safety
    ///
    /// This function deallocates the owned allocation, but does not update `ptr` or `cap` to
    /// prevent double-free or use-after-free. Essentially, do not do anything with the caller
    /// after this function returns.
    /// Ideally this function would take `self` by move, but it cannot because it exists to be
    /// called from a `Drop` impl.
    unsafe fn deallocate(&mut self, elem_layout: Layout) {
        // SAFETY: Precondition passed to caller
        if let Some((ptr, layout)) = unsafe { self.current_memory(elem_layout) } {
            unsafe {
                self.alloc.deallocate(ptr, layout);
            }
        }
    }
}
881
// Central function for reserve error handling.
//
// Diverges: promotes `CapacityOverflow` to a "capacity overflow" panic and
// `AllocError` to `handle_alloc_error` (abort). `#[optimize(size)]` because
// this is a cold path reached from many call sites.
#[cfg(not(no_global_oom_handling))]
#[cold]
#[optimize(size)]
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const fn handle_error(e: TryReserveError) -> ! {
    match e.kind() {
        CapacityOverflow => capacity_overflow(),
        AllocError { layout, .. } => handle_alloc_error(layout),
    }
}
893
/// Computes the layout of `[elem; cap]`, returning `CapacityOverflow` if the
/// total byte size would overflow `isize::MAX`.
#[inline]
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
const fn layout_array(cap: usize, elem_layout: Layout) -> Result<Layout, TryReserveError> {
    // This is only used with `elem_layout`s which are those of real rust types,
    // which lets us use the much-simpler `repeat_packed`.
    debug_assert!(elem_layout.size() == elem_layout.pad_to_align().size());

    // FIXME(const-hack) return to using `map` and `map_err` once `const_closures` is implemented
    match elem_layout.repeat_packed(cap) {
        Ok(layout) => Ok(layout),
        Err(_) => Err(CapacityOverflow.into()),
    }
}