1use core::iter::{
2FusedIterator, InPlaceIterable, SourceIter, TrustedFused, TrustedLen,
3 TrustedRandomAccessNoCoerce,
4};
5use core::marker::PhantomData;
6use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
7use core::num::NonZero;
8#[cfg(not(no_global_oom_handling))]
9use core::ops::Deref;
10use core::panic::UnwindSafe;
11use core::ptr::{self, NonNull};
12use core::{array, fmt, slice};
1314#[cfg(not(no_global_oom_handling))]
15use super::AsVecIntoIter;
16use crate::alloc::{Allocator, Global};
17#[cfg(not(no_global_oom_handling))]
18use crate::collections::VecDeque;
19use crate::raw_vec::RawVec;
/// Reinterprets `$place` (a `*const T` / `*mut T` field, in practice `self.end`)
/// as a `NonNull<$t>`, or as `&mut NonNull<$t>` in the `mut` form.
///
/// This is only sound when the pointer is known to be non-null; per the
/// `IntoIter::end` field docs, for non-ZST `T` the `end` pointer is treated
/// as a `NonNull<T>`.
macro non_null {
    (mut $place:expr, $t:ident) => {{
        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
        unsafe { &mut *((&raw mut $place) as *mut NonNull<$t>) }
    }},
    ($place:expr, $t:ident) => {{
        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
        unsafe { *((&raw const $place) as *const NonNull<$t>) }
    }},
}
/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // Pointer to the start of the backing allocation; stays fixed while
    // `ptr`/`end` move as elements are yielded.
    pub(super) buf: NonNull<T>,
    // Marks logical ownership of `T` values for variance/drop-check purposes.
    pub(super) phantom: PhantomData<T>,
    // Capacity of the backing allocation, in elements.
    pub(super) cap: usize,
    // the drop impl reconstructs a RawVec from buf, cap and alloc
    // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop
    pub(super) alloc: ManuallyDrop<A>,
    // Pointer to the next element to be read from the front.
    pub(super) ptr: NonNull<T>,
    /// If T is a ZST, this is actually ptr+len. This encoding is picked so that
    /// ptr == end is a quick test for the Iterator being empty, that works
    /// for both ZST and non-ZST.
    /// For non-ZSTs the pointer is treated as `NonNull<T>`
    pub(super) end: *const T,
}
6263// Manually mirroring what `Vec` has,
64// because otherwise we get `T: RefUnwindSafe` from `NonNull`.
65#[stable(feature = "catch_unwind", since = "1.9.0")]
66impl<T: UnwindSafe, A: Allocator + UnwindSafe> UnwindSafefor IntoIter<T, A> {}
6768#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
69impl<T: fmt::Debug, A: Allocator> fmt::Debugfor IntoIter<T, A> {
70fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
71f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
72 }
73}
impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: `ptr` points at the next unyielded element, and `len()`
        // initialized elements remain after it.
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len()) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// into_iter.as_mut_slice()[2] = 'z';
    /// assert_eq!(into_iter.next().unwrap(), 'a');
    /// assert_eq!(into_iter.next().unwrap(), 'b');
    /// assert_eq!(into_iter.next().unwrap(), 'z');
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: `as_raw_mut_slice` covers exactly the remaining
        // initialized elements, and `&mut self` grants unique access.
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    // Raw-slice view over the remaining elements; used both for the mutable
    // slice view and for `drop_in_place` in the drop paths.
    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len())
    }

    /// Drops remaining elements and relinquishes the backing allocation.
    ///
    /// This method guarantees it won't panic before relinquishing the backing
    /// allocation.
    ///
    /// This is roughly equivalent to the following, but more efficient
    ///
    /// ```
    /// # let mut vec = Vec::<u8>::with_capacity(10);
    /// # let ptr = vec.as_mut_ptr();
    /// # let mut into_iter = vec.into_iter();
    /// let mut into_iter = std::mem::replace(&mut into_iter, Vec::new().into_iter());
    /// (&mut into_iter).for_each(drop);
    /// std::mem::forget(into_iter);
    /// # // FIXME(https://github.com/rust-lang/miri/issues/3670):
    /// # // use -Zmiri-disable-leak-check instead of unleaking in tests meant to leak.
    /// # drop(unsafe { Vec::<u8>::from_raw_parts(ptr, 0, 10) });
    /// ```
    ///
    /// This method is used by in-place iteration, refer to the vec::in_place_collect
    /// documentation for an overview.
    #[cfg(not(no_global_oom_handling))]
    pub(super) fn forget_allocation_drop_remaining(&mut self) {
        let remaining = self.as_raw_mut_slice();

        // overwrite the individual fields instead of creating a new
        // struct and then overwriting &mut self.
        // this creates less assembly
        self.cap = 0;
        self.buf = RawVec::new().non_null();
        self.ptr = self.buf;
        self.end = self.buf.as_ptr();

        // Dropping the remaining elements can panic, so this needs to be
        // done only after updating the other fields.
        unsafe {
            ptr::drop_in_place(remaining);
        }
    }

    /// Forgets to Drop the remaining elements while still allowing the backing allocation to be freed.
    ///
    /// This method does not consume `self`, and leaves deallocation to `impl Drop for IntoIter`.
    /// If consuming `self` is possible, consider calling
    /// [`Self::forget_remaining_elements_and_dealloc()`] instead.
    pub(crate) fn forget_remaining_elements(&mut self) {
        // For the ZST case, it is crucial that we mutate `end` here, not `ptr`.
        // `ptr` must stay aligned, while `end` may be unaligned.
        self.end = self.ptr.as_ptr();
    }

    /// Forgets to Drop the remaining elements and frees the backing allocation.
    /// Consuming version of [`Self::forget_remaining_elements()`].
    ///
    /// This can be used in place of `drop(self)` when `self` is known to be exhausted,
    /// to avoid producing a needless `drop_in_place::<[T]>()`.
    #[inline]
    pub(crate) fn forget_remaining_elements_and_dealloc(self) {
        let mut this = ManuallyDrop::new(self);
        // SAFETY: `this` is in ManuallyDrop, so it will not be double-freed.
        unsafe {
            this.dealloc_only();
        }
    }

    /// Frees the allocation, without checking or dropping anything else.
    ///
    /// The safe version of this method is [`Self::forget_remaining_elements_and_dealloc()`].
    /// This function exists only to share code between that method and the `impl Drop`.
    ///
    /// # Safety
    ///
    /// This function must only be called with an [`IntoIter`] that is not going to be dropped
    /// or otherwise used in any way, either because it is being forgotten or because its `Drop`
    /// is already executing; otherwise a double-free will occur, and possibly a read from freed
    /// memory if there are any remaining elements.
    #[inline]
    unsafe fn dealloc_only(&mut self) {
        unsafe {
            // SAFETY: our caller promises not to touch `*self` again
            let alloc = ManuallyDrop::take(&mut self.alloc);
            // RawVec handles deallocation (the reconstructed RawVec is
            // dropped immediately, freeing `buf`).
            let _ = RawVec::from_nonnull_in(self.buf, self.cap, alloc);
        }
    }

    /// Converts the remaining elements into a `VecDeque`, reusing the
    /// backing allocation rather than copying.
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    pub(crate) fn into_vecdeque(self) -> VecDeque<T, A> {
        // Keep our `Drop` impl from dropping the elements and the allocator
        let mut this = ManuallyDrop::new(self);

        // SAFETY: This allocation originally came from a `Vec`, so it passes
        // all those checks. We have `this.buf` ≤ `this.ptr` ≤ `this.end`,
        // so the `offset_from_unsigned`s below cannot wrap, and will produce a well-formed
        // range. `end` ≤ `buf + cap`, so the range will be in-bounds.
        // Taking `alloc` is ok because nothing else is going to look at it,
        // since our `Drop` impl isn't going to run so there's no more code.
        unsafe {
            let buf = this.buf.as_ptr();
            let initialized = if T::IS_ZST {
                // All the pointers are the same for ZSTs, so it's fine to
                // say that they're all at the beginning of the "allocation".
                0..this.len()
            } else {
                this.ptr.offset_from_unsigned(this.buf)..this.end.offset_from_unsigned(buf)
            };
            let cap = this.cap;
            let alloc = ManuallyDrop::take(&mut this.alloc);
            VecDeque::from_contiguous_raw_parts_in(buf, initialized, cap, alloc)
        }
    }
}
#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
    // Borrow the remaining elements; delegates to `as_slice`.
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}
241242#[stable(feature = "rust1", since = "1.0.0")]
243unsafe impl<T: Send, A: Allocator + Send> Sendfor IntoIter<T, A> {}
244#[stable(feature = "rust1", since = "1.0.0")]
245unsafe impl<T: Sync, A: Allocator + Sync> Syncfor IntoIter<T, A> {}
246247#[stable(feature = "rust1", since = "1.0.0")]
248impl<T, A: Allocator> Iteratorfor IntoIter<T, A> {
249type Item = T;
250251#[inline]
252fn next(&mut self) -> Option<T> {
253let ptr = if T::IS_ZST {
254if self.ptr.as_ptr() == self.end as *mut T {
255return None;
256 }
257// `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
258 // reducing the `end`.
259self.end = self.end.wrapping_byte_sub(1);
260self.ptr
261 } else {
262if self.ptr == {
#![allow(unused_unsafe)]
unsafe { *((&raw const self.end) as *const NonNull<T>) }
}non_null!(self.end, T) {
263return None;
264 }
265let old = self.ptr;
266self.ptr = unsafe { old.add(1) };
267old268 };
269Some(unsafe { ptr.read() })
270 }
271272#[inline]
273fn size_hint(&self) -> (usize, Option<usize>) {
274let exact = if T::IS_ZST {
275self.end.addr().wrapping_sub(self.ptr.as_ptr().addr())
276 } else {
277unsafe { {
#![allow(unused_unsafe)]
unsafe { *((&raw const self.end) as *const NonNull<T>) }
}non_null!(self.end, T).offset_from_unsigned(self.ptr) }
278 };
279 (exact, Some(exact))
280 }
281282#[inline]
283fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
284let step_size = self.len().min(n);
285let to_drop = ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), step_size);
286if T::IS_ZST {
287// See `next` for why we sub `end` here.
288self.end = self.end.wrapping_byte_sub(step_size);
289 } else {
290// SAFETY: the min() above ensures that step_size is in bounds
291self.ptr = unsafe { self.ptr.add(step_size) };
292 }
293// SAFETY: the min() above ensures that step_size is in bounds
294unsafe {
295 ptr::drop_in_place(to_drop);
296 }
297NonZero::new(n - step_size).map_or(Ok(()), Err)
298 }
299300#[inline]
301fn count(self) -> usize {
302self.len()
303 }
304305#[inline]
306fn last(mut self) -> Option<T> {
307self.next_back()
308 }
309310#[inline]
311fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> {
312let mut raw_ary = [const { MaybeUninit::uninit() }; N];
313314let len = self.len();
315316if T::IS_ZST {
317if len < N {
318self.forget_remaining_elements();
319// Safety: ZSTs can be conjured ex nihilo, only the amount has to be correct
320return Err(unsafe { array::IntoIter::new_unchecked(raw_ary, 0..len) });
321 }
322323self.end = self.end.wrapping_byte_sub(N);
324// Safety: ditto
325return Ok(unsafe { raw_ary.transpose().assume_init() });
326 }
327328if len < N {
329// Safety: `len` indicates that this many elements are available and we just checked that
330 // it fits into the array.
331unsafe {
332 ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, len);
333self.forget_remaining_elements();
334return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
335 }
336 }
337338// Safety: `len` is larger than the array size. Copy a fixed amount here to fully initialize
339 // the array.
340unsafe {
341 ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, N);
342self.ptr = self.ptr.add(N);
343Ok(raw_ary.transpose().assume_init())
344 }
345 }
346347fn fold<B, F>(mut self, mut accum: B, mut f: F) -> B
348where
349F: FnMut(B, Self::Item) -> B,
350 {
351if T::IS_ZST {
352while self.ptr.as_ptr() != self.end.cast_mut() {
353// SAFETY: we just checked that `self.ptr` is in bounds.
354let tmp = unsafe { self.ptr.read() };
355// See `next` for why we subtract from `end` here.
356self.end = self.end.wrapping_byte_sub(1);
357 accum = f(accum, tmp);
358 }
359 } else {
360// SAFETY: `self.end` can only be null if `T` is a ZST.
361while self.ptr != {
#![allow(unused_unsafe)]
unsafe { *((&raw const self.end) as *const NonNull<T>) }
}non_null!(self.end, T) {
362// SAFETY: we just checked that `self.ptr` is in bounds.
363let tmp = unsafe { self.ptr.read() };
364// SAFETY: the maximum this can be is `self.end`.
365 // Increment `self.ptr` first to avoid double dropping in the event of a panic.
366self.ptr = unsafe { self.ptr.add(1) };
367 accum = f(accum, tmp);
368 }
369 }
370371// There are in fact no remaining elements to forget, but by doing this we can avoid
372 // potentially generating a needless loop to drop the elements that cannot exist at
373 // this point.
374self.forget_remaining_elements_and_dealloc();
375376accum377 }
378379fn try_fold<B, F, R>(&mut self, mut accum: B, mut f: F) -> R
380where
381Self: Sized,
382 F: FnMut(B, Self::Item) -> R,
383 R: core::ops::Try<Output = B>,
384 {
385if T::IS_ZST {
386while self.ptr.as_ptr() != self.end.cast_mut() {
387// SAFETY: we just checked that `self.ptr` is in bounds.
388let tmp = unsafe { self.ptr.read() };
389// See `next` for why we subtract from `end` here.
390self.end = self.end.wrapping_byte_sub(1);
391 accum = f(accum, tmp)?;
392 }
393 } else {
394// SAFETY: `self.end` can only be null if `T` is a ZST.
395while self.ptr != {
#![allow(unused_unsafe)]
unsafe { *((&raw const self.end) as *const NonNull<T>) }
}non_null!(self.end, T) {
396// SAFETY: we just checked that `self.ptr` is in bounds.
397let tmp = unsafe { self.ptr.read() };
398// SAFETY: the maximum this can be is `self.end`.
399 // Increment `self.ptr` first to avoid double dropping in the event of a panic.
400self.ptr = unsafe { self.ptr.add(1) };
401 accum = f(accum, tmp)?;
402 }
403 }
404 R::from_output(accum)
405 }
406407unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
408where
409Self: TrustedRandomAccessNoCoerce,
410 {
411// SAFETY: the caller must guarantee that `i` is in bounds of the
412 // `Vec<T>`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)`
413 // is guaranteed to pointer to an element of the `Vec<T>` and
414 // thus guaranteed to be valid to dereference.
415 //
416 // Also note the implementation of `Self: TrustedRandomAccess` requires
417 // that `T: Copy` so reading elements from the buffer doesn't invalidate
418 // them for `Drop`.
419unsafe { self.ptr.add(i).read() }
420 }
421}
422423#[stable(feature = "rust1", since = "1.0.0")]
424impl<T, A: Allocator> DoubleEndedIteratorfor IntoIter<T, A> {
425#[inline]
426fn next_back(&mut self) -> Option<T> {
427if T::IS_ZST {
428if self.ptr.as_ptr() == self.end as *mut _ {
429return None;
430 }
431// See above for why 'ptr.offset' isn't used
432self.end = self.end.wrapping_byte_sub(1);
433// Note that even though this is next_back() we're reading from `self.ptr`, not
434 // `self.end`. We track our length using the byte offset from `self.ptr` to `self.end`,
435 // so the end pointer may not be suitably aligned for T.
436Some(unsafe { ptr::read(self.ptr.as_ptr()) })
437 } else {
438if self.ptr == {
#![allow(unused_unsafe)]
unsafe { *((&raw const self.end) as *const NonNull<T>) }
}non_null!(self.end, T) {
439return None;
440 }
441unsafe {
442self.end = self.end.sub(1);
443Some(ptr::read(self.end))
444 }
445 }
446 }
447448#[inline]
449fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
450let step_size = self.len().min(n);
451if T::IS_ZST {
452// SAFETY: same as for advance_by()
453self.end = self.end.wrapping_byte_sub(step_size);
454 } else {
455// SAFETY: same as for advance_by()
456self.end = unsafe { self.end.sub(step_size) };
457 }
458let to_drop = if T::IS_ZST {
459// ZST may cause unalignment
460ptr::slice_from_raw_parts_mut(ptr::NonNull::<T>::dangling().as_ptr(), step_size)
461 } else {
462 ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size)
463 };
464// SAFETY: same as for advance_by()
465unsafe {
466 ptr::drop_in_place(to_drop);
467 }
468NonZero::new(n - step_size).map_or(Ok(()), Err)
469 }
470}
471472#[stable(feature = "rust1", since = "1.0.0")]
473impl<T, A: Allocator> ExactSizeIteratorfor IntoIter<T, A> {
474fn is_empty(&self) -> bool {
475if T::IS_ZST {
476self.ptr.as_ptr() == self.end as *mut _
477} else {
478self.ptr == {
#![allow(unused_unsafe)]
unsafe { *((&raw const self.end) as *const NonNull<T>) }
}non_null!(self.end, T)479 }
480 }
481}
482483#[stable(feature = "fused", since = "1.26.0")]
484impl<T, A: Allocator> FusedIteratorfor IntoIter<T, A> {}
#[doc(hidden)]
#[unstable(issue = "none", feature = "trusted_fused")]
// SAFETY: once `ptr == end` the iterator keeps returning `None` (`next`
// never moves the pointers past each other), so fusedness can be trusted.
unsafe impl<T, A: Allocator> TrustedFused for IntoIter<T, A> {}
489490#[unstable(feature = "trusted_len", issue = "37572")]
491unsafe impl<T, A: Allocator> TrustedLenfor IntoIter<T, A> {}
492493#[stable(feature = "default_iters", since = "1.70.0")]
494impl<T, A> Defaultfor IntoIter<T, A>
495where
496A: Allocator + Default,
497{
498/// Creates an empty `vec::IntoIter`.
499 ///
500 /// ```
501 /// # use std::vec;
502 /// let iter: vec::IntoIter<u8> = Default::default();
503 /// assert_eq!(iter.len(), 0);
504 /// assert_eq!(iter.as_slice(), &[]);
505 /// ```
506fn default() -> Self {
507super::Vec::new_in(Default::default()).into_iter()
508 }
509}
#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
/// Marker trait for element types that need no drop handling, used as a
/// bound on the `TrustedRandomAccessNoCoerce` impl below.
pub trait NonDrop {}
515516// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
517// and thus we can't implement drop-handling
518#[unstable(issue = "none", feature = "std_internals")]
519impl<T: Copy> NonDropfor T {}
#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// TrustedRandomAccess (without NoCoerce) must not be implemented because
// subtypes/supertypes of `T` might not be `NonDrop`
unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A>
where
    T: NonDrop,
{
    // Reads via `__iterator_get_unchecked` are pure `ptr.read()`s of `Copy`
    // data, so repeated access has no observable side effects.
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}
531532#[cfg(not(no_global_oom_handling))]
533#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
534impl<T: Clone, A: Allocator + Clone> Clonefor IntoIter<T, A> {
535fn clone(&self) -> Self {
536self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter()
537 }
538}
539540#[stable(feature = "rust1", since = "1.0.0")]
541unsafe impl<#[may_dangle] T, A: Allocator> Dropfor IntoIter<T, A> {
542fn drop(&mut self) {
543struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);
544545impl<T, A: Allocator> Dropfor DropGuard<'_, T, A> {
546fn drop(&mut self) {
547unsafe {
548self.0.dealloc_only();
549 }
550 }
551 }
552553let guard = DropGuard(self);
554// destroy the remaining elements
555unsafe {
556 ptr::drop_in_place(guard.0.as_raw_mut_slice());
557 }
558// now `guard` will be dropped and do the rest
559}
560}
// In addition to the SAFETY invariants of the following three unsafe traits
// also refer to the vec::in_place_collect module documentation to get an overview
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {
    // Both constants are 1: presumably each input element corresponds to at
    // most/least one output slot — see the vec::in_place_collect docs.
    const EXPAND_BY: Option<NonZero<usize>> = NonZero::new(1);
    const MERGE_BY: Option<NonZero<usize>> = NonZero::new(1);
}
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    // `IntoIter` is its own source for in-place collection.
    type Source = Self;

    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}
581582#[cfg(not(no_global_oom_handling))]
583unsafe impl<T> AsVecIntoIterfor IntoIter<T> {
584type Item = T;
585586fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
587self588 }
589}