Skip to content
This repository was archived by the owner on May 28, 2025. It is now read-only.

Commit 9361390

Browse files
committed
refactor: moved IntoIter into into_iter.rs
1 parent 2580822 commit 9361390

File tree

2 files changed

+276
-264
lines changed

2 files changed

+276
-264
lines changed

library/alloc/src/vec/into_iter.rs

Lines changed: 270 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,270 @@
1+
use crate::alloc::{Allocator, Global};
2+
use crate::raw_vec::RawVec;
3+
use core::marker::PhantomData;
4+
use core::intrinsics::{arith_offset};
5+
use core::mem::{self};
6+
use core::fmt;
7+
use core::ptr::{self, NonNull};
8+
use core::slice::{self};
9+
use core::iter::{
10+
FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccess,
11+
};
12+
13+
/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`] (provided
/// by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // Start of the backing allocation; together with `cap` it is handed back
    // to `RawVec` for deallocation in the `Drop` impl.
    pub(super) buf: NonNull<T>,
    // Marks that this type logically owns values of type `T` (drop check).
    pub(super) phantom: PhantomData<T>,
    // Capacity of the backing allocation, in elements.
    pub(super) cap: usize,
    // The allocator the buffer was allocated with; read out in `Drop`.
    pub(super) alloc: A,
    // Front cursor: next element to yield. For zero-sized `T` this is bumped
    // with `arith_offset` rather than `offset` (see `Iterator::next`).
    pub(super) ptr: *const T,
    // One-past-the-end cursor; `next_back` moves it backwards.
    // Iteration is exhausted when `ptr == end`.
    pub(super) end: *const T,
}
36+
37+
#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
38+
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
39+
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
40+
f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
41+
}
42+
}
43+
44+
impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: `ptr` points at the first not-yet-yielded element and
        // `len()` (derived from `ptr`/`end` via `size_hint`) counts the
        // elements that are still live in the buffer.
        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// into_iter.as_mut_slice()[2] = 'z';
    /// assert_eq!(into_iter.next().unwrap(), 'a');
    /// assert_eq!(into_iter.next().unwrap(), 'b');
    /// assert_eq!(into_iter.next().unwrap(), 'z');
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: the raw slice covers exactly the live `ptr..end` range,
        // and `&mut self` guarantees exclusive access to it.
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    // Raw view of the remaining elements; shared helper for `as_mut_slice`
    // and the `Drop` impl (which must not create an intermediate `&mut [T]`).
    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
    }

    // Drops all not-yet-yielded elements and marks the iterator as exhausted
    // (`ptr == end`). The backing allocation itself is left untouched.
    pub(super) fn drop_remaining(&mut self) {
        unsafe {
            ptr::drop_in_place(self.as_mut_slice());
        }
        // Advance past everything so `Drop` won't drop these elements again.
        self.ptr = self.end;
    }

    /// Relinquishes the backing allocation, equivalent to
    /// `ptr::write(&mut self, Vec::new().into_iter())`
    pub(super) fn forget_allocation(&mut self) {
        // With `cap == 0` and a dangling `buf`, the `Drop` impl's `RawVec`
        // reconstruction has nothing to deallocate.
        self.cap = 0;
        self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
        // `ptr == end` means the iterator also yields nothing from now on.
        self.ptr = self.buf.as_ptr();
        self.end = self.buf.as_ptr();
    }
}
106+
107+
#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
108+
impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
109+
fn as_ref(&self) -> &[T] {
110+
self.as_slice()
111+
}
112+
}
113+
114+
#[stable(feature = "rust1", since = "1.0.0")]
115+
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
116+
#[stable(feature = "rust1", since = "1.0.0")]
117+
unsafe impl<T: Sync, A: Allocator> Sync for IntoIter<T, A> {}
118+
119+
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        if self.ptr as *const _ == self.end {
            // Front and back cursors met: exhausted.
            None
        } else if mem::size_of::<T>() == 0 {
            // purposefully don't use 'ptr.offset' because for
            // vectors with 0-size elements this would return the
            // same pointer.
            self.ptr = unsafe { arith_offset(self.ptr as *const i8, 1) as *mut T };

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            // Move the element out and step past it. The vacated slot will
            // not be dropped again: `Drop` only drops the `ptr..end` range.
            let old = self.ptr;
            self.ptr = unsafe { self.ptr.offset(1) };

            Some(unsafe { ptr::read(old) })
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if mem::size_of::<T>() == 0 {
            // For ZSTs the cursors advance by one *address* per element
            // (see `next`), so the remaining count is the address difference.
            (self.end as usize).wrapping_sub(self.ptr as usize)
        } else {
            unsafe { self.end.offset_from(self.ptr) as usize }
        };
        // The bounds are exact; `TrustedLen` below relies on this.
        (exact, Some(exact))
    }

    #[inline]
    fn count(self) -> usize {
        // The length is known exactly — no need to walk the elements.
        self.len()
    }

    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
    where
        Self: TrustedRandomAccess,
    {
        // SAFETY: the caller must guarantee that `i` is in bounds of the
        // `Vec<T>`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)`
        // is guaranteed to point to an element of the `Vec<T>` and
        // thus guaranteed to be valid to dereference.
        //
        // Also note the implementation of `Self: TrustedRandomAccess` requires
        // that `T: Copy` so reading elements from the buffer doesn't invalidate
        // them for `Drop`.
        unsafe {
            if mem::size_of::<T>() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
        }
    }
}
175+
176+
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if self.end == self.ptr {
            // Back cursor met the front cursor: exhausted.
            None
        } else if mem::size_of::<T>() == 0 {
            // See above for why 'ptr.offset' isn't used
            self.end = unsafe { arith_offset(self.end as *const i8, -1) as *mut T };

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            // Step `end` back first so it points at the element to yield;
            // the vacated slot is then outside the live `ptr..end` range.
            self.end = unsafe { self.end.offset(-1) };

            Some(unsafe { ptr::read(self.end) })
        }
    }
}
195+
196+
#[stable(feature = "rust1", since = "1.0.0")]
197+
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
198+
fn is_empty(&self) -> bool {
199+
self.ptr == self.end
200+
}
201+
}
202+
203+
// `next` re-checks `ptr == end` on every call, so the iterator keeps
// returning `None` once exhausted — the `FusedIterator` contract.
#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}
205+
206+
// SAFETY: `size_hint` above computes the exact remaining length from the
// `ptr`/`end` cursors, for both zero-sized and sized `T`.
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}
208+
209+
#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
// and thus we can't implement drop-handling
unsafe impl<T, A: Allocator> TrustedRandomAccess for IntoIter<T, A>
where
    T: Copy,
{
    fn may_have_side_effect() -> bool {
        // Fetching an element is a plain `ptr::read` of a `Copy` value
        // (see `__iterator_get_unchecked`); it observes and mutates nothing.
        false
    }
}
221+
222+
#[stable(feature = "vec_intoiter_clone", since = "1.8.0")]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    // Cloning copies only the *remaining* elements into a fresh vector
    // (allocated with a clone of the allocator) and returns its iterator.
    #[cfg(not(test))]
    fn clone(&self) -> Self {
        self.as_slice().to_vec_in(self.alloc.clone()).into_iter()
    }
    // NOTE(review): the test build routes through the crate-local
    // `crate::slice::to_vec` helper instead of `to_vec_in`; presumably the
    // `test` cfg changes what is available here — confirm against `slice.rs`.
    #[cfg(test)]
    fn clone(&self) -> Self {
        crate::slice::to_vec(self.as_slice(), self.alloc.clone()).into_iter()
    }
}
233+
234+
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        // Guard whose own `Drop` frees the backing buffer. Because it is
        // armed *before* the elements are dropped, the buffer is deallocated
        // even if one of the element destructors panics.
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // `IntoIter::alloc` is not used anymore after this
                    let alloc = ptr::read(&self.0.alloc);
                    // RawVec handles deallocation
                    let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        // destroy the remaining elements
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // now `guard` will be dropped and do the rest
    }
}
258+
259+
// Opts this iterator into in-place collection: its backing buffer may be
// reused as the destination when collecting back into a `Vec`.
#[unstable(issue = "none", feature = "inplace_iteration")]
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {}
261+
262+
#[unstable(issue = "none", feature = "inplace_iteration")]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    // This iterator sits at the bottom of a pipeline, so it is its own source.
    type Source = Self;

    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}

0 commit comments

Comments
 (0)