
Commit 6a434c9

Move vec::IntoIter to using slice::DrainRaw
1 parent 0528783 commit 6a434c9

6 files changed (+108 -181 lines)

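`slice::DrainRaw` itself is added in `library/core` and is not part of this diff; the `alloc` side only enables the `slice_drain_raw_iter` feature gate (first file below) and calls into it. Judging from the calls this commit makes (`from_parts`, `next`, `next_back`, `len`, `is_empty`, `advance_by`, `advance_back_by`, `next_chunk`, `drop_remaining`, `forget_remaining`, `as_nonnull_slice`), it behaves like a pointer-plus-length drain over a raw slice. The following is only a rough sketch of that idea, with an invented name (`DrainRawSketch`) and a handful of the methods; it is not the real `core` implementation.

use core::mem::size_of;
use core::ptr::NonNull;

// Hypothetical illustration: a start pointer paired with an explicit remaining
// length, replacing IntoIter's old (ptr, end) encoding.
pub struct DrainRawSketch<T> {
    ptr: NonNull<T>,
    len: usize,
}

impl<T> DrainRawSketch<T> {
    // Mirrors the `DrainRaw::from_parts(buf, len)` call in vec/mod.rs below.
    // SAFETY: `ptr` must point to `len` initialized elements owned by the caller.
    pub unsafe fn from_parts(ptr: NonNull<T>, len: usize) -> Self {
        Self { ptr, len }
    }

    pub fn len(&self) -> usize {
        self.len
    }

    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    // One emptiness test for ZSTs and non-ZSTs alike, because the length is
    // tracked explicitly instead of being encoded in an `end` pointer.
    pub fn next(&mut self) -> Option<T> {
        if self.len == 0 {
            return None;
        }
        let old = self.ptr;
        self.len -= 1;
        if size_of::<T>() != 0 {
            // Only non-ZSTs advance the pointer; for ZSTs it stays put (and aligned).
            self.ptr = unsafe { old.add(1) };
        }
        Some(unsafe { old.read() })
    }

    // Stop tracking the remaining elements without dropping them.
    pub fn forget_remaining(&mut self) {
        self.len = 0;
    }
}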

library/alloc/src/lib.rs (+1)

@@ -146,6 +146,7 @@
 #![feature(receiver_trait)]
 #![feature(set_ptr_value)]
 #![feature(sized_type_properties)]
+#![feature(slice_drain_raw_iter)]
 #![feature(slice_from_ptr_range)]
 #![feature(slice_index_methods)]
 #![feature(slice_ptr_get)]

library/alloc/src/vec/in_place_collect.rs (+6 -4)

@@ -253,12 +253,14 @@ where
 {
     let (src_buf, src_ptr, src_cap, mut dst_buf, dst_end, dst_cap) = unsafe {
         let inner = iterator.as_inner().as_into_iter();
+        let inner_ptr = inner.ptr();
+        let inner_end = inner_ptr.add(inner.len());
         (
             inner.buf,
-            inner.ptr,
+            inner_ptr,
             inner.cap,
             inner.buf.cast::<T>(),
-            inner.end as *const T,
+            inner_end.as_ptr() as *const T,
             inner.cap * mem::size_of::<I::Src>() / mem::size_of::<T>(),
         )
     };
@@ -275,9 +277,9 @@ where
     // check InPlaceIterable contract. This is only possible if the iterator advanced the
     // source pointer at all. If it uses unchecked access via TrustedRandomAccess
     // then the source pointer will stay in its initial position and we can't use it as reference
-    if src.ptr != src_ptr {
+    if src.ptr() != src_ptr {
         debug_assert!(
-            unsafe { dst_buf.add(len).cast() } <= src.ptr,
+            unsafe { dst_buf.add(len).cast() } <= src.ptr(),
             "InPlaceIterable contract violation, write pointer advanced beyond read pointer"
         );
     }
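Because the iterator no longer stores an end pointer, this hunk derives one on the spot from the start of the remaining range plus its length. A standalone illustration of that arithmetic with plain raw pointers (not the actual IntoIter internals):

fn main() {
    let buf = [10u8, 20, 30, 40];
    let remaining_len = 3;
    unsafe {
        let start = buf.as_ptr().add(1);      // plays the role of inner.ptr()
        let end = start.add(remaining_len);   // inner_ptr.add(inner.len())
        assert_eq!(end, buf.as_ptr().add(4)); // one past the last remaining element
    }
}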

library/alloc/src/vec/into_iter.rs (+37 -154)

@@ -11,23 +11,12 @@ use core::iter::{
     TrustedRandomAccessNoCoerce,
 };
 use core::marker::PhantomData;
-use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
+use core::mem::{ManuallyDrop, SizedTypeProperties};
 use core::num::NonZero;
 #[cfg(not(no_global_oom_handling))]
 use core::ops::Deref;
-use core::ptr::{self, NonNull};
-use core::slice::{self};
-
-macro non_null {
-    (mut $place:expr, $t:ident) => {{
-        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
-        unsafe { &mut *(ptr::addr_of_mut!($place) as *mut NonNull<$t>) }
-    }},
-    ($place:expr, $t:ident) => {{
-        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
-        unsafe { *(ptr::addr_of!($place) as *const NonNull<$t>) }
-    }},
-}
+use core::ptr::NonNull;
+use core::slice::DrainRaw;
 
 /// An iterator that moves out of a vector.
 ///
@@ -52,12 +41,7 @@ pub struct IntoIter<
     // the drop impl reconstructs a RawVec from buf, cap and alloc
     // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop
     pub(super) alloc: ManuallyDrop<A>,
-    pub(super) ptr: NonNull<T>,
-    /// If T is a ZST, this is actually ptr+len. This encoding is picked so that
-    /// ptr == end is a quick test for the Iterator being empty, that works
-    /// for both ZST and non-ZST.
-    /// For non-ZSTs the pointer is treated as `NonNull<T>`
-    pub(super) end: *const T,
+    pub(super) drain: DrainRaw<T>,
 }
 
 #[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
@@ -81,7 +65,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     /// ```
     #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
     pub fn as_slice(&self) -> &[T] {
-        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len()) }
+        unsafe { self.drain.as_nonnull_slice().as_ref() }
     }
 
     /// Returns the remaining items of this iterator as a mutable slice.
@@ -99,7 +83,7 @@
     /// ```
     #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
     pub fn as_mut_slice(&mut self) -> &mut [T] {
-        unsafe { &mut *self.as_raw_mut_slice() }
+        unsafe { self.drain.as_nonnull_slice().as_mut() }
     }
 
     /// Returns a reference to the underlying allocator.
@@ -109,10 +93,6 @@
         &self.alloc
     }
 
-    fn as_raw_mut_slice(&mut self) -> *mut [T] {
-        ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len())
-    }
-
     /// Drops remaining elements and relinquishes the backing allocation.
     /// This method guarantees it won't panic before relinquishing
     /// the backing allocation.
@@ -130,28 +110,26 @@
     /// documentation for an overview.
     #[cfg(not(no_global_oom_handling))]
     pub(super) fn forget_allocation_drop_remaining(&mut self) {
-        let remaining = self.as_raw_mut_slice();
-
         // overwrite the individual fields instead of creating a new
         // struct and then overwriting &mut self.
         // this creates less assembly
         self.cap = 0;
         self.buf = RawVec::NEW.non_null();
-        self.ptr = self.buf;
-        self.end = self.buf.as_ptr();
 
         // Dropping the remaining elements can panic, so this needs to be
         // done only after updating the other fields.
-        unsafe {
-            ptr::drop_in_place(remaining);
-        }
+        self.drain.drop_remaining();
+    }
+
+    /// Returns a pointer to the start of the part of the buffer that has not yet been dropped.
+    #[inline]
+    pub(crate) fn ptr(&self) -> NonNull<T> {
+        self.drain.as_nonnull_slice().as_non_null_ptr()
     }
 
     /// Forgets to Drop the remaining elements while still allowing the backing allocation to be freed.
     pub(crate) fn forget_remaining_elements(&mut self) {
-        // For the ZST case, it is crucial that we mutate `end` here, not `ptr`.
-        // `ptr` must stay aligned, while `end` may be unaligned.
-        self.end = self.ptr.as_ptr();
+        self.drain.forget_remaining();
     }
 
     #[cfg(not(no_global_oom_handling))]
@@ -167,17 +145,19 @@ impl<T, A: Allocator> IntoIter<T, A> {
         // Taking `alloc` is ok because nothing else is going to look at it,
         // since our `Drop` impl isn't going to run so there's no more code.
         unsafe {
-            let buf = this.buf.as_ptr();
-            let initialized = if T::IS_ZST {
+            let buf = this.buf;
+            let len = this.drain.len();
+            let start = if T::IS_ZST {
                 // All the pointers are the same for ZSTs, so it's fine to
                 // say that they're all at the beginning of the "allocation".
-                0..this.len()
+                0
             } else {
-                this.ptr.sub_ptr(this.buf)..this.end.sub_ptr(buf)
+                this.ptr().sub_ptr(buf)
             };
+            let initialized = start..(start + len);
             let cap = this.cap;
             let alloc = ManuallyDrop::take(&mut this.alloc);
-            VecDeque::from_contiguous_raw_parts_in(buf, initialized, cap, alloc)
+            VecDeque::from_contiguous_raw_parts_in(buf.as_ptr(), initialized, cap, alloc)
         }
     }
 }
@@ -200,51 +180,17 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
 
     #[inline]
     fn next(&mut self) -> Option<T> {
-        let ptr = if T::IS_ZST {
-            if self.ptr.as_ptr() == self.end as *mut T {
-                return None;
-            }
-            // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
-            // reducing the `end`.
-            self.end = self.end.wrapping_byte_sub(1);
-            self.ptr
-        } else {
-            if self.ptr == non_null!(self.end, T) {
-                return None;
-            }
-            let old = self.ptr;
-            self.ptr = unsafe { old.add(1) };
-            old
-        };
-        Some(unsafe { ptr.read() })
+        self.drain.next()
     }
 
     #[inline]
     fn size_hint(&self) -> (usize, Option<usize>) {
-        let exact = if T::IS_ZST {
-            self.end.addr().wrapping_sub(self.ptr.as_ptr().addr())
-        } else {
-            unsafe { non_null!(self.end, T).sub_ptr(self.ptr) }
-        };
-        (exact, Some(exact))
+        self.drain.size_hint()
     }
 
     #[inline]
     fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
-        let step_size = self.len().min(n);
-        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), step_size);
-        if T::IS_ZST {
-            // See `next` for why we sub `end` here.
-            self.end = self.end.wrapping_byte_sub(step_size);
-        } else {
-            // SAFETY: the min() above ensures that step_size is in bounds
-            self.ptr = unsafe { self.ptr.add(step_size) };
-        }
-        // SAFETY: the min() above ensures that step_size is in bounds
-        unsafe {
-            ptr::drop_in_place(to_drop);
-        }
-        NonZero::new(n - step_size).map_or(Ok(()), Err)
+        self.drain.advance_by(n)
     }
 
     #[inline]
@@ -253,46 +199,17 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
     }
 
     #[inline]
-    fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> {
-        let mut raw_ary = MaybeUninit::uninit_array();
-
-        let len = self.len();
-
-        if T::IS_ZST {
-            if len < N {
-                self.forget_remaining_elements();
-                // Safety: ZSTs can be conjured ex nihilo, only the amount has to be correct
-                return Err(unsafe { array::IntoIter::new_unchecked(raw_ary, 0..len) });
-            }
-
-            self.end = self.end.wrapping_byte_sub(N);
-            // Safety: ditto
-            return Ok(unsafe { raw_ary.transpose().assume_init() });
-        }
-
-        if len < N {
-            // Safety: `len` indicates that this many elements are available and we just checked that
-            // it fits into the array.
-            unsafe {
-                ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, len);
-                self.forget_remaining_elements();
-                return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
-            }
-        }
-
-        // Safety: `len` is larger than the array size. Copy a fixed amount here to fully initialize
-        // the array.
-        return unsafe {
-            ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, N);
-            self.ptr = self.ptr.add(N);
-            Ok(raw_ary.transpose().assume_init())
-        };
+    fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], array::IntoIter<T, N>> {
+        self.drain.next_chunk()
     }
 
     unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
     where
         Self: TrustedRandomAccessNoCoerce,
     {
+        // FIXME: for some reason, just `self.drain.__iterator_get_unchecked(i)`
+        // never worked for me. If you know a way to fix that, please do.
+
         // SAFETY: the caller must guarantee that `i` is in bounds of the
         // `Vec<T>`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)`
         // is guaranteed to pointer to an element of the `Vec<T>` and
@@ -301,62 +218,30 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         // Also note the implementation of `Self: TrustedRandomAccess` requires
         // that `T: Copy` so reading elements from the buffer doesn't invalidate
         // them for `Drop`.
-        unsafe { self.ptr.add(i).read() }
+        unsafe { self.ptr().add(i).read() }
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
     #[inline]
     fn next_back(&mut self) -> Option<T> {
-        if T::IS_ZST {
-            if self.ptr.as_ptr() == self.end as *mut _ {
-                return None;
-            }
-            // See above for why 'ptr.offset' isn't used
-            self.end = self.end.wrapping_byte_sub(1);
-            // Note that even though this is next_back() we're reading from `self.ptr`, not
-            // `self.end`. We track our length using the byte offset from `self.ptr` to `self.end`,
-            // so the end pointer may not be suitably aligned for T.
-            Some(unsafe { ptr::read(self.ptr.as_ptr()) })
-        } else {
-            if self.ptr == non_null!(self.end, T) {
-                return None;
-            }
-            unsafe {
-                self.end = self.end.sub(1);
-                Some(ptr::read(self.end))
-            }
-        }
+        self.drain.next_back()
     }
 
     #[inline]
     fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
-        let step_size = self.len().min(n);
-        if T::IS_ZST {
-            // SAFETY: same as for advance_by()
-            self.end = self.end.wrapping_byte_sub(step_size);
-        } else {
-            // SAFETY: same as for advance_by()
-            self.end = unsafe { self.end.sub(step_size) };
-        }
-        let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size);
-        // SAFETY: same as for advance_by()
-        unsafe {
-            ptr::drop_in_place(to_drop);
-        }
-        NonZero::new(n - step_size).map_or(Ok(()), Err)
+        self.drain.advance_back_by(n)
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
     fn is_empty(&self) -> bool {
-        if T::IS_ZST {
-            self.ptr.as_ptr() == self.end as *mut _
-        } else {
-            self.ptr == non_null!(self.end, T)
-        }
+        self.drain.is_empty()
+    }
+    fn len(&self) -> usize {
+        self.drain.len()
     }
 }
 
@@ -440,9 +325,7 @@ unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
 
         let guard = DropGuard(self);
         // destroy the remaining elements
-        unsafe {
-            ptr::drop_in_place(guard.0.as_raw_mut_slice());
-        }
+        guard.0.drain.drop_remaining();
        // now `guard` will be dropped and do the rest
     }
 }
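The `advance_by`/`advance_back_by` bodies above now delegate to the drain, but the removed code documents the contract that has to be preserved: drop `min(len, n)` elements, then report any shortfall through `NonZero`. A self-contained restatement of just that bookkeeping (the element dropping is elided):

use core::num::NonZero;

// Only the length/shortfall arithmetic from the removed bodies, not the real method.
fn advance_report(len: usize, n: usize) -> Result<(), NonZero<usize>> {
    let step_size = len.min(n);
    // ...the real code drops `step_size` elements here before reporting...
    NonZero::new(n - step_size).map_or(Ok(()), Err)
}

fn main() {
    assert_eq!(advance_report(5, 3), Ok(()));                        // enough elements left
    assert_eq!(advance_report(2, 5), Err(NonZero::new(3).unwrap())); // 3 steps could not be taken
}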

library/alloc/src/vec/mod.rs (+5 -7)

@@ -64,6 +64,8 @@ use core::marker::PhantomData;
 use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
 use core::ops::{self, Index, IndexMut, Range, RangeBounds};
 use core::ptr::{self, NonNull};
+#[cfg(not(no_global_oom_handling))]
+use core::slice::DrainRaw;
 use core::slice::{self, SliceIndex};
 
 use crate::alloc::{Allocator, Global};
@@ -3000,14 +3002,10 @@
             let me = ManuallyDrop::new(self);
             let alloc = ManuallyDrop::new(ptr::read(me.allocator()));
             let buf = me.buf.non_null();
-            let begin = buf.as_ptr();
-            let end = if T::IS_ZST {
-                begin.wrapping_byte_add(me.len())
-            } else {
-                begin.add(me.len()) as *const T
-            };
+            let len = me.len();
             let cap = me.buf.capacity();
-            IntoIter { buf, phantom: PhantomData, cap, alloc, ptr: buf, end }
+            let drain = DrainRaw::from_parts(buf, len);
+            IntoIter { buf, phantom: PhantomData, cap, alloc, drain }
         }
     }
 }
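Construction now just pairs the buffer with its length via `DrainRaw::from_parts`. The observable behavior of `vec::IntoIter` is meant to be unchanged; a quick check against the stable public API that the rewritten internals still have to satisfy:

fn main() {
    let v = vec![1, 2, 3, 4];
    let mut it = v.into_iter();
    assert_eq!(it.next(), Some(1));      // front: now drain.next()
    assert_eq!(it.next_back(), Some(4)); // back: now drain.next_back()
    assert_eq!(it.as_slice(), &[2, 3]);  // remaining elements, via the drain's slice view
    assert_eq!(it.len(), 2);             // ExactSizeIterator: now drain.len()
}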

library/alloc/src/vec/spec_from_iter.rs (+2 -2)

@@ -44,12 +44,12 @@ impl<T> SpecFromIter<T, IntoIter<T>> for Vec<T> {
         // than creating it through the generic FromIterator implementation would. That limitation
         // is not strictly necessary as Vec's allocation behavior is intentionally unspecified.
         // But it is a conservative choice.
-        let has_advanced = iterator.buf != iterator.ptr;
+        let has_advanced = iterator.buf != iterator.ptr();
         if !has_advanced || iterator.len() >= iterator.cap / 2 {
             unsafe {
                 let it = ManuallyDrop::new(iterator);
                 if has_advanced {
-                    ptr::copy(it.ptr.as_ptr(), it.buf.as_ptr(), it.len());
+                    ptr::copy(it.ptr().as_ptr(), it.buf.as_ptr(), it.len());
                 }
                 return Vec::from_nonnull(it.buf, it.len(), it.cap);
             }
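The copy back to the front of the buffer stays `ptr::copy` (a memmove) because when the iterator has advanced only a little, the remaining elements can overlap their destination. A standalone illustration of why an overlap-tolerant copy is required (unrelated to the actual Vec internals):

fn main() {
    let mut data = [0u32, 0, 7, 8, 9]; // the buffer; the first two slots were already consumed
    unsafe {
        let dst = data.as_mut_ptr();        // plays the role of it.buf
        let src = dst.add(2) as *const u32; // plays the role of it.ptr()
        core::ptr::copy(src, dst, 3);       // source [2..5) overlaps destination [0..3)
    }
    assert_eq!(&data[..3], &[7, 8, 9]);
}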
