@@ -11,23 +11,12 @@ use core::iter::{
     TrustedRandomAccessNoCoerce,
 };
 use core::marker::PhantomData;
-use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
+use core::mem::{ManuallyDrop, SizedTypeProperties};
 use core::num::NonZero;
 #[cfg(not(no_global_oom_handling))]
 use core::ops::Deref;
-use core::ptr::{self, NonNull};
-use core::slice::{self};
-
-macro non_null {
-    (mut $place:expr, $t:ident) => {{
-        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
-        unsafe { &mut *(ptr::addr_of_mut!($place) as *mut NonNull<$t>) }
-    }},
-    ($place:expr, $t:ident) => {{
-        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
-        unsafe { *(ptr::addr_of!($place) as *const NonNull<$t>) }
-    }},
-}
+use core::ptr::NonNull;
+use core::slice::DrainRaw;
 
 /// An iterator that moves out of a vector.
 ///
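
For orientation, here is a rough sketch of the `DrainRaw<T>` surface the rest of this diff relies on. It is a hedged reconstruction from the call sites in this file, not the actual `core::slice` definition; the field layout and method bodies are assumptions.

// Sketch only: reconstructed from the call sites in this diff. The real
// core::slice::DrainRaw differs in layout and implementation details.
use core::ptr::NonNull;

struct DrainRaw<T> {
    // The not-yet-yielded elements, as a raw slice (the length lives in the
    // slice metadata, so this also works for ZSTs).
    remaining: NonNull<[T]>,
}

impl<T> DrainRaw<T> {
    // Raw view of the elements that have not been yielded yet.
    fn as_nonnull_slice(&self) -> NonNull<[T]> {
        self.remaining
    }

    // Shrinks the view to zero length, leaking the remaining elements.
    fn forget_remaining(&mut self) {
        self.remaining = NonNull::slice_from_raw_parts(self.remaining.cast::<T>(), 0);
    }

    // Runs the destructors of all remaining elements and empties the view.
    fn drop_remaining(&mut self) {
        let slice = self.remaining.as_ptr();
        // Empty the view first so a panicking destructor cannot lead to a double drop.
        self.forget_remaining();
        unsafe { core::ptr::drop_in_place(slice) };
    }
}

`DrainRaw` also implements `Iterator` and `DoubleEndedIterator` (`next`, `next_back`, `size_hint`, `advance_by`, `advance_back_by`, `next_chunk`), which the `IntoIter` impls below delegate to.
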
@@ -52,12 +41,7 @@ pub struct IntoIter<
     // the drop impl reconstructs a RawVec from buf, cap and alloc
     // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop
     pub(super) alloc: ManuallyDrop<A>,
-    pub(super) ptr: NonNull<T>,
-    /// If T is a ZST, this is actually ptr+len. This encoding is picked so that
-    /// ptr == end is a quick test for the Iterator being empty, that works
-    /// for both ZST and non-ZST.
-    /// For non-ZSTs the pointer is treated as `NonNull<T>`
-    pub(super) end: *const T,
+    pub(super) drain: DrainRaw<T>,
 }
 
 #[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
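
The removed doc comment above explains the old ZST length encoding. A tiny illustration of that scheme with hypothetical values; the strict-provenance pointer methods are used purely for the demonstration:

fn main() {
    // Old encoding, sketched: for a ZST, `end` was `ptr` advanced by `len` *bytes*,
    // so `ptr == end` tested emptiness uniformly for ZSTs and non-ZSTs, and the
    // remaining length fell out of the address difference.
    let ptr = core::ptr::dangling::<()>(); // aligned dangling pointer, as in a ZST Vec
    let len = 3usize;
    let end = ptr.wrapping_byte_add(len);
    assert_eq!(end.addr() - ptr.addr(), len); // length recovered from addresses
    assert!(ptr != end); // three "elements" remain, so the iterator is non-empty
}
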
@@ -81,7 +65,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     /// ```
     #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
     pub fn as_slice(&self) -> &[T] {
-        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len()) }
+        unsafe { self.drain.as_nonnull_slice().as_ref() }
     }
 
     /// Returns the remaining items of this iterator as a mutable slice.
@@ -99,7 +83,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     /// ```
     #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
     pub fn as_mut_slice(&mut self) -> &mut [T] {
-        unsafe { &mut *self.as_raw_mut_slice() }
+        unsafe { self.drain.as_nonnull_slice().as_mut() }
     }
 
     /// Returns a reference to the underlying allocator.
@@ -109,10 +93,6 @@ impl<T, A: Allocator> IntoIter<T, A> {
         &self.alloc
     }
 
-    fn as_raw_mut_slice(&mut self) -> *mut [T] {
-        ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len())
-    }
-
     /// Drops remaining elements and relinquishes the backing allocation.
     /// This method guarantees it won't panic before relinquishing
     /// the backing allocation.
@@ -130,28 +110,26 @@ impl<T, A: Allocator> IntoIter<T, A> {
     /// documentation for an overview.
     #[cfg(not(no_global_oom_handling))]
     pub(super) fn forget_allocation_drop_remaining(&mut self) {
-        let remaining = self.as_raw_mut_slice();
-
         // overwrite the individual fields instead of creating a new
         // struct and then overwriting &mut self.
        // this creates less assembly
         self.cap = 0;
         self.buf = RawVec::NEW.non_null();
-        self.ptr = self.buf;
-        self.end = self.buf.as_ptr();
 
         // Dropping the remaining elements can panic, so this needs to be
         // done only after updating the other fields.
-        unsafe {
-            ptr::drop_in_place(remaining);
-        }
+        self.drain.drop_remaining();
+    }
+
+    /// Returns a pointer to the start of the part of the buffer that has not yet been dropped.
+    #[inline]
+    pub(crate) fn ptr(&self) -> NonNull<T> {
+        self.drain.as_nonnull_slice().as_non_null_ptr()
     }
 
     /// Forgets to Drop the remaining elements while still allowing the backing allocation to be freed.
     pub(crate) fn forget_remaining_elements(&mut self) {
-        // For the ZST case, it is crucial that we mutate `end` here, not `ptr`.
-        // `ptr` must stay aligned, while `end` may be unaligned.
-        self.end = self.ptr.as_ptr();
+        self.drain.forget_remaining();
     }
 
     #[cfg(not(no_global_oom_handling))]
@@ -167,17 +145,19 @@ impl<T, A: Allocator> IntoIter<T, A> {
         // Taking `alloc` is ok because nothing else is going to look at it,
         // since our `Drop` impl isn't going to run so there's no more code.
         unsafe {
-            let buf = this.buf.as_ptr();
-            let initialized = if T::IS_ZST {
+            let buf = this.buf;
+            let len = this.drain.len();
+            let start = if T::IS_ZST {
                 // All the pointers are the same for ZSTs, so it's fine to
                 // say that they're all at the beginning of the "allocation".
-                0..this.len()
+                0
             } else {
-                this.ptr.sub_ptr(this.buf)..this.end.sub_ptr(buf)
+                this.ptr().sub_ptr(buf)
             };
+            let initialized = start..(start + len);
             let cap = this.cap;
             let alloc = ManuallyDrop::take(&mut this.alloc);
-            VecDeque::from_contiguous_raw_parts_in(buf, initialized, cap, alloc)
+            VecDeque::from_contiguous_raw_parts_in(buf.as_ptr(), initialized, cap, alloc)
         }
     }
 }
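
A quick worked example of the new `initialized` computation, using hypothetical values rather than anything from the PR: if the original Vec had capacity 8 and the iterator has already yielded 3 elements from the front and 2 from the back, the drain still covers 3 elements starting at offset 3.

fn main() {
    // Hypothetical state: cap = 8, original len = 8,
    // 3 items taken via next() and 2 via next_back().
    let start = 3usize; // this.ptr().sub_ptr(buf): offset of the first live element
    let len = 3usize; // this.drain.len(): elements not yet yielded
    let initialized = start..(start + len);
    assert_eq!(initialized, 3..6); // the still-initialized slots handed to VecDeque
}
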
@@ -200,51 +180,17 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
 
     #[inline]
     fn next(&mut self) -> Option<T> {
-        let ptr = if T::IS_ZST {
-            if self.ptr.as_ptr() == self.end as *mut T {
-                return None;
-            }
-            // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
-            // reducing the `end`.
-            self.end = self.end.wrapping_byte_sub(1);
-            self.ptr
-        } else {
-            if self.ptr == non_null!(self.end, T) {
-                return None;
-            }
-            let old = self.ptr;
-            self.ptr = unsafe { old.add(1) };
-            old
-        };
-        Some(unsafe { ptr.read() })
+        self.drain.next()
     }
 
     #[inline]
     fn size_hint(&self) -> (usize, Option<usize>) {
-        let exact = if T::IS_ZST {
-            self.end.addr().wrapping_sub(self.ptr.as_ptr().addr())
-        } else {
-            unsafe { non_null!(self.end, T).sub_ptr(self.ptr) }
-        };
-        (exact, Some(exact))
+        self.drain.size_hint()
     }
 
     #[inline]
     fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
-        let step_size = self.len().min(n);
-        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), step_size);
-        if T::IS_ZST {
-            // See `next` for why we sub `end` here.
-            self.end = self.end.wrapping_byte_sub(step_size);
-        } else {
-            // SAFETY: the min() above ensures that step_size is in bounds
-            self.ptr = unsafe { self.ptr.add(step_size) };
-        }
-        // SAFETY: the min() above ensures that step_size is in bounds
-        unsafe {
-            ptr::drop_in_place(to_drop);
-        }
-        NonZero::new(n - step_size).map_or(Ok(()), Err)
+        self.drain.advance_by(n)
     }
 
     #[inline]
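
The delegation preserves `advance_by`'s contract: skipped elements are dropped, `Ok(())` means all `n` were skipped, and `Err(k)` reports how far short the iterator fell. A small usage sketch; `advance_by` is unstable, so this needs the `iter_advance_by` feature on nightly:

#![feature(iter_advance_by)]

fn main() {
    let mut it = vec![1, 2, 3].into_iter();
    assert_eq!(it.advance_by(2), Ok(())); // elements 1 and 2 are dropped
    assert_eq!(it.next(), Some(3));
    // The iterator is now empty, so advancing by 4 falls short by 4.
    assert_eq!(it.advance_by(4).unwrap_err().get(), 4);
}
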
@@ -253,46 +199,17 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
     }
 
     #[inline]
-    fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> {
-        let mut raw_ary = MaybeUninit::uninit_array();
-
-        let len = self.len();
-
-        if T::IS_ZST {
-            if len < N {
-                self.forget_remaining_elements();
-                // Safety: ZSTs can be conjured ex nihilo, only the amount has to be correct
-                return Err(unsafe { array::IntoIter::new_unchecked(raw_ary, 0..len) });
-            }
-
-            self.end = self.end.wrapping_byte_sub(N);
-            // Safety: ditto
-            return Ok(unsafe { raw_ary.transpose().assume_init() });
-        }
-
-        if len < N {
-            // Safety: `len` indicates that this many elements are available and we just checked that
-            // it fits into the array.
-            unsafe {
-                ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, len);
-                self.forget_remaining_elements();
-                return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
-            }
-        }
-
-        // Safety: `len` is larger than the array size. Copy a fixed amount here to fully initialize
-        // the array.
-        return unsafe {
-            ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, N);
-            self.ptr = self.ptr.add(N);
-            Ok(raw_ary.transpose().assume_init())
-        };
+    fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], array::IntoIter<T, N>> {
+        self.drain.next_chunk()
     }
 
     unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
     where
         Self: TrustedRandomAccessNoCoerce,
     {
+        // FIXME: for some reason, just `self.drain.__iterator_get_unchecked(i)`
+        // never worked for me. If you know a way to fix that, please do.
+
         // SAFETY: the caller must guarantee that `i` is in bounds of the
         // `Vec<T>`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)`
         // is guaranteed to point to an element of the `Vec<T>` and
@@ -301,62 +218,30 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         // Also note the implementation of `Self: TrustedRandomAccess` requires
         // that `T: Copy` so reading elements from the buffer doesn't invalidate
         // them for `Drop`.
-        unsafe { self.ptr.add(i).read() }
+        unsafe { self.ptr().add(i).read() }
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
     #[inline]
     fn next_back(&mut self) -> Option<T> {
-        if T::IS_ZST {
-            if self.ptr.as_ptr() == self.end as *mut _ {
-                return None;
-            }
-            // See above for why 'ptr.offset' isn't used
-            self.end = self.end.wrapping_byte_sub(1);
-            // Note that even though this is next_back() we're reading from `self.ptr`, not
-            // `self.end`. We track our length using the byte offset from `self.ptr` to `self.end`,
-            // so the end pointer may not be suitably aligned for T.
-            Some(unsafe { ptr::read(self.ptr.as_ptr()) })
-        } else {
-            if self.ptr == non_null!(self.end, T) {
-                return None;
-            }
-            unsafe {
-                self.end = self.end.sub(1);
-                Some(ptr::read(self.end))
-            }
-        }
+        self.drain.next_back()
     }
 
     #[inline]
     fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
-        let step_size = self.len().min(n);
-        if T::IS_ZST {
-            // SAFETY: same as for advance_by()
-            self.end = self.end.wrapping_byte_sub(step_size);
-        } else {
-            // SAFETY: same as for advance_by()
-            self.end = unsafe { self.end.sub(step_size) };
-        }
-        let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size);
-        // SAFETY: same as for advance_by()
-        unsafe {
-            ptr::drop_in_place(to_drop);
-        }
-        NonZero::new(n - step_size).map_or(Ok(()), Err)
+        self.drain.advance_back_by(n)
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
     fn is_empty(&self) -> bool {
-        if T::IS_ZST {
-            self.ptr.as_ptr() == self.end as *mut _
-        } else {
-            self.ptr == non_null!(self.end, T)
-        }
+        self.drain.is_empty()
+    }
+    fn len(&self) -> usize {
+        self.drain.len()
     }
 }
 
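
From the caller's perspective `len` and `is_empty` behave as before; both are now answered by the drain, which tracks consumption from either end. A small demonstration (`ExactSizeIterator::is_empty` is unstable, hence the feature gate):

#![feature(exact_size_is_empty)]

fn main() {
    let mut it = vec!['a', 'b', 'c', 'd'].into_iter();
    it.next(); // consume one element from the front
    it.next_back(); // and one from the back
    assert_eq!(it.len(), 2); // both len()...
    assert!(!it.is_empty()); // ...and is_empty() see the remaining drain
}
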
@@ -440,9 +325,7 @@ unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
 
         let guard = DropGuard(self);
         // destroy the remaining elements
-        unsafe {
-            ptr::drop_in_place(guard.0.as_raw_mut_slice());
-        }
+        guard.0.drain.drop_remaining();
         // now `guard` will be dropped and do the rest
     }
 }
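
For context, the surrounding `Drop` impl (only partially shown in this hunk) relies on a drop-guard: the guard's own `Drop` reconstructs and frees the backing `RawVec`, so deallocation happens even if one of the remaining elements panics while being dropped. A generic sketch of the pattern, with hypothetical names:

// Minimal sketch of the drop-guard idea (hypothetical names): the cleanup lives
// in the guard's Drop impl, so it still runs if the work in between unwinds.
struct Guard<F: FnMut()>(F);

impl<F: FnMut()> Drop for Guard<F> {
    fn drop(&mut self) {
        (self.0)(); // runs on both normal exit and unwind
    }
}

fn main() {
    let guard = Guard(|| println!("backing allocation released"));
    // ... drop the remaining elements here; this step may panic ...
    drop(guard); // on unwind, the guard's Drop would have fired anyway
}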