Skip to content

Commit 6c67e55

Browse files
committed
specialize in-place collection further via TrustedRandomAccess
This allows the optimizer to turn certain iterator pipelines such as ```rust let vec = vec![0usize; 100]; vec.into_iter().map(|e| e as isize).collect::<Vec<_>>() ``` into a no-op. The optimization only applies when the iterator source is `T: Copy`, since the `impl TrustedRandomAccess for IntoIter<T>` is bounded by `T: Copy`. No such requirement applies to the output type (`Iterator::Item`).
1 parent a1a04e0 commit 6c67e55

File tree

1 file changed

+53
-16
lines changed

1 file changed

+53
-16
lines changed

library/alloc/src/vec/source_iter_marker.rs

+53-16
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
use core::iter::{InPlaceIterable, SourceIter};
1+
use core::iter::{InPlaceIterable, SourceIter, TrustedRandomAccess};
22
use core::mem::{self, ManuallyDrop};
33
use core::ptr::{self};
44

@@ -52,16 +52,7 @@ where
5252
)
5353
};
5454

55-
// use try-fold since
56-
// - it vectorizes better for some iterator adapters
57-
// - unlike most internal iteration methods, it only takes a &mut self
58-
// - it lets us thread the write pointer through its innards and get it back in the end
59-
let sink = InPlaceDrop { inner: dst_buf, dst: dst_buf };
60-
let sink = iterator
61-
.try_fold::<_, _, Result<_, !>>(sink, write_in_place_with_drop(dst_end))
62-
.unwrap();
63-
// iteration succeeded, don't drop head
64-
let dst = ManuallyDrop::new(sink).dst;
55+
let len = SpecInPlaceCollect::collect_in_place(&mut iterator, dst_buf, dst_end);
6556

6657
let src = unsafe { iterator.as_inner().as_into_iter() };
6758
// check if SourceIter contract was upheld
@@ -72,7 +63,7 @@ where
7263
// then the source pointer will stay in its initial position and we can't use it as reference
7364
if src.ptr != src_ptr {
7465
debug_assert!(
75-
dst as *const _ <= src.ptr,
66+
unsafe { dst_buf.add(len) as *const _ } <= src.ptr,
7667
"InPlaceIterable contract violation, write pointer advanced beyond read pointer"
7768
);
7869
}
@@ -82,10 +73,7 @@ where
8273
// but prevent drop of the allocation itself once IntoIter goes out of scope
8374
src.forget_allocation();
8475

85-
let vec = unsafe {
86-
let len = dst.offset_from(dst_buf) as usize;
87-
Vec::from_raw_parts(dst_buf, len, cap)
88-
};
76+
let vec = unsafe { Vec::from_raw_parts(dst_buf, len, cap) };
8977

9078
vec
9179
}
@@ -106,3 +94,52 @@ fn write_in_place_with_drop<T>(
10694
Ok(sink)
10795
}
10896
}
97+
98+
/// Helper trait to hold specialized implementations of the in-place iterate-collect loop
99+
trait SpecInPlaceCollect<T, I>: Iterator<Item = T> {
100+
/// Collects an iterator (`self`) into the destination buffer (`dst`) and returns the number of items
101+
/// collected. `end` is the last writable element of the allocation and used for bounds checks.
102+
fn collect_in_place(&mut self, dst: *mut T, end: *const T) -> usize;
103+
}
104+
105+
impl<T, I> SpecInPlaceCollect<T, I> for I
106+
where
107+
I: Iterator<Item = T>,
108+
{
109+
#[inline]
110+
default fn collect_in_place(&mut self, dst_buf: *mut T, end: *const T) -> usize {
111+
// use try-fold since
112+
// - it vectorizes better for some iterator adapters
113+
// - unlike most internal iteration methods, it only takes a &mut self
114+
// - it lets us thread the write pointer through its innards and get it back in the end
115+
let sink = InPlaceDrop { inner: dst_buf, dst: dst_buf };
116+
let sink =
117+
self.try_fold::<_, _, Result<_, !>>(sink, write_in_place_with_drop(end)).unwrap();
118+
// iteration succeeded, don't drop head
119+
unsafe { ManuallyDrop::new(sink).dst.offset_from(dst_buf) as usize }
120+
}
121+
}
122+
123+
impl<T, I> SpecInPlaceCollect<T, I> for I
124+
where
125+
I: Iterator<Item = T> + TrustedRandomAccess,
126+
{
127+
#[inline]
128+
fn collect_in_place(&mut self, dst_buf: *mut T, end: *const T) -> usize {
129+
let len = self.size();
130+
let mut drop_guard = InPlaceDrop { inner: dst_buf, dst: dst_buf };
131+
for i in 0..len {
132+
// Safety: InPlaceIterable contract guarantees that for every element we read
133+
// one slot in the underlying storage will have been freed up and we can immediately
134+
// write back the result.
135+
unsafe {
136+
let dst = dst_buf.offset(i as isize);
137+
debug_assert!(dst as *const _ <= end, "InPlaceIterable contract violation");
138+
ptr::write(dst, self.__iterator_get_unchecked(i));
139+
drop_guard.dst = dst.add(1);
140+
}
141+
}
142+
mem::forget(drop_guard);
143+
len
144+
}
145+
}

0 commit comments

Comments
 (0)