// rune_alloc/vec/into_iter.rs

use core::fmt;
use core::iter::FusedIterator;
use core::marker::PhantomData;
use core::mem::{self, ManuallyDrop};
use core::slice::{self};

use crate::alloc::SizedTypeProperties;
use crate::alloc::{Allocator, Global};
use crate::ptr::{self, NonNull};
use crate::raw_vec::RawVec;

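/// A by-value iterator over the contents of a `Vec<T, A>`, returned by
/// `Vec::into_iter`.
///
/// `buf`, `cap` and `alloc` keep ownership of the original allocation so it
/// can be freed on drop, while `ptr..end` tracks the elements that have not
/// been yielded yet.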
pub struct IntoIter<T, A: Allocator = Global> {
    // Start of the original allocation; used by `Drop` to rebuild the
    // `RawVec` that frees the buffer.
    pub(super) buf: NonNull<T>,
    pub(super) phantom: PhantomData<T>,
    // Capacity of the original allocation, in elements.
    pub(super) cap: usize,
    // Wrapped in `ManuallyDrop` so the allocator is dropped exactly once, by
    // the `RawVec` reconstructed in `Drop`.
    pub(super) alloc: ManuallyDrop<A>,
    // The not-yet-yielded range. For zero-sized `T`, `end` is moved byte-wise
    // to track the remaining length.
    pub(super) ptr: *const T,
    pub(super) end: *const T,
}

impl<T, A> fmt::Debug for IntoIter<T, A>
where
    T: fmt::Debug,
    A: Allocator,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T, A> IntoIter<T, A>
where
    A: Allocator,
{
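    /// Returns the remaining items of this iterator as a slice.
    ///
    /// A minimal usage sketch (marked `ignore` since it assumes this crate's
    /// `try_vec!` macro is in scope):
    ///
    /// ```ignore
    /// let vec = try_vec![1, 2, 3]?;
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &[1, 2, 3]);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &[2, 3]);
    /// ```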
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: `ptr..end` always covers initialized elements owned by the
        // iterator, and `len()` is derived from that range.
        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
    }

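    /// Returns the remaining items of this iterator as a mutable slice.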
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { &mut *self.as_raw_mut_slice() }
    }

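    /// Returns a reference to the underlying allocator.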
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

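    /// Returns the remaining range as a raw slice pointer, without creating
    /// an intermediate reference.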
    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
    }

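    /// Forgets the elements that have not been yielded yet: the remaining
    /// range is emptied, so `Drop` frees the backing allocation without
    /// running those elements' destructors.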
    #[cfg(rune_nightly)]
    pub(crate) fn forget_remaining_elements(&mut self) {
        self.end = self.ptr;
    }
}

impl<T, A> AsRef<[T]> for IntoIter<T, A>
where
    A: Allocator,
{
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

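// SAFETY: `IntoIter` owns its remaining elements and the backing allocation,
// so it is `Send`/`Sync` exactly when `T` and `A` are.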
unsafe impl<T, A> Send for IntoIter<T, A>
where
    T: Send,
    A: Allocator + Send,
{
}
unsafe impl<T, A> Sync for IntoIter<T, A>
where
    T: Sync,
    A: Allocator + Sync,
{
}

impl<T, A> Iterator for IntoIter<T, A>
where
    A: Allocator,
{
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        if self.ptr == self.end {
            None
        } else if T::IS_ZST {
            // `ptr` has to stay where it is to remain aligned, so the
            // remaining length is reduced by moving `end` back one byte.
            self.end = self.end.wrapping_byte_sub(1);

            // Conjure up a value of this ZST; it has no bytes to read.
            Some(unsafe { mem::zeroed() })
        } else {
            let old = self.ptr;
            self.ptr = unsafe { self.ptr.add(1) };

            Some(unsafe { ptr::read(old) })
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if T::IS_ZST {
            // For ZSTs the remaining length is the byte distance between
            // `ptr` and `end` maintained by `next` and `next_back`.
            self.end.addr().wrapping_sub(self.ptr.addr())
        } else {
            unsafe { self.end.offset_from_unsigned(self.ptr) }
        };
        (exact, Some(exact))
    }

    #[inline]
    fn count(self) -> usize {
        self.len()
    }
}

impl<T, A> DoubleEndedIterator for IntoIter<T, A>
where
    A: Allocator,
{
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if self.end == self.ptr {
            None
        } else if T::IS_ZST {
            // See `next`: the remaining length is tracked by moving `end`
            // byte-wise.
            self.end = self.end.wrapping_byte_sub(1);

            Some(unsafe { mem::zeroed() })
        } else {
            self.end = unsafe { self.end.sub(1) };

            Some(unsafe { ptr::read(self.end) })
        }
    }
}

impl<T, A> ExactSizeIterator for IntoIter<T, A> where A: Allocator {}

impl<T, A> FusedIterator for IntoIter<T, A> where A: Allocator {}

impl<T, A> Default for IntoIter<T, A>
where
    A: Allocator + Default,
{
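    /// Creates an empty iterator by converting an empty `Vec` allocated with
    /// the default allocator.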
    fn default() -> Self {
        super::Vec::new_in(Default::default()).into_iter()
    }
}

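// On nightly (`rune_nightly`), `#[may_dangle]` tells the drop checker that
// dropping `IntoIter` only drops its `T` values and never otherwise accesses
// them; the fallback impl below is identical apart from the attribute.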
#[cfg(rune_nightly)]
unsafe impl<#[may_dangle] T, A> Drop for IntoIter<T, A>
where
    A: Allocator,
{
    fn drop(&mut self) {
        struct DropGuard<'a, T, A>(&'a mut IntoIter<T, A>)
        where
            A: Allocator;

        impl<T, A> Drop for DropGuard<'_, T, A>
        where
            A: Allocator,
        {
            fn drop(&mut self) {
                unsafe {
                    // Rebuild the `RawVec` from the original buffer, capacity
                    // and allocator so the allocation is freed.
                    let alloc = ManuallyDrop::take(&mut self.0.alloc);
                    let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        unsafe {
            // Drop the elements that were never yielded. The guard frees the
            // buffer afterwards, even if one of these destructors panics.
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
    }
}

#[cfg(not(rune_nightly))]
impl<T, A> Drop for IntoIter<T, A>
where
    A: Allocator,
{
    fn drop(&mut self) {
        struct DropGuard<'a, T, A>(&'a mut IntoIter<T, A>)
        where
            A: Allocator;

        impl<T, A> Drop for DropGuard<'_, T, A>
        where
            A: Allocator,
        {
            fn drop(&mut self) {
                unsafe {
                    let alloc = ManuallyDrop::take(&mut self.0.alloc);
                    let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
    }
}