bitvec/vec/iter.rs

1#![doc = include_str!("../../doc/vec/iter.md")]
2
3use alloc::vec::Vec;
4use core::{
5	fmt::{
6		self,
7		Debug,
8		Formatter,
9	},
10	iter::{
11		FromIterator,
12		FusedIterator,
13	},
14	mem,
15	ops::Range,
16};
17
18use tap::{
19	Pipe,
20	Tap,
21	TapOptional,
22};
23use wyz::{
24	comu::{
25		Mut,
26		Mutability,
27	},
28	range::RangeExt,
29};
30
31use super::BitVec;
32use crate::{
33	boxed::BitBox,
34	mem::bits_of,
35	order::BitOrder,
36	ptr::{
37		BitPtrRange,
38		BitRef,
39	},
40	slice::BitSlice,
41	store::BitStore,
42	view::BitView,
43};
44
#[doc = include_str!("../../doc/vec/iter/Extend_bool.md")]
impl<T, O> Extend<bool> for BitVec<T, O>
where
	T: BitStore,
	O: BitOrder,
{
	/// Appends every bit yielded by `iter` to the back of the bit-vector.
	///
	/// The iterator's `.size_hint()` is used to reserve capacity and write
	/// bits directly into the reserved region in bulk; any bits the hint
	/// did not account for are then appended one at a time with `.push()`.
	#[inline]
	fn extend<I>(&mut self, iter: I)
	where I: IntoIterator<Item = bool> {
		let mut iter = iter.into_iter();
		#[allow(irrefutable_let_patterns)] // Removing the `if` is unstable.
		if let (_, Some(n)) | (n, None) = iter.size_hint() {
			//  Reserve room for the hinted bit count, then view the not-yet
			//  live region immediately past the current length.
			self.reserve(n);
			let len = self.len();
			//  If the reservation did not panic, then this will not overflow.
			let new_len = len.wrapping_add(n);
			let new = unsafe { self.get_unchecked_mut(len .. new_len) };

			//  Write the iterator's bits into the reserved region, counting
			//  how many were actually produced (the hint may overestimate).
			let pulled = new
				.as_mut_bitptr_range()
				.zip(iter.by_ref())
				.map(|(ptr, bit)| unsafe {
					ptr.write(bit);
				})
				.count();
			//  Only the bits actually written become part of the live region.
			unsafe {
				self.set_len(len + pulled);
			}
		}

		//  If the iterator is well-behaved and finite, this should never
		//  enter; if the iterator is infinite, then this will eventually crash.
		iter.for_each(|bit| self.push(bit));
	}
}
80
81#[cfg(not(tarpaulin_include))]
82impl<'a, T, O> Extend<&'a bool> for BitVec<T, O>
83where
84	T: BitStore,
85	O: BitOrder,
86{
87	#[inline]
88	fn extend<I>(&mut self, iter: I)
89	where I: IntoIterator<Item = &'a bool> {
90		self.extend(iter.into_iter().copied());
91	}
92}
93
94#[cfg(not(tarpaulin_include))]
95#[doc = include_str!("../../doc/vec/iter/Extend_BitRef.md")]
96impl<'a, M, T1, T2, O1, O2> Extend<BitRef<'a, M, T2, O2>> for BitVec<T1, O1>
97where
98	M: Mutability,
99	T1: BitStore,
100	T2: BitStore,
101	O1: BitOrder,
102	O2: BitOrder,
103{
104	#[inline]
105	fn extend<I>(&mut self, iter: I)
106	where I: IntoIterator<Item = BitRef<'a, M, T2, O2>> {
107		self.extend(iter.into_iter().map(|bit| *bit));
108	}
109}
110
111impl<T, O> Extend<T> for BitVec<T, O>
112where
113	T: BitStore,
114	O: BitOrder,
115{
116	#[inline]
117	fn extend<I>(&mut self, iter: I)
118	where I: IntoIterator<Item = T> {
119		let iter = iter.into_iter();
120		#[allow(irrefutable_let_patterns)]
121		if let (_, Some(n)) | (n, None) = iter.size_hint() {
122			self.reserve(n.checked_mul(bits_of::<T::Mem>()).unwrap());
123		}
124		iter.for_each(|elem| self.extend_from_bitslice(elem.view_bits::<O>()));
125	}
126}
127
128#[cfg(not(tarpaulin_include))]
129impl<'a, T, O> Extend<&'a T> for BitVec<T, O>
130where
131	T: BitStore,
132	O: BitOrder,
133{
134	#[inline]
135	fn extend<I>(&mut self, iter: I)
136	where I: IntoIterator<Item = &'a T> {
137		self.extend(
138			iter.into_iter()
139				.map(BitStore::load_value)
140				.map(<T as BitStore>::new),
141		);
142	}
143}
144
145#[cfg(not(tarpaulin_include))]
146#[doc = include_str!("../../doc/vec/iter/FromIterator_bool.md")]
147impl<T, O> FromIterator<bool> for BitVec<T, O>
148where
149	T: BitStore,
150	O: BitOrder,
151{
152	#[inline]
153	fn from_iter<I>(iter: I) -> Self
154	where I: IntoIterator<Item = bool> {
155		Self::new().tap_mut(|bv| bv.extend(iter))
156	}
157}
158
159#[cfg(not(tarpaulin_include))]
160impl<'a, T, O> FromIterator<&'a bool> for BitVec<T, O>
161where
162	T: BitStore,
163	O: BitOrder,
164{
165	#[inline]
166	fn from_iter<I>(iter: I) -> Self
167	where I: IntoIterator<Item = &'a bool> {
168		iter.into_iter().copied().collect::<Self>()
169	}
170}
171
172#[cfg(not(tarpaulin_include))]
173#[doc = include_str!("../../doc/vec/iter/FromIterator_BitRef.md")]
174impl<'a, M, T1, T2, O1, O2> FromIterator<BitRef<'a, M, T2, O2>>
175	for BitVec<T1, O1>
176where
177	M: Mutability,
178	T1: BitStore,
179	T2: BitStore,
180	O1: BitOrder,
181	O2: BitOrder,
182{
183	#[inline]
184	fn from_iter<I>(iter: I) -> Self
185	where I: IntoIterator<Item = BitRef<'a, M, T2, O2>> {
186		iter.into_iter().map(|br| *br).pipe(Self::from_iter)
187	}
188}
189
190#[cfg(not(tarpaulin_include))]
191impl<T, O> FromIterator<T> for BitVec<T, O>
192where
193	T: BitStore,
194	O: BitOrder,
195{
196	#[inline]
197	fn from_iter<I>(iter: I) -> Self
198	where I: IntoIterator<Item = T> {
199		iter.into_iter().collect::<Vec<T>>().pipe(Self::from_vec)
200	}
201}
202
203#[cfg(not(tarpaulin_include))]
204impl<'a, T, O> FromIterator<&'a T> for BitVec<T, O>
205where
206	T: BitStore,
207	O: BitOrder,
208{
209	#[inline]
210	fn from_iter<I>(iter: I) -> Self
211	where I: IntoIterator<Item = &'a T> {
212		iter.into_iter()
213			.map(<T as BitStore>::load_value)
214			.map(<T as BitStore>::new)
215			.collect::<Self>()
216	}
217}
218
219#[doc = include_str!("../../doc/vec/iter/IntoIterator.md")]
220impl<T, O> IntoIterator for BitVec<T, O>
221where
222	T: BitStore,
223	O: BitOrder,
224{
225	type IntoIter = <BitBox<T, O> as IntoIterator>::IntoIter;
226	type Item = <BitBox<T, O> as IntoIterator>::Item;
227
228	#[inline]
229	fn into_iter(self) -> Self::IntoIter {
230		self.into_boxed_bitslice().into_iter()
231	}
232}
233
234#[cfg(not(tarpaulin_include))]
235/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Vec.html#impl-IntoIterator-1)
236impl<'a, T, O> IntoIterator for &'a BitVec<T, O>
237where
238	O: BitOrder,
239	T: 'a + BitStore,
240{
241	type IntoIter = <&'a BitSlice<T, O> as IntoIterator>::IntoIter;
242	type Item = <&'a BitSlice<T, O> as IntoIterator>::Item;
243
244	#[inline]
245	fn into_iter(self) -> Self::IntoIter {
246		self.as_bitslice().iter()
247	}
248}
249
250#[cfg(not(tarpaulin_include))]
251/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Vec.html#impl-IntoIterator-2)
252impl<'a, T, O> IntoIterator for &'a mut BitVec<T, O>
253where
254	O: BitOrder,
255	T: 'a + BitStore,
256{
257	type IntoIter = <&'a mut BitSlice<T, O> as IntoIterator>::IntoIter;
258	type Item = <&'a mut BitSlice<T, O> as IntoIterator>::Item;
259
260	#[inline]
261	fn into_iter(self) -> Self::IntoIter {
262		self.as_mut_bitslice().iter_mut()
263	}
264}
265
#[doc = include_str!("../../doc/vec/iter/Drain.md")]
pub struct Drain<'a, T, O>
where
	O: BitOrder,
	T: 'a + BitStore,
{
	/// Exclusive reference to the handle that created the drain. While the
	/// drain is live, the source's length is truncated to just before the
	/// drained region (see `Drain::new`); the destructor restores the tail.
	source: &'a mut BitVec<T, O>,
	/// The range of the source bit-vector’s buffer that is being drained.
	/// Bits are read out of this pointer range as the iterator advances.
	drain:  BitPtrRange<Mut, T, O>,
	/// The range of the source bit-vector’s preserved back section. This runs
	/// from the first bit after the `.drain` to the first bit after the
	/// original bit-vector ends.
	tail:   Range<usize>,
}
281
282impl<'a, T, O> Drain<'a, T, O>
283where
284	O: BitOrder,
285	T: 'a + BitStore,
286{
287	/// Produces a new drain over a region of a bit-vector.
288	pub(super) fn new<R>(source: &'a mut BitVec<T, O>, range: R) -> Self
289	where R: RangeExt<usize> {
290		let len = source.len();
291		let region = range.normalize(None, len);
292		assert!(
293			region.end <= len,
294			"drains cannot extend past the length of their source bit-vector",
295		);
296
297		//  The `.tail` region is everything in the bit-vector after the drain.
298		let tail = region.end .. len;
299		let drain = unsafe {
300			//  Artificially truncate the source bit-vector to before the drain
301			//  region. This is restored in the destructor.
302			source.set_len_unchecked(region.start);
303			let base = source.as_mut_bitptr();
304			BitPtrRange {
305				start: base.add(region.start),
306				end:   base.add(region.end),
307			}
308		};
309
310		Self {
311			source,
312			drain,
313			tail,
314		}
315	}
316
317	/// Views the unyielded bits remaining in the drain.
318	///
319	/// ## Original
320	///
321	/// [`Drain::as_slice`](alloc::vec::Drain::as_slice)
322	#[inline]
323	#[cfg(not(tarpaulin_include))]
324	pub fn as_bitslice(&self) -> &'a BitSlice<T, O> {
325		unsafe { self.drain.clone().into_bitspan().into_bitslice_ref() }
326	}
327
328	#[inline]
329	#[cfg(not(tarpaulin_include))]
330	#[deprecated = "use `.as_bitslice()` instead"]
331	#[allow(missing_docs, clippy::missing_docs_in_private_items)]
332	pub fn as_slice(&self) -> &'a BitSlice<T, O> {
333		self.as_bitslice()
334	}
335
336	/// Attempts to fill the `drain` region with the contents of another
337	/// iterator.
338	///
339	/// The source bit-vector is extended to include each bit that the
340	/// replacement iterator provides, but is *not yet* extended to include the
341	/// `tail` region, even if the replacement iterator completely fills the
342	/// `drain` region. That work occurs in the destructor.
343	///
344	/// This is only used by [`Splice`].
345	///
346	/// [`Splice`]: crate::vec::Splice
347	#[inline]
348	fn fill(&mut self, iter: &mut impl Iterator<Item = bool>) -> FillStatus {
349		let bv = &mut *self.source;
350		let mut len = bv.len();
351		let span =
352			unsafe { bv.as_mut_bitptr().add(len).range(self.tail.start - len) };
353
354		let mut out = FillStatus::FullSpan;
355		for ptr in span {
356			if let Some(bit) = iter.next() {
357				unsafe {
358					ptr.write(bit);
359				}
360				len += 1;
361			}
362			else {
363				out = FillStatus::EmptyInput;
364				break;
365			}
366		}
367		unsafe {
368			bv.set_len_unchecked(len);
369		}
370		out
371	}
372
373	/// Reserves space for `additional` more bits at the end of the `drain`
374	/// region by moving the `tail` region upwards in memory.
375	///
376	/// This has the same effects as [`BitVec::resize`], except that the bits
377	/// are inserted between `drain` and `tail` rather than at the end.
378	///
379	/// This does not modify the drain iteration cursor, including its endpoint.
380	/// The newly inserted bits are not available for iteration.
381	///
382	/// This is only used by [`Splice`], which may insert more bits than the
383	/// drain removed.
384	///
385	/// [`BitVec::resize`]: crate::vec::BitVec::resize
386	/// [`Splice`]: crate::vec::Splice
387	unsafe fn move_tail(&mut self, additional: usize) {
388		if additional == 0 {
389			return;
390		}
391
392		let bv = &mut *self.source;
393		let tail_len = self.tail.len();
394
395		let full_len = additional + tail_len;
396		bv.reserve(full_len);
397		let new_tail_start = additional + self.tail.start;
398		let orig_tail = mem::replace(
399			&mut self.tail,
400			new_tail_start .. new_tail_start + tail_len,
401		);
402		let len = bv.len();
403		bv.set_len_unchecked(full_len);
404		bv.copy_within_unchecked(orig_tail, new_tail_start);
405		bv.set_len_unchecked(len);
406	}
407}
408
409/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Drain.html#impl-AsRef%3C%5BT%5D%3E)
410#[cfg(not(tarpaulin_include))]
411impl<T, O> AsRef<BitSlice<T, O>> for Drain<'_, T, O>
412where
413	T: BitStore,
414	O: BitOrder,
415{
416	#[inline]
417	fn as_ref(&self) -> &BitSlice<T, O> {
418		self.as_bitslice()
419	}
420}
421
422#[cfg(not(tarpaulin_include))]
423impl<T, O> Debug for Drain<'_, T, O>
424where
425	T: BitStore,
426	O: BitOrder,
427{
428	#[inline]
429	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
430		fmt.debug_tuple("Drain").field(&self.as_bitslice()).finish()
431	}
432}
433
434/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Drain.html#impl-Iterator)
435#[cfg(not(tarpaulin_include))]
436impl<T, O> Iterator for Drain<'_, T, O>
437where
438	T: BitStore,
439	O: BitOrder,
440{
441	type Item = bool;
442
443	easy_iter!();
444
445	#[inline]
446	fn next(&mut self) -> Option<Self::Item> {
447		self.drain.next().map(|bp| unsafe { bp.read() })
448	}
449
450	#[inline]
451	fn nth(&mut self, n: usize) -> Option<Self::Item> {
452		self.drain.nth(n).map(|bp| unsafe { bp.read() })
453	}
454}
455
456/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Drain.html#impl-DoubleEndedIterator)
457#[cfg(not(tarpaulin_include))]
458impl<T, O> DoubleEndedIterator for Drain<'_, T, O>
459where
460	T: BitStore,
461	O: BitOrder,
462{
463	#[inline]
464	fn next_back(&mut self) -> Option<Self::Item> {
465		self.drain.next_back().map(|bp| unsafe { bp.read() })
466	}
467
468	#[inline]
469	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
470		self.drain.nth_back(n).map(|bp| unsafe { bp.read() })
471	}
472}
473
/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Drain.html#impl-ExactSizeIterator)
#[cfg(not(tarpaulin_include))]
impl<T, O> ExactSizeIterator for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	/// Delegates to the inner bit-pointer range, which tracks exactly how
	/// many bits remain unyielded.
	#[inline]
	fn len(&self) -> usize {
		self.drain.len()
	}
}
486
/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Drain.html#impl-FusedIterator)
//  Marker: declares that once `next` returns `None`, it continues to do so.
//  `Drain` yields directly from its inner pointer range with no state that
//  restarts iteration after exhaustion.
impl<T, O> FusedIterator for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
}
494
/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Drain.html#impl-Send)
// #[allow(clippy::non_send_fields_in_send_ty)]
//  SAFETY: the bound mirrors the exclusive borrow the drain holds over its
//  source buffer — it may cross threads exactly when an `&mut BitSlice`
//  over the same storage may.
unsafe impl<T, O> Send for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
	for<'a> &'a mut BitSlice<T, O>: Send,
{
}
504
/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Drain.html#impl-Sync)
//  SAFETY: shared references to a `Drain` only expose bit-slice views of
//  the drained region, so it is `Sync` when the bit-slice itself is.
unsafe impl<T, O> Sync for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
	BitSlice<T, O>: Sync,
{
}
513
/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Drain.html#impl-Drop)
impl<T, O> Drop for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn drop(&mut self) {
		//  Take the tail range out of `self` (leaving an empty range) so it
		//  can be consumed by the copy below.
		let tail = mem::take(&mut self.tail);
		let tail_len = tail.len();
		//  With no preserved tail, the truncated length is already final.
		if tail_len == 0 {
			return;
		}

		let bv = &mut *self.source;
		let old_len = bv.len();
		unsafe {
			//  Temporarily include the tail in the live region so the copy
			//  source is in bounds, slide the tail down to the current end,
			//  then commit the final (drained) length.
			bv.set_len_unchecked(tail.end);
			bv.copy_within_unchecked(tail, old_len);
			bv.set_len_unchecked(old_len + tail_len);
		}
	}
}
537
#[repr(u8)]
#[doc = include_str!("../../doc/vec/iter/FillStatus.md")]
//  Returned by `Drain::fill` to report which side ran out first.
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
enum FillStatus {
	/// The drain span is completely filled.
	FullSpan   = 0,
	/// The replacement source is completely exhausted.
	EmptyInput = 1,
}
547
#[derive(Debug)]
#[doc = include_str!("../../doc/vec/iter/Splice.md")]
pub struct Splice<'a, T, O, I>
where
	O: BitOrder,
	T: 'a + BitStore,
	I: Iterator<Item = bool>,
{
	/// The region of the bit-vector being drained. Its destructor restores
	/// the preserved tail once the splice finishes.
	drain:  Drain<'a, T, O>,
	/// The bitstream that replaces drained bits.
	splice: I,
}
561
562impl<'a, T, O, I> Splice<'a, T, O, I>
563where
564	O: BitOrder,
565	T: 'a + BitStore,
566	I: Iterator<Item = bool>,
567{
568	/// Constructs a splice out of a drain and a replacement source.
569	pub(super) fn new(
570		drain: Drain<'a, T, O>,
571		splice: impl IntoIterator<IntoIter = I, Item = bool>,
572	) -> Self {
573		let splice = splice.into_iter();
574		Self { drain, splice }
575	}
576}
577
impl<T, O, I> Iterator for Splice<'_, T, O, I>
where
	T: BitStore,
	O: BitOrder,
	I: Iterator<Item = bool>,
{
	type Item = bool;

	easy_iter!();

	#[inline]
	fn next(&mut self) -> Option<Self::Item> {
		//  Each drained bit frees one slot at the end of the source's
		//  (truncated) live region; if the replacement stream has a bit,
		//  write it into that slot and grow the source by one.
		self.drain.next().tap_some(|_| unsafe {
			if let Some(bit) = self.splice.next() {
				let bv = &mut *self.drain.source;
				let len = bv.len();
				bv.set_len_unchecked(len + 1);
				bv.set_unchecked(len, bit);
			}
		})
	}
}
600
601#[cfg(not(tarpaulin_include))]
602impl<T, O, I> DoubleEndedIterator for Splice<'_, T, O, I>
603where
604	T: BitStore,
605	O: BitOrder,
606	I: Iterator<Item = bool>,
607{
608	#[inline]
609	fn next_back(&mut self) -> Option<Self::Item> {
610		self.drain.next_back()
611	}
612
613	#[inline]
614	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
615		self.drain.nth_back(n)
616	}
617}
618
#[cfg(not(tarpaulin_include))]
impl<T, O, I> ExactSizeIterator for Splice<'_, T, O, I>
where
	T: BitStore,
	O: BitOrder,
	I: Iterator<Item = bool>,
{
	/// Delegates to the drain: the splice yields exactly the bits the drain
	/// still holds.
	#[inline]
	fn len(&self) -> usize {
		self.drain.len()
	}
}
631
//  Marker: declares that once `next` returns `None`, it continues to do so.
//  `Splice::next` yields only while the inner drain does.
impl<T, O, I> FusedIterator for Splice<'_, T, O, I>
where
	T: BitStore,
	O: BitOrder,
	I: Iterator<Item = bool>,
{
}
639
/// [Original](https://doc.rust-lang.org/alloc/vec/struct.Drain.html#impl-Drop)
impl<T, O, I> Drop for Splice<'_, T, O, I>
where
	T: BitStore,
	O: BitOrder,
	I: Iterator<Item = bool>,
{
	#[inline]
	fn drop(&mut self) {
		let tail = self.drain.tail.clone();
		let tail_len = tail.len();
		let bv = &mut *self.drain.source;

		//  With no preserved tail, the remaining replacement bits can simply
		//  be appended to the source; `Drain::drop` then has nothing to move.
		if tail_len == 0 {
			bv.extend(self.splice.by_ref());
			return;
		}

		//  Fill the drained gap from the replacement stream. If the stream
		//  ran dry, the ordinary `Drain` destructor closes the gap.
		if let FillStatus::EmptyInput = self.drain.fill(&mut self.splice) {
			return;
		}

		//  The gap is full but the stream has more bits: take the stream's
		//  hinted remaining count (upper bound if present, else lower).
		let len = match self.splice.size_hint() {
			(n, None) | (_, Some(n)) => n,
		};

		//  Make room for the hinted remainder between the gap and the tail,
		//  then fill again.
		unsafe {
			self.drain.move_tail(len);
		}
		if let FillStatus::EmptyInput = self.drain.fill(&mut self.splice) {
			return;
		}

		/* If the `.splice` *still* has bits to provide, then its
		 * `.size_hint()` is untrustworthy. Collect the `.splice` into a
		 * bit-vector, then insert the bit-vector into the spliced region.
		 */
		let mut collected =
			self.splice.by_ref().collect::<BitVec<T, O>>().into_iter();
		let len = collected.len();
		if len > 0 {
			unsafe {
				self.drain.move_tail(len);
			}
			let filled = self.drain.fill(collected.by_ref());
			debug_assert_eq!(filled, FillStatus::EmptyInput);
			debug_assert_eq!(collected.len(), 0);
		}
	}
}
689}