1#![doc = include_str!("../../doc/vec/iter.md")]
2
3use alloc::vec::Vec;
4use core::{
5 fmt::{
6 self,
7 Debug,
8 Formatter,
9 },
10 iter::{
11 FromIterator,
12 FusedIterator,
13 },
14 mem,
15 ops::Range,
16};
17
18use tap::{
19 Pipe,
20 Tap,
21 TapOptional,
22};
23use wyz::{
24 comu::{
25 Mut,
26 Mutability,
27 },
28 range::RangeExt,
29};
30
31use super::BitVec;
32use crate::{
33 boxed::BitBox,
34 mem::bits_of,
35 order::BitOrder,
36 ptr::{
37 BitPtrRange,
38 BitRef,
39 },
40 slice::BitSlice,
41 store::BitStore,
42 view::BitView,
43};
44
#[doc = include_str!("../../doc/vec/iter/Extend_bool.md")]
impl<T, O> Extend<bool> for BitVec<T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn extend<I>(&mut self, iter: I)
	where I: IntoIterator<Item = bool> {
		let mut iter = iter.into_iter();
		//  Pre-allocate from the size-hint: prefer the upper bound when one
		//  exists, otherwise fall back to the lower bound.
		#[allow(irrefutable_let_patterns)]
		if let (_, Some(n)) | (n, None) = iter.size_hint() {
			self.reserve(n);
			let len = self.len();
			//  `wrapping_add` cannot actually wrap here: `reserve(n)` above
			//  panics before `len + n` can exceed the capacity limit.
			let new_len = len.wrapping_add(n);
			//  View the reserved-but-dead region as a mutable bit-slice.
			let new = unsafe { self.get_unchecked_mut(len .. new_len) };

			//  Zip destination bit-pointers with the iterator, writing each
			//  pulled bit, and count how many bits were actually produced
			//  (the hint may over-estimate).
			let pulled = new
				.as_mut_bitptr_range()
				.zip(iter.by_ref())
				.map(|(ptr, bit)| unsafe {
					ptr.write(bit);
				})
				.count();
			//  Commit only the bits that were actually written.
			unsafe {
				self.set_len(len + pulled);
			}
		}

		//  If the hint under-estimated, append the remainder one at a time.
		iter.for_each(|bit| self.push(bit));
	}
}
80
81#[cfg(not(tarpaulin_include))]
82impl<'a, T, O> Extend<&'a bool> for BitVec<T, O>
83where
84 T: BitStore,
85 O: BitOrder,
86{
87 #[inline]
88 fn extend<I>(&mut self, iter: I)
89 where I: IntoIterator<Item = &'a bool> {
90 self.extend(iter.into_iter().copied());
91 }
92}
93
94#[cfg(not(tarpaulin_include))]
95#[doc = include_str!("../../doc/vec/iter/Extend_BitRef.md")]
96impl<'a, M, T1, T2, O1, O2> Extend<BitRef<'a, M, T2, O2>> for BitVec<T1, O1>
97where
98 M: Mutability,
99 T1: BitStore,
100 T2: BitStore,
101 O1: BitOrder,
102 O2: BitOrder,
103{
104 #[inline]
105 fn extend<I>(&mut self, iter: I)
106 where I: IntoIterator<Item = BitRef<'a, M, T2, O2>> {
107 self.extend(iter.into_iter().map(|bit| *bit));
108 }
109}
110
111impl<T, O> Extend<T> for BitVec<T, O>
112where
113 T: BitStore,
114 O: BitOrder,
115{
116 #[inline]
117 fn extend<I>(&mut self, iter: I)
118 where I: IntoIterator<Item = T> {
119 let iter = iter.into_iter();
120 #[allow(irrefutable_let_patterns)]
121 if let (_, Some(n)) | (n, None) = iter.size_hint() {
122 self.reserve(n.checked_mul(bits_of::<T::Mem>()).unwrap());
123 }
124 iter.for_each(|elem| self.extend_from_bitslice(elem.view_bits::<O>()));
125 }
126}
127
128#[cfg(not(tarpaulin_include))]
129impl<'a, T, O> Extend<&'a T> for BitVec<T, O>
130where
131 T: BitStore,
132 O: BitOrder,
133{
134 #[inline]
135 fn extend<I>(&mut self, iter: I)
136 where I: IntoIterator<Item = &'a T> {
137 self.extend(
138 iter.into_iter()
139 .map(BitStore::load_value)
140 .map(<T as BitStore>::new),
141 );
142 }
143}
144
145#[cfg(not(tarpaulin_include))]
146#[doc = include_str!("../../doc/vec/iter/FromIterator_bool.md")]
147impl<T, O> FromIterator<bool> for BitVec<T, O>
148where
149 T: BitStore,
150 O: BitOrder,
151{
152 #[inline]
153 fn from_iter<I>(iter: I) -> Self
154 where I: IntoIterator<Item = bool> {
155 Self::new().tap_mut(|bv| bv.extend(iter))
156 }
157}
158
159#[cfg(not(tarpaulin_include))]
160impl<'a, T, O> FromIterator<&'a bool> for BitVec<T, O>
161where
162 T: BitStore,
163 O: BitOrder,
164{
165 #[inline]
166 fn from_iter<I>(iter: I) -> Self
167 where I: IntoIterator<Item = &'a bool> {
168 iter.into_iter().copied().collect::<Self>()
169 }
170}
171
172#[cfg(not(tarpaulin_include))]
173#[doc = include_str!("../../doc/vec/iter/FromIterator_BitRef.md")]
174impl<'a, M, T1, T2, O1, O2> FromIterator<BitRef<'a, M, T2, O2>>
175 for BitVec<T1, O1>
176where
177 M: Mutability,
178 T1: BitStore,
179 T2: BitStore,
180 O1: BitOrder,
181 O2: BitOrder,
182{
183 #[inline]
184 fn from_iter<I>(iter: I) -> Self
185 where I: IntoIterator<Item = BitRef<'a, M, T2, O2>> {
186 iter.into_iter().map(|br| *br).pipe(Self::from_iter)
187 }
188}
189
190#[cfg(not(tarpaulin_include))]
191impl<T, O> FromIterator<T> for BitVec<T, O>
192where
193 T: BitStore,
194 O: BitOrder,
195{
196 #[inline]
197 fn from_iter<I>(iter: I) -> Self
198 where I: IntoIterator<Item = T> {
199 iter.into_iter().collect::<Vec<T>>().pipe(Self::from_vec)
200 }
201}
202
203#[cfg(not(tarpaulin_include))]
204impl<'a, T, O> FromIterator<&'a T> for BitVec<T, O>
205where
206 T: BitStore,
207 O: BitOrder,
208{
209 #[inline]
210 fn from_iter<I>(iter: I) -> Self
211 where I: IntoIterator<Item = &'a T> {
212 iter.into_iter()
213 .map(<T as BitStore>::load_value)
214 .map(<T as BitStore>::new)
215 .collect::<Self>()
216 }
217}
218
219#[doc = include_str!("../../doc/vec/iter/IntoIterator.md")]
220impl<T, O> IntoIterator for BitVec<T, O>
221where
222 T: BitStore,
223 O: BitOrder,
224{
225 type IntoIter = <BitBox<T, O> as IntoIterator>::IntoIter;
226 type Item = <BitBox<T, O> as IntoIterator>::Item;
227
228 #[inline]
229 fn into_iter(self) -> Self::IntoIter {
230 self.into_boxed_bitslice().into_iter()
231 }
232}
233
234#[cfg(not(tarpaulin_include))]
235impl<'a, T, O> IntoIterator for &'a BitVec<T, O>
237where
238 O: BitOrder,
239 T: 'a + BitStore,
240{
241 type IntoIter = <&'a BitSlice<T, O> as IntoIterator>::IntoIter;
242 type Item = <&'a BitSlice<T, O> as IntoIterator>::Item;
243
244 #[inline]
245 fn into_iter(self) -> Self::IntoIter {
246 self.as_bitslice().iter()
247 }
248}
249
250#[cfg(not(tarpaulin_include))]
251impl<'a, T, O> IntoIterator for &'a mut BitVec<T, O>
253where
254 O: BitOrder,
255 T: 'a + BitStore,
256{
257 type IntoIter = <&'a mut BitSlice<T, O> as IntoIterator>::IntoIter;
258 type Item = <&'a mut BitSlice<T, O> as IntoIterator>::Item;
259
260 #[inline]
261 fn into_iter(self) -> Self::IntoIter {
262 self.as_mut_bitslice().iter_mut()
263 }
264}
265
#[doc = include_str!("../../doc/vec/iter/Drain.md")]
pub struct Drain<'a, T, O>
where
	O: BitOrder,
	T: 'a + BitStore,
{
	/// Exclusive borrow of the bit-vector being drained.
	source: &'a mut BitVec<T, O>,
	/// Pointer range over the bits being removed and yielded.
	drain: BitPtrRange<Mut, T, O>,
	/// Index range of the source's original contents that lie *after* the
	/// drained region; `Drop` restores this section.
	tail: Range<usize>,
}
281
impl<'a, T, O> Drain<'a, T, O>
where
	O: BitOrder,
	T: 'a + BitStore,
{
	/// Produces a new drain over a region of a bit-vector.
	///
	/// The source's live length is truncated to the front edge of the
	/// drained region; `tail` records where the kept-back rear section
	/// lives so that `Drop` can splice it back on.
	pub(super) fn new<R>(source: &'a mut BitVec<T, O>, range: R) -> Self
	where R: RangeExt<usize> {
		let len = source.len();
		//  Normalize the caller's range into `start .. end` within the
		//  source's length.
		let region = range.normalize(None, len);
		assert!(
			region.end <= len,
			"drains cannot extend past the length of their source bit-vector",
		);

		//  The section of the source after the drained region.
		let tail = region.end .. len;
		let drain = unsafe {
			//  Shrink the source so its live region ends where the drain
			//  begins; the drain now exclusively covers `region`.
			source.set_len_unchecked(region.start);
			let base = source.as_mut_bitptr();
			//  Pointer range spanning exactly the bits being drained.
			BitPtrRange {
				start: base.add(region.start),
				end: base.add(region.end),
			}
		};

		Self {
			source,
			drain,
			tail,
		}
	}

	/// Views the not-yet-yielded bits remaining in the drain.
	#[inline]
	#[cfg(not(tarpaulin_include))]
	pub fn as_bitslice(&self) -> &'a BitSlice<T, O> {
		unsafe { self.drain.clone().into_bitspan().into_bitslice_ref() }
	}

	/// Deprecated alias of [`Self::as_bitslice`].
	#[inline]
	#[cfg(not(tarpaulin_include))]
	#[deprecated = "use `.as_bitslice()` instead"]
	#[allow(missing_docs, clippy::missing_docs_in_private_items)]
	pub fn as_slice(&self) -> &'a BitSlice<T, O> {
		self.as_bitslice()
	}

	/// Writes bits from `iter` into the gap between the source's current
	/// end and the start of the preserved tail, raising the source length
	/// by one per bit written.
	///
	/// Returns whether the entire gap was filled, or whether the iterator
	/// ran out of bits first.
	fn fill(&mut self, iter: &mut impl Iterator<Item = bool>) -> FillStatus {
		let bv = &mut *self.source;
		let mut len = bv.len();
		//  Pointer range over the gap: from the current end of the live
		//  region up to the start of the tail section.
		let span =
			unsafe { bv.as_mut_bitptr().add(len).range(self.tail.start - len) };

		let mut out = FillStatus::FullSpan;
		for ptr in span {
			if let Some(bit) = iter.next() {
				unsafe {
					ptr.write(bit);
				}
				len += 1;
			}
			else {
				out = FillStatus::EmptyInput;
				break;
			}
		}
		//  Commit however many bits were actually written.
		unsafe {
			bv.set_len_unchecked(len);
		}
		out
	}

	/// Moves the preserved tail section `additional` bits further back in
	/// the buffer, reserving capacity as needed, to widen the insertion gap.
	///
	/// ## Safety
	///
	/// NOTE(review): callers must only invoke this while the region between
	/// the source's live end and the tail is dead (drained) — confirm
	/// against each call site in `Splice`.
	unsafe fn move_tail(&mut self, additional: usize) {
		if additional == 0 {
			return;
		}

		let bv = &mut *self.source;
		let tail_len = self.tail.len();

		//  Ensure capacity for the widened gap plus the tail itself.
		let full_len = additional + tail_len;
		bv.reserve(full_len);
		let new_tail_start = additional + self.tail.start;
		//  Swap in the relocated tail range, keeping the old one for the copy.
		let orig_tail = mem::replace(
			&mut self.tail,
			new_tail_start .. new_tail_start + tail_len,
		);
		let len = bv.len();
		//  Temporarily adjust the live length so the copy region is
		//  considered in-bounds, perform the move, then restore the true
		//  length.
		bv.set_len_unchecked(full_len);
		bv.copy_within_unchecked(orig_tail, new_tail_start);
		bv.set_len_unchecked(len);
	}
}
408
409#[cfg(not(tarpaulin_include))]
411impl<T, O> AsRef<BitSlice<T, O>> for Drain<'_, T, O>
412where
413 T: BitStore,
414 O: BitOrder,
415{
416 #[inline]
417 fn as_ref(&self) -> &BitSlice<T, O> {
418 self.as_bitslice()
419 }
420}
421
422#[cfg(not(tarpaulin_include))]
423impl<T, O> Debug for Drain<'_, T, O>
424where
425 T: BitStore,
426 O: BitOrder,
427{
428 #[inline]
429 fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
430 fmt.debug_tuple("Drain").field(&self.as_bitslice()).finish()
431 }
432}
433
434#[cfg(not(tarpaulin_include))]
436impl<T, O> Iterator for Drain<'_, T, O>
437where
438 T: BitStore,
439 O: BitOrder,
440{
441 type Item = bool;
442
443 easy_iter!();
444
445 #[inline]
446 fn next(&mut self) -> Option<Self::Item> {
447 self.drain.next().map(|bp| unsafe { bp.read() })
448 }
449
450 #[inline]
451 fn nth(&mut self, n: usize) -> Option<Self::Item> {
452 self.drain.nth(n).map(|bp| unsafe { bp.read() })
453 }
454}
455
456#[cfg(not(tarpaulin_include))]
458impl<T, O> DoubleEndedIterator for Drain<'_, T, O>
459where
460 T: BitStore,
461 O: BitOrder,
462{
463 #[inline]
464 fn next_back(&mut self) -> Option<Self::Item> {
465 self.drain.next_back().map(|bp| unsafe { bp.read() })
466 }
467
468 #[inline]
469 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
470 self.drain.nth_back(n).map(|bp| unsafe { bp.read() })
471 }
472}
473
#[cfg(not(tarpaulin_include))]
impl<T, O> ExactSizeIterator for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	/// The remaining length is exactly the un-yielded span of the drain's
	/// pointer range.
	#[inline]
	fn len(&self) -> usize {
		self.drain.len()
	}
}
486
//  NOTE(review): fusion is inherited from the inner `BitPtrRange`, which is
//  presumed to stay empty once exhausted — confirm against its impl.
impl<T, O> FusedIterator for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
}
494
//  SAFETY: `Drain` holds an exclusive borrow of its source bit-vector plus a
//  pointer range into it, so it may cross threads exactly when an exclusive
//  bit-slice reference may — which is what the bound below requires.
unsafe impl<T, O> Send for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
	for<'a> &'a mut BitSlice<T, O>: Send,
{
}
504
//  SAFETY: shared references to a `Drain` only permit viewing the drained
//  bits, so it is `Sync` whenever the underlying bit-slice region is.
unsafe impl<T, O> Sync for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
	BitSlice<T, O>: Sync,
{
}
513
impl<T, O> Drop for Drain<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	/// Re-attaches the preserved tail section to the end of whatever the
	/// source bit-vector now holds, discarding any un-yielded drained bits.
	#[inline]
	fn drop(&mut self) {
		//  Take the tail range, leaving an empty range behind.
		let tail = mem::take(&mut self.tail);
		let tail_len = tail.len();
		//  No tail means the truncated source is already in its final state.
		if tail_len == 0 {
			return;
		}

		let bv = &mut *self.source;
		let old_len = bv.len();
		unsafe {
			//  Extend the live region to cover the tail's current location
			//  so that the copy source is in bounds, move the tail down to
			//  abut the live contents, then commit the final length.
			bv.set_len_unchecked(tail.end);
			bv.copy_within_unchecked(tail, old_len);
			bv.set_len_unchecked(old_len + tail_len);
		}
	}
}
537
#[repr(u8)]
#[doc = include_str!("../../doc/vec/iter/FillStatus.md")]
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
enum FillStatus {
	/// The fill wrote into the entire target span.
	FullSpan = 0,
	/// The input iterator ran out of bits before the span was full.
	EmptyInput = 1,
}
547
#[derive(Debug)]
#[doc = include_str!("../../doc/vec/iter/Splice.md")]
pub struct Splice<'a, T, O, I>
where
	O: BitOrder,
	T: 'a + BitStore,
	I: Iterator<Item = bool>,
{
	/// Drain over the region being replaced.
	drain: Drain<'a, T, O>,
	/// Iterator of replacement bits written into the drained region.
	splice: I,
}
561
562impl<'a, T, O, I> Splice<'a, T, O, I>
563where
564 O: BitOrder,
565 T: 'a + BitStore,
566 I: Iterator<Item = bool>,
567{
568 pub(super) fn new(
570 drain: Drain<'a, T, O>,
571 splice: impl IntoIterator<IntoIter = I, Item = bool>,
572 ) -> Self {
573 let splice = splice.into_iter();
574 Self { drain, splice }
575 }
576}
577
impl<T, O, I> Iterator for Splice<'_, T, O, I>
where
	T: BitStore,
	O: BitOrder,
	I: Iterator<Item = bool>,
{
	type Item = bool;

	easy_iter!();

	/// Yields the next drained bit; each bit yielded vacates one slot, which
	/// is immediately refilled with a bit pulled from the replacement
	/// iterator (when one is available) by extending the source's live
	/// region over the newly-written slot.
	#[inline]
	fn next(&mut self) -> Option<Self::Item> {
		self.drain.next().tap_some(|_| unsafe {
			if let Some(bit) = self.splice.next() {
				let bv = &mut *self.drain.source;
				let len = bv.len();
				bv.set_len_unchecked(len + 1);
				bv.set_unchecked(len, bit);
			}
		})
	}
}
600
#[cfg(not(tarpaulin_include))]
impl<T, O, I> DoubleEndedIterator for Splice<'_, T, O, I>
where
	T: BitStore,
	O: BitOrder,
	I: Iterator<Item = bool>,
{
	//  Unlike forward iteration, reverse iteration only drains: it does not
	//  pull replacement bits here — those are written when the `Splice`
	//  drops.
	#[inline]
	fn next_back(&mut self) -> Option<Self::Item> {
		self.drain.next_back()
	}

	#[inline]
	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
		self.drain.nth_back(n)
	}
}
618
#[cfg(not(tarpaulin_include))]
impl<T, O, I> ExactSizeIterator for Splice<'_, T, O, I>
where
	T: BitStore,
	O: BitOrder,
	I: Iterator<Item = bool>,
{
	/// The remaining length is that of the underlying drain.
	#[inline]
	fn len(&self) -> usize {
		self.drain.len()
	}
}
631
//  NOTE(review): fusion is inherited from the inner `Drain` — confirmed
//  fused above in this module.
impl<T, O, I> FusedIterator for Splice<'_, T, O, I>
where
	T: BitStore,
	O: BitOrder,
	I: Iterator<Item = bool>,
{
}
639
impl<T, O, I> Drop for Splice<'_, T, O, I>
where
	T: BitStore,
	O: BitOrder,
	I: Iterator<Item = bool>,
{
	/// Writes any remaining replacement bits into the spliced region,
	/// growing it as needed, before the inner `Drain` restores the tail.
	#[inline]
	fn drop(&mut self) {
		let tail = self.drain.tail.clone();
		let tail_len = tail.len();
		let bv = &mut *self.drain.source;

		//  With no tail section to preserve, the remaining replacement bits
		//  can simply be appended; `Drain::drop` then has nothing to move.
		if tail_len == 0 {
			bv.extend(self.splice.by_ref());
			return;
		}

		//  Phase 1: fill the existing drained gap from the replacement
		//  iterator. If the iterator ran dry, `Drain::drop` closes the gap.
		if let FillStatus::EmptyInput = self.drain.fill(&mut self.splice) {
			return;
		}

		//  Phase 2: the gap is full but bits remain. Widen the gap by the
		//  size-hint (upper bound when available, else lower) in one move,
		//  then fill again.
		let len = match self.splice.size_hint() {
			(n, None) | (_, Some(n)) => n,
		};

		unsafe {
			self.drain.move_tail(len);
		}
		if let FillStatus::EmptyInput = self.drain.fill(&mut self.splice) {
			return;
		}

		//  Phase 3: the hint under-estimated. Collect the stragglers into a
		//  temporary bit-vector so their exact count is known, widen the gap
		//  once more, and fill one final time — this must now exhaust the
		//  input exactly.
		let mut collected =
			self.splice.by_ref().collect::<BitVec<T, O>>().into_iter();
		let len = collected.len();
		if len > 0 {
			unsafe {
				self.drain.move_tail(len);
			}
			let filled = self.drain.fill(collected.by_ref());
			debug_assert_eq!(filled, FillStatus::EmptyInput);
			debug_assert_eq!(collected.len(), 0);
		}
	}
}