// bitvec/slice/iter.rs

1#![doc = include_str!("../../doc/slice/iter.md")]
2
3use core::{
4	cmp,
5	fmt::{
6		self,
7		Debug,
8		Formatter,
9	},
10	iter::{
11		FusedIterator,
12		Map,
13	},
14	marker::PhantomData,
15	mem,
16};
17
18use wyz::comu::{
19	Const,
20	Mut,
21};
22
23use super::{
24	BitSlice,
25	BitSliceIndex,
26};
27use crate::{
28	order::{
29		BitOrder,
30		Lsb0,
31		Msb0,
32	},
33	ptr::{
34		BitPtrRange,
35		BitRef,
36	},
37	store::BitStore,
38};
39
/// [Original](https://doc.rust-lang.org/core/iter/trait.IntoIterator.html#impl-IntoIterator-1)
///
/// Enables `for bit in &bitslice` loops. Yields the same proxy references as
/// [`BitSlice::iter`].
#[cfg(not(tarpaulin_include))]
impl<'a, T, O> IntoIterator for &'a BitSlice<T, O>
where
	T: 'a + BitStore,
	O: BitOrder,
{
	type IntoIter = Iter<'a, T, O>;
	type Item = <Self::IntoIter as Iterator>::Item;

	#[inline]
	fn into_iter(self) -> Self::IntoIter {
		Iter::new(self)
	}
}
55
/// [Original](https://doc.rust-lang.org/core/iter/trait.IntoIterator.html#impl-IntoIterator-3)
///
/// Enables `for bit in &mut bitslice` loops. Yields the same mutable proxy
/// references as [`BitSlice::iter_mut`].
#[cfg(not(tarpaulin_include))]
impl<'a, T, O> IntoIterator for &'a mut BitSlice<T, O>
where
	T: 'a + BitStore,
	O: BitOrder,
{
	type IntoIter = IterMut<'a, T, O>;
	type Item = <Self::IntoIter as Iterator>::Item;

	#[inline]
	fn into_iter(self) -> Self::IntoIter {
		IterMut::new(self)
	}
}
71
#[repr(transparent)]
#[doc = include_str!("../../doc/slice/iter/Iter.md")]
pub struct Iter<'a, T, O>
where
	T: 'a + BitStore,
	O: BitOrder,
{
	/// A dual-pointer range of the bit-slice undergoing iteration.
	///
	/// This structure stores two fully-decoded pointers to the first live and
	/// first dead bits, trading increased size (three words instead of two) for
	/// faster performance when iterating.
	range: BitPtrRange<Const, T, O>,
	/// `Iter` is semantically equivalent to a `&BitSlice`.
	_ref:  PhantomData<&'a BitSlice<T, O>>,
}
88
impl<'a, T, O> Iter<'a, T, O>
where
	T: 'a + BitStore,
	O: BitOrder,
{
	/// Constructs the iterator from a bit-slice's decoded pointer range.
	#[allow(missing_docs, clippy::missing_docs_in_private_items)]
	pub(super) fn new(slice: &'a BitSlice<T, O>) -> Self {
		Self {
			range: slice.as_bitptr_range(),
			_ref:  PhantomData,
		}
	}

	/// Views the currently unyielded bit-slice.
	///
	/// Because the iterator is a shared view, the returned bit-slice does not
	/// cause a lifetime conflict, and the iterator can continue to be used
	/// while it exists.
	///
	/// ## Original
	///
	/// [`Iter::as_slice`](core::slice::Iter::as_slice)
	///
	/// ## Examples
	///
	/// ```rust
	/// use bitvec::prelude::*;
	///
	/// let bits = bits![0, 0, 1, 1];
	/// let mut iter = bits.iter();
	///
	/// assert_eq!(iter.as_bitslice(), bits![0, 0, 1, 1]);
	/// assert!(!*iter.nth(1).unwrap());
	/// assert_eq!(iter.as_bitslice(), bits![1, 1]);
	/// ```
	#[inline]
	#[allow(missing_docs, clippy::missing_docs_in_private_items)]
	pub fn as_bitslice(&self) -> &'a BitSlice<T, O> {
		//  The range was produced from a live `&'a BitSlice` and only ever
		//  shrinks, so re-encoding it as a slice view is sound.
		unsafe { self.range.clone().into_bitspan().into_bitslice_ref() }
	}

	#[inline]
	#[cfg(not(tarpaulin_include))]
	#[deprecated = "use `.as_bitslice()` instead"]
	#[allow(missing_docs, clippy::missing_docs_in_private_items)]
	pub fn as_slice(&self) -> &'a BitSlice<T, O> {
		self.as_bitslice()
	}

	/// Adapts the iterator to yield regular `&bool` references rather than the
	/// [proxy reference][0].
	///
	/// This allows the iterator to be used in APIs that expect ordinary
	/// references. It reads from the proxy and provides an equivalent
	/// `&'static bool`. The address value of the yielded reference is not
	/// related to the addresses covered by the `BitSlice` buffer in any way.
	///
	/// ## Examples
	///
	/// ```rust
	/// use bitvec::prelude::*;
	///
	/// let bits = bits![0, 1];
	/// let mut iter = bits.iter().by_refs();
	/// assert_eq!(iter.next(), Some(&false));
	/// assert_eq!(iter.next(), Some(&true));
	/// assert!(iter.next().is_none());
	/// ```
	///
	/// [0]: crate::ptr::BitRef
	#[inline]
	pub fn by_refs(self) -> BitRefIter<'a, T, O> {
		//  `&true` / `&false` are promoted to `'static` constants, so the
		//  yielded references never point into the bit-slice buffer.
		self.by_vals().map(|bit| match bit {
			true => &true,
			false => &false,
		})
	}

	/// Adapts the iterator to yield `bool` values rather than the
	/// [proxy reference][0].
	///
	/// This allows the iterator to be used in APIs that expect direct values.
	/// It dereferences the proxy and yields the referent `bool` directly. It
	/// replaces `Iterator::copied`, which is not available on this type.
	///
	/// ## Original
	///
	/// [`Iterator::copied`](core::iter::Iterator::copied)
	///
	/// ## Performance
	///
	/// This bypasses the construction of a `BitRef` for each yielded bit. Do
	/// not use `bits.as_bitptr_range().map(|bp| unsafe { bp.read() })` in a
	/// misguided attempt to eke out some additional performance in your code.
	///
	/// This iterator is already the fastest possible walk across a bit-slice.
	/// You do not need to beat it.
	///
	/// ## Examples
	///
	/// ```rust
	/// use bitvec::prelude::*;
	///
	/// let bits = bits![0, 1];
	/// let mut iter = bits.iter().by_vals();
	/// assert_eq!(iter.next(), Some(false));
	/// assert_eq!(iter.next(), Some(true));
	/// assert!(iter.next().is_none());
	/// ```
	///
	/// [0]: crate::ptr::BitRef
	#[inline]
	pub fn by_vals(self) -> BitValIter<'a, T, O> {
		//  Transfers the pointer range as-is; only the yielded item type
		//  changes.
		BitValIter {
			range: self.range,
			_life: PhantomData,
		}
	}

	/// Yields `bool` values directly, rather than [proxy references][0].
	///
	/// The original slice iterator yields true `&bool`, and as such allows
	/// [`Iterator::copied`] to exist. This iterator does not satisfy the bounds
	/// for that method, so `.copied()` is provided as an inherent in order to
	/// maintain source compatibility. Prefer [`.by_vals()`] instead, which
	/// avoids the name collision while still making clear that it yields `bool`
	/// values.
	///
	/// [`Iterator::copied`]: core::iter::Iterator::copied
	/// [`.by_vals()`]: Self::by_vals
	/// [0]: crate::ptr::BitRef
	#[inline]
	#[cfg(not(tarpaulin_include))]
	#[deprecated = "`Iterator::copied` does not exist on this type. Use \
	                `.by_vals()` instead"]
	pub fn copied(self) -> BitValIter<'a, T, O> {
		self.by_vals()
	}
}
228
229/// [Original](https://doc.rust-lang.org/core/slice/struct.Iter.html#impl-Clone)
230#[cfg(not(tarpaulin_include))]
231impl<T, O> Clone for Iter<'_, T, O>
232where
233	T: BitStore,
234	O: BitOrder,
235{
236	#[inline]
237	fn clone(&self) -> Self {
238		Self {
239			range: self.range.clone(),
240			..*self
241		}
242	}
243}
244
/// [Original](https://doc.rust-lang.org/core/slice/struct.Iter.html#impl-AsRef%3C%5BT%5D%3E)
///
/// Borrows the unyielded remainder of the iteration as a bit-slice.
#[cfg(not(tarpaulin_include))]
impl<T, O> AsRef<BitSlice<T, O>> for Iter<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn as_ref(&self) -> &BitSlice<T, O> {
		self.as_bitslice()
	}
}
257
258/// [Original](https://doc.rust-lang.org/core/slice/struct.Iter.html#impl-Debug)
259#[cfg(not(tarpaulin_include))]
260impl<T, O> Debug for Iter<'_, T, O>
261where
262	T: BitStore,
263	O: BitOrder,
264{
265	#[inline]
266	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
267		fmt.debug_tuple("Iter").field(&self.as_bitslice()).finish()
268	}
269}
270
#[repr(transparent)]
#[doc = include_str!("../../doc/slice/iter/IterMut.md")]
pub struct IterMut<'a, T, O>
where
	T: 'a + BitStore,
	O: BitOrder,
{
	/// A dual-pointer range of the bit-slice undergoing iteration.
	///
	/// This structure stores two fully-decoded pointers to the first live and
	/// first dead bits, trading increased size (three words instead of two) for
	/// faster performance when iterating.
	range: BitPtrRange<Mut, T::Alias, O>,
	/// `IterMut` is semantically equivalent to an aliased `&mut BitSlice`.
	_ref:  PhantomData<&'a mut BitSlice<T::Alias, O>>,
}
287
impl<'a, T, O> IterMut<'a, T, O>
where
	T: 'a + BitStore,
	O: BitOrder,
{
	/// Constructs the iterator from a bit-slice. The slice is alias-tainted,
	/// because each yielded proxy is a distinct write-capable handle into the
	/// same memory region.
	#[allow(missing_docs, clippy::missing_docs_in_private_items)]
	pub(super) fn new(slice: &'a mut BitSlice<T, O>) -> Self {
		Self {
			range: slice.alias_mut().as_mut_bitptr_range(),
			_ref:  PhantomData,
		}
	}

	/// Views the underlying bit-slice as a subslice of the original data.
	///
	/// This consumes the iterator in order to avoid creating aliasing
	/// references between the returned subslice (which has the original
	/// lifetime, and is not borrowed from the iterator) and the proxies the
	/// iterator produces.
	///
	/// ## Original
	///
	/// [`IterMut::into_slice`](core::slice::IterMut::into_slice)
	///
	/// ## Examples
	///
	/// ```rust
	/// use bitvec::prelude::*;
	///
	/// let bits = bits![mut 0, 0, 1, 1];
	/// let mut iter = bits.iter_mut();
	///
	/// *iter.next().unwrap() = true;
	/// assert_eq!(iter.into_bitslice(), bits![0, 1, 1]);
	/// assert!(bits[0]);
	/// ```
	#[inline]
	#[cfg(not(tarpaulin_include))]
	pub fn into_bitslice(self) -> &'a mut BitSlice<T::Alias, O> {
		//  Consuming `self` guarantees no proxies can be produced afterwards,
		//  so the exclusive view is sound.
		unsafe { self.range.into_bitspan().into_bitslice_mut() }
	}

	#[inline]
	#[cfg(not(tarpaulin_include))]
	#[deprecated = "use `.into_bitslice()` instead"]
	#[allow(missing_docs, clippy::missing_docs_in_private_items)]
	pub fn into_slice(self) -> &'a mut BitSlice<T::Alias, O> {
		self.into_bitslice()
	}

	/// Views the remaining bit-slice that has not yet been iterated.
	///
	/// This borrows the iterator’s own lifetime, preventing it from being used
	/// while the bit-slice view exists and thus ensuring that no aliasing
	/// references are created. Bits that the iterator has already yielded are
	/// not included in the produced bit-slice.
	///
	/// ## Original
	///
	/// [`IterMut::as_slice`](core::slice::IterMut::as_slice)
	///
	/// ## Examples
	///
	/// ```rust
	/// use bitvec::prelude::*;
	///
	/// let bits = bits![mut 0; 4];
	/// let mut iter = bits.iter_mut();
	///
	/// *iter.next().unwrap() = true;
	/// assert_eq!(iter.as_bitslice(), bits![0; 3]);
	/// *iter.next().unwrap() = true;
	/// assert_eq!(iter.as_bitslice(), bits![0; 2]);
	///
	/// assert_eq!(bits, bits![1, 1, 0, 0]);
	/// ```
	#[inline]
	#[cfg(not(tarpaulin_include))]
	pub fn as_bitslice(&self) -> &BitSlice<T::Alias, O> {
		//  Note: shared view only; the returned slice borrows `self`, so no
		//  mutable proxy can coexist with it.
		unsafe { self.range.clone().into_bitspan().into_bitslice_ref() }
	}

	#[inline]
	#[cfg(not(tarpaulin_include))]
	#[deprecated = "use `.as_bitslice()` instead"]
	#[allow(missing_docs, clippy::missing_docs_in_private_items)]
	pub fn as_slice(&self) -> &BitSlice<T::Alias, O> {
		self.as_bitslice()
	}
}
378
/// [Original](https://doc.rust-lang.org/core/slice/struct.IterMut.html#impl-AsRef%3C%5BT%5D%3E)
///
/// Borrows the unyielded remainder of the iteration as a shared bit-slice.
#[cfg(not(tarpaulin_include))]
impl<T, O> AsRef<BitSlice<T::Alias, O>> for IterMut<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn as_ref(&self) -> &BitSlice<T::Alias, O> {
		self.as_bitslice()
	}
}
391
392/// [Original](https://doc.rust-lang.org/core/slice/struct.IterMut.html#impl-Debug)
393#[cfg(not(tarpaulin_include))]
394impl<T, O> Debug for IterMut<'_, T, O>
395where
396	T: BitStore,
397	O: BitOrder,
398{
399	#[inline]
400	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
401		fmt.debug_tuple("IterMut")
402			.field(&self.as_bitslice())
403			.finish()
404	}
405}
406
/// `Iter` and `IterMut` have very nearly the same implementation text.
///
/// Each `$iter => $item` pair expands to the full iterator-trait suite
/// (`Iterator`, `DoubleEndedIterator`, `ExactSizeIterator`, `FusedIterator`)
/// plus `Send`/`Sync`, all forwarding to the inner `BitPtrRange` and wrapping
/// each yielded bit-pointer in the appropriate proxy reference.
macro_rules! iter {
	($($iter:ident => $item:ty);+ $(;)?) => { $(
		/// [Original](https://doc.rust-lang.org/core/slice/struct.Iter.html#impl-Iterator) and
		/// [Original](https://doc.rust-lang.org/core/slice/struct.IterMut.html#impl-Iterator)
		impl<'a, T, O> Iterator for $iter<'a, T, O>
		where
			T: 'a + BitStore,
			O: BitOrder,
		{
			type Item = $item;

			#[inline]
			fn next(&mut self) -> Option<Self::Item> {
				// The range only yields pointers to live bits, so forming a
				// proxy reference from them is sound.
				self.range.next().map(|bp| unsafe { BitRef::from_bitptr(bp) })
			}

			#[inline]
			fn nth(&mut self, n: usize) -> Option<Self::Item> {
				self.range.nth(n).map(|bp| unsafe { BitRef::from_bitptr(bp) })
			}

			easy_iter!();
		}

		/// [Original](https://doc.rust-lang.org/core/slice/struct.Iter.html#impl-DoubleEndedIterator) and
		/// [Original](https://doc.rust-lang.org/core/slice/struct.IterMut.html#impl-DoubleEndedIterator)
		impl<'a, T, O> DoubleEndedIterator for $iter<'a, T, O>
		where
			T: 'a + BitStore,
			O: BitOrder,
		{
			#[inline]
			fn next_back(&mut self) -> Option<Self::Item> {
				self.range
					.next_back()
					.map(|bp| unsafe { BitRef::from_bitptr(bp) })
			}

			#[inline]
			fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
				self.range
					.nth_back(n)
					.map(|bp| unsafe { BitRef::from_bitptr(bp) })
			}
		}

		/// [Original](https://doc.rust-lang.org/core/slice/struct.Iter.html#impl-ExactSizeIterator) and
		/// [Original](https://doc.rust-lang.org/core/slice/struct.IterMut.html#impl-ExactSizeIterator)
		impl<T, O> ExactSizeIterator for $iter<'_, T, O>
		where
			T: BitStore,
			O: BitOrder,
		{
			#[inline]
			fn len(&self) -> usize {
				self.range.len()
			}
		}

		/// [Original](https://doc.rust-lang.org/core/slice/struct.Iter.html#impl-FusedIterator) and
		/// [Original](https://doc.rust-lang.org/core/slice/struct.IterMut.html#impl-FusedIterator)
		impl<T, O> FusedIterator for $iter<'_, T, O>
		where
			T: BitStore,
			O: BitOrder,
		{
		}

		/// [Original](https://doc.rust-lang.org/core/slice/struct.Iter.html#impl-Send) and
		/// [Original](https://doc.rust-lang.org/core/slice/struct.IterMut.html#impl-Send)
		// #[allow(clippy::non_send_fields_in_send_ty)]
		// Safety is delegated to the equivalent reference type: the iterator
		// may cross threads exactly when `&mut BitSlice` may.
		unsafe impl<'a, T, O> Send for $iter<'a, T, O>
		where
			T: BitStore,
			O: BitOrder,
			&'a mut BitSlice<T, O>: Send,
		{
		}

		/// [Original](https://doc.rust-lang.org/core/slice/struct.Iter.html#impl-Sync) and
		/// [Original](https://doc.rust-lang.org/core/slice/struct.IterMut.html#impl-Sync)
		unsafe impl<T, O> Sync for $iter<'_, T, O>
		where
			T: BitStore,
			O: BitOrder,
			BitSlice<T, O>: Sync,
		{
		}
	)+ };
}

iter! {
	Iter => <usize as BitSliceIndex<'a, T, O>>::Immut;
	IterMut => <usize as BitSliceIndex<'a, T::Alias, O>>::Mut;
}
503
/// Builds an iterator implementation for grouping iterators.
///
/// The caller supplies the five eponymous method bodies; this macro wraps them
/// in the `Iterator`, `DoubleEndedIterator`, `ExactSizeIterator`, and
/// `FusedIterator` trait impls so that each grouping iterator only has to
/// write its slicing arithmetic once.
macro_rules! group {
	//  The iterator and its yielded type.
	($iter:ident => $item:ty {
		//  The eponymous functions from the iterator traits.
		$next:item
		$nth:item
		$next_back:item
		$nth_back:item
		$len:item
	}) => {
		impl<'a, T, O> Iterator for $iter<'a, T, O>
		where
			T: 'a + BitStore,
			O: BitOrder,
		{
			type Item = $item;

			#[inline]
			$next

			#[inline]
			$nth

			easy_iter!();
		}

		impl<T, O> DoubleEndedIterator for $iter<'_, T, O>
		where
			T: BitStore,
			O: BitOrder,
		{
			#[inline]
			$next_back

			#[inline]
			$nth_back
		}

		impl<T, O> ExactSizeIterator for $iter<'_, T, O>
		where
			T: BitStore,
			O: BitOrder,
		{
			#[inline]
			$len
		}

		impl<T, O> FusedIterator for $iter<'_, T, O>
		where
			T: BitStore,
			O: BitOrder,
		{
		}
	};
}
560
/// An iterator over `BitSlice` that yields `&bool` directly.
///
/// Produced by [`Iter::by_refs`]. The references point at promoted `'static`
/// `bool` constants, not into the underlying buffer.
pub type BitRefIter<'a, T, O> = Map<BitValIter<'a, T, O>, fn(bool) -> &'a bool>;

/// An iterator over `BitSlice` that yields `bool` directly.
///
/// Produced by [`Iter::by_vals`].
pub struct BitValIter<'a, T, O>
where
	T: 'a + BitStore,
	O: BitOrder,
{
	/// The start and end bit-pointers in the iteration region.
	range: BitPtrRange<Const, T, O>,
	/// Hold the lifetime of the source region, so that this does not cause UAF.
	_life: PhantomData<&'a BitSlice<T, O>>,
}

group!(BitValIter => bool {
	// The range only spans live bits of the source slice, so each pointer it
	// produces may be read.
	fn next(&mut self) -> Option<Self::Item> {
		self.range.next().map(|bp| unsafe { bp.read() })
	}

	fn nth(&mut self, n: usize) -> Option<Self::Item> {
		self.range.nth(n).map(|bp| unsafe { bp.read() })
	}

	fn next_back(&mut self) -> Option<Self::Item> {
		self.range.next_back().map(|bp| unsafe { bp.read() })
	}

	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
		self.range.nth_back(n).map(|bp| unsafe { bp.read() })
	}

	fn len(&self) -> usize {
		self.range.len()
	}
});
597
#[derive(Clone, Debug)]
#[doc = include_str!("../../doc/slice/iter/Windows.md")]
pub struct Windows<'a, T, O>
where
	T: 'a + BitStore,
	O: BitOrder,
{
	/// The source bit-slice.
	slice: &'a BitSlice<T, O>,
	/// The width of the produced windows.
	width: usize,
}

group!(Windows => &'a BitSlice<T, O> {
	fn next(&mut self) -> Option<Self::Item> {
		// Exhausted: fewer bits remain than one window needs.
		if self.width > self.slice.len() {
			self.slice = Default::default();
			return None;
		}
		unsafe {
			let out = self.slice.get_unchecked(.. self.width);
			// Advance by one bit: successive windows overlap.
			self.slice = self.slice.get_unchecked(1 ..);
			Some(out)
		}
	}

	fn nth(&mut self, n: usize) -> Option<Self::Item> {
		// `end` is the exclusive upper bound of window `n`.
		let (end, ovf) = self.width.overflowing_add(n);
		if end > self.slice.len() || ovf {
			self.slice = Default::default();
			return None;
		}
		unsafe {
			let out = self.slice.get_unchecked(n .. end);
			self.slice = self.slice.get_unchecked(n + 1 ..);
			Some(out)
		}
	}

	fn next_back(&mut self) -> Option<Self::Item> {
		let len = self.slice.len();
		if self.width > len {
			self.slice = Default::default();
			return None;
		}
		unsafe {
			let out = self.slice.get_unchecked(len - self.width ..);
			// Retreat by one bit from the back.
			self.slice = self.slice.get_unchecked(.. len - 1);
			Some(out)
		}
	}

	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
		// `end` is the exclusive upper bound of the `n`th window from the
		// back.
		let (end, ovf) = self.slice.len().overflowing_sub(n);
		if end < self.width || ovf {
			self.slice = Default::default();
			return None;
		}
		unsafe {
			let out = self.slice.get_unchecked(end - self.width .. end);
			self.slice = self.slice.get_unchecked(.. end - 1);
			Some(out)
		}
	}

	fn len(&self) -> usize {
		let len = self.slice.len();
		if self.width > len {
			0
		}
		else {
			// One window per bit position where a full window still fits.
			len - self.width + 1
		}
	}
});
673
#[derive(Clone, Debug)]
#[doc = include_str!("../../doc/slice/iter/Chunks.md")]
pub struct Chunks<'a, T, O>
where
	T: 'a + BitStore,
	O: BitOrder,
{
	/// The source bit-slice.
	slice: &'a BitSlice<T, O>,
	/// The width of the produced chunks.
	width: usize,
}
686
687group!(Chunks => &'a BitSlice<T, O> {
688	fn next(&mut self) -> Option<Self::Item> {
689		let len = self.slice.len();
690		if len == 0 {
691			return None;
692		}
693		let mid = cmp::min(len, self.width);
694		let (out, rest) = unsafe { self.slice.split_at_unchecked(mid) };
695		self.slice = rest;
696		Some(out)
697	}
698
699	fn nth(&mut self, n: usize) -> Option<Self::Item> {
700		let len = self.slice.len();
701		let (start, ovf) = n.overflowing_mul(self.width);
702		if start >= len || ovf {
703			self.slice = Default::default();
704			return None;
705		}
706		let split = start.checked_add(self.width)
707			.map(|mid| cmp::min(mid, len))
708			.unwrap_or(len);
709		unsafe {
710			let (head, rest) = self.slice.split_at_unchecked(split);
711			self.slice = rest;
712			Some(head.get_unchecked(start ..))
713		}
714	}
715
716	fn next_back(&mut self) -> Option<Self::Item> {
717		match self.slice.len() {
718			0 => None,
719			len => {
720				//  Determine if the back chunk is a remnant or a whole chunk.
721				let rem = len % self.width;
722				let size = if rem == 0 { self.width } else { rem };
723				let (rest, out)
724					= unsafe { self.slice.split_at_unchecked(len - size) };
725				self.slice = rest;
726				Some(out)
727			},
728		}
729	}
730
731	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
732		let len = self.len();
733		if n >= len {
734			self.slice = Default::default();
735			return None;
736		}
737		let start = (len - 1 - n) * self.width;
738		let width = cmp::min(start + self.width, self.slice.len());
739		let (rest, out) = unsafe {
740			self.slice
741				.get_unchecked(.. start + width)
742				.split_at_unchecked(start)
743		};
744		self.slice = rest;
745		Some(out)
746	}
747
748	fn len(&self) -> usize {
749		match self.slice.len() {
750			0 => 0,
751			len => {
752				let (n, r) = (len / self.width, len % self.width);
753				n + (r > 0) as usize
754			},
755		}
756	}
757});
758
#[derive(Debug)]
#[doc = include_str!("../../doc/slice/iter/ChunksMut.md")]
pub struct ChunksMut<'a, T, O>
where
	T: 'a + BitStore,
	O: BitOrder,
{
	/// The source bit-slice, marked with the alias tainting.
	slice: &'a mut BitSlice<T::Alias, O>,
	/// The width of the produced chunks.
	width: usize,
}
771
772group!(ChunksMut => &'a mut BitSlice<T::Alias, O> {
773	fn next(&mut self) -> Option<Self::Item> {
774		let slice = mem::take(&mut self.slice);
775		let len = slice.len();
776		if len == 0 {
777			return None;
778		}
779		let mid = cmp::min(len, self.width);
780		let (out, rest) = unsafe { slice.split_at_unchecked_mut_noalias(mid) };
781		self.slice = rest;
782		Some(out)
783	}
784
785	fn nth(&mut self, n: usize) -> Option<Self::Item> {
786		let slice = mem::take(&mut self.slice);
787		let len = slice.len();
788		let (start, ovf) = n.overflowing_mul(self.width);
789		if start >= len || ovf {
790			return None;
791		}
792		let (out, rest) = unsafe {
793			slice
794				.get_unchecked_mut(start ..)
795				.split_at_unchecked_mut_noalias(cmp::min(len - start, self.width))
796		};
797		self.slice = rest;
798		Some(out)
799	}
800
801	fn next_back(&mut self) -> Option<Self::Item> {
802		let slice = mem::take(&mut self.slice);
803		match slice.len() {
804			0 => None,
805			len => {
806				let rem = len % self.width;
807				let size = if rem == 0 { self.width } else { rem };
808				let mid = len - size;
809				let (rest, out)
810					= unsafe { slice.split_at_unchecked_mut_noalias(mid) };
811				self.slice = rest;
812				Some(out)
813			},
814		}
815	}
816
817	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
818		let len = self.len();
819		let slice = mem::take(&mut self.slice);
820		if n >= len {
821			return None;
822		}
823		let start = (len - 1 - n) * self.width;
824		let width = cmp::min(start + self.width, slice.len());
825		let (rest, out) = unsafe {
826			slice
827				.get_unchecked_mut(.. start + width)
828				.split_at_unchecked_mut_noalias(start)
829		};
830		self.slice = rest;
831		Some(out)
832	}
833
834	fn len(&self) -> usize {
835		match self.slice.len() {
836			0 => 0,
837			len => {
838				let (n, r) = (len / self.width, len % self.width);
839				n + (r > 0) as usize
840			},
841		}
842	}
843});
844
#[derive(Clone, Debug)]
#[doc = include_str!("../../doc/slice/iter/ChunksExact.md")]
pub struct ChunksExact<'a, T, O>
where
	T: 'a + BitStore,
	O: BitOrder,
{
	/// The source bit-slice.
	///
	/// Trimmed at construction to an exact multiple of `width` bits.
	slice: &'a BitSlice<T, O>,
	/// Any remnant of the source bit-slice that will not be yielded as a chunk.
	extra: &'a BitSlice<T, O>,
	/// The width of the produced chunks.
	width: usize,
}
859
860impl<'a, T, O> ChunksExact<'a, T, O>
861where
862	T: 'a + BitStore,
863	O: BitOrder,
864{
865	#[allow(missing_docs, clippy::missing_docs_in_private_items)]
866	pub(super) fn new(slice: &'a BitSlice<T, O>, width: usize) -> Self {
867		assert_ne!(width, 0, "Chunk width cannot be 0");
868		let len = slice.len();
869		let rem = len % width;
870		let (slice, extra) = unsafe { slice.split_at_unchecked(len - rem) };
871		Self {
872			slice,
873			extra,
874			width,
875		}
876	}
877
878	/// Gets the remnant bit-slice that the iterator will not yield.
879	///
880	/// ## Original
881	///
882	/// [`ChunksExact::remainder`](core::slice::ChunksExact::remainder)
883	#[inline]
884	#[cfg(not(tarpaulin_include))]
885	pub fn remainder(&self) -> &'a BitSlice<T, O> {
886		self.extra
887	}
888}
889
group!(ChunksExact => &'a BitSlice<T, O> {
	// `self.slice` was trimmed to a whole multiple of `self.width` at
	// construction, so every in-bounds chunk is exactly `self.width` long.
	fn next(&mut self) -> Option<Self::Item> {
		if self.slice.len() < self.width {
			return None;
		}
		let (out, rest) = unsafe { self.slice.split_at_unchecked(self.width) };
		self.slice = rest;
		Some(out)
	}

	fn nth(&mut self, n: usize) -> Option<Self::Item> {
		let (start, ovf) = n.overflowing_mul(self.width);
		if start >= self.slice.len() || ovf {
			self.slice = Default::default();
			return None;
		}
		// In-bounds `start` guarantees a full chunk after it, because the
		// slice length is a multiple of the width.
		let (out, rest) = unsafe {
			self.slice
				.get_unchecked(start ..)
				.split_at_unchecked(self.width)
		};
		self.slice = rest;
		Some(out)
	}

	fn next_back(&mut self) -> Option<Self::Item> {
		let len = self.slice.len();
		if len < self.width {
			return None;
		}
		let (rest, out) =
			unsafe { self.slice.split_at_unchecked(len - self.width) };
		self.slice = rest;
		Some(out)
	}

	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
		let len = self.len();
		if n >= len {
			self.slice = Default::default();
			return None;
		}
		// `end` is the exclusive upper bound of the `n`th chunk from the
		// back.
		let end = (len - n) * self.width;
		let (rest, out) = unsafe {
			self.slice
				.get_unchecked(.. end)
				.split_at_unchecked(end - self.width)
		};
		self.slice = rest;
		Some(out)
	}

	fn len(&self) -> usize {
		self.slice.len() / self.width
	}
});
946
#[derive(Debug)]
#[doc = include_str!("../../doc/slice/iter/ChunksExactMut.md")]
pub struct ChunksExactMut<'a, T, O>
where
	T: 'a + BitStore,
	O: BitOrder,
{
	/// The source bit-slice, marked with the alias tainting.
	///
	/// Trimmed at construction to an exact multiple of `width` bits.
	slice: &'a mut BitSlice<T::Alias, O>,
	/// Any remnant of the source bit-slice that will not be yielded as a chunk.
	extra: &'a mut BitSlice<T::Alias, O>,
	/// The width of the produced chunks.
	width: usize,
}
961
962impl<'a, T, O> ChunksExactMut<'a, T, O>
963where
964	T: 'a + BitStore,
965	O: BitOrder,
966{
967	#[allow(missing_docs, clippy::missing_docs_in_private_items)]
968	pub(super) fn new(slice: &'a mut BitSlice<T, O>, width: usize) -> Self {
969		assert_ne!(width, 0, "Chunk width cannot be 0");
970		let len = slice.len();
971		let rem = len % width;
972		let (slice, extra) = unsafe { slice.split_at_unchecked_mut(len - rem) };
973		Self {
974			slice,
975			extra,
976			width,
977		}
978	}
979
980	/// Consumes the iterator, returning the remnant bit-slice that it will not
981	/// yield.
982	///
983	/// ## Original
984	///
985	/// [`ChunksExactMut::into_remainder`][0]
986	///
987	/// [0]: core::slice::ChunksExactMut::into_remainder
988	#[inline]
989	#[cfg(not(tarpaulin_include))]
990	pub fn into_remainder(self) -> &'a mut BitSlice<T::Alias, O> {
991		self.extra
992	}
993
994	/// Takes the remnant bit-slice out of the iterator.
995	///
996	/// The first time this is called, it will produce the remnant; on each
997	/// subsequent call, it will produce an empty bit-slice.
998	///
999	/// ## Examples
1000	///
1001	/// ```rust
1002	/// use bitvec::prelude::*;
1003	///
1004	/// let bits = bits![mut 0; 5];
1005	/// let mut chunks = bits.chunks_exact_mut(3);
1006	///
1007	/// assert_eq!(chunks.take_remainder(), bits![0; 2]);
1008	/// assert!(chunks.take_remainder().is_empty());
1009	/// ```
1010	#[inline]
1011	pub fn take_remainder(&mut self) -> &'a mut BitSlice<T::Alias, O> {
1012		mem::take(&mut self.extra)
1013	}
1014}
1015
1016group!(ChunksExactMut => &'a mut BitSlice<T::Alias, O> {
1017	fn next(&mut self) -> Option<Self::Item> {
1018		let slice = mem::take(&mut self.slice);
1019		if slice.len() < self.width {
1020			return None;
1021		}
1022		let (out, rest) =
1023			unsafe { slice.split_at_unchecked_mut_noalias(self.width) };
1024		self.slice = rest;
1025		Some(out)
1026	}
1027
1028	fn nth(&mut self, n: usize) -> Option<Self::Item> {
1029		let slice = mem::take(&mut self.slice);
1030		let (start, ovf) = n.overflowing_mul(self.width);
1031		if start + self.width >= slice.len() || ovf {
1032			return None;
1033		}
1034		let (out, rest) = unsafe {
1035			slice.get_unchecked_mut(start ..)
1036				.split_at_unchecked_mut_noalias(self.width)
1037		};
1038		self.slice = rest;
1039		Some(out)
1040	}
1041
1042	fn next_back(&mut self) -> Option<Self::Item> {
1043		let slice = mem::take(&mut self.slice);
1044		let len = slice.len();
1045		if len < self.width {
1046			return None;
1047		}
1048		let (rest, out) =
1049			unsafe { slice.split_at_unchecked_mut_noalias(len - self.width) };
1050		self.slice = rest;
1051		Some(out)
1052	}
1053
1054	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
1055		let len = self.len();
1056		let slice = mem::take(&mut self.slice);
1057		if n >= len {
1058			return None;
1059		}
1060		let end = (len - n) * self.width;
1061		let (rest, out) = unsafe {
1062			slice.get_unchecked_mut(.. end)
1063				.split_at_unchecked_mut_noalias(end - self.width)
1064		};
1065		self.slice = rest;
1066		Some(out)
1067	}
1068
1069	fn len(&self) -> usize {
1070		self.slice.len() / self.width
1071	}
1072});
1073
#[derive(Clone, Debug)]
#[doc = include_str!("../../doc/slice/iter/RChunks.md")]
pub struct RChunks<'a, T, O>
where
	T: 'a + BitStore,
	O: BitOrder,
{
	/// The source bit-slice.
	slice: &'a BitSlice<T, O>,
	/// The width of the produced chunks.
	width: usize,
}

group!(RChunks => &'a BitSlice<T, O> {
	// Iteration proceeds from the back of the slice toward the front; the
	// remnant (if any) is the chunk nearest the front.
	fn next(&mut self) -> Option<Self::Item> {
		let len = self.slice.len();
		if len == 0 {
			return None;
		}
		// The front-most chunk may be shorter than `self.width`.
		let mid = len - cmp::min(len, self.width);
		let (rest, out) = unsafe { self.slice.split_at_unchecked(mid) };
		self.slice = rest;
		Some(out)
	}

	fn nth(&mut self, n: usize) -> Option<Self::Item> {
		let len = self.slice.len();
		// `num` is the number of bits consumed by the `n` skipped chunks.
		let (num, ovf) = n.overflowing_mul(self.width);
		if num >= len || ovf {
			self.slice = Default::default();
			return None;
		}
		let end = len - num;
		// Saturate so that a partial front chunk starts at zero.
		let mid = end.saturating_sub(self.width);
		let (rest, out) = unsafe {
			self.slice
				.get_unchecked(.. end)
				.split_at_unchecked(mid)
		};
		self.slice = rest;
		Some(out)
	}

	fn next_back(&mut self) -> Option<Self::Item> {
		match self.slice.len() {
			0 => None,
			n => {
				// The back of a reverse iteration is the front of the slice:
				// the remnant if one exists, else a whole chunk.
				let rem = n % self.width;
				let len = if rem == 0 { self.width } else { rem };
				let (out, rest) = unsafe { self.slice.split_at_unchecked(len) };
				self.slice = rest;
				Some(out)
			},
		}
	}

	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
		let len = self.len();
		if n >= len {
			self.slice = Default::default();
			return None;
		}
		/* Taking from the back of a reverse iterator means taking from the
		front of the slice.

		`len` gives us the total number of subslices remaining. In order to find
		the partition point, we need to subtract `n - 1` full subslices from
		that count (because the back slice of the iteration might not be full),
		compute their bit width, and offset *that* from the end of the memory
		region. This gives us the zero-based index of the partition point
		between what is returned and what is retained.

		The `part ..` section of the slice is retained, and the very end of the
		`.. part` section is returned. The head section is split at no less than
		`self.width` bits below the end marker (this could be the partial
		section, so a wrapping subtraction cannot be used), and `.. start` is
		discarded.

		Source:
		https://doc.rust-lang.org/1.43.0/src/core/slice/mod.rs.html#5141-5156
		*/
		let from_end = (len - 1 - n) * self.width;
		let end = self.slice.len() - from_end;
		let start = end.saturating_sub(self.width);
		let (out, rest) = unsafe { self.slice.split_at_unchecked(end) };
		self.slice = rest;
		Some(unsafe { out.get_unchecked(start ..) })
	}

	fn len(&self) -> usize {
		match self.slice.len() {
			0 => 0,
			len => {
				// Whole chunks, plus one if a front remnant exists.
				let (n, r) = (len / self.width, len % self.width);
				n + (r > 0) as usize
			},
		}
	}
});
1173
#[derive(Debug)]
#[doc = include_str!("../../doc/slice/iter/RChunksMut.md")]
pub struct RChunksMut<'a, T, O>
where
	T: 'a + BitStore,
	O: BitOrder,
{
	/// The source bit-slice, marked with the alias tainting.
	///
	/// The `T::Alias` marker is required because neighboring yielded chunks
	/// may share a memory element, so writes must go through alias-aware
	/// storage types.
	slice: &'a mut BitSlice<T::Alias, O>,
	/// The width of the produced chunks.
	///
	/// This is always non-zero; the constructor rejects a zero width.
	width: usize,
}
1186
group!(RChunksMut => &'a mut BitSlice<T::Alias, O> {
	fn next(&mut self) -> Option<Self::Item> {
		//  Take the slice out of `self` so the yielded chunk can carry the
		//  full `'a` lifetime rather than a reborrow of `self`.
		let slice = mem::take(&mut self.slice);
		let len = slice.len();
		if len == 0 {
			return None;
		}
		//  Reverse iteration: the first yielded chunk is at the *back*. The
		//  final (front-most) chunk may be shorter than `self.width`.
		let mid = len - cmp::min(len, self.width);
		//  SAFETY: `mid <= len`, so the split point is in bounds.
		let (rest, out) = unsafe { slice.split_at_unchecked_mut_noalias(mid) };
		self.slice = rest;
		Some(out)
	}

	fn nth(&mut self, n: usize) -> Option<Self::Item> {
		let slice = mem::take(&mut self.slice);
		let len = slice.len();
		//  `num` is the total bit count of the `n` skipped chunks.
		let (num, ovf) = n.overflowing_mul(self.width);
		if num >= len || ovf {
			//  Exhausted: `self.slice` stays empty (it was `take`n above).
			return None;
		}
		//  The requested chunk ends `num` bits before the back of the slice…
		let end = len - num;
		//  …and begins at most `self.width` bits earlier, clamped at zero
		//  because the front-most chunk may be partial.
		let mid = end.saturating_sub(self.width);
		//  SAFETY: `mid <= end <= len`, so both cut points are in bounds.
		let (rest, out) = unsafe {
			slice.get_unchecked_mut(.. end)
				.split_at_unchecked_mut_noalias(mid)
		};
		self.slice = rest;
		Some(out)
	}

	fn next_back(&mut self) -> Option<Self::Item> {
		let slice = mem::take(&mut self.slice);
		match slice.len() {
			0 => None,
			n => {
				//  The back of this reversed iterator is the *front* of the
				//  slice, which holds the partial chunk (if any).
				let rem = n % self.width;
				let len = if rem == 0 { self.width } else { rem };
				//  SAFETY: `len <= n`, so the split point is in bounds.
				let (out, rest) =
					unsafe { slice.split_at_unchecked_mut_noalias(len) };
				self.slice = rest;
				Some(out)
			},
		}
	}

	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
		//  Count of chunks still to be yielded; computed before the `take`.
		let len = self.len();
		let slice = mem::take(&mut self.slice);
		if n >= len {
			return None;
		}
		//  Skip `len - 1 - n` full chunks from the back to find the end of
		//  the requested chunk. See `RChunks::nth_back` for the derivation.
		let from_end = (len - 1 - n) * self.width;
		let end = slice.len() - from_end;
		//  The chunk begins at most `self.width` bits before `end`; the
		//  front-most chunk may be partial, so clamp at zero.
		let start = end.saturating_sub(self.width);
		//  SAFETY: `start <= end <= slice.len()`, so both cuts are in bounds.
		let (out, rest) = unsafe { slice.split_at_unchecked_mut_noalias(end) };
		self.slice = rest;
		Some(unsafe { out.get_unchecked_mut(start ..) })
	}

	fn len(&self) -> usize {
		match self.slice.len() {
			0 => 0,
			len => {
				//  Whole chunks, plus one more if a partial chunk remains.
				let (n, r) = (len / self.width, len % self.width);
				n + (r > 0) as usize
			},
		}
	}
});
1256
#[derive(Clone, Debug)]
#[doc = include_str!("../../doc/slice/iter/RChunksExact.md")]
pub struct RChunksExact<'a, T, O>
where
	T: 'a + BitStore,
	O: BitOrder,
{
	/// The source bit-slice.
	///
	/// Its length is always a whole multiple of `width`: the constructor
	/// moves any remnant into `extra` before iteration begins.
	slice: &'a BitSlice<T, O>,
	/// Any remnant of the source bit-slice that will not be yielded as a chunk.
	///
	/// Because iteration runs from the back, the remnant is taken from the
	/// *front* of the source.
	extra: &'a BitSlice<T, O>,
	/// The width of the produced chunks. Always non-zero.
	width: usize,
}
1271
1272impl<'a, T, O> RChunksExact<'a, T, O>
1273where
1274	T: 'a + BitStore,
1275	O: BitOrder,
1276{
1277	#[allow(missing_docs, clippy::missing_docs_in_private_items)]
1278	pub(super) fn new(slice: &'a BitSlice<T, O>, width: usize) -> Self {
1279		assert_ne!(width, 0, "Chunk width cannot be 0");
1280		let (extra, slice) =
1281			unsafe { slice.split_at_unchecked(slice.len() % width) };
1282		Self {
1283			slice,
1284			extra,
1285			width,
1286		}
1287	}
1288
1289	/// Gets the remnant bit-slice that the iterator will not yield.
1290	///
1291	/// ## Original
1292	///
1293	/// [`RChunksExact::remainder`](core::slice::RChunksExact::remainder)
1294	#[inline]
1295	#[cfg(not(tarpaulin_include))]
1296	pub fn remainder(&self) -> &'a BitSlice<T, O> {
1297		self.extra
1298	}
1299}
1300
group!(RChunksExact => &'a BitSlice<T, O> {
	fn next(&mut self) -> Option<Self::Item> {
		let len = self.slice.len();
		//  `len` is always a multiple of `width`, so anything shorter than
		//  one chunk means the iterator is exhausted.
		if len < self.width {
			return None;
		}
		//  SAFETY: `self.width <= len`, so the split point is in bounds.
		let (rest, out) =
			unsafe { self.slice.split_at_unchecked(len - self.width) };
		self.slice = rest;
		Some(out)
	}

	fn nth(&mut self, n: usize) -> Option<Self::Item> {
		let len = self.slice.len();
		//  `split` is the total bit count of the `n` skipped chunks.
		let (split, ovf) = n.overflowing_mul(self.width);
		if split >= len || ovf {
			self.slice = Default::default();
			return None;
		}
		//  The requested chunk is exactly `self.width` bits ending at `end`.
		let end = len - split;
		//  SAFETY: `end <= len`, and because `len` is a multiple of `width`
		//  with `split < len`, `end` is at least `self.width`.
		let (rest, out) = unsafe {
			self.slice
				.get_unchecked(.. end)
				.split_at_unchecked(end - self.width)
		};
		self.slice = rest;
		Some(out)
	}

	fn next_back(&mut self) -> Option<Self::Item> {
		if self.slice.len() < self.width {
			return None;
		}
		//  The back of this reversed iterator is the front of the slice.
		//  SAFETY: `self.width` is in bounds, per the check above.
		let (out, rest) = unsafe { self.slice.split_at_unchecked(self.width) };
		self.slice = rest;
		Some(out)
	}

	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
		let len = self.slice.len();
		//  `start` is the front offset of the requested chunk.
		let (start, ovf) = n.overflowing_mul(self.width);
		if start >= len || ovf {
			self.slice = Default::default();
			return None;
		}
		//  At this point, `start` is at least `self.width` less than `len`.
		let (out, rest) = unsafe {
			self.slice.get_unchecked(start ..).split_at_unchecked(self.width)
		};
		self.slice = rest;
		Some(out)
	}

	fn len(&self) -> usize {
		//  Only whole chunks are yielded; the remnant was removed up front.
		self.slice.len() / self.width
	}
});
1358
#[derive(Debug)]
#[doc = include_str!("../../doc/slice/iter/RChunksExactMut.md")]
pub struct RChunksExactMut<'a, T, O>
where
	T: 'a + BitStore,
	O: BitOrder,
{
	/// The source bit-slice, marked with the alias tainting.
	///
	/// Its length is always a whole multiple of `width`: the constructor
	/// moves any remnant into `extra` before iteration begins.
	slice: &'a mut BitSlice<T::Alias, O>,
	/// Any remnant of the source bit-slice that will not be yielded as a chunk.
	///
	/// Because iteration runs from the back, the remnant is taken from the
	/// *front* of the source.
	extra: &'a mut BitSlice<T::Alias, O>,
	/// The width of the produced chunks. Always non-zero.
	width: usize,
}
1373
1374impl<'a, T, O> RChunksExactMut<'a, T, O>
1375where
1376	T: 'a + BitStore,
1377	O: BitOrder,
1378{
1379	#[allow(missing_docs, clippy::missing_docs_in_private_items)]
1380	pub(super) fn new(slice: &'a mut BitSlice<T, O>, width: usize) -> Self {
1381		assert_ne!(width, 0, "Chunk width cannot be 0");
1382		let (extra, slice) =
1383			unsafe { slice.split_at_unchecked_mut(slice.len() % width) };
1384		Self {
1385			slice,
1386			extra,
1387			width,
1388		}
1389	}
1390
1391	/// Consumes the iterator, returning the remnant bit-slice that it will not
1392	/// yield.
1393	///
1394	/// ## Original
1395	///
1396	/// [`RChunksExactMut::into_remainder`][0]
1397	///
1398	/// [0]: core::slice::RChunksExactMut::into_remainder
1399	#[inline]
1400	#[cfg(not(tarpaulin_include))]
1401	pub fn into_remainder(self) -> &'a mut BitSlice<T::Alias, O> {
1402		self.extra
1403	}
1404
1405	/// Takes the remnant bit-slice out of the iterator.
1406	///
1407	/// The first time this is called, it will produce the remnant; on each
1408	/// subsequent call, it will produce an empty bit-slice.
1409	///
1410	/// ## Examples
1411	///
1412	/// ```rust
1413	/// use bitvec::prelude::*;
1414	///
1415	/// let bits = bits![mut 0; 5];
1416	/// let mut chunks = bits.rchunks_exact_mut(3);
1417	///
1418	/// assert_eq!(chunks.take_remainder(), bits![0; 2]);
1419	/// assert!(chunks.take_remainder().is_empty());
1420	/// ```
1421	#[inline]
1422	pub fn take_remainder(&mut self) -> &'a mut BitSlice<T::Alias, O> {
1423		mem::take(&mut self.extra)
1424	}
1425}
1426
group!(RChunksExactMut => &'a mut BitSlice<T::Alias, O> {
	fn next(&mut self) -> Option<Self::Item> {
		//  Take the slice so the yielded chunk carries the full `'a` lifetime.
		let slice = mem::take(&mut self.slice);
		let len = slice.len();
		//  `len` is always a multiple of `width`, so anything shorter than
		//  one chunk means the iterator is exhausted.
		if len < self.width {
			return None;
		}
		//  SAFETY: `self.width <= len`, so the split point is in bounds.
		let (rest, out) =
			unsafe { slice.split_at_unchecked_mut_noalias(len - self.width) };
		self.slice = rest;
		Some(out)
	}

	fn nth(&mut self, n: usize) -> Option<Self::Item> {
		let slice = mem::take(&mut self.slice);
		let len = slice.len();
		//  `split` is the total bit count of the `n` skipped chunks.
		let (split, ovf) = n.overflowing_mul(self.width);
		if split >= len || ovf {
			//  Exhausted: `self.slice` stays empty (it was `take`n above).
			return None;
		}
		//  The requested chunk is exactly `self.width` bits ending at `end`.
		let end = len - split;
		//  SAFETY: `end <= len`, and because `len` is a multiple of `width`
		//  with `split < len`, `end` is at least `self.width`.
		let (rest, out) = unsafe {
			slice.get_unchecked_mut(.. end)
				.split_at_unchecked_mut_noalias(end - self.width)
		};
		self.slice = rest;
		Some(out)
	}

	fn next_back(&mut self) -> Option<Self::Item> {
		let slice = mem::take(&mut self.slice);
		if slice.len() < self.width {
			return None;
		}
		//  The back of this reversed iterator is the front of the slice.
		//  SAFETY: `self.width` is in bounds, per the check above.
		let (out, rest) =
			unsafe { slice.split_at_unchecked_mut_noalias(self.width) };
		self.slice = rest;
		Some(out)
	}

	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
		let slice = mem::take(&mut self.slice);
		let len = slice.len();
		//  `start` is the front offset of the requested chunk.
		let (start, ovf) = n.overflowing_mul(self.width);
		if start >= len || ovf {
			return None;
		}
		//  At this point, `start` is at least `self.width` less than `len`.
		let (out, rest) = unsafe {
			slice.get_unchecked_mut(start ..)
				.split_at_unchecked_mut_noalias(self.width)
		};
		self.slice = rest;
		Some(out)
	}

	fn len(&self) -> usize {
		//  Only whole chunks are yielded; the remnant was removed up front.
		self.slice.len() / self.width
	}
});
1487
/// Creates the `new` function for the easy grouping iterators.
///
/// Each argument is an iterator type name, optionally followed by `mut` (when
/// the iterator borrows its source exclusively) and an adapter method such as
/// `.alias_mut()` to apply to the source bit-slice before storing it.
macro_rules! new_group {
	($($t:ident $($m:ident)? $(.$a:ident())?),+ $(,)?) => { $(
		impl<'a, T, O> $t<'a, T, O>
		where
			T: 'a + BitStore,
			O: BitOrder,
		{
			#[inline]
			#[allow(missing_docs, clippy::missing_docs_in_private_items)]
			pub(super) fn new(
				slice: &'a $($m)? BitSlice<T, O>,
				width: usize,
			) -> Self {
				//  A zero width would divide by zero when these iterators
				//  compute their remaining length.
				assert_ne!(width, 0, "view width cannot be 0");
				let slice = slice$(.$a())?;
				Self { slice, width }
			}
		}
	)+ };
}
1509
//  The `*Mut` iterators borrow their source exclusively, and apply the
//  `::Alias` taint marker before storing it.
new_group! {
	Windows,
	Chunks,
	ChunksMut mut .alias_mut(),
	RChunks,
	RChunksMut mut .alias_mut(),
}
1517
/// Creates splitting iterators.
///
/// Callers provide the iterator's type name, its yielded bit-slice type, and
/// the bodies of `Iterator::next` and `DoubleEndedIterator::next_back`; this
/// macro supplies the constructor, `Debug`, `size_hint`, the `FusedIterator`
/// marker, and the `SplitIter::finish` terminator used by the `splitn`
/// wrappers.
macro_rules! split {
	(
		$iter:ident =>
		$item:ty
		$(where $alias:ident)? { $next:item $next_back:item }
	) => {
		impl<'a, T, O, P> $iter<'a, T, O, P>
		where
			T: 'a + BitStore,
			O: BitOrder,
			P: FnMut(usize, &bool) -> bool,
		{
			pub(super) fn new(slice: $item, pred: P) -> Self {
				Self {
					slice,
					pred,
					done: false,
				}
			}
		}

		impl<T, O, P> Debug for $iter<'_, T, O, P>
		where
			T: BitStore,
			O: BitOrder,
			P: FnMut(usize, &bool) -> bool,
		{
			#[inline]
			fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
				//  The predicate is not `Debug`; render only slice and state.
				fmt.debug_struct(stringify!($iter))
					.field("slice", &self.slice)
					.field("done", &self.done)
					.finish()
			}
		}

		impl<'a, T, O, P> Iterator for $iter<'a, T, O, P>
		where
			T: 'a + BitStore,
			O: BitOrder,
			P: FnMut(usize, &bool) -> bool,
		{
			type Item = $item;

			#[inline]
			$next

			#[inline]
			fn size_hint(&self) -> (usize, Option<usize>) {
				if self.done {
					(0, Some(0))
				}
				else {
					//  At least one more yield; at most one per remaining
					//  bit, plus the trailing segment.
					(1, Some(self.slice.len() + 1))
				}
			}
		}

		impl<'a, T, O, P> DoubleEndedIterator for $iter<'a, T, O, P>
		where
			T: 'a + BitStore,
			O: BitOrder,
			P: FnMut(usize, &bool) -> bool,
		{
			#[inline]
			$next_back
		}

		impl<'a, T, O, P> FusedIterator for $iter<'a, T, O, P>
		where
			T: 'a + BitStore,
			O: BitOrder,
			P: FnMut(usize, &bool) -> bool,
		{
		}

		impl<'a, T, O, P> SplitIter for $iter<'a, T, O, P>
		where
			T: 'a + BitStore,
			O: BitOrder,
			P: FnMut(usize, &bool) -> bool,
		{
			#[inline]
			fn finish(&mut self) -> Option<Self::Item> {
				if self.done {
					None
				}
				else {
					//  Yield the entire unsearched remainder exactly once.
					self.done = true;
					Some(mem::take(&mut self.slice))
				}
			}
		}
	};
}
1614
#[derive(Clone)]
#[doc = include_str!("../../doc/slice/iter/Split.md")]
pub struct Split<'a, T, O, P>
where
	T: 'a + BitStore,
	O: BitOrder,
	P: FnMut(usize, &bool) -> bool,
{
	/// The [`BitSlice`] being split.
	///
	/// [`BitSlice`]: crate::slice::BitSlice
	slice: &'a BitSlice<T, O>,
	/// The function used to test whether a split should occur.
	pred:  P,
	/// Whether the split is finished.
	///
	/// Once set, both ends of the iterator yield `None`.
	done:  bool,
}
1632
split!(Split => &'a BitSlice<T, O> {
	fn next(&mut self) -> Option<Self::Item> {
		if self.done {
			return None;
		}
		//  Find the next splitting bit. The enumeration restarts at zero on
		//  each search, so the predicate's index is relative to the remaining
		//  (unsearched) region, not to the original bit-slice.
		match self.slice
			.iter()
			.by_refs()
			.enumerate()
			.position(|(idx, bit)| (self.pred)(idx, bit))
		{
			//  No further match: yield everything that remains, then fuse.
			None => self.finish(),
			Some(idx) => unsafe {
				//  SAFETY: `idx` is a valid index into `self.slice`, so both
				//  `.. idx` and `idx + 1 ..` are in bounds.
				let out = self.slice.get_unchecked(.. idx);
				//  The matched bit itself is discarded, not yielded.
				self.slice = self.slice.get_unchecked(idx + 1 ..);
				Some(out)
			},
		}
	}

	fn next_back(&mut self) -> Option<Self::Item> {
		if self.done {
			return None;
		}
		//  Find the last splitting bit; everything after it is yielded.
		match self.slice
			.iter()
			.by_refs()
			.enumerate()
			.rposition(|(idx, bit)| (self.pred)(idx, bit))
		{
			None => self.finish(),
			Some(idx) => unsafe {
				//  SAFETY: `idx` is a valid index into `self.slice`, so both
				//  `idx + 1 ..` and `.. idx` are in bounds.
				let out = self.slice.get_unchecked(idx + 1 ..);
				self.slice = self.slice.get_unchecked(.. idx);
				Some(out)
			},
		}
	}
});
1672
#[doc = include_str!("../../doc/slice/iter/SplitMut.md")]
pub struct SplitMut<'a, T, O, P>
where
	T: 'a + BitStore,
	O: BitOrder,
	P: FnMut(usize, &bool) -> bool,
{
	/// The source bit-slice, marked with the alias tainting.
	slice: &'a mut BitSlice<T::Alias, O>,
	/// The function that tests each bit for whether it is a split point.
	pred:  P,
	/// Marks whether iteration has concluded, without emptying the `slice`.
	///
	/// Once set, both ends of the iterator yield `None`.
	done:  bool,
}
1687
split!(SplitMut => &'a mut BitSlice<T::Alias, O> {
	fn next(&mut self) -> Option<Self::Item> {
		if self.done {
			return None;
		}
		//  Reborrow the predicate separately so the search closure does not
		//  capture all of `self`.
		let idx_opt = {
			let pred = &mut self.pred;
			self.slice
				.iter()
				.by_refs()
				.enumerate()
				.position(|(idx, bit)| (pred)(idx, bit))
		};
		match idx_opt
		{
			//  No further match: yield everything that remains, then fuse.
			None => self.finish(),
			Some(idx) => unsafe {
				//  Take the slice so the yielded segment carries the full
				//  `'a` lifetime.
				let slice = mem::take(&mut self.slice);
				//  SAFETY: `idx` is a valid index, so both the split and the
				//  skip of the matched bit are in bounds.
				let (out, rest) = slice.split_at_unchecked_mut_noalias(idx);
				self.slice = rest.get_unchecked_mut(1 ..);
				Some(out)
			},
		}
	}

	fn next_back(&mut self) -> Option<Self::Item> {
		if self.done {
			return None;
		}
		let idx_opt = {
			let pred = &mut self.pred;
			self.slice
				.iter()
				.by_refs()
				.enumerate()
				.rposition(|(idx, bit)| (pred)(idx, bit))
		};
		match idx_opt
		{
			None => self.finish(),
			Some(idx) => unsafe {
				let slice = mem::take(&mut self.slice);
				//  SAFETY: `idx` is a valid index, so both the split and the
				//  skip of the matched bit are in bounds.
				let (rest, out) = slice.split_at_unchecked_mut_noalias(idx);
				self.slice = rest;
				//  The matched bit itself is discarded, not yielded.
				Some(out.get_unchecked_mut(1 ..))
			},
		}
	}
});
1737
#[derive(Clone)]
#[doc = include_str!("../../doc/slice/iter/SplitInclusive.md")]
pub struct SplitInclusive<'a, T, O, P>
where
	T: 'a + BitStore,
	O: BitOrder,
	P: FnMut(usize, &bool) -> bool,
{
	/// The source bit-slice.
	slice: &'a BitSlice<T, O>,
	/// The function that tests each bit for whether it is a split point.
	pred:  P,
	/// Marks whether iteration has concluded, without emptying the `slice`.
	///
	/// Once set, both ends of the iterator yield `None`.
	done:  bool,
}
1753
1754split!(SplitInclusive => &'a BitSlice<T, O> {
1755	fn next(&mut self) -> Option<Self::Item> {
1756		if self.done {
1757			return None;
1758		}
1759		let len = self.slice.len();
1760		let idx = self.slice.iter()
1761			.by_refs()
1762			.enumerate()
1763			.position(|(idx, bit)| (self.pred)(idx, bit))
1764			.map(|idx| idx + 1)
1765			.unwrap_or(len);
1766		if idx == len {
1767			self.done = true;
1768		}
1769		let (out, rest) = unsafe { self.slice.split_at_unchecked(idx) };
1770		self.slice = rest;
1771		Some(out)
1772	}
1773
1774	fn next_back(&mut self) -> Option<Self::Item> {
1775		if self.done {
1776			return None;
1777		}
1778
1779		let idx = if self.slice.is_empty() {
1780			0
1781		}
1782		else {
1783			unsafe { self.slice.get_unchecked(.. self.slice.len() - 1) }
1784				.iter()
1785				.by_refs()
1786				.enumerate()
1787				.rposition(|(idx, bit)| (self.pred)(idx, bit))
1788				.map(|idx| idx + 1)
1789				.unwrap_or(0)
1790		};
1791		if idx == 0 {
1792			self.done = true;
1793		}
1794		let (rest, out) = unsafe { self.slice.split_at_unchecked(idx) };
1795		self.slice = rest;
1796		Some(out)
1797	}
1798});
1799
#[doc = include_str!("../../doc/slice/iter/SplitInclusiveMut.md")]
pub struct SplitInclusiveMut<'a, T, O, P>
where
	T: 'a + BitStore,
	O: BitOrder,
	P: FnMut(usize, &bool) -> bool,
{
	/// The source bit-slice, marked with the alias tainting.
	slice: &'a mut BitSlice<T::Alias, O>,
	/// The function that tests each bit for whether it is a split point.
	pred:  P,
	/// Marks whether iteration has concluded, without emptying the `slice`.
	///
	/// Once set, both ends of the iterator yield `None`.
	done:  bool,
}
1814
1815split!(SplitInclusiveMut => &'a mut BitSlice<T::Alias, O> {
1816	fn next(&mut self) -> Option<Self::Item> {
1817		if self.done {
1818			return None;
1819		}
1820		let pred = &mut self.pred;
1821		let len = self.slice.len();
1822		let idx = self.slice.iter()
1823			.by_refs()
1824			.enumerate()
1825			.position(|(idx, bit)| (pred)(idx, bit))
1826			.map(|idx| idx + 1)
1827			.unwrap_or(len);
1828		if idx == len {
1829			self.done = true;
1830		}
1831		let (out, rest) = unsafe {
1832			mem::take(&mut self.slice)
1833				.split_at_unchecked_mut_noalias(idx)
1834		};
1835		self.slice = rest;
1836		Some(out)
1837	}
1838
1839	fn next_back(&mut self) -> Option<Self::Item> {
1840		if self.done {
1841			return None;
1842		}
1843		let pred = &mut self.pred;
1844		let idx = if self.slice.is_empty() {
1845			0
1846		}
1847		else {
1848			unsafe { self.slice.get_unchecked(.. self.slice.len() - 1) }
1849				.iter()
1850				.by_refs()
1851				.enumerate()
1852				.rposition(|(idx, bit)| (pred)(idx, bit))
1853				.map(|idx| idx + 1)
1854				.unwrap_or(0)
1855		};
1856		if idx == 0 {
1857			self.done = true;
1858		}
1859		let (rest, out) = unsafe {
1860			mem::take(&mut self.slice)
1861				.split_at_unchecked_mut_noalias(idx)
1862		};
1863		self.slice = rest;
1864		Some(out)
1865	}
1866});
1867
#[derive(Clone)]
#[doc = include_str!("../../doc/slice/iter/RSplit.md")]
pub struct RSplit<'a, T, O, P>
where
	T: 'a + BitStore,
	O: BitOrder,
	P: FnMut(usize, &bool) -> bool,
{
	/// The source bit-slice.
	slice: &'a BitSlice<T, O>,
	/// The function that tests each bit for whether it is a split point.
	pred:  P,
	/// Marks whether iteration has concluded, without emptying the `slice`.
	///
	/// Once set, both ends of the iterator yield `None`.
	done:  bool,
}
1883
1884split!(RSplit => &'a BitSlice<T, O> {
1885	fn next(&mut self) -> Option<Self::Item> {
1886		let mut split = Split::<'a, T, O, &mut P> {
1887			slice: mem::take(&mut self.slice),
1888			pred: &mut self.pred,
1889			done: self.done,
1890		};
1891		let out = split.next_back();
1892		let Split { slice, done, .. } = split;
1893		self.slice = slice;
1894		self.done = done;
1895		out
1896	}
1897
1898	fn next_back(&mut self) -> Option<Self::Item> {
1899		let mut split = Split::<'a, T, O, &mut P> {
1900			slice: mem::take(&mut self.slice),
1901			pred: &mut self.pred,
1902			done: self.done,
1903		};
1904		let out = split.next();
1905		let Split { slice, done, .. } = split;
1906		self.slice = slice;
1907		self.done = done;
1908		out
1909	}
1910});
1911
#[doc = include_str!("../../doc/slice/iter/RSplitMut.md")]
pub struct RSplitMut<'a, T, O, P>
where
	T: 'a + BitStore,
	O: BitOrder,
	P: FnMut(usize, &bool) -> bool,
{
	/// The source bit-slice, marked with the alias tainting.
	slice: &'a mut BitSlice<T::Alias, O>,
	/// The function that tests each bit for whether it is a split point.
	pred:  P,
	/// Marks whether iteration has concluded, without emptying the `slice`.
	///
	/// Once set, both ends of the iterator yield `None`.
	done:  bool,
}
1926
1927split!(RSplitMut => &'a mut BitSlice<T::Alias, O> {
1928	fn next(&mut self) -> Option<Self::Item> {
1929		let mut split = SplitMut::<'a, T, O, &mut P> {
1930			slice: mem::take(&mut self.slice),
1931			pred: &mut self.pred,
1932			done: self.done,
1933		};
1934		let out = split.next_back();
1935		let SplitMut { slice, done, .. } = split;
1936		self.slice = slice;
1937		self.done = done;
1938		out
1939	}
1940
1941	fn next_back(&mut self) -> Option<Self::Item> {
1942		let mut split = SplitMut::<'a, T, O, &mut P> {
1943			slice: mem::take(&mut self.slice),
1944			pred: &mut self.pred,
1945			done: self.done,
1946		};
1947		let out = split.next();
1948		let SplitMut { slice, done, .. } = split;
1949		self.slice = slice;
1950		self.done = done;
1951		out
1952	}
1953});
1954
/// [Original](https://github.com/rust-lang/rust/blob/95750ae/library/core/src/slice/iter.rs#L318-L325)
///
/// An internal abstraction over the splitting iterators, used by the
/// `splitn`-family wrappers to force an early yield of the unsearched
/// remainder once their split quota runs out.
trait SplitIter: DoubleEndedIterator {
	/// Marks the underlying iterator as complete, and extracts the remaining
	/// portion of the bit-slice.
	fn finish(&mut self) -> Option<Self::Item>;
}
1961
#[derive(Clone)]
#[doc = include_str!("../../doc/slice/iter/SplitN.md")]
pub struct SplitN<'a, T, O, P>
where
	T: 'a + BitStore,
	O: BitOrder,
	P: FnMut(usize, &bool) -> bool,
{
	/// The interior splitter.
	inner: Split<'a, T, O, P>,
	/// The number of permissible splits remaining.
	///
	/// At one, the whole remainder is yielded in a single piece; at zero,
	/// the iterator is exhausted.
	count: usize,
}
1975
#[doc = include_str!("../../doc/slice/iter/SplitNMut.md")]
pub struct SplitNMut<'a, T, O, P>
where
	T: 'a + BitStore,
	O: BitOrder,
	P: FnMut(usize, &bool) -> bool,
{
	/// The interior splitter.
	inner: SplitMut<'a, T, O, P>,
	/// The number of permissible splits remaining.
	///
	/// At one, the whole remainder is yielded in a single piece; at zero,
	/// the iterator is exhausted.
	count: usize,
}
1988
#[derive(Clone)]
#[doc = include_str!("../../doc/slice/iter/RSplitN.md")]
pub struct RSplitN<'a, T, O, P>
where
	T: 'a + BitStore,
	O: BitOrder,
	P: FnMut(usize, &bool) -> bool,
{
	/// The interior splitter.
	inner: RSplit<'a, T, O, P>,
	/// The number of permissible splits remaining.
	///
	/// At one, the whole remainder is yielded in a single piece; at zero,
	/// the iterator is exhausted.
	count: usize,
}
2002
#[doc = include_str!("../../doc/slice/iter/RSplitNMut.md")]
pub struct RSplitNMut<'a, T, O, P>
where
	T: 'a + BitStore,
	O: BitOrder,
	P: FnMut(usize, &bool) -> bool,
{
	/// The interior splitter.
	inner: RSplitMut<'a, T, O, P>,
	/// The number of permissible splits remaining.
	///
	/// At one, the whole remainder is yielded in a single piece; at zero,
	/// the iterator is exhausted.
	count: usize,
}
2015
/// Creates a splitting iterator with a maximum number of attempts.
///
/// Each wrapper pairs an inner splitting iterator with a countdown. When only
/// one permitted split remains, the entire unsearched remainder is yielded in
/// one piece via `SplitIter::finish`; at zero, the iterator is exhausted.
macro_rules! split_n {
	($(
		$outer:ident => $inner:ident => $item:ty $(where $alias:ident)?
	);+ $(;)?) => { $(
		impl<'a, T, O, P> $outer<'a, T, O, P>
		where
			T: 'a + BitStore,
			O: BitOrder,
			P: FnMut(usize, &bool) -> bool,
		{
			#[inline]
			#[allow(missing_docs, clippy::missing_docs_in_private_items)]
			pub(super) fn new(
				slice: $item,
				pred: P,
				count: usize,
			) -> Self {
				Self {
					inner: <$inner<'a, T, O, P>>::new(slice, pred),
					count,
				}
			}
		}

		impl<T, O, P> Debug for $outer<'_, T, O, P>
		where
			T: BitStore,
			O: BitOrder,
			P: FnMut(usize, &bool) -> bool
		{
			#[inline]
			fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
				//  The predicate is not `Debug`; render only slice and count.
				fmt.debug_struct(stringify!($outer))
					.field("slice", &self.inner.slice)
					.field("count", &self.count)
					.finish()
			}
		}

		impl<'a, T, O, P> Iterator for $outer<'a, T, O, P>
		where
			T: 'a + BitStore,
			O: BitOrder,
			P: FnMut(usize, &bool) -> bool,
			$( T::$alias: radium::Radium<<<T as BitStore>::Alias as BitStore>::Mem>, )?
		{
			type Item = <$inner <'a, T, O, P> as Iterator>::Item;

			#[inline]
			fn next(&mut self) -> Option<Self::Item> {
				match self.count {
					//  Split quota exhausted.
					0 => None,
					//  Final permitted split: yield the whole remainder.
					1 => {
						self.count -= 1;
						self.inner.finish()
					},
					//  Otherwise, delegate to the inner splitter.
					_ => {
						self.count -= 1;
						self.inner.next()
					},
				}
			}

			#[inline]
			fn size_hint(&self) -> (usize, Option<usize>) {
				//  Clamp *both* bounds by the remaining split quota, as
				//  `core::slice::SplitN::size_hint` does. Previously only the
				//  upper bound was clamped, so a drained wrapper could report
				//  an impossible hint such as `(1, Some(0))`.
				let (low, hi) = self.inner.size_hint();
				(
					cmp::min(low, self.count),
					hi.map(|h| cmp::min(h, self.count)).or(Some(self.count)),
				)
			}
		}

		impl<T, O, P> FusedIterator for $outer<'_, T, O, P>
		where
			T: BitStore,
			O: BitOrder,
			P: FnMut(usize, &bool) -> bool,
			$( T::$alias: radium::Radium<<<T as BitStore>::Alias as BitStore>::Mem>, )?
		{
		}
	)+ };
}
2097
//  The mutable wrappers operate on alias-tainted bit-slices, matching the
//  item types of their interior splitters.
split_n! {
	SplitN => Split => &'a BitSlice<T, O>;
	SplitNMut => SplitMut => &'a mut BitSlice<T::Alias, O>;
	RSplitN => RSplit => &'a BitSlice<T, O>;
	RSplitNMut => RSplitMut => &'a mut BitSlice<T::Alias, O>;
}
2104
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
#[doc = include_str!("../../doc/slice/iter/IterOnes.md")]
pub struct IterOnes<'a, T, O>
where
	T: 'a + BitStore,
	O: BitOrder,
{
	/// The remaining bit-slice whose `1` bits are to be found.
	///
	/// Found bits are trimmed off this view as iteration advances.
	inner: &'a BitSlice<T, O>,
	/// The offset from the front of the original bit-slice to the current
	/// `.inner`. Added to in-`inner` positions to produce absolute indices.
	front: usize,
}
2118
2119impl<'a, T, O> IterOnes<'a, T, O>
2120where
2121	T: 'a + BitStore,
2122	O: BitOrder,
2123{
2124	#[inline]
2125	#[allow(missing_docs, clippy::missing_docs_in_private_items)]
2126	pub(super) fn new(slice: &'a BitSlice<T, O>) -> Self {
2127		Self {
2128			inner: slice,
2129			front: 0,
2130		}
2131	}
2132}
2133
2134impl<T, O> Default for IterOnes<'_, T, O>
2135where
2136	T: BitStore,
2137	O: BitOrder,
2138{
2139	#[inline]
2140	fn default() -> Self {
2141		Self {
2142			inner: Default::default(),
2143			front: 0,
2144		}
2145	}
2146}
2147
impl<T, O> Iterator for IterOnes<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	type Item = usize;

	easy_iter!();

	#[inline]
	fn next(&mut self) -> Option<Self::Item> {
		//  Use the order-specialized (`sp_*`) searches when the ordering can
		//  be coerced to one of the two known types; otherwise, fall back to
		//  a bit-by-bit scan.
		let pos = if let Some(bits) = self.inner.coerce::<T, Lsb0>() {
			bits.sp_first_one()
		}
		else if let Some(bits) = self.inner.coerce::<T, Msb0>() {
			bits.sp_first_one()
		}
		else {
			self.inner.iter().by_vals().position(|b| b)
		};

		match pos {
			Some(n) => {
				//  Split at the index *past* the discovered bit. This is always
				//  safe, as `split_at(len)` produces `(self, [])`.
				let (_, rest) = unsafe { self.inner.split_at_unchecked(n + 1) };
				self.inner = rest;
				//  `n` is relative to `.inner`; `.front` rebases it onto the
				//  original bit-slice.
				let out = self.front + n;
				//  Search resumes from the next index after the found position.
				self.front = out + 1;
				Some(out)
			},
			None => {
				//  Exhausted: collapse to the canonical empty state.
				*self = Default::default();
				None
			},
		}
	}
}
2187
impl<T, O> DoubleEndedIterator for IterOnes<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn next_back(&mut self) -> Option<Self::Item> {
		//  Use the order-specialized (`sp_*`) reverse searches when possible;
		//  otherwise, fall back to a bit-by-bit reverse scan.
		let pos = if let Some(bits) = self.inner.coerce::<T, Lsb0>() {
			bits.sp_last_one()
		}
		else if let Some(bits) = self.inner.coerce::<T, Msb0>() {
			bits.sp_last_one()
		}
		else {
			self.inner.iter().by_vals().rposition(|b| b)
		};

		match pos {
			Some(n) => {
				//  Drop the found bit and everything after it. `.front` is
				//  unchanged, since the region's start does not move.
				let (rest, _) = unsafe { self.inner.split_at_unchecked(n) };
				self.inner = rest;
				Some(self.front + n)
			},
			None => {
				//  Exhausted: collapse to the canonical empty state.
				*self = Default::default();
				None
			},
		}
	}
}
2218
impl<T, O> ExactSizeIterator for IterOnes<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn len(&self) -> usize {
		//  Each remaining set bit produces exactly one more item.
		self.inner.count_ones()
	}
}
2229
//  Exhaustion resets the iterator to the empty default, so it continues to
//  yield `None` forever afterwards.
impl<T, O> FusedIterator for IterOnes<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
}
2236
#[doc = include_str!("../../doc/slice/iter/IterZeros.md")]
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub struct IterZeros<'a, T, O>
where
	T: 'a + BitStore,
	O: BitOrder,
{
	/// The remaining bit-slice whose `0` bits are to be found.
	///
	/// Found bits are trimmed off this view as iteration advances.
	inner: &'a BitSlice<T, O>,
	/// The offset from the front of the original bit-slice to the current
	/// `.inner`. Added to in-`inner` positions to produce absolute indices.
	front: usize,
}
2250
2251impl<'a, T, O> IterZeros<'a, T, O>
2252where
2253	T: 'a + BitStore,
2254	O: BitOrder,
2255{
2256	#[allow(missing_docs, clippy::missing_docs_in_private_items)]
2257	pub(super) fn new(slice: &'a BitSlice<T, O>) -> Self {
2258		Self {
2259			inner: slice,
2260			front: 0,
2261		}
2262	}
2263}
2264
2265impl<T, O> Default for IterZeros<'_, T, O>
2266where
2267	T: BitStore,
2268	O: BitOrder,
2269{
2270	#[inline]
2271	fn default() -> Self {
2272		Self {
2273			inner: Default::default(),
2274			front: 0,
2275		}
2276	}
2277}
2278
impl<T, O> Iterator for IterZeros<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	type Item = usize;

	easy_iter!();

	#[inline]
	fn next(&mut self) -> Option<Self::Item> {
		//  Use the order-specialized (`sp_*`) searches when the ordering can
		//  be coerced to one of the two known types; otherwise, fall back to
		//  a bit-by-bit scan.
		let pos = if let Some(bits) = self.inner.coerce::<T, Lsb0>() {
			bits.sp_first_zero()
		}
		else if let Some(bits) = self.inner.coerce::<T, Msb0>() {
			bits.sp_first_zero()
		}
		else {
			self.inner.iter().by_vals().position(|b| !b)
		};

		match pos {
			Some(n) => {
				//  Split past the found bit; `split_at(len)` is always valid.
				let (_, rest) = unsafe { self.inner.split_at_unchecked(n + 1) };
				self.inner = rest;
				//  `n` is relative to `.inner`; `.front` rebases it onto the
				//  original bit-slice. Search resumes after this position.
				let out = self.front + n;
				self.front = out + 1;
				Some(out)
			},
			None => {
				//  Exhausted: collapse to the canonical empty state.
				*self = Default::default();
				None
			},
		}
	}
}
2315
impl<T, O> DoubleEndedIterator for IterZeros<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn next_back(&mut self) -> Option<Self::Item> {
		//  Use the order-specialized (`sp_*`) reverse searches when possible;
		//  otherwise, fall back to a bit-by-bit reverse scan.
		let pos = if let Some(bits) = self.inner.coerce::<T, Lsb0>() {
			bits.sp_last_zero()
		}
		else if let Some(bits) = self.inner.coerce::<T, Msb0>() {
			bits.sp_last_zero()
		}
		else {
			self.inner.iter().by_vals().rposition(|b| !b)
		};

		match pos {
			Some(n) => {
				//  Drop the found bit and everything after it. `.front` is
				//  unchanged, since the region's start does not move.
				let (rest, _) = unsafe { self.inner.split_at_unchecked(n) };
				self.inner = rest;
				Some(self.front + n)
			},
			None => {
				//  Exhausted: collapse to the canonical empty state.
				*self = Default::default();
				None
			},
		}
	}
}
2346
impl<T, O> ExactSizeIterator for IterZeros<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn len(&self) -> usize {
		//  Each remaining cleared bit produces exactly one more item.
		self.inner.count_zeros()
	}
}
2357
//  Exhaustion resets the iterator to the empty default, so it continues to
//  yield `None` forever afterwards.
impl<T, O> FusedIterator for IterZeros<'_, T, O>
where
	T: BitStore,
	O: BitOrder,
{
}
2364
/* This macro has some very obnoxious call syntax that is necessary to handle
the different iteration protocols used above.

The `Split` iterators are not `DoubleEndedIterator` or `ExactSizeIterator`, and
must be excluded from those implementations. However, bounding on
`DoubleEndedIterator` causes `.next_back()` and `.nth_back()` to return opaque
associated types, rather than the return type from the directly-resolved
signatures. As such, the item type of the source iterator must also be provided
so that methods on it can be named.
*/
/// Creates wrappers that unsafely remove one layer of `::Alias` tainting.
macro_rules! noalias {
	($(
		//  Each entry supplies: the source iterator (with an optional
		//  predicate type parameter `$p`), the aliased item type it yields,
		//  the name of the generated wrapper, the un-aliased item type the
		//  wrapper yields, and the function used to strip the alias marker.
		$from:ident $(($p:ident))?
		=> $alias:ty
		=> $to:ident
		=> $item:ty
		=> $map:path;
	)+) => { $(
		#[repr(transparent)]
		#[doc = include_str!("../../doc/slice/iter/NoAlias.md")]
		pub struct $to<'a, T, O$(, $p)?>
		where
			T: 'a + BitStore,
			O: BitOrder,
			$($p: FnMut(usize, &bool) -> bool,)?
		{
			/// The actual iterator that this wraps.
			inner: $from<'a, T, O$(, $p)?>,
		}

		impl<'a, T, O$(, $p)?> $from<'a, T, O$(, $p)?>
		where
			T: 'a + BitStore,
			O: BitOrder,
			$($p: FnMut(usize, &bool) -> bool,)?
		{
			/// Removes a layer of `::Alias` tainting from the yielded item.
			///
			/// ## Safety
			///
			/// You *must* consume the adapted iterator in a loop that does not
			/// allow multiple yielded items to exist in the same scope. Each
			/// yielded item must have a completely non-overlapping lifetime
			/// from all the others.
			///
			/// The items yielded by this iterator will not have an additional
			/// alias marker applied to them, so their use in an iteration
			/// sequence will not be penalized when the surrounding code ensures
			/// that each item yielded by the iterator is destroyed before the
			/// next is produced.
			///
			/// This adapter does **not** convert the iterator to use the
			/// [`T::Mem`] raw underlying type, as it can be applied to an
			/// iterator over an already-aliased bit-slice and must preserve the
			/// initial condition. Its *only* effect is to remove the additional
			/// [`T::Alias`] marker imposed by the mutable iterators.
			///
			/// Violating this requirement causes memory-unsafety and breaks
			/// Rust’s data-race guarantees.
			///
			/// [`T::Alias`]: crate::store::BitStore::Alias
			/// [`T::Mem`]: crate::store::BitStore::Mem
			#[inline]
			#[must_use = "You must consume this object, preferably immediately \
			              upon creation"]
			pub unsafe fn remove_alias(self) -> $to<'a, T, O$(, $p)?> {
				$to { inner: self }
			}
		}

		//  Forward the core `Iterator` protocol, passing every yielded item
		//  through `$map` to strip its alias marker.
		impl<'a, T, O$(, $p)?> Iterator for $to<'a, T, O$(, $p)?>
		where
			T: 'a + BitStore,
			O: BitOrder,
			$($p: FnMut(usize, &bool) -> bool,)?
		{
			type Item = $item;

			#[inline]
			fn next(&mut self) -> Option<Self::Item> {
				self.inner.next().map(|item| unsafe { $map(item) })
			}

			#[inline]
			fn nth(&mut self, n: usize) -> Option<Self::Item> {
				self.inner.nth(n).map(|item| unsafe { $map(item) })
			}

			#[inline]
			fn size_hint(&self) -> (usize, Option<usize>) {
				self.inner.size_hint()
			}

			#[inline]
			fn count(self) -> usize {
				self.inner.count()
			}

			#[inline]
			fn last(self) -> Option<Self::Item> {
				self.inner.last().map(|item| unsafe { $map(item) })
			}
		}

		//  Forward `DoubleEndedIterator` only when the wrapped iterator
		//  implements it with the expected aliased item type; this excludes
		//  the `Split` family, which is single-ended.
		impl<'a, T, O$(, $p)?> DoubleEndedIterator for $to<'a, T, O$(, $p)?>
		where
			T: 'a + BitStore,
			O: BitOrder,
			$($p: FnMut(usize, &bool) -> bool,)?
			$from<'a, T, O$(, $p)?>: DoubleEndedIterator<Item = $alias>,
		{
			#[inline]
			fn next_back(&mut self) -> Option<Self::Item> {
				self.inner.next_back().map(|item| unsafe { $map(item) })
			}

			#[inline]
			fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
				self.inner.nth_back(n).map(|item| unsafe { $map(item) })
			}
		}

		//  Likewise, forward `ExactSizeIterator` and `FusedIterator` only
		//  when the wrapped iterator provides them.
		impl<'a, T, O$(, $p)?> ExactSizeIterator for $to<'a, T, O$(, $p)?>
		where
			T: 'a + BitStore,
			O: BitOrder,
			$($p: FnMut(usize, &bool) -> bool,)?
			$from<'a, T, O$(, $p)?>: ExactSizeIterator,
		{
			#[inline]
			fn len(&self) -> usize {
				self.inner.len()
			}
		}

		impl<'a, T, O$(, $p)?> FusedIterator for $to<'a, T, O$(, $p)?>
		where
			T: 'a + BitStore,
			O: BitOrder,
			$($p: FnMut(usize, &bool) -> bool,)?
			$from<'a, T, O$(, $p)?>: FusedIterator,
		{
		}
	)+ };
}
2510
/* Generate the `*NoAlias` wrapper for each mutable iterator. Each entry reads:
source iterator (with optional predicate parameter) => aliased item type
=> wrapper name => un-aliased item type => unaliasing function. */
noalias! {
	//  `IterMut` yields single-bit proxy references, unaliased by `BitRef`.
	IterMut => <usize as BitSliceIndex<'a, T::Alias, O>>::Mut
	=> IterMutNoAlias => <usize as BitSliceIndex<'a, T, O>>::Mut
	=> BitRef::remove_alias;

	//  All remaining iterators yield subslices, unaliased by `BitSlice`.
	ChunksMut => &'a mut BitSlice<T::Alias, O>
	=> ChunksMutNoAlias => &'a mut BitSlice<T, O>
	=> BitSlice::unalias_mut;

	ChunksExactMut => &'a mut BitSlice<T::Alias, O>
	=> ChunksExactMutNoAlias => &'a mut BitSlice<T, O>
	=> BitSlice::unalias_mut;

	RChunksMut => &'a mut BitSlice<T::Alias, O>
	=> RChunksMutNoAlias => &'a mut BitSlice<T, O>
	=> BitSlice::unalias_mut;

	RChunksExactMut => &'a mut BitSlice<T::Alias, O>
	=> RChunksExactMutNoAlias => &'a mut BitSlice<T, O>
	=> BitSlice::unalias_mut;

	//  The `Split` family carries a predicate type parameter `P`.
	SplitMut (P) => &'a mut BitSlice<T::Alias, O>
	=> SplitMutNoAlias => &'a mut BitSlice<T, O>
	=> BitSlice::unalias_mut;

	SplitInclusiveMut (P) => &'a mut BitSlice<T::Alias, O>
	=> SplitInclusiveMutNoAlias => &'a mut BitSlice<T, O>
	=> BitSlice::unalias_mut;

	RSplitMut (P) => &'a mut BitSlice<T::Alias, O>
	=> RSplitMutNoAlias => &'a mut BitSlice<T, O>
	=> BitSlice::unalias_mut;

	SplitNMut (P) => &'a mut BitSlice<T::Alias, O>
	=> SplitNMutNoAlias => &'a mut BitSlice<T, O>
	=> BitSlice::unalias_mut;

	RSplitNMut (P) => &'a mut BitSlice<T::Alias, O>
	=> RSplitNMutNoAlias => &'a mut BitSlice<T, O>
	=> BitSlice::unalias_mut;
}
2552
2553impl<'a, T, O> ChunksExactMutNoAlias<'a, T, O>
2554where
2555	T: 'a + BitStore,
2556	O: BitOrder,
2557{
2558	/// See [`ChunksExactMut::into_remainder()`][0].
2559	///
2560	/// [0]: crate::slice::ChunksExactMut::into_remainder
2561	#[inline]
2562	pub fn into_remainder(self) -> &'a mut BitSlice<T, O> {
2563		unsafe { BitSlice::unalias_mut(self.inner.into_remainder()) }
2564	}
2565
2566	/// See [`ChunksExactMut::take_remainder()`][0]
2567	///
2568	/// [0]: crate::slice::ChunksExactMut::take_remainder
2569	#[inline]
2570	pub fn take_remainder(&mut self) -> &'a mut BitSlice<T, O> {
2571		unsafe { BitSlice::unalias_mut(self.inner.take_remainder()) }
2572	}
2573}
2574
2575impl<'a, T, O> RChunksExactMutNoAlias<'a, T, O>
2576where
2577	T: 'a + BitStore,
2578	O: BitOrder,
2579{
2580	/// See [`RChunksExactMut::into_remainder()`][0]
2581	///
2582	/// [0]: crate::slice::RChunksExactMut::into_remainder
2583	#[inline]
2584	pub fn into_remainder(self) -> &'a mut BitSlice<T, O> {
2585		unsafe { BitSlice::unalias_mut(self.inner.into_remainder()) }
2586	}
2587
2588	/// See [`RChunksExactMut::take_remainder()`][0]
2589	///
2590	/// [0]:  crate::slice::RChunksExactMut::take_remainder
2591	#[inline]
2592	pub fn take_remainder(&mut self) -> &'a mut BitSlice<T, O> {
2593		unsafe { BitSlice::unalias_mut(self.inner.take_remainder()) }
2594	}
2595}