bitvec/ptr/range.rs

#![doc = include_str!("../../doc/ptr/range.md")]

use core::{
	fmt::{
		self,
		Debug,
		Formatter,
	},
	hash::{
		Hash,
		Hasher,
	},
	iter::FusedIterator,
	ops::{
		Bound,
		Range,
		RangeBounds,
	},
};

use wyz::comu::{
	Const,
	Mutability,
};

use super::{
	BitPtr,
	BitSpan,
};
use crate::{
	devel as dvl,
	order::{
		BitOrder,
		Lsb0,
	},
	store::BitStore,
};

#[repr(C)]
#[doc = include_str!("../../doc/ptr/BitPtrRange.md")]
pub struct BitPtrRange<M = Const, T = usize, O = Lsb0>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// The lower, inclusive, bound of the range. The bit to which this points
	/// is considered live.
	pub start: BitPtr<M, T, O>,
	/// The higher, exclusive, bound of the range. The bit to which this points
	/// is considered dead, and the pointer may be one bit beyond the bounds of
	/// an allocation region.
	///
	/// Because Rust and LLVM both define the address of `base + (len * width)`
	/// as being within the provenance of `base`, even though that address may
	/// itself be the base address of another region in a different provenance,
	/// and bit-pointers are always composed of an ordinary memory address and a
	/// bit-counter, the ending bit-pointer is always valid.
	pub end:   BitPtr<M, T, O>,
}

impl<M, T, O> BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// The canonical empty range. All ranges with zero length (equal `.start`
	/// and `.end`) are equally empty.
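	///
	/// ## Examples
	///
	/// A minimal sketch, relying only on the type's default parameters and the
	/// methods defined in this module:
	///
	/// ```rust
	/// use bitvec::ptr::BitPtrRange;
	///
	/// let empty: BitPtrRange = BitPtrRange::EMPTY;
	/// assert!(empty.is_empty());
	/// assert_eq!(empty.start, empty.end);
	/// ```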
	pub const EMPTY: Self = Self {
		start: BitPtr::DANGLING,
		end:   BitPtr::DANGLING,
	};

	/// Explicitly converts a `Range<BitPtr>` into a `BitPtrRange`.
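	///
	/// ## Examples
	///
	/// A small sketch, mirroring the setup used by the other examples in this
	/// module:
	///
	/// ```rust
	/// use bitvec::prelude::*;
	/// use bitvec::ptr::BitPtrRange;
	///
	/// let data = 0u8;
	/// let bp = BitPtr::<_, _, Lsb0>::from_ref(&data);
	/// let range = BitPtrRange::from_range(bp .. bp.wrapping_add(4));
	///
	/// assert_eq!(range.len(), 4);
	/// ```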
	#[inline]
	pub fn from_range(Range { start, end }: Range<BitPtr<M, T, O>>) -> Self {
		Self { start, end }
	}

	/// Explicitly converts a `BitPtrRange` into a `Range<BitPtr>`.
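	///
	/// ## Examples
	///
	/// A round-trip sketch, using the same setup as the example above:
	///
	/// ```rust
	/// use bitvec::prelude::*;
	/// use bitvec::ptr::BitPtrRange;
	///
	/// let data = 0u8;
	/// let bp = BitPtr::<_, _, Lsb0>::from_ref(&data);
	/// let range = BitPtrRange::from_range(bp .. bp.wrapping_add(4));
	///
	/// let core_range = range.into_range();
	/// assert_eq!(core_range.start, bp);
	/// assert_eq!(core_range.end, bp.wrapping_add(4));
	/// ```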
	#[inline]
	pub fn into_range(self) -> Range<BitPtr<M, T, O>> {
		let Self { start, end } = self;
		start .. end
	}

	/// Tests if the range is empty (the distance between bit-pointers is `0`).
	///
	/// ## Original
	///
	/// [`Range::is_empty`](core::ops::Range::is_empty)
	///
	/// ## Examples
	///
	/// ```rust
	/// use bitvec::prelude::*;
	/// use bitvec::ptr::BitPtrRange;
	///
	/// let data = 0u8;
	/// let bp = BitPtr::<_, _, Lsb0>::from_ref(&data);
	/// let mut range = BitPtrRange::from_range(bp .. bp.wrapping_add(1));
	///
	/// assert!(!range.is_empty());
	/// assert_ne!(range.start, range.end);
	///
	/// range.next();
	///
	/// assert!(range.is_empty());
	/// assert_eq!(range.start, range.end);
	/// ```
	#[inline]
	pub fn is_empty(&self) -> bool {
		self.start == self.end
	}

	/// Tests if a given bit-pointer is contained within the range.
	///
	/// Bit-pointer ordering is defined when the types have the same exact
	/// `BitOrder` type parameter and the same `BitStore::Mem` associated type
	/// (but are free to differ in alias condition!). Inclusion in a range
	/// occurs when the bit-pointer is not strictly less than the range start,
	/// and is strictly less than the range end.
	///
	/// ## Original
	///
	/// [`Range::contains`](core::ops::Range::contains)
	///
	/// ## Examples
	///
	/// ```rust
	/// use bitvec::prelude::*;
	/// use bitvec::ptr::BitPtrRange;
	/// use core::cell::Cell;
	///
	/// let data = 0u16;
	/// let bp = BitPtr::<_, _, Lsb0>::from_ref(&data);
	///
	/// let mut range = BitPtrRange::from_range(bp .. bp.wrapping_add(16));
	/// range.nth(2);
	/// range.nth_back(2);
	///
	/// assert!(bp < range.start);
	/// assert!(!range.contains(&bp));
	///
	/// let mid = bp.wrapping_add(8);
	/// assert!(range.contains(&mid));
	///
	/// let same_mem = mid.cast::<Cell<u16>>();
	/// assert!(range.contains(&same_mem));
	/// ```
	///
	/// Casting to a different `BitStore` type whose `Mem` parameter differs
	/// from the range always results in a `false` response, even if the pointer
	/// being tested is numerically within the range.
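	///
	/// For example, casting a pointer to a differently-sized element type
	/// excludes it from the range even at the same address. A sketch reusing
	/// the setup above:
	///
	/// ```rust
	/// use bitvec::prelude::*;
	/// use bitvec::ptr::BitPtrRange;
	///
	/// let data = 0u16;
	/// let bp = BitPtr::<_, _, Lsb0>::from_ref(&data);
	/// let range = BitPtrRange::from_range(bp .. bp.wrapping_add(16));
	///
	/// // Same address, but `u8::Mem` differs from `u16::Mem`, so containment
	/// // always fails for the cast pointer.
	/// let as_u8 = bp.cast::<u8>();
	/// assert!(range.contains(&bp));
	/// assert!(!range.contains(&as_u8));
	/// ```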
	#[inline]
	pub fn contains<M2, T2>(&self, pointer: &BitPtr<M2, T2, O>) -> bool
	where
		M2: Mutability,
		T2: BitStore,
	{
		dvl::match_store::<T::Mem, T2::Mem>()
			&& self.start <= *pointer
			&& *pointer < self.end
	}

	/// Converts the range into a span descriptor over all live bits.
	///
	/// The produced bit-span does *not* include the bit addressed by `.end`.
	///
	/// ## Safety
	///
	/// The `.start` and `.end` bit-pointers must both be derived from the same
	/// provenance region. `BitSpan` draws its provenance from the `.start`
	/// element pointer, and incorrectly extending it beyond the source
	/// provenance is undefined behavior.
	pub(crate) unsafe fn into_bitspan(self) -> BitSpan<M, T, O> {
		self.start.span_unchecked(self.len())
	}

	/// Snapshots `.start`, then increments it.
	///
	/// This method is only safe to call when the range is non-empty.
	#[inline]
	fn take_front(&mut self) -> BitPtr<M, T, O> {
		let start = self.start;
		self.start = start.wrapping_add(1);
		start
	}

	/// Decrements `.end`, then returns it.
	///
	/// The bit-pointer returned by this method always points to a live bit.
	///
	/// This method is only safe to call when the range is non-empty.
	#[inline]
	fn take_back(&mut self) -> BitPtr<M, T, O> {
		let prev = self.end.wrapping_sub(1);
		self.end = prev;
		prev
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Clone for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn clone(&self) -> Self {
		Self { ..*self }
	}
}

impl<M, T, O> Eq for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
}

impl<M1, M2, O, T1, T2> PartialEq<BitPtrRange<M2, T2, O>>
	for BitPtrRange<M1, T1, O>
where
	M1: Mutability,
	M2: Mutability,
	O: BitOrder,
	T1: BitStore,
	T2: BitStore,
{
	#[inline]
	fn eq(&self, other: &BitPtrRange<M2, T2, O>) -> bool {
		//  Pointers over different element types are never equal
		dvl::match_store::<T1::Mem, T2::Mem>()
			&& self.start == other.start
			&& self.end == other.end
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Default for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn default() -> Self {
		Self::EMPTY
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> From<Range<BitPtr<M, T, O>>> for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn from(range: Range<BitPtr<M, T, O>>) -> Self {
		Self::from_range(range)
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> From<BitPtrRange<M, T, O>> for Range<BitPtr<M, T, O>>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn from(range: BitPtrRange<M, T, O>) -> Self {
		range.into_range()
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Debug for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		let Range { start, end } = self.clone().into_range();
		Debug::fmt(&start, fmt)?;
		write!(fmt, "{0}..{0}", if fmt.alternate() { " " } else { "" })?;
		Debug::fmt(&end, fmt)
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Hash for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn hash<H>(&self, state: &mut H)
	where H: Hasher {
		self.start.hash(state);
		self.end.hash(state);
	}
}

impl<M, T, O> Iterator for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	type Item = BitPtr<M, T, O>;

	easy_iter!();

	#[inline]
	fn next(&mut self) -> Option<Self::Item> {
		if Self::is_empty(&*self) {
			return None;
		}
		Some(self.take_front())
	}

	#[inline]
	fn nth(&mut self, n: usize) -> Option<Self::Item> {
		if n >= self.len() {
			self.start = self.end;
			return None;
		}
		self.start = unsafe { self.start.add(n) };
		Some(self.take_front())
	}
}

impl<M, T, O> DoubleEndedIterator for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn next_back(&mut self) -> Option<Self::Item> {
		if Self::is_empty(&*self) {
			return None;
		}
		Some(self.take_back())
	}

	#[inline]
	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
		if n >= self.len() {
			self.end = self.start;
			return None;
		}
		let out = unsafe { self.end.sub(n.wrapping_add(1)) };
		self.end = out;
		Some(out)
	}
}

impl<M, T, O> ExactSizeIterator for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn len(&self) -> usize {
		(unsafe { self.end.offset_from(self.start) }) as usize
	}
}

impl<M, T, O> FusedIterator for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> RangeBounds<BitPtr<M, T, O>> for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn start_bound(&self) -> Bound<&BitPtr<M, T, O>> {
		Bound::Included(&self.start)
	}

	#[inline]
	fn end_bound(&self) -> Bound<&BitPtr<M, T, O>> {
		Bound::Excluded(&self.end)
	}
}
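
// A small sanity-check sketch of the iterator behavior above, written as an
// in-file unit test. It exercises only APIs that already appear in this module
// and its doc examples; the test name itself is illustrative.
#[cfg(test)]
mod tests {
	use super::*;

	#[test]
	fn double_ended_iteration() {
		let data = 0u8;
		let bp = BitPtr::<Const, _, Lsb0>::from_ref(&data);
		let mut range = BitPtrRange::from_range(bp .. bp.wrapping_add(8));

		// The front and back ends shrink independently toward the middle.
		assert_eq!(range.len(), 8);
		assert_eq!(range.next(), Some(bp));
		assert_eq!(range.next_back(), Some(bp.wrapping_add(7)));
		assert_eq!(range.len(), 6);

		// Draining the rest leaves an empty, fused iterator.
		assert_eq!(range.by_ref().count(), 6);
		assert!(range.is_empty());
		assert_eq!(range.next(), None);
	}
}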