#![doc = include_str!("../../doc/ptr/range.md")]

use core::{
	fmt::{
		self,
		Debug,
		Formatter,
	},
	hash::{
		Hash,
		Hasher,
	},
	iter::FusedIterator,
	ops::{
		Bound,
		Range,
		RangeBounds,
	},
};

use wyz::comu::{
	Const,
	Mutability,
};

use super::{
	BitPtr,
	BitSpan,
};
use crate::{
	devel as dvl,
	order::{
		BitOrder,
		Lsb0,
	},
	store::BitStore,
};
#[repr(C)]
#[doc = include_str!("../../doc/ptr/BitPtrRange.md")]
pub struct BitPtrRange<M = Const, T = usize, O = Lsb0>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// The lower, inclusive, bound of the range.
	pub start: BitPtr<M, T, O>,
	/// The upper, exclusive, bound of the range.
	pub end: BitPtr<M, T, O>,
}

impl<M, T, O> BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// The canonical empty range, with both bounds set to `BitPtr::DANGLING`.
	pub const EMPTY: Self = Self {
		start: BitPtr::DANGLING,
		end: BitPtr::DANGLING,
	};

	/// Wraps an ordinary `Range` of bit-pointers in this structure.
	#[inline]
	pub fn from_range(Range { start, end }: Range<BitPtr<M, T, O>>) -> Self {
		Self { start, end }
	}

	/// Unwraps the pair back into an ordinary `Range` of bit-pointers.
	#[inline]
	pub fn into_range(self) -> Range<BitPtr<M, T, O>> {
		let Self { start, end } = self;
		start .. end
	}

	/// Tests whether the range is empty (its bounds are equal).
	#[inline]
	pub fn is_empty(&self) -> bool {
		self.start == self.end
	}

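	/// Tests whether a given bit-pointer lies inside this half-open range.
	///
	/// Pointers over a store type with a different memory representation never
	/// compare as contained; the mutability of the tested pointer may differ.
	///
	/// ## Examples
	///
	/// A usage sketch (not taken from the original documentation), assuming
	/// the crate's public `BitView::view_bits`, `BitSlice::as_bitptr`, and
	/// `BitSlice::as_bitptr_range` APIs:
	///
	/// ```rust
	/// use bitvec::prelude::*;
	///
	/// let data = 0u8;
	/// let bits = data.view_bits::<Lsb0>();
	/// let range = bits.as_bitptr_range();
	///
	/// // The pointer to the first bit lies within the range.
	/// assert!(range.contains(&bits.as_bitptr()));
	/// ```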
	#[inline]
	pub fn contains<M2, T2>(&self, pointer: &BitPtr<M2, T2, O>) -> bool
	where
		M2: Mutability,
		T2: BitStore,
	{
		dvl::match_store::<T::Mem, T2::Mem>()
			&& self.start <= *pointer
			&& *pointer < self.end
	}

	/// Converts the range into a `BitSpan` describing the same region.
	///
	/// ## Safety
	///
	/// The caller must ensure that the range is valid to view as a span; in
	/// particular, its length must satisfy the requirements of
	/// `BitPtr::span_unchecked`.
	pub(crate) unsafe fn into_bitspan(self) -> BitSpan<M, T, O> {
		self.start.span_unchecked(self.len())
	}

	/// Removes and returns the first bit-pointer in the range, advancing
	/// `self.start`.
	#[inline]
	fn take_front(&mut self) -> BitPtr<M, T, O> {
		let start = self.start;
		self.start = start.wrapping_add(1);
		start
	}

	/// Removes and returns the last bit-pointer in the range, retracting
	/// `self.end`.
	#[inline]
	fn take_back(&mut self) -> BitPtr<M, T, O> {
		let prev = self.end.wrapping_sub(1);
		self.end = prev;
		prev
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Clone for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn clone(&self) -> Self {
		Self { ..*self }
	}
}

impl<M, T, O> Eq for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
}

impl<M1, M2, O, T1, T2> PartialEq<BitPtrRange<M2, T2, O>>
	for BitPtrRange<M1, T1, O>
where
	M1: Mutability,
	M2: Mutability,
	O: BitOrder,
	T1: BitStore,
	T2: BitStore,
{
	#[inline]
	fn eq(&self, other: &BitPtrRange<M2, T2, O>) -> bool {
		dvl::match_store::<T1::Mem, T2::Mem>()
			&& self.start == other.start
			&& self.end == other.end
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Default for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn default() -> Self {
		Self::EMPTY
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> From<Range<BitPtr<M, T, O>>> for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn from(range: Range<BitPtr<M, T, O>>) -> Self {
		Self::from_range(range)
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> From<BitPtrRange<M, T, O>> for Range<BitPtr<M, T, O>>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn from(range: BitPtrRange<M, T, O>) -> Self {
		range.into_range()
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Debug for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		let Range { start, end } = self.clone().into_range();
		Debug::fmt(&start, fmt)?;
		//  Render the separator as " .. " in alternate mode, ".." otherwise.
		write!(fmt, "{0}..{0}", if fmt.alternate() { " " } else { "" })?;
		Debug::fmt(&end, fmt)
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Hash for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn hash<H>(&self, state: &mut H)
	where H: Hasher {
		self.start.hash(state);
		self.end.hash(state);
	}
}

impl<M, T, O> Iterator for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	type Item = BitPtr<M, T, O>;

	easy_iter!();

	#[inline]
	fn next(&mut self) -> Option<Self::Item> {
		if Self::is_empty(&*self) {
			return None;
		}
		Some(self.take_front())
	}

	#[inline]
	fn nth(&mut self, n: usize) -> Option<Self::Item> {
		if n >= self.len() {
			self.start = self.end;
			return None;
		}
		self.start = unsafe { self.start.add(n) };
		Some(self.take_front())
	}
}

impl<M, T, O> DoubleEndedIterator for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn next_back(&mut self) -> Option<Self::Item> {
		if Self::is_empty(&*self) {
			return None;
		}
		Some(self.take_back())
	}

	#[inline]
	fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
		if n >= self.len() {
			self.end = self.start;
			return None;
		}
		let out = unsafe { self.end.sub(n.wrapping_add(1)) };
		self.end = out;
		Some(out)
	}
}

impl<M, T, O> ExactSizeIterator for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn len(&self) -> usize {
		(unsafe { self.end.offset_from(self.start) }) as usize
	}
}

impl<M, T, O> FusedIterator for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> RangeBounds<BitPtr<M, T, O>> for BitPtrRange<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn start_bound(&self) -> Bound<&BitPtr<M, T, O>> {
		Bound::Included(&self.start)
	}

	#[inline]
	fn end_bound(&self) -> Bound<&BitPtr<M, T, O>> {
		Bound::Excluded(&self.end)
	}
}
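
//  A brief usage sketch, not part of the original module. The test below
//  assumes the crate's public `BitView::view_bits` and
//  `BitSlice::as_bitptr_range` APIs, and only exercises behavior visible in
//  the implementations above (iteration, length, and `Range` round-tripping).
#[cfg(test)]
mod tests {
	use crate::prelude::*;

	#[test]
	fn walks_like_a_range() {
		let data = 0u8;
		let bits = data.view_bits::<Lsb0>();
		let mut range = bits.as_bitptr_range();

		//  A `u8` element spans eight bits.
		assert_eq!(range.len(), 8);

		//  Consuming from both ends shrinks the range toward empty.
		let front = range.next().unwrap();
		let back = range.next_back().unwrap();
		assert_eq!(unsafe { back.offset_from(front) }, 7);
		assert_eq!(range.len(), 6);

		//  Conversion to and from `core::ops::Range` is lossless.
		let rebuilt =
			super::BitPtrRange::from_range(range.clone().into_range());
		assert_eq!(rebuilt, range);
	}
}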