#![doc = include_str!("../../doc/ptr/span.md")]

use core::{
	any,
	fmt::{
		self,
		Binary,
		Debug,
		Display,
		Formatter,
		Pointer,
	},
	marker::PhantomData,
	mem,
	ptr::{
		self,
		NonNull,
	},
};

use tap::Pipe;
use wyz::{
	comu::{
		Address,
		Const,
		Mut,
		Mutability,
		NullPtrError,
		Reference,
		Referential,
	},
	fmt::FmtForward,
};

use super::{
	BitPtr,
	BitPtrError,
	BitPtrRange,
	MisalignError,
};
use crate::{
	index::{
		BitEnd,
		BitIdx,
	},
	mem::{
		bits_of,
		BitRegister,
	},
	order::{
		BitOrder,
		Lsb0,
	},
	slice::BitSlice,
	store::BitStore,
};

#[doc = include_str!("../../doc/ptr/BitSpan.md")]
pub(crate) struct BitSpan<M = Const, T = usize, O = Lsb0>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// The element address in which the base bit lives.
	///
	/// This must be typed as `()` because it cannot be directly dereferenced,
	/// and will not have valid values for `NonNull<T>`: its low bits also
	/// store part of the `.head()` logical field.
	ptr: NonNull<()>,
	/// The length of the span, in bits, fused with the low bits of the head
	/// `BitIdx` cursor.
	len: usize,
	/// The bit-ordering within elements used to translate indices to real bits.
	_or: PhantomData<O>,
	/// This is functionally an element-slice pointer.
	_ty: PhantomData<Address<M, [T]>>,
}

impl<M, T, O> BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// The canonical empty span. This always uses the dangling address for `T`.
	pub(crate) const EMPTY: Self = Self {
		ptr: NonNull::<T>::dangling().cast::<()>(),
		len: 0,
		_or: PhantomData,
		_ty: PhantomData,
	};
	/// The number of least-significant bits in `.len` needed to hold the low
	/// bits of the head `BitIdx` cursor.
	///
	/// This is always 3 until Rust adds a target architecture whose bytes are
	/// not 8 bits.
	pub(crate) const LEN_HEAD_BITS: usize = 3;
	/// Marks the bits of `.len` that store some of the `.head()` logical field.
	pub(crate) const LEN_HEAD_MASK: usize = 0b111;
	/// Marks the bits of `.ptr` that store the `.addr()` logical field.
	pub(crate) const PTR_ADDR_MASK: usize = !0 << Self::PTR_HEAD_BITS;
	/// The number of least-significant bits in `.ptr` needed to hold the high
	/// bits of the head `BitIdx` cursor.
	pub(crate) const PTR_HEAD_BITS: usize =
		<T::Mem as BitRegister>::INDX as usize - Self::LEN_HEAD_BITS;
	/// Marks the bits of `.ptr` that store some of the `.head()` logical field.
	pub(crate) const PTR_HEAD_MASK: usize = !Self::PTR_ADDR_MASK;
	/// The inclusive-maximum number of bits that a `BitSpan` can cover. This
	/// value is therefore one higher than the maximum *index* that can be used
	/// to select a bit within a span.
	pub(crate) const REGION_MAX_BITS: usize = !0 >> Self::LEN_HEAD_BITS;
	/// The inclusive-maximum number of memory elements that a bit-span can
	/// cover.
	///
	/// This is the number of elements required to store `REGION_MAX_BITS` bits,
	/// plus one because a region could begin away from the zeroth bit and thus
	/// continue into the next element at the end.
	///
	/// Since the region is ⅛th the domain of a `usize` counter already, this
	/// number is guaranteed to be well below the limits of both arithmetic and
	/// Rust’s own ceiling constraints on memory region descriptors.
	pub(crate) const REGION_MAX_ELTS: usize =
		crate::mem::elts::<T::Mem>(Self::REGION_MAX_BITS) + 1;
}
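
// A test-only sketch of how the mask constants partition the two words: the
// pointer word splits into an address region and the high bits of the head
// index, while the low three bits of the length word hold the rest of the
// head index. This uses only the constants defined above.
#[cfg(test)]
mod encoding_layout_sketch {
	use super::*;

	type S = BitSpan<Const, usize, Lsb0>;

	#[test]
	fn masks_partition_the_words() {
		//  The pointer word’s two masks are disjoint and exhaustive.
		assert_eq!(S::PTR_ADDR_MASK & S::PTR_HEAD_MASK, 0);
		assert_eq!(S::PTR_ADDR_MASK | S::PTR_HEAD_MASK, !0);
		//  The length word reserves exactly `LEN_HEAD_BITS` low bits.
		assert_eq!(S::LEN_HEAD_MASK, (1 << S::LEN_HEAD_BITS) - 1);
	}
}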

/// Constructors.
impl<M, T, O> BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// Constructs an empty `BitSpan` at an allocated address.
	///
	/// This is used when the region has no contents, but the pointer
	/// information must be retained and cannot be canonicalized.
	///
	/// ## Parameters
	///
	/// - `addr`: Some address of a `T` allocation. It must be valid in the
	///   caller’s memory regime.
	///
	/// ## Returns
	///
	/// A zero-length `BitSpan` based at `addr`.
	#[cfg(feature = "alloc")]
	pub(crate) fn uninhabited(addr: Address<M, T>) -> Self {
		Self {
			ptr: addr.into_inner().cast::<()>(),
			..Self::EMPTY
		}
	}

	/// Creates a new bit-span from its logical components.
	///
	/// ## Parameters
	///
	/// - `addr`: The base address of the memory region in which the bit-span
	///   resides.
	/// - `head`: The index of the initial bit within `*addr`.
	/// - `bits`: The number of bits contained in the bit-span.
	///
	/// ## Returns
	///
	/// A new `BitSpan`, if the arguments are valid. This fails in the
	/// following conditions:
	///
	/// - `bits` is greater than `REGION_MAX_BITS`.
	/// - `addr` is not aligned to `T`.
	/// - `addr + elts(bits)` wraps around the address space.
	///
	/// The `Address` type already enforces the non-null requirement.
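	///
	/// ## Sketch
	///
	/// An illustrative round-trip, hedged because `BitSpan` is crate-internal
	/// and cannot be doctested; `old_span` is a hypothetical existing span:
	///
	/// ```rust,ignore
	/// // Decompose an existing span, then rebuild it through the checked
	/// // constructor. Every check passes, because the parts were valid.
	/// let (addr, head, bits) = old_span.raw_parts();
	/// let rebuilt = BitSpan::new(addr, head, bits).unwrap();
	/// assert_eq!(rebuilt, old_span);
	/// ```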
	pub(crate) fn new(
		addr: Address<M, T>,
		head: BitIdx<T::Mem>,
		bits: usize,
	) -> Result<Self, BitSpanError<T>> {
		if bits > Self::REGION_MAX_BITS {
			return Err(BitSpanError::TooLong(bits));
		}
		let base = BitPtr::<M, T, O>::new(addr, head)?;
		let last = base.wrapping_add(bits);
		if last < base {
			return Err(BitSpanError::TooHigh(addr.to_const()));
		}

		Ok(unsafe { Self::new_unchecked(addr, head, bits) })
	}

	/// Creates a new bit-span from its components, without any validity checks.
	///
	/// ## Safety
	///
	/// The caller must ensure that the arguments satisfy all the requirements
	/// outlined in [`::new()`]. The easiest way to ensure this is to only use
	/// this function to construct bit-spans from values extracted from
	/// bit-spans previously constructed through `::new()`.
	///
	/// This function **only** performs the value encoding. Invalid lengths will
	/// truncate, and invalid addresses may cause memory unsafety.
	///
	/// [`::new()`]: Self::new
	pub(crate) unsafe fn new_unchecked(
		addr: Address<M, T>,
		head: BitIdx<T::Mem>,
		bits: usize,
	) -> Self {
		let addr = addr.to_const().cast::<u8>();

		let head = head.into_inner() as usize;
		let ptr_data = addr as usize & Self::PTR_ADDR_MASK;
		let ptr_head = head >> Self::LEN_HEAD_BITS;

		let len_head = head & Self::LEN_HEAD_MASK;
		let len_bits = bits << Self::LEN_HEAD_BITS;

		/* See <https://github.com/bitvecto-rs/bitvec/issues/135#issuecomment-986357842>.
		 * This attempts to retain inbound provenance information and may help
		 * Miri better understand pointer operations this module performs.
		 *
		 * This performs `a + (p - a)` in `addr`’s provenance zone, which is
		 * numerically equivalent to `p` but does not require conjuring a new,
		 * uninformed, pointer value.
		 */
		let ptr_raw = ptr_data | ptr_head;
		let ptr = addr.wrapping_add(ptr_raw.wrapping_sub(addr as usize));

		Self {
			ptr: NonNull::new_unchecked(ptr.cast::<()>() as *mut ()),
			len: len_bits | len_head,
			..Self::EMPTY
		}
	}
}
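
// A numeric sketch (test-only, not part of the crate’s surface) of the
// `a + (p - a)` identity that `new_unchecked` relies on: offsetting `a` by
// the wrapping difference `p - a` reproduces `p` exactly, without minting a
// pointer value from nowhere.
#[cfg(test)]
mod provenance_arith_sketch {
	#[test]
	fn add_sub_roundtrip() {
		let a: usize = 0x1000;
		let p: usize = 0x1abc;
		assert_eq!(a.wrapping_add(p.wrapping_sub(a)), p);
	}
}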

/// Encoded fields.
impl<M, T, O> BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// Gets the base element address of the referent region.
	///
	/// ## Parameters
	///
	/// - `&self`
	///
	/// ## Returns
	///
	/// The address of the starting element of the memory region. This address
	/// is weakly typed so that it can be cast by call sites to the most useful
	/// access type.
	pub(crate) fn address(&self) -> Address<M, T> {
		Address::new(unsafe {
			NonNull::new_unchecked(
				(self.ptr.as_ptr() as usize & Self::PTR_ADDR_MASK) as *mut T,
			)
		})
	}

	/// Overwrites the data pointer with a new address. This method does not
	/// perform safety checks on the new pointer.
	///
	/// ## Parameters
	///
	/// - `&mut self`
	/// - `addr`: The new address of the `BitSpan`’s domain.
	///
	/// ## Safety
	///
	/// None of the invariants of [`::new`] are checked here; they must be
	/// upheld by the caller.
	///
	/// [`::new`]: Self::new
	#[cfg(feature = "alloc")]
	pub(crate) unsafe fn set_address(&mut self, addr: Address<M, T>) {
		let mut addr_value = addr.to_const() as usize;
		addr_value &= Self::PTR_ADDR_MASK;
		addr_value |= self.ptr.as_ptr() as usize & Self::PTR_HEAD_MASK;
		self.ptr = NonNull::new_unchecked(addr_value as *mut ());
	}

	/// Gets the starting bit index of the referent region.
	///
	/// ## Parameters
	///
	/// - `&self`
	///
	/// ## Returns
	///
	/// A [`BitIdx`] of the first live bit in the element at the
	/// [`self.address()`] address.
	///
	/// [`BitIdx`]: crate::index::BitIdx
	/// [`self.address()`]: Self::address
	pub(crate) fn head(&self) -> BitIdx<T::Mem> {
		let ptr = self.ptr.as_ptr() as usize;
		let ptr_head = (ptr & Self::PTR_HEAD_MASK) << Self::LEN_HEAD_BITS;
		let len_head = self.len & Self::LEN_HEAD_MASK;
		unsafe { BitIdx::new_unchecked((ptr_head | len_head) as u8) }
	}

	/// Writes a new `head` value into the pointer, with no other effects.
	///
	/// ## Parameters
	///
	/// - `&mut self`
	/// - `head`: A new starting index.
	///
	/// ## Effects
	///
	/// `head` is written into the `.head` logical field, without affecting
	/// `.addr` or `.bits`.
	///
	/// ## Safety
	///
	/// As with [`::set_address`], none of the invariants of [`::new`] are
	/// checked here; they must be upheld by the caller.
	///
	/// [`::new`]: Self::new
	/// [`::set_address`]: Self::set_address
	#[cfg(feature = "alloc")]
	pub(crate) unsafe fn set_head(&mut self, head: BitIdx<T::Mem>) {
		let head = head.into_inner() as usize;
		let mut ptr = self.ptr.as_ptr() as usize;

		ptr &= Self::PTR_ADDR_MASK;
		ptr |= head >> Self::LEN_HEAD_BITS;
		self.ptr = NonNull::new_unchecked(ptr as *mut ());

		self.len &= !Self::LEN_HEAD_MASK;
		self.len |= head & Self::LEN_HEAD_MASK;
	}

	/// Gets the number of live bits in the described region.
	///
	/// ## Parameters
	///
	/// - `&self`
	///
	/// ## Returns
	///
	/// A count of how many live bits the region pointer describes.
	pub(crate) fn len(&self) -> usize {
		self.len >> Self::LEN_HEAD_BITS
	}

	/// Sets the `.bits` logical member to a new value.
	///
	/// ## Parameters
	///
	/// - `&mut self`
	/// - `new_len`: A new bit length. This must not be greater than
	///   [`REGION_MAX_BITS`].
	///
	/// ## Effects
	///
	/// The `new_len` value is written directly into the `.bits` logical field.
	///
	/// [`REGION_MAX_BITS`]: Self::REGION_MAX_BITS
	pub(crate) unsafe fn set_len(&mut self, new_len: usize) {
		if cfg!(debug_assertions) {
			*self = Self::new(self.address(), self.head(), new_len).unwrap();
		}
		else {
			self.len &= Self::LEN_HEAD_MASK;
			self.len |= new_len << Self::LEN_HEAD_BITS;
		}
	}

	/// Gets the three logical components of the pointer.
	///
	/// The encoding is not public API, and direct field access is never
	/// supported.
	///
	/// ## Parameters
	///
	/// - `&self`
	///
	/// ## Returns
	///
	/// - `.0`: The base address of the referent memory region.
	/// - `.1`: The index of the first live bit in the first element of the
	///   region.
	/// - `.2`: The number of live bits in the region.
	pub(crate) fn raw_parts(&self) -> (Address<M, T>, BitIdx<T::Mem>, usize) {
		(self.address(), self.head(), self.len())
	}
}
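
// A minimal encode/decode sanity sketch (test-only), assuming the crate’s
// public `BitSlice::from_element` constructor and range indexing plus this
// module’s `From<&BitSlice>` implementation: build a span with a known head
// and length, then read the logical fields back out.
#[cfg(test)]
mod field_roundtrip_sketch {
	use super::*;

	#[test]
	fn decode_matches_construction() {
		let data = 0usize;
		let bits = BitSlice::<usize, Lsb0>::from_element(&data);
		//  Bits 5 .. 29 fit in one element on both 32- and 64-bit targets.
		let span = BitSpan::from(&bits[5 .. 29]);
		let (addr, head, len) = span.raw_parts();
		assert_eq!(addr.to_const(), &data as *const usize);
		assert_eq!(head.into_inner(), 5);
		assert_eq!(len, 24);
	}
}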

/// Virtual fields.
impl<M, T, O> BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// Computes the number of elements, starting at [`self.address()`], that
	/// the region touches.
	///
	/// ## Parameters
	///
	/// - `&self`
	///
	/// ## Returns
	///
	/// The count of all elements, starting at [`self.address()`], that contain
	/// live bits included in the referent region.
	///
	/// [`self.address()`]: Self::address
	pub(crate) fn elements(&self) -> usize {
		crate::mem::elts::<T>(self.len() + self.head().into_inner() as usize)
	}

	/// Computes the tail index for the first dead bit after the live bits.
	///
	/// ## Parameters
	///
	/// - `&self`
	///
	/// ## Returns
	///
	/// A `BitEnd` that is the index of the first dead bit after the last live
	/// bit in the last element. This will almost always be in the range
	/// `1 ..= T::Mem::BITS`.
	///
	/// It will be zero only when `self` is empty.
	pub(crate) fn tail(&self) -> BitEnd<T::Mem> {
		let (head, len) = (self.head(), self.len());
		let (_, tail) = head.span(len);
		tail
	}
}
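
// A test-only sketch of the virtual-field arithmetic, assuming the crate’s
// public `BitSlice::from_slice` constructor: a span beginning at bit 6 of a
// four-byte array and running for 20 bits touches all four bytes, and its
// first dead bit is index 2 of the last byte (6 + 20 = 26; 26 mod 8 = 2).
#[cfg(test)]
mod span_geometry_sketch {
	use super::*;

	#[test]
	fn elements_and_tail() {
		let data = [0u8; 4];
		let bits = BitSlice::<u8, Lsb0>::from_slice(&data);
		let span = BitSpan::from(&bits[6 .. 26]);
		assert_eq!(span.elements(), 4);
		assert_eq!(span.tail().into_inner(), 2);
	}
}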

/// Conversions.
impl<M, T, O> BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// Casts the span to another element type.
	///
	/// This does not alter the encoded value of the pointer! It only
	/// reinterprets the element type, and the encoded value may shift
	/// significantly in the result type. Use with caution.
	pub(crate) fn cast<U>(self) -> BitSpan<M, U, O>
	where U: BitStore {
		let Self { ptr, len, .. } = self;
		BitSpan {
			ptr,
			len,
			..BitSpan::EMPTY
		}
	}

	/// Reäligns a bit-span to a different base memory type.
	///
	/// ## Original
	///
	/// [`slice::align_to`](https://doc.rust-lang.org/std/primitive.slice.html#method.align_to)
	///
	/// ## Safety
	///
	/// `U` must have the same type family as `T`. It is illegal to use this
	/// method to cast away alias safeties such as an atomic or `Cell` wrapper.
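	///
	/// ## Sketch
	///
	/// A hedged illustration (not a doctest; `BitSpan` is crate-internal) of
	/// splitting a byte-typed span over a `u32` boundary. Whatever the
	/// runtime alignment of `data` turns out to be, the three pieces always
	/// partition the original span:
	///
	/// ```rust,ignore
	/// let data = [0u8; 8];
	/// let bits = BitSlice::<u8, Lsb0>::from_slice(&data);
	/// let span = BitSpan::from(bits);
	/// let (left, mid, right) = unsafe { span.align_to::<u32>() };
	/// assert_eq!(left.len() + mid.len() + right.len(), span.len());
	/// ```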
	pub(crate) unsafe fn align_to<U>(self) -> (Self, BitSpan<M, U, O>, Self)
	where U: BitStore {
		/* This function body implements the algorithm locally, rather than
		 * delegating to the standard library’s `<[T]>::align_to::<U>`
		 * function, because that requires use of memory references, and
		 * `BitSpan` does not require that its values be valid for
		 * dereference.
		 */
		let this = self.to_bitptr();
		//  Counter for how many bits remain in the span.
		let mut rem = self.len();
		//  The *byte* alignment of `U`.
		let align = mem::align_of::<U>();
		//  1. Get the number of bits between `self.head()` and the start of a
		//     `[U]` region.
		let step = this.align_offset(align);
		//  If this count is more than the available bits, quit.
		if step > rem {
			return (self, BitSpan::EMPTY, Self::EMPTY);
		}
		let left = this.span_unchecked(step);
		rem -= step;

		let mid_base =
			this.add(step).address().cast::<U>().pipe(|addr| {
				BitPtr::<M, U, O>::new_unchecked(addr, BitIdx::MIN)
			});
		let mid_elts = rem >> <U::Mem as BitRegister>::INDX;
		let excess = rem & <U::Mem as BitRegister>::MASK as usize;
		let step = rem - excess;
		let mid = mid_base.span_unchecked(step);

		let right_base =
			mid_base.address().add(mid_elts).cast::<T>().pipe(|addr| {
				BitPtr::<M, T, O>::new_unchecked(addr, BitIdx::MIN)
			});
		let right = right_base.span_unchecked(excess);

		(left, mid, right)
	}

	/// Casts a mutable bit-slice pointer into its structural representation.
	pub(crate) fn from_bitslice_ptr_mut(raw: *mut BitSlice<T, O>) -> Self {
		let BitSpan { ptr, len, .. } =
			BitSpan::from_bitslice_ptr(raw as *const BitSlice<T, O>);
		Self {
			ptr,
			len,
			..Self::EMPTY
		}
	}

	/// Converts the span descriptor into a raw `BitSlice` pointer.
	///
	/// This is a noöp.
	pub(crate) fn into_bitslice_ptr(self) -> *const BitSlice<T, O> {
		let Self { ptr, len, .. } = self;
		ptr::slice_from_raw_parts(ptr.as_ptr(), len) as *const BitSlice<T, O>
	}

	/// Converts the span descriptor into a shared `BitSlice` reference.
	///
	/// This is a noöp.
	///
	/// ## Safety
	///
	/// The span must describe memory that is safe to dereference, and to which
	/// no `&mut BitSlice` references exist.
	pub(crate) unsafe fn into_bitslice_ref<'a>(self) -> &'a BitSlice<T, O> {
		&*self.into_bitslice_ptr()
	}

	/// Produces a bit-pointer to the start of the span.
	///
	/// This is **not** a noöp: the base address and starting bit index are
	/// decoded into the bit-pointer structure.
	pub(crate) fn to_bitptr(self) -> BitPtr<M, T, O> {
		unsafe { BitPtr::new_unchecked(self.address(), self.head()) }
	}

	/// Produces a bit-pointer range to either end of the span.
	///
	/// This is **not** a noöp: all three logical fields are decoded in order to
	/// construct the range.
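	///
	/// A hedged sketch (not a doctest): the produced range walks one
	/// bit-pointer per live bit, in order.
	///
	/// ```rust,ignore
	/// let range = span.to_bitptr_range();
	/// assert_eq!(range.count(), span.len());
	/// ```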
	pub(crate) fn to_bitptr_range(self) -> BitPtrRange<M, T, O> {
		let start = self.to_bitptr();
		let end = unsafe { start.add(self.len()) };
		BitPtrRange { start, end }
	}

	/// Converts the span descriptor into an `Address<>` generic pointer.
	///
	/// This is a noöp.
	pub(crate) fn to_bitslice_addr(self) -> Address<M, BitSlice<T, O>> {
		(self.into_bitslice_ptr() as *mut BitSlice<T, O>)
			.pipe(|ptr| unsafe { NonNull::new_unchecked(ptr) })
			.pipe(Address::new)
	}

	/// Converts the span descriptor into a `Reference<>` generic handle.
	///
	/// This is a noöp.
	pub(crate) fn to_bitslice<'a>(self) -> Reference<'a, M, BitSlice<T, O>>
	where Address<M, BitSlice<T, O>>: Referential<'a> {
		unsafe { self.to_bitslice_addr().to_ref() }
	}
}
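
// A test-only round-trip sketch: `into_bitslice_ptr` and `from_bitslice_ptr`
// are inverse noöps over the same encoding, so a span survives the trip
// unchanged. It assumes the crate’s public `BitSlice::from_element`.
#[cfg(test)]
mod conversion_roundtrip_sketch {
	use super::*;

	#[test]
	fn bitslice_ptr_roundtrip() {
		let data = 0usize;
		let bits = BitSlice::<usize, Lsb0>::from_element(&data);
		let span = BitSpan::from(&bits[3 .. 23]);
		let back = BitSpan::from_bitslice_ptr(span.into_bitslice_ptr());
		assert_eq!(back, span);
	}
}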

/// Conversions.
impl<T, O> BitSpan<Const, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	/// Creates a `Const` span descriptor from a `const` bit-slice pointer.
	pub(crate) fn from_bitslice_ptr(raw: *const BitSlice<T, O>) -> Self {
		let slice_nn = match NonNull::new(raw as *const [()] as *mut [()]) {
			Some(nn) => nn,
			None => return Self::EMPTY,
		};
		let ptr = slice_nn.cast::<()>();
		let len = unsafe { slice_nn.as_ref() }.len();
		Self {
			ptr,
			len,
			..Self::EMPTY
		}
	}
}

/// Conversions.
impl<T, O> BitSpan<Mut, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	/// Converts the span descriptor into a raw mutable `BitSlice` pointer.
	///
	/// This is a noöp.
	pub(crate) fn into_bitslice_ptr_mut(self) -> *mut BitSlice<T, O> {
		self.into_bitslice_ptr() as *mut BitSlice<T, O>
	}

	/// Converts the span descriptor into an exclusive `BitSlice` reference.
	///
	/// This is a noöp.
	///
	/// ## Safety
	///
	/// The span must describe memory that is safe to dereference. In addition,
	/// no other `BitSlice` reference of any kind (`&` or `&mut`) may exist.
	pub(crate) unsafe fn into_bitslice_mut<'a>(self) -> &'a mut BitSlice<T, O> {
		&mut *self.into_bitslice_ptr_mut()
	}
}
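
// A test-only sketch of the exclusive path, assuming the crate’s public
// `BitSlice::from_element_mut` constructor: an `&mut BitSlice` round-trips
// through a `Mut` span and back without changing its length.
#[cfg(test)]
mod mut_conversion_sketch {
	use super::*;

	#[test]
	fn exclusive_roundtrip() {
		let mut data = 0usize;
		let bits = BitSlice::<usize, Lsb0>::from_element_mut(&mut data);
		let span = BitSpan::from(&mut bits[2 .. 10]);
		let slice = unsafe { span.into_bitslice_mut() };
		assert_eq!(slice.len(), 8);
	}
}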

/// Utilities.
impl<M, T, O> BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// Checks if a requested length can be encoded into the `BitSpan`.
	///
	/// This is `len <= Self::REGION_MAX_BITS`.
	#[cfg(feature = "alloc")]
	pub(crate) fn len_encodable(len: usize) -> bool {
		len <= Self::REGION_MAX_BITS
	}

	/// Renders the pointer structure into a formatter for use during
	/// higher-level type [`Debug`] implementations.
	///
	/// ## Parameters
	///
	/// - `&self`
	/// - `fmt`: The formatter into which the pointer is rendered.
	/// - `name`: The suffix of the structure rendering its pointer. The `Bit`
	///   prefix is applied to the object type name in this format.
	/// - `fields`: Any additional fields in the object’s debug info to be
	///   rendered.
	///
	/// ## Returns
	///
	/// The result of formatting the pointer into the receiver.
	///
	/// ## Behavior
	///
	/// This function writes `Bit{name}<{type}, {ord}> {{ {fields} }}` into the
	/// `fmt` formatter, where `{fields}` includes the address, head index, and
	/// bit length of the pointer, as well as any additional fields provided by
	/// the caller.
	///
	/// Higher types in the crate should use this function to drive their
	/// [`Debug`] implementations, and then use [`BitSlice`]’s list formatters
	/// to display their buffer contents.
	///
	/// [`BitSlice`]: crate::slice::BitSlice
	/// [`Debug`]: core::fmt::Debug
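	///
	/// ## Sketch
	///
	/// The rendered shape, with illustrative placeholder values (the exact
	/// field text depends on the formatters of the parts):
	///
	/// ```text
	/// BitSpan<usize, bitvec::order::Lsb0> { addr: 0x7f…, head: 000101, bits: 40 }
	/// ```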
	pub(crate) fn render<'a>(
		&'a self,
		fmt: &'a mut Formatter,
		name: &'a str,
		fields: impl IntoIterator<Item = &'a (&'a str, &'a dyn Debug)>,
	) -> fmt::Result {
		write!(
			fmt,
			"Bit{}<{}, {}>",
			name,
			any::type_name::<T::Mem>(),
			any::type_name::<O>(),
		)?;
		let mut builder = fmt.debug_struct("");
		builder
			.field("addr", &self.address().fmt_pointer())
			.field("head", &self.head().fmt_binary())
			.field("bits", &self.len());
		for (name, value) in fields {
			builder.field(name, value);
		}
		builder.finish()
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Clone for BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn clone(&self) -> Self {
		*self
	}
}

impl<M1, M2, O, T1, T2> PartialEq<BitSpan<M2, T2, O>> for BitSpan<M1, T1, O>
where
	M1: Mutability,
	M2: Mutability,
	O: BitOrder,
	T1: BitStore,
	T2: BitStore,
{
	#[inline]
	fn eq(&self, other: &BitSpan<M2, T2, O>) -> bool {
		let (addr_a, head_a, bits_a) = self.raw_parts();
		let (addr_b, head_b, bits_b) = other.raw_parts();
		bits_of::<T1::Mem>() == bits_of::<T2::Mem>()
			&& addr_a.to_const() as usize == addr_b.to_const() as usize
			&& head_a.into_inner() == head_b.into_inner()
			&& bits_a == bits_b
	}
}

impl<T, O> From<&BitSlice<T, O>> for BitSpan<Const, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn from(bits: &BitSlice<T, O>) -> Self {
		Self::from_bitslice_ptr(bits)
	}
}

impl<T, O> From<&mut BitSlice<T, O>> for BitSpan<Mut, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn from(bits: &mut BitSlice<T, O>) -> Self {
		Self::from_bitslice_ptr_mut(bits)
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Default for BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn default() -> Self {
		Self::EMPTY
	}
}

impl<M, T, O> Debug for BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		self.render(fmt, "Span", None)
	}
}

impl<M, T, O> Pointer for BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		Pointer::fmt(&self.address(), fmt)?;
		fmt.write_str("(")?;
		Binary::fmt(&self.head(), fmt)?;
		fmt.write_str(")[")?;
		Display::fmt(&self.len(), fmt)?;
		fmt.write_str("]")
	}
}

impl<M, T, O> Copy for BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
}

/// An error produced when creating `BitSpan` encoded references.
#[derive(Clone, Copy, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum BitSpanError<T>
where T: BitStore
{
	/// A null pointer was provided.
	Null(NullPtrError),
	/// The base element pointer is not aligned.
	Misaligned(MisalignError<T>),
	/// The requested length exceeds the `BitSpan` length ceiling.
	TooLong(usize),
	/// The requested address is too high, and wraps to zero.
	TooHigh(*const T),
}
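
// A test-only sketch of the derived comparisons on the error type: equal
// variants with equal payloads compare equal, and differing payloads do not.
#[cfg(test)]
mod error_compare_sketch {
	#[test]
	fn too_long_payload() {
		let a = super::BitSpanError::<usize>::TooLong(5);
		let b = super::BitSpanError::<usize>::TooLong(5);
		let c = super::BitSpanError::<usize>::TooLong(6);
		assert_eq!(a, b);
		assert_ne!(a, c);
	}
}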

#[cfg(not(tarpaulin_include))]
impl<T> From<BitPtrError<T>> for BitSpanError<T>
where T: BitStore
{
	#[inline]
	fn from(err: BitPtrError<T>) -> Self {
		match err {
			BitPtrError::Null(err) => Self::Null(err),
			BitPtrError::Misaligned(err) => Self::Misaligned(err),
		}
	}
}

#[cfg(not(tarpaulin_include))]
impl<T> From<MisalignError<T>> for BitSpanError<T>
where T: BitStore
{
	#[inline]
	fn from(err: MisalignError<T>) -> Self {
		Self::Misaligned(err)
	}
}

#[cfg(not(tarpaulin_include))]
impl<T> Debug for BitSpanError<T>
where T: BitStore
{
	#[inline]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		write!(fmt, "BitSpanError<{}>::", any::type_name::<T::Mem>())?;
		match self {
			Self::Null(err) => fmt.debug_tuple("Null").field(&err).finish(),
			Self::Misaligned(err) => {
				fmt.debug_tuple("Misaligned").field(&err).finish()
			},
			Self::TooLong(len) => fmt.debug_tuple("TooLong").field(len).finish(),
			Self::TooHigh(addr) => {
				fmt.debug_tuple("TooHigh").field(addr).finish()
			},
		}
	}
}

#[cfg(not(tarpaulin_include))]
impl<T> Display for BitSpanError<T>
where T: BitStore
{
	#[inline]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		match self {
			Self::Null(err) => Display::fmt(err, fmt),
			Self::Misaligned(err) => Display::fmt(err, fmt),
			Self::TooLong(len) => write!(
				fmt,
				"Length {} is too long to encode in a bit-slice, which can \
				 only accept {} bits",
				len,
				BitSpan::<Const, T, Lsb0>::REGION_MAX_BITS,
			),
			Self::TooHigh(addr) => write!(
				fmt,
				"Address {:p} is too high, and produces a span that wraps \
				 around to the zero address.",
				addr,
			),
		}
	}
}

unsafe impl<T> Send for BitSpanError<T> where T: BitStore {}

unsafe impl<T> Sync for BitSpanError<T> where T: BitStore {}

#[cfg(feature = "std")]
impl<T> std::error::Error for BitSpanError<T> where T: BitStore {}