#![doc = include_str!("../../doc/ptr/proxy.md")]

use core::{
	cell::UnsafeCell,
	cmp,
	fmt::{
		self,
		Debug,
		Display,
		Formatter,
		Pointer,
	},
	hash::{
		Hash,
		Hasher,
	},
	marker::PhantomData,
	mem,
	ops::{
		Deref,
		DerefMut,
		Not,
	},
};

use wyz::comu::{
	Const,
	Mut,
	Mutability,
};

use super::BitPtr;
use crate::{
	order::{
		BitOrder,
		Lsb0,
	},
	store::BitStore,
};

#[doc = include_str!("../../doc/ptr/BitRef.md")]
//  Restore alignment and sizing properties, as `BitPtr` lacks them.
#[cfg_attr(target_pointer_width = "32", repr(C, align(4)))]
#[cfg_attr(target_pointer_width = "64", repr(C, align(8)))]
#[cfg_attr(
	not(any(target_pointer_width = "32", target_pointer_width = "64")),
	repr(C)
)]
pub struct BitRef<'a, M = Const, T = usize, O = Lsb0>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// The proxied bit-address.
	bitptr: BitPtr<M, T, O>,
	/// A local cache of the proxied bit that can be referenced.
	data:   bool,
	/// Attach the lifetime and reflect the possibility of mutation.
	_ref:   PhantomData<&'a UnsafeCell<bool>>,
}

impl<M, T, O> BitRef<'_, M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// Converts a bit-pointer into a proxy bit-reference.
	///
	/// This reads through the pointer in order to cache the current bit value
	/// in the proxy.
	///
	/// ## Original
	///
	/// The syntax `unsafe { &*ptr }`.
	///
	/// ## Safety
	///
	/// This is equivalent to (and is!) dereferencing a raw pointer. The pointer
	/// must be well-constructed, refer to a live memory location in the program
	/// context, and not be aliased beyond its typing indicators.
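	///
	/// ## Examples
	///
	/// A minimal sketch of constructing a proxy over one bit of a live
	/// buffer. The `bits!` macro, `BitSlice::as_bitptr`, and `BitPtr::add`
	/// used here are assumed from the wider crate API.
	///
	/// ```rust
	/// use bitvec::{prelude::*, ptr::BitRef};
	///
	/// let bits = bits![0, 1];
	/// //  The pointer is valid: it comes from a live slice, and the
	/// //  one-bit offset stays in bounds.
	/// let bit = unsafe { BitRef::from_bitptr(bits.as_bitptr().add(1)) };
	/// assert!(*bit);
	/// ```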
	#[inline]
	pub unsafe fn from_bitptr(bitptr: BitPtr<M, T, O>) -> Self {
		let data = bitptr.read();
		Self {
			bitptr,
			data,
			_ref: PhantomData,
		}
	}

	/// Decays the bit-reference to an ordinary bit-pointer.
	///
	/// ## Original
	///
	/// The syntax `&val as *const T`.
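	///
	/// ## Examples
	///
	/// A sketch of round-tripping a proxy back into its bit-pointer. The
	/// `bits!` macro, `BitSlice::as_bitptr`, and `BitPtr::read` used here are
	/// assumed from the wider crate API.
	///
	/// ```rust
	/// use bitvec::{prelude::*, ptr::BitRef};
	///
	/// let bits = bits![1];
	/// let bit = unsafe { BitRef::from_bitptr(bits.as_bitptr()) };
	/// //  Decay the proxy and read through the recovered pointer.
	/// let ptr = bit.into_bitptr();
	/// assert!(unsafe { ptr.read() });
	/// ```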
	#[inline]
	#[cfg(not(tarpaulin_include))]
	pub fn into_bitptr(self) -> BitPtr<M, T, O> {
		self.bitptr
	}

	/// Removes a layer of `::Alias` marking from a bit-reference.
	///
	/// ## Safety
	///
	/// The caller must ensure that no element-level aliasing *by `bitvec`*
	/// occurs in the scope for which the produced de-aliased proxy is alive.
	#[cfg(not(tarpaulin_include))]
	pub(crate) unsafe fn remove_alias(this: BitRef<M, T::Alias, O>) -> Self {
		Self {
			bitptr: this.bitptr.cast::<T>(),
			data:   this.data,
			_ref:   PhantomData,
		}
	}
}

impl<T, O> BitRef<'_, Mut, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	/// Moves `src` into the referenced bit, returning the previous value.
	///
	/// ## Original
	///
	/// [`mem::replace`](core::mem::replace)
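	///
	/// ## Examples
	///
	/// A sketch of typical use through a mutable slice. The `bits!` macro and
	/// `BitSlice::get_mut` used here are assumed from the wider crate API.
	///
	/// ```rust
	/// use bitvec::prelude::*;
	///
	/// let bits = bits![mut 0];
	/// let mut bit = bits.get_mut(0).unwrap();
	/// //  The cache held `0`; it now holds `1`.
	/// assert!(!bit.replace(true));
	/// //  The new value is committed to memory when the proxy drops.
	/// drop(bit);
	/// assert!(bits[0]);
	/// ```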
	#[inline]
	pub fn replace(&mut self, src: bool) -> bool {
		mem::replace(&mut self.data, src)
	}

	/// Swaps the bit values of two proxies.
	///
	/// ## Original
	///
	/// [`mem::swap`](core::mem::swap)
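	///
	/// ## Examples
	///
	/// A sketch exchanging bits held in two distinct buffers. The `bits!`
	/// macro and `BitSlice::get_mut` used here are assumed from the wider
	/// crate API.
	///
	/// ```rust
	/// use bitvec::prelude::*;
	///
	/// let a = bits![mut 0];
	/// let b = bits![mut 1];
	/// let mut x = a.get_mut(0).unwrap();
	/// let mut y = b.get_mut(0).unwrap();
	/// //  Only the cached values are exchanged here; each write lands in
	/// //  memory when its proxy drops.
	/// x.swap(&mut y);
	/// drop(x);
	/// drop(y);
	/// assert!(a[0]);
	/// assert!(!b[0]);
	/// ```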
	#[inline]
	pub fn swap<T2, O2>(&mut self, other: &mut BitRef<Mut, T2, O2>)
	where
		T2: BitStore,
		O2: BitOrder,
	{
		mem::swap(&mut self.data, &mut other.data)
	}

	/// Commits a bit into the proxied location.
	///
	/// This function writes `value` directly into the proxied location,
	/// bypassing the cache and destroying the proxy. This eliminates the second
	/// write done in the destructor, and allows code to be slightly faster.
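	///
	/// ## Examples
	///
	/// A sketch of writing through the proxy without waiting for its
	/// destructor. The `bits!` macro and `BitSlice::get_mut` used here are
	/// assumed from the wider crate API.
	///
	/// ```rust
	/// use bitvec::prelude::*;
	///
	/// let bits = bits![mut 0];
	/// let bit = bits.get_mut(0).unwrap();
	/// //  Writes into memory immediately and skips the destructor.
	/// bit.commit(true);
	/// assert!(bits[0]);
	/// ```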
	#[inline]
	pub fn commit(self, value: bool) {
		unsafe {
			self.bitptr.write(value);
		}
		mem::forget(self);
	}

	/// Writes `value` into the proxy.
	///
	/// This does not write into the proxied location; that is deferred until
	/// the proxy destructor runs.
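	///
	/// ## Examples
	///
	/// A sketch showing the deferred write. The `bits!` macro and
	/// `BitSlice::get_mut` used here are assumed from the wider crate API.
	///
	/// ```rust
	/// use bitvec::prelude::*;
	///
	/// let bits = bits![mut 0];
	/// let mut bit = bits.get_mut(0).unwrap();
	/// bit.set(true);
	/// //  The store into memory happens here, in the proxy destructor.
	/// drop(bit);
	/// assert!(bits[0]);
	/// ```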
	#[inline]
	pub fn set(&mut self, value: bool) {
		self.data = value;
	}
}

#[cfg(not(tarpaulin_include))]
impl<T, O> Clone for BitRef<'_, Const, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn clone(&self) -> Self {
		Self { ..*self }
	}
}

impl<M, T, O> Eq for BitRef<'_, M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Ord for BitRef<'_, M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn cmp(&self, other: &Self) -> cmp::Ordering {
		self.data.cmp(&other.data)
	}
}

#[cfg(not(tarpaulin_include))]
impl<M1, M2, O1, O2, T1, T2> PartialEq<BitRef<'_, M2, T2, O2>>
	for BitRef<'_, M1, T1, O1>
where
	M1: Mutability,
	M2: Mutability,
	T1: BitStore,
	T2: BitStore,
	O1: BitOrder,
	O2: BitOrder,
{
	#[inline(always)]
	fn eq(&self, other: &BitRef<'_, M2, T2, O2>) -> bool {
		self.data == other.data
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> PartialEq<bool> for BitRef<'_, M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline(always)]
	fn eq(&self, other: &bool) -> bool {
		self.data == *other
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> PartialEq<BitRef<'_, M, T, O>> for bool
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn eq(&self, other: &BitRef<'_, M, T, O>) -> bool {
		other == self
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> PartialEq<&bool> for BitRef<'_, M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline(always)]
	fn eq(&self, other: &&bool) -> bool {
		self.data == **other
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> PartialEq<BitRef<'_, M, T, O>> for &bool
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn eq(&self, other: &BitRef<'_, M, T, O>) -> bool {
		other == *self
	}
}

#[cfg(not(tarpaulin_include))]
impl<M1, M2, O1, O2, T1, T2> PartialOrd<BitRef<'_, M2, T2, O2>>
	for BitRef<'_, M1, T1, O1>
where
	M1: Mutability,
	M2: Mutability,
	T1: BitStore,
	T2: BitStore,
	O1: BitOrder,
	O2: BitOrder,
{
	#[inline]
	fn partial_cmp(
		&self,
		other: &BitRef<'_, M2, T2, O2>,
	) -> Option<cmp::Ordering> {
		self.data.partial_cmp(&other.data)
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> PartialOrd<bool> for BitRef<'_, M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn partial_cmp(&self, other: &bool) -> Option<cmp::Ordering> {
		self.data.partial_cmp(other)
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> PartialOrd<&bool> for BitRef<'_, M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn partial_cmp(&self, other: &&bool) -> Option<cmp::Ordering> {
		self.data.partial_cmp(*other)
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> AsRef<bool> for BitRef<'_, M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn as_ref(&self) -> &bool {
		&self.data
	}
}

#[cfg(not(tarpaulin_include))]
impl<T, O> AsMut<bool> for BitRef<'_, Mut, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn as_mut(&mut self) -> &mut bool {
		&mut self.data
	}
}

impl<M, T, O> Debug for BitRef<'_, M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		unsafe { self.bitptr.span_unchecked(1) }
			.render(fmt, "Ref", &[("bit", &self.data as &dyn Debug)])
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Display for BitRef<'_, M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		Display::fmt(&self.data, fmt)
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Pointer for BitRef<'_, M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		Pointer::fmt(&self.bitptr, fmt)
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Hash for BitRef<'_, M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn hash<H>(&self, state: &mut H)
	where H: Hasher {
		self.bitptr.hash(state);
	}
}

// #[allow(clippy::non_send_fields_in_send_ty)] // I know what I’m doing
unsafe impl<M, T, O> Send for BitRef<'_, M, T, O>
where
	M: Mutability,
	T: BitStore + Sync,
	O: BitOrder,
{
}

unsafe impl<M, T, O> Sync for BitRef<'_, M, T, O>
where
	M: Mutability,
	T: BitStore + Sync,
	O: BitOrder,
{
}

// This cannot be implemented until `Drop` is specialized to only
// `<Mut, T, O>`.
// impl<T, O> Copy for BitRef<'_, Const, T, O>
// where O: BitOrder, T: BitStore {}

impl<M, T, O> Deref for BitRef<'_, M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	type Target = bool;

	#[inline]
	fn deref(&self) -> &Self::Target {
		&self.data
	}
}

impl<T, O> DerefMut for BitRef<'_, Mut, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn deref_mut(&mut self) -> &mut Self::Target {
		&mut self.data
	}
}

impl<M, T, O> Drop for BitRef<'_, M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn drop(&mut self) {
		//  `Drop` cannot specialize on type parameters, but only mutable
		//  proxies can commit to memory.
		if M::CONTAINS_MUTABILITY {
			unsafe {
				self.bitptr.to_mut().write(self.data);
			}
		}
	}
}

impl<M, T, O> Not for BitRef<'_, M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	type Output = bool;

	#[inline]
	fn not(self) -> Self::Output {
		!self.data
	}
}