1use alloc::vec::Vec;
2use core::marker::PhantomData;
3use core::mem::{ManuallyDrop, MaybeUninit};
4
/// Vector type used throughout the crate (raw-pointer based for fast push paths).
pub type VecImpl<T> = FastVec<T>;
/// Slice type used throughout the crate (pointer cursor; bound kept only in debug builds).
pub type SliceImpl<'a, T> = FastSlice<'a, T>;
7
/// A `Vec<T>` whose `(ptr, len, cap)` is stored as three raw pointers so that
/// `len`/`push` are pure pointer arithmetic.
///
/// Invariant: `start <= end <= capacity`, and the allocation is owned exactly
/// like a `Vec<T>` (see the `From` conversions below).
pub struct FastVec<T> {
    /// Start of the allocation (the `Vec` data pointer).
    start: *mut T,
    /// One past the last initialized element (`start + len`).
    end: *mut T,
    /// One past the end of the allocation (`start + capacity`).
    capacity: *mut T,
    /// Declares ownership of `T`s (drop check / variance), like `Vec<T>`.
    _spooky: PhantomData<Vec<T>>,
}
15
16impl<T> Default for FastVec<T> {
17 fn default() -> Self {
18 Self::from(vec![])
19 }
20}
21
impl<T> Drop for FastVec<T> {
    fn drop(&mut self) {
        unsafe {
            // SAFETY: `self` is never used again after this bitwise copy;
            // converting the copy into a real `Vec` drops the elements and
            // frees the allocation exactly once.
            drop(Vec::from(core::ptr::read(self)));
        }
    }
}
29
// SAFETY: `FastVec<T>` owns its elements exactly like `Vec<T>`, which is
// `Send`/`Sync` under the same bounds; the raw pointers are never aliased
// outside of the borrow rules enforced by the methods.
unsafe impl<T: Send> Send for FastVec<T> {}
unsafe impl<T: Sync> Sync for FastVec<T> {}
33
/// Number of `T` elements between `origin` and `ptr` (i.e. `ptr - origin`).
///
/// Both pointers must point into (or one past) the same allocation with
/// `origin <= ptr`; `T` must not be zero-sized (division by zero otherwise).
#[inline(always)]
fn sub_ptr<T>(ptr: *mut T, origin: *mut T) -> usize {
    let byte_distance = (ptr as usize) - (origin as usize);
    byte_distance / core::mem::size_of::<T>()
}
40
41impl<T> From<FastVec<T>> for Vec<T> {
42 fn from(fast: FastVec<T>) -> Self {
43 let start = fast.start;
44 let length = fast.len();
45 let capacity = sub_ptr(fast.capacity, fast.start);
46 core::mem::forget(fast);
47 unsafe { Vec::from_raw_parts(start, length, capacity) }
48 }
49}
50
51impl<T> From<Vec<T>> for FastVec<T> {
52 fn from(mut vec: Vec<T>) -> Self {
53 assert_ne!(core::mem::size_of::<T>(), 0);
54 let start = vec.as_mut_ptr();
55 let end = unsafe { start.add(vec.len()) };
56 let capacity = unsafe { start.add(vec.capacity()) };
57 core::mem::forget(vec);
58 Self {
59 start,
60 end,
61 capacity,
62 _spooky: Default::default(),
63 }
64 }
65}
66
impl<T> FastVec<T> {
    /// Number of initialized elements (`end - start`).
    pub fn len(&self) -> usize {
        sub_ptr(self.end, self.start)
    }

    /// Borrows the initialized elements.
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: `start..end` holds exactly `len()` initialized elements
        // owned by `self`.
        unsafe { core::slice::from_raw_parts(self.start, self.len()) }
    }

    /// Mutably borrows the initialized elements.
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: as in `as_slice`, and `&mut self` guarantees uniqueness.
        unsafe { core::slice::from_raw_parts_mut(self.start, self.len()) }
    }

    /// Drops all elements while keeping the allocation.
    pub fn clear(&mut self) {
        unsafe {
            let elems: *mut [T] = self.as_mut_slice();
            // Zero the length *before* dropping: if an element's `Drop`
            // panics, `FastVec::drop` must not try to drop the elements again.
            self.end = self.start;
            core::ptr::drop_in_place(elems);
        }
    }

    /// Ensures capacity for at least `additional` more elements, delegating
    /// to `Vec::reserve` on the cold path.
    pub fn reserve(&mut self, additional: usize) {
        if additional > sub_ptr(self.capacity, self.end) {
            #[cold]
            #[inline(never)]
            fn reserve_slow<T>(me: &mut FastVec<T>, additional: usize) {
                unsafe {
                    me.mut_vec(|v| {
                        // The fast-path check already failed, so spare
                        // capacity is known insufficient; telling LLVM lets it
                        // elide `Vec::reserve`'s internal re-check.
                        if additional <= v.capacity().wrapping_sub(v.len()) {
                            core::hint::unreachable_unchecked();
                        }
                        v.reserve(additional);
                    });
                }
            }
            reserve_slow(self, additional);
        }
    }

    /// Temporarily reconstructs the `Vec` so `f` can use its full API, then
    /// writes the (possibly reallocated) pointers back into `self`.
    ///
    /// NOTE(review): if `f` panics after the `Vec` reallocates, `self` still
    /// holds the old pointers and its `Drop` would free them — this appears to
    /// rely on `f` not panicking mid-reallocation; confirm. `ManuallyDrop`
    /// prevents the reconstructed `Vec` itself from double-freeing on unwind.
    unsafe fn mut_vec(&mut self, f: impl FnOnce(&mut Vec<T>)) {
        let copied = core::ptr::read(self as *mut FastVec<T>);
        let mut vec = ManuallyDrop::new(Vec::from(copied));
        f(&mut vec);
        let copied = FastVec::from(ManuallyDrop::into_inner(vec));
        core::ptr::write(self as *mut FastVec<T>, copied);
    }

    /// Raw write cursor (one past the last element); pair with `set_end_ptr`.
    #[inline(always)]
    pub fn end_ptr(&mut self) -> *mut T {
        debug_assert!(self.end <= self.capacity);
        self.end
    }

    /// Sets the write cursor, which implicitly sets the length; callers must
    /// have initialized everything in `start..end`.
    #[inline(always)]
    pub fn set_end_ptr(&mut self, end: *mut T) {
        self.end = end;
        debug_assert!(self.end <= self.capacity);
    }

    /// Grows the length by one without writing.
    ///
    /// # Safety
    /// The element at the current `end` must already be initialized and
    /// spare capacity must exist.
    #[inline(always)]
    pub unsafe fn increment_len(&mut self) {
        self.end = self.end.add(1);
        debug_assert!(self.end <= self.capacity);
    }
}
148
/// Appending without a capacity check.
pub trait PushUnchecked<T> {
    /// Appends `t` without growing the container.
    ///
    /// # Safety
    /// The container must have spare capacity for at least one more element.
    unsafe fn push_unchecked(&mut self, t: T);
}
154
155impl<T> PushUnchecked<T> for FastVec<T> {
156 #[inline(always)]
157 unsafe fn push_unchecked(&mut self, t: T) {
158 debug_assert!(self.end < self.capacity);
159 core::ptr::write(self.end, t);
160 self.end = self.end.add(1);
161 }
162}
163
164impl<T> PushUnchecked<T> for Vec<T> {
165 #[inline(always)]
166 unsafe fn push_unchecked(&mut self, t: T) {
167 let n = self.len();
168 debug_assert!(n < self.capacity());
169 let end = self.as_mut_ptr().add(n);
170 core::ptr::write(end, t);
171 self.set_len(n + 1);
172 }
173}
174
/// A vector over caller-provided uninitialized storage of capacity `N`.
/// Only `start`/`end` cursors are stored; `T: Copy` means elements never
/// need dropping, so the type has no `Drop` impl.
pub struct FastArrayVec<'a, T: Copy, const N: usize> {
    /// First slot of the borrowed buffer.
    start: *mut T,
    /// One past the last written element.
    end: *mut T,
    /// Mutably borrows the storage for `'a`.
    _spooky: PhantomData<&'a mut T>,
}
182
183impl<'a, T: Copy, const N: usize> FastArrayVec<'a, T, N> {
184 #[inline(always)]
185 pub fn new(uninit: &'a mut MaybeUninit<[T; N]>) -> Self {
186 let start = uninit.as_mut_ptr() as *mut T;
187 Self {
188 start,
189 end: start,
190 _spooky: PhantomData,
191 }
192 }
193
194 #[inline(always)]
195 pub fn as_slice(&self) -> &[T] {
196 let len = sub_ptr(self.end, self.start);
197 unsafe { core::slice::from_raw_parts(self.start, len) }
198 }
199}
200
201impl<'a, T: Copy, const N: usize> PushUnchecked<T> for FastArrayVec<'a, T, N> {
202 #[inline(always)]
203 unsafe fn push_unchecked(&mut self, t: T) {
204 core::ptr::write(self.end, t);
205 self.end = self.end.add(1);
206 }
207}
208
/// A cursor over a borrowed slice: just a pointer that advances. In debug
/// builds the one-past-the-end pointer is also kept so the `*_unchecked`
/// methods can assert in-bounds advancement.
#[derive(Clone)]
pub struct FastSlice<'a, T> {
    /// Current read position.
    ptr: *const T,
    /// One past the last readable element (debug builds only).
    #[cfg(debug_assertions)]
    end: *const T,
    /// Borrows the underlying data for `'a`.
    _spooky: PhantomData<&'a T>,
}
216
217impl<T> Default for FastSlice<'_, T> {
218 fn default() -> Self {
219 Self::from([].as_slice())
220 }
221}
222
// SAFETY: `FastSlice` only ever reads the borrowed data.
// NOTE(review): the bounds mirror the owning-container convention
// (`T: Send` for `Send`) even though this type only *borrows* — a shared
// view normally needs `T: Sync` to be `Send`; confirm this is intended
// (e.g. only ever used with `Copy`/`Pod` element types).
unsafe impl<T: Send> Send for FastSlice<'_, T> {}
unsafe impl<T: Sync> Sync for FastSlice<'_, T> {}
226
227impl<'a, T> From<&'a [T]> for FastSlice<'a, T> {
228 fn from(slice: &'a [T]) -> Self {
229 Self {
230 ptr: slice.as_ptr(),
231 #[cfg(debug_assertions)]
232 end: slice.as_ptr_range().end,
233 _spooky: PhantomData,
234 }
235 }
236}
237
impl<'a, T> FastSlice<'a, T> {
    /// Builds a cursor from raw parts.
    ///
    /// # Safety
    /// `ptr` must be valid for reads of `len` elements for `'a`.
    #[inline(always)]
    pub unsafe fn from_raw_parts(ptr: *const T, len: usize) -> Self {
        // `len` is only recorded in debug builds; this silences the
        // unused-variable warning in release builds.
        let _ = len;
        Self {
            ptr,
            #[cfg(debug_assertions)]
            end: ptr.wrapping_add(len),
            _spooky: PhantomData,
        }
    }

    /// Returns the current element's pointer and advances by one.
    ///
    /// # Safety
    /// At least one element must remain.
    #[inline(always)]
    pub unsafe fn next_unchecked_as_ptr(&mut self) -> *const T {
        // `wrapping_add` keeps the debug check itself free of UB on overflow.
        #[cfg(debug_assertions)]
        assert!((self.ptr.wrapping_add(1) as usize) <= self.end as usize);
        let p = self.ptr;
        self.ptr = self.ptr.add(1);
        p
    }

    /// Advances the cursor by `n` elements.
    ///
    /// # Safety
    /// At least `n` elements must remain.
    #[inline(always)]
    pub unsafe fn advance(&mut self, n: usize) {
        #[cfg(debug_assertions)]
        assert!((self.ptr.wrapping_add(n) as usize) <= self.end as usize);
        self.ptr = self.ptr.add(n);
    }

    /// Current read position.
    #[inline(always)]
    pub fn as_ptr(&self) -> *const T {
        self.ptr
    }

    /// Reinterprets the element type as `B`, keeping the cursor position.
    /// Both types must be `Pod` with identical size and alignment, making
    /// the layout of `FastSlice<'a, T>` and `FastSlice<'a, B>` the same.
    #[inline(always)]
    pub fn cast<B>(&mut self) -> &mut FastSlice<'a, B>
    where
        T: bytemuck::Pod,
        B: bytemuck::Pod,
    {
        use core::mem::*;
        assert_eq!(size_of::<T>(), size_of::<B>());
        assert_eq!(align_of::<T>(), align_of::<B>());
        unsafe { transmute(self) }
    }
}
287
/// Unchecked sequential reads from a slice-like cursor of `Copy` elements.
pub trait NextUnchecked<'a, T: Copy> {
    /// Reads the next element and advances past it.
    ///
    /// # Safety
    /// At least one element must remain.
    unsafe fn next_unchecked(&mut self) -> T;

    /// Reads the next `length` elements as a slice and advances past them.
    ///
    /// # Safety
    /// At least `length` elements must remain.
    unsafe fn chunk_unchecked(&mut self, length: usize) -> &'a [T];
}
297
298impl<'a, T: Copy> NextUnchecked<'a, T> for FastSlice<'a, T> {
299 #[inline(always)]
300 unsafe fn next_unchecked(&mut self) -> T {
301 #[cfg(debug_assertions)]
302 assert!((self.ptr.wrapping_add(1) as usize) <= self.end as usize);
303 let t = *self.ptr;
304 self.ptr = self.ptr.add(1);
305 t
306 }
307
308 #[inline(always)]
309 unsafe fn chunk_unchecked(&mut self, length: usize) -> &'a [T] {
310 #[cfg(debug_assertions)]
311 assert!((self.ptr.wrapping_add(length) as usize) <= self.end as usize);
312 let slice = core::slice::from_raw_parts(self.ptr, length);
313 self.ptr = self.ptr.add(length);
314 slice
315 }
316}
317
318impl<'a, T: Copy> NextUnchecked<'a, T> for &'a [T] {
319 #[inline(always)]
320 unsafe fn next_unchecked(&mut self) -> T {
321 let p = *self.get_unchecked(0);
322 *self = self.get_unchecked(1..);
323 p
324 }
325
326 #[inline(always)]
327 unsafe fn chunk_unchecked(&mut self, length: usize) -> &'a [T] {
328 let slice = self.get_unchecked(0..length);
329 *self = self.get_unchecked(length..);
330 slice
331 }
332}
333
/// Either borrows a `'borrowed` slice or owns its data in `vec`; `slice`
/// always points at whichever is current. `Default` is an empty borrow.
#[derive(Default)]
pub struct CowSlice<'borrowed, T> {
    /// Cursor over the current data (the borrowed slice, or into `vec`).
    slice: SliceImpl<'borrowed, T>,
    /// Owned storage; its allocation may be retained for reuse while borrowing.
    vec: Vec<T>,
}
impl<'borrowed, T> CowSlice<'borrowed, T> {
    /// Creates an empty `CowSlice` that reuses `vec`'s allocation for later
    /// owned data (any existing contents are discarded).
    pub fn with_allocation(mut vec: Vec<T>) -> Self {
        vec.clear();
        Self {
            slice: [].as_slice().into(),
            vec,
        }
    }

    /// Recovers the owned allocation (emptied) for reuse elsewhere.
    pub fn into_allocation(mut self) -> Vec<T> {
        self.vec.clear();
        self.vec
    }

    /// Views the current data as a slice of caller-supplied length.
    ///
    /// # Safety
    /// `len` must be the exact length of the current data; debug builds
    /// assert it against the recorded end pointer.
    #[must_use]
    pub unsafe fn as_slice<'me>(&'me self, len: usize) -> &'me [T]
    where
        'borrowed: 'me,
    {
        #[cfg(debug_assertions)]
        assert_eq!(self.slice.ptr.wrapping_add(len), self.slice.end);
        core::slice::from_raw_parts(self.slice.ptr, len)
    }

    /// Borrows the internal cursor, shortening its lifetime parameter.
    #[must_use]
    #[inline(always)]
    pub fn ref_slice<'me>(&'me self) -> &'me SliceImpl<'me, T>
    where
        'borrowed: 'me,
    {
        // Only the lifetime parameter changes ('borrowed -> 'me), and
        // 'borrowed outlives 'me, so this is a lifetime shortening.
        let slice: &'me SliceImpl<'me, T> = unsafe { core::mem::transmute(&self.slice) };
        slice
    }

    /// Mutably borrows the internal cursor, shortening its lifetime parameter.
    #[must_use]
    #[inline(always)]
    pub fn mut_slice<'me>(&'me mut self) -> &'me mut SliceImpl<'me, T>
    where
        'borrowed: 'me,
    {
        // As in `ref_slice`; the transmute is needed because `&mut` is
        // invariant over the slice's lifetime parameter.
        let slice: &'me mut SliceImpl<'me, T> = unsafe { core::mem::transmute(&mut self.slice) };
        slice
    }

    /// Points the cursor at externally borrowed data.
    pub fn set_borrowed(&mut self, slice: &'borrowed [T]) {
        self.slice = slice.into();
    }

    /// Points the cursor at an externally built `SliceImpl`.
    pub fn set_borrowed_slice_impl(&mut self, slice: SliceImpl<'borrowed, T>) {
        self.slice = slice;
    }

    /// Switches to owned mode: clears the vec and returns a guard that derefs
    /// to it; when the guard drops, the cursor is pointed at the vec's data.
    #[must_use]
    pub fn set_owned(&mut self) -> SetOwned<'_, 'borrowed, T> {
        // Detach the cursor first so it never dangles while `vec` mutates.
        self.slice = [].as_slice().into();
        self.vec.clear();
        SetOwned(self)
    }

    /// Mutates the owned vec via `f`, then re-derives the cursor (the vec may
    /// have reallocated). Panics if the data is currently borrowed (cursor
    /// does not point at `vec`).
    pub fn mut_owned<R>(&mut self, f: impl FnOnce(&mut Vec<T>) -> R) -> R {
        assert!(
            core::ptr::eq(self.slice.ptr, self.vec.as_ptr()),
            "not owned"
        );
        self.slice = [].as_slice().into();
        let ret = f(&mut self.vec);
        // Extends the borrow of `vec` to 'borrowed; sound only because the
        // cursor is re-derived like this after every mutation of `vec`.
        let slice: &'borrowed [T] = unsafe { core::mem::transmute(self.vec.as_slice()) };
        self.slice = slice.into();
        ret
    }

    /// Reinterprets the element type as `B` in place. Both types must be
    /// `Pod` with identical size and alignment, making the layouts of
    /// `CowSlice<'_, T>` and `CowSlice<'_, B>` the same.
    #[inline]
    pub fn cast_mut<B>(&mut self) -> &mut CowSlice<'borrowed, B>
    where
        T: bytemuck::Pod,
        B: bytemuck::Pod,
    {
        use core::mem::*;
        assert_eq!(size_of::<T>(), size_of::<B>());
        assert_eq!(align_of::<T>(), align_of::<B>());
        unsafe { transmute(self) }
    }
}
445
/// RAII guard from `CowSlice::set_owned`: derefs to the owned `Vec` for
/// filling; on drop, points the `CowSlice` cursor at the vec's contents.
pub struct SetOwned<'a, 'borrowed, T>(&'a mut CowSlice<'borrowed, T>);
impl<'borrowed, T> Drop for SetOwned<'_, 'borrowed, T> {
    fn drop(&mut self) {
        // Extends the vec borrow to 'borrowed so it can be stored in `slice`;
        // sound only because `CowSlice` keeps the cursor in sync with `vec`
        // (e.g. `mut_owned` re-derives it after any mutation).
        let slice: &'borrowed [T] = unsafe { core::mem::transmute(self.0.vec.as_slice()) };
        self.0.slice = slice.into();
    }
}
454impl<'a, T> core::ops::Deref for SetOwned<'a, '_, T> {
455 type Target = Vec<T>;
456
457 fn deref(&self) -> &Self::Target {
458 &self.0.vec
459 }
460}
461impl<'a, T> core::ops::DerefMut for SetOwned<'a, '_, T> {
462 fn deref_mut(&mut self) -> &mut Self::Target {
463 &mut self.0.vec
464 }
465}
466
/// Removes `T`'s alignment requirement via `repr(packed)`, allowing
/// `T`-sized values to live at arbitrary byte offsets.
#[derive(Copy, Clone)]
#[repr(C, packed)]
pub struct Unaligned<T>(T);
470
// SAFETY: `repr(C, packed)` around a single field has no padding and the same
// bit validity as `T`, so `T`'s Zeroable/Pod invariants carry over.
unsafe impl<T: bytemuck::Zeroable> bytemuck::Zeroable for Unaligned<T> {}
unsafe impl<T: bytemuck::Pod> bytemuck::Pod for Unaligned<T> {}
474
#[cfg(test)]
mod tests {
    use super::*;
    // NOTE(review): `test::Bencher` is the nightly-only bench harness;
    // presumably the crate root enables `#![feature(test)]` — not visible here.
    use test::{black_box, Bencher};

    /// `FastVec` round-trip: reserve, push twice, read back as a slice.
    #[test]
    fn test_as_slice() {
        let mut vec = FastVec::default();
        vec.reserve(2);
        unsafe {
            vec.push_unchecked(1);
            vec.push_unchecked(2);
        }
        assert_eq!(vec.as_slice(), [1, 2]);
    }

    /// Element count shared by all benches below.
    const N: usize = 1000;
    type VecT = Vec<u32>;

    /// Baseline: `NextUnchecked::next_unchecked` on a plain `&[T]`.
    #[bench]
    fn bench_next_unchecked(b: &mut Bencher) {
        let src: VecT = vec![0; N];
        b.iter(|| {
            let mut slice = src.as_slice();
            for _ in 0..black_box(N) {
                unsafe { black_box(black_box(&mut slice).next_unchecked()) };
            }
        });
    }

    /// Same read loop through `FastSlice`'s pointer cursor.
    #[bench]
    fn bench_next_unchecked_fast(b: &mut Bencher) {
        let src: VecT = vec![0; N];
        b.iter(|| {
            let mut fast_slice = FastSlice::from(src.as_slice());
            for _ in 0..black_box(N) {
                unsafe { black_box(black_box(&mut fast_slice).next_unchecked()) };
            }
        });
    }

    /// Baseline: checked `Vec::push` into a preallocated buffer.
    #[bench]
    fn bench_push(b: &mut Bencher) {
        let mut buffer = VecT::with_capacity(N);
        b.iter(|| {
            buffer.clear();
            let vec = black_box(&mut buffer);
            for _ in 0..black_box(N) {
                let v = black_box(&mut *vec);
                v.push(black_box(0));
            }
        });
    }

    /// `FastVec`: reserve(1) + push_unchecked per element; the buffer is
    /// moved into the FastVec and recovered each iteration.
    #[bench]
    fn bench_push_fast(b: &mut Bencher) {
        let mut buffer = VecT::with_capacity(N);
        b.iter(|| {
            buffer.clear();
            let mut vec = black_box(FastVec::from(core::mem::take(&mut buffer)));
            for _ in 0..black_box(N) {
                let v = black_box(&mut vec);
                v.reserve(1);
                unsafe { v.push_unchecked(black_box(0)) };
            }
            buffer = vec.into();
        });
    }

    /// Baseline: `push_unchecked` on a plain `Vec` (capacity preallocated).
    #[bench]
    fn bench_push_unchecked(b: &mut Bencher) {
        let mut buffer = VecT::with_capacity(N);
        b.iter(|| {
            buffer.clear();
            let vec = black_box(&mut buffer);
            for _ in 0..black_box(N) {
                let v = black_box(&mut *vec);
                unsafe { v.push_unchecked(black_box(0)) };
            }
        });
    }

    /// `push_unchecked` on `FastVec` (capacity preallocated).
    #[bench]
    fn bench_push_unchecked_fast(b: &mut Bencher) {
        let mut buffer = VecT::with_capacity(N);
        b.iter(|| {
            buffer.clear();
            let mut vec = black_box(FastVec::from(core::mem::take(&mut buffer)));
            for _ in 0..black_box(N) {
                let v = black_box(&mut vec);
                unsafe { v.push_unchecked(black_box(0)) };
            }
            buffer = vec.into();
        });
    }

    /// Baseline: repeated `Vec::reserve(1)` with sufficient capacity.
    #[bench]
    fn bench_reserve(b: &mut Bencher) {
        let mut buffer = VecT::with_capacity(N);
        b.iter(|| {
            buffer.clear();
            let vec = black_box(&mut buffer);
            for _ in 0..black_box(N) {
                black_box(&mut *vec).reserve(1);
            }
        });
    }

    /// Repeated `FastVec::reserve(1)` fast path (spare capacity present).
    #[bench]
    fn bench_reserve_fast(b: &mut Bencher) {
        let mut buffer = VecT::with_capacity(N);
        b.iter(|| {
            buffer.clear();
            let mut vec = black_box(FastVec::from(core::mem::take(&mut buffer)));
            for _ in 0..black_box(N) {
                black_box(&mut vec).reserve(1);
            }
            buffer = vec.into();
        });
    }
}