use alloc::vec::Vec;
use core::marker::PhantomData;
use core::mem::{ManuallyDrop, MaybeUninit};
/// Vec implementation used throughout this module (pointer-triple layout).
pub type VecImpl<T> = FastVec<T>;
/// Slice implementation used throughout this module; tracks its end pointer
/// only in debug builds.
pub type SliceImpl<'a, T> = FastSlice<'a, T>;
/// A `Vec<T>` stored as three raw pointers (start, one-past-last-initialized,
/// one-past-allocation) instead of (ptr, len, cap), so length and spare
/// capacity are single pointer subtractions and push is a pointer bump.
/// `_spooky` makes the type own its `T`s for drop-check/variance purposes.
pub struct FastVec<T> {
    // start <= end <= capacity; all derived from one Vec allocation.
    start: *mut T, end: *mut T, capacity: *mut T, _spooky: PhantomData<Vec<T>>,
}
impl<T> Default for FastVec<T> {
    /// An empty `FastVec`, converted from an empty (non-allocating) `Vec`.
    fn default() -> Self {
        Vec::new().into()
    }
}
impl<T> Drop for FastVec<T> {
    fn drop(&mut self) {
        unsafe {
            // Rebuild the backing Vec from a bitwise copy of `self` and let
            // Vec's Drop run element destructors and free the buffer. The
            // Vec-to-FastVec conversion never double-frees because
            // `From<FastVec<T>> for Vec<T>` forgets the FastVec it consumes.
            drop(Vec::from(core::ptr::read(self)));
        }
    }
}
// SAFETY: FastVec uniquely owns its buffer exactly like Vec<T> (see the
// PhantomData<Vec<T>> field), so it mirrors Vec's Send/Sync bounds on T.
unsafe impl<T: Send> Send for FastVec<T> {}
unsafe impl<T: Sync> Sync for FastVec<T> {}
/// Number of `T` elements between `origin` and `ptr` (a stable stand-in for
/// the unstable `<*mut T>::sub_ptr`).
///
/// Assumes `ptr >= origin`, both derived from the same allocation, and that
/// `T` is not zero-sized (the division would otherwise be by zero; callers
/// like `From<Vec<T>>` reject ZSTs up front).
#[inline(always)]
fn sub_ptr<T>(ptr: *mut T, origin: *mut T) -> usize {
    let byte_offset = ptr as usize - origin as usize;
    byte_offset / core::mem::size_of::<T>()
}
impl<T> From<FastVec<T>> for Vec<T> {
    /// Hands the buffer back to a real `Vec`, transferring ownership.
    fn from(fast: FastVec<T>) -> Self {
        // Read the fields before `forget` consumes `fast`.
        let start = fast.start;
        let length = fast.len();
        let capacity = sub_ptr(fast.capacity, fast.start);
        // Skip FastVec::drop — the new Vec now owns the allocation.
        core::mem::forget(fast);
        unsafe { Vec::from_raw_parts(start, length, capacity) }
    }
}
impl<T> From<Vec<T>> for FastVec<T> {
fn from(mut vec: Vec<T>) -> Self {
assert_ne!(core::mem::size_of::<T>(), 0);
let start = vec.as_mut_ptr();
let end = unsafe { start.add(vec.len()) };
let capacity = unsafe { start.add(vec.capacity()) };
core::mem::forget(vec);
Self {
start,
end,
capacity,
_spooky: Default::default(),
}
}
}
impl<T> FastVec<T> {
    /// Number of initialized elements (`end - start`).
    pub fn len(&self) -> usize {
        sub_ptr(self.end, self.start)
    }
    /// The initialized elements as a shared slice.
    pub fn as_slice(&self) -> &[T] {
        unsafe { core::slice::from_raw_parts(self.start, self.len()) }
    }
    /// The initialized elements as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { core::slice::from_raw_parts_mut(self.start, self.len()) }
    }
    /// Drops all elements while keeping the allocation.
    pub fn clear(&mut self) {
        unsafe {
            let elems: *mut [T] = self.as_mut_slice();
            // Truncate to empty *before* running destructors so a panicking
            // Drop leaves the vector empty (remaining elements leak) instead
            // of still covering already-dropped elements — same ordering as
            // Vec::clear.
            self.end = self.start;
            core::ptr::drop_in_place(elems);
        }
    }
    /// Ensures spare capacity for at least `additional` more elements.
    pub fn reserve(&mut self, additional: usize) {
        if additional > sub_ptr(self.capacity, self.end) {
            // Out-of-line cold slow path: the hot caller pays only for the
            // pointer comparison above.
            #[cold]
            #[inline(never)]
            fn reserve_slow<T>(me: &mut FastVec<T>, additional: usize) {
                unsafe {
                    me.mut_vec(|v| {
                        // The caller already proved the spare capacity is
                        // insufficient; asserting that to the optimizer lets
                        // Vec::reserve's own capacity check fold away.
                        if additional <= v.capacity().wrapping_sub(v.len()) {
                            core::hint::unreachable_unchecked();
                        }
                        v.reserve(additional);
                    });
                }
            }
            reserve_slow(self, additional);
        }
    }
    /// Temporarily reassembles the backing `Vec`, applies `f` (which may
    /// reallocate), then writes the resulting pointers back into `self`.
    ///
    /// # Safety
    /// `self` must hold a buffer originally obtained from a `Vec<T>`.
    /// NOTE(review): if `f` panics, the ManuallyDrop'd Vec is leaked and
    /// `self` keeps its previous pointers — confirm callers only pass
    /// allocation-only closures (as `reserve` does).
    unsafe fn mut_vec(&mut self, f: impl FnOnce(&mut Vec<T>)) {
        let copied = core::ptr::read(self as *mut FastVec<T>);
        // ManuallyDrop: the temporary Vec must not free the buffer that
        // `self` still refers to.
        let mut vec = ManuallyDrop::new(Vec::from(copied));
        f(&mut vec);
        let copied = FastVec::from(ManuallyDrop::into_inner(vec));
        core::ptr::write(self as *mut FastVec<T>, copied);
    }
    /// Raw cursor to the one-past-last-initialized position, for bulk writes
    /// that are later committed with `set_end_ptr`.
    #[inline(always)]
    pub fn end_ptr(&mut self) -> *mut T {
        debug_assert!(self.end <= self.capacity);
        self.end
    }
    /// Commits elements written through `end_ptr` by moving the end cursor.
    #[inline(always)]
    pub fn set_end_ptr(&mut self, end: *mut T) {
        self.end = end;
        debug_assert!(self.end <= self.capacity);
    }
    /// Grows the length by one without writing; the element at the old end
    /// position must already be initialized.
    #[inline(always)]
    pub unsafe fn increment_len(&mut self) {
        self.end = self.end.add(1);
        debug_assert!(self.end <= self.capacity);
    }
}
/// Append without a capacity check.
pub trait PushUnchecked<T> {
    /// # Safety
    /// The container must have spare capacity for at least one more element.
    unsafe fn push_unchecked(&mut self, t: T);
}
impl<T> PushUnchecked<T> for FastVec<T> {
    /// Writes `t` at `end` and bumps the cursor — no branch, no capacity
    /// check (caller guarantees spare capacity, asserted in debug builds).
    #[inline(always)]
    unsafe fn push_unchecked(&mut self, t: T) {
        debug_assert!(self.end < self.capacity);
        core::ptr::write(self.end, t);
        self.end = self.end.add(1);
    }
}
impl<T> PushUnchecked<T> for Vec<T> {
    /// Appends `t` without growing; the caller guarantees
    /// `len() < capacity()` (asserted in debug builds).
    #[inline(always)]
    unsafe fn push_unchecked(&mut self, t: T) {
        let len = self.len();
        debug_assert!(len < self.capacity());
        // Write directly into the spare capacity, then commit the length.
        self.as_mut_ptr().add(len).write(t);
        self.set_len(len + 1);
    }
}
/// A fixed-capacity, non-allocating vector over a caller-provided
/// uninitialized array. `T: Copy` means elements never need dropping (there
/// is no `Drop` impl), and the `'a` borrow keeps the backing storage alive.
pub struct FastArrayVec<'a, T: Copy, const N: usize> {
    // First element of the backing array.
    start: *mut T,
    // One past the last element written so far.
    end: *mut T,
    _spooky: PhantomData<&'a mut T>,
}
impl<'a, T: Copy, const N: usize> FastArrayVec<'a, T, N> {
    /// Creates an empty vector backed by `uninit`'s storage.
    #[inline(always)]
    pub fn new(uninit: &'a mut MaybeUninit<[T; N]>) -> Self {
        let base = uninit.as_mut_ptr().cast::<T>();
        Self {
            start: base,
            end: base,
            _spooky: PhantomData,
        }
    }
    /// The initialized prefix as a slice.
    #[inline(always)]
    pub fn as_slice(&self) -> &[T] {
        // Everything in start..end was written by push_unchecked.
        unsafe { core::slice::from_raw_parts(self.start, sub_ptr(self.end, self.start)) }
    }
}
impl<'a, T: Copy, const N: usize> PushUnchecked<T> for FastArrayVec<'a, T, N> {
#[inline(always)]
unsafe fn push_unchecked(&mut self, t: T) {
core::ptr::write(self.end, t);
self.end = self.end.add(1);
}
}
/// A read cursor over a borrowed slice. In release builds it is a single
/// pointer (no length); in debug builds the end pointer is also stored so the
/// unchecked methods can assert they stay in bounds.
#[derive(Clone)]
pub struct FastSlice<'a, T> {
    ptr: *const T,
    #[cfg(debug_assertions)]
    end: *const T, _spooky: PhantomData<&'a T>,
}
impl<T> Default for FastSlice<'_, T> {
    /// A cursor over the empty slice.
    fn default() -> Self {
        let empty: &[T] = &[];
        empty.into()
    }
}
// SAFETY: FastSlice is a read-only view.
// NOTE(review): a shared view would ordinarily need `T: Sync` to be `Send`
// (like `&[T]`); the `T: Send` bound here presumably matches how CowSlice
// (which owns its Vec) is moved between threads — confirm no FastSlice is
// sent while another thread retains access to `T: Send + !Sync` elements.
unsafe impl<T: Send> Send for FastSlice<'_, T> {}
unsafe impl<T: Sync> Sync for FastSlice<'_, T> {}
impl<'a, T> From<&'a [T]> for FastSlice<'a, T> {
    /// Starts a cursor at the beginning of `slice`.
    fn from(slice: &'a [T]) -> Self {
        let range = slice.as_ptr_range();
        Self {
            ptr: range.start,
            // The end pointer only exists (and is only checked) in debug builds.
            #[cfg(debug_assertions)]
            end: range.end,
            _spooky: PhantomData,
        }
    }
}
impl<'a, T> FastSlice<'a, T> {
    /// Builds a cursor from raw parts.
    ///
    /// # Safety
    /// `ptr` must be valid for reads of `len` elements for `'a`.
    #[inline(always)]
    pub unsafe fn from_raw_parts(ptr: *const T, len: usize) -> Self {
        // `len` is only materialized in debug builds; this silences the
        // unused-variable warning in release builds.
        let _ = len;
        Self {
            ptr,
            #[cfg(debug_assertions)]
            end: ptr.wrapping_add(len),
            _spooky: PhantomData,
        }
    }
    /// Returns a pointer to the current element and advances by one.
    ///
    /// # Safety
    /// At least one element must remain.
    #[inline(always)]
    pub unsafe fn next_unchecked_as_ptr(&mut self) -> *const T {
        // wrapping_add keeps the debug check itself free of pointer-overflow
        // UB even when the caller has violated the contract.
        #[cfg(debug_assertions)]
        assert!((self.ptr.wrapping_add(1) as usize) <= self.end as usize);
        let p = self.ptr;
        self.ptr = self.ptr.add(1);
        p
    }
    /// Advances the cursor by `n` elements.
    ///
    /// # Safety
    /// At least `n` elements must remain.
    #[inline(always)]
    pub unsafe fn advance(&mut self, n: usize) {
        #[cfg(debug_assertions)]
        assert!((self.ptr.wrapping_add(n) as usize) <= self.end as usize);
        self.ptr = self.ptr.add(n);
    }
    /// Current read position.
    #[inline(always)]
    pub fn as_ptr(&self) -> *const T {
        self.ptr
    }
    /// Reinterprets the element type as `B`.
    ///
    /// Sound because the asserts pin `B` to `T`'s exact size and alignment
    /// and both are `Pod` (every bit pattern valid).
    #[inline(always)]
    pub fn cast<B>(&mut self) -> &mut FastSlice<'a, B>
    where
        T: bytemuck::Pod,
        B: bytemuck::Pod,
    {
        use core::mem::*;
        assert_eq!(size_of::<T>(), size_of::<B>());
        assert_eq!(align_of::<T>(), align_of::<B>());
        unsafe { transmute(self) }
    }
}
/// Unchecked cursor-style reads from a slice-like source.
pub trait NextUnchecked<'a, T: Copy> {
    /// # Safety
    /// At least one element must remain.
    unsafe fn next_unchecked(&mut self) -> T;
    /// # Safety
    /// At least `length` elements must remain.
    unsafe fn chunk_unchecked(&mut self, length: usize) -> &'a [T];
}
impl<'a, T: Copy> NextUnchecked<'a, T> for FastSlice<'a, T> {
    /// Copies the current element and advances by one. Bounds are only
    /// checked in debug builds (release builds store no `end`).
    #[inline(always)]
    unsafe fn next_unchecked(&mut self) -> T {
        #[cfg(debug_assertions)]
        assert!((self.ptr.wrapping_add(1) as usize) <= self.end as usize);
        let t = *self.ptr;
        self.ptr = self.ptr.add(1);
        t
    }
    /// Borrows the next `length` elements as a slice and advances past them.
    #[inline(always)]
    unsafe fn chunk_unchecked(&mut self, length: usize) -> &'a [T] {
        #[cfg(debug_assertions)]
        assert!((self.ptr.wrapping_add(length) as usize) <= self.end as usize);
        let slice = core::slice::from_raw_parts(self.ptr, length);
        self.ptr = self.ptr.add(length);
        slice
    }
}
impl<'a, T: Copy> NextUnchecked<'a, T> for &'a [T] {
    /// Copies the first element and advances the slice past it.
    #[inline(always)]
    unsafe fn next_unchecked(&mut self) -> T {
        let value = *self.get_unchecked(0);
        *self = self.get_unchecked(1..);
        value
    }
    /// Borrows the first `length` elements and advances the slice past them.
    #[inline(always)]
    unsafe fn chunk_unchecked(&mut self, length: usize) -> &'a [T] {
        let (chunk, rest) = (self.get_unchecked(..length), self.get_unchecked(length..));
        *self = rest;
        chunk
    }
}
/// Clone-on-write-style slice: `slice` is the current view, which either
/// borrows external `'borrowed` data or points into the owned `vec`.
#[derive(Default)]
pub struct CowSlice<'borrowed, T> {
    slice: SliceImpl<'borrowed, T>, vec: Vec<T>,
}
impl<'borrowed, T> CowSlice<'borrowed, T> {
    /// Creates an empty `CowSlice` that reuses `vec`'s allocation.
    pub fn with_allocation(mut vec: Vec<T>) -> Self {
        vec.clear();
        Self {
            slice: [].as_slice().into(),
            vec,
        }
    }
    /// Extracts the allocation for later reuse (any owned contents dropped).
    pub fn into_allocation(mut self) -> Vec<T> {
        self.vec.clear();
        self.vec
    }
    /// Views the current data as a slice of `len` elements.
    ///
    /// # Safety
    /// `len` must be exactly the length of the current view; debug builds
    /// assert `ptr + len == end`.
    #[must_use]
    pub unsafe fn as_slice<'me>(&'me self, len: usize) -> &'me [T]
    where
        'borrowed: 'me,
    {
        #[cfg(debug_assertions)]
        assert_eq!(self.slice.ptr.wrapping_add(len), self.slice.end);
        core::slice::from_raw_parts(self.slice.ptr, len)
    }
    /// Borrows the current view with its lifetime parameter shrunk to `'me`.
    #[must_use]
    #[inline(always)]
    pub fn ref_slice<'me>(&'me self) -> &'me SliceImpl<'me, T>
    where
        'borrowed: 'me,
    {
        // Only shortens the lifetime parameter ('borrowed: 'me).
        let slice: &'me SliceImpl<'me, T> = unsafe { core::mem::transmute(&self.slice) };
        slice
    }
    /// Mutably borrows the current view with its lifetime shrunk to `'me`.
    #[must_use]
    #[inline(always)]
    pub fn mut_slice<'me>(&'me mut self) -> &'me mut SliceImpl<'me, T>
    where
        'borrowed: 'me,
    {
        // Only shortens the lifetime parameter ('borrowed: 'me).
        let slice: &'me mut SliceImpl<'me, T> = unsafe { core::mem::transmute(&mut self.slice) };
        slice
    }
    /// Points the view at externally borrowed data.
    pub fn set_borrowed(&mut self, slice: &'borrowed [T]) {
        self.slice = slice.into();
    }
    /// Points the view at an already-built `SliceImpl`.
    pub fn set_borrowed_slice_impl(&mut self, slice: SliceImpl<'borrowed, T>) {
        self.slice = slice;
    }
    /// Switches to owned mode: returns a guard that derefs to the cleared
    /// `vec`; when the guard drops, the view points at the vec's contents.
    #[must_use]
    pub fn set_owned(&mut self) -> SetOwned<'_, 'borrowed, T> {
        // Reset the view first so it cannot dangle while `vec` is mutated.
        self.slice = [].as_slice().into();
        self.vec.clear();
        SetOwned(self)
    }
    /// Mutates the owned `vec` (must already be in owned mode) and re-points
    /// the view afterwards, since `f` may reallocate.
    pub fn mut_owned<R>(&mut self, f: impl FnOnce(&mut Vec<T>) -> R) -> R {
        assert!(
            core::ptr::eq(self.slice.ptr, self.vec.as_ptr()),
            "not owned"
        );
        // Detach the view while `vec` is mutated so it never dangles.
        self.slice = [].as_slice().into();
        let ret = f(&mut self.vec);
        // NOTE(review): extends the borrow of `vec` to 'borrowed; relies on
        // the view always being reset (as above / in set_owned) before any
        // future mutation of `vec` — confirm all mutation paths do this.
        let slice: &'borrowed [T] = unsafe { core::mem::transmute(self.vec.as_slice()) };
        self.slice = slice.into();
        ret
    }
    /// Reinterprets the element type as `B`; same size/align/Pod guard as
    /// `FastSlice::cast`.
    #[inline]
    pub fn cast_mut<B>(&mut self) -> &mut CowSlice<'borrowed, B>
    where
        T: bytemuck::Pod,
        B: bytemuck::Pod,
    {
        use core::mem::*;
        assert_eq!(size_of::<T>(), size_of::<B>());
        assert_eq!(align_of::<T>(), align_of::<B>());
        unsafe { transmute(self) }
    }
}
pub struct SetOwned<'a, 'borrowed, T>(&'a mut CowSlice<'borrowed, T>);
impl<'borrowed, T> Drop for SetOwned<'borrowed, T> {
    fn drop(&mut self) {
        // Re-point the CowSlice's view at whatever was pushed into `vec`.
        // NOTE(review): this extends the borrow of `vec` to 'borrowed, same
        // as CowSlice::mut_owned — sound only while the view is reset before
        // `vec` is mutated again.
        let slice: &'borrowed [T] = unsafe { core::mem::transmute(self.0.vec.as_slice()) };
        self.0.slice = slice.into();
    }
}
impl<'a, T> core::ops::Deref for SetOwned<'a, '_, T> {
type Target = Vec<T>;
fn deref(&self) -> &Self::Target {
&self.0.vec
}
}
impl<'a, T> core::ops::DerefMut for SetOwned<'a, '_, T> {
    /// Mutable access to the vector being filled.
    fn deref_mut(&mut self) -> &mut Vec<T> {
        &mut self.0.vec
    }
}
/// Wrapper forcing alignment 1 (`repr(packed)`) so a `T` can be reinterpreted
/// at unaligned positions, e.g. when casting byte buffers with bytemuck.
#[derive(Copy, Clone)]
#[repr(C, packed)]
pub struct Unaligned<T>(T);
// SAFETY: repr(C, packed) around a single field adds no padding and no
// bit-validity requirements beyond T's own, so Zeroable/Pod carry over.
unsafe impl<T: bytemuck::Zeroable> bytemuck::Zeroable for Unaligned<T> {}
unsafe impl<T: bytemuck::Pod> bytemuck::Pod for Unaligned<T> {}
#[cfg(test)]
mod tests {
use super::*;
use test::{black_box, Bencher};
#[test]
fn test_as_slice() {
let mut vec = FastVec::default();
vec.reserve(2);
unsafe {
vec.push_unchecked(1);
vec.push_unchecked(2);
}
assert_eq!(vec.as_slice(), [1, 2]);
}
const N: usize = 1000;
type VecT = Vec<u32>;
#[bench]
fn bench_next_unchecked(b: &mut Bencher) {
let src: VecT = vec![0; N];
b.iter(|| {
let mut slice = src.as_slice();
for _ in 0..black_box(N) {
unsafe { black_box(black_box(&mut slice).next_unchecked()) };
}
});
}
#[bench]
fn bench_next_unchecked_fast(b: &mut Bencher) {
let src: VecT = vec![0; N];
b.iter(|| {
let mut fast_slice = FastSlice::from(src.as_slice());
for _ in 0..black_box(N) {
unsafe { black_box(black_box(&mut fast_slice).next_unchecked()) };
}
});
}
#[bench]
fn bench_push(b: &mut Bencher) {
let mut buffer = VecT::with_capacity(N);
b.iter(|| {
buffer.clear();
let vec = black_box(&mut buffer);
for _ in 0..black_box(N) {
let v = black_box(&mut *vec);
v.push(black_box(0));
}
});
}
#[bench]
fn bench_push_fast(b: &mut Bencher) {
let mut buffer = VecT::with_capacity(N);
b.iter(|| {
buffer.clear();
let mut vec = black_box(FastVec::from(core::mem::take(&mut buffer)));
for _ in 0..black_box(N) {
let v = black_box(&mut vec);
v.reserve(1);
unsafe { v.push_unchecked(black_box(0)) };
}
buffer = vec.into();
});
}
#[bench]
fn bench_push_unchecked(b: &mut Bencher) {
let mut buffer = VecT::with_capacity(N);
b.iter(|| {
buffer.clear();
let vec = black_box(&mut buffer);
for _ in 0..black_box(N) {
let v = black_box(&mut *vec);
unsafe { v.push_unchecked(black_box(0)) };
}
});
}
#[bench]
fn bench_push_unchecked_fast(b: &mut Bencher) {
let mut buffer = VecT::with_capacity(N);
b.iter(|| {
buffer.clear();
let mut vec = black_box(FastVec::from(core::mem::take(&mut buffer)));
for _ in 0..black_box(N) {
let v = black_box(&mut vec);
unsafe { v.push_unchecked(black_box(0)) };
}
buffer = vec.into();
});
}
#[bench]
fn bench_reserve(b: &mut Bencher) {
let mut buffer = VecT::with_capacity(N);
b.iter(|| {
buffer.clear();
let vec = black_box(&mut buffer);
for _ in 0..black_box(N) {
black_box(&mut *vec).reserve(1);
}
});
}
#[bench]
fn bench_reserve_fast(b: &mut Bencher) {
let mut buffer = VecT::with_capacity(N);
b.iter(|| {
buffer.clear();
let mut vec = black_box(FastVec::from(core::mem::take(&mut buffer)));
for _ in 0..black_box(N) {
black_box(&mut vec).reserve(1);
}
buffer = vec.into();
});
}
}