1use core::ptr::NonNull;
23use alloc_crate::alloc::{alloc, alloc_zeroed, dealloc, realloc};
45use crate::stable::{assume, invalid_mut};
67use super::{AllocError, Allocator, Layout};
/// The global memory allocator.
///
/// This type implements the [`Allocator`] trait by forwarding calls
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate's default.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](crate#functions).
///
/// `Global` is a zero-sized unit struct, so it is freely `Copy`able and
/// every instance is interchangeable with every other.
#[derive(Copy, Clone, Default, Debug)]
pub struct Global;
impl Global {
    /// Common implementation of [`Allocator::allocate`] and
    /// [`Allocator::allocate_zeroed`].
    ///
    /// Zero-sized requests never touch the global allocator: the `Allocator`
    /// contract permits returning a dangling pointer aligned to
    /// `layout.align()`. Non-zero requests are forwarded to
    /// `alloc`/`alloc_zeroed`, mapping a null return to `AllocError`.
    #[inline(always)]
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            // SAFETY: alignments are never zero, so `invalid_mut(layout.align())`
            // yields a non-null (dangling) pointer and `new_unchecked` is sound.
            0 => Ok(unsafe {
                NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    invalid_mut(layout.align()),
                    0,
                ))
            }),
            // SAFETY: `layout` is non-zero in size,
            size => unsafe {
                let raw_ptr = if zeroed {
                    alloc_zeroed(layout)
                } else {
                    alloc(layout)
                };
                // The global allocator signals failure by returning null.
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    ptr.as_ptr(),
                    size,
                )))
            },
        }
    }

    // SAFETY: Same as `Allocator::grow`
    //
    // Common implementation of `Allocator::grow` and `Allocator::grow_zeroed`.
    // When `zeroed` is true, the bytes past the old allocation's length are
    // zero-filled after a successful reallocation.
    #[inline(always)]
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            // Growing a zero-sized (dangling) allocation is simply a fresh
            // allocation: there is nothing to copy and nothing to free.
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero as it is greater than or equal to
            // `old_size`, which is non-zero in this arm, as required by safety
            // conditions. Other conditions must be upheld by the caller.
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // Optimizer hint:
                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                assume(new_size >= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    // `realloc` preserves the first `old_size` bytes but leaves
                    // the tail uninitialized; zero it to honor `grow_zeroed`.
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    ptr.as_ptr(),
                    new_size,
                )))
            },

            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                // Alignment changed, so `realloc` cannot be used: allocate a
                // fresh block, copy the old contents, then free the old block.
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                core::ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr().cast(), old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
unsafe impl Allocator for Global {
    /// Allocates uninitialized memory; delegates to `alloc_impl` with
    /// `zeroed == false`.
    #[inline(always)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    /// Allocates zero-initialized memory; delegates to `alloc_impl` with
    /// `zeroed == true`.
    #[inline(always)]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    /// Deallocates `ptr`. Zero-sized layouts were never passed to the global
    /// allocator (see `alloc_impl`, which hands out a dangling pointer), so
    /// they are deliberately ignored here.
    #[inline(always)]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout.size() != 0 {
            // SAFETY: `layout` is non-zero in size,
            // other conditions must be upheld by the caller
            unsafe { dealloc(ptr.as_ptr(), layout) }
        }
    }

    /// Grows an allocation without zeroing the newly added bytes.
    #[inline(always)]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    /// Grows an allocation, zero-filling the newly added bytes.
    #[inline(always)]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    /// Shrinks an allocation. Three strategies, chosen by the new size and
    /// alignment: free entirely (new size 0), `realloc` in place (same
    /// alignment), or allocate-copy-free (alignment changed).
    #[inline(always)]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // Shrinking to zero frees the block and returns a dangling,
            // well-aligned pointer (mirroring `alloc_impl`'s zero-size path).
            // SAFETY: conditions must be upheld by the caller; alignments are
            // never zero, so `invalid_mut(new_layout.align())` is non-null.
            0 => unsafe {
                self.deallocate(ptr, old_layout);
                Ok(NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    invalid_mut(new_layout.align()),
                    0,
                )))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // Optimizer hint:
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                assume(new_size <= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    ptr.as_ptr(),
                    new_size,
                )))
            },

            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                // Alignment changed, so `realloc` cannot be used: allocate a
                // fresh block, copy the retained prefix, then free the old block.
                let new_ptr = self.allocate(new_layout)?;
                core::ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr().cast(), new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}