// mycelium_alloc/bump.rs

1//! The simplest thing resembling an "allocator" I could possibly create.
2//! Allocates into a "large" static array.
3
4use core::alloc::{GlobalAlloc, Layout};
5use core::cell::UnsafeCell;
6use core::fmt;
7use core::mem::MaybeUninit;
8use core::ptr;
9use core::sync::atomic::{AtomicUsize, Ordering};
10
/// Unwraps an `Option`, or bails out of the *enclosing function* with a null
/// pointer when the value is `None`.
///
/// Only usable inside functions returning `*mut u8` (the allocator entry
/// points), since the `None` arm performs an early `return ptr::null_mut()`.
macro_rules! try_null {
    ($e:expr) => {
        if let Some(x) = $e {
            x
        } else {
            return ptr::null_mut();
        }
    };
}
19
/// A bump allocator that hands out memory from a `SIZE`-byte region,
/// carving allocations downward from the end of the heap.
///
/// Memory is never reclaimed — `dealloc` is a no-op — so this is only
/// suitable as a simple bootstrap/fallback allocator.
pub struct Alloc<const SIZE: usize> {
    /// Backing storage for all allocations.
    heap: Heap<SIZE>,
    /// Number of bytes not yet handed out. Because allocation proceeds
    /// downward from the end, this doubles as the offset of the lowest
    /// allocated byte.
    free: AtomicUsize,
}

/// The heap storage itself, over-aligned to 16 bytes so allocations with
/// common alignments need little or no adjustment.
#[repr(align(16))]
struct Heap<const SIZE: usize>(UnsafeCell<MaybeUninit<[u8; SIZE]>>);

impl<const SIZE: usize> Alloc<SIZE> {
    /// Returns a new allocator with a completely free heap.
    #[must_use]
    pub const fn new() -> Self {
        Self {
            heap: Heap(UnsafeCell::new(MaybeUninit::uninit())),
            free: AtomicUsize::new(SIZE),
        }
    }

    /// Returns the total capacity of the heap in bytes.
    #[must_use]
    pub fn total_size(&self) -> usize {
        SIZE
    }

    /// Returns the number of bytes currently allocated.
    #[must_use]
    pub fn allocated_size(&self) -> usize {
        SIZE - self.free.load(Ordering::Relaxed)
    }

    /// Returns `true` if `addr` lies within this allocator's heap region.
    pub fn owns(&self, addr: *mut u8) -> bool {
        let start_addr = self.heap.0.get() as *mut u8 as usize;
        let end_addr = start_addr + SIZE;
        let addr = addr as usize;
        // Both bounds must hold: an address below `start_addr` was never
        // handed out by this allocator (previously only the upper bound was
        // checked, so *any* lower address was falsely reported as owned).
        addr >= start_addr && addr < end_addr
    }
}
53
54impl<const SIZE: usize> Default for Alloc<SIZE> {
55    fn default() -> Self {
56        Self::new()
57    }
58}
59
unsafe impl<const SIZE: usize> GlobalAlloc for Alloc<SIZE> {
    /// Bump-allocates `layout.size()` bytes from the top (highest address) of
    /// the remaining free region, moving `self.free` downward.
    ///
    /// Returns null when the heap is exhausted, or when alignment adjustment
    /// would push the allocation below the start of the heap.
    ///
    /// # Safety
    ///
    /// See [`GlobalAlloc::alloc`]
    ///
    /// NOTABLE INVARIANTS:
    ///  * `Layout` is non-zero sized (enforced by `GlobalAlloc`)
    ///  * `align` is a power of two (enforced by `Layout::from_size_align`)
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let heap = self.heap.0.get() as *mut u8;

        // Lock-free CAS loop: compute a candidate offset from a snapshot of
        // `free`, then publish it; retry if another thread won the race.
        let mut prev = self.free.load(Ordering::Relaxed);
        loop {
            // Ensure enough space is allocated
            // (`None` here means the request exceeds the remaining space.)
            let new_free = try_null!(prev.checked_sub(layout.size()));

            // Ensure the final pointer is aligned
            // Rounding DOWN is correct because allocation grows downward;
            // `align` is a power of two, so `!(align - 1)` is a valid mask.
            let new_ptr = (heap as usize + new_free) & !(layout.align() - 1);
            // `None` here means alignment pushed the pointer below the start
            // of the heap — out of space.
            let new_free = try_null!(new_ptr.checked_sub(heap as usize));

            match self.free.compare_exchange_weak(
                prev,
                new_free,
                Ordering::Relaxed,
                Ordering::Relaxed,
            ) {
                Ok(_) => return new_ptr as *mut u8,
                Err(next_prev) => {
                    // Another thread changed `free` (or the weak CAS failed
                    // spuriously); recompute from its current value.
                    prev = next_prev;
                    continue;
                }
            }
        }
    }

    /// Does nothing, because freeing is hard.
    ///
    /// (A bump allocator has no per-allocation metadata, so individual
    /// deallocations simply leak; the memory is reclaimed only if the whole
    /// allocator goes away.)
    ///
    /// # Safety
    ///
    /// See [`GlobalAlloc::dealloc`]
    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
        // lol
    }
}
104
// Safety: access to the bump region is guarded by an atomic.
// (`UnsafeCell` makes the type `!Sync` by default. Each byte range of the
// heap is reserved by exactly one successful CAS on `free` before its
// pointer is handed out, so two threads never receive overlapping ranges.)
unsafe impl<const SIZE: usize> Sync for Alloc<SIZE> {}
107
108impl<const SIZE: usize> fmt::Debug for Alloc<SIZE> {
109    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
110        let Self { free, heap: _ } = self;
111        f.debug_struct("bump::Alloc")
112            .field("heap", &format_args!("[u8; {SIZE}]"))
113            .field("free", &free)
114            .field("allocated_size", &self.allocated_size())
115            .finish()
116    }
117}
118
#[cfg(test)]
mod test {
    use super::*;
    use core::mem;

    /// Two `u32` allocations should be `u32`-aligned and adjacent; because
    /// the allocator bumps *downward*, the second pointer sits 4 bytes
    /// *below* the first (hence `p0 - p1`, not `p1 - p0`).
    #[test]
    fn alloc_small() {
        static ALLOC: Alloc<1024> = Alloc::new();
        let p0 = unsafe { ALLOC.alloc(Layout::new::<u32>()) };
        assert!(!p0.is_null());

        let p1 = unsafe { ALLOC.alloc(Layout::new::<u32>()) };
        assert!(!p1.is_null());

        assert_eq!(p0.align_offset(mem::align_of::<u32>()), 0);
        assert_eq!(p1.align_offset(mem::align_of::<u32>()), 0);

        // bump-down: later allocation is at the lower address
        assert_eq!((p0 as usize) - (p1 as usize), 4);
    }

    /// After two 1-byte allocations, a `u32` allocation must be rounded
    /// down to a 4-byte boundary, leaving a gap below the `u8`s.
    #[test]
    fn alloc_alignment() {
        static ALLOC: Alloc<1024> = Alloc::new();
        let p0 = unsafe { ALLOC.alloc(Layout::new::<u8>()) };
        assert!(!p0.is_null());

        let p1 = unsafe { ALLOC.alloc(Layout::new::<u8>()) };
        assert!(!p1.is_null());

        let p2 = unsafe { ALLOC.alloc(Layout::new::<u32>()) };
        assert!(!p2.is_null());

        // consecutive u8s are adjacent, growing downward
        assert_eq!((p0 as usize) - (p1 as usize), 1);
        // the u32 landed strictly below p1 (possibly with an alignment gap)
        assert!((p1 as usize) - (p2 as usize) > 0);

        assert_eq!(p2.align_offset(mem::align_of::<u32>()), 0);
    }
}
156}