//! The simplest thing resembling an "allocator" I could possibly create.
//! Allocates into a "large" static array.
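//!
//! # Example
//!
//! A minimal usage sketch, assuming this module is mounted at path `bump`
//! (matching the name used in the `Debug` impl) and picking an arbitrary
//! 64 KiB heap:
//!
//! ```rust,ignore
//! #[global_allocator]
//! static ALLOC: bump::Alloc<{ 64 * 1024 }> = bump::Alloc::new();
//! ```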

use core::alloc::{GlobalAlloc, Layout};
use core::cell::UnsafeCell;
use core::fmt;
use core::mem::MaybeUninit;
use core::ptr;
use core::sync::atomic::{AtomicUsize, Ordering};

macro_rules! try_null {
    ($e:expr) => {
        match $e {
            Some(x) => x,
            None => return ptr::null_mut(),
        }
    };
}

pub struct Alloc<const SIZE: usize> {
    /// Backing storage for all allocations.
    heap: Heap<SIZE>,
    /// Number of bytes still free. Allocation bumps this value *downward*
    /// from `SIZE`, so it doubles as the offset of the most recent
    /// allocation from the start of the heap.
    free: AtomicUsize,
}

// Over-align the backing storage so the heap starts at the largest
// fundamental alignment on most targets; larger alignment requests are
// handled by rounding down in `alloc`.
#[repr(align(16))]
struct Heap<const SIZE: usize>(UnsafeCell<MaybeUninit<[u8; SIZE]>>);

impl<const SIZE: usize> Alloc<SIZE> {
    #[must_use]
    pub const fn new() -> Self {
        Self {
            heap: Heap(UnsafeCell::new(MaybeUninit::uninit())),
            free: AtomicUsize::new(SIZE),
        }
    }

    /// Returns the total capacity of the heap in bytes.
    #[must_use]
    pub fn total_size(&self) -> usize {
        SIZE
    }

    /// Returns the number of bytes handed out so far.
    #[must_use]
    pub fn allocated_size(&self) -> usize {
        SIZE - self.free.load(Ordering::Relaxed)
    }

    /// Returns `true` if `addr` points into this allocator's backing storage.
    pub fn owns(&self, addr: *mut u8) -> bool {
        let start_addr = self.heap.0.get() as *mut u8 as usize;
        let end_addr = start_addr + SIZE;
        let addr = addr as usize;
        start_addr <= addr && addr < end_addr
    }
}

unsafe impl<const SIZE: usize> GlobalAlloc for Alloc<SIZE> {
    /// # Safety
    ///
    /// See [`GlobalAlloc::alloc`]
    ///
    /// NOTABLE INVARIANTS:
    ///  * `Layout` is non-zero sized (enforced by `GlobalAlloc`)
    ///  * `align` is a power of two (enforced by `Layout::from_size_align`)
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let heap = self.heap.0.get() as *mut u8;

        let mut prev = self.free.load(Ordering::Relaxed);
        loop {
            // Reserve `layout.size()` bytes, bailing out if the heap is exhausted
            let new_free = try_null!(prev.checked_sub(layout.size()));

            // Round the candidate pointer *down* to the requested alignment;
            // rounding down is sound because we bump from the top of the heap
            // toward the bottom
            let new_ptr = (heap as usize + new_free) & !(layout.align() - 1);
            // If aligning pushed the pointer below the heap's start, there is
            // no room left for this allocation
            let new_free = try_null!(new_ptr.checked_sub(heap as usize));

            match self.free.compare_exchange_weak(
                prev,
                new_free,
                Ordering::Relaxed,
                Ordering::Relaxed,
            ) {
                Ok(_) => return new_ptr as *mut u8,
                Err(next_prev) => {
                    prev = next_prev;
                    continue;
                }
            }
        }
    }

    /// Does nothing, because freeing is hard.
    ///
    /// # Safety
    ///
    /// See [`GlobalAlloc::dealloc`]
    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
        // lol
    }
}

// Safety: the only shared mutable state is the bump offset, which is guarded
// by an atomic; the regions handed out to callers are disjoint.
unsafe impl<const SIZE: usize> Sync for Alloc<SIZE> {}

impl<const SIZE: usize> fmt::Debug for Alloc<SIZE> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let Self { free, heap: _ } = self;
        f.debug_struct("bump::Alloc")
            .field("heap", &format_args!("[u8; {SIZE}]"))
            .field("free", &free)
            .field("allocated_size", &self.allocated_size())
            .finish()
    }
}
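
// For reference, the `Debug` impl above renders roughly like this
// (field values are illustrative):
//
//     bump::Alloc { heap: [u8; 1024], free: 1016, allocated_size: 8 }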

#[cfg(test)]
mod test {
    use super::*;
    use core::mem;

    #[test]
    fn alloc_small() {
        static ALLOC: Alloc<1024> = Alloc::new();
        let p0 = unsafe { ALLOC.alloc(Layout::new::<u32>()) };
        assert!(!p0.is_null());

        let p1 = unsafe { ALLOC.alloc(Layout::new::<u32>()) };
        assert!(!p1.is_null());

        assert_eq!(p0.align_offset(mem::align_of::<u32>()), 0);
        assert_eq!(p1.align_offset(mem::align_of::<u32>()), 0);

        // The allocator bumps downward, so the earlier pointer is the higher one
        assert_eq!((p0 as usize) - (p1 as usize), 4);
    }

    #[test]
    fn alloc_alignment() {
        static ALLOC: Alloc<1024> = Alloc::new();
        let p0 = unsafe { ALLOC.alloc(Layout::new::<u8>()) };
        assert!(!p0.is_null());

        let p1 = unsafe { ALLOC.alloc(Layout::new::<u8>()) };
        assert!(!p1.is_null());

        let p2 = unsafe { ALLOC.alloc(Layout::new::<u32>()) };
        assert!(!p2.is_null());

        // Pointers descend: each allocation sits below the previous one
        assert_eq!((p0 as usize) - (p1 as usize), 1);
        assert!((p1 as usize) - (p2 as usize) > 0);

        assert_eq!(p2.align_offset(mem::align_of::<u32>()), 0);
    }
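
    // A minimal exhaustion check: requesting more memory than the heap
    // holds should fail cleanly by returning null rather than panicking.
    #[test]
    fn alloc_exhausted() {
        static ALLOC: Alloc<16> = Alloc::new();
        let p = unsafe { ALLOC.alloc(Layout::new::<[u8; 32]>()) };
        assert!(p.is_null());
    }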
}