1use core::alloc::{GlobalAlloc, Layout};
5use core::cell::UnsafeCell;
6use core::fmt;
7use core::mem::MaybeUninit;
8use core::ptr;
9use core::sync::atomic::{AtomicUsize, Ordering};
10
/// Unwraps an `Option`, or returns a null pointer from the *enclosing*
/// function when the value is `None`.
///
/// Only meaningful inside functions returning `*mut u8` (the `GlobalAlloc`
/// allocation path).
macro_rules! try_null {
    ($e:expr) => {
        if let Some(x) = $e {
            x
        } else {
            return ptr::null_mut();
        }
    };
}
19
/// A fixed-size, lock-free bump allocator backed by an inline byte region.
///
/// Allocation moves the `free` cursor downward from the end of the region;
/// memory is never reclaimed (`dealloc` in the `GlobalAlloc` impl is a no-op).
pub struct Alloc<const SIZE: usize> {
    /// Backing storage; this type only hands out addresses into it.
    heap: Heap<SIZE>,
    /// Offset from the heap base of the lowest free byte; counts down from
    /// `SIZE` toward 0.
    free: AtomicUsize,
}

/// Uninitialized backing storage, over-aligned to 16 bytes so small
/// alignment requests need no extra adjustment at the top of the region.
#[repr(align(16))]
struct Heap<const SIZE: usize>(UnsafeCell<MaybeUninit<[u8; SIZE]>>);

impl<const SIZE: usize> Alloc<SIZE> {
    /// Creates an empty allocator. `const`, so it is usable in `static`
    /// initializers.
    #[must_use]
    pub const fn new() -> Self {
        Self {
            heap: Heap(UnsafeCell::new(MaybeUninit::uninit())),
            free: AtomicUsize::new(SIZE),
        }
    }

    /// Total capacity of the backing region, in bytes.
    #[must_use]
    pub fn total_size(&self) -> usize {
        SIZE
    }

    /// Bytes handed out so far (capacity minus the free cursor).
    #[must_use]
    pub fn allocated_size(&self) -> usize {
        SIZE - self.free.load(Ordering::Relaxed)
    }

    /// Returns `true` if `addr` lies inside this allocator's heap region.
    ///
    /// Checks both bounds. The previous version tested only
    /// `addr < end_addr`, so every address *below* the heap base was
    /// incorrectly reported as owned.
    pub fn owns(&self, addr: *mut u8) -> bool {
        let start_addr = self.heap.0.get() as *mut u8 as usize;
        let end_addr = start_addr + SIZE;
        let addr = addr as usize;
        start_addr <= addr && addr < end_addr
    }
}
53
impl<const SIZE: usize> Default for Alloc<SIZE> {
    /// Equivalent to [`Alloc::new`]: an empty allocator of `SIZE` bytes.
    fn default() -> Self {
        Self::new()
    }
}
59
60unsafe impl<const SIZE: usize> GlobalAlloc for Alloc<SIZE> {
61 unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
69 let heap = self.heap.0.get() as *mut u8;
70
71 let mut prev = self.free.load(Ordering::Relaxed);
72 loop {
73 let new_free = try_null!(prev.checked_sub(layout.size()));
75
76 let new_ptr = (heap as usize + new_free) & !(layout.align() - 1);
78 let new_free = try_null!(new_ptr.checked_sub(heap as usize));
79
80 match self.free.compare_exchange_weak(
81 prev,
82 new_free,
83 Ordering::Relaxed,
84 Ordering::Relaxed,
85 ) {
86 Ok(_) => return new_ptr as *mut u8,
87 Err(next_prev) => {
88 prev = next_prev;
89 continue;
90 }
91 }
92 }
93 }
94
95 unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
101 }
103}
104
// SAFETY: the only state mutated through a shared `&Alloc` is the `free`
// counter, which is an `AtomicUsize`; `alloc` performs address arithmetic on
// the heap pointer but never reads or writes the heap bytes themselves, so
// the `UnsafeCell` contents are not raced on by this type.
unsafe impl<const SIZE: usize> Sync for Alloc<SIZE> {}
107
108impl<const SIZE: usize> fmt::Debug for Alloc<SIZE> {
109 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
110 let Self { free, heap: _ } = self;
111 f.debug_struct("bump::Alloc")
112 .field("heap", &format_args!("[u8; {SIZE}]"))
113 .field("free", &free)
114 .field("allocated_size", &self.allocated_size())
115 .finish()
116 }
117}
118
#[cfg(test)]
mod test {
    use super::*;
    use core::mem;

    /// Two `u32` allocations from a fresh allocator: both non-null, both
    /// 4-byte aligned, and the second sits exactly 4 bytes *below* the
    /// first (the allocator bumps downward from the top of the region).
    #[test]
    fn alloc_small() {
        static ALLOC: Alloc<1024> = Alloc::new();
        let p0 = unsafe { ALLOC.alloc(Layout::new::<u32>()) };
        assert!(!p0.is_null());

        let p1 = unsafe { ALLOC.alloc(Layout::new::<u32>()) };
        assert!(!p1.is_null());

        assert_eq!(p0.align_offset(mem::align_of::<u32>()), 0);
        assert_eq!(p1.align_offset(mem::align_of::<u32>()), 0);

        // Downward bump: p1 < p0, separated by exactly size_of::<u32>().
        assert_eq!((p0 as usize) - (p1 as usize), 4);
    }

    /// After two byte-sized allocations, a `u32` request must still come
    /// back 4-byte aligned (the cursor is rounded down to the alignment),
    /// and strictly below the previous allocation.
    #[test]
    fn alloc_alignment() {
        static ALLOC: Alloc<1024> = Alloc::new();
        let p0 = unsafe { ALLOC.alloc(Layout::new::<u8>()) };
        assert!(!p0.is_null());

        let p1 = unsafe { ALLOC.alloc(Layout::new::<u8>()) };
        assert!(!p1.is_null());

        let p2 = unsafe { ALLOC.alloc(Layout::new::<u32>()) };
        assert!(!p2.is_null());

        // Byte allocations are adjacent; the aligned u32 lands lower down.
        assert_eq!((p0 as usize) - (p1 as usize), 1);
        assert!((p1 as usize) - (p2 as usize) > 0);

        assert_eq!(p2.align_offset(mem::align_of::<u32>()), 0);
    }
}
156}