1pub use alloc::alloc::{GlobalAlloc, Layout};
2use core::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
3use hal_core::{
4 boot::BootInfo,
5 mem::{
6 self,
7 page::{self, Alloc as PageAlloc},
8 },
9 PAddr,
10};
11use mycelium_alloc::{buddy, bump};
12use mycelium_util::fmt;
13
/// The kernel heap allocator.
///
/// Pairs a small fixed-size bump allocator (used during early boot, before
/// any memory regions exist) with a buddy-block allocator that serves all
/// allocations once a region has been added via `add_region`.
#[derive(Debug)]
pub struct Allocator {
    // Early-boot bump allocator backed by a BUMP_REGION_SIZE-byte region.
    bump: bump::Alloc<BUMP_REGION_SIZE>,
    // Main buddy-block heap allocator (const parameter 32 — presumably the
    // number of size classes; confirm against `mycelium_alloc::buddy`).
    allocator: buddy::Alloc<32>,
    // While `true`, `alloc` is served from `bump`; cleared by `add_region`.
    bump_mode: AtomicBool,
    // Count of cores currently inside an allocation call (diagnostics).
    allocating: AtomicUsize,
    // Count of cores currently inside a deallocation call (diagnostics).
    deallocating: AtomicUsize,
}
23
/// Size in bytes of the static early-boot bump-allocation region.
const BUMP_REGION_SIZE: usize = 1024;
26
/// A point-in-time snapshot of allocator statistics, produced by
/// [`Allocator::state`].
#[derive(Debug, Copy, Clone)]
pub struct State {
    // Number of cores that were inside an allocation call.
    pub(crate) allocating: usize,
    // Number of cores that were inside a deallocation call.
    pub(crate) deallocating: usize,
    // Total size of the buddy heap, in bytes.
    pub(crate) heap_size: usize,
    // Bytes currently allocated from the buddy heap.
    pub(crate) allocated: usize,
    // Minimum allocation size of the buddy heap, in bytes.
    pub(crate) min_size: usize,
    // Whether the allocator was still in early-boot bump mode.
    pub(crate) bump_mode: bool,
    // Bytes currently allocated from the bump region.
    pub(crate) bump_allocated: usize,
    // Total size of the bump region, in bytes.
    pub(crate) bump_size: usize,
}
38
impl Allocator {
    /// Returns a new `Allocator` in bump-allocation mode, with no regions
    /// yet added to the buddy heap.
    pub const fn new() -> Self {
        Self {
            bump: bump::Alloc::new(),
            // Start in bump mode; `add_region` switches to the buddy heap.
            bump_mode: AtomicBool::new(true),
            allocator: buddy::Alloc::new(32),
            allocating: AtomicUsize::new(0),
            deallocating: AtomicUsize::new(0),
        }
    }

    /// Takes a point-in-time snapshot of allocator statistics.
    pub fn state(&self) -> State {
        State {
            allocating: self.allocating.load(Ordering::Acquire),
            deallocating: self.deallocating.load(Ordering::Acquire),
            bump_mode: self.bump_mode.load(Ordering::Acquire),
            heap_size: self.allocator.total_size(),
            allocated: self.allocator.allocated_size(),
            min_size: self.allocator.min_size(),
            bump_allocated: self.bump.allocated_size(),
            bump_size: self.bump.total_size(),
        }
    }

    /// Performs one-time allocator initialization.
    ///
    /// Currently this only passes the kernel's virtual-memory offset to the
    /// buddy allocator; the `BootInfo` parameter is presently unused.
    pub(crate) fn init(&self, _bootinfo: &impl BootInfo) {
        self.allocator.set_vm_offset(crate::arch::mm::vm_offset());
        tracing::info!("initialized allocator");
    }

    /// Adds a memory region to the buddy heap and leaves bump mode.
    ///
    /// Note that bump mode is disabled even if `add_region` on the buddy
    /// allocator returned an error (the result is only traced, not checked).
    ///
    /// # Safety
    ///
    /// The caller must uphold whatever validity contract
    /// `buddy::Alloc::add_region` requires for `region` — presumably that
    /// the region describes real, otherwise-unused memory; confirm against
    /// that API's documentation.
    #[inline]
    pub(crate) unsafe fn add_region(&self, region: mem::Region) {
        // Bracket the operation with the `deallocating` counter so that a
        // crash while inside `add_region` shows up in the reported `State`.
        self.deallocating.fetch_add(1, Ordering::Release);
        tracing::trace!(?region, "adding to page allocator");
        let added = self.allocator.add_region(region).is_ok();
        tracing::trace!(added);
        self.deallocating.fetch_sub(1, Ordering::Release);
        // First call flips the flag from `true` to `false` and logs once.
        if self.bump_mode.swap(false, Ordering::Release) {
            tracing::debug!("disabled bump allocator mode");
        }
    }

    /// Dumps the buddy allocator's free lists (see
    /// `buddy::Alloc::dump_free_lists`) for debugging.
    #[inline]
    pub fn dump_free_lists(&self) {
        self.allocator.dump_free_lists();
    }
}
86
87impl Default for Allocator {
88 fn default() -> Self {
89 Self::new()
90 }
91}
92
93unsafe impl GlobalAlloc for Allocator {
94 #[inline]
95 unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
96 self.allocating.fetch_add(1, Ordering::Release);
97 let ptr = if self.bump_mode.load(Ordering::Acquire) {
98 GlobalAlloc::alloc(&self.bump, layout)
99 } else {
100 GlobalAlloc::alloc(&self.allocator, layout)
101 };
102 self.allocating.fetch_sub(1, Ordering::Release);
103 ptr
104 }
105
106 #[inline]
107 unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
108 self.deallocating.fetch_add(1, Ordering::Release);
109 if !self.bump.owns(ptr) {
110 GlobalAlloc::dealloc(&self.allocator, ptr, layout);
111 } else {
112 tracing::warn!(
114 ?ptr,
115 ?layout,
116 "an allocation in the bump region was deallocated! this is not \
117 great: the bump region should not be used for short-lived \
118 allocations"
119 );
120 }
121 self.deallocating.fetch_sub(1, Ordering::Release);
122 }
123}
124
125unsafe impl<S> PageAlloc<S> for Allocator
126where
127 buddy::Alloc<32>: PageAlloc<S>,
128 S: page::Size,
129{
130 #[inline]
131 fn alloc_range(
132 &self,
133 size: S,
134 len: usize,
135 ) -> Result<page::PageRange<PAddr, S>, page::AllocError> {
136 self.allocating.fetch_add(1, Ordering::Release);
137 let res = self.allocator.alloc_range(size, len);
138 self.allocating.fetch_sub(1, Ordering::Release);
139 res
140 }
141
142 #[inline]
143 fn dealloc_range(&self, range: page::PageRange<PAddr, S>) -> Result<(), page::AllocError> {
144 self.deallocating.fetch_add(1, Ordering::Release);
145 let res = self.allocator.dealloc_range(range);
146 self.deallocating.fetch_sub(1, Ordering::Release);
147 res
148 }
149}
150
151impl State {
154 #[inline]
155 #[must_use]
156 pub fn in_allocator(&self) -> bool {
157 self.allocating > 0 || self.deallocating > 0
158 }
159}
160
161impl fmt::Display for State {
162 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
163 let &Self {
164 allocating,
165 deallocating,
166 heap_size,
167 allocated,
168 min_size,
169 bump_mode,
170 bump_allocated,
171 bump_size,
172 } = self;
173 f.write_str("heap stats:\n")?;
174 writeln!(f, " {allocating} cores allocating")?;
175 writeln!(f, " {deallocating} cores deallocating")?;
176
177 if bump_mode {
178 writeln!(f, " bump allocator mode only")?;
179 } else {
180 let digits = {
181 let digits = (heap_size).checked_ilog(10).unwrap_or(0) + 1;
182 digits as usize
183 };
184 let free = heap_size - allocated;
185 writeln!(f, "buddy heap:")?;
186
187 writeln!(f, " {free:>digits$} B free")?;
188
189 writeln!(f, " {heap_size:>digits$} B total")?;
190 writeln!(f, " {free:>digits$} B free")?;
191 writeln!(f, " {allocated:>digits$} B busy")?;
192 writeln!(f, " {min_size:>digits$} B minimum allocation",)?;
193 }
194
195 writeln!(f, "bump region:")?;
196 let bump_digits = {
197 let digits = (bump_size).checked_ilog(10).unwrap_or(0) + 1;
198 digits as usize
199 };
200 let bump_free = bump_size - bump_allocated;
201
202 writeln!(f, " {bump_free:>bump_digits$} B free",)?;
203 writeln!(f, " {bump_allocated:>bump_digits$} B used",)?;
204 Ok(())
205 }
206}
207
// Kernel-mode tests, declared through the project's `decl_test!` macro and
// run by the `mycotest` harness rather than the standard `#[test]` runner.
#[cfg(test)]
mod tests {
    use mycotest::*;

    decl_test! {
        // Smoke-test of basic heap allocation: grow a `Vec` element by
        // element and verify the pushed values come back out via `pop`.
        fn basic_alloc() -> TestResult {
            use alloc::vec::Vec;
            let mut v = Vec::new();
            tracing::info!(vec = ?v, vec.addr = ?v.as_ptr());
            v.push(5u64);
            tracing::info!(vec = ?v, vec.addr = ?v.as_ptr());
            v.push(10u64);
            tracing::info!(vec=?v, vec.addr=?v.as_ptr());
            mycotest::assert_eq!(v.pop(), Some(10));
            mycotest::assert_eq!(v.pop(), Some(5));

            Ok(())
        }
    }

    decl_test! {
        // Exercises repeated reallocation by growing a `Vec` well past the
        // 1024-byte bump region, forcing the buddy heap to serve it.
        fn alloc_big() {
            use alloc::vec::Vec;
            let mut v = Vec::new();

            for i in 0..2048 {
                v.push(i);
            }

            tracing::info!(vec = ?v);
        }
    }
}