Skip to content
Snippets Groups Projects
Commit 6da814be authored by koenigl's avatar koenigl
Browse files

apply debug allocations

parent 35df2e3c
No related branches found
No related tags found
No related merge requests found
...@@ -4,24 +4,89 @@ use core::ptr; ...@@ -4,24 +4,89 @@ use core::ptr;
use core::ptr::NonNull; use core::ptr::NonNull;
use linked_list_allocator::Heap; use linked_list_allocator::Heap;
#[cfg(feature = "debug_allocations")]
use core::fmt::Write;
#[cfg(feature = "debug_allocations")]
use core::sync::atomic;
#[cfg(feature = "debug_allocations")]
use core::sync::atomic::AtomicUsize;
// The heap backing the global allocator. Starts empty; presumably initialised
// (given a memory region) at startup before the first allocation — TODO confirm,
// the init call is not visible in this chunk.
pub static mut HEAP: Heap = Heap::empty();
// With the "debug_allocations" feature, we use `AtomicUsize` to store the
// statistics because:
// - it is `Sync`, so we can use it in a static object (the allocator),
// - it implements interior mutability, so we can use it in the allocator
//   methods (that take an immutable `&self` reference).
struct TockAllocator {
    // Number of currently live allocations: incremented in `alloc`,
    // decremented in `dealloc`.
    #[cfg(feature = "debug_allocations")]
    count: AtomicUsize,
    // Total bytes currently allocated, as reported by the `Layout` values
    // passed to `alloc`/`dealloc`.
    #[cfg(feature = "debug_allocations")]
    size: AtomicUsize,
}
impl TockAllocator {
const fn new() -> TockAllocator {
TockAllocator {
#[cfg(feature = "debug_allocations")]
count: AtomicUsize::new(0),
#[cfg(feature = "debug_allocations")]
size: AtomicUsize::new(0),
}
}
}
unsafe impl GlobalAlloc for TockAllocator { unsafe impl GlobalAlloc for TockAllocator {
unsafe fn alloc(&self, layout: Layout) -> *mut u8 { unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
HEAP.allocate_first_fit(layout) let ptr = HEAP
.allocate_first_fit(layout)
.ok() .ok()
.map_or(ptr::null_mut(), NonNull::as_ptr) .map_or(ptr::null_mut(), NonNull::as_ptr);
#[cfg(feature = "debug_allocations")]
{
self.count.fetch_add(1, atomic::Ordering::SeqCst);
self.size.fetch_add(layout.size(), atomic::Ordering::SeqCst);
writeln!(
crate::console::Console::new(),
"alloc[{}, {}] = {:?} ({} ptrs, {} bytes)",
layout.size(),
layout.align(),
ptr,
self.count.load(atomic::Ordering::SeqCst),
self.size.load(atomic::Ordering::SeqCst)
)
.unwrap();
}
ptr
} }
unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
#[cfg(feature = "debug_allocations")]
{
self.count.fetch_sub(1, atomic::Ordering::SeqCst);
self.size.fetch_sub(layout.size(), atomic::Ordering::SeqCst);
writeln!(
crate::console::Console::new(),
"dealloc[{}, {}] = {:?} ({} ptrs, {} bytes)",
layout.size(),
layout.align(),
ptr,
self.count.load(atomic::Ordering::SeqCst),
self.size.load(atomic::Ordering::SeqCst)
)
.unwrap();
}
HEAP.deallocate(NonNull::new_unchecked(ptr), layout) HEAP.deallocate(NonNull::new_unchecked(ptr), layout)
} }
} }
// Route every Rust heap allocation in this program through `TockAllocator`.
#[global_allocator]
static ALLOCATOR: TockAllocator = TockAllocator::new();
#[cfg(not(feature = "custom_alloc_error_handler"))] #[cfg(not(feature = "custom_alloc_error_handler"))]
#[alloc_error_handler] #[alloc_error_handler]
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment