From 6da814be6a35ea9d3d6fcaeb302848ee557ebb3b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Leonard=20K=C3=B6nig?= <leonard.koenig@fu-berlin.de>
Date: Tue, 30 Jun 2020 13:28:09 +0200
Subject: [PATCH] apply debug allocations

---
 src/allocator.rs | 73 +++++++++++++++++++++++++++++++++++++++++++++---
 1 file changed, 69 insertions(+), 4 deletions(-)

diff --git a/src/allocator.rs b/src/allocator.rs
index 05e9669..bd8ebbe 100644
--- a/src/allocator.rs
+++ b/src/allocator.rs
@@ -4,24 +4,89 @@ use core::ptr;
 use core::ptr::NonNull;
 use linked_list_allocator::Heap;
 
+#[cfg(feature = "debug_allocations")]
+use core::fmt::Write;
+#[cfg(feature = "debug_allocations")]
+use core::sync::atomic;
+#[cfg(feature = "debug_allocations")]
+use core::sync::atomic::AtomicUsize;
+
+
 pub static mut HEAP: Heap = Heap::empty();
 
-struct TockAllocator;
+// With the "debug_allocations" feature, we use `AtomicUsize` to store the
+// statistics because:
+// - it is `Sync`, so we can use it in a static object (the allocator),
+// - it implements interior mutability, so we can use it in the allocator
+//   methods (that take an immutable `&self` reference).
+struct TockAllocator {
+    #[cfg(feature = "debug_allocations")]
+    count: AtomicUsize,
+    #[cfg(feature = "debug_allocations")]
+    size: AtomicUsize,
+}
+
+impl TockAllocator {
+    const fn new() -> TockAllocator {
+        TockAllocator {
+            #[cfg(feature = "debug_allocations")]
+            count: AtomicUsize::new(0),
+            #[cfg(feature = "debug_allocations")]
+            size: AtomicUsize::new(0),
+        }
+    }
+}
+
 
 unsafe impl GlobalAlloc for TockAllocator {
     unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
-        HEAP.allocate_first_fit(layout)
+        let ptr = HEAP
+            .allocate_first_fit(layout)
             .ok()
-            .map_or(ptr::null_mut(), NonNull::as_ptr)
+            .map_or(ptr::null_mut(), NonNull::as_ptr);
+
+        #[cfg(feature = "debug_allocations")]
+        {
+            self.count.fetch_add(1, atomic::Ordering::SeqCst);
+            self.size.fetch_add(layout.size(), atomic::Ordering::SeqCst);
+            writeln!(
+                crate::console::Console::new(),
+                "alloc[{}, {}] = {:?} ({} ptrs, {} bytes)",
+                layout.size(),
+                layout.align(),
+                ptr,
+                self.count.load(atomic::Ordering::SeqCst),
+                self.size.load(atomic::Ordering::SeqCst)
+            )
+            .unwrap();
+        }
+        ptr
     }
 
+
     unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
+        #[cfg(feature = "debug_allocations")]
+        {
+            self.count.fetch_sub(1, atomic::Ordering::SeqCst);
+            self.size.fetch_sub(layout.size(), atomic::Ordering::SeqCst);
+            writeln!(
+                crate::console::Console::new(),
+                "dealloc[{}, {}] = {:?} ({} ptrs, {} bytes)",
+                layout.size(),
+                layout.align(),
+                ptr,
+                self.count.load(atomic::Ordering::SeqCst),
+                self.size.load(atomic::Ordering::SeqCst)
+            )
+            .unwrap();
+        }
+
         HEAP.deallocate(NonNull::new_unchecked(ptr), layout)
     }
 }
 
 #[global_allocator]
-static ALLOCATOR: TockAllocator = TockAllocator;
+static ALLOCATOR: TockAllocator = TockAllocator::new();
 
 #[cfg(not(feature = "custom_alloc_error_handler"))]
 #[alloc_error_handler]
-- 
GitLab
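
Note (outside the patch itself): all of the added instrumentation is gated behind a `debug_allocations` Cargo feature, so the crate's Cargo.toml is assumed to declare that feature (for example an empty `debug_allocations = []` entry under `[features]`); building without it compiles the allocator exactly as before. The sketch below is a hypothetical illustration, not part of the patch, of what the added `writeln!` calls would print for a single allocation; the exact pointer value and running totals depend on whatever else the program has allocated.

// Hypothetical application code running on top of the patched allocator
// (libtock-rs style, no_std with the `alloc` crate); with the
// `debug_allocations` feature enabled, the global allocator logs each call
// to the Tock console.
extern crate alloc;
use alloc::vec::Vec;

fn allocate_and_free() {
    // One 16-byte, align-1 allocation goes through `TockAllocator::alloc`,
    // so the console would show a line roughly like:
    //   alloc[16, 1] = 0x20004000 (1 ptrs, 16 bytes)
    let buf: Vec<u8> = Vec::with_capacity(16);
    // Dropping it goes through `TockAllocator::dealloc`:
    //   dealloc[16, 1] = 0x20004000 (0 ptrs, 0 bytes)
    drop(buf);
}

The "ptrs" and "bytes" totals come from the two `AtomicUsize` counters, so each log line reports all allocations still live at the time of the call, not just the one being logged.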