diff --git a/src/allocator.rs b/src/allocator.rs
index 05e96695999c90d21aa7799a3ea774a4b263a173..bd8ebbef0d59a87ddff3720d71c6e56e88aea5c8 100644
--- a/src/allocator.rs
+++ b/src/allocator.rs
@@ -4,24 +4,89 @@ use core::ptr;
 use core::ptr::NonNull;
 use linked_list_allocator::Heap;
 
+#[cfg(feature = "debug_allocations")]
+use core::fmt::Write;
+#[cfg(feature = "debug_allocations")]
+use core::sync::atomic;
+#[cfg(feature = "debug_allocations")]
+use core::sync::atomic::AtomicUsize;
+
+
 pub static mut HEAP: Heap = Heap::empty();
 
-struct TockAllocator;
+// With the "debug_allocations" feature, we use `AtomicUsize` to store the
+// statistics because:
+// - it is `Sync`, so we can use it in a static object (the allocator),
+// - it implements interior mutability, so we can use it in the allocator
+//   methods (that take an immutable `&self` reference).
+struct TockAllocator {
+    #[cfg(feature = "debug_allocations")]
+    count: AtomicUsize, // live allocations: incremented in alloc, decremented in dealloc
+    #[cfg(feature = "debug_allocations")]
+    size: AtomicUsize, // running total of allocated bytes (layout.size() per allocation)
+}
+
+impl TockAllocator {
+    const fn new() -> TockAllocator { // const: must be callable in the `static ALLOCATOR` initializer
+        TockAllocator {
+            #[cfg(feature = "debug_allocations")]
+            count: AtomicUsize::new(0),
+            #[cfg(feature = "debug_allocations")]
+            size: AtomicUsize::new(0),
+        }
+    }
+}
+
 
 unsafe impl GlobalAlloc for TockAllocator {
     unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
-        HEAP.allocate_first_fit(layout)
+        let ptr = HEAP
+            .allocate_first_fit(layout)
             .ok()
-            .map_or(ptr::null_mut(), NonNull::as_ptr)
+            .map_or(ptr::null_mut(), NonNull::as_ptr);
+
+        #[cfg(feature = "debug_allocations")]
+        {
+            self.count.fetch_add(1, atomic::Ordering::SeqCst); // NOTE(review): also ticks for failed (null) allocations — confirm intended
+            self.size.fetch_add(layout.size(), atomic::Ordering::SeqCst); // NOTE(review): a failed alloc inflates this forever (no matching dealloc)
+            writeln!(
+                crate::console::Console::new(),
+                "alloc[{}, {}] = {:?} ({} ptrs, {} bytes)",
+                layout.size(),
+                layout.align(),
+                ptr,
+                self.count.load(atomic::Ordering::SeqCst),
+                self.size.load(atomic::Ordering::SeqCst)
+            )
+            .ok(); // best-effort: a panic inside the allocator can recurse back into alloc, and GlobalAlloc must not unwind
+        }
+        ptr
     }
 
+
     unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
+        #[cfg(feature = "debug_allocations")]
+        {
+            self.count.fetch_sub(1, atomic::Ordering::SeqCst);
+            self.size.fetch_sub(layout.size(), atomic::Ordering::SeqCst);
+            writeln!(
+                crate::console::Console::new(),
+                "dealloc[{}, {}] = {:?} ({} ptrs, {} bytes)",
+                layout.size(),
+                layout.align(),
+                ptr,
+                self.count.load(atomic::Ordering::SeqCst),
+                self.size.load(atomic::Ordering::SeqCst)
+            )
+            .ok(); // best-effort logging: never unwrap in the allocator (see alloc)
+        }
+
+        HEAP.deallocate(NonNull::new_unchecked(ptr), layout)
     }
 }
 
 #[global_allocator]
-static ALLOCATOR: TockAllocator = TockAllocator;
+static ALLOCATOR: TockAllocator = TockAllocator::new(); // new() zero-initializes the debug counters
 
 #[cfg(not(feature = "custom_alloc_error_handler"))]
 #[alloc_error_handler]