//! ruka_runtime/ptr.rs — heap-backed pointer cells with global leak tracking.

1use std::{
2    collections::BTreeMap,
3    fmt,
4    marker::PhantomData,
5    ptr::NonNull,
6    sync::{Mutex, OnceLock},
7};
8
/// Bookkeeping for live `Ptr` allocations: each registration gets a unique,
/// monotonically increasing id mapped to its allocation-site label.
#[derive(Debug, Default)]
struct LeakTracker {
    // Next id to hand out; bumped with a checked add on every registration.
    next_id: u64,
    // Currently live allocations: id -> static allocation-site label.
    live_sites: BTreeMap<u64, &'static str>,
}
14
15impl LeakTracker {
16    /// Register one live pointer allocation and return its unique id.
17    fn register_alloc(&mut self, site: &'static str) -> u64 {
18        let id = self.next_id;
19        self.next_id = self
20            .next_id
21            .checked_add(1)
22            .expect("pointer leak tracker id overflow");
23        let replaced = self.live_sites.insert(id, site);
24        assert!(replaced.is_none(), "pointer leak tracker id collision");
25        id
26    }
27
28    /// Unregister one live pointer allocation by id.
29    fn unregister_alloc(&mut self, id: u64) {
30        let _ = self.live_sites.remove(&id);
31    }
32}
33
34/// Return the global pointer leak tracker.
35fn leak_tracker() -> &'static Mutex<LeakTracker> {
36    static LEAK_TRACKER: OnceLock<Mutex<LeakTracker>> = OnceLock::new();
37    LEAK_TRACKER.get_or_init(|| Mutex::new(LeakTracker::default()))
38}
39
40/// Lock the global leak tracker, recovering from poisoned state in tests.
41fn lock_leak_tracker() -> std::sync::MutexGuard<'static, LeakTracker> {
42    leak_tracker()
43        .lock()
44        .unwrap_or_else(std::sync::PoisonError::into_inner)
45}
46
47/// Assert that all tracked pointer allocations have been released.
48pub fn assert_no_leaks() {
49    let tracker = lock_leak_tracker();
50    if tracker.live_sites.is_empty() {
51        return;
52    }
53    let mut grouped = BTreeMap::<&'static str, usize>::new();
54    for site in tracker.live_sites.values() {
55        *grouped.entry(*site).or_insert(0) += 1;
56    }
57    let mut details = String::new();
58    for (site, count) in grouped {
59        details.push_str(&format!("\n- {site}: {count}"));
60    }
61    panic!(
62        "detected {} leaked pointer allocation(s):{}",
63        tracker.live_sites.len(),
64        details
65    );
66}
67
#[cfg(test)]
/// Return the number of tracked live pointer allocations.
pub fn tracked_leak_count() -> usize {
    let tracker = lock_leak_tracker();
    tracker.live_sites.len()
}
73
#[cfg(test)]
/// Clear all tracked pointer allocations for test isolation.
pub fn clear_tracked_leaks() {
    lock_leak_tracker().live_sites.clear();
}
80
/// Heap payload behind a [`Ptr`]: the leak-tracker id plus the user value.
struct PtrCell<T> {
    // Id handed out by LeakTracker::register_alloc in Ptr::new_tracked.
    alloc_id: u64,
    value: T,
}
85
/// Heap-backed pointer value with one explicit level of indirection.
///
/// Owns a `PtrCell<T>` leaked in `new_tracked` and reclaimed in `Drop`.
/// `NonNull` gives `Option<Ptr<T>>` the null-pointer niche, and
/// `PhantomData<PtrCell<T>>` tells the drop checker this type owns the cell.
pub struct Ptr<T> {
    // Invariant: produced by Box::leak, valid until freed in `release`.
    ptr: NonNull<PtrCell<T>>,
    _marker: PhantomData<PtrCell<T>>,
}
91
impl<T> Ptr<T> {
    /// Allocate one new pointer payload.
    ///
    /// Equivalent to [`Ptr::new_tracked`] with a placeholder site, so leak
    /// reports attribute the allocation to "<unknown>".
    pub fn new(value: T) -> Self {
        Self::new_tracked(value, "<unknown>")
    }

    /// Allocate one new pointer payload with allocation-site metadata.
    ///
    /// Registers with the global leak tracker first, then leaks a `Box` so
    /// the cell outlives this call; `release` (via `Drop`) reclaims it.
    pub fn new_tracked(value: T, site: &'static str) -> Self {
        let alloc_id = lock_leak_tracker().register_alloc(site);
        let cell = Box::new(PtrCell { alloc_id, value });
        Self {
            // Box::leak transfers ownership of the heap cell to this Ptr.
            ptr: NonNull::from(Box::leak(cell)),
            _marker: PhantomData,
        }
    }

    /// Borrow the current payload immutably.
    pub fn borrow(&self) -> &T {
        &self.cell().value
    }

    // Shared view of the heap cell.
    fn cell(&self) -> &PtrCell<T> {
        // SAFETY: `ptr` was produced by Box::leak in `new_tracked` and is
        // freed only in `release` (whose sole caller in this file is Drop),
        // so it is valid and aligned for the lifetime of `&self`.
        unsafe { self.ptr.as_ref() }
    }

    // Exclusive view of the heap cell.
    fn cell_mut(&mut self) -> &mut PtrCell<T> {
        // SAFETY: same validity argument as `cell`; `&mut self` guarantees
        // no other reference derived from this Ptr is live.
        unsafe { self.ptr.as_mut() }
    }

    // Unregister from the leak tracker, then free the heap cell.
    fn release(&mut self) {
        let alloc_id = self.cell().alloc_id;
        lock_leak_tracker().unregister_alloc(alloc_id);
        // SAFETY: `ptr` came from Box::leak and has not been freed yet; the
        // only caller visible in this file is Drop, after which `self.ptr`
        // is never dereferenced again.
        unsafe { drop(Box::from_raw(self.ptr.as_ptr())) };
    }

    /// Borrow the current payload mutably.
    pub fn borrow_mut(&mut self) -> &mut T {
        &mut self.cell_mut().value
    }
}
132
133impl<T: Clone> Clone for Ptr<T> {
134    fn clone(&self) -> Self {
135        Ptr::new(self.borrow().clone())
136    }
137}
138
139impl<T> Drop for Ptr<T> {
140    fn drop(&mut self) {
141        self.release();
142    }
143}
144
145impl<T: fmt::Debug> fmt::Debug for Ptr<T> {
146    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
147        f.debug_tuple("Ptr").field(self.borrow()).finish()
148    }
149}
150
151impl<T: PartialEq> PartialEq for Ptr<T> {
152    fn eq(&self, other: &Self) -> bool {
153        self.borrow() == other.borrow()
154    }
155}
156
157/// Clone one optional pointer by cloning its pointee into a fresh cell.
158pub fn clone_pointee<T: Clone>(value: &Option<Ptr<T>>) -> Option<Ptr<T>> {
159    value.as_ref().map(|value| Ptr::new(value.borrow().clone()))
160}