airbender_rt/allocator/talc_allocator.rs

use core::alloc::{GlobalAlloc, Layout};
use core::cell::UnsafeCell;
use core::ptr::{null_mut, NonNull};

pub struct TalcAllocator {
    state: UnsafeCell<TalcState>,
}

struct TalcState {
    allocator: Option<talc::Talc<talc::ClaimOnOom>>,
}

// Safety: the allocator is only registered on the single-threaded riscv32
// target (see the `cfg` gates below), so `state` is never accessed
// concurrently.
unsafe impl Sync for TalcAllocator {}

impl TalcAllocator {
    pub const fn uninit() -> Self {
        Self {
            state: UnsafeCell::new(TalcState { allocator: None }),
        }
    }

    /// # Safety
    ///
    /// Caller must ensure `start` and `end` define a writable heap range.
    pub unsafe fn init(&self, start: *mut usize, end: *mut usize) {
        let state = &mut *self.state.get();
        let base = start.cast::<u8>();
        let size = (end as usize).saturating_sub(start as usize);
        if size == 0 {
            state.allocator = None;
            return;
        }

        // The OOM handler is given an empty span, so once the claimed heap is
        // exhausted, allocation fails instead of claiming more memory.
        let mut allocator = talc::Talc::new(talc::ClaimOnOom::new(talc::Span::empty()));
        let span = talc::Span::from_base_size(base, size);
        allocator.claim(span).expect("must claim initial heap span");
        state.allocator = Some(allocator);
    }

    unsafe fn alloc_inner(&self, layout: Layout) -> *mut u8 {
        let state = &mut *self.state.get();
        // Allocating before `init` (or after a zero-sized heap) returns null.
        let Some(allocator) = state.allocator.as_mut() else {
            return null_mut();
        };
        allocator
            .malloc(layout)
            .map_or(null_mut(), |nn| nn.as_ptr())
    }

    unsafe fn dealloc_inner(&self, ptr: *mut u8, layout: Layout) {
        if ptr.is_null() || layout.size() == 0 {
            return;
        }

        let state = &mut *self.state.get();
        if let Some(allocator) = state.allocator.as_mut() {
            allocator.free(NonNull::new_unchecked(ptr), layout);
        }
    }
}

unsafe impl GlobalAlloc for TalcAllocator {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.alloc_inner(layout)
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.dealloc_inner(ptr, layout)
    }

    unsafe fn realloc(&self, ptr: *mut u8, old_layout: Layout, new_size: usize) -> *mut u8 {
        // Safety for `from_size_align_unchecked`: the `GlobalAlloc::realloc`
        // contract guarantees that `old_layout` is the layout `ptr` was
        // allocated with and that `new_size`, rounded up to
        // `old_layout.align()`, does not overflow `isize`.
        let new_layout = Layout::from_size_align_unchecked(new_size, old_layout.align());

        if ptr.is_null() {
            return self.alloc_inner(new_layout);
        }

        // Allocate a fresh block, copy the overlapping prefix, then release
        // the old block. On failure the old allocation is left untouched.
        let new_ptr = self.alloc_inner(new_layout);
        if !new_ptr.is_null() {
            let copy_len = core::cmp::min(old_layout.size(), new_size);
            core::ptr::copy_nonoverlapping(ptr, new_ptr, copy_len);
            self.dealloc_inner(ptr, old_layout);
        }
        new_ptr
    }
}

#[cfg(target_arch = "riscv32")]
#[global_allocator]
static GLOBAL_ALLOCATOR: TalcAllocator = TalcAllocator::uninit();
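
// Hypothetical usage sketch (illustrative, not part of this crate): once the
// static above is registered and `init` has claimed a heap span, ordinary
// `alloc`-crate types route through `TalcAllocator`:
//
//     extern crate alloc;
//     use alloc::vec::Vec;
//
//     fn heap_smoke_test() {
//         let mut v: Vec<u32> = Vec::with_capacity(4); // goes through `alloc`
//         v.extend(0..64);                             // grows via `realloc`
//         assert_eq!(v.iter().sum::<u32>(), 2016);     // 0 + 1 + ... + 63
//     }                                                // drop hits `dealloc`
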
/// # Safety
///
/// Caller must ensure `start` and `end` define a valid, exclusively-owned heap region.
#[cfg(target_arch = "riscv32")]
pub unsafe fn init(start: *mut usize, end: *mut usize) {
    GLOBAL_ALLOCATOR.init(start, end);
}

/// No-op on non-riscv32 targets; kept for API compatibility.
///
/// # Safety
///
/// Always safe to call; the signature is `unsafe` only to match the riscv32 variant.
#[cfg(not(target_arch = "riscv32"))]
pub unsafe fn init(_start: *mut usize, _end: *mut usize) {}
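
For context, here is a minimal sketch of how a boot path might hand the allocator its heap. The linker-symbol names `_heap_start` and `_heap_end` are assumptions for illustration; the real names depend on the runtime's linker script.

extern "C" {
    // Hypothetical symbols the linker script places at the heap bounds.
    static mut _heap_start: usize;
    static mut _heap_end: usize;
}

/// Runs once at boot, before the first heap allocation.
unsafe fn boot_init_heap() {
    // Safety: the linker script guarantees `[_heap_start, _heap_end)` is a
    // writable region owned exclusively by the allocator.
    init(
        core::ptr::addr_of_mut!(_heap_start),
        core::ptr::addr_of_mut!(_heap_end),
    );
}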