|  | 
|  | 1 | +// FIXME(static_mut_refs): Do not allow `static_mut_refs` lint | 
|  | 2 | +#![allow(static_mut_refs)] | 
|  | 3 | + | 
|  | 4 | +use crate::alloc::{GlobalAlloc, Layout, System}; | 
|  | 5 | +use crate::ptr; | 
|  | 6 | +use crate::sync::atomic::{AtomicBool, Ordering}; | 
|  | 7 | + | 
|  | 8 | +// Symbols for heap section boundaries defined in the target's linkerscript | 
|  | 9 | +unsafe extern "C" { | 
|  | 10 | +    static mut __heap_start: u8; | 
|  | 11 | +    static mut __heap_end: u8; | 
|  | 12 | +} | 
|  | 13 | + | 
// Global dlmalloc instance backing the `System` allocator. A plain `static mut`
// is used (no lock) because this target is single-threaded; see the SAFETY
// comments on the `GlobalAlloc` impl below.
static mut DLMALLOC: dlmalloc::Dlmalloc<Vexos> = dlmalloc::Dlmalloc::new_with_allocator(Vexos);
|  | 15 | + | 
/// Zero-sized handle implementing `dlmalloc::Allocator` on top of the fixed
/// heap region whose bounds (`__heap_start`/`__heap_end`) come from the
/// target's linkerscript.
struct Vexos;
|  | 17 | + | 
|  | 18 | +unsafe impl dlmalloc::Allocator for Vexos { | 
|  | 19 | +    /// Allocs system resources | 
|  | 20 | +    fn alloc(&self, _size: usize) -> (*mut u8, usize, u32) { | 
|  | 21 | +        static INIT: AtomicBool = AtomicBool::new(false); | 
|  | 22 | + | 
|  | 23 | +        if !INIT.swap(true, Ordering::Relaxed) { | 
|  | 24 | +            // This target has no growable heap, as user memory has a fixed | 
|  | 25 | +            // size/location and VEXos does not manage allocation for us. | 
|  | 26 | +            unsafe { | 
|  | 27 | +                ( | 
|  | 28 | +                    (&raw mut __heap_start).cast::<u8>(), | 
|  | 29 | +                    (&raw const __heap_end).offset_from_unsigned(&raw const __heap_start), | 
|  | 30 | +                    0, | 
|  | 31 | +                ) | 
|  | 32 | +            } | 
|  | 33 | +        } else { | 
|  | 34 | +            (ptr::null_mut(), 0, 0) | 
|  | 35 | +        } | 
|  | 36 | +    } | 
|  | 37 | + | 
|  | 38 | +    fn remap(&self, _ptr: *mut u8, _oldsize: usize, _newsize: usize, _can_move: bool) -> *mut u8 { | 
|  | 39 | +        ptr::null_mut() | 
|  | 40 | +    } | 
|  | 41 | + | 
|  | 42 | +    fn free_part(&self, _ptr: *mut u8, _oldsize: usize, _newsize: usize) -> bool { | 
|  | 43 | +        false | 
|  | 44 | +    } | 
|  | 45 | + | 
|  | 46 | +    fn free(&self, _ptr: *mut u8, _size: usize) -> bool { | 
|  | 47 | +        return false; | 
|  | 48 | +    } | 
|  | 49 | + | 
|  | 50 | +    fn can_release_part(&self, _flags: u32) -> bool { | 
|  | 51 | +        false | 
|  | 52 | +    } | 
|  | 53 | + | 
|  | 54 | +    fn allocates_zeros(&self) -> bool { | 
|  | 55 | +        false | 
|  | 56 | +    } | 
|  | 57 | + | 
|  | 58 | +    fn page_size(&self) -> usize { | 
|  | 59 | +        0x1000 | 
|  | 60 | +    } | 
|  | 61 | +} | 
|  | 62 | + | 
|  | 63 | +#[stable(feature = "alloc_system_type", since = "1.28.0")] | 
|  | 64 | +unsafe impl GlobalAlloc for System { | 
|  | 65 | +    #[inline] | 
|  | 66 | +    unsafe fn alloc(&self, layout: Layout) -> *mut u8 { | 
|  | 67 | +        // SAFETY: DLMALLOC access is guaranteed to be safe because we are a single-threaded target, which | 
|  | 68 | +        // guarantees unique and non-reentrant access to the allocator. As such, no allocator lock is used. | 
|  | 69 | +        // Calling malloc() is safe because preconditions on this function match the trait method preconditions. | 
|  | 70 | +        unsafe { DLMALLOC.malloc(layout.size(), layout.align()) } | 
|  | 71 | +    } | 
|  | 72 | + | 
|  | 73 | +    #[inline] | 
|  | 74 | +    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 { | 
|  | 75 | +        // SAFETY: DLMALLOC access is guaranteed to be safe because we are a single-threaded target, which | 
|  | 76 | +        // guarantees unique and non-reentrant access to the allocator. As such, no allocator lock is used. | 
|  | 77 | +        // Calling calloc() is safe because preconditions on this function match the trait method preconditions. | 
|  | 78 | +        unsafe { DLMALLOC.calloc(layout.size(), layout.align()) } | 
|  | 79 | +    } | 
|  | 80 | + | 
|  | 81 | +    #[inline] | 
|  | 82 | +    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { | 
|  | 83 | +        // SAFETY: DLMALLOC access is guaranteed to be safe because we are a single-threaded target, which | 
|  | 84 | +        // guarantees unique and non-reentrant access to the allocator. As such, no allocator lock is used. | 
|  | 85 | +        // Calling free() is safe because preconditions on this function match the trait method preconditions. | 
|  | 86 | +        unsafe { DLMALLOC.free(ptr, layout.size(), layout.align()) } | 
|  | 87 | +    } | 
|  | 88 | + | 
|  | 89 | +    #[inline] | 
|  | 90 | +    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 { | 
|  | 91 | +        // SAFETY: DLMALLOC access is guaranteed to be safe because we are a single-threaded target, which | 
|  | 92 | +        // guarantees unique and non-reentrant access to the allocator. As such, no allocator lock is used. | 
|  | 93 | +        // Calling realloc() is safe because preconditions on this function match the trait method preconditions. | 
|  | 94 | +        unsafe { DLMALLOC.realloc(ptr, layout.size(), layout.align(), new_size) } | 
|  | 95 | +    } | 
|  | 96 | +} | 
0 commit comments