use crate::sync::spinlock::SpinLocked;
use crate::{BootInfo, utils};
use alloc::Allocator;
use core::ptr::NonNull;

pub mod alloc;
pub mod array;
pub mod boxed;
pub mod heap;
pub mod pool;
pub mod queue;

/// Memory region types as reported in the bootloader memory map; the
/// numbering follows the E820/Multiboot convention.
#[repr(C)]
#[allow(unused)]
enum MemoryTypes {
    /// Usable RAM.
    Available = 1,
    /// Reserved; never handed to the allocator.
    Reserved = 2,
    /// ACPI tables, reclaimable once they have been parsed.
    ACPIReclaimable = 3,
    /// ACPI non-volatile storage.
    Nvs = 4,
    /// Defective memory.
    BadMemory = 5,
}

static GLOBAL_ALLOCATOR: SpinLocked<alloc::BestFitAllocator> =
    SpinLocked::new(alloc::BestFitAllocator::new());

/// Initializes the global allocator from the bootloader memory map,
/// handing every `Available` region over to the best-fit allocator.
pub fn init_memory(boot_info: &BootInfo) -> Result<(), utils::KernelError> {
    let mut allocator = GLOBAL_ALLOCATOR.lock();

    for entry in boot_info.mmap.iter().take(boot_info.mmap_len) {
        if entry.ty == MemoryTypes::Available as u32 {
            let range = entry.addr as usize..(entry.addr + entry.length) as usize;
            // SAFETY: entries marked `Available` describe RAM that the
            // bootloader reports as unused and non-overlapping.
            unsafe {
                allocator.add_range(range)?;
            }
        }
    }

    Ok(())
}

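// A hypothetical call sketch (the real entry point and its error handling
// live elsewhere in the kernel):
//
//     fn kmain(boot_info: &BootInfo) -> ! {
//         if init_memory(boot_info).is_err() {
//             // halt: no usable memory ranges
//         }
//         // ...
//     }
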
/// Allocates `size` bytes with alignment `align` from the global
/// allocator, returning `None` if the request cannot be satisfied.
pub fn malloc(size: usize, align: usize) -> Option<NonNull<u8>> {
    let mut allocator = GLOBAL_ALLOCATOR.lock();
    allocator.malloc(size, align).ok()
}

/// Returns the `size`-byte block at `ptr` to the global allocator.
///
/// # Safety
///
/// `ptr` must have been obtained from [`malloc`] with the same `size`,
/// and the block must not be accessed after this call.
pub unsafe fn free(ptr: NonNull<u8>, size: usize) {
    let mut allocator = GLOBAL_ALLOCATOR.lock();
    unsafe { allocator.free(ptr, size) };
}

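// A minimal usage sketch for the pair above (illustrative only, assuming
// the allocation succeeds):
//
//     if let Some(ptr) = malloc(64, 16) {
//         // ... use the 64-byte block ...
//         // SAFETY: `ptr` came from `malloc` with the same size.
//         unsafe { free(ptr, 64) };
//     }
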
/// Rounds `size` up to the next multiple of `align_of::<u128>()`,
/// saturating at `usize::MAX` instead of overflowing.
pub fn align_up(size: usize) -> usize {
    let align = align_of::<u128>();
    if size >= usize::MAX - align {
        return usize::MAX;
    }

    (size + align - 1) & !(align - 1)
}

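// For example, on targets where `align_of::<u128>()` is 16 (the usual
// case on 64-bit platforms), `align_up(1)` and `align_up(16)` both
// yield 16, while `align_up(17)` yields 32.
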
#[cfg(kani)]
mod verification {
    use crate::MemMapEntry;

    use super::*;
    use kani::Arbitrary;

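    // These proofs run under `cargo kani`; each `kani::any()` below is a
    // symbolic value constrained only by the `kani::assume` calls.
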
    impl Arbitrary for MemMapEntry {
        fn any() -> Self {
            let size = size_of::<MemMapEntry>() as u32;
            let length = kani::any();
            let addr = kani::any();

            // Keep each range inside the allocator's address space and
            // large enough for the allocator to manage.
            kani::assume(
                length < alloc::MAX_ADDR as u64
                    && length > alloc::BestFitAllocator::MIN_RANGE_SIZE as u64,
            );
            kani::assume(addr < alloc::MAX_ADDR as u64 - length && addr > 0);

            MemMapEntry {
                size,
                addr,
                length,
                ty: kani::any(),
            }
        }

        fn any_array<const MAX_ARRAY_LENGTH: usize>() -> [Self; MAX_ARRAY_LENGTH] {
            [(); MAX_ARRAY_LENGTH].map(|_| Self::any())
        }
    }

    /// Stub for `core::ptr::write` so the proof never dereferences the
    /// symbolic pointers; the write is simply dropped.
    fn mock_ptr_write<T>(_dst: *mut T, _src: T) {}

    #[kani::proof]
    #[kani::stub(core::ptr::write, mock_ptr_write)]
    fn proof_init_allocator_good() {
        // A small fixed-size memory map keeps the proof tractable.
        let mmap: [MemMapEntry; 8] = kani::any();

        for entry in mmap.iter() {
            kani::assume(entry.addr % align_of::<u128>() as u64 == 0);
            kani::assume(entry.addr > 0);
            kani::assume(entry.length > 0);

            // Distinct entries must not overlap.
            for other in mmap.iter() {
                if entry.addr != other.addr {
                    kani::assume(
                        entry.addr + entry.length <= other.addr
                            || other.addr + other.length <= entry.addr,
                    );
                }
            }
        }

        let mmap_len = mmap.len();

        let boot_info = BootInfo {
            implementer: core::ptr::null(),
            variant: core::ptr::null(),
            mmap,
            mmap_len,
        };

        assert!(init_memory(&boot_info).is_ok());
    }

    #[kani::proof]
    fn check_align_up() {
        let size = kani::any();
        kani::assume(size > 0);
        let aligned = align_up(size);
        assert_ne!(aligned, 0);

        if aligned != usize::MAX {
            assert_eq!(aligned % align_of::<u128>(), 0);
            assert!(aligned >= size);
        }
    }
}