use core::{ops::Range, ptr::NonNull};

use crate::{BUG_ON, utils};

#[cfg(target_pointer_width = "64")]
pub const MAX_ADDR: usize = 2_usize.pow(48);

#[cfg(target_pointer_width = "32")]
pub const MAX_ADDR: usize = usize::MAX;

/// Minimal allocation interface exposed to the rest of the kernel.
pub trait Allocator {
    /// Allocates at least `size` bytes with the requested `align`ment.
    fn malloc<T>(&mut self, size: usize, align: usize) -> Result<NonNull<T>, utils::KernelError>;
    /// Returns a block previously obtained from `malloc` on this allocator.
    unsafe fn free<T>(&mut self, ptr: NonNull<T>, size: usize);
}

/// Free-list node stored at the start of every managed block.
struct BestFitMeta {
    /// Usable bytes that follow this header and its padding.
    size: usize,
    /// Control pointer of the next free block, if any.
    next: Option<NonNull<u8>>,
}

/// Best-fit allocator backed by an intrusive singly linked list of free blocks.
pub struct BestFitAllocator {
    /// Control pointer of the first free block.
    head: Option<NonNull<u8>>,
}

impl BestFitAllocator {
    /// Smallest range accepted by `add_range`.
    pub const MIN_RANGE_SIZE: usize = size_of::<BestFitMeta>() + Self::align_up() + 1;

    pub const fn new() -> Self {
        Self { head: None }
    }

    /// Adds the memory in `range` to the allocator's free list.
    ///
    /// # Safety
    /// The caller must exclusively own `range`, and it must stay valid for as
    /// long as the allocator (or any allocation made from it) is in use.
    pub unsafe fn add_range(&mut self, range: Range<usize>) -> Result<(), utils::KernelError> {
        let ptr = range.start;

        if ptr % align_of::<u128>() != 0 {
            return Err(utils::KernelError::InvalidAlign);
        }

        if ptr == 0 {
            return Err(utils::KernelError::InvalidAddress);
        }

        debug_assert!(range.end > range.start);
        debug_assert!(range.end - range.start > size_of::<BestFitMeta>() + Self::align_up());
        debug_assert!(range.end <= isize::MAX as usize);

        // The usable region starts after the control header and its padding.
        let user_pointer = ptr + size_of::<BestFitMeta>() + Self::align_up();

        let meta = BestFitMeta {
            size: range.end - user_pointer,
            next: self.head,
        };

        // Write the header at the start of the range and push the block onto the free list.
        unsafe { core::ptr::write(ptr as *mut BestFitMeta, meta) };

        self.head = Some(unsafe { NonNull::new_unchecked(ptr as *mut u8) });
        Ok(())
    }
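
    // Layout produced by `add_range` for a managed range (illustrative; the
    // padding width comes from `align_up()` below):
    //
    //   range.start                                           range.end
    //   | BestFitMeta | padding | meta.size usable bytes .... |
    //                           ^ user pointer handed out by `malloc`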

    /// Padding inserted after `BestFitMeta` so that the user pointer keeps
    /// `align_of::<u128>()` alignment.
    const fn align_up() -> usize {
        let meta = size_of::<BestFitMeta>();
        let align = align_of::<u128>();
        (align - (meta % align)) % align
    }
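
    // Worked example (assuming a 16-byte header and a 16-byte `u128` alignment):
    // (16 - 16 % 16) % 16 = 0, so the user pointer directly follows the header;
    // a hypothetical 24-byte header would instead need (16 - 24 % 16) % 16 = 8
    // bytes of padding.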

    /// Walks the free list and returns the smallest block that can hold `size`
    /// bytes, together with its predecessor in the list (needed to unlink it).
    fn select_block(
        &mut self,
        size: usize,
    ) -> Result<(NonNull<u8>, Option<NonNull<u8>>), utils::KernelError> {
        let mut best_fit = Err(utils::KernelError::OutOfMemory);
        let mut best_fit_size = usize::MAX;

        let mut current = self.head;
        let mut prev = None;

        while let Some(ptr) = current {
            let meta = unsafe { ptr.cast::<BestFitMeta>().as_ref() };

            if meta.size >= size && meta.size <= best_fit_size {
                best_fit = Ok((ptr, prev));
                best_fit_size = meta.size;
            }

            prev = current;
            current = meta.next;
        }

        best_fit
    }
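
    // Example walk (illustrative): with free blocks of 96, 160 and 128 bytes and a
    // 100-byte request, 96 is skipped, 160 becomes the first candidate and 128 then
    // replaces it as the tighter fit, so the 128-byte block is returned.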

    /// Converts a control pointer (start of the header) into the user pointer handed out by `malloc`.
    unsafe fn user_ptr(ptr: NonNull<u8>) -> NonNull<u8> {
        debug_assert!(
            (ptr.as_ptr() as usize)
                <= isize::MAX as usize - size_of::<BestFitMeta>() - Self::align_up()
        );
        unsafe { ptr.byte_add(size_of::<BestFitMeta>() + Self::align_up()) }
    }

    /// Converts a user pointer back into the control pointer of its block header.
    unsafe fn control_ptr(ptr: NonNull<u8>) -> NonNull<u8> {
        debug_assert!((ptr.as_ptr() as usize) > size_of::<BestFitMeta>() + Self::align_up());
        unsafe { ptr.byte_sub(size_of::<BestFitMeta>() + Self::align_up()) }
    }
}

impl Allocator for BestFitAllocator {
    /// Best-fit `malloc`: picks the tightest free block and splits off the
    /// unused tail when it is large enough to hold another block.
    fn malloc<T>(&mut self, size: usize, align: usize) -> Result<NonNull<T>, utils::KernelError> {
        // Blocks are only guaranteed `u128` alignment, so stricter requests are rejected.
        if align > align_of::<u128>() {
            return Err(utils::KernelError::InvalidAlign);
        }

        if size == 0 {
            return Err(utils::KernelError::InvalidSize);
        }

        #[allow(clippy::absurd_extreme_comparisons)]
        if size >= MAX_ADDR {
            return Err(utils::KernelError::InvalidSize);
        }

        let aligned_size = super::align_up(size);
        debug_assert!(aligned_size >= size);
        debug_assert!(aligned_size <= isize::MAX as usize);

        let (split, block, prev) = match self.select_block(aligned_size) {
            Ok((block, prev)) => {
                let meta = unsafe { block.cast::<BestFitMeta>().as_mut() };

                // Splitting only pays off if the tail can hold a new header and
                // at least one usable byte.
                let min = aligned_size.saturating_add(size_of::<BestFitMeta>() + Self::align_up());

                debug_assert!(
                    (block.as_ptr() as usize)
                        <= isize::MAX as usize
                            - meta.size
                            - size_of::<BestFitMeta>()
                            - Self::align_up()
                );

                debug_assert!(
                    meta.size < isize::MAX as usize - size_of::<BestFitMeta>() - Self::align_up()
                );

                if meta.size > min {
                    // Split: the tail of the block becomes a new free block that
                    // takes the old block's place in the free list.
                    let remaining_meta = BestFitMeta {
                        size: meta.size - min,
                        next: meta.next,
                    };

                    meta.size = aligned_size;

                    let ptr = unsafe { Self::user_ptr(block).byte_add(aligned_size) };

                    unsafe {
                        ptr.cast::<BestFitMeta>().write(remaining_meta);
                    }

                    if let Some(prev) = prev {
                        let prev_meta = unsafe { prev.cast::<BestFitMeta>().as_mut() };
                        prev_meta.next = Some(ptr);
                    } else {
                        self.head = Some(ptr);
                    }

                    meta.next = None;

                    (true, block, prev)
                } else {
                    (false, block, prev)
                }
            }
            Err(_) => {
                // No block fits the aligned size; fall back to the raw request,
                // which a block of at least `size` bytes can still satisfy
                // without splitting.
                let (block, prev) = self.select_block(size)?;
                (false, block, prev)
            }
        };

        if !split {
            // The whole block is handed out, so unlink it from the free list.
            let meta = unsafe { block.cast::<BestFitMeta>().as_mut() };

            if let Some(prev) = prev {
                let prev_meta = unsafe { prev.cast::<BestFitMeta>().as_mut() };
                prev_meta.next = meta.next;
            } else {
                self.head = meta.next;
            }

            meta.next = None;
        }

        Ok(unsafe { Self::user_ptr(block).cast() })
    }

    unsafe fn free<T>(&mut self, ptr: NonNull<T>, size: usize) {
        let block = unsafe { Self::control_ptr(ptr.cast()) };
        let meta = unsafe { block.cast::<BestFitMeta>().as_mut() };

        meta.next = self.head;

        // The block's capacity can exceed the requested size (it may have been
        // handed out without splitting), but it can never be smaller. Keep
        // `meta.size` untouched so no usable bytes are lost when the block
        // rejoins the free list.
        BUG_ON!(meta.size < size, "Invalid size in free()");

        self.head = Some(block);
    }
}
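
// Usage sketch (illustrative; `start..end` stands for a caller-owned,
// `u128`-aligned, non-null region, e.g. memory handed over by the bootloader):
//
//     let mut allocator = BestFitAllocator::new();
//     unsafe { allocator.add_range(start..end)? };
//
//     let ptr: NonNull<u64> = allocator.malloc(size_of::<u64>(), align_of::<u64>())?;
//     unsafe { ptr.write(42) };
//     unsafe { allocator.free(ptr, size_of::<u64>()) };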

#[cfg(test)]
mod tests {
    use super::*;

    fn verify_block(user_ptr: NonNull<u8>, size: usize, next: Option<NonNull<u8>>) {
        let control_ptr = unsafe { BestFitAllocator::control_ptr(user_ptr) };
        let meta = unsafe { control_ptr.cast::<BestFitMeta>().as_ref() };

        assert!(meta.size >= size);
        assert_eq!(meta.next, next);
    }

    fn verify_ptrs_not_overlapping(ptrs: &[(NonNull<u8>, usize)]) {
        for (i, (ptr1, size1)) in ptrs.iter().enumerate() {
            for (j, (ptr2, size2)) in ptrs.iter().enumerate() {
                if i == j {
                    continue;
                }

                let begin1 = ptr1.as_ptr() as usize;
                let end1 = begin1 + size1;
                let begin2 = ptr2.as_ptr() as usize;
                let end2 = begin2 + size2;

                assert!(end1 <= begin2 || end2 <= begin1);
                assert!(begin1 != begin2);
                assert!(end1 != end2);
                assert!(*size1 > 0);
                assert!(*size2 > 0);
                assert!(end1 > begin1);
                assert!(end2 > begin2);
            }
        }
    }

    fn alloc_range(length: usize) -> Range<usize> {
        let layout = std::alloc::Layout::from_size_align(length, align_of::<u128>()).unwrap();
        let ptr = unsafe { std::alloc::alloc(layout) };
        ptr as usize..ptr as usize + length
    }

    #[test]
    fn allocate_one() {
        let mut allocator = BestFitAllocator::new();

        let range = alloc_range(4096);
        unsafe {
            allocator.add_range(range).unwrap();
        }

        let ptr = allocator.malloc(128, 1).unwrap();

        verify_block(ptr, 128, None);
    }

    #[test]
    fn alloc_alot() {
        let mut allocator = BestFitAllocator::new();
        const CNT: usize = 100;
        const SIZE: usize = 128;

        let range = alloc_range(SIZE * CNT * 2);
        unsafe {
            allocator.add_range(range).unwrap();
        }

        let mut ptrs = Vec::new();
        for _ in 0..CNT {
            let ptr = allocator.malloc(SIZE, 1).unwrap();
            verify_block(ptr, SIZE, None);
            ptrs.push((ptr, SIZE));
        }

        verify_ptrs_not_overlapping(ptrs.as_slice());
    }

    #[test]
    fn alloc_exact() {
        let mut allocator = BestFitAllocator::new();
        const CNT: usize = 10;
        const SIZE: usize = 128;

        let range =
            alloc_range((SIZE + size_of::<BestFitMeta>() + BestFitAllocator::align_up()) * CNT);
        unsafe {
            allocator.add_range(range).unwrap();
        }

        let mut ptrs = Vec::new();
        for _ in 0..CNT {
            let ptr = allocator.malloc(SIZE, 1).unwrap();
            verify_block(ptr, SIZE, None);
            ptrs.push((ptr, SIZE));
        }

        verify_ptrs_not_overlapping(ptrs.as_slice());
    }

    #[test]
    fn alloc_oom() {
        let mut allocator = BestFitAllocator::new();
        const CNT: usize = 10;
        const SIZE: usize = 128;

        let range =
            alloc_range((SIZE + size_of::<BestFitMeta>() + BestFitAllocator::align_up()) * CNT - 1);
        unsafe {
            allocator.add_range(range).unwrap();
        }

        let mut ptrs = Vec::new();
        for _ in 0..CNT - 1 {
            let ptr = allocator.malloc(SIZE, 1).unwrap();
            verify_block(ptr, SIZE, None);
            ptrs.push(ptr);
        }

        let ptr = allocator.malloc::<u8>(SIZE, 1);
        assert!(ptr.is_err_and(|e| e == utils::KernelError::OutOfMemory));
    }

    #[test]
    fn alloc_no_oom_through_free() {
        let mut allocator = BestFitAllocator::new();
        const SIZE: usize = 128;

        let range = alloc_range(SIZE + size_of::<BestFitMeta>() + BestFitAllocator::align_up());
        unsafe {
            allocator.add_range(range).unwrap();
        }

        let ptr = allocator.malloc(SIZE, 1).unwrap();
        verify_block(ptr, SIZE, None);

        unsafe {
            allocator.free(ptr, SIZE);
        }

        let ptr = allocator.malloc(SIZE, 1).unwrap();
        verify_block(ptr, SIZE, None);
    }

    #[test]
    fn multi_range_alloc() {
        let mut allocator = BestFitAllocator::new();
        const CNT: usize = 10;
        const SIZE: usize = 128;

        let mut ranges = Vec::new();
        for _ in 0..CNT {
            let range = alloc_range(SIZE + size_of::<BestFitMeta>() + BestFitAllocator::align_up());
            unsafe {
                allocator.add_range(range.clone()).unwrap();
            }
            ranges.push(range);
        }

        let mut ptrs = Vec::new();
        for _ in 0..CNT {
            let ptr = allocator.malloc(SIZE, 1).unwrap();
            verify_block(ptr, SIZE, None);
            ptrs.push((ptr, SIZE));
        }

        verify_ptrs_not_overlapping(ptrs.as_slice());
    }

    #[test]
    fn multi_range_no_oom_through_free() {
        let mut allocator = BestFitAllocator::new();

        const CNT: usize = 10;
        const SIZE: usize = 128;

        let mut ranges = Vec::new();
        for _ in 0..CNT {
            let range = alloc_range(SIZE + size_of::<BestFitMeta>() + BestFitAllocator::align_up());
            unsafe {
                allocator.add_range(range.clone()).unwrap();
            }
            ranges.push(range);
        }

        let mut ptrs = Vec::new();

        let ptr = allocator.malloc::<u8>(SIZE, 1).unwrap();

        for _ in 0..CNT - 1 {
            let ptr = allocator.malloc(SIZE, 1).unwrap();
            verify_block(ptr, SIZE, None);
            ptrs.push((ptr, SIZE));
        }

        unsafe {
            allocator.free(ptr, SIZE);
        }

        let ptr = allocator.malloc(SIZE, 1).unwrap();
        ptrs.push((ptr, SIZE));

        verify_ptrs_not_overlapping(ptrs.as_slice());
    }

    #[test]
    fn multi_range_oom() {
        let mut allocator = BestFitAllocator::new();

        const CNT: usize = 10;
        const SIZE: usize = 128;

        let mut ranges = Vec::new();
        for _ in 0..CNT {
            let range = alloc_range(SIZE + size_of::<BestFitMeta>() + BestFitAllocator::align_up());
            unsafe {
                allocator.add_range(range.clone()).unwrap();
            }
            ranges.push(range);
        }

        let mut ptrs = Vec::new();

        for _ in 0..CNT {
            let ptr = allocator.malloc(SIZE, 1).unwrap();
            verify_block(ptr, SIZE, None);
            ptrs.push((ptr, SIZE));
        }

        let ptr = allocator.malloc::<u8>(SIZE, 1);
        assert!(ptr.is_err_and(|e| e == utils::KernelError::OutOfMemory));

        verify_ptrs_not_overlapping(ptrs.as_slice());
    }
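
    // Added sketch: the argument guards in `malloc` reject a zero size and an
    // alignment stricter than `align_of::<u128>()` before any block is searched,
    // so no range has to be registered for these paths.
    #[test]
    fn malloc_rejects_invalid_args() {
        let mut allocator = BestFitAllocator::new();

        let ptr = allocator.malloc::<u8>(0, 1);
        assert!(ptr.is_err_and(|e| e == utils::KernelError::InvalidSize));

        let ptr = allocator.malloc::<u8>(128, align_of::<u128>() * 2);
        assert!(ptr.is_err_and(|e| e == utils::KernelError::InvalidAlign));
    }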
}

#[cfg(kani)]
mod verification {
    use super::*;

    fn verify_block(user_ptr: NonNull<u8>, size: usize, next: Option<NonNull<u8>>) {
        let control_ptr = unsafe { BestFitAllocator::control_ptr(user_ptr) };
        let meta = unsafe { control_ptr.cast::<BestFitMeta>().as_ref() };

        assert!(meta.size >= size);
        assert_eq!(meta.next, next);
    }

    fn alloc_range(length: usize) -> Option<Range<usize>> {
        let layout = std::alloc::Layout::from_size_align(length, align_of::<u128>()).unwrap();
        let ptr = unsafe { std::alloc::alloc(layout) };

        if ptr.is_null() || ((ptr as usize) >= isize::MAX as usize - length) {
            None
        } else {
            Some(ptr as usize..ptr as usize + length)
        }
    }

    #[kani::proof]
    #[kani::unwind(2)]
    fn allocate_one() {
        let mut allocator = BestFitAllocator::new();

        let size: usize = kani::any();
        kani::assume(size < MAX_ADDR - size_of::<BestFitMeta>() - BestFitAllocator::align_up());
        kani::assume(size > 0);
        let larger_size: usize = kani::any_where(|&x| {
            x > size + size_of::<BestFitMeta>() + BestFitAllocator::align_up() && x < MAX_ADDR
        });

        if let Some(range) = alloc_range(larger_size) {
            unsafe {
                assert_eq!(allocator.add_range(range), Ok(()));
            }

            let ptr = allocator.malloc(size, 1).unwrap();

            verify_block(ptr, size, None);
        }
    }
}