1use core::{ops::Range, ptr::NonNull};
2
3use hal::mem::PhysAddr;
4
5use crate::error::Result;
6
/// Free-list node header stored in place at the start of every free block.
struct BestFitMeta {
    // Usable payload bytes that follow the user pointer (i.e. after this
    // header plus its alignment padding).
    size: usize,
    // Next free block in the singly linked free list; `None` terminates it.
    next: Option<NonNull<u8>>,
}
14
/// Best-fit free-list allocator: `select_block` scans the entire free list
/// and picks the smallest block that satisfies a request.
#[proc_macros::fmt]
pub struct BestFitAllocator {
    // Head of the singly linked free list; each entry points at a block's
    // in-place `BestFitMeta` header.
    head: Option<NonNull<u8>>,
}
24
impl BestFitAllocator {
    /// Smallest range `add_range` accepts: header + alignment padding + at
    /// least one usable byte.
    pub const MIN_RANGE_SIZE: usize = size_of::<BestFitMeta>() + Self::align_up() + 1;

    /// Creates an allocator with an empty free list.
    pub const fn new() -> Self {
        Self { head: None }
    }

    /// Donates `[range.start, range.end)` to the allocator by writing a
    /// `BestFitMeta` header at `range.start` and pushing the block onto the
    /// front of the free list.
    ///
    /// # Errors
    ///
    /// `InvalidArgument` when `range.start` is not `u128`-aligned or the
    /// range is shorter than [`Self::MIN_RANGE_SIZE`].
    ///
    /// # Safety
    ///
    /// The range must be writable memory exclusively owned by this allocator
    /// from now on. Overlap with previously added ranges is NOT checked here.
    pub unsafe fn add_range(&mut self, range: &Range<PhysAddr>) -> Result<()> {
        let ptr = range.start;

        if !ptr.is_multiple_of(align_of::<u128>()) {
            return Err(kerr!(InvalidArgument));
        }

        if range.end.diff(range.start) < Self::MIN_RANGE_SIZE {
            return Err(kerr!(InvalidArgument));
        }

        debug_assert!(range.end > range.start);
        debug_assert!(range.end.diff(range.start) > size_of::<BestFitMeta>() + Self::align_up());
        debug_assert!(range.end.as_usize() <= isize::MAX as usize);

        // First byte handed out to users: header plus padding so the user
        // pointer keeps u128 alignment.
        let user_pointer = ptr + size_of::<BestFitMeta>() + Self::align_up();

        let meta = BestFitMeta {
            // Usable bytes are everything after the user pointer.
            size: range.end.diff(user_pointer),
            next: self.head,
        };

        unsafe { core::ptr::write(ptr.as_mut_ptr::<BestFitMeta>(), meta) };

        // New ranges are pushed onto the front of the free list.
        self.head = Some(unsafe { NonNull::new_unchecked(ptr.as_mut_ptr::<u8>()) });
        Ok(())
    }

    /// Padding inserted after `BestFitMeta` so the user pointer is
    /// `u128`-aligned.
    const fn align_up() -> usize {
        let meta = size_of::<BestFitMeta>();
        let align = align_of::<u128>();
        (align - (meta % align)) % align
    }

    /// Walks the free list and returns `(block, predecessor)` for the chosen
    /// block; `predecessor` is `None` when the block is the list head.
    ///
    /// With `requested = Some(addr)` the first block containing
    /// `[addr, addr + size)` wins. If no block contains the address, the scan
    /// leaves the list exhausted (`current == None`), so the best-fit loop
    /// below never runs and `OutOfMemory` is returned — there is deliberately
    /// no fallback to an arbitrary block for requested addresses.
    ///
    /// With `requested = None` the smallest block with `meta.size >= size`
    /// wins; on ties the later block in the list is preferred (`<=`).
    fn select_block(
        &mut self,
        size: usize,
        requested: Option<PhysAddr>,
    ) -> Result<(NonNull<u8>, Option<NonNull<u8>>)> {
        let mut best_fit = Err(kerr!(OutOfMemory));
        let mut best_fit_size = usize::MAX;

        let mut current = self.head;
        let mut prev = None;

        if let Some(requested) = requested {
            while let Some(ptr) = current {
                let meta = unsafe { ptr.cast::<BestFitMeta>().as_ref() };

                if unsafe { Self::contains(meta, requested, size) } {
                    return Ok((ptr, prev));
                }

                prev = current;
                current = meta.next;
            }
        }

        while let Some(ptr) = current {
            let meta = unsafe { ptr.cast::<BestFitMeta>().as_ref() };

            if meta.size >= size && meta.size <= best_fit_size {
                best_fit = Ok((ptr, prev));
                best_fit_size = meta.size;
            }

            prev = current;
            current = meta.next;
        }

        best_fit
    }

    /// Converts a block (header) pointer into the pointer handed to users.
    ///
    /// # Safety
    ///
    /// `ptr` must point at a block header and the offset must stay inside the
    /// same allocation (debug-checked against `isize::MAX`).
    unsafe fn user_ptr(ptr: NonNull<u8>) -> NonNull<u8> {
        debug_assert!(
            (ptr.as_ptr() as usize)
                <= isize::MAX as usize - size_of::<BestFitMeta>() - Self::align_up()
        );
        unsafe { ptr.byte_add(size_of::<BestFitMeta>() + Self::align_up()) }
    }

    /// Inverse of [`Self::user_ptr`]: recovers the header pointer from a user
    /// pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must be a pointer previously produced by [`Self::user_ptr`].
    unsafe fn control_ptr(ptr: NonNull<u8>) -> NonNull<u8> {
        debug_assert!((ptr.as_ptr() as usize) > size_of::<BestFitMeta>() + Self::align_up());
        unsafe { ptr.byte_sub(size_of::<BestFitMeta>() + Self::align_up()) }
    }

    /// Returns whether `[target, target + size)` lies entirely inside the
    /// usable payload of the block described by `meta`. Overflow of
    /// `target + size` counts as "not contained".
    ///
    /// # Safety
    ///
    /// `meta` must be the in-place header of a live block so that the derived
    /// user pointer is valid.
    unsafe fn contains(meta: &BestFitMeta, target: PhysAddr, size: usize) -> bool {
        let begin = unsafe {
            Self::user_ptr(NonNull::new_unchecked(
                meta as *const BestFitMeta as *mut u8,
            ))
        };
        debug_assert!(size > 0);

        if target >= begin.into() {
            if let Some(target) = target.checked_add(size) {
                if target > (unsafe { begin.add(meta.size) }).into() {
                    return false;
                }
            } else {
                return false;
            }
            return true;
        }
        false
    }
}
192
193impl super::Allocator for BestFitAllocator {
195 fn malloc<T>(
202 &mut self,
203 size: usize,
204 align: usize,
205 request: Option<PhysAddr>,
206 ) -> Result<NonNull<T>> {
207 if align == 0 || align > align_of::<u128>() {
209 return Err(kerr!(InvalidAlign));
210 }
211
212 if let Some(request) = request {
213 if !request.is_multiple_of(align) {
214 return Err(kerr!(InvalidAlign));
215 }
216 }
217
218 if size == 0 {
220 return Err(kerr!(InvalidArgument));
221 }
222
223 #[allow(clippy::absurd_extreme_comparisons)]
225 if size >= super::MAX_ADDR {
226 return Err(kerr!(InvalidArgument));
227 }
228
229 let aligned_size = super::super::align_up(size);
231 debug_assert!(aligned_size >= size);
232 debug_assert!(aligned_size <= isize::MAX as usize);
233
234 let (split, block, prev) = match self.select_block(aligned_size, request) {
236 Ok((block, prev)) => {
237 let meta = unsafe { block.cast::<BestFitMeta>().as_mut() };
239
240 let aligned_size = if let Some(request) = request {
242 aligned_size + request.diff(unsafe { Self::user_ptr(block) }.into())
243 } else {
244 aligned_size
245 };
246
247 let min = aligned_size.saturating_add(size_of::<BestFitMeta>() + Self::align_up());
249
250 debug_assert!(
251 (block.as_ptr() as usize)
252 <= isize::MAX as usize
253 - meta.size
254 - size_of::<BestFitMeta>()
255 - Self::align_up()
256 );
257
258 debug_assert!(
259 meta.size < isize::MAX as usize - size_of::<BestFitMeta>() - Self::align_up()
260 );
261
262 if meta.size > min {
264 let remaining_meta = BestFitMeta {
266 size: meta.size - min,
267 next: meta.next,
268 };
269
270 meta.size = aligned_size;
272
273 let ptr = unsafe { Self::user_ptr(block).byte_add(aligned_size) };
275
276 unsafe {
277 ptr.cast::<BestFitMeta>().write(remaining_meta);
279 }
280
281 if let Some(prev) = prev {
283 let prev_meta = unsafe { prev.cast::<BestFitMeta>().as_mut() };
284 prev_meta.next = Some(ptr);
285 } else {
286 self.head = Some(ptr);
287 }
288
289 meta.next = None;
291
292 (true, block, prev)
293 } else {
294 (false, block, prev)
295 }
296 }
297 Err(_) => {
298 let (block, prev) = self.select_block(size, request)?;
299 (false, block, prev)
300 }
301 };
302
303 if !split {
304 let meta = unsafe { block.cast::<BestFitMeta>().as_mut() };
306
307 if let Some(prev) = prev {
308 let prev_meta = unsafe { prev.cast::<BestFitMeta>().as_mut() };
309 prev_meta.next = meta.next;
311 } else {
312 self.head = meta.next;
314 }
315
316 meta.next = None;
318 }
319
320 if let Some(request) = request {
321 debug_assert!(unsafe {
322 Self::contains(block.cast::<BestFitMeta>().as_ref(), request, size)
323 });
324 }
325
326 Ok(unsafe { Self::user_ptr(block).cast() })
328 }
329
330 unsafe fn free<T>(&mut self, ptr: NonNull<T>, size: usize) {
335 let block = unsafe { Self::control_ptr(ptr.cast()) };
336 let meta = unsafe { block.cast::<BestFitMeta>().as_mut() };
337
338 meta.next = self.head;
340
341 bug_on!(
343 size > meta.size,
344 "allocation size {} is larger than block size {}",
345 size,
346 meta.size
347 );
348
349 self.head = Some(block);
351 }
352}
353
#[cfg(test)]
mod tests {
    use crate::error::Kind;
    use crate::mem::align_up;

    use super::super::*;
    use super::*;

    // Checks the header behind a user pointer: the block must hold at least
    // `size` usable bytes and have the expected free-list link.
    fn verify_block(user_ptr: NonNull<u8>, size: usize, next: Option<NonNull<u8>>) {
        let control_ptr = unsafe { BestFitAllocator::control_ptr(user_ptr) };
        let meta = unsafe { control_ptr.cast::<BestFitMeta>().as_ref() };

        assert!(meta.size >= size);
        assert_eq!(meta.next, next);
    }

    // Asserts pairwise that no two allocations overlap and that every
    // recorded allocation is a non-empty, well-formed range.
    fn verify_ptrs_not_overlaping(ptrs: &[(NonNull<u8>, usize)]) {
        for (i, (ptr1, size1)) in ptrs.iter().enumerate() {
            for (j, (ptr2, size2)) in ptrs.iter().enumerate() {
                if i == j {
                    continue;
                }

                let begin1 = ptr1.as_ptr() as usize;
                let end1 = begin1 + size1;
                let begin2 = ptr2.as_ptr() as usize;
                let end2 = begin2 + size2;

                assert!(end1 <= begin2 || end2 <= begin1);
                assert!(begin1 != begin2);
                assert!(end1 != end2);
                assert!(*size1 > 0);
                assert!(*size2 > 0);
                assert!(end1 > begin1);
                assert!(end2 > begin2);
            }
        }
    }

    // Backs a PhysAddr range with real heap memory so the allocator can write
    // headers into it. The memory is intentionally never deallocated — fine
    // for short-lived test processes.
    fn alloc_range(length: usize) -> Range<PhysAddr> {
        let alloc_range = std::alloc::Layout::from_size_align(length, align_of::<u128>()).unwrap();
        let ptr = unsafe { std::alloc::alloc(alloc_range) };
        PhysAddr::new(ptr as usize)..PhysAddr::new(ptr as usize + length)
    }

    // Single allocation from a single range succeeds and yields a well-formed
    // block.
    #[test]
    fn allocate_one() {
        let mut allocator = BestFitAllocator::new();

        let range = alloc_range(4096);
        unsafe {
            allocator.add_range(&range).unwrap();
        }

        let ptr = allocator.malloc(128, 1, None).unwrap();

        verify_block(ptr, 128, None);
    }

    // A requested physical address inside a free block is honored: the
    // returned block contains the requested range.
    #[test]
    fn alloc_request() {
        let mut allocator = BestFitAllocator::new();

        let range = alloc_range(4096);
        unsafe {
            allocator.add_range(&range).unwrap();
        }

        let request = range.start + 128;
        let ptr = allocator.malloc::<u8>(128, 1, Some(request)).unwrap();

        let meta = unsafe {
            BestFitAllocator::control_ptr(ptr)
                .cast::<BestFitMeta>()
                .as_ref()
        };
        assert!(unsafe { BestFitAllocator::contains(meta, request, 128) });
    }

    // A request that would extend past the end of the range is rejected with
    // OutOfMemory.
    #[test]
    fn alloc_request_to_big() {
        let mut allocator = BestFitAllocator::new();

        let range = alloc_range(4096);
        unsafe {
            allocator.add_range(&range).unwrap();
        }

        let request = range.start + 4096;
        let ptr = allocator.malloc::<u8>(128, 1, Some(request));

        assert!(ptr.is_err_and(|e| e.kind == Kind::OutOfMemory));
    }

    // A requested address that is not a multiple of the requested alignment
    // is rejected before any free-list search.
    #[test]
    fn alloc_request_not_aligned() {
        let mut allocator = BestFitAllocator::new();

        let range = alloc_range(4096);
        unsafe {
            allocator.add_range(&range).unwrap();
        }

        let request = range.start + 127;
        let ptr = allocator.malloc::<u8>(128, 8, Some(request));

        assert!(ptr.is_err_and(|e| e.kind == Kind::InvalidAlign));
    }

    // Once a requested address has been handed out, a second request for the
    // same address fails — there is no fallback to another block.
    #[test]
    fn alloc_request_not_available() {
        let mut allocator = BestFitAllocator::new();

        let range = alloc_range(4096);
        unsafe {
            allocator.add_range(&range).unwrap();
        }

        let request = range.start + 128;
        let ptr = allocator.malloc::<u8>(128, 1, Some(request)).unwrap();
        verify_block(ptr, 128, None);

        let ptr = allocator.malloc::<u8>(128, 1, Some(request));
        assert!(ptr.is_err_and(|e| e.kind == Kind::OutOfMemory));
    }

    // A request entirely outside every added range is OutOfMemory.
    #[test]
    fn alloc_request_out_of_range() {
        let mut allocator = BestFitAllocator::new();

        let range = alloc_range(4096);
        unsafe {
            allocator.add_range(&range).unwrap();
        }

        let request = range.end + 128;
        let ptr = allocator.malloc::<u8>(128, 1, Some(request));

        assert!(ptr.is_err_and(|e| e.kind == Kind::OutOfMemory));
    }

    // Many allocations from one generously sized range: all succeed and none
    // overlap.
    #[test]
    fn alloc_alot() {
        let mut allocator = BestFitAllocator::new();
        const CNT: usize = 100;
        const SIZE: usize = 128;

        let range = alloc_range(SIZE * CNT * 2);
        unsafe {
            allocator.add_range(&range).unwrap();
        }

        let mut ptrs = Vec::new();
        for _ in 0..CNT {
            let ptr = allocator.malloc(SIZE, 1, None).unwrap();
            verify_block(ptr, SIZE, None);
            ptrs.push((ptr, SIZE));
        }

        verify_ptrs_not_overlaping(ptrs.as_slice());
    }

    // The range is sized to hold exactly CNT blocks (payload + header +
    // padding each); all CNT allocations must succeed.
    #[test]
    fn alloc_exact() {
        let mut allocator = BestFitAllocator::new();
        const CNT: usize = 10;
        const SIZE: usize = 128;

        let range =
            alloc_range((SIZE + size_of::<BestFitMeta>() + BestFitAllocator::align_up()) * CNT);
        unsafe {
            allocator.add_range(&range).unwrap();
        }

        let mut ptrs = Vec::new();
        for _ in 0..CNT {
            let ptr = allocator.malloc(SIZE, 1, None).unwrap();
            verify_block(ptr, SIZE, None);
            ptrs.push((ptr, SIZE));
        }

        verify_ptrs_not_overlaping(ptrs.as_slice());
    }

    // One byte short of CNT full blocks: the CNT-th allocation must fail with
    // OutOfMemory.
    #[test]
    fn alloc_oom() {
        let mut allocator = BestFitAllocator::new();
        const CNT: usize = 10;
        const SIZE: usize = 128;

        let range =
            alloc_range((SIZE + size_of::<BestFitMeta>() + BestFitAllocator::align_up()) * CNT - 1);
        unsafe {
            allocator.add_range(&range).unwrap();
        }

        let mut ptrs = Vec::new();
        for _ in 0..CNT - 1 {
            let ptr = allocator.malloc(SIZE, 1, None).unwrap();
            verify_block(ptr, SIZE, None);
            ptrs.push(ptr);
        }

        let ptr = allocator.malloc::<u8>(SIZE, 1, None);
        assert!(ptr.is_err_and(|e| e.kind == Kind::OutOfMemory));
    }

    // Freeing the only block makes the same allocation succeed again.
    #[test]
    fn alloc_no_oom_through_free() {
        let mut allocator = BestFitAllocator::new();
        const SIZE: usize = 128;

        let range = alloc_range(SIZE + size_of::<BestFitMeta>() + BestFitAllocator::align_up());
        unsafe {
            allocator.add_range(&range).unwrap();
        }

        let ptr = allocator.malloc(SIZE, 1, None).unwrap();
        verify_block(ptr, SIZE, None);

        unsafe {
            allocator.free(ptr, SIZE);
        }

        let ptr = allocator.malloc(SIZE, 1, None).unwrap();
        verify_block(ptr, SIZE, None);
    }

    // Multiple disjoint ranges, one block each: every allocation succeeds and
    // none overlap.
    #[test]
    fn multi_range_alloc() {
        let mut allocator = BestFitAllocator::new();
        const CNT: usize = 10;
        const SIZE: usize = 128;

        let mut ranges = Vec::new();
        for _ in 0..CNT {
            let range = alloc_range(SIZE + size_of::<BestFitMeta>() + BestFitAllocator::align_up());
            unsafe {
                allocator.add_range(&range).unwrap();
            }
            ranges.push(range);
        }

        let mut ptrs = Vec::new();
        for _ in 0..CNT {
            let ptr = allocator.malloc(SIZE, 1, None).unwrap();
            verify_block(ptr, SIZE, None);
            ptrs.push((ptr, SIZE));
        }

        verify_ptrs_not_overlaping(ptrs.as_slice());
    }

    // Exhaust all ranges, free one allocation mid-stream, and verify the
    // freed block is reusable without overlapping the survivors.
    #[test]
    fn multi_range_no_oom_through_free() {
        let mut allocator = BestFitAllocator::new();

        const CNT: usize = 10;
        const SIZE: usize = 128;

        let mut ranges = Vec::new();
        for _ in 0..CNT {
            let range = alloc_range(SIZE + size_of::<BestFitMeta>() + BestFitAllocator::align_up());
            unsafe {
                allocator.add_range(&range).unwrap();
            }
            ranges.push(range);
        }

        let mut ptrs = Vec::new();

        let ptr = allocator.malloc::<u8>(SIZE, 1, None).unwrap();

        for _ in 0..CNT - 1 {
            let ptr = allocator.malloc(SIZE, 1, None).unwrap();
            verify_block(ptr, SIZE, None);
            ptrs.push((ptr, SIZE));
        }

        unsafe {
            allocator.free(ptr, SIZE);
        }

        let ptr = allocator.malloc(SIZE, 1, None).unwrap();
        ptrs.push((ptr, SIZE));

        verify_ptrs_not_overlaping(ptrs.as_slice());
    }

    // NOTE(review): the name suggests this reproduces a past
    // metadata-corruption regression around free() with a rounded-up
    // allocation (17 -> 32). The test itself only checks that the
    // malloc/free/malloc sequence completes without panicking.
    #[test]
    fn free_corrupts_metadata() {
        let mut allocator = BestFitAllocator::new();
        const SIZE: usize = 17;
        const ALIGNED: usize = 32;
        assert!(align_up(SIZE) == ALIGNED);

        let range = alloc_range(ALIGNED + size_of::<BestFitMeta>() + BestFitAllocator::align_up());
        unsafe {
            allocator.add_range(&range).unwrap();
        }

        let ptr1: core::ptr::NonNull<u8> = allocator.malloc(SIZE, 1, None).unwrap();

        unsafe {
            allocator.free(ptr1, SIZE);
        }

        let ptr2: core::ptr::NonNull<u8> = allocator
            .malloc(SIZE, 1, None)
            .expect("second malloc should succeed via fallback path");

        unsafe {
            allocator.free(ptr2, SIZE);
        }
    }

    // All ranges exhausted: one extra allocation must fail with OutOfMemory
    // and the successful ones must not overlap.
    #[test]
    fn multi_range_oom() {
        let mut allocator = BestFitAllocator::new();

        const CNT: usize = 10;
        const SIZE: usize = 128;

        let mut ranges = Vec::new();
        for _ in 0..CNT {
            let range = alloc_range(SIZE + size_of::<BestFitMeta>() + BestFitAllocator::align_up());
            unsafe {
                allocator.add_range(&range).unwrap();
            }
            ranges.push(range);
        }

        let mut ptrs = Vec::new();

        for _ in 0..CNT {
            let ptr = allocator.malloc(SIZE, 1, None).unwrap();
            verify_block(ptr, SIZE, None);
            ptrs.push((ptr, SIZE));
        }

        let ptr = allocator.malloc::<u8>(SIZE, 1, None);
        assert!(ptr.is_err_and(|e| e.kind == Kind::OutOfMemory));

        verify_ptrs_not_overlaping(ptrs.as_slice());
    }
}
705
#[cfg(kani)]
mod verification {
    use super::*;
    use crate::mem::alloc::Allocator;
    use crate::mem::alloc::MAX_ADDR;

    // Same header check as the unit tests: at least `size` usable bytes and
    // the expected free-list link.
    fn verify_block(user_ptr: NonNull<u8>, size: usize, next: Option<NonNull<u8>>) {
        let control_ptr = unsafe { BestFitAllocator::control_ptr(user_ptr) };
        let meta = unsafe { control_ptr.cast::<BestFitMeta>().as_ref() };

        assert!(meta.size >= size);
        assert_eq!(meta.next, next);
    }

    // Fallible variant of the test helper: returns None when allocation
    // fails or the range would exceed the isize::MAX address bound the
    // allocator debug-asserts.
    fn alloc_range(length: usize) -> Option<Range<PhysAddr>> {
        let alloc_range = std::alloc::Layout::from_size_align(length, align_of::<u128>()).unwrap();
        let ptr = unsafe { std::alloc::alloc(alloc_range) };

        if ptr.is_null() || ((ptr as usize) >= isize::MAX as usize - length) {
            None
        } else {
            Some(PhysAddr::new(ptr as usize)..PhysAddr::new(ptr as usize + length))
        }
    }

    // Proof harness: for any nonzero size below the bound, allocating from a
    // range strictly larger than size + header + padding must succeed and
    // yield a well-formed block.
    #[kani::proof]
    #[kani::unwind(2)]
    fn allocate_one() {
        let mut allocator = BestFitAllocator::new();

        let size: usize = kani::any();
        kani::assume(size < MAX_ADDR - size_of::<BestFitMeta>() - BestFitAllocator::align_up());
        kani::assume(size > 0);
        let larger_size: usize = kani::any_where(|&x| {
            x > size + size_of::<BestFitMeta>() + BestFitAllocator::align_up() && x < MAX_ADDR
        });

        if let Some(range) = alloc_range(larger_size) {
            unsafe {
                assert_eq!(allocator.add_range(&range), Ok(()));
            }

            let ptr = allocator.malloc(size, 1, None).unwrap();

            verify_block(ptr, size, None);
        }
    }
}
757