kernel/mem/
boxed.rs

1//! This module provides a simple heap-allocated memory block for in-kernel use.
2
use super::{free, malloc};
use crate::utils::KernelError;
use core::{
    mem::{MaybeUninit, forget},
    ops::{Deref, DerefMut, Index, IndexMut, Range, RangeFrom, RangeTo},
    ptr::{NonNull, drop_in_place, slice_from_raw_parts_mut, write, write_bytes},
};
10
/// A heap-allocated memory block.
///
/// Owns a value of type `T` (which may be unsized, e.g. a slice) stored in
/// memory obtained from the kernel allocator (`malloc`); the memory is
/// released with `free` when the `Box` is dropped.
pub struct Box<T: ?Sized> {
    /// Pointer to the heap-allocated memory.
    /// This is uniquely owned, so no covariance issues.
    /// NOTE(review): for zero-sized allocations (see `new_slice_empty`) this
    /// is a dangling, well-aligned pointer rather than a real allocation.
    ptr: NonNull<T>,
}
17
18impl<T> Box<[T]> {
19    /// Create a new zeroed heap-allocated slice with the given length.
20    ///
21    /// `len` - The length of the slice.
22    ///
23    /// Returns a new heap-allocated slice with the given length or an error if the allocation failed.
24    pub fn new_slice_zeroed(len: usize) -> Result<Self, KernelError> {
25        if len == 0 {
26            return Ok(Self::new_slice_empty());
27        }
28
29        if let Some(ptr) = malloc(size_of::<T>() * len, align_of::<T>()) {
30            let ptr = slice_from_raw_parts_mut(ptr.as_ptr().cast(), len);
31            Ok(Self {
32                ptr: unsafe { NonNull::new_unchecked(ptr) },
33            })
34        } else {
35            Err(KernelError::OutOfMemory)
36        }
37    }
38
39    /// Create a new empty slice.
40    ///
41    /// Returns a new empty slice.
42    pub const fn new_slice_empty() -> Self {
43        let ptr = slice_from_raw_parts_mut(NonNull::dangling().as_ptr(), 0);
44        Self {
45            ptr: unsafe { NonNull::new_unchecked(ptr) },
46        }
47    }
48
49    /// Create a new uninit heap-allocated slice with the given length.
50    ///
51    /// `len` - The length of the slice.
52    ///
53    /// Returns a new heap-allocated slice with the given length or an error if the allocation failed.
54    pub fn new_slice_uninit(len: usize) -> Result<Box<[MaybeUninit<T>]>, KernelError> {
55        if let Some(ptr) = malloc(
56            size_of::<MaybeUninit<T>>() * len,
57            align_of::<MaybeUninit<T>>(),
58        ) {
59            let ptr = slice_from_raw_parts_mut(ptr.as_ptr().cast(), len);
60            Ok(Box {
61                ptr: unsafe { NonNull::new_unchecked(ptr) },
62            })
63        } else {
64            Err(KernelError::OutOfMemory)
65        }
66    }
67}
68
69impl<T> Box<T> {
70    /// Create a new heap-allocated value.
71    ///
72    /// `value` - The value to store on the heap.
73    ///
74    /// Returns a new heap-allocated value or `None` if the allocation failed.
75    pub fn new(value: T) -> Option<Self> {
76        if let Some(ptr) = malloc(size_of::<T>(), align_of::<T>()) {
77            unsafe {
78                write(ptr.as_ptr().cast(), value);
79            }
80
81            Some(Self { ptr: ptr.cast() })
82        } else {
83            None
84        }
85    }
86
87    /// Returns a mutable reference to the heap-allocated value.
88    pub fn as_mut(&mut self) -> &mut T {
89        unsafe { self.ptr.as_mut() }
90    }
91
92    /// Returns an immutable reference to the heap-allocated value.
93    pub fn as_ref(&self) -> &T {
94        unsafe { self.ptr.as_ref() }
95    }
96
97    /// Consumes the `Box`, returning a pointer to the heap-allocated value.
98    ///
99    /// The caller is responsible for freeing the memory with the global `free` function.
100    /// A pointer created with this function can be converted back into a `Box` with the `from_raw` function.
101    pub fn into_raw(self) -> NonNull<T> {
102        let ptr = self.ptr;
103        forget(self);
104        ptr
105    }
106
107    /// Moves a pointer to a heap-allocated value into a `Box`.
108    ///
109    /// `ptr` - The pointer to the heap-allocated value.
110    ///
111    /// Returns a new `Box` managing the given pointer.
112    /// # Safety
113    ///
114    /// The caller must ensure that the pointer is valid and that the memory is not freed while the `Box` is alive.
115    ///
116    /// The caller must ensure that the following conditions are met:
117    ///
118    /// * The pointer must be allocated with the global `malloc` function.
119    /// * The pointer must be unique and not aliased.
120    /// * The pointer must be properly aligned.
121    /// * The pointer must point to a valid `T`.
122    ///
123    /// The `Box` takes ownership of the memory and will free it with the global allocator when dropped.
124    pub unsafe fn from_raw(ptr: NonNull<T>) -> Self {
125        Self { ptr }
126    }
127}
128
129impl<T: ?Sized> Drop for Box<T> {
130    fn drop(&mut self) {
131        unsafe {
132            let size = size_of_val(self.ptr.as_ref());
133
134            if size == 0 {
135                return;
136            }
137
138            drop_in_place(self.ptr.as_ptr());
139            free(self.ptr.cast(), size);
140        }
141    }
142}
143
144impl<T> Deref for Box<T> {
145    type Target = T;
146
147    fn deref(&self) -> &Self::Target {
148        self.as_ref()
149    }
150}
151
152impl<T> DerefMut for Box<T> {
153    fn deref_mut(&mut self) -> &mut Self::Target {
154        self.as_mut()
155    }
156}
157
158impl<T> Deref for Box<[T]> {
159    type Target = [T];
160
161    fn deref(&self) -> &[T] {
162        unsafe { self.ptr.as_ref() }
163    }
164}
165
166impl<T> DerefMut for Box<[T]> {
167    fn deref_mut(&mut self) -> &mut [T] {
168        unsafe { self.ptr.as_mut() }
169    }
170}
171
172impl<T> Index<usize> for Box<[T]> {
173    type Output = T;
174
175    fn index(&self, index: usize) -> &Self::Output {
176        &self.as_ref()[index]
177    }
178}
179
180impl<T> Index<Range<usize>> for Box<[T]> {
181    type Output = [T];
182
183    fn index(&self, index: Range<usize>) -> &Self::Output {
184        &self.as_ref()[index]
185    }
186}
187
188impl<T> Index<RangeTo<usize>> for Box<[T]> {
189    type Output = [T];
190
191    fn index(&self, index: RangeTo<usize>) -> &Self::Output {
192        &self.as_ref()[index]
193    }
194}
195
196impl<T> Index<RangeFrom<usize>> for Box<[T]> {
197    type Output = [T];
198
199    fn index(&self, index: RangeFrom<usize>) -> &Self::Output {
200        &self.as_ref()[index]
201    }
202}
203
204impl<T> IndexMut<usize> for Box<[T]> {
205    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
206        &mut self.as_mut()[index]
207    }
208}
209
210impl<T> IndexMut<Range<usize>> for Box<[T]> {
211    fn index_mut(&mut self, index: Range<usize>) -> &mut Self::Output {
212        &mut self.as_mut()[index]
213    }
214}
215
216impl<T> IndexMut<RangeTo<usize>> for Box<[T]> {
217    fn index_mut(&mut self, index: RangeTo<usize>) -> &mut Self::Output {
218        &mut self.as_mut()[index]
219    }
220}
221
222impl<T> IndexMut<RangeFrom<usize>> for Box<[T]> {
223    fn index_mut(&mut self, index: RangeFrom<usize>) -> &mut Self::Output {
224        &mut self.as_mut()[index]
225    }
226}
227
228impl<T> AsRef<T> for Box<T> {
229    fn as_ref(&self) -> &T {
230        self.as_ref()
231    }
232}
233
234impl<T> AsMut<T> for Box<T> {
235    fn as_mut(&mut self) -> &mut T {
236        self.as_mut()
237    }
238}
239
#[cfg(kani)]
mod verification {
    // Kani proof harnesses for this module. The only harness is currently
    // disabled (commented out) and is incomplete as written — the
    // `allocator` statement on the line after `BestFitAllocator::new()` is
    // unfinished, so it would not compile if re-enabled as-is.
    // NOTE(review): with the harness disabled, both `use` items below are
    // unused under `cfg(kani)` and may produce unused-import warnings.
    use crate::mem::alloc;

    use super::*;

    /*
    fn alloc_range(length: usize) -> Option<Range<usize>> {
        let alloc_range = std::alloc::Layout::from_size_align(length, align_of::<u128>()).unwrap();
        let ptr = unsafe { std::alloc::alloc(alloc_range) };

        if ptr.is_null() || ((ptr as usize) >= isize::MAX as usize - length) {
            None
        } else {
            Some(ptr as usize..ptr as usize + length)
        }
    }

    #[kani::proof]
    fn proof_new_slice_zero() {
        let mut allocator = alloc::BestFitAllocator::new();
        allocator

        let len = kani::any();
        kani::assume(len < alloc::MAX_ADDR);

        let b = Box::<u8>::new_slice_zeroed(len);

        let index = kani::any();
        kani::assume(index < len);

        assert!(b[index] == 0);
    }
    */
}