hal_arm/
sched.rs

1//! Module: sched
2
3use core::{
4    ffi::c_void,
5    num::NonZero,
6    ops::{Add, AddAssign, Range},
7    ptr::NonNull,
8};
9
10use hal_api::{Result, stack::Descriptor};
11
12use crate::print::println;
13
// Fallback finalizer installed when the caller supplies none: a thread
// that returns lands here and busy-waits forever instead of running off
// into garbage. Kept out-of-line so its address is stable for the frame.
#[inline(never)]
extern "C" fn default_finalizer() -> ! {
    loop {
        // Hint the CPU that this is a busy-wait so it may throttle.
        core::hint::spin_loop();
    }
}
21
/// A stack-pointer position expressed as a word offset *below* the top
/// of a stack (the stack grows downwards, so larger offsets are deeper).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct StackPtr {
    offset: usize,
}

impl StackPtr {
    /// Resolves this offset against a concrete stack top.
    ///
    /// SAFETY note: the caller must ensure `top - offset` stays within
    /// the stack's allocation; `NonNull::sub` is in-bounds arithmetic.
    fn as_ptr(&self, top: NonNull<u32>) -> NonNull<u32> {
        unsafe { top.sub(self.offset) }
    }

    /// Offset addition that reports overflow instead of panicking.
    fn checked_add(&self, rhs: usize) -> Option<Self> {
        let offset = self.offset.checked_add(rhs)?;
        Some(Self { offset })
    }

    /// The raw word offset from the stack top.
    fn offset(&self) -> usize {
        self.offset
    }
}

impl Add<usize> for StackPtr {
    type Output = Self;

    /// Moves the pointer `rhs` words deeper into the stack.
    fn add(self, rhs: usize) -> Self::Output {
        Self {
            offset: self.offset + rhs,
        }
    }
}

impl AddAssign<usize> for StackPtr {
    fn add_assign(&mut self, rhs: usize) {
        // Delegate to `Add` so both operators share one definition.
        *self = *self + rhs;
    }
}
56
/// A stack on arm is 4 byte aligned and grows downwards.
///
/// All bookkeeping is done in `u32` words, not bytes: `sp` and `size`
/// count words, and byte quantities are converted with
/// `size_of::<u32>()` where needed (see `does_fit`).
#[derive(Debug, Clone, Copy)]
pub struct ArmStack {
    /// The top of the stack (highest address). Pushes pre-decrement, so
    /// this address itself is never written through.
    /// Safety: NonNull<u32> can safely be covariant over u32.
    top: NonNull<u32>,
    /// The current offset from the top of the stack, in `u32` words.
    sp: StackPtr,
    /// The size of the stack, in `u32` words (`in_bounds` computes the
    /// bottom as `top - size` in word units).
    size: NonZero<usize>,
}
68
69impl ArmStack {
70    fn does_fit(&self, size: usize) -> bool {
71        size <= (self.size.get() - self.sp.offset()) * size_of::<u32>()
72    }
73
74    fn is_call_aligned(sp: StackPtr) -> bool {
75        sp.offset.is_multiple_of(2)
76    }
77
78    fn in_bounds(&self, sp: *mut u32) -> Option<usize> {
79        if let Some(sp) = NonNull::new(sp) {
80            if sp > self.top {
81                return None;
82            }
83
84            if sp < unsafe { self.top.sub(self.size.get()) } {
85                return None;
86            }
87
88            return Some(unsafe { self.top.as_ptr().offset_from(sp.as_ptr()) as usize });
89        }
90
91        None
92    }
93
94    #[inline(always)]
95    unsafe fn push(sp: &mut NonNull<u32>, value: u32) {
96        unsafe {
97            *sp = sp.sub(1);
98            *sp.as_ptr() = value;
99        };
100    }
101
102    fn push_irq_ret_fn(
103        &mut self,
104        f: extern "C" fn(),
105        fin: Option<extern "C" fn() -> !>,
106    ) -> Result<()> {
107        const FRAME_WORDS: usize = 18;
108        const WORD: usize = core::mem::size_of::<u32>();
109
110        // TODO: find out if this is Cortex-M4 specific
111        const EXEC_RETURN_THREAD_PSP: u32 = 0xFFFFFFFD;
112        // TODO: this is thumb specific
113        const XPSR_THUMB: u32 = 1 << 24;
114
115        let needed_size = FRAME_WORDS * WORD;
116
117        if !self.does_fit(needed_size) {
118            return Err(hal_api::Error::OutOfMemory(needed_size));
119        }
120
121        // We push an odd number of words, so if the stack is already call-aligned (DOUBLEWORD), we need to add padding.
122        if !Self::is_call_aligned(self.sp) {
123            self.sp = self.sp.checked_add(1).ok_or(hal_api::Error::default())?;
124        }
125
126        // Pushes a function context onto the stack, which will be executed when the IRQ returns.
127        // The layout is as follows:
128        // xPSR
129        // PC (entry point)
130        // LR (function to return after the thread is done)
131        // R12 (scratch register)
132        // R3 (argument to the function - 0)
133        // R2 (argument to the function - 0)
134        // R1 (argument to the function - 0)
135        // R0 (argument to the function - 0)
136        // LR (EXEC_RETURN)
137        // R12 (dummy for alignment)
138        // R11 - R4 (scratch - 0)
139
140        println!(
141            "Pushing IRQ return frame: sp offset {}, top: {:p}\n",
142            self.sp.offset(),
143            self.top
144        );
145
146        unsafe {
147            let mut write_index = self.sp.as_ptr(self.top);
148
149            Self::push(&mut write_index, XPSR_THUMB);
150            // Function pointer on arm is a 32bit address.
151            Self::push(&mut write_index, f as usize as u32 | 1);
152            let finalizer = fin.unwrap_or(default_finalizer);
153            Self::push(&mut write_index, finalizer as usize as u32 | 1);
154
155            // R12 - R0
156            for _ in 0..5 {
157                Self::push(&mut write_index, 0);
158            }
159
160            // Tells the hw to return to thread mode and use the PSP after the exception.
161            Self::push(&mut write_index, EXEC_RETURN_THREAD_PSP);
162
163            // R12 (dummy), R11 - R10
164            for _ in 0..4 {
165                Self::push(&mut write_index, 0);
166            }
167
168            // R8 - R4
169            for _ in 0..5 {
170                Self::push(&mut write_index, 0);
171            }
172
173            // We should have written exactly FRAME_WORDS words.
174            debug_assert!(write_index == self.top.sub(self.sp.offset() + FRAME_WORDS));
175
176            self.sp += FRAME_WORDS;
177        }
178
179        // The returned stack pointer must be call-aligned.
180        debug_assert!(Self::is_call_aligned(self.sp));
181        Ok(())
182    }
183}
184
185impl hal_api::stack::Stacklike for ArmStack {
186    type ElemSize = u32;
187    type StackPtr = StackPtr;
188
189    unsafe fn new(desc: Descriptor) -> Result<Self>
190    where
191        Self: Sized,
192    {
193        let Descriptor {
194            top,
195            size,
196            entry,
197            fin,
198        } = desc;
199
200        // We expect a PhysAddr, which can be converted to a ptr on nommu.
201        let top = NonNull::new(top.as_mut_ptr::<u32>())
202            .ok_or(hal_api::Error::InvalidAddress(top.as_usize()))?;
203
204        let mut stack = Self {
205            top,
206            sp: StackPtr { offset: 0 },
207            size,
208        };
209
210        stack.push_irq_ret_fn(entry, fin)?;
211        Ok(stack)
212    }
213
214    fn create_sp(&self, ptr: *mut c_void) -> Result<StackPtr> {
215        if let Some(offset) = self.in_bounds(ptr as *mut u32) {
216            return Ok(StackPtr { offset });
217        }
218
219        Err(hal_api::Error::OutOfBoundsPtr(
220            ptr as usize,
221            Range {
222                start: self.top.as_ptr() as usize - self.size.get(),
223                end: self.top.as_ptr() as usize,
224            },
225        ))
226    }
227
228    fn set_sp(&mut self, sp: StackPtr) {
229        self.sp = sp;
230    }
231
232    fn sp(&self) -> *mut c_void {
233        self.sp.as_ptr(self.top).as_ptr() as *mut c_void
234    }
235}