/*
 * Copyright (c) 2006-2018, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2009-10-11     Bernard      first version
 * 2012-01-01     aozima       support FPU register load/store during context switch.
 * 2013-06-18     aozima       add restore MSP feature.
 * 2013-06-23     aozima       support lazy stack optimization.
 * 2018-07-24     aozima       enhance the hard fault exception handler.
 */

/**
 * @addtogroup cortex-m7
 */
/*@{*/

#include <rtconfig.h>

.cpu cortex-m7
.syntax unified
.thumb
.text

.equ    SCB_VTOR,           0xE000ED08              /* Vector Table Offset Register */
.equ    NVIC_INT_CTRL,      0xE000ED04              /* Interrupt Control and State Register (ICSR) */
.equ    NVIC_SYSPRI2,       0xE000ED20              /* System Handler Priority Register 3 (SHPR3) */
.equ    NVIC_PENDSV_PRI,    0xFFFF0000              /* PendSV and SysTick priority value (lowest) */
.equ    NVIC_PENDSVSET,     0x10000000              /* value to trigger PendSV exception */

/*
 * rt_base_t rt_hw_interrupt_disable();
 */
.global rt_hw_interrupt_disable
.type rt_hw_interrupt_disable, %function
rt_hw_interrupt_disable:
    MRS     r0, PRIMASK         /* return the current PRIMASK value */
    CPSID   I                   /* disable interrupts by setting PRIMASK */
    BX      LR

/*
 * void rt_hw_interrupt_enable(rt_base_t level);
 */
.global rt_hw_interrupt_enable
.type rt_hw_interrupt_enable, %function
rt_hw_interrupt_enable:
    MSR     PRIMASK, r0         /* restore PRIMASK from the saved level */
    BX      LR
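
/*
 * Typical usage from C code (a sketch of the usual RT-Thread critical
 * section pattern; the body of the critical section is illustrative):
 *
 *     rt_base_t level;
 *
 *     level = rt_hw_interrupt_disable();
 *     ... access data shared with interrupt handlers ...
 *     rt_hw_interrupt_enable(level);
 */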

/*
 * void rt_hw_context_switch(rt_uint32 from, rt_uint32 to);
 * r0 --> from
 * r1 --> to
 */
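/*
 * Note: no registers are switched here. This routine only records the
 * addresses where the from/to thread stack pointers are stored and pends
 * the PendSV exception; the actual save/restore happens in PendSV_Handler.
 */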
.global rt_hw_context_switch_interrupt
.type rt_hw_context_switch_interrupt, %function
.global rt_hw_context_switch
.type rt_hw_context_switch, %function

rt_hw_context_switch_interrupt:
rt_hw_context_switch:
    /* set rt_thread_switch_interrupt_flag to 1 */
    LDR     r2, =rt_thread_switch_interrupt_flag
    LDR     r3, [r2]
    CMP     r3, #1
    BEQ     _reswitch
    MOV     r3, #1
    STR     r3, [r2]

    LDR     r2, =rt_interrupt_from_thread   /* set rt_interrupt_from_thread */
    STR     r0, [r2]

_reswitch:
    LDR     r2, =rt_interrupt_to_thread     /* set rt_interrupt_to_thread */
    STR     r1, [r2]

    LDR r0, =NVIC_INT_CTRL              /* trigger the PendSV exception (causes context switch) */
    LDR r1, =NVIC_PENDSVSET
    STR r1, [r0]
    BX  LR

/* r0 --> switch from thread stack
 * r1 --> switch to thread stack
 * xpsr, pc, lr, r12, r3, r2, r1, r0 are pushed onto the [from] stack by hardware
 */
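/*
 * Layout of a saved thread context, as produced by the save path below
 * (lowest address first; the value stored in the thread's sp field points
 * at the start of this block). Shown as an illustrative C struct; the
 * field names are a sketch, not necessarily the exact definitions used by
 * the C port code:
 *
 *     struct thread_context
 *     {
 *     #if defined (__VFP_FP__) && !defined(__SOFTFP__)
 *         rt_uint32_t flag;               // 1 if d8 - d15 were saved
 *     #endif
 *         rt_uint32_t r4_to_r11[8];       // saved by PendSV_Handler
 *     #if defined (__VFP_FP__) && !defined(__SOFTFP__)
 *         rt_uint64_t d8_to_d15[8];       // present only when flag == 1
 *     #endif
 *         rt_uint32_t hw_frame[8];        // r0-r3, r12, lr, pc, xpsr, stacked by hardware
 *                                         // (plus s0-s15 and FPSCR when the FPU was active)
 *     };
 */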
.global PendSV_Handler
.type PendSV_Handler, %function
PendSV_Handler:
    /* disable interrupts to protect the context switch */
    MRS r2, PRIMASK
    CPSID   I

    /* get rt_thread_switch_interrupt_flag */
    LDR r0, =rt_thread_switch_interrupt_flag
    LDR r1, [r0]
    CBZ r1, pendsv_exit         /* pendsv already handled */

    /* clear rt_thread_switch_interrupt_flag to 0 */
    MOV r1, #0x00
    STR r1, [r0]

    LDR r0, =rt_interrupt_from_thread
    LDR r1, [r0]
    CBZ r1, switch_to_thread    /* skip the register save on the first switch */

    MRS r1, psp                 /* get the from thread stack pointer */

#if defined (__VFP_FP__) && !defined(__SOFTFP__)
    TST     lr, #0x10           /* if(!EXC_RETURN[4]) */
    IT      EQ
    VSTMDBEQ r1!, {d8 - d15}    /* push FPU registers s16 - s31 */
#endif

    STMFD   r1!, {r4 - r11}     /* push the r4 - r11 registers */

#if defined (__VFP_FP__) && !defined(__SOFTFP__)
    MOV     r4, #0x00           /* flag = 0 */

    TST     lr, #0x10           /* if(!EXC_RETURN[4]) */
    IT      EQ
    MOVEQ   r4, #0x01           /* flag = 1 */

    STMFD   r1!, {r4}           /* push flag */
#endif

    LDR r0, [r0]                /* r0 = rt_interrupt_from_thread, the address holding the from thread's sp */
    STR r1, [r0]                /* update the from thread stack pointer */

switch_to_thread:
    LDR r1, =rt_interrupt_to_thread
    LDR r1, [r1]
    LDR r1, [r1]                /* load the to thread stack pointer */

#if defined (__VFP_FP__) && !defined(__SOFTFP__)
    LDMFD   r1!, {r3}           /* pop flag */
#endif

    LDMFD   r1!, {r4 - r11}     /* pop the r4 - r11 registers */

#if defined (__VFP_FP__) && !defined(__SOFTFP__)
    CMP     r3,  #0             /* if(flag_r3 != 0) */
    IT      NE
    VLDMIANE  r1!, {d8 - d15}   /* pop FPU registers s16 - s31 */
#endif

    MSR psp, r1                 /* update the stack pointer */

#if defined (__VFP_FP__) && !defined(__SOFTFP__)
    ORR     lr, lr, #0x10       /* lr |=  (1 << 4): return with a basic frame, FPCA cleared */
    CMP     r3,  #0             /* if(flag_r3 != 0) */
    IT      NE
    BICNE   lr, lr, #0x10       /* lr &= ~(1 << 4): return with an FP frame, FPCA set */
#endif

#if defined (RT_USING_MEM_PROTECTION)
    PUSH    {r0-r3, r12, lr}
    BL      rt_thread_self              /* r0 = current thread */
    BL      rt_hw_mpu_table_switch      /* switch the MPU table for that thread */
    POP     {r0-r3, r12, lr}
#endif

pendsv_exit:
    /* restore interrupt */
    MSR PRIMASK, r2

    ORR lr, lr, #0x04           /* EXC_RETURN[2] = 1: use PSP on exception return */
    BX  lr

/*
 * void rt_hw_context_switch_to(rt_uint32 to);
 * r0 --> to
 */
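/*
 * This routine starts the scheduler: it switches to the first thread
 * without saving any "from" context (in RT-Thread it is typically called
 * from rt_system_scheduler_start()).
 */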
.global rt_hw_context_switch_to
.type rt_hw_context_switch_to, %function
rt_hw_context_switch_to:
    LDR r1, =rt_interrupt_to_thread
    STR r0, [r1]

#if defined (__VFP_FP__) && !defined(__SOFTFP__)
    /* CLEAR CONTROL.FPCA */
    MRS     r2, CONTROL         /* read */
    BIC     r2, #0x04           /* modify */
    MSR     CONTROL, r2         /* write-back */
#endif

    /* set from thread to 0 */
    LDR r1, =rt_interrupt_from_thread
    MOV r0, #0x0
    STR r0, [r1]

    /* set interrupt flag to 1 */
    LDR     r1, =rt_thread_switch_interrupt_flag
    MOV     r0, #1
    STR     r0, [r1]

    /* set the PendSV and SysTick exception priority */
    LDR r0, =NVIC_SYSPRI2
    LDR r1, =NVIC_PENDSV_PRI
    LDR.W   r2, [r0,#0x00]       /* read       */
    ORR     r1, r1, r2           /* modify     */
    STR     r1, [r0]             /* write-back */

    LDR r0, =NVIC_INT_CTRL      /* trigger the PendSV exception (causes context switch) */
    LDR r1, =NVIC_PENDSVSET
    STR r1, [r0]

    /* restore MSP: reload the initial MSP value from the first entry
     * of the vector table pointed to by SCB->VTOR */
    LDR     r0, =SCB_VTOR
    LDR     r0, [r0]            /* r0 = vector table base address */
    LDR     r0, [r0]            /* r0 = vector table entry 0 = initial MSP */
    NOP
    MSR     msp, r0

    /* enable interrupts at processor level */
    CPSIE   F
    CPSIE   I

    /* ensure the pending PendSV exception is taken before any subsequent operation */
    DSB
    ISB

    /* never reach here! */

/* kept for compatibility with older versions */
.global rt_hw_interrupt_thread_switch
.type rt_hw_interrupt_thread_switch, %function
rt_hw_interrupt_thread_switch:
    BX  lr
    NOP

.global HardFault_Handler
.type HardFault_Handler, %function
HardFault_Handler:
    /* get current context */
    MRS     r0, msp                 /* get fault context from handler. */
    TST     lr, #0x04               /* if(!EXC_RETURN[2]) */
    BEQ     _get_sp_done
    MRS     r0, psp                 /* get fault context from thread. */
_get_sp_done:

    STMFD   r0!, {r4 - r11}         /* push the r4 - r11 registers */
#if defined (__VFP_FP__) && !defined(__SOFTFP__)
    STMFD   r0!, {lr}               /* push a dummy for the flag */
#endif
    STMFD   r0!, {lr}               /* push EXC_RETURN */

    TST     lr, #0x04               /* if(!EXC_RETURN[2]) */
    BEQ     _update_msp
    MSR     psp, r0                 /* update stack pointer to PSP. */
    B       _update_done
_update_msp:
    MSR     msp, r0                 /* update stack pointer to MSP. */
_update_done:

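    /* r0 now points at the saved fault context (EXC_RETURN, [flag,] r4 - r11,
     * followed by the hardware-stacked frame) and is passed as the first
     * argument to rt_hw_hard_fault_exception(). */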
    PUSH    {LR}
    BL      rt_hw_hard_fault_exception
    POP     {LR}

    ORR     lr, lr, #0x04
    BX      lr
263