/*
 * Copyright (c) 2015 Travis Geiselbrecht
 *
 * Use of this source code is governed by a MIT-style
 * license that can be found in the LICENSE file or at
 * https://opensource.org/licenses/MIT
 */
#include <lk/asm.h>
#include <arch/riscv.h>
#include <arch/riscv/asm.h>
#include <arch/riscv/iframe.h>

/* void riscv_context_switch(
    struct riscv_context_switch_frame *oldcs,
    struct riscv_context_switch_frame *newcs); */
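/*
 * Only the callee-saved state needs to be switched here: the C ABI already
 * requires the caller of riscv_context_switch() to have spilled any
 * caller-saved registers it cares about. Judging from the REGOFF() slots used
 * below, the frame presumably looks roughly like this (the authoritative
 * definition lives in the arch headers):
 *
 *   struct riscv_context_switch_frame {
 *       unsigned long ra;          // REGOFF(0)
 *       unsigned long sp;          // REGOFF(1)
 *       unsigned long s0_s11[12];  // REGOFF(2) .. REGOFF(13)
 *   };
 */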
FUNCTION(riscv_context_switch)
    # a0 = oldcs
    # a1 = newcs
    STR    ra, REGOFF(0)(a0)
    STR    sp, REGOFF(1)(a0)
    STR    s0, REGOFF(2)(a0)
    STR    s1, REGOFF(3)(a0)
    STR    s2, REGOFF(4)(a0)
    STR    s3, REGOFF(5)(a0)
    STR    s4, REGOFF(6)(a0)
    STR    s5, REGOFF(7)(a0)
    STR    s6, REGOFF(8)(a0)
    STR    s7, REGOFF(9)(a0)
    STR    s8, REGOFF(10)(a0)
    STR    s9, REGOFF(11)(a0)
    STR    s10, REGOFF(12)(a0)
    STR    s11, REGOFF(13)(a0)

    LDR    s11, REGOFF(13)(a1)
    LDR    s10, REGOFF(12)(a1)
    LDR    s9, REGOFF(11)(a1)
    LDR    s8, REGOFF(10)(a1)
    LDR    s7, REGOFF(9)(a1)
    LDR    s6, REGOFF(8)(a1)
    LDR    s5, REGOFF(7)(a1)
    LDR    s4, REGOFF(6)(a1)
    LDR    s3, REGOFF(5)(a1)
    LDR    s2, REGOFF(4)(a1)
    LDR    s1, REGOFF(3)(a1)
    LDR    s0, REGOFF(2)(a1)
    LDR    sp, REGOFF(1)(a1)
    LDR    ra, REGOFF(0)(a1)

    ret
END_FUNCTION(riscv_context_switch)

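// save_regs: carve an iframe out of the current stack and spill the
// caller-saved registers plus xstatus/xepc into it, leaving a0/a1/a2 set up
// as the first arguments for riscv_exception_handler(). When \user == 1 the
// trap came from user space, so sp/gp/tp are also switched over to their
// kernel values using the XSCRATCH convention described above
// riscv_exception_entry below.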
.macro save_regs, user
    addi   sp, sp, -RISCV_IFRAME_LEN // RISCV_IFRAME_LEN is a multiple of 16, keeping the stack aligned in 32bit mode
.if \user == 1
    // save the user tp in the iframe, then recover the kernel tp from the top
    // of the kernel stack, where restore_regs stashed it on the way out to user space
    STR     tp, RISCV_IFRAME_TP(sp)
    LDR     tp, (RISCV_IFRAME_LEN - __riscv_xlen / 8)(sp) // this is where the top of the stack used to be

    STR     gp, RISCV_IFRAME_GP(sp)

    // fetch the user sp from the scratch register (zeroing it to mark that
    // we're now in the kernel) and save it in the iframe
    csrrw   gp, RISCV_CSR_XSCRATCH, zero
    STR     gp, RISCV_IFRAME_SP(sp)

    // recover gp for the kernel
.option push
.option norelax
    lla     gp, __global_pointer$
.option pop
.endif
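    // save the caller-saved registers; the C exception handler will preserve
    // the callee-saved ones per the ABI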
    STR    t6, RISCV_IFRAME_T(6)(sp)
    STR    t5, RISCV_IFRAME_T(5)(sp)
    STR    t4, RISCV_IFRAME_T(4)(sp)
    STR    t3, RISCV_IFRAME_T(3)(sp)
    STR    t2, RISCV_IFRAME_T(2)(sp)
    STR    t1, RISCV_IFRAME_T(1)(sp)
    STR    t0, RISCV_IFRAME_T(0)(sp)
    STR    a7, RISCV_IFRAME_A(7)(sp)
    STR    a6, RISCV_IFRAME_A(6)(sp)
    STR    a5, RISCV_IFRAME_A(5)(sp)
    STR    a4, RISCV_IFRAME_A(4)(sp)
    STR    a3, RISCV_IFRAME_A(3)(sp)
    STR    a2, RISCV_IFRAME_A(2)(sp)
    STR    a1, RISCV_IFRAME_A(1)(sp)
    STR    a0, RISCV_IFRAME_A(0)(sp)
    STR    ra, RISCV_IFRAME_RA(sp)
    csrr   t0, RISCV_CSR_XSTATUS
    STR    t0, RISCV_IFRAME_STATUS(sp)
    csrr   a0, RISCV_CSR_XCAUSE
    csrr   a1, RISCV_CSR_XEPC
    STR    a1, RISCV_IFRAME_EPC(sp)
    mv     a2, sp
    // args are set up for a call into riscv_exception_handler()
    // a0 = xcause
    // a1 = xepc
    // a2 = sp
.endm

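// restore_regs: undo save_regs. Reload xepc/xstatus and the general purpose
// registers from the iframe, then either pop the iframe (kernel return) or,
// for a user return, stash the kernel tp at the top of the kernel stack and
// re-arm XSCRATCH with that stack top so the next trap from user space can
// find its way back in.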
.macro restore_regs, user
    // put everything back
    LDR    t0, RISCV_IFRAME_EPC(sp)
    csrw   RISCV_CSR_XEPC, t0
    LDR    t0, RISCV_IFRAME_STATUS(sp)
    csrw   RISCV_CSR_XSTATUS, t0

    LDR    ra, RISCV_IFRAME_RA(sp)
    LDR    a0, RISCV_IFRAME_A(0)(sp)
    LDR    a1, RISCV_IFRAME_A(1)(sp)
    LDR    a2, RISCV_IFRAME_A(2)(sp)
    LDR    a3, RISCV_IFRAME_A(3)(sp)
    LDR    a4, RISCV_IFRAME_A(4)(sp)
    LDR    a5, RISCV_IFRAME_A(5)(sp)
    LDR    a6, RISCV_IFRAME_A(6)(sp)
    LDR    a7, RISCV_IFRAME_A(7)(sp)
    LDR    t0, RISCV_IFRAME_T(0)(sp)
    LDR    t1, RISCV_IFRAME_T(1)(sp)
    LDR    t2, RISCV_IFRAME_T(2)(sp)
    LDR    t3, RISCV_IFRAME_T(3)(sp)
    LDR    t4, RISCV_IFRAME_T(4)(sp)
    LDR    t5, RISCV_IFRAME_T(5)(sp)
    LDR    t6, RISCV_IFRAME_T(6)(sp)
.if \user == 1
    // before we run out of registers, save tp to the top of the kernel stack
    // and put the kernel stack in the scratch register
    addi   gp, sp, RISCV_IFRAME_LEN
    STR    tp, REGOFF(-1)(gp)
    csrw   RISCV_CSR_XSCRATCH, gp

    LDR    tp, RISCV_IFRAME_TP(sp)
    LDR    gp, RISCV_IFRAME_GP(sp)
    LDR    sp, RISCV_IFRAME_SP(sp)
.else
    addi   sp, sp, RISCV_IFRAME_LEN
.endif
.endm

// top-level exception handler for RISC-V in non-vectored mode
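// Scratch register convention: RISCV_CSR_XSCRATCH holds zero while the cpu is
// running in the kernel and holds the kernel stack pointer to use on the next
// trap while it is running in user space (save_regs/restore_regs maintain
// this). After the csrrw below, sp holds the old scratch value, so a nonzero
// sp means the trap came from user space.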
.balign 4
FUNCTION(riscv_exception_entry)
    // check to see if we came from user space
    csrrw   sp, RISCV_CSR_XSCRATCH, sp
    bnez    sp, 1f

    // put the stack back
    csrrw   sp, RISCV_CSR_XSCRATCH, sp
    j       kernel_exception_entry

1:
    // came from user space
    j       user_exception_entry
END_FUNCTION(riscv_exception_entry)

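// Both paths below call into the C handler with the arguments set up by
// save_regs: a0 = xcause, a1 = xepc, a2 = pointer to the iframe on the stack,
// a3 = bool kernel. The exact C prototype of riscv_exception_handler() lives
// in the arch code; the shape above is inferred from this register setup.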
LOCAL_FUNCTION(kernel_exception_entry)
    // we came from kernel space so tp and gp are okay
    save_regs 0

    // bool kernel = true
    li     a3, 1
    call   riscv_exception_handler

    restore_regs 0

    RISCV_XRET
END_FUNCTION(kernel_exception_entry)

LOCAL_FUNCTION(user_exception_entry)
    // we came from user space, assume gp and tp have been trashed
    save_regs 1

    // bool kernel = false
    li     a3, 0
    call   riscv_exception_handler

    restore_regs 1

    RISCV_XRET
END_FUNCTION(user_exception_entry)