/*
 * This file is part of the MicroPython project, http://micropython.org/
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2021 Yonatan Goldschmidt
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "py/mpstate.h" // needed for NLR defs

#if MICROPY_NLR_AARCH64

// AArch64 callee-saved registers are x19-x29.
// https://en.wikipedia.org/wiki/Calling_convention#ARM_(A64)

// nlr_push is implemented purely as top-level inline assembly: inside a C function we would have to undo the
// compiler-generated prologue and restore SP and LR before saving them. This way, we don't.
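//
// Usage sketch (illustrative only; this is the standard MicroPython NLR pattern, see py/nlr.h):
//
//     nlr_buf_t nlr;
//     if (nlr_push(&nlr) == 0) {
//         // ... code that may raise an exception ...
//         nlr_pop(); // success path: unlink the buffer
//     } else {
//         // nlr_jump() landed here; the raised object is in nlr.ret_val
//     }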
__asm(
    #if defined(__APPLE__) && defined(__MACH__)
    "_nlr_push:              \n"
    ".global _nlr_push       \n"
    #else
    "nlr_push:               \n"
    ".global nlr_push        \n"
    #endif
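    // sp cannot be named as a data register of stp (encoding 31 means xzr there), so copy it via x9 first.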
    "mov x9, sp              \n"
    "stp lr,  x9,  [x0,  #16]\n" // 16 == offsetof(nlr_buf_t, regs)
    "stp x19, x20, [x0,  #32]\n"
    "stp x21, x22, [x0,  #48]\n"
    "stp x23, x24, [x0,  #64]\n"
    "stp x25, x26, [x0,  #80]\n"
    "stp x27, x28, [x0,  #96]\n"
    "str x29,      [x0, #112]\n"
    #if defined(__APPLE__) && defined(__MACH__)
    "b _nlr_push_tail        \n" // do the rest in C
    #else
    "b nlr_push_tail         \n" // do the rest in C
    #endif
    );
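
// Resulting layout of nlr_buf_t.regs (at offset 16, 8-byte slots), as implied by the stores above:
//   regs[0]=lr, regs[1]=sp, regs[2..11]=x19..x28, regs[12]=x29.
// nlr_push_tail() (see py/nlr.c) then links the buffer onto the NLR chain and returns 0 to the caller.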

NORETURN void nlr_jump(void *val) {
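    // MP_NLR_JUMP_HEAD stores val into the innermost nlr_buf_t, unlinks it from the NLR chain, and makes it available as top.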
    MP_NLR_JUMP_HEAD(val, top)

    MP_STATIC_ASSERT(offsetof(nlr_buf_t, regs) == 16); // asm assumes it

    __asm volatile (
        "ldr x29,      [%0, #112]\n"
        "ldp x27, x28, [%0,  #96]\n"
        "ldp x25, x26, [%0,  #80]\n"
        "ldp x23, x24, [%0,  #64]\n"
        "ldp x21, x22, [%0,  #48]\n"
        "ldp x19, x20, [%0,  #32]\n"
        "ldp lr,  x9,  [%0,  #16]\n" // 16 == offsetof(nlr_buf_t, regs)
        "mov sp, x9              \n"
        "mov x0, #1              \n"  // non-local return
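        // ret branches to the lr restored above, so nlr_push's caller observes nlr_push returning 1.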
        "ret                     \n"
        :
        : "r" (top)
        :
        );

    MP_UNREACHABLE
}

#endif // MICROPY_NLR_AARCH64