/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright (c) 2020, Linaro Limited
 * Copyright (c) 2019-2021, Arm Limited
 */

#include <platform_config.h>

#include <arm64_macros.S>
#include <arm.h>
#include <asm.S>
#include <ffa.h>
#include <generated/asm-defines.h>
#include <kernel/thread.h>
#include <optee_ffa.h>

FUNC thread_ffa_msg_wait , :
	mov_imm	x0, FFA_MSG_WAIT	/* FID */
	mov	x1, #FFA_TARGET_INFO_MBZ /* Target info MBZ */
	mov	x2, #FFA_PARAM_MBZ	/* Param MBZ */
	mov	x3, #FFA_PARAM_MBZ	/* Param MBZ */
	mov	x4, #FFA_PARAM_MBZ	/* Param MBZ */
	mov	x5, #FFA_PARAM_MBZ	/* Param MBZ */
	mov	x6, #FFA_PARAM_MBZ	/* Param MBZ */
	mov	x7, #FFA_PARAM_MBZ	/* Param MBZ */
	b	.ffa_msg_loop
END_FUNC thread_ffa_msg_wait

	/* Caller provides x1, x3-x7 params */
LOCAL_FUNC ffa_msg_send_direct_resp , :
	mov_imm	x0, FFA_MSG_SEND_DIRECT_RESP_32	/* FID */
	mov	x2, #FFA_PARAM_MBZ	/* RES MBZ */

.ffa_msg_loop:
	/* Invoke SMC with caller provided parameters */
	smc	#0

	/* Store the parameters as struct thread_smc_args on stack */
	sub	sp, sp, #THREAD_SMC_ARGS_SIZE
	store_xregs sp, THREAD_SMC_ARGS_X0, 0, 7
	mov	x0, sp

	/* Parse and handle the message */
	bl	thread_spmc_msg_recv

	/* Load struct thread_smc_args into registers */
	load_xregs sp, THREAD_SMC_ARGS_X0, 0, 7
	add	sp, sp, #THREAD_SMC_ARGS_SIZE
	b	.ffa_msg_loop
END_FUNC ffa_msg_send_direct_resp

FUNC thread_std_smc_entry , :
	ror	w19, w0, #16	/* Save target info with src and dst swapped */
	bl	__thread_std_smc_entry
	mov	w20, w0		/* Save return value */

	/* Mask all maskable exceptions before switching to temporary stack */
	msr	daifset, #DAIFBIT_ALL
	bl	thread_get_tmp_sp
	mov	sp, x0

	bl	thread_state_free

	mov	w1, w19			/* Target info */
	mov	w3, w20			/* Return value */
	mov	x4, #FFA_PARAM_MBZ	/* Unused parameter */
	mov	x5, #FFA_PARAM_MBZ	/* Unused parameter */
	mov	x6, #FFA_PARAM_MBZ	/* Unused parameter */
	mov	x7, #FFA_PARAM_MBZ	/* Unused parameter */
	b	ffa_msg_send_direct_resp
END_FUNC thread_std_smc_entry

#ifdef CFG_SECURE_PARTITION
/* void spmc_sp_thread_entry(args) */
FUNC spmc_sp_thread_entry , :
	/* Store the parameters as struct thread_smc_args on stack */
	sub	sp, sp, #THREAD_SMC_ARGS_SIZE
	store_xregs sp, THREAD_SMC_ARGS_X0, 0, 7
	mov	x0, sp
	mov	x1, #0	/* Pass NULL pointer for caller_sp, coming from NW */
	bl	spmc_sp_msg_handler
	load_xregs sp, THREAD_SMC_ARGS_X0, 20, 27

	/* Mask all maskable exceptions before switching to temporary stack */
	msr	daifset, #DAIFBIT_ALL
	bl	thread_get_tmp_sp
	mov	sp, x0

	bl	thread_state_free

	/* Restore the FF-A arguments before the SMC instruction */
	mov	w0, w20
	mov	w1, w21
	mov	w2, w22
	mov	w3, w23
	mov	w4, w24
	mov	w5, w25
	mov	w6, w26
	mov	w7, w27
	b	.ffa_msg_loop
END_FUNC spmc_sp_thread_entry
#endif

/* void thread_rpc(struct thread_rpc_arg *rpc_arg) */
FUNC thread_rpc , :
	/* Read daif and create an SPSR */
	mrs	x1, daif
	orr	x1, x1, #(SPSR_64_MODE_EL1 << SPSR_64_MODE_EL_SHIFT)

	/* Mask all maskable exceptions before switching to temporary stack */
	msr	daifset, #DAIFBIT_ALL
	push	x0, xzr
	push	x1, x30
	bl	thread_get_ctx_regs
	ldr	x30, [sp, #8]	/* Reload x30 saved by "push x1, x30" above */
	store_xregs x0, THREAD_CTX_REGS_X19, 19, 30
	mov	x19, x0

#if defined(CFG_CORE_PAUTH)
	/* Save APIAKEY */
	read_apiakeyhi	x1
	read_apiakeylo	x2
	store_xregs	x0, THREAD_CTX_REGS_APIAKEY_HI, 1, 2
#endif

	bl	thread_get_tmp_sp
	pop	x1, xzr		/* Match "push x1, x30" above */
	mov	x2, sp
	str	x2, [x19, #THREAD_CTX_REGS_SP]
	ldr	x20, [sp]	/* Get pointer to rpc_arg[] */
	mov	sp, x0		/* Switch to tmp stack */
	/*
	 * We need to read rpc_arg[] early, because thread_state_suspend
	 * can invoke virt_unset_guest() which will unmap the pages where
	 * rpc_arg[] resides.
	 */
	load_wregs x20, 0, 21, 24	/* Load rpc_arg[] into w21-w24 */

	adr	x2, .thread_rpc_return
	mov	w0, #THREAD_FLAGS_COPY_ARGS_ON_RETURN
	bl	thread_state_suspend
	mov	w7, w0		/* Supply thread index */
	ldr	w0, =FFA_MSG_SEND_DIRECT_RESP_32
	mov	w1, w21
	mov	w2, #FFA_PARAM_MBZ
	mov	w3, #0		/* Error code = 0 */
	mov	w4, w22
	mov	w5, w23
	mov	w6, w24
	b	ffa_msg_send_direct_resp

.thread_rpc_return:
	/*
	 * At this point the stack pointer has been restored to the value
	 * stored in THREAD_CTX above.
	 *
	 * Jumps here from thread_resume above when RPC has returned. The
	 * IRQ and FIQ bits are restored to what they were when this
	 * function was originally entered. w0-w3 hold the values supplied
	 * to thread_resume_from_rpc() in a0-a3.
	 */
	pop	x16, xzr		/* Get pointer to rv[] */
	store_wregs x16, 0, 0, 3	/* Store w0-w3 into rv[] */
	ret
END_FUNC thread_rpc

/*
 * void thread_foreign_intr_exit(uint32_t thread_index)
 *
 * This function is jumped to at the end of macro foreign_intr_handler().
 * The current thread as indicated by @thread_index has just been
 * suspended. The job here is just to inform the normal world of the
 * thread ID to resume when returning.
 */
FUNC thread_foreign_intr_exit , :
	/* Load threads[w0].tsd.rpc_target_info into w1 */
	mov	x1, #THREAD_CTX_SIZE
	adr_l	x2, threads
	madd	x1, x1, x0, x2
	ldr	w1, [x1, #THREAD_CTX_TSD_RPC_TARGET_INFO]
	mov	x2, #FFA_PARAM_MBZ
	mov	w3, #FFA_PARAM_MBZ
	mov	w4, #OPTEE_FFA_YIELDING_CALL_RETURN_INTERRUPT
	mov	x5, #FFA_PARAM_MBZ
	mov	w6, #FFA_PARAM_MBZ
	mov	w7, w0
	b	ffa_msg_send_direct_resp
END_FUNC thread_foreign_intr_exit

BTI(emit_aarch64_feature_1_and	GNU_PROPERTY_AARCH64_FEATURE_1_BTI)