/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2022 Loongson Technology Corporation Limited
 */

#include <asm/export.h>
#include <asm/ftrace.h>
#include <asm/regdef.h>
#include <asm/stackframe.h>

	.text
/*
 * Due to -fpatchable-function-entry=2, the compiler inserts 2 NOPs before the
 * regular C function prologue. When the PC arrives here, the last 2
 * instructions executed are as follows:
 *
 *	move	t0, ra
 *	bl	callsite		(for modules, callsite is a trampoline)
 *
 * The module trampoline is as follows:
 *
 *	lu12i.w	t1, callsite[31:12]
 *	lu32i.d	t1, callsite[51:32]
 *	lu52i.d	t1, t1, callsite[63:52]
 *	jirl	zero, t1, callsite[11:0] >> 2
 *
 * See arch/loongarch/kernel/ftrace_dyn.c for details. Note that the T series
 * regs are available and safe here because every C function follows the
 * LoongArch psABI.
 */

	.macro	ftrace_regs_entry allregs=0
	PTR_ADDI	sp, sp, -PT_SIZE
	PTR_S		t0, sp, PT_R1		/* Save parent ra at PT_R1(RA) */
	PTR_S		a0, sp, PT_R4
	PTR_S		a1, sp, PT_R5
	PTR_S		a2, sp, PT_R6
	PTR_S		a3, sp, PT_R7
	PTR_S		a4, sp, PT_R8
	PTR_S		a5, sp, PT_R9
	PTR_S		a6, sp, PT_R10
	PTR_S		a7, sp, PT_R11
	PTR_S		fp, sp, PT_R22
	.if \allregs
	PTR_S		tp, sp, PT_R2
	PTR_S		t0, sp, PT_R12
	PTR_S		t1, sp, PT_R13
	PTR_S		t2, sp, PT_R14
	PTR_S		t3, sp, PT_R15
	PTR_S		t4, sp, PT_R16
	PTR_S		t5, sp, PT_R17
	PTR_S		t6, sp, PT_R18
	PTR_S		t7, sp, PT_R19
	PTR_S		t8, sp, PT_R20
	PTR_S		u0, sp, PT_R21
	PTR_S		s0, sp, PT_R23
	PTR_S		s1, sp, PT_R24
	PTR_S		s2, sp, PT_R25
	PTR_S		s3, sp, PT_R26
	PTR_S		s4, sp, PT_R27
	PTR_S		s5, sp, PT_R28
	PTR_S		s6, sp, PT_R29
	PTR_S		s7, sp, PT_R30
	PTR_S		s8, sp, PT_R31
	/* Clear PT_R0 so it can be used as a flag later. */
	PTR_S		zero, sp, PT_R0
	.endif
	PTR_S		ra, sp, PT_ERA		/* Save trace function ra at PT_ERA */
	PTR_ADDI	t8, sp, PT_SIZE
	PTR_S		t8, sp, PT_R3		/* Save original sp at PT_R3(SP) */
	.endm

SYM_FUNC_START(ftrace_stub)
	jr		ra
SYM_FUNC_END(ftrace_stub)

SYM_CODE_START(ftrace_common)
	PTR_ADDI	a0, ra, -8		/* arg0: ip */
	move		a1, t0			/* arg1: parent_ip */
	la.pcrel	t1, function_trace_op
	PTR_L		a2, t1, 0		/* arg2: op */
	move		a3, sp			/* arg3: regs */

SYM_INNER_LABEL(ftrace_call, SYM_L_GLOBAL)
	bl		ftrace_stub
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
SYM_INNER_LABEL(ftrace_graph_call, SYM_L_GLOBAL)
	nop					/* b ftrace_graph_caller */
#endif

/*
 * As this assembly code does not use the S series regs, and all calls go to
 * C functions, which save the S series regs themselves, there is no need to
 * restore them. The T series regs are available and safe at the callsite, so
 * there is no need to restore them either.
 */
ftrace_common_return:
	PTR_L		ra, sp, PT_R1
	PTR_L		a0, sp, PT_R4
	PTR_L		a1, sp, PT_R5
	PTR_L		a2, sp, PT_R6
	PTR_L		a3, sp, PT_R7
	PTR_L		a4, sp, PT_R8
	PTR_L		a5, sp, PT_R9
	PTR_L		a6, sp, PT_R10
	PTR_L		a7, sp, PT_R11
	PTR_L		fp, sp, PT_R22
	PTR_L		t0, sp, PT_ERA
	PTR_ADDI	sp, sp, PT_SIZE
	jr		t0
SYM_CODE_END(ftrace_common)

SYM_CODE_START(ftrace_caller)
	ftrace_regs_entry allregs=0
	b		ftrace_common
SYM_CODE_END(ftrace_caller)

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
SYM_CODE_START(ftrace_regs_caller)
	ftrace_regs_entry allregs=1
	b		ftrace_common
SYM_CODE_END(ftrace_regs_caller)
#endif

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
SYM_CODE_START(ftrace_graph_caller)
	PTR_L		a0, sp, PT_ERA
	PTR_ADDI	a0, a0, -8		/* arg0: self_addr */
	PTR_ADDI	a1, sp, PT_R1		/* arg1: parent */
	bl		prepare_ftrace_return
	b		ftrace_common_return
SYM_CODE_END(ftrace_graph_caller)

SYM_CODE_START(return_to_handler)
	/* Save return value regs */
	PTR_ADDI	sp, sp, -2 * SZREG
	PTR_S		a0, sp, 0
	PTR_S		a1, sp, SZREG

	move		a0, zero
	bl		ftrace_return_to_handler
	move		ra, a0			/* Set ra to the original return address */

	/* Restore return value regs */
	PTR_L		a0, sp, 0
	PTR_L		a1, sp, SZREG
	PTR_ADDI	sp, sp, 2 * SZREG

	jr		ra
SYM_CODE_END(return_to_handler)
#endif