1/*
2 * SPDX-License-Identifier: BSD-3-Clause
3 * SPDX-FileCopyrightText: Copyright TF-RMM Contributors.
4 */
5#ifndef ASM_MACROS_S
6#define ASM_MACROS_S
7
/*
 * Emit a TLB invalidate instruction. '_type' is the TLBI operation
 * name and is passed straight through to the 'tlbi' instruction.
 */
#define TLB_INVALIDATE(_type) \
	tlbi	_type

/*
 * Minimal ENTRY/ENDPROC markers for assembly sources written against
 * this naming convention. ENTRY exports the label; ENDPROC expands to
 * nothing here (symbol type/size handling is done by func/endfunc).
 */
#define ENTRY(x) .global x; x
#define ENDPROC(x)
13
	/*
	 * This macro is used to create a function label and place the
	 * code into a separate text section based on the function name
	 * to enable elimination of unused code during linking. It also adds
	 * basic debug information to enable call stack printing most of the
	 * time. The optional _align parameter can be used to force a
	 * non-standard alignment (indicated in powers of 2). The default is
	 * _align=2 because aarch64 instructions must be word aligned.
	 * Do *not* try to use a raw .align directive. Since func
	 * switches to a new section, this would not have the desired effect.
	 */
	.macro func _name, _align=2
	/*
	 * Add Call Frame Information entry in the .debug_frame section for
	 * debugger consumption. This enables callstack printing in debuggers.
	 * This does not use any space in the final loaded binary, only in the
	 * ELF file.
	 * Note that a function manipulating the CFA pointer location (i.e. the
	 * x29 frame pointer on AArch64) should declare it using the
	 * appropriate .cfi* directives, or be prepared to have a degraded
	 * debugging experience.
	 */
	.cfi_sections .debug_frame
	/* Per-function text section so the linker can drop unused code. */
	.section .text.asm.\_name, "ax"
	.type \_name, %function
	/*
	 * .cfi_startproc and .cfi_endproc are needed to output entries in
	 * .debug_frame
	 */
	.cfi_startproc
	.align \_align
	\_name:
	/*
	 * BTI landing pad valid for both calls ('c') and jumps ('j').
	 * Encoded in the HINT space, so it executes as a NOP on cores
	 * without BTI support.
	 */
	bti	jc
	.endm
48
	/*
	 * This macro is used to mark the end of a function. It closes the
	 * CFI entry opened by 'func' and records the function's size in
	 * the symbol table (useful for objdump, debuggers and size tools).
	 */
	.macro endfunc _name
		.cfi_endproc
		.size \_name, . - \_name
	.endm
56
57
	/*
	 * Return the smallest data cache line size, in bytes, in '\reg'.
	 * Clobbers '\tmp'.
	 *
	 * CTR_EL0.DminLine (bits [19:16]) holds log2 of the number of
	 * 4-byte words in the smallest data cache line covered by the
	 * cache hierarchy, so the size in bytes is 4 << DminLine.
	 */
	.macro	dcache_line_size  reg, tmp
	mrs	\tmp, ctr_el0
	ubfx	\tmp, \tmp, #16, #4
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm
64
	/*
	 * Declare the exception vector table, enforcing it is aligned on a
	 * 2KB boundary, as required by the ARMv8 architecture.
	 * Use zero bytes as the fill value to be stored in the padding bytes
	 * so that it inserts illegal AArch64 instructions (an all-zero word
	 * decodes as UDF #0). This increases security, robustness and
	 * potentially facilitates debugging.
	 */
	.macro vector_base  label, section_name=.vectors
	.section \section_name, "ax"
	.align 11, 0
	\label:
	.endm
77
	/*
	 * Create an entry in the exception vector table, enforcing it is
	 * aligned on a 128-byte boundary, as required by the ARMv8 architecture.
	 * Use zero bytes as the fill value to be stored in the padding bytes
	 * so that it inserts illegal AArch64 instructions (an all-zero word
	 * decodes as UDF #0). This increases security, robustness and
	 * potentially facilitates debugging.
	 *
	 * Must be closed with 'end_vector_entry', which terminates the CFI
	 * entry opened here and pads the slot to its full 128-byte size.
	 */
	.macro vector_entry  label, section_name=.vectors
	.cfi_sections .debug_frame
	.section \section_name, "ax"
	.align 7, 0
	.type \label, %function
	.cfi_startproc
	\label:
	.endm
93
	/*
	 * Add the bytes until fill the full exception vector, whose size is always
	 * 32 instructions. If there are more than 32 instructions in the
	 * exception vector then an error is emitted: the '.fill' repeat count
	 * (\label + 128 - .) becomes negative, which the assembler rejects.
	 */
	.macro end_vector_entry label
	.cfi_endproc
	.fill	\label + (32 * 4) - .
	.endm
103
104	/*
105	 * Helper macro to generate the best mov/movk combinations according
106	 * the value to be moved. The 16 bits from '_shift' are tested and
107	 * if not zero, they are moved into '_reg' without affecting
108	 * other bits.
109	 */
110	.macro _mov_imm16 _reg, _val, _shift
111		.if (\_val >> \_shift) & 0xffff
112			.if (\_val & (1 << \_shift - 1))
113				movk	\_reg, (\_val >> \_shift) & 0xffff, LSL \_shift
114			.else
115				mov	\_reg, \_val & (0xffff << \_shift)
116			.endif
117		.endif
118	.endm
119
	/*
	 * Helper macro to load arbitrary values into 32 or 64-bit registers
	 * which generates the best mov/movk combinations. Many base addresses
	 * are 64KB aligned; in that case the macro will eliminate updating
	 * bits 15:0. '_val' must be an assembly-time constant expression.
	 */
	.macro mov_imm _reg, _val
		.if (\_val) == 0
			mov	\_reg, #0
		.else
			/* One mov/movk is emitted per non-zero 16-bit chunk. */
			_mov_imm16	\_reg, (\_val), 0
			_mov_imm16	\_reg, (\_val), 16
			_mov_imm16	\_reg, (\_val), 32
			_mov_imm16	\_reg, (\_val), 48
		.endif
	.endm
136
	/*
	 * Assembler panic. At the moment there is no support for crash
	 * reporting in assembler without having a stack available, so for
	 * the time being just enter into a busy loop and stay there
	 * ('b .' branches to itself forever).
	 */
	.macro asm_panic
		b	.
	.endm
145
	/*
	 * Assembler macro to enable asm_assert. Use this macro wherever
	 * assert is required in assembly. Please note that the macro makes
	 * use of label '300' to provide the logic and the caller
	 * should make sure that this label is not used to branch prior
	 * to calling this macro.
	 *
	 * '_cc' is the condition code under which the assertion HOLDS
	 * (e.g. 'eq'). When the condition fails, x0 is loaded with the
	 * address of the file name string and x1 with the line number,
	 * then the core parks in asm_panic. x0/x1 are only clobbered on
	 * the failure path.
	 */
	.macro ASM_ASSERT _cc
		/*
		 * Emit the file name string only once per translation unit,
		 * into a mergeable read-only string section.
		 */
		.ifndef .L_assert_filename
			.pushsection .rodata.str1.1, "aS"
			.L_assert_filename:
				.string	__FILE__
			.popsection
		.endif
		/* Condition holds: skip the panic sequence. */
		b.\_cc	300f
		adr	x0, .L_assert_filename
		mov	x1, __LINE__
		asm_panic
	300:
	.endm
166
	/*
	 * Vector entry for an exception that is never expected to be taken:
	 * park the core in a low-power loop ('b .-4' branches back to the
	 * 'wfe'). The 'error_message' argument is not expanded anywhere;
	 * it serves purely as in-source documentation at the call site.
	 */
	.macro ventry_unused error_message
	.balign	0x80
	wfe
	b	.-4
	.endm
172
	/*
	 * Vector entry: align to the 128-byte vector slot size and branch
	 * to the actual handler.
	 */
	.macro ventry label
		.balign	0x80
		b	\label
	.endm
177
	/*
	 * VBAR_EL3[10:0] are hardwired to 0, align vector address accordingly.
	 * NOTE(review): RMM code runs at R-EL2, so VBAR_EL2 is presumably the
	 * register meant here — the 2KB alignment requirement is the same for
	 * both; confirm which EL this comment should name.
	 */
	.balign 0x800
180
181#endif /* ASM_MACROS_S */
182