1 /* SPDX-License-Identifier: GPL-2.0+ */
2 /*
3  * Copyright (C) 2005 - 2013 Tensilica Inc.
4  * Copyright (C) 2014 - 2016 Cadence Design Systems Inc.
5  */
6 
7 #ifndef _XTENSA_ASMMACRO_H
8 #define _XTENSA_ASMMACRO_H
9 
10 #include <asm/arch/core.h>
11 
12 /*
13  * Function entry and return macros for supported ABIs.
14  */
15 
#if defined(__XTENSA_WINDOWED_ABI__)
/* Windowed ABI: rotate the register window on entry and reserve a
 * 16-byte base stack frame; return with the window-restoring retw. */
#define abi_entry	entry	sp, 16
#define abi_ret		retw
#elif defined(__XTENSA_CALL0_ABI__)
/* Call0 ABI: no register windows, so entry is a no-op and return is
 * a plain ret. */
#define abi_entry
#define abi_ret		ret
#else
#error Unsupported Xtensa ABI
#endif
25 
26 /*
27  * Some little helpers for loops. Use zero-overhead-loops
28  * where applicable and if supported by the processor.
29  *
 * __loopi ar, at, size, incr
 *         ar	register initialized with the start address
 *	   at	scratch register used by macro
 *	   size	size immediate value
 *	   incr	increment
35  *
36  * __loops ar, as, at, inc_log2[, mask_log2][, cond][, ncond]
37  *	   ar	register initialized with the start address
38  *	   as	register initialized with the size
 *	   at	scratch register used by macro
40  *	   inc_log2	increment [in log2]
41  *	   mask_log2	mask [in log2]
42  *	   cond		true condition (used in loop'cond')
43  *	   ncond	false condition (used in b'ncond')
44  *
45  * __loop  as
46  *	   restart loop. 'as' register must not have been modified!
47  *
48  * __endla ar, as, incr
49  *	   ar	start address (modified)
50  *	   as	scratch register used by __loops/__loopi macros or
51  *		end address used by __loopt macro
 *		incr	increment
53  */
54 
55 #if XCHAL_HAVE_LOOPS
56 
/* Immediate-size loop: set up a zero-overhead hardware loop ending at
 * the 99: label emitted by __endl/__endla.
 * trip count = ceil(size / incr), computed at assembly time. */
.macro	__loopi ar, at, size, incr
	movi	\at, ((\size + \incr - 1) / (\incr))
	loop	\at, 99f
.endm
61 
/* Register-size loop: derive the hardware-loop trip count in \at from
 * the byte size in \as, then start a zero-overhead loop ending at 99:.
 * The loop variant is selected by \cond (e.g. "nez" -> loopnez). */
.macro	__loops	ar, as, at, incr_log2, mask_log2, cond, ncond
	/* .ifgt is true for expressions > 0, so the size is only rounded
	 * up and shifted when incr_log2 >= 2; for smaller increments \at
	 * is left untouched (NOTE(review): callers presumably arrange
	 * \at == \as in that case — confirm at call sites). */
	.ifgt \incr_log2 - 1
		addi	\at, \as, (1 << \incr_log2) - 1
		/* \mask_log2 given: keep only mask_log2 bits of the count */
		.ifnc \mask_log2,
			extui	\at, \at, \incr_log2, \mask_log2
		.else
			srli	\at, \at, \incr_log2
		.endif
	.endif
	loop\cond	\at, 99f
.endm
73 
/* Loop bounded by an end address: \ar = start, \as = end.
 * trip count = ceil((\as - \ar) >> incr_log2), placed in \at. */
.macro	__loopt	ar, as, at, incr_log2
	sub	\at, \as, \ar
	/* only scale when the increment is >= 4 bytes (incr_log2 >= 2) */
	.ifgt	\incr_log2 - 1
		addi	\at, \at, (1 << \incr_log2) - 1
		srli	\at, \at, \incr_log2
	.endif
	loop	\at, 99f
.endm
82 
/* Restart a loop whose trip count is already in \as (which must not
 * have been modified since the previous loop setup). */
.macro	__loop	as
	loop	\as, 99f
.endm
86 
/* Close a hardware loop: just emit the 99: end label the loop
 * instruction targets; no branch is needed with zero-overhead loops. */
.macro	__endl	ar, as
99:
.endm
90 
91 #else
92 
/* Immediate-size loop without zero-overhead-loop hardware: \at holds
 * the end address (start + size); __endl/__endla branch back to the
 * 98: label below until \ar reaches it.
 * (The former "movi \at, count" here was dead code — its result was
 * immediately overwritten by the addi — and has been removed.) */
.macro	__loopi ar, at, size, incr
	addi	\at, \ar, \size
98:
.endm
98 
/* Register-size loop without zero-overhead-loop hardware: compute the
 * end address in \at and open the 98: backward-branch target used by
 * __endl. An optional \ncond pre-test skips the whole loop (forward
 * branch to 99:) when the count tests false. */
.macro	__loops	ar, as, at, incr_log2, mask_log2, cond, ncond
	.ifnc \mask_log2,
		/* masked count: extract mask_log2 bits at incr_log2 */
		extui	\at, \as, \incr_log2, \mask_log2
	.else
		.ifnc \ncond,
			/* count needed only for the pre-test below */
			srli	\at, \as, \incr_log2
		.endif
	.endif
	.ifnc \ncond,
		/* skip the loop entirely when the count fails \ncond */
		b\ncond	\at, 99f

	.endif
	.ifnc \mask_log2,
		/* end = start + (count << incr_log2) */
		slli	\at, \at, \incr_log2
		add	\at, \ar, \at
	.else
		/* end = start + size in bytes */
		add	\at, \ar, \as
	.endif
98:
.endm
119 
/* End-address loop without zero-overhead-loop hardware: \as already
 * holds the end address __endl compares against, so only the 98:
 * loop-top label is emitted here. */
.macro	__loopt	ar, as, at, incr_log2
98:
.endm
123 
/* Restart a loop without zero-overhead-loop hardware: emit the 98:
 * label __endl branches back to. */
.macro	__loop	as
98:
.endm
127 
/* Close a software loop: branch back to 98: while the running pointer
 * \ar is still (unsigned) below the end address \as; 99: is the
 * loop-exit target used by the optional \ncond pre-test. */
.macro	__endl	ar, as
	bltu	\ar, \as, 98b
99:
.endm
132 
133 #endif
134 
/* Common loop tail: advance the address register \ar by one increment,
 * then close the loop via __endl (branch or label depending on
 * XCHAL_HAVE_LOOPS). */
.macro	__endla	ar, as, incr
	addi	\ar, \ar, \incr
	__endl	\ar \as
.endm
139 
140 #endif /* _XTENSA_ASMMACRO_H */
141