/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright (c) 2014, STMicroelectronics International N.V.
 */

#include <asm.S>
#include <kernel/tz_proc_def.h>
#include <kernel/tz_ssvce_def.h>
#include <platform_config.h>

#define PL310_LOCKDOWN_NBREGS	8
#define PL310_LOCKDOWN_SZREG	4

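/*
 * Way masks written to the by-way operation registers: bits [7:0] select
 * ways 0-7 of an 8-way cache, bits [15:8] additionally select ways 8-15
 * when the PL310 is configured with 16 ways.
 */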
#define PL310_8WAYS_MASK	0x00FF
#define PL310_16WAYS_UPPERMASK	0xFF00

/*
 * void arm_cl2_lockallways(vaddr_t base)
 *
 * lock all L2 cache ways for data and instruction
 */
FUNC arm_cl2_lockallways , :
	add	r1, r0, #PL310_DCACHE_LOCKDOWN_BASE
	ldr	r2, [r0, #PL310_AUX_CTRL]
	tst	r2, #PL310_AUX_16WAY_BIT
	mov	r2, #PL310_8WAYS_MASK
	orrne	r2, #PL310_16WAYS_UPPERMASK
	mov	r0, #PL310_LOCKDOWN_NBREGS
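	/*
	 * The PL310 provides one pair of D/I lockdown registers per master
	 * (PL310_LOCKDOWN_NBREGS pairs of PL310_LOCKDOWN_SZREG bytes each);
	 * write the way mask to every pair.
	 */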
1:	/* lock Dcache and Icache */
	str	r2, [r1], #PL310_LOCKDOWN_SZREG
	str	r2, [r1], #PL310_LOCKDOWN_SZREG
	subs	r0, r0, #1
	bne	1b

	mov	pc, lr
END_FUNC arm_cl2_lockallways

/*
 * Set the by-way operation mask according to the cache associativity.
 * Preserve r0 = pl310 iomem base address
 */
.macro syncbyway_set_mask reg
	ldr	\reg, [r0, #PL310_AUX_CTRL]
	tst	\reg, #PL310_AUX_16WAY_BIT
	mov	\reg, #PL310_8WAYS_MASK
	orrne	\reg, \reg, #PL310_16WAYS_UPPERMASK
.endm

/*
 * void arm_cl2_cleaninvbyway(vaddr_t base)
 * clean & invalidate the whole L2 cache.
 */
FUNC arm_cl2_cleaninvbyway , :

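	/* Start a clean & invalidate of all ways by writing the way mask */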
	syncbyway_set_mask r1
	str	r1, [r0, #PL310_FLUSH_BY_WAY]

	/* Wait for all cache ways to be cleaned and invalidated */
loop_cli_way_done:
	ldr	r2, [r0, #PL310_FLUSH_BY_WAY]
	and	r2, r2, r1
	cmp	r2, #0
	bne	loop_cli_way_done

	/* Cache Sync */

	/*
	 * Wait for any pending cache sync to complete before issuing a
	 * new one. On the PL310, Cache Sync is an atomic operation and
	 * there is no need to check the status; on the PL220 this check
	 * is needed. Keeping the loop does no harm on the PL310.
	 */
loop_cli_sync:
	ldr	r1, [r0, #PL310_SYNC]
	cmp	r1, #0
	bne	loop_cli_sync

	mov	r1, #0
	str	r1, [r0, #PL310_SYNC]

loop_cli_sync_done:
	ldr	r1, [r0, #PL310_SYNC]
	cmp	r1, #0
	bne	loop_cli_sync_done

	mov	pc, lr
END_FUNC arm_cl2_cleaninvbyway

/* void arm_cl2_invbyway(vaddr_t base) */
FUNC arm_cl2_invbyway , :

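	/* Start an invalidate of all ways by writing the way mask */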
	syncbyway_set_mask r1
	str	r1, [r0, #PL310_INV_BY_WAY]

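	/* Wait for all cache ways to be invalidated */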
loop_inv_way_done:
	ldr	r2, [r0, #PL310_INV_BY_WAY]
	and	r2, r2, r1
	cmp	r2, #0
	bne	loop_inv_way_done

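	/* Cache sync; see the note in arm_cl2_cleaninvbyway */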
loop_inv_way_sync:
	ldr	r1, [r0, #PL310_SYNC]
	cmp	r1, #0
	bne	loop_inv_way_sync

	mov	r1, #0
	str	r1, [r0, #PL310_SYNC]

loop_inv_way_sync_done:
	ldr	r1, [r0, #PL310_SYNC]
	cmp	r1, #0
	bne	loop_inv_way_sync_done

	mov	pc, lr
END_FUNC arm_cl2_invbyway

/* void arm_cl2_cleanbyway(vaddr_t base) */
FUNC arm_cl2_cleanbyway , :

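	/* Start a clean of all ways by writing the way mask */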
	syncbyway_set_mask r1
	str	r1, [r0, #PL310_CLEAN_BY_WAY]

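	/* Wait for all cache ways to be cleaned */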
loop_cl_way_done:
	ldr	r2, [r0, #PL310_CLEAN_BY_WAY]
	and	r2, r2, r1
	cmp	r2, #0
	bne	loop_cl_way_done

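	/* Cache sync; see the note in arm_cl2_cleaninvbyway */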
loop_cl_way_sync:
	ldr	r1, [r0, #PL310_SYNC]
	cmp	r1, #0
	bne	loop_cl_way_sync

	mov	r1, #0
	str	r1, [r0, #PL310_SYNC]

loop_cl_way_sync_done:
	ldr	r1, [r0, #PL310_SYNC]
	cmp	r1, #0
	bne	loop_cl_way_sync_done

	mov	pc, lr
END_FUNC arm_cl2_cleanbyway

/*
 * void _arm_cl2_xxxbypa(vaddr_t pl310_base, paddr_t start, paddr_t end,
 *			 int pl310value);
 * pl310value is one of PL310_CLEAN_BY_PA, PL310_INV_BY_PA or PL310_FLUSH_BY_PA
 */
LOCAL_FUNC _arm_cl2_xxxbypa , :
	/* Align the start address to the PL310 line size */
	and	r1, #(~(PL310_LINE_SIZE - 1))
#ifdef SCU_BASE
	/*
	 * ARM erratum #764369: write the undocumented SCU Diagnostic
	 * Control register before the cache maintenance by PA operations.
	 */
	/*
	 * NOTE:
	 * We're assuming that if the MMU is enabled, PL310_BASE and SCU_BASE
	 * still have the same relative offsets from each other.
	 */
	sub	r0, r0, #(PL310_BASE - SCU_BASE)
	mov	r12, #1
	str	r12, [r0, #SCU_ERRATA744369]
	dsb
	add	r0, r0, #(PL310_BASE - SCU_BASE)
#endif
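	/*
	 * Issue the maintenance operation for each cache line in the
	 * range and wait for it to complete before moving on.
	 */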
loop_cl2_xxxbypa:
	str	r1, [r0, r3]

loop_xxx_pa_done:
	ldr	r12, [r0, r3]
	and	r12, r12, r1
	cmp	r12, #0
	bne	loop_xxx_pa_done

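	/* Next line; loop until the line address passes the end address */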
	add	r1, r1, #PL310_LINE_SIZE
	cmp	r2, r1
	bpl	loop_cl2_xxxbypa

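	/* Cache sync; see the note in arm_cl2_cleaninvbyway */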
loop_xxx_pa_sync:
	ldr	r12, [r0, #PL310_SYNC]
	cmp	r12, #0
	bne	loop_xxx_pa_sync

	mov	r12, #0
	str	r12, [r0, #PL310_SYNC]

loop_xxx_pa_sync_done:
	ldr	r12, [r0, #PL310_SYNC]
	cmp	r12, #0
	bne	loop_xxx_pa_sync_done

	mov	pc, lr
END_FUNC _arm_cl2_xxxbypa

/*
 * void arm_cl2_cleanbypa(vaddr_t pl310_base, paddr_t start, paddr_t end);
 * clean L2 cache by physical address range.
 */
FUNC arm_cl2_cleanbypa , :
	mov	r3, #PL310_CLEAN_BY_PA
	b	_arm_cl2_xxxbypa
END_FUNC arm_cl2_cleanbypa

/*
 * void arm_cl2_invbypa(vaddr_t pl310_base, paddr_t start, paddr_t end);
 * invalidate L2 cache by physical address range.
 */
FUNC arm_cl2_invbypa , :
	mov	r3, #PL310_INV_BY_PA
	b	_arm_cl2_xxxbypa
END_FUNC arm_cl2_invbypa

/*
 * void arm_cl2_cleaninvbypa(vaddr_t pl310_base, paddr_t start, paddr_t end);
 * clean and invalidate L2 cache by physical address range.
 */
FUNC arm_cl2_cleaninvbypa , :
	mov	r3, #PL310_FLUSH_BY_PA
	b	_arm_cl2_xxxbypa
END_FUNC arm_cl2_cleaninvbypa