1/*
2 * Copyright (c) 2010-2012, Freescale Semiconductor, Inc.
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without modification,
6 * are permitted provided that the following conditions are met:
7 *
8 * o Redistributions of source code must retain the above copyright notice, this list
9 *   of conditions and the following disclaimer.
10 *
11 * o Redistributions in binary form must reproduce the above copyright notice, this
12 *   list of conditions and the following disclaimer in the documentation and/or
13 *   other materials provided with the distribution.
14 *
15 * o Neither the name of Freescale Semiconductor, Inc. nor the names of its
16 *   contributors may be used to endorse or promote products derived from this
17 *   software without specific prior written permission.
18 *
19 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
20 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
23 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
26 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
28 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 */
30
31/*!
32 * @file cortexA9.s
33 * @brief This file contains cortexA9 functions
34 *
35 */
36
37    .code 32
38    .section ".text","ax"
39
40/*
41 * bool arm_set_interrupt_state(bool enable)
42 */
43    .global arm_set_interrupt_state
44    .func arm_set_interrupt_state
45arm_set_interrupt_state:
46    mrs             r2,CPSR            @ read CPSR (Current Program Status Register)
47    teq     r0,#0
48    bicne   r1,r2,#0xc0        @ disable IRQ and FIQ
49    orreq   r1,r2,#0xc0        @ enable IRQ and FIQ
50    msr     CPSR_c,r1
51    tst     r2,#0x80
52    movne   r0,#0
53    moveq   r0,#1
54    bx      lr
55    .endfunc
56
57  .global cpu_get_current
58  @ int cpu_get_current(void)@
59  @ get current CPU ID
60  .func cpu_get_current
61cpu_get_current:
62    mrc   p15, 0, r0, c0, c0, 5
63    and   r0, r0, #3
64	BX	  lr
65  .endfunc    @cpu_get_current()@
66
67  .global enable_neon_fpu
68  .func enable_neon_fpu
69enable_neon_fpu:
70	/* set NSACR, both Secure and Non-secure access are allowed to NEON */
71	MRC p15, 0, r0, c1, c1, 2
72	ORR r0, r0, #(0x3<<10) @ enable fpu/neon
73	MCR p15, 0, r0, c1, c1, 2
74	/* Set the CPACR for access to CP10 and CP11*/
75	LDR r0, =0xF00000
76	MCR p15, 0, r0, c1, c0, 2
77	/* Set the FPEXC EN bit to enable the FPU */
78	MOV r3, #0x40000000
79	@VMSR FPEXC, r3
80	MCR p10, 7, r3, c8, c0, 0
81  .endfunc
82
83  .global disable_strict_align_check
84  .func disable_strict_align_check
85disable_strict_align_check:
86  	/*Ray's note: disable strict alignment fault checking.
87 	without disabling this, data abort will happen when accessing
88	the BPB structure of file system since it is packed.*/
89
90  	push	{r0, lr}
91
92	mrc p15, 0, r0, c1, c0, 0
93	bic r0, r0, #(0x1<<1) @clear A bit of SCTLR
94	mcr p15, 0, r0, c1, c0, 0
95
96    pop {r0, pc}
97  .endfunc
98
99  .global disable_L1_cache
100  .func disable_L1_cache
101disable_L1_cache:
102    push	{r0-r6, lr}
103
104	mrc p15, 0, r0, c1, c0, 0
105	bic r0, r0, #(0x1<<12)
106	bic r0, r0, #(0x1<<11)
107	bic r0, r0, #(0x1<<2)
108	bic r0, r0, #(0x1<<0)
109	mcr p15, 0, r0, c1, c0, 0
110
111    pop {r0-r6, pc}
112
113  .endfunc
114
115  .global get_arm_private_peripheral_base
116  @ uint32_t get_arm_private_peripheral_base(void)@
117  .func get_arm_private_peripheral_base
118get_arm_private_peripheral_base:
119
120  @ Get base address of private perpherial space
121  mrc     p15, 4, r0, c15, c0, 0  @ Read periph base address
122  bx      lr
123
124  .endfunc    @get_arm_private_peripheral_base()@
125
126@ ------------------------------------------------------------
127@ TLB
128@ ------------------------------------------------------------
129
130  .global arm_unified_tlb_invalidate
131  @ void arm_unified_tlb_invalidate(void)@
132  .func arm_unified_tlb_invalidate
133arm_unified_tlb_invalidate:
134  mov     r0, #1
135  mcr     p15, 0, r0, c8, c7, 0                 @ TLBIALL - Invalidate entire unified TLB
136  dsb
137  bx      lr
138  .endfunc
139
140  .global arm_unified_tlb_invalidate_is
141  @ void arm_unified_tlb_invalidate_is(void)@
142  .func arm_unified_tlb_invalidate_is
143arm_unified_tlb_invalidate_is:
144  mov     r0, #1
145  mcr     p15, 0, r0, c8, c3, 0                 @ TLBIALLIS - Invalidate entire unified TLB Inner Shareable
146  dsb
147  bx      lr
148  .endfunc
149
150@ ------------------------------------------------------------
151@ Branch Prediction
152@ ------------------------------------------------------------
153
154  .global arm_branch_prediction_enable
155  @ void arm_branch_prediction_enable(void)
156  .func arm_branch_prediction_enable
157arm_branch_prediction_enable:
158  mrc     p15, 0, r0, c1, c0, 0                 @ Read SCTLR
159  orr     r0, r0, #(1 << 11)                    @ Set the Z bit (bit 11)
160  mcr     p15, 0,r0, c1, c0, 0                  @ Write SCTLR
161  bx      lr
162  .endfunc
163
164  .global arm_branch_prediction_disable
165  @ void arm_branch_prediction_disable(void)
166  .func arm_branch_prediction_disable
167arm_branch_prediction_disable:
168  mrc     p15, 0, r0, c1, c0, 0                 @ Read SCTLR
169  bic     r0, r0, #(1 << 11)                    @ Clear the Z bit (bit 11)
170  mcr     p15, 0,r0, c1, c0, 0                  @ Write SCTLR
171  bx      lr
172  .endfunc
173
174  .global arm_branch_target_cache_invalidate
175  @ void arm_branch_target_cache_invalidate(void)
176  .func arm_branch_target_cache_invalidate
177arm_branch_target_cache_invalidate:
178  mov     r0, #0
179  mcr     p15, 0, r0, c7, c5, 6                 @ BPIALL - Invalidate entire branch predictor array
180  bx      lr
181  .endfunc
182
183  .global arm_branch_target_cache_invalidate_is
184  @ void arm_branch_target_cache_invalidate_is(void)
185  .func arm_branch_target_cache_invalidate_is
186arm_branch_target_cache_invalidate_is:
187  mov     r0, #0
188  mcr     p15, 0, r0, c7, c1, 6                 @ BPIALLIS - Invalidate entire branch predictor array Inner Shareable
189  bx      lr
190  .endfunc
191
192@ ------------------------------------------------------------
193@ SCU
194@ ------------------------------------------------------------
195
196  @ SCU offset from base of private peripheral space --> 0x000
197
198  .global scu_enable
199  @ void scu_enable(void)
200  @ Enables the SCU
201  .func scu_enable
202scu_enable:
203
204  mrc     p15, 4, r0, c15, c0, 0  @ Read periph base address
205
206  ldr     r1, [r0, #0x0]          @ Read the SCU Control Register
207  orr     r1, r1, #0x1            @ Set bit 0 (The Enable bit)
208  str     r1, [r0, #0x0]          @ Write back modifed value
209
210  bx      lr
211  .endfunc
212
213@ ------------------------------------------------------------
214
215  .global  scu_join_smp
216  @ void scu_join_smp(void)
217  @ Set this CPU as participating in SMP
218  .func scu_join_smp
219scu_join_smp:
220
221  @ SMP status is controlled by bit 6 of the CP15 Aux Ctrl Reg
222
223  mrc     p15, 0, r0, c1, c0, 1   @ Read ACTLR
224  orr     r0, r0, #0x040          @ Set bit 6
225  mcr     p15, 0, r0, c1, c0, 1   @ Write ACTLR
226
227  bx      lr
228  .endfunc
229
230@ ------------------------------------------------------------
231
232  .global scu_leave_smp
233  @ void scu_leave_smp(void)
234  @ Set this CPU as NOT participating in SMP
235  .func scu_leave_smp
236scu_leave_smp:
237
238  @ SMP status is controlled by bit 6 of the CP15 Aux Ctrl Reg
239
240  mrc     p15, 0, r0, c1, c0, 1   @ Read ACTLR
241  bic     r0, r0, #0x040          @ Clear bit 6
242  mcr     p15, 0, r0, c1, c0, 1   @ Write ACTLR
243
244  bx      lr
245  .endfunc
246
247@ ------------------------------------------------------------
248
249  .global scu_get_cpus_in_smp
250  @ unsigned int scu_get_cpus_in_smp(void)
251  @ The return value is 1 bit per core:
252  @ bit 0 - CPU 0
253  @ bit 1 - CPU 1
254  @ etc...
255  .func scu_get_cpus_in_smp
256scu_get_cpus_in_smp:
257
258  mrc     p15, 4, r0, c15, c0, 0  @ Read periph base address
259
260  ldr     r0, [r0, #0x004]        @ Read SCU Configuration register
261  mov     r0, r0, lsr #4          @ Bits 7:4 gives the cores in SMP mode, shift then mask
262  and     r0, r0, #0x0F
263
264  bx      lr
265  .endfunc
266
267@ ------------------------------------------------------------
268
269  .global scu_enable_maintenance_broadcast
270  @ void scu_enable_maintenance_broadcast(void)
271  @ Enable the broadcasting of cache & TLB maintenance operations
272  @ When enabled AND in SMP, broadcast all "inner sharable"
273  @ cache and TLM maintenance operations to other SMP cores
274  .func scu_enable_maintenance_broadcast
275scu_enable_maintenance_broadcast:
276  mrc     p15, 0, r0, c1, c0, 1   @ Read Aux Ctrl register
277  orr     r0, r0, #0x01           @ Set the FW bit (bit 0)
278  mcr     p15, 0, r0, c1, c0, 1   @ Write Aux Ctrl register
279
280  bx      lr
281  .endfunc
282
283@ ------------------------------------------------------------
284
285  .global scu_disable_maintenance_broadcast
286  @ void scu_disable_maintenance_broadcast(void)
287  @ Disable the broadcasting of cache & TLB maintenance operations
288  .func scu_disable_maintenance_broadcast
289scu_disable_maintenance_broadcast:
290  mrc     p15, 0, r0, c1, c0, 1   @ Read Aux Ctrl register
291  bic     r0, r0, #0x01           @ Clear the FW bit (bit 0)
292  mcr     p15, 0, r0, c1, c0, 1   @ Write Aux Ctrl register
293
294  bx      lr
295  .endfunc
296
297@ ------------------------------------------------------------
298
299  .global scu_secure_invalidate
300  @ void scu_secure_invalidate(unsigned int cpu, unsigned int ways)
301  @ cpu: 0x0=CPU 0 0x1=CPU 1 etc...
302  @ This function invalidates the SCU copy of the tag rams
303  @ for the specified core.  typically only done at start-up.
304  @ Possible flow:
305  @ - Invalidate L1 caches
306  @ - Invalidate SCU copy of TAG RAMs
307  @ - Join SMP
308  .func scu_secure_invalidate
309scu_secure_invalidate:
310  and     r0, r0, #0x03           @ Mask off unused bits of CPU ID
311  mov     r0, r0, lsl #2          @ Convert into bit offset (four bits per core)
312
313  and     r1, r1, #0x0F           @ Mask off unused bits of ways
314  mov     r1, r1, lsl r0          @ Shift ways into the correct CPU field
315
316  mrc     p15, 4, r2, c15, c0, 0  @ Read periph base address
317
318  str     r1, [r2, #0x0C]         @ Write to SCU Invalidate All in Secure State
319
320  bx      lr
321
322  .endfunc
323
324@ ------------------------------------------------------------
325@ End of cortexA9.s
326@ ------------------------------------------------------------
327    .end
328