1/*
2 * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
3 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 */
6
7#include <asm_macros.S>
8#include <assert_macros.S>
9#include <lib/xlat_tables/xlat_tables_v2.h>
10
11	.global	enable_mmu_direct_svc_mon
12	.global	enable_mmu_direct_hyp
13
	/*
	 * void enable_mmu_direct_svc_mon(unsigned int flags)
	 *
	 * Program the Secure/Monitor (PL1) translation registers from the
	 * global mmu_cfg_params array and turn the MMU on via SCTLR.
	 *
	 * In:      r0 = flags (DISABLE_DCACHE keeps the data cache off)
	 * Clobbers r0-r3; must be entered with the MMU disabled.
	 *
	 * NOTE(review): mmu_cfg_params appears to be an array of 64-bit
	 * elements (indices are scaled by '<< 3' and the high word is read
	 * at '+ 4'), presumably little-endian — confirm against the C-side
	 * definition of mmu_cfg_params.
	 */
func enable_mmu_direct_svc_mon
	/* Assert that MMU is turned off */
#if ENABLE_ASSERTIONS
	ldcopr  r1, SCTLR
	tst	r1, #SCTLR_M_BIT
	ASM_ASSERT(eq)
#endif

	/* Invalidate TLB entries */
	TLB_INVALIDATE(r0, TLBIALL)

	/* Preserve flags in r3; r0 becomes the parameter-array base. */
	mov	r3, r0
	ldr	r0, =mmu_cfg_params

	/* MAIR0. Only the lower 32 bits of the 64-bit entry are used. */
	ldr	r1, [r0, #(MMU_CFG_MAIR << 3)]
	stcopr	r1, MAIR0

	/* TTBCR. Only the lower 32 bits of the 64-bit entry are used. */
	ldr	r2, [r0, #(MMU_CFG_TCR << 3)]
	stcopr	r2, TTBCR

	/* TTBR0: full 64-bit value, written via the 64-bit accessor. */
	ldr	r1, [r0, #(MMU_CFG_TTBR0 << 3)]
	ldr	r2, [r0, #((MMU_CFG_TTBR0 << 3) + 4)]
	stcopr16	r1, r2, TTBR0_64

	/* TTBR1 is unused right now; set it to 0. */
	mov	r1, #0
	mov	r2, #0
	stcopr16	r1, r2, TTBR1_64

	/*
	 * Ensure all translation table writes have drained into memory, the TLB
	 * invalidation is complete, and translation register writes are
	 * committed before enabling the MMU
	 */
	dsb	ish
	isb

	/* Enable the MMU (and WXN/data cache), honoring the flags in r3 */
	ldcopr  r1, SCTLR
	ldr	r2, =(SCTLR_WXN_BIT | SCTLR_C_BIT | SCTLR_M_BIT)
	orr	r1, r1, r2

	/* Clear C bit if requested */
	tst	r3, #DISABLE_DCACHE
	bicne	r1, r1, #SCTLR_C_BIT

	stcopr	r1, SCTLR
	isb

	bx	lr
endfunc enable_mmu_direct_svc_mon
69
70
	/*
	 * void enable_mmu_direct_hyp(unsigned int flags)
	 *
	 * Hyp-mode (PL2) counterpart of enable_mmu_direct_svc_mon: program
	 * HMAIR0/HTCR/HTTBR from the global mmu_cfg_params array and turn
	 * the MMU on via HSCTLR.
	 *
	 * In:      r0 = flags (DISABLE_DCACHE keeps the data cache off)
	 * Clobbers r0-r3; must be entered with the MMU disabled.
	 *
	 * NOTE(review): mmu_cfg_params appears to be an array of 64-bit
	 * elements (indices are scaled by '<< 3' and the high word is read
	 * at '+ 4'), presumably little-endian — confirm against the C-side
	 * definition of mmu_cfg_params.
	 */
func enable_mmu_direct_hyp
	/* Assert that MMU is turned off */
#if ENABLE_ASSERTIONS
	ldcopr  r1, HSCTLR
	tst	r1, #HSCTLR_M_BIT
	ASM_ASSERT(eq)
#endif

	/* Invalidate TLB entries */
	TLB_INVALIDATE(r0, TLBIALL)

	/* Preserve flags in r3; r0 becomes the parameter-array base. */
	mov	r3, r0
	ldr	r0, =mmu_cfg_params

	/* HMAIR0. Only the lower 32 bits of the 64-bit entry are used. */
	ldr	r1, [r0, #(MMU_CFG_MAIR << 3)]
	stcopr	r1, HMAIR0

	/* HTCR. Only the lower 32 bits of the 64-bit entry are used. */
	ldr	r2, [r0, #(MMU_CFG_TCR << 3)]
	stcopr	r2, HTCR

	/* HTTBR: full 64-bit value, written via the 64-bit accessor. */
	ldr	r1, [r0, #(MMU_CFG_TTBR0 << 3)]
	ldr	r2, [r0, #((MMU_CFG_TTBR0 << 3) + 4)]
	stcopr16	r1, r2, HTTBR_64

	/*
	 * Ensure all translation table writes have drained into memory, the TLB
	 * invalidation is complete, and translation register writes are
	 * committed before enabling the MMU
	 */
	dsb	ish
	isb

	/* Enable the MMU (and WXN/data cache), honoring the flags in r3 */
	ldcopr  r1, HSCTLR
	ldr	r2, =(HSCTLR_WXN_BIT | HSCTLR_C_BIT | HSCTLR_M_BIT)
	orr	r1, r1, r2

	/* Clear C bit if requested */
	tst	r3, #DISABLE_DCACHE
	bicne	r1, r1, #HSCTLR_C_BIT

	stcopr	r1, HSCTLR
	isb

	bx	lr
endfunc enable_mmu_direct_hyp
121