/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright (c) 2021, EPAM Systems
 */

#include <asm.S>
#include <arm.h>
#include <arm64_macros.S>
#include <generated/asm-defines.h>
#include <kernel/cache_helpers.h>

/* uint32_t __plat_romapi_wrapper(paddr_t func, uint64_t arg1, uint64_t arg2,
 *				  uint64_t arg3)
 * Call the MaskROM function func(arg1, arg2, arg3).
 * The MMU must be disabled before calling any MaskROM API functions.
 */
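/*
 * A minimal usage sketch from the C side (hypothetical caller: the
 * MaskROM entry address ROMAPI_FUNC_PA and the argument names are
 * assumptions, not part of this file):
 *
 *	uint32_t ret = __plat_romapi_wrapper(ROMAPI_FUNC_PA, arg1, arg2, arg3);
 */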
FUNC __plat_romapi_wrapper , : , .identity_map

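	/*
	 * Note: the .identity_map section argument to FUNC above places
	 * this code at an identity-mapped (VA == PA) location, so
	 * execution can continue across the MMU disable/enable below.
	 */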
	push	fp, lr
	push	x19, x20
	push	x21, x22
	push	x23, x24

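	/*
	 * Stash the arguments in callee-saved registers: x0-x3 will be
	 * clobbered by the virt_to_phys() and dcache_op_all() calls below.
	 */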
	mov	x19, x0
	mov	x20, x1
	mov	x21, x2
	mov	x22, x3

	/* Get the PA of the stack pointer: we need a physical SP while the MMU is off */
	mov	x0, sp
#ifdef CFG_CORE_ASLR
	/*
	 * We are running at the identity-mapped location, so we can't use
	 * bl here: the assembler would generate a PC-relative branch to
	 * virt_to_phys(), which is not identity mapped.
	 */
	adr_l	x9, virt_to_phys
	ldr	x10, boot_mmu_config + CORE_MMU_CONFIG_LOAD_OFFSET
	add	x9, x9, x10
	blr	x9
#else
	bl	virt_to_phys
#endif
	mov	x23, x0		/* x23 = PA of the current stack */

	/* We are about to disable the MMU. Make sure that all writes have reached memory */
	mov	x0, #DCACHE_OP_CLEAN
#ifdef CFG_CORE_ASLR
	/* See the comment above */
	adr_l	x9, dcache_op_all
	ldr	x10, boot_mmu_config + CORE_MMU_CONFIG_LOAD_OFFSET
	add	x9, x9, x10
	blr	x9
#else
	bl	dcache_op_all
#endif

	/* Disable the MMU and data cache */
	mrs	x9, sctlr_el1
	bic	x9, x9, #SCTLR_M
	bic	x9, x9, #SCTLR_C
	msr	sctlr_el1, x9
	isb
	/* Invalidate instruction cache and branch predictor */
	ic	ialluis
	dsb	ish	/* ensure that maintenance operations are seen */
	isb

	/* Save the old (virtual) SP to x24 and switch to the physical stack */
	mov	x24, sp
	mov	sp, x23

	/* Call the MaskROM function */
	mov	x0, x20		/* x20: uint64_t arg1 */
	mov	x1, x21		/* x21: uint64_t arg2 */
	mov	x2, x22		/* x22: uint64_t arg3 */
	blr	x19		/* x19: paddr_t func */
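	/*
	 * The MaskROM return value is left in w0; none of the instructions
	 * below clobber it, so it becomes this function's uint32_t return
	 * value.
	 */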

	/* Restore the virtual stack pointer */
	mov	sp, x24

	/* Enable the MMU and data cache */
	mrs	x9, sctlr_el1
	orr	x9, x9, #SCTLR_M
	orr	x9, x9, #SCTLR_C
	msr	sctlr_el1, x9
	isb

	/* Invalidate instruction cache and branch predictor */
	ic	iallu
	isb
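	/*
	 * Note: unlike the ialluis above, ic iallu invalidates the
	 * I-cache on the local core only.
	 */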

	pop	x23, x24
	pop	x21, x22
	pop	x19, x20
	pop	fp, lr
	ret
END_FUNC __plat_romapi_wrapper