/*
 * Copyright (c) 2006-2020, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2011-09-15     Bernard      first version
 */

11 #ifndef __CP15_H__
12 #define __CP15_H__
13 
14 #ifndef   __STATIC_FORCEINLINE
15 #define __STATIC_FORCEINLINE     __attribute__((always_inline)) static inline
16 #endif
17 
18 #define __WFI()    __asm__ volatile ("wfi":::"memory")
19 
20 #define __WFE()    __asm__ volatile ("wfe":::"memory")
21 
22 #define __SEV()    __asm__ volatile ("sev")
23 
__ISB(void)24 __STATIC_FORCEINLINE  void __ISB(void)
25 {
26     __asm__ volatile ("isb 0xF":::"memory");
27 }
28 
29 /**
30   \brief   Data Synchronization Barrier
31   \details Acts as a special kind of Data Memory Barrier.
32            It completes when all explicit memory accesses before this instruction complete.
33  */
__DSB(void)34 __STATIC_FORCEINLINE  void __DSB(void)
35 {
36     __asm__ volatile ("dsb 0xF":::"memory");
37 }
38 
39 /**
40   \brief   Data Memory Barrier
41   \details Ensures the apparent order of the explicit memory operations before
42            and after the instruction, without ensuring their completion.
43  */
44 
__DMB(void)45 __STATIC_FORCEINLINE  void __DMB(void)
46 {
47     __asm__ volatile ("dmb 0xF":::"memory");
48 }
49 
50 unsigned long rt_cpu_get_smp_id(void);
51 
52 void rt_cpu_mmu_disable(void);
53 void rt_cpu_mmu_enable(void);
54 void rt_cpu_tlb_set(volatile unsigned long*);
55 
56 void rt_cpu_dcache_clean_flush(void);
57 void rt_cpu_icache_flush(void);
58 
59 void rt_cpu_vector_set_base(rt_ubase_t addr);
60 void rt_hw_mmu_init(void);
61 void rt_hw_vector_init(void);
62 
63 void set_timer_counter(unsigned int counter);
64 void set_timer_control(unsigned int control);
65 #endif
66