/*
 * Copyright (c) 2006-2021, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2019-03-29     quanzhao     the first version
 */
#include <rthw.h>
#include <rtdef.h>

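/* Return the instruction cache line size in bytes, derived from the
 * IminLine field (bits [3:0]) of the CP15 Cache Type Register. */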
rt_inline rt_uint32_t rt_cpu_icache_line_size(void)
{
    rt_uint32_t ctr;
    asm volatile ("mrc p15, 0, %0, c0, c0, 1" : "=r"(ctr));
    return 4 << (ctr & 0xF);
}

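/* Return the data cache line size in bytes, derived from the
 * DminLine field (bits [19:16]) of the CP15 Cache Type Register. */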
rt_inline rt_uint32_t rt_cpu_dcache_line_size(void)
{
    rt_uint32_t ctr;
    asm volatile ("mrc p15, 0, %0, c0, c0, 1" : "=r"(ctr));
    return 4 << ((ctr >> 16) & 0xF);
}

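/* Invalidate the instruction cache lines covering [addr, addr + size).
 * Each line is invalidated by MVA to the Point of Unification (ICIMVAU),
 * followed by DSB and ISB so the updated instructions take effect. */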
void rt_hw_cpu_icache_invalidate(void *addr, int size)
{
    rt_uint32_t line_size = rt_cpu_icache_line_size();
    rt_uint32_t start_addr = (rt_uint32_t)addr;
    rt_uint32_t end_addr = (rt_uint32_t)addr + size + line_size - 1;

    asm volatile ("dmb":::"memory");
    start_addr &= ~(line_size - 1);
    end_addr &= ~(line_size - 1);
    while (start_addr < end_addr)
    {
        asm volatile ("mcr p15, 0, %0, c7, c5, 1" :: "r"(start_addr));  /* icimvau */
        start_addr += line_size;
    }
    asm volatile ("dsb\n\tisb":::"memory");
}

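/* Invalidate the data cache lines covering [addr, addr + size) by MVA to
 * the Point of Coherency (DCIMVAC). Addresses are rounded down to a line
 * boundary, so dirty data sharing a partially covered line is discarded;
 * rt_hw_cpu_dcache_inv_range() preserves such neighbouring data. */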
void rt_hw_cpu_dcache_invalidate(void *addr, int size)
{
    rt_uint32_t line_size = rt_cpu_dcache_line_size();
    rt_uint32_t start_addr = (rt_uint32_t)addr;
    rt_uint32_t end_addr = (rt_uint32_t)addr + size + line_size - 1;

    asm volatile ("dmb":::"memory");
    start_addr &= ~(line_size - 1);
    end_addr &= ~(line_size - 1);
    while (start_addr < end_addr)
    {
        asm volatile ("mcr p15, 0, %0, c7, c6, 1" :: "r"(start_addr));  /* dcimvac */
        start_addr += line_size;
    }
    asm volatile ("dsb":::"memory");
}

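/* Invalidate the data cache lines covering [addr, addr + size), but
 * clean-and-invalidate (DCCIMVAC) any partially covered line at either
 * end of the range so that neighbouring data in those lines is written
 * back rather than lost. */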
void rt_hw_cpu_dcache_inv_range(void *addr, int size)
{
    rt_uint32_t line_size = rt_cpu_dcache_line_size();
    rt_uint32_t start_addr = (rt_uint32_t)addr;
    rt_uint32_t end_addr = (rt_uint32_t)addr + size;

    asm volatile ("dmb":::"memory");

    if ((start_addr & (line_size - 1)) != 0)
    {
        start_addr &= ~(line_size - 1);
        asm volatile ("mcr p15, 0, %0, c7, c14, 1" :: "r"(start_addr)); /* dccimvac */
        start_addr += line_size;
        asm volatile ("dsb":::"memory");
    }

    if ((end_addr & (line_size - 1)) != 0)
    {
        end_addr &= ~(line_size - 1);
        asm volatile ("mcr p15, 0, %0, c7, c14, 1" :: "r"(end_addr));   /* dccimvac */
        asm volatile ("dsb":::"memory");
    }

    while (start_addr < end_addr)
    {
        asm volatile ("mcr p15, 0, %0, c7, c6, 1" :: "r"(start_addr));  /* dcimvac */
        start_addr += line_size;
    }
    asm volatile ("dsb":::"memory");
}

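/* Clean (write back) the data cache lines covering [addr, addr + size)
 * to the Point of Coherency using DCCMVAC. */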
void rt_hw_cpu_dcache_clean(void *addr, int size)
{
    rt_uint32_t line_size = rt_cpu_dcache_line_size();
    rt_uint32_t start_addr = (rt_uint32_t)addr;
    rt_uint32_t end_addr = (rt_uint32_t)addr + size + line_size - 1;

    asm volatile ("dmb":::"memory");
    start_addr &= ~(line_size - 1);
    end_addr &= ~(line_size - 1);
    while (start_addr < end_addr)
    {
        asm volatile ("mcr p15, 0, %0, c7, c10, 1" :: "r"(start_addr)); /* dccmvac */
        start_addr += line_size;
    }
    asm volatile ("dsb":::"memory");
}

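/* Clean and then invalidate the data cache lines covering
 * [addr, addr + size): each line is written back (DCCMVAC) and then
 * invalidated (DCIMVAC). */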
void rt_hw_cpu_dcache_clean_and_invalidate(void *addr, int size)
{
    rt_uint32_t line_size = rt_cpu_dcache_line_size();
    rt_uint32_t start_addr = (rt_uint32_t)addr;
    rt_uint32_t end_addr = (rt_uint32_t)addr + size + line_size - 1;

    asm volatile ("dmb":::"memory");
    start_addr &= ~(line_size - 1);
    end_addr &= ~(line_size - 1);
    while (start_addr < end_addr)
    {
        asm volatile ("mcr p15, 0, %0, c7, c10, 1" :: "r"(start_addr)); /* dccmvac */
        asm volatile ("mcr p15, 0, %0, c7, c6, 1" :: "r"(start_addr));  /* dcimvac */
        start_addr += line_size;
    }
    asm volatile ("dsb":::"memory");
}

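/* Generic instruction cache entry point of the rt_hw cache interface;
 * only RT_HW_CACHE_INVALIDATE is meaningful for the I-cache. */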
void rt_hw_cpu_icache_ops(int ops, void *addr, int size)
{
    if (ops == RT_HW_CACHE_INVALIDATE)
    {
        rt_hw_cpu_icache_invalidate(addr, size);
    }
}

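/* Generic data cache entry point of the rt_hw cache interface:
 * RT_HW_CACHE_FLUSH maps to clean, RT_HW_CACHE_INVALIDATE to invalidate. */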
void rt_hw_cpu_dcache_ops(int ops, void *addr, int size)
{
    if (ops == RT_HW_CACHE_FLUSH)
    {
        rt_hw_cpu_dcache_clean(addr, size);
    }
    else if (ops == RT_HW_CACHE_INVALIDATE)
    {
        rt_hw_cpu_dcache_invalidate(addr, size);
    }
}

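/* Cache status is not reported on this port; both status functions
 * simply return 0. */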
rt_base_t rt_hw_cpu_icache_status(void)
{
    return 0;
}

rt_base_t rt_hw_cpu_dcache_status(void)
{
    return 0;
}