/*
 * Copyright (c) 2006-2022, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2022-11-28     WangXiaoyao  the first version
 */
#ifndef __TLB_H__
#define __TLB_H__

#include <rtthread.h>
#include <stddef.h>
#include <stdint.h>
#include "mm_aspace.h"
#include "mmu.h"

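/*
 * Pack a virtual address and an ASID into the 64-bit operand used by the
 * TLBI instructions below: the page number (addr >> ARCH_PAGE_SHIFT) fills
 * the low 44 bits, and the ASID is placed at MMU_ASID_SHIFT.
 */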
#define TLBI_ARG(addr, asid)                                        \
    ({                                                              \
        rt_ubase_t arg = (rt_ubase_t)(addr) >> ARCH_PAGE_SHIFT;     \
        arg &= (1ull << 44) - 1;                                    \
        arg |= (rt_ubase_t)(asid) << MMU_ASID_SHIFT;                \
        (void *)arg;                                                \
    })

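/*
 * Invalidate every stage-1 EL1 TLB entry on all cores in the inner-shareable
 * domain.
 */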
static inline void rt_hw_tlb_invalidate_all(void)
{
    __asm__ volatile(
        // ensure updates to pte completed
        "dsb ishst\n"
        "tlbi vmalle1is\n"
        "dsb ish\n"
        // after tlb in new context, refresh inst
        "isb\n" ::
            : "memory");
}

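/*
 * Invalidate every stage-1 EL1 TLB entry on the calling core only; the
 * operation is not broadcast to other cores.
 */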
static inline void rt_hw_tlb_invalidate_all_local(void)
{
    __asm__ volatile(
        // ensure updates to pte completed
        "dsb nshst\n"
        // local (non-broadcast) form to match the non-shareable barriers
        "tlbi vmalle1\n"
        "dsb nsh\n"
        // after tlb in new context, refresh inst
        "isb\n" ::
            : "memory");
}

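/*
 * Invalidate all TLB entries belonging to one address space. With ASID
 * support the invalidate is limited to that ASID; otherwise the whole TLB
 * is flushed.
 */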
static inline void rt_hw_tlb_invalidate_aspace(rt_aspace_t aspace)
{
#ifdef ARCH_USING_ASID
    __asm__ volatile(
        // ensure updates to pte completed
        "dsb ishst\n"
        // aside1is broadcasts, so pair it with inner-shareable barriers
        "tlbi aside1is, %0\n"
        "dsb ish\n"
        // after tlb in new context, refresh inst
        "isb\n" ::"r"(TLBI_ARG(0ul, aspace->asid))
        : "memory");
#else
    rt_hw_tlb_invalidate_all();
#endif
}

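/*
 * Invalidate the TLB entry for a single page, matching any ASID, on all
 * cores in the inner-shareable domain.
 */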
static inline void rt_hw_tlb_invalidate_page(rt_aspace_t aspace, void *start)
{
    start = TLBI_ARG(start, 0);
    __asm__ volatile(
        "dsb ishst\n"
        "tlbi vaae1is, %0\n"
        "dsb ish\n"
        "isb\n" ::"r"(start)
        : "memory");
}

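/*
 * Invalidate a range of mappings. A single page is invalidated by address;
 * anything larger falls back to flushing the whole address space, so the
 * stride argument is currently unused.
 */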
static inline void rt_hw_tlb_invalidate_range(rt_aspace_t aspace, void *start,
                                              size_t size, size_t stride)
{
    if (size <= ARCH_PAGE_SIZE)
    {
        rt_hw_tlb_invalidate_page(aspace, start);
    }
    else
    {
        rt_hw_tlb_invalidate_aspace(aspace);
    }
}

#endif /* __TLB_H__ */