/*
 * Copyright (c) 2006-2022, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2022-11-28     WangXiaoyao  the first version
 */
#ifndef __TLB_H__
#define __TLB_H__

#include <stddef.h>
#include <stdint.h>

#include <rtthread.h>
#include <mm_aspace.h>
#include "sbi.h"
#include "riscv_mmu.h"

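/* Warn when an SBI call returns anything other than SBI_SUCCESS */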
#define HANDLE_FAULT(ret)                              \
    if (__builtin_expect((ret) != SBI_SUCCESS, 0))     \
        LOG_W("%s failed", __FUNCTION__);

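/* Flush the TLB on all harts: the all-ones hart mask asks the SBI firmware
 * to execute SFENCE.VMA remotely over the whole address range. */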
static inline void rt_hw_tlb_invalidate_all(void)
{
    uintptr_t mask = -1ul;
    HANDLE_FAULT(sbi_remote_sfence_vma(&mask, -1ul, 0, mask));
}

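/* Flush the entire TLB of the current hart only; a bare sfence.vma with no
 * address or ASID operands invalidates all cached translations. */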
static inline void rt_hw_tlb_invalidate_all_local(void)
{
    __asm__ volatile("sfence.vma" ::: "memory");
}

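/* Invalidate every translation belonging to an address space. ASID-based
 * invalidation is not implemented yet, so this falls back to a full local
 * flush. */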
static inline void rt_hw_tlb_invalidate_aspace(rt_aspace_t aspace)
{
    // TODO ASID
    rt_hw_tlb_invalidate_all_local();
}

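/* Invalidate the translation for a single virtual page on the current hart */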
static inline void rt_hw_tlb_invalidate_page(rt_aspace_t aspace, void *start)
{
    __asm__ volatile("sfence.vma %0, zero" ::"r"(start) : "memory");
}

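/* Invalidate translations for [start, start + size). A range within one page
 * is flushed with a single page invalidation; larger ranges fall back to
 * flushing the whole address space. */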
static inline void rt_hw_tlb_invalidate_range(rt_aspace_t aspace, void *start,
                                              size_t size, size_t stride)
{
    // a huge page is treated the same as a normal page here
    if (size <= ARCH_PAGE_SIZE)
    {
        rt_hw_tlb_invalidate_page(aspace, start);
    }
    else
    {
        rt_hw_tlb_invalidate_aspace(aspace);
    }
}

#endif /* __TLB_H__ */