#ifndef __PMU_H__
#define __PMU_H__

#include "board.h"

/* Number of event counters */
#define ARM_PMU_CNTER_NR 4

enum rt_hw_pmu_event_type {
    ARM_PMU_EVENT_PMNC_SW_INCR      = 0x00,
    ARM_PMU_EVENT_L1_ICACHE_REFILL  = 0x01,
    ARM_PMU_EVENT_ITLB_REFILL       = 0x02,
    ARM_PMU_EVENT_L1_DCACHE_REFILL  = 0x03,
    ARM_PMU_EVENT_L1_DCACHE_ACCESS  = 0x04,
    ARM_PMU_EVENT_DTLB_REFILL       = 0x05,
    ARM_PMU_EVENT_MEM_READ          = 0x06,
    ARM_PMU_EVENT_MEM_WRITE         = 0x07,
    ARM_PMU_EVENT_INSTR_EXECUTED    = 0x08,
    ARM_PMU_EVENT_EXC_TAKEN         = 0x09,
    ARM_PMU_EVENT_EXC_EXECUTED      = 0x0A,
    ARM_PMU_EVENT_CID_WRITE         = 0x0B,
};

/* PMCR: enable bit */
#define ARM_PMU_PMCR_E   (0x01 << 0)
/* PMCR: event counter reset */
#define ARM_PMU_PMCR_P   (0x01 << 1)
/* PMCR: cycle counter reset */
#define ARM_PMU_PMCR_C   (0x01 << 2)
/* PMCR: cycle counter divider (count every 64th cycle) */
#define ARM_PMU_PMCR_D   (0x01 << 3)

#ifdef __GNUC__
/* Enable the PMU: reset all counters, then turn every counter on.
 * If divide64 is non-zero, the cycle counter ticks once every 64 cycles. */
rt_inline void rt_hw_pmu_enable_cnt(int divide64)
{
    unsigned long pmcr;
    unsigned long pmcntenset;

    asm volatile ("mrc p15, 0, %0, c9, c12, 0" : "=r"(pmcr));
    pmcr |= ARM_PMU_PMCR_E | ARM_PMU_PMCR_P | ARM_PMU_PMCR_C;
    if (divide64)
        pmcr |= ARM_PMU_PMCR_D;
    else
        pmcr &= ~ARM_PMU_PMCR_D;
    asm volatile ("mcr p15, 0, %0, c9, c12, 0" :: "r"(pmcr));

    /* enable all the counters */
    pmcntenset = ~0;
    asm volatile ("mcr p15, 0, %0, c9, c12, 1" :: "r"(pmcntenset));
    /* clear any pending overflow flags (just in case) */
    asm volatile ("mcr p15, 0, %0, c9, c12, 3" :: "r"(pmcntenset));
}
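
/*
 * Usage sketch (illustrative only, not part of the original API surface):
 * a typical bring-up path enables the PMU once, with the cycle counter
 * counting every CPU cycle (no 1/64 divider):
 *
 *     rt_hw_pmu_enable_cnt(0);
 */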

/* Read PMCR, the PMU control register. */
rt_inline unsigned long rt_hw_pmu_get_control(void)
{
    unsigned long pmcr;
    asm ("mrc p15, 0, %0, c9, c12, 0" : "=r"(pmcr));
    return pmcr;
}

/* Read PMCEID0, the common event identification register. */
rt_inline unsigned long rt_hw_pmu_get_ceid(void)
{
    unsigned long reg;
    /* only PMCEID0 is supported, PMCEID1 is RAZ. */
    asm ("mrc p15, 0, %0, c9, c12, 6" : "=r"(reg));
    return reg;
}

/* Read PMCNTENSET to see which counters are currently enabled. */
rt_inline unsigned long rt_hw_pmu_get_cnten(void)
{
    unsigned long pmcnt;
    asm ("mrc p15, 0, %0, c9, c12, 1" : "=r"(pmcnt));
    return pmcnt;
}

/* Reset the cycle counter (PMCCNTR) to zero. */
rt_inline void rt_hw_pmu_reset_cycle(void)
{
    unsigned long pmcr;

    asm volatile ("mrc p15, 0, %0, c9, c12, 0" : "=r"(pmcr));
    pmcr |= ARM_PMU_PMCR_C;
    asm volatile ("mcr p15, 0, %0, c9, c12, 0" :: "r"(pmcr));
    asm volatile ("isb");
}

/* Reset all event counters to zero. */
rt_inline void rt_hw_pmu_reset_event(void)
{
    unsigned long pmcr;

    asm volatile ("mrc p15, 0, %0, c9, c12, 0" : "=r"(pmcr));
    pmcr |= ARM_PMU_PMCR_P;
    asm volatile ("mcr p15, 0, %0, c9, c12, 0" :: "r"(pmcr));
    asm volatile ("isb");
}

/* Read the cycle counter (PMCCNTR). */
rt_inline unsigned long rt_hw_pmu_get_cycle(void)
{
    unsigned long cyc;
    asm volatile ("isb");
    asm volatile ("mrc  p15, 0, %0, c9, c13, 0" : "=r"(cyc));
    return cyc;
}

/* Select event counter idx via PMSELR; subsequent PMXEVTYPER/PMXEVCNTR
 * accesses refer to the selected counter. */
rt_inline void rt_hw_pmu_select_counter(int idx)
{
    RT_ASSERT(idx < ARM_PMU_CNTER_NR);

    asm volatile ("mcr p15, 0, %0, c9, c12, 5" : : "r"(idx));
    /* Linux adds an isb here, presumably so the selection takes effect
     * before the following counter access. */
    asm volatile ("isb");
}

/* Bind event type 'eve' to event counter 'idx' (PMXEVTYPER). */
rt_inline void rt_hw_pmu_select_event(int idx,
                                      enum rt_hw_pmu_event_type eve)
{
    RT_ASSERT(idx < ARM_PMU_CNTER_NR);

    rt_hw_pmu_select_counter(idx);
    asm volatile ("mcr p15, 0, %0, c9, c13, 1" : : "r"(eve));
}

/* Read the value of event counter 'idx' (PMXEVCNTR). */
rt_inline unsigned long rt_hw_pmu_read_counter(int idx)
{
    unsigned long reg;

    rt_hw_pmu_select_counter(idx);
    asm volatile ("isb");
    asm volatile ("mrc p15, 0, %0, c9, c13, 2" : "=r"(reg));
    return reg;
}
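
/*
 * Usage sketch (illustrative, uses only the functions above): count L1
 * D-cache refills on event counter 0 around a region of interest.
 *
 *     unsigned long before, after;
 *
 *     rt_hw_pmu_select_event(0, ARM_PMU_EVENT_L1_DCACHE_REFILL);
 *     rt_hw_pmu_enable_cnt(0);
 *     before = rt_hw_pmu_read_counter(0);
 *     ... code under measurement ...
 *     after = rt_hw_pmu_read_counter(0);
 *     refills observed: after - before
 */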

/* Read PMOVSR, the counter overflow status register. */
rt_inline unsigned long rt_hw_pmu_get_ovsr(void)
{
    unsigned long reg;
    asm volatile ("isb");
    asm ("mrc  p15, 0, %0, c9, c12, 3" : "=r"(reg));
    return reg;
}

/* Clear overflow flags by writing them back to PMOVSR (write-1-to-clear). */
rt_inline void rt_hw_pmu_clear_ovsr(unsigned long reg)
{
    asm ("mcr  p15, 0, %0, c9, c12, 3" : : "r"(reg));
    asm volatile ("isb");
}
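
/*
 * Usage sketch (illustrative): the counters are 32 bits wide, so long
 * measurements should check for and acknowledge overflow:
 *
 *     unsigned long ovsr = rt_hw_pmu_get_ovsr();
 *     if (ovsr & (1ul << 0))          // event counter 0 wrapped
 *     {
 *         ... account for the wrap ...
 *         rt_hw_pmu_clear_ovsr(ovsr);
 *     }
 */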

#endif /* __GNUC__ */

void rt_hw_pmu_dump_feature(void);

#endif /* end of include guard: __PMU_H__ */