/*
 * Copyright 2020 GreenWaves Technologies
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#ifndef HAL_INCLUDE_HAL_SOC_EU_PERIPH_H_
#define HAL_INCLUDE_HAL_SOC_EU_PERIPH_H_

#include <stdint.h>   /* uint32_t, uintptr_t */
#include <stdlib.h>   /* abort() */

#include "core-v-mcu-target.h"
#include "pulp_io.h"

/*!
 * @addtogroup soc_eu
 * @{
 */
/*******************************************************************************
 * Definitions
 ******************************************************************************/
/* FC SOC domain events, all delivered via FC_SOC_EVENT_IRQn = 27 */
/* TODO: garbage collect this */
#define    UDMA_EVENT_LVDS_RX              0
#define    UDMA_EVENT_LVDS_TX              1
#define    UDMA_EVENT_SPIM0_RX             2
#define    UDMA_EVENT_SPIM0_TX             3
#define    UDMA_EVENT_SPIM1_RX             4
#define    UDMA_EVENT_SPIM1_TX             5
#define    UDMA_EVENT_HYPERBUS_RX          6
#define    UDMA_EVENT_HYPERBUS_TX          7
#define    UDMA_EVENT_UART_RX              8
#define    UDMA_EVENT_UART_TX              9
#define    UDMA_EVENT_I2C0_RX              10
#define    UDMA_EVENT_I2C0_TX              11
#define    UDMA_EVENT_I2C1_RX              12
#define    UDMA_EVENT_I2C1_TX              13
#define    UDMA_EVENT_DMACPY_RX            14
#define    UDMA_EVENT_DMACPY_TX            15
#define    UDMA_EVENT_SAI_CH0              16
#define    UDMA_EVENT_SAI_CH1              17
#define    UDMA_EVENT_CPI_RX               18
#define    UDMA_EVENT_RESERVED0            19

#define    UDMA_EVENT_LVDS_GEN0            20
#define    UDMA_EVENT_LVDS_GEN1            21
#define    UDMA_EVENT_SPIM0_EOT            22
#define    UDMA_EVENT_SPIM1_EOT            23
#define    UDMA_EVENT_HYPERBUS_RESERVED    24
#define    UDMA_EVENT_UART_RESERVED        25
#define    UDMA_EVENT_I2C0_ERROR           26
#define    UDMA_EVENT_I2C1_ERROR           27
#define    UDMA_EVENT_I2S_RESERVED         28
#define    UDMA_EVENT_CAM_RESERVED         29
#define    UDMA_EVENT_RESERVED1            30

#define    PMU_EVENT_CLUSTER_POWER_ON      31
#define    PMU_EVENT_CLUSTER_RESERVED0     32
#define    PMU_EVENT_CLUSTER_RESERVED1     33
#define    PMU_EVENT_CLUSTER_RESERVED2     34
#define    PMU_EVENT_CLUSTER_CLOCK_GATING  35
#define    PMU_DLC_EVENT_BRIDGE_PICL_OK    36
#define    PMU_DLC_EVENT_BRIDGE_SCU_OK     37
#define    PMU_EVENTS_NUM                  7

#define    PWM0_EVENT                      38
#define    PWM1_EVENT                      39
#define    PWM2_EVENT                      40
#define    PWM3_EVENT                      41
#define    GPIO_EVENT                      42              /**< GPIO group interrupt */
#define    RTC_APB_EVENT                   43
#define    RTC_EVENT                       44
#define    EVENT_RESERVED0                 45
#define    EVENT_RESERVED1                 46
#define    EVENT_RESERVED2                 47

#define    SOC_SW_EVENT0                   48              /**< GAP8 SOC SW Event0 */
#define    SOC_SW_EVENT1                   49              /**< GAP8 SOC SW Event1 */
#define    SOC_SW_EVENT2                   50              /**< GAP8 SOC SW Event2 */
#define    SOC_SW_EVENT3                   51              /**< GAP8 SOC SW Event3 */
#define    SOC_SW_EVENT4                   52              /**< GAP8 SOC SW Event4 */
#define    SOC_SW_EVENT5                   53              /**< GAP8 SOC SW Event5 */
#define    SOC_SW_EVENT6                   54              /**< GAP8 SOC SW Event6 */
#define    SOC_SW_EVENT7                   55              /**< GAP8 SOC SW Event7 */
#define    REF32K_CLK_RISE_EVENT           56              /**< Reference 32 kHz clock rising-edge event */

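/*
 * Each 32-bit MASKx register of the SoC event unit covers 32 consecutive
 * event numbers, so an event ID maps to byte offset (evt / 32) * 4 from
 * MASK0 and bit position (evt % 32), exactly as computed by the helpers
 * below. For example, GPIO_EVENT (42) lands at offset 4, bit 10.
 */
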
/*******************************************************************************
 * APIs
 ******************************************************************************/

#if defined(__cplusplus)
extern "C" {
#endif /* __cplusplus */

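/*
 * The event unit exposes three mask banks, one per destination implied by
 * the offsets used below: the FC (SOC_FC_MASK0_OFFSET), the cluster
 * (SOC_CL_MASK0_OFFSET) and a third bank (SOC_PR_MASK0_OFFSET), presumably
 * for the peripheral/uDMA side. As implied by the set/clear helpers further
 * down, a mask bit of 1 blocks an event and a mask bit of 0 lets it through.
 */
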
static inline void soc_eu_fc_write(uint32_t val, uint32_t reg)
{
	writew(val, (uintptr_t)(SOC_EU_ADDR + SOC_FC_MASK0_OFFSET + reg));
}

static inline uint32_t soc_eu_fc_read(uint32_t reg)
{
	return readw((uintptr_t)(SOC_EU_ADDR + SOC_FC_MASK0_OFFSET + reg));
}

static inline void soc_eu_cl_write(uint32_t val, uint32_t reg)
{
	writew(val, (uintptr_t)(SOC_EU_ADDR + SOC_CL_MASK0_OFFSET + reg));
}

static inline uint32_t soc_eu_cl_read(uint32_t reg)
{
	return readw((uintptr_t)(SOC_EU_ADDR + SOC_CL_MASK0_OFFSET + reg));
}

static inline void soc_eu_pr_write(uint32_t val, uint32_t reg)
{
	writew(val, (uintptr_t)(SOC_EU_ADDR + SOC_PR_MASK0_OFFSET + reg));
}

static inline uint32_t soc_eu_pr_read(uint32_t reg)
{
	return readw((uintptr_t)(SOC_EU_ADDR + SOC_PR_MASK0_OFFSET + reg));
}

/* Clear the FC mask bit for event `evt` (0..255), enabling delivery of the
 * event to the FC. */
static inline void hal_soc_eu_set_fc_mask(int evt) {
	if (evt >= 256 || evt < 0)
		return;

	int shift = evt % 32;
	uint32_t reg_offset = evt / 32 * 4;
	soc_eu_fc_write(soc_eu_fc_read(reg_offset) & ~(1u << shift), reg_offset);
}

/* Clear the PR mask bit for event `evt` (0..255). */
static inline void hal_soc_eu_set_pr_mask(int evt) {
	if (evt >= 256 || evt < 0)
		return;

	int shift = evt % 32;
	uint32_t reg_offset = evt / 32 * 4;
	soc_eu_pr_write(soc_eu_pr_read(reg_offset) & ~(1u << shift), reg_offset);
}

/* Clear the cluster (CL) mask bit for event `evt` (0..255); clusterId is
 * currently unused. */
static inline void hal_soc_eu_set_cl_mask(int clusterId, int evt) {
	(void)clusterId;

	if (evt >= 256 || evt < 0)
		return;

	int shift = evt % 32;
	uint32_t reg_offset = evt / 32 * 4;
	soc_eu_cl_write(soc_eu_cl_read(reg_offset) & ~(1u << shift), reg_offset);
}

/* Set the FC mask bit for event `evt` (0..255), blocking delivery of the
 * event to the FC. */
static inline void hal_soc_eu_clear_fc_mask(int evt) {
	if (evt >= 256 || evt < 0)
		return;

	int shift = evt % 32;
	uint32_t reg_offset = evt / 32 * 4;
	soc_eu_fc_write(soc_eu_fc_read(reg_offset) | (1u << shift), reg_offset);
}

/* Set the PR mask bit for event `evt` (0..255). */
static inline void hal_soc_eu_clear_pr_mask(int evt) {
	if (evt >= 256 || evt < 0)
		return;

	int shift = evt % 32;
	uint32_t reg_offset = evt / 32 * 4;
	soc_eu_pr_write(soc_eu_pr_read(reg_offset) | (1u << shift), reg_offset);
}

/* Set the cluster (CL) mask bit for event `evt` (0..255); clusterId is
 * currently unused. */
static inline void hal_soc_eu_clear_cl_mask(int clusterId, int evt) {
	(void)clusterId;

	if (evt >= 256 || evt < 0)
		return;

	int shift = evt % 32;
	uint32_t reg_offset = evt / 32 * 4;
	soc_eu_cl_write(soc_eu_cl_read(reg_offset) | (1u << shift), reg_offset);
}


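/*
 * Usage sketch (illustrative only): enable the GPIO group interrupt for the
 * FC domain, then mask it again once it is no longer needed.
 *
 *     hal_soc_eu_set_fc_mask(GPIO_EVENT);     // clear mask bit: event reaches the FC
 *     // ... handle GPIO events delivered via FC_SOC_EVENT_IRQn ...
 *     hal_soc_eu_clear_fc_mask(GPIO_EVENT);   // set mask bit: event blocked again
 */
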
/* Write a full 32-bit value to the register at SOC_EVENT_OFFSET. */
static inline void hal_soc_eu_set_mask(uint32_t mask) {
	writew(mask, (uintptr_t)(SOC_EU_ADDR + SOC_EVENT_OFFSET));
}

/* Not implemented yet: calls abort(). See the TODO and the commented-out
 * reference code below. */
static inline void hal_soc_eu_configure(int cluster, int event, int active) {
	(void)cluster;
	(void)event;
	(void)active;

	abort();
	/* TODO: implement this */
/* #if SOC_SW_EVENT0 < 32 */
/*     uint32_t mask = (cluster == FC_CLUSTER_ID) ? (SOCEU->FC_MASK_LSB) : (SOCEU->CL_MASK_LSB); */
/*     int fullEvent = event - SOC_SW_EVENT0; */

/*     if (!active) */
/*       mask = mask | (1<<fullEvent); */
/*     else */
/*       mask = mask & ~(1<<fullEvent); */

/*     if (cluster == FC_CLUSTER_ID) */
/*       SOCEU->FC_MASK_LSB = mask; */
/*     else */
/*       SOCEU->CL_MASK_LSB = mask; */
/* #else */
/*     uint32_t mask = (cluster == FC_CLUSTER_ID) ? (SOCEU->FC_MASK_MSB) : (SOCEU->CL_MASK_MSB); */
/*     int fullEvent = event + SOC_SW_EVENT0 - 32; */

/*     if (!active) */
/*       mask = mask | (1<<fullEvent); */
/*     else */
/*       mask = mask & ~(1<<fullEvent); */

/*     if (cluster == FC_CLUSTER_ID) */
/*       SOCEU->FC_MASK_MSB = mask; */
/*     else */
/*       SOCEU->CL_MASK_MSB = mask; */
/* #endif */
}
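
/*
 * One possible shape for hal_soc_eu_configure(), expressed with the
 * per-event helpers above rather than the SOCEU register struct of the
 * reference code. This assumes `event` is an absolute SoC event number and
 * that FC_CLUSTER_ID is provided by the target headers; the reference code
 * treats `event` relative to SOC_SW_EVENT0, so the intended semantics still
 * need to be confirmed. Sketch only:
 *
 *     if (cluster == FC_CLUSTER_ID) {
 *         if (active)
 *             hal_soc_eu_set_fc_mask(event);    // unmask for the FC
 *         else
 *             hal_soc_eu_clear_fc_mask(event);  // mask for the FC
 *     } else {
 *         if (active)
 *             hal_soc_eu_set_cl_mask(cluster, event);
 *         else
 *             hal_soc_eu_clear_cl_mask(cluster, event);
 *     }
 */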

/* static inline int SOC_EU_ReserveConfig(int cluster, int event) { */
/*   if (_bitfield_reserve(&soc_events_mask, event)) return -1; */
/*   SOC_EU_Configure(cluster, event, 1); */
/*   return 0; */
/* } */

#if defined(__cplusplus)
}
#endif /* __cplusplus */

/*! @} */


#endif /* HAL_INCLUDE_HAL_SOC_EU_PERIPH_H_ */