/*
 * Copyright 2019 The Hafnium Authors.
 *
 * Use of this source code is governed by a BSD-style
 * license that can be found in the LICENSE file or at
 * https://opensource.org/licenses/BSD-3-Clause.
 */

#pragma once

#include <stddef.h>
#include <stdint.h>

#include "hf/arch/barriers.h"
#include "hf/arch/types.h"

#include "hf/assert.h"

/* Opaque types for different sized fields of memory mapped IO. */

typedef struct {
	volatile uint8_t *ptr;
} io8_t;

typedef struct {
	volatile uint16_t *ptr;
} io16_t;

typedef struct {
	volatile uint32_t *ptr;
} io32_t;

typedef struct {
	volatile uint64_t *ptr;
} io64_t;

typedef struct {
	volatile uint8_t *base;
	size_t count;
} io8_array_t;

typedef struct {
	volatile uint16_t *base;
	size_t count;
} io16_array_t;

typedef struct {
	volatile uint32_t *base;
	size_t count;
} io32_array_t;

typedef struct {
	volatile uint64_t *base;
	size_t count;
} io64_array_t;

/* Constructors for literals. */

static inline io8_t io8_c(uintpaddr_t addr, uintpaddr_t offset)
{
	return (io8_t){.ptr = (volatile uint8_t *)(addr + offset)};
}

static inline io8_array_t io8_array_c(uintpaddr_t addr, uintpaddr_t offset,
				      uint32_t count)
{
	return (io8_array_t){.base = (volatile uint8_t *)(addr + offset),
			     .count = count};
}

static inline io16_t io16_c(uintpaddr_t addr, uintpaddr_t offset)
{
	return (io16_t){.ptr = (volatile uint16_t *)(addr + offset)};
}

static inline io16_array_t io16_array_c(uintpaddr_t addr, uintpaddr_t offset,
					uint32_t count)
{
	return (io16_array_t){.base = (volatile uint16_t *)(addr + offset),
			      .count = count};
}

static inline io32_t io32_c(uintpaddr_t addr, uintpaddr_t offset)
{
	return (io32_t){.ptr = (volatile uint32_t *)(addr + offset)};
}

static inline io32_array_t io32_array_c(uintpaddr_t addr, uintpaddr_t offset,
					uint32_t count)
{
	return (io32_array_t){.base = (volatile uint32_t *)(addr + offset),
			      .count = count};
}

static inline io64_t io64_c(uintpaddr_t addr, uintpaddr_t offset)
{
	return (io64_t){.ptr = (volatile uint64_t *)(addr + offset)};
}

static inline io64_array_t io64_array_c(uintpaddr_t addr, uintpaddr_t offset,
					uint32_t count)
{
	return (io64_array_t){.base = (volatile uint64_t *)(addr + offset),
			      .count = count};
}

#define IO8_C(addr) io8_c((addr), 0)
#define IO16_C(addr) io16_c((addr), 0)
#define IO32_C(addr) io32_c((addr), 0)
#define IO64_C(addr) io64_c((addr), 0)

#define IO8_ARRAY_C(addr, cnt) io8_array_c((addr), 0, cnt)
#define IO16_ARRAY_C(addr, cnt) io16_array_c((addr), 0, cnt)
#define IO32_ARRAY_C(addr, cnt) io32_array_c((addr), 0, cnt)
#define IO64_ARRAY_C(addr, cnt) io64_array_c((addr), 0, cnt)
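
/*
 * Usage sketch (illustrative only; UART_DR_ADDR is a hypothetical address,
 * not part of this interface). Handles are small value types, so a driver
 * can construct them wherever they are needed and pass them by value:
 *
 *	io32_t dr = IO32_C(UART_DR_ADDR);
 *	io_write32(dr, 'x');
 *	uint32_t v = io_read32(dr);
 */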

/** Read from memory-mapped IO. */

static inline uint8_t io_read8(io8_t io)
{
	return *io.ptr;
}

static inline uint16_t io_read16(io16_t io)
{
	return *io.ptr;
}

static inline uint32_t io_read32(io32_t io)
{
	return *io.ptr;
}

static inline uint64_t io_read64(io64_t io)
{
	return *io.ptr;
}

static inline uint8_t io_read8_array(io8_array_t io, size_t n)
{
	assert(n < io.count);
	return io.base[n];
}

static inline uint16_t io_read16_array(io16_array_t io, size_t n)
{
	assert(n < io.count);
	return io.base[n];
}

static inline uint32_t io_read32_array(io32_array_t io, size_t n)
{
	assert(n < io.count);
	return io.base[n];
}

static inline uint64_t io_read64_array(io64_array_t io, size_t n)
{
	assert(n < io.count);
	return io.base[n];
}
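
/*
 * Usage sketch (illustrative only; GICD_BASE and the register count are
 * hypothetical). Array handles carry their length, so an out-of-range index
 * trips the assert in the accessor rather than touching an unrelated
 * address:
 *
 *	io32_array_t regs = IO32_ARRAY_C(GICD_BASE, 32);
 *	uint32_t r5 = io_read32_array(regs, 5);
 */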

/**
 * Read from memory-mapped IO with memory barrier.
 *
 * The read is ordered before subsequent memory accesses.
 */

static inline uint8_t io_read8_mb(io8_t io)
{
	uint8_t v = io_read8(io);

	data_sync_barrier();
	return v;
}

static inline uint16_t io_read16_mb(io16_t io)
{
	uint16_t v = io_read16(io);

	data_sync_barrier();
	return v;
}

static inline uint32_t io_read32_mb(io32_t io)
{
	uint32_t v = io_read32(io);

	data_sync_barrier();
	return v;
}

static inline uint64_t io_read64_mb(io64_t io)
{
	uint64_t v = io_read64(io);

	data_sync_barrier();
	return v;
}

static inline uint8_t io_read8_array_mb(io8_array_t io, size_t n)
{
	uint8_t v = io_read8_array(io, n);

	data_sync_barrier();
	return v;
}

static inline uint16_t io_read16_array_mb(io16_array_t io, size_t n)
{
	uint16_t v = io_read16_array(io, n);

	data_sync_barrier();
	return v;
}

static inline uint32_t io_read32_array_mb(io32_array_t io, size_t n)
{
	uint32_t v = io_read32_array(io, n);

	data_sync_barrier();
	return v;
}

static inline uint64_t io_read64_array_mb(io64_array_t io, size_t n)
{
	uint64_t v = io_read64_array(io, n);

	data_sync_barrier();
	return v;
}
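
/*
 * Usage sketch (illustrative only; status_reg, DONE_BIT, process() and
 * buffer are hypothetical). The barrier variants are for cases where later
 * memory accesses must not be reordered before the device read, e.g.
 * checking a completion flag before touching a buffer the device wrote:
 *
 *	if (io_read32_mb(status_reg) & DONE_BIT) {
 *		process(buffer);
 *	}
 */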

/* Write to memory-mapped IO. */

static inline void io_write8(io8_t io, uint8_t v)
{
	*io.ptr = v;
}

static inline void io_write16(io16_t io, uint16_t v)
{
	*io.ptr = v;
}

static inline void io_write32(io32_t io, uint32_t v)
{
	*io.ptr = v;
}

static inline void io_write64(io64_t io, uint64_t v)
{
	*io.ptr = v;
}

/* Read-modify-write helpers for 32-bit memory-mapped registers. */

static inline void io_clrbits32(io32_t io, uint32_t clear)
{
	io_write32(io, io_read32(io) & ~clear);
}

static inline void io_setbits32(io32_t io, uint32_t set)
{
	io_write32(io, io_read32(io) | set);
}

static inline void io_clrsetbits32(io32_t io, uint32_t clear, uint32_t set)
{
	io_write32(io, (io_read32(io) & ~clear) | set);
}
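
/*
 * Usage sketch (illustrative only; ctrl_reg, MODE_MASK and MODE_FAST are
 * hypothetical). io_clrsetbits32() replaces one field of a register while
 * leaving the other bits untouched:
 *
 *	io_clrsetbits32(ctrl_reg, MODE_MASK, MODE_FAST);
 *
 * Note that the read-modify-write sequence is not atomic, so concurrent
 * writers to the same register need external synchronisation.
 */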

static inline void io_write8_array(io8_array_t io, size_t n, uint8_t v)
{
	assert(n < io.count);
	io.base[n] = v;
}

static inline void io_write16_array(io16_array_t io, size_t n, uint16_t v)
{
	assert(n < io.count);
	io.base[n] = v;
}

static inline void io_write32_array(io32_array_t io, size_t n, uint32_t v)
{
	assert(n < io.count);
	io.base[n] = v;
}

static inline void io_write64_array(io64_array_t io, size_t n, uint64_t v)
{
	assert(n < io.count);
	io.base[n] = v;
}

/*
 * Write to memory-mapped IO with memory barrier.
 *
 * The write is ordered after previous memory accesses.
 */

static inline void io_write8_mb(io8_t io, uint8_t v)
{
	data_sync_barrier();
	io_write8(io, v);
}

static inline void io_write16_mb(io16_t io, uint16_t v)
{
	data_sync_barrier();
	io_write16(io, v);
}

static inline void io_write32_mb(io32_t io, uint32_t v)
{
	data_sync_barrier();
	io_write32(io, v);
}

static inline void io_write64_mb(io64_t io, uint64_t v)
{
	data_sync_barrier();
	io_write64(io, v);
}

static inline void io_write8_array_mb(io8_array_t io, size_t n, uint8_t v)
{
	data_sync_barrier();
	io_write8_array(io, n, v);
}

static inline void io_write16_array_mb(io16_array_t io, size_t n, uint16_t v)
{
	data_sync_barrier();
	io_write16_array(io, n, v);
}

static inline void io_write32_array_mb(io32_array_t io, size_t n, uint32_t v)
{
	data_sync_barrier();
	io_write32_array(io, n, v);
}

static inline void io_write64_array_mb(io64_array_t io, size_t n, uint64_t v)
{
	data_sync_barrier();
	io_write64_array(io, n, v);
}
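
/*
 * Usage sketch (illustrative only; fill_descriptor(), desc and doorbell_reg
 * are hypothetical). The barrier variants order the device write after
 * earlier memory accesses, e.g. making a descriptor visible before ringing
 * a doorbell that tells the device to read it:
 *
 *	fill_descriptor(desc);
 *	io_write32_mb(doorbell_reg, 1);
 */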