/*
 * Copyright (c) 2006-2023, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2023-02-25     GuEe-GUI     the first version
 */
10 
11 #ifndef __DMA_H__
12 #define __DMA_H__
13 
14 #include <rtthread.h>
15 #include <drivers/ofw.h>
16 #include <drivers/misc.h>
17 #include <drivers/core/dm.h>
18 
19 #include <mmu.h>
20 #include <mm_page.h>
21 #include <bitmap.h>
22 
/* Forward declarations; both types are defined later in this header */
struct rt_dma_chan;
struct rt_dma_controller_ops;
25 
/* Direction of a DMA transfer between memory and device endpoints */
enum rt_dma_transfer_direction
{
    RT_DMA_MEM_TO_MEM,
    RT_DMA_MEM_TO_DEV,
    RT_DMA_DEV_TO_MEM,
    RT_DMA_DEV_TO_DEV,

    /* Number of directions; used to size direction-capability bitmaps */
    RT_DMA_DIR_MAX,
};
35 
/*
 * Bus width of a single DMA beat, in bytes.
 * Each enumerator's numeric value equals its width in bytes.
 */
enum rt_dma_slave_buswidth
{
    RT_DMA_SLAVE_BUSWIDTH_UNDEFINED     = 0,
    RT_DMA_SLAVE_BUSWIDTH_1_BYTE        = 1,
    RT_DMA_SLAVE_BUSWIDTH_2_BYTES       = 2,
    RT_DMA_SLAVE_BUSWIDTH_3_BYTES       = 3,
    RT_DMA_SLAVE_BUSWIDTH_4_BYTES       = 4,
    RT_DMA_SLAVE_BUSWIDTH_8_BYTES       = 8,
    RT_DMA_SLAVE_BUSWIDTH_16_BYTES      = 16,
    RT_DMA_SLAVE_BUSWIDTH_32_BYTES      = 32,
    RT_DMA_SLAVE_BUSWIDTH_64_BYTES      = 64,
    RT_DMA_SLAVE_BUSWIDTH_128_BYTES     = 128,

    /* Upper bound marker (one past the largest defined width) */
    RT_DMA_SLAVE_BUSWIDTH_BYTES_MAX,
};
51 
/*
 * Per-channel transfer configuration, passed to the controller through
 * rt_dma_chan_config() / ops->config().
 */
struct rt_dma_slave_config
{
    enum rt_dma_transfer_direction direction;   /* transfer direction */
    enum rt_dma_slave_buswidth src_addr_width;  /* source beat width */
    enum rt_dma_slave_buswidth dst_addr_width;  /* destination beat width */

    rt_ubase_t src_addr;    /* source bus address */
    rt_ubase_t dst_addr;    /* destination bus address */

    rt_uint32_t src_maxburst;           /* max burst length on the source side */
    rt_uint32_t dst_maxburst;           /* max burst length on the destination side */
    rt_uint32_t src_port_window_size;   /* source port window size — units depend on controller; see driver */
    rt_uint32_t dst_port_window_size;   /* destination port window size — units depend on controller; see driver */
};
66 
/*
 * Description of one transfer request, consumed by the rt_dma_prep_*()
 * helpers declared below.
 */
struct rt_dma_slave_transfer
{
    rt_ubase_t src_addr;    /* source bus address */
    rt_ubase_t dst_addr;    /* destination bus address */

    void *buffer;           /* CPU-visible buffer pointer */
    rt_ubase_t dma_handle;  /* bus address of `buffer` (DMA handle) */
    rt_size_t buffer_len;   /* total buffer length in bytes */
    rt_size_t period_len;   /* period length for cyclic transfers, in bytes */
};
77 
/* A DMA controller instance, registered via rt_dma_controller_register() */
struct rt_dma_controller
{
    rt_list_t list;             /* node in the global controller list */

    struct rt_device *dev;      /* underlying device of this controller */

    /* Bitmap of supported directions; set via rt_dma_controller_add_direction() */
    RT_BITMAP_DECLARE(dir_cap, RT_DMA_DIR_MAX);
    const struct rt_dma_controller_ops *ops;    /* driver callbacks */

    rt_list_t channels_nodes;   /* list of channels owned by this controller */
    struct rt_mutex mutex;      /* protects this controller's state */
};
90 
/*
 * Driver callbacks implemented by each DMA controller.
 * All addresses passed here are bus (DMA) addresses.
 */
struct rt_dma_controller_ops
{
    /* Allocate a channel for `slave`; `fw_data` carries firmware (e.g. OFW) data */
    struct rt_dma_chan *(*request_chan)(struct rt_dma_controller *ctrl,
            struct rt_device *slave, void *fw_data);
    rt_err_t (*release_chan)(struct rt_dma_chan *chan);

    rt_err_t (*start)(struct rt_dma_chan *chan);
    rt_err_t (*stop)(struct rt_dma_chan *chan);
    rt_err_t (*config)(struct rt_dma_chan *chan, struct rt_dma_slave_config *conf);

    /* Prepare a memory-to-memory copy of `len` bytes */
    rt_err_t (*prep_memcpy)(struct rt_dma_chan *chan,
            rt_ubase_t dma_addr_src, rt_ubase_t dma_addr_dst, rt_size_t len);

    /* Prepare a cyclic transfer over a buffer split into `period_len` periods */
    rt_err_t (*prep_cyclic)(struct rt_dma_chan *chan,
            rt_ubase_t dma_buf_addr, rt_size_t buf_len, rt_size_t period_len,
            enum rt_dma_transfer_direction dir);

    /* Prepare a single (one-shot) transfer of `buf_len` bytes */
    rt_err_t (*prep_single)(struct rt_dma_chan *chan,
            rt_ubase_t dma_buf_addr, rt_size_t buf_len,
            enum rt_dma_transfer_direction dir);
};
112 
/* One DMA channel, obtained with rt_dma_chan_request() */
struct rt_dma_chan
{
    const char *name;                   /* channel name used at request time */

    struct rt_dma_controller *ctrl;     /* owning controller */
    struct rt_device *slave;            /* device this channel serves */

    rt_list_t list;                     /* node in ctrl->channels_nodes */
    rt_err_t conf_err;                  /* presumably last config() result — verify in .c */
    rt_err_t prep_err;                  /* presumably last prep_*() result — verify in .c */
    struct rt_dma_slave_config conf;    /* cached channel configuration */
    struct rt_dma_slave_transfer transfer;  /* cached transfer description */

    /* Completion callback; `size` matches the argument of rt_dma_chan_done() */
    void (*callback)(struct rt_dma_chan *chan, rt_size_t size);

    void *priv;                         /* driver-private data */
};
130 
/* A DMA memory pool, installed with rt_dma_pool_install() */
struct rt_dma_pool
{
    rt_region_t region;     /* memory region covered by this pool */

    rt_list_t list;         /* node in the global pool list */

    rt_ubase_t flags;       /* RT_DMA_F_* attributes of this pool */

    rt_bitmap_t *map;       /* allocation bitmap over the region */
    rt_size_t bits;         /* number of bits in `map` */
    rt_ubase_t start;       /* NOTE(review): looks like a search-start hint into `map` — confirm in .c */

    struct rt_device *dev;  /* device the pool is dedicated to, if any */
};
145 
/*
 * Per-device DMA mapping operations; installed on a device with
 * rt_dma_device_set_ops(). Signatures mirror the rt_dma_alloc()/rt_dma_free()
 * and rt_dma_sync_*() entry points declared below.
 */
struct rt_dma_map_ops
{
    void *(*alloc)(struct rt_device *dev, rt_size_t size,
            rt_ubase_t *dma_handle, rt_ubase_t flags);
    void (*free)(struct rt_device *dev, rt_size_t size,
            void *cpu_addr, rt_ubase_t dma_handle, rt_ubase_t flags);
    rt_err_t (*sync_out_data)(struct rt_device *dev, void *data, rt_size_t size,
            rt_ubase_t *dma_handle, rt_ubase_t flags);
    rt_err_t (*sync_in_data)(struct rt_device *dev, void *out_data, rt_size_t size,
            rt_ubase_t dma_handle, rt_ubase_t flags);
};
157 
rt_dma_controller_add_direction(struct rt_dma_controller * ctrl,enum rt_dma_transfer_direction dir)158 rt_inline void rt_dma_controller_add_direction(struct rt_dma_controller *ctrl,
159         enum rt_dma_transfer_direction dir)
160 {
161     RT_ASSERT(ctrl != RT_NULL);
162     RT_ASSERT(dir < RT_DMA_DIR_MAX);
163 
164     rt_bitmap_set_bit(ctrl->dir_cap, dir);
165 }
166 
/* Controller registration with the DMA core */
rt_err_t rt_dma_controller_register(struct rt_dma_controller *ctrl);
rt_err_t rt_dma_controller_unregister(struct rt_dma_controller *ctrl);

/* Channel control; rt_dma_chan_done() reports `size` bytes completed
 * (it matches the chan->callback signature) */
rt_err_t rt_dma_chan_start(struct rt_dma_chan *chan);
rt_err_t rt_dma_chan_stop(struct rt_dma_chan *chan);
rt_err_t rt_dma_chan_config(struct rt_dma_chan *chan,
        struct rt_dma_slave_config *conf);
rt_err_t rt_dma_chan_done(struct rt_dma_chan *chan, rt_size_t size);

/* Prepare a transfer described by `transfer` (memcpy / cyclic / single) */
rt_err_t rt_dma_prep_memcpy(struct rt_dma_chan *chan,
        struct rt_dma_slave_transfer *transfer);
rt_err_t rt_dma_prep_cyclic(struct rt_dma_chan *chan,
        struct rt_dma_slave_transfer *transfer);
rt_err_t rt_dma_prep_single(struct rt_dma_chan *chan,
        struct rt_dma_slave_transfer *transfer);

/* Acquire/release a channel by name for the given slave device */
struct rt_dma_chan *rt_dma_chan_request(struct rt_device *dev, const char *name);
rt_err_t rt_dma_chan_release(struct rt_dma_chan *chan);
185 
/* Allocation attribute flags for rt_dma_alloc()/rt_dma_free() */
#define RT_DMA_F_LINEAR     RT_BIT(0)
#define RT_DMA_F_32BITS     RT_BIT(1)
#define RT_DMA_F_NOCACHE    RT_BIT(2)
#define RT_DMA_F_DEVICE     RT_BIT(3)
#define RT_DMA_F_NOMAP      RT_BIT(4)

/* DMA allocations are managed in units of the architecture page size */
#define RT_DMA_PAGE_SIZE    ARCH_PAGE_SIZE

/* Allocate/free DMA memory for `dev`; `*dma_handle` receives the bus address */
void *rt_dma_alloc(struct rt_device *dev, rt_size_t size,
        rt_ubase_t *dma_handle, rt_ubase_t flags);

void rt_dma_free(struct rt_device *dev, rt_size_t size,
        void *cpu_addr, rt_ubase_t dma_handle, rt_ubase_t flags);
199 
rt_dma_alloc_coherent(struct rt_device * dev,rt_size_t size,rt_ubase_t * dma_handle)200 rt_inline void *rt_dma_alloc_coherent(struct rt_device *dev, rt_size_t size,
201         rt_ubase_t *dma_handle)
202 {
203     return rt_dma_alloc(dev, size, dma_handle,
204             RT_DMA_F_NOCACHE | RT_DMA_F_LINEAR);
205 }
206 
rt_dma_free_coherent(struct rt_device * dev,rt_size_t size,void * cpu_addr,rt_ubase_t dma_handle)207 rt_inline void rt_dma_free_coherent(struct rt_device *dev, rt_size_t size,
208         void *cpu_addr, rt_ubase_t dma_handle)
209 {
210     rt_dma_free(dev, size, cpu_addr, dma_handle,
211             RT_DMA_F_NOCACHE | RT_DMA_F_LINEAR);
212 }
213 
/* Sync data between CPU and device views (signatures mirror rt_dma_map_ops) */
rt_err_t rt_dma_sync_out_data(struct rt_device *dev, void *data, rt_size_t size,
        rt_ubase_t *dma_handle, rt_ubase_t flags);
rt_err_t rt_dma_sync_in_data(struct rt_device *dev, void *out_data, rt_size_t size,
        rt_ubase_t dma_handle, rt_ubase_t flags);
218 
rt_dma_device_is_coherent(struct rt_device * dev)219 rt_inline rt_bool_t rt_dma_device_is_coherent(struct rt_device *dev)
220 {
221     return rt_dm_dev_prop_read_bool(dev, "dma-coherent");
222 }
223 
rt_dma_device_set_ops(struct rt_device * dev,const struct rt_dma_map_ops * ops)224 rt_inline void rt_dma_device_set_ops(struct rt_device *dev,
225         const struct rt_dma_map_ops *ops)
226 {
227     dev->dma_ops = ops;
228 }
229 
/* Install a DMA pool over `region`; returns the new pool or RT_NULL-style failure —
 * NOTE(review): exact failure convention is defined in the .c, confirm there */
struct rt_dma_pool *rt_dma_pool_install(rt_region_t *region);
rt_err_t rt_dma_pool_extract(rt_region_t *region_list, rt_size_t list_len,
        rt_size_t cma_size, rt_size_t coherent_pool_size);

#endif /* __DMA_H__ */
235