1 /*
2 * Copyright (c) 2021-2024 HPMicro
3 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 *
6 */
7 #include "hpm_sdxc_drv.h"
8
9
10 #define SDXC_TMCLK_IN_MHZ (1UL)
11
12 #define SDXC_DMA_MAX_XFER_LEN_26BIT ((1UL << 26) - 4U)
13 #define SDXC_DMA_MAX_XFER_LEN_16BIT ((1UL << 16) - 4U)
14
15 #define SDXC_SYS_DMA_ALIGN_LEN (4U)
16
/* Internal transfer-type flags; combined into xfer_flags and decoded by
 * sdxc_set_transfer_config() when building the CMD_XFER register value.
 */
enum {
    sdxc_cmd_only = (1UL << 0),               /* command without a data phase */
    sdxc_cmd_and_tx_data = (1UL << 1),        /* command followed by write data */
    sdxc_cmd_and_rx_data = (1UL << 2),        /* command followed by read data */
    sdxc_data_with_auto_cmd12 = (1UL << 3),   /* request Auto CMD12 after the data phase */
    sdxc_data_with_auto_cmd23 = (1UL << 4),   /* request Auto CMD23 handling */
    sdxc_boot_data = (1UL << 5),              /* eMMC boot data transfer */
    sdxc_boot_data_continuous = (1UL << 6),   /* continue an ongoing boot transfer */
};
26
27 static const uint32_t s_sdxc_boot_dummy = 0;
28
29
30 static hpm_stat_t sdxc_set_transfer_config(SDXC_Type *base,
31 uint32_t xfer_flags,
32 uint32_t block_size,
33 uint32_t block_cnt,
34 uint32_t *new_flags);
35
36 static void sdxc_read_data_buf(SDXC_Type *base, uint32_t *data, uint32_t num_of_words);
37
38 static hpm_stat_t sdxc_read_via_data_buf_blocking(SDXC_Type *base, sdxc_data_t *data);
39
40 static void sdxc_write_data_buf(SDXC_Type *base, const uint32_t *data, uint32_t num_of_words);
41
42 static hpm_stat_t sdxc_write_via_data_buf_blocking(SDXC_Type *base, sdxc_data_t *data);
43
44 static hpm_stat_t sdxc_transfer_data_blocking(SDXC_Type *base, sdxc_data_t *data, bool enable_dma);
45
46 static hpm_stat_t sdxc_tuning_error_recovery(SDXC_Type *base);
47
48 static bool sdxc_is_bus_idle(SDXC_Type *base);
49
/* Build the data-transfer related fields of the CMD_XFER register.
 *
 * @param base       SDXC peripheral base
 * @param xfer_flags combination of the internal sdxc_* transfer flags
 * @param block_size value written to BLK_ATTR (block size, raw)
 * @param block_cnt  number of blocks; written to SDMASA (Host Version 4 mode)
 * @param new_flags  [out] CMD_XFER value with transfer fields filled in
 *
 * @retval status_sdxc_busy         CMD or DAT line is inhibited
 * @retval status_invalid_argument  block_cnt exceeds the BLOCK_CNT field range
 * @retval status_success           otherwise
 *
 * Note: for sdxc_boot_data_continuous the function only toggles the block-gap
 * request bits in PROT_CTRL and returns early; *new_flags is NOT written.
 */
static hpm_stat_t sdxc_set_transfer_config(SDXC_Type *base,
                                           uint32_t xfer_flags,
                                           uint32_t block_size,
                                           uint32_t block_cnt,
                                           uint32_t *new_flags)
{
    /* Start from the current CMD_XFER with all transfer-related fields cleared */
    uint32_t flags = base->CMD_XFER & ~(SDXC_CMD_XFER_MULTI_BLK_SEL_MASK | SDXC_CMD_XFER_BLOCK_COUNT_ENABLE_MASK |
                                        SDXC_CMD_XFER_DATA_PRESENT_SEL_MASK | SDXC_CMD_XFER_DATA_XFER_DIR_MASK |
                                        SDXC_CMD_XFER_AUTO_CMD_ENABLE_MASK | SDXC_CMD_XFER_RESP_TYPE_SELECT_MASK |
                                        SDXC_CMD_XFER_DMA_ENABLE_MASK);
    if (IS_HPM_BITMASK_SET(base->PSTATE, SDXC_PSTATE_CMD_INHIBIT_MASK)) {
        return status_sdxc_busy;
    } else {
        if (IS_HPM_BITMASK_SET(xfer_flags, sdxc_boot_data_continuous)) {
            /* Resume a stopped boot transfer at the block gap; no CMD_XFER change needed */
            base->PROT_CTRL &= ~SDXC_PROT_CTRL_STOP_BG_REQ_MASK;
            base->PROT_CTRL |= SDXC_PROT_CTRL_CONTINUE_REQ_MASK;
            return status_success;
        }

        if (IS_HPM_BITMASK_SET(base->PSTATE, SDXC_PSTATE_DAT_INHIBIT_MASK)) {
            return status_sdxc_busy;
        }

        /* block_cnt must fit into the BLOCK_CNT register field */
        if (block_cnt > SDXC_BLK_ATTR_BLOCK_CNT_GET(SDXC_BLK_ATTR_BLOCK_CNT_MASK)) {
            return status_invalid_argument;
        }

        /* Direction bit set => card-to-host (read) */
        if (IS_HPM_BITMASK_SET(xfer_flags, sdxc_cmd_and_rx_data)) {
            flags |= SDXC_CMD_XFER_DATA_XFER_DIR_MASK;
        }
        if (block_cnt > 1U) {
            flags |= SDXC_CMD_XFER_MULTI_BLK_SEL_MASK | SDXC_CMD_XFER_BLOCK_COUNT_ENABLE_MASK;
            /* Auto CMD12 only makes sense for multi-block transfers */
            if (IS_HPM_BITMASK_SET(xfer_flags, sdxc_data_with_auto_cmd12)) {
                flags |= SDXC_CMD_XFER_AUTO_CMD_ENABLE_SET(sdxc_auto_cmd12_enabled);
            }
        }

        if (IS_HPM_BITMASK_SET(xfer_flags, sdxc_data_with_auto_cmd23)) {
            flags |= SDXC_CMD_XFER_AUTO_CMD_ENABLE_SET(sdxc_auto_cmd23_enabled);
        }

        if (IS_HPM_BITMASK_CLR(xfer_flags, sdxc_boot_data)) {
            /* Normal transfer: program block size and (via SDMASA) block count */
            base->BLK_ATTR = block_size;
            base->SDMASA = block_cnt;
        } else {
            /* Boot transfer: force multi-block with block counting enabled */
            flags |= SDXC_CMD_XFER_MULTI_BLK_SEL_MASK | SDXC_CMD_XFER_BLOCK_COUNT_ENABLE_MASK;
        }
    }

    *new_flags = flags;

    return status_success;
}
104
105
/* Copy the command response from the RESP registers into cmd->response.
 *
 * For R2 (136-bit) responses the four RESP words are shifted left by one byte
 * so cmd->response matches the R2 layout of the SD specification. When Auto
 * CMD12 is enabled in CMD_XFER, its response (latched in RESP[3]) is stored
 * in cmd->auto_cmd_resp. Finally, for R1/R1b/R5/R6 responses the card-status
 * word is checked against cmd->resp_error_flags.
 *
 * @return status_sdxc_send_cmd_failed if a flagged error bit is set in the
 *         response, status_success otherwise.
 */
hpm_stat_t sdxc_receive_cmd_response(SDXC_Type *base, sdxc_command_t *cmd)
{
    hpm_stat_t status = status_success;

    if (cmd->resp_type != sdxc_dev_resp_none) {

        cmd->response[0] = base->RESP[0];
        if (cmd->resp_type == sdxc_dev_resp_r2) {

            /* R3-R2-R1-R0 (lowest 8 bits are invalid bits) has the same format as R2 format in SD spec
             * after removing internal CRC7 and end bit
             */
            uint32_t resp0 = base->RESP[0];
            uint32_t resp1 = base->RESP[1];
            uint32_t resp2 = base->RESP[2];
            uint32_t resp3 = base->RESP[3];
            cmd->response[0] = (resp0 << 8);
            cmd->response[1] = (resp1 << 8) | (resp0 >> 24);
            cmd->response[2] = (resp2 << 8) | (resp1 >> 24);
            cmd->response[3] = (resp3 << 8) | (resp2 >> 24);
        }

        /* With Auto CMD12 enabled, the auto-command response lives in RESP[3] */
        if (SDXC_CMD_XFER_AUTO_CMD_ENABLE_GET(base->CMD_XFER) == sdxc_auto_cmd12_enabled) {
            cmd->auto_cmd_resp = base->RESP[3];
        }
    }

    /* check response flag */
    if ((cmd->resp_error_flags != 0U) &&
        ((cmd->resp_type == sdxc_dev_resp_r1) || (cmd->resp_type == sdxc_dev_resp_r1b) ||
         (cmd->resp_type == sdxc_dev_resp_r6) || (cmd->resp_type == sdxc_dev_resp_r5))) {
        if ((cmd->resp_error_flags & cmd->response[0]) != 0U) {
            status = status_sdxc_send_cmd_failed;
        }
    }

    return status;
}
144
/* Drain num_of_words 32-bit words from the controller's internal data buffer
 * into 'data'. Nothing is read if the buffer is not reported readable.
 */
static void sdxc_read_data_buf(SDXC_Type *base, uint32_t *data, uint32_t num_of_words)
{
    if (!sdxc_is_data_buf_readable(base)) {
        return;
    }
    uint32_t *dst = data;
    for (uint32_t remaining = num_of_words; remaining > 0U; remaining--) {
        *dst++ = sdxc_read_data(base);
    }
}
153
/* Read all data blocks via the CPU data-buffer path (no DMA), blocking until
 * completion or error.
 *
 * The block size is rounded up to a 4-byte multiple for single-block reads;
 * multi-block reads with an unaligned block size are rejected. For each block
 * the function busy-waits for buffer-read-ready (or a data error), then pulls
 * the block out word by word.
 *
 * @retval status_invalid_argument      unaligned multi-block size
 * @retval status_sdxc_data_crc_error   CRC error and enable_ignore_error is false
 * @retval status_sdxc_data_timeout_error data timeout (never ignored)
 * @retval status_success               all blocks received
 */
static hpm_stat_t sdxc_read_via_data_buf_blocking(SDXC_Type *base, sdxc_data_t *data)
{
    uint32_t interrupt_status = 0;
    hpm_stat_t status = status_success;
    do {
        /* For multi-block transfer, the block size must be 4-byte aligned */
        if ((data->block_cnt > 1) && (data->block_size % sizeof(uint32_t) != 0)) {
            status = status_invalid_argument;
            break;
        }
        /* Round a single-block size up to the next word boundary */
        if (data->block_size % sizeof(uint32_t) != 0U) {
            data->block_size += sizeof(uint32_t) - (data->block_size % sizeof(uint32_t));
        }

        uint32_t words_per_block = data->block_size / sizeof(uint32_t);
        uint32_t remaining_blocks = data->block_cnt;
        uint32_t *read_buf = data->rx_data;
        while (remaining_blocks > 0) {
            uint32_t status_flags = SDXC_INT_STAT_BUF_RD_READY_MASK | SDXC_STS_DATA_ERR;
            /* Wait until data is ready or timeout event occurs */
            do {
                interrupt_status = sdxc_get_interrupt_status(base);
            } while (!IS_HPM_BITMASK_SET(interrupt_status, status_flags));

            if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_DATA_CRC_ERR_MASK)) {
                /* Handle Data CRC error */
                if (!data->enable_ignore_error) {
                    status = status_sdxc_data_crc_error;
                    break;
                }
                /* Error ignored: clear it and keep receiving */
                sdxc_clear_interrupt_status(base, SDXC_STS_DATA_ERR);
            } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_DATA_TOUT_ERR_MASK)) {
                /* Handle Data timeout error */
                status = status_sdxc_data_timeout_error;
                break;
            } else {
                /* Receive data block by block */
                sdxc_clear_interrupt_status(base, SDXC_INT_STAT_BUF_RD_READY_MASK);
                sdxc_read_data_buf(base, read_buf, words_per_block);
                read_buf += words_per_block;
                remaining_blocks--;
            }
        }

        sdxc_clear_interrupt_status(base, SDXC_INT_STAT_XFER_COMPLETE_MASK);

    } while (false);

    return status;
}
204
/* Push num_of_words 32-bit words from 'data' into the controller's internal
 * data buffer. Nothing is written if the buffer is not reported writable.
 */
static void sdxc_write_data_buf(SDXC_Type *base, const uint32_t *data, uint32_t num_of_words)
{
    if (!sdxc_is_data_buf_writable(base)) {
        return;
    }
    const uint32_t *src = data;
    for (uint32_t remaining = num_of_words; remaining > 0U; remaining--) {
        sdxc_write_data(base, *src++);
    }
}
213
/* Write all data blocks via the CPU data-buffer path (no DMA), blocking until
 * completion or error. Mirrors sdxc_read_via_data_buf_blocking().
 *
 * @retval status_invalid_argument      unaligned multi-block size
 * @retval status_sdxc_data_crc_error   CRC error and enable_ignore_error is false
 * @retval status_sdxc_data_timeout_error data timeout (never ignored)
 * @retval status_success               all blocks sent
 */
static hpm_stat_t sdxc_write_via_data_buf_blocking(SDXC_Type *base, sdxc_data_t *data)
{
    uint32_t interrupt_status = 0;
    hpm_stat_t status = status_success;
    do {
        /* For multi-block transfer, the block size must be 4-byte aligned */
        if ((data->block_cnt > 1) && (data->block_size % sizeof(uint32_t) != 0)) {
            status = status_invalid_argument;
            break;
        }
        /* Round a single-block size up to the next word boundary */
        if (data->block_size % sizeof(uint32_t) != 0U) {
            data->block_size += sizeof(uint32_t) - (data->block_size % sizeof(uint32_t));
        }

        uint32_t words_per_block = data->block_size / sizeof(uint32_t);
        uint32_t remaining_blocks = data->block_cnt;
        const uint32_t *write_buf = data->tx_data;
        while (remaining_blocks > 0) {
            uint32_t status_flags = SDXC_INT_STAT_BUF_WR_READY_MASK | SDXC_STS_DATA_ERR;
            /* Wait until write data is allowed or timeout event occurs */
            do {
                interrupt_status = sdxc_get_interrupt_status(base);
            } while (!IS_HPM_BITMASK_SET(interrupt_status, status_flags));

            if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_DATA_CRC_ERR_MASK)) {
                /* Handle Data CRC error */
                if (!data->enable_ignore_error) {
                    status = status_sdxc_data_crc_error;
                    break;
                }
                /* Error ignored: clear it and keep sending */
                sdxc_clear_interrupt_status(base, SDXC_STS_DATA_ERR);
            } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_DATA_TOUT_ERR_MASK)) {
                /* Handle Data timeout error */
                status = status_sdxc_data_timeout_error;
                break;
            } else {
                /* Send data block by block */
                sdxc_clear_interrupt_status(base, SDXC_INT_STAT_BUF_WR_READY_MASK);
                sdxc_write_data_buf(base, write_buf, words_per_block);
                write_buf += words_per_block;
                remaining_blocks--;
            }
        }

        sdxc_clear_interrupt_status(base, SDXC_INT_STAT_XFER_COMPLETE_MASK);

    } while (false);

    return status;
}
264
sdxc_is_bus_idle(SDXC_Type * base)265 static bool sdxc_is_bus_idle(SDXC_Type *base)
266 {
267 uint32_t busy_mask = SDXC_PSTATE_CMD_INHIBIT_MASK | SDXC_PSTATE_DAT_INHIBIT_MASK;
268
269 return IS_HPM_BITMASK_CLR(base->PSTATE, busy_mask);
270 }
271
sdxc_get_capabilities(SDXC_Type * base,sdxc_capabilities_t * capabilities)272 hpm_stat_t sdxc_get_capabilities(SDXC_Type *base, sdxc_capabilities_t *capabilities)
273 {
274 hpm_stat_t status = status_invalid_argument;
275 do {
276 HPM_BREAK_IF((base == NULL) || (capabilities == NULL));
277
278 capabilities->capabilities1.U = base->CAPABILITIES1;
279 capabilities->capabilities2.U = base->CAPABILITIES2;
280 capabilities->curr_capabilities1.U = base->CURR_CAPABILITIES1;
281 capabilities->curr_capabilities2.U = base->CURR_CAPABILITIES2;
282
283 status = status_success;
284 } while (false);
285
286 return status;
287 }
288
sdxc_send_command(SDXC_Type * base,sdxc_command_t * cmd)289 hpm_stat_t sdxc_send_command(SDXC_Type *base, sdxc_command_t *cmd)
290 {
291 sdxc_clear_interrupt_status(base, ~0U);
292
293 uint32_t cmd_xfer = SDXC_CMD_XFER_CMD_INDEX_SET(cmd->cmd_index);
294 uint32_t flags = cmd->cmd_flags;
295
296 /* Wait a while until the BUS is idle after the previous command */
297 uint32_t wait_cnt = 100L;
298 while (!sdxc_is_bus_idle(base) && (wait_cnt > 0U)) {
299 wait_cnt--;
300 }
301
302 if (wait_cnt == 0) {
303 return status_timeout;
304 }
305
306 if (cmd->cmd_type != sdxc_cmd_type_empty) {
307 flags &= ~SDXC_CMD_XFER_RESP_TYPE_SELECT_MASK;
308 uint32_t cmd_check_flags = SDXC_CMD_XFER_CMD_CRC_CHK_ENABLE_MASK | SDXC_CMD_XFER_CMD_IDX_CHK_ENABLE_MASK;
309 if ((cmd->resp_type == sdxc_dev_resp_r1) || (cmd->resp_type == sdxc_dev_resp_r5) ||
310 (cmd->resp_type == sdxc_dev_resp_r6) || (cmd->resp_type == sdxc_dev_resp_r7)) {
311 flags |= SDXC_CMD_RESP_LEN_48 | cmd_check_flags;
312 } else if ((cmd->resp_type == sdxc_dev_resp_r1b) || (cmd->resp_type == sdxc_dev_resp_r5b)) {
313 flags |= SDXC_CMD_RESP_LEN_48B | cmd_check_flags;
314 } else if (cmd->resp_type == sdxc_dev_resp_r2) {
315 flags |= SDXC_CMD_RESP_LEN_136 | SDXC_CMD_XFER_CMD_CRC_CHK_ENABLE_MASK;
316 } else if ((cmd->resp_type == sdxc_dev_resp_r3) || (cmd->resp_type == sdxc_dev_resp_r4)) {
317 flags |= SDXC_CMD_RESP_LEN_48;
318 } else {
319 /* do nothing */
320 }
321 if (IS_HPM_BITMASK_SET(flags, SDXC_CMD_XFER_RESP_ERR_CHK_ENABLE_MASK)) {
322 flags |= SDXC_CMD_XFER_RESP_INT_DISABLE_MASK;
323 }
324 }
325
326 switch (cmd->cmd_type) {
327 case sdxc_cmd_type_abort_cmd:
328 flags |= SDXC_CMD_TYPE_ABORT;
329 break;
330 case sdxc_cmd_type_suspend_cmd:
331 flags |= SDXC_CMD_TYPE_SUSPEND;
332 break;
333 case sdxc_cmd_tye_resume_cmd:
334 flags |= SDXC_CMD_TYPE_RESUME;
335 break;
336 default:
337 flags |= SDXC_CMD_TYPE_NORMAL;
338 break;
339 }
340
341 cmd_xfer |= ((flags & (SDXC_CMD_XFER_CMD_TYPE_MASK | SDXC_CMD_XFER_CMD_CRC_CHK_ENABLE_MASK |
342 SDXC_CMD_XFER_CMD_IDX_CHK_ENABLE_MASK | SDXC_CMD_XFER_RESP_TYPE_SELECT_MASK |
343 SDXC_CMD_XFER_DATA_PRESENT_SEL_MASK | SDXC_CMD_XFER_DMA_ENABLE_MASK |
344 SDXC_CMD_XFER_DATA_XFER_DIR_MASK | SDXC_CMD_XFER_AUTO_CMD_ENABLE_MASK |
345 SDXC_CMD_XFER_MULTI_BLK_SEL_MASK | SDXC_CMD_XFER_BLOCK_COUNT_ENABLE_MASK |
346 SDXC_CMD_XFER_RESP_TYPE_MASK | SDXC_CMD_XFER_RESP_ERR_CHK_ENABLE_MASK)));
347
348 base->CMD_ARG = cmd->cmd_argument;
349 base->CMD_XFER = cmd_xfer;
350
351 return status_success;
352 }
353
354
/* Translate the current interrupt status into a driver status code.
 *
 * Error bits are examined in a fixed priority order (command errors first,
 * then data errors, auto-command errors, ADMA/tuning/response/boot errors),
 * so only the highest-priority pending error is reported.
 *
 * @return status_success when no error bit is set, otherwise the status code
 *         matching the first error found (status_sdxc_error as fallback).
 */
hpm_stat_t sdxc_parse_interrupt_status(SDXC_Type *base)
{
    uint32_t interrupt_status = sdxc_get_interrupt_status(base);
    hpm_stat_t status = status_success;
    if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_STS_ERROR)) {
        if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_CMD_TOUT_ERR_MASK)) {
            status = status_sdxc_cmd_timeout_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_CMD_CRC_ERR_MASK)) {
            status = status_sdxc_cmd_crc_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_CMD_END_BIT_ERR_MASK)) {
            status = status_sdxc_cmd_end_bit_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_CMD_IDX_ERR_MASK)) {
            status = status_sdxc_cmd_index_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_DATA_TOUT_ERR_MASK)) {
            status = status_sdxc_data_timeout_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_DATA_CRC_ERR_MASK)) {
            status = status_sdxc_data_crc_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_DATA_END_BIT_ERR_MASK)) {
            status = status_sdxc_data_end_bit_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_AUTO_CMD_ERR_MASK)) {
            /* Refine the auto-command error via the AC_HOST_CTRL error bits */
            uint32_t auto_cmd_err_mask = base->AC_HOST_CTRL & 0xFFFFUL;
            if (IS_HPM_BITMASK_SET(auto_cmd_err_mask, SDXC_AC_HOST_CTRL_AUTO_CMD12_NOT_EXEC_MASK)) {
                status = status_sdxc_autocmd_cmd12_not_exec;
            } else if (IS_HPM_BITMASK_SET(auto_cmd_err_mask, SDXC_AC_HOST_CTRL_AUTO_CMD_TOUT_ERR_MASK)) {
                status = status_sdxc_autocmd_cmd_timeout_error;
            } else if (IS_HPM_BITMASK_SET(auto_cmd_err_mask, SDXC_AC_HOST_CTRL_AUTO_CMD_CRC_ERR_MASK)) {
                status = status_sdxc_autocmd_cmd_crc_error;
            } else if (IS_HPM_BITMASK_SET(auto_cmd_err_mask, SDXC_AC_HOST_CTRL_AUTO_CMD_IDX_ERR_MASK)) {
                status = status_sdxc_autocmd_cmd_index_error;
            } else if (IS_HPM_BITMASK_SET(auto_cmd_err_mask, SDXC_AC_HOST_CTRL_AUTO_CMD_EBIT_ERR_MASK)) {
                status = status_sdxc_autocmd_end_bit_error;
            } else if (IS_HPM_BITMASK_SET(auto_cmd_err_mask, SDXC_AC_HOST_CTRL_AUTO_CMD_RESP_ERR_MASK)) {
                status = status_sdxc_autocmd_cmd_response_error;
            } else if (IS_HPM_BITMASK_SET(auto_cmd_err_mask, SDXC_AC_HOST_CTRL_CMD_NOT_ISSUED_AUTO_CMD12_MASK)) {
                status = status_sdxc_autocmd_cmd_not_issued_auto_cmd12;
            } else {
                status = status_sdxc_auto_cmd_error;
            }
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_ADMA_ERR_MASK)) {
            status = status_sdxc_adma_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_TUNING_ERR_MASK)) {
            status = status_sdxc_tuning_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_RESP_ERR_MASK)) {
            status = status_sdxc_response_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_BOOT_ACK_ERR_MASK)) {
            status = status_sdxc_boot_ack_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_CARD_REMOVAL_MASK)) {
            status = status_sdxc_card_removed;
        } else {
            status = status_sdxc_error;
        }
    } else {
        status = status_success;
    }
    return status;
}
411
/* Optionally busy-wait for command completion and fetch the response.
 *
 * When polling_cmd_done is true the function spins until CMD_COMPLETE is set,
 * aborting early on any command error (translated by
 * sdxc_parse_interrupt_status). On success it reads the response into 'cmd'.
 * When polling_cmd_done is false the function is a no-op returning
 * status_success (completion is expected to be handled elsewhere).
 */
hpm_stat_t sdxc_wait_cmd_done(SDXC_Type *base, sdxc_command_t *cmd, bool polling_cmd_done)
{
    hpm_stat_t status = status_success;
    uint32_t interrupt_status = 0U;

    if (polling_cmd_done) {
        while (!IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_CMD_COMPLETE_MASK)) {
            interrupt_status = sdxc_get_interrupt_status(base);
            if ((interrupt_status & SDXC_STS_CMD_ERR) != 0) {
                /* Command error: stop polling and report the translated status */
                status = sdxc_parse_interrupt_status(base);
                HPM_BREAK_IF(status != status_success);
            }
        }
        sdxc_clear_interrupt_status(base, SDXC_INT_STAT_CMD_COMPLETE_MASK);

        if (status == status_success) {
            status = sdxc_receive_cmd_response(base, cmd);
        }
    }

    return status;
}
434
435
/* Block until the data phase of a transfer finishes.
 *
 * DMA path: spins on XFER_COMPLETE / any error bit. While waiting, each
 * DMA_INTERRUPT (SDMA buffer-boundary event) is acknowledged and, in SDMA
 * mode, the system address register is advanced by one block so the transfer
 * can continue. Tuning errors and data/ADMA errors are mapped to
 * status_sdxc_transfer_data_failed (CRC errors may be ignored per
 * data->enable_ignore_error, timeouts never are). For continuous boot
 * transfers a dummy word is stored to the rx buffer on success.
 *
 * Non-DMA path: delegates to the CPU data-buffer read/write helpers,
 * choosing direction by whether rx_data is set.
 */
static hpm_stat_t sdxc_transfer_data_blocking(SDXC_Type *base, sdxc_data_t *data, bool enable_dma)
{
    hpm_stat_t status = status_success;

    uint32_t interrupt_status = 0;

    if (enable_dma) {
        uint32_t status_flags = SDXC_INT_STAT_XFER_COMPLETE_MASK | SDXC_STS_ERROR;
        while (!IS_HPM_BITMASK_SET(interrupt_status, status_flags)) {
            interrupt_status = sdxc_get_interrupt_status(base);
            if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_DMA_INTERRUPT_MASK)) {
                sdxc_clear_interrupt_status(base, SDXC_INT_STAT_DMA_INTERRUPT_MASK);
                /* SDMA pauses at each buffer boundary; bump the address to resume */
                if (SDXC_PROT_CTRL_DMA_SEL_GET(base->PROT_CTRL) == (uint32_t) sdxc_dmasel_sdma) {
                    base->ADMA_SYS_ADDR += data->block_size;
                }
            }
        }

        if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_TUNING_ERR_MASK)) {
            status = status_sdxc_transfer_data_failed;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_STS_DATA_ERR | SDXC_INT_STAT_ADMA_ERR_MASK)) {
            /* CRC errors may be ignored on request; timeouts are always fatal */
            if ((!data->enable_ignore_error) ||
                IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_DATA_TOUT_ERR_MASK)) {
                status = status_sdxc_transfer_data_failed;
            }
        } else {
            /* Do nothing */
        }

        if ((data->data_type == (uint8_t) sdxc_xfer_data_boot_continuous) && (status == status_success)) {
            *(data->rx_data) = s_sdxc_boot_dummy;
        }
        sdxc_clear_interrupt_status(base, SDXC_INT_STAT_DMA_INTERRUPT_MASK | SDXC_INT_STAT_TUNING_ERR_MASK);

    } else {
        if (data->rx_data != NULL) {
            status = sdxc_read_via_data_buf_blocking(base, data);
            if (status != status_success) {
                return status;
            }
        } else {
            status = sdxc_write_via_data_buf_blocking(base, data);
            if (status != status_success) {
                return status;
            }
        }
    }

    return status;
}
486
487
/* Bring the SDXC controller to a known working state.
 *
 * Resets the whole controller, powers the VDD1 bus, programs the data
 * timeout, starts the internal clock / PLL / SD clock (waiting for stability
 * after each enable), enables all interrupt status bits (signals masked),
 * and switches the host to Version-4 mode with 26-bit ADMA2 lengths.
 */
void sdxc_init(SDXC_Type *base, const sdxc_config_t *config)
{
    sdxc_reset(base, sdxc_reset_all, 0x10000U);

    uint32_t prot_ctrl = base->PROT_CTRL;

    prot_ctrl &= ~(SDXC_PROT_CTRL_DMA_SEL_MASK | SDXC_PROT_CTRL_SD_BUS_VOL_VDD1_MASK);

    prot_ctrl |= SDXC_PROT_CTRL_SD_BUS_PWR_VDD1_MASK;

    sdxc_enable_tm_clock(base);
    sdxc_set_data_timeout(base, config->data_timeout, NULL);

    base->PROT_CTRL = prot_ctrl;

    /* Enable SD internal clock and the output clock */
    base->SYS_CTRL |= SDXC_SYS_CTRL_INTERNAL_CLK_EN_MASK;
    while (!IS_HPM_BITMASK_SET(base->SYS_CTRL, SDXC_SYS_CTRL_INTERNAL_CLK_STABLE_MASK)) {
        /* busy-wait for internal clock to stabilize */
    }
    base->SYS_CTRL |= SDXC_SYS_CTRL_PLL_ENABLE_MASK;
    while (!IS_HPM_BITMASK_SET(base->SYS_CTRL, SDXC_SYS_CTRL_INTERNAL_CLK_STABLE_MASK)) {
        /* busy-wait again after enabling the PLL */
    }

    base->SYS_CTRL |= SDXC_SYS_CTRL_SD_CLK_EN_MASK;

    /* Latch all status bits, but do not route any of them to the interrupt line */
    base->INT_STAT_EN = SDXC_STS_ALL_FLAGS;
    base->INT_SIGNAL_EN = 0UL;
    base->INT_STAT = SDXC_STS_ALL_FLAGS;

    /* Set Host to version 4, enable 26-bit ADMA2 length mode */
    base->AC_HOST_CTRL &= ~(SDXC_AC_HOST_CTRL_UHS_MODE_SEL_MASK | SDXC_AC_HOST_CTRL_SAMPLE_CLK_SEL_MASK);
    base->AC_HOST_CTRL |= SDXC_AC_HOST_CTRL_HOST_VER4_ENABLE_MASK | SDXC_AC_HOST_CTRL_ADMA2_LEN_MODE_MASK;
}
523
/* Program the data-line timeout counter.
 *
 * The hardware timeout is (1 << 13) << field_value TMCLK cycles; with
 * SDXC_TMCLK_IN_MHZ the base unit in microseconds is
 * (1 << 13) / SDXC_TMCLK_IN_MHZ. The smallest field value whose timeout
 * exceeds the requested value is selected (0xE, the maximum, when none does).
 * The data-timeout status bit is masked around the SYS_CTRL update to avoid
 * a spurious timeout error while the counter changes.
 *
 * @param actual_timeout_ms [out, optional] the timeout actually programmed
 */
void sdxc_set_data_timeout(SDXC_Type *base, uint32_t timeout_in_ms, uint32_t *actual_timeout_ms)
{
    uint32_t field_value = 0xE;  /* default to the largest supported timeout */
    /*
     * Timeout calculation logic:
     * the base timeout tick is (1UL << 13) TMCLK cycles, so the base timeout
     * in microseconds is (1UL << 13) / SDXC_TMCLK_IN_MHZ; each field_value
     * step doubles it.
     */
    uint32_t timeout_in_us_unit = (1UL << 13U) / SDXC_TMCLK_IN_MHZ;
    uint32_t timeout_in_us = timeout_in_ms * 1000UL;
    for (uint32_t i = 0; i < 0xFU; i++) {
        uint32_t timeout_us_using_value_i = (timeout_in_us_unit << i);
        if (timeout_in_us < timeout_us_using_value_i) {
            field_value = i;
            break;
        }
    }
    /* Mask the data-timeout status while changing the counter */
    sdxc_enable_interrupt_status(base, SDXC_INT_STAT_EN_DATA_TOUT_ERR_STAT_EN_MASK, false);
    base->SYS_CTRL = (base->SYS_CTRL & ~SDXC_SYS_CTRL_TOUT_CNT_MASK) | SDXC_SYS_CTRL_TOUT_CNT_SET(field_value);
    sdxc_enable_interrupt_status(base, SDXC_INT_STAT_EN_DATA_TOUT_ERR_STAT_EN_MASK, true);

    if (actual_timeout_ms != NULL) {
        *actual_timeout_ms = (timeout_in_us_unit << field_value) / 1000UL;
    }
}
549
/* Configure the controller for eMMC boot mode: boot-ack enable plus the
 * block size/count used during the boot transfer.
 */
void sdxc_set_mmc_boot_config(SDXC_Type *base, const sdxc_boot_config_t *config)
{
    uint32_t boot_ctrl = base->EMMC_BOOT_CTRL &
                         ~(SDXC_EMMC_BOOT_CTRL_BOOT_TOUT_CNT_MASK | SDXC_EMMC_BOOT_CTRL_MAN_BOOT_EN_MASK);
    if (config->enable_boot_ack) {
        boot_ctrl |= SDXC_EMMC_BOOT_CTRL_BOOT_ACK_ENABLE_MASK;
    }

    /* TODO: Auto stop at block gap, how to handle this? */

    uint32_t blk_attr = base->BLK_ATTR;
    blk_attr &= ~(SDXC_BLK_ATTR_XFER_BLOCK_SIZE_MASK | SDXC_BLK_ATTR_BLOCK_CNT_MASK);
    blk_attr |= SDXC_BLK_ATTR_XFER_BLOCK_SIZE_SET(config->block_size);
    blk_attr |= SDXC_BLK_ATTR_BLOCK_CNT_SET(config->block_cnt);

    base->BLK_ATTR = blk_attr;
    base->EMMC_BOOT_CTRL = boot_ctrl;
}
570
/* Program block count/size and the transfer direction bit in CMD_XFER
 * (direction bit set means card-to-host, i.e. read).
 */
void sdxc_set_data_config(SDXC_Type *base, sdxc_xfer_direction_t data_dir, uint32_t block_cnt, uint32_t block_size)
{
    uint32_t blk_attr = base->BLK_ATTR;
    blk_attr &= ~(SDXC_BLK_ATTR_XFER_BLOCK_SIZE_MASK | SDXC_BLK_ATTR_BLOCK_CNT_MASK);
    blk_attr |= SDXC_BLK_ATTR_BLOCK_CNT_SET(block_cnt);
    blk_attr |= SDXC_BLK_ATTR_XFER_BLOCK_SIZE_SET(block_size);
    base->BLK_ATTR = blk_attr;

    if (data_dir != sdxc_xfer_dir_write) {
        base->CMD_XFER |= SDXC_CMD_XFER_DATA_XFER_DIR_MASK;
    } else {
        base->CMD_XFER &= ~SDXC_CMD_XFER_DATA_XFER_DIR_MASK;
    }
}
585
/* Program the DMA address register and select the DMA mode.
 *
 * For SDMA the data address itself goes to ADMA_SYS_ADDR and must be 4-byte
 * aligned; for ADMA modes the descriptor-table address is programmed instead.
 *
 * @retval status_sdxc_dma_addr_unaligned SDMA address not 4-byte aligned
 * @retval status_success                 otherwise
 */
hpm_stat_t sdxc_set_dma_config(SDXC_Type *base,
                               sdxc_adma_config_t *dma_cfg,
                               const uint32_t *data_addr,
                               bool enable_auto_cmd23)
{
    (void) enable_auto_cmd23;  /* reserved; not used by this implementation */
    if (dma_cfg->dma_type == sdxc_dmasel_sdma) {

        /* SDMA requires a word-aligned system address */
        if (((uint32_t) data_addr % SDXC_SYS_DMA_ALIGN_LEN) != 0U) {
            return status_sdxc_dma_addr_unaligned;
        }

        base->ADMA_SYS_ADDR = (uint32_t) data_addr;
    } else {
        base->ADMA_SYS_ADDR = (uint32_t) dma_cfg->adma_table;
    }

    /* Set DMA mode */
    /* NOTE(review): SYS_CTRL is saved and written back around the PROT_CTRL
     * update — presumably a hardware workaround; confirm before removing. */
    uint32_t sys_ctl = base->SYS_CTRL;
    base->PROT_CTRL = (base->PROT_CTRL & ~SDXC_PROT_CTRL_DMA_SEL_MASK) | SDXC_PROT_CTRL_DMA_SEL_SET(dma_cfg->dma_type);
    base->SYS_CTRL = sys_ctl;
    return status_success;
}
609
610
sdxc_set_adma2_desc(uint32_t * adma_tbl,uint32_t adma_table_words,const uint32_t * data_buf,uint32_t data_bytes,uint32_t flags)611 hpm_stat_t sdxc_set_adma2_desc(uint32_t *adma_tbl,
612 uint32_t adma_table_words,
613 const uint32_t *data_buf,
614 uint32_t data_bytes,
615 uint32_t flags)
616 {
617 hpm_stat_t status = status_invalid_argument;
618 do {
619 if ((adma_tbl == NULL) || (data_buf == NULL)) {
620 break;
621 }
622 if ((uint32_t) data_buf % 4U != 0U) {
623 status = status_sdxc_dma_addr_unaligned;
624 break;
625 }
626
627 uint32_t start_idx = 0;
628 uint32_t min_entries;
629 uint32_t max_entries = adma_table_words * sizeof(uint32_t) / sizeof(sdxc_adma2_descriptor_t);
630 sdxc_adma2_descriptor_t *adma2_desc = (sdxc_adma2_descriptor_t *) adma_tbl;
631
632 /* Ensure that the data_bytes is 4-byte aligned. */
633 data_bytes += (data_bytes % sizeof(uint32_t));
634
635 min_entries = data_bytes / SDXC_DMA_MAX_XFER_LEN_26BIT;
636 if (data_bytes % SDXC_DMA_MAX_XFER_LEN_26BIT != 0U) {
637 min_entries += 1U;
638 }
639
640 uint32_t i;
641 if (flags == (uint32_t) sdxc_adma_desc_multi_flag) {
642
643 for (i = 0; i < max_entries; i++) {
644 if (adma2_desc[i].valid == 0U) {
645 break;
646 }
647 }
648
649 start_idx = i;
650
651 /* add one entry as dummy entry */
652 min_entries += 1U;
653 }
654
655 if ((min_entries + start_idx) > max_entries) {
656 return status_invalid_argument;
657 }
658
659 uint32_t dma_buf_len = 0U;
660 const uint32_t *data = data_buf;
661 for (i = start_idx; i < (min_entries + start_idx); i++) {
662 if (data_bytes > SDXC_DMA_MAX_XFER_LEN_26BIT) {
663 dma_buf_len = SDXC_DMA_MAX_XFER_LEN_26BIT;
664 } else {
665 dma_buf_len = (data_bytes == 0U) ? sizeof(uint32_t) : data_bytes;
666 }
667
668 /* Format each adma2 descriptor entry */
669 adma2_desc[i].addr = (data_bytes == 0U) ? &s_sdxc_boot_dummy : data;
670 adma2_desc[i].len_attr = 0U;
671 adma2_desc[i].len_lower = dma_buf_len & 0xFFFFU;
672 adma2_desc[i].len_upper = dma_buf_len >> 16;
673 adma2_desc[i].len_attr |= SDXC_ADMA2_DESC_VALID_FLAG;
674 if (data_bytes != 0U) {
675 adma2_desc[i].act = SDXC_ADMA2_DESC_TYPE_TRANS;
676 }
677
678 data = (uint32_t *) ((uint32_t) data + dma_buf_len);
679 if (data_bytes != 0U) {
680 data_bytes -= dma_buf_len;
681 }
682 }
683
684 if (flags == (uint32_t) sdxc_adma_desc_multi_flag) {
685 adma2_desc[i + 1U].len_attr |= SDXC_ADMA2_DESC_TYPE_TRANS;
686 } else {
687 adma2_desc[i - 1U].len_attr |= SDXC_ADMA2_DESC_END_FLAG;
688 }
689
690 status = status_success;
691 } while (false);
692
693 return status;
694 }
695
/* Prepare the DMA descriptor table (if any) and program the DMA registers
 * for a transfer described by data_cfg.
 *
 * For continuous boot transfers the first word of the buffer is reserved for
 * the dummy marker, so the data pointer is advanced one word and the length
 * reduced accordingly; the DMA registers are also left untouched in that case.
 *
 * @retval status_success           SDMA selected, or ADMA2 table built and DMA programmed
 * @retval status_invalid_argument  unknown DMA type
 * @retval status_fail              adma2_or_3 requested (not implemented yet)
 */
hpm_stat_t sdxc_set_adma_table_config(SDXC_Type *base,
                                      sdxc_adma_config_t *dma_cfg,
                                      sdxc_data_t *data_cfg,
                                      uint32_t flags)
{
    hpm_stat_t status = status_fail;

    /* Continuous boot: the first buffer word is the dummy marker */
    uint32_t boot_dummy_offset = (data_cfg->data_type == (uint8_t) sdxc_xfer_data_boot_continuous) ? sizeof(uint32_t)
                                                                                                   : 0;
    const uint32_t *data;

    /* Use whichever direction buffer is populated */
    if (data_cfg->rx_data == NULL) {
        data = (const uint32_t *) data_cfg->tx_data;
    } else {
        data = (const uint32_t *) data_cfg->rx_data;
    }
    if (boot_dummy_offset > 0) {
        data++;
    }

    /* Total transfer length in bytes, excluding the boot dummy word */
    uint32_t block_size = data_cfg->block_size * data_cfg->block_cnt - boot_dummy_offset;

    if (dma_cfg->dma_type == sdxc_dmasel_sdma) {
        /* SDMA needs no descriptor table */
        status = status_success;
    } else if (dma_cfg->dma_type == sdxc_dmasel_adma2) {
        status = sdxc_set_adma2_desc(dma_cfg->adma_table, dma_cfg->adma_table_words, data, block_size, flags);

    } else if (dma_cfg->dma_type == sdxc_dmasel_adma2_or_3) {
        /* TODO: To be implemented — status stays status_fail */

    } else {
        status = status_invalid_argument;
    }

    if ((status == status_success) && (data_cfg->data_type != (uint8_t) sdxc_xfer_data_boot_continuous)) {
        status = sdxc_set_dma_config(base, dma_cfg, data, data_cfg->enable_auto_cmd23);
    }

    return status;
}
736
sdxc_reset(SDXC_Type * base,sdxc_sw_reset_type_t reset_type,uint32_t timeout)737 bool sdxc_reset(SDXC_Type *base, sdxc_sw_reset_type_t reset_type, uint32_t timeout)
738 {
739 uint32_t reset_mask = 0U;
740
741 switch (reset_type) {
742 case sdxc_reset_all:
743 reset_mask = SDXC_SYS_CTRL_SW_RST_ALL_MASK;
744 break;
745 case sdxc_reset_cmd_line:
746 reset_mask = SDXC_SYS_CTRL_SW_RST_CMD_MASK;
747 break;
748 case sdxc_reset_data_line:
749 reset_mask = SDXC_SYS_CTRL_SW_RST_DAT_MASK;
750 break;
751 default:
752 /* Do nothing */
753 break;
754 }
755 base->SYS_CTRL |= reset_mask;
756 while (IS_HPM_BITMASK_SET(base->SYS_CTRL, reset_mask)) {
757 if (timeout == 0U) {
758 return false;
759 }
760 timeout--;
761 }
762
763 return true;
764 }
765
/* Select the SD bus voltage and toggle 1.8V signaling accordingly. */
void sdxc_select_voltage(SDXC_Type *base, sdxc_bus_voltage_option_t option)
{
    uint32_t vol = (uint32_t) option;
    uint32_t prot_ctrl = base->PROT_CTRL;

    prot_ctrl &= ~SDXC_PROT_CTRL_SD_BUS_VOL_VDD1_MASK;
    prot_ctrl |= SDXC_PROT_CTRL_SD_BUS_VOL_VDD1_SET(vol);
    base->PROT_CTRL = prot_ctrl;

    /* 1.8V signaling is only enabled for the 1.8V option */
    if (option != sdxc_bus_voltage_sd_1v8) {
        base->AC_HOST_CTRL &= ~SDXC_AC_HOST_CTRL_SIGNALING_EN_MASK;
    } else {
        base->AC_HOST_CTRL |= SDXC_AC_HOST_CTRL_SIGNALING_EN_MASK;
    }
}
779
/* Enable or disable a wakeup event bit in PROT_CTRL. */
void sdxc_enable_wakeup_event(SDXC_Type *base, sdxc_wakeup_event_t evt, bool enable)
{
    uint32_t mask = (uint32_t) evt;
    uint32_t reg = base->PROT_CTRL;

    reg = enable ? (reg | mask) : (reg & ~mask);
    base->PROT_CTRL = reg;
}
790
/* Select the data bus width (1/4/8 bit) and re-enable the SD clock. */
void sdxc_set_data_bus_width(SDXC_Type *base, sdxc_bus_width_t width)
{
    uint32_t prot_ctrl = base->PROT_CTRL;
    prot_ctrl &= ~(SDXC_PROT_CTRL_EXT_DAT_XFER_MASK | SDXC_PROT_CTRL_DAT_XFER_WIDTH_MASK);

    switch (width) {
    case sdxc_bus_width_1bit:
        prot_ctrl |= SDXC_PROT_CTRL_DAT_XFER_WIDTH_SET(0U);
        break;
    case sdxc_bus_width_4bit:
        prot_ctrl |= SDXC_PROT_CTRL_DAT_XFER_WIDTH_SET(1U);
        break;
    case sdxc_bus_width_8bit:
        /* 8-bit mode uses the extended-data-transfer bit instead */
        prot_ctrl |= SDXC_PROT_CTRL_EXT_DAT_XFER_SET(1U);
        break;
    default:
        /* Unknown width: leave both fields cleared (1-bit) */
        break;
    }
    base->PROT_CTRL = prot_ctrl;

    sdxc_enable_sd_clock(base, true);
}
808
sdxc_get_data_bus_width(SDXC_Type * base)809 uint32_t sdxc_get_data_bus_width(SDXC_Type *base)
810 {
811 uint32_t bus_width;
812 uint32_t bus_width_mask = base->PROT_CTRL & (SDXC_PROT_CTRL_EXT_DAT_XFER_MASK | SDXC_PROT_CTRL_DAT_XFER_WIDTH_MASK);
813
814 if (IS_HPM_BITMASK_SET(bus_width_mask, SDXC_PROT_CTRL_EXT_DAT_XFER_MASK)) {
815 bus_width = 8;
816 } else if (IS_HPM_BITMASK_SET(bus_width_mask, SDXC_PROT_CTRL_DAT_XFER_WIDTH_MASK)) {
817 bus_width = 4;
818 } else {
819 bus_width = 1;
820 }
821 return bus_width;
822 }
823
/* Select the UHS speed mode; any mode above SDR12 also turns on the
 * high-speed enable bit in PROT_CTRL.
 */
void sdxc_set_speed_mode(SDXC_Type *base, sdxc_speed_mode_t mode)
{
    uint32_t mode_val = (uint32_t) mode;
    uint32_t ac_ctrl = base->AC_HOST_CTRL;

    ac_ctrl &= ~SDXC_AC_HOST_CTRL_UHS_MODE_SEL_MASK;
    ac_ctrl |= SDXC_AC_HOST_CTRL_UHS_MODE_SEL_SET(mode_val);
    base->AC_HOST_CTRL = ac_ctrl;

    bool high_speed = ((mode_val & 0xFU) > sdxc_sd_speed_sdr12);
    if (high_speed) {
        base->PROT_CTRL |= SDXC_PROT_CTRL_HIGH_SPEED_EN_MASK;
    } else {
        base->PROT_CTRL &= ~SDXC_PROT_CTRL_HIGH_SPEED_EN_MASK;
    }
}
836
/* Start a command (and optional data) transfer without waiting for it.
 *
 * If a re-tuning event is pending, the request is rejected with
 * status_sdxc_retuning_request. When a data phase is present, the DMA
 * descriptor table is prepared (unless no DMA is requested), the internal
 * transfer flags are derived from the data descriptor, the CMD_XFER transfer
 * fields are built via sdxc_set_transfer_config(), and finally the command
 * is issued. Completion must be tracked by the caller (interrupt or polling).
 */
hpm_stat_t sdxc_transfer_nonblocking(SDXC_Type *base, sdxc_adma_config_t *dma_config, sdxc_xfer_t *xfer)
{
    hpm_stat_t status = status_invalid_argument;
    sdxc_command_t *cmd = xfer->command;
    sdxc_data_t *data = xfer->data;
    uint32_t xfer_flags = (uint32_t) sdxc_cmd_only;

    uint32_t block_size = 0U;
    uint32_t block_cnt = 0U;
    do {
        /* A pending re-tune request must be serviced before new transfers */
        if (IS_HPM_BITMASK_SET(base->INT_STAT, SDXC_INT_STAT_RE_TUNE_EVENT_MASK)) {
            base->INT_STAT = SDXC_INT_STAT_RE_TUNE_EVENT_MASK;
            status = status_sdxc_retuning_request;
            break;
        }

        uint32_t new_xfer_flags = 0;
        if (data != NULL) {
            if ((dma_config != NULL) && (dma_config->dma_type != sdxc_dmasel_nodma)) {
                /* Boot transfers chain descriptors; normal ones use a single chain */
                uint32_t flags = IS_HPM_BITMASK_SET(data->data_type, sdxc_xfer_data_boot) ? sdxc_adma_desc_multi_flag
                                                                                          : sdxc_adma_desc_single_flag;
                status = sdxc_set_adma_table_config(base, dma_config, data, flags);
                if (status != status_success) {
                    break;
                }
                cmd->cmd_flags |= SDXC_CMD_XFER_DMA_ENABLE_MASK;
            } else {
                cmd->cmd_flags &= ~SDXC_CMD_XFER_DMA_ENABLE_MASK;
            }
            /* Translate the data descriptor into internal transfer flags */
            block_size = data->block_size;
            block_cnt = data->block_cnt;
            xfer_flags = data->enable_auto_cmd12 ? (uint32_t) sdxc_data_with_auto_cmd12 : 0;
            xfer_flags |= (data->enable_auto_cmd23) ? (uint32_t) sdxc_data_with_auto_cmd23 : 0;
            xfer_flags |= (data->tx_data != NULL) ? (uint32_t) sdxc_cmd_and_tx_data : 0;
            xfer_flags |= (data->rx_data != NULL) ? (uint32_t) sdxc_cmd_and_rx_data : 0;
            xfer_flags |= (data->data_type == sdxc_xfer_data_boot) ? (uint32_t) sdxc_boot_data : 0;
            xfer_flags |= (data->data_type == sdxc_xfer_data_boot_continuous) ? (uint32_t) sdxc_boot_data_continuous
                                                                              : 0;
            cmd->cmd_flags |= SDXC_CMD_XFER_DATA_PRESENT_SEL_MASK;

            status = sdxc_set_transfer_config(base, xfer_flags, block_size, block_cnt, &new_xfer_flags);
            if (status != status_success) {
                break;
            }
        } else {
            cmd->cmd_flags &= ~(SDXC_CMD_XFER_DATA_PRESENT_SEL_MASK | SDXC_CMD_XFER_DMA_ENABLE_MASK);
        }

        cmd->cmd_flags |= new_xfer_flags;
        status = sdxc_send_command(base, cmd);
    } while (false);

    return status;
}
891
sdxc_transfer_blocking(SDXC_Type * base,sdxc_adma_config_t * dma_config,sdxc_xfer_t * xfer)892 hpm_stat_t sdxc_transfer_blocking(SDXC_Type *base, sdxc_adma_config_t *dma_config, sdxc_xfer_t *xfer)
893 {
894 hpm_stat_t status = status_invalid_argument;
895 sdxc_command_t *cmd = xfer->command;
896 sdxc_data_t *data = xfer->data;
897 bool enable_dma = true;
898 do {
899 status = sdxc_transfer_nonblocking(base, dma_config, xfer);
900 HPM_BREAK_IF(status != status_success);
901
902 bool polling_cmd_done = (data == NULL) || (data->data_type == sdxc_xfer_data_normal);
903 status = sdxc_wait_cmd_done(base, cmd, polling_cmd_done);
904 if (status != status_success) {
905 status = status_sdxc_send_cmd_failed;
906 break;
907 }
908 if (data != NULL) {
909 status = sdxc_transfer_data_blocking(base, data, enable_dma);
910 }
911 } while (false);
912
913 return status;
914 }
915
sdxc_error_recovery(SDXC_Type * base)916 hpm_stat_t sdxc_error_recovery(SDXC_Type *base)
917 {
918 hpm_stat_t status = status_success;
919 /* D8, D3-D0 of ERROR_INT_STAT */
920 uint32_t cmdline_err_mask =
921 SDXC_INT_STAT_AUTO_CMD_ERR_MASK | SDXC_INT_STAT_CMD_TOUT_ERR_MASK | SDXC_INT_STAT_CMD_CRC_ERR_MASK |
922 SDXC_INT_STAT_CMD_END_BIT_ERR_MASK | SDXC_INT_STAT_CMD_IDX_ERR_MASK;
923
924 if ((base->INT_STAT & cmdline_err_mask) != 0U) {
925 status = status_sdxc_send_cmd_failed;
926 /* Only Auto Command error */
927 if ((base->INT_STAT & cmdline_err_mask) == SDXC_INT_STAT_AUTO_CMD_ERR_MASK) {
928 /* Will reset command line */
929 } else {
930 if (((base->CMD_XFER & SDXC_CMD_XFER_RESP_INT_DISABLE_MASK) != 0U) ||
931 ((base->AC_HOST_CTRL & SDXC_AC_HOST_CTRL_EXEC_TUNING_MASK))) {
932 /* Will reset command line */
933 } else {
934 /* Wait until command completes */
935 uint32_t timeout = 1000000UL;
936 while (!IS_HPM_BITMASK_SET(base->CMD_XFER, SDXC_INT_STAT_CMD_COMPLETE_MASK)) {
937 --timeout;
938 if (timeout < 1U) {
939 status = status_timeout;
940 break;
941 }
942 }
943 }
944 }
945
946 sdxc_reset(base, sdxc_reset_cmd_line, 0xFFFFFFU);
947 }
948
949 /* Check D9, D6-D4 in ERR_INT_STAT */
950 uint32_t dataline_err_mask =
951 SDXC_INT_STAT_ADMA_ERR_MASK | SDXC_INT_STAT_DATA_TOUT_ERR_MASK | SDXC_INT_STAT_DATA_CRC_ERR_MASK |
952 SDXC_INT_STAT_DATA_END_BIT_ERR_MASK;
953
954 if ((base->INT_STAT & dataline_err_mask) != 0U) {
955 status = status_sdxc_transfer_data_failed;
956 sdxc_reset(base, sdxc_reset_data_line, 0xFFFFFFU);
957 }
958 sdxc_clear_interrupt_status(base, ~0UL);
959
960 if (IS_HPM_BITMASK_SET(base->PSTATE, SDXC_PSTATE_CMD_INHIBIT_MASK)) {
961 sdxc_reset(base, sdxc_reset_cmd_line, 10000U);
962 }
963 if (IS_HPM_BITMASK_SET(base->PSTATE, SDXC_PSTATE_DAT_INHIBIT_MASK)) {
964 sdxc_reset(base, sdxc_reset_data_line, 10000U);
965 }
966
967 return status;
968 }
969
sdxc_tuning_error_recovery(SDXC_Type * base)970 hpm_stat_t sdxc_tuning_error_recovery(SDXC_Type *base)
971 {
972 sdxc_reset_tuning_engine(base);
973 sdxc_reset(base, sdxc_reset_data_line, 0xFFFFFFUL);
974 sdxc_reset(base, sdxc_reset_cmd_line, 0xFFFFFFUL);
975
976 return status_success;
977 }
978
sdxc_perform_tuning_flow_sequence(SDXC_Type * base,uint8_t tuning_cmd)979 hpm_stat_t sdxc_perform_tuning_flow_sequence(SDXC_Type *base, uint8_t tuning_cmd)
980 {
981 hpm_stat_t status = status_success;
982
983 /* Turn off Sampling clock */
984 sdxc_enable_sd_clock(base, false);
985 sdxc_execute_tuning(base);
986 uint32_t block_size = SDXC_PROT_CTRL_EXT_DAT_XFER_GET(base->PROT_CTRL) ? 128U : 64U;
987 sdxc_command_t cmd;
988 (void) memset(&cmd, 0, sizeof(cmd));
989 cmd.cmd_index = tuning_cmd;
990 cmd.cmd_argument = 0;
991 cmd.cmd_flags = SDXC_CMD_XFER_DATA_PRESENT_SEL_MASK | SDXC_CMD_XFER_DATA_XFER_DIR_MASK;
992 cmd.resp_type = sdxc_dev_resp_r1;
993 sdxc_enable_sd_clock(base, true);
994 do {
995 base->BLK_ATTR = block_size;
996 base->SDMASA = 1;
997 status = sdxc_send_command(base, &cmd);
998 while (!IS_HPM_BITMASK_SET(base->INT_STAT, SDXC_INT_STAT_BUF_RD_READY_MASK)) {
999 }
1000 sdxc_clear_interrupt_status(base, SDXC_INT_STAT_BUF_RD_READY_MASK);
1001 } while (IS_HPM_BITMASK_SET(base->AC_HOST_CTRL, SDXC_AC_HOST_CTRL_EXEC_TUNING_MASK));
1002
1003 if (!IS_HPM_BITMASK_SET(base->AC_HOST_CTRL, SDXC_AC_HOST_CTRL_SAMPLE_CLK_SEL_MASK)) {
1004 sdxc_tuning_error_recovery(base);
1005 status = status_sdxc_tuning_failed;
1006 }
1007
1008 return status;
1009 }
1010
sdxc_perform_software_tuning(SDXC_Type * base,uint8_t tuning_cmd)1011 hpm_stat_t sdxc_perform_software_tuning(SDXC_Type *base, uint8_t tuning_cmd)
1012 {
1013 hpm_stat_t status;
1014
1015 sdxc_tuning_error_recovery(base);
1016
1017 /* Turn off Sampling clock */
1018 sdxc_enable_sd_clock(base, false);
1019 sdxc_reset_tuning_engine(base);
1020 uint32_t block_size = SDXC_PROT_CTRL_EXT_DAT_XFER_GET(base->PROT_CTRL) ? 128U : 64U;
1021 sdxc_command_t cmd;
1022 (void) memset(&cmd, 0, sizeof(cmd));
1023 cmd.cmd_index = tuning_cmd;
1024 cmd.cmd_argument = 0;
1025 cmd.cmd_flags = SDXC_CMD_XFER_DATA_PRESENT_SEL_MASK | SDXC_CMD_XFER_DATA_XFER_DIR_MASK;
1026 cmd.resp_type = sdxc_dev_resp_r1;
1027 base->BLK_ATTR = block_size;
1028 base->SDMASA = 0;
1029 uint32_t tuning_cclk_sel = 0;
1030 sdxc_enable_software_tuning(base, true);
1031 sdxc_set_center_phase_code(base, 0);
1032 sdxc_enable_sd_clock(base, true);
1033
1034 bool center_phase_codes_valid[255];
1035 (void) memset(¢er_phase_codes_valid, 0, sizeof(center_phase_codes_valid));
1036
1037 do {
1038
1039 sdxc_send_command(base, &cmd);
1040
1041 uint32_t timeout_cnt = 0xFFFFFUL;
1042 while (!IS_HPM_BITMASK_SET(base->INT_STAT,
1043 SDXC_INT_STAT_BUF_RD_READY_MASK | SDXC_INT_STAT_ERR_INTERRUPT_MASK)) {
1044 timeout_cnt--;
1045 if (timeout_cnt < 1U) {
1046 break;
1047 }
1048 }
1049
1050 if (IS_HPM_BITMASK_SET(base->INT_STAT, SDXC_INT_STAT_BUF_RD_READY_MASK)) {
1051 center_phase_codes_valid[tuning_cclk_sel] = true;
1052 }
1053 sdxc_clear_interrupt_status(base, ~0UL);
1054
1055 sdxc_reset(base, sdxc_reset_cmd_line, 0xFFFFFFU);
1056 sdxc_reset(base, sdxc_reset_data_line, 0xFFFFFFU);
1057
1058 tuning_cclk_sel++;
1059
1060 sdxc_enable_sd_clock(base, false);
1061 sdxc_set_center_phase_code(base, tuning_cclk_sel);
1062 sdxc_enable_sd_clock(base, true);
1063
1064 } while (tuning_cclk_sel < SDXC_AUTO_TUNING_STAT_CENTER_PH_CODE_GET(SDXC_AUTO_TUNING_STAT_CENTER_PH_CODE_MASK));
1065
1066 int32_t first_window_idx = -1;
1067 int32_t last_window_idx = -1;
1068 for (int32_t i = 0; i < (int32_t) ARRAY_SIZE(center_phase_codes_valid); i++) {
1069 if (center_phase_codes_valid[i]) {
1070 first_window_idx = i;
1071 break;
1072 }
1073 }
1074
1075 for (int32_t i = ARRAY_SIZE(center_phase_codes_valid) - 1; i >= 0; i--) {
1076 if (center_phase_codes_valid[i]) {
1077 last_window_idx = i;
1078 break;
1079 }
1080 }
1081
1082 if ((first_window_idx >= 0) && (last_window_idx >= 0)) {
1083
1084 uint32_t center_window = (first_window_idx + last_window_idx) / 2;
1085 sdxc_set_center_phase_code(base, center_window);
1086
1087 status = status_success;
1088 } else {
1089 status = status_sdxc_tuning_failed;
1090 }
1091
1092 return status;
1093 }
1094
sdxc_perform_auto_tuning(SDXC_Type * base,uint8_t tuning_cmd)1095 hpm_stat_t sdxc_perform_auto_tuning(SDXC_Type *base, uint8_t tuning_cmd)
1096 {
1097 bool need_inverse = sdxc_is_inverse_clock_enabled(base);
1098 sdxc_enable_inverse_clock(base, false);
1099 sdxc_enable_sd_clock(base, false);
1100 sdxc_enable_auto_tuning(base, true);
1101 sdxc_enable_inverse_clock(base, need_inverse);
1102 sdxc_enable_sd_clock(base, true);
1103
1104 return sdxc_perform_tuning_flow_sequence(base, tuning_cmd);
1105 }
1106