Lines matching references to op (each entry: source line number, code fragment, enclosing function)
137 static void mtk_nor_set_addr(struct mtk_nor *sp, const struct spi_mem_op *op) in mtk_nor_set_addr() argument
139 u32 addr = op->addr.val; in mtk_nor_set_addr()
146 if (op->addr.nbytes == 4) { in mtk_nor_set_addr()
154 static bool need_bounce(struct mtk_nor *sp, const struct spi_mem_op *op) in need_bounce() argument
156 return ((uintptr_t)op->data.buf.in & MTK_NOR_DMA_ALIGN_MASK); in need_bounce()
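The need_bounce() entry above tests the low bits of the read destination against MTK_NOR_DMA_ALIGN_MASK. A minimal standalone sketch of that check, assuming a 16-byte DMA alignment as a placeholder for the driver's MTK_NOR_DMA_ALIGN (the helper name below is ours, not the driver's):

#include <stdbool.h>
#include <stdint.h>

#define DMA_ALIGN      16U              /* assumption for MTK_NOR_DMA_ALIGN */
#define DMA_ALIGN_MASK (DMA_ALIGN - 1)  /* MTK_NOR_DMA_ALIGN_MASK */

/* true when the read destination is not DMA-aligned and the transfer
 * has to go through the driver's pre-allocated bounce buffer */
static bool buffer_needs_bounce(const void *rx_buf)
{
	return ((uintptr_t)rx_buf & DMA_ALIGN_MASK) != 0;
}

When the test is true, the read is routed through the driver's aligned bounce buffer (see mtk_nor_read_bounce() further down).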
159 static bool mtk_nor_match_read(const struct spi_mem_op *op) in mtk_nor_match_read() argument
163 if (op->dummy.nbytes) in mtk_nor_match_read()
164 dummy = op->dummy.nbytes * BITS_PER_BYTE / op->dummy.buswidth; in mtk_nor_match_read()
166 if ((op->data.buswidth == 2) || (op->data.buswidth == 4)) { in mtk_nor_match_read()
167 if (op->addr.buswidth == 1) in mtk_nor_match_read()
169 else if (op->addr.buswidth == 2) in mtk_nor_match_read()
171 else if (op->addr.buswidth == 4) in mtk_nor_match_read()
173 } else if ((op->addr.buswidth == 1) && (op->data.buswidth == 1)) { in mtk_nor_match_read()
174 if (op->cmd.opcode == 0x03) in mtk_nor_match_read()
176 else if (op->cmd.opcode == 0x0b) in mtk_nor_match_read()
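mtk_nor_match_read() decides whether a read op maps onto one of the controller's hardware read modes: the op's dummy bytes are first converted to dummy clock cycles (bytes * 8 / bus width), and the opcode/bus-width combination is then checked against what the hardware can drive. The listing omits the comparison values, so the dual/quad dummy-cycle requirements below are placeholders; only the conversion formula and the branch structure come from the entries above:

#include <stdbool.h>
#include <stdint.h>

#define BITS_PER_BYTE 8

/* simplified stand-in for the spi_mem_op fields used here */
struct read_op {
	uint8_t cmd_opcode;
	uint8_t addr_buswidth;
	uint8_t data_buswidth;
	uint8_t dummy_nbytes;
	uint8_t dummy_buswidth;	/* assumed non-zero whenever dummy_nbytes is */
};

static bool match_hw_read(const struct read_op *op)
{
	int dummy = 0;

	/* dummy bytes -> dummy clock cycles on the bus */
	if (op->dummy_nbytes)
		dummy = op->dummy_nbytes * BITS_PER_BYTE / op->dummy_buswidth;

	if (op->data_buswidth == 2 || op->data_buswidth == 4) {
		/* dual/quad data: the required dummy counts are placeholders */
		if (op->addr_buswidth == 1)
			return dummy == 8;
		else if (op->addr_buswidth == 2)
			return dummy == 4;
		else if (op->addr_buswidth == 4)
			return dummy == 6;
	} else if (op->addr_buswidth == 1 && op->data_buswidth == 1) {
		if (op->cmd_opcode == 0x03)	/* plain read, no dummy */
			return dummy == 0;
		else if (op->cmd_opcode == 0x0b)	/* fast read, 8 dummy clocks */
			return dummy == 8;
	}
	return false;
}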
182 static bool mtk_nor_match_prg(const struct spi_mem_op *op) in mtk_nor_match_prg() argument
187 if ((op->cmd.buswidth > 1) || (op->addr.buswidth > 1) || in mtk_nor_match_prg()
188 (op->dummy.buswidth > 1) || (op->data.buswidth > 1)) in mtk_nor_match_prg()
191 tx_len = op->cmd.nbytes + op->addr.nbytes; in mtk_nor_match_prg()
193 if (op->data.dir == SPI_MEM_DATA_OUT) { in mtk_nor_match_prg()
195 tx_len += op->dummy.nbytes; in mtk_nor_match_prg()
203 if ((!op->addr.nbytes) && in mtk_nor_match_prg()
204 (tx_len + op->data.nbytes > MTK_NOR_REG_PRGDATA_MAX + 1)) in mtk_nor_match_prg()
206 } else if (op->data.dir == SPI_MEM_DATA_IN) { in mtk_nor_match_prg()
210 rx_len = op->data.nbytes; in mtk_nor_match_prg()
211 prg_left = MTK_NOR_PRG_CNT_MAX / 8 - tx_len - op->dummy.nbytes; in mtk_nor_match_prg()
215 if (!op->addr.nbytes) in mtk_nor_match_prg()
220 prg_len = tx_len + op->dummy.nbytes + rx_len; in mtk_nor_match_prg()
224 prg_len = tx_len + op->dummy.nbytes; in mtk_nor_match_prg()
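mtk_nor_match_prg() decides whether an op with no dedicated hardware path can still run on the generic PRG shift engine, which has a small bank of output (PRGDATA) and input (SHIFT) byte registers and an overall bit budget (MTK_NOR_PRG_CNT_MAX). Command and address bytes always consume TX space, dummy bytes count toward TX only when output data follows them, and the whole transaction must fit the bit budget. Below is a standalone reconstruction of that budgeting with assumed capacities (6 PRGDATA bytes, 10 SHIFT bytes, 96 bits total) and simplified fields; the driver's extra special-casing of ops without an address (the entries for lines 203-204 and 215 above, where the size cannot be shrunk later) is left out:

#include <stdbool.h>

enum data_dir { DATA_NONE, DATA_IN, DATA_OUT };

/* assumed capacities of the PRG shift engine */
#define PRGDATA_BYTES 6		/* output byte registers */
#define SHIFT_BYTES   10	/* input byte registers */
#define PRG_CNT_MAX   96	/* total shift length, in bits */

struct prg_op {
	int cmd_nbytes, addr_nbytes, dummy_nbytes, data_nbytes;
	int cmd_bw, addr_bw, dummy_bw, data_bw;
	enum data_dir dir;
};

static bool match_prg(const struct prg_op *op)
{
	int tx_len, rx_len, prg_len, prg_left;

	/* the PRG engine only drives single-bit SPI */
	if (op->cmd_bw > 1 || op->addr_bw > 1 ||
	    op->dummy_bw > 1 || op->data_bw > 1)
		return false;

	tx_len = op->cmd_nbytes + op->addr_nbytes;

	if (op->dir == DATA_OUT) {
		/* dummy bytes must be shifted out ahead of the payload */
		tx_len += op->dummy_nbytes;
		if (tx_len + op->data_nbytes > PRGDATA_BYTES)
			return false;
	} else if (op->dir == DATA_IN) {
		if (tx_len > PRGDATA_BYTES)
			return false;
		/* bits left after cmd/addr/dummy bound the readable length */
		prg_left = PRG_CNT_MAX / 8 - tx_len - op->dummy_nbytes;
		if (prg_left <= 0)
			return false;
		if (prg_left > SHIFT_BYTES)
			prg_left = SHIFT_BYTES;
		rx_len = op->data_nbytes;
		if (rx_len > prg_left)
			rx_len = prg_left;
		prg_len = tx_len + op->dummy_nbytes + rx_len;
		if (prg_len > PRG_CNT_MAX / 8)
			return false;
	} else {
		prg_len = tx_len + op->dummy_nbytes;
		if (prg_len > PRG_CNT_MAX / 8)
			return false;
	}
	return true;
}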
231 static void mtk_nor_adj_prg_size(struct spi_mem_op *op) in mtk_nor_adj_prg_size() argument
235 tx_len = op->cmd.nbytes + op->addr.nbytes; in mtk_nor_adj_prg_size()
236 if (op->data.dir == SPI_MEM_DATA_OUT) { in mtk_nor_adj_prg_size()
237 tx_len += op->dummy.nbytes; in mtk_nor_adj_prg_size()
239 if (op->data.nbytes > tx_left) in mtk_nor_adj_prg_size()
240 op->data.nbytes = tx_left; in mtk_nor_adj_prg_size()
241 } else if (op->data.dir == SPI_MEM_DATA_IN) { in mtk_nor_adj_prg_size()
242 prg_left = MTK_NOR_PRG_CNT_MAX / 8 - tx_len - op->dummy.nbytes; in mtk_nor_adj_prg_size()
245 if (op->data.nbytes > prg_left) in mtk_nor_adj_prg_size()
246 op->data.nbytes = prg_left; in mtk_nor_adj_prg_size()
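mtk_nor_adj_prg_size() then shrinks an accepted op so it actually fits: for output it clamps the payload to the TX register space left after command, address and dummy bytes, and the input side is clamped the same way against the bit budget and SHIFT capacity. A tiny sketch of the output-side clamp, using the same assumed register count as the previous sketch:

#include <stddef.h>

#define PRGDATA_BYTES 6		/* same assumed output register count as above */

/* Clamp an accepted output op's payload to the TX register space left
 * after command, address and dummy bytes.  Ops whose header alone
 * overflows the registers were already rejected by the matcher. */
static size_t clamp_prg_tx(size_t cmd_n, size_t addr_n, size_t dummy_n,
			   size_t data_n)
{
	size_t used = cmd_n + addr_n + dummy_n;
	size_t tx_left = used < PRGDATA_BYTES ? PRGDATA_BYTES - used : 0;

	return data_n > tx_left ? tx_left : data_n;
}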
250 static int mtk_nor_adjust_op_size(struct spi_mem *mem, struct spi_mem_op *op) in mtk_nor_adjust_op_size() argument
254 if (!op->data.nbytes) in mtk_nor_adjust_op_size()
257 if ((op->addr.nbytes == 3) || (op->addr.nbytes == 4)) { in mtk_nor_adjust_op_size()
258 if ((op->data.dir == SPI_MEM_DATA_IN) && in mtk_nor_adjust_op_size()
259 mtk_nor_match_read(op)) { in mtk_nor_adjust_op_size()
261 if (op->data.nbytes > 0x400000) in mtk_nor_adjust_op_size()
262 op->data.nbytes = 0x400000; in mtk_nor_adjust_op_size()
264 if ((op->addr.val & MTK_NOR_DMA_ALIGN_MASK) || in mtk_nor_adjust_op_size()
265 (op->data.nbytes < MTK_NOR_DMA_ALIGN)) in mtk_nor_adjust_op_size()
266 op->data.nbytes = 1; in mtk_nor_adjust_op_size()
267 else if (!need_bounce(sp, op)) in mtk_nor_adjust_op_size()
268 op->data.nbytes &= ~MTK_NOR_DMA_ALIGN_MASK; in mtk_nor_adjust_op_size()
269 else if (op->data.nbytes > MTK_NOR_BOUNCE_BUF_SIZE) in mtk_nor_adjust_op_size()
270 op->data.nbytes = MTK_NOR_BOUNCE_BUF_SIZE; in mtk_nor_adjust_op_size()
272 } else if (op->data.dir == SPI_MEM_DATA_OUT) { in mtk_nor_adjust_op_size()
273 if (op->data.nbytes >= MTK_NOR_PP_SIZE) in mtk_nor_adjust_op_size()
274 op->data.nbytes = MTK_NOR_PP_SIZE; in mtk_nor_adjust_op_size()
276 op->data.nbytes = 1; in mtk_nor_adjust_op_size()
281 mtk_nor_adj_prg_size(op); in mtk_nor_adjust_op_size()
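For direct reads, mtk_nor_adjust_op_size() caps the transfer at 0x400000 bytes and then picks between byte-wise PIO, direct DMA, and the bounce buffer based on alignment. A standalone sketch of those rules; the 16-byte alignment and 4 KiB bounce-buffer size are assumptions standing in for MTK_NOR_DMA_ALIGN and MTK_NOR_BOUNCE_BUF_SIZE, whose values are not shown in the listing:

#include <stddef.h>
#include <stdint.h>

#define DMA_ALIGN        16U		/* assumption for MTK_NOR_DMA_ALIGN */
#define DMA_ALIGN_MASK   (DMA_ALIGN - 1)
#define BOUNCE_BUF_SIZE  4096U		/* assumption for MTK_NOR_BOUNCE_BUF_SIZE */
#define MAX_DMA_READ     0x400000U	/* cap taken from the listing */

/* Shrink a direct-read request so one pass can execute it, mirroring
 * the branch structure of mtk_nor_adjust_op_size(). */
static size_t adjust_read_size(uint32_t flash_addr, size_t nbytes,
			       const void *rx_buf)
{
	if (nbytes > MAX_DMA_READ)
		nbytes = MAX_DMA_READ;

	/* unaligned flash offset or a tiny read: fall back to byte PIO */
	if ((flash_addr & DMA_ALIGN_MASK) || nbytes < DMA_ALIGN)
		return 1;

	/* aligned destination: DMA directly, in whole aligned chunks */
	if (!((uintptr_t)rx_buf & DMA_ALIGN_MASK))
		return nbytes & ~(size_t)DMA_ALIGN_MASK;

	/* otherwise one bounce-buffer-sized pass at a time */
	if (nbytes > BOUNCE_BUF_SIZE)
		nbytes = BOUNCE_BUF_SIZE;
	return nbytes;
}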
286 const struct spi_mem_op *op) in mtk_nor_supports_op() argument
288 if (!spi_mem_default_supports_op(mem, op)) in mtk_nor_supports_op()
291 if (op->cmd.buswidth != 1) in mtk_nor_supports_op()
294 if ((op->addr.nbytes == 3) || (op->addr.nbytes == 4)) { in mtk_nor_supports_op()
295 switch (op->data.dir) { in mtk_nor_supports_op()
297 if (mtk_nor_match_read(op)) in mtk_nor_supports_op()
301 if ((op->addr.buswidth == 1) && in mtk_nor_supports_op()
302 (op->dummy.nbytes == 0) && in mtk_nor_supports_op()
303 (op->data.buswidth == 1)) in mtk_nor_supports_op()
311 return mtk_nor_match_prg(op); in mtk_nor_supports_op()
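mtk_nor_supports_op() wires the predicates together: after the spi-mem core's default check (line 288), the command must be single-bit; 3- or 4-byte-addressed reads must match a hardware read mode, 3- or 4-byte-addressed writes must be plain 1-1-1 with no dummy bytes, and everything else falls back to the PRG engine. A compact sketch of that decision, with the two predicates from the earlier sketches passed in as precomputed flags:

#include <stdbool.h>

enum data_dir { DIR_NONE, DIR_IN, DIR_OUT };

/* simplified op description; the two flags correspond to the
 * match_hw_read()/match_prg() sketches above */
struct nor_op_desc {
	int cmd_bw, addr_bw, data_bw;
	int addr_nbytes, dummy_nbytes;
	enum data_dir dir;
	bool matches_hw_read;
	bool matches_prg;
};

static bool supports_op(const struct nor_op_desc *op)
{
	if (op->cmd_bw != 1)		/* the opcode always goes out on one line */
		return false;

	if (op->addr_nbytes == 3 || op->addr_nbytes == 4) {
		if (op->dir == DIR_IN && op->matches_hw_read)
			return true;	/* dedicated read path */
		if (op->dir == DIR_OUT && op->addr_bw == 1 &&
		    op->dummy_nbytes == 0 && op->data_bw == 1)
			return true;	/* dedicated page-program path */
	}

	return op->matches_prg;		/* otherwise: generic PRG engine */
}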
314 static void mtk_nor_setup_bus(struct mtk_nor *sp, const struct spi_mem_op *op) in mtk_nor_setup_bus() argument
318 if (op->addr.nbytes == 4) in mtk_nor_setup_bus()
321 if (op->data.buswidth == 4) { in mtk_nor_setup_bus()
323 writeb(op->cmd.opcode, sp->base + MTK_NOR_REG_PRGDATA(4)); in mtk_nor_setup_bus()
324 if (op->addr.buswidth == 4) in mtk_nor_setup_bus()
326 } else if (op->data.buswidth == 2) { in mtk_nor_setup_bus()
328 writeb(op->cmd.opcode, sp->base + MTK_NOR_REG_PRGDATA(3)); in mtk_nor_setup_bus()
329 if (op->addr.buswidth == 2) in mtk_nor_setup_bus()
332 if (op->cmd.opcode == 0x0b) in mtk_nor_setup_bus()
383 static int mtk_nor_read_bounce(struct mtk_nor *sp, const struct spi_mem_op *op) in mtk_nor_read_bounce() argument
388 if (op->data.nbytes & MTK_NOR_DMA_ALIGN_MASK) in mtk_nor_read_bounce()
389 rdlen = (op->data.nbytes + MTK_NOR_DMA_ALIGN) & ~MTK_NOR_DMA_ALIGN_MASK; in mtk_nor_read_bounce()
391 rdlen = op->data.nbytes; in mtk_nor_read_bounce()
393 ret = mtk_nor_dma_exec(sp, op->addr.val, rdlen, sp->buffer_dma); in mtk_nor_read_bounce()
396 memcpy(op->data.buf.in, sp->buffer, op->data.nbytes); in mtk_nor_read_bounce()
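mtk_nor_read_bounce() rounds the read length up to the DMA alignment, runs the DMA into the driver's aligned bounce buffer, and copies only the requested bytes back out. A standalone sketch, where dma() is a hypothetical stand-in for mtk_nor_dma_exec() and the 16-byte alignment is again an assumption:

#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define DMA_ALIGN      16U		/* assumption for MTK_NOR_DMA_ALIGN */
#define DMA_ALIGN_MASK (DMA_ALIGN - 1)

/* dma() reads rdlen bytes from the flash at addr into a DMA-able
 * buffer and returns 0 on success; bounce must be DMA-aligned and at
 * least as large as the rounded-up length. */
static int read_via_bounce(uint32_t addr, void *dst, size_t nbytes,
			   void *bounce,
			   int (*dma)(uint32_t addr, size_t len, void *buf))
{
	size_t rdlen = nbytes;
	int ret;

	/* the DMA engine moves whole aligned chunks, so round up */
	if (rdlen & DMA_ALIGN_MASK)
		rdlen = (rdlen + DMA_ALIGN) & ~(size_t)DMA_ALIGN_MASK;

	ret = dma(addr, rdlen, bounce);
	if (ret)
		return ret;

	/* hand back only the bytes the caller asked for */
	memcpy(dst, bounce, nbytes);
	return 0;
}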
401 static int mtk_nor_read_dma(struct mtk_nor *sp, const struct spi_mem_op *op) in mtk_nor_read_dma() argument
406 if (need_bounce(sp, op)) in mtk_nor_read_dma()
407 return mtk_nor_read_bounce(sp, op); in mtk_nor_read_dma()
409 dma_addr = dma_map_single(sp->dev, op->data.buf.in, in mtk_nor_read_dma()
410 op->data.nbytes, DMA_FROM_DEVICE); in mtk_nor_read_dma()
415 ret = mtk_nor_dma_exec(sp, op->addr.val, op->data.nbytes, dma_addr); in mtk_nor_read_dma()
417 dma_unmap_single(sp->dev, dma_addr, op->data.nbytes, DMA_FROM_DEVICE); in mtk_nor_read_dma()
422 static int mtk_nor_read_pio(struct mtk_nor *sp, const struct spi_mem_op *op) in mtk_nor_read_pio() argument
424 u8 *buf = op->data.buf.in; in mtk_nor_read_pio()
466 static int mtk_nor_pp_buffered(struct mtk_nor *sp, const struct spi_mem_op *op) in mtk_nor_pp_buffered() argument
468 const u8 *buf = op->data.buf.out; in mtk_nor_pp_buffered()
476 for (i = 0; i < op->data.nbytes; i += 4) { in mtk_nor_pp_buffered()
482 (op->data.nbytes + 5) * BITS_PER_BYTE); in mtk_nor_pp_buffered()
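mtk_nor_pp_buffered() feeds the program-page payload into the controller's write FIFO four bytes at a time, with byte i of the buffer in the least significant byte of each 32-bit word, then kicks off a write whose clock count covers the payload plus five extra bytes (presumably the opcode and address phase). A sketch of just the packing loop, with emit() standing in for the writel() to MTK_NOR_REG_PP_DATA:

#include <stddef.h>
#include <stdint.h>

/* Pack the page-program payload into 32-bit FIFO words the way the
 * loop above does: byte i of the buffer ends up in the least
 * significant byte of each word. */
static void pp_fill_fifo(const uint8_t *buf, size_t nbytes,
			 void (*emit)(uint32_t word))
{
	size_t i;

	for (i = 0; i < nbytes; i += 4) {
		uint32_t val = (uint32_t)buf[i + 3] << 24 |
			       (uint32_t)buf[i + 2] << 16 |
			       (uint32_t)buf[i + 1] << 8 |
			       buf[i];
		emit(val);
	}
}

The buffered path is only taken for a full page worth of data (see mtk_nor_exec_op() below), so the loop can assume a multiple of four bytes.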
486 const struct spi_mem_op *op) in mtk_nor_pp_unbuffered() argument
488 const u8 *buf = op->data.buf.out; in mtk_nor_pp_unbuffered()
498 static int mtk_nor_spi_mem_prg(struct mtk_nor *sp, const struct spi_mem_op *op) in mtk_nor_spi_mem_prg() argument
507 tx_len = op->cmd.nbytes + op->addr.nbytes; in mtk_nor_spi_mem_prg()
510 if (op->data.dir == SPI_MEM_DATA_OUT) in mtk_nor_spi_mem_prg()
511 tx_len += op->dummy.nbytes + op->data.nbytes; in mtk_nor_spi_mem_prg()
512 else if (op->data.dir == SPI_MEM_DATA_IN) in mtk_nor_spi_mem_prg()
513 rx_len = op->data.nbytes; in mtk_nor_spi_mem_prg()
515 prg_len = op->cmd.nbytes + op->addr.nbytes + op->dummy.nbytes + in mtk_nor_spi_mem_prg()
516 op->data.nbytes; in mtk_nor_spi_mem_prg()
527 for (i = op->cmd.nbytes; i > 0; i--, reg_offset--) { in mtk_nor_spi_mem_prg()
529 bufbyte = (op->cmd.opcode >> ((i - 1) * BITS_PER_BYTE)) & 0xff; in mtk_nor_spi_mem_prg()
533 for (i = op->addr.nbytes; i > 0; i--, reg_offset--) { in mtk_nor_spi_mem_prg()
535 bufbyte = (op->addr.val >> ((i - 1) * BITS_PER_BYTE)) & 0xff; in mtk_nor_spi_mem_prg()
539 if (op->data.dir == SPI_MEM_DATA_OUT) { in mtk_nor_spi_mem_prg()
540 for (i = 0; i < op->dummy.nbytes; i++, reg_offset--) { in mtk_nor_spi_mem_prg()
545 for (i = 0; i < op->data.nbytes; i++, reg_offset--) { in mtk_nor_spi_mem_prg()
547 writeb(((const u8 *)(op->data.buf.out))[i], reg); in mtk_nor_spi_mem_prg()
565 if (op->data.dir == SPI_MEM_DATA_IN) { in mtk_nor_spi_mem_prg()
566 for (i = op->data.nbytes - 1; i >= 0; i--, reg_offset++) { in mtk_nor_spi_mem_prg()
568 ((u8 *)(op->data.buf.in))[i] = readb(reg); in mtk_nor_spi_mem_prg()
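mtk_nor_spi_mem_prg() serializes the op for the PRG engine by walking a descending register offset: command bytes go out most-significant-byte first, then address bytes, then zeroed dummy bytes, then any output payload; on the read side the SHIFT registers are copied back with op->data.buf.in filled from its last byte toward its first as the register offset increases. A standalone sketch of the TX serialization into a byte array modeling the PRGDATA registers; the starting offset is not shown in the listing, so it is taken as a parameter here, and the field layout is a simplified stand-in for struct spi_mem_op:

#include <stdint.h>

/* simplified stand-in for the spi_mem_op fields serialized here */
struct prg_tx {
	uint16_t opcode;	/* up to two command bytes */
	int cmd_nbytes;
	uint32_t addr;
	int addr_nbytes;
	int dummy_nbytes;
	const uint8_t *data;	/* outbound payload, may be NULL */
	int data_nbytes;
};

/* Fill prg[] (modeling the PRGDATA registers) by walking a descending
 * offset from first_reg, as the listing's reg_offset does; returns the
 * number of bytes placed, or -1 if the op would run past register 0. */
static int prg_serialize(const struct prg_tx *op, uint8_t *prg, int first_reg)
{
	int reg = first_reg;
	int i;

	if (op->cmd_nbytes + op->addr_nbytes + op->dummy_nbytes +
	    op->data_nbytes > first_reg + 1)
		return -1;

	/* command bytes, most significant byte first */
	for (i = op->cmd_nbytes; i > 0; i--, reg--)
		prg[reg] = (op->opcode >> ((i - 1) * 8)) & 0xff;

	/* address bytes, most significant byte first */
	for (i = op->addr_nbytes; i > 0; i--, reg--)
		prg[reg] = (op->addr >> ((i - 1) * 8)) & 0xff;

	/* dummy bytes shift out as zeros */
	for (i = 0; i < op->dummy_nbytes; i++, reg--)
		prg[reg] = 0;

	/* outbound payload, in wire order */
	for (i = 0; i < op->data_nbytes; i++, reg--)
		prg[reg] = op->data[i];

	return first_reg - reg;
}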
575 static int mtk_nor_exec_op(struct spi_mem *mem, const struct spi_mem_op *op) in mtk_nor_exec_op() argument
580 if ((op->data.nbytes == 0) || in mtk_nor_exec_op()
581 ((op->addr.nbytes != 3) && (op->addr.nbytes != 4))) in mtk_nor_exec_op()
582 return mtk_nor_spi_mem_prg(sp, op); in mtk_nor_exec_op()
584 if (op->data.dir == SPI_MEM_DATA_OUT) { in mtk_nor_exec_op()
585 mtk_nor_set_addr(sp, op); in mtk_nor_exec_op()
586 writeb(op->cmd.opcode, sp->base + MTK_NOR_REG_PRGDATA0); in mtk_nor_exec_op()
587 if (op->data.nbytes == MTK_NOR_PP_SIZE) in mtk_nor_exec_op()
588 return mtk_nor_pp_buffered(sp, op); in mtk_nor_exec_op()
589 return mtk_nor_pp_unbuffered(sp, op); in mtk_nor_exec_op()
592 if ((op->data.dir == SPI_MEM_DATA_IN) && mtk_nor_match_read(op)) { in mtk_nor_exec_op()
596 mtk_nor_setup_bus(sp, op); in mtk_nor_exec_op()
597 if (op->data.nbytes == 1) { in mtk_nor_exec_op()
598 mtk_nor_set_addr(sp, op); in mtk_nor_exec_op()
599 return mtk_nor_read_pio(sp, op); in mtk_nor_exec_op()
601 return mtk_nor_read_dma(sp, op); in mtk_nor_exec_op()
605 return mtk_nor_spi_mem_prg(sp, op); in mtk_nor_exec_op()
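mtk_nor_exec_op() is the dispatcher: ops with no payload, or with an address that is not 3 or 4 bytes, go to the PRG engine; addressed writes use the buffered page-program path when the payload is exactly MTK_NOR_PP_SIZE and the unbuffered path otherwise; addressed reads that match a hardware read mode use PIO for a single byte and DMA otherwise; anything left goes back to the PRG engine. A sketch of that decision order, with hw_read standing for the result of mtk_nor_match_read() and pp_size for MTK_NOR_PP_SIZE (whose value is not shown in the listing):

#include <stdbool.h>
#include <stddef.h>

enum data_dir { DIR_NONE, DIR_IN, DIR_OUT };

enum exec_path {
	PATH_PRG,		/* generic shift engine */
	PATH_PP_BUFFERED,	/* full-page program through the write FIFO */
	PATH_PP_UNBUFFERED,	/* byte-at-a-time program */
	PATH_READ_PIO,		/* one-byte hardware read */
	PATH_READ_DMA,		/* DMA (possibly bounced) hardware read */
};

static enum exec_path pick_path(enum data_dir dir, int addr_nbytes,
				size_t data_nbytes, size_t pp_size,
				bool hw_read)
{
	/* no payload, or an address the fast paths can't take: PRG engine */
	if (data_nbytes == 0 || (addr_nbytes != 3 && addr_nbytes != 4))
		return PATH_PRG;

	if (dir == DIR_OUT)
		return data_nbytes == pp_size ? PATH_PP_BUFFERED
					      : PATH_PP_UNBUFFERED;

	if (dir == DIR_IN && hw_read)
		return data_nbytes == 1 ? PATH_READ_PIO : PATH_READ_DMA;

	return PATH_PRG;
}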