Lines Matching refs:nand_dev
21 static struct nand_device nand_dev; variable
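The definition above is the single file-scope device instance; every other match below reaches it through a handful of geometry fields and callbacks. A minimal sketch of what that structure could look like, with the field and callback names taken from the listing (size, block_size, page_size, mtd_block_is_bad, mtd_read_page) and the types assumed, since the real declaration is not part of these matches:

#include <stdint.h>

/* Sketch only: field names follow the listing, types are assumptions. */
struct nand_device {
    unsigned long long size;     /* total device size in bytes */
    unsigned int block_size;     /* erase block size in bytes */
    unsigned int page_size;      /* page size in bytes */
    int (*mtd_block_is_bad)(unsigned int block);
    int (*mtd_read_page)(struct nand_device *nand, unsigned int page,
                         uintptr_t buffer);
};

static struct nand_device nand_dev;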
38 unsigned int block = offset / nand_dev.block_size; in nand_read()
39 unsigned int end_block = (offset + length - 1U) / nand_dev.block_size; in nand_read()
41 (offset % nand_dev.block_size) / nand_dev.page_size; in nand_read()
42 unsigned int nb_pages = nand_dev.block_size / nand_dev.page_size; in nand_read()
43 unsigned int start_offset = offset % nand_dev.page_size; in nand_read()
60 if (((start_offset != 0U) || (length % nand_dev.page_size) != 0U) && in nand_read()
61 (scratch_buff_size < nand_dev.page_size)) { in nand_read()
66 is_bad = nand_dev.mtd_block_is_bad(block); in nand_read()
74 nand_dev.size / nand_dev.block_size; in nand_read()
87 (length < nand_dev.page_size)) { in nand_read()
88 ret = nand_dev.mtd_read_page( in nand_read()
89 &nand_dev, in nand_read()
96 bytes_read = MIN((size_t)(nand_dev.page_size - in nand_read()
106 ret = nand_dev.mtd_read_page(&nand_dev, in nand_read()
113 bytes_read = nand_dev.page_size; in nand_read()
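Read together, the nand_read() matches above show the pattern: convert the byte offset into block and page coordinates, skip bad blocks by pushing the read window forward, bounce unaligned or short reads through a page-sized scratch buffer, and copy full aligned pages straight into the caller's buffer. A sketch of how those lines could fit together, building on the structure sketched after the definition line; the scratch buffer definition, the MIN() definition and the error codes are assumptions filled in here:

#include <errno.h>
#include <stdint.h>
#include <string.h>

/* Bounce buffer for reads that are not page aligned; the size is illustrative. */
static uint8_t scratch_buff[4096];
static const size_t scratch_buff_size = sizeof(scratch_buff);

#define MIN(a, b) (((a) < (b)) ? (a) : (b))

int nand_read(unsigned int offset, uintptr_t buffer, size_t length,
              size_t *length_read)
{
    /* Translate the byte offset into block/page coordinates. */
    unsigned int block = offset / nand_dev.block_size;
    unsigned int end_block = (offset + length - 1U) / nand_dev.block_size;
    unsigned int page_start = (offset % nand_dev.block_size) / nand_dev.page_size;
    unsigned int nb_pages = nand_dev.block_size / nand_dev.page_size;
    unsigned int start_offset = offset % nand_dev.page_size;
    unsigned int page;
    size_t bytes_read;
    int is_bad, ret;

    *length_read = 0U;

    /* Unaligned reads need a scratch buffer of at least one page. */
    if (((start_offset != 0U) || ((length % nand_dev.page_size) != 0U)) &&
        (scratch_buff_size < nand_dev.page_size)) {
        return -EINVAL;
    }

    while (block <= end_block) {
        is_bad = nand_dev.mtd_block_is_bad(block);
        if (is_bad < 0) {
            return is_bad;
        }
        if (is_bad == 1) {
            /* Bad block: shift the whole read window forward by one block. */
            unsigned int max_block =
                (unsigned int)(nand_dev.size / nand_dev.block_size);

            block++;
            end_block++;
            if ((block <= max_block) && (end_block <= max_block)) {
                continue;
            }
            return -EIO;
        }

        for (page = page_start; page < nb_pages; page++) {
            unsigned int page_idx = (block * nb_pages) + page;

            if ((start_offset != 0U) || (length < nand_dev.page_size)) {
                /* Partial page: read into scratch_buff, then copy only the
                 * bytes the caller asked for. */
                ret = nand_dev.mtd_read_page(&nand_dev, page_idx,
                                             (uintptr_t)scratch_buff);
                if (ret != 0) {
                    return ret;
                }
                bytes_read = MIN((size_t)(nand_dev.page_size - start_offset),
                                 length);
                memcpy((void *)buffer, scratch_buff + start_offset, bytes_read);
                start_offset = 0U;
            } else {
                /* Full, aligned page read straight into the caller's buffer. */
                ret = nand_dev.mtd_read_page(&nand_dev, page_idx, buffer);
                if (ret != 0) {
                    return ret;
                }
                bytes_read = nand_dev.page_size;
            }

            length -= bytes_read;
            buffer += bytes_read;
            *length_read += bytes_read;
            if (length == 0U) {
                break;
            }
        }

        page_start = 0U;
        block++;
    }

    return 0;
}

Note how end_block moves together with block whenever a bad block is skipped, so the loop still covers the full requested length.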
140 block = base / nand_dev.block_size; in nand_seek_bb()
143 offset_block = (base + offset - 1U) / nand_dev.block_size; in nand_seek_bb()
148 max_block = nand_dev.size / nand_dev.block_size; in nand_seek_bb()
155 is_bad = nand_dev.mtd_block_is_bad(block); in nand_seek_bb()
168 *extra_offset = count_bb * nand_dev.block_size; in nand_seek_bb()
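The nand_seek_bb() matches follow the same pattern: walk every block touched by [base, base + offset), extend the range by one block for each bad block encountered, and report the skipped bytes back through *extra_offset. A sketch along those lines, reusing nand_dev from the sketches above; the error code is again an assumption:

/*
 * Count the bad blocks between base and base + offset and report how many
 * extra bytes the caller must skip over to reach its data.
 */
int nand_seek_bb(uintptr_t base, unsigned int offset, size_t *extra_offset)
{
    unsigned int block = base / nand_dev.block_size;
    unsigned int offset_block = block;
    unsigned int max_block =
        (unsigned int)(nand_dev.size / nand_dev.block_size);
    size_t count_bb = 0U;
    int is_bad;

    if (offset != 0U) {
        offset_block = (base + offset - 1U) / nand_dev.block_size;
    }

    while (block <= offset_block) {
        if (block >= max_block) {
            return -EIO;
        }

        is_bad = nand_dev.mtd_block_is_bad(block);
        if (is_bad < 0) {
            return is_bad;
        }
        if (is_bad == 1) {
            /* Every bad block pushes the end of the range out by one block. */
            count_bb++;
            offset_block++;
        }
        block++;
    }

    *extra_offset = count_bb * nand_dev.block_size;

    return 0;
}

A caller would typically add *extra_offset to its base address before issuing reads, so that logical offsets land past the bad blocks.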
175 return &nand_dev; in get_nand_device()
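Finally, get_nand_device() is the accessor through which other units obtain the static instance: a lower-level driver fetches the pointer, fills in the geometry and callbacks, and the generic routines above do the rest. A hypothetical end-to-end setup to show that flow; example_block_is_bad(), example_read_page(), example_setup() and the geometry values are invented for illustration:

struct nand_device *get_nand_device(void)
{
    return &nand_dev;
}

/* Illustrative stubs standing in for a real low-level NAND driver. */
static int example_block_is_bad(unsigned int block)
{
    (void)block;
    return 0;                     /* pretend every block is good */
}

static int example_read_page(struct nand_device *nand, unsigned int page,
                             uintptr_t buffer)
{
    (void)page;
    memset((void *)buffer, 0xff, nand->page_size);  /* pretend erased data */
    return 0;
}

/* Hypothetical probe step: publish the discovered geometry and callbacks. */
static void example_setup(void)
{
    struct nand_device *dev = get_nand_device();

    dev->page_size = 2048U;                 /* 2 KiB pages */
    dev->block_size = 64U * 2048U;          /* 64 pages per block */
    dev->size = 1024ULL * dev->block_size;  /* 1024 blocks */
    dev->mtd_block_is_bad = example_block_is_bad;
    dev->mtd_read_page = example_read_page;
}

int main(void)
{
    uint8_t buf[256];
    size_t read_len;

    example_setup();

    /* Read 256 bytes starting at byte offset 1024. */
    return (nand_read(1024U, (uintptr_t)buf, sizeof(buf), &read_len) == 0) ? 0 : 1;
}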