| /linux/drivers/android/ |
| dbitmap.h |
    38    dmap->nbits = 0;  in dbitmap_free()
    39    kfree(dmap->map);  in dbitmap_free()
    56    bit = find_last_bit(dmap->map, dmap->nbits);  in dbitmap_shrink_nbits()
    71    bitmap_copy(new, dmap->map, min(dmap->nbits, nbits));  in dbitmap_replace()
    72    kfree(dmap->map);  in dbitmap_replace()
    73    dmap->map = new;  in dbitmap_replace()
    88    if (!dbitmap_enabled(dmap) || dbitmap_shrink_nbits(dmap) != nbits) {  in dbitmap_shrink()
    110   if (!dbitmap_enabled(dmap) || nbits <= dmap->nbits) {  in dbitmap_grow()
    140   n = find_next_zero_bit(dmap->map, dmap->nbits, offset);  in dbitmap_acquire_next_zero_bit()
    159   if (!dmap->map) {  in dbitmap_init()
    [all …]
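
These binder hits outline a small dynamically sized bitmap: a `map` pointer plus an `nbits` count, resized around `find_last_bit()`/`find_next_zero_bit()` and swapped out with `bitmap_copy()`. A minimal sketch of that pattern follows; the struct layout and helper names are inferred from the hits, not copied from the real dbitmap.h.

#include <linux/bitmap.h>
#include <linux/errno.h>
#include <linux/minmax.h>
#include <linux/slab.h>

/* Illustrative layout; the real struct dbitmap lives in dbitmap.h. */
struct dbitmap_sketch {
	unsigned int nbits;
	unsigned long *map;
};

/* Swap in a bitmap of @nbits bits, keeping as many old bits as fit. */
static void dbitmap_sketch_replace(struct dbitmap_sketch *dmap,
				   unsigned long *new, unsigned int nbits)
{
	bitmap_copy(new, dmap->map, min(dmap->nbits, nbits));
	kfree(dmap->map);
	dmap->map = new;
	dmap->nbits = nbits;
}

/* Find, set and return the first clear bit at or after @offset. */
static int dbitmap_sketch_acquire_next_zero_bit(struct dbitmap_sketch *dmap,
						unsigned long offset)
{
	unsigned long n;

	n = find_next_zero_bit(dmap->map, dmap->nbits, offset);
	if (n == dmap->nbits)
		return -ENOSPC;	/* caller would grow the map and retry */

	set_bit(n, dmap->map);
	return n;
}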
|
| binder_internal.h |
    439   struct dbitmap dmap;  (member)
|
| /linux/fs/fuse/ |
| dax.c |
    136   if (dmap) {  in alloc_dax_mapping()
    144   return dmap;  in alloc_dax_mapping()
    221   dmap->itn.start = dmap->itn.last = start_idx;  in fuse_setup_one_mapping()
    295   dmap->itn.start, dmap->itn.last, dmap->window_offset,  in dmap_reinit_add_to_free_pool()
    299   dmap->itn.start = dmap->itn.last = 0;  in dmap_reinit_add_to_free_pool()
    641   if (dmap) {  in fuse_iomap_end()
    914   dmap->window_offset, dmap->length, ret);  in reclaim_one_dmap_locked()
    968   if (!dmap)  in inode_inline_reclaim_one_dmap()
    1009  dmap->itn.start = dmap->itn.last = 0;  in inode_inline_reclaim_one_dmap()
    1031  if (dmap)  in alloc_dax_mapping_reclaim()
    [all …]
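
The fuse/dax.c hits all handle a reusable DAX mapping range (`itn.start`/`itn.last` for the file range, `window_offset`/`length` for the slice of the DAX window) that is taken from and returned to a free pool. A hedged sketch of that pool pattern with stand-in names; the struct layout, lock, and helpers here are illustrative, not fuse's actual definitions.

#include <linux/list.h>
#include <linux/spinlock.h>

/* Illustrative stand-in for fuse's DAX mapping descriptor. */
struct example_dax_mapping {
	struct list_head list;
	unsigned long window_offset;	/* offset into the DAX window */
	unsigned long length;		/* size of the mapped range */
	unsigned long start_idx;	/* first file index covered */
	unsigned long last_idx;		/* last file index covered */
};

static LIST_HEAD(example_free_pool);
static DEFINE_SPINLOCK(example_pool_lock);

/* Take one free mapping off the pool, or NULL if the pool is empty. */
static struct example_dax_mapping *example_alloc_mapping(void)
{
	struct example_dax_mapping *dmap;

	spin_lock(&example_pool_lock);
	dmap = list_first_entry_or_null(&example_free_pool,
					struct example_dax_mapping, list);
	if (dmap)
		list_del_init(&dmap->list);
	spin_unlock(&example_pool_lock);

	return dmap;
}

/* Reset a mapping's file range and put it back on the free pool. */
static void example_reinit_add_to_free_pool(struct example_dax_mapping *dmap)
{
	dmap->start_idx = dmap->last_idx = 0;

	spin_lock(&example_pool_lock);
	list_add_tail(&dmap->list, &example_free_pool);
	spin_unlock(&example_pool_lock);
}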
|
| /linux/drivers/pmdomain/ti/ |
| omap_prm.c |
    59    const struct omap_prm_domain_map *dmap;  (member)
    158   .pwrstctrl = 0x0, .pwrstst = 0x4, .dmap = &omap_prm_reton,
    167   .pwrstctrl = 0, .pwrstst = 0x4, .dmap = &omap_prm_all,
    171   .pwrstctrl = 0x0, .pwrstst = 0x4, .dmap = &omap_prm_alwon,
    175   .pwrstctrl = 0x0, .pwrstst = 0x4, .dmap = &omap_prm_reton,
    199   .pwrstctrl = 0x0, .pwrstst = 0x4, .dmap = &omap_prm_reton
    212   .pwrstctrl = 0x0, .pwrstst = 0x4, .dmap = &omap_prm_alwon
    238   .pwrstctrl = 0, .pwrstst = 0x4, .dmap = &omap_prm_nooff,
    242   .pwrstctrl = 0x0, .pwrstst = 0x4, .dmap = &omap_prm_alwon
    269   .pwrstctrl = 0x0, .pwrstst = 0x4, .dmap = &omap_prm_reton
    [all …]
|
| /linux/drivers/md/ |
| dm-clone-metadata.c |
    127   struct dirty_map dmap[2];  (member)
    471   dmap->changed = 0;  in __dirty_map_init()
    474   if (!dmap->dirty_words)  in __dirty_map_init()
    762   dmap->changed = 0;  in __flush_dmap()
    781   dmap = cmd->current_dmap;  in dm_clone_metadata_pre_commit()
    784   next_dmap = (dmap == &cmd->dmap[0]) ? &cmd->dmap[1] : &cmd->dmap[0];  in dm_clone_metadata_pre_commit()
    836   struct dirty_map *dmap;  in dm_clone_set_region_hydrated()  (local)
    859   dmap->changed = 1;  in dm_clone_set_region_hydrated()
    871   struct dirty_map *dmap;  in dm_clone_cond_set_range()  (local)
    895   dmap->changed = 1;  in dm_clone_cond_set_range()
    [all …]
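
dm-clone keeps two dirty maps (`dmap[2]`) and flips `current_dmap` between them in dm_clone_metadata_pre_commit(), so new writes dirty one map while the other is flushed to disk. A hedged sketch of that A/B swap, with an illustrative struct in place of dm-clone's real metadata layout:

#include <linux/bitops.h>
#include <linux/spinlock.h>
#include <linux/types.h>

/* Illustrative double-buffered dirty map, not dm-clone's real layout. */
struct example_dirty_map {
	unsigned long *dirty_words;
	unsigned int changed;
};

struct example_metadata {
	spinlock_t lock;
	struct example_dirty_map dmap[2];
	struct example_dirty_map *current_dmap;
};

/* Mark a region dirty in whichever map is currently collecting changes. */
static void example_set_region_dirty(struct example_metadata *cmd,
				     unsigned long region)
{
	struct example_dirty_map *dmap;

	spin_lock_irq(&cmd->lock);
	dmap = cmd->current_dmap;
	__set_bit(region, dmap->dirty_words);
	dmap->changed = 1;
	spin_unlock_irq(&cmd->lock);
}

/* At pre-commit, redirect writers to the other map and return the old one. */
static struct example_dirty_map *
example_pre_commit_swap(struct example_metadata *cmd)
{
	struct example_dirty_map *dmap, *next_dmap;

	spin_lock_irq(&cmd->lock);
	dmap = cmd->current_dmap;
	next_dmap = (dmap == &cmd->dmap[0]) ? &cmd->dmap[1] : &cmd->dmap[0];
	cmd->current_dmap = next_dmap;
	spin_unlock_irq(&cmd->lock);

	return dmap;	/* caller flushes this one while writers use the other */
}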
|
| dm-zoned-metadata.c |
    1688  struct dmz_map *dmap;  in dmz_load_mapping()  (local)
    1707  dmap = dmap_mblk->data;  in dmz_load_mapping()
    1713  dzone_id = le32_to_cpu(dmap[e].dzone_id);  in dmz_load_mapping()
    1741  bzone_id = le32_to_cpu(dmap[e].bzone_id);  in dmz_load_mapping()
    1838  struct dmz_map *dmap = dmap_mblk->data;  in dmz_set_chunk_mapping()  (local)
    1841  dmap[map_idx].dzone_id = cpu_to_le32(dzone_id);  in dmz_set_chunk_mapping()
    1842  dmap[map_idx].bzone_id = cpu_to_le32(bzone_id);  in dmz_set_chunk_mapping()
    2051  struct dmz_map *dmap = dmap_mblk->data;  in dmz_get_chunk_mapping()  (local)
    2061  dzone_id = le32_to_cpu(dmap[dmap_idx].dzone_id);  in dmz_get_chunk_mapping()
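
dm-zoned keeps its chunk-to-zone table as an on-disk array of little-endian pairs and converts on every access with `le32_to_cpu()`/`cpu_to_le32()`, as the set/get hits show. A hedged sketch of that accessor pattern; the `example_*` names are stand-ins, only the endianness handling mirrors the hits.

#include <linux/types.h>
#include <asm/byteorder.h>

/* One on-disk mapping entry: data zone and optional buffer zone. */
struct example_dmz_map {
	__le32 dzone_id;
	__le32 bzone_id;
};

/* Store a chunk mapping, converting to the on-disk endianness. */
static void example_set_chunk_mapping(struct example_dmz_map *dmap,
				      unsigned int chunk,
				      u32 dzone_id, u32 bzone_id)
{
	dmap[chunk].dzone_id = cpu_to_le32(dzone_id);
	dmap[chunk].bzone_id = cpu_to_le32(bzone_id);
}

/* Read a chunk mapping back into CPU byte order. */
static void example_get_chunk_mapping(const struct example_dmz_map *dmap,
				      unsigned int chunk,
				      u32 *dzone_id, u32 *bzone_id)
{
	*dzone_id = le32_to_cpu(dmap[chunk].dzone_id);
	*bzone_id = le32_to_cpu(dmap[chunk].bzone_id);
}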
|
| /linux/drivers/media/pci/intel/ipu6/ |
| ipu6-mmu.c |
    586   dmap = kzalloc(sizeof(*dmap), GFP_KERNEL);  in alloc_dma_mapping()
    587   if (!dmap)  in alloc_dma_mapping()
    591   if (!dmap->mmu_info) {  in alloc_dma_mapping()
    592   kfree(dmap);  in alloc_dma_mapping()
    597   dmap->mmu_info->dmap = dmap;  in alloc_dma_mapping()
    603   return dmap;  in alloc_dma_mapping()
    747   struct ipu6_dma_mapping *dmap = mmu->dmap;  in ipu6_mmu_destroy()  (local)
    829   if (!mmu->dmap) {  in ipu6_mmu_init()
    839   struct ipu6_dma_mapping *dmap = mmu->dmap;  in ipu6_mmu_cleanup()  (local)
    842   mmu->dmap = NULL;  in ipu6_mmu_cleanup()
    [all …]
|
| ipu6-dma.c |
    171   iova = alloc_iova(&mmu->dmap->iovad, count,  in ipu6_dma_alloc()
    193   ret = ipu6_mmu_map(mmu->dmap->mmu_info,  in ipu6_dma_alloc()
    222   pci_dma_addr = ipu6_mmu_iova_to_phys(mmu->dmap->mmu_info,  in ipu6_dma_alloc()
    227   ipu6_mmu_unmap(mmu->dmap->mmu_info, ipu6_iova, PAGE_SIZE);  in ipu6_dma_alloc()
    233   __free_iova(&mmu->dmap->iovad, iova);  in ipu6_dma_alloc()
    275   pci_dma_addr = ipu6_mmu_iova_to_phys(mmu->dmap->mmu_info,  in ipu6_dma_free()
    288   __free_iova(&mmu->dmap->iovad, iova);  in ipu6_dma_free()
    333   struct iova *iova = find_iova(&mmu->dmap->iovad,  in ipu6_dma_unmap_sg()
    370   pci_dma_addr = ipu6_mmu_iova_to_phys(mmu->dmap->mmu_info,  in ipu6_dma_unmap_sg()
    386   __free_iova(&mmu->dmap->iovad, iova);  in ipu6_dma_unmap_sg()
    [all …]
|
| ipu6-mmu.h |
    35    struct ipu6_dma_mapping *dmap;  (member)
    48    struct ipu6_dma_mapping *dmap;  (member)
|
| /linux/sound/soc/tegra/ |
| tegra_pcm.c |
    80    struct snd_dmaengine_dai_dma_data *dmap;  in tegra_pcm_open()  (local)
    88    dmap = snd_soc_dai_get_dma_data(cpu_dai, substream);  in tegra_pcm_open()
    101   chan = dma_request_chan(cpu_dai->dev, dmap->chan_name);  in tegra_pcm_open()
    105   dmap->chan_name);  in tegra_pcm_open()
    113   dmap->chan_name);  in tegra_pcm_open()
    146   struct snd_dmaengine_dai_dma_data *dmap;  in tegra_pcm_hw_params()  (local)
    154   dmap = snd_soc_dai_get_dma_data(snd_soc_rtd_to_cpu(rtd, 0), substream);  in tegra_pcm_hw_params()
    155   if (!dmap)  in tegra_pcm_hw_params()
    169   slave_config.dst_addr = dmap->addr;  in tegra_pcm_hw_params()
    173   slave_config.src_addr = dmap->addr;  in tegra_pcm_hw_params()
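
Here `dmap` is the `snd_dmaengine_dai_dma_data` that the CPU DAI registered; tegra_pcm.c fetches it back with `snd_soc_dai_get_dma_data()`, requests a channel by `dmap->chan_name`, and points the dmaengine slave config at `dmap->addr`. A hedged sketch of the hw_params half of that flow; the `example_*` helper is illustrative and error handling is omitted.

#include <sound/dmaengine_pcm.h>
#include <sound/soc.h>

/*
 * Fill a dmaengine slave config from the DAI's DMA data: for playback the
 * device FIFO is the destination, for capture it is the source.
 */
static void example_fill_slave_config(struct snd_soc_dai *cpu_dai,
				      struct snd_pcm_substream *substream,
				      struct dma_slave_config *cfg)
{
	struct snd_dmaengine_dai_dma_data *dmap;

	dmap = snd_soc_dai_get_dma_data(cpu_dai, substream);
	if (!dmap)
		return;

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		cfg->direction = DMA_MEM_TO_DEV;
		cfg->dst_addr = dmap->addr;	/* audio FIFO address */
		cfg->dst_maxburst = dmap->maxburst;
	} else {
		cfg->direction = DMA_DEV_TO_MEM;
		cfg->src_addr = dmap->addr;
		cfg->src_maxburst = dmap->maxburst;
	}
}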
|
| /linux/drivers/gpu/drm/ttm/ |
| ttm_resource.c |
    660   struct iosys_map *dmap,  in ttm_kmap_iter_iomap_map_local()  (argument)
    687   iosys_map_set_vaddr_iomem(dmap, addr);  in ttm_kmap_iter_iomap_map_local()
    742   struct iosys_map *dmap,  in ttm_kmap_iter_linear_io_map_local()  (argument)
    748   *dmap = iter_io->dmap;  in ttm_kmap_iter_linear_io_map_local()
    749   iosys_map_incr(dmap, i * PAGE_SIZE);  in ttm_kmap_iter_linear_io_map_local()
    789   memset(&iter_io->dmap, 0, sizeof(iter_io->dmap));  in ttm_kmap_iter_linear_io_init()
    795   iosys_map_set_vaddr(&iter_io->dmap,  in ttm_kmap_iter_linear_io_init()
    802   if (iosys_map_is_null(&iter_io->dmap))  in ttm_kmap_iter_linear_io_init()
    837   if (iter_io->dmap.is_iomem)  in ttm_kmap_iter_linear_io_fini()
    838   iounmap(iter_io->dmap.vaddr_iomem);  in ttm_kmap_iter_linear_io_fini()
    [all …]
|
| ttm_tt.c |
    428   struct iosys_map *dmap,  in ttm_kmap_iter_tt_map_local()  (argument)
    434   iosys_map_set_vaddr(dmap, kmap_local_page_prot(iter_tt->tt->pages[i],  in ttm_kmap_iter_tt_map_local()
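
Both TTM hits implement the same `map_local` iterator callback: given a page offset `i`, describe a short-lived mapping of that page in the caller's `struct iosys_map`, either as an iomem or a system-memory address. A hedged sketch of a minimal `map_local`/unmap pair for system-memory pages; the iterator struct and names are illustrative, not TTM's.

#include <linux/highmem.h>
#include <linux/iosys-map.h>

/* Illustrative iterator over an array of pages. */
struct example_kmap_iter {
	struct page **pages;
};

/* map_local-style callback: map page @i and describe it in @dmap. */
static void example_kmap_iter_map_local(struct example_kmap_iter *iter,
					struct iosys_map *dmap, pgoff_t i)
{
	iosys_map_set_vaddr(dmap, kmap_local_page(iter->pages[i]));
}

/* Matching unmap: tear down whatever map_local handed out. */
static void example_kmap_iter_unmap_local(struct iosys_map *dmap)
{
	kunmap_local(dmap->vaddr);
}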
|
| /linux/fs/nilfs2/ |
| page.c |
    244   int nilfs_copy_dirty_pages(struct address_space *dmap,  in nilfs_copy_dirty_pages()  (argument)
    265   dfolio = filemap_grab_folio(dmap, folio->index);  in nilfs_copy_dirty_pages()
    299   void nilfs_copy_back_pages(struct address_space *dmap,  in nilfs_copy_back_pages()  (argument)
    317   dfolio = filemap_lock_folio(dmap, index);  in nilfs_copy_back_pages()
    335   xa_lock_irq(&dmap->i_pages);  in nilfs_copy_back_pages()
    336   f = __xa_store(&dmap->i_pages, index, folio, GFP_NOFS);  in nilfs_copy_back_pages()
    342   folio->mapping = dmap;  in nilfs_copy_back_pages()
    343   dmap->nrpages++;  in nilfs_copy_back_pages()
    345   __xa_set_mark(&dmap->i_pages, index,  in nilfs_copy_back_pages()
    348   xa_unlock_irq(&dmap->i_pages);  in nilfs_copy_back_pages()
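
nilfs_copy_back_pages() installs folios into the destination address_space `dmap` by storing them in its `i_pages` xarray under the xarray lock and re-tagging them dirty. A hedged sketch of just that installation step; the duplicate handling and error paths of the real function are left out, and the helper name is illustrative.

#include <linux/pagemap.h>
#include <linux/xarray.h>

/* Install @folio at @index in the destination mapping @dmap. */
static void example_install_folio(struct address_space *dmap,
				  struct folio *folio, pgoff_t index)
{
	void *old;

	xa_lock_irq(&dmap->i_pages);
	old = __xa_store(&dmap->i_pages, index, folio, GFP_NOFS);
	WARN_ON(old);			/* slot is expected to be empty */
	folio->mapping = dmap;		/* folio now belongs to dmap */
	dmap->nrpages++;
	if (folio_test_dirty(folio))
		__xa_set_mark(&dmap->i_pages, index, PAGECACHE_TAG_DIRTY);
	xa_unlock_irq(&dmap->i_pages);
}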
|
| /linux/fs/xfs/ |
| xfs_reflink.c |
    1127  struct xfs_bmbt_irec *dmap,  in xfs_reflink_remap_extent()  (argument)
    1166  resblks + dmap->br_blockcount, 0, false, &tp);  in xfs_reflink_remap_extent()
    1181  error = xfs_bmapi_read(ip, dmap->br_startoff, dmap->br_blockcount,  in xfs_reflink_remap_extent()
    1192  dmap->br_blockcount = min(dmap->br_blockcount, smap.br_blockcount);  in xfs_reflink_remap_extent()
    1203  if (dmap->br_state != smap.br_state) {  in xfs_reflink_remap_extent()
    1211  if (dmap->br_state == XFS_EXT_UNWRITTEN &&  in xfs_reflink_remap_extent()
    1218  XFS_FSB_TO_AGNO(mp, dmap->br_startblock));  in xfs_reflink_remap_extent()
    1247  dmap->br_blockcount, 0, false);  in xfs_reflink_remap_extent()
    1291  xfs_refcount_increase_extent(tp, dmap);  in xfs_reflink_remap_extent()
    1293  qdelta += dmap->br_blockcount;  in xfs_reflink_remap_extent()
    [all …]
|
| /linux/drivers/gpu/drm/ |
| drm_panic.c |
    260   static void drm_panic_blit16(struct iosys_map *dmap, unsigned int dpitch,  in drm_panic_blit16()  (argument)
    270   iosys_map_wr(dmap, y * dpitch + x * sizeof(u16), u16, fg16);  in drm_panic_blit16()
    286   iosys_map_wr(dmap, off, u8, (fg32 & 0x000000FF) >> 0);  in drm_panic_blit24()
    287   iosys_map_wr(dmap, off + 1, u8, (fg32 & 0x0000FF00) >> 8);  in drm_panic_blit24()
    288   iosys_map_wr(dmap, off + 2, u8, (fg32 & 0x00FF0000) >> 16);  in drm_panic_blit24()
    304   iosys_map_wr(dmap, y * dpitch + x * sizeof(u32), u32, fg32);  in drm_panic_blit32()
    371   iosys_map_wr(dmap, y * dpitch + x * sizeof(u16), u16, color);  in drm_panic_fill16()
    385   iosys_map_wr(dmap, off, u8, (color & 0x000000FF) >> 0);  in drm_panic_fill24()
    386   iosys_map_wr(dmap, off + 1, u8, (color & 0x0000FF00) >> 8);  in drm_panic_fill24()
    387   iosys_map_wr(dmap, off + 2, u8, (color & 0x00FF0000) >> 16);  in drm_panic_fill24()
    [all …]
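
drm_panic writes pixels straight into the framebuffer's `iosys_map` (`dmap`), computing the byte offset from the line pitch and the pixel size; 24 bpp is handled one byte at a time, as the blit24/fill24 hits show. A hedged sketch of a 16/32 bpp helper in that style; the function name and the `cpp` parameter are illustrative.

#include <linux/iosys-map.h>
#include <linux/types.h>

/*
 * Write one pixel of @color at (@x, @y) into the framebuffer described by
 * @dmap, where @dpitch is the line pitch in bytes and @cpp the bytes per
 * pixel (2 or 4 here; 3 bpp would need per-byte writes as in drm_panic_blit24).
 */
static void example_set_pixel(struct iosys_map *dmap, unsigned int dpitch,
			      unsigned int x, unsigned int y,
			      unsigned int cpp, u32 color)
{
	unsigned int off = y * dpitch + x * cpp;

	if (cpp == 2)
		iosys_map_wr(dmap, off, u16, (u16)color);
	else
		iosys_map_wr(dmap, off, u32, color);
}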
|
| /linux/fs/jfs/ |
| jfs_dmap.c |
    358   struct dmap *dp;  in dbFree()
    401   dp = (struct dmap *) mp->data;  in dbFree()
    457   struct dmap *dp;  in dbUpdatePMap()
    496   dp = (struct dmap *) mp->data;  in dbUpdatePMap()
    732   struct dmap *dp;  in dbAlloc()
    811   dp = (struct dmap *) mp->data;  in dbAlloc()
    989   struct dmap *dp;  in dbExtend()
    1043  dp = (struct dmap *) mp->data;  in dbExtend()
    1810  struct dmap *dp;  in dbAllocCtl()
    3191  struct dmap *dp;  in dbAllocBottomUp()
    [all …]
|
| /linux/include/drm/ttm/ |
| ttm_kmap_iter.h |
    32    struct iosys_map *dmap, pgoff_t i);
    42    struct iosys_map *dmap);
|
| ttm_resource.h |
    365   struct iosys_map dmap;  (member)
|
| /linux/drivers/misc/sgi-gru/ |
| grufault.c |
    143   struct gru_tlb_fault_map *dmap)  in get_clear_fault_map()  (argument)
    158   dmap->fault_bits[i] = k;  in get_clear_fault_map()
    522   struct gru_tlb_fault_map imap, dmap;  in gru_intr()  (local)
    536   get_clear_fault_map(gru, &imap, &dmap);  in gru_intr()
    541   dmap.fault_bits[0], dmap.fault_bits[1]);  in gru_intr()
    543   for_each_cbr_in_tfm(cbrnum, dmap.fault_bits) {  in gru_intr()
|
| /linux/drivers/staging/media/ipu3/ |
| ipu3.c |
    77    &imgu_pipe->queues[i].dmap);  in imgu_dummybufs_cleanup()
    98    &imgu_pipe->queues[i].dmap, size)) {  in imgu_dummybufs_preallocate()
    138   &imgu_pipe->queues[i].dmap,  in imgu_dummybufs_init()
    146   imgu_pipe->queues[i].dmap.daddr);  in imgu_dummybufs_init()
    163   if (WARN_ON(!imgu_pipe->queues[queue].dmap.vaddr))  in imgu_dummybufs_get()
    176   imgu_pipe->queues[queue].dmap.daddr);  in imgu_dummybufs_get()
|
| ipu3.h |
    105   struct imgu_css_map dmap;  (member)
|
| /linux/drivers/edac/ |
| pnd2_edac.c |
    418   static struct d_cr_dmap dmap[DNV_NUM_CHANNELS];  (variable)
    474   RD_REGP(&dmap[i], d_cr_dmap, dnv_dports[i]) ||  in dnv_get_registers()
    979   daddr->rank = dnv_get_bit(pmiaddr, dmap[pmiidx].rs0 + 13, 0);  in dnv_pmi2mem()
    981   daddr->rank |= dnv_get_bit(pmiaddr, dmap[pmiidx].rs1 + 13, 1);  in dnv_pmi2mem()
    989   daddr->bank = dnv_get_bit(pmiaddr, dmap[pmiidx].ba0 + 6, 0);  in dnv_pmi2mem()
    990   daddr->bank |= dnv_get_bit(pmiaddr, dmap[pmiidx].ba1 + 6, 1);  in dnv_pmi2mem()
    991   daddr->bank |= dnv_get_bit(pmiaddr, dmap[pmiidx].bg0 + 6, 2);  in dnv_pmi2mem()
    993   daddr->bank |= dnv_get_bit(pmiaddr, dmap[pmiidx].bg1 + 6, 3);  in dnv_pmi2mem()
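
The pnd2_edac hits decode a PMI address into rank and bank bits using per-channel `dmap` register fields that say where each bit lives. A hedged sketch of the helper those lines imply, pulling bit `bitno` out of `addr` and placing it at position `pos`; the real `dnv_get_bit()` may differ in detail, and the `example_*` names are stand-ins.

#include <linux/types.h>

/* Return bit @bitno of @addr, shifted to bit position @pos of the result. */
static u32 example_get_bit(u64 addr, unsigned int bitno, unsigned int pos)
{
	return (u32)((addr >> bitno) & 1) << pos;
}

/*
 * Example use, mirroring the dnv_pmi2mem() hits: the dmap register supplies
 * which PMI address bits select the rank, with a fixed offset of 13.
 */
static u32 example_decode_rank(u64 pmiaddr, unsigned int rs0, unsigned int rs1)
{
	u32 rank;

	rank  = example_get_bit(pmiaddr, rs0 + 13, 0);
	rank |= example_get_bit(pmiaddr, rs1 + 13, 1);
	return rank;
}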
|
| /linux/drivers/iommu/ |
| tegra-smmu.c |
    575   dma_addr_t *dmap)  in tegra_smmu_pte_lookup()  (argument)
    587   *dmap = smmu_pde_to_dma(smmu, pd[pd_index]);  in tegra_smmu_pte_lookup()
    593   dma_addr_t *dmap, struct page *page)  in as_get_pte()  (argument)
    620   *dmap = dma;  in as_get_pte()
    624   *dmap = smmu_pde_to_dma(smmu, pd[pde]);  in as_get_pte()
|
| /linux/drivers/media/platform/xilinx/ |
| xilinx-vipp.c |
    476   struct xvip_dma *dmap;  in xvip_graph_cleanup()  (local)
    482   list_for_each_entry_safe(dma, dmap, &xdev->dmas, list) {  in xvip_graph_cleanup()
|
| /linux/drivers/net/ethernet/apple/ |
| bmac.c |
    178   dbdma_continue(volatile struct dbdma_regs __iomem *dmap)  in dbdma_continue()  (argument)
    180   dbdma_st32(&dmap->control,  in dbdma_continue()
    186   dbdma_reset(volatile struct dbdma_regs __iomem *dmap)  in dbdma_reset()  (argument)
    188   dbdma_st32(&dmap->control,  in dbdma_reset()
    191   while (dbdma_ld32(&dmap->status) & RUN)  in dbdma_reset()
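
bmac drives its DBDMA channel through the `dmap` register block: write the control register, then poll status until the RUN bit drops. A hedged sketch of that stop-and-wait pattern with generic MMIO accessors; the register struct, the RUN value, and the helper name are illustrative, and the real driver uses its own `dbdma_st32()`/`dbdma_ld32()` little-endian wrappers instead of readl()/writel().

#include <linux/delay.h>
#include <linux/io.h>

#define EXAMPLE_RUN	0x8000		/* illustrative RUN bit in status */

/* Illustrative DBDMA-style register block: control, then status. */
struct example_dbdma_regs {
	u32 control;
	u32 status;
};

/* Stop the channel and spin until the controller reports it has halted. */
static void example_dbdma_reset(volatile struct example_dbdma_regs __iomem *dmap)
{
	/* Upper half of control selects which bits to clear; value bits are 0. */
	writel(EXAMPLE_RUN << 16, &dmap->control);

	while (readl(&dmap->status) & EXAMPLE_RUN)
		udelay(1);	/* busy-wait; real code should bound this */
}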
|