
Searched refs:dev (Results 1 – 15 of 15) sorted by relevance

/mm/
dmapool.c
55 struct device *dev; member
81 list_for_each_entry(pool, &dev->dma_pools, pools) { in pools_show()
141 dev_err(pool->dev, "%s %s, %p/%pad (bad dma)\n", in pool_block_err()
233 if (!dev) in dma_pool_create_node()
262 retval->dev = dev; in dma_pool_create_node()
282 empty = list_empty(&dev->dma_pools); in dma_pool_create_node()
283 list_add(&retval->pools, &dev->dma_pools); in dma_pool_create_node()
288 err = device_create_file(dev, &dev_attr_pools); in dma_pool_create_node()
374 empty = list_empty(&pool->dev->dma_pools); in dma_pool_destroy()
507 devres_add(dev, ptr); in dmam_pool_create()
[all …]
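The dmapool.c hits above show how each DMA pool records its owning struct device and links itself onto dev->dma_pools. A minimal, hedged caller-side sketch of the dma_pool API those lines implement (the device, buffer size, and alignment are illustrative, not taken from the file):

    #include <linux/device.h>
    #include <linux/dmapool.h>

    /* Hypothetical driver-side use of the pool code listed above. */
    static int example_dmapool_setup(struct device *dev)
    {
            struct dma_pool *pool;
            dma_addr_t handle;
            void *buf;

            /* The pool keeps a back-pointer to dev (the 'dev' member at line 55). */
            pool = dma_pool_create("example", dev, 64, 8, 0);
            if (!pool)
                    return -ENOMEM;

            buf = dma_pool_alloc(pool, GFP_KERNEL, &handle);
            if (!buf) {
                    dma_pool_destroy(pool);
                    return -ENOMEM;
            }

            /* ... program 'handle' into the device, use 'buf' from the CPU ... */

            dma_pool_free(pool, buf, handle);
            dma_pool_destroy(pool);  /* unlinks the pool from dev->dma_pools */
            return 0;
    }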
backing-dev.c
425 dev_warn_once(dev, in stable_pages_required_show()
1003 bdi->dev = NULL; in bdi_init()
1087 struct device *dev; in bdi_register_va() local
1095 if (IS_ERR(dev)) in bdi_register_va()
1096 return PTR_ERR(dev); in bdi_register_va()
1099 bdi->dev = dev; in bdi_register_va()
1168 if (bdi->dev) { in bdi_unregister()
1170 device_unregister(bdi->dev); in bdi_unregister()
1171 bdi->dev = NULL; in bdi_unregister()
1187 WARN_ON_ONCE(bdi->dev); in release_bdi()
[all …]
hmm.c
647 dma_need_sync = !dev->dma_skip_sync; in hmm_dma_map_alloc()
649 if (dma_need_sync || dma_addressing_limited(dev)) in hmm_dma_map_alloc()
658 use_iova = dma_iova_try_alloc(dev, &map->state, 0, in hmm_dma_map_alloc()
660 if (!use_iova && dma_need_unmap(dev)) { in hmm_dma_map_alloc()
684 dma_iova_free(dev, &map->state); in hmm_dma_map_free()
739 if (dma_need_unmap(dev)) in hmm_dma_map_pfn()
745 switch (pci_p2pdma_state(p2pdma_state, dev, page)) { in hmm_dma_map_pfn()
760 ret = dma_iova_link(dev, state, paddr, offset, in hmm_dma_map_pfn()
780 if (dma_mapping_error(dev, dma_addr)) in hmm_dma_map_pfn()
783 if (dma_need_unmap(dev)) in hmm_dma_map_pfn()
[all …]
memory-tiers.c
24 struct device dev; member
145 return container_of(device, struct memory_tier, dev); in to_memory_tier()
161 struct memory_tier *tier = to_memory_tier(dev); in memory_tier_device_release()
169 static ssize_t nodelist_show(struct device *dev, in nodelist_show() argument
176 nmask = get_memtier_nodemask(to_memory_tier(dev)); in nodelist_show()
242 new_memtier->dev.id = adistance >> MEMTIER_CHUNK_BITS; in find_create_memory_tier()
243 new_memtier->dev.bus = &memory_tier_subsys; in find_create_memory_tier()
245 new_memtier->dev.groups = memtier_dev_groups; in find_create_memory_tier()
247 ret = device_register(&new_memtier->dev); in find_create_memory_tier()
250 put_device(&new_memtier->dev); in find_create_memory_tier()
[all …]
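memory-tiers.c follows the usual driver-core pattern of embedding a struct device in a larger object, recovering it with container_of(), and letting the device release callback free the container. A generic, hedged sketch of that pattern (the names are illustrative, not the ones used in memory-tiers.c):

    #include <linux/device.h>
    #include <linux/slab.h>

    struct my_tier {
            int adistance;
            struct device dev;      /* embedded, like memory_tier::dev */
    };

    #define to_my_tier(d) container_of(d, struct my_tier, dev)

    static void my_tier_release(struct device *dev)
    {
            kfree(to_my_tier(dev)); /* freed only when the last reference drops */
    }

    static struct my_tier *my_tier_create(int id)
    {
            struct my_tier *t = kzalloc(sizeof(*t), GFP_KERNEL);

            if (!t)
                    return NULL;
            t->dev.id = id;
            t->dev.release = my_tier_release;
            dev_set_name(&t->dev, "my_tier%d", id);
            if (device_register(&t->dev)) {
                    /* on failure, put_device() triggers release(), as in
                     * find_create_memory_tier() above */
                    put_device(&t->dev);
                    return NULL;
            }
            return t;
    }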
memremap.c
371 void *devm_memremap_pages(struct device *dev, struct dev_pagemap *pgmap) in devm_memremap_pages() argument
376 ret = memremap_pages(pgmap, dev_to_node(dev)); in devm_memremap_pages()
380 error = devm_add_action_or_reset(dev, devm_memremap_pages_release, in devm_memremap_pages()
388 void devm_memunmap_pages(struct device *dev, struct dev_pagemap *pgmap) in devm_memunmap_pages() argument
390 devm_release_action(dev, devm_memremap_pages_release, pgmap); in devm_memunmap_pages()
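The memremap.c hits are the devm_* wrappers that tie a dev_pagemap's lifetime to a struct device. A hedged caller sketch follows; the physical range, the MEMORY_DEVICE_GENERIC type, and the dev_pagemap field layout are assumptions for a recent kernel and vary between versions:

    #include <linux/device.h>
    #include <linux/err.h>
    #include <linux/memremap.h>

    static int example_map_device_memory(struct device *dev,
                                         phys_addr_t base, size_t size)
    {
            struct dev_pagemap *pgmap;
            void *addr;

            pgmap = devm_kzalloc(dev, sizeof(*pgmap), GFP_KERNEL);
            if (!pgmap)
                    return -ENOMEM;

            pgmap->type = MEMORY_DEVICE_GENERIC;
            pgmap->range.start = base;
            pgmap->range.end = base + size - 1;
            pgmap->nr_range = 1;

            /* Torn down automatically via devres when dev goes away. */
            addr = devm_memremap_pages(dev, pgmap);
            if (IS_ERR(addr))
                    return PTR_ERR(addr);
            return 0;
    }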
memory-failure.c
97 static ssize_t _name##_show(struct device *dev, \
102 &NODE_DATA(dev->id)->mf_stats; \
232 dev_t dev; in hwpoison_filter_dev() local
242 dev = mapping->host->i_sb->s_dev; in hwpoison_filter_dev()
244 hwpoison_filter_dev_major != MAJOR(dev)) in hwpoison_filter_dev()
247 hwpoison_filter_dev_minor != MINOR(dev)) in hwpoison_filter_dev()
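The hwpoison filter above compares the major/minor numbers packed into a dev_t. A small, hedged sketch of the kdev_t macros it relies on (the helper name is made up):

    #include <linux/kdev_t.h>
    #include <linux/types.h>

    static bool example_dev_matches(dev_t dev, u32 want_major, u32 want_minor)
    {
            /* MAJOR()/MINOR() unpack a dev_t; MKDEV() would build one. */
            return MAJOR(dev) == want_major && MINOR(dev) == want_minor;
    }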
dmapool_test.c
101 static void dmapool_test_release(struct device *dev) in dmapool_test_release() argument
shmem.c
3098 dev_t dev, unsigned long flags) in __shmem_get_inode() argument
3146 init_special_inode(inode, mode, dev); in __shmem_get_inode()
3179 umode_t mode, dev_t dev, unsigned long flags) in shmem_get_inode() argument
3184 inode = __shmem_get_inode(idmap, sb, dir, mode, dev, flags); in shmem_get_inode()
3207 umode_t mode, dev_t dev, unsigned long flags) in shmem_get_inode() argument
3209 return __shmem_get_inode(idmap, sb, dir, mode, dev, flags); in shmem_get_inode()
3897 struct dentry *dentry, umode_t mode, dev_t dev) in shmem_mknod() argument
3905 inode = shmem_get_inode(idmap, dir->i_sb, dir, mode, dev, VM_NORESERVE); in shmem_mknod()
5854 umode_t mode, dev_t dev, unsigned long flags) in shmem_get_inode() argument
5856 struct inode *inode = ramfs_get_inode(sb, dir, mode, dev); in shmem_get_inode()
Makefile
54 shmem.o util.o mmzone.o vmstat.o backing-dev.o \
memory_hotplug.c
1350 return device_online(&mem->dev); in online_memory_block()
2325 rc = device_offline(&mem->dev); in try_offline_memory_block()
2345 rc = device_online(&mem->dev); in try_reonline_memory_block()
compaction.c
2995 static ssize_t compact_store(struct device *dev, in compact_store() argument
2999 int nid = dev->id; in compact_store()
3014 return device_create_file(&node->dev, &dev_attr_compact); in compaction_register_node()
3019 device_remove_file(&node->dev, &dev_attr_compact); in compaction_unregister_node()
Kconfig.debug
256 allocations. See Documentation/dev-tools/kmemleak.rst for more
hugetlb.c
4540 struct node_hstate *nhs = &node_hstates[node->dev.id]; in hugetlb_unregister_node()
4570 struct node_hstate *nhs = &node_hstates[node->dev.id]; in hugetlb_register_node()
4580 &node->dev.kobj); in hugetlb_register_node()
4590 h->name, node->dev.id); in hugetlb_register_node()
vmscan.c
7896 static ssize_t reclaim_store(struct device *dev, in reclaim_store() argument
7900 int ret, nid = dev->id; in reclaim_store()
7909 return device_create_file(&node->dev, &dev_attr_reclaim); in reclaim_register_node()
7914 return device_remove_file(&node->dev, &dev_attr_reclaim); in reclaim_unregister_node()
Kconfig
491 # /dev/mem.
