/drivers/gpu/drm/amd/amdkfd/
kfd_crat.c
      57  .cache_size = 16,
      67  .cache_size = 16,
      77  .cache_size = 8,
      93  .cache_size = 16,
     103  .cache_size = 32,
     113  .cache_size = 16,
     143  .cache_size = 16,
     153  .cache_size = 32,
     163  .cache_size = 16,
     186  .cache_size = 16,
    [all …]
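The kfd_crat.c hits are rows of static per-ASIC cache tables. A trimmed sketch of that shape (the real struct kfd_gpu_cache_info also records level, flags, and CU sharing; the entries here are illustrative, not taken from any one ASIC):

    #include <stdint.h>
    #include <stdio.h>

    /* Trimmed sketch of a kfd_crat.c cache-table entry. */
    struct gpu_cache_info {
        uint32_t cache_size; /* KiB */
    };

    static const struct gpu_cache_info example_cache_info[] = {
        { .cache_size = 16 }, /* illustrative entries only */
        { .cache_size = 32 },
        { .cache_size = 8  },
    };

    int main(void)
    {
        printf("first entry: %u KiB\n", example_cache_info[0].cache_size);
        return 0;
    }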
|
kfd_crat.h
     165  uint32_t cache_size;   (member)
     302  uint32_t cache_size;   (member)
|
kfd_topology.h
     104  uint32_t cache_size;   (member)
|
kfd_topology.c
     340  sysfs_show_32bit_prop(buffer, offs, "size", cache->cache_size);   in kfd_cache_show()
    1638  pcache->cache_size = pcache_info[cache_type].cache_size;   in fill_in_l1_pcache()
    1732  pcache->cache_size = pcache_info[cache_type].cache_size;   in fill_in_l2_l3_pcache()
    1735  pcache->cache_size /= mode;   in fill_in_l2_l3_pcache()
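fill_in_l2_l3_pcache() copies the table value and then divides it when the cache is shared (line 1735). A minimal userspace sketch of that split, assuming mode counts how many partitions share the cache (the helper name is hypothetical):

    #include <stdint.h>
    #include <stdio.h>

    struct cache_props {
        uint32_t cache_size; /* KiB, as in kfd_crat.h */
    };

    /* Hypothetical helper mirroring fill_in_l2_l3_pcache(): a shared
     * L2/L3 cache reports only its per-partition slice. */
    static void fill_shared_cache(struct cache_props *p,
                                  uint32_t total_kib, uint32_t mode)
    {
        p->cache_size = total_kib;
        if (mode) /* guard the division; the driver knows mode != 0 */
            p->cache_size /= mode;
    }

    int main(void)
    {
        struct cache_props p;

        fill_shared_cache(&p, 4096, 4); /* 4 MiB L3 shared four ways */
        printf("per-partition cache_size = %u KiB\n", p.cache_size);
        return 0;
    }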
|
/drivers/mtd/
mtdblock.c
      30  unsigned int cache_size;   (member)
      88  mtdblk->cache_offset, mtdblk->cache_size);   in write_cached_data()
      91  mtdblk->cache_size, mtdblk->cache_data);   in write_cached_data()
     114  unsigned int sect_size = mtdblk->cache_size;   in do_cached_write()
     162  mtdblk->cache_size = sect_size;   in do_cached_write()
     184  unsigned int sect_size = mtdblk->cache_size;   in do_cached_read()
     241  if (unlikely(!mtdblk->cache_data && mtdblk->cache_size)) {   in mtdblock_writesect()
     273  mtdblk->cache_size = mbd->mtd->erasesize;   in mtdblock_open()
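mtdblock buffers writes in a single erase-block-sized cache: cache_size is set to mtd->erasesize at open (line 273) and the buffer is allocated lazily on first write (line 241). A userspace sketch under those assumptions; the offset math and helper name are illustrative, not mtdblock's exact logic:

    #include <stdlib.h>
    #include <string.h>

    struct mtdblk_cache {
        unsigned int   cache_size;   /* erase-block size in bytes */
        unsigned long  cache_offset; /* flash offset of the cached block */
        unsigned char *cache_data;   /* lazily allocated buffer */
    };

    /* Buffer a write inside the one cached erase block. */
    static int cache_write(struct mtdblk_cache *c, unsigned long ofs,
                           const void *buf, unsigned int len)
    {
        if (!c->cache_size || (ofs % c->cache_size) + len > c->cache_size)
            return -1; /* sketch only handles writes within one block */
        if (!c->cache_data) {
            c->cache_data = malloc(c->cache_size); /* deferred allocation */
            if (!c->cache_data)
                return -1;
        }
        c->cache_offset = ofs - (ofs % c->cache_size); /* block start */
        memcpy(c->cache_data + (ofs - c->cache_offset), buf, len);
        return 0;
    }

    int main(void)
    {
        struct mtdblk_cache c = { .cache_size = 65536 };

        return cache_write(&c, 70000, "data", 4); /* lands in block 1 */
    }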
|
/drivers/infiniband/hw/hfi1/
user_pages.c
      13  static unsigned long cache_size = 256;   (variable)
      14  module_param(cache_size, ulong, S_IRUGO | S_IWUSR);
      15  MODULE_PARM_DESC(cache_size, "Send and receive side cache size limit (in MB)");
      73  cache_limit_pages = cache_size * (1024 * 1024) / PAGE_SIZE;   in hfi1_can_pin_pages()
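Here cache_size is a module parameter in MB, converted to a page-count pin limit at line 73. A minimal sketch of that conversion, assuming 4 KiB pages:

    #include <stdio.h>

    #define PAGE_SIZE 4096UL /* assumption: 4 KiB pages */

    /* MB module parameter -> page-count pin limit (user_pages.c:73). */
    static unsigned long mb_to_page_limit(unsigned long cache_size_mb)
    {
        return cache_size_mb * (1024 * 1024) / PAGE_SIZE;
    }

    int main(void)
    {
        printf("%lu pages\n", mb_to_page_limit(256)); /* default: 65536 */
        return 0;
    }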
|
/drivers/misc/lkdtm/
usercopy.c
      23  static volatile size_t cache_size = 1024;   (variable)
     234  memset(buf, 'B', cache_size);   in do_usercopy_slab_whitelist()
     237  offset = (cache_size / 4) + unconst;   in do_usercopy_slab_whitelist()
     238  size = (cache_size / 16) + unconst;   in do_usercopy_slab_whitelist()
     429  kmem_cache_create_usercopy("lkdtm-usercopy", cache_size,   in lkdtm_usercopy_init()
     431  cache_size / 4,   in lkdtm_usercopy_init()
     432  cache_size / 16,   in lkdtm_usercopy_init()
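lkdtm builds a slab cache whose usercopy whitelist covers only a sub-window of each object, then copies across its edges to prove hardened usercopy catches it. A kernel-side sketch (not standalone-buildable) of the creation call at lines 429-432; the flags value is an assumption:

    #include <linux/errno.h>
    #include <linux/init.h>
    #include <linux/slab.h>

    static struct kmem_cache *whitelist_cache;

    static int __init whitelist_init(void)
    {
        size_t cache_size = 1024;

        /* Whitelist window: cache_size/4 into the object, cache_size/16
         * long. Copies outside it trip hardened usercopy. */
        whitelist_cache = kmem_cache_create_usercopy("lkdtm-usercopy",
                              cache_size,
                              0,               /* default alignment */
                              0,               /* flags (assumption) */
                              cache_size / 4,  /* useroffset */
                              cache_size / 16, /* usersize */
                              NULL);
        return whitelist_cache ? 0 : -ENOMEM;
    }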
|
/drivers/md/
dm-cache-policy-smq.c
     802  dm_cblock_t cache_size;   (member)
    1118  clear_bitset(mq->cache_hit_bits, from_cblock(mq->cache_size));   in end_cache_period()
    1138  return from_cblock(mq->cache_size) * p / 100u;   in percent_to_target()
    1744  mq->cache_size = cache_size;   in __smq_create()
    1779  if (from_cblock(cache_size)) {   in __smq_create()
    1780  mq->cache_hit_bits = alloc_bitset(from_cblock(cache_size));   in __smq_create()
    1785  clear_bitset(mq->cache_hit_bits, from_cblock(mq->cache_size));   in __smq_create()
    1803  if (h_init(&mq->table, &mq->es, from_cblock(cache_size)))   in __smq_create()
    1844  return __smq_create(cache_size, origin_size, cache_block_size,   in smq_create()
    1852  return __smq_create(cache_size, origin_size, cache_block_size,   in mq_create()
    [all …]
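percent_to_target() (line 1138) turns a percentage tunable into an absolute count of cache blocks; from_cblock() just unwraps the dm_cblock_t. A minimal userspace sketch:

    #include <stdint.h>
    #include <stdio.h>

    /* Percentage tunable -> absolute cache-block count (line 1138). */
    static uint32_t percent_to_target(uint32_t cache_size_blocks, uint32_t p)
    {
        return cache_size_blocks * p / 100u;
    }

    int main(void)
    {
        printf("%u blocks\n", percent_to_target(100000, 5)); /* 5% */
        return 0;
    }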
|
dm-cache-policy.c
     113  dm_cblock_t cache_size,   in dm_cache_policy_create() (argument)
     126  p = type->create(cache_size, origin_size, cache_block_size);   in dm_cache_policy_create()
|
dm-cache-target.c
     333  dm_cblock_t cache_size;   (member)
    2333  cache->cache_size,   in create_cache_policy()
    2382  cache->cache_size = size;   in set_cache_size()
    2437  dm_block_t cache_size = ca->cache_sectors;   in cache_create() (local)
    2440  cache_size = block_div(cache_size, ca->block_size);   in cache_create()
    2441  set_cache_size(cache, to_cblock(cache_size));   in cache_create()
    2970  from_cblock(cache->cache_size),   in can_resize()
    2972  if (new_size != cache->cache_size) {   in can_resize()
    3001  uint32_t nr_blocks = from_cblock(cache->cache_size);   in truncate_oblocks()
    3029  if (!cache->sized || csize != cache->cache_size) {   in cache_preresume()
    [all …]
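cache_create() sizes the cache by dividing the device length in sectors by the block size (lines 2437-2441); block_div() rounds down, so plain integer division models it. A minimal sketch, assuming dm's 512-byte sectors:

    #include <stdio.h>

    /* Device length in 512-byte sectors -> cache-block count; block_div()
     * rounds down, so integer division models it. */
    static unsigned long long sectors_to_cache_blocks(
            unsigned long long cache_sectors,
            unsigned int block_size_sectors)
    {
        return cache_sectors / block_size_sectors;
    }

    int main(void)
    {
        /* 1 GiB device (2097152 sectors), 64 KiB blocks (128 sectors) */
        printf("%llu blocks\n", sectors_to_cache_blocks(2097152ULL, 128));
        return 0;
    }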
|
dm-cache-policy.h
     177  struct dm_cache_policy *(*create)(dm_cblock_t cache_size,
|
dm-cache-policy-internal.h
     146  struct dm_cache_policy *dm_cache_policy_create(const char *name, dm_cblock_t cache_size,
|
/drivers/cxl/
acpi.c
     344  resource_size_t cache_size;   in cxl_acpi_set_cache_size() (local)
     351  rc = hmat_get_extended_linear_cache_size(&res, nid, &cache_size);   in cxl_acpi_set_cache_size()
     361  if (cache_size && size != cache_size) {   in cxl_acpi_set_cache_size()
     364  &cache_size, &size);   in cxl_acpi_set_cache_size()
     368  cxlrd->cache_size = cache_size;   in cxl_acpi_set_cache_size()
     392  cxlrd->cache_size = 0;   in cxl_setup_extended_linear_cache()
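cxl_acpi_set_cache_size() accepts the HMAT-reported extended linear cache size only when it matches the region size (line 361). A userspace sketch of that check; treating a mismatch as "no cache" is an assumption here, and the driver's exact error path may differ:

    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t resource_size_t; /* stand-in for the kernel type */

    /* Accept the HMAT cache size only when it matches the region size. */
    static resource_size_t accept_cache_size(resource_size_t hmat_cache_size,
                                             resource_size_t region_size)
    {
        if (hmat_cache_size && region_size != hmat_cache_size) {
            fprintf(stderr, "cache size %llu != region size %llu\n",
                    (unsigned long long)hmat_cache_size,
                    (unsigned long long)region_size);
            return 0; /* assumption: mismatch means no aliasing */
        }
        return hmat_cache_size;
    }

    int main(void)
    {
        return accept_cache_size(1ULL << 32, 1ULL << 32) ? 0 : 1;
    }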
|
cxl.h
     437  resource_size_t cache_size;   (member)
     485  resource_size_t cache_size;   (member)
|
/drivers/acpi/numa/
hmat.c
     121  resource_size_t *cache_size)   in hmat_get_extended_linear_cache_size() (argument)
     141  *cache_size = tcache->cache_attrs.size;   in hmat_get_extended_linear_cache_size()
     145  *cache_size = 0;   in hmat_get_extended_linear_cache_size()
     528  cache->memory_PD, cache->cache_size, attrs,   in hmat_parse_cache()
     541  tcache->cache_attrs.size = cache->cache_size;   in hmat_parse_cache()
|
/drivers/cxl/core/
region.c
    3302  resource_size_t cache_size, start;   in cxl_extended_linear_cache_resize() (local)
    3304  cache_size = cxlrd->cache_size;   in cxl_extended_linear_cache_resize()
    3305  if (!cache_size)   in cxl_extended_linear_cache_resize()
    3308  if (size != cache_size) {   in cxl_extended_linear_cache_resize()
    3311  &cache_size, &size);   in cxl_extended_linear_cache_resize()
    3324  start = res->start - cache_size;   in cxl_extended_linear_cache_resize()
    3329  p->cache_size = cache_size;   in cxl_extended_linear_cache_resize()
    3514  if (!p->cache_size)   in cxl_port_get_spa_cache_alias()
    3517  if (spa >= p->res->start + p->cache_size)   in cxl_port_get_spa_cache_alias()
    3518  return spa - p->cache_size;   in cxl_port_get_spa_cache_alias()
    [all …]
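With an extended linear cache the region resource spans both the cache and the backing memory, and each SPA aliases the address cache_size away in the other half (the same subtraction mbox.c applies below at line 926). A sketch derived from the tests at lines 3514-3518; the lower-half branch is inferred by symmetry:

    #include <stdint.h>
    #include <stdio.h>

    /* Map a SPA to its extended-linear-cache alias. Addresses
     * cache_size apart within the region alias each other. */
    static uint64_t spa_cache_alias(uint64_t spa, uint64_t res_start,
                                    uint64_t cache_size)
    {
        if (!cache_size)
            return spa;              /* no extended linear cache */
        if (spa >= res_start + cache_size)
            return spa - cache_size; /* upper half -> lower alias */
        return spa + cache_size;     /* lower half -> upper (inferred) */
    }

    int main(void)
    {
        /* hypothetical region at 0x1000000000 with a 4 GiB cache */
        printf("alias = %#llx\n", (unsigned long long)
               spa_cache_alias(0x1100000000ULL, 0x1000000000ULL, 1ULL << 32));
        return 0;
    }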
|
mbox.c
     922  u64 cache_size = cxlr->params.cache_size;   in cxl_event_trace_record() (local)
     925  if (cache_size)   in cxl_event_trace_record()
     926  hpa_alias = hpa - cache_size;   in cxl_event_trace_record()
|
/drivers/block/null_blk/
main.c
     225  module_param_named(cache_size, g_cache_size, ulong, 0444);
     461  NULLB_DEVICE_ATTR(cache_size, ulong, NULL);
     803  dev->cache_size = g_cache_size;   in null_alloc_dev()
    1086  if ((nullb->dev->cache_size * 1024 * 1024) >   in null_make_cache_space()
    1227  nullb->dev->cache_size * 1024 * 1024);   in null_handle_flush()
    1905  dev->cache_size = 0;   in null_validate_conf()
    1906  dev->cache_size = min_t(unsigned long, ULONG_MAX / 1024 / 1024,   in null_validate_conf()
    1907  dev->cache_size);   in null_validate_conf()
    1988  if (dev->cache_size > 0) {   in null_add_dev()
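null_blk also takes cache_size in MB and clamps it at ULONG_MAX / 1024 / 1024 (lines 1906-1907) so the later * 1024 * 1024 cannot overflow. A minimal sketch of that clamp:

    #include <limits.h>
    #include <stdio.h>

    /* Cap the MB value so the later * 1024 * 1024 cannot overflow. */
    static unsigned long clamp_cache_size_mb(unsigned long cache_size_mb)
    {
        unsigned long max_mb = ULONG_MAX / 1024 / 1024;

        return cache_size_mb < max_mb ? cache_size_mb : max_mb;
    }

    int main(void)
    {
        unsigned long mb = clamp_cache_size_mb(100);

        printf("cache bytes = %lu\n", mb * 1024 * 1024);
        return 0;
    }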
|
null_blk.h
      81  unsigned long cache_size; /* disk cache size in MB */   (member)
|
/drivers/md/dm-vdo/indexer/
volume.h
     103  size_t cache_size;   (member)
|
/drivers/md/dm-vdo/
types.h
     224  unsigned int cache_size;   (member)
|
dm-vdo-target.c
     806  result = kstrtouint(dm_shift_arg(&arg_set), 10, &config->cache_size);   in parse_device_config()
     864  if (config->cache_size <   in parse_device_config()
    1367  vdo->device_config->cache_size, maximum_age,   in decode_vdo()
    1461  vdo_log_debug("Block map cache blocks = %u", config->cache_size);   in vdo_initialize()
    1785  if (to_validate->cache_size != config->cache_size) {   in validate_new_device_config()
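dm-vdo parses cache_size from a base-10 table argument via kstrtouint() (line 806), which rejects trailing junk and overflow. A userspace sketch of equivalently strict parsing; the helper name is hypothetical:

    #include <errno.h>
    #include <limits.h>
    #include <stdlib.h>

    /* Strict base-10 parse of one table argument (hypothetical helper). */
    static int parse_cache_size(const char *arg, unsigned int *cache_size)
    {
        char *end;
        unsigned long val;

        errno = 0;
        val = strtoul(arg, &end, 10);
        if (errno || end == arg || *end != '\0' || val > UINT_MAX)
            return -EINVAL; /* trailing junk or overflow */
        *cache_size = (unsigned int)val;
        return 0;
    }

    int main(void)
    {
        unsigned int cache_size;

        return parse_cache_size("32768", &cache_size);
    }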
|
block-map.h
     336  page_count_t cache_size, block_count_t maximum_age,
|
/drivers/gpu/drm/amd/amdgpu/
vcn_v5_0_1.c
     718  uint32_t offset, cache_size;   in vcn_v5_0_1_start_sriov() (local)
     761  cache_size = AMDGPU_GPU_PAGE_ALIGN(adev->vcn.inst[i].fw->size + 4);   in vcn_v5_0_1_start_sriov()
     782  offset = cache_size;   in vcn_v5_0_1_start_sriov()
     790  cache_size);   in vcn_v5_0_1_start_sriov()
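The SR-IOV start path sizes the VCN firmware region by rounding fw->size + 4 up to a GPU page (line 761), then uses that as the offset of the next region (line 782). A sketch of the alignment arithmetic, assuming the 4 KiB GPU page size that AMDGPU_GPU_PAGE_ALIGN uses:

    #include <stdint.h>
    #include <stdio.h>

    #define GPU_PAGE_SIZE 4096u /* assumption: 4 KiB GPU pages */
    #define GPU_PAGE_ALIGN(x) \
        (((x) + GPU_PAGE_SIZE - 1) & ~(GPU_PAGE_SIZE - 1))

    int main(void)
    {
        uint32_t fw_size = 123456; /* hypothetical firmware image size */
        uint32_t cache_size = GPU_PAGE_ALIGN(fw_size + 4);

        printf("cache_size = %u; next region starts at offset %u\n",
               cache_size, cache_size);
        return 0;
    }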
|
/drivers/base/
cacheinfo.c
     122  static void cache_size(struct cacheinfo *this_leaf, struct device_node *np)   in cache_size() (function)
     246  cache_size(this_leaf, np);   in cache_of_set_props()
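Here cache_size is a function rather than a field: it fills a struct cacheinfo leaf from device tree properties. A kernel-side sketch (not standalone-buildable) of the idea; the real helper selects "cache-size", "i-cache-size", or "d-cache-size" by leaf type, and only the unified case is shown:

    #include <linux/cacheinfo.h>
    #include <linux/of.h>

    /* Fill a cacheinfo leaf's size from its DT node (unified caches). */
    static void cache_size_of(struct cacheinfo *this_leaf,
                              struct device_node *np)
    {
        u32 size = 0;

        if (!of_property_read_u32(np, "cache-size", &size))
            this_leaf->size = size; /* bytes, per the DT binding */
    }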
|