/drivers/gpu/drm/amd/amdkfd/
  kfd_crat.c
    59, 69, 79, 95, 105, 115, 145, 155, 165, 175: .cache_line_size = 64,   [all …]
  kfd_crat.h
    168: uint16_t cache_line_size;   (member)
    304: uint32_t cache_line_size;   (member)
/drivers/net/ethernet/mellanox/mlx5/core/
  alloc.c
    136: u32 db_per_page = PAGE_SIZE / cache_line_size();   in mlx5_alloc_db_pgdir()
    165: u32 db_per_page = PAGE_SIZE / cache_line_size();   in mlx5_alloc_db_from_pgdir()
    177: offset = db->index * cache_line_size();   in mlx5_alloc_db_from_pgdir()
    218: u32 db_per_page = PAGE_SIZE / cache_line_size();   in mlx5_db_free()
  wc.c
    120: if (MLX5_CAP_GEN(mdev, cqe_128_always) && cache_line_size() >= 128)   in mlx5_wc_create_cq()
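The alloc.c hits above are the doorbell allocator carving each page into one slot per CPU cache line: a page holds PAGE_SIZE / cache_line_size() doorbell records, and record i lives at offset i * cache_line_size(). A minimal userspace sketch of that arithmetic follows; the sysconf() calls and the 64-byte fallback are assumptions standing in for the kernel's PAGE_SIZE and cache_line_size(), not the driver's API.

```c
#include <stdio.h>
#include <unistd.h>

/* Userspace stand-ins for the kernel's PAGE_SIZE and cache_line_size(). */
static long page_bytes(void)
{
	return sysconf(_SC_PAGESIZE);
}

static long line_bytes(void)
{
	long n = sysconf(_SC_LEVEL1_DCACHE_LINESIZE);   /* glibc extension */

	return n > 0 ? n : 64;                          /* assume 64 if unknown */
}

int main(void)
{
	long db_per_page = page_bytes() / line_bytes();   /* doorbells per page */
	long index = 5;                                   /* some doorbell index */
	long offset = index * line_bytes();               /* its offset in the page */

	printf("%ld doorbells per page, index %ld at offset %ld\n",
	       db_per_page, index, offset);
	return 0;
}
```

Packing one doorbell per cache line keeps doorbells belonging to different queues from false-sharing a line while still reusing a single page for many of them.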
/drivers/s390/cio/
  airq.c
    141: if ((cache_line_size() * BITS_PER_BYTE) < bits   in airq_iv_create()
    308: cache_line_size(),   in airq_init()
    309: cache_line_size(), PAGE_SIZE);   in airq_init()
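The check at line 141 appears to reject interrupt-bit vectors that would not fit into a single cache line, i.e. vectors larger than cache_line_size() * BITS_PER_BYTE bits. A stand-alone sketch of that capacity test, assuming the 256-byte line size typical of s390:

```c
#include <stdio.h>

#define BITS_PER_BYTE 8
#define CACHE_LINE    256u   /* assumed; the driver calls cache_line_size() */

/* Does a vector of 'bits' interrupt bits fit inside one cache line? */
static int fits_in_one_line(unsigned long bits)
{
	return (CACHE_LINE * BITS_PER_BYTE) >= bits;
}

int main(void)
{
	printf("%d %d\n", fits_in_one_line(2048), fits_in_one_line(4096));   /* 1 0 */
	return 0;
}
```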
/drivers/infiniband/sw/rxe/
  rxe_queue.c
    77: if (elem_size < cache_line_size())   in rxe_queue_init()
    78: elem_size = cache_line_size();   in rxe_queue_init()
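rxe_queue_init() bumps the queue element size up to at least one cache line so that neighbouring ring elements do not share a line. The visible part of that sizing, as a small sketch with an assumed 64-byte line:

```c
#include <stddef.h>
#include <stdio.h>

#define CACHE_LINE 64u   /* assumed; the kernel queries cache_line_size() at run time */

/* Never let a queue element be smaller than one cache line. */
static size_t queue_elem_size(size_t elem_size)
{
	if (elem_size < CACHE_LINE)
		elem_size = CACHE_LINE;
	return elem_size;
}

int main(void)
{
	printf("%zu %zu\n", queue_elem_size(24), queue_elem_size(96));   /* 64 96 */
	return 0;
}
```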
/drivers/infiniband/hw/hfi1/
  mmu_rb.c
    51: free_ptr = kzalloc(sizeof(*h) + cache_line_size() - 1, GFP_KERNEL);   in hfi1_mmu_rb_register()
    55: h = PTR_ALIGN(free_ptr, cache_line_size());   in hfi1_mmu_rb_register()
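mmu_rb.c shows the classic way to get a cache-line-aligned object from an allocator that gives no alignment guarantee: over-allocate by cache_line_size() - 1 bytes, align the pointer with PTR_ALIGN(), and remember the original pointer for the eventual free. A userspace sketch of the same pattern; the struct layout and helper names here are illustrative, not the hfi1 ones:

```c
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define CACHE_LINE 64u   /* assumed; the kernel uses cache_line_size() */

/* Round p up to the next multiple of a (a power of two), like the kernel's PTR_ALIGN(). */
#define PTR_ALIGN_UP(p, a) ((void *)(((uintptr_t)(p) + ((a) - 1)) & ~((uintptr_t)(a) - 1)))

struct handler {
	void *free_ptr;    /* the pointer that was actually allocated */
	char payload[120];
};

static struct handler *handler_alloc(void)
{
	/* over-allocate so an aligned struct handler is guaranteed to fit */
	void *raw = calloc(1, sizeof(struct handler) + CACHE_LINE - 1);
	struct handler *h;

	if (!raw)
		return NULL;
	h = PTR_ALIGN_UP(raw, CACHE_LINE);
	h->free_ptr = raw;   /* keep the unaligned pointer for handler_free() */
	return h;
}

static void handler_free(struct handler *h)
{
	if (h)
		free(h->free_ptr);
}

int main(void)
{
	struct handler *h = handler_alloc();

	printf("aligned: %s\n", ((uintptr_t)h % CACHE_LINE) == 0 ? "yes" : "no");
	handler_free(h);
	return 0;
}
```

Storing the raw pointer inside the aligned object avoids carrying two pointers around; the over-allocation guarantees the aligned object still fits inside the original allocation.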
/drivers/pci/endpoint/
  pci-ep-cfs.c
    437: PCI_EPF_HEADER_R(cache_line_size)
    438: PCI_EPF_HEADER_W_u8(cache_line_size)
    455: CONFIGFS_ATTR(pci_epf_, cache_line_size);
/drivers/staging/vc04_services/interface/vchiq_arm/
  vchiq_arm.h
    34: unsigned int cache_line_size;   (member)
  vchiq_core.c
    1507: unsigned int cache_line_size;   in create_pagelist() (local)
    1660: cache_line_size = drv_mgmt->info->cache_line_size;   in create_pagelist()
    1662: ((pagelist->offset & (cache_line_size - 1)) ||   in create_pagelist()
    1663: ((pagelist->offset + pagelist->length) & (cache_line_size - 1)))) {   in create_pagelist()
    1693: unsigned int cache_line_size;   in free_pagelist() (local)
    1708: cache_line_size = drv_mgmt->info->cache_line_size;   in free_pagelist()
    1715: head_bytes = (cache_line_size - pagelist->offset) &   in free_pagelist()
    1716: (cache_line_size - 1);   in free_pagelist()
    1718: (cache_line_size - 1);   in free_pagelist()
    1731: (PAGE_SIZE - 1) & ~(cache_line_size - 1),   in free_pagelist()
    [all …]
  vchiq_arm.c
    68: .cache_line_size = 32,
    72: .cache_line_size = 64,
    205: drv_mgmt->fragments_size = 2 * drv_mgmt->info->cache_line_size;   in vchiq_platform_init()
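free_pagelist() in vchiq_core.c above needs to know how many bytes at the head and tail of the user buffer share a cache line with unrelated data, which is plausibly why vchiq_platform_init() sizes its per-transfer fragments to two cache lines (one for each partial line). The mask arithmetic visible around lines 1715 through 1718, as a stand-alone sketch using the smaller of the two platform line sizes listed in vchiq_arm.c:

```c
#include <stdio.h>

#define CACHE_LINE 32u   /* assumed; the driver reads drv_mgmt->info->cache_line_size */

int main(void)
{
	unsigned int offset = 10, length = 100;   /* example pagelist offset/length */

	/* bytes from 'offset' up to the next cache-line boundary (0 if already aligned) */
	unsigned int head_bytes = (CACHE_LINE - offset) & (CACHE_LINE - 1);
	/* bytes in the final, partially filled cache line (0 if the end is aligned) */
	unsigned int tail_bytes = (offset + length) & (CACHE_LINE - 1);

	printf("head=%u tail=%u\n", head_bytes, tail_bytes);   /* head=22 tail=14 */
	return 0;
}
```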
/drivers/pci/
  pci-acpi.c
    148: u8 cache_line_size; /* Not applicable to PCIe */   (member)
    156: .cache_line_size = 8,
    175: pci_write_config_byte(dev, PCI_CACHE_LINE_SIZE, hpx->cache_line_size);   in program_hpx_type0()
    210: hpx0->cache_line_size = fields[2].integer.value;   in decode_type0_hpx_record()
    754: hpx0.cache_line_size = fields[0].integer.value;   in acpi_run_hpp()
  pci-bridge-emul.h
    14: u8 cache_line_size;   (member)
  pci-bridge-emul.c
    356: bridge->conf.cache_line_size = 0x10;   in pci_bridge_emul_init()
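A note on the values above: the PCI Cache Line Size register counts 32-bit dwords, not bytes, so the _HPX type 0 default of 8 corresponds to 32 bytes and the emulated bridge's 0x10 to 64 bytes. The conversion, as a tiny sketch:

```c
#include <stdio.h>

int main(void)
{
	unsigned int line_bytes = 64;         /* host cache line in bytes */
	unsigned int reg = line_bytes / 4;    /* PCI_CACHE_LINE_SIZE is in 32-bit dwords */

	printf("PCI_CACHE_LINE_SIZE = 0x%02x\n", reg);   /* 0x10 */
	return 0;
}
```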
/drivers/edac/
  i7core_edac.c
    1983: const int cache_line_size = 64;   in set_sdram_scrub_rate() (local)
    1991: cache_line_size * 1000000;   in set_sdram_scrub_rate()
    2023: const u32 cache_line_size = 64;   in get_sdram_scrub_rate() (local)
    2043: 1000000 * cache_line_size;   in get_sdram_scrub_rate()
  thunderx_edac.c
    334: unsigned int cline_size = cache_line_size();   in inject_ecc_fn()
    405: unsigned int cline_size = cache_line_size();   in thunderx_lmc_inject_ecc_write()
/drivers/net/ethernet/mellanox/mlx5/core/lib/
  aso.c
    133: if (MLX5_CAP_GEN(mdev, cqe_128_always) && cache_line_size() >= 128)   in mlx5_aso_create_cq()
/drivers/gpu/drm/amd/display/dc/resource/dcn32/
  dcn32_resource.c
    2036: cache_lines_used = total_size_in_mall_bytes / dc->caps.cache_line_size + 2;   in dcn32_calculate_mall_ways_from_bytes()
    2038: total_cache_lines = dc->caps.max_cab_allocation_bytes / dc->caps.cache_line_size;   in dcn32_calculate_mall_ways_from_bytes()
    2207: dc->caps.cache_line_size = 64;   in dcn32_resource_construct()
    2535: dc->dml2_options.mall_cfg.cache_line_size_bytes = dc->caps.cache_line_size;   in dcn32_resource_construct()
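dcn32_calculate_mall_ways_from_bytes() works in units of whole cache lines: the surface footprint is divided by dc->caps.cache_line_size (with two lines of slack, per line 2036) and weighed against the total line capacity of the cache. A sketch of just that arithmetic, with made-up byte counts:

```c
#include <stdio.h>

int main(void)
{
	const unsigned int cache_line_size = 64;       /* dc->caps.cache_line_size */
	unsigned long long mall_bytes = 8ull << 20;    /* assumed surface footprint: 8 MiB */
	unsigned long long cab_bytes = 64ull << 20;    /* assumed max_cab_allocation_bytes */

	/* lines needed for the surfaces, plus 2 lines of slack as in the snippet */
	unsigned long long cache_lines_used = mall_bytes / cache_line_size + 2;
	unsigned long long total_cache_lines = cab_bytes / cache_line_size;

	printf("using %llu of %llu cache lines\n", cache_lines_used, total_cache_lines);
	return 0;
}
```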
/drivers/pci/controller/
  pcie-rockchip-ep.c
    138: rockchip_pcie_write(rockchip, hdr->cache_line_size,   in rockchip_pcie_ep_write_header()
/drivers/iommu/
  iova.c
    733: cache_line_size());   in iova_domain_init_rcaches()
/drivers/pci/controller/cadence/
  pcie-cadence-ep.c
    60: hdr->cache_line_size);   in cdns_pcie_ep_write_header()
/drivers/gpu/drm/amd/display/dc/resource/dcn321/
  dcn321_resource.c
    1715: dc->caps.cache_line_size = 64;   in dcn321_resource_construct()
    2034: dc->dml2_options.mall_cfg.cache_line_size_bytes = dc->caps.cache_line_size;   in dcn321_resource_construct()
/drivers/net/ethernet/mellanox/mlx4/
  fw.c
    1904: ((ilog2(cache_line_size()) - 4) << 5) | (1 << 4);   in mlx4_INIT_HCA()
    1953: dev->caps.eqe_size = cache_line_size();   in mlx4_INIT_HCA()
    1954: dev->caps.cqe_size = cache_line_size();   in mlx4_INIT_HCA()
  main.c
    378: if (cache_line_size() == 128 || cache_line_size() == 256) {   in mlx4_enable_cqe_eqe_stride()
    387: if (cache_line_size() != 32 && cache_line_size() != 64)   in mlx4_enable_cqe_eqe_stride()
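Line 1904 of fw.c passes the host cache line size to the HCA firmware as a log2 value biased by 4, shifted into bit 5 of a control field next to a flag at bit 4 (whose meaning lives in the surrounding driver code, not shown here), so a 64-byte line encodes as 0x50. The encoding reproduced as a small sketch:

```c
#include <stdio.h>

/* Integer log2 for the power-of-two line sizes the driver expects. */
static unsigned int ilog2u(unsigned int v)
{
	unsigned int r = 0;

	while (v >>= 1)
		r++;
	return r;
}

int main(void)
{
	unsigned int line = 64;   /* assumed cache_line_size() result */
	unsigned int field = ((ilog2u(line) - 4) << 5) | (1u << 4);

	printf("INIT_HCA cacheline field: 0x%02x\n", field);   /* 0x50 for 64-byte lines */
	return 0;
}
```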
/drivers/net/ethernet/marvell/mvpp2/
  mvpp2.h
    844: ETH_HLEN + ETH_FCS_LEN, cache_line_size())
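The mvpp2.h fragment at line 844 looks like the tail of an ALIGN(..., cache_line_size()) expression that rounds a receive length (MTU plus L2 overhead) up to a whole number of cache lines so the DMA buffer never ends mid-line. A sketch under that assumption, with an ALIGN-like macro and an assumed 64-byte line:

```c
#include <stdio.h>

#define ETH_HLEN    14    /* Ethernet header */
#define ETH_FCS_LEN  4    /* frame check sequence */
#define CACHE_LINE  64u   /* assumed cache_line_size() */

/* Round x up to a multiple of a (a power of two), like the kernel's ALIGN(). */
#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((a) - 1))

int main(void)
{
	unsigned int mtu = 1500;
	unsigned int pkt_size = ALIGN_UP(mtu + ETH_HLEN + ETH_FCS_LEN, CACHE_LINE);

	printf("rx buffer length: %u\n", pkt_size);   /* 1518 rounded up to 1536 */
	return 0;
}
```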