/linux/tools/include/linux/bitops.h  (BITS_PER_TYPE defined here as a macro)
  17: #define BITS_PER_TYPE(type) (sizeof(type) * BITS_PER_BYTE)
  18: #define BITS_TO_LONGS(nr) DIV_ROUND_UP(nr, BITS_PER_TYPE(long))
  19: #define BITS_TO_U64(nr) DIV_ROUND_UP(nr, BITS_PER_TYPE(u64))
  20: #define BITS_TO_U32(nr) DIV_ROUND_UP(nr, BITS_PER_TYPE(u32))
  21: #define BITS_TO_BYTES(nr) DIV_ROUND_UP(nr, BITS_PER_TYPE(char))
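For orientation, a minimal userspace sketch of how these sizing helpers are typically used to dimension a bitmap's backing array; the local macro copies and the 130-bit figure are illustrative, not taken from tools/include:

    #include <stdio.h>

    /* Local copies for illustration; in-tree code gets these from bitops.h. */
    #define BITS_PER_BYTE        8
    #define BITS_PER_TYPE(type)  (sizeof(type) * BITS_PER_BYTE)
    #define DIV_ROUND_UP(n, d)   (((n) + (d) - 1) / (d))
    #define BITS_TO_LONGS(nr)    DIV_ROUND_UP(nr, BITS_PER_TYPE(long))

    int main(void)
    {
            /* 130 bits need ceil(130 / 64) = 3 longs on a 64-bit build. */
            unsigned long map[BITS_TO_LONGS(130)];

            printf("bits per long: %zu\n", BITS_PER_TYPE(long));
            printf("words for 130 bits: %zu\n", sizeof(map) / sizeof(map[0]));
            return 0;
    }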
 
/linux/lib/dim/dim.c
  62: u32 npkts = BIT_GAP(BITS_PER_TYPE(u32), end->pkt_ctr, start->pkt_ctr);  in dim_calc_stats()
  63: u32 nbytes = BIT_GAP(BITS_PER_TYPE(u32), end->byte_ctr,  in dim_calc_stats()
  65: u32 ncomps = BIT_GAP(BITS_PER_TYPE(u32), end->comp_ctr,  in dim_calc_stats()
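BIT_GAP(BITS_PER_TYPE(u32), end, start) measures how far a free-running 32-bit counter advanced between two samples, wrap included; the sketch below shows the equivalent modular arithmetic in plain C and is not the in-tree macro:

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative helper: distance from 'start' to 'end' in a wrapping
     * 32-bit counter space; unsigned subtraction handles the wrap. */
    static uint32_t counter_gap_u32(uint32_t end, uint32_t start)
    {
            return end - start;
    }

    int main(void)
    {
            uint32_t start = 0xfffffff0u;   /* sample taken just before the wrap */
            uint32_t end   = 0x00000010u;   /* sample taken just after the wrap  */

            printf("packets in interval: %u\n", counter_gap_u32(end, start)); /* 32 */
            return 0;
    }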
 
/linux/security/landlock/ruleset.h
  36: static_assert(BITS_PER_TYPE(access_mask_t) >= LANDLOCK_NUM_ACCESS_FS);
  38: static_assert(BITS_PER_TYPE(access_mask_t) >= LANDLOCK_NUM_ACCESS_NET);
  40: static_assert(BITS_PER_TYPE(access_mask_t) >= LANDLOCK_NUM_SCOPE);
  62: static_assert(BITS_PER_TYPE(layer_mask_t) >= LANDLOCK_MAX_NUM_LAYERS);
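These asserts fail the build whenever the mask type is narrower than the number of rights it has to encode; a self-contained C11 sketch of the same pattern, using a hypothetical access_mask_t and bit count rather than Landlock's real ones:

    #include <assert.h>   /* static_assert (C11) */
    #include <stdint.h>

    #define BITS_PER_BYTE        8
    #define BITS_PER_TYPE(type)  (sizeof(type) * BITS_PER_BYTE)

    /* Hypothetical mask type and number of access rights. */
    typedef uint16_t access_mask_t;
    #define NUM_ACCESS_RIGHTS 13

    /* One bit per access right must fit in the mask type. */
    static_assert(BITS_PER_TYPE(access_mask_t) >= NUM_ACCESS_RIGHTS,
                  "access_mask_t is too narrow for all access rights");

    int main(void)
    {
            return 0;
    }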
 
/linux/include/linux/bitops.h  (BITS_PER_TYPE defined here as a macro)
  11: #define BITS_PER_TYPE(type) (sizeof(type) * BITS_PER_BYTE)
  12: #define BITS_TO_LONGS(nr) __KERNEL_DIV_ROUND_UP(nr, BITS_PER_TYPE(long))
  13: #define BITS_TO_U64(nr) __KERNEL_DIV_ROUND_UP(nr, BITS_PER_TYPE(u64))
  14: #define BITS_TO_U32(nr) __KERNEL_DIV_ROUND_UP(nr, BITS_PER_TYPE(u32))
  15: #define BITS_TO_BYTES(nr) __KERNEL_DIV_ROUND_UP(nr, BITS_PER_TYPE(char))
 
/linux/drivers/mux/gpio.c
  26: DECLARE_BITMAP(values, BITS_PER_TYPE(state));  in mux_gpio_set()
  29: bitmap_from_arr32(values, &value, BITS_PER_TYPE(value));  in mux_gpio_set()
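Here BITS_PER_TYPE(state) reserves one bitmap bit per bit of the scalar before the bitmap is handed to the GPIO array API; a rough userspace equivalent with a local DECLARE_BITMAP copy and a crude stand-in for bitmap_from_arr32() (assumptions noted in the comments):

    #include <stdint.h>
    #include <stdio.h>

    #define BITS_PER_BYTE        8
    #define BITS_PER_TYPE(type)  (sizeof(type) * BITS_PER_BYTE)
    #define BITS_PER_LONG        BITS_PER_TYPE(long)
    #define DIV_ROUND_UP(n, d)   (((n) + (d) - 1) / (d))
    /* Same idea as the kernel's DECLARE_BITMAP(): enough longs for 'bits' bits. */
    #define DECLARE_BITMAP(name, bits) \
            unsigned long name[DIV_ROUND_UP(bits, BITS_PER_LONG)]

    int main(void)
    {
            uint32_t state = 0x5u;   /* mux select lines 0 and 2 driven high */
            DECLARE_BITMAP(values, BITS_PER_TYPE(state)) = { 0 };

            values[0] = state;       /* crude stand-in for bitmap_from_arr32() */

            for (unsigned int i = 0; i < BITS_PER_TYPE(state); i++)
                    if (values[i / BITS_PER_LONG] & (1UL << (i % BITS_PER_LONG)))
                            printf("line %u set\n", i);
            return 0;
    }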
 
/linux/arch/riscv/kvm/aia_imsic.c
  20: #define IMSIC_MAX_EIX (IMSIC_MAX_ID / BITS_PER_TYPE(u64))
  23: unsigned long eip[BITS_PER_TYPE(u64) / BITS_PER_LONG];
  24: unsigned long eie[BITS_PER_TYPE(u64) / BITS_PER_LONG];
  251: unsigned long eipend[BITS_PER_TYPE(u64) / BITS_PER_LONG];  in imsic_mrif_topei()
  270: imin = ei * BITS_PER_TYPE(u64);  in imsic_mrif_topei()
  271: imax = ((imin + BITS_PER_TYPE(u64)) < max_msi) ?  in imsic_mrif_topei()
  272: imin + BITS_PER_TYPE(u64) : max_msi;  in imsic_mrif_topei()
  862: BITS_PER_TYPE(u64)];  in kvm_riscv_vcpu_aia_imsic_rmw()
  863: clear_bit(topei & (BITS_PER_TYPE(u64) - 1),  in kvm_riscv_vcpu_aia_imsic_rmw()
  979: eix = &imsic->swfile->eix[iid / BITS_PER_TYPE(u64)];  in kvm_riscv_vcpu_aia_imsic_inject()
  [all …]
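The eip/eie arrays store one 64-bit EIx register in BITS_PER_TYPE(u64) / BITS_PER_LONG native words, and an interrupt identity splits into a register index plus a bit index; the arithmetic below is an illustrative sketch, not the KVM code:

    #include <stdint.h>
    #include <stdio.h>

    #define BITS_PER_BYTE        8
    #define BITS_PER_TYPE(type)  (sizeof(type) * BITS_PER_BYTE)
    #define BITS_PER_LONG        BITS_PER_TYPE(long)

    int main(void)
    {
            unsigned int iid = 200;   /* some interrupt identity */

            unsigned int reg = iid / BITS_PER_TYPE(uint64_t);         /* which 64-bit EIx register */
            unsigned int bit = iid & (BITS_PER_TYPE(uint64_t) - 1);   /* bit inside that register  */

            printf("id %u -> register %u, bit %u (%zu longs per register)\n",
                   iid, reg, bit, BITS_PER_TYPE(uint64_t) / BITS_PER_LONG);
            return 0;
    }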
 
/linux/drivers/crypto/cavium/nitrox/nitrox_mbx.c
  124: DECLARE_BITMAP(csr, BITS_PER_TYPE(u64));  in nitrox_pf2vf_mbox_handler()
  135: for_each_set_bit(i, csr, BITS_PER_TYPE(csr)) {  in nitrox_pf2vf_mbox_handler()
  158: for_each_set_bit(i, csr, BITS_PER_TYPE(csr)) {  in nitrox_pf2vf_mbox_handler()
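The handler copies a 64-bit mailbox CSR into a bitmap and then visits every set bit; a plain-C sketch of that walk (the kernel uses for_each_set_bit(), the loop below only mimics it with made-up register contents):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint64_t csr = 0x8000000000000005ull;   /* pretend PF->VF interrupt bits */

            /* Visit each set bit, lowest first, as for_each_set_bit() would. */
            for (unsigned int vf = 0; vf < 64; vf++)
                    if (csr & (1ull << vf))
                            printf("message pending from VF %u\n", vf);
            return 0;
    }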
 
/linux/kernel/irq/ipi-mux.c
  140: for_each_set_bit(hwirq, &ipis, BITS_PER_TYPE(int))  in ipi_mux_process()
  163: if (BITS_PER_TYPE(int) < nr_ipi || !mux_send)  in ipi_mux_create()
 
/linux/drivers/dma/amd/qdma/qdma.c
  56: low_pos = f->lsb / BITS_PER_TYPE(*data);  in qdma_get_field()
  57: hi_pos = f->msb / BITS_PER_TYPE(*data);  in qdma_get_field()
  60: low_bit = f->lsb % BITS_PER_TYPE(*data);  in qdma_get_field()
  61: hi_bit = f->msb % BITS_PER_TYPE(*data);  in qdma_get_field()
  65: low_bit = f->lsb % BITS_PER_TYPE(*data);  in qdma_get_field()
  67: value = ((u64)data[hi_pos] << BITS_PER_TYPE(*data)) |  in qdma_get_field()
  72: hi_bit = f->msb % BITS_PER_TYPE(*data);  in qdma_get_field()
  92: low_pos = f->lsb / BITS_PER_TYPE(*data);  in qdma_set_field()
  93: hi_pos = f->msb / BITS_PER_TYPE(*data);  in qdma_set_field()
  94: low_bit = f->lsb % BITS_PER_TYPE(*data);  in qdma_set_field()
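qdma_get_field() splits a field's lsb/msb into a word index (/ BITS_PER_TYPE(*data)) and a bit offset (% BITS_PER_TYPE(*data)), joining two words when the field straddles a boundary; the extractor below is a simplified sketch assuming a field spans at most two 32-bit words, not the driver's implementation:

    #include <stdint.h>
    #include <stdio.h>

    #define BITS_PER_BYTE        8
    #define BITS_PER_TYPE(type)  (sizeof(type) * BITS_PER_BYTE)

    /* Extract bits [lsb, msb] (inclusive) from an array of 32-bit words. */
    static uint64_t get_field(const uint32_t *data, unsigned int msb, unsigned int lsb)
    {
            unsigned int low_pos = lsb / BITS_PER_TYPE(*data);
            unsigned int hi_pos  = msb / BITS_PER_TYPE(*data);
            unsigned int low_bit = lsb % BITS_PER_TYPE(*data);
            unsigned int width   = msb - lsb + 1;
            uint64_t value;

            if (hi_pos == low_pos)
                    value = data[low_pos] >> low_bit;
            else    /* field straddles two consecutive words */
                    value = (((uint64_t)data[hi_pos] << BITS_PER_TYPE(*data)) |
                             data[low_pos]) >> low_bit;

            return width < 64 ? value & ((1ull << width) - 1) : value;
    }

    int main(void)
    {
            uint32_t regs[2] = { 0xabcd0000u, 0x00001234u };

            /* 32-bit field at bits [47:16]: low half from regs[0], high
             * half from regs[1] -> prints 0x1234abcd. */
            printf("0x%llx\n", (unsigned long long)get_field(regs, 47, 16));
            return 0;
    }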
 
/linux/drivers/char/hw_random/cctrng.h
  18: #define CC_TRNG_EHR_IN_BITS (CC_TRNG_EHR_IN_WORDS * BITS_PER_TYPE(u32))
 
/linux/drivers/net/ethernet/intel/ice/ice_parser_rt.c
  148: return bitrev16(v) >> (BITS_PER_TYPE(v) - len);  in ice_bit_rev_u16()
  153: return bitrev32(v) >> (BITS_PER_TYPE(v) - len);  in ice_bit_rev_u32()
  162: offset = ICE_GPR_HV_IDX + (start / BITS_PER_TYPE(u16));  in ice_hv_bit_sel()
  170: val >>= start % BITS_PER_TYPE(u16);  in ice_hv_bit_sel()
  182: ((u32)rt->gpr[kb->len_reg1] << BITS_PER_TYPE(u16));  in ice_pk_build()
  431: offset = ICE_GPR_HV_IDX + (start / BITS_PER_TYPE(u16));  in ice_reg_bit_sel()
  436: val >>= start % BITS_PER_TYPE(u16);  in ice_reg_bit_sel()
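ice_bit_rev_u16() reverses only the low len bits of a value by reversing the whole word and then shifting the unused BITS_PER_TYPE(v) - len bits back out; a self-contained sketch with a software stand-in for bitrev16():

    #include <stdint.h>
    #include <stdio.h>

    #define BITS_PER_BYTE        8
    #define BITS_PER_TYPE(type)  (sizeof(type) * BITS_PER_BYTE)

    /* Plain-C stand-in for the kernel's bitrev16(). */
    static uint16_t bitrev16_sw(uint16_t v)
    {
            uint16_t r = 0;

            for (unsigned int i = 0; i < 16; i++)
                    r |= ((v >> i) & 1u) << (15 - i);
            return r;
    }

    /* Reverse only the low 'len' bits of v: reverse the whole word,
     * then drop the bits that were never part of the field. */
    static uint16_t bit_rev_u16(uint16_t v, unsigned int len)
    {
            return bitrev16_sw(v) >> (BITS_PER_TYPE(v) - len);
    }

    int main(void)
    {
            printf("0x%x\n", bit_rev_u16(0xb, 4));   /* 0b1011 over 4 bits -> 0b1101 */
            return 0;
    }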
 
/linux/drivers/mtd/nand/raw/ams-delta.c
  60: DECLARE_BITMAP(values, BITS_PER_TYPE(byte)) = { byte, };  in gpio_nand_io_write()
  71: DECLARE_BITMAP(values, BITS_PER_TYPE(byte)) = { byte, };  in gpio_nand_dir_output()
  87: DECLARE_BITMAP(values, BITS_PER_TYPE(res)) = { 0, };  in gpio_nand_io_read()
 
/linux/drivers/iommu/iommufd/iova_bitmap.c
  135: return iova / (BITS_PER_TYPE(*bitmap->bitmap) * pgsize);  in iova_bitmap_offset_to_index()
  146: return (index * BITS_PER_TYPE(*bitmap->bitmap)) << pgshift;  in iova_bitmap_index_to_offset()
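Each unsigned long of the dirty bitmap covers BITS_PER_TYPE(*bitmap->bitmap) pages, so an IOVA offset and a bitmap word index convert into each other by multiplying or dividing by (bits per word * page size); a worked example assuming a 4 KiB page size:

    #include <stdint.h>
    #include <stdio.h>

    #define BITS_PER_BYTE        8
    #define BITS_PER_TYPE(type)  (sizeof(type) * BITS_PER_BYTE)

    int main(void)
    {
            const uint64_t pgsize   = 4096;                  /* assumed page size */
            const uint64_t per_word = BITS_PER_TYPE(unsigned long) * pgsize;

            uint64_t iova  = 5ull * 1024 * 1024 * 1024;      /* 5 GiB into the mapped range */
            uint64_t index = iova / per_word;                /* which bitmap word            */
            uint64_t back  = index * per_word;               /* first IOVA that word tracks  */

            printf("word %llu covers IOVA 0x%llx..0x%llx\n",
                   (unsigned long long)index,
                   (unsigned long long)back,
                   (unsigned long long)(back + per_word - 1));
            return 0;
    }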
 
/linux/drivers/acpi/mipi-disco-img.c
  536: } else if (ret * BITS_PER_TYPE(u8) < num_lanes + 1) {  in init_csi2_port()
  538: ret * BITS_PER_TYPE(u8), num_lanes + 1);  in init_csi2_port()
  550: BUILD_BUG_ON(BITS_PER_TYPE(unsigned long) <= ACPI_DEVICE_CSI2_DATA_LANES);  in init_csi2_port()
  561: mask |= (unsigned long)val[i] << BITS_PER_TYPE(u8) * i;  in init_csi2_port()
 
/linux/drivers/net/mdio/mdio-mux-gpio.c
  26: DECLARE_BITMAP(values, BITS_PER_TYPE(desired_child));  in mdio_mux_gpio_switch_fn()
 
/linux/drivers/pinctrl/sunxi/pinctrl-sunxi.c
  68: offset / BITS_PER_TYPE(u32) * sizeof(u32);  in sunxi_mux_reg()
  69: *shift = offset % BITS_PER_TYPE(u32);  in sunxi_mux_reg()
  80: offset / BITS_PER_TYPE(u32) * sizeof(u32);  in sunxi_data_reg()
  81: *shift = offset % BITS_PER_TYPE(u32);  in sunxi_data_reg()
  92: offset / BITS_PER_TYPE(u32) * sizeof(u32);  in sunxi_dlevel_reg()
  93: *shift = offset % BITS_PER_TYPE(u32);  in sunxi_dlevel_reg()
  104: offset / BITS_PER_TYPE(u32) * sizeof(u32);  in sunxi_pull_reg()
  105: *shift = offset % BITS_PER_TYPE(u32);  in sunxi_pull_reg()
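Each of these helpers turns a bit offset into a 32-bit register's byte offset plus a shift inside that register; a small worked example of the same arithmetic (the values are made up):

    #include <stdint.h>
    #include <stdio.h>

    #define BITS_PER_BYTE        8
    #define BITS_PER_TYPE(type)  (sizeof(type) * BITS_PER_BYTE)

    int main(void)
    {
            unsigned int offset = 70;   /* bit index within a bank's register block */

            unsigned int reg   = offset / BITS_PER_TYPE(uint32_t) * sizeof(uint32_t);
            unsigned int shift = offset % BITS_PER_TYPE(uint32_t);

            /* Prints: bit 70 -> register byte offset 8, shift 6 */
            printf("bit %u -> register byte offset %u, shift %u\n", offset, reg, shift);
            return 0;
    }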
 
/linux/drivers/gpu/drm/drm_print.c
  286: if (WARN_ON_ONCE(nbits > BITS_PER_TYPE(value)))  in drm_print_bits()
  287: nbits = BITS_PER_TYPE(value);  in drm_print_bits()
 
/linux/drivers/gpu/drm/i915/i915_mitigations.c
  37: BUILD_BUG_ON(ARRAY_SIZE(names) >= BITS_PER_TYPE(mitigations));  in mitigations_set()
 
/linux/drivers/android/dbitmap.h
  24: #define NBITS_MIN BITS_PER_TYPE(unsigned long)
 
/linux/drivers/staging/media/atomisp/pci/isp/kernels/ipu2_io_ls/bayer_io_ls/ia_css_bayer_io.host.c
  35: DIV_ROUND_UP(HIVE_ISP_DDR_WORD_BITS, BITS_PER_TYPE(short));  in ia_css_bayer_io_config()
 
/linux/drivers/staging/media/atomisp/pci/isp/kernels/ipu2_io_ls/yuv444_io_ls/ia_css_yuv444_io.host.c
  35: DIV_ROUND_UP(HIVE_ISP_DDR_WORD_BITS, BITS_PER_TYPE(short));  in ia_css_yuv444_io_config()
 
/linux/drivers/hid/amd-sfh-hid/sfh1_1/amd_sfh_desc.c
  153: if (exp >= BITS_PER_TYPE(u32))  in amd_sfh_float_to_int()
  160: if (abs(shift) >= BITS_PER_TYPE(u32))  in amd_sfh_float_to_int()
 
/linux/net/netfilter/nft_bitwise.c
  46: carry = src[i - 1] >> (BITS_PER_TYPE(u32) - shift);  in nft_bitwise_eval_lshift()
  59: carry = src[i] << (BITS_PER_TYPE(u32) - shift);  in nft_bitwise_eval_rshift()
  152: if (priv->data.data[0] >= BITS_PER_TYPE(u32)) {  in nft_bitwise_init_shift()
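The lshift/rshift evaluation shifts a value stored as an array of u32 words, carrying the BITS_PER_TYPE(u32) - shift bits that fall off one word into its neighbour; the sketch below shows the left-shift case with word 0 taken as least significant, a simplification rather than nftables' actual register layout:

    #include <stdint.h>
    #include <stdio.h>

    #define BITS_PER_BYTE        8
    #define BITS_PER_TYPE(type)  (sizeof(type) * BITS_PER_BYTE)

    /* Shift a multi-word value left by 'shift' bits (0 < shift < 32),
     * word 0 least significant, purely for illustration. */
    static void lshift_words(uint32_t *dst, const uint32_t *src,
                             unsigned int nwords, unsigned int shift)
    {
            uint32_t carry = 0;

            for (unsigned int i = 0; i < nwords; i++) {
                    uint32_t word = src[i];

                    dst[i] = (word << shift) | carry;
                    carry  = word >> (BITS_PER_TYPE(uint32_t) - shift);
            }
    }

    int main(void)
    {
            uint32_t src[2] = { 0x80000001u, 0x00000000u };
            uint32_t dst[2];

            lshift_words(dst, src, 2, 4);
            /* 0x00000000_80000001 << 4 == 0x00000008_00000010 */
            printf("0x%08x 0x%08x\n", dst[1], dst[0]);
            return 0;
    }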
 
/linux/drivers/gpu/drm/i915/gt/selftest_ring.c
  16: ring->wrap = BITS_PER_TYPE(ring->size) - ilog2(sz);  in mock_ring()
 
/linux/include/net/ip_tunnels.h
  254: bitmap_set(supp, 0, BITS_PER_TYPE(__be16));  in ip_tunnel_flags_is_be16_compat()
  264: bitmap_write(dst, be16_to_cpu(flags), 0, BITS_PER_TYPE(__be16));  in ip_tunnel_flags_from_be16()
  272: ret = cpu_to_be16(bitmap_read(flags, 0, BITS_PER_TYPE(__be16)));  in ip_tunnel_flags_to_be16()
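These helpers treat the low BITS_PER_TYPE(__be16) = 16 bits of the extended flag bitmap as the legacy 16-bit flag word; a userspace sketch of the same idea using a single unsigned long as the bitmap (byte-order handling and the bitmap_* API are deliberately left out):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define BITS_PER_BYTE        8
    #define BITS_PER_TYPE(type)  (sizeof(type) * BITS_PER_BYTE)

    int main(void)
    {
            unsigned long flags = 0;

            flags |= 1ul << 3;    /* a legacy flag bit    */
            flags |= 1ul << 20;   /* an extended flag bit */

            /* Representable as the old u16 word only if nothing at or
             * above bit 16 is set. */
            bool be16_compat = (flags >> BITS_PER_TYPE(uint16_t)) == 0;
            uint16_t legacy  = (uint16_t)flags;   /* low 16 bits, host order */

            printf("be16 compatible: %s, legacy bits: 0x%04x\n",
                   be16_compat ? "yes" : "no", legacy);
            return 0;
    }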