Searched refs:cpu_to_le64 (Results 1 – 21 of 21) sorted by relevance
74 ctx->shash.a = cpu_to_le64((a << 1) | (b >> 63)); in ghash_setkey()
75 ctx->shash.b = cpu_to_le64((b << 1) | (a >> 63)); in ghash_setkey()
77 ctx->shash.a ^= cpu_to_le64((u64)0xc2 << 56); in ghash_setkey()
23 # define __mem_ioswabq(a, x) ((__force u64)cpu_to_le64(x))
24 # define __mem_ioswabq(a, x) ((__force u64)cpu_to_le64(x))
20 #define __mem_ioswabq(a, x) ((__force u64)cpu_to_le64(x))
48 # define __mem_ioswabq(a, x) ((__force u64)cpu_to_le64(x))
55 *pbits = cpu_to_le64(sctx->byte_count << 3); in ppc_md5_finup()
63 sctx->sha3.state[i] = cpu_to_le64(get_unaligned(p.u64++)); in sha3_256_import()
62 sctx->sha3.state[i] = cpu_to_le64(get_unaligned(p.u64++)); in sha3_512_import()
30 kvm_put_guest(kvm, base + offset, cpu_to_le64(steal)); in kvm_update_stolen_time()
75 *pbits = cpu_to_le64(sctx->byte_count << 3); in md5_sparc64_finup()
98 #define writeq_cpu(v, c) ((void)__raw_writeq((__force u64)cpu_to_le64(v), (c)))
65 #define cpu_to_lelong(__x) cpu_to_le64(__x)
72 WARN_ON(put_user(cpu_to_le64(steal), steal_ptr)); in kvm_riscv_vcpu_record_steal_time()
140 #define cpu_to_le64(val) (val) macro
155 #define cpu_to_le64(val) bswap_64(val) macro
586 return cpu_to_le64(data); in vcpu_data_host_to_guest()
414 *((u64 *)addr) = cpu_to_le64(datum); in iowrite64()
2118 val = cpu_to_le64(val); in vgic_its_save_ite()
2272 val = cpu_to_le64(val); in vgic_its_save_dte()
2458 val = cpu_to_le64(val); in vgic_its_save_cte()
906 data = cpu_to_le64(data); in vgic_data_host_to_mmio_bus()
255 data = (__force u64) cpu_to_le64(data); in zpci_cfg_store()
1035 vcpu->arch.shared->msr = cpu_to_le64(val); in kvmppc_set_msr_fast()
1051 ((u64 *)vdst)[i] = cpu_to_le64(((u64 *)vbuf)[i]); in p9_hmi_special_emu()
Completed in 46 milliseconds