Searched refs: CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS (Results 1 – 25 of 49) sorted by relevance

/linux/include/linux/
etherdevice.h:89 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in is_link_local_ether_addr()
107 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in is_zero_ether_addr()
125 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in is_multicast_ether_addr()
139 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in is_multicast_ether_addr_64bits()
297 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in ether_addr_copy()
348 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in ether_addr_equal()
378 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ether_addr_equal_64bits()
402 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in ether_addr_equal_unaligned()
531 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in compare_ether_header()
siphash.h:83 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) || in siphash()
133 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) || in hsiphash()
netfilter.h:27 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in nf_inet_addr_cmp()
44 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in nf_inet_addr_mask()
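
The etherdevice.h and netfilter.h hits above all follow the same shape: a word-wise fast path compiled in only when CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS is set, with a byte-wise fallback otherwise. A minimal, self-contained sketch of that pattern (illustrative only; mac_equal() and the memcpy-based loads are stand-ins for the kernel's helpers, which load the words directly):

    #include <stdbool.h>
    #include <stdint.h>
    #include <string.h>

    /* Compare two 6-byte MAC addresses. */
    static bool mac_equal(const uint8_t *a, const uint8_t *b)
    {
    #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)
            uint32_t a_lo, b_lo;
            uint16_t a_hi, b_hi;

            /* Fast path: treat each address as one 32-bit + one 16-bit word.
             * memcpy() stands in for an unaligned load; compilers emit a
             * single load on targets that select this option. */
            memcpy(&a_lo, a, 4);
            memcpy(&b_lo, b, 4);
            memcpy(&a_hi, a + 4, 2);
            memcpy(&b_hi, b + 4, 2);
            return ((a_lo ^ b_lo) | (a_hi ^ b_hi)) == 0;
    #else
            /* Fallback: byte-wise compare, safe on strict-alignment CPUs. */
            return memcmp(a, b, 6) == 0;
    #endif
    }

The variants guarded by the extra "&& BITS_PER_LONG == 64" condition extend the same idea to single 64-bit loads.
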
/linux/include/asm-generic/
flat.h:10 #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS in flat_get_addr_from_rp()
19 #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS in flat_put_addr_at_rp()
/linux/Documentation/translations/zh_CN/core-api/
unaligned-memory-access.rst:134 #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
223 so code can be made dependent on CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS, like so::
225 #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
/linux/crypto/
memneq.c:72 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in __crypto_memneq_generic()
96 #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS in __crypto_memneq_16()
/linux/lib/lzo/
lzo1x_decompress_safe.c:95 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in lzo1x_decompress_safe()
213 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in lzo1x_decompress_safe()
255 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in lzo1x_decompress_safe()
lzo1x_compress.c:56 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && \ in lzo1x_1_do_compress()
166 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && defined(LZO_USE_CTZ64) in lzo1x_1_do_compress()
186 #elif defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && defined(LZO_USE_CTZ32) in lzo1x_1_do_compress()
/linux/include/net/
ipv6.h:501 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_masked_addr_cmp()
548 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in __ipv6_addr_set_half()
576 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_addr_equal()
589 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64
638 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_addr_any()
650 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_addr_hash()
674 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_addr_loopback()
691 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_addr_v4mapped()
780 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64
800 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in __ipv6_addr_diff()
addrconf.h:443 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_addr_is_ll_all_nodes()
455 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_addr_is_ll_all_routers()
472 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_addr_is_solict_mult()
487 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_addr_is_all_snoopers()
/linux/mm/
maccess.c:29 if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) in copy_from_kernel_nofault()
63 if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) in copy_to_kernel_nofault()
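
The maccess.c hits, like the crypto_xor() ones just below, use the IS_ENABLED() form instead of #ifdef, which keeps both branches visible to the compiler and relies on dead-code elimination to drop the unused one. A rough, self-contained sketch of that style (HAVE_EFFICIENT_UNALIGNED_ACCESS and aligned_enough() are stand-ins; the real IS_ENABLED() lives in <linux/kconfig.h> and the actual check in mm/maccess.c differs in detail):

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Stand-in for IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS). */
    #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
    #define HAVE_EFFICIENT_UNALIGNED_ACCESS 1
    #else
    #define HAVE_EFFICIENT_UNALIGNED_ACCESS 0
    #endif

    /* Refuse word-sized copies that would need unaligned accesses on
     * CPUs where those are slow or fault. */
    static bool aligned_enough(const void *dst, const void *src, size_t size)
    {
            if (!HAVE_EFFICIENT_UNALIGNED_ACCESS &&
                (((uintptr_t)dst | (uintptr_t)src | size) % sizeof(long)))
                    return false;
            return true;
    }

In the kernel this kind of test decides whether a copy may proceed in long-sized chunks or must fall back to smaller accesses.
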
/linux/include/crypto/
algapi.h:149 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && in crypto_xor()
167 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && in crypto_xor_cpy()
/linux/lib/
siphash.c:52 #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
253 #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
421 #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
strncpy_from_user.c:15 #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
string.c:184 #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS in strscpy()
765 #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS in memcmp()
sort.c:38 #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS in is_aligned()
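
The lib/sort.c is_aligned() hit above shows the inverse use: #ifndef makes an alignment test stricter when unaligned access is not efficient, by folding the base pointer into the check. A simplified, self-contained sketch of that idea (can_use_word_copies() is a stand-in, not the kernel's function; align must be a power of two):

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    static bool can_use_word_copies(const void *base, size_t size, size_t align)
    {
            uintptr_t bits = size;

    #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
            /* Strict-alignment CPUs: the pointer must be aligned too. */
            bits |= (uintptr_t)base;
    #else
            (void)base;     /* pointer alignment is irrelevant here */
    #endif
            /* The element size (and possibly the pointer) must be a
             * multiple of align, assumed to be a power of two. */
            return (bits & (align - 1)) == 0;
    }
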
/linux/arch/arm/crypto/
aes-cipher-glue.c:47 #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
/linux/drivers/misc/cb710/
sgbuf2.c:47 #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS in needs_unaligned_copy()
/linux/drivers/net/ethernet/sfc/
rx_common.h:28 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in efx_rx_buf_hash()
/linux/Documentation/dev-tools/
ubsan.rst:80 unaligned accesses (CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS=y). One could
/linux/Documentation/core-api/
unaligned-memory-access.rst:155 #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
259 made dependent on CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS like so::
261 #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
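
The documentation hits above cover the rule behind all of these uses: roughly, direct loads and stores of possibly misaligned data are acceptable only when the option is set, and otherwise the code should go through the unaligned-access helpers. A self-contained sketch of a store written in that style (write_u32() is a stand-in; the kernel's own helpers live in <asm/unaligned.h> / <linux/unaligned.h>):

    #include <stdint.h>
    #include <string.h>

    /* Store a 32-bit value at a possibly misaligned buffer position. */
    static void write_u32(uint8_t *p, uint32_t v)
    {
    #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
            /* The architecture promises this is cheap and non-faulting;
             * the kernel also builds with -fno-strict-aliasing, which is
             * what makes this cast acceptable there. */
            *(uint32_t *)(void *)p = v;
    #else
            /* Portable fallback: the compiler lowers this to byte stores
             * (or a single store where it can prove alignment). */
            memcpy(p, &v, sizeof(v));
    #endif
    }
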
/linux/net/rds/
threads.c:272 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in rds_addr_cmp()
/linux/lib/zlib_inflate/
inffast.c:257 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) in inflate_fast()
/linux/tools/testing/selftests/bpf/
test_verifier.c:41 # define CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS 1 macro
1184 #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS in do_test_single()
1320 #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS in test_as_unpriv()
/linux/drivers/misc/lkdtm/
bugs.c:242 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) in lkdtm_UNALIGNED_LOAD_STORE_WRITE()

Completed in 967 milliseconds
