/drivers/edac/

pnd2_edac.c
    963   daddr->col = column;  in apl_pmi2mem()
    964   daddr->bank = bank;  in apl_pmi2mem()
    965   daddr->row = row;  in apl_pmi2mem()
    966   daddr->rank = rank;  in apl_pmi2mem()
    967   daddr->dimm = 0;  in apl_pmi2mem()
    987   daddr->dimm = (daddr->rank >= 2) ^ drp[pmiidx].dimmflip;  in dnv_pmi2mem()
    1110  daddr->chan = pmiidx;  in get_memory_error_data()
    1117  addr, pmiaddr, daddr->chan, daddr->dimm, daddr->rank, daddr->bank, daddr->row, daddr->col);  in get_memory_error_data()
    1187  errcode, daddr->chan, daddr->dimm, daddr->rank, daddr->row, daddr->bank, daddr->col);  in pnd2_mce_output_error()
    1193  m->addr & ~PAGE_MASK, 0, daddr->chan, daddr->dimm, -1, optype, msg);  in pnd2_mce_output_error()
    [all …]
|
/drivers/infiniband/sw/rxe/

rxe_net.c
    26   struct in_addr *daddr)  in rxe_find_route4() argument
    34   memcpy(&fl.daddr, daddr, sizeof(*daddr));  in rxe_find_route4()
    50   struct in6_addr *daddr)  in rxe_find_route6() argument
    58   memcpy(&fl6.daddr, daddr, sizeof(*daddr));  in rxe_find_route6()
    65   rxe_dbg_qp(qp, "no route to %pI6\n", daddr);  in rxe_find_route6()
    70   rxe_dbg_qp(qp, "no route to %pI6\n", daddr);  in rxe_find_route6()
    85   struct in6_addr *daddr)  in rxe_find_route6() argument
    107  struct in_addr *daddr;  in rxe_find_route() local
    110  daddr = &av->dgid_addr._sockaddr_in.sin_addr;  in rxe_find_route()
    249  iph->daddr = daddr;  in prepare_ipv4_hdr()
    [all …]
|
rxe_recv.c
    114  struct in_addr *daddr =  in check_addr() local
    117  if ((ip_hdr(skb)->daddr != saddr->s_addr) ||  in check_addr()
    118  (ip_hdr(skb)->saddr != daddr->s_addr))  in check_addr()
    124  struct in6_addr *daddr =  in check_addr() local
    127  if (memcmp(&ipv6_hdr(skb)->daddr, saddr, sizeof(*saddr)) ||  in check_addr()
    128  memcmp(&ipv6_hdr(skb)->saddr, daddr, sizeof(*daddr)))  in check_addr()
    200  ipv6_addr_set_v4mapped(ip_hdr(skb)->daddr,  in rxe_rcv_mcast_pkt()
    203  memcpy(&dgid, &ipv6_hdr(skb)->daddr, sizeof(dgid));  in rxe_rcv_mcast_pkt()
    295  ipv6_addr_set_v4mapped(ip_hdr(skb)->daddr,  in rxe_chk_dgid()
    299  pdgid = (union ib_gid *)&ipv6_hdr(skb)->daddr;  in rxe_chk_dgid()
|
/drivers/media/platform/chips-media/wave5/

wave5-vdi.c
    34   &vpu_dev->common_mem.daddr, vpu_dev->common_mem.size, vpu_dev->common_mem.vaddr);  in wave5_vdi_allocate_common_memory()
    120  dma_addr_t daddr;  in wave5_vdi_allocate_dma_memory() local
    127  vaddr = dma_alloc_coherent(vpu_dev->dev, vb->size, &daddr, GFP_KERNEL);  in wave5_vdi_allocate_dma_memory()
    131  vb->daddr = daddr;  in wave5_vdi_allocate_dma_memory()
    144  dma_free_coherent(vpu_dev->dev, vb->size, vb->vaddr, vb->daddr);  in wave5_vdi_free_dma_memory()
    181  dma_addr_t daddr;  in wave5_vdi_allocate_sram() local
    189  vaddr = gen_pool_dma_alloc(vpu_dev->sram_pool, size, &daddr);  in wave5_vdi_allocate_sram()
    192  vb->daddr = daddr;  in wave5_vdi_allocate_sram()
    197  __func__, &vb->daddr, vb->size, vb->vaddr);  in wave5_vdi_allocate_sram()
|
/drivers/net/ethernet/intel/ixgbevf/

ipsec.c
    34   memcpy(sam->addr, &xs->id.daddr.a6, sizeof(xs->id.daddr.a6));  in ixgbevf_ipsec_set_pf_sa()
    36   memcpy(sam->addr, &xs->id.daddr.a4, sizeof(xs->id.daddr.a4));  in ixgbevf_ipsec_set_pf_sa()
    179  __be32 *daddr, u8 proto,  in ixgbevf_ipsec_find_rx_state() argument
    189  ((ip4 && *daddr == rsa->xs->id.daddr.a4) ||  in ixgbevf_ipsec_find_rx_state()
    190  (!ip4 && !memcmp(daddr, &rsa->xs->id.daddr.a6,  in ixgbevf_ipsec_find_rx_state()
    191  sizeof(rsa->xs->id.daddr.a6)))) &&  in ixgbevf_ipsec_find_rx_state()
    324  memcpy(rsa.ipaddr, &xs->id.daddr.a6, 16);  in ixgbevf_ipsec_add_sa()
    326  memcpy(&rsa.ipaddr[3], &xs->id.daddr.a4, 4);  in ixgbevf_ipsec_add_sa()
    553  void *daddr;  in ixgbevf_ipsec_rx() local
    565  daddr = &ip4->daddr;  in ixgbevf_ipsec_rx()
    [all …]
|
/drivers/gpu/drm/radeon/

radeon_benchmark.c
    36   uint64_t saddr, uint64_t daddr,  in radeon_benchmark_do_move() argument
    49   fence = radeon_copy_dma(rdev, saddr, daddr,  in radeon_benchmark_do_move()
    54   fence = radeon_copy_blit(rdev, saddr, daddr,  in radeon_benchmark_do_move()
    92   uint64_t saddr, daddr;  in radeon_benchmark_move() local
    116  r = radeon_bo_pin(dobj, ddomain, &daddr);  in radeon_benchmark_move()
    123  time = radeon_benchmark_do_move(rdev, size, saddr, daddr,  in radeon_benchmark_move()
    134  time = radeon_benchmark_do_move(rdev, size, saddr, daddr,  in radeon_benchmark_move()
|
/drivers/net/ethernet/chelsio/libcxgb/

libcxgb_cm.c
    59   __func__, ntohl(ip->saddr), ntohl(ip->daddr),  in cxgb_get_4tuple()
    63   memcpy(local_ip, &ip->daddr, 4);  in cxgb_get_4tuple()
    66   __func__, ip6->saddr.s6_addr, ip6->daddr.s6_addr,  in cxgb_get_4tuple()
    70   memcpy(local_ip, ip6->daddr.s6_addr, 16);  in cxgb_get_4tuple()
    132  memcpy(&fl6.daddr, peer_ip, 16);  in cxgb_find_route6()
    134  if (ipv6_addr_type(&fl6.daddr) & IPV6_ADDR_LINKLOCAL)  in cxgb_find_route6()
|
/drivers/gpu/drm/amd/amdgpu/

amdgpu_benchmark.c
    32   uint64_t saddr, uint64_t daddr, int n, s64 *time_ms)  in amdgpu_benchmark_do_move() argument
    41   r = amdgpu_copy_buffer(ring, saddr, daddr, size, NULL, &fence,  in amdgpu_benchmark_do_move()
    80   uint64_t saddr, daddr;  in amdgpu_benchmark_move() local
    96   &daddr,  in amdgpu_benchmark_move()
    102  r = amdgpu_benchmark_do_move(adev, size, saddr, daddr, n, &time_ms);  in amdgpu_benchmark_move()
    118  amdgpu_bo_free_kernel(&dobj, &daddr, NULL);  in amdgpu_benchmark_move()
|
/drivers/net/ethernet/mellanox/mlxsw/

spectrum_ipip.c
    88   return (union mlxsw_sp_l3addr) { .addr4 = parms->iph.daddr };  in mlxsw_sp_ipip_parms4_daddr()
    162  .daddr = mlxsw_sp_ipip_parms4_daddr(&parms),  in mlxsw_sp_ipip_netdev_parms_init_gre4()
    230  union mlxsw_sp_l3addr daddr = mlxsw_sp_ipip_netdev_daddr(proto, ol_dev);  in mlxsw_sp_ipip_tunnel_complete() local
    238  !mlxsw_sp_l3addr_is_zero(daddr);  in mlxsw_sp_ipip_tunnel_complete()
    308  } else if (!mlxsw_sp_l3addr_eq(&new_parms->daddr, &old_parms->daddr)) {  in mlxsw_sp_ipip_ol_netdev_change_gre()
    380  .daddr = mlxsw_sp_ipip_parms6_daddr(&parms),  in mlxsw_sp_ipip_netdev_parms_init_gre6()
    485  struct in6_addr old_addr6 = ipip_entry->parms.daddr.addr6;  in mlxsw_sp_ipip_ol_netdev_change_gre6()
    492  &new_parms.daddr.addr6,  in mlxsw_sp_ipip_ol_netdev_change_gre6()
    509  mlxsw_sp_ipv6_addr_put(mlxsw_sp, &new_parms.daddr.addr6);  in mlxsw_sp_ipip_ol_netdev_change_gre6()
    518  &ipip_entry->parms.daddr.addr6,  in mlxsw_sp_ipip_rem_addr_set_gre6()
    [all …]
|
spectrum_span.c
    356  union mlxsw_sp_l3addr daddr,  in mlxsw_sp_span_entry_tunnel_parms_common() argument
    366  gw = daddr;  in mlxsw_sp_span_entry_tunnel_parms_common()
    402  sparmsp->daddr = daddr;  in mlxsw_sp_span_entry_tunnel_parms_common()
    455  union mlxsw_sp_l3addr daddr = { .addr4 = tparm.iph.daddr };  in mlxsw_sp_span_entry_gretap4_parms() local
    458  union mlxsw_sp_l3addr gw = daddr;  in mlxsw_sp_span_entry_gretap4_parms()
    468  mlxsw_sp_l3addr_is_zero(daddr))  in mlxsw_sp_span_entry_gretap4_parms()
    499  be32_to_cpu(sparms.daddr.addr4));  in mlxsw_sp_span_entry_gretap4_configure()
    535  if (!ip6_tnl_xmit_ctl(t, &fl6.saddr, &fl6.daddr))  in mlxsw_sp_span_gretap6_route()
    563  union mlxsw_sp_l3addr gw = daddr;  in mlxsw_sp_span_entry_gretap6_parms()
    573  mlxsw_sp_l3addr_is_zero(daddr))  in mlxsw_sp_span_entry_gretap6_parms()
    [all …]
|
/drivers/net/ethernet/intel/ixgbe/

ixgbe_ipsec.c
    393   __be32 *daddr, u8 proto,  in ixgbe_ipsec_find_rx_state() argument
    405   ((ip4 && *daddr == rsa->xs->id.daddr.a4) ||  in ixgbe_ipsec_find_rx_state()
    406   (!ip4 && !memcmp(daddr, &rsa->xs->id.daddr.a6,  in ixgbe_ipsec_find_rx_state()
    516   if (reg == (__force u32)xs->id.daddr.a4)  in ixgbe_ipsec_check_mgmt_ip()
    523   if (reg == (__force u32)xs->id.daddr.a4)  in ixgbe_ipsec_check_mgmt_ip()
    629   memcpy(rsa.ipaddr, &xs->id.daddr.a6, 16);  in ixgbe_ipsec_add_sa()
    918   memcpy(&xs->id.daddr.a6, sam->addr, sizeof(xs->id.daddr.a6));  in ixgbe_ipsec_vf_add_sa()
    920   memcpy(&xs->id.daddr.a4, sam->addr, sizeof(xs->id.daddr.a4));  in ixgbe_ipsec_vf_add_sa()
    1160  void *daddr;  in ixgbe_ipsec_rx() local
    1172  daddr = &ip4->daddr;  in ixgbe_ipsec_rx()
    [all …]
|
/drivers/net/ethernet/mellanox/mlx5/core/en_accel/

ipsec.c
    288   fl4.daddr = addrs->saddr.a4;  in mlx5e_ipsec_init_macs()
    289   fl4.saddr = addrs->daddr.a4;  in mlx5e_ipsec_init_macs()
    308   fl4.daddr = addrs->daddr.a4;  in mlx5e_ipsec_init_macs()
    310   pkey = &addrs->daddr.a4;  in mlx5e_ipsec_init_macs()
    314   memcpy(fl6.daddr.s6_addr32, addrs->daddr.a6, 16);  in mlx5e_ipsec_init_macs()
    316   pkey = &addrs->daddr.a6;  in mlx5e_ipsec_init_macs()
    464   memcpy(&attrs->addrs.daddr, x->id.daddr.a6, sizeof(attrs->addrs.daddr));  in mlx5e_ipsec_build_accel_xfrm_attrs()
    945   !neigh_key_eq32(n, &attrs->addrs.daddr.a4))  in mlx5e_ipsec_netevent_event()
    1137  addrs->daddr.a4 &= addrs->dmask.m4;  in mlx5e_ipsec_policy_mask()
    1150  addrs->daddr.a6[i] &= addrs->dmask.m6[i];  in mlx5e_ipsec_policy_mask()
    [all …]
|
/drivers/target/

target_core_sbc.c
    1168  void *daddr, *paddr;  in sbc_dif_generate() local
    1183  kunmap_atomic(daddr - dsg->offset);  in sbc_dif_generate()
    1194  crc = crc_t10dif(daddr + offset, avail);  in sbc_dif_generate()
    1196  kunmap_atomic(daddr - dsg->offset);  in sbc_dif_generate()
    1224  kunmap_atomic(daddr - dsg->offset);  in sbc_dif_generate()
    1323  void *daddr, *paddr;  in sbc_dif_verify() local
    1341  kunmap_atomic(daddr - dsg->offset);  in sbc_dif_verify()
    1363  crc = crc_t10dif(daddr + dsg_off, avail);  in sbc_dif_verify()
    1365  kunmap_atomic(daddr - dsg->offset);  in sbc_dif_verify()
    1380  kunmap_atomic(daddr - dsg->offset);  in sbc_dif_verify()
    [all …]
|
/drivers/infiniband/ulp/ipoib/

ipoib_main.c
    979   path = __path_find(dev, daddr + 4);  in neigh_refresh_path()
    1014  path = __path_find(dev, daddr + 4);  in neigh_add_path()
    1048  IPOIB_QPN(daddr));  in neigh_add_path()
    1278  const void *daddr,  in ipoib_hard_header() argument
    1294  push_pseudo_header(skb, daddr);  in ipoib_hard_header()
    1332  u32 *d32 = (u32 *) daddr;  in ipoib_addr_hash()
    1358  if (memcmp(daddr, neigh->daddr, INFINIBAND_ALEN) == 0) {  in ipoib_neigh_get()
    1450  memcpy(&neigh->daddr, daddr, sizeof(neigh->daddr));  in ipoib_neigh_ctor()
    1485  if (memcmp(daddr, neigh->daddr, INFINIBAND_ALEN) == 0) {  in ipoib_neigh_alloc()
    1532  IPOIB_QPN(neigh->daddr),  in ipoib_neigh_dtor()
    [all …]
|
/drivers/misc/genwqe/

card_utils.c
    251  dma_addr_t daddr;  in genwqe_map_pages() local
    262  __func__, (long long)daddr);  in genwqe_map_pages()
    266  dma_list[i] = daddr;  in genwqe_map_pages()
    389  dma_addr_t daddr;  in genwqe_setup_sgl() local
    404  daddr = sgl->fpage_dma_addr + map_offs;  in genwqe_setup_sgl()
    408  daddr = sgl->lpage_dma_addr;  in genwqe_setup_sgl()
    410  daddr = dma_list[p] + map_offs;  in genwqe_setup_sgl()
    416  if (prev_daddr == daddr) {  in genwqe_setup_sgl()
    430  prev_daddr = daddr + size_to_map;  in genwqe_setup_sgl()
    435  s[j].target_addr = cpu_to_be64(daddr);  in genwqe_setup_sgl()
    [all …]
|
/drivers/infiniband/core/

addr.c
    147  const void *daddr,  in ib_nl_ip_send_msg() argument
    183  nla_put(skb, attrtype, size, daddr);  in ib_nl_ip_send_msg()
    318  const void *daddr, u32 seq, u16 family)  in ib_nl_fetch_ha() argument
    323  return ib_nl_ip_send_msg(dev_addr, daddr, seq, family);  in ib_nl_fetch_ha()
    328  const void *daddr)  in dst_fetch_ha() argument
    333  n = dst_neigh_lookup(dst, daddr);  in dst_fetch_ha()
    364  const void *daddr = (dst_in->sa_family == AF_INET) ?  in fetch_ha() local
    373  return ib_nl_fetch_ha(dev_addr, daddr, seq, family);  in fetch_ha()
    375  return dst_fetch_ha(dst, dev_addr, daddr);  in fetch_ha()
    394  fl4.daddr = dst_ip;  in addr4_resolve()
    [all …]
|
/drivers/staging/media/ipu3/

ipu3-dmamap.c
    133  map->daddr = iova_dma_addr(&imgu->iova_domain, iova);  in imgu_dmamap_alloc()
    136  size, &map->daddr, map->vaddr);  in imgu_dmamap_alloc()
    156  iova_pfn(&imgu->iova_domain, map->daddr));  in imgu_dmamap_unmap()
    172  __func__, map->size, &map->daddr, map->vaddr);  in imgu_dmamap_free()
    220  map->daddr = iova_dma_addr(&imgu->iova_domain, iova);  in imgu_dmamap_map_sg()
|
ipu3-css.h
    70   dma_addr_t daddr;  member
    207  unsigned int queue, dma_addr_t daddr)  in imgu_css_buf_init() argument
    211  b->daddr = daddr;  in imgu_css_buf_init()
|
/drivers/net/ethernet/mellanox/mlx5/core/en/

tc_tun.c
    153  n = dst_neigh_lookup(&rt->dst, &attr->fl.fl4.daddr);  in mlx5e_route_lookup_ipv4_get()
    237  attr.fl.fl4.daddr = tun_key->u.ipv4.dst;  in mlx5e_tc_tun_create_header_ipv4()
    291  ip->daddr = attr.fl.fl4.daddr;  in mlx5e_tc_tun_create_header_ipv4()
    353  attr.fl.fl4.daddr = tun_key->u.ipv4.dst;  in mlx5e_tc_tun_update_header_ipv4()
    396  ip->daddr = attr.fl.fl4.daddr;  in mlx5e_tc_tun_update_header_ipv4()
    468  n = dst_neigh_lookup(dst, &attr->fl.fl6.daddr);  in mlx5e_route_lookup_ipv6_get()
    509  attr.fl.fl6.daddr = tun_key->u.ipv6.dst;  in mlx5e_tc_tun_create_header_ipv6()
    561  ip6h->daddr = attr.fl.fl6.daddr;  in mlx5e_tc_tun_create_header_ipv6()
    624  attr.fl.fl6.daddr = tun_key->u.ipv6.dst;  in mlx5e_tc_tun_update_header_ipv6()
    665  ip6h->daddr = attr.fl.fl6.daddr;  in mlx5e_tc_tun_update_header_ipv6()
    [all …]
|
/drivers/dma/

fsl-edma-trace.h
    71   __field(u64, daddr)
    85   __entry->daddr = fsl_edma_get_tcd_to_cpu(chan, tcd, daddr),
    109  __entry->daddr,
|
/drivers/infiniband/hw/usnic/

usnic_fwd.h
    112  uint32_t daddr, uint16_t dport)  in usnic_fwd_init_udp_filter() argument
    118  if (daddr) {  in usnic_fwd_init_udp_filter()
    120  filter->u.ipv4.dst_addr = daddr;  in usnic_fwd_init_udp_filter()
|
/drivers/net/

amt.c
    683   iph->daddr = amt->discovery_ip;  in amt_send_discovery()
    774   iph->daddr = amt->remote_ip;  in amt_send_request()
    837   ip6h->daddr = mld2_all_node;  in amt_build_mld_gq()
    1042  fl4.daddr,  in amt_send_membership_update()
    1100  fl4.daddr,  in amt_send_multicast_data()
    1159  fl4.daddr,  in amt_send_membership_query()
    1212  group.ip4 = iph->daddr;  in amt_dev_xmit()
    1236  group.ip6 = ip6h->daddr;  in amt_dev_xmit()
    1284  &ip6h->daddr))  in amt_dev_xmit()
    2573  daddr, amt->local_ip,  in amt_send_advertisement()
    [all …]
|
/drivers/net/ovpn/

peer.c
    216  local_ip = &ip_hdr(skb)->daddr;  in ovpn_peer_endpoints_update()
    233  &ip_hdr(skb)->daddr);  in ovpn_peer_endpoints_update()
    234  bind->local.ipv4.s_addr = ip_hdr(skb)->daddr;  in ovpn_peer_endpoints_update()
    244  local_ip = &ipv6_hdr(skb)->daddr;  in ovpn_peer_endpoints_update()
    260  &ipv6_hdr(skb)->daddr))) {  in ovpn_peer_endpoints_update()
    264  &ipv6_hdr(skb)->daddr);  in ovpn_peer_endpoints_update()
    265  bind->local.ipv6 = ipv6_hdr(skb)->daddr;  in ovpn_peer_endpoints_update()
    416  return ip_hdr(skb)->daddr;  in ovpn_nexthop_from_skb4()
    430  return ipv6_hdr(skb)->daddr;  in ovpn_nexthop_from_skb6()
    781  .daddr = dest  in ovpn_nexthop_from_rt4()
    [all …]
|
/drivers/gpu/drm/

drm_fb_dma_helper.c
    132  dma_addr_t daddr;  in drm_fb_dma_sync_non_coherent() local
    140  daddr = drm_fb_dma_get_gem_addr(state->fb, state, i);  in drm_fb_dma_sync_non_coherent()
    148  dma_sync_single_for_device(drm->dev, daddr + offset,  in drm_fb_dma_sync_non_coherent()
|
/drivers/video/fbdev/

hitfb.c
    94   u32 saddr, daddr;  in hitfb_accel_bitblt() local
    102  daddr = WIDTH * (dy + height) + dx + width;  in hitfb_accel_bitblt()
    115  daddr = WIDTH * dy + dx;  in hitfb_accel_bitblt()
    124  daddr <<= 1;  in hitfb_accel_bitblt()
    130  hitfb_writew(daddr & 0xffff, HD64461_BBTDSARL);  in hitfb_accel_bitblt()
    131  hitfb_writew(daddr >> 16, HD64461_BBTDSARH);  in hitfb_accel_bitblt()
|