Lines Matching refs: vs (references to struct vxlan_sock *vs in the kernel VXLAN driver; one entry per matching source line, tagged with the enclosing function)
68 static inline bool vxlan_collect_metadata(struct vxlan_sock *vs) in vxlan_collect_metadata() argument
70 return vs->flags & VXLAN_F_COLLECT_METADATA || in vxlan_collect_metadata()
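
Line 70 shows only the first operand of the return expression. A minimal sketch of the whole helper, assuming the upstream form in which the per-socket flag is ORed with the global ip_tunnel_collect_metadata() static key:

    /* True when packets on this socket should get flow-based
     * (collect-metadata) handling: either the socket was opened with
     * VXLAN_F_COLLECT_METADATA, or metadata collection is enabled
     * globally via the ip_tunnel_collect_metadata() static key. */
    static inline bool vxlan_collect_metadata(struct vxlan_sock *vs)
    {
            return vs->flags & VXLAN_F_COLLECT_METADATA ||
                   ip_tunnel_collect_metadata();
    }
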
81 struct vxlan_sock *vs; in vxlan_find_sock() local
85 hlist_for_each_entry_rcu(vs, vs_head(net, port), hlist) { in vxlan_find_sock()
86 if (inet_sk(vs->sock->sk)->inet_sport == port && in vxlan_find_sock()
87 vxlan_get_sk_family(vs) == family && in vxlan_find_sock()
88 vs->flags == flags && in vxlan_find_sock()
89 vs->sock->sk->sk_bound_dev_if == ifindex) in vxlan_find_sock()
90 return vs; in vxlan_find_sock()
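
Lines 81-90 are the per-netns socket lookup. A sketch of the full function; the signature, the VXLAN_F_RCV_FLAGS masking and the NULL fall-through are filled in from the upstream driver rather than from the fragments above:

    /* Find an existing vxlan_sock in this netns matching the UDP
     * destination port, address family, receive flags and the device
     * the socket is bound to.  Walks the port hash bucket under RCU. */
    static struct vxlan_sock *vxlan_find_sock(struct net *net, sa_family_t family,
                                              __be16 port, u32 flags, int ifindex)
    {
            struct vxlan_sock *vs;

            flags &= VXLAN_F_RCV_FLAGS;     /* only RX-relevant flags take part */

            hlist_for_each_entry_rcu(vs, vs_head(net, port), hlist) {
                    if (inet_sk(vs->sock->sk)->inet_sport == port &&
                        vxlan_get_sk_family(vs) == family &&
                        vs->flags == flags &&
                        vs->sock->sk->sk_bound_dev_if == ifindex)
                            return vs;
            }
            return NULL;
    }
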
95 static struct vxlan_dev *vxlan_vs_find_vni(struct vxlan_sock *vs, in vxlan_vs_find_vni() argument
103 if (vs->flags & VXLAN_F_COLLECT_METADATA && in vxlan_vs_find_vni()
104 !(vs->flags & VXLAN_F_VNIFILTER)) in vxlan_vs_find_vni()
107 hlist_for_each_entry_rcu(node, vni_head(vs, vni), hlist) { in vxlan_vs_find_vni()
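
Lines 95-107 resolve a VNI on a given socket to a vxlan_dev. A sketch of the control flow; the loop body that matches vxlan_dev_node entries and fills the optional vninode out-parameter is elided, and the parameter list is reconstructed from the upstream driver:

    /* Map a VNI received on socket vs to its vxlan_dev.  Flow-based
     * (collect-metadata) sockets without per-VNI filtering fold every
     * packet onto the hash bucket for VNI 0. */
    static struct vxlan_dev *vxlan_vs_find_vni(struct vxlan_sock *vs, int ifindex,
                                               __be32 vni,
                                               struct vxlan_vni_node **vninode)
    {
            struct vxlan_dev_node *node;

            if (vs->flags & VXLAN_F_COLLECT_METADATA &&
                !(vs->flags & VXLAN_F_VNIFILTER))
                    vni = 0;

            hlist_for_each_entry_rcu(node, vni_head(vs, vni), hlist) {
                    /* ... compare node->vxlan against vni (and ifindex for
                     * link-local IPv6 remotes), optionally returning the
                     * vxlan_vni_node through *vninode when VNI filtering
                     * is enabled; elided ... */
            }
            return NULL;
    }
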
140 struct vxlan_sock *vs; in vxlan_find_vni() local
142 vs = vxlan_find_sock(net, family, port, flags, ifindex); in vxlan_find_vni()
143 if (!vs) in vxlan_find_vni()
146 return vxlan_vs_find_vni(vs, ifindex, vni, NULL); in vxlan_find_vni()
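
Lines 140-146 simply compose the two lookups above; a sketch, with the parameter order taken from the upstream driver:

    /* Resolve (family, port, flags, ifindex, vni) to a vxlan_dev by first
     * locating the socket and then resolving the VNI on it. */
    static struct vxlan_dev *vxlan_find_vni(struct net *net, int ifindex,
                                            __be32 vni, sa_family_t family,
                                            __be16 port, u32 flags)
    {
            struct vxlan_sock *vs;

            vs = vxlan_find_sock(net, family, port, flags, ifindex);
            if (!vs)
                    return NULL;

            return vxlan_vs_find_vni(vs, ifindex, vni, NULL);
    }
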
685 struct vxlan_sock *vs = rcu_dereference_sk_user_data(sk); in vxlan_gro_prepare_receive() local
700 if ((flags & VXLAN_HF_RCO) && (vs->flags & VXLAN_F_REMCSUM_RX)) { in vxlan_gro_prepare_receive()
703 !!(vs->flags & in vxlan_gro_prepare_receive()
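
Lines 685-703 are from the GRO receive preparation: the vxlan_sock is recovered from the UDP socket's sk_user_data and remote checksum offload is only undone when the socket was opened with VXLAN_F_REMCSUM_RX. A trimmed sketch of that branch; the surrounding GRO bookkeeping and the vxlan_gro_remcsum() call itself are elided:

    struct vxlan_sock *vs = rcu_dereference_sk_user_data(sk);

    /* ... pull the VXLAN header and read its flag word ... */

    if ((flags & VXLAN_HF_RCO) && (vs->flags & VXLAN_F_REMCSUM_RX)) {
            /* Undo remote checksum offload before aggregation; whether the
             * "no partial" variant is used depends on
             * VXLAN_F_REMCSUM_NOPARTIAL in vs->flags (call elided). */
    }
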
1503 static bool __vxlan_sock_release_prep(struct vxlan_sock *vs) in __vxlan_sock_release_prep() argument
1507 if (!vs) in __vxlan_sock_release_prep()
1509 if (!refcount_dec_and_test(&vs->refcnt)) in __vxlan_sock_release_prep()
1512 vn = net_generic(sock_net(vs->sock->sk), vxlan_net_id); in __vxlan_sock_release_prep()
1514 hlist_del_rcu(&vs->hlist); in __vxlan_sock_release_prep()
1515 udp_tunnel_notify_del_rx_port(vs->sock, in __vxlan_sock_release_prep()
1516 (vs->flags & VXLAN_F_GPE) ? in __vxlan_sock_release_prep()
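
Lines 1503-1516 drop a reference on a socket and, on the final put, unhook it. A sketch of the full function; the sock_lock locking and the boolean return convention come from the upstream driver:

    /* Drop one reference on vs; on the final put, unhash it and tell the
     * NIC to stop offloading its UDP port.  Returns true when the caller
     * should go on to release the underlying socket. */
    static bool __vxlan_sock_release_prep(struct vxlan_sock *vs)
    {
            struct vxlan_net *vn;

            if (!vs)
                    return false;
            if (!refcount_dec_and_test(&vs->refcnt))
                    return false;

            vn = net_generic(sock_net(vs->sock->sk), vxlan_net_id);
            spin_lock(&vn->sock_lock);
            hlist_del_rcu(&vs->hlist);
            udp_tunnel_notify_del_rx_port(vs->sock,
                                          (vs->flags & VXLAN_F_GPE) ?
                                          UDP_TUNNEL_TYPE_VXLAN_GPE :
                                          UDP_TUNNEL_TYPE_VXLAN);
            spin_unlock(&vn->sock_lock);

            return true;
    }
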
1608 struct vxlan_sock *vs, in vxlan_set_mac() argument
1623 if (vxlan_get_sk_family(vs) == AF_INET) { in vxlan_set_mac()
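
Lines 1608 and 1623 are from the inner-MAC handling on receive, where the socket's address family decides whether the outer source address is read from the IPv4 or the IPv6 header. A heavily abridged sketch; loop suppression and FDB learning are elided, and the union vxlan_addr handling follows the upstream driver:

    static bool vxlan_set_mac(struct vxlan_dev *vxlan,
                              struct vxlan_sock *vs,
                              struct sk_buff *skb, __be32 vni)
    {
            union vxlan_addr saddr;

            skb_reset_mac_header(skb);
            skb->protocol = eth_type_trans(skb, vxlan->dev);

            /* ... drop looped-back frames sourced from our own MAC ... */

            /* Record the outer source address for learning/snooping. */
            if (vxlan_get_sk_family(vs) == AF_INET) {
                    saddr.sin.sin_addr.s_addr = ip_hdr(skb)->saddr;
                    saddr.sa.sa_family = AF_INET;
            } else {
                    saddr.sin6.sin6_addr = ipv6_hdr(skb)->saddr;
                    saddr.sa.sa_family = AF_INET6;
            }

            /* ... when VXLAN_F_LEARN is set, snoop (saddr, inner source
             *     MAC) into the FDB; elided ... */
            return true;
    }
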
1640 static bool vxlan_ecn_decapsulate(struct vxlan_sock *vs, void *oiph, in vxlan_ecn_decapsulate() argument
1645 if (vxlan_get_sk_family(vs) == AF_INET) in vxlan_ecn_decapsulate()
1653 if (vxlan_get_sk_family(vs) == AF_INET) in vxlan_ecn_decapsulate()
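
Lines 1640-1653 apply ECN decapsulation according to the socket's address family and consult the family a second time only to pick the right rate-limited log message. A sketch with the logging elided; log_ecn_error is the driver's module parameter:

    /* Propagate outer ECN bits into the inner packet.  A result above 1
     * means the frame must be dropped (CE on the outer header but a
     * non-ECT inner packet). */
    static bool vxlan_ecn_decapsulate(struct vxlan_sock *vs, void *oiph,
                                      struct sk_buff *skb)
    {
            int err = 0;

            if (vxlan_get_sk_family(vs) == AF_INET)
                    err = IP_ECN_decapsulate(oiph, skb);
            else
                    err = IP6_ECN_decapsulate(oiph, skb);

            if (unlikely(err) && log_ecn_error) {
                    /* ... rate-limited "non-ECT from ..." message, picking
                     *     the IPv4 or IPv6 format by the same family check;
                     *     elided ... */
            }

            return err <= 1;
    }
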
1669 struct vxlan_sock *vs; in vxlan_rcv() local
1695 vs = rcu_dereference_sk_user_data(sk); in vxlan_rcv()
1696 if (!vs) in vxlan_rcv()
1701 vxlan = vxlan_vs_find_vni(vs, skb->dev->ifindex, vni, &vninode); in vxlan_rcv()
1708 if (vs->flags & VXLAN_F_GPE) { in vxlan_rcv()
1719 if (vs->flags & VXLAN_F_REMCSUM_RX) in vxlan_rcv()
1720 if (unlikely(!vxlan_remcsum(&unparsed, skb, vs->flags))) in vxlan_rcv()
1723 if (vxlan_collect_metadata(vs)) { in vxlan_rcv()
1728 tun_dst = udp_tun_rx_dst(skb, vxlan_get_sk_family(vs), flags, in vxlan_rcv()
1741 if (vs->flags & VXLAN_F_GBP) in vxlan_rcv()
1742 vxlan_parse_gbp_hdr(&unparsed, skb, vs->flags, md); in vxlan_rcv()
1760 if (!vxlan_set_mac(vxlan, vs, skb, vni)) in vxlan_rcv()
1787 if (!vxlan_ecn_decapsulate(vs, oiph, skb)) { in vxlan_rcv()
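
Lines 1669-1787 are the heart of the receive path. A condensed skeleton showing where each listed use of vs sits; locals such as vni, vninode, unparsed, the tunnel-flag bitmap, md, tun_dst and oiph are only named in comments, error accounting is dropped, and everything beyond what the fragments show follows the upstream driver:

    /* Callback installed as encap_rcv on the UDP socket (see
     * vxlan_socket_create() below); returns 0 once the skb is consumed. */
    static int vxlan_rcv(struct sock *sk, struct sk_buff *skb)
    {
            struct vxlan_sock *vs;
            struct vxlan_dev *vxlan;
            /* ... locals for vni, vninode, the unparsed header copy, the
             *     tunnel flag bitmap, md and the saved outer IP header
             *     pointer (oiph) elided ... */

            /* ... pull and sanity-check the VXLAN header, extract vni ... */

            vs = rcu_dereference_sk_user_data(sk);
            if (!vs)
                    goto drop;

            vxlan = vxlan_vs_find_vni(vs, skb->dev->ifindex, vni, &vninode);
            if (!vxlan)
                    goto drop;

            if (vs->flags & VXLAN_F_GPE) {
                    /* GPE carries a next-protocol field and no inner
                     * Ethernet header; parse it or drop unknown protocols. */
            }

            if (vs->flags & VXLAN_F_REMCSUM_RX)
                    if (unlikely(!vxlan_remcsum(&unparsed, skb, vs->flags)))
                            goto drop;

            if (vxlan_collect_metadata(vs)) {
                    struct metadata_dst *tun_dst;

                    /* Build a metadata dst from the outer headers so
                     * flow-based users (OVS, tc, routing) see the key. */
                    tun_dst = udp_tun_rx_dst(skb, vxlan_get_sk_family(vs), flags,
                                             key32_to_tunnel_id(vni), sizeof(*md));
                    if (!tun_dst)
                            goto drop;
                    /* ... attach tun_dst to the skb, point md at it ... */
            }

            if (vs->flags & VXLAN_F_GBP)
                    vxlan_parse_gbp_hdr(&unparsed, skb, vs->flags, md);

            if (!vxlan_set_mac(vxlan, vs, skb, vni))
                    goto drop;

            if (!vxlan_ecn_decapsulate(vs, oiph, skb)) {
                    /* count an rx error and drop */
                    goto drop;
            }

            /* ... update stats and hand the packet to the vxlan netdevice ... */
            return 0;

    drop:
            kfree_skb(skb);     /* consume the bad packet */
            return 0;
    }
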
1823 struct vxlan_sock *vs; in vxlan_err_lookup() local
1835 vs = rcu_dereference_sk_user_data(sk); in vxlan_err_lookup()
1836 if (!vs) in vxlan_err_lookup()
1840 vxlan = vxlan_vs_find_vni(vs, skb->dev->ifindex, vni, NULL); in vxlan_err_lookup()
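
Lines 1823-1840 back the encap_err_lookup hook: when an ICMP error arrives on the tunnel's UDP port, the same socket and VNI lookups decide whether a VXLAN device actually owns the offending flow. A sketch with the header validation elided; the error codes are as in the upstream driver:

    static int vxlan_err_lookup(struct sock *sk, struct sk_buff *skb)
    {
            struct vxlan_dev *vxlan;
            struct vxlan_sock *vs;
            __be32 vni;

            /* ... pull the embedded VXLAN header from the ICMP payload,
             *     check the VNI-present flag and extract vni; elided ... */

            vs = rcu_dereference_sk_user_data(sk);
            if (!vs)
                    return -ENOENT;

            vxlan = vxlan_vs_find_vni(vs, skb->dev->ifindex, vni, NULL);
            if (!vxlan)
                    return -ENOENT;

            return 0;
    }
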
2856 static void vxlan_vs_add_dev(struct vxlan_sock *vs, struct vxlan_dev *vxlan, in vxlan_vs_add_dev() argument
2864 hlist_add_head_rcu(&node->hlist, vni_head(vs, vni)); in vxlan_vs_add_dev()
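
Lines 2856-2864 hash a device's vxlan_dev_node into the socket's per-VNI table. A sketch; taking vn->sock_lock and reading the VNI from the device's default destination follow the upstream driver:

    /* Link a vxlan_dev (via its per-family vxlan_dev_node) into the
     * socket's VNI hash so vxlan_vs_find_vni() can find it on receive. */
    static void vxlan_vs_add_dev(struct vxlan_sock *vs, struct vxlan_dev *vxlan,
                                 struct vxlan_dev_node *node)
    {
            struct vxlan_net *vn = net_generic(vxlan->net, vxlan_net_id);
            __be32 vni = vxlan->default_dst.remote_vni;

            node->vxlan = vxlan;
            spin_lock(&vn->sock_lock);
            hlist_add_head_rcu(&node->hlist, vni_head(vs, vni));
            spin_unlock(&vn->sock_lock);
    }
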
3288 struct vxlan_sock *vs; in vxlan_offload_rx_ports() local
3295 hlist_for_each_entry_rcu(vs, &vn->sock_list[i], hlist) { in vxlan_offload_rx_ports()
3298 if (vs->flags & VXLAN_F_GPE) in vxlan_offload_rx_ports()
3304 udp_tunnel_push_rx_port(dev, vs->sock, type); in vxlan_offload_rx_ports()
3306 udp_tunnel_drop_rx_port(dev, vs->sock, type); in vxlan_offload_rx_ports()
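
Lines 3288-3306 walk every open VXLAN socket in the netns and tell a NIC to start or stop offloading its UDP port; GPE sockets advertise a different tunnel type. A sketch of the full function; PORT_HASH_SIZE, vn->sock_list and the push parameter come from the upstream driver:

    /* Push (or drop) the UDP ports of all VXLAN sockets in dev's netns to
     * the device's RX port offload, e.g. when a NIC comes up or goes down. */
    static void vxlan_offload_rx_ports(struct net_device *dev, bool push)
    {
            struct vxlan_net *vn = net_generic(dev_net(dev), vxlan_net_id);
            struct vxlan_sock *vs;
            unsigned int i;

            spin_lock(&vn->sock_lock);
            for (i = 0; i < PORT_HASH_SIZE; ++i) {
                    hlist_for_each_entry_rcu(vs, &vn->sock_list[i], hlist) {
                            unsigned short type;

                            if (vs->flags & VXLAN_F_GPE)
                                    type = UDP_TUNNEL_TYPE_VXLAN_GPE;
                            else
                                    type = UDP_TUNNEL_TYPE_VXLAN;

                            if (push)
                                    udp_tunnel_push_rx_port(dev, vs->sock, type);
                            else
                                    udp_tunnel_drop_rx_port(dev, vs->sock, type);
                    }
            }
            spin_unlock(&vn->sock_lock);
    }
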
3542 struct vxlan_sock *vs; in vxlan_socket_create() local
3547 vs = kzalloc(sizeof(*vs), GFP_KERNEL); in vxlan_socket_create()
3548 if (!vs) in vxlan_socket_create()
3552 INIT_HLIST_HEAD(&vs->vni_list[h]); in vxlan_socket_create()
3556 kfree(vs); in vxlan_socket_create()
3560 vs->sock = sock; in vxlan_socket_create()
3561 refcount_set(&vs->refcnt, 1); in vxlan_socket_create()
3562 vs->flags = (flags & VXLAN_F_RCV_FLAGS); in vxlan_socket_create()
3565 hlist_add_head_rcu(&vs->hlist, vs_head(net, port)); in vxlan_socket_create()
3567 (vs->flags & VXLAN_F_GPE) ? in vxlan_socket_create()
3574 tunnel_cfg.sk_user_data = vs; in vxlan_socket_create()
3579 if (vs->flags & VXLAN_F_GPE) { in vxlan_socket_create()
3589 return vs; in vxlan_socket_create()
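
Lines 3542-3589 allocate the vxlan_sock, open the underlying UDP socket, hash it into the per-netns table, announce the port for hardware offload and register the encapsulation callbacks. A sketch; vxlan_create_sock() is the driver-internal UDP socket helper, and the GRO callbacks chosen for GPE versus plain VXLAN are elided:

    static struct vxlan_sock *vxlan_socket_create(struct net *net, bool ipv6,
                                                  __be16 port, u32 flags,
                                                  int ifindex)
    {
            struct vxlan_net *vn = net_generic(net, vxlan_net_id);
            struct udp_tunnel_sock_cfg tunnel_cfg;
            struct vxlan_sock *vs;
            struct socket *sock;
            unsigned int h;

            vs = kzalloc(sizeof(*vs), GFP_KERNEL);
            if (!vs)
                    return ERR_PTR(-ENOMEM);

            for (h = 0; h < VNI_HASH_SIZE; ++h)
                    INIT_HLIST_HEAD(&vs->vni_list[h]);

            sock = vxlan_create_sock(net, ipv6, port, flags, ifindex);
            if (IS_ERR(sock)) {
                    kfree(vs);
                    return ERR_CAST(sock);
            }

            vs->sock = sock;
            refcount_set(&vs->refcnt, 1);
            vs->flags = (flags & VXLAN_F_RCV_FLAGS);

            spin_lock(&vn->sock_lock);
            hlist_add_head_rcu(&vs->hlist, vs_head(net, port));
            udp_tunnel_notify_add_rx_port(sock,
                                          (vs->flags & VXLAN_F_GPE) ?
                                          UDP_TUNNEL_TYPE_VXLAN_GPE :
                                          UDP_TUNNEL_TYPE_VXLAN);
            spin_unlock(&vn->sock_lock);

            /* Mark the socket as an encapsulation socket: vxlan_rcv() and
             * vxlan_err_lookup() above become its UDP encap callbacks. */
            memset(&tunnel_cfg, 0, sizeof(tunnel_cfg));
            tunnel_cfg.sk_user_data = vs;
            tunnel_cfg.encap_type = 1;
            tunnel_cfg.encap_rcv = vxlan_rcv;
            tunnel_cfg.encap_err_lookup = vxlan_err_lookup;
            if (vs->flags & VXLAN_F_GPE) {
                    /* GPE-specific GRO receive/complete handlers (elided) */
            } else {
                    /* plain VXLAN GRO receive/complete handlers (elided) */
            }
            setup_udp_tunnel_sock(net, sock, &tunnel_cfg);

            return vs;
    }
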
3596 struct vxlan_sock *vs = NULL; in __vxlan_sock_add() local
3606 vs = vxlan_find_sock(vxlan->net, ipv6 ? AF_INET6 : AF_INET, in __vxlan_sock_add()
3609 if (vs && !refcount_inc_not_zero(&vs->refcnt)) { in __vxlan_sock_add()
3615 if (!vs) in __vxlan_sock_add()
3616 vs = vxlan_socket_create(vxlan->net, ipv6, in __vxlan_sock_add()
3619 if (IS_ERR(vs)) in __vxlan_sock_add()
3620 return PTR_ERR(vs); in __vxlan_sock_add()
3623 rcu_assign_pointer(vxlan->vn6_sock, vs); in __vxlan_sock_add()
3628 rcu_assign_pointer(vxlan->vn4_sock, vs); in __vxlan_sock_add()
3633 vxlan_vs_add_vnigrp(vxlan, vs, ipv6); in __vxlan_sock_add()
3635 vxlan_vs_add_dev(vs, vxlan, node); in __vxlan_sock_add()
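
Lines 3596-3635 attach a socket to a device at link-up time: reuse a compatible socket when sharing is allowed, create one otherwise, publish it through vn4_sock or vn6_sock and hook the device into the socket's VNI table. A sketch; the l3mdev ifindex resolution and the exact condition guarding vxlan_vs_add_vnigrp() are abridged, and the -EBUSY path mirrors the upstream source:

    static int __vxlan_sock_add(struct vxlan_dev *vxlan, bool ipv6)
    {
            struct vxlan_net *vn = net_generic(vxlan->net, vxlan_net_id);
            struct vxlan_sock *vs = NULL;
            struct vxlan_dev_node *node;
            int l3mdev_index = 0;

            /* ... for IPv6 link-local remotes, resolve the L3 master
             *     device index into l3mdev_index; elided ... */

            if (!vxlan->cfg.no_share) {
                    spin_lock(&vn->sock_lock);
                    vs = vxlan_find_sock(vxlan->net, ipv6 ? AF_INET6 : AF_INET,
                                         vxlan->cfg.dst_port, vxlan->cfg.flags,
                                         l3mdev_index);
                    if (vs && !refcount_inc_not_zero(&vs->refcnt)) {
                            /* found a socket already being torn down */
                            spin_unlock(&vn->sock_lock);
                            return -EBUSY;
                    }
                    spin_unlock(&vn->sock_lock);
            }
            if (!vs)
                    vs = vxlan_socket_create(vxlan->net, ipv6,
                                             vxlan->cfg.dst_port,
                                             vxlan->cfg.flags, l3mdev_index);
            if (IS_ERR(vs))
                    return PTR_ERR(vs);

            if (ipv6) {
                    rcu_assign_pointer(vxlan->vn6_sock, vs);
                    node = &vxlan->hlist6;
            } else {
                    rcu_assign_pointer(vxlan->vn4_sock, vs);
                    node = &vxlan->hlist4;
            }

            if (vxlan->cfg.flags & VXLAN_F_VNIFILTER) {
                    /* metadata mode with per-VNI filtering: register the
                     * device's VNI group on the socket */
                    vxlan_vs_add_vnigrp(vxlan, vs, ipv6);
            }

            vxlan_vs_add_dev(vs, vxlan, node);
            return 0;
    }
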