Searched refs:vn (Results 1 – 25 of 34) sorted by relevance

/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
memnv04.c:35    struct nv04_mem_map_vn vn; in nv04_mem_map() member
memnv04.c:41    if ((ret = nvif_unvers(ret, &argv, &argc, args->vn))) in nv04_mem_map()
memnv04.c:55    struct nv04_mem_vn vn; in nv04_mem_new() member
memnv04.c:59    if ((ret = nvif_unvers(ret, &argv, &argc, args->vn))) in nv04_mem_new()
memgf100.c:39   struct gf100_mem_map_vn vn; in gf100_mem_map() member
memgf100.c:50   if (!(ret = nvif_unvers(ret, &argv, &argc, args->vn))) { in gf100_mem_map()
memgf100.c:73   struct gf100_mem_vn vn; in gf100_mem_new() member
memgf100.c:82   if (!(ret = nvif_unvers(ret, &argv, &argc, args->vn))) { in gf100_mem_new()
memnv50.c:39    struct nv50_mem_map_vn vn; in nv50_mem_map() member
memnv50.c:52    if (!(ret = nvif_unvers(ret, &argv, &argc, args->vn))) { in nv50_mem_map()
memnv50.c:70    struct nv50_mem_vn vn; in nv50_mem_new() member
memnv50.c:80    if (!(ret = nvif_unvers(ret, &argv, &argc, args->vn))) { in nv50_mem_new()
vmmnv04.c:84    struct nv04_vmm_map_vn vn; in nv04_vmm_valid() member
vmmnv04.c:87    if ((ret = nvif_unvers(ret, &argv, &argc, args->vn))) in nv04_vmm_valid()
vmmnv04.c:108   struct nv04_vmm_vn vn; in nv04_vmm_new_() member
vmmnv04.c:117   return nvif_unvers(-ENOSYS, &argv, &argc, args->vn); in nv04_vmm_new_()
vmmgp100.c:411  struct gp100_vmm_map_vn vn; in gp100_vmm_valid() member
vmmgp100.c:429  if (!(ret = nvif_unvers(ret, &argv, &argc, args->vn))) { in gp100_vmm_valid()
vmmgp100.c:527  struct gp100_vmm_fault_replay_vn vn; in gp100_vmm_fault_replay() member
vmmgp100.c:531  if (!(ret = nvif_unvers(ret, &argv, &argc, args->vn))) { in gp100_vmm_fault_replay()
vmmgp100.c:609  struct gp100_vmm_vn vn; in gp100_vmm_new_() member
vmmgp100.c:618  if (!(ret = nvif_unvers(ret, &argv, &argc, args->vn))) { in gp100_vmm_new_()
vmmgm200.c:150  struct gm200_vmm_vn vn; in gm200_vmm_new_() member
vmmgm200.c:163  if (!(ret = nvif_unvers(ret, &argv, &argc, args->vn))) { in gm200_vmm_new_()
mem.c:149       struct nvif_mem_ram_vn vn; in nvkm_mem_new_host() member
mem.c:186       if ( (ret = nvif_unvers(ret, &argv, &argc, args->vn))) { in nvkm_mem_new_host()
vmmnv50.c:231   struct nv50_vmm_map_vn vn; in nv50_vmm_valid() member
vmmnv50.c:250   if (!(ret = nvif_unvers(ret, &argv, &argc, args->vn))) { in nv50_vmm_valid()
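
Note: all of the mmu hits above share one idiom. The ioctl argument blob is declared as a union whose vn member carries no versioned fields, and nvif_unvers() accepts the call only when nothing is left to unpack (compare the argc != sizeof(args->vn) checks that appear later under nvkm/core and nvkm/subdev/fault). A minimal user-space analogue of that acceptance check follows; unvers_accept() and its return convention are illustrative, not the real nvif macro.

    #include <errno.h>
    #include <stdio.h>

    /*
     * Illustrative stand-in for the unversioned-argument check: the method
     * is accepted only if the caller left no argument bytes beyond what the
     * versioned forms already consumed; otherwise the prior status is kept.
     */
    static int unvers_accept(int ret, unsigned int argc)
    {
            if (ret == -ENOSYS && argc == 0)
                    return 0;       /* empty "vn" form matched */
            return ret;             /* unexpected trailing bytes: keep error */
    }

    int main(void)
    {
            printf("no payload:  %d\n", unvers_accept(-ENOSYS, 0));
            printf("extra bytes: %d\n", unvers_accept(-ENOSYS, 8));
            return 0;
    }
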
/drivers/net/vxlan/
vxlan_multicast.c:125  bool vxlan_group_used(struct vxlan_net *vn, struct vxlan_dev *dev, in vxlan_group_used() argument
vxlan_multicast.c:151  list_for_each_entry(vxlan, &vn->vxlan_list, next) { in vxlan_group_used()
vxlan_multicast.c:218  struct vxlan_net *vn = net_generic(vxlan->net, vxlan_net_id); in vxlan_multicast_leave_vnigrp() local
vxlan_multicast.c:225  !vxlan_group_used(vn, vxlan, v->vni, &v->remote_ip, in vxlan_multicast_leave_vnigrp()
vxlan_multicast.c:257  struct vxlan_net *vn = net_generic(vxlan->net, vxlan_net_id); in vxlan_multicast_leave() local
vxlan_multicast.c:261  !vxlan_group_used(vn, vxlan, 0, NULL, 0)) { in vxlan_multicast_leave()
vxlan_private.h:59     struct vxlan_net *vn = net_generic(net, vxlan_net_id); in vs_head() local
vxlan_private.h:61     return &vn->sock_list[hash_32(ntohs(port), PORT_HASH_BITS)]; in vs_head()
vxlan_private.h:229    bool vxlan_group_used(struct vxlan_net *vn, struct vxlan_dev *dev,
vxlan_core.c:3295      struct vxlan_net *vn = net_generic(net, vxlan_net_id); in vxlan_offload_rx_ports() local
vxlan_core.c:3301      hlist_for_each_entry(vs, &vn->sock_list[i], hlist) { in vxlan_offload_rx_ports()
vxlan_core.c:3673      list_for_each_entry(tmp, &vn->vxlan_list, next) { in vxlan_vni_in_use()
vxlan_core.c:3928      struct vxlan_net *vn = net_generic(net, vxlan_net_id); in __vxlan_dev_create() local
vxlan_core.c:3980      list_add(&vxlan->next, &vn->vxlan_list); in __vxlan_dev_create()
vxlan_core.c:4718      vxlan_handle_lowerdev_unregister(vn, dev); in vxlan_netdevice_event()
vxlan_core.c:4887      struct vxlan_net *vn = net_generic(net, vxlan_net_id); in vxlan_init_net() local
vxlan_core.c:4890      INIT_LIST_HEAD(&vn->vxlan_list); in vxlan_init_net()
vxlan_core.c:4894      INIT_HLIST_HEAD(&vn->sock_list[h]); in vxlan_init_net()
vxlan_core.c:4917      vxlan_destroy_tunnels(vn, dev_to_kill); in vxlan_exit_rtnl()
[all …]
vxlan_vnifilter.c:519  struct vxlan_net *vn = net_generic(vxlan->net, vxlan_net_id); in vxlan_vni_update_group() local
vxlan_vnifilter.c:560  !vxlan_group_used(vn, vxlan, vninode->vni, in vxlan_vni_update_group()
vxlan_vnifilter.c:616  struct vxlan_net *vn = net_generic(vxlan->net, vxlan_net_id); in vxlan_vni_delete_group() local
vxlan_vnifilter.c:637  !vxlan_group_used(vn, vxlan, vninode->vni, in vxlan_vni_delete_group()
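
Note: every vxlan hit resolves the driver's per-network-namespace state the same way. net_generic(net, vxlan_net_id) returns that namespace's struct vxlan_net, whose vxlan_list and sock_list[] are then walked. The sketch below shows that lookup-and-walk shape as it would sit inside the driver; only net_generic(), vxlan_net_id, vxlan_list and the next link are taken from the hits, the helper itself is hypothetical.

    /* Hypothetical helper, vxlan-driver context assumed (vxlan_private.h). */
    static unsigned int hypothetical_vxlan_dev_count(struct net *net)
    {
            struct vxlan_net *vn = net_generic(net, vxlan_net_id);
            struct vxlan_dev *vxlan;
            unsigned int count = 0;

            /* every device added by __vxlan_dev_create() sits on vn->vxlan_list */
            list_for_each_entry(vxlan, &vn->vxlan_list, next)
                    count++;

            return count;
    }
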
/drivers/gpu/drm/nouveau/include/nvif/
if000e.h:19  } vn; member
if000e.h:24  } vn; member
if0004.h:7   } vn; member
clb069.h:13  } vn; member
if0013.h:15  } vn; member
/drivers/scsi/fcoe/
fcoe_ctlr.c:539   struct fip_vn_desc *vn; in fcoe_ctlr_send_keep_alive() local
fcoe_ctlr.c:564   ports * sizeof(*vn)) / FIP_BPW); in fcoe_ctlr_send_keep_alive()
fcoe_ctlr.c:573   vn = (struct fip_vn_desc *)(kal + 1); in fcoe_ctlr_send_keep_alive()
fcoe_ctlr.c:574   vn->fd_desc.fip_dtype = FIP_DT_VN_ID; in fcoe_ctlr_send_keep_alive()
fcoe_ctlr.c:575   vn->fd_desc.fip_dlen = sizeof(*vn) / FIP_BPW; in fcoe_ctlr_send_keep_alive()
fcoe_ctlr.c:577   hton24(vn->fd_fc_id, lport->port_id); in fcoe_ctlr_send_keep_alive()
fcoe_ctlr.c:2023  struct fip_vn_desc vn; in fcoe_ctlr_vn_send() member
fcoe_ctlr.c:2071  frame->vn.fd_desc.fip_dlen = sizeof(frame->vn) / FIP_BPW; in fcoe_ctlr_vn_send()
fcoe_ctlr.c:2273  struct fip_vn_desc *vn = NULL; in fcoe_ctlr_vn_parse() local
fcoe_ctlr.c:2349  vn = (struct fip_vn_desc *)desc; in fcoe_ctlr_vn_parse()
[all …]
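
Note: in the fcoe hits, vn is a FIP VN_Port descriptor (struct fip_vn_desc) being appended to a keep-alive or VN2VN frame: the type is set to FIP_DT_VN_ID, the length is given in FIP_BPW-sized words, and the 24-bit FC ID is stored big-endian with hton24(). A standalone sketch of that 24-bit encoding follows; pack_fc_id() is an analogue written here for illustration, not the libfc helper.

    #include <stdint.h>
    #include <stdio.h>

    /* Store a 24-bit FC ID as three bytes, most significant byte first,
     * the way the keep-alive path writes lport->port_id into fd_fc_id. */
    static void pack_fc_id(uint8_t dst[3], uint32_t fc_id)
    {
            dst[0] = (fc_id >> 16) & 0xff;
            dst[1] = (fc_id >> 8) & 0xff;
            dst[2] = fc_id & 0xff;
    }

    int main(void)
    {
            uint8_t fd_fc_id[3];

            pack_fc_id(fd_fc_id, 0x010203);
            printf("%02x.%02x.%02x\n", fd_fc_id[0], fd_fc_id[1], fd_fc_id[2]);
            return 0;
    }
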
/drivers/net/ethernet/broadcom/bnx2x/
bnx2x_cmn.h:904   static inline int func_by_vn(struct bnx2x *bp, int vn) in func_by_vn() argument
bnx2x_cmn.h:906   return 2 * vn + BP_PORT(bp); in func_by_vn()
bnx2x_cmn.h:1327  int vn; in bnx2x_link_sync_notify() local
bnx2x_cmn.h:1330  for (vn = VN_0; vn < BP_MAX_VN_NUM(bp); vn++) { in bnx2x_link_sync_notify()
bnx2x_cmn.h:1331  if (vn == BP_VN(bp)) in bnx2x_link_sync_notify()
bnx2x_cmn.h:1334  func = func_by_vn(bp, vn); in bnx2x_link_sync_notify()
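
Note: here vn is a virtual-NIC index on a multi-function bnx2x adapter. func_by_vn() interleaves the two ports to get an absolute function number, and bnx2x_link_sync_notify() loops over the other VNs, skipping its own, to tell them about link changes. A standalone check of that interleaving follows; unlike the driver version, the port is passed directly instead of being read from struct bnx2x.

    #include <stdio.h>

    /* Same arithmetic as the func_by_vn() hit: functions alternate between
     * the two ports, so port 0 owns the even ones and port 1 the odd ones. */
    static int func_by_vn(int port, int vn)
    {
            return 2 * vn + port;
    }

    int main(void)
    {
            for (int vn = 0; vn < 4; vn++)
                    printf("vn%d -> port0: func %d, port1: func %d\n",
                           vn, func_by_vn(0, vn), func_by_vn(1, vn));
            return 0;
    }
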
/drivers/gpu/drm/sun4i/
sun8i_vi_layer.c:63   u32 vn = 0, vm = 0; in sun8i_vi_layer_update_coord() local
sun8i_vi_layer.c:141  vn = (u32)ability * dst_h / 100; in sun8i_vi_layer_update_coord()
sun8i_vi_layer.c:142  src_h = vn; in sun8i_vi_layer_update_coord()
sun8i_vi_layer.c:177  SUN8I_MIXER_CHAN_VI_DS_N(vn) | in sun8i_vi_layer_update_coord()
sun8i_vi_layer.c:181  SUN8I_MIXER_CHAN_VI_DS_N(vn) | in sun8i_vi_layer_update_coord()
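
Note: in the sun8i mixer code, vn ends up as the vertical downscale parameter programmed through SUN8I_MIXER_CHAN_VI_DS_N(). Judging from the vn = (u32)ability * dst_h / 100 hit, the usable source height is clamped to ability percent of the destination height before scaling. A small standalone illustration of that integer arithmetic; the function name and sample values are illustrative.

    #include <stdint.h>
    #include <stdio.h>

    /* Clamp the source height to what the scaler can reach: ability is a
     * percentage relative to the destination height. */
    static uint32_t clamp_src_height(uint32_t ability, uint32_t dst_h)
    {
            return ability * dst_h / 100;
    }

    int main(void)
    {
            /* e.g. a scaler rated at 400% of dst_h, outputting 480 lines */
            printf("max src_h = %u\n", (unsigned)clamp_src_height(400, 480));
            return 0;
    }
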
/drivers/gpu/drm/nouveau/nvkm/core/
uevent.c:41  if (argc != sizeof(args->vn)) in nvkm_uevent_mthd_block()
uevent.c:52  if (argc != sizeof(args->vn)) in nvkm_uevent_mthd_allow()
/drivers/gpu/drm/i915/
i915_gem_evict.c:478  struct i915_vma *vma, *vn; in i915_gem_evict_vm() local
i915_gem_evict.c:515  list_for_each_entry_safe(vma, vn, &locked_eviction_list, evict_link) { in i915_gem_evict_vm()
i915_gem_evict.c:527  list_for_each_entry_safe(vma, vn, &eviction_list, evict_link) { in i915_gem_evict_vm()
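
Note: in the i915 hits, vn has nothing to do with VXLAN or virtual NICs: it is the spare cursor of list_for_each_entry_safe(), which remembers the next vma so the current one can be unlinked while the eviction list is being walked. Below is a plain-C analogue of that safe-removal pattern, with a hypothetical node type standing in for struct i915_vma.

    #include <stdio.h>
    #include <stdlib.h>

    struct node {
            int id;
            struct node *next;
    };

    /* Walk with a saved successor pointer (the role vn plays above) so
     * freeing the current element does not break the iteration. */
    static void drop_even(struct node **head)
    {
            struct node **link = head;
            struct node *cur, *vn;

            for (cur = *head; cur; cur = vn) {
                    vn = cur->next;          /* remember the successor first */
                    if (cur->id % 2 == 0) {
                            *link = vn;      /* unlink and release */
                            free(cur);
                    } else {
                            link = &cur->next;
                    }
            }
    }

    int main(void)
    {
            struct node *head = NULL, **tail = &head;

            for (int i = 0; i < 5; i++) {
                    struct node *n = calloc(1, sizeof(*n));
                    n->id = i;
                    *tail = n;
                    tail = &n->next;
            }
            drop_even(&head);
            for (struct node *n = head; n; n = n->next)
                    printf("%d ", n->id);    /* prints: 1 3 */
            printf("\n");
            return 0;
    }
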
/drivers/gpu/drm/nouveau/nvkm/engine/sw/
nvsw.c:36  if (argc != sizeof(args->vn)) in nvkm_nvsw_uevent()
/drivers/gpu/drm/i915/gem/
i915_gem_tiling.c:187  struct i915_vma *vma, *vn; in i915_gem_object_fence_prepare() local
i915_gem_tiling.c:207  list_for_each_entry_safe(vma, vn, &unbind, vm_link) { in i915_gem_object_fence_prepare()
/drivers/gpu/drm/nouveau/nvkm/subdev/fault/
user.c:39  if (argc != sizeof(args->vn)) in nvkm_ufault_uevent()
/drivers/usb/gadget/function/
f_mass_storage.h:137  void fsg_common_set_inquiry_string(struct fsg_common *common, const char *vn,
