Lines Matching refs:mvdev

63 static void fill_create_direct_mr(struct mlx5_vdpa_dev *mvdev, in fill_create_direct_mr() argument
70 MLX5_SET(create_mkey_in, in, uid, mvdev->res.uid); in fill_create_direct_mr()
76 MLX5_SET(mkc, mkc, pd, mvdev->res.pdn); in fill_create_direct_mr()
87 MLX5_SET(create_mkey_in, in, uid, mvdev->res.uid); in fill_create_direct_mr()
90 static void create_direct_mr_end(struct mlx5_vdpa_dev *mvdev, in create_direct_mr_end() argument
99 static void fill_destroy_direct_mr(struct mlx5_vdpa_dev *mvdev, in fill_destroy_direct_mr() argument
105 MLX5_SET(destroy_mkey_in, in, uid, mvdev->res.uid); in fill_destroy_direct_mr()
110 static void destroy_direct_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_direct_mr *mr) in destroy_direct_mr() argument
115 mlx5_vdpa_destroy_mkey(mvdev, mr->mr); in destroy_direct_mr()
169 static void fill_indir(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_mr *mkey, void *in) in fill_indir() argument
195 klm->key = cpu_to_be32(mvdev->res.null_mkey); in fill_indir()
211 static int create_direct_keys(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_mr *mr) in create_direct_keys() argument
239 fill_create_direct_mr(mvdev, dmr, cmd_mem); in create_direct_keys()
244 err = mlx5_vdpa_exec_async_cmds(mvdev, cmds, mr->num_directs); in create_direct_keys()
247 mlx5_vdpa_err(mvdev, "error issuing MTT mkey creation for direct mrs: %d\n", err); in create_direct_keys()
259 create_direct_mr_end(mvdev, dmr, cmd_mem); in create_direct_keys()
262 mlx5_vdpa_err(mvdev, "error creating MTT mkey [0x%llx, 0x%llx]: %d\n", in create_direct_keys()
282 static int destroy_direct_keys(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_mr *mr) in destroy_direct_keys() argument
300 fill_destroy_direct_mr(mvdev, dmr, &cmd_mem[i]); in destroy_direct_keys()
304 err = mlx5_vdpa_exec_async_cmds(mvdev, cmds, mr->num_directs); in destroy_direct_keys()
307 mlx5_vdpa_err(mvdev, "error issuing MTT mkey deletion for direct mrs: %d\n", err); in destroy_direct_keys()
318 mlx5_vdpa_err(mvdev, "error deleting MTT mkey [0x%llx, 0x%llx]: %d\n", in destroy_direct_keys()
326 static int create_indirect_key(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_mr *mr) in create_indirect_key() argument
345 MLX5_SET(create_mkey_in, in, uid, mvdev->res.uid); in create_indirect_key()
351 MLX5_SET(mkc, mkc, pd, mvdev->res.pdn); in create_indirect_key()
356 fill_indir(mvdev, mr, in); in create_indirect_key()
357 err = mlx5_vdpa_create_mkey(mvdev, &mr->mkey, in, inlen); in create_indirect_key()
362 static void destroy_indirect_key(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_mr *mkey) in destroy_indirect_key() argument
364 mlx5_vdpa_destroy_mkey(mvdev, mkey->mkey); in destroy_indirect_key()
367 static int map_direct_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_direct_mr *mr, in map_direct_mr() argument
381 struct device *dma = mvdev->vdev.dma_dev; in map_direct_mr()
406 mlx5_vdpa_warn(mvdev, "sg null. start 0x%llx, end 0x%llx\n", in map_direct_mr()
433 static void unmap_direct_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_direct_mr *mr) in unmap_direct_mr() argument
435 struct device *dma = mvdev->vdev.dma_dev; in unmap_direct_mr()
437 destroy_direct_mr(mvdev, mr); in unmap_direct_mr()
442 static int add_direct_chain(struct mlx5_vdpa_dev *mvdev, in add_direct_chain() argument
468 err = map_direct_mr(mvdev, dmr, iotlb); in add_direct_chain()
486 unmap_direct_mr(mvdev, dmr); in add_direct_chain()
498 static int create_user_mr(struct mlx5_vdpa_dev *mvdev, in create_user_mr() argument
529 err = add_direct_chain(mvdev, mr, ps, pe - ps, pperm, iotlb); in create_user_mr()
538 err = add_direct_chain(mvdev, mr, ps, pe - ps, pperm, iotlb); in create_user_mr()
542 err = create_direct_keys(mvdev, mr); in create_user_mr()
550 err = create_indirect_key(mvdev, mr); in create_user_mr()
560 unmap_direct_mr(mvdev, dmr); in create_user_mr()
566 static int create_dma_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_mr *mr) in create_dma_mr() argument
583 MLX5_SET(mkc, mkc, pd, mvdev->res.pdn); in create_dma_mr()
586 err = mlx5_vdpa_create_mkey(mvdev, &mr->mkey, in, inlen); in create_dma_mr()
594 static void destroy_dma_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_mr *mr) in destroy_dma_mr() argument
596 mlx5_vdpa_destroy_mkey(mvdev, mr->mkey); in destroy_dma_mr()
628 static void destroy_user_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_mr *mr) in destroy_user_mr() argument
633 destroy_indirect_key(mvdev, mr); in destroy_user_mr()
634 destroy_direct_keys(mvdev, mr); in destroy_user_mr()
637 unmap_direct_mr(mvdev, dmr); in destroy_user_mr()
642 static void _mlx5_vdpa_destroy_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_mr *mr) in _mlx5_vdpa_destroy_mr() argument
648 destroy_user_mr(mvdev, mr); in _mlx5_vdpa_destroy_mr()
650 destroy_dma_mr(mvdev, mr); in _mlx5_vdpa_destroy_mr()
669 struct mlx5_vdpa_dev *mvdev; in mlx5_vdpa_mr_gc_handler() local
681 mvdev = container_of(mres, struct mlx5_vdpa_dev, mres); in mlx5_vdpa_mr_gc_handler()
684 _mlx5_vdpa_destroy_mr(mvdev, mr); in mlx5_vdpa_mr_gc_handler()
690 static void _mlx5_vdpa_put_mr(struct mlx5_vdpa_dev *mvdev, in _mlx5_vdpa_put_mr() argument
693 struct mlx5_vdpa_mr_resources *mres = &mvdev->mres; in _mlx5_vdpa_put_mr()
705 void mlx5_vdpa_put_mr(struct mlx5_vdpa_dev *mvdev, in mlx5_vdpa_put_mr() argument
708 mutex_lock(&mvdev->mres.lock); in mlx5_vdpa_put_mr()
709 _mlx5_vdpa_put_mr(mvdev, mr); in mlx5_vdpa_put_mr()
710 mutex_unlock(&mvdev->mres.lock); in mlx5_vdpa_put_mr()
713 static void _mlx5_vdpa_get_mr(struct mlx5_vdpa_dev *mvdev, in _mlx5_vdpa_get_mr() argument
722 void mlx5_vdpa_get_mr(struct mlx5_vdpa_dev *mvdev, in mlx5_vdpa_get_mr() argument
725 mutex_lock(&mvdev->mres.lock); in mlx5_vdpa_get_mr()
726 _mlx5_vdpa_get_mr(mvdev, mr); in mlx5_vdpa_get_mr()
727 mutex_unlock(&mvdev->mres.lock); in mlx5_vdpa_get_mr()
730 void mlx5_vdpa_update_mr(struct mlx5_vdpa_dev *mvdev, in mlx5_vdpa_update_mr() argument
734 struct mlx5_vdpa_mr *old_mr = mvdev->mres.mr[asid]; in mlx5_vdpa_update_mr()
736 mutex_lock(&mvdev->mres.lock); in mlx5_vdpa_update_mr()
738 _mlx5_vdpa_put_mr(mvdev, old_mr); in mlx5_vdpa_update_mr()
739 mvdev->mres.mr[asid] = new_mr; in mlx5_vdpa_update_mr()
741 mutex_unlock(&mvdev->mres.lock); in mlx5_vdpa_update_mr()
744 static void mlx5_vdpa_show_mr_leaks(struct mlx5_vdpa_dev *mvdev) in mlx5_vdpa_show_mr_leaks() argument
748 mutex_lock(&mvdev->mres.lock); in mlx5_vdpa_show_mr_leaks()
750 list_for_each_entry(mr, &mvdev->mres.mr_list_head, mr_list) { in mlx5_vdpa_show_mr_leaks()
752 mlx5_vdpa_warn(mvdev, "mkey still alive after resource delete: " in mlx5_vdpa_show_mr_leaks()
757 mutex_unlock(&mvdev->mres.lock); in mlx5_vdpa_show_mr_leaks()
761 void mlx5_vdpa_clean_mrs(struct mlx5_vdpa_dev *mvdev) in mlx5_vdpa_clean_mrs() argument
763 if (!mvdev->res.valid) in mlx5_vdpa_clean_mrs()
767 mlx5_vdpa_update_mr(mvdev, NULL, i); in mlx5_vdpa_clean_mrs()
769 prune_iotlb(mvdev->cvq.iotlb); in mlx5_vdpa_clean_mrs()
771 mlx5_vdpa_show_mr_leaks(mvdev); in mlx5_vdpa_clean_mrs()
774 static int _mlx5_vdpa_create_mr(struct mlx5_vdpa_dev *mvdev, in _mlx5_vdpa_create_mr() argument
781 err = create_user_mr(mvdev, mr, iotlb); in _mlx5_vdpa_create_mr()
783 err = create_dma_mr(mvdev, mr); in _mlx5_vdpa_create_mr()
798 list_add_tail(&mr->mr_list, &mvdev->mres.mr_list_head); in _mlx5_vdpa_create_mr()
807 destroy_user_mr(mvdev, mr); in _mlx5_vdpa_create_mr()
809 destroy_dma_mr(mvdev, mr); in _mlx5_vdpa_create_mr()
814 struct mlx5_vdpa_mr *mlx5_vdpa_create_mr(struct mlx5_vdpa_dev *mvdev, in mlx5_vdpa_create_mr() argument
824 mutex_lock(&mvdev->mres.lock); in mlx5_vdpa_create_mr()
825 err = _mlx5_vdpa_create_mr(mvdev, mr, iotlb); in mlx5_vdpa_create_mr()
826 mutex_unlock(&mvdev->mres.lock); in mlx5_vdpa_create_mr()
840 int mlx5_vdpa_update_cvq_iotlb(struct mlx5_vdpa_dev *mvdev, in mlx5_vdpa_update_cvq_iotlb() argument
846 if (mvdev->mres.group2asid[MLX5_VDPA_CVQ_GROUP] != asid) in mlx5_vdpa_update_cvq_iotlb()
849 spin_lock(&mvdev->cvq.iommu_lock); in mlx5_vdpa_update_cvq_iotlb()
851 prune_iotlb(mvdev->cvq.iotlb); in mlx5_vdpa_update_cvq_iotlb()
852 err = dup_iotlb(mvdev->cvq.iotlb, iotlb); in mlx5_vdpa_update_cvq_iotlb()
854 spin_unlock(&mvdev->cvq.iommu_lock); in mlx5_vdpa_update_cvq_iotlb()
859 int mlx5_vdpa_create_dma_mr(struct mlx5_vdpa_dev *mvdev) in mlx5_vdpa_create_dma_mr() argument
863 mr = mlx5_vdpa_create_mr(mvdev, NULL); in mlx5_vdpa_create_dma_mr()
867 mlx5_vdpa_update_mr(mvdev, mr, 0); in mlx5_vdpa_create_dma_mr()
869 return mlx5_vdpa_update_cvq_iotlb(mvdev, NULL, 0); in mlx5_vdpa_create_dma_mr()
872 int mlx5_vdpa_reset_mr(struct mlx5_vdpa_dev *mvdev, unsigned int asid) in mlx5_vdpa_reset_mr() argument
877 mlx5_vdpa_update_mr(mvdev, NULL, asid); in mlx5_vdpa_reset_mr()
879 if (asid == 0 && MLX5_CAP_GEN(mvdev->mdev, umem_uid_0)) { in mlx5_vdpa_reset_mr()
880 if (mlx5_vdpa_create_dma_mr(mvdev)) in mlx5_vdpa_reset_mr()
881 mlx5_vdpa_warn(mvdev, "create DMA MR failed\n"); in mlx5_vdpa_reset_mr()
883 mlx5_vdpa_update_cvq_iotlb(mvdev, NULL, asid); in mlx5_vdpa_reset_mr()
889 int mlx5_vdpa_init_mr_resources(struct mlx5_vdpa_dev *mvdev) in mlx5_vdpa_init_mr_resources() argument
891 struct mlx5_vdpa_mr_resources *mres = &mvdev->mres; in mlx5_vdpa_init_mr_resources()
907 void mlx5_vdpa_destroy_mr_resources(struct mlx5_vdpa_dev *mvdev) in mlx5_vdpa_destroy_mr_resources() argument
909 struct mlx5_vdpa_mr_resources *mres = &mvdev->mres; in mlx5_vdpa_destroy_mr_resources()
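
The references above trace mvdev through the memory-region lifecycle: building direct and indirect mkeys, swapping the MR installed for an address space under mres.lock, and refreshing the control VQ's private IOTLB copy. As a rough orientation only, the sketch below strings the exported entry points together the way a map-update path might call them. handle_set_map() is a hypothetical caller, not a function from this listing, the error handling is simplified, and the types come from the driver's internal headers.

	/*
	 * Hypothetical caller, for orientation only. Assumes the signatures
	 * shown in the listing: mlx5_vdpa_create_mr() returns a new MR or an
	 * ERR_PTR, mlx5_vdpa_update_mr() installs the MR for an ASID and
	 * releases the previous one under mres.lock, and
	 * mlx5_vdpa_update_cvq_iotlb() refreshes the control VQ IOTLB copy.
	 */
	static int handle_set_map(struct mlx5_vdpa_dev *mvdev,
				  struct vhost_iotlb *iotlb, unsigned int asid)
	{
		struct mlx5_vdpa_mr *new_mr;
		int err;

		/* Build the mkey hierarchy for the incoming IOTLB. */
		new_mr = mlx5_vdpa_create_mr(mvdev, iotlb);
		if (IS_ERR(new_mr))
			return PTR_ERR(new_mr);

		/* Swap it in; the old MR for this ASID is released. */
		mlx5_vdpa_update_mr(mvdev, new_mr, asid);

		/* Keep the control VQ's private IOTLB copy in sync. */
		err = mlx5_vdpa_update_cvq_iotlb(mvdev, iotlb, asid);
		if (err)
			/* Roll back: uninstalling releases the new MR. */
			mlx5_vdpa_update_mr(mvdev, NULL, asid);

		return err;
	}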