
Searched refs:dmn (Results 1 – 19 of 19) sorted by relevance

/linux-6.3-rc2/drivers/net/ethernet/mellanox/mlx5/core/steering/
dr_domain.c 133 ret = mlx5_core_alloc_pd(dmn->mdev, &dmn->pdn); in dr_domain_init_resources()
139 dmn->uar = mlx5_get_uars_page(dmn->mdev); in dr_domain_init_resources()
163 mlx5_put_uars_page(dmn->mdev, dmn->uar); in dr_domain_init_resources()
165 mlx5_core_dealloc_pd(dmn->mdev, dmn->pdn); in dr_domain_init_resources()
172 mlx5dr_send_ring_free(dmn, dmn->send_ring); in dr_domain_uninit_resources()
174 mlx5_put_uars_page(dmn->mdev, dmn->uar); in dr_domain_uninit_resources()
175 mlx5_core_dealloc_pd(dmn->mdev, dmn->pdn); in dr_domain_uninit_resources()
419 dmn = kzalloc(sizeof(*dmn), GFP_KERNEL); in mlx5dr_domain_create()
420 if (!dmn) in mlx5dr_domain_create()
453 return dmn; in mlx5dr_domain_create()
[all …]
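
The dr_domain.c hits above outline the driver's acquire/unwind ordering for domain resources: allocate a PD, take a UAR page, build the send ring, and release everything in reverse order on failure or in dr_domain_uninit_resources(). Below is a minimal, self-contained sketch of that goto-unwind pattern only; the helper names are hypothetical stand-ins, not the mlx5 API.

#include <stdio.h>

/* Hypothetical stand-ins for mlx5_core_alloc_pd(), mlx5_get_uars_page(), etc. */
static int  alloc_pd(void)        { return 0; }   /* 0 on success */
static void dealloc_pd(void)      { }
static int  get_uar_page(void)    { return 0; }
static void put_uar_page(void)    { }
static int  alloc_send_ring(void) { return 0; }
static void free_send_ring(void)  { }

/* Acquire resources in order; on error, unwind only what was acquired. */
static int domain_init_resources(void)
{
        int ret;

        ret = alloc_pd();
        if (ret)
                return ret;

        ret = get_uar_page();
        if (ret)
                goto clean_pd;

        ret = alloc_send_ring();
        if (ret)
                goto clean_uar;

        return 0;

clean_uar:
        put_uar_page();
clean_pd:
        dealloc_pd();
        return ret;
}

/* Teardown mirrors init in reverse, as in dr_domain_uninit_resources(). */
static void domain_uninit_resources(void)
{
        free_send_ring();
        put_uar_page();
        dealloc_pd();
}

int main(void)
{
        if (!domain_init_resources()) {
                puts("resources acquired");
                domain_uninit_resources();
        }
        return 0;
}
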
dr_fw.c 21 ft_attr.level = dmn->info.caps.max_ft_level - 1; in mlx5dr_fw_create_recalc_cs_ft()
24 ret = mlx5dr_cmd_create_flow_table(dmn->mdev, in mlx5dr_fw_create_recalc_cs_ft()
33 ret = mlx5dr_cmd_create_empty_flow_group(dmn->mdev, in mlx5dr_fw_create_recalc_cs_ft()
54 ret = mlx5dr_cmd_set_fte_modify_and_vport(dmn->mdev, in mlx5dr_fw_create_recalc_cs_ft()
73 mlx5dr_cmd_destroy_flow_group(dmn->mdev, in mlx5dr_fw_create_recalc_cs_ft()
86 mlx5dr_cmd_del_flow_table_entry(dmn->mdev, in mlx5dr_fw_destroy_recalc_cs_ft()
90 mlx5dr_cmd_destroy_flow_group(dmn->mdev, in mlx5dr_fw_destroy_recalc_cs_ft()
94 mlx5dr_cmd_destroy_flow_table(dmn->mdev, in mlx5dr_fw_destroy_recalc_cs_ft()
157 mlx5dr_cmd_destroy_flow_table(dmn->mdev, *tbl_id, in mlx5dr_fw_create_md_tbl()
166 mlx5dr_cmd_destroy_flow_group(dmn->mdev, in mlx5dr_fw_destroy_md_tbl()
[all …]
dr_action.c 676 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in dr_action_get_dest_fw_tbl_addr() local
705 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in dr_action_get_dest_sw_tbl_addr() local
708 if (dest_tbl->tbl->dmn != dmn) { in dr_action_get_dest_sw_tbl_addr()
994 action->dest_tbl->fw_tbl.dmn = dmn; in mlx5dr_action_create_dest_table_num()
1117 action->range->dmn = dmn; in mlx5dr_action_create_dest_match_range()
1227 action->dest_tbl->fw_tbl.dmn = dmn; in mlx5dr_action_create_mult_dest_tbl()
1258 action->dest_tbl->fw_tbl.dmn = dmn; in mlx5dr_action_create_dest_flow_fw_table()
1308 action->sampler->dmn = dmn; in mlx5dr_action_create_flow_sampler()
1526 action->reformat->dmn = dmn; in mlx5dr_action_create_packet_reformat()
2044 action->rewrite->dmn = dmn; in mlx5dr_action_create_modify_header()
[all …]
dr_table.c 54 mlx5dr_domain_lock(tbl->dmn); in mlx5dr_table_set_miss_action()
83 mlx5dr_domain_unlock(tbl->dmn); in mlx5dr_table_set_miss_action()
100 mlx5dr_domain_lock(tbl->dmn); in dr_table_uninit()
102 switch (tbl->dmn->type) { in dr_table_uninit()
117 mlx5dr_domain_unlock(tbl->dmn); in dr_table_uninit()
184 mlx5dr_domain_lock(tbl->dmn); in dr_table_init()
186 switch (tbl->dmn->type) { in dr_table_init()
208 mlx5dr_domain_unlock(tbl->dmn); in dr_table_init()
256 refcount_inc(&dmn->refcount); in mlx5dr_table_create()
262 tbl->dmn = dmn; in mlx5dr_table_create()
[all …]
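
The dr_table.c hits show the table/domain lifetime rules: creating a table pins the parent domain (refcount_inc) and keeps a back-pointer, while init, uninit and miss-action changes run under the domain lock. Below is a small userspace sketch of that pin-plus-lock pattern, with a pthread mutex and a plain counter standing in for mlx5dr_domain_lock() and refcount_t; it is an illustration, not the driver's code.

#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

struct domain {
        pthread_mutex_t lock;
        int refcount;              /* tables (and others) holding the domain */
};

struct table {
        struct domain *dmn;        /* back-pointer, like tbl->dmn = dmn */
};

/* Create a table: pin the domain, then do per-type init under its lock. */
static struct table *table_create(struct domain *dmn)
{
        struct table *tbl = calloc(1, sizeof(*tbl));

        if (!tbl)
                return NULL;

        pthread_mutex_lock(&dmn->lock);
        dmn->refcount++;           /* the driver uses an atomic refcount_t */
        tbl->dmn = dmn;
        /* ... switch on the domain type and build the start table ... */
        pthread_mutex_unlock(&dmn->lock);

        return tbl;
}

/* Destroy a table: uninit under the domain lock, then drop the reference. */
static void table_destroy(struct table *tbl)
{
        struct domain *dmn = tbl->dmn;

        pthread_mutex_lock(&dmn->lock);
        /* ... per-type uninit ... */
        dmn->refcount--;
        pthread_mutex_unlock(&dmn->lock);
        free(tbl);
}

int main(void)
{
        struct domain dmn = { .lock = PTHREAD_MUTEX_INITIALIZER, .refcount = 1 };
        struct table *tbl = table_create(&dmn);

        if (!tbl)
                return 1;
        printf("refcount after create:  %d\n", dmn.refcount);
        table_destroy(tbl);
        printf("refcount after destroy: %d\n", dmn.refcount);
        return 0;
}
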
dr_matcher.c 117 struct mlx5dr_domain *dmn) in dr_mask_is_tnl_vxlan_gpe() argument
175 struct mlx5dr_domain *dmn) in dr_mask_is_tnl_gtpu() argument
408 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in dr_matcher_set_ste_builders() local
802 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in dr_matcher_uninit() local
804 switch (dmn->type) { in dr_matcher_uninit()
823 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in dr_matcher_set_all_ste_builders() local
841 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in dr_matcher_init_nic() local
900 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in dr_matcher_copy_param() local
929 mlx5dr_dbg(dmn, in dr_matcher_copy_param()
945 struct mlx5dr_domain *dmn = tbl->dmn; in dr_matcher_init() local
[all …]
dr_send.c 1066 dmn->send_ring->cq = dr_create_cq(dmn->mdev, dmn->uar, cq_size); in mlx5dr_send_ring_alloc()
1084 dmn->send_ring->qp = dr_create_rc_qp(dmn->mdev, &init_attr); in mlx5dr_send_ring_alloc()
1091 dmn->send_ring->cq->qp = dmn->send_ring->qp; in mlx5dr_send_ring_alloc()
1097 dmn->send_ring->signal_th = dmn->info.max_send_wr / in mlx5dr_send_ring_alloc()
1119 dmn->send_ring->mr = dr_reg_mr(dmn->mdev, in mlx5dr_send_ring_alloc()
1120 dmn->pdn, dmn->send_ring->buf, size); in mlx5dr_send_ring_alloc()
1126 dmn->send_ring->sync_mr = dr_reg_mr(dmn->mdev, in mlx5dr_send_ring_alloc()
1127 dmn->pdn, dmn->send_ring->sync_buff, in mlx5dr_send_ring_alloc()
1137 dr_dereg_mr(dmn->mdev, dmn->send_ring->mr); in mlx5dr_send_ring_alloc()
1141 dr_destroy_qp(dmn->mdev, dmn->send_ring->qp); in mlx5dr_send_ring_alloc()
[all …]
dr_rule.c 62 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in dr_rule_create_collision_htbl() local
198 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in dr_rule_rehash_handle_collision() local
255 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in dr_rule_rehash_copy_ste() local
403 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in dr_rule_rehash_htbl() local
521 struct mlx5dr_domain *dmn = rule->matcher->tbl->dmn; in dr_rule_rehash() local
542 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in dr_rule_handle_collision() local
722 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in dr_rule_handle_action_stes() local
788 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in dr_rule_handle_empty_entry() local
841 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in dr_rule_handle_ste_branch() local
1122 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in dr_rule_create_rule_nic() local
[all …]
dr_definer.c 40 dr_definer_find_obj(struct mlx5dr_domain *dmn, u16 format_id, in dr_definer_find_obj() argument
46 xa_for_each(&dmn->definers_xa, id, definer_obj) { in dr_definer_find_obj()
67 ret = mlx5dr_cmd_create_definer(dmn->mdev, in dr_definer_create_obj()
100 mlx5dr_cmd_destroy_definer(dmn->mdev, definer_obj->id); in dr_definer_create_obj()
107 static void dr_definer_destroy_obj(struct mlx5dr_domain *dmn, in dr_definer_destroy_obj() argument
110 mlx5dr_cmd_destroy_definer(dmn->mdev, definer_obj->id); in dr_definer_destroy_obj()
111 xa_erase(&dmn->definers_xa, definer_obj->id); in dr_definer_destroy_obj()
125 definer_obj = dr_definer_create_obj(dmn, format_id, in mlx5dr_definer_get()
143 definer_obj = xa_load(&dmn->definers_xa, definer_id); in mlx5dr_definer_put()
145 mlx5dr_err(dmn, "Definer ID %d not found\n", definer_id); in mlx5dr_definer_put()
[all …]
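
The dr_definer.c hits sketch a small get/put object cache: mlx5dr_definer_get() walks dmn->definers_xa for an existing definer with the same format id, creates one through a firmware command if none exists, and mlx5dr_definer_put() destroys the object and erases it from the xarray once it is no longer needed. Here is a userspace approximation of that get/put lifecycle using a fixed array in place of the xarray; all helper names are hypothetical and the matching/refcount handling is simplified.

#include <stdio.h>

#define MAX_DEFINERS 8

struct definer_obj {
        unsigned int id;           /* HW object id (xarray index in the driver) */
        unsigned short format_id;
        int refcount;
        int in_use;
};

static struct definer_obj cache[MAX_DEFINERS];

/* Stand-ins for mlx5dr_cmd_create_definer()/mlx5dr_cmd_destroy_definer(). */
static int hw_create_definer(unsigned short format_id, unsigned int *id)
{
        static unsigned int next_id = 1;

        *id = next_id++;
        return 0;
}

static void hw_destroy_definer(unsigned int id) { }

/* Get: reuse a cached definer with the same format_id, else create one. */
static struct definer_obj *definer_get(unsigned short format_id)
{
        struct definer_obj *free_slot = NULL;
        int i;

        for (i = 0; i < MAX_DEFINERS; i++) {
                if (cache[i].in_use && cache[i].format_id == format_id) {
                        cache[i].refcount++;
                        return &cache[i];
                }
                if (!cache[i].in_use && !free_slot)
                        free_slot = &cache[i];
        }

        if (!free_slot || hw_create_definer(format_id, &free_slot->id))
                return NULL;

        free_slot->format_id = format_id;
        free_slot->refcount = 1;
        free_slot->in_use = 1;
        return free_slot;
}

/* Put: drop one reference; destroy and forget the object on the last put. */
static void definer_put(unsigned int definer_id)
{
        int i;

        for (i = 0; i < MAX_DEFINERS; i++) {
                if (!cache[i].in_use || cache[i].id != definer_id)
                        continue;
                if (--cache[i].refcount == 0) {
                        hw_destroy_definer(cache[i].id);
                        cache[i].in_use = 0;
                }
                return;
        }
        fprintf(stderr, "Definer ID %u not found\n", definer_id);
}

int main(void)
{
        struct definer_obj *d = definer_get(0x33);

        if (d) {
                printf("got definer id %u\n", d->id);
                definer_put(d->id);
        }
        return 0;
}
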
dr_dbg.c 72 struct mlx5dr_domain *dmn = rule->matcher->tbl->dmn; in mlx5dr_dbg_rule_add() local
81 struct mlx5dr_domain *dmn = rule->matcher->tbl->dmn; in mlx5dr_dbg_rule_del() local
610 domain_id, dmn->type, dmn->info.caps.gvmi, in dr_dump_domain()
611 dmn->info.supp_sw_steering, pci_name(dmn->mdev->pdev)); in dr_dump_domain()
617 if (dmn->info.supp_sw_steering) { in dr_dump_domain()
632 mlx5dr_domain_lock(dmn); in dr_dump_domain_all()
634 ret = dr_dump_domain(file, dmn); in dr_dump_domain_all()
645 mlx5dr_domain_unlock(dmn); in dr_dump_domain_all()
667 dmn->dump_info.steering_debugfs = in mlx5dr_dbg_init_dump()
669 dmn->dump_info.fdb_debugfs = in mlx5dr_dbg_init_dump()
[all …]
dr_icm_pool.c 18 struct mlx5dr_domain *dmn; member
44 struct mlx5dr_domain *dmn; member
109 struct mlx5_core_dev *mdev = pool->dmn->mdev; in dr_icm_pool_mr_create()
119 icm_mr->dmn = pool->dmn; in dr_icm_pool_mr_create()
143 err = dr_icm_create_dm_mkey(mdev, pool->dmn->pdn, in dr_icm_pool_mr_create()
175 struct mlx5_core_dev *mdev = icm_mr->dmn->mdev; in dr_icm_pool_mr_destroy()
357 err = mlx5dr_cmd_sync_steering(pool->dmn->mdev); in dr_icm_pool_sync_all_buddy_pools()
392 mlx5dr_err(pool->dmn, in dr_icm_handle_buddies_get_mem()
402 mlx5dr_err(pool->dmn, in dr_icm_handle_buddies_get_mem()
500 max_log_chunk_sz = dmn->info.max_log_sw_icm_sz; in mlx5dr_icm_pool_create()
[all …]
dr_types.h 25 #define mlx5dr_err(dmn, arg...) mlx5_core_err((dmn)->mdev, ##arg) argument
26 #define mlx5dr_info(dmn, arg...) mlx5_core_info((dmn)->mdev, ##arg) argument
27 #define mlx5dr_dbg(dmn, arg...) mlx5_core_dbg((dmn)->mdev, ##arg) argument
211 struct mlx5dr_domain *dmn; member
948 struct mlx5dr_domain *dmn; member
998 struct mlx5dr_domain *dmn; member
1009 struct mlx5dr_domain *dmn; member
1017 struct mlx5dr_domain *dmn; member
1041 struct mlx5dr_domain *dmn; member
1055 struct mlx5dr_domain *dmn; member
[all …]
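
dr_types.h shows how the mlx5dr_err/info/dbg helpers are built: thin variadic macros that pull the mlx5_core_dev out of the domain ((dmn)->mdev) and forward everything to the core logging macros. A standalone sketch of the same wrapper-macro technique follows; core_err() and the struct layouts below are made up for illustration, and ##__VA_ARGS__ is the same GNU extension the kernel macros rely on.

#include <stdio.h>

struct core_dev { const char *name; };
struct domain   { struct core_dev *mdev; };

/* Pretend core logger: prefixes every message with the device name. */
#define core_err(dev, fmt, ...) \
        fprintf(stderr, "%s: " fmt, (dev)->name, ##__VA_ARGS__)

/* Domain-level wrapper in the style of mlx5dr_err(dmn, ...): forwards (dmn)->mdev. */
#define domain_err(dmn, fmt, ...) \
        core_err((dmn)->mdev, fmt, ##__VA_ARGS__)

int main(void)
{
        struct core_dev dev = { .name = "0000:3b:00.0" };
        struct domain dmn = { .mdev = &dev };

        domain_err(&dmn, "Definer ID %d not found\n", 42);
        return 0;
}
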
mlx5dr.h 50 void mlx5dr_domain_set_peer(struct mlx5dr_domain *dmn,
85 mlx5dr_action_create_dest_table_num(struct mlx5dr_domain *dmn, u32 table_num);
100 mlx5dr_action_create_mult_dest_tbl(struct mlx5dr_domain *dmn,
111 mlx5dr_action_create_flow_sampler(struct mlx5dr_domain *dmn, u32 sampler_id);
117 mlx5dr_action_create_packet_reformat(struct mlx5dr_domain *dmn,
136 mlx5dr_action_create_aso(struct mlx5dr_domain *dmn,
144 mlx5dr_action_create_dest_match_range(struct mlx5dr_domain *dmn,
153 int mlx5dr_definer_get(struct mlx5dr_domain *dmn, u16 format_id,
156 void mlx5dr_definer_put(struct mlx5dr_domain *dmn, u32 definer_id);
dr_ste.c 312 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in mlx5dr_ste_free() local
435 dmn->info.caps.gvmi, in mlx5dr_ste_htbl_init_and_postsend()
451 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in mlx5dr_ste_create_next_htbl() local
641 mlx5dr_err(dmn, in dr_ste_build_pre_check_spec()
647 mlx5dr_err(dmn, in dr_ste_build_pre_check_spec()
665 mlx5dr_err(dmn, in mlx5dr_ste_build_pre_check()
671 mlx5dr_err(dmn, in mlx5dr_ste_build_pre_check()
695 struct mlx5dr_domain *dmn = matcher->tbl->dmn; in mlx5dr_ste_build_ste_arr() local
710 dmn->info.caps.gvmi); in mlx5dr_ste_build_ste_arr()
1353 struct mlx5dr_domain *dmn, in mlx5dr_ste_build_src_gvmi_qpn() argument
[all …]
dr_dbg.h 10 void mlx5dr_dbg_init_dump(struct mlx5dr_domain *dmn);
11 void mlx5dr_dbg_uninit_dump(struct mlx5dr_domain *dmn);
dr_ste_v1.h 20 void dr_ste_v1_set_actions_tx(struct mlx5dr_domain *dmn, u8 *action_type_set,
23 void dr_ste_v1_set_actions_rx(struct mlx5dr_domain *dmn, u8 *action_type_set,
dr_ste.h 163 void (*set_actions_rx)(struct mlx5dr_domain *dmn,
169 void (*set_actions_tx)(struct mlx5dr_domain *dmn,
dr_ste_v0.c 409 dr_ste_v0_set_actions_tx(struct mlx5dr_domain *dmn, in dr_ste_v0_set_actions_tx() argument
467 if (MLX5_CAP_GEN(dmn->mdev, prio_tag_required)) in dr_ste_v0_set_actions_tx()
479 dr_ste_v0_set_actions_rx(struct mlx5dr_domain *dmn, in dr_ste_v0_set_actions_rx() argument
1651 struct mlx5dr_domain *dmn = sb->dmn; in dr_ste_v0_build_src_gvmi_qpn_tag() local
1660 if (misc->source_eswitch_owner_vhca_id == dmn->info.caps.gvmi) in dr_ste_v0_build_src_gvmi_qpn_tag()
1661 vport_dmn = dmn; in dr_ste_v0_build_src_gvmi_qpn_tag()
1662 else if (dmn->peer_dmn && (misc->source_eswitch_owner_vhca_id == in dr_ste_v0_build_src_gvmi_qpn_tag()
1663 dmn->peer_dmn->info.caps.gvmi)) in dr_ste_v0_build_src_gvmi_qpn_tag()
1664 vport_dmn = dmn->peer_dmn; in dr_ste_v0_build_src_gvmi_qpn_tag()
1670 vport_dmn = dmn; in dr_ste_v0_build_src_gvmi_qpn_tag()
[all …]
dr_ste_v1.c 578 void dr_ste_v1_set_actions_tx(struct mlx5dr_domain *dmn, in dr_ste_v1_set_actions_tx() argument
731 void dr_ste_v1_set_actions_rx(struct mlx5dr_domain *dmn, in dr_ste_v1_set_actions_rx() argument
1931 struct mlx5dr_domain *dmn = sb->dmn; in dr_ste_v1_build_src_gvmi_qpn_tag() local
1939 if (misc->source_eswitch_owner_vhca_id == dmn->info.caps.gvmi) in dr_ste_v1_build_src_gvmi_qpn_tag()
1940 vport_dmn = dmn; in dr_ste_v1_build_src_gvmi_qpn_tag()
1941 else if (dmn->peer_dmn && (misc->source_eswitch_owner_vhca_id == in dr_ste_v1_build_src_gvmi_qpn_tag()
1942 dmn->peer_dmn->info.caps.gvmi)) in dr_ste_v1_build_src_gvmi_qpn_tag()
1943 vport_dmn = dmn->peer_dmn; in dr_ste_v1_build_src_gvmi_qpn_tag()
1949 vport_dmn = dmn; in dr_ste_v1_build_src_gvmi_qpn_tag()
1957 mlx5dr_err(dmn, "Vport 0x%x is disabled or invalid\n", in dr_ste_v1_build_src_gvmi_qpn_tag()
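
Both dr_ste_v0.c and dr_ste_v1.c repeat the same source-vport resolution when building the src_gvmi_qpn tag: if the packet's eswitch owner vhca id matches the local domain's gvmi, use the local domain; if it matches the peer domain's gvmi, use the peer; otherwise the build fails with "Vport ... is disabled or invalid". A simplified sketch of that selection (the real code handles additional cases, e.g. when the owner id is not set):

#include <stdio.h>

struct domain {
        unsigned short gvmi;        /* this domain's eswitch owner vhca id */
        struct domain *peer_dmn;    /* optional peer domain, may be NULL */
};

/* Return the domain that owns the source vport, or NULL if neither matches. */
static struct domain *pick_vport_domain(struct domain *dmn,
                                        unsigned short src_owner_vhca_id)
{
        if (src_owner_vhca_id == dmn->gvmi)
                return dmn;

        if (dmn->peer_dmn && src_owner_vhca_id == dmn->peer_dmn->gvmi)
                return dmn->peer_dmn;

        return NULL; /* caller logs "Vport 0x%x is disabled or invalid" */
}

int main(void)
{
        struct domain peer = { .gvmi = 0x22 };
        struct domain dmn  = { .gvmi = 0x11, .peer_dmn = &peer };

        printf("local match: %s\n", pick_vport_domain(&dmn, 0x11) == &dmn  ? "yes" : "no");
        printf("peer match:  %s\n", pick_vport_domain(&dmn, 0x22) == &peer ? "yes" : "no");
        printf("no match:    %s\n", pick_vport_domain(&dmn, 0x99) == NULL  ? "yes" : "no");
        return 0;
}
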
/linux-6.3-rc2/drivers/powercap/
intel_rapl_common.c 1154 int dmn, prim; in rapl_update_domain_data() local
1157 for (dmn = 0; dmn < rp->nr_domains; dmn++) { in rapl_update_domain_data()
1159 rp->domains[dmn].name); in rapl_update_domain_data()
1162 if (!rapl_read_data_raw(&rp->domains[dmn], prim, in rapl_update_domain_data()
1164 rp->domains[dmn].rdd.primitives[prim] = val; in rapl_update_domain_data()
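
The intel_rapl_common.c hits show rapl_update_domain_data() refreshing a package's cached readings: an outer loop over the package's domains and an inner loop over the primitives, caching each raw value only when the read succeeds (the read helper returns 0 on success). A toy sketch of that nested refresh loop; the struct shapes and read_data_raw() below are simplified stand-ins, not the RAPL driver's types.

#include <stdio.h>
#include <stdint.h>

#define NR_PRIMITIVES 4

struct rapl_domain {
        const char *name;
        uint64_t primitives[NR_PRIMITIVES];   /* cached raw readings */
};

struct rapl_package {
        int nr_domains;
        struct rapl_domain domains[2];
};

/* Stand-in for rapl_read_data_raw(): returns 0 on success and fills *val. */
static int read_data_raw(struct rapl_domain *rd, int prim, uint64_t *val)
{
        *val = (uint64_t)prim * 1000;         /* fake counter value */
        return 0;
}

/* Refresh the cached primitive values of every domain in the package. */
static void update_domain_data(struct rapl_package *rp)
{
        int dmn, prim;

        for (dmn = 0; dmn < rp->nr_domains; dmn++) {
                printf("update %s domain data\n", rp->domains[dmn].name);
                for (prim = 0; prim < NR_PRIMITIVES; prim++) {
                        uint64_t val;

                        if (!read_data_raw(&rp->domains[dmn], prim, &val))
                                rp->domains[dmn].primitives[prim] = val;
                }
        }
}

int main(void)
{
        struct rapl_package rp = {
                .nr_domains = 2,
                .domains = { { .name = "package-0" }, { .name = "core" } },
        };

        update_domain_data(&rp);
        printf("core primitives[1] = %llu\n",
               (unsigned long long)rp.domains[1].primitives[1]);
        return 0;
}
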
