Searched refs:aeqe (Results 1 – 8 of 8) sorted by relevance
| /drivers/infiniband/hw/hns/ |
| A D | hns_roce_trace.h |
     92  TP_PROTO(int event_type, void *aeqe, unsigned int len),
     93  TP_ARGS(event_type, aeqe, len),
     96  __array(u32, aeqe,
    104  __entry->aeqe[i] = le32_to_cpu(((__le32 *)aeqe)[i]);
    108  __print_array(__entry->aeqe, __entry->len, sizeof(__le32)))
|
| A D | hns_roce_hw_v2.c |
   6219  struct hns_roce_aeqe *aeqe;  in next_aeqe_sw_v2() local
   6221  aeqe = hns_roce_buf_offset(eq->mtr.kmem,  in next_aeqe_sw_v2()
   6225  return (hr_reg_read(aeqe, AEQE_OWNER) ^  in next_aeqe_sw_v2()
   6226  !!(eq->cons_index & eq->entries)) ? aeqe : NULL;  in next_aeqe_sw_v2()
   6232  struct hns_roce_aeqe *aeqe = next_aeqe_sw_v2(eq);  in hns_roce_v2_aeq_int() local
   6245  event_type = hr_reg_read(aeqe, AEQE_EVENT_TYPE);  in hns_roce_v2_aeq_int()
   6246  sub_type = hr_reg_read(aeqe, AEQE_SUB_TYPE);  in hns_roce_v2_aeq_int()
   6259  le16_to_cpu(aeqe->event.cmd.token),  in hns_roce_v2_aeq_int()
   6260  aeqe->event.cmd.status,  in hns_roce_v2_aeq_int()
   6261  le64_to_cpu(aeqe->event.cmd.out_param));  in hns_roce_v2_aeq_int()
   [all …]
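The hns_roce_hw_v2.c hits show the owner-bit convention used to spot new AEQEs: an entry is taken only while its owner flag differs from the parity of the consumer index's wrap count. The standalone C sketch below models that test; the names (model_eq, model_aeqe, OWNER_BIT, EQ_ENTRIES) are invented for illustration and this is not the driver's code.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define EQ_ENTRIES 64U           /* queue depth, power of two */
    #define OWNER_BIT  0x1U          /* stand-in for the AEQE_OWNER field */

    struct model_aeqe { uint32_t flags; };

    struct model_eq {
        struct model_aeqe ring[EQ_ENTRIES];
        uint32_t cons_index;         /* monotonically increasing */
    };

    /* Valid entry: owner bit XOR wrap parity of cons_index is 1. */
    static struct model_aeqe *next_aeqe_sw(struct model_eq *eq)
    {
        struct model_aeqe *aeqe = &eq->ring[eq->cons_index & (EQ_ENTRIES - 1)];
        bool owner = aeqe->flags & OWNER_BIT;
        bool wrap_parity = !!(eq->cons_index & EQ_ENTRIES);

        return (owner ^ wrap_parity) ? aeqe : NULL;
    }

    int main(void)
    {
        struct model_eq eq = { .cons_index = 0 };

        /* Pretend hardware posted one event with the owner bit set. */
        eq.ring[0].flags = OWNER_BIT;
        printf("entry valid: %s\n", next_aeqe_sw(&eq) ? "yes" : "no");
        return 0;
    }

Because the queue depth is a power of two, the bit tested by cons_index & entries flips exactly once per pass over the ring, so software can tell a fresh entry from a stale one without zeroing consumed slots.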
|
| /drivers/infiniband/hw/erdma/ |
| A D | erdma_eq.c |
     32  struct erdma_aeqe *aeqe;  in erdma_aeq_event_handler() local
     42  aeqe = get_next_valid_eqe(&dev->aeq);  in erdma_aeq_event_handler()
     43  if (!aeqe)  in erdma_aeq_event_handler()
     53  le32_to_cpu(aeqe->hdr)) == ERDMA_AE_TYPE_CQ_ERR) {  in erdma_aeq_event_handler()
     54  cqn = le32_to_cpu(aeqe->event_data0);  in erdma_aeq_event_handler()
     66  qpn = le32_to_cpu(aeqe->event_data0);  in erdma_aeq_event_handler()
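In the erdma hit, the handler pulls an event type out of the little-endian hdr word and then reads event_data0 as either a CQN or a QPN. A minimal sketch of that dispatch follows; the mask and type values are hypothetical, not erdma's real bit layout.

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical field layout, not erdma's real bit positions. */
    #define AE_TYPE_MASK   0x000000ffU
    #define AE_TYPE_CQ_ERR 0x1U

    struct sketch_aeqe {
        uint32_t hdr;          /* little-endian on the real device */
        uint32_t event_data0;  /* CQN or QPN depending on the type */
    };

    static void handle_aeqe(const struct sketch_aeqe *aeqe)
    {
        uint32_t type = aeqe->hdr & AE_TYPE_MASK;

        if (type == AE_TYPE_CQ_ERR)
            printf("CQ error, cqn %u\n", (unsigned)aeqe->event_data0);
        else
            printf("async event, qpn %u\n", (unsigned)aeqe->event_data0);
    }

    int main(void)
    {
        struct sketch_aeqe e = { .hdr = AE_TYPE_CQ_ERR, .event_data0 = 7 };

        handle_aeqe(&e);
        return 0;
    }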
|
| /drivers/net/ethernet/huawei/hinic/ |
| A D | hinic_hw_eqs.c |
    594  struct hinic_aeq_elem *aeqe;  in aeq_elements_init() local
    598  aeqe = GET_AEQ_ELEM(eq, i);  in aeq_elements_init()
    599  aeqe->desc = cpu_to_be32(init_val);  in aeq_elements_init()
|
| /drivers/crypto/hisilicon/ |
| A D | qm.c |
     68  #define QM_AEQE_PHASE(aeqe) ((le32_to_cpu((aeqe)->dw0) >> 16) & 0x1)  argument
   1099  struct qm_aeqe *aeqe = qm->aeqe + qm->status.aeq_head;  in qm_aeq_thread() local
   1105  while (QM_AEQE_PHASE(aeqe) == qm->status.aeqc_phase) {  in qm_aeq_thread()
   1106  type = (le32_to_cpu(aeqe->dw0) >> QM_AEQE_TYPE_SHIFT) &  in qm_aeq_thread()
   1108  qp_id = le32_to_cpu(aeqe->dw0) & QM_AEQE_CQN_MASK;  in qm_aeq_thread()
   1130  aeqe = qm->aeqe;  in qm_aeq_thread()
   1133  aeqe++;  in qm_aeq_thread()
   5560  QM_INIT_BUF(qm, aeqe, qm->aeq_depth);  in hisi_qm_memory_init()
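qm.c uses the alternative phase-bit scheme: software keeps an expected phase in qm->status.aeqc_phase, consumes entries whose QM_AEQE_PHASE() matches it, and restarts at qm->aeqe on wrap. The sketch below models that loop in plain C; the ring depth and structure names are invented, and the flip of the expected phase on wrap is an assumption about behavior that the matched lines above do not show.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define AEQ_DEPTH    8U
    #define PHASE_SHIFT  16      /* same bit position QM_AEQE_PHASE() tests */

    struct model_aeqe { uint32_t dw0; };

    struct model_aeq {
        struct model_aeqe ring[AEQ_DEPTH];
        uint32_t head;
        bool phase;              /* software's expected phase */
    };

    static bool aeqe_phase(const struct model_aeqe *e)
    {
        return (e->dw0 >> PHASE_SHIFT) & 0x1;
    }

    static void drain_aeq(struct model_aeq *aeq)
    {
        struct model_aeqe *e = &aeq->ring[aeq->head];

        /* Consume entries while the phase bit matches the expected phase. */
        while (aeqe_phase(e) == aeq->phase) {
            printf("consumed aeqe at head %u\n", (unsigned)aeq->head);

            if (aeq->head == AEQ_DEPTH - 1) {
                aeq->phase = !aeq->phase;   /* wrap: flip expected phase */
                aeq->head = 0;
            } else {
                aeq->head++;
            }
            e = &aeq->ring[aeq->head];
        }
    }

    int main(void)
    {
        struct model_aeq aeq = { .head = 0, .phase = true };

        /* Pretend hardware posted two entries with the phase bit set. */
        aeq.ring[0].dw0 = 1U << PHASE_SHIFT;
        aeq.ring[1].dw0 = 1U << PHASE_SHIFT;
        drain_aeq(&aeq);
        return 0;
    }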
|
| A D | debugfs.c |
    399  } else if (qm->aeqe && !strcmp(name, "AEQE")) {  in qm_eq_aeq_dump()
    400  xeqe = qm->aeqe + xeqe_id;  in qm_eq_aeq_dump()
|
| /drivers/infiniband/hw/bnxt_re/ |
| A D | main.c |
   1620  void *aeqe, void *obj)  in bnxt_re_aeq_handler() argument
   1627  type = ((struct creq_base *)aeqe)->type;  in bnxt_re_aeq_handler()
   1629  unaffi_async = aeqe;  in bnxt_re_aeq_handler()
   1632  affi_async = aeqe;  in bnxt_re_aeq_handler()
|
| /drivers/infiniband/hw/irdma/ |
| A D | ctrl.c |
   4009  __le64 *aeqe;  in irdma_sc_get_next_aeqe() local
   4013  aeqe = IRDMA_GET_CURRENT_AEQ_ELEM(aeq);  in irdma_sc_get_next_aeqe()
   4014  get_64bit_val(aeqe, 8, &temp);  in irdma_sc_get_next_aeqe()
   4023  get_64bit_val(aeqe, 0, &compl_ctx);  in irdma_sc_get_next_aeqe()
   4026  aeqe, 16, false);  in irdma_sc_get_next_aeqe()
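The irdma hit reads the AEQE as little-endian 64-bit words at fixed byte offsets (offset 0 for the completion context, offset 8 for the descriptor word) via get_64bit_val(base, byte_offset, &out). The sketch below re-implements that access pattern with a local helper, read_le64_at(), which is illustrative only and does not have the kernel helper's signature.

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative stand-in for get_64bit_val(): read a little-endian
     * u64 at a byte offset. The kernel helper differs; it writes the
     * result through an output pointer. */
    static uint64_t read_le64_at(const uint8_t *base, unsigned int off)
    {
        uint64_t v = 0;

        for (int i = 7; i >= 0; i--)
            v = (v << 8) | base[off + i];
        return v;
    }

    int main(void)
    {
        /* 16 bytes, matching the 16-byte debug dump at ctrl.c line 4026. */
        uint8_t aeqe[16] = { 0 };

        /* Pretend hardware wrote a completion context at offset 0. */
        aeqe[0] = 0xef;
        aeqe[1] = 0xbe;
        aeqe[2] = 0xad;
        aeqe[3] = 0xde;

        printf("compl_ctx  = 0x%llx\n",
               (unsigned long long)read_le64_at(aeqe, 0));
        printf("descriptor = 0x%llx\n",
               (unsigned long long)read_le64_at(aeqe, 8));
        return 0;
    }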
|
Completed in 838 milliseconds