| /drivers/infiniband/hw/hfi1/ |
| A D | sdma.c | 531 return sde->tx_ring[sde->tx_head & sde->sdma_mask]; in get_txhead() 548 head = sde->descq_head & sde->sdma_mask; in sdma_flush_descq() 556 sde->tx_ring[sde->tx_head++ & sde->sdma_mask] = NULL; in sdma_flush_descq() 850 sde = map->sde[selector & map->mask]; in sdma_select_user_engine() 866 map->sde[map->ctr + i] = map->sde[i]; in sdma_populate_sde_map() 876 if (map->sde[i] == sde) { in sdma_cleanup_sde_map() 955 rht_node->map[vl]->sde[0] = sde; in sdma_set_cpu_to_sde_map() 1396 sde->imask = sde->int_mask | sde->progress_mask | in sdma_init() 1793 sde->tx_ring[sde->tx_head++ & sde->sdma_mask] = NULL; in sdma_make_progress() 2285 WARN_ON_ONCE(sde->tx_ring[sde->tx_tail & sde->sdma_mask]); in submit_tx() [all …]
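The sdma.c hits above all index the transmit and descriptor rings with a bitwise AND against `sdma_mask` (e.g. `tx_ring[tx_head & sdma_mask]`) rather than a modulo, which works because the ring sizes are powers of two and the head/tail counters are left free-running. A minimal standalone sketch of that pattern, with illustrative names rather than the driver's own:

```c
#include <stdint.h>
#include <stdio.h>

#define RING_SIZE 8u                 /* must be a power of two */
#define RING_MASK (RING_SIZE - 1u)   /* index & RING_MASK == index % RING_SIZE */

static const char *ring[RING_SIZE];
static uint32_t head, tail;          /* free-running counters; only masked on access */

static void post(const char *entry)
{
	if (tail - head < RING_SIZE)          /* room left? */
		ring[tail++ & RING_MASK] = entry;
}

static void flush(void)
{
	while (head != tail) {                /* mirrors tx_ring[tx_head++ & mask] = NULL */
		printf("retire slot %u: %s\n",
		       (unsigned int)(head & RING_MASK), ring[head & RING_MASK]);
		ring[head++ & RING_MASK] = NULL;
	}
}

int main(void)
{
	for (int round = 0; round < 3; round++) {   /* counters run past RING_SIZE and wrap */
		post("tx-a");
		post("tx-b");
		post("tx-c");
		flush();
	}
	return 0;
}
```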
|
| A D | trace_tx.h | 333 TP_ARGS(sde, aidx), 433 TP_ARGS(sde, sn), 454 TP_ARGS(sde, sn) 459 TP_ARGS(sde, sn) 630 __entry->sde = sde; 702 __field(u8, sde) 713 __entry->sde = sde; 724 __entry->sde, 915 __entry->sde = txq->sde; 934 __entry->sde ? __entry->sde->this_idx : 0, [all …]
|
| A D | vnic_sdma.c | 55 sde->dd, in build_vnic_ulp_payload() 66 ret = sdma_txadd_page(sde->dd, in build_vnic_ulp_payload() 107 sde->dd, in build_vnic_tx_desc() 115 ret = build_vnic_ulp_payload(sde, tx); in build_vnic_tx_desc() 131 struct sdma_engine *sde = vnic_sdma->sde; in hfi1_vnic_send_dma() local 138 if (unlikely(!sde || !sdma_running(sde))) in hfi1_vnic_send_dma() 151 ret = build_vnic_tx_desc(sde, tx, pbc); in hfi1_vnic_send_dma() 195 write_seqlock(&sde->waitlock); in hfi1_vnic_sdma_sleep() 196 if (sdma_progress(sde, seq, txreq)) { in hfi1_vnic_sdma_sleep() 197 write_sequnlock(&sde->waitlock); in hfi1_vnic_sdma_sleep() [all …]
|
| A D | sdma.h | 392 return sde->descq_tail == sde->descq_head; in sdma_empty() 397 return sde->descq_cnt - in sdma_descq_freecnt() 398 (sde->descq_tail - in sdma_descq_freecnt() 399 READ_ONCE(sde->descq_head)) - 1; in sdma_descq_freecnt() 404 return sde->descq_cnt - sdma_descq_freecnt(sde); in sdma_descq_inprocess() 883 sde->desc_avail = sdma_descq_freecnt(sde); in sdma_progress() 884 if (tx->num_desc > sde->desc_avail) in sdma_progress() 968 struct sdma_engine *sde[]; member 1010 struct sdma_engine *sde) in sdma_engine_progress_schedule() argument 1012 if (!sde || sdma_descq_inprocess(sde) < (sde->descq_cnt / 8)) in sdma_engine_progress_schedule() [all …]
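The sdma.h fragments show the usual ring-occupancy arithmetic: the ring is empty when `descq_tail == descq_head`, and `descq_cnt - (tail - head) - 1` slots are free, with one slot deliberately held back so a full ring stays distinguishable from an empty one. A small self-contained check of that arithmetic, using 16-bit free-running counters purely for illustration (the driver's actual field widths are not shown here):

```c
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

static uint16_t descq_cnt = 1024;    /* ring size, power of two */

static unsigned int freecnt(uint16_t head, uint16_t tail)
{
	/* cnt - (tail - head) - 1: unsigned subtraction stays correct across wrap */
	return descq_cnt - (uint16_t)(tail - head) - 1;
}

int main(void)
{
	/* Empty ring: tail == head, all but one slot available. */
	assert(freecnt(0, 0) == descq_cnt - 1u);

	/* 100 descriptors in flight. */
	assert(freecnt(0, 100) == descq_cnt - 101u);

	/* Same occupancy, but both counters have wrapped past 65535. */
	assert(freecnt(65500, (uint16_t)(65500 + 100)) == descq_cnt - 101u);

	printf("free-count arithmetic holds across wraparound\n");
	return 0;
}
```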
|
| A D | msix.c | 185 int msix_request_sdma_irq(struct sdma_engine *sde) in msix_request_sdma_irq() argument 191 sde->dd->unit, sde->this_idx); in msix_request_sdma_irq() 192 nr = msix_request_irq(sde->dd, sde, sdma_interrupt, NULL, in msix_request_sdma_irq() 196 sde->msix_intr = nr; in msix_request_sdma_irq() 197 remap_sdma_interrupts(sde->dd, sde->this_idx, nr); in msix_request_sdma_irq() 259 struct sdma_engine *sde = &dd->per_sdma[i]; in msix_request_irqs() local 261 ret = msix_request_sdma_irq(sde); in msix_request_irqs() 264 enable_sdma_srcs(sde->dd, i); in msix_request_irqs()
|
| A D | sysfs.c | 571 ssize_t (*show)(struct sdma_engine *sde, char *buf); 572 ssize_t (*store)(struct sdma_engine *sde, const char *buf, size_t cnt); 579 struct sdma_engine *sde = in sde_show() local 585 return sde_attr->show(sde, buf); in sde_show() 593 struct sdma_engine *sde = in sde_store() local 602 return sde_attr->store(sde, buf, count); in sde_store() 620 return sdma_get_cpu_to_sde_map(sde, buf); in sde_show_cpu_to_sde_map() 623 static ssize_t sde_store_cpu_to_sde_map(struct sdma_engine *sde, in sde_store_cpu_to_sde_map() argument 626 return sdma_set_cpu_to_sde_map(sde, buf, count); in sde_store_cpu_to_sde_map() 629 static ssize_t sde_show_vl(struct sdma_engine *sde, char *buf) in sde_show_vl() argument [all …]
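The sysfs.c hits show per-engine attributes dispatched through a small table of `show`/`store` function pointers, with the engine recovered from its embedded kobject before the hook is called. The following standalone sketch mimics that dispatch shape with userspace stand-ins; `struct kobj`, `struct engine`, and `engine_show()` are illustrative names, not the driver's types:

```c
#include <stddef.h>
#include <stdio.h>

/* Userspace stand-in for the kernel's container_of(). */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct kobj { const char *name; };          /* stand-in for struct kobject */

struct engine {
	struct kobj kobj;                   /* embedded object, recovered on dispatch */
	unsigned int this_idx;
	unsigned int vl;
};

struct engine_attr {
	const char *name;
	int (*show)(struct engine *e, char *buf);   /* mirrors the show/store hooks */
};

static int show_vl(struct engine *e, char *buf)
{
	return sprintf(buf, "%u\n", e->vl);
}

static struct engine_attr vl_attr = { "vl", show_vl };

/* Generic dispatcher: recover the engine from its embedded kobj, then call the hook. */
static int engine_show(struct kobj *kobj, struct engine_attr *attr, char *buf)
{
	struct engine *e = container_of(kobj, struct engine, kobj);

	return attr->show ? attr->show(e, buf) : -1;
}

int main(void)
{
	struct engine e = { .kobj = { "sde0" }, .this_idx = 0, .vl = 3 };
	char buf[16];

	engine_show(&e.kobj, &vl_attr, buf);
	fputs(buf, stdout);
	return 0;
}
```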
|
| A D | qp.c | 26 struct sdma_engine *sde, 437 struct sdma_engine *sde, in iowait_sleep() argument 461 write_seqlock(&sde->waitlock); in iowait_sleep() 472 &sde->dmawait); in iowait_sleep() 477 write_sequnlock(&sde->waitlock); in iowait_sleep() 487 write_sequnlock(&sde->waitlock); in iowait_sleep() 542 struct sdma_engine *sde; in qp_to_sdma_engine() local 553 return sde; in qp_to_sdma_engine() 603 struct sdma_engine *sde; in qp_iter_print() local 649 sde, in qp_iter_print() [all …]
|
| A D | ipoib_tx.c | 126 tx->txq->sde->this_idx); in hfi1_ipoib_free_tx() 389 txq->sde = in hfi1_ipoib_send_dma_common() 410 ret = sdma_send_txlist(txq->sde, in hfi1_ipoib_submit_tx_list() 444 ret = sdma_send_txreq(txq->sde, in hfi1_ipoib_submit_tx() 622 write_seqlock(&sde->waitlock); in hfi1_ipoib_sdma_sleep() 626 write_sequnlock(&sde->waitlock); in hfi1_ipoib_sdma_sleep() 644 write_sequnlock(&sde->waitlock); in hfi1_ipoib_sdma_sleep() 648 write_sequnlock(&sde->waitlock); in hfi1_ipoib_sdma_sleep() 718 txq->sde = NULL; in hfi1_ipoib_txreq_init() 853 txq->sde, in hfi1_ipoib_tx_timeout() [all …]
|
| A D | user_sdma.c | 57 struct sdma_engine *sde, 65 struct sdma_engine *sde, in defer_packet_queue() argument 74 write_seqlock(&sde->waitlock); in defer_packet_queue() 75 trace_hfi1_usdma_defer(pq, sde, &pq->busy); in defer_packet_queue() 76 if (sdma_progress(sde, seq, txreq)) in defer_packet_queue() 85 pq->busy.lock = &sde->waitlock; in defer_packet_queue() 89 write_sequnlock(&sde->waitlock); in defer_packet_queue() 92 write_sequnlock(&sde->waitlock); in defer_packet_queue() 516 if (!req->sde || !sdma_running(req->sde)) { in hfi1_user_sdma_process_request() 820 ret = sdma_send_txlist(req->sde, in user_sdma_send_pkts() [all …]
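The `defer_packet_queue()` fragments show the defer path taken when the ring looks full: under the engine's wait lock the code re-checks `sdma_progress()`, then either asks the caller to retry or parks the request on the wait list. The sketch below models that decision with a pthread mutex standing in for the kernel seqlock; the names and the `-EAGAIN`/`-EBUSY` return convention are assumptions made for illustration, not the driver's exact contract:

```c
#include <errno.h>
#include <pthread.h>
#include <stdio.h>

struct waiter { struct waiter *next; const char *name; };

struct engine {
	pthread_mutex_t waitlock;
	unsigned int progress_seq;   /* bumped whenever descriptors are retired */
	struct waiter *dmawait;      /* requests waiting for ring space */
};

/* The caller sampled 'seq' before concluding the ring was full. */
static int defer_packet(struct engine *e, unsigned int seq, struct waiter *w)
{
	pthread_mutex_lock(&e->waitlock);
	if (e->progress_seq != seq) {            /* ring moved meanwhile: just retry */
		pthread_mutex_unlock(&e->waitlock);
		return -EAGAIN;
	}
	w->next = e->dmawait;                    /* otherwise park the request */
	e->dmawait = w;
	pthread_mutex_unlock(&e->waitlock);
	return -EBUSY;
}

int main(void)
{
	struct engine e = { PTHREAD_MUTEX_INITIALIZER, 0, NULL };
	struct waiter w = { NULL, "pkt0" };

	unsigned int seq = e.progress_seq;       /* caller's snapshot */
	e.progress_seq++;                        /* the ring retired something in between */
	printf("first attempt: %d (retry expected)\n", defer_packet(&e, seq, &w));

	seq = e.progress_seq;                    /* fresh snapshot, no progress this time */
	printf("second attempt: %d (queued)\n", defer_packet(&e, seq, &w));
	return 0;
}
```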
|
| A D | affinity.c | 733 struct sdma_engine *sde = msix->arg; in hfi1_update_sdma_affinity() local 734 struct hfi1_devdata *dd = sde->dd; in hfi1_update_sdma_affinity() 739 if (cpu > num_online_cpus() || cpu == sde->cpu) in hfi1_update_sdma_affinity() 747 old_cpu = sde->cpu; in hfi1_update_sdma_affinity() 748 sde->cpu = cpu; in hfi1_update_sdma_affinity() 753 sde->this_idx, cpu); in hfi1_update_sdma_affinity() 831 struct sdma_engine *sde = NULL; in get_irq_affinity() local 843 sde = (struct sdma_engine *)msix->arg; in get_irq_affinity() 844 scnprintf(extra, 64, "engine %u", sde->this_idx); in get_irq_affinity() 894 sde->cpu = cpu; in get_irq_affinity()
|
| A D | verbs_txreq.h | 23 struct sdma_engine *sde; member 50 tx->sde = priv->s_sde; in get_txreq()
|
| A D | msix.h | 17 int msix_request_sdma_irq(struct sdma_engine *sde);
|
| A D | iowait.h | 101 struct sdma_engine *sde, 133 int (*sleep)(struct sdma_engine *sde,
|
| A D | verbs.c | 655 struct sdma_engine *sde, in build_verbs_ulp_payload() argument 670 sde->dd, in build_verbs_ulp_payload() 719 struct sdma_engine *sde, in build_verbs_tx_desc() argument 753 sde->dd, in build_verbs_tx_desc() 774 ret = build_verbs_ulp_payload(sde, length, tx); in build_verbs_tx_desc() 781 ret = sdma_txadd_daddr(sde->dd, &tx->txreq, sde->dd->sdma_pad_phys, in build_verbs_tx_desc() 848 ret = build_verbs_tx_desc(tx->sde, len, tx, ahg_info, pbc); in hfi1_verbs_send_dma() 852 ret = sdma_send_txreq(tx->sde, ps->wait, &tx->txreq, ps->pkts_sent); in hfi1_verbs_send_dma()
|
| A D | vnic.h | 44 struct sdma_engine *sde; member
|
| A D | vnic_main.c | 284 struct sdma_engine *sde; in hfi1_vnic_select_queue() local 287 sde = sdma_select_engine_vl(vinfo->dd, mdata->entropy, mdata->vl); in hfi1_vnic_select_queue() 288 return sde->this_idx; in hfi1_vnic_select_queue()
|
| A D | ipoib.h | 111 struct sdma_engine *sde; member
|
| A D | iowait.c | 44 int (*sleep)(struct sdma_engine *sde, in iowait_init() argument
|
| A D | user_sdma.h | 134 struct sdma_engine *sde; member
|
| A D | chip.c | 6008 struct sdma_engine *sde; in handle_sdma_eng_err() local 6011 sde = &dd->per_sdma[source]; in handle_sdma_eng_err() 6013 dd_dev_err(sde->dd, "CONFIG SDMA(%u) %s:%d %s()\n", sde->this_idx, in handle_sdma_eng_err() 6018 sde->err_cnt++; in handle_sdma_eng_err() 6019 sdma_engine_error(sde, status); in handle_sdma_eng_err() 6044 sdma_dumpstate(sde); in is_sdma_eng_err_int() 8323 struct sdma_engine *sde = data; in sdma_interrupt() local 8324 struct hfi1_devdata *dd = sde->dd; in sdma_interrupt() 8330 sdma_dumpstate(sde); in sdma_interrupt() 8338 & sde->imask; in sdma_interrupt() [all …]
|
| A D | uc.c | 229 ps->s_txreq->sde = priv->s_sde; in hfi1_make_uc_req()
|
| A D | ud.c | 522 ps->s_txreq->sde = priv->s_sde; in hfi1_make_ud_req()
|
| A D | rc.c | 353 ps->s_txreq->sde = qpriv->s_sde; in make_rc_ack() 1147 ps->s_txreq->sde = priv->s_sde; in hfi1_make_rc_req()
|
| /drivers/net/fddi/skfp/ |
| A D | smt.c | 74 static void smt_fill_sde(struct s_smc *smc, struct smt_p_sde *sde); 938 smt_fill_sde(smc,&nif->sde) ; /* set station descriptor */ in smt_send_nif() 1030 smt_fill_sde(smc,&sif->sde) ; /* set station descriptor */ in smt_send_sif_config() 1157 static void smt_fill_sde(struct s_smc *smc, struct smt_p_sde *sde) in smt_fill_sde() argument 1159 SMTSETPARA(sde,SMT_P_SDE) ; in smt_fill_sde() 1160 sde->sde_non_master = smc->mib.fddiSMTNonMaster_Ct ; in smt_fill_sde() 1161 sde->sde_master = smc->mib.fddiSMTMaster_Ct ; in smt_fill_sde() 1162 sde->sde_mac_count = NUMMACS ; /* only 1 MAC */ in smt_fill_sde() 1164 sde->sde_type = SMT_SDE_CONCENTRATOR ; in smt_fill_sde() 1166 sde->sde_type = SMT_SDE_STATION ; in smt_fill_sde()
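In the skfp driver, `smt_fill_sde()` populates the SMT Station Descriptor parameter with the non-master and master port counts, the MAC count (one), and the node type (concentrator vs. station). A simplified standalone sketch of that fill step follows; the struct layout, field types, and the condition that selects the node type are assumptions, and the real code also writes the SMT parameter header via `SMTSETPARA()`, which is omitted here:

```c
#include <stdio.h>

enum { SDE_TYPE_STATION = 0, SDE_TYPE_CONCENTRATOR = 1 };

/* Simplified stand-in for struct smt_p_sde; the real layout lives in smt.h. */
struct station_descriptor {
	unsigned char non_master_count;   /* A/B/S ports              */
	unsigned char master_count;       /* M ports (concentrator)   */
	unsigned char mac_count;          /* number of MACs in the node */
	unsigned char type;
};

static void fill_station_descriptor(struct station_descriptor *sde,
				    unsigned char non_master, unsigned char master)
{
	sde->non_master_count = non_master;
	sde->master_count     = master;
	sde->mac_count        = 1;                       /* "only 1 MAC" */
	/* Assumed rule for illustration: any master ports make it a concentrator. */
	sde->type             = master ? SDE_TYPE_CONCENTRATOR : SDE_TYPE_STATION;
}

int main(void)
{
	struct station_descriptor sde;

	fill_station_descriptor(&sde, 2, 0);
	printf("type=%s macs=%u\n",
	       sde.type == SDE_TYPE_CONCENTRATOR ? "concentrator" : "station",
	       sde.mac_count);
	return 0;
}
```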
|
| /drivers/net/fddi/skfp/h/ |
| A D | smt.h | 727 struct smt_p_sde sde ; /* station descriptor */ member 740 struct smt_p_sde sde ; /* station descriptor */ member
|