Lines matching refs:htt — cross-reference hits in the ath10k HTT transmit path. Each hit shows its source line number, the matching code, and the enclosing function; gaps in the numbering are non-matching lines.

51 lockdep_assert_held(&ar->htt.tx_lock); in __ath10k_htt_tx_txq_recalc()
53 if (!ar->htt.tx_q_state.enabled) in __ath10k_htt_tx_txq_recalc()
56 if (ar->htt.tx_q_state.mode != HTT_TX_MODE_SWITCH_PUSH_PULL) in __ath10k_htt_tx_txq_recalc()
73 if (unlikely(peer_id >= ar->htt.tx_q_state.num_peers) || in __ath10k_htt_tx_txq_recalc()
74 unlikely(tid >= ar->htt.tx_q_state.num_tids)) { in __ath10k_htt_tx_txq_recalc()
80 ar->htt.tx_q_state.vaddr->count[tid][peer_id] = count; in __ath10k_htt_tx_txq_recalc()
81 ar->htt.tx_q_state.vaddr->map[tid][idx] &= ~bit; in __ath10k_htt_tx_txq_recalc()
82 ar->htt.tx_q_state.vaddr->map[tid][idx] |= count ? bit : 0; in __ath10k_htt_tx_txq_recalc()
93 lockdep_assert_held(&ar->htt.tx_lock); in __ath10k_htt_tx_txq_sync()
95 if (!ar->htt.tx_q_state.enabled) in __ath10k_htt_tx_txq_sync()
98 if (ar->htt.tx_q_state.mode != HTT_TX_MODE_SWITCH_PUSH_PULL) in __ath10k_htt_tx_txq_sync()
101 seq = le32_to_cpu(ar->htt.tx_q_state.vaddr->seq); in __ath10k_htt_tx_txq_sync()
103 ar->htt.tx_q_state.vaddr->seq = cpu_to_le32(seq); in __ath10k_htt_tx_txq_sync()
108 size = sizeof(*ar->htt.tx_q_state.vaddr); in __ath10k_htt_tx_txq_sync()
110 ar->htt.tx_q_state.paddr, in __ath10k_htt_tx_txq_sync()
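The recalc/sync hits above outline ath10k's push-pull TX queue state: recalc rewrites a per-TID, per-peer depth count plus a non-empty bitmap in a host buffer the firmware reads, and sync bumps a sequence counter so the firmware can tell the table changed. A minimal sketch of that pattern follows; the struct dimensions, the bits-per-map-word, and the dma_sync_single_for_device() step (lines 108-110 only show the size and paddr arguments) are assumptions, not the exact HTT layout.

    #include <linux/bits.h>
    #include <linux/dma-mapping.h>

    struct q_state {                      /* simplified stand-in layout */
        __le32 seq;                       /* bumped on every sync */
        u8 count[8][32];                  /* [tid][peer_id] queue depth */
        u32 map[8][1];                    /* [tid][word] non-empty bitmap */
    };

    static void q_state_recalc(struct q_state *qs, u16 peer_id, u8 tid,
                               u8 count)
    {
        u16 idx = peer_id / 32;           /* map word holding this peer's bit */
        u32 bit = BIT(peer_id % 32);

        qs->count[tid][peer_id] = count;
        qs->map[tid][idx] &= ~bit;
        qs->map[tid][idx] |= count ? bit : 0;
    }

    static void q_state_sync(struct device *dev, struct q_state *qs,
                             dma_addr_t paddr)
    {
        u32 seq = le32_to_cpu(qs->seq) + 1;

        qs->seq = cpu_to_le32(seq);
        /* assumed: push the update to the device-visible copy */
        dma_sync_single_for_device(dev, paddr, sizeof(*qs), DMA_TO_DEVICE);
    }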
120 spin_lock_bh(&ar->htt.tx_lock); in ath10k_htt_tx_txq_recalc()
122 spin_unlock_bh(&ar->htt.tx_lock); in ath10k_htt_tx_txq_recalc()
127 spin_lock_bh(&ar->htt.tx_lock); in ath10k_htt_tx_txq_sync()
129 spin_unlock_bh(&ar->htt.tx_lock); in ath10k_htt_tx_txq_sync()
137 spin_lock_bh(&ar->htt.tx_lock); in ath10k_htt_tx_txq_update()
140 spin_unlock_bh(&ar->htt.tx_lock); in ath10k_htt_tx_txq_update()
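These three wrappers show the file's locking convention: double-underscore helpers require htt.tx_lock and assert it with lockdep_assert_held(), while the public entry points take the lock in BH-safe mode, presumably because the TX path runs in softirq context. A compact illustration (names abbreviated):

    static void __txq_recalc(struct ath10k *ar)
    {
        lockdep_assert_held(&ar->htt.tx_lock);  /* caller owns the lock */
        /* ... update tx_q_state ... */
    }

    void txq_recalc(struct ath10k *ar)
    {
        spin_lock_bh(&ar->htt.tx_lock);
        __txq_recalc(ar);
        spin_unlock_bh(&ar->htt.tx_lock);
    }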
143 void ath10k_htt_tx_dec_pending(struct ath10k_htt *htt) in ath10k_htt_tx_dec_pending() argument
145 lockdep_assert_held(&htt->tx_lock); in ath10k_htt_tx_dec_pending()
147 htt->num_pending_tx--; in ath10k_htt_tx_dec_pending()
148 if (htt->num_pending_tx == htt->max_num_pending_tx - 1) in ath10k_htt_tx_dec_pending()
149 ath10k_mac_tx_unlock(htt->ar, ATH10K_TX_PAUSE_Q_FULL); in ath10k_htt_tx_dec_pending()
151 if (htt->num_pending_tx == 0) in ath10k_htt_tx_dec_pending()
152 wake_up(&htt->empty_tx_wq); in ath10k_htt_tx_dec_pending()
155 int ath10k_htt_tx_inc_pending(struct ath10k_htt *htt) in ath10k_htt_tx_inc_pending() argument
157 lockdep_assert_held(&htt->tx_lock); in ath10k_htt_tx_inc_pending()
159 if (htt->num_pending_tx >= htt->max_num_pending_tx) in ath10k_htt_tx_inc_pending()
162 htt->num_pending_tx++; in ath10k_htt_tx_inc_pending()
163 if (htt->num_pending_tx == htt->max_num_pending_tx) in ath10k_htt_tx_inc_pending()
164 ath10k_mac_tx_lock(htt->ar, ATH10K_TX_PAUSE_Q_FULL); in ath10k_htt_tx_inc_pending()
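Taken together, inc/dec implement credit-style flow control against the descriptor pool: hitting the ceiling pauses the mac80211 queues, dropping one below it resumes them, and reaching zero wakes anyone waiting on empty_tx_wq (e.g. a flush). A sketch with the elided lines filled in; the -EBUSY return is an assumption inferred from the guard at line 159:

    int inc_pending(struct ath10k_htt *htt)
    {
        lockdep_assert_held(&htt->tx_lock);

        if (htt->num_pending_tx >= htt->max_num_pending_tx)
            return -EBUSY;                /* assumed error code */

        htt->num_pending_tx++;
        if (htt->num_pending_tx == htt->max_num_pending_tx)
            ath10k_mac_tx_lock(htt->ar, ATH10K_TX_PAUSE_Q_FULL);
        return 0;
    }

    void dec_pending(struct ath10k_htt *htt)
    {
        lockdep_assert_held(&htt->tx_lock);

        htt->num_pending_tx--;
        if (htt->num_pending_tx == htt->max_num_pending_tx - 1)
            ath10k_mac_tx_unlock(htt->ar, ATH10K_TX_PAUSE_Q_FULL);
        if (htt->num_pending_tx == 0)
            wake_up(&htt->empty_tx_wq);
    }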
169 int ath10k_htt_tx_mgmt_inc_pending(struct ath10k_htt *htt, bool is_mgmt, in ath10k_htt_tx_mgmt_inc_pending() argument
172 struct ath10k *ar = htt->ar; in ath10k_htt_tx_mgmt_inc_pending()
174 lockdep_assert_held(&htt->tx_lock); in ath10k_htt_tx_mgmt_inc_pending()
180 ar->hw_params.max_probe_resp_desc_thres < htt->num_pending_mgmt_tx) in ath10k_htt_tx_mgmt_inc_pending()
183 htt->num_pending_mgmt_tx++; in ath10k_htt_tx_mgmt_inc_pending()
188 void ath10k_htt_tx_mgmt_dec_pending(struct ath10k_htt *htt) in ath10k_htt_tx_mgmt_dec_pending() argument
190 lockdep_assert_held(&htt->tx_lock); in ath10k_htt_tx_mgmt_dec_pending()
192 if (!htt->ar->hw_params.max_probe_resp_desc_thres) in ath10k_htt_tx_mgmt_dec_pending()
195 htt->num_pending_mgmt_tx--; in ath10k_htt_tx_mgmt_dec_pending()
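Management frames get their own counter, bounded by hw_params.max_probe_resp_desc_thres so a burst of probe responses cannot starve the descriptor pool. A plausible reconstruction of the elided control flow around the hit at line 180 (the exact conditions are an assumption):

    int mgmt_inc_pending(struct ath10k_htt *htt, bool is_mgmt, bool is_presp)
    {
        struct ath10k *ar = htt->ar;

        lockdep_assert_held(&htt->tx_lock);

        /* only throttle when the threshold is configured at all */
        if (!is_mgmt || !ar->hw_params.max_probe_resp_desc_thres)
            return 0;

        if (is_presp &&
            ar->hw_params.max_probe_resp_desc_thres < htt->num_pending_mgmt_tx)
            return -EBUSY;

        htt->num_pending_mgmt_tx++;
        return 0;
    }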
198 int ath10k_htt_tx_alloc_msdu_id(struct ath10k_htt *htt, struct sk_buff *skb) in ath10k_htt_tx_alloc_msdu_id() argument
200 struct ath10k *ar = htt->ar; in ath10k_htt_tx_alloc_msdu_id()
203 spin_lock_bh(&htt->tx_lock); in ath10k_htt_tx_alloc_msdu_id()
204 ret = idr_alloc(&htt->pending_tx, skb, 0, in ath10k_htt_tx_alloc_msdu_id()
205 htt->max_num_pending_tx, GFP_ATOMIC); in ath10k_htt_tx_alloc_msdu_id()
206 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_tx_alloc_msdu_id()
213 void ath10k_htt_tx_free_msdu_id(struct ath10k_htt *htt, u16 msdu_id) in ath10k_htt_tx_free_msdu_id() argument
215 struct ath10k *ar = htt->ar; in ath10k_htt_tx_free_msdu_id()
217 lockdep_assert_held(&htt->tx_lock); in ath10k_htt_tx_free_msdu_id()
221 idr_remove(&htt->pending_tx, msdu_id); in ath10k_htt_tx_free_msdu_id()
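MSDU ids come from an IDR that also records which skb owns each id, so a later TX completion can be matched back to its frame. Allocation happens under tx_lock with GFP_ATOMIC; release (already under the lock, per the lockdep assert at line 217) is a plain idr_remove(). Sketch using the stock IDR API:

    #include <linux/idr.h>
    #include <linux/skbuff.h>
    #include <linux/spinlock.h>

    static int alloc_msdu_id(struct idr *pending, spinlock_t *lock,
                             struct sk_buff *skb, int max_ids)
    {
        int id;

        spin_lock_bh(lock);
        /* GFP_ATOMIC: we cannot sleep inside the BH-disabled section */
        id = idr_alloc(pending, skb, 0, max_ids, GFP_ATOMIC);
        spin_unlock_bh(lock);

        return id;                        /* the new id, or a negative errno */
    }

    static void free_msdu_id(struct idr *pending, u16 msdu_id)
    {
        idr_remove(pending, msdu_id);
    }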
224 static void ath10k_htt_tx_free_cont_txbuf_32(struct ath10k_htt *htt) in ath10k_htt_tx_free_cont_txbuf_32() argument
226 struct ath10k *ar = htt->ar; in ath10k_htt_tx_free_cont_txbuf_32()
229 if (!htt->txbuf.vaddr_txbuff_32) in ath10k_htt_tx_free_cont_txbuf_32()
232 size = htt->txbuf.size; in ath10k_htt_tx_free_cont_txbuf_32()
233 dma_free_coherent(ar->dev, size, htt->txbuf.vaddr_txbuff_32, in ath10k_htt_tx_free_cont_txbuf_32()
234 htt->txbuf.paddr); in ath10k_htt_tx_free_cont_txbuf_32()
235 htt->txbuf.vaddr_txbuff_32 = NULL; in ath10k_htt_tx_free_cont_txbuf_32()
238 static int ath10k_htt_tx_alloc_cont_txbuf_32(struct ath10k_htt *htt) in ath10k_htt_tx_alloc_cont_txbuf_32() argument
240 struct ath10k *ar = htt->ar; in ath10k_htt_tx_alloc_cont_txbuf_32()
243 size = htt->max_num_pending_tx * in ath10k_htt_tx_alloc_cont_txbuf_32()
246 htt->txbuf.vaddr_txbuff_32 = dma_alloc_coherent(ar->dev, size, in ath10k_htt_tx_alloc_cont_txbuf_32()
247 &htt->txbuf.paddr, in ath10k_htt_tx_alloc_cont_txbuf_32()
249 if (!htt->txbuf.vaddr_txbuff_32) in ath10k_htt_tx_alloc_cont_txbuf_32()
252 htt->txbuf.size = size; in ath10k_htt_tx_alloc_cont_txbuf_32()
257 static void ath10k_htt_tx_free_cont_txbuf_64(struct ath10k_htt *htt) in ath10k_htt_tx_free_cont_txbuf_64() argument
259 struct ath10k *ar = htt->ar; in ath10k_htt_tx_free_cont_txbuf_64()
262 if (!htt->txbuf.vaddr_txbuff_64) in ath10k_htt_tx_free_cont_txbuf_64()
265 size = htt->txbuf.size; in ath10k_htt_tx_free_cont_txbuf_64()
266 dma_free_coherent(ar->dev, size, htt->txbuf.vaddr_txbuff_64, in ath10k_htt_tx_free_cont_txbuf_64()
267 htt->txbuf.paddr); in ath10k_htt_tx_free_cont_txbuf_64()
268 htt->txbuf.vaddr_txbuff_64 = NULL; in ath10k_htt_tx_free_cont_txbuf_64()
271 static int ath10k_htt_tx_alloc_cont_txbuf_64(struct ath10k_htt *htt) in ath10k_htt_tx_alloc_cont_txbuf_64() argument
273 struct ath10k *ar = htt->ar; in ath10k_htt_tx_alloc_cont_txbuf_64()
276 size = htt->max_num_pending_tx * in ath10k_htt_tx_alloc_cont_txbuf_64()
279 htt->txbuf.vaddr_txbuff_64 = dma_alloc_coherent(ar->dev, size, in ath10k_htt_tx_alloc_cont_txbuf_64()
280 &htt->txbuf.paddr, in ath10k_htt_tx_alloc_cont_txbuf_64()
282 if (!htt->txbuf.vaddr_txbuff_64) in ath10k_htt_tx_alloc_cont_txbuf_64()
285 htt->txbuf.size = size; in ath10k_htt_tx_alloc_cont_txbuf_64()
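The TX buffers live in one contiguous coherent-DMA arena sized max_num_pending_tx * sizeof(descriptor), so a frame's slot is later found by plain indexing rather than a per-packet mapping; the _32/_64 pairs differ only in the descriptor struct used. A sketch of the shared recipe (struct txbuf is a placeholder for the real HTT descriptor layout):

    #include <linux/dma-mapping.h>

    struct txbuf { u8 raw[64]; };         /* placeholder descriptor layout */

    static void *alloc_txbuf_arena(struct device *dev, int max_pending,
                                   dma_addr_t *paddr, size_t *sizep)
    {
        size_t size = max_pending * sizeof(struct txbuf);
        void *vaddr = dma_alloc_coherent(dev, size, paddr, GFP_KERNEL);

        if (!vaddr)
            return NULL;

        *sizep = size;                    /* remembered for dma_free_coherent() */
        return vaddr;
    }

The fragment-descriptor pool (the hits at lines 290-366) follows the identical recipe, again in 32- and 64-bit flavours.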
290 static void ath10k_htt_tx_free_cont_frag_desc_32(struct ath10k_htt *htt) in ath10k_htt_tx_free_cont_frag_desc_32() argument
294 if (!htt->frag_desc.vaddr_desc_32) in ath10k_htt_tx_free_cont_frag_desc_32()
297 size = htt->max_num_pending_tx * in ath10k_htt_tx_free_cont_frag_desc_32()
300 dma_free_coherent(htt->ar->dev, in ath10k_htt_tx_free_cont_frag_desc_32()
302 htt->frag_desc.vaddr_desc_32, in ath10k_htt_tx_free_cont_frag_desc_32()
303 htt->frag_desc.paddr); in ath10k_htt_tx_free_cont_frag_desc_32()
305 htt->frag_desc.vaddr_desc_32 = NULL; in ath10k_htt_tx_free_cont_frag_desc_32()
308 static int ath10k_htt_tx_alloc_cont_frag_desc_32(struct ath10k_htt *htt) in ath10k_htt_tx_alloc_cont_frag_desc_32() argument
310 struct ath10k *ar = htt->ar; in ath10k_htt_tx_alloc_cont_frag_desc_32()
316 size = htt->max_num_pending_tx * in ath10k_htt_tx_alloc_cont_frag_desc_32()
318 htt->frag_desc.vaddr_desc_32 = dma_alloc_coherent(ar->dev, size, in ath10k_htt_tx_alloc_cont_frag_desc_32()
319 &htt->frag_desc.paddr, in ath10k_htt_tx_alloc_cont_frag_desc_32()
321 if (!htt->frag_desc.vaddr_desc_32) { in ath10k_htt_tx_alloc_cont_frag_desc_32()
325 htt->frag_desc.size = size; in ath10k_htt_tx_alloc_cont_frag_desc_32()
330 static void ath10k_htt_tx_free_cont_frag_desc_64(struct ath10k_htt *htt) in ath10k_htt_tx_free_cont_frag_desc_64() argument
334 if (!htt->frag_desc.vaddr_desc_64) in ath10k_htt_tx_free_cont_frag_desc_64()
337 size = htt->max_num_pending_tx * in ath10k_htt_tx_free_cont_frag_desc_64()
340 dma_free_coherent(htt->ar->dev, in ath10k_htt_tx_free_cont_frag_desc_64()
342 htt->frag_desc.vaddr_desc_64, in ath10k_htt_tx_free_cont_frag_desc_64()
343 htt->frag_desc.paddr); in ath10k_htt_tx_free_cont_frag_desc_64()
345 htt->frag_desc.vaddr_desc_64 = NULL; in ath10k_htt_tx_free_cont_frag_desc_64()
348 static int ath10k_htt_tx_alloc_cont_frag_desc_64(struct ath10k_htt *htt) in ath10k_htt_tx_alloc_cont_frag_desc_64() argument
350 struct ath10k *ar = htt->ar; in ath10k_htt_tx_alloc_cont_frag_desc_64()
356 size = htt->max_num_pending_tx * in ath10k_htt_tx_alloc_cont_frag_desc_64()
359 htt->frag_desc.vaddr_desc_64 = dma_alloc_coherent(ar->dev, size, in ath10k_htt_tx_alloc_cont_frag_desc_64()
360 &htt->frag_desc.paddr, in ath10k_htt_tx_alloc_cont_frag_desc_64()
362 if (!htt->frag_desc.vaddr_desc_64) { in ath10k_htt_tx_alloc_cont_frag_desc_64()
366 htt->frag_desc.size = size; in ath10k_htt_tx_alloc_cont_frag_desc_64()
371 static void ath10k_htt_tx_free_txq(struct ath10k_htt *htt) in ath10k_htt_tx_free_txq() argument
373 struct ath10k *ar = htt->ar; in ath10k_htt_tx_free_txq()
380 size = sizeof(*htt->tx_q_state.vaddr); in ath10k_htt_tx_free_txq()
382 dma_unmap_single(ar->dev, htt->tx_q_state.paddr, size, DMA_TO_DEVICE); in ath10k_htt_tx_free_txq()
383 kfree(htt->tx_q_state.vaddr); in ath10k_htt_tx_free_txq()
386 static int ath10k_htt_tx_alloc_txq(struct ath10k_htt *htt) in ath10k_htt_tx_alloc_txq() argument
388 struct ath10k *ar = htt->ar; in ath10k_htt_tx_alloc_txq()
396 htt->tx_q_state.num_peers = HTT_TX_Q_STATE_NUM_PEERS; in ath10k_htt_tx_alloc_txq()
397 htt->tx_q_state.num_tids = HTT_TX_Q_STATE_NUM_TIDS; in ath10k_htt_tx_alloc_txq()
398 htt->tx_q_state.type = HTT_Q_DEPTH_TYPE_BYTES; in ath10k_htt_tx_alloc_txq()
400 size = sizeof(*htt->tx_q_state.vaddr); in ath10k_htt_tx_alloc_txq()
401 htt->tx_q_state.vaddr = kzalloc(size, GFP_KERNEL); in ath10k_htt_tx_alloc_txq()
402 if (!htt->tx_q_state.vaddr) in ath10k_htt_tx_alloc_txq()
405 htt->tx_q_state.paddr = dma_map_single(ar->dev, htt->tx_q_state.vaddr, in ath10k_htt_tx_alloc_txq()
407 ret = dma_mapping_error(ar->dev, htt->tx_q_state.paddr); in ath10k_htt_tx_alloc_txq()
410 kfree(htt->tx_q_state.vaddr); in ath10k_htt_tx_alloc_txq()
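Unlike the coherent arenas above, the shared queue state is an ordinary kzalloc() buffer put under streaming DMA with dma_map_single(); the mapping must be checked with dma_mapping_error() and the buffer freed if it fails. Minimal sketch of that sequence:

    #include <linux/dma-mapping.h>
    #include <linux/slab.h>

    static int map_q_state(struct device *dev, size_t size,
                           void **vaddr, dma_addr_t *paddr)
    {
        void *buf = kzalloc(size, GFP_KERNEL);

        if (!buf)
            return -ENOMEM;

        *paddr = dma_map_single(dev, buf, size, DMA_TO_DEVICE);
        if (dma_mapping_error(dev, *paddr)) {
            kfree(buf);                   /* undo the allocation on map failure */
            return -EIO;
        }

        *vaddr = buf;
        return 0;
    }

This streaming mapping is also why the sync helper sketched earlier would need an explicit dma_sync_single_for_device(): coherent memory does not require it, streaming memory does.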
417 static void ath10k_htt_tx_free_txdone_fifo(struct ath10k_htt *htt) in ath10k_htt_tx_free_txdone_fifo() argument
419 WARN_ON(!kfifo_is_empty(&htt->txdone_fifo)); in ath10k_htt_tx_free_txdone_fifo()
420 kfifo_free(&htt->txdone_fifo); in ath10k_htt_tx_free_txdone_fifo()
423 static int ath10k_htt_tx_alloc_txdone_fifo(struct ath10k_htt *htt) in ath10k_htt_tx_alloc_txdone_fifo() argument
428 size = roundup_pow_of_two(htt->max_num_pending_tx); in ath10k_htt_tx_alloc_txdone_fifo()
429 ret = kfifo_alloc(&htt->txdone_fifo, size, GFP_KERNEL); in ath10k_htt_tx_alloc_txdone_fifo()
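TX completions are queued through a kfifo, and kfifo sizes must be a power of two, hence the explicit roundup_pow_of_two() on max_num_pending_tx. Sketch (struct tx_done is a simplified stand-in for the driver's completion record):

    #include <linux/kfifo.h>
    #include <linux/log2.h>

    struct tx_done { u16 msdu_id; u8 status; };

    struct tx_ctx {
        DECLARE_KFIFO_PTR(txdone_fifo, struct tx_done);
    };

    static int alloc_txdone_fifo(struct tx_ctx *ctx, unsigned int max_pending)
    {
        unsigned int size = roundup_pow_of_two(max_pending);

        return kfifo_alloc(&ctx->txdone_fifo, size, GFP_KERNEL);
    }

The matching free path (lines 419-420) warns if the fifo is not empty before kfifo_free(), catching completions lost during teardown.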
433 static int ath10k_htt_tx_alloc_buf(struct ath10k_htt *htt) in ath10k_htt_tx_alloc_buf() argument
435 struct ath10k *ar = htt->ar; in ath10k_htt_tx_alloc_buf()
438 ret = ath10k_htt_alloc_txbuff(htt); in ath10k_htt_tx_alloc_buf()
444 ret = ath10k_htt_alloc_frag_desc(htt); in ath10k_htt_tx_alloc_buf()
450 ret = ath10k_htt_tx_alloc_txq(htt); in ath10k_htt_tx_alloc_buf()
456 ret = ath10k_htt_tx_alloc_txdone_fifo(htt); in ath10k_htt_tx_alloc_buf()
465 ath10k_htt_tx_free_txq(htt); in ath10k_htt_tx_alloc_buf()
468 ath10k_htt_free_frag_desc(htt); in ath10k_htt_tx_alloc_buf()
471 ath10k_htt_free_txbuff(htt); in ath10k_htt_tx_alloc_buf()
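ath10k_htt_tx_alloc_buf() chains the four allocators with the kernel's standard goto-unwind error handling: each successful step gains a cleanup label, so a later failure releases earlier resources in reverse order. Sketch of the shape (the alloc_*/free_* names stand in for the ops-dispatched helpers in the hits):

    static int alloc_all(struct ath10k_htt *htt)
    {
        int ret;

        ret = alloc_txbuff(htt);
        if (ret)
            return ret;

        ret = alloc_frag_desc(htt);
        if (ret)
            goto err_free_txbuff;

        ret = alloc_txq(htt);
        if (ret)
            goto err_free_frag_desc;

        ret = alloc_txdone_fifo(htt);
        if (ret)
            goto err_free_txq;

        return 0;

    err_free_txq:                         /* unwind in reverse order */
        free_txq(htt);
    err_free_frag_desc:
        free_frag_desc(htt);
    err_free_txbuff:
        free_txbuff(htt);
        return ret;
    }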
476 int ath10k_htt_tx_start(struct ath10k_htt *htt) in ath10k_htt_tx_start() argument
478 struct ath10k *ar = htt->ar; in ath10k_htt_tx_start()
482 htt->max_num_pending_tx); in ath10k_htt_tx_start()
484 spin_lock_init(&htt->tx_lock); in ath10k_htt_tx_start()
485 idr_init(&htt->pending_tx); in ath10k_htt_tx_start()
487 if (htt->tx_mem_allocated) in ath10k_htt_tx_start()
493 ret = ath10k_htt_tx_alloc_buf(htt); in ath10k_htt_tx_start()
497 htt->tx_mem_allocated = true; in ath10k_htt_tx_start()
502 idr_destroy(&htt->pending_tx); in ath10k_htt_tx_start()
510 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_tx_clean_up_pending() local
518 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_tx_clean_up_pending()
523 void ath10k_htt_tx_destroy(struct ath10k_htt *htt) in ath10k_htt_tx_destroy() argument
525 if (!htt->tx_mem_allocated) in ath10k_htt_tx_destroy()
528 ath10k_htt_free_txbuff(htt); in ath10k_htt_tx_destroy()
529 ath10k_htt_tx_free_txq(htt); in ath10k_htt_tx_destroy()
530 ath10k_htt_free_frag_desc(htt); in ath10k_htt_tx_destroy()
531 ath10k_htt_tx_free_txdone_fifo(htt); in ath10k_htt_tx_destroy()
532 htt->tx_mem_allocated = false; in ath10k_htt_tx_destroy()
535 static void ath10k_htt_flush_tx_queue(struct ath10k_htt *htt) in ath10k_htt_flush_tx_queue() argument
537 ath10k_htc_stop_hl(htt->ar); in ath10k_htt_flush_tx_queue()
538 idr_for_each(&htt->pending_tx, ath10k_htt_tx_clean_up_pending, htt->ar); in ath10k_htt_flush_tx_queue()
541 void ath10k_htt_tx_stop(struct ath10k_htt *htt) in ath10k_htt_tx_stop() argument
543 ath10k_htt_flush_tx_queue(htt); in ath10k_htt_tx_stop()
544 idr_destroy(&htt->pending_tx); in ath10k_htt_tx_stop()
547 void ath10k_htt_tx_free(struct ath10k_htt *htt) in ath10k_htt_tx_free() argument
549 ath10k_htt_tx_stop(htt); in ath10k_htt_tx_free()
550 ath10k_htt_tx_destroy(htt); in ath10k_htt_tx_free()
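Teardown order matters here: every outstanding MSDU id is first walked with idr_for_each() and completed as discarded (via ath10k_txrx_tx_unref, line 518), then the IDR is destroyed, and only then are the DMA arenas released, since in-flight completions may still reference them. Condensed sketch (clean_up_pending_cb stands in for ath10k_htt_tx_clean_up_pending):

    void tx_teardown(struct ath10k_htt *htt)
    {
        /* drain: complete every pending MSDU as discarded */
        idr_for_each(&htt->pending_tx, clean_up_pending_cb, htt->ar);
        idr_destroy(&htt->pending_tx);

        /* now no completion can touch the arenas any more */
        free_txbuff(htt);
        free_txq(htt);
        free_frag_desc(htt);
        free_txdone_fifo(htt);
        htt->tx_mem_allocated = false;
    }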
560 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_htc_tx_complete() local
567 if (htt->disable_tx_comp) { in ath10k_htt_htc_tx_complete()
582 if ((!htt->disable_tx_comp) || (msg_type != HTT_H2T_MSG_TYPE_TX_FRM)) in ath10k_htt_htc_tx_complete()
594 ath10k_txrx_tx_unref(&ar->htt, &tx_done); in ath10k_htt_htc_tx_complete()
603 int ath10k_htt_h2t_ver_req_msg(struct ath10k_htt *htt) in ath10k_htt_h2t_ver_req_msg() argument
605 struct ath10k *ar = htt->ar; in ath10k_htt_h2t_ver_req_msg()
622 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_h2t_ver_req_msg()
631 int ath10k_htt_h2t_stats_req(struct ath10k_htt *htt, u32 mask, u32 reset_mask, in ath10k_htt_h2t_stats_req() argument
634 struct ath10k *ar = htt->ar; in ath10k_htt_h2t_stats_req()
664 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_h2t_stats_req()
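The host-to-target messages (version request, stats request, and the config messages below) share one send pattern: build a small skb, hand it to HTC on the HTT endpoint, and free it only if the send fails, because on success the HTC layer owns the buffer. Sketch; ath10k_htc_alloc_skb() and ath10k_htc_send() are the driver's real helpers, while cmd/len are simplified stand-ins for the HTT command layout:

    static int send_h2t_cmd(struct ath10k_htt *htt, const void *cmd, size_t len)
    {
        struct sk_buff *skb;
        int ret;

        skb = ath10k_htc_alloc_skb(htt->ar, len);
        if (!skb)
            return -ENOMEM;

        skb_put_data(skb, cmd, len);      /* copy the HTT command in */

        ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb);
        if (ret) {
            dev_kfree_skb_any(skb);       /* we still own it on failure */
            return ret;
        }

        return 0;                         /* HTC owns the skb from here on */
    }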
675 static int ath10k_htt_send_frag_desc_bank_cfg_32(struct ath10k_htt *htt) in ath10k_htt_send_frag_desc_bank_cfg_32() argument
677 struct ath10k *ar = htt->ar; in ath10k_htt_send_frag_desc_bank_cfg_32()
687 if (!htt->frag_desc.paddr) { in ath10k_htt_send_frag_desc_bank_cfg_32()
702 info |= SM(htt->tx_q_state.type, in ath10k_htt_send_frag_desc_bank_cfg_32()
713 cfg->bank_base_addrs[0] = __cpu_to_le32(htt->frag_desc.paddr); in ath10k_htt_send_frag_desc_bank_cfg_32()
715 cfg->bank_id[0].bank_max_id = __cpu_to_le16(htt->max_num_pending_tx - in ath10k_htt_send_frag_desc_bank_cfg_32()
718 cfg->q_state.paddr = cpu_to_le32(htt->tx_q_state.paddr); in ath10k_htt_send_frag_desc_bank_cfg_32()
719 cfg->q_state.num_peers = cpu_to_le16(htt->tx_q_state.num_peers); in ath10k_htt_send_frag_desc_bank_cfg_32()
720 cfg->q_state.num_tids = cpu_to_le16(htt->tx_q_state.num_tids); in ath10k_htt_send_frag_desc_bank_cfg_32()
726 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_send_frag_desc_bank_cfg_32()
737 static int ath10k_htt_send_frag_desc_bank_cfg_64(struct ath10k_htt *htt) in ath10k_htt_send_frag_desc_bank_cfg_64() argument
739 struct ath10k *ar = htt->ar; in ath10k_htt_send_frag_desc_bank_cfg_64()
749 if (!htt->frag_desc.paddr) { in ath10k_htt_send_frag_desc_bank_cfg_64()
764 info |= SM(htt->tx_q_state.type, in ath10k_htt_send_frag_desc_bank_cfg_64()
775 cfg->bank_base_addrs[0] = __cpu_to_le64(htt->frag_desc.paddr); in ath10k_htt_send_frag_desc_bank_cfg_64()
777 cfg->bank_id[0].bank_max_id = __cpu_to_le16(htt->max_num_pending_tx - in ath10k_htt_send_frag_desc_bank_cfg_64()
780 cfg->q_state.paddr = cpu_to_le32(htt->tx_q_state.paddr); in ath10k_htt_send_frag_desc_bank_cfg_64()
781 cfg->q_state.num_peers = cpu_to_le16(htt->tx_q_state.num_peers); in ath10k_htt_send_frag_desc_bank_cfg_64()
782 cfg->q_state.num_tids = cpu_to_le16(htt->tx_q_state.num_tids); in ath10k_htt_send_frag_desc_bank_cfg_64()
788 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_send_frag_desc_bank_cfg_64()
815 static int ath10k_htt_send_rx_ring_cfg_32(struct ath10k_htt *htt) in ath10k_htt_send_rx_ring_cfg_32() argument
817 struct ath10k *ar = htt->ar; in ath10k_htt_send_rx_ring_cfg_32()
868 fw_idx = __le32_to_cpu(*htt->rx_ring.alloc_idx.vaddr); in ath10k_htt_send_rx_ring_cfg_32()
871 __cpu_to_le32(htt->rx_ring.alloc_idx.paddr); in ath10k_htt_send_rx_ring_cfg_32()
872 ring->rx_ring_base_paddr = __cpu_to_le32(htt->rx_ring.base_paddr); in ath10k_htt_send_rx_ring_cfg_32()
873 ring->rx_ring_len = __cpu_to_le16(htt->rx_ring.size); in ath10k_htt_send_rx_ring_cfg_32()
879 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_send_rx_ring_cfg_32()
888 static int ath10k_htt_send_rx_ring_cfg_64(struct ath10k_htt *htt) in ath10k_htt_send_rx_ring_cfg_64() argument
890 struct ath10k *ar = htt->ar; in ath10k_htt_send_rx_ring_cfg_64()
939 fw_idx = __le32_to_cpu(*htt->rx_ring.alloc_idx.vaddr); in ath10k_htt_send_rx_ring_cfg_64()
941 ring->fw_idx_shadow_reg_paddr = __cpu_to_le64(htt->rx_ring.alloc_idx.paddr); in ath10k_htt_send_rx_ring_cfg_64()
942 ring->rx_ring_base_paddr = __cpu_to_le64(htt->rx_ring.base_paddr); in ath10k_htt_send_rx_ring_cfg_64()
943 ring->rx_ring_len = __cpu_to_le16(htt->rx_ring.size); in ath10k_htt_send_rx_ring_cfg_64()
949 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_send_rx_ring_cfg_64()
958 static int ath10k_htt_send_rx_ring_cfg_hl(struct ath10k_htt *htt) in ath10k_htt_send_rx_ring_cfg_hl() argument
960 struct ath10k *ar = htt->ar; in ath10k_htt_send_rx_ring_cfg_hl()
1000 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_send_rx_ring_cfg_hl()
1009 static int ath10k_htt_h2t_aggr_cfg_msg_32(struct ath10k_htt *htt, in ath10k_htt_h2t_aggr_cfg_msg_32() argument
1013 struct ath10k *ar = htt->ar; in ath10k_htt_h2t_aggr_cfg_msg_32()
1047 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_h2t_aggr_cfg_msg_32()
1056 static int ath10k_htt_h2t_aggr_cfg_msg_v2(struct ath10k_htt *htt, in ath10k_htt_h2t_aggr_cfg_msg_v2() argument
1060 struct ath10k *ar = htt->ar; in ath10k_htt_h2t_aggr_cfg_msg_v2()
1094 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_h2t_aggr_cfg_msg_v2()
1138 ret = ath10k_htc_send(&ar->htc, ar->htt.eid, skb); in ath10k_htt_tx_fetch_resp()
1183 int ath10k_htt_mgmt_tx(struct ath10k_htt *htt, struct sk_buff *msdu) in ath10k_htt_mgmt_tx() argument
1185 struct ath10k *ar = htt->ar; in ath10k_htt_mgmt_tx()
1200 res = ath10k_htt_tx_alloc_msdu_id(htt, msdu); in ath10k_htt_mgmt_tx()
1248 res = ath10k_htc_send(&htt->ar->htc, htt->eid, txdesc); in ath10k_htt_mgmt_tx()
1260 spin_lock_bh(&htt->tx_lock); in ath10k_htt_mgmt_tx()
1261 ath10k_htt_tx_free_msdu_id(htt, msdu_id); in ath10k_htt_mgmt_tx()
1262 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_mgmt_tx()
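ath10k_htt_mgmt_tx() claims an MSDU id before the send, so the failure path must give it back under tx_lock or both the id and the skb reference held by the IDR would leak. Skeleton of that pattern (build_and_send_desc is a hypothetical stand-in for the descriptor setup and the ath10k_htc_send call at line 1248):

    int mgmt_tx(struct ath10k_htt *htt, struct sk_buff *msdu)
    {
        int msdu_id, res;

        msdu_id = ath10k_htt_tx_alloc_msdu_id(htt, msdu);
        if (msdu_id < 0)
            return msdu_id;

        res = build_and_send_desc(htt, msdu, msdu_id);  /* hypothetical */
        if (res)
            goto err_free_msdu_id;

        return 0;

    err_free_msdu_id:
        spin_lock_bh(&htt->tx_lock);
        ath10k_htt_tx_free_msdu_id(htt, msdu_id);
        spin_unlock_bh(&htt->tx_lock);
        return res;
    }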
1272 static int ath10k_htt_tx_hl(struct ath10k_htt *htt, enum ath10k_hw_txrx_mode txmode, in ath10k_htt_tx_hl() argument
1275 struct ath10k *ar = htt->ar; in ath10k_htt_tx_hl()
1314 if (htt->disable_tx_comp) in ath10k_htt_tx_hl()
1336 ath10k_dbg(htt->ar, ATH10K_DBG_HTT, in ath10k_htt_tx_hl()
1342 ath10k_warn(htt->ar, "htt hl tx: Unable to realloc skb!\n"); in ath10k_htt_tx_hl()
1350 res = ath10k_htt_tx_alloc_msdu_id(htt, msdu); in ath10k_htt_tx_hl()
1381 res = ath10k_htc_send_hl(&htt->ar->htc, htt->eid, msdu); in ath10k_htt_tx_hl()
1387 static int ath10k_htt_tx_32(struct ath10k_htt *htt, in ath10k_htt_tx_32() argument
1391 struct ath10k *ar = htt->ar; in ath10k_htt_tx_32()
1411 res = ath10k_htt_tx_alloc_msdu_id(htt, msdu); in ath10k_htt_tx_32()
1417 prefetch_len = min(htt->prefetch_len, msdu->len); in ath10k_htt_tx_32()
1420 txbuf = htt->txbuf.vaddr_txbuff_32 + msdu_id; in ath10k_htt_tx_32()
1421 txbuf_paddr = htt->txbuf.paddr + in ath10k_htt_tx_32()
1457 ext_desc_t = htt->frag_desc.vaddr_desc_32; in ath10k_htt_tx_32()
1468 frags_paddr = htt->frag_desc.paddr + in ath10k_htt_tx_32()
1508 txbuf->htc_hdr.eid = htt->eid; in ath10k_htt_tx_32()
1574 res = ath10k_hif_tx_sg(htt->ar, in ath10k_htt_tx_32()
1575 htt->ar->htc.endpoint[htt->eid].ul_pipe_id, in ath10k_htt_tx_32()
1585 spin_lock_bh(&htt->tx_lock); in ath10k_htt_tx_32()
1586 ath10k_htt_tx_free_msdu_id(htt, msdu_id); in ath10k_htt_tx_32()
1587 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_tx_32()
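Here the arenas pay off: both the CPU pointer and the device address of a frame's TX descriptor (and of its fragment descriptor) are computed from the arena base plus msdu_id, with no per-packet DMA mapping for the descriptor itself. A generic helper capturing the indexing (struct arena is an illustrative abstraction, not a driver type):

    struct arena {
        void       *vaddr;                /* CPU-side base */
        dma_addr_t  paddr;                /* device-side base */
        size_t      elem_size;
    };

    /* return the CPU pointer for slot msdu_id and fill in its bus address */
    static void *arena_slot(const struct arena *a, u16 msdu_id,
                            dma_addr_t *paddr)
    {
        size_t off = (size_t)msdu_id * a->elem_size;

        *paddr = a->paddr + off;
        return (u8 *)a->vaddr + off;
    }

The 64-bit path (lines 1616-1799) is structurally identical, differing only in descriptor layout and physical-address width.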
1592 static int ath10k_htt_tx_64(struct ath10k_htt *htt, in ath10k_htt_tx_64() argument
1596 struct ath10k *ar = htt->ar; in ath10k_htt_tx_64()
1616 res = ath10k_htt_tx_alloc_msdu_id(htt, msdu); in ath10k_htt_tx_64()
1622 prefetch_len = min(htt->prefetch_len, msdu->len); in ath10k_htt_tx_64()
1625 txbuf = htt->txbuf.vaddr_txbuff_64 + msdu_id; in ath10k_htt_tx_64()
1626 txbuf_paddr = htt->txbuf.paddr + in ath10k_htt_tx_64()
1662 ext_desc_t = htt->frag_desc.vaddr_desc_64; in ath10k_htt_tx_64()
1674 frags_paddr = htt->frag_desc.paddr + in ath10k_htt_tx_64()
1715 txbuf->htc_hdr.eid = htt->eid; in ath10k_htt_tx_64()
1786 res = ath10k_hif_tx_sg(htt->ar, in ath10k_htt_tx_64()
1787 htt->ar->htc.endpoint[htt->eid].ul_pipe_id, in ath10k_htt_tx_64()
1797 spin_lock_bh(&htt->tx_lock); in ath10k_htt_tx_64()
1798 ath10k_htt_tx_free_msdu_id(htt, msdu_id); in ath10k_htt_tx_64()
1799 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_tx_64()
1834 void ath10k_htt_set_tx_ops(struct ath10k_htt *htt) in ath10k_htt_set_tx_ops() argument
1836 struct ath10k *ar = htt->ar; in ath10k_htt_set_tx_ops()
1839 htt->tx_ops = &htt_tx_ops_hl; in ath10k_htt_set_tx_ops()
1841 htt->tx_ops = &htt_tx_ops_64; in ath10k_htt_set_tx_ops()
1843 htt->tx_ops = &htt_tx_ops_32; in ath10k_htt_set_tx_ops()
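Finally, the 32-bit, 64-bit, and high-latency variants converge here: one set of tx_ops function pointers is chosen once at setup, which is why nearly every allocator and send path above exists in per-variant flavours. The selection conditions themselves are elided in the hits; the sketch below assumes a dispatch on device type and target word size, flagged accordingly:

    void set_tx_ops(struct ath10k_htt *htt)
    {
        struct ath10k *ar = htt->ar;

        if (ar->bus_param.dev_type == ATH10K_DEV_TYPE_HL)   /* assumed condition */
            htt->tx_ops = &htt_tx_ops_hl;
        else if (ar->hw_params.target_64bit)                /* assumed condition */
            htt->tx_ops = &htt_tx_ops_64;
        else
            htt->tx_ops = &htt_tx_ops_32;
    }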