Lines matching refs: htt

Cross-reference listing for the identifier htt in the ath10k HTT RX code: each entry shows the source line number, the matching line, and the enclosing function; entries tagged "argument" or "local" are the lines where htt is declared as a parameter or local variable.

34 	hash_for_each_possible(ar->htt.rx_ring.skb_table, rxcb, hlist, paddr)  in ath10k_htt_rx_find_skb_paddr()
42 static void ath10k_htt_rx_ring_free(struct ath10k_htt *htt) in ath10k_htt_rx_ring_free() argument
49 if (htt->rx_ring.in_ord_rx) { in ath10k_htt_rx_ring_free()
50 hash_for_each_safe(htt->rx_ring.skb_table, i, n, rxcb, hlist) { in ath10k_htt_rx_ring_free()
52 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_ring_free()
59 for (i = 0; i < htt->rx_ring.size; i++) { in ath10k_htt_rx_ring_free()
60 skb = htt->rx_ring.netbufs_ring[i]; in ath10k_htt_rx_ring_free()
65 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_ring_free()
72 htt->rx_ring.fill_cnt = 0; in ath10k_htt_rx_ring_free()
73 hash_init(htt->rx_ring.skb_table); in ath10k_htt_rx_ring_free()
74 memset(htt->rx_ring.netbufs_ring, 0, in ath10k_htt_rx_ring_free()
75 htt->rx_ring.size * sizeof(htt->rx_ring.netbufs_ring[0])); in ath10k_htt_rx_ring_free()
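
The ath10k_htt_rx_ring_free() entries above show the teardown: with in-order RX enabled the outstanding buffers live in the skb_table hash (keyed by DMA address) and are unmapped via hash_for_each_safe(); otherwise every netbufs_ring[] slot is walked. Either way fill_cnt is zeroed, the hash table re-initialised and the ring array cleared. Below is a minimal user-space model of the array branch (hypothetical reduced types, free() standing in for dma_unmap_single() plus freeing the skb); the later sketches reuse the same struct rx_ring.

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

struct buf { void *data; };          /* stand-in for struct sk_buff */

/* Reduced user-space model of htt->rx_ring; the later sketches reuse it. */
struct rx_ring {
    struct buf **netbufs_ring;       /* host-side owner of each buffer */
    uint64_t *paddrs_ring;           /* device-visible DMA address per slot */
    int size;                        /* number of slots, power of two */
    int size_mask;                   /* size - 1 */
    int fill_cnt;                    /* buffers currently handed to the device */
    int alloc_idx;                   /* producer index shared with the device */
    int sw_rd_idx;                   /* host consumer index */
};

/* Model of the array branch of ath10k_htt_rx_ring_free(): release every
 * still-populated slot, then reset the bookkeeping. */
static void rx_ring_free(struct rx_ring *r)
{
    for (int i = 0; i < r->size; i++) {
        struct buf *b = r->netbufs_ring[i];
        if (!b)
            continue;                /* slot already empty */
        free(b->data);               /* kernel: dma_unmap_single() then free the skb */
        free(b);
        r->netbufs_ring[i] = NULL;
    }
    r->fill_cnt = 0;
    memset(r->netbufs_ring, 0, r->size * sizeof(r->netbufs_ring[0]));
}
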
78 static size_t ath10k_htt_get_rx_ring_size_32(struct ath10k_htt *htt) in ath10k_htt_get_rx_ring_size_32() argument
80 return htt->rx_ring.size * sizeof(htt->rx_ring.paddrs_ring_32); in ath10k_htt_get_rx_ring_size_32()
83 static size_t ath10k_htt_get_rx_ring_size_64(struct ath10k_htt *htt) in ath10k_htt_get_rx_ring_size_64() argument
85 return htt->rx_ring.size * sizeof(htt->rx_ring.paddrs_ring_64); in ath10k_htt_get_rx_ring_size_64()
88 static void ath10k_htt_config_paddrs_ring_32(struct ath10k_htt *htt, in ath10k_htt_config_paddrs_ring_32() argument
91 htt->rx_ring.paddrs_ring_32 = vaddr; in ath10k_htt_config_paddrs_ring_32()
94 static void ath10k_htt_config_paddrs_ring_64(struct ath10k_htt *htt, in ath10k_htt_config_paddrs_ring_64() argument
97 htt->rx_ring.paddrs_ring_64 = vaddr; in ath10k_htt_config_paddrs_ring_64()
100 static void ath10k_htt_set_paddrs_ring_32(struct ath10k_htt *htt, in ath10k_htt_set_paddrs_ring_32() argument
103 htt->rx_ring.paddrs_ring_32[idx] = __cpu_to_le32(paddr); in ath10k_htt_set_paddrs_ring_32()
106 static void ath10k_htt_set_paddrs_ring_64(struct ath10k_htt *htt, in ath10k_htt_set_paddrs_ring_64() argument
109 htt->rx_ring.paddrs_ring_64[idx] = __cpu_to_le64(paddr); in ath10k_htt_set_paddrs_ring_64()
112 static void ath10k_htt_reset_paddrs_ring_32(struct ath10k_htt *htt, int idx) in ath10k_htt_reset_paddrs_ring_32() argument
114 htt->rx_ring.paddrs_ring_32[idx] = 0; in ath10k_htt_reset_paddrs_ring_32()
117 static void ath10k_htt_reset_paddrs_ring_64(struct ath10k_htt *htt, int idx) in ath10k_htt_reset_paddrs_ring_64() argument
119 htt->rx_ring.paddrs_ring_64[idx] = 0; in ath10k_htt_reset_paddrs_ring_64()
122 static void *ath10k_htt_get_vaddr_ring_32(struct ath10k_htt *htt) in ath10k_htt_get_vaddr_ring_32() argument
124 return (void *)htt->rx_ring.paddrs_ring_32; in ath10k_htt_get_vaddr_ring_32()
127 static void *ath10k_htt_get_vaddr_ring_64(struct ath10k_htt *htt) in ath10k_htt_get_vaddr_ring_64() argument
129 return (void *)htt->rx_ring.paddrs_ring_64; in ath10k_htt_get_vaddr_ring_64()
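
The *_32/*_64 helpers above exist because the paddrs ring shared with the device holds either little-endian 32-bit or 64-bit DMA addresses depending on the target, so sizing, publishing and clearing an entry each come in two flavours; the suffix-less wrappers seen elsewhere in this listing (ath10k_htt_get_rx_ring_size(), ath10k_htt_config_paddrs_ring(), ath10k_htt_set_paddrs_ring(), ath10k_htt_reset_paddrs_ring()) dispatch to one of them through the rx_ops table picked in ath10k_htt_set_rx_ops() at the bottom. A minimal standalone illustration of the width split, with the __cpu_to_le32/64 conversion omitted:

#include <stdint.h>
#include <stddef.h>

/* The two entry widths of the paddrs ring: 32-bit targets get 32-bit slots,
 * 64-bit targets get 64-bit slots. */
static size_t rx_ring_bytes_32(int nentries) { return nentries * sizeof(uint32_t); }
static size_t rx_ring_bytes_64(int nentries) { return nentries * sizeof(uint64_t); }

static void set_paddr_32(uint32_t *ring, uint64_t paddr, int idx)
{
    ring[idx] = (uint32_t)paddr;     /* kernel: __cpu_to_le32(paddr) */
}

static void set_paddr_64(uint64_t *ring, uint64_t paddr, int idx)
{
    ring[idx] = paddr;               /* kernel: __cpu_to_le64(paddr) */
}

static void reset_paddr_32(uint32_t *ring, int idx) { ring[idx] = 0; }
static void reset_paddr_64(uint64_t *ring, int idx) { ring[idx] = 0; }
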
132 static int __ath10k_htt_rx_ring_fill_n(struct ath10k_htt *htt, int num) in __ath10k_htt_rx_ring_fill_n() argument
134 struct ath10k_hw_params *hw = &htt->ar->hw_params; in __ath10k_htt_rx_ring_fill_n()
148 idx = __le32_to_cpu(*htt->rx_ring.alloc_idx.vaddr); in __ath10k_htt_rx_ring_fill_n()
150 if (idx < 0 || idx >= htt->rx_ring.size) { in __ath10k_htt_rx_ring_fill_n()
151 ath10k_err(htt->ar, "rx ring index is not valid, firmware malfunctioning?\n"); in __ath10k_htt_rx_ring_fill_n()
152 idx &= htt->rx_ring.size_mask; in __ath10k_htt_rx_ring_fill_n()
173 paddr = dma_map_single(htt->ar->dev, skb->data, in __ath10k_htt_rx_ring_fill_n()
177 if (unlikely(dma_mapping_error(htt->ar->dev, paddr))) { in __ath10k_htt_rx_ring_fill_n()
185 htt->rx_ring.netbufs_ring[idx] = skb; in __ath10k_htt_rx_ring_fill_n()
186 ath10k_htt_set_paddrs_ring(htt, paddr, idx); in __ath10k_htt_rx_ring_fill_n()
187 htt->rx_ring.fill_cnt++; in __ath10k_htt_rx_ring_fill_n()
189 if (htt->rx_ring.in_ord_rx) { in __ath10k_htt_rx_ring_fill_n()
190 hash_add(htt->rx_ring.skb_table, in __ath10k_htt_rx_ring_fill_n()
197 idx &= htt->rx_ring.size_mask; in __ath10k_htt_rx_ring_fill_n()
206 *htt->rx_ring.alloc_idx.vaddr = __cpu_to_le32(idx); in __ath10k_htt_rx_ring_fill_n()
210 static int ath10k_htt_rx_ring_fill_n(struct ath10k_htt *htt, int num) in ath10k_htt_rx_ring_fill_n() argument
212 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_ring_fill_n()
213 return __ath10k_htt_rx_ring_fill_n(htt, num); in ath10k_htt_rx_ring_fill_n()
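
The __ath10k_htt_rx_ring_fill_n() entries sketch the producer side: the shared alloc index is read and sanity-checked (hence the "firmware malfunctioning?" warning), each pass allocates and DMA-maps an skb, publishes it in netbufs_ring[] and the paddrs ring, bumps fill_cnt, advances the index with the power-of-two size_mask, and finally writes the index back for the device; ath10k_htt_rx_ring_fill_n() is only the wrapper that asserts rx_ring.lock is held. A simplified model of that index arithmetic on the struct rx_ring above (no DMA, no locking):

/* Model of __ath10k_htt_rx_ring_fill_n(): publish up to num new buffers and
 * advance the shared producer index.  Reuses struct buf / struct rx_ring and
 * the includes from the first sketch. */
static int rx_ring_fill_n(struct rx_ring *r, int num)
{
    int idx = r->alloc_idx;              /* kernel: __le32_to_cpu(*alloc_idx.vaddr) */
    int ret = 0;

    if (idx < 0 || idx >= r->size)       /* kernel warns about firmware misbehaviour */
        idx &= r->size_mask;

    while (num > 0) {
        struct buf *b = malloc(sizeof(*b));  /* kernel: dev_alloc_skb() + dma_map_single() */
        if (!b) {
            ret = -1;                    /* kernel: -ENOMEM, the retry timer refills later */
            break;
        }
        b->data = NULL;

        r->netbufs_ring[idx] = b;
        r->paddrs_ring[idx] = (uint64_t)(uintptr_t)b;  /* stand-in for the DMA address */
        r->fill_cnt++;

        idx = (idx + 1) & r->size_mask;
        num--;
    }

    r->alloc_idx = idx;                  /* kernel writes this back even on failure */
    return ret;
}
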
216 static void ath10k_htt_rx_msdu_buff_replenish(struct ath10k_htt *htt) in ath10k_htt_rx_msdu_buff_replenish() argument
236 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_msdu_buff_replenish()
237 num_deficit = htt->rx_ring.fill_level - htt->rx_ring.fill_cnt; in ath10k_htt_rx_msdu_buff_replenish()
240 ret = ath10k_htt_rx_ring_fill_n(htt, num_to_fill); in ath10k_htt_rx_msdu_buff_replenish()
248 mod_timer(&htt->rx_ring.refill_retry_timer, jiffies + in ath10k_htt_rx_msdu_buff_replenish()
251 mod_timer(&htt->rx_ring.refill_retry_timer, jiffies + in ath10k_htt_rx_msdu_buff_replenish()
254 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_msdu_buff_replenish()
259 struct ath10k_htt *htt = from_timer(htt, t, rx_ring.refill_retry_timer); in ath10k_htt_rx_ring_refill_retry() local
261 ath10k_htt_rx_msdu_buff_replenish(htt); in ath10k_htt_rx_ring_refill_retry()
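
ath10k_htt_rx_msdu_buff_replenish() tops the ring up toward fill_level under rx_ring.lock, filling only a bounded chunk (num_to_fill) per pass; if the fill fails, or a deficit remains, the refill_retry_timer is re-armed, and its callback ath10k_htt_rx_ring_refill_retry() simply calls the replenish routine again. A hedged sketch of that decision, without the locking and timer plumbing (MAX_NUM_REFILL is a stand-in for the driver's per-pass quota):

/* Model of ath10k_htt_rx_msdu_buff_replenish(), reusing rx_ring_fill_n() from
 * the sketch above.  Returns non-zero when the retry timer should fire again. */
#define MAX_NUM_REFILL 100

static int rx_msdu_buff_replenish(struct rx_ring *r, int fill_level)
{
    int num_deficit = fill_level - r->fill_cnt;
    int num_to_fill = num_deficit < MAX_NUM_REFILL ? num_deficit : MAX_NUM_REFILL;

    num_deficit -= num_to_fill;

    if (rx_ring_fill_n(r, num_to_fill) != 0)
        return 1;                /* allocation failed: retry shortly (refill_retry_timer) */

    return num_deficit > 0;      /* still short: re-arm the timer to keep topping up */
}
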
266 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_ring_refill() local
272 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_ring_refill()
273 ret = ath10k_htt_rx_ring_fill_n(htt, (htt->rx_ring.fill_level - in ath10k_htt_rx_ring_refill()
274 htt->rx_ring.fill_cnt)); in ath10k_htt_rx_ring_refill()
277 ath10k_htt_rx_ring_free(htt); in ath10k_htt_rx_ring_refill()
279 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_ring_refill()
284 void ath10k_htt_rx_free(struct ath10k_htt *htt) in ath10k_htt_rx_free() argument
286 if (htt->ar->bus_param.dev_type == ATH10K_DEV_TYPE_HL) in ath10k_htt_rx_free()
289 del_timer_sync(&htt->rx_ring.refill_retry_timer); in ath10k_htt_rx_free()
291 skb_queue_purge(&htt->rx_msdus_q); in ath10k_htt_rx_free()
292 skb_queue_purge(&htt->rx_in_ord_compl_q); in ath10k_htt_rx_free()
293 skb_queue_purge(&htt->tx_fetch_ind_q); in ath10k_htt_rx_free()
295 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_free()
296 ath10k_htt_rx_ring_free(htt); in ath10k_htt_rx_free()
297 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_free()
299 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_free()
300 ath10k_htt_get_rx_ring_size(htt), in ath10k_htt_rx_free()
301 ath10k_htt_get_vaddr_ring(htt), in ath10k_htt_rx_free()
302 htt->rx_ring.base_paddr); in ath10k_htt_rx_free()
304 ath10k_htt_config_paddrs_ring(htt, NULL); in ath10k_htt_rx_free()
306 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_free()
307 sizeof(*htt->rx_ring.alloc_idx.vaddr), in ath10k_htt_rx_free()
308 htt->rx_ring.alloc_idx.vaddr, in ath10k_htt_rx_free()
309 htt->rx_ring.alloc_idx.paddr); in ath10k_htt_rx_free()
310 htt->rx_ring.alloc_idx.vaddr = NULL; in ath10k_htt_rx_free()
312 kfree(htt->rx_ring.netbufs_ring); in ath10k_htt_rx_free()
313 htt->rx_ring.netbufs_ring = NULL; in ath10k_htt_rx_free()
316 static inline struct sk_buff *ath10k_htt_rx_netbuf_pop(struct ath10k_htt *htt) in ath10k_htt_rx_netbuf_pop() argument
318 struct ath10k *ar = htt->ar; in ath10k_htt_rx_netbuf_pop()
322 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_netbuf_pop()
324 if (htt->rx_ring.fill_cnt == 0) { in ath10k_htt_rx_netbuf_pop()
329 idx = htt->rx_ring.sw_rd_idx.msdu_payld; in ath10k_htt_rx_netbuf_pop()
330 msdu = htt->rx_ring.netbufs_ring[idx]; in ath10k_htt_rx_netbuf_pop()
331 htt->rx_ring.netbufs_ring[idx] = NULL; in ath10k_htt_rx_netbuf_pop()
332 ath10k_htt_reset_paddrs_ring(htt, idx); in ath10k_htt_rx_netbuf_pop()
335 idx &= htt->rx_ring.size_mask; in ath10k_htt_rx_netbuf_pop()
336 htt->rx_ring.sw_rd_idx.msdu_payld = idx; in ath10k_htt_rx_netbuf_pop()
337 htt->rx_ring.fill_cnt--; in ath10k_htt_rx_netbuf_pop()
339 dma_unmap_single(htt->ar->dev, in ath10k_htt_rx_netbuf_pop()
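
ath10k_htt_rx_netbuf_pop() is the consuming counterpart: it takes the skb at sw_rd_idx.msdu_payld, clears both the netbufs_ring[] slot and the paddrs-ring entry, advances the read index with the size mask, decrements fill_cnt and unmaps the buffer. The matching sketch on the model ring:

/* Model of ath10k_htt_rx_netbuf_pop(): hand one buffer back from the ring to
 * the host rx path. */
static struct buf *rx_netbuf_pop(struct rx_ring *r)
{
    struct buf *b;
    int idx = r->sw_rd_idx;

    if (r->fill_cnt == 0)
        return NULL;                 /* nothing outstanding; the kernel warns here */

    b = r->netbufs_ring[idx];
    r->netbufs_ring[idx] = NULL;     /* slot no longer owned by the device */
    r->paddrs_ring[idx] = 0;         /* kernel: ath10k_htt_reset_paddrs_ring() */

    idx = (idx + 1) & r->size_mask;
    r->sw_rd_idx = idx;
    r->fill_cnt--;

    /* kernel: dma_unmap_single() before the skb is passed up the rx path */
    return b;
}
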
350 static int ath10k_htt_rx_amsdu_pop(struct ath10k_htt *htt, in ath10k_htt_rx_amsdu_pop() argument
353 struct ath10k *ar = htt->ar; in ath10k_htt_rx_amsdu_pop()
363 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_amsdu_pop()
368 msdu = ath10k_htt_rx_netbuf_pop(htt); in ath10k_htt_rx_amsdu_pop()
419 msdu = ath10k_htt_rx_netbuf_pop(htt); in ath10k_htt_rx_amsdu_pop()
462 static struct sk_buff *ath10k_htt_rx_pop_paddr(struct ath10k_htt *htt, in ath10k_htt_rx_pop_paddr() argument
465 struct ath10k *ar = htt->ar; in ath10k_htt_rx_pop_paddr()
469 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_pop_paddr()
477 htt->rx_ring.fill_cnt--; in ath10k_htt_rx_pop_paddr()
479 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_pop_paddr()
497 static int ath10k_htt_rx_handle_amsdu_mon_32(struct ath10k_htt *htt, in ath10k_htt_rx_handle_amsdu_mon_32() argument
501 struct ath10k *ar = htt->ar; in ath10k_htt_rx_handle_amsdu_mon_32()
531 frag_buf = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_handle_amsdu_mon_32()
546 frag_buf = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_handle_amsdu_mon_32()
574 ath10k_htt_rx_handle_amsdu_mon_64(struct ath10k_htt *htt, in ath10k_htt_rx_handle_amsdu_mon_64() argument
578 struct ath10k *ar = htt->ar; in ath10k_htt_rx_handle_amsdu_mon_64()
608 frag_buf = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_handle_amsdu_mon_64()
623 frag_buf = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_handle_amsdu_mon_64()
650 static int ath10k_htt_rx_pop_paddr32_list(struct ath10k_htt *htt, in ath10k_htt_rx_pop_paddr32_list() argument
654 struct ath10k *ar = htt->ar; in ath10k_htt_rx_pop_paddr32_list()
664 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_pop_paddr32_list()
672 msdu = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_pop_paddr32_list()
679 ret = ath10k_htt_rx_handle_amsdu_mon_32(htt, msdu, in ath10k_htt_rx_pop_paddr32_list()
704 ath10k_warn(htt->ar, "tried to pop an incomplete frame, oops!\n"); in ath10k_htt_rx_pop_paddr32_list()
715 static int ath10k_htt_rx_pop_paddr64_list(struct ath10k_htt *htt, in ath10k_htt_rx_pop_paddr64_list() argument
719 struct ath10k *ar = htt->ar; in ath10k_htt_rx_pop_paddr64_list()
729 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_pop_paddr64_list()
736 msdu = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_pop_paddr64_list()
743 ret = ath10k_htt_rx_handle_amsdu_mon_64(htt, msdu, in ath10k_htt_rx_pop_paddr64_list()
768 ath10k_warn(htt->ar, "tried to pop an incomplete frame, oops!\n"); in ath10k_htt_rx_pop_paddr64_list()
779 int ath10k_htt_rx_alloc(struct ath10k_htt *htt) in ath10k_htt_rx_alloc() argument
781 struct ath10k *ar = htt->ar; in ath10k_htt_rx_alloc()
785 struct timer_list *timer = &htt->rx_ring.refill_retry_timer; in ath10k_htt_rx_alloc()
790 htt->rx_confused = false; in ath10k_htt_rx_alloc()
795 htt->rx_ring.size = HTT_RX_RING_SIZE; in ath10k_htt_rx_alloc()
796 htt->rx_ring.size_mask = htt->rx_ring.size - 1; in ath10k_htt_rx_alloc()
797 htt->rx_ring.fill_level = ar->hw_params.rx_ring_fill_level; in ath10k_htt_rx_alloc()
799 if (!is_power_of_2(htt->rx_ring.size)) { in ath10k_htt_rx_alloc()
804 htt->rx_ring.netbufs_ring = in ath10k_htt_rx_alloc()
805 kcalloc(htt->rx_ring.size, sizeof(struct sk_buff *), in ath10k_htt_rx_alloc()
807 if (!htt->rx_ring.netbufs_ring) in ath10k_htt_rx_alloc()
810 size = ath10k_htt_get_rx_ring_size(htt); in ath10k_htt_rx_alloc()
812 vaddr_ring = dma_alloc_coherent(htt->ar->dev, size, &paddr, GFP_KERNEL); in ath10k_htt_rx_alloc()
816 ath10k_htt_config_paddrs_ring(htt, vaddr_ring); in ath10k_htt_rx_alloc()
817 htt->rx_ring.base_paddr = paddr; in ath10k_htt_rx_alloc()
819 vaddr = dma_alloc_coherent(htt->ar->dev, in ath10k_htt_rx_alloc()
820 sizeof(*htt->rx_ring.alloc_idx.vaddr), in ath10k_htt_rx_alloc()
825 htt->rx_ring.alloc_idx.vaddr = vaddr; in ath10k_htt_rx_alloc()
826 htt->rx_ring.alloc_idx.paddr = paddr; in ath10k_htt_rx_alloc()
827 htt->rx_ring.sw_rd_idx.msdu_payld = htt->rx_ring.size_mask; in ath10k_htt_rx_alloc()
828 *htt->rx_ring.alloc_idx.vaddr = 0; in ath10k_htt_rx_alloc()
833 spin_lock_init(&htt->rx_ring.lock); in ath10k_htt_rx_alloc()
835 htt->rx_ring.fill_cnt = 0; in ath10k_htt_rx_alloc()
836 htt->rx_ring.sw_rd_idx.msdu_payld = 0; in ath10k_htt_rx_alloc()
837 hash_init(htt->rx_ring.skb_table); in ath10k_htt_rx_alloc()
839 skb_queue_head_init(&htt->rx_msdus_q); in ath10k_htt_rx_alloc()
840 skb_queue_head_init(&htt->rx_in_ord_compl_q); in ath10k_htt_rx_alloc()
841 skb_queue_head_init(&htt->tx_fetch_ind_q); in ath10k_htt_rx_alloc()
842 atomic_set(&htt->num_mpdus_ready, 0); in ath10k_htt_rx_alloc()
845 htt->rx_ring.size, htt->rx_ring.fill_level); in ath10k_htt_rx_alloc()
849 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_alloc()
850 ath10k_htt_get_rx_ring_size(htt), in ath10k_htt_rx_alloc()
852 htt->rx_ring.base_paddr); in ath10k_htt_rx_alloc()
853 ath10k_htt_config_paddrs_ring(htt, NULL); in ath10k_htt_rx_alloc()
855 kfree(htt->rx_ring.netbufs_ring); in ath10k_htt_rx_alloc()
856 htt->rx_ring.netbufs_ring = NULL; in ath10k_htt_rx_alloc()
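
ath10k_htt_rx_alloc() does the setup in a fixed order: the ring size must be a power of two (so the masking above is valid), the netbufs_ring array is kcalloc'ed, the paddrs ring and the shared alloc_idx word are dma_alloc_coherent'ed, then the retry timer, spinlock, hash table and skb queues are initialised; the trailing dma_free_coherent()/kfree() entries are the error path unwinding in reverse. A hedged user-space model of the ordering and unwinding (RX_RING_SIZE and calloc() stand in for HTT_RX_RING_SIZE and the kernel allocators):

#define RX_RING_SIZE 2048

/* Model of the setup/unwind order in ath10k_htt_rx_alloc(). */
static int rx_alloc(struct rx_ring *r)
{
    r->size = RX_RING_SIZE;
    r->size_mask = r->size - 1;

    if (r->size & (r->size - 1))     /* kernel: !is_power_of_2() -> bail out */
        return -1;

    r->netbufs_ring = calloc(r->size, sizeof(r->netbufs_ring[0]));
    if (!r->netbufs_ring)
        return -1;

    r->paddrs_ring = calloc(r->size, sizeof(r->paddrs_ring[0]));
    if (!r->paddrs_ring)
        goto err_dma_ring;

    r->fill_cnt = 0;
    r->alloc_idx = 0;
    r->sw_rd_idx = 0;
    /* the kernel additionally allocates the shared alloc_idx word and sets up
     * the refill retry timer, spin_lock_init(), hash_init() and the skb queues */
    return 0;

err_dma_ring:
    free(r->netbufs_ring);
    r->netbufs_ring = NULL;
    return -1;
}
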
1356 skb_queue_tail(&ar->htt.rx_msdus_q, skb); in ath10k_htt_rx_h_queue_msdu()
2317 static int ath10k_htt_rx_handle_amsdu(struct ath10k_htt *htt) in ath10k_htt_rx_handle_amsdu() argument
2319 struct ath10k *ar = htt->ar; in ath10k_htt_rx_handle_amsdu()
2320 struct ieee80211_rx_status *rx_status = &htt->rx_status; in ath10k_htt_rx_handle_amsdu()
2332 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_handle_amsdu()
2333 if (htt->rx_confused) { in ath10k_htt_rx_handle_amsdu()
2334 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_handle_amsdu()
2337 ret = ath10k_htt_rx_amsdu_pop(htt, &amsdu); in ath10k_htt_rx_handle_amsdu()
2338 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_handle_amsdu()
2346 htt->rx_confused = true; in ath10k_htt_rx_handle_amsdu()
2451 static bool ath10k_htt_rx_proc_rx_ind_hl(struct ath10k_htt *htt, in ath10k_htt_rx_proc_rx_ind_hl() argument
2457 struct ath10k *ar = htt->ar; in ath10k_htt_rx_proc_rx_ind_hl()
2755 static bool ath10k_htt_rx_proc_rx_frag_ind_hl(struct ath10k_htt *htt, in ath10k_htt_rx_proc_rx_frag_ind_hl() argument
2759 struct ath10k *ar = htt->ar; in ath10k_htt_rx_proc_rx_frag_ind_hl()
2812 return ath10k_htt_rx_proc_rx_ind_hl(htt, &resp->rx_ind_hl, skb, in ath10k_htt_rx_proc_rx_frag_ind_hl()
2874 return ath10k_htt_rx_proc_rx_ind_hl(htt, &resp->rx_ind_hl, skb, in ath10k_htt_rx_proc_rx_frag_ind_hl()
2901 return ath10k_htt_rx_proc_rx_ind_hl(htt, &resp->rx_ind_hl, skb, in ath10k_htt_rx_proc_rx_frag_ind_hl()
2913 static void ath10k_htt_rx_proc_rx_ind_ll(struct ath10k_htt *htt, in ath10k_htt_rx_proc_rx_ind_ll() argument
2916 struct ath10k *ar = htt->ar; in ath10k_htt_rx_proc_rx_ind_ll()
2936 atomic_add(mpdu_count, &htt->num_mpdus_ready); in ath10k_htt_rx_proc_rx_ind_ll()
2945 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_tx_compl_ind() local
3014 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_rx_tx_compl_ind()
3015 } else if (!kfifo_put(&htt->txdone_fifo, tx_done)) { in ath10k_htt_rx_tx_compl_ind()
3018 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_rx_tx_compl_ind()
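
The ath10k_htt_rx_tx_compl_ind() entries show that tx completions are normally stashed in txdone_fifo for batched handling in the completion task, and only processed inline when the fifo cannot take them (kfifo_put() returns 0 when full). A tiny hedged sketch of that defer-or-fallback pattern (fifo_put() and tx_unref() are hypothetical stand-ins):

/* Model of the fallback in ath10k_htt_rx_tx_compl_ind(): prefer deferring the
 * completion into a fifo, complete it immediately if the fifo is full. */
static void tx_compl_one(int tx_done,
                         int (*fifo_put)(int item),   /* returns 0 when full */
                         void (*tx_unref)(int item))
{
    if (!fifo_put(tx_done))
        tx_unref(tx_done);       /* fifo full: complete inline instead of deferring */
}
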
3211 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_h_rx_offload() local
3212 struct ieee80211_rx_status *status = &htt->rx_status; in ath10k_htt_rx_h_rx_offload()
3260 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_in_ord_ind() local
3262 struct ieee80211_rx_status *status = &htt->rx_status; in ath10k_htt_rx_in_ord_ind()
3273 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_in_ord_ind()
3275 if (htt->rx_confused) in ath10k_htt_rx_in_ord_ind()
3303 ret = ath10k_htt_rx_pop_paddr64_list(htt, &resp->rx_in_ord_ind, in ath10k_htt_rx_in_ord_ind()
3306 ret = ath10k_htt_rx_pop_paddr32_list(htt, &resp->rx_in_ord_ind, in ath10k_htt_rx_in_ord_ind()
3311 htt->rx_confused = true; in ath10k_htt_rx_in_ord_ind()
3343 htt->rx_confused = true; in ath10k_htt_rx_in_ord_ind()
3414 if (!ar->htt.tx_q_state.enabled) { in ath10k_htt_rx_tx_fetch_ind()
3419 if (ar->htt.tx_q_state.mode == HTT_TX_MODE_SWITCH_PUSH) { in ath10k_htt_rx_tx_fetch_ind()
3438 if (unlikely(peer_id >= ar->htt.tx_q_state.num_peers) || in ath10k_htt_rx_tx_fetch_ind()
3439 unlikely(tid >= ar->htt.tx_q_state.num_tids)) { in ath10k_htt_rx_tx_fetch_ind()
3590 ar->htt.tx_q_state.enabled = enable; in ath10k_htt_rx_tx_mode_switch_ind()
3591 ar->htt.tx_q_state.mode = mode; in ath10k_htt_rx_tx_mode_switch_ind()
3592 ar->htt.tx_q_state.num_push_allowed = threshold; in ath10k_htt_rx_tx_mode_switch_ind()
3602 if (unlikely(peer_id >= ar->htt.tx_q_state.num_peers) || in ath10k_htt_rx_tx_mode_switch_ind()
3603 unlikely(tid >= ar->htt.tx_q_state.num_tids)) { in ath10k_htt_rx_tx_mode_switch_ind()
3623 spin_lock_bh(&ar->htt.tx_lock); in ath10k_htt_rx_tx_mode_switch_ind()
3626 spin_unlock_bh(&ar->htt.tx_lock); in ath10k_htt_rx_tx_mode_switch_ind()
3900 if (ar->htt.disable_tx_comp) { in ath10k_update_per_peer_tx_stats()
4073 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_t2h_msg_handler() local
4084 if (resp->hdr.msg_type >= ar->htt.t2h_msg_types_max) { in ath10k_htt_t2h_msg_handler()
4086 resp->hdr.msg_type, ar->htt.t2h_msg_types_max); in ath10k_htt_t2h_msg_handler()
4089 type = ar->htt.t2h_msg_types[resp->hdr.msg_type]; in ath10k_htt_t2h_msg_handler()
4093 htt->target_version_major = resp->ver_resp.major; in ath10k_htt_t2h_msg_handler()
4094 htt->target_version_minor = resp->ver_resp.minor; in ath10k_htt_t2h_msg_handler()
4095 complete(&htt->target_version_received); in ath10k_htt_t2h_msg_handler()
4100 ath10k_htt_rx_proc_rx_ind_ll(htt, &resp->rx_ind); in ath10k_htt_t2h_msg_handler()
4102 skb_queue_tail(&htt->rx_indication_head, skb); in ath10k_htt_t2h_msg_handler()
4112 ath10k_peer_map_event(htt, &ev); in ath10k_htt_t2h_msg_handler()
4119 ath10k_peer_unmap_event(htt, &ev); in ath10k_htt_t2h_msg_handler()
4124 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_t2h_msg_handler() local
4126 struct ath10k_htc_ep *ep = &ar->htc.endpoint[htt->eid]; in ath10k_htt_t2h_msg_handler()
4152 if (htt->disable_tx_comp) { in ath10k_htt_t2h_msg_handler()
4158 status = ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_t2h_msg_handler()
4160 spin_lock_bh(&htt->tx_lock); in ath10k_htt_t2h_msg_handler()
4161 ath10k_htt_tx_mgmt_dec_pending(htt); in ath10k_htt_t2h_msg_handler()
4162 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_t2h_msg_handler()
4167 ath10k_htt_rx_tx_compl_ind(htt->ar, skb); in ath10k_htt_t2h_msg_handler()
4170 struct ath10k *ar = htt->ar; in ath10k_htt_t2h_msg_handler()
4185 atomic_inc(&htt->num_mpdus_ready); in ath10k_htt_t2h_msg_handler()
4187 return ath10k_htt_rx_proc_rx_frag_ind(htt, in ath10k_htt_t2h_msg_handler()
4228 skb_queue_tail(&htt->rx_in_ord_compl_q, skb); in ath10k_htt_t2h_msg_handler()
4232 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_t2h_msg_handler() local
4234 struct ath10k_htc_ep *ep = &ar->htc.endpoint[htt->eid]; in ath10k_htt_t2h_msg_handler()
4246 if (htt->disable_tx_comp) { in ath10k_htt_t2h_msg_handler()
4276 skb_queue_tail(&htt->tx_fetch_ind_q, tx_fetch_ind); in ath10k_htt_t2h_msg_handler()
4313 if (skb_queue_empty(&ar->htt.rx_msdus_q)) in ath10k_htt_rx_deliver_msdu()
4316 skb = skb_dequeue(&ar->htt.rx_msdus_q); in ath10k_htt_rx_deliver_msdu()
4329 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_hl_indication() local
4335 skb = skb_dequeue(&htt->rx_indication_head); in ath10k_htt_rx_hl_indication()
4341 release = ath10k_htt_rx_proc_rx_ind_hl(htt, in ath10k_htt_rx_hl_indication()
4351 skb_queue_len(&htt->rx_indication_head)); in ath10k_htt_rx_hl_indication()
4359 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_txrx_compl_task() local
4378 while ((skb = skb_dequeue(&htt->rx_in_ord_compl_q))) { in ath10k_htt_txrx_compl_task()
4379 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_txrx_compl_task()
4381 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_txrx_compl_task()
4390 while (atomic_read(&htt->num_mpdus_ready)) { in ath10k_htt_txrx_compl_task()
4391 ret = ath10k_htt_rx_handle_amsdu(htt); in ath10k_htt_txrx_compl_task()
4396 atomic_dec(&htt->num_mpdus_ready); in ath10k_htt_txrx_compl_task()
4407 if ((quota < budget) && !kfifo_is_empty(&htt->txdone_fifo)) in ath10k_htt_txrx_compl_task()
4415 while (kfifo_get(&htt->txdone_fifo, &tx_done)) in ath10k_htt_txrx_compl_task()
4416 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_txrx_compl_task()
4420 spin_lock_irqsave(&htt->tx_fetch_ind_q.lock, flags); in ath10k_htt_txrx_compl_task()
4421 skb_queue_splice_init(&htt->tx_fetch_ind_q, &tx_ind_q); in ath10k_htt_txrx_compl_task()
4422 spin_unlock_irqrestore(&htt->tx_fetch_ind_q.lock, flags); in ath10k_htt_txrx_compl_task()
4430 ath10k_htt_rx_msdu_buff_replenish(htt); in ath10k_htt_txrx_compl_task()
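
The work deferred from the message handler is drained in ath10k_htt_txrx_compl_task() within a NAPI budget: already-decoded MSDUs are delivered first, in-order completions and pending MPDUs are processed (taking rx_ring.lock where needed), tx completions are pulled from txdone_fifo, tx_fetch indications are spliced out under their queue lock, and the rx ring is replenished on the way out. A loose sketch of the budget accounting only (deliver_msdus() and handle_amsdus() are hypothetical stand-ins):

/* Compact model of the budget flow in ath10k_htt_txrx_compl_task(); the return
 * value is the budget consumed, which NAPI uses to decide whether to repoll. */
static int txrx_compl_task(int budget,
                           int (*deliver_msdus)(int quota, int budget),
                           int (*handle_amsdus)(void),
                           int txdone_pending)
{
    int quota = 0;
    int resched = 0;

    quota = deliver_msdus(quota, budget);     /* flush already-decoded frames first */
    if (quota == budget) {
        resched = 1;                          /* budget gone, ask NAPI to poll again */
        goto exit;
    }

    if (handle_amsdus() != 0) {               /* pop and decode pending MPDUs */
        resched = 1;
        goto exit;
    }

    quota = deliver_msdus(quota, budget);     /* deliver what was just decoded */

    if (quota < budget && txdone_pending)
        quota = budget;                       /* count tx-completion work as full budget */

exit:
    /* kernel: replenish the rx ring before returning */
    return resched ? budget : quota;
}
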
4460 void ath10k_htt_set_rx_ops(struct ath10k_htt *htt) in ath10k_htt_set_rx_ops() argument
4462 struct ath10k *ar = htt->ar; in ath10k_htt_set_rx_ops()
4465 htt->rx_ops = &htt_rx_ops_hl; in ath10k_htt_set_rx_ops()
4467 htt->rx_ops = &htt_rx_ops_64; in ath10k_htt_set_rx_ops()
4469 htt->rx_ops = &htt_rx_ops_32; in ath10k_htt_set_rx_ops()
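
ath10k_htt_set_rx_ops() picks one of three ops tables once at init: the high-latency table for HL bus types, otherwise the 64-bit or 32-bit table according to the target's addressing width; the _32/_64 helpers near the top of this listing are reached through whichever table was chosen. A small standalone sketch of the same selection pattern (the ops members shown are illustrative only):

#include <stdbool.h>
#include <stddef.h>

/* Model of the selection in ath10k_htt_set_rx_ops(): one ops table per target
 * flavour, chosen once at init. */
struct htt_rx_ops {
    size_t (*get_rx_ring_size)(int nentries);
    void   (*config_paddrs_ring)(void *vaddr);
};

static const struct htt_rx_ops htt_rx_ops_32 = { NULL, NULL };  /* 32-bit paddrs-ring helpers */
static const struct htt_rx_ops htt_rx_ops_64 = { NULL, NULL };  /* 64-bit paddrs-ring helpers */
static const struct htt_rx_ops htt_rx_ops_hl = { NULL, NULL };  /* high-latency rx path */

static const struct htt_rx_ops *set_rx_ops(bool hl_dev, bool target_64bit)
{
    if (hl_dev)
        return &htt_rx_ops_hl;
    if (target_64bit)
        return &htt_rx_ops_64;
    return &htt_rx_ops_32;
}
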