Lines matching refs: htt

38 	hash_for_each_possible(ar->htt.rx_ring.skb_table, rxcb, hlist, paddr)  in ath10k_htt_rx_find_skb_paddr()
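
Note: the entry above is from ath10k_htt_rx_find_skb_paddr(), which looks up a previously posted sk_buff by the DMA address the firmware echoes back. As a rough user-space analogue (not the driver's code), the pattern is a small chained hash table keyed by that address; paddr_map, paddr_map_add and paddr_map_find below are invented names for illustration only.

#include <stdint.h>
#include <stdlib.h>

/* Hypothetical user-space analogue of hashing posted rx buffers by DMA address. */
#define MAP_BUCKETS 32

struct paddr_entry {
	uint64_t paddr;             /* key: bus/DMA address echoed by the device */
	void *buf;                  /* value: the buffer posted at that address */
	struct paddr_entry *next;   /* chain for bucket collisions */
};

struct paddr_map {
	struct paddr_entry *bucket[MAP_BUCKETS];
};

static unsigned int paddr_hash(uint64_t paddr)
{
	return (unsigned int)(paddr >> 4) % MAP_BUCKETS;  /* toy hash function */
}

static int paddr_map_add(struct paddr_map *m, uint64_t paddr, void *buf)
{
	struct paddr_entry *e = malloc(sizeof(*e));

	if (!e)
		return -1;
	e->paddr = paddr;
	e->buf = buf;
	e->next = m->bucket[paddr_hash(paddr)];
	m->bucket[paddr_hash(paddr)] = e;
	return 0;
}

static void *paddr_map_find(struct paddr_map *m, uint64_t paddr)
{
	struct paddr_entry *e;

	for (e = m->bucket[paddr_hash(paddr)]; e; e = e->next)
		if (e->paddr == paddr)
			return e->buf;
	return NULL;  /* no buffer was posted at this address */
}
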
46 static void ath10k_htt_rx_ring_free(struct ath10k_htt *htt) in ath10k_htt_rx_ring_free() argument
53 if (htt->rx_ring.in_ord_rx) { in ath10k_htt_rx_ring_free()
54 hash_for_each_safe(htt->rx_ring.skb_table, i, n, rxcb, hlist) { in ath10k_htt_rx_ring_free()
56 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_ring_free()
63 for (i = 0; i < htt->rx_ring.size; i++) { in ath10k_htt_rx_ring_free()
64 skb = htt->rx_ring.netbufs_ring[i]; in ath10k_htt_rx_ring_free()
69 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_ring_free()
76 htt->rx_ring.fill_cnt = 0; in ath10k_htt_rx_ring_free()
77 hash_init(htt->rx_ring.skb_table); in ath10k_htt_rx_ring_free()
78 memset(htt->rx_ring.netbufs_ring, 0, in ath10k_htt_rx_ring_free()
79 htt->rx_ring.size * sizeof(htt->rx_ring.netbufs_ring[0])); in ath10k_htt_rx_ring_free()
82 static size_t ath10k_htt_get_rx_ring_size_32(struct ath10k_htt *htt) in ath10k_htt_get_rx_ring_size_32() argument
84 return htt->rx_ring.size * sizeof(htt->rx_ring.paddrs_ring_32); in ath10k_htt_get_rx_ring_size_32()
87 static size_t ath10k_htt_get_rx_ring_size_64(struct ath10k_htt *htt) in ath10k_htt_get_rx_ring_size_64() argument
89 return htt->rx_ring.size * sizeof(htt->rx_ring.paddrs_ring_64); in ath10k_htt_get_rx_ring_size_64()
92 static void ath10k_htt_config_paddrs_ring_32(struct ath10k_htt *htt, in ath10k_htt_config_paddrs_ring_32() argument
95 htt->rx_ring.paddrs_ring_32 = vaddr; in ath10k_htt_config_paddrs_ring_32()
98 static void ath10k_htt_config_paddrs_ring_64(struct ath10k_htt *htt, in ath10k_htt_config_paddrs_ring_64() argument
101 htt->rx_ring.paddrs_ring_64 = vaddr; in ath10k_htt_config_paddrs_ring_64()
104 static void ath10k_htt_set_paddrs_ring_32(struct ath10k_htt *htt, in ath10k_htt_set_paddrs_ring_32() argument
107 htt->rx_ring.paddrs_ring_32[idx] = __cpu_to_le32(paddr); in ath10k_htt_set_paddrs_ring_32()
110 static void ath10k_htt_set_paddrs_ring_64(struct ath10k_htt *htt, in ath10k_htt_set_paddrs_ring_64() argument
113 htt->rx_ring.paddrs_ring_64[idx] = __cpu_to_le64(paddr); in ath10k_htt_set_paddrs_ring_64()
116 static void ath10k_htt_reset_paddrs_ring_32(struct ath10k_htt *htt, int idx) in ath10k_htt_reset_paddrs_ring_32() argument
118 htt->rx_ring.paddrs_ring_32[idx] = 0; in ath10k_htt_reset_paddrs_ring_32()
121 static void ath10k_htt_reset_paddrs_ring_64(struct ath10k_htt *htt, int idx) in ath10k_htt_reset_paddrs_ring_64() argument
123 htt->rx_ring.paddrs_ring_64[idx] = 0; in ath10k_htt_reset_paddrs_ring_64()
126 static void *ath10k_htt_get_vaddr_ring_32(struct ath10k_htt *htt) in ath10k_htt_get_vaddr_ring_32() argument
128 return (void *)htt->rx_ring.paddrs_ring_32; in ath10k_htt_get_vaddr_ring_32()
131 static void *ath10k_htt_get_vaddr_ring_64(struct ath10k_htt *htt) in ath10k_htt_get_vaddr_ring_64() argument
133 return (void *)htt->rx_ring.paddrs_ring_64; in ath10k_htt_get_vaddr_ring_64()
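
Note: the _32/_64 helpers above write buffer addresses into a descriptor ring the device reads, converting to little-endian first (__cpu_to_le32/__cpu_to_le64). Below is a minimal portable sketch of that conversion step only, not the kernel macros; store_le32, store_le64 and set_paddr_slot_* are made-up names.

#include <stdint.h>

/* Hypothetical stand-ins for __cpu_to_le32/__cpu_to_le64: store the value
 * byte by byte in little-endian order, independent of host byte order. */
static void store_le32(uint8_t *dst, uint32_t v)
{
	dst[0] = v & 0xff;
	dst[1] = (v >> 8) & 0xff;
	dst[2] = (v >> 16) & 0xff;
	dst[3] = (v >> 24) & 0xff;
}

static void store_le64(uint8_t *dst, uint64_t v)
{
	store_le32(dst, (uint32_t)v);
	store_le32(dst + 4, (uint32_t)(v >> 32));
}

/* Writing slot idx of a descriptor ring shared with a little-endian device. */
static void set_paddr_slot_32(uint8_t *ring, int idx, uint32_t paddr)
{
	store_le32(ring + 4 * idx, paddr);
}

static void set_paddr_slot_64(uint8_t *ring, int idx, uint64_t paddr)
{
	store_le64(ring + 8 * idx, paddr);
}
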
136 static int __ath10k_htt_rx_ring_fill_n(struct ath10k_htt *htt, int num) in __ath10k_htt_rx_ring_fill_n() argument
138 struct ath10k_hw_params *hw = &htt->ar->hw_params; in __ath10k_htt_rx_ring_fill_n()
152 idx = __le32_to_cpu(*htt->rx_ring.alloc_idx.vaddr); in __ath10k_htt_rx_ring_fill_n()
154 if (idx < 0 || idx >= htt->rx_ring.size) { in __ath10k_htt_rx_ring_fill_n()
155 ath10k_err(htt->ar, "rx ring index is not valid, firmware malfunctioning?\n"); in __ath10k_htt_rx_ring_fill_n()
156 idx &= htt->rx_ring.size_mask; in __ath10k_htt_rx_ring_fill_n()
177 paddr = dma_map_single(htt->ar->dev, skb->data, in __ath10k_htt_rx_ring_fill_n()
181 if (unlikely(dma_mapping_error(htt->ar->dev, paddr))) { in __ath10k_htt_rx_ring_fill_n()
189 htt->rx_ring.netbufs_ring[idx] = skb; in __ath10k_htt_rx_ring_fill_n()
190 ath10k_htt_set_paddrs_ring(htt, paddr, idx); in __ath10k_htt_rx_ring_fill_n()
191 htt->rx_ring.fill_cnt++; in __ath10k_htt_rx_ring_fill_n()
193 if (htt->rx_ring.in_ord_rx) { in __ath10k_htt_rx_ring_fill_n()
194 hash_add(htt->rx_ring.skb_table, in __ath10k_htt_rx_ring_fill_n()
201 idx &= htt->rx_ring.size_mask; in __ath10k_htt_rx_ring_fill_n()
210 *htt->rx_ring.alloc_idx.vaddr = __cpu_to_le32(idx); in __ath10k_htt_rx_ring_fill_n()
214 static int ath10k_htt_rx_ring_fill_n(struct ath10k_htt *htt, int num) in ath10k_htt_rx_ring_fill_n() argument
216 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_ring_fill_n()
217 return __ath10k_htt_rx_ring_fill_n(htt, num); in ath10k_htt_rx_ring_fill_n()
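
Note: __ath10k_htt_rx_ring_fill_n() above validates the shared alloc index against the ring size, posts buffers at successive slots, and wraps the index with a power-of-two mask before publishing it. A simplified user-space model of that producer loop is sketched below; names are invented, plain malloc stands in for skb allocation plus DMA mapping, and locking is elided.

#include <stdlib.h>

#define RING_SIZE 256                 /* must be a power of two */
#define RING_MASK (RING_SIZE - 1)

struct rx_ring {
	void *bufs[RING_SIZE];        /* buffers currently owned by the "device" */
	int alloc_idx;                /* next slot the producer will fill */
	int fill_cnt;                 /* how many slots currently hold a buffer */
};

/* Post up to num buffers; returns how many were actually posted. */
static int ring_fill_n(struct rx_ring *r, int num)
{
	int posted = 0;
	int idx = r->alloc_idx & RING_MASK;   /* defensive mask, as the driver does
	                                       * when the index looks out of range */

	while (posted < num) {
		void *buf = malloc(2048);     /* stand-in for skb + dma_map_single() */

		if (!buf)
			break;                /* stop early; the caller may retry later */
		r->bufs[idx] = buf;
		r->fill_cnt++;
		idx = (idx + 1) & RING_MASK;  /* wrap with the power-of-two mask */
		posted++;
	}

	r->alloc_idx = idx;                   /* publish the new producer index */
	return posted;
}
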
220 static void ath10k_htt_rx_msdu_buff_replenish(struct ath10k_htt *htt) in ath10k_htt_rx_msdu_buff_replenish() argument
240 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_msdu_buff_replenish()
241 num_deficit = htt->rx_ring.fill_level - htt->rx_ring.fill_cnt; in ath10k_htt_rx_msdu_buff_replenish()
244 ret = ath10k_htt_rx_ring_fill_n(htt, num_to_fill); in ath10k_htt_rx_msdu_buff_replenish()
252 mod_timer(&htt->rx_ring.refill_retry_timer, jiffies + in ath10k_htt_rx_msdu_buff_replenish()
255 mod_timer(&htt->rx_ring.refill_retry_timer, jiffies + in ath10k_htt_rx_msdu_buff_replenish()
258 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_msdu_buff_replenish()
263 struct ath10k_htt *htt = timer_container_of(htt, t, in ath10k_htt_rx_ring_refill_retry() local
266 ath10k_htt_rx_msdu_buff_replenish(htt); in ath10k_htt_rx_ring_refill_retry()
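
Note: ath10k_htt_rx_msdu_buff_replenish() above computes the deficit between the desired fill level and the current fill count, fills as much as it can, and re-arms a retry timer when allocation falls short. A compact sketch of that policy follows, reusing the ring_fill_n() model from the previous example; schedule_retry() is a made-up callback standing in for mod_timer(), and the rx_ring lock taken by the driver is omitted.

/* Assumed to exist for this sketch: struct rx_ring, RING_SIZE and
 * ring_fill_n() from the previous example, plus a delayed-retry hook. */
extern void schedule_retry(void (*fn)(struct rx_ring *), struct rx_ring *r);

#define FILL_LEVEL (RING_SIZE * 3 / 4)   /* target number of posted buffers */

static void replenish(struct rx_ring *r)
{
	int deficit = FILL_LEVEL - r->fill_cnt;
	int posted;

	if (deficit <= 0)
		return;                       /* ring already full enough */

	posted = ring_fill_n(r, deficit);
	if (posted < deficit)
		schedule_retry(replenish, r); /* out of memory now; try again later */
}
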
271 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_ring_refill() local
277 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_ring_refill()
278 ret = ath10k_htt_rx_ring_fill_n(htt, (htt->rx_ring.fill_level - in ath10k_htt_rx_ring_refill()
279 htt->rx_ring.fill_cnt)); in ath10k_htt_rx_ring_refill()
282 ath10k_htt_rx_ring_free(htt); in ath10k_htt_rx_ring_refill()
284 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_ring_refill()
289 void ath10k_htt_rx_free(struct ath10k_htt *htt) in ath10k_htt_rx_free() argument
291 if (htt->ar->bus_param.dev_type == ATH10K_DEV_TYPE_HL) in ath10k_htt_rx_free()
294 timer_delete_sync(&htt->rx_ring.refill_retry_timer); in ath10k_htt_rx_free()
296 skb_queue_purge(&htt->rx_msdus_q); in ath10k_htt_rx_free()
297 skb_queue_purge(&htt->rx_in_ord_compl_q); in ath10k_htt_rx_free()
298 skb_queue_purge(&htt->tx_fetch_ind_q); in ath10k_htt_rx_free()
300 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_free()
301 ath10k_htt_rx_ring_free(htt); in ath10k_htt_rx_free()
302 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_free()
304 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_free()
305 ath10k_htt_get_rx_ring_size(htt), in ath10k_htt_rx_free()
306 ath10k_htt_get_vaddr_ring(htt), in ath10k_htt_rx_free()
307 htt->rx_ring.base_paddr); in ath10k_htt_rx_free()
309 ath10k_htt_config_paddrs_ring(htt, NULL); in ath10k_htt_rx_free()
311 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_free()
312 sizeof(*htt->rx_ring.alloc_idx.vaddr), in ath10k_htt_rx_free()
313 htt->rx_ring.alloc_idx.vaddr, in ath10k_htt_rx_free()
314 htt->rx_ring.alloc_idx.paddr); in ath10k_htt_rx_free()
315 htt->rx_ring.alloc_idx.vaddr = NULL; in ath10k_htt_rx_free()
317 kfree(htt->rx_ring.netbufs_ring); in ath10k_htt_rx_free()
318 htt->rx_ring.netbufs_ring = NULL; in ath10k_htt_rx_free()
321 static inline struct sk_buff *ath10k_htt_rx_netbuf_pop(struct ath10k_htt *htt) in ath10k_htt_rx_netbuf_pop() argument
323 struct ath10k *ar = htt->ar; in ath10k_htt_rx_netbuf_pop()
327 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_netbuf_pop()
329 if (htt->rx_ring.fill_cnt == 0) { in ath10k_htt_rx_netbuf_pop()
334 idx = htt->rx_ring.sw_rd_idx.msdu_payld; in ath10k_htt_rx_netbuf_pop()
335 msdu = htt->rx_ring.netbufs_ring[idx]; in ath10k_htt_rx_netbuf_pop()
336 htt->rx_ring.netbufs_ring[idx] = NULL; in ath10k_htt_rx_netbuf_pop()
337 ath10k_htt_reset_paddrs_ring(htt, idx); in ath10k_htt_rx_netbuf_pop()
340 idx &= htt->rx_ring.size_mask; in ath10k_htt_rx_netbuf_pop()
341 htt->rx_ring.sw_rd_idx.msdu_payld = idx; in ath10k_htt_rx_netbuf_pop()
342 htt->rx_ring.fill_cnt--; in ath10k_htt_rx_netbuf_pop()
344 dma_unmap_single(htt->ar->dev, in ath10k_htt_rx_netbuf_pop()
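
Note: ath10k_htt_rx_netbuf_pop() above is the consumer side: it takes the buffer at the software read index, clears the slot (and its descriptor entry), advances the index with the same mask, and decrements fill_cnt. Continuing the same toy model, with free()/plain pointers standing in for dma_unmap_single():

/* Consumer side of the toy ring from the earlier sketches. */
static void *ring_pop(struct rx_ring *r, int *sw_rd_idx)
{
	void *buf;
	int idx;

	if (r->fill_cnt == 0)
		return NULL;                      /* nothing posted; ring is empty */

	idx = *sw_rd_idx;
	buf = r->bufs[idx];
	r->bufs[idx] = NULL;                      /* clear the slot; the driver also
	                                           * zeroes the paddr descriptor and
	                                           * unmaps the DMA buffer here */

	*sw_rd_idx = (idx + 1) & RING_MASK;       /* advance and wrap */
	r->fill_cnt--;
	return buf;
}
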
355 static int ath10k_htt_rx_amsdu_pop(struct ath10k_htt *htt, in ath10k_htt_rx_amsdu_pop() argument
358 struct ath10k *ar = htt->ar; in ath10k_htt_rx_amsdu_pop()
368 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_amsdu_pop()
373 msdu = ath10k_htt_rx_netbuf_pop(htt); in ath10k_htt_rx_amsdu_pop()
424 msdu = ath10k_htt_rx_netbuf_pop(htt); in ath10k_htt_rx_amsdu_pop()
467 static struct sk_buff *ath10k_htt_rx_pop_paddr(struct ath10k_htt *htt, in ath10k_htt_rx_pop_paddr() argument
470 struct ath10k *ar = htt->ar; in ath10k_htt_rx_pop_paddr()
474 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_pop_paddr()
482 htt->rx_ring.fill_cnt--; in ath10k_htt_rx_pop_paddr()
484 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_pop_paddr()
502 static int ath10k_htt_rx_handle_amsdu_mon_32(struct ath10k_htt *htt, in ath10k_htt_rx_handle_amsdu_mon_32() argument
506 struct ath10k *ar = htt->ar; in ath10k_htt_rx_handle_amsdu_mon_32()
536 frag_buf = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_handle_amsdu_mon_32()
551 frag_buf = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_handle_amsdu_mon_32()
579 ath10k_htt_rx_handle_amsdu_mon_64(struct ath10k_htt *htt, in ath10k_htt_rx_handle_amsdu_mon_64() argument
583 struct ath10k *ar = htt->ar; in ath10k_htt_rx_handle_amsdu_mon_64()
613 frag_buf = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_handle_amsdu_mon_64()
628 frag_buf = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_handle_amsdu_mon_64()
655 static int ath10k_htt_rx_pop_paddr32_list(struct ath10k_htt *htt, in ath10k_htt_rx_pop_paddr32_list() argument
659 struct ath10k *ar = htt->ar; in ath10k_htt_rx_pop_paddr32_list()
669 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_pop_paddr32_list()
677 msdu = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_pop_paddr32_list()
684 ret = ath10k_htt_rx_handle_amsdu_mon_32(htt, msdu, in ath10k_htt_rx_pop_paddr32_list()
709 ath10k_warn(htt->ar, "tried to pop an incomplete frame, oops!\n"); in ath10k_htt_rx_pop_paddr32_list()
720 static int ath10k_htt_rx_pop_paddr64_list(struct ath10k_htt *htt, in ath10k_htt_rx_pop_paddr64_list() argument
724 struct ath10k *ar = htt->ar; in ath10k_htt_rx_pop_paddr64_list()
734 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_pop_paddr64_list()
741 msdu = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_pop_paddr64_list()
748 ret = ath10k_htt_rx_handle_amsdu_mon_64(htt, msdu, in ath10k_htt_rx_pop_paddr64_list()
773 ath10k_warn(htt->ar, "tried to pop an incomplete frame, oops!\n"); in ath10k_htt_rx_pop_paddr64_list()
784 int ath10k_htt_rx_alloc(struct ath10k_htt *htt) in ath10k_htt_rx_alloc() argument
786 struct ath10k *ar = htt->ar; in ath10k_htt_rx_alloc()
790 struct timer_list *timer = &htt->rx_ring.refill_retry_timer; in ath10k_htt_rx_alloc()
795 htt->rx_confused = false; in ath10k_htt_rx_alloc()
800 htt->rx_ring.size = HTT_RX_RING_SIZE; in ath10k_htt_rx_alloc()
801 htt->rx_ring.size_mask = htt->rx_ring.size - 1; in ath10k_htt_rx_alloc()
802 htt->rx_ring.fill_level = ar->hw_params.rx_ring_fill_level; in ath10k_htt_rx_alloc()
804 if (!is_power_of_2(htt->rx_ring.size)) { in ath10k_htt_rx_alloc()
809 htt->rx_ring.netbufs_ring = in ath10k_htt_rx_alloc()
810 kcalloc(htt->rx_ring.size, sizeof(struct sk_buff *), in ath10k_htt_rx_alloc()
812 if (!htt->rx_ring.netbufs_ring) in ath10k_htt_rx_alloc()
815 size = ath10k_htt_get_rx_ring_size(htt); in ath10k_htt_rx_alloc()
817 vaddr_ring = dma_alloc_coherent(htt->ar->dev, size, &paddr, GFP_KERNEL); in ath10k_htt_rx_alloc()
821 ath10k_htt_config_paddrs_ring(htt, vaddr_ring); in ath10k_htt_rx_alloc()
822 htt->rx_ring.base_paddr = paddr; in ath10k_htt_rx_alloc()
824 vaddr = dma_alloc_coherent(htt->ar->dev, in ath10k_htt_rx_alloc()
825 sizeof(*htt->rx_ring.alloc_idx.vaddr), in ath10k_htt_rx_alloc()
830 htt->rx_ring.alloc_idx.vaddr = vaddr; in ath10k_htt_rx_alloc()
831 htt->rx_ring.alloc_idx.paddr = paddr; in ath10k_htt_rx_alloc()
832 htt->rx_ring.sw_rd_idx.msdu_payld = htt->rx_ring.size_mask; in ath10k_htt_rx_alloc()
833 *htt->rx_ring.alloc_idx.vaddr = 0; in ath10k_htt_rx_alloc()
838 spin_lock_init(&htt->rx_ring.lock); in ath10k_htt_rx_alloc()
840 htt->rx_ring.fill_cnt = 0; in ath10k_htt_rx_alloc()
841 htt->rx_ring.sw_rd_idx.msdu_payld = 0; in ath10k_htt_rx_alloc()
842 hash_init(htt->rx_ring.skb_table); in ath10k_htt_rx_alloc()
844 skb_queue_head_init(&htt->rx_msdus_q); in ath10k_htt_rx_alloc()
845 skb_queue_head_init(&htt->rx_in_ord_compl_q); in ath10k_htt_rx_alloc()
846 skb_queue_head_init(&htt->tx_fetch_ind_q); in ath10k_htt_rx_alloc()
847 atomic_set(&htt->num_mpdus_ready, 0); in ath10k_htt_rx_alloc()
850 htt->rx_ring.size, htt->rx_ring.fill_level); in ath10k_htt_rx_alloc()
854 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_alloc()
855 ath10k_htt_get_rx_ring_size(htt), in ath10k_htt_rx_alloc()
857 htt->rx_ring.base_paddr); in ath10k_htt_rx_alloc()
858 ath10k_htt_config_paddrs_ring(htt, NULL); in ath10k_htt_rx_alloc()
860 kfree(htt->rx_ring.netbufs_ring); in ath10k_htt_rx_alloc()
861 htt->rx_ring.netbufs_ring = NULL; in ath10k_htt_rx_alloc()
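
Note: ath10k_htt_rx_alloc() above checks that the ring size is a power of two, then allocates the netbuf array, the coherent descriptor ring, and the shared alloc-index word, unwinding in reverse order on failure (the error-path lines 854-861 in the listing). A generic sketch of that allocate-then-unwind pattern with plain calloc/free, under invented names:

#include <stdlib.h>

struct rx_state {
	void **netbufs;        /* per-slot buffer pointers */
	unsigned int *paddrs;  /* stand-in for the device-visible descriptor ring */
	int *alloc_idx;        /* stand-in for the shared alloc index word */
	int size;
};

static int rx_alloc(struct rx_state *s, int size)
{
	if (size <= 0 || (size & (size - 1)))
		return -1;                    /* must be a power of two */
	s->size = size;

	s->netbufs = calloc(size, sizeof(*s->netbufs));
	if (!s->netbufs)
		goto err;

	s->paddrs = calloc(size, sizeof(*s->paddrs));
	if (!s->paddrs)
		goto err_netbufs;

	s->alloc_idx = calloc(1, sizeof(*s->alloc_idx));
	if (!s->alloc_idx)
		goto err_paddrs;

	return 0;

err_paddrs:                                   /* unwind in reverse order */
	free(s->paddrs);
	s->paddrs = NULL;
err_netbufs:
	free(s->netbufs);
	s->netbufs = NULL;
err:
	return -1;
}
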
1361 skb_queue_tail(&ar->htt.rx_msdus_q, skb); in ath10k_htt_rx_h_queue_msdu()
2322 static int ath10k_htt_rx_handle_amsdu(struct ath10k_htt *htt) in ath10k_htt_rx_handle_amsdu() argument
2324 struct ath10k *ar = htt->ar; in ath10k_htt_rx_handle_amsdu()
2325 struct ieee80211_rx_status *rx_status = &htt->rx_status; in ath10k_htt_rx_handle_amsdu()
2337 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_handle_amsdu()
2338 if (htt->rx_confused) { in ath10k_htt_rx_handle_amsdu()
2339 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_handle_amsdu()
2342 ret = ath10k_htt_rx_amsdu_pop(htt, &amsdu); in ath10k_htt_rx_handle_amsdu()
2343 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_handle_amsdu()
2351 htt->rx_confused = true; in ath10k_htt_rx_handle_amsdu()
2456 static bool ath10k_htt_rx_proc_rx_ind_hl(struct ath10k_htt *htt, in ath10k_htt_rx_proc_rx_ind_hl() argument
2462 struct ath10k *ar = htt->ar; in ath10k_htt_rx_proc_rx_ind_hl()
2760 static bool ath10k_htt_rx_proc_rx_frag_ind_hl(struct ath10k_htt *htt, in ath10k_htt_rx_proc_rx_frag_ind_hl() argument
2764 struct ath10k *ar = htt->ar; in ath10k_htt_rx_proc_rx_frag_ind_hl()
2817 return ath10k_htt_rx_proc_rx_ind_hl(htt, &resp->rx_ind_hl, skb, in ath10k_htt_rx_proc_rx_frag_ind_hl()
2879 return ath10k_htt_rx_proc_rx_ind_hl(htt, &resp->rx_ind_hl, skb, in ath10k_htt_rx_proc_rx_frag_ind_hl()
2906 return ath10k_htt_rx_proc_rx_ind_hl(htt, &resp->rx_ind_hl, skb, in ath10k_htt_rx_proc_rx_frag_ind_hl()
2918 static void ath10k_htt_rx_proc_rx_ind_ll(struct ath10k_htt *htt, in ath10k_htt_rx_proc_rx_ind_ll() argument
2921 struct ath10k *ar = htt->ar; in ath10k_htt_rx_proc_rx_ind_ll()
2941 atomic_add(mpdu_count, &htt->num_mpdus_ready); in ath10k_htt_rx_proc_rx_ind_ll()
2950 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_tx_compl_ind() local
3018 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_rx_tx_compl_ind()
3019 } else if (!kfifo_put(&htt->txdone_fifo, tx_done)) { in ath10k_htt_rx_tx_compl_ind()
3022 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_rx_tx_compl_ind()
3215 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_h_rx_offload() local
3216 struct ieee80211_rx_status *status = &htt->rx_status; in ath10k_htt_rx_h_rx_offload()
3264 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_in_ord_ind() local
3266 struct ieee80211_rx_status *status = &htt->rx_status; in ath10k_htt_rx_in_ord_ind()
3277 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_in_ord_ind()
3279 if (htt->rx_confused) in ath10k_htt_rx_in_ord_ind()
3307 ret = ath10k_htt_rx_pop_paddr64_list(htt, &resp->rx_in_ord_ind, in ath10k_htt_rx_in_ord_ind()
3310 ret = ath10k_htt_rx_pop_paddr32_list(htt, &resp->rx_in_ord_ind, in ath10k_htt_rx_in_ord_ind()
3315 htt->rx_confused = true; in ath10k_htt_rx_in_ord_ind()
3347 htt->rx_confused = true; in ath10k_htt_rx_in_ord_ind()
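
Note: ath10k_htt_rx_in_ord_ind() above resolves each DMA address the firmware lists back into an sk_buff (via the hash lookup shown near the top of this listing) and, if any address cannot be resolved, sets rx_confused and stops trusting further indications. A rough sketch of that defensive loop, reusing the hypothetical paddr_map from the first example:

/* Uses the hypothetical paddr_map/paddr_map_find from the first sketch. */
struct rx_path {
	struct paddr_map *map;
	int confused;          /* once set, further indications are dropped */
};

static int handle_in_order_ind(struct rx_path *rx,
			       const uint64_t *paddrs, int count)
{
	int i;

	if (rx->confused)
		return -1;                        /* state already unreliable */

	for (i = 0; i < count; i++) {
		void *buf = paddr_map_find(rx->map, paddrs[i]);

		if (!buf) {
			/* firmware referenced an address we never posted */
			rx->confused = 1;
			return -1;
		}
		/* deliver buf up the stack here */
	}
	return 0;
}
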
3418 if (!ar->htt.tx_q_state.enabled) { in ath10k_htt_rx_tx_fetch_ind()
3423 if (ar->htt.tx_q_state.mode == HTT_TX_MODE_SWITCH_PUSH) { in ath10k_htt_rx_tx_fetch_ind()
3442 if (unlikely(peer_id >= ar->htt.tx_q_state.num_peers) || in ath10k_htt_rx_tx_fetch_ind()
3443 unlikely(tid >= ar->htt.tx_q_state.num_tids)) { in ath10k_htt_rx_tx_fetch_ind()
3594 ar->htt.tx_q_state.enabled = enable; in ath10k_htt_rx_tx_mode_switch_ind()
3595 ar->htt.tx_q_state.mode = mode; in ath10k_htt_rx_tx_mode_switch_ind()
3596 ar->htt.tx_q_state.num_push_allowed = threshold; in ath10k_htt_rx_tx_mode_switch_ind()
3606 if (unlikely(peer_id >= ar->htt.tx_q_state.num_peers) || in ath10k_htt_rx_tx_mode_switch_ind()
3607 unlikely(tid >= ar->htt.tx_q_state.num_tids)) { in ath10k_htt_rx_tx_mode_switch_ind()
3627 spin_lock_bh(&ar->htt.tx_lock); in ath10k_htt_rx_tx_mode_switch_ind()
3630 spin_unlock_bh(&ar->htt.tx_lock); in ath10k_htt_rx_tx_mode_switch_ind()
3904 if (ar->htt.disable_tx_comp) { in ath10k_update_per_peer_tx_stats()
4077 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_t2h_msg_handler() local
4088 if (resp->hdr.msg_type >= ar->htt.t2h_msg_types_max) { in ath10k_htt_t2h_msg_handler()
4090 resp->hdr.msg_type, ar->htt.t2h_msg_types_max); in ath10k_htt_t2h_msg_handler()
4093 type = ar->htt.t2h_msg_types[resp->hdr.msg_type]; in ath10k_htt_t2h_msg_handler()
4097 htt->target_version_major = resp->ver_resp.major; in ath10k_htt_t2h_msg_handler()
4098 htt->target_version_minor = resp->ver_resp.minor; in ath10k_htt_t2h_msg_handler()
4099 complete(&htt->target_version_received); in ath10k_htt_t2h_msg_handler()
4104 ath10k_htt_rx_proc_rx_ind_ll(htt, &resp->rx_ind); in ath10k_htt_t2h_msg_handler()
4106 skb_queue_tail(&htt->rx_indication_head, skb); in ath10k_htt_t2h_msg_handler()
4116 ath10k_peer_map_event(htt, &ev); in ath10k_htt_t2h_msg_handler()
4123 ath10k_peer_unmap_event(htt, &ev); in ath10k_htt_t2h_msg_handler()
4128 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_t2h_msg_handler() local
4130 struct ath10k_htc_ep *ep = &ar->htc.endpoint[htt->eid]; in ath10k_htt_t2h_msg_handler()
4156 if (htt->disable_tx_comp) { in ath10k_htt_t2h_msg_handler()
4162 status = ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_t2h_msg_handler()
4164 spin_lock_bh(&htt->tx_lock); in ath10k_htt_t2h_msg_handler()
4165 ath10k_htt_tx_mgmt_dec_pending(htt); in ath10k_htt_t2h_msg_handler()
4166 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_t2h_msg_handler()
4171 ath10k_htt_rx_tx_compl_ind(htt->ar, skb); in ath10k_htt_t2h_msg_handler()
4174 struct ath10k *ar = htt->ar; in ath10k_htt_t2h_msg_handler()
4189 atomic_inc(&htt->num_mpdus_ready); in ath10k_htt_t2h_msg_handler()
4191 return ath10k_htt_rx_proc_rx_frag_ind(htt, in ath10k_htt_t2h_msg_handler()
4232 skb_queue_tail(&htt->rx_in_ord_compl_q, skb); in ath10k_htt_t2h_msg_handler()
4236 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_t2h_msg_handler() local
4238 struct ath10k_htc_ep *ep = &ar->htc.endpoint[htt->eid]; in ath10k_htt_t2h_msg_handler()
4250 if (htt->disable_tx_comp) { in ath10k_htt_t2h_msg_handler()
4280 skb_queue_tail(&htt->tx_fetch_ind_q, tx_fetch_ind); in ath10k_htt_t2h_msg_handler()
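
Note: in ath10k_htt_t2h_msg_handler() above, the raw firmware message type is first bounds-checked against t2h_msg_types_max, then translated through the t2h_msg_types[] table before the switch runs. A minimal sketch of that bounded-table dispatch, with made-up message names:

#include <stdio.h>

/* Hypothetical internal message identifiers. */
enum msg_type { MSG_VERSION, MSG_RX_IND, MSG_TX_COMPL, MSG_MAX };

/* Table translating raw, firmware-specific type codes into internal ones. */
static const enum msg_type t2h_msg_types[] = {
	[0] = MSG_VERSION,
	[1] = MSG_RX_IND,
	[2] = MSG_TX_COMPL,
};

#define T2H_MSG_TYPES_MAX (sizeof(t2h_msg_types) / sizeof(t2h_msg_types[0]))

static int handle_msg(unsigned int raw_type)
{
	enum msg_type type;

	if (raw_type >= T2H_MSG_TYPES_MAX) {
		fprintf(stderr, "unknown message type %u\n", raw_type);
		return -1;                    /* drop, as the driver does */
	}

	type = t2h_msg_types[raw_type];
	switch (type) {
	case MSG_VERSION:
		/* record the target version, wake the waiter */
		return 0;
	case MSG_RX_IND:
		/* queue the rx indication for the bottom-half path */
		return 0;
	case MSG_TX_COMPL:
		/* push completions into the txdone fifo */
		return 0;
	default:
		return -1;
	}
}
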
4317 if (skb_queue_empty(&ar->htt.rx_msdus_q)) in ath10k_htt_rx_deliver_msdu()
4320 skb = skb_dequeue(&ar->htt.rx_msdus_q); in ath10k_htt_rx_deliver_msdu()
4333 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_hl_indication() local
4339 skb = skb_dequeue(&htt->rx_indication_head); in ath10k_htt_rx_hl_indication()
4345 release = ath10k_htt_rx_proc_rx_ind_hl(htt, in ath10k_htt_rx_hl_indication()
4355 skb_queue_len(&htt->rx_indication_head)); in ath10k_htt_rx_hl_indication()
4363 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_txrx_compl_task() local
4382 while ((skb = skb_dequeue(&htt->rx_in_ord_compl_q))) { in ath10k_htt_txrx_compl_task()
4383 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_txrx_compl_task()
4385 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_txrx_compl_task()
4394 while (atomic_read(&htt->num_mpdus_ready)) { in ath10k_htt_txrx_compl_task()
4395 ret = ath10k_htt_rx_handle_amsdu(htt); in ath10k_htt_txrx_compl_task()
4400 atomic_dec(&htt->num_mpdus_ready); in ath10k_htt_txrx_compl_task()
4411 if ((quota < budget) && !kfifo_is_empty(&htt->txdone_fifo)) in ath10k_htt_txrx_compl_task()
4419 while (kfifo_get(&htt->txdone_fifo, &tx_done)) in ath10k_htt_txrx_compl_task()
4420 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_txrx_compl_task()
4424 spin_lock_irqsave(&htt->tx_fetch_ind_q.lock, flags); in ath10k_htt_txrx_compl_task()
4425 skb_queue_splice_init(&htt->tx_fetch_ind_q, &tx_ind_q); in ath10k_htt_txrx_compl_task()
4426 spin_unlock_irqrestore(&htt->tx_fetch_ind_q.lock, flags); in ath10k_htt_txrx_compl_task()
4434 ath10k_htt_rx_msdu_buff_replenish(htt); in ath10k_htt_txrx_compl_task()
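
Note: ath10k_htt_txrx_compl_task() above drains several queues but stops once a NAPI budget is consumed, deferring the remainder (and only touching the txdone fifo while quota remains). A schematic version of that budget accounting, with a fake work queue; names are invented and no NAPI details are modeled:

/* Schematic budget loop: process at most `budget` items and report how many
 * were handled, so the caller can decide whether to reschedule itself. */
struct work_queue {
	int pending;   /* items waiting to be processed */
};

static int process_one(struct work_queue *q)
{
	if (q->pending == 0)
		return 0;
	q->pending--;          /* stand-in for delivering one MSDU */
	return 1;
}

static int compl_task(struct work_queue *q, int budget)
{
	int quota = 0;

	while (quota < budget) {
		if (!process_one(q))
			break;         /* nothing left to do */
		quota++;
	}

	/* quota == budget with work still pending means "poll me again";
	 * the driver re-arms itself through NAPI in that case. */
	return quota;
}
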
4464 void ath10k_htt_set_rx_ops(struct ath10k_htt *htt) in ath10k_htt_set_rx_ops() argument
4466 struct ath10k *ar = htt->ar; in ath10k_htt_set_rx_ops()
4469 htt->rx_ops = &htt_rx_ops_hl; in ath10k_htt_set_rx_ops()
4471 htt->rx_ops = &htt_rx_ops_64; in ath10k_htt_set_rx_ops()
4473 htt->rx_ops = &htt_rx_ops_32; in ath10k_htt_set_rx_ops()
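
Note: ath10k_htt_set_rx_ops() above picks between the high-latency, 64-bit and 32-bit rx ops tables once at setup, so the hot path never branches on target type. A small sketch of that function-pointer-table pattern follows (invented names; the endianness conversion shown earlier is omitted here for brevity):

#include <stdint.h>
#include <stddef.h>

/* Ops table abstracting 32-bit vs 64-bit device address descriptors. */
struct ring_ops {
	size_t desc_size;                                 /* bytes per slot */
	void (*set_paddr)(void *ring, int idx, uint64_t paddr);
};

static void set_paddr_32(void *ring, int idx, uint64_t paddr)
{
	((uint32_t *)ring)[idx] = (uint32_t)paddr;        /* target uses 32-bit addresses */
}

static void set_paddr_64(void *ring, int idx, uint64_t paddr)
{
	((uint64_t *)ring)[idx] = paddr;                  /* target uses 64-bit addresses */
}

static const struct ring_ops ops_32 = { sizeof(uint32_t), set_paddr_32 };
static const struct ring_ops ops_64 = { sizeof(uint64_t), set_paddr_64 };

/* Chosen once, based on what the target supports. */
static const struct ring_ops *select_ring_ops(int target_64bit)
{
	return target_64bit ? &ops_64 : &ops_32;
}
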