Lines matching references to ll2

41 #define QED_LL2_RX_REGISTERED(ll2) ((ll2)->rx_queue.b_cb_registered) argument
42 #define QED_LL2_TX_REGISTERED(ll2) ((ll2)->tx_queue.b_cb_registered) argument
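
The two REGISTERED macros above (source lines 41-42) test a per-direction flag on an LL2 connection object that carries separate rx_queue and tx_queue state; this appears to be a different object from the cdev->ll2 bookkeeping struct seen in the remaining matches. A minimal sketch of the shape those accessors imply, reconstructed only from the two macros; every field other than rx_queue, tx_queue and b_cb_registered is a placeholder, not the driver's real layout:

#include <linux/types.h>

/* Sketch only: inferred from the two macros above, not copied from the
 * driver's headers. */
struct ll2_queue_sketch {
        bool b_cb_registered;           /* a completion callback is attached */
        /* descriptor ring / chain state elided */
};

struct ll2_conn_sketch {
        struct ll2_queue_sketch rx_queue;
        struct ll2_queue_sketch tx_queue;
};

#define LL2_RX_REGISTERED_SKETCH(ll2)   ((ll2)->rx_queue.b_cb_registered)
#define LL2_TX_REGISTERED_SKETCH(ll2)   ((ll2)->tx_queue.b_cb_registered)
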
106 if (cdev->ll2->cbs && cdev->ll2->cbs->tx_cb) in qed_ll2b_complete_tx_packet()
107 cdev->ll2->cbs->tx_cb(cdev->ll2->cb_cookie, skb, in qed_ll2b_complete_tx_packet()
116 *data = kmalloc(cdev->ll2->rx_size, GFP_ATOMIC); in qed_ll2_alloc_buffer()
124 cdev->ll2->rx_size, DMA_FROM_DEVICE); in qed_ll2_alloc_buffer()
137 spin_lock_bh(&cdev->ll2->lock); in qed_ll2_dealloc_buffer()
140 cdev->ll2->rx_size, DMA_FROM_DEVICE); in qed_ll2_dealloc_buffer()
144 cdev->ll2->rx_cnt--; in qed_ll2_dealloc_buffer()
145 if (!cdev->ll2->rx_cnt) in qed_ll2_dealloc_buffer()
148 spin_unlock_bh(&cdev->ll2->lock); in qed_ll2_dealloc_buffer()
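
Source lines 116-148 cover the rx buffer lifecycle behind cdev->ll2: qed_ll2_alloc_buffer() kmalloc()s rx_size bytes with GFP_ATOMIC and maps them for device writes, while qed_ll2_dealloc_buffer() unmaps, frees and decrements rx_cnt under cdev->ll2->lock. A hedged sketch of that pattern; the helper names, the struct below and the argument plumbing are assumptions for illustration, only the kmalloc/dma_map_single/dma_unmap_single/kfree sequence is visible in the matched lines:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/types.h>

/* Sketch of the per-buffer bookkeeping the matched lines imply. */
struct ll2_buf_sketch {
        struct list_head list;          /* linked on cdev->ll2->list */
        void *data;                     /* kmalloc()ed packet buffer */
        dma_addr_t phys_addr;           /* streaming DMA mapping     */
};

static int ll2_alloc_buffer_sketch(struct device *dev, u32 rx_size,
                                   struct ll2_buf_sketch *buf)
{
        /* Mirrors lines 116-124: atomic allocation + DMA_FROM_DEVICE map. */
        buf->data = kmalloc(rx_size, GFP_ATOMIC);
        if (!buf->data)
                return -ENOMEM;

        buf->phys_addr = dma_map_single(dev, buf->data, rx_size,
                                        DMA_FROM_DEVICE);
        if (dma_mapping_error(dev, buf->phys_addr)) {
                kfree(buf->data);
                return -ENOMEM;
        }
        return 0;
}

static void ll2_dealloc_buffer_sketch(struct device *dev, spinlock_t *lock,
                                      u32 rx_size, int *rx_cnt,
                                      struct ll2_buf_sketch *buf)
{
        /* Mirrors lines 137-148: everything happens under the ll2 lock. */
        spin_lock_bh(lock);
        list_del(&buf->list);
        dma_unmap_single(dev, buf->phys_addr, rx_size, DMA_FROM_DEVICE);
        kfree(buf->data);
        (*rx_cnt)--;                    /* mirrors cdev->ll2->rx_cnt-- */
        spin_unlock_bh(lock);
        kfree(buf);
}
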
157 list_for_each_entry_safe(buffer, tmp_buffer, &cdev->ll2->list, list) in qed_ll2_kill_buffers()
201 cdev->ll2->rx_size, DMA_FROM_DEVICE); in qed_ll2b_complete_rx_packet()
222 if (cdev->ll2->cbs && cdev->ll2->cbs->rx_cb) { in qed_ll2b_complete_rx_packet()
226 cdev->ll2->cbs->rx_cb(cdev->ll2->cb_cookie, skb, in qed_ll2b_complete_rx_packet()
242 rc = qed_ll2_post_rx_buffer(p_hwfn, cdev->ll2->handle, in qed_ll2b_complete_rx_packet()
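
Source lines 106-107 and 201-242 show how completions fan out: both the tx and the rx completion handlers call back through cdev->ll2->cbs with cdev->ll2->cb_cookie as the first argument, and the rx path additionally unmaps the filled buffer, wraps it in an skb and re-posts a replacement via qed_ll2_post_rx_buffer() on cdev->ll2->handle. A hedged sketch of the rx side; the callback signatures and the build_skb() usage are assumptions, only the cookie-plus-skb convention is visible in the matched lines:

#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

/* Assumed callback-table shape; only rx_cb/tx_cb and cb_cookie appear in
 * the matched lines, the argument lists here are guesses. */
struct ll2_cb_ops_sketch {
        void (*rx_cb)(void *cookie, struct sk_buff *skb);
        void (*tx_cb)(void *cookie, struct sk_buff *skb);
};

static void ll2_complete_rx_sketch(struct device *dev,
                                   const struct ll2_cb_ops_sketch *cbs,
                                   void *cb_cookie, void *data,
                                   dma_addr_t phys, u32 rx_size, u32 pkt_len)
{
        struct sk_buff *skb;

        /* Return buffer ownership to the CPU (line 201). */
        dma_unmap_single(dev, phys, rx_size, DMA_FROM_DEVICE);

        /* Wrap the kmalloc()ed buffer without copying. */
        skb = build_skb(data, 0);
        if (!skb)
                return;                 /* caller keeps ownership of data */
        skb_put(skb, pkt_len);

        /* Hand the packet to the registered upper layer (lines 222-226). */
        if (cbs && cbs->rx_cb)
                cbs->rx_cb(cb_cookie, skb);

        /* The real handler then re-posts a fresh buffer through
         * qed_ll2_post_rx_buffer(p_hwfn, cdev->ll2->handle, ...) (line 242). */
}
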
2372 cdev->ll2->cbs = ops; in qed_ll2_register_cb_ops()
2373 cdev->ll2->cb_cookie = cookie; in qed_ll2_register_cb_ops()
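
Source lines 2372-2373, together with the fields touched throughout this listing (cbs, cb_cookie, handle, rx_size, rx_cnt, lock, list), suggest that cdev->ll2 is a small bookkeeping object and that registration simply caches the ops table plus an opaque cookie. A sketch of that aggregate, assembled only from the field names that appear in the matches; the field types are assumptions:

#include <linux/list.h>
#include <linux/spinlock.h>
#include <linux/types.h>

/* Aggregate implied by the cdev->ll2 dereferences in this listing;
 * field names come from the matches, field types are assumed. */
struct ll2_if_sketch {
        const struct ll2_cb_ops_sketch *cbs;    /* see rx sketch above        */
        void *cb_cookie;                        /* handed to every callback   */
        u8 handle;                              /* connection handle/UNUSED   */
        u32 rx_size;                            /* per-buffer allocation size */
        int rx_cnt;                             /* buffers currently posted   */
        spinlock_t lock;                        /* protects list and rx_cnt   */
        struct list_head list;                  /* pre-allocated rx buffers   */
};

/* Lines 2372-2373: registration only stores the table and the cookie. */
static void ll2_register_cb_ops_sketch(struct ll2_if_sketch *ll2,
                                       const struct ll2_cb_ops_sketch *ops,
                                       void *cookie)
{
        ll2->cbs = ops;
        ll2->cb_cookie = cookie;
}

An upper-layer driver would pass its own context pointer as the cookie and get it back as the first argument of every rx_cb/tx_cb invocation, as the completion handlers above show.
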
2454 rc = qed_ll2_terminate_connection(p_hwfn, cdev->ll2->handle); in __qed_ll2_stop()
2458 qed_ll2_release_connection(p_hwfn, cdev->ll2->handle); in __qed_ll2_stop()
2469 if (cdev->ll2->handle == QED_LL2_UNUSED_HANDLE) in qed_ll2_stop()
2494 cdev->ll2->handle = QED_LL2_UNUSED_HANDLE; in qed_ll2_stop()
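
Source lines 2454-2494 give the teardown order: qed_ll2_stop() returns early when cdev->ll2->handle is still QED_LL2_UNUSED_HANDLE, otherwise __qed_ll2_stop() terminates and then releases the connection, and the handle is finally marked unused again. A compact sketch of that control flow; error handling and any additional cleanup between those lines are elided:

/* Assumes the driver-internal LL2 declarations (struct qed_hwfn,
 * QED_LL2_UNUSED_HANDLE, qed_ll2_terminate_connection(),
 * qed_ll2_release_connection()) are in scope, as they are in qed_ll2.c. */
static int ll2_stop_sketch(struct qed_hwfn *p_hwfn, u8 *handle)
{
        int rc;

        if (*handle == QED_LL2_UNUSED_HANDLE)
                return 0;               /* nothing was ever started */

        rc = qed_ll2_terminate_connection(p_hwfn, *handle);
        /* release is attempted regardless of the terminate result */
        qed_ll2_release_connection(p_hwfn, *handle);

        *handle = QED_LL2_UNUSED_HANDLE;
        return rc;
}
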
2525 &cdev->ll2->handle, false); in __qed_ll2_start()
2533 rc = qed_ll2_establish_connection(p_hwfn, cdev->ll2->handle); in __qed_ll2_start()
2540 spin_lock_bh(&cdev->ll2->lock); in __qed_ll2_start()
2541 rx_cnt = cdev->ll2->rx_cnt; in __qed_ll2_start()
2542 list_for_each_entry_safe(buffer, tmp_buffer, &cdev->ll2->list, list) { in __qed_ll2_start()
2544 cdev->ll2->handle, in __qed_ll2_start()
2550 cdev->ll2->rx_size, DMA_FROM_DEVICE); in __qed_ll2_start()
2558 spin_unlock_bh(&cdev->ll2->lock); in __qed_ll2_start()
2560 if (rx_cnt == cdev->ll2->rx_cnt) { in __qed_ll2_start()
2564 cdev->ll2->rx_cnt = rx_cnt; in __qed_ll2_start()
2569 qed_ll2_terminate_connection(p_hwfn, cdev->ll2->handle); in __qed_ll2_start()
2571 qed_ll2_release_connection(p_hwfn, cdev->ll2->handle); in __qed_ll2_start()
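
Source lines 2525-2571 outline the start sequence: a connection is acquired into cdev->ll2->handle and established, then every pre-allocated buffer on cdev->ll2->list is posted to the ring under cdev->ll2->lock; buffers that fail to post are unmapped and dropped, and if the running count never moves past the initial rx_cnt the connection is terminated and released again (lines 2569-2571). A hedged sketch of the posting loop, reusing struct ll2_buf_sketch from the allocation sketch above; post_one_rx_buffer() is a stand-in because the full qed_ll2_post_rx_buffer() argument list is not visible in the matched lines:

/* Stand-in for qed_ll2_post_rx_buffer(); only the (p_hwfn, handle, ...)
 * prefix of the real call is visible in the matches. */
static int post_one_rx_buffer(struct qed_hwfn *p_hwfn, u8 handle,
                              struct ll2_buf_sketch *buffer);

static int ll2_post_prealloc_buffers_sketch(struct qed_hwfn *p_hwfn,
                                            struct device *dev, u8 handle,
                                            spinlock_t *lock,
                                            struct list_head *buffers,
                                            u32 rx_size, int *rx_cnt)
{
        struct ll2_buf_sketch *buffer, *tmp_buffer;
        int posted = *rx_cnt;           /* mirrors rx_cnt = cdev->ll2->rx_cnt */

        spin_lock_bh(lock);
        list_for_each_entry_safe(buffer, tmp_buffer, buffers, list) {
                if (post_one_rx_buffer(p_hwfn, handle, buffer)) {
                        /* Posting failed: unmap and drop this buffer. */
                        list_del(&buffer->list);
                        dma_unmap_single(dev, buffer->phys_addr, rx_size,
                                         DMA_FROM_DEVICE);
                        kfree(buffer->data);
                        kfree(buffer);
                        continue;
                }
                posted++;
        }
        spin_unlock_bh(lock);

        /* Line 2560: bail out if not even one buffer reached the ring. */
        if (posted == *rx_cnt)
                return -EIO;

        *rx_cnt = posted;               /* line 2564 */
        return 0;
}

On the failure path the caller would then tear the connection back down with the terminate/release pair shown in lines 2569-2571.
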
2587 WARN_ON(!cdev->ll2->cbs); in qed_ll2_start()
2590 INIT_LIST_HEAD(&cdev->ll2->list); in qed_ll2_start()
2591 spin_lock_init(&cdev->ll2->lock); in qed_ll2_start()
2593 cdev->ll2->rx_size = NET_SKB_PAD + ETH_HLEN + in qed_ll2_start()
2602 rx_num_desc, cdev->ll2->rx_size); in qed_ll2_start()
2618 list_add_tail(&buffer->list, &cdev->ll2->list); in qed_ll2_start()
2670 cdev->ll2->handle = QED_LL2_UNUSED_HANDLE; in qed_ll2_start()
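
Source lines 2587-2618 cover the qed_ll2_start() setup: it checks that callbacks were registered (WARN_ON(!cdev->ll2->cbs)), initializes the buffer list and lock, derives rx_size from NET_SKB_PAD + ETH_HLEN plus further headroom that is not visible in these matches, then allocates rx_num_desc buffers and queues them on cdev->ll2->list for the posting loop sketched above. A hedged sketch of that pre-allocation loop, again reusing the earlier sketch helpers; cleanup of already-queued buffers on the error path is elided:

/* Sketch of the buffer pre-allocation in qed_ll2_start() (lines 2590-2618).
 * Assumes ll2->rx_size has already been computed (lines 2593 onward). */
static int ll2_prealloc_buffers_sketch(struct device *dev,
                                       struct ll2_if_sketch *ll2,
                                       int rx_num_desc)
{
        int i;

        INIT_LIST_HEAD(&ll2->list);             /* line 2590 */
        spin_lock_init(&ll2->lock);             /* line 2591 */

        for (i = 0; i < rx_num_desc; i++) {
                struct ll2_buf_sketch *buffer;

                buffer = kzalloc(sizeof(*buffer), GFP_KERNEL);
                if (!buffer)
                        return -ENOMEM;

                if (ll2_alloc_buffer_sketch(dev, ll2->rx_size, buffer)) {
                        kfree(buffer);
                        return -ENOMEM;
                }
                list_add_tail(&buffer->list, &ll2->list);       /* line 2618 */
        }
        return 0;
}
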
2735 rc = qed_ll2_prepare_tx_packet(p_hwfn, cdev->ll2->handle, in qed_ll2_start_xmit()
2754 cdev->ll2->handle, in qed_ll2_start_xmit()
2779 if (!cdev->ll2) in qed_ll2_stats()
2782 rc = qed_ll2_get_stats(p_hwfn, cdev->ll2->handle, stats); in qed_ll2_stats()
2791 cdev->ll2->handle, stats); in qed_ll2_stats()
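
Source lines 2779-2791 show the stats path guarding against a missing cdev->ll2 before querying per-connection statistics by handle; the second call at line 2791 is only partially visible, so it is not reproduced here. A minimal hedged sketch; the error code on the missing-ll2 path and the exact stats type are assumptions:

/* Assumes the driver's qed_ll2_get_stats() declaration and the exported
 * struct qed_ll2_stats are in scope. */
static int ll2_stats_sketch(struct qed_hwfn *p_hwfn,
                            struct ll2_if_sketch *ll2,
                            struct qed_ll2_stats *stats)
{
        if (!ll2)                       /* line 2779 */
                return -EINVAL;         /* assumed error code */

        /* Line 2782: query by connection handle. */
        return qed_ll2_get_stats(p_hwfn, ll2->handle, stats);
}
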
2812 cdev->ll2 = kzalloc(sizeof(*cdev->ll2), GFP_KERNEL); in qed_ll2_alloc_if()
2813 return cdev->ll2 ? 0 : -ENOMEM; in qed_ll2_alloc_if()
2818 kfree(cdev->ll2); in qed_ll2_dealloc_if()
2819 cdev->ll2 = NULL; in qed_ll2_dealloc_if()
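
Source lines 2812-2819 close the lifecycle: the bookkeeping object is kzalloc()ed when the interface is allocated and kfree()d, with the pointer cleared, when it is torn down. The same pattern in sketch form, against the ll2_if_sketch aggregate rather than the driver's real type:

#include <linux/errno.h>
#include <linux/slab.h>

/* Mirrors qed_ll2_alloc_if()/qed_ll2_dealloc_if() (lines 2812-2819). */
static int ll2_alloc_if_sketch(struct ll2_if_sketch **pll2)
{
        *pll2 = kzalloc(sizeof(**pll2), GFP_KERNEL);
        return *pll2 ? 0 : -ENOMEM;
}

static void ll2_dealloc_if_sketch(struct ll2_if_sketch **pll2)
{
        kfree(*pll2);
        *pll2 = NULL;           /* leave no dangling pointer behind */
}
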