| /drivers/net/ethernet/broadcom/bnge/ |
| A D | bnge_netdev.c |
|    74  u32 ring_size, rx_size, rx_space, max_rx_cmpl;  in bnge_set_ring_params() local
|    84  ring_size = bn->rx_ring_size;  in bnge_set_ring_params()
|   101  if (ring_size > BNGE_MAX_RX_DESC_CNT_JUM_ENA) {  in bnge_set_ring_params()
|   102  ring_size = BNGE_MAX_RX_DESC_CNT_JUM_ENA;  in bnge_set_ring_params()
|   104  bn->rx_ring_size, ring_size);  in bnge_set_ring_params()
|   105  bn->rx_ring_size = ring_size;  in bnge_set_ring_params()
|   107  agg_ring_size = ring_size * agg_factor;  in bnge_set_ring_params()
|   130  bn->rx_nr_pages = bnge_adjust_pow_two(ring_size, RX_DESC_CNT);  in bnge_set_ring_params()
|   133  ring_size = bn->tx_ring_size;  in bnge_set_ring_params()
|   142  bn->cp_ring_size = ring_size;  in bnge_set_ring_params()
|   [all …]
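The bnge_set_ring_params() hits above follow a common pattern: clamp the requested descriptor count to a hardware maximum, then derive a page count with a power-of-two helper. A minimal userspace sketch of that pattern follows; adjust_pow_two() is a hypothetical stand-in for bnge_adjust_pow_two(), whose body is not part of this listing, and both constants are assumed values, not the driver's.

    /* Sketch of the clamp-then-page-count pattern suggested by the
     * bnge_set_ring_params() hits. adjust_pow_two() is a hypothetical
     * stand-in for bnge_adjust_pow_two(): it rounds the number of
     * descriptor pages up to a power of two.
     */
    #include <stdio.h>

    #define MAX_RX_DESC_CNT 8192   /* assumed hardware limit */
    #define RX_DESC_CNT      256   /* assumed descriptors per ring page */

    static unsigned int adjust_pow_two(unsigned int ring_size,
                                       unsigned int descs_per_page)
    {
        unsigned int pages = (ring_size + descs_per_page - 1) / descs_per_page;
        unsigned int p = 1;

        while (p < pages)   /* round page count up to a power of two */
            p <<= 1;
        return p;
    }

    int main(void)
    {
        unsigned int ring_size = 9000;   /* requested size, too large */

        if (ring_size > MAX_RX_DESC_CNT) {
            printf("clamping requested size %u to %u\n",
                   ring_size, MAX_RX_DESC_CNT);
            ring_size = MAX_RX_DESC_CNT;
        }
        printf("rx pages: %u\n", adjust_pow_two(ring_size, RX_DESC_CNT));
        return 0;
    }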
|
| /drivers/gpu/drm/amd/amdgpu/ |
| A D | amdgpu_ih.c |
|    43  unsigned ring_size, bool use_bus_addr)  in amdgpu_ih_ring_init() argument
|    49  rb_bufsz = order_base_2(ring_size / 4);  in amdgpu_ih_ring_init()
|    50  ring_size = (1 << rb_bufsz) * 4;  in amdgpu_ih_ring_init()
|    51  ih->ring_size = ring_size;  in amdgpu_ih_ring_init()
|    52  ih->ptr_mask = ih->ring_size - 1;  in amdgpu_ih_ring_init()
|    65  ih->ring = dma_alloc_coherent(adev->dev, ih->ring_size + 8,  in amdgpu_ih_ring_init()
|    71  ih->wptr_addr = dma_addr + ih->ring_size;  in amdgpu_ih_ring_init()
|    72  ih->wptr_cpu = &ih->ring[ih->ring_size / 4];  in amdgpu_ih_ring_init()
|    73  ih->rptr_addr = dma_addr + ih->ring_size + 4;  in amdgpu_ih_ring_init()
|    74  ih->rptr_cpu = &ih->ring[(ih->ring_size / 4) + 1];  in amdgpu_ih_ring_init()
|   [all …]
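Lines 43-74 of amdgpu_ih.c show the canonical power-of-two ring setup: round the byte size up via order_base_2(), derive a wrap mask, and allocate 8 extra bytes so the write- and read-pointer shadow dwords sit just past the ring. A compilable sketch of the same arithmetic, assuming order_base_2() is a ceiling log2 and using malloc() in place of dma_alloc_coherent():

    /* Sketch of the sizing math in amdgpu_ih_ring_init(). order_base_2()
     * is the kernel's ceiling log2; malloc() stands in for
     * dma_alloc_coherent().
     */
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    static unsigned order_base_2(unsigned n)   /* ceil(log2(n)) */
    {
        unsigned order = 0;

        while ((1u << order) < n)
            order++;
        return order;
    }

    int main(void)
    {
        unsigned ring_size = 4000;          /* bytes, not a power of two */
        unsigned rb_bufsz = order_base_2(ring_size / 4);
        uint32_t *ring, ptr_mask;

        ring_size = (1u << rb_bufsz) * 4;   /* rounded up: 4096 bytes */
        ptr_mask = ring_size - 1;           /* mask for byte-offset rptr/wptr */

        /* 8 extra bytes: one dword each for the wptr and rptr shadows,
         * placed immediately after the ring, as lines 65-74 do. */
        ring = malloc(ring_size + 8);
        if (!ring)
            return 1;

        uint32_t *wptr_cpu = &ring[ring_size / 4];
        uint32_t *rptr_cpu = &ring[(ring_size / 4) + 1];

        printf("ring_size=%u ptr_mask=%#x\n", ring_size, ptr_mask);
        *wptr_cpu = 0;
        *rptr_cpu = 0;
        free(ring);
        return 0;
    }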
|
| A D | amdgpu_ring.c |
|   345  ring->ring_size = roundup_pow_of_two(max_dw * 4 * sched_hw_submission);  in amdgpu_ring_init()
|   347  ring->ring_size = roundup_pow_of_two(max_dw * 4);  in amdgpu_ring_init()
|   348  ring->count_dw = (ring->ring_size - 4) >> 2;  in amdgpu_ring_init()
|   353  ring->buf_mask = (ring->ring_size / 4) - 1;  in amdgpu_ring_init()
|   360  ring->ring_backup = kvzalloc(ring->ring_size, GFP_KERNEL);  in amdgpu_ring_init()
|   367  r = amdgpu_bo_create_kernel(adev, ring->ring_size + ring->funcs->extra_dw, PAGE_SIZE,  in amdgpu_ring_init()
|   537  if (*pos >= (ring->ring_size + 12))  in amdgpu_debugfs_ring_read()
|   554  size = ring->ring_size - (early[0] - early[1]);  in amdgpu_debugfs_ring_read()
|   656  ring->ring_size + 12);  in amdgpu_debugfs_ring_init()
|   660  ring->ring_size + 12);  in amdgpu_debugfs_ring_init()
|   [all …]
|
| A D | navi10_ih.c |
|    53  if (adev->irq.ih.ring_size) {  in navi10_ih_init_register_offset()
|    66  if (adev->irq.ih1.ring_size) {  in navi10_ih_init_register_offset()
|    77  if (adev->irq.ih2.ring_size) {  in navi10_ih_init_register_offset()
|   203  if (ih[i]->ring_size) {  in navi10_ih_toggle_interrupts()
|   215  int rb_bufsz = order_base_2(ih->ring_size / 4);  in navi10_ih_rb_cntl()
|   353  if (ih[i]->ring_size) {  in navi10_ih_irq_init()
|   373  if (adev->irq.ih_soft.ring_size)  in navi10_ih_irq_init()
|   473  if ((v < ih->ring_size) && (v != ih->rptr))  in navi10_ih_irq_rearm()
|   580  adev->irq.ih1.ring_size = 0;  in navi10_ih_sw_init()
|   581  adev->irq.ih2.ring_size = 0;  in navi10_ih_sw_init()
|
| A D | vega10_ih.c |
|    51  if (adev->irq.ih.ring_size) {  in vega10_ih_init_register_offset()
|    64  if (adev->irq.ih1.ring_size) {  in vega10_ih_init_register_offset()
|    75  if (adev->irq.ih2.ring_size) {  in vega10_ih_init_register_offset()
|   148  if (ih[i]->ring_size) {  in vega10_ih_toggle_interrupts()
|   160  int rb_bufsz = order_base_2(ih->ring_size / 4);  in vega10_ih_rb_cntl()
|   285  if (ih[i]->ring_size) {  in vega10_ih_irq_init()
|   303  if (adev->irq.ih_soft.ring_size)  in vega10_ih_irq_init()
|   403  if ((v < ih->ring_size) && (v != ih->rptr))  in vega10_ih_irq_rearm()
|
| A D | vega20_ih.c |
|    59  if (adev->irq.ih.ring_size) {  in vega20_ih_init_register_offset()
|    72  if (adev->irq.ih1.ring_size) {  in vega20_ih_init_register_offset()
|    83  if (adev->irq.ih2.ring_size) {  in vega20_ih_init_register_offset()
|   184  if (ih[i]->ring_size) {  in vega20_ih_toggle_interrupts()
|   196  int rb_bufsz = order_base_2(ih->ring_size / 4);  in vega20_ih_rb_cntl()
|   348  if (ih[i]->ring_size) {  in vega20_ih_irq_init()
|   384  if (adev->irq.ih_soft.ring_size)  in vega20_ih_irq_init()
|   488  if ((v < ih->ring_size) && (v != ih->rptr))  in vega20_ih_irq_rearm()
|
| A D | ih_v6_0.c |
|    52  if (adev->irq.ih.ring_size) {  in ih_v6_0_init_register_offset()
|    65  if (adev->irq.ih1.ring_size) {  in ih_v6_0_init_register_offset()
|   205  if (ih[i]->ring_size) {  in ih_v6_0_toggle_interrupts()
|   217  int rb_bufsz = order_base_2(ih->ring_size / 4);  in ih_v6_0_rb_cntl()
|   347  if (ih[i]->ring_size) {  in ih_v6_0_irq_init()
|   379  if (adev->irq.ih1.ring_size) {  in ih_v6_0_irq_init()
|   402  if (adev->irq.ih_soft.ring_size)  in ih_v6_0_irq_init()
|   497  if ((v < ih->ring_size) && (v != ih->rptr))  in ih_v6_0_irq_rearm()
|
| A D | ih_v6_1.c |
|    52  if (adev->irq.ih.ring_size) {  in ih_v6_1_init_register_offset()
|    65  if (adev->irq.ih1.ring_size) {  in ih_v6_1_init_register_offset()
|   177  if (ih[i]->ring_size) {  in ih_v6_1_toggle_interrupts()
|   189  int rb_bufsz = order_base_2(ih->ring_size / 4);  in ih_v6_1_rb_cntl()
|   319  if (ih[i]->ring_size) {  in ih_v6_1_irq_init()
|   350  if (adev->irq.ih1.ring_size) {  in ih_v6_1_irq_init()
|   373  if (adev->irq.ih_soft.ring_size)  in ih_v6_1_irq_init()
|   466  if ((v < ih->ring_size) && (v != ih->rptr))  in ih_v6_1_irq_rearm()
|
| A D | ih_v7_0.c |
|    52  if (adev->irq.ih.ring_size) {  in ih_v7_0_init_register_offset()
|    65  if (adev->irq.ih1.ring_size) {  in ih_v7_0_init_register_offset()
|   177  if (ih[i]->ring_size) {  in ih_v7_0_toggle_interrupts()
|   189  int rb_bufsz = order_base_2(ih->ring_size / 4);  in ih_v7_0_rb_cntl()
|   319  if (ih[i]->ring_size) {  in ih_v7_0_irq_init()
|   350  if (adev->irq.ih1.ring_size) {  in ih_v7_0_irq_init()
|   373  if (adev->irq.ih_soft.ring_size)  in ih_v7_0_irq_init()
|   465  if ((v < ih->ring_size) && (v != ih->rptr))  in ih_v7_0_irq_rearm()
|
| A D | amdgpu_ih.h |
|    52  unsigned ring_size;  member
|   105  unsigned ring_size, bool use_bus_addr);
|
| /drivers/crypto/ccp/ |
| A D | tee-dev.c |
|    25  static int tee_alloc_ring(struct psp_tee_device *tee, int ring_size)  in tee_alloc_ring() argument
|    30  if (!ring_size)  in tee_alloc_ring()
|    36  start_addr = (void *)__get_free_pages(GFP_KERNEL, get_order(ring_size));  in tee_alloc_ring()
|    40  memset(start_addr, 0x0, ring_size);  in tee_alloc_ring()
|    42  rb_mgr->ring_size = ring_size;  in tee_alloc_ring()
|    57  get_order(rb_mgr->ring_size));  in tee_free_ring()
|    60  rb_mgr->ring_size = 0;  in tee_free_ring()
|    76  cmd->size = tee->rb_mgr.ring_size;  in tee_alloc_cmd_buffer()
|    91  int ring_size = MAX_RING_BUFFER_ENTRIES * sizeof(struct tee_ring_cmd);  in tee_init_ring() local
|    98  ret = tee_alloc_ring(tee, ring_size);  in tee_init_ring()
|   [all …]
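tee_alloc_ring() sizes the ring as MAX_RING_BUFFER_ENTRIES * sizeof(struct tee_ring_cmd) (line 91) and allocates whole pages with __get_free_pages(..., get_order(ring_size)). A sketch of what get_order() does, assuming 4 KiB pages; the entry count and size below are assumed values, since neither constant's definition is in the listing:

    /* Sketch of the get_order() page rounding used by tee_alloc_ring():
     * get_order(n) is the smallest order with PAGE_SIZE << order >= n.
     * PAGE_SIZE is assumed to be 4 KiB here.
     */
    #include <stdio.h>

    #define PAGE_SIZE 4096u

    static unsigned get_order(unsigned long size)
    {
        unsigned order = 0;

        while ((PAGE_SIZE << order) < size)
            order++;
        return order;
    }

    int main(void)
    {
        /* assumed stand-ins for MAX_RING_BUFFER_ENTRIES and
         * sizeof(struct tee_ring_cmd) */
        unsigned long ring_size = 512 * 36;

        printf("ring_size=%lu -> order %u (%u bytes allocated)\n",
               ring_size, get_order(ring_size),
               PAGE_SIZE << get_order(ring_size));
        return 0;
    }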
|
| /drivers/net/ethernet/intel/idpf/ |
| A D | idpf_controlq.c |
|    65  for (i = 0; i < cq->ring_size; i++) {  in idpf_ctlq_init_rxq_bufs()
|   105  cq->ring_size = 0;  in idpf_ctlq_shutdown()
|   137  cq->ring_size = qinfo->len;  in idpf_ctlq_add()
|   363  if (*clean_count > cq->ring_size)  in idpf_ctlq_clean_sq()
|   393  if (ntc == cq->ring_size)  in idpf_ctlq_clean_sq()
|   431  if (*buff_count > cq->ring_size)  in idpf_ctlq_post_rx_buffs()
|   439  if (tbp >= cq->ring_size)  in idpf_ctlq_post_rx_buffs()
|   461  if (tbp >= cq->ring_size)  in idpf_ctlq_post_rx_buffs()
|   478  if (tbp >= cq->ring_size)  in idpf_ctlq_post_rx_buffs()
|   505  if (ntp == cq->ring_size)  in idpf_ctlq_post_rx_buffs()
|   [all …]
|
| A D | idpf_controlq_setup.c |
|    14  size_t size = cq->ring_size * sizeof(struct idpf_ctlq_desc);  in idpf_ctlq_alloc_desc_ring()
|    43  cq->bi.rx_buff = kcalloc(cq->ring_size, sizeof(struct idpf_dma_mem *),  in idpf_ctlq_alloc_bufs()
|    49  for (i = 0; i < cq->ring_size - 1; i++) {  in idpf_ctlq_alloc_bufs()
|   112  for (i = 0; i < cq->ring_size; i++) {  in idpf_ctlq_free_bufs()
|
| /drivers/gpu/drm/radeon/ |
| A D | radeon_ring.c |
|    88  ring->ring_free_dw = rptr + (ring->ring_size / 4);  in radeon_ring_free_size()
|    93  ring->ring_free_dw = ring->ring_size / 4;  in radeon_ring_free_size()
|   114  if (ndw > (ring->ring_size / 4))  in radeon_ring_alloc()
|   316  size = ring->wptr + (ring->ring_size / 4);  in radeon_ring_backup()
|   383  int radeon_ring_init(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ring_size,  in radeon_ring_init() argument
|   388  ring->ring_size = ring_size;  in radeon_ring_init()
|   394  r = radeon_bo_create(rdev, ring->ring_size, PAGE_SIZE, true,  in radeon_ring_init()
|   420  ring->ptr_mask = (ring->ring_size / 4) - 1;  in radeon_ring_init()
|   421  ring->ring_free_dw = ring->ring_size / 4;  in radeon_ring_init()
|   476  count = (ring->ring_size / 4) - ring->ring_free_dw;  in radeon_debugfs_ring_info_show()
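radeon counts free space in dwords. Line 88 adds rptr to the ring length in dwords, and line 93 is the empty-ring special case; the subtraction and masking steps between them fall outside the lines shown, so the sketch below reconstructs them along the usual radeon pattern, relying on ring_size being a power of two and ptr_mask being (ring_size / 4) - 1 (line 420):

    /* Sketch of the radeon free-space computation around lines 88-93:
     * counts are in 32-bit dwords, and because ring_size / 4 is a power
     * of two the subtract-and-mask below acts as a modulo. A result of
     * 0 means rptr == wptr, which radeon treats as a fully empty ring.
     */
    #include <stdint.h>
    #include <stdio.h>

    struct ring {
        unsigned ring_size;     /* bytes, power of two */
        uint32_t ptr_mask;      /* (ring_size / 4) - 1, cf. line 420 */
        uint32_t wptr;
        unsigned ring_free_dw;
    };

    static void ring_free_size(struct ring *ring, uint32_t rptr)
    {
        ring->ring_free_dw = rptr + (ring->ring_size / 4);
        ring->ring_free_dw -= ring->wptr;     /* assumed, between lines shown */
        ring->ring_free_dw &= ring->ptr_mask; /* assumed, between lines shown */
        if (!ring->ring_free_dw)              /* rptr == wptr: empty ring */
            ring->ring_free_dw = ring->ring_size / 4;
    }

    int main(void)
    {
        struct ring r = { .ring_size = 4096, .ptr_mask = 1023, .wptr = 1000 };

        ring_free_size(&r, 10);   /* reader lags writer across the wrap */
        printf("free dwords: %u\n", r.ring_free_dw);   /* 34 */
        return 0;
    }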
|
| /drivers/xen/ |
| A D | evtchn.c |
|    67  unsigned int ring_size;  member
|    99  return idx & (u->ring_size - 1);  in evtchn_ring_offset()
|   183  if ((prod - cons) < u->ring_size) {  in evtchn_interrupt()
|   240  if (((c ^ p) & u->ring_size) != 0) {  in evtchn_read()
|   241  bytes1 = (u->ring_size - evtchn_ring_offset(u, c)) *  in evtchn_read()
|   327  if (u->nr_evtchns <= u->ring_size)  in evtchn_resize_ring()
|   330  if (u->ring_size == 0)  in evtchn_resize_ring()
|   333  new_size = 2 * u->ring_size;  in evtchn_resize_ring()
|   360  memcpy(new_ring + u->ring_size, old_ring,  in evtchn_resize_ring()
|   361  u->ring_size * sizeof(*u->ring));  in evtchn_resize_ring()
|   [all …]
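evtchn keeps free-running producer/consumer counters: an index maps into the ring with idx & (ring_size - 1) (line 99), and occupancy is simply prod - cons (line 183), which stays correct across unsigned wraparound for power-of-two sizes. The resize at lines 333-361 doubles the ring; copying the old contents into the upper half (line 360), and in the full function into the lower half as well, keeps every masked index pointing at the same element afterwards. A self-contained sketch of these conventions:

    /* Sketch of the evtchn ring conventions: free-running prod/cons
     * counters, power-of-two size, and a doubling resize that copies the
     * old contents into both halves so indices masked with the new,
     * larger mask still land on the right elements.
     */
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    typedef unsigned int evtchn_port_t;

    struct per_user {
        unsigned int ring_size;             /* always a power of two */
        unsigned int ring_prod, ring_cons;  /* free-running counters */
        evtchn_port_t *ring;
    };

    static unsigned int ring_offset(struct per_user *u, unsigned int idx)
    {
        return idx & (u->ring_size - 1);
    }

    static int resize_ring(struct per_user *u)
    {
        unsigned int new_size = 2 * u->ring_size;
        evtchn_port_t *new_ring = calloc(new_size, sizeof(*new_ring));

        if (!new_ring)
            return -1;

        /* Copy the old contents twice: whichever half a masked index now
         * selects, it sees the element it saw before the resize. */
        memcpy(new_ring, u->ring, u->ring_size * sizeof(*u->ring));
        memcpy(new_ring + u->ring_size, u->ring,
               u->ring_size * sizeof(*u->ring));

        free(u->ring);
        u->ring = new_ring;
        u->ring_size = new_size;
        return 0;
    }

    int main(void)
    {
        struct per_user u = { .ring_size = 4 };

        u.ring = calloc(u.ring_size, sizeof(*u.ring));
        for (u.ring_prod = 0; u.ring_prod < 3; u.ring_prod++)
            u.ring[ring_offset(&u, u.ring_prod)] = 100 + u.ring_prod;

        /* occupancy is prod - cons, valid even after counter wraparound */
        if ((u.ring_prod - u.ring_cons) < u.ring_size)
            printf("ring has space\n");

        resize_ring(&u);
        printf("after resize, entry at cons: %u\n",
               u.ring[ring_offset(&u, u.ring_cons)]);   /* still 100 */
        free(u.ring);
        return 0;
    }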
|
| /drivers/net/ethernet/renesas/ |
| A D | rswitch.c |
|   222  if (index + num >= gq->ring_size)  in rswitch_next_queue_index()
|   223  index = (index + num) % gq->ring_size;  in rswitch_next_queue_index()
|   255  index = (i + start_index) % gq->ring_size;  in rswitch_gwca_queue_alloc_rx_buf()
|   286  for (i = 0; i < gq->ring_size; i++)  in rswitch_gwca_queue_free()
|   320  gq->ring_size = ring_size;  in rswitch_gwca_queue_alloc()
|   378  unsigned int ring_size = sizeof(struct rswitch_ext_desc) * gq->ring_size;  in rswitch_gwca_queue_format() local
|   384  memset(gq->tx_ring, 0, ring_size);  in rswitch_gwca_queue_format()
|   491  unsigned int ring_size = sizeof(struct rswitch_ext_ts_desc) * gq->ring_size;  in rswitch_gwca_queue_ext_ts_format() local
|   496  memset(gq->rx_ring, 0, ring_size);  in rswitch_gwca_queue_ext_ts_format()
|   548  gq->ring_size = TS_RING_SIZE;  in rswitch_gwca_ts_queue_alloc()
|   [all …]
|
| /drivers/net/ethernet/amazon/ena/ |
| A D | ena_netdev.h |
|    89  #define ENA_TX_RING_IDX_NEXT(idx, ring_size) (((idx) + 1) & ((ring_size) - 1))  argument
|    91  #define ENA_RX_RING_IDX_NEXT(idx, ring_size) (((idx) + 1) & ((ring_size) - 1))  argument
|    92  #define ENA_RX_RING_IDX_ADD(idx, n, ring_size) \  argument
|    93  (((idx) + (n)) & ((ring_size) - 1))
|   268  int ring_size;  member
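All three macros reduce an index modulo the ring size with a bitwise AND, which is only correct when ring_size is a power of two. A tiny sketch showing the wraparound behavior (the assert is an addition here, documenting the precondition rather than anything the header enforces):

    /* Sketch of the masked-increment macros above; they assume ring_size
     * is a power of two, so '& (ring_size - 1)' is a cheap modulo.
     */
    #include <assert.h>
    #include <stdio.h>

    #define RING_IDX_NEXT(idx, ring_size)   (((idx) + 1) & ((ring_size) - 1))
    #define RING_IDX_ADD(idx, n, ring_size) (((idx) + (n)) & ((ring_size) - 1))

    int main(void)
    {
        unsigned int size = 8, idx = 7;

        assert((size & (size - 1)) == 0);  /* must be a power of two */
        printf("next after %u: %u\n", idx, RING_IDX_NEXT(idx, size));   /* 0 */
        printf("7 + 3 wraps to: %u\n", RING_IDX_ADD(idx, 3, size));     /* 2 */
        return 0;
    }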
|
| A D | ena_netdev.c |
|   162  ring->ring_size);  in ena_xmit_common()
|   600  rx_ring->ring_size);  in ena_refill_rx_bufs()
|   644  bufs_num = rx_ring->ring_size - 1;  in ena_refill_all_rx_bufs()
|   860  tx_ring->ring_size);  in ena_clean_tx_irq()
|   994  rx_ring->ring_size);  in ena_rx_skb()
|  1027  rx_ring->ring_size);  in ena_rx_skb()
|  1257  rx_ring->ring_size);  in ena_clean_rx_irq()
|  1910  ctx.queue_size = tx_ring->ring_size;  in ena_create_io_tx_queue()
|  1978  ctx.queue_size = rx_ring->ring_size;  in ena_create_io_rx_queue()
|  2900  if (adapter->tx_ring->ring_size)  in ena_calc_io_queue_size()
|   [all …]
|
| /drivers/uio/ |
| A D | uio_hv_generic.c |
|   261  size_t ring_size = hv_dev_ring_size(channel);  in hv_uio_probe() local
|   263  if (!ring_size)  in hv_uio_probe()
|   264  ring_size = SZ_2M;  in hv_uio_probe()
|   267  ring_size = VMBUS_RING_SIZE(ring_size);  in hv_uio_probe()
|   273  ret = vmbus_alloc_ring(channel, ring_size, ring_size);  in hv_uio_probe()
|
| /drivers/crypto/intel/qat/qat_common/ |
| A D | adf_transport.c |
|    93  ADF_MAX_INFLIGHTS(ring->ring_size, ring->msg_size)) {  in adf_send_message()
|   103  ADF_RING_SIZE_MODULO(ring->ring_size));  in adf_send_message()
|   124  ADF_RING_SIZE_MODULO(ring->ring_size));  in adf_handle_response()
|   139  u32 ring_config = BUILD_RING_CONFIG(ring->ring_size);  in adf_configure_tx_ring()
|   151  BUILD_RESP_RING_CONFIG(ring->ring_size,  in adf_configure_rx_ring()
|   168  ADF_SIZE_TO_RING_SIZE_IN_BYTES(ring->ring_size);  in adf_init_ring()
|   194  ring->ring_size);  in adf_init_ring()
|   206  ADF_SIZE_TO_RING_SIZE_IN_BYTES(ring->ring_size);  in adf_cleanup_ring()
|   272  ring->ring_size = adf_verify_ring_size(msg_size, num_msgs);  in adf_create_ring()
|   275  max_inflights = ADF_MAX_INFLIGHTS(ring->ring_size, ring->msg_size);  in adf_create_ring()
|
| /drivers/bus/mhi/ep/ |
| A D | ring.c |
|    26  ring->rd_offset = (ring->rd_offset + 1) % ring->ring_size;  in mhi_ep_ring_inc_index()
|    55  buf_info.size = (ring->ring_size - start) * sizeof(struct mhi_ring_element);  in __mhi_ep_cache_ring()
|   125  num_free_elem = ((ring->ring_size - ring->rd_offset) + ring->wr_offset) - 1;  in mhi_ep_ring_add_element()
|   187  ring->ring_size = mhi_ep_ring_num_elems(ring);  in mhi_ep_ring_start()
|   208  ring->ring_cache = kcalloc(ring->ring_size, sizeof(struct mhi_ring_element), GFP_KERNEL);  in mhi_ep_ring_start()
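mhi_ep rings take their element count from the channel context at start time (line 187), so the size need not be a power of two and indices advance with a modulo (line 26). Line 125 is the wrapped branch of a free-element count with the usual one-empty-slot convention. A generic sketch; the unwrapped branch and the prod/cons naming are assumptions, since the roles of the driver's rd/wr offsets depend on the ring's direction:

    /* Sketch of a modulo-advanced ring like the one above: sizes need
     * not be powers of two, so indices wrap with '%', and one slot is
     * kept empty to distinguish full from empty.
     */
    #include <stdio.h>

    struct ring {
        unsigned int ring_size;   /* element count, any value >= 2 */
        unsigned int prod, cons;
    };

    static void ring_inc(struct ring *r, unsigned int *idx)
    {
        *idx = (*idx + 1) % r->ring_size;   /* cf. mhi_ep_ring_inc_index() */
    }

    static unsigned int ring_free(const struct ring *r)
    {
        if (r->prod < r->cons)              /* assumed unwrapped branch */
            return (r->cons - r->prod) - 1;
        return ((r->ring_size - r->prod) + r->cons) - 1;   /* cf. line 125 */
    }

    int main(void)
    {
        struct ring r = { .ring_size = 5 };   /* deliberately not 2^n */

        ring_inc(&r, &r.prod);
        ring_inc(&r, &r.prod);
        printf("free elements: %u\n", ring_free(&r));   /* 2 used, 2 free */
        return 0;
    }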
|
| /drivers/net/ethernet/atheros/ |
| A D | ag71xx.c |
|   774  int ring_mask, ring_size;  in ag71xx_tx_packets() local
|   778  ring_size = BIT(ring->order);  in ag71xx_tx_packets()
|  1153  int ring_size = BIT(ring->order);  in ag71xx_ring_tx_init() local
|  1154  int ring_mask = ring_size - 1;  in ag71xx_ring_tx_init()
|  1157  for (i = 0; i < ring_size; i++) {  in ag71xx_ring_tx_init()
|  1178  int ring_size = BIT(ring->order);  in ag71xx_ring_rx_clean() local
|  1184  for (i = 0; i < ring_size; i++)  in ag71xx_ring_rx_clean()
|  1303  int ring_size, tx_size;  in ag71xx_rings_init() local
|  1333  int ring_size;  in ag71xx_rings_free() local
|  1510  ring_size = BIT(ring->order);  in ag71xx_hard_start_xmit()
|   [all …]
|
| /drivers/net/ethernet/intel/ice/ |
| A D | ice_fwlog.c |
|    83  int status, ring_size;  in ice_fwlog_realloc_rings() local
|    94  ring_size = ICE_FWLOG_INDEX_TO_BYTES(index) / ICE_AQ_MAX_BUF_LEN;  in ice_fwlog_realloc_rings()
|    95  if (ring_size == hw->fwlog_ring.size)  in ice_fwlog_realloc_rings()
|   102  ring.rings = kcalloc(ring_size, sizeof(*ring.rings), GFP_KERNEL);  in ice_fwlog_realloc_rings()
|   106  ring.size = ring_size;  in ice_fwlog_realloc_rings()
|
| /drivers/net/ethernet/pensando/ionic/ |
| A D | ionic_dev.c |
|   693  .q_init.ring_size = ilog2(q->num_descs),  in ionic_dev_cmd_adminq_init()
|   737  unsigned int ring_size;  in ionic_cq_init() local
|   742  ring_size = ilog2(num_descs);  in ionic_cq_init()
|   743  if (ring_size < 2 || ring_size > 16)  in ionic_cq_init()
|   787  unsigned int ring_size;  in ionic_q_init() local
|   792  ring_size = ilog2(num_descs);  in ionic_q_init()
|   793  if (ring_size < 2 || ring_size > 16)  in ionic_q_init()
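ionic hands the ring size to the device as log2 of the descriptor count (line 693) and rejects counts whose log2 falls outside 2..16, i.e. fewer than 4 or more than 65536 descriptors. A sketch of that validation; the explicit power-of-two check is an addition here, since ilog2() truncates and the listing does not show where the driver enforces it:

    /* Sketch of the ionic convention above: the device takes the ring
     * size as log2(num_descs), so the driver validates the count before
     * encoding it. ilog2() here is floor(log2), like the kernel's.
     */
    #include <stdio.h>

    static unsigned int ilog2(unsigned int n)
    {
        unsigned int l = 0;

        while (n >>= 1)
            l++;
        return l;
    }

    static int encode_ring_size(unsigned int num_descs, unsigned char *out)
    {
        unsigned int ring_size;

        if (num_descs == 0 || (num_descs & (num_descs - 1)))
            return -1;                      /* not a power of two (assumed) */

        ring_size = ilog2(num_descs);
        if (ring_size < 2 || ring_size > 16)
            return -1;                      /* outside device limits */

        *out = (unsigned char)ring_size;    /* cf. q_init.ring_size above */
        return 0;
    }

    int main(void)
    {
        unsigned char enc;

        if (!encode_ring_size(1024, &enc))
            printf("1024 descriptors -> ring_size %u\n", enc);   /* 10 */
        if (encode_ring_size(1000, &enc))
            printf("1000 rejected: not a power of two\n");
        return 0;
    }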
|
| /drivers/net/wireless/ath/ath11k/ |
| A D | hal.c |
|   676  if (srng->u.dst_ring.tp == srng->ring_size)  in ath11k_hal_srng_dst_get_next_entry()
|   705  return (srng->ring_size - tp + hp) / srng->entry_size;  in ath11k_hal_srng_dst_num_free()
|   728  return ((srng->ring_size - hp + tp) / srng->entry_size) - 1;  in ath11k_hal_srng_src_num_free()
|   745  next_hp = (srng->u.src_ring.hp + srng->entry_size) % srng->ring_size;  in ath11k_hal_srng_src_get_next_entry()
|   773  srng->ring_size;  in ath11k_hal_srng_src_reap_next()
|   796  srng->ring_size;  in ath11k_hal_srng_src_get_next_reaped()
|   807  next_hp = (srng->u.src_ring.hp + srng->entry_size) % srng->ring_size;  in ath11k_hal_srng_src_next_peek()
|   819  if (((srng->u.src_ring.hp + srng->entry_size) % srng->ring_size) ==  in ath11k_hal_srng_src_peek()
|  1023  srng->ring_size = srng->entry_size * srng->num_entries;  in ath11k_hal_srng_setup()
|  1052  srng->u.src_ring.reap_hp = srng->ring_size - srng->entry_size;  in ath11k_hal_srng_setup()
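ath11k SRNG head/tail pointers are offsets counted in 32-bit words, each descriptor spans entry_size words, and ring_size is entry_size * num_entries (line 1023). The source-ring free count at line 728 is the wrapped branch: dividing by entry_size converts words to entries, and the trailing - 1 keeps one entry unused so full and empty stay distinguishable. A sketch with the unwrapped branch filled in as an assumption:

    /* Sketch of the ath11k SRNG arithmetic above: hp/tp are offsets in
     * 32-bit words, each entry is entry_size words, and one entry is
     * kept unused on the source ring, hence the trailing '- 1'.
     */
    #include <stdio.h>

    struct srng {
        unsigned int ring_size;   /* total size in 32-bit words */
        unsigned int entry_size;  /* words per descriptor */
        unsigned int hp, tp;      /* head/tail offsets, in words */
    };

    static unsigned int src_num_free(const struct srng *s)
    {
        if (s->hp >= s->tp)       /* wrapped branch, cf. line 728 */
            return ((s->ring_size - s->hp + s->tp) / s->entry_size) - 1;
        return ((s->tp - s->hp) / s->entry_size) - 1;   /* assumed branch */
    }

    static void src_advance(struct srng *s)
    {
        s->hp = (s->hp + s->entry_size) % s->ring_size;  /* cf. line 745 */
    }

    int main(void)
    {
        struct srng s = { .entry_size = 8, .hp = 0, .tp = 0 };

        s.ring_size = s.entry_size * 16;      /* 16 entries */
        src_advance(&s);                      /* produce one entry */
        printf("free entries: %u\n", src_num_free(&s));   /* 14 */
        return 0;
    }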
|