
Searched refs:buf_mask (Results 1 – 20 of 20) sorted by relevance

/drivers/gpu/drm/amd/amdgpu/
amdgpu_ring_mux.c
58 start = s_start & ring->buf_mask; in amdgpu_ring_mux_copy_pkt_from_sw_ring()
59 end = s_end & ring->buf_mask; in amdgpu_ring_mux_copy_pkt_from_sw_ring()
110 if (chunk->cntl_offset <= e->ring->buf_mask) in amdgpu_mux_resubmit_chunks()
113 if (chunk->ce_offset <= e->ring->buf_mask) in amdgpu_mux_resubmit_chunks()
115 if (chunk->de_offset <= e->ring->buf_mask) in amdgpu_mux_resubmit_chunks()
296 end = e->end_ptr_in_hw_ring & mux->real_ring->buf_mask; in amdgpu_ring_mux_get_rptr()
305 e->sw_rptr = (e->sw_cptr + offset) & ring->buf_mask; in amdgpu_ring_mux_get_rptr()
428 offset = ring->wptr & ring->buf_mask; in amdgpu_sw_ring_ib_mark_offset()
456 chunk->cntl_offset = ring->buf_mask + 1; in amdgpu_ring_mux_start_ib()
457 chunk->de_offset = ring->buf_mask + 1; in amdgpu_ring_mux_start_ib()
[all …]
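The mux copy above masks both span endpoints into the software ring; when the masked start lands after the masked end the packet wraps, and it has to be copied in two pieces. A minimal sketch of that wraparound copy (copy_pkt_range and the struct layout are illustrative, not the driver's own types):

    #include <stdint.h>
    #include <string.h>

    struct ring {
        uint32_t *buf;      /* backing store, a power-of-two number of dwords */
        uint64_t  buf_mask; /* number of dwords - 1 */
    };

    /* Copy the dwords in [s_start, s_end) out of src, wrapping both offsets
     * with the mask; a span that crosses the end of the buffer is copied as
     * two chunks: the tail of the buffer, then the head. */
    static void copy_pkt_range(uint32_t *out, const struct ring *src,
                               uint64_t s_start, uint64_t s_end)
    {
        uint64_t start = s_start & src->buf_mask;
        uint64_t end = s_end & src->buf_mask;

        if (start > end) {
            uint64_t tail = src->buf_mask + 1 - start;

            memcpy(out, src->buf + start, tail * 4);
            memcpy(out + tail, src->buf, end * 4);
        } else {
            memcpy(out, src->buf + start, (end - start) * 4);
        }
    }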
amdgpu_ring.h
357 uint32_t buf_mask; member
474 while (i <= ring->buf_mask) in amdgpu_ring_clear_ring()
481 ring->ring[ring->wptr++ & ring->buf_mask] = v; in amdgpu_ring_write()
491 occupied = ring->wptr & ring->buf_mask; in amdgpu_ring_write_multiple()
492 chunk1 = ring->buf_mask + 1 - occupied; in amdgpu_ring_write_multiple()
526 WARN_ON(offset > ring->buf_mask); in amdgpu_ring_patch_cond_exec()
529 cur = (ring->wptr - 1) & ring->buf_mask; in amdgpu_ring_patch_cond_exec()
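amdgpu_ring_write() at line 481 never wraps the write pointer itself: wptr increases monotonically and every access is reduced with & buf_mask, which is why the dword count must be a power of two. A self-contained sketch of the same idiom, including the occupied/chunk1 split from lines 491–492 (simplified: no alignment padding, commit bookkeeping, or overflow checks):

    #include <stdint.h>
    #include <string.h>

    struct ring {
        uint32_t *ring;     /* backing store of (buf_mask + 1) dwords */
        uint64_t  wptr;     /* free-running write pointer, never wrapped */
        uint32_t  buf_mask; /* dword count - 1, count a power of two */
    };

    /* Single-dword write: the only wrapping is the AND at access time. */
    static void ring_write(struct ring *r, uint32_t v)
    {
        r->ring[r->wptr++ & r->buf_mask] = v;
    }

    /* Multi-dword write split into at most two memcpys around the wrap
     * point, mirroring the occupied/chunk1 arithmetic above. */
    static void ring_write_multiple(struct ring *r, const uint32_t *src,
                                    unsigned int count_dw)
    {
        unsigned int occupied = r->wptr & r->buf_mask;
        unsigned int chunk1 = r->buf_mask + 1 - occupied; /* dwords to wrap */

        if (chunk1 > count_dw)
            chunk1 = count_dw;
        memcpy(&r->ring[occupied], src, chunk1 * sizeof(uint32_t));
        memcpy(r->ring, src + chunk1, (count_dw - chunk1) * sizeof(uint32_t));
        r->wptr += count_dw;
    }

Keeping wptr free-running and masking only on access lets the pointer double as a cheap sequence counter; amdgpu_ring_clear_ring() at line 474 shows the converse, walking i from 0 to buf_mask to touch every slot exactly once.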
amdgpu_ring.c
136 occupied = ring->wptr & ring->buf_mask; in amdgpu_ring_insert_nop()
137 chunk1 = ring->buf_mask + 1 - occupied; in amdgpu_ring_insert_nop()
353 ring->buf_mask = (ring->ring_size / 4) - 1; in amdgpu_ring_init()
355 0xffffffffffffffff : ring->buf_mask; in amdgpu_ring_init()
519 early[0] = amdgpu_ring_get_rptr(ring) & ring->buf_mask; in amdgpu_debugfs_ring_read()
520 early[1] = amdgpu_ring_get_wptr(ring) & ring->buf_mask; in amdgpu_debugfs_ring_read()
521 early[2] = ring->wptr & ring->buf_mask; in amdgpu_debugfs_ring_read()
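Line 353 is where the invariant is established: ring_size is in bytes, so ring_size / 4 is the dword count, and subtracting one only yields a usable mask when that count is a power of two (line 355 is the other arm of a ternary choosing the pointer mask: all ones for 64-bit write pointers, otherwise buf_mask itself). A sketch of the derivation; the explicit power-of-two check is added here for illustration, the driver guarantees it by construction:

    #include <assert.h>
    #include <stdint.h>

    /* ring_size is in bytes; entries are 32-bit dwords, so the entry count
     * is ring_size / 4 and the mask is one less than that. */
    static uint32_t ring_buf_mask(uint32_t ring_size)
    {
        uint32_t ndw = ring_size / 4;

        /* Mask-based wrapping only works for power-of-two entry counts. */
        assert(ndw != 0 && (ndw & (ndw - 1)) == 0);
        return ndw - 1;
    }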
amdgpu_fence.c
779 unsigned int first_idx = start_wptr & ring->buf_mask; in amdgpu_ring_backup_unprocessed_command()
780 unsigned int last_idx = end_wptr & ring->buf_mask; in amdgpu_ring_backup_unprocessed_command()
784 for (i = first_idx; i != last_idx; ++i, i &= ring->buf_mask) in amdgpu_ring_backup_unprocessed_command()
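The loop at line 784 is the idiomatic way to walk a masked index range: increment, then re-mask, stopping when the end index is reached. A sketch, with a hypothetical save() callback standing in for the driver's backup buffer:

    #include <stdint.h>

    struct ring {
        uint32_t *ring;
        uint32_t  buf_mask;
    };

    /* Visit every dword between two write-pointer values, wrapping via the
     * mask; note the index is re-masked after each increment so the walk
     * rolls over from buf_mask back to 0. */
    static void backup_range(const struct ring *r, uint64_t start_wptr,
                             uint64_t end_wptr, void (*save)(uint32_t))
    {
        unsigned int first_idx = start_wptr & r->buf_mask;
        unsigned int last_idx = end_wptr & r->buf_mask;
        unsigned int i;

        for (i = first_idx; i != last_idx; ++i, i &= r->buf_mask)
            save(r->ring[i]);
    }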
amdgpu_dev_coredump.c
304 ring->buf_mask); in amdgpu_devcoredump_read()
amdgpu_cper.c
445 for (p = pos + 1; p <= ring->buf_mask; p++) { in amdgpu_cper_ring_get_ent_sz()
gfx_v9_0.c
998 amdgpu_ring_write(kiq_ring, lower_32_bits(ring->wptr & ring->buf_mask)); in gfx_v9_0_kiq_unmap_queues()
5469 if (offset + (payload_size >> 2) <= ring->buf_mask + 1) { in gfx_v9_0_ring_patch_ce_meta()
5473 (ring->buf_mask + 1 - offset) << 2); in gfx_v9_0_ring_patch_ce_meta()
5474 payload_size -= (ring->buf_mask + 1 - offset) << 2; in gfx_v9_0_ring_patch_ce_meta()
5476 ce_payload_cpu_addr + ((ring->buf_mask + 1 - offset) << 2), in gfx_v9_0_ring_patch_ce_meta()
5496 if (offset + (payload_size >> 2) <= ring->buf_mask + 1) { in gfx_v9_0_ring_patch_de_meta()
5500 (ring->buf_mask + 1 - offset) << 2); in gfx_v9_0_ring_patch_de_meta()
5501 payload_size -= (ring->buf_mask + 1 - offset) << 2; in gfx_v9_0_ring_patch_de_meta()
5503 de_payload_cpu_addr + ((ring->buf_mask + 1 - offset) << 2), in gfx_v9_0_ring_patch_de_meta()
5851 ret = ring->wptr & ring->buf_mask; in gfx_v9_0_ring_emit_init_cond_exec()
amdgpu_vpe.c
603 ret = ring->wptr & ring->buf_mask; in vpe_ring_init_cond_exec()
sdma_v6_0.c
154 ret = ring->wptr & ring->buf_mask; in sdma_v6_0_ring_init_cond_exec()
sdma_v7_0.c
154 ret = ring->wptr & ring->buf_mask; in sdma_v7_0_ring_init_cond_exec()
sdma_v5_2.c
153 ret = ring->wptr & ring->buf_mask; in sdma_v5_2_ring_init_cond_exec()
sdma_v5_0.c
313 ret = ring->wptr & ring->buf_mask; in sdma_v5_0_ring_init_cond_exec()
gfx_v11_0.c
6042 ret = ring->wptr & ring->buf_mask; in gfx_v11_0_ring_emit_init_cond_exec()
6073 offs = ring->wptr & ring->buf_mask; in gfx_v11_0_ring_emit_gfx_shadow()
gfx_v12_0.c
4579 ret = ring->wptr & ring->buf_mask; in gfx_v12_0_ring_emit_init_cond_exec()
gfx_v8_0.c
6296 ret = ring->wptr & ring->buf_mask; in gfx_v8_0_ring_emit_init_cond_exec()
gfx_v10_0.c
8860 ret = ring->wptr & ring->buf_mask; in gfx_v10_0_ring_emit_init_cond_exec()
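Every *_init_cond_exec() above returns wptr & buf_mask, the masked position of a just-emitted placeholder, and the matching patch helper (see amdgpu_ring_patch_cond_exec() at lines 526–529 of amdgpu_ring.h) rewrites that slot once the real dword count is known, allowing for a write pointer that wrapped in between. A sketch of the record-then-patch handshake (packet encoding omitted; names are illustrative):

    #include <stdint.h>

    struct ring {
        uint32_t *ring;
        uint64_t  wptr;
        uint32_t  buf_mask;
    };

    /* Emit a placeholder dword and record its masked position. */
    static unsigned int init_cond_exec(struct ring *r)
    {
        unsigned int offset = r->wptr & r->buf_mask;

        r->ring[r->wptr++ & r->buf_mask] = 0; /* patched below */
        return offset;
    }

    /* Patch the placeholder with the number of dwords emitted after it,
     * allowing for a write pointer that wrapped in the meantime. */
    static void patch_cond_exec(struct ring *r, unsigned int offset)
    {
        unsigned int cur = (r->wptr - 1) & r->buf_mask;

        if (cur >= offset)
            r->ring[offset] = cur - offset;
        else
            r->ring[offset] = r->buf_mask + 1 - offset + cur;
    }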
/drivers/media/platform/samsung/exynos4-is/
fimc-isp-video.c
95 dma->dma_out_mask = video->buf_mask; in isp_video_capture_start_streaming()
198 video->buf_mask |= BIT(ivb->index); in isp_video_capture_buffer_queue()
222 video->buf_mask = (1UL << video->buf_count) - 1; in isp_video_capture_buffer_queue()
250 video->buf_mask &= ~BIT(buf_index); in fimc_isp_video_irq_handler()
251 fimc_is_hw_set_isp_buf_mask(is, video->buf_mask); in fimc_isp_video_irq_handler()
fimc-isp.h
137 unsigned int buf_mask; member
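In exynos4-is, buf_mask is not an index mask at all: it is a bitmask with one bit per capture buffer, set when a buffer is queued to the ISP and cleared from the IRQ handler when the hardware completes it. A sketch of that bookkeeping, assuming bit i tracks buffer i:

    #include <stdint.h>

    #define BIT(n) (1UL << (n))

    struct isp_video {
        unsigned int buf_mask;  /* bit i set => buffer i is queued to hardware */
        unsigned int buf_count; /* total number of allocated buffers */
    };

    static void buffer_queue(struct isp_video *video, unsigned int index)
    {
        video->buf_mask |= BIT(index);
    }

    /* Once every buffer has been queued, the mask covers buf_count bits. */
    static void all_buffers_queued(struct isp_video *video)
    {
        video->buf_mask = (1UL << video->buf_count) - 1;
    }

    static void buffer_done(struct isp_video *video, unsigned int index)
    {
        video->buf_mask &= ~BIT(index); /* hand the buffer back to userspace */
    }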
/drivers/net/ethernet/mellanox/mlx5/core/steering/hws/
send.h
107 u16 buf_mask; member
230 return ((send_sq->cur_post & send_sq->buf_mask) == send_cq->poll_wqe); in mlx5hws_send_engine_empty()
send.c
83 idx = (send_sq->cur_post + ctrl->num_wqebbs) & send_sq->buf_mask; in mlx5hws_send_engine_post_req_wqe()
144 idx = sq->cur_post & sq->buf_mask; in mlx5hws_send_engine_post_end()
280 idx = wqe_cnt & send_sq->buf_mask; in hws_send_engine_retry_post_send()
287 idx = (wqe_cnt + 1) & send_sq->buf_mask; in hws_send_engine_retry_post_send()
621 wqe_cnt = be16_to_cpu(cqe->wqe_counter) & sq->buf_mask; in hws_send_engine_poll_cq()
626 cq->poll_wqe = (cq->poll_wqe + priv->num_wqebbs) & sq->buf_mask; in hws_send_engine_poll_cq()
630 cq->poll_wqe = (wqe_cnt + priv->num_wqebbs) & sq->buf_mask; in hws_send_engine_poll_cq()
716 sq->buf_mask = (queue->num_entries * MAX_WQES_PER_RULE) - 1; in hws_send_ring_alloc_sq()
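The hws send engine applies the same power-of-two trick to its send queue: line 716 sizes the mask as one less than the total WQE-slot count, so any free-running position reduces to a slot index with a single AND, and the engine is empty exactly when the producer has wrapped around to the slot the completion poller expects. A sketch under the assumption that num_entries * MAX_WQES_PER_RULE is a power of two (the value of MAX_WQES_PER_RULE below is illustrative, not the driver's):

    #include <stdbool.h>
    #include <stdint.h>

    #define MAX_WQES_PER_RULE 32 /* illustrative; the real value lives in send.h */

    struct send_sq {
        uint32_t cur_post; /* free-running producer position */
        uint16_t buf_mask; /* slot count - 1 */
    };

    struct send_cq {
        uint16_t poll_wqe; /* next slot the completion poller expects */
    };

    static void sq_init(struct send_sq *sq, uint16_t num_entries)
    {
        sq->buf_mask = num_entries * MAX_WQES_PER_RULE - 1;
    }

    /* Reduce the producer position to a slot index in the ring. */
    static uint16_t sq_slot(const struct send_sq *sq)
    {
        return sq->cur_post & sq->buf_mask;
    }

    /* Idle exactly when the producer has caught up with the poller. */
    static bool engine_empty(const struct send_sq *sq, const struct send_cq *cq)
    {
        return (sq->cur_post & sq->buf_mask) == cq->poll_wqe;
    }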
