/drivers/media/usb/pvrusb2/
pvrusb2-debugifc.c
     50  const char *wptr;    in debugifc_isolate_word() local
     55  wptr = NULL;    in debugifc_isolate_word()
     63  wptr = buf;    in debugifc_isolate_word()
     68  *wstrPtr = wptr;    in debugifc_isolate_word()
    177  const char *wptr;    in pvr2_debugifc_do1cmd() local
    184  if (!wptr) return 0;    in pvr2_debugifc_do1cmd()
    191  if (!wptr) return -EINVAL;    in pvr2_debugifc_do1cmd()
    218  if (!wptr) return -EINVAL;    in pvr2_debugifc_do1cmd()
    221  if (scnt && wptr) {    in pvr2_debugifc_do1cmd()
    251  if (!wptr) return -EINVAL;    in pvr2_debugifc_do1cmd()
    [all …]
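
In this file `wptr` is not a ring write pointer at all: debugifc_isolate_word() scans a debug command buffer and hands back a pointer to the start of the next whitespace-delimited word, which pvr2_debugifc_do1cmd() then checks before dispatching. A minimal userspace sketch of that word-isolation step (function and variable names here are illustrative, not the driver's):

```c
#include <ctype.h>
#include <stdio.h>

/* Find the first whitespace-delimited word in buf[0..cnt).
 * On return, *wstr points at the word (or NULL) and *wlen holds its length;
 * the return value is how many bytes of buf were consumed. */
static unsigned int isolate_word(const char *buf, unsigned int cnt,
                                 const char **wstr, unsigned int *wlen)
{
    const char *wptr = NULL;
    unsigned int consumed = 0, len = 0;

    /* skip leading whitespace */
    while (consumed < cnt && isspace((unsigned char)buf[consumed]))
        consumed++;

    if (consumed < cnt) {
        wptr = buf + consumed;
        while (consumed < cnt && !isspace((unsigned char)buf[consumed])) {
            consumed++;
            len++;
        }
    }

    *wstr = wptr;
    *wlen = len;
    return consumed;
}

int main(void)
{
    const char *cmd = "  reset cpu", *word;
    unsigned int len, used = isolate_word(cmd, 11, &word, &len);

    printf("consumed %u bytes, word = \"%.*s\"\n", used, (int)len, word);
    return 0;
}
```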
|
/drivers/media/platform/amphion/
vpu_rpc.c
     39  ptr1 = desc->wptr;    in vpu_rpc_check_buffer_space()
     43  ptr2 = desc->wptr;    in vpu_rpc_check_buffer_space()
     61  u32 wptr;    in vpu_rpc_send_cmd_buf() local
     70  wptr = desc->wptr;    in vpu_rpc_send_cmd_buf()
     76  wptr += 4;    in vpu_rpc_send_cmd_buf()
     78  if (wptr >= desc->end) {    in vpu_rpc_send_cmd_buf()
     79  wptr = desc->start;    in vpu_rpc_send_cmd_buf()
     85  wptr += 4;    in vpu_rpc_send_cmd_buf()
     87  if (wptr >= desc->end) {    in vpu_rpc_send_cmd_buf()
     88  wptr = desc->start;    in vpu_rpc_send_cmd_buf()
    [all …]
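
vpu_rpc_send_cmd_buf() appears to append 32-bit words to a shared command buffer, stepping wptr by 4 bytes and wrapping from desc->end back to desc->start; vpu_rpc_check_buffer_space() samples desc->wptr before and after to verify room. A rough standalone model of the wrap behaviour (struct and function names are invented for the example, and the free-space check is omitted):

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct buf_desc {
    uint32_t start;  /* first valid offset, in bytes */
    uint32_t end;    /* one past the last valid offset */
    uint32_t wptr;   /* current write offset */
};

/* Append one 32-bit word at the write pointer and wrap if we hit the end. */
static void put_word(struct buf_desc *desc, uint8_t *mem, uint32_t word)
{
    memcpy(mem + desc->wptr, &word, sizeof(word));
    desc->wptr += 4;
    if (desc->wptr >= desc->end)
        desc->wptr = desc->start;
}

int main(void)
{
    uint8_t mem[32] = { 0 };
    struct buf_desc desc = { .start = 0, .end = sizeof(mem), .wptr = 24 };

    for (uint32_t i = 0; i < 4; i++)
        put_word(&desc, mem, 0x1000 + i);   /* wraps after the second word */

    printf("wptr ended at %u\n", desc.wptr);
    return 0;
}
```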
|
vpu_malone.c
    198  u32 wptr;    member
    527  desc->wptr = readl(&str_buf->wptr);    in vpu_malone_get_stream_buffer_desc()
    540  writel(wptr, &str_buf->wptr);    in vpu_malone_update_wptr()
   1089  wptr = readl(&str_buf->wptr);    in vpu_malone_add_padding_scode()
   1094  size = ALIGN(wptr, 4) - wptr;    in vpu_malone_add_padding_scode()
   1554  u32 wptr = readl(&str_buf->wptr);    in vpu_malone_input_frame_data() local
   1561  scode.wptr = wptr;    in vpu_malone_input_frame_data()
   1569  wptr = scode.wptr;    in vpu_malone_input_frame_data()
   1579  wptr = scode.wptr;    in vpu_malone_input_frame_data()
   1616  u32 wptr = readl(&str_buf->wptr);    in vpu_malone_input_stream_data() local
    [all …]
|
vpu_helpers.c
    284  offset = *wptr;    in vpu_helper_copy_to_stream_buffer()
    304  u32 *wptr, u8 val, u32 size)    in vpu_helper_memset_stream_buffer() argument
    311  if (!stream_buffer || !wptr)    in vpu_helper_memset_stream_buffer()
    317  offset = *wptr;    in vpu_helper_memset_stream_buffer()
    335  *wptr = offset;    in vpu_helper_memset_stream_buffer()
    347  if (desc.rptr > desc.wptr)    in vpu_helper_get_free_space()
    348  return desc.rptr - desc.wptr;    in vpu_helper_get_free_space()
    349  else if (desc.rptr < desc.wptr)    in vpu_helper_get_free_space()
    362  if (desc.wptr > desc.rptr)    in vpu_helper_get_used_space()
    363  return desc.wptr - desc.rptr;    in vpu_helper_get_used_space()
    [all …]
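
vpu_helper_get_free_space() and vpu_helper_get_used_space() measure the circular stream buffer purely from the rptr/wptr pair read out of the descriptor. A sketch of that arithmetic, under the assumption (suggested by the matches) that rptr == wptr means the buffer is empty:

```c
#include <stdint.h>
#include <stdio.h>

struct stream_desc {
    uint32_t size;  /* total buffer size in bytes */
    uint32_t rptr;  /* read offset  */
    uint32_t wptr;  /* write offset */
};

/* Bytes the producer may still write.  Assumes rptr == wptr means "empty";
 * if the hardware treats that as "full" instead, a one-byte reserve is needed. */
static uint32_t free_space(const struct stream_desc *d)
{
    if (d->rptr > d->wptr)
        return d->rptr - d->wptr;
    if (d->rptr < d->wptr)
        return d->size - (d->wptr - d->rptr);
    return d->size;            /* empty */
}

/* Bytes queued but not yet consumed. */
static uint32_t used_space(const struct stream_desc *d)
{
    if (d->wptr > d->rptr)
        return d->wptr - d->rptr;
    if (d->wptr < d->rptr)
        return d->size - (d->rptr - d->wptr);
    return 0;                  /* empty */
}

int main(void)
{
    struct stream_desc d = { .size = 4096, .rptr = 3000, .wptr = 1000 };

    printf("free %u, used %u\n", free_space(&d), used_space(&d));
    return 0;
}
```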
|
/drivers/gpu/drm/amd/amdgpu/
amdgpu_ih.c
    157  ih->ring[wptr++] = cpu_to_le32(iv[i]);    in amdgpu_ih_ring_write()
    159  wptr <<= 2;    in amdgpu_ih_ring_write()
    160  wptr &= ih->ptr_mask;    in amdgpu_ih_ring_write()
    163  if (wptr != READ_ONCE(ih->rptr)) {    in amdgpu_ih_ring_write()
    165  WRITE_ONCE(*ih->wptr_cpu, cpu_to_le32(wptr));    in amdgpu_ih_ring_write()
    168  wptr, ih->rptr);    in amdgpu_ih_ring_write()
    212  u32 wptr;    in amdgpu_ih_process() local
    217  wptr = amdgpu_ih_get_wptr(adev, ih);    in amdgpu_ih_process()
    226  while (ih->rptr != wptr && --count) {    in amdgpu_ih_process()
    237  wptr = amdgpu_ih_get_wptr(adev, ih);    in amdgpu_ih_process()
    [all …]
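
amdgpu_ih_ring_write() copies an interrupt vector into the software IH ring at wptr, masks the pointer with ptr_mask (so the ring length must be a power of two), and only publishes the new wptr if it has not caught up with rptr; amdgpu_ih_process() then drains entries until rptr meets wptr. A simplified userspace model of the producer side — dword indices throughout, whereas the driver converts to a byte offset before publishing:

```c
#include <stdint.h>
#include <stdio.h>

#define RING_DWORDS 256                      /* must be a power of two */

struct sw_ih_ring {
    uint32_t ring[RING_DWORDS];
    uint32_t ptr_mask;                       /* RING_DWORDS - 1 */
    uint32_t rptr;                           /* consumer position, in dwords */
    uint32_t wptr;                           /* producer position, in dwords */
};

/* Write one interrupt vector of num_dw dwords; returns 0 on success,
 * -1 if the new wptr caught up with the unread rptr (entry dropped). */
static int ih_ring_write(struct sw_ih_ring *ih, const uint32_t *iv, unsigned num_dw)
{
    uint32_t wptr = ih->wptr;

    for (unsigned i = 0; i < num_dw; i++)
        ih->ring[wptr++ & ih->ptr_mask] = iv[i];

    wptr &= ih->ptr_mask;
    if (wptr == ih->rptr)                    /* ring full: drop this entry */
        return -1;

    ih->wptr = wptr;                         /* publish to the consumer */
    return 0;
}

int main(void)
{
    struct sw_ih_ring ih = { .ptr_mask = RING_DWORDS - 1 };
    uint32_t iv[8] = { 0xdeadbeef };

    printf("write %s\n", ih_ring_write(&ih, iv, 8) ? "dropped" : "ok");
    printf("wptr %u rptr %u\n", ih.wptr, ih.rptr);
    return 0;
}
```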
|
iceland_ih.c
    193  u32 wptr, tmp;    in iceland_ih_get_wptr() local
    195  wptr = le32_to_cpu(*ih->wptr_cpu);    in iceland_ih_get_wptr()
    197  if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW))    in iceland_ih_get_wptr()
    201  wptr = RREG32(mmIH_RB_WPTR);    in iceland_ih_get_wptr()
    203  if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW))    in iceland_ih_get_wptr()
    206  wptr = REG_SET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW, 0);    in iceland_ih_get_wptr()
    212  wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);    in iceland_ih_get_wptr()
    213  ih->rptr = (wptr + 16) & ih->ptr_mask;    in iceland_ih_get_wptr()
    225  return (wptr & ih->ptr_mask);    in iceland_ih_get_wptr()
|
cz_ih.c
    193  u32 wptr, tmp;    in cz_ih_get_wptr() local
    195  wptr = le32_to_cpu(*ih->wptr_cpu);    in cz_ih_get_wptr()
    197  if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW))    in cz_ih_get_wptr()
    201  wptr = RREG32(mmIH_RB_WPTR);    in cz_ih_get_wptr()
    203  if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW))    in cz_ih_get_wptr()
    206  wptr = REG_SET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW, 0);    in cz_ih_get_wptr()
    213  wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);    in cz_ih_get_wptr()
    214  ih->rptr = (wptr + 16) & ih->ptr_mask;    in cz_ih_get_wptr()
    226  return (wptr & ih->ptr_mask);    in cz_ih_get_wptr()
|
tonga_ih.c
    195  u32 wptr, tmp;    in tonga_ih_get_wptr() local
    197  wptr = le32_to_cpu(*ih->wptr_cpu);    in tonga_ih_get_wptr()
    199  if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW))    in tonga_ih_get_wptr()
    203  wptr = RREG32(mmIH_RB_WPTR);    in tonga_ih_get_wptr()
    205  if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW))    in tonga_ih_get_wptr()
    208  wptr = REG_SET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW, 0);    in tonga_ih_get_wptr()
    216  wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);    in tonga_ih_get_wptr()
    217  ih->rptr = (wptr + 16) & ih->ptr_mask;    in tonga_ih_get_wptr()
    229  return (wptr & ih->ptr_mask);    in tonga_ih_get_wptr()
|
cik_ih.c
    191  u32 wptr, tmp;    in cik_ih_get_wptr() local
    193  wptr = le32_to_cpu(*ih->wptr_cpu);    in cik_ih_get_wptr()
    195  if (wptr & IH_RB_WPTR__RB_OVERFLOW_MASK) {    in cik_ih_get_wptr()
    196  wptr &= ~IH_RB_WPTR__RB_OVERFLOW_MASK;    in cik_ih_get_wptr()
    202  wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);    in cik_ih_get_wptr()
    203  ih->rptr = (wptr + 16) & ih->ptr_mask;    in cik_ih_get_wptr()
    214  return (wptr & ih->ptr_mask);    in cik_ih_get_wptr()
|
si_ih.c
    111  u32 wptr, tmp;    in si_ih_get_wptr() local
    113  wptr = le32_to_cpu(*ih->wptr_cpu);    in si_ih_get_wptr()
    115  if (wptr & IH_RB_WPTR__RB_OVERFLOW_MASK) {    in si_ih_get_wptr()
    116  wptr &= ~IH_RB_WPTR__RB_OVERFLOW_MASK;    in si_ih_get_wptr()
    118  wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);    in si_ih_get_wptr()
    119  ih->rptr = (wptr + 16) & ih->ptr_mask;    in si_ih_get_wptr()
    130  return (wptr & ih->ptr_mask);    in si_ih_get_wptr()
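
iceland, cz, tonga, cik and si share one recovery pattern in their *_ih_get_wptr(): read the write-back wptr, and if the RB_OVERFLOW flag is set (the newer parts re-read the register to confirm), clear the flag and push rptr to (wptr + 16) & ptr_mask so processing resumes just past the oldest, possibly half-overwritten 16-byte entry. The ih_v6_0/ih_v6_1/ih_v7_0 entries further down do the same with a 32-byte stride. A hedged standalone model (the overflow bit position and struct are placeholders):

```c
#include <stdint.h>
#include <stdio.h>

#define RB_OVERFLOW   (1u << 31)   /* illustrative bit position, not the real field */
#define IH_ENTRY_SIZE 16           /* bytes per ring entry on these parts */

struct ih_state {
    uint32_t ptr_mask;             /* ring size in bytes, minus one */
    uint32_t rptr;                 /* consumer offset in bytes */
};

/* Return the usable wptr; on overflow, clear the flag and move rptr just
 * past the entry the hardware may have been overwriting. */
static uint32_t ih_get_wptr(struct ih_state *ih, uint32_t raw_wptr)
{
    uint32_t wptr = raw_wptr;

    if (wptr & RB_OVERFLOW) {
        wptr &= ~RB_OVERFLOW;
        fprintf(stderr, "IH ring overflow (wptr 0x%x, rptr 0x%x)\n",
                wptr, ih->rptr);
        ih->rptr = (wptr + IH_ENTRY_SIZE) & ih->ptr_mask;
    }
    return wptr & ih->ptr_mask;
}

int main(void)
{
    struct ih_state ih = { .ptr_mask = 0xfff, .rptr = 0x40 };
    uint32_t wptr = ih_get_wptr(&ih, 0x30 | RB_OVERFLOW);

    printf("wptr 0x%x, rptr advanced to 0x%x\n", wptr, ih.rptr);
    return 0;
}
```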
|
amdgpu_vpe.c
    603  ret = ring->wptr & ring->buf_mask;    in vpe_ring_init_cond_exec()
    700  uint64_t wptr;    in vpe_ring_get_wptr() local
    707  wptr = wptr << 32;    in vpe_ring_get_wptr()
    712  return (wptr >> 2);    in vpe_ring_get_wptr()
    726  lower_32_bits(ring->wptr << 2),    in vpe_ring_set_wptr()
    727  upper_32_bits(ring->wptr << 2));    in vpe_ring_set_wptr()
    729  WDOORBELL64(ring->doorbell_index, ring->wptr << 2);    in vpe_ring_set_wptr()
    739  lower_32_bits(ring->wptr << 2),    in vpe_ring_set_wptr()
    740  upper_32_bits(ring->wptr << 2));    in vpe_ring_set_wptr()
    742  lower_32_bits(ring->wptr << 2));    in vpe_ring_set_wptr()
    [all …]
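
vpe_ring_get_wptr()/vpe_ring_set_wptr() — like the sdma_v4_4_2 variants below — keep the ring position as a 64-bit value but talk to 32-bit registers and a 64-bit doorbell, so the halves are combined or split explicitly and the value is shifted by 2 to convert between dword counts and byte offsets. A small sketch of that packing (the helpers mirror the kernel's lower_32_bits/upper_32_bits but are redefined locally):

```c
#include <stdint.h>
#include <stdio.h>

static uint32_t lower_32_bits(uint64_t v) { return (uint32_t)v; }
static uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }

/* Rebuild a dword-granular wptr from two 32-bit register reads (byte units). */
static uint64_t ring_get_wptr(uint32_t reg_hi, uint32_t reg_lo)
{
    uint64_t wptr = reg_hi;

    wptr = wptr << 32;
    wptr |= reg_lo;
    return wptr >> 2;          /* bytes -> dwords */
}

/* Split a dword wptr back into the two byte-offset halves for the doorbell. */
static void ring_set_wptr(uint64_t wptr_dw, uint32_t *reg_hi, uint32_t *reg_lo)
{
    uint64_t bytes = wptr_dw << 2;

    *reg_lo = lower_32_bits(bytes);
    *reg_hi = upper_32_bits(bytes);
}

int main(void)
{
    uint32_t hi, lo;

    ring_set_wptr(0x123456789ULL, &hi, &lo);
    printf("hi 0x%x lo 0x%x -> wptr 0x%llx\n", hi, lo,
           (unsigned long long)ring_get_wptr(hi, lo));
    return 0;
}
```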
|
amdgpu_ring_mux.c
    213  void amdgpu_ring_mux_set_wptr(struct amdgpu_ring_mux *mux, struct amdgpu_ring *ring, u64 wptr)    in amdgpu_ring_mux_set_wptr() argument
    239  e->sw_wptr = wptr;    in amdgpu_ring_mux_set_wptr()
    240  e->start_ptr_in_hw_ring = mux->real_ring->wptr;    in amdgpu_ring_mux_set_wptr()
    243  if (ring->hw_prio > AMDGPU_RING_PRIO_DEFAULT || mux->wptr_resubmit < wptr) {    in amdgpu_ring_mux_set_wptr()
    244  amdgpu_ring_mux_copy_pkt_from_sw_ring(mux, ring, e->sw_cptr, wptr);    in amdgpu_ring_mux_set_wptr()
    245  e->end_ptr_in_hw_ring = mux->real_ring->wptr;    in amdgpu_ring_mux_set_wptr()
    248  e->end_ptr_in_hw_ring = mux->real_ring->wptr;    in amdgpu_ring_mux_set_wptr()
    340  amdgpu_ring_mux_set_wptr(mux, ring, ring->wptr);    in amdgpu_sw_ring_set_wptr_gfx()
    428  offset = ring->wptr & ring->buf_mask;    in amdgpu_sw_ring_ib_mark_offset()
    454  chunk->start = ring->wptr;    in amdgpu_ring_mux_start_ib()
    [all …]
|
amdgpu_cper.c
    477  wptr_old = ring->wptr;    in amdgpu_cper_ring_write()
    481  ent_sz = amdgpu_cper_ring_get_ent_sz(ring, ring->wptr);    in amdgpu_cper_ring_write()
    484  memcpy(&ring->ring[ring->wptr], s, chunk);    in amdgpu_cper_ring_write()
    486  ring->wptr += (chunk >> 2);    in amdgpu_cper_ring_write()
    487  ring->wptr &= ring->ptr_mask;    in amdgpu_cper_ring_write()
    496  if (((wptr_old < rptr) && (rptr <= ring->wptr)) ||    in amdgpu_cper_ring_write()
    497  ((ring->wptr < wptr_old) && (wptr_old < rptr)) ||    in amdgpu_cper_ring_write()
    498  ((rptr <= ring->wptr) && (ring->wptr < wptr_old))) {    in amdgpu_cper_ring_write()
    499  pos = (ring->wptr + 1) & ring->ptr_mask;    in amdgpu_cper_ring_write()
    524  return ring->wptr;    in amdgpu_cper_ring_get_wptr()
    [all …]
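
amdgpu_cper_ring_write() copies an entry in chunks, masking wptr after each step, and then compares wptr_old, rptr and the new wptr three ways to decide whether the write ran over unread data — i.e. whether rptr now falls inside the circular interval that was just overwritten; the listing suggests the read side is then advanced past the clobbered entry. A compact model of just that check (names are illustrative):

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Did a ring write that moved the write pointer from wptr_old to wptr_new
 * (circularly) run over the entry at rptr?  True when rptr lies in the
 * half-open interval (wptr_old, wptr_new] taken modulo the ring size. */
static bool write_passed_rptr(uint32_t wptr_old, uint32_t wptr_new, uint32_t rptr)
{
    return ((wptr_old < rptr) && (rptr <= wptr_new)) ||
           ((wptr_new < wptr_old) && (wptr_old < rptr)) ||
           ((rptr <= wptr_new) && (wptr_new < wptr_old));
}

int main(void)
{
    /* Straight write 10 -> 50 over rptr 30: overwritten. */
    printf("%d\n", write_passed_rptr(10, 50, 30));
    /* Wrapping write 200 -> 20 over rptr 10: overwritten. */
    printf("%d\n", write_passed_rptr(200, 20, 10));
    /* Straight write 10 -> 50, rptr 100 untouched. */
    printf("%d\n", write_passed_rptr(10, 50, 100));
    return 0;
}
```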
|
sdma_v4_4_2.c
    241  u64 wptr;    in sdma_v4_4_2_ring_get_wptr() local
    249  wptr = wptr << 32;    in sdma_v4_4_2_ring_get_wptr()
    252  ring->me, wptr);    in sdma_v4_4_2_ring_get_wptr()
    255  return wptr >> 2;    in sdma_v4_4_2_ring_get_wptr()
    310  u64 wptr;    in sdma_v4_4_2_page_ring_get_wptr() local
    317  wptr = wptr << 32;    in sdma_v4_4_2_page_ring_get_wptr()
    321  return wptr >> 2;    in sdma_v4_4_2_page_ring_get_wptr()
    342  uint64_t wptr = ring->wptr << 2;    in sdma_v4_4_2_page_ring_set_wptr() local
    715  ring->wptr = 0;    in sdma_v4_4_2_gfx_resume()
    724  rwptr = ring->wptr;    in sdma_v4_4_2_gfx_resume()
    [all …]
|
ih_v6_0.c
    438  u32 wptr, tmp;    in ih_v6_0_get_wptr() local
    441  wptr = le32_to_cpu(*ih->wptr_cpu);    in ih_v6_0_get_wptr()
    444  if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW))    in ih_v6_0_get_wptr()
    447  wptr = RREG32_NO_KIQ(ih_regs->ih_rb_wptr);    in ih_v6_0_get_wptr()
    448  if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW))    in ih_v6_0_get_wptr()
    451  wptr = REG_SET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW, 0);    in ih_v6_0_get_wptr()
    459  tmp = (wptr + 32) & ih->ptr_mask;    in ih_v6_0_get_wptr()
    462  wptr, ih->rptr, tmp);    in ih_v6_0_get_wptr()
    475  return (wptr & ih->ptr_mask);    in ih_v6_0_get_wptr()
    543  uint32_t wptr = cpu_to_le32(entry->src_data[0]);    in ih_v6_0_self_irq() local
    [all …]
|
ih_v6_1.c
    409  u32 wptr, tmp;    in ih_v6_1_get_wptr() local
    412  wptr = le32_to_cpu(*ih->wptr_cpu);    in ih_v6_1_get_wptr()
    415  if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW))    in ih_v6_1_get_wptr()
    418  wptr = RREG32_NO_KIQ(ih_regs->ih_rb_wptr);    in ih_v6_1_get_wptr()
    419  if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW))    in ih_v6_1_get_wptr()
    421  wptr = REG_SET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW, 0);    in ih_v6_1_get_wptr()
    427  tmp = (wptr + 32) & ih->ptr_mask;    in ih_v6_1_get_wptr()
    430  wptr, ih->rptr, tmp);    in ih_v6_1_get_wptr()
    444  return (wptr & ih->ptr_mask);    in ih_v6_1_get_wptr()
    512  uint32_t wptr = cpu_to_le32(entry->src_data[0]);    in ih_v6_1_self_irq() local
    [all …]
|
ih_v7_0.c
    409  u32 wptr, tmp;    in ih_v7_0_get_wptr() local
    412  wptr = le32_to_cpu(*ih->wptr_cpu);    in ih_v7_0_get_wptr()
    415  if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW))    in ih_v7_0_get_wptr()
    418  wptr = RREG32_NO_KIQ(ih_regs->ih_rb_wptr);    in ih_v7_0_get_wptr()
    419  if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW))    in ih_v7_0_get_wptr()
    421  wptr = REG_SET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW, 0);    in ih_v7_0_get_wptr()
    427  tmp = (wptr + 32) & ih->ptr_mask;    in ih_v7_0_get_wptr()
    430  wptr, ih->rptr, tmp);    in ih_v7_0_get_wptr()
    443  return (wptr & ih->ptr_mask);    in ih_v7_0_get_wptr()
    509  uint32_t wptr = cpu_to_le32(entry->src_data[0]);    in ih_v7_0_self_irq() local
    [all …]
|
/drivers/net/ppp/
bsd_comp.c
    586  if (wptr) \    in bsd_compress()
    591  wptr = NULL; \    in bsd_compress()
    630  wptr = obuf;    in bsd_compress()
    639  if (wptr)    in bsd_compress()
    643  *wptr++ = 0;    in bsd_compress()
    893  wptr = obuf;    in bsd_decompress()
    894  *wptr++ = adrs;    in bsd_decompress()
    895  *wptr++ = ctrl;    in bsd_decompress()
    896  *wptr++ = 0;    in bsd_decompress()
    994  wptr += codelen;    in bsd_decompress()
    [all …]
|
ppp_deflate.c
    190  unsigned char *wptr;    in z_compress() local
    204  wptr = obuf;    in z_compress()
    209  wptr[0] = PPP_ADDRESS(rptr);    in z_compress()
    210  wptr[1] = PPP_CONTROL(rptr);    in z_compress()
    211  put_unaligned_be16(PPP_COMP, wptr + 2);    in z_compress()
    212  wptr += PPP_HDRLEN;    in z_compress()
    213  put_unaligned_be16(state->seqno, wptr);    in z_compress()
    214  wptr += DEFLATE_OVHD;    in z_compress()
    216  state->strm.next_out = wptr;    in z_compress()
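
In bsd_comp.c and ppp_deflate.c, wptr is a plain byte cursor into the outgoing packet: z_compress() writes the PPP address and control bytes, the PPP_COMP protocol word and the 16-bit sequence number, then points zlib's strm.next_out at the remainder of the buffer. A hedged sketch of that header assembly (the constants restate the usual PPP values rather than being taken from the driver):

```c
#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

#define PPP_ADDR_ALLSTATIONS 0xff    /* usual PPP address byte    */
#define PPP_CTRL_UI          0x03    /* usual PPP control byte    */
#define PPP_PROTO_COMP       0x00fd  /* compressed-datagram proto */
#define PPP_HDRLEN           4
#define DEFLATE_OVHD         2       /* 16-bit sequence number    */

static void put_be16(uint8_t *p, uint16_t v)
{
    p[0] = v >> 8;
    p[1] = v & 0xff;
}

/* Fill in the PPP + Deflate framing and return where the compressed
 * payload should start being written. */
static uint8_t *write_deflate_header(uint8_t *obuf, uint16_t seqno)
{
    uint8_t *wptr = obuf;

    wptr[0] = PPP_ADDR_ALLSTATIONS;
    wptr[1] = PPP_CTRL_UI;
    put_be16(wptr + 2, PPP_PROTO_COMP);
    wptr += PPP_HDRLEN;
    put_be16(wptr, seqno);
    wptr += DEFLATE_OVHD;
    return wptr;                 /* zlib's next_out would point here */
}

int main(void)
{
    uint8_t obuf[16] = { 0 };
    uint8_t *out = write_deflate_header(obuf, 7);

    printf("payload starts at offset %td\n", out - obuf);
    for (int i = 0; i < 6; i++)
        printf("%02x ", obuf[i]);
    printf("\n");
    return 0;
}
```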
|
/drivers/net/ethernet/tehuti/
tehuti.c
    171  f->wptr = 0;    in bdx_fifo_init()
   1112  f->m.wptr = delta;    in bdx_rx_alloc_skbs()
   1167  f->m.wptr = delta;    in bdx_recycle_skb()
   1210  size = f->m.wptr - f->m.rptr;    in bdx_rx_receive()
   1427  d->wptr = d->start;    in bdx_tx_db_init()
   1505  db->wptr->addr.skb = skb;    in bdx_tx_map_skb()
   1568  fsize = f->m.rptr - f->m.wptr;    in bdx_tx_space()
   1639  len = f->m.wptr - f->m.memsz;    in bdx_tx_transmit()
   1641  f->m.wptr = len;    in bdx_tx_transmit()
   1806  f->m.wptr += size;    in bdx_tx_push_desc()
    [all …]
|
tn40.c
     54  f->wptr = 0;    in tn40_fifo_alloc()
    221  f->m.wptr = delta;    in tn40_set_rx_desc()
    492  d->wptr = d->start;    in tn40_tx_db_init()
    550  db->wptr->len = len;    in tn40_txdb_set()
    551  db->wptr->addr.dma = dma;    in tn40_txdb_set()
    602  tn40_pbl_set(pbl++, db->wptr->addr.dma, db->wptr->len);    in tn40_tx_map_skb()
    622  tn40_pbl_set(pbl++, db->wptr->addr.dma, db->wptr->len);    in tn40_tx_map_skb()
    638  dma_unmap_page(&priv->pdev->dev, db->wptr->addr.dma, db->wptr->len,    in tn40_tx_map_skb()
    785  f->m.wptr = len;    in tn40_start_xmit()
    924  f->m.wptr += size;    in tn40_tx_push_desc()
    [all …]
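
The tehuti and tn40 fifos track wptr as a byte offset within a fifo of memsz bytes; when a transmit descriptor runs the offset past the end, the code folds it back by subtracting memsz (len = f->m.wptr - f->m.memsz; f->m.wptr = len). A tiny model of that fold-back (the struct is invented for the example):

```c
#include <stdint.h>
#include <stdio.h>

struct fifo {
    uint32_t memsz;   /* fifo size in bytes (descriptors never exceed it) */
    uint32_t wptr;    /* write offset, in bytes */
};

/* Advance the write pointer after a descriptor of 'size' bytes was queued,
 * folding back into [0, memsz) if it ran off the end. */
static void fifo_advance(struct fifo *f, uint32_t size)
{
    f->wptr += size;
    if (f->wptr >= f->memsz)
        f->wptr -= f->memsz;     /* same as "len = wptr - memsz; wptr = len" */
}

int main(void)
{
    struct fifo f = { .memsz = 4096, .wptr = 4000 };

    fifo_advance(&f, 256);       /* runs past the end, folds back to 160 */
    printf("wptr %u\n", f.wptr);
    return 0;
}
```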
|
/drivers/crypto/ccp/
tee-dev.c
    104  tee->rb_mgr.wptr = 0;    in tee_init_ring()
    230  (tee->rb_mgr.ring_start + tee->rb_mgr.wptr);    in tee_submit_cmd()
    237  if (!(tee->rb_mgr.wptr + sizeof(struct tee_ring_cmd) == rptr ||    in tee_submit_cmd()
    242  rptr, tee->rb_mgr.wptr);    in tee_submit_cmd()
    252  (tee->rb_mgr.wptr + sizeof(struct tee_ring_cmd) == rptr ||    in tee_submit_cmd()
    255  rptr, tee->rb_mgr.wptr, cmd->flag);    in tee_submit_cmd()
    278  tee->rb_mgr.wptr += sizeof(struct tee_ring_cmd);    in tee_submit_cmd()
    279  if (tee->rb_mgr.wptr >= tee->rb_mgr.ring_size)    in tee_submit_cmd()
    280  tee->rb_mgr.wptr = 0;    in tee_submit_cmd()
    283  iowrite32(tee->rb_mgr.wptr, tee->io_regs + tee->vdata->ring_wptr_reg);    in tee_submit_cmd()
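
tee_submit_cmd() treats the ring as fixed-size tee_ring_cmd slots: the next command lands at ring_start + wptr, the ring counts as full when one more slot would collide with rptr, and after queuing, wptr is advanced, wrapped to zero at ring_size, and written to the device's write-pointer register with iowrite32() to notify the firmware. A simplified model with the MMIO write stubbed out and a modulo standing in for the driver's explicit wrap test:

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define CMD_SIZE   64u            /* stand-in for sizeof(struct tee_ring_cmd) */
#define RING_SIZE  (4 * CMD_SIZE) /* small ring for the example */

struct ring_mgr {
    uint8_t  ring[RING_SIZE];
    uint32_t wptr;                /* byte offset of the next free slot */
};

/* Pretend doorbell: the real driver does iowrite32() to a wptr register. */
static void ring_doorbell(uint32_t wptr) { printf("doorbell <- %u\n", wptr); }

/* Queue one command; rptr is the firmware's current read offset.
 * Returns 0 on success, -1 if the ring is full. */
static int submit_cmd(struct ring_mgr *rb, uint32_t rptr,
                      const void *cmd, size_t len)
{
    /* Full when advancing by one slot would land on rptr. */
    if ((rb->wptr + CMD_SIZE) % RING_SIZE == rptr)
        return -1;

    memcpy(rb->ring + rb->wptr, cmd, len < CMD_SIZE ? len : CMD_SIZE);

    rb->wptr += CMD_SIZE;
    if (rb->wptr >= RING_SIZE)
        rb->wptr = 0;
    ring_doorbell(rb->wptr);
    return 0;
}

int main(void)
{
    struct ring_mgr rb = { .wptr = 0 };
    const char cmd[] = "hello tee";

    printf("submit: %d\n", submit_cmd(&rb, 128, cmd, sizeof(cmd)));
    return 0;
}
```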
|
/drivers/gpu/drm/radeon/
radeon_ring.c
     89  ring->ring_free_dw -= ring->wptr;    in radeon_ring_free_size()
    130  ring->wptr_old = ring->wptr;    in radeon_ring_alloc()
    178  while (ring->wptr & ring->align_mask) {    in radeon_ring_commit()
    216  ring->wptr = ring->wptr_old;    in radeon_ring_undo()
    316  size = ring->wptr + (ring->ring_size / 4);    in radeon_ring_backup()
    472  uint32_t rptr, wptr, rptr_next;    in radeon_debugfs_ring_info_show() local
    478  wptr = radeon_ring_get_wptr(rdev, ring);    in radeon_debugfs_ring_info_show()
    480  wptr, wptr);    in radeon_debugfs_ring_info_show()
    494  ring->wptr, ring->wptr);    in radeon_debugfs_ring_info_show()
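
radeon_ring.c shows the alloc/commit/undo discipline around wptr: radeon_ring_alloc() saves wptr into wptr_old before any dwords are emitted, radeon_ring_commit() pads with NOPs until wptr satisfies the ring's alignment mask before the hardware write pointer is bumped, and radeon_ring_undo() simply restores wptr_old if the submission is abandoned. A sketch of that discipline (ring size, alignment and the NOP encoding are placeholders):

```c
#include <stdint.h>
#include <stdio.h>

#define RING_DWORDS 64u                 /* power of two */
#define NOP_PACKET  0x80000000u         /* placeholder NOP encoding */

struct ring {
    uint32_t ring[RING_DWORDS];
    uint32_t ptr_mask;                  /* RING_DWORDS - 1 */
    uint32_t align_mask;                /* pad commits to this boundary */
    uint32_t wptr;
    uint32_t wptr_old;
};

static void ring_alloc(struct ring *r) { r->wptr_old = r->wptr; }
static void ring_undo(struct ring *r)  { r->wptr = r->wptr_old; }

static void ring_write(struct ring *r, uint32_t v)
{
    r->ring[r->wptr] = v;
    r->wptr = (r->wptr + 1) & r->ptr_mask;
}

/* Pad with NOPs so the write pointer lands on an aligned boundary; this is
 * the point where the hardware WPTR register would be written. */
static void ring_commit(struct ring *r)
{
    while (r->wptr & r->align_mask)
        ring_write(r, NOP_PACKET);
    printf("commit: hw wptr <- %u\n", r->wptr);
}

int main(void)
{
    struct ring r = { .ptr_mask = RING_DWORDS - 1, .align_mask = 7 };

    ring_alloc(&r);
    for (int i = 0; i < 5; i++)
        ring_write(&r, i);              /* 5 dwords, padded up to 8 */
    ring_commit(&r);

    ring_alloc(&r);
    ring_write(&r, 0x42);
    ring_undo(&r);                      /* abandoned: wptr back to 8 */
    printf("after undo: wptr %u\n", r.wptr);
    return 0;
}
```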
|
/drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/
rpc.c
    153  u32 wptr = *gsp->msgq.wptr;    in r535_gsp_msgq_wait() local
    155  used = wptr + gsp->msgq.cnt - rptr;    in r535_gsp_msgq_wait()
    361  u32 wptr, size, step, len;    in r535_gsp_cmdq_push() local
    377  wptr = *gsp->cmdq.wptr;    in r535_gsp_cmdq_push()
    395  step = min_t(u32, free, (gsp->cmdq.cnt - wptr));    in r535_gsp_cmdq_push()
    400  wptr += DIV_ROUND_UP(size, 0x1000);    in r535_gsp_cmdq_push()
    401  if (wptr == gsp->cmdq.cnt)    in r535_gsp_cmdq_push()
    402  wptr = 0;    in r535_gsp_cmdq_push()
    408  nvkm_trace(&gsp->subdev, "cmdq: wptr %d\n", wptr);    in r535_gsp_cmdq_push()
    410  (*gsp->cmdq.wptr) = wptr;    in r535_gsp_cmdq_push()
    [all …]
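
r535_gsp_msgq_wait() measures occupancy as wptr + cnt - rptr on a page-granular queue, and r535_gsp_cmdq_push() copies a command in steps bounded by (cnt - wptr) so each memcpy stops at the end of the queue, wrapping wptr to zero and advancing it by DIV_ROUND_UP(size, 0x1000) pages before publishing. A rough model of the chunked copy and the occupancy calculation — the GSP-specific free-space wait and message framing are omitted, and the wrap fix-up in queue_used() is my assumption:

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define PAGE_SZ     0x1000u
#define QUEUE_PAGES 8u
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

static uint8_t queue[QUEUE_PAGES * PAGE_SZ];

/* Occupancy in pages, in the spirit of "used = wptr + cnt - rptr". */
static uint32_t queue_used(uint32_t wptr, uint32_t rptr)
{
    uint32_t used = wptr + QUEUE_PAGES - rptr;

    return used >= QUEUE_PAGES ? used - QUEUE_PAGES : used;
}

/* Copy 'len' bytes into a page-granular queue starting at page *wptr,
 * splitting the copy at the end of the queue and wrapping to page 0. */
static void cmdq_push(uint32_t *wptr, const uint8_t *data, uint32_t len)
{
    while (len) {
        uint32_t step = QUEUE_PAGES - *wptr;          /* pages to the end */
        uint32_t size = step * PAGE_SZ;               /* bytes to the end */

        if (size > len)
            size = len;

        memcpy(queue + *wptr * PAGE_SZ, data, size);
        data += size;
        len  -= size;

        *wptr += DIV_ROUND_UP(size, PAGE_SZ);
        if (*wptr == QUEUE_PAGES)
            *wptr = 0;
    }
}

int main(void)
{
    static uint8_t cmd[5 * PAGE_SZ];
    uint32_t rptr = 6, wptr = 6;            /* queue empty, 2 pages before wrap */

    memset(cmd, 0xab, sizeof(cmd));
    cmdq_push(&wptr, cmd, sizeof(cmd));
    printf("wptr %u, used %u pages\n", wptr, queue_used(wptr, rptr));
    return 0;
}
```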
|
/drivers/gpu/drm/amd/amdkfd/
kfd_kernel_queue.c
    237  uint32_t wptr, rptr;    in kq_acquire_packet_buffer() local
    247  wptr = kq->pending_wptr;    in kq_acquire_packet_buffer()
    253  pr_debug("wptr: %d\n", wptr);    in kq_acquire_packet_buffer()
    256  available_size = (rptr + queue_size_dwords - 1 - wptr) %    in kq_acquire_packet_buffer()
    267  if (wptr + packet_size_in_dwords >= queue_size_dwords) {    in kq_acquire_packet_buffer()
    275  while (wptr > 0) {    in kq_acquire_packet_buffer()
    276  queue_address[wptr] = kq->nop_packet;    in kq_acquire_packet_buffer()
    277  wptr = (wptr + 1) % queue_size_dwords;    in kq_acquire_packet_buffer()
    282  *buffer_ptr = &queue_address[wptr];    in kq_acquire_packet_buffer()
    283  kq->pending_wptr = wptr + packet_size_in_dwords;    in kq_acquire_packet_buffer()
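
kq_acquire_packet_buffer() reserves a contiguous run of dwords in the kernel queue: available space is (rptr + queue_size - 1 - wptr) % queue_size, and when the packet would straddle the end of the queue, the remaining dwords up to the wrap point are filled with NOP packets so the caller always gets an unbroken region starting at dword 0. A sketch under those assumptions (packet encoding and sizes are placeholders):

```c
#include <stdint.h>
#include <stdio.h>

#define QUEUE_DWORDS 64u
#define NOP_PACKET   0xffff1000u      /* placeholder NOP encoding */

struct kernel_queue {
    uint32_t buffer[QUEUE_DWORDS];
    uint32_t rptr;                    /* consumer position, dwords */
    uint32_t pending_wptr;            /* producer position, dwords */
};

/* Reserve 'pkt_dw' contiguous dwords; returns a pointer into the queue or
 * NULL if there is not enough room.  Dwords between the current wptr and
 * the end of the queue are NOP-padded when the packet would otherwise wrap. */
static uint32_t *acquire_packet_buffer(struct kernel_queue *kq, uint32_t pkt_dw)
{
    uint32_t wptr = kq->pending_wptr;
    uint32_t rptr = kq->rptr;
    uint32_t avail = (rptr + QUEUE_DWORDS - 1 - wptr) % QUEUE_DWORDS;

    if (pkt_dw > avail)
        return NULL;

    if (wptr + pkt_dw >= QUEUE_DWORDS) {
        /* Pad to the end of the queue so the packet starts at dword 0. */
        while (wptr > 0) {
            kq->buffer[wptr] = NOP_PACKET;
            wptr = (wptr + 1) % QUEUE_DWORDS;
        }
        /* Re-check the space now that the padding consumed part of it. */
        if (pkt_dw > (rptr + QUEUE_DWORDS - 1 - wptr) % QUEUE_DWORDS)
            return NULL;
    }

    kq->pending_wptr = wptr + pkt_dw;
    return &kq->buffer[wptr];
}

int main(void)
{
    struct kernel_queue kq = { .rptr = 40, .pending_wptr = 60 };
    uint32_t *pkt = acquire_packet_buffer(&kq, 8);    /* forces NOP padding */

    printf("packet at dword %td, pending_wptr %u\n",
           pkt ? pkt - kq.buffer : -1, kq.pending_wptr);
    return 0;
}
```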