
Searched refs:wb (Results 1 – 25 of 147) sorted by relevance


/drivers/staging/media/atomisp/pci/isp/kernels/wb/wb_1.0/
ia_css_wb.host.c
50 const struct sh_css_isp_wb_params *wb, in ia_css_wb_dump() argument
53 if (!wb) return; in ia_css_wb_dump()
56 "wb_gain_shift", wb->gain_shift); in ia_css_wb_dump()
58 "wb_gain_gr", wb->gain_gr); in ia_css_wb_dump()
60 "wb_gain_r", wb->gain_r); in ia_css_wb_dump()
62 "wb_gain_b", wb->gain_b); in ia_css_wb_dump()
64 "wb_gain_gb", wb->gain_gb); in ia_css_wb_dump()
ia_css_wb.host.h
23 const struct sh_css_isp_wb_params *wb,
28 const struct ia_css_wb_config *wb,
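The atomisp hits above are a straight parameter dump of the ISP's white-balance gains. A minimal standalone sketch of that pattern, assuming a plain-u32 layout for the gain fields (the real sh_css_isp_wb_params lives in the driver's headers):

    #include <stdio.h>

    /* Model of the wb gain dump in ia_css_wb.host.c; the field names come
     * from the hits above, the layout is an assumption for illustration. */
    struct wb_params {
        unsigned int gain_shift;              /* fixed-point shift for the gains */
        unsigned int gain_gr, gain_r, gain_b, gain_gb;
    };

    static void wb_dump(const struct wb_params *wb)
    {
        if (!wb)                              /* same NULL guard as ia_css_wb_dump() */
            return;
        printf("wb_gain_shift %u\n", wb->gain_shift);
        printf("wb_gain_gr %u\n", wb->gain_gr);
        printf("wb_gain_r  %u\n", wb->gain_r);
        printf("wb_gain_b  %u\n", wb->gain_b);
        printf("wb_gain_gb %u\n", wb->gain_gb);
    }

    int main(void)
    {
        struct wb_params p = { 8, 256, 300, 280, 256 };
        wb_dump(&p);
        return 0;
    }
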
/drivers/gpu/drm/radeon/
r600_dma.c
55 if (rdev->wb.enabled) in r600_dma_get_rptr()
56 rptr = rdev->wb.wb[ring->rptr_offs/4]; in r600_dma_get_rptr()
143 upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF); in r600_dma_resume()
145 ((rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFFFFFFFC)); in r600_dma_resume()
147 if (rdev->wb.enabled) in r600_dma_resume()
243 gpu_addr = rdev->wb.gpu_addr + index; in r600_dma_ring_test()
246 rdev->wb.wb[index/4] = cpu_to_le32(tmp); in r600_dma_ring_test()
260 tmp = le32_to_cpu(rdev->wb.wb[index/4]); in r600_dma_ring_test()
350 gpu_addr = rdev->wb.gpu_addr + index; in r600_dma_ib_test()
381 tmp = le32_to_cpu(rdev->wb.wb[index/4]); in r600_dma_ib_test()
[all …]
radeon_device.c
424 rdev->wb.enabled = false; in radeon_wb_disable()
438 if (rdev->wb.wb_obj) { in radeon_wb_fini()
445 rdev->wb.wb = NULL; in radeon_wb_fini()
446 rdev->wb.wb_obj = NULL; in radeon_wb_fini()
466 &rdev->wb.wb_obj); in radeon_wb_init()
477 &rdev->wb.gpu_addr); in radeon_wb_init()
484 r = radeon_bo_kmap(rdev->wb.wb_obj, (void **)&rdev->wb.wb); in radeon_wb_init()
494 memset((char *)rdev->wb.wb, 0, RADEON_GPU_PAGE_SIZE); in radeon_wb_init()
499 rdev->wb.enabled = false; in radeon_wb_init()
508 rdev->wb.enabled = true; in radeon_wb_init()
[all …]
cik_sdma.c
67 if (rdev->wb.enabled) { in cik_sdma_get_rptr()
68 rptr = rdev->wb.wb[ring->rptr_offs/4]; in cik_sdma_get_rptr()
138 if (rdev->wb.enabled) { in cik_sdma_ring_ib_execute()
402 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cik_sdma_gfx_resume()
404 if (rdev->wb.enabled) in cik_sdma_gfx_resume()
658 gpu_addr = rdev->wb.gpu_addr + index; in cik_sdma_ring_test()
661 rdev->wb.wb[index/4] = cpu_to_le32(tmp); in cik_sdma_ring_test()
676 tmp = le32_to_cpu(rdev->wb.wb[index/4]); in cik_sdma_ring_test()
715 gpu_addr = rdev->wb.gpu_addr + index; in cik_sdma_ib_test()
718 rdev->wb.wb[index/4] = cpu_to_le32(tmp); in cik_sdma_ib_test()
[all …]
ni_dma.c
57 if (rdev->wb.enabled) { in cayman_dma_get_rptr()
58 rptr = rdev->wb.wb[ring->rptr_offs/4]; in cayman_dma_get_rptr()
127 if (rdev->wb.enabled) { in cayman_dma_ring_ib_execute()
222 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF); in cayman_dma_resume()
224 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cayman_dma_resume()
226 if (rdev->wb.enabled) in cayman_dma_resume()
radeon_fence.c
71 if (likely(rdev->wb.enabled || !drv->scratch_reg)) { in radeon_fence_write()
93 if (likely(rdev->wb.enabled || !drv->scratch_reg)) { in radeon_fence_read()
772 if (rdev->wb.use_event || !radeon_ring_supports_scratch_reg(rdev, &rdev->ring[ring])) { in radeon_fence_driver_start_ring()
776 rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4]; in radeon_fence_driver_start_ring()
777 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + in radeon_fence_driver_start_ring()
796 rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4]; in radeon_fence_driver_start_ring()
797 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index; in radeon_fence_driver_start_ring()
radeon_ring.c
308 else if (rdev->wb.enabled) in radeon_ring_backup()
422 if (rdev->wb.enabled) { in radeon_ring_init()
424 ring->next_rptr_gpu_addr = rdev->wb.gpu_addr + index; in radeon_ring_init()
425 ring->next_rptr_cpu_addr = &rdev->wb.wb[index/4]; in radeon_ring_init()
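Across r600_dma.c, cik_sdma.c, ni_dma.c and radeon_ring.c the same idiom repeats: rdev->wb.wb is a CPU-mapped, GPU-visible u32 array, indexed as wb[byte_offset/4], with values stored little-endian (cpu_to_le32/le32_to_cpu) so both sides agree. The ring tests seed a slot with a sentinel, ask the engine to overwrite it, and poll. A hedged standalone model of just that handshake, where fake_dma_write stands in for the DMA write packet the driver actually submits on the ring:

    #include <stdint.h>
    #include <stdio.h>
    #include <endian.h>

    static uint32_t wb[1024];                 /* stand-in for rdev->wb.wb */

    static void fake_dma_write(unsigned int slot, uint32_t val)
    {
        /* the driver submits a DMA write packet on the ring instead */
        wb[slot] = htole32(val);
    }

    int main(void)
    {
        unsigned int index = 2048 / 4;        /* byte offset -> u32 slot, the index/4 above */

        wb[index] = htole32(0xCAFEDEAD);      /* sentinel, as in r600_dma_ring_test() */
        fake_dma_write(index, 0xDEADBEEF);

        if (le32toh(wb[index]) == 0xDEADBEEF)
            puts("ring test passed");
        return 0;
    }

radeon_wb_init() in radeon_device.c is the setup half of this: allocate a buffer object, take its gpu_addr, kmap it into rdev->wb.wb, and memset a full RADEON_GPU_PAGE_SIZE before enabling.
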
/drivers/usb/class/
cdc-acm.c
187 wb = &acm->wb[wbn]; in acm_wb_alloc()
188 if (!wb->use) { in acm_wb_alloc()
190 wb->len = 0; in acm_wb_alloc()
235 wb->urb->transfer_buffer = wb->buf; in acm_start_wb()
236 wb->urb->transfer_dma = wb->dmah; in acm_start_wb()
237 wb->urb->transfer_buffer_length = wb->len; in acm_start_wb()
834 wb = &acm->wb[wbn]; in acm_tty_write()
1147 for (wb = &acm->wb[0], i = 0; i < ACM_NW; i++, wb++) in acm_write_buffers_free()
1166 for (wb = &acm->wb[0], i = 0; i < ACM_NW; i++, wb++) { in acm_write_buffers_alloc()
1172 --wb; in acm_write_buffers_alloc()
[all …]
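In cdc-acm, acm->wb is a small pool of write buffers, each with a use flag; acm_wb_alloc() scans for a free slot, and acm_start_wb() points the URB at the buffer. A hedged sketch of the pool half only (the URB and DMA handle fields, wb->urb and wb->dmah, are omitted, and the pool size here is an assumption standing in for ACM_NW):

    #include <stddef.h>
    #include <stdio.h>

    #define ACM_NW 16                          /* assumed pool size for this sketch */

    struct acm_wb {
        int use;                               /* nonzero while the buffer is in flight */
        size_t len;                            /* bytes queued in this buffer */
    };

    static struct acm_wb pool[ACM_NW];

    static struct acm_wb *wb_alloc(void)
    {
        for (int wbn = 0; wbn < ACM_NW; wbn++) {
            struct acm_wb *wb = &pool[wbn];
            if (!wb->use) {                    /* same free-slot test as acm_wb_alloc() */
                wb->use = 1;
                wb->len = 0;
                return wb;
            }
        }
        return NULL;                           /* all buffers busy */
    }

    int main(void)
    {
        struct acm_wb *wb = wb_alloc();
        if (wb)
            printf("got buffer %td\n", wb - pool);
        return 0;
    }
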
/drivers/net/wireless/microchip/wilc1000/
spi.c
411 .tx_buf = wb, in wilc_spi_tx_rx()
524 u8 wb[32], rb[32]; in wilc_spi_single_read() local
531 memset(wb, 0x0, sizeof(wb)); in wilc_spi_single_read()
533 c = (struct wilc_spi_cmd *)wb; in wilc_spi_single_read()
619 u8 wb[32], rb[32]; in wilc_spi_write_cmd() local
624 memset(wb, 0x0, sizeof(wb)); in wilc_spi_write_cmd()
626 c = (struct wilc_spi_cmd *)wb; in wilc_spi_write_cmd()
694 u8 wb[32], rb[32]; in wilc_spi_dma_rw() local
701 memset(wb, 0x0, sizeof(wb)); in wilc_spi_dma_rw()
820 u8 wb[32], rb[32]; in wilc_spi_special_cmd() local
[all …]
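Every wilc1000 SPI command in these hits follows one recipe: zero a 32-byte TX buffer, overlay a packed command struct on it, and clock it out full-duplex while the response fills an equally sized RX buffer. A hedged standalone model; wilc_spi_cmd here is a cut-down stand-in for the driver's struct, and the 0xCA opcode and spi_tx_rx loopback are invented for the demo:

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    struct wilc_spi_cmd {                      /* cut-down stand-in, packed like the real one */
        uint8_t cmd_type;
        uint8_t addr[3];                       /* 24-bit register address */
    } __attribute__((packed));

    static void spi_tx_rx(const uint8_t *tx, uint8_t *rx, size_t len)
    {
        memcpy(rx, tx, len);                   /* fake full-duplex loopback */
    }

    int main(void)
    {
        uint8_t wb[32], rb[32];

        memset(wb, 0x0, sizeof(wb));           /* same zeroing as wilc_spi_single_read() */
        struct wilc_spi_cmd *c = (struct wilc_spi_cmd *)wb;
        c->cmd_type = 0xCA;                    /* hypothetical opcode */
        c->addr[0] = 0x00;
        c->addr[1] = 0x10;
        c->addr[2] = 0x24;

        spi_tx_rx(wb, rb, sizeof(wb));
        printf("response byte 0: 0x%02x\n", rb[0]);
        return 0;
    }
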
/drivers/net/ethernet/intel/ice/
ice_txrx_lib.h
38 if (likely(ice_test_staterr(rx_desc->wb.status_error0, ICE_RXD_EOF))) in ice_is_non_eop()
70 if (ice_test_staterr(rx_desc->wb.status_error0, stat_err_bits)) in ice_get_vlan_tci()
71 return le16_to_cpu(rx_desc->wb.l2tag1); in ice_get_vlan_tci()
74 if (ice_test_staterr(rx_desc->wb.status_error1, stat_err_bits)) in ice_get_vlan_tci()
75 return le16_to_cpu(rx_desc->wb.l2tag2_2nd); in ice_get_vlan_tci()
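The ice hits read the descriptor's writeback section: the NIC sets status bits, and the driver only trusts the little-endian tag fields after testing them. A hedged model of ice_get_vlan_tci()'s first branch; the struct layout and bit position are illustrative, the real descriptor being the driver's 32-byte flex descriptor:

    #include <stdint.h>
    #include <stdio.h>
    #include <endian.h>

    struct rx_wb {                             /* model of rx_desc->wb */
        uint16_t status_error0;                /* little-endian on the wire */
        uint16_t l2tag1;                       /* LE VLAN tag, valid only if flagged */
    };

    #define L2TAG1_VALID (1u << 2)             /* hypothetical bit position */

    static uint16_t get_vlan_tci(const struct rx_wb *wb)
    {
        if (le16toh(wb->status_error0) & L2TAG1_VALID)
            return le16toh(wb->l2tag1);        /* mirrors le16_to_cpu(rx_desc->wb.l2tag1) */
        return 0;
    }

    int main(void)
    {
        struct rx_wb d = { htole16(L2TAG1_VALID), htole16(100) };
        printf("vlan tci: %u\n", get_vlan_tci(&d));
        return 0;
    }
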
/drivers/gpu/drm/msm/disp/dpu1/
dpu_hw_wb.h
37 struct dpu_hw_wb_cfg *wb);
40 struct dpu_hw_wb_cfg *wb,
44 struct dpu_hw_wb_cfg *wb);
dpu_hw_wb.c
126 static void dpu_hw_wb_roi(struct dpu_hw_wb *ctx, struct dpu_hw_wb_cfg *wb) in dpu_hw_wb_roi() argument
131 image_size = (wb->dest.height << 16) | wb->dest.width; in dpu_hw_wb_roi()
133 out_size = (drm_rect_height(&wb->roi) << 16) | drm_rect_width(&wb->roi); in dpu_hw_wb_roi()
dpu_hw_ctl.c
303 enum dpu_wb wb) in dpu_hw_ctl_update_pending_flush_wb() argument
305 switch (wb) { in dpu_hw_ctl_update_pending_flush_wb()
324 enum dpu_wb wb) in dpu_hw_ctl_update_pending_flush_wb_v1() argument
326 ctx->pending_wb_flush_mask |= BIT(wb - WB_0); in dpu_hw_ctl_update_pending_flush_wb_v1()
597 if (cfg->wb) in dpu_hw_ctl_intf_cfg_v1()
598 wb_active |= BIT(cfg->wb - WB_0); in dpu_hw_ctl_intf_cfg_v1()
636 if (cfg->wb) in dpu_hw_ctl_intf_cfg()
637 intf_cfg |= (cfg->wb & 0x3) + 2; in dpu_hw_ctl_intf_cfg()
715 if (cfg->wb) { in dpu_hw_ctl_reset_intf_cfg_v1()
717 wb_active &= ~BIT(cfg->wb - WB_0); in dpu_hw_ctl_reset_intf_cfg_v1()
dpu_encoder_phys_wb.c
38 static bool _dpu_encoder_phys_wb_clk_force_ctrl(struct dpu_hw_wb *wb, in _dpu_encoder_phys_wb_clk_force_ctrl() argument
42 if (wb->ops.setup_clk_force_ctrl) { in _dpu_encoder_phys_wb_clk_force_ctrl()
43 *forced_on = wb->ops.setup_clk_force_ctrl(wb, enable); in _dpu_encoder_phys_wb_clk_force_ctrl()
48 *forced_on = mdp->ops.setup_clk_force_ctrl(mdp, wb->caps->clk_ctrl, enable); in _dpu_encoder_phys_wb_clk_force_ctrl()
241 intf_cfg.wb = hw_wb->idx; in dpu_encoder_phys_wb_setup_ctl()
264 intf_cfg.wb = hw_wb->idx; in dpu_encoder_phys_wb_setup_ctl()
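In the dpu code, writeback blocks are enum values counted from WB_0, with 0 left to mean "no writeback" (hence the bare if (cfg->wb) checks), and each block maps to one bit of a pending-flush mask via BIT(wb - WB_0). A minimal sketch of that mapping, with the enum values assumed:

    #include <stdint.h>
    #include <stdio.h>

    #define BIT(n) (1U << (n))

    enum dpu_wb { WB_0 = 1, WB_1, WB_2 };      /* assumption: 0 means "none" */

    struct ctl { uint32_t pending_wb_flush_mask; };

    static void update_pending_flush_wb(struct ctl *ctx, enum dpu_wb wb)
    {
        /* mirrors dpu_hw_ctl_update_pending_flush_wb_v1() */
        ctx->pending_wb_flush_mask |= BIT(wb - WB_0);
    }

    int main(void)
    {
        struct ctl c = { 0 };
        update_pending_flush_wb(&c, WB_1);
        printf("mask: 0x%x\n", (unsigned)c.pending_wb_flush_mask);  /* prints 0x2 */
        return 0;
    }
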
/drivers/gpu/drm/amd/amdgpu/
amdgpu_ih.c
98 ih->wptr_addr = adev->wb.gpu_addr + wptr_offs * 4; in amdgpu_ih_ring_init()
99 ih->wptr_cpu = &adev->wb.wb[wptr_offs]; in amdgpu_ih_ring_init()
100 ih->rptr_addr = adev->wb.gpu_addr + rptr_offs * 4; in amdgpu_ih_ring_init()
101 ih->rptr_cpu = &adev->wb.wb[rptr_offs]; in amdgpu_ih_ring_init()
sdma_v4_4_2.c
225 rptr = READ_ONCE(*((u64 *)&ring->adev->wb.wb[ring->rptr_offs])); in sdma_v4_4_2_ring_get_rptr()
245 wptr = READ_ONCE(*((u64 *)&adev->wb.wb[ring->wptr_offs])); in sdma_v4_4_2_ring_get_wptr()
271 u64 *wb = (u64 *)&adev->wb.wb[ring->wptr_offs]; in sdma_v4_4_2_ring_set_wptr() local
281 WRITE_ONCE(*wb, (ring->wptr << 2)); in sdma_v4_4_2_ring_set_wptr()
314 wptr = READ_ONCE(*((u64 *)&adev->wb.wb[ring->wptr_offs])); in sdma_v4_4_2_page_ring_get_wptr()
336 u64 *wb = (u64 *)&adev->wb.wb[ring->wptr_offs]; in sdma_v4_4_2_page_ring_set_wptr() local
339 WRITE_ONCE(*wb, (ring->wptr << 2)); in sdma_v4_4_2_page_ring_set_wptr()
1074 adev->wb.wb[index] = cpu_to_le32(tmp); in sdma_v4_4_2_ring_test_ring()
1089 tmp = le32_to_cpu(adev->wb.wb[index]); in sdma_v4_4_2_ring_test_ring()
1128 adev->wb.wb[index] = cpu_to_le32(tmp); in sdma_v4_4_2_ring_test_ib()
[all …]
si_dma.c
221 gpu_addr = adev->wb.gpu_addr + (index * 4); in si_dma_ring_test_ring()
223 adev->wb.wb[index] = cpu_to_le32(tmp); in si_dma_ring_test_ring()
236 tmp = le32_to_cpu(adev->wb.wb[index]); in si_dma_ring_test_ring()
273 gpu_addr = adev->wb.gpu_addr + (index * 4); in si_dma_ring_test_ib()
275 adev->wb.wb[index] = cpu_to_le32(tmp); in si_dma_ring_test_ib()
298 tmp = le32_to_cpu(adev->wb.wb[index]); in si_dma_ring_test_ib()
amdgpu_mes.c
168 adev->wb.gpu_addr + (adev->mes.sch_ctx_offs[i] * 4); in amdgpu_mes_init()
170 (uint64_t *)&adev->wb.wb[adev->mes.sch_ctx_offs[i]]; in amdgpu_mes_init()
180 adev->mes.query_status_fence_gpu_addr[i] = adev->wb.gpu_addr + in amdgpu_mes_init()
183 (uint64_t *)&adev->wb.wb[adev->mes.query_status_fence_offs[i]]; in amdgpu_mes_init()
381 read_val_gpu_addr = adev->wb.gpu_addr + (addr_offset * 4); in amdgpu_mes_rreg()
382 read_val_ptr = (uint32_t *)&adev->wb.wb[addr_offset]; in amdgpu_mes_rreg()
amdgpu_vpe.c
763 adev->wb.wb[index] = 0; in vpe_ring_test_ring()
764 wb_addr = adev->wb.gpu_addr + (index * 4); in vpe_ring_test_ring()
779 if (le32_to_cpu(adev->wb.wb[index]) == test_pattern) in vpe_ring_test_ring()
807 adev->wb.wb[index] = 0; in vpe_ring_test_ib()
808 wb_addr = adev->wb.gpu_addr + (index * 4); in vpe_ring_test_ib()
834 ret = (le32_to_cpu(adev->wb.wb[index]) == test_pattern) ? 0 : -EINVAL; in vpe_ring_test_ib()
sdma_v3_0.c
387 u32 *wb = (u32 *)ring->wptr_cpu_addr; in sdma_v3_0_ring_set_wptr() local
389 WRITE_ONCE(*wb, ring->wptr << 2); in sdma_v3_0_ring_set_wptr()
392 u32 *wb = (u32 *)ring->wptr_cpu_addr; in sdma_v3_0_ring_set_wptr() local
394 WRITE_ONCE(*wb, ring->wptr << 2); in sdma_v3_0_ring_set_wptr()
818 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v3_0_ring_test_ring()
820 adev->wb.wb[index] = cpu_to_le32(tmp); in sdma_v3_0_ring_test_ring()
835 tmp = le32_to_cpu(adev->wb.wb[index]); in sdma_v3_0_ring_test_ring()
872 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v3_0_ring_test_ib()
874 adev->wb.wb[index] = cpu_to_le32(tmp); in sdma_v3_0_ring_test_ib()
903 tmp = le32_to_cpu(adev->wb.wb[index]); in sdma_v3_0_ring_test_ib()
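amdgpu generalizes the same writeback pool (adev->wb.wb plus adev->wb.gpu_addr) into a slot allocator that the rings, the IH, MES and VPE all draw from; ring pointers are published through it with WRITE_ONCE/READ_ONCE and stored as byte offsets (hence wptr << 2). A hedged standalone model with simplified ONCE macros; the kernel's versions do more:

    #include <stdint.h>
    #include <stdio.h>

    #define WRITE_ONCE(x, v) (*(volatile __typeof__(x) *)&(x) = (v))
    #define READ_ONCE(x)     (*(volatile __typeof__(x) *)&(x))

    /* stand-in for adev->wb.wb; the real pool is GPU-visible memory */
    static uint32_t wb_pool[64] __attribute__((aligned(8)));

    int main(void)
    {
        unsigned int wptr_offs = 8;            /* u32 index handed out by the pool */
        uint64_t *wb = (uint64_t *)&wb_pool[wptr_offs];
        uint64_t wptr = 123;                   /* ring position in dwords */

        WRITE_ONCE(*wb, wptr << 2);            /* publish as a byte offset */
        printf("hw sees wptr byte offset %llu\n",
               (unsigned long long)READ_ONCE(*wb));
        return 0;
    }
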
/drivers/media/platform/mediatek/vcodec/encoder/venc/
venc_vp8_if.c
155 struct venc_vp8_vpu_buf *wb = inst->vsi->work_bufs; in vp8_enc_alloc_work_buf() local
158 if (wb[i].size == 0) in vp8_enc_alloc_work_buf()
170 inst->work_bufs[i].size = wb[i].size; in vp8_enc_alloc_work_buf()
189 wb[i].vpua); in vp8_enc_alloc_work_buf()
190 memcpy(inst->work_bufs[i].va, tmp_va, wb[i].size); in vp8_enc_alloc_work_buf()
192 wb[i].iova = inst->work_bufs[i].dma_addr; in vp8_enc_alloc_work_buf()
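The vp8 encoder hits show the firmware handshake for work buffers: the VPU reports a size and a VPU-side address per buffer, the driver skips zero-sized entries, allocates a DMA buffer of that size, copies the firmware's template into it, and writes the resulting IOVA back for the firmware to use. A hedged model with plain malloc standing in for the DMA allocator:

    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>
    #include <stdio.h>

    struct work_buf_desc {                     /* models venc_vp8_vpu_buf */
        size_t size;                           /* reported by the firmware */
        void *vpua;                            /* firmware-side template data */
        uint64_t iova;                         /* written back for the firmware */
    };

    int main(void)
    {
        char template_data[16] = "fw-template";
        struct work_buf_desc wb = { sizeof(template_data), template_data, 0 };

        if (wb.size == 0)                      /* skipped, as in the driver's loop */
            return 0;

        void *va = malloc(wb.size);            /* driver: DMA-coherent allocation */
        if (!va)
            return 1;
        memcpy(va, wb.vpua, wb.size);          /* copy the template over */
        wb.iova = (uintptr_t)va;               /* driver: dma_addr, not a CPU pointer */

        printf("buf %zu bytes at iova 0x%llx\n", wb.size,
               (unsigned long long)wb.iova);
        free(va);
        return 0;
    }
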
/drivers/md/
dm-writecache.c
1684 struct writeback_struct *wb; in __writecache_endio_pmem() local
1697 e = wb->wc_list[i]; in __writecache_endio_pmem()
1714 if (wb->wc_list != wb->wc_list_inline) in __writecache_endio_pmem()
1715 kfree(wb->wc_list); in __writecache_endio_pmem()
1716 bio_put(&wb->bio); in __writecache_endio_pmem()
1846 wb->wc = wc; in __writecache_writeback_pmem()
1856 wb->wc_list = wb->wc_list_inline; in __writecache_writeback_pmem()
1862 wb->wc_list[0] = e; in __writecache_writeback_pmem()
1863 wb->wc_list_n = 1; in __writecache_writeback_pmem()
1870 if (!wc_add_block(wb, f)) in __writecache_writeback_pmem()
[all …]
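dm-writecache batches cache entries per write-back bio in wb->wc_list, which starts out pointing at a small inline array and is only kmalloc'd when the batch outgrows it, so the completion path must kfree() only the non-inline case. A sketch of just that inline-vs-heap decision; the inline capacity here is an assumption:

    #include <stdlib.h>
    #include <stdio.h>

    #define WC_LIST_INLINE 4                   /* assumed inline capacity */

    struct writeback_struct {
        unsigned int wc_list_n;                /* entries in the batch */
        void **wc_list;                        /* inline array or heap-allocated */
        void *wc_list_inline[WC_LIST_INLINE];
    };

    static void wb_endio(struct writeback_struct *wb)
    {
        /* free only if the list outgrew the inline array,
         * mirroring __writecache_endio_pmem() */
        if (wb->wc_list != wb->wc_list_inline)
            free(wb->wc_list);
    }

    int main(void)
    {
        struct writeback_struct wb = { 0 };

        wb.wc_list = wb.wc_list_inline;        /* start inline */
        wb.wc_list[0] = &wb;                   /* first entry of the batch */
        wb.wc_list_n = 1;

        wb_endio(&wb);                         /* inline case: nothing to free */
        puts("ok");
        return 0;
    }
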
/drivers/gpu/drm/i915/
i915_active.c
639 struct wait_barrier *wb = container_of(wq, typeof(*wb), base); in barrier_wake() local
641 if (i915_active_is_idle(wb->ref)) { in barrier_wake()
652 struct wait_barrier *wb; in __await_barrier() local
654 wb = kmalloc(sizeof(*wb), GFP_KERNEL); in __await_barrier()
655 if (unlikely(!wb)) in __await_barrier()
660 kfree(wb); in __await_barrier()
664 wb->base.flags = 0; in __await_barrier()
665 wb->base.func = barrier_wake; in __await_barrier()
666 wb->base.private = fence; in __await_barrier()
667 wb->ref = ref; in __await_barrier()
[all …]
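The i915 hits use the classic embedded-wait-entry trick: a wait_queue_entry sits inside a larger wait_barrier, and the wake callback recovers the outer struct with container_of() to ask whether the tracked ref has gone idle. A hedged standalone model with a hand-rolled container_of:

    #include <stddef.h>
    #include <stdlib.h>
    #include <stdio.h>

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct wait_queue_entry { int flags; void *private; };

    struct wait_barrier {
        struct wait_queue_entry base;          /* embedded entry, as in __await_barrier() */
        int *ref;                              /* thing whose idleness we wait on */
    };

    static int barrier_wake(struct wait_queue_entry *wq)
    {
        struct wait_barrier *wb = container_of(wq, struct wait_barrier, base);
        return *wb->ref == 0;                  /* stands in for i915_active_is_idle() */
    }

    int main(void)
    {
        int refcount = 0;
        struct wait_barrier *wb = malloc(sizeof(*wb));
        if (!wb)                               /* same unlikely-alloc-failure check */
            return 1;
        wb->base.flags = 0;
        wb->ref = &refcount;
        printf("idle: %d\n", barrier_wake(&wb->base));
        free(wb);
        return 0;
    }
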
/drivers/gpu/drm/vkms/
vkms_writeback.c
169 struct drm_writeback_connector *wb = &vkms_output->wb_connector; in vkms_enable_writeback_connector() local
178 drm_connector_helper_add(&wb->base, &vkms_wb_conn_helper_funcs); in vkms_enable_writeback_connector()
180 return drmm_writeback_connector_init(&vkmsdev->drm, wb, in vkms_enable_writeback_connector()

Completed in 81 milliseconds
