| /drivers/gpu/drm/amd/display/amdgpu_dm/ |
| A D | amdgpu_dm_crtc.c |
|   197  struct idle_workqueue *idle_work;  in amdgpu_dm_idle_worker() local
|   202  while (idle_work->enable) {  in amdgpu_dm_idle_worker()
|   204  mutex_lock(&idle_work->dm->dc_lock);  in amdgpu_dm_idle_worker()
|   221  if (idle_work->enable)  in amdgpu_dm_idle_worker()
|   230  struct idle_workqueue *idle_work;  in idle_create_workqueue() local
|   232  idle_work = kzalloc(sizeof(*idle_work), GFP_KERNEL);  in idle_create_workqueue()
|   233  if (ZERO_OR_NULL_PTR(idle_work))  in idle_create_workqueue()
|   236  idle_work->dm = &adev->dm;  in idle_create_workqueue()
|   237  idle_work->enable = false;  in idle_create_workqueue()
|   238  idle_work->running = false;  in idle_create_workqueue()
|   [all …]
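The amdgpu_dm hits show a hand-rolled idle worker: a heap-allocated context with enable/running flags that the worker loop polls under the display-core lock. A minimal sketch of that shape follows; the work_struct member, the INIT_WORK hookup, and the fake_dm stand-in are assumptions not visible in the hits above.

    /* Hedged sketch of the allocate-and-flag pattern in idle_create_workqueue(). */
    #include <linux/slab.h>
    #include <linux/mutex.h>
    #include <linux/workqueue.h>

    struct fake_dm {                     /* hypothetical stand-in for the display manager */
            struct mutex dc_lock;
    };

    struct idle_workqueue {
            struct work_struct work;     /* assumed: how the worker gets scheduled */
            struct fake_dm *dm;
            bool enable;
            bool running;
    };

    static void idle_worker(struct work_struct *work)
    {
            struct idle_workqueue *idle_work =
                    container_of(work, struct idle_workqueue, work);

            idle_work->running = true;
            while (idle_work->enable) {
                    /* take the display-core lock around hardware state changes;
                     * the real worker also waits between iterations */
                    mutex_lock(&idle_work->dm->dc_lock);
                    /* ... program idle optimizations here ... */
                    mutex_unlock(&idle_work->dm->dc_lock);
            }
            idle_work->running = false;
    }

    static struct idle_workqueue *idle_create_workqueue(struct fake_dm *dm)
    {
            struct idle_workqueue *idle_work;

            idle_work = kzalloc(sizeof(*idle_work), GFP_KERNEL);
            if (ZERO_OR_NULL_PTR(idle_work))
                    return NULL;

            idle_work->dm = dm;
            idle_work->enable = false;   /* callers flip this before queueing */
            idle_work->running = false;
            INIT_WORK(&idle_work->work, idle_worker);

            return idle_work;
    }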
|
| /drivers/gpu/drm/msm/ |
| A D | msm_gpu_devfreq.c |
|   201  msm_hrtimer_work_init(&df->idle_work, gpu->worker, msm_devfreq_idle_work,  in msm_devfreq_init()
|   207  hrtimer_cancel(&df->idle_work.timer);  in cancel_idle_work()
|   208  kthread_cancel_work_sync(&df->idle_work.work);  in cancel_idle_work()
|   344  struct msm_gpu_devfreq, idle_work.work);  in msm_devfreq_idle_work()
|   373  msm_hrtimer_queue_work(&df->idle_work, ms_to_ktime(1),  in msm_devfreq_idle()
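msm defers its devfreq idle handling through an hrtimer that kicks a kthread_work (struct msm_hrtimer_work), and tears it down by cancelling both halves. The sketch below re-creates that pairing generically; the my_hrtimer_work type and helpers are hypothetical, only the hrtimer and kthread APIs are real.

    #include <linux/hrtimer.h>
    #include <linux/kthread.h>
    #include <linux/ktime.h>

    struct my_hrtimer_work {
            struct hrtimer timer;
            struct kthread_work work;
            struct kthread_worker *worker;
    };

    static enum hrtimer_restart my_hrtimer_fired(struct hrtimer *t)
    {
            struct my_hrtimer_work *w = container_of(t, struct my_hrtimer_work, timer);

            /* defer the real work to the kthread worker, out of timer context */
            kthread_queue_work(w->worker, &w->work);
            return HRTIMER_NORESTART;
    }

    static void my_hrtimer_work_init(struct my_hrtimer_work *w,
                                     struct kthread_worker *worker,
                                     kthread_work_func_t fn)
    {
            w->worker = worker;
            kthread_init_work(&w->work, fn);
            hrtimer_init(&w->timer, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
            w->timer.function = my_hrtimer_fired;
    }

    static void my_hrtimer_queue_work(struct my_hrtimer_work *w, ktime_t delay)
    {
            hrtimer_start(&w->timer, delay, HRTIMER_MODE_REL);
    }

    static void my_cancel_idle_work(struct my_hrtimer_work *w)
    {
            /* same two-step teardown as cancel_idle_work() above */
            hrtimer_cancel(&w->timer);
            kthread_cancel_work_sync(&w->work);
    }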
|
| A D | msm_gpu.h |
|   159  struct msm_hrtimer_work idle_work;  member
|
| /drivers/gpu/drm/amd/amdgpu/ |
| A D | amdgpu_jpeg.c |
|    42  INIT_DELAYED_WORK(&adev->jpeg.idle_work, amdgpu_jpeg_idle_work_handler);  in amdgpu_jpeg_sw_init()
|    99  cancel_delayed_work_sync(&adev->jpeg.idle_work);  in amdgpu_jpeg_suspend()
|   112  container_of(work, struct amdgpu_device, jpeg.idle_work.work);  in amdgpu_jpeg_idle_work_handler()
|   128  schedule_delayed_work(&adev->jpeg.idle_work, JPEG_IDLE_TIMEOUT);  in amdgpu_jpeg_idle_work_handler()
|   136  cancel_delayed_work_sync(&adev->jpeg.idle_work);  in amdgpu_jpeg_ring_begin_use()
|   147  schedule_delayed_work(&ring->adev->jpeg.idle_work, JPEG_IDLE_TIMEOUT);  in amdgpu_jpeg_ring_end_use()
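The amdgpu media blocks in this directory (JPEG, VCE, VPE, UVD, VCN) all follow the same delayed-work idle scheme: init installs the handler, ring_begin_use cancels the pending work (and powers back up if it had already run), ring_end_use re-arms the timeout, and the handler either powers down or reschedules itself while work is outstanding. Below is a condensed, hypothetical sketch of that scheme; my_block, the power helpers, and MY_IDLE_TIMEOUT stand in for the per-IP state and the *_IDLE_TIMEOUT constants.

    #include <linux/workqueue.h>
    #include <linux/jiffies.h>

    #define MY_IDLE_TIMEOUT  msecs_to_jiffies(1000)   /* assumed value */

    struct my_block {
            struct delayed_work idle_work;
    };

    static bool my_block_busy(struct my_block *blk)
    {
            /* placeholder for "are any fences/jobs still outstanding?" */
            return false;
    }

    static void my_block_power_down(struct my_block *blk) { }
    static void my_block_power_up(struct my_block *blk) { }

    static void my_block_idle_work_handler(struct work_struct *work)
    {
            struct my_block *blk = container_of(work, struct my_block, idle_work.work);

            if (my_block_busy(blk))
                    /* still busy: check again one timeout later */
                    schedule_delayed_work(&blk->idle_work, MY_IDLE_TIMEOUT);
            else
                    my_block_power_down(blk);
    }

    static void my_block_sw_init(struct my_block *blk)
    {
            INIT_DELAYED_WORK(&blk->idle_work, my_block_idle_work_handler);
    }

    static void my_block_ring_begin_use(struct my_block *blk)
    {
            /*
             * cancel_delayed_work_sync() returns true only if the idle work
             * was still pending; in that case the block never powered down
             * and no clock bump is needed (the set_clocks idiom in VCE/UVD).
             */
            bool set_clocks = !cancel_delayed_work_sync(&blk->idle_work);

            if (set_clocks)
                    my_block_power_up(blk);
    }

    static void my_block_ring_end_use(struct my_block *blk)
    {
            /* arm (or re-arm) the idle timeout once the submission is done */
            schedule_delayed_work(&blk->idle_work, MY_IDLE_TIMEOUT);
    }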
|
| A D | amdgpu_vce.c |
|   195  INIT_DELAYED_WORK(&adev->vce.idle_work, amdgpu_vce_idle_work_handler);  in amdgpu_vce_sw_init()
|   264  cancel_delayed_work_sync(&adev->vce.idle_work);  in amdgpu_vce_suspend()
|   335  container_of(work, struct amdgpu_device, vce.idle_work.work);  in amdgpu_vce_idle_work_handler()
|   352  schedule_delayed_work(&adev->vce.idle_work, VCE_IDLE_TIMEOUT);  in amdgpu_vce_idle_work_handler()
|   372  set_clocks = !cancel_delayed_work_sync(&adev->vce.idle_work);  in amdgpu_vce_ring_begin_use()
|   398  schedule_delayed_work(&ring->adev->vce.idle_work, VCE_IDLE_TIMEOUT);  in amdgpu_vce_ring_end_use()
|
| A D | amdgpu_vpe.c |
|   328  container_of(work, struct amdgpu_device, vpe.idle_work.work);  in vpe_idle_work_handler()
|   336  schedule_delayed_work(&adev->vpe.idle_work, VPE_IDLE_TIMEOUT);  in vpe_idle_work_handler()
|   355  INIT_DELAYED_WORK(&adev->vpe.idle_work, vpe_idle_work_handler);  in vpe_common_init()
|   450  cancel_delayed_work_sync(&adev->vpe.idle_work);  in vpe_suspend()
|   850  cancel_delayed_work_sync(&adev->vpe.idle_work);  in vpe_ring_begin_use()
|   874  schedule_delayed_work(&adev->vpe.idle_work, VPE_IDLE_TIMEOUT);  in vpe_ring_end_use()
|
| A D | amdgpu_uvd.c |
|   192  INIT_DELAYED_WORK(&adev->uvd.idle_work, amdgpu_uvd_idle_work_handler);  in amdgpu_uvd_sw_init()
|   428  cancel_delayed_work_sync(&adev->uvd.idle_work);  in amdgpu_uvd_prepare_suspend()
|  1264  container_of(work, struct amdgpu_device, uvd.idle_work.work);  in amdgpu_uvd_idle_work_handler()
|  1287  schedule_delayed_work(&adev->uvd.idle_work, UVD_IDLE_TIMEOUT);  in amdgpu_uvd_idle_work_handler()
|  1299  set_clocks = !cancel_delayed_work_sync(&adev->uvd.idle_work);  in amdgpu_uvd_ring_begin_use()
|  1316  schedule_delayed_work(&ring->adev->uvd.idle_work, UVD_IDLE_TIMEOUT);  in amdgpu_uvd_ring_end_use()
|
| A D | amdgpu_vce.h |
|    45  struct delayed_work idle_work;  member
|
| A D | amdgpu_uvd.h |
|    66  struct delayed_work idle_work;  member
|
| A D | amdgpu_vpe.h |
|    77  struct delayed_work idle_work;  member
|
| A D | amdgpu_vcn.c |
|   139  INIT_DELAYED_WORK(&adev->vcn.inst[i].idle_work, amdgpu_vcn_idle_work_handler);  in amdgpu_vcn_sw_init()
|   355  cancel_delayed_work_sync(&adev->vcn.inst[i].idle_work);  in amdgpu_vcn_suspend()
|   411  container_of(work, struct amdgpu_vcn_inst, idle_work.work);  in amdgpu_vcn_idle_work_handler()
|   452  schedule_delayed_work(&vcn_inst->idle_work, VCN_IDLE_TIMEOUT);  in amdgpu_vcn_idle_work_handler()
|   464  cancel_delayed_work_sync(&vcn_inst->idle_work);  in amdgpu_vcn_ring_begin_use()
|   525  schedule_delayed_work(&ring->adev->vcn.inst[ring->me].idle_work,  in amdgpu_vcn_ring_end_use()
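VCN differs from the single-instance blocks above in that every instance carries its own idle_work, and the ring hooks pick the right one via ring->me. Below is a hedged per-instance sketch; my_vcn_inst and my_ring are reductions of amdgpu_vcn_inst/amdgpu_ring, not the real structs.

    #include <linux/workqueue.h>
    #include <linux/jiffies.h>

    #define MY_VCN_IDLE_TIMEOUT  msecs_to_jiffies(1000)   /* assumed value */

    struct my_vcn_inst {
            struct delayed_work idle_work;
            /* per-instance clocks, fences, ... */
    };

    struct my_ring {
            struct my_vcn_inst *inst;  /* resolved like ring->adev->vcn.inst[ring->me] */
    };

    static void my_vcn_idle_work_handler(struct work_struct *work)
    {
            struct my_vcn_inst *vcn_inst =
                    container_of(work, struct my_vcn_inst, idle_work.work);

            /* power down only this instance; others keep their own timers */
            (void)vcn_inst;
    }

    static void my_vcn_sw_init(struct my_vcn_inst inst[], int num)
    {
            int i;

            for (i = 0; i < num; i++)
                    INIT_DELAYED_WORK(&inst[i].idle_work, my_vcn_idle_work_handler);
    }

    static void my_vcn_ring_begin_use(struct my_ring *ring)
    {
            cancel_delayed_work_sync(&ring->inst->idle_work);
    }

    static void my_vcn_ring_end_use(struct my_ring *ring)
    {
            schedule_delayed_work(&ring->inst->idle_work, MY_VCN_IDLE_TIMEOUT);
    }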
|
| A D | vce_v2_0.c |
|   486  cancel_delayed_work_sync(&ip_block->adev->vce.idle_work);  in vce_v2_0_hw_fini()
|   508  cancel_delayed_work_sync(&adev->vce.idle_work);  in vce_v2_0_suspend()
|
| A D | vcn_v1_0.c |
|   154  adev->vcn.inst[0].idle_work.work.func = vcn_v1_0_idle_work_handler;  in vcn_v1_0_sw_init()
|   282  cancel_delayed_work_sync(&vinst->idle_work);  in vcn_v1_0_hw_fini()
|   306  idle_work_unexecuted = cancel_delayed_work_sync(&adev->vcn.inst[0].idle_work);  in vcn_v1_0_suspend()
|  1873  container_of(work, struct amdgpu_vcn_inst, idle_work.work);  in vcn_v1_0_idle_work_handler()
|  1907  schedule_delayed_work(&adev->vcn.inst[0].idle_work, VCN_IDLE_TIMEOUT);  in vcn_v1_0_idle_work_handler()
|  1914  bool set_clocks = !cancel_delayed_work_sync(&adev->vcn.inst[0].idle_work);  in vcn_v1_0_ring_begin_use()
|  1966  schedule_delayed_work(&ring->adev->vcn.inst[0].idle_work, VCN_IDLE_TIMEOUT);  in vcn_v1_0_ring_end_use()
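vcn_v1_0 (and vcn_v2_5 further down) reuse the common VCN init but swap in a generation-specific handler by overwriting the work function pointer. A minimal sketch of that trick follows, with hypothetical my_* names; it is only safe because nothing has queued the work yet.

    #include <linux/workqueue.h>

    static void my_generic_idle_handler(struct work_struct *work) { }
    static void my_v1_idle_handler(struct work_struct *work) { }

    static void my_sw_init(struct delayed_work *idle_work)
    {
            /* common path installs the generic handler ... */
            INIT_DELAYED_WORK(idle_work, my_generic_idle_handler);

            /* ... and the per-generation code simply repoints it,
             * before the work is ever scheduled */
            idle_work->work.func = my_v1_idle_handler;
    }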
|
| A D | amdgpu_jpeg.h |
|   130  struct delayed_work idle_work;  member
|
| A D | uvd_v3_1.c |
|   699  cancel_delayed_work_sync(&adev->uvd.idle_work);  in uvd_v3_1_hw_fini()
|   730  cancel_delayed_work_sync(&adev->uvd.idle_work);  in uvd_v3_1_suspend()
|
| A D | uvd_v4_2.c |
|   213  cancel_delayed_work_sync(&adev->uvd.idle_work);  in uvd_v4_2_hw_fini()
|   244  cancel_delayed_work_sync(&adev->uvd.idle_work);  in uvd_v4_2_suspend()
|
| A D | uvd_v5_0.c |
|   211  cancel_delayed_work_sync(&adev->uvd.idle_work);  in uvd_v5_0_hw_fini()
|   242  cancel_delayed_work_sync(&adev->uvd.idle_work);  in uvd_v5_0_suspend()
|
| A D | vcn_v2_5.c |
|   115  container_of(work, struct amdgpu_vcn_inst, idle_work.work);  in vcn_v2_5_idle_work_handler()
|   162  schedule_delayed_work(&adev->vcn.inst[0].idle_work, VCN_IDLE_TIMEOUT);  in vcn_v2_5_idle_work_handler()
|   174  cancel_delayed_work_sync(&adev->vcn.inst[0].idle_work);  in vcn_v2_5_ring_begin_use()
|   235  schedule_delayed_work(&adev->vcn.inst[0].idle_work,  in vcn_v2_5_ring_end_use()
|   334  adev->vcn.inst[j].idle_work.work.func = vcn_v2_5_idle_work_handler;  in vcn_v2_5_sw_init()
|   551  cancel_delayed_work_sync(&vinst->idle_work);  in vcn_v2_5_hw_fini()
|
| A D | vce_v3_0.c |
|   493  cancel_delayed_work_sync(&adev->vce.idle_work);  in vce_v3_0_hw_fini()
|   519  cancel_delayed_work_sync(&adev->vce.idle_work);  in vce_v3_0_suspend()
|
| A D | vce_v4_0.c |
|   543  cancel_delayed_work_sync(&adev->vce.idle_work);  in vce_v4_0_hw_fini()
|   585  cancel_delayed_work_sync(&adev->vce.idle_work);  in vce_v4_0_suspend()
|
| /drivers/gpu/drm/radeon/ |
| A D | radeon_vce.c |
|    65  INIT_DELAYED_WORK(&rdev->vce.idle_work, radeon_vce_idle_work_handler);  in radeon_vce_init()
|   264  container_of(work, struct radeon_device, vce.idle_work.work);  in radeon_vce_idle_work_handler()
|   274  schedule_delayed_work(&rdev->vce.idle_work,  in radeon_vce_idle_work_handler()
|   289  bool set_clocks = !cancel_delayed_work_sync(&rdev->vce.idle_work);  in radeon_vce_note_usage()
|   290  set_clocks &= schedule_delayed_work(&rdev->vce.idle_work,  in radeon_vce_note_usage()
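radeon folds begin_use/end_use into a single note_usage() helper: cancel any pending idle work, immediately re-arm it, and only raise clocks when both steps report that no timer was already running. A hedged sketch with hypothetical names follows; the delayed_work is assumed to be initialised elsewhere, as in radeon_vce_init().

    #include <linux/workqueue.h>
    #include <linux/jiffies.h>

    #define MY_IDLE_TIMEOUT_MS  1000   /* assumed value */

    struct my_dev {
            struct delayed_work idle_work;   /* INIT_DELAYED_WORK() done at init time */
    };

    static void my_raise_clocks(struct my_dev *dev) { }

    static void my_note_usage(struct my_dev *dev)
    {
            /* true if the idle handler was not pending (clocks may be down) */
            bool set_clocks = !cancel_delayed_work_sync(&dev->idle_work);

            /* re-arm the idle timeout; returns false if someone beat us to it */
            set_clocks &= schedule_delayed_work(&dev->idle_work,
                                                msecs_to_jiffies(MY_IDLE_TIMEOUT_MS));

            if (set_clocks)
                    my_raise_clocks(dev);
    }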
|
| A D | radeon_uvd.c |
|    72  INIT_DELAYED_WORK(&rdev->uvd.idle_work, radeon_uvd_idle_work_handler);  in radeon_uvd_init()
|   862  container_of(work, struct radeon_device, uvd.idle_work.work);  in radeon_uvd_idle_work_handler()
|   873  schedule_delayed_work(&rdev->uvd.idle_work,  in radeon_uvd_idle_work_handler()
|   881  bool set_clocks = !cancel_delayed_work_sync(&rdev->uvd.idle_work);  in radeon_uvd_note_usage()
|   882  set_clocks &= schedule_delayed_work(&rdev->uvd.idle_work,  in radeon_uvd_note_usage()
|
| /drivers/net/wireless/zydas/zd1211rw/ |
| A D | zd_usb.h |
|   175  struct delayed_work idle_work;  member
|
| A D | zd_usb.c |
|   848  cancel_delayed_work_sync(&rx->idle_work);  in zd_usb_disable_rx()
|  1130  container_of(work, struct zd_usb, rx.idle_work.work);  in zd_rx_idle_timer_handler()
|  1153  mod_delayed_work(zd_workqueue, &rx->idle_work, ZD_RX_IDLE_INTERVAL);  in zd_usb_reset_rx_idle_timer()
|  1179  INIT_DELAYED_WORK(&rx->idle_work, zd_rx_idle_timer_handler);  in init_usb_rx()
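zd1211rw uses the delayed work as a pushable watchdog: every received frame calls mod_delayed_work() to move the deadline forward, so the handler only runs after a full interval of RX silence. Sketch below with hypothetical my_* names and interval; only the workqueue API calls are real.

    #include <linux/workqueue.h>
    #include <linux/jiffies.h>

    #define MY_RX_IDLE_INTERVAL  (30 * HZ)   /* assumed value */

    /* mirrors zd_workqueue; assumed to be created during module init */
    static struct workqueue_struct *my_workqueue;

    struct my_rx {
            struct delayed_work idle_work;
    };

    static void my_rx_idle_timer_handler(struct work_struct *work)
    {
            struct my_rx *rx = container_of(work, struct my_rx, idle_work.work);

            /* no frames for a while: reset the receiver here */
            (void)rx;
    }

    static void my_init_rx(struct my_rx *rx)
    {
            INIT_DELAYED_WORK(&rx->idle_work, my_rx_idle_timer_handler);
    }

    static void my_reset_rx_idle_timer(struct my_rx *rx)
    {
            /* push the deadline out again; queues the work if it was not pending */
            mod_delayed_work(my_workqueue, &rx->idle_work, MY_RX_IDLE_INTERVAL);
    }

    static void my_disable_rx(struct my_rx *rx)
    {
            cancel_delayed_work_sync(&rx->idle_work);
    }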
|
| /drivers/bus/ |
| A D | ti-sysc.c |
|   160  struct delayed_work idle_work;  member
|  2881  ddata = container_of(work, struct sysc, idle_work.work);  in ti_sysc_idle()
|  3240  INIT_DELAYED_WORK(&ddata->idle_work, ti_sysc_idle);  in sysc_probe()
|  3246  schedule_delayed_work(&ddata->idle_work, 3000);  in sysc_probe()
|  3271  if (cancel_delayed_work_sync(&ddata->idle_work))  in sysc_remove()
|  3272  ti_sysc_idle(&ddata->idle_work.work);  in sysc_remove()
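ti-sysc arms the idle work once at probe time and, on remove, runs the handler synchronously if it had not fired yet, so the device never skips its final idle transition. A hedged sketch follows; my_ddata is a hypothetical reduction of struct sysc.

    #include <linux/workqueue.h>
    #include <linux/jiffies.h>

    struct my_ddata {
            struct delayed_work idle_work;
    };

    static void my_idle(struct work_struct *work)
    {
            struct my_ddata *ddata = container_of(work, struct my_ddata, idle_work.work);

            /* drop the device into its idle/powered-off state */
            (void)ddata;
    }

    static void my_probe(struct my_ddata *ddata)
    {
            INIT_DELAYED_WORK(&ddata->idle_work, my_idle);
            /* note: the sysc_probe() hit above passes a raw tick count (3000) */
            schedule_delayed_work(&ddata->idle_work, msecs_to_jiffies(3000));
    }

    static void my_remove(struct my_ddata *ddata)
    {
            /* if the work never ran, do the idle transition now, synchronously */
            if (cancel_delayed_work_sync(&ddata->idle_work))
                    my_idle(&ddata->idle_work.work);
    }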
|