Searched for refs:gpu_read (results 1 – 25 of 28), sorted by relevance

/drivers/gpu/drm/panfrost/
panfrost_gpu.c:30 fault_status = gpu_read(pfdev, GPU_FAULT_STATUS); in panfrost_gpu_irq_handler()
31 state = gpu_read(pfdev, GPU_INT_STAT); in panfrost_gpu_irq_handler()
37 address |= gpu_read(pfdev, GPU_FAULT_ADDRESS_LO); in panfrost_gpu_irq_handler()
146 quirks = gpu_read(pfdev, GPU_TILER_CONFIG); in panfrost_gpu_init_quirks()
290 gpu_id = gpu_read(pfdev, GPU_ID); in panfrost_gpu_init_features()
379 hi = gpu_read(pfdev, GPU_CYCLE_COUNT_HI); in panfrost_cycle_counter_read()
380 lo = gpu_read(pfdev, GPU_CYCLE_COUNT_LO); in panfrost_cycle_counter_read()
391 hi = gpu_read(pfdev, GPU_TIMESTAMP_HI); in panfrost_timestamp_read()
392 lo = gpu_read(pfdev, GPU_TIMESTAMP_LO); in panfrost_timestamp_read()
393 } while (hi != gpu_read(pfdev, GPU_TIMESTAMP_HI)); in panfrost_timestamp_read()
[all …]
panfrost_dump.c:97 dumpreg->value = gpu_read(pfdev, reg); in panfrost_core_dump_registers()
panfrost_regs.h:374 #define gpu_read(dev, reg) readl(dev->iomem + reg) macro
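
The panfrost hits illustrate the two patterns that recur throughout this listing: gpu_read() itself is a thin readl() wrapper over the ioremapped register window (panfrost_regs.h:374), and 64-bit counters exposed as _LO/_HI register pairs are read with a re-read of the high word to avoid a torn value when the low word rolls over (panfrost_timestamp_read(), lines 391-393 above). A minimal sketch of that retry loop follows; the device struct, register names, and offsets are invented stand-ins, not the panfrost definitions.

/* Sketch only: "struct my_gpu", MY_TIMESTAMP_LO/HI and their offsets are
 * invented for illustration; they are not the panfrost definitions. */
#include <linux/io.h>
#include <linux/types.h>

struct my_gpu {
	void __iomem *iomem;		/* ioremapped register window */
};

#define gpu_read(dev, reg)	readl((dev)->iomem + (reg))

#define MY_TIMESTAMP_LO		0x08
#define MY_TIMESTAMP_HI		0x0c

static u64 my_timestamp_read(struct my_gpu *dev)
{
	u32 hi, lo;

	/* Re-read the high word until it is stable, so a carry from LO into
	 * HI between the two reads cannot yield a torn 64-bit value. */
	do {
		hi = gpu_read(dev, MY_TIMESTAMP_HI);
		lo = gpu_read(dev, MY_TIMESTAMP_LO);
	} while (hi != gpu_read(dev, MY_TIMESTAMP_HI));

	return ((u64)hi << 32) | lo;
}
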
/drivers/gpu/drm/panthor/
panthor_gpu.c:93 ptdev->gpu_info.gpu_id = gpu_read(ptdev, GPU_ID); in panthor_gpu_init_info()
94 ptdev->gpu_info.csf_id = gpu_read(ptdev, GPU_CSF_ID); in panthor_gpu_init_info()
95 ptdev->gpu_info.gpu_rev = gpu_read(ptdev, GPU_REVID); in panthor_gpu_init_info()
96 ptdev->gpu_info.core_features = gpu_read(ptdev, GPU_CORE_FEATURES); in panthor_gpu_init_info()
97 ptdev->gpu_info.l2_features = gpu_read(ptdev, GPU_L2_FEATURES); in panthor_gpu_init_info()
98 ptdev->gpu_info.tiler_features = gpu_read(ptdev, GPU_TILER_FEATURES); in panthor_gpu_init_info()
99 ptdev->gpu_info.mem_features = gpu_read(ptdev, GPU_MEM_FEATURES); in panthor_gpu_init_info()
100 ptdev->gpu_info.mmu_features = gpu_read(ptdev, GPU_MMU_FEATURES); in panthor_gpu_init_info()
102 ptdev->gpu_info.max_threads = gpu_read(ptdev, GPU_THREAD_MAX_THREADS); in panthor_gpu_init_info()
109 ptdev->gpu_info.as_present = gpu_read(ptdev, GPU_AS_PRESENT); in panthor_gpu_init_info()
[all …]
panthor_device.h:396 if (!gpu_read(ptdev, __reg_prefix ## _INT_STAT)) \
410 u32 status = gpu_read(ptdev, __reg_prefix ## _INT_RAWSTAT) & pirq->mask; \
463 static inline u32 gpu_read(struct panthor_device *ptdev, u32 reg) in gpu_read() function
481 return (gpu_read(ptdev, reg) | ((u64)gpu_read(ptdev, reg + 4) << 32)); in gpu_read64()
494 hi1 = gpu_read(ptdev, reg + 4); in gpu_read64_counter()
495 lo = gpu_read(ptdev, reg); in gpu_read64_counter()
496 hi2 = gpu_read(ptdev, reg + 4); in gpu_read64_counter()
502 read_poll_timeout(gpu_read, val, cond, delay_us, timeout_us, false, \
507 read_poll_timeout_atomic(gpu_read, val, cond, delay_us, timeout_us, \
panthor_fw.c:1040 !(gpu_read(ptdev, JOB_INT_STAT) & JOB_INT_GLOBAL_IF)) in panthor_fw_start()
1051 u32 status = gpu_read(ptdev, MCU_STATUS); in panthor_fw_start()
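
Besides the plain accessor (panthor_device.h:463) and the hi/lo/hi counter variant (gpu_read64_counter(), lines 494-496), panthor_device.h feeds gpu_read() into read_poll_timeout() / read_poll_timeout_atomic() from linux/iopoll.h (lines 502 and 507) so callers can wait on a register condition with a bounded timeout. A hedged sketch of such a call written out directly; it assumes the panthor headers for struct panthor_device and gpu_read(), while MY_STATUS and MY_STATUS_READY are invented placeholders.

/* Sketch only: assumes the panthor driver headers; MY_STATUS and
 * MY_STATUS_READY are invented placeholders, not real panthor registers. */
#include <linux/bits.h>
#include <linux/iopoll.h>

#define MY_STATUS		0x34
#define MY_STATUS_READY		BIT(0)

static int wait_for_ready(struct panthor_device *ptdev)
{
	u32 val;

	/* Sample gpu_read(ptdev, MY_STATUS) every 10 us; give up with
	 * -ETIMEDOUT after 100 ms. */
	return read_poll_timeout(gpu_read, val, val & MY_STATUS_READY,
				 10, 100000, false, ptdev, MY_STATUS);
}
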
/drivers/gpu/drm/msm/adreno/
a5xx_gpu.c:1030 gpu_read(gpu, REG_A5XX_RBBM_SW_RESET_CMD); in a5xx_recover()
1098 gpu_read(gpu, REG_A5XX_RBBM_STATUS), in a5xx_idle()
1100 gpu_read(gpu, REG_A5XX_CP_RB_RPTR), in a5xx_idle()
1101 gpu_read(gpu, REG_A5XX_CP_RB_WPTR)); in a5xx_idle()
1149 gpu_read(gpu, REG_A5XX_CP_HW_FAULT)); in a5xx_cp_err_irq()
1249 gpu_read(gpu, REG_A5XX_RBBM_STATUS), in a5xx_fault_detect_irq()
1250 gpu_read(gpu, REG_A5XX_CP_RB_RPTR), in a5xx_fault_detect_irq()
1251 gpu_read(gpu, REG_A5XX_CP_RB_WPTR), in a5xx_fault_detect_irq()
1253 gpu_read(gpu, REG_A5XX_CP_IB1_BUFSZ), in a5xx_fault_detect_irq()
1255 gpu_read(gpu, REG_A5XX_CP_IB2_BUFSZ)); in a5xx_fault_detect_irq()
[all …]
a2xx_gpu.c:274 gpu_read(gpu, REG_AXXX_CP_SCRATCH_REG0 + i)); in a2xx_recover()
282 gpu_read(gpu, REG_A2XX_RBBM_SOFT_RESET); in a2xx_recover()
306 if (spin_until(!(gpu_read(gpu, REG_A2XX_RBBM_STATUS) & in a2xx_idle()
321 mstatus = gpu_read(gpu, REG_A2XX_MASTER_INT_SIGNAL); in a2xx_irq()
324 status = gpu_read(gpu, REG_A2XX_MH_INTERRUPT_STATUS); in a2xx_irq()
328 gpu_read(gpu, REG_A2XX_MH_MMU_PAGE_FAULT)); in a2xx_irq()
334 status = gpu_read(gpu, REG_AXXX_CP_INT_STATUS); in a2xx_irq()
344 status = gpu_read(gpu, REG_A2XX_RBBM_INT_STATUS); in a2xx_irq()
451 gpu_read(gpu, REG_A2XX_RBBM_STATUS)); in a2xx_dump()
464 state->rbbm_status = gpu_read(gpu, REG_A2XX_RBBM_STATUS); in a2xx_gpu_state_get()
[all …]
a6xx_gpu.c:29 if (gpu_read(gpu, REG_A6XX_RBBM_STATUS) & in _a6xx_check_idle()
46 gpu_read(gpu, REG_A6XX_RBBM_STATUS), in a6xx_idle()
47 gpu_read(gpu, REG_A6XX_RBBM_INT_0_STATUS), in a6xx_idle()
48 gpu_read(gpu, REG_A6XX_CP_RB_RPTR), in a6xx_idle()
49 gpu_read(gpu, REG_A6XX_CP_RB_WPTR)); in a6xx_idle()
1083 gpu_read(gpu, REG_A6XX_GBIF_HALT); in hw_init()
1089 gpu_read(gpu, REG_A6XX_GBIF_HALT); in hw_init()
1438 gpu_read(gpu, REG_A6XX_RBBM_STATUS)); in a6xx_dump()
1719 gpu_read(gpu, REG_A6XX_RBBM_STATUS), in a6xx_fault_detect_irq()
1720 gpu_read(gpu, REG_A6XX_CP_RB_RPTR), in a6xx_fault_detect_irq()
[all …]
a4xx_gpu.c:277 val = gpu_read(gpu, REG_A4XX_RBBM_CLOCK_DELAY_HLSQ); in a4xx_hw_init()
358 gpu_read(gpu, REG_AXXX_CP_SCRATCH_REG0 + i)); in a4xx_recover()
366 gpu_read(gpu, REG_A4XX_RBBM_SW_RESET_CMD); in a4xx_recover()
392 if (spin_until(!(gpu_read(gpu, REG_A4XX_RBBM_STATUS) & in a4xx_idle()
406 status = gpu_read(gpu, REG_A4XX_RBBM_INT_0_STATUS); in a4xx_irq()
410 uint32_t reg = gpu_read(gpu, REG_A4XX_CP_PROTECT_STATUS); in a4xx_irq()
560 state->rbbm_status = gpu_read(gpu, REG_A4XX_RBBM_STATUS); in a4xx_gpu_state_get()
568 gpu_read(gpu, REG_A4XX_RBBM_STATUS)); in a4xx_dump()
586 reg = gpu_read(gpu, REG_A4XX_RBBM_POWER_STATUS); in a4xx_pm_resume()
626 ring->memptrs->rptr = gpu_read(gpu, REG_A4XX_CP_RB_RPTR); in a4xx_get_rptr()
a5xx_debugfs.c:23 gpu_read(gpu, REG_A5XX_CP_PFP_STAT_DATA)); in pfp_print()
36 gpu_read(gpu, REG_A5XX_CP_ME_STAT_DATA)); in me_print()
49 gpu_read(gpu, REG_A5XX_CP_MEQ_DBG_DATA)); in meq_print()
64 val[j] = gpu_read(gpu, REG_A5XX_CP_ROQ_DBG_DATA); in roq_print()
a3xx_gpu.c:374 gpu_read(gpu, REG_AXXX_CP_SCRATCH_REG0 + i)); in a3xx_recover()
382 gpu_read(gpu, REG_A3XX_RBBM_SW_RESET_CMD); in a3xx_recover()
408 if (spin_until(!(gpu_read(gpu, REG_A3XX_RBBM_STATUS) & in a3xx_idle()
423 status = gpu_read(gpu, REG_A3XX_RBBM_INT_0_STATUS); in a3xx_irq()
477 gpu_read(gpu, REG_A3XX_RBBM_STATUS)); in a3xx_dump()
490 state->rbbm_status = gpu_read(gpu, REG_A3XX_RBBM_STATUS); in a3xx_gpu_state_get()
507 ring->memptrs->rptr = gpu_read(gpu, REG_AXXX_CP_RB_RPTR); in a3xx_get_rptr()
a6xx_gpu_state.c:188 data[0] = gpu_read(gpu, REG_A6XX_DBGC_CFG_DBGBUS_TRACE_BUF2); in debugbus_read()
189 data[1] = gpu_read(gpu, REG_A6XX_DBGC_CFG_DBGBUS_TRACE_BUF1); in debugbus_read()
231 data[i] = gpu_read(gpu, REG_A6XX_VBIF_TEST_BUS_OUT); in vbif_debugbus_read()
261 clk = gpu_read(gpu, REG_A6XX_VBIF_CLKON); in a6xx_get_vbif_debugbus_block()
1132 obj->data[index++] = gpu_read(gpu, in a6xx_get_ahb_gpu_registers()
1157 obj->data[index++] = gpu_read(gpu, regs[i] + j); in a7xx_get_ahb_gpu_registers()
1429 return gpu_read(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_2) >> 14; in a6xx_get_cp_roq_size()
1467 obj->data[i] = gpu_read(gpu, indexed->data); in a6xx_get_indexed_regs()
1489 val = gpu_read(gpu, REG_A6XX_CP_CHICKEN_DBG); in a6xx_get_indexed_registers()
1502 mempool_size = gpu_read(gpu, REG_A6XX_CP_MEM_POOL_SIZE); in a6xx_get_indexed_registers()
[all …]
a5xx_gpu.h:145 if ((gpu_read(gpu, reg) & mask) == value) in spin_usecs()
a5xx_power.c:267 u32 val = gpu_read(gpu, REG_A5XX_GPMU_GENERAL_1); in a5xx_gpmu_init()
a5xx_preempt.c:194 status = gpu_read(gpu, REG_A5XX_CP_CONTEXT_SWITCH_CNTL); in a5xx_preempt_irq()
a6xx_preempt.c:159 status = gpu_read(gpu, REG_A6XX_CP_CONTEXT_SWITCH_CNTL); in a6xx_preempt_irq()
adreno_gpu.c:816 state->registers[pos++] = gpu_read(gpu, addr); in adreno_gpu_state_get()
1075 uint32_t val = gpu_read(gpu, addr); in adreno_dump()
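
Across the adreno results, gpu_read() mostly feeds status checks: idle tests against RBBM_STATUS, IRQ dispatch, recovery dumps, and poll helpers such as spin_usecs() (a5xx_gpu.h:145), which compares a masked read against an expected value. A rough sketch of that kind of busy-wait helper follows; the function name, delay step, and return convention are illustrative, not the driver's actual spin_until()/spin_usecs() implementation.

/* Sketch only: poll_gpu_reg() is an invented name; the adreno code uses its
 * own spin_until()/spin_usecs() macros for this. gpu_read() and struct
 * msm_gpu are the msm definitions listed under msm_gpu.h further down. */
#include <linux/delay.h>
#include <linux/errno.h>

static int poll_gpu_reg(struct msm_gpu *gpu, u32 reg, u32 mask, u32 value,
			unsigned int timeout_us)
{
	while (timeout_us--) {
		/* Done once the masked register bits match the expected value. */
		if ((gpu_read(gpu, reg) & mask) == value)
			return 0;
		udelay(1);
	}
	return -ETIMEDOUT;
}
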
/drivers/gpu/drm/etnaviv/
etnaviv_gpu.c:215 specs[0] = gpu_read(gpu, VIVS_HI_CHIP_SPECS); in etnaviv_hw_specs()
216 specs[1] = gpu_read(gpu, VIVS_HI_CHIP_SPECS_2); in etnaviv_hw_specs()
217 specs[2] = gpu_read(gpu, VIVS_HI_CHIP_SPECS_3); in etnaviv_hw_specs()
218 specs[3] = gpu_read(gpu, VIVS_HI_CHIP_SPECS_4); in etnaviv_hw_specs()
583 idle = gpu_read(gpu, VIVS_HI_IDLE_STATE); in etnaviv_hw_reset()
610 idle = gpu_read(gpu, VIVS_HI_IDLE_STATE); in etnaviv_hw_reset()
969 dma_lo = gpu_read(gpu, VIVS_FE_DMA_LOW); in etnaviv_gpu_debugfs()
970 dma_hi = gpu_read(gpu, VIVS_FE_DMA_HIGH); in etnaviv_gpu_debugfs()
971 axi = gpu_read(gpu, VIVS_HI_AXI_STATUS); in etnaviv_gpu_debugfs()
972 idle = gpu_read(gpu, VIVS_HI_IDLE_STATE); in etnaviv_gpu_debugfs()
[all …]
etnaviv_perfmon.c:46 return gpu_read(gpu, domain->profile_read); in perf_reg_read()
61 u32 clock = gpu_read(gpu, VIVS_HI_CLOCK_CONTROL); in pipe_perf_reg_read()
82 u32 clock = gpu_read(gpu, VIVS_HI_CLOCK_CONTROL); in pipe_reg_read()
90 value += gpu_read(gpu, signal->data); in pipe_reg_read()
110 return gpu_read(gpu, reg); in hi_total_cycle_read()
124 return gpu_read(gpu, reg); in hi_total_idle_cycle_read()
etnaviv_sched.c:54 dma_addr = gpu_read(gpu, VIVS_FE_DMA_ADDRESS); in etnaviv_sched_timedout_job()
62 primid = gpu_read(gpu, VIVS_MC_PROFILE_FE_READ); in etnaviv_sched_timedout_job()
etnaviv_iommu_v2.c:172 if (gpu_read(gpu, VIVS_MMUv2_CONTROL) & VIVS_MMUv2_CONTROL_ENABLE) in etnaviv_iommuv2_restore_nonsec()
196 if (gpu_read(gpu, VIVS_MMUv2_SEC_CONTROL) & VIVS_MMUv2_SEC_CONTROL_ENABLE) in etnaviv_iommuv2_restore_sec()
etnaviv_gpu.h:175 static inline u32 gpu_read(struct etnaviv_gpu *gpu, u32 reg) in gpu_read() function
etnaviv_dump.c:94 reg->value = cpu_to_le32(gpu_read(gpu, read_addr)); in etnaviv_core_dump_registers()
/drivers/gpu/drm/msm/
msm_gpu.h:619 static inline u32 gpu_read(struct msm_gpu *gpu, u32 reg) in gpu_read() function
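
Each driver in this listing carries its own gpu_read(): a macro in panfrost, static inline helpers in panthor, etnaviv, and msm. The shared shape is a readl() of the device's ioremapped register space at a register offset, with a matching write-side helper alongside. A generic sketch of that shape, using a hypothetical device struct; the real per-driver versions differ in how the offset is computed and in any tracing or logging they add.

/* Sketch only: "struct sketch_gpu" and its "mmio" field are invented; each
 * driver keeps the mapping in its own device structure. */
#include <linux/io.h>
#include <linux/types.h>

struct sketch_gpu {
	void __iomem *mmio;	/* base of the ioremapped register window */
};

static inline u32 gpu_read(struct sketch_gpu *gpu, u32 reg)
{
	return readl(gpu->mmio + reg);
}

static inline void gpu_write(struct sketch_gpu *gpu, u32 reg, u32 data)
{
	writel(data, gpu->mmio + reg);
}
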
