Lines matching refs: hpd (hot-plug detect handling in dce_v6_0)

101 	uint32_t	hpd;  member
107 .hpd = DISP_INTERRUPT_STATUS__DC_HPD1_INTERRUPT_MASK
112 .hpd = DISP_INTERRUPT_STATUS_CONTINUE__DC_HPD2_INTERRUPT_MASK
117 .hpd = DISP_INTERRUPT_STATUS_CONTINUE2__DC_HPD3_INTERRUPT_MASK
122 .hpd = DISP_INTERRUPT_STATUS_CONTINUE3__DC_HPD4_INTERRUPT_MASK
127 .hpd = DISP_INTERRUPT_STATUS_CONTINUE4__DC_HPD5_INTERRUPT_MASK
132 .hpd = DISP_INTERRUPT_STATUS_CONTINUE5__DC_HPD6_INTERRUPT_MASK
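
The six .hpd initializers above fill an interrupt_status_offsets[] lookup table, one entry per HPD pin, which the IRQ handler indexes by pin number (lines 3212-3213). A minimal sketch of the table's shape; the .reg field and the mmDISP_INTERRUPT_STATUS* register names are assumptions not part of this listing:

    static const struct {
            uint32_t reg;   /* display interrupt status register to read (assumed, see line 3212) */
            uint32_t hpd;   /* pending bit for this HPD pin within that register (line 101) */
    } interrupt_status_offsets[] = {
            { .reg = mmDISP_INTERRUPT_STATUS,
              .hpd = DISP_INTERRUPT_STATUS__DC_HPD1_INTERRUPT_MASK },
            { .reg = mmDISP_INTERRUPT_STATUS_CONTINUE,
              .hpd = DISP_INTERRUPT_STATUS_CONTINUE__DC_HPD2_INTERRUPT_MASK },
            /* ... entries for HPD3 through HPD5 follow the same pattern ... */
            { .reg = mmDISP_INTERRUPT_STATUS_CONTINUE5,
              .hpd = DISP_INTERRUPT_STATUS_CONTINUE5__DC_HPD6_INTERRUPT_MASK },
    };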
245 enum amdgpu_hpd_id hpd) in dce_v6_0_hpd_sense() argument
249 if (hpd >= adev->mode_info.num_hpd) in dce_v6_0_hpd_sense()
252 if (RREG32(mmDC_HPD1_INT_STATUS + hpd_offsets[hpd]) & in dce_v6_0_hpd_sense()
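
A sketch of dce_v6_0_hpd_sense() reconstructed around the fragments at lines 245-252: it range-checks the pin index, then reads the per-pin DC_HPDx_INT_STATUS register to report whether a display is physically present. The SENSE bit name below is an assumption; only the register read at line 252 appears in the listing.

    static bool dce_v6_0_hpd_sense(struct amdgpu_device *adev,
                                   enum amdgpu_hpd_id hpd)
    {
            bool connected = false;

            if (hpd >= adev->mode_info.num_hpd)
                    return connected;

            /* The SENSE bit mirrors the live level of the HPD pin. */
            if (RREG32(mmDC_HPD1_INT_STATUS + hpd_offsets[hpd]) &
                DC_HPD1_INT_STATUS__DC_HPD1_SENSE_MASK)     /* assumed mask name */
                    connected = true;

            return connected;
    }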
268 enum amdgpu_hpd_id hpd) in dce_v6_0_hpd_set_polarity() argument
271 bool connected = dce_v6_0_hpd_sense(adev, hpd); in dce_v6_0_hpd_set_polarity()
273 if (hpd >= adev->mode_info.num_hpd) in dce_v6_0_hpd_set_polarity()
276 tmp = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd]); in dce_v6_0_hpd_set_polarity()
281 WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd], tmp); in dce_v6_0_hpd_set_polarity()
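
dce_v6_0_hpd_set_polarity() (lines 268-281) samples the pin with dce_v6_0_hpd_sense() and then flips the interrupt polarity bit so the next interrupt fires on the opposite transition: unplug while connected, plug while disconnected. A sketch, with the POLARITY mask name assumed:

    static void dce_v6_0_hpd_set_polarity(struct amdgpu_device *adev,
                                          enum amdgpu_hpd_id hpd)
    {
            u32 tmp;
            bool connected = dce_v6_0_hpd_sense(adev, hpd);

            if (hpd >= adev->mode_info.num_hpd)
                    return;

            tmp = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd]);
            if (connected)
                    tmp &= ~DC_HPD1_INT_CONTROL__DC_HPD1_INT_POLARITY_MASK; /* assumed mask name */
            else
                    tmp |= DC_HPD1_INT_CONTROL__DC_HPD1_INT_POLARITY_MASK;
            WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd], tmp);
    }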
285 int hpd) in dce_v6_0_hpd_int_ack() argument
289 if (hpd >= adev->mode_info.num_hpd) { in dce_v6_0_hpd_int_ack()
290 DRM_DEBUG("invalid hpd %d\n", hpd); in dce_v6_0_hpd_int_ack()
294 tmp = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd]); in dce_v6_0_hpd_int_ack()
296 WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd], tmp); in dce_v6_0_hpd_int_ack()
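
dce_v6_0_hpd_int_ack() (lines 285-296) acknowledges a pending HPD interrupt by setting the write-to-clear ACK bit in the same per-pin control register. A sketch, with the ACK mask name assumed:

    static void dce_v6_0_hpd_int_ack(struct amdgpu_device *adev,
                                     int hpd)
    {
            u32 tmp;

            if (hpd >= adev->mode_info.num_hpd) {
                    DRM_DEBUG("invalid hpd %d\n", hpd);
                    return;
            }

            tmp = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd]);
            tmp |= DC_HPD1_INT_CONTROL__DC_HPD1_INT_ACK_MASK;       /* assumed mask name */
            WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd], tmp);
    }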
318 if (amdgpu_connector->hpd.hpd >= adev->mode_info.num_hpd) in dce_v6_0_hpd_init()
321 tmp = RREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]); in dce_v6_0_hpd_init()
323 WREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp); in dce_v6_0_hpd_init()
332 tmp = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]); in dce_v6_0_hpd_init()
334 WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp); in dce_v6_0_hpd_init()
338 dce_v6_0_hpd_int_ack(adev, amdgpu_connector->hpd.hpd); in dce_v6_0_hpd_init()
339 dce_v6_0_hpd_set_polarity(adev, amdgpu_connector->hpd.hpd); in dce_v6_0_hpd_init()
340 amdgpu_irq_get(adev, &adev->hpd_irq, amdgpu_connector->hpd.hpd); in dce_v6_0_hpd_init()
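
The dce_v6_0_hpd_init() fragments (lines 318-340) run once per connector: enable the HPD pad in DC_HPDx_CONTROL, acknowledge any stale interrupt, program the polarity from the current pin state, and take a reference on the shared hpd_irq source. A condensed sketch of the loop body; the connector iteration, the EN/INT_EN mask names, and the eDP/LVDS special case (which is what the INT_CONTROL write at lines 332-334 appears to be) are assumptions:

    /* executed for each drm_connector of the device (iteration assumed) */
    struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);

    if (amdgpu_connector->hpd.hpd >= adev->mode_info.num_hpd)
            continue;

    /* Enable the HPD pad for this pin. */
    tmp = RREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]);
    tmp |= DC_HPD1_CONTROL__DC_HPD1_EN_MASK;                        /* assumed mask name */
    WREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp);

    /* eDP/LVDS panels keep the pad enabled but mask the interrupt
     * (connector-type check assumed). */
    if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
        connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
            tmp = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]);
            tmp &= ~DC_HPD1_INT_CONTROL__DC_HPD1_INT_EN_MASK;
            WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp);
            continue;
    }

    /* Clear any stale event, set polarity, then enable delivery. */
    dce_v6_0_hpd_int_ack(adev, amdgpu_connector->hpd.hpd);
    dce_v6_0_hpd_set_polarity(adev, amdgpu_connector->hpd.hpd);
    amdgpu_irq_get(adev, &adev->hpd_irq, amdgpu_connector->hpd.hpd);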
364 if (amdgpu_connector->hpd.hpd >= adev->mode_info.num_hpd) in dce_v6_0_hpd_fini()
367 tmp = RREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]); in dce_v6_0_hpd_fini()
369 WREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp); in dce_v6_0_hpd_fini()
371 amdgpu_irq_put(adev, &adev->hpd_irq, amdgpu_connector->hpd.hpd); in dce_v6_0_hpd_fini()
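
dce_v6_0_hpd_fini() (lines 364-371) is the mirror image: per connector, disable the HPD pad and drop the interrupt reference. A sketch of the loop body, with the EN mask name assumed as above:

    if (amdgpu_connector->hpd.hpd >= adev->mode_info.num_hpd)
            continue;

    /* Disable the HPD pad for this pin. */
    tmp = RREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]);
    tmp &= ~DC_HPD1_CONTROL__DC_HPD1_EN_MASK;                       /* assumed mask name */
    WREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp);

    /* Release the reference taken in dce_v6_0_hpd_init(). */
    amdgpu_irq_put(adev, &adev->hpd_irq, amdgpu_connector->hpd.hpd);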
3010 unsigned hpd, in dce_v6_0_set_hpd_irq_state() argument
3015 if (hpd >= adev->mode_info.num_hpd) { in dce_v6_0_set_hpd_irq_state()
3016 DRM_DEBUG("invalid hpd %d\n", hpd); in dce_v6_0_set_hpd_irq_state()
3022 dc_hpd_int_cntl = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd]); in dce_v6_0_set_hpd_irq_state()
3024 WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd], dc_hpd_int_cntl); in dce_v6_0_set_hpd_irq_state()
3027 dc_hpd_int_cntl = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd]); in dce_v6_0_set_hpd_irq_state()
3029 WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd], dc_hpd_int_cntl); in dce_v6_0_set_hpd_irq_state()
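
dce_v6_0_set_hpd_irq_state() (lines 3010-3029) is the amdgpu_irq_src enable/disable callback: the two read-modify-write pairs at lines 3022/3024 and 3027/3029 clear or set the per-pin interrupt-enable bit according to the requested state. A sketch, assuming the INT_EN mask name and the AMDGPU_IRQ_STATE_* switch:

    static int dce_v6_0_set_hpd_irq_state(struct amdgpu_device *adev,
                                          struct amdgpu_irq_src *source,
                                          unsigned hpd,
                                          enum amdgpu_interrupt_state state)
    {
            u32 dc_hpd_int_cntl;

            if (hpd >= adev->mode_info.num_hpd) {
                    DRM_DEBUG("invalid hpd %d\n", hpd);
                    return 0;
            }

            switch (state) {
            case AMDGPU_IRQ_STATE_DISABLE:
                    dc_hpd_int_cntl = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd]);
                    dc_hpd_int_cntl &= ~DC_HPD1_INT_CONTROL__DC_HPD1_INT_EN_MASK;  /* assumed */
                    WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd], dc_hpd_int_cntl);
                    break;
            case AMDGPU_IRQ_STATE_ENABLE:
                    dc_hpd_int_cntl = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd]);
                    dc_hpd_int_cntl |= DC_HPD1_INT_CONTROL__DC_HPD1_INT_EN_MASK;
                    WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd], dc_hpd_int_cntl);
                    break;
            default:
                    break;
            }

            return 0;
    }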
3204 unsigned hpd; in dce_v6_0_hpd_irq() local
3211 hpd = entry->src_data[0]; in dce_v6_0_hpd_irq()
3212 disp_int = RREG32(interrupt_status_offsets[hpd].reg); in dce_v6_0_hpd_irq()
3213 mask = interrupt_status_offsets[hpd].hpd; in dce_v6_0_hpd_irq()
3216 dce_v6_0_hpd_int_ack(adev, hpd); in dce_v6_0_hpd_irq()
3218 DRM_DEBUG("IH: HPD%d\n", hpd + 1); in dce_v6_0_hpd_irq()
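
The IRQ handler fragments (lines 3204-3218) tie everything together: the pin index arrives in the IV ring entry, the table from lines 107-132 supplies both the status register and the pending mask for that pin, and a genuine hot-plug event is acknowledged and logged. A sketch of the handler's core; the entry validation and the scheduling of the driver's hotplug work are assumptions:

    static int dce_v6_0_hpd_irq(struct amdgpu_device *adev,
                                struct amdgpu_irq_src *source,
                                struct amdgpu_iv_entry *entry)
    {
            unsigned hpd;
            uint32_t disp_int, mask;

            if (entry->src_data[0] >= adev->mode_info.num_hpd) {
                    DRM_DEBUG("Unhandled interrupt: %d %d\n",
                              entry->src_id, entry->src_data[0]);
                    return 0;
            }

            hpd = entry->src_data[0];
            disp_int = RREG32(interrupt_status_offsets[hpd].reg);
            mask = interrupt_status_offsets[hpd].hpd;

            if (disp_int & mask) {
                    /* Clear the source, then let the hotplug work re-probe
                     * connectors (hotplug_work field assumed). */
                    dce_v6_0_hpd_int_ack(adev, hpd);
                    schedule_delayed_work(&adev->hotplug_work, 0);
                    DRM_DEBUG("IH: HPD%d\n", hpd + 1);
            }

            return 0;
    }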