
Searched refs:vpe (Results 1 – 15 of 15) sorted by relevance

/drivers/gpu/drm/amd/amdgpu/
amdgpu_vpe.h
   93  #define vpe_ring_init(vpe) ((vpe)->funcs->ring_init ? (vpe)->funcs->ring_init((vpe)) : 0) argument
   94  #define vpe_ring_start(vpe) ((vpe)->funcs->ring_start ? (vpe)->funcs->ring_start((vpe)) : 0) argument
   95  #define vpe_ring_stop(vpe) ((vpe)->funcs->ring_stop ? (vpe)->funcs->ring_stop((vpe)) : 0) argument
   96  #define vpe_ring_fini(vpe) ((vpe)->funcs->ring_fini ? (vpe)->funcs->ring_fini((vpe)) : 0) argument
   99  ((vpe)->funcs->get_reg_offset ? (vpe)->funcs->get_reg_offset((vpe), (inst), (offset)) : 0)
  100  #define vpe_set_regs(vpe) \ argument
  101  ((vpe)->funcs->set_regs ? (vpe)->funcs->set_regs((vpe)) : 0)
  102  #define vpe_irq_init(vpe) \ argument
  103  ((vpe)->funcs->irq_init ? (vpe)->funcs->irq_init((vpe)) : 0)
  105  ((vpe)->funcs->init_microcode ? (vpe)->funcs->init_microcode((vpe)) : 0)
[all …]
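
The amdgpu_vpe.h hits above all share one shape: each wrapper macro checks whether the corresponding entry in the vpe funcs table is populated before calling through it, and falls back to 0 otherwise, so optional hooks cost nothing. A minimal, self-contained sketch of that pattern follows, using hypothetical demo_dev/demo_funcs types rather than the amdgpu structures:

#include <stdio.h>

/* Hypothetical ops table: some hooks may be left NULL. */
struct demo_funcs {
	int (*ring_init)(void *dev);
	int (*ring_start)(void *dev);
};

struct demo_dev {
	const struct demo_funcs *funcs;
};

/* Same shape as the amdgpu_vpe.h wrappers: call the hook if it is
 * wired up, otherwise report success (0). */
#define demo_ring_init(d)  ((d)->funcs->ring_init  ? (d)->funcs->ring_init((d))  : 0)
#define demo_ring_start(d) ((d)->funcs->ring_start ? (d)->funcs->ring_start((d)) : 0)

static int my_ring_init(void *dev)
{
	printf("ring_init called for %p\n", dev);
	return 0;
}

int main(void)
{
	/* ring_start is intentionally left NULL. */
	static const struct demo_funcs funcs = { .ring_init = my_ring_init };
	struct demo_dev dev = { .funcs = &funcs };

	printf("init:  %d\n", demo_ring_init(&dev));
	printf("start: %d\n", demo_ring_start(&dev)); /* no-op, returns 0 */
	return 0;
}
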
vpe_v6_1.c
   88  struct amdgpu_device *adev = container_of(vpe, struct amdgpu_device, vpe); in vpe_v6_1_irq_init()
   93  &adev->vpe.trap_irq); in vpe_v6_1_irq_init()
  105  if (!vpe->collaborate_mode) in vpe_v6_1_set_collaborate_mode()
  151  if (amdgpu_vpe_configure_dpm(vpe)) in vpe_v6_1_load_microcode()
  181  vpe_v6_1_halt(vpe, true); in vpe_v6_1_load_microcode()
  201  vpe_v6_1_halt(vpe, false); in vpe_v6_1_load_microcode()
  208  struct amdgpu_ring *ring = &vpe->ring; in vpe_v6_1_ring_start()
  304  vpe->ring.sched.ready = false; in vpe_v_6_1_ring_stop()
  314  struct amdgpu_vpe *vpe = &adev->vpe; in vpe_v6_1_set_trap_irq_state() local
  353  struct amdgpu_device *adev = container_of(vpe, struct amdgpu_device, vpe); in vpe_v6_1_set_regs()
[all …]
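
Lines 88 and 353 of vpe_v6_1.c recover the enclosing amdgpu_device from an embedded amdgpu_vpe member with container_of(), while the amdgpu_vpe.c hits just below go the other way via &adev->vpe. A small userspace sketch of that round trip, with a local container_of definition and hypothetical struct names in place of the kernel ones:

#include <stddef.h>
#include <stdio.h>

/* Userspace stand-in for the kernel's container_of() helper. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct demo_vpe {
	int fw_version;
};

struct demo_device {
	int id;
	struct demo_vpe vpe;	/* embedded, like adev->vpe */
};

static void vpe_only_helper(struct demo_vpe *vpe)
{
	/* Given only the embedded member, climb back to the parent. */
	struct demo_device *dev = container_of(vpe, struct demo_device, vpe);

	printf("helper sees device %d, fw %d\n", dev->id, vpe->fw_version);
}

int main(void)
{
	struct demo_device dev = { .id = 7, .vpe = { .fw_version = 42 } };

	/* Forward direction: hand out a pointer to the embedded member. */
	vpe_only_helper(&dev.vpe);
	return 0;
}
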
amdgpu_vpe.c
  302  struct amdgpu_vpe *vpe = &adev->vpe; in vpe_early_init() local
  363  struct amdgpu_vpe *vpe = &adev->vpe; in vpe_sw_init() local
  395  struct amdgpu_vpe *vpe = &adev->vpe; in vpe_sw_fini() local
  413  struct amdgpu_vpe *vpe = &adev->vpe; in vpe_hw_init() local
  436  struct amdgpu_vpe *vpe = &adev->vpe; in vpe_hw_fini() local
  612  struct amdgpu_vpe *vpe = &adev->vpe; in vpe_ring_preempt_ib() local
  660  struct amdgpu_vpe *vpe = &adev->vpe; in vpe_set_powergating_state() local
  680  struct amdgpu_vpe *vpe = &adev->vpe; in vpe_ring_get_rptr() local
  699  struct amdgpu_vpe *vpe = &adev->vpe; in vpe_ring_get_wptr() local
  718  struct amdgpu_vpe *vpe = &adev->vpe; in vpe_ring_set_wptr() local
[all …]
vpe_v6_1.h
   27  void vpe_v6_1_set_funcs(struct amdgpu_vpe *vpe);
amdgpu_dev_coredump.c
  188  adev->vpe.feature_version, adev->vpe.fw_version); in amdgpu_devcoredump_fw_info()
umsch_mm_v4_0.c
  282  adev->vpe.collaborate_mode ? 0x3 : 0x0; in umsch_mm_v4_0_set_hw_resources()
  358  add_queue.collaboration_mode = adev->vpe.collaborate_mode ? 1 : 0; in umsch_mm_v4_0_add_queue()
amdgpu_kms.c
  365  fw_info->ver = adev->vpe.fw_version; in amdgpu_firmware_info()
  366  fw_info->feature = adev->vpe.feature_version; in amdgpu_firmware_info()
  535  if (adev->vpe.ring.sched.ready && in amdgpu_hw_ip_info()
  536  !adev->vpe.ring.no_user_submission) in amdgpu_hw_ip_info()
amdgpu_discovery.c
  1426  if (adev->vpe.num_instances < AMDGPU_MAX_VPE_INSTANCES) in amdgpu_discovery_reg_base_init()
  1427  adev->vpe.num_instances++; in amdgpu_discovery_reg_base_init()
  1430  adev->vpe.num_instances + 1, in amdgpu_discovery_reg_base_init()
amdgpu.h
  1123  struct amdgpu_vpe vpe; member
/drivers/irqchip/
irq-gic-v4.c
  132  if (!vpe->fwnode) in its_alloc_vcpu_sgis()
  138  vpe->sgi_domain = irq_domain_create_linear(vpe->fwnode, 16, in its_alloc_vcpu_sgis()
  140  if (!vpe->sgi_domain) in its_alloc_vcpu_sgis()
  143  sgi_base = irq_domain_alloc_irqs(vpe->sgi_domain, 16, NUMA_NO_NODE, vpe); in its_alloc_vcpu_sgis()
  150  if (vpe->sgi_domain) in its_alloc_vcpu_sgis()
  152  if (vpe->fwnode) in its_alloc_vcpu_sgis()
  249  enable_irq(vpe->irq); in its_make_vpe_non_resident()
  254  vpe->resident = false; in its_make_vpe_non_resident()
  256  vpe->ready = false; in its_make_vpe_non_resident()
  279  vpe->resident = true; in its_make_vpe_resident()
[all …]
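
The its_alloc_vcpu_sgis() hits above show the usual staged-setup shape: each resource (the fwnode, the 16-SGI irq domain, the SGIs themselves) is created in order, every step is checked, and the error path only tears down what was actually created. A generic, self-contained sketch of that rollback pattern with placeholder allocators, not the real irqchip API:

#include <stdio.h>
#include <stdlib.h>

/* Placeholder "resources" standing in for the fwnode and SGI irq domain. */
struct demo_vpe {
	void *fwnode;
	void *sgi_domain;
	int sgi_base;
};

static void *demo_alloc(const char *name)
{
	printf("alloc %s\n", name);
	return malloc(1);
}

static void demo_free(const char *name, void *p)
{
	printf("free %s\n", name);
	free(p);
}

/* Staged setup: check every step, unwind only what exists on failure. */
static int demo_alloc_vcpu_sgis(struct demo_vpe *vpe)
{
	vpe->fwnode = demo_alloc("fwnode");
	if (!vpe->fwnode)
		goto err;

	vpe->sgi_domain = demo_alloc("sgi_domain");
	if (!vpe->sgi_domain)
		goto err;

	vpe->sgi_base = 16;	/* pretend the 16 SGIs were allocated here */
	return 0;

err:
	if (vpe->sgi_domain)
		demo_free("sgi_domain", vpe->sgi_domain);
	if (vpe->fwnode)
		demo_free("fwnode", vpe->fwnode);
	return -1;
}

static void demo_free_vcpu_sgis(struct demo_vpe *vpe)
{
	demo_free("sgi_domain", vpe->sgi_domain);
	demo_free("fwnode", vpe->fwnode);
}

int main(void)
{
	struct demo_vpe vpe = { 0 };

	if (demo_alloc_vcpu_sgis(&vpe))
		return 1;
	demo_free_vcpu_sgis(&vpe);
	return 0;
}
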
irq-gic-v3-its.c
   382  vpe = map->vpe; in irq_to_cpuid_lock()
   385  if (vpe) { in irq_to_cpuid_lock()
   407  vpe = map->vpe; in irq_to_cpuid_unlock()
   410  if (vpe) in irq_to_cpuid_unlock()
  1374  desc.its_vmapti_cmd.vpe = map->vpe; in its_send_vmapti()
  1388  desc.its_vmovi_cmd.vpe = map->vpe; in its_send_vmovi()
  1401  desc.its_vmapp_cmd.vpe = vpe; in its_send_vmapp()
  1414  desc.its_vmovp_cmd.vpe = vpe; in its_send_vmovp()
  1452  desc.its_vinvall_cmd.vpe = vpe; in its_send_vinvall()
  1502  desc.its_invdb_cmd.vpe = vpe; in its_send_invdb()
[all …]
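
The irq-gic-v3-its.c hits all fill the vpe field of a per-command payload in one shared descriptor before the command is queued (VMAPTI, VMOVI, VMAPP, VMOVP, VINVALL, INVDB). A toy, self-contained sketch of that descriptor-union style follows, with made-up command names and fields rather than the real ITS command encodings:

#include <stdio.h>

struct demo_vpe { int vpe_id; };

enum demo_cmd_type { DEMO_CMD_MAP, DEMO_CMD_MOVE };

/* One descriptor type, one union member per command, like its_cmd_desc. */
struct demo_cmd_desc {
	enum demo_cmd_type type;
	union {
		struct { struct demo_vpe *vpe; int target; } map_cmd;
		struct { struct demo_vpe *vpe; int from, to; } move_cmd;
	};
};

static void demo_send_cmd(const struct demo_cmd_desc *desc)
{
	switch (desc->type) {
	case DEMO_CMD_MAP:
		printf("MAP  vpe %d -> target %d\n",
		       desc->map_cmd.vpe->vpe_id, desc->map_cmd.target);
		break;
	case DEMO_CMD_MOVE:
		printf("MOVE vpe %d: %d -> %d\n",
		       desc->move_cmd.vpe->vpe_id,
		       desc->move_cmd.from, desc->move_cmd.to);
		break;
	}
}

int main(void)
{
	struct demo_vpe vpe = { .vpe_id = 3 };
	struct demo_cmd_desc desc = {
		.type = DEMO_CMD_MAP,
		.map_cmd = { .vpe = &vpe, .target = 1 },
	};

	demo_send_cmd(&desc);
	return 0;
}
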
/drivers/media/platform/ti/vpe/
Makefile
  2  obj-$(CONFIG_VIDEO_TI_VPE) += ti-vpe.o
  7  ti-vpe-y := vpe.o
/drivers/media/platform/ti/
Makefile
  4  obj-y += vpe/
/drivers/net/ethernet/intel/iavf/
iavf_virtchnl.c
    85  struct virtchnl_pf_event *vpe = in iavf_poll_virtchnl_msg() local
    88  if (vpe->event != VIRTCHNL_EVENT_RESET_IMPENDING) in iavf_poll_virtchnl_msg()
  1655  struct virtchnl_pf_event *vpe) in iavf_get_vpe_link_status() argument
  1658  return vpe->event_data.link_event_adv.link_status; in iavf_get_vpe_link_status()
  1660  return vpe->event_data.link_event.link_status; in iavf_get_vpe_link_status()
  1672  struct virtchnl_pf_event *vpe) in iavf_set_adapter_link_speed_from_vpe() argument
  1676  vpe->event_data.link_event_adv.link_speed; in iavf_set_adapter_link_speed_from_vpe()
  2320  struct virtchnl_pf_event *vpe = in iavf_virtchnl_completion() local
  2324  switch (vpe->event) { in iavf_virtchnl_completion()
  2326  iavf_set_adapter_link_speed_from_vpe(adapter, vpe); in iavf_virtchnl_completion()
[all …]
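
The iavf hits show two related patterns: the PF event carries a union payload whose advanced or legacy link fields are selected by a capability flag (iavf_get_vpe_link_status / iavf_set_adapter_link_speed_from_vpe), and the completion path dispatches on vpe->event. A self-contained sketch of both, using made-up event, field, and capability names rather than the real virtchnl definitions:

#include <stdbool.h>
#include <stdio.h>

enum demo_pf_event_type { DEMO_EVENT_LINK_CHANGE, DEMO_EVENT_RESET_IMPENDING };

struct demo_pf_event {
	enum demo_pf_event_type event;
	union {
		struct { bool link_status; int link_speed_mbps; } link_event_adv;
		struct { bool link_status; int link_speed_enum; } link_event;
	} event_data;
};

struct demo_adapter {
	bool adv_link_support;	/* capability negotiated with the PF */
	bool link_up;
	int link_speed_mbps;
};

/* Pick the advanced or legacy field depending on what the PF supports. */
static bool demo_get_vpe_link_status(const struct demo_adapter *adapter,
				     const struct demo_pf_event *vpe)
{
	if (adapter->adv_link_support)
		return vpe->event_data.link_event_adv.link_status;
	return vpe->event_data.link_event.link_status;
}

static void demo_handle_event(struct demo_adapter *adapter,
			      const struct demo_pf_event *vpe)
{
	switch (vpe->event) {
	case DEMO_EVENT_LINK_CHANGE:
		adapter->link_up = demo_get_vpe_link_status(adapter, vpe);
		if (adapter->adv_link_support)
			adapter->link_speed_mbps =
				vpe->event_data.link_event_adv.link_speed_mbps;
		break;
	case DEMO_EVENT_RESET_IMPENDING:
		printf("reset impending, schedule reset task\n");
		break;
	}
}

int main(void)
{
	struct demo_adapter adapter = { .adv_link_support = true };
	struct demo_pf_event ev = {
		.event = DEMO_EVENT_LINK_CHANGE,
		.event_data.link_event_adv = { .link_status = true,
					       .link_speed_mbps = 25000 },
	};

	demo_handle_event(&adapter, &ev);
	printf("link %s at %d Mbps\n",
	       adapter.link_up ? "up" : "down", adapter.link_speed_mbps);
	return 0;
}
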
/drivers/gpu/drm/ingenic/
ingenic-drm-drv.c
  285  unsigned int vpe, vds, vde, vt, hpe, hds, hde, ht; in ingenic_drm_crtc_update_timings() local
  287  vpe = mode->crtc_vsync_end - mode->crtc_vsync_start; in ingenic_drm_crtc_update_timings()
  299  vpe << JZ_LCD_VSYNC_VPE_OFFSET); in ingenic_drm_crtc_update_timings()
  333  (ht * vpe / 3) << JZ_LCD_IPUR_IPUR_LSB); in ingenic_drm_crtc_update_timings()
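
In the Ingenic driver, vpe is the vertical sync pulse length in lines, computed as crtc_vsync_end minus crtc_vsync_start and then packed into the VSYNC register field. A small sketch of that derivation with a hypothetical mode struct and a hypothetical shift value (the real JZ_LCD_VSYNC_VPE_OFFSET lives in the driver headers):

#include <stdint.h>
#include <stdio.h>

/* Hypothetical field offset; the real constant is defined by the driver. */
#define DEMO_VSYNC_VPE_OFFSET 16

/* Trimmed-down stand-in for the DRM display-mode timing fields. */
struct demo_display_mode {
	unsigned int crtc_vsync_start;
	unsigned int crtc_vsync_end;
};

int main(void)
{
	/* 1080p-style vertical sync: starts at line 1084, ends at line 1089. */
	struct demo_display_mode mode = {
		.crtc_vsync_start = 1084,
		.crtc_vsync_end = 1089,
	};

	/* vpe = vertical sync pulse length, in lines. */
	unsigned int vpe = mode.crtc_vsync_end - mode.crtc_vsync_start;

	/* Pack it into the (hypothetical) VSYNC register layout. */
	uint32_t vsync_reg = (uint32_t)vpe << DEMO_VSYNC_VPE_OFFSET;

	printf("vpe = %u lines, reg = 0x%08x\n", vpe, vsync_reg);
	return 0;
}
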
