
Searched refs:pmf (Results 1 – 25 of 35) sorted by relevance


/linux-6.3-rc2/drivers/gpu/drm/amd/amdkfd/
kfd_packet_manager.c
82 *rlib_size += pm->pmf->runlist_size; in pm_calc_rlib_size()
173 retval = pm->pmf->map_queues(pm, in pm_create_runlist_ib()
181 pm->pmf->map_queues_size, in pm_create_runlist_ib()
192 retval = pm->pmf->map_queues(pm, in pm_create_runlist_ib()
201 pm->pmf->map_queues_size, in pm_create_runlist_ib()
238 pm->pmf = &kfd_vi_pm_funcs; in pm_init()
244 pm->pmf = &kfd_v9_pm_funcs; in pm_init()
277 size = pm->pmf->set_resources_size; in pm_send_set_resources()
351 size = pm->pmf->query_status_size; in pm_send_query_status()
379 size = pm->pmf->unmap_queues_size; in pm_send_unmap_queue()
[all …]
kfd_device_queue_manager.c
466 const struct packet_manager_funcs *pmf = qpd->dqm->packet_mgr.pmf; in flush_texture_cache_nocpsch() local
472 ret = pmf->release_mem(qpd->ib_base, (uint32_t *)qpd->ib_kaddr); in flush_texture_cache_nocpsch()
478 pmf->release_mem_size / sizeof(uint32_t)); in flush_texture_cache_nocpsch()
kfd_priv.h
1255 const struct packet_manager_funcs *pmf; member
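
The amdkfd hits above all funnel through one pointer: kfd_priv.h declares pmf as a const struct packet_manager_funcs * member, pm_init() points it at kfd_vi_pm_funcs or kfd_v9_pm_funcs depending on the ASIC generation, and the packet manager then calls through it (pm->pmf->map_queues(...)) and sizes its buffers from the table's *_size fields. A minimal stand-alone sketch of that selection-and-dispatch pattern, using hypothetical names rather than the real kernel definitions:

#include <stdio.h>

/* Hypothetical stand-in for struct packet_manager_funcs: per-generation
 * packet builders plus the packet sizes the caller must reserve. */
struct pm_funcs {
        int (*map_queues)(const char *ring);
        int map_queues_size;
};

static int vi_map_queues(const char *ring)
{
        printf("VI-format MAP_QUEUES packet on %s\n", ring);
        return 0;
}

static int v9_map_queues(const char *ring)
{
        printf("GFXv9-format MAP_QUEUES packet on %s\n", ring);
        return 0;
}

static const struct pm_funcs vi_pm_funcs = {
        .map_queues = vi_map_queues, .map_queues_size = 12,
};
static const struct pm_funcs v9_pm_funcs = {
        .map_queues = v9_map_queues, .map_queues_size = 16,
};

struct packet_manager {
        const struct pm_funcs *pmf;     /* selected once at init time */
};

/* Mirrors pm_init(): pick the table for the detected hardware generation. */
static void pm_init(struct packet_manager *pm, int gfx_version)
{
        pm->pmf = (gfx_version >= 9) ? &v9_pm_funcs : &vi_pm_funcs;
}

int main(void)
{
        struct packet_manager pm;

        pm_init(&pm, 9);
        /* Callers reserve pm->pmf->map_queues_size words, then dispatch. */
        printf("reserving %d dwords\n", pm.pmf->map_queues_size);
        return pm.pmf->map_queues("compute ring 0");
}

Keeping the per-generation differences behind a const table is what lets the callers in kfd_packet_manager.c stay free of hardware-version branches.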
/linux-6.3-rc2/drivers/platform/x86/amd/pmf/
sps.c
73 int amd_pmf_set_sps_power_limits(struct amd_pmf_dev *pmf) in amd_pmf_set_sps_power_limits() argument
77 mode = amd_pmf_get_pprof_modes(pmf); in amd_pmf_set_sps_power_limits()
81 amd_pmf_update_slider(pmf, SLIDER_OP_SET, mode, NULL); in amd_pmf_set_sps_power_limits()
86 bool is_pprof_balanced(struct amd_pmf_dev *pmf) in is_pprof_balanced() argument
94 struct amd_pmf_dev *pmf = container_of(pprof, struct amd_pmf_dev, pprof); in amd_pmf_profile_get() local
96 *profile = pmf->current_profile; in amd_pmf_profile_get()
100 int amd_pmf_get_pprof_modes(struct amd_pmf_dev *pmf) in amd_pmf_get_pprof_modes() argument
104 switch (pmf->current_profile) { in amd_pmf_get_pprof_modes()
115 dev_err(pmf->dev, "Unknown Platform Profile.\n"); in amd_pmf_get_pprof_modes()
127 pmf->current_profile = profile; in amd_pmf_profile_set()
[all …]
core.c
63 struct amd_pmf_dev *pmf = container_of(nb, struct amd_pmf_dev, pwr_src_notifier); in amd_pmf_pwr_src_notify_call() local
68 if (is_apmf_func_supported(pmf, APMF_FUNC_AUTO_MODE) || in amd_pmf_pwr_src_notify_call()
69 is_apmf_func_supported(pmf, APMF_FUNC_DYN_SLIDER_DC) || in amd_pmf_pwr_src_notify_call()
70 is_apmf_func_supported(pmf, APMF_FUNC_DYN_SLIDER_AC)) { in amd_pmf_pwr_src_notify_call()
71 if ((pmf->amt_enabled || pmf->cnqf_enabled) && is_pprof_balanced(pmf)) in amd_pmf_pwr_src_notify_call()
75 amd_pmf_set_sps_power_limits(pmf); in amd_pmf_pwr_src_notify_call()
Makefile
7 obj-$(CONFIG_AMD_PMF) += amd-pmf.o
8 amd-pmf-objs := core.o acpi.o sps.o \
pmf.h
388 int amd_pmf_get_pprof_modes(struct amd_pmf_dev *pmf);
395 bool is_pprof_balanced(struct amd_pmf_dev *pmf);
399 int amd_pmf_set_sps_power_limits(struct amd_pmf_dev *pmf);
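
In sps.c, the pmf local at line 94 is recovered with container_of(): the platform-profile callback only receives a pointer to the handler embedded inside struct amd_pmf_dev, and container_of() steps back from that member to the enclosing device structure. A small user-space sketch of the same idiom, with hypothetical struct names standing in for platform_profile_handler and amd_pmf_dev:

#include <stddef.h>
#include <stdio.h>

/* User-space rendition of the kernel's container_of(): given a pointer to a
 * member, step back to the structure that embeds it. */
#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

/* Hypothetical stand-ins for platform_profile_handler / amd_pmf_dev. */
struct profile_handler {
        int current_profile;
};

struct pmf_dev {
        const char *name;
        struct profile_handler pprof;   /* embedded, as in struct amd_pmf_dev */
};

/* Mirrors amd_pmf_profile_get(): the callback only sees the embedded handler
 * and recovers the enclosing device with container_of(). */
static int profile_get(struct profile_handler *pprof, int *profile)
{
        struct pmf_dev *pmf = container_of(pprof, struct pmf_dev, pprof);

        *profile = pmf->pprof.current_profile;
        printf("profile_get() on %s\n", pmf->name);
        return 0;
}

int main(void)
{
        struct pmf_dev dev = { .name = "pmf0", .pprof = { .current_profile = 2 } };
        int p;

        profile_get(&dev.pprof, &p);
        printf("current profile: %d\n", p);
        return 0;
}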
/linux-6.3-rc2/drivers/gpu/drm/amd/amdgpu/
amdgpu_gfx.c
473 if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues) in amdgpu_gfx_disable_kcq()
477 if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size * in amdgpu_gfx_disable_kcq()
484 kiq->pmf->kiq_unmap_queues(kiq_ring, &adev->gfx.compute_ring[i], in amdgpu_gfx_disable_kcq()
514 if (!kiq->pmf || !kiq->pmf->kiq_map_queues || !kiq->pmf->kiq_set_resources) in amdgpu_gfx_enable_kcq()
535 r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size * in amdgpu_gfx_enable_kcq()
537 kiq->pmf->set_resources_size); in amdgpu_gfx_enable_kcq()
547 kiq->pmf->kiq_set_resources(kiq_ring, queue_mask); in amdgpu_gfx_enable_kcq()
549 kiq->pmf->kiq_map_queues(kiq_ring, &adev->gfx.compute_ring[i]); in amdgpu_gfx_enable_kcq()
mes_v10_1.c
804 if (!kiq->pmf || !kiq->pmf->kiq_map_queues) in mes_v10_1_kiq_enable_queue()
807 r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size); in mes_v10_1_kiq_enable_queue()
813 kiq->pmf->kiq_map_queues(kiq_ring, &adev->mes.ring); in mes_v10_1_kiq_enable_queue()
mes_v11_0.c
866 if (!kiq->pmf || !kiq->pmf->kiq_map_queues) in mes_v11_0_kiq_enable_queue()
869 r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size); in mes_v11_0_kiq_enable_queue()
875 kiq->pmf->kiq_map_queues(kiq_ring, &adev->mes.ring); in mes_v11_0_kiq_enable_queue()
gmc_v9_0.c
932 unsigned int ndw = kiq->pmf->invalidate_tlbs_size + 8; in gmc_v9_0_flush_gpu_tlb_pasid()
935 ndw += kiq->pmf->invalidate_tlbs_size; in gmc_v9_0_flush_gpu_tlb_pasid()
941 kiq->pmf->kiq_invalidate_tlbs(ring, in gmc_v9_0_flush_gpu_tlb_pasid()
943 kiq->pmf->kiq_invalidate_tlbs(ring, in gmc_v9_0_flush_gpu_tlb_pasid()
gmc_v11_0.c
334 amdgpu_ring_alloc(ring, kiq->pmf->invalidate_tlbs_size + 8); in gmc_v11_0_flush_gpu_tlb_pasid()
335 kiq->pmf->kiq_invalidate_tlbs(ring, in gmc_v11_0_flush_gpu_tlb_pasid()
amdgpu_gfx.h
111 const struct kiq_pm4_funcs *pmf; member
gmc_v10_0.c
437 amdgpu_ring_alloc(ring, kiq->pmf->invalidate_tlbs_size + 8); in gmc_v10_0_flush_gpu_tlb_pasid()
438 kiq->pmf->kiq_invalidate_tlbs(ring, in gmc_v10_0_flush_gpu_tlb_pasid()
gfx_v11_0.c
263 adev->gfx.kiq.pmf = &gfx_v11_0_kiq_pm4_funcs; in gfx_v11_0_set_kiq_pm4_funcs()
3732 if (!kiq->pmf || !kiq->pmf->kiq_map_queues) in gfx_v11_0_kiq_enable_kgq()
3735 r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size * in gfx_v11_0_kiq_enable_kgq()
3743 kiq->pmf->kiq_map_queues(kiq_ring, &adev->gfx.gfx_ring[i]); in gfx_v11_0_kiq_enable_kgq()
4417 if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues) in gfx_v11_0_kiq_disable_kgq()
4420 if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size * in gfx_v11_0_kiq_disable_kgq()
4425 kiq->pmf->kiq_unmap_queues(kiq_ring, &adev->gfx.gfx_ring[i], in gfx_v11_0_kiq_disable_kgq()
5601 if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues) in gfx_v11_0_ring_preempt_ib()
5606 if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) { in gfx_v11_0_ring_preempt_ib()
5615 kiq->pmf->kiq_unmap_queues(kiq_ring, ring, PREEMPT_QUEUES_NO_UNMAP, in gfx_v11_0_ring_preempt_ib()
gfx_v10_0.c
3639 adev->gfx.kiq.pmf = &gfx_v10_0_kiq_pm4_funcs; in gfx_v10_0_set_kiq_pm4_funcs()
6529 if (!kiq->pmf || !kiq->pmf->kiq_map_queues) in gfx_v10_0_kiq_enable_kgq()
6532 r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size * in gfx_v10_0_kiq_enable_kgq()
6540 kiq->pmf->kiq_map_queues(kiq_ring, &adev->gfx.gfx_ring[i]); in gfx_v10_0_kiq_enable_kgq()
7248 if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues) in gfx_v10_0_kiq_disable_kgq()
7251 if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size * in gfx_v10_0_kiq_disable_kgq()
7256 kiq->pmf->kiq_unmap_queues(kiq_ring, &adev->gfx.gfx_ring[i], in gfx_v10_0_kiq_disable_kgq()
8654 if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues) in gfx_v10_0_ring_preempt_ib()
8659 if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) { in gfx_v10_0_ring_preempt_ib()
8668 kiq->pmf->kiq_unmap_queues(kiq_ring, ring, PREEMPT_QUEUES_NO_UNMAP, in gfx_v10_0_ring_preempt_ib()
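
The amdgpu hits repeat the same ops-table idea on the kernel interface queue (KIQ) side: amdgpu_gfx.h keeps a const struct kiq_pm4_funcs *pmf, gfx_v10_0/gfx_v11_0 install the generation-specific table, and every caller first checks both the table and the specific hook, then reserves pmf->*_size dwords on the ring before emitting packets through it. A hedged, stand-alone sketch of that guard-then-size-then-dispatch shape (hypothetical names, not the real amdgpu API):

#include <stdio.h>

/* Hypothetical stand-in for struct kiq_pm4_funcs. */
struct kiq_funcs {
        void (*kiq_map_queues)(const char *ring, int queue);
        int map_queues_size;            /* dwords one map_queues packet needs */
};

struct kiq {
        const struct kiq_funcs *pmf;    /* may be NULL on unsupported ASICs */
};

static void map_queues_v11(const char *ring, int queue)
{
        printf("MAP_QUEUES(queue %d) on %s\n", queue, ring);
}

static const struct kiq_funcs v11_funcs = {
        .kiq_map_queues = map_queues_v11,
        .map_queues_size = 7,
};

/* Mirrors amdgpu_gfx_enable_kcq() / mes_v11_0_kiq_enable_queue(): bail out
 * early if the table or the needed hook is missing, size the ring allocation
 * from the table, then dispatch through it. */
static int enable_queues(struct kiq *kiq, int nqueues)
{
        int i, ndw;

        if (!kiq->pmf || !kiq->pmf->kiq_map_queues)
                return -1;

        ndw = kiq->pmf->map_queues_size * nqueues;
        printf("allocating %d dwords on the KIQ ring\n", ndw);

        for (i = 0; i < nqueues; i++)
                kiq->pmf->kiq_map_queues("kiq ring", i);
        return 0;
}

int main(void)
{
        struct kiq kiq = { .pmf = &v11_funcs };

        return enable_queues(&kiq, 2);
}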
/linux-6.3-rc2/sound/aoa/core/
Makefile
5 gpio-pmf.o \
/linux-6.3-rc2/drivers/platform/x86/amd/
Makefile
11 obj-$(CONFIG_AMD_PMF) += pmf/
Kconfig
6 source "drivers/platform/x86/amd/pmf/Kconfig"
/linux-6.3-rc2/drivers/media/platform/samsung/exynos4-is/
media-dev.c
512 if (!fmd->pmf) in fimc_md_register_sensor_entities()
515 ret = pm_runtime_resume_and_get(fmd->pmf); in fimc_md_register_sensor_entities()
555 pm_runtime_put(fmd->pmf); in fimc_md_register_sensor_entities()
561 pm_runtime_put(fmd->pmf); in fimc_md_register_sensor_entities()
629 if (!fmd->pmf && fimc->pdev) in register_fimc_entity()
630 fmd->pmf = &fimc->pdev->dev; in register_fimc_entity()
1287 if (camclk->fmd->pmf == NULL) in cam_clk_prepare()
1290 return pm_runtime_resume_and_get(camclk->fmd->pmf); in cam_clk_prepare()
1297 if (camclk->fmd->pmf == NULL) in cam_clk_unprepare()
1300 pm_runtime_put_sync(camclk->fmd->pmf); in cam_clk_unprepare()
media-dev.h
128 struct device *pmf; member
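
In the exynos4-is media driver the pmf member is something else entirely: a borrowed struct device * for the FIMC platform device that provides the camera clocks. Callers guard against it still being NULL and then bracket their work with pm_runtime_resume_and_get() and pm_runtime_put()/pm_runtime_put_sync(). A kernel-style sketch of that pattern (illustrative struct and function names around the real runtime-PM calls; not buildable outside a kernel tree):

#include <linux/device.h>
#include <linux/errno.h>
#include <linux/pm_runtime.h>

/* Illustrative stand-in for fimc_md: pmf is a borrowed device pointer,
 * cached once the owning platform device has registered. */
struct my_media_dev {
        struct device *pmf;     /* may still be NULL early in probe */
};

static int my_cam_clk_prepare(struct my_media_dev *fmd)
{
        if (!fmd->pmf)
                return -ENXIO;

        /* Resume the clock provider and hold a usage-count reference. */
        return pm_runtime_resume_and_get(fmd->pmf);
}

static void my_cam_clk_unprepare(struct my_media_dev *fmd)
{
        if (!fmd->pmf)
                return;

        /* Drop the reference taken in prepare; the device may now suspend. */
        pm_runtime_put_sync(fmd->pmf);
}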
/linux-6.3-rc2/drivers/net/fddi/skfp/
Makefile
9 ecm.o pcmplc.o pmf.o queue.o rmt.o \
/linux-6.3-rc2/drivers/net/ethernet/broadcom/bnx2x/
bnx2x_stats.c
228 if (!bp->port.pmf || !bp->port.port_stx) { in bnx2x_stats_pmf_update()
277 if (!bp->link_vars.link_up || !bp->port.pmf) { in bnx2x_port_stats_init()
521 if (bp->port.pmf) in bnx2x_stats_start()
1102 if (bp->port.pmf) { in bnx2x_storm_stats_update()
1230 if (bp->port.pmf) in bnx2x_stats_update()
1330 if (bp->port.pmf) in bnx2x_stats_stop()
1338 if (bp->port.pmf) in bnx2x_stats_stop()
1409 if (!bp->port.pmf || !bp->port.port_stx) { in bnx2x_port_stats_base_init()
1592 if (bp->port.pmf && bp->port.port_stx) in bnx2x_memset_stats()
1626 if (!bp->stats_init && bp->port.pmf && bp->port.port_stx) in bnx2x_stats_init()
[all …]
bnx2x_cmn.c
2065 return bnx2x_config_rss_eth(bp, bp->port.pmf || !CHIP_IS_E1x(bp)); in bnx2x_init_rss()
2423 bp->port.pmf = 1; in bnx2x_nic_load_pmf()
2430 bp->port.pmf = 0; in bnx2x_nic_load_pmf()
2433 DP(NETIF_MSG_LINK, "pmf %d\n", bp->port.pmf); in bnx2x_nic_load_pmf()
2836 if (bp->port.pmf) { in bnx2x_nic_load()
2879 if (bp->port.pmf) in bnx2x_nic_load()
2916 if (bp->port.pmf && (bp->state != BNX2X_STATE_DIAG)) in bnx2x_nic_load()
2948 bp->port.pmf = 0; in bnx2x_nic_load()
3110 bp->port.pmf = 0; in bnx2x_nic_unload()
bnx2x_main.c
1619 if (bp->port.pmf) in bnx2x_hc_int_enable()
1680 if (bp->port.pmf) in bnx2x_igu_int_enable()
2589 if (bp->port.pmf) in bnx2x_cmng_fns_init()
2986 bp->port.pmf = 1; in bnx2x_pmf_update()
2987 DP(BNX2X_MSG_MCP, "pmf %d\n", bp->port.pmf); in bnx2x_pmf_update()
3341 if (bp->port.pmf) in bnx2x_pf_init()
4309 if ((bp->port.pmf == 0) && (val & DRV_STATUS_PMF)) in bnx2x_attn_int_deasserted3()
4312 if (bp->port.pmf && in bnx2x_attn_int_deasserted3()
9232 if (!bp->port.pmf) in bnx2x_func_wait_started()
10382 if (bp->port.pmf) { in bnx2x_period_task()
[all …]

Completed in 99 milliseconds
