
Searched refs:kfd (Results 1 – 25 of 50) sorted by relevance


/linux/drivers/gpu/drm/amd/amdkfd/
kfd_device.c 474 kfd = kzalloc(sizeof(*kfd), GFP_KERNEL); in kgd2kfd_probe()
549 struct kfd_dev *kfd = node->kfd; in kfd_gws_init() local
779 kfd->adev, size, &kfd->gtt_mem, in kgd2kfd_device_init()
801 kfd->hive_id = kfd->adev->gmc.xgmi.hive_id; in kgd2kfd_device_init()
812 kfd->hive_id = pci_dev_id(kfd->adev->pdev); in kgd2kfd_device_init()
814 kfd->noretry = kfd->adev->gmc.noretry; in kgd2kfd_device_init()
829 node->kfd = kfd; in kgd2kfd_device_init()
924 kfd_cleanup_nodes(kfd, kfd->num_nodes); in kgd2kfd_device_exit()
1246 struct kfd_dev *kfd = node->kfd; in kfd_gtt_sa_allocate() local
1348 struct kfd_dev *kfd = node->kfd; in kfd_gtt_sa_free() local
[all …]
kfd_doorbell.c 52 if (!kfd->shared_resources.enable_mes) in kfd_doorbell_process_slice()
76 if (!kfd->doorbell_bitmap) { in kfd_doorbell_init()
82 r = amdgpu_bo_create_kernel(kfd->adev, in kfd_doorbell_init()
86 &kfd->doorbells, in kfd_doorbell_init()
91 bitmap_free(kfd->doorbell_bitmap); in kfd_doorbell_init()
101 bitmap_free(kfd->doorbell_bitmap); in kfd_doorbell_fini()
155 mutex_lock(&kfd->doorbell_mutex); in kfd_get_kernel_doorbell()
159 mutex_unlock(&kfd->doorbell_mutex); in kfd_get_kernel_doorbell()
165 kfd->doorbells, in kfd_get_kernel_doorbell()
185 mutex_lock(&kfd->doorbell_mutex); in kfd_release_kernel_doorbell()
[all …]
kfd_interrupt.c 58 KFD_IH_NUM_ENTRIES * node->kfd->device_info.ih_ring_entry_size, in kfd_interrupt_init()
120 node->kfd->device_info.ih_ring_entry_size); in enqueue_ih_ring_entry()
121 if (count != node->kfd->device_info.ih_ring_entry_size) { in enqueue_ih_ring_entry()
139 node->kfd->device_info.ih_ring_entry_size); in dequeue_ih_ring_entry()
141 WARN_ON(count && count != node->kfd->device_info.ih_ring_entry_size); in dequeue_ih_ring_entry()
143 return count == node->kfd->device_info.ih_ring_entry_size; in dequeue_ih_ring_entry()
153 if (dev->kfd->device_info.ih_ring_entry_size > sizeof(ih_ring_entry)) { in interrupt_wq()
159 dev->kfd->device_info.event_interrupt_class->interrupt_wq(dev, in interrupt_wq()
178 wanted |= dev->kfd->device_info.event_interrupt_class->interrupt_isr(dev, in interrupt_is_wanted()
kfd_packet_manager_v9.c 37 struct kfd_node *kfd = pm->dqm->dev; in pm_map_process_v9() local
40 struct amdgpu_device *adev = kfd->adev; in pm_map_process_v9()
46 if (adev->enforce_isolation[kfd->node_id]) in pm_map_process_v9()
60 packet->bitfields2.debug_vmid = kfd->dqm->trap_debug_vmid; in pm_map_process_v9()
94 struct kfd_dev *kfd = pm->dqm->dev->kfd; in pm_map_process_aldebaran() local
99 struct amdgpu_device *adev = kfd->adev; in pm_map_process_aldebaran()
153 struct kfd_node *kfd = pm->dqm->dev; in pm_runlist_v9() local
154 struct amdgpu_device *adev = kfd->adev; in pm_runlist_v9()
170 kfd->max_proc_per_quantum); in pm_runlist_v9()
259 !pm_use_ext_eng(q->device->kfd)) in pm_map_queues_v9()
[all …]
kfd_device_queue_manager.c 463 dev->kfd->device_info.doorbell_size); in allocate_doorbell()
956 (dqm->dev->kfd->cwsr_enabled ? in update_queue()
1135 (dqm->dev->kfd->cwsr_enabled ? in evict_process_queues_nocpsch()
1196 if (!dqm->dev->kfd->shared_resources.enable_mes) in evict_process_queues_cpsch()
1340 if (!dqm->dev->kfd->shared_resources.enable_mes) in restore_process_queues_cpsch()
1767 if (!dqm->dev->kfd->shared_resources.enable_mes) in unhalt_cpsch()
1811 if (!dqm->dev->kfd->shared_resources.enable_mes) in start_cpsch()
1852 if (!dqm->dev->kfd->shared_resources.enable_mes) in start_cpsch()
1867 if (!dqm->dev->kfd->shared_resources.enable_mes) in stop_cpsch()
1874 if (!dqm->dev->kfd->shared_resources.enable_mes) in stop_cpsch()
[all …]
kfd_device_queue_manager_v9.c 63 if (dqm->dev->kfd->noretry) in update_qpd_v9()
66 if (KFD_GC_VERSION(dqm->dev->kfd) == IP_VERSION(9, 4, 3) || in update_qpd_v9()
67 KFD_GC_VERSION(dqm->dev->kfd) == IP_VERSION(9, 4, 4)) in update_qpd_v9()
kfd_debug.h 114 && dev->kfd->mec2_fw_version < 0x81b6) || in kfd_dbg_has_gws_support()
117 && dev->kfd->mec2_fw_version < 0x1b6) || in kfd_dbg_has_gws_support()
119 && dev->kfd->mec2_fw_version < 0x1b6) || in kfd_dbg_has_gws_support()
121 && dev->kfd->mec2_fw_version < 0x30) || in kfd_dbg_has_gws_support()
kfd_priv.h 312 struct kfd_dev *kfd; member
1095 size_t kfd_doorbell_process_slice(struct kfd_dev *kfd);
1096 int kfd_doorbell_init(struct kfd_dev *kfd);
1097 void kfd_doorbell_fini(struct kfd_dev *kfd);
1100 void __iomem *kfd_get_kernel_doorbell(struct kfd_dev *kfd,
1106 unsigned int kfd_get_doorbell_dw_offset_in_bar(struct kfd_dev *kfd,
1110 int kfd_alloc_process_doorbells(struct kfd_dev *kfd,
1112 void kfd_free_process_doorbells(struct kfd_dev *kfd,
1149 struct kfd_dev *dev = adev->kfd.dev; in kfd_node_by_irq_ids()
1457 uint64_t kfd_get_number_elems(struct kfd_dev *kfd);
[all …]
kfd_kernel_queue.c 79 prop.doorbell_ptr = kfd_get_kernel_doorbell(dev->kfd, &prop.doorbell_off); in kq_initialize()
117 retval = kfd_gtt_sa_allocate(dev, dev->kfd->device_info.doorbell_size, in kq_initialize()
194 kfd_release_kernel_doorbell(dev->kfd, prop.doorbell_ptr); in kq_initialize()
227 kfd_release_kernel_doorbell(kq->dev->kfd, in kq_uninitialize()
309 if (kq->dev->kfd->device_info.doorbell_size == 8) { in kq_submit_packet()
324 if (kq->dev->kfd->device_info.doorbell_size == 8) { in kq_rollback_packet()
kfd_debug.c 421 if (!pdd->dev->kfd->shared_resources.enable_mes) { in kfd_dbg_trap_clear_dev_address_watch()
433 if (!pdd->dev->kfd->shared_resources.enable_mes) in kfd_dbg_trap_clear_dev_address_watch()
455 if (!pdd->dev->kfd->shared_resources.enable_mes) { in kfd_dbg_trap_set_dev_address_watch()
475 if (!pdd->dev->kfd->shared_resources.enable_mes) in kfd_dbg_trap_set_dev_address_watch()
527 if (!pdd->dev->kfd->shared_resources.enable_mes) in kfd_dbg_trap_set_flags()
550 if (!pdd->dev->kfd->shared_resources.enable_mes) in kfd_dbg_trap_set_flags()
612 if (!pdd->dev->kfd->shared_resources.enable_mes) in kfd_dbg_trap_deactivate()
729 if (!pdd->dev->kfd->shared_resources.enable_mes) in kfd_dbg_trap_activate()
863 if (!pdd->dev->kfd->shared_resources.enable_mes) in kfd_dbg_trap_set_wave_launch_override()
895 if (!pdd->dev->kfd->shared_resources.enable_mes) in kfd_dbg_trap_set_wave_launch_mode()
[all …]
kfd_mqd_manager_v11.c 112 if (node->kfd->shared_resources.enable_mes) in allocate_mqd()
135 if (mm->dev->kfd->shared_resources.enable_mes) in init_mqd()
183 if (mm->dev->kfd->cwsr_enabled) { in init_mqd()
272 if (mm->dev->kfd->cwsr_enabled) in update_mqd()
405 if (mm->dev->kfd->shared_resources.enable_mes) in init_mqd_sdma()
556 if (dev->kfd->shared_resources.enable_mes) { in mqd_manager_init_v11()
kfd_topology.c 537 dev->gpu->kfd->mec_fw_version); in node_show()
543 dev->gpu->kfd->sdma_fw_version); in node_show()
1241 if (!dev->gpu->kfd->pci_atomic_requested || in kfd_set_iolink_no_atomics()
1595 (dev->gpu->kfd->hive_id && in kfd_dev_create_p2p_links()
1596 dev->gpu->kfd->hive_id == new_dev->gpu->kfd->hive_id)) in kfd_dev_create_p2p_links()
1944 firmware_supported = dev->gpu->kfd->mec_fw_version >= 60; in kfd_topology_set_dbg_firmware_support()
2073 gpu->kfd->device_info.gfx_target_version; in kfd_topology_add_device()
2081 if (gpu->kfd->num_nodes > 1) in kfd_topology_add_device()
2094 gpu->kfd->shared_resources.drm_render_minor; in kfd_topology_add_device()
2096 dev->node_props.hive_id = gpu->kfd->hive_id; in kfd_topology_add_device()
[all …]
kfd_flat_memory.c 332 pdd->dev->kfd->shared_resources.gpuvm_size - 1; in kfd_init_apertures_vi()
351 pdd->dev->kfd->shared_resources.gpuvm_size - 1; in kfd_init_apertures_v9()
kfd_process.c 1073 kfd_free_process_doorbells(pdd->dev->kfd, pdd); in kfd_process_destroy_pdds()
1075 if (pdd->dev->kfd->shared_resources.enable_mes) in kfd_process_destroy_pdds()
1329 if (!dev->kfd->cwsr_enabled || qpd->cwsr_kaddr || qpd->cwsr_base) in kfd_process_init_cwsr_apu()
1347 memcpy(qpd->cwsr_kaddr, dev->kfd->cwsr_isa, dev->kfd->cwsr_isa_size); in kfd_process_init_cwsr_apu()
1372 if (!dev->kfd->cwsr_enabled || qpd->cwsr_kaddr || !qpd->cwsr_base) in kfd_process_device_init_cwsr_dgpu()
1385 memcpy(qpd->cwsr_kaddr, dev->kfd->cwsr_isa, dev->kfd->cwsr_isa_size); in kfd_process_device_init_cwsr_dgpu()
1459 if (!amdgpu_sriov_xnack_support(dev->kfd->adev)) { in kfd_process_xnack_mode()
1474 if (dev->kfd->noretry) in kfd_process_xnack_mode()
1633 if (dev->kfd->shared_resources.enable_mes) { in kfd_create_process_device_data()
2149 if (KFD_GC_VERSION(pdd->dev->kfd) == IP_VERSION(9, 4, 3) || in kfd_process_drain_interrupts()
[all …]
kfd_mqd_manager_v10.c 76 static struct kfd_mem_obj *allocate_mqd(struct kfd_node *kfd, in allocate_mqd() argument
81 if (kfd_gtt_sa_allocate(kfd, sizeof(struct v10_compute_mqd), in allocate_mqd()
129 if (mm->dev->kfd->cwsr_enabled) { in init_mqd()
218 if (mm->dev->kfd->cwsr_enabled) in update_mqd()
kfd_mqd_manager_vi.c 79 static struct kfd_mem_obj *allocate_mqd(struct kfd_node *kfd, in allocate_mqd() argument
84 if (kfd_gtt_sa_allocate(kfd, sizeof(struct vi_mqd), in allocate_mqd()
138 if (mm->dev->kfd->cwsr_enabled && q->ctx_save_restore_area_address) { in init_mqd()
229 if (mm->dev->kfd->cwsr_enabled && q->ctx_save_restore_area_address) in __update_mqd()
kfd_process_queue_manager.c 91 if (dev->kfd->shared_resources.enable_mes && in kfd_process_dequeue_from_device()
136 !dev->kfd->shared_resources.enable_mes) { in pqm_set_gws()
200 !dev->kfd->shared_resources.enable_mes) in pqm_clean_queue_resource()
206 if (dev->kfd->shared_resources.enable_mes) { in pqm_clean_queue_resource()
257 if (dev->kfd->shared_resources.enable_mes) { in init_user_queue()
343 max_queues = dev->kfd->device_info.max_no_of_hqd/2; in pqm_create_queue()
444 pdd->dev->kfd->device_info.doorbell_size); in pqm_create_queue()
kfd_crat.c 1625 kfd_fill_gpu_cache_info_from_gfx_config_v2(kdev->kfd, in kfd_get_gpu_cache_info()
1691 kfd_fill_gpu_cache_info_from_gfx_config(kdev->kfd, *pcache_info); in kfd_get_gpu_cache_info()
2155 bool use_ta_info = kdev->kfd->num_nodes == 1; in kfd_fill_gpu_xgmi_link_to_gpu()
2181 bool is_single_hop = kdev->kfd == peer_kdev->kfd; in kfd_fill_gpu_xgmi_link_to_gpu()
2250 (cu_info->number / kdev->kfd->num_nodes); in kfd_create_vcrat_image_gpu()
2325 if (kdev->kfd->hive_id) { in kfd_create_vcrat_image_gpu()
2330 if (peer_dev->gpu->kfd->hive_id != kdev->kfd->hive_id) in kfd_create_vcrat_image_gpu()
kfd_mqd_manager_v9.c 44 if (mm->dev->kfd->cwsr_enabled && in mqd_stride_v9()
133 if (node->kfd->cwsr_enabled && (q->type == KFD_QUEUE_TYPE_COMPUTE)) { in allocate_mqd()
208 if (mm->dev->kfd->cwsr_enabled && q->ctx_save_restore_area_address) { in init_mqd()
300 if (mm->dev->kfd->cwsr_enabled && q->ctx_save_restore_area_address) in update_mqd()
672 if (mm->dev->kfd->cwsr_enabled && in init_mqd_v9_4_3()
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_amdkfd.h 416 bool kgd2kfd_device_init(struct kfd_dev *kfd,
418 void kgd2kfd_device_exit(struct kfd_dev *kfd);
419 void kgd2kfd_suspend(struct kfd_dev *kfd, bool run_pm);
420 int kgd2kfd_resume(struct kfd_dev *kfd, bool run_pm);
421 int kgd2kfd_pre_reset(struct kfd_dev *kfd,
423 int kgd2kfd_post_reset(struct kfd_dev *kfd);
425 void kgd2kfd_set_sram_ecc_flag(struct kfd_dev *kfd);
448 bool kgd2kfd_device_init(struct kfd_dev *kfd, in kgd2kfd_device_init() argument
467 static inline int kgd2kfd_pre_reset(struct kfd_dev *kfd, in kgd2kfd_pre_reset() argument
473 static inline int kgd2kfd_post_reset(struct kfd_dev *kfd) in kgd2kfd_post_reset() argument
[all …]
amdgpu_amdkfd.c 128 kfd.reset_work); in amdgpu_amdkfd_reset_work()
152 if (!adev->kfd.init_complete || adev->kfd.client.dev) in amdgpu_amdkfd_drm_client_create()
175 if (adev->kfd.dev) { in amdgpu_amdkfd_device_init()
226 adev->kfd.init_complete = kgd2kfd_device_init(adev->kfd.dev, in amdgpu_amdkfd_device_init()
237 if (adev->kfd.dev) { in amdgpu_amdkfd_device_fini_sw()
247 if (adev->kfd.dev) in amdgpu_amdkfd_interrupt()
253 if (adev->kfd.dev) in amdgpu_amdkfd_suspend()
261 if (adev->kfd.dev) in amdgpu_amdkfd_resume()
272 if (adev->kfd.dev) in amdgpu_amdkfd_pre_reset()
282 if (adev->kfd.dev) in amdgpu_amdkfd_post_reset()
[all …]
amdgpu_amdkfd_gpuvm.c 235 adev->kfd.vram_used[xcp_id] += vram_needed; in amdgpu_amdkfd_reserve_mem_limit()
236 adev->kfd.vram_used_aligned[xcp_id] += in amdgpu_amdkfd_reserve_mem_limit()
264 adev->kfd.vram_used[xcp_id] -= size; in amdgpu_amdkfd_unreserve_mem_limit()
266 adev->kfd.vram_used_aligned[xcp_id] -= size; in amdgpu_amdkfd_unreserve_mem_limit()
270 adev->kfd.vram_used_aligned[xcp_id] -= in amdgpu_amdkfd_unreserve_mem_limit()
282 WARN_ONCE(adev && xcp_id >= 0 && adev->kfd.vram_used[xcp_id] < 0, in amdgpu_amdkfd_unreserve_mem_limit()
1686 - adev->kfd.vram_used_aligned[xcp_id] in amdgpu_amdkfd_get_available_memory()
1884 drm_gem_handle_delete(adev->kfd.client.file, (*mem)->gem_handle); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1999 drm_gem_handle_delete(adev->kfd.client.file, mem->gem_handle); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
2458 obj = drm_gem_object_lookup(adev->kfd.client.file, handle); in amdgpu_amdkfd_gpuvm_import_dmabuf_fd()
[all …]
/linux/samples/bpf/
task_fd_query_user.c 234 int err = -1, res, kfd, efd; in test_debug_fs_uprobe() local
240 kfd = open(buf, O_WRONLY | O_TRUNC, 0); in test_debug_fs_uprobe()
241 CHECK_PERROR_RET(kfd < 0); in test_debug_fs_uprobe()
250 CHECK_PERROR_RET(write(kfd, buf, strlen(buf)) < 0); in test_debug_fs_uprobe()
252 close(kfd); in test_debug_fs_uprobe()
253 kfd = -1; in test_debug_fs_uprobe()
270 kfd = sys_perf_event_open(&attr, -1, 0, -1, PERF_FLAG_FD_CLOEXEC); in test_debug_fs_uprobe()
271 link = bpf_program__attach_perf_event(progs[0], kfd); in test_debug_fs_uprobe()
275 close(kfd); in test_debug_fs_uprobe()
280 err = bpf_task_fd_query(getpid(), kfd, 0, buf, &len, in test_debug_fs_uprobe()
/linux/tools/perf/
builtin-probe.c 443 int ret, ret2, ufd = -1, kfd = -1; in perf_del_probe_events() local
457 ret = probe_file__open_both(&kfd, &ufd, PF_FL_RW); in perf_del_probe_events()
468 ret = probe_file__get_events(kfd, filter, klist); in perf_del_probe_events()
473 ret = probe_file__del_strlist(kfd, klist); in perf_del_probe_events()
496 if (kfd >= 0) in perf_del_probe_events()
497 close(kfd); in perf_del_probe_events()
/linux/tools/perf/util/
probe-file.c 152 int probe_file__open_both(int *kfd, int *ufd, int flag) in probe_file__open_both() argument
154 if (!kfd || !ufd) in probe_file__open_both()
157 *kfd = open_kprobe_events(flag & PF_FL_RW); in probe_file__open_both()
159 if (*kfd < 0 && *ufd < 0) { in probe_file__open_both()
160 print_both_open_warning(*kfd, *ufd, flag & PF_FL_RW); in probe_file__open_both()
161 return *kfd; in probe_file__open_both()

