
Searched refs: num_rqs (Results 1 – 23 of 23) sorted by relevance

/drivers/gpu/drm/scheduler/
sched_main.c 1122 for (i = DRM_SCHED_PRIORITY_KERNEL; i < sched->num_rqs; i++) { in drm_sched_select_entity()
1332 if (args->num_rqs > DRM_SCHED_PRIORITY_COUNT) { in drm_sched_init()
1358 sched->sched_rq = kmalloc_array(args->num_rqs, sizeof(*sched->sched_rq), in drm_sched_init()
1362 sched->num_rqs = args->num_rqs; in drm_sched_init()
1363 for (i = DRM_SCHED_PRIORITY_KERNEL; i < sched->num_rqs; i++) { in drm_sched_init()
1429 for (i = DRM_SCHED_PRIORITY_KERNEL; i < sched->num_rqs; i++) { in drm_sched_fini()
1488 for (i = DRM_SCHED_PRIORITY_HIGH; i < sched->num_rqs; i++) { in drm_sched_increase_karma()
sched_entity.c 93 if (entity->priority >= sched_list[0]->num_rqs) { in drm_sched_entity_init()
95 entity->priority, sched_list[0]->num_rqs); in drm_sched_entity_init()
96 entity->priority = max_t(s32, (s32) sched_list[0]->num_rqs - 1, in drm_sched_entity_init()
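
The drm/scheduler hits above define the contract for num_rqs: drm_sched_init() rejects anything above DRM_SCHED_PRIORITY_COUNT and allocates one run queue per priority level, while drm_sched_entity_init() clamps an out-of-range entity priority down to the last run queue that was actually allocated. A condensed sketch of that validate-and-clamp pattern, keeping the field and constant names from the snippets (everything else is illustrative, not the kernel's exact code):

    /* Sketch only: condensed from drm_sched_init() and
     * drm_sched_entity_init() as shown in the hits above. */
    if (args->num_rqs > DRM_SCHED_PRIORITY_COUNT)
        return -EINVAL;    /* more run queues than priority levels */

    sched->sched_rq = kmalloc_array(args->num_rqs, sizeof(*sched->sched_rq),
                                    GFP_KERNEL);
    if (!sched->sched_rq)
        return -ENOMEM;
    sched->num_rqs = args->num_rqs;

    /* Entity side: clamp, but never below the kernel priority. */
    if (entity->priority >= sched_list[0]->num_rqs)
        entity->priority = max_t(s32, (s32)sched_list[0]->num_rqs - 1,
                                 (s32)DRM_SCHED_PRIORITY_KERNEL);

Note also that drm_sched_increase_karma() (line 1488) starts its walk at DRM_SCHED_PRIORITY_HIGH rather than DRM_SCHED_PRIORITY_KERNEL, so kernel-priority run queues are exempt from karma accounting.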
/drivers/net/ethernet/huawei/hinic/
hinic_hw_qp_ctxt.h 129 #define HINIC_RQ_CTXT_SIZE(num_rqs) (sizeof(struct hinic_qp_ctxt_header) \ argument
130 + (num_rqs) * sizeof(struct hinic_rq_ctxt))
hinic_hw_io.c 150 u16 num_rqs) in write_rq_ctxts() argument
171 num_rqs, func_to_io->max_qps); in write_rq_ctxts()
172 for (i = 0; i < num_rqs; i++) { in write_rq_ctxts()
179 cmdq_buf.size = HINIC_RQ_CTXT_SIZE(num_rqs); in write_rq_ctxts()
hinic_port.h 316 u32 num_rqs; member
795 int hinic_set_max_qnum(struct hinic_dev *nic_dev, u8 num_rqs);
hinic_port.c 480 int hinic_set_max_qnum(struct hinic_dev *nic_dev, u8 num_rqs) in hinic_set_max_qnum() argument
490 rq_num.num_rqs = num_rqs; in hinic_set_max_qnum()
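
Two different uses of num_rqs appear in hinic: HINIC_RQ_CTXT_SIZE() sizes one command buffer as a hinic_qp_ctxt_header followed by one hinic_rq_ctxt per receive queue (write_rq_ctxts() uses exactly that size for its cmdq buffer on line 179), and hinic_set_max_qnum() copies the requested count into a management-message field (rq_num.num_rqs). A self-contained sketch of the sizing arithmetic; the struct bodies below are placeholders, only the macro mirrors hinic_hw_qp_ctxt.h line 129:

    #include <stdio.h>

    /* Placeholder layouts; the real definitions live in the driver. */
    struct hinic_qp_ctxt_header { unsigned int rsvd[4]; };
    struct hinic_rq_ctxt        { unsigned int rsvd[8]; };

    /* One header, then one context entry per receive queue. */
    #define HINIC_RQ_CTXT_SIZE(num_rqs) (sizeof(struct hinic_qp_ctxt_header) \
                                         + (num_rqs) * sizeof(struct hinic_rq_ctxt))

    int main(void)
    {
        printf("%zu bytes for 16 RQ contexts\n", HINIC_RQ_CTXT_SIZE(16));
        return 0;
    }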
/drivers/gpu/drm/xe/
xe_gpu_scheduler.c 69 .num_rqs = 1, in xe_sched_init()
xe_execlist.c 341 .num_rqs = 1, in execlist_exec_queue_init()
/drivers/gpu/drm/etnaviv/
etnaviv_sched.c 145 .num_rqs = DRM_SCHED_PRIORITY_COUNT, in etnaviv_sched_init()
/drivers/gpu/drm/msm/
msm_ringbuffer.c 70 .num_rqs = DRM_SCHED_PRIORITY_COUNT, in msm_ringbuffer_new()
msm_gem_vma.c 780 .num_rqs = 1, in msm_gem_vm_create()
/drivers/gpu/drm/scheduler/tests/
mock_scheduler.c 293 .num_rqs = DRM_SCHED_PRIORITY_COUNT, in drm_mock_sched_new()
/drivers/scsi/elx/libefc_sli/
sli4.c 427 sli_cmd_rq_create_v2(struct sli4 *sli4, u32 num_rqs, in sli_cmd_rq_create_v2() argument
439 page_count = sli_page_count(qs[0]->dma.size, SLI_PAGE_SIZE) * num_rqs; in sli_cmd_rq_create_v2()
468 req->rq_count = num_rqs; in sli_cmd_rq_create_v2()
473 for (i = 0; i < num_rqs; i++) { in sli_cmd_rq_create_v2()
590 u32 num_rqs = num_rq_pairs * 2; in sli_fc_rq_set_alloc() local
592 for (i = 0; i < num_rqs; i++) { in sli_fc_rq_set_alloc()
600 if (sli_cmd_rq_create_v2(sli4, num_rqs, qs, base_cq_id, in sli_fc_rq_set_alloc()
623 for (i = 0; i < num_rqs; i++) { in sli_fc_rq_set_alloc()
638 for (i = 0; i < num_rqs; i++) in sli_fc_rq_set_alloc()
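
The libefc_sli hits show why num_rqs is always even here: sli_fc_rq_set_alloc() doubles num_rq_pairs because each Fibre Channel receive unit is a header-RQ/data-RQ pair, and sli_cmd_rq_create_v2() sizes one page list covering every queue in the set. A sketch of that arithmetic, assuming sli_page_count() is a plain round-up helper (its real definition is not in these results) and using illustrative sizes:

    #include <stdint.h>
    #include <stdio.h>

    #define SLI_PAGE_SIZE 4096u

    /* Assumed round-up helper standing in for sli_page_count(). */
    static uint32_t sli_page_count(size_t bytes, uint32_t page_size)
    {
        return (bytes + page_size - 1) / page_size;
    }

    int main(void)
    {
        uint32_t num_rq_pairs = 2;
        uint32_t num_rqs = num_rq_pairs * 2;   /* header RQ + data RQ per pair */
        size_t dma_size = 16 * 1024;           /* illustrative per-queue ring size */

        /* All queues share one CREATE command, so the page list is
         * scaled by num_rqs (sli4.c line 439). */
        uint32_t page_count = sli_page_count(dma_size, SLI_PAGE_SIZE) * num_rqs;

        printf("%u RQs -> %u pages in the create command\n",
               num_rqs, page_count);
        return 0;
    }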
/drivers/gpu/drm/nouveau/
nouveau_sched.c 422 .num_rqs = DRM_SCHED_PRIORITY_COUNT, in nouveau_sched_init()
/drivers/gpu/drm/amd/amdgpu/
amdgpu_job.c 441 for (i = DRM_SCHED_PRIORITY_KERNEL; i < sched->num_rqs; i++) { in amdgpu_job_stop_all_jobs_on_sched()
amdgpu_device.c 2977 .num_rqs = DRM_SCHED_PRIORITY_COUNT, in amdgpu_device_init_schedulers()
/drivers/gpu/drm/lima/
lima_sched.c 522 .num_rqs = DRM_SCHED_PRIORITY_COUNT, in lima_sched_pipe_init()
/drivers/gpu/drm/panfrost/
panfrost_job.c 841 .num_rqs = DRM_SCHED_PRIORITY_COUNT, in panfrost_job_init()
/drivers/gpu/drm/v3d/
v3d_sched.c 861 .num_rqs = DRM_SCHED_PRIORITY_COUNT, in v3d_queue_sched_init()
/drivers/accel/amdxdna/
aie2_ctx.c 524 .num_rqs = DRM_SCHED_PRIORITY_COUNT, in aie2_hwctx_init()
/drivers/gpu/drm/imagination/
pvr_queue.c 1231 .num_rqs = 1, in pvr_queue_create()
/drivers/gpu/drm/panthor/
panthor_mmu.c 2295 .num_rqs = 1, in panthor_vm_create()
panthor_sched.c 3293 .num_rqs = 1, in group_create_queue()
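
Taken together, the driver hits use only two values for num_rqs: DRM_SCHED_PRIORITY_COUNT where the host-side scheduler multiplexes entities across priority levels (amdgpu, etnaviv, the msm ringbuffers, nouveau, lima, panfrost, v3d, amdxdna, and the scheduler self-tests), and 1 where firmware does the scheduling or only one queue exists (xe, the msm GEM VM, imagination, panthor). A hedged sketch of the single-run-queue initializer; apart from .num_rqs, every field and name below is an assumption for illustration, not taken from the listing:

    static const struct drm_sched_init_args example_args = {
        .ops = &example_sched_ops,    /* assumed driver callback table */
        /* One run queue: firmware schedules the hardware, so the host
         * does not multiplex priorities (the xe/panthor pattern). */
        .num_rqs = 1,
        .name = "example-fw-ring",    /* assumed scheduler name */
    };

Passing DRM_SCHED_PRIORITY_COUNT instead yields one run queue per priority level, the maximum drm_sched_init() accepts (sched_main.c line 1332).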
