Lines matching refs: workload
(one hit per line: source line number, code fragment, containing function or context)

515 struct intel_vgpu_workload *workload; member
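The hit at line 515 is the declaration behind every s->workload dereference below: the parser state carries a back-pointer to the workload under scan. A minimal, declaration-only sketch; the enclosing struct name is invented, only the member matches the listing.

```c
struct intel_vgpu_workload;                  /* opaque here; real type is in gvt */

/* Hypothetical parser-state shape: one back-pointer gives every command
 * handler access to per-workload data (shadow_mm, pending_events, ring
 * geometry) without threading extra arguments through the scan. */
struct parser_state_sketch {
        struct intel_vgpu_workload *workload;    /* workload being scanned */
        /* ... scan cursor, vgpu and engine back-pointers, etc. ... */
};
```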
853 u32 base = s->workload->engine->mmio_base; in is_cmd_update_pdps()
861 struct intel_vgpu_mm *shadow_mm = s->workload->shadow_mm; in cmd_pdp_mmio_update_handler()
877 &s->workload->lri_shadow_mm); in cmd_pdp_mmio_update_handler()
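Lines 853-877 are the PDP update path: is_cmd_update_pdps() decides whether an LRI destination register is one of the ring-context PDP registers, and cmd_pdp_mmio_update_handler() then derives a shadow mm and queues it on workload->lri_shadow_mm. A hedged sketch of the range test; the 0x270 window offset and its size are assumptions, not taken from the listing.

```c
#include <stdbool.h>
#include <stdint.h>

/* Hypothetical version of the is_cmd_update_pdps() test (line 853): an
 * LRI destination counts as a PDP update if it falls in the per-engine
 * PDP register window, located relative to the engine's mmio_base. */
#define PDP_WINDOW_OFFSET 0x270u                  /* assumed offset of PDP0 */
#define PDP_WINDOW_SIZE   (4u * 2u * 4u)          /* 4 PDPs x hi/lo dwords */

static bool is_pdp_update_sketch(uint32_t reg, uint32_t mmio_base)
{
        uint32_t lo = mmio_base + PDP_WINDOW_OFFSET;

        return reg >= lo && reg < lo + PDP_WINDOW_SIZE;
}
```

Queuing the new mm on lri_shadow_mm rather than replacing workload->shadow_mm in place suggests the updated page tables only take effect when the workload is actually scheduled.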
1017 s->workload->ring_context_gpa + 12, &ctx_sr_ctl, 4); in cmd_reg_handler()
1228 s->workload->pending_events); in cmd_handler_pipe_control()
1235 s->workload->pending_events); in cmd_handler_mi_user_interrupt()
1748 s->workload->pending_events); in cmd_handler_mi_flush_dw()
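Lines 1228, 1235 and 1748 all pass workload->pending_events to the same kind of helper, which points at deferred interrupt delivery: the scanner notes interrupt-generating commands and the events are injected into the guest only once the workload completes on hardware. A self-contained model with invented event ids; the real code presumably uses set_bit() on this bitmap.

```c
#include <stdint.h>

/* Model of deferred event delivery: handlers for MI_USER_INTERRUPT,
 * PIPE_CONTROL-with-notify and MI_FLUSH_DW do not inject an interrupt
 * while scanning; they record the event in a per-workload bitmap that
 * is consumed when the workload retires.  Event ids are made up. */
enum event_sketch {
        EV_USER_INTERRUPT = 0,
        EV_PIPE_CONTROL_NOTIFY = 1,
        EV_FLUSH_DW_NOTIFY = 2,
};

static void record_pending_event(unsigned long *pending_events,
                                 enum event_sketch ev)
{
        *pending_events |= 1ul << ev;        /* plays the role of set_bit() */
}
```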
1819 s->vgpu->gtt.ggtt_mm : s->workload->shadow_mm; in find_bb_size()
1835 s->engine->name, s->workload); in find_bb_size()
1847 s->engine->name, s->workload); in find_bb_size()
1881 s->engine->name, s->workload); in audit_bb_end()
1902 s->vgpu->gtt.ggtt_mm : s->workload->shadow_mm; in perform_bb_shadow()
1963 list_add(&bb->list, &s->workload->shadow_bb); in perform_bb_shadow()
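Lines 1819 and 1902 show the same ternary in find_bb_size() and perform_bb_shadow(): a batch buffer referenced from the ring may live in the global GTT or in the workload's shadow PPGTT, and translation must use the matching mm. A sketch of the selection with stand-in types:

```c
#include <stdbool.h>

/* Sketch of the address-space choice at lines 1819 and 1902: the
 * MI_BATCH_BUFFER_START decode says whether the batch address is a
 * GGTT or a PPGTT address.  All types stand in for the gvt ones. */
struct mm_sketch { const char *tag; };

struct bb_scan_sketch {
        bool bb_in_ggtt;                 /* decoded from the BB_START dword */
        struct mm_sketch *ggtt_mm;       /* models vgpu->gtt.ggtt_mm */
        struct mm_sketch *shadow_mm;     /* models workload->shadow_mm */
};

static struct mm_sketch *pick_mm(const struct bb_scan_sketch *s)
{
        return s->bb_in_ggtt ? s->ggtt_mm : s->shadow_mm;
}
```

Line 1963's list_add() then threads each shadowed batch onto workload->shadow_bb, presumably so the copies can be pinned at submission and released when the workload retires.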
2741 s->engine->name, s->workload); in cmd_parser_exec()
2749 s->workload, info->name); in cmd_parser_exec()
2834 static int scan_workload(struct intel_vgpu_workload *workload) in scan_workload() argument
2841 if (WARN_ON(!IS_ALIGNED(workload->rb_start, I915_GTT_PAGE_SIZE))) in scan_workload()
2844 gma_head = workload->rb_start + workload->rb_head; in scan_workload()
2845 gma_tail = workload->rb_start + workload->rb_tail; in scan_workload()
2846 gma_bottom = workload->rb_start + _RING_CTL_BUF_SIZE(workload->rb_ctl); in scan_workload()
2850 s.vgpu = workload->vgpu; in scan_workload()
2851 s.engine = workload->engine; in scan_workload()
2852 s.ring_start = workload->rb_start; in scan_workload()
2853 s.ring_size = _RING_CTL_BUF_SIZE(workload->rb_ctl); in scan_workload()
2856 s.rb_va = workload->shadow_ring_buffer_va; in scan_workload()
2857 s.workload = workload; in scan_workload()
2860 if (bypass_scan_mask & workload->engine->mask || gma_head == gma_tail) in scan_workload()
2867 ret = command_scan(&s, workload->rb_head, workload->rb_tail, in scan_workload()
2868 workload->rb_start, _RING_CTL_BUF_SIZE(workload->rb_ctl)); in scan_workload()
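scan_workload() (lines 2834-2868) turns the ring registers into guest graphics addresses: head and tail are byte offsets into the ring, so each scan bound is rb_start plus an offset, and gma_bottom marks the end of the buffer for wrap handling. A standalone sketch; ring_size_from_ctl() models _RING_CTL_BUF_SIZE() under the usual i915 encoding (length field in bits 20:12, in pages), which is an assumption here.

```c
#include <stdint.h>

/* Sketch of the bounds computed at lines 2844-2846. */
static uint32_t ring_size_from_ctl(uint32_t rb_ctl)
{
        /* assumed RING_CTL encoding: (pages - 1) in bits 20:12 */
        return (((rb_ctl >> 12) & 0x1ffu) + 1u) * 4096u;
}

static void ring_scan_bounds(uint64_t rb_start, uint32_t rb_head,
                             uint32_t rb_tail, uint32_t rb_ctl,
                             uint64_t *gma_head, uint64_t *gma_tail,
                             uint64_t *gma_bottom)
{
        *gma_head   = rb_start + rb_head;
        *gma_tail   = rb_start + rb_tail;
        *gma_bottom = rb_start + ring_size_from_ctl(rb_ctl);
}
```

The early-out at line 2860 (bypass_scan_mask, or gma_head == gma_tail) skips engines that are not scanned as well as empty rings.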
2880 struct intel_vgpu_workload *workload = container_of(wa_ctx, in scan_wa_ctx() local
2898 s.vgpu = workload->vgpu; in scan_wa_ctx()
2899 s.engine = workload->engine; in scan_wa_ctx()
2905 s.workload = workload; in scan_wa_ctx()
2918 static int shadow_workload_ring_buffer(struct intel_vgpu_workload *workload) in shadow_workload_ring_buffer() argument
2920 struct intel_vgpu *vgpu = workload->vgpu; in shadow_workload_ring_buffer()
2926 guest_rb_size = _RING_CTL_BUF_SIZE(workload->rb_ctl); in shadow_workload_ring_buffer()
2929 workload->rb_len = (workload->rb_tail + guest_rb_size - in shadow_workload_ring_buffer()
2930 workload->rb_head) % guest_rb_size; in shadow_workload_ring_buffer()
2932 gma_head = workload->rb_start + workload->rb_head; in shadow_workload_ring_buffer()
2933 gma_tail = workload->rb_start + workload->rb_tail; in shadow_workload_ring_buffer()
2934 gma_top = workload->rb_start + guest_rb_size; in shadow_workload_ring_buffer()
2936 if (workload->rb_len > s->ring_scan_buffer_size[workload->engine->id]) { in shadow_workload_ring_buffer()
2940 p = krealloc(s->ring_scan_buffer[workload->engine->id], in shadow_workload_ring_buffer()
2941 workload->rb_len, GFP_KERNEL); in shadow_workload_ring_buffer()
2946 s->ring_scan_buffer[workload->engine->id] = p; in shadow_workload_ring_buffer()
2947 s->ring_scan_buffer_size[workload->engine->id] = workload->rb_len; in shadow_workload_ring_buffer()
2950 shadow_ring_buffer_va = s->ring_scan_buffer[workload->engine->id]; in shadow_workload_ring_buffer()
2953 workload->shadow_ring_buffer_va = shadow_ring_buffer_va; in shadow_workload_ring_buffer()
2964 gma_head = workload->rb_start; in shadow_workload_ring_buffer()
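shadow_workload_ring_buffer() (lines 2918-2964) copies the guest ring into a per-engine host buffer before anything is parsed. The modulo expression at lines 2929-2930 is the standard circular-buffer distance, and lines 2936-2947 grow the scan buffer on demand. A sketch with realloc() standing in for krealloc():

```c
#include <stdint.h>
#include <stdlib.h>

/* Wrap-aware length of new ring content (lines 2929-2930). */
static uint32_t ring_bytes_to_scan(uint32_t head, uint32_t tail,
                                   uint32_t ring_size)
{
        return (tail + ring_size - head) % ring_size;
}

/* Grow-on-demand scan buffer (models lines 2936-2947). */
static void *ensure_scan_buffer(void **buf, uint32_t *buf_size, uint32_t need)
{
        if (need > *buf_size) {
                void *p = realloc(*buf, need);

                if (!p)
                        return NULL;
                *buf = p;
                *buf_size = need;
        }
        return *buf;
}
```

When head > tail the content wraps, which is why line 2964 resets gma_head to rb_start: the copy is done as head-to-top, then start-to-tail.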
2977 int intel_gvt_scan_and_shadow_ringbuffer(struct intel_vgpu_workload *workload) in intel_gvt_scan_and_shadow_ringbuffer() argument
2980 struct intel_vgpu *vgpu = workload->vgpu; in intel_gvt_scan_and_shadow_ringbuffer()
2982 ret = shadow_workload_ring_buffer(workload); in intel_gvt_scan_and_shadow_ringbuffer()
2988 ret = scan_workload(workload); in intel_gvt_scan_and_shadow_ringbuffer()
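The ordering in intel_gvt_scan_and_shadow_ringbuffer() (lines 2977-2988) matters: the ring is shadowed first and scanned second, so the audit runs against a host-side copy the guest can no longer modify. A control-flow-only sketch; every name below is a stand-in.

```c
struct workload_sketch { int unused; };

static int shadow_ring_sketch(struct workload_sketch *w) { (void)w; return 0; }
static int scan_ring_sketch(struct workload_sketch *w)   { (void)w; return 0; }

static int scan_and_shadow_ringbuffer_sketch(struct workload_sketch *w)
{
        int ret;

        ret = shadow_ring_sketch(w);     /* copy guest ring to host memory */
        if (ret)
                return ret;
        return scan_ring_sketch(w);      /* audit commands in the stable copy */
}
```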
3000 struct intel_vgpu_workload *workload = container_of(wa_ctx, in shadow_indirect_ctx() local
3003 struct intel_vgpu *vgpu = workload->vgpu; in shadow_indirect_ctx()
3008 obj = i915_gem_object_create_shmem(workload->engine->i915, in shadow_indirect_ctx()
3030 ret = copy_gma_to_hva(workload->vgpu, in shadow_indirect_ctx()
3031 workload->vgpu->gtt.ggtt_mm, in shadow_indirect_ctx()
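shadow_indirect_ctx() (lines 3000-3031) allocates a shmem object and fills it via copy_gma_to_hva(), reading the guest's indirect (wa) context through the GGTT mm. The copy has to walk page by page because a guest graphics range need not be physically contiguous. A sketch of that pattern; translate_gma() is a made-up stand-in for the gvt address translation.

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define SK_PAGE_SIZE 4096u

typedef void *(*translate_gma_t)(void *mm, uint64_t gma);

/* Copy [gma, gma+len) into dst, one page fragment at a time, translating
 * each guest graphics address through the given mm. */
static int copy_gma_to_hva_sketch(void *mm, translate_gma_t translate_gma,
                                  uint64_t gma, size_t len, void *dst)
{
        uint8_t *out = dst;

        while (len) {
                size_t chunk = SK_PAGE_SIZE - (gma & (SK_PAGE_SIZE - 1));
                void *src = translate_gma(mm, gma);

                if (!src)
                        return -1;              /* unmapped guest address */
                if (chunk > len)
                        chunk = len;
                memcpy(out, src, chunk);
                gma += chunk;
                out += chunk;
                len -= chunk;
        }
        return 0;
}
```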
3072 struct intel_vgpu_workload *workload = container_of(wa_ctx, in intel_gvt_scan_and_shadow_wa_ctx() local
3075 struct intel_vgpu *vgpu = workload->vgpu; in intel_gvt_scan_and_shadow_wa_ctx()
3137 s.workload = NULL; in intel_gvt_update_reg_whitelist()
3157 int intel_gvt_scan_engine_context(struct intel_vgpu_workload *workload) in intel_gvt_scan_engine_context() argument
3159 struct intel_vgpu *vgpu = workload->vgpu; in intel_gvt_scan_engine_context()
3162 int ring_id = workload->engine->id; in intel_gvt_scan_engine_context()
3168 ctx_size = workload->engine->context_size - PAGE_SIZE; in intel_gvt_scan_engine_context()
3182 s.vgpu = workload->vgpu; in intel_gvt_scan_engine_context()
3183 s.engine = workload->engine; in intel_gvt_scan_engine_context()
3189 s.workload = workload; in intel_gvt_scan_engine_context()
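intel_gvt_scan_engine_context() (lines 3157-3189) sets up the same parser state as scan_workload(), but over the engine context image; line 3168 trims one page from engine->context_size, presumably because the leading page of a context image holds the per-process hardware status page rather than commands. The sizing step as a sketch:

```c
#include <stdint.h>

#define PAGE_SIZE_SKETCH 4096u   /* stand-in for PAGE_SIZE */

/* Scannable payload of an engine context (models line 3168): the whole
 * image minus its leading non-command page. */
static uint32_t scannable_ctx_bytes(uint32_t engine_context_size)
{
        return engine_context_size - PAGE_SIZE_SKETCH;
}
```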