Lines matching refs:vgdev (drivers/gpu/drm/virtio/virtgpu_vq.c)

Each entry below gives the file line number, the matching source line, the enclosing function, and whether vgdev is a local variable or a function argument at that point.

59 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_ctrl_ack() local
61 schedule_work(&vgdev->ctrlq.dequeue_work); in virtio_gpu_ctrl_ack()
67 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_cursor_ack() local
69 schedule_work(&vgdev->cursorq.dequeue_work); in virtio_gpu_cursor_ack()
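
Both ack callbacks run in virtqueue interrupt context, so they do nothing but recover vgdev and punt to a workqueue. A minimal sketch of the ctrl variant, reconstructed from lines 59-61 (the vq->vdev->priv lookup is assumed, not shown in the listing):

    void virtio_gpu_ctrl_ack(struct virtqueue *vq)
    {
        struct drm_device *dev = vq->vdev->priv;            /* assumed lookup */
        struct virtio_gpu_device *vgdev = dev->dev_private; /* line 59 */

        /* Interrupt context: defer buffer reclaim to process context. */
        schedule_work(&vgdev->ctrlq.dequeue_work);          /* line 61 */
    }
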
72 int virtio_gpu_alloc_vbufs(struct virtio_gpu_device *vgdev) in virtio_gpu_alloc_vbufs() argument
74 vgdev->vbufs = kmem_cache_create("virtio-gpu-vbufs", in virtio_gpu_alloc_vbufs()
78 if (!vgdev->vbufs) in virtio_gpu_alloc_vbufs()
83 void virtio_gpu_free_vbufs(struct virtio_gpu_device *vgdev) in virtio_gpu_free_vbufs() argument
85 kmem_cache_destroy(vgdev->vbufs); in virtio_gpu_free_vbufs()
86 vgdev->vbufs = NULL; in virtio_gpu_free_vbufs()
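
virtio_gpu_alloc_vbufs() and virtio_gpu_free_vbufs() bracket the driver's lifetime: every command buffer below comes from this one slab cache. A sketch assuming VBUFFER_SIZE covers the vbuffer header plus inline command/response space (the size and alignment arguments are not visible in the listing):

    int virtio_gpu_alloc_vbufs(struct virtio_gpu_device *vgdev)
    {
        vgdev->vbufs = kmem_cache_create("virtio-gpu-vbufs",      /* line 74 */
                                         VBUFFER_SIZE,
                                         __alignof__(struct virtio_gpu_vbuffer),
                                         0, NULL);
        if (!vgdev->vbufs)                                        /* line 78 */
            return -ENOMEM;
        return 0;
    }

    void virtio_gpu_free_vbufs(struct virtio_gpu_device *vgdev)
    {
        kmem_cache_destroy(vgdev->vbufs);                         /* line 85 */
        vgdev->vbufs = NULL;                                      /* line 86 */
    }
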
91 virtio_gpu_panic_get_vbuf(struct virtio_gpu_device *vgdev, int size) in virtio_gpu_panic_get_vbuf() argument
95 vbuf = kmem_cache_zalloc(vgdev->vbufs, GFP_ATOMIC); in virtio_gpu_panic_get_vbuf()
106 virtio_gpu_get_vbuf(struct virtio_gpu_device *vgdev, in virtio_gpu_get_vbuf() argument
112 vbuf = kmem_cache_zalloc(vgdev->vbufs, GFP_KERNEL | __GFP_NOFAIL); in virtio_gpu_get_vbuf()
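
The two vbuf allocators differ only in their GFP flags, and that difference is the whole point: the panic path must not sleep and is allowed to fail, while the normal path may sleep and is guaranteed to succeed. In sketch form:

    /* Panic path (line 95): atomic, may return NULL. */
    vbuf = kmem_cache_zalloc(vgdev->vbufs, GFP_ATOMIC);

    /* Normal path (line 112): may sleep; __GFP_NOFAIL means callers
     * never need an error path for command allocation. */
    vbuf = kmem_cache_zalloc(vgdev->vbufs, GFP_KERNEL | __GFP_NOFAIL);
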
140 virtio_gpu_alloc_cursor(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cursor() argument
146 (vgdev, sizeof(struct virtio_gpu_update_cursor), in virtio_gpu_alloc_cursor()
157 static void *virtio_gpu_panic_alloc_cmd_resp(struct virtio_gpu_device *vgdev, in virtio_gpu_panic_alloc_cmd_resp() argument
163 vbuf = virtio_gpu_panic_get_vbuf(vgdev, cmd_size); in virtio_gpu_panic_alloc_cmd_resp()
168 static void *virtio_gpu_alloc_cmd_resp(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd_resp() argument
176 vbuf = virtio_gpu_get_vbuf(vgdev, cmd_size, in virtio_gpu_alloc_cmd_resp()
182 static void *virtio_gpu_alloc_cmd(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd() argument
186 return virtio_gpu_alloc_cmd_resp(vgdev, NULL, vbuffer_p, size, in virtio_gpu_alloc_cmd()
191 static void *virtio_gpu_alloc_cmd_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd_cb() argument
196 return virtio_gpu_alloc_cmd_resp(vgdev, cb, vbuffer_p, size, in virtio_gpu_alloc_cmd_cb()
201 static void free_vbuf(struct virtio_gpu_device *vgdev, in free_vbuf() argument
207 kmem_cache_free(vgdev->vbufs, vbuf); in free_vbuf()
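
The remaining allocators are thin wrappers around virtio_gpu_alloc_cmd_resp(), differing only in whether a response callback is registered. A sketch of the no-callback case from line 186 (parameter names assumed):

    static void *virtio_gpu_alloc_cmd(struct virtio_gpu_device *vgdev,
                                      struct virtio_gpu_vbuffer **vbuffer_p,
                                      int size)
    {
        /* No response callback; expect only the generic ctrl_hdr reply. */
        return virtio_gpu_alloc_cmd_resp(vgdev, NULL, vbuffer_p, size,
                                         sizeof(struct virtio_gpu_ctrl_hdr),
                                         NULL);
    }
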
226 struct virtio_gpu_device *vgdev = in virtio_gpu_dequeue_ctrl_func() local
235 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_dequeue_ctrl_func()
237 virtqueue_disable_cb(vgdev->ctrlq.vq); in virtio_gpu_dequeue_ctrl_func()
238 reclaim_vbufs(vgdev->ctrlq.vq, &reclaim_list); in virtio_gpu_dequeue_ctrl_func()
240 } while (!virtqueue_enable_cb(vgdev->ctrlq.vq)); in virtio_gpu_dequeue_ctrl_func()
241 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_dequeue_ctrl_func()
246 trace_virtio_gpu_cmd_response(vgdev->ctrlq.vq, resp, entry->seqno); in virtio_gpu_dequeue_ctrl_func()
260 virtio_gpu_fence_event_process(vgdev, fence_id); in virtio_gpu_dequeue_ctrl_func()
263 entry->resp_cb(vgdev, entry); in virtio_gpu_dequeue_ctrl_func()
265 wake_up(&vgdev->ctrlq.ack_queue); in virtio_gpu_dequeue_ctrl_func()
269 virtio_gpu_array_put_free_delayed(vgdev, entry->objs); in virtio_gpu_dequeue_ctrl_func()
271 free_vbuf(vgdev, entry); in virtio_gpu_dequeue_ctrl_func()
277 struct virtio_gpu_device *vgdev = in virtio_gpu_dequeue_cursor_func() local
284 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_dequeue_cursor_func()
286 virtqueue_disable_cb(vgdev->cursorq.vq); in virtio_gpu_dequeue_cursor_func()
287 reclaim_vbufs(vgdev->cursorq.vq, &reclaim_list); in virtio_gpu_dequeue_cursor_func()
288 } while (!virtqueue_enable_cb(vgdev->cursorq.vq)); in virtio_gpu_dequeue_cursor_func()
289 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_dequeue_cursor_func()
295 trace_virtio_gpu_cmd_response(vgdev->cursorq.vq, resp, entry->seqno); in virtio_gpu_dequeue_cursor_func()
297 free_vbuf(vgdev, entry); in virtio_gpu_dequeue_cursor_func()
299 wake_up(&vgdev->cursorq.ack_queue); in virtio_gpu_dequeue_cursor_func()
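
Both dequeue workers use the classic disable/reclaim/re-enable loop: virtqueue_enable_cb() returns false if the host added completions while callbacks were off, so the loop cannot miss one. A sketch of the ctrl worker's body, reconstructed from lines 235-271 (reclaim_vbufs() and the fence bookkeeping are paraphrased):

    INIT_LIST_HEAD(&reclaim_list);
    spin_lock(&vgdev->ctrlq.qlock);
    do {
        virtqueue_disable_cb(vgdev->ctrlq.vq);                 /* line 237 */
        reclaim_vbufs(vgdev->ctrlq.vq, &reclaim_list);         /* line 238 */
    } while (!virtqueue_enable_cb(vgdev->ctrlq.vq));           /* line 240 */
    spin_unlock(&vgdev->ctrlq.qlock);

    list_for_each_entry(entry, &reclaim_list, list) {
        /* Trace the response, track the highest fence_id seen,
         * and run the per-command callback, if any (line 263). */
        if (entry->resp_cb)
            entry->resp_cb(vgdev, entry);
    }
    wake_up(&vgdev->ctrlq.ack_queue);   /* unblock writers waiting for ring space */

    if (fence_id)
        virtio_gpu_fence_event_process(vgdev, fence_id);       /* line 260 */

    list_for_each_entry_safe(entry, tmp, &reclaim_list, list) {
        if (entry->objs)
            virtio_gpu_array_put_free_delayed(vgdev, entry->objs);
        list_del(&entry->list);
        free_vbuf(vgdev, entry);                               /* line 271 */
    }
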
343 static int virtio_gpu_panic_queue_ctrl_sgs(struct virtio_gpu_device *vgdev, in virtio_gpu_panic_queue_ctrl_sgs() argument
350 struct virtqueue *vq = vgdev->ctrlq.vq; in virtio_gpu_panic_queue_ctrl_sgs()
353 if (vgdev->has_indirect) in virtio_gpu_panic_queue_ctrl_sgs()
362 vbuf->seqno = ++vgdev->ctrlq.seqno; in virtio_gpu_panic_queue_ctrl_sgs()
365 atomic_inc(&vgdev->pending_commands); in virtio_gpu_panic_queue_ctrl_sgs()
370 static int virtio_gpu_queue_ctrl_sgs(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_ctrl_sgs() argument
378 struct virtqueue *vq = vgdev->ctrlq.vq; in virtio_gpu_queue_ctrl_sgs()
381 if (!drm_dev_enter(vgdev->ddev, &idx)) { in virtio_gpu_queue_ctrl_sgs()
384 free_vbuf(vgdev, vbuf); in virtio_gpu_queue_ctrl_sgs()
388 if (vgdev->has_indirect) in virtio_gpu_queue_ctrl_sgs()
392 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_sgs()
395 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_sgs()
396 virtio_gpu_notify(vgdev); in virtio_gpu_queue_ctrl_sgs()
397 wait_event(vgdev->ctrlq.ack_queue, vq->num_free >= elemcnt); in virtio_gpu_queue_ctrl_sgs()
405 virtio_gpu_fence_emit(vgdev, virtio_gpu_vbuf_ctrl_hdr(vbuf), in virtio_gpu_queue_ctrl_sgs()
416 vbuf->seqno = ++vgdev->ctrlq.seqno; in virtio_gpu_queue_ctrl_sgs()
419 atomic_inc(&vgdev->pending_commands); in virtio_gpu_queue_ctrl_sgs()
421 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_sgs()
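
virtio_gpu_queue_ctrl_sgs() is where commands enter the ring. The panic variant (lines 343-365) is the same path stripped of drm_dev_enter(), fencing, and sleeping; the normal variant retries when the ring is full. Reconstructed control flow from lines 378-421 (sgs/outcnt/incnt setup elided):

    again:
        spin_lock(&vgdev->ctrlq.qlock);
        if (vq->num_free < elemcnt) {
            /* Ring full: flush pending kicks, sleep until the dequeue
             * worker reclaims enough descriptors, then retry. */
            spin_unlock(&vgdev->ctrlq.qlock);                      /* line 395 */
            virtio_gpu_notify(vgdev);                              /* line 396 */
            wait_event(vgdev->ctrlq.ack_queue, vq->num_free >= elemcnt);
            goto again;
        }
        if (fence) {
            /* Emit under qlock so fence ids hit the ring in order (line 405). */
            virtio_gpu_fence_emit(vgdev, virtio_gpu_vbuf_ctrl_hdr(vbuf), fence);
        }
        ret = virtqueue_add_sgs(vq, sgs, outcnt, incnt, vbuf, GFP_ATOMIC);
        WARN_ON(ret);
        vbuf->seqno = ++vgdev->ctrlq.seqno;                        /* line 416 */
        atomic_inc(&vgdev->pending_commands);                      /* line 419 */
        spin_unlock(&vgdev->ctrlq.qlock);
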
428 static int virtio_gpu_panic_queue_ctrl_buffer(struct virtio_gpu_device *vgdev, in virtio_gpu_panic_queue_ctrl_buffer() argument
448 return virtio_gpu_panic_queue_ctrl_sgs(vgdev, vbuf, in virtio_gpu_panic_queue_ctrl_buffer()
453 static int virtio_gpu_queue_fenced_ctrl_buffer(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_fenced_ctrl_buffer() argument
497 ret = virtio_gpu_queue_ctrl_sgs(vgdev, vbuf, fence, elemcnt, sgs, outcnt, in virtio_gpu_queue_fenced_ctrl_buffer()
508 void virtio_gpu_panic_notify(struct virtio_gpu_device *vgdev) in virtio_gpu_panic_notify() argument
512 if (!atomic_read(&vgdev->pending_commands)) in virtio_gpu_panic_notify()
515 atomic_set(&vgdev->pending_commands, 0); in virtio_gpu_panic_notify()
516 notify = virtqueue_kick_prepare(vgdev->ctrlq.vq); in virtio_gpu_panic_notify()
519 virtqueue_notify(vgdev->ctrlq.vq); in virtio_gpu_panic_notify()
522 void virtio_gpu_notify(struct virtio_gpu_device *vgdev) in virtio_gpu_notify() argument
526 if (!atomic_read(&vgdev->pending_commands)) in virtio_gpu_notify()
529 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_notify()
530 atomic_set(&vgdev->pending_commands, 0); in virtio_gpu_notify()
531 notify = virtqueue_kick_prepare(vgdev->ctrlq.vq); in virtio_gpu_notify()
532 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_notify()
535 virtqueue_notify(vgdev->ctrlq.vq); in virtio_gpu_notify()
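
virtio_gpu_notify() batches doorbells: queueing a command only bumps pending_commands, and one later kick flushes everything. The panic variant is identical minus the qlock, since nothing else can run by then. Sketch from lines 526-535, which should track the source closely:

    void virtio_gpu_notify(struct virtio_gpu_device *vgdev)
    {
        bool notify;

        if (!atomic_read(&vgdev->pending_commands))  /* nothing queued since last kick */
            return;

        spin_lock(&vgdev->ctrlq.qlock);
        atomic_set(&vgdev->pending_commands, 0);     /* line 530 */
        notify = virtqueue_kick_prepare(vgdev->ctrlq.vq);
        spin_unlock(&vgdev->ctrlq.qlock);

        if (notify)
            virtqueue_notify(vgdev->ctrlq.vq);       /* line 535 */
    }
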
538 static int virtio_gpu_queue_ctrl_buffer(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_ctrl_buffer() argument
541 return virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, NULL); in virtio_gpu_queue_ctrl_buffer()
544 static void virtio_gpu_queue_cursor(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_cursor() argument
547 struct virtqueue *vq = vgdev->cursorq.vq; in virtio_gpu_queue_cursor()
552 if (!drm_dev_enter(vgdev->ddev, &idx)) { in virtio_gpu_queue_cursor()
553 free_vbuf(vgdev, vbuf); in virtio_gpu_queue_cursor()
561 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
565 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
566 wait_event(vgdev->cursorq.ack_queue, vq->num_free >= outcnt); in virtio_gpu_queue_cursor()
567 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
570 vbuf->seqno = ++vgdev->cursorq.seqno; in virtio_gpu_queue_cursor()
578 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
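
The cursor queue skips fences and doorbell batching but keeps the wait-for-space retry. A sketch from lines 561-578 (the retry label and notify handling are assumed from the usual pattern):

    spin_lock(&vgdev->cursorq.qlock);
    retry:
    ret = virtqueue_add_sgs(vq, sgs, outcnt, 0, vbuf, GFP_ATOMIC);
    if (ret == -ENOSPC) {
        spin_unlock(&vgdev->cursorq.qlock);                        /* line 565 */
        wait_event(vgdev->cursorq.ack_queue, vq->num_free >= outcnt);
        spin_lock(&vgdev->cursorq.qlock);                          /* line 567 */
        goto retry;
    } else {
        vbuf->seqno = ++vgdev->cursorq.seqno;                      /* line 570 */
        notify = virtqueue_kick_prepare(vq);
    }
    spin_unlock(&vgdev->cursorq.qlock);

    if (notify)
        virtqueue_notify(vq);
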
591 void virtio_gpu_cmd_create_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_create_resource() argument
600 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_create_resource()
610 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_create_resource()
614 static void virtio_gpu_cmd_unref_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_unref_cb() argument
625 void virtio_gpu_cmd_unref_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_unref_resource() argument
632 cmd_p = virtio_gpu_alloc_cmd_cb(vgdev, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_unref_resource()
640 ret = virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_unref_resource()
645 void virtio_gpu_cmd_set_scanout(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_set_scanout() argument
653 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_set_scanout()
664 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_set_scanout()
668 void virtio_gpu_panic_cmd_resource_flush(struct virtio_gpu_device *vgdev, in virtio_gpu_panic_cmd_resource_flush() argument
676 cmd_p = virtio_gpu_panic_alloc_cmd_resp(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_panic_cmd_resource_flush()
687 virtio_gpu_panic_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_panic_cmd_resource_flush()
690 void virtio_gpu_cmd_resource_flush(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_flush() argument
700 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_flush()
711 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_flush()
715 int virtio_gpu_panic_cmd_transfer_to_host_2d(struct virtio_gpu_device *vgdev, in virtio_gpu_panic_cmd_transfer_to_host_2d() argument
724 bool use_dma_api = !virtio_has_dma_quirk(vgdev->vdev); in virtio_gpu_panic_cmd_transfer_to_host_2d()
727 dma_sync_sgtable_for_device(vgdev->vdev->dev.parent, in virtio_gpu_panic_cmd_transfer_to_host_2d()
730 cmd_p = virtio_gpu_panic_alloc_cmd_resp(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_panic_cmd_transfer_to_host_2d()
742 return virtio_gpu_panic_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_panic_cmd_transfer_to_host_2d()
745 void virtio_gpu_cmd_transfer_to_host_2d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_to_host_2d() argument
755 bool use_dma_api = !virtio_has_dma_quirk(vgdev->vdev); in virtio_gpu_cmd_transfer_to_host_2d()
758 dma_sync_sgtable_for_device(vgdev->vdev->dev.parent, in virtio_gpu_cmd_transfer_to_host_2d()
761 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_2d()
773 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_to_host_2d()
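
Both 2D transfer paths (panic and normal) do the same pre-flight check: if the device really uses the DMA API, guest CPU writes must be synced to the device before the host reads the backing pages. Sketch from lines 755-758 (the sgtable field name is an assumption):

    bool use_dma_api = !virtio_has_dma_quirk(vgdev->vdev);

    if (virtio_gpu_is_shmem(bo) && use_dma_api)
        dma_sync_sgtable_for_device(vgdev->vdev->dev.parent,
                                    bo->base.sgt,      /* assumed field */
                                    DMA_TO_DEVICE);
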
777 virtio_gpu_cmd_resource_attach_backing(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_attach_backing() argument
786 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_attach_backing()
796 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_attach_backing()
800 virtio_gpu_cmd_resource_detach_backing(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_detach_backing() argument
807 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_detach_backing()
813 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_detach_backing()
816 static void virtio_gpu_cmd_get_display_info_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_display_info_cb() argument
823 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_display_info_cb()
824 for (i = 0; i < vgdev->num_scanouts; i++) { in virtio_gpu_cmd_get_display_info_cb()
825 vgdev->outputs[i].info = resp->pmodes[i]; in virtio_gpu_cmd_get_display_info_cb()
837 vgdev->display_info_pending = false; in virtio_gpu_cmd_get_display_info_cb()
838 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_display_info_cb()
839 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_display_info_cb()
841 if (!drm_helper_hpd_irq_event(vgdev->ddev)) in virtio_gpu_cmd_get_display_info_cb()
842 drm_kms_helper_hotplug_event(vgdev->ddev); in virtio_gpu_cmd_get_display_info_cb()
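
virtio_gpu_cmd_get_display_info_cb() shows the response-callback pattern used throughout the file: take the relevant lock, copy the host's answer into vgdev, clear the pending flag, and wake waiters; here a hotplug event then pushes the new modes to userspace. Sketch from lines 823-842:

    spin_lock(&vgdev->display_info_lock);
    for (i = 0; i < vgdev->num_scanouts; i++) {
        vgdev->outputs[i].info = resp->pmodes[i];      /* line 825 */
        /* (the real loop also byte-swaps and logs each mode) */
    }
    vgdev->display_info_pending = false;               /* line 837 */
    spin_unlock(&vgdev->display_info_lock);
    wake_up(&vgdev->resp_wq);                          /* line 839 */

    if (!drm_helper_hpd_irq_event(vgdev->ddev))
        drm_kms_helper_hotplug_event(vgdev->ddev);     /* line 842 */
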
845 static void virtio_gpu_cmd_get_capset_info_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_capset_info_cb() argument
854 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset_info_cb()
855 if (vgdev->capsets) { in virtio_gpu_cmd_get_capset_info_cb()
856 vgdev->capsets[i].id = le32_to_cpu(resp->capset_id); in virtio_gpu_cmd_get_capset_info_cb()
857 vgdev->capsets[i].max_version = le32_to_cpu(resp->capset_max_version); in virtio_gpu_cmd_get_capset_info_cb()
858 vgdev->capsets[i].max_size = le32_to_cpu(resp->capset_max_size); in virtio_gpu_cmd_get_capset_info_cb()
862 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset_info_cb()
863 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_capset_info_cb()
866 static void virtio_gpu_cmd_capset_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_capset_cb() argument
875 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_capset_cb()
876 list_for_each_entry(cache_ent, &vgdev->cap_cache, head) { in virtio_gpu_cmd_capset_cb()
887 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_capset_cb()
888 wake_up_all(&vgdev->resp_wq); in virtio_gpu_cmd_capset_cb()
903 static void virtio_gpu_cmd_get_edid_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_edid_cb() argument
914 if (scanout >= vgdev->num_scanouts) in virtio_gpu_cmd_get_edid_cb()
916 output = vgdev->outputs + scanout; in virtio_gpu_cmd_get_edid_cb()
921 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_edid_cb()
924 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_edid_cb()
927 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_edid_cb()
930 int virtio_gpu_cmd_get_display_info(struct virtio_gpu_device *vgdev) in virtio_gpu_cmd_get_display_info() argument
942 (vgdev, &virtio_gpu_cmd_get_display_info_cb, &vbuf, in virtio_gpu_cmd_get_display_info()
947 vgdev->display_info_pending = true; in virtio_gpu_cmd_get_display_info()
949 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_display_info()
953 int virtio_gpu_cmd_get_capset_info(struct virtio_gpu_device *vgdev, int idx) in virtio_gpu_cmd_get_capset_info() argument
965 (vgdev, &virtio_gpu_cmd_get_capset_info_cb, &vbuf, in virtio_gpu_cmd_get_capset_info()
972 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset_info()
976 int virtio_gpu_cmd_get_capset(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_capset() argument
989 if (idx >= vgdev->num_capsets) in virtio_gpu_cmd_get_capset()
992 if (version > vgdev->capsets[idx].max_version) in virtio_gpu_cmd_get_capset()
999 max_size = vgdev->capsets[idx].max_size; in virtio_gpu_cmd_get_capset()
1015 cache_ent->id = vgdev->capsets[idx].id; in virtio_gpu_cmd_get_capset()
1018 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset()
1020 list_for_each_entry(search_ent, &vgdev->cap_cache, head) { in virtio_gpu_cmd_get_capset()
1021 if (search_ent->id == vgdev->capsets[idx].id && in virtio_gpu_cmd_get_capset()
1028 list_add_tail(&cache_ent->head, &vgdev->cap_cache); in virtio_gpu_cmd_get_capset()
1029 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset()
1040 (vgdev, &virtio_gpu_cmd_capset_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_get_capset()
1044 cmd_p->capset_id = cpu_to_le32(vgdev->capsets[idx].id); in virtio_gpu_cmd_get_capset()
1047 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset()
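
virtio_gpu_cmd_get_capset() allocates its cache entry optimistically, then checks under display_info_lock whether a racing caller already cached the same (id, version) pair; the loser frees its copy. A sketch of the dedup step from lines 1018-1029 (local variable names assumed):

    spin_lock(&vgdev->display_info_lock);
    /* Did another caller insert this capset while we allocated? */
    list_for_each_entry(search_ent, &vgdev->cap_cache, head) {
        if (search_ent->id == vgdev->capsets[idx].id &&    /* line 1021 */
            search_ent->version == version) {
            *cache_p = search_ent;
            break;
        }
    }
    if (!*cache_p)
        list_add_tail(&cache_ent->head, &vgdev->cap_cache); /* line 1028 */
    spin_unlock(&vgdev->display_info_lock);

    if (*cache_p) {
        /* Lost the race: drop our allocation, reuse the cached entry. */
        kfree(cache_ent->caps_cache);
        kfree(cache_ent);
        return 0;
    }
    *cache_p = cache_ent;
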
1052 int virtio_gpu_cmd_get_edids(struct virtio_gpu_device *vgdev) in virtio_gpu_cmd_get_edids() argument
1059 if (WARN_ON(!vgdev->has_edid)) in virtio_gpu_cmd_get_edids()
1062 for (scanout = 0; scanout < vgdev->num_scanouts; scanout++) { in virtio_gpu_cmd_get_edids()
1069 (vgdev, &virtio_gpu_cmd_get_edid_cb, &vbuf, in virtio_gpu_cmd_get_edids()
1074 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_edids()
1080 void virtio_gpu_cmd_context_create(struct virtio_gpu_device *vgdev, uint32_t id, in virtio_gpu_cmd_context_create() argument
1087 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_create()
1095 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_create()
1098 void virtio_gpu_cmd_context_destroy(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_destroy() argument
1104 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_destroy()
1109 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_destroy()
1112 void virtio_gpu_cmd_context_attach_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_attach_resource() argument
1120 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_attach_resource()
1127 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_attach_resource()
1130 void virtio_gpu_cmd_context_detach_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_detach_resource() argument
1138 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_detach_resource()
1145 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_detach_resource()
1149 virtio_gpu_cmd_resource_create_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_create_3d() argument
1158 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_create_3d()
1176 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_create_3d()
1181 void virtio_gpu_cmd_transfer_to_host_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_to_host_3d() argument
1193 bool use_dma_api = !virtio_has_dma_quirk(vgdev->vdev); in virtio_gpu_cmd_transfer_to_host_3d()
1196 dma_sync_sgtable_for_device(vgdev->vdev->dev.parent, in virtio_gpu_cmd_transfer_to_host_3d()
1199 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_3d()
1213 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_to_host_3d()
1216 void virtio_gpu_cmd_transfer_from_host_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_from_host_3d() argument
1229 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_from_host_3d()
1243 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_from_host_3d()
1246 void virtio_gpu_cmd_submit(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_submit() argument
1255 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_submit()
1266 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_submit()
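
Every virtio_gpu_cmd_* helper from here on follows one template: allocate a typed command, fill the header and payload, then queue it fenced or unfenced. Using command submission as the example, a sketch assuming the standard struct virtio_gpu_cmd_submit layout from the virtio_gpu UAPI header:

    void virtio_gpu_cmd_submit(struct virtio_gpu_device *vgdev,
                               void *data, uint32_t data_size,
                               uint32_t ctx_id,
                               struct virtio_gpu_object_array *objs,
                               struct virtio_gpu_fence *fence)
    {
        struct virtio_gpu_cmd_submit *cmd_p;
        struct virtio_gpu_vbuffer *vbuf;

        cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); /* line 1255 */
        memset(cmd_p, 0, sizeof(*cmd_p));

        vbuf->data_buf = data;   /* execbuffer payload rides as an extra out-sg */
        vbuf->data_size = data_size;
        vbuf->objs = objs;

        cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_SUBMIT_3D);
        cmd_p->hdr.ctx_id = cpu_to_le32(ctx_id);
        cmd_p->size = cpu_to_le32(data_size);

        virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);    /* line 1266 */
    }
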
1269 void virtio_gpu_object_attach(struct virtio_gpu_device *vgdev, in virtio_gpu_object_attach() argument
1277 virtio_gpu_cmd_resource_attach_backing(vgdev, obj->hw_res_handle, in virtio_gpu_object_attach()
1283 void virtio_gpu_object_detach(struct virtio_gpu_device *vgdev, in virtio_gpu_object_detach() argument
1290 virtio_gpu_cmd_resource_detach_backing(vgdev, obj->hw_res_handle, in virtio_gpu_object_detach()
1296 void virtio_gpu_cursor_ping(struct virtio_gpu_device *vgdev, in virtio_gpu_cursor_ping() argument
1303 cur_p = virtio_gpu_alloc_cursor(vgdev, &vbuf); in virtio_gpu_cursor_ping()
1305 virtio_gpu_queue_cursor(vgdev, vbuf); in virtio_gpu_cursor_ping()
1308 static void virtio_gpu_cmd_resource_uuid_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_uuid_cb() argument
1317 spin_lock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_uuid_cb()
1327 spin_unlock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_uuid_cb()
1329 wake_up_all(&vgdev->resp_wq); in virtio_gpu_cmd_resource_uuid_cb()
1333 virtio_gpu_cmd_resource_assign_uuid(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_assign_uuid() argument
1343 spin_lock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_assign_uuid()
1345 spin_unlock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_assign_uuid()
1351 (vgdev, virtio_gpu_cmd_resource_uuid_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_resource_assign_uuid()
1359 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_assign_uuid()
1363 static void virtio_gpu_cmd_resource_map_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_map_cb() argument
1373 spin_lock(&vgdev->host_visible_lock); in virtio_gpu_cmd_resource_map_cb()
1382 spin_unlock(&vgdev->host_visible_lock); in virtio_gpu_cmd_resource_map_cb()
1383 wake_up_all(&vgdev->resp_wq); in virtio_gpu_cmd_resource_map_cb()
1386 int virtio_gpu_cmd_map(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_map() argument
1399 (vgdev, virtio_gpu_cmd_resource_map_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_map()
1408 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_map()
1412 void virtio_gpu_cmd_unmap(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_unmap() argument
1418 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_unmap()
1424 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_unmap()
1428 virtio_gpu_cmd_resource_create_blob(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_create_blob() argument
1437 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_create_blob()
1452 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_create_blob()
1459 void virtio_gpu_cmd_set_scanout_blob(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_set_scanout_blob() argument
1471 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_set_scanout_blob()
1492 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_set_scanout_blob()