Lines matching refs: vbuf (identifier cross-reference over the virtio-gpu virtqueue/command-submission code; the numbers below are the source file's own line numbers)

94 	struct virtio_gpu_vbuffer *vbuf;  in virtio_gpu_get_vbuf()  local
96 vbuf = kmem_cache_zalloc(vgdev->vbufs, GFP_KERNEL | __GFP_NOFAIL); in virtio_gpu_get_vbuf()
100 vbuf->buf = (void *)vbuf + sizeof(*vbuf); in virtio_gpu_get_vbuf()
101 vbuf->size = size; in virtio_gpu_get_vbuf()
103 vbuf->resp_cb = resp_cb; in virtio_gpu_get_vbuf()
104 vbuf->resp_size = resp_size; in virtio_gpu_get_vbuf()
106 vbuf->resp_buf = (void *)vbuf->buf + size; in virtio_gpu_get_vbuf()
108 vbuf->resp_buf = resp_buf; in virtio_gpu_get_vbuf()
109 BUG_ON(!vbuf->resp_buf); in virtio_gpu_get_vbuf()
110 return vbuf; in virtio_gpu_get_vbuf()
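The matches from source lines 94-110 cover the vbuf allocator. A minimal sketch of how they fit together, reconstructed from the listing; the if/else around the two resp_buf assignments and the MAX_INLINE_RESP_SIZE threshold (visible later in free_vbuf) are inferred, and the allocator's size sanity checks are omitted:

static struct virtio_gpu_vbuffer *
virtio_gpu_get_vbuf(struct virtio_gpu_device *vgdev,
		    int size, int resp_size, void *resp_buf,
		    virtio_gpu_resp_cb resp_cb)
{
	struct virtio_gpu_vbuffer *vbuf;

	/* __GFP_NOFAIL: allocation will not return NULL */
	vbuf = kmem_cache_zalloc(vgdev->vbufs, GFP_KERNEL | __GFP_NOFAIL);

	/* the command payload lives inline, right after the vbuf struct */
	vbuf->buf = (void *)vbuf + sizeof(*vbuf);
	vbuf->size = size;

	vbuf->resp_cb = resp_cb;
	vbuf->resp_size = resp_size;
	if (resp_size <= MAX_INLINE_RESP_SIZE)
		/* small responses also live inline, after the command */
		vbuf->resp_buf = (void *)vbuf->buf + size;
	else
		/* large responses use a caller-provided buffer */
		vbuf->resp_buf = resp_buf;
	BUG_ON(!vbuf->resp_buf);
	return vbuf;
}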
114 virtio_gpu_vbuf_ctrl_hdr(struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_vbuf_ctrl_hdr() argument
120 return (struct virtio_gpu_ctrl_hdr *)vbuf->buf; in virtio_gpu_vbuf_ctrl_hdr()
127 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_alloc_cursor() local
129 vbuf = virtio_gpu_get_vbuf in virtio_gpu_alloc_cursor()
132 if (IS_ERR(vbuf)) { in virtio_gpu_alloc_cursor()
134 return ERR_CAST(vbuf); in virtio_gpu_alloc_cursor()
136 *vbuffer_p = vbuf; in virtio_gpu_alloc_cursor()
137 return (struct virtio_gpu_update_cursor *)vbuf->buf; in virtio_gpu_alloc_cursor()
146 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_alloc_cmd_resp() local
148 vbuf = virtio_gpu_get_vbuf(vgdev, cmd_size, in virtio_gpu_alloc_cmd_resp()
150 *vbuffer_p = vbuf; in virtio_gpu_alloc_cmd_resp()
151 return (struct virtio_gpu_command *)vbuf->buf; in virtio_gpu_alloc_cmd_resp()
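Lines 114-151 are the thin wrappers over virtio_gpu_get_vbuf. A sketch of the three shown here; the exact parameter lists and error-path details are assumptions where the listing does not show them:

static struct virtio_gpu_ctrl_hdr *
virtio_gpu_vbuf_ctrl_hdr(struct virtio_gpu_vbuffer *vbuf)
{
	/* every control command starts with a virtio_gpu_ctrl_hdr */
	return (struct virtio_gpu_ctrl_hdr *)vbuf->buf;
}

static struct virtio_gpu_update_cursor *
virtio_gpu_alloc_cursor(struct virtio_gpu_device *vgdev,
			struct virtio_gpu_vbuffer **vbuffer_p)
{
	struct virtio_gpu_vbuffer *vbuf;

	vbuf = virtio_gpu_get_vbuf(vgdev,
				   sizeof(struct virtio_gpu_update_cursor),
				   0, NULL, NULL);
	if (IS_ERR(vbuf)) {
		*vbuffer_p = NULL;
		return ERR_CAST(vbuf);
	}
	*vbuffer_p = vbuf;
	return (struct virtio_gpu_update_cursor *)vbuf->buf;
}

static void *virtio_gpu_alloc_cmd_resp(struct virtio_gpu_device *vgdev,
				       virtio_gpu_resp_cb cb,
				       struct virtio_gpu_vbuffer **vbuffer_p,
				       int cmd_size, int resp_size,
				       void *resp_buf)
{
	struct virtio_gpu_vbuffer *vbuf;

	vbuf = virtio_gpu_get_vbuf(vgdev, cmd_size,
				   resp_size, resp_buf, cb);
	*vbuffer_p = vbuf;
	return (struct virtio_gpu_command *)vbuf->buf;
}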
174 struct virtio_gpu_vbuffer *vbuf) in free_vbuf() argument
176 if (vbuf->resp_size > MAX_INLINE_RESP_SIZE) in free_vbuf()
177 kfree(vbuf->resp_buf); in free_vbuf()
178 kvfree(vbuf->data_buf); in free_vbuf()
179 kmem_cache_free(vgdev->vbufs, vbuf); in free_vbuf()
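free_vbuf mirrors the allocation layout: only a response buffer larger than the inline limit was allocated separately and needs its own kfree(). A sketch assembled directly from the three matches:

static void free_vbuf(struct virtio_gpu_device *vgdev,
		      struct virtio_gpu_vbuffer *vbuf)
{
	/* inline responses were allocated together with the vbuf itself */
	if (vbuf->resp_size > MAX_INLINE_RESP_SIZE)
		kfree(vbuf->resp_buf);
	/* data_buf may be kmalloc'ed or vmalloc'ed; kvfree handles both */
	kvfree(vbuf->data_buf);
	kmem_cache_free(vgdev->vbufs, vbuf);
}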
184 struct virtio_gpu_vbuffer *vbuf; in reclaim_vbufs() local
188 while ((vbuf = virtqueue_get_buf(vq, &len))) { in reclaim_vbufs()
189 list_add_tail(&vbuf->list, reclaim_list); in reclaim_vbufs()
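reclaim_vbufs only moves completed buffers off the virtqueue onto a caller-supplied list; freeing happens later, outside the queue lock. A sketch from the two matches; the len handling is the usual virtqueue_get_buf() calling convention:

static void reclaim_vbufs(struct virtqueue *vq, struct list_head *reclaim_list)
{
	struct virtio_gpu_vbuffer *vbuf;
	unsigned int len;

	/* collect every completed buffer; the caller frees them afterwards */
	while ((vbuf = virtqueue_get_buf(vq, &len)))
		list_add_tail(&vbuf->list, reclaim_list);
}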
315 struct virtio_gpu_vbuffer *vbuf, in virtio_gpu_queue_ctrl_sgs() argument
326 if (fence && vbuf->objs) in virtio_gpu_queue_ctrl_sgs()
327 virtio_gpu_array_unlock_resv(vbuf->objs); in virtio_gpu_queue_ctrl_sgs()
328 free_vbuf(vgdev, vbuf); in virtio_gpu_queue_ctrl_sgs()
349 virtio_gpu_fence_emit(vgdev, virtio_gpu_vbuf_ctrl_hdr(vbuf), in virtio_gpu_queue_ctrl_sgs()
351 if (vbuf->objs) { in virtio_gpu_queue_ctrl_sgs()
352 virtio_gpu_array_add_fence(vbuf->objs, &fence->f); in virtio_gpu_queue_ctrl_sgs()
353 virtio_gpu_array_unlock_resv(vbuf->objs); in virtio_gpu_queue_ctrl_sgs()
357 ret = virtqueue_add_sgs(vq, sgs, outcnt, incnt, vbuf, GFP_ATOMIC); in virtio_gpu_queue_ctrl_sgs()
360 vbuf->seqno = ++vgdev->ctrlq.seqno; in virtio_gpu_queue_ctrl_sgs()
361 trace_virtio_gpu_cmd_queue(vq, virtio_gpu_vbuf_ctrl_hdr(vbuf), vbuf->seqno); in virtio_gpu_queue_ctrl_sgs()
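Source lines 315-361 are the core control-queue submission path. Ordering is the point: the fence is emitted only once queueing is certain, and the object array's reservations are unlocked either on the bail-out path or right after the fence is attached. A condensed sketch; the drm_dev_enter() guard, the queue lock and the ring-full wait are simplified or assumed here:

static int virtio_gpu_queue_ctrl_sgs(struct virtio_gpu_device *vgdev,
				     struct virtio_gpu_vbuffer *vbuf,
				     struct virtio_gpu_fence *fence,
				     int elemcnt, struct scatterlist **sgs,
				     int outcnt, int incnt)
{
	struct virtqueue *vq = vgdev->ctrlq.vq;
	int ret, idx;

	/* device already unplugged: drop the buffer instead of queueing */
	if (!drm_dev_enter(vgdev->ddev, &idx)) {
		if (fence && vbuf->objs)
			virtio_gpu_array_unlock_resv(vbuf->objs);
		free_vbuf(vgdev, vbuf);
		return -ENODEV;
	}

	/* the fence id is assigned only now, when queueing is certain */
	if (fence) {
		virtio_gpu_fence_emit(vgdev, virtio_gpu_vbuf_ctrl_hdr(vbuf),
				      fence);
		if (vbuf->objs) {
			virtio_gpu_array_add_fence(vbuf->objs, &fence->f);
			virtio_gpu_array_unlock_resv(vbuf->objs);
		}
	}

	ret = virtqueue_add_sgs(vq, sgs, outcnt, incnt, vbuf, GFP_ATOMIC);
	WARN_ON(ret);

	/* per-command sequence number, used by the queue tracepoint */
	vbuf->seqno = ++vgdev->ctrlq.seqno;
	trace_virtio_gpu_cmd_queue(vq, virtio_gpu_vbuf_ctrl_hdr(vbuf),
				   vbuf->seqno);

	drm_dev_exit(idx);
	return 0;
}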
372 struct virtio_gpu_vbuffer *vbuf, in virtio_gpu_queue_fenced_ctrl_buffer() argument
380 sg_init_one(&vcmd, vbuf->buf, vbuf->size); in virtio_gpu_queue_fenced_ctrl_buffer()
386 if (vbuf->data_size) { in virtio_gpu_queue_fenced_ctrl_buffer()
387 if (is_vmalloc_addr(vbuf->data_buf)) { in virtio_gpu_queue_fenced_ctrl_buffer()
389 sgt = vmalloc_to_sgt(vbuf->data_buf, vbuf->data_size, in virtio_gpu_queue_fenced_ctrl_buffer()
392 if (fence && vbuf->objs) in virtio_gpu_queue_fenced_ctrl_buffer()
393 virtio_gpu_array_unlock_resv(vbuf->objs); in virtio_gpu_queue_fenced_ctrl_buffer()
400 sg_init_one(&vout, vbuf->data_buf, vbuf->data_size); in virtio_gpu_queue_fenced_ctrl_buffer()
408 if (vbuf->resp_size) { in virtio_gpu_queue_fenced_ctrl_buffer()
409 sg_init_one(&vresp, vbuf->resp_buf, vbuf->resp_size); in virtio_gpu_queue_fenced_ctrl_buffer()
415 ret = virtio_gpu_queue_ctrl_sgs(vgdev, vbuf, fence, elemcnt, sgs, outcnt, in virtio_gpu_queue_fenced_ctrl_buffer()
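The fenced wrapper builds at most three scatterlist entries: the inline command (out), optional payload data (out, possibly a vmalloc-backed sg_table via the file's own vmalloc_to_sgt() helper), and an optional response (in). A sketch from the matches; the element counting and sg_table cleanup are filled in from context:

static int virtio_gpu_queue_fenced_ctrl_buffer(struct virtio_gpu_device *vgdev,
					       struct virtio_gpu_vbuffer *vbuf,
					       struct virtio_gpu_fence *fence)
{
	struct scatterlist *sgs[3], vcmd, vout, vresp;
	struct sg_table *sgt = NULL;
	int elemcnt = 0, outcnt = 0, incnt = 0, ret;

	/* command header + payload, always present */
	sg_init_one(&vcmd, vbuf->buf, vbuf->size);
	elemcnt++;
	sgs[outcnt++] = &vcmd;

	/* optional outgoing data (mem entries, execbuffer, ...) */
	if (vbuf->data_size) {
		if (is_vmalloc_addr(vbuf->data_buf)) {
			int sg_ents;

			sgt = vmalloc_to_sgt(vbuf->data_buf, vbuf->data_size,
					     &sg_ents);
			if (!sgt) {
				if (fence && vbuf->objs)
					virtio_gpu_array_unlock_resv(vbuf->objs);
				return -ENOMEM;
			}
			elemcnt += sg_ents;
			sgs[outcnt] = sgt->sgl;
		} else {
			sg_init_one(&vout, vbuf->data_buf, vbuf->data_size);
			elemcnt++;
			sgs[outcnt] = &vout;
		}
		outcnt++;
	}

	/* optional device-writable response */
	if (vbuf->resp_size) {
		sg_init_one(&vresp, vbuf->resp_buf, vbuf->resp_size);
		elemcnt++;
		sgs[outcnt + incnt] = &vresp;
		incnt++;
	}

	ret = virtio_gpu_queue_ctrl_sgs(vgdev, vbuf, fence, elemcnt, sgs,
					outcnt, incnt);
	if (sgt) {
		sg_free_table(sgt);
		kfree(sgt);
	}
	return ret;
}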
442 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_queue_ctrl_buffer() argument
444 return virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, NULL); in virtio_gpu_queue_ctrl_buffer()
448 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_queue_cursor() argument
456 free_vbuf(vgdev, vbuf); in virtio_gpu_queue_cursor()
460 sg_init_one(&ccmd, vbuf->buf, vbuf->size); in virtio_gpu_queue_cursor()
466 ret = virtqueue_add_sgs(vq, sgs, outcnt, 0, vbuf, GFP_ATOMIC); in virtio_gpu_queue_cursor()
473 vbuf->seqno = ++vgdev->cursorq.seqno; in virtio_gpu_queue_cursor()
475 virtio_gpu_vbuf_ctrl_hdr(vbuf), in virtio_gpu_queue_cursor()
476 vbuf->seqno); in virtio_gpu_queue_cursor()
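The cursor queue is the simple case: one out entry, no response, its own sequence counter. A condensed sketch; the device-unplug guard, locking and kick/notify handling are assumptions not visible in the matches:

static void virtio_gpu_queue_cursor(struct virtio_gpu_device *vgdev,
				    struct virtio_gpu_vbuffer *vbuf)
{
	struct virtqueue *vq = vgdev->cursorq.vq;
	struct scatterlist *sgs[1], ccmd;
	int ret, idx;

	if (!drm_dev_enter(vgdev->ddev, &idx)) {
		free_vbuf(vgdev, vbuf);
		return;
	}

	/* a single out entry; cursor updates expect no response */
	sg_init_one(&ccmd, vbuf->buf, vbuf->size);
	sgs[0] = &ccmd;

	ret = virtqueue_add_sgs(vq, sgs, 1, 0, vbuf, GFP_ATOMIC);
	if (!ret) {
		vbuf->seqno = ++vgdev->cursorq.seqno;
		trace_virtio_gpu_cmd_queue(vq,
					   virtio_gpu_vbuf_ctrl_hdr(vbuf),
					   vbuf->seqno);
		if (virtqueue_kick_prepare(vq))
			virtqueue_notify(vq);
	}

	drm_dev_exit(idx);
}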
501 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_create_resource() local
503 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_create_resource()
505 vbuf->objs = objs; in virtio_gpu_cmd_create_resource()
513 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_create_resource()
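Source lines 501-513 show the pattern most command emitters follow: allocate an inline command, stash the object array on the vbuf so the submission path can fence and unlock it, fill the wire struct, then queue with the fence. A sketch of the 2D resource-create case; the specific field assignments and constants are reconstructed, not shown in the listing:

void virtio_gpu_cmd_create_resource(struct virtio_gpu_device *vgdev,
				    struct virtio_gpu_object *bo,
				    struct virtio_gpu_object_params *params,
				    struct virtio_gpu_object_array *objs,
				    struct virtio_gpu_fence *fence)
{
	struct virtio_gpu_resource_create_2d *cmd_p;
	struct virtio_gpu_vbuffer *vbuf;

	cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
	memset(cmd_p, 0, sizeof(*cmd_p));
	vbuf->objs = objs;	/* fenced and unlocked at submit time */

	cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_RESOURCE_CREATE_2D);
	cmd_p->resource_id = cpu_to_le32(bo->hw_res_handle);
	cmd_p->format = cpu_to_le32(params->format);
	cmd_p->width = cpu_to_le32(params->width);
	cmd_p->height = cpu_to_le32(params->height);

	virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);
	bo->created = true;
}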
518 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_unref_cb() argument
522 bo = vbuf->resp_cb_data; in virtio_gpu_cmd_unref_cb()
523 vbuf->resp_cb_data = NULL; in virtio_gpu_cmd_unref_cb()
532 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_unref_resource() local
535 cmd_p = virtio_gpu_alloc_cmd_cb(vgdev, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_unref_resource()
542 vbuf->resp_cb_data = bo; in virtio_gpu_cmd_unref_resource()
543 ret = virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_unref_resource()
554 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_set_scanout() local
556 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_set_scanout()
567 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_set_scanout()
578 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_resource_flush() local
580 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_flush()
582 vbuf->objs = objs; in virtio_gpu_cmd_resource_flush()
591 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_flush()
603 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_transfer_to_host_2d() local
610 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_2d()
612 vbuf->objs = objs; in virtio_gpu_cmd_transfer_to_host_2d()
622 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_to_host_2d()
633 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_resource_attach_backing() local
635 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_attach_backing()
642 vbuf->data_buf = ents; in virtio_gpu_cmd_resource_attach_backing()
643 vbuf->data_size = sizeof(*ents) * nents; in virtio_gpu_cmd_resource_attach_backing()
645 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_attach_backing()
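attach-backing is the main user of vbuf->data_buf: the mem-entry array rides along as extra out data and is released later by free_vbuf() via kvfree(). A sketch; the command struct fields are reconstructed from the UAPI, not shown above:

static void
virtio_gpu_cmd_resource_attach_backing(struct virtio_gpu_device *vgdev,
				       uint32_t resource_id,
				       struct virtio_gpu_mem_entry *ents,
				       unsigned int nents,
				       struct virtio_gpu_fence *fence)
{
	struct virtio_gpu_resource_attach_backing *cmd_p;
	struct virtio_gpu_vbuffer *vbuf;

	cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
	memset(cmd_p, 0, sizeof(*cmd_p));

	cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING);
	cmd_p->resource_id = cpu_to_le32(resource_id);
	cmd_p->nr_entries = cpu_to_le32(nents);

	/* ownership of ents moves to the vbuf; freed in free_vbuf() */
	vbuf->data_buf = ents;
	vbuf->data_size = sizeof(*ents) * nents;

	virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);
}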
649 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_get_display_info_cb() argument
652 (struct virtio_gpu_resp_display_info *)vbuf->resp_buf; in virtio_gpu_cmd_get_display_info_cb()
678 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_get_capset_info_cb() argument
681 (struct virtio_gpu_get_capset_info *)vbuf->buf; in virtio_gpu_cmd_get_capset_info_cb()
683 (struct virtio_gpu_resp_capset_info *)vbuf->resp_buf; in virtio_gpu_cmd_get_capset_info_cb()
699 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_capset_cb() argument
702 (struct virtio_gpu_get_capset *)vbuf->buf; in virtio_gpu_cmd_capset_cb()
704 (struct virtio_gpu_resp_capset *)vbuf->resp_buf; in virtio_gpu_cmd_capset_cb()
736 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_get_edid_cb() argument
739 (struct virtio_gpu_cmd_get_edid *)vbuf->buf; in virtio_gpu_cmd_get_edid_cb()
741 (struct virtio_gpu_resp_edid *)vbuf->resp_buf; in virtio_gpu_cmd_get_edid_cb()
765 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_get_display_info() local
774 (vgdev, &virtio_gpu_cmd_get_display_info_cb, &vbuf, in virtio_gpu_cmd_get_display_info()
781 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_display_info()
788 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_get_capset_info() local
797 (vgdev, &virtio_gpu_cmd_get_capset_info_cb, &vbuf, in virtio_gpu_cmd_get_capset_info()
804 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset_info()
813 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_get_capset() local
872 (vgdev, &virtio_gpu_cmd_capset_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_get_capset()
879 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset()
887 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_get_edids() local
901 (vgdev, &virtio_gpu_cmd_get_edid_cb, &vbuf, in virtio_gpu_cmd_get_edids()
906 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_edids()
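The get_* queries (display info, capset info, capset, EDID) all use the response-callback form: a separately allocated response buffer above the inline limit, plus a resp_cb that parses it once the buffer is reclaimed. A sketch of the display-info case; the allocation size, constants and error handling are inferred from context:

int virtio_gpu_cmd_get_display_info(struct virtio_gpu_device *vgdev)
{
	struct virtio_gpu_ctrl_hdr *cmd_p;
	struct virtio_gpu_vbuffer *vbuf;
	void *resp_buf;

	/* response is too large to live inline in the vbuf */
	resp_buf = kzalloc(sizeof(struct virtio_gpu_resp_display_info),
			   GFP_KERNEL);
	if (!resp_buf)
		return -ENOMEM;

	cmd_p = virtio_gpu_alloc_cmd_resp
		(vgdev, &virtio_gpu_cmd_get_display_info_cb, &vbuf,
		 sizeof(*cmd_p), sizeof(struct virtio_gpu_resp_display_info),
		 resp_buf);
	memset(cmd_p, 0, sizeof(*cmd_p));

	cmd_p->type = cpu_to_le32(VIRTIO_GPU_CMD_GET_DISPLAY_INFO);
	virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);
	return 0;
}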
917 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_context_create() local
919 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_create()
927 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_create()
934 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_context_destroy() local
936 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_destroy()
941 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_destroy()
950 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_context_attach_resource() local
952 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_attach_resource()
954 vbuf->objs = objs; in virtio_gpu_cmd_context_attach_resource()
959 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_attach_resource()
968 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_context_detach_resource() local
970 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_detach_resource()
972 vbuf->objs = objs; in virtio_gpu_cmd_context_detach_resource()
977 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_detach_resource()
988 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_resource_create_3d() local
990 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_create_3d()
992 vbuf->objs = objs; in virtio_gpu_cmd_resource_create_3d()
1008 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_create_3d()
1024 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_transfer_to_host_3d() local
1031 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_3d()
1034 vbuf->objs = objs; in virtio_gpu_cmd_transfer_to_host_3d()
1045 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_to_host_3d()
1059 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_transfer_from_host_3d() local
1061 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_from_host_3d()
1064 vbuf->objs = objs; in virtio_gpu_cmd_transfer_from_host_3d()
1075 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_from_host_3d()
1085 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_submit() local
1087 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_submit()
1090 vbuf->data_buf = data; in virtio_gpu_cmd_submit()
1091 vbuf->data_size = data_size; in virtio_gpu_cmd_submit()
1092 vbuf->objs = objs; in virtio_gpu_cmd_submit()
1098 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_submit()
1113 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cursor_ping() local
1117 cur_p = virtio_gpu_alloc_cursor(vgdev, &vbuf); in virtio_gpu_cursor_ping()
1119 virtio_gpu_queue_cursor(vgdev, vbuf); in virtio_gpu_cursor_ping()
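virtio_gpu_cursor_ping ties the cursor helpers together: allocate a cursor vbuf, copy in the prepared state, queue it. A sketch assuming the cursor state is kept per-output in a virtio_gpu_output; only the alloc and queue calls appear in the listing:

void virtio_gpu_cursor_ping(struct virtio_gpu_device *vgdev,
			    struct virtio_gpu_output *output)
{
	struct virtio_gpu_update_cursor *cur_p;
	struct virtio_gpu_vbuffer *vbuf;

	output->cursor.pos.scanout_id = cpu_to_le32(output->index);
	cur_p = virtio_gpu_alloc_cursor(vgdev, &vbuf);
	memcpy(cur_p, &output->cursor, sizeof(output->cursor));
	virtio_gpu_queue_cursor(vgdev, vbuf);
}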
1123 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_resource_uuid_cb() argument
1126 gem_to_virtio_gpu_obj(vbuf->objs->objs[0]); in virtio_gpu_cmd_resource_uuid_cb()
1128 (struct virtio_gpu_resp_resource_uuid *)vbuf->resp_buf; in virtio_gpu_cmd_resource_uuid_cb()
1152 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_resource_assign_uuid() local
1165 (vgdev, virtio_gpu_cmd_resource_uuid_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_resource_assign_uuid()
1172 vbuf->objs = objs; in virtio_gpu_cmd_resource_assign_uuid()
1173 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_assign_uuid()
1178 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_resource_map_cb() argument
1181 gem_to_virtio_gpu_obj(vbuf->objs->objs[0]); in virtio_gpu_cmd_resource_map_cb()
1183 (struct virtio_gpu_resp_map_info *)vbuf->resp_buf; in virtio_gpu_cmd_resource_map_cb()
1205 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_map() local
1213 (vgdev, virtio_gpu_cmd_resource_map_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_map()
1220 vbuf->objs = objs; in virtio_gpu_cmd_map()
1222 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_map()
1230 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_unmap() local
1232 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_unmap()
1238 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_unmap()
1249 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_resource_create_blob() local
1251 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_create_blob()
1263 vbuf->data_buf = ents; in virtio_gpu_cmd_resource_create_blob()
1264 vbuf->data_size = sizeof(*ents) * nents; in virtio_gpu_cmd_resource_create_blob()
1266 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_create_blob()
1279 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_set_scanout_blob() local
1282 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_set_scanout_blob()
1303 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_set_scanout_blob()