Lines matching refs: srf
52 struct vmw_surface srf; member
194 static inline uint32_t vmw_surface_dma_size(const struct vmw_surface *srf) in vmw_surface_dma_size() argument
196 return srf->metadata.num_sizes * sizeof(struct vmw_surface_dma); in vmw_surface_dma_size()
208 static inline uint32_t vmw_surface_define_size(const struct vmw_surface *srf) in vmw_surface_define_size() argument
210 return sizeof(struct vmw_surface_define) + srf->metadata.num_sizes * in vmw_surface_define_size()
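Taken together, the two size helpers above just scale a per-entry command size by metadata.num_sizes; a minimal sketch of the full helpers, assuming the multiplier truncated at line 210 is sizeof(SVGA3dSize):

static inline uint32_t vmw_surface_dma_size(const struct vmw_surface *srf)
{
	/* One SURFACE_DMA command block per mip/face size entry. */
	return srf->metadata.num_sizes * sizeof(struct vmw_surface_dma);
}

static inline uint32_t vmw_surface_define_size(const struct vmw_surface *srf)
{
	/* Fixed define command plus one SVGA3dSize per entry (assumed). */
	return sizeof(struct vmw_surface_define) +
	       srf->metadata.num_sizes * sizeof(SVGA3dSize);
}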
249 static void vmw_surface_define_encode(const struct vmw_surface *srf, in vmw_surface_define_encode() argument
259 cmd_len = sizeof(cmd->body) + srf->metadata.num_sizes * in vmw_surface_define_encode()
264 cmd->body.sid = srf->res.id; in vmw_surface_define_encode()
270 cmd->body.surfaceFlags = (SVGA3dSurface1Flags)srf->metadata.flags; in vmw_surface_define_encode()
271 cmd->body.format = srf->metadata.format; in vmw_surface_define_encode()
273 cmd->body.face[i].numMipLevels = srf->metadata.mip_levels[i]; in vmw_surface_define_encode()
277 src_size = srf->metadata.sizes; in vmw_surface_define_encode()
279 for (i = 0; i < srf->metadata.num_sizes; ++i, cmd_size++, src_size++) { in vmw_surface_define_encode()
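The loop matched at line 279 appends one SVGA3dSize per metadata entry after the fixed command body; a sketch of that tail, assuming the usual width/height/depth copy and that cmd points at the define command just filled in:

	cmd_size = (SVGA3dSize *) &cmd[1];	/* sizes follow cmd->body */
	src_size = srf->metadata.sizes;

	for (i = 0; i < srf->metadata.num_sizes; ++i, cmd_size++, src_size++) {
		cmd_size->width = src_size->width;
		cmd_size->height = src_size->height;
		cmd_size->depth = src_size->depth;
	}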
295 static void vmw_surface_dma_encode(struct vmw_surface *srf, in vmw_surface_dma_encode() argument
303 vmw_surface_get_desc(srf->metadata.format); in vmw_surface_dma_encode()
305 for (i = 0; i < srf->metadata.num_sizes; ++i) { in vmw_surface_dma_encode()
310 const struct vmw_surface_offset *cur_offset = &srf->offsets[i]; in vmw_surface_dma_encode()
311 const struct drm_vmw_size *cur_size = &srf->metadata.sizes[i]; in vmw_surface_dma_encode()
319 body->host.sid = srf->res.id; in vmw_surface_dma_encode()
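vmw_surface_dma_encode() emits one SVGA_3D_CMD_SURFACE_DMA block per size entry, pointing the guest side at the cached byte offset and the host side at the surface id, face and mip. A sketch of the per-entry body setup; cmds stands for the command array carved out of the passed-in command space, and to_surface for the bool passed as bind at line 493:

	for (i = 0; i < srf->metadata.num_sizes; ++i) {
		SVGA3dCmdSurfaceDMA *body = &cmds[i].body;
		const struct vmw_surface_offset *cur_offset = &srf->offsets[i];

		body->guest.ptr = *ptr;
		body->guest.ptr.offset += cur_offset->bo_offset;
		body->host.sid = srf->res.id;
		body->host.face = cur_offset->face;
		body->host.mipmap = cur_offset->mip;
		body->transfer = to_surface ? SVGA3D_WRITE_HOST_VRAM :
					      SVGA3D_READ_HOST_VRAM;
		/* guest.pitch (from vmw_surface_get_desc()), the copy box for
		 * srf->metadata.sizes[i] and the command suffix are omitted */
	}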
403 struct vmw_surface *srf; in vmw_legacy_srf_create() local
411 srf = vmw_res_to_srf(res); in vmw_legacy_srf_create()
435 submit_size = vmw_surface_define_size(srf); in vmw_legacy_srf_create()
442 vmw_surface_define_encode(srf, cmd); in vmw_legacy_srf_create()
482 struct vmw_surface *srf = vmw_res_to_srf(res); in vmw_legacy_srf_dma() local
487 submit_size = vmw_surface_dma_size(srf); in vmw_legacy_srf_dma()
493 vmw_surface_dma_encode(srf, cmd, &ptr, bind); in vmw_legacy_srf_dma()
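Both legacy paths follow the same pattern: size the command with the helper, reserve that much command space, encode in place, and commit. A sketch of the define side, assuming the driver's usual reserve/commit helpers (named VMW_CMD_RESERVE()/vmw_cmd_commit() in recent kernels):

	submit_size = vmw_surface_define_size(srf);
	cmd = VMW_CMD_RESERVE(dev_priv, submit_size);
	if (unlikely(!cmd)) {
		ret = -ENOMEM;
		goto out_no_fifo;	/* error path paraphrased */
	}

	vmw_surface_define_encode(srf, cmd);
	vmw_cmd_commit(dev_priv, submit_size);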
608 struct vmw_surface *srf, in vmw_surface_init() argument
612 struct vmw_resource *res = &srf->res; in vmw_surface_init()
629 INIT_LIST_HEAD(&srf->view_list); in vmw_surface_init()
647 prime.base)->srf.res); in vmw_user_surface_base_to_res()
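The prime.base match at line 647 only makes sense given the embedding noted at line 52: struct vmw_surface sits inside struct vmw_user_surface, so a TTM base object can be walked back to the resource. A sketch of that container_of() chain, with unrelated members omitted:

struct vmw_user_surface {
	struct ttm_prime_object prime;	/* holds the ttm_base_object */
	struct vmw_surface srf;		/* the embedded surface, line 52 */
	/* other members omitted */
};

static struct vmw_resource *
vmw_user_surface_base_to_res(struct ttm_base_object *base)
{
	return &(container_of(base, struct vmw_user_surface,
			      prime.base)->srf.res);
}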
657 struct vmw_surface *srf = vmw_res_to_srf(res); in vmw_user_surface_free() local
659 container_of(srf, struct vmw_user_surface, srf); in vmw_user_surface_free()
664 kfree(srf->offsets); in vmw_user_surface_free()
665 kfree(srf->metadata.sizes); in vmw_user_surface_free()
666 kfree(srf->snooper.image); in vmw_user_surface_free()
684 struct vmw_resource *res = &user_srf->srf.res; in vmw_user_surface_base_release()
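vmw_user_surface_free() walks the same embedding in the other direction and releases the per-surface allocations made by the define paths. A sketch, assuming ttm_prime_object_kfree() as the final free and omitting the rest of the teardown:

static void vmw_user_surface_free(struct vmw_resource *res)
{
	struct vmw_surface *srf = vmw_res_to_srf(res);
	struct vmw_user_surface *user_srf =
		container_of(srf, struct vmw_user_surface, srf);

	kfree(srf->offsets);
	kfree(srf->metadata.sizes);
	kfree(srf->snooper.image);
	ttm_prime_object_kfree(user_srf, prime);
}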
723 struct vmw_surface *srf; in vmw_surface_define_ioctl() local
764 srf = &user_srf->srf; in vmw_surface_define_ioctl()
765 metadata = &srf->metadata; in vmw_surface_define_ioctl()
766 res = &srf->res; in vmw_surface_define_ioctl()
784 srf->offsets = kmalloc_array(metadata->num_sizes, sizeof(*srf->offsets), in vmw_surface_define_ioctl()
786 if (unlikely(!srf->offsets)) { in vmw_surface_define_ioctl()
791 metadata->base_size = *srf->metadata.sizes; in vmw_surface_define_ioctl()
798 cur_offset = srf->offsets; in vmw_surface_define_ioctl()
826 srf->snooper.image = kzalloc(cursor_size_bytes, GFP_KERNEL); in vmw_surface_define_ioctl()
827 if (!srf->snooper.image) { in vmw_surface_define_ioctl()
833 srf->snooper.image = NULL; in vmw_surface_define_ioctl()
846 ret = vmw_surface_init(dev_priv, srf, vmw_user_surface_free); in vmw_surface_define_ioctl()
874 tmp = vmw_resource_reference(&srf->res); in vmw_surface_define_ioctl()
890 kfree(srf->offsets); in vmw_surface_define_ioctl()
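In the legacy define ioctl the per-mip offsets array is sized from num_sizes and the first entry of metadata.sizes becomes base_size; a sketch of that setup, with the offset bookkeeping loop summarized and the error label paraphrased:

	srf->offsets = kmalloc_array(metadata->num_sizes,
				     sizeof(*srf->offsets), GFP_KERNEL);
	if (unlikely(!srf->offsets)) {
		ret = -ENOMEM;
		goto out_no_offsets;	/* label paraphrased */
	}

	metadata->base_size = *srf->metadata.sizes;
	cur_offset = srf->offsets;
	/* loop over faces and mips filling cur_offset->face/mip/bo_offset and
	 * accumulating the total backing-store size (lines 798 onward) */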
992 struct vmw_surface *srf; in vmw_surface_reference_ioctl() local
1004 srf = &user_srf->srf; in vmw_surface_reference_ioctl()
1007 rep->flags = (uint32_t)srf->metadata.flags; in vmw_surface_reference_ioctl()
1008 rep->format = srf->metadata.format; in vmw_surface_reference_ioctl()
1009 memcpy(rep->mip_levels, srf->metadata.mip_levels, in vmw_surface_reference_ioctl()
1010 sizeof(srf->metadata.mip_levels)); in vmw_surface_reference_ioctl()
1015 ret = copy_to_user(user_sizes, &srf->metadata.base_size, in vmw_surface_reference_ioctl()
1016 sizeof(srf->metadata.base_size)); in vmw_surface_reference_ioctl()
1019 srf->metadata.num_sizes); in vmw_surface_reference_ioctl()
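The reference ioctl copies the cached metadata straight back to userspace; a sketch of the copy-out around lines 1007-1019, assuming the usual -EFAULT handling:

	rep->flags = (uint32_t)srf->metadata.flags;
	rep->format = srf->metadata.format;
	memcpy(rep->mip_levels, srf->metadata.mip_levels,
	       sizeof(srf->metadata.mip_levels));

	if (user_sizes)
		ret = copy_to_user(user_sizes, &srf->metadata.base_size,
				   sizeof(srf->metadata.base_size));
	if (unlikely(ret != 0)) {
		/* the driver logs user_sizes and srf->metadata.num_sizes
		 * (line 1019) before dropping the reference */
		ret = -EFAULT;
	}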
1038 struct vmw_surface *srf = vmw_res_to_srf(res); in vmw_gb_surface_create() local
1039 struct vmw_surface_metadata *metadata = &srf->metadata; in vmw_gb_surface_create()
1105 cmd4->body.sid = srf->res.id; in vmw_gb_surface_create()
1121 cmd3->body.sid = srf->res.id; in vmw_gb_surface_create()
1136 cmd2->body.sid = srf->res.id; in vmw_gb_surface_create()
1149 cmd->body.sid = srf->res.id; in vmw_gb_surface_create()
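The four sid assignments in vmw_gb_surface_create() correspond to the four generations of the DEFINE_GB_SURFACE command; the driver picks the newest variant the device supports and stamps it with the resource id. A rough sketch, with the capability checks paraphrased as supports_v2..supports_v4:

	/* Capability checks paraphrased; see the real function for details. */
	if (supports_v4)
		cmd4->body.sid = srf->res.id;	/* DEFINE_GB_SURFACE_V4 */
	else if (supports_v3)
		cmd3->body.sid = srf->res.id;	/* ..._V3 */
	else if (supports_v2)
		cmd2->body.sid = srf->res.id;	/* ..._V2 */
	else
		cmd->body.sid = srf->res.id;	/* original command */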
1287 struct vmw_surface *srf = vmw_res_to_srf(res); in vmw_gb_surface_destroy() local
1297 vmw_view_surface_list_destroy(dev_priv, &srf->view_list); in vmw_gb_surface_destroy()
1430 struct vmw_surface *srf; in vmw_gb_surface_define_internal() local
1494 ret = vmw_gb_surface_define(dev_priv, &metadata, &srf); in vmw_gb_surface_define_internal()
1500 user_srf = container_of(srf, struct vmw_user_surface, srf); in vmw_gb_surface_define_internal()
1504 res = &user_srf->srf.res; in vmw_gb_surface_define_internal()
1602 struct vmw_surface *srf; in vmw_gb_surface_reference_internal() local
1615 srf = &user_srf->srf; in vmw_gb_surface_reference_internal()
1616 if (!srf->res.backup) { in vmw_gb_surface_reference_internal()
1620 metadata = &srf->metadata; in vmw_gb_surface_reference_internal()
1623 ret = drm_gem_handle_create(file_priv, &srf->res.backup->base.base, in vmw_gb_surface_reference_internal()
1642 rep->crep.backup_size = srf->res.backup_size; in vmw_gb_surface_reference_internal()
1645 drm_vma_node_offset_addr(&srf->res.backup->base.base.vma_node); in vmw_gb_surface_reference_internal()
1646 rep->crep.buffer_size = srf->res.backup->base.base.size; in vmw_gb_surface_reference_internal()
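For guest-backed surfaces the reference path also hands userspace a GEM handle for the backing buffer plus its mmap offset; a sketch assembled from the lines above, with unrelated fields and the exact error label omitted:

	ret = drm_gem_handle_create(file_priv, &srf->res.backup->base.base,
				    &backup_handle);
	if (unlikely(ret != 0))
		goto out;		/* error path paraphrased */

	rep->crep.backup_size = srf->res.backup_size;
	rep->crep.buffer_handle = backup_handle;
	rep->crep.buffer_map_handle =
		drm_vma_node_offset_addr(&srf->res.backup->base.base.vma_node);
	rep->crep.buffer_size = srf->res.backup->base.base.size;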
1817 struct vmw_surface *srf = vmw_res_to_srf(res); in vmw_surface_dirty_range_add() local
1823 if (srf->metadata.format == SVGA3D_BUFFER) in vmw_surface_dirty_range_add()
1911 struct vmw_surface *srf = vmw_res_to_srf(res); in vmw_surface_dirty_alloc() local
1912 const struct vmw_surface_metadata *metadata = &srf->metadata; in vmw_surface_dirty_alloc()
2012 struct vmw_surface *srf; in vmw_gb_surface_define() local
2059 *srf_out = &user_srf->srf; in vmw_gb_surface_define()
2063 srf = &user_srf->srf; in vmw_gb_surface_define()
2064 srf->metadata = *req; in vmw_gb_surface_define()
2065 srf->offsets = NULL; in vmw_gb_surface_define()
2067 metadata = &srf->metadata; in vmw_gb_surface_define()
2077 srf->res.backup_size = in vmw_gb_surface_define()
2086 srf->res.backup_size += sizeof(SVGA3dDXSOState); in vmw_gb_surface_define()
2106 ret = vmw_surface_init(dev_priv, srf, vmw_user_surface_free); in vmw_gb_surface_define()
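vmw_gb_surface_define() sizes the backing store from the metadata and pads stream-output surfaces with room for the SO state the device appends; a sketch of the backup-size computation at lines 2077-2086, assuming the driver's serialized-size helper and a num_layers value derived from array_size or cube faces:

	srf->res.backup_size =
		vmw_surface_get_serialized_size_extended(metadata->format,
							 metadata->base_size,
							 metadata->mip_levels[0],
							 num_layers,
							 metadata->multisample_count);

	if (metadata->flags & SVGA3D_SURFACE_BIND_STREAM_OUTPUT)
		srf->res.backup_size += sizeof(SVGA3dDXSOState);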