Lines matching refs: bo

47 	struct ttm_buffer_object *bo;  member
131 int ttm_bo_move_memcpy(struct ttm_buffer_object *bo, in ttm_bo_move_memcpy() argument
135 struct ttm_device *bdev = bo->bdev; in ttm_bo_move_memcpy()
137 ttm_manager_type(bo->bdev, dst_mem->mem_type); in ttm_bo_move_memcpy()
138 struct ttm_tt *ttm = bo->ttm; in ttm_bo_move_memcpy()
139 struct ttm_resource *src_mem = bo->resource; in ttm_bo_move_memcpy()
159 dst_iter = ttm_kmap_iter_tt_init(&_dst_iter.tt, bo->ttm); in ttm_bo_move_memcpy()
165 src_iter = ttm_kmap_iter_tt_init(&_src_iter.tt, bo->ttm); in ttm_bo_move_memcpy()
177 ttm_bo_move_sync_cleanup(bo, dst_mem); in ttm_bo_move_memcpy()
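
ttm_bo_move_memcpy() is the CPU-copy fallback a driver reaches for when no hardware copy engine can service a move. A minimal sketch of a driver ->move() hook that falls back to it; my_driver_copy_possible() and my_driver_copy() are hypothetical helpers, only ttm_bo_move_memcpy() is the real entry point listed above:

#include <drm/ttm/ttm_bo_driver.h>

static int my_driver_bo_move(struct ttm_buffer_object *bo, bool evict,
			     struct ttm_operation_ctx *ctx,
			     struct ttm_resource *new_mem,
			     struct ttm_place *hop)
{
	/* Prefer the hardware copy engine (hypothetical helpers). */
	if (my_driver_copy_possible(bo, new_mem))
		return my_driver_copy(bo, ctx, new_mem);

	/* Fall back to a CPU memcpy through TTM's kmap iterators. */
	return ttm_bo_move_memcpy(bo, ctx, new_mem);
}
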
187 static void ttm_transfered_destroy(struct ttm_buffer_object *bo) in ttm_transfered_destroy() argument
191 fbo = container_of(bo, struct ttm_transfer_obj, base); in ttm_transfered_destroy()
193 ttm_bo_put(fbo->bo); in ttm_transfered_destroy()
212 static int ttm_buffer_object_transfer(struct ttm_buffer_object *bo, in ttm_buffer_object_transfer() argument
222 fbo->base = *bo; in ttm_buffer_object_transfer()
224 ttm_bo_get(bo); in ttm_buffer_object_transfer()
225 fbo->bo = bo; in ttm_buffer_object_transfer()
241 if (bo->type != ttm_bo_type_sg) in ttm_buffer_object_transfer()
255 pgprot_t ttm_io_prot(struct ttm_buffer_object *bo, struct ttm_resource *res, in ttm_io_prot() argument
261 man = ttm_manager_type(bo->bdev, res->mem_type); in ttm_io_prot()
262 caching = man->use_tt ? bo->ttm->caching : res->bus.caching; in ttm_io_prot()
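
ttm_io_prot() folds the caching mode of the BO's current placement (cached, write-combined or uncached, taken from the TT for system placements or from the bus for iomem) into a base pgprot_t. A sketch of how a CPU fault handler might derive its final page protection with it, assuming bo and vma are in scope:

#include <linux/mm.h>

static pgprot_t my_fault_prot(struct ttm_buffer_object *bo,
			      struct vm_area_struct *vma)
{
	/* Start from the VMA's protection, then let ttm_io_prot() apply
	 * the caching attributes of the current placement. */
	pgprot_t prot = vm_get_page_prot(vma->vm_flags);

	return ttm_io_prot(bo, bo->resource, prot);
}
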
268 static int ttm_bo_ioremap(struct ttm_buffer_object *bo, in ttm_bo_ioremap() argument
273 struct ttm_resource *mem = bo->resource; in ttm_bo_ioremap()
275 if (bo->resource->bus.addr) { in ttm_bo_ioremap()
277 map->virtual = ((u8 *)bo->resource->bus.addr) + offset; in ttm_bo_ioremap()
279 resource_size_t res = bo->resource->bus.offset + offset; in ttm_bo_ioremap()
294 static int ttm_bo_kmap_ttm(struct ttm_buffer_object *bo, in ttm_bo_kmap_ttm() argument
299 struct ttm_resource *mem = bo->resource; in ttm_bo_kmap_ttm()
304 struct ttm_tt *ttm = bo->ttm; in ttm_bo_kmap_ttm()
310 ret = ttm_tt_populate(bo->bdev, ttm, &ctx); in ttm_bo_kmap_ttm()
328 prot = ttm_io_prot(bo, mem, PAGE_KERNEL); in ttm_bo_kmap_ttm()
336 int ttm_bo_kmap(struct ttm_buffer_object *bo, in ttm_bo_kmap() argument
344 map->bo = bo; in ttm_bo_kmap()
345 if (num_pages > bo->resource->num_pages) in ttm_bo_kmap()
347 if ((start_page + num_pages) > bo->resource->num_pages) in ttm_bo_kmap()
350 ret = ttm_mem_io_reserve(bo->bdev, bo->resource); in ttm_bo_kmap()
353 if (!bo->resource->bus.is_iomem) { in ttm_bo_kmap()
354 return ttm_bo_kmap_ttm(bo, start_page, num_pages, map); in ttm_bo_kmap()
358 return ttm_bo_ioremap(bo, offset, size, map); in ttm_bo_kmap()
382 ttm_mem_io_free(map->bo->bdev, map->bo->resource); in ttm_bo_kunmap()
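
ttm_bo_kmap() maps a page range of a BO for CPU access and transparently handles both iomem and system placements; ttm_kmap_obj_virtual() then reports which kind of pointer the caller got. A hedged usage sketch (my_write_first_page() is illustrative; the caller is assumed to hold the BO's reservation):

#include <linux/io.h>
#include <drm/ttm/ttm_bo_api.h>

static int my_write_first_page(struct ttm_buffer_object *bo, u32 value)
{
	struct ttm_bo_kmap_obj map;
	bool is_iomem;
	u32 *ptr;
	int ret;

	ret = ttm_bo_kmap(bo, 0, 1, &map);
	if (ret)
		return ret;

	/* The mapping may be I/O memory; ttm_kmap_obj_virtual() says
	 * which, so use an MMIO accessor for the iomem case. */
	ptr = ttm_kmap_obj_virtual(&map, &is_iomem);
	if (is_iomem)
		writel(value, (void __iomem *)ptr);
	else
		*ptr = value;

	ttm_bo_kunmap(&map);
	return 0;
}
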
388 int ttm_bo_vmap(struct ttm_buffer_object *bo, struct dma_buf_map *map) in ttm_bo_vmap() argument
390 struct ttm_resource *mem = bo->resource; in ttm_bo_vmap()
393 ret = ttm_mem_io_reserve(bo->bdev, mem); in ttm_bo_vmap()
404 bo->base.size); in ttm_bo_vmap()
408 bo->base.size); in ttm_bo_vmap()
411 vaddr_iomem = ioremap(mem->bus.offset, bo->base.size); in ttm_bo_vmap()
423 struct ttm_tt *ttm = bo->ttm; in ttm_bo_vmap()
427 ret = ttm_tt_populate(bo->bdev, ttm, &ctx); in ttm_bo_vmap()
435 prot = ttm_io_prot(bo, mem, PAGE_KERNEL); in ttm_bo_vmap()
447 void ttm_bo_vunmap(struct ttm_buffer_object *bo, struct dma_buf_map *map) in ttm_bo_vunmap() argument
449 struct ttm_resource *mem = bo->resource; in ttm_bo_vunmap()
460 ttm_mem_io_free(bo->bdev, bo->resource); in ttm_bo_vunmap()
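
ttm_bo_vmap() maps the whole BO into a struct dma_buf_map, which carries the iomem-vs-sysmem distinction so callers can use the dma_buf_map helpers instead of branching themselves. A sketch, assuming the caller holds the BO's reservation (my_upload() is illustrative):

#include <linux/dma-buf-map.h>

static int my_upload(struct ttm_buffer_object *bo,
		     const void *data, size_t size)
{
	struct dma_buf_map map;
	int ret;

	ret = ttm_bo_vmap(bo, &map);
	if (ret)
		return ret;

	/* dma_buf_map_memcpy_to() picks memcpy() or memcpy_toio()
	 * depending on whether the mapping is I/O memory. */
	dma_buf_map_memcpy_to(&map, data, size);

	ttm_bo_vunmap(bo, &map);
	return 0;
}
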
464 static int ttm_bo_wait_free_node(struct ttm_buffer_object *bo, in ttm_bo_wait_free_node() argument
468 ret = ttm_bo_wait(bo, false, false); in ttm_bo_wait_free_node()
473 ttm_bo_tt_destroy(bo); in ttm_bo_wait_free_node()
474 ttm_resource_free(bo, &bo->resource); in ttm_bo_wait_free_node()
478 static int ttm_bo_move_to_ghost(struct ttm_buffer_object *bo, in ttm_bo_move_to_ghost() argument
493 dma_fence_put(bo->moving); in ttm_bo_move_to_ghost()
494 bo->moving = dma_fence_get(fence); in ttm_bo_move_to_ghost()
496 ret = ttm_buffer_object_transfer(bo, &ghost_obj); in ttm_bo_move_to_ghost()
511 bo->ttm = NULL; in ttm_bo_move_to_ghost()
512 bo->resource = NULL; in ttm_bo_move_to_ghost()
519 static void ttm_bo_move_pipeline_evict(struct ttm_buffer_object *bo, in ttm_bo_move_pipeline_evict() argument
522 struct ttm_device *bdev = bo->bdev; in ttm_bo_move_pipeline_evict()
525 from = ttm_manager_type(bdev, bo->resource->mem_type); in ttm_bo_move_pipeline_evict()
538 ttm_resource_free(bo, &bo->resource); in ttm_bo_move_pipeline_evict()
540 dma_fence_put(bo->moving); in ttm_bo_move_pipeline_evict()
541 bo->moving = dma_fence_get(fence); in ttm_bo_move_pipeline_evict()
544 int ttm_bo_move_accel_cleanup(struct ttm_buffer_object *bo, in ttm_bo_move_accel_cleanup() argument
550 struct ttm_device *bdev = bo->bdev; in ttm_bo_move_accel_cleanup()
551 struct ttm_resource_manager *from = ttm_manager_type(bdev, bo->resource->mem_type); in ttm_bo_move_accel_cleanup()
555 dma_resv_add_excl_fence(bo->base.resv, fence); in ttm_bo_move_accel_cleanup()
557 ret = ttm_bo_move_to_ghost(bo, fence, man->use_tt); in ttm_bo_move_accel_cleanup()
559 ttm_bo_move_pipeline_evict(bo, fence); in ttm_bo_move_accel_cleanup()
561 ret = ttm_bo_wait_free_node(bo, man->use_tt); in ttm_bo_move_accel_cleanup()
566 ttm_bo_assign_mem(bo, new_mem); in ttm_bo_move_accel_cleanup()
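
ttm_bo_move_accel_cleanup() is what a driver calls after queuing an asynchronous copy on its engine: depending on evict/pipeline it parks the old storage in a ghost object until the fence signals, pipelines the eviction, or simply waits. A hedged sketch of the call site; my_driver_schedule_copy() is a hypothetical job submission returning the copy fence:

static int my_driver_move_async(struct ttm_buffer_object *bo, bool evict,
				struct ttm_resource *new_mem)
{
	struct dma_fence *fence;
	int ret;

	fence = my_driver_schedule_copy(bo, bo->resource, new_mem);
	if (IS_ERR(fence))
		return PTR_ERR(fence);

	/* pipeline=true lets TTM complete the move without stalling;
	 * the fence keeps the old storage alive until the blit is done. */
	ret = ttm_bo_move_accel_cleanup(bo, fence, evict,
					true /* pipeline */, new_mem);
	dma_fence_put(fence);
	return ret;
}
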
583 int ttm_bo_pipeline_gutting(struct ttm_buffer_object *bo) in ttm_bo_pipeline_gutting() argument
591 ret = ttm_resource_alloc(bo, &sys_mem, &sys_res); in ttm_bo_pipeline_gutting()
596 ret = ttm_bo_wait(bo, false, true); in ttm_bo_pipeline_gutting()
598 if (!bo->ttm) { in ttm_bo_pipeline_gutting()
600 ret = ttm_tt_create(bo, true); in ttm_bo_pipeline_gutting()
604 ttm_tt_unpopulate(bo->bdev, bo->ttm); in ttm_bo_pipeline_gutting()
605 if (bo->type == ttm_bo_type_device) in ttm_bo_pipeline_gutting()
606 ttm_tt_mark_for_clear(bo->ttm); in ttm_bo_pipeline_gutting()
608 ttm_resource_free(bo, &bo->resource); in ttm_bo_pipeline_gutting()
609 ttm_bo_assign_mem(bo, sys_res); in ttm_bo_pipeline_gutting()
621 ttm = bo->ttm; in ttm_bo_pipeline_gutting()
622 bo->ttm = NULL; in ttm_bo_pipeline_gutting()
623 ret = ttm_tt_create(bo, true); in ttm_bo_pipeline_gutting()
624 swap(bo->ttm, ttm); in ttm_bo_pipeline_gutting()
628 ret = ttm_buffer_object_transfer(bo, &ghost); in ttm_bo_pipeline_gutting()
632 ret = dma_resv_copy_fences(&ghost->base._resv, bo->base.resv); in ttm_bo_pipeline_gutting()
635 ttm_bo_wait(bo, false, false); in ttm_bo_pipeline_gutting()
639 bo->ttm = ttm; in ttm_bo_pipeline_gutting()
640 bo->resource = NULL; in ttm_bo_pipeline_gutting()
641 ttm_bo_assign_mem(bo, sys_res); in ttm_bo_pipeline_gutting()
645 ttm_tt_destroy(bo->bdev, ttm); in ttm_bo_pipeline_gutting()
648 ttm_resource_free(bo, &sys_res); in ttm_bo_pipeline_gutting()
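
ttm_bo_pipeline_gutting() discards a BO's backing store without stalling on pending GPU work: an idle BO is gutted in place, while a busy one hands its ttm_tt to a ghost object that frees it once the fences signal, leaving the BO on an empty system-memory resource. A minimal call sketch, assuming the BO is reserved and its contents are expendable (the "purgeable" policy here is hypothetical):

static int my_purge_bo(struct ttm_buffer_object *bo)
{
	int ret;

	dma_resv_assert_held(bo->base.resv);

	ret = ttm_bo_pipeline_gutting(bo);
	if (ret)
		return ret;

	/* On success the BO sits on a fresh system resource; for
	 * ttm_bo_type_device BOs the pages are marked to be cleared
	 * on the next populate. */
	return 0;
}
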