Lines matching refs:bdev (uses of the struct ttm_device *bdev pointer across the TTM device init, teardown and swapout paths)
135 int ttm_device_prepare_hibernation(struct ttm_device *bdev) in ttm_device_prepare_hibernation() argument
144 ret = ttm_device_swapout(bdev, &ctx, GFP_KERNEL); in ttm_device_prepare_hibernation()
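The two ttm_device_prepare_hibernation() lines suggest a simple pre-hibernation pattern: keep calling ttm_device_swapout() with GFP_KERNEL until the device has nothing left to push out. A minimal sketch of that loop, assuming ttm_device_swapout() returns the amount swapped out (positive), 0 when idle, or a negative errno, and assuming non-interruptible ttm_operation_ctx flags; neither assumption is visible in the listing:

        /* Sketch only: the ctx flags and the retry condition are assumptions;
         * the listing shows just the ttm_device_swapout(bdev, &ctx, GFP_KERNEL)
         * call inside ttm_device_prepare_hibernation(). */
        static int prepare_hibernation_sketch(struct ttm_device *bdev)
        {
                struct ttm_operation_ctx ctx = {
                        .interruptible = false, /* hibernation prep, don't abort on signals */
                        .no_wait_gpu = false,   /* waiting for the GPU is fine here */
                };
                int ret;

                do {
                        /* Positive return: progress was made, try again. */
                        ret = ttm_device_swapout(bdev, &ctx, GFP_KERNEL);
                } while (ret > 0);

                return ret; /* 0 when fully swapped out, negative errno on failure */
        }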
157 struct ttm_device *bdev; in ttm_global_swapout() local
161 list_for_each_entry(bdev, &glob->device_list, device_list) { in ttm_global_swapout()
162 ret = ttm_device_swapout(bdev, ctx, gfp_flags); in ttm_global_swapout()
164 list_move_tail(&bdev->device_list, &glob->device_list); in ttm_global_swapout()
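Read together, the ttm_global_swapout() lines describe a round-robin pass over every registered TTM device: each device gets one ttm_device_swapout() attempt, and a device that actually makes progress is rotated to the tail of glob->device_list so the next pass starts elsewhere. A sketch of that walk; the locking that protects the global device list is not visible in the listing and is omitted here:

        static int global_swapout_sketch(struct ttm_global *glob,
                                         struct ttm_operation_ctx *ctx,
                                         gfp_t gfp_flags)
        {
                struct ttm_device *bdev;
                int ret = 0;

                list_for_each_entry(bdev, &glob->device_list, device_list) {
                        ret = ttm_device_swapout(bdev, ctx, gfp_flags);
                        if (ret > 0) {
                                /* This device freed something: rotate it to the
                                 * tail so other devices are tried first next time. */
                                list_move_tail(&bdev->device_list,
                                               &glob->device_list);
                                break;
                        }
                }

                return ret;
        }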
172 int ttm_device_swapout(struct ttm_device *bdev, struct ttm_operation_ctx *ctx, in ttm_device_swapout() argument
180 man = ttm_manager_type(bdev, i); in ttm_device_swapout()
184 lret = ttm_bo_swapout(bdev, ctx, man, gfp_flags, 1); in ttm_device_swapout()
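ttm_device_swapout() itself walks the device's resource managers and asks ttm_bo_swapout() to reclaim from each of them. A sketch of that loop; the TTM_PL_SYSTEM..TTM_NUM_MEM_TYPES bounds and the man->use_tt filter are assumptions about which managers are worth visiting, since the listing only shows the ttm_manager_type() lookup and the ttm_bo_swapout() call:

        static int device_swapout_sketch(struct ttm_device *bdev,
                                         struct ttm_operation_ctx *ctx,
                                         gfp_t gfp_flags)
        {
                struct ttm_resource_manager *man;
                unsigned int i;
                s64 lret;

                for (i = TTM_PL_SYSTEM; i < TTM_NUM_MEM_TYPES; ++i) {
                        /* Only managers whose buffers are backed by ttm_tt
                         * pages have anything to swap out. */
                        man = ttm_manager_type(bdev, i);
                        if (!man || !man->use_tt)
                                continue;

                        /* The trailing 1 is taken verbatim from the listing. */
                        lret = ttm_bo_swapout(bdev, ctx, man, gfp_flags, 1);
                        if (lret)
                                return lret; /* pages freed (>0) or error (<0) */
                }

                return 0;
        }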
208 int ttm_device_init(struct ttm_device *bdev, const struct ttm_device_funcs *funcs, in ttm_device_init() argument
223 bdev->wq = alloc_workqueue("ttm", in ttm_device_init()
225 if (!bdev->wq) { in ttm_device_init()
230 bdev->funcs = funcs; in ttm_device_init()
232 ttm_sys_man_init(bdev); in ttm_device_init()
239 ttm_pool_init(&bdev->pool, dev, nid, use_dma_alloc, use_dma32); in ttm_device_init()
241 bdev->vma_manager = vma_manager; in ttm_device_init()
242 spin_lock_init(&bdev->lru_lock); in ttm_device_init()
243 INIT_LIST_HEAD(&bdev->unevictable); in ttm_device_init()
244 bdev->dev_mapping = mapping; in ttm_device_init()
246 list_add_tail(&bdev->device_list, &glob->device_list); in ttm_device_init()
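On the bring-up side, the ttm_device_init() lines show what a driver supplies: the callback table (funcs), the backing struct device, the address_space stored as dev_mapping, the DRM VMA offset manager, and the two DMA flags that end up in ttm_pool_init(). A hypothetical caller, where the mydrv_* names, the empty callback table and the hard-coded DMA flags are placeholders rather than anything taken from the listing:

        #include <drm/drm_device.h>
        #include <drm/ttm/ttm_device.h>

        struct mydrv_device {
                struct drm_device drm;
                struct ttm_device bdev;
        };

        /* Driver callbacks (ttm_tt_create, evict_flags, ...) elided. */
        static const struct ttm_device_funcs mydrv_ttm_funcs = { };

        static int mydrv_ttm_init(struct mydrv_device *mdev)
        {
                int ret;

                /* Real drivers derive use_dma_alloc/use_dma32 from the
                 * device's DMA capabilities; true/false here are placeholders. */
                ret = ttm_device_init(&mdev->bdev, &mydrv_ttm_funcs,
                                      mdev->drm.dev,
                                      mdev->drm.anon_inode->i_mapping,
                                      mdev->drm.vma_offset_manager,
                                      true /* use_dma_alloc */,
                                      false /* use_dma32 */);
                if (ret)
                        return ret;

                /* Driver-specific managers (VRAM, GTT, ...) are registered
                 * afterwards with ttm_set_driver_manager(). */
                return 0;
        }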
253 void ttm_device_fini(struct ttm_device *bdev) in ttm_device_fini() argument
259 list_del(&bdev->device_list); in ttm_device_fini()
262 drain_workqueue(bdev->wq); in ttm_device_fini()
263 destroy_workqueue(bdev->wq); in ttm_device_fini()
265 man = ttm_manager_type(bdev, TTM_PL_SYSTEM); in ttm_device_fini()
267 ttm_set_driver_manager(bdev, TTM_PL_SYSTEM, NULL); in ttm_device_fini()
269 spin_lock(&bdev->lru_lock); in ttm_device_fini()
273 spin_unlock(&bdev->lru_lock); in ttm_device_fini()
275 ttm_pool_fini(&bdev->pool); in ttm_device_fini()
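The ttm_device_fini() lines show the reverse: the device is unlinked from the global list, bdev->wq is drained and destroyed, the built-in TTM_PL_SYSTEM manager is dropped via ttm_set_driver_manager(..., NULL), and bdev->pool is torn down. Any managers the driver registered itself therefore have to be retired before this call; a hypothetical ordering, continuing the mydrv_device sketch above and using TTM_PL_VRAM purely as an example:

        #include <drm/drm_print.h>
        #include <drm/ttm/ttm_resource.h>

        static void mydrv_ttm_fini(struct mydrv_device *mdev)
        {
                struct ttm_resource_manager *man =
                        ttm_manager_type(&mdev->bdev, TTM_PL_VRAM);
                int ret;

                /* Stop new allocations, then move everything out of VRAM. */
                ttm_resource_manager_set_used(man, false);
                ret = ttm_resource_manager_evict_all(&mdev->bdev, man);
                if (ret)
                        drm_err(&mdev->drm, "VRAM eviction failed: %d\n", ret);

                ttm_resource_manager_cleanup(man);
                ttm_set_driver_manager(&mdev->bdev, TTM_PL_VRAM, NULL);

                /* Now the core can drop its workqueue, the system manager
                 * and the page pool. */
                ttm_device_fini(&mdev->bdev);
        }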
280 static void ttm_device_clear_lru_dma_mappings(struct ttm_device *bdev, in ttm_device_clear_lru_dma_mappings() argument
285 spin_lock(&bdev->lru_lock); in ttm_device_clear_lru_dma_mappings()
294 spin_unlock(&bdev->lru_lock); in ttm_device_clear_lru_dma_mappings()
297 ttm_tt_unpopulate(bo->bdev, bo->ttm); in ttm_device_clear_lru_dma_mappings()
300 spin_lock(&bdev->lru_lock); in ttm_device_clear_lru_dma_mappings()
302 spin_unlock(&bdev->lru_lock); in ttm_device_clear_lru_dma_mappings()
305 void ttm_device_clear_dma_mappings(struct ttm_device *bdev) in ttm_device_clear_dma_mappings() argument
310 ttm_device_clear_lru_dma_mappings(bdev, &bdev->unevictable); in ttm_device_clear_dma_mappings()
313 man = ttm_manager_type(bdev, i); in ttm_device_clear_dma_mappings()
318 ttm_device_clear_lru_dma_mappings(bdev, &man->lru[j]); in ttm_device_clear_dma_mappings()
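Finally, ttm_device_clear_dma_mappings() visits every place a buffer with populated ttm_tt pages can sit: the device's unevictable list first, then each LRU bucket of every manager backed by ttm_tt. The per-list helper apparently drops bdev->lru_lock around ttm_tt_unpopulate() and retakes it afterwards (the unlock/relock pattern in the lines above), since unpopulating pages can block. A sketch of the outer walk; the man->use_tt filter and the TTM_MAX_BO_PRIORITY bound on the lru[] array are assumptions, as the listing only shows the per-list calls:

        static void clear_dma_mappings_sketch(struct ttm_device *bdev)
        {
                struct ttm_resource_manager *man;
                unsigned int i, j;

                /* Buffers that never appear on an eviction LRU still need
                 * their DMA mappings torn down. */
                ttm_device_clear_lru_dma_mappings(bdev, &bdev->unevictable);

                for (i = TTM_PL_SYSTEM; i < TTM_NUM_MEM_TYPES; ++i) {
                        man = ttm_manager_type(bdev, i);
                        if (!man || !man->use_tt)
                                continue;

                        for (j = 0; j < TTM_MAX_BO_PRIORITY; ++j)
                                ttm_device_clear_lru_dma_mappings(bdev,
                                                                  &man->lru[j]);
                }
        }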