Lines Matching refs:gobj_read
418 struct drm_gem_object **gobj_read = NULL; in amdgpu_userq_signal_ioctl() local
460 gobj_read = kmalloc_array(num_read_bo_handles, sizeof(*gobj_read), GFP_KERNEL); in amdgpu_userq_signal_ioctl()
461 if (!gobj_read) { in amdgpu_userq_signal_ioctl()
467 gobj_read[rentry] = drm_gem_object_lookup(filp, bo_handles_read[rentry]); in amdgpu_userq_signal_ioctl()
468 if (!gobj_read[rentry]) { in amdgpu_userq_signal_ioctl()
532 r = drm_exec_prepare_array(&exec, gobj_read, num_read_bo_handles, 1); in amdgpu_userq_signal_ioctl()
548 if (!gobj_read || !gobj_read[i]->resv) in amdgpu_userq_signal_ioctl()
551 dma_resv_add_fence(gobj_read[i]->resv, fence, in amdgpu_userq_signal_ioctl()
580 drm_gem_object_put(gobj_read[rentry]); in amdgpu_userq_signal_ioctl()
581 kfree(gobj_read); in amdgpu_userq_signal_ioctl()
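
Taken together, the references above trace the whole lifetime of gobj_read in amdgpu_userq_signal_ioctl(): kmalloc_array() allocation (line 460), per-handle drm_gem_object_lookup() (467-468), locking and fence-slot reservation through drm_exec_prepare_array() (532), fence attachment with dma_resv_add_fence() (548-551), and the drm_gem_object_put()/kfree() teardown (580-581). The sketch below reassembles that pattern as a standalone helper; the helper name signal_read_bos(), the error labels, the DRM_EXEC_INTERRUPTIBLE_WAIT flag, and the DMA_RESV_USAGE_READ usage (the listing truncates that argument on line 551) are illustrative assumptions, not taken from the listing.

```c
#include <linux/slab.h>
#include <linux/dma-resv.h>
#include <drm/drm_file.h>
#include <drm/drm_gem.h>
#include <drm/drm_exec.h>

/*
 * Sketch of the gobj_read lifecycle from the signal path above.
 * All names, flags and error handling here are illustrative.
 */
static int signal_read_bos(struct drm_file *filp, u32 *bo_handles_read,
			   u32 num_read_bo_handles, struct dma_fence *fence)
{
	struct drm_gem_object **gobj_read;
	struct drm_exec exec;
	int r = 0;
	u32 rentry;

	/* Line 460: array of GEM object pointers, one per read handle. */
	gobj_read = kmalloc_array(num_read_bo_handles, sizeof(*gobj_read),
				  GFP_KERNEL);
	if (!gobj_read)
		return -ENOMEM;

	/* Lines 467-468: resolve each handle; bail out on a stale handle. */
	for (rentry = 0; rentry < num_read_bo_handles; rentry++) {
		gobj_read[rentry] = drm_gem_object_lookup(filp,
						bo_handles_read[rentry]);
		if (!gobj_read[rentry]) {
			r = -ENOENT;
			goto put_gobj_read;
		}
	}

	/* Line 532: lock all BOs and reserve one fence slot each. */
	drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, num_read_bo_handles);
	drm_exec_until_all_locked(&exec) {
		r = drm_exec_prepare_array(&exec, gobj_read,
					   num_read_bo_handles, 1);
		drm_exec_retry_on_contention(&exec);
		if (r)
			goto exec_fini;
	}

	/* Lines 548-551: attach the signal fence to every reservation object. */
	for (rentry = 0; rentry < num_read_bo_handles; rentry++) {
		if (!gobj_read[rentry]->resv)
			continue;
		dma_resv_add_fence(gobj_read[rentry]->resv, fence,
				   DMA_RESV_USAGE_READ); /* usage is assumed */
	}

exec_fini:
	drm_exec_fini(&exec);
put_gobj_read:
	/* Lines 580-581: drop the looked-up references and free the array. */
	while (rentry-- > 0)
		drm_gem_object_put(gobj_read[rentry]);
	kfree(gobj_read);
	return r;
}
```
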
606 struct drm_gem_object **gobj_read; in amdgpu_userq_wait_ioctl() local
649 gobj_read = kmalloc_array(num_read_bo_handles, sizeof(*gobj_read), GFP_KERNEL); in amdgpu_userq_wait_ioctl()
650 if (!gobj_read) { in amdgpu_userq_wait_ioctl()
656 gobj_read[rentry] = drm_gem_object_lookup(filp, bo_handles_read[rentry]); in amdgpu_userq_wait_ioctl()
657 if (!gobj_read[rentry]) { in amdgpu_userq_wait_ioctl()
682 r = drm_exec_prepare_array(&exec, gobj_read, num_read_bo_handles, 1); in amdgpu_userq_wait_ioctl()
738 dma_resv_for_each_fence(&resv_cursor, gobj_read[i]->resv, in amdgpu_userq_wait_ioctl()
779 dma_resv_for_each_fence(&resv_cursor, gobj_read[i]->resv, in amdgpu_userq_wait_ioctl()
924 drm_gem_object_put(gobj_read[i]); in amdgpu_userq_wait_ioctl()
925 kfree(gobj_read); in amdgpu_userq_wait_ioctl()
953 drm_gem_object_put(gobj_read[rentry]); in amdgpu_userq_wait_ioctl()
954 kfree(gobj_read); in amdgpu_userq_wait_ioctl()
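
The wait path (lines 606-954) builds and tears down the same gobj_read array, but instead of adding a fence it walks the fences already attached to each object's reservation object with dma_resv_for_each_fence(), which appears twice (lines 738 and 779), most likely once to count fences and once to collect them. A minimal sketch of that walk follows; the helper name, the DMA_RESV_USAGE_READ argument, and the counting use are assumptions, since the listing truncates those lines.

```c
#include <linux/dma-resv.h>
#include <drm/drm_gem.h>

/*
 * Sketch of the per-object fence walk at lines 738/779 of the wait path.
 * dma_resv_for_each_fence() is the locked iterator, so the caller must
 * already hold each object's reservation lock; in the listing that locking
 * is done by drm_exec_prepare_array() at line 682. The DMA_RESV_USAGE_READ
 * argument and the "count fences" use are assumed.
 */
static unsigned int count_read_fences(struct drm_gem_object **gobj_read,
				      u32 num_read_bo_handles)
{
	struct dma_resv_iter resv_cursor;
	struct dma_fence *fence;
	unsigned int num_fences = 0;
	u32 i;

	for (i = 0; i < num_read_bo_handles; i++) {
		/* Iterate over every fence on this BO's dma_resv. */
		dma_resv_for_each_fence(&resv_cursor, gobj_read[i]->resv,
					DMA_RESV_USAGE_READ, fence)
			num_fences++;
	}

	return num_fences;
}
```
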