/drivers/gpu/drm/amd/display/dc/core/

dc_surface.c
     94  kref_init(&plane_state->refcount);  in dc_create_plane_state()
    164  kref_get(&plane_state->refcount);  in dc_plane_state_retain()
    181  kref_get(&gamma->refcount);  in dc_gamma_retain()
    192  kref_put(&(*gamma)->refcount, dc_gamma_free);  in dc_gamma_release()
    203  kref_init(&gamma->refcount);  in dc_create_gamma()
    212  kref_get(&tf->refcount);  in dc_transfer_func_retain()
    223  kref_put(&tf->refcount, dc_transfer_func_free);  in dc_transfer_func_release()
    233  kref_init(&tf->refcount);  in dc_create_transfer_func()
    255  kref_init(&lut->refcount);  in dc_create_3dlut_func()
    267  kref_put(&lut->refcount, dc_3dlut_func_free);  in dc_3dlut_func_release()
    [all …]
|
dc_sink.c
     62  kref_get(&sink->refcount);  in dc_sink_retain()
     67  struct dc_sink *sink = container_of(kref, struct dc_sink, refcount);  in dc_sink_free()
     74  kref_put(&sink->refcount, dc_sink_free);  in dc_sink_release()
     87  kref_init(&sink->refcount);  in dc_sink_create()
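The dc_sink.c hits above show the canonical kref lifecycle: kref_init() at creation, kref_get() in a retain helper, and kref_put() with a release callback that recovers the object through container_of(). A minimal sketch of that pattern, assuming a hypothetical my_sink object (none of these names come from the driver):

```c
#include <linux/container_of.h>
#include <linux/kref.h>
#include <linux/slab.h>

struct my_sink {
	struct kref refcount;
	int id;
};

static struct my_sink *my_sink_create(int id)
{
	struct my_sink *sink = kzalloc(sizeof(*sink), GFP_KERNEL);

	if (!sink)
		return NULL;
	sink->id = id;
	kref_init(&sink->refcount);	/* count starts at 1, owned by the caller */
	return sink;
}

static void my_sink_retain(struct my_sink *sink)
{
	kref_get(&sink->refcount);	/* take an additional reference */
}

static void my_sink_free(struct kref *kref)
{
	/* recover the containing object from the embedded kref */
	struct my_sink *sink = container_of(kref, struct my_sink, refcount);

	kfree(sink);
}

static void my_sink_release(struct my_sink *sink)
{
	/* frees the object only when the last reference is dropped */
	kref_put(&sink->refcount, my_sink_free);
}
```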
|
/drivers/scsi/qedf/

qedf_els.c
    196  int refcount;  in qedf_rrq_compl()  local
    211  refcount = kref_read(&orig_io_req->refcount);  in qedf_rrq_compl()
    221  if (orig_io_req && refcount > 0)  in qedf_rrq_compl()
    248  int refcount;  in qedf_send_rrq()  local
    258  refcount = kref_read(&aborted_io_req->refcount);  in qedf_send_rrq()
    283  refcount = kref_read(&aborted_io_req->refcount);  in qedf_send_rrq()
    284  if (refcount != 1) {  in qedf_send_rrq()
    549  int refcount;  in qedf_srr_compl()  local
    569  refcount = kref_read(&orig_io_req->refcount);  in qedf_srr_compl()
    844  int refcount;  in qedf_rec_compl()  local
    [all …]
|
qedf_io.c
     828  io_log->refcount = kref_read(&io_req->refcount);  in qedf_trace_io()
    1127  int refcount;  in qedf_scsi_completion()  local
    1231  refcount = kref_read(&io_req->refcount);  in qedf_scsi_completion()
    1241  refcount);  in qedf_scsi_completion()
    1327  int refcount;  in qedf_scsi_done()  local
    1394  refcount = kref_read(&io_req->refcount);  in qedf_scsi_done()
    1402  refcount);  in qedf_scsi_done()
    1681  refcount = kref_read(&io_req->refcount);  in qedf_flush_active_ios()
    1820  refcount =  in qedf_flush_active_ios()
    1932  refcount = kref_read(&io_req->refcount);  in qedf_initiate_abts()
    [all …]
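Across qedf, kref_read() snapshots the current count for the I/O trace buffer, debug messages, and sanity checks such as the refcount != 1 test in qedf_send_rrq(). A small sketch of that diagnostic read, with hypothetical names:

```c
#include <linux/kref.h>
#include <linux/printk.h>

struct my_io_req {
	struct kref refcount;
	u16 xid;
};

static void my_trace_io(struct my_io_req *io_req)
{
	/* Snapshot for logging only: the count can change the moment it is
	 * read, so it must not be used as the mechanism that frees the object.
	 */
	unsigned int refcount = kref_read(&io_req->refcount);

	pr_debug("xid=0x%x refcount=%u\n", io_req->xid, refcount);
}
```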
|
/drivers/infiniband/hw/hfi1/

mmu_rb.c
     22  static void release_immediate(struct kref *refcount);
    111  kref_put(&rbnode->refcount, release_immediate);  in hfi1_mmu_rb_unregister()
    188  static void release_immediate(struct kref *refcount)  in release_immediate()  argument
    191  container_of(refcount, struct mmu_rb_node, refcount);  in release_immediate()
    197  static void release_nolock(struct kref *refcount)  in release_nolock()  argument
    200  container_of(refcount, struct mmu_rb_node, refcount);  in release_nolock()
    213  void hfi1_mmu_rb_release(struct kref *refcount)  in hfi1_mmu_rb_release()  argument
    216  container_of(refcount, struct mmu_rb_node, refcount);  in hfi1_mmu_rb_release()
    241  if (kref_read(&rbnode->refcount) > 1)  in hfi1_mmu_rb_evict()
    257  kref_put(&rbnode->refcount, release_immediate);  in hfi1_mmu_rb_evict()
    [all …]
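mmu_rb.c pairs one embedded kref with several release callbacks (release_immediate(), release_nolock(), hfi1_mmu_rb_release()), chosen per call site to match the caller's locking context; each callback recovers the node with container_of(). A simplified sketch of a single kref with lock-dependent release variants, using hypothetical names:

```c
#include <linux/container_of.h>
#include <linux/kref.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

struct my_node {
	struct kref refcount;
	struct list_head list;
};

static DEFINE_SPINLOCK(my_list_lock);

/* Release variant used when the caller does NOT hold my_list_lock. */
static void my_node_release(struct kref *refcount)
{
	struct my_node *node = container_of(refcount, struct my_node, refcount);

	spin_lock(&my_list_lock);
	list_del(&node->list);
	spin_unlock(&my_list_lock);
	kfree(node);
}

/* Release variant used when the caller already holds my_list_lock. */
static void my_node_release_nolock(struct kref *refcount)
{
	struct my_node *node = container_of(refcount, struct my_node, refcount);

	list_del(&node->list);
	kfree(node);
}

static void my_node_put(struct my_node *node, bool locked)
{
	kref_put(&node->refcount,
		 locked ? my_node_release_nolock : my_node_release);
}
```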
|
trace_mmu.h
     22  __field(unsigned int, refcount)
     26  __entry->refcount = kref_read(&node->refcount);
     31  __entry->refcount
|
pin_system.c
    108  kref_get(&rb_node->refcount);  in find_system_node()
    181  kref_init(&node->rb.refcount);  in add_system_pinning()
    184  kref_get(&node->rb.refcount);  in add_system_pinning()
    249  node->rb.addr, kref_read(&node->rb.refcount));  in get_system_cache_entry()
    256  kref_put(&node->rb.refcount, hfi1_mmu_rb_release);  in get_system_cache_entry()
    272  kref_get(&node->refcount);  in sdma_mmu_rb_node_get()
    279  kref_put(&node->rb.refcount, hfi1_mmu_rb_release);  in sdma_mmu_rb_node_put()
    378  kref_put(&cache_entry->rb.refcount, hfi1_mmu_rb_release);  in add_system_iovec_to_sdma_packet()
|
/drivers/gpu/drm/i915/display/

intel_dmc_wl.c
    159  WARN_ON(refcount_read(&wl->refcount));  in __intel_dmc_wl_release()
    179  if (refcount_read(&wl->refcount))  in intel_dmc_wl_work()
    338  refcount_set(&wl->refcount,  in intel_dmc_wl_init()
    381  if (refcount_read(&wl->refcount))  in intel_dmc_wl_enable()
    450  if (!refcount_inc_not_zero(&wl->refcount))  in intel_dmc_wl_get()
    451  refcount_set(&wl->refcount, 1);  in intel_dmc_wl_get()
    457  if (refcount_inc_not_zero(&wl->refcount))  in intel_dmc_wl_get()
    460  refcount_set(&wl->refcount, 1);  in intel_dmc_wl_get()
    482  if (WARN_RATELIMIT(!refcount_read(&wl->refcount),  in intel_dmc_wl_put()
    486  if (refcount_dec_and_test(&wl->refcount)) {  in intel_dmc_wl_put()
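intel_dmc_wl.c treats the refcount_t as a wakelock-style counter where zero is a legitimate idle state: a get first tries refcount_inc_not_zero() and, when the count was zero, restarts it with refcount_set(..., 1) (a plain refcount_inc() would warn on zero), while a put acts on refcount_dec_and_test(). A rough sketch of that shape, assuming a lock that serializes the zero/one transitions (all names hypothetical):

```c
#include <linux/refcount.h>
#include <linux/spinlock.h>

struct my_wakelock {
	spinlock_t lock;
	refcount_t refcount;	/* 0 means "no holders", which is valid here */
};

static void my_wl_get(struct my_wakelock *wl)
{
	unsigned long flags;

	spin_lock_irqsave(&wl->lock, flags);
	/* refcount_inc() warns on a zero count, so probe with
	 * refcount_inc_not_zero() and restart the count explicitly when we
	 * are the first holder.
	 */
	if (!refcount_inc_not_zero(&wl->refcount)) {
		/* first holder: take the hardware wakelock here */
		refcount_set(&wl->refcount, 1);
	}
	spin_unlock_irqrestore(&wl->lock, flags);
}

static void my_wl_put(struct my_wakelock *wl)
{
	unsigned long flags;

	spin_lock_irqsave(&wl->lock, flags);
	if (refcount_dec_and_test(&wl->refcount)) {
		/* last holder gone: release the hardware wakelock here */
	}
	spin_unlock_irqrestore(&wl->lock, flags);
}
```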
|
/drivers/accel/habanalabs/common/

context.c
     33  container_of(ref, struct hl_cs_encaps_sig_handle, refcount);  in hl_encaps_release_handle_and_put_ctx()
     41  container_of(ref, struct hl_cs_encaps_sig_handle, refcount);  in hl_encaps_release_handle_and_put_sob()
     49  container_of(ref, struct hl_cs_encaps_sig_handle, refcount);  in hl_encaps_release_handle_and_put_sob_ctx()
     75  kref_put(&handle->refcount, hl_encaps_release_handle_and_put_sob);  in hl_encaps_sig_mgr_fini()
    135  ctx = container_of(ref, struct hl_ctx, refcount);  in hl_ctx_do_release()
    206  kref_init(&ctx->refcount);  in hl_ctx_init()
    295  return kref_get_unless_zero(&ctx->refcount);  in hl_ctx_get_unless_zero()
    300  kref_get(&ctx->refcount);  in hl_ctx_get()
    305  return kref_put(&ctx->refcount, hl_ctx_do_release);  in hl_ctx_put()
    445  kref_put(&ctx->refcount, hl_ctx_do_release);  in hl_ctx_mgr_fini()
|
memory_mgr.c
     31  kref_get(&buf->refcount);  in hl_mmap_mem_buf_get()
     64  container_of(kref, struct hl_mmap_mem_buf, refcount);  in hl_mmap_mem_buf_release()
     84  container_of(kref, struct hl_mmap_mem_buf, refcount);  in hl_mmap_mem_buf_remove_idr_locked()
     99  return kref_put(&buf->refcount, hl_mmap_mem_buf_release);  in hl_mmap_mem_buf_put()
    126  if (kref_put(&buf->refcount, hl_mmap_mem_buf_remove_idr_locked)) {  in hl_mmap_mem_buf_put_handle()
    172  kref_init(&buf->refcount);  in hl_mmap_mem_buf_alloc()
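memory_mgr.c leans on two properties of kref_put(): it returns 1 when the release callback actually ran, and the callback can differ per call site (here one variant also drops the buffer from an IDR while the IDR lock is held). A sketch of acting on that return value, with hypothetical names:

```c
#include <linux/container_of.h>
#include <linux/kref.h>
#include <linux/printk.h>
#include <linux/slab.h>

struct my_buf {
	struct kref refcount;
	void *data;
};

static void my_buf_release(struct kref *kref)
{
	struct my_buf *buf = container_of(kref, struct my_buf, refcount);

	kfree(buf->data);
	kfree(buf);
}

static void my_buf_put_handle(struct my_buf *buf)
{
	/* kref_put() returns 1 if my_buf_release() was called, so the caller
	 * can tell whether this put destroyed the object.
	 */
	if (kref_put(&buf->refcount, my_buf_release))
		pr_debug("buffer destroyed on last put\n");
}
```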
|
/drivers/net/ethernet/mellanox/mlx5/core/

rl.c
    186  if (!table->rl_entry[i].refcount)  in find_rl_entry()
    191  if (table->rl_entry[i].refcount) {  in find_rl_entry()
    246  table->refcount++;  in mlx5_rl_table_get()
    261  table->refcount++;  in mlx5_rl_table_get()
    268  if (--table->refcount)  in mlx5_rl_table_put()
    284  if (table->rl_entry[i].refcount)  in mlx5_rl_table_free()
    291  entry->refcount++;  in mlx5_rl_entry_get()
    297  entry->refcount--;  in mlx5_rl_entry_put()
    298  if (!entry->refcount)  in mlx5_rl_entry_put()
    332  if (!entry->refcount) {  in mlx5_rl_add_rate_raw()
    [all …]
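rl.c uses neither kref nor refcount_t: the rate table and each rate entry carry a plain int refcount that is only touched while the table mutex is held, so ++/-- plus a zero test are enough. A minimal sketch of such a lock-protected counter (names hypothetical):

```c
#include <linux/mutex.h>
#include <linux/types.h>

struct my_rl_entry {
	u32 rate;
	int refcount;		/* protected by my_rl_table.lock */
};

struct my_rl_table {
	struct mutex lock;
	struct my_rl_entry entry[16];
};

/* Caller must hold table->lock. */
static void my_rl_entry_get(struct my_rl_entry *entry)
{
	entry->refcount++;
}

/* Caller must hold table->lock; returns true when the entry became unused. */
static bool my_rl_entry_put(struct my_rl_entry *entry)
{
	entry->refcount--;
	return !entry->refcount;
}

static void my_rl_remove_rate(struct my_rl_table *table,
			      struct my_rl_entry *entry)
{
	mutex_lock(&table->lock);
	if (my_rl_entry_put(entry))
		entry->rate = 0;	/* entry is free for reuse */
	mutex_unlock(&table->lock);
}
```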
|
/drivers/dpll/

dpll_core.c
     87  refcount_inc(&ref->refcount);  in dpll_xa_ref_pin_add()
    105  refcount_set(&ref->refcount, 1);  in dpll_xa_ref_pin_add()
    120  refcount_inc(&ref->refcount);  in dpll_xa_ref_pin_add()
    168  refcount_inc(&ref->refcount);  in dpll_xa_ref_dpll_add()
    186  refcount_set(&ref->refcount, 1);  in dpll_xa_ref_dpll_add()
    201  refcount_inc(&ref->refcount);  in dpll_xa_ref_dpll_add()
    293  refcount_inc(&ret->refcount);  in dpll_device_get()
    573  refcount_inc(&ret->refcount);  in dpll_pin_get()
    750  refcount_inc(&pin->refcount);  in dpll_pin_on_pin_register()
    770  refcount_dec(&pin->refcount);  in dpll_pin_on_pin_register()
    [all …]
|
/drivers/net/ethernet/mellanox/mlx5/core/lib/

geneve.c
     14  u32 refcount;  member
     72  if (geneve->refcount) {  in mlx5_geneve_tlv_option_add()
     76  geneve->refcount++;  in mlx5_geneve_tlv_option_add()
    107  geneve->refcount++;  in mlx5_geneve_tlv_option_add()
    122  if (--geneve->refcount == 0) {  in mlx5_geneve_tlv_option_del()
    154  if (geneve->refcount)  in mlx5_geneve_destroy()
|
/drivers/media/mc/

mc-dev-allocator.c
     33  struct kref refcount;  member
     45  container_of(kref, struct media_device_instance, refcount);  in media_device_instance_release()
     71  kref_get(&mdi->refcount);  in __media_device_get()
     89  kref_init(&mdi->refcount);  in __media_device_get()
    133  kref_put(&mdi->refcount, media_device_instance_release);  in media_device_delete()
|
/drivers/gpu/drm/vmwgfx/

ttm_object.c
     82  struct kref refcount;  member
    135  kref_get(&tfile->refcount);  in ttm_object_file_ref()
    172  container_of(kref, struct ttm_object_file, refcount);  in ttm_object_file_destroy()
    183  kref_put(&tfile->refcount, ttm_object_file_destroy);  in ttm_object_file_unref()
    200  kref_init(&base->refcount);  in ttm_base_object_init()
    227  container_of(kref, struct ttm_base_object, refcount);  in ttm_release_base()
    251  kref_put(&base->refcount, ttm_release_base);  in ttm_base_object_unref()
    266  if (!kref_get_unless_zero(&base->refcount))  in ttm_base_object_lookup()
    283  if (base && !kref_get_unless_zero(&base->refcount))  in ttm_base_object_lookup_for_ref()
    336  kref_get(&base->refcount);  in ttm_ref_object_add()
    [all …]
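ttm_object.c guards its lookups with kref_get_unless_zero(): an object found in the lookup structure may already be on its way out with a count of zero, so the lookup only succeeds if it can take a reference from a non-zero count. A sketch of that lookup pattern over an RCU hash table (the table and all names are assumptions, not the vmwgfx data structures):

```c
#include <linux/hashtable.h>
#include <linux/kref.h>
#include <linux/rcupdate.h>
#include <linux/types.h>

struct my_object {
	struct kref refcount;
	struct hlist_node node;
	u32 key;
};

static DEFINE_HASHTABLE(my_objects, 6);

static struct my_object *my_object_lookup(u32 key)
{
	struct my_object *obj;

	rcu_read_lock();
	hash_for_each_possible_rcu(my_objects, obj, node, key) {
		if (obj->key != key)
			continue;
		/* Fails if the object is already being torn down (count 0),
		 * so a dying object is never resurrected.
		 */
		if (!kref_get_unless_zero(&obj->refcount))
			obj = NULL;
		rcu_read_unlock();
		return obj;
	}
	rcu_read_unlock();
	return NULL;
}
```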
|
/drivers/net/wireguard/

peer.c
     53  kref_init(&peer->refcount);  in wg_peer_create()
     75  if (unlikely(!peer || !kref_get_unless_zero(&peer->refcount)))  in wg_peer_get_maybe_zero()
    200  static void kref_release(struct kref *refcount)  in kref_release()  argument
    202  struct wg_peer *peer = container_of(refcount, struct wg_peer, refcount);  in kref_release()
    227  kref_put(&peer->refcount, kref_release);  in wg_peer_put()
|
/drivers/net/ethernet/mellanox/mlx5/core/steering/sws/

dr_table.c
     75  refcount_dec(&tbl->miss_action->refcount);  in mlx5dr_table_set_miss_action()
     80  refcount_inc(&action->refcount);  in mlx5dr_table_set_miss_action()
    256  refcount_inc(&dmn->refcount);  in mlx5dr_table_create()
    265  refcount_set(&tbl->refcount, 1);  in mlx5dr_table_create()
    284  refcount_dec(&dmn->refcount);  in mlx5dr_table_create()
    292  if (WARN_ON_ONCE(refcount_read(&tbl->refcount) > 1))  in mlx5dr_table_destroy()
    303  refcount_dec(&tbl->miss_action->refcount);  in mlx5dr_table_destroy()
    305  refcount_dec(&tbl->dmn->refcount);  in mlx5dr_table_destroy()
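dr_table.c uses refcount_t as a busy counter rather than to drive freeing: the table starts at refcount_set(..., 1), users of the table raise the count, and mlx5dr_table_destroy() refuses with WARN_ON_ONCE() if anyone beyond the creator still holds a reference. A sketch of that guard (hypothetical names):

```c
#include <linux/bug.h>
#include <linux/errno.h>
#include <linux/refcount.h>
#include <linux/slab.h>

struct my_table {
	refcount_t refcount;	/* 1 == only the creator holds it */
};

static struct my_table *my_table_create(void)
{
	struct my_table *tbl = kzalloc(sizeof(*tbl), GFP_KERNEL);

	if (!tbl)
		return NULL;
	refcount_set(&tbl->refcount, 1);
	return tbl;
}

static int my_table_destroy(struct my_table *tbl)
{
	/* Refuse to destroy while users still reference the table; freeing is
	 * explicit, the count only guards against destroying a busy table.
	 */
	if (WARN_ON_ONCE(refcount_read(&tbl->refcount) > 1))
		return -EBUSY;

	kfree(tbl);
	return 0;
}
```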
|
/drivers/scsi/bnx2fc/

bnx2fc_io.c
      37  kref_get(&io_req->refcount);  in bnx2fc_cmd_timer_set()
      78  kref_read(&io_req->refcount));  in bnx2fc_cmd_timeout()
     460  kref_init(&io_req->refcount);  in bnx2fc_elstm_alloc()
     513  kref_init(&io_req->refcount);  in bnx2fc_cmd_alloc()
    1162  kref_get(&io_req->refcount);  in bnx2fc_eh_abort()
    1210  kref_put(&io_req->refcount,  in bnx2fc_eh_abort()
    1230  kref_put(&io_req->refcount,  in bnx2fc_eh_abort()
    1398  kref_put(&io_req->refcount,  in bnx2fc_process_abts_compl()
    1479  kref_put(&io_req->refcount,  in bnx2fc_lun_reset_cmpl()
    1512  kref_put(&io_req->refcount,  in bnx2fc_tgt_reset_cmpl()
    [all …]
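bnx2fc_cmd_timer_set() takes kref_get() on the I/O request before arming its timeout, so the request cannot be freed while the timeout is pending, and the handler (or whoever cancels it) drops that reference with kref_put(). A sketch of the same idea using a delayed work item instead of the driver's timer, with hypothetical names:

```c
#include <linux/container_of.h>
#include <linux/kref.h>
#include <linux/slab.h>
#include <linux/workqueue.h>

struct my_io_req {
	struct kref refcount;
	struct delayed_work timeout_work;
};

static void my_io_req_free(struct kref *kref)
{
	kfree(container_of(kref, struct my_io_req, refcount));
}

static void my_io_timeout(struct work_struct *work)
{
	struct my_io_req *io_req =
		container_of(to_delayed_work(work), struct my_io_req,
			     timeout_work);

	/* ... handle the timeout ... */

	/* drop the reference taken when the timeout was armed */
	kref_put(&io_req->refcount, my_io_req_free);
}

static void my_io_req_init(struct my_io_req *io_req)
{
	kref_init(&io_req->refcount);
	INIT_DELAYED_WORK(&io_req->timeout_work, my_io_timeout);
}

static void my_io_timer_set(struct my_io_req *io_req, unsigned long delay)
{
	/* Hold a reference for the pending work so the request cannot be
	 * freed before my_io_timeout() runs.
	 */
	kref_get(&io_req->refcount);
	if (!schedule_delayed_work(&io_req->timeout_work, delay))
		kref_put(&io_req->refcount, my_io_req_free);	/* was already queued */
}
```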
|
/drivers/soc/qcom/

smem_state.c
     27  struct kref refcount;  member
     68  kref_get(&state->refcount);  in of_node_to_state()
    133  struct qcom_smem_state *state = container_of(ref, struct qcom_smem_state, refcount);  in qcom_smem_state_release()
    147  kref_put(&state->refcount, qcom_smem_state_release);  in qcom_smem_state_put()
    204  kref_init(&state->refcount);  in qcom_smem_state_register()
|
/drivers/gpu/drm/display/

drm_dp_aux_dev.c
     49  struct kref refcount;  member
     66  if (aux_dev && !kref_get_unless_zero(&aux_dev->refcount))  in drm_dp_aux_dev_get_by_minor()
     83  kref_init(&aux_dev->refcount);  in alloc_drm_dp_aux_dev()
    100  container_of(ref, struct drm_dp_aux_dev, refcount);  in release_drm_dp_aux_dev()
    116  kref_put(&aux_dev->refcount, release_drm_dp_aux_dev);  in name_show()
    236  kref_put(&aux_dev->refcount, release_drm_dp_aux_dev);  in auxdev_release()
    300  kref_put(&aux_dev->refcount, release_drm_dp_aux_dev);  in drm_dp_aux_unregister_devnode()
|
/drivers/gpu/host1x/

channel.c
     51  kref_get(&channel->refcount);  in host1x_channel_get()
     70  if (!kref_get_unless_zero(&ch->refcount))  in host1x_channel_get_index()
    106  container_of(kref, struct host1x_channel, refcount);  in release_channel()
    118  kref_put(&channel->refcount, release_channel);  in host1x_channel_put()
    164  kref_init(&channel->refcount);  in host1x_channel_request()
|
/drivers/tty/vt/

consolemap.c
    218  unsigned long refcount;  member
    461  if (--p->refcount)  in con_free_unimap()
    498  dict2->refcount++;  in con_unify_unimap()
    549  new->refcount = 1;  in con_allocate_new()
    553  old->refcount--;  in con_allocate_new()
    563  if (!old || old->refcount > 1)  in con_do_clear_unimap()
    625  old->refcount++;  in con_unshare_unimap()
    660  if (dict->refcount > 1) {  in con_set_unimap()
    718  dflt->refcount++;  in con_set_default_unimap()
    720  if (dict && !--dict->refcount) {  in con_set_default_unimap()
    [all …]
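consolemap.c shares one unicode map between consoles with a plain refcount and copies it on write: con_unify_unimap() bumps the count when two consoles end up with identical maps, and the set/clear paths duplicate the map before modifying it whenever refcount > 1. A condensed sketch of that copy-on-write scheme (hypothetical names, serialization by the console lock assumed):

```c
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/types.h>

struct my_map {
	unsigned long refcount;		/* protected by the console lock */
	u16 glyphs[256];
};

/* Share an existing map instead of keeping two identical copies. */
static struct my_map *my_map_share(struct my_map *map)
{
	map->refcount++;
	return map;
}

/* Called before modifying a map: writers must own a private copy. */
static struct my_map *my_map_unshare(struct my_map *map)
{
	struct my_map *copy;

	if (map->refcount == 1)
		return map;		/* sole owner, modify in place */

	copy = kmemdup(map, sizeof(*map), GFP_KERNEL);
	if (!copy)
		return NULL;
	copy->refcount = 1;
	map->refcount--;		/* this console stops using the shared map */
	return copy;
}

static void my_map_free(struct my_map *map)
{
	if (!--map->refcount)
		kfree(map);
}
```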
|
/drivers/infiniband/core/

multicast.c
     64  refcount_t refcount;  member
    106  atomic_t refcount;  member
    120  refcount_t refcount;  member
    215  atomic_inc(&group->refcount);  in queue_join()
    404  refcount_inc(&member->refcount);  in process_group_error()
    448  refcount_inc(&member->refcount);  in mcast_work_handler()
    500  refcount_inc(&member->refcount);  in process_join_error()
    592  refcount_inc(&port->refcount);  in acquire_group()
    594  atomic_inc(&group->refcount);  in acquire_group()
    783  atomic_inc(&group->refcount);  in mcast_groups_event()
    [all …]
|
/drivers/gpu/drm/nouveau/nvkm/core/

subdev.c
    163  if (refcount_read(&subdev->use.refcount) == 0) {  in nvkm_subdev_init()
    188  if (refcount_dec_and_mutex_lock(&subdev->use.refcount, &subdev->use.mutex)) {  in nvkm_subdev_unref()
    199  if (subdev && !refcount_inc_not_zero(&subdev->use.refcount)) {  in nvkm_subdev_ref()
    201  if (!refcount_inc_not_zero(&subdev->use.refcount)) {  in nvkm_subdev_ref()
    207  refcount_set(&subdev->use.refcount, 1);  in nvkm_subdev_ref()
    263  refcount_set(&subdev->use.refcount, 1);  in __nvkm_subdev_ctor()
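subdev.c serializes the last-reference transition with refcount_dec_and_mutex_lock(): the mutex is taken (and held on return) only when the count drops to zero, so the power-down path runs exactly once and is ordered against nvkm_subdev_ref(), which takes the same mutex for the zero-to-one transition. A sketch of that pairing, with hypothetical names:

```c
#include <linux/mutex.h>
#include <linux/refcount.h>

struct my_unit {
	struct {
		struct mutex mutex;
		refcount_t refcount;	/* 0 while the unit is powered down */
	} use;
};

static void my_unit_ref(struct my_unit *unit)
{
	if (!refcount_inc_not_zero(&unit->use.refcount)) {
		mutex_lock(&unit->use.mutex);
		/* re-check under the mutex: someone may have powered it up */
		if (!refcount_inc_not_zero(&unit->use.refcount)) {
			/* ... power the unit up here ... */
			refcount_set(&unit->use.refcount, 1);
		}
		mutex_unlock(&unit->use.mutex);
	}
}

static void my_unit_unref(struct my_unit *unit)
{
	/* Takes use.mutex only when this was the last reference; the mutex is
	 * then held while the unit is powered down.
	 */
	if (refcount_dec_and_mutex_lock(&unit->use.refcount, &unit->use.mutex)) {
		/* ... power the unit down here ... */
		mutex_unlock(&unit->use.mutex);
	}
}
```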
|
/drivers/media/common/videobuf2/

videobuf2-vmalloc.c
     30  refcount_t refcount;  member
     55  buf->handler.refcount = &buf->refcount;  in vb2_vmalloc_alloc()
     59  refcount_set(&buf->refcount, 1);  in vb2_vmalloc_alloc()
     67  if (refcount_dec_and_test(&buf->refcount)) {  in vb2_vmalloc_put()
    167  return refcount_read(&buf->refcount);  in vb2_vmalloc_num_users()
    357  refcount_inc(&buf->refcount);  in vb2_vmalloc_get_dmabuf()
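videobuf2-vmalloc.c gives each buffer a refcount_t: set to 1 at allocation, raised when the buffer gains another user (for example a dma-buf export), freed on refcount_dec_and_test(), and reported via refcount_read() as the number of users. A minimal sketch of that buffer lifetime (hypothetical names):

```c
#include <linux/refcount.h>
#include <linux/slab.h>
#include <linux/vmalloc.h>

struct my_vbuf {
	refcount_t refcount;
	void *vaddr;
	unsigned long size;
};

static struct my_vbuf *my_vbuf_alloc(unsigned long size)
{
	struct my_vbuf *buf = kzalloc(sizeof(*buf), GFP_KERNEL);

	if (!buf)
		return NULL;
	buf->vaddr = vmalloc(size);
	if (!buf->vaddr) {
		kfree(buf);
		return NULL;
	}
	buf->size = size;
	refcount_set(&buf->refcount, 1);	/* the allocator is the first user */
	return buf;
}

static void my_vbuf_get(struct my_vbuf *buf)
{
	refcount_inc(&buf->refcount);		/* e.g. when exporting the buffer */
}

static void my_vbuf_put(struct my_vbuf *buf)
{
	if (refcount_dec_and_test(&buf->refcount)) {
		vfree(buf->vaddr);
		kfree(buf);
	}
}

static unsigned int my_vbuf_num_users(struct my_vbuf *buf)
{
	return refcount_read(&buf->refcount);
}
```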
|