
Searched refs:tl (Results 1 – 25 of 74) sorted by relevance

/drivers/gpu/drm/i915/gt/
intel_timeline.c
175 return tl; in intel_timeline_create_from_engine()
182 return tl; in intel_timeline_create_from_engine()
212 tl->fence_context, tl->hwsp_offset); in intel_timeline_pin()
302 GEM_BUG_ON(tl->seqno & tl->has_initial_breadcrumb); in timeline_advance()
304 return tl->seqno += 1 + tl->has_initial_breadcrumb; in timeline_advance()
318 tl->hwsp_seqno = tl->hwsp_map + next_ofs; in __intel_timeline_get_seqno()
350 tl = NULL; in intel_timeline_read_hwsp()
352 if (tl) { in intel_timeline_read_hwsp()
361 tl = NULL; in intel_timeline_read_hwsp()
365 if (!tl) in intel_timeline_read_hwsp()
[all …]
selftest_timeline.c
81 tl = xchg(&state->history[idx], tl); in __mock_hwsp_record()
82 if (tl) { in __mock_hwsp_record()
101 if (IS_ERR(tl)) in __mock_hwsp_timeline()
496 if (READ_ONCE(*tl->hwsp_seqno) != tl->seqno) { in checked_tl_write()
498 *tl->hwsp_seqno, tl->seqno); in checked_tl_write()
586 n, tl->fence_context, tl->hwsp_offset, *tl->hwsp_seqno); in live_hwsp_engine()
658 n, tl->fence_context, tl->hwsp_offset, *tl->hwsp_seqno); in live_hwsp_alternate()
684 if (IS_ERR(tl)) in live_hwsp_wrap()
1205 WRITE_ONCE(*(u32 *)tl->hwsp_seqno, tl->seqno); in live_hwsp_rollover_kernel()
1292 WRITE_ONCE(*(u32 *)tl->hwsp_seqno, tl->seqno); in live_hwsp_rollover_user()
[all …]
intel_gt_requests.c
78 retire_requests(tl); in engine_retire()
79 mutex_unlock(&tl->mutex); in engine_retire()
81 intel_timeline_put(tl); in engine_retire()
84 tl = ptr_mask_bits(next, 1); in engine_retire()
85 } while (tl); in engine_retire()
103 intel_timeline_get(tl); in add_retire()
118 if (add_retire(engine, tl)) in intel_engine_add_retire()
149 intel_timeline_get(tl); in intel_gt_retire_requests_timeout()
174 if (!retire_requests(tl)) in intel_gt_retire_requests_timeout()
176 mutex_unlock(&tl->mutex); in intel_gt_retire_requests_timeout()
[all …]
intel_timeline.h
48 return i915_syncmap_set(&tl->sync, context, seqno); in __intel_timeline_sync_set()
51 static inline int intel_timeline_sync_set(struct intel_timeline *tl, in intel_timeline_sync_set() argument
60 return i915_syncmap_is_later(&tl->sync, context, seqno); in __intel_timeline_sync_is_later()
69 void __intel_timeline_pin(struct intel_timeline *tl);
71 void intel_timeline_enter(struct intel_timeline *tl);
72 int intel_timeline_get_seqno(struct intel_timeline *tl,
75 void intel_timeline_exit(struct intel_timeline *tl);
76 void intel_timeline_unpin(struct intel_timeline *tl);
78 void intel_timeline_reset_seqno(const struct intel_timeline *tl);
95 intel_timeline_is_last(const struct intel_timeline *tl, in intel_timeline_is_last() argument
[all …]
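
The intel_timeline.h hits show the syncmap API: __intel_timeline_sync_set() records the latest seqno seen from a foreign fence context, and __intel_timeline_sync_is_later() asks whether a given (context, seqno) pair is already covered. A toy userspace model of that contract, using a fixed linear table in place of the real i915_syncmap radix structure (struct syncmap and its helpers below are hypothetical):

#include <stdint.h>
#include <stdbool.h>

#define NSLOTS 64

struct sync_entry { uint64_t context; uint32_t seqno; bool used; };
struct syncmap { struct sync_entry e[NSLOTS]; };

/* Wrap-safe "a is at or after b" comparison, as i915 uses for seqnos. */
static bool seqno_later(uint32_t a, uint32_t b)
{
	return (int32_t)(a - b) >= 0;
}

static bool syncmap_is_later(const struct syncmap *m, uint64_t ctx,
			     uint32_t seqno)
{
	for (int i = 0; i < NSLOTS; i++)
		if (m->e[i].used && m->e[i].context == ctx)
			return seqno_later(m->e[i].seqno, seqno);
	return false;	/* context never seen: must synchronize */
}

static int syncmap_set(struct syncmap *m, uint64_t ctx, uint32_t seqno)
{
	for (int i = 0; i < NSLOTS; i++) {
		struct sync_entry *e = &m->e[i];

		if (!e->used || e->context == ctx) {
			e->used = true;
			e->context = ctx;
			e->seqno = seqno;	/* callers check is_later first */
			return 0;
		}
	}
	return -1;	/* table full; the real syncmap grows on demand */
}

int main(void)
{
	struct syncmap m = { 0 };

	syncmap_set(&m, 1, 10);
	return syncmap_is_later(&m, 1, 5) ? 0 : 1;	/* seqno 10 covers 5 */
}
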
selftest_context.c
17 struct intel_timeline *tl = i915_request_timeline(rq); in request_sync() local
21 intel_timeline_get(tl); in request_sync()
35 lockdep_unpin_lock(&tl->mutex, rq->cookie); in request_sync()
36 mutex_unlock(&tl->mutex); in request_sync()
39 intel_timeline_put(tl); in request_sync()
46 struct intel_timeline *tl = ce->timeline; in context_sync() local
49 mutex_lock(&tl->mutex); in context_sync()
54 if (list_empty(&tl->requests)) in context_sync()
57 rq = list_last_entry(&tl->requests, typeof(*rq), link); in context_sync()
68 mutex_unlock(&tl->mutex); in context_sync()
intel_context.h
251 struct intel_timeline *tl = ce->timeline; in intel_context_timeline_lock() local
255 err = mutex_lock_interruptible_nested(&tl->mutex, 0); in intel_context_timeline_lock()
257 err = mutex_lock_interruptible_nested(&tl->mutex, in intel_context_timeline_lock()
260 err = mutex_lock_interruptible(&tl->mutex); in intel_context_timeline_lock()
264 return tl; in intel_context_timeline_lock()
267 static inline void intel_context_timeline_unlock(struct intel_timeline *tl) in intel_context_timeline_unlock() argument
268 __releases(&tl->mutex) in intel_context_timeline_unlock()
270 mutex_unlock(&tl->mutex); in intel_context_timeline_unlock()
mock_engine.c
16 static int mock_timeline_pin(struct intel_timeline *tl) in mock_timeline_pin() argument
20 if (WARN_ON(!i915_gem_object_trylock(tl->hwsp_ggtt->obj, NULL))) in mock_timeline_pin()
23 err = intel_timeline_pin_map(tl); in mock_timeline_pin()
24 i915_gem_object_unlock(tl->hwsp_ggtt->obj); in mock_timeline_pin()
28 atomic_inc(&tl->pin_count); in mock_timeline_pin()
32 static void mock_timeline_unpin(struct intel_timeline *tl) in mock_timeline_unpin() argument
34 GEM_BUG_ON(!atomic_read(&tl->pin_count)); in mock_timeline_unpin()
35 atomic_dec(&tl->pin_count); in mock_timeline_unpin()
intel_ring.c
194 struct intel_timeline *tl, in wait_for_space() argument
203 GEM_BUG_ON(list_empty(&tl->requests)); in wait_for_space()
204 list_for_each_entry(target, &tl->requests, link) { in wait_for_space()
214 if (GEM_WARN_ON(&target->link == &tl->requests)) in wait_for_space()
intel_engine_pm.c
110 struct intel_timeline *tl, in __queue_and_release_pm() argument
139 if (!atomic_fetch_inc(&tl->active_count)) in __queue_and_release_pm()
140 list_add_tail(&tl->link, &timelines->active_list); in __queue_and_release_pm()
intel_engine_cs.c
1995 struct intel_timeline *tl; in get_timeline() local
2006 tl = rcu_dereference(rq->timeline); in get_timeline()
2007 if (!kref_get_unless_zero(&tl->kref)) in get_timeline()
2008 tl = NULL; in get_timeline()
2011 return tl; in get_timeline()
2024 tl ? tl->hwsp_offset : 0, in print_ring()
2029 if (tl) in print_ring()
2030 intel_timeline_put(tl); in print_ring()
2303 if (tl) { in engine_dump_request()
2305 tl->hwsp_offset); in engine_dump_request()
[all …]
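
Taken together, the i915/gt hits revolve around one idea: each timeline hands out monotonically increasing seqnos (timeline_advance() charges one extra for an initial breadcrumb), and completion is detected by comparing the seqno against the value the GPU wrote back to the HWSP (see checked_tl_write() and the rollover selftests). A minimal userspace model of that handshake, with a plain u32 standing in for the GGTT-backed status page (struct mock_timeline is hypothetical):

#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

struct mock_timeline {
	uint32_t seqno;			/* last seqno handed out */
	uint32_t hwsp;			/* what the "hardware" last wrote back */
	bool has_initial_breadcrumb;	/* costs one extra seqno per request */
};

static uint32_t mock_timeline_advance(struct mock_timeline *tl)
{
	/* Mirrors timeline_advance(): 1 + has_initial_breadcrumb. */
	return tl->seqno += 1 + tl->has_initial_breadcrumb;
}

static bool mock_timeline_completed(const struct mock_timeline *tl)
{
	/* checked_tl_write() flags *tl->hwsp_seqno != tl->seqno. */
	return tl->hwsp == tl->seqno;
}

int main(void)
{
	struct mock_timeline tl = { .has_initial_breadcrumb = true };
	uint32_t seqno = mock_timeline_advance(&tl);

	printf("issued %u, completed: %d\n", seqno, mock_timeline_completed(&tl));
	tl.hwsp = seqno;	/* pretend the GPU wrote the breadcrumb */
	printf("after hwsp write, completed: %d\n", mock_timeline_completed(&tl));
	return 0;
}
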
/drivers/net/ethernet/netronome/nfp/
nfp_net_debugdump.c
63 struct nfp_dump_tl_hdr tl; member
69 struct nfp_dump_tl_hdr tl; member
75 struct nfp_dump_tl_hdr tl; member
83 struct nfp_dump_tl_hdr tl; member
92 struct nfp_dump_tl_hdr tl; member
125 struct nfp_dump_tl *tl; in nfp_traverse_tlvs() local
131 tl = p; in nfp_traverse_tlvs()
132 if (!tl->type && !tl->length) in nfp_traverse_tlvs()
135 if (be32_to_cpu(tl->length) > remaining - sizeof(*tl)) in nfp_traverse_tlvs()
138 total_tlv_size = sizeof(*tl) + be32_to_cpu(tl->length); in nfp_traverse_tlvs()
[all …]
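
nfp_traverse_tlvs() walks a chain of big-endian type/length headers, stopping on an all-zero terminator and rejecting any entry whose declared length would overrun the bytes remaining. A self-contained userspace sketch of that bounds-checked walk (struct dump_tl and walk_tlvs() are hypothetical stand-ins for struct nfp_dump_tl and the driver helper):

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>
#include <arpa/inet.h>	/* ntohl()/htonl() for the big-endian fields */

struct dump_tl {
	uint32_t type;		/* big-endian on the wire */
	uint32_t length;	/* big-endian payload length */
	uint8_t data[];
};

static int walk_tlvs(const uint8_t *buf, size_t remaining)
{
	while (remaining >= sizeof(struct dump_tl)) {
		const struct dump_tl *tl = (const void *)buf;
		uint32_t len = ntohl(tl->length);

		if (!tl->type && !tl->length)
			break;				/* terminator */
		if (len > remaining - sizeof(*tl))
			return -1;			/* would overrun */

		printf("tlv type %u, payload %u bytes\n", ntohl(tl->type), len);
		buf += sizeof(*tl) + len;
		remaining -= sizeof(*tl) + len;
	}
	return 0;
}

int main(void)
{
	uint8_t buf[24] = { 0 };	/* one TLV, then a zero terminator */
	struct dump_tl *tl = (void *)buf;

	tl->type = htonl(1);
	tl->length = htonl(4);
	return walk_tlvs(buf, sizeof(buf));
}
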
/drivers/isdn/mISDN/
fsm.c
98 struct FsmTimer *ft = timer_container_of(ft, t, tl); in FsmExpireTimer()
114 timer_setup(&ft->tl, FsmExpireTimer, 0); in mISDN_FsmInitTimer()
126 timer_delete(&ft->tl); in mISDN_FsmDelTimer()
141 if (timer_pending(&ft->tl)) { in mISDN_FsmAddTimer()
152 ft->tl.expires = jiffies + (millisec * HZ) / 1000; in mISDN_FsmAddTimer()
153 add_timer(&ft->tl); in mISDN_FsmAddTimer()
169 if (timer_pending(&ft->tl)) in mISDN_FsmRestartTimer()
170 timer_delete(&ft->tl); in mISDN_FsmRestartTimer()
173 ft->tl.expires = jiffies + (millisec * HZ) / 1000; in mISDN_FsmRestartTimer()
174 add_timer(&ft->tl); in mISDN_FsmRestartTimer()
timerdev.c
39 struct timer_list tl; member
77 timer_shutdown_sync(&timer->tl); in mISDN_close()
158 struct mISDNtimer *timer = timer_container_of(timer, t, tl); in dev_expire_timer()
183 timer_setup(&timer->tl, dev_expire_timer, 0); in misdn_add_timer()
189 timer->tl.expires = jiffies + ((HZ * (u_long)timeout) / 1000); in misdn_add_timer()
190 add_timer(&timer->tl); in misdn_add_timer()
207 timer_shutdown_sync(&timer->tl); in misdn_del_timer()
dsp_tones.c
462 struct dsp *dsp = timer_container_of(dsp, t, tone.tl); in dsp_tone_timeout()
481 tone->tl.expires = jiffies + (pat->seq[index] * HZ) / 8000; in dsp_tone_timeout()
482 add_timer(&tone->tl); in dsp_tone_timeout()
507 if (dsp->features.hfc_loops && timer_pending(&tonet->tl)) in dsp_tone()
508 timer_delete(&tonet->tl); in dsp_tone()
541 if (timer_pending(&tonet->tl)) in dsp_tone()
542 timer_delete(&tonet->tl); in dsp_tone()
543 tonet->tl.expires = jiffies + (pat->seq[0] * HZ) / 8000; in dsp_tone()
544 add_timer(&tonet->tl); in dsp_tone()
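
All three mISDN files use the same embedded-timer idiom: a struct timer_list lives inside the owning object, timer_setup() binds the callback, the expiry is set relative to jiffies before add_timer(), timer_container_of() recovers the owner in the callback, and timer_shutdown_sync() fences teardown. A kernel-style sketch of that idiom (struct my_obj and its helpers are hypothetical, and this only builds in-tree):

#include <linux/timer.h>
#include <linux/jiffies.h>
#include <linux/printk.h>

struct my_obj {
	struct timer_list tl;	/* embedded, as in struct mISDNtimer */
	int id;
};

static void my_obj_expire(struct timer_list *t)
{
	/* Recover the container from its timer_list member. */
	struct my_obj *obj = timer_container_of(obj, t, tl);

	pr_debug("timer %d expired\n", obj->id);
}

static void my_obj_init(struct my_obj *obj)
{
	timer_setup(&obj->tl, my_obj_expire, 0);	/* bind callback once */
}

static void my_obj_arm(struct my_obj *obj, unsigned int millisec)
{
	/* msecs_to_jiffies() replaces the open-coded (ms * HZ) / 1000. */
	obj->tl.expires = jiffies + msecs_to_jiffies(millisec);
	add_timer(&obj->tl);
}

static void my_obj_teardown(struct my_obj *obj)
{
	/* Callback is finished and the timer cannot be re-armed after this. */
	timer_shutdown_sync(&obj->tl);
}
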
/drivers/s390/net/
fsm.c
136 fsm_timer *this = timer_container_of(this, t, tl); in fsm_expire_timer()
152 timer_setup(&this->tl, fsm_expire_timer, 0); in fsm_settimer()
162 timer_delete(&this->tl); in fsm_deltimer()
174 timer_setup(&this->tl, fsm_expire_timer, 0); in fsm_addtimer()
177 this->tl.expires = jiffies + (millisec * HZ) / 1000; in fsm_addtimer()
178 add_timer(&this->tl); in fsm_addtimer()
192 timer_delete(&this->tl); in fsm_modtimer()
193 timer_setup(&this->tl, fsm_expire_timer, 0); in fsm_modtimer()
196 this->tl.expires = jiffies + (millisec * HZ) / 1000; in fsm_modtimer()
197 add_timer(&this->tl); in fsm_modtimer()
/drivers/gpu/drm/i915/
i915_request.c
849 if (list_empty(&tl->requests)) in request_alloc_slow()
866 retire_requests(tl); in request_alloc_slow()
956 tl->fence_context, seqno); in __i915_request_create()
958 RCU_INIT_POINTER(rq->timeline, tl); in __i915_request_create()
959 rq->hwsp_seqno = tl->hwsp_seqno; in __i915_request_create()
1031 struct intel_timeline *tl; in i915_request_create() local
1034 if (IS_ERR(tl)) in i915_request_create()
1035 return ERR_CAST(tl); in i915_request_create()
1054 intel_context_timeline_unlock(tl); in i915_request_create()
1850 lockdep_assert_held(&tl->mutex); in i915_request_add()
[all …]
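
The i915_request.c hits sketch the locking contract around request creation: i915_request_create() returns with the timeline mutex held (taken via intel_context_timeline_lock()), and i915_request_add() asserts that same lock before queuing and then drops it. A userspace pthread model of "create locked, add unlocks" (all names hypothetical):

#include <pthread.h>
#include <stdlib.h>

struct request {
	struct request *next;
	unsigned int seqno;
};

struct timeline {
	pthread_mutex_t mutex;
	unsigned int seqno;
	struct request *requests;	/* newest first */
};

static struct request *request_create(struct timeline *tl)
{
	struct request *rq;

	pthread_mutex_lock(&tl->mutex);		/* intel_context_timeline_lock() */
	rq = calloc(1, sizeof(*rq));
	if (!rq) {
		pthread_mutex_unlock(&tl->mutex);
		return NULL;
	}
	rq->seqno = ++tl->seqno;		/* intel_timeline_get_seqno() */
	return rq;				/* still holding tl->mutex */
}

static void request_add(struct timeline *tl, struct request *rq)
{
	/* Caller holds tl->mutex, as i915_request_add() asserts via lockdep. */
	rq->next = tl->requests;
	tl->requests = rq;
	pthread_mutex_unlock(&tl->mutex);	/* intel_context_timeline_unlock() */
}

int main(void)
{
	struct timeline tl = { .mutex = PTHREAD_MUTEX_INITIALIZER };
	struct request *rq = request_create(&tl);

	if (rq)
		request_add(&tl, rq);
	return 0;
}
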
/drivers/net/wireless/intel/iwlegacy/
4965-rs.c
234 while (tl->queue_count && tl->time_stamp < oldest_time) { in il4965_rs_tl_rm_old_stats()
235 tl->total -= tl->packet_count[tl->head]; in il4965_rs_tl_rm_old_stats()
236 tl->packet_count[tl->head] = 0; in il4965_rs_tl_rm_old_stats()
238 tl->queue_count--; in il4965_rs_tl_rm_old_stats()
239 tl->head++; in il4965_rs_tl_rm_old_stats()
241 tl->head = 0; in il4965_rs_tl_rm_old_stats()
273 tl->total = 1; in il4965_rs_tl_add_packet()
276 tl->head = 0; in il4965_rs_tl_add_packet()
290 tl->packet_count[idx] = tl->packet_count[idx] + 1; in il4965_rs_tl_add_packet()
291 tl->total = tl->total + 1; in il4965_rs_tl_add_packet()
[all …]
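
il4965_rs_tl_rm_old_stats() ages a per-TID traffic-load window: packet counts sit in a ring buffer, and buckets older than the window are subtracted from the running total and dropped from the head. A userspace model of that aging loop (struct traffic_load and the constants are simplified, hypothetical versions of the driver's):

#include <stdint.h>
#include <stdio.h>

#define QUEUE_SIZE 8	/* buckets in the sliding window */

struct traffic_load {
	uint64_t time_stamp;		/* start time of the head bucket */
	uint32_t packet_count[QUEUE_SIZE];
	uint32_t total;			/* running sum over live buckets */
	uint8_t queue_count;		/* live buckets */
	uint8_t head;			/* index of the oldest bucket */
};

static void tl_rm_old_stats(struct traffic_load *tl, uint64_t oldest_time,
			    uint64_t interval)
{
	while (tl->queue_count && tl->time_stamp < oldest_time) {
		tl->total -= tl->packet_count[tl->head];
		tl->packet_count[tl->head] = 0;
		tl->time_stamp += interval;	/* window slides forward */
		tl->queue_count--;
		tl->head++;
		if (tl->head >= QUEUE_SIZE)
			tl->head = 0;		/* wrap the ring */
	}
}

int main(void)
{
	struct traffic_load tl = {
		.packet_count = { 3, 2 }, .total = 5, .queue_count = 2,
	};

	tl_rm_old_stats(&tl, 100, 10);
	printf("total after aging: %u\n", tl.total);	/* both buckets aged: 0 */
	return 0;
}
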
/drivers/net/ethernet/qlogic/qed/
qed_vf.h
55 struct channel_tlv tl; member
62 struct channel_tlv tl; member
74 struct channel_tlv tl; member
123 struct channel_tlv tl; member
232 struct channel_tlv tl; member
345 struct channel_tlv tl; member
353 struct channel_tlv tl; member
359 struct channel_tlv tl; member
365 struct channel_tlv tl; member
377 struct channel_tlv tl; member
[all …]
/drivers/net/wireless/intel/iwlwifi/dvm/
rs.c
234 tl->total -= tl->packet_count[tl->head]; in rs_tl_rm_old_stats()
235 tl->packet_count[tl->head] = 0; in rs_tl_rm_old_stats()
237 tl->queue_count--; in rs_tl_rm_old_stats()
238 tl->head++; in rs_tl_rm_old_stats()
240 tl->head = 0; in rs_tl_rm_old_stats()
272 tl->total = 1; in rs_tl_add_packet()
274 tl->queue_count = 1; in rs_tl_add_packet()
275 tl->head = 0; in rs_tl_add_packet()
289 tl->packet_count[index] = tl->packet_count[index] + 1; in rs_tl_add_packet()
290 tl->total = tl->total + 1; in rs_tl_add_packet()
[all …]
/drivers/net/ethernet/broadcom/bnx2x/
bnx2x_vfpf.h
99 struct channel_tlv tl; member
105 struct channel_tlv tl; member
117 struct channel_tlv tl; member
213 struct channel_tlv tl; member
219 struct channel_tlv tl; member
bnx2x_vfpf.c
32 struct channel_tlv *tl = in bnx2x_add_tlv() local
35 tl->type = type; in bnx2x_add_tlv()
36 tl->length = length; in bnx2x_add_tlv()
52 bnx2x_add_tlv(bp, &first_tlv->tl, 0, type, length); in bnx2x_vfpf_prep()
63 first_tlv->tl.type); in bnx2x_vfpf_finalize()
256 bnx2x_add_tlv(bp, req, req->first_tlv.tl.length, in bnx2x_vfpf_acquire()
266 req->first_tlv.tl.length + sizeof(struct channel_tlv), in bnx2x_vfpf_acquire()
1132 type = mbx->first_tlv.tl.type; in bnx2x_vf_mbx_resp_single_tlv()
2125 if (bnx2x_tlv_supported(mbx->first_tlv.tl.type)) { in bnx2x_vf_mbx_request()
2132 switch (mbx->first_tlv.tl.type) { in bnx2x_vf_mbx_request()
[all …]
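
bnx2x (like qed above) shows the sender side of a VF-to-PF TLV mailbox: every request struct begins with an embedded channel_tlv header (hence the repeated "struct channel_tlv tl; member" hits), bnx2x_add_tlv() stamps a type/length pair at a given offset, and the list-end TLV is appended at the offset recorded in the first header's length. A minimal userspace sketch of that append pattern (the type values are hypothetical):

#include <stdint.h>

struct channel_tlv {
	uint16_t type;
	uint16_t length;	/* header plus payload */
};

/* Stamp a TLV header at 'offset' inside the message, as bnx2x_add_tlv(). */
static void add_tlv(void *msg, unsigned int offset,
		    uint16_t type, uint16_t length)
{
	struct channel_tlv *tl =
		(struct channel_tlv *)((uint8_t *)msg + offset);

	tl->type = type;
	tl->length = length;
}

int main(void)
{
	uint8_t msg[64] = { 0 };
	const uint16_t TLV_ACQUIRE = 1, TLV_LIST_END = 2;	/* hypothetical */

	add_tlv(msg, 0, TLV_ACQUIRE, 16);
	/* The terminating TLV lands at the first header's recorded length,
	 * mirroring the bnx2x_vfpf_acquire() hit above. */
	add_tlv(msg, 16, TLV_LIST_END, sizeof(struct channel_tlv));
	return 0;
}
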
/drivers/gpu/drm/i915/pxp/
intel_pxp_cmd.c
86 struct intel_timeline * const tl = i915_request_timeline(rq); in pxp_request_commit() local
88 lockdep_unpin_lock(&tl->mutex, rq->cookie); in pxp_request_commit()
94 mutex_unlock(&tl->mutex); in pxp_request_commit()
/drivers/nvme/target/
fabrics-cmd-auth.c
230 return le32_to_cpu(req->cmd->auth_send.tl); in nvmet_auth_send_data_len()
238 u32 tl; in nvmet_execute_auth_send() local
260 tl = nvmet_auth_send_data_len(req); in nvmet_execute_auth_send()
261 if (!tl) { in nvmet_execute_auth_send()
264 offsetof(struct nvmf_auth_send_command, tl); in nvmet_execute_auth_send()
267 if (!nvmet_check_transfer_len(req, tl)) { in nvmet_execute_auth_send()
268 pr_debug("%s: transfer length mismatch (%u)\n", __func__, tl); in nvmet_execute_auth_send()
272 d = kmalloc(tl, GFP_KERNEL); in nvmet_execute_auth_send()
278 status = nvmet_copy_from_sgl(req, 0, d, tl); in nvmet_execute_auth_send()
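
nvmet_execute_auth_send() treats the command's tl field as the declared transfer length: zero is a protocol error, a mismatch with the actual transfer is rejected, and only a validated length is allocated and copied. A hedged userspace distillation of that order of operations (recv_auth_data() is hypothetical):

#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>

static int recv_auth_data(uint32_t declared_tl, const void *wire,
			  size_t wire_len, void **out)
{
	void *d;

	if (!declared_tl)
		return -EINVAL;		/* zero tl: protocol error */
	if (declared_tl != wire_len)
		return -EINVAL;		/* transfer length mismatch */

	d = malloc(declared_tl);	/* kmalloc(tl, GFP_KERNEL) in-kernel */
	if (!d)
		return -ENOMEM;

	memcpy(d, wire, declared_tl);	/* nvmet_copy_from_sgl() in-kernel */
	*out = d;
	return 0;
}

int main(void)
{
	const char wire[4] = "abc";
	void *d = NULL;
	int ret = recv_auth_data(sizeof(wire), wire, sizeof(wire), &d);

	free(d);
	return ret;
}
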
/drivers/crypto/amlogic/
amlogic-gxl-core.c
143 if (mc->chanlist[i].tl) in meson_free_chanlist()
145 mc->chanlist[i].tl, in meson_free_chanlist()
178 mc->chanlist[i].tl = dma_alloc_coherent(mc->dev, in meson_allocate_chanlist()
182 if (!mc->chanlist[i].tl) { in meson_allocate_chanlist()
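
meson_allocate_chanlist() allocates one DMA-coherent descriptor table per channel, and meson_free_chanlist() frees only the entries that were actually allocated, which also serves as the unwind path when an allocation fails mid-loop. A userspace model of that allocate-then-unwind pattern, with calloc standing in for dma_alloc_coherent() (names hypothetical):

#include <stdlib.h>

#define NCHAN 4

struct chan {
	void *tl;	/* per-channel descriptor table */
};

static void free_chanlist(struct chan *c, int upto)
{
	while (--upto >= 0) {
		free(c[upto].tl);	/* dma_free_coherent() in the driver */
		c[upto].tl = NULL;
	}
}

static int alloc_chanlist(struct chan *c, size_t tbl_size)
{
	for (int i = 0; i < NCHAN; i++) {
		c[i].tl = calloc(1, tbl_size);	/* dma_alloc_coherent() */
		if (!c[i].tl) {
			free_chanlist(c, i);	/* unwind partial success */
			return -1;
		}
	}
	return 0;
}

int main(void)
{
	struct chan chanlist[NCHAN] = { 0 };

	if (alloc_chanlist(chanlist, 256))
		return 1;
	free_chanlist(chanlist, NCHAN);
	return 0;
}
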
/drivers/nvme/host/
auth.c
78 cmd.auth_send.tl = cpu_to_le32(data_len); in nvme_auth_submit()
784 size_t tl; in nvme_queue_auth_work() local
807 tl = ret; in nvme_queue_auth_work()
808 ret = nvme_auth_submit(ctrl, chap->qid, chap->buf, tl, true); in nvme_queue_auth_work()
873 tl = ret; in nvme_queue_auth_work()
874 ret = nvme_auth_submit(ctrl, chap->qid, chap->buf, tl, true); in nvme_queue_auth_work()
928 tl = nvme_auth_set_dhchap_success2_data(ctrl, chap); in nvme_queue_auth_work()
929 ret = nvme_auth_submit(ctrl, chap->qid, chap->buf, tl, true); in nvme_queue_auth_work()
952 tl = nvme_auth_set_dhchap_failure2_data(ctrl, chap); in nvme_queue_auth_work()
953 ret = nvme_auth_submit(ctrl, chap->qid, chap->buf, tl, true); in nvme_queue_auth_work()

Completed in 83 milliseconds
