
Searched refs:ctx_id (Results 1 – 25 of 89) sorted by relevance


/linux/include/linux/sunrpc/
gss_api.h:51 struct gss_ctx **ctx_id,
55 struct gss_ctx *ctx_id,
59 struct gss_ctx *ctx_id,
63 struct gss_ctx *ctx_id,
68 struct gss_ctx *ctx_id,
73 struct gss_ctx **ctx_id);
113 struct gss_ctx *ctx_id,
117 struct gss_ctx *ctx_id,
121 struct gss_ctx *ctx_id,
125 struct gss_ctx *ctx_id,
[all …]
/linux/drivers/media/platform/mediatek/vcodec/common/
mtk_vcodec_intr.c:19 int ctx_id, ctx_type, status = 0; in mtk_vcodec_wait_for_done_ctx() local
28 ctx_id = ctx->id; in mtk_vcodec_wait_for_done_ctx()
38 ctx_id = ctx->id; in mtk_vcodec_wait_for_done_ctx()
54 ctx_id, command, ctx_type, timeout_ms, in mtk_vcodec_wait_for_done_ctx()
59 ctx_id, command, ctx_type, in mtk_vcodec_wait_for_done_ctx()
mtk_vcodec_dbgfs.h:54 void mtk_vcodec_dbgfs_remove(struct mtk_vcodec_dec_dev *vcodec_dev, int ctx_id);
62 static inline void mtk_vcodec_dbgfs_remove(struct mtk_vcodec_dec_dev *vcodec_dev, int ctx_id) in mtk_vcodec_dbgfs_remove() argument
/linux/net/sunrpc/auth_gss/
gss_mech_switch.c:355 struct gss_ctx **ctx_id, in gss_import_sec_context() argument
359 if (!(*ctx_id = kzalloc(sizeof(**ctx_id), gfp_mask))) in gss_import_sec_context()
361 (*ctx_id)->mech_type = gss_mech_get(mech); in gss_import_sec_context()
364 *ctx_id, endtime, gfp_mask); in gss_import_sec_context()
408 gss_wrap(struct gss_ctx *ctx_id, in gss_wrap() argument
413 return ctx_id->mech_type->gm_ops in gss_wrap()
414 ->gss_wrap(ctx_id, offset, buf, inpages); in gss_wrap()
418 gss_unwrap(struct gss_ctx *ctx_id, in gss_unwrap() argument
423 return ctx_id->mech_type->gm_ops in gss_unwrap()
424 ->gss_unwrap(ctx_id, offset, len, buf); in gss_unwrap()
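The gss_wrap()/gss_unwrap() fragments above show the per-mechanism dispatch: the ctx_id handle carries a mech_type whose gm_ops table supplies the actual wrap routine. Below is a minimal user-space sketch of that indirection; the *_sketch types and fake_wrap() are stand-ins invented for illustration, not the kernel's definitions.

    #include <stdio.h>

    struct xdr_buf;                       /* opaque in this sketch */

    struct gss_api_ops_sketch {
        int (*gss_wrap)(void *ctx, int offset, struct xdr_buf *buf);
    };

    struct gss_api_mech_sketch {
        const struct gss_api_ops_sketch *gm_ops;
    };

    struct gss_ctx_sketch {
        struct gss_api_mech_sketch *mech_type;   /* attached when the context is imported */
    };

    /* Same shape as gss_wrap() in gss_mech_switch.c: dispatch through the
     * mechanism's ops table hanging off the per-context handle. */
    static int gss_wrap_sketch(struct gss_ctx_sketch *ctx_id, int offset,
                               struct xdr_buf *buf)
    {
        return ctx_id->mech_type->gm_ops->gss_wrap(ctx_id, offset, buf);
    }

    static int fake_wrap(void *ctx, int offset, struct xdr_buf *buf)
    {
        (void)ctx; (void)buf;
        printf("wrapping at offset %d\n", offset);
        return 0;
    }

    int main(void)
    {
        const struct gss_api_ops_sketch ops = { .gss_wrap = fake_wrap };
        struct gss_api_mech_sketch mech = { .gm_ops = &ops };
        struct gss_ctx_sketch ctx = { .mech_type = &mech };

        return gss_wrap_sketch(&ctx, 0, NULL);
    }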
/linux/drivers/accel/ivpu/
ivpu_jsm_msg.h:13 int ivpu_jsm_register_db(struct ivpu_device *vdev, u32 ctx_id, u32 db_id,
26 int ivpu_jsm_hws_create_cmdq(struct ivpu_device *vdev, u32 ctx_id, u32 cmdq_group, u32 cmdq_id,
28 int ivpu_jsm_hws_destroy_cmdq(struct ivpu_device *vdev, u32 ctx_id, u32 cmdq_id);
29 int ivpu_jsm_hws_register_db(struct ivpu_device *vdev, u32 ctx_id, u32 cmdq_id, u32 db_id,
32 int ivpu_jsm_hws_set_context_sched_properties(struct ivpu_device *vdev, u32 ctx_id, u32 cmdq_id,
ivpu_jsm_msg.c:92 int ivpu_jsm_register_db(struct ivpu_device *vdev, u32 ctx_id, u32 db_id, in ivpu_jsm_register_db() argument
102 req.payload.register_db.host_ssid = ctx_id; in ivpu_jsm_register_db()
282 int ivpu_jsm_hws_create_cmdq(struct ivpu_device *vdev, u32 ctx_id, u32 cmdq_group, u32 cmdq_id, in ivpu_jsm_hws_create_cmdq() argument
289 req.payload.hws_create_cmdq.host_ssid = ctx_id; in ivpu_jsm_hws_create_cmdq()
305 int ivpu_jsm_hws_destroy_cmdq(struct ivpu_device *vdev, u32 ctx_id, u32 cmdq_id) in ivpu_jsm_hws_destroy_cmdq() argument
311 req.payload.hws_destroy_cmdq.host_ssid = ctx_id; in ivpu_jsm_hws_destroy_cmdq()
322 int ivpu_jsm_hws_register_db(struct ivpu_device *vdev, u32 ctx_id, u32 cmdq_id, u32 db_id, in ivpu_jsm_hws_register_db() argument
330 req.payload.hws_register_db.host_ssid = ctx_id; in ivpu_jsm_hws_register_db()
362 int ivpu_jsm_hws_set_context_sched_properties(struct ivpu_device *vdev, u32 ctx_id, u32 cmdq_id, in ivpu_jsm_hws_set_context_sched_properties() argument
369 req.payload.hws_set_context_sched_properties.host_ssid = ctx_id; in ivpu_jsm_hws_set_context_sched_properties()
ivpu_drv.c:229 u32 ctx_id; in ivpu_open() local
251 ret = xa_alloc_irq(&vdev->context_xa, &ctx_id, file_priv, in ivpu_open()
258 ret = ivpu_mmu_user_context_init(vdev, &file_priv->ctx, ctx_id); in ivpu_open()
268 ctx_id, current->comm, task_pid_nr(current)); in ivpu_open()
273 xa_erase_irq(&vdev->context_xa, ctx_id); in ivpu_open()
457 unsigned long ctx_id; in ivpu_context_abort_invalid() local
461 xa_for_each(&vdev->context_xa, ctx_id, file_priv) { in ivpu_context_abort_invalid()
686 unsigned long ctx_id; in ivpu_bo_unbind_all_user_contexts() local
690 xa_for_each(&vdev->context_xa, ctx_id, file_priv) in ivpu_bo_unbind_all_user_contexts()
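In the ivpu fragments above, ivpu_open() allocates a small integer ctx_id with xa_alloc_irq() and the JSM helpers then carry that id to firmware as host_ssid. The sketch below models that flow in plain user-space C; MAX_CONTEXTS, ctx_id_alloc() and jsm_register_db_sketch are hypothetical names, not the driver's.

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define MAX_CONTEXTS 16

    struct file_ctx { int used; };

    static struct file_ctx context_table[MAX_CONTEXTS];

    /* Stand-in for xa_alloc_irq(): return the first free slot, or -1. */
    static int ctx_id_alloc(void)
    {
        for (uint32_t i = 1; i < MAX_CONTEXTS; i++) {   /* id 0 kept reserved here */
            if (!context_table[i].used) {
                context_table[i].used = 1;
                return (int)i;
            }
        }
        return -1;
    }

    struct jsm_register_db_sketch {
        uint32_t host_ssid;   /* carries ctx_id, as in ivpu_jsm_register_db() */
        uint32_t db_id;
    };

    int main(void)
    {
        int ctx_id = ctx_id_alloc();
        if (ctx_id < 0)
            return 1;

        struct jsm_register_db_sketch req;
        memset(&req, 0, sizeof(req));
        req.host_ssid = (uint32_t)ctx_id;
        req.db_id = 4;
        printf("register doorbell %u for context %u\n", req.db_id, req.host_ssid);

        context_table[ctx_id].used = 0;   /* stand-in for xa_erase_irq() on teardown */
        return 0;
    }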
/linux/sound/soc/fsl/
fsl_easrc.c:240 unsigned int ctx_id, int mem_type) in fsl_easrc_coeff_mem_ptr_reset() argument
253 if (ctx_id >= EASRC_CTX_MAX_NUM) { in fsl_easrc_coeff_mem_ptr_reset()
258 reg = REG_EASRC_CCE1(ctx_id); in fsl_easrc_coeff_mem_ptr_reset()
470 unsigned int ctx_id) in fsl_easrc_prefilter_config() argument
488 if (ctx_id >= EASRC_CTX_MAX_NUM) { in fsl_easrc_prefilter_config()
495 ctx = easrc->pair[ctx_id]; in fsl_easrc_prefilter_config()
554 REG_EASRC_CCE1(ctx_id), in fsl_easrc_prefilter_config()
864 struct fsl_asrc_pair *ctx = easrc->pair[ctx_id]; in fsl_easrc_config_slot()
927 struct fsl_asrc_pair *ctx = easrc->pair[ctx_id]; in fsl_easrc_release_slot()
977 if (ctx_id >= EASRC_CTX_MAX_NUM) { in fsl_easrc_config_context()
[all …]
/linux/drivers/gpu/drm/virtio/
virtgpu_trace.h:21 __field(u32, ctx_id)
32 __entry->ctx_id = le32_to_cpu(hdr->ctx_id);
39 __entry->ctx_id, __entry->num_free, __entry->seqno)
virtgpu_drv.h:84 uint32_t ctx_id; member
273 uint32_t ctx_id; member
366 uint32_t ctx_id,
369 uint32_t ctx_id,
373 uint32_t ctx_id,
377 uint32_t ctx_id,
385 uint32_t ctx_id,
virtgpu_vq.c:945 uint32_t ctx_id, in virtio_gpu_cmd_context_attach_resource() argument
957 cmd_p->hdr.ctx_id = cpu_to_le32(ctx_id); in virtio_gpu_cmd_context_attach_resource()
963 uint32_t ctx_id, in virtio_gpu_cmd_context_detach_resource() argument
975 cmd_p->hdr.ctx_id = cpu_to_le32(ctx_id); in virtio_gpu_cmd_context_detach_resource()
1014 uint32_t ctx_id, in virtio_gpu_cmd_transfer_to_host_3d() argument
1037 cmd_p->hdr.ctx_id = cpu_to_le32(ctx_id); in virtio_gpu_cmd_transfer_to_host_3d()
1049 uint32_t ctx_id, in virtio_gpu_cmd_transfer_from_host_3d() argument
1067 cmd_p->hdr.ctx_id = cpu_to_le32(ctx_id); in virtio_gpu_cmd_transfer_from_host_3d()
1080 uint32_t ctx_id, in virtio_gpu_cmd_submit() argument
1095 cmd_p->hdr.ctx_id = cpu_to_le32(ctx_id); in virtio_gpu_cmd_submit()
[all …]
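The virtio-gpu fragments above all store ctx_id into the command header with cpu_to_le32(), i.e. the id is little-endian on the wire. A hedged user-space sketch of the same conversion, using htole32() and an illustrative header layout (not the real virtio_gpu_ctrl_hdr):

    #define _DEFAULT_SOURCE
    #include <endian.h>
    #include <stdint.h>
    #include <stdio.h>

    struct gpu_ctrl_hdr_sketch {
        uint32_t type;
        uint32_t flags;
        uint64_t fence_id;
        uint32_t ctx_id;     /* little-endian on the wire, like the virtio header */
        uint32_t padding;
    };

    static void fill_hdr(struct gpu_ctrl_hdr_sketch *hdr, uint32_t type, uint32_t ctx_id)
    {
        hdr->type = htole32(type);
        hdr->ctx_id = htole32(ctx_id);   /* same conversion as cpu_to_le32(ctx_id) */
    }

    int main(void)
    {
        struct gpu_ctrl_hdr_sketch hdr = {0};
        fill_hdr(&hdr, 0x0207 /* arbitrary command code for the sketch */, 3);
        printf("ctx_id on the wire: 0x%08x\n", hdr.ctx_id);
        return 0;
    }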
/linux/drivers/media/platform/st/sti/hva/
hva-hw.c:120 u8 ctx_id = 0; in hva_hw_its_irq_thread() local
130 ctx_id = (hva->sts_reg & 0xFF00) >> 8; in hva_hw_its_irq_thread()
131 if (ctx_id >= HVA_MAX_INSTANCES) { in hva_hw_its_irq_thread()
133 HVA_PREFIX, __func__, ctx_id); in hva_hw_its_irq_thread()
137 ctx = hva->instances[ctx_id]; in hva_hw_its_irq_thread()
223 u8 ctx_id = 0; in hva_hw_err_irq_thread() local
233 ctx_id = (hva->sts_reg & 0xFF00) >> 8; in hva_hw_err_irq_thread()
234 if (ctx_id >= HVA_MAX_INSTANCES) { in hva_hw_err_irq_thread()
236 ctx_id); in hva_hw_err_irq_thread()
240 ctx = hva->instances[ctx_id]; in hva_hw_err_irq_thread()
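Both hva irq threads above recover ctx_id from bits [15:8] of the status register and range-check it against HVA_MAX_INSTANCES before indexing hva->instances[]. A tiny stand-alone sketch of that decode (the instance limit here is illustrative):

    #include <stdint.h>
    #include <stdio.h>

    #define HVA_MAX_INSTANCES 16   /* illustrative bound, not the driver's value */

    static int hva_ctx_from_status(uint32_t sts_reg)
    {
        uint8_t ctx_id = (sts_reg & 0xFF00) >> 8;   /* bits [15:8] name the instance */

        if (ctx_id >= HVA_MAX_INSTANCES)
            return -1;                              /* spurious or out-of-range id */
        return ctx_id;
    }

    int main(void)
    {
        printf("status 0x0320 -> instance %d\n", hva_ctx_from_status(0x0320));
        return 0;
    }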
/linux/tools/testing/selftests/drivers/net/hw/
rss_ctx.py:189 ctx_id = 0
192 ctx_id = ethtool_create(cfg, "-X", "context new")
197 data = get_rss(cfg, context=ctx_id)
227 data = get_rss(cfg, context=ctx_id)
334 expect_tuples = set([(cfg.ifname, -1)] + [(cfg.ifname, ctx_id) for ctx_id in ids])
410 ctx_id = ethtool_create(cfg, "-X", f"context new {create_cfg}")
420 _rss_key_check(cfg, context=ctx_id)
424 _rss_key_check(cfg, context=ctx_id)
427 data = get_rss(cfg, ctx_id)
515 ctx_id = ethtool_create(cfg, "-X", f"context new start {2 + i * 2} equal 2")
[all …]
/linux/drivers/gpu/drm/imx/dcss/
dcss-ctxld.c:332 void dcss_ctxld_write_irqsafe(struct dcss_ctxld *ctxld, u32 ctx_id, u32 val, in dcss_ctxld_write_irqsafe() argument
341 int item_idx = ctxld->ctx_size[curr_ctx][ctx_id]; in dcss_ctxld_write_irqsafe()
343 if (item_idx + 1 > dcss_ctxld_ctx_size[ctx_id]) { in dcss_ctxld_write_irqsafe()
348 ctx[ctx_id][item_idx].val = val; in dcss_ctxld_write_irqsafe()
349 ctx[ctx_id][item_idx].ofs = reg_ofs; in dcss_ctxld_write_irqsafe()
350 ctxld->ctx_size[curr_ctx][ctx_id] += 1; in dcss_ctxld_write_irqsafe()
353 void dcss_ctxld_write(struct dcss_ctxld *ctxld, u32 ctx_id, in dcss_ctxld_write() argument
357 dcss_ctxld_write_irqsafe(ctxld, ctx_id, val, reg_ofs); in dcss_ctxld_write()
dcss-ss.c:68 u32 ctx_id; member
78 dcss_ctxld_write(ss->ctxld, ss->ctx_id, val, in dcss_ss_write()
101 ss->ctx_id = CTX_SB_HP; in dcss_ss_init()
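dcss_ctxld_write_irqsafe() above does not touch hardware directly; it appends a (value, offset) item to the per-ctx_id context-loader buffer and bumps its size, bailing out when the buffer is full. A hypothetical user-space model of that journaling pattern (array sizes and names are made up):

    #include <stdint.h>
    #include <stdio.h>

    #define CTX_COUNT     3      /* e.g. DB, SB_HP, SB_LP in dcss terms */
    #define CTX_CAPACITY  8

    struct ctxld_item { uint32_t val; uint32_t ofs; };

    static struct ctxld_item ctx[CTX_COUNT][CTX_CAPACITY];
    static int ctx_size[CTX_COUNT];

    static int ctxld_write(uint32_t ctx_id, uint32_t val, uint32_t reg_ofs)
    {
        int item_idx = ctx_size[ctx_id];

        if (item_idx + 1 > CTX_CAPACITY)      /* same overflow check as the driver */
            return -1;

        ctx[ctx_id][item_idx].val = val;
        ctx[ctx_id][item_idx].ofs = reg_ofs;
        ctx_size[ctx_id] += 1;
        return 0;
    }

    int main(void)
    {
        ctxld_write(1, 0xdeadbeef, 0x40);
        printf("ctx 1 holds %d queued write(s)\n", ctx_size[1]);
        return 0;
    }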
/linux/drivers/accel/habanalabs/common/
command_buffer.c:104 int ctx_id, bool internal_cb) in hl_cb_alloc() argument
118 if (ctx_id == HL_KERNEL_ASID_ID && !hdev->disabled) in hl_cb_alloc()
137 } else if (ctx_id == HL_KERNEL_ASID_ID) { in hl_cb_alloc()
186 int rc, ctx_id = cb_args->ctx->asid; in hl_cb_mmap_mem_alloc() local
194 if (ctx_id == HL_KERNEL_ASID_ID && in hl_cb_mmap_mem_alloc()
212 cb = hl_cb_alloc(cb_args->hdev, cb_args->cb_size, ctx_id, cb_args->internal_cb); in hl_cb_mmap_mem_alloc()
226 if (ctx_id == HL_KERNEL_ASID_ID) { in hl_cb_mmap_mem_alloc()
278 int ctx_id = ctx->asid; in hl_cb_create() local
280 if ((hdev->disabled) || (hdev->reset_info.in_reset && (ctx_id != HL_KERNEL_ASID_ID))) { in hl_cb_create()
294 ctx_id == HL_KERNEL_ASID_ID ? GFP_ATOMIC : GFP_KERNEL, &args); in hl_cb_create()
/linux/arch/x86/mm/
tlb.c:208 this_cpu_write(cpu_tlbstate.ctxs[asid].ctx_id, 0); in clear_asid_other()
231 if (this_cpu_read(cpu_tlbstate.ctxs[asid].ctx_id) != in choose_new_asid()
232 next->context.ctx_id) in choose_new_asid()
558 VM_WARN_ON(this_cpu_read(cpu_tlbstate.ctxs[prev_asid].ctx_id) != in switch_mm_irqs_off()
559 next->context.ctx_id); in switch_mm_irqs_off()
633 this_cpu_write(cpu_tlbstate.ctxs[new_asid].ctx_id, next->context.ctx_id); in switch_mm_irqs_off()
722 this_cpu_write(cpu_tlbstate.ctxs[0].ctx_id, mm->context.ctx_id); in initialize_tlbstate_and_flush()
727 this_cpu_write(cpu_tlbstate.ctxs[i].ctx_id, 0); in initialize_tlbstate_and_flush()
771 VM_WARN_ON(this_cpu_read(cpu_tlbstate.ctxs[loaded_mm_asid].ctx_id) != in flush_tlb_func()
772 loaded_mm->context.ctx_id); in flush_tlb_func()
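In tlb.c above, every per-CPU ASID slot remembers the ctx_id of the mm it last held, and choose_new_asid() only reuses a slot when that ctx_id matches the incoming mm's; otherwise a slot is recycled and flushed. A simplified user-space model of that check, not the kernel's algorithm (eviction here is plain round-robin):

    #include <stdint.h>
    #include <stdio.h>

    #define NR_ASIDS 6

    struct tlb_ctx_slot { uint64_t ctx_id; };      /* mirrors cpu_tlbstate.ctxs[] */

    static struct tlb_ctx_slot ctxs[NR_ASIDS];
    static unsigned int next_victim;

    /* Return an ASID for the given mm ctx_id; *need_flush says whether the slot
     * was recycled from a different mm and therefore holds stale TLB entries. */
    static unsigned int choose_asid(uint64_t mm_ctx_id, int *need_flush)
    {
        for (unsigned int asid = 0; asid < NR_ASIDS; asid++) {
            if (ctxs[asid].ctx_id == mm_ctx_id) {
                *need_flush = 0;                   /* same mm generation: reuse as-is */
                return asid;
            }
        }
        unsigned int asid = next_victim++ % NR_ASIDS;  /* evict round-robin */
        ctxs[asid].ctx_id = mm_ctx_id;
        *need_flush = 1;
        return asid;
    }

    int main(void)
    {
        int flush;
        unsigned int a = choose_asid(1, &flush);
        printf("mm 1 -> asid %u, flush=%d\n", a, flush);
        a = choose_asid(1, &flush);
        printf("mm 1 again -> asid %u, flush=%d\n", a, flush);
        return 0;
    }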
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_sched.c:66 unsigned ctx_id, in amdgpu_sched_context_priority_override() argument
83 ctx = amdgpu_ctx_get(fpriv, ctx_id); in amdgpu_sched_context_priority_override()
129 args->in.ctx_id, in amdgpu_sched_ioctl()
/linux/arch/x86/include/asm/
mmu.h:28 u64 ctx_id; member
74 .ctx_id = 1, \
/linux/drivers/net/ethernet/netronome/nfp/flower/
offload.c:1637 u32 ctx_id; in __nfp_flower_update_merge_stats() local
1640 pkts = priv->stats[ctx_id].pkts; in __nfp_flower_update_merge_stats()
1644 bytes = priv->stats[ctx_id].bytes; in __nfp_flower_update_merge_stats()
1645 used = priv->stats[ctx_id].used; in __nfp_flower_update_merge_stats()
1648 priv->stats[ctx_id].pkts = 0; in __nfp_flower_update_merge_stats()
1649 priv->stats[ctx_id].bytes = 0; in __nfp_flower_update_merge_stats()
1661 priv->stats[ctx_id].used); in __nfp_flower_update_merge_stats()
1695 u32 ctx_id; in nfp_flower_get_stats() local
1718 priv->stats[ctx_id].pkts, 0, priv->stats[ctx_id].used, in nfp_flower_get_stats()
1721 priv->stats[ctx_id].pkts = 0; in nfp_flower_get_stats()
[all …]
conntrack.c:1994 u32 ctx_id; in nfp_fl_ct_sub_stats() local
2014 priv->stats[ctx_id].bytes, in nfp_fl_ct_sub_stats()
2015 priv->stats[ctx_id].pkts, in nfp_fl_ct_sub_stats()
2021 priv->stats[ctx_id].bytes, in nfp_fl_ct_sub_stats()
2022 priv->stats[ctx_id].pkts, in nfp_fl_ct_sub_stats()
2027 priv->stats[ctx_id].bytes, in nfp_fl_ct_sub_stats()
2028 priv->stats[ctx_id].pkts, in nfp_fl_ct_sub_stats()
2042 priv->stats[ctx_id].pkts, in nfp_fl_ct_sub_stats()
2047 priv->stats[ctx_id].pkts, in nfp_fl_ct_sub_stats()
2059 priv->stats[ctx_id].pkts = 0; in nfp_fl_ct_sub_stats()
[all …]
/linux/drivers/infiniband/hw/efa/
efa_com.c:246 u16 ctx_id; in efa_com_alloc_ctx_id() local
253 return ctx_id; in efa_com_alloc_ctx_id()
257 u16 ctx_id) in efa_com_dealloc_ctx_id() argument
270 u16 ctx_id = cmd_id & (aq->depth - 1); in efa_com_put_comp_ctx() local
274 efa_com_dealloc_ctx_id(aq, ctx_id); in efa_com_put_comp_ctx()
291 aq->comp_ctx[ctx_id].occupied = 1; in efa_com_get_comp_ctx()
296 return &aq->comp_ctx[ctx_id]; in efa_com_get_comp_ctx()
309 u16 ctx_id; in __efa_com_submit_admin_cmd() local
315 ctx_id = efa_com_alloc_ctx_id(aq); in __efa_com_submit_admin_cmd()
318 cmd_id = ctx_id & queue_size_mask; in __efa_com_submit_admin_cmd()
[all …]
/linux/fs/
aio.c:1094 if (ctx && ctx->user_id == ctx_id) { in lookup_ioctx()
2092 ctx = lookup_ioctx(ctx_id); in SYSCALL_DEFINE3()
2134 ctx = lookup_ioctx(ctx_id); in COMPAT_SYSCALL_DEFINE3()
2189 ctx = lookup_ioctx(ctx_id); in SYSCALL_DEFINE3()
2218 static long do_io_getevents(aio_context_t ctx_id, in do_io_getevents() argument
2225 struct kioctx *ioctx = lookup_ioctx(ctx_id); in do_io_getevents()
2277 aio_context_t, ctx_id, in SYSCALL_DEFINE6() argument
2312 aio_context_t, ctx_id, in SYSCALL_DEFINE6() argument
2349 SYSCALL_DEFINE5(io_getevents_time32, __u32, ctx_id, in SYSCALL_DEFINE5() argument
2379 compat_aio_context_t, ctx_id, in COMPAT_SYSCALL_DEFINE6() argument
[all …]
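The aio.c fragments above look up a kioctx by the ctx_id passed in from user space, which is the aio_context_t returned by io_setup(2). A small, hedged user-space example that obtains and destroys such a context via the raw syscalls (glibc does not wrap them):

    #include <linux/aio_abi.h>   /* aio_context_t, struct iocb, struct io_event */
    #include <sys/syscall.h>
    #include <unistd.h>
    #include <stdio.h>

    /* Thin wrappers over the raw syscalls. */
    static long io_setup_raw(unsigned nr_events, aio_context_t *ctxp)
    {
        return syscall(SYS_io_setup, nr_events, ctxp);
    }

    static long io_destroy_raw(aio_context_t ctx)
    {
        return syscall(SYS_io_destroy, ctx);
    }

    int main(void)
    {
        aio_context_t ctx_id = 0;        /* must be zero before io_setup() */

        if (io_setup_raw(8, &ctx_id) < 0) {
            perror("io_setup");
            return 1;
        }
        /* This value is what io_submit()/io_getevents() later pass back in,
         * and what lookup_ioctx() matches against ctx->user_id. */
        printf("kernel AIO context id: 0x%llx\n", (unsigned long long)ctx_id);

        io_destroy_raw(ctx_id);
        return 0;
    }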
/linux/drivers/infiniband/hw/erdma/
erdma_cmdq.c:55 cmdq->wait_pool[comp_wait->ctx_id].cmd_status = ERDMA_CMD_STATUS_INIT; in put_comp_wait()
57 used = __test_and_clear_bit(comp_wait->ctx_id, cmdq->comp_wait_bitmap); in put_comp_wait()
82 cmdq->wait_pool[i].ctx_id = i; in erdma_cmdq_wait_res_init()
277 comp_wait->ctx_id) | in push_cmdq_sqe()
289 u16 ctx_id; in erdma_poll_single_cmd_completion() local
304 ctx_id = FIELD_GET(ERDMA_CMD_HDR_CONTEXT_COOKIE_MASK, *sqe); in erdma_poll_single_cmd_completion()
305 comp_wait = &cmdq->wait_pool[ctx_id]; in erdma_poll_single_cmd_completion()
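erdma_cmdq.c above packs the wait-slot index (comp_wait->ctx_id) into a cookie field of the SQE header on submit and extracts it again on completion to find the right waiter; efa_com.c plays a similar trick with cmd_id. A sketch of that round trip with illustrative mask and shift values (not erdma's actual layout):

    #include <stdint.h>
    #include <stdio.h>

    #define CMD_HDR_CONTEXT_COOKIE_SHIFT 16
    #define CMD_HDR_CONTEXT_COOKIE_MASK  (0xffffULL << CMD_HDR_CONTEXT_COOKIE_SHIFT)

    /* FIELD_PREP() analogue: stash the wait-slot index in the header. */
    static uint64_t cookie_pack(uint64_t hdr, uint16_t ctx_id)
    {
        hdr &= ~CMD_HDR_CONTEXT_COOKIE_MASK;
        return hdr | ((uint64_t)ctx_id << CMD_HDR_CONTEXT_COOKIE_SHIFT);
    }

    /* FIELD_GET() analogue: recover it from the completed SQE. */
    static uint16_t cookie_unpack(uint64_t hdr)
    {
        return (uint16_t)((hdr & CMD_HDR_CONTEXT_COOKIE_MASK) >> CMD_HDR_CONTEXT_COOKIE_SHIFT);
    }

    int main(void)
    {
        uint64_t sqe_hdr = cookie_pack(0, 7);
        printf("completion routed back to wait slot %u\n", cookie_unpack(sqe_hdr));
        return 0;
    }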
/linux/include/uapi/drm/
amdgpu_drm.h:291 __u32 ctx_id; member
298 __u32 ctx_id; member
352 __u32 ctx_id; member
495 __u32 ctx_id; member
509 __u32 ctx_id; member
634 __u32 ctx_id; member
700 __u32 ctx_id; member
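The amdgpu_drm.h members above are the uapi side of GPU context ids: user space allocates a context through DRM_IOCTL_AMDGPU_CTX and quotes the returned ctx_id in later submissions. A hedged usage sketch, assuming the DRM uapi header is reachable as <drm/amdgpu_drm.h> and that /dev/dri/renderD128 is an amdgpu render node:

    #include <fcntl.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>
    #include <sys/ioctl.h>
    #include <unistd.h>
    #include <drm/amdgpu_drm.h>

    int main(void)
    {
        int fd = open("/dev/dri/renderD128", O_RDWR);
        if (fd < 0) {
            perror("open");
            return 1;
        }

        union drm_amdgpu_ctx args;
        memset(&args, 0, sizeof(args));
        args.in.op = AMDGPU_CTX_OP_ALLOC_CTX;

        if (ioctl(fd, DRM_IOCTL_AMDGPU_CTX, &args) == 0) {
            uint32_t ctx_id = args.out.alloc.ctx_id;
            printf("allocated GPU context %u\n", ctx_id);

            memset(&args, 0, sizeof(args));
            args.in.op = AMDGPU_CTX_OP_FREE_CTX;
            args.in.ctx_id = ctx_id;          /* hand the id back to free the context */
            ioctl(fd, DRM_IOCTL_AMDGPU_CTX, &args);
        } else {
            perror("DRM_IOCTL_AMDGPU_CTX");
        }

        close(fd);
        return 0;
    }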
