
Searched refs:ctxs (Results 1 – 14 of 14) sorted by relevance

/linux/drivers/video/fbdev/omap2/omapfb/
vrfb.c
67 static struct vrfb_ctx *ctxs; variable
88 omap2_sms_write_rot_control(ctxs[ctx].control, ctx); in restore_hw_context()
89 omap2_sms_write_rot_size(ctxs[ctx].size, ctx); in restore_hw_context()
90 omap2_sms_write_rot_physical_ba(ctxs[ctx].physical_ba, ctx); in restore_hw_context()
211 ctxs[ctx].physical_ba = paddr; in omap_vrfb_setup()
212 ctxs[ctx].size = size; in omap_vrfb_setup()
213 ctxs[ctx].control = control; in omap_vrfb_setup()
308 paddr = ctxs[ctx].base + SMS_ROT_VIRT_BASE(rot); in omap_vrfb_request_ctx()
348 ctxs = devm_kcalloc(&pdev->dev, in vrfb_probe()
352 if (!ctxs) in vrfb_probe()
[all …]
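The vrfb.c hits show a common driver pattern: a file-scope array of per-context state, allocated once in probe() with devm_kcalloc() and then indexed by context number when saving and restoring rotation registers. Below is a minimal sketch of that allocation pattern only; the my_ctx struct, its fields, the context count and the probe function name are hypothetical stand-ins, not the real omapfb definitions.

    #include <linux/device.h>
    #include <linux/platform_device.h>
    #include <linux/slab.h>

    /* Hypothetical per-context state, loosely mirroring the fields in the hits above. */
    struct my_ctx {
        u32 control;
        u32 size;
        u32 physical_ba;
    };

    static struct my_ctx *ctxs;        /* one entry per hardware context */

    static int my_probe(struct platform_device *pdev)
    {
        /* Device-managed allocation: freed automatically when the device unbinds. */
        ctxs = devm_kcalloc(&pdev->dev, 12, sizeof(*ctxs), GFP_KERNEL);
        if (!ctxs)
            return -ENOMEM;
        return 0;
    }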
/linux/arch/x86/mm/
tlb.c
205 this_cpu_write(cpu_tlbstate.ctxs[asid].ctx_id, 0); in clear_asid_other()
228 if (this_cpu_read(cpu_tlbstate.ctxs[asid].ctx_id) != in choose_new_asid()
233 *need_flush = (this_cpu_read(cpu_tlbstate.ctxs[asid].tlb_gen) < in choose_new_asid()
554 VM_WARN_ON(this_cpu_read(cpu_tlbstate.ctxs[prev_asid].ctx_id) != in switch_mm_irqs_off()
582 if (this_cpu_read(cpu_tlbstate.ctxs[prev_asid].tlb_gen) == in switch_mm_irqs_off()
626 this_cpu_write(cpu_tlbstate.ctxs[new_asid].tlb_gen, next_tlb_gen); in switch_mm_irqs_off()
708 this_cpu_write(cpu_tlbstate.ctxs[0].ctx_id, mm->context.ctx_id); in initialize_tlbstate_and_flush()
709 this_cpu_write(cpu_tlbstate.ctxs[0].tlb_gen, tlb_gen); in initialize_tlbstate_and_flush()
712 this_cpu_write(cpu_tlbstate.ctxs[i].ctx_id, 0); in initialize_tlbstate_and_flush()
756 VM_WARN_ON(this_cpu_read(cpu_tlbstate.ctxs[loaded_mm_asid].ctx_id) != in flush_tlb_func()
[all …]
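The tlb.c hits all go through the per-CPU accessors this_cpu_read()/this_cpu_write() on cpu_tlbstate.ctxs[asid], the fixed array declared in tlbflush.h further down in these results. The sketch below shows that per-CPU array pattern in stripped-down form; the type and variable names (ctx_slot, my_tlb_state) and the array size are hypothetical stand-ins for the real cpu_tlbstate.

    #include <linux/percpu.h>
    #include <linux/types.h>

    /* Hypothetical per-ASID record, modeled on the ctx_id/tlb_gen fields above. */
    struct ctx_slot {
        u64 ctx_id;
        u64 tlb_gen;
    };

    struct my_tlb_state {
        struct ctx_slot ctxs[6];       /* small fixed array, like TLB_NR_DYN_ASIDS */
    };

    static DEFINE_PER_CPU(struct my_tlb_state, my_tlb_state);

    static void invalidate_slot(unsigned int asid)
    {
        /* this_cpu_write() only touches the executing CPU's copy. */
        this_cpu_write(my_tlb_state.ctxs[asid].ctx_id, 0);
    }

    static bool slot_is_stale(unsigned int asid, u64 next_gen)
    {
        /* A slot is stale when its recorded generation lags the target. */
        return this_cpu_read(my_tlb_state.ctxs[asid].tlb_gen) < next_gen;
    }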
/linux/block/
blk-mq-sysfs.c
20 struct blk_mq_ctxs *ctxs = container_of(kobj, struct blk_mq_ctxs, kobj); in blk_mq_sysfs_release() local
22 free_percpu(ctxs->queue_ctx); in blk_mq_sysfs_release()
23 kfree(ctxs); in blk_mq_sysfs_release()
31 kobject_put(&ctx->ctxs->kobj); in blk_mq_ctx_sysfs_release()
44 kfree(hctx->ctxs); in blk_mq_hw_sysfs_release()
blk-mq.c
3211 if (!hctx->ctxs) in blk_mq_alloc_hctx()
3236 kfree(hctx->ctxs); in blk_mq_alloc_hctx()
3382 hctx->ctxs[hctx->nr_ctx++] = ctx; in blk_mq_map_swqueue()
3504 struct blk_mq_ctxs *ctxs; in blk_mq_alloc_ctxs() local
3507 ctxs = kzalloc(sizeof(*ctxs), GFP_KERNEL); in blk_mq_alloc_ctxs()
3508 if (!ctxs) in blk_mq_alloc_ctxs()
3512 if (!ctxs->queue_ctx) in blk_mq_alloc_ctxs()
3517 ctx->ctxs = ctxs; in blk_mq_alloc_ctxs()
3520 q->mq_kobj = &ctxs->kobj; in blk_mq_alloc_ctxs()
3521 q->queue_ctx = ctxs->queue_ctx; in blk_mq_alloc_ctxs()
[all …]
blk-mq.h
29 struct blk_mq_ctxs *ctxs; member
blk-mq-sched.c
226 return hctx->ctxs[idx]; in blk_mq_next_ctx()
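The blk-mq hits outline a lifetime pattern: blk_mq_alloc_ctxs() allocates a container with kzalloc() plus a per-CPU member, the container embeds a kobject, and blk_mq_sysfs_release() recovers the container with container_of() and frees both once the last reference drops. The following is a condensed sketch of that kobject-owned, per-CPU-backed container under hypothetical names (my_ctxs, my_alloc_ctxs); the real blk-mq code adds sysfs registration and error handling not shown here.

    #include <linux/kobject.h>
    #include <linux/percpu.h>
    #include <linux/slab.h>

    /* Hypothetical container, loosely mirroring struct blk_mq_ctxs. */
    struct my_ctxs {
        struct kobject kobj;
        int __percpu *queue_ctx;
    };

    static void my_ctxs_release(struct kobject *kobj)
    {
        struct my_ctxs *ctxs = container_of(kobj, struct my_ctxs, kobj);

        /* Last reference dropped: free the per-CPU data, then the container. */
        free_percpu(ctxs->queue_ctx);
        kfree(ctxs);
    }

    static struct kobj_type my_ctxs_ktype = {
        .release = my_ctxs_release,
    };

    static struct my_ctxs *my_alloc_ctxs(void)
    {
        struct my_ctxs *ctxs = kzalloc(sizeof(*ctxs), GFP_KERNEL);

        if (!ctxs)
            return NULL;
        ctxs->queue_ctx = alloc_percpu(int);
        if (!ctxs->queue_ctx) {
            kfree(ctxs);
            return NULL;
        }
        kobject_init(&ctxs->kobj, &my_ctxs_ktype);
        return ctxs;                   /* teardown: kobject_put(&ctxs->kobj) */
    }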
/linux/drivers/iommu/arm/arm-smmu/
qcom_iommu.c
55 struct qcom_iommu_ctx *ctxs[]; /* indexed by asid-1 */ member
97 return qcom_iommu->ctxs[asid - 1]; in to_ctx()
715 qcom_iommu->ctxs[ctx->asid - 1] = ctx; in qcom_iommu_ctx_probe()
727 qcom_iommu->ctxs[ctx->asid - 1] = NULL; in qcom_iommu_ctx_remove()
773 qcom_iommu = devm_kzalloc(dev, struct_size(qcom_iommu, ctxs, max_asid), in qcom_iommu_device_probe()
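In qcom_iommu.c, ctxs is a flexible array member at the end of the device structure, sized at probe time with struct_size() and indexed by asid - 1. A minimal sketch of that allocation follows; the my_iommu/my_ctx types and the allocation helper are hypothetical, only the struct_size()/devm_kzalloc() combination mirrors the hit above.

    #include <linux/device.h>
    #include <linux/overflow.h>
    #include <linux/slab.h>

    struct my_ctx;                     /* hypothetical per-context object */

    struct my_iommu {
        struct device *dev;
        int max_asid;
        struct my_ctx *ctxs[];         /* flexible array, indexed by asid - 1 */
    };

    static struct my_iommu *my_iommu_alloc(struct device *dev, int max_asid)
    {
        struct my_iommu *iommu;

        /* struct_size() sizes the header plus max_asid trailing pointers,
         * with overflow checking. */
        iommu = devm_kzalloc(dev, struct_size(iommu, ctxs, max_asid),
                             GFP_KERNEL);
        if (!iommu)
            return NULL;
        iommu->dev = dev;
        iommu->max_asid = max_asid;
        return iommu;
    }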
/linux/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
nv04.h
9 unsigned ctxs:5; member
dmanv04.c
101 u32 cm = ((1ULL << c->bits) - 1) << c->ctxs; in nv04_fifo_dma_fini()
104 nvkm_wo32(fctx, c->ctxp + data, cv | (rv << c->ctxs)); in nv04_fifo_dma_fini()
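Unlike the other hits, the nouveau ctxs is not an array: it is a 5-bit struct member holding the bit offset of a context field inside a register, and the dmanv04.c line builds a mask of c->bits ones shifted up by c->ctxs. The same arithmetic in isolation, with hypothetical names and the 1ULL promotion kept so the shift cannot overflow a 32-bit value:

    #include <linux/types.h>

    /* Hypothetical descriptor: a register field 'bits' wide starting at bit 'ctxs'. */
    struct field_desc {
        unsigned bits:5;
        unsigned ctxs:5;
    };

    static inline u32 field_mask(const struct field_desc *c)
    {
        /* 'bits' ones, shifted into position; 1ULL keeps the shift 64-bit safe. */
        return ((1ULL << c->bits) - 1) << c->ctxs;
    }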
/linux/include/linux/
damon.h
458 int damon_start(struct damon_ctx **ctxs, int nr_ctxs);
459 int damon_stop(struct damon_ctx **ctxs, int nr_ctxs);
blk-mq.h
313 struct blk_mq_ctx **ctxs; member
867 ({ ctx = (hctx)->ctxs[(i)]; 1; }); (i)++)
/linux/arch/x86/include/asm/
tlbflush.h
135 struct tlb_context ctxs[TLB_NR_DYN_ASIDS]; member
/linux/mm/damon/
core.c
428 int damon_start(struct damon_ctx **ctxs, int nr_ctxs) in damon_start() argument
440 err = __damon_start(ctxs[i]); in damon_start()
481 int damon_stop(struct damon_ctx **ctxs, int nr_ctxs) in damon_stop() argument
487 err = __damon_stop(ctxs[i]); in damon_stop()
/linux/kernel/
workqueue.c
5358 LIST_HEAD(ctxs); in workqueue_apply_unbound_cpumask()
5378 list_add_tail(&ctx->list, &ctxs); in workqueue_apply_unbound_cpumask()
5381 list_for_each_entry_safe(ctx, n, &ctxs, list) { in workqueue_apply_unbound_cpumask()
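The workqueue.c hits use yet another shape of ctxs: an on-stack list head declared with LIST_HEAD(), filled with list_add_tail() while contexts are prepared, then drained with list_for_each_entry_safe() so entries can be freed during the walk. A self-contained sketch of that batch-then-drain pattern, with a hypothetical my_apply_ctx item type in place of the real apply_wqattrs_ctx:

    #include <linux/list.h>
    #include <linux/slab.h>

    /* Hypothetical batched work item. */
    struct my_apply_ctx {
        struct list_head list;
        int value;
    };

    static void apply_all(int count)
    {
        struct my_apply_ctx *ctx, *n;
        int i;
        LIST_HEAD(ctxs);               /* on-stack list head, like the hit above */

        /* Phase 1: build the batch. */
        for (i = 0; i < count; i++) {
            ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
            if (!ctx)
                break;
            ctx->value = i;
            list_add_tail(&ctx->list, &ctxs);
        }

        /* Phase 2: consume; the _safe variant tolerates freeing while iterating. */
        list_for_each_entry_safe(ctx, n, &ctxs, list) {
            list_del(&ctx->list);
            kfree(ctx);
        }
    }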
