
Searched refs:ctx (Results 1 – 25 of 179) sorted by relevance


/arch/powerpc/platforms/cell/spufs/
context.c
29 ctx = kzalloc(sizeof *ctx, GFP_KERNEL); in alloc_spu_context()
30 if (!ctx) in alloc_spu_context()
63 kfree(ctx); in alloc_spu_context()
64 ctx = NULL; in alloc_spu_context()
66 return ctx; in alloc_spu_context()
78 if (ctx->gang) in destroy_spu_context()
79 spu_gang_remove_ctx(ctx->gang, ctx); in destroy_spu_context()
81 kref_put(ctx->prof_priv_kref, ctx->prof_priv_release); in destroy_spu_context()
85 kfree(ctx); in destroy_spu_context()
91 return ctx; in get_spu_context()
[all …]
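The context.c hits above are the allocate/refcount/free lifecycle of an SPU context: kzalloc() with a NULL check, kref_put() with a release callback, and kfree() at the end. As a rough orientation only, a minimal self-contained sketch of that general pattern (hypothetical my_ctx type and helpers, not the actual spufs structures) could look like this:

```c
#include <linux/kernel.h>
#include <linux/kref.h>
#include <linux/slab.h>

struct my_ctx {
	struct kref kref;
	/* payload fields would go here */
};

/* Release callback: runs when the last reference is dropped. */
static void my_ctx_release(struct kref *kref)
{
	struct my_ctx *ctx = container_of(kref, struct my_ctx, kref);

	kfree(ctx);                      /* mirrors kfree(ctx) in destroy_spu_context() */
}

static struct my_ctx *my_ctx_alloc(void)
{
	struct my_ctx *ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);

	if (!ctx)
		return NULL;             /* mirrors the !ctx check in alloc_spu_context() */
	kref_init(&ctx->kref);
	return ctx;
}

static void my_ctx_put(struct my_ctx *ctx)
{
	kref_put(&ctx->kref, my_ctx_release);   /* mirrors kref_put() in destroy_spu_context() */
}
```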
run.c
17 struct spu_context *ctx = spu->ctx; in spufs_stop_callback() local
58 *stat = ctx->ops->status_read(ctx); in spu_stopped()
131 ctx->ops->signal2_write(ctx, in spu_setup_isolated()
134 ctx->ops->runcntl_write(ctx, in spu_setup_isolated()
209 runcntl = ctx->ops->runcntl_read(ctx) & in spu_run_init()
222 ctx->ops->npc_write(ctx, *npc); in spu_run_init()
225 ctx->ops->runcntl_write(ctx, runcntl); in spu_run_init()
251 *status = ctx->ops->status_read(ctx); in spu_run_fini()
252 *npc = ctx->ops->npc_read(ctx); in spu_run_fini()
318 npc = ctx->ops->npc_read(ctx) & ~3; in spu_process_callback()
[all …]
backing_ops.c
45 ctx->csa.spu_chnlcnt_RW[0] = 1; in gen_spu_event()
54 spin_lock(&ctx->csa.register_lock); in spu_backing_mbox_read()
61 *data = ctx->csa.prob.pu_mb_R; in spu_backing_mbox_read()
63 ctx->csa.spu_chnlcnt_RW[28] = 1; in spu_backing_mbox_read()
73 return ctx->csa.prob.mb_stat_R; in spu_backing_mbox_stat_read()
84 stat = ctx->csa.prob.mb_stat_R; in spu_backing_mbox_stat_poll()
182 ctx->csa.spu_chnlcnt_RW[3] = 1; in spu_backing_signal1_write()
199 ctx->csa.spu_chnlcnt_RW[4] = 1; in spu_backing_signal2_write()
249 ctx->csa.prob.spu_npc_RW = val; in spu_backing_npc_write()
259 return ctx->csa.lscsa->ls; in spu_backing_get_ls()
[all …]
sched.c
88 ctx->time_slice = SCALE_PRIO(DEF_SPU_TIMESLICE * 4, ctx->prio); in spu_set_timeslice()
90 ctx->time_slice = SCALE_PRIO(DEF_SPU_TIMESLICE, ctx->prio); in spu_set_timeslice()
191 struct spu_context *ctx = spu->ctx; in do_notify_spus_active() local
222 spu->ctx = ctx; in spu_bind_context()
318 if (spu->ctx && spu->ctx->gang && !spu->ctx->aff_offset in aff_ref_location()
491 list_add_tail(&ctx->rq, &spu_prio->runq[ctx->prio]); in __spu_add_to_rq()
778 if (ctx->spu) in spu_activate()
795 runcntl = ctx->ops->runcntl_read(ctx); in spu_activate()
838 ctx = NULL; in grab_runnable_context()
841 return ctx; in grab_runnable_context()
[all …]
file.c
222 local_store = ctx->ops->get_ls(ctx); in spufs_mem_write()
402 *val = ctx->ops->status_read(ctx); in spufs_cntl_get()
416 ctx->ops->runcntl_write(ctx, val); in spufs_cntl_set()
1470 *status = ctx->ops->read_mfc_tagstatus(ctx) & ctx->tagwait; in spufs_read_mfc_tagstatus()
1477 ctx->ops->set_mfc_query(ctx, ctx->tagwait, 1); in spufs_read_mfc_tagstatus()
1597 ctx->ops->set_mfc_query(ctx, ctx->tagwait, 1); in spu_send_mfc_command()
1670 ctx->ops->set_mfc_query(ctx, ctx->tagwait, 2); in spufs_mfc_poll()
1732 ctx->ops->npc_write(ctx, val); in spufs_npc_set()
1740 return ctx->ops->npc_read(ctx); in spufs_npc_get()
2491 ctx->ops->runcntl_read(ctx), in spufs_show_ctx()
[all …]
fault.c
37 ctx->ops->restart_dma(ctx); in spufs_handle_event()
48 ctx->ops->npc_read(ctx) - 4); in spufs_handle_event()
61 spufs_handle_event(ctx, ctx->csa.class_0_dar, in spufs_handle_class0()
65 spufs_handle_event(ctx, ctx->csa.class_0_dar, in spufs_handle_class0()
69 spufs_handle_event(ctx, ctx->csa.class_0_dar, in spufs_handle_class0()
111 dsisr, ctx->state); in spufs_handle_class1()
113 ctx->stats.hash_flt++; in spufs_handle_class1()
118 spu_release(ctx); in spufs_handle_class1()
141 ctx->csa.class_1_dar = ctx->csa.class_1_dsisr = 0; in spufs_handle_class1()
160 if (ctx->spu) in spufs_handle_class1()
[all …]
spufs.h
188 u32(*npc_read) (struct spu_context * ctx);
191 char*(*get_ls) (struct spu_context * ctx);
207 void (*restart_dma)(struct spu_context *ctx);
268 mutex_unlock(&ctx->state_mutex); in spu_release()
274 int put_spu_context(struct spu_context *ctx);
277 void spu_forget(struct spu_context *ctx);
284 void spu_deactivate(struct spu_context *ctx);
285 void spu_yield(struct spu_context *ctx);
314 spu_release(ctx); \
320 __ret = spu_acquire(ctx); \
[all …]
hw_ops.c
26 struct spu *spu = ctx->spu; in spu_hw_mbox_read()
48 struct spu *spu = ctx->spu; in spu_hw_mbox_stat_poll()
84 struct spu *spu = ctx->spu; in spu_hw_ibox_read()
105 struct spu *spu = ctx->spu; in spu_hw_wbox_write()
136 struct spu *spu = ctx->spu; in spu_hw_signal1_type_set()
157 struct spu *spu = ctx->spu; in spu_hw_signal2_type_set()
193 return ctx->spu->local_store; in spu_hw_get_ls()
208 spin_lock_irq(&ctx->spu->register_lock); in spu_hw_runcntl_write()
210 spu_hw_privcntl_write(ctx, in spu_hw_runcntl_write()
227 struct spu *spu = ctx->spu; in spu_hw_master_start()
[all …]
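Across the run.c, file.c, backing_ops.c and hw_ops.c hits, nearly every access goes through ctx->ops, i.e. a per-context operations table: one implementation works on the saved context image (backing_ops.c) and one on live hardware registers (hw_ops.c). A hedged sketch of that indirection, with invented type and field names rather than the real spufs ones:

```c
#include <linux/types.h>

struct ctx;                                  /* forward declaration */

struct ctx_ops {
	u32  (*npc_read)(struct ctx *ctx);   /* shape mirrors the spufs.h hits */
	void (*npc_write)(struct ctx *ctx, u32 val);
};

struct ctx {
	const struct ctx_ops *ops;           /* switched when state moves off/onto hardware */
	u32 saved_npc;                       /* used by the "backing" variant */
};

/* "Backing" implementation: operate on the saved copy of the state. */
static u32 backing_npc_read(struct ctx *ctx)
{
	return ctx->saved_npc;
}

static void backing_npc_write(struct ctx *ctx, u32 val)
{
	ctx->saved_npc = val;
}

static const struct ctx_ops backing_ops = {
	.npc_read  = backing_npc_read,
	.npc_write = backing_npc_write,
};

/* Callers never care which variant is currently installed. */
static void bump_npc(struct ctx *ctx)
{
	ctx->ops->npc_write(ctx, ctx->ops->npc_read(ctx) + 4);
}
```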
/arch/riscv/net/
bpf_jit_comp64.c
172 u64 ip = (u64)(ctx->ro_insns + ctx->ninsns); in emit_addr()
277 ctx); in __build_epilogue()
458 if (addr && ctx->insns && ctx->ro_insns) { in emit_call()
463 ip = (u64)(long)(ctx->ro_insns + ctx->ninsns); in emit_call()
795 if (!ctx->insns || !ctx->ro_insns || !ctx->prog->aux->extable || in add_exception_handler()
809 ex = &ctx->prog->aux->extable[ctx->nexentries]; in add_exception_handler()
1197 im->ip_after_call = ctx->ro_insns + ctx->ninsns; in __arch_prepare_bpf_trampoline()
1218 im->ip_epilogue = ctx->ro_insns + ctx->ninsns; in __arch_prepare_bpf_trampoline()
1349 emit_imm(rd, (ctx->user_vm_start >> 32) << 32, ctx); in bpf_jit_emit_insn()
2223 if (seen_tail_call(ctx) && seen_call(ctx)) in bpf_jit_build_prologue()
[all …]
bpf_jit_comp32.c
567 int e, s = ctx->ninsns; in emit_branch_r64()
645 e = ctx->ninsns; in emit_branch_r64()
654 int e, s = ctx->ninsns; in emit_bcc()
714 e = ctx->ninsns; in emit_bcc()
725 int e, s = ctx->ninsns; in emit_branch_r32()
732 e = ctx->ninsns; in emit_branch_r32()
785 tc_ninsn = insn ? ctx->offset[insn] - ctx->offset[insn - 1] : in emit_bpf_tail_call()
786 ctx->offset[0]; in emit_bpf_tail_call()
901 ctx); in emit_store_r64()
1210 s = ctx->ninsns; in bpf_jit_emit_insn()
[all …]
bpf_jit_core.c
74 ctx = &jit_data->ctx; in bpf_int_jit_compile()
76 if (ctx->offset) { in bpf_int_jit_compile()
78 prog_size = sizeof(*ctx->insns) * ctx->ninsns; in bpf_int_jit_compile()
84 ctx->prog = prog; in bpf_int_jit_compile()
103 ctx->ninsns = 0; in bpf_int_jit_compile()
106 ctx->prologue_len = ctx->ninsns; in bpf_int_jit_compile()
108 if (build_body(ctx, extra_pass, ctx->offset)) { in bpf_int_jit_compile()
113 ctx->epilogue_offset = ctx->ninsns; in bpf_int_jit_compile()
122 prog_size = sizeof(*ctx->insns) * ctx->ninsns; in bpf_int_jit_compile()
162 ctx->ninsns = 0; in bpf_int_jit_compile()
[all …]
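The bpf_jit_core.c hits show the usual two-pass JIT flow: ctx->ninsns is reset and the body is "emitted" once with no output buffer just to count instructions, the image is then sized as sizeof(*ctx->insns) * ctx->ninsns, and a second pass writes the real instructions. A simplified, hypothetical sketch of that flow (invented helper names, not the actual RISC-V JIT):

```c
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/types.h>

struct jit_ctx {
	int ninsns;    /* instructions emitted (or merely counted) so far */
	u32 *insns;    /* NULL during the sizing pass, real buffer afterwards */
};

/* Only write when a buffer exists; always count so both passes agree on offsets. */
static void emit(struct jit_ctx *ctx, u32 insn)
{
	if (ctx->insns)
		ctx->insns[ctx->ninsns] = insn;
	ctx->ninsns++;
}

/* Stand-in body: a real JIT translates each BPF instruction here. */
static void build_body(struct jit_ctx *ctx)
{
	emit(ctx, 0x00000013);    /* RISC-V "nop" encoding, purely illustrative */
	emit(ctx, 0x00000013);
}

static int jit_compile(struct jit_ctx *ctx)
{
	ctx->ninsns = 0;
	ctx->insns = NULL;
	build_body(ctx);                               /* pass 1: size the image */

	ctx->insns = kcalloc(ctx->ninsns, sizeof(*ctx->insns), GFP_KERNEL);
	if (!ctx->insns)
		return -ENOMEM;

	ctx->ninsns = 0;
	build_body(ctx);                               /* pass 2: emit for real */
	return 0;
}
```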
/arch/mips/net/
bpf_jit_comp.c
142 ctx->stack_used = max((int)ctx->stack_used, depth); in push_regs()
187 return (INDEX(ctx->descriptors[ctx->bpf_index + off]) - in get_offset()
556 u32 *descp = &ctx->descriptors[ctx->bpf_index]; in setup_jmp()
686 if (CONVERTED(ctx->descriptors[ctx->bpf_index])) { in finish_jmp()
828 int target = get_target(ctx, ctx->bpf_index + off + 1); in emit_ja()
840 int target = get_target(ctx, ctx->program->len); in emit_exit()
866 ctx->changes += INDEX(*descp) != ctx->jit_index; in build_body()
883 ctx->descriptors[prog->len] = ctx->jit_index; in build_body()
895 ctx->descriptors[i] = INDEX(ctx->descriptors[i]) | flag; in set_convert_flag()
942 memset(&ctx, 0, sizeof(ctx)); in bpf_int_jit_compile()
[all …]
bpf_jit_comp32.c
535 push_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_divmod_r64()
566 pop_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_divmod_r64()
721 push_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_atomic_r32()
784 pop_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_atomic_r32()
806 push_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_atomic_r64()
866 pop_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_atomic_r64()
878 push_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_cmpxchg_r32()
900 pop_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_cmpxchg_r32()
935 pop_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, in emit_cmpxchg_r64()
1448 ctx->stack_size - ctx->saved_size); in build_epilogue()
[all …]
bpf_jit_comp64.c
120 emit_zext(ctx, dst); in emit_zext_ver()
387 LLSC_sync(ctx); in emit_atomic_r64()
427 LLSC_sync(ctx); in emit_cmpxchg_r64()
454 push_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, 0, 0); in emit_call()
462 pop_regs(ctx, ctx->clobbered & JIT_CALLER_REGS, 0, 0); in emit_call()
498 emit(ctx, beqz, tmp, get_offset(ctx, 1)); /* PC += off(1) if tmp == 0*/ in emit_tail_call()
602 push_regs(ctx, ctx->clobbered & JIT_CALLEE_REGS, 0, stack - saved); in build_prologue()
622 pop_regs(ctx, ctx->clobbered & JIT_CALLEE_REGS, 0, in build_epilogue()
623 ctx->stack_size - ctx->saved_size); in build_epilogue()
627 emit(ctx, daddiu, MIPS_R_SP, MIPS_R_SP, ctx->stack_size); in build_epilogue()
[all …]
/arch/loongarch/net/
bpf_jit.c
282 tc_ninsn = insn ? ctx->offset[insn+1] - ctx->offset[insn] : ctx->offset[0]; in emit_bpf_tail_call()
464 if (!ctx->image || !ctx->prog->aux->extable) in add_exception_handler()
1188 ctx->offset[i] = ctx->idx; in build_body()
1194 ctx->offset[i] = ctx->idx; in build_body()
1202 ctx->offset[i] = ctx->idx; in build_body()
1393 branch = (u32 *)ctx->image + ctx->idx; in invoke_bpf_prog()
1792 ctx = jit_data->ctx; in bpf_int_jit_compile()
1800 memset(&ctx, 0, sizeof(ctx)); in bpf_int_jit_compile()
1815 ctx.epilogue_offset = ctx.idx; in bpf_int_jit_compile()
1871 ctx.idx, jit_data->ctx.idx); in bpf_int_jit_compile()
[all …]
bpf_jit.h
28 struct jit_ctx ctx; member
38 if (ctx->image != NULL) { \
39 union loongarch_instruction *insn = &ctx->image[ctx->idx]; \
42 ctx->idx++; \
62 return (ctx->offset[bpf_insn + off] - (ctx->offset[bpf_insn] - 1)); in bpf2la_offset()
67 int from = ctx->idx; in epilogue_offset()
68 int to = ctx->epilogue_offset; in epilogue_offset()
79 emit_insn(ctx, lu32id, reg, 0); in emit_zext_32()
172 emit_zext_32(ctx, rd, is32); in move_imm()
262 emit_insn(ctx, b, jmp_offset); in cond_jmp_offs26()
[all …]
/arch/parisc/net/
bpf_jit_comp32.c
415 pc = (uintptr_t) &ctx->insns[ctx->ninsns]; in emit_jump()
782 e = ctx->ninsns; in emit_branch_r64()
807 s = ctx->ninsns; in emit_bcc()
876 e = ctx->ninsns; in emit_branch_r32()
1538 addr = (uintptr_t) &ctx->insns[ctx->epilogue_offset]; in bpf_jit_build_prologue()
1549 if (REG_WAS_SEEN(ctx, lo(reg)) | REG_WAS_SEEN(ctx, hi(reg))) { in bpf_jit_build_prologue()
1559 if (REG_WAS_SEEN(ctx, lo(reg)) | REG_WAS_SEEN(ctx, hi(reg))) { in bpf_jit_build_prologue()
1569 if (REG_WAS_SEEN(ctx, lo(reg)) | REG_WAS_SEEN(ctx, hi(reg))) { in bpf_jit_build_prologue()
1579 if (REG_WAS_SEEN(ctx, lo(reg)) | REG_WAS_SEEN(ctx, hi(reg))) { in bpf_jit_build_prologue()
1589 if (REG_WAS_SEEN(ctx, lo(reg)) | REG_WAS_SEEN(ctx, hi(reg))) { in bpf_jit_build_prologue()
[all …]
bpf_jit_comp64.c
72 REG_SET_SEEN(ctx, rd); in emit_hppa_copy()
75 REG_SET_SEEN(ctx, rs); in emit_hppa_copy()
195 pc = (uintptr_t) &ctx->insns[ctx->ninsns]; in emit_jump()
268 s = ctx->ninsns; in emit_branch()
320 e = ctx->ninsns; in emit_branch()
904 ctx); in bpf_jit_emit_insn()
932 s = ctx->ninsns; in bpf_jit_emit_insn()
945 e = ctx->ninsns; in bpf_jit_emit_insn()
954 s = ctx->ninsns; in bpf_jit_emit_insn()
963 e = ctx->ninsns; in bpf_jit_emit_insn()
[all …]
bpf_jit_core.c
74 ctx = &jit_data->ctx; in bpf_int_jit_compile()
78 prog_size = sizeof(*ctx->insns) * ctx->ninsns; in bpf_int_jit_compile()
82 ctx->prog = prog; in bpf_int_jit_compile()
96 if (build_body(ctx, extra_pass, ctx->offset)) { in bpf_int_jit_compile()
100 ctx->body_len = ctx->ninsns; in bpf_int_jit_compile()
102 ctx->prologue_len = ctx->ninsns - ctx->body_len; in bpf_int_jit_compile()
103 ctx->epilogue_offset = ctx->ninsns; in bpf_int_jit_compile()
112 prog_size = sizeof(*ctx->insns) * ctx->ninsns; in bpf_int_jit_compile()
146 ctx->ninsns = 0; in bpf_int_jit_compile()
167 bpf_flush_icache(jit_data->header, ctx->insns + ctx->ninsns); in bpf_int_jit_compile()
[all …]
/arch/arm/net/
bpf_jit_32.c
277 ctx->target[ctx->idx] = inst; in _emit()
279 ctx->idx++; in _emit()
420 while ((i < ctx->imm_count) && ctx->imms[i]) { in imm_offset()
430 offset = ctx->offsets[ctx->prog->len - 1] * 4; in imm_offset()
2114 ctx->offsets[i] = ctx->idx; in build_body()
2119 ctx->offsets[i] = ctx->idx; in build_body()
2174 memset(&ctx, 0, sizeof(ctx)); in bpf_int_jit_compile()
2204 ctx.prologue_bytes = (ctx.idx - tmp_idx) * 4; in bpf_int_jit_compile()
2206 ctx.epilogue_offset = ctx.idx; in bpf_int_jit_compile()
2211 ctx.epilogue_bytes = (ctx.idx - tmp_idx) * 4; in bpf_int_jit_compile()
[all …]
/arch/arm64/net/
bpf_jit_comp.c
112 if (ctx->image != NULL && ctx->write) in emit()
120 if (ctx->image != NULL && ctx->write) in emit_u32_data()
121 ctx->image[ctx->idx] = data; in emit_u32_data()
646 branch1 = ctx->image + ctx->idx; in emit_bpf_tail_call()
656 branch2 = ctx->image + ctx->idx; in emit_bpf_tail_call()
1866 ctx->offset[i] = ctx->idx; in build_body()
1870 ctx->offset[i] = ctx->idx; in build_body()
1881 ctx->offset[i] = ctx->idx; in build_body()
2014 ctx = jit_data->ctx; in bpf_int_jit_compile()
2024 memset(&ctx, 0, sizeof(ctx)); in bpf_int_jit_compile()
[all …]
/arch/arc/net/
bpf_jit_core.c
155 bpf_jit_dump(ctx->prog->len, ctx->jit.len, pass, ctx->jit.buf); in jit_dump()
162 memset(ctx, 0, sizeof(*ctx)); in jit_ctx_init()
170 ctx->blinded = (ctx->prog != ctx->orig_prog); in jit_ctx_init()
175 ctx->is_extra_pass = ctx->prog->jited; in jit_ctx_init()
176 ctx->user_bpf_prog = ctx->prog->is_func; in jit_ctx_init()
232 if (!ctx->success && ctx->bpf_header) { in jit_ctx_cleanup()
296 return ctx->emit ? (ctx->jit.buf + ctx->jit.index) : NULL; in effective_jit_buf()
1071 ctx->bpf2insn[i] = ctx->jit.len; in handle_body()
1115 ctx->bpf2insn = kcalloc(ctx->prog->len, sizeof(ctx->jit.len), in jit_prepare_early_mem_alloc()
1135 ctx->bpf_header = bpf_jit_binary_alloc(ctx->jit.len, &ctx->jit.buf, in jit_prepare_final_mem_alloc()
[all …]
/arch/mips/pci/
pci-alchemy.c
96 ctx->last_elo0 = ctx->last_elo1 = ~0; in alchemy_pci_wired_entry()
310 if (!ctx) in alchemy_pci_suspend()
313 ctx->pm[0] = __raw_readl(ctx->regs + PCI_REG_CMEM); in alchemy_pci_suspend()
320 ctx->pm[7] = __raw_readl(ctx->regs + PCI_REG_ID); in alchemy_pci_suspend()
323 ctx->pm[10] = __raw_readl(ctx->regs + PCI_REG_MBAR); in alchemy_pci_suspend()
332 if (!ctx) in alchemy_pci_resume()
335 __raw_writel(ctx->pm[0], ctx->regs + PCI_REG_CMEM); in alchemy_pci_resume()
341 __raw_writel(ctx->pm[7], ctx->regs + PCI_REG_ID); in alchemy_pci_resume()
344 __raw_writel(ctx->pm[10], ctx->regs + PCI_REG_MBAR); in alchemy_pci_resume()
379 ctx = kzalloc(sizeof(*ctx), GFP_KERNEL); in alchemy_pci_probe()
[all …]
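The pci-alchemy.c hits are a standard suspend/resume idiom: controller registers are read into a ctx->pm[] array on suspend and written back on resume. An illustrative sketch under assumed register offsets (the real driver saves specific named registers such as PCI_REG_CMEM and PCI_REG_ID rather than a simple range):

```c
#include <linux/io.h>
#include <linux/types.h>

#define NUM_SAVED_REGS 12            /* hypothetical count, for illustration only */

struct pci_ctx {
	void __iomem *regs;          /* mapped controller registers */
	u32 pm[NUM_SAVED_REGS];      /* register values saved across suspend */
};

static void pci_ctx_suspend(struct pci_ctx *ctx)
{
	int i;

	for (i = 0; i < NUM_SAVED_REGS; i++)
		ctx->pm[i] = __raw_readl(ctx->regs + i * 4);   /* save state */
}

static void pci_ctx_resume(struct pci_ctx *ctx)
{
	int i;

	for (i = 0; i < NUM_SAVED_REGS; i++)
		__raw_writel(ctx->pm[i], ctx->regs + i * 4);   /* restore state */
}
```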
/arch/sparc/net/
bpf_jit_comp_64.c
239 ctx->image[ctx->idx] = insn; in emit()
863 emit_branch(BGEU, ctx->idx, ctx->idx + OFFSET1, ctx); in emit_tail_call()
870 emit_branch(BGEU, ctx->idx, ctx->idx + OFFSET2, ctx); in emit_tail_call()
884 emit_branch(BE, ctx->idx, ctx->idx + OFFSET3, ctx); in emit_tail_call()
1171 emit_branch(BA, ctx->idx, ctx->offset[i + off], ctx); in build_insn()
1238 emit_branch(BA, ctx->idx, ctx->epilogue_offset, ctx); in build_insn()
1451 ctx->offset[i] = ctx->idx; in build_body()
1454 ctx->offset[i] = ctx->idx; in build_body()
1516 ctx = jit_data->ctx; in bpf_int_jit_compile()
1526 memset(&ctx, 0, sizeof(ctx)); in bpf_int_jit_compile()
[all …]
/arch/loongarch/include/asm/
kvm_mmu.h
92 return ctx->flag & _KVM_FLUSH_PGTABLE; in kvm_need_flush()
107 size = 0x1UL << ctx->pgtable_shift; in kvm_pgtable_addr_end()
114 if (!ctx || ctx->level == 0) in kvm_pte_present()
117 return *entry != ctx->invalid_entry; in kvm_pte_present()
122 return *entry == ctx->invalid_entry; in kvm_pte_none()
127 ctx->level--; in kvm_ptw_enter()
128 ctx->pgtable_shift = ctx->pte_shifts[ctx->level]; in kvm_ptw_enter()
129 ctx->invalid_entry = ctx->invalid_ptes[ctx->level]; in kvm_ptw_enter()
134 ctx->level++; in kvm_ptw_exit()
135 ctx->pgtable_shift = ctx->pte_shifts[ctx->level]; in kvm_ptw_exit()
[all …]
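The kvm_mmu.h hits describe a page-table-walk context whose per-level parameters (shift and invalid-entry marker) are swapped in as kvm_ptw_enter()/kvm_ptw_exit() descend to a child table and return to its parent. A small hedged sketch of that idea, with made-up field names and no real LoongArch shifts or invalid-PTE encodings:

```c
#include <linux/types.h>

#define PTW_LEVELS 4                            /* assumed depth, illustrative only */

struct ptw_ctx {
	int level;                              /* current level, 0 = leaf */
	int pgtable_shift;                      /* address bits covered below this level */
	unsigned long invalid_entry;            /* what an empty entry looks like here */
	int pte_shifts[PTW_LEVELS];             /* per-level shift table */
	unsigned long invalid_ptes[PTW_LEVELS]; /* per-level "invalid" markers */
};

/* Descend one level: adopt the child level's shift and invalid marker. */
static void ptw_enter(struct ptw_ctx *ctx)
{
	ctx->level--;
	ctx->pgtable_shift = ctx->pte_shifts[ctx->level];
	ctx->invalid_entry = ctx->invalid_ptes[ctx->level];
}

/* Return to the parent level, restoring its cached parameters. */
static void ptw_exit(struct ptw_ctx *ctx)
{
	ctx->level++;
	ctx->pgtable_shift = ctx->pte_shifts[ctx->level];
	ctx->invalid_entry = ctx->invalid_ptes[ctx->level];
}

static bool ptw_pte_none(struct ptw_ctx *ctx, unsigned long *entry)
{
	return *entry == ctx->invalid_entry;    /* mirrors kvm_pte_none() above */
}
```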

Completed in 138 milliseconds
