Lines matching refs: op
50 struct instruction_op op; in can_optimize() local
88 analyse_instr(&op, &regs, ppc_inst_read(p->ainsn.insn)) == 1) { in can_optimize()
89 emulate_update_regs(&regs, &op); in can_optimize()
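Read together, the can_optimize() matches show the emulation test at the heart of the optimization decision: a struct instruction_op is decoded from the probed instruction by analyse_instr(), and a return value of 1 means the instruction's effect can be computed as a pure register update, which emulate_update_regs() then applies to a scratch pt_regs. A minimal sketch of how these lines fit together; the return type, the nip bookkeeping, and the conditional-branch guard are assumptions modeled on the powerpc sstep API, not shown in the matches:

    static unsigned long can_optimize(struct kprobe *p)
    {
            struct pt_regs regs;        /* scratch register set, not live state */
            struct instruction_op op;   /* decoded operation from analyse_instr() */
            unsigned long nip = 0;

            /* ... initialization of regs from the probed context elided ... */

            /*
             * analyse_instr() returns 1 when the instruction can be emulated
             * purely by updating regs; emulate_update_regs() applies that
             * effect, leaving regs.nip at the post-instruction address the
             * detour must return to.
             */
            if (!is_conditional_branch(ppc_inst_read(p->ainsn.insn)) &&
                analyse_instr(&op, &regs, ppc_inst_read(p->ainsn.insn)) == 1) {
                    emulate_update_regs(&regs, &op);
                    nip = regs.nip;
            }

            return nip;
    }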
96 static void optimized_callback(struct optimized_kprobe *op, in optimized_callback() argument
100 if (kprobe_disabled(&op->kp)) in optimized_callback()
106 kprobes_inc_nmissed_count(&op->kp); in optimized_callback()
108 __this_cpu_write(current_kprobe, &op->kp); in optimized_callback()
109 regs_set_return_ip(regs, (unsigned long)op->kp.addr); in optimized_callback()
111 opt_pre_handler(&op->kp, regs); in optimized_callback()
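The optimized_callback() matches outline the detour handler's control flow: return immediately if the probe was disabled while the detour branch was still in place, count a missed hit if another kprobe is already running on this CPU, and otherwise record the probe in current_kprobe, rewind the saved IP to the probed address so handlers see a consistent view, and call opt_pre_handler(). A sketch of that flow; the preempt_disable()/preempt_enable() bracketing and the clearing of current_kprobe are assumptions modeled on the generic kprobes pattern:

    static void optimized_callback(struct optimized_kprobe *op,
                                   struct pt_regs *regs)
    {
            /* Possible if op is in the middle of delayed unoptimizing */
            if (kprobe_disabled(&op->kp))
                    return;

            preempt_disable();
            if (kprobe_running()) {
                    /* Re-entrancy from another probe: just record the miss */
                    kprobes_inc_nmissed_count(&op->kp);
            } else {
                    __this_cpu_write(current_kprobe, &op->kp);
                    /* Handlers expect the IP to point at the probed address */
                    regs_set_return_ip(regs, (unsigned long)op->kp.addr);
                    opt_pre_handler(&op->kp, regs);
                    __this_cpu_write(current_kprobe, NULL);
            }
            preempt_enable();
    }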
119 void arch_remove_optimized_kprobe(struct optimized_kprobe *op) in arch_remove_optimized_kprobe() argument
121 if (op->optinsn.insn) { in arch_remove_optimized_kprobe()
122 free_optinsn_slot(op->optinsn.insn, 1); in arch_remove_optimized_kprobe()
123 op->optinsn.insn = NULL; in arch_remove_optimized_kprobe()
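These three lines are effectively the entire teardown path: if a detour buffer was allocated for this probe, hand it back to the optinsn slot cache and clear the pointer so a repeated call cannot double-free it. Reassembled, with only the function braces added:

    void arch_remove_optimized_kprobe(struct optimized_kprobe *op)
    {
            if (op->optinsn.insn) {
                    /* Return the detour buffer to the insn slot cache */
                    free_optinsn_slot(op->optinsn.insn, 1);
                    op->optinsn.insn = NULL;
            }
    }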
154 int arch_prepare_optimized_kprobe(struct optimized_kprobe *op, struct kprobe *p) in arch_prepare_optimized_kprobe() argument
204 patch_imm_load_insns((unsigned long)op, 3, buff + TMPL_OP_IDX); in arch_prepare_optimized_kprobe()
241 op->optinsn.insn = buff; in arch_prepare_optimized_kprobe()
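Only three lines of arch_prepare_optimized_kprobe() match, but they mark the key steps: a detour buffer is filled in from a template, the address of op itself is materialized into register 3 at the template's TMPL_OP_IDX slot so the detour can invoke optimized_callback(op, regs), and the finished buffer is published through op->optinsn.insn. A skeleton placing the matched lines; the allocation via get_optinsn_slot(), the error returns, and the elided template copy and branch fixups are assumptions:

    int arch_prepare_optimized_kprobe(struct optimized_kprobe *op, struct kprobe *p)
    {
            kprobe_opcode_t *buff;

            /* ... verify the probed instruction can be optimized ... */

            buff = get_optinsn_slot();
            if (!buff)
                    return -ENOMEM;

            /* ... copy the detour template into buff ... */

            /*
             * Load the address of 'op' into register 3 inside the template,
             * so the detour calls optimized_callback() with the right probe.
             */
            patch_imm_load_insns((unsigned long)op, 3, buff + TMPL_OP_IDX);

            /* ... patch in the emulated instruction and the branch back ... */

            op->optinsn.insn = buff;
            return 0;
    }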
261 int arch_check_optimized_kprobe(struct optimized_kprobe *op) in arch_check_optimized_kprobe() argument
269 struct optimized_kprobe *op; in arch_optimize_kprobes() local
272 list_for_each_entry_safe(op, tmp, oplist, list) { in arch_optimize_kprobes()
277 memcpy(op->optinsn.copied_insn, op->kp.addr, RELATIVEJUMP_SIZE); in arch_optimize_kprobes()
278 create_branch(&instr, op->kp.addr, (unsigned long)op->optinsn.insn, 0); in arch_optimize_kprobes()
279 patch_instruction(op->kp.addr, instr); in arch_optimize_kprobes()
280 list_del_init(&op->list); in arch_optimize_kprobes()
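The arch_optimize_kprobes() matches show the patching sequence applied to each queued probe: save the RELATIVEJUMP_SIZE bytes the jump will overwrite into op->optinsn.copied_insn (needed later to unoptimize), build a branch from the probe address to the detour buffer with create_branch(), write it with patch_instruction(), and drop the probe from the pending list. Reassembled around the matched lines; the local declarations are assumptions:

    void arch_optimize_kprobes(struct list_head *oplist)
    {
            ppc_inst_t instr;
            struct optimized_kprobe *op, *tmp;

            list_for_each_entry_safe(op, tmp, oplist, list) {
                    /* Back up the instruction the branch will replace */
                    memcpy(op->optinsn.copied_insn, op->kp.addr, RELATIVEJUMP_SIZE);
                    /* Branch from the probed address into the detour buffer */
                    create_branch(&instr, op->kp.addr, (unsigned long)op->optinsn.insn, 0);
                    patch_instruction(op->kp.addr, instr);
                    list_del_init(&op->list);
            }
    }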
284 void arch_unoptimize_kprobe(struct optimized_kprobe *op) in arch_unoptimize_kprobe() argument
286 arch_arm_kprobe(&op->kp); in arch_unoptimize_kprobe()
291 struct optimized_kprobe *op; in arch_unoptimize_kprobes() local
294 list_for_each_entry_safe(op, tmp, oplist, list) { in arch_unoptimize_kprobes()
295 arch_unoptimize_kprobe(op); in arch_unoptimize_kprobes()
296 list_move(&op->list, done_list); in arch_unoptimize_kprobes()
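The two unoptimize paths are short and complementary: arch_unoptimize_kprobe() re-arms the ordinary kprobe, so arch_arm_kprobe() writes the trap instruction back over the detour branch in a single instruction patch, while arch_unoptimize_kprobes() walks the pending list, unoptimizes each entry, and moves it to done_list for the core code to reclaim. Reassembled from the matched lines; the declarations are assumptions:

    void arch_unoptimize_kprobe(struct optimized_kprobe *op)
    {
            /* Re-arming the plain kprobe overwrites the detour branch */
            arch_arm_kprobe(&op->kp);
    }

    void arch_unoptimize_kprobes(struct list_head *oplist,
                                 struct list_head *done_list)
    {
            struct optimized_kprobe *op, *tmp;

            list_for_each_entry_safe(op, tmp, oplist, list) {
                    arch_unoptimize_kprobe(op);
                    /* Hand the entry back for the caller to free */
                    list_move(&op->list, done_list);
            }
    }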
300 int arch_within_optimized_kprobe(struct optimized_kprobe *op, kprobe_opcode_t *addr) in arch_within_optimized_kprobe() argument
302 return (op->kp.addr <= addr && in arch_within_optimized_kprobe()
303 op->kp.addr + (RELATIVEJUMP_SIZE / sizeof(kprobe_opcode_t)) > addr); in arch_within_optimized_kprobe()
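The final match is the collision check: an address is "within" an optimized kprobe if it falls inside the RELATIVEJUMP_SIZE bytes that the detour branch overwrote, with the size converted to kprobe_opcode_t units because kp.addr is a kprobe_opcode_t pointer. On powerpc the detour branch replaces a single 4-byte instruction, so the range works out to one opcode slot. Reassembled, with only the braces added:

    int arch_within_optimized_kprobe(struct optimized_kprobe *op, kprobe_opcode_t *addr)
    {
            /* True if addr lies in the instruction(s) replaced by the branch */
            return (op->kp.addr <= addr &&
                    op->kp.addr + (RELATIVEJUMP_SIZE / sizeof(kprobe_opcode_t)) > addr);
    }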