Lines matching refs:op (every reference to the local/parameter 'op' in the kprobe-optimization code of kernel/kprobes.c)
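
Nearly every hit below recovers the enclosing struct optimized_kprobe from the struct kprobe pointer it is handed, via container_of() on the embedded 'kp' member. The following is a minimal, self-contained sketch of that recovery step only; the struct bodies and the container_of() definition here are simplified stand-ins, not the kernel's own (those live in include/linux/kprobes.h and the kernel headers):

/* Simplified stand-ins -- just enough to show the container_of() recovery
 * pattern that recurs throughout the hits below. */
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct kprobe {
	void *addr;
};

struct optimized_kprobe {
	int placeholder;	/* stands in for the real list/optinsn fields */
	struct kprobe kp;	/* embedded member named 'kp', as in the listing */
};

int main(void)
{
	struct optimized_kprobe storage = { 0 };
	struct kprobe *p = &storage.kp;	/* callers are handed only the kprobe */

	/* Same step as lines 443, 455, 504, ...: recover the enclosing optprobe. */
	struct optimized_kprobe *op = container_of(p, struct optimized_kprobe, kp);

	printf("recovered enclosing struct: %s\n", op == &storage ? "yes" : "no");
	return 0;
}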

441 struct optimized_kprobe *op; in free_aggr_kprobe() local
443 op = container_of(p, struct optimized_kprobe, kp); in free_aggr_kprobe()
444 arch_remove_optimized_kprobe(op); in free_aggr_kprobe()
446 kfree(op); in free_aggr_kprobe()
452 struct optimized_kprobe *op; in kprobe_optready() local
455 op = container_of(p, struct optimized_kprobe, kp); in kprobe_optready()
456 return arch_prepared_optinsn(&op->optinsn); in kprobe_optready()
465 struct optimized_kprobe *op; in kprobe_disarmed() local
471 op = container_of(p, struct optimized_kprobe, kp); in kprobe_disarmed()
473 return kprobe_disabled(p) && list_empty(&op->list); in kprobe_disarmed()
479 struct optimized_kprobe *op; in kprobe_queued() local
482 op = container_of(p, struct optimized_kprobe, kp); in kprobe_queued()
483 if (!list_empty(&op->list)) in kprobe_queued()
497 struct optimized_kprobe *op; in get_optimized_kprobe() local
504 op = container_of(p, struct optimized_kprobe, kp); in get_optimized_kprobe()
505 if (arch_within_optimized_kprobe(op, addr)) in get_optimized_kprobe()
554 struct optimized_kprobe *op, *tmp; in do_unoptimize_kprobes() local
564 list_for_each_entry_safe(op, tmp, &freeing_list, list) { in do_unoptimize_kprobes()
566 op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED; in do_unoptimize_kprobes()
568 if (kprobe_disabled(&op->kp) && !kprobe_gone(&op->kp)) in do_unoptimize_kprobes()
569 arch_disarm_kprobe(&op->kp); in do_unoptimize_kprobes()
570 if (kprobe_unused(&op->kp)) { in do_unoptimize_kprobes()
576 hlist_del_rcu(&op->kp.hlist); in do_unoptimize_kprobes()
578 list_del_init(&op->list); in do_unoptimize_kprobes()
585 struct optimized_kprobe *op, *tmp; in do_free_cleaned_kprobes() local
587 list_for_each_entry_safe(op, tmp, &freeing_list, list) { in do_free_cleaned_kprobes()
588 list_del_init(&op->list); in do_free_cleaned_kprobes()
589 if (WARN_ON_ONCE(!kprobe_unused(&op->kp))) { in do_free_cleaned_kprobes()
596 free_aggr_kprobe(&op->kp); in do_free_cleaned_kprobes()
667 bool optprobe_queued_unopt(struct optimized_kprobe *op) in optprobe_queued_unopt() argument
672 if (op == _op) in optprobe_queued_unopt()
682 struct optimized_kprobe *op; in optimize_kprobe() local
693 op = container_of(p, struct optimized_kprobe, kp); in optimize_kprobe()
696 if (arch_check_optimized_kprobe(op) < 0) in optimize_kprobe()
700 if (op->kp.flags & KPROBE_FLAG_OPTIMIZED) { in optimize_kprobe()
701 if (optprobe_queued_unopt(op)) { in optimize_kprobe()
703 list_del_init(&op->list); in optimize_kprobe()
707 op->kp.flags |= KPROBE_FLAG_OPTIMIZED; in optimize_kprobe()
713 if (WARN_ON_ONCE(!list_empty(&op->list))) in optimize_kprobe()
716 list_add(&op->list, &optimizing_list); in optimize_kprobe()
721 static void force_unoptimize_kprobe(struct optimized_kprobe *op) in force_unoptimize_kprobe() argument
724 arch_unoptimize_kprobe(op); in force_unoptimize_kprobe()
725 op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED; in force_unoptimize_kprobe()
731 struct optimized_kprobe *op; in unoptimize_kprobe() local
736 op = container_of(p, struct optimized_kprobe, kp); in unoptimize_kprobe()
740 if (!list_empty(&op->list)) { in unoptimize_kprobe()
741 if (optprobe_queued_unopt(op)) { in unoptimize_kprobe()
748 force_unoptimize_kprobe(op); in unoptimize_kprobe()
749 list_move(&op->list, &freeing_list); in unoptimize_kprobe()
753 list_del_init(&op->list); in unoptimize_kprobe()
754 op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED; in unoptimize_kprobe()
762 force_unoptimize_kprobe(op); in unoptimize_kprobe()
764 list_add(&op->list, &unoptimizing_list); in unoptimize_kprobe()
772 struct optimized_kprobe *op; in reuse_unused_kprobe() local
778 op = container_of(ap, struct optimized_kprobe, kp); in reuse_unused_kprobe()
779 WARN_ON_ONCE(list_empty(&op->list)); in reuse_unused_kprobe()
793 struct optimized_kprobe *op; in kill_optimized_kprobe() local
795 op = container_of(p, struct optimized_kprobe, kp); in kill_optimized_kprobe()
796 if (!list_empty(&op->list)) in kill_optimized_kprobe()
798 list_del_init(&op->list); in kill_optimized_kprobe()
799 op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED; in kill_optimized_kprobe()
807 if (optprobe_queued_unopt(op)) in kill_optimized_kprobe()
808 list_move(&op->list, &freeing_list); in kill_optimized_kprobe()
812 arch_remove_optimized_kprobe(op); in kill_optimized_kprobe()
816 void __prepare_optimized_kprobe(struct optimized_kprobe *op, struct kprobe *p) in __prepare_optimized_kprobe() argument
819 arch_prepare_optimized_kprobe(op, p); in __prepare_optimized_kprobe()
825 struct optimized_kprobe *op; in prepare_optimized_kprobe() local
827 op = container_of(p, struct optimized_kprobe, kp); in prepare_optimized_kprobe()
828 __prepare_optimized_kprobe(op, p); in prepare_optimized_kprobe()
834 struct optimized_kprobe *op; in alloc_aggr_kprobe() local
836 op = kzalloc(sizeof(struct optimized_kprobe), GFP_KERNEL); in alloc_aggr_kprobe()
837 if (!op) in alloc_aggr_kprobe()
840 INIT_LIST_HEAD(&op->list); in alloc_aggr_kprobe()
841 op->kp.addr = p->addr; in alloc_aggr_kprobe()
842 __prepare_optimized_kprobe(op, p); in alloc_aggr_kprobe()
844 return &op->kp; in alloc_aggr_kprobe()
856 struct optimized_kprobe *op; in try_to_optimize_kprobe() local
871 op = container_of(ap, struct optimized_kprobe, kp); in try_to_optimize_kprobe()
872 if (!arch_prepared_optinsn(&op->optinsn)) { in try_to_optimize_kprobe()
874 arch_remove_optimized_kprobe(op); in try_to_optimize_kprobe()
875 kfree(op); in try_to_optimize_kprobe()
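
The hits in do_unoptimize_kprobes() and do_free_cleaned_kprobes() (lines 554-596) walk freeing_list with a "safe" iterator so that entries can be unlinked mid-walk. Below is a minimal userspace model of that idea, with hand-rolled list helpers standing in for the kernel's include/linux/list.h; it is a sketch of the iteration pattern, not the kernel's implementation:

/* Toy doubly-linked list, modelled loosely on the kernel's list_head API. */
#include <stddef.h>
#include <stdio.h>

struct list_head { struct list_head *prev, *next; };

static void INIT_LIST_HEAD(struct list_head *h) { h->prev = h->next = h; }
static int list_empty(const struct list_head *h) { return h->next == h; }

static void list_add(struct list_head *n, struct list_head *h)
{
	n->next = h->next; n->prev = h;
	h->next->prev = n; h->next = n;
}

static void list_del_init(struct list_head *n)
{
	n->prev->next = n->next; n->next->prev = n->prev;
	INIT_LIST_HEAD(n);
}

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct optprobe_stub {		/* stand-in for struct optimized_kprobe */
	struct list_head list;
	int id;
};

int main(void)
{
	struct list_head freeing_list;
	struct optprobe_stub a = { .id = 1 }, b = { .id = 2 };
	struct list_head *pos, *tmp;

	INIT_LIST_HEAD(&freeing_list);
	INIT_LIST_HEAD(&a.list);
	INIT_LIST_HEAD(&b.list);
	list_add(&a.list, &freeing_list);
	list_add(&b.list, &freeing_list);

	/* "Safe" walk: the next pointer is saved in 'tmp' before the current
	 * entry is unlinked, like list_for_each_entry_safe() at lines 564/587. */
	for (pos = freeing_list.next; pos != &freeing_list; pos = tmp) {
		struct optprobe_stub *op =
			container_of(pos, struct optprobe_stub, list);
		tmp = pos->next;
		list_del_init(pos);
		printf("released stub %d\n", op->id);
	}
	printf("freeing_list empty: %d\n", list_empty(&freeing_list));
	return 0;
}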