/kernel/dma/

mapping.c
    121   const struct dma_map_ops *ops)                      in dma_go_direct()
    144   const struct dma_map_ops *ops)                      in dma_alloc_direct()
    150   const struct dma_map_ops *ops)                      in dma_map_direct()
    159   const struct dma_map_ops *ops = get_dma_ops(dev);   in dma_map_page_attrs()         (local)
    186   const struct dma_map_ops *ops = get_dma_ops(dev);   in dma_unmap_page_attrs()       (local)
    204   const struct dma_map_ops *ops = get_dma_ops(dev);   in __dma_map_sg_attrs()         (local)
    306   const struct dma_map_ops *ops = get_dma_ops(dev);   in dma_unmap_sg_attrs()         (local)
    324   const struct dma_map_ops *ops = get_dma_ops(dev);   in dma_map_resource()           (local)
    348   const struct dma_map_ops *ops = get_dma_ops(dev);   in dma_unmap_resource()         (local)
    366   const struct dma_map_ops *ops = get_dma_ops(dev);   in __dma_sync_single_for_cpu()  (local)
    [all …]
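Every mapping.c site listed here is an instance of one dispatch pattern: look up the device's dma_map_ops and fall back to the dma-direct path when none are installed. A simplified sketch of that pattern, with the IOMMU/arch fast paths, the CONFIG_DMA_OPS_BYPASS handling inside dma_map_direct(), and error checking omitted (note dma_direct_map_page() is internal to kernel/dma/direct.h, so this only builds inside kernel/dma/):

#include <linux/dma-map-ops.h>          /* get_dma_ops(), struct dma_map_ops */

/* Simplified version of the branch taken by dma_map_page_attrs(). */
static dma_addr_t sketch_map_page(struct device *dev, struct page *page,
                                  unsigned long offset, size_t size,
                                  enum dma_data_direction dir,
                                  unsigned long attrs)
{
        const struct dma_map_ops *ops = get_dma_ops(dev);

        if (!ops)       /* dma_map_direct() reduces to this in the common case */
                return dma_direct_map_page(dev, page, offset, size, dir, attrs);

        return ops->map_page(dev, page, offset, size, dir, attrs);
}

The unmap and sync helpers at lines 186-366 mirror the same ops-vs-direct check on the way back out.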

ops_helpers.c
    65    const struct dma_map_ops *ops = get_dma_ops(dev);   in dma_common_alloc_pages()  (local)
    92    const struct dma_map_ops *ops = get_dma_ops(dev);   in dma_common_free_pages()   (local)

/kernel/trace/

trace_functions.c
    58    struct ftrace_ops *ops;                    in ftrace_allocate_ftrace_ops()   (local)
    739   struct ftrace_probe_ops *ops,              in ftrace_traceon_print()
    747   struct ftrace_probe_ops *ops, void *data)  in ftrace_traceoff_print()
    754   struct ftrace_probe_ops *ops, void *data)  in ftrace_stacktrace_print()
    761   struct ftrace_probe_ops *ops, void *data)  in ftrace_dump_print()
    768   struct ftrace_probe_ops *ops, void *data)  in ftrace_cpudump_print()
    854   struct ftrace_probe_ops *ops,              in ftrace_trace_probe_callback()
    895   struct ftrace_probe_ops *ops;              in ftrace_trace_onoff_callback()  (local)
    914   struct ftrace_probe_ops *ops;              in ftrace_stacktrace_callback()   (local)
    929   struct ftrace_probe_ops *ops;              in ftrace_dump_callback()         (local)
    [all …]

ftrace_internal.h
    24    # define ftrace_startup(ops, command) \    (argument)
    31    # define ftrace_shutdown(ops, command) \   (argument)
    39    ftrace_ops_test(struct ftrace_ops *ops, unsigned long ip, void *regs)  in ftrace_ops_test()
    43    static inline int ftrace_startup_subops(struct ftrace_ops *ops, struct ftrace_ops *subops, int comm…   in ftrace_startup_subops()
    47    static inline int ftrace_shutdown_subops(struct ftrace_ops *ops, struct ftrace_ops *subops, int com…   in ftrace_shutdown_subops()

ftrace.c
    288   struct ftrace_ops *ops)         in add_ftrace_ops()
    302   struct ftrace_ops *ops)         in remove_ftrace_ops()
    1047  struct ftrace_ops ops;          (member)
    1688  struct ftrace_ops *ops;         in test_rec_ops_needs_regs()  (local)
    2186  struct ftrace_ops *ops = NULL;  in ftrace_bug()               (local)
    2610  struct ftrace_ops *ops;         in ftrace_get_addr_new()      (local)
    2651  struct ftrace_ops *ops;         in ftrace_get_addr_curr()     (local)
    3579  struct ftrace_ops *ops)         in rebuild_hashes()
    3905  struct ftrace_ops *ops;         (member)
    4443  struct ftrace_ops *ops;         in t_show()                   (local)
    [all …]

trace_event_perf.c
    436   struct ftrace_ops *ops, struct ftrace_regs *fregs)  in perf_ftrace_function_call()
    489   struct ftrace_ops *ops = &event->ftrace_ops;         in perf_ftrace_function_register()    (local)
    499   struct ftrace_ops *ops = &event->ftrace_ops;         in perf_ftrace_function_unregister()  (local)
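perf_ftrace_function_register() and _unregister() above simply point event->ftrace_ops.func at perf_ftrace_function_call() and hand the ops to the ftrace core; any module can use the same interface. A minimal sketch (the traced symbol and all my_* names are illustrative, not taken from the listing):

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>
#include <linux/ftrace.h>

/* Called on every hit of the functions this ops is attached to; the
 * signature matches perf_ftrace_function_call() at line 436 above. */
static void notrace my_trace_callback(unsigned long ip, unsigned long parent_ip,
                                      struct ftrace_ops *op,
                                      struct ftrace_regs *fregs)
{
        trace_printk("hit %pS called from %pS\n",
                     (void *)ip, (void *)parent_ip);
}

static struct ftrace_ops my_ops = {
        .func = my_trace_callback,
        /* no flags: the core wraps the callback in its default
         * recursion protection, which is the safe choice here */
};

static int __init my_ftrace_init(void)
{
        int ret;

        /* Attach to a single function rather than every traced one. */
        ret = ftrace_set_filter(&my_ops, "schedule_timeout",
                                strlen("schedule_timeout"), 0);
        if (ret)
                return ret;

        return register_ftrace_function(&my_ops);
}

static void __exit my_ftrace_exit(void)
{
        unregister_ftrace_function(&my_ops);
}

module_init(my_ftrace_init);
module_exit(my_ftrace_exit);
MODULE_LICENSE("GPL");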

trace_dynevent.h
    63    struct dyn_event_operations *ops;   (member)
    69    int dyn_event_init(struct dyn_event *ev, struct dyn_event_operations *ops)  in dyn_event_init()

trace_dynevent.c
    59    int dyn_event_register(struct dyn_event_operations *ops)  in dyn_event_register()
    135   struct dyn_event_operations *ops;                         in create_dyn_event()  (local)
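create_dyn_event() at line 135 walks the registered dyn_event_operations until one backend accepts the command written to dynamic_events; that is how the kprobe, uprobe and synthetic event parsers coexist. A registration sketch, assuming the callback signatures of recent kernels; every my_* name is hypothetical and the header is internal to kernel/trace/:

#include <linux/errno.h>
#include <linux/init.h>
#include <linux/seq_file.h>
#include "trace_dynevent.h"

/* Returning -ECANCELED from ->create tells create_dyn_event() the command
 * is not ours, so the next registered backend gets a chance to parse it. */
static int my_ev_create(const char *raw_command)
{
        return -ECANCELED;
}

static int my_ev_show(struct seq_file *m, struct dyn_event *ev)
{
        return 0;
}

static bool my_ev_is_busy(struct dyn_event *ev)
{
        return false;   /* nothing pinned; removal is always allowed */
}

static int my_ev_free(struct dyn_event *ev)
{
        return 0;
}

static bool my_ev_match(const char *system, const char *event,
                        int argc, const char **argv, struct dyn_event *ev)
{
        return false;   /* consulted when removing "-:system/event" entries */
}

static struct dyn_event_operations my_dyn_event_ops = {
        .create  = my_ev_create,
        .show    = my_ev_show,
        .is_busy = my_ev_is_busy,
        .free    = my_ev_free,
        .match   = my_ev_match,
};

/* The real backends register from an early initcall; fs_initcall is
 * used here purely as an illustration. */
static int __init my_dyn_event_init(void)
{
        return dyn_event_register(&my_dyn_event_ops);
}
fs_initcall(my_dyn_event_init);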

trace_sched_switch.c
    92    static void tracing_start_sched_switch(int ops)  in tracing_start_sched_switch()
    114   static void tracing_stop_sched_switch(int ops)   in tracing_stop_sched_switch()

trace_events_filter.c
    44    static const char * ops[] = { OPS };  (variable)
    82    unsigned short *ops;                  (member)
    2552  struct ftrace_ops *ops;               (member)
    2578  static int ftrace_function_set_regexp(struct ftrace_ops *ops, int filter,  in ftrace_function_set_regexp()

fgraph.c
    295   static int entry_run(struct ftrace_graph_ent *trace, struct fgraph_ops *ops,    in entry_run()
    302   static void return_run(struct ftrace_graph_ret *trace, struct fgraph_ops *ops,  in return_run()

trace_stack.c
    499   struct ftrace_ops *ops = inode->i_private;  in stack_trace_filter_open()  (local)

trace.h
    417   struct ftrace_ops *ops;                                           (member)
    1137  #define init_array_fgraph_ops(tr, ops) do { } while (0)           (argument)
    1138  #define allocate_fgraph_ops(tr, ops) ({ 0; })                     (argument)
    1280  #define ftrace_create_filter_files(ops, parent) do { } while (0)  (argument)
    1281  #define ftrace_destroy_filter_files(ops) do { } while (0)         (argument)
    1755  const struct event_trigger_ops *ops;                              (member)

tracing_map.h
    191   const struct tracing_map_ops *ops;  (member)

trace_events.c
    3988  struct trace_array *tr, struct ftrace_probe_ops *ops,                    in event_enable_probe()
    4005  struct trace_array *tr, struct ftrace_probe_ops *ops,                    in event_enable_count_probe()
    4033  struct ftrace_probe_ops *ops, void *data)                                in event_enable_print()
    4062  event_enable_init(struct ftrace_probe_ops *ops, struct trace_array *tr,  in event_enable_init()
    4100  event_enable_free(struct ftrace_probe_ops *ops, struct trace_array *tr,  in event_enable_free()
    4157  struct ftrace_probe_ops *ops;                                            in event_enable_func()  (local)

/kernel/livepatch/

patch.c
    27    struct klp_ops *ops;  in klp_find_ops()        (local)
    45    struct klp_ops *ops;  in klp_ftrace_handler()  (local)
    129   struct klp_ops *ops;  in klp_unpatch_func()    (local)
    162   struct klp_ops *ops;  in klp_patch_func()      (local)
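The klp_ops/func_stack bookkeeping above is driven entirely by patches registered through the public livepatch API: klp_patch_func() installs one ftrace handler per patched function and klp_ftrace_handler() redirects callers to the newest active replacement. A minimal consumer sketch modeled on samples/livepatch/livepatch-sample.c (the patched symbol is the one that sample uses and is only illustrative):

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/seq_file.h>
#include <linux/livepatch.h>

/* Replacement body that klp_ftrace_handler() will divert callers to. */
static int livepatch_cmdline_proc_show(struct seq_file *m, void *v)
{
        seq_printf(m, "%s\n", "this has been live patched");
        return 0;
}

static struct klp_func funcs[] = {
        {
                .old_name = "cmdline_proc_show",
                .new_func = livepatch_cmdline_proc_show,
        }, { }
};

static struct klp_object objs[] = {
        {
                /* NULL .name means the function lives in vmlinux */
                .funcs = funcs,
        }, { }
};

static struct klp_patch patch = {
        .mod  = THIS_MODULE,
        .objs = objs,
};

static int livepatch_init(void)
{
        return klp_enable_patch(&patch);
}

static void livepatch_exit(void)
{
}

module_init(livepatch_init);
module_exit(livepatch_exit);
MODULE_LICENSE("GPL");
MODULE_INFO(livepatch, "Y");

Once the per-task transition handled in transition.c completes, every call to the old symbol lands in the replacement via the ftrace handler registered by klp_patch_func().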

transition.c
    209   struct klp_ops *ops;  in klp_check_stack_func()  (local)

/kernel/irq/

irq_sim.c
    19    struct irq_sim_ops ops;         (member)
    201   const struct irq_sim_ops *ops,  in irq_domain_create_sim_full()
    281   const struct irq_sim_ops *ops,  in devm_irq_domain_create_sim_full()

msi.c
    705   struct msi_domain_ops *ops = info->ops;  in msi_domain_alloc()            (local)
    834   struct msi_domain_ops *ops = info->ops;  in msi_domain_update_dom_ops()   (local)
    1163  struct msi_domain_ops *ops = info->ops;  in msi_domain_prepare_irqs()     (local)
    1297  struct msi_domain_ops *ops = info->ops;  in __msi_domain_alloc_irqs()     (local)
    1372  struct msi_domain_ops *ops;               in __msi_domain_alloc_locked()  (local)
    1629  struct msi_domain_ops *ops;               in msi_domain_free_locked()     (local)

/kernel/sched/

ext_idle.c
    344   void scx_idle_update_selcpu_topology(struct sched_ext_ops *ops)  in scx_idle_update_selcpu_topology()
    775   static void reset_idle_masks(struct sched_ext_ops *ops)          in reset_idle_masks()
    797   void scx_idle_enable(struct sched_ext_ops *ops)                  in scx_idle_enable()

ext.c
    854   struct sched_ext_ops ops;                                                         (member)
    5437  static struct scx_sched *scx_alloc_and_add_sched(struct sched_ext_ops *ops)      in scx_alloc_and_add_sched()
    5516  const struct sched_ext_ops *ops)                                                  in check_hotplug_seq()
    5536  static int validate_ops(struct scx_sched *sch, const struct sched_ext_ops *ops)  in validate_ops()
    5563  static int scx_enable(struct sched_ext_ops *ops, struct bpf_link *link)          in scx_enable()
    5869  struct sched_ext_ops *ops = kdata;                                                in bpf_scx_init_member()  (local)
    5943  struct sched_ext_ops *ops = kdata;                                                in bpf_scx_unreg()        (local)

/kernel/power/

suspend.c
    77    void s2idle_set_ops(const struct platform_s2idle_ops *ops)    in s2idle_set_ops()
    220   void suspend_set_ops(const struct platform_suspend_ops *ops)  in suspend_set_ops()
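suspend_set_ops() at line 220 is the hook platform code uses to plug its low-level sleep states into the generic suspend core. A minimal sketch with a hypothetical platform callback; only .enter is mandatory, and .valid commonly points at the stock suspend_valid_only_mem() helper:

#include <linux/init.h>
#include <linux/suspend.h>

/* Hypothetical platform hook: program the hardware sleep state here. */
static int my_platform_suspend_enter(suspend_state_t state)
{
        return 0;
}

static const struct platform_suspend_ops my_platform_suspend_ops = {
        .valid = suspend_valid_only_mem,        /* advertise "mem" only */
        .enter = my_platform_suspend_enter,
};

static int __init my_platform_pm_init(void)
{
        suspend_set_ops(&my_platform_suspend_ops);
        return 0;
}
arch_initcall(my_platform_pm_init);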

/kernel/bpf/

offload.c
    38    const struct bpf_prog_offload_ops *ops;                                      (member)
    351   const struct bpf_prog_offload_ops *ops;                                      in bpf_prog_offload_replace_insn()  (local)
    786   bpf_offload_dev_create(const struct bpf_prog_offload_ops *ops, void *priv)  in bpf_offload_dev_create()
    846   const struct xdp_metadata_ops *ops;                                          in bpf_dev_bound_resolve_kfunc()    (local)

/kernel/bpf/preload/

bpf_preload_kern.c
    33    static struct bpf_preload_ops ops = {  (variable)

/kernel/locking/

lockdep_internals.h
    251   unsigned long ops = 0;  in debug_class_ops_read()  (local)