
Searched refs:d (Results 1 – 25 of 33) sorted by relevance

/kernel/bpf/
dispatcher.c
32 return &d->progs[i]; in bpf_dispatcher_find_prog()
64 d->num_progs++; in bpf_dispatcher_add_prog()
83 d->num_progs--; in bpf_dispatcher_remove_prog()
114 new = d->num_progs ? d->image + noff : NULL; in bpf_dispatcher_update()
115 tmp = d->num_progs ? d->rw_image + noff : NULL; in bpf_dispatcher_update()
134 d->image_off = noff; in bpf_dispatcher_update()
147 if (!d->image) { in bpf_dispatcher_change_prog()
149 if (!d->image) in bpf_dispatcher_change_prog()
152 if (!d->rw_image) { in bpf_dispatcher_change_prog()
154 d->image = NULL; in bpf_dispatcher_change_prog()
[all …]
/kernel/irq/
generic-chip.c
41 u32 mask = d->mask; in irq_gc_mask_disable_reg()
60 u32 mask = d->mask; in irq_gc_mask_set_bit()
79 u32 mask = d->mask; in irq_gc_mask_clr_bit()
98 u32 mask = d->mask; in irq_gc_unmask_enable_reg()
114 u32 mask = d->mask; in irq_gc_ack_set_bit()
151 u32 mask = d->mask; in irq_gc_mask_disable_and_ack_set()
289 if (d->gc) in irq_domain_alloc_generic_chips()
310 d->gc = dgc; in irq_domain_alloc_generic_chips()
321 gc->domain = d; in irq_domain_alloc_generic_chips()
346 d->gc = NULL; in irq_domain_alloc_generic_chips()
[all …]
cpuhotplug.c
32 m = irq_data_get_affinity_mask(d); in irq_needs_fixup()
46 cpumask_pr_args(m), d->irq, cpu); in irq_needs_fixup()
57 bool maskchip = !irq_can_move_pcntxt(d) && !irqd_irq_masked(d); in migrate_one_irq()
89 if (irqd_is_per_cpu(d) || !irqd_is_started(d) || !irq_needs_fixup(d)) { in migrate_one_irq()
107 affinity = irq_data_get_affinity_mask(d); in migrate_one_irq()
111 chip->irq_mask(d); in migrate_one_irq()
118 if (irqd_affinity_is_managed(d)) { in migrate_one_irq()
119 irqd_set_managed_shutdown(d); in migrate_one_irq()
141 d->irq, cpumask_pr_args(affinity)); in migrate_one_irq()
151 d->irq, err); in migrate_one_irq()
[all …]
internals.h
195 #define __irqd_to_state(d) ACCESS_PRIVATE((d)->common, state_use_accessors) argument
197 static inline unsigned int irqd_get(struct irq_data *d) in irqd_get() argument
199 return __irqd_to_state(d); in irqd_get()
205 static inline void irqd_set_move_pending(struct irq_data *d) in irqd_set_move_pending() argument
207 __irqd_to_state(d) |= IRQD_SETAFFINITY_PENDING; in irqd_set_move_pending()
212 __irqd_to_state(d) &= ~IRQD_SETAFFINITY_PENDING; in irqd_clr_move_pending()
217 __irqd_to_state(d) |= IRQD_MANAGED_SHUTDOWN; in irqd_set_managed_shutdown()
222 __irqd_to_state(d) &= ~IRQD_MANAGED_SHUTDOWN; in irqd_clr_managed_shutdown()
227 __irqd_to_state(d) &= ~mask; in irqd_clear()
232 __irqd_to_state(d) |= mask; in irqd_set()
[all …]
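
The internals.h hits above show the irq_data state word being read and modified only through tiny inline helpers built on __irqd_to_state(), which set, clear, or test individual flag bits. A minimal sketch of that accessor idiom, with made-up flag names and a plain struct standing in for struct irq_data (illustrative assumptions, not kernel code):

        /* Sketch of the state-accessor idiom seen in internals.h: one word of
         * flag bits, touched only through set/clear/test helpers. */
        #include <stdbool.h>

        #define STATE_SETAFFINITY_PENDING  (1u << 0)
        #define STATE_MANAGED_SHUTDOWN     (1u << 1)

        struct irq_state {
                unsigned int bits;
        };

        static inline unsigned int state_get(const struct irq_state *d)
        {
                return d->bits;
        }

        static inline void state_set(struct irq_state *d, unsigned int mask)
        {
                d->bits |= mask;
        }

        static inline void state_clear(struct irq_state *d, unsigned int mask)
        {
                d->bits &= ~mask;
        }

        static inline bool state_test(const struct irq_state *d, unsigned int mask)
        {
                return d->bits & mask;
        }
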
irqdomain.c
845 return d->ops->translate(d, fwspec, hwirq, type); in irq_domain_translate()
848 return d->ops->xlate(d, to_of_node(fwspec->fwnode), in irq_domain_translate()
1693 if (d->hwirq < d->domain->revmap_size) { in irq_domain_fix_revmap()
1695 rcu_assign_pointer(d->domain->revmap[d->hwirq], d); in irq_domain_fix_revmap()
1697 slot = radix_tree_lookup_slot(&d->domain->revmap_tree, d->hwirq); in irq_domain_fix_revmap()
1699 radix_tree_replace_slot(&d->domain->revmap_tree, slot, d); in irq_domain_fix_revmap()
2073 if (d->ops && d->ops->debug_show) in irq_domain_debug_show_one()
2074 d->ops->debug_show(m, d, NULL, ind + 1); in irq_domain_debug_show_one()
2076 if (!d->parent) in irq_domain_debug_show_one()
2088 if (!d) { in irq_domain_debug_show()
[all …]
migration.c
40 for (struct irq_data *d = irq_desc_get_irq_data(desc); d; d = irqd_get_parent_data(d)) { in irq_force_complete_move() local
41 if (d->chip && d->chip->irq_force_complete_move) { in irq_force_complete_move()
42 d->chip->irq_force_complete_move(d); in irq_force_complete_move()
ipi-mux.c
29 static void ipi_mux_mask(struct irq_data *d) in ipi_mux_mask() argument
33 atomic_andnot(BIT(irqd_to_hwirq(d)), &icpu->enable); in ipi_mux_mask()
36 static void ipi_mux_unmask(struct irq_data *d) in ipi_mux_unmask() argument
39 u32 ibit = BIT(irqd_to_hwirq(d)); in ipi_mux_unmask()
54 static void ipi_mux_send_mask(struct irq_data *d, const struct cpumask *mask) in ipi_mux_send_mask() argument
57 u32 ibit = BIT(irqd_to_hwirq(d)); in ipi_mux_send_mask()
98 static int ipi_mux_domain_alloc(struct irq_domain *d, unsigned int virq, in ipi_mux_domain_alloc() argument
105 irq_domain_set_info(d, virq + i, i, &ipi_mux_chip, NULL, in ipi_mux_domain_alloc()
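
The ipi-mux.c hits above keep one enable bit per multiplexed IPI in an atomic word: mask clears BIT(hwirq), unmask sets it, and send_mask computes the same bit for delivery. A rough equivalent using C11 atomics in place of the kernel's atomic_t and per-CPU machinery (names are illustrative, and actual IPI delivery is omitted):

        /* Enable-bitmap handling for a multiplexed IPI, sketched with C11
         * atomics: one bit per hwirq, cleared on mask and set on unmask. */
        #include <stdatomic.h>
        #include <stdint.h>

        static _Atomic uint32_t ipi_enable;     /* stands in for icpu->enable */

        static void ipi_mask_bit(unsigned int hwirq)
        {
                atomic_fetch_and(&ipi_enable, ~(UINT32_C(1) << hwirq));
        }

        static void ipi_unmask_bit(unsigned int hwirq)
        {
                atomic_fetch_or(&ipi_enable, UINT32_C(1) << hwirq);
        }

        static int ipi_bit_enabled(unsigned int hwirq)
        {
                return (atomic_load(&ipi_enable) >> hwirq) & 1;
        }
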
chip.c
175 if (!irqd_affinity_is_managed(d)) in __irq_startup_managed()
178 irqd_clr_managed_shutdown(d); in __irq_startup_managed()
214 irqd_clr_managed_shutdown(d); in irq_startup_managed()
256 WARN_ON_ONCE(!irqd_is_activated(d)); in __irq_startup()
258 if (d->chip->irq_startup) { in __irq_startup()
259 ret = d->chip->irq_startup(d); in __irq_startup()
277 if (irqd_is_started(d)) { in irq_startup()
289 irq_do_set_affinity(d, aff, false); in irq_startup()
294 irqd_set_managed_shutdown(d); in irq_startup()
308 if (!irqd_affinity_is_managed(d)) in irq_activate()
[all …]
manage.c
1338 struct irq_data *d = &desc->irq_data; in irq_request_resources() local
1339 struct irq_chip *c = d->chip; in irq_request_resources()
1346 struct irq_data *d = &desc->irq_data; in irq_release_resources() local
1347 struct irq_chip *c = d->chip; in irq_release_resources()
1350 c->irq_release_resources(d); in irq_release_resources()
1359 if (d->parent_data) in irq_supports_nmi()
1363 if (d->chip->irq_bus_lock || d->chip->irq_bus_sync_unlock) in irq_supports_nmi()
1366 return d->chip->flags & IRQCHIP_SUPPORTS_NMI; in irq_supports_nmi()
1372 struct irq_chip *c = d->chip; in irq_nmi_setup()
1380 struct irq_chip *c = d->chip; in irq_nmi_teardown()
[all …]
msi.c
761 static void msi_domain_debug_show(struct seq_file *m, struct irq_domain *d, in msi_domain_debug_show() argument
925 struct irq_domain *d; in msi_create_parent_irq_domain() local
932 d = irq_domain_instantiate(info); in msi_create_parent_irq_domain()
933 if (IS_ERR(d)) in msi_create_parent_irq_domain()
936 d->msi_parent_ops = msi_parent_ops; in msi_create_parent_irq_domain()
937 return d; in msi_create_parent_irq_domain()
/kernel/
delayacct.c
22 d->type##_delay_total = (tmp < d->type##_delay_total) ? 0 : tmp; \
23 d->type##_count += tsk->delays->type##_count; \
151 tmp = (s64)d->cpu_run_real_total; in delayacct_add_tsk()
153 d->cpu_run_real_total = (tmp < (s64)d->cpu_run_real_total) ? 0 : tmp; in delayacct_add_tsk()
156 tmp = (s64)d->cpu_scaled_run_real_total; in delayacct_add_tsk()
158 d->cpu_scaled_run_real_total = in delayacct_add_tsk()
169 d->cpu_count += t1; in delayacct_add_tsk()
173 tmp = (s64)d->cpu_delay_total + t2; in delayacct_add_tsk()
174 d->cpu_delay_total = (tmp < (s64)d->cpu_delay_total) ? 0 : tmp; in delayacct_add_tsk()
175 tmp = (s64)d->cpu_run_virtual_total + t3; in delayacct_add_tsk()
[all …]
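
The delayacct.c hits above repeat one idiom: add a delta to a running 64-bit total and reset the total to zero if the addition wrapped, since the wrapped sum compares smaller than the old value. A stripped-down version of that check, using plain unsigned arithmetic rather than the s64 casts shown above:

        /* Accumulate delta into *total; clamp back to 0 if the add wrapped. */
        #include <stdint.h>

        static void accumulate_delay(uint64_t *total, uint64_t delta)
        {
                uint64_t tmp = *total + delta;  /* unsigned add wraps on overflow */

                *total = (tmp < *total) ? 0 : tmp;
        }
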
audit_watch.c
350 struct dentry *d; in audit_get_nd() local
352 d = kern_path_locked_negative(watch->path, parent); in audit_get_nd()
353 if (IS_ERR(d)) in audit_get_nd()
354 return PTR_ERR(d); in audit_get_nd()
356 if (d_is_positive(d)) { in audit_get_nd()
358 watch->dev = d->d_sb->s_dev; in audit_get_nd()
359 watch->ino = d_backing_inode(d)->i_ino; in audit_get_nd()
363 dput(d); in audit_get_nd()
Makefile
183 for d in include "arch/$(SRCARCH)/include"; do \
184 find "$${d}/generated" ! -path "include/generated/utsversion.h" -a -name "*.h" -print; \
192 for d in include "arch/$(SRCARCH)/include"; do \
193 find "$(srctree)/$${d}" -path "$(srctree)/$${d}/generated" -prune -o -name "*.h" -print; \
audit.h
314 #define audit_watch_compare(w, i, d) 0 argument
320 #define audit_mark_compare(m, i, d) 0 argument
auditsc.c
98 struct audit_aux_data d; member
109 struct audit_aux_data d; member
2114 const struct dentry *d, *parent; in handle_path() local
2124 d = dentry; in handle_path()
2141 parent = d->d_parent; in handle_path()
2142 if (parent == d) in handle_path()
2144 d = parent; in handle_path()
2764 axp->d.type = AUDIT_OBJ_PID; in audit_signal_info_syscall()
2765 axp->d.next = ctx->aux_pids; in audit_signal_info_syscall()
2803 ax->d.type = AUDIT_BPRM_FCAPS; in __audit_log_bprm_fcaps()
[all …]
gen_kheaders.sh
14 depfile=${dir}/.$(basename "${tarfile}").d
/kernel/power/
swap.c
601 wait_event(d->go, atomic_read_acquire(&d->ready) || in crc32_threadfn()
612 *d->crc32 = crc32_le(*d->crc32, in crc32_threadfn()
613 d->unc[i], *d->unc_len[i]); in crc32_threadfn()
648 wait_event(d->go, atomic_read_acquire(&d->ready) || in compress_threadfn()
661 acomp_request_set_src_nondma(d->cr, d->unc, d->unc_len); in compress_threadfn()
664 d->ret = crypto_acomp_compress(d->cr); in compress_threadfn()
665 d->cmp_len = d->cr->dlen; in compress_threadfn()
1164 wait_event(d->go, atomic_read_acquire(&d->ready) || in decompress_threadfn()
1180 d->ret = crypto_acomp_decompress(d->cr); in decompress_threadfn()
1181 d->unc_len = d->cr->dlen; in decompress_threadfn()
[all …]
energy_model.c
74 struct dentry *d; in em_debug_create_ps() local
88 d = debugfs_create_dir(name, pd); in em_debug_create_ps()
89 debugfs_create_file("frequency", 0444, d, &em_dbg[i], in em_debug_create_ps()
91 debugfs_create_file("power", 0444, d, &em_dbg[i], in em_debug_create_ps()
93 debugfs_create_file("cost", 0444, d, &em_dbg[i], in em_debug_create_ps()
95 debugfs_create_file("performance", 0444, d, &em_dbg[i], in em_debug_create_ps()
97 debugfs_create_file("inefficient", 0444, d, &em_dbg[i], in em_debug_create_ps()
122 struct dentry *d; in em_debug_create_pd() local
126 d = debugfs_create_dir(dev_name(dev), rootdir); in em_debug_create_pd()
132 debugfs_create_file("flags", 0444, d, dev->em_pd, in em_debug_create_pd()
[all …]
/kernel/time/
timeconst.bc
16 define fmul(b,n,d) {
17 return (2^b*n+d-1)/d;
22 define fadj(b,n,d) {
24 d = d/gcd(n,d);
25 v = 2^b*(d-1)/d;
33 define fmuls(b,n,d) {
36 m = fmul(s,n,d);
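
The timeconst.bc hits above come from the bc script that generates fixed-point time-conversion constants: fmul(b,n,d) evaluates (2^b*n + d - 1)/d, i.e. the ceiling of 2^b*n/d, a multiplier with b fractional bits, and fadj(b,n,d) a related rounding-adjustment term. A small C restatement of fmul() for concreteness (the example values are arbitrary):

        /* fmul(b, n, d) = (2^b * n + d - 1) / d = ceil(2^b * n / d). */
        #include <stdio.h>
        #include <stdint.h>

        static uint64_t fmul(unsigned int b, uint64_t n, uint64_t d)
        {
                return ((n << b) + d - 1) / d;
        }

        int main(void)
        {
                /* 2^8 * 3 / 7 = 109.71..., rounded up to 110 */
                printf("%llu\n", (unsigned long long)fmul(8, 3, 7));
                return 0;
        }
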
/kernel/trace/
trace_events_filter_test.h
12 TP_PROTO(int a, int b, int c, int d, int e, int f, int g, int h),
14 TP_ARGS(a, b, c, d, e, f, g, h),
20 __field(int, d)
31 __entry->d = d;
39 __entry->a, __entry->b, __entry->c, __entry->d,
trace_events_filter.c
2752 .rec = { .a = va, .b = vb, .c = vc, .d = vd, \
2866 struct test_filter_data_t *d = &test_filter_data[i]; in ftrace_test_event_filter() local
2870 d->filter, false, &filter); in ftrace_test_event_filter()
2874 d->filter, err); in ftrace_test_event_filter()
2886 if (*d->not_visited) in ftrace_test_event_filter()
2887 update_pred_fn(filter, d->not_visited); in ftrace_test_event_filter()
2890 err = filter_match_preds(filter, &d->rec); in ftrace_test_event_filter()
2900 d->filter); in ftrace_test_event_filter()
2904 if (err != d->match) { in ftrace_test_event_filter()
2907 d->filter, d->match); in ftrace_test_event_filter()
/kernel/printk/
printk_ringbuffer.c
1281 struct prb_desc *d; in desc_reopen_last() local
1295 d = to_desc(desc_ring, id); in desc_reopen_last()
1321 return d; in desc_reopen_last()
1372 struct prb_desc *d; in prb_reserve_in_last() local
1379 if (!d) { in prb_reserve_in_last()
1417 &d->text_blk_lpos, id); in prb_reserve_in_last()
1441 &d->text_blk_lpos, id); in prb_reserve_in_last()
1604 struct prb_desc *d; in prb_reserve() local
1626 d = to_desc(desc_ring, id); in prb_reserve()
2014 struct prb_desc *d; in prb_next_reserve_seq() local
[all …]
/kernel/sched/
topology.c
1537 if (!atomic_read(&d->rd->refcount)) in __free_domain_allocs()
1538 free_rootdomain(&d->rd->rcu); in __free_domain_allocs()
1541 free_percpu(d->sd); in __free_domain_allocs()
1554 memset(d, 0, sizeof(*d)); in __visit_domain_allocation_hell()
1559 if (!d->sd) in __visit_domain_allocation_hell()
1561 d->rd = alloc_rootdomain(); in __visit_domain_allocation_hell()
1562 if (!d->rd) in __visit_domain_allocation_hell()
2446 struct s_data d; in build_sched_domains() local
2471 *per_cpu_ptr(d.sd, i) = sd; in build_sched_domains()
2567 sd = *per_cpu_ptr(d.sd, i); in build_sched_domains()
[all …]
rt.c
2640 struct rt_schedulable_data *d = data; in tg_rt_schedulable() local
2648 if (tg == d->tg) { in tg_rt_schedulable()
2649 period = d->rt_period; in tg_rt_schedulable()
2650 runtime = d->rt_runtime; in tg_rt_schedulable()
2684 if (child == d->tg) { in tg_rt_schedulable()
2685 period = d->rt_period; in tg_rt_schedulable()
2686 runtime = d->rt_runtime; in tg_rt_schedulable()
/kernel/events/
uprobes.c
312 if (!vaddr || !d) in __update_ref_ctr()
328 if (unlikely(*ptr + d < 0)) { in __update_ref_ctr()
330 "curr val: %d, delta: %d\n", vaddr, *ptr, d); in __update_ref_ctr()
335 *ptr += d; in __update_ref_ctr()
344 struct mm_struct *mm, short d) in update_ref_ctr_warn() argument
348 d > 0 ? "increment" : "decrement", uprobe->inode->i_ino, in update_ref_ctr_warn()
354 short d) in update_ref_ctr() argument
364 ret = __update_ref_ctr(mm, rc_vaddr, d); in update_ref_ctr()
366 update_ref_ctr_warn(uprobe, mm, d); in update_ref_ctr()
368 if (d > 0) in update_ref_ctr()
[all …]
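
The uprobes.c hits above apply a small signed delta d (an increment or decrement) to a reference counter and warn rather than letting the counter go negative. The check reduced to its core, with a plain int standing in for the user-mapped ref_ctr (illustrative names, not the kernel API):

        /* Apply a +/- delta to a counter, refusing an update that would
         * make it negative, as the hits above do before *ptr += d. */
        #include <stdio.h>

        static int apply_ref_ctr_delta(int *ctr, short d)
        {
                if (*ctr + d < 0) {
                        fprintf(stderr, "ref_ctr would go negative: curr %d, delta %d\n",
                                *ctr, d);
                        return -1;
                }
                *ctr += d;
                return 0;
        }
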

Completed in 84 milliseconds
