
Searched refs:events (Results 1 – 25 of 98) sorted by relevance


/arch/m68k/mac/
via.c
390 if (!events) in via1_irq()
395 if (events & irq_bit) { in via1_irq()
403 events &= ~irq_bit; in via1_irq()
404 if (!events) in via1_irq()
411 if (events & irq_bit) { in via1_irq()
426 if (!events) in via2_irq()
453 events &= via2[rSIER]; in via_nubus_irq()
456 if (!events) in via_nubus_irq()
464 events &= ~slot_bit; in via_nubus_irq()
469 } while (events); in via_nubus_irq()
[all …]
baboon.c
48 short events, irq_bit; in baboon_irq() local
51 events = baboon->mb_ifr & 0x07; in baboon_irq()
55 if (events & irq_bit) { in baboon_irq()
56 events &= ~irq_bit; in baboon_irq()
61 } while (events); in baboon_irq()
psc.c
124 unsigned char irq_bit, events; in psc_irq() local
126 events = psc_read_byte(pIFR) & psc_read_byte(pIER) & 0xF; in psc_irq()
127 if (!events) in psc_irq()
133 if (events & irq_bit) { in psc_irq()
139 } while (events >= irq_bit); in psc_irq()
oss.c
74 u16 events, irq_bit; in oss_nubus_irq() local
77 events = oss->irq_pending & OSS_IP_NUBUS; in oss_nubus_irq()
81 if (events & irq_bit) { in oss_nubus_irq()
82 events &= ~irq_bit; in oss_nubus_irq()
87 } while (events); in oss_nubus_irq()
iop.c
545 u8 events = iop->status_ctrl & (IOP_INT0 | IOP_INT1); in iop_ism_irq() local
552 if (events & IOP_INT0) { in iop_ism_irq()
565 if (events & IOP_INT1) { in iop_ism_irq()
577 events = iop->status_ctrl & (IOP_INT0 | IOP_INT1); in iop_ism_irq()
578 } while (events); in iop_ism_irq()
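
The m68k Mac hits above (via.c, baboon.c, psc.c, oss.c, iop.c) all follow the same idiom: read the controller's pending-events register, mask it with the enabled sources, then walk the set bits, clearing and dispatching each one until the mask is empty. Below is a minimal stand-alone C sketch of that dispatch loop; handle_event() and the sample mask values are hypothetical stand-ins for the real register accesses and generic_handle_irq() calls.

/*
 * Sketch of the "pending-events mask" dispatch idiom used by the m68k Mac
 * interrupt handlers above.  handle_event() is a hypothetical stand-in for
 * the acknowledge + generic_handle_irq() work the real handlers do.
 */
#include <stdint.h>
#include <stdio.h>

static void handle_event(unsigned int irq_num)
{
    printf("dispatching event %u\n", irq_num);
}

/* Walk a pending & enabled mask and dispatch every set bit, lowest first. */
static void dispatch_events(uint8_t pending, uint8_t enabled)
{
    uint8_t events = pending & enabled;
    uint8_t irq_bit = 1;
    unsigned int irq_num = 0;

    if (!events)
        return;

    do {
        if (events & irq_bit) {
            events &= ~irq_bit;     /* this source is now serviced */
            handle_event(irq_num);  /* ack + dispatch in the real handlers */
        }
        irq_bit <<= 1;
        ++irq_num;
    } while (events);
}

int main(void)
{
    dispatch_events(0x2C, 0xFF);    /* example: bits 2, 3 and 5 pending */
    return 0;
}
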
/arch/um/os-Linux/
irq.c
39 int os_epoll_triggered(int index, int events) in os_epoll_triggered() argument
41 return epoll_events[index].events & events; in os_epoll_triggered()
92 int os_add_epoll_fd(int events, int fd, void *data) in os_add_epoll_fd() argument
98 event.events = events | EPOLLET; in os_add_epoll_fd()
101 result = os_mod_epoll_fd(events, fd, data); in os_add_epoll_fd()
110 int os_mod_epoll_fd(int events, int fd, void *data) in os_mod_epoll_fd() argument
116 event.events = events; in os_mod_epoll_fd()
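
The UML os-Linux helpers above are thin wrappers around epoll(7): os_add_epoll_fd() registers an fd edge-triggered with the requested event mask, os_mod_epoll_fd() updates the mask, and os_epoll_triggered() tests the events reported back for a given slot. Here is a stand-alone sketch of the same pattern against the plain epoll API; only the epoll(7) calls are real, while the fd choice and timeout are illustrative.

#include <stdio.h>
#include <stdlib.h>
#include <sys/epoll.h>
#include <unistd.h>

int main(void)
{
    int epfd = epoll_create1(0);
    if (epfd < 0) {
        perror("epoll_create1");
        return EXIT_FAILURE;
    }

    /* register stdin for readability, edge-triggered as os_add_epoll_fd() does */
    struct epoll_event ev = {
        .events = EPOLLIN | EPOLLET,
        .data.fd = STDIN_FILENO,
    };
    if (epoll_ctl(epfd, EPOLL_CTL_ADD, STDIN_FILENO, &ev) < 0) {
        perror("epoll_ctl");
        return EXIT_FAILURE;
    }

    struct epoll_event triggered[8];
    int n = epoll_wait(epfd, triggered, 8, 1000 /* ms */);
    for (int i = 0; i < n; i++) {
        /* the same mask test os_epoll_triggered() performs on its saved array */
        if (triggered[i].events & EPOLLIN)
            printf("fd %d is readable\n", triggered[i].data.fd);
    }

    close(epfd);
    return 0;
}
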
/arch/x86/events/
Kconfig
5 tristate "Intel uncore performance events"
9 Include support for Intel uncore performance events. These are
13 tristate "Intel/AMD rapl performance events"
17 Include support for Intel and AMD rapl performance events for power
21 tristate "Intel cstate performance events"
25 Include support for Intel cstate performance events for power
38 tristate "AMD Uncore performance events"
42 Include support for AMD uncore performance events for use with
msr.c
138 PMU_EVENT_GROUP(events, aperf);
139 PMU_EVENT_GROUP(events, mperf);
140 PMU_EVENT_GROUP(events, pperf);
141 PMU_EVENT_GROUP(events, smi);
142 PMU_EVENT_GROUP(events, ptsc);
143 PMU_EVENT_GROUP(events, irperf);
/arch/um/kernel/
irq.c
35 int events; member
168 if (!reg->events) in sigio_reg_handler()
274 int events = 0; in update_irq_entry() local
277 events |= entry->reg[i].events; in update_irq_entry()
279 if (events) { in update_irq_entry()
346 irq_entry->reg[type].events = events; in activate_fd()
394 if (!reg->events) in free_irq_by_irq_and_dev()
402 reg->events = 0; in free_irq_by_irq_and_dev()
426 if (!entry->reg[i].events) in deactivate_fd()
429 entry->reg[i].events = 0; in deactivate_fd()
[all …]
/arch/arm/boot/dts/samsung/
exynos4412-ppmu-common.dtsi
12 events {
22 events {
32 events {
42 events {
/arch/x86/kvm/
pmu.c
350 u64 *fe = bsearch(&key, events, nevents, sizeof(events[0]), in find_filter_index()
356 return fe - events; in find_filter_index()
386 if (filter_event_cmp(&events[i], &event_select)) in filter_contains_match()
389 if (is_filter_entry_match(events[i], umask)) in filter_contains_match()
394 if (filter_event_cmp(&events[i], &event_select)) in filter_contains_match()
397 if (is_filter_entry_match(events[i], umask)) in filter_contains_match()
911 if (filter->events[i] & ~mask) in is_masked_filter_valid()
940 filter->events[j++] = filter->events[i] | in convert_to_masked_filter()
963 sort(&filter->events, filter->nevents, sizeof(filter->events[0]), in prepare_filter_lists()
977 filter->includes = filter->events; in prepare_filter_lists()
[all …]
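
The x86 KVM PMU filter hits above sort the user-supplied event list and then locate entries with bsearch(), comparing only the event-select field so that all umask variants of one select land together. The following is a generic user-space sketch of that sort-then-bsearch pattern; the 12-bit select split and the sample values are illustrative, not the real x86 event encoding.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define EVENT_SELECT(e)  ((uint64_t)(e) & 0xFFFULL)   /* illustrative field split */

static int filter_event_cmp(const void *pa, const void *pb)
{
    uint64_t a = EVENT_SELECT(*(const uint64_t *)pa);
    uint64_t b = EVENT_SELECT(*(const uint64_t *)pb);

    return (a > b) - (a < b);
}

/* Return the index of an entry whose select field matches the key, or -1. */
static int find_filter_index(const uint64_t *events, size_t nevents, uint64_t key)
{
    const uint64_t *fe = bsearch(&key, events, nevents, sizeof(events[0]),
                                 filter_event_cmp);

    if (!fe)
        return -1;
    return (int)(fe - events);
}

int main(void)
{
    uint64_t events[] = { 0x5301c2, 0x1003c, 0x412e, 0x2012a };
    size_t nevents = sizeof(events) / sizeof(events[0]);

    qsort(events, nevents, sizeof(events[0]), filter_event_cmp);

    int idx = find_filter_index(events, nevents, 0x3c /* select only */);
    printf("select 0x03c found at index %d\n", idx);
    return 0;
}
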
/arch/um/drivers/
ubd_user.c
40 kernel_pollfd.events = POLLIN; in start_io_thread()
70 kernel_pollfd.events = POLLIN; in ubd_read_poll()
75 kernel_pollfd.events = POLLOUT; in ubd_write_poll()
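
The ubd hits above use the classic pollfd setup: .events selects what the I/O thread waits for (POLLIN before a read, POLLOUT before a write) and the kernel reports readiness in .revents. A minimal stand-alone poll(2) sketch of the same thing follows; the fd and timeout are illustrative.

#include <poll.h>
#include <stdio.h>
#include <unistd.h>

int main(void)
{
    struct pollfd pfd = {
        .fd     = STDIN_FILENO,
        .events = POLLIN,           /* switch to POLLOUT before writing */
    };

    int ready = poll(&pfd, 1, 1000 /* ms timeout */);
    if (ready > 0 && (pfd.revents & POLLIN))
        printf("fd %d has data to read\n", pfd.fd);
    else if (ready == 0)
        printf("timed out with nothing pending\n");
    else
        perror("poll");

    return 0;
}
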
/arch/arm/mm/
cache-l2x0-pmu.c
36 static struct perf_event *events[PMU_NR_COUNTERS]; variable
44 if (!events[i]) in l2x0_pmu_find_idx()
57 if (events[i]) in l2x0_pmu_num_active_counters()
160 struct perf_event *event = events[i]; in l2x0_pmu_poll()
246 events[idx] = event; in l2x0_pmu_event_add()
265 events[hw->idx] = NULL; in l2x0_pmu_event_del()
451 if (events[i]) in l2x0_pmu_suspend()
452 l2x0_pmu_event_stop(events[i], PERF_EF_UPDATE); in l2x0_pmu_suspend()
467 if (events[i]) in l2x0_pmu_resume()
468 l2x0_pmu_event_start(events[i], PERF_EF_RELOAD); in l2x0_pmu_resume()
/arch/powerpc/platforms/powernv/
opal-irqchip.c
43 __be64 events = 0; in opal_handle_events() local
63 if (opal_poll_events(&events) != OPAL_SUCCESS) in opal_handle_events()
65 e = be64_to_cpu(events) & opal_event_irqchip.mask; in opal_handle_events()
124 __be64 events; in opal_interrupt() local
126 opal_handle_interrupt(virq_to_hw(irq), &events); in opal_interrupt()
127 WRITE_ONCE(last_outstanding_events, be64_to_cpu(events)); in opal_interrupt()
/arch/arm64/kvm/
pmu.c
98 static void kvm_vcpu_pmu_enable_el0(unsigned long events) in kvm_vcpu_pmu_enable_el0() argument
103 for_each_set_bit(counter, &events, ARMPMU_MAX_HWEVENTS) { in kvm_vcpu_pmu_enable_el0()
113 static void kvm_vcpu_pmu_disable_el0(unsigned long events) in kvm_vcpu_pmu_disable_el0() argument
118 for_each_set_bit(counter, &events, ARMPMU_MAX_HWEVENTS) { in kvm_vcpu_pmu_disable_el0()
guest.c
819 struct kvm_vcpu_events *events) in __kvm_arm_vcpu_get_events() argument
821 events->exception.serror_has_esr = cpus_have_final_cap(ARM64_HAS_RAS_EXTN); in __kvm_arm_vcpu_get_events()
822 events->exception.serror_pending = (vcpu->arch.hcr_el2 & HCR_VSE) || in __kvm_arm_vcpu_get_events()
825 if (events->exception.serror_pending && events->exception.serror_has_esr) in __kvm_arm_vcpu_get_events()
826 events->exception.serror_esr = vcpu_get_vsesr(vcpu); in __kvm_arm_vcpu_get_events()
851 struct kvm_vcpu_events *events) in __kvm_arm_vcpu_set_events() argument
853 bool serror_pending = events->exception.serror_pending; in __kvm_arm_vcpu_set_events()
854 bool has_esr = events->exception.serror_has_esr; in __kvm_arm_vcpu_set_events()
855 bool ext_dabt_pending = events->exception.ext_dabt_pending; in __kvm_arm_vcpu_set_events()
856 u64 esr = events->exception.serror_esr; in __kvm_arm_vcpu_set_events()
arm.c
1672 struct kvm_vcpu_events *events) in kvm_arm_vcpu_get_events() argument
1674 memset(events, 0, sizeof(*events)); in kvm_arm_vcpu_get_events()
1676 return __kvm_arm_vcpu_get_events(vcpu, events); in kvm_arm_vcpu_get_events()
1680 struct kvm_vcpu_events *events) in kvm_arm_vcpu_set_events() argument
1686 if (events->reserved[i]) in kvm_arm_vcpu_set_events()
1691 if (events->exception.pad[i]) in kvm_arm_vcpu_set_events()
1790 struct kvm_vcpu_events events; in kvm_arch_vcpu_ioctl() local
1792 if (kvm_arm_vcpu_get_events(vcpu, &events)) in kvm_arch_vcpu_ioctl()
1795 if (copy_to_user(argp, &events, sizeof(events))) in kvm_arch_vcpu_ioctl()
1801 struct kvm_vcpu_events events; in kvm_arch_vcpu_ioctl() local
[all …]
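
The arm64 KVM hits above implement the kernel side of the KVM_GET_VCPU_EVENTS / KVM_SET_VCPU_EVENTS ioctls, packing pending-SError state into struct kvm_vcpu_events and copying it to user space. Below is a hedged user-space sketch of the other end on an arm64 host, assuming a vCPU file descriptor has already been created (the VM/vCPU setup is not shown, and dump_vcpu_events() is a made-up helper name); only the ioctl and the struct fields visible in guest.c above are used.

#include <linux/kvm.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>

/* Hypothetical helper: print the pending-SError state of an existing vCPU fd. */
int dump_vcpu_events(int vcpu_fd)
{
    struct kvm_vcpu_events events;

    memset(&events, 0, sizeof(events));
    if (ioctl(vcpu_fd, KVM_GET_VCPU_EVENTS, &events) < 0) {
        perror("KVM_GET_VCPU_EVENTS");
        return -1;
    }

    if (events.exception.serror_pending) {
        if (events.exception.serror_has_esr)
            printf("SError pending, ESR=0x%llx\n",
                   (unsigned long long)events.exception.serror_esr);
        else
            printf("SError pending, no ESR\n");
    }
    return 0;
}
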
/arch/x86/events/intel/
cstate.c
164 PMU_EVENT_GROUP(events, cstate_core_c1);
165 PMU_EVENT_GROUP(events, cstate_core_c3);
166 PMU_EVENT_GROUP(events, cstate_core_c6);
167 PMU_EVENT_GROUP(events, cstate_core_c7);
238 PMU_EVENT_GROUP(events, cstate_pkg_c2);
239 PMU_EVENT_GROUP(events, cstate_pkg_c3);
240 PMU_EVENT_GROUP(events, cstate_pkg_c6);
241 PMU_EVENT_GROUP(events, cstate_pkg_c7);
242 PMU_EVENT_GROUP(events, cstate_pkg_c8);
243 PMU_EVENT_GROUP(events, cstate_pkg_c9);
[all …]
/arch/powerpc/perf/
imc-pmu.c
206 if (!events) in imc_free_events()
209 kfree(events[i].unit); in imc_free_events()
210 kfree(events[i].scale); in imc_free_events()
211 kfree(events[i].name); in imc_free_events()
214 kfree(events); in imc_free_events()
260 pmu->events = kcalloc(ct, sizeof(struct imc_events), GFP_KERNEL); in update_events_in_group()
261 if (!pmu->events) { in update_events_in_group()
279 imc_free_events(pmu->events, ct); in update_events_in_group()
294 imc_free_events(pmu->events, ct); in update_events_in_group()
309 if (pmu->events[i].scale) { in update_events_in_group()
[all …]
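
The imc-pmu hits above show an ownership pattern: update_events_in_group() allocates an array of event descriptors whose name/unit/scale strings are allocated separately, so imc_free_events() must free every string before freeing the array itself. Here is a plain-C sketch of that pattern; the struct, function, and event names are illustrative, not the kernel's.

#include <stdlib.h>
#include <string.h>

struct imc_like_event {
    char *name;
    char *unit;
    char *scale;
};

/* Free each owned string, then the array, mirroring imc_free_events() above. */
static void free_events(struct imc_like_event *events, size_t count)
{
    if (!events)
        return;

    for (size_t i = 0; i < count; i++) {
        free(events[i].unit);
        free(events[i].scale);
        free(events[i].name);
    }
    free(events);
}

int main(void)
{
    size_t count = 2;
    struct imc_like_event *events = calloc(count, sizeof(*events));

    if (!events)
        return 1;

    events[0].name  = strdup("thread_cycles");     /* illustrative names */
    events[0].unit  = strdup("cycles");
    events[0].scale = strdup("1");
    events[1].name  = strdup("thread_instructions");

    free_events(events, count);   /* frees the strings, then the array */
    return 0;
}
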
core-book3s.c
40 u64 events[MAX_HWEVENTS]; member
1591 struct perf_event *ctrs[], u64 *events, in collect_events() argument
1602 events[n++] = group->hw.config; in collect_events()
1611 events[n++] = event->hw.config; in collect_events()
1642 cpuhw->events[n0] = event->hw.config; in power_pmu_add()
1668 event->hw.config = cpuhw->events[n0]; in power_pmu_add()
1715 cpuhw->events[i-1] = cpuhw->events[i]; in power_pmu_del()
2005 u64 events[MAX_HWEVENTS]; in power_pmu_event_init() local
2112 ctrs, events, cflags); in power_pmu_event_init()
2116 events[n] = ev; in power_pmu_event_init()
[all …]
/arch/sparc/kernel/
perf_event.c
98 unsigned long events[MAX_HWEVENTS]; member
1134 cpuc->events[i - 1] = cpuc->events[i]; in sparc_pmu_del()
1243 unsigned long *events, int n_ev) in sparc_check_constraints() argument
1265 msk0 = perf_event_get_msk(events[0]); in sparc_check_constraints()
1272 msk1 = perf_event_get_msk(events[1]); in sparc_check_constraints()
1356 events[n] = group->hw.event_base; in collect_events()
1365 events[n] = event->hw.event_base; in collect_events()
1385 cpuc->events[n0] = event->hw.event_base; in sparc_pmu_add()
1420 unsigned long events[MAX_HWEVENTS]; in sparc_pmu_event_init() local
1477 evts, events, current_idx_dmy); in sparc_pmu_event_init()
[all …]
/arch/powerpc/platforms/pseries/
Kconfig
57 SPLPAR machines can log hypervisor preempt & dispatch events to a
58 kernel buffer. Saying Y here will enable logging these events,
80 to return information about hardware error and non-error events
81 which may need OS attention. RTAS returns events for multiple
83 to receive events.
142 bool "Hypervisor supplied PMU events (24x7 & GPCI)"
154 tristate "VPA PMU events"
/arch/sh/kernel/
perf_event.c
29 struct perf_event *events[MAX_HWEVENTS]; member
207 cpuc->events[idx] = NULL; in sh_pmu_stop()
229 cpuc->events[idx] = event; in sh_pmu_start()
/arch/arm64/kernel/
trace-events-emulation.h
35 #define TRACE_INCLUDE_FILE trace-events-emulation
/arch/arm/kernel/
sys_oabi-compat.c
278 __poll_t events; member
293 kernel.events = user.events; in sys_oabi_epoll_ctl()
313 if (__put_user(revents, &oevent->events) || in epoll_put_uevent()
320 if (__put_user(revents, &uevent->events) || in epoll_put_uevent()

Completed in 77 milliseconds
