Lines Matching refs: cpu

93 	struct datarec *cpu;  member
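
The cpu pointer at line 93 is a struct member, and the counter names read out of it further down (lines 357-369) suggest the layout below. This is a minimal sketch reconstructed from the listing only; any field not visible above (for example an aggregate "total" slot) is an assumption.

#include <linux/types.h>	/* __u64; the exact include used is an assumption */

/* Counters visible in the listing (lines 357-369). */
struct datarec {
	__u64 processed;
	__u64 dropped;
	__u64 issue;
	__u64 xdp_pass;
	__u64 xdp_drop;
	__u64 xdp_redirect;
};

/* Each record keeps one datarec slot per possible CPU (line 93). */
struct record {
	struct datarec total;	/* assumed aggregate of the per-CPU slots */
	struct datarec *cpu;	/* array indexed by CPU id */
};
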
334 e->val.cpu = alloc_record_per_cpu(); in map_entry_init()
335 if (!e->val.cpu) in map_entry_init()
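
Lines 334-335 allocate that per-CPU array through alloc_record_per_cpu(). Its body is not part of the listing; the sketch below assumes the obvious implementation, a zeroed calloc() sized by libbpf_num_possible_cpus().

#include <stdio.h>
#include <stdlib.h>
#include <bpf/libbpf.h>		/* libbpf_num_possible_cpus() */

static struct datarec *alloc_record_per_cpu(void)
{
	int nr_cpus = libbpf_num_possible_cpus();
	struct datarec *array;

	if (nr_cpus < 0)
		return NULL;

	/* Zeroed so a fresh record compares cleanly against a previous one. */
	array = calloc(nr_cpus, sizeof(*array));
	if (!array)
		fprintf(stderr, "Failed to allocate memory (nr_cpus: %d)\n",
			nr_cpus);
	return array;
}
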
357 rec->cpu[i].processed = READ_ONCE(values[i].processed); in map_collect_percpu()
358 rec->cpu[i].dropped = READ_ONCE(values[i].dropped); in map_collect_percpu()
359 rec->cpu[i].issue = READ_ONCE(values[i].issue); in map_collect_percpu()
360 rec->cpu[i].xdp_pass = READ_ONCE(values[i].xdp_pass); in map_collect_percpu()
361 rec->cpu[i].xdp_drop = READ_ONCE(values[i].xdp_drop); in map_collect_percpu()
362 rec->cpu[i].xdp_redirect = READ_ONCE(values[i].xdp_redirect); in map_collect_percpu()
364 sum_processed += rec->cpu[i].processed; in map_collect_percpu()
365 sum_dropped += rec->cpu[i].dropped; in map_collect_percpu()
366 sum_issue += rec->cpu[i].issue; in map_collect_percpu()
367 sum_xdp_pass += rec->cpu[i].xdp_pass; in map_collect_percpu()
368 sum_xdp_drop += rec->cpu[i].xdp_drop; in map_collect_percpu()
369 sum_xdp_redirect += rec->cpu[i].xdp_redirect; in map_collect_percpu()
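
Lines 357-369 show how map_collect_percpu() copies one struct datarec per CPU out of a per-CPU BPF map value array and keeps running totals. The sketch below follows that pattern directly; the function signature, the READ_ONCE() fallback, and writing the sums into the assumed total field are assumptions.

/* Userspace fallback; the sample tree carries its own READ_ONCE() definition. */
#ifndef READ_ONCE
#define READ_ONCE(x) (*(volatile typeof(x) *)&(x))
#endif

static void map_collect_percpu_sketch(struct record *rec,
				      const struct datarec *values,
				      unsigned int nr_cpus)
{
	__u64 sum_processed = 0, sum_dropped = 0, sum_issue = 0;
	__u64 sum_xdp_pass = 0, sum_xdp_drop = 0, sum_xdp_redirect = 0;
	unsigned int i;

	for (i = 0; i < nr_cpus; i++) {
		/* Copy each counter exactly once; the BPF side updates them
		 * concurrently, and READ_ONCE() keeps the compiler from
		 * re-reading a value that may have changed in between. */
		rec->cpu[i].processed    = READ_ONCE(values[i].processed);
		rec->cpu[i].dropped      = READ_ONCE(values[i].dropped);
		rec->cpu[i].issue        = READ_ONCE(values[i].issue);
		rec->cpu[i].xdp_pass     = READ_ONCE(values[i].xdp_pass);
		rec->cpu[i].xdp_drop     = READ_ONCE(values[i].xdp_drop);
		rec->cpu[i].xdp_redirect = READ_ONCE(values[i].xdp_redirect);

		sum_processed    += rec->cpu[i].processed;
		sum_dropped      += rec->cpu[i].dropped;
		sum_issue        += rec->cpu[i].issue;
		sum_xdp_pass     += rec->cpu[i].xdp_pass;
		sum_xdp_drop     += rec->cpu[i].xdp_drop;
		sum_xdp_redirect += rec->cpu[i].xdp_redirect;
	}

	/* Assumed destination for the totals (see the struct sketch above). */
	rec->total.processed    = sum_processed;
	rec->total.dropped      = sum_dropped;
	rec->total.issue        = sum_issue;
	rec->total.xdp_pass     = sum_xdp_pass;
	rec->total.xdp_drop     = sum_xdp_drop;
	rec->total.xdp_redirect = sum_xdp_redirect;
}
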
460 rec->rx_cnt.cpu = alloc_record_per_cpu(); in alloc_stats_record()
461 if (!rec->rx_cnt.cpu) { in alloc_stats_record()
469 rec->redir_err[i].cpu = alloc_record_per_cpu(); in alloc_stats_record()
470 if (!rec->redir_err[i].cpu) { in alloc_stats_record()
476 free(rec->redir_err[i].cpu); in alloc_stats_record()
482 rec->kthread.cpu = alloc_record_per_cpu(); in alloc_stats_record()
483 if (!rec->kthread.cpu) { in alloc_stats_record()
491 rec->exception[i].cpu = alloc_record_per_cpu(); in alloc_stats_record()
492 if (!rec->exception[i].cpu) { in alloc_stats_record()
498 free(rec->exception[i].cpu); in alloc_stats_record()
504 rec->devmap_xmit.cpu = alloc_record_per_cpu(); in alloc_stats_record()
505 if (!rec->devmap_xmit.cpu) { in alloc_stats_record()
515 rec->enq[i].cpu = alloc_record_per_cpu(); in alloc_stats_record()
516 if (!rec->enq[i].cpu) { in alloc_stats_record()
522 free(rec->enq[i].cpu); in alloc_stats_record()
531 free(rec->devmap_xmit.cpu); in alloc_stats_record()
534 free(rec->exception[i].cpu); in alloc_stats_record()
536 free(rec->kthread.cpu); in alloc_stats_record()
539 free(rec->redir_err[i].cpu); in alloc_stats_record()
541 free(rec->rx_cnt.cpu); in alloc_stats_record()
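
Lines 460-541 are alloc_stats_record() allocating one per-CPU array for every record it tracks (rx_cnt, redir_err[], kthread, exception[], devmap_xmit, enq[]) and freeing everything already allocated whenever a later allocation fails. The sketch below shows that unwind pattern on a reduced stats_record; the member set, the array size, and the label names are assumptions, and it reuses the helpers and includes from the sketches above.

#define ARRAY_SIZE(x) (sizeof(x) / sizeof((x)[0]))

/* Reduced container; the listing only names the members, so count and
 * array sizes here are assumptions. */
struct stats_record {
	struct record rx_cnt;
	struct record redir_err[4];
	struct record kthread;
	struct record devmap_xmit;
};

static struct stats_record *alloc_stats_record(void)
{
	struct stats_record *rec;
	int i;

	rec = calloc(1, sizeof(*rec));
	if (!rec)
		return NULL;

	rec->rx_cnt.cpu = alloc_record_per_cpu();
	if (!rec->rx_cnt.cpu)
		goto err_rec;

	for (i = 0; i < (int)ARRAY_SIZE(rec->redir_err); i++) {
		rec->redir_err[i].cpu = alloc_record_per_cpu();
		if (!rec->redir_err[i].cpu)
			goto err_redir;	/* free the ones already allocated */
	}

	rec->kthread.cpu = alloc_record_per_cpu();
	if (!rec->kthread.cpu)
		goto err_kthread;

	rec->devmap_xmit.cpu = alloc_record_per_cpu();
	if (!rec->devmap_xmit.cpu)
		goto err_devmap;

	return rec;

err_devmap:
	free(rec->kthread.cpu);
err_kthread:
	i = ARRAY_SIZE(rec->redir_err);
err_redir:
	while (--i >= 0)
		free(rec->redir_err[i].cpu);
	free(rec->rx_cnt.cpu);
err_rec:
	free(rec);
	return NULL;
}
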
554 free(r->enq[i].cpu); in free_stats_record()
557 free(e->val.cpu); in free_stats_record()
560 free(r->devmap_xmit.cpu); in free_stats_record()
562 free(r->exception[i].cpu); in free_stats_record()
563 free(r->kthread.cpu); in free_stats_record()
565 free(r->redir_err[i].cpu); in free_stats_record()
566 free(r->rx_cnt.cpu); in free_stats_record()
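
Lines 554-566 are the matching teardown: free_stats_record() releases every per-CPU array, including those owned by hashed map entries (e->val.cpu, line 557), before freeing the container. A short sketch against the reduced struct above:

static void free_stats_record(struct stats_record *r)
{
	int i;

	/* Release every per-CPU array; the hashed map entries (e->val.cpu in
	 * the listing) would be walked and freed here as well. */
	free(r->devmap_xmit.cpu);
	free(r->kthread.cpu);
	for (i = 0; i < (int)ARRAY_SIZE(r->redir_err); i++)
		free(r->redir_err[i].cpu);
	free(r->rx_cnt.cpu);
	free(r);
}
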
661 struct datarec *r = &rec->cpu[i]; in stats_get_rx_cnt()
662 struct datarec *p = &prev->cpu[i]; in stats_get_rx_cnt()
724 struct datarec *r = &rec->cpu[i]; in stats_get_cpumap_enqueue()
725 struct datarec *p = &prev->cpu[i]; in stats_get_cpumap_enqueue()
768 struct datarec *r = &rec->cpu[i]; in stats_get_cpumap_remote()
769 struct datarec *p = &prev->cpu[i]; in stats_get_cpumap_remote()
805 struct datarec *r = &rec->cpu[i]; in stats_get_cpumap_kthread()
806 struct datarec *p = &prev->cpu[i]; in stats_get_cpumap_kthread()
835 struct datarec *r = &rec->cpu[i]; in stats_get_redirect_cnt()
836 struct datarec *p = &prev->cpu[i]; in stats_get_redirect_cnt()
881 struct datarec *r = &rec->cpu[i]; in stats_get_redirect_err_cnt()
882 struct datarec *p = &prev->cpu[i]; in stats_get_redirect_err_cnt()
926 struct datarec *r = &rec->cpu[i]; in stats_get_exception_cnt()
927 struct datarec *p = &prev->cpu[i]; in stats_get_exception_cnt()
962 struct datarec *r = &rec->cpu[i]; in stats_get_devmap_xmit()
963 struct datarec *p = &prev->cpu[i]; in stats_get_devmap_xmit()
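
Every stats_get_*() helper in lines 661-963 walks the same pair of arrays: the current snapshot's rec->cpu[i] against the previous snapshot's prev->cpu[i]. The sketch below shows the delta-and-rate pattern that implies; the calc_pps() helper name, the period argument, and the output format are assumptions, and the types come from the sketches above.

/* Packets per second from two snapshots of the same counter. */
static __u64 calc_pps(struct datarec *r, struct datarec *p, double period)
{
	__u64 packets = r->processed - p->processed;

	return period > 0 ? (__u64)(packets / period) : 0;
}

static void stats_get_rx_cnt_sketch(struct record *rec, struct record *prev,
				    unsigned int nr_cpus, double period)
{
	unsigned int i;

	for (i = 0; i < nr_cpus; i++) {
		struct datarec *r = &rec->cpu[i];
		struct datarec *p = &prev->cpu[i];
		__u64 pps = calc_pps(r, p, period);

		if (pps)	/* only report CPUs that actually saw traffic */
			printf("  cpu:%u  %llu pkt/s\n", i,
			       (unsigned long long)pps);
	}
}
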
1077 struct datarec *rc = &r->cpu[i]; in stats_get_devmap_xmit_multi()
1081 pc = p == &beg ? &p_beg : &p->cpu[i]; in stats_get_devmap_xmit_multi()
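
stats_get_devmap_xmit_multi() (lines 1077-1081) differs in one detail: an entry seen for the first time has no previous record, so the code compares against a zero-filled placeholder (pc = p == &beg ? &p_beg : &p->cpu[i]). A hedged sketch of that fallback, reusing calc_pps() from the sketch above; the names beg and p_beg come from the listing, while the surrounding loop and signature are assumptions.

static struct record beg;	/* zeroed "no previous snapshot" record */
static struct datarec p_beg;	/* zeroed per-CPU placeholder */

static void devmap_xmit_multi_sketch(struct record *r, struct record *p,
				     unsigned int nr_cpus, double period)
{
	unsigned int i;

	for (i = 0; i < nr_cpus; i++) {
		struct datarec *rc = &r->cpu[i];
		struct datarec *pc;

		/* First sample for this entry: diff against zeroes so the
		 * full count is reported instead of following a pointer that
		 * does not exist yet. */
		pc = (p == &beg) ? &p_beg : &p->cpu[i];

		printf("  cpu:%u  %llu pkt/s\n", i,
		       (unsigned long long)calc_pps(rc, pc, period));
	}
}
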