Lines Matching refs:tsc

1634 tsc_t tsc; member
1638 tsc_t tsc; member
1838 tsc_t tsc; member
1853 tsc_t tsc; member
3359 void hvm_vlapic_vmentry_cleanup(struct vcpu_data *v, tsc_t tsc) in hvm_vlapic_vmentry_cleanup() argument
3373 if(tsc >= o->first_tsc) in hvm_vlapic_vmentry_cleanup()
3374 lat = tsc - o->first_tsc; in hvm_vlapic_vmentry_cleanup()
3377 o->vec, o->first_tsc, tsc); in hvm_vlapic_vmentry_cleanup()
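
The vlapic cleanup lines above measure injection latency as the gap between the TSC at which a vector was first seen pending (first_tsc) and the TSC of the vmentry that delivers it, warning when the TSC appears to run backwards. A minimal sketch of that calculation follows; the struct layout, function name, and warning text are simplified stand-ins inferred from the fragments, not the real xenalyze definitions.

    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t tsc_t;

    /* Simplified stand-in for one outstanding injected vector. */
    struct outstanding_ipi {
        int vec;
        tsc_t first_tsc;   /* TSC when the vector was first seen pending */
    };

    /* Latency from "first pending" to the vmentry that delivers it. */
    tsc_t vmentry_latency(const struct outstanding_ipi *o, tsc_t vmentry_tsc)
    {
        if (vmentry_tsc >= o->first_tsc)
            return vmentry_tsc - o->first_tsc;

        /* A backwards TSC points at skew or out-of-order records. */
        fprintf(stderr, "Warning: vec %d first_tsc %llu after vmentry tsc %llu\n",
                o->vec, (unsigned long long)o->first_tsc,
                (unsigned long long)vmentry_tsc);
        return 0;
    }
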
4570 unsigned long long tsc; in hvm_rdtsc_process() member
4576 (unsigned long long)r->tsc, in hvm_rdtsc_process()
4577 (unsigned long long)r->tsc, in hvm_rdtsc_process()
4578 h->last_rdtsc > r->tsc ? "BACKWARDS" : ""); in hvm_rdtsc_process()
4584 abs_cycles_to_time(ri->tsc, &t); in hvm_rdtsc_process()
4589 r->tsc); in hvm_rdtsc_process()
4592 h->last_rdtsc = r->tsc; in hvm_rdtsc_process()
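
The rdtsc handler keeps the last guest-visible TSC value and flags any read that goes backwards. A hedged sketch of that check, assuming a simplified per-vcpu state struct and output format (neither is the real hvm_data layout):

    #include <stdint.h>
    #include <stdio.h>

    /* Placeholder for the per-vcpu state tracking guest rdtsc values. */
    struct hvm_rdtsc_state {
        uint64_t last_rdtsc;
    };

    /* Record a guest RDTSC value, warning if it moved backwards. */
    void track_guest_rdtsc(struct hvm_rdtsc_state *h, uint64_t guest_tsc)
    {
        if (h->last_rdtsc > guest_tsc)
            printf("vrdtsc %llu %s\n", (unsigned long long)guest_tsc, "BACKWARDS");
        else
            printf("vrdtsc %llu\n", (unsigned long long)guest_tsc);

        h->last_rdtsc = guest_tsc;
    }
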
4863 void vcpu_next_update(struct pcpu_info *p, struct vcpu_data *next, tsc_t tsc);
4865 tsc_t tsc, int new_runstate);
4867 void lose_vcpu(struct vcpu_data *v, tsc_t tsc);
4934 tsc_t tsc) in runstate_update() argument
4942 abs_cycles_to_time(tsc, &t); in runstate_update()
4954 if(v->runstate.tsc > 0 && v->runstate.tsc < tsc) { in runstate_update()
4955 update_cycles(v->runstates + v->runstate.state, tsc - v->runstate.tsc); in runstate_update()
4961 abs_cycles_to_time(tsc, &t); in runstate_update()
4962 cycles_to_time(tsc - v->runstate.tsc, &dt); in runstate_update()
4971 update_cycles(&v->d->total_time, tsc - v->runstate.tsc); in runstate_update()
4974 update_cycles(v->runnable_states + v->runstate.runnable_state, tsc - v->runstate.tsc); in runstate_update()
4984 start_tsc = (p->volume.buffer_first_tsc > v->runstate.tsc) ? in runstate_update()
4986 v->runstate.tsc; in runstate_update()
4988 += tsc - start_tsc; in runstate_update()
4993 tsc - start_tsc); in runstate_update()
4996 p->volume.buffer_dom0_runstate_tsc = tsc; in runstate_update()
5021 v->runstate.tsc = tsc; in runstate_update()
5024 if(d->runstate_tsc > 0 && d->runstate_tsc < tsc) in runstate_update()
5025 update_cycles(d->runstates + d->runstate, tsc - d->runstate_tsc); in runstate_update()
5029 d->runstate_tsc = tsc; in runstate_update()
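
runstate_update charges the interval since the last state change to the state being left, then stamps the new state with the current TSC; intervals are only charged when the stored TSC is nonzero and not in the future. A minimal sketch of that accumulation, with simplified state names and a cut-down cycle summary (the real one also tracks samples for percentiles):

    #include <stdint.h>

    typedef uint64_t tsc_t;

    enum { RUNSTATE_RUNNING, RUNSTATE_RUNNABLE, RUNSTATE_BLOCKED, RUNSTATE_MAX };

    /* Simplified cycle accumulator. */
    struct cycle_summary {
        uint64_t count;
        uint64_t cycles;
    };

    struct vcpu_sketch {
        int state;
        tsc_t state_tsc;                              /* TSC of the last state change */
        struct cycle_summary runstates[RUNSTATE_MAX]; /* time accumulated per state   */
    };

    void update_cycles(struct cycle_summary *s, tsc_t c)
    {
        s->count++;
        s->cycles += c;
    }

    /* Charge the interval since the last change to the state being left,
     * then stamp the new state.  Skip charging if the stored TSC is zero
     * (unknown) or lies in the future (out-of-order record). */
    void runstate_update_sketch(struct vcpu_sketch *v, int new_state, tsc_t tsc)
    {
        if (v->state_tsc > 0 && v->state_tsc < tsc)
            update_cycles(&v->runstates[v->state], tsc - v->state_tsc);

        v->state = new_state;
        v->state_tsc = tsc;
    }
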
5098 scatterplot_vs_time(ri->tsc, h->rip); in hvm_vmexit_process()
5127 ri->tsc - h->entry_tsc); in hvm_vmexit_process()
5128 h->v->cr3.data->run_time += (ri->tsc - h->entry_tsc); in hvm_vmexit_process()
5131 h->exit_tsc = ri->tsc; in hvm_vmexit_process()
5142 void hvm_close_vmexit(struct hvm_data *h, tsc_t tsc) { in hvm_close_vmexit() argument
5145 if(h->exit_tsc > tsc) in hvm_close_vmexit()
5148 h->arc_cycles = tsc - h->exit_tsc; in hvm_close_vmexit()
5162 abs_cycles_to_time(tsc, &t); in hvm_close_vmexit()
5222 hvm_vlapic_vmentry_cleanup(h->v, ri->tsc); in hvm_vmentry_process()
5238 h->v->d->did, h->v->vid, i, ri->tsc); in hvm_vmentry_process()
5239 h->summary.guest_interrupt[i].start_tsc = ri->tsc; in hvm_vmentry_process()
5256 unsigned long long arc_cycles = ri->tsc - h->exit_tsc; in hvm_vmentry_process()
5261 hvm_close_vmexit(h, ri->tsc); in hvm_vmentry_process()
5262 h->entry_tsc = ri->tsc; in hvm_vmentry_process()
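
Each vmexit/vmentry pair yields two intervals: guest run time (exit TSC minus the previous entry TSC) and handling time in Xen (the "arc" cycles, close TSC minus exit TSC), with a sanity check that the exit TSC does not lie after the entry TSC. A self-contained sketch of that pairing, using simplified field names rather than the real hvm_data struct:

    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t tsc_t;

    /* Simplified per-vcpu HVM timing state. */
    struct hvm_sketch {
        tsc_t entry_tsc;   /* TSC of the last VMENTRY */
        tsc_t exit_tsc;    /* TSC of the last VMEXIT  */
        uint64_t run_time; /* cycles spent in the guest     */
        uint64_t arc_time; /* cycles spent handling vmexits */
    };

    void vmexit_sketch(struct hvm_sketch *h, tsc_t tsc)
    {
        if (h->entry_tsc && tsc > h->entry_tsc)
            h->run_time += tsc - h->entry_tsc;    /* guest ran from entry to exit */
        h->exit_tsc = tsc;
    }

    void vmentry_sketch(struct hvm_sketch *h, tsc_t tsc)
    {
        if (h->exit_tsc) {
            if (h->exit_tsc > tsc)
                fprintf(stderr, "Strange, exit tsc after entry tsc\n");
            else
                h->arc_time += tsc - h->exit_tsc; /* time handling the exit in Xen */
        }
        h->entry_tsc = tsc;
    }
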
6745 void pcpu_runstate_update(struct pcpu_info *p, tsc_t tsc) in pcpu_runstate_update() argument
6747 if ( p->time.tsc ) in pcpu_runstate_update()
6750 update_cycles(&p->time.idle, tsc - p->time.tsc); in pcpu_runstate_update()
6752 update_cycles(&p->time.running, tsc - p->time.tsc); in pcpu_runstate_update()
6753 p->time.tsc = 0; in pcpu_runstate_update()
6758 tsc_t tsc, int new_runstate) in vcpu_prev_update() argument
6766 prev->runstate.tsc = 0; in vcpu_prev_update()
6783 prev->runstate.tsc = 0; in vcpu_prev_update()
6791 prev->runstate.tsc = 0; in vcpu_prev_update()
6796 pcpu_runstate_update(p, tsc); in vcpu_prev_update()
6799 runstate_update(prev, new_runstate, tsc); in vcpu_prev_update()
6802 void vcpu_next_update(struct pcpu_info *p, struct vcpu_data *next, tsc_t tsc) in vcpu_next_update() argument
6813 runstate_update(p->current, RUNSTATE_LOST, tsc); in vcpu_next_update()
6837 next->runstate.tsc = 0; in vcpu_next_update()
6844 update_cycles(&next->cpu_affinity_all, tsc - next->pcpu_tsc); in vcpu_next_update()
6845 update_cycles(&next->cpu_affinity_pcpu[p->pid], tsc - next->pcpu_tsc); in vcpu_next_update()
6847 next->pcpu_tsc = tsc; in vcpu_next_update()
6854 next->pcpu_tsc = tsc; in vcpu_next_update()
6857 runstate_update(next, RUNSTATE_RUNNING, tsc); in vcpu_next_update()
6865 abs_cycles_to_time(tsc, &t); in vcpu_next_update()
6881 p->time.tsc = tsc; in vcpu_next_update()
6901 if(p->first_tsc <= p->current->runstate.tsc) { in vcpu_start()
6904 p->current->runstate.tsc); in vcpu_start()
6917 p->time.tsc = p->first_tsc; in vcpu_start()
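
On a context switch the pcpu charges the elapsed interval to either idle or running time, depending on what was current, and clears its stamp until the next vcpu is switched in; the incoming vcpu's affinity counters are similarly charged with the time since it last changed pcpu. A sketch of the pcpu half only, assuming a boolean "idle" flag in place of the real idle-domain check:

    #include <stdint.h>
    #include <stdbool.h>

    typedef uint64_t tsc_t;

    struct pcpu_time_sketch {
        tsc_t stamp;        /* TSC when the current vcpu started running */
        bool idle;          /* was the idle vcpu running?                */
        uint64_t idle_cycles;
        uint64_t running_cycles;
    };

    /* Charge the interval since the last stamp to idle or running time,
     * then clear the stamp until the next vcpu is switched in. */
    void pcpu_runstate_update_sketch(struct pcpu_time_sketch *p, tsc_t tsc)
    {
        if (p->stamp) {
            if (p->idle)
                p->idle_cycles += tsc - p->stamp;
            else
                p->running_cycles += tsc - p->stamp;
            p->stamp = 0;
        }
    }
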
7026 hvm_close_vmexit(&v->hvm, ri->tsc); in sched_runstate_process()
7066 tsc_t t = (start_tsc == 1) ? 0 : ri->tsc - start_tsc; in sched_runstate_process()
7076 scatterplot_vs_time(ri->tsc, t); in sched_runstate_process()
7117 if(ri->tsc < last_oldstate.tsc) { in sched_runstate_process()
7119 ri->tsc, last_oldstate.tsc); in sched_runstate_process()
7125 lag = ri->tsc in sched_runstate_process()
7126 - last_oldstate.tsc; in sched_runstate_process()
7171 v->runstate.last_oldstate.tsc = ri->tsc; in sched_runstate_process()
7199 vcpu_prev_update(p, p->current, ri->tsc, RUNSTATE_LOST); in sched_runstate_process()
7200 vcpu_next_update(p, v, ri->tsc); in sched_runstate_process()
7214 runstate_update(v, RUNSTATE_RUNNING, ri->tsc); in sched_runstate_process()
7243 vcpu_prev_update(p, v, ri->tsc, sevt.new_runstate); in sched_runstate_process()
7245 vcpu_prev_update(v->p, v, ri->tsc, sevt.new_runstate); in sched_runstate_process()
7252 lose_vcpu(v, ri->tsc); in sched_runstate_process()
7262 vcpu_next_update(p, v, ri->tsc); in sched_runstate_process()
7269 runstate_update(v, sevt.new_runstate, ri->tsc); in sched_runstate_process()
7315 vcpu_prev_update(p, prev, ri->tsc, RUNSTATE_QUEUED); /* FIXME */ in sched_switch_process()
7317 vcpu_next_update(p, next, ri->tsc); in sched_switch_process()
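
Runstate-change records can be logged on a different pcpu than the one that recorded the previous state, so the processor compares the stored last_oldstate TSC against the current record's TSC and measures the lag between the two; a record that predates the stored TSC is treated as out of order. A small sketch of that comparison, with a stand-in struct and return convention:

    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t tsc_t;

    /* Stand-in for the "last old state" bookkeeping kept per vcpu. */
    struct last_oldstate_sketch {
        tsc_t tsc;          /* TSC of the record that logged the old state */
    };

    /* Return the lag between the record that set the old state and the
     * current record, or -1 if the records arrived out of order. */
    long long runstate_lag_sketch(const struct last_oldstate_sketch *lo, tsc_t rec_tsc)
    {
        if (rec_tsc < lo->tsc) {
            fprintf(stderr, "Strange, record tsc %llu < last_oldstate tsc %llu\n",
                    (unsigned long long)rec_tsc, (unsigned long long)lo->tsc);
            return -1;  /* out-of-order record; caller discards or resyncs */
        }
        return (long long)(rec_tsc - lo->tsc);
    }
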
8470 abs_cycles_to_time(ri->tsc, &t); in irq_process()
8655 s, ri->event, ri->extra_words, ri->tsc); in dump_raw()
8741 void lose_vcpu(struct vcpu_data *v, tsc_t tsc) in lose_vcpu() argument
8745 runstate_update(v, RUNSTATE_LOST, tsc); in lose_vcpu()
8850 p->lost_record.tsc = first_tsc; in process_lost_records()
8893 if(ri->tsc > p->lost_record.tsc) in process_lost_records_end()
8899 update_cycles(&p->time.lost, ri->tsc - p->lost_record.tsc); in process_lost_records_end()
8923 ri->tsc); in process_lost_records_end()
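
When a trace buffer overflows, affected vcpus are moved to RUNSTATE_LOST and the pcpu remembers the TSC at which records were first lost; once trace data resumes, the gap between that stamp and the first new record is accumulated as lost time. A minimal sketch of that bookkeeping, with invented field names:

    #include <stdint.h>

    typedef uint64_t tsc_t;

    struct lost_sketch {
        tsc_t lost_tsc;       /* TSC when records were first lost        */
        uint64_t lost_cycles; /* total cycles spent in lost-record gaps  */
    };

    void lost_records_begin(struct lost_sketch *p, tsc_t first_tsc)
    {
        p->lost_tsc = first_tsc;
    }

    /* On the first record after the gap, charge the gap as lost time. */
    void lost_records_end(struct lost_sketch *p, tsc_t rec_tsc)
    {
        if (rec_tsc > p->lost_tsc)
            p->lost_cycles += rec_tsc - p->lost_tsc;
        p->lost_tsc = 0;
    }
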
9112 if(ri->tsc && ri->tsc >= P.f.first_tsc) { in process_record_tsc()
9115 tsc_t tsc = order_tsc; in process_record_tsc() local
9118 P.f.first_tsc = tsc; in process_record_tsc()
9120 P.interval.start_tsc = tsc; in process_record_tsc()
9124 if(P.interval.start_tsc > tsc) { in process_record_tsc()
9126 tsc, P.interval.start_tsc); in process_record_tsc()
9129 while ( tsc - P.interval.start_tsc > opt.interval.cycles ) { in process_record_tsc()
9137 P.f.last_tsc=tsc; in process_record_tsc()
9141 P.now = tsc; in process_record_tsc()
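
process_record_tsc seeds first_tsc and the interval start from the first in-range record, then steps the interval window forward whenever the current TSC has moved more than opt.interval.cycles past the interval start, finally updating last_tsc and the notion of "now". A sketch of that loop, assuming a nonzero interval width and a hypothetical emit_interval() callback standing in for the real per-interval output:

    #include <stdint.h>

    typedef uint64_t tsc_t;

    struct interval_sketch {
        tsc_t first_tsc;       /* TSC of the first processed record            */
        tsc_t start_tsc;       /* start of the current output interval         */
        tsc_t interval_cycles; /* width of one interval (assumed nonzero)      */
        tsc_t last_tsc;        /* TSC of the most recent record                */
    };

    /* Hypothetical callback standing in for the real interval output. */
    void emit_interval(struct interval_sketch *s)
    {
        s->start_tsc += s->interval_cycles;  /* advance the window by one step */
    }

    void process_record_tsc_sketch(struct interval_sketch *s, tsc_t tsc)
    {
        if (!s->first_tsc) {
            s->first_tsc = tsc;
            s->start_tsc = tsc;    /* first record seeds the interval window */
        }

        /* Emit intervals until the current record falls inside the window. */
        while (tsc - s->start_tsc > s->interval_cycles)
            emit_interval(s);

        s->last_tsc = tsc;
    }
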
9154 abs_cycles_to_time(ri->tsc, &ri->t); in create_dump_header()
9299 += ri->tsc - cycles; in process_cpu_change()
9304 ri->tsc - cycles); in process_cpu_change()
9307 cycles = ri->tsc - p->volume.buffer_first_tsc; in process_cpu_change()
9316 cycles = ri->tsc - P.buffer_trace_virq_tsc; in process_cpu_change()
9475 p->volume.buffer_first_tsc = ri->tsc; in process_record()
9607 tsc_t tsc=0; in __fill_in_record_info() local
9615 tsc = (((tsc_t)ri->rec.u.tsc.tsc_hi) << 32) in __fill_in_record_info()
9616 | ri->rec.u.tsc.tsc_lo; in __fill_in_record_info()
9618 tsc += p->tsc_skew.offset; in __fill_in_record_info()
9620 ri->tsc = tsc; in __fill_in_record_info()
9621 ri->d = ri->rec.u.tsc.data; in __fill_in_record_info()
9624 p->first_tsc = tsc; in __fill_in_record_info()
9632 p->order_tsc = tsc; in __fill_in_record_info()
9634 p->last_tsc = tsc; in __fill_in_record_info()
9636 ri->tsc = p->last_tsc; in __fill_in_record_info()
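
Trace records carry the TSC split into 32-bit halves; __fill_in_record_info reassembles the 64-bit value, applies the per-pcpu skew offset, and updates the pcpu's first/order/last TSC stamps, while records without a TSC inherit the pcpu's last_tsc. A self-contained sketch of that reconstruction; the record and pcpu structs here are simplified stand-ins, and the first_tsc condition is an assumption:

    #include <stdint.h>

    typedef uint64_t tsc_t;

    /* Simplified stand-ins for the trace record and per-pcpu state. */
    struct rec_sketch {
        int has_tsc;
        uint32_t tsc_hi, tsc_lo;   /* TSC split into 32-bit halves */
    };

    struct pcpu_tsc_sketch {
        int64_t skew_offset;       /* per-pcpu TSC skew correction */
        tsc_t first_tsc, order_tsc, last_tsc;
    };

    /* Reassemble the 64-bit TSC, apply skew, and update the pcpu stamps.
     * Records without a TSC inherit the pcpu's last seen value. */
    tsc_t fill_in_tsc_sketch(struct pcpu_tsc_sketch *p, const struct rec_sketch *r)
    {
        tsc_t tsc;

        if (!r->has_tsc)
            return p->last_tsc;

        tsc = ((tsc_t)r->tsc_hi << 32) | r->tsc_lo;
        tsc += p->skew_offset;

        if (!p->first_tsc)             /* first stamped record on this pcpu (assumed check) */
            p->first_tsc = tsc;
        p->order_tsc = tsc;            /* keeps cross-pcpu processing in order */
        p->last_tsc = tsc;

        return tsc;
    }
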
9851 if(p->ri.tsc > p->order_tsc) in process_records()
9852 p->order_tsc = p->ri.tsc; in process_records()
9855 p->ri.tsc, p->order_tsc); in process_records()