Lines matching refs: timestamp

87 	u64			timestamp;  member
355 get_new_event(struct task_desc *task, u64 timestamp) in get_new_event() argument
361 event->timestamp = timestamp; in get_new_event()
383 u64 timestamp, u64 duration) in add_sched_event_run() argument
397 event = get_new_event(task, timestamp); in add_sched_event_run()
406 u64 timestamp, struct task_desc *wakee) in add_sched_event_wakeup() argument
410 event = get_new_event(task, timestamp); in add_sched_event_wakeup()
432 u64 timestamp) in add_sched_event_sleep() argument
434 struct sched_atom *event = get_new_event(task, timestamp); in add_sched_event_sleep()
852 u64 timestamp0, timestamp = sample->time; in replay_switch_event() local
864 delta = timestamp - timestamp0; in replay_switch_event()
879 sched->cpu_last_switched[cpu] = timestamp; in replay_switch_event()
881 add_sched_event_run(sched, prev, timestamp, delta); in replay_switch_event()
882 add_sched_event_sleep(sched, prev, timestamp); in replay_switch_event()
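
The replay-path references above (get_new_event() through replay_switch_event(), all from perf's sched tooling) share one bookkeeping pattern: every switch sample carries a timestamp, the time of the previous switch on the same CPU is looked up, and the difference becomes the duration of the run atom appended for the outgoing task, followed by a sleep atom. Below is a minimal, self-contained sketch of that per-CPU delta bookkeeping; the fixed-size array and the stand-alone function are assumptions for illustration, not the real perf structures.

#include <stdint.h>
#include <stdio.h>

typedef uint64_t u64;

#define MAX_CPUS 8

/* Time of the last sched_switch seen on each CPU; 0 means "none yet".
 * Plays the role of sched->cpu_last_switched[] in the listing. */
static u64 cpu_last_switched[MAX_CPUS];

/* Return how long the outgoing task ran on 'cpu': the distance from the
 * previous switch on that CPU to the current sample time. */
static u64 switch_delta(int cpu, u64 timestamp)
{
	u64 timestamp0 = cpu_last_switched[cpu];
	u64 delta = timestamp0 ? timestamp - timestamp0 : 0;

	cpu_last_switched[cpu] = timestamp;
	return delta;
}

int main(void)
{
	printf("%llu\n", (unsigned long long)switch_delta(0, 1000)); /* 0: first switch on CPU 0 */
	printf("%llu\n", (unsigned long long)switch_delta(0, 1750)); /* 750: previous task ran 750 ns */
	return 0;
}

In the listing, the resulting delta feeds add_sched_event_run() and the outgoing task additionally gets a sleep atom via add_sched_event_sleep().
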
1049 u64 timestamp) in add_sched_out_event() argument
1057 atom->sched_out_time = timestamp; in add_sched_out_event()
1070 u64 timestamp __maybe_unused) in add_runtime_event() argument
1083 add_sched_in_event(struct work_atoms *atoms, u64 timestamp) in add_sched_in_event() argument
1096 if (timestamp < atom->wake_up_time) { in add_sched_in_event()
1102 atom->sched_in_time = timestamp; in add_sched_in_event()
1109 atoms->max_lat_end = timestamp; in add_sched_in_event()
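
The add_sched_out_event()/add_sched_in_event() references carry the actual latency accounting: an atom records when a task was switched out and later woken, and once the task is scheduled back in, the gap between its sched-in time and wake-up time is the scheduling latency, with the largest one remembered together with the timestamp at which it ended (max_lat_end). A hedged sketch of that update follows; the field names sched_out_time, wake_up_time, sched_in_time and max_lat_end come from the listing, everything else (struct layout, max_lat) is assumed.

#include <stdint.h>

typedef uint64_t u64;

/* Simplified stand-ins for the work_atom/work_atoms structures above. */
struct work_atom {
	u64 sched_out_time;
	u64 wake_up_time;
	u64 sched_in_time;
};

struct work_stats {
	u64 max_lat;
	u64 max_lat_end;	/* when the worst-case latency ended */
};

/* The task behind 'atom' got the CPU at 'timestamp': compute its wakeup
 * latency and update the per-task maximum. */
void sched_in(struct work_atom *atom, struct work_stats *stats, u64 timestamp)
{
	u64 delta;

	/* A sched-in stamped before the recorded wake-up means the samples
	 * are inconsistent; skip rather than report a bogus latency (the
	 * listing shows the same timestamp < wake_up_time check). */
	if (timestamp < atom->wake_up_time)
		return;

	atom->sched_in_time = timestamp;
	delta = atom->sched_in_time - atom->wake_up_time;

	if (delta > stats->max_lat) {
		stats->max_lat = delta;
		stats->max_lat_end = timestamp;
	}
}
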
1139 u64 timestamp0, timestamp = sample->time; in latency_switch_event() local
1146 sched->cpu_last_switched[cpu] = timestamp; in latency_switch_event()
1148 delta = timestamp - timestamp0; in latency_switch_event()
1172 if (add_sched_out_event(out_events, prev_state, timestamp)) in latency_switch_event()
1188 if (add_sched_out_event(in_events, 'R', timestamp)) in latency_switch_event()
1191 add_sched_in_event(in_events, timestamp); in latency_switch_event()
1208 u64 timestamp = sample->time; in latency_runtime_event() local
1223 if (add_sched_out_event(atoms, 'R', timestamp)) in latency_runtime_event()
1227 add_runtime_event(atoms, runtime, timestamp); in latency_runtime_event()
1243 u64 timestamp = sample->time; in latency_wakeup_event() local
1258 if (add_sched_out_event(atoms, 'S', timestamp)) in latency_wakeup_event()
1281 if (atom->sched_out_time > timestamp) { in latency_wakeup_event()
1287 atom->wake_up_time = timestamp; in latency_wakeup_event()
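
latency_wakeup_event() stamps the pending atom with its wake-up time, but only after an ordering check: a wakeup whose timestamp lies before the atom's own sched-out can show up when samples from different CPUs are merged slightly out of order, and it must not be allowed to turn into a negative latency later. A sketch of that guard, reusing the stand-in struct work_atom from the previous example (what the real handler does beyond skipping such a sample is not visible in the listing):

/* Record when the waiting task was woken. Returns 0 and leaves the atom
 * untouched if the wakeup is stamped earlier than the sched-out it
 * belongs to (out-of-order samples), 1 otherwise. */
int record_wakeup(struct work_atom *atom, u64 timestamp)
{
	if (atom->sched_out_time > timestamp)
		return 0;

	atom->wake_up_time = timestamp;
	return 1;
}
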
1301 u64 timestamp = sample->time; in latency_migrate_task_event() local
1326 if (add_sched_out_event(atoms, 'R', timestamp)) in latency_migrate_task_event()
1333 atom->sched_in_time = atom->sched_out_time = atom->wake_up_time = timestamp; in latency_migrate_task_event()
1337 if (atom->sched_out_time > timestamp) in latency_migrate_task_event()
1641 u64 timestamp0, timestamp = sample->time; in map_switch_event() local
1669 sched->cpu_last_switched[this_cpu.cpu] = timestamp; in map_switch_event()
1671 delta = timestamp - timestamp0; in map_switch_event()
1752 timestamp__scnprintf_usec(timestamp, stimestamp, sizeof(stimestamp)); in map_switch_event()
1784 timestamp__scnprintf_usec(timestamp, stimestamp, sizeof(stimestamp)); in map_switch_event()
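
map_switch_event() prints sample times with microsecond resolution through timestamp__scnprintf_usec(). Since perf sample times are in nanoseconds, the formatting amounts to splitting the value into seconds and microseconds; the sketch below shows that expected shape and is an illustration, not the perf helper itself.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

typedef uint64_t u64;

#define NSEC_PER_SEC	1000000000ULL
#define NSEC_PER_USEC	1000ULL

/* Render a nanosecond timestamp as "seconds.microseconds". */
static int fmt_usec(u64 timestamp, char *buf, size_t sz)
{
	u64 sec  = timestamp / NSEC_PER_SEC;
	u64 usec = (timestamp % NSEC_PER_SEC) / NSEC_PER_USEC;

	return snprintf(buf, sz, "%" PRIu64 ".%06" PRIu64, sec, usec);
}

int main(void)
{
	char stimestamp[32];

	fmt_usec(1234567890123ULL, stimestamp, sizeof(stimestamp));
	puts(stimestamp);	/* prints: 1234.567890 */
	return 0;
}
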
2002 static void evsel__save_time(struct evsel *evsel, u64 timestamp, u32 cpu) in evsel__save_time() argument
2024 r->last_time[cpu] = timestamp; in evsel__save_time()
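
evsel__save_time() caches, per event and per CPU, the time of the last sample that was seen, so later processing can ask "when did this event last fire on this CPU". A minimal sketch of such a cache using a fixed-size array; the struct name and the fixed sizing are assumptions, and the real helper manages its per-CPU storage through the evsel itself.

#include <stdint.h>

typedef uint64_t u64;

#define MAX_CPUS 64

/* Per-event runtime data: last sample time observed on each CPU. */
struct evsel_runtime {
	u64 last_time[MAX_CPUS];
};

void save_time(struct evsel_runtime *r, u64 timestamp, uint32_t cpu)
{
	if (cpu < MAX_CPUS)		/* sketch only: no dynamic growth here */
		r->last_time[cpu] = timestamp;
}

u64 last_time_on(const struct evsel_runtime *r, uint32_t cpu)
{
	return (cpu < MAX_CPUS) ? r->last_time[cpu] : 0;
}
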