Lines matching refs: ret_stack

163 #define RET_STACK(t, offset) ((struct ftrace_ret_stack *)(&(t)->ret_stack[offset]))
169 #define SHADOW_STACK_TASK_VARS(ret_stack) \ argument
170 ((unsigned long *)(&(ret_stack)[SHADOW_STACK_OFFSET - FGRAPH_ARRAY_SIZE]))
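
The two macros above fix the shadow stack's geometry: RET_STACK() casts a word offset in the per-task ret_stack array to a struct ftrace_ret_stack frame, and SHADOW_STACK_TASK_VARS() points at the FGRAPH_ARRAY_SIZE words reserved at the very top of the stack for per-fgraph_ops task variables. A minimal user-space sketch of that layout follows, with illustrative sizes (the real values are derived from PAGE_SIZE in the kernel source these matches come from):

    /* Layout sketch only; the sizes below are stand-ins, not the kernel's. */
    #include <stdio.h>

    #define SHADOW_STACK_SIZE   4096                               /* bytes */
    #define SHADOW_STACK_OFFSET (SHADOW_STACK_SIZE / sizeof(long)) /* words */
    #define FGRAPH_ARRAY_SIZE   16      /* one reserved word per fgraph_ops */

    int main(void)
    {
            unsigned long ret_stack[SHADOW_STACK_OFFSET];
            /* Task vars occupy the top FGRAPH_ARRAY_SIZE words ... */
            unsigned long *gvals =
                    &ret_stack[SHADOW_STACK_OFFSET - FGRAPH_ARRAY_SIZE];

            /* ... so call frames may only grow up to that boundary. */
            printf("words usable for frames: %zu\n",
                   (size_t)(gvals - ret_stack));
            return 0;
    }
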
249 return t->ret_stack[offset]; in get_fgraph_entry()
255 return __get_offset(t->ret_stack[offset]); in get_frame_offset()
262 return (t->ret_stack[offset] >> FGRAPH_INDEX_SHIFT) & FGRAPH_INDEX_MASK; in get_bitmap_bits()
269 t->ret_stack[offset] = (bitmap << FGRAPH_INDEX_SHIFT) | in set_bitmap()
276 unsigned long val = t->ret_stack[offset]; in get_data_type_data()
281 return (void *)&t->ret_stack[offset]; in get_data_type_data()
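
The accessors matched at lines 249-281 all decode a single packed word stored at the top of each shadow-stack frame: the frame size in the low bits, a small type field above it, and a bitmap of attached fgraph_ops indexes above that. A compilable model of the encoding follows; the bit widths and type values here are assumptions for illustration, the authoritative definitions sit next to these matches in the kernel source:

    /* Model of the packed frame word; bit widths here are illustrative. */
    #include <assert.h>

    #define FGRAPH_FRAME_OFFSET_BITS 10
    #define FGRAPH_FRAME_OFFSET_MASK ((1UL << FGRAPH_FRAME_OFFSET_BITS) - 1)
    #define FGRAPH_TYPE_SHIFT        FGRAPH_FRAME_OFFSET_BITS
    #define FGRAPH_TYPE_BITS         2
    #define FGRAPH_INDEX_SHIFT       (FGRAPH_TYPE_SHIFT + FGRAPH_TYPE_BITS)
    #define FGRAPH_INDEX_BITS        16
    #define FGRAPH_INDEX_MASK        ((1UL << FGRAPH_INDEX_BITS) - 1)

    enum fgraph_type { FGRAPH_TYPE_RESERVED, FGRAPH_TYPE_BITMAP, FGRAPH_TYPE_DATA };

    /* Mirrors set_bitmap() at line 269: pack bitmap | type | frame size. */
    static unsigned long make_bitmap_word(unsigned long bitmap,
                                          unsigned long frame_words)
    {
            return (bitmap << FGRAPH_INDEX_SHIFT) |
                   ((unsigned long)FGRAPH_TYPE_BITMAP << FGRAPH_TYPE_SHIFT) |
                   frame_words;
    }

    /* Mirrors get_bitmap_bits() at line 262. */
    static unsigned long word_bitmap(unsigned long word)
    {
            return (word >> FGRAPH_INDEX_SHIFT) & FGRAPH_INDEX_MASK;
    }

    int main(void)
    {
            unsigned long w = make_bitmap_word(0x5, 6);

            assert(word_bitmap(w) == 0x5);
            assert((w & FGRAPH_FRAME_OFFSET_MASK) == 6);
            return 0;
    }

get_data_type_data() at line 276 reads the same word: a FGRAPH_TYPE_DATA entry additionally records a data size, and the payload sits just below the tag word on the stack, which is why line 281 returns a pointer back into ret_stack itself.
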
305 unsigned long *gvals = SHADOW_STACK_TASK_VARS(t->ret_stack); in ret_stack_set_task_var()
313 unsigned long *gvals = SHADOW_STACK_TASK_VARS(t->ret_stack); in ret_stack_get_task_var()
318 static void ret_stack_init_task_vars(unsigned long *ret_stack) in ret_stack_init_task_vars() argument
320 unsigned long *gvals = SHADOW_STACK_TASK_VARS(ret_stack); in ret_stack_init_task_vars()
356 data = &current->ret_stack[curr_ret_stack]; in fgraph_reserve_data()
365 current->ret_stack[curr_ret_stack - 1] = val; in fgraph_reserve_data()
371 current->ret_stack[curr_ret_stack - 1] = val; in fgraph_reserve_data()
526 struct ftrace_ret_stack *ret_stack; in ftrace_push_return_trace() local
534 if (!current->ret_stack) in ftrace_push_return_trace()
560 ret_stack = RET_STACK(current, offset); in ftrace_push_return_trace()
564 current->ret_stack[offset] = val; in ftrace_push_return_trace()
565 ret_stack->ret = ret; in ftrace_push_return_trace()
588 current->ret_stack[offset] = val; in ftrace_push_return_trace()
590 ret_stack->ret = ret; in ftrace_push_return_trace()
591 ret_stack->func = func; in ftrace_push_return_trace()
592 ret_stack->calltime = calltime; in ftrace_push_return_trace()
594 ret_stack->fp = frame_pointer; in ftrace_push_return_trace()
596 ret_stack->retp = retp; in ftrace_push_return_trace()
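
From the field assignments matched at lines 565 and 590-596, the frame written by ftrace_push_return_trace() can be reconstructed. A simplified version (in the kernel, fp and retp are guarded by HAVE_FUNCTION_GRAPH_FP_TEST and HAVE_FUNCTION_GRAPH_RET_ADDR_PTR; the #ifdefs are omitted here):

    /* Reconstructed from the matches above; #ifdef guards omitted. */
    struct ftrace_ret_stack {
            unsigned long      ret;      /* real return address, parked here while
                                          * the stack slot points at the trampoline */
            unsigned long      func;     /* entry address of the traced function */
            unsigned long long calltime; /* timestamp taken on entry */
            unsigned long      fp;       /* frame pointer on entry, re-checked on
                                          * exit (see the test at line 710) */
            unsigned long     *retp;     /* where @ret lives on the real stack */
    };

The duplicated writes at lines 564/588 and 565/590 store the packed frame word and return address on both sides of the curr_ret_stack increment, so an interrupt nesting between the two still finds a frame of the correct size.
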
682 struct ftrace_ret_stack *ret_stack; in ftrace_pop_return_trace() local
684 ret_stack = get_ret_stack(current, current->curr_ret_stack, offset); in ftrace_pop_return_trace()
686 if (unlikely(!ret_stack)) { in ftrace_pop_return_trace()
710 if (unlikely(ret_stack->fp != frame_pointer)) { in ftrace_pop_return_trace()
714 ret_stack->fp, in ftrace_pop_return_trace()
716 (void *)ret_stack->func, in ftrace_pop_return_trace()
717 ret_stack->ret); in ftrace_pop_return_trace()
724 *ret = ret_stack->ret; in ftrace_pop_return_trace()
725 trace->func = ret_stack->func; in ftrace_pop_return_trace()
726 trace->calltime = ret_stack->calltime; in ftrace_pop_return_trace()
736 return ret_stack; in ftrace_pop_return_trace()
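
The fragments at lines 710-717 are the frame-pointer sanity check on the pop path, reconstructed approximately below (exact message wording may differ). A mismatch means the shadow stack has drifted from the real one, so tracing is stopped rather than letting the trampoline return to a stale address:

    /* Approximate reconstruction of the check at line 710. */
    if (unlikely(ret_stack->fp != frame_pointer)) {
            ftrace_graph_stop();
            WARN(1, "Bad frame pointer: expected %lx, received %lx\n"
                 "  from func %ps return to %lx\n",
                 ret_stack->fp,
                 frame_pointer,
                 (void *)ret_stack->func,
                 ret_stack->ret);
            *ret = (unsigned long)panic;
            return NULL;
    }
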
774 struct ftrace_ret_stack *ret_stack; in __ftrace_return_to_handler() local
781 ret_stack = ftrace_pop_return_trace(&trace, &ret, frame_pointer, &offset); in __ftrace_return_to_handler()
783 if (unlikely(!ret_stack)) { in __ftrace_return_to_handler()
857 struct ftrace_ret_stack *ret_stack = NULL; in ftrace_graph_get_ret_stack() local
864 ret_stack = get_ret_stack(task, offset, &offset); in ftrace_graph_get_ret_stack()
865 } while (ret_stack && --idx >= 0); in ftrace_graph_get_ret_stack()
867 return ret_stack; in ftrace_graph_get_ret_stack()
893 struct ftrace_ret_stack *ret_stack; in ftrace_graph_ret_addr() local
905 ret_stack = get_ret_stack(task, i, &i); in ftrace_graph_ret_addr()
906 if (!ret_stack) in ftrace_graph_ret_addr()
916 if (ret_stack->retp == retp && in ftrace_graph_ret_addr()
917 ret_stack->ret != return_handler) { in ftrace_graph_ret_addr()
919 return ret_stack->ret; in ftrace_graph_ret_addr()
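
Lines 905-919 are the core of ftrace_graph_ret_addr(): when a stack unwinder encounters the return trampoline on the real stack, the shadow stack is walked to substitute the original return address recorded for that slot. A sketch of the loop, with the saved-cursor and tail-call subtleties elided:

    /* Sketch only; the kernel version also resumes the walk from a saved
     * unwind cursor and copes with tail calls stacking multiple frames. */
    i = task->curr_ret_stack;
    while (i > 0) {
            ret_stack = get_ret_stack(task, i, &i);
            if (!ret_stack)
                    break;
            if (ret_stack->retp == retp &&
                ret_stack->ret != return_handler)
                    return ret_stack->ret;  /* the caller's real address */
    }
    return ret;  /* no match: hand the address back unchanged */
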
990 if (t->ret_stack == NULL) { in alloc_retstack_tasklist()
997 t->ret_stack = ret_stack_list[start++]; in alloc_retstack_tasklist()
1015 struct ftrace_ret_stack *ret_stack; in ftrace_graph_probe_sched_switch() local
1041 ret_stack = get_ret_stack(next, offset, &offset); in ftrace_graph_probe_sched_switch()
1042 if (ret_stack) in ftrace_graph_probe_sched_switch()
1043 ret_stack->calltime += timestamp; in ftrace_graph_probe_sched_switch()
1050 graph_init_task(struct task_struct *t, unsigned long *ret_stack) in graph_init_task() argument
1053 ret_stack_init_task_vars(ret_stack); in graph_init_task()
1059 t->ret_stack = ret_stack; in graph_init_task()
1074 if (t->ret_stack) in ftrace_graph_init_idle_task()
1075 WARN_ON(t->ret_stack != per_cpu(idle_ret_stack, cpu)); in ftrace_graph_init_idle_task()
1078 unsigned long *ret_stack; in ftrace_graph_init_idle_task() local
1080 ret_stack = per_cpu(idle_ret_stack, cpu); in ftrace_graph_init_idle_task()
1081 if (!ret_stack) { in ftrace_graph_init_idle_task()
1082 ret_stack = kmalloc(SHADOW_STACK_SIZE, GFP_KERNEL); in ftrace_graph_init_idle_task()
1083 if (!ret_stack) in ftrace_graph_init_idle_task()
1085 per_cpu(idle_ret_stack, cpu) = ret_stack; in ftrace_graph_init_idle_task()
1087 graph_init_task(t, ret_stack); in ftrace_graph_init_idle_task()
1095 t->ret_stack = NULL; in ftrace_graph_init_task()
1100 unsigned long *ret_stack; in ftrace_graph_init_task() local
1102 ret_stack = kmalloc(SHADOW_STACK_SIZE, GFP_KERNEL); in ftrace_graph_init_task()
1103 if (!ret_stack) in ftrace_graph_init_task()
1105 graph_init_task(t, ret_stack); in ftrace_graph_init_task()
1111 unsigned long *ret_stack = t->ret_stack; in ftrace_graph_exit_task() local
1113 t->ret_stack = NULL; in ftrace_graph_exit_task()
1117 kfree(ret_stack); in ftrace_graph_exit_task()
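
The allocation and teardown matches above imply a simple per-task lifecycle: while the graph tracer is active, each task gets one SHADOW_STACK_SIZE kmalloc() seeded by graph_init_task(), and on exit t->ret_stack is cleared before the buffer is freed so a late interrupt cannot walk freed memory. Reconstructed approximately from lines 1095-1117 (extra bookkeeping trimmed):

    /* Approximate reconstruction; extra bookkeeping fields trimmed. */
    void ftrace_graph_init_task(struct task_struct *t)
    {
            /* Make sure the child does not inherit the parent's ret_stack */
            t->ret_stack = NULL;
            t->curr_ret_stack = 0;

            if (ftrace_graph_active) {
                    unsigned long *ret_stack;

                    ret_stack = kmalloc(SHADOW_STACK_SIZE, GFP_KERNEL);
                    if (!ret_stack)
                            return;
                    graph_init_task(t, ret_stack);
            }
    }

    void ftrace_graph_exit_task(struct task_struct *t)
    {
            unsigned long *ret_stack = t->ret_stack;

            t->ret_stack = NULL;
            /* NULL must become visible to IRQs before we free it: */
            barrier();

            kfree(ret_stack);
    }

The idle-task variant at lines 1074-1087 differs only in caching the buffer in a per-CPU idle_ret_stack so it can be reused when the idle task is reinitialized.
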
1192 if (idle_task(cpu)->ret_stack) in init_task_vars()
1198 if (t->ret_stack) in init_task_vars()
1243 if (!idle_task(cpu)->ret_stack) in fgraph_cpu_init()