Lines matching refs:btf (kernel/bpf/verifier.c)

290 	struct btf *btf;  member
292 struct btf *ret_btf;
301 struct btf *btf; member
328 struct btf *arg_btf;
355 struct btf *btf_vmlinux;
357 static const char *btf_type_name(const struct btf *btf, u32 id) in btf_type_name() argument
359 return btf_name_by_offset(btf, btf_type_by_id(btf, id)->name_off); in btf_type_name()
426 meta = btf_find_struct_meta(reg->btf, reg->btf_id); in reg_btf_record()
448 return btf_type_name(env->prog->aux->btf, info->type_id); in subprog_name()
1030 struct btf *btf, u32 btf_id, int nr_slots) in mark_stack_slots_iter() argument
1057 st->iter.btf = btf; in mark_stack_slots_iter()
1130 struct btf *btf, u32 btf_id, int nr_slots) in is_iter_reg_valid_init() argument
1150 if (st->iter.btf != btf || st->iter.btf_id != btf_id) in is_iter_reg_valid_init()
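The mark_stack_slots_iter()/is_iter_reg_valid_init() lines above record which BTF type an open-coded iterator on the stack was initialized with, and reject a later iterator call whose slots were set up for a different btf/btf_id. A minimal sketch of the program-side pattern whose state these checks track, using the numbers-iterator kfuncs (bpf_iter_num_new/next/destroy are existing kfuncs; the section and program names below are illustrative):

/* Illustrative BPF program (not from the listing): an open-coded iterator
 * whose stack slots the verifier types via mark_stack_slots_iter() and later
 * validates in is_iter_reg_valid_init().
 */
#include <vmlinux.h>
#include <bpf/bpf_helpers.h>

extern int bpf_iter_num_new(struct bpf_iter_num *it, int start, int end) __ksym;
extern int *bpf_iter_num_next(struct bpf_iter_num *it) __ksym;
extern void bpf_iter_num_destroy(struct bpf_iter_num *it) __ksym;

SEC("tc")
int count_to_ten(void *ctx)
{
	struct bpf_iter_num it;			/* stack slots tagged with the iter's btf/btf_id */
	int sum = 0, *v;

	bpf_iter_num_new(&it, 0, 10);		/* marks the slots as an initialized iterator */
	while ((v = bpf_iter_num_next(&it)))	/* each call re-checks btf/btf_id against the slots */
		sum += *v;
	bpf_iter_num_destroy(&it);		/* slots invalidated */
	return sum;
}

char _license[] SEC("license") = "GPL";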
2258 regs[regno].btf = ds_head->btf; in mark_reg_graph_node()
2841 struct btf *btf, u32 btf_id, in mark_btf_ld_reg() argument
2851 regs[regno].btf = btf; in mark_btf_ld_reg()
3018 struct btf *btf = aux->btf; in bpf_find_exception_callback_insn_off() local
3029 t = btf_type_by_id(btf, main_btf_id); in bpf_find_exception_callback_insn_off()
3035 name = btf_find_decl_tag_value(btf, t, -1, "exception_callback:"); in bpf_find_exception_callback_insn_off()
3046 ret = btf_find_by_name_kind(btf, name, BTF_KIND_FUNC); in bpf_find_exception_callback_insn_off()
3052 t = btf_type_by_id(btf, id); in bpf_find_exception_callback_insn_off()
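bpf_find_exception_callback_insn_off() above locates a program's exception callback by reading the "exception_callback:" BTF decl tag attached to the main function and then resolving the named FUNC with btf_find_by_name_kind(). On the program side this is conventionally spelled with the selftests' __exception_cb() macro; a hedged sketch (macro spelling follows tools/testing/selftests/bpf/bpf_experimental.h as I recall it, and the callback and program are invented for illustration):

#include <vmlinux.h>
#include <bpf/bpf_helpers.h>

/* Emits the decl tag that bpf_find_exception_callback_insn_off() parses. */
#define __exception_cb(name) __attribute__((btf_decl_tag("exception_callback:" #name)))

extern void bpf_throw(u64 cookie) __ksym;

__noinline int my_exception_cb(u64 cookie)
{
	return 0;	/* becomes the program's return value after an exception */
}

SEC("tc")
__exception_cb(my_exception_cb)	/* tag lives on the main prog's BTF FUNC */
int prog_that_may_throw(void *ctx)
{
	bpf_throw(0);
	return 1;
}

char _license[] SEC("license") = "GPL";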
3089 struct btf *btf; member
3153 static struct btf *__find_kfunc_desc_btf(struct bpf_verifier_env *env, in __find_kfunc_desc_btf()
3160 struct btf *btf; in __find_kfunc_desc_btf() local
3182 btf = btf_get_by_fd(btf_fd); in __find_kfunc_desc_btf()
3183 if (IS_ERR(btf)) { in __find_kfunc_desc_btf()
3185 return btf; in __find_kfunc_desc_btf()
3188 if (!btf_is_module(btf)) { in __find_kfunc_desc_btf()
3190 btf_put(btf); in __find_kfunc_desc_btf()
3194 mod = btf_try_get_module(btf); in __find_kfunc_desc_btf()
3196 btf_put(btf); in __find_kfunc_desc_btf()
3201 b->btf = btf; in __find_kfunc_desc_btf()
3211 btf = b->btf; in __find_kfunc_desc_btf()
3214 return btf; in __find_kfunc_desc_btf()
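__find_kfunc_desc_btf() above resolves the BTF a kfunc call refers to when the call's offset names a module BTF fd: it takes a reference on the fd's BTF, insists it is module (not vmlinux) BTF, and pins the owning module so it cannot unload under the program. A compressed sketch of that lookup-and-refcount pattern, assuming the in-kernel btf_get_by_fd()/btf_is_module()/btf_try_get_module()/btf_put() APIs; the helper name is made up, and the real function additionally caches the result in the per-program table freed by bpf_free_kfunc_btf_tab():

#include <linux/btf.h>
#include <linux/module.h>
#include <linux/err.h>

/* Hypothetical condensed version of the fd -> module BTF resolution above. */
static struct btf *resolve_module_kfunc_btf(int btf_fd)
{
	struct module *mod;
	struct btf *btf;

	btf = btf_get_by_fd(btf_fd);	/* takes a BTF reference on success */
	if (IS_ERR(btf))
		return btf;

	if (!btf_is_module(btf)) {	/* vmlinux kfuncs use offset 0, never an fd */
		btf_put(btf);
		return ERR_PTR(-EINVAL);
	}

	mod = btf_try_get_module(btf);	/* keep the module alive while the prog uses it */
	if (!mod) {
		btf_put(btf);
		return ERR_PTR(-ENXIO);
	}

	return btf;			/* caller must eventually module_put() and btf_put() */
}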
3224 btf_put(tab->descs[tab->nr_descs].btf); in bpf_free_kfunc_btf_tab()
3229 static struct btf *find_kfunc_desc_btf(struct bpf_verifier_env *env, s16 offset) in find_kfunc_desc_btf()
3253 struct btf *desc_btf; in add_kfunc_call()
4045 struct btf *desc_btf; in disasm_kfunc_name()
5839 const char *targ_name = btf_type_name(kptr_field->kptr.btf, kptr_field->kptr.btf_id); in map_kptr_match_type()
5843 if (btf_is_kernel(reg->btf)) { in map_kptr_match_type()
5859 reg_name = btf_type_name(reg->btf, reg->btf_id); in map_kptr_match_type()
5894 if (!btf_struct_ids_match(&env->log, reg->btf, reg->btf_id, reg->off, in map_kptr_match_type()
5895 kptr_field->kptr.btf, kptr_field->kptr.btf_id, in map_kptr_match_type()
5944 static bool rcu_protected_object(const struct btf *btf, u32 btf_id) in rcu_protected_object() argument
5946 if (!btf_is_kernel(btf)) in rcu_protected_object()
5955 if (btf_is_kernel(kptr_field->kptr.btf)) in kptr_pointee_btf_record()
5958 meta = btf_find_struct_meta(kptr_field->kptr.btf, in kptr_pointee_btf_record()
5969 (field->type == BPF_KPTR_REF && rcu_protected_object(kptr->btf, kptr->btf_id)); in rcu_safe_kptr()
5982 else if (!btf_is_kernel(kptr_field->kptr.btf)) in btf_ld_kptr_type()
6001 t = btf_type_by_id(field->kptr.btf, field->kptr.btf_id); in mark_uptr_ld_reg()
6053 kptr_field->kptr.btf, kptr_field->kptr.btf_id, in check_map_kptr_access()
7218 const struct btf_type *t = btf_type_by_id(reg->btf, reg->btf_id); in check_ptr_to_btf_access()
7219 const char *tname = btf_name_by_offset(reg->btf, t->name_off); in check_ptr_to_btf_access()
7231 if (!env->prog->gpl_compatible && btf_is_kernel(reg->btf)) { in check_ptr_to_btf_access()
7268 if (!btf_is_kernel(reg->btf)) { in check_ptr_to_btf_access()
7356 ret = mark_btf_ld_reg(env, regs, value_regno, ret, reg->btf, btf_id, flag); in check_ptr_to_btf_access()
7685 regs[value_regno].btf = info.btf; in check_mem_access()
8412 struct btf *btf = NULL; in process_spin_lock() local
8425 if (!map->btf) { in process_spin_lock()
8432 btf = reg->btf; in process_spin_lock()
8454 ptr = btf; in process_spin_lock()
8487 ptr = btf; in process_spin_lock()
8532 if (!map->btf) { in process_timer_func()
8585 if (!map_ptr->btf) { in process_kptr_func()
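process_spin_lock(), process_timer_func() and process_kptr_func() above all reject maps created without BTF (!map->btf / !map_ptr->btf), because the special field can only be located through the map value's BTF description. A hedged BPF-C sketch of the usual BTF-defined map whose value carries a bpf_spin_lock (map, field and program names are illustrative):

#include <vmlinux.h>
#include <bpf/bpf_helpers.h>

/* The value type's BTF is loaded together with the map, which is what lets
 * process_spin_lock() find the bpf_spin_lock field's offset.
 */
struct val {
	struct bpf_spin_lock lock;
	long counter;
};

struct {
	__uint(type, BPF_MAP_TYPE_ARRAY);
	__uint(max_entries, 1);
	__type(key, __u32);
	__type(value, struct val);
} counters SEC(".maps");

SEC("tc")
int bump(void *ctx)
{
	__u32 key = 0;
	struct val *v = bpf_map_lookup_elem(&counters, &key);

	if (!v)
		return 0;
	bpf_spin_lock(&v->lock);	/* checked against the BTF-described field */
	v->counter++;
	bpf_spin_unlock(&v->lock);
	return 0;
}

char _license[] SEC("license") = "GPL";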
8762 return btf_param_match_suffix(meta->btf, arg, "__iter"); in is_kfunc_arg_iter()
8783 btf_id = btf_check_iter_arg(meta->btf, meta->func_proto, regno - 1); in process_iter_arg()
8788 t = btf_type_by_id(meta->btf, btf_id); in process_iter_arg()
8795 iter_type_str(meta->btf, btf_id), regno - 1); in process_iter_arg()
8806 err = mark_stack_slots_iter(env, meta, reg, insn_idx, meta->btf, btf_id, nr_slots); in process_iter_arg()
8813 err = is_iter_reg_valid_init(env, reg, meta->btf, btf_id, nr_slots); in process_iter_arg()
8819 iter_type_str(meta->btf, btf_id), regno - 1); in process_iter_arg()
9344 if (!btf_struct_ids_match(&env->log, reg->btf, reg->btf_id, reg->off, in check_reg_type()
9348 regno, btf_type_name(reg->btf, reg->btf_id), in check_reg_type()
9826 meta->ret_btf = reg->btf; in check_func_arg()
10454 const struct btf *btf, in btf_check_func_arg_match() argument
10558 struct btf *btf = prog->aux->btf; in btf_check_subprog_call() local
10572 err = btf_check_func_arg_match(env, subprog, btf, regs); in btf_check_subprog_call()
10863 callee->regs[BPF_REG_2].btf = btf_vmlinux; in set_find_vma_callback_state()
11659 type = btf_type_by_id(reg->btf, reg->btf_id); in check_helper_call()
11776 regs[BPF_REG_0].btf = meta.ret_btf; in check_helper_call()
11783 struct btf *ret_btf; in check_helper_call()
11789 ret_btf = meta.kptr_field->kptr.btf; in check_helper_call()
11811 regs[BPF_REG_0].btf = ret_btf; in check_helper_call()
11954 static bool is_kfunc_arg_mem_size(const struct btf *btf, in is_kfunc_arg_mem_size() argument
11960 t = btf_type_skip_modifiers(btf, arg->type, NULL); in is_kfunc_arg_mem_size()
11964 return btf_param_match_suffix(btf, arg, "__sz"); in is_kfunc_arg_mem_size()
11967 static bool is_kfunc_arg_const_mem_size(const struct btf *btf, in is_kfunc_arg_const_mem_size() argument
11973 t = btf_type_skip_modifiers(btf, arg->type, NULL); in is_kfunc_arg_const_mem_size()
11977 return btf_param_match_suffix(btf, arg, "__szk"); in is_kfunc_arg_const_mem_size()
11980 static bool is_kfunc_arg_optional(const struct btf *btf, const struct btf_param *arg) in is_kfunc_arg_optional() argument
11982 return btf_param_match_suffix(btf, arg, "__opt"); in is_kfunc_arg_optional()
11985 static bool is_kfunc_arg_constant(const struct btf *btf, const struct btf_param *arg) in is_kfunc_arg_constant() argument
11987 return btf_param_match_suffix(btf, arg, "__k"); in is_kfunc_arg_constant()
11990 static bool is_kfunc_arg_ignore(const struct btf *btf, const struct btf_param *arg) in is_kfunc_arg_ignore() argument
11992 return btf_param_match_suffix(btf, arg, "__ign"); in is_kfunc_arg_ignore()
11995 static bool is_kfunc_arg_map(const struct btf *btf, const struct btf_param *arg) in is_kfunc_arg_map() argument
11997 return btf_param_match_suffix(btf, arg, "__map"); in is_kfunc_arg_map()
12000 static bool is_kfunc_arg_alloc_obj(const struct btf *btf, const struct btf_param *arg) in is_kfunc_arg_alloc_obj() argument
12002 return btf_param_match_suffix(btf, arg, "__alloc"); in is_kfunc_arg_alloc_obj()
12005 static bool is_kfunc_arg_uninit(const struct btf *btf, const struct btf_param *arg) in is_kfunc_arg_uninit() argument
12007 return btf_param_match_suffix(btf, arg, "__uninit"); in is_kfunc_arg_uninit()
12010 static bool is_kfunc_arg_refcounted_kptr(const struct btf *btf, const struct btf_param *arg) in is_kfunc_arg_refcounted_kptr() argument
12012 return btf_param_match_suffix(btf, arg, "__refcounted_kptr"); in is_kfunc_arg_refcounted_kptr()
12015 static bool is_kfunc_arg_nullable(const struct btf *btf, const struct btf_param *arg) in is_kfunc_arg_nullable() argument
12017 return btf_param_match_suffix(btf, arg, "__nullable"); in is_kfunc_arg_nullable()
12020 static bool is_kfunc_arg_const_str(const struct btf *btf, const struct btf_param *arg) in is_kfunc_arg_const_str() argument
12022 return btf_param_match_suffix(btf, arg, "__str"); in is_kfunc_arg_const_str()
12025 static bool is_kfunc_arg_irq_flag(const struct btf *btf, const struct btf_param *arg) in is_kfunc_arg_irq_flag() argument
12027 return btf_param_match_suffix(btf, arg, "__irq_flag"); in is_kfunc_arg_irq_flag()
12030 static bool is_kfunc_arg_prog(const struct btf *btf, const struct btf_param *arg) in is_kfunc_arg_prog() argument
12032 return btf_param_match_suffix(btf, arg, "__prog"); in is_kfunc_arg_prog()
12035 static bool is_kfunc_arg_scalar_with_name(const struct btf *btf, in is_kfunc_arg_scalar_with_name() argument
12042 param_name = btf_name_by_offset(btf, arg->name_off); in is_kfunc_arg_scalar_with_name()
12073 static bool __is_kfunc_ptr_arg_type(const struct btf *btf, in __is_kfunc_ptr_arg_type() argument
12079 t = btf_type_skip_modifiers(btf, arg->type, NULL); in __is_kfunc_ptr_arg_type()
12084 t = btf_type_skip_modifiers(btf, t->type, &res_id); in __is_kfunc_ptr_arg_type()
12087 return btf_types_are_same(btf, res_id, btf_vmlinux, kf_arg_btf_ids[type]); in __is_kfunc_ptr_arg_type()
12090 static bool is_kfunc_arg_dynptr(const struct btf *btf, const struct btf_param *arg) in is_kfunc_arg_dynptr() argument
12092 return __is_kfunc_ptr_arg_type(btf, arg, KF_ARG_DYNPTR_ID); in is_kfunc_arg_dynptr()
12095 static bool is_kfunc_arg_list_head(const struct btf *btf, const struct btf_param *arg) in is_kfunc_arg_list_head() argument
12097 return __is_kfunc_ptr_arg_type(btf, arg, KF_ARG_LIST_HEAD_ID); in is_kfunc_arg_list_head()
12100 static bool is_kfunc_arg_list_node(const struct btf *btf, const struct btf_param *arg) in is_kfunc_arg_list_node() argument
12102 return __is_kfunc_ptr_arg_type(btf, arg, KF_ARG_LIST_NODE_ID); in is_kfunc_arg_list_node()
12105 static bool is_kfunc_arg_rbtree_root(const struct btf *btf, const struct btf_param *arg) in is_kfunc_arg_rbtree_root() argument
12107 return __is_kfunc_ptr_arg_type(btf, arg, KF_ARG_RB_ROOT_ID); in is_kfunc_arg_rbtree_root()
12110 static bool is_kfunc_arg_rbtree_node(const struct btf *btf, const struct btf_param *arg) in is_kfunc_arg_rbtree_node() argument
12112 return __is_kfunc_ptr_arg_type(btf, arg, KF_ARG_RB_NODE_ID); in is_kfunc_arg_rbtree_node()
12115 static bool is_kfunc_arg_wq(const struct btf *btf, const struct btf_param *arg) in is_kfunc_arg_wq() argument
12117 return __is_kfunc_ptr_arg_type(btf, arg, KF_ARG_WORKQUEUE_ID); in is_kfunc_arg_wq()
12120 static bool is_kfunc_arg_res_spin_lock(const struct btf *btf, const struct btf_param *arg) in is_kfunc_arg_res_spin_lock() argument
12122 return __is_kfunc_ptr_arg_type(btf, arg, KF_ARG_RES_SPIN_LOCK_ID); in is_kfunc_arg_res_spin_lock()
12135 static bool is_kfunc_arg_callback(struct bpf_verifier_env *env, const struct btf *btf, in is_kfunc_arg_callback() argument
12140 t = btf_type_resolve_func_ptr(btf, arg->type, NULL); in is_kfunc_arg_callback()
12149 const struct btf *btf, in __btf_type_is_scalar_struct() argument
12162 member_type = btf_type_skip_modifiers(btf, member->type, NULL); in __btf_type_is_scalar_struct()
12168 if (!__btf_type_is_scalar_struct(env, btf, member_type, rec + 1)) in __btf_type_is_scalar_struct()
12176 member_type = btf_type_skip_modifiers(btf, array->type, NULL); in __btf_type_is_scalar_struct()
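The long run of is_kfunc_arg_*() predicates above classifies kfunc parameters purely from BTF: either by a naming suffix on the parameter (__sz, __szk, __k, __ign, __nullable, __uninit, ...) matched with btf_param_match_suffix(), or by the pointee type's BTF id (bpf_dynptr, bpf_list_head, bpf_rb_node, ...) compared against vmlinux BTF in __is_kfunc_ptr_arg_type(). A hedged sketch of what the suffix convention looks like on a kfunc definition; the kfunc itself is invented, only the suffixes and their meaning follow the predicates above:

/* Hypothetical kfunc prototype illustrating the parameter-name suffixes the
 * verifier predicates key off.  The suffixes do not change the ABI; they only
 * steer argument checking in get_kfunc_ptr_arg_type()/check_kfunc_args().
 */
__bpf_kfunc int bpf_example_copy(void *dst, u32 dst__sz,		/* __sz: size of the preceding mem arg */
				 const void *src, u32 src_len__szk,	/* __szk: size must be a known constant */
				 struct task_struct *task__nullable,	/* __nullable: NULL is accepted */
				 u64 type_id__k,			/* __k: must be a constant scalar */
				 void *meta__ign)			/* __ign: skipped by argument checking */
{
	return 0;
}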
12372 if (btf_is_prog_ctx_type(&env->log, meta->btf, t, resolve_prog_type(env->prog), argno)) in get_kfunc_ptr_arg_type()
12375 if (is_kfunc_arg_nullable(meta->btf, &args[argno]) && register_is_null(reg)) in get_kfunc_ptr_arg_type()
12378 if (is_kfunc_arg_alloc_obj(meta->btf, &args[argno])) in get_kfunc_ptr_arg_type()
12381 if (is_kfunc_arg_refcounted_kptr(meta->btf, &args[argno])) in get_kfunc_ptr_arg_type()
12384 if (is_kfunc_arg_dynptr(meta->btf, &args[argno])) in get_kfunc_ptr_arg_type()
12390 if (is_kfunc_arg_list_head(meta->btf, &args[argno])) in get_kfunc_ptr_arg_type()
12393 if (is_kfunc_arg_list_node(meta->btf, &args[argno])) in get_kfunc_ptr_arg_type()
12396 if (is_kfunc_arg_rbtree_root(meta->btf, &args[argno])) in get_kfunc_ptr_arg_type()
12399 if (is_kfunc_arg_rbtree_node(meta->btf, &args[argno])) in get_kfunc_ptr_arg_type()
12402 if (is_kfunc_arg_const_str(meta->btf, &args[argno])) in get_kfunc_ptr_arg_type()
12405 if (is_kfunc_arg_map(meta->btf, &args[argno])) in get_kfunc_ptr_arg_type()
12408 if (is_kfunc_arg_wq(meta->btf, &args[argno])) in get_kfunc_ptr_arg_type()
12411 if (is_kfunc_arg_irq_flag(meta->btf, &args[argno])) in get_kfunc_ptr_arg_type()
12414 if (is_kfunc_arg_res_spin_lock(meta->btf, &args[argno])) in get_kfunc_ptr_arg_type()
12426 if (is_kfunc_arg_callback(env, meta->btf, &args[argno])) in get_kfunc_ptr_arg_type()
12430 (is_kfunc_arg_mem_size(meta->btf, &args[argno + 1], &regs[regno + 1]) || in get_kfunc_ptr_arg_type()
12431 is_kfunc_arg_const_mem_size(meta->btf, &args[argno + 1], &regs[regno + 1]))) in get_kfunc_ptr_arg_type()
12439 if (!btf_type_is_scalar(ref_t) && !__btf_type_is_scalar_struct(env, meta->btf, ref_t, 0) && in get_kfunc_ptr_arg_type()
12457 const struct btf *reg_btf; in process_kf_arg_ptr_to_btf_id()
12464 reg_btf = reg->btf; in process_kf_arg_ptr_to_btf_id()
12496 btf_type_ids_nocast_alias(&env->log, reg_btf, reg_ref_id, meta->btf, ref_id)) in process_kf_arg_ptr_to_btf_id()
12505 …struct_same = btf_struct_ids_match(&env->log, reg_btf, reg_ref_id, reg->off, meta->btf, ref_id, st… in process_kf_arg_ptr_to_btf_id()
12682 ptr = reg->btf; in check_reg_allocation_locked()
12845 if (meta->btf != btf_vmlinux) { in __process_kf_arg_ptr_to_graph_root()
12913 if (meta->btf != btf_vmlinux) { in __process_kf_arg_ptr_to_graph_node()
12938 et = btf_type_by_id(field->graph_root.btf, field->graph_root.value_btf_id); in __process_kf_arg_ptr_to_graph_node()
12939 t = btf_type_by_id(reg->btf, reg->btf_id); in __process_kf_arg_ptr_to_graph_node()
12940 if (!btf_struct_ids_match(&env->log, reg->btf, reg->btf_id, 0, field->graph_root.btf, in __process_kf_arg_ptr_to_graph_node()
12947 btf_name_by_offset(field->graph_root.btf, et->name_off), in __process_kf_arg_ptr_to_graph_node()
12948 node_off, btf_name_by_offset(reg->btf, t->name_off)); in __process_kf_arg_ptr_to_graph_node()
12951 meta->arg_btf = reg->btf; in __process_kf_arg_ptr_to_graph_node()
12958 btf_name_by_offset(field->graph_root.btf, et->name_off)); in __process_kf_arg_ptr_to_graph_node()
13009 const struct btf *btf = meta->btf; in check_kfunc_args() local
13034 t = btf_type_skip_modifiers(btf, args[i].type, NULL); in check_kfunc_args()
13036 if (is_kfunc_arg_ignore(btf, &args[i])) in check_kfunc_args()
13039 if (is_kfunc_arg_prog(btf, &args[i])) { in check_kfunc_args()
13056 if (is_kfunc_arg_constant(meta->btf, &args[i])) { in check_kfunc_args()
13070 } else if (is_kfunc_arg_scalar_with_name(btf, &args[i], "rdonly_buf_size")) { in check_kfunc_args()
13073 } else if (is_kfunc_arg_scalar_with_name(btf, &args[i], "rdwr_buf_size")) { in check_kfunc_args()
13103 !is_kfunc_arg_nullable(meta->btf, &args[i])) { in check_kfunc_args()
13120 ref_t = btf_type_skip_modifiers(btf, t->type, &ref_id); in check_kfunc_args()
13121 ref_tname = btf_name_by_offset(btf, ref_t->name_off); in check_kfunc_args()
13236 if (meta->btf == btf_vmlinux) { in check_kfunc_args()
13237 meta->arg_btf = reg->btf; in check_kfunc_args()
13249 if (is_kfunc_arg_uninit(btf, &args[i])) in check_kfunc_args()
13372 ref_tname = btf_name_by_offset(btf, ref_t->name_off); in check_kfunc_args()
13390 resolve_ret = btf_resolve_size(btf, ref_t, &type_size); in check_kfunc_args()
13407 if (!register_is_null(buff_reg) || !is_kfunc_arg_optional(meta->btf, buff_arg)) { in check_kfunc_args()
13415 if (is_kfunc_arg_const_mem_size(meta->btf, size_arg, size_reg)) { in check_kfunc_args()
13458 meta->arg_btf = reg->btf; in check_kfunc_args()
13530 struct btf *desc_btf; in fetch_kfunc_meta()
13555 meta->btf = desc_btf; in fetch_kfunc_meta()
13571 const struct btf_type *ptr_type, struct btf *desc_btf) in check_special_kfunc()
13576 if (meta->btf != btf_vmlinux) in check_special_kfunc()
13582 struct btf *ret_btf; in check_special_kfunc()
13593 ret_btf = env->prog->aux->btf; in check_special_kfunc()
13652 regs[BPF_REG_0].btf = ret_btf; in check_special_kfunc()
13662 regs[BPF_REG_0].btf = meta->arg_btf; in check_special_kfunc()
13679 regs[BPF_REG_0].btf = desc_btf; in check_special_kfunc()
13690 regs[BPF_REG_0].btf = desc_btf; in check_special_kfunc()
13758 struct btf *desc_btf; in check_kfunc_call()
13769 desc_btf = meta.btf; in check_kfunc_call()
13961 if (is_kfunc_acquire(&meta) && !btf_type_is_struct_ptr(meta.btf, t)) { in check_kfunc_call()
13963 if (meta.btf != btf_vmlinux || in check_kfunc_call()
13974 if (meta.btf == btf_vmlinux && (meta.func_id == special_kfunc_list[KF_bpf_res_spin_lock] || in check_kfunc_call()
14019 regs[BPF_REG_0].btf = desc_btf; in check_kfunc_call()
14059 if (meta.btf == btf_vmlinux) { in check_kfunc_call()
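check_kfunc_call() above resolves the callee's BTF (vmlinux, or a module BTF found through the offset-indexed descriptor table), checks its arguments, and then types R0 from the kfunc's return type. From BPF C a kfunc is simply declared __ksym and called; a minimal hedged example around the bpf_task_acquire()/bpf_task_release() kfunc pair (the tracepoint hook and body are illustrative):

#include <vmlinux.h>
#include <bpf/bpf_helpers.h>
#include <bpf/bpf_tracing.h>

/* These extern __ksym declarations become BPF_PSEUDO_KFUNC_CALL instructions
 * that check_kfunc_call() resolves against kernel BTF.
 */
extern struct task_struct *bpf_task_acquire(struct task_struct *p) __ksym;
extern void bpf_task_release(struct task_struct *p) __ksym;

SEC("tp_btf/task_newtask")
int BPF_PROG(on_newtask, struct task_struct *task, u64 clone_flags)
{
	struct task_struct *t = bpf_task_acquire(task);	/* R0: acquired PTR_TO_BTF_ID */

	if (!t)
		return 0;
	bpf_printk("new task pid %d", t->pid);
	bpf_task_release(t);	/* acquired reference must be released */
	return 0;
}

char _license[] SEC("license") = "GPL";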
16907 dst_reg->btf = aux->btf_var.btf; in check_ld_imm()
17092 reg_type = reg->btf ? btf_type_by_id(reg->btf, reg->btf_id) : NULL; in check_return_code()
17504 cs->is_void = btf_type_is_void(btf_type_by_id(meta.btf, meta.func_proto->type)); in get_call_summary()
17950 const struct btf *btf; in check_btf_func_early() local
17971 btf = prog->aux->btf; in check_btf_func_early()
18018 type = btf_type_by_id(btf, krecord[i].type_id); in check_btf_func_early()
18025 func_proto = btf_type_by_id(btf, type->type); in check_btf_func_early()
18052 const struct btf *btf; in check_btf_func() local
18071 btf = prog->aux->btf; in check_btf_func()
18090 type = btf_type_by_id(btf, krecord[i].type_id); in check_btf_func()
18093 func_proto = btf_type_by_id(btf, type->type); in check_btf_func()
18095 ret_type = btf_type_skip_modifiers(btf, func_proto->type, NULL); in check_btf_func()
18142 const struct btf *btf; in check_btf_line() local
18167 btf = prog->aux->btf; in check_btf_line()
18220 if (!btf_name_by_offset(btf, linfo[i].line_off) || in check_btf_line()
18221 !btf_name_by_offset(btf, linfo[i].file_name_off)) { in check_btf_line()
18269 const struct btf *btf = prog->aux->btf; in check_core_relo() local
18272 .btf = btf, in check_core_relo()
18335 struct btf *btf; in check_btf_info_early() local
18344 btf = btf_get_by_fd(attr->prog_btf_fd); in check_btf_info_early()
18345 if (IS_ERR(btf)) in check_btf_info_early()
18346 return PTR_ERR(btf); in check_btf_info_early()
18347 if (btf_is_kernel(btf)) { in check_btf_info_early()
18348 btf_put(btf); in check_btf_info_early()
18351 env->prog->aux->btf = btf; in check_btf_info_early()
18831 if (old_reg->iter.btf != cur_reg->iter.btf || in stacksafe()
20150 static int find_btf_percpu_datasec(struct btf *btf) in find_btf_percpu_datasec() argument
20161 n = btf_nr_types(btf); in find_btf_percpu_datasec()
20162 if (btf_is_module(btf)) in find_btf_percpu_datasec()
20168 t = btf_type_by_id(btf, i); in find_btf_percpu_datasec()
20172 tname = btf_name_by_offset(btf, t->name_off); in find_btf_percpu_datasec()
20186 static int __add_used_btf(struct bpf_verifier_env *env, struct btf *btf) in __add_used_btf() argument
20193 if (env->used_btfs[i].btf == btf) in __add_used_btf()
20199 btf_get(btf); in __add_used_btf()
20202 btf_mod->btf = btf; in __add_used_btf()
20206 if (btf_is_module(btf)) { in __add_used_btf()
20207 btf_mod->module = btf_try_get_module(btf); in __add_used_btf()
20209 btf_put(btf); in __add_used_btf()
20221 struct btf *btf) in __check_pseudo_btf_id() argument
20233 t = btf_type_by_id(btf, id); in __check_pseudo_btf_id()
20244 sym_name = btf_name_by_offset(btf, t->name_off); in __check_pseudo_btf_id()
20260 datasec_id = find_btf_percpu_datasec(btf); in __check_pseudo_btf_id()
20262 datasec = btf_type_by_id(btf, datasec_id); in __check_pseudo_btf_id()
20272 t = btf_type_skip_modifiers(btf, type, NULL); in __check_pseudo_btf_id()
20275 aux->btf_var.btf = btf; in __check_pseudo_btf_id()
20283 ret = btf_resolve_size(btf, t, &tsize); in __check_pseudo_btf_id()
20285 tname = btf_name_by_offset(btf, t->name_off); in __check_pseudo_btf_id()
20294 aux->btf_var.btf = btf; in __check_pseudo_btf_id()
20305 struct btf *btf; in check_pseudo_btf_id() local
20313 btf = __btf_get_by_fd(f); in check_pseudo_btf_id()
20314 if (IS_ERR(btf)) { in check_pseudo_btf_id()
20323 btf = btf_vmlinux; in check_pseudo_btf_id()
20326 err = __check_pseudo_btf_id(env, insn, aux, btf); in check_pseudo_btf_id()
20330 err = __add_used_btf(env, btf); in check_pseudo_btf_id()
21576 func[i]->aux->btf = prog->aux->btf; in jit_subprogs()
23224 reg->btf = bpf_get_btf_vmlinux(); /* can't fail at this point */ in do_check_common()
23392 struct btf *btf; in check_struct_ops_btf_id() local
23404 btf = prog->aux->attach_btf; in check_struct_ops_btf_id()
23405 if (btf_is_module(btf)) { in check_struct_ops_btf_id()
23407 env->attach_btf_mod = btf_try_get_module(btf); in check_struct_ops_btf_id()
23410 btf_get_name(btf)); in check_struct_ops_btf_id()
23416 st_ops_desc = bpf_struct_ops_find(btf, btf_id); in check_struct_ops_btf_id()
23433 mname = btf_name_by_offset(btf, member->name_off); in check_struct_ops_btf_id()
23434 func_proto = btf_type_resolve_func_ptr(btf, member->type, in check_struct_ops_btf_id()
23539 struct btf *btf; in bpf_check_attach_target() local
23547 btf = tgt_prog ? tgt_prog->aux->btf : prog->aux->attach_btf; in bpf_check_attach_target()
23548 if (!btf) { in bpf_check_attach_target()
23553 t = btf_type_by_id(btf, btf_id); in bpf_check_attach_target()
23558 tname = btf_name_by_offset(btf, t->name_off); in bpf_check_attach_target()
23700 ret = btf_find_by_name_kind(btf, fname, BTF_KIND_FUNC); in bpf_check_attach_target()
23705 t = btf_type_by_id(btf, t->type); in bpf_check_attach_target()
23710 t = btf_type_by_id(btf, ret); in bpf_check_attach_target()
23716 t = btf_type_by_id(btf, t->type); in bpf_check_attach_target()
23728 t = btf_type_by_id(btf, t->type); in bpf_check_attach_target()
23731 ret = btf_distill_func_proto(log, btf, t, tname, &tgt_info->fmodel); in bpf_check_attach_target()
23750 btf_check_type_match(log, prog, btf, t)) in bpf_check_attach_target()
23752 t = btf_type_by_id(btf, t->type); in bpf_check_attach_target()
23764 ret = btf_distill_func_proto(log, btf, t, tname, &tgt_info->fmodel); in bpf_check_attach_target()
23774 if (btf_is_module(btf)) { in bpf_check_attach_target()
23775 mod = btf_try_get_module(btf); in bpf_check_attach_target()
23807 u32 *flags = btf_kfunc_is_modify_return(btf, btf_id, in bpf_check_attach_target()
23836 if (btf_kfunc_is_modify_return(btf, btf_id, prog) || in bpf_check_attach_target()
24009 struct btf *bpf_get_btf_vmlinux(void) in bpf_get_btf_vmlinux()
24028 struct btf *btf; in add_fd_from_fd_array() local
24040 btf = __btf_get_by_fd(f); in add_fd_from_fd_array()
24041 if (!IS_ERR(btf)) { in add_fd_from_fd_array()
24042 err = __add_used_btf(env, btf); in add_fd_from_fd_array()