Lines Matching refs:value
246 void *key, void *value, __u64 flags) in bpf_map_update_value() argument
252 return bpf_map_offload_update_elem(map, key, value, flags); in bpf_map_update_value()
256 return map->ops->map_update_elem(map, key, value, flags); in bpf_map_update_value()
259 return sock_map_update_elem_sys(map, key, value, flags); in bpf_map_update_value()
261 return bpf_fd_array_map_update_elem(map, map_file, key, value, in bpf_map_update_value()
268 err = bpf_percpu_hash_update(map, key, value, flags); in bpf_map_update_value()
270 err = bpf_percpu_array_update(map, key, value, flags); in bpf_map_update_value()
272 err = bpf_percpu_cgroup_storage_update(map, key, value, in bpf_map_update_value()
275 err = bpf_fd_array_map_update_elem(map, map_file, key, value, in bpf_map_update_value()
278 err = bpf_fd_htab_map_update_elem(map, map_file, key, value, in bpf_map_update_value()
282 err = bpf_fd_reuseport_array_update_elem(map, key, value, in bpf_map_update_value()
287 err = map->ops->map_push_elem(map, value, flags); in bpf_map_update_value()
289 err = bpf_obj_pin_uptrs(map->record, value); in bpf_map_update_value()
292 err = map->ops->map_update_elem(map, key, value, flags); in bpf_map_update_value()
295 bpf_obj_unpin_uptrs(map->record, value); in bpf_map_update_value()
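
The dispatch above is what a BPF_MAP_UPDATE_ELEM syscall ultimately lands in: special map types get their own helpers, everything else falls through to map->ops->map_update_elem(). As a rough illustration of the user-space side, here is a minimal sketch using libbpf's low-level wrappers; the map name, key/value sizes, and entry count are made up for the example, and creating the map needs CAP_BPF (or root).

/* sketch: drive BPF_MAP_UPDATE_ELEM from user space */
#include <bpf/bpf.h>
#include <stdio.h>

int main(void)
{
	/* hypothetical map: 4-byte key, 8-byte value, 16 entries */
	int fd = bpf_map_create(BPF_MAP_TYPE_HASH, "demo_map",
				sizeof(__u32), sizeof(__u64), 16, NULL);
	__u32 key = 1;
	__u64 value = 42;

	if (fd < 0)
		return 1;

	/* kernel path: map_update_elem() -> bpf_map_update_value() ->
	 * map->ops->map_update_elem() for a plain hash map
	 */
	if (bpf_map_update_elem(fd, &key, &value, BPF_ANY))
		perror("bpf_map_update_elem");

	return 0;
}
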
303 static int bpf_map_copy_value(struct bpf_map *map, void *key, void *value, in bpf_map_copy_value() argument
310 return bpf_map_offload_lookup_elem(map, key, value); in bpf_map_copy_value()
315 err = bpf_percpu_hash_copy(map, key, value); in bpf_map_copy_value()
317 err = bpf_percpu_array_copy(map, key, value); in bpf_map_copy_value()
319 err = bpf_percpu_cgroup_storage_copy(map, key, value); in bpf_map_copy_value()
321 err = bpf_stackmap_copy(map, key, value); in bpf_map_copy_value()
323 err = bpf_fd_array_map_lookup_elem(map, key, value); in bpf_map_copy_value()
325 err = bpf_fd_htab_map_lookup_elem(map, key, value); in bpf_map_copy_value()
327 err = bpf_fd_reuseport_array_lookup_elem(map, key, value); in bpf_map_copy_value()
331 err = map->ops->map_peek_elem(map, value); in bpf_map_copy_value()
334 err = bpf_struct_ops_map_sys_lookup_elem(map, key, value); in bpf_map_copy_value()
349 copy_map_value_locked(map, value, ptr, true); in bpf_map_copy_value()
351 copy_map_value(map, value, ptr); in bpf_map_copy_value()
353 check_and_init_map_value(map, value); in bpf_map_copy_value()
1630 int __weak bpf_stackmap_copy(struct bpf_map *map, void *key, void *value) in bpf_stackmap_copy() argument
1663 void __user *uvalue = u64_to_user_ptr(attr->value); in map_lookup_elem()
1665 void *key, *value; in map_lookup_elem() local
1693 value = kvmalloc(value_size, GFP_USER | __GFP_NOWARN); in map_lookup_elem()
1694 if (!value) in map_lookup_elem()
1698 if (copy_from_user(value, uvalue, value_size)) in map_lookup_elem()
1701 err = bpf_map_copy_value(map, key, value, attr->flags); in map_lookup_elem()
1705 err = bpf_map_copy_value(map, key, value, attr->flags); in map_lookup_elem()
1710 if (copy_to_user(uvalue, value, value_size) != 0) in map_lookup_elem()
1716 kvfree(value); in map_lookup_elem()
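
For the read side, map_lookup_elem() allocates a kernel buffer, fills it through bpf_map_copy_value() (which handles the per-cpu, stackmap, fd-array and queue/stack cases shown above), and copies the result back to uvalue. A minimal user-space sketch follows; read_one() and alloc_percpu_value() are hypothetical helpers, and the fd is assumed to be the hash map from the update example.

#include <bpf/bpf.h>
#include <bpf/libbpf.h>   /* libbpf_num_possible_cpus() */
#include <stdio.h>
#include <stdlib.h>

/* sketch: read one element back; kernel path is
 * map_lookup_elem() -> bpf_map_copy_value()
 */
static int read_one(int fd, __u32 key)
{
	__u64 value;

	if (bpf_map_lookup_elem(fd, &key, &value))
		return -1;
	printf("key %u -> %llu\n", key, (unsigned long long)value);
	return 0;
}

/* for BPF_MAP_TYPE_PERCPU_* maps the user buffer must hold one value,
 * rounded up to 8 bytes, per possible CPU
 */
static void *alloc_percpu_value(size_t value_size)
{
	int ncpus = libbpf_num_possible_cpus();
	size_t per_cpu = (value_size + 7) & ~(size_t)7;

	return ncpus > 0 ? calloc(ncpus, per_cpu) : NULL;
}
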
1728 bpfptr_t uvalue = make_bpfptr(attr->value, uattr.is_kernel); in map_update_elem()
1730 void *key, *value; in map_update_elem() local
1760 value = kvmemdup_bpfptr(uvalue, value_size); in map_update_elem()
1761 if (IS_ERR(value)) { in map_update_elem()
1762 err = PTR_ERR(value); in map_update_elem()
1766 err = bpf_map_update_value(map, fd_file(f), key, value, attr->flags); in map_update_elem()
1770 kvfree(value); in map_update_elem()
1952 void *key, *value; in generic_map_update_batch() local
1976 value = kvmalloc(value_size, GFP_USER | __GFP_NOWARN); in generic_map_update_batch()
1977 if (!value) { in generic_map_update_batch()
1986 copy_from_user(value, values + cp * value_size, value_size)) in generic_map_update_batch()
1989 err = bpf_map_update_value(map, map_file, key, value, in generic_map_update_batch()
2000 kvfree(value); in generic_map_update_batch()
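
generic_map_update_batch() loops over user-supplied key/value arrays and feeds each pair to bpf_map_update_value(). A sketch of the corresponding user-space call; update_many() is a hypothetical helper and the key/value layout matches the earlier examples.

#include <bpf/bpf.h>

/* sketch: batch-update 'n' elements of a map with 4-byte keys and
 * 8-byte values; returns the number of elements actually updated
 */
static int update_many(int fd, __u32 *keys, __u64 *values, __u32 n)
{
	LIBBPF_OPTS(bpf_map_batch_opts, opts,
		.elem_flags = BPF_ANY,
	);
	__u32 count = n;

	/* kernel path: BPF_MAP_UPDATE_BATCH -> generic_map_update_batch(),
	 * which calls bpf_map_update_value() once per element; on error,
	 * count reports how many elements were processed
	 */
	if (bpf_map_update_batch(fd, keys, values, &count, &opts))
		return -1;
	return (int)count;
}
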
2014 void *buf, *buf_prevkey, *prev_key, *key, *value; in generic_map_lookup_batch() local
2049 value = key + map->key_size; in generic_map_lookup_batch()
2059 err = bpf_map_copy_value(map, key, value, in generic_map_lookup_batch()
2073 if (copy_to_user(values + cp * value_size, value, value_size)) { in generic_map_lookup_batch()
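
The lookup side walks the map with an in/out batch cursor, copying each value into the user array at values + cp * value_size. A matching user-space sketch; lookup_many() is a hypothetical helper for the same 4-byte-key/8-byte-value map.

#include <bpf/bpf.h>
#include <errno.h>

/* sketch: dump up to 'n' elements in one BPF_MAP_LOOKUP_BATCH call */
static int lookup_many(int fd, __u32 *keys, __u64 *values, __u32 n)
{
	LIBBPF_OPTS(bpf_map_batch_opts, opts);
	__u32 out_batch = 0, count = n;
	int err;

	/* NULL in_batch starts from the beginning; a later call would pass
	 * &out_batch back in to resume. ENOENT just means "end of map".
	 */
	err = bpf_map_lookup_batch(fd, NULL, &out_batch, keys, values,
				   &count, &opts);
	if (err && errno != ENOENT)
		return -1;
	return (int)count;
}
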
2105 void __user *uvalue = u64_to_user_ptr(attr->value); in map_lookup_and_delete_elem()
2107 void *key, *value; in map_lookup_and_delete_elem() local
2150 value = kvmalloc(value_size, GFP_USER | __GFP_NOWARN); in map_lookup_and_delete_elem()
2151 if (!value) in map_lookup_and_delete_elem()
2157 err = map->ops->map_pop_elem(map, value); in map_lookup_and_delete_elem()
2165 err = map->ops->map_lookup_and_delete_elem(map, key, value, attr->flags); in map_lookup_and_delete_elem()
2174 if (copy_to_user(uvalue, value, value_size) != 0) { in map_lookup_and_delete_elem()
2182 kvfree(value); in map_lookup_and_delete_elem()
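
map_lookup_and_delete_elem() serves both the queue/stack pop case (map_pop_elem) and the keyed hash-map variant that takes flags; either way the value is copied out to user space before the element disappears. A short sketch of the pop case, assuming a BPF_MAP_TYPE_QUEUE of 8-byte values; queue_pop() is a hypothetical helper.

#include <bpf/bpf.h>

/* sketch: pop one value from a BPF_MAP_TYPE_QUEUE (no key) via
 * BPF_MAP_LOOKUP_AND_DELETE_ELEM; kernel path is
 * map_lookup_and_delete_elem() -> map->ops->map_pop_elem()
 */
static int queue_pop(int fd, __u64 *value)
{
	return bpf_map_lookup_and_delete_elem(fd, NULL, value);
}
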