| /linux/drivers/gpu/drm/i915/ |
| i915_query.c |
    25 if (copy_from_user(query_hdr, u64_to_user_ptr(query_item->data_ptr), in copy_query_item()
    68 if (copy_to_user(u64_to_user_ptr(query_item->data_ptr), in fill_topology_info()
    132 u64_to_user_ptr(query_item->data_ptr); in query_engine_info()
    200 u32 __user *p = u64_to_user_ptr(user_regs_ptr); in copy_perf_config_registers_or_number()
    230 u64_to_user_ptr(query_item->data_ptr); in query_perf_config_data()
    232 u64_to_user_ptr(query_item->data_ptr + in query_perf_config_data()
    376 u64_to_user_ptr(query_item->data_ptr); in query_perf_config_list()
    464 u64_to_user_ptr(query_item->data_ptr); in query_memregion_info()
    547 if (copy_to_user(u64_to_user_ptr(query_item->data_ptr), in query_hwconfig_blob()
    559 u64_to_user_ptr(query->data_ptr); in query_guc_submission_version()
    [all …]
|
| i915_user_extensions.c |
    57 ext = u64_to_user_ptr(next); in i915_user_extensions()
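
The i915 hits above, like most of the hits below, reflect one uAPI convention: ioctl argument structs carry user-space addresses as __u64 fields so the struct layout is identical for 32-bit and 64-bit user space, and the kernel converts that integer back into a void __user * with u64_to_user_ptr() before handing it to copy_to_user()/copy_from_user(). The sketch below only illustrates that round trip; struct demo_query and demo_query_fill() are hypothetical names, not code from i915_query.c.

```c
#include <linux/errno.h>
#include <linux/types.h>
#include <linux/kernel.h>
#include <linux/uaccess.h>	/* copy_to_user(); u64_to_user_ptr() comes via the core headers */

/* Hypothetical uAPI-style struct: the output buffer travels as a __u64. */
struct demo_query {
	__u64 data_ptr;		/* user address, not a kernel pointer */
	__u32 size;		/* size of the user buffer in bytes */
	__u32 pad;
};

/* Hypothetical handler: convert the integer, then do a bounded copy. */
static int demo_query_fill(const struct demo_query *q,
			   const void *blob, __u32 blob_len)
{
	void __user *out = u64_to_user_ptr(q->data_ptr);

	if (q->size < blob_len)
		return -EINVAL;

	/* copy_to_user() returns the number of bytes it could NOT copy. */
	if (copy_to_user(out, blob, blob_len))
		return -EFAULT;

	return 0;
}
```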
|
| /linux/io_uring/ |
| fs.c |
    61 oldf = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_renameat_prep()
    62 newf = u64_to_user_ptr(READ_ONCE(sqe->addr2)); in io_renameat_prep()
    120 fname = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_unlinkat_prep()
    167 fname = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_mkdirat_prep()
    209 oldpath = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_symlinkat_prep()
    210 newpath = u64_to_user_ptr(READ_ONCE(sqe->addr2)); in io_symlinkat_prep()
    253 oldf = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_linkat_prep()
    254 newf = u64_to_user_ptr(READ_ONCE(sqe->addr2)); in io_linkat_prep()
|
| xattr.c |
    56 name = u64_to_user_ptr(READ_ONCE(sqe->addr)); in __io_getxattr_prep()
    57 ix->ctx.cvalue = u64_to_user_ptr(READ_ONCE(sqe->addr2)); in __io_getxattr_prep()
    97 path = u64_to_user_ptr(READ_ONCE(sqe->addr3)); in io_getxattr_prep()
    159 name = u64_to_user_ptr(READ_ONCE(sqe->addr)); in __io_setxattr_prep()
    160 ix->ctx.cvalue = u64_to_user_ptr(READ_ONCE(sqe->addr2)); in __io_setxattr_prep()
    190 path = u64_to_user_ptr(READ_ONCE(sqe->addr3)); in io_setxattr_prep()
|
| statx.c |
    35 path = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_statx_prep()
    36 sx->buffer = u64_to_user_ptr(READ_ONCE(sqe->addr2)); in io_statx_prep()
|
| rsrc.c |
    335 u64 __user *tags = u64_to_user_ptr(up->tags); in __io_sqe_files_update()
    336 __s32 __user *fds = u64_to_user_ptr(up->data); in __io_sqe_files_update()
    400 u64 __user *tags = u64_to_user_ptr(up->tags); in __io_sqe_buffers_update()
    417 uvec = u64_to_user_ptr(user_data); in __io_sqe_buffers_update()
    529 return io_sqe_files_register(ctx, u64_to_user_ptr(rr.data), in io_register_rsrc()
    530 rr.nr, u64_to_user_ptr(rr.tags)); in io_register_rsrc()
    534 return io_sqe_buffers_register(ctx, u64_to_user_ptr(rr.data), in io_register_rsrc()
    535 rr.nr, u64_to_user_ptr(rr.tags)); in io_register_rsrc()
    561 __s32 __user *fds = u64_to_user_ptr(up->arg); in io_files_update_with_index_alloc()
|
| epoll.c |
    38 ev = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_epoll_ctl_prep()
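
The io_uring hits above all sit in ->prep() handlers and share one idiom: each 64-bit address field of the submission queue entry is loaded exactly once with READ_ONCE() (the SQE ring is also mapped by user space, so it can change underneath the kernel) and the result is immediately converted with u64_to_user_ptr(). A minimal sketch of that idiom follows, using hypothetical stand-in types (struct demo_sqe, demo_rename_prep()) rather than the real io_uring structures.

```c
#include <linux/compiler.h>	/* READ_ONCE() */
#include <linux/types.h>
#include <linux/kernel.h>
#include <linux/uaccess.h>	/* u64_to_user_ptr() via the core headers */

/* Hypothetical stand-in for the user-visible submission queue entry. */
struct demo_sqe {
	__u64 addr;	/* e.g. old path name, as a user address */
	__u64 addr2;	/* e.g. new path name, as a user address */
};

/* Hypothetical per-request state kept by the kernel. */
struct demo_rename_req {
	const char __user *oldf;
	const char __user *newf;
};

/* Prep step: latch each user pointer exactly once, before any use. */
static void demo_rename_prep(struct demo_rename_req *req,
			     const struct demo_sqe *sqe)
{
	/* READ_ONCE() because user space shares the SQE memory. */
	req->oldf = u64_to_user_ptr(READ_ONCE(sqe->addr));
	req->newf = u64_to_user_ptr(READ_ONCE(sqe->addr2));
}
```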
|
| /linux/drivers/gpu/drm/xe/ |
| xe_query.c |
    128 query_ptr = u64_to_user_ptr(query->data); in query_engine_cycles()
    191 u64_to_user_ptr(query->data); in query_engines()
    253 u64_to_user_ptr(query->data); in query_mem_regions()
    324 u64_to_user_ptr(query->data); in query_config()
    365 u64_to_user_ptr(query->data); in query_gt_list()
    434 void __user *query_ptr = u64_to_user_ptr(query->data); in query_hwconfig()
    489 void __user *query_ptr = u64_to_user_ptr(query->data); in query_gt_topology()
    541 struct drm_xe_query_uc_fw_version __user *query_ptr = u64_to_user_ptr(query->data); in query_uc_fw_version()
    631 void __user *query_ptr = u64_to_user_ptr(query->data); in query_oa_units()
|
| /linux/drivers/gpu/drm/v3d/ |
| v3d_submit.c |
    340 post_deps = u64_to_user_ptr(handles); in v3d_get_multisync_post_deps()
    484 syncs = u64_to_user_ptr(timestamp.syncs); in v3d_get_cpu_timestamp_query_params()
    549 syncs = u64_to_user_ptr(reset.syncs); in v3d_get_cpu_reset_timestamp_params()
    613 offsets = u64_to_user_ptr(copy.offsets); in v3d_get_cpu_copy_query_results_params()
    614 syncs = u64_to_user_ptr(copy.syncs); in v3d_get_cpu_copy_query_results_params()
    689 ids_pointer = u64_to_user_ptr(ids); in v3d_copy_query_info()
    750 u64_to_user_ptr(reset.syncs), in v3d_get_cpu_reset_performance_params()
    799 u64_to_user_ptr(copy.syncs), in v3d_get_cpu_copy_performance_query_params()
    800 u64_to_user_ptr(copy.kperfmon_ids), in v3d_get_cpu_copy_performance_query_params()
    830 user_ext = u64_to_user_ptr(ext_handles); in v3d_get_extensions()
    [all …]
|
| /linux/drivers/gpu/drm/imagination/ |
| pvr_drv.c |
    503 if (copy_to_user(u64_to_user_ptr(query.quirks), out, in pvr_dev_query_quirks_get()
    586 if (copy_to_user(u64_to_user_ptr(query.enhancements), out, in pvr_dev_query_enhancements_get()
    1168 return copy_struct_from_user(out, obj_size, u64_to_user_ptr(usr_ptr), usr_stride); in pvr_get_uobj()
    1177 if (copy_to_user(u64_to_user_ptr(usr_ptr), in, min_t(u32, usr_stride, obj_size))) in pvr_set_uobj()
    1181 clear_user(u64_to_user_ptr(usr_ptr + obj_size), usr_stride - obj_size)) { in pvr_set_uobj()
    1205 if (copy_from_user(out_alloc, u64_to_user_ptr(in->array), in pvr_get_uobj_array()
    1209 void __user *in_ptr = u64_to_user_ptr(in->array); in pvr_get_uobj_array()
    1242 if (copy_to_user(u64_to_user_ptr(out->array), in, in pvr_set_uobj_array()
    1247 void __user *out_ptr = u64_to_user_ptr(out->array); in pvr_set_uobj_array()
    1259 clear_user(u64_to_user_ptr(out->array + obj_size), in pvr_set_uobj_array()
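
The Imagination hits combine the pointer conversion with handling of versioned, size-extensible uAPI objects: copy_struct_from_user() accepts a user struct that is smaller or larger than the kernel's view, and on the output path any user-visible tail beyond the kernel object is zeroed with clear_user(). The sketch below shows that general idiom under hypothetical names (demo_get_uobj(), demo_set_uobj()); it is not the pvr code itself.

```c
#include <linux/errno.h>
#include <linux/minmax.h>	/* min_t() */
#include <linux/types.h>
#include <linux/kernel.h>
#include <linux/uaccess.h>	/* copy_struct_from_user(), copy_to_user(), clear_user() */

/*
 * Read a user object whose size (usr_size) may differ from the kernel
 * object size: extra user bytes must be zero, missing bytes are zeroed.
 */
static int demo_get_uobj(u64 usr_ptr, u32 usr_size, void *out, u32 obj_size)
{
	/* Returns 0, -E2BIG (non-zero tail) or -EFAULT. */
	return copy_struct_from_user(out, obj_size,
				     u64_to_user_ptr(usr_ptr), usr_size);
}

/*
 * Write a kernel object back, then clear whatever part of a larger
 * user-side object the kernel does not know about.
 */
static int demo_set_uobj(u64 usr_ptr, u32 usr_size, const void *in, u32 obj_size)
{
	if (copy_to_user(u64_to_user_ptr(usr_ptr), in,
			 min_t(u32, usr_size, obj_size)))
		return -EFAULT;

	if (usr_size > obj_size &&
	    clear_user(u64_to_user_ptr(usr_ptr + obj_size),
		       usr_size - obj_size))
		return -EFAULT;

	return 0;
}
```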
|
| /linux/drivers/gpu/drm/ |
| drm_syncobj.c |
    1251 u64_to_user_ptr(timeline_wait->points), in drm_syncobj_array_wait()
    1344 u64_to_user_ptr(args->handles), in drm_syncobj_wait_ioctl()
    1388 u64_to_user_ptr(args->handles), in drm_syncobj_timeline_wait_ioctl()
    1529 u64_to_user_ptr(args->handles), in drm_syncobj_reset_ioctl()
    1562 u64_to_user_ptr(args->handles), in drm_syncobj_signal_ioctl()
    1600 u64_to_user_ptr(args->handles), in drm_syncobj_timeline_signal_ioctl()
    1612 if (!u64_to_user_ptr(args->points)) { in drm_syncobj_timeline_signal_ioctl()
    1614 } else if (copy_from_user(points, u64_to_user_ptr(args->points), in drm_syncobj_timeline_signal_ioctl()
    1657 uint64_t __user *points = u64_to_user_ptr(args->points); in drm_syncobj_query_ioctl()
    1671 u64_to_user_ptr(args->handles), in drm_syncobj_query_ioctl()
|
| drm_mode_config.c |
    111 fb_id = u64_to_user_ptr(card_res->fb_id_ptr); in drm_mode_getresources()
    129 crtc_id = u64_to_user_ptr(card_res->crtc_id_ptr); in drm_mode_getresources()
    141 encoder_id = u64_to_user_ptr(card_res->encoder_id_ptr); in drm_mode_getresources()
    152 connector_id = u64_to_user_ptr(card_res->connector_id_ptr); in drm_mode_getresources()
|
| drm_property.c |
    481 values_ptr = u64_to_user_ptr(out_resp->values_ptr); in drm_mode_getproperty_ioctl()
    492 enum_ptr = u64_to_user_ptr(out_resp->enum_blob_ptr); in drm_mode_getproperty_ioctl()
    828 if (copy_to_user(u64_to_user_ptr(out_resp->data), in drm_mode_getblob_ioctl()
    857 u64_to_user_ptr(out_resp->data), in drm_mode_createblob_ioctl()
|
| /linux/drivers/gpu/drm/qxl/ |
| qxl_ioctl.c |
    165 if (!access_ok(u64_to_user_ptr(cmd->command), in qxl_process_single_command()
    187 u64_to_user_ptr(cmd->command), cmd->command_size); in qxl_process_single_command()
    205 struct drm_qxl_reloc __user *u = u64_to_user_ptr(cmd->relocs); in qxl_process_single_command()
    281 u64_to_user_ptr(execbuffer->commands); in qxl_execbuffer_ioctl()
|
| /linux/net/bpf/ |
| bpf_dummy_struct_ops.c |
    43 ctx_in = u64_to_user_ptr(kattr->test.ctx_in); in dummy_ops_init_args()
    48 u_state = u64_to_user_ptr(args->args[0]); in dummy_ops_init_args()
    63 u_state = u64_to_user_ptr(args->args[0]); in dummy_ops_copy_args()
|
| /linux/drivers/infiniband/core/ |
| uverbs_ioctl.c |
    144 return ib_is_buffer_cleared(u64_to_user_ptr(uattr->data) + len, in uverbs_is_attr_cleared()
    206 ret = copy_from_user(idr_vals, u64_to_user_ptr(uattr->data), in uverbs_process_idrs_array()
    304 if (copy_from_user(p, u64_to_user_ptr(uattr->data), in uverbs_process_attr()
    728 udata->inbuf = u64_to_user_ptr(in->ptr_attr.data); in uverbs_fill_udata()
    735 udata->outbuf = u64_to_user_ptr(out->ptr_attr.data); in uverbs_fill_udata()
    753 if (copy_to_user(u64_to_user_ptr(attr->ptr_attr.data), from, min_size)) in uverbs_copy_to()
    831 if (clear_user(u64_to_user_ptr(attr->ptr_attr.data) + size, in uverbs_copy_to_struct_or_zero()
|
| /linux/drivers/gpu/drm/virtio/ |
| virtgpu_submit.c |
    118 u64_to_user_ptr(address), in virtio_gpu_parse_deps()
    208 u64_to_user_ptr(address), in virtio_gpu_parse_post_deps()
    311 if (copy_from_user(bo_handles, u64_to_user_ptr(exbuf->bo_handles), in virtio_gpu_init_submit_buflist()
    416 submit->buf = vmemdup_user(u64_to_user_ptr(exbuf->command), exbuf->size); in virtio_gpu_init_submit()
|
| virtgpu_ioctl.c |
    123 if (copy_to_user(u64_to_user_ptr(param->value), &value, sizeof(int))) in virtio_gpu_getparam_ioctl()
    433 if (copy_to_user(u64_to_user_ptr(args->addr), ptr, size)) in virtio_gpu_get_caps_ioctl()
    520 buf = memdup_user(u64_to_user_ptr(rc_blob->cmd), in virtio_gpu_resource_create_blob_ioctl()
    596 ctx_set_params = memdup_user(u64_to_user_ptr(args->ctx_set_params), in virtio_gpu_context_init_ioctl()
    662 u64_to_user_ptr(value), in virtio_gpu_context_init_ioctl()
|
| /linux/kernel/bpf/ |
| mprog.c |
    420 uprog_id = u64_to_user_ptr(attr->query.prog_ids); in bpf_mprog_query()
    421 uprog_flags = u64_to_user_ptr(attr->query.prog_attach_flags); in bpf_mprog_query()
    422 ulink_id = u64_to_user_ptr(attr->query.link_ids); in bpf_mprog_query()
    423 ulink_flags = u64_to_user_ptr(attr->query.link_attach_flags); in bpf_mprog_query()
|
| syscall.c |
    1522 void __user *ukey = u64_to_user_ptr(attr->key); in map_lookup_elem()
    1523 void __user *uvalue = u64_to_user_ptr(attr->value); in map_lookup_elem()
    1695 void __user *ukey = u64_to_user_ptr(attr->key); in map_get_next_key()
    1972 void __user *ukey = u64_to_user_ptr(attr->key); in map_lookup_and_delete_elem()
    1973 void __user *uvalue = u64_to_user_ptr(attr->value); in map_lookup_and_delete_elem()
    2916 u64_to_user_ptr(attr->pathname)); in bpf_obj_pin()
    4620 uinsns = u64_to_user_ptr(info.xlated_prog_insns); in bpf_prog_get_info_by_fd()
    4652 uinsns = u64_to_user_ptr(info.jited_prog_insns); in bpf_prog_get_info_by_fd()
    4695 user_ksyms = u64_to_user_ptr(info.jited_ksyms); in bpf_prog_get_info_by_fd()
    4752 user_finfo = u64_to_user_ptr(info.func_info); in bpf_prog_get_info_by_fd()
    [all …]
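
In the BPF syscall the pointers arrive inside the attribute block passed from user space, whose key/value fields are declared as 64-bit integers in the uAPI header, so each map operation starts by turning them back into user pointers before copying the key in and the value out. The sketch below is a condensed, hypothetical illustration of that map_lookup_elem()-style flow, not the real syscall code; struct demo_map_attr and demo_map_lookup() are made-up names.

```c
#include <linux/errno.h>
#include <linux/types.h>
#include <linux/kernel.h>
#include <linux/uaccess.h>	/* copy_from_user(), copy_to_user() */

/* Hypothetical, trimmed-down attribute block for a lookup operation. */
struct demo_map_attr {
	__u32 map_fd;
	__u64 key;	/* user address of the key buffer */
	__u64 value;	/* user address of the value buffer */
};

/* Sketch of the flow: copy the key in, look it up, copy the value out. */
static int demo_map_lookup(const struct demo_map_attr *attr,
			   void *key_buf, __u32 key_size,
			   const void *value_buf, __u32 value_size)
{
	void __user *ukey = u64_to_user_ptr(attr->key);
	void __user *uvalue = u64_to_user_ptr(attr->value);

	if (copy_from_user(key_buf, ukey, key_size))
		return -EFAULT;

	/* ... the real code would fill value_buf from the map here ... */

	if (copy_to_user(uvalue, value_buf, value_size))
		return -EFAULT;

	return 0;
}
```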
|
| /linux/arch/x86/kvm/svm/ |
| sev.c |
    979 void __user *measure = u64_to_user_ptr(argp->data); in sev_launch_measure()
    999 p = u64_to_user_ptr(params.uaddr); in sev_launch_measure()
    1385 void __user *report = u64_to_user_ptr(argp->data); in sev_get_attestation_report()
    1405 p = u64_to_user_ptr(params.uaddr); in sev_get_attestation_report()
    1458 if (copy_to_user(u64_to_user_ptr(argp->data), params, in __sev_send_start_query_session_length()
    1477 if (copy_from_user(&params, u64_to_user_ptr(argp->data), in sev_send_start()
    1540 if (copy_to_user(u64_to_user_ptr(argp->data), &params, in sev_send_start()
    1571 if (copy_to_user(u64_to_user_ptr(argp->data), params, in __sev_send_update_data_query_lengths()
    1591 if (copy_from_user(&params, u64_to_user_ptr(argp->data), in sev_send_update_data()
    1642 if (copy_to_user(u64_to_user_ptr(params.trans_uaddr), in sev_send_update_data()
    [all …]
|
| /linux/drivers/misc/ |
| nsm.c |
    159 if (copy_from_user(req->data, u64_to_user_ptr(raw->request.addr), in fill_req_raw()
    176 if (copy_to_user(u64_to_user_ptr(raw->response.addr), in parse_resp_raw()
    356 void __user *argp = u64_to_user_ptr((u64)arg); in nsm_dev_ioctl()
|
| /linux/drivers/iommu/iommufd/ |
| ioas.c |
    80 ranges = u64_to_user_ptr(cmd->allowed_iovas); in iommufd_ioas_iova_ranges()
    160 u64_to_user_ptr(cmd->allowed_iovas), in iommufd_ioas_allow_iovas()
    227 u64_to_user_ptr(cmd->user_va), cmd->length, in iommufd_ioas_map()
|
| /linux/drivers/gpu/drm/i915/gem/ |
| i915_gem_phys.c |
    143 char __user *user_data = u64_to_user_ptr(args->data_ptr); in i915_gem_object_pwrite_phys()
    174 char __user *user_data = u64_to_user_ptr(args->data_ptr); in i915_gem_object_pread_phys()
|
| /linux/drivers/gpu/drm/etnaviv/ |
| etnaviv_gem_submit.c |
    480 ret = copy_from_user(bos, u64_to_user_ptr(args->bos), in etnaviv_ioctl_gem_submit()
    487 ret = copy_from_user(relocs, u64_to_user_ptr(args->relocs), in etnaviv_ioctl_gem_submit()
    494 ret = copy_from_user(pmrs, u64_to_user_ptr(args->pmrs), in etnaviv_ioctl_gem_submit()
    501 ret = copy_from_user(stream, u64_to_user_ptr(args->stream), in etnaviv_ioctl_gem_submit()
|