| /drivers/gpu/drm/i915/ |
| i915_gem.c |
|   229  char __user *user_data;   in i915_gem_shmem_pread() local
|   263  user_data += length;   in i915_gem_shmem_pread()
|   387  void __user *user_data;   in i915_gem_gtt_pread() local
|   430  user_data, page_length)) {   in i915_gem_gtt_pread()
|   436  user_data += page_length;   in i915_gem_gtt_pread()
|   523  user_data, length);   in ggtt_write()
|   528  user_data, length);   in ggtt_write()
|   552  void __user *user_data;   in i915_gem_gtt_pwrite_fast() local
|   623  user_data += page_length;   in i915_gem_gtt_pwrite_fast()
|   669  void __user *user_data;   in i915_gem_shmem_pwrite() local
|   [all …]
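The i915 pread/pwrite paths above stage each transfer one page at a time and advance the `__user` cursor by the amount copied. A minimal sketch of that looping pattern follows; `map_backing_page()` and the function name are illustrative stand-ins, not actual i915 helpers, and unmapping is elided for brevity:

```c
#include <linux/uaccess.h>
#include <linux/mm.h>

void *map_backing_page(loff_t offset);	/* hypothetical helper */

/* Sketch: copy 'size' bytes to userspace in page-sized chunks,
 * advancing the __user cursor after each partial copy. */
static int read_in_pages(char __user *user_data, loff_t offset, u64 size)
{
	while (size) {
		/* Clamp the chunk so it never crosses a page boundary. */
		unsigned int page_length =
			min_t(u64, size, PAGE_SIZE - offset_in_page(offset));
		void *vaddr = map_backing_page(offset);

		if (copy_to_user(user_data,
				 vaddr + offset_in_page(offset), page_length))
			return -EFAULT;

		user_data += page_length;
		offset += page_length;
		size -= page_length;
	}
	return 0;
}
```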
|
| /drivers/android/ |
| binder_alloc.c |
|    66  return binder_buffer_next(buffer)->user_data - buffer->user_data;   in binder_alloc_buffer_size()
|   117  if (new_buffer->user_data < buffer->user_data)   in binder_insert_allocated_buffer_locked()
|   119  else if (new_buffer->user_data > buffer->user_data)   in binder_insert_allocated_buffer_locked()
|   139  if (user_ptr < buffer->user_data) {   in binder_alloc_prepare_to_free_locked()
|   141  } else if (user_ptr > buffer->user_data) {   in binder_alloc_prepare_to_free_locked()
|   368  start = buffer->user_data & PAGE_MASK;   in binder_install_buffer_pages()
|   567  new_buffer->user_data = buffer->user_data + size;   in binder_alloc_new_buf_locked()
|   704  return buffer->user_data & PAGE_MASK;   in buffer_start_page()
|   717  if (PAGE_ALIGNED(buffer->user_data))   in binder_delete_free_buffer()
|   817  (buffer->user_data - alloc->vm_start);   in binder_alloc_get_page()
|   [all …]
|
| binder_alloc.h |
|    56  unsigned long user_data;   member
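In binder, `user_data` is the buffer's start address inside the target process's mmap region (hence the `unsigned long` member above): allocated buffers are kept sorted by that address, so a buffer's size is simply the distance to its successor, as binder_alloc_buffer_size() computes at line 66. A reduced sketch of that bookkeeping, with simplified stand-ins for the binder structures:

```c
#include <linux/types.h>

/* Simplified stand-ins for binder's buffer bookkeeping. */
struct buf {
	unsigned long user_data;	/* start address in the mapped region */
	struct buf *next;		/* successor in address order */
};

/*
 * Buffers are contiguous and sorted by user_data, so a buffer's size
 * is the gap to its successor; no explicit length field is needed.
 */
static size_t buf_size(const struct buf *b)
{
	return b->next->user_data - b->user_data;
}
```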
|
| /drivers/iommu/iommufd/ |
| hw_pagetable.c |
|   111  const struct iommu_user_data *user_data)   in iommufd_hwpt_paging_alloc() argument
|   150  flags & ~IOMMU_HWPT_FAULT_ID_VALID, user_data);   in iommufd_hwpt_paging_alloc()
|   230  const struct iommu_user_data *user_data)   in iommufd_hwpt_nested_alloc() argument
|   238  !user_data->len || !ops->domain_alloc_nested)   in iommufd_hwpt_nested_alloc()
|   256  flags & ~IOMMU_HWPT_FAULT_ID_VALID, user_data);   in iommufd_hwpt_nested_alloc()
|   288  const struct iommu_user_data *user_data)   in iommufd_viommu_alloc_hwpt_nested() argument
|   296  if (!user_data->len)   in iommufd_viommu_alloc_hwpt_nested()
|   337  const struct iommu_user_data user_data = {   in iommufd_hwpt_alloc() local
|   371  false, user_data.len ? &user_data : NULL);   in iommufd_hwpt_alloc()
|   384  idev, cmd->flags, &user_data);   in iommufd_hwpt_alloc()
|   [all …]
|
| selftest.c |
|   422  if (user_data->type != IOMMU_HWPT_DATA_SELFTEST)   in __mock_domain_alloc_nested()
|   425  rc = iommu_copy_struct_from_user(&user_cfg, user_data,   in __mock_domain_alloc_nested()
|   442  u32 flags, const struct iommu_user_data *user_data)   in mock_domain_alloc_nested() argument
|   456  mock_nested = __mock_domain_alloc_nested(user_data);   in mock_domain_alloc_nested()
|   464  const struct iommu_user_data *user_data)   in mock_domain_alloc_paging_flags() argument
|   474  if (user_data)   in mock_domain_alloc_paging_flags()
|   710  const struct iommu_user_data *user_data)   in mock_viommu_alloc_domain_nested() argument
|   718  mock_nested = __mock_domain_alloc_nested(user_data);   in mock_viommu_alloc_domain_nested()
|   887  const struct iommu_user_data *user_data)   in mock_viommu_init() argument
|   895  if (user_data) {   in mock_viommu_init()
|   [all …]
|
| viommu.c |
|    20  const struct iommu_user_data user_data = {   in iommufd_viommu_alloc_ioctl() local
|    93  user_data.len ? &user_data : NULL);   in iommufd_viommu_alloc_ioctl()
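Across iommufd, driver-specific ioctl payloads travel as a `struct iommu_user_data` (type tag, user pointer, length) built on the stack and passed down to the driver, with NULL substituted when the payload is empty (hw_pagetable.c:371, viommu.c:93). A sketch of that hand-off; the command struct here is hypothetical, but `iommu_user_data` and the `domain_alloc_nested` op are the real interfaces named in the listings above:

```c
#include <linux/iommu.h>
#include <linux/uaccess.h>

/* Hypothetical ioctl command carrying a typed, variable-length payload. */
struct sketch_alloc_cmd {
	u32 data_type;
	u32 data_len;
	u64 data_uptr;
};

static struct iommu_domain *
sketch_alloc_nested(struct device *dev, struct iommu_domain *parent,
		    const struct iommu_ops *ops,
		    const struct sketch_alloc_cmd *cmd)
{
	/* Wrap the raw pointer so drivers never touch __user directly. */
	const struct iommu_user_data user_data = {
		.type = cmd->data_type,
		.uptr = u64_to_user_ptr(cmd->data_uptr),
		.len  = cmd->data_len,
	};

	/* An empty payload is passed as NULL, as the callers above do. */
	return ops->domain_alloc_nested(dev, parent, 0,
					cmd->data_len ? &user_data : NULL);
}
```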
|
| /drivers/net/ethernet/mellanox/mlx5/core/steering/hws/ |
| send.h |
|    85  void *user_data;   member
|   101  void *user_data;   member
|   125  void *user_data;   member
|   165  void *user_data;   member
|   249  void *user_data,   in mlx5hws_send_engine_gen_comp() argument
|   255  comp->entries[comp->pi].user_data = user_data;   in mlx5hws_send_engine_gen_comp()
|
| rule.c |
|    63  dep_wqe->user_data = attr->user_data;   in hws_rule_init_dep_wqe()
|   113  void *user_data,   in hws_rule_gen_comp() argument
|   127  mlx5hws_send_engine_gen_comp(queue, user_data, comp_status);   in hws_rule_gen_comp()
|   380  ste_attr.send_attr.user_data = dep_wqe->user_data;   in hws_rule_create_hws()
|   420  attr->user_data, MLX5HWS_RULE_STATUS_DELETED);   in hws_rule_destroy_failed_hws()
|   479  attr->user_data, MLX5HWS_RULE_STATUS_DELETED);   in hws_rule_destroy_hws()
|   500  ste_attr.send_attr.user_data = attr->user_data;   in hws_rule_destroy_hws()
|   524  if (unlikely(!attr->user_data))   in hws_rule_enqueue_precheck()
|   572  void *user_data)   in mlx5hws_rule_move_hws_remove() argument
|   590  ste_attr.send_attr.user_data = user_data;   in mlx5hws_rule_move_hws_remove()
|   [all …]
|
| send.c |
|    44  ste_attr.send_attr.user_data = dep_wqe->user_data;   in mlx5hws_send_all_dep_wqe()
|   166  sq->wr_priv[idx].user_data = attr->user_data;   in mlx5hws_send_engine_post_end()
|   271  send_attr.user_data = priv->user_data;   in hws_send_engine_retry_post_send()
|   328  mlx5hws_rule_move_hws_remove(priv->rule, queue, priv->user_data);   in hws_send_engine_update_rule_resize()
|   536  if (priv->user_data) {   in hws_send_engine_update()
|   546  res[*i].user_data = priv->user_data;   in hws_send_engine_update()
|   551  mlx5hws_send_engine_gen_comp(queue, priv->user_data, status);   in hws_send_engine_update()
|   646  res[*polled].user_data =   in hws_send_engine_poll_list()
|   647  comp->entries[comp->ci].user_data;   in hws_send_engine_poll_list()
|  1356  mlx5hws_send_engine_gen_comp(queue, send_attr->user_data, MLX5HWS_FLOW_OP_SUCCESS);   in mlx5hws_send_stes_fw()
|   [all …]
|
| rule.h |
|    78  void *queue, void *user_data);
|
| mlx5hws.h |
|   135  void *user_data;   member
|   807  void *user_data;   member
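Throughout mlx5 HWS, `user_data` is an opaque `void *` cookie attached to each queued rule operation; it is saved per work request and echoed back in the completion entry (send.h:255, send.c:646-647) so pollers can tell the caller which request finished. A reduced sketch of that cookie round trip over a simplified completion ring (the real code adds status tracking, dependent-WQE batching, and hardware doorbells):

```c
#include <linux/types.h>

/* Simplified completion ring: each entry carries the caller's cookie. */
struct comp_entry {
	int status;
	void *user_data;
};

struct comp_ring {
	struct comp_entry *entries;
	unsigned int pi, ci, mask;	/* producer/consumer indices */
};

/* Producer side: report a completion tagged with the caller's cookie. */
static void ring_gen_comp(struct comp_ring *ring, void *user_data, int status)
{
	ring->entries[ring->pi].user_data = user_data;
	ring->entries[ring->pi].status = status;
	ring->pi = (ring->pi + 1) & ring->mask;
}

/* Consumer side: hand the cookie back so the caller can match requests. */
static bool ring_poll(struct comp_ring *ring, void **user_data, int *status)
{
	if (ring->ci == ring->pi)
		return false;
	*user_data = ring->entries[ring->ci].user_data;
	*status = ring->entries[ring->ci].status;
	ring->ci = (ring->ci + 1) & ring->mask;
	return true;
}
```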
|
| /drivers/pci/pcie/ |
| rcec.c |
|    21  void *user_data;   member
|    63  rcec_data->user_callback(dev, rcec_data->user_data);   in walk_rcec_helper()
|   119  rcec_data.user_data = NULL;   in pcie_link_rcec()
|   144  rcec_data.user_data = userdata;   in pcie_walk_rcec()
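rcec.c is the classic C callback-plus-context idiom: the walker stores the caller's function pointer and `user_data` in a small struct, then replays both for every device visited (line 63). A generic sketch of the idiom, with a hypothetical `struct item` and iterator standing in for the PCI specifics:

```c
struct item;				/* hypothetical element type */
int for_each_item(int (*fn)(struct item *, void *), void *opaque); /* hypothetical */

struct walk_ctx {
	int (*user_callback)(struct item *it, void *data);
	void *user_data;
};

static int walk_helper(struct item *it, void *opaque)
{
	struct walk_ctx *ctx = opaque;

	/* Replay the item together with the caller's original cookie. */
	return ctx->user_callback(it, ctx->user_data);
}

static void walk_all(int (*cb)(struct item *, void *), void *userdata)
{
	struct walk_ctx ctx = {
		.user_callback = cb,
		.user_data = userdata,	/* may be NULL, as pcie_link_rcec() passes */
	};

	for_each_item(walk_helper, &ctx);
}
```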
|
| /drivers/gpu/drm/i915/gem/ |
| i915_gem_phys.c |
|   142  char __user *user_data = u64_to_user_ptr(args->data_ptr);   in i915_gem_object_pwrite_phys() local
|   159  if (copy_from_user(vaddr, user_data, args->size))   in i915_gem_object_pwrite_phys()
|   173  char __user *user_data = u64_to_user_ptr(args->data_ptr);   in i915_gem_object_pread_phys() local
|   183  if (copy_to_user(user_data, vaddr, args->size))   in i915_gem_object_pread_phys()
|
| i915_gem_shmem.c |
|   403  char __user *user_data = u64_to_user_ptr(arg->data_ptr);   in shmem_pwrite() local
|   411  GEM_BUG_ON(!access_ok(user_data, arg->size));   in shmem_pwrite()
|   439  iov_iter_ubuf(&iter, ITER_SOURCE, (void __user *)user_data, size);   in shmem_pwrite()
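Both GEM paths above recover the `__user` pointer from a u64 ioctl field via u64_to_user_ptr(); the shmem write additionally wraps it in an iov_iter (line 439) so the copy can flow through the generic write helpers. A condensed sketch of that setup (the function name is illustrative; the helpers are real kernel APIs):

```c
#include <linux/uaccess.h>
#include <linux/uio.h>

/* Sketch: turn a u64 ioctl field back into a __user pointer and
 * stage it as an iov_iter source for a write path. */
static int sketch_setup_pwrite(u64 data_ptr, size_t size,
			       struct iov_iter *iter)
{
	char __user *user_data = u64_to_user_ptr(data_ptr);

	/* Cheap range sanity check before committing to the copy. */
	if (!access_ok(user_data, size))
		return -EFAULT;

	iov_iter_ubuf(iter, ITER_SOURCE, (void __user *)user_data, size);
	return 0;
}
```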
|
| /drivers/scsi/megaraid/ |
| megaraid_mm.c |
|   411  kioc->user_data = mimd.ui.fcs.buffer;   in mimd_to_kioc()
|   427  kioc->user_data = mimd.data;   in mimd_to_kioc()
|   459  if (copy_from_user(kioc->buf_vaddr, kioc->user_data,   in mimd_to_kioc()
|   487  if (copy_from_user(kioc->buf_vaddr, kioc->user_data,   in mimd_to_kioc()
|   620  kioc->user_data = NULL;   in mraid_mm_alloc_kioc()
|   859  if (kioc->user_data) {   in kioc_to_mimd()
|   860  if (copy_to_user(kioc->user_data, kioc->buf_vaddr,   in kioc_to_mimd()
|
| megaraid_ioctl.h |
|   130  void __user * user_data;   member
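megaraid keeps `user_data` as a `void __user *` inside its kioc (the member above), recording where the caller's buffer lives so results can be copied back once the command completes (megaraid_mm.c:859-860). A hedged sketch of that bounce-buffer round trip with simplified fields:

```c
#include <linux/types.h>
#include <linux/uaccess.h>

/* Simplified ioctl packet: kernel bounce buffer plus the caller's address. */
struct kioc_sketch {
	void *buf_vaddr;		/* kernel bounce buffer */
	void __user *user_data;		/* caller's buffer, saved at submit */
	u32 xferlen;
};

static int kioc_copy_in(struct kioc_sketch *kioc, void __user *ubuf, u32 len)
{
	kioc->user_data = ubuf;		/* remember where to copy results */
	kioc->xferlen = len;
	return copy_from_user(kioc->buf_vaddr, ubuf, len) ? -EFAULT : 0;
}

static int kioc_copy_out(struct kioc_sketch *kioc)
{
	if (!kioc->user_data)		/* nothing to return */
		return 0;
	return copy_to_user(kioc->user_data, kioc->buf_vaddr,
			    kioc->xferlen) ? -EFAULT : 0;
}
```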
|
| /drivers/iommu/intel/ |
| nested.c |
|   204  const struct iommu_user_data *user_data)   in intel_iommu_domain_alloc_nested() argument
|   217  if (user_data->type != IOMMU_HWPT_DATA_VTD_S1)   in intel_iommu_domain_alloc_nested()
|   222  ret = iommu_copy_struct_from_user(&vtd, user_data,   in intel_iommu_domain_alloc_nested()
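On the driver side, Intel's nested-domain allocation first rejects payloads whose type tag it does not recognize (line 217), then pulls the typed struct in with iommu_copy_struct_from_user() (line 222). A sketch of that check-then-copy sequence using the VT-d types named above; the copy helper also validates the type itself, so the explicit check mainly selects a clearer error code:

```c
#include <linux/iommu.h>
#include <uapi/linux/iommufd.h>

/* Sketch of the driver-side payload validation used above: reject
 * unknown type tags, then copy the typed struct in from userspace. */
static int sketch_pull_vtd_s1(const struct iommu_user_data *user_data,
			      struct iommu_hwpt_vtd_s1 *vtd)
{
	if (user_data->type != IOMMU_HWPT_DATA_VTD_S1)
		return -EOPNOTSUPP;

	/* The helper enforces a minimum size via the last-field argument
	 * and zero-fills the tail if older userspace passes a shorter
	 * struct, so the UAPI can grow compatibly. */
	return iommu_copy_struct_from_user(vtd, user_data,
					   IOMMU_HWPT_DATA_VTD_S1, __reserved);
}
```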
|
| /drivers/iommu/arm/arm-smmu-v3/ |
| arm-smmu-v3-iommufd.c |
|   230  const struct iommu_user_data *user_data)   in arm_vsmmu_alloc_domain_nested() argument
|   241  ret = iommu_copy_struct_from_user(&arg, user_data,   in arm_vsmmu_alloc_domain_nested()
|   433  const struct iommu_user_data *user_data)   in arm_vsmmu_init() argument
|   453  return smmu->impl_ops->vsmmu_init(vsmmu, user_data);   in arm_vsmmu_init()
|
| tegra241-cmdqv.c |
|   808  const struct iommu_user_data *user_data);
|  1273  const struct iommu_user_data *user_data)   in tegra241_cmdqv_init_vintf_user() argument
|  1289  if (!user_data)   in tegra241_cmdqv_init_vintf_user()
|  1292  ret = iommu_copy_struct_from_user(&data, user_data,   in tegra241_cmdqv_init_vintf_user()
|  1323  ret = iommu_copy_struct_to_user(user_data, &data,   in tegra241_cmdqv_init_vintf_user()
|
| arm-smmu-v3.h |
|   734  const struct iommu_user_data *user_data);
|  1055  const struct iommu_user_data *user_data);
|  1063  const struct iommu_user_data *user_data);
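tegra241-cmdqv.c uses the payload in both directions: it copies the init arguments in (line 1292) and, after setting up the VINTF, writes derived results back to the same user buffer with iommu_copy_struct_to_user() (line 1323). A sketch of that two-way exchange; the struct layout and field names here are hypothetical, and only the two copy helpers and the type constant are taken to be the real interfaces:

```c
#include <linux/iommu.h>
#include <uapi/linux/iommufd.h>

/* Hypothetical two-way init payload: config in, discovered values out. */
struct sketch_vintf_data {
	__u64 out_mmap_offset;
	__u32 out_mmap_length;
	__u32 __reserved;
};

static int sketch_vintf_init_user(const struct iommu_user_data *user_data)
{
	struct sketch_vintf_data data;
	int ret;

	ret = iommu_copy_struct_from_user(&data, user_data,
					  IOMMU_VIOMMU_TYPE_TEGRA241_CMDQV,
					  __reserved);
	if (ret)
		return ret;

	/* ... program the VINTF here, then report results back ... */
	data.out_mmap_offset = 0;	/* placeholder values */
	data.out_mmap_length = 0;

	return iommu_copy_struct_to_user(user_data, &data,
					 IOMMU_VIOMMU_TYPE_TEGRA241_CMDQV,
					 __reserved);
}
```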
|
| /drivers/gpu/drm/vmwgfx/ |
| vmwgfx_fence.c |
|   607  uint64_t user_data,   in vmw_event_fence_action_create() argument
|   624  event->event.user_data = user_data;   in vmw_event_fence_action_create()
|   719  arg->user_data,   in vmw_fence_event_ioctl()
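vmwgfx threads the ioctl's 64-bit `user_data` straight into the DRM event queued on fence signalling (line 624), so userspace can correlate the event it later reads with the request it submitted. A sketch of stamping a DRM event with the caller's cookie; the event struct and type value are simplified stand-ins for the vmwgfx UAPI:

```c
#include <linux/slab.h>
#include <uapi/drm/drm.h>

/* Simplified stand-in for a driver-specific DRM event payload. */
struct sketch_fence_event {
	struct drm_event base;
	__u64 user_data;	/* echoed back verbatim to userspace */
};

static struct sketch_fence_event *sketch_event_create(u64 user_data)
{
	struct sketch_fence_event *e = kzalloc(sizeof(*e), GFP_KERNEL);

	if (!e)
		return NULL;

	e->base.type = 0x80000000;	/* hypothetical vendor event type */
	e->base.length = sizeof(*e);
	e->user_data = user_data;	/* caller's cookie, never interpreted */
	return e;
}
```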
|
| /drivers/s390/char/ |
| uvdevice.c |
|   100  memcpy(uvcb_attest->user_data, uvio_attest->user_data, sizeof(uvcb_attest->user_data));   in uvio_build_uvcb_attest()
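The single uvdevice line above is worth a note: the memcpy is sized with sizeof() on the destination field, so it can never overflow the firmware control block even if the UAPI side grows. In sketch form, with hypothetical array sizes:

```c
#include <linux/build_bug.h>
#include <linux/string.h>
#include <linux/types.h>

struct fw_block  { u8 user_data[64]; };	/* hypothetical sizes */
struct uapi_args { u8 user_data[64]; };

/* Size the copy off the *destination* field: memcpy can never write
 * past dst->user_data, even if the source struct grows later. */
static void fill_user_data(struct fw_block *dst, const struct uapi_args *src)
{
	BUILD_BUG_ON(sizeof(src->user_data) < sizeof(dst->user_data));
	memcpy(dst->user_data, src->user_data, sizeof(dst->user_data));
}
```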
|
| /drivers/mtd/nand/raw/ |
| sunxi_nand.c |
|   702  static inline void sunxi_nfc_user_data_to_buf(u32 user_data, u8 *buf)   in sunxi_nfc_user_data_to_buf() argument
|   704  buf[0] = user_data;   in sunxi_nfc_user_data_to_buf()
|   705  buf[1] = user_data >> 8;   in sunxi_nfc_user_data_to_buf()
|   706  buf[2] = user_data >> 16;   in sunxi_nfc_user_data_to_buf()
|   707  buf[3] = user_data >> 24;   in sunxi_nfc_user_data_to_buf()
|   733  u8 user_data[4];   in sunxi_nfc_hw_ecc_set_prot_oob_bytes() local
|   737  memcpy(user_data, oob, sizeof(user_data));   in sunxi_nfc_hw_ecc_set_prot_oob_bytes()
|   738  sunxi_nfc_randomize_bbm(nand, page, user_data);   in sunxi_nfc_hw_ecc_set_prot_oob_bytes()
|   739  oob = user_data;   in sunxi_nfc_hw_ecc_set_prot_oob_bytes()
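sunxi_nfc_user_data_to_buf() is a hand-rolled little-endian store, splitting a 32-bit ECC `user_data` register value into four bytes by shifting. The same operation in both its open-coded and helper forms (the helper variant is an equivalent alternative, not what the driver does):

```c
#include <linux/types.h>
#include <linux/unaligned.h>	/* <asm/unaligned.h> on older kernels */

/* Open-coded little-endian store, as sunxi_nand.c does it. */
static inline void user_data_to_buf(u32 user_data, u8 *buf)
{
	buf[0] = user_data;		/* bits 7:0 */
	buf[1] = user_data >> 8;	/* bits 15:8 */
	buf[2] = user_data >> 16;	/* bits 23:16 */
	buf[3] = user_data >> 24;	/* bits 31:24 */
}

/* Equivalent using the generic helper (handles unaligned buf too). */
static inline void user_data_to_buf_helper(u32 user_data, u8 *buf)
{
	put_unaligned_le32(user_data, buf);
}
```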
|
| /drivers/dma-buf/ |
| dma-buf.c |
|   412  void __user *user_data)   in dma_buf_export_sync_file() argument
|   420  if (copy_from_user(&arg, user_data, sizeof(arg)))   in dma_buf_export_sync_file()
|   451  if (copy_to_user(user_data, &arg, sizeof(arg))) {   in dma_buf_export_sync_file()
|   468  const void __user *user_data)   in dma_buf_import_sync_file() argument
|   477  if (copy_from_user(&arg, user_data, sizeof(arg)))   in dma_buf_import_sync_file()
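dma_buf_export_sync_file() follows the standard ioctl round trip: copy the whole argument struct in, fill in the output field (here a new fd), and copy the struct back out, releasing the fd again if the final copy faults (line 451). A condensed sketch of that sequence with a hypothetical argument layout:

```c
#include <linux/uaccess.h>
#include <linux/file.h>
#include <linux/fcntl.h>

/* Hypothetical ioctl argument: flags in, new fd out. */
struct sketch_sync_arg {
	__u32 flags;
	__s32 fd;
};

static long sketch_export_ioctl(void __user *user_data)
{
	struct sketch_sync_arg arg;
	int fd;

	if (copy_from_user(&arg, user_data, sizeof(arg)))
		return -EFAULT;

	fd = get_unused_fd_flags(O_CLOEXEC);
	if (fd < 0)
		return fd;

	arg.fd = fd;
	if (copy_to_user(user_data, &arg, sizeof(arg))) {
		put_unused_fd(fd);	/* back out on fault, as dma-buf does */
		return -EFAULT;
	}

	/* fd_install(fd, file) would follow here, once nothing can fail. */
	return 0;
}
```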
|
| /drivers/gpu/drm/exynos/ |
| exynos_drm_ipp.c |
|   696  struct drm_file *file_priv, uint64_t user_data)   in exynos_drm_ipp_event_create() argument
|   707  e->event.user_data = user_data;   in exynos_drm_ipp_event_create()
|   909  arg->user_data);   in exynos_drm_ipp_commit_ioctl()
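exynos stamps its IPP completion events the same way vmwgfx does (line 707). On the other side of the interface, userspace reads events off the DRM fd and uses `user_data` to match completions to requests; a hedged userspace sketch of that matching loop, with a stand-in event struct and type value:

```c
#include <stdint.h>
#include <string.h>
#include <unistd.h>
#include <drm/drm.h>		/* UAPI header, via libdrm include path */

/* Stand-in for a driver-specific completion event carrying the cookie. */
struct sketch_ipp_event {
	struct drm_event base;
	uint64_t user_data;
};

static void drain_events(int drm_fd)
{
	char buf[1024];
	ssize_t r = read(drm_fd, buf, sizeof(buf));
	size_t len, off = 0;

	if (r <= 0)
		return;
	len = (size_t)r;

	while (off + sizeof(struct drm_event) <= len) {
		struct drm_event ev;

		memcpy(&ev, buf + off, sizeof(ev));	/* avoid unaligned access */
		if (ev.length < sizeof(ev) || off + ev.length > len)
			break;				/* truncated event */
		if (ev.type == 0x80000002 &&		/* hypothetical event type */
		    ev.length >= sizeof(struct sketch_ipp_event)) {
			struct sketch_ipp_event ipp;

			memcpy(&ipp, buf + off, sizeof(ipp));
			/* ipp.user_data identifies the original request. */
		}
		off += ev.length;
	}
}
```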
|