
Searched refs:indirect (Results 1 – 25 of 40) sorted by relevance

/drivers/gpu/drm/amd/amdgpu/
vcn_v4_0_5.c
472 bool indirect) in vcn_v4_0_5_mc_resume_dpg_mode() argument
484 if (!indirect) { in vcn_v4_0_5_mc_resume_dpg_mode()
488 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
492 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
517 if (!indirect) in vcn_v4_0_5_mc_resume_dpg_mode()
525 if (!indirect) { in vcn_v4_0_5_mc_resume_dpg_mode()
550 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
554 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
922 bool indirect) in vcn_v4_0_5_start_dpg_mode() argument
939 if (indirect) in vcn_v4_0_5_start_dpg_mode()
[all …]
vcn_v4_0_3.c
532 bool indirect) in vcn_v4_0_3_mc_resume_dpg_mode() argument
544 if (!indirect) { in vcn_v4_0_3_mc_resume_dpg_mode()
577 if (!indirect) in vcn_v4_0_3_mc_resume_dpg_mode()
585 if (!indirect) { in vcn_v4_0_3_mc_resume_dpg_mode()
744 uint8_t indirect) in vcn_v4_0_3_disable_clock_gating_dpg_mode() argument
847 bool indirect) in vcn_v4_0_3_start_dpg_mode() argument
867 if (indirect) { in vcn_v4_0_3_start_dpg_mode()
947 if (indirect) in vcn_v4_0_3_start_dpg_mode()
2179 tmp, 0, indirect); in vcn_v4_0_3_enable_ras()
2184 tmp, 0, indirect); in vcn_v4_0_3_enable_ras()
[all …]
vcn_v5_0_0.c
439 bool indirect) in vcn_v5_0_0_mc_resume_dpg_mode() argument
451 if (!indirect) { in vcn_v5_0_0_mc_resume_dpg_mode()
482 if (!indirect) in vcn_v5_0_0_mc_resume_dpg_mode()
490 if (!indirect) { in vcn_v5_0_0_mc_resume_dpg_mode()
538 adev->gfx.config.gb_addr_config, 0, indirect); in vcn_v5_0_0_mc_resume_dpg_mode()
682 uint8_t indirect)
709 bool indirect) in vcn_v5_0_0_start_dpg_mode() argument
727 if (indirect) in vcn_v5_0_0_start_dpg_mode()
752 vcn_v5_0_0_mc_resume_dpg_mode(vinst, indirect); in vcn_v5_0_0_start_dpg_mode()
767 UVD_MASTINT_EN__VCPU_EN_MASK, 0, indirect); in vcn_v5_0_0_start_dpg_mode()
[all …]
vcn_v5_0_1.c
418 bool indirect) in vcn_v5_0_1_mc_resume_dpg_mode() argument
430 if (!indirect) { in vcn_v5_0_1_mc_resume_dpg_mode()
434 inst_idx].tmr_mc_addr_lo), 0, indirect); in vcn_v5_0_1_mc_resume_dpg_mode()
438 inst_idx].tmr_mc_addr_hi), 0, indirect); in vcn_v5_0_1_mc_resume_dpg_mode()
463 if (!indirect) in vcn_v5_0_1_mc_resume_dpg_mode()
471 if (!indirect) { in vcn_v5_0_1_mc_resume_dpg_mode()
495 AMDGPU_VCN_STACK_SIZE), 0, indirect); in vcn_v5_0_1_mc_resume_dpg_mode()
499 AMDGPU_VCN_STACK_SIZE), 0, indirect); in vcn_v5_0_1_mc_resume_dpg_mode()
600 bool indirect) in vcn_v5_0_1_start_dpg_mode() argument
622 if (indirect) { in vcn_v5_0_1_start_dpg_mode()
[all …]
vcn_v2_5.c
677 bool indirect) in vcn_v2_5_mc_resume_dpg_mode() argument
686 if (!indirect) { in vcn_v2_5_mc_resume_dpg_mode()
717 if (!indirect) in vcn_v2_5_mc_resume_dpg_mode()
725 if (!indirect) { in vcn_v2_5_mc_resume_dpg_mode()
892 uint8_t sram_sel, uint8_t indirect) in vcn_v2_5_clock_gating_dpg_mode() argument
1003 bool indirect) in vcn_v2_6_enable_ras() argument
1018 tmp, 0, indirect); in vcn_v2_6_enable_ras()
1023 tmp, 0, indirect); in vcn_v2_6_enable_ras()
1028 tmp, 0, indirect); in vcn_v2_6_enable_ras()
1048 if (indirect) in vcn_v2_5_start_dpg_mode()
[all …]
vcn_v4_0.c
522 bool indirect) in vcn_v4_0_mc_resume_dpg_mode() argument
533 if (!indirect) { in vcn_v4_0_mc_resume_dpg_mode()
564 if (!indirect) in vcn_v4_0_mc_resume_dpg_mode()
572 if (!indirect) { in vcn_v4_0_mc_resume_dpg_mode()
868 uint8_t indirect) in vcn_v4_0_disable_clock_gating_dpg_mode() argument
977 bool indirect) in vcn_v4_0_enable_ras() argument
992 tmp, 0, indirect); in vcn_v4_0_enable_ras()
997 tmp, 0, indirect); in vcn_v4_0_enable_ras()
1025 if (indirect) in vcn_v4_0_start_dpg_mode()
1089 vcn_v4_0_enable_ras(vinst, indirect); in vcn_v4_0_start_dpg_mode()
[all …]
vcn_v2_0.c
448 bool indirect) in vcn_v2_0_mc_resume_dpg_mode() argument
456 if (!indirect) { in vcn_v2_0_mc_resume_dpg_mode()
487 if (!indirect) in vcn_v2_0_mc_resume_dpg_mode()
495 if (!indirect) { in vcn_v2_0_mc_resume_dpg_mode()
659 uint8_t sram_sel, uint8_t indirect) in vcn_v2_0_clock_gating_dpg_mode() argument
877 if (indirect) in vcn_v2_0_start_dpg_mode()
892 UVD, 0, mmUVD_MASTINT_EN), 0, 0, indirect); in vcn_v2_0_start_dpg_mode()
904 UVD, 0, mmUVD_LMI_CTRL), tmp, 0, indirect); in vcn_v2_0_start_dpg_mode()
939 UVD, 0, mmUVD_SOFT_RESET), 0, 0, indirect); in vcn_v2_0_start_dpg_mode()
949 UVD_MASTINT_EN__VCPU_EN_MASK, 0, indirect); in vcn_v2_0_start_dpg_mode()
[all …]
vcn_v3_0.c
583 bool indirect) in vcn_v3_0_mc_resume_dpg_mode() argument
592 if (!indirect) { in vcn_v3_0_mc_resume_dpg_mode()
620 AMDGPU_UVD_FIRMWARE_OFFSET >> 3, 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
623 if (!indirect) in vcn_v3_0_mc_resume_dpg_mode()
631 if (!indirect) { in vcn_v3_0_mc_resume_dpg_mode()
920 uint8_t indirect) in vcn_v3_0_clock_gating_dpg_mode() argument
1055 if (indirect) in vcn_v3_0_start_dpg_mode()
1108 vcn_v3_0_mc_resume_dpg_mode(vinst, indirect); in vcn_v3_0_start_dpg_mode()
1117 VCN, inst_idx, mmUVD_LMI_CTRL2), 0, 0, indirect); in vcn_v3_0_start_dpg_mode()
1131 UVD_MASTINT_EN__VCPU_EN_MASK, 0, indirect); in vcn_v3_0_start_dpg_mode()
[all …]
jpeg_v5_0_0.c
304 int inst_idx, uint8_t indirect) in jpeg_engine_5_0_0_dpg_clock_gating_mode() argument
317 if (indirect) { in jpeg_engine_5_0_0_dpg_clock_gating_mode()
318 ADD_SOC24_JPEG_TO_DPG_SRAM(inst_idx, vcnipJPEG_CGC_CTRL, data, indirect); in jpeg_engine_5_0_0_dpg_clock_gating_mode()
353 if (indirect) in jpeg_v5_0_0_start_dpg_mode()
357 jpeg_engine_5_0_0_dpg_clock_gating_mode(adev, inst_idx, indirect); in jpeg_v5_0_0_start_dpg_mode()
360 if (indirect) in jpeg_v5_0_0_start_dpg_mode()
362 adev->gfx.config.gb_addr_config, indirect); in jpeg_v5_0_0_start_dpg_mode()
368 if (indirect) in jpeg_v5_0_0_start_dpg_mode()
370 JPEG_SYS_INT_EN__DJRBC0_MASK, indirect); in jpeg_v5_0_0_start_dpg_mode()
375 if (indirect) { in jpeg_v5_0_0_start_dpg_mode()
[all …]
amdgpu_jpeg.h
36 #define WREG32_SOC15_JPEG_DPG_MODE(inst_idx, offset, value, indirect) \ argument
38 if (!indirect) { \
46 indirect << UVD_DPG_LMA_CTL__SRAM_SEL__SHIFT)); \
64 #define WREG32_SOC24_JPEG_DPG_MODE(inst_idx, offset, value, indirect) \ argument
75 indirect << UVD_DPG_LMA_CTL__SRAM_SEL__SHIFT)); \
89 #define ADD_SOC24_JPEG_TO_DPG_SRAM(inst_idx, offset, value, indirect) \ argument
jpeg_v4_0_5.c
356 int inst_idx, uint8_t indirect) in jpeg_engine_4_0_5_dpg_clock_gating_mode() argument
367 WREG32_SOC15_JPEG_DPG_MODE(inst_idx, regJPEG_CGC_CTRL_INTERNAL_OFFSET, data, indirect); in jpeg_engine_4_0_5_dpg_clock_gating_mode()
371 data, indirect); in jpeg_engine_4_0_5_dpg_clock_gating_mode()
421 static void jpeg_v4_0_5_start_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect) in jpeg_v4_0_5_start_dpg_mode() argument
444 if (indirect) in jpeg_v4_0_5_start_dpg_mode()
448 jpeg_engine_4_0_5_dpg_clock_gating_mode(adev, inst_idx, indirect); in jpeg_v4_0_5_start_dpg_mode()
452 adev->gfx.config.gb_addr_config, indirect); in jpeg_v4_0_5_start_dpg_mode()
455 JPEG_SYS_INT_EN__DJRBC_MASK, indirect); in jpeg_v4_0_5_start_dpg_mode()
458 WREG32_SOC15_JPEG_DPG_MODE(inst_idx, regUVD_NO_OP_INTERNAL_OFFSET, 0, indirect); in jpeg_v4_0_5_start_dpg_mode()
460 if (indirect) in jpeg_v4_0_5_start_dpg_mode()
amdgpu_vcn.h
143 #define WREG32_SOC15_DPG_MODE(inst_idx, offset, value, mask_en, indirect) \ argument
145 if (!indirect) { \
195 #define WREG32_SOC24_DPG_MODE(inst_idx, offset, value, mask_en, indirect) \ argument
197 if (!indirect) { \
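
Note: all of the VCN and JPEG hits above share one mechanism. The indirect flag handed down from the *_start_dpg_mode() functions decides whether a DPG-mode register write is issued immediately over MMIO or appended to a command table in DPG SRAM that the firmware replays. Below is a minimal sketch of that split, not the kernel macros themselves: dpg_ctx, mmio_write and dpg_mode_write are hypothetical, dpg_sram_curr_addr is borrowed from the real driver state, and the real WREG32_SOC15_DPG_MODE / WREG32_SOC15_JPEG_DPG_MODE macros additionally handle masking and the UVD_DPG_LMA_CTL programming visible in the snippets.

#include <linux/types.h>

/* Hypothetical, simplified stand-in for the state the real macros operate on. */
struct dpg_ctx {
	u32 *dpg_sram_curr_addr;                /* cursor into the DPG SRAM table */
	void (*mmio_write)(u32 reg, u32 value); /* placeholder for the WREG32 path */
};

static void dpg_mode_write(struct dpg_ctx *ctx, u32 reg, u32 value, bool indirect)
{
	if (!indirect) {
		/* direct path: program the register right away */
		ctx->mmio_write(reg, value);
	} else {
		/* indirect path: queue the (reg, value) pair for the firmware */
		*ctx->dpg_sram_curr_addr++ = reg;
		*ctx->dpg_sram_curr_addr++ = value;
	}
}
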
/drivers/net/ethernet/intel/idpf/
idpf_controlq.c
80 desc->params.indirect.addr_high = in idpf_ctlq_init_rxq_bufs()
82 desc->params.indirect.addr_low = in idpf_ctlq_init_rxq_bufs()
84 desc->params.indirect.param0 = 0; in idpf_ctlq_init_rxq_bufs()
85 desc->params.indirect.sw_cookie = 0; in idpf_ctlq_init_rxq_bufs()
86 desc->params.indirect.v_flags = 0; in idpf_ctlq_init_rxq_bufs()
306 desc->params.indirect.addr_high = in idpf_ctlq_send()
308 desc->params.indirect.addr_low = in idpf_ctlq_send()
311 memcpy(&desc->params, msg->ctx.indirect.context, in idpf_ctlq_send()
499 desc->params.indirect.addr_high = in idpf_ctlq_post_rx_buffs()
501 desc->params.indirect.addr_low = in idpf_ctlq_post_rx_buffs()
[all …]
idpf_controlq.h
59 } indirect; member
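
Note: in the idpf control queue, indirect names the descriptor parameter block used when a command's payload travels in a separate DMA buffer rather than inline in the descriptor; the hits above show the buffer's DMA address being split into 32-bit halves. A hedged sketch of just that split follows, using a hypothetical cut-down struct and helper in place of the driver's real params.indirect layout.

#include <linux/kernel.h>
#include <linux/types.h>

/* Hypothetical, minimal stand-in for the descriptor's indirect parameter
 * block; only the two address halves shown in the search hits are modelled. */
struct indirect_params {
	__le32 addr_high;
	__le32 addr_low;
};

static void set_indirect_addr(struct indirect_params *p, dma_addr_t pa)
{
	/* store the 64-bit DMA address as two little-endian 32-bit halves */
	p->addr_high = cpu_to_le32(upper_32_bits(pa));
	p->addr_low  = cpu_to_le32(lower_32_bits(pa));
}
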
/drivers/block/xen-blkback/
blkback.c
1105 dst->u.indirect.indirect_op = src->u.indirect.indirect_op; in blkif_get_x86_32_req()
1106 dst->u.indirect.nr_segments = in blkif_get_x86_32_req()
1108 dst->u.indirect.handle = src->u.indirect.handle; in blkif_get_x86_32_req()
1109 dst->u.indirect.id = src->u.indirect.id; in blkif_get_x86_32_req()
1110 dst->u.indirect.sector_number = src->u.indirect.sector_number; in blkif_get_x86_32_req()
1114 dst->u.indirect.indirect_grefs[i] = in blkif_get_x86_32_req()
1158 dst->u.indirect.indirect_op = src->u.indirect.indirect_op; in blkif_get_x86_64_req()
1159 dst->u.indirect.nr_segments = in blkif_get_x86_64_req()
1161 dst->u.indirect.handle = src->u.indirect.handle; in blkif_get_x86_64_req()
1162 dst->u.indirect.id = src->u.indirect.id; in blkif_get_x86_64_req()
[all …]
common.h
126 struct blkif_x86_32_request_indirect indirect; member
182 struct blkif_x86_64_request_indirect indirect; member
/drivers/net/can/sja1000/
sja1000_isa.c
37 static int indirect[MAXDEV] = {[0 ... (MAXDEV - 1)] = -1}; variable
46 module_param_hw_array(indirect, int, ioport, NULL, 0444);
47 MODULE_PARM_DESC(indirect, "Indirect access via address and data port");
139 if (indirect[idx] > 0 || in sja1000_isa_probe()
140 (indirect[idx] == -1 && indirect[0] > 0)) in sja1000_isa_probe()
/drivers/net/can/cc770/
cc770_isa.c
75 static int indirect[MAXDEV] = {[0 ... (MAXDEV - 1)] = -1}; variable
83 module_param_hw_array(indirect, int, ioport, NULL, 0444);
84 MODULE_PARM_DESC(indirect, "Indirect access via address and data port");
184 if (indirect[idx] > 0 || in cc770_isa_probe()
185 (indirect[idx] == -1 && indirect[0] > 0)) in cc770_isa_probe()
Kconfig
14 indirect access.
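
Note: for the two ISA CAN drivers, indirect is not a function argument but a per-board module parameter; a positive value makes the driver reach the controller through an address/data port pair instead of a memory-mapped window. A condensed sketch of that probe-time check is below, assembled from the hits above; MAXDEV is assumed to be 8 here, and use_indirect_access() is a hypothetical wrapper around the drivers' inline condition.

#include <linux/module.h>
#include <linux/moduleparam.h>

#define MAXDEV 8	/* assumed per-board array size for this sketch */

/* -1 means "not set on the command line" for a given board index. */
static int indirect[MAXDEV] = {[0 ... (MAXDEV - 1)] = -1};
module_param_hw_array(indirect, int, ioport, NULL, 0444);
MODULE_PARM_DESC(indirect, "Indirect access via address and data port");

static bool use_indirect_access(int idx)
{
	/* a per-board value wins; otherwise entry 0 acts as a global default */
	return indirect[idx] > 0 ||
	       (indirect[idx] == -1 && indirect[0] > 0);
}
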
/drivers/virtio/
virtio_ring.c
178 bool indirect; member
545 bool indirect; in virtqueue_add_split() local
572 indirect = true; in virtqueue_add_split()
578 indirect = false; in virtqueue_add_split()
593 if (indirect) in virtqueue_add_split()
640 if (indirect) { in virtqueue_add_split()
659 if (indirect) in virtqueue_add_split()
666 if (indirect) in virtqueue_add_split()
697 if (indirect) in virtqueue_add_split()
709 if (indirect) in virtqueue_add_split()
[all …]
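
Note: in the virtio split ring, the local indirect flag in virtqueue_add_split() selects between laying the scatter-gather chain out in the ring itself and placing it in a separately allocated descriptor table that a single ring entry references with VRING_DESC_F_INDIRECT. A hedged sketch of filling that one referencing slot follows; fill_indirect_slot is hypothetical and table_dma is assumed to be the DMA address of an already-populated table.

#include <linux/virtio_config.h>
#include <linux/virtio_ring.h>

static void fill_indirect_slot(struct virtio_device *vdev,
			       struct vring_desc *ring_desc,
			       dma_addr_t table_dma, unsigned int total_sg)
{
	/* one ring descriptor stands in for the whole total_sg-entry chain */
	ring_desc->flags = cpu_to_virtio16(vdev, VRING_DESC_F_INDIRECT);
	ring_desc->addr  = cpu_to_virtio64(vdev, table_dma);
	ring_desc->len   = cpu_to_virtio32(vdev,
					   total_sg * sizeof(struct vring_desc));
}
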
/drivers/scsi/ibmvscsi/
ibmvscsi.c
689 struct srp_indirect_buf *indirect = in map_sg_data() local
706 indirect->table_desc.va = 0; in map_sg_data()
709 indirect->table_desc.key = 0; in map_sg_data()
713 &indirect->desc_list[0]); in map_sg_data()
714 indirect->len = cpu_to_be32(total_length); in map_sg_data()
735 indirect->len = cpu_to_be32(total_length); in map_sg_data()
738 sizeof(indirect->desc_list[0])); in map_sg_data()
739 memcpy(indirect->desc_list, evt_struct->ext_list, in map_sg_data()
1048 struct srp_indirect_buf *indirect; in ibmvscsi_queuecommand_lck() local
1088 indirect->table_desc.va == 0) { in ibmvscsi_queuecommand_lck()
[all …]
/drivers/infiniband/hw/efa/
efa_verbs.c
130 } indirect; member
1369 int page_cnt = pbl->phys.indirect.pbl_buf_size_in_pages; in pbl_chunk_list_create()
1370 struct scatterlist *pages_sgl = pbl->phys.indirect.sgl; in pbl_chunk_list_create()
1372 int sg_dma_cnt = pbl->phys.indirect.sg_dma_cnt; in pbl_chunk_list_create()
1524 pbl->phys.indirect.sgl = sgl; in pbl_indirect_initialize()
1525 pbl->phys.indirect.sg_dma_cnt = sg_dma_cnt; in pbl_indirect_initialize()
1536 pbl->phys.indirect.chunk_list.size); in pbl_indirect_initialize()
1550 dma_unmap_sg(&dev->pdev->dev, pbl->phys.indirect.sgl, in pbl_indirect_terminate()
1552 kfree(pbl->phys.indirect.sgl); in pbl_indirect_terminate()
1645 params->indirect = !pbl->physically_continuous; in efa_create_pbl()
[all …]
/drivers/net/ethernet/wiznet/
w5300.c
89 bool indirect; member
245 w5300_write_direct(priv, W5300_MR, priv->indirect ? in w5300_hw_reset()
549 priv->indirect = mem_size < W5300_BUS_DIRECT_SIZE; in w5300_hw_probe()
550 if (priv->indirect) { in w5300_hw_probe()
/drivers/gpu/drm/v3d/
v3d_sched.c
433 struct v3d_bo *indirect = to_v3d_bo(indirect_csd->indirect); local
439 v3d_get_bo_vaddr(indirect); in v3d_rewrite_csd_job_wg_counts_from_indirect()
465 ((uint32_t *)indirect->vaddr)[uniform_idx] = wg_counts[i]; in v3d_rewrite_csd_job_wg_counts_from_indirect()
469 v3d_put_bo_vaddr(indirect); in v3d_rewrite_csd_job_wg_counts_from_indirect()
/drivers/vhost/
vhost.c
499 kfree(vq->indirect); in vhost_vq_free_iovecs()
500 vq->indirect = NULL; in vhost_vq_free_iovecs()
517 vq->indirect = kmalloc_array(UIO_MAXIOV, in vhost_dev_alloc_iovecs()
518 sizeof(*vq->indirect), in vhost_dev_alloc_iovecs()
526 if (!vq->indirect || !vq->log || !vq->heads || !vq->nheads) in vhost_dev_alloc_iovecs()
616 vq->indirect = NULL; in vhost_dev_init()
2706 struct vring_desc *indirect) in get_indirect() argument
2710 u32 len = vhost32_to_cpu(vq, indirect->len); in get_indirect()
2723 ret = translate_desc(vq, vhost64_to_cpu(vq, indirect->addr), len, vq->indirect, in get_indirect()
2730 iov_iter_init(&from, ITER_SOURCE, vq->indirect, ret, len); in get_indirect()
[all …]
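
Note: vhost's get_indirect() hits above handle a guest descriptor that points at an indirect table. translate_desc() turns the table's guest address into host iovecs stored in the preallocated vq->indirect array (allocated with UIO_MAXIOV entries, per the hits in vhost_dev_alloc_iovecs()), and an iov_iter is then used to read the table back one struct vring_desc at a time. A hedged sketch of that read loop follows; walk_indirect_table is hypothetical and the real function's bounds and error handling are reduced to a single check.

#include <linux/errno.h>
#include <linux/uio.h>
#include <linux/virtio_ring.h>

static int walk_indirect_table(struct iov_iter *from, unsigned int count)
{
	struct vring_desc desc;
	unsigned int i;

	for (i = 0; i < count; i++) {
		/* pull the next guest descriptor out of the translated region */
		if (copy_from_iter(&desc, sizeof(desc), from) != sizeof(desc))
			return -EINVAL;
		/* ... hand desc.addr / desc.len to the normal descriptor path ... */
	}
	return 0;
}
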
