| /drivers/gpu/drm/radeon/ |
| ni_dma.c |
    319  unsigned ndw;    in cayman_dma_vm_copy_pages() [local]
    322  ndw = count * 2;    in cayman_dma_vm_copy_pages()
    324  ndw = 0xFFFFE;    in cayman_dma_vm_copy_pages()
    333  pe += ndw * 4;    in cayman_dma_vm_copy_pages()
    334  src += ndw * 4;    in cayman_dma_vm_copy_pages()
    359  unsigned ndw;    in cayman_dma_vm_write_pages() [local]
    364  ndw = 0xFFFFE;    in cayman_dma_vm_write_pages()
    371  for (; ndw > 0; ndw -= 2, --count, pe += 8) {    in cayman_dma_vm_write_pages()
    407  unsigned ndw;    in cayman_dma_vm_set_pages() [local]
    412  ndw = 0xFFFFE;    in cayman_dma_vm_set_pages()
    [all …]
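The radeon DMA page-table writers listed here and in the next few entries share one sizing pattern: the dword count ndw is derived from the number of 64-bit PTEs (count * 2), clamped to the largest payload a single DMA packet can describe (0xFFFFE dwords), and any remaining entries go into further packets. Below is a minimal userspace sketch of just that chunking logic; emit_write_packet() and MAX_PACKET_DW are hypothetical stand-ins, not the driver's actual helpers.

    #include <stdint.h>
    #include <stdio.h>

    #define MAX_PACKET_DW 0xFFFFE   /* hypothetical per-packet dword limit, as in the clamps above */

    /* Hypothetical stand-in for emitting one WRITE packet covering ndw dwords. */
    static void emit_write_packet(uint64_t pe, unsigned ndw)
    {
        printf("WRITE packet: pe=0x%llx, %u dwords (%u PTEs)\n",
               (unsigned long long)pe, ndw, ndw / 2);
    }

    /* Split a run of 'count' 64-bit PTEs starting at GPU address 'pe' into
     * packets of at most MAX_PACKET_DW dwords, mirroring the clamp seen in
     * cayman_dma_vm_write_pages() and si_dma_vm_write_pages(). */
    static void write_ptes_chunked(uint64_t pe, unsigned count)
    {
        while (count) {
            unsigned ndw = count * 2;   /* two dwords per 64-bit PTE */

            if (ndw > MAX_PACKET_DW)
                ndw = MAX_PACKET_DW;

            emit_write_packet(pe, ndw);
            pe += ndw * 4;              /* advance by ndw dwords (4 bytes each) */
            count -= ndw / 2;           /* that many PTEs are now written */
        }
    }

    int main(void)
    {
        /* 0x90000 PTEs need two packets: 0xFFFFE dwords, then the remainder. */
        write_ptes_chunked(0x100000ULL, 0x90000);
        return 0;
    }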
|
| si_dma.c |
    111  unsigned ndw;    in si_dma_vm_write_pages() [local]
    114  ndw = count * 2;    in si_dma_vm_write_pages()
    115  if (ndw > 0xFFFFE)    in si_dma_vm_write_pages()
    116  ndw = 0xFFFFE;    in si_dma_vm_write_pages()
    122  for (; ndw > 0; ndw -= 2, --count, pe += 8) {    in si_dma_vm_write_pages()
    158  unsigned ndw;    in si_dma_vm_set_pages() [local]
    161  ndw = count * 2;    in si_dma_vm_set_pages()
    162  if (ndw > 0xFFFFE)    in si_dma_vm_set_pages()
    163  ndw = 0xFFFFE;    in si_dma_vm_set_pages()
    180  pe += ndw * 4;    in si_dma_vm_set_pages()
    [all …]
|
| radeon_ring.c |
    109  int radeon_ring_alloc(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw)    in radeon_ring_alloc() [argument]
    114  if (ndw > (ring->ring_size / 4))    in radeon_ring_alloc()
    119  ndw = (ndw + ring->align_mask) & ~ring->align_mask;    in radeon_ring_alloc()
    120  while (ndw > (ring->ring_free_dw - 1)) {    in radeon_ring_alloc()
    122  if (ndw < ring->ring_free_dw) {    in radeon_ring_alloc()
    129  ring->count_dw = ndw;    in radeon_ring_alloc()
    145  int radeon_ring_lock(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw)    in radeon_ring_lock() [argument]
    150  r = radeon_ring_alloc(rdev, ring, ndw);    in radeon_ring_lock()
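radeon_ring_alloc() first rejects requests larger than the whole ring, rounds ndw up to the ring's alignment, and then needs that many free dwords (minus one, so a full ring is never confused with an empty one) before reserving them in ring->count_dw. A simplified sketch of the arithmetic follows, with an assumed struct standing in for the real struct radeon_ring and an error return where the driver would instead re-read the free space and wait for the GPU:

    #include <stdio.h>

    /* Assumed minimal ring state; only the fields the allocation arithmetic uses. */
    struct ring {
        unsigned ring_size;     /* ring buffer size in bytes */
        unsigned ring_free_dw;  /* free dwords between write and read pointers */
        unsigned align_mask;    /* e.g. 15 for 16-dword alignment */
        unsigned count_dw;      /* dwords reserved for the current request */
    };

    static int ring_alloc(struct ring *ring, unsigned ndw)
    {
        /* A single request can never exceed the whole ring. */
        if (ndw > ring->ring_size / 4)
            return -1;

        /* Round up so every submission starts on an aligned boundary. */
        ndw = (ndw + ring->align_mask) & ~ring->align_mask;

        /* One dword stays unused so a full ring is distinguishable from an
         * empty one; the real driver waits for the GPU here instead of
         * failing immediately. */
        if (ndw > ring->ring_free_dw - 1)
            return -1;

        ring->count_dw = ndw;
        return 0;
    }

    int main(void)
    {
        struct ring r = { .ring_size = 4096, .ring_free_dw = 100, .align_mask = 15 };
        int ret = ring_alloc(&r, 13);

        printf("alloc 13 dw -> %d, reserved %u dw\n", ret, r.count_dw);
        return 0;
    }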
|
| cik_sdma.c |
    847  unsigned ndw;    in cik_sdma_vm_write_pages() [local]
    850  ndw = count * 2;    in cik_sdma_vm_write_pages()
    851  if (ndw > 0xFFFFE)    in cik_sdma_vm_write_pages()
    852  ndw = 0xFFFFE;    in cik_sdma_vm_write_pages()
    860  for (; ndw > 0; ndw -= 2, --count, pe += 8) {    in cik_sdma_vm_write_pages()
    896  unsigned ndw;    in cik_sdma_vm_set_pages() [local]
    899  ndw = count;    in cik_sdma_vm_set_pages()
    900  if (ndw > 0x7FFFF)    in cik_sdma_vm_set_pages()
    901  ndw = 0x7FFFF;    in cik_sdma_vm_set_pages()
    920  pe += ndw * 8;    in cik_sdma_vm_set_pages()
    [all …]
|
| radeon_vm.c |
    650  ndw = 64;    in radeon_vm_update_page_directory()
    653  ndw += vm->max_pde_used * 6;    in radeon_vm_update_page_directory()
    656  if (ndw > 0xfffff)    in radeon_vm_update_page_directory()
    915  unsigned nptes, ncmds, ndw;    in radeon_vm_bo_update() [local]
    971  ndw = 64;    in radeon_vm_bo_update()
    976  ndw += ncmds * 7;    in radeon_vm_bo_update()
    980  ndw += ncmds * 4;    in radeon_vm_bo_update()
    983  ndw += nptes * 2;    in radeon_vm_bo_update()
    987  ndw += ncmds * 10;    in radeon_vm_bo_update()
    990  ndw += 2 * 10;    in radeon_vm_bo_update()
    [all …]
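In radeon_vm_bo_update() the budget starts at 64 dwords of padding and then grows per command and per PTE; the increments at lines 976, 980/983 and 987/990 appear to correspond to alternative update paths (copy, write-to-memory, and set-page commands). Assuming the write-to-memory path combines the ncmds * 4 header cost with nptes * 2 body dwords, an update of 3 commands and 512 PTEs would be budgeted at roughly 64 + 3 * 4 + 512 * 2 = 1100 dwords, well under the 0xfffff clamp seen in the page-directory update.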
|
| r100.c |
    913  unsigned ndw;    in r100_copy_blit() [local]
    925  ndw = 64 + (10 * num_loops);    in r100_copy_blit()
    926  r = radeon_ring_lock(rdev, ring, ndw);    in r100_copy_blit()
    928  DRM_ERROR("radeon: moving bo (%d) asking for %u dw.\n", r, ndw);    in r100_copy_blit()
|
| radeon.h |
    976  int radeon_ring_alloc(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ndw);
    977  int radeon_ring_lock(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ndw);
|
| /drivers/gpu/drm/amd/amdgpu/ |
| amdgpu_vm_sdma.c |
    49   unsigned int ndw;    in amdgpu_vm_sdma_alloc_job() [local]
    53   ndw = AMDGPU_VM_SDMA_MIN_NUM_DW;    in amdgpu_vm_sdma_alloc_job()
    55   ndw += count * 2;    in amdgpu_vm_sdma_alloc_job()
    56   ndw = min(ndw, AMDGPU_VM_SDMA_MAX_NUM_DW);    in amdgpu_vm_sdma_alloc_job()
    59   ndw * 4, pool, &p->job);    in amdgpu_vm_sdma_alloc_job()
    63   p->num_dw_left = ndw;    in amdgpu_vm_sdma_alloc_job()
    225  unsigned int i, ndw, nptes;    in amdgpu_vm_sdma_update() [local]
    244  ndw = p->num_dw_left;    in amdgpu_vm_sdma_update()
    247  if (ndw < 32) {    in amdgpu_vm_sdma_update()
    268  ndw -= 7;    in amdgpu_vm_sdma_update()
    [all …]
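amdgpu_vm_sdma_alloc_job() sizes the job's IB the same way: start from a fixed minimum dword budget, add two dwords per page-table entry, clamp with min() to a fixed maximum, and allocate ndw * 4 bytes. A small sketch of that sizing follows; the MIN/MAX values below are placeholders, since only the constant names appear in this listing:

    #include <stdio.h>

    /* Placeholder values: the real AMDGPU_VM_SDMA_MIN_NUM_DW / MAX_NUM_DW
     * constants are defined in amdgpu_vm_sdma.c and may differ. */
    #define VM_SDMA_MIN_NUM_DW 256u
    #define VM_SDMA_MAX_NUM_DW (16u * 1024u)

    /* Dword budget for one page-table update job covering 'count' PTEs. */
    static unsigned int vm_sdma_job_dwords(unsigned int count)
    {
        unsigned int ndw = VM_SDMA_MIN_NUM_DW;  /* room for headers, fences, padding */

        ndw += count * 2;                       /* two dwords per 64-bit PTE */
        if (ndw > VM_SDMA_MAX_NUM_DW)           /* same effect as min(ndw, MAX) */
            ndw = VM_SDMA_MAX_NUM_DW;

        return ndw;
    }

    int main(void)
    {
        printf("128 PTEs   -> %u dw (%u bytes of IB)\n",
               vm_sdma_job_dwords(128), vm_sdma_job_dwords(128) * 4);
        printf("65536 PTEs -> %u dw (clamped)\n", vm_sdma_job_dwords(65536));
        return 0;
    }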
|
| si_dma.c |
    351  unsigned ndw = count * 2;    in si_dma_vm_write_pte() [local]
    353  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw);    in si_dma_vm_write_pte()
    356  for (; ndw > 0; ndw -= 2) {    in si_dma_vm_write_pte()
    381  unsigned ndw;    in si_dma_vm_set_pte_pde() [local]
    384  ndw = count * 2;    in si_dma_vm_set_pte_pde()
    385  if (ndw > 0xFFFFE)    in si_dma_vm_set_pte_pde()
    386  ndw = 0xFFFFE;    in si_dma_vm_set_pte_pde()
    394  ib->ptr[ib->length_dw++] = DMA_PTE_PDE_PACKET(ndw);    in si_dma_vm_set_pte_pde()
    403  pe += ndw * 4;    in si_dma_vm_set_pte_pde()
    404  addr += (ndw / 2) * incr;    in si_dma_vm_set_pte_pde()
    [all …]
|
| amdgpu_ring.c |
    81   int amdgpu_ring_alloc(struct amdgpu_ring *ring, unsigned int ndw)    in amdgpu_ring_alloc() [argument]
    85   ndw = (ndw + ring->funcs->align_mask) & ~ring->funcs->align_mask;    in amdgpu_ring_alloc()
    90   if (WARN_ON_ONCE(ndw > ring->max_dw))    in amdgpu_ring_alloc()
    93   ring->count_dw = ndw;    in amdgpu_ring_alloc()
    112  static void amdgpu_ring_alloc_reemit(struct amdgpu_ring *ring, unsigned int ndw)    in amdgpu_ring_alloc_reemit() [argument]
    116  ndw = (ndw + ring->funcs->align_mask) & ~ring->funcs->align_mask;    in amdgpu_ring_alloc_reemit()
    118  ring->count_dw = ndw;    in amdgpu_ring_alloc_reemit()
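Both ring allocators round the request up with (ndw + align_mask) & ~align_mask, i.e. up to the next multiple of align_mask + 1. For example, with an align mask of 0x3f, a request for 70 dwords is reserved as (70 + 63) & ~63 = 128 dwords.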
|
| amdgpu_gmc.c |
    720  unsigned int ndw;    in amdgpu_gmc_flush_gpu_tlb_pasid() [local]
    748  ndw = kiq->pmf->invalidate_tlbs_size + 8;    in amdgpu_gmc_flush_gpu_tlb_pasid()
    751  ndw += kiq->pmf->invalidate_tlbs_size;    in amdgpu_gmc_flush_gpu_tlb_pasid()
    754  ndw += kiq->pmf->invalidate_tlbs_size;    in amdgpu_gmc_flush_gpu_tlb_pasid()
    757  r = amdgpu_ring_alloc(ring, ndw);    in amdgpu_gmc_flush_gpu_tlb_pasid()
|
| sdma_v2_4.c |
    684  unsigned ndw = count * 2;    in sdma_v2_4_vm_write_pte() [local]
    690  ib->ptr[ib->length_dw++] = ndw;    in sdma_v2_4_vm_write_pte()
    691  for (; ndw > 0; ndw -= 2) {    in sdma_v2_4_vm_write_pte()
|
| cik_sdma.c |
    748  unsigned ndw = count * 2;    in cik_sdma_vm_write_pte() [local]
    754  ib->ptr[ib->length_dw++] = ndw;    in cik_sdma_vm_write_pte()
    755  for (; ndw > 0; ndw -= 2) {    in cik_sdma_vm_write_pte()
|
| sdma_v3_0.c |
    957  unsigned ndw = count * 2;    in sdma_v3_0_vm_write_pte() [local]
    963  ib->ptr[ib->length_dw++] = ndw;    in sdma_v3_0_vm_write_pte()
    964  for (; ndw > 0; ndw -= 2) {    in sdma_v3_0_vm_write_pte()
|
| sdma_v6_0.c |
    1091  unsigned ndw = count * 2;    in sdma_v6_0_vm_write_pte() [local]
    1097  ib->ptr[ib->length_dw++] = ndw - 1;    in sdma_v6_0_vm_write_pte()
    1098  for (; ndw > 0; ndw -= 2) {    in sdma_v6_0_vm_write_pte()
|
| sdma_v7_0.c |
    1111  unsigned ndw = count * 2;    in sdma_v7_0_vm_write_pte() [local]
    1117  ib->ptr[ib->length_dw++] = ndw - 1;    in sdma_v7_0_vm_write_pte()
    1118  for (; ndw > 0; ndw -= 2) {    in sdma_v7_0_vm_write_pte()
|
| sdma_v5_2.c |
    1085  unsigned ndw = count * 2;    in sdma_v5_2_vm_write_pte() [local]
    1091  ib->ptr[ib->length_dw++] = ndw - 1;    in sdma_v5_2_vm_write_pte()
    1092  for (; ndw > 0; ndw -= 2) {    in sdma_v5_2_vm_write_pte()
|
| sdma_v5_0.c |
    1186  unsigned ndw = count * 2;    in sdma_v5_0_vm_write_pte() [local]
    1192  ib->ptr[ib->length_dw++] = ndw - 1;    in sdma_v5_0_vm_write_pte()
    1193  for (; ndw > 0; ndw -= 2) {    in sdma_v5_0_vm_write_pte()
|
| sdma_v4_0.c |
    1618  unsigned ndw = count * 2;    in sdma_v4_0_vm_write_pte() [local]
    1624  ib->ptr[ib->length_dw++] = ndw - 1;    in sdma_v4_0_vm_write_pte()
    1625  for (; ndw > 0; ndw -= 2) {    in sdma_v4_0_vm_write_pte()
|
| sdma_v4_4_2.c |
    1214  unsigned ndw = count * 2;    in sdma_v4_4_2_vm_write_pte() [local]
    1220  ib->ptr[ib->length_dw++] = ndw - 1;    in sdma_v4_4_2_vm_write_pte()
    1221  for (; ndw > 0; ndw -= 2) {    in sdma_v4_4_2_vm_write_pte()
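One detail visible across the *_vm_write_pte() entries above: the older engines (cik_sdma, sdma_v2_4, sdma_v3_0) store ndw directly in the packet's count field, while sdma_v4_0 and later store ndw - 1, i.e. a count-minus-one encoding of the same dword total.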
|
| amdgpu_ring.h |
    443  int amdgpu_ring_alloc(struct amdgpu_ring *ring, unsigned ndw);
|
| /drivers/crypto/aspeed/ |
| aspeed-acry.c |
    253  int nbits, ndw;    in aspeed_acry_rsa_ctx_copy() [local]
    273  ndw = DIV_ROUND_UP(nbytes, BYTES_PER_DWORD);    in aspeed_acry_rsa_ctx_copy()
    279  for (j = ndw; j > 0; j--) {    in aspeed_acry_rsa_ctx_copy()
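aspeed_acry_rsa_ctx_copy() derives its dword count by rounding the key length in bytes up to whole dwords with DIV_ROUND_UP(nbytes, BYTES_PER_DWORD) and then loops ndw times. A self-contained sketch of that conversion follows; BYTES_PER_DWORD is assumed to be 4 here, and DIV_ROUND_UP is re-defined locally since the kernel macro is not available in userspace:

    #include <stdio.h>

    #define BYTES_PER_DWORD 4                               /* assumed value */
    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))      /* local copy of the kernel macro */

    int main(void)
    {
        /* A 2048-bit RSA modulus is 256 bytes, i.e. exactly 64 dwords;
         * 257 bytes would round up to 65. */
        unsigned int nbytes = 256;
        unsigned int ndw = DIV_ROUND_UP(nbytes, BYTES_PER_DWORD);
        unsigned int j;

        printf("%u bytes -> %u dwords\n", nbytes, ndw);

        for (j = ndw; j > 0; j--)
            ;   /* the driver copies one dword of key data per iteration */

        return 0;
    }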
|