
Searched refs:length_dw (Results 1 – 25 of 70) sorted by relevance

/linux/drivers/gpu/drm/radeon/
si_dma.c
80 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in si_dma_vm_copy_pages()
81 ib->ptr[ib->length_dw++] = lower_32_bits(src); in si_dma_vm_copy_pages()
120 ib->ptr[ib->length_dw++] = pe; in si_dma_vm_write_pages()
132 ib->ptr[ib->length_dw++] = value; in si_dma_vm_write_pages()
133 ib->ptr[ib->length_dw++] = upper_32_bits(value); in si_dma_vm_write_pages()
172 ib->ptr[ib->length_dw++] = pe; /* dst addr */ in si_dma_vm_set_pages()
174 ib->ptr[ib->length_dw++] = flags; /* mask */ in si_dma_vm_set_pages()
175 ib->ptr[ib->length_dw++] = 0; in si_dma_vm_set_pages()
176 ib->ptr[ib->length_dw++] = value; /* value */ in si_dma_vm_set_pages()
177 ib->ptr[ib->length_dw++] = upper_32_bits(value); in si_dma_vm_set_pages()
[all …]
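
The radeon and amdgpu hits in this listing all share one idiom: length_dw counts how many 32-bit dwords have been written into an indirect buffer (IB), and every emitted dword is stored with a post-increment. Below is a minimal standalone C sketch of that pattern; the fake_ib struct, the emit_write_pte() helper and the example values are invented for illustration and are not the kernel's definitions.

/*
 * Standalone sketch (not kernel code) of the append idiom shared by the
 * hits above: length_dw counts the 32-bit words written into an indirect
 * buffer, and every emit post-increments it. All names here are invented.
 */
#include <stdint.h>
#include <stdio.h>

struct fake_ib {
	uint32_t ptr[64];   /* stand-in for the mapped IB memory */
	uint32_t length_dw; /* number of dwords appended so far */
};

static void emit_write_pte(struct fake_ib *ib, uint64_t pe, uint64_t value)
{
	ib->ptr[ib->length_dw++] = (uint32_t)pe;            /* dst addr, low  */
	ib->ptr[ib->length_dw++] = (uint32_t)(pe >> 32);    /* dst addr, high */
	ib->ptr[ib->length_dw++] = (uint32_t)value;         /* value, low     */
	ib->ptr[ib->length_dw++] = (uint32_t)(value >> 32); /* value, high    */
}

int main(void)
{
	struct fake_ib ib = { .length_dw = 0 };

	emit_write_pte(&ib, 0x100000ULL, 0xdeadbeefULL);
	printf("IB now holds %u dwords\n", (unsigned)ib.length_dw);
	return 0;
}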
radeon_vce.c
362 ib.length_dw = 0; in radeon_vce_get_create_msg()
365 ib.ptr[ib.length_dw++] = cpu_to_le32(handle); in radeon_vce_get_create_msg()
383 ib.ptr[ib.length_dw++] = cpu_to_le32(dummy); in radeon_vce_get_create_msg()
386 for (i = ib.length_dw; i < ib_size_dw; ++i) in radeon_vce_get_create_msg()
429 ib.length_dw = 0; in radeon_vce_get_destroy_msg()
432 ib.ptr[ib.length_dw++] = cpu_to_le32(handle); in radeon_vce_get_destroy_msg()
437 ib.ptr[ib.length_dw++] = cpu_to_le32(dummy); in radeon_vce_get_destroy_msg()
443 for (i = ib.length_dw; i < ib_size_dw; ++i) in radeon_vce_get_destroy_msg()
481 if (idx >= relocs_chunk->length_dw) { in radeon_vce_cs_reloc()
483 idx, relocs_chunk->length_dw); in radeon_vce_cs_reloc()
[all …]
ni_dma.c
328 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cayman_dma_vm_copy_pages()
329 ib->ptr[ib->length_dw++] = lower_32_bits(src); in cayman_dma_vm_copy_pages()
369 ib->ptr[ib->length_dw++] = pe; in cayman_dma_vm_write_pages()
381 ib->ptr[ib->length_dw++] = value; in cayman_dma_vm_write_pages()
421 ib->ptr[ib->length_dw++] = pe; /* dst addr */ in cayman_dma_vm_set_pages()
423 ib->ptr[ib->length_dw++] = flags; /* mask */ in cayman_dma_vm_set_pages()
424 ib->ptr[ib->length_dw++] = 0; in cayman_dma_vm_set_pages()
425 ib->ptr[ib->length_dw++] = value; /* value */ in cayman_dma_vm_set_pages()
426 ib->ptr[ib->length_dw++] = upper_32_bits(value); in cayman_dma_vm_set_pages()
428 ib->ptr[ib->length_dw++] = 0; in cayman_dma_vm_set_pages()
[all …]
radeon_cs.c
95 p->nrelocs = chunk->length_dw / 4; in radeon_cs_parser_relocs()
316 p->chunks[i].length_dw = user_chunk.length_dw; in radeon_cs_parser_init()
323 if (p->chunks[i].length_dw == 0) in radeon_cs_parser_init()
329 if (p->chunks[i].length_dw == 0) in radeon_cs_parser_init()
339 size = p->chunks[i].length_dw; in radeon_cs_parser_init()
360 if (p->chunks[i].length_dw > 1) in radeon_cs_parser_init()
362 if (p->chunks[i].length_dw > 2) in radeon_cs_parser_init()
555 if (parser->const_ib.length_dw) { in radeon_cs_ib_vm_chunk()
632 parser->const_ib.length_dw = ib_chunk->length_dw; in radeon_cs_ib_fill()
653 parser->ib.length_dw = ib_chunk->length_dw; in radeon_cs_ib_fill()
[all …]
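
In the command-submission parsers (radeon_cs.c here, amdgpu_cs.c further down), length_dw arrives from userspace in each chunk header, so it is copied once and sanity-checked before it sizes anything. The sketch below illustrates that kind of check with invented names (fake_chunk, validate_chunk); it is not the driver's actual validation logic.

/*
 * Sketch of the chunk-validation pattern visible above: length_dw is
 * user-supplied, so it is checked before the dword count is turned into
 * a byte size. Names are invented for illustration.
 */
#include <stdint.h>
#include <errno.h>
#include <stdio.h>

struct fake_chunk {
	uint32_t length_dw; /* user-supplied payload size in dwords */
};

static int validate_chunk(const struct fake_chunk *chunk, int is_ib_chunk)
{
	/* IB-style chunks must not be empty. */
	if (is_ib_chunk && chunk->length_dw == 0)
		return -EINVAL;

	/* Convert dwords to bytes only after the check. */
	size_t bytes = (size_t)chunk->length_dw * sizeof(uint32_t);
	printf("chunk payload: %zu bytes\n", bytes);
	return 0;
}

int main(void)
{
	struct fake_chunk ok = { .length_dw = 64 };
	struct fake_chunk bad = { .length_dw = 0 };

	printf("ok:  %d\n", validate_chunk(&ok, 1));
	printf("bad: %d\n", validate_chunk(&bad, 1));
	return 0;
}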
cik_sdma.c
156 radeon_ring_write(ring, ib->length_dw); in cik_sdma_ring_ib_execute()
731 ib.length_dw = 5; in cik_sdma_ib_test()
814 ib->ptr[ib->length_dw++] = bytes; in cik_sdma_vm_copy_pages()
818 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cik_sdma_vm_copy_pages()
857 ib->ptr[ib->length_dw++] = pe; in cik_sdma_vm_write_pages()
859 ib->ptr[ib->length_dw++] = ndw; in cik_sdma_vm_write_pages()
870 ib->ptr[ib->length_dw++] = value; in cik_sdma_vm_write_pages()
912 ib->ptr[ib->length_dw++] = flags; /* mask */ in cik_sdma_vm_set_pages()
913 ib->ptr[ib->length_dw++] = 0; in cik_sdma_vm_set_pages()
917 ib->ptr[ib->length_dw++] = 0; in cik_sdma_vm_set_pages()
[all …]
radeon_vm.c
412 ib.length_dw = 0; in radeon_vm_clear_bo()
416 WARN_ON(ib.length_dw > 64); in radeon_vm_clear_bo()
664 ib.length_dw = 0; in radeon_vm_update_page_directory()
701 if (ib.length_dw != 0) { in radeon_vm_update_page_directory()
705 WARN_ON(ib.length_dw > ndw); in radeon_vm_update_page_directory()
1002 ib.length_dw = 0; in radeon_vm_bo_update()
1020 WARN_ON(ib.length_dw > ndw); in radeon_vm_bo_update()
radeon_uvd.c
587 if (idx >= relocs_chunk->length_dw) { in radeon_uvd_cs_reloc()
589 idx, relocs_chunk->length_dw); in radeon_uvd_cs_reloc()
702 if (p->chunk_ib->length_dw % 16) { in radeon_uvd_cs_parse()
704 p->chunk_ib->length_dw); in radeon_uvd_cs_parse()
732 } while (p->idx < p->chunk_ib->length_dw); in radeon_uvd_cs_parse()
763 ib.length_dw = 16; in radeon_uvd_send_msg()
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_vce.c
471 ib->length_dw = 0; in amdgpu_vce_get_create_msg()
474 ib->ptr[ib->length_dw++] = handle; in amdgpu_vce_get_create_msg()
481 ib->ptr[ib->length_dw++] = 0x00000000; in amdgpu_vce_get_create_msg()
482 ib->ptr[ib->length_dw++] = 0x00000042; in amdgpu_vce_get_create_msg()
483 ib->ptr[ib->length_dw++] = 0x0000000a; in amdgpu_vce_get_create_msg()
501 ib->ptr[ib->length_dw++] = addr; in amdgpu_vce_get_create_msg()
550 ib->length_dw = 0; in amdgpu_vce_get_destroy_msg()
553 ib->ptr[ib->length_dw++] = handle; in amdgpu_vce_get_destroy_msg()
751 for (idx = 0; idx < ib->length_dw;) { in amdgpu_vce_ring_parse_cs()
817 for (idx = 0; idx < ib->length_dw;) { in amdgpu_vce_ring_parse_cs()
[all …]
si_dma.c
281 ib.length_dw = 4; in si_dma_ring_test_ib()
325 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in si_dma_vm_copy_pte()
326 ib->ptr[ib->length_dw++] = lower_32_bits(src); in si_dma_vm_copy_pte()
349 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in si_dma_vm_write_pte()
350 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in si_dma_vm_write_pte()
352 ib->ptr[ib->length_dw++] = lower_32_bits(value); in si_dma_vm_write_pte()
390 ib->ptr[ib->length_dw++] = pe; /* dst addr */ in si_dma_vm_set_pte_pde()
394 ib->ptr[ib->length_dw++] = value; /* value */ in si_dma_vm_set_pte_pde()
397 ib->ptr[ib->length_dw++] = 0; in si_dma_vm_set_pte_pde()
413 while (ib->length_dw & 0x7) in si_dma_ring_pad_ib()
[all …]
sdma_v2_4.c
267 amdgpu_ring_write(ring, ib->length_dw); in sdma_v2_4_ring_emit_ib()
634 ib.length_dw = 8; in sdma_v2_4_ring_test_ib()
679 ib->ptr[ib->length_dw++] = bytes; in sdma_v2_4_vm_copy_pte()
706 ib->ptr[ib->length_dw++] = pe; in sdma_v2_4_vm_write_pte()
708 ib->ptr[ib->length_dw++] = ndw; in sdma_v2_4_vm_write_pte()
741 ib->ptr[ib->length_dw++] = 0; in sdma_v2_4_vm_set_pte_pde()
758 pad_count = (-ib->length_dw) & 7; in sdma_v2_4_ring_pad_ib()
761 ib->ptr[ib->length_dw++] = in sdma_v2_4_ring_pad_ib()
765 ib->ptr[ib->length_dw++] = in sdma_v2_4_ring_pad_ib()
1214 ib->ptr[ib->length_dw++] = byte_count; in sdma_v2_4_emit_copy_buffer()
[all …]
cik_sdma.c
238 amdgpu_ring_write(ring, ib->length_dw); in cik_sdma_ring_emit_ib()
696 ib.length_dw = 5; in cik_sdma_ring_test_ib()
740 ib->ptr[ib->length_dw++] = bytes; in cik_sdma_vm_copy_pte()
769 ib->ptr[ib->length_dw++] = ndw; in cik_sdma_vm_write_pte()
802 ib->ptr[ib->length_dw++] = 0; in cik_sdma_vm_set_pte_pde()
819 pad_count = (-ib->length_dw) & 7; in cik_sdma_ring_pad_ib()
822 ib->ptr[ib->length_dw++] = in cik_sdma_ring_pad_ib()
826 ib->ptr[ib->length_dw++] = in cik_sdma_ring_pad_ib()
1324 ib->ptr[ib->length_dw++] = byte_count; in cik_sdma_emit_copy_buffer()
1350 ib->ptr[ib->length_dw++] = src_data; in cik_sdma_emit_fill_buffer()
[all …]
amdgpu_vcn.c
567 ib->length_dw = 16; in amdgpu_vcn_dec_send_msg()
701 ib->length_dw = 0; in amdgpu_vcn_dec_sw_send_msg()
817 ib->length_dw = 0; in amdgpu_vcn_enc_get_create_msg()
818 ib->ptr[ib->length_dw++] = 0x00000018; in amdgpu_vcn_enc_get_create_msg()
820 ib->ptr[ib->length_dw++] = handle; in amdgpu_vcn_enc_get_create_msg()
822 ib->ptr[ib->length_dw++] = addr; in amdgpu_vcn_enc_get_create_msg()
823 ib->ptr[ib->length_dw++] = 0x0000000b; in amdgpu_vcn_enc_get_create_msg()
825 ib->ptr[ib->length_dw++] = 0x00000014; in amdgpu_vcn_enc_get_create_msg()
871 ib->length_dw = 0; in amdgpu_vcn_enc_get_destroy_msg()
874 ib->ptr[ib->length_dw++] = handle; in amdgpu_vcn_enc_get_destroy_msg()
[all …]
sdma_v3_0.c
441 amdgpu_ring_write(ring, ib->length_dw); in sdma_v3_0_ring_emit_ib()
906 ib.length_dw = 8; in sdma_v3_0_ring_test_ib()
950 ib->ptr[ib->length_dw++] = bytes; in sdma_v3_0_vm_copy_pte()
979 ib->ptr[ib->length_dw++] = ndw; in sdma_v3_0_vm_write_pte()
1012 ib->ptr[ib->length_dw++] = 0; in sdma_v3_0_vm_set_pte_pde()
1029 pad_count = (-ib->length_dw) & 7; in sdma_v3_0_ring_pad_ib()
1032 ib->ptr[ib->length_dw++] = in sdma_v3_0_ring_pad_ib()
1036 ib->ptr[ib->length_dw++] = in sdma_v3_0_ring_pad_ib()
1652 ib->ptr[ib->length_dw++] = byte_count; in sdma_v3_0_emit_copy_buffer()
1678 ib->ptr[ib->length_dw++] = src_data; in sdma_v3_0_emit_fill_buffer()
[all …]
sdma_v5_2.c
362 amdgpu_ring_write(ring, ib->length_dw); in sdma_v5_2_ring_emit_ib()
999 ib.length_dw = 8; in sdma_v5_2_ring_test_ib()
1047 ib->ptr[ib->length_dw++] = bytes - 1; in sdma_v5_2_vm_copy_pte()
1077 ib->ptr[ib->length_dw++] = ndw - 1; in sdma_v5_2_vm_write_pte()
1111 ib->ptr[ib->length_dw++] = 0; in sdma_v5_2_vm_set_pte_pde()
1129 pad_count = (-ib->length_dw) & 0x7; in sdma_v5_2_ring_pad_ib()
1132 ib->ptr[ib->length_dw++] = in sdma_v5_2_ring_pad_ib()
1136 ib->ptr[ib->length_dw++] = in sdma_v5_2_ring_pad_ib()
1750 ib->ptr[ib->length_dw++] = byte_count - 1; in sdma_v5_2_emit_copy_buffer()
1776 ib->ptr[ib->length_dw++] = src_data; in sdma_v5_2_emit_fill_buffer()
[all …]
uvd_v6_0.c
227 ib->length_dw = 0; in uvd_v6_0_enc_get_create_msg()
228 ib->ptr[ib->length_dw++] = 0x00000018; in uvd_v6_0_enc_get_create_msg()
230 ib->ptr[ib->length_dw++] = handle; in uvd_v6_0_enc_get_create_msg()
231 ib->ptr[ib->length_dw++] = 0x00010000; in uvd_v6_0_enc_get_create_msg()
233 ib->ptr[ib->length_dw++] = addr; in uvd_v6_0_enc_get_create_msg()
235 ib->ptr[ib->length_dw++] = 0x00000014; in uvd_v6_0_enc_get_create_msg()
237 ib->ptr[ib->length_dw++] = 0x0000001c; in uvd_v6_0_enc_get_create_msg()
238 ib->ptr[ib->length_dw++] = 0x00000001; in uvd_v6_0_enc_get_create_msg()
291 ib->length_dw = 0; in uvd_v6_0_enc_get_destroy_msg()
294 ib->ptr[ib->length_dw++] = handle; in uvd_v6_0_enc_get_destroy_msg()
[all …]
sdma_v5_0.c
475 amdgpu_ring_write(ring, ib->length_dw); in sdma_v5_0_ring_emit_ib()
1078 ib.length_dw = 8; in sdma_v5_0_ring_test_ib()
1126 ib->ptr[ib->length_dw++] = bytes - 1; in sdma_v5_0_vm_copy_pte()
1156 ib->ptr[ib->length_dw++] = ndw - 1; in sdma_v5_0_vm_write_pte()
1190 ib->ptr[ib->length_dw++] = 0; in sdma_v5_0_vm_set_pte_pde()
1207 pad_count = (-ib->length_dw) & 0x7; in sdma_v5_0_ring_pad_ib()
1210 ib->ptr[ib->length_dw++] = in sdma_v5_0_ring_pad_ib()
1214 ib->ptr[ib->length_dw++] = in sdma_v5_0_ring_pad_ib()
1782 ib->ptr[ib->length_dw++] = byte_count - 1; in sdma_v5_0_emit_copy_buffer()
1808 ib->ptr[ib->length_dw++] = src_data; in sdma_v5_0_emit_fill_buffer()
[all …]
uvd_v7_0.c
234 ib->length_dw = 0; in uvd_v7_0_enc_get_create_msg()
235 ib->ptr[ib->length_dw++] = 0x00000018; in uvd_v7_0_enc_get_create_msg()
237 ib->ptr[ib->length_dw++] = handle; in uvd_v7_0_enc_get_create_msg()
238 ib->ptr[ib->length_dw++] = 0x00000000; in uvd_v7_0_enc_get_create_msg()
240 ib->ptr[ib->length_dw++] = addr; in uvd_v7_0_enc_get_create_msg()
242 ib->ptr[ib->length_dw++] = 0x00000014; in uvd_v7_0_enc_get_create_msg()
244 ib->ptr[ib->length_dw++] = 0x0000001c; in uvd_v7_0_enc_get_create_msg()
245 ib->ptr[ib->length_dw++] = 0x00000000; in uvd_v7_0_enc_get_create_msg()
297 ib->length_dw = 0; in uvd_v7_0_enc_get_destroy_msg()
300 ib->ptr[ib->length_dw++] = handle; in uvd_v7_0_enc_get_destroy_msg()
[all …]
sdma_v4_0.c
889 amdgpu_ring_write(ring, ib->length_dw); in sdma_v4_0_ring_emit_ib()
1648 ib.length_dw = 8; in sdma_v4_0_ring_test_ib()
1694 ib->ptr[ib->length_dw++] = bytes - 1; in sdma_v4_0_vm_copy_pte()
1724 ib->ptr[ib->length_dw++] = ndw - 1; in sdma_v4_0_vm_write_pte()
1758 ib->ptr[ib->length_dw++] = 0; in sdma_v4_0_vm_set_pte_pde()
1774 pad_count = (-ib->length_dw) & 7; in sdma_v4_0_ring_pad_ib()
1777 ib->ptr[ib->length_dw++] = in sdma_v4_0_ring_pad_ib()
1781 ib->ptr[ib->length_dw++] = in sdma_v4_0_ring_pad_ib()
2646 ib->ptr[ib->length_dw++] = byte_count - 1; in sdma_v4_0_emit_copy_buffer()
2672 ib->ptr[ib->length_dw++] = src_data; in sdma_v4_0_emit_fill_buffer()
[all …]
gfx_v9_4_2.c
377 ib->length_dw = 0; in gfx_v9_4_2_run_shader()
384 ib->ptr[ib->length_dw++] = init_regs[i].reg_value; in gfx_v9_4_2_run_shader()
392 ib->ptr[ib->length_dw++] = lower_32_bits(gpu_addr); in gfx_v9_4_2_run_shader()
393 ib->ptr[ib->length_dw++] = upper_32_bits(gpu_addr); in gfx_v9_4_2_run_shader()
399 ib->ptr[ib->length_dw++] = lower_32_bits(wb_gpu_addr); in gfx_v9_4_2_run_shader()
400 ib->ptr[ib->length_dw++] = upper_32_bits(wb_gpu_addr); in gfx_v9_4_2_run_shader()
401 ib->ptr[ib->length_dw++] = pattern; in gfx_v9_4_2_run_shader()
405 ib->ptr[ib->length_dw++] = compute_dim_x; /* x */ in gfx_v9_4_2_run_shader()
406 ib->ptr[ib->length_dw++] = 1; /* y */ in gfx_v9_4_2_run_shader()
407 ib->ptr[ib->length_dw++] = 1; /* z */ in gfx_v9_4_2_run_shader()
[all …]
amdgpu_vm_sdma.c
102 WARN_ON(ib->length_dw == 0); in amdgpu_vm_sdma_commit()
104 WARN_ON(ib->length_dw > p->num_dw_left); in amdgpu_vm_sdma_commit()
218 ndw -= p->job->ibs->length_dw; in amdgpu_vm_sdma_update()
gfx_v9_0.c
1129 ib.length_dw = 5; in gfx_v9_0_ring_test_ib()
4659 ib.length_dw = 0; in gfx_v9_0_do_edc_gpr_workarounds()
4680 ib.ptr[ib.length_dw++] = 1; /* y */ in gfx_v9_0_do_edc_gpr_workarounds()
4681 ib.ptr[ib.length_dw++] = 1; /* z */ in gfx_v9_0_do_edc_gpr_workarounds()
4682 ib.ptr[ib.length_dw++] = in gfx_v9_0_do_edc_gpr_workarounds()
4708 ib.ptr[ib.length_dw++] = 1; /* y */ in gfx_v9_0_do_edc_gpr_workarounds()
4709 ib.ptr[ib.length_dw++] = 1; /* z */ in gfx_v9_0_do_edc_gpr_workarounds()
4710 ib.ptr[ib.length_dw++] = in gfx_v9_0_do_edc_gpr_workarounds()
4736 ib.ptr[ib.length_dw++] = 1; /* y */ in gfx_v9_0_do_edc_gpr_workarounds()
4737 ib.ptr[ib.length_dw++] = 1; /* z */ in gfx_v9_0_do_edc_gpr_workarounds()
[all …]
gfx_v8_0.c
910 ib.length_dw = 5; in gfx_v8_0_ring_test_ib()
1579 ib.length_dw = 0; in gfx_v8_0_do_edc_gpr_workarounds()
1597 ib.ptr[ib.length_dw++] = 8; /* x */ in gfx_v8_0_do_edc_gpr_workarounds()
1598 ib.ptr[ib.length_dw++] = 1; /* y */ in gfx_v8_0_do_edc_gpr_workarounds()
1599 ib.ptr[ib.length_dw++] = 1; /* z */ in gfx_v8_0_do_edc_gpr_workarounds()
1600 ib.ptr[ib.length_dw++] = in gfx_v8_0_do_edc_gpr_workarounds()
1623 ib.ptr[ib.length_dw++] = 8; /* x */ in gfx_v8_0_do_edc_gpr_workarounds()
1624 ib.ptr[ib.length_dw++] = 1; /* y */ in gfx_v8_0_do_edc_gpr_workarounds()
1625 ib.ptr[ib.length_dw++] = 1; /* z */ in gfx_v8_0_do_edc_gpr_workarounds()
1626 ib.ptr[ib.length_dw++] = in gfx_v8_0_do_edc_gpr_workarounds()
[all …]
amdgpu_cs.c
167 p->chunks[i].length_dw = user_chunk.length_dw; in amdgpu_cs_parser_init()
169 size = p->chunks[i].length_dw; in amdgpu_cs_parser_init()
191 if (p->chunks[i].length_dw * sizeof(uint32_t) < size) { in amdgpu_cs_parser_init()
205 if (p->chunks[i].length_dw * sizeof(uint32_t) < size) { in amdgpu_cs_parser_init()
937 ib->length_dw = chunk_ib->ib_bytes / 4; in amdgpu_cs_ib_fill()
960 num_deps = chunk->length_dw * 4 / in amdgpu_cs_process_fence_dep()
1033 num_deps = chunk->length_dw * 4 / in amdgpu_cs_process_syncobj_in_dep()
1054 num_deps = chunk->length_dw * 4 / in amdgpu_cs_process_syncobj_timeline_in_dep()
1076 num_deps = chunk->length_dw * 4 / in amdgpu_cs_process_syncobj_out_dep()
1112 num_deps = chunk->length_dw * 4 / in amdgpu_cs_process_syncobj_timeline_out_dep()
amdgpu_ring.c
107 while (ib->length_dw & ring->funcs->align_mask) in amdgpu_ring_generic_pad_ib()
108 ib->ptr[ib->length_dw++] = ring->funcs->nop; in amdgpu_ring_generic_pad_ib()
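
Two padding idioms show up in the amdgpu hits: the generic helper above loops until length_dw reaches the ring's alignment, while the SDMA engines (cik_sdma.c and the sdma_v*.c files) first compute pad_count = (-ib->length_dw) & 7 and append that many NOPs. The sketch below only demonstrates that the two forms agree for an 8-dword alignment; NOP_PACKET, fake_ib and the helper names are placeholders, not the kernel's encodings.

/*
 * Illustrative sketch of the two padding idioms seen in the results;
 * align_mask, NOP_PACKET and fake_ib are assumptions, not kernel code.
 */
#include <stdint.h>
#include <assert.h>

#define NOP_PACKET 0x00000000u /* placeholder NOP encoding */

struct fake_ib {
	uint32_t ptr[64];
	uint32_t length_dw;
};

/* Generic variant: append NOPs until length_dw hits the alignment. */
static void pad_ib_loop(struct fake_ib *ib, uint32_t align_mask)
{
	while (ib->length_dw & align_mask)
		ib->ptr[ib->length_dw++] = NOP_PACKET;
}

/* SDMA-style variant: compute the shortfall to an 8-dword boundary first. */
static void pad_ib_count(struct fake_ib *ib)
{
	uint32_t pad_count = (-ib->length_dw) & 7;

	for (uint32_t i = 0; i < pad_count; i++)
		ib->ptr[ib->length_dw++] = NOP_PACKET;
}

int main(void)
{
	struct fake_ib a = { .length_dw = 5 };
	struct fake_ib b = { .length_dw = 5 };

	pad_ib_loop(&a, 7);
	pad_ib_count(&b);
	assert(a.length_dw == 8 && b.length_dw == 8);
	return 0;
}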
/linux/drivers/net/ethernet/qlogic/qed/
qed_hw.c
462 le16_to_cpu(p_command->length_dw), in qed_dmae_post_command()
477 le16_to_cpu(p_command->length_dw), in qed_dmae_post_command()
608 u32 length_dw) in qed_dmae_execute_sub_operation() argument
626 length_dw * sizeof(u32)); in qed_dmae_execute_sub_operation()
647 cmd->length_dw = cpu_to_le16((u16)length_dw); in qed_dmae_execute_sub_operation()
656 src_addr, dst_addr, length_dw); in qed_dmae_execute_sub_operation()
663 length_dw * sizeof(u32)); in qed_dmae_execute_sub_operation()
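
In the qed NIC driver, length_dw plays a similar role but as a field of the DMAE command descriptor: it is stored as a little-endian 16-bit dword count and converted to bytes with length_dw * sizeof(u32). A small userspace sketch of that conversion follows; fake_dmae_cmd is invented, and htole16()/le16toh() from glibc's <endian.h> stand in for the kernel's cpu_to_le16()/le16_to_cpu().

/*
 * Userspace sketch of the qed pattern above: a dword count stored as a
 * little-endian u16 in the command, converted to bytes for the copy size.
 * fake_dmae_cmd is invented for illustration.
 */
#include <endian.h>
#include <stdint.h>
#include <stdio.h>

struct fake_dmae_cmd {
	uint16_t length_dw; /* transfer size in dwords, little-endian */
};

int main(void)
{
	uint32_t length_dw = 16;
	struct fake_dmae_cmd cmd;

	cmd.length_dw = htole16((uint16_t)length_dw);
	printf("transfer: %u dwords = %zu bytes\n",
	       (unsigned)le16toh(cmd.length_dw),
	       length_dw * sizeof(uint32_t));
	return 0;
}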

Completed in 91 milliseconds
