Lines matching refs:ib — references to the indirect buffer (ib) in drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c. The leading number on each line is the source line in that file; the trailing note names the enclosing function and whether ib is an argument or a local.

248 struct amdgpu_ib *ib, in sdma_v2_4_ring_emit_ib() argument
259 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v2_4_ring_emit_ib()
260 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v2_4_ring_emit_ib()
261 amdgpu_ring_write(ring, ib->length_dw); in sdma_v2_4_ring_emit_ib()
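
The three amdgpu_ring_write() calls above (source lines 259-261) form the body of the packet that hands the IB to the engine: the IB's GPU address, masked to 32-byte alignment (the low five bits are dropped), followed by its size in dwords. A minimal standalone sketch of that dword layout, using an illustrative struct rather than the driver's struct amdgpu_ib:

#include <stdint.h>

/* Illustrative stand-in for struct amdgpu_ib; the real driver writes these
 * dwords into the ring buffer via amdgpu_ring_write(). */
struct example_ib {
	uint64_t gpu_addr;   /* GPU VA of the command buffer, 32-byte aligned */
	uint32_t length_dw;  /* payload size in dwords */
};

static void emit_ib_payload(uint32_t *out, const struct example_ib *ib)
{
	out[0] = (uint32_t)ib->gpu_addr & 0xffffffe0u;  /* address, low 32 bits */
	out[1] = (uint32_t)(ib->gpu_addr >> 32);        /* address, high 32 bits */
	out[2] = ib->length_dw;                         /* IB size in dwords */
}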
593 struct amdgpu_ib ib; in sdma_v2_4_ring_test_ib() local
607 memset(&ib, 0, sizeof(ib)); in sdma_v2_4_ring_test_ib()
609 AMDGPU_IB_POOL_DIRECT, &ib); in sdma_v2_4_ring_test_ib()
613 ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v2_4_ring_test_ib()
615 ib.ptr[1] = lower_32_bits(gpu_addr); in sdma_v2_4_ring_test_ib()
616 ib.ptr[2] = upper_32_bits(gpu_addr); in sdma_v2_4_ring_test_ib()
617 ib.ptr[3] = SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(1); in sdma_v2_4_ring_test_ib()
618 ib.ptr[4] = 0xDEADBEEF; in sdma_v2_4_ring_test_ib()
619 ib.ptr[5] = SDMA_PKT_HEADER_OP(SDMA_OP_NOP); in sdma_v2_4_ring_test_ib()
620 ib.ptr[6] = SDMA_PKT_HEADER_OP(SDMA_OP_NOP); in sdma_v2_4_ring_test_ib()
621 ib.ptr[7] = SDMA_PKT_HEADER_OP(SDMA_OP_NOP); in sdma_v2_4_ring_test_ib()
622 ib.length_dw = 8; in sdma_v2_4_ring_test_ib()
624 r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f); in sdma_v2_4_ring_test_ib()
642 amdgpu_ib_free(adev, &ib, NULL); in sdma_v2_4_ring_test_ib()
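
Source lines 593-642 are the IB self-test: an 8-dword IB is built containing one untiled WRITE of 0xDEADBEEF to a scratch GPU address, padded to eight dwords with NOPs, scheduled with amdgpu_ib_schedule(), and freed with amdgpu_ib_free() after its fence signals and the written value has been checked. A standalone sketch of the same 8-dword layout; the OP_* and field macros below are simplified stand-ins for the driver's SDMA packet headers, not the real encodings:

#include <stdint.h>

/* Simplified, illustrative packet macros (placeholders for SDMA_PKT_HEADER_OP()
 * and SDMA_PKT_WRITE_UNTILED_DW_3_COUNT()); the values are not the hardware ones. */
#define OP_WRITE            2
#define OP_NOP              0
#define PKT_OP(op)          ((uint32_t)(op) & 0xff)
#define WRITE_DW3_COUNT(n)  ((uint32_t)(n))

static uint32_t build_test_ib(uint32_t ib[8], uint64_t gpu_addr)
{
	ib[0] = PKT_OP(OP_WRITE);           /* WRITE (untiled) packet header */
	ib[1] = (uint32_t)gpu_addr;         /* destination address, low 32 bits */
	ib[2] = (uint32_t)(gpu_addr >> 32); /* destination address, high 32 bits */
	ib[3] = WRITE_DW3_COUNT(1);         /* one dword of payload */
	ib[4] = 0xDEADBEEF;                 /* test pattern read back after the fence */
	ib[5] = PKT_OP(OP_NOP);             /* pad the IB out to 8 dwords */
	ib[6] = PKT_OP(OP_NOP);
	ib[7] = PKT_OP(OP_NOP);
	return 8;                           /* becomes ib.length_dw */
}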
659 static void sdma_v2_4_vm_copy_pte(struct amdgpu_ib *ib, in sdma_v2_4_vm_copy_pte() argument
665 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) | in sdma_v2_4_vm_copy_pte()
667 ib->ptr[ib->length_dw++] = bytes; in sdma_v2_4_vm_copy_pte()
668 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v2_4_vm_copy_pte()
669 ib->ptr[ib->length_dw++] = lower_32_bits(src); in sdma_v2_4_vm_copy_pte()
670 ib->ptr[ib->length_dw++] = upper_32_bits(src); in sdma_v2_4_vm_copy_pte()
671 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in sdma_v2_4_vm_copy_pte()
672 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in sdma_v2_4_vm_copy_pte()
686 static void sdma_v2_4_vm_write_pte(struct amdgpu_ib *ib, uint64_t pe, in sdma_v2_4_vm_write_pte() argument
692 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v2_4_vm_write_pte()
694 ib->ptr[ib->length_dw++] = pe; in sdma_v2_4_vm_write_pte()
695 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in sdma_v2_4_vm_write_pte()
696 ib->ptr[ib->length_dw++] = ndw; in sdma_v2_4_vm_write_pte()
698 ib->ptr[ib->length_dw++] = lower_32_bits(value); in sdma_v2_4_vm_write_pte()
699 ib->ptr[ib->length_dw++] = upper_32_bits(value); in sdma_v2_4_vm_write_pte()
716 static void sdma_v2_4_vm_set_pte_pde(struct amdgpu_ib *ib, uint64_t pe, in sdma_v2_4_vm_set_pte_pde() argument
721 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_GEN_PTEPDE); in sdma_v2_4_vm_set_pte_pde()
722 ib->ptr[ib->length_dw++] = lower_32_bits(pe); /* dst addr */ in sdma_v2_4_vm_set_pte_pde()
723 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in sdma_v2_4_vm_set_pte_pde()
724 ib->ptr[ib->length_dw++] = lower_32_bits(flags); /* mask */ in sdma_v2_4_vm_set_pte_pde()
725 ib->ptr[ib->length_dw++] = upper_32_bits(flags); in sdma_v2_4_vm_set_pte_pde()
726 ib->ptr[ib->length_dw++] = lower_32_bits(addr); /* value */ in sdma_v2_4_vm_set_pte_pde()
727 ib->ptr[ib->length_dw++] = upper_32_bits(addr); in sdma_v2_4_vm_set_pte_pde()
728 ib->ptr[ib->length_dw++] = incr; /* increment size */ in sdma_v2_4_vm_set_pte_pde()
729 ib->ptr[ib->length_dw++] = 0; in sdma_v2_4_vm_set_pte_pde()
730 ib->ptr[ib->length_dw++] = count; /* number of entries */ in sdma_v2_4_vm_set_pte_pde()
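
The three helpers above fill page-table updates into an IB: sdma_v2_4_vm_copy_pte() emits a linear copy of ready-made entries from src to the page-table address pe, sdma_v2_4_vm_write_pte() emits a WRITE packet followed by ndw dwords of entry values, and sdma_v2_4_vm_set_pte_pde() emits a single GENERATE_PTE_PDE packet carrying a destination address, a 64-bit mask (flags), an initial value (addr), a per-entry increment and an entry count, so the engine generates the run of entries itself. A standalone sketch of the ten-dword GENERATE_PTE_PDE layout listed above; the opcode value is illustrative, not the hardware encoding:

#include <stdint.h>

#define OP_GEN_PTEPDE 0x0c  /* illustrative stand-in for SDMA_OP_GEN_PTEPDE */

static uint32_t pack_set_pte_pde(uint32_t *p, uint64_t pe, uint64_t addr,
				 uint32_t count, uint32_t incr, uint64_t flags)
{
	uint32_t n = 0;

	p[n++] = OP_GEN_PTEPDE;           /* packet header */
	p[n++] = (uint32_t)pe;            /* destination (page table) address, low */
	p[n++] = (uint32_t)(pe >> 32);    /* destination address, high */
	p[n++] = (uint32_t)flags;         /* mask, low 32 bits */
	p[n++] = (uint32_t)(flags >> 32); /* mask, high 32 bits */
	p[n++] = (uint32_t)addr;          /* initial entry value, low 32 bits */
	p[n++] = (uint32_t)(addr >> 32);  /* initial entry value, high 32 bits */
	p[n++] = incr;                    /* increment applied per entry */
	p[n++] = 0;
	p[n++] = count;                   /* number of entries to generate */
	return n;                         /* dwords appended to the IB */
}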
740 static void sdma_v2_4_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib) in sdma_v2_4_ring_pad_ib() argument
746 pad_count = (-ib->length_dw) & 7; in sdma_v2_4_ring_pad_ib()
749 ib->ptr[ib->length_dw++] = in sdma_v2_4_ring_pad_ib()
753 ib->ptr[ib->length_dw++] = in sdma_v2_4_ring_pad_ib()
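
sdma_v2_4_ring_pad_ib() (source lines 740-753) pads the IB with NOP packets so its length becomes a multiple of 8 dwords; (-length_dw) & 7 is exactly the number of filler dwords required. A small self-contained check of that arithmetic:

#include <assert.h>
#include <stdint.h>

/* (-len) & 7 gives the distance from len up to the next multiple of 8. */
static uint32_t nop_pad_count(uint32_t length_dw)
{
	return (0u - length_dw) & 7u;
}

int main(void)
{
	assert(nop_pad_count(5) == 3);  /* 5 + 3 = 8 */
	assert(nop_pad_count(8) == 0);  /* already aligned */
	assert(nop_pad_count(9) == 7);  /* 9 + 7 = 16 */
	return 0;
}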
1195 static void sdma_v2_4_emit_copy_buffer(struct amdgpu_ib *ib, in sdma_v2_4_emit_copy_buffer() argument
1201 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) | in sdma_v2_4_emit_copy_buffer()
1203 ib->ptr[ib->length_dw++] = byte_count; in sdma_v2_4_emit_copy_buffer()
1204 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v2_4_emit_copy_buffer()
1205 ib->ptr[ib->length_dw++] = lower_32_bits(src_offset); in sdma_v2_4_emit_copy_buffer()
1206 ib->ptr[ib->length_dw++] = upper_32_bits(src_offset); in sdma_v2_4_emit_copy_buffer()
1207 ib->ptr[ib->length_dw++] = lower_32_bits(dst_offset); in sdma_v2_4_emit_copy_buffer()
1208 ib->ptr[ib->length_dw++] = upper_32_bits(dst_offset); in sdma_v2_4_emit_copy_buffer()
1221 static void sdma_v2_4_emit_fill_buffer(struct amdgpu_ib *ib, in sdma_v2_4_emit_fill_buffer() argument
1226 ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_CONST_FILL); in sdma_v2_4_emit_fill_buffer()
1227 ib->ptr[ib->length_dw++] = lower_32_bits(dst_offset); in sdma_v2_4_emit_fill_buffer()
1228 ib->ptr[ib->length_dw++] = upper_32_bits(dst_offset); in sdma_v2_4_emit_fill_buffer()
1229 ib->ptr[ib->length_dw++] = src_data; in sdma_v2_4_emit_fill_buffer()
1230 ib->ptr[ib->length_dw++] = byte_count; in sdma_v2_4_emit_fill_buffer()
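
The last two helpers pack the buffer-copy and constant-fill commands: a 7-dword COPY_LINEAR packet (byte count, a zero endian-swap dword, then source and destination addresses) and a 5-dword CONSTANT_FILL packet (destination address, 32-bit fill pattern, byte count). A standalone sketch of the fill layout listed above; OP_CONST_FILL is an illustrative stand-in for SDMA_PKT_HEADER_OP(SDMA_OP_CONST_FILL):

#include <stdint.h>

#define OP_CONST_FILL 0x0b  /* illustrative value only */

static uint32_t pack_const_fill(uint32_t *p, uint64_t dst, uint32_t pattern,
				uint32_t byte_count)
{
	uint32_t n = 0;

	p[n++] = OP_CONST_FILL;          /* packet header */
	p[n++] = (uint32_t)dst;          /* destination address, low 32 bits */
	p[n++] = (uint32_t)(dst >> 32);  /* destination address, high 32 bits */
	p[n++] = pattern;                /* 32-bit fill value */
	p[n++] = byte_count;             /* size of the fill in bytes */
	return n;                        /* dwords appended to the IB */
}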