Lines Matching refs:m0

410     s_mov_b32	    s_save_m0,		m0								    //save M0
483 s_mov_b32 m0, 0x0 //SGPR initial index value =0
497 s_add_u32 m0, m0, 16 //next sgpr index
498 s_cmp_lt_u32 m0, s_save_alloc_size //scc = (m0 < s_save_alloc_size) ? 1 : 0
567 s_mov_b32 m0, 0x0 //lds_offset initial value = 0
604 s_mov_b32 m0, 0x10000
643 s_mov_b32 m0, 0x4 //VGPR initial index value = 4
644 s_cmp_lt_u32 m0, s_save_alloc_size
648 s_set_gpr_idx_on m0, 0x1 //M0[7:0] = M0[7:0] and M0[15:12] = 0x1
658 s_add_u32 m0, m0, 4
659 s_cmp_lt_u32 m0, s_save_alloc_size
674 s_add_u32 m0, m0, 4 //next vgpr index
676 s_cmp_lt_u32 m0, s_save_alloc_size //scc = (m0 < s_save_alloc_size) ? 1 : 0
693 s_mov_b32 m0, 0x0 //VGPR initial index value =0
694 s_set_gpr_idx_on m0, 0x1 //M0[7:0] = M0[7:0] and M0[15:12] = 0x1
707 s_add_u32 m0, m0, 4
708 s_cmp_lt_u32 m0, s_save_alloc_size
722 s_add_u32 m0, m0, 4
724 s_cmp_lt_u32 m0, s_save_alloc_size
777 s_mov_b32 m0, 0x0 //lds_offset initial value = 0
787 s_add_u32 m0, m0, LDS_RESTORE_GRANULARITY_BYTES // 128/320 DW
789 s_cmp_lt_u32 m0, s_restore_alloc_size //scc=(m0 < s_restore_alloc_size) ? 1 : 0
808 s_mov_b32 m0, 4 //VGPR initial index value = 4
809 s_set_gpr_idx_on m0, 0x8 //M0[7:0] = M0[7:0] and M0[15:12] = 0x8
817 s_add_u32 m0, m0, 4 //next vgpr index
819 s_cmp_lt_u32 m0, s_restore_alloc_size //scc = (m0 < s_restore_alloc_size) ? 1 : 0
832 s_mov_b32 m0, 0
833 s_set_gpr_idx_on m0, 0x8 //M0[7:0] = M0[7:0] and M0[15:12] = 0x8
842 s_add_u32 m0, m0, 4 //next vgpr index
844 s_cmp_lt_u32 m0, s_restore_alloc_size //scc = (m0 < s_restore_alloc_size) ? 1 : 0
871 s_mov_b32 m0, s_restore_alloc_size
877 s_sub_u32 m0, m0, 16 // Restore from S[n] to S[0]
889 s_cmp_eq_u32 m0, 0 //scc = (m0 == 0) ? 1 : 0
919 s_mov_b32 m0, s_restore_m0
971 s_mov_b32 exec_lo, m0 //assuming exec_lo is not needed anymore from this point on
972 s_mov_b32 m0, s_mem_offset
973 s_buffer_store_dword s, s_rsrc, m0 glc:1
976 s_mov_b32 m0, exec_lo