Lines Matching refs:mov

66 mov loc0=rp
68 mov out0=in0 // filename
70 mov out1=in1 // argv
71 mov out2=in2 // envp
75 mov ar.pfs=loc1 // restore ar.pfs
79 mov rp=loc0
80 (p6) mov ar.pfs=r0 // clear ar.pfs on success
89 mov ar.unat=0; mov ar.lc=0
90 mov r4=0; mov f2=f0; mov b1=r0
91 mov r5=0; mov f3=f0; mov b2=r0
92 mov r6=0; mov f4=f0; mov b3=r0
93 mov r7=0; mov f5=f0; mov b4=r0
94 ldf.fill f12=[sp]; mov f13=f0; mov b5=r0
95 ldf.fill f14=[sp]; ldf.fill f15=[sp]; mov f16=f0
96 ldf.fill f17=[sp]; ldf.fill f18=[sp]; mov f19=f0
97 ldf.fill f20=[sp]; ldf.fill f21=[sp]; mov f22=f0
98 ldf.fill f23=[sp]; ldf.fill f24=[sp]; mov f25=f0
99 ldf.fill f26=[sp]; ldf.fill f27=[sp]; mov f28=f0
100 ldf.fill f29=[sp]; ldf.fill f30=[sp]; mov f31=f0
115 mov loc0=rp
116 mov loc1=r16 // save ar.pfs across ia64_clone
118 mov out0=in0
119 mov out1=in1
120 mov out2=in2
121 mov out3=in3
122 mov out4=in4
123 mov out5=in5
127 mov ar.pfs=loc1
128 mov rp=loc0
143 mov loc0=rp
144 mov loc1=r16 // save ar.pfs across ia64_clone
146 mov out0=in0
147 mov out1=in1
148 mov out2=16 // stacksize (compensates for 16-byte scratch area)
149 mov out3=in3
150 mov out4=in4
151 mov out5=in5
155 mov ar.pfs=loc1
156 mov rp=loc0
174 mov r27=IA64_KR(CURRENT_STACK)
191 mov r8=r13 // return pointer to previously running task
192 mov r13=in0 // set "current" pointer
207 mov r25=IA64_GRANULE_SHIFT<<2
212 mov r25=IA64_TR_CURRENT_STACK
241 mov r17=ar.unat // preserve caller's
274 mov.m ar.rsc=0 // put RSE in mode: enforced lazy, little endian, pl 0
278 mov.m r18=ar.fpsr // preserve fpsr
282 mov.m r19=ar.rnat
283 mov r21=b0
287 mov r22=b1
290 mov.m r29=ar.unat
291 mov.m r20=ar.bspstore
292 mov r23=b2
295 mov r24=b3
299 mov r25=b4
300 mov r26=b5
304 mov r21=ar.lc // I-unit
342 mov r21=pr
349 mov ar.rsc=3 // put RSE back into eager mode, pl 0
367 mov ar.rsc=0 // put RSE into enforced lazy mode
409 mov b0=r21
413 mov b1=r22
417 mov b2=r23
419 mov ar.bspstore=r27
420 mov ar.unat=r29 // establish unat holding the NaT bits for r4-r7
421 mov b3=r24
425 mov b4=r25
429 mov b5=r26
433 mov ar.pfs=r16
437 mov ar.lc=r17
441 mov pr=r28,-1
446 mov ar.unat=r18 // restore caller's unat
447 mov ar.rnat=r30 // must restore after bspstore but before rsc!
448 mov ar.fpsr=r19 // restore fpsr
449 mov ar.rsc=3 // put RSE back into eager mode, pl 0
480 mov r10=0
498 mov r3=NR_syscalls - 1
509 mov b6=r20
515 mov r10=0
534 (p6) mov r10=-1
535 (p6) mov r8=r9
560 mov loc0=rp
561 mov loc2=gp
562 mov out0=r5 // arg
565 mov b6 = r14
569 .ret12: mov gp=loc2
570 mov rp=loc0
571 mov ar.pfs=loc1
598 mov r8=0
610 mov r10=r0 // clear error indication in r10
679 (pUStk) mov r21=0 // r21 <- 0
708 mov r16=ar.bsp // M2 get existing backing store pointer
735 mov r22=r0 // A clear r22
748 mov f6=f0 // F clear f6
752 mov f7=f0 // F clear f7
756 (pUStk) mov r17=1 // A
764 mov f8=f0 // F clear f8
768 mov b6=r18 // I0 restore b6
771 mov f9=f0 // F clear f9
779 mov r19=ar.bsp // M2 get new backing store pointer
781 mov f10=f0 // F clear f10
783 mov r22=r0 // A clear r22
787 mov r19=ar.bsp // M2 get new backing store pointer
788 mov f10=f0 // F clear f10
794 mov.m ar.csd=r0 // M2 clear ar.csd
795 mov.m ar.ccv=r0 // M2 clear ar.ccv
796 mov b7=r14 // I0 clear b7 (hint with __kernel_syscall_via_epc)
798 mov.m ar.ssd=r0 // M2 clear ar.ssd
799 mov f11=f0 // F clear f11
823 (pUStk) mov r21=0 // r21 <- 0
872 mov ar.csd=r30
873 mov ar.ssd=r31
880 mov b6=r28
884 mov b7=r29
904 mov ar.ccv=r15
909 (pUStk) mov r18=IA64_KR(CURRENT) // M2 (12 cycle read latency)
960 (pUStk) mov r17=1
975 mov r16=ar.bsp // get existing backing store pointer
981 mov r16=ar.bsp // get existing backing store pointer
994 mov r19=ar.bsp // get new backing store pointer
1022 mov ar.rsc=r19 // load ar.rsc to be used for "loadrs"
1024 mov in1=0
1040 mov loc1=0
1042 mov loc2=0
1044 mov loc3=0
1045 mov loc4=0
1049 mov loc5=0
1053 mov loc6=0
1054 mov loc7=0
1062 mov loc1=0
1063 mov loc2=0
1065 mov loc3=0
1066 mov loc4=0
1067 mov loc5=0
1068 mov loc6=0
1069 mov loc7=0
1072 mov loc8=0
1073 mov loc9=0
1075 mov loc10=0
1076 mov loc11=0
1087 mov ar.unat=r25 // M2
1089 (pLvSys)mov r19=r0 // A clear r19 for leave_syscall, no-op otherwise
1091 (pUStk) mov ar.bspstore=r23 // M2
1093 (pLvSys)mov r16=r0 // A clear r16 for leave_syscall, no-op otherwise
1096 mov ar.pfs=r26 // I0
1097 (pLvSys)mov r17=r0 // A clear r17 for leave_syscall, no-op otherwise
1100 mov b0=r21 // I0
1101 (pLvSys)mov r18=r0 // A clear r18 for leave_syscall, no-op otherwise
1103 mov ar.fpsr=r20 // M2
1107 (pUStk) mov ar.rnat=r24 // M2 must happen with RSE in lazy mode
1109 (pLvSys)mov r2=r0
1111 mov ar.rsc=r27 // M2
1112 mov pr=r31,-1 // I0
1167 (p7) mov r10=-1
1179 mov loc0=rp
1180 mov out0=r8 // Address of previous task
1183 .ret11: mov ar.pfs=loc1
1184 mov rp=loc0
1199 mov r9=ar.unat
1200 mov loc0=rp // save return address
1201 mov out0=0 // there is no "oldset"
1203 (pSys) mov out2=1 // out2==1 => we're in a syscall
1205 (pNonSys) mov out2=0 // out2==0 => not a syscall
1216 mov rp=loc0
1218 mov ar.unat=r9
1219 mov ar.pfs=loc1
1258 mov.sptk b7=r8,ia64_leave_kernel
1260 mov ar.unat=r9
1269 mov r16=r0
1287 mov loc0=rp
1288 mov r16=loc1
1299 mov out1=r13 // current
1303 mov b6=loc2
1304 mov loc2=gp // save gp across indirect function call
1307 mov out1=in1 // arg
1309 1: mov gp=loc2 // restore gp
1317 mov ar.pfs=loc1
1318 mov rp=loc0
1335 mov out3 = r0
1337 mov out2 = b0
1339 mov out1 = r1;
1344 mov b0 = r3
1355 mov loc1 = b0
1356 mov out0 = b0
1357 mov loc2 = r8
1358 mov loc3 = r15
1361 mov out1 = in2
1362 mov b6 = r3
1366 mov ar.pfs = loc0
1367 mov b0 = loc1
1368 mov r8 = loc2
1369 mov r15 = loc3
1386 mov loc1 = b0
1387 mov out0 = b0
1388 mov loc2 = r8
1389 mov loc3 = r15
1392 mov out1 = in2
1393 mov b6 = r3
1397 mov ar.pfs = loc0
1398 mov b0 = loc1
1399 mov r8 = loc2
1400 mov r15 = loc3
1407 mov r3 = b0
1410 mov b6 = r2
1411 mov b7 = r3
1415 mov b0 = r42
1416 mov r1 = r41
1417 mov ar.pfs = r40
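
Most of these matches fall into a few recurring idioms. The most common is the call bracket visible in the pairs at lines 66/75/79, 115-128, 143-156, 560-571 and 1179-1184 above: a wrapper allocates a register frame, copies the return pointer (rp) and ar.pfs into stacked locals, forwards its inputs to the callee's out registers, and restores rp and ar.pfs before returning. Below is a minimal sketch of that idiom; hypothetical_wrapper and some_callee are invented names and the frame sizes are only illustrative, not taken from the file being searched.

	.text
	.global hypothetical_wrapper
	.proc hypothetical_wrapper
hypothetical_wrapper:
	alloc loc1=ar.pfs,2,2,2,0	// frame: 2 in, 2 loc, 2 out, 0 rot; loc1 <- ar.pfs
	mov loc0=rp			// rp is clobbered by the br.call below
	mov out0=in0			// forward first argument
	mov out1=in1			// forward second argument
	br.call.sptk.many rp=some_callee	// some_callee is a placeholder
	mov ar.pfs=loc1			// restore caller's frame marker
	mov rp=loc0			// restore return pointer
	;;				// stop: br.ret reads rp and ar.pfs
	br.ret.sptk.many rp
	.endp hypothetical_wrapper

The same save/restore discipline shows up for the register stack engine in the matches above: ar.rsc is forced to 0 (enforced lazy mode, lines 274 and 367) before ar.rnat and ar.bspstore are read or written, ar.rnat is restored only after ar.bspstore (the constraint spelled out in the comment at line 447), and ar.rsc is set back to 3 (eager mode) last (lines 349 and 449).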