Lines Matching refs:r8

238 #.set r8,8
283 # Freely use registers r5,r6,r7,r8,r9,r10,r11 as follows:
286 # r7,r8 are the results of the 32x32 giving 64 bit multiply.
306 $UMULH r8,r5,r6
308 addc r7,r7,r7 # compute (r7,r8)=2*(r7,r8)
309 adde r8,r8,r8
314 addze r11,r8 # r8 added to r11 which is 0
320 $UMULH r8,r6,r6
322 adde r9,r8,r9
327 $UMULH r8,r5,r6
330 adde r8,r8,r8
334 adde r9,r8,r9
340 $UMULH r8,r5,r6
342 adde r8,r8,r8
346 adde r10,r8,r10
352 $UMULH r8,r5,r6
355 adde r8,r8,r8
358 adde r10,r8,r10
363 $UMULH r8,r6,r6
365 adde r11,r8,r11
370 $UMULH r8,r5,r6
372 adde r8,r8,r8
376 adde r11,r8,r11
382 $UMULH r8,r5,r6
384 adde r8,r8,r8
388 adde r9,r8,r9
393 $UMULH r8,r6,r6
395 adde r10,r8,r10
423 # Freely use registers r5,r6,r7,r8,r9,r10,r11 as follows:
426 # r7,r8 are the results of the 32x32 giving 64 bit multiply.
444 $UMULH r8,r5,r6
447 adde r11,r8,r0 # (r8,r7) to the three register
451 adde r11,r8,r11 # (r8,r7) to the three register
458 $UMULH r8,r6,r6
460 adde r9,r8,r9
465 $UMULH r8,r5,r6
468 adde r9,r8,r9
472 adde r9,r8,r9
479 $UMULH r8,r5,r6
482 adde r10,r8,r10
486 adde r10,r8,r10
492 $UMULH r8,r5,r6
495 adde r10,r8,r10
499 adde r10,r8,r10
505 $UMULH r8,r6,r6
508 adde r11,r8,r11
513 $UMULH r8,r5,r6
516 adde r11,r8,r11
520 adde r11,r8,r11
526 $UMULH r8,r5,r6
529 adde r11,r8,r11
533 adde r11,r8,r11
539 $UMULH r8,r5,r6
542 adde r9,r8,r9
546 adde r9,r8,r9
552 $UMULH r8,r5,r6
555 adde r9,r8,r9
559 adde r9,r8,r9
565 $UMULH r8,r5,r6
568 adde r9,r8,r9
572 adde r9,r8,r9
577 $UMULH r8,r6,r6
579 adde r10,r8,r10
584 $UMULH r8,r5,r6
587 adde r10,r8,r10
591 adde r10,r8,r10
597 $UMULH r8,r5,r6
600 adde r10,r8,r10
604 adde r10,r8,r10
610 $UMULH r8,r5,r6
612 adde r10,r8,r10
615 adde r10,r8,r10
621 $UMULH r8,r5,r6
624 adde r11,r8,r11
627 adde r11,r8,r11
633 $UMULH r8,r5,r6
636 adde r11,r8,r11
639 adde r11,r8,r11
645 $UMULH r8,r5,r6
647 adde r11,r8,r11
650 adde r11,r8,r11
656 $UMULH r8,r5,r6
659 adde r11,r8,r11
662 adde r11,r8,r11
667 $UMULH r8,r6,r6
669 adde r9,r8,r9
674 $UMULH r8,r5,r6
676 adde r9,r8,r9
679 adde r9,r8,r9
685 $UMULH r8,r5,r6
687 adde r9,r8,r9
691 adde r9,r8,r9
697 $UMULH r8,r5,r6
699 adde r9,r8,r9
702 adde r9,r8,r9
708 $UMULH r8,r5,r6
711 adde r10,r8,r10
714 adde r10,r8,r10
720 $UMULH r8,r5,r6
722 adde r10,r8,r10
725 adde r10,r8,r10
731 $UMULH r8,r5,r6
733 adde r10,r8,r10
736 adde r10,r8,r10
741 $UMULH r8,r6,r6
743 adde r11,r8,r11
748 $UMULH r8,r5,r6
750 adde r11,r8,r11
753 adde r11,r8,r11
759 $UMULH r8,r5,r6
761 adde r11,r8,r11
764 adde r11,r8,r11
770 $UMULH r8,r5,r6
772 adde r9,r8,r9
775 adde r9,r8,r9
781 $UMULH r8,r5,r6
783 adde r9,r8,r9
786 adde r9,r8,r9
791 $UMULH r8,r6,r6
793 adde r10,r8,r10
798 $UMULH r8,r5,r6
800 adde r10,r8,r10
803 adde r10,r8,r10
810 $UMULH r8,r5,r6
812 adde r11,r8,r11
815 adde r11,r8,r11
820 $UMULH r8,r6,r6
822 adde r9,r8,r9
850 # r8, r9 are the results of the 32x32 giving 64 multiply.
862 $UMULL r8,r6,r7
864 addc r11,r8,r11
870 $UMULL r8,r6,r7
872 addc r11,r8,r11
878 $UMULL r8,r6,r7
880 addc r12,r8,r12
886 $UMULL r8,r6,r7
888 addc r12,r8,r12
894 $UMULL r8,r6,r7
896 addc r12,r8,r12
902 $UMULL r8,r6,r7
904 addc r10,r8,r10
910 $UMULL r8,r6,r7
912 addc r10,r8,r10
918 $UMULL r8,r6,r7
920 addc r10,r8,r10
926 $UMULL r8,r6,r7
928 addc r10,r8,r10
934 $UMULL r8,r6,r7
936 addc r11,r8,r11
942 $UMULL r8,r6,r7
944 addc r11,r8,r11
950 $UMULL r8,r6,r7
952 addc r11,r8,r11
958 $UMULL r8,r6,r7
960 addc r12,r8,r12
966 $UMULL r8,r6,r7
968 addc r12,r8,r12
974 $UMULL r8,r6,r7
976 addc r10,r8,r10
1004 # r8, r9 are the results of the 32x32 giving 64 multiply.
1017 $UMULL r8,r6,r7
1019 addc r11,r11,r8
1025 $UMULL r8,r6,r7
1027 addc r11,r11,r8
1033 $UMULL r8,r6,r7
1035 addc r12,r12,r8
1041 $UMULL r8,r6,r7
1043 addc r12,r12,r8
1049 $UMULL r8,r6,r7
1051 addc r12,r12,r8
1057 $UMULL r8,r6,r7
1059 addc r10,r10,r8
1065 $UMULL r8,r6,r7
1067 addc r10,r10,r8
1074 $UMULL r8,r6,r7
1076 addc r10,r10,r8
1082 $UMULL r8,r6,r7
1084 addc r10,r10,r8
1090 $UMULL r8,r6,r7
1092 addc r11,r11,r8
1098 $UMULL r8,r6,r7
1100 addc r11,r11,r8
1106 $UMULL r8,r6,r7
1108 addc r11,r11,r8
1114 $UMULL r8,r6,r7
1116 addc r11,r11,r8
1122 $UMULL r8,r6,r7
1124 addc r11,r11,r8
1130 $UMULL r8,r6,r7
1132 addc r12,r12,r8
1138 $UMULL r8,r6,r7
1140 addc r12,r12,r8
1146 $UMULL r8,r6,r7
1148 addc r12,r12,r8
1154 $UMULL r8,r6,r7
1156 addc r12,r12,r8
1162 $UMULL r8,r6,r7
1164 addc r12,r12,r8
1170 $UMULL r8,r6,r7
1172 addc r12,r12,r8
1178 $UMULL r8,r6,r7
1180 addc r10,r10,r8
1186 $UMULL r8,r6,r7
1188 addc r10,r10,r8
1194 $UMULL r8,r6,r7
1196 addc r10,r10,r8
1202 $UMULL r8,r6,r7
1204 addc r10,r10,r8
1210 $UMULL r8,r6,r7
1212 addc r10,r10,r8
1218 $UMULL r8,r6,r7
1220 addc r10,r10,r8
1226 $UMULL r8,r6,r7
1228 addc r10,r10,r8
1234 $UMULL r8,r6,r7
1236 addc r11,r11,r8
1242 $UMULL r8,r6,r7
1244 addc r11,r11,r8
1250 $UMULL r8,r6,r7
1252 addc r11,r11,r8
1258 $UMULL r8,r6,r7
1260 addc r11,r11,r8
1266 $UMULL r8,r6,r7
1268 addc r11,r11,r8
1274 $UMULL r8,r6,r7
1276 addc r11,r11,r8
1282 $UMULL r8,r6,r7
1284 addc r11,r11,r8
1290 $UMULL r8,r6,r7
1292 addc r11,r11,r8
1298 $UMULL r8,r6,r7
1300 addc r12,r12,r8
1306 $UMULL r8,r6,r7
1308 addc r12,r12,r8
1314 $UMULL r8,r6,r7
1316 addc r12,r12,r8
1322 $UMULL r8,r6,r7
1324 addc r12,r12,r8
1330 $UMULL r8,r6,r7
1332 addc r12,r12,r8
1338 $UMULL r8,r6,r7
1340 addc r12,r12,r8
1346 $UMULL r8,r6,r7
1348 addc r12,r12,r8
1354 $UMULL r8,r6,r7
1356 addc r10,r10,r8
1362 $UMULL r8,r6,r7
1364 addc r10,r10,r8
1370 $UMULL r8,r6,r7
1372 addc r10,r10,r8
1378 $UMULL r8,r6,r7
1380 addc r10,r10,r8
1386 $UMULL r8,r6,r7
1388 addc r10,r10,r8
1394 $UMULL r8,r6,r7
1396 addc r10,r10,r8
1402 $UMULL r8,r6,r7
1404 addc r11,r11,r8
1410 $UMULL r8,r6,r7
1412 addc r11,r11,r8
1418 $UMULL r8,r6,r7
1420 addc r11,r11,r8
1426 $UMULL r8,r6,r7
1428 addc r11,r11,r8
1434 $UMULL r8,r6,r7
1436 addc r11,r11,r8
1442 $UMULL r8,r6,r7
1444 addc r12,r12,r8
1450 $UMULL r8,r6,r7
1452 addc r12,r12,r8
1458 $UMULL r8,r6,r7
1460 addc r12,r12,r8
1466 $UMULL r8,r6,r7
1468 addc r12,r12,r8
1474 $UMULL r8,r6,r7
1476 addc r10,r10,r8
1482 $UMULL r8,r6,r7
1484 addc r10,r10,r8
1490 $UMULL r8,r6,r7
1492 addc r10,r10,r8
1498 $UMULL r8,r6,r7
1500 addc r11,r11,r8
1506 $UMULL r8,r6,r7
1508 addc r11,r11,r8
1514 $UMULL r8,r6,r7
1516 addc r12,r12,r8
1562 $LDU r8,$BNSZ(r5)
1563 subfe r6,r8,r7 # r6 = r7 + carry bit + one's complement(r8)
1564 # if carry = 1 this is r7-r8. Else it
1565 # is r7-r8 -1 as we need.
1611 $LDU r8,$BNSZ(r5)
1612 adde r8,r7,r8
1613 $STU r8,$BNSZ(r3)
1649 li r8,$BITS
1652 subf r8,r7,r8 #r8 = BN_num_bits_word(d)
1653 $SHR. r9,r3,r8 #are there any bits above r8'th?
1663 $SHR r8,r4,r8 # r8 = (l >> BN_BITS2 -i)
1665 or r3,r3,r8 # h = (h<<i)|(l>>(BN_BITS2-i))
1674 $SHRI r8,r3,`$BITS/2` #r8 = (h>>BN_BITS4)
1677 $UCMP 0,r8,r9 # is (h>>BN_BITS4)==dh
1680 li r8,-1
1681 $CLRU r8,r8,`$BITS/2` #q = BN_MASK2l
1684 $UDIV r8,r3,r9 #q = h/dh
1686 $UMULL r12,r9,r8 #th = q*dh
1688 $UMULL r6,r8,r10 #tl = q*dl
1702 addi r8,r8,-1 #q--
1718 addi r8,r8,-1 # q--
1729 $SHLI r0,r8,`$BITS/2` #ret =q<<BN_BITS4
1732 or r3,r8,r0
1757 # r7,r8 = product.
1770 $UMULH r8,r6,r6
1772 $STU r8,$BNSZ(r3)
1804 $LD r8,`0*$BNSZ`(r4)
1805 $UMULL r9,r6,r8
1806 $UMULH r10,r6,r8
1814 $LD r8,`1*$BNSZ`(r4)
1815 $UMULL r11,r6,r8
1816 $UMULH r12,r6,r8
1821 $LD r8,`2*$BNSZ`(r4)
1822 $UMULL r9,r6,r8
1823 $UMULH r10,r6,r8
1828 $LD r8,`3*$BNSZ`(r4)
1829 $UMULL r11,r6,r8
1830 $UMULH r12,r6,r8
1844 $LD r8,`0*$BNSZ`(r4)
1845 $UMULL r9,r6,r8
1846 $UMULH r10,r6,r8
1858 $LD r8,`1*$BNSZ`(r4)
1859 $UMULL r9,r6,r8
1860 $UMULH r10,r6,r8
1871 $LD r8,`2*$BNSZ`(r4)
1872 $UMULL r9,r6,r8
1873 $UMULH r10,r6,r8
1913 $LD r8,`0*$BNSZ`(r4)
1915 $UMULL r9,r6,r8
1916 $UMULH r10,r6,r8
1929 $LD r8,`1*$BNSZ`(r4)
1931 $UMULL r11,r6,r8
1932 $UMULH r12,r6,r8
1940 $LD r8,`2*$BNSZ`(r4)
1941 $UMULL r9,r6,r8
1943 $UMULH r10,r6,r8
1951 $LD r8,`3*$BNSZ`(r4)
1952 $UMULL r11,r6,r8
1954 $UMULH r12,r6,r8
1971 $LDU r8,$BNSZ(r4)
1972 $UMULL r9,r6,r8
1973 $UMULH r10,r6,r8
1983 $LDU r8,$BNSZ(r4)
1984 $UMULL r9,r6,r8
1985 $UMULH r10,r6,r8
1995 $LDU r8,$BNSZ(r4)
1996 $UMULL r9,r6,r8
1997 $UMULH r10,r6,r8