Lines matching refs:rd (reference search for the destination-register parameter rd in the RISC-V64 TurboAssembler/MacroAssembler; each hit below is prefixed with its line number in the original source file)
340 void TurboAssembler::Add32(Register rd, Register rs, const Operand& rt) {
342 if (FLAG_riscv_c_extension && (rd.code() == rs.code()) &&
343 ((rd.code() & 0b11000) == 0b01000) &&
345 c_addw(rd, rt.rm());
347 addw(rd, rs, rt.rm());
351 (rd.code() == rs.code()) && (rd != zero_reg) &&
353 c_addiw(rd, static_cast<int8_t>(rt.immediate()));
355 addiw(rd, rs, static_cast<int32_t>(rt.immediate()));
358 addiw(rd, rs, rt.immediate() / 2);
359 addiw(rd, rd, rt.immediate() - (rt.immediate() / 2));
365 addw(rd, rs, scratch);
370 void TurboAssembler::Add64(Register rd, Register rs, const Operand& rt) {
372 if (FLAG_riscv_c_extension && (rd.code() == rs.code()) &&
374 c_add(rd, rt.rm());
376 add(rd, rs, rt.rm());
380 (rd.code() == rs.code()) && (rd != zero_reg) && (rt.immediate() != 0) &&
382 c_addi(rd, static_cast<int8_t>(rt.immediate()));
385 (rd.code() == rs.code()) && (rd == sp) &&
388 } else if (FLAG_riscv_c_extension && ((rd.code() & 0b11000) == 0b01000) &&
391 c_addi4spn(rd, static_cast<uint16_t>(rt.immediate()));
393 addi(rd, rs, static_cast<int32_t>(rt.immediate()));
396 addi(rd, rs, rt.immediate() / 2);
397 addi(rd, rd, rt.immediate() - (rt.immediate() / 2));
404 add(rd, rs, scratch);
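// The FLAG_riscv_c_extension branches above select compressed (RVC) encodings
// when the operands allow it: the two-operand C forms require rd == rs, the
// immediate forms take a 6-bit signed immediate, and several ops encode
// registers in a 3-bit field that can only name x8..x15. The mask test
// (rd.code() & 0b11000) == 0b01000 is exactly that x8..x15 check. A
// standalone host-side sketch of those predicates (illustrative only, not V8
// code; the helper names are made up):
#include <cstdint>
#include <cstdio>

static bool IsRvcCompressibleReg(int code) {   // true for x8..x15 (s0/s1, a0..a5)
  return (code & 0b11000) == 0b01000;
}
static bool FitsInt6(int64_t imm) {            // c.addi / c.addiw immediate range
  return imm >= -32 && imm <= 31;
}

int main() {
  for (int code = 0; code < 32; ++code)
    if (IsRvcCompressibleReg(code)) printf("x%d is RVC-encodable\n", code);
  printf("31 fits int6: %d, 32 fits int6: %d\n", FitsInt6(31), FitsInt6(32));
  return 0;
}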
409 void TurboAssembler::Sub32(Register rd, Register rs, const Operand& rt) {
411 if (FLAG_riscv_c_extension && (rd.code() == rs.code()) &&
412 ((rd.code() & 0b11000) == 0b01000) &&
414 c_subw(rd, rt.rm());
416 subw(rd, rs, rt.rm());
420 if (FLAG_riscv_c_extension && (rd.code() == rs.code()) &&
421 (rd != zero_reg) && is_int6(-rt.immediate()) &&
424 rd,
428 addiw(rd, rs,
433 addiw(rd, rs, -rt.immediate() / 2);
434 addiw(rd, rd, -rt.immediate() - (-rt.immediate() / 2));
441 addw(rd, rs, scratch);
445 subw(rd, rs, scratch);
451 void TurboAssembler::Sub64(Register rd, Register rs, const Operand& rt) {
453 if (FLAG_riscv_c_extension && (rd.code() == rs.code()) &&
454 ((rd.code() & 0b11000) == 0b01000) &&
456 c_sub(rd, rt.rm());
458 sub(rd, rs, rt.rm());
460 } else if (FLAG_riscv_c_extension && (rd.code() == rs.code()) &&
461 (rd != zero_reg) && is_int6(-rt.immediate()) &&
463 c_addi(rd,
469 (rd.code() == rs.code()) && (rd == sp) &&
473 addi(rd, rs,
478 addi(rd, rs, -rt.immediate() / 2);
479 addi(rd, rd, -rt.immediate() - (-rt.immediate() / 2));
489 add(rd, rs, scratch);
495 sub(rd, rs, scratch);
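// When an add/sub immediate does not fit the 12-bit I-type field, the code
// above splits it across two addi/addiw instructions: imm/2 first, then
// imm - imm/2. A standalone check of which immediates that split actually
// covers (illustrative only, not V8 code):
#include <cstdint>
#include <cstdio>

static bool IsInt12(int64_t v) { return v >= -2048 && v <= 2047; }

int main() {
  int64_t min_ok = INT64_MAX, max_ok = INT64_MIN;
  for (int64_t imm = -10000; imm <= 10000; ++imm) {
    int64_t first = imm / 2;            // addi(rd, rs, imm / 2)
    int64_t second = imm - first;       // addi(rd, rd, imm - imm / 2)
    if (IsInt12(first) && IsInt12(second)) {
      if (imm < min_ok) min_ok = imm;
      if (imm > max_ok) max_ok = imm;
    }
  }
  // Prints [-4096, 4094]; anything further out falls through to the
  // load-into-scratch-then-add/sub path seen above.
  printf("two-addi split covers [%lld, %lld]\n",
         (long long)min_ok, (long long)max_ok);
  return 0;
}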
500 void TurboAssembler::Mul32(Register rd, Register rs, const Operand& rt) {
502 mulw(rd, rs, rt.rm());
508 mulw(rd, rs, scratch);
512 void TurboAssembler::Mulh32(Register rd, Register rs, const Operand& rt) {
514 mul(rd, rs, rt.rm());
520 mul(rd, rs, scratch);
522 srai(rd, rd, 32);
525 void TurboAssembler::Mulhu32(Register rd, Register rs, const Operand& rt,
533 mulhu(rd, rsz, rtz);
534 srai(rd, rd, 32);
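// Mulh32 gets the high word of a signed 32x32 product by doing the multiply
// in 64 bits and shifting right arithmetically by 32; Mulhu32 first shifts
// both operands into the upper halves (rsz, rtz) so mulhu yields the full
// unsigned 64-bit product, then srai extracts its high word. The same math
// on the host (illustrative only, not V8 code):
#include <cstdint>
#include <cstdio>

static int32_t Mulh32Host(int32_t a, int32_t b) {
  return static_cast<int32_t>((static_cast<int64_t>(a) * b) >> 32);
}
static uint32_t Mulhu32Host(uint32_t a, uint32_t b) {
  return static_cast<uint32_t>((static_cast<uint64_t>(a) * b) >> 32);
}

int main() {
  printf("%d\n", Mulh32Host(-50000, 70000));                 // high word of -3.5e9
  printf("%u\n", Mulhu32Host(0xFFFFFFFFu, 0xFFFFFFFFu));     // 0xFFFFFFFE
  return 0;
}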
537 void TurboAssembler::Mul64(Register rd, Register rs, const Operand& rt) {
539 mul(rd, rs, rt.rm());
545 mul(rd, rs, scratch);
549 void TurboAssembler::Mulh64(Register rd, Register rs, const Operand& rt) {
551 mulh(rd, rs, rt.rm());
557 mulh(rd, rs, scratch);
573 void TurboAssembler::Mod32(Register rd, Register rs, const Operand& rt) {
575 remw(rd, rs, rt.rm());
581 remw(rd, rs, scratch);
585 void TurboAssembler::Modu32(Register rd, Register rs, const Operand& rt) {
587 remuw(rd, rs, rt.rm());
593 remuw(rd, rs, scratch);
597 void TurboAssembler::Div64(Register rd, Register rs, const Operand& rt) {
599 div(rd, rs, rt.rm());
605 div(rd, rs, scratch);
633 void TurboAssembler::Mod64(Register rd, Register rs, const Operand& rt) {
635 rem(rd, rs, rt.rm());
641 rem(rd, rs, scratch);
645 void TurboAssembler::Modu64(Register rd, Register rs, const Operand& rt) {
647 remu(rd, rs, rt.rm());
653 remu(rd, rs, scratch);
657 void TurboAssembler::And(Register rd, Register rs, const Operand& rt) {
659 if (FLAG_riscv_c_extension && (rd.code() == rs.code()) &&
660 ((rd.code() & 0b11000) == 0b01000) &&
662 c_and(rd, rt.rm());
664 and_(rd, rs, rt.rm());
668 !MustUseReg(rt.rmode()) && (rd.code() == rs.code()) &&
669 ((rd.code() & 0b11000) == 0b01000)) {
670 c_andi(rd, static_cast<int8_t>(rt.immediate()));
672 andi(rd, rs, static_cast<int32_t>(rt.immediate()));
678 and_(rd, rs, scratch);
683 void TurboAssembler::Or(Register rd, Register rs, const Operand& rt) {
685 if (FLAG_riscv_c_extension && (rd.code() == rs.code()) &&
686 ((rd.code() & 0b11000) == 0b01000) &&
688 c_or(rd, rt.rm());
690 or_(rd, rs, rt.rm());
694 ori(rd, rs, static_cast<int32_t>(rt.immediate()));
700 or_(rd, rs, scratch);
705 void TurboAssembler::Xor(Register rd, Register rs, const Operand& rt) {
707 if (FLAG_riscv_c_extension && (rd.code() == rs.code()) &&
708 ((rd.code() & 0b11000) == 0b01000) &&
710 c_xor(rd, rt.rm());
712 xor_(rd, rs, rt.rm());
716 xori(rd, rs, static_cast<int32_t>(rt.immediate()));
722 xor_(rd, rs, scratch);
727 void TurboAssembler::Nor(Register rd, Register rs, const Operand& rt) {
729 or_(rd, rs, rt.rm());
730 not_(rd, rd);
732 Or(rd, rs, rt);
733 not_(rd, rd);
742 void TurboAssembler::Seqz(Register rd, const Operand& rt) {
744 seqz(rd, rt.rm());
746 li(rd, rt.immediate() == 0);
750 void TurboAssembler::Snez(Register rd, const Operand& rt) {
752 snez(rd, rt.rm());
754 li(rd, rt.immediate() != 0);
758 void TurboAssembler::Seq(Register rd, Register rs, const Operand& rt) {
760 Seqz(rd, rt);
762 seqz(rd, rs);
764 Sub64(rd, rs, rt);
765 seqz(rd, rd);
769 void TurboAssembler::Sne(Register rd, Register rs, const Operand& rt) {
771 Snez(rd, rt);
773 snez(rd, rs);
775 Sub64(rd, rs, rt);
776 snez(rd, rd);
780 void TurboAssembler::Slt(Register rd, Register rs, const Operand& rt) {
782 slt(rd, rs, rt.rm());
785 slti(rd, rs, static_cast<int32_t>(rt.immediate()));
792 slt(rd, rs, scratch);
797 void TurboAssembler::Sltu(Register rd, Register rs, const Operand& rt) {
799 sltu(rd, rs, rt.rm());
802 sltiu(rd, rs, static_cast<int32_t>(rt.immediate()));
809 sltu(rd, rs, scratch);
814 void TurboAssembler::Sle(Register rd, Register rs, const Operand& rt) {
816 slt(rd, rt.rm(), rs);
823 slt(rd, scratch, rs);
825 xori(rd, rd, 1);
828 void TurboAssembler::Sleu(Register rd, Register rs, const Operand& rt) {
830 sltu(rd, rt.rm(), rs);
837 sltu(rd, scratch, rs);
839 xori(rd, rd, 1);
842 void TurboAssembler::Sge(Register rd, Register rs, const Operand& rt) {
843 Slt(rd, rs, rt);
844 xori(rd, rd, 1);
847 void TurboAssembler::Sgeu(Register rd, Register rs, const Operand& rt) {
848 Sltu(rd, rs, rt);
849 xori(rd, rd, 1);
852 void TurboAssembler::Sgt(Register rd, Register rs, const Operand& rt) {
854 slt(rd, rt.rm(), rs);
861 slt(rd, scratch, rs);
865 void TurboAssembler::Sgtu(Register rd, Register rs, const Operand& rt) {
867 sltu(rd, rt.rm(), rs);
874 sltu(rd, scratch, rs);
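// The Seq/Sne/Sle/Sge/Sgt (and unsigned Sleu/Sgeu/Sgtu) helpers above all
// reduce to the base RISC-V primitives: slt/sltu produce 0/1, seqz/snez test
// against zero, a subtraction feeds seqz/snez for equality, and xori ...,1
// flips the boolean. The same reductions on host integers (illustrative
// only, not V8 code):
#include <cstdint>
#include <cassert>

static int64_t Slt(int64_t a, int64_t b)  { return a < b;         }  // slt
static int64_t Seqz(int64_t a)            { return a == 0;        }  // seqz
static int64_t Seq(int64_t a, int64_t b)  { return Seqz(a - b);   }  // Sub64 + seqz
static int64_t Sle(int64_t a, int64_t b)  { return Slt(b, a) ^ 1; }  // slt swapped + xori 1
static int64_t Sge(int64_t a, int64_t b)  { return Slt(a, b) ^ 1; }  // slt + xori 1
static int64_t Sgt(int64_t a, int64_t b)  { return Slt(b, a);     }  // slt swapped

int main() {
  for (int64_t a = -2; a <= 2; ++a)
    for (int64_t b = -2; b <= 2; ++b) {
      assert(Seq(a, b) == (a == b));
      assert(Sle(a, b) == (a <= b));
      assert(Sge(a, b) == (a >= b));
      assert(Sgt(a, b) == (a > b));
    }
  return 0;
}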
878 void TurboAssembler::Sll32(Register rd, Register rs, const Operand& rt) {
880 sllw(rd, rs, rt.rm());
883 slliw(rd, rs, shamt);
887 void TurboAssembler::Sra32(Register rd, Register rs, const Operand& rt) {
889 sraw(rd, rs, rt.rm());
892 sraiw(rd, rs, shamt);
896 void TurboAssembler::Srl32(Register rd, Register rs, const Operand& rt) {
898 srlw(rd, rs, rt.rm());
901 srliw(rd, rs, shamt);
905 void TurboAssembler::Sra64(Register rd, Register rs, const Operand& rt) {
907 sra(rd, rs, rt.rm());
908 } else if (FLAG_riscv_c_extension && (rd.code() == rs.code()) &&
909 ((rd.code() & 0b11000) == 0b01000) && is_int6(rt.immediate())) {
911 c_srai(rd, shamt);
914 srai(rd, rs, shamt);
918 void TurboAssembler::Srl64(Register rd, Register rs, const Operand& rt) {
920 srl(rd, rs, rt.rm());
921 } else if (FLAG_riscv_c_extension && (rd.code() == rs.code()) &&
922 ((rd.code() & 0b11000) == 0b01000) && is_int6(rt.immediate())) {
924 c_srli(rd, shamt);
927 srli(rd, rs, shamt);
931 void TurboAssembler::Sll64(Register rd, Register rs, const Operand& rt) {
933 sll(rd, rs, rt.rm());
936 if (FLAG_riscv_c_extension && (rd.code() == rs.code()) &&
937 (rd != zero_reg) && (shamt != 0) && is_uint6(shamt)) {
938 c_slli(rd, shamt);
940 slli(rd, rs, shamt);
945 void TurboAssembler::Li(Register rd, int64_t imm) {
946 if (FLAG_riscv_c_extension && (rd != zero_reg) && is_int6(imm)) {
947 c_li(rd, imm);
949 RV_li(rd, imm);
953 void TurboAssembler::Mv(Register rd, const Operand& rt) {
954 if (FLAG_riscv_c_extension && (rd != zero_reg) && (rt.rm() != zero_reg)) {
955 c_mv(rd, rt.rm());
957 mv(rd, rt.rm());
961 void TurboAssembler::Ror(Register rd, Register rs, const Operand& rt) {
968 srlw(rd, rs, rt.rm());
969 or_(rd, scratch, rd);
970 sext_w(rd, rd);
974 Mv(rd, rs);
980 slliw(rd, rs, 32 - ror_value);
981 or_(rd, scratch, rd);
982 sext_w(rd, rd);
986 void TurboAssembler::Dror(Register rd, Register rs, const Operand& rt) {
993 srl(rd, rs, rt.rm());
994 or_(rd, scratch, rd);
998 Mv(rd, rs);
1004 slli(rd, rs, 64 - dror_value);
1005 or_(rd, scratch, rd);
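// Ror/Dror synthesize a rotate-right from two shifts and an OR into a
// scratch register, with the 32-bit form adding a sext_w at the end and a
// plain Mv for a zero rotate amount. A host-side sketch of the 64-bit case
// (illustrative only, not V8 code):
#include <cstdint>
#include <cstdio>

static uint64_t DrorHost(uint64_t x, unsigned n) {
  n &= 63;                                   // rotate amount is taken mod 64
  if (n == 0) return x;                      // matches the Mv(rd, rs) early-out
  return (x >> n) | (x << (64 - n));         // srl + sll + or_
}

int main() {
  printf("%016llx\n", (unsigned long long)DrorHost(0x0123456789ABCDEFull, 8));
  // expect ef0123456789abcd
  return 0;
}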
1009 void TurboAssembler::CalcScaledAddress(Register rd, Register rt, Register rs,
1013 Register tmp = rd == rt ? temps.Acquire() : rd;
1016 Add64(rd, rt, tmp);
1021 void TurboAssembler::ByteSwap(Register rd, Register rs, int operand_size,
1024 DCHECK_NE(scratch, rd);
1032 DCHECK((rd != t6) && (rs != t6));
1038 srliw(rd, rs, 16);
1039 or_(x0, rd, x0); // x0 <- x0 << 16 | x0 >> 16
1043 and_(rd, x0, x1); // x0 & 0xFF00FF00
1044 srliw(rd, rd, 8);
1045 or_(rd, rd, x2); // (((x0 & x1) << 8) | ((x0 & (x1 << 8)) >> 8))
1054 DCHECK((rd != t6) && (rs != t6));
1060 srli(rd, rs, 32);
1061 or_(x0, rd, x0); // x0 <- x0 << 32 | x0 >> 32
1065 and_(rd, x0, x1); // rd <- x0 & 0xFFFF0000FFFF0000
1066 srli(rd, rd, 16); // rd <- x0 & (x1 << 16)) >> 16
1067 or_(x0, rd, x2); // (x0 & x1) << 16 | (x0 & (x1 << 16)) >> 16;
1072 and_(rd, x0, x1);
1073 srli(rd, rd, 8); // rd <- (x0 & (x1 << 8)) >> 8
1074 or_(rd, rd, x2); // (((x0 & x1) << 8) | ((x0 & (x1 << 8)) >> 8))
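// ByteSwap is the classic shift-and-mask ladder the embedded comments
// describe: swap the 32-bit halves, then the 16-bit fields, then the bytes
// within each 16-bit field. The same 64-bit ladder on the host (illustrative
// only, not V8 code; the in-register choreography with x0/x1/x2 scratches
// above differs):
#include <cstdint>
#include <cstdio>

static uint64_t ByteSwap64Host(uint64_t x) {
  x = (x << 32) | (x >> 32);                                   // swap 32-bit halves
  x = ((x & 0x0000FFFF0000FFFFull) << 16) |
      ((x & 0xFFFF0000FFFF0000ull) >> 16);                     // swap 16-bit fields
  x = ((x & 0x00FF00FF00FF00FFull) << 8) |
      ((x & 0xFF00FF00FF00FF00ull) >> 8);                      // swap bytes
  return x;
}

int main() {
  printf("%016llx\n", (unsigned long long)ByteSwap64Host(0x0123456789ABCDEFull));
  // expect efcdab8967452301
  return 0;
}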
1079 void TurboAssembler::LoadNBytes(Register rd, const MemOperand& rs,
1081 DCHECK(rd != rs.rm() && rd != scratch);
1086 lb(rd, rs.rm(), rs.offset() + (NBYTES - 1));
1088 lbu(rd, rs.rm(), rs.offset() + (NBYTES - 1));
1092 slli(rd, rd, 8 * (NBYTES - 1));
1096 or_(rd, rd, scratch);
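// LoadNBytes assembles an unaligned little-endian value byte by byte: the
// most significant byte is loaded with lb (signed) or lbu (unsigned) and
// shifted into position, and the remaining bytes are loaded with lbu and
// OR-ed in through the scratch register. A host-side sketch over a plain
// buffer (illustrative only, not V8 code):
#include <cstdint>
#include <cstdio>

template <int NBYTES, bool IS_SIGNED>
static int64_t LoadNBytesHost(const uint8_t* p) {
  // Top byte: lb or lbu, then slli(rd, rd, 8 * (NBYTES - 1)).
  uint64_t r = IS_SIGNED
                   ? static_cast<uint64_t>(static_cast<int64_t>(
                         static_cast<int8_t>(p[NBYTES - 1])))
                   : static_cast<uint64_t>(p[NBYTES - 1]);
  r <<= 8 * (NBYTES - 1);
  // Lower bytes: lbu + shift + or_.
  for (int i = NBYTES - 2; i >= 0; --i)
    r |= static_cast<uint64_t>(p[i]) << (8 * i);
  return static_cast<int64_t>(r);
}

int main() {
  const uint8_t buf[4] = {0x78, 0x56, 0x34, 0x92};   // little-endian 0x92345678
  printf("%016llx\n", (unsigned long long)LoadNBytesHost<4, true>(buf));   // sign-extended
  printf("%016llx\n", (unsigned long long)LoadNBytesHost<4, false>(buf));  // zero-extended
  return 0;
}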
1130 void TurboAssembler::UnalignedLoadHelper(Register rd, const MemOperand& rs) {
1142 // Since source.rm() is scratch_base, assume rd != source.rm()
1143 DCHECK(rd != source.rm());
1145 LoadNBytes<NBYTES, IS_SIGNED>(rd, source, scratch_other);
1148 if (rd != rs.rm()) {
1150 LoadNBytes<NBYTES, IS_SIGNED>(rd, rs, scratch);
1151 } else { // rd == rs.rm()
1185 void TurboAssembler::UnalignedStoreHelper(Register rd, const MemOperand& rs,
1194 DCHECK(scratch_base != rd && scratch_base != rs.rm());
1209 DCHECK(scratch_other != rd && scratch_other != rs.rm() &&
1212 sb(rd, source.rm(), source.offset());
1214 srli(scratch_other, rd, i * 8);
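// UnalignedStoreHelper mirrors the load: the low byte goes out with sb, and
// each higher byte is peeled off with srli into a scratch register and then
// stored. A host-side sketch (illustrative only, not V8 code):
#include <cstdint>
#include <cstdio>

template <int NBYTES>
static void StoreNBytesHost(uint8_t* p, uint64_t value) {
  p[0] = static_cast<uint8_t>(value);                    // sb(rd, base, offset)
  for (int i = 1; i < NBYTES; ++i)
    p[i] = static_cast<uint8_t>(value >> (i * 8));       // srli(scratch, rd, i * 8) + sb
}

int main() {
  uint8_t buf[8] = {0};
  StoreNBytesHost<4>(buf, 0x11223344u);
  printf("%02x %02x %02x %02x\n", buf[0], buf[1], buf[2], buf[3]);  // 44 33 22 11
  return 0;
}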
1267 void TurboAssembler::Ulw(Register rd, const MemOperand& rs) {
1268 UnalignedLoadHelper<4, true>(rd, rs);
1271 void TurboAssembler::Ulwu(Register rd, const MemOperand& rs) {
1272 UnalignedLoadHelper<4, false>(rd, rs);
1275 void TurboAssembler::Usw(Register rd, const MemOperand& rs) {
1276 UnalignedStoreHelper<4>(rd, rs);
1279 void TurboAssembler::Ulh(Register rd, const MemOperand& rs) {
1280 UnalignedLoadHelper<2, true>(rd, rs);
1283 void TurboAssembler::Ulhu(Register rd, const MemOperand& rs) {
1284 UnalignedLoadHelper<2, false>(rd, rs);
1287 void TurboAssembler::Ush(Register rd, const MemOperand& rs) {
1288 UnalignedStoreHelper<2>(rd, rs);
1291 void TurboAssembler::Uld(Register rd, const MemOperand& rs) {
1292 UnalignedLoadHelper<8, true>(rd, rs);
1298 void MacroAssembler::LoadWordPair(Register rd, const MemOperand& rs) {
1301 Lwu(rd, rs);
1304 Add64(rd, rd, scratch);
1307 void TurboAssembler::Usd(Register rd, const MemOperand& rs) {
1308 UnalignedStoreHelper<8>(rd, rs);
1312 void MacroAssembler::StoreWordPair(Register rd, const MemOperand& rs) {
1315 Sw(rd, rs);
1316 srai(scratch, rd, 32);
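// LoadWordPair/StoreWordPair move a 64-bit value as two 32-bit words: the
// low word via Lwu/Sw, the high word via a 32-bit shift (the srai at 1316)
// and an Add64 on the load side (line 1304). A host-side sketch of the split
// and reassembly (illustrative only, not V8 code):
#include <cstdint>
#include <cstdio>

static void StoreWordPairHost(uint32_t out[2], uint64_t v) {
  out[0] = static_cast<uint32_t>(v);         // Sw: low word
  out[1] = static_cast<uint32_t>(v >> 32);   // srai by 32, then store the high word
}
static uint64_t LoadWordPairHost(const uint32_t in[2]) {
  return static_cast<uint64_t>(in[0]) |              // Lwu: zero-extended low word
         (static_cast<uint64_t>(in[1]) << 32);       // high word shifted up and added
}

int main() {
  uint32_t pair[2];
  StoreWordPairHost(pair, 0x1122334455667788ull);
  printf("%016llx\n", (unsigned long long)LoadWordPairHost(pair));
  return 0;
}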
1344 void TurboAssembler::Lb(Register rd, const MemOperand& rs) {
1348 AlignedLoadHelper(rd, rs, fn);
1351 void TurboAssembler::Lbu(Register rd, const MemOperand& rs) {
1355 AlignedLoadHelper(rd, rs, fn);
1358 void TurboAssembler::Sb(Register rd, const MemOperand& rs) {
1362 AlignedStoreHelper(rd, rs, fn);
1365 void TurboAssembler::Lh(Register rd, const MemOperand& rs) {
1369 AlignedLoadHelper(rd, rs, fn);
1372 void TurboAssembler::Lhu(Register rd, const MemOperand& rs) {
1376 AlignedLoadHelper(rd, rs, fn);
1379 void TurboAssembler::Sh(Register rd, const MemOperand& rs) {
1383 AlignedStoreHelper(rd, rs, fn);
1386 void TurboAssembler::Lw(Register rd, const MemOperand& rs) {
1400 AlignedLoadHelper(rd, rs, fn);
1403 void TurboAssembler::Lwu(Register rd, const MemOperand& rs) {
1407 AlignedLoadHelper(rd, rs, fn);
1410 void TurboAssembler::Sw(Register rd, const MemOperand& rs) {
1423 AlignedStoreHelper(rd, rs, fn);
1426 void TurboAssembler::Ld(Register rd, const MemOperand& rs) {
1440 AlignedLoadHelper(rd, rs, fn);
1443 void TurboAssembler::Sd(Register rd, const MemOperand& rs) {
1456 AlignedStoreHelper(rd, rs, fn);
1505 void TurboAssembler::Ll(Register rd, const MemOperand& rs) {
1508 lr_w(false, false, rd, rs.rm());
1513 lr_w(false, false, rd, scratch);
1517 void TurboAssembler::Lld(Register rd, const MemOperand& rs) {
1520 lr_d(false, false, rd, rs.rm());
1525 lr_d(false, false, rd, scratch);
1529 void TurboAssembler::Sc(Register rd, const MemOperand& rs) {
1532 sc_w(false, false, rd, rs.rm(), rd);
1537 sc_w(false, false, rd, scratch, rd);
1541 void TurboAssembler::Scd(Register rd, const MemOperand& rs) {
1544 sc_d(false, false, rd, rs.rm(), rd);
1549 sc_d(false, false, rd, scratch, rd);
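// Ll/Lld and Sc/Scd wrap RISC-V load-reserved / store-conditional; a typical
// use is an atomic read-modify-write loop that retries while the conditional
// store fails. A host-side analogue using std::atomic rather than literal
// lr/sc (illustrative only, not V8 code):
#include <atomic>
#include <cstdint>
#include <cstdio>

// Atomically add 'delta', retrying until the CAS (the sc analogue) succeeds.
static uint64_t AtomicAdd(std::atomic<uint64_t>& cell, uint64_t delta) {
  uint64_t old = cell.load(std::memory_order_relaxed);          // ~ Lld
  while (!cell.compare_exchange_weak(old, old + delta,          // ~ Scd, then branch
                                     std::memory_order_acq_rel,
                                     std::memory_order_relaxed)) {
    // 'old' is refreshed on failure; loop again, like branching on Sc's result.
  }
  return old;
}

int main() {
  std::atomic<uint64_t> x{40};
  AtomicAdd(x, 2);
  printf("%llu\n", (unsigned long long)x.load());
  return 0;
}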
1606 void TurboAssembler::li_optimized(Register rd, Operand j, LiFlags mode) {
1610 Li(rd, j.immediate());
1613 void TurboAssembler::li(Register rd, Operand j, LiFlags mode) {
1623 auipc(rd, 0);
1625 ld(rd, rd, 0);
1628 Li(rd, ~j.immediate());
1629 not_(rd, rd);
1631 Li(rd, j.immediate());
1644 li_ptr(rd, immediate);
1649 li_ptr(rd, j.immediate());
1652 li_ptr(rd, j.immediate());
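// One of li's fallbacks (lines 1628-1629) materializes the bitwise
// complement with Li and then applies not_, which pays off when ~imm needs a
// shorter load sequence than imm itself, e.g. values that are all-ones apart
// from a few low bits. A host-side illustration of the round-trip
// (illustrative only, not V8 code):
#include <cstdint>
#include <cstdio>

int main() {
  int64_t imm = static_cast<int64_t>(0xFFFFFFFFFFFF1234ull);  // long run of leading ones
  int64_t complement = ~imm;                                  // 0x000000000000EDCB
  int64_t rebuilt = ~complement;                              // not_(rd, rd) after Li(rd, ~imm)
  printf("imm=%016llx ~imm=%016llx rebuilt=%016llx\n",
         (unsigned long long)imm, (unsigned long long)complement,
         (unsigned long long)rebuilt);
  return rebuilt == imm ? 0 : 1;
}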
1839 void TurboAssembler::RoundFloatingPointToInteger(Register rd, FPURegister fs,
1852 fcvt_generator(this, rd, fs);
1864 fcvt_generator(this, rd, fs);
1868 void TurboAssembler::Clear_if_nan_d(Register rd, FPURegister fs) {
1872 Move(rd, zero_reg);
1876 void TurboAssembler::Clear_if_nan_s(Register rd, FPURegister fs) {
1880 Move(rd, zero_reg);
1884 void TurboAssembler::Trunc_uw_d(Register rd, FPURegister fs, Register result) {
1886 rd, fs, result, [](TurboAssembler* tasm, Register dst, FPURegister src) {
1891 void TurboAssembler::Trunc_w_d(Register rd, FPURegister fs, Register result) {
1893 rd, fs, result, [](TurboAssembler* tasm, Register dst, FPURegister src) {
1898 void TurboAssembler::Trunc_uw_s(Register rd, FPURegister fs, Register result) {
1900 rd, fs, result, [](TurboAssembler* tasm, Register dst, FPURegister src) {
1905 void TurboAssembler::Trunc_w_s(Register rd, FPURegister fs, Register result) {
1907 rd, fs, result, [](TurboAssembler* tasm, Register dst, FPURegister src) {
1912 void TurboAssembler::Trunc_ul_d(Register rd, FPURegister fs, Register result) {
1914 rd, fs, result, [](TurboAssembler* tasm, Register dst, FPURegister src) {
1919 void TurboAssembler::Trunc_l_d(Register rd, FPURegister fs, Register result) {
1921 rd, fs, result, [](TurboAssembler* tasm, Register dst, FPURegister src) {
1926 void TurboAssembler::Trunc_ul_s(Register rd, FPURegister fs, Register result) {
1928 rd, fs, result, [](TurboAssembler* tasm, Register dst, FPURegister src) {
1933 void TurboAssembler::Trunc_l_s(Register rd, FPURegister fs, Register result) {
1935 rd, fs, result, [](TurboAssembler* tasm, Register dst, FPURegister src) {
1940 void TurboAssembler::Round_w_s(Register rd, FPURegister fs, Register result) {
1942 rd, fs, result, [](TurboAssembler* tasm, Register dst, FPURegister src) {
1947 void TurboAssembler::Round_w_d(Register rd, FPURegister fs, Register result) {
1949 rd, fs, result, [](TurboAssembler* tasm, Register dst, FPURegister src) {
1954 void TurboAssembler::Ceil_w_s(Register rd, FPURegister fs, Register result) {
1956 rd, fs, result, [](TurboAssembler* tasm, Register dst, FPURegister src) {
1961 void TurboAssembler::Ceil_w_d(Register rd, FPURegister fs, Register result) {
1963 rd, fs, result, [](TurboAssembler* tasm, Register dst, FPURegister src) {
1968 void TurboAssembler::Floor_w_s(Register rd, FPURegister fs, Register result) {
1970 rd, fs, result, [](TurboAssembler* tasm, Register dst, FPURegister src) {
1975 void TurboAssembler::Floor_w_d(Register rd, FPURegister fs, Register result) {
1977 rd, fs, result, [](TurboAssembler* tasm, Register dst, FPURegister src) {
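// Every Trunc_/Round_/Ceil_/Floor_ wrapper above funnels into
// RoundFloatingPointToInteger with a small lambda that emits the matching
// fcvt variant; the extra 'result' register appears to report whether the
// conversion was valid (NaN or out-of-range input). A host-side sketch of
// truncation with such a validity flag (illustrative only, not V8 code; the
// saturation values below are my own choice):
#include <cmath>
#include <cstdint>
#include <cstdio>

static int32_t TruncWithCheck(double x, bool* ok) {
  double t = std::trunc(x);
  *ok = !std::isnan(x) && t >= -2147483648.0 && t <= 2147483647.0;
  if (!*ok) return x < 0 ? INT32_MIN : INT32_MAX;   // arbitrary saturation
  return static_cast<int32_t>(t);
}

int main() {
  bool ok = false;
  printf("%d ok=%d\n", TruncWithCheck(-3.9, &ok), ok);   // -3 ok=1
  printf("%d ok=%d\n", TruncWithCheck(1e20, &ok), ok);   // saturated, ok=0
  return 0;
}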
2262 void TurboAssembler::CompareF32(Register rd, FPUCondition cc, FPURegister cmp1,
2266 feq_s(rd, cmp1, cmp2);
2269 feq_s(rd, cmp1, cmp2);
2270 NegateBool(rd, rd);
2273 flt_s(rd, cmp1, cmp2);
2276 fle_s(rd, cmp2, cmp1);
2279 fle_s(rd, cmp1, cmp2);
2282 flt_s(rd, cmp2, cmp1);
2289 void TurboAssembler::CompareF64(Register rd, FPUCondition cc, FPURegister cmp1,
2293 feq_d(rd, cmp1, cmp2);
2296 feq_d(rd, cmp1, cmp2);
2297 NegateBool(rd, rd);
2300 flt_d(rd, cmp1, cmp2);
2303 fle_d(rd, cmp2, cmp1);
2306 fle_d(rd, cmp1, cmp2);
2309 flt_d(rd, cmp2, cmp1);
2316 void TurboAssembler::CompareIsNotNanF32(Register rd, FPURegister cmp1,
2322 feq_s(rd, cmp1, cmp1); // rd <- !isNan(cmp1)
2324 And(rd, rd, scratch); // rd <- !isNan(cmp1) && !isNan(cmp2)
2327 void TurboAssembler::CompareIsNotNanF64(Register rd, FPURegister cmp1,
2333 feq_d(rd, cmp1, cmp1); // rd <- !isNan(cmp1)
2335 And(rd, rd, scratch); // rd <- !isNan(cmp1) && !isNan(cmp2)
2338 void TurboAssembler::CompareIsNanF32(Register rd, FPURegister cmp1,
2340 CompareIsNotNanF32(rd, cmp1, cmp2); // rd <- !isNan(cmp1) && !isNan(cmp2)
2341 Xor(rd, rd, 1); // rd <- isNan(cmp1) || isNan(cmp2)
2344 void TurboAssembler::CompareIsNanF64(Register rd, FPURegister cmp1,
2346 CompareIsNotNanF64(rd, cmp1, cmp2); // rd <- !isNan(cmp1) && !isNan(cmp2)
2347 Xor(rd, rd, 1); // rd <- isNan(cmp1) || isNan(cmp2)
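// CompareIsNotNanF* relies on feq(x, x) being 0 exactly when x is NaN, then
// ANDs the two per-operand results; CompareIsNanF* just XORs that with 1.
// The same identity on the host (illustrative only, not V8 code):
#include <cmath>
#include <cstdio>

static int IsNotNan(double a, double b) { return (a == a) & (b == b); }  // feq + feq + And
static int IsNan(double a, double b)    { return IsNotNan(a, b) ^ 1; }   // Xor(rd, rd, 1)

int main() {
  printf("%d %d\n", IsNan(1.0, 2.0), IsNan(1.0, NAN));   // 0 1
  return 0;
}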
2458 void TurboAssembler::CompareI(Register rd, Register rs, const Operand& rt,
2462 Seq(rd, rs, rt);
2465 Sne(rd, rs, rt);
2470 Sgt(rd, rs, rt);
2473 Sge(rd, rs, rt); // rs >= rt
2476 Slt(rd, rs, rt); // rs < rt
2479 Sle(rd, rs, rt); // rs <= rt
2484 Sgtu(rd, rs, rt); // rs > rt
2487 Sgeu(rd, rs, rt); // rs >= rt
2490 Sltu(rd, rs, rt); // rs < rt
2493 Sleu(rd, rs, rt); // rs <= rt
2524 void TurboAssembler::Clz32(Register rd, Register xx) {
2533 // y = x >> 1; if (y != 0) {rd = n - 2; return;}
2534 // rd = n - x;
2539 Register x = rd;
2566 subw(rd, n, x);
2568 addiw(rd, n, -2);
2572 void TurboAssembler::Clz64(Register rd, Register xx) {
2582 // y = x >> 1; if (y != 0) {rd = n - 2; return;}
2583 // rd = n - x;
2588 Register x = rd;
2620 subw(rd, n, x);
2622 addiw(rd, n, -2);
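// Clz32/Clz64 use the branchy binary search the embedded comments sketch
// ("y = x >> 1; if (y != 0) {rd = n - 2; return;} rd = n - x;"): keep
// testing the upper half, narrowing x and lowering the candidate count n.
// A host-side version of the 32-bit algorithm (illustrative only, not V8
// code):
#include <cstdint>
#include <cstdio>

static int Clz32Host(uint32_t x) {
  int n = 32;
  uint32_t y;
  y = x >> 16; if (y != 0) { n -= 16; x = y; }
  y = x >> 8;  if (y != 0) { n -= 8;  x = y; }
  y = x >> 4;  if (y != 0) { n -= 4;  x = y; }
  y = x >> 2;  if (y != 0) { n -= 2;  x = y; }
  y = x >> 1;  if (y != 0) return n - 2;     // the "rd = n - 2" case
  return n - x;                              // the "rd = n - x" case
}

int main() {
  printf("%d %d %d\n", Clz32Host(1u << 31), Clz32Host(1), Clz32Host(0));  // 0 31 32
  return 0;
}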
2626 void TurboAssembler::Ctz32(Register rd, Register rs) {
2635 Xor(rd, scratch, rs);
2636 And(rd, rd, scratch);
2639 Clz32(rd, rd);
2646 Sub32(rd, scratch, rd);
2650 void TurboAssembler::Ctz64(Register rd, Register rs) {
2659 Xor(rd, scratch, rs);
2660 And(rd, rd, scratch);
2663 Clz64(rd, rd);
2670 Sub64(rd, scratch, rd);
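// Ctz32/Ctz64 turn trailing zeros into a run of trailing ones with
// (x ^ (x - 1)) & (x - 1) (scratch presumably holds rs - 1 here), count the
// leading zeros of that mask, and subtract from the bit width. A host-side
// check of the identity (illustrative only, not V8 code):
#include <cstdint>
#include <cassert>

static int Clz32Any(uint32_t x) {           // any correct clz works here
  int n = 0;
  while (n < 32 && !(x & (0x80000000u >> n))) ++n;
  return n;
}

static int Ctz32Host(uint32_t x) {
  uint32_t m = (x ^ (x - 1)) & (x - 1);     // ones exactly where x had trailing zeros
  return 32 - Clz32Any(m);                  // Clz32 + Sub32 from the width
}

int main() {
  assert(Ctz32Host(1) == 0);
  assert(Ctz32Host(8) == 3);
  assert(Ctz32Host(0x80000000u) == 31);
  assert(Ctz32Host(0) == 32);
  return 0;
}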
2674 void TurboAssembler::Popcnt32(Register rd, Register rs, Register scratch) {
2676 DCHECK_NE(scratch, rd);
2702 DCHECK((rd != value) && (rs != value));
2709 slli(rd, scratch2, 4);
2710 or_(scratch2, scratch2, rd);
2711 And(rd, scratch, scratch2);
2714 Add32(scratch, rd, scratch);
2715 srliw(rd, scratch, 4);
2716 Add32(rd, rd, scratch);
2719 And(rd, rd, scratch2);
2720 Mul32(rd, rd, value);
2721 Srl32(rd, rd, shift);
2724 void TurboAssembler::Popcnt64(Register rd, Register rs, Register scratch) {
2726 DCHECK_NE(scratch, rd);
2738 DCHECK((rd != value) && (rs != value));
2747 And(rd, scratch, scratch2);
2750 Add64(scratch, rd, scratch);
2751 Srl64(rd, scratch, 4);
2752 Add64(rd, rd, scratch);
2756 And(rd, rd, scratch2);
2757 Mul64(rd, rd, value);
2758 srli(rd, rd, 32 + shift);
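// Popcnt32/Popcnt64 are the classic SWAR bit count: pairwise sums, then
// nibble sums masked with 0x0F0F..., then one multiply by 0x0101... to
// accumulate all byte counts into the top byte, which the final shift
// (the srli(rd, rd, 32 + shift) line above) extracts. A host-side version
// of the 64-bit sequence (illustrative only, not V8 code; the in-register
// choreography with the scratch registers differs):
#include <cstdint>
#include <cstdio>

static int Popcnt64Host(uint64_t x) {
  x = x - ((x >> 1) & 0x5555555555555555ull);                             // 2-bit sums
  x = (x & 0x3333333333333333ull) + ((x >> 2) & 0x3333333333333333ull);   // 4-bit sums
  x = (x + (x >> 4)) & 0x0F0F0F0F0F0F0F0Full;                             // byte sums
  return static_cast<int>((x * 0x0101010101010101ull) >> 56);             // gather into top byte
}

int main() {
  printf("%d %d %d\n", Popcnt64Host(0), Popcnt64Host(0xFFull), Popcnt64Host(~0ull));  // 0 8 64
  return 0;
}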