/third_party/node/deps/v8/src/codegen/riscv64/

assembler-riscv64.h
  1032  vrsub_vx(dst, src, zero_reg, mask);  in vneg_vv()
  1064  void neg(Register rd, Register rs) { sub(rd, zero_reg, rs); }  in neg()
  1065  void negw(Register rd, Register rs) { subw(rd, zero_reg, rs); }  in negw()
  1068  void snez(Register rd, Register rs) { sltu(rd, zero_reg, rs); }  in snez()
  1069  void sltz(Register rd, Register rs) { slt(rd, rs, zero_reg); }  in sltz()
  1070  void sgtz(Register rd, Register rs) { slt(rd, zero_reg, rs); }  in sgtz()
  1079  void beqz(Register rs, int16_t imm13) { beq(rs, zero_reg, imm13); }  in beqz()
  1081  void bnez(Register rs, int16_t imm13) { bne(rs, zero_reg, imm13); }  in bnez()
  1083  void blez(Register rs, int16_t imm13) { bge(zero_reg, rs, imm13); }  in blez()
  1085  void bgez(Register rs, int16_t imm13) { bge(rs, zero_reg, imm1  in blez()
  [all...]
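
These header hits are the standard RISC-V pseudo-instructions: each one is a plain base instruction with the hardwired zero register fixed into one operand slot (neg is a subtract from zero, snez/sltz/sgtz are set-less-than against zero, beqz/bnez/blez/bgez are compares against zero). A minimal stand-alone sketch of the same expansions, using toy register/ALU types rather than V8's Assembler API:

    #include <cstdint>
    #include <cstdio>

    // Toy model of the expansions above: each pseudo-instruction is a base
    // instruction with x0 (the always-zero register) wired into one slot.
    using Reg = int;                 // register number; 0 is x0 / zero_reg
    constexpr Reg kZero = 0;
    int64_t regs[32];                // regs[0] must stay 0

    void sub(Reg rd, Reg rs1, Reg rs2)  { if (rd) regs[rd] = regs[rs1] - regs[rs2]; }
    void slt(Reg rd, Reg rs1, Reg rs2)  { if (rd) regs[rd] = regs[rs1] < regs[rs2]; }
    void sltu(Reg rd, Reg rs1, Reg rs2) {
      if (rd) regs[rd] = (uint64_t)regs[rs1] < (uint64_t)regs[rs2];
    }

    // The pseudo-instructions from the header, expanded exactly as above.
    void neg(Reg rd, Reg rs)  { sub(rd, kZero, rs); }    // rd = 0 - rs
    void snez(Reg rd, Reg rs) { sltu(rd, kZero, rs); }   // rd = (rs != 0)
    void sltz(Reg rd, Reg rs) { slt(rd, rs, kZero); }    // rd = (rs < 0)
    void sgtz(Reg rd, Reg rs) { slt(rd, kZero, rs); }    // rd = (rs > 0)

    int main() {
      regs[5] = -7;
      neg(6, 5); snez(7, 5); sltz(8, 5); sgtz(9, 5);
      std::printf("%lld %lld %lld %lld\n", (long long)regs[6], (long long)regs[7],
                  (long long)regs[8], (long long)regs[9]);   // prints: 7 1 1 0
    }
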
macro-assembler-riscv64.cc
  40    return rt.rm() == zero_reg;  in IsZero()
  168   BranchShort(&ok, eq, scratch, Operand(zero_reg));  in RecordWriteField()
  351   (rd.code() == rs.code()) && (rd != zero_reg) &&  in Add32()
  373   (rt.rm() != zero_reg) && (rs != zero_reg)) {  in Add64()
  380   (rd.code() == rs.code()) && (rd != zero_reg) && (rt.immediate() != 0) &&  in Add64()
  421   (rd != zero_reg) && is_int6(-rt.immediate()) &&  in Sub32()
  461   (rd != zero_reg) && is_int6(-rt.immediate()) &&  in Sub64()
  759   if (rs == zero_reg) {  in Seq()
  770   if (rs == zero_reg) {  in Sne()
  [all...]
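
The Add32/Add64/Sub32/Sub64 conditions read like the eligibility test for a compressed (RVC) add-immediate: destination equals source, destination is not the zero register, and the immediate is a nonzero 6-bit value. Treating that as the intent (an assumption, not stated in these hits), a small predicate sketch:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical predicate mirroring the Add64/Sub64 conditions above,
    // assuming they gate the compressed c.addi form: rd == rs, rd is not the
    // zero register, and the immediate is a nonzero 6-bit value.
    constexpr int kZeroRegCode = 0;

    bool is_int6(int64_t v) { return v >= -32 && v < 32; }

    bool CanUseCompressedAddi(int rd, int rs, int64_t imm) {
      return rd == rs && rd != kZeroRegCode && imm != 0 && is_int6(imm);
    }

    int main() {
      std::printf("%d %d %d\n",
                  CanUseCompressedAddi(10, 10, 5),    // 1: c.addi a0, 5
                  CanUseCompressedAddi(10, 11, 5),    // 0: rd != rs
                  CanUseCompressedAddi(0, 0, 5));     // 0: target is zero_reg
      // Sub32/Sub64 reuse the same test with the negated immediate: is_int6(-imm).
    }
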
assembler-riscv64.cc
  88    0,  // zero_reg  in ToNumber()
  127   zero_reg, ra, sp, gp, tp, t0, t1, t2, fp, s1, a0, a1, a2, a3, a4, a5,  in ToRegister()
  1840  GenInstrRAtomic(0b00010, aq, rl, 0b010, rd, rs1, zero_reg);  in lr_w()
  1896  GenInstrRAtomic(0b00010, aq, rl, 0b011, rd, rs1, zero_reg);  in lr_d()
  2000  GenInstrALUFP_rr(0b0101100, frm, rd, rs1, zero_reg);  in fsqrt_s()
  2024  GenInstrALUFP_rr(0b1100000, frm, rd, rs1, zero_reg);  in fcvt_w_s()
  2032  GenInstrALUFP_rr(0b1110000, 0b000, rd, rs1, zero_reg);  in fmv_x_w()
  2048  GenInstrALUFP_rr(0b1110000, 0b001, rd, rs1, zero_reg);  in fclass_s()
  2052  GenInstrALUFP_rr(0b1101000, frm, rd, rs1, zero_reg);  in fcvt_s_w()
  2060  GenInstrALUFP_rr(0b1111000, 0b000, rd, rs1, zero_reg);  in fmv_w_x()
  [all...]
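
The ToNumber()/ToRegister() hits are the two-way mapping between the Register object and its 5-bit encoding (zero_reg is number 0). In lr_w/lr_d and the single-source FP instructions (fsqrt_s, fcvt_w_s, fmv_x_w, fclass_s, ...) the rs2 field of the R-type layout is unused, so zero_reg is passed to fill it. A minimal encoding sketch with hand-rolled field packing, not V8's GenInstr* helpers:

    #include <cstdint>
    #include <cstdio>

    // Minimal sketch (not V8's GenInstrRAtomic): a single-source instruction
    // such as LR.W still uses the full R-type layout, so the unused rs2 field
    // is simply filled with register 0 -- exactly what passing zero_reg does
    // in the hits above.
    uint32_t EncodeRType(uint32_t funct7, uint32_t rs2, uint32_t rs1,
                         uint32_t funct3, uint32_t rd, uint32_t opcode) {
      return (funct7 << 25) | (rs2 << 20) | (rs1 << 15) | (funct3 << 12) |
             (rd << 7) | opcode;
    }

    int main() {
      const uint32_t kZero = 0;  // zero_reg encodes as register number 0
      // lr.w a0, (a1): funct5 = 00010 with aq/rl clear, funct3 = 010, opcode = AMO.
      uint32_t lr_w = EncodeRType(0b0001000, kZero, /*rs1=a1*/ 11, 0b010,
                                  /*rd=a0*/ 10, 0b0101111);
      std::printf("lr.w a0, (a1) -> 0x%08x\n", lr_w);  // 0x1005a52f
    }
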
/third_party/node/deps/v8/src/codegen/mips/

macro-assembler-mips.cc
  43    return rt.rm() == zero_reg;  in IsZero()
  165   Branch(&ok, eq, t8, Operand(zero_reg));  in RecordWriteField()
  766   subu(rs, zero_reg, rt.rm());  in CallRecordWriteStub()
  891   subu(scratch, zero_reg, rt.rm());  in CallRecordWriteStub()
  911   lw(zero_reg, rs);  in CallRecordWriteStub()
  1347  addiu(rd, zero_reg, j.immediate());  in CallRecordWriteStub()
  1349  ori(rd, zero_reg, j.immediate());  in CallRecordWriteStub()
  1509  Nor(scratch2, zero_reg, scratch3);  in CallRecordWriteStub()
  1516  Branch(&done, eq, scratch1, Operand(zero_reg));  in CallRecordWriteStub()
  1518  mov(dst_low, zero_reg);  in CallRecordWriteStub()
  [all...]
assembler-mips.cc
  134   0,  // zero_reg  in ToNumber()
  173   zero_reg, at, v0, v1, a0, a1, a2, a3, t0, t1, t2, t3, t4, t5, t6, t7,  in ToRegister()
  638   // Checks if the instruction is a OR with zero_reg argument (aka MOV).  in IsMov()
  653   // Traditional mips nop == sll(zero_reg, zero_reg, 0)  in IsNop()
  654   // When marking non-zero type, use sll(zero_reg, at, type)  in IsNop()
  658   Register nop_rt_reg = (type == 0) ? zero_reg : at;  in IsNop()
  660   rd == static_cast<uint32_t>(ToNumber(zero_reg)) &&  in IsNop()
  1495  void Assembler::b(int16_t offset) { beq(zero_reg, zero_reg, offse
  [all...]
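
The IsMov()/IsNop() comments and b() spell out three classic MIPS idioms: a register move is `or rd, rs, $zero`, the canonical NOP is `sll $zero, $zero, 0` (the all-zero instruction word, with marked NOPs using $at and a nonzero shift amount), and an unconditional branch is `beq $zero, $zero, offset`. A toy encoder showing why those particular words come out (hypothetical field packing, not V8's Assembler):

    #include <cstdint>
    #include <cstdio>

    // Toy MIPS R-type encoder (SPECIAL opcode 0): rs, rt, rd, shift amount,
    // function code. Not V8's Assembler -- just enough to show the idioms.
    uint32_t RType(uint32_t rs, uint32_t rt, uint32_t rd, uint32_t sa,
                   uint32_t funct) {
      return (rs << 21) | (rt << 16) | (rd << 11) | (sa << 6) | funct;
    }

    int main() {
      const uint32_t zero = 0, at = 1, a0 = 4, a1 = 5;
      uint32_t nop    = RType(zero, zero, zero, 0, 0x00);  // sll $zero,$zero,0 == 0x00000000
      uint32_t marked = RType(zero, at,   zero, 3, 0x00);  // sll $zero,$at,3: nop marked with type 3
      uint32_t mov    = RType(a1,   zero, a0,   0, 0x25);  // or  $a0,$a1,$zero == mov $a0,$a1
      std::printf("nop=0x%08x marked=0x%08x mov=0x%08x\n", nop, marked, mov);
    }
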
/third_party/node/deps/v8/src/codegen/mips64/

macro-assembler-mips64.cc
  43    return rt.rm() == zero_reg;  in IsZero()
  161   Branch(&ok, eq, t8, Operand(zero_reg));  in RecordWriteField()
  899   dsubu(rs, zero_reg, rt.rm());  in CallRecordWriteStub()
  1121  dinsu_(dest, zero_reg, 32, 32);  in CallRecordWriteStub()
  1562  daddiu(rd, zero_reg, (j.immediate() & kImm16Mask));  in CallRecordWriteStub()
  1564  ori(rd, zero_reg, j.immediate() & kImm16Mask);  in CallRecordWriteStub()
  1694  ori(rd, zero_reg, j.immediate() & kImm16Mask);  in CallRecordWriteStub()
  1701  ori(rd, zero_reg, j.immediate() & kImm16Mask);  in CallRecordWriteStub()
  1724  daddiu(rd, zero_reg, j.immediate() & kImm16Mask);  in CallRecordWriteStub()
  1732  daddiu(rd, zero_reg,  in CallRecordWriteStub()
  [all...]
assembler-mips64.cc
  110   0,  // zero_reg  in ToNumber()
  149   zero_reg, at, v0, v1, a0, a1, a2, a3, a4, a5, a6, a7, t0, t1, t2, t3,  in ToRegister()
  550   // Checks if the instruction is a OR with zero_reg argument (aka MOV).  in IsMov()
  605   // Traditional mips nop == sll(zero_reg, zero_reg, 0)  in IsNop()
  606   // When marking non-zero type, use sll(zero_reg, at, type)  in IsNop()
  610   Register nop_rt_reg = (type == 0) ? zero_reg : at;  in IsNop()
  612   rd == static_cast<uint32_t>(ToNumber(zero_reg)) &&  in IsNop()
  1439  void Assembler::b(int16_t offset) { beq(zero_reg, zero_reg, offse
  [all...]
/third_party/node/deps/v8/src/codegen/loong64/

macro-assembler-loong64.cc
  43    return rk.rm() == zero_reg;  in IsZero()
  156   Branch(&ok, eq, scratch, Operand(zero_reg));  in RecordWriteField()
  690   sub_d(rj, zero_reg, rk.rm());  in CallRecordWriteStub()
  867   bstrins_d(dest, zero_reg, 63, 16);  in CallRecordWriteStub()
  870   bstrins_d(dest, zero_reg, 63, 32);  in CallRecordWriteStub()
  1146  addi_d(rd, zero_reg, j.immediate());  in CallRecordWriteStub()
  1148  ori(rd, zero_reg, j.immediate() & kImm12Mask);  in CallRecordWriteStub()
  1204  lu52i_d(rd, zero_reg, imm >> 52 & kImm12Mask);  in CallRecordWriteStub()
  1443  Branch(&msb_clear, ge, rj, Operand(zero_reg));  in CallRecordWriteStub()
  1489  Branch(&positive, ge, rj, Operand(zero_reg));  in CallRecordWriteStub()
  [all...]
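
The bstrins_d(dest, zero_reg, 63, 16) / (63, 32) hits insert the zero register into a bit range, which clears bits 63..16 or 63..32 of dest, i.e. a one-instruction zero-extension of a 16- or 32-bit value. A sketch of that semantics in plain bit operations, assuming the msb/lsb argument order as written in the snippet:

    #include <cstdint>
    #include <cstdio>

    // Sketch of the bstrins_d(dest, zero_reg, msb, lsb) idiom above: inserting
    // the always-zero register into bits [msb:lsb] clears that range, which is
    // how the macro-assembler zero-extends a 16- or 32-bit value in place.
    uint64_t BstrInsD(uint64_t dest, uint64_t src, int msb, int lsb) {
      int width = msb - lsb + 1;
      uint64_t mask = (width == 64) ? ~0ull : (((1ull << width) - 1) << lsb);
      return (dest & ~mask) | ((src << lsb) & mask);
    }

    int main() {
      uint64_t v = 0xFFFFFFFF80000000ull;                 // sign-extended 32-bit value
      uint64_t z32 = BstrInsD(v, /*zero_reg*/ 0, 63, 32); // clear bits 63..32
      uint64_t z16 = BstrInsD(v, /*zero_reg*/ 0, 63, 16); // clear bits 63..16
      std::printf("%016llx %016llx\n",
                  (unsigned long long)z32, (unsigned long long)z16);
    }
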
/third_party/node/deps/v8/src/builtins/mips/

builtins-mips.cc
  96    __ Branch(&loop, greater_equal, scratch, Operand(zero_reg));  in Generate_PushArguments()
  423   __ Sw(zero_reg, MemOperand(t4));  in Generate_JSEntryVariant()
  446   __ Branch(&non_outermost_js, ne, t2, Operand(zero_reg));  in Generate_JSEntryVariant()
  511   __ sw(zero_reg, MemOperand(t1));  in Generate_JSEntryVariant()
  696   __ Branch(&prepare_step_in_if_stepping, ne, t1, Operand(zero_reg));  in Generate_ResumeGeneratorTrampoline()
  732   __ Branch(&done_loop, lt, a3, Operand(zero_reg));  in Generate_ResumeGeneratorTrampoline()
  883   __ Branch(&heal_optimized_code_slot, ne, scratch1, Operand(zero_reg));  in TailCallOptimizedCodeSlot()
  958   __ Branch(&extra_wide, ne, scratch2, Operand(zero_reg));  in AdvanceBytecodeOffsetOrReturn()
  1016  __ Branch(has_optimized_code_or_state, ne, scratch, Operand(zero_reg));  in LoadTieringStateAndJumpIfNeedsProcessing()
  1030  __ Branch(&maybe_has_optimized_code, eq, scratch, Operand(zero_reg));  in MaybeOptimizeCodeOrTailCallOptimizedCodeSlot()
  [all...]
/third_party/node/deps/v8/src/builtins/mips64/

builtins-mips64.cc
  95    __ Branch(&loop, greater_equal, scratch, Operand(zero_reg));  in Generate_PushArguments()
  390   __ Branch(&prepare_step_in_if_stepping, ne, a5, Operand(zero_reg));  in Generate_ResumeGeneratorTrampoline()
  428   __ Branch(&done_loop, lt, a3, Operand(zero_reg));  in Generate_ResumeGeneratorTrampoline()
  591   __ Sd(zero_reg, MemOperand(s5));  in Generate_JSEntryVariant()
  621   __ Branch(&non_outermost_js, ne, s2, Operand(zero_reg));  in Generate_JSEntryVariant()
  696   __ Sd(zero_reg, MemOperand(a5));  in Generate_JSEntryVariant()
  892   __ Branch(&heal_optimized_code_slot, ne, scratch1, Operand(zero_reg));  in TailCallOptimizedCodeSlot()
  967   __ Branch(&extra_wide, ne, scratch2, Operand(zero_reg));  in AdvanceBytecodeOffsetOrReturn()
  1025  __ Branch(has_optimized_code_or_state, ne, scratch, Operand(zero_reg));  in LoadTieringStateAndJumpIfNeedsProcessing()
  1039  __ Branch(&maybe_has_optimized_code, eq, scratch, Operand(zero_reg));  in MaybeOptimizeCodeOrTailCallOptimizedCodeSlot()
  [all...]
/third_party/node/deps/v8/src/builtins/riscv64/

builtins-riscv64.cc
  94    __ Branch(&loop, greater_equal, scratch, Operand(zero_reg));  in Generate_PushArguments()
  415   __ Branch(&prepare_step_in_if_stepping, ne, a5, Operand(zero_reg));  in Generate_ResumeGeneratorTrampoline()
  455   __ Branch(&done_loop, lt, a3, Operand(zero_reg), Label::Distance::kNear);  in Generate_ResumeGeneratorTrampoline()
  627   __ Sd(zero_reg, MemOperand(s5));  in Generate_JSEntryVariant()
  655   __ Branch(&non_outermost_js, ne, s2, Operand(zero_reg),  in Generate_JSEntryVariant()
  730   __ Sd(zero_reg, MemOperand(a5));  in Generate_JSEntryVariant()
  939   __ Branch(&heal_optimized_code_slot, ne, a5, Operand(zero_reg),  in TailCallOptimizedCodeSlot()
  1020  __ Branch(&extra_wide, ne, scratch2, Operand(zero_reg),  in AdvanceBytecodeOffsetOrReturn()
  1082  __ Branch(has_optimized_code_or_state, ne, scratch, Operand(zero_reg));  in LoadTieringStateAndJumpIfNeedsProcessing()
  1099  __ Branch(&maybe_has_optimized_code, eq, scratch, Operand(zero_reg),  in MaybeOptimizeCodeOrTailCallOptimizedCodeSlot()
  [all...]
/third_party/node/deps/v8/src/regexp/mips64/

regexp-macro-assembler-mips64.cc
  272  __ Branch(&fallthrough, eq, a1, Operand(zero_reg));  in CheckNotBackReferenceIgnoreCase()
  281  BranchOrBacktrack(on_no_match, gt, t1, Operand(zero_reg));  in CheckNotBackReferenceIgnoreCase()
  380  BranchOrBacktrack(on_no_match, eq, v0, Operand(zero_reg));  in CheckNotBackReferenceIgnoreCase()
  405  __ Branch(&fallthrough, eq, a1, Operand(zero_reg));  in CheckNotBackReference()
  414  BranchOrBacktrack(on_no_match, gt, t1, Operand(zero_reg));  in CheckNotBackReference()
  464  Operand rhs = (c == 0) ? Operand(zero_reg) : Operand(c);  in CheckCharacterAfterAnd()
  473  Operand rhs = (c == 0) ? Operand(zero_reg) : Operand(c);  in CheckNotCharacterAfterAnd()
  522  BranchOrBacktrack(on_in_range, ne, v0, Operand(zero_reg));  in CheckCharacterInRangeArray()
  529  BranchOrBacktrack(on_not_in_range, eq, v0, Operand(zero_reg));  in CheckCharacterNotInRangeArray()
  545  BranchOrBacktrack(on_bit_set, ne, a0, Operand(zero_reg));  in CheckBitInTable()
  [all...]
/third_party/node/deps/v8/src/regexp/loong64/

regexp-macro-assembler-loong64.cc
  227  __ Branch(&fallthrough, eq, a1, Operand(zero_reg));  in CheckNotBackReferenceIgnoreCase()
  236  BranchOrBacktrack(on_no_match, gt, t1, Operand(zero_reg));  in CheckNotBackReferenceIgnoreCase()
  336  BranchOrBacktrack(on_no_match, eq, a0, Operand(zero_reg));  in CheckNotBackReferenceIgnoreCase()
  361  __ Branch(&fallthrough, eq, a1, Operand(zero_reg));  in CheckNotBackReference()
  370  BranchOrBacktrack(on_no_match, gt, t1, Operand(zero_reg));  in CheckNotBackReference()
  418  Operand rhs = (c == 0) ? Operand(zero_reg) : Operand(c);  in CheckCharacterAfterAnd()
  425  Operand rhs = (c == 0) ? Operand(zero_reg) : Operand(c);  in CheckNotCharacterAfterAnd()
  474  BranchOrBacktrack(on_in_range, ne, a0, Operand(zero_reg));  in CheckCharacterInRangeArray()
  481  BranchOrBacktrack(on_not_in_range, eq, a0, Operand(zero_reg));  in CheckCharacterNotInRangeArray()
  496  BranchOrBacktrack(on_bit_set, ne, a0, Operand(zero_reg));  in CheckBitInTable()
  [all...]
/third_party/node/deps/v8/src/regexp/mips/

regexp-macro-assembler-mips.cc
  234  __ Branch(&fallthrough, eq, a1, Operand(zero_reg));  in CheckNotBackReferenceIgnoreCase()
  243  BranchOrBacktrack(on_no_match, gt, t5, Operand(zero_reg));  in CheckNotBackReferenceIgnoreCase()
  342  BranchOrBacktrack(on_no_match, eq, v0, Operand(zero_reg));  in CheckNotBackReferenceIgnoreCase()
  367  __ Branch(&fallthrough, le, a1, Operand(zero_reg));  in CheckNotBackReference()
  376  BranchOrBacktrack(on_no_match, gt, t5, Operand(zero_reg));  in CheckNotBackReference()
  432  Operand rhs = (c == 0) ? Operand(zero_reg) : Operand(c);  in CheckCharacterAfterAnd()
  441  Operand rhs = (c == 0) ? Operand(zero_reg) : Operand(c);  in CheckNotCharacterAfterAnd()
  490  BranchOrBacktrack(on_in_range, ne, v0, Operand(zero_reg));  in CheckCharacterInRangeArray()
  497  BranchOrBacktrack(on_not_in_range, eq, v0, Operand(zero_reg));  in CheckCharacterNotInRangeArray()
  513  BranchOrBacktrack(on_bit_set, ne, a0, Operand(zero_reg));  in CheckBitInTable()
  [all...]
/third_party/node/deps/v8/src/regexp/riscv64/

regexp-macro-assembler-riscv64.cc
  260  BranchOrBacktrack(on_in_range, ne, a0, Operand(zero_reg));  in CheckCharacterInRangeArray()
  267  BranchOrBacktrack(on_not_in_range, eq, a0, Operand(zero_reg));  in CheckCharacterNotInRangeArray()
  281  __ BranchShort(&fallthrough, eq, a1, Operand(zero_reg));  in CheckNotBackReferenceIgnoreCase()
  290  BranchOrBacktrack(on_no_match, gt, t1, Operand(zero_reg));  in CheckNotBackReferenceIgnoreCase()
  395  BranchOrBacktrack(on_no_match, eq, a0, Operand(zero_reg));  in CheckNotBackReferenceIgnoreCase()
  420  __ BranchShort(&fallthrough, eq, a1, Operand(zero_reg));  in CheckNotBackReference()
  429  BranchOrBacktrack(on_no_match, gt, t1, Operand(zero_reg));  in CheckNotBackReference()
  477  Operand rhs = (c == 0) ? Operand(zero_reg) : Operand(c);  in CheckCharacterAfterAnd()
  485  Operand rhs = (c == 0) ? Operand(zero_reg) : Operand(c);  in CheckNotCharacterAfterAnd()
  523  BranchOrBacktrack(on_bit_set, ne, a0, Operand(zero_reg));  in CheckBitInTable()
  [all...]
/third_party/node/deps/v8/src/builtins/loong64/

builtins-loong64.cc
  95    __ Branch(&loop, greater_equal, scratch, Operand(zero_reg));  in Generate_PushArguments()
  391   __ Branch(&prepare_step_in_if_stepping, ne, a5, Operand(zero_reg));  in Generate_ResumeGeneratorTrampoline()
  429   __ Branch(&done_loop, lt, a3, Operand(zero_reg));  in Generate_ResumeGeneratorTrampoline()
  588   __ St_d(zero_reg, MemOperand(s5, 0));  in Generate_JSEntryVariant()
  618   __ Branch(&non_outermost_js, ne, s2, Operand(zero_reg));  in Generate_JSEntryVariant()
  694   __ St_d(zero_reg, MemOperand(a5, 0));  in Generate_JSEntryVariant()
  885   __ Branch(&heal_optimized_code_slot, ne, a6, Operand(zero_reg));  in TailCallOptimizedCodeSlot()
  957   __ Branch(&extra_wide, ne, scratch2, Operand(zero_reg));  in AdvanceBytecodeOffsetOrReturn()
  1018  __ Branch(has_optimized_code_or_state, ne, scratch, Operand(zero_reg));  in LoadTieringStateAndJumpIfNeedsProcessing()
  1032  __ Branch(&maybe_has_optimized_code, eq, scratch, Operand(zero_reg));  in MaybeOptimizeCodeOrTailCallOptimizedCodeSlot()
  [all...]
/third_party/node/deps/v8/src/wasm/baseline/loong64/

liftoff-assembler-loong64.h
  594   BranchShort(&binop, eq, temp1, Operand(zero_reg)); \
  650   BranchShort(&binop, eq, temp1, Operand(zero_reg)); \
  707   BranchShort(&exchange, eq, temp2, Operand(zero_reg)); \
  760   BranchShort(&compareExchange, eq, temp2, Operand(zero_reg)); \
  777   ExtractBits(temp2, expected.gp(), zero_reg, size, false); \
  781   BranchShort(&compareExchange, eq, temp2, Operand(zero_reg)); \
  962   St_d(zero_reg, liftoff::GetStackSlot(start + remainder));  in FillStackSlotsWithZero()
  966   St_w(zero_reg, liftoff::GetStackSlot(start + remainder));  in FillStackSlotsWithZero()
  977   St_d(zero_reg, MemOperand(a0, 0));  in FillStackSlotsWithZero()
  1015  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));  in emit_i32_divs()
  [all...]
/third_party/node/deps/v8/src/compiler/backend/loong64/

code-generator-loong64.cc
  59   return zero_reg;  in InputOrZeroRegister()
  345  __ BranchShort(&binop, eq, i.TempRegister(1), Operand(zero_reg)); \
  374  __ BranchShort(&binop, eq, i.TempRegister(1), Operand(zero_reg)); \
  401  __ BranchShort(&exchange, eq, i.TempRegister(2), Operand(zero_reg)); \
  420  Operand(zero_reg)); \
  446  __ ExtractBits(i.TempRegister(2), i.InputRegister(2), zero_reg, size, \
  454  Operand(zero_reg)); \
  553  RelocInfo::CODE_TARGET, ne, scratch, Operand(zero_reg));  in BailoutIfDeoptimized()
  817  Operand offset(zero_reg);  in AssembleArchInstruction()
  841  __ amswap_db_d(zero_reg, valu  in AssembleArchInstruction()
  [all...]
/third_party/node/deps/v8/src/compiler/backend/riscv64/

code-generator-riscv64.cc
  70   return zero_reg;  in InputOrZeroRegister()
  336  __ BranchShort(&binop, ne, i.TempRegister(1), Operand(zero_reg)); \
  364  __ BranchShort(&binop, ne, i.TempRegister(1), Operand(zero_reg)); \
  377  __ BranchShort(&exchange, ne, i.TempRegister(1), Operand(zero_reg)); \
  403  __ BranchShort(&exchange, ne, i.TempRegister(2), Operand(zero_reg)); \
  421  Operand(zero_reg)); \
  454  Operand(zero_reg)); \
  624  RelocInfo::CODE_TARGET, ne, kScratchReg, Operand(zero_reg));  in BailoutIfDeoptimized()
  898  Operand(zero_reg));  in AssembleArchInstruction()
  904  __ BranchShort(&done, eq, kScratchReg, Operand(zero_reg));  in AssembleArchInstruction()
  [all...]
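
InputOrZeroRegister() (also at the top of the loong64, mips and mips64 code generators in this listing) returns the hardwired zero register when an instruction input is the constant 0, so the backend never has to materialize a zero into a scratch register. A stand-alone sketch of the idea with hypothetical operand types, not the real InstructionOperand/Register classes:

    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    // Hypothetical operand type standing in for the code generator's input
    // accessor: either an immediate or a register number.
    struct Operand {
      bool is_immediate;
      int64_t imm;   // valid when is_immediate
      int reg;       // register number otherwise
    };

    constexpr int kZeroReg = 0;  // x0 / $zero / $r0

    int InputOrZeroRegister(const Operand& op) {
      if (op.is_immediate) {
        assert(op.imm == 0 && "only the constant 0 can be replaced by zero_reg");
        return kZeroReg;         // use the zero register instead of loading 0
      }
      return op.reg;
    }

    int main() {
      Operand zero_imm{true, 0, -1};
      Operand some_reg{false, 0, 12};
      std::printf("%d %d\n", InputOrZeroRegister(zero_imm),
                  InputOrZeroRegister(some_reg));   // prints: 0 12
    }
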
/third_party/node/deps/v8/src/wasm/baseline/mips64/

liftoff-assembler-mips64.h
  715   BranchShort(&binop, eq, temp1, Operand(zero_reg)); \
  733   BranchShort(&binop, eq, temp1, Operand(zero_reg)); \
  793   BranchShort(&exchange, eq, temp1, Operand(zero_reg)); \
  810   BranchShort(&exchange, eq, temp2, Operand(zero_reg)); \
  864   BranchShort(&compareExchange, eq, temp2, Operand(zero_reg)); \
  881   ExtractBits(temp2, expected.gp(), zero_reg, size, false); \
  885   BranchShort(&compareExchange, eq, temp2, Operand(zero_reg)); \
  1064  Sd(zero_reg, liftoff::GetStackSlot(start + remainder));  in FillStackSlotsWithZero()
  1068  Sw(zero_reg, liftoff::GetStackSlot(start + remainder));  in FillStackSlotsWithZero()
  1079  Sd(zero_reg, MemOperan  in FillStackSlotsWithZero()
  [all...]
/third_party/node/deps/v8/src/wasm/baseline/mips/

liftoff-assembler-mips.h
  176  assm->movz(tmp, reg, zero_reg);  in EnsureNoAlias()
  218  assm->TurboAssembler::Move(tmp.high_gp(), zero_reg);  in ChangeEndiannessLoad()
  226  assm->TurboAssembler::Move(tmp.high_gp(), zero_reg);  in ChangeEndiannessLoad()
  900  Sw(zero_reg, liftoff::GetStackSlot(start + offset));  in FillStackSlotsWithZero()
  911  Sw(zero_reg, MemOperand(a0));  in FillStackSlotsWithZero()
  926  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));  in emit_i32_divs()
  935  Operand(zero_reg));  in emit_i32_divs()
  942  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));  in emit_i32_divu()
  948  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));  in emit_i32_rems()
  954  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));  in emit_i32_remu()
  [all...]
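
The emit_i32_divs/divu/rems/remu hits here (and in the mips64, loong64 and riscv64 Liftoff headers in this listing) all start by comparing the divisor against zero_reg and trapping on equality; the signed divide additionally guards the INT32_MIN / -1 case, which Wasm defines as a trap. A plain-C++ sketch of the checks the generated code performs (the trap labels and backtracking themselves are V8-internal):

    #include <cstdint>
    #include <cstdio>
    #include <optional>

    // Sketch of the guards emitted before the hardware divide: trap when the
    // divisor is zero, and trap when INT32_MIN / -1 would overflow.
    std::optional<int32_t> I32DivS(int32_t lhs, int32_t rhs, const char** trap) {
      if (rhs == 0) {                       // "Branch(trap_div_by_zero, eq, rhs, zero_reg)"
        *trap = "div by zero";
        return std::nullopt;
      }
      if (lhs == INT32_MIN && rhs == -1) {  // result not representable in i32
        *trap = "unrepresentable";
        return std::nullopt;
      }
      return lhs / rhs;
    }

    int main() {
      const char* trap = nullptr;
      auto r = I32DivS(INT32_MIN, -1, &trap);
      std::printf("%s\n", r ? "ok" : trap);   // prints: unrepresentable
    }
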
/third_party/node/deps/v8/src/wasm/baseline/riscv64/

liftoff-assembler-riscv64.h
  729   __ mv(store_result, zero_reg);  in AtomicBinop()
  736   __ mv(store_result, zero_reg);  in AtomicBinop()
  878   BranchShort(&compareExchange, eq, temp2, Operand(zero_reg)); \
  895   ExtractBits(temp2, expected.gp(), zero_reg, size, false); \
  899   BranchShort(&compareExchange, eq, temp2, Operand(zero_reg)); \
  1090  Sd(zero_reg, liftoff::GetStackSlot(start + remainder));  in FillStackSlotsWithZero()
  1094  Sw(zero_reg, liftoff::GetStackSlot(start + remainder));  in FillStackSlotsWithZero()
  1105  Sd(zero_reg, MemOperand(a0));  in FillStackSlotsWithZero()
  1134  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));  in emit_i32_divs()
  1141  Operand(zero_reg));  in emit_i32_divs()
  [all...]
/third_party/node/deps/v8/src/compiler/backend/mips/

code-generator-mips.cc
  58   return zero_reg;  in InputOrZeroRegister()
  342  __ BranchShort(&binop, eq, i.TempRegister(1), Operand(zero_reg)); \
  363  __ BranchShort(&binop, eq, i.TempRegister(1), Operand(zero_reg)); \
  393  __ BranchShort(&binop, eq, i.TempRegister(1), Operand(zero_reg)); \
  422  __ BranchShort(&binop, eq, i.TempRegister(1), Operand(zero_reg)); \
  435  __ BranchShort(&exchange, eq, i.TempRegister(1), Operand(zero_reg)); \
  454  __ BranchShort(&exchange, eq, i.TempRegister(2), Operand(zero_reg)); \
  471  Operand(zero_reg)); \
  489  __ ExtractBits(i.InputRegister(2), i.InputRegister(2), zero_reg, size, \
  497  Operand(zero_reg)); \
  [all...]
/third_party/node/deps/v8/src/compiler/backend/mips64/

code-generator-mips64.cc
  59   return zero_reg;  in InputOrZeroRegister()
  350  __ BranchShort(&binop, eq, i.TempRegister(1), Operand(zero_reg)); \
  378  __ BranchShort(&binop, eq, i.TempRegister(1), Operand(zero_reg)); \
  391  __ BranchShort(&exchange, eq, i.TempRegister(1), Operand(zero_reg)); \
  417  __ BranchShort(&exchange, eq, i.TempRegister(2), Operand(zero_reg)); \
  435  Operand(zero_reg)); \
  460  __ ExtractBits(i.TempRegister(2), i.InputRegister(2), zero_reg, size, \
  468  Operand(zero_reg)); \
  568  RelocInfo::CODE_TARGET, ne, kScratchReg, Operand(zero_reg));  in BailoutIfDeoptimized()
  872  Operand(zero_reg));  in AssembleArchInstruction()
  [all...]
/third_party/node/deps/v8/src/baseline/loong64/

baseline-assembler-loong64-inl.h
  143  __ Branch(target, AsMasmCondition(cc), scratch, Operand(zero_reg));  in TestAndBranch()
  401  __ Branch(skip_interrupt_label, ge, interrupt_budget, Operand(zero_reg));  in AddToInterruptBudgetAndJumpIfNotExceeded()
  420  __ Branch(skip_interrupt_label, ge, interrupt_budget, Operand(zero_reg));  in AddToInterruptBudgetAndJumpIfNotExceeded()
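
Both AddToInterruptBudgetAndJumpIfNotExceeded hits compare the updated interrupt budget against zero_reg and jump past the slow path while it is still non-negative. A rough sketch of that control flow; the weight sign convention and what happens when the budget is exhausted are assumptions, not taken from these hits:

    #include <cstdio>

    // Rough sketch of the budget check above. Assumption: the weight added can
    // be negative (e.g. for back edges), and a negative budget is what falls
    // through into the interrupt handling; the reset/runtime behaviour is V8's.
    struct FeedbackCell { int interrupt_budget; };

    bool UpdateBudgetAndCheck(FeedbackCell& cell, int weight) {
      cell.interrupt_budget += weight;
      // "Branch(skip_interrupt_label, ge, interrupt_budget, Operand(zero_reg))"
      return cell.interrupt_budget >= 0;   // true: skip the interrupt handling
    }

    int main() {
      FeedbackCell cell{5};
      std::printf("%d %d\n",
                  UpdateBudgetAndCheck(cell, -3),    // 1: budget now 2, skip
                  UpdateBudgetAndCheck(cell, -10));  // 0: budget now -8, slow path
    }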