Lines matching refs:scratch (cross-references to the scratch register/parameter in the s390 TurboAssembler/MacroAssembler; the number at the start of each match is its line in the source file)

299 int TurboAssembler::PushCallerSaved(SaveFPRegsMode fp_mode, Register scratch,
310 MultiPushF64OrV128(kCallerSavedDoubles, scratch);
317 int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register scratch,
322 MultiPopF64OrV128(kCallerSavedDoubles, scratch);
409 Register scratch = temps.Acquire();
410 Move(scratch, reference);
411 Jump(scratch);
468 // preserve scratch registers across calls.
493 void TurboAssembler::Drop(Register count, Register scratch) {
494 ShiftLeftU64(scratch, count, Operand(kSystemPointerSizeLog2));
495 AddS64(sp, sp, scratch);
602 void TurboAssembler::PushArray(Register array, Register size, Register scratch,
607 ShiftLeftU64(scratch, size, Operand(kSystemPointerSizeLog2));
608 lay(scratch, MemOperand(array, scratch));
610 CmpS64(array, scratch);
612 lay(scratch, MemOperand(scratch, -kSystemPointerSize));
614 MoveChar(MemOperand(sp), MemOperand(scratch), Operand(kSystemPointerSize));
619 ShiftLeftU64(scratch, size, Operand(kSystemPointerSizeLog2));
620 lay(scratch, MemOperand(array, scratch));
623 CmpS64(scratch2, scratch);
672 void TurboAssembler::MultiPushV128(DoubleRegList dregs, Register scratch,
682 StoreV128(dreg, MemOperand(location, stack_offset), scratch);
700 void TurboAssembler::MultiPopV128(DoubleRegList dregs, Register scratch,
707 LoadV128(dreg, MemOperand(location, stack_offset), scratch);
714 void TurboAssembler::MultiPushF64OrV128(DoubleRegList dregs, Register scratch,
726 MultiPushV128(dregs, scratch);
737 MultiPushV128(dregs, scratch);
748 void TurboAssembler::MultiPopF64OrV128(DoubleRegList dregs, Register scratch,
760 MultiPopV128(dregs, scratch);
769 MultiPopV128(dregs, scratch);
788 const Register& scratch) {
792 LoadU64(destination, field_operand, scratch);
798 const Register& scratch) {
802 LoadU64(destination, field_operand, scratch);
821 const Register& scratch) {
827 StoreU64(value, dst_field_operand, scratch);
1015 // Will clobber 4 registers: object, address, scratch, ip. The
1044 value, // Used as scratch.
1047 value, // Used as scratch.
1435 Register scratch = no_reg;
1437 scratch = ip;
1438 mov(scratch, Operand(StackFrame::TypeToMarker(type)));
1440 PushCommonFrame(scratch);
1624 void MacroAssembler::StackOverflowCheck(Register num_args, Register scratch,
1629 LoadU64(scratch, StackLimitAsMemOperand(StackLimitKind::kRealStackLimit));
1630 // Make scratch the space we have left. The stack might already be overflowed
1631 // here which will cause scratch to become negative.
1632 SubS64(scratch, sp, scratch);
1635 CmpS64(scratch, r0);
1665 Register scratch = r6;
1666 StackOverflowCheck(expected_parameter_count, scratch, &stack_overflow);
1675 ShiftLeftU64(scratch, expected_parameter_count,
1677 SubS64(sp, sp, scratch);
1692 LoadRoot(scratch, RootIndex::kUndefinedValue);
1696 StoreU64(scratch, MemOperand(ip));
1894 Register scratch = temps.Acquire();
1895 mov(scratch, value);
1896 slgfi(scratch, Operand(lower_limit));
1897 CmpU64(scratch, Operand(higher_limit - lower_limit));
2135 void MacroAssembler::AssertConstructor(Register object, Register scratch) {
2140 LoadMap(scratch, object);
2141 tm(FieldMemOperand(scratch, Map::kBitFieldOffset),
2217 Register scratch) {
2223 LoadMap(scratch, object);
2224 CompareInstanceType(scratch, scratch, ALLOCATION_SITE_TYPE);
2248 Register scratch) {
2256 mov(scratch, sp);
2261 StoreU64(scratch,
2270 Register scratch) {
2271 PrepareCallCFunction(num_reg_arguments, 0, scratch);
2378 Register scratch, // scratch may be same register as object
2381 ClearRightImm(scratch, object, Operand(kPageSizeBits));
2405 tm(MemOperand(scratch, BasicMemoryChunk::kFlagsOffset + byte_offset),
2408 LoadU64(scratch, MemOperand(scratch, BasicMemoryChunk::kFlagsOffset));
2409 AndP(r0, scratch, Operand(mask));
3584 void TurboAssembler::CmpSmiLiteral(Register src1, Smi smi, Register scratch) {
3592 LoadSmiLiteral(scratch, smi);
3593 cgr(src1, scratch);
3599 Register scratch) {
3604 DCHECK(scratch != no_reg && scratch != r0 && mem.rx() == r0);
3605 DCHECK(scratch != mem.rb());
3606 mov(scratch, Operand(offset));
3607 src = MemOperand(mem.rb(), scratch);
3614 Register scratch) {
3616 DCHECK(scratch != no_reg);
3617 DCHECK(scratch != r0);
3618 mov(scratch, Operand(mem.offset()));
3619 stg(src, MemOperand(mem.rb(), scratch));
3627 Register scratch) {
3636 mov(scratch, opnd);
3637 StoreU64(scratch, mem);
3702 Register scratch) {
3706 DCHECK(scratch != no_reg);
3707 mov(scratch, Operand(offset));
3709 lgf(dst, MemOperand(mem.rb(), scratch));
3711 l(dst, MemOperand(mem.rb(), scratch));
3738 Register scratch) {
3745 } else if (scratch != no_reg) {
3746 // Materialize offset into scratch register.
3747 mov(scratch, Operand(offset));
3748 llgf(dst, MemOperand(base, scratch));
3761 } else if (scratch != no_reg) {
3762 // Materialize offset into scratch register.
3763 mov(scratch, Operand(offset));
3773 ly(dst, MemOperand(base, scratch));
3779 // TODO(s390x): Add scratch reg
3796 // TODO(s390x): Add scratch reg
3813 // TODO(s390x): Add scratch reg
3831 Register scratch) {
3836 Register scratch) {
3842 Register scratch) {
3872 Register scratch) {
3873 lrvg(scratch, opnd);
3874 ldgr(dst, scratch);
3878 Register scratch) {
3879 lrv(scratch, opnd);
3880 ShiftLeftU64(scratch, scratch, Operand(32));
3881 ldgr(dst, scratch);
3885 Register scratch) {
3887 DCHECK(scratch != no_reg);
3888 DCHECK(scratch != r0);
3889 mov(scratch, Operand(mem.offset()));
3890 strvg(src, MemOperand(mem.rb(), scratch));
3897 Register scratch) {
3899 DCHECK(scratch != no_reg);
3900 DCHECK(scratch != r0);
3901 mov(scratch, Operand(mem.offset()));
3902 strv(src, MemOperand(mem.rb(), scratch));
3909 Register scratch) {
3911 DCHECK(scratch != no_reg);
3912 DCHECK(scratch != r0);
3913 mov(scratch, Operand(mem.offset()));
3914 strvh(src, MemOperand(mem.rb(), scratch));
3921 Register scratch) {
3923 lgdr(scratch, src);
3924 strvg(scratch, opnd);
3928 Register scratch) {
3930 lgdr(scratch, src);
3931 ShiftRightU64(scratch, scratch, Operand(32));
3932 strv(scratch, opnd);
3952 Register scratch) {
3953 LoadU64(dst, mem, scratch);
3957 Register scratch) {
3958 LoadS32(dst, opnd, scratch);
3962 Register scratch) {
3963 LoadU32(dst, opnd, scratch);
3981 Register scratch) {
3982 USE(scratch);
3987 Register scratch) {
3988 USE(scratch);
3993 Register scratch) {
3994 StoreU64(src, mem, scratch);
3998 Register scratch) {
3999 StoreU32(src, mem, scratch);
4003 Register scratch) {
4004 StoreU16(src, mem, scratch);
4008 Register scratch) {
4013 Register scratch) {
4083 Register scratch) {
4084 DCHECK(scratch != r0);
4089 lay(scratch, mem);
4090 vl(dst, MemOperand(scratch), Condition(0));
4113 Register scratch) {
4114 DCHECK(scratch != r0);
4119 lay(scratch, mem);
4120 vst(src, MemOperand(scratch), Condition(0));
4231 DoubleRegister scratch) {
4235 ley(scratch, opnd);
4236 aebr(dst, scratch);
4241 DoubleRegister scratch) {
4245 ldy(scratch, opnd);
4246 adbr(dst, scratch);
4251 DoubleRegister scratch) {
4255 ley(scratch, opnd);
4256 sebr(dst, scratch);
4261 DoubleRegister scratch) {
4265 ldy(scratch, opnd);
4266 sdbr(dst, scratch);
4271 DoubleRegister scratch) {
4275 ley(scratch, opnd);
4276 meebr(dst, scratch);
4281 DoubleRegister scratch) {
4285 ldy(scratch, opnd);
4286 mdbr(dst, scratch);
4291 DoubleRegister scratch) {
4295 ley(scratch, opnd);
4296 debr(dst, scratch);
4301 DoubleRegister scratch) {
4305 ldy(scratch, opnd);
4306 ddbr(dst, scratch);
4311 DoubleRegister scratch) {
4315 ley(scratch, opnd);
4316 ldebr(dst, scratch);
4323 Register scratch) {
4336 } else if (scratch != no_reg) {
4337 // Materialize offset into scratch register.
4338 mov(scratch, Operand(offset));
4340 // scratch is no_reg
4349 StoreU32(src, MemOperand(base, scratch));
4364 Register scratch) {
4369 DCHECK(scratch != no_reg);
4370 mov(scratch, Operand(offset));
4372 lgh(dst, MemOperand(base, scratch));
4374 lh(dst, MemOperand(base, scratch));
4392 Register scratch) {
4401 DCHECK(scratch != no_reg);
4402 mov(scratch, Operand(offset));
4403 sth(src, MemOperand(base, scratch));
4410 Register scratch) {
4419 DCHECK(scratch != no_reg);
4420 mov(scratch, Operand(offset));
4421 stc(src, MemOperand(base, scratch));
4576 void TurboAssembler::SwapP(Register src, Register dst, Register scratch) {
4578 DCHECK(!AreAliased(src, dst, scratch));
4579 mov(scratch, src);
4581 mov(dst, scratch);
4584 void TurboAssembler::SwapP(Register src, MemOperand dst, Register scratch) {
4585 if (dst.rx() != r0) DCHECK(!AreAliased(src, dst.rx(), scratch));
4586 if (dst.rb() != r0) DCHECK(!AreAliased(src, dst.rb(), scratch));
4587 DCHECK(!AreAliased(src, scratch));
4588 mov(scratch, src);
4590 StoreU64(scratch, dst);
4607 DoubleRegister scratch) {
4609 DCHECK(!AreAliased(src, dst, scratch));
4610 ldr(scratch, src);
4612 ldr(dst, scratch);
4616 DoubleRegister scratch) {
4617 DCHECK(!AreAliased(src, scratch));
4618 ldr(scratch, src);
4620 StoreF32(scratch, dst);
4624 DoubleRegister scratch) {
4625 // push d0, to be used as scratch
4628 LoadF32(scratch, src);
4630 StoreF32(scratch, dst);
4638 DoubleRegister scratch) {
4640 DCHECK(!AreAliased(src, dst, scratch));
4641 ldr(scratch, src);
4643 ldr(dst, scratch);
4647 DoubleRegister scratch) {
4648 DCHECK(!AreAliased(src, scratch));
4649 ldr(scratch, src);
4651 StoreF64(scratch, dst);
4655 DoubleRegister scratch) {
4656 // push d0, to be used as scratch
4659 LoadF64(scratch, src);
4661 StoreF64(scratch, dst);
4669 Simd128Register scratch) {
4671 vlr(scratch, src, Condition(0), Condition(0), Condition(0));
4673 vlr(dst, scratch, Condition(0), Condition(0), Condition(0));
4677 Simd128Register scratch) {
4678 DCHECK(!AreAliased(src, scratch));
4679 vlr(scratch, src, Condition(0), Condition(0), Condition(0));
4681 StoreV128(scratch, dst, ip);
4685 Simd128Register scratch) {
4686 // push d0, to be used as scratch
4689 LoadV128(scratch, src, ip);
4691 StoreV128(scratch, dst, ip);
4768 Register scratch = r1;
4770 DCHECK(!AreAliased(destination, scratch));
4771 DCHECK(!AreAliased(code_object, scratch));
4776 LoadS32(scratch, FieldMemOperand(code_object, Code::kFlagsOffset));
4777 tmlh(scratch, Operand(Code::IsOffHeapTrampoline::kMask >> 16));
4789 LoadS32(scratch, FieldMemOperand(code_object, Code::kBuiltinIndexOffset));
4790 ShiftLeftU64(destination, scratch, Operand(kSystemPointerSizeLog2));
4850 dst); // will modify a register pair scratch and scratch + 1
4857 src); // will modify a register pair scratch and scratch + 1
5010 Register scratch) {
5014 llgfr(scratch, output);
5015 RotateInsertSelectBits(scratch, value, Operand(start), Operand(end),
5017 csy(output, scratch, MemOperand(addr, offset));
5023 Register output, Register scratch) {
5033 scratch); \
5044 scratch); \
5075 Register output, Register scratch) {
5085 -idx * 2, scratch); \
5096 -idx * 2, scratch); \
5169 uint8_t imm_lane_idx, Register scratch) {
5170 vlgv(scratch, src, MemOperand(r0, 7 - imm_lane_idx), Condition(1));
5171 lghr(dst, scratch);
5180 uint8_t imm_lane_idx, Register scratch) {
5181 vlgv(scratch, src, MemOperand(r0, 15 - imm_lane_idx), Condition(0));
5182 lgbr(dst, scratch);
5187 Register scratch) {
5188 vlgv(scratch, src2, MemOperand(r0, 0), Condition(3));
5192 vlvg(dst, scratch, MemOperand(r0, 1 - imm_lane_idx), Condition(3));
5197 Register scratch) {
5198 vlgv(scratch, src2, MemOperand(r0, 0), Condition(2));
5202 vlvg(dst, scratch, MemOperand(r0, 3 - imm_lane_idx), Condition(2));
5398 Register src2, Simd128Register scratch) { \
5399 vlvg(scratch, src2, MemOperand(r0, 0), Condition(c1)); \
5400 vrep(scratch, scratch, Operand(0), Condition(c1)); \
5401 op(dst, src1, scratch, Condition(0), Condition(0), Condition(c1)); \
5429 Simd128Register src2, Simd128Register scratch) { \
5430 mul_even(scratch, src1, src2, Condition(0), Condition(0), \
5433 merge(dst, scratch, dst, Condition(0), Condition(0), Condition(mode + 1)); \
5549 Simd128Register src2, Simd128Register scratch) {
5550 vceq(scratch, src1, src2, Condition(0), Condition(2));
5552 vo(dst, dst, scratch, Condition(0), Condition(0), Condition(2));
5569 Simd128Register src2, Simd128Register scratch) {
5570 vceq(scratch, src1, src2, Condition(0), Condition(1));
5572 vo(dst, dst, scratch, Condition(0), Condition(0), Condition(1));
5589 Simd128Register src2, Simd128Register scratch) {
5590 vceq(scratch, src1, src2, Condition(0), Condition(0));
5592 vo(dst, dst, scratch, Condition(0), Condition(0), Condition(0));
5642 Register scratch) {
5644 xgr(scratch, scratch);
5646 locgr(Condition(8), dst, scratch);
5727 #define VECTOR_PACK_UNSIGNED(dst, src1, src2, scratch, mode) \
5730 vmx(scratch, src1, kDoubleRegZero, Condition(0), Condition(0), \
5736 Simd128Register scratch) {
5738 VECTOR_PACK_UNSIGNED(dst, src1, src2, scratch, 2)
5739 vpkls(dst, dst, scratch, Condition(0), Condition(2));
5745 Simd128Register scratch) {
5747 VECTOR_PACK_UNSIGNED(dst, src1, src2, scratch, 1)
5748 vpkls(dst, dst, scratch, Condition(0), Condition(1));
5894 Simd128Register scratch,
5896 vx(scratch, scratch, scratch, Condition(0), Condition(0), Condition(3));
5897 vsum(dst, src, scratch, Condition(0), Condition(0), Condition(1));
5917 Simd128Register scratch) {
5919 vlr(scratch, src, Condition(0), Condition(0), Condition(0));
5920 vfce(scratch, scratch, scratch, Condition(0), Condition(0), Condition(3));
5921 vn(scratch, src, scratch, Condition(0), Condition(0), Condition(0));
5922 vcgd(scratch, scratch, Condition(5), Condition(0), Condition(3));
5924 vpks(dst, dst, scratch, Condition(0), Condition(3));
5929 Simd128Register scratch) {
5930 vclgd(scratch, src, Condition(5), Condition(0), Condition(3));
5932 vpkls(dst, dst, scratch, Condition(0), Condition(3));
5957 // Clear scratch.
5974 Simd128Register scratch) {
5975 vme(scratch, src1, src2, Condition(0), Condition(0), Condition(1));
5977 va(dst, scratch, dst, Condition(0), Condition(0), Condition(2));
5980 #define Q15_MUL_ROAUND(accumulator, src1, src2, const_val, scratch, unpack) \
5981 unpack(scratch, src1, Condition(0), Condition(0), Condition(1)); \
5983 vml(accumulator, scratch, accumulator, Condition(0), Condition(0), \
5987 vrepi(scratch, Operand(15), Condition(2)); \
5988 vesrav(accumulator, accumulator, scratch, Condition(0), Condition(0), \
6021 Simd128Register dst, const MemOperand& mem, Register scratch) { \
6026 scalar_instr(scratch, mem); \
6027 vlvg(dst, scratch, MemOperand(r0, 0), Condition(condition)); \
6044 Simd128Register dst, const MemOperand& mem, Register scratch) { \
6048 LoadU64LE(scratch, mem); \
6049 vlvg(dst, scratch, MemOperand(r0, 0), Condition(3)); \
6058 Register scratch) {
6064 LoadU32LE(scratch, mem);
6065 vlvg(dst, scratch, MemOperand(r0, 3), Condition(2));
6069 Register scratch) {
6075 LoadU64LE(scratch, mem);
6076 vlvg(dst, scratch, MemOperand(r0, 1), Condition(3));
6088 Register scratch) { \
6093 scalar_instr(scratch, mem); \
6094 vlvg(dst, scratch, MemOperand(r0, lane), Condition(condition)); \
6109 Register scratch) { \
6114 vlgv(scratch, src, MemOperand(r0, lane), Condition(condition)); \
6115 scalar_instr(scratch, mem); \
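Two of the match groups above carry enough reasoning to be worth restating outside the assembler. First, the StackOverflowCheck matches at source lines 1624-1635: the scratch register is loaded with "the space we have left" (sp minus the real stack limit), which may already be negative if the stack has overflowed, so the final comparison must be signed. The following standalone sketch is not taken from the V8 sources; the names would_overflow, stack_pointer, and stack_limit are made up for illustration.

#include <cassert>
#include <cstdint>

// Hypothetical restatement of the check at source lines 1624-1635: compute
// the space left on the stack (possibly negative) and compare it, signed,
// against the space the arguments will need.
static bool would_overflow(uint64_t stack_pointer, uint64_t stack_limit,
                           uint64_t num_args, uint64_t pointer_size = 8) {
  // SubS64(scratch, sp, scratch): space left, as a signed quantity, since
  // sp may already be below the limit.
  int64_t space_left = static_cast<int64_t>(stack_pointer - stack_limit);
  // The arguments need num_args * kSystemPointerSize bytes.
  int64_t space_needed = static_cast<int64_t>(num_args * pointer_size);
  // Overflow when the space left is not greater than the space needed.
  return space_left <= space_needed;
}

int main() {
  assert(!would_overflow(0x8000, 0x4000, 4));   // plenty of room
  assert(would_overflow(0x4008, 0x4000, 4));    // too little room
  assert(would_overflow(0x3000, 0x4000, 1));    // already past the limit
  return 0;
}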
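Second, the matches at source lines 1894-1897 (mov, slgfi, CmpU64) use a scratch register for the classic single-compare range check: subtract the lower bound, then do one unsigned comparison against the width of the range. A minimal standalone sketch of that arithmetic, again not taken from the assembler source, with the hypothetical helper name in_range:

#include <cassert>
#include <cstdint>

// (value - lower), taken as an unsigned number, is <= (higher - lower)
// exactly when lower <= value <= higher, because a value below the lower
// bound wraps around to a huge unsigned result.
static bool in_range(uint64_t value, uint64_t lower, uint64_t higher) {
  return (value - lower) <= (higher - lower);
}

int main() {
  assert(in_range(5, 3, 9));    // inside the range
  assert(!in_range(2, 3, 9));   // below: 2 - 3 wraps to a huge value
  assert(!in_range(10, 3, 9));  // above the range
  return 0;
}

The payoff of the trick is that both bounds are tested with a single subtraction and a single unsigned compare, which is why only one scratch register is needed there.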