/third_party/node/deps/v8/src/codegen/

constant-pool.cc
    100  void ConstantPoolBuilder::EmitSharedEntries(Assembler* assm,   (in EmitSharedEntries)
    110  int offset = assm->pc_offset() - base;   (in EmitSharedEntries)
    113  assm->dp(shared_it->value());   (in EmitSharedEntries)
    115  assm->dq(shared_it->value64());   (in EmitSharedEntries)
    120  assm->PatchConstantPoolAccessInstruction(shared_it->position(), offset,   (in EmitSharedEntries)
    125  void ConstantPoolBuilder::EmitGroup(Assembler* assm,   (in EmitGroup)
    140  EmitSharedEntries(assm, type);   (in EmitGroup)
    161  offset = assm->pc_offset() - base;   (in EmitGroup)
    164  assm->dp(it->value());   (in EmitGroup)
    166  assm ...   (in EmitGroup)
    184  Emit(Assembler* assm)
    218  ConstantPool(Assembler* assm)
    440  BlockScope(Assembler* assm, size_t margin)
    446  BlockScope(Assembler* assm, PoolEmissionCheck check)
    466  ConstantPool(Assembler* assm)
    689  BlockScope(Assembler* assm, size_t margin)
    695  BlockScope(Assembler* assm, PoolEmissionCheck check)
    [all...]

code-comments.cc
    66  void CodeCommentsWriter::Emit(Assembler* assm) {   (in Emit)
    67  assm->dd(section_size());   (in Emit)
    69  assm->dd(i->pc_offset);   (in Emit)
    70  assm->dd(i->comment_length());   (in Emit)
    72  EnsureSpace ensure_space(assm);   (in Emit)
    73  assm->db(c);   (in Emit)
    75  assm->db('\0');   (in Emit)

constant-pool.h
    132  int Emit(Assembler* assm);
    143  void EmitSharedEntries(Assembler* assm, ConstantPoolEntry::Type type);
    144  void EmitGroup(Assembler* assm, ConstantPoolEntry::Access access,
    253  explicit ConstantPool(Assembler* assm);

code-comments.h
    39  void Emit(Assembler* assm);
/third_party/node/deps/v8/src/wasm/baseline/ia32/

liftoff-assembler-ia32.h
    71  inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Register base,   (in Load)
    79  assm->mov(dst.gp(), src);   (in Load)
    82  assm->mov(dst.low_gp(), src);   (in Load)
    83  assm->mov(dst.high_gp(), Operand(base, offset + 4));   (in Load)
    86  assm->movss(dst.fp(), src);   (in Load)
    89  assm->movsd(dst.fp(), src);   (in Load)
    92  assm->movdqu(dst.fp(), src);   (in Load)
    99  inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,   (in Store)
    107  assm->mov(dst, src.gp());   (in Store)
    110  assm ...   (in Store)
    130  push(LiftoffAssembler* assm, LiftoffRegister reg, ValueKind kind, int padding = 0)
    165  SignExtendI32ToI64(Assembler* assm, LiftoffRegister reg)
    172  GetTmpByteRegister(LiftoffAssembler* assm, Register candidate)
    179  MoveStackValue(LiftoffAssembler* assm, const Operand& src, const Operand& dst)
    1330  EmitCommutativeBinOp(LiftoffAssembler* assm, Register dst, Register lhs, Register rhs)
    1341  EmitCommutativeBinOpImm(LiftoffAssembler* assm, Register dst, Register lhs, int32_t imm)
    1355  EmitInt32DivOrRem(LiftoffAssembler* assm, Register dst, Register lhs, Register rhs, Label* trap_div_by_zero, Label* trap_div_unrepresentable)
    1469  EmitShiftOperation(LiftoffAssembler* assm, Register dst, Register src, Register amount, void (Assembler::*emit_shift)(Register))
    1556  OpWithCarry(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
    1585  OpWithCarryI(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister lhs, int64_t imm)
    1699  Emit64BitShiftOperation(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister src, Register amount, void (TurboAssembler::*emit_shift)(Register, Register))
    1935  EmitFloatMinOrMax(LiftoffAssembler* assm, DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs, MinOrMax min_or_max)
    2212  ConvertFloatToIntAndBack(LiftoffAssembler* assm, Register dst, DoubleRegister src, DoubleRegister converted_back, LiftoffRegList pinned)
    2238  EmitTruncateFloatToInt(LiftoffAssembler* assm, Register dst, DoubleRegister src, Label* trap)
    2273  EmitSatTruncateFloatToInt(LiftoffAssembler* assm, Register dst, DoubleRegister src)
    2507  setcc_32_no_spill(LiftoffAssembler* assm, Condition cond, Register dst, Register tmp_byte_reg)
    2514  setcc_32(LiftoffAssembler* assm, Condition cond, Register dst)
    2597  EmitFloatSetCond(LiftoffAssembler* assm, Condition cond, Register dst, DoubleRegister lhs, DoubleRegister rhs)
    2654  EmitSimdCommutativeBinOp(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, base::Optional<CpuFeature> feature = base::nullopt)
    2676  EmitSimdNonCommutativeBinOp(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, base::Optional<CpuFeature> feature = base::nullopt)
    2700  EmitSimdShiftOp(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister operand, LiftoffRegister count)
    2720  EmitSimdShiftOpImm(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister operand, int32_t count)
    2733  EmitAnyTrue(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister src)
    2743  EmitAllTrue(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister src, base::Optional<CpuFeature> feature = base::nullopt)
    [all...]
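Note: the ia32 Load() hits above show the per-kind dispatch Liftoff uses: i32 is a single mov, i64 has no 64-bit GP register on ia32 and is loaded as a low/high register pair from offset and offset + 4, and f32/f64/s128 use movss/movsd/movdqu. A compilable sketch of that dispatch; RecordingAsm is a stand-in that only records the instructions it would emit, not a V8 class.

    #include <cstdint>
    #include <sstream>
    #include <string>
    #include <vector>

    enum class ValueKind { kI32, kI64, kF32, kF64, kS128 };

    struct RecordingAsm {  // records the instructions it would emit
      std::vector<std::string> code;
      void emit(const std::string& op, const std::string& dst,
                const std::string& base, int32_t off) {
        std::ostringstream s;
        s << op << " " << dst << ", [" << base << " + " << off << "]";
        code.push_back(s.str());
      }
    };

    void Load(RecordingAsm* assm, ValueKind kind, const std::string& base,
              int32_t offset) {
      switch (kind) {
        case ValueKind::kI32:
          assm->emit("mov", "gp", base, offset);
          break;
        case ValueKind::kI64:  // no 64-bit GP register: low word, then high word
          assm->emit("mov", "low_gp", base, offset);
          assm->emit("mov", "high_gp", base, offset + 4);
          break;
        case ValueKind::kF32:
          assm->emit("movss", "fp", base, offset);
          break;
        case ValueKind::kF64:
          assm->emit("movsd", "fp", base, offset);
          break;
        case ValueKind::kS128:
          assm->emit("movdqu", "fp", base, offset);
          break;
      }
    }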
/third_party/node/deps/v8/src/codegen/shared-ia32-x64/

macro-assembler-shared-ia32-x64.h
    100  Assembler* assm;   (member)
    110  CpuFeatureScope scope(assm, AVX);   (in emit)
    111  (assm->*avx)(dst, dst, arg, args...);   (in emit)
    114  CpuFeatureScope scope(assm, *feature);   (in emit)
    115  (assm->*no_avx)(dst, arg, args...);   (in emit)
    117  (assm->*no_avx)(dst, arg, args...);   (in emit)
    129  CpuFeatureScope scope(assm, AVX);   (in emit)
    130  (assm->*avx)(dst, arg, args...);   (in emit)
    134  CpuFeatureScope scope(assm, *feature);   (in emit)
    135  (assm ...   (in emit)
    495  PinsrHelper(Assembler* assm, AvxFn<Op> avx, NoAvxFn<Op> noavx, XMMRegister dst, XMMRegister src1, Op src2, uint8_t imm8, uint32_t* load_pc_offset = nullptr, base::Optional<CpuFeature> feature = base::nullopt)
    977  SharedTurboAssembler* assm = this;   (local in FloatUnop)
    [all...]
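Note: the emit() hits above dispatch between AVX and SSE through member-function pointers: when AVX is available, the non-destructive three-operand form is called as (dst, dst, arg, ...) under a CpuFeatureScope; otherwise the destructive two-operand SSE form is used. A self-contained sketch of the pattern; MockAsm and its vaddps/addps are stand-ins, not the V8 classes.

    #include <iostream>

    struct MockAsm {  // stand-in for the shared macro assembler
      bool avx_supported = true;  // would be CpuFeatures::IsSupported(AVX)
      void vaddps(int dst, int src1, int src2) {
        std::cout << "vaddps xmm" << dst << ", xmm" << src1 << ", xmm" << src2 << "\n";
      }
      void addps(int dst, int src) {
        std::cout << "addps xmm" << dst << ", xmm" << src << "\n";
      }
    };

    template <typename... Args>
    void Emit(MockAsm* assm, void (MockAsm::*avx)(int, int, Args...),
              void (MockAsm::*no_avx)(int, Args...), int dst, Args... args) {
      if (assm->avx_supported) {
        // The real helper opens a CpuFeatureScope(assm, AVX) here.
        (assm->*avx)(dst, dst, args...);  // non-destructive: dst doubles as src1
      } else {
        (assm->*no_avx)(dst, args...);    // destructive two-operand SSE form
      }
    }

    int main() {
      MockAsm a;
      Emit(&a, &MockAsm::vaddps, &MockAsm::addps, /*dst=*/0, /*src=*/1);
    }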
/third_party/node/deps/v8/src/wasm/baseline/mips/

liftoff-assembler-mips.h
    90  inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Register base,   (in Load)
    98  assm->lw(dst.gp(), src);   (in Load)
    101  assm->lw(dst.low_gp(),   (in Load)
    103  assm->lw(dst.high_gp(),   (in Load)
    107  assm->lwc1(dst.fp(), src);   (in Load)
    110  assm->Ldc1(dst.fp(), src);   (in Load)
    117  inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,   (in Store)
    125  assm->Usw(src.gp(), dst);   (in Store)
    128  assm->Usw(src.low_gp(),   (in Store)
    130  assm ...   (in Store)
    144  push(LiftoffAssembler* assm, LiftoffRegister reg, ValueKind kind)
    168  EnsureNoAlias(Assembler* assm, Register reg, LiftoffRegister must_not_alias, UseScratchRegisterScope* temps)
    181  ChangeEndiannessLoad(LiftoffAssembler* assm, LiftoffRegister dst, LoadType type, LiftoffRegList pinned)
    250  ChangeEndiannessStore(LiftoffAssembler* assm, LiftoffRegister src, StoreType type, LiftoffRegList pinned)
    1089  Emit64BitShiftOperation(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister src, Register amount, void (TurboAssembler::*emit_shift)(Register, Register, Register, Register, Register, Register, Register))
    [all...]
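Note: the ChangeEndiannessLoad()/ChangeEndiannessStore() helpers listed above exist because Wasm memory is little-endian, so on a big-endian MIPS configuration a loaded value has to be byte-reversed before use (and again before a store). The exact emitted sequence is not shown in the hits; a scalar illustration of the transformation for a 32-bit value:

    #include <cstdint>

    // Byte-reverse a 32-bit value, as a big-endian target must do after a
    // little-endian wasm memory load (and before the matching store).
    uint32_t ByteSwap32(uint32_t v) {
      return (v >> 24) | ((v >> 8) & 0x0000FF00u) |
             ((v << 8) & 0x00FF0000u) | (v << 24);
    }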
/third_party/node/deps/v8/src/wasm/baseline/x64/

liftoff-assembler-x64.h
    79  inline Operand GetMemOp(LiftoffAssembler* assm, Register addr, Register offset,   (in GetMemOp)
    88  assm->TurboAssembler::Move(scratch, offset_imm);   (in GetMemOp)
    89  if (offset != no_reg) assm->addq(scratch, offset);   (in GetMemOp)
    93  inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Operand src,   (in Load)
    97  assm->movl(dst.gp(), src);   (in Load)
    103  assm->movq(dst.gp(), src);   (in Load)
    106  assm->Movss(dst.fp(), src);   (in Load)
    109  assm->Movsd(dst.fp(), src);   (in Load)
    112  assm->Movdqu(dst.fp(), src);   (in Load)
    119  inline void Store(LiftoffAssembler* assm, Operan...
    147  push(LiftoffAssembler* assm, LiftoffRegister reg, ValueKind kind, int padding = 0)
    1055  EmitCommutativeBinOp(LiftoffAssembler* assm, Register dst, Register lhs, Register rhs)
    1067  EmitCommutativeBinOpImm(LiftoffAssembler* assm, Register dst, Register lhs, int32_t imm)
    1083  EmitIntDivOrRem(LiftoffAssembler* assm, Register dst, Register lhs, Register rhs, Label* trap_div_by_zero, Label* trap_div_unrepresentable)
    1223  EmitShiftOperation(LiftoffAssembler* assm, Register dst, Register src, Register amount, void (Assembler::*emit_shift)(Register))
    1539  EmitFloatMinOrMax(LiftoffAssembler* assm, DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs, MinOrMax min_or_max)
    1805  ConvertFloatToIntAndBack(LiftoffAssembler* assm, Register dst, DoubleRegister src, DoubleRegister converted_back)
    1840  EmitTruncateFloatToInt(LiftoffAssembler* assm, Register dst, DoubleRegister src, Label* trap)
    1872  EmitSatTruncateFloatToInt(LiftoffAssembler* assm, Register dst, DoubleRegister src)
    1949  EmitSatTruncateFloatToUInt64(LiftoffAssembler* assm, Register dst, DoubleRegister src)
    2210  EmitFloatSetCond(LiftoffAssembler* assm, Condition cond, Register dst, DoubleRegister lhs, DoubleRegister rhs)
    2287  EmitSimdCommutativeBinOp(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, base::Optional<CpuFeature> feature = base::nullopt)
    2309  EmitSimdNonCommutativeBinOp(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, base::Optional<CpuFeature> feature = base::nullopt)
    2333  EmitSimdShiftOp(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister operand, LiftoffRegister count)
    2350  EmitSimdShiftOpImm(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister operand, int32_t count)
    2363  EmitAnyTrue(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister src)
    2371  EmitAllTrue(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister src, base::Optional<CpuFeature> feature = base::nullopt)
    3318  I32x4ExtMulHelper(LiftoffAssembler* assm, XMMRegister dst, XMMRegister src1, XMMRegister src2, bool low, bool is_signed)
    [all...]
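Note: the ConvertFloatToIntAndBack()/EmitTruncateFloatToInt() helpers listed above lean on cvttsd2si producing the "integer indefinite" value (INT32_MIN) for NaN and out-of-range inputs; the emitted code converts the result back to double, compares it with the truncated source, and jumps to the trap label on a mismatch. A scalar C++ illustration of that check, not the emitted assembly; Cvttsd2si here is a stand-in that models the instruction's behaviour, not a real intrinsic.

    #include <cmath>
    #include <cstdint>
    #include <limits>
    #include <optional>

    // Stand-in for cvttsd2si: NaN and out-of-range inputs produce the
    // "integer indefinite" value INT32_MIN.
    int32_t Cvttsd2si(double src) {
      if (std::isnan(src) || src <= -2147483649.0 || src >= 2147483648.0)
        return std::numeric_limits<int32_t>::min();
      return static_cast<int32_t>(src);  // truncates toward zero
    }

    // Convert, convert back, compare against the truncated source; a
    // mismatch is what the real code turns into a jump to the trap label.
    std::optional<int32_t> TruncF64ToI32Checked(double src) {
      int32_t dst = Cvttsd2si(src);
      double converted_back = static_cast<double>(dst);
      if (converted_back != std::trunc(src)) return std::nullopt;
      return dst;
    }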
/third_party/node/deps/v8/src/wasm/baseline/arm/

liftoff-assembler-arm.h
    91  inline MemOperand GetMemOp(LiftoffAssembler* assm,   (in GetMemOp)
    97  assm->add(tmp, offset, Operand(offset_imm));   (in GetMemOp)
    103  inline Register CalculateActualAddress(LiftoffAssembler* assm,   (in CalculateActualAddress)
    112  assm->mov(result_reg, addr_reg);   (in CalculateActualAddress)
    119  assm->add(actual_addr_reg, addr_reg, Operand(offset_imm));   (in CalculateActualAddress)
    121  assm->add(actual_addr_reg, addr_reg, Operand(offset_reg));   (in CalculateActualAddress)
    123  assm->add(actual_addr_reg, actual_addr_reg, Operand(offset_imm));   (in CalculateActualAddress)
    154  inline void I64Binop(LiftoffAssembler* assm, LiftoffRegister dst,   (in I64Binop)
    159  assm->GetUnusedRegister(kGpReg, LiftoffRegList{lhs, rhs, dst.high_gp()})   (in I64Binop)
    162  (assm ...   (in I64Binop)
    172  I64BinopI(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister lhs, int64_t imm)
    187  I64Shiftop(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister src, Register amount)
    227  EmitFloatMinOrMax(LiftoffAssembler* assm, RegisterType dst, RegisterType lhs, RegisterType rhs, MinOrMax min_or_max)
    248  EnsureNoAlias(Assembler* assm, Register reg, Register must_not_alias, UseScratchRegisterScope* temps)
    258  S128NarrowOp(LiftoffAssembler* assm, NeonDataType dt, NeonDataType sdt, LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
    270  F64x2Compare(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, Condition cond)
    300  Store(LiftoffAssembler* assm, LiftoffRegister src, MemOperand dst, ValueKind kind)
    340  Load(LiftoffAssembler* assm, LiftoffRegister dst, MemOperand src, ValueKind kind)
    394  EmitSimdShift(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
    410  EmitSimdShiftImmediate(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
    428  EmitAnyTrue(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister src)
    1589  GeneratePopCnt(Assembler* assm, Register dst, Register src, Register scratch1, Register scratch2)
    [all...]
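Note: GeneratePopCnt() above takes two scratch registers, which suggests a population count synthesized from plain ALU operations. Whether the real helper emits exactly this sequence is not shown in the hits; the classic parallel bit-count it would approximate looks like this in scalar form:

    #include <cstdint>

    // Classic parallel bit-count over a 32-bit word.
    uint32_t PopCount32(uint32_t x) {
      x = x - ((x >> 1) & 0x55555555u);                  // pairs of bits
      x = (x & 0x33333333u) + ((x >> 2) & 0x33333333u);  // nibbles
      x = (x + (x >> 4)) & 0x0F0F0F0Fu;                  // bytes
      return (x * 0x01010101u) >> 24;                    // sum of the four bytes
    }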
/third_party/node/deps/v8/src/wasm/baseline/mips64/

liftoff-assembler-mips64.h
    82  inline MemOperand GetMemOp(LiftoffAssembler* assm, Register addr,   (in GetMemOp)
    87  assm->daddu(kScratchReg, addr, offset);   (in GetMemOp)
    91  assm->li(kScratchReg, offset_imm);   (in GetMemOp)
    92  assm->daddu(kScratchReg, kScratchReg, addr);   (in GetMemOp)
    94  assm->daddu(kScratchReg, kScratchReg, offset);   (in GetMemOp)
    99  inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, MemOperand src,   (in Load)
    103  assm->Lw(dst.gp(), src);   (in Load)
    109  assm->Ld(dst.gp(), src);   (in Load)
    112  assm->Lwc1(dst.fp(), src);   (in Load)
    115  assm ...   (in Load)
    125  Store(LiftoffAssembler* assm, Register base, int32_t offset, LiftoffRegister src, ValueKind kind)
    152  push(LiftoffAssembler* assm, LiftoffRegister reg, ValueKind kind)
    182  ChangeEndiannessLoad(LiftoffAssembler* assm, LiftoffRegister dst, LoadType type, LiftoffRegList pinned)
    239  ChangeEndiannessStore(LiftoffAssembler* assm, LiftoffRegister src, StoreType type, LiftoffRegList pinned)
    1915  EmitAnyTrue(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister src)
    1925  EmitAllTrue(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister src, MSABranchDF msa_branch_df)
    [all...]
/third_party/node/deps/v8/src/wasm/baseline/arm64/

liftoff-assembler-arm64.h
    131  inline MemOperand GetMemOp(LiftoffAssembler* assm,   (in GetMemOp)
    139  assm->Add(effective_addr, addr.X(), offset_imm);   (in GetMemOp)
    149  inline Register GetEffectiveAddress(LiftoffAssembler* assm,   (in GetEffectiveAddress)
    157  assm->Add(tmp, addr, Operand(offset, UXTW));   (in GetEffectiveAddress)
    160  if (offset_imm != 0) assm->Add(tmp, addr, offset_imm);   (in GetEffectiveAddress)
    169  inline void EmitSimdShift(LiftoffAssembler* assm, VRegister dst, VRegister lhs,   (in EmitSimdShift)
    175  UseScratchRegisterScope temps(assm);   (in EmitSimdShift)
    179  assm->And(shift, rhs, mask);   (in EmitSimdShift)
    180  assm->Dup(tmp, shift);   (in EmitSimdShift)
    183  assm ...   (in EmitSimdShift)
    194  EmitSimdShiftRightImmediate(LiftoffAssembler* assm, VRegister dst, VRegister lhs, int32_t rhs)
    213  EmitAnyTrue(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister src)
    224  EmitAllTrue(LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister src, VectorFormat format)
    [all...]
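Note: the And(shift, rhs, mask) + Dup(tmp, shift) pair in EmitSimdShift() above reflects Wasm's rule that SIMD shift amounts are taken modulo the lane width before being broadcast to every lane. A one-lane scalar model of the same rule:

    #include <cstdint>

    // One i32x4 lane: the shift amount is masked to the lane width first,
    // which is what the And + Dup pair prepares for the whole vector.
    uint32_t I32x4ShlLane(uint32_t lane, uint32_t amount) {
      uint32_t shift = amount & 31u;  // the "mask" register
      return lane << shift;
    }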
/third_party/vixl/test/aarch64/

test-api-movprfx-aarch64.cc
    38  #define __ assm.
    100  Assembler assm;   (local in TEST)
    101  assm.GetCPUFeatures()->Combine(CPUFeatures::kSVE, CPUFeatures::kSVEI8MM);   (in TEST)
    106  CodeBufferCheckScope guard(&assm, kPairCount * 2 * kInstructionSize);   (in TEST)
    345  assm.FinalizeCode();   (in TEST)
    347  CheckAndMaybeDisassembleMovprfxPairs(assm.GetBuffer(), false);   (in TEST)
    353  Assembler assm;   (local in TEST)
    354  assm.GetCPUFeatures()->Combine(CPUFeatures::kSVE,   (in TEST)
    361  CodeBufferCheckScope guard(&assm, kPairCount * 2 * kInstructionSize);   (in TEST)
    603  assm ...   (in TEST)
    609, 671, 877, 1086, 1282, 1332, 1716, 1959, 2408, 2826, 3149, 3583  Assembler assm;   (local in TEST)
    [all...]

test-utils-aarch64.h
    114  void Dump(MacroAssembler* assm);
/third_party/node/deps/v8/src/wasm/baseline/

liftoff-assembler.h
    1577  void EmitI64IndependentHalfOperation(LiftoffAssembler* assm,   (in EmitI64IndependentHalfOperation)
    1583  (assm->*op)(dst.low_gp(), lhs.low_gp(), rhs.low_gp());   (in EmitI64IndependentHalfOperation)
    1584  (assm->*op)(dst.high_gp(), lhs.high_gp(), rhs.high_gp());   (in EmitI64IndependentHalfOperation)
    1590  (assm->*op)(dst.high_gp(), lhs.high_gp(), rhs.high_gp());   (in EmitI64IndependentHalfOperation)
    1591  (assm->*op)(dst.low_gp(), lhs.low_gp(), rhs.low_gp());   (in EmitI64IndependentHalfOperation)
    1595  Register tmp = assm->GetUnusedRegister(kGpReg, LiftoffRegList{lhs, rhs}).gp();   (in EmitI64IndependentHalfOperation)
    1596  (assm->*op)(tmp, lhs.low_gp(), rhs.low_gp());   (in EmitI64IndependentHalfOperation)
    1597  (assm->*op)(dst.high_gp(), lhs.high_gp(), rhs.high_gp());   (in EmitI64IndependentHalfOperation)
    1598  assm->Move(dst.low_gp(), tmp, kI32);   (in EmitI64IndependentHalfOperation)
    1602  void EmitI64IndependentHalfOperationImm(LiftoffAssembler* assm,   (in EmitI64IndependentHalfOperationImm)
    [all...]
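Note: EmitI64IndependentHalfOperation() above performs an i64 op on 32-bit targets as two independent 32-bit ops, ordering the halves (or routing through an unused temp register) so that writing the first half of dst cannot clobber an input still needed for the second half. A compilable model of that ordering logic; the aliasing guards are reconstructed from the general rule, since the condition lines themselves are not among the hits.

    #include <cstdint>
    #include <functional>

    struct RegPair { int low, high; };  // indices of the two 32-bit halves

    // op: any 32-bit bitwise operation (and, or, xor, ...).
    using Op32 = std::function<uint32_t(uint32_t, uint32_t)>;

    void EmitI64HalfOp(uint32_t regs[], const Op32& op, RegPair dst,
                       RegPair lhs, RegPair rhs,
                       int tmp /* an unused register, cf. GetUnusedRegister() */) {
      if (dst.low != lhs.high && dst.low != rhs.high) {
        regs[dst.low] = op(regs[lhs.low], regs[rhs.low]);     // low half first is safe
        regs[dst.high] = op(regs[lhs.high], regs[rhs.high]);
      } else if (dst.high != lhs.low && dst.high != rhs.low) {
        regs[dst.high] = op(regs[lhs.high], regs[rhs.high]);  // high half first is safe
        regs[dst.low] = op(regs[lhs.low], regs[rhs.low]);
      } else {
        regs[tmp] = op(regs[lhs.low], regs[rhs.low]);         // either order would clobber an input
        regs[dst.high] = op(regs[lhs.high], regs[rhs.high]);
        regs[dst.low] = regs[tmp];                            // assm->Move(dst.low_gp(), tmp, kI32)
      }
    }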
/third_party/node/deps/v8/src/wasm/baseline/loong64/

liftoff-assembler-loong64.h
    82  inline MemOperand GetMemOp(LiftoffAssembler* assm, Register addr,   (in GetMemOp)
    87  assm->add_d(kScratchReg, addr, offset);   (in GetMemOp)
    91  assm->li(kScratchReg, Operand(offset_imm));   (in GetMemOp)
    92  assm->add_d(kScratchReg, kScratchReg, addr);   (in GetMemOp)
    94  assm->add_d(kScratchReg, kScratchReg, offset);   (in GetMemOp)
    99  inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, MemOperand src,   (in Load)
    103  assm->Ld_w(dst.gp(), src);   (in Load)
    109  assm->Ld_d(dst.gp(), src);   (in Load)
    112  assm->Fld_s(dst.fp(), src);   (in Load)
    115  assm ...   (in Load)
    125  Store(LiftoffAssembler* assm, Register base, int32_t offset, LiftoffRegister src, ValueKind kind)
    149  push(LiftoffAssembler* assm, LiftoffRegister reg, ValueKind kind)
    [all...]
/third_party/node/deps/v8/src/wasm/baseline/riscv64/

liftoff-assembler-riscv64.h
    81  inline MemOperand GetMemOp(LiftoffAssembler* assm, Register addr,   (in GetMemOp)
    86  assm->Add64(kScratchReg2, addr, offset);   (in GetMemOp)
    90  assm->li(kScratchReg2, offset_imm);   (in GetMemOp)
    91  assm->Add64(kScratchReg2, kScratchReg2, addr);   (in GetMemOp)
    93  assm->Add64(kScratchReg2, kScratchReg2, offset);   (in GetMemOp)
    98  inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, MemOperand src,   (in Load)
    102  assm->Lw(dst.gp(), src);   (in Load)
    108  assm->Ld(dst.gp(), src);   (in Load)
    111  assm->LoadFloat(dst.fp(), src);   (in Load)
    114  assm ...   (in Load)
    121  Store(LiftoffAssembler* assm, Register base, int32_t offset, LiftoffRegister src, ValueKind kind)
    145  push(LiftoffAssembler* assm, LiftoffRegister reg, ValueKind kind)
    171  ChangeEndiannessLoad(LiftoffAssembler* assm, LiftoffRegister dst, LoadType type, LiftoffRegList pinned)
    228  ChangeEndiannessStore(LiftoffAssembler* assm, LiftoffRegister src, StoreType type, LiftoffRegList pinned)
    [all...]
/third_party/vixl/test/aarch32/

test-assembler-aarch32.cc
    3103  void CheckInstructionSetA32(const T& assm) {   (in CheckInstructionSetA32)
    3104  VIXL_CHECK(assm.IsUsingA32());   (in CheckInstructionSetA32)
    3105  VIXL_CHECK(!assm.IsUsingT32());   (in CheckInstructionSetA32)
    3106  VIXL_CHECK(assm.GetInstructionSetInUse() == A32);   (in CheckInstructionSetA32)
    3111  void CheckInstructionSetT32(const T& assm) {   (in CheckInstructionSetT32)
    3112  VIXL_CHECK(assm.IsUsingT32());   (in CheckInstructionSetT32)
    3113  VIXL_CHECK(!assm.IsUsingA32());   (in CheckInstructionSetT32)
    3114  VIXL_CHECK(assm.GetInstructionSetInUse() == T32);   (in CheckInstructionSetT32)
    3170  Assembler assm;   (local in TEST_NOASM)
    3171  CheckInstructionSetA32(assm);   (in TEST_NOASM)
    [all...]
/third_party/node/deps/v8/src/codegen/arm64/

utils-arm64.cc
    12  #define __ assm->
/third_party/node/deps/v8/src/baseline/ppc/

baseline-assembler-ppc-inl.h
    136  #define __ assm->
    138  static void JumpIfHelper(MacroAssembler* assm, Condition cc, Register lhs,   (in JumpIfHelper)
/third_party/node/deps/v8/src/baseline/s390/

baseline-assembler-s390-inl.h
    136  #define __ assm->
    138  static void JumpIfHelper(MacroAssembler* assm, Condition cc, Register lhs,   (in JumpIfHelper)
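Note: several of the files above (utils-arm64.cc, the ppc and s390 baseline assemblers, the vixl tests) use the "#define __ assm->" (or "assm.") idiom so that free-standing helpers taking the assembler as a parameter read like ordinary macro-assembler code. A self-contained miniature of the idiom; MiniAssembler and its cmp/b are stand-ins, not the real classes.

    #include <iostream>

    struct MiniAssembler {
      void cmp(int lhs, int rhs) { std::cout << "cmp r" << lhs << ", r" << rhs << "\n"; }
      void b(const char* cond, const char* target) {
        std::cout << "b" << cond << " " << target << "\n";
      }
    };

    #define __ assm->
    static void JumpIfHelper(MiniAssembler* assm, const char* cond, int lhs,
                             int rhs, const char* target) {
      __ cmp(lhs, rhs);
      __ b(cond, target);
    }
    #undef __

    int main() {
      MiniAssembler a;
      JumpIfHelper(&a, "eq", 1, 2, "done");  // prints: cmp r1, r2 / beq done
    }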
/third_party/node/deps/v8/src/codegen/arm/

macro-assembler-arm.cc
    2744  void F64x2ConvertLowHelper(Assembler* assm, QwNeonRegister dst,
    2747  UseScratchRegisterScope temps(assm);   (in F64x2ConvertLowHelper)
    2750  assm->vmov(tmp, src_d);   (in F64x2ConvertLowHelper)
    2754  (assm->*convert_fn)(dst.low(), src_d.low(), kDefaultRoundToZero, al);   (in F64x2ConvertLowHelper)
    2755  (assm->*convert_fn)(dst.high(), src_d.high(), kDefaultRoundToZero, al);   (in F64x2ConvertLowHelper)
/third_party/node/deps/v8/src/codegen/riscv64/

assembler-riscv64.h
    1334  explicit VectorUnit(Assembler* assm) : assm_(assm) {}   (in VectorUnit)
/third_party/vixl/src/aarch64/

assembler-aarch64.cc
    5889  bool Assembler::OneInstrMoveImmediateHelper(Assembler* assm,
    5892  bool emit_code = assm != NULL;
    5900  assm->movz(dst, imm);
    5907  assm->movn(dst, dst.Is64Bits() ? ~imm : (~imm & kWRegMask));
    5914  assm->LogicalImmediate(dst,

assembler-aarch64.h
    7130  static bool OneInstrMoveImmediateHelper(Assembler* assm,
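Note: OneInstrMoveImmediateHelper() above doubles as a query and an emitter: called with assm == NULL it only reports whether the immediate fits a single movz/movn/logical-immediate instruction, and given a real assembler it also emits that instruction. A simplified sketch of the pattern; MiniAsm is a stand-in, and the encodability tests are reduced to shifted-halfword checks, omitting the logical-immediate case.

    #include <cstdint>
    #include <cstdio>

    struct MiniAsm {  // stand-in emitter
      void movz(int rd, uint64_t imm) { std::printf("movz x%d, %#llx\n", rd, (unsigned long long)imm); }
      void movn(int rd, uint64_t imm) { std::printf("movn x%d, %#llx\n", rd, (unsigned long long)imm); }
    };

    // True if v occupies a single 16-bit halfword (a movz/movn candidate).
    static bool IsShiftedHalfword(uint64_t v) {
      for (int shift = 0; shift < 64; shift += 16) {
        if ((v & ~(UINT64_C(0xFFFF) << shift)) == 0) return true;
      }
      return false;
    }

    // assm == nullptr: only answer the question; otherwise also emit.
    bool OneInstrMoveImmediate(MiniAsm* assm, int dst, uint64_t imm) {
      bool emit_code = assm != nullptr;
      if (IsShiftedHalfword(imm)) {
        if (emit_code) assm->movz(dst, imm);
        return true;
      }
      if (IsShiftedHalfword(~imm)) {
        if (emit_code) assm->movn(dst, ~imm);
        return true;
      }
      // The real helper also tries the logical-immediate encodings here.
      return false;
    }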
/third_party/node/deps/v8/src/codegen/x64/

assembler-x64.h
    350  explicit ConstPool(Assembler* assm) : assm_(assm) {}   (in ConstPool)