
Searched refs:Asr (Results 1 - 20 of 20) sorted by relevance

/third_party/node/deps/v8/src/codegen/arm64/
macro-assembler-arm64-inl.h
273 void TurboAssembler::Asr(const Register& rd, const Register& rn, in Asr() function in v8::internal::TurboAssembler
280 void TurboAssembler::Asr(const Register& rd, const Register& rn, in Asr() function in v8::internal::TurboAssembler
1059 Asr(dst.W(), src.W(), kSmiShift); in SmiUntag()
1062 Asr(dst, src, kSmiShift); in SmiUntag()
1101 Asr(smi.W(), smi.W(), kSmiShift); in SmiToInt32()
macro-assembler-arm64.h
615 inline void Asr(const Register& rd, const Register& rn, unsigned shift);
616 inline void Asr(const Register& rd, const Register& rn, const Register& rm);
macro-assembler-arm64.cc
1987 Asr(builtin_index, builtin_index, kSmiShift - kSystemPointerSizeLog2); in LoadEntryFromBuiltinIndex()
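
All three V8 call sites above lean on the same trick: a Smi stores its integer payload pre-shifted left, so untagging is a single arithmetic shift right, which also preserves the sign of negative values. A minimal sketch of the semantics in plain C++, assuming a 64-bit Smi layout with the payload in the upper 32 bits (kSmiShift = 32 here is illustrative; V8 derives the real value from its Smi configuration):

    #include <cstdint>

    constexpr int kSmiShift = 32;  // illustrative; V8 computes this from its Smi layout

    int64_t SmiUntag(int64_t smi) {
      // Matches the generated Asr(dst, src, kSmiShift): an arithmetic shift
      // right replicates the sign bit, so negative Smis untag correctly.
      return smi >> kSmiShift;
    }

    int32_t SmiToInt32(int64_t smi) {
      // Same shift, then a truncation to 32 bits, as in the W-register variant.
      return static_cast<int32_t>(smi >> kSmiShift);
    }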
/third_party/vixl/test/aarch32/
test-disasm-a32.cc
1597 COMPARE_T32(Asr(r2, r2, Operand(r2, ROR, r2)), in TEST()
1601 COMPARE_T32(Asr(r2, r2, Operand(r2, ROR, r2)), in TEST()
3270 COMPARE_T32(Asr(eq, r0, r1, 16), in TEST()
3274 COMPARE_T32(Asr(eq, r0, r1, 32), in TEST()
3278 COMPARE_T32(Asr(eq, r0, r1, 0), in TEST()
3284 COMPARE_T32(Asr(eq, r7, r7, r3), in TEST()
3288 COMPARE_T32(Asr(eq, r8, r8, r3), in TEST()
4036 CHECK_T32_16(Asr(DontCare, r0, r1, 32), "asrs r0, r1, #32\n"); in TEST()
4038 CHECK_T32_16_IT_BLOCK(Asr(DontCare, eq, r0, r1, 32), in TEST()
4042 CHECK_T32_16(Asr(DontCar in TEST()
[all...]
test-simulator-cond-rd-rn-operand-rm-a32.cc
142 M(Asr) \
test-simulator-cond-rd-rn-operand-rm-t32.cc
142 M(Asr) \
test-assembler-aarch32.cc
785 __ Asr(r5, r1, 16); in TEST()
813 __ Asr(r5, r1, r9); in TEST()
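
These aarch32 tests pin down encodings, condition codes, and flag updates; the operation itself is plain arithmetic shift right. A quick contrast with logical shift right, in standalone C++ rather than the vixl API:

    #include <cassert>
    #include <cstdint>

    int main() {
      int32_t v = -16;  // 0xFFFFFFF0
      assert((v >> 2) == -4);  // ASR: the sign bit is replicated into the top
      assert((static_cast<uint32_t>(v) >> 2) == 0x3FFFFFFCu);  // LSR: zero-filled
      return 0;
    }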
/third_party/skia/third_party/externals/swiftshader/third_party/subzero/src/DartARM32/
assembler_arm.h
1003 void Asr(Register rd, Register rm, const Operand& shift_imm,
1006 void Asr(Register rd, Register rm, Register rs, Condition cond = AL);
1033 Asr(reg, reg, Operand(kSmiTagSize), cond); in SmiUntag()
1037 Asr(dst, src, Operand(kSmiTagSize), cond); in SmiUntag()
assembler_arm.cc
2475 void Assembler::Asr(Register rd, Register rm, const Operand& shift_imm,
2479 ASSERT(shift != 0); // Do not use Asr if no shift is wanted.
2491 ASSERT(shift != 0); // Do not use Asr if no shift is wanted.
2500 void Assembler::Asr(Register rd, Register rm, Register rs, Condition cond) {
2521 Asr(rd, rm, Operand(31), cond);
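
The ASSERT(shift != 0) checks guard an A32 encoding quirk: in the shift-by-immediate encoding, an ASR amount field of 0 actually encodes a shift of 32, so passing 0 would silently assemble a different instruction. A sketch of the decode rule, simplified from the ARM ARM's DecodeImmShift pseudocode (the helper name is hypothetical):

    #include <cstdint>

    // Hypothetical helper mirroring DecodeImmShift for the ASR case:
    // imm5 == 0 in the instruction encoding means a shift amount of 32.
    uint32_t DecodeAsrImmediate(uint32_t imm5) {
      return (imm5 == 0) ? 32u : imm5;
    }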
/third_party/vixl/benchmarks/aarch64/
bench-utils.cc
197 __ Asr(PickR(size), PickR(size), 4); in GenerateTrivialSequence()
/third_party/skia/third_party/externals/swiftshader/third_party/subzero/src/
IceInstARM32.h
383 Asr, enumerator
1005 using InstARM32Asr = InstARM32ThreeAddrGPR<InstARM32::Asr>;
IceInstARM32.cpp
3408 template class InstARM32ThreeAddrGPR<InstARM32::Asr>;
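
Subzero stamps out one instruction class per opcode from a shared three-address template, with Asr as one enumerator, and the .cpp file instantiates the template explicitly. A reduced sketch of the pattern (the names are stand-ins, not the actual Subzero declarations):

    enum class Op { Asr, Lsl, Lsr };

    // One template covers every three-address GPR instruction; an explicit
    // instantiation, as in IceInstARM32.cpp, emits its code once per opcode.
    template <Op K>
    struct ThreeAddrGPR {
      static constexpr Op kOp = K;
      // dest = src0 <op> src1
    };

    using InstAsr = ThreeAddrGPR<Op::Asr>;
    template struct ThreeAddrGPR<Op::Asr>;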
/third_party/vixl/test/aarch64/
test-assembler-aarch64.cc
6679 __ Asr(x16, x0, x1);
6680 __ Asr(x17, x0, x2);
6681 __ Asr(x18, x0, x3);
6682 __ Asr(x19, x0, x4);
6683 __ Asr(x20, x0, x5);
6684 __ Asr(x21, x0, x6);
6686 __ Asr(w22, w0, w1);
6687 __ Asr(w23, w0, w2);
6688 __ Asr(w24, w0, w3);
6689 __ Asr(w2
[all...]
test-assembler-sve-aarch64.cc
11914 masm->Asr(ztmp, ztmp, kQRegSizeInBytesLog2 - dst.GetLaneSizeInBytesLog2());
12698 __ Asr(zd_asr, zn, shift);
12787 macro = &MacroAssembler::Asr;
12995 __ Asr(z4.VnB(), p0.Merging(), z31.VnB(), z1.VnB());
13001 __ Asr(z7.VnH(), p0.Merging(), z31.VnH(), z1.VnH());
13007 __ Asr(z10.VnS(), p4.Merging(), z31.VnS(), z1.VnS());
13012 __ Asr(z13.VnD(), p0.Merging(), z31.VnD(), z1.VnD());
13070 __ Asr(z4.VnB(), p0.Merging(), z31.VnB(), z1.VnD());
13075 __ Asr(z7.VnH(), p0.Merging(), z31.VnH(), z1.VnD());
13080 __ Asr(z1
[all...]
test-disasm-sve-aarch64.cc
346 COMPARE_MACRO(Asr(z4.VnB(), p0.Merging(), z4.VnB(), z30.VnB()), in TEST()
348 COMPARE_MACRO(Asr(z4.VnB(), p0.Merging(), z30.VnB(), z4.VnB()), in TEST()
350 COMPARE_MACRO(Asr(z4.VnB(), p0.Merging(), z10.VnB(), z14.VnB()), in TEST()
400 COMPARE_MACRO(Asr(z8.VnH(), p7.Merging(), z29.VnH(), 3), in TEST()
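
The SVE variants add predication: with p.Merging(), lanes whose governing predicate bit is false keep the destination's previous contents instead of being shifted. A scalar model of that behaviour, assuming byte lanes (plain C++, not the vixl API):

    #include <cstddef>
    #include <cstdint>

    // Models Asr(zd.VnB(), pg.Merging(), zn.VnB(), shift) one lane at a time.
    void PredicatedAsr(int8_t* zd, const bool* pg, const int8_t* zn,
                       unsigned shift, size_t lanes) {
      for (size_t i = 0; i < lanes; ++i) {
        if (pg[i]) {
          zd[i] = static_cast<int8_t>(zn[i] >> shift);  // active lane: shift
        }
        // inactive lane: zd[i] keeps its old value (merging predication)
      }
    }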
/third_party/vixl/src/aarch64/
macro-assembler-aarch64.h
1141 void Asr(const Register& rd, const Register& rn, unsigned shift) { in Asr() function in vixl::aarch64::MacroAssembler
1148 void Asr(const Register& rd, const Register& rn, const Register& rm) { in Asr() function in vixl::aarch64::MacroAssembler
3708 void Asr(const ZRegister& zd, in Asr() function in vixl::aarch64::MacroAssembler
3716 void Asr(const ZRegister& zd,
3720 void Asr(const ZRegister& zd, const ZRegister& zn, int shift) { in Asr() function in vixl::aarch64::MacroAssembler
3725 void Asr(const ZRegister& zd, const ZRegister& zn, const ZRegister& zm) { in Asr() function in vixl::aarch64::MacroAssembler
macro-assembler-sve-aarch64.cc
667 V(Asr, asr) \
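
The V(Asr, asr) entry is one row of an X-macro list: each (MacroName, mnemonic) pair expands into the boilerplate that forwards a MacroAssembler method to the corresponding raw-assembler instruction. A reduced sketch of the idiom (the list and forwarder here are illustrative, not vixl's actual macros):

    #include <cstdio>

    #define SVE_SHIFT_LIST(V) \
      V(Asr, asr)             \
      V(Lsl, lsl)             \
      V(Lsr, lsr)

    // Expands once per list entry; a real forwarder would emit the instruction.
    #define DEFINE_FORWARDER(MACRO, ASM) \
      void MACRO##Shift() { std::puts(#ASM); }
    SVE_SHIFT_LIST(DEFINE_FORWARDER)
    #undef DEFINE_FORWARDER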
/third_party/node/deps/v8/src/wasm/baseline/arm64/
liftoff-assembler-arm64.h
1110 I32_SHIFTOP(i32_sar, Asr)
1119 I64_SHIFTOP(i64_sar, Asr)
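
Liftoff's mapping is direct because WebAssembly defines its signed shift right (i32.shr_s / i64.shr_s, "sar") as an arithmetic shift with the count taken modulo the operand width, which matches arm64 ASR's register-form behaviour. The semantics in plain C++ (not the Liftoff API):

    #include <cstdint>

    int32_t WasmI32ShrS(int32_t v, int32_t amount) {
      return v >> (amount & 31);  // wasm masks the shift count to the bit width
    }

    int64_t WasmI64ShrS(int64_t v, int64_t amount) {
      return v >> (amount & 63);
    }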
/third_party/vixl/src/aarch32/
macro-assembler-aarch32.h
1389 void Asr(Condition cond, Register rd, Register rm, const Operand& operand) { in Asr() function in vixl::aarch32::MacroAssembler
1410 void Asr(Register rd, Register rm, const Operand& operand) { in Asr() function in vixl::aarch32::MacroAssembler
1411 Asr(al, rd, rm, operand); in Asr()
1413 void Asr(FlagsUpdate flags, in Asr() function in vixl::aarch32::MacroAssembler
1420 Asr(cond, rd, rm, operand); in Asr()
1434 Asr(cond, rd, rm, operand); in Asr()
1439 void Asr(FlagsUpdate flags, in Asr() function in vixl::aarch32::MacroAssembler
1443 Asr(flags, al, rd, rm, operand); in Asr()
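
These aarch32 overloads all funnel into one canonical form: the condition-less variant defaults to al, and the FlagsUpdate variants fold the asr-versus-asrs choice in before delegating. A reduced sketch of that forwarding shape (types and bodies simplified; not the real vixl signatures):

    enum Condition { al, eq };
    enum FlagsUpdate { LeaveFlags, SetFlags };

    struct Masm {
      // Canonical form: every other overload delegates here.
      void Asr(Condition cond, int rd, int rm, int imm) { /* emit asr/asrs */ }

      void Asr(int rd, int rm, int imm) { Asr(al, rd, rm, imm); }

      void Asr(FlagsUpdate flags, int rd, int rm, int imm) {
        // A real implementation would pick asr vs asrs from flags first.
        Asr(al, rd, rm, imm);
      }
    };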
/third_party/node/deps/v8/src/compiler/backend/arm64/
code-generator-arm64.cc
1431 ASSEMBLE_SHIFT(Asr, 64); in AssembleArchInstruction()
1434 ASSEMBLE_SHIFT(Asr, 32); in AssembleArchInstruction()
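
ASSEMBLE_SHIFT dispatches between ASR's register and immediate forms depending on whether the shift count arrives in a register; either way the count is interpreted modulo the operand width. A behavioural model of the 64-bit case under that assumption (plain C++, not V8's actual macro body):

    #include <cstdint>

    int64_t Asr64(int64_t value, bool count_in_register, uint64_t count_reg,
                  unsigned imm) {
      // Register-form ASR masks the count to the width; immediate counts are
      // required to be in range at assembly time.
      unsigned amount = count_in_register ? static_cast<unsigned>(count_reg & 63)
                                          : (imm & 63);
      return value >> amount;
    }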

Completed in 121 milliseconds