/third_party/vixl/test/aarch64/

test-assembler-aarch64.cc (all matches in TEST()):
  57: __ Mov(x29, sp);
  60: __ Mov(x0, 0x1000);
  61: __ Mov(sp, 0x1004);
  80: __ Mov(x12, sp);
  81: __ Mov(sp, 0x1004);
  84: __ Mov(x13, sp);
  85: __ Mov(sp, 0x1004);
  90: __ Mov(x15, sp);
  93: __ Mov(sp, x29);
  124: __ Mov(x2...
  [all...]

test-assembler-neon-aarch64.cc (all matches in TEST()):
  56: __ Mov(x17, src_base);
  57: __ Mov(x18, dst_base);
  58: __ Mov(x19, src_base);
  59: __ Mov(x20, dst_base);
  60: __ Mov(x21, src_base);
  61: __ Mov(x22, dst_base);
  98: __ Mov(x17, src_base);
  99: __ Mov(x18, dst_base);
  100: __ Mov(x19, src_base);
  101: __ Mov(x2...
  [all...]

test-assembler-sve-aarch64.cc:
  145: __ Mov(x0, 0x0123456789abcdef);
  299: __ Mov(x10, reinterpret_cast<uintptr_t>(data));
  407: __ Mov(mla_da_result, za);
  410: __ Mov(mla_dn_result, zn);
  413: __ Mov(mla_dm_result, zm);
  416: __ Mov(mla_d_result, zd);
  426: __ Mov(mls_da_result, za);
  429: __ Mov(mls_dn_result, zn);
  432: __ Mov(mls_dm_result, zm);
  435: __ Mov(mls_d_resul...
  [all...]

test-simulator-aarch64.cc:
  292: __ Mov(out, results);  in Test1Op_Helper()
  293: __ Mov(inputs_base, inputs);  in Test1Op_Helper()
  294: __ Mov(length, inputs_length);  in Test1Op_Helper()
  296: __ Mov(index_n, 0);  in Test1Op_Helper()
  438: __ Mov(out, results);  in Test2Op_Helper()
  439: __ Mov(inputs_base, inputs);  in Test2Op_Helper()
  440: __ Mov(length, inputs_length);  in Test2Op_Helper()
  442: __ Mov(index_n, 0);  in Test2Op_Helper()
  446: __ Mov(index_m, 0);  in Test2Op_Helper()
  582: __ Mov(ou...  in Test3Op_Helper()
  [all...]

test-metadata-aarch64.cc (all matches in TEST()):
  58: __ Mov(tagged_heap_ptr, reinterpret_cast<uintptr_t>(tagged_address));
  93: __ Mov(tagged_heap_ptr, reinterpret_cast<uintptr_t>(tagged_address));
  113: __ Mov(x22, reinterpret_cast<uintptr_t>(tagged_address_2));

test-simulator-sve2-aarch64.cc (all matches in TEST_SVE()):
  167: __ Mov(x0, reinterpret_cast<uint64_t>(&state));
  311: __ Mov(x0, reinterpret_cast<uint64_t>(&state));
  455: __ Mov(x0, reinterpret_cast<uint64_t>(&state));
  599: __ Mov(x0, reinterpret_cast<uint64_t>(&state));
  843: __ Mov(x0, reinterpret_cast<uint64_t>(&state));
  1087: __ Mov(x0, reinterpret_cast<uint64_t>(&state));
  1231: __ Mov(x0, reinterpret_cast<uint64_t>(&state));
  1375: __ Mov(x0, reinterpret_cast<uint64_t>(&state));
  1519: __ Mov(x0, reinterpret_cast<uint64_t>(&state));
  1691: __ Mov(x...
  [all...]

/third_party/vixl/test/

test-code-generation-scopes.cc (all matches in TEST()):
  59: __ Mov(aarch32::r0, 0);
  73: __ Mov(aarch64::x0, 0);
  87: __ Mov(aarch32::r0, 0);
  102: __ Mov(aarch64::x0, 0);
  117: __ Mov(aarch32::r0, 0);
  119: __ Mov(aarch32::r1, 1);
  133: __ Mov(aarch64::x0, 0);
  135: __ Mov(aarch64::x1, 1);
  149: __ Mov(aarch32::r0, 0);
  151: __ Mov(aarch3...
  [all...]

/third_party/vixl/examples/aarch64/

simulator_interception.cc (all matches in GenerateInterceptionExamples()):
  63: __ Mov(x16, reinterpret_cast<uint64_t>(example_1));
  65: __ Mov(w1, w0);
  68: __ Mov(x16, reinterpret_cast<uint64_t>(example_2));
  70: __ Mov(w2, w0);
  73: __ Mov(x0, FAILURE);
  76: __ Mov(x16, reinterpret_cast<uint64_t>(example_3));
  78: __ Mov(w3, w0);
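Each x16 load above precedes an indirect call, and each `Mov(wN, w0)` saves that call's result; only the address setup and the result moves match the query. A minimal sketch of the calling pattern in between, assuming VIXL's aarch64 MacroAssembler and the usual `#define __ masm->` shorthand (the function name and the Blr step are illustrative, not lifted from the file):

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    #define __ masm->

    // Materialize a C function's address in x16 (the intra-procedure-call
    // scratch register), call it, then copy the 32-bit result out of w0
    // before the next call clobbers it.
    void GenerateCallThroughX16(MacroAssembler* masm, uintptr_t target) {
      __ Mov(x16, static_cast<uint64_t>(target));
      __ Blr(x16);
      __ Mov(w1, w0);
    }
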
factorial.cc (all matches in GenerateFactorial()):
  41: __ Mov(x1, x0);
  42: __ Mov(x0, 1);  // Use x0 as the accumulator.
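These two moves are the entry sequence of the factorial generator: x0 arrives holding n and is freed up to serve as the accumulator. A sketch of what the full generator plausibly looks like; everything past the two matched lines is an assumption in the same style:

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    #define __ masm->

    // uint64_t factorial(uint64_t n): n arrives in x0, n! is returned in x0.
    void GenerateFactorial(MacroAssembler* masm) {
      Label loop, done;
      __ Mov(x1, x0);      // Keep the loop counter in x1...
      __ Mov(x0, 1);       // ...so x0 can be the accumulator.
      __ Cbz(x1, &done);   // 0! == 1; skip the loop entirely.
      __ Bind(&loop);
      __ Mul(x0, x0, x1);  // accumulator *= counter
      __ Sub(x1, x1, 1);   // counter -= 1
      __ Cbnz(x1, &loop);
      __ Bind(&done);
      __ Ret();
    }
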
crc-checksums.cc (all matches in GenerateCrc32()):
  46: __ Mov(x2, x0);
  50: __ Mov(w0, 0xffffffff);
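Here x2 takes over the buffer pointer so w0 can hold the running checksum, seeded with the conventional all-ones CRC-32 value. A sketch of a full loop around those two moves, assuming the CRC32B instruction and pointer/length arguments in x0/x1 (the loop body is an assumption):

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    #define __ masm->

    // uint32_t crc32(const uint8_t* buf, uint64_t len): result in w0.
    void GenerateCrc32(MacroAssembler* masm) {
      Label loop, done;
      __ Mov(x2, x0);          // Free x0/w0 for the checksum.
      __ Mov(w0, 0xffffffff);  // Conventional CRC-32 seed.
      __ Cbz(x1, &done);
      __ Bind(&loop);
      __ Ldrb(w3, MemOperand(x2, 1, PostIndex));  // Load byte, advance pointer.
      __ Crc32b(w0, w0, w3);                      // Fold it into the CRC.
      __ Sub(x1, x1, 1);
      __ Cbnz(x1, &loop);
      __ Bind(&done);
      __ Ret();
    }
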
factorial-rec.cc (all matches in GenerateFactorialRec()):
  45: __ Mov(x1, x0);
  54: __ Mov(x0, 1);
sum-array.cc (all matches in GenerateSumArray()):
  43: __ Mov(x2, x0);
  44: __ Mov(w0, 0);
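Same register-shuffling pattern: the pointer moves out of x0 and w0 becomes a zeroed accumulator. A sketch of the rest, assuming byte elements and an element count in w1 (the loop is an assumption):

    #include "aarch64/macro-assembler-aarch64.h"

    using namespace vixl::aarch64;

    #define __ masm->

    // uint32_t sum_array(const uint8_t* arr, uint32_t size): sum in w0.
    void GenerateSumArray(MacroAssembler* masm) {
      Label loop, done;
      __ Mov(x2, x0);  // Base pointer out of the way...
      __ Mov(w0, 0);   // ...so w0 can accumulate the sum.
      __ Cbz(w1, &done);
      __ Bind(&loop);
      __ Ldrb(w3, MemOperand(x2, 1, PostIndex));
      __ Add(w0, w0, w3);
      __ Sub(w1, w1, 1);
      __ Cbnz(w1, &loop);
      __ Bind(&done);
      __ Ret();
    }
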
swap-int32.cc (all matches in GenerateSwapInt32()):
  39: __ Mov(x2, __ StackPointer());
  56: __ Mov(old_stack_pointer, __ StackPointer());
sve-strlen.cc (all matches in GenerateSVEStrlen()):
  38: __ Mov(len, 0);
  80: __ Mov(x0, len);
/third_party/node/deps/v8/src/regexp/arm64/

regexp-macro-assembler-arm64.cc:
  321: __ Mov(capture_start_offset.X(), GetCachedRegister(start_reg));  in CheckNotBackReferenceIgnoreCase()
  425: __ Mov(w2, capture_length);  in CheckNotBackReferenceIgnoreCase()
  432: __ Mov(x3, ExternalReference::isolate_address(isolate()));  in CheckNotBackReferenceIgnoreCase()
  474: __ Mov(x10, GetCachedRegister(start_reg));  in CheckNotBackReference()
  589: __ Mov(w0, current_character());  in CallIsCharacterInRangeArray()
  590: __ Mov(x1, GetOrAddRangeArray(ranges));  in CallIsCharacterInRangeArray()
  591: __ Mov(x2, ExternalReference::isolate_address(isolate()));  in CallIsCharacterInRangeArray()
  600: __ Mov(code_pointer(), Operand(masm_->CodeObject()));  in CallIsCharacterInRangeArray()
  632: __ Mov(x11, Operand(table));  in CheckBitInTable()
  719: __ Mov(x1...  in CheckSpecialCharacterClass()
  [all...]

/third_party/skia/third_party/externals/swiftshader/third_party/subzero/src/

IceVariableSplitting.cpp:
  316: Inst *Mov = Target->createLoweredMove(NewMapped, Dest);  in handleSimpleVarAssign() (local)
  317: Node->getInsts().insert(IterNext, Mov);  in handleSimpleVarAssign()
  341: Inst *Mov = Target->createLoweredMove(NewMapped, SrcVar);  in handleSimpleVarAssign() (local)
  342: Node->getInsts().insert(IterNext, Mov);  in handleSimpleVarAssign()
  347: Inst *Mov = Target->createLoweredMove(OldMapped, SrcVar);  in handleSimpleVarAssign() (local)
  348: Mov->setDestRedefined();  in handleSimpleVarAssign()
  349: Node->getInsts().insert(IterNext, Mov);  in handleSimpleVarAssign()
  366: Inst *Mov = Target->createLoweredMove(NewMapped, Dest);  in handlePhi() (local)
  367: Node->getInsts().insert(IterCur, Mov);  in handlePhi()
  400: Inst *Mov...  in handleGeneralInst() (local)
  412: Inst *Mov = Target->createLoweredMove(NewMapped, Dest);  in handleGeneralInst() (local)
  416: Inst *Mov = Target->createLoweredMove(OldMapped, Dest);  in handleGeneralInst() (local)
  [all...]

/third_party/vixl/test/aarch32/

test-assembler-aarch32.cc:
  139: __ Mov(r0, 0); \
  235: __ Mov(r0, 0);  in TEST()
  236: __ Mov(r1, 1);  in TEST()
  237: __ Mov(r2, 0x01234567);  in TEST()
  238: __ Mov(r3, 0xfedcba98);  in TEST()
  262: __ Mov(r0, 0);  in TEST()
  263: __ Mov(r1, 1);  in TEST()
  264: __ Mov(r2, 0x01234567);  in TEST()
  265: __ Mov(r...  in TEST()
  [all...]

test-disasm-a32.cc (all matches in TEST()):
  1432: COMPARE_BOTH(Mov(r0, 0xbadbeef),
  1435: COMPARE_A32(Mov(eq, r0, 0xbadbeef),
  1438: COMPARE_T32(Mov(eq, r0, 0xbadbeef),
  1460: COMPARE_BOTH(Mov(pc, 0xbadbeef),
  1464: COMPARE_A32(Mov(eq, pc, 0xbadbeef),
  1468: COMPARE_T32(Mov(eq, pc, 0xbadbeef),
  2358: TEST_MOV_SHIFT_T32(Mov, "", 0x00000006)
  2367: // Wide immediates (Mov and Movs are tested in...
  2739: COMPARE_A32(Mov(pc, 1), "mov pc, #1\n");
  2740: MUST_FAIL_TEST_T32(Mov(p...
  [all...]

/third_party/node/deps/v8/src/codegen/arm64/

macro-assembler-arm64.cc:
  189: Mov(rd, 0);  in LogicalMacro()
  193: Mov(rd, rn);  in LogicalMacro()
  205: Mov(rd, rn);  in LogicalMacro()
  208: Mov(rd, immediate);  in LogicalMacro()
  238: Mov(sp, temp);  in LogicalMacro()
  263: void TurboAssembler::Mov(const Register& rd, uint64_t imm) {  (definition in v8::internal::TurboAssembler)
  304: // Mov instructions can't move immediate values into the stack pointer, so  in Mov()
  339: void TurboAssembler::Mov(const Register& rd, const Operand& operand,  (definition in v8::internal::TurboAssembler)
  371: Mov(dst, operand.ImmediateValue());  in Mov()
  407: void TurboAssembler::Mov(cons...  (definition in v8::internal::TurboAssembler)
  [all...]

/third_party/node/deps/v8/src/builtins/arm64/

builtins-arm64.cc:
  45: __ Mov(kJavaScriptCallExtraArg1Register, ExternalReference::Create(address));  in Generate_Adaptor()
  69: __ Mov(x2, x0);  in GenerateTailCallToReturnedCode()
  335: __ Mov(count, argc_without_receiver);  in Generate_JSConstructStubGeneric()
  344: __ Mov(x0, x12);  in Generate_JSConstructStubGeneric()
  489: __ Mov(x10, debug_hook);  in Generate_ResumeGeneratorTrampoline()
  496: __ Mov(x10, debug_suspended_generator);  in Generate_ResumeGeneratorTrampoline()
  579: __ Mov(x3, x1);  in Generate_ResumeGeneratorTrampoline()
  580: __ Mov(x1, x4);  in Generate_ResumeGeneratorTrampoline()
  670: __ Mov(kRootRegister, x0);  in Generate_JSEntryVariant()
  684: __ Mov(f...  in Generate_JSEntryVariant()
  [all...]

/third_party/vixl/examples/aarch32/

mandelbrot.cc (all matches in GenerateMandelBrot()):
  50: __ Mov(kZero, 0);
  68: __ Mov(r4, 64);
  78: __ Mov(kStringPtr, sp);
  96: __ Mov(r5, 32);
  100: __ Mov(kWriteCursor, kStringPtr);

/third_party/vixl/src/aarch64/

macro-assembler-sve-aarch64.cc:
  289: Mov(a, multiplier);  in Addvl()
  325: Mov(xd, base);  in CalculateSVEAddress()
  416: Mov(scratch, imm);  in Cpy()
  494: Mov(scratch, imm);  in Dup()
  552: Mov(zd, scratch);  in FPCommutativeArithmeticHelper()
  581: Mov(ztmp, zd.Aliases(zn) ? zn : zm); \
  617: Mov(zd, ztmp); \
  653: Mov(scratch, zm); \
  904: masm->Mov(scratch, op);  in Index()
  960: Mov(scratc...  in Insr()
  [all...]

macro-assembler-aarch64.cc:
  537: // Mov instructions can't move values into the stack pointer, so set up a...  in Emit()
  911: Mov(rd, 0);  in Emit()
  916: Mov(rd, rn);  in Emit()
  929: Mov(rd, rn);  in Emit()
  932: Mov(rd, immediate);  in Emit()
  964: Mov(rd, temp);  in Emit()
  993: void MacroAssembler::Mov(const Register& rd,  (definition in vixl::aarch64::MacroAssembler)
  1002: Mov(rd, operand.GetImmediate());  in Emit()
  1019: Mov(rd, operand.GetRegister(), discard_mode);  in Emit()
  1119: Mov(tem...  in Emit()
  1232: void MacroAssembler::Mov(const Register& rd, uint64_t imm) {  (definition in vixl::aarch64::MacroAssembler)
  [all...]
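The definitions matched here (lines 993 and 1232) are where the macro assembler turns `Mov(rd, imm)` into real instructions: a 64-bit immediate that no single instruction encodes is synthesized from a MOVZ or MOVN base plus up to three MOVKs. A sketch of the usual selection heuristic, in plain C++ with the emission itself left out (the function names are illustrative, not VIXL's):

    #include <cstdint>

    // Count how many 16-bit halfwords of imm equal the given pattern.
    static int CountHalfwords(uint64_t imm, uint16_t pattern) {
      int count = 0;
      for (int shift = 0; shift < 64; shift += 16) {
        if (((imm >> shift) & 0xffff) == pattern) ++count;
      }
      return count;
    }

    // Pick the base instruction for materializing imm: MOVZ starts from
    // all-zero bits, MOVN from all-one bits, and every halfword already
    // covered by the base saves one MOVK. Prefer the base covering more.
    static bool UseMovnBase(uint64_t imm) {
      return CountHalfwords(imm, 0xffff) > CountHalfwords(imm, 0x0000);
    }

Either way the result lands in a general-purpose register in at most four instructions; moves whose destination is the stack pointer need an extra hop through a scratch register, which appears to be what the comment at line 537 and the `Mov(rd, temp)` at line 964 are about.
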
/third_party/node/deps/v8/src/wasm/baseline/arm64/

liftoff-assembler-arm64.h:
  201: assm->Mov(dst, lhs);  in EmitSimdShiftRightImmediate()
  262: Mov(lr, x17);  in PrepareTailCall()
  412: Mov(reg.gp().W(), Immediate(value.to_i32(), rmode));  in LoadConstant()
  415: Mov(reg.gp().X(), Immediate(value.to_i64(), rmode));  in LoadConstant()
  913: Mov(dst.W(), src.W());  in Move()
  916: Mov(dst.X(), src.X());  in Move()
  928: Mov(dst.Q(), src.Q());  in Move()
  949: Mov(src.W(), value.to_i32());  in Spill()
  957: Mov(src.X(), value.to_i64());  in Spill()
  1024: Mov(count_re...  in FillStackSlotsWithZero()
  [all...]

/third_party/node/deps/v8/src/baseline/arm64/

baseline-assembler-arm64-inl.h:
  245: __ Mov(output, Immediate(value.ptr()));  in Move()
  251: __ Mov(output, Operand(reference));  in Move()
  254: __ Mov(output, Operand(value));  in Move()
  257: __ Mov(output, Immediate(value));  in Move()
  260: __ Mov(output, source);  in MoveMaybeSmi()
  263: __ Mov(output, source);  in MoveSmi()
  463: __ Mov(tmp, Operand(value));  in StoreTaggedSignedField()
  609: __ masm()->Mov(params_size, actual_params_size);  in EmitReturn()