/arkcompiler/ets_runtime/ecmascript/compiler/assembler/aarch64/
extend_assembler.cpp
    31  Stp(Register(X27), Register(X28), MemoryOperand(sp, -PAIR_SLOT_SIZE, PREINDEX));    in CalleeSave()
    32  Stp(Register(X25), Register(X26), MemoryOperand(sp, -PAIR_SLOT_SIZE, PREINDEX));    in CalleeSave()
    33  Stp(Register(X23), Register(X24), MemoryOperand(sp, -PAIR_SLOT_SIZE, PREINDEX));    in CalleeSave()
    34  Stp(Register(X21), Register(X22), MemoryOperand(sp, -PAIR_SLOT_SIZE, PREINDEX));    in CalleeSave()
    35  Stp(Register(X19), Register(X20), MemoryOperand(sp, -PAIR_SLOT_SIZE, PREINDEX));    in CalleeSave()
    37  Stp(VectorRegister(v14), VectorRegister(v15), MemoryOperand(sp, -PAIR_SLOT_SIZE, PREINDEX));    in CalleeSave()
    38  Stp(VectorRegister(v12), VectorRegister(v13), MemoryOperand(sp, -PAIR_SLOT_SIZE, PREINDEX));    in CalleeSave()
    39  Stp(VectorRegister(v10), VectorRegister(v11), MemoryOperand(sp, -PAIR_SLOT_SIZE, PREINDEX));    in CalleeSave()
    40  Stp(VectorRegister(v8), VectorRegister(v9), MemoryOperand(sp, -PAIR_SLOT_SIZE, PREINDEX));    in CalleeSave()
    75  Stp(Registe…    in PushFpAndLr()
    [all...]
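The CalleeSave() run above pushes x19..x28 and v8..v15 as pairs, decrementing SP before each store. Below is a minimal, self-contained sketch of the stack arithmetic only, not the runtime's code; PAIR_SLOT_SIZE is assumed here to be 16 bytes (two 64-bit slots).

#include <cassert>
#include <cstdint>
#include <cstdio>
#include <vector>

constexpr int64_t PAIR_SLOT_SIZE = 16;  // assumption: one Stp covers two 8-byte slots

struct StackModel {
    int64_t sp = 0;                      // byte offset relative to SP on entry
    std::vector<int64_t> pairStarts;     // where each stored pair begins

    // Pre-indexed store pair: the base is decremented first, then both
    // registers are written at the new address, mirroring
    // MemoryOperand(sp, -PAIR_SLOT_SIZE, PREINDEX).
    void StpPreIndex()
    {
        sp -= PAIR_SLOT_SIZE;
        assert(sp % 16 == 0 && "SP must stay 16-byte aligned (AAPCS64)");
        pairStarts.push_back(sp);
    }
};

int main()
{
    StackModel m;
    for (int i = 0; i < 5; ++i) {   // x19/x20 .. x27/x28: five pairs, as in CalleeSave()
        m.StpPreIndex();
    }
    for (int i = 0; i < 4; ++i) {   // v8/v9 .. v14/v15: four pairs
        m.StpPreIndex();
    }
    std::printf("callee-save area: %zu pairs, SP lowered by %lld bytes\n",
                m.pairStarts.size(), static_cast<long long>(-m.sp));
    return 0;
}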
assembler_aarch64.h
    301  void Stp(const Register &rt, const Register &rt2, const MemoryOperand &operand);
    303  void Stp(const VectorRegister &vt, const VectorRegister &vt2, const MemoryOperand &operand);
assembler_aarch64.cpp
    124  void AssemblerAarch64::Stp(const Register &rt, const Register &rt2, const MemoryOperand &operand)    function in panda::ecmascript::aarch64::AssemblerAarch64
    204  void AssemblerAarch64::Stp(const VectorRegister &vt, const VectorRegister &vt2, const MemoryOperand &operand)    function in panda::ecmascript::aarch64::AssemblerAarch64
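Whatever encoding path these two overloads take, the offset has to fit STP's signed 7-bit immediate, which the hardware scales by the size of one register. A standalone check of that constraint follows; it is a sketch of the architectural rule, not the project's encoder.

#include <cassert>
#include <cstdint>

// Returns true if `offset` (in bytes) is encodable for a pair of registers of
// `regSizeBytes` each (8 for X or D registers, 16 for Q registers).
bool IsValidStpOffset(int64_t offset, int64_t regSizeBytes)
{
    if (offset % regSizeBytes != 0) {
        return false;                       // must be a multiple of one register's size
    }
    const int64_t scaled = offset / regSizeBytes;
    return scaled >= -64 && scaled <= 63;   // imm7 is a signed 7-bit field
}

int main()
{
    assert(IsValidStpOffset(-16, 8));       // the common "push one pair" case
    assert(IsValidStpOffset(504, 8));       // largest positive offset for X pairs
    assert(!IsValidStpOffset(512, 8));      // out of the imm7 range
    assert(!IsValidStpOffset(-12, 8));      // not a multiple of the register size
    return 0;
}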
/arkcompiler/ets_runtime/ecmascript/compiler/trampoline/aarch64/
optimized_call.cpp
    74   __ Stp(tmp, frameType, MemoryOperand(sp, -FRAME_SLOT_SIZE * 2, AddrMode::PREINDEX));    in CallRuntime()
    329  __ Stp(glue, temp, MemoryOperand(sp, 0));  // argc, glue    in CallBuiltinTrampoline()
    331  __ Stp(Register(X29), Register(X30), MemoryOperand(sp, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX));    in CallBuiltinTrampoline()
    336  __ Stp(Register(Zero), temp, MemoryOperand(sp, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX));  // frameType, argv    in CallBuiltinTrampoline()
    343  __ Stp(temp, Register(Zero), MemoryOperand(sp, 0));  // argv, argc    in CallBuiltinTrampoline()
    382  __ Stp(glue, temp, MemoryOperand(sp, 0));  // argc, glue    in CallBuiltinConstructorStub()
    384  __ Stp(Register(X29), Register(X30), MemoryOperand(sp, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX));    in CallBuiltinConstructorStub()
    388  __ Stp(Register(Zero), temp, MemoryOperand(sp, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX));  // frameType, argv    in CallBuiltinConstructorStub()
    395  __ Stp(temp, Register(Zero), MemoryOperand(sp, 0));  // argv, argc    in CallBuiltinConstructorStub()
    575  __ Stp(arg…    in JSCallInternal()
    [all...]
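These trampolines mix two addressing forms: MemoryOperand(sp, 0) writes into slots that were already reserved and leaves SP alone, while AddrMode::PREINDEX moves SP before the store. The small model below illustrates just that difference; the enum and operand names follow the snippets above and DOUBLE_SLOT_SIZE is assumed to be 16 bytes.

#include <cassert>
#include <cstdint>

enum class AddrMode { OFFSET, PREINDEX };

struct PairStore {
    int64_t address;   // where the two registers are written
    int64_t newBase;   // value of the base register afterwards
};

// Models Stp(rt, rt2, MemoryOperand(base, imm, mode)) for the two modes used
// in the trampolines above.
PairStore ModelStp(int64_t base, int64_t imm, AddrMode mode)
{
    if (mode == AddrMode::PREINDEX) {
        const int64_t addr = base + imm;   // base is updated first
        return {addr, addr};               // store goes to the updated base
    }
    return {base + imm, base};             // signed offset: base is unchanged
}

int main()
{
    constexpr int64_t DOUBLE_SLOT_SIZE = 16;   // assumption: two 8-byte slots
    int64_t sp = 0x1000;

    // __ Stp(X29, X30, MemoryOperand(sp, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX));
    PairStore push = ModelStp(sp, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX);
    sp = push.newBase;
    assert(sp == 0x1000 - DOUBLE_SLOT_SIZE);

    // __ Stp(glue, temp, MemoryOperand(sp, 0));  // fills slots reserved earlier
    PairStore fill = ModelStp(sp, 0, AddrMode::OFFSET);
    assert(fill.newBase == sp);                   // SP does not move
    return 0;
}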
common_call.cpp
    44   __ Stp(frameTypeRegister, Register(X30), MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));    in PushAsmInterpBridgeFrame()
    46   __ Stp(Register(Zero), Register(FP), MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));    in PushAsmInterpBridgeFrame()
    79   __ Stp(Register(X19), frameType, MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));    in PushLeaveFrame()
    130  __ Stp(op1, Register(Zero), MemoryOperand(sp, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX));    in PushArgsWithArgvInPair()
    146  __ Stp(op1, op2, MemoryOperand(sp, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX));    in PushArgsWithArgvInPair()
    153  __ Stp(padding, op2, MemoryOperand(sp, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX));    in PushArgsWithArgvInPair()
    200  __ Stp(Register(X19), frameType, MemoryOperand(sp, -FRAME_SLOT_SIZE * 2, AddrMode::PREINDEX));    in PushAsmBridgeFrame()
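The PushArgsWithArgvInPair() matches (lines 130, 146, 153) show arguments being pushed two at a time, with a lone argument paired against Zero or a padding slot so SP stays 16-byte aligned. A sketch of that slot accounting, assuming 8-byte argument slots; the real routine also decides where the padding lands relative to the arguments.

#include <cstdint>
#include <cstdio>

// Number of 8-byte slots to push for `argc` arguments so the total stays a
// multiple of 16 bytes: odd counts get one extra padding slot.
int64_t SlotsToPush(int64_t argc)
{
    return (argc % 2 == 0) ? argc : argc + 1;
}

int main()
{
    for (int64_t argc = 0; argc <= 5; ++argc) {
        std::printf("argc=%lld -> push %lld slots (%lld bytes)\n",
                    static_cast<long long>(argc),
                    static_cast<long long>(SlotsToPush(argc)),
                    static_cast<long long>(SlotsToPush(argc) * 8));
    }
    return 0;
}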
asm_interpreter_call.cpp
    520  __ Stp(newTarget, thisObj, MemoryOperand(currentSlotRegister, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX));    in CallNativeWithArgv()
    523  __ Stp(callTarget, thisObj, MemoryOperand(currentSlotRegister, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX));    in CallNativeWithArgv()
    528  __ Stp(temp, thisObj, MemoryOperand(currentSlotRegister, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX));    in CallNativeWithArgv()
    539  __ Stp(glue, temp, MemoryOperand(currentSlotRegister, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));    in CallNativeWithArgv()
    556  __ Stp(Register(Zero), frameType, MemoryOperand(Register(SP), -FRAME_SLOT_SIZE * 2, AddrMode::PREINDEX));    in CallNativeWithArgv()
    559  __ Stp(temp, temp, MemoryOperand(spRegister, -FRAME_SLOT_SIZE * 2, AddrMode::PREINDEX));    in CallNativeWithArgv()
    561  __ Stp(Register(Zero), temp, MemoryOperand(spRegister, -FRAME_SLOT_SIZE * 2, AddrMode::PREINDEX));    in CallNativeWithArgv()
    659  __ Stp(next, op, MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));    in PushBuiltinFrame()
    665  __ Stp(next, op, MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));    in PushBuiltinFrame()
    673  __ Stp(nex…    in PushBuiltinFrame()
    [all...]
/arkcompiler/runtime_core/static_core/compiler/optimizer/code_generator/target/aarch64/
callconv.cpp
    229  GetMasm()->Stp(VixlReg(InitFlagsReg(frameInfo.GetHasFloatRegs())), …  // Reset OSR flag and set HasFloatRegsFlag    in GeneratePrologue()
encode.cpp
    2513  GetMasm()->Stp(tmp, zero, ConvertMem(memTo));    in EncodeMemCopyz()
    2516  GetMasm()->Stp(tmp, zero, ConvertMem(memTo));    in EncodeMemCopyz()
    2521  GetMasm()->Stp(tmp, zero, ConvertMem(memTo));    in EncodeMemCopyz()
    2693  GetMasm()->Stp(VixlVReg(src0), VixlVReg(src1), ConvertMem(mem));    in EncodeStp()
    2696  GetMasm()->Stp(VixlReg(src0), VixlReg(src1), ConvertMem(mem));    in EncodeStp()
    3088  masm->Stp(lastReg, reg, MemOperand(baseReg, (idx - OFFSET) * DOUBLE_WORD_SIZE_BYTES));    in LoadStorePair()
    3204  GetMasm()->Stp(currReg, nextReg, …    in LoadStoreRegistersLoop()
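Unlike the pre-indexed pushes in the trampolines, LoadStorePair() (line 3088) keeps the base register fixed and spreads the pairs with signed offsets of the form (idx - OFFSET) * DOUBLE_WORD_SIZE_BYTES. The sketch below only reproduces that offset calculation; the names mirror the snippet, and the concrete values (8-byte slots, x19..x28 as the saved range) are assumptions for illustration.

#include <cstdint>
#include <cstdio>

constexpr int64_t DOUBLE_WORD_SIZE_BYTES = 8;   // assumption: one 64-bit register slot

// Byte offset, relative to the base register, of the pair whose first register
// has index `idx`; `offsetIndex` is the index of the first saved register.
int64_t PairOffset(int64_t idx, int64_t offsetIndex)
{
    return (idx - offsetIndex) * DOUBLE_WORD_SIZE_BYTES;
}

int main()
{
    const int64_t firstSaved = 19;              // e.g. saving x19..x28
    for (int64_t idx = firstSaved; idx < 28; idx += 2) {
        // Each iteration mirrors masm->Stp(lastReg, reg, MemOperand(baseReg, ...)).
        std::printf("x%lld/x%lld stored at base + %lld\n",
                    static_cast<long long>(idx),
                    static_cast<long long>(idx + 1),
                    static_cast<long long>(PairOffset(idx, firstSaved)));
    }
    return 0;
}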
/arkcompiler/ets_runtime/ecmascript/compiler/assembler/tests/
assembler_aarch64_test.cpp
    167  __ Stp(Register(X1), Register(X2), MemoryOperand(Register(SP), 8, POSTINDEX));    in HWTEST_F_L0()
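The unit test exercises POSTINDEX: the pair is stored at the current SP and the base is advanced by the immediate afterwards, the opposite order from the PREINDEX pushes above. A minimal model of that write-back order (the 8-byte adjustment matches the test operand, not a typical frame push):

#include <cassert>
#include <cstdint>

struct PairStore {
    int64_t address;   // where the two registers are written
    int64_t newBase;   // base register value after write-back
};

// Models Stp(rt, rt2, MemoryOperand(base, imm, POSTINDEX)): store at the
// unmodified base first, then add the immediate to the base.
PairStore ModelStpPostIndex(int64_t base, int64_t imm)
{
    return {base, base + imm};
}

int main()
{
    // __ Stp(Register(X1), Register(X2), MemoryOperand(Register(SP), 8, POSTINDEX));
    PairStore s = ModelStpPostIndex(0x1000, 8);
    assert(s.address == 0x1000);       // X1/X2 land at the old SP
    assert(s.newBase == 0x1000 + 8);   // SP is bumped only after the store
    return 0;
}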