/third_party/node/deps/v8/src/baseline/mips/
  baseline-assembler-mips-inl.h
    171  __ Lw(type, FieldMemOperand(map, Map::kInstanceTypeOffset));  in JumpIfInstanceType()
    179  __ Lw(scratch, operand);  in JumpIfPointer()
    201  __ Lw(scratch, operand);  in JumpIfTagged()
    209  __ Lw(scratch, operand);  in JumpIfTagged()
    354  __ Lw(output, FieldMemOperand(source, offset));  in LoadTaggedPointerField()
    358  __ Lw(output, FieldMemOperand(source, offset));  in LoadTaggedSignedField()
    362  __ Lw(output, FieldMemOperand(source, offset));  in LoadTaggedAnyField()
    406  __ Lw(interrupt_budget,  in AddToInterruptBudgetAndJumpIfNotExceeded()
    426  __ Lw(interrupt_budget,  in AddToInterruptBudgetAndJumpIfNotExceeded()

/third_party/node/deps/v8/src/builtins/mips/
  builtins-mips.cc
    734   __ Lw(kScratchReg, FieldMemOperand(kScratchReg, FixedArray::kHeaderSize));  in Generate_ResumeGeneratorTrampoline()
    739   __ Lw(kScratchReg, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));  in Generate_ResumeGeneratorTrampoline()
    828   __ Lw(actual_params_size,  in LeaveInterpreterFrame()
    878   __ Lw(scratch1,  in TailCallOptimizedCodeSlot()
    880   __ Lw(scratch1,  in TailCallOptimizedCodeSlot()
    1011  __ Lw(optimization_state,  in LoadTieringStateAndJumpIfNeedsProcessing()
    1039  __ Lw(tiering_state,  in MaybeOptimizeCodeOrTailCallOptimizedCodeSlot()
    1069  __ Lw(feedback_vector,  in Generate_BaselineOutOfLinePrologue()
    1071  __ Lw(feedback_vector, FieldMemOperand(feedback_vector, Cell::kValueOffset));  in Generate_BaselineOutOfLinePrologue()
    1095  __ Lw(invocation_coun  in Generate_BaselineOutOfLinePrologue()
    [all...]

/third_party/node/deps/openssl/openssl/crypto/bn/
  bn_local.h
    477  # define Lw(t) (((BN_ULONG)(t))&BN_MASK2)  (macro)
    483  (r)= Lw(t); \
    490  (r)= Lw(t); \
    497  (r0)=Lw(t); \
  bn_asm.c
    457  c0 = (BN_ULONG)Lw(t); \
    466  c0 = (BN_ULONG)Lw(tt); \
    470  c0 = (BN_ULONG)Lw(t); \
    479  c0 = (BN_ULONG)Lw(t); \

/third_party/openssl/crypto/bn/
  bn_local.h
    477  # define Lw(t) (((BN_ULONG)(t))&BN_MASK2)  (macro)
    483  (r)= Lw(t); \
    490  (r)= Lw(t); \
    497  (r0)=Lw(t); \
  bn_asm.c
    457  c0 = (BN_ULONG)Lw(t); \
    466  c0 = (BN_ULONG)Lw(tt); \
    470  c0 = (BN_ULONG)Lw(t); \
    479  c0 = (BN_ULONG)Lw(t); \

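The same bn_local.h / bn_asm.c pair appears twice above because OpenSSL is vendored both under node's deps and as its own third_party copy. In both, Lw(t) masks a double-width intermediate down to the low BN_ULONG word, which is how bn_asm.c extracts the low half of a widening multiply. A minimal standalone sketch of that idea, assuming a 32-bit BN_ULONG and a 64-bit intermediate (the real widths and BN_MASK2 value are build-configuration dependent):

  #include <cstdint>
  #include <cstdio>

  // Illustrative stand-ins; in OpenSSL these come from bn_local.h.
  typedef std::uint32_t BN_ULONG;          // one "word" of a bignum
  typedef std::uint64_t BN_ULLONG;         // double-width intermediate
  #define BN_MASK2 ((BN_ULONG)0xffffffff)  // mask selecting the low word
  #define Lw(t) (((BN_ULONG)(t)) & BN_MASK2)

  int main() {
    BN_ULONG a = 0xfffffffb, b = 0xfffffffd;
    BN_ULLONG t = (BN_ULLONG)a * b;      // widening multiply, 64-bit result
    BN_ULONG lo = Lw(t);                 // low word, as in "c0 = (BN_ULONG)Lw(t);"
    BN_ULONG hi = (BN_ULONG)(t >> 32);   // high word (bn_local.h pairs Lw with an Hw macro)
    std::printf("lo=%08x hi=%08x\n", lo, hi);
  }
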
/third_party/node/deps/v8/src/regexp/mips/
  regexp-macro-assembler-mips.cc
    156   __ Lw(a0, MemOperand(frame_pointer(), kBacktrackCount));  in Backtrack()
    627   __ Lw(dst, MemOperand(dst));  in LoadRegExpStackPointerFromMemory()
    643   __ Lw(scratch, MemOperand(scratch));  in PushRegExpBasePointer()
    652   __ Lw(stack_pointer_out,  in PopRegExpBasePointer()
    655   __ Lw(scratch, MemOperand(scratch));  in PopRegExpBasePointer()
    1077  __ Lw(a0, MemOperand(a0));  in WriteStackPointerToRegister()
    1086  __ Lw(a0, MemOperand(a0));  in ReadStackPointerFromRegister()

/third_party/node/deps/v8/src/builtins/mips64/
  builtins-mips64.cc
    833   __ Lw(params_size,  in LeaveInterpreterFrame()
    889   __ Lw(scratch1,  in TailCallOptimizedCodeSlot()
    1020  __ Lw(optimization_state,  in LoadTieringStateAndJumpIfNeedsProcessing()
    1103  __ Lw(invocation_count,  in Generate_BaselineOutOfLinePrologue()
    1255  __ Lw(optimization_state,  in Generate_InterpreterEntryTrampoline()
    1269  __ Lw(a4, FieldMemOperand(feedback_vector,  in Generate_InterpreterEntryTrampoline()
    1296  __ Lw(a4, FieldMemOperand(kInterpreterBytecodeArrayRegister,  in Generate_InterpreterEntryTrampoline()
    1321  __ Lw(a5, FieldMemOperand(  in Generate_InterpreterEntryTrampoline()
    2975  __ Lw(input_low, MemOperand(sp, kArgumentOffset + Register::kMantissaOffset));  in Generate_DoubleToI()
    2976  __ Lw(input_hig  in Generate_DoubleToI()
    [all...]

/third_party/node/deps/v8/src/builtins/riscv64/
  builtins-riscv64.cc
    876   __ Lw(params_size,  in LeaveInterpreterFrame()
    937   __ Lw(a5, FieldMemOperand(a5, CodeDataContainer::kKindSpecificFlagsOffset));  in TailCallOptimizedCodeSlot()
    1077  __ Lw(optimization_state,  in LoadTieringStateAndJumpIfNeedsProcessing()
    1160  __ Lw(invocation_count,  in Generate_BaselineOutOfLinePrologue()
    1319  __ Lw(optimization_state,  in Generate_InterpreterEntryTrampoline()
    1333  __ Lw(a4, FieldMemOperand(feedback_vector,  in Generate_InterpreterEntryTrampoline()
    1360  __ Lw(a4, FieldMemOperand(kInterpreterBytecodeArrayRegister,  in Generate_InterpreterEntryTrampoline()
    1385  __ Lw(a5, FieldMemOperand(  in Generate_InterpreterEntryTrampoline()
    3002  __ Lw(input_low, MemOperand(sp, kArgumentOffset + Register::kMantissaOffset));  in Generate_DoubleToI()
    3003  __ Lw(input_hig  in Generate_DoubleToI()
    [all...]

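The Generate_DoubleToI hits in the two builtins files above read a stack-slot double as two 32-bit words (the mantissa word, then on the following line the exponent word) before converting to an integer manually. A standalone sketch of that word split, purely illustrative; V8's Register::kMantissaOffset / exponent-offset constants and register names are not reproduced here:

  #include <cstdint>
  #include <cstdio>
  #include <cstring>

  // Split an IEEE-754 double into its low (mantissa) and high (sign/exponent/
  // top-of-mantissa) 32-bit words, the same two words the Lw pairs above load.
  void SplitDouble(double value, std::uint32_t* low, std::uint32_t* high) {
    std::uint64_t bits;
    std::memcpy(&bits, &value, sizeof bits);        // reinterpret without UB
    *low = static_cast<std::uint32_t>(bits);        // mantissa word
    *high = static_cast<std::uint32_t>(bits >> 32); // exponent word
  }

  int main() {
    std::uint32_t lo, hi;
    SplitDouble(2.5, &lo, &hi);
    std::printf("low=%08x high=%08x\n", lo, hi);  // prints low=00000000 high=40040000
  }
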
/third_party/node/deps/v8/src/compiler/backend/mips/
  code-generator-mips.cc
    1690  __ Lw(kScratchReg, i.MemoryOperand());  in AssembleArchInstruction()
    1699  __ Lw(kScratchReg, memLow);  in AssembleArchInstruction()
    1701  __ Lw(kScratchReg, memHigh);  in AssembleArchInstruction()
    1711  __ Lw(kScratchReg, memLow);  in AssembleArchInstruction()
    1713  __ Lw(kScratchReg, memHigh);  in AssembleArchInstruction()
    1725  __ Lw(kScratchReg, memLow);  in AssembleArchInstruction()
    1727  __ Lw(kScratchReg, memHigh);  in AssembleArchInstruction()
    1738  __ Lw(kScratchReg, memLow);  in AssembleArchInstruction()
    1740  __ Lw(kScratchReg, memHigh);  in AssembleArchInstruction()
    1752  __ Lw(kScratchRe  in AssembleArchInstruction()
    [all...]

/third_party/node/deps/v8/src/baseline/riscv64/
  baseline-assembler-riscv64-inl.h
    394  __ Lw(interrupt_budget,  in AddToInterruptBudgetAndJumpIfNotExceeded()
    416  __ Lw(interrupt_budget,  in AddToInterruptBudgetAndJumpIfNotExceeded()

/third_party/node/deps/v8/src/baseline/mips64/
  baseline-assembler-mips64-inl.h
    404  __ Lw(interrupt_budget,  in AddToInterruptBudgetAndJumpIfNotExceeded()
    424  __ Lw(interrupt_budget,  in AddToInterruptBudgetAndJumpIfNotExceeded()

/third_party/node/deps/v8/src/codegen/mips/
  macro-assembler-mips.cc
    1272  void TurboAssembler::Lw(Register rd, const MemOperand& rs) {  in CallRecordWriteStub() (function in v8::internal::TurboAssembler)
    3927  Lw(destination, EntryFromBuiltinAsOperand(builtin));  in CallRecordWriteStub()
    4175  Lw(scratch2, MemOperand(scratch2));  in CallRecordWriteStub()
    4185  Lw(scratch2, MemOperand(scratch2));  in CallRecordWriteStub()
    4315  Lw(destination, MemOperand(kRootRegister, static_cast<int32_t>(offset)));  in CallRecordWriteStub()
    4376  Lw(t1, MemOperand(src, 0));  in CallRecordWriteStub()
    4791  Lw(destination, FieldMemOperand(object, HeapObject::kMapOffset));  in CallRecordWriteStub()
    4796  Lw(dst,  in CallRecordWriteStub()
    4798  Lw(dst, MemOperand(dst, Context::SlotOffset(index)));  in CallRecordWriteStub()
    5570  Lw(t  in CallRecordWriteStub()
    [all...]
  macro-assembler-mips.h
    197  inline void Move(Register output, MemOperand operand) { Lw(output, operand); }  in Move()
    276  void Lw(Register rd, const MemOperand& rs);
    923  Lw(dest, MemOperand(sp, 0));  in LoadReceiver()

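For readers scanning this index: Lw is TurboAssembler's load-word helper, declared just above as void Lw(Register rd, const MemOperand& rs); (and again in the mips64 and riscv64 macro-assemblers below). On the 64-bit ports it matters that lw is a 32-bit load whose result is sign-extended into the full register, which is why it appears for Smi and 32-bit-field loads. A minimal standalone sketch of that semantic, not V8 code:

  #include <cstdint>
  #include <cstdio>
  #include <cstring>

  // What a MIPS64/RISC-V "lw" does, expressed in portable C++: load 32 bits
  // from memory and sign-extend the result to a 64-bit register-sized value.
  std::int64_t LoadWordSignExtended(const void* address) {
    std::int32_t word;
    std::memcpy(&word, address, sizeof word);  // the 32-bit memory access
    return static_cast<std::int64_t>(word);    // sign extension into 64 bits
  }

  int main() {
    std::int32_t negative = -5;
    std::printf("%lld\n",
                static_cast<long long>(LoadWordSignExtended(&negative)));  // -5, not 4294967291
  }
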
/third_party/node/deps/v8/src/regexp/riscv64/
  regexp-macro-assembler-riscv64.cc
    214   __ Lw(a0, MemOperand(backtrack_stackpointer(), 0));  in CheckGreedyLoop()
    1101  __ Lw(backtrack_stackpointer(), register_location(reg));  in ReadStackPointerFromRegister()
    1302  __ Lw(target, MemOperand(backtrack_stackpointer()));

/third_party/node/deps/v8/src/wasm/baseline/mips/
  liftoff-assembler-mips.h
    327  Lw(scratch, MemOperand(fp, 4));  in PrepareTailCall()
    329  Lw(scratch, MemOperand(fp, 0));  in PrepareTailCall()
    335  Lw(scratch, MemOperand(sp, i * 4));  in PrepareTailCall()
    392  Lw(stack_limit,  in PatchPrepareStackFrame()
    395  Lw(stack_limit, MemOperand(stack_limit));  in PatchPrepareStackFrame()

/third_party/node/deps/v8/src/codegen/mips64/
  macro-assembler-mips64.cc
    1129  Lw(rd, rs);  in CallRecordWriteStub()
    1303  Lw(scratch, MemOperand(rs.rm(), rs.offset() + kPointerSize / 2));  in CallRecordWriteStub()
    1414  void TurboAssembler::Lw(Register rd, const MemOperand& rs) {  in CallRecordWriteStub() (function in v8::internal::TurboAssembler)
    2726  Lw(scratch, src);  in CallRecordWriteStub()
    5231  Lw(scratch1, MemOperand(scratch2));  in CallRecordWriteStub()
    5247  Lw(scratch1, MemOperand(scratch2));  in CallRecordWriteStub()
    5539  Lw(dst, MemOperand(src.rm(), SmiWordOffset(src.offset())));  in CallRecordWriteStub()
    5542  Lw(dst, src);  in CallRecordWriteStub()
    6143  Lw(scratch, FieldMemOperand(code_object, Code::kFlagsOffset));  in CallRecordWriteStub()
    6155  Lw(scratc  in CallRecordWriteStub()
    [all...]
  macro-assembler-mips64.h
    661  void Lw(Register rd, const MemOperand& rs);

/third_party/node/deps/v8/src/compiler/backend/mips64/
  code-generator-mips64.cc
    562   __ Lw(kScratchReg,  in BailoutIfDeoptimized()
    1636  __ Lw(i.OutputRegister(), i.MemoryOperand());  in AssembleArchInstruction()
    1887  ASSEMBLE_ATOMIC_LOAD_INTEGER(Lw);  in AssembleArchInstruction()
    4589  __ Lw(temp_0, src0);  // Then use temp_0 to copy source to destination.  in AssembleConstructFrame()
    4591  __ Lw(temp_0, src1);  in AssembleConstructFrame()
    4593  __ Lw(temp_0, src2);  in AssembleConstructFrame()
    4595  __ Lw(temp_0, src3);  in AssembleConstructFrame()
    4601  __ Lw(temp_0, src0);  // Then use temp_0 to copy source to destination.  in AssembleConstructFrame()
    4603  __ Lw(temp_0, src1);  in AssembleConstructFrame()

/third_party/node/deps/v8/src/codegen/riscv64/
  macro-assembler-riscv64.cc
    1302  Lw(scratch, MemOperand(rs.rm(), rs.offset() + kSystemPointerSize / 2));  in LoadWordPair()
    1386  void TurboAssembler::Lw(Register rd, const MemOperand& rs) {  in Lw() (function in v8::internal::TurboAssembler)
    4210  Lw(scratch1, MemOperand(scratch2));  in EmitIncrementCounter()
    4226  Lw(scratch1, MemOperand(scratch2));  in EmitDecrementCounter()
    4537  Lw(dst, MemOperand(src.rm(), SmiWordOffset(src.offset())));  in SmiUntag()
    4541  Lw(dst, src);  in SmiUntag()
    5019  Lw(scratch, FieldMemOperand(code_object, Code::kFlagsOffset));  in LoadCodeObjectEntry()
    5030  Lw(scratch, FieldMemOperand(code_object, Code::kBuiltinIndexOffset));  in LoadCodeObjectEntry()

/third_party/node/deps/v8/src/wasm/baseline/mips64/
  liftoff-assembler-mips64.h
    103   assm->Lw(dst.gp(), src);  in Load()
    460   Lw(dst, MemOperand(instance, offset));  in LoadFromInstance()
    654   Lw(dst.gp(), src_op);  in AtomicLoad()
    1030  Lw(reg.gp(), src);  in Fill()
    2074  Lw(scratch, src_op);  in LoadTransform()

/third_party/node/deps/v8/src/regexp/mips64/
  regexp-macro-assembler-mips64.cc
    253   __ Lw(a0, MemOperand(backtrack_stackpointer(), 0));  in CheckGreedyLoop()
    1345  __ Lw(target, MemOperand(backtrack_stackpointer()));

/third_party/skia/third_party/externals/swiftshader/third_party/subzero/src/
  IceInstMIPS32.h
    230   Lw,  (enumerator)
    1236  using InstMIPS32Lw = InstMIPS32Load<InstMIPS32::Lw>;

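The Subzero hits show Lw both as a value in the instruction-kind enum and as the template argument that stamps out a concrete load-instruction type (using InstMIPS32Lw = InstMIPS32Load<InstMIPS32::Lw>;). A compressed sketch of that enum-plus-template-alias pattern; every name except Lw is invented for illustration:

  #include <cstdio>

  // Hypothetical mirror of the Subzero pattern: one generic load-instruction
  // template, specialised per opcode through an enumerator template argument.
  struct Inst {
    enum InstKind { Lw, Lb, Lh };  // corresponds to the "Lw, (enumerator)" hit
  };

  template <Inst::InstKind Kind>
  struct InstLoad {
    static constexpr Inst::InstKind kKind = Kind;  // opcode fixed at compile time
    void emit() const { std::printf("emit load, kind=%d\n", static_cast<int>(kKind)); }
  };

  using InstLw = InstLoad<Inst::Lw>;  // analogue of InstMIPS32Lw

  int main() {
    InstLw lw;
    lw.emit();
  }
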
/third_party/node/deps/v8/src/wasm/baseline/riscv64/
  liftoff-assembler-riscv64.h
    102   assm->Lw(dst.gp(), src);  in Load()
    453   Lw(dst, MemOperand(src));  in LoadFromInstance()
    549   TurboAssembler::Lw(dst.gp(), src_op);  in Load()
    1050  Lw(reg.gp(), src);  in Fill()
    1717  Lw(scratch, MemOperand(dst.gp(), offset));  in IncrementSmi()
    1796  Lw(scratch, src_op);  in LoadTransform()

/third_party/node/deps/v8/src/compiler/backend/riscv64/
  code-generator-riscv64.cc
    618   __ Lw(kScratchReg,  in BailoutIfDeoptimized()
    1605  __ Lw(i.OutputRegister(), i.MemoryOperand());  in AssembleArchInstruction()
    1765  ASSEMBLE_ATOMIC_LOAD_INTEGER(Lw);  in AssembleArchInstruction()
    2024  __ Lw(kScratchReg, i.MemoryOperand());  in AssembleArchInstruction()

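The ASSEMBLE_ATOMIC_LOAD_INTEGER(Lw) hits in the mips64 and riscv64 code generators above pass the load mnemonic itself as a macro argument, so one macro body can emit the byte/half/word variants of an atomic load. A self-contained analogue of that dispatch-by-macro-argument pattern; FakeAssembler and the macro body below are illustrative, not the V8 macro:

  #include <cstdio>

  // Stand-in for the macro-assembler: each method "emits" one instruction.
  struct FakeAssembler {
    void Lw(const char* dst, const char* mem) { std::printf("  lw %s, %s\n", dst, mem); }
    void Lb(const char* dst, const char* mem) { std::printf("  lb %s, %s\n", dst, mem); }
    void sync() { std::printf("  sync\n"); }
  };

  // The mnemonic is a macro parameter, so the same body serves every width.
  #define ASSEMBLE_ATOMIC_LOAD(masm, asm_instr) \
    do {                                        \
      (masm).asm_instr("v0", "0(a0)");          \
      (masm).sync();                            \
    } while (0)

  int main() {
    FakeAssembler masm;
    ASSEMBLE_ATOMIC_LOAD(masm, Lw);  // expands to masm.Lw(...); masm.sync();
    ASSEMBLE_ATOMIC_LOAD(masm, Lb);
  }
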