/arkcompiler/ets_runtime/ecmascript/compiler/trampoline/x64/

common_call.cpp
    85    Register argv, Register op1, Register op2, Label *stackOverflow) in PushArgsWithArgvAndCheckStack()
    88    StackOverflowCheck(assembler, glue, argc, op1, op2, stackOverflow); in PushArgsWithArgvAndCheckStack()
    90    Register op = op1; in PushArgsWithArgvAndCheckStack()
    91    if (op1 != op2) { in PushArgsWithArgvAndCheckStack()
    104   void CommonCall::StackOverflowCheck(ExtendedAssembler *assembler, Register glue, Register numArgs, Register op1, in StackOverflowCheck() argument
    107   Register temp1 = op1; in StackOverflowCheck()
    109   if (op1 == op2) { in StackOverflowCheck()
    120   if (op1 == op2) { in StackOverflowCheck()
    84    PushArgsWithArgvAndCheckStack(ExtendedAssembler *assembler, Register glue, Register argc, Register argv, Register op1, Register op2, Label *stackOverflow) PushArgsWithArgvAndCheckStack() argument

common_call.h
    46    Register op1, Register op2, Label *stackOverflow);
    47    static void StackOverflowCheck(ExtendedAssembler *assembler, Register glue, Register numArgs, Register op1,
    202   Register op1, Register op2, Label *stackOverflow);

asm_interpreter_call.cpp
    503   Register op1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1); in JSCallCommonSlowPath() local
    506   PushUndefinedWithArgcAndCheckStack(assembler, glueRegister, declaredNumArgsRegister, op1, op2, in JSCallCommonSlowPath()
    1834  Register op1, Register op2, Label *stackOverflow) in PushUndefinedWithArgcAndCheckStack()
    1837  StackOverflowCheck(assembler, glue, argc, op1, op2, stackOverflow); in PushUndefinedWithArgcAndCheckStack()
    1833  PushUndefinedWithArgcAndCheckStack(ExtendedAssembler *assembler, Register glue, Register argc, Register op1, Register op2, Label *stackOverflow) PushUndefinedWithArgcAndCheckStack() argument
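The x64 trampoline hits above (PushArgsWithArgvAndCheckStack and PushUndefinedWithArgcAndCheckStack) both route through StackOverflowCheck before any slots are written. A minimal host-side model of what such a guard computes, assuming 8-byte argument slots and a per-thread stack limit reachable through the glue pointer (the field names and exact layout are assumptions, not the real trampoline ABI):

    #include <cstdint>

    constexpr uint64_t FRAME_SLOT_SIZE = 8;  // one 64-bit argument slot

    // Hypothetical model: true when pushing argc slots would drop sp below the
    // recorded stack limit, i.e. the case where the assembly jumps to the
    // stackOverflow label instead of pushing.
    static bool WouldOverflowStack(uint64_t sp, uint64_t argc, uint64_t stackLimit)
    {
        uint64_t needed = argc * FRAME_SLOT_SIZE;
        return sp < stackLimit + needed;  // written to avoid unsigned wrap of sp - needed
    }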
/arkcompiler/runtime_core/static_core/libllvmbackend/transforms/

gc_utils.h
    66    auto op1 = llvm::dyn_cast<llvm::Constant>(cmp->getOperand(1)); in IsNullCmp() local
    67    return (op0 != nullptr && op0->isNullValue()) || (op1 != nullptr && op1->isNullValue()); in IsNullCmp()
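The two gc_utils.h hits are the heart of IsNullCmp: a compare counts as a null check when either operand is a constant whose value is the null value of its type. Reconstructed as a self-contained helper using standard LLVM APIs (a sketch, not the file's exact surrounding code):

    #include <llvm/IR/Constant.h>
    #include <llvm/IR/Instructions.h>

    static bool IsNullCmp(const llvm::ICmpInst *cmp)
    {
        // Either side may hold the constant: "x == null" or "null == x".
        auto *op0 = llvm::dyn_cast<llvm::Constant>(cmp->getOperand(0));
        auto *op1 = llvm::dyn_cast<llvm::Constant>(cmp->getOperand(1));
        return (op0 != nullptr && op0->isNullValue()) || (op1 != nullptr && op1->isNullValue());
    }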
builtins.cpp
    170   auto op1 = inst->getOperand(1U); in PreWRBHelper() local
    171   ASSERT(llvm::isa<llvm::ConstantInt>(op1)); in PreWRBHelper()
    172   auto isConst = llvm::cast<llvm::ConstantInt>(op1); in PreWRBHelper()
/arkcompiler/runtime_core/static_core/libllvmbackend/transforms/passes/

intrinsics_lowering.cpp
    113   llvm::Value *op1 = call->getOperand(SRC_OR_CHAR); in HandleMemCall() local
    120   if (op1->getType() != callee.getFunctionType()->getParamType(SRC_OR_CHAR)) { in HandleMemCall()
    121   ASSERT(op1->getType()->isPointerTy()); in HandleMemCall()
    122   op1 = builder.CreateAddrSpaceCast(op1, callee.getFunctionType()->getParamType(SRC_OR_CHAR)); in HandleMemCall()
    131   auto newCall = llvm::CallInst::Create(callee, {op0, op1, op2}); in HandleMemCall()
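In HandleMemCall the lowered memory-intrinsic call is rebuilt only after its pointer operand has been coerced into the address space the callee expects (hits 120-122 above). The same coercion as a stand-alone sketch, with illustrative names:

    #include <cassert>
    #include <llvm/IR/IRBuilder.h>

    // Insert an addrspacecast when the argument's pointer type does not match
    // the callee's parameter type; otherwise pass the value through unchanged.
    static llvm::Value *CoerceToParamType(llvm::IRBuilder<> &builder, llvm::Value *arg, llvm::Type *paramType)
    {
        if (arg->getType() == paramType) {
            return arg;
        }
        assert(arg->getType()->isPointerTy() && paramType->isPointerTy());
        return builder.CreateAddrSpaceCast(arg, paramType);
    }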
/arkcompiler/runtime_core/compiler/optimizer/ir/

graph_checker.cpp
    761   [[maybe_unused]] auto op1 = inst->GetInputs()[0].GetInst(); in VisitIf() local
    773   if (op1->IsConst()) { in VisitIf()
    774   ASSERT_DO(IsZeroConstant(op1), (std::cerr << "Constant reference input must be integer 0: \n", in VisitIf()
    775   inst->Dump(&std::cerr), op1->Dump(&std::cerr))); in VisitIf()
    777   ASSERT_DO(op1->GetType() == DataType::REFERENCE, (std::cerr << "If 1st operand type is not a reference\n", in VisitIf()
    778   inst->Dump(&std::cerr), op1->Dump(&std::cerr))); in VisitIf()
    795   [[maybe_unused]] auto op1 = inst->GetInput(0).GetInst(); in VisitIfImm() local
    803   if (op1->IsConst()) { in VisitIfImm()
    804   ASSERT_DO(IsZeroConstant(op1), (std::cerr << "Constant reference input must be integer 0: \n", in VisitIfImm()
    805   inst->Dump(&std::cerr), op1 in VisitIfImm()
    [all...]
/arkcompiler/ets_runtime/ecmascript/compiler/trampoline/aarch64

common_call.cpp
    117   Register argv, Register padding, Register op1, Register op2, Label *next) in PushArgsWithArgvInPair()
    129   __ Ldr(op1, MemoryOperand(argv, -FRAME_SLOT_SIZE, PREINDEX)); in PushArgsWithArgvInPair()
    130   __ Stp(op1, Register(Zero), MemoryOperand(sp, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX)); in PushArgsWithArgvInPair()
    145   __ Ldp(op1, op2, MemoryOperand(argv, -DOUBLE_SLOT_SIZE, PREINDEX)); in PushArgsWithArgvInPair()
    146   __ Stp(op1, op2, MemoryOperand(sp, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX)); in PushArgsWithArgvInPair()
    116   PushArgsWithArgvInPair(ExtendedAssembler *assembler, Register argc, Register argv, Register padding, Register op1, Register op2, Label *next) PushArgsWithArgvInPair() argument

common_call.h
    69    Register op1, Register op2, Label *next);

optimized_call.cpp
    772   Register op1 = __ AvailableRegister2(); in FastCallToAsmInterBridge() local
    779   PushArgsWithArgvInPair(assembler, tempArgc, onStackArgs, arg4, op1, op2, &lCall4); in FastCallToAsmInterBridge()
/arkcompiler/runtime_core/static_core/compiler/optimizer/ir/

graph_checker.cpp
    1504  [[maybe_unused]] auto op1 = inst->GetInputs()[0].GetInst(); in CheckUserOfInt32() local
    1506  CHECKER_DO_IF_NOT_AND_PRINT_VISITOR(v, op1->WithGluedInsts(), in CheckUserOfInt32()
    1508  if (op1->GetOpcode() == Opcode::LoadArrayPairI) { in CheckUserOfInt32()
    1509  CHECKER_DO_IF_NOT_AND_PRINT_VISITOR(v, idx < op1->CastToLoadArrayPairI()->GetDstCount(), in CheckUserOfInt32()
    1511  } else if (op1->GetOpcode() == Opcode::LoadArrayPair) { in CheckUserOfInt32()
    1512  CHECKER_DO_IF_NOT_AND_PRINT_VISITOR(v, idx < op1->CastToLoadArrayPair()->GetDstCount(), in CheckUserOfInt32()
    1515  ASSERT(op1->GetOpcode() == Opcode::LoadObjectPair); in CheckUserOfInt32()
    1523  while (prev != nullptr && prev != op1) { in CheckUserOfInt32()
    1531  v, prev != nullptr && prev == op1, in CheckUserOfInt32()
    1968  [[maybe_unused]] auto op1 in CheckUserOfInt32() local
    2036  [[maybe_unused]] auto op1 = inst->GetInput(0).GetInst(); CheckUserOfInt32() local
    2066  [[maybe_unused]] auto op1 = inst->GetInputs()[1].GetInst(); CheckUserOfInt32() local
    2363  [[maybe_unused]] auto op1 = inst->GetInput(1).GetInst(); CheckUserOfInt32() local
    2394  [[maybe_unused]] auto op1 = inst->GetInput(1).GetInst(); CheckUserOfInt32() local
    2442  [[maybe_unused]] auto op1 = inst->GetInput(1).GetInst(); CheckUserOfInt32() local
    2486  [[maybe_unused]] auto op1 = inst->GetInput(1).GetInst(); CheckUserOfInt32() local
    2536  [[maybe_unused]] auto op1 = inst->GetInputs()[0].GetInst(); CheckUserOfInt32() local
    2577  [[maybe_unused]] auto op1 = inst->GetInput(0).GetInst(); CheckUserOfInt32() local
    [all...]

graph_checker.h
    284   [[maybe_unused]] auto op1 = inst->GetInputs()[0].GetInst(); in CheckBinaryOperationTypes() local
    300   v, DataType::IsTypeNumeric(op1->GetType()), in CheckBinaryOperationTypes()
    308   CHECKER_DO_IF_NOT_VISITOR(v, CheckCommonTypes(op1, op2), in CheckBinaryOperationTypes()
    310   op1->Dump(&std::cerr), op2->Dump(&std::cerr), inst->Dump(&std::cerr))); in CheckBinaryOperationTypes()
    312   v, CheckCommonTypes(inst, op1), in CheckBinaryOperationTypes()
    352   [[maybe_unused]] auto op1 = inst->GetInputs()[0].GetInst(); in CheckTernaryOperationTypes() local
    363   v, DataType::IsTypeNumeric(op1->GetType()), in CheckTernaryOperationTypes()
    374   CHECKER_DO_IF_NOT_VISITOR(v, CheckCommonTypes(op1, op2) && CheckCommonTypes(op2, op3), in CheckTernaryOperationTypes()
    376   op1->Dump(&std::cerr), op2->Dump(&std::cerr), op3->Dump(&std::cerr), in CheckTernaryOperationTypes()
    379   v, CheckCommonTypes(inst, op1), in CheckTernaryOperationTypes()
    [all...]
/arkcompiler/runtime_core/static_core/compiler/optimizer/optimizations/

peepholes.cpp
    623   auto op1 = inst->GetInput(0).GetInst(); in VisitShr() local
    625   if (op1->GetOpcode() == Opcode::Shl && op2->IsConst() && op1->GetInput(1) == op2) { in VisitShr()
    627   ASSERT(inst->GetType() == op1->GetType()); in VisitShr()
    640   // "inst"(shr) and "op1->GetInput(0).GetInst()" in VisitShr()
    641   if (SkipThisPeepholeInOSR(op1->GetInput(0).GetInst(), inst)) { in VisitShr()
    644   CreateAndInsertInst(Opcode::And, inst, op1->GetInput(0).GetInst(), newCnst); in VisitShr()
    677   auto op1 = inst->GetInput(0).GetInst(); in VisitAShr() local
    679   if (op1->GetOpcode() == Opcode::Shl && op2->IsConst() && op1 in VisitAShr()
    1236  auto op1 = input->GetInput(1).GetInst(); VisitCastCase3() local
    [all...]
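The VisitShr hits implement a classic mask peephole: a logical shift left by a constant followed by a logical shift right by the same constant only clears the high bits, so the Shl/Shr pair is replaced by a single And against (1 << (width - c)) - 1. A worked check of that equivalence for 32-bit values:

    #include <cassert>
    #include <cstdint>

    int main()
    {
        uint32_t x = 0x12345678U;
        uint32_t c = 24U;                                 // shift amount
        uint32_t viaShifts = (x << c) >> c;               // Shr(Shl(x, 24), 24)
        uint32_t viaMask = x & ((1U << (32U - c)) - 1U);  // And(x, 0xFF)
        assert(viaShifts == viaMask);                     // both keep only the low 8 bits
        return 0;
    }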
/arkcompiler/runtime_core/static_core/compiler/tests/amd64/

encoder64_test_2.cpp
    388   T op1 = RandomGen<T>(); in TestDivImm() local
    391   if (!test->CallCode<T>(op1, (op1 / imm))) { in TestDivImm()
    519   T op1 = RandomGen<T>(); in TestModImm() local
    522   if (!test->CallCode<T>(op1, (op1 % imm))) { in TestModImm()
/arkcompiler/ets_runtime/ecmascript/compiler/codegen/maple/maple_be/src/cg/

cfgo.cpp
    67    Operand &op1 = insn1->GetOperand(i); in DoSameThing() local
    69    if (&op1 == &op2) { in DoSameThing()
    72    if (!op1.Equals(op2)) { in DoSameThing()

cgfunc.cpp
    337   * bgt (cmp (op0, op1), 0) ==> in HandleCondbr()
    338   * bgt (op0, op1) in HandleCondbr()
    343   BaseNode *op1 = condNode->Opnd(1); in HandleCondbr() local
    344   DEBUG_ASSERT(op1 != nullptr, "get second opnd of a condNode failed"); in HandleCondbr()
    345   if ((op0->GetOpCode() == OP_cmp) && (op1->GetOpCode() == OP_constval)) { in HandleCondbr()
    346   auto *constValNode = static_cast<ConstvalNode *>(op1); in HandleCondbr()
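The comment preserved in the cgfunc.cpp hits (source lines 337-338) describes the fold HandleCondbr performs: a branch on the result of a three-way cmp compared against the constant 0 is the same as branching on the original operands directly. The underlying equivalence, checked on plain integers:

    #include <cassert>

    // Three-way compare returning <0, 0 or >0, standing in for OP_cmp.
    static int Cmp(int a, int b) { return (a > b) - (a < b); }

    int main()
    {
        for (int a = -2; a <= 2; ++a) {
            for (int b = -2; b <= 2; ++b) {
                // bgt(cmp(op0, op1), 0) takes the branch exactly when op0 > op1.
                assert((Cmp(a, b) > 0) == (a > b));
            }
        }
        return 0;
    }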
/arkcompiler/runtime_core/static_core/static_linker/

linker_context.cpp
    381   bool Context::IsSameProto(panda_file::ProtoItem *op1, panda_file::ProtoItem *op2) in IsSameProto() argument
    383   if (op1->GetRefTypes().size() != op2->GetRefTypes().size()) { in IsSameProto()
    387   if (op1->GetShorty() != op2->GetShorty()) { in IsSameProto()
    391   for (size_t i = 0; i < op1->GetRefTypes().size(); i++) { in IsSameProto()
    392   if (!IsSameType(op2->GetRefTypes()[i], op1->GetRefTypes()[i])) { in IsSameProto()
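IsSameProto, shown in the linker_context.cpp hits, rejects early on the cheap facts (reference-type count, shorty) and only then walks the referenced types pairwise through IsSameType. A simplified stand-in over hypothetical value types, since the real code operates on panda_file::ProtoItem:

    #include <cstddef>
    #include <string>
    #include <vector>

    struct Proto {
        std::string shorty;                  // encoded signature
        std::vector<const void *> refTypes;  // placeholder for resolved reference type items
    };

    static bool IsSameProto(const Proto &a, const Proto &b)
    {
        if (a.refTypes.size() != b.refTypes.size() || a.shorty != b.shorty) {
            return false;
        }
        for (size_t i = 0; i < a.refTypes.size(); i++) {
            if (a.refTypes[i] != b.refTypes[i]) {  // the real code calls IsSameType() here
                return false;
            }
        }
        return true;
    }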
linker_context.h
    209   bool IsSameProto(panda_file::ProtoItem *op1, panda_file::ProtoItem *op2);
/arkcompiler/runtime_core/static_core/compiler/tests/aarch64/

encoder64_test_2.cpp
    531   T op1 = RandomGen<T>(); in TestDivImm() local
    534   if (!test->CallCode<T>(op1, (op1 / imm))) { in TestDivImm()
    589   T op1 = RandomGen<T>(); in TestModImm() local
    592   if (!test->CallCode<T>(op1, (op1 % imm))) { in TestModImm()
/arkcompiler/runtime_core/static_core/compiler/optimizer/analysis/

bounds_analysis.cpp
    883   auto op1 = compare->GetInput(1).GetInst(); in VisitIfImm() local
    885   (DataType::GetCommonType(op1->GetType()) != DataType::INT64 && op1->GetType() != DataType::REFERENCE)) { in VisitIfImm()
    904   CalcNewBoundsRangeForCompare(v, block, cc, {op0, op1}, trueBlock); in VisitIfImm()
    905   CalcNewBoundsRangeForCompare(v, block, GetInverseConditionCode(cc), {op0, op1}, falseBlock); in VisitIfImm()
/arkcompiler/runtime_core/static_core/compiler/optimizer/code_generator/target/amd64/

encode.cpp
    1450  Reg op1 {src1}; in EncodeDiv()
    1454  op1 = Reg(tmpReg); in EncodeDiv()
    1466  GetMasm()->idiv(ArchReg(op1)); in EncodeDiv()
    1469  GetMasm()->div(ArchReg(op1)); in EncodeDiv()
    1637  Reg op1 {src1}; in EncodeMod()
    1641  op1 = Reg(tmpReg); in EncodeMod()
    1654  GetMasm()->idiv(ArchReg(op1)); in EncodeMod()
    1657  GetMasm()->div(ArchReg(op1)); in EncodeMod()
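EncodeDiv and EncodeMod both end in a single idiv/div because x86-64 produces quotient and remainder together: the dividend lives in RDX:RAX, and after the instruction the quotient is in RAX and the remainder in RDX; the hits that move op1 into a temporary register presumably keep the divisor from aliasing those fixed registers. A signed-division sketch using GCC/Clang inline assembly on x86-64, assuming a non-zero divisor and no INT64_MIN / -1 case:

    #include <cstdint>
    #include <utility>

    // Returns {quotient, remainder}; traps on divisor == 0 or INT64_MIN / -1,
    // exactly like the raw instruction.
    static std::pair<int64_t, int64_t> SignedDivMod(int64_t dividend, int64_t divisor)
    {
        int64_t quot;
        int64_t rem;
        asm("cqto\n\t"    // sign-extend RAX into RDX:RAX
            "idivq %[d]"  // quotient -> RAX, remainder -> RDX
            : "=a"(quot), "=&d"(rem)
            : "a"(dividend), [d] "r"(divisor)
            : "cc");
        return {quot, rem};
    }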
/arkcompiler/runtime_core/static_core/compiler/optimizer/code_generator/target/aarch32/

encode.cpp
    2918  Reg op1 = swap ? src0 : src1; in CompareHelper() local
    2920  GetMasm()->Cmp(VixlReg(op0), VixlReg(op1)); in CompareHelper()
    2921  GetMasm()->Sbcs(VixlReg(tmpReg), VixlRegU(op0), VixlRegU(op1)); in CompareHelper()
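CompareHelper on aarch32 compares 64-bit values held in register pairs: Cmp subtracts the low words, and Sbcs then subtracts the high words together with the borrow, leaving NZCV flags that describe the whole 64-bit comparison (the swap in the first hit just selects the operand order). The borrow chain, modelled in plain C++ for the signed less-than case (illustrative only; the hardware reads the LT condition off the flags):

    #include <cstdint>

    // a = (aHi << 32) | aLo and b likewise; returns the signed "a < b" that
    // the LT condition encodes after Cmp(aLo, bLo) followed by Sbcs(aHi, bHi).
    static bool SignedLess64(uint32_t aLo, int32_t aHi, uint32_t bLo, int32_t bHi)
    {
        uint32_t borrow = (aLo < bLo) ? 1U : 0U;                    // Cmp: borrow out of the low words
        int64_t hiDiff = static_cast<int64_t>(aHi) - bHi - borrow;  // Sbcs: high words minus borrow
        return hiDiff < 0;
    }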