// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/graph-assembler.h"

#include "src/codegen/code-factory.h"
#include "src/compiler/access-builder.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/linkage.h"
#include "src/compiler/schedule.h"
// For TNode types.
#include "src/objects/heap-number.h"
#include "src/objects/oddball.h"
#include "src/objects/smi.h"
#include "src/objects/string.h"

namespace v8 {
namespace internal {
namespace compiler {

// RAII scope that temporarily sets `inline_reductions_blocked_` on the
// assembler.  While an inline reducer runs (see AddNode), any nodes it creates
// through this assembler must not themselves be re-reduced; this scope
// enforces that, and the DCHECKs guard against nested/unbalanced use.
class V8_NODISCARD GraphAssembler::BlockInlineReduction {
 public:
  explicit BlockInlineReduction(GraphAssembler* gasm) : gasm_(gasm) {
    DCHECK(!gasm_->inline_reductions_blocked_);
    gasm_->inline_reductions_blocked_ = true;
  }
  ~BlockInlineReduction() {
    DCHECK(gasm_->inline_reductions_blocked_);
    gasm_->inline_reductions_blocked_ = false;
  }

 private:
  GraphAssembler* gasm_;
};

// Effect and control start out as nullptr; callers must call
// InitializeEffectControl before emitting effectful nodes.
GraphAssembler::GraphAssembler(
    MachineGraph* mcgraph, Zone* zone,
    base::Optional<NodeChangedCallback> node_changed_callback,
    bool mark_loop_exits)
    : temp_zone_(zone),
      mcgraph_(mcgraph),
      effect_(nullptr),
      control_(nullptr),
      node_changed_callback_(node_changed_callback),
      inline_reducers_(zone),
      inline_reductions_blocked_(false),
      loop_headers_(zone),
      mark_loop_exits_(mark_loop_exits) {}

// All loops opened via the assembler must have been closed by now.
GraphAssembler::~GraphAssembler() { DCHECK_EQ(loop_nesting_level_, 0); }

// --- Constants -------------------------------------------------------------
// The mcgraph/jsgraph constant factories return canonicalized (cached) nodes;
// AddClonedNode (below) registers such a pure node with the assembler without
// threading it through the effect/control chain.

Node* GraphAssembler::IntPtrConstant(intptr_t value) {
  return AddClonedNode(mcgraph()->IntPtrConstant(value));
}

Node* GraphAssembler::UintPtrConstant(uintptr_t value) {
  return AddClonedNode(mcgraph()->UintPtrConstant(value));
}

Node* GraphAssembler::Int32Constant(int32_t value) {
  return AddClonedNode(mcgraph()->Int32Constant(value));
}

Node* GraphAssembler::Uint32Constant(uint32_t value) {
  return AddClonedNode(mcgraph()->Uint32Constant(value));
}

Node* GraphAssembler::Int64Constant(int64_t value) {
  return AddClonedNode(mcgraph()->Int64Constant(value));
}

Node* GraphAssembler::Uint64Constant(uint64_t value) {
  return AddClonedNode(mcgraph()->Uint64Constant(value));
}

// Unlike the constants above, this creates a fresh, non-canonicalized node
// directly via graph()->NewNode, sized to the target's pointer width.
Node* GraphAssembler::UniqueIntPtrConstant(intptr_t value) {
  return AddNode(graph()->NewNode(
      machine()->Is64()
          ? common()->Int64Constant(value)
          : common()->Int32Constant(static_cast<int32_t>(value))));
}

Node* JSGraphAssembler::SmiConstant(int32_t value) {
  return AddClonedNode(jsgraph()->SmiConstant(value));
}

Node* GraphAssembler::Float64Constant(double value) {
  return AddClonedNode(mcgraph()->Float64Constant(value));
}

TNode<HeapObject> JSGraphAssembler::HeapConstant(Handle<HeapObject> object) {
  return TNode<HeapObject>::UncheckedCast(
      AddClonedNode(jsgraph()->HeapConstant(object)));
}

TNode<Object> JSGraphAssembler::Constant(const ObjectRef& ref) {
  return TNode<Object>::UncheckedCast(AddClonedNode(jsgraph()->Constant(ref)));
}

TNode<Number> JSGraphAssembler::NumberConstant(double value) {
  return TNode<Number>::UncheckedCast(
      AddClonedNode(jsgraph()->Constant(value)));
}

Node* GraphAssembler::ExternalConstant(ExternalReference ref) {
  return AddClonedNode(mcgraph()->ExternalConstant(ref));
}

// Parameters hang off the graph's start node.
Node* GraphAssembler::Parameter(int index) {
  return AddNode(
      graph()->NewNode(common()->Parameter(index), graph()->start()));
}

Node* JSGraphAssembler::CEntryStubConstant(int result_size) {
  return AddClonedNode(jsgraph()->CEntryStubConstant(result_size));
}

Node* GraphAssembler::LoadFramePointer() {
  return AddNode(graph()->NewNode(machine()->LoadFramePointer()));
}

// Raw float64 load from a HeapNumber; the -kHeapObjectTag corrects for the
// tagged (off-by-one) heap pointer representation.
Node* GraphAssembler::LoadHeapNumberValue(Node* heap_number) {
  return Load(MachineType::Float64(), heap_number,
              IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag));
}

// Stamps out accessors (e.g. TrueConstant) for every singleton in
// JSGRAPH_SINGLETON_CONSTANT_LIST.
#define SINGLETON_CONST_DEF(Name, Type)              \
  TNode<Type> JSGraphAssembler::Name##Constant() {   \
    return TNode<Type>::UncheckedCast(               \
        AddClonedNode(jsgraph()->Name##Constant())); \
  }
JSGRAPH_SINGLETON_CONSTANT_LIST(SINGLETON_CONST_DEF)
#undef SINGLETON_CONST_DEF

// Stamps out identity tests (e.g. IsTrue) against each singleton constant,
// implemented as a reference equality check.
#define SINGLETON_CONST_TEST_DEF(Name, ...)                        \
  TNode<Boolean> JSGraphAssembler::Is##Name(TNode<Object> value) { \
    return TNode<Boolean>::UncheckedCast(                          \
        ReferenceEqual(value, Name##Constant()));                  \
  }
JSGRAPH_SINGLETON_CONSTANT_LIST(SINGLETON_CONST_TEST_DEF)
#undef SINGLETON_CONST_TEST_DEF

// Pure machine unops: no effect/control inputs.
#define PURE_UNOP_DEF(Name)                                 \
  Node* GraphAssembler::Name(Node* input) {                 \
    return AddNode(graph()->NewNode(machine()->Name(), input)); \
  }
PURE_ASSEMBLER_MACH_UNOP_LIST(PURE_UNOP_DEF)
#undef PURE_UNOP_DEF

// Pure machine binops: no effect/control inputs.
#define PURE_BINOP_DEF(Name)                                          \
  Node* GraphAssembler::Name(Node* left, Node* right) {               \
    return AddNode(graph()->NewNode(machine()->Name(), left, right)); \
  }
PURE_ASSEMBLER_MACH_BINOP_LIST(PURE_BINOP_DEF)
#undef PURE_BINOP_DEF

// Checked machine binops additionally take the current control input (they
// can trap/deopt and must be anchored in the control chain).
#define CHECKED_BINOP_DEF(Name)                                       \
  Node* GraphAssembler::Name(Node* left, Node* right) {               \
    return AddNode(                                                   \
        graph()->NewNode(machine()->Name(), left, right, control())); \
  }
CHECKED_ASSEMBLER_MACH_BINOP_LIST(CHECKED_BINOP_DEF)
#undef CHECKED_BINOP_DEF

Node* GraphAssembler::IntPtrEqual(Node* left, Node* right) {
  return WordEqual(left, right);
}

// Tagged values are 32 bits wide under pointer compression, full word width
// otherwise; pick the comparison accordingly.
Node* GraphAssembler::TaggedEqual(Node* left, Node* right) {
  if (COMPRESS_POINTERS_BOOL) {
    return Word32Equal(left, right);
  } else {
    return WordEqual(left, right);
  }
}

// Smi arithmetic likewise narrows to 32-bit ops under pointer compression.
Node* GraphAssembler::SmiSub(Node* left, Node* right) {
  if (COMPRESS_POINTERS_BOOL) {
    return Int32Sub(left, right);
  } else {
    return IntSub(left, right);
  }
}

Node* GraphAssembler::SmiLessThan(Node* left, Node* right) {
  if (COMPRESS_POINTERS_BOOL) {
    return Int32LessThan(left, right);
  } else {
    return IntLessThan(left, right);
  }
}

// Float64 rounding is an OptionalOperator; callers are expected to have
// verified support, and the CHECK makes a missing implementation fatal.
Node* GraphAssembler::Float64RoundDown(Node* value) {
  CHECK(machine()->Float64RoundDown().IsSupported());
  return AddNode(graph()->NewNode(machine()->Float64RoundDown().op(), value));
}

Node* GraphAssembler::Float64RoundTruncate(Node* value) {
  CHECK(machine()->Float64RoundTruncate().IsSupported());
  return AddNode(
      graph()->NewNode(machine()->Float64RoundTruncate().op(), value));
}

Node* GraphAssembler::TruncateFloat64ToInt64(Node* value, TruncateKind kind) {
  return AddNode(
      graph()->NewNode(machine()->TruncateFloat64ToInt64(kind), value));
}

Node* GraphAssembler::Projection(int index, Node* value) {
  return AddNode(
      graph()->NewNode(common()->Projection(index), value, control()));
}

Node* JSGraphAssembler::Allocate(AllocationType allocation, Node* size) {
  return AddNode(
      graph()->NewNode(simplified()->AllocateRaw(Type::Any(), allocation), size,
                       effect(), control()));
}

Node* JSGraphAssembler::LoadField(FieldAccess const& access, Node* object) {
  Node* value = AddNode(graph()->NewNode(simplified()->LoadField(access),
                                         object, effect(), control()));
  return value;
}

Node* JSGraphAssembler::LoadElement(ElementAccess const& access, Node* object,
                                    Node* index) {
  Node* value = AddNode(graph()->NewNode(simplified()->LoadElement(access),
                                         object, index, effect(), control()));
  return value;
}

Node* JSGraphAssembler::StoreField(FieldAccess const& access, Node* object,
                                   Node* value) {
  return AddNode(graph()->NewNode(simplified()->StoreField(access), object,
                                  value, effect(), control()));
}

#ifdef V8_MAP_PACKING
// With map packing, map words stored in object headers are XOR-encoded with
// kMapWordXorMask; these helpers decode/encode via raw word bitcasts.
TNode<Map> GraphAssembler::UnpackMapWord(Node* map_word) {
  map_word = BitcastTaggedToWordForTagAndSmiBits(map_word);
  // TODO(wenyuzhao): Clear header metadata.
  Node* map = WordXor(map_word, IntPtrConstant(Internals::kMapWordXorMask));
  return TNode<Map>::UncheckedCast(BitcastWordToTagged(map));
}

Node* GraphAssembler::PackMapWord(TNode<Map> map) {
  Node* map_word = BitcastTaggedToWordForTagAndSmiBits(map);
  Node* packed = WordXor(map_word, IntPtrConstant(Internals::kMapWordXorMask));
  return BitcastWordToTaggedSigned(packed);
}
#endif

TNode<Map> GraphAssembler::LoadMap(Node* object) {
  Node* map_word = Load(MachineType::TaggedPointer(), object,
                        HeapObject::kMapOffset - kHeapObjectTag);
#ifdef V8_MAP_PACKING
  return UnpackMapWord(map_word);
#else
  return TNode<Map>::UncheckedCast(map_word);
#endif
}

Node* JSGraphAssembler::StoreElement(ElementAccess const& access, Node* object,
                                     Node* index, Node* value) {
  return AddNode(graph()->NewNode(simplified()->StoreElement(access), object,
                                  index, value, effect(), control()));
}

void JSGraphAssembler::TransitionAndStoreElement(MapRef double_map,
                                                 MapRef fast_map,
                                                 TNode<HeapObject> object,
                                                 TNode<Number> index,
                                                 TNode<Object> value) {
  AddNode(graph()->NewNode(simplified()->TransitionAndStoreElement(
                               double_map.object(), fast_map.object()),
                           object, index, value, effect(), control()));
}

// --- Simplified-operator wrappers ------------------------------------------
// Thin typed wrappers around the corresponding simplified() operators.

TNode<Number> JSGraphAssembler::StringLength(TNode<String> string) {
  return AddNode<Number>(
      graph()->NewNode(simplified()->StringLength(), string));
}

TNode<Boolean> JSGraphAssembler::ReferenceEqual(TNode<Object> lhs,
                                                TNode<Object> rhs) {
  return AddNode<Boolean>(
      graph()->NewNode(simplified()->ReferenceEqual(), lhs, rhs));
}

TNode<Boolean> JSGraphAssembler::NumberEqual(TNode<Number> lhs,
                                             TNode<Number> rhs) {
  return AddNode<Boolean>(
      graph()->NewNode(simplified()->NumberEqual(), lhs, rhs));
}

TNode<Number> JSGraphAssembler::NumberMin(TNode<Number> lhs,
                                          TNode<Number> rhs) {
  return AddNode<Number>(graph()->NewNode(simplified()->NumberMin(), lhs, rhs));
}

TNode<Number> JSGraphAssembler::NumberMax(TNode<Number> lhs,
                                          TNode<Number> rhs) {
  return AddNode<Number>(graph()->NewNode(simplified()->NumberMax(), lhs, rhs));
}

TNode<Number> JSGraphAssembler::NumberAdd(TNode<Number> lhs,
                                          TNode<Number> rhs) {
  return AddNode<Number>(graph()->NewNode(simplified()->NumberAdd(), lhs, rhs));
}

TNode<Number> JSGraphAssembler::NumberSubtract(TNode<Number> lhs,
                                               TNode<Number> rhs) {
  return AddNode<Number>(
      graph()->NewNode(simplified()->NumberSubtract(), lhs, rhs));
}

TNode<Boolean> JSGraphAssembler::NumberLessThan(TNode<Number> lhs,
                                                TNode<Number> rhs) {
  return AddNode<Boolean>(
      graph()->NewNode(simplified()->NumberLessThan(), lhs, rhs));
}

TNode<Boolean> JSGraphAssembler::NumberLessThanOrEqual(TNode<Number> lhs,
                                                       TNode<Number> rhs) {
  return AddNode<Boolean>(
      graph()->NewNode(simplified()->NumberLessThanOrEqual(), lhs, rhs));
}

TNode<String> JSGraphAssembler::StringSubstring(TNode<String> string,
                                                TNode<Number> from,
                                                TNode<Number> to) {
  return AddNode<String>(graph()->NewNode(
      simplified()->StringSubstring(), string, from, to, effect(), control()));
}

TNode<Boolean> JSGraphAssembler::ObjectIsCallable(TNode<Object> value) {
  return AddNode<Boolean>(
      graph()->NewNode(simplified()->ObjectIsCallable(), value));
}

TNode<Boolean> JSGraphAssembler::ObjectIsUndetectable(TNode<Object> value) {
  return AddNode<Boolean>(
      graph()->NewNode(simplified()->ObjectIsUndetectable(), value));
}

// Deoptimizes with `reason` if `cond` is false.
Node* JSGraphAssembler::CheckIf(Node* cond, DeoptimizeReason reason) {
  return AddNode(graph()->NewNode(simplified()->CheckIf(reason), cond, effect(),
                                  control()));
}

TNode<Boolean> JSGraphAssembler::NumberIsFloat64Hole(TNode<Number> value) {
  return AddNode<Boolean>(
      graph()->NewNode(simplified()->NumberIsFloat64Hole(), value));
}

TNode<Boolean> JSGraphAssembler::ToBoolean(TNode<Object> value) {
  return AddNode<Boolean>(graph()->NewNode(simplified()->ToBoolean(), value));
}

TNode<Object> JSGraphAssembler::ConvertTaggedHoleToUndefined(
    TNode<Object> value) {
  return AddNode<Object>(
      graph()->NewNode(simplified()->ConvertTaggedHoleToUndefined(), value));
}

TNode<FixedArrayBase> JSGraphAssembler::MaybeGrowFastElements(
    ElementsKind kind, const FeedbackSource& feedback, TNode<JSArray> array,
    TNode<FixedArrayBase> elements, TNode<Number> new_length,
    TNode<Number> old_length) {
  // Double elements need a different backing-store growth mode than
  // Smi/object elements.
  GrowFastElementsMode mode = IsDoubleElementsKind(kind)
                                  ? GrowFastElementsMode::kDoubleElements
                                  : GrowFastElementsMode::kSmiOrObjectElements;
  return AddNode<FixedArrayBase>(graph()->NewNode(
      simplified()->MaybeGrowFastElements(mode, feedback), array, elements,
      new_length, old_length, effect(), control()));
}

Node* JSGraphAssembler::StringCharCodeAt(TNode<String> string,
                                         TNode<Number> position) {
  return AddNode(graph()->NewNode(simplified()->StringCharCodeAt(), string,
                                  position, effect(), control()));
}

Node* GraphAssembler::TypeGuard(Type type, Node* value) {
  return AddNode(
      graph()->NewNode(common()->TypeGuard(type), value, effect(), control()));
}

Node* GraphAssembler::Checkpoint(FrameState frame_state) {
  return AddNode(graph()->NewNode(common()->Checkpoint(), frame_state, effect(),
                                  control()));
}

Node* GraphAssembler::DebugBreak() {
  return AddNode(
      graph()->NewNode(machine()->DebugBreak(), effect(), control()));
}

// Emits an Unreachable node, connects it to End, and terminates the current
// block (effect/control become nullptr until a new label is bound).
// NOTE(review): `block_updater_successor` is unused here — presumably kept
// for interface compatibility; confirm against the header.
Node* GraphAssembler::Unreachable(
    GraphAssemblerLabel<0u>* block_updater_successor) {
  Node* result = UnreachableWithoutConnectToEnd();
  ConnectUnreachableToEnd();
  InitializeEffectControl(nullptr, nullptr);
  return result;
}

Node* GraphAssembler::UnreachableWithoutConnectToEnd() {
  return AddNode(
      graph()->NewNode(common()->Unreachable(), effect(), control()));
}

TNode<RawPtrT> GraphAssembler::StackSlot(int size, int alignment) {
  return AddNode<RawPtrT>(
      graph()->NewNode(machine()->StackSlot(size, alignment)));
}

Node* GraphAssembler::Store(StoreRepresentation rep, Node* object, Node* offset,
                            Node* value) {
  return AddNode(graph()->NewNode(machine()->Store(rep), object, offset, value,
                                  effect(), control()));
}

// Convenience overload for a constant offset.
Node* GraphAssembler::Store(StoreRepresentation rep, Node* object, int offset,
                            Node* value) {
  return Store(rep, object, Int32Constant(offset), value);
}

Node* GraphAssembler::Load(MachineType type, Node* object, Node* offset) {
  return AddNode(graph()->NewNode(machine()->Load(type), object, offset,
                                  effect(), control()));
}

// Convenience overload for a constant offset.
Node* GraphAssembler::Load(MachineType type, Node* object, int offset) {
  return Load(type, object, Int32Constant(offset));
}

// Uses a plain store when the access is byte-sized or the machine supports
// unaligned stores of `rep`; otherwise falls back to an UnalignedStore node.
Node* GraphAssembler::StoreUnaligned(MachineRepresentation rep, Node* object,
                                     Node* offset, Node* value) {
  Operator const* const op =
      (rep == MachineRepresentation::kWord8 ||
       machine()->UnalignedStoreSupported(rep))
          ? machine()->Store(StoreRepresentation(rep, kNoWriteBarrier))
          : machine()->UnalignedStore(rep);
  return AddNode(
      graph()->NewNode(op, object, offset, value, effect(), control()));
}

// Mirror of StoreUnaligned for loads.
Node* GraphAssembler::LoadUnaligned(MachineType type, Node* object,
                                    Node* offset) {
  Operator const* const op =
      (type.representation() == MachineRepresentation::kWord8 ||
       machine()->UnalignedLoadSupported(type.representation()))
          ? machine()->Load(type)
          : machine()->UnalignedLoad(type);
  return AddNode(graph()->NewNode(op, object, offset, effect(), control()));
}

Node* GraphAssembler::ProtectedStore(MachineRepresentation rep, Node* object,
                                     Node* offset, Node* value) {
  return AddNode(graph()->NewNode(machine()->ProtectedStore(rep), object,
                                  offset, value, effect(), control()));
}

Node* GraphAssembler::ProtectedLoad(MachineType type, Node* object,
                                    Node* offset) {
  return AddNode(graph()->NewNode(machine()->ProtectedLoad(type), object,
                                  offset, effect(), control()));
}

// Keeps `buffer` alive (in the effect chain) past this point.
Node* GraphAssembler::Retain(Node* buffer) {
  return AddNode(graph()->NewNode(common()->Retain(), buffer, effect()));
}

Node* GraphAssembler::UnsafePointerAdd(Node* base, Node* external) {
  return AddNode(graph()->NewNode(machine()->UnsafePointerAdd(), base, external,
                                  effect(), control()));
}

// Calls the PlainPrimitiveToNumber builtin via the lazily-cached call
// operator (see PlainPrimitiveToNumberOperator below).
TNode<Number> JSGraphAssembler::PlainPrimitiveToNumber(TNode<Object> value) {
  return AddNode<Number>(graph()->NewNode(
      PlainPrimitiveToNumberOperator(), PlainPrimitiveToNumberBuiltinConstant(),
      value, effect()));
}

Node* GraphAssembler::BitcastWordToTaggedSigned(Node* value) {
  return AddNode(
      graph()->NewNode(machine()->BitcastWordToTaggedSigned(), value));
}

Node* GraphAssembler::BitcastWordToTagged(Node* value) {
  return AddNode(graph()->NewNode(machine()->BitcastWordToTagged(), value,
                                  effect(), control()));
}

Node* GraphAssembler::BitcastTaggedToWord(Node* value) {
  return AddNode(graph()->NewNode(machine()->BitcastTaggedToWord(), value,
                                  effect(), control()));
}

Node* GraphAssembler::BitcastTaggedToWordForTagAndSmiBits(Node* value) {
  return AddNode(graph()->NewNode(
      machine()->BitcastTaggedToWordForTagAndSmiBits(), value));
}

Node* GraphAssembler::BitcastMaybeObjectToWord(Node* value) {
  return AddNode(graph()->NewNode(machine()->BitcastMaybeObjectToWord(), value,
                                  effect(), control()));
}

// Deoptimizes with `reason`/`feedback` when `condition` is true.
Node* GraphAssembler::DeoptimizeIf(DeoptimizeReason reason,
                                   FeedbackSource const& feedback,
                                   Node* condition, Node* frame_state) {
  return AddNode(graph()->NewNode(common()->DeoptimizeIf(reason, feedback),
                                  condition, frame_state, effect(), control()));
}

// Deoptimizes when `condition` is false (DeoptimizeUnless).
Node* GraphAssembler::DeoptimizeIfNot(DeoptimizeReason reason,
                                      FeedbackSource const& feedback,
                                      Node* condition, Node* frame_state) {
  return AddNode(graph()->NewNode(common()->DeoptimizeUnless(reason, feedback),
                                  condition, frame_state, effect(), control()));
}

TNode<Object> GraphAssembler::Call(const CallDescriptor* call_descriptor,
                                   int inputs_size, Node** inputs) {
  return Call(common()->Call(call_descriptor), inputs_size, inputs);
}

TNode<Object> GraphAssembler::Call(const Operator* op, int inputs_size,
                                   Node** inputs) {
  DCHECK_EQ(IrOpcode::kCall, op->opcode());
  return AddNode<Object>(graph()->NewNode(op, inputs_size, inputs));
}

void GraphAssembler::TailCall(const CallDescriptor* call_descriptor,
                              int inputs_size, Node** inputs) {
#ifdef DEBUG
  // `inputs` must contain the parameters plus target, effect and control.
  static constexpr int kTargetEffectControl = 3;
  DCHECK_EQ(inputs_size,
            call_descriptor->ParameterCount() + kTargetEffectControl);
#endif  // DEBUG

  Node* node = AddNode(graph()->NewNode(common()->TailCall(call_descriptor),
                                        inputs_size, inputs));

  // Unlike ConnectUnreachableToEnd, the TailCall node terminates a block; to
  // keep it live, it *must* be connected to End (also in Turboprop schedules).
  NodeProperties::MergeControlToEnd(graph(), common(), node);

  // Setting effect, control to nullptr effectively terminates the current
  // block by disallowing the addition of new nodes until a new label has been
  // bound.
  InitializeEffectControl(nullptr, nullptr);
}

// Emits a branch with a hint derived from deferred-ness: if exactly one
// successor is deferred, predict the non-deferred side.
void GraphAssembler::BranchWithCriticalSafetyCheck(
    Node* condition, GraphAssemblerLabel<0u>* if_true,
    GraphAssemblerLabel<0u>* if_false) {
  BranchHint hint = BranchHint::kNone;
  if (if_true->IsDeferred() != if_false->IsDeferred()) {
    hint = if_false->IsDeferred() ? BranchHint::kTrue : BranchHint::kFalse;
  }

  BranchImpl(condition, if_true, if_false, hint);
}

// Connects the current (Unreachable) effect to End via a Throw node, notifies
// the optional node-changed callback about the updated End, and poisons
// effect/control with Dead so no further nodes can be added.
void GraphAssembler::ConnectUnreachableToEnd() {
  DCHECK_EQ(effect()->opcode(), IrOpcode::kUnreachable);
  Node* throw_node = graph()->NewNode(common()->Throw(), effect(), control());
  NodeProperties::MergeControlToEnd(graph(), common(), throw_node);
  if (node_changed_callback_.has_value()) {
    (*node_changed_callback_)(graph()->end());
  }
  effect_ = control_ = mcgraph()->Dead();
}

// Registers a pre-existing (cached) pure node with the assembler; pure nodes
// carry no effect/control inputs, so nothing needs to be rewired.
Node* GraphAssembler::AddClonedNode(Node* node) {
  DCHECK(node->op()->HasProperty(Operator::kPure));
  UpdateEffectControlWith(node);
  return node;
}

// Adds a freshly created node: first gives any registered inline reducers a
// chance to replace it, then threads it into the effect/control chain.
Node* GraphAssembler::AddNode(Node* node) {
  if (!inline_reducers_.empty() && !inline_reductions_blocked_) {
    // Reducers may add new nodes to the graph using this graph assembler,
    // however they should never introduce nodes that need further reduction,
    // so block reduction while they run.
    BlockInlineReduction scope(this);
    Reduction reduction;
    for (auto reducer : inline_reducers_) {
      reduction = reducer->Reduce(node, nullptr);
      if (reduction.Changed()) break;
    }
    if (reduction.Changed()) {
      Node* replacement = reduction.replacement();
      if (replacement != node) {
        // Replace all uses of node and kill the node to make sure we don't
        // leave dangling dead uses.
        NodeProperties::ReplaceUses(node, replacement, effect(), control());
        node->Kill();
        return replacement;
      }
    }
  }

  // Terminate nodes are not threaded into the effect/control chain.
  if (node->opcode() == IrOpcode::kTerminate) {
    return node;
  }

  UpdateEffectControlWith(node);
  return node;
}

void GraphAssembler::Reset() {
  effect_ = nullptr;
  control_ = nullptr;
}

void GraphAssembler::InitializeEffectControl(Node* effect, Node* control) {
  effect_ = effect;
  control_ = control;
}

// Lazily builds and caches the Call operator used for the
// PlainPrimitiveToNumber builtin call above.
Operator const* JSGraphAssembler::PlainPrimitiveToNumberOperator() {
  if (!to_number_operator_.is_set()) {
    Callable callable =
        Builtins::CallableFor(isolate(), Builtin::kPlainPrimitiveToNumber);
    CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
    auto call_descriptor = Linkage::GetStubCallDescriptor(
        graph()->zone(), callable.descriptor(),
        callable.descriptor().GetStackParameterCount(), flags,
        Operator::kEliminatable);
    to_number_operator_.set(common()->Call(call_descriptor));
  }
  return to_number_operator_.get();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8