// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/escape-analysis.h"

#include "src/codegen/tick-counter.h"
#include "src/compiler/frame-states.h"
#include "src/compiler/linkage.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/operator-properties.h"
#include "src/compiler/simplified-operator.h"
#include "src/compiler/state-values-utils.h"
#include "src/handles/handles-inl.h"
#include "src/init/bootstrapper.h"
#include "src/objects/map-inl.h"

#ifdef DEBUG
#define TRACE(...)                                    \
  do {                                                \
    if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
  } while (false)
#else
#define TRACE(...)
#endif

namespace v8 {
namespace internal {
namespace compiler {

template <class T>
class Sidetable {
 public:
  explicit Sidetable(Zone* zone) : map_(zone) {}
  T& operator[](const Node* node) {
    NodeId id = node->id();
    if (id >= map_.size()) {
      map_.resize(id + 1);
    }
    return map_[id];
  }

 private:
  ZoneVector<T> map_;
};

template <class T>
class SparseSidetable {
 public:
  explicit SparseSidetable(Zone* zone, T def_value = T())
      : def_value_(std::move(def_value)), map_(zone) {}
  void Set(const Node* node, T value) {
    auto iter = map_.find(node->id());
    if (iter != map_.end()) {
      iter->second = std::move(value);
    } else if (value != def_value_) {
      map_.insert(iter, std::make_pair(node->id(), std::move(value)));
    }
  }
  const T& Get(const Node* node) const {
    auto iter = map_.find(node->id());
    return iter != map_.end() ? iter->second : def_value_;
  }

 private:
  T def_value_;
  ZoneUnorderedMap<NodeId, T> map_;
};

// Keeps track of the changes to the current node during reduction.
// Encapsulates the current state of the IR graph and the reducer state like
// side-tables. All access to the IR and the reducer state should happen
// through a ReduceScope to ensure that changes and dependencies are tracked
// and all necessary node revisitations happen.
class ReduceScope {
 public:
  using Reduction = EffectGraphReducer::Reduction;
  explicit ReduceScope(Node* node, Reduction* reduction)
      : current_node_(node), reduction_(reduction) {}

  void SetValueChanged() { reduction()->set_value_changed(); }

 protected:
  Node* current_node() const { return current_node_; }
  Reduction* reduction() { return reduction_; }

 private:
  Node* current_node_;
  Reduction* reduction_;
};

// A VariableTracker object keeps track of the values of variables at all
// points of the effect chain and introduces new phi nodes when necessary.
// Initially and by default, variables are mapped to nullptr, which means that
// the variable allocation point does not dominate the current point on the
// effect chain. We map variables that represent uninitialized memory to the
// Dead node to ensure it is not read. Unmapped values are impossible by
// construction: it is indistinguishable whether a PersistentMap does not
// contain an element or maps it to the default element.
class VariableTracker {
 private:
  // The state of all variables at one point in the effect chain.
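  // One {State} snapshot is kept per effect node, so it is implemented on
  // top of {PersistentMap}, which shares structure between copies and keeps
  // taking these per-node snapshots cheap.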
  class State {
   public:
    using Map = PersistentMap<Variable, Node*>;

    explicit State(Zone* zone) : map_(zone) {}
    Node* Get(Variable var) const {
      CHECK(var != Variable::Invalid());
      return map_.Get(var);
    }
    void Set(Variable var, Node* node) {
      CHECK(var != Variable::Invalid());
      return map_.Set(var, node);
    }
    Map::iterator begin() const { return map_.begin(); }
    Map::iterator end() const { return map_.end(); }
    bool operator!=(const State& other) const { return map_ != other.map_; }

   private:
    Map map_;
  };

 public:
  VariableTracker(JSGraph* graph, EffectGraphReducer* reducer, Zone* zone);
  VariableTracker(const VariableTracker&) = delete;
  VariableTracker& operator=(const VariableTracker&) = delete;

  Variable NewVariable() { return Variable(next_variable_++); }
  Node* Get(Variable var, Node* effect) { return table_.Get(effect).Get(var); }
  Zone* zone() { return zone_; }

  class V8_NODISCARD Scope : public ReduceScope {
   public:
    Scope(VariableTracker* tracker, Node* node, Reduction* reduction);
    ~Scope();
    Maybe<Node*> Get(Variable var) {
      Node* node = current_state_.Get(var);
      if (node && node->opcode() == IrOpcode::kDead) {
        // TODO(turbofan): We use {Dead} as a sentinel for uninitialized
        // memory. Reading uninitialized memory can only happen in unreachable
        // code. In this case, we have to mark the object as escaping to avoid
        // dead nodes in the graph. This is a workaround that should be
        // removed once we can handle dead nodes everywhere.
        return Nothing<Node*>();
      }
      return Just(node);
    }
    void Set(Variable var, Node* node) { current_state_.Set(var, node); }

   private:
    VariableTracker* states_;
    State current_state_;
  };

 private:
  State MergeInputs(Node* effect_phi);
  Zone* zone_;
  JSGraph* graph_;
  SparseSidetable<State> table_;
  ZoneVector<Node*> buffer_;
  EffectGraphReducer* reducer_;
  int next_variable_ = 0;
  TickCounter* const tick_counter_;
};

// Encapsulates the current state of the escape analysis reducer to preserve
// invariants regarding changes and re-visitation.
class EscapeAnalysisTracker : public ZoneObject {
 public:
  EscapeAnalysisTracker(JSGraph* jsgraph, EffectGraphReducer* reducer,
                        Zone* zone)
      : virtual_objects_(zone),
        replacements_(zone),
        variable_states_(jsgraph, reducer, zone),
        jsgraph_(jsgraph),
        zone_(zone) {}
  EscapeAnalysisTracker(const EscapeAnalysisTracker&) = delete;
  EscapeAnalysisTracker& operator=(const EscapeAnalysisTracker&) = delete;

  class V8_NODISCARD Scope : public VariableTracker::Scope {
   public:
    Scope(EffectGraphReducer* reducer, EscapeAnalysisTracker* tracker,
          Node* node, Reduction* reduction)
        : VariableTracker::Scope(&tracker->variable_states_, node, reduction),
          tracker_(tracker),
          reducer_(reducer) {}
    const VirtualObject* GetVirtualObject(Node* node) {
      VirtualObject* vobject = tracker_->virtual_objects_.Get(node);
      if (vobject) vobject->AddDependency(current_node());
      return vobject;
    }
    // Create or retrieve a virtual object for the current node.
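    // Returns nullptr once the limit of {kMaxTrackedObjects} tracked objects
    // has been reached; such allocations are simply not scalar-replaced.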
    const VirtualObject* InitVirtualObject(int size) {
      DCHECK_EQ(IrOpcode::kAllocate, current_node()->opcode());
      VirtualObject* vobject = tracker_->virtual_objects_.Get(current_node());
      if (vobject) {
        CHECK(vobject->size() == size);
      } else {
        vobject = tracker_->NewVirtualObject(size);
      }
      if (vobject) vobject->AddDependency(current_node());
      vobject_ = vobject;
      return vobject;
    }

    void SetVirtualObject(Node* object) {
      vobject_ = tracker_->virtual_objects_.Get(object);
    }

    void SetEscaped(Node* node) {
      if (VirtualObject* object = tracker_->virtual_objects_.Get(node)) {
        if (object->HasEscaped()) return;
        TRACE("Setting %s#%d to escaped because of use by %s#%d\n",
              node->op()->mnemonic(), node->id(),
              current_node()->op()->mnemonic(), current_node()->id());
        object->SetEscaped();
        object->RevisitDependants(reducer_);
      }
    }
    // The inputs of the current node have to be accessed through the scope to
    // ensure that they respect the node replacements.
    Node* ValueInput(int i) {
      return tracker_->ResolveReplacement(
          NodeProperties::GetValueInput(current_node(), i));
    }
    Node* ContextInput() {
      return tracker_->ResolveReplacement(
          NodeProperties::GetContextInput(current_node()));
    }
    // Accessing the current node is fine for {FrameState} nodes.
    Node* CurrentNode() {
      DCHECK_EQ(current_node()->opcode(), IrOpcode::kFrameState);
      return current_node();
    }

    void SetReplacement(Node* replacement) {
      replacement_ = replacement;
      vobject_ =
          replacement ? tracker_->virtual_objects_.Get(replacement) : nullptr;
      if (replacement) {
        TRACE("Set %s#%d as replacement.\n", replacement->op()->mnemonic(),
              replacement->id());
      } else {
        TRACE("Set nullptr as replacement.\n");
      }
    }

    void MarkForDeletion() { SetReplacement(tracker_->jsgraph_->Dead()); }

    ~Scope() {
      if (replacement_ != tracker_->replacements_[current_node()] ||
          vobject_ != tracker_->virtual_objects_.Get(current_node())) {
        reduction()->set_value_changed();
      }
      tracker_->replacements_[current_node()] = replacement_;
      tracker_->virtual_objects_.Set(current_node(), vobject_);
    }

   private:
    EscapeAnalysisTracker* tracker_;
    EffectGraphReducer* reducer_;
    VirtualObject* vobject_ = nullptr;
    Node* replacement_ = nullptr;
  };

  Node* GetReplacementOf(Node* node) { return replacements_[node]; }
  Node* ResolveReplacement(Node* node) {
    if (Node* replacement = GetReplacementOf(node)) {
      return replacement;
    }
    return node;
  }

 private:
  friend class EscapeAnalysisResult;
  static const size_t kMaxTrackedObjects = 100;

  VirtualObject* NewVirtualObject(int size) {
    if (next_object_id_ >= kMaxTrackedObjects) return nullptr;
    return zone_->New<VirtualObject>(&variable_states_, next_object_id_++,
                                     size);
  }

  SparseSidetable<VirtualObject*> virtual_objects_;
  Sidetable<Node*> replacements_;
  VariableTracker variable_states_;
  VirtualObject::Id next_object_id_ = 0;
  JSGraph* const jsgraph_;
  Zone* const zone_;
};

EffectGraphReducer::EffectGraphReducer(
    Graph* graph, std::function<void(Node*, Reduction*)> reduce,
    TickCounter* tick_counter, Zone* zone)
    : graph_(graph),
      state_(graph, kNumStates),
      revisit_(zone),
      stack_(zone),
      reduce_(std::move(reduce)),
      tick_counter_(tick_counter) {}

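// Reduce {node} and all its not-yet-visited transitive inputs in depth-first
// order, then keep reducing nodes queued for revisitation until a fixed point
// is reached.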
void EffectGraphReducer::ReduceFrom(Node* node) {
  // Perform DFS and eagerly trigger revisitation as soon as possible.
  // A stack element {node, i} indicates that input i of node should be
  // visited next.
  DCHECK(stack_.empty());
  stack_.push({node, 0});
  while (!stack_.empty()) {
    tick_counter_->TickAndMaybeEnterSafepoint();
    Node* current = stack_.top().node;
    int& input_index = stack_.top().input_index;
    if (input_index < current->InputCount()) {
      Node* input = current->InputAt(input_index);
      input_index++;
      switch (state_.Get(input)) {
        case State::kVisited:
          // The input is already reduced.
          break;
        case State::kOnStack:
          // The input is on the DFS stack right now, so it will be revisited
          // later anyway.
          break;
        case State::kUnvisited:
        case State::kRevisit: {
          state_.Set(input, State::kOnStack);
          stack_.push({input, 0});
          break;
        }
      }
    } else {
      stack_.pop();
      Reduction reduction;
      reduce_(current, &reduction);
      for (Edge edge : current->use_edges()) {
        // Mark uses for revisitation.
        Node* use = edge.from();
        if (NodeProperties::IsEffectEdge(edge)) {
          if (reduction.effect_changed()) Revisit(use);
        } else {
          if (reduction.value_changed()) Revisit(use);
        }
      }
      state_.Set(current, State::kVisited);
      // Process the revisitation buffer immediately. This improves
      // performance of escape analysis. Using a stack for {revisit_} reverses
      // the order in which the revisitation happens. This also seems to
      // improve performance.
      while (!revisit_.empty()) {
        Node* revisit = revisit_.top();
        if (state_.Get(revisit) == State::kRevisit) {
          state_.Set(revisit, State::kOnStack);
          stack_.push({revisit, 0});
        }
        revisit_.pop();
      }
    }
  }
}

void EffectGraphReducer::Revisit(Node* node) {
  if (state_.Get(node) == State::kVisited) {
    TRACE("  Queueing for revisit: %s#%d\n", node->op()->mnemonic(),
          node->id());
    state_.Set(node, State::kRevisit);
    revisit_.push(node);
  }
}

VariableTracker::VariableTracker(JSGraph* graph, EffectGraphReducer* reducer,
                                 Zone* zone)
    : zone_(zone),
      graph_(graph),
      table_(zone, State(zone)),
      buffer_(zone),
      reducer_(reducer),
      tick_counter_(reducer->tick_counter()) {}

VariableTracker::Scope::Scope(VariableTracker* states, Node* node,
                              Reduction* reduction)
    : ReduceScope(node, reduction),
      states_(states),
      current_state_(states->zone_) {
  switch (node->opcode()) {
    case IrOpcode::kEffectPhi:
      current_state_ = states_->MergeInputs(node);
      break;
    default:
      int effect_inputs = node->op()->EffectInputCount();
      if (effect_inputs == 1) {
        current_state_ =
            states_->table_.Get(NodeProperties::GetEffectInput(node, 0));
      } else {
        DCHECK_EQ(0, effect_inputs);
      }
  }
}

VariableTracker::Scope::~Scope() {
  if (!reduction()->effect_changed() &&
      states_->table_.Get(current_node()) != current_state_) {
    reduction()->set_effect_changed();
  }
  states_->table_.Set(current_node(), current_state_);
}

VariableTracker::State VariableTracker::MergeInputs(Node* effect_phi) {
  // A variable that is mapped to [nullptr] was not assigned a value on every
  // execution path to the current effect phi. Relying on the invariant that
  // every variable is initialized (at least with a sentinel like the Dead
  // node), this means that the variable initialization does not dominate the
  // current point. So for loop effect phis, we can keep nullptr for a
  // variable as long as the first input of the loop has nullptr for this
  // variable. For non-loop effect phis, we can even keep it nullptr as long
  // as any input has nullptr.
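  // For example, a loop effect phi whose inputs map a variable to {value,
  // nullptr} (defined before the loop, undefined on the back edge) keeps
  // {value}, while a non-loop merge with the same inputs yields nullptr.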
  DCHECK_EQ(IrOpcode::kEffectPhi, effect_phi->opcode());
  int arity = effect_phi->op()->EffectInputCount();
  Node* control = NodeProperties::GetControlInput(effect_phi, 0);
  TRACE("control: %s#%d\n", control->op()->mnemonic(), control->id());
  bool is_loop = control->opcode() == IrOpcode::kLoop;
  buffer_.reserve(arity + 1);

  State first_input = table_.Get(NodeProperties::GetEffectInput(effect_phi, 0));
  State result = first_input;
  for (std::pair<Variable, Node*> var_value : first_input) {
    tick_counter_->TickAndMaybeEnterSafepoint();
    if (Node* value = var_value.second) {
      Variable var = var_value.first;
      TRACE("var %i:\n", var.id_);
      buffer_.clear();
      buffer_.push_back(value);
      bool identical_inputs = true;
      int num_defined_inputs = 1;
      TRACE("  input 0: %s#%d\n", value->op()->mnemonic(), value->id());
      for (int i = 1; i < arity; ++i) {
        Node* next_value =
            table_.Get(NodeProperties::GetEffectInput(effect_phi, i)).Get(var);
        if (next_value != value) identical_inputs = false;
        if (next_value != nullptr) {
          num_defined_inputs++;
          TRACE("  input %i: %s#%d\n", i, next_value->op()->mnemonic(),
                next_value->id());
        } else {
          TRACE("  input %i: nullptr\n", i);
        }
        buffer_.push_back(next_value);
      }

      Node* old_value = table_.Get(effect_phi).Get(var);
      if (old_value) {
        TRACE("  old: %s#%d\n", old_value->op()->mnemonic(), old_value->id());
      } else {
        TRACE("  old: nullptr\n");
      }
      // Reuse a previously created phi node if possible.
      if (old_value && old_value->opcode() == IrOpcode::kPhi &&
          NodeProperties::GetControlInput(old_value, 0) == control) {
        // Since a phi node can never dominate its control node,
        // [old_value] cannot originate from the inputs. Thus [old_value]
        // must have been created by a previous reduction of this
        // [effect_phi].
        for (int i = 0; i < arity; ++i) {
          Node* old_input = NodeProperties::GetValueInput(old_value, i);
          Node* new_input = buffer_[i] ? buffer_[i] : graph_->Dead();
          if (old_input != new_input) {
            NodeProperties::ReplaceValueInput(old_value, new_input, i);
            reducer_->Revisit(old_value);
          }
        }
        result.Set(var, old_value);
      } else {
        if (num_defined_inputs == 1 && is_loop) {
          // For loop effect phis, the variable initialization dominates iff
          // it dominates the first input.
          DCHECK_EQ(2, arity);
          DCHECK_EQ(value, buffer_[0]);
          result.Set(var, value);
        } else if (num_defined_inputs < arity) {
          // If the variable is undefined on some input of this non-loop
          // effect phi, then its initialization does not dominate this point.
          result.Set(var, nullptr);
        } else {
          DCHECK_EQ(num_defined_inputs, arity);
          // We only create a phi if the values are different.
          if (identical_inputs) {
            result.Set(var, value);
          } else {
            TRACE("Creating new phi\n");
            buffer_.push_back(control);
            Node* phi = graph_->graph()->NewNode(
                graph_->common()->Phi(MachineRepresentation::kTagged, arity),
                arity + 1, &buffer_.front());
            // TODO(turbofan): Computing precise types here is tricky, because
            // of the necessary revisitations. If we really need this, we
            // should probably do it afterwards.
            NodeProperties::SetType(phi, Type::Any());
            reducer_->AddRoot(phi);
            result.Set(var, phi);
          }
        }
      }
#ifdef DEBUG
      if (Node* result_node = result.Get(var)) {
        TRACE("  result: %s#%d\n", result_node->op()->mnemonic(),
              result_node->id());
      } else {
        TRACE("  result: nullptr\n");
      }
#endif
    }
  }
  return result;
}

namespace {

int OffsetOfFieldAccess(const Operator* op) {
  DCHECK(op->opcode() == IrOpcode::kLoadField ||
         op->opcode() == IrOpcode::kStoreField);
  FieldAccess access = FieldAccessOf(op);
  return access.offset;
}

Maybe<int> OffsetOfElementAt(ElementAccess const& access, int index) {
  MachineRepresentation representation = access.machine_type.representation();
  // Double elements accesses are not yet supported. See chromium:1237821.
  if (representation == MachineRepresentation::kFloat64) return Nothing<int>();

  DCHECK_GE(index, 0);
  DCHECK_GE(ElementSizeLog2Of(representation), kTaggedSizeLog2);
  return Just(access.header_size +
              (index << ElementSizeLog2Of(representation)));
}

Maybe<int> OffsetOfElementsAccess(const Operator* op, Node* index_node) {
  DCHECK(op->opcode() == IrOpcode::kLoadElement ||
         op->opcode() == IrOpcode::kStoreElement);
  Type index_type = NodeProperties::GetType(index_node);
  if (!index_type.Is(Type::OrderedNumber())) return Nothing<int>();
  double max = index_type.Max();
  double min = index_type.Min();
  int index = static_cast<int>(min);
  if (index < 0 || index != min || index != max) return Nothing<int>();
  return OffsetOfElementAt(ElementAccessOf(op), index);
}

Node* LowerCompareMapsWithoutLoad(Node* checked_map,
                                  ZoneHandleSet<Map> const& checked_against,
                                  JSGraph* jsgraph) {
  Node* true_node = jsgraph->TrueConstant();
  Node* false_node = jsgraph->FalseConstant();
  Node* replacement = false_node;
  for (Handle<Map> map : checked_against) {
    Node* map_node = jsgraph->HeapConstant(map);
    // We cannot create a HeapConstant type here as we are off-thread.
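    // Use the imprecise {Type::Internal()} instead, which is sufficient for
    // the {ReferenceEqual} comparison below.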
    NodeProperties::SetType(map_node, Type::Internal());
    Node* comparison = jsgraph->graph()->NewNode(
        jsgraph->simplified()->ReferenceEqual(), checked_map, map_node);
    NodeProperties::SetType(comparison, Type::Boolean());
    if (replacement == false_node) {
      replacement = comparison;
    } else {
      replacement = jsgraph->graph()->NewNode(
          jsgraph->common()->Select(MachineRepresentation::kTaggedPointer),
          comparison, true_node, replacement);
      NodeProperties::SetType(replacement, Type::Boolean());
    }
  }
  return replacement;
}

void ReduceNode(const Operator* op, EscapeAnalysisTracker::Scope* current,
                JSGraph* jsgraph) {
  switch (op->opcode()) {
    case IrOpcode::kAllocate: {
      NumberMatcher size(current->ValueInput(0));
      if (!size.HasResolvedValue()) break;
      int size_int = static_cast<int>(size.ResolvedValue());
      if (size_int != size.ResolvedValue()) break;
      if (const VirtualObject* vobject = current->InitVirtualObject(size_int)) {
        // Initialize with dead nodes as a sentinel for uninitialized memory.
        for (Variable field : *vobject) {
          current->Set(field, jsgraph->Dead());
        }
      }
      break;
    }
    case IrOpcode::kFinishRegion:
      current->SetVirtualObject(current->ValueInput(0));
      break;
    case IrOpcode::kStoreField: {
      Node* object = current->ValueInput(0);
      Node* value = current->ValueInput(1);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      Variable var;
      if (vobject && !vobject->HasEscaped() &&
          vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var)) {
        current->Set(var, value);
        current->MarkForDeletion();
      } else {
        current->SetEscaped(object);
        current->SetEscaped(value);
      }
      break;
    }
    case IrOpcode::kStoreElement: {
      Node* object = current->ValueInput(0);
      Node* index = current->ValueInput(1);
      Node* value = current->ValueInput(2);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      int offset;
      Variable var;
      if (vobject && !vobject->HasEscaped() &&
          OffsetOfElementsAccess(op, index).To(&offset) &&
          vobject->FieldAt(offset).To(&var)) {
        current->Set(var, value);
        current->MarkForDeletion();
      } else {
        current->SetEscaped(value);
        current->SetEscaped(object);
      }
      break;
    }
    case IrOpcode::kLoadField: {
      Node* object = current->ValueInput(0);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      Variable var;
      Node* value;
      if (vobject && !vobject->HasEscaped() &&
          vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var) &&
          current->Get(var).To(&value)) {
        current->SetReplacement(value);
      } else {
        current->SetEscaped(object);
      }
      break;
    }
    case IrOpcode::kLoadElement: {
      Node* object = current->ValueInput(0);
      Node* index = current->ValueInput(1);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      int offset;
      Variable var;
      Node* value;
      if (vobject && !vobject->HasEscaped() &&
          OffsetOfElementsAccess(op, index).To(&offset) &&
          vobject->FieldAt(offset).To(&var) && current->Get(var).To(&value)) {
        current->SetReplacement(value);
        break;
      } else if (vobject && !vobject->HasEscaped()) {
        // Compute the known length (aka the number of elements) of {object}
        // based on the virtual object information.
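        // The object consists of {access.header_size} bytes of header
        // followed by equally sized elements, so the element count is the
        // remaining object size divided by the element size.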
        ElementAccess const& access = ElementAccessOf(op);
        int const length =
            (vobject->size() - access.header_size) >>
            ElementSizeLog2Of(access.machine_type.representation());
        Variable var0, var1;
        Node* value0;
        Node* value1;
        if (length == 1 &&
            vobject->FieldAt(OffsetOfElementAt(access, 0)).To(&var) &&
            current->Get(var).To(&value) &&
            (value == nullptr ||
             NodeProperties::GetType(value).Is(access.type))) {
          // The {object} has only one element, and we know that the
          // LoadElement {index} must be within bounds, thus it must always
          // yield this one element of {object}.
          current->SetReplacement(value);
          break;
        } else if (length == 2 &&
                   vobject->FieldAt(OffsetOfElementAt(access, 0)).To(&var0) &&
                   current->Get(var0).To(&value0) &&
                   (value0 == nullptr ||
                    NodeProperties::GetType(value0).Is(access.type)) &&
                   vobject->FieldAt(OffsetOfElementAt(access, 1)).To(&var1) &&
                   current->Get(var1).To(&value1) &&
                   (value1 == nullptr ||
                    NodeProperties::GetType(value1).Is(access.type))) {
          if (value0 && value1) {
            // The {object} has exactly two elements, so the LoadElement
            // must return one of them (i.e. either the element at index
            // 0 or the one at index 1). So we can turn the LoadElement
            // into a Select operation instead (still allowing the {object}
            // to be scalar replaced). We must however mark the elements
            // of the {object} itself as escaping.
            Node* check =
                jsgraph->graph()->NewNode(jsgraph->simplified()->NumberEqual(),
                                          index, jsgraph->ZeroConstant());
            NodeProperties::SetType(check, Type::Boolean());
            Node* select = jsgraph->graph()->NewNode(
                jsgraph->common()->Select(access.machine_type.representation()),
                check, value0, value1);
            NodeProperties::SetType(select, access.type);
            current->SetReplacement(select);
            current->SetEscaped(value0);
            current->SetEscaped(value1);
            break;
          } else {
            // If the variables have no values, we have
            // not reached the fixed-point yet.
            break;
          }
        }
      }
      current->SetEscaped(object);
      break;
    }
    case IrOpcode::kTypeGuard: {
      current->SetVirtualObject(current->ValueInput(0));
      break;
    }
    case IrOpcode::kReferenceEqual: {
      Node* left = current->ValueInput(0);
      Node* right = current->ValueInput(1);
      const VirtualObject* left_object = current->GetVirtualObject(left);
      const VirtualObject* right_object = current->GetVirtualObject(right);
      Node* replacement = nullptr;
      if (left_object && !left_object->HasEscaped()) {
        if (right_object && !right_object->HasEscaped() &&
            left_object->id() == right_object->id()) {
          replacement = jsgraph->TrueConstant();
        } else {
          replacement = jsgraph->FalseConstant();
        }
      } else if (right_object && !right_object->HasEscaped()) {
        replacement = jsgraph->FalseConstant();
      }
      // TODO(turbofan) This is a workaround for uninhabited types. If we
      // replaced a value of uninhabited type with a constant, we would
      // widen the type of the node. This could produce inconsistent
      // types (which might confuse representation selection). We get
      // around this by refusing to constant-fold and escape-analyze
      // if the type is not inhabited.
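      // (An uninhabited, i.e. None, type means no value can ever be observed
      // at this node, which only happens in unreachable code.)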
      if (replacement && !NodeProperties::GetType(left).IsNone() &&
          !NodeProperties::GetType(right).IsNone()) {
        current->SetReplacement(replacement);
        break;
      }
      current->SetEscaped(left);
      current->SetEscaped(right);
      break;
    }
    case IrOpcode::kCheckMaps: {
      CheckMapsParameters params = CheckMapsParametersOf(op);
      Node* checked = current->ValueInput(0);
      const VirtualObject* vobject = current->GetVirtualObject(checked);
      Variable map_field;
      Node* map;
      if (vobject && !vobject->HasEscaped() &&
          vobject->FieldAt(HeapObject::kMapOffset).To(&map_field) &&
          current->Get(map_field).To(&map)) {
        if (map) {
          Type const map_type = NodeProperties::GetType(map);
          if (map_type.IsHeapConstant() &&
              params.maps().contains(
                  map_type.AsHeapConstant()->Ref().AsMap().object())) {
            current->MarkForDeletion();
            break;
          }
        } else {
          // If the variable has no value, we have not reached the
          // fixed-point yet.
          break;
        }
      }
      current->SetEscaped(checked);
      break;
    }
    case IrOpcode::kCompareMaps: {
      Node* object = current->ValueInput(0);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      Variable map_field;
      Node* object_map;
      if (vobject && !vobject->HasEscaped() &&
          vobject->FieldAt(HeapObject::kMapOffset).To(&map_field) &&
          current->Get(map_field).To(&object_map)) {
        if (object_map) {
          current->SetReplacement(LowerCompareMapsWithoutLoad(
              object_map, CompareMapsParametersOf(op), jsgraph));
          break;
        } else {
          // If the variable has no value, we have not reached the
          // fixed-point yet.
          break;
        }
      }
      current->SetEscaped(object);
      break;
    }
    case IrOpcode::kCheckHeapObject: {
      Node* checked = current->ValueInput(0);
      switch (checked->opcode()) {
        case IrOpcode::kAllocate:
        case IrOpcode::kFinishRegion:
        case IrOpcode::kHeapConstant:
          current->SetReplacement(checked);
          break;
        default:
          current->SetEscaped(checked);
          break;
      }
      break;
    }
    case IrOpcode::kMapGuard: {
      Node* object = current->ValueInput(0);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      if (vobject && !vobject->HasEscaped()) {
        current->MarkForDeletion();
      }
      break;
    }
    case IrOpcode::kStateValues:
      // We visit StateValues nodes through their corresponding FrameState
      // node, so we need to make sure we revisit the FrameState.
      current->SetValueChanged();
      break;
    case IrOpcode::kFrameState: {
      // We mark the receiver as escaping due to the non-standard `.getThis`
      // API.
      FrameState frame_state{current->CurrentNode()};
      if (frame_state.frame_state_info().type() !=
          FrameStateType::kUnoptimizedFunction)
        break;
      StateValuesAccess::iterator it =
          StateValuesAccess(frame_state.parameters()).begin();
      if (!it.done()) {
        if (Node* receiver = it.node()) {
          current->SetEscaped(receiver);
        }
        current->SetEscaped(frame_state.function());
      }
      break;
    }
    default: {
      // For unknown nodes, treat all value inputs as escaping.
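      // This is the conservative default: an operator unknown to the
      // analysis may store or leak any of its value inputs.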
      int value_input_count = op->ValueInputCount();
      for (int i = 0; i < value_input_count; ++i) {
        Node* input = current->ValueInput(i);
        current->SetEscaped(input);
      }
      if (OperatorProperties::HasContextInput(op)) {
        current->SetEscaped(current->ContextInput());
      }
      break;
    }
  }
}

}  // namespace

void EscapeAnalysis::Reduce(Node* node, Reduction* reduction) {
  const Operator* op = node->op();
  TRACE("Reducing %s#%d\n", op->mnemonic(), node->id());

  EscapeAnalysisTracker::Scope current(this, tracker_, node, reduction);
  ReduceNode(op, &current, jsgraph());
}

EscapeAnalysis::EscapeAnalysis(JSGraph* jsgraph, TickCounter* tick_counter,
                               Zone* zone)
    : EffectGraphReducer(
          jsgraph->graph(),
          [this](Node* node, Reduction* reduction) { Reduce(node, reduction); },
          tick_counter, zone),
      tracker_(zone->New<EscapeAnalysisTracker>(jsgraph, this, zone)),
      jsgraph_(jsgraph) {}

Node* EscapeAnalysisResult::GetReplacementOf(Node* node) {
  Node* replacement = tracker_->GetReplacementOf(node);
  // Replacements cannot have replacements. This is important to ensure
  // re-visitation: If a replacement is replaced, then all nodes accessing
  // the replacement have to be updated.
  if (replacement) DCHECK_NULL(tracker_->GetReplacementOf(replacement));
  return replacement;
}

Node* EscapeAnalysisResult::GetVirtualObjectField(const VirtualObject* vobject,
                                                  int field, Node* effect) {
  return tracker_->variable_states_.Get(vobject->FieldAt(field).FromJust(),
                                        effect);
}

const VirtualObject* EscapeAnalysisResult::GetVirtualObject(Node* node) {
  return tracker_->virtual_objects_.Get(node);
}

VirtualObject::VirtualObject(VariableTracker* var_states, VirtualObject::Id id,
                             int size)
    : Dependable(var_states->zone()), id_(id), fields_(var_states->zone()) {
  DCHECK(IsAligned(size, kTaggedSize));
  TRACE("Creating VirtualObject id:%d size:%d\n", id, size);
  int num_fields = size / kTaggedSize;
  fields_.reserve(num_fields);
  for (int i = 0; i < num_fields; ++i) {
    fields_.push_back(var_states->NewVariable());
  }
}

#undef TRACE

}  // namespace compiler
}  // namespace internal
}  // namespace v8