// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/redundancy-elimination.h"

#include "src/compiler/node-properties.h"
#include "src/compiler/simplified-operator.h"

namespace v8 {
namespace internal {
namespace compiler {

RedundancyElimination::RedundancyElimination(Editor* editor, Zone* zone)
    : AdvancedReducer(editor), node_checks_(zone), zone_(zone) {}

RedundancyElimination::~RedundancyElimination() = default;

Reduction RedundancyElimination::Reduce(Node* node) {
  if (node_checks_.Get(node)) return NoChange();
  switch (node->opcode()) {
    case IrOpcode::kCheckBigInt:
    case IrOpcode::kCheckBounds:
    case IrOpcode::kCheckClosure:
    case IrOpcode::kCheckEqualsInternalizedString:
    case IrOpcode::kCheckEqualsSymbol:
    case IrOpcode::kCheckFloat64Hole:
    case IrOpcode::kCheckHeapObject:
    case IrOpcode::kCheckIf:
    case IrOpcode::kCheckInternalizedString:
    case IrOpcode::kCheckNotTaggedHole:
    case IrOpcode::kCheckNumber:
    case IrOpcode::kCheckReceiver:
    case IrOpcode::kCheckReceiverOrNullOrUndefined:
    case IrOpcode::kCheckSmi:
    case IrOpcode::kCheckString:
    case IrOpcode::kCheckSymbol:
#define SIMPLIFIED_CHECKED_OP(Opcode) case IrOpcode::k##Opcode:
      SIMPLIFIED_CHECKED_OP_LIST(SIMPLIFIED_CHECKED_OP)
#undef SIMPLIFIED_CHECKED_OP
      return ReduceCheckNode(node);
    case IrOpcode::kSpeculativeNumberEqual:
    case IrOpcode::kSpeculativeNumberLessThan:
    case IrOpcode::kSpeculativeNumberLessThanOrEqual:
      return ReduceSpeculativeNumberComparison(node);
    case IrOpcode::kSpeculativeNumberAdd:
    case IrOpcode::kSpeculativeNumberSubtract:
    case IrOpcode::kSpeculativeSafeIntegerAdd:
    case IrOpcode::kSpeculativeSafeIntegerSubtract:
    case IrOpcode::kSpeculativeToNumber:
      return ReduceSpeculativeNumberOperation(node);
    case IrOpcode::kEffectPhi:
      return ReduceEffectPhi(node);
    case IrOpcode::kDead:
      break;
    case IrOpcode::kStart:
      return ReduceStart(node);
    default:
      return ReduceOtherNode(node);
  }
  return NoChange();
}
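
// The EffectPathChecks below form, for each effect path, a persistent list
// of the check nodes that are known to have been performed on that path.
// Lists share their tails, so AddCheck is O(1), and instances stored in the
// side table are treated as immutable; ReduceEffectPhi therefore makes a
// Copy before mutating a list via Merge.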

// static
RedundancyElimination::EffectPathChecks*
RedundancyElimination::EffectPathChecks::Copy(Zone* zone,
                                              EffectPathChecks const* checks) {
  return zone->New<EffectPathChecks>(*checks);
}

// static
RedundancyElimination::EffectPathChecks const*
RedundancyElimination::EffectPathChecks::Empty(Zone* zone) {
  return zone->New<EffectPathChecks>(nullptr, 0);
}

bool RedundancyElimination::EffectPathChecks::Equals(
    EffectPathChecks const* that) const {
  if (this->size_ != that->size_) return false;
  Check* this_head = this->head_;
  Check* that_head = that->head_;
  while (this_head != that_head) {
    if (this_head->node != that_head->node) return false;
    this_head = this_head->next;
    that_head = that_head->next;
  }
  return true;
}

void RedundancyElimination::EffectPathChecks::Merge(
    EffectPathChecks const* that) {
  // Change the current check list to a longest common tail of this check
  // list and the other list.

  // First, we throw away the prefix of the longer list, so that
  // we have lists of the same length.
  Check* that_head = that->head_;
  size_t that_size = that->size_;
  while (that_size > size_) {
    that_head = that_head->next;
    that_size--;
  }
  while (size_ > that_size) {
    head_ = head_->next;
    size_--;
  }

  // Then we go through both lists in lock-step until we find
  // the common tail.
  while (head_ != that_head) {
    DCHECK_LT(0u, size_);
    DCHECK_NOT_NULL(head_);
    size_--;
    head_ = head_->next;
    that_head = that_head->next;
  }
}
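
// For example, merging a list [D, A] into a list [C, B, A] (most recent
// check first) first trims [C, B, A] to [B, A], then advances both lists in
// lock-step past the differing B/D entries, leaving just the shared tail
// [A]: only checks common to both effect paths survive.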

RedundancyElimination::EffectPathChecks const*
RedundancyElimination::EffectPathChecks::AddCheck(Zone* zone,
                                                  Node* node) const {
  Check* head = zone->New<Check>(node, head_);
  return zone->New<EffectPathChecks>(head, size_ + 1);
}

namespace {

// Does check {a} subsume check {b}?
bool CheckSubsumes(Node const* a, Node const* b) {
  if (a->op() != b->op()) {
    if (a->opcode() == IrOpcode::kCheckInternalizedString &&
        b->opcode() == IrOpcode::kCheckString) {
      // CheckInternalizedString(node) implies CheckString(node)
    } else if (a->opcode() == IrOpcode::kCheckSmi &&
               b->opcode() == IrOpcode::kCheckNumber) {
      // CheckSmi(node) implies CheckNumber(node)
    } else if (a->opcode() == IrOpcode::kCheckedTaggedSignedToInt32 &&
               b->opcode() == IrOpcode::kCheckedTaggedToInt32) {
      // CheckedTaggedSignedToInt32(node) implies CheckedTaggedToInt32(node)
    } else if (a->opcode() == IrOpcode::kCheckedTaggedSignedToInt32 &&
               b->opcode() == IrOpcode::kCheckedTaggedToArrayIndex) {
      // CheckedTaggedSignedToInt32(node) implies
      // CheckedTaggedToArrayIndex(node)
    } else if (a->opcode() == IrOpcode::kCheckedTaggedToInt32 &&
               b->opcode() == IrOpcode::kCheckedTaggedToArrayIndex) {
      // CheckedTaggedToInt32(node) implies CheckedTaggedToArrayIndex(node)
    } else if (a->opcode() == IrOpcode::kCheckReceiver &&
               b->opcode() == IrOpcode::kCheckReceiverOrNullOrUndefined) {
      // CheckReceiver(node) implies CheckReceiverOrNullOrUndefined(node)
    } else if (a->opcode() != b->opcode()) {
      return false;
    } else {
      switch (a->opcode()) {
        case IrOpcode::kCheckBounds:
        case IrOpcode::kCheckSmi:
        case IrOpcode::kCheckString:
        case IrOpcode::kCheckNumber:
        case IrOpcode::kCheckBigInt:
          break;
        case IrOpcode::kCheckedInt32ToTaggedSigned:
        case IrOpcode::kCheckedInt64ToInt32:
        case IrOpcode::kCheckedInt64ToTaggedSigned:
        case IrOpcode::kCheckedTaggedSignedToInt32:
        case IrOpcode::kCheckedTaggedToTaggedPointer:
        case IrOpcode::kCheckedTaggedToTaggedSigned:
        case IrOpcode::kCheckedTaggedToArrayIndex:
        case IrOpcode::kCheckedUint32Bounds:
        case IrOpcode::kCheckedUint32ToInt32:
        case IrOpcode::kCheckedUint32ToTaggedSigned:
        case IrOpcode::kCheckedUint64Bounds:
        case IrOpcode::kCheckedUint64ToInt32:
        case IrOpcode::kCheckedUint64ToTaggedSigned:
          break;
        case IrOpcode::kCheckedFloat64ToInt32:
        case IrOpcode::kCheckedFloat64ToInt64:
        case IrOpcode::kCheckedTaggedToInt32:
        case IrOpcode::kCheckedTaggedToInt64: {
          const CheckMinusZeroParameters& ap =
              CheckMinusZeroParametersOf(a->op());
          const CheckMinusZeroParameters& bp =
              CheckMinusZeroParametersOf(b->op());
          if (ap.mode() != bp.mode()) {
            return false;
          }
          break;
        }
        case IrOpcode::kCheckedTaggedToFloat64:
        case IrOpcode::kCheckedTruncateTaggedToWord32: {
          CheckTaggedInputParameters const& ap =
              CheckTaggedInputParametersOf(a->op());
          CheckTaggedInputParameters const& bp =
              CheckTaggedInputParametersOf(b->op());
          // {a} subsumes {b} if the modes are either the same, or {a} checks
          // for Number, in which case {b} will be subsumed no matter what.
          if (ap.mode() != bp.mode() &&
              ap.mode() != CheckTaggedInputMode::kNumber) {
            return false;
          }
          break;
        }
        default:
          DCHECK(!IsCheckedWithFeedback(a->op()));
          return false;
      }
    }
  }
  for (int i = a->op()->ValueInputCount(); --i >= 0;) {
    if (a->InputAt(i) != b->InputAt(i)) return false;
  }
  return true;
}
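
// For example, by the rules above, once CheckSmi(x) has been performed on
// the current effect path, a later CheckNumber(x) is redundant, since every
// Smi is a Number; LookupCheck below will then return the CheckSmi node as
// a replacement.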

bool TypeSubsumes(Node* node, Node* replacement) {
  if (!NodeProperties::IsTyped(node) ||
      !NodeProperties::IsTyped(replacement)) {
    // If either node is untyped, we are running during an untyped optimization
    // phase, and replacement is OK.
    return true;
  }
  Type node_type = NodeProperties::GetType(node);
  Type replacement_type = NodeProperties::GetType(replacement);
  return replacement_type.Is(node_type);
}

}  // namespace

Node* RedundancyElimination::EffectPathChecks::LookupCheck(Node* node) const {
  for (Check const* check = head_; check != nullptr; check = check->next) {
    if (CheckSubsumes(check->node, node) && TypeSubsumes(node, check->node)) {
      DCHECK(!check->node->IsDead());
      return check->node;
    }
  }
  return nullptr;
}
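
// Finds a CheckBounds node that was already performed on {node} along the
// current effect path. CheckBounds nodes carrying the
// kConvertStringAndMinusZero flag are skipped: as the comments further down
// note, such a {check} can truncate -0 to 0, i.e. it does not necessarily
// return its index input unchanged, so it is not a safe stand-in in general.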
Node* RedundancyElimination::EffectPathChecks::LookupBoundsCheckFor(
    Node* node) const {
  for (Check const* check = head_; check != nullptr; check = check->next) {
    if (check->node->opcode() == IrOpcode::kCheckBounds &&
        check->node->InputAt(0) == node && TypeSubsumes(node, check->node) &&
        !(CheckBoundsParametersOf(check->node->op()).flags() &
          CheckBoundsFlag::kConvertStringAndMinusZero)) {
      return check->node;
    }
  }
  return nullptr;
}

RedundancyElimination::EffectPathChecks const*
RedundancyElimination::PathChecksForEffectNodes::Get(Node* node) const {
  size_t const id = node->id();
  if (id < info_for_node_.size()) return info_for_node_[id];
  return nullptr;
}

void RedundancyElimination::PathChecksForEffectNodes::Set(
    Node* node, EffectPathChecks const* checks) {
  size_t const id = node->id();
  if (id >= info_for_node_.size()) info_for_node_.resize(id + 1, nullptr);
  info_for_node_[id] = checks;
}

Reduction RedundancyElimination::ReduceCheckNode(Node* node) {
  Node* const effect = NodeProperties::GetEffectInput(node);
  EffectPathChecks const* checks = node_checks_.Get(effect);
  // If we do not know anything about the predecessor, do not propagate just yet
  // because we will have to recompute anyway once we compute the predecessor.
  if (checks == nullptr) return NoChange();
  // See if we have another check that dominates us.
  if (Node* check = checks->LookupCheck(node)) {
    ReplaceWithValue(node, check);
    return Replace(check);
  }

  // Learn from this check.
  return UpdateChecks(node, checks->AddCheck(zone(), node));
}

Reduction RedundancyElimination::ReduceEffectPhi(Node* node) {
  Node* const control = NodeProperties::GetControlInput(node);
  if (control->opcode() == IrOpcode::kLoop) {
    // Here we rely on having only reducible loops:
    // The loop entry edge always dominates the header, so we can just use
    // the information from the loop entry edge.
    return TakeChecksFromFirstEffect(node);
  }
  DCHECK_EQ(IrOpcode::kMerge, control->opcode());

  // Shortcut for the case when we do not know anything about some input.
  int const input_count = node->op()->EffectInputCount();
  for (int i = 0; i < input_count; ++i) {
    Node* const effect = NodeProperties::GetEffectInput(node, i);
    if (node_checks_.Get(effect) == nullptr) return NoChange();
  }

  // Make a copy of the first input's checks and merge with the checks
  // from other inputs.
  EffectPathChecks* checks = EffectPathChecks::Copy(
      zone(), node_checks_.Get(NodeProperties::GetEffectInput(node, 0)));
  for (int i = 1; i < input_count; ++i) {
    Node* const input = NodeProperties::GetEffectInput(node, i);
    checks->Merge(node_checks_.Get(input));
  }
  return UpdateChecks(node, checks);
}
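
// Note on the merge above: a check performed before a branch is still known
// to hold at the corresponding control-flow merge, while a check performed
// in only one of the arms is dropped, since it does not hold on the other
// incoming effect path.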

Reduction RedundancyElimination::ReduceSpeculativeNumberComparison(Node* node) {
  NumberOperationHint const hint = NumberOperationHintOf(node->op());
  Node* const first = NodeProperties::GetValueInput(node, 0);
  Type const first_type = NodeProperties::GetType(first);
  Node* const second = NodeProperties::GetValueInput(node, 1);
  Type const second_type = NodeProperties::GetType(second);
  Node* const effect = NodeProperties::GetEffectInput(node);
  EffectPathChecks const* checks = node_checks_.Get(effect);

  // If we do not know anything about the predecessor, do not propagate just yet
  // because we will have to recompute anyway once we compute the predecessor.
  if (checks == nullptr) return NoChange();

  // Avoid the potentially expensive lookups below if the {node}
  // has seen non-Smi inputs in the past, which is a clear signal
  // that the comparison is probably not performed on a value that
  // already passed an array bounds check.
  if (hint == NumberOperationHint::kSignedSmall) {
    // Don't bother trying to find a CheckBounds for the {first} input
    // if its type is already in the UnsignedSmall range, since the bounds
    // check is only going to narrow that range further, but the result
    // is not going to make the representation selection any better.
    if (!first_type.Is(Type::UnsignedSmall())) {
      if (Node* check = checks->LookupBoundsCheckFor(first)) {
        if (!first_type.Is(NodeProperties::GetType(check))) {
          // Replace the {first} input with the {check}. This is safe,
          // despite the fact that {check} can truncate -0 to 0, because
          // the regular Number comparisons in JavaScript also identify
          // 0 and -0 (unlike special comparisons such as Object.is).
          NodeProperties::ReplaceValueInput(node, check, 0);
          return Changed(node).FollowedBy(
              ReduceSpeculativeNumberComparison(node));
        }
      }
    }

    // Don't bother trying to find a CheckBounds for the {second} input
    // if its type is already in the UnsignedSmall range, since the bounds
    // check is only going to narrow that range further, but the result
    // is not going to make the representation selection any better.
    if (!second_type.Is(Type::UnsignedSmall())) {
      if (Node* check = checks->LookupBoundsCheckFor(second)) {
        if (!second_type.Is(NodeProperties::GetType(check))) {
          // Replace the {second} input with the {check}. This is safe,
          // despite the fact that {check} can truncate -0 to 0, because
          // the regular Number comparisons in JavaScript also identify
          // 0 and -0 (unlike special comparisons such as Object.is).
          NodeProperties::ReplaceValueInput(node, check, 1);
          return Changed(node).FollowedBy(
              ReduceSpeculativeNumberComparison(node));
        }
      }
    }
  }

  return UpdateChecks(node, checks);
}

Reduction RedundancyElimination::ReduceSpeculativeNumberOperation(Node* node) {
  DCHECK(node->opcode() == IrOpcode::kSpeculativeNumberAdd ||
         node->opcode() == IrOpcode::kSpeculativeNumberSubtract ||
         node->opcode() == IrOpcode::kSpeculativeSafeIntegerAdd ||
         node->opcode() == IrOpcode::kSpeculativeSafeIntegerSubtract ||
         node->opcode() == IrOpcode::kSpeculativeToNumber);
  DCHECK_EQ(1, node->op()->EffectInputCount());
  DCHECK_EQ(1, node->op()->EffectOutputCount());

  Node* const first = NodeProperties::GetValueInput(node, 0);
  Node* const effect = NodeProperties::GetEffectInput(node);
  EffectPathChecks const* checks = node_checks_.Get(effect);
  // If we do not know anything about the predecessor, do not propagate just yet
  // because we will have to recompute anyway once we compute the predecessor.
  if (checks == nullptr) return NoChange();

  // Check if there's a CheckBounds operation on {first}
  // in the graph already, which we might be able to
  // reuse here to improve the representation selection
  // for the {node} later on.
  if (Node* check = checks->LookupBoundsCheckFor(first)) {
    // Only use the bounds {check} if its type is better
    // than the type of the {first} node, otherwise we
    // would end up replacing NumberConstant inputs with
    // CheckBounds operations, which is kind of pointless.
    if (!NodeProperties::GetType(first).Is(NodeProperties::GetType(check))) {
      NodeProperties::ReplaceValueInput(node, check, 0);
    }
  }

  return UpdateChecks(node, checks);
}

Reduction RedundancyElimination::ReduceStart(Node* node) {
  return UpdateChecks(node, EffectPathChecks::Empty(zone()));
}

Reduction RedundancyElimination::ReduceOtherNode(Node* node) {
  if (node->op()->EffectInputCount() == 1) {
    if (node->op()->EffectOutputCount() == 1) {
      return TakeChecksFromFirstEffect(node);
    } else {
      // Effect terminators should be handled specially.
      return NoChange();
    }
  }
  DCHECK_EQ(0, node->op()->EffectInputCount());
  DCHECK_EQ(0, node->op()->EffectOutputCount());
  return NoChange();
}

Reduction RedundancyElimination::TakeChecksFromFirstEffect(Node* node) {
  DCHECK_EQ(1, node->op()->EffectOutputCount());
  Node* const effect = NodeProperties::GetEffectInput(node);
  EffectPathChecks const* checks = node_checks_.Get(effect);
  // If we do not know anything about the predecessor, do not propagate just yet
  // because we will have to recompute anyway once we compute the predecessor.
  if (checks == nullptr) return NoChange();
  // We just propagate the information from the effect input (ideally,
  // we would only revisit effect uses if there is a change).
  return UpdateChecks(node, checks);
}

Reduction RedundancyElimination::UpdateChecks(Node* node,
                                              EffectPathChecks const* checks) {
  EffectPathChecks const* original = node_checks_.Get(node);
  // Only signal that the {node} has Changed if the information about {checks}
  // has changed with respect to the {original}.
  if (checks != original) {
    if (original == nullptr || !checks->Equals(original)) {
      node_checks_.Set(node, checks);
      return Changed(node);
    }
  }
  return NoChange();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8