// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/js-inlining.h"

#include "src/ast/ast.h"
#include "src/codegen/compiler.h"
#include "src/codegen/optimized-compilation-info.h"
#include "src/codegen/tick-counter.h"
#include "src/compiler/access-builder.h"
#include "src/compiler/all-nodes.h"
#include "src/compiler/bytecode-graph-builder.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/compiler-source-position-table.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/js-heap-broker.h"
#include "src/compiler/js-operator.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/operator-properties.h"
#include "src/compiler/simplified-operator.h"
#include "src/execution/isolate-inl.h"
#include "src/objects/feedback-cell-inl.h"
#include "src/parsing/parse-info.h"

#if V8_ENABLE_WEBASSEMBLY
#include "src/compiler/wasm-compiler.h"
#endif  // V8_ENABLE_WEBASSEMBLY

namespace v8 {
namespace internal {
namespace compiler {

namespace {
// This is just to avoid some corner cases, especially since we allow recursive
// inlining.
static const int kMaxDepthForInlining = 50;
}  // namespace

#define TRACE(x)                     \
  do {                               \
    if (FLAG_trace_turbo_inlining) { \
      StdoutStream() << x << "\n";   \
    }                                \
  } while (false)

// Provides convenience accessors for the common layout of nodes having either
// the {JSCall} or the {JSConstruct} operator.
class JSCallAccessor {
 public:
  explicit JSCallAccessor(Node* call) : call_(call) {
    DCHECK(call->opcode() == IrOpcode::kJSCall ||
           call->opcode() == IrOpcode::kJSConstruct);
  }

  Node* target() const {
    return call_->InputAt(JSCallOrConstructNode::TargetIndex());
  }

  Node* receiver() const {
    return JSCallNode{call_}.receiver();
  }

  Node* new_target() const { return JSConstructNode{call_}.new_target(); }

  FrameState frame_state() const {
    return FrameState{NodeProperties::GetFrameStateInput(call_)};
  }

  int argument_count() const {
    return (call_->opcode() == IrOpcode::kJSCall)
               ? JSCallNode{call_}.ArgumentCount()
               : JSConstructNode{call_}.ArgumentCount();
  }

  CallFrequency const& frequency() const {
    return (call_->opcode() == IrOpcode::kJSCall)
               ? JSCallNode{call_}.Parameters().frequency()
               : JSConstructNode{call_}.Parameters().frequency();
  }

 private:
  Node* call_;
};

#if V8_ENABLE_WEBASSEMBLY
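// Inlining a JS-to-Wasm call reuses the generic {InlineCall} machinery below;
// the argument count is taken from the Wasm function signature rather than
// from the call node itself.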
Reduction JSInliner::InlineJSWasmCall(Node* call, Node* new_target,
                                      Node* context, Node* frame_state,
                                      StartNode start, Node* end,
                                      Node* exception_target,
                                      const NodeVector& uncaught_subcalls) {
  JSWasmCallNode n(call);
  return InlineCall(
      call, new_target, context, frame_state, start, end, exception_target,
      uncaught_subcalls,
      static_cast<int>(n.Parameters().signature()->parameter_count()));
}
#endif  // V8_ENABLE_WEBASSEMBLY

Reduction JSInliner::InlineCall(Node* call, Node* new_target, Node* context,
                                Node* frame_state, StartNode start, Node* end,
                                Node* exception_target,
                                const NodeVector& uncaught_subcalls,
                                int argument_count) {
  DCHECK_IMPLIES(IrOpcode::IsInlineeOpcode(call->opcode()),
                 argument_count == JSCallAccessor(call).argument_count());

  // The scheduler is smart enough to place our code; we just ensure {control}
  // becomes the control input of the start of the inlinee, and {effect} becomes
  // the effect input of the start of the inlinee.
  Node* control = NodeProperties::GetControlInput(call);
  Node* effect = NodeProperties::GetEffectInput(call);

  int const inlinee_new_target_index = start.NewTargetOutputIndex();
  int const inlinee_arity_index = start.ArgCountOutputIndex();
  int const inlinee_context_index = start.ContextOutputIndex();

  // {inliner_inputs} counts the target, receiver/new_target, and arguments; but
  // not feedback vector, context, effect or control.
  const int inliner_inputs = argument_count +
                             JSCallOrConstructNode::kExtraInputCount -
                             JSCallOrConstructNode::kFeedbackVectorInputCount;
  // Iterate over all uses of the start node.
  for (Edge edge : start->use_edges()) {
    Node* use = edge.from();
    switch (use->opcode()) {
      case IrOpcode::kParameter: {
        int index = 1 + ParameterIndexOf(use->op());
        DCHECK_LE(index, inlinee_context_index);
        if (index < inliner_inputs && index < inlinee_new_target_index) {
          // There is an input from the call, and the index is a value
          // projection but not the context, so rewire the input.
          Replace(use, call->InputAt(index));
        } else if (index == inlinee_new_target_index) {
          // The projection is requesting the new target value.
          Replace(use, new_target);
        } else if (index == inlinee_arity_index) {
          // The projection is requesting the number of arguments.
          Replace(use, jsgraph()->Constant(argument_count));
        } else if (index == inlinee_context_index) {
          // The projection is requesting the inlinee function context.
          Replace(use, context);
        } else {
          // Call has fewer arguments than required, fill with undefined.
          Replace(use, jsgraph()->UndefinedConstant());
        }
        break;
      }
      default:
        if (NodeProperties::IsEffectEdge(edge)) {
          edge.UpdateTo(effect);
        } else if (NodeProperties::IsControlEdge(edge)) {
          edge.UpdateTo(control);
        } else if (NodeProperties::IsFrameStateEdge(edge)) {
          edge.UpdateTo(frame_state);
        } else {
          UNREACHABLE();
        }
        break;
    }
  }

  if (exception_target != nullptr) {
    // Link uncaught calls in the inlinee to {exception_target}
    int subcall_count = static_cast<int>(uncaught_subcalls.size());
    if (subcall_count > 0) {
      TRACE("Inlinee contains " << subcall_count
                                << " calls without local exception handler; "
                                << "linking to surrounding exception handler.");
    }
    NodeVector on_exception_nodes(local_zone_);
    for (Node* subcall : uncaught_subcalls) {
      Node* on_success = graph()->NewNode(common()->IfSuccess(), subcall);
      NodeProperties::ReplaceUses(subcall, subcall, subcall, on_success);
      NodeProperties::ReplaceControlInput(on_success, subcall);
      Node* on_exception =
          graph()->NewNode(common()->IfException(), subcall, subcall);
      on_exception_nodes.push_back(on_exception);
    }

    DCHECK_EQ(subcall_count, static_cast<int>(on_exception_nodes.size()));
    if (subcall_count > 0) {
      Node* control_output =
          graph()->NewNode(common()->Merge(subcall_count), subcall_count,
                           &on_exception_nodes.front());
      NodeVector values_effects(local_zone_);
      values_effects = on_exception_nodes;
      values_effects.push_back(control_output);
      Node* value_output = graph()->NewNode(
          common()->Phi(MachineRepresentation::kTagged, subcall_count),
          subcall_count + 1, &values_effects.front());
      Node* effect_output =
          graph()->NewNode(common()->EffectPhi(subcall_count),
                           subcall_count + 1, &values_effects.front());
      ReplaceWithValue(exception_target, value_output, effect_output,
                       control_output);
    } else {
      ReplaceWithValue(exception_target, exception_target, exception_target,
                       jsgraph()->Dead());
    }
  }

  NodeVector values(local_zone_);
  NodeVector effects(local_zone_);
  NodeVector controls(local_zone_);
  for (Node* const input : end->inputs()) {
    switch (input->opcode()) {
      case IrOpcode::kReturn:
        values.push_back(NodeProperties::GetValueInput(input, 1));
        effects.push_back(NodeProperties::GetEffectInput(input));
        controls.push_back(NodeProperties::GetControlInput(input));
        break;
      case IrOpcode::kDeoptimize:
      case IrOpcode::kTerminate:
      case IrOpcode::kThrow:
        NodeProperties::MergeControlToEnd(graph(), common(), input);
        Revisit(graph()->end());
        break;
      default:
        UNREACHABLE();
    }
  }
  DCHECK_EQ(values.size(), effects.size());
  DCHECK_EQ(values.size(), controls.size());

  // Depending on whether the inlinee produces a value, we either replace value
  // uses with said value or kill value uses if no value can be returned.
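  // The latter happens when the inlinee's {end} node collected no {Return}
  // inputs at all, i.e. every path through the inlinee deoptimizes, throws or
  // never terminates.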
  if (values.size() > 0) {
    int const input_count = static_cast<int>(controls.size());
    Node* control_output = graph()->NewNode(common()->Merge(input_count),
                                            input_count, &controls.front());
    values.push_back(control_output);
    effects.push_back(control_output);
    Node* value_output = graph()->NewNode(
        common()->Phi(MachineRepresentation::kTagged, input_count),
        static_cast<int>(values.size()), &values.front());
    Node* effect_output =
        graph()->NewNode(common()->EffectPhi(input_count),
                         static_cast<int>(effects.size()), &effects.front());
    ReplaceWithValue(call, value_output, effect_output, control_output);
    return Changed(value_output);
  } else {
    ReplaceWithValue(call, jsgraph()->Dead(), jsgraph()->Dead(),
                     jsgraph()->Dead());
    return Changed(call);
  }
}

FrameState JSInliner::CreateArtificialFrameState(
    Node* node, FrameState outer_frame_state, int parameter_count,
    BytecodeOffset bailout_id, FrameStateType frame_state_type,
    SharedFunctionInfoRef shared, Node* context) {
  const int parameter_count_with_receiver =
      parameter_count + JSCallOrConstructNode::kReceiverOrNewTargetInputCount;
  const FrameStateFunctionInfo* state_info =
      common()->CreateFrameStateFunctionInfo(
          frame_state_type, parameter_count_with_receiver, 0, shared.object());

  const Operator* op = common()->FrameState(
      bailout_id, OutputFrameStateCombine::Ignore(), state_info);
  const Operator* op0 = common()->StateValues(0, SparseInputMask::Dense());
  Node* node0 = graph()->NewNode(op0);

  NodeVector params(local_zone_);
  params.push_back(
      node->InputAt(JSCallOrConstructNode::ReceiverOrNewTargetIndex()));
  for (int i = 0; i < parameter_count; i++) {
    params.push_back(node->InputAt(JSCallOrConstructNode::ArgumentIndex(i)));
  }
  const Operator* op_param = common()->StateValues(
      static_cast<int>(params.size()), SparseInputMask::Dense());
  Node* params_node = graph()->NewNode(
      op_param, static_cast<int>(params.size()), &params.front());
  if (context == nullptr) context = jsgraph()->UndefinedConstant();
  return FrameState{graph()->NewNode(
      op, params_node, node0, node0, context,
      node->InputAt(JSCallOrConstructNode::TargetIndex()), outer_frame_state)};
}

namespace {

bool NeedsImplicitReceiver(SharedFunctionInfoRef shared_info) {
  DisallowGarbageCollection no_gc;
  return !shared_info.construct_as_builtin() &&
         !IsDerivedConstructor(shared_info.kind());
}

}  // namespace

// Determines whether the call target of the given call {node} is statically
// known and can be used as an inlining candidate. The {SharedFunctionInfo} of
// the call target is provided (the exact closure might be unknown).
base::Optional<SharedFunctionInfoRef> JSInliner::DetermineCallTarget(
    Node* node) {
  DCHECK(IrOpcode::IsInlineeOpcode(node->opcode()));
  Node* target = node->InputAt(JSCallOrConstructNode::TargetIndex());
  HeapObjectMatcher match(target);

  // This reducer can handle both normal function calls as well as constructor
  // calls whenever the target is a constant function object, as follows:
  //  - JSCall(target:constant, receiver, args..., vector)
  //  - JSConstruct(target:constant, new.target, args..., vector)
  if (match.HasResolvedValue() && match.Ref(broker()).IsJSFunction()) {
    JSFunctionRef function = match.Ref(broker()).AsJSFunction();

    // The function might not have been called yet.
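    // Without a feedback vector we cannot inline; {DetermineCallContext}
    // below relies on it being present.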
    if (!function.feedback_vector(broker()->dependencies()).has_value()) {
      return base::nullopt;
    }

    // Disallow cross native-context inlining for now. This means that all parts
    // of the resulting code will operate on the same global object. This also
    // prevents cross context leaks, where we could inline functions from a
    // different context and hold on to that context (and closure) from the code
    // object.
    // TODO(turbofan): We might want to revisit this restriction later when we
    // have a need for this, and we know how to model different native contexts
    // in the same graph in a compositional way.
    if (!function.native_context().equals(broker()->target_native_context())) {
      return base::nullopt;
    }

    return function.shared();
  }

  // This reducer can also handle calls where the target is statically known to
  // be the result of a closure instantiation operation, as follows:
  //  - JSCall(JSCreateClosure[shared](context), receiver, args..., vector)
  //  - JSConstruct(JSCreateClosure[shared](context),
  //                new.target, args..., vector)
  if (match.IsJSCreateClosure()) {
    JSCreateClosureNode n(target);
    FeedbackCellRef cell = n.GetFeedbackCellRefChecked(broker());
    return cell.shared_function_info();
  } else if (match.IsCheckClosure()) {
    FeedbackCellRef cell = MakeRef(broker(), FeedbackCellOf(match.op()));
    return cell.shared_function_info();
  }

  return base::nullopt;
}

// Determines statically known information about the call target (assuming that
// the call target is known according to {DetermineCallTarget} above). The
// following static information is provided:
//  - context         : The context (as SSA value) bound by the call target.
//  - feedback_vector : The target is guaranteed to use this feedback vector.
FeedbackCellRef JSInliner::DetermineCallContext(Node* node,
                                                Node** context_out) {
  DCHECK(IrOpcode::IsInlineeOpcode(node->opcode()));
  Node* target = node->InputAt(JSCallOrConstructNode::TargetIndex());
  HeapObjectMatcher match(target);

  if (match.HasResolvedValue() && match.Ref(broker()).IsJSFunction()) {
    JSFunctionRef function = match.Ref(broker()).AsJSFunction();
    // This was already ensured by DetermineCallTarget.
    CHECK(function.feedback_vector(broker()->dependencies()).has_value());

    // The inlinee specializes to the context from the JSFunction object.
    *context_out = jsgraph()->Constant(function.context());
    return function.raw_feedback_cell(broker()->dependencies());
  }

  if (match.IsJSCreateClosure()) {
    // Load the feedback vector of the target by looking up its vector cell at
    // the instantiation site (we only decide to inline if it's populated).
    JSCreateClosureNode n(target);
    FeedbackCellRef cell = n.GetFeedbackCellRefChecked(broker());

    // The inlinee uses the locally provided context at instantiation.
    *context_out = NodeProperties::GetContextInput(match.node());
    return cell;
  } else if (match.IsCheckClosure()) {
    FeedbackCellRef cell = MakeRef(broker(), FeedbackCellOf(match.op()));

    Node* effect = NodeProperties::GetEffectInput(node);
    Node* control = NodeProperties::GetControlInput(node);
    *context_out = effect = graph()->NewNode(
        simplified()->LoadField(AccessBuilder::ForJSFunctionContext()),
        match.node(), effect, control);
    NodeProperties::ReplaceEffectInput(node, effect);

    return cell;
  }

  // Must succeed.
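  // Every target accepted by {DetermineCallTarget} matches one of the cases
  // handled above.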
  UNREACHABLE();
}

#if V8_ENABLE_WEBASSEMBLY
Reduction JSInliner::ReduceJSWasmCall(Node* node) {
  // Create the subgraph for the inlinee.
  Node* start_node;
  Node* end;
  size_t subgraph_min_node_id;
  {
    Graph::SubgraphScope scope(graph());

    graph()->SetEnd(nullptr);

    JSWasmCallNode n(node);
    const JSWasmCallParameters& wasm_call_params = n.Parameters();

    // Create a nested frame state inside the frame state attached to the
    // call; this will ensure that lazy deoptimizations at this point will
    // still return the result of the Wasm function call.
    Node* continuation_frame_state =
        CreateJSWasmCallBuiltinContinuationFrameState(
            jsgraph(), n.context(), n.frame_state(),
            wasm_call_params.signature());
    JSWasmCallData js_wasm_call_data(wasm_call_params.signature());

    // All the nodes inserted by the inlined subgraph will have
    // id >= subgraph_min_node_id. We use this later to avoid wiring nodes that
    // were already part of the graph (i.e. not inserted by the inlinee) to the
    // surrounding exception handler, if present.
    subgraph_min_node_id = graph()->NodeCount();

    BuildInlinedJSToWasmWrapper(
        graph()->zone(), jsgraph(), wasm_call_params.signature(),
        wasm_call_params.module(), isolate(), source_positions_,
        StubCallMode::kCallBuiltinPointer, wasm::WasmFeatures::FromFlags(),
        &js_wasm_call_data, continuation_frame_state);

    // Extract the inlinee start/end nodes.
    start_node = graph()->start();
    end = graph()->end();
  }
  StartNode start{start_node};

  Node* exception_target = nullptr;
  NodeProperties::IsExceptionalCall(node, &exception_target);

  // If we are inlining into a surrounding exception handler, we collect all
  // potentially throwing nodes within the inlinee that are not handled locally
  // by the inlinee itself. They are later wired into the surrounding handler.
  NodeVector uncaught_subcalls(local_zone_);
  if (exception_target != nullptr) {
    // Find all uncaught 'calls' in the inlinee.
    AllNodes inlined_nodes(local_zone_, end, graph());
    for (Node* subnode : inlined_nodes.reachable) {
      // Ignore nodes that are not part of the inlinee.
      if (subnode->id() < subgraph_min_node_id) continue;

      // Every possibly throwing node should get {IfSuccess} and {IfException}
      // projections, unless there already is local exception handling.
      if (subnode->op()->HasProperty(Operator::kNoThrow)) continue;
      if (!NodeProperties::IsExceptionalCall(subnode)) {
        DCHECK_EQ(2, subnode->op()->ControlOutputCount());
        uncaught_subcalls.push_back(subnode);
      }
    }
  }

  Node* context = NodeProperties::GetContextInput(node);
  Node* frame_state = NodeProperties::GetFrameStateInput(node);
  Node* new_target = jsgraph()->UndefinedConstant();

  return InlineJSWasmCall(node, new_target, context, frame_state, start, end,
                          exception_target, uncaught_subcalls);
}
#endif  // V8_ENABLE_WEBASSEMBLY

Reduction JSInliner::ReduceJSCall(Node* node) {
  DCHECK(IrOpcode::IsInlineeOpcode(node->opcode()));
#if V8_ENABLE_WEBASSEMBLY
  DCHECK_NE(node->opcode(), IrOpcode::kJSWasmCall);
#endif  // V8_ENABLE_WEBASSEMBLY
  JSCallAccessor call(node);

  // Determine the call target.
  base::Optional<SharedFunctionInfoRef> shared_info(DetermineCallTarget(node));
  if (!shared_info.has_value()) return NoChange();

  SharedFunctionInfoRef outer_shared_info =
      MakeRef(broker(), info_->shared_info());

  SharedFunctionInfo::Inlineability inlineability =
      shared_info->GetInlineability();
  if (inlineability != SharedFunctionInfo::kIsInlineable) {
    // The function is no longer inlineable. The only way this can happen is if
    // the function had its optimization disabled in the meantime, e.g. because
    // another optimization job failed too often.
    CHECK_EQ(inlineability, SharedFunctionInfo::kHasOptimizationDisabled);
    TRACE("Not inlining " << *shared_info << " into " << outer_shared_info
                          << " because it had its optimization disabled.");
    return NoChange();
  }
  // NOTE: Even though we bail out in the kHasOptimizationDisabled case above,
  // we won't notice if the function's optimization is disabled after this
  // point.

  // Constructor must be constructable.
  if (node->opcode() == IrOpcode::kJSConstruct &&
      !IsConstructable(shared_info->kind())) {
    TRACE("Not inlining " << *shared_info << " into " << outer_shared_info
                          << " because constructor is not constructable.");
    return NoChange();
  }

  // Class constructors are callable, but [[Call]] will raise an exception.
  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList ).
  if (node->opcode() == IrOpcode::kJSCall &&
      IsClassConstructor(shared_info->kind())) {
    TRACE("Not inlining " << *shared_info << " into " << outer_shared_info
                          << " because callee is a class constructor.");
    return NoChange();
  }

  // To ensure inlining always terminates, we have an upper limit on the depth
  // of nested calls we are willing to inline.
  int nesting_level = 0;
  for (Node* frame_state = call.frame_state();
       frame_state->opcode() == IrOpcode::kFrameState;
       frame_state = FrameState{frame_state}.outer_frame_state()) {
    nesting_level++;
    if (nesting_level > kMaxDepthForInlining) {
      TRACE("Not inlining "
            << *shared_info << " into " << outer_shared_info
            << " because call has exceeded the maximum depth for function "
               "inlining.");
      return NoChange();
    }
  }

  Node* exception_target = nullptr;
  NodeProperties::IsExceptionalCall(node, &exception_target);

  // JSInliningHeuristic has already filtered candidates without a BytecodeArray
  // based on SharedFunctionInfoRef::GetInlineability. For the inlineable ones
  // (kIsInlineable), the broker holds a reference to the bytecode array, which
  // prevents it from getting flushed. Therefore, the following check should
  // always hold true.
  CHECK(shared_info->is_compiled());

  if (info_->source_positions() &&
      !shared_info->object()->AreSourcePositionsAvailable(
          broker()->local_isolate_or_isolate())) {
    // This case is expected to be very rare, since we generate source
    // positions for all functions when debugging or profiling are turned
    // on (see Isolate::NeedsDetailedOptimizedCodeLineInfo). Source
    // positions should only be missing here if there is a race between 1)
    // enabling/disabling the debugger/profiler, and 2) this compile job.
    // In that case, we simply don't inline.
    TRACE("Not inlining " << *shared_info << " into " << outer_shared_info
                          << " because source positions are missing.");
    return NoChange();
  }

  // Determine the target's feedback vector and its context.
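  // The feedback cell is passed on to the bytecode graph builder below so that
  // the inlinee graph specializes to the feedback of the actual call target.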
  Node* context;
  FeedbackCellRef feedback_cell = DetermineCallContext(node, &context);

  TRACE("Inlining " << *shared_info << " into " << outer_shared_info
                    << ((exception_target != nullptr) ? " (inside try-block)"
                                                      : ""));
  // ----------------------------------------------------------------
  // After this point, we've made a decision to inline this function.
  // We shall not bail out from inlining if we got here.

  BytecodeArrayRef bytecode_array = shared_info->GetBytecodeArray();

  // Remember that we inlined this function.
  int inlining_id =
      info_->AddInlinedFunction(shared_info->object(), bytecode_array.object(),
                                source_positions_->GetSourcePosition(node));

  // Create the subgraph for the inlinee.
  Node* start_node;
  Node* end;
  {
    // Run the BytecodeGraphBuilder to create the subgraph.
    Graph::SubgraphScope scope(graph());
    BytecodeGraphBuilderFlags flags(
        BytecodeGraphBuilderFlag::kSkipFirstStackAndTierupCheck);
    if (info_->analyze_environment_liveness()) {
      flags |= BytecodeGraphBuilderFlag::kAnalyzeEnvironmentLiveness;
    }
    if (info_->bailout_on_uninitialized()) {
      flags |= BytecodeGraphBuilderFlag::kBailoutOnUninitialized;
    }
    {
      CallFrequency frequency = call.frequency();
      BuildGraphFromBytecode(broker(), zone(), *shared_info, feedback_cell,
                             BytecodeOffset::None(), jsgraph(), frequency,
                             source_positions_, inlining_id, info_->code_kind(),
                             flags, &info_->tick_counter());
    }

    // Extract the inlinee start/end nodes.
    start_node = graph()->start();
    end = graph()->end();
  }
  StartNode start{start_node};

  // If we are inlining into a surrounding exception handler, we collect all
  // potentially throwing nodes within the inlinee that are not handled locally
  // by the inlinee itself. They are later wired into the surrounding handler.
  NodeVector uncaught_subcalls(local_zone_);
  if (exception_target != nullptr) {
    // Find all uncaught 'calls' in the inlinee.
    AllNodes inlined_nodes(local_zone_, end, graph());
    for (Node* subnode : inlined_nodes.reachable) {
      // Every possibly throwing node should get {IfSuccess} and {IfException}
      // projections, unless there already is local exception handling.
      if (subnode->op()->HasProperty(Operator::kNoThrow)) continue;
      if (!NodeProperties::IsExceptionalCall(subnode)) {
        DCHECK_EQ(2, subnode->op()->ControlOutputCount());
        uncaught_subcalls.push_back(subnode);
      }
    }
  }

  FrameState frame_state = call.frame_state();
  Node* new_target = jsgraph()->UndefinedConstant();

  // Inlining a {JSConstruct} node requires some additional magic.
  if (node->opcode() == IrOpcode::kJSConstruct) {
    STATIC_ASSERT(JSCallOrConstructNode::kHaveIdenticalLayouts);
    JSConstructNode n(node);

    new_target = n.new_target();

    // Insert nodes around the call that model the behavior required for a
    // constructor dispatch (allocate implicit receiver and check return value).
    // This models the behavior usually accomplished by our {JSConstructStub}.
    // Note that the context has to be the caller's context (input to the call
    // node). Also note that by splitting off the {JSCreate} piece of the
    // constructor call, we create an observable deoptimization point after the
    // receiver instantiation but before the invocation (i.e. inside
    // {JSConstructStub} where execution continues at
    // {construct_stub_create_deopt_pc_offset}).
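    // The receiver starts out as the hole and is only replaced by a freshly
    // allocated object (via {JSCreate} below) if the callee actually needs an
    // implicit receiver.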
    Node* receiver = jsgraph()->TheHoleConstant();  // Implicit receiver.
    Node* caller_context = NodeProperties::GetContextInput(node);
    if (NeedsImplicitReceiver(*shared_info)) {
      Effect effect = n.effect();
      Control control = n.control();
      Node* frame_state_inside = CreateArtificialFrameState(
          node, frame_state, n.ArgumentCount(),
          BytecodeOffset::ConstructStubCreate(), FrameStateType::kConstructStub,
          *shared_info, caller_context);
      Node* create =
          graph()->NewNode(javascript()->Create(), call.target(), new_target,
                           caller_context, frame_state_inside, effect, control);
      uncaught_subcalls.push_back(create);  // Adds {IfSuccess} & {IfException}.
      NodeProperties::ReplaceControlInput(node, create);
      NodeProperties::ReplaceEffectInput(node, create);
      // Placeholder to hold {node}'s value dependencies while {node} is
      // replaced.
      Node* dummy = graph()->NewNode(common()->Dead());
      NodeProperties::ReplaceUses(node, dummy, node, node, node);
      Node* result;
      // Insert a check of the return value to determine whether the return
      // value or the implicit receiver should be selected as a result of the
      // call.
      Node* check = graph()->NewNode(simplified()->ObjectIsReceiver(), node);
      result =
          graph()->NewNode(common()->Select(MachineRepresentation::kTagged),
                           check, node, create);
      receiver = create;  // The implicit receiver.
      ReplaceWithValue(dummy, result);
    } else if (IsDerivedConstructor(shared_info->kind())) {
      Node* node_success =
          NodeProperties::FindSuccessfulControlProjection(node);
      Node* is_receiver =
          graph()->NewNode(simplified()->ObjectIsReceiver(), node);
      Node* branch_is_receiver =
          graph()->NewNode(common()->Branch(), is_receiver, node_success);
      Node* branch_is_receiver_true =
          graph()->NewNode(common()->IfTrue(), branch_is_receiver);
      Node* branch_is_receiver_false =
          graph()->NewNode(common()->IfFalse(), branch_is_receiver);
      branch_is_receiver_false = graph()->NewNode(
          javascript()->CallRuntime(
              Runtime::kThrowConstructorReturnedNonObject),
          caller_context, NodeProperties::GetFrameStateInput(node), node,
          branch_is_receiver_false);
      uncaught_subcalls.push_back(branch_is_receiver_false);
      branch_is_receiver_false =
          graph()->NewNode(common()->Throw(), branch_is_receiver_false,
                           branch_is_receiver_false);
      NodeProperties::MergeControlToEnd(graph(), common(),
                                        branch_is_receiver_false);

      ReplaceWithValue(node_success, node_success, node_success,
                       branch_is_receiver_true);
      // Fix input destroyed by the above {ReplaceWithValue} call.
      NodeProperties::ReplaceControlInput(branch_is_receiver, node_success, 0);
    }
    node->ReplaceInput(JSCallNode::ReceiverIndex(), receiver);
    // Insert a construct stub frame into the chain of frame states. This will
    // reconstruct the proper frame when deoptimizing within the constructor.
    frame_state = CreateArtificialFrameState(
        node, frame_state, n.ArgumentCount(),
        BytecodeOffset::ConstructStubInvoke(), FrameStateType::kConstructStub,
        *shared_info, caller_context);
  }

  // Insert a JSConvertReceiver node for sloppy callees. Note that the context
  // passed into this node has to be the callee's context (loaded above).
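  // Strict-mode and native callees observe the receiver unmodified, so the
  // conversion is only needed for sloppy, non-native callees whose receiver
  // might be a primitive value.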
  if (node->opcode() == IrOpcode::kJSCall &&
      is_sloppy(shared_info->language_mode()) && !shared_info->native()) {
    Effect effect{NodeProperties::GetEffectInput(node)};
    if (NodeProperties::CanBePrimitive(broker(), call.receiver(), effect)) {
      CallParameters const& p = CallParametersOf(node->op());
      Node* global_proxy = jsgraph()->Constant(
          broker()->target_native_context().global_proxy_object());
      Node* receiver = effect =
          graph()->NewNode(simplified()->ConvertReceiver(p.convert_mode()),
                           call.receiver(), global_proxy, effect, start);
      NodeProperties::ReplaceValueInput(node, receiver,
                                        JSCallNode::ReceiverIndex());
      NodeProperties::ReplaceEffectInput(node, effect);
    }
  }

  // Insert an argument adaptor frame if required. The callee's formal
  // parameter count has to match the number of arguments passed to the call.
  int parameter_count =
      shared_info->internal_formal_parameter_count_without_receiver();
  DCHECK_EQ(parameter_count, start.FormalParameterCountWithoutReceiver());
  if (call.argument_count() != parameter_count) {
    frame_state = CreateArtificialFrameState(
        node, frame_state, call.argument_count(), BytecodeOffset::None(),
        FrameStateType::kArgumentsAdaptor, *shared_info);
  }

  return InlineCall(node, new_target, context, frame_state, start, end,
                    exception_target, uncaught_subcalls, call.argument_count());
}

Graph* JSInliner::graph() const { return jsgraph()->graph(); }

JSOperatorBuilder* JSInliner::javascript() const {
  return jsgraph()->javascript();
}

CommonOperatorBuilder* JSInliner::common() const { return jsgraph()->common(); }

SimplifiedOperatorBuilder* JSInliner::simplified() const {
  return jsgraph()->simplified();
}

#undef TRACE

}  // namespace compiler
}  // namespace internal
}  // namespace v8