// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/raw-machine-assembler.h"

#include "src/base/small-vector.h"
#include "src/compiler/compiler-source-position-table.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/pipeline.h"
#include "src/compiler/scheduler.h"
#include "src/heap/factory-inl.h"

namespace v8 {
namespace internal {
namespace compiler {

RawMachineAssembler::RawMachineAssembler(
    Isolate* isolate, Graph* graph, CallDescriptor* call_descriptor,
    MachineRepresentation word, MachineOperatorBuilder::Flags flags,
    MachineOperatorBuilder::AlignmentRequirements alignment_requirements)
    : isolate_(isolate),
      graph_(graph),
      schedule_(zone()->New<Schedule>(zone())),
      source_positions_(zone()->New<SourcePositionTable>(graph)),
      machine_(zone(), word, flags, alignment_requirements),
      common_(zone()),
      simplified_(zone()),
      call_descriptor_(call_descriptor),
      target_parameter_(nullptr),
      parameters_(parameter_count(), zone()),
      current_block_(schedule()->start()) {
  int param_count = static_cast<int>(parameter_count());
  // Add an extra input for the JSFunction parameter to the start node.
  graph->SetStart(graph->NewNode(common_.Start(param_count + 1)));
  if (call_descriptor->IsJSFunctionCall()) {
    target_parameter_ = AddNode(
        common()->Parameter(Linkage::kJSCallClosureParamIndex), graph->start());
  }
  for (size_t i = 0; i < parameter_count(); ++i) {
    parameters_[i] =
        AddNode(common()->Parameter(static_cast<int>(i)), graph->start());
  }
  graph->SetEnd(graph->NewNode(common_.End(0)));
  source_positions_->AddDecorator();
}

void RawMachineAssembler::SetCurrentExternalSourcePosition(
    FileAndLine file_and_line) {
  int file_id =
      isolate()->LookupOrAddExternallyCompiledFilename(file_and_line.first);
  SourcePosition p = SourcePosition::External(file_and_line.second, file_id);
  DCHECK(p.ExternalLine() == file_and_line.second);
  source_positions()->SetCurrentPosition(p);
}

FileAndLine RawMachineAssembler::GetCurrentExternalSourcePosition() const {
  SourcePosition p = source_positions_->GetCurrentPosition();
  if (!p.IsKnown()) return {nullptr, -1};
  int file_id = p.ExternalFileId();
  const char* file_name = isolate()->GetExternallyCompiledFilename(file_id);
  int line = p.ExternalLine();
  return {file_name, line};
}

Node* RawMachineAssembler::NullConstant() {
  return HeapConstant(isolate()->factory()->null_value());
}

Node* RawMachineAssembler::UndefinedConstant() {
  return HeapConstant(isolate()->factory()->undefined_value());
}

Node* RawMachineAssembler::RelocatableIntPtrConstant(intptr_t value,
                                                     RelocInfo::Mode rmode) {
  return kSystemPointerSize == 8
             ? RelocatableInt64Constant(value, rmode)
             : RelocatableInt32Constant(static_cast<int>(value), rmode);
}

Node* RawMachineAssembler::OptimizedAllocate(
    Node* size, AllocationType allocation,
    AllowLargeObjects allow_large_objects) {
  return AddNode(
      simplified()->AllocateRaw(Type::Any(), allocation, allow_large_objects),
      size);
}

Schedule* RawMachineAssembler::ExportForTest() {
  // Compute the correct codegen order.
  DCHECK(schedule_->rpo_order()->empty());
  if (FLAG_trace_turbo_scheduler) {
    PrintF("--- RAW SCHEDULE -------------------------------------------\n");
    StdoutStream{} << *schedule_;
  }
  schedule_->EnsureCFGWellFormedness();
  Scheduler::ComputeSpecialRPO(zone(), schedule_);
  Scheduler::GenerateDominatorTree(schedule_);
  schedule_->PropagateDeferredMark();
  if (FLAG_trace_turbo_scheduler) {
    PrintF("--- EDGE SPLIT AND PROPAGATED DEFERRED SCHEDULE ------------\n");
    StdoutStream{} << *schedule_;
  }
  // Invalidate RawMachineAssembler.
  source_positions_->RemoveDecorator();
  Schedule* schedule = schedule_;
  schedule_ = nullptr;
  return schedule;
}

Graph* RawMachineAssembler::ExportForOptimization() {
  // Compute the correct codegen order.
  DCHECK(schedule_->rpo_order()->empty());
  if (FLAG_trace_turbo_scheduler) {
    PrintF("--- RAW SCHEDULE -------------------------------------------\n");
    StdoutStream{} << *schedule_;
  }
  schedule_->EnsureCFGWellFormedness();
  OptimizeControlFlow(schedule_, graph(), common());
  Scheduler::ComputeSpecialRPO(zone(), schedule_);
  if (FLAG_trace_turbo_scheduler) {
    PrintF("--- SCHEDULE BEFORE GRAPH CREATION -------------------------\n");
    StdoutStream{} << *schedule_;
  }
  MakeReschedulable();
  // Invalidate RawMachineAssembler.
  schedule_ = nullptr;
  return graph();
}

void RawMachineAssembler::OptimizeControlFlow(Schedule* schedule, Graph* graph,
                                              CommonOperatorBuilder* common) {
  for (bool changed = true; changed;) {
    changed = false;
    for (size_t i = 0; i < schedule->all_blocks()->size(); ++i) {
      BasicBlock* block = (*schedule->all_blocks())[i];
      if (block == nullptr) continue;

      // Short-circuit a goto if the succeeding block is not a control-flow
      // merge. This is not really useful on its own since graph construction
      // has the same effect, but combining blocks improves the pattern match
      // on their structure below.
      if (block->control() == BasicBlock::kGoto) {
        DCHECK_EQ(block->SuccessorCount(), 1);
        BasicBlock* successor = block->SuccessorAt(0);
        if (successor->PredecessorCount() == 1) {
          DCHECK_EQ(successor->PredecessorAt(0), block);
          for (Node* node : *successor) {
            schedule->SetBlockForNode(nullptr, node);
            schedule->AddNode(block, node);
          }
          block->set_control(successor->control());
          Node* control_input = successor->control_input();
          block->set_control_input(control_input);
          if (control_input) {
            schedule->SetBlockForNode(block, control_input);
          }
          if (successor->deferred()) block->set_deferred(true);
          block->ClearSuccessors();
          schedule->MoveSuccessors(successor, block);
          schedule->ClearBlockById(successor->id());
          changed = true;
          --i;
          continue;
        }
      }
      // Block-cloning in the simple case where a block consists only of a phi
      // node and a branch on that phi. This just duplicates the branch block
      // for each predecessor, replacing the phi node with the corresponding phi
      // input.
      if (block->control() == BasicBlock::kBranch && block->NodeCount() == 1) {
        Node* phi = block->NodeAt(0);
        if (phi->opcode() != IrOpcode::kPhi) continue;
        Node* branch = block->control_input();
        DCHECK_EQ(branch->opcode(), IrOpcode::kBranch);
        if (NodeProperties::GetValueInput(branch, 0) != phi) continue;
        if (phi->UseCount() != 1) continue;
        DCHECK_EQ(phi->op()->ValueInputCount(), block->PredecessorCount());

        // Turn projection blocks into normal blocks.
        DCHECK_EQ(block->SuccessorCount(), 2);
        BasicBlock* true_block = block->SuccessorAt(0);
        BasicBlock* false_block = block->SuccessorAt(1);
        DCHECK_EQ(true_block->NodeAt(0)->opcode(), IrOpcode::kIfTrue);
        DCHECK_EQ(false_block->NodeAt(0)->opcode(), IrOpcode::kIfFalse);
        (*true_block->begin())->Kill();
        true_block->RemoveNode(true_block->begin());
        (*false_block->begin())->Kill();
        false_block->RemoveNode(false_block->begin());
        true_block->ClearPredecessors();
        false_block->ClearPredecessors();

        size_t arity = block->PredecessorCount();
        for (size_t j = 0; j < arity; ++j) {
          BasicBlock* predecessor = block->PredecessorAt(j);
          predecessor->ClearSuccessors();
          if (block->deferred()) predecessor->set_deferred(true);
          Node* branch_clone = graph->CloneNode(branch);
          int phi_input = static_cast<int>(j);
          NodeProperties::ReplaceValueInput(
              branch_clone, NodeProperties::GetValueInput(phi, phi_input), 0);
          BasicBlock* new_true_block = schedule->NewBasicBlock();
          BasicBlock* new_false_block = schedule->NewBasicBlock();
          new_true_block->AddNode(
              graph->NewNode(common->IfTrue(), branch_clone));
          new_false_block->AddNode(
              graph->NewNode(common->IfFalse(), branch_clone));
          schedule->AddGoto(new_true_block, true_block);
          schedule->AddGoto(new_false_block, false_block);
          DCHECK_EQ(predecessor->control(), BasicBlock::kGoto);
          predecessor->set_control(BasicBlock::kNone);
          schedule->AddBranch(predecessor, branch_clone, new_true_block,
                              new_false_block);
        }
        branch->Kill();
        schedule->ClearBlockById(block->id());
        changed = true;
        continue;
      }
    }
  }
}

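// Re-threads explicit effect and control edges through the graph: the
// schedule built by the raw assembler is walked in RPO order, effect and
// control chains are propagated through each block, and loop headers get
// two-input Loop/EffectPhi nodes (fixed up below). Afterwards the graph can
// be passed through the regular scheduler again (see ExportForOptimization).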
void RawMachineAssembler::MakeReschedulable() {
  std::vector<Node*> block_final_control(schedule_->all_blocks_.size());
  std::vector<Node*> block_final_effect(schedule_->all_blocks_.size());

  struct LoopHeader {
    BasicBlock* block;
    Node* loop_node;
    Node* effect_phi;
  };
  std::vector<LoopHeader> loop_headers;

  // These are hoisted outside of the loop to avoid re-allocation.
  std::vector<Node*> merge_inputs;
  std::vector<Node*> effect_phi_inputs;

  for (BasicBlock* block : *schedule_->rpo_order()) {
    Node* current_control;
    Node* current_effect;
    if (block == schedule_->start()) {
      current_control = current_effect = graph()->start();
    } else if (block == schedule_->end()) {
      for (size_t i = 0; i < block->PredecessorCount(); ++i) {
        NodeProperties::MergeControlToEnd(
            graph(), common(), block->PredecessorAt(i)->control_input());
      }
    } else if (block->IsLoopHeader()) {
      // The graph()->start() inputs are just placeholders until we compute the
      // real back-edges and restructure the control flow so that the loop has
      // exactly two predecessors.
      current_control = graph()->NewNode(common()->Loop(2), graph()->start(),
                                         graph()->start());
      current_effect =
          graph()->NewNode(common()->EffectPhi(2), graph()->start(),
                           graph()->start(), current_control);

      Node* terminate = graph()->NewNode(common()->Terminate(), current_effect,
                                         current_control);
      NodeProperties::MergeControlToEnd(graph(), common(), terminate);
      loop_headers.push_back(
          LoopHeader{block, current_control, current_effect});
    } else if (block->PredecessorCount() == 1) {
      BasicBlock* predecessor = block->PredecessorAt(0);
      DCHECK_LT(predecessor->rpo_number(), block->rpo_number());
      current_effect = block_final_effect[predecessor->id().ToSize()];
      current_control = block_final_control[predecessor->id().ToSize()];
    } else {
      // Create control merge nodes and effect phis for all predecessor blocks.
      merge_inputs.clear();
      effect_phi_inputs.clear();
      int predecessor_count = static_cast<int>(block->PredecessorCount());
      for (int i = 0; i < predecessor_count; ++i) {
        BasicBlock* predecessor = block->PredecessorAt(i);
        DCHECK_LT(predecessor->rpo_number(), block->rpo_number());
        merge_inputs.push_back(block_final_control[predecessor->id().ToSize()]);
        effect_phi_inputs.push_back(
            block_final_effect[predecessor->id().ToSize()]);
      }
      current_control = graph()->NewNode(common()->Merge(predecessor_count),
                                         static_cast<int>(merge_inputs.size()),
                                         merge_inputs.data());
      effect_phi_inputs.push_back(current_control);
      current_effect = graph()->NewNode(
          common()->EffectPhi(predecessor_count),
          static_cast<int>(effect_phi_inputs.size()), effect_phi_inputs.data());
    }

    auto update_current_control_and_effect = [&](Node* node) {
      bool existing_effect_and_control =
          IrOpcode::IsIfProjectionOpcode(node->opcode()) ||
          IrOpcode::IsPhiOpcode(node->opcode());
      if (node->op()->EffectInputCount() > 0) {
        DCHECK_EQ(1, node->op()->EffectInputCount());
        if (existing_effect_and_control) {
          NodeProperties::ReplaceEffectInput(node, current_effect);
        } else {
          node->AppendInput(graph()->zone(), current_effect);
        }
      }
      if (node->op()->ControlInputCount() > 0) {
        DCHECK_EQ(1, node->op()->ControlInputCount());
        if (existing_effect_and_control) {
          NodeProperties::ReplaceControlInput(node, current_control);
        } else {
          node->AppendInput(graph()->zone(), current_control);
        }
      }
      if (node->op()->EffectOutputCount() > 0) {
        DCHECK_EQ(1, node->op()->EffectOutputCount());
        current_effect = node;
      }
      if (node->op()->ControlOutputCount() > 0) {
        current_control = node;
      }
    };

    for (Node* node : *block) {
      update_current_control_and_effect(node);
    }
    if (block->deferred()) MarkControlDeferred(current_control);

    if (Node* block_terminator = block->control_input()) {
      update_current_control_and_effect(block_terminator);
    }

    block_final_effect[block->id().ToSize()] = current_effect;
    block_final_control[block->id().ToSize()] = current_control;
  }

  // Fix up loop backedges and restructure the control flow so that loop
  // nodes have exactly two control predecessors.
  for (const LoopHeader& loop_header : loop_headers) {
    BasicBlock* block = loop_header.block;
    std::vector<BasicBlock*> loop_entries;
    std::vector<BasicBlock*> loop_backedges;
    for (size_t i = 0; i < block->PredecessorCount(); ++i) {
      BasicBlock* predecessor = block->PredecessorAt(i);
      if (block->LoopContains(predecessor)) {
        loop_backedges.push_back(predecessor);
      } else {
        DCHECK(loop_backedges.empty());
        loop_entries.push_back(predecessor);
      }
    }
    DCHECK(!loop_entries.empty());
    DCHECK(!loop_backedges.empty());

    int entrance_count = static_cast<int>(loop_entries.size());
    int backedge_count = static_cast<int>(loop_backedges.size());
    Node* control_loop_entry = CreateNodeFromPredecessors(
        loop_entries, block_final_control, common()->Merge(entrance_count), {});
    Node* control_backedge =
        CreateNodeFromPredecessors(loop_backedges, block_final_control,
                                   common()->Merge(backedge_count), {});
    Node* effect_loop_entry = CreateNodeFromPredecessors(
        loop_entries, block_final_effect, common()->EffectPhi(entrance_count),
        {control_loop_entry});
    Node* effect_backedge = CreateNodeFromPredecessors(
        loop_backedges, block_final_effect, common()->EffectPhi(backedge_count),
        {control_backedge});

    loop_header.loop_node->ReplaceInput(0, control_loop_entry);
    loop_header.loop_node->ReplaceInput(1, control_backedge);
    loop_header.effect_phi->ReplaceInput(0, effect_loop_entry);
    loop_header.effect_phi->ReplaceInput(1, effect_backedge);

    for (Node* node : *block) {
      if (node->opcode() == IrOpcode::kPhi) {
        MakePhiBinary(node, static_cast<int>(loop_entries.size()),
                      control_loop_entry, control_backedge);
      }
    }
  }
}

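// Helper for MakeReschedulable: returns the single sidetable entry if there
// is exactly one predecessor, otherwise creates a new node (e.g. a Merge or
// EffectPhi) whose inputs are the predecessors' sidetable entries followed by
// any additional inputs (such as the merge node for an EffectPhi).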
Node* RawMachineAssembler::CreateNodeFromPredecessors(
    const std::vector<BasicBlock*>& predecessors,
    const std::vector<Node*>& sidetable, const Operator* op,
    const std::vector<Node*>& additional_inputs) {
  if (predecessors.size() == 1) {
    return sidetable[predecessors.front()->id().ToSize()];
  }
  std::vector<Node*> inputs;
  inputs.reserve(predecessors.size());
  for (BasicBlock* predecessor : predecessors) {
    inputs.push_back(sidetable[predecessor->id().ToSize()]);
  }
  for (Node* additional_input : additional_inputs) {
    inputs.push_back(additional_input);
  }
  return graph()->NewNode(op, static_cast<int>(inputs.size()), inputs.data());
}

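// Rewrites a phi with more than two value inputs into a two-input phi by
// grouping the first |split_point| inputs under |left_control| and the
// remaining inputs under |right_control|, introducing intermediate phis for
// groups with more than one input.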
void RawMachineAssembler::MakePhiBinary(Node* phi, int split_point,
                                        Node* left_control,
                                        Node* right_control) {
  int value_count = phi->op()->ValueInputCount();
  if (value_count == 2) return;
  DCHECK_LT(split_point, value_count);
  DCHECK_GT(split_point, 0);

  MachineRepresentation rep = PhiRepresentationOf(phi->op());
  int left_input_count = split_point;
  int right_input_count = value_count - split_point;

  Node* left_input;
  if (left_input_count == 1) {
    left_input = NodeProperties::GetValueInput(phi, 0);
  } else {
    std::vector<Node*> inputs;
    inputs.reserve(left_input_count);
    for (int i = 0; i < left_input_count; ++i) {
      inputs.push_back(NodeProperties::GetValueInput(phi, i));
    }
    inputs.push_back(left_control);
    left_input =
        graph()->NewNode(common()->Phi(rep, static_cast<int>(left_input_count)),
                         static_cast<int>(inputs.size()), inputs.data());
  }

  Node* right_input;
  if (right_input_count == 1) {
    right_input = NodeProperties::GetValueInput(phi, split_point);
  } else {
    std::vector<Node*> inputs;
    for (int i = split_point; i < value_count; ++i) {
      inputs.push_back(NodeProperties::GetValueInput(phi, i));
    }
    inputs.push_back(right_control);
    right_input = graph()->NewNode(
        common()->Phi(rep, static_cast<int>(right_input_count)),
        static_cast<int>(inputs.size()), inputs.data());
  }

  Node* control = NodeProperties::GetControlInput(phi);
  phi->TrimInputCount(3);
  phi->ReplaceInput(0, left_input);
  phi->ReplaceInput(1, right_input);
  phi->ReplaceInput(2, control);
  NodeProperties::ChangeOp(phi, common()->Phi(rep, 2));
}

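// Walks up the control chain from |control_node| to the branch or switch
// case responsible for reaching it and adjusts that operator's BranchHint so
// that the path leading to |control_node| is treated as unlikely (deferred).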
void RawMachineAssembler::MarkControlDeferred(Node* control_node) {
  BranchHint new_branch_hint;
  Node* responsible_branch = nullptr;
  while (responsible_branch == nullptr) {
    switch (control_node->opcode()) {
      case IrOpcode::kIfException:
        // IfException projections are deferred by default.
        return;
      case IrOpcode::kIfSuccess:
        control_node = NodeProperties::GetControlInput(control_node);
        continue;
      case IrOpcode::kIfValue: {
        IfValueParameters parameters = IfValueParametersOf(control_node->op());
        if (parameters.hint() != BranchHint::kFalse) {
          NodeProperties::ChangeOp(
              control_node, common()->IfValue(parameters.value(),
                                              parameters.comparison_order(),
                                              BranchHint::kFalse));
        }
        return;
      }
      case IrOpcode::kIfDefault:
        if (BranchHintOf(control_node->op()) != BranchHint::kFalse) {
          NodeProperties::ChangeOp(control_node,
                                   common()->IfDefault(BranchHint::kFalse));
        }
        return;
      case IrOpcode::kIfTrue: {
        Node* branch = NodeProperties::GetControlInput(control_node);
        BranchHint hint = BranchHintOf(branch->op());
        if (hint == BranchHint::kTrue) {
          // The other possibility is also deferred, so the responsible branch
          // has to be before.
          control_node = NodeProperties::GetControlInput(branch);
          continue;
        }
        new_branch_hint = BranchHint::kFalse;
        responsible_branch = branch;
        break;
      }
      case IrOpcode::kIfFalse: {
        Node* branch = NodeProperties::GetControlInput(control_node);
        BranchHint hint = BranchHintOf(branch->op());
        if (hint == BranchHint::kFalse) {
          // The other possibility is also deferred, so the responsible branch
          // has to be before.
          control_node = NodeProperties::GetControlInput(branch);
          continue;
        }
        new_branch_hint = BranchHint::kTrue;
        responsible_branch = branch;
        break;
      }
      case IrOpcode::kMerge:
        for (int i = 0; i < control_node->op()->ControlInputCount(); ++i) {
          MarkControlDeferred(NodeProperties::GetControlInput(control_node, i));
        }
        return;
      case IrOpcode::kLoop:
        control_node = NodeProperties::GetControlInput(control_node, 0);
        continue;
      case IrOpcode::kBranch:
      case IrOpcode::kSwitch:
        UNREACHABLE();
      case IrOpcode::kStart:
        return;
      default:
        DCHECK_EQ(1, control_node->op()->ControlInputCount());
        control_node = NodeProperties::GetControlInput(control_node);
        continue;
    }
  }

  BranchHint hint = BranchHintOf(responsible_branch->op());
  if (hint == new_branch_hint) return;
  NodeProperties::ChangeOp(responsible_branch,
                           common()->Branch(new_branch_hint));
}

Node* RawMachineAssembler::TargetParameter() {
  DCHECK_NOT_NULL(target_parameter_);
  return target_parameter_;
}

Node* RawMachineAssembler::Parameter(size_t index) {
  DCHECK_LT(index, parameter_count());
  return parameters_[index];
}

void RawMachineAssembler::Goto(RawMachineLabel* label) {
  DCHECK(current_block_ != schedule()->end());
  schedule()->AddGoto(CurrentBlock(), Use(label));
  current_block_ = nullptr;
}

void RawMachineAssembler::Branch(Node* condition, RawMachineLabel* true_val,
                                 RawMachineLabel* false_val) {
  DCHECK(current_block_ != schedule()->end());
  Node* branch = MakeNode(common()->Branch(BranchHint::kNone), 1, &condition);
  BasicBlock* true_block = schedule()->NewBasicBlock();
  BasicBlock* false_block = schedule()->NewBasicBlock();
  schedule()->AddBranch(CurrentBlock(), branch, true_block, false_block);

  true_block->AddNode(MakeNode(common()->IfTrue(), 1, &branch));
  schedule()->AddGoto(true_block, Use(true_val));

  false_block->AddNode(MakeNode(common()->IfFalse(), 1, &branch));
  schedule()->AddGoto(false_block, Use(false_val));

  current_block_ = nullptr;
}

void RawMachineAssembler::Continuations(Node* call, RawMachineLabel* if_success,
                                        RawMachineLabel* if_exception) {
  DCHECK_NOT_NULL(schedule_);
  DCHECK_NOT_NULL(current_block_);
  schedule()->AddCall(CurrentBlock(), call, Use(if_success), Use(if_exception));
  current_block_ = nullptr;
}

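// Emits a Switch on |index| with one IfValue projection block per case value
// plus an IfDefault block; each projection block immediately jumps to the
// corresponding label.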
void RawMachineAssembler::Switch(Node* index, RawMachineLabel* default_label,
                                 const int32_t* case_values,
                                 RawMachineLabel** case_labels,
                                 size_t case_count) {
  DCHECK_NE(schedule()->end(), current_block_);
  size_t succ_count = case_count + 1;
  Node* switch_node = MakeNode(common()->Switch(succ_count), 1, &index);
  BasicBlock** succ_blocks = zone()->NewArray<BasicBlock*>(succ_count);
  for (size_t i = 0; i < case_count; ++i) {
    int32_t case_value = case_values[i];
    BasicBlock* case_block = schedule()->NewBasicBlock();
    Node* case_node =
        graph()->NewNode(common()->IfValue(case_value), switch_node);
    schedule()->AddNode(case_block, case_node);
    schedule()->AddGoto(case_block, Use(case_labels[i]));
    succ_blocks[i] = case_block;
  }
  BasicBlock* default_block = schedule()->NewBasicBlock();
  Node* default_node = graph()->NewNode(common()->IfDefault(), switch_node);
  schedule()->AddNode(default_block, default_node);
  schedule()->AddGoto(default_block, Use(default_label));
  succ_blocks[case_count] = default_block;
  schedule()->AddSwitch(CurrentBlock(), switch_node, succ_blocks, succ_count);
  current_block_ = nullptr;
}

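// Note: the first value input of a Return node is the pop count, i.e. the
// number of additional stack slots to drop on return. The plain Return
// variants pass zero; PopAndReturn below forwards the caller-supplied value.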
void RawMachineAssembler::Return(Node* value) {
  Node* values[] = {Int32Constant(0), value};
  Node* ret = MakeNode(common()->Return(1), 2, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::Return(Node* v1, Node* v2) {
  Node* values[] = {Int32Constant(0), v1, v2};
  Node* ret = MakeNode(common()->Return(2), 3, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::Return(Node* v1, Node* v2, Node* v3) {
  Node* values[] = {Int32Constant(0), v1, v2, v3};
  Node* ret = MakeNode(common()->Return(3), 4, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::Return(Node* v1, Node* v2, Node* v3, Node* v4) {
  Node* values[] = {Int32Constant(0), v1, v2, v3, v4};
  Node* ret = MakeNode(common()->Return(4), 5, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::Return(int count, Node* vs[]) {
  using Node_ptr = Node*;
  Node** values = new Node_ptr[count + 1];
  values[0] = Int32Constant(0);
  for (int i = 0; i < count; ++i) values[i + 1] = vs[i];
  Node* ret = MakeNode(common()->Return(count), count + 1, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
  delete[] values;
}

void RawMachineAssembler::PopAndReturn(Node* pop, Node* value) {
  // PopAndReturn is supposed to be used ONLY in CSA/Torque builtins for
  // dropping ALL JS arguments that are currently located on the stack.
  // The check below ensures that there are no directly accessible stack
  // parameters from the current builtin, which implies that the builtin with
  // JS calling convention (TFJ) was created with kDontAdaptArgumentsSentinel.
  // This simplifies the semantics of this instruction, because when directly
  // accessible stack parameters are present it is impossible to distinguish
  // the following cases:
  // 1) the stack parameter is included in the JS arguments (and therefore it
  //    will be dropped as part of the 'pop' number of arguments),
  // 2) the stack parameter is NOT included in the JS arguments (and therefore
  //    it should be dropped IN ADDITION to the 'pop' number of arguments).
  // Additionally, in order to simplify assembly code, PopAndReturn is also
  // not allowed in builtins with stub linkage and parameters on stack.
  CHECK_EQ(call_descriptor()->ParameterSlotCount(), 0);
  Node* values[] = {pop, value};
  Node* ret = MakeNode(common()->Return(1), 2, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::PopAndReturn(Node* pop, Node* v1, Node* v2) {
  Node* values[] = {pop, v1, v2};
  Node* ret = MakeNode(common()->Return(2), 3, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::PopAndReturn(Node* pop, Node* v1, Node* v2,
                                       Node* v3) {
  Node* values[] = {pop, v1, v2, v3};
  Node* ret = MakeNode(common()->Return(3), 4, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::PopAndReturn(Node* pop, Node* v1, Node* v2, Node* v3,
                                       Node* v4) {
  Node* values[] = {pop, v1, v2, v3, v4};
  Node* ret = MakeNode(common()->Return(4), 5, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::AbortCSADcheck(Node* message) {
  AddNode(machine()->AbortCSADcheck(), message);
}

void RawMachineAssembler::DebugBreak() { AddNode(machine()->DebugBreak()); }

void RawMachineAssembler::Unreachable() {
  Node* ret = MakeNode(common()->Throw(), 0, nullptr);
  schedule()->AddThrow(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::Comment(const std::string& msg) {
  size_t length = msg.length() + 1;
  char* zone_buffer = zone()->NewArray<char>(length);
  MemCopy(zone_buffer, msg.c_str(), length);
  AddNode(machine()->Comment(zone_buffer));
}

void RawMachineAssembler::StaticAssert(Node* value, const char* source) {
  AddNode(common()->StaticAssert(source), value);
}

Node* RawMachineAssembler::CallN(CallDescriptor* call_descriptor,
                                 int input_count, Node* const* inputs) {
  DCHECK(!call_descriptor->NeedsFrameState());
  // +1 is for target.
  DCHECK_EQ(input_count, call_descriptor->ParameterCount() + 1);
  return AddNode(common()->Call(call_descriptor), input_count, inputs);
}

Node* RawMachineAssembler::CallNWithFrameState(CallDescriptor* call_descriptor,
                                               int input_count,
                                               Node* const* inputs) {
  DCHECK(call_descriptor->NeedsFrameState());
  // +2 is for target and frame state.
  DCHECK_EQ(input_count, call_descriptor->ParameterCount() + 2);
  return AddNode(common()->Call(call_descriptor), input_count, inputs);
}

void RawMachineAssembler::TailCallN(CallDescriptor* call_descriptor,
                                    int input_count, Node* const* inputs) {
  // +1 is for target.
  DCHECK_EQ(input_count, call_descriptor->ParameterCount() + 1);
  Node* tail_call =
      MakeNode(common()->TailCall(call_descriptor), input_count, inputs);
  schedule()->AddTailCall(CurrentBlock(), tail_call);
  current_block_ = nullptr;
}

namespace {

enum FunctionDescriptorMode { kHasFunctionDescriptor, kNoFunctionDescriptor };

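// Builds a MachineSignature for the C call, derives a simplified C call
// descriptor from it, and emits a Call node whose first input is the function
// address followed by the argument nodes.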
Node* CallCFunctionImpl(
    RawMachineAssembler* rasm, Node* function,
    base::Optional<MachineType> return_type,
    std::initializer_list<RawMachineAssembler::CFunctionArg> args,
    bool caller_saved_regs, SaveFPRegsMode mode,
    FunctionDescriptorMode no_function_descriptor) {
  static constexpr std::size_t kNumCArgs = 10;

  MachineSignature::Builder builder(rasm->zone(), return_type ? 1 : 0,
                                    args.size());
  if (return_type) {
    builder.AddReturn(*return_type);
  }
  for (const auto& arg : args) builder.AddParam(arg.first);

  bool caller_saved_fp_regs =
      caller_saved_regs && (mode == SaveFPRegsMode::kSave);
  CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
  if (caller_saved_regs) flags |= CallDescriptor::kCallerSavedRegisters;
  if (caller_saved_fp_regs) flags |= CallDescriptor::kCallerSavedFPRegisters;
  if (no_function_descriptor) flags |= CallDescriptor::kNoFunctionDescriptor;
  auto call_descriptor =
      Linkage::GetSimplifiedCDescriptor(rasm->zone(), builder.Build(), flags);

  base::SmallVector<Node*, kNumCArgs> nodes(args.size() + 1);
  nodes[0] = function;
  std::transform(
      args.begin(), args.end(), std::next(nodes.begin()),
      [](const RawMachineAssembler::CFunctionArg& arg) { return arg.second; });

  auto common = rasm->common();
  return rasm->AddNode(common->Call(call_descriptor),
                       static_cast<int>(nodes.size()), nodes.begin());
}

}  // namespace

Node* RawMachineAssembler::CallCFunction(
    Node* function, base::Optional<MachineType> return_type,
    std::initializer_list<RawMachineAssembler::CFunctionArg> args) {
  return CallCFunctionImpl(this, function, return_type, args, false,
                           SaveFPRegsMode::kIgnore, kHasFunctionDescriptor);
}

Node* RawMachineAssembler::CallCFunctionWithoutFunctionDescriptor(
    Node* function, MachineType return_type,
    std::initializer_list<RawMachineAssembler::CFunctionArg> args) {
  return CallCFunctionImpl(this, function, return_type, args, false,
                           SaveFPRegsMode::kIgnore, kNoFunctionDescriptor);
}

Node* RawMachineAssembler::CallCFunctionWithCallerSavedRegisters(
    Node* function, MachineType return_type, SaveFPRegsMode mode,
    std::initializer_list<RawMachineAssembler::CFunctionArg> args) {
  return CallCFunctionImpl(this, function, return_type, args, true, mode,
                           kHasFunctionDescriptor);
}

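// Labels are lazily associated with basic blocks: Use() marks a label as used
// and EnsureBlock() creates the label's block on first access.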
BasicBlock* RawMachineAssembler::Use(RawMachineLabel* label) {
  label->used_ = true;
  return EnsureBlock(label);
}

BasicBlock* RawMachineAssembler::EnsureBlock(RawMachineLabel* label) {
  if (label->block_ == nullptr) {
    label->block_ = schedule()->NewBasicBlock();
  }
  return label->block_;
}

void RawMachineAssembler::Bind(RawMachineLabel* label) {
  DCHECK_NULL(current_block_);
  DCHECK(!label->bound_);
  label->bound_ = true;
  current_block_ = EnsureBlock(label);
  current_block_->set_deferred(label->deferred_);
}

#if DEBUG
void RawMachineAssembler::Bind(RawMachineLabel* label,
                               AssemblerDebugInfo info) {
  if (current_block_ != nullptr) {
    std::stringstream str;
    str << "Binding label without closing previous block:"
        << "\n#    label:          " << info
        << "\n#    previous block: " << *current_block_;
    FATAL("%s", str.str().c_str());
  }
  Bind(label);
  current_block_->set_debug_info(info);
}

void RawMachineAssembler::PrintCurrentBlock(std::ostream& os) {
  os << CurrentBlock();
}

void RawMachineAssembler::SetInitialDebugInformation(
    AssemblerDebugInfo debug_info) {
  CurrentBlock()->set_debug_info(debug_info);
}
#endif  // DEBUG

bool RawMachineAssembler::InsideBlock() { return current_block_ != nullptr; }

BasicBlock* RawMachineAssembler::CurrentBlock() {
  DCHECK(current_block_);
  return current_block_;
}

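// The trailing graph()->start() input serves as a placeholder control input
// for the phi; it is replaced with the real merge/loop node when the schedule
// is converted back into a graph (see MakeReschedulable).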
Node* RawMachineAssembler::Phi(MachineRepresentation rep, int input_count,
                               Node* const* inputs) {
  Node** buffer = zone()->NewArray<Node*>(input_count + 1);
  std::copy(inputs, inputs + input_count, buffer);
  buffer[input_count] = graph()->start();
  return AddNode(common()->Phi(rep, input_count), input_count + 1, buffer);
}

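// Inserts the new value input just before the phi's trailing control input
// and widens the phi operator by one value input.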
void RawMachineAssembler::AppendPhiInput(Node* phi, Node* new_input) {
  const Operator* op = phi->op();
  const Operator* new_op = common()->ResizeMergeOrPhi(op, phi->InputCount());
  phi->InsertInput(zone(), phi->InputCount() - 1, new_input);
  NodeProperties::ChangeOp(phi, new_op);
}

Node* RawMachineAssembler::AddNode(const Operator* op, int input_count,
                                   Node* const* inputs) {
  DCHECK_NOT_NULL(schedule_);
  DCHECK_NOT_NULL(current_block_);
  Node* node = MakeNode(op, input_count, inputs);
  schedule()->AddNode(CurrentBlock(), node);
  return node;
}

Node* RawMachineAssembler::MakeNode(const Operator* op, int input_count,
                                    Node* const* inputs) {
  // The raw machine assembler nodes do not have effect and control inputs,
  // so we disable checking input counts here.
  return graph()->NewNodeUnchecked(op, input_count, inputs);
}

RawMachineLabel::~RawMachineLabel() {
#if DEBUG
  if (bound_ == used_) return;
  std::stringstream str;
  if (bound_) {
    str << "A label has been bound but it's not used."
        << "\n#    label: " << *block_;
  } else {
    str << "A label has been used but it's not bound.";
  }
  FATAL("%s", str.str().c_str());
#endif  // DEBUG
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8