// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/backend/jump-threading.h"
#include "src/compiler/backend/code-generator-impl.h"

namespace v8 {
namespace internal {
namespace compiler {

#define TRACE(...)                                \
  do {                                            \
    if (FLAG_trace_turbo_jt) PrintF(__VA_ARGS__); \
  } while (false)

namespace {

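// State for the depth-first search that computes, for every block, the block
// it should be forwarded to. |result| holds one entry per block: a sentinel
// (unvisited or on-stack) while the search is in progress, and the final
// forwarding target once the block has been resolved.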
struct JumpThreadingState {
  bool forwarded;
  ZoneVector<RpoNumber>& result;
  ZoneStack<RpoNumber>& stack;

  void Clear(size_t count) { result.assign(count, unvisited()); }
  void PushIfUnvisited(RpoNumber num) {
    if (result[num.ToInt()] == unvisited()) {
      stack.push(num);
      result[num.ToInt()] = onstack();
    }
  }
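  // Records that the block on top of the stack forwards to {to}. Depending on
  // the state of {to}, this resolves the top block, recurses into {to}, or
  // breaks a cycle of empty blocks.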
  void Forward(RpoNumber to) {
    RpoNumber from = stack.top();
    RpoNumber to_to = result[to.ToInt()];
    bool pop = true;
    if (to == from) {
      TRACE("  xx %d\n", from.ToInt());
      result[from.ToInt()] = from;
    } else if (to_to == unvisited()) {
      TRACE("  fw %d -> %d (recurse)\n", from.ToInt(), to.ToInt());
      stack.push(to);
      result[to.ToInt()] = onstack();
      pop = false;  // recurse.
    } else if (to_to == onstack()) {
      TRACE("  fw %d -> %d (cycle)\n", from.ToInt(), to.ToInt());
      result[from.ToInt()] = to;  // break the cycle.
      forwarded = true;
    } else {
      TRACE("  fw %d -> %d (forward)\n", from.ToInt(), to.ToInt());
      result[from.ToInt()] = to_to;  // forward the block.
      forwarded = true;
    }
    if (pop) stack.pop();
  }
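  // Sentinel values stored in |result| while the search is in progress.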
  RpoNumber unvisited() { return RpoNumber::FromInt(-1); }
  RpoNumber onstack() { return RpoNumber::FromInt(-2); }
};

}  // namespace

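// Computes, for each block, the block that control should be forwarded to: a
// block containing only nops followed by an unconditional jump (or an empty
// return that can be shared) is skipped in favor of its eventual destination.
// Returns true if any block ends up forwarded to a different block.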
bool JumpThreading::ComputeForwarding(Zone* local_zone,
                                      ZoneVector<RpoNumber>* result,
                                      InstructionSequence* code,
                                      bool frame_at_start) {
  ZoneStack<RpoNumber> stack(local_zone);
  JumpThreadingState state = {false, *result, stack};
  state.Clear(code->InstructionBlockCount());
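  // Track the first empty return block seen for each |must_deconstruct_frame|
  // flavor, together with its immediate return size; later empty returns with
  // the same size forward to it.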
  RpoNumber empty_deconstruct_frame_return_block = RpoNumber::Invalid();
  int32_t empty_deconstruct_frame_return_size;
  RpoNumber empty_no_deconstruct_frame_return_block = RpoNumber::Invalid();
  int32_t empty_no_deconstruct_frame_return_size;

  // Iterate over the blocks forward, pushing the blocks onto the stack.
  for (auto const instruction_block : code->instruction_blocks()) {
    RpoNumber current = instruction_block->rpo_number();
    state.PushIfUnvisited(current);

    // Process the stack, which implements DFS through empty blocks.
    while (!state.stack.empty()) {
      InstructionBlock* block = code->InstructionBlockAt(state.stack.top());
      // Process the instructions in a block up to a non-empty instruction.
      TRACE("jt [%d] B%d\n", static_cast<int>(stack.size()),
            block->rpo_number().ToInt());
      RpoNumber fw = block->rpo_number();
      bool fallthru = true;
      for (int i = block->code_start(); i < block->code_end(); ++i) {
        Instruction* instr = code->InstructionAt(i);
        if (!instr->AreMovesRedundant()) {
          // can't skip instructions with non-redundant moves.
          TRACE("  parallel move\n");
          fallthru = false;
        } else if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
          // can't skip instructions with flags continuations.
          TRACE("  flags\n");
          fallthru = false;
        } else if (instr->IsNop()) {
          // skip nops.
          TRACE("  nop\n");
          continue;
        } else if (instr->arch_opcode() == kArchJmp) {
          // try to forward the jump instruction.
          TRACE("  jmp\n");
          // if this block deconstructs the frame, we can't forward it.
          // TODO(mtrofin): we can still forward if we end up building
          // the frame at start. So we should move the decision of whether
          // to build a frame or not in the register allocator, and trickle it
          // here and to the code generator.
          if (frame_at_start || !(block->must_deconstruct_frame() ||
                                  block->must_construct_frame())) {
            fw = code->InputRpo(instr, 0);
          }
          fallthru = false;
        } else if (instr->IsRet()) {
          TRACE("  ret\n");
          if (fallthru) {
            CHECK_IMPLIES(block->must_construct_frame(),
                          block->must_deconstruct_frame());
            // Only handle returns with immediate/constant operands, since
            // they must always be the same for all returns in a function.
            // Dynamic return values might use different registers at
            // different return sites and therefore cannot be shared.
            if (instr->InputAt(0)->IsImmediate()) {
              int32_t return_size = ImmediateOperand::cast(instr->InputAt(0))
                                        ->inline_int32_value();
              // Instructions can be shared only for blocks that share
              // the same |must_deconstruct_frame| attribute.
              if (block->must_deconstruct_frame()) {
                if (empty_deconstruct_frame_return_block ==
                    RpoNumber::Invalid()) {
                  empty_deconstruct_frame_return_block = block->rpo_number();
                  empty_deconstruct_frame_return_size = return_size;
                } else if (empty_deconstruct_frame_return_size == return_size) {
                  fw = empty_deconstruct_frame_return_block;
                  block->clear_must_deconstruct_frame();
                }
              } else {
                if (empty_no_deconstruct_frame_return_block ==
                    RpoNumber::Invalid()) {
                  empty_no_deconstruct_frame_return_block = block->rpo_number();
                  empty_no_deconstruct_frame_return_size = return_size;
                } else if (empty_no_deconstruct_frame_return_size ==
                           return_size) {
                  fw = empty_no_deconstruct_frame_return_block;
                }
              }
            }
          }
          fallthru = false;
        } else {
          // can't skip other instructions.
          TRACE("  other\n");
          fallthru = false;
        }
        break;
      }
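      // A block containing only skippable instructions falls through to the
      // next block in RPO order, which becomes its forwarding target.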
      if (fallthru) {
        int next = 1 + block->rpo_number().ToInt();
        if (next < code->InstructionBlockCount()) fw = RpoNumber::FromInt(next);
      }
      state.Forward(fw);
    }
  }

#ifdef DEBUG
  for (RpoNumber num : *result) {
    DCHECK(num.IsValid());
  }
#endif

  if (FLAG_trace_turbo_jt) {
    for (int i = 0; i < static_cast<int>(result->size()); i++) {
      TRACE("B%d ", i);
      int to = (*result)[i].ToInt();
      if (i != to) {
        TRACE("-> B%d\n", to);
      } else {
        TRACE("\n");
      }
    }
  }

  return state.forwarded;
}

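// Rewrites the instruction sequence according to the forwarding computed by
// ComputeForwarding: redundant jumps and returns in skipped blocks are
// overwritten with nops, RPO immediates are redirected to the forwarded
// blocks, and assembly-order numbers are recomputed so that skipped blocks do
// not break IsNextInAssemblyOrder().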
void JumpThreading::ApplyForwarding(Zone* local_zone,
                                    ZoneVector<RpoNumber> const& result,
                                    InstructionSequence* code) {
  if (!FLAG_turbo_jt) return;

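  // skip[i] is true for blocks that will not be emitted: blocks that forward
  // elsewhere and are not reached by fallthrough from the previous block.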
  ZoneVector<bool> skip(static_cast<int>(result.size()), false, local_zone);

  // Skip empty blocks when the previous block doesn't fall through.
  bool prev_fallthru = true;
  for (auto const block : code->ao_blocks()) {
    RpoNumber block_rpo = block->rpo_number();
    int block_num = block_rpo.ToInt();
    RpoNumber result_rpo = result[block_num];
    skip[block_num] = !prev_fallthru && result_rpo != block_rpo;

    if (result_rpo != block_rpo) {
      // We need the handler information to be propagated, so that branch
      // targets are annotated as necessary for control flow integrity
      // checks (when enabled).
      if (code->InstructionBlockAt(block_rpo)->IsHandler()) {
        code->InstructionBlockAt(result_rpo)->MarkHandler();
      }
    }

    bool fallthru = true;
    for (int i = block->code_start(); i < block->code_end(); ++i) {
      Instruction* instr = code->InstructionAt(i);
      FlagsMode mode = FlagsModeField::decode(instr->opcode());
      if (mode == kFlags_branch) {
        fallthru = false;  // branches don't fall through to the next block.
      } else if (instr->arch_opcode() == kArchJmp ||
                 instr->arch_opcode() == kArchRet) {
        if (skip[block_num]) {
          // Overwrite a redundant jump with a nop.
          TRACE("jt-fw nop @%d\n", i);
          instr->OverwriteWithNop();
          // If this block was marked as a handler, it can be unmarked now.
          code->InstructionBlockAt(block_rpo)->UnmarkHandler();
        }
        fallthru = false;  // jumps don't fall through to the next block.
      }
    }
    prev_fallthru = fallthru;
  }

  // Patch RPO immediates.
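  // Operands that refer to blocks by RPO number (e.g. jump table targets)
  // must now point at the forwarded blocks.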
  InstructionSequence::RpoImmediates& rpo_immediates = code->rpo_immediates();
  for (size_t i = 0; i < rpo_immediates.size(); i++) {
    RpoNumber rpo = rpo_immediates[i];
    if (rpo.IsValid()) {
      RpoNumber fw = result[rpo.ToInt()];
      if (fw != rpo) rpo_immediates[i] = fw;
    }
  }

  // Renumber the blocks so that IsNextInAssemblyOrder() will return true,
  // even if there are skipped blocks in-between.
  int ao = 0;
  for (auto const block : code->ao_blocks()) {
    block->set_ao_number(RpoNumber::FromInt(ao));
    if (!skip[block->rpo_number().ToInt()]) ao++;
  }
}

#undef TRACE

}  // namespace compiler
}  // namespace internal
}  // namespace v8