1// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/interpreter/bytecode-generator.h"
6
7#include <map>
8#include <unordered_map>
9#include <unordered_set>
10
11#include "include/v8-extension.h"
12#include "src/api/api-inl.h"
13#include "src/ast/ast-source-ranges.h"
14#include "src/ast/ast.h"
15#include "src/ast/scopes.h"
16#include "src/builtins/builtins-constructor.h"
17#include "src/codegen/compiler.h"
18#include "src/codegen/unoptimized-compilation-info.h"
19#include "src/common/globals.h"
20#include "src/compiler-dispatcher/lazy-compile-dispatcher.h"
21#include "src/heap/parked-scope.h"
22#include "src/interpreter/bytecode-flags.h"
23#include "src/interpreter/bytecode-jump-table.h"
24#include "src/interpreter/bytecode-label.h"
25#include "src/interpreter/bytecode-register-allocator.h"
26#include "src/interpreter/bytecode-register.h"
27#include "src/interpreter/control-flow-builders.h"
28#include "src/logging/local-logger.h"
29#include "src/logging/log.h"
30#include "src/numbers/conversions.h"
31#include "src/objects/debug-objects.h"
32#include "src/objects/literal-objects-inl.h"
33#include "src/objects/objects-inl.h"
34#include "src/objects/smi.h"
35#include "src/objects/template-objects-inl.h"
36#include "src/parsing/parse-info.h"
37#include "src/parsing/token.h"
38#include "src/utils/ostreams.h"
39
40namespace v8 {
41namespace internal {
42namespace interpreter {
43
44// Scoped class tracking context objects created by the visitor. Represents
45// mutations of the context chain within the function body, allowing pushing and
46// popping of the current {context_register} during visitation.
47class V8_NODISCARD BytecodeGenerator::ContextScope {
48 public:
49  ContextScope(BytecodeGenerator* generator, Scope* scope,
50               Register outer_context_reg = Register())
51      : generator_(generator),
52        scope_(scope),
53        outer_(generator_->execution_context()),
54        register_(Register::current_context()),
55        depth_(0) {
56    DCHECK(scope->NeedsContext() || outer_ == nullptr);
57    if (outer_) {
58      depth_ = outer_->depth_ + 1;
59
60      // Push the outer context into a new context register.
61      if (!outer_context_reg.is_valid()) {
62        outer_context_reg = generator_->register_allocator()->NewRegister();
63      }
64      outer_->set_register(outer_context_reg);
65      generator_->builder()->PushContext(outer_context_reg);
66    }
67    generator_->set_execution_context(this);
68  }
69
70  ~ContextScope() {
71    if (outer_) {
72      DCHECK_EQ(register_.index(), Register::current_context().index());
73      generator_->builder()->PopContext(outer_->reg());
74      outer_->set_register(register_);
75    }
76    generator_->set_execution_context(outer_);
77  }
78
79  ContextScope(const ContextScope&) = delete;
80  ContextScope& operator=(const ContextScope&) = delete;
81
82  // Returns the depth of the given |scope| for the current execution context.
83  int ContextChainDepth(Scope* scope) {
84    return scope_->ContextChainLength(scope);
85  }
86
87  // Returns the execution context at |depth| in the current context chain if it
88  // is a function local execution context, otherwise returns nullptr.
89  ContextScope* Previous(int depth) {
90    if (depth > depth_) {
91      return nullptr;
92    }
93
94    ContextScope* previous = this;
95    for (int i = depth; i > 0; --i) {
96      previous = previous->outer_;
97    }
98    return previous;
99  }
100
101  Register reg() const { return register_; }
102
103 private:
104  const BytecodeArrayBuilder* builder() const { return generator_->builder(); }
105
106  void set_register(Register reg) { register_ = reg; }
107
108  BytecodeGenerator* generator_;
109  Scope* scope_;
110  ContextScope* outer_;
111  Register register_;
112  int depth_;
113};
114
115// Scoped class for tracking control statements entered by the
116// visitor.
117class V8_NODISCARD BytecodeGenerator::ControlScope {
118 public:
119  explicit ControlScope(BytecodeGenerator* generator)
120      : generator_(generator),
121        outer_(generator->execution_control()),
122        context_(generator->execution_context()) {
123    generator_->set_execution_control(this);
124  }
125  ~ControlScope() { generator_->set_execution_control(outer()); }
126  ControlScope(const ControlScope&) = delete;
127  ControlScope& operator=(const ControlScope&) = delete;
128
129  void Break(Statement* stmt) {
130    PerformCommand(CMD_BREAK, stmt, kNoSourcePosition);
131  }
132  void Continue(Statement* stmt) {
133    PerformCommand(CMD_CONTINUE, stmt, kNoSourcePosition);
134  }
135  void ReturnAccumulator(int source_position) {
136    PerformCommand(CMD_RETURN, nullptr, source_position);
137  }
138  void AsyncReturnAccumulator(int source_position) {
139    PerformCommand(CMD_ASYNC_RETURN, nullptr, source_position);
140  }
141
142  class DeferredCommands;
143
144 protected:
145  enum Command {
146    CMD_BREAK,
147    CMD_CONTINUE,
148    CMD_RETURN,
149    CMD_ASYNC_RETURN,
150    CMD_RETHROW
151  };
152  static constexpr bool CommandUsesAccumulator(Command command) {
153    return command != CMD_BREAK && command != CMD_CONTINUE;
154  }
155
156  void PerformCommand(Command command, Statement* statement,
157                      int source_position);
158  virtual bool Execute(Command command, Statement* statement,
159                       int source_position) = 0;
160
161  // Helper to pop the context chain to a depth expected by this control scope.
162  // Note that it is the responsibility of each individual {Execute} method to
163  // trigger this when commands are handled and control-flow continues locally.
164  void PopContextToExpectedDepth();
165
166  BytecodeGenerator* generator() const { return generator_; }
167  ControlScope* outer() const { return outer_; }
168  ContextScope* context() const { return context_; }
169
170 private:
171  BytecodeGenerator* generator_;
172  ControlScope* outer_;
173  ContextScope* context_;
174};
175
176// Helper class for a try-finally control scope. It can record intercepted
177// control-flow commands that cause entry into a finally-block, and re-apply
178// them after again leaving that block. Special tokens are used to identify
179// paths going through the finally-block to dispatch after leaving the block.
180class V8_NODISCARD BytecodeGenerator::ControlScope::DeferredCommands final {
181 public:
182  // Fixed value tokens for paths we know we need.
183  // Fallthrough is set to -1 to make it the fallthrough case of the jump table,
184  // where the remaining cases start at 0.
185  static const int kFallthroughToken = -1;
186  // TODO(leszeks): Rethrow being 0 makes it use up a valuable LdaZero, which
187  // means that other commands (such as break or return) have to use LdaSmi.
188  // This can very slightly bloat bytecode, so perhaps token values should all
189  // be shifted down by 1.
190  static const int kRethrowToken = 0;
191
192  DeferredCommands(BytecodeGenerator* generator, Register token_register,
193                   Register result_register)
194      : generator_(generator),
195        deferred_(generator->zone()),
196        token_register_(token_register),
197        result_register_(result_register),
198        return_token_(-1),
199        async_return_token_(-1) {
200    // There's always a rethrow path.
201    // TODO(leszeks): We could decouple deferred_ index and token to allow us
202    // to still push this lazily.
203    STATIC_ASSERT(kRethrowToken == 0);
204    deferred_.push_back({CMD_RETHROW, nullptr, kRethrowToken});
205  }
206
207  // One recorded control-flow command.
208  struct Entry {
209    Command command;       // The command type being applied on this path.
210    Statement* statement;  // The target statement for the command or {nullptr}.
211    int token;             // A token identifying this particular path.
212  };
213
214  // Records a control-flow command while entering the finally-block. This also
215  // generates a new dispatch token that identifies one particular path. This
216  // expects the result to be in the accumulator.
217  void RecordCommand(Command command, Statement* statement) {
218    int token = GetTokenForCommand(command, statement);
219
220    DCHECK_LT(token, deferred_.size());
221    DCHECK_EQ(deferred_[token].command, command);
222    DCHECK_EQ(deferred_[token].statement, statement);
223    DCHECK_EQ(deferred_[token].token, token);
224
225    if (CommandUsesAccumulator(command)) {
226      builder()->StoreAccumulatorInRegister(result_register_);
227    }
228    builder()->LoadLiteral(Smi::FromInt(token));
229    builder()->StoreAccumulatorInRegister(token_register_);
230    if (!CommandUsesAccumulator(command)) {
231      // If we're not saving the accumulator in the result register, shove a
232      // harmless value there instead so that it is still considered "killed" in
233      // the liveness analysis. Normally we would LdaUndefined first, but the
234      // Smi token value is just as good, and by reusing it we save a bytecode.
235      builder()->StoreAccumulatorInRegister(result_register_);
236    }
237  }
238
239  // Records the dispatch token to be used to identify the re-throw path when
240  // the finally-block has been entered through the exception handler. This
241  // expects the exception to be in the accumulator.
242  void RecordHandlerReThrowPath() {
243    // The accumulator contains the exception object.
244    RecordCommand(CMD_RETHROW, nullptr);
245  }
246
247  // Records the dispatch token to be used to identify the implicit fall-through
248  // path at the end of a try-block into the corresponding finally-block.
249  void RecordFallThroughPath() {
250    builder()->LoadLiteral(Smi::FromInt(kFallthroughToken));
251    builder()->StoreAccumulatorInRegister(token_register_);
252    // Since we're not saving the accumulator in the result register, shove a
253    // harmless value there instead so that it is still considered "killed" in
254    // the liveness analysis. Normally we would LdaUndefined first, but the Smi
255    // token value is just as good, and by reusing it we save a bytecode.
256    builder()->StoreAccumulatorInRegister(result_register_);
257  }
258
259  // Applies all recorded control-flow commands after the finally-block again.
260  // This generates a dynamic dispatch on the token from the entry point.
261  void ApplyDeferredCommands() {
262    if (deferred_.size() == 0) return;
263
264    BytecodeLabel fall_through;
265
266    if (deferred_.size() == 1) {
267      // For a single entry, just jump to the fallthrough if we don't match the
268      // entry token.
269      const Entry& entry = deferred_[0];
270
271      builder()
272          ->LoadLiteral(Smi::FromInt(entry.token))
273          .CompareReference(token_register_)
274          .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &fall_through);
275
276      if (CommandUsesAccumulator(entry.command)) {
277        builder()->LoadAccumulatorWithRegister(result_register_);
278      }
279      execution_control()->PerformCommand(entry.command, entry.statement,
280                                          kNoSourcePosition);
281    } else {
282      // For multiple entries, build a jump table and switch on the token,
283      // jumping to the fallthrough if none of them match.
284
285      BytecodeJumpTable* jump_table =
286          builder()->AllocateJumpTable(static_cast<int>(deferred_.size()), 0);
287      builder()
288          ->LoadAccumulatorWithRegister(token_register_)
289          .SwitchOnSmiNoFeedback(jump_table)
290          .Jump(&fall_through);
291      for (const Entry& entry : deferred_) {
292        builder()->Bind(jump_table, entry.token);
293
294        if (CommandUsesAccumulator(entry.command)) {
295          builder()->LoadAccumulatorWithRegister(result_register_);
296        }
297        execution_control()->PerformCommand(entry.command, entry.statement,
298                                            kNoSourcePosition);
299      }
300    }
301
302    builder()->Bind(&fall_through);
303  }
304
305  BytecodeArrayBuilder* builder() { return generator_->builder(); }
306  ControlScope* execution_control() { return generator_->execution_control(); }
307
308 private:
309  int GetTokenForCommand(Command command, Statement* statement) {
310    switch (command) {
311      case CMD_RETURN:
312        return GetReturnToken();
313      case CMD_ASYNC_RETURN:
314        return GetAsyncReturnToken();
315      case CMD_RETHROW:
316        return kRethrowToken;
317      default:
318        // TODO(leszeks): We could also search for entries with the same
319        // command and statement.
320        return GetNewTokenForCommand(command, statement);
321    }
322  }
323
324  int GetReturnToken() {
325    if (return_token_ == -1) {
326      return_token_ = GetNewTokenForCommand(CMD_RETURN, nullptr);
327    }
328    return return_token_;
329  }
330
331  int GetAsyncReturnToken() {
332    if (async_return_token_ == -1) {
333      async_return_token_ = GetNewTokenForCommand(CMD_ASYNC_RETURN, nullptr);
334    }
335    return async_return_token_;
336  }
337
338  int GetNewTokenForCommand(Command command, Statement* statement) {
339    int token = static_cast<int>(deferred_.size());
340    deferred_.push_back({command, statement, token});
341    return token;
342  }
343
344  BytecodeGenerator* generator_;
345  ZoneVector<Entry> deferred_;
346  Register token_register_;
347  Register result_register_;
348
349  // Tokens for commands that don't need a statement.
350  int return_token_;
351  int async_return_token_;
352};
353
354// Scoped class for dealing with control flow reaching the function level.
355class BytecodeGenerator::ControlScopeForTopLevel final
356    : public BytecodeGenerator::ControlScope {
357 public:
358  explicit ControlScopeForTopLevel(BytecodeGenerator* generator)
359      : ControlScope(generator) {}
360
361 protected:
362  bool Execute(Command command, Statement* statement,
363               int source_position) override {
364    switch (command) {
365      case CMD_BREAK:  // We should never see break/continue in top-level.
366      case CMD_CONTINUE:
367        UNREACHABLE();
368      case CMD_RETURN:
369        // No need to pop contexts, execution leaves the method body.
370        generator()->BuildReturn(source_position);
371        return true;
372      case CMD_ASYNC_RETURN:
373        // No need to pop contexts, execution leaves the method body.
374        generator()->BuildAsyncReturn(source_position);
375        return true;
376      case CMD_RETHROW:
377        // No need to pop contexts, execution leaves the method body.
378        generator()->BuildReThrow();
379        return true;
380    }
381    return false;
382  }
383};
384
385// Scoped class for enabling break inside blocks and switch blocks.
386class BytecodeGenerator::ControlScopeForBreakable final
387    : public BytecodeGenerator::ControlScope {
388 public:
389  ControlScopeForBreakable(BytecodeGenerator* generator,
390                           BreakableStatement* statement,
391                           BreakableControlFlowBuilder* control_builder)
392      : ControlScope(generator),
393        statement_(statement),
394        control_builder_(control_builder) {}
395
396 protected:
397  bool Execute(Command command, Statement* statement,
398               int source_position) override {
399    if (statement != statement_) return false;
400    switch (command) {
401      case CMD_BREAK:
402        PopContextToExpectedDepth();
403        control_builder_->Break();
404        return true;
405      case CMD_CONTINUE:
406      case CMD_RETURN:
407      case CMD_ASYNC_RETURN:
408      case CMD_RETHROW:
409        break;
410    }
411    return false;
412  }
413
414 private:
415  Statement* statement_;
416  BreakableControlFlowBuilder* control_builder_;
417};
418
419// Scoped class for enabling 'break' and 'continue' in iteration
420// constructs, e.g. do...while, while..., for...
421class BytecodeGenerator::ControlScopeForIteration final
422    : public BytecodeGenerator::ControlScope {
423 public:
424  ControlScopeForIteration(BytecodeGenerator* generator,
425                           IterationStatement* statement,
426                           LoopBuilder* loop_builder)
427      : ControlScope(generator),
428        statement_(statement),
429        loop_builder_(loop_builder) {}
430
431 protected:
432  bool Execute(Command command, Statement* statement,
433               int source_position) override {
434    if (statement != statement_) return false;
435    switch (command) {
436      case CMD_BREAK:
437        PopContextToExpectedDepth();
438        loop_builder_->Break();
439        return true;
440      case CMD_CONTINUE:
441        PopContextToExpectedDepth();
442        loop_builder_->Continue();
443        return true;
444      case CMD_RETURN:
445      case CMD_ASYNC_RETURN:
446      case CMD_RETHROW:
447        break;
448    }
449    return false;
450  }
451
452 private:
453  Statement* statement_;
454  LoopBuilder* loop_builder_;
455};
456
457// Scoped class for enabling 'throw' in try-catch constructs.
458class BytecodeGenerator::ControlScopeForTryCatch final
459    : public BytecodeGenerator::ControlScope {
460 public:
461  ControlScopeForTryCatch(BytecodeGenerator* generator,
462                          TryCatchBuilder* try_catch_builder)
463      : ControlScope(generator) {}
464
465 protected:
466  bool Execute(Command command, Statement* statement,
467               int source_position) override {
468    switch (command) {
469      case CMD_BREAK:
470      case CMD_CONTINUE:
471      case CMD_RETURN:
472      case CMD_ASYNC_RETURN:
473        break;
474      case CMD_RETHROW:
475        // No need to pop contexts, execution re-enters the method body via the
476        // stack unwinding mechanism which itself restores contexts correctly.
477        generator()->BuildReThrow();
478        return true;
479    }
480    return false;
481  }
482};
483
484// Scoped class for enabling control flow through try-finally constructs.
485class BytecodeGenerator::ControlScopeForTryFinally final
486    : public BytecodeGenerator::ControlScope {
487 public:
488  ControlScopeForTryFinally(BytecodeGenerator* generator,
489                            TryFinallyBuilder* try_finally_builder,
490                            DeferredCommands* commands)
491      : ControlScope(generator),
492        try_finally_builder_(try_finally_builder),
493        commands_(commands) {}
494
495 protected:
496  bool Execute(Command command, Statement* statement,
497               int source_position) override {
498    switch (command) {
499      case CMD_BREAK:
500      case CMD_CONTINUE:
501      case CMD_RETURN:
502      case CMD_ASYNC_RETURN:
503      case CMD_RETHROW:
504        PopContextToExpectedDepth();
505        // We don't record source_position here since we don't generate return
506        // bytecode right here and will generate it later as part of finally
507        // block. Each return bytecode generated in finally block will get own
508        // return source position from corresponded return statement or we'll
509        // use end of function if no return statement is presented.
510        commands_->RecordCommand(command, statement);
511        try_finally_builder_->LeaveTry();
512        return true;
513    }
514    return false;
515  }
516
517 private:
518  TryFinallyBuilder* try_finally_builder_;
519  DeferredCommands* commands_;
520};
521
522// Allocate and fetch the coverage indices tracking NaryLogical Expressions.
523class BytecodeGenerator::NaryCodeCoverageSlots {
524 public:
525  NaryCodeCoverageSlots(BytecodeGenerator* generator, NaryOperation* expr)
526      : generator_(generator) {
527    if (generator_->block_coverage_builder_ == nullptr) return;
528    for (size_t i = 0; i < expr->subsequent_length(); i++) {
529      coverage_slots_.push_back(
530          generator_->AllocateNaryBlockCoverageSlotIfEnabled(expr, i));
531    }
532  }
533
534  int GetSlotFor(size_t subsequent_expr_index) const {
535    if (generator_->block_coverage_builder_ == nullptr) {
536      return BlockCoverageBuilder::kNoCoverageArraySlot;
537    }
538    DCHECK(coverage_slots_.size() > subsequent_expr_index);
539    return coverage_slots_[subsequent_expr_index];
540  }
541
542 private:
543  BytecodeGenerator* generator_;
544  std::vector<int> coverage_slots_;
545};
546
547void BytecodeGenerator::ControlScope::PerformCommand(Command command,
548                                                     Statement* statement,
549                                                     int source_position) {
550  ControlScope* current = this;
551  do {
552    if (current->Execute(command, statement, source_position)) {
553      return;
554    }
555    current = current->outer();
556  } while (current != nullptr);
557  UNREACHABLE();
558}
559
560void BytecodeGenerator::ControlScope::PopContextToExpectedDepth() {
561  // Pop context to the expected depth. Note that this can in fact pop multiple
562  // contexts at once because the {PopContext} bytecode takes a saved register.
563  if (generator()->execution_context() != context()) {
564    generator()->builder()->PopContext(context()->reg());
565  }
566}
567
568class V8_NODISCARD BytecodeGenerator::RegisterAllocationScope final {
569 public:
570  explicit RegisterAllocationScope(BytecodeGenerator* generator)
571      : generator_(generator),
572        outer_next_register_index_(
573            generator->register_allocator()->next_register_index()) {}
574
575  ~RegisterAllocationScope() {
576    generator_->register_allocator()->ReleaseRegisters(
577        outer_next_register_index_);
578  }
579
580  RegisterAllocationScope(const RegisterAllocationScope&) = delete;
581  RegisterAllocationScope& operator=(const RegisterAllocationScope&) = delete;
582
583  BytecodeGenerator* generator() const { return generator_; }
584
585 private:
586  BytecodeGenerator* generator_;
587  int outer_next_register_index_;
588};
589
590class V8_NODISCARD BytecodeGenerator::AccumulatorPreservingScope final {
591 public:
592  explicit AccumulatorPreservingScope(BytecodeGenerator* generator,
593                                      AccumulatorPreservingMode mode)
594      : generator_(generator) {
595    if (mode == AccumulatorPreservingMode::kPreserve) {
596      saved_accumulator_register_ =
597          generator_->register_allocator()->NewRegister();
598      generator_->builder()->StoreAccumulatorInRegister(
599          saved_accumulator_register_);
600    }
601  }
602
603  ~AccumulatorPreservingScope() {
604    if (saved_accumulator_register_.is_valid()) {
605      generator_->builder()->LoadAccumulatorWithRegister(
606          saved_accumulator_register_);
607    }
608  }
609
610  AccumulatorPreservingScope(const AccumulatorPreservingScope&) = delete;
611  AccumulatorPreservingScope& operator=(const AccumulatorPreservingScope&) =
612      delete;
613
614 private:
615  BytecodeGenerator* generator_;
616  Register saved_accumulator_register_;
617};
618
619// Scoped base class for determining how the result of an expression will be
620// used.
621class V8_NODISCARD BytecodeGenerator::ExpressionResultScope {
622 public:
623  ExpressionResultScope(BytecodeGenerator* generator, Expression::Context kind)
624      : outer_(generator->execution_result()),
625        allocator_(generator),
626        kind_(kind),
627        type_hint_(TypeHint::kAny) {
628    generator->set_execution_result(this);
629  }
630
631  ~ExpressionResultScope() {
632    allocator_.generator()->set_execution_result(outer_);
633  }
634
635  ExpressionResultScope(const ExpressionResultScope&) = delete;
636  ExpressionResultScope& operator=(const ExpressionResultScope&) = delete;
637
638  bool IsEffect() const { return kind_ == Expression::kEffect; }
639  bool IsValue() const { return kind_ == Expression::kValue; }
640  bool IsTest() const { return kind_ == Expression::kTest; }
641
642  TestResultScope* AsTest() {
643    DCHECK(IsTest());
644    return reinterpret_cast<TestResultScope*>(this);
645  }
646
647  // Specify expression always returns a Boolean result value.
648  void SetResultIsBoolean() {
649    DCHECK_EQ(type_hint_, TypeHint::kAny);
650    type_hint_ = TypeHint::kBoolean;
651  }
652
653  void SetResultIsString() {
654    DCHECK_EQ(type_hint_, TypeHint::kAny);
655    type_hint_ = TypeHint::kString;
656  }
657
658  TypeHint type_hint() const { return type_hint_; }
659
660 private:
661  ExpressionResultScope* outer_;
662  RegisterAllocationScope allocator_;
663  Expression::Context kind_;
664  TypeHint type_hint_;
665};
666
667// Scoped class used when the result of the current expression is not
668// expected to produce a result.
669class BytecodeGenerator::EffectResultScope final
670    : public ExpressionResultScope {
671 public:
672  explicit EffectResultScope(BytecodeGenerator* generator)
673      : ExpressionResultScope(generator, Expression::kEffect) {}
674};
675
676// Scoped class used when the result of the current expression to be
677// evaluated should go into the interpreter's accumulator.
678class V8_NODISCARD BytecodeGenerator::ValueResultScope final
679    : public ExpressionResultScope {
680 public:
681  explicit ValueResultScope(BytecodeGenerator* generator)
682      : ExpressionResultScope(generator, Expression::kValue) {}
683};
684
685// Scoped class used when the result of the current expression to be
686// evaluated is only tested with jumps to two branches.
687class V8_NODISCARD BytecodeGenerator::TestResultScope final
688    : public ExpressionResultScope {
689 public:
690  TestResultScope(BytecodeGenerator* generator, BytecodeLabels* then_labels,
691                  BytecodeLabels* else_labels, TestFallthrough fallthrough)
692      : ExpressionResultScope(generator, Expression::kTest),
693        result_consumed_by_test_(false),
694        fallthrough_(fallthrough),
695        then_labels_(then_labels),
696        else_labels_(else_labels) {}
697
698  TestResultScope(const TestResultScope&) = delete;
699  TestResultScope& operator=(const TestResultScope&) = delete;
700
701  // Used when code special cases for TestResultScope and consumes any
702  // possible value by testing and jumping to a then/else label.
703  void SetResultConsumedByTest() { result_consumed_by_test_ = true; }
704  bool result_consumed_by_test() { return result_consumed_by_test_; }
705
706  // Inverts the control flow of the operation, swapping the then and else
707  // labels and the fallthrough.
708  void InvertControlFlow() {
709    std::swap(then_labels_, else_labels_);
710    fallthrough_ = inverted_fallthrough();
711  }
712
713  BytecodeLabel* NewThenLabel() { return then_labels_->New(); }
714  BytecodeLabel* NewElseLabel() { return else_labels_->New(); }
715
716  BytecodeLabels* then_labels() const { return then_labels_; }
717  BytecodeLabels* else_labels() const { return else_labels_; }
718
719  void set_then_labels(BytecodeLabels* then_labels) {
720    then_labels_ = then_labels;
721  }
722  void set_else_labels(BytecodeLabels* else_labels) {
723    else_labels_ = else_labels;
724  }
725
726  TestFallthrough fallthrough() const { return fallthrough_; }
727  TestFallthrough inverted_fallthrough() const {
728    switch (fallthrough_) {
729      case TestFallthrough::kThen:
730        return TestFallthrough::kElse;
731      case TestFallthrough::kElse:
732        return TestFallthrough::kThen;
733      default:
734        return TestFallthrough::kNone;
735    }
736  }
737  void set_fallthrough(TestFallthrough fallthrough) {
738    fallthrough_ = fallthrough;
739  }
740
741 private:
742  bool result_consumed_by_test_;
743  TestFallthrough fallthrough_;
744  BytecodeLabels* then_labels_;
745  BytecodeLabels* else_labels_;
746};
747
748// Used to build a list of toplevel declaration data.
749class BytecodeGenerator::TopLevelDeclarationsBuilder final : public ZoneObject {
750 public:
751  template <typename IsolateT>
752  Handle<FixedArray> AllocateDeclarations(UnoptimizedCompilationInfo* info,
753                                          BytecodeGenerator* generator,
754                                          Handle<Script> script,
755                                          IsolateT* isolate) {
756    DCHECK(has_constant_pool_entry_);
757
758    Handle<FixedArray> data =
759        isolate->factory()->NewFixedArray(entry_slots_, AllocationType::kOld);
760
761    int array_index = 0;
762    if (info->scope()->is_module_scope()) {
763      for (Declaration* decl : *info->scope()->declarations()) {
764        Variable* var = decl->var();
765        if (!var->is_used()) continue;
766        if (var->location() != VariableLocation::MODULE) continue;
767#ifdef DEBUG
768        int start = array_index;
769#endif
770        if (decl->IsFunctionDeclaration()) {
771          FunctionLiteral* f = static_cast<FunctionDeclaration*>(decl)->fun();
772          Handle<SharedFunctionInfo> sfi(
773              Compiler::GetSharedFunctionInfo(f, script, isolate));
774          // Return a null handle if any initial values can't be created. Caller
775          // will set stack overflow.
776          if (sfi.is_null()) return Handle<FixedArray>();
777          data->set(array_index++, *sfi);
778          int literal_index = generator->GetCachedCreateClosureSlot(f);
779          data->set(array_index++, Smi::FromInt(literal_index));
780          DCHECK(var->IsExport());
781          data->set(array_index++, Smi::FromInt(var->index()));
782          DCHECK_EQ(start + kModuleFunctionDeclarationSize, array_index);
783        } else if (var->IsExport() && var->binding_needs_init()) {
784          data->set(array_index++, Smi::FromInt(var->index()));
785          DCHECK_EQ(start + kModuleVariableDeclarationSize, array_index);
786        }
787      }
788    } else {
789      for (Declaration* decl : *info->scope()->declarations()) {
790        Variable* var = decl->var();
791        if (!var->is_used()) continue;
792        if (var->location() != VariableLocation::UNALLOCATED) continue;
793#ifdef DEBUG
794        int start = array_index;
795#endif
796        if (decl->IsVariableDeclaration()) {
797          data->set(array_index++, *var->raw_name()->string());
798          DCHECK_EQ(start + kGlobalVariableDeclarationSize, array_index);
799        } else {
800          FunctionLiteral* f = static_cast<FunctionDeclaration*>(decl)->fun();
801          Handle<SharedFunctionInfo> sfi(
802              Compiler::GetSharedFunctionInfo(f, script, isolate));
803          // Return a null handle if any initial values can't be created. Caller
804          // will set stack overflow.
805          if (sfi.is_null()) return Handle<FixedArray>();
806          data->set(array_index++, *sfi);
807          int literal_index = generator->GetCachedCreateClosureSlot(f);
808          data->set(array_index++, Smi::FromInt(literal_index));
809          DCHECK_EQ(start + kGlobalFunctionDeclarationSize, array_index);
810        }
811      }
812    }
813    DCHECK_EQ(array_index, data->length());
814    return data;
815  }
816
817  size_t constant_pool_entry() {
818    DCHECK(has_constant_pool_entry_);
819    return constant_pool_entry_;
820  }
821
822  void set_constant_pool_entry(size_t constant_pool_entry) {
823    DCHECK(has_top_level_declaration());
824    DCHECK(!has_constant_pool_entry_);
825    constant_pool_entry_ = constant_pool_entry;
826    has_constant_pool_entry_ = true;
827  }
828
829  void record_global_variable_declaration() {
830    entry_slots_ += kGlobalVariableDeclarationSize;
831  }
832  void record_global_function_declaration() {
833    entry_slots_ += kGlobalFunctionDeclarationSize;
834  }
835  void record_module_variable_declaration() {
836    entry_slots_ += kModuleVariableDeclarationSize;
837  }
838  void record_module_function_declaration() {
839    entry_slots_ += kModuleFunctionDeclarationSize;
840  }
841  bool has_top_level_declaration() { return entry_slots_ > 0; }
842  bool processed() { return processed_; }
843  void mark_processed() { processed_ = true; }
844
845 private:
846  const int kGlobalVariableDeclarationSize = 1;
847  const int kGlobalFunctionDeclarationSize = 2;
848  const int kModuleVariableDeclarationSize = 1;
849  const int kModuleFunctionDeclarationSize = 3;
850
851  size_t constant_pool_entry_ = 0;
852  int entry_slots_ = 0;
853  bool has_constant_pool_entry_ = false;
854  bool processed_ = false;
855};
856
857class V8_NODISCARD BytecodeGenerator::CurrentScope final {
858 public:
859  CurrentScope(BytecodeGenerator* generator, Scope* scope)
860      : generator_(generator), outer_scope_(generator->current_scope()) {
861    if (scope != nullptr) {
862      DCHECK_EQ(outer_scope_, scope->outer_scope());
863      generator_->set_current_scope(scope);
864    }
865  }
866  ~CurrentScope() {
867    if (outer_scope_ != generator_->current_scope()) {
868      generator_->set_current_scope(outer_scope_);
869    }
870  }
871  CurrentScope(const CurrentScope&) = delete;
872  CurrentScope& operator=(const CurrentScope&) = delete;
873
874 private:
875  BytecodeGenerator* generator_;
876  Scope* outer_scope_;
877};
878
879class V8_NODISCARD BytecodeGenerator::MultipleEntryBlockContextScope {
880 public:
881  MultipleEntryBlockContextScope(BytecodeGenerator* generator, Scope* scope)
882      : generator_(generator), scope_(scope), is_in_scope_(false) {
883    if (scope) {
884      inner_context_ = generator->register_allocator()->NewRegister();
885      outer_context_ = generator->register_allocator()->NewRegister();
886      generator->BuildNewLocalBlockContext(scope_);
887      generator->builder()->StoreAccumulatorInRegister(inner_context_);
888    }
889  }
890
891  void SetEnteredIf(bool condition) {
892    RegisterAllocationScope register_scope(generator_);
893    if (condition && scope_ != nullptr && !is_in_scope_) {
894      EnterScope();
895    } else if (!condition && is_in_scope_) {
896      ExitScope();
897    }
898  }
899
900  MultipleEntryBlockContextScope(const MultipleEntryBlockContextScope&) =
901      delete;
902  MultipleEntryBlockContextScope& operator=(
903      const MultipleEntryBlockContextScope&) = delete;
904
905 private:
906  void EnterScope() {
907    DCHECK(inner_context_.is_valid());
908    DCHECK(outer_context_.is_valid());
909    DCHECK(!is_in_scope_);
910    Register temp = generator_->register_allocator()->NewRegister();
911    generator_->builder()->StoreAccumulatorInRegister(temp);
912    generator_->builder()->LoadAccumulatorWithRegister(inner_context_);
913    current_scope_.emplace(generator_, scope_);
914    context_scope_.emplace(generator_, scope_, outer_context_);
915    generator_->builder()->LoadAccumulatorWithRegister(temp);
916    is_in_scope_ = true;
917  }
918
919  void ExitScope() {
920    DCHECK(inner_context_.is_valid());
921    DCHECK(outer_context_.is_valid());
922    DCHECK(is_in_scope_);
923    Register temp = generator_->register_allocator()->NewRegister();
924    generator_->builder()->StoreAccumulatorInRegister(temp);
925    context_scope_ = base::nullopt;
926    current_scope_ = base::nullopt;
927    generator_->builder()->LoadAccumulatorWithRegister(temp);
928    is_in_scope_ = false;
929  }
930
931  BytecodeGenerator* generator_;
932  Scope* scope_;
933  Register inner_context_;
934  Register outer_context_;
935  bool is_in_scope_;
936  base::Optional<CurrentScope> current_scope_;
937  base::Optional<ContextScope> context_scope_;
938};
939
940class BytecodeGenerator::FeedbackSlotCache : public ZoneObject {
941 public:
942  enum class SlotKind {
943    kStoreGlobalSloppy,
944    kStoreGlobalStrict,
945    kSetNamedStrict,
946    kSetNamedSloppy,
947    kLoadProperty,
948    kLoadSuperProperty,
949    kLoadGlobalNotInsideTypeof,
950    kLoadGlobalInsideTypeof,
951    kClosureFeedbackCell
952  };
953
954  explicit FeedbackSlotCache(Zone* zone) : map_(zone) {}
955
956  void Put(SlotKind slot_kind, Variable* variable, int slot_index) {
957    PutImpl(slot_kind, 0, variable, slot_index);
958  }
959  void Put(SlotKind slot_kind, AstNode* node, int slot_index) {
960    PutImpl(slot_kind, 0, node, slot_index);
961  }
962  void Put(SlotKind slot_kind, int variable_index, const AstRawString* name,
963           int slot_index) {
964    PutImpl(slot_kind, variable_index, name, slot_index);
965  }
966  void Put(SlotKind slot_kind, const AstRawString* name, int slot_index) {
967    PutImpl(slot_kind, 0, name, slot_index);
968  }
969
970  int Get(SlotKind slot_kind, Variable* variable) const {
971    return GetImpl(slot_kind, 0, variable);
972  }
973  int Get(SlotKind slot_kind, AstNode* node) const {
974    return GetImpl(slot_kind, 0, node);
975  }
976  int Get(SlotKind slot_kind, int variable_index,
977          const AstRawString* name) const {
978    return GetImpl(slot_kind, variable_index, name);
979  }
980  int Get(SlotKind slot_kind, const AstRawString* name) const {
981    return GetImpl(slot_kind, 0, name);
982  }
983
984 private:
985  using Key = std::tuple<SlotKind, int, const void*>;
986
987  void PutImpl(SlotKind slot_kind, int index, const void* node,
988               int slot_index) {
989    Key key = std::make_tuple(slot_kind, index, node);
990    auto entry = std::make_pair(key, slot_index);
991    map_.insert(entry);
992  }
993
994  int GetImpl(SlotKind slot_kind, int index, const void* node) const {
995    Key key = std::make_tuple(slot_kind, index, node);
996    auto iter = map_.find(key);
997    if (iter != map_.end()) {
998      return iter->second;
999    }
1000    return -1;
1001  }
1002
1003  ZoneMap<Key, int> map_;
1004};
1005
1006class BytecodeGenerator::IteratorRecord final {
1007 public:
1008  IteratorRecord(Register object_register, Register next_register,
1009                 IteratorType type = IteratorType::kNormal)
1010      : type_(type), object_(object_register), next_(next_register) {
1011    DCHECK(object_.is_valid() && next_.is_valid());
1012  }
1013
1014  inline IteratorType type() const { return type_; }
1015  inline Register object() const { return object_; }
1016  inline Register next() const { return next_; }
1017
1018 private:
1019  IteratorType type_;
1020  Register object_;
1021  Register next_;
1022};
1023
1024class V8_NODISCARD BytecodeGenerator::OptionalChainNullLabelScope final {
1025 public:
1026  explicit OptionalChainNullLabelScope(BytecodeGenerator* bytecode_generator)
1027      : bytecode_generator_(bytecode_generator),
1028        labels_(bytecode_generator->zone()) {
1029    prev_ = bytecode_generator_->optional_chaining_null_labels_;
1030    bytecode_generator_->optional_chaining_null_labels_ = &labels_;
1031  }
1032
1033  ~OptionalChainNullLabelScope() {
1034    bytecode_generator_->optional_chaining_null_labels_ = prev_;
1035  }
1036
1037  BytecodeLabels* labels() { return &labels_; }
1038
1039 private:
1040  BytecodeGenerator* bytecode_generator_;
1041  BytecodeLabels labels_;
1042  BytecodeLabels* prev_;
1043};
1044
1045// LoopScope delimits the scope of {loop}, from its header to its final jump.
1046// It should be constructed iff a (conceptual) back edge should be produced. In
1047// the case of creating a LoopBuilder but never emitting the loop, it is valid
1048// to skip the creation of LoopScope.
1049class V8_NODISCARD BytecodeGenerator::LoopScope final {
1050 public:
1051  explicit LoopScope(BytecodeGenerator* bytecode_generator, LoopBuilder* loop)
1052      : bytecode_generator_(bytecode_generator),
1053        parent_loop_scope_(bytecode_generator_->current_loop_scope()),
1054        loop_builder_(loop) {
1055    loop_builder_->LoopHeader();
1056    bytecode_generator_->set_current_loop_scope(this);
1057    bytecode_generator_->loop_depth_++;
1058  }
1059
1060  ~LoopScope() {
1061    bytecode_generator_->loop_depth_--;
1062    bytecode_generator_->set_current_loop_scope(parent_loop_scope_);
1063    DCHECK_GE(bytecode_generator_->loop_depth_, 0);
1064    loop_builder_->JumpToHeader(
1065        bytecode_generator_->loop_depth_,
1066        parent_loop_scope_ ? parent_loop_scope_->loop_builder_ : nullptr);
1067  }
1068
1069 private:
1070  BytecodeGenerator* const bytecode_generator_;
1071  LoopScope* const parent_loop_scope_;
1072  LoopBuilder* const loop_builder_;
1073};
1074
1075namespace {
1076
1077template <typename PropertyT>
1078struct Accessors : public ZoneObject {
1079  Accessors() : getter(nullptr), setter(nullptr) {}
1080  PropertyT* getter;
1081  PropertyT* setter;
1082};
1083
1084// A map from property names to getter/setter pairs allocated in the zone that
1085// also provides a way of accessing the pairs in the order they were first
1086// added so that the generated bytecode is always the same.
1087template <typename PropertyT>
1088class AccessorTable
1089    : public base::TemplateHashMap<Literal, Accessors<PropertyT>,
1090                                   bool (*)(void*, void*),
1091                                   ZoneAllocationPolicy> {
1092 public:
1093  explicit AccessorTable(Zone* zone)
1094      : base::TemplateHashMap<Literal, Accessors<PropertyT>,
1095                              bool (*)(void*, void*), ZoneAllocationPolicy>(
1096            Literal::Match, ZoneAllocationPolicy(zone)),
1097        zone_(zone) {}
1098
1099  Accessors<PropertyT>* LookupOrInsert(Literal* key) {
1100    auto it = this->find(key, true);
1101    if (it->second == nullptr) {
1102      it->second = zone_->New<Accessors<PropertyT>>();
1103      ordered_accessors_.push_back({key, it->second});
1104    }
1105    return it->second;
1106  }
1107
1108  const std::vector<std::pair<Literal*, Accessors<PropertyT>*>>&
1109  ordered_accessors() {
1110    return ordered_accessors_;
1111  }
1112
1113 private:
1114  std::vector<std::pair<Literal*, Accessors<PropertyT>*>> ordered_accessors_;
1115
1116  Zone* zone_;
1117};
1118
1119}  // namespace
1120
1121#ifdef DEBUG
1122
1123static bool IsInEagerLiterals(
1124    FunctionLiteral* literal,
1125    const std::vector<FunctionLiteral*>& eager_literals) {
1126  for (FunctionLiteral* eager_literal : eager_literals) {
1127    if (literal == eager_literal) return true;
1128  }
1129  return false;
1130}
1131
1132#endif  // DEBUG
1133
1134BytecodeGenerator::BytecodeGenerator(
1135    LocalIsolate* local_isolate, Zone* compile_zone,
1136    UnoptimizedCompilationInfo* info,
1137    const AstStringConstants* ast_string_constants,
1138    std::vector<FunctionLiteral*>* eager_inner_literals, Handle<Script> script)
1139    : local_isolate_(local_isolate),
1140      zone_(compile_zone),
1141      builder_(zone(), info->num_parameters_including_this(),
1142               info->scope()->num_stack_slots(), info->feedback_vector_spec(),
1143               info->SourcePositionRecordingMode()),
1144      info_(info),
1145      ast_string_constants_(ast_string_constants),
1146      closure_scope_(info->scope()),
1147      current_scope_(info->scope()),
1148      eager_inner_literals_(eager_inner_literals),
1149      script_(script),
1150      feedback_slot_cache_(zone()->New<FeedbackSlotCache>(zone())),
1151      top_level_builder_(zone()->New<TopLevelDeclarationsBuilder>()),
1152      block_coverage_builder_(nullptr),
1153      function_literals_(0, zone()),
1154      native_function_literals_(0, zone()),
1155      object_literals_(0, zone()),
1156      array_literals_(0, zone()),
1157      class_literals_(0, zone()),
1158      template_objects_(0, zone()),
1159      execution_control_(nullptr),
1160      execution_context_(nullptr),
1161      execution_result_(nullptr),
1162      incoming_new_target_or_generator_(),
1163      optional_chaining_null_labels_(nullptr),
1164      dummy_feedback_slot_(feedback_spec(), FeedbackSlotKind::kCompareOp),
1165      generator_jump_table_(nullptr),
1166      suspend_count_(0),
1167      loop_depth_(0),
1168      current_loop_scope_(nullptr),
1169      catch_prediction_(HandlerTable::UNCAUGHT) {
1170  DCHECK_EQ(closure_scope(), closure_scope()->GetClosureScope());
1171  if (info->has_source_range_map()) {
1172    block_coverage_builder_ = zone()->New<BlockCoverageBuilder>(
1173        zone(), builder(), info->source_range_map());
1174  }
1175}
1176
1177namespace {
1178
1179template <typename Isolate>
1180struct NullContextScopeHelper;
1181
1182template <>
1183struct NullContextScopeHelper<Isolate> {
1184  using Type = NullContextScope;
1185};
1186
1187template <>
1188struct NullContextScopeHelper<LocalIsolate> {
1189  class V8_NODISCARD DummyNullContextScope {
1190   public:
1191    explicit DummyNullContextScope(LocalIsolate*) {}
1192  };
1193  using Type = DummyNullContextScope;
1194};
1195
1196template <typename Isolate>
1197using NullContextScopeFor = typename NullContextScopeHelper<Isolate>::Type;
1198
1199}  // namespace
1200
1201template <typename IsolateT>
1202Handle<BytecodeArray> BytecodeGenerator::FinalizeBytecode(
1203    IsolateT* isolate, Handle<Script> script) {
1204  DCHECK_EQ(ThreadId::Current(), isolate->thread_id());
1205#ifdef DEBUG
1206  // Unoptimized compilation should be context-independent. Verify that we don't
1207  // access the native context by nulling it out during finalization.
1208  NullContextScopeFor<IsolateT> null_context_scope(isolate);
1209#endif
1210
1211  AllocateDeferredConstants(isolate, script);
1212
1213  if (block_coverage_builder_) {
1214    Handle<CoverageInfo> coverage_info =
1215        isolate->factory()->NewCoverageInfo(block_coverage_builder_->slots());
1216    info()->set_coverage_info(coverage_info);
1217    if (FLAG_trace_block_coverage) {
1218      StdoutStream os;
1219      coverage_info->CoverageInfoPrint(os, info()->literal()->GetDebugName());
1220    }
1221  }
1222
1223  if (HasStackOverflow()) return Handle<BytecodeArray>();
1224  Handle<BytecodeArray> bytecode_array = builder()->ToBytecodeArray(isolate);
1225
1226  if (incoming_new_target_or_generator_.is_valid()) {
1227    bytecode_array->set_incoming_new_target_or_generator_register(
1228        incoming_new_target_or_generator_);
1229  }
1230
1231  return bytecode_array;
1232}
1233
1234template Handle<BytecodeArray> BytecodeGenerator::FinalizeBytecode(
1235    Isolate* isolate, Handle<Script> script);
1236template Handle<BytecodeArray> BytecodeGenerator::FinalizeBytecode(
1237    LocalIsolate* isolate, Handle<Script> script);
1238
1239template <typename IsolateT>
1240Handle<ByteArray> BytecodeGenerator::FinalizeSourcePositionTable(
1241    IsolateT* isolate) {
1242  DCHECK_EQ(ThreadId::Current(), isolate->thread_id());
1243#ifdef DEBUG
1244  // Unoptimized compilation should be context-independent. Verify that we don't
1245  // access the native context by nulling it out during finalization.
1246  NullContextScopeFor<IsolateT> null_context_scope(isolate);
1247#endif
1248
1249  Handle<ByteArray> source_position_table =
1250      builder()->ToSourcePositionTable(isolate);
1251
1252  LOG_CODE_EVENT(isolate,
1253                 CodeLinePosInfoRecordEvent(
1254                     info_->bytecode_array()->GetFirstBytecodeAddress(),
1255                     *source_position_table, JitCodeEvent::BYTE_CODE));
1256
1257  return source_position_table;
1258}
1259
1260template Handle<ByteArray> BytecodeGenerator::FinalizeSourcePositionTable(
1261    Isolate* isolate);
1262template Handle<ByteArray> BytecodeGenerator::FinalizeSourcePositionTable(
1263    LocalIsolate* isolate);
1264
1265#ifdef DEBUG
1266int BytecodeGenerator::CheckBytecodeMatches(BytecodeArray bytecode) {
1267  return builder()->CheckBytecodeMatches(bytecode);
1268}
1269#endif
1270
1271template <typename IsolateT>
1272void BytecodeGenerator::AllocateDeferredConstants(IsolateT* isolate,
1273                                                  Handle<Script> script) {
1274  if (top_level_builder()->has_top_level_declaration()) {
1275    // Build global declaration pair array.
1276    Handle<FixedArray> declarations = top_level_builder()->AllocateDeclarations(
1277        info(), this, script, isolate);
1278    if (declarations.is_null()) return SetStackOverflow();
1279    builder()->SetDeferredConstantPoolEntry(
1280        top_level_builder()->constant_pool_entry(), declarations);
1281  }
1282
1283  // Find or build shared function infos.
1284  for (std::pair<FunctionLiteral*, size_t> literal : function_literals_) {
1285    FunctionLiteral* expr = literal.first;
1286    Handle<SharedFunctionInfo> shared_info =
1287        Compiler::GetSharedFunctionInfo(expr, script, isolate);
1288    if (shared_info.is_null()) return SetStackOverflow();
1289    builder()->SetDeferredConstantPoolEntry(literal.second, shared_info);
1290  }
1291
1292  // Find or build shared function infos for the native function templates.
1293  for (std::pair<NativeFunctionLiteral*, size_t> literal :
1294       native_function_literals_) {
1295    // This should only happen for main-thread compilations.
1296    DCHECK((std::is_same<Isolate, v8::internal::Isolate>::value));
1297
1298    NativeFunctionLiteral* expr = literal.first;
1299    v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
1300
1301    // Compute the function template for the native function.
1302    v8::Local<v8::FunctionTemplate> info =
1303        expr->extension()->GetNativeFunctionTemplate(
1304            v8_isolate, Utils::ToLocal(expr->name()));
1305    DCHECK(!info.IsEmpty());
1306
1307    Handle<SharedFunctionInfo> shared_info =
1308        FunctionTemplateInfo::GetOrCreateSharedFunctionInfo(
1309            isolate, Utils::OpenHandle(*info), expr->name());
1310    DCHECK(!shared_info.is_null());
1311    builder()->SetDeferredConstantPoolEntry(literal.second, shared_info);
1312  }
1313
1314  // Build object literal constant properties
1315  for (std::pair<ObjectLiteralBoilerplateBuilder*, size_t> literal :
1316       object_literals_) {
1317    ObjectLiteralBoilerplateBuilder* object_literal_builder = literal.first;
1318    if (object_literal_builder->properties_count() > 0) {
1319      // If constant properties is an empty fixed array, we've already added it
1320      // to the constant pool when visiting the object literal.
1321      Handle<ObjectBoilerplateDescription> constant_properties =
1322          object_literal_builder->GetOrBuildBoilerplateDescription(isolate);
1323
1324      builder()->SetDeferredConstantPoolEntry(literal.second,
1325                                              constant_properties);
1326    }
1327  }
1328
1329  // Build array literal constant elements
1330  for (std::pair<ArrayLiteralBoilerplateBuilder*, size_t> literal :
1331       array_literals_) {
1332    ArrayLiteralBoilerplateBuilder* array_literal_builder = literal.first;
1333    Handle<ArrayBoilerplateDescription> constant_elements =
1334        array_literal_builder->GetOrBuildBoilerplateDescription(isolate);
1335    builder()->SetDeferredConstantPoolEntry(literal.second, constant_elements);
1336  }
1337
1338  // Build class literal boilerplates.
1339  for (std::pair<ClassLiteral*, size_t> literal : class_literals_) {
1340    ClassLiteral* class_literal = literal.first;
1341    Handle<ClassBoilerplate> class_boilerplate =
1342        ClassBoilerplate::BuildClassBoilerplate(isolate, class_literal);
1343    builder()->SetDeferredConstantPoolEntry(literal.second, class_boilerplate);
1344  }
1345
1346  // Build template literals.
1347  for (std::pair<GetTemplateObject*, size_t> literal : template_objects_) {
1348    GetTemplateObject* get_template_object = literal.first;
1349    Handle<TemplateObjectDescription> description =
1350        get_template_object->GetOrBuildDescription(isolate);
1351    builder()->SetDeferredConstantPoolEntry(literal.second, description);
1352  }
1353}
1354
1355template void BytecodeGenerator::AllocateDeferredConstants(
1356    Isolate* isolate, Handle<Script> script);
1357template void BytecodeGenerator::AllocateDeferredConstants(
1358    LocalIsolate* isolate, Handle<Script> script);
1359
1360namespace {
1361bool NeedsContextInitialization(DeclarationScope* scope) {
1362  return scope->NeedsContext() && !scope->is_script_scope() &&
1363         !scope->is_module_scope();
1364}
1365}  // namespace
1366
1367void BytecodeGenerator::GenerateBytecode(uintptr_t stack_limit) {
1368  InitializeAstVisitor(stack_limit);
1369
1370  // Initialize the incoming context.
1371  ContextScope incoming_context(this, closure_scope());
1372
1373  // Initialize control scope.
1374  ControlScopeForTopLevel control(this);
1375
1376  RegisterAllocationScope register_scope(this);
1377
1378  AllocateTopLevelRegisters();
1379
1380  builder()->EmitFunctionStartSourcePosition(
1381      info()->literal()->start_position());
1382
1383  if (info()->literal()->CanSuspend()) {
1384    BuildGeneratorPrologue();
1385  }
1386
1387  if (NeedsContextInitialization(closure_scope())) {
1388    // Push a new inner context scope for the function.
1389    BuildNewLocalActivationContext();
1390    ContextScope local_function_context(this, closure_scope());
1391    BuildLocalActivationContextInitialization();
1392    GenerateBytecodeBody();
1393  } else {
1394    GenerateBytecodeBody();
1395  }
1396
1397  // Check that we are not falling off the end.
1398  DCHECK(builder()->RemainderOfBlockIsDead());
1399}
1400
1401void BytecodeGenerator::GenerateBytecodeBody() {
1402  // Build the arguments object if it is used.
1403  VisitArgumentsObject(closure_scope()->arguments());
1404
1405  // Build rest arguments array if it is used.
1406  Variable* rest_parameter = closure_scope()->rest_parameter();
1407  VisitRestArgumentsArray(rest_parameter);
1408
1409  // Build assignment to the function name or {.this_function}
1410  // variables if used.
1411  VisitThisFunctionVariable(closure_scope()->function_var());
1412  VisitThisFunctionVariable(closure_scope()->this_function_var());
1413
1414  // Build assignment to {new.target} variable if it is used.
1415  VisitNewTargetVariable(closure_scope()->new_target_var());
1416
1417  // Create a generator object if necessary and initialize the
1418  // {.generator_object} variable.
1419  FunctionLiteral* literal = info()->literal();
1420  if (IsResumableFunction(literal->kind())) {
1421    BuildGeneratorObjectVariableInitialization();
1422  }
1423
1424  // Emit tracing call if requested to do so.
1425  if (FLAG_trace) builder()->CallRuntime(Runtime::kTraceEnter);
1426
1427  // Emit type profile call.
1428  if (info()->flags().collect_type_profile()) {
1429    feedback_spec()->AddTypeProfileSlot();
1430    int num_parameters = closure_scope()->num_parameters();
1431    for (int i = 0; i < num_parameters; i++) {
1432      Register parameter(builder()->Parameter(i));
1433      builder()->LoadAccumulatorWithRegister(parameter).CollectTypeProfile(
1434          closure_scope()->parameter(i)->initializer_position());
1435    }
1436  }
1437
1438  // Increment the function-scope block coverage counter.
1439  BuildIncrementBlockCoverageCounterIfEnabled(literal, SourceRangeKind::kBody);
1440
1441  // Visit declarations within the function scope.
1442  if (closure_scope()->is_script_scope()) {
1443    VisitGlobalDeclarations(closure_scope()->declarations());
1444  } else if (closure_scope()->is_module_scope()) {
1445    VisitModuleDeclarations(closure_scope()->declarations());
1446  } else {
1447    VisitDeclarations(closure_scope()->declarations());
1448  }
1449
1450  // Emit initializing assignments for module namespace imports (if any).
1451  VisitModuleNamespaceImports();
1452
1453  // The derived constructor case is handled in VisitCallSuper.
1454  if (IsBaseConstructor(function_kind())) {
1455    if (literal->class_scope_has_private_brand()) {
1456      ClassScope* scope = info()->scope()->outer_scope()->AsClassScope();
1457      DCHECK_NOT_NULL(scope->brand());
1458      BuildPrivateBrandInitialization(builder()->Receiver(), scope->brand());
1459    }
1460
1461    if (literal->requires_instance_members_initializer()) {
1462      BuildInstanceMemberInitialization(Register::function_closure(),
1463                                        builder()->Receiver());
1464    }
1465  }
1466
1467  // Visit statements in the function body.
1468  VisitStatements(literal->body());
1469
1470  // Emit an implicit return instruction in case control flow can fall off the
1471  // end of the function without an explicit return being present on all paths.
1472  if (!builder()->RemainderOfBlockIsDead()) {
1473    builder()->LoadUndefined();
1474    BuildReturn(literal->return_position());
1475  }
1476}
1477
1478void BytecodeGenerator::AllocateTopLevelRegisters() {
1479  if (IsResumableFunction(info()->literal()->kind())) {
1480    // Either directly use generator_object_var or allocate a new register for
1481    // the incoming generator object.
1482    Variable* generator_object_var = closure_scope()->generator_object_var();
1483    if (generator_object_var->location() == VariableLocation::LOCAL) {
1484      incoming_new_target_or_generator_ =
1485          GetRegisterForLocalVariable(generator_object_var);
1486    } else {
1487      incoming_new_target_or_generator_ = register_allocator()->NewRegister();
1488    }
1489  } else if (closure_scope()->new_target_var()) {
1490    // Either directly use new_target_var or allocate a new register for
1491    // the incoming new target object.
1492    Variable* new_target_var = closure_scope()->new_target_var();
1493    if (new_target_var->location() == VariableLocation::LOCAL) {
1494      incoming_new_target_or_generator_ =
1495          GetRegisterForLocalVariable(new_target_var);
1496    } else {
1497      incoming_new_target_or_generator_ = register_allocator()->NewRegister();
1498    }
1499  }
1500}
1501
1502void BytecodeGenerator::BuildGeneratorPrologue() {
1503  DCHECK_GT(info()->literal()->suspend_count(), 0);
1504  DCHECK(generator_object().is_valid());
1505  generator_jump_table_ =
1506      builder()->AllocateJumpTable(info()->literal()->suspend_count(), 0);
1507
1508  // If the generator is not undefined, this is a resume, so perform state
1509  // dispatch.
1510  builder()->SwitchOnGeneratorState(generator_object(), generator_jump_table_);
1511
1512  // Otherwise, fall-through to the ordinary function prologue, after which we
1513  // will run into the generator object creation and other extra code inserted
1514  // by the parser.
1515}
1516
1517void BytecodeGenerator::VisitBlock(Block* stmt) {
1518  // Visit declarations and statements.
1519  CurrentScope current_scope(this, stmt->scope());
1520  if (stmt->scope() != nullptr && stmt->scope()->NeedsContext()) {
1521    BuildNewLocalBlockContext(stmt->scope());
1522    ContextScope scope(this, stmt->scope());
1523    VisitBlockDeclarationsAndStatements(stmt);
1524  } else {
1525    VisitBlockDeclarationsAndStatements(stmt);
1526  }
1527}
1528
1529void BytecodeGenerator::VisitBlockDeclarationsAndStatements(Block* stmt) {
1530  BlockBuilder block_builder(builder(), block_coverage_builder_, stmt);
1531  ControlScopeForBreakable execution_control(this, stmt, &block_builder);
1532  if (stmt->scope() != nullptr) {
1533    VisitDeclarations(stmt->scope()->declarations());
1534  }
1535  VisitStatements(stmt->statements());
1536}
1537
1538void BytecodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) {
1539  Variable* variable = decl->var();
1540  // Unused variables don't need to be visited.
1541  if (!variable->is_used()) return;
1542
1543  switch (variable->location()) {
1544    case VariableLocation::UNALLOCATED:
1545    case VariableLocation::MODULE:
1546      UNREACHABLE();
1547    case VariableLocation::LOCAL:
1548      if (variable->binding_needs_init()) {
1549        Register destination(builder()->Local(variable->index()));
1550        builder()->LoadTheHole().StoreAccumulatorInRegister(destination);
1551      }
1552      break;
1553    case VariableLocation::PARAMETER:
1554      if (variable->binding_needs_init()) {
1555        Register destination(builder()->Parameter(variable->index()));
1556        builder()->LoadTheHole().StoreAccumulatorInRegister(destination);
1557      }
1558      break;
1559    case VariableLocation::REPL_GLOBAL:
1560      // REPL let's are stored in script contexts. They get initialized
1561      // with the hole the same way as normal context allocated variables.
1562    case VariableLocation::CONTEXT:
1563      if (variable->binding_needs_init()) {
1564        DCHECK_EQ(0, execution_context()->ContextChainDepth(variable->scope()));
1565        builder()->LoadTheHole().StoreContextSlot(execution_context()->reg(),
1566                                                  variable->index(), 0);
1567      }
1568      break;
1569    case VariableLocation::LOOKUP: {
1570      DCHECK_EQ(VariableMode::kDynamic, variable->mode());
1571      DCHECK(!variable->binding_needs_init());
1572
1573      Register name = register_allocator()->NewRegister();
1574
1575      builder()
1576          ->LoadLiteral(variable->raw_name())
1577          .StoreAccumulatorInRegister(name)
1578          .CallRuntime(Runtime::kDeclareEvalVar, name);
1579      break;
1580    }
1581  }
1582}
1583
1584void BytecodeGenerator::VisitFunctionDeclaration(FunctionDeclaration* decl) {
1585  Variable* variable = decl->var();
1586  DCHECK(variable->mode() == VariableMode::kLet ||
1587         variable->mode() == VariableMode::kVar ||
1588         variable->mode() == VariableMode::kDynamic);
1589  // Unused variables don't need to be visited.
1590  if (!variable->is_used()) return;
1591
1592  switch (variable->location()) {
1593    case VariableLocation::UNALLOCATED:
1594    case VariableLocation::MODULE:
1595      UNREACHABLE();
1596    case VariableLocation::PARAMETER:
1597    case VariableLocation::LOCAL: {
1598      VisitFunctionLiteral(decl->fun());
1599      BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
1600      break;
1601    }
1602    case VariableLocation::REPL_GLOBAL:
1603    case VariableLocation::CONTEXT: {
1604      DCHECK_EQ(0, execution_context()->ContextChainDepth(variable->scope()));
1605      VisitFunctionLiteral(decl->fun());
1606      builder()->StoreContextSlot(execution_context()->reg(), variable->index(),
1607                                  0);
1608      break;
1609    }
1610    case VariableLocation::LOOKUP: {
1611      RegisterList args = register_allocator()->NewRegisterList(2);
1612      builder()
1613          ->LoadLiteral(variable->raw_name())
1614          .StoreAccumulatorInRegister(args[0]);
1615      VisitFunctionLiteral(decl->fun());
1616      builder()->StoreAccumulatorInRegister(args[1]).CallRuntime(
1617          Runtime::kDeclareEvalFunction, args);
1618      break;
1619    }
1620  }
1621  DCHECK_IMPLIES(
1622      eager_inner_literals_ != nullptr && decl->fun()->ShouldEagerCompile(),
1623      IsInEagerLiterals(decl->fun(), *eager_inner_literals_));
1624}
1625
1626void BytecodeGenerator::VisitModuleNamespaceImports() {
1627  if (!closure_scope()->is_module_scope()) return;
1628
1629  RegisterAllocationScope register_scope(this);
1630  Register module_request = register_allocator()->NewRegister();
1631
1632  SourceTextModuleDescriptor* descriptor =
1633      closure_scope()->AsModuleScope()->module();
1634  for (auto entry : descriptor->namespace_imports()) {
1635    builder()
1636        ->LoadLiteral(Smi::FromInt(entry->module_request))
1637        .StoreAccumulatorInRegister(module_request)
1638        .CallRuntime(Runtime::kGetModuleNamespace, module_request);
1639    Variable* var = closure_scope()->LookupInModule(entry->local_name);
1640    BuildVariableAssignment(var, Token::INIT, HoleCheckMode::kElided);
1641  }
1642}
1643
1644void BytecodeGenerator::BuildDeclareCall(Runtime::FunctionId id) {
1645  if (!top_level_builder()->has_top_level_declaration()) return;
1646  DCHECK(!top_level_builder()->processed());
1647
1648  top_level_builder()->set_constant_pool_entry(
1649      builder()->AllocateDeferredConstantPoolEntry());
1650
1651  // Emit code to declare globals.
1652  RegisterList args = register_allocator()->NewRegisterList(2);
1653  builder()
1654      ->LoadConstantPoolEntry(top_level_builder()->constant_pool_entry())
1655      .StoreAccumulatorInRegister(args[0])
1656      .MoveRegister(Register::function_closure(), args[1])
1657      .CallRuntime(id, args);
1658
1659  top_level_builder()->mark_processed();
1660}
1661
1662void BytecodeGenerator::VisitModuleDeclarations(Declaration::List* decls) {
1663  RegisterAllocationScope register_scope(this);
1664  for (Declaration* decl : *decls) {
1665    Variable* var = decl->var();
1666    if (!var->is_used()) continue;
1667    if (var->location() == VariableLocation::MODULE) {
1668      if (decl->IsFunctionDeclaration()) {
1669        DCHECK(var->IsExport());
1670        FunctionDeclaration* f = static_cast<FunctionDeclaration*>(decl);
1671        AddToEagerLiteralsIfEager(f->fun());
1672        top_level_builder()->record_module_function_declaration();
1673      } else if (var->IsExport() && var->binding_needs_init()) {
1674        DCHECK(decl->IsVariableDeclaration());
1675        top_level_builder()->record_module_variable_declaration();
1676      }
1677    } else {
1678      RegisterAllocationScope inner_register_scope(this);
1679      Visit(decl);
1680    }
1681  }
1682  BuildDeclareCall(Runtime::kDeclareModuleExports);
1683}
1684
1685void BytecodeGenerator::VisitGlobalDeclarations(Declaration::List* decls) {
1686  RegisterAllocationScope register_scope(this);
1687  for (Declaration* decl : *decls) {
1688    Variable* var = decl->var();
1689    DCHECK(var->is_used());
1690    if (var->location() == VariableLocation::UNALLOCATED) {
1691      // var or function.
1692      if (decl->IsFunctionDeclaration()) {
1693        top_level_builder()->record_global_function_declaration();
1694        FunctionDeclaration* f = static_cast<FunctionDeclaration*>(decl);
1695        AddToEagerLiteralsIfEager(f->fun());
1696      } else {
1697        top_level_builder()->record_global_variable_declaration();
1698      }
1699    } else {
1700      // let or const. Handled in NewScriptContext.
1701      DCHECK(decl->IsVariableDeclaration());
1702      DCHECK(IsLexicalVariableMode(var->mode()));
1703    }
1704  }
1705
1706  BuildDeclareCall(Runtime::kDeclareGlobals);
1707}
1708
1709void BytecodeGenerator::VisitDeclarations(Declaration::List* declarations) {
1710  for (Declaration* decl : *declarations) {
1711    RegisterAllocationScope register_scope(this);
1712    Visit(decl);
1713  }
1714}
1715
1716void BytecodeGenerator::VisitStatements(
1717    const ZonePtrList<Statement>* statements) {
1718  for (int i = 0; i < statements->length(); i++) {
1719    // Allocate an outer register allocations scope for the statement.
1720    RegisterAllocationScope allocation_scope(this);
1721    Statement* stmt = statements->at(i);
1722    Visit(stmt);
1723    if (builder()->RemainderOfBlockIsDead()) break;
1724  }
1725}
1726
1727void BytecodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
1728  builder()->SetStatementPosition(stmt);
1729  VisitForEffect(stmt->expression());
1730}
1731
1732void BytecodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {}
1733
1734void BytecodeGenerator::VisitIfStatement(IfStatement* stmt) {
1735  ConditionalControlFlowBuilder conditional_builder(
1736      builder(), block_coverage_builder_, stmt);
1737  builder()->SetStatementPosition(stmt);
1738
1739  if (stmt->condition()->ToBooleanIsTrue()) {
1740    // Generate then block unconditionally as always true.
1741    conditional_builder.Then();
1742    Visit(stmt->then_statement());
1743  } else if (stmt->condition()->ToBooleanIsFalse()) {
1744    // Generate else block unconditionally if it exists.
1745    if (stmt->HasElseStatement()) {
1746      conditional_builder.Else();
1747      Visit(stmt->else_statement());
1748    }
1749  } else {
1750    // TODO(oth): If then statement is BreakStatement or
1751    // ContinueStatement we can reduce number of generated
1752    // jump/jump_ifs here. See BasicLoops test.
1753    VisitForTest(stmt->condition(), conditional_builder.then_labels(),
1754                 conditional_builder.else_labels(), TestFallthrough::kThen);
1755
1756    conditional_builder.Then();
1757    Visit(stmt->then_statement());
1758
1759    if (stmt->HasElseStatement()) {
1760      conditional_builder.JumpToEnd();
1761      conditional_builder.Else();
1762      Visit(stmt->else_statement());
1763    }
1764  }
1765}
1766
1767void BytecodeGenerator::VisitSloppyBlockFunctionStatement(
1768    SloppyBlockFunctionStatement* stmt) {
1769  Visit(stmt->statement());
1770}
1771
1772void BytecodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
1773  AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
1774  builder()->SetStatementPosition(stmt);
1775  execution_control()->Continue(stmt->target());
1776}
1777
1778void BytecodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
1779  AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
1780  builder()->SetStatementPosition(stmt);
1781  execution_control()->Break(stmt->target());
1782}
1783
1784void BytecodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
1785  AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
1786  builder()->SetStatementPosition(stmt);
1787  VisitForAccumulatorValue(stmt->expression());
1788  int return_position = stmt->end_position();
1789  if (return_position == ReturnStatement::kFunctionLiteralReturnPosition) {
1790    return_position = info()->literal()->return_position();
1791  }
1792  if (stmt->is_async_return()) {
1793    execution_control()->AsyncReturnAccumulator(return_position);
1794  } else {
1795    execution_control()->ReturnAccumulator(return_position);
1796  }
1797}
1798
1799void BytecodeGenerator::VisitWithStatement(WithStatement* stmt) {
1800  builder()->SetStatementPosition(stmt);
1801  VisitForAccumulatorValue(stmt->expression());
1802  BuildNewLocalWithContext(stmt->scope());
1803  VisitInScope(stmt->statement(), stmt->scope());
1804}
1805
1806namespace {
1807
1808bool IsSmiLiteralSwitchCaseValue(Expression* expr) {
1809  if (expr->IsSmiLiteral() ||
1810      (expr->IsLiteral() && expr->AsLiteral()->IsNumber() &&
1811       expr->AsLiteral()->AsNumber() == 0.0)) {
1812    return true;
1813#ifdef DEBUG
1814  } else if (expr->IsLiteral() && expr->AsLiteral()->IsNumber()) {
1815    DCHECK(!IsSmiDouble(expr->AsLiteral()->AsNumber()));
1816#endif
1817  }
1818  return false;
1819}
1820
1821// Precondition: we called IsSmiLiteral to check this.
1822inline int ReduceToSmiSwitchCaseValue(Expression* expr) {
1823  if (V8_LIKELY(expr->IsSmiLiteral())) {
1824    return expr->AsLiteral()->AsSmiLiteral().value();
1825  } else {
1826    // Only the zero case is possible otherwise.
1827    DCHECK(expr->IsLiteral() && expr->AsLiteral()->IsNumber() &&
1828           expr->AsLiteral()->AsNumber() == -0.0);
1829    return 0;
1830  }
1831}
1832
1833// Is the range of Smi's small enough relative to number of cases?
1834inline bool IsSpreadAcceptable(int spread, int ncases) {
1835  return spread < FLAG_switch_table_spread_threshold * ncases;
1836}
1837
1838struct SwitchInfo {
1839  static const int kDefaultNotFound = -1;
1840
1841  std::map<int, CaseClause*> covered_cases;
1842  int default_case;
1843
1844  SwitchInfo() { default_case = kDefaultNotFound; }
1845
1846  bool DefaultExists() { return default_case != kDefaultNotFound; }
1847  bool CaseExists(int j) {
1848    return covered_cases.find(j) != covered_cases.end();
1849  }
1850  bool CaseExists(Expression* expr) {
1851    return IsSmiLiteralSwitchCaseValue(expr)
1852               ? CaseExists(ReduceToSmiSwitchCaseValue(expr))
1853               : false;
1854  }
1855  CaseClause* GetClause(int j) { return covered_cases[j]; }
1856
1857  bool IsDuplicate(CaseClause* clause) {
1858    return IsSmiLiteralSwitchCaseValue(clause->label()) &&
1859           CaseExists(clause->label()) &&
1860           clause != GetClause(ReduceToSmiSwitchCaseValue(clause->label()));
1861  }
1862  int MinCase() {
1863    return covered_cases.size() == 0 ? INT_MAX : covered_cases.begin()->first;
1864  }
1865  int MaxCase() {
1866    return covered_cases.size() == 0 ? INT_MIN : covered_cases.rbegin()->first;
1867  }
1868  void Print() {
1869    std::cout << "Covered_cases: " << '\n';
1870    for (auto iter = covered_cases.begin(); iter != covered_cases.end();
1871         ++iter) {
1872      std::cout << iter->first << "->" << iter->second << '\n';
1873    }
1874    std::cout << "Default_case: " << default_case << '\n';
1875  }
1876};
1877
1878// Checks whether we should use a jump table to implement a switch operation.
1879bool IsSwitchOptimizable(SwitchStatement* stmt, SwitchInfo* info) {
1880  ZonePtrList<CaseClause>* cases = stmt->cases();
1881
1882  for (int i = 0; i < cases->length(); ++i) {
1883    CaseClause* clause = cases->at(i);
1884    if (clause->is_default()) {
1885      continue;
1886    } else if (!(clause->label()->IsLiteral())) {
1887      // Don't consider Smi cases after a non-literal, because we
1888      // need to evaluate the non-literal.
1889      break;
1890    } else if (IsSmiLiteralSwitchCaseValue(clause->label())) {
1891      int value = ReduceToSmiSwitchCaseValue(clause->label());
1892      info->covered_cases.insert({value, clause});
1893    }
1894  }
1895
1896  // GCC also jump-table optimizes switch statements with 6 cases or more.
1897  if (static_cast<int>(info->covered_cases.size()) >=
1898      FLAG_switch_table_min_cases) {
1899    // Due to case spread will be used as the size of jump-table,
1900    // we need to check if it doesn't overflow by casting its
1901    // min and max bounds to int64_t, and calculate if the difference is less
1902    // than or equal to INT_MAX.
1903    int64_t min = static_cast<int64_t>(info->MinCase());
1904    int64_t max = static_cast<int64_t>(info->MaxCase());
1905    int64_t spread = max - min + 1;
1906
1907    DCHECK_GT(spread, 0);
1908
1909    // Check if casted spread is acceptable and doesn't overflow.
1910    if (spread <= INT_MAX &&
1911        IsSpreadAcceptable(static_cast<int>(spread), cases->length())) {
1912      return true;
1913    }
1914  }
1915  // Invariant- covered_cases has all cases and only cases that will go in the
1916  // jump table.
1917  info->covered_cases.clear();
1918  return false;
1919}
1920
1921}  // namespace
1922
1923// This adds a jump table optimization for switch statements with Smi cases.
1924// If there are 5+ non-duplicate Smi clauses, and they are sufficiently compact,
1925// we generate a jump table. In the fall-through path, we put the compare-jumps
1926// for the non-Smi cases.
1927
1928// e.g.
1929//
1930// switch(x){
1931//   case -0: out = 10;
1932//   case 1: out = 11; break;
1933//   case 0: out = 12; break;
1934//   case 2: out = 13;
1935//   case 3: out = 14; break;
1936//   case 0.5: out = 15; break;
1937//   case 4: out = 16;
1938//   case y: out = 17;
1939//   case 5: out = 18;
1940//   default: out = 19; break;
1941// }
1942
1943// becomes this pseudo-bytecode:
1944
1945//   lda x
1946//   star r1
1947//   test_type number
1948//   jump_if_false @fallthrough
1949//   ldar r1
1950//   test_greater_than_or_equal_to smi_min
1951//   jump_if_false @fallthrough
1952//   ldar r1
1953//   test_less_than_or_equal_to smi_max
1954//   jump_if_false @fallthrough
1955//   ldar r1
1956//   bitwise_or 0
1957//   star r2
1958//   test_strict_equal r1
1959//   jump_if_false @fallthrough
1960//   ldar r2
1961//   switch_on_smi {1: @case_1, 2: @case_2, 3: @case_3, 4: @case_4}
1962// @fallthrough:
1963//   jump_if_strict_equal -0.0 @case_minus_0.0
1964//   jump_if_strict_equal 0.5  @case_0.5
1965//   jump_if_strict_equal y    @case_y
1966//   jump_if_strict_equal 5    @case_5
1967//   jump @default
1968// @case_minus_0.0:
1969//   <out = 10>
1970// @case_1
1971//   <out = 11, break>
1972// @case_0:
1973//   <out = 12, break>
1974// @case_2:
1975//   <out = 13>
1976// @case_3:
1977//   <out = 14, break>
1978// @case_0.5:
1979//   <out = 15, break>
1980// @case_4:
1981//   <out = 16>
1982// @case_y:
1983//   <out = 17>
1984// @case_5:
1985//   <out = 18>
1986// @default:
1987//   <out = 19, break>
1988
1989void BytecodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1990  // We need this scope because we visit for register values. We have to
1991  // maintain a execution result scope where registers can be allocated.
1992  ZonePtrList<CaseClause>* clauses = stmt->cases();
1993
1994  SwitchInfo info;
1995  BytecodeJumpTable* jump_table = nullptr;
1996  bool use_jump_table = IsSwitchOptimizable(stmt, &info);
1997
1998  // N_comp_cases is number of cases we will generate comparison jumps for.
1999  // Note we ignore duplicate cases, since they are very unlikely.
2000
2001  int n_comp_cases = clauses->length();
2002  if (use_jump_table) {
2003    n_comp_cases -= static_cast<int>(info.covered_cases.size());
2004    jump_table = builder()->AllocateJumpTable(
2005        info.MaxCase() - info.MinCase() + 1, info.MinCase());
2006  }
2007
2008  // Are we still using any if-else bytecodes to evaluate the switch?
2009  bool use_jumps = n_comp_cases != 0;
2010
2011  SwitchBuilder switch_builder(builder(), block_coverage_builder_, stmt,
2012                               n_comp_cases, jump_table);
2013  ControlScopeForBreakable scope(this, stmt, &switch_builder);
2014  builder()->SetStatementPosition(stmt);
2015
2016  VisitForAccumulatorValue(stmt->tag());
2017
2018  if (use_jump_table) {
2019    // This also fills empty slots in jump table.
2020    Register r2 = register_allocator()->NewRegister();
2021
2022    Register r1 = register_allocator()->NewRegister();
2023    builder()->StoreAccumulatorInRegister(r1);
2024
2025    builder()->CompareTypeOf(TestTypeOfFlags::LiteralFlag::kNumber);
2026    switch_builder.JumpToFallThroughIfFalse();
2027    builder()->LoadAccumulatorWithRegister(r1);
2028
2029    // TODO(leszeks): Note these are duplicated range checks with the
2030    // SwitchOnSmi handler for the most part.
2031
2032    builder()->LoadLiteral(Smi::kMinValue);
2033    builder()->StoreAccumulatorInRegister(r2);
2034    builder()->CompareOperation(
2035        Token::Value::GTE, r1,
2036        feedback_index(feedback_spec()->AddCompareICSlot()));
2037
2038    switch_builder.JumpToFallThroughIfFalse();
2039    builder()->LoadAccumulatorWithRegister(r1);
2040
2041    builder()->LoadLiteral(Smi::kMaxValue);
2042    builder()->StoreAccumulatorInRegister(r2);
2043    builder()->CompareOperation(
2044        Token::Value::LTE, r1,
2045        feedback_index(feedback_spec()->AddCompareICSlot()));
2046
2047    switch_builder.JumpToFallThroughIfFalse();
2048    builder()->LoadAccumulatorWithRegister(r1);
2049
2050    builder()->BinaryOperationSmiLiteral(
2051        Token::Value::BIT_OR, Smi::FromInt(0),
2052        feedback_index(feedback_spec()->AddBinaryOpICSlot()));
2053
2054    builder()->StoreAccumulatorInRegister(r2);
2055    builder()->CompareOperation(
2056        Token::Value::EQ_STRICT, r1,
2057        feedback_index(feedback_spec()->AddCompareICSlot()));
2058
2059    switch_builder.JumpToFallThroughIfFalse();
2060    builder()->LoadAccumulatorWithRegister(r2);
2061
2062    switch_builder.EmitJumpTableIfExists(info.MinCase(), info.MaxCase(),
2063                                         info.covered_cases);
2064
2065    if (use_jumps) {
2066      builder()->LoadAccumulatorWithRegister(r1);
2067    }
2068  }
2069
2070  int case_compare_ctr = 0;
2071#ifdef DEBUG
2072  std::unordered_map<int, int> case_ctr_checker;
2073#endif
2074
2075  if (use_jumps) {
2076    Register tag_holder = register_allocator()->NewRegister();
2077    FeedbackSlot slot = clauses->length() > 0
2078                            ? feedback_spec()->AddCompareICSlot()
2079                            : FeedbackSlot::Invalid();
2080    builder()->StoreAccumulatorInRegister(tag_holder);
2081
2082    for (int i = 0; i < clauses->length(); ++i) {
2083      CaseClause* clause = clauses->at(i);
2084      if (clause->is_default()) {
2085        info.default_case = i;
2086      } else if (!info.CaseExists(clause->label())) {
2087        // Perform label comparison as if via '===' with tag.
2088        VisitForAccumulatorValue(clause->label());
2089        builder()->CompareOperation(Token::Value::EQ_STRICT, tag_holder,
2090                                    feedback_index(slot));
2091#ifdef DEBUG
2092        case_ctr_checker[i] = case_compare_ctr;
2093#endif
2094        switch_builder.JumpToCaseIfTrue(ToBooleanMode::kAlreadyBoolean,
2095                                        case_compare_ctr++);
2096      }
2097    }
2098  }
2099
2100  // For fall-throughs after comparisons (or out-of-range/non-Smi's for jump
2101  // tables).
2102  if (info.DefaultExists()) {
2103    switch_builder.JumpToDefault();
2104  } else {
2105    switch_builder.Break();
2106  }
2107
2108  case_compare_ctr = 0;
2109  for (int i = 0; i < clauses->length(); ++i) {
2110    CaseClause* clause = clauses->at(i);
2111    if (i != info.default_case) {
2112      if (!info.IsDuplicate(clause)) {
2113        bool use_table = use_jump_table && info.CaseExists(clause->label());
2114        if (!use_table) {
2115// Guarantee that we should generate compare/jump if no table.
2116#ifdef DEBUG
2117          DCHECK(case_ctr_checker[i] == case_compare_ctr);
2118#endif
2119          switch_builder.BindCaseTargetForCompareJump(case_compare_ctr++,
2120                                                      clause);
2121        } else {
2122          // Use jump table if this is not a duplicate label.
2123          switch_builder.BindCaseTargetForJumpTable(
2124              ReduceToSmiSwitchCaseValue(clause->label()), clause);
2125        }
2126      }
2127    } else {
2128      switch_builder.BindDefault(clause);
2129    }
2130    // Regardless, generate code (in case of fall throughs).
2131    VisitStatements(clause->statements());
2132  }
2133}
2134
2135template <typename TryBodyFunc, typename CatchBodyFunc>
2136void BytecodeGenerator::BuildTryCatch(
2137    TryBodyFunc try_body_func, CatchBodyFunc catch_body_func,
2138    HandlerTable::CatchPrediction catch_prediction,
2139    TryCatchStatement* stmt_for_coverage) {
2140  if (builder()->RemainderOfBlockIsDead()) return;
2141
2142  TryCatchBuilder try_control_builder(
2143      builder(),
2144      stmt_for_coverage == nullptr ? nullptr : block_coverage_builder_,
2145      stmt_for_coverage, catch_prediction);
2146
2147  // Preserve the context in a dedicated register, so that it can be restored
2148  // when the handler is entered by the stack-unwinding machinery.
2149  // TODO(ignition): Be smarter about register allocation.
2150  Register context = register_allocator()->NewRegister();
2151  builder()->MoveRegister(Register::current_context(), context);
2152
2153  // Evaluate the try-block inside a control scope. This simulates a handler
2154  // that is intercepting 'throw' control commands.
2155  try_control_builder.BeginTry(context);
2156  {
2157    ControlScopeForTryCatch scope(this, &try_control_builder);
2158    try_body_func();
2159  }
2160  try_control_builder.EndTry();
2161
2162  catch_body_func(context);
2163
2164  try_control_builder.EndCatch();
2165}
2166
2167template <typename TryBodyFunc, typename FinallyBodyFunc>
2168void BytecodeGenerator::BuildTryFinally(
2169    TryBodyFunc try_body_func, FinallyBodyFunc finally_body_func,
2170    HandlerTable::CatchPrediction catch_prediction,
2171    TryFinallyStatement* stmt_for_coverage) {
2172  if (builder()->RemainderOfBlockIsDead()) return;
2173
2174  // We can't know whether the finally block will override ("catch") an
2175  // exception thrown in the try block, so we just adopt the outer prediction.
2176  TryFinallyBuilder try_control_builder(
2177      builder(),
2178      stmt_for_coverage == nullptr ? nullptr : block_coverage_builder_,
2179      stmt_for_coverage, catch_prediction);
2180
2181  // We keep a record of all paths that enter the finally-block to be able to
2182  // dispatch to the correct continuation point after the statements in the
2183  // finally-block have been evaluated.
2184  //
2185  // The try-finally construct can enter the finally-block in three ways:
2186  // 1. By exiting the try-block normally, falling through at the end.
2187  // 2. By exiting the try-block with a function-local control flow transfer
2188  //    (i.e. through break/continue/return statements).
2189  // 3. By exiting the try-block with a thrown exception.
2190  //
2191  // The result register semantics depend on how the block was entered:
2192  //  - ReturnStatement: It represents the return value being returned.
2193  //  - ThrowStatement: It represents the exception being thrown.
2194  //  - BreakStatement/ContinueStatement: Undefined and not used.
2195  //  - Falling through into finally-block: Undefined and not used.
2196  Register token = register_allocator()->NewRegister();
2197  Register result = register_allocator()->NewRegister();
2198  ControlScope::DeferredCommands commands(this, token, result);
2199
2200  // Preserve the context in a dedicated register, so that it can be restored
2201  // when the handler is entered by the stack-unwinding machinery.
2202  // TODO(ignition): Be smarter about register allocation.
2203  Register context = register_allocator()->NewRegister();
2204  builder()->MoveRegister(Register::current_context(), context);
2205
2206  // Evaluate the try-block inside a control scope. This simulates a handler
2207  // that is intercepting all control commands.
2208  try_control_builder.BeginTry(context);
2209  {
2210    ControlScopeForTryFinally scope(this, &try_control_builder, &commands);
2211    try_body_func();
2212  }
2213  try_control_builder.EndTry();
2214
2215  // Record fall-through and exception cases.
2216  commands.RecordFallThroughPath();
2217  try_control_builder.LeaveTry();
2218  try_control_builder.BeginHandler();
2219  commands.RecordHandlerReThrowPath();
2220
2221  // Pending message object is saved on entry.
2222  try_control_builder.BeginFinally();
2223  Register message = context;  // Reuse register.
2224
2225  // Clear message object as we enter the finally block.
2226  builder()->LoadTheHole().SetPendingMessage().StoreAccumulatorInRegister(
2227      message);
2228
2229  // Evaluate the finally-block.
2230  finally_body_func(token);
2231  try_control_builder.EndFinally();
2232
2233  // Pending message object is restored on exit.
2234  builder()->LoadAccumulatorWithRegister(message).SetPendingMessage();
2235
2236  // Dynamic dispatch after the finally-block.
2237  commands.ApplyDeferredCommands();
2238}
2239
2240void BytecodeGenerator::VisitIterationBody(IterationStatement* stmt,
2241                                           LoopBuilder* loop_builder) {
2242  loop_builder->LoopBody();
2243  ControlScopeForIteration execution_control(this, stmt, loop_builder);
2244  Visit(stmt->body());
2245  loop_builder->BindContinueTarget();
2246}
2247
2248void BytecodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
2249  LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
2250  if (stmt->cond()->ToBooleanIsFalse()) {
2251    // Since we know that the condition is false, we don't create a loop.
2252    // Therefore, we don't create a LoopScope (and thus we don't create a header
2253    // and a JumpToHeader). However, we still need to iterate once through the
2254    // body.
2255    VisitIterationBody(stmt, &loop_builder);
2256  } else if (stmt->cond()->ToBooleanIsTrue()) {
2257    LoopScope loop_scope(this, &loop_builder);
2258    VisitIterationBody(stmt, &loop_builder);
2259  } else {
2260    LoopScope loop_scope(this, &loop_builder);
2261    VisitIterationBody(stmt, &loop_builder);
2262    builder()->SetExpressionAsStatementPosition(stmt->cond());
2263    BytecodeLabels loop_backbranch(zone());
2264    VisitForTest(stmt->cond(), &loop_backbranch, loop_builder.break_labels(),
2265                 TestFallthrough::kThen);
2266    loop_backbranch.Bind(builder());
2267  }
2268}
2269
2270void BytecodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
2271  LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
2272
2273  if (stmt->cond()->ToBooleanIsFalse()) {
2274    // If the condition is false there is no need to generate the loop.
2275    return;
2276  }
2277
2278  LoopScope loop_scope(this, &loop_builder);
2279  if (!stmt->cond()->ToBooleanIsTrue()) {
2280    builder()->SetExpressionAsStatementPosition(stmt->cond());
2281    BytecodeLabels loop_body(zone());
2282    VisitForTest(stmt->cond(), &loop_body, loop_builder.break_labels(),
2283                 TestFallthrough::kThen);
2284    loop_body.Bind(builder());
2285  }
2286  VisitIterationBody(stmt, &loop_builder);
2287}
2288
2289void BytecodeGenerator::VisitForStatement(ForStatement* stmt) {
2290  if (stmt->init() != nullptr) {
2291    Visit(stmt->init());
2292  }
2293
2294  LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
2295  if (stmt->cond() && stmt->cond()->ToBooleanIsFalse()) {
2296    // If the condition is known to be false there is no need to generate
2297    // body, next or condition blocks. Init block should be generated.
2298    return;
2299  }
2300
2301  LoopScope loop_scope(this, &loop_builder);
2302  if (stmt->cond() && !stmt->cond()->ToBooleanIsTrue()) {
2303    builder()->SetExpressionAsStatementPosition(stmt->cond());
2304    BytecodeLabels loop_body(zone());
2305    VisitForTest(stmt->cond(), &loop_body, loop_builder.break_labels(),
2306                 TestFallthrough::kThen);
2307    loop_body.Bind(builder());
2308  }
2309  VisitIterationBody(stmt, &loop_builder);
2310  if (stmt->next() != nullptr) {
2311    builder()->SetStatementPosition(stmt->next());
2312    Visit(stmt->next());
2313  }
2314}
2315
2316void BytecodeGenerator::VisitForInStatement(ForInStatement* stmt) {
2317  if (stmt->subject()->IsNullLiteral() ||
2318      stmt->subject()->IsUndefinedLiteral()) {
2319    // ForIn generates lots of code, skip if it wouldn't produce any effects.
2320    return;
2321  }
2322
2323  BytecodeLabel subject_undefined_label;
2324  FeedbackSlot slot = feedback_spec()->AddForInSlot();
2325
2326  // Prepare the state for executing ForIn.
2327  builder()->SetExpressionAsStatementPosition(stmt->subject());
2328  VisitForAccumulatorValue(stmt->subject());
2329  builder()->JumpIfUndefinedOrNull(&subject_undefined_label);
2330  Register receiver = register_allocator()->NewRegister();
2331  builder()->ToObject(receiver);
2332
2333  // Used as kRegTriple and kRegPair in ForInPrepare and ForInNext.
2334  RegisterList triple = register_allocator()->NewRegisterList(3);
2335  Register cache_length = triple[2];
2336  builder()->ForInEnumerate(receiver);
2337  builder()->ForInPrepare(triple, feedback_index(slot));
2338
2339  // Set up loop counter
2340  Register index = register_allocator()->NewRegister();
2341  builder()->LoadLiteral(Smi::zero());
2342  builder()->StoreAccumulatorInRegister(index);
2343
2344  // The loop
2345  {
2346    LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
2347    LoopScope loop_scope(this, &loop_builder);
2348    builder()->SetExpressionAsStatementPosition(stmt->each());
2349    builder()->ForInContinue(index, cache_length);
2350    loop_builder.BreakIfFalse(ToBooleanMode::kAlreadyBoolean);
2351    builder()->ForInNext(receiver, index, triple.Truncate(2),
2352                         feedback_index(slot));
2353    loop_builder.ContinueIfUndefined();
2354
2355    // Assign accumulator value to the 'each' target.
2356    {
2357      EffectResultScope scope(this);
2358      // Make sure to preserve the accumulator across the PrepareAssignmentLhs
2359      // call.
2360      AssignmentLhsData lhs_data = PrepareAssignmentLhs(
2361          stmt->each(), AccumulatorPreservingMode::kPreserve);
2362      builder()->SetExpressionPosition(stmt->each());
2363      BuildAssignment(lhs_data, Token::ASSIGN, LookupHoistingMode::kNormal);
2364    }
2365
2366    VisitIterationBody(stmt, &loop_builder);
2367    builder()->ForInStep(index);
2368    builder()->StoreAccumulatorInRegister(index);
2369  }
2370  builder()->Bind(&subject_undefined_label);
2371}
2372
2373// Desugar a for-of statement into an application of the iteration protocol.
2374//
2375// for (EACH of SUBJECT) BODY
2376//
2377//   becomes
2378//
2379// iterator = %GetIterator(SUBJECT)
2380// try {
2381//
2382//   loop {
2383//     // Make sure we are considered 'done' if .next(), .done or .value fail.
2384//     done = true
2385//     value = iterator.next()
2386//     if (value.done) break;
2387//     value = value.value
2388//     done = false
2389//
2390//     EACH = value
2391//     BODY
2392//   }
2393//   done = true
2394//
2395// } catch(e) {
2396//   iteration_continuation = RETHROW
2397// } finally {
2398//   %FinalizeIteration(iterator, done, iteration_continuation)
2399// }
2400void BytecodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
2401  EffectResultScope effect_scope(this);
2402
2403  builder()->SetExpressionAsStatementPosition(stmt->subject());
2404  VisitForAccumulatorValue(stmt->subject());
2405
2406  // Store the iterator in a dedicated register so that it can be closed on
2407  // exit, and the 'done' value in a dedicated register so that it can be
2408  // changed and accessed independently of the iteration result.
2409  IteratorRecord iterator = BuildGetIteratorRecord(stmt->type());
2410  Register done = register_allocator()->NewRegister();
2411  builder()->LoadFalse();
2412  builder()->StoreAccumulatorInRegister(done);
2413
2414  BuildTryFinally(
2415      // Try block.
2416      [&]() {
2417        Register next_result = register_allocator()->NewRegister();
2418
2419        LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
2420        LoopScope loop_scope(this, &loop_builder);
2421
2422        builder()->LoadTrue().StoreAccumulatorInRegister(done);
2423
2424        // Call the iterator's .next() method. Break from the loop if the `done`
2425        // property is truthy, otherwise load the value from the iterator result
2426        // and append the argument.
2427        builder()->SetExpressionAsStatementPosition(stmt->each());
2428        BuildIteratorNext(iterator, next_result);
2429        builder()->LoadNamedProperty(
2430            next_result, ast_string_constants()->done_string(),
2431            feedback_index(feedback_spec()->AddLoadICSlot()));
2432        loop_builder.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
2433
2434        builder()
2435            // value = value.value
2436            ->LoadNamedProperty(
2437                next_result, ast_string_constants()->value_string(),
2438                feedback_index(feedback_spec()->AddLoadICSlot()));
2439        // done = false, before the assignment to each happens, so that done is
2440        // false if the assignment throws.
2441        builder()
2442            ->StoreAccumulatorInRegister(next_result)
2443            .LoadFalse()
2444            .StoreAccumulatorInRegister(done);
2445
2446        // Assign to the 'each' target.
2447        AssignmentLhsData lhs_data = PrepareAssignmentLhs(stmt->each());
2448        builder()->LoadAccumulatorWithRegister(next_result);
2449        BuildAssignment(lhs_data, Token::ASSIGN, LookupHoistingMode::kNormal);
2450
2451        VisitIterationBody(stmt, &loop_builder);
2452      },
2453      // Finally block.
2454      [&](Register iteration_continuation_token) {
2455        // Finish the iteration in the finally block.
2456        BuildFinalizeIteration(iterator, done, iteration_continuation_token);
2457      },
2458      HandlerTable::UNCAUGHT);
2459}
2460
2461void BytecodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
2462  // Update catch prediction tracking. The updated catch_prediction value lasts
2463  // until the end of the try_block in the AST node, and does not apply to the
2464  // catch_block.
2465  HandlerTable::CatchPrediction outer_catch_prediction = catch_prediction();
2466  set_catch_prediction(stmt->GetCatchPrediction(outer_catch_prediction));
2467
2468  BuildTryCatch(
2469      // Try body.
2470      [&]() {
2471        Visit(stmt->try_block());
2472        set_catch_prediction(outer_catch_prediction);
2473      },
2474      // Catch body.
2475      [&](Register context) {
2476        if (stmt->scope()) {
2477          // Create a catch scope that binds the exception.
2478          BuildNewLocalCatchContext(stmt->scope());
2479          builder()->StoreAccumulatorInRegister(context);
2480        }
2481
2482        // If requested, clear message object as we enter the catch block.
2483        if (stmt->ShouldClearPendingException(outer_catch_prediction)) {
2484          builder()->LoadTheHole().SetPendingMessage();
2485        }
2486
2487        // Load the catch context into the accumulator.
2488        builder()->LoadAccumulatorWithRegister(context);
2489
2490        // Evaluate the catch-block.
2491        if (stmt->scope()) {
2492          VisitInScope(stmt->catch_block(), stmt->scope());
2493        } else {
2494          VisitBlock(stmt->catch_block());
2495        }
2496      },
2497      catch_prediction(), stmt);
2498}
2499
2500void BytecodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
2501  BuildTryFinally(
2502      // Try block.
2503      [&]() { Visit(stmt->try_block()); },
2504      // Finally block.
2505      [&](Register body_continuation_token) { Visit(stmt->finally_block()); },
2506      catch_prediction(), stmt);
2507}
2508
2509void BytecodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
2510  builder()->SetStatementPosition(stmt);
2511  builder()->Debugger();
2512}
2513
2514void BytecodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
2515  DCHECK_EQ(expr->scope()->outer_scope(), current_scope());
2516  uint8_t flags = CreateClosureFlags::Encode(
2517      expr->pretenure(), closure_scope()->is_function_scope(),
2518      info()->flags().might_always_opt());
2519  size_t entry = builder()->AllocateDeferredConstantPoolEntry();
2520  builder()->CreateClosure(entry, GetCachedCreateClosureSlot(expr), flags);
2521  function_literals_.push_back(std::make_pair(expr, entry));
2522  AddToEagerLiteralsIfEager(expr);
2523}
2524
2525void BytecodeGenerator::AddToEagerLiteralsIfEager(FunctionLiteral* literal) {
2526  // Only parallel compile when there's a script (not the case for source
2527  // position collection).
2528  if (!script_.is_null() && literal->should_parallel_compile()) {
2529    // If we should normally be eagerly compiling this function, we must be here
2530    // because of post_parallel_compile_tasks_for_eager_toplevel.
2531    DCHECK_IMPLIES(
2532        literal->ShouldEagerCompile(),
2533        info()->flags().post_parallel_compile_tasks_for_eager_toplevel());
2534    // There exists a lazy compile dispatcher.
2535    DCHECK(info()->dispatcher());
2536    // There exists a cloneable character stream.
2537    DCHECK(info()->character_stream()->can_be_cloned_for_parallel_access());
2538
2539    UnparkedScope scope(local_isolate_);
2540    // If there doesn't already exist a SharedFunctionInfo for this function,
2541    // then create one and enqueue it. Otherwise, we're reparsing (e.g. for the
2542    // debugger, source position collection, call printing, recompile after
2543    // flushing, etc.) and don't want to over-compile.
2544    Handle<SharedFunctionInfo> shared_info;
2545    if (!Script::FindSharedFunctionInfo(script_, local_isolate_, literal)
2546             .ToHandle(&shared_info)) {
2547      shared_info =
2548          Compiler::GetSharedFunctionInfo(literal, script_, local_isolate_);
2549      info()->dispatcher()->Enqueue(local_isolate_, shared_info,
2550                                    info()->character_stream()->Clone());
2551    }
2552  } else if (eager_inner_literals_ && literal->ShouldEagerCompile()) {
2553    DCHECK(!IsInEagerLiterals(literal, *eager_inner_literals_));
2554    DCHECK(!literal->should_parallel_compile());
2555    eager_inner_literals_->push_back(literal);
2556  }
2557}
2558
2559void BytecodeGenerator::BuildClassLiteral(ClassLiteral* expr, Register name) {
2560  size_t class_boilerplate_entry =
2561      builder()->AllocateDeferredConstantPoolEntry();
2562  class_literals_.push_back(std::make_pair(expr, class_boilerplate_entry));
2563
2564  VisitDeclarations(expr->scope()->declarations());
2565  Register class_constructor = register_allocator()->NewRegister();
2566
2567  // Create the class brand symbol and store it on the context during class
2568  // evaluation. This will be stored in the instance later in the constructor.
2569  // We do this early so that invalid access to private methods or accessors
2570  // in computed property keys throw.
2571  if (expr->scope()->brand() != nullptr) {
2572    Register brand = register_allocator()->NewRegister();
2573    const AstRawString* class_name =
2574        expr->scope()->class_variable() != nullptr
2575            ? expr->scope()->class_variable()->raw_name()
2576            : ast_string_constants()->anonymous_string();
2577    builder()
2578        ->LoadLiteral(class_name)
2579        .StoreAccumulatorInRegister(brand)
2580        .CallRuntime(Runtime::kCreatePrivateBrandSymbol, brand);
2581    BuildVariableAssignment(expr->scope()->brand(), Token::INIT,
2582                            HoleCheckMode::kElided);
2583  }
2584
2585  AccessorTable<ClassLiteral::Property> private_accessors(zone());
2586  for (int i = 0; i < expr->private_members()->length(); i++) {
2587    ClassLiteral::Property* property = expr->private_members()->at(i);
2588    DCHECK(property->is_private());
2589    switch (property->kind()) {
2590      case ClassLiteral::Property::FIELD: {
2591        // Initialize the private field variables early.
2592        // Create the private name symbols for fields during class
2593        // evaluation and store them on the context. These will be
2594        // used as keys later during instance or static initialization.
2595        RegisterAllocationScope private_name_register_scope(this);
2596        Register private_name = register_allocator()->NewRegister();
2597        VisitForRegisterValue(property->key(), private_name);
2598        builder()
2599            ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
2600            .StoreAccumulatorInRegister(private_name)
2601            .CallRuntime(Runtime::kCreatePrivateNameSymbol, private_name);
2602        DCHECK_NOT_NULL(property->private_name_var());
2603        BuildVariableAssignment(property->private_name_var(), Token::INIT,
2604                                HoleCheckMode::kElided);
2605        break;
2606      }
2607      case ClassLiteral::Property::METHOD: {
2608        RegisterAllocationScope register_scope(this);
2609        VisitForAccumulatorValue(property->value());
2610        BuildVariableAssignment(property->private_name_var(), Token::INIT,
2611                                HoleCheckMode::kElided);
2612        break;
2613      }
2614      // Collect private accessors into a table to merge the creation of
2615      // those closures later.
2616      case ClassLiteral::Property::GETTER: {
2617        Literal* key = property->key()->AsLiteral();
2618        DCHECK_NULL(private_accessors.LookupOrInsert(key)->getter);
2619        private_accessors.LookupOrInsert(key)->getter = property;
2620        break;
2621      }
2622      case ClassLiteral::Property::SETTER: {
2623        Literal* key = property->key()->AsLiteral();
2624        DCHECK_NULL(private_accessors.LookupOrInsert(key)->setter);
2625        private_accessors.LookupOrInsert(key)->setter = property;
2626        break;
2627      }
2628      default:
2629        UNREACHABLE();
2630    }
2631  }
2632
2633  {
2634    RegisterAllocationScope register_scope(this);
2635    RegisterList args = register_allocator()->NewGrowableRegisterList();
2636
2637    Register class_boilerplate = register_allocator()->GrowRegisterList(&args);
2638    Register class_constructor_in_args =
2639        register_allocator()->GrowRegisterList(&args);
2640    Register super_class = register_allocator()->GrowRegisterList(&args);
2641    DCHECK_EQ(ClassBoilerplate::kFirstDynamicArgumentIndex,
2642              args.register_count());
2643
2644    VisitForAccumulatorValueOrTheHole(expr->extends());
2645    builder()->StoreAccumulatorInRegister(super_class);
2646
2647    VisitFunctionLiteral(expr->constructor());
2648    builder()
2649        ->StoreAccumulatorInRegister(class_constructor)
2650        .MoveRegister(class_constructor, class_constructor_in_args)
2651        .LoadConstantPoolEntry(class_boilerplate_entry)
2652        .StoreAccumulatorInRegister(class_boilerplate);
2653
2654    // Create computed names and method values nodes to store into the literal.
2655    for (int i = 0; i < expr->public_members()->length(); i++) {
2656      ClassLiteral::Property* property = expr->public_members()->at(i);
2657      if (property->is_computed_name()) {
2658        Register key = register_allocator()->GrowRegisterList(&args);
2659
2660        builder()->SetExpressionAsStatementPosition(property->key());
2661        BuildLoadPropertyKey(property, key);
2662        if (property->is_static()) {
2663          // The static prototype property is read only. We handle the non
2664          // computed property name case in the parser. Since this is the only
2665          // case where we need to check for an own read only property we
2666          // special case this so we do not need to do this for every property.
2667
2668          FeedbackSlot slot = GetDummyCompareICSlot();
2669          BytecodeLabel done;
2670          builder()
2671              ->LoadLiteral(ast_string_constants()->prototype_string())
2672              .CompareOperation(Token::Value::EQ_STRICT, key,
2673                                feedback_index(slot))
2674              .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &done)
2675              .CallRuntime(Runtime::kThrowStaticPrototypeError)
2676              .Bind(&done);
2677        }
2678
2679        if (property->kind() == ClassLiteral::Property::FIELD) {
2680          DCHECK(!property->is_private());
2681          // Initialize field's name variable with the computed name.
2682          DCHECK_NOT_NULL(property->computed_name_var());
2683          builder()->LoadAccumulatorWithRegister(key);
2684          BuildVariableAssignment(property->computed_name_var(), Token::INIT,
2685                                  HoleCheckMode::kElided);
2686        }
2687      }
2688
2689      DCHECK(!property->is_private());
2690
2691      if (property->kind() == ClassLiteral::Property::FIELD) {
2692        // We don't compute field's value here, but instead do it in the
2693        // initializer function.
2694        continue;
2695      }
2696
2697      Register value = register_allocator()->GrowRegisterList(&args);
2698      VisitForRegisterValue(property->value(), value);
2699    }
2700
2701    builder()->CallRuntime(Runtime::kDefineClass, args);
2702  }
2703  Register prototype = register_allocator()->NewRegister();
2704  builder()->StoreAccumulatorInRegister(prototype);
2705
2706  // Assign to the home object variable. Accumulator already contains the
2707  // prototype.
2708  Variable* home_object_variable = expr->home_object();
2709  if (home_object_variable != nullptr) {
2710    DCHECK(home_object_variable->is_used());
2711    DCHECK(home_object_variable->IsContextSlot());
2712    BuildVariableAssignment(home_object_variable, Token::INIT,
2713                            HoleCheckMode::kElided);
2714  }
2715  Variable* static_home_object_variable = expr->static_home_object();
2716  if (static_home_object_variable != nullptr) {
2717    DCHECK(static_home_object_variable->is_used());
2718    DCHECK(static_home_object_variable->IsContextSlot());
2719    builder()->LoadAccumulatorWithRegister(class_constructor);
2720    BuildVariableAssignment(static_home_object_variable, Token::INIT,
2721                            HoleCheckMode::kElided);
2722  }
2723
2724  // Assign to class variable.
2725  Variable* class_variable = expr->scope()->class_variable();
2726  if (class_variable != nullptr && class_variable->is_used()) {
2727    DCHECK(class_variable->IsStackLocal() || class_variable->IsContextSlot());
2728    builder()->LoadAccumulatorWithRegister(class_constructor);
2729    BuildVariableAssignment(class_variable, Token::INIT,
2730                            HoleCheckMode::kElided);
2731  }
2732
2733  // Define private accessors, using only a single call to the runtime for
2734  // each pair of corresponding getters and setters, in the order the first
2735  // component is declared.
2736  for (auto accessors : private_accessors.ordered_accessors()) {
2737    RegisterAllocationScope inner_register_scope(this);
2738    RegisterList accessors_reg = register_allocator()->NewRegisterList(2);
2739    ClassLiteral::Property* getter = accessors.second->getter;
2740    ClassLiteral::Property* setter = accessors.second->setter;
2741    VisitLiteralAccessor(getter, accessors_reg[0]);
2742    VisitLiteralAccessor(setter, accessors_reg[1]);
2743    builder()->CallRuntime(Runtime::kCreatePrivateAccessors, accessors_reg);
2744    Variable* var = getter != nullptr ? getter->private_name_var()
2745                                      : setter->private_name_var();
2746    DCHECK_NOT_NULL(var);
2747    BuildVariableAssignment(var, Token::INIT, HoleCheckMode::kElided);
2748  }
2749
2750  if (expr->instance_members_initializer_function() != nullptr) {
2751    Register initializer =
2752        VisitForRegisterValue(expr->instance_members_initializer_function());
2753
2754    FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
2755    builder()
2756        ->LoadAccumulatorWithRegister(initializer)
2757        .StoreClassFieldsInitializer(class_constructor, feedback_index(slot))
2758        .LoadAccumulatorWithRegister(class_constructor);
2759  }
2760
2761  if (expr->static_initializer() != nullptr) {
2762    // TODO(gsathya): This can be optimized away to be a part of the
2763    // class boilerplate in the future. The name argument can be
2764    // passed to the DefineClass runtime function and have it set
2765    // there.
2766    if (name.is_valid()) {
2767      Register key = register_allocator()->NewRegister();
2768      builder()
2769          ->LoadLiteral(ast_string_constants()->name_string())
2770          .StoreAccumulatorInRegister(key);
2771
2772      DefineKeyedOwnPropertyInLiteralFlags data_property_flags =
2773          DefineKeyedOwnPropertyInLiteralFlag::kNoFlags;
2774      FeedbackSlot slot =
2775          feedback_spec()->AddDefineKeyedOwnPropertyInLiteralICSlot();
2776      builder()
2777          ->LoadAccumulatorWithRegister(name)
2778          .DefineKeyedOwnPropertyInLiteral(class_constructor, key,
2779                                           data_property_flags,
2780                                           feedback_index(slot));
2781    }
2782
2783    RegisterList args = register_allocator()->NewRegisterList(1);
2784    Register initializer = VisitForRegisterValue(expr->static_initializer());
2785
2786    builder()
2787        ->MoveRegister(class_constructor, args[0])
2788        .CallProperty(initializer, args,
2789                      feedback_index(feedback_spec()->AddCallICSlot()));
2790  }
2791  builder()->LoadAccumulatorWithRegister(class_constructor);
2792}
2793
2794void BytecodeGenerator::VisitClassLiteral(ClassLiteral* expr) {
2795  VisitClassLiteral(expr, Register::invalid_value());
2796}
2797
2798void BytecodeGenerator::VisitClassLiteral(ClassLiteral* expr, Register name) {
2799  CurrentScope current_scope(this, expr->scope());
2800  DCHECK_NOT_NULL(expr->scope());
2801  if (expr->scope()->NeedsContext()) {
2802    // Make sure to associate the source position for the class
2803    // after the block context is created. Otherwise we have a mismatch
2804    // between the scope and the context, where we already are in a
2805    // block context for the class, but not yet in the class scope.
2806    BytecodeSourceInfo source_info = builder()->PopSourcePosition();
2807    BuildNewLocalBlockContext(expr->scope());
2808    ContextScope scope(this, expr->scope());
2809    builder()->PushSourcePosition(source_info);
2810    BuildClassLiteral(expr, name);
2811  } else {
2812    BuildClassLiteral(expr, name);
2813  }
2814}
2815
2816void BytecodeGenerator::BuildClassProperty(ClassLiteral::Property* property) {
2817  RegisterAllocationScope register_scope(this);
2818  Register key;
2819
2820  // Private methods are not initialized in BuildClassProperty.
2821  DCHECK_IMPLIES(property->is_private(),
2822                 property->kind() == ClassLiteral::Property::FIELD);
2823  builder()->SetExpressionPosition(property->key());
2824
2825  bool is_literal_store = property->key()->IsPropertyName() &&
2826                          !property->is_computed_name() &&
2827                          !property->is_private();
2828
2829  if (!is_literal_store) {
2830    key = register_allocator()->NewRegister();
2831    if (property->is_computed_name()) {
2832      DCHECK_EQ(property->kind(), ClassLiteral::Property::FIELD);
2833      DCHECK(!property->is_private());
2834      Variable* var = property->computed_name_var();
2835      DCHECK_NOT_NULL(var);
2836      // The computed name is already evaluated and stored in a variable at
2837      // class definition time.
2838      BuildVariableLoad(var, HoleCheckMode::kElided);
2839      builder()->StoreAccumulatorInRegister(key);
2840    } else if (property->is_private()) {
2841      Variable* private_name_var = property->private_name_var();
2842      DCHECK_NOT_NULL(private_name_var);
2843      BuildVariableLoad(private_name_var, HoleCheckMode::kElided);
2844      builder()->StoreAccumulatorInRegister(key);
2845    } else {
2846      VisitForRegisterValue(property->key(), key);
2847    }
2848  }
2849
2850  builder()->SetExpressionAsStatementPosition(property->value());
2851  VisitForAccumulatorValue(property->value());
2852
2853  if (is_literal_store) {
2854    FeedbackSlot slot = feedback_spec()->AddDefineNamedOwnICSlot();
2855    builder()->DefineNamedOwnProperty(
2856        builder()->Receiver(),
2857        property->key()->AsLiteral()->AsRawPropertyName(),
2858        feedback_index(slot));
2859  } else {
2860    FeedbackSlot slot = feedback_spec()->AddDefineKeyedOwnICSlot();
2861    builder()->DefineKeyedOwnProperty(builder()->Receiver(), key,
2862                                      feedback_index(slot));
2863  }
2864}
2865
2866void BytecodeGenerator::VisitInitializeClassMembersStatement(
2867    InitializeClassMembersStatement* stmt) {
2868  for (int i = 0; i < stmt->fields()->length(); i++) {
2869    BuildClassProperty(stmt->fields()->at(i));
2870  }
2871}
2872
2873void BytecodeGenerator::VisitInitializeClassStaticElementsStatement(
2874    InitializeClassStaticElementsStatement* stmt) {
2875  for (int i = 0; i < stmt->elements()->length(); i++) {
2876    ClassLiteral::StaticElement* element = stmt->elements()->at(i);
2877    switch (element->kind()) {
2878      case ClassLiteral::StaticElement::PROPERTY:
2879        BuildClassProperty(element->property());
2880        break;
2881      case ClassLiteral::StaticElement::STATIC_BLOCK:
2882        VisitBlock(element->static_block());
2883        break;
2884    }
2885  }
2886}
2887
2888void BytecodeGenerator::BuildInvalidPropertyAccess(MessageTemplate tmpl,
2889                                                   Property* property) {
2890  RegisterAllocationScope register_scope(this);
2891  const AstRawString* name = property->key()->AsVariableProxy()->raw_name();
2892  RegisterList args = register_allocator()->NewRegisterList(2);
2893  builder()
2894      ->LoadLiteral(Smi::FromEnum(tmpl))
2895      .StoreAccumulatorInRegister(args[0])
2896      .LoadLiteral(name)
2897      .StoreAccumulatorInRegister(args[1])
2898      .CallRuntime(Runtime::kNewTypeError, args)
2899      .Throw();
2900}
2901
2902void BytecodeGenerator::BuildPrivateBrandInitialization(Register receiver,
2903                                                        Variable* brand) {
2904  BuildVariableLoad(brand, HoleCheckMode::kElided);
2905  int depth = execution_context()->ContextChainDepth(brand->scope());
2906  ContextScope* class_context = execution_context()->Previous(depth);
2907  if (class_context) {
2908    Register brand_reg = register_allocator()->NewRegister();
2909    FeedbackSlot slot = feedback_spec()->AddDefineKeyedOwnICSlot();
2910    builder()
2911        ->StoreAccumulatorInRegister(brand_reg)
2912        .LoadAccumulatorWithRegister(class_context->reg())
2913        .DefineKeyedOwnProperty(receiver, brand_reg, feedback_index(slot));
2914  } else {
2915    // We are in the slow case where super() is called from a nested
2916    // arrow function or a eval(), so the class scope context isn't
2917    // tracked in a context register in the stack, and we have to
2918    // walk the context chain from the runtime to find it.
2919    DCHECK_NE(info()->literal()->scope()->outer_scope(), brand->scope());
2920    RegisterList brand_args = register_allocator()->NewRegisterList(4);
2921    builder()
2922        ->StoreAccumulatorInRegister(brand_args[1])
2923        .MoveRegister(receiver, brand_args[0])
2924        .MoveRegister(execution_context()->reg(), brand_args[2])
2925        .LoadLiteral(Smi::FromInt(depth))
2926        .StoreAccumulatorInRegister(brand_args[3])
2927        .CallRuntime(Runtime::kAddPrivateBrand, brand_args);
2928  }
2929}
2930
2931void BytecodeGenerator::BuildInstanceMemberInitialization(Register constructor,
2932                                                          Register instance) {
2933  RegisterList args = register_allocator()->NewRegisterList(1);
2934  Register initializer = register_allocator()->NewRegister();
2935
2936  FeedbackSlot slot = feedback_spec()->AddLoadICSlot();
2937  BytecodeLabel done;
2938
2939  builder()
2940      ->LoadClassFieldsInitializer(constructor, feedback_index(slot))
2941      // TODO(gsathya): This jump can be elided for the base
2942      // constructor and derived constructor. This is only required
2943      // when called from an arrow function.
2944      .JumpIfUndefined(&done)
2945      .StoreAccumulatorInRegister(initializer)
2946      .MoveRegister(instance, args[0])
2947      .CallProperty(initializer, args,
2948                    feedback_index(feedback_spec()->AddCallICSlot()))
2949      .Bind(&done);
2950}
2951
2952void BytecodeGenerator::VisitNativeFunctionLiteral(
2953    NativeFunctionLiteral* expr) {
2954  size_t entry = builder()->AllocateDeferredConstantPoolEntry();
2955  int index = feedback_spec()->AddCreateClosureSlot();
2956  uint8_t flags = CreateClosureFlags::Encode(false, false, false);
2957  builder()->CreateClosure(entry, index, flags);
2958  native_function_literals_.push_back(std::make_pair(expr, entry));
2959}
2960
2961void BytecodeGenerator::VisitConditional(Conditional* expr) {
2962  ConditionalControlFlowBuilder conditional_builder(
2963      builder(), block_coverage_builder_, expr);
2964
2965  if (expr->condition()->ToBooleanIsTrue()) {
2966    // Generate then block unconditionally as always true.
2967    conditional_builder.Then();
2968    VisitForAccumulatorValue(expr->then_expression());
2969  } else if (expr->condition()->ToBooleanIsFalse()) {
2970    // Generate else block unconditionally if it exists.
2971    conditional_builder.Else();
2972    VisitForAccumulatorValue(expr->else_expression());
2973  } else {
2974    VisitForTest(expr->condition(), conditional_builder.then_labels(),
2975                 conditional_builder.else_labels(), TestFallthrough::kThen);
2976
2977    conditional_builder.Then();
2978    VisitForAccumulatorValue(expr->then_expression());
2979    conditional_builder.JumpToEnd();
2980
2981    conditional_builder.Else();
2982    VisitForAccumulatorValue(expr->else_expression());
2983  }
2984}
2985
2986void BytecodeGenerator::VisitLiteral(Literal* expr) {
2987  if (execution_result()->IsEffect()) return;
2988  switch (expr->type()) {
2989    case Literal::kSmi:
2990      builder()->LoadLiteral(expr->AsSmiLiteral());
2991      break;
2992    case Literal::kHeapNumber:
2993      builder()->LoadLiteral(expr->AsNumber());
2994      break;
2995    case Literal::kUndefined:
2996      builder()->LoadUndefined();
2997      break;
2998    case Literal::kBoolean:
2999      builder()->LoadBoolean(expr->ToBooleanIsTrue());
3000      execution_result()->SetResultIsBoolean();
3001      break;
3002    case Literal::kNull:
3003      builder()->LoadNull();
3004      break;
3005    case Literal::kTheHole:
3006      builder()->LoadTheHole();
3007      break;
3008    case Literal::kString:
3009      builder()->LoadLiteral(expr->AsRawString());
3010      execution_result()->SetResultIsString();
3011      break;
3012    case Literal::kBigInt:
3013      builder()->LoadLiteral(expr->AsBigInt());
3014      break;
3015  }
3016}
3017
3018void BytecodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
3019  // Materialize a regular expression literal.
3020  builder()->CreateRegExpLiteral(
3021      expr->raw_pattern(), feedback_index(feedback_spec()->AddLiteralSlot()),
3022      expr->flags());
3023}
3024
3025void BytecodeGenerator::BuildCreateObjectLiteral(Register literal,
3026                                                 uint8_t flags, size_t entry) {
3027  // TODO(cbruni): Directly generate runtime call for literals we cannot
3028  // optimize once the CreateShallowObjectLiteral stub is in sync with the TF
3029  // optimizations.
3030  int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
3031  builder()
3032      ->CreateObjectLiteral(entry, literal_index, flags)
3033      .StoreAccumulatorInRegister(literal);
3034}
3035
3036void BytecodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
3037  expr->builder()->InitDepthAndFlags();
3038
3039  // Fast path for the empty object literal which doesn't need an
3040  // AllocationSite.
3041  if (expr->builder()->IsEmptyObjectLiteral()) {
3042    DCHECK(expr->builder()->IsFastCloningSupported());
3043    builder()->CreateEmptyObjectLiteral();
3044    return;
3045  }
3046
3047  Variable* home_object = expr->home_object();
3048  if (home_object != nullptr) {
3049    DCHECK(home_object->is_used());
3050    DCHECK(home_object->IsContextSlot());
3051  }
3052  MultipleEntryBlockContextScope object_literal_context_scope(
3053      this, home_object ? home_object->scope() : nullptr);
3054
3055  // Deep-copy the literal boilerplate.
3056  uint8_t flags = CreateObjectLiteralFlags::Encode(
3057      expr->builder()->ComputeFlags(),
3058      expr->builder()->IsFastCloningSupported());
3059
3060  Register literal = register_allocator()->NewRegister();
3061
3062  // Create literal object.
3063  int property_index = 0;
3064  bool clone_object_spread =
3065      expr->properties()->first()->kind() == ObjectLiteral::Property::SPREAD;
3066  if (clone_object_spread) {
3067    // Avoid the slow path for spreads in the following common cases:
3068    //   1) `let obj = { ...source }`
3069    //   2) `let obj = { ...source, override: 1 }`
3070    //   3) `let obj = { ...source, ...overrides }`
3071    RegisterAllocationScope register_scope(this);
3072    Expression* property = expr->properties()->first()->value();
3073    Register from_value = VisitForRegisterValue(property);
3074    int clone_index = feedback_index(feedback_spec()->AddCloneObjectSlot());
3075    builder()->CloneObject(from_value, flags, clone_index);
3076    builder()->StoreAccumulatorInRegister(literal);
3077    property_index++;
3078  } else {
3079    size_t entry;
3080    // If constant properties is an empty fixed array, use a cached empty fixed
3081    // array to ensure it's only added to the constant pool once.
3082    if (expr->builder()->properties_count() == 0) {
3083      entry = builder()->EmptyObjectBoilerplateDescriptionConstantPoolEntry();
3084    } else {
3085      entry = builder()->AllocateDeferredConstantPoolEntry();
3086      object_literals_.push_back(std::make_pair(expr->builder(), entry));
3087    }
3088    BuildCreateObjectLiteral(literal, flags, entry);
3089  }
3090
3091  // Store computed values into the literal.
3092  AccessorTable<ObjectLiteral::Property> accessor_table(zone());
3093  for (; property_index < expr->properties()->length(); property_index++) {
3094    ObjectLiteral::Property* property = expr->properties()->at(property_index);
3095    if (property->is_computed_name()) break;
3096    if (!clone_object_spread && property->IsCompileTimeValue()) continue;
3097
3098    RegisterAllocationScope inner_register_scope(this);
3099    Literal* key = property->key()->AsLiteral();
3100    switch (property->kind()) {
3101      case ObjectLiteral::Property::SPREAD:
3102        UNREACHABLE();
3103      case ObjectLiteral::Property::CONSTANT:
3104      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
3105        DCHECK(clone_object_spread || !property->value()->IsCompileTimeValue());
3106        V8_FALLTHROUGH;
3107      case ObjectLiteral::Property::COMPUTED: {
3108        // It is safe to use [[Put]] here because the boilerplate already
3109        // contains computed properties with an uninitialized value.
3110        Register key_reg;
3111        if (key->IsStringLiteral()) {
3112          DCHECK(key->IsPropertyName());
3113        } else {
3114          key_reg = register_allocator()->NewRegister();
3115          builder()->SetExpressionPosition(property->key());
3116          VisitForRegisterValue(property->key(), key_reg);
3117        }
3118
3119        object_literal_context_scope.SetEnteredIf(
3120            property->value()->IsConciseMethodDefinition());
3121        builder()->SetExpressionPosition(property->value());
3122
3123        if (property->emit_store()) {
3124          VisitForAccumulatorValue(property->value());
3125          if (key->IsStringLiteral()) {
3126            FeedbackSlot slot = feedback_spec()->AddDefineNamedOwnICSlot();
3127            builder()->DefineNamedOwnProperty(literal, key->AsRawPropertyName(),
3128                                              feedback_index(slot));
3129          } else {
3130            FeedbackSlot slot = feedback_spec()->AddDefineKeyedOwnICSlot();
3131            builder()->DefineKeyedOwnProperty(literal, key_reg,
3132                                              feedback_index(slot));
3133          }
3134        } else {
3135          VisitForEffect(property->value());
3136        }
3137        break;
3138      }
3139      case ObjectLiteral::Property::PROTOTYPE: {
3140        // __proto__:null is handled by CreateObjectLiteral.
3141        if (property->IsNullPrototype()) break;
3142        DCHECK(property->emit_store());
3143        DCHECK(!property->NeedsSetFunctionName());
3144        RegisterList args = register_allocator()->NewRegisterList(2);
3145        builder()->MoveRegister(literal, args[0]);
3146        object_literal_context_scope.SetEnteredIf(false);
3147        builder()->SetExpressionPosition(property->value());
3148        VisitForRegisterValue(property->value(), args[1]);
3149        builder()->CallRuntime(Runtime::kInternalSetPrototype, args);
3150        break;
3151      }
3152      case ObjectLiteral::Property::GETTER:
3153        if (property->emit_store()) {
3154          accessor_table.LookupOrInsert(key)->getter = property;
3155        }
3156        break;
3157      case ObjectLiteral::Property::SETTER:
3158        if (property->emit_store()) {
3159          accessor_table.LookupOrInsert(key)->setter = property;
3160        }
3161        break;
3162    }
3163  }
3164
3165    // Define accessors, using only a single call to the runtime for each pair
3166    // of corresponding getters and setters.
3167    object_literal_context_scope.SetEnteredIf(true);
3168    for (auto accessors : accessor_table.ordered_accessors()) {
3169      RegisterAllocationScope inner_register_scope(this);
3170      RegisterList args = register_allocator()->NewRegisterList(5);
3171      builder()->MoveRegister(literal, args[0]);
3172      VisitForRegisterValue(accessors.first, args[1]);
3173      VisitLiteralAccessor(accessors.second->getter, args[2]);
3174      VisitLiteralAccessor(accessors.second->setter, args[3]);
3175      builder()
3176          ->LoadLiteral(Smi::FromInt(NONE))
3177          .StoreAccumulatorInRegister(args[4])
3178          .CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, args);
3179    }
3180
3181  // Object literals have two parts. The "static" part on the left contains no
3182  // computed property names, and so we can compute its map ahead of time; see
3183  // Runtime_CreateObjectLiteralBoilerplate. The second "dynamic" part starts
3184  // with the first computed property name and continues with all properties to
3185  // its right. All the code from above initializes the static component of the
3186  // object literal, and arranges for the map of the result to reflect the
3187  // static order in which the keys appear. For the dynamic properties, we
3188  // compile them into a series of "SetOwnProperty" runtime calls. This will
3189  // preserve insertion order.
3190  for (; property_index < expr->properties()->length(); property_index++) {
3191    ObjectLiteral::Property* property = expr->properties()->at(property_index);
3192    RegisterAllocationScope inner_register_scope(this);
3193
3194    bool should_be_in_object_literal_scope =
3195        (property->value()->IsConciseMethodDefinition() ||
3196         property->value()->IsAccessorFunctionDefinition());
3197
3198    if (property->IsPrototype()) {
3199      // __proto__:null is handled by CreateObjectLiteral.
3200      if (property->IsNullPrototype()) continue;
3201      DCHECK(property->emit_store());
3202      DCHECK(!property->NeedsSetFunctionName());
3203      RegisterList args = register_allocator()->NewRegisterList(2);
3204      builder()->MoveRegister(literal, args[0]);
3205
3206      DCHECK(!should_be_in_object_literal_scope);
3207      object_literal_context_scope.SetEnteredIf(false);
3208      builder()->SetExpressionPosition(property->value());
3209      VisitForRegisterValue(property->value(), args[1]);
3210      builder()->CallRuntime(Runtime::kInternalSetPrototype, args);
3211      continue;
3212    }
3213
3214    switch (property->kind()) {
3215      case ObjectLiteral::Property::CONSTANT:
3216      case ObjectLiteral::Property::COMPUTED:
3217      case ObjectLiteral::Property::MATERIALIZED_LITERAL: {
3218        // Computed property keys don't belong to the object literal scope (even
3219        // if they're syntactically inside it).
3220        if (property->is_computed_name()) {
3221          object_literal_context_scope.SetEnteredIf(false);
3222        }
3223        Register key = register_allocator()->NewRegister();
3224        BuildLoadPropertyKey(property, key);
3225
3226        object_literal_context_scope.SetEnteredIf(
3227            should_be_in_object_literal_scope);
3228        builder()->SetExpressionPosition(property->value());
3229        Register value;
3230
3231        // Static class fields require the name property to be set on
3232        // the class, meaning we can't wait until the
3233        // DefineKeyedOwnPropertyInLiteral call later to set the name.
3234        if (property->value()->IsClassLiteral() &&
3235            property->value()->AsClassLiteral()->static_initializer() !=
3236                nullptr) {
3237          value = register_allocator()->NewRegister();
3238          VisitClassLiteral(property->value()->AsClassLiteral(), key);
3239          builder()->StoreAccumulatorInRegister(value);
3240        } else {
3241          value = VisitForRegisterValue(property->value());
3242        }
3243
3244        DefineKeyedOwnPropertyInLiteralFlags data_property_flags =
3245            DefineKeyedOwnPropertyInLiteralFlag::kNoFlags;
3246        if (property->NeedsSetFunctionName()) {
3247          data_property_flags |=
3248              DefineKeyedOwnPropertyInLiteralFlag::kSetFunctionName;
3249        }
3250
3251        FeedbackSlot slot =
3252            feedback_spec()->AddDefineKeyedOwnPropertyInLiteralICSlot();
3253        builder()
3254            ->LoadAccumulatorWithRegister(value)
3255            .DefineKeyedOwnPropertyInLiteral(literal, key, data_property_flags,
3256                                             feedback_index(slot));
3257        break;
3258      }
3259      case ObjectLiteral::Property::GETTER:
3260      case ObjectLiteral::Property::SETTER: {
3261        // Computed property keys don't belong to the object literal scope (even
3262        // if they're syntactically inside it).
3263        if (property->is_computed_name()) {
3264          object_literal_context_scope.SetEnteredIf(false);
3265        }
3266        RegisterList args = register_allocator()->NewRegisterList(4);
3267        builder()->MoveRegister(literal, args[0]);
3268        BuildLoadPropertyKey(property, args[1]);
3269
3270        DCHECK(should_be_in_object_literal_scope);
3271        object_literal_context_scope.SetEnteredIf(true);
3272        builder()->SetExpressionPosition(property->value());
3273        VisitForRegisterValue(property->value(), args[2]);
3274        builder()
3275            ->LoadLiteral(Smi::FromInt(NONE))
3276            .StoreAccumulatorInRegister(args[3]);
3277        Runtime::FunctionId function_id =
3278            property->kind() == ObjectLiteral::Property::GETTER
3279                ? Runtime::kDefineGetterPropertyUnchecked
3280                : Runtime::kDefineSetterPropertyUnchecked;
3281        builder()->CallRuntime(function_id, args);
3282        break;
3283      }
3284      case ObjectLiteral::Property::SPREAD: {
3285        RegisterList args = register_allocator()->NewRegisterList(2);
3286        builder()->MoveRegister(literal, args[0]);
3287        builder()->SetExpressionPosition(property->value());
3288        object_literal_context_scope.SetEnteredIf(false);
3289        VisitForRegisterValue(property->value(), args[1]);
3290        builder()->CallRuntime(Runtime::kInlineCopyDataProperties, args);
3291        break;
3292      }
3293      case ObjectLiteral::Property::PROTOTYPE:
3294        UNREACHABLE();  // Handled specially above.
3295    }
3296  }
3297
3298  builder()->LoadAccumulatorWithRegister(literal);
3299  if (home_object != nullptr) {
3300    object_literal_context_scope.SetEnteredIf(true);
3301    BuildVariableAssignment(home_object, Token::INIT, HoleCheckMode::kElided);
3302  }
3303}
3304
3305// Fill an array with values from an iterator, starting at a given index. It is
3306// guaranteed that the loop will only terminate if the iterator is exhausted, or
3307// if one of iterator.next(), value.done, or value.value fail.
3308//
3309// In pseudocode:
3310//
3311// loop {
3312//   value = iterator.next()
3313//   if (value.done) break;
3314//   value = value.value
3315//   array[index++] = value
3316// }
3317void BytecodeGenerator::BuildFillArrayWithIterator(
3318    IteratorRecord iterator, Register array, Register index, Register value,
3319    FeedbackSlot next_value_slot, FeedbackSlot next_done_slot,
3320    FeedbackSlot index_slot, FeedbackSlot element_slot) {
3321  DCHECK(array.is_valid());
3322  DCHECK(index.is_valid());
3323  DCHECK(value.is_valid());
3324
3325  LoopBuilder loop_builder(builder(), nullptr, nullptr);
3326  LoopScope loop_scope(this, &loop_builder);
3327
3328  // Call the iterator's .next() method. Break from the loop if the `done`
3329  // property is truthy, otherwise load the value from the iterator result and
3330  // append the argument.
3331  BuildIteratorNext(iterator, value);
3332  builder()->LoadNamedProperty(
3333      value, ast_string_constants()->done_string(),
3334      feedback_index(feedback_spec()->AddLoadICSlot()));
3335  loop_builder.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
3336
3337  loop_builder.LoopBody();
3338  builder()
3339      // value = value.value
3340      ->LoadNamedProperty(value, ast_string_constants()->value_string(),
3341                          feedback_index(next_value_slot))
3342      // array[index] = value
3343      .StoreInArrayLiteral(array, index, feedback_index(element_slot))
3344      // index++
3345      .LoadAccumulatorWithRegister(index)
3346      .UnaryOperation(Token::INC, feedback_index(index_slot))
3347      .StoreAccumulatorInRegister(index);
3348  loop_builder.BindContinueTarget();
3349}
3350
3351void BytecodeGenerator::BuildCreateArrayLiteral(
3352    const ZonePtrList<Expression>* elements, ArrayLiteral* expr) {
3353  RegisterAllocationScope register_scope(this);
3354  Register index = register_allocator()->NewRegister();
3355  Register array = register_allocator()->NewRegister();
3356  SharedFeedbackSlot element_slot(feedback_spec(),
3357                                  FeedbackSlotKind::kStoreInArrayLiteral);
3358  ZonePtrList<Expression>::const_iterator current = elements->begin();
3359  ZonePtrList<Expression>::const_iterator end = elements->end();
3360  bool is_empty = elements->is_empty();
3361
3362  if (!is_empty && (*current)->IsSpread()) {
3363    // If we have a leading spread, use CreateArrayFromIterable to create
3364    // an array from it and then add the remaining components to that array.
3365    VisitForAccumulatorValue(*current);
3366    builder()->SetExpressionPosition((*current)->AsSpread()->expression());
3367    builder()->CreateArrayFromIterable().StoreAccumulatorInRegister(array);
3368
3369    if (++current != end) {
3370      // If there are remaning elements, prepare the index register that is
3371      // used for adding those elements. The next index is the length of the
3372      // newly created array.
3373      auto length = ast_string_constants()->length_string();
3374      int length_load_slot = feedback_index(feedback_spec()->AddLoadICSlot());
3375      builder()
3376          ->LoadNamedProperty(array, length, length_load_slot)
3377          .StoreAccumulatorInRegister(index);
3378    }
3379  } else {
3380    // There are some elements before the first (if any) spread, and we can
3381    // use a boilerplate when creating the initial array from those elements.
3382
3383    // First, allocate a constant pool entry for the boilerplate that will
3384    // be created during finalization, and will contain all the constant
3385    // elements before the first spread. This also handle the empty array case
3386    // and one-shot optimization.
3387
3388    ArrayLiteralBoilerplateBuilder* array_literal_builder = nullptr;
3389    if (expr != nullptr) {
3390      array_literal_builder = expr->builder();
3391    } else {
3392      DCHECK(!elements->is_empty());
3393
3394      // get first_spread_index
3395      int first_spread_index = -1;
3396      for (auto iter = elements->begin(); iter != elements->end(); iter++) {
3397        if ((*iter)->IsSpread()) {
3398          first_spread_index = static_cast<int>(iter - elements->begin());
3399          break;
3400        }
3401      }
3402
3403      array_literal_builder = zone()->New<ArrayLiteralBoilerplateBuilder>(
3404          elements, first_spread_index);
3405      array_literal_builder->InitDepthAndFlags();
3406    }
3407
3408    DCHECK(array_literal_builder != nullptr);
3409    uint8_t flags = CreateArrayLiteralFlags::Encode(
3410        array_literal_builder->IsFastCloningSupported(),
3411        array_literal_builder->ComputeFlags());
3412    if (is_empty) {
3413      // Empty array literal fast-path.
3414      int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
3415      DCHECK(array_literal_builder->IsFastCloningSupported());
3416      builder()->CreateEmptyArrayLiteral(literal_index);
3417    } else {
3418      // Create array literal from boilerplate.
3419      size_t entry = builder()->AllocateDeferredConstantPoolEntry();
3420      array_literals_.push_back(std::make_pair(array_literal_builder, entry));
3421      int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
3422      builder()->CreateArrayLiteral(entry, literal_index, flags);
3423    }
3424    builder()->StoreAccumulatorInRegister(array);
3425
3426    ZonePtrList<Expression>::const_iterator first_spread_or_end =
3427        array_literal_builder->first_spread_index() >= 0
3428            ? current + array_literal_builder->first_spread_index()
3429            : end;
3430
3431    // Insert the missing non-constant elements, up until the first spread
3432    // index, into the initial array (the remaining elements will be inserted
3433    // below).
3434    DCHECK_EQ(current, elements->begin());
3435    int array_index = 0;
3436    for (; current != first_spread_or_end; ++current, array_index++) {
3437      Expression* subexpr = *current;
3438      DCHECK(!subexpr->IsSpread());
3439      // Skip the constants.
3440      if (subexpr->IsCompileTimeValue()) continue;
3441
3442      builder()
3443          ->LoadLiteral(Smi::FromInt(array_index))
3444          .StoreAccumulatorInRegister(index);
3445      VisitForAccumulatorValue(subexpr);
3446      builder()->StoreInArrayLiteral(array, index,
3447                                     feedback_index(element_slot.Get()));
3448    }
3449
3450    if (current != end) {
3451      // If there are remaining elements, prepare the index register
3452      // to store the next element, which comes from the first spread.
3453      builder()
3454          ->LoadLiteral(Smi::FromInt(array_index))
3455          .StoreAccumulatorInRegister(index);
3456    }
3457  }
3458
3459  // Now build insertions for the remaining elements from current to end.
3460  SharedFeedbackSlot index_slot(feedback_spec(), FeedbackSlotKind::kBinaryOp);
3461  SharedFeedbackSlot length_slot(
3462      feedback_spec(), feedback_spec()->GetStoreICSlot(LanguageMode::kStrict));
3463  for (; current != end; ++current) {
3464    Expression* subexpr = *current;
3465    if (subexpr->IsSpread()) {
3466      RegisterAllocationScope scope(this);
3467      builder()->SetExpressionAsStatementPosition(
3468          subexpr->AsSpread()->expression());
3469      VisitForAccumulatorValue(subexpr->AsSpread()->expression());
3470      builder()->SetExpressionPosition(subexpr->AsSpread()->expression());
3471      IteratorRecord iterator = BuildGetIteratorRecord(IteratorType::kNormal);
3472
3473      Register value = register_allocator()->NewRegister();
3474      FeedbackSlot next_value_load_slot = feedback_spec()->AddLoadICSlot();
3475      FeedbackSlot next_done_load_slot = feedback_spec()->AddLoadICSlot();
3476      FeedbackSlot real_index_slot = index_slot.Get();
3477      FeedbackSlot real_element_slot = element_slot.Get();
3478      BuildFillArrayWithIterator(iterator, array, index, value,
3479                                 next_value_load_slot, next_done_load_slot,
3480                                 real_index_slot, real_element_slot);
3481    } else if (!subexpr->IsTheHoleLiteral()) {
3482      // literal[index++] = subexpr
3483      VisitForAccumulatorValue(subexpr);
3484      builder()
3485          ->StoreInArrayLiteral(array, index,
3486                                feedback_index(element_slot.Get()))
3487          .LoadAccumulatorWithRegister(index);
3488      // Only increase the index if we are not the last element.
3489      if (current + 1 != end) {
3490        builder()
3491            ->UnaryOperation(Token::INC, feedback_index(index_slot.Get()))
3492            .StoreAccumulatorInRegister(index);
3493      }
3494    } else {
3495      // literal.length = ++index
3496      // length_slot is only used when there are holes.
3497      auto length = ast_string_constants()->length_string();
3498      builder()
3499          ->LoadAccumulatorWithRegister(index)
3500          .UnaryOperation(Token::INC, feedback_index(index_slot.Get()))
3501          .StoreAccumulatorInRegister(index)
3502          .SetNamedProperty(array, length, feedback_index(length_slot.Get()),
3503                            LanguageMode::kStrict);
3504    }
3505  }
3506
3507  builder()->LoadAccumulatorWithRegister(array);
3508}
3509
3510void BytecodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
3511  expr->builder()->InitDepthAndFlags();
3512  BuildCreateArrayLiteral(expr->values(), expr);
3513}
3514
3515void BytecodeGenerator::VisitVariableProxy(VariableProxy* proxy) {
3516  builder()->SetExpressionPosition(proxy);
3517  BuildVariableLoad(proxy->var(), proxy->hole_check_mode());
3518}
3519
3520void BytecodeGenerator::BuildVariableLoad(Variable* variable,
3521                                          HoleCheckMode hole_check_mode,
3522                                          TypeofMode typeof_mode) {
3523  switch (variable->location()) {
3524    case VariableLocation::LOCAL: {
3525      Register source(builder()->Local(variable->index()));
3526      // We need to load the variable into the accumulator, even when in a
3527      // VisitForRegisterScope, in order to avoid register aliasing if
3528      // subsequent expressions assign to the same variable.
3529      builder()->LoadAccumulatorWithRegister(source);
3530      if (hole_check_mode == HoleCheckMode::kRequired) {
3531        BuildThrowIfHole(variable);
3532      }
3533      break;
3534    }
3535    case VariableLocation::PARAMETER: {
3536      Register source;
3537      if (variable->IsReceiver()) {
3538        source = builder()->Receiver();
3539      } else {
3540        source = builder()->Parameter(variable->index());
3541      }
3542      // We need to load the variable into the accumulator, even when in a
3543      // VisitForRegisterScope, in order to avoid register aliasing if
3544      // subsequent expressions assign to the same variable.
3545      builder()->LoadAccumulatorWithRegister(source);
3546      if (hole_check_mode == HoleCheckMode::kRequired) {
3547        BuildThrowIfHole(variable);
3548      }
3549      break;
3550    }
3551    case VariableLocation::UNALLOCATED: {
3552      // The global identifier "undefined" is immutable. Everything
3553      // else could be reassigned. For performance, we do a pointer comparison
3554      // rather than checking if the raw_name is really "undefined".
3555      if (variable->raw_name() == ast_string_constants()->undefined_string()) {
3556        builder()->LoadUndefined();
3557      } else {
3558        FeedbackSlot slot = GetCachedLoadGlobalICSlot(typeof_mode, variable);
3559        builder()->LoadGlobal(variable->raw_name(), feedback_index(slot),
3560                              typeof_mode);
3561      }
3562      break;
3563    }
3564    case VariableLocation::CONTEXT: {
3565      int depth = execution_context()->ContextChainDepth(variable->scope());
3566      ContextScope* context = execution_context()->Previous(depth);
3567      Register context_reg;
3568      if (context) {
3569        context_reg = context->reg();
3570        depth = 0;
3571      } else {
3572        context_reg = execution_context()->reg();
3573      }
3574
3575      BytecodeArrayBuilder::ContextSlotMutability immutable =
3576          (variable->maybe_assigned() == kNotAssigned)
3577              ? BytecodeArrayBuilder::kImmutableSlot
3578              : BytecodeArrayBuilder::kMutableSlot;
3579
3580      builder()->LoadContextSlot(context_reg, variable->index(), depth,
3581                                 immutable);
3582      if (hole_check_mode == HoleCheckMode::kRequired) {
3583        BuildThrowIfHole(variable);
3584      }
3585      break;
3586    }
3587    case VariableLocation::LOOKUP: {
3588      switch (variable->mode()) {
3589        case VariableMode::kDynamicLocal: {
3590          Variable* local_variable = variable->local_if_not_shadowed();
3591          int depth =
3592              execution_context()->ContextChainDepth(local_variable->scope());
3593          builder()->LoadLookupContextSlot(variable->raw_name(), typeof_mode,
3594                                           local_variable->index(), depth);
3595          if (hole_check_mode == HoleCheckMode::kRequired) {
3596            BuildThrowIfHole(variable);
3597          }
3598          break;
3599        }
3600        case VariableMode::kDynamicGlobal: {
3601          int depth =
3602              current_scope()->ContextChainLengthUntilOutermostSloppyEval();
3603          // TODO(1008414): Add back caching here when bug is fixed properly.
3604          FeedbackSlot slot = feedback_spec()->AddLoadGlobalICSlot(typeof_mode);
3605
3606          builder()->LoadLookupGlobalSlot(variable->raw_name(), typeof_mode,
3607                                          feedback_index(slot), depth);
3608          break;
3609        }
3610        default:
3611          builder()->LoadLookupSlot(variable->raw_name(), typeof_mode);
3612      }
3613      break;
3614    }
3615    case VariableLocation::MODULE: {
3616      int depth = execution_context()->ContextChainDepth(variable->scope());
3617      builder()->LoadModuleVariable(variable->index(), depth);
3618      if (hole_check_mode == HoleCheckMode::kRequired) {
3619        BuildThrowIfHole(variable);
3620      }
3621      break;
3622    }
3623    case VariableLocation::REPL_GLOBAL: {
3624      DCHECK(variable->IsReplGlobal());
3625      FeedbackSlot slot = GetCachedLoadGlobalICSlot(typeof_mode, variable);
3626      builder()->LoadGlobal(variable->raw_name(), feedback_index(slot),
3627                            typeof_mode);
3628      break;
3629    }
3630  }
3631}
3632
3633void BytecodeGenerator::BuildVariableLoadForAccumulatorValue(
3634    Variable* variable, HoleCheckMode hole_check_mode, TypeofMode typeof_mode) {
3635  ValueResultScope accumulator_result(this);
3636  BuildVariableLoad(variable, hole_check_mode, typeof_mode);
3637}
3638
3639void BytecodeGenerator::BuildReturn(int source_position) {
3640  if (FLAG_trace) {
3641    RegisterAllocationScope register_scope(this);
3642    Register result = register_allocator()->NewRegister();
3643    // Runtime returns {result} value, preserving accumulator.
3644    builder()->StoreAccumulatorInRegister(result).CallRuntime(
3645        Runtime::kTraceExit, result);
3646  }
3647  if (info()->flags().collect_type_profile()) {
3648    builder()->CollectTypeProfile(info()->literal()->return_position());
3649  }
3650  builder()->SetStatementPosition(source_position);
3651  builder()->Return();
3652}
3653
3654void BytecodeGenerator::BuildAsyncReturn(int source_position) {
3655  RegisterAllocationScope register_scope(this);
3656
3657  if (IsAsyncGeneratorFunction(info()->literal()->kind())) {
3658    RegisterList args = register_allocator()->NewRegisterList(3);
3659    builder()
3660        ->MoveRegister(generator_object(), args[0])  // generator
3661        .StoreAccumulatorInRegister(args[1])         // value
3662        .LoadTrue()
3663        .StoreAccumulatorInRegister(args[2])  // done
3664        .CallRuntime(Runtime::kInlineAsyncGeneratorResolve, args);
3665  } else {
3666    DCHECK(IsAsyncFunction(info()->literal()->kind()) ||
3667           IsAsyncModule(info()->literal()->kind()));
3668    RegisterList args = register_allocator()->NewRegisterList(2);
3669    builder()
3670        ->MoveRegister(generator_object(), args[0])  // generator
3671        .StoreAccumulatorInRegister(args[1])         // value
3672        .CallRuntime(Runtime::kInlineAsyncFunctionResolve, args);
3673  }
3674
3675  BuildReturn(source_position);
3676}
3677
3678void BytecodeGenerator::BuildReThrow() { builder()->ReThrow(); }
3679
3680void BytecodeGenerator::BuildThrowIfHole(Variable* variable) {
3681  if (variable->is_this()) {
3682    DCHECK(variable->mode() == VariableMode::kConst);
3683    builder()->ThrowSuperNotCalledIfHole();
3684  } else {
3685    builder()->ThrowReferenceErrorIfHole(variable->raw_name());
3686  }
3687}
3688
3689void BytecodeGenerator::BuildHoleCheckForVariableAssignment(Variable* variable,
3690                                                            Token::Value op) {
3691  DCHECK(!IsPrivateMethodOrAccessorVariableMode(variable->mode()));
3692  if (variable->is_this() && variable->mode() == VariableMode::kConst &&
3693      op == Token::INIT) {
3694    // Perform an initialization check for 'this'. 'this' variable is the
3695    // only variable able to trigger bind operations outside the TDZ
3696    // via 'super' calls.
3697    builder()->ThrowSuperAlreadyCalledIfNotHole();
3698  } else {
3699    // Perform an initialization check for let/const declared variables.
3700    // E.g. let x = (x = 20); is not allowed.
3701    DCHECK(IsLexicalVariableMode(variable->mode()));
3702    BuildThrowIfHole(variable);
3703  }
3704}
3705
3706void BytecodeGenerator::BuildVariableAssignment(
3707    Variable* variable, Token::Value op, HoleCheckMode hole_check_mode,
3708    LookupHoistingMode lookup_hoisting_mode) {
3709  VariableMode mode = variable->mode();
3710  RegisterAllocationScope assignment_register_scope(this);
3711  BytecodeLabel end_label;
3712  switch (variable->location()) {
3713    case VariableLocation::PARAMETER:
3714    case VariableLocation::LOCAL: {
3715      Register destination;
3716      if (VariableLocation::PARAMETER == variable->location()) {
3717        if (variable->IsReceiver()) {
3718          destination = builder()->Receiver();
3719        } else {
3720          destination = builder()->Parameter(variable->index());
3721        }
3722      } else {
3723        destination = builder()->Local(variable->index());
3724      }
3725
3726      if (hole_check_mode == HoleCheckMode::kRequired) {
3727        // Load destination to check for hole.
3728        Register value_temp = register_allocator()->NewRegister();
3729        builder()
3730            ->StoreAccumulatorInRegister(value_temp)
3731            .LoadAccumulatorWithRegister(destination);
3732
3733        BuildHoleCheckForVariableAssignment(variable, op);
3734        builder()->LoadAccumulatorWithRegister(value_temp);
3735      }
3736
3737      if (mode != VariableMode::kConst || op == Token::INIT) {
3738        builder()->StoreAccumulatorInRegister(destination);
3739      } else if (variable->throw_on_const_assignment(language_mode())) {
3740        builder()->CallRuntime(Runtime::kThrowConstAssignError);
3741      }
3742      break;
3743    }
3744    case VariableLocation::UNALLOCATED: {
3745      BuildStoreGlobal(variable);
3746      break;
3747    }
3748    case VariableLocation::CONTEXT: {
3749      int depth = execution_context()->ContextChainDepth(variable->scope());
3750      ContextScope* context = execution_context()->Previous(depth);
3751      Register context_reg;
3752
3753      if (context) {
3754        context_reg = context->reg();
3755        depth = 0;
3756      } else {
3757        context_reg = execution_context()->reg();
3758      }
3759
3760      if (hole_check_mode == HoleCheckMode::kRequired) {
3761        // Load destination to check for hole.
3762        Register value_temp = register_allocator()->NewRegister();
3763        builder()
3764            ->StoreAccumulatorInRegister(value_temp)
3765            .LoadContextSlot(context_reg, variable->index(), depth,
3766                             BytecodeArrayBuilder::kMutableSlot);
3767
3768        BuildHoleCheckForVariableAssignment(variable, op);
3769        builder()->LoadAccumulatorWithRegister(value_temp);
3770      }
3771
3772      if (mode != VariableMode::kConst || op == Token::INIT) {
3773        builder()->StoreContextSlot(context_reg, variable->index(), depth);
3774      } else if (variable->throw_on_const_assignment(language_mode())) {
3775        builder()->CallRuntime(Runtime::kThrowConstAssignError);
3776      }
3777      break;
3778    }
3779    case VariableLocation::LOOKUP: {
3780      builder()->StoreLookupSlot(variable->raw_name(), language_mode(),
3781                                 lookup_hoisting_mode);
3782      break;
3783    }
3784    case VariableLocation::MODULE: {
3785      DCHECK(IsDeclaredVariableMode(mode));
3786
3787      if (mode == VariableMode::kConst && op != Token::INIT) {
3788        builder()->CallRuntime(Runtime::kThrowConstAssignError);
3789        break;
3790      }
3791
3792      // If we don't throw above, we know that we're dealing with an
3793      // export because imports are const and we do not generate initializing
3794      // assignments for them.
3795      DCHECK(variable->IsExport());
3796
3797      int depth = execution_context()->ContextChainDepth(variable->scope());
3798      if (hole_check_mode == HoleCheckMode::kRequired) {
3799        Register value_temp = register_allocator()->NewRegister();
3800        builder()
3801            ->StoreAccumulatorInRegister(value_temp)
3802            .LoadModuleVariable(variable->index(), depth);
3803        BuildHoleCheckForVariableAssignment(variable, op);
3804        builder()->LoadAccumulatorWithRegister(value_temp);
3805      }
3806      builder()->StoreModuleVariable(variable->index(), depth);
3807      break;
3808    }
3809    case VariableLocation::REPL_GLOBAL: {
3810      // A let or const declaration like 'let x = 7' is effectively translated
3811      // to:
3812      //   <top of the script>:
3813      //     ScriptContext.x = TheHole;
3814      //   ...
3815      //   <where the actual 'let' is>:
3816      //     ScriptContextTable.x = 7; // no hole check
3817      //
3818      // The ScriptContext slot for 'x' that we store to here is not
3819      // necessarily the ScriptContext of this script, but rather the
3820      // first ScriptContext that has a slot for name 'x'.
3821      DCHECK(variable->IsReplGlobal());
3822      if (op == Token::INIT) {
3823        RegisterList store_args = register_allocator()->NewRegisterList(2);
3824        builder()
3825            ->StoreAccumulatorInRegister(store_args[1])
3826            .LoadLiteral(variable->raw_name())
3827            .StoreAccumulatorInRegister(store_args[0]);
3828        builder()->CallRuntime(
3829            Runtime::kStoreGlobalNoHoleCheckForReplLetOrConst, store_args);
3830      } else {
3831        if (mode == VariableMode::kConst) {
3832          builder()->CallRuntime(Runtime::kThrowConstAssignError);
3833        } else {
3834          BuildStoreGlobal(variable);
3835        }
3836      }
3837      break;
3838    }
3839  }
3840}
3841
3842void BytecodeGenerator::BuildLoadNamedProperty(const Expression* object_expr,
3843                                               Register object,
3844                                               const AstRawString* name) {
3845  FeedbackSlot slot = GetCachedLoadICSlot(object_expr, name);
3846  builder()->LoadNamedProperty(object, name, feedback_index(slot));
3847}
3848
3849void BytecodeGenerator::BuildSetNamedProperty(const Expression* object_expr,
3850                                              Register object,
3851                                              const AstRawString* name) {
3852  Register value;
3853  if (!execution_result()->IsEffect()) {
3854    value = register_allocator()->NewRegister();
3855    builder()->StoreAccumulatorInRegister(value);
3856  }
3857
3858  FeedbackSlot slot = GetCachedStoreICSlot(object_expr, name);
3859  builder()->SetNamedProperty(object, name, feedback_index(slot),
3860                              language_mode());
3861
3862  if (!execution_result()->IsEffect()) {
3863    builder()->LoadAccumulatorWithRegister(value);
3864  }
3865}
3866
3867void BytecodeGenerator::BuildStoreGlobal(Variable* variable) {
3868  Register value;
3869  if (!execution_result()->IsEffect()) {
3870    value = register_allocator()->NewRegister();
3871    builder()->StoreAccumulatorInRegister(value);
3872  }
3873
3874  FeedbackSlot slot = GetCachedStoreGlobalICSlot(language_mode(), variable);
3875  builder()->StoreGlobal(variable->raw_name(), feedback_index(slot));
3876
3877  if (!execution_result()->IsEffect()) {
3878    builder()->LoadAccumulatorWithRegister(value);
3879  }
3880}
3881
3882// static
3883BytecodeGenerator::AssignmentLhsData
3884BytecodeGenerator::AssignmentLhsData::NonProperty(Expression* expr) {
3885  return AssignmentLhsData(NON_PROPERTY, expr, RegisterList(), Register(),
3886                           Register(), nullptr, nullptr);
3887}
3888// static
3889BytecodeGenerator::AssignmentLhsData
3890BytecodeGenerator::AssignmentLhsData::NamedProperty(Expression* object_expr,
3891                                                    Register object,
3892                                                    const AstRawString* name) {
3893  return AssignmentLhsData(NAMED_PROPERTY, nullptr, RegisterList(), object,
3894                           Register(), object_expr, name);
3895}
3896// static
3897BytecodeGenerator::AssignmentLhsData
3898BytecodeGenerator::AssignmentLhsData::KeyedProperty(Register object,
3899                                                    Register key) {
3900  return AssignmentLhsData(KEYED_PROPERTY, nullptr, RegisterList(), object, key,
3901                           nullptr, nullptr);
3902}
3903// static
3904BytecodeGenerator::AssignmentLhsData
3905BytecodeGenerator::AssignmentLhsData::NamedSuperProperty(
3906    RegisterList super_property_args) {
3907  return AssignmentLhsData(NAMED_SUPER_PROPERTY, nullptr, super_property_args,
3908                           Register(), Register(), nullptr, nullptr);
3909}
3910// static
3911BytecodeGenerator::AssignmentLhsData
3912BytecodeGenerator::AssignmentLhsData::PrivateMethodOrAccessor(
3913    AssignType type, Property* property, Register object, Register key) {
3914  return AssignmentLhsData(type, property, RegisterList(), object, key, nullptr,
3915                           nullptr);
3916}
3917// static
3918BytecodeGenerator::AssignmentLhsData
3919BytecodeGenerator::AssignmentLhsData::KeyedSuperProperty(
3920    RegisterList super_property_args) {
3921  return AssignmentLhsData(KEYED_SUPER_PROPERTY, nullptr, super_property_args,
3922                           Register(), Register(), nullptr, nullptr);
3923}
3924
3925BytecodeGenerator::AssignmentLhsData BytecodeGenerator::PrepareAssignmentLhs(
3926    Expression* lhs, AccumulatorPreservingMode accumulator_preserving_mode) {
3927  // Left-hand side can only be a property, a global or a variable slot.
3928  Property* property = lhs->AsProperty();
3929  AssignType assign_type = Property::GetAssignType(property);
3930
3931  // Evaluate LHS expression.
3932  switch (assign_type) {
3933    case NON_PROPERTY:
3934      return AssignmentLhsData::NonProperty(lhs);
3935    case NAMED_PROPERTY: {
3936      AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
3937      Register object = VisitForRegisterValue(property->obj());
3938      const AstRawString* name =
3939          property->key()->AsLiteral()->AsRawPropertyName();
3940      return AssignmentLhsData::NamedProperty(property->obj(), object, name);
3941    }
3942    case KEYED_PROPERTY: {
3943      AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
3944      Register object = VisitForRegisterValue(property->obj());
3945      Register key = VisitForRegisterValue(property->key());
3946      return AssignmentLhsData::KeyedProperty(object, key);
3947    }
3948    case PRIVATE_METHOD:
3949    case PRIVATE_GETTER_ONLY:
3950    case PRIVATE_SETTER_ONLY:
3951    case PRIVATE_GETTER_AND_SETTER: {
3952      DCHECK(!property->IsSuperAccess());
3953      AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
3954      Register object = VisitForRegisterValue(property->obj());
3955      Register key = VisitForRegisterValue(property->key());
3956      return AssignmentLhsData::PrivateMethodOrAccessor(assign_type, property,
3957                                                        object, key);
3958    }
3959    case NAMED_SUPER_PROPERTY: {
3960      AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
3961      RegisterList super_property_args =
3962          register_allocator()->NewRegisterList(4);
3963      BuildThisVariableLoad();
3964      builder()->StoreAccumulatorInRegister(super_property_args[0]);
3965      BuildVariableLoad(
3966          property->obj()->AsSuperPropertyReference()->home_object()->var(),
3967          HoleCheckMode::kElided);
3968      builder()->StoreAccumulatorInRegister(super_property_args[1]);
3969      builder()
3970          ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
3971          .StoreAccumulatorInRegister(super_property_args[2]);
3972      return AssignmentLhsData::NamedSuperProperty(super_property_args);
3973    }
3974    case KEYED_SUPER_PROPERTY: {
3975      AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
3976      RegisterList super_property_args =
3977          register_allocator()->NewRegisterList(4);
3978      BuildThisVariableLoad();
3979      builder()->StoreAccumulatorInRegister(super_property_args[0]);
3980      BuildVariableLoad(
3981          property->obj()->AsSuperPropertyReference()->home_object()->var(),
3982          HoleCheckMode::kElided);
3983      builder()->StoreAccumulatorInRegister(super_property_args[1]);
3984      VisitForRegisterValue(property->key(), super_property_args[2]);
3985      return AssignmentLhsData::KeyedSuperProperty(super_property_args);
3986    }
3987  }
3988  UNREACHABLE();
3989}
3990
3991// Build the iteration finalizer called in the finally block of an iteration
3992// protocol execution. This closes the iterator if needed, and suppresses any
3993// exception it throws if necessary, including the exception when the return
3994// method is not callable.
3995//
3996// In pseudo-code, this builds:
3997//
3998// if (!done) {
3999//   try {
4000//     let method = iterator.return
4001//     if (method !== null && method !== undefined) {
4002//       let return_val = method.call(iterator)
4003//       if (!%IsObject(return_val)) throw TypeError
4004//     }
4005//   } catch (e) {
4006//     if (iteration_continuation != RETHROW)
4007//       rethrow e
4008//   }
4009// }
4010//
4011// For async iterators, iterator.close() becomes await iterator.close().
4012void BytecodeGenerator::BuildFinalizeIteration(
4013    IteratorRecord iterator, Register done,
4014    Register iteration_continuation_token) {
4015  RegisterAllocationScope register_scope(this);
4016  BytecodeLabels iterator_is_done(zone());
4017
4018  // if (!done) {
4019  builder()->LoadAccumulatorWithRegister(done).JumpIfTrue(
4020      ToBooleanMode::kConvertToBoolean, iterator_is_done.New());
4021
4022  {
4023    RegisterAllocationScope inner_register_scope(this);
4024    BuildTryCatch(
4025        // try {
4026        //   let method = iterator.return
4027        //   if (method !== null && method !== undefined) {
4028        //     let return_val = method.call(iterator)
4029        //     if (!%IsObject(return_val)) throw TypeError
4030        //   }
4031        // }
4032        [&]() {
4033          Register method = register_allocator()->NewRegister();
4034          builder()
4035              ->LoadNamedProperty(
4036                  iterator.object(), ast_string_constants()->return_string(),
4037                  feedback_index(feedback_spec()->AddLoadICSlot()))
4038              .JumpIfUndefinedOrNull(iterator_is_done.New())
4039              .StoreAccumulatorInRegister(method);
4040
4041          RegisterList args(iterator.object());
4042          builder()->CallProperty(
4043              method, args, feedback_index(feedback_spec()->AddCallICSlot()));
4044          if (iterator.type() == IteratorType::kAsync) {
4045            BuildAwait();
4046          }
4047          builder()->JumpIfJSReceiver(iterator_is_done.New());
4048          {
4049            // Throw this exception inside the try block so that it is
4050            // suppressed by the iteration continuation if necessary.
4051            RegisterAllocationScope register_scope(this);
4052            Register return_result = register_allocator()->NewRegister();
4053            builder()
4054                ->StoreAccumulatorInRegister(return_result)
4055                .CallRuntime(Runtime::kThrowIteratorResultNotAnObject,
4056                             return_result);
4057          }
4058        },
4059
4060        // catch (e) {
4061        //   if (iteration_continuation != RETHROW)
4062        //     rethrow e
4063        // }
4064        [&](Register context) {
4065          // Reuse context register to store the exception.
4066          Register close_exception = context;
4067          builder()->StoreAccumulatorInRegister(close_exception);
4068
4069          BytecodeLabel suppress_close_exception;
4070          builder()
4071              ->LoadLiteral(
4072                  Smi::FromInt(ControlScope::DeferredCommands::kRethrowToken))
4073              .CompareReference(iteration_continuation_token)
4074              .JumpIfTrue(ToBooleanMode::kAlreadyBoolean,
4075                          &suppress_close_exception)
4076              .LoadAccumulatorWithRegister(close_exception)
4077              .ReThrow()
4078              .Bind(&suppress_close_exception);
4079        },
4080        HandlerTable::UNCAUGHT);
4081  }
4082
4083  iterator_is_done.Bind(builder());
4084}
4085
4086// Get the default value of a destructuring target. Will mutate the
4087// destructuring target expression if there is a default value.
4088//
4089// For
4090//   a = b
4091// in
4092//   let {a = b} = c
4093// returns b and mutates the input into a.
4094Expression* BytecodeGenerator::GetDestructuringDefaultValue(
4095    Expression** target) {
4096  Expression* default_value = nullptr;
4097  if ((*target)->IsAssignment()) {
4098    Assignment* default_init = (*target)->AsAssignment();
4099    DCHECK_EQ(default_init->op(), Token::ASSIGN);
4100    default_value = default_init->value();
4101    *target = default_init->target();
4102    DCHECK((*target)->IsValidReferenceExpression() || (*target)->IsPattern());
4103  }
4104  return default_value;
4105}
4106
4107// Convert a destructuring assignment to an array literal into a sequence of
4108// iterator accesses into the value being assigned (in the accumulator).
4109//
4110// [a().x, ...b] = accumulator
4111//
4112//   becomes
4113//
4114// iterator = %GetIterator(accumulator)
4115// try {
4116//
4117//   // Individual assignments read off the value from iterator.next() This gets
4118//   // repeated per destructuring element.
4119//   if (!done) {
4120//     // Make sure we are considered 'done' if .next(), .done or .value fail.
4121//     done = true
4122//     var next_result = iterator.next()
4123//     var tmp_done = next_result.done
4124//     if (!tmp_done) {
4125//       value = next_result.value
4126//       done = false
4127//     }
4128//   }
4129//   if (done)
4130//     value = undefined
4131//   a().x = value
4132//
4133//   // A spread receives the remaining items in the iterator.
4134//   var array = []
4135//   var index = 0
4136//   %FillArrayWithIterator(iterator, array, index, done)
4137//   done = true
4138//   b = array
4139//
4140// } catch(e) {
4141//   iteration_continuation = RETHROW
4142// } finally {
4143//   %FinalizeIteration(iterator, done, iteration_continuation)
4144// }
4145void BytecodeGenerator::BuildDestructuringArrayAssignment(
4146    ArrayLiteral* pattern, Token::Value op,
4147    LookupHoistingMode lookup_hoisting_mode) {
4148  RegisterAllocationScope scope(this);
4149
4150  Register value = register_allocator()->NewRegister();
4151  builder()->StoreAccumulatorInRegister(value);
4152
4153  // Store the iterator in a dedicated register so that it can be closed on
4154  // exit, and the 'done' value in a dedicated register so that it can be
4155  // changed and accessed independently of the iteration result.
4156  IteratorRecord iterator = BuildGetIteratorRecord(IteratorType::kNormal);
4157  Register done = register_allocator()->NewRegister();
4158  builder()->LoadFalse();
4159  builder()->StoreAccumulatorInRegister(done);
4160
4161  BuildTryFinally(
4162      // Try block.
4163      [&]() {
4164        Register next_result = register_allocator()->NewRegister();
4165        FeedbackSlot next_value_load_slot = feedback_spec()->AddLoadICSlot();
4166        FeedbackSlot next_done_load_slot = feedback_spec()->AddLoadICSlot();
4167
4168        Spread* spread = nullptr;
4169        for (Expression* target : *pattern->values()) {
4170          if (target->IsSpread()) {
4171            spread = target->AsSpread();
4172            break;
4173          }
4174
4175          Expression* default_value = GetDestructuringDefaultValue(&target);
4176          if (!target->IsPattern()) {
4177            builder()->SetExpressionAsStatementPosition(target);
4178          }
4179
4180          AssignmentLhsData lhs_data = PrepareAssignmentLhs(target);
4181
4182          // if (!done) {
4183          //   // Make sure we are considered done if .next(), .done or .value
4184          //   // fail.
4185          //   done = true
4186          //   var next_result = iterator.next()
4187          //   var tmp_done = next_result.done
4188          //   if (!tmp_done) {
4189          //     value = next_result.value
4190          //     done = false
4191          //   }
4192          // }
4193          // if (done)
4194          //   value = undefined
4195          BytecodeLabels is_done(zone());
4196
4197          builder()->LoadAccumulatorWithRegister(done);
4198          builder()->JumpIfTrue(ToBooleanMode::kConvertToBoolean,
4199                                is_done.New());
4200
4201          builder()->LoadTrue().StoreAccumulatorInRegister(done);
4202          BuildIteratorNext(iterator, next_result);
4203          builder()
4204              ->LoadNamedProperty(next_result,
4205                                  ast_string_constants()->done_string(),
4206                                  feedback_index(next_done_load_slot))
4207              .JumpIfTrue(ToBooleanMode::kConvertToBoolean, is_done.New());
4208
4209          // Only do the assignment if this is not a hole (i.e. 'elided').
4210          if (!target->IsTheHoleLiteral()) {
4211            builder()
4212                ->LoadNamedProperty(next_result,
4213                                    ast_string_constants()->value_string(),
4214                                    feedback_index(next_value_load_slot))
4215                .StoreAccumulatorInRegister(next_result)
4216                .LoadFalse()
4217                .StoreAccumulatorInRegister(done)
4218                .LoadAccumulatorWithRegister(next_result);
4219
4220            // [<pattern> = <init>] = <value>
4221            //   becomes (roughly)
4222            // temp = <value>.next();
4223            // <pattern> = temp === undefined ? <init> : temp;
4224            BytecodeLabel do_assignment;
4225            if (default_value) {
4226              builder()->JumpIfNotUndefined(&do_assignment);
4227              // Since done == true => temp == undefined, jump directly to using
4228              // the default value for that case.
4229              is_done.Bind(builder());
4230              VisitForAccumulatorValue(default_value);
4231            } else {
4232              builder()->Jump(&do_assignment);
4233              is_done.Bind(builder());
4234              builder()->LoadUndefined();
4235            }
4236            builder()->Bind(&do_assignment);
4237
4238            BuildAssignment(lhs_data, op, lookup_hoisting_mode);
4239          } else {
4240            builder()->LoadFalse().StoreAccumulatorInRegister(done);
4241            DCHECK_EQ(lhs_data.assign_type(), NON_PROPERTY);
4242            is_done.Bind(builder());
4243          }
4244        }
4245
4246        if (spread) {
4247          RegisterAllocationScope scope(this);
4248          BytecodeLabel is_done;
4249
4250          // A spread is turned into a loop over the remainer of the iterator.
4251          Expression* target = spread->expression();
4252
4253          if (!target->IsPattern()) {
4254            builder()->SetExpressionAsStatementPosition(spread);
4255          }
4256
4257          AssignmentLhsData lhs_data = PrepareAssignmentLhs(target);
4258
4259          // var array = [];
4260          Register array = register_allocator()->NewRegister();
4261          builder()->CreateEmptyArrayLiteral(
4262              feedback_index(feedback_spec()->AddLiteralSlot()));
4263          builder()->StoreAccumulatorInRegister(array);
4264
4265          // If done, jump to assigning empty array
4266          builder()->LoadAccumulatorWithRegister(done);
4267          builder()->JumpIfTrue(ToBooleanMode::kConvertToBoolean, &is_done);
4268
4269          // var index = 0;
4270          Register index = register_allocator()->NewRegister();
4271          builder()->LoadLiteral(Smi::zero());
4272          builder()->StoreAccumulatorInRegister(index);
4273
4274          // Set done to true, since it's guaranteed to be true by the time the
4275          // array fill completes.
4276          builder()->LoadTrue().StoreAccumulatorInRegister(done);
4277
4278          // Fill the array with the iterator.
4279          FeedbackSlot element_slot =
4280              feedback_spec()->AddStoreInArrayLiteralICSlot();
4281          FeedbackSlot index_slot = feedback_spec()->AddBinaryOpICSlot();
4282          BuildFillArrayWithIterator(iterator, array, index, next_result,
4283                                     next_value_load_slot, next_done_load_slot,
4284                                     index_slot, element_slot);
4285
4286          builder()->Bind(&is_done);
4287          // Assign the array to the LHS.
4288          builder()->LoadAccumulatorWithRegister(array);
4289          BuildAssignment(lhs_data, op, lookup_hoisting_mode);
4290        }
4291      },
4292      // Finally block.
4293      [&](Register iteration_continuation_token) {
4294        // Finish the iteration in the finally block.
4295        BuildFinalizeIteration(iterator, done, iteration_continuation_token);
4296      },
4297      HandlerTable::UNCAUGHT);
4298
4299  if (!execution_result()->IsEffect()) {
4300    builder()->LoadAccumulatorWithRegister(value);
4301  }
4302}
4303
4304// Convert a destructuring assignment to an object literal into a sequence of
4305// property accesses into the value being assigned (in the accumulator).
4306//
4307// { y, [x++]: a(), ...b.c } = value
4308//
4309//   becomes
4310//
4311// var rest_runtime_callargs = new Array(3);
4312// rest_runtime_callargs[0] = value;
4313//
4314// rest_runtime_callargs[1] = "y";
4315// y = value.y;
4316//
4317// var temp1 = %ToName(x++);
4318// rest_runtime_callargs[2] = temp1;
4319// a() = value[temp1];
4320//
4321// b.c =
4322// %CopyDataPropertiesWithExcludedPropertiesOnStack.call(rest_runtime_callargs);
4323void BytecodeGenerator::BuildDestructuringObjectAssignment(
4324    ObjectLiteral* pattern, Token::Value op,
4325    LookupHoistingMode lookup_hoisting_mode) {
4326  RegisterAllocationScope register_scope(this);
4327
4328  // Store the assignment value in a register.
4329  Register value;
4330  RegisterList rest_runtime_callargs;
4331  if (pattern->builder()->has_rest_property()) {
4332    rest_runtime_callargs =
4333        register_allocator()->NewRegisterList(pattern->properties()->length());
4334    value = rest_runtime_callargs[0];
4335  } else {
4336    value = register_allocator()->NewRegister();
4337  }
4338  builder()->StoreAccumulatorInRegister(value);
4339
4340  // if (value === null || value === undefined)
4341  //   throw new TypeError(kNonCoercible);
4342  //
4343  // Since the first property access on null/undefined will also trigger a
4344  // TypeError, we can elide this check. The exception is when there are no
4345  // properties and no rest property (this is an empty literal), or when the
4346  // first property is a computed name and accessing it can have side effects.
4347  //
4348  // TODO(leszeks): Also eliminate this check if the value is known to be
4349  // non-null (e.g. an object literal).
4350  if (pattern->properties()->is_empty() ||
4351      (pattern->properties()->at(0)->is_computed_name() &&
4352       pattern->properties()->at(0)->kind() != ObjectLiteralProperty::SPREAD)) {
4353    BytecodeLabel is_null_or_undefined, not_null_or_undefined;
4354    builder()
4355        ->JumpIfUndefinedOrNull(&is_null_or_undefined)
4356        .Jump(&not_null_or_undefined);
4357
4358    {
4359      builder()->Bind(&is_null_or_undefined);
4360      builder()->SetExpressionPosition(pattern);
4361      builder()->CallRuntime(Runtime::kThrowPatternAssignmentNonCoercible,
4362                             value);
4363    }
4364    builder()->Bind(&not_null_or_undefined);
4365  }
4366
4367  int i = 0;
4368  for (ObjectLiteralProperty* pattern_property : *pattern->properties()) {
4369    RegisterAllocationScope inner_register_scope(this);
4370
4371    // The key of the pattern becomes the key into the RHS value, and the value
4372    // of the pattern becomes the target of the assignment.
4373    //
4374    // e.g. { a: b } = o becomes b = o.a
4375    Expression* pattern_key = pattern_property->key();
4376    Expression* target = pattern_property->value();
4377    Expression* default_value = GetDestructuringDefaultValue(&target);
4378
4379    if (!target->IsPattern()) {
4380      builder()->SetExpressionAsStatementPosition(target);
4381    }
4382
4383    // Calculate this property's key into the assignment RHS value, additionally
4384    // storing the key for rest_runtime_callargs if needed.
4385    //
4386    // The RHS is accessed using the key either by LoadNamedProperty (if
4387    // value_name is valid) or by LoadKeyedProperty (otherwise).
4388    const AstRawString* value_name = nullptr;
4389    Register value_key;
4390
4391    if (pattern_property->kind() != ObjectLiteralProperty::Kind::SPREAD) {
4392      if (pattern_key->IsPropertyName()) {
4393        value_name = pattern_key->AsLiteral()->AsRawPropertyName();
4394      }
4395      if (pattern->builder()->has_rest_property() || !value_name) {
4396        if (pattern->builder()->has_rest_property()) {
4397          value_key = rest_runtime_callargs[i + 1];
4398        } else {
4399          value_key = register_allocator()->NewRegister();
4400        }
4401        if (pattern_property->is_computed_name()) {
4402          // { [a()]: b().x } = c
4403          // becomes
4404          // var tmp = a()
4405          // b().x = c[tmp]
4406          DCHECK(!pattern_key->IsPropertyName() ||
4407                 !pattern_key->IsNumberLiteral());
4408          VisitForAccumulatorValue(pattern_key);
4409          builder()->ToName(value_key);
4410        } else {
4411          // We only need the key for non-computed properties when it is numeric
4412          // or is being saved for the rest_runtime_callargs.
4413          DCHECK(pattern_key->IsNumberLiteral() ||
4414                 (pattern->builder()->has_rest_property() &&
4415                  pattern_key->IsPropertyName()));
4416          VisitForRegisterValue(pattern_key, value_key);
4417        }
4418      }
4419    }
4420
4421    AssignmentLhsData lhs_data = PrepareAssignmentLhs(target);
4422
4423    // Get the value from the RHS.
4424    if (pattern_property->kind() == ObjectLiteralProperty::Kind::SPREAD) {
4425      DCHECK_EQ(i, pattern->properties()->length() - 1);
4426      DCHECK(!value_key.is_valid());
4427      DCHECK_NULL(value_name);
4428      builder()->CallRuntime(
4429          Runtime::kInlineCopyDataPropertiesWithExcludedPropertiesOnStack,
4430          rest_runtime_callargs);
4431    } else if (value_name) {
4432      builder()->LoadNamedProperty(
4433          value, value_name, feedback_index(feedback_spec()->AddLoadICSlot()));
4434    } else {
4435      DCHECK(value_key.is_valid());
4436      builder()->LoadAccumulatorWithRegister(value_key).LoadKeyedProperty(
4437          value, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
4438    }
4439
4440    // {<pattern> = <init>} = <value>
4441    //   becomes
4442    // temp = <value>;
4443    // <pattern> = temp === undefined ? <init> : temp;
4444    if (default_value) {
4445      BytecodeLabel value_not_undefined;
4446      builder()->JumpIfNotUndefined(&value_not_undefined);
4447      VisitForAccumulatorValue(default_value);
4448      builder()->Bind(&value_not_undefined);
4449    }
4450
4451    BuildAssignment(lhs_data, op, lookup_hoisting_mode);
4452
4453    i++;
4454  }
4455
4456  if (!execution_result()->IsEffect()) {
4457    builder()->LoadAccumulatorWithRegister(value);
4458  }
4459}
4460
4461void BytecodeGenerator::BuildAssignment(
4462    const AssignmentLhsData& lhs_data, Token::Value op,
4463    LookupHoistingMode lookup_hoisting_mode) {
4464  // Assign the value to the LHS.
4465  switch (lhs_data.assign_type()) {
4466    case NON_PROPERTY: {
4467      if (ObjectLiteral* pattern_as_object =
4468              lhs_data.expr()->AsObjectLiteral()) {
4469        // Split object literals into destructuring.
4470        BuildDestructuringObjectAssignment(pattern_as_object, op,
4471                                           lookup_hoisting_mode);
4472      } else if (ArrayLiteral* pattern_as_array =
4473                     lhs_data.expr()->AsArrayLiteral()) {
4474        // Split object literals into destructuring.
4475        BuildDestructuringArrayAssignment(pattern_as_array, op,
4476                                          lookup_hoisting_mode);
4477      } else {
4478        DCHECK(lhs_data.expr()->IsVariableProxy());
4479        VariableProxy* proxy = lhs_data.expr()->AsVariableProxy();
4480        BuildVariableAssignment(proxy->var(), op, proxy->hole_check_mode(),
4481                                lookup_hoisting_mode);
4482      }
4483      break;
4484    }
4485    case NAMED_PROPERTY: {
4486      BuildSetNamedProperty(lhs_data.object_expr(), lhs_data.object(),
4487                            lhs_data.name());
4488      break;
4489    }
4490    case KEYED_PROPERTY: {
4491      FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
4492      Register value;
4493      if (!execution_result()->IsEffect()) {
4494        value = register_allocator()->NewRegister();
4495        builder()->StoreAccumulatorInRegister(value);
4496      }
4497      builder()->SetKeyedProperty(lhs_data.object(), lhs_data.key(),
4498                                  feedback_index(slot), language_mode());
4499      if (!execution_result()->IsEffect()) {
4500        builder()->LoadAccumulatorWithRegister(value);
4501      }
4502      break;
4503    }
4504    case NAMED_SUPER_PROPERTY: {
4505      builder()
4506          ->StoreAccumulatorInRegister(lhs_data.super_property_args()[3])
4507          .CallRuntime(Runtime::kStoreToSuper, lhs_data.super_property_args());
4508      break;
4509    }
4510    case KEYED_SUPER_PROPERTY: {
4511      builder()
4512          ->StoreAccumulatorInRegister(lhs_data.super_property_args()[3])
4513          .CallRuntime(Runtime::kStoreKeyedToSuper,
4514                       lhs_data.super_property_args());
4515      break;
4516    }
4517    case PRIVATE_METHOD: {
4518      Property* property = lhs_data.expr()->AsProperty();
4519      BuildPrivateBrandCheck(property, lhs_data.object());
4520      BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateMethodWrite,
4521                                 lhs_data.expr()->AsProperty());
4522      break;
4523    }
4524    case PRIVATE_GETTER_ONLY: {
4525      Property* property = lhs_data.expr()->AsProperty();
4526      BuildPrivateBrandCheck(property, lhs_data.object());
4527      BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateSetterAccess,
4528                                 lhs_data.expr()->AsProperty());
4529      break;
4530    }
4531    case PRIVATE_SETTER_ONLY:
4532    case PRIVATE_GETTER_AND_SETTER: {
4533      Register value = register_allocator()->NewRegister();
4534      builder()->StoreAccumulatorInRegister(value);
4535      Property* property = lhs_data.expr()->AsProperty();
4536      BuildPrivateBrandCheck(property, lhs_data.object());
4537      BuildPrivateSetterAccess(lhs_data.object(), lhs_data.key(), value);
4538      if (!execution_result()->IsEffect()) {
4539        builder()->LoadAccumulatorWithRegister(value);
4540      }
4541      break;
4542    }
4543  }
4544}
4545
4546void BytecodeGenerator::VisitAssignment(Assignment* expr) {
4547  AssignmentLhsData lhs_data = PrepareAssignmentLhs(expr->target());
4548
4549  VisitForAccumulatorValue(expr->value());
4550
4551  builder()->SetExpressionPosition(expr);
4552  BuildAssignment(lhs_data, expr->op(), expr->lookup_hoisting_mode());
4553}
4554
4555void BytecodeGenerator::VisitCompoundAssignment(CompoundAssignment* expr) {
4556  AssignmentLhsData lhs_data = PrepareAssignmentLhs(expr->target());
4557
4558  // Evaluate the value and potentially handle compound assignments by loading
4559  // the left-hand side value and performing a binary operation.
4560  switch (lhs_data.assign_type()) {
4561    case NON_PROPERTY: {
4562      VariableProxy* proxy = expr->target()->AsVariableProxy();
4563      BuildVariableLoad(proxy->var(), proxy->hole_check_mode());
4564      break;
4565    }
4566    case NAMED_PROPERTY: {
4567      BuildLoadNamedProperty(lhs_data.object_expr(), lhs_data.object(),
4568                             lhs_data.name());
4569      break;
4570    }
4571    case KEYED_PROPERTY: {
4572      FeedbackSlot slot = feedback_spec()->AddKeyedLoadICSlot();
4573      builder()
4574          ->LoadAccumulatorWithRegister(lhs_data.key())
4575          .LoadKeyedProperty(lhs_data.object(), feedback_index(slot));
4576      break;
4577    }
4578    case NAMED_SUPER_PROPERTY: {
4579      builder()->CallRuntime(Runtime::kLoadFromSuper,
4580                             lhs_data.super_property_args().Truncate(3));
4581      break;
4582    }
4583    case KEYED_SUPER_PROPERTY: {
4584      builder()->CallRuntime(Runtime::kLoadKeyedFromSuper,
4585                             lhs_data.super_property_args().Truncate(3));
4586      break;
4587    }
4588    // BuildAssignment() will throw an error about the private method being
4589    // read-only.
4590    case PRIVATE_METHOD: {
4591      Property* property = lhs_data.expr()->AsProperty();
4592      BuildPrivateBrandCheck(property, lhs_data.object());
4593      builder()->LoadAccumulatorWithRegister(lhs_data.key());
4594      break;
4595    }
4596    // For read-only properties, BuildAssignment() will throw an error about
4597    // the missing setter.
4598    case PRIVATE_GETTER_ONLY:
4599    case PRIVATE_GETTER_AND_SETTER: {
4600      Property* property = lhs_data.expr()->AsProperty();
4601      BuildPrivateBrandCheck(property, lhs_data.object());
4602      BuildPrivateGetterAccess(lhs_data.object(), lhs_data.key());
4603      break;
4604    }
4605    case PRIVATE_SETTER_ONLY: {
4606      // The property access is invalid, but if the brand check fails too, we
4607      // need to return the error from the brand check.
4608      Property* property = lhs_data.expr()->AsProperty();
4609      BuildPrivateBrandCheck(property, lhs_data.object());
4610      BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateGetterAccess,
4611                                 lhs_data.expr()->AsProperty());
4612      break;
4613    }
4614  }
4615
4616  BinaryOperation* binop = expr->binary_operation();
4617  FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
4618  BytecodeLabel short_circuit;
4619  if (binop->op() == Token::NULLISH) {
4620    BytecodeLabel nullish;
4621    builder()
4622        ->JumpIfUndefinedOrNull(&nullish)
4623        .Jump(&short_circuit)
4624        .Bind(&nullish);
4625    VisitForAccumulatorValue(expr->value());
4626  } else if (binop->op() == Token::OR) {
4627    builder()->JumpIfTrue(ToBooleanMode::kConvertToBoolean, &short_circuit);
4628    VisitForAccumulatorValue(expr->value());
4629  } else if (binop->op() == Token::AND) {
4630    builder()->JumpIfFalse(ToBooleanMode::kConvertToBoolean, &short_circuit);
4631    VisitForAccumulatorValue(expr->value());
4632  } else if (expr->value()->IsSmiLiteral()) {
4633    builder()->BinaryOperationSmiLiteral(
4634        binop->op(), expr->value()->AsLiteral()->AsSmiLiteral(),
4635        feedback_index(slot));
4636  } else {
4637    Register old_value = register_allocator()->NewRegister();
4638    builder()->StoreAccumulatorInRegister(old_value);
4639    VisitForAccumulatorValue(expr->value());
4640    builder()->BinaryOperation(binop->op(), old_value, feedback_index(slot));
4641  }
4642  builder()->SetExpressionPosition(expr);
4643
4644  BuildAssignment(lhs_data, expr->op(), expr->lookup_hoisting_mode());
4645  builder()->Bind(&short_circuit);
4646}
4647
4648// Suspends the generator to resume at the next suspend_id, with output stored
4649// in the accumulator. When the generator is resumed, the sent value is loaded
4650// in the accumulator.
4651void BytecodeGenerator::BuildSuspendPoint(int position) {
4652  // Because we eliminate jump targets in dead code, we also eliminate resumes
4653  // when the suspend is not emitted because otherwise the below call to Bind
4654  // would start a new basic block and the code would be considered alive.
4655  if (builder()->RemainderOfBlockIsDead()) {
4656    return;
4657  }
4658  const int suspend_id = suspend_count_++;
4659
4660  RegisterList registers = register_allocator()->AllLiveRegisters();
4661
4662  // Save context, registers, and state. This bytecode then returns the value
4663  // in the accumulator.
4664  builder()->SetExpressionPosition(position);
4665  builder()->SuspendGenerator(generator_object(), registers, suspend_id);
4666
4667  // Upon resume, we continue here.
4668  builder()->Bind(generator_jump_table_, suspend_id);
4669
4670  // Clobbers all registers and sets the accumulator to the
4671  // [[input_or_debug_pos]] slot of the generator object.
4672  builder()->ResumeGenerator(generator_object(), registers);
4673}
4674
4675void BytecodeGenerator::VisitYield(Yield* expr) {
4676  builder()->SetExpressionPosition(expr);
4677  VisitForAccumulatorValue(expr->expression());
4678
4679  // If this is not the first yield
4680  if (suspend_count_ > 0) {
4681    if (IsAsyncGeneratorFunction(function_kind())) {
4682      // AsyncGenerator yields (with the exception of the initial yield)
4683      // delegate work to the AsyncGeneratorYield stub, which Awaits the operand
4684      // and on success, wraps the value in an IteratorResult.
4685      RegisterAllocationScope register_scope(this);
4686      RegisterList args = register_allocator()->NewRegisterList(3);
4687      builder()
4688          ->MoveRegister(generator_object(), args[0])  // generator
4689          .StoreAccumulatorInRegister(args[1])         // value
4690          .LoadBoolean(catch_prediction() != HandlerTable::ASYNC_AWAIT)
4691          .StoreAccumulatorInRegister(args[2])  // is_caught
4692          .CallRuntime(Runtime::kInlineAsyncGeneratorYield, args);
4693    } else {
4694      // Generator yields (with the exception of the initial yield) wrap the
4695      // value into IteratorResult.
4696      RegisterAllocationScope register_scope(this);
4697      RegisterList args = register_allocator()->NewRegisterList(2);
4698      builder()
4699          ->StoreAccumulatorInRegister(args[0])  // value
4700          .LoadFalse()
4701          .StoreAccumulatorInRegister(args[1])  // done
4702          .CallRuntime(Runtime::kInlineCreateIterResultObject, args);
4703    }
4704  }
4705
4706  BuildSuspendPoint(expr->position());
4707  // At this point, the generator has been resumed, with the received value in
4708  // the accumulator.
4709
4710  // TODO(caitp): remove once yield* desugaring for async generators is handled
4711  // in BytecodeGenerator.
4712  if (expr->on_abrupt_resume() == Yield::kNoControl) {
4713    DCHECK(IsAsyncGeneratorFunction(function_kind()));
4714    return;
4715  }
4716
4717  Register input = register_allocator()->NewRegister();
4718  builder()->StoreAccumulatorInRegister(input).CallRuntime(
4719      Runtime::kInlineGeneratorGetResumeMode, generator_object());
4720
4721  // Now dispatch on resume mode.
4722  STATIC_ASSERT(JSGeneratorObject::kNext + 1 == JSGeneratorObject::kReturn);
4723  BytecodeJumpTable* jump_table =
4724      builder()->AllocateJumpTable(2, JSGeneratorObject::kNext);
4725
4726  builder()->SwitchOnSmiNoFeedback(jump_table);
4727
4728  {
4729    // Resume with throw (switch fallthrough).
4730    // TODO(leszeks): Add a debug-only check that the accumulator is
4731    // JSGeneratorObject::kThrow.
4732    builder()->SetExpressionPosition(expr);
4733    builder()->LoadAccumulatorWithRegister(input);
4734    builder()->Throw();
4735  }
4736
4737  {
4738    // Resume with return.
4739    builder()->Bind(jump_table, JSGeneratorObject::kReturn);
4740    builder()->LoadAccumulatorWithRegister(input);
4741    if (IsAsyncGeneratorFunction(function_kind())) {
4742      execution_control()->AsyncReturnAccumulator(kNoSourcePosition);
4743    } else {
4744      execution_control()->ReturnAccumulator(kNoSourcePosition);
4745    }
4746  }
4747
4748  {
4749    // Resume with next.
4750    builder()->Bind(jump_table, JSGeneratorObject::kNext);
4751    BuildIncrementBlockCoverageCounterIfEnabled(expr,
4752                                                SourceRangeKind::kContinuation);
4753    builder()->LoadAccumulatorWithRegister(input);
4754  }
4755}
4756
4757// Desugaring of (yield* iterable)
4758//
4759//   do {
4760//     const kNext = 0;
4761//     const kReturn = 1;
4762//     const kThrow = 2;
4763//
4764//     let output; // uninitialized
4765//
4766//     let iteratorRecord = GetIterator(iterable);
4767//     let iterator = iteratorRecord.[[Iterator]];
4768//     let next = iteratorRecord.[[NextMethod]];
4769//     let input = undefined;
4770//     let resumeMode = kNext;
4771//
4772//     while (true) {
4773//       // From the generator to the iterator:
4774//       // Forward input according to resumeMode and obtain output.
4775//       switch (resumeMode) {
4776//         case kNext:
4777//           output = next.[[Call]](iterator, « »);;
4778//           break;
4779//         case kReturn:
4780//           let iteratorReturn = iterator.return;
4781//           if (IS_NULL_OR_UNDEFINED(iteratorReturn)) {
4782//             if (IS_ASYNC_GENERATOR) input = await input;
4783//             return input;
4784//           }
4785//           output = iteratorReturn.[[Call]](iterator, «input»);
4786//           break;
4787//         case kThrow:
4788//           let iteratorThrow = iterator.throw;
4789//           if (IS_NULL_OR_UNDEFINED(iteratorThrow)) {
4790//             let iteratorReturn = iterator.return;
4791//             if (!IS_NULL_OR_UNDEFINED(iteratorReturn)) {
4792//               output = iteratorReturn.[[Call]](iterator, « »);
4793//               if (IS_ASYNC_GENERATOR) output = await output;
4794//               if (!IS_RECEIVER(output)) %ThrowIterResultNotAnObject(output);
4795//             }
4796//             throw MakeTypeError(kThrowMethodMissing);
4797//           }
4798//           output = iteratorThrow.[[Call]](iterator, «input»);
4799//           break;
4800//       }
4801//
4802//       if (IS_ASYNC_GENERATOR) output = await output;
4803//       if (!IS_RECEIVER(output)) %ThrowIterResultNotAnObject(output);
4804//       if (output.done) break;
4805//
4806//       // From the generator to its user:
4807//       // Forward output, receive new input, and determine resume mode.
4808//       if (IS_ASYNC_GENERATOR) {
4809//         // AsyncGeneratorYield abstract operation awaits the operand before
4810//         // resolving the promise for the current AsyncGeneratorRequest.
4811//         %_AsyncGeneratorYield(output.value)
4812//       }
4813//       input = Suspend(output);
4814//       resumeMode = %GeneratorGetResumeMode();
4815//     }
4816//
4817//     if (resumeMode === kReturn) {
4818//       return output.value;
4819//     }
4820//     output.value
4821//   }
4822void BytecodeGenerator::VisitYieldStar(YieldStar* expr) {
4823  Register output = register_allocator()->NewRegister();
4824  Register resume_mode = register_allocator()->NewRegister();
4825  IteratorType iterator_type = IsAsyncGeneratorFunction(function_kind())
4826                                   ? IteratorType::kAsync
4827                                   : IteratorType::kNormal;
4828
4829  {
4830    RegisterAllocationScope register_scope(this);
4831    RegisterList iterator_and_input = register_allocator()->NewRegisterList(2);
4832    VisitForAccumulatorValue(expr->expression());
4833    IteratorRecord iterator = BuildGetIteratorRecord(
4834        register_allocator()->NewRegister() /* next method */,
4835        iterator_and_input[0], iterator_type);
4836
4837    Register input = iterator_and_input[1];
4838    builder()->LoadUndefined().StoreAccumulatorInRegister(input);
4839    builder()
4840        ->LoadLiteral(Smi::FromInt(JSGeneratorObject::kNext))
4841        .StoreAccumulatorInRegister(resume_mode);
4842
4843    {
4844      // This loop builder does not construct counters as the loop is not
4845      // visible to the user, and we therefore neither pass the block coverage
4846      // builder nor the expression.
4847      //
4848      // In addition to the normal suspend for yield*, a yield* in an async
4849      // generator has 2 additional suspends:
4850      //   - One for awaiting the iterator result of closing the generator when
4851      //     resumed with a "throw" completion, and a throw method is not
4852      //     present on the delegated iterator
4853      //   - One for awaiting the iterator result yielded by the delegated
4854      //     iterator
4855
4856      LoopBuilder loop_builder(builder(), nullptr, nullptr);
4857      LoopScope loop_scope(this, &loop_builder);
4858
4859      {
4860        BytecodeLabels after_switch(zone());
4861        BytecodeJumpTable* switch_jump_table =
4862            builder()->AllocateJumpTable(2, 1);
4863
4864        builder()
4865            ->LoadAccumulatorWithRegister(resume_mode)
4866            .SwitchOnSmiNoFeedback(switch_jump_table);
4867
4868        // Fallthrough to default case.
4869        // TODO(ignition): Add debug code to check that {resume_mode} really is
4870        // {JSGeneratorObject::kNext} in this case.
4871        STATIC_ASSERT(JSGeneratorObject::kNext == 0);
4872        {
4873          FeedbackSlot slot = feedback_spec()->AddCallICSlot();
4874          builder()->CallProperty(iterator.next(), iterator_and_input,
4875                                  feedback_index(slot));
4876          builder()->Jump(after_switch.New());
4877        }
4878
4879        STATIC_ASSERT(JSGeneratorObject::kReturn == 1);
4880        builder()->Bind(switch_jump_table, JSGeneratorObject::kReturn);
4881        {
4882          const AstRawString* return_string =
4883              ast_string_constants()->return_string();
4884          BytecodeLabels no_return_method(zone());
4885
4886          BuildCallIteratorMethod(iterator.object(), return_string,
4887                                  iterator_and_input, after_switch.New(),
4888                                  &no_return_method);
4889          no_return_method.Bind(builder());
4890          builder()->LoadAccumulatorWithRegister(input);
4891          if (iterator_type == IteratorType::kAsync) {
4892            // Await input.
4893            BuildAwait(expr->position());
4894            execution_control()->AsyncReturnAccumulator(kNoSourcePosition);
4895          } else {
4896            execution_control()->ReturnAccumulator(kNoSourcePosition);
4897          }
4898        }
4899
4900        STATIC_ASSERT(JSGeneratorObject::kThrow == 2);
4901        builder()->Bind(switch_jump_table, JSGeneratorObject::kThrow);
4902        {
4903          const AstRawString* throw_string =
4904              ast_string_constants()->throw_string();
4905          BytecodeLabels no_throw_method(zone());
4906          BuildCallIteratorMethod(iterator.object(), throw_string,
4907                                  iterator_and_input, after_switch.New(),
4908                                  &no_throw_method);
4909
4910          // If there is no "throw" method, perform IteratorClose, and finally
4911          // throw a TypeError.
4912          no_throw_method.Bind(builder());
4913          BuildIteratorClose(iterator, expr);
4914          builder()->CallRuntime(Runtime::kThrowThrowMethodMissing);
4915        }
4916
4917        after_switch.Bind(builder());
4918      }
4919
4920      if (iterator_type == IteratorType::kAsync) {
4921        // Await the result of the method invocation.
4922        BuildAwait(expr->position());
4923      }
4924
4925      // Check that output is an object.
4926      BytecodeLabel check_if_done;
4927      builder()
4928          ->StoreAccumulatorInRegister(output)
4929          .JumpIfJSReceiver(&check_if_done)
4930          .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, output);
4931
4932      builder()->Bind(&check_if_done);
4933      // Break once output.done is true.
4934      builder()->LoadNamedProperty(
4935          output, ast_string_constants()->done_string(),
4936          feedback_index(feedback_spec()->AddLoadICSlot()));
4937
4938      loop_builder.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
4939
4940      // Suspend the current generator.
4941      if (iterator_type == IteratorType::kNormal) {
4942        builder()->LoadAccumulatorWithRegister(output);
4943      } else {
4944        RegisterAllocationScope inner_register_scope(this);
4945        DCHECK_EQ(iterator_type, IteratorType::kAsync);
4946        // If generatorKind is async, perform AsyncGeneratorYield(output.value),
4947        // which will await `output.value` before resolving the current
4948        // AsyncGeneratorRequest's promise.
4949        builder()->LoadNamedProperty(
4950            output, ast_string_constants()->value_string(),
4951            feedback_index(feedback_spec()->AddLoadICSlot()));
4952
4953        RegisterList args = register_allocator()->NewRegisterList(3);
4954        builder()
4955            ->MoveRegister(generator_object(), args[0])  // generator
4956            .StoreAccumulatorInRegister(args[1])         // value
4957            .LoadBoolean(catch_prediction() != HandlerTable::ASYNC_AWAIT)
4958            .StoreAccumulatorInRegister(args[2])  // is_caught
4959            .CallRuntime(Runtime::kInlineAsyncGeneratorYield, args);
4960      }
4961
4962      BuildSuspendPoint(expr->position());
4963      builder()->StoreAccumulatorInRegister(input);
4964      builder()
4965          ->CallRuntime(Runtime::kInlineGeneratorGetResumeMode,
4966                        generator_object())
4967          .StoreAccumulatorInRegister(resume_mode);
4968
4969      loop_builder.BindContinueTarget();
4970    }
4971  }
4972
4973  // Decide if we trigger a return or if the yield* expression should just
4974  // produce a value.
4975  BytecodeLabel completion_is_output_value;
4976  Register output_value = register_allocator()->NewRegister();
4977  builder()
4978      ->LoadNamedProperty(output, ast_string_constants()->value_string(),
4979                          feedback_index(feedback_spec()->AddLoadICSlot()))
4980      .StoreAccumulatorInRegister(output_value)
4981      .LoadLiteral(Smi::FromInt(JSGeneratorObject::kReturn))
4982      .CompareReference(resume_mode)
4983      .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &completion_is_output_value)
4984      .LoadAccumulatorWithRegister(output_value);
4985  if (iterator_type == IteratorType::kAsync) {
4986    execution_control()->AsyncReturnAccumulator(kNoSourcePosition);
4987  } else {
4988    execution_control()->ReturnAccumulator(kNoSourcePosition);
4989  }
4990
4991  builder()->Bind(&completion_is_output_value);
4992  BuildIncrementBlockCoverageCounterIfEnabled(expr,
4993                                              SourceRangeKind::kContinuation);
4994  builder()->LoadAccumulatorWithRegister(output_value);
4995}
4996
4997void BytecodeGenerator::BuildAwait(int position) {
4998  // Rather than HandlerTable::UNCAUGHT, async functions use
4999  // HandlerTable::ASYNC_AWAIT to communicate that top-level exceptions are
5000  // transformed into promise rejections. This is necessary to prevent emitting
5001  // multiple debug events for the same uncaught exception. There is no point
5002  // in the body of an async function where catch prediction is
5003  // HandlerTable::UNCAUGHT.
5004  DCHECK(catch_prediction() != HandlerTable::UNCAUGHT ||
5005         info()->scope()->is_repl_mode_scope());
5006
5007  {
5008    // Await(operand) and suspend.
5009    RegisterAllocationScope register_scope(this);
5010
5011    Runtime::FunctionId await_intrinsic_id;
5012    if (IsAsyncGeneratorFunction(function_kind())) {
5013      await_intrinsic_id = catch_prediction() == HandlerTable::ASYNC_AWAIT
5014                               ? Runtime::kInlineAsyncGeneratorAwaitUncaught
5015                               : Runtime::kInlineAsyncGeneratorAwaitCaught;
5016    } else {
5017      await_intrinsic_id = catch_prediction() == HandlerTable::ASYNC_AWAIT
5018                               ? Runtime::kInlineAsyncFunctionAwaitUncaught
5019                               : Runtime::kInlineAsyncFunctionAwaitCaught;
5020    }
5021    RegisterList args = register_allocator()->NewRegisterList(2);
5022    builder()
5023        ->MoveRegister(generator_object(), args[0])
5024        .StoreAccumulatorInRegister(args[1])
5025        .CallRuntime(await_intrinsic_id, args);
5026  }
5027
5028  BuildSuspendPoint(position);
5029
5030  Register input = register_allocator()->NewRegister();
5031  Register resume_mode = register_allocator()->NewRegister();
5032
5033  // Now dispatch on resume mode.
5034  BytecodeLabel resume_next;
5035  builder()
5036      ->StoreAccumulatorInRegister(input)
5037      .CallRuntime(Runtime::kInlineGeneratorGetResumeMode, generator_object())
5038      .StoreAccumulatorInRegister(resume_mode)
5039      .LoadLiteral(Smi::FromInt(JSGeneratorObject::kNext))
5040      .CompareReference(resume_mode)
5041      .JumpIfTrue(ToBooleanMode::kAlreadyBoolean, &resume_next);
5042
5043  // Resume with "throw" completion (rethrow the received value).
5044  // TODO(leszeks): Add a debug-only check that the accumulator is
5045  // JSGeneratorObject::kThrow.
5046  builder()->LoadAccumulatorWithRegister(input).ReThrow();
5047
5048  // Resume with next.
5049  builder()->Bind(&resume_next);
5050  builder()->LoadAccumulatorWithRegister(input);
5051}
5052
5053void BytecodeGenerator::VisitAwait(Await* expr) {
5054  builder()->SetExpressionPosition(expr);
5055  VisitForAccumulatorValue(expr->expression());
5056  BuildAwait(expr->position());
5057  BuildIncrementBlockCoverageCounterIfEnabled(expr,
5058                                              SourceRangeKind::kContinuation);
5059}
5060
5061void BytecodeGenerator::VisitThrow(Throw* expr) {
5062  AllocateBlockCoverageSlotIfEnabled(expr, SourceRangeKind::kContinuation);
5063  VisitForAccumulatorValue(expr->exception());
5064  builder()->SetExpressionPosition(expr);
5065  builder()->Throw();
5066}
5067
5068void BytecodeGenerator::VisitPropertyLoad(Register obj, Property* property) {
5069  if (property->is_optional_chain_link()) {
5070    DCHECK_NOT_NULL(optional_chaining_null_labels_);
5071    int right_range =
5072        AllocateBlockCoverageSlotIfEnabled(property, SourceRangeKind::kRight);
5073    builder()->LoadAccumulatorWithRegister(obj).JumpIfUndefinedOrNull(
5074        optional_chaining_null_labels_->New());
5075    BuildIncrementBlockCoverageCounterIfEnabled(right_range);
5076  }
5077
5078  AssignType property_kind = Property::GetAssignType(property);
5079
5080  switch (property_kind) {
5081    case NON_PROPERTY:
5082      UNREACHABLE();
5083    case NAMED_PROPERTY: {
5084      builder()->SetExpressionPosition(property);
5085      const AstRawString* name =
5086          property->key()->AsLiteral()->AsRawPropertyName();
5087      BuildLoadNamedProperty(property->obj(), obj, name);
5088      break;
5089    }
5090    case KEYED_PROPERTY: {
5091      VisitForAccumulatorValue(property->key());
5092      builder()->SetExpressionPosition(property);
5093      builder()->LoadKeyedProperty(
5094          obj, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
5095      break;
5096    }
5097    case NAMED_SUPER_PROPERTY:
5098      VisitNamedSuperPropertyLoad(property, Register::invalid_value());
5099      break;
5100    case KEYED_SUPER_PROPERTY:
5101      VisitKeyedSuperPropertyLoad(property, Register::invalid_value());
5102      break;
5103    case PRIVATE_SETTER_ONLY: {
5104      BuildPrivateBrandCheck(property, obj);
5105      BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateGetterAccess,
5106                                 property);
5107      break;
5108    }
5109    case PRIVATE_GETTER_ONLY:
5110    case PRIVATE_GETTER_AND_SETTER: {
5111      Register key = VisitForRegisterValue(property->key());
5112      BuildPrivateBrandCheck(property, obj);
5113      BuildPrivateGetterAccess(obj, key);
5114      break;
5115    }
5116    case PRIVATE_METHOD: {
5117      BuildPrivateBrandCheck(property, obj);
5118      // In the case of private methods, property->key() is the function to be
5119      // loaded (stored in a context slot), so load this directly.
5120      VisitForAccumulatorValue(property->key());
5121      break;
5122    }
5123  }
5124}
5125
5126void BytecodeGenerator::BuildPrivateGetterAccess(Register object,
5127                                                 Register accessor_pair) {
5128  RegisterAllocationScope scope(this);
5129  Register accessor = register_allocator()->NewRegister();
5130  RegisterList args = register_allocator()->NewRegisterList(1);
5131
5132  builder()
5133      ->CallRuntime(Runtime::kLoadPrivateGetter, accessor_pair)
5134      .StoreAccumulatorInRegister(accessor)
5135      .MoveRegister(object, args[0])
5136      .CallProperty(accessor, args,
5137                    feedback_index(feedback_spec()->AddCallICSlot()));
5138}
5139
5140void BytecodeGenerator::BuildPrivateSetterAccess(Register object,
5141                                                 Register accessor_pair,
5142                                                 Register value) {
5143  RegisterAllocationScope scope(this);
5144  Register accessor = register_allocator()->NewRegister();
5145  RegisterList args = register_allocator()->NewRegisterList(2);
5146
5147  builder()
5148      ->CallRuntime(Runtime::kLoadPrivateSetter, accessor_pair)
5149      .StoreAccumulatorInRegister(accessor)
5150      .MoveRegister(object, args[0])
5151      .MoveRegister(value, args[1])
5152      .CallProperty(accessor, args,
5153                    feedback_index(feedback_spec()->AddCallICSlot()));
5154}
5155
5156void BytecodeGenerator::BuildPrivateMethodIn(Variable* private_name,
5157                                             Expression* object_expression) {
5158  DCHECK(IsPrivateMethodOrAccessorVariableMode(private_name->mode()));
5159  ClassScope* scope = private_name->scope()->AsClassScope();
5160  if (private_name->is_static()) {
5161    // For static private methods, "#privatemethod in ..." only returns true for
5162    // the class constructor.
5163    if (scope->class_variable() == nullptr) {
5164      // Can only happen via the debugger. See comment in
5165      // BuildPrivateBrandCheck.
5166      RegisterAllocationScope register_scope(this);
5167      RegisterList args = register_allocator()->NewRegisterList(2);
5168      builder()
5169          ->LoadLiteral(Smi::FromEnum(
5170              MessageTemplate::
5171                  kInvalidUnusedPrivateStaticMethodAccessedByDebugger))
5172          .StoreAccumulatorInRegister(args[0])
5173          .LoadLiteral(private_name->raw_name())
5174          .StoreAccumulatorInRegister(args[1])
5175          .CallRuntime(Runtime::kNewError, args)
5176          .Throw();
5177    } else {
5178      VisitForAccumulatorValue(object_expression);
5179      Register object = register_allocator()->NewRegister();
5180      builder()->StoreAccumulatorInRegister(object);
5181
5182      BytecodeLabel is_object;
5183      builder()->JumpIfJSReceiver(&is_object);
5184
5185      RegisterList args = register_allocator()->NewRegisterList(3);
5186      builder()
5187          ->StoreAccumulatorInRegister(args[2])
5188          .LoadLiteral(Smi::FromEnum(MessageTemplate::kInvalidInOperatorUse))
5189          .StoreAccumulatorInRegister(args[0])
5190          .LoadLiteral(private_name->raw_name())
5191          .StoreAccumulatorInRegister(args[1])
5192          .CallRuntime(Runtime::kNewTypeError, args)
5193          .Throw();
5194
5195      builder()->Bind(&is_object);
5196      BuildVariableLoadForAccumulatorValue(scope->class_variable(),
5197                                           HoleCheckMode::kElided);
5198      builder()->CompareReference(object);
5199    }
5200  } else {
5201    BuildVariableLoadForAccumulatorValue(scope->brand(),
5202                                         HoleCheckMode::kElided);
5203    Register brand = register_allocator()->NewRegister();
5204    builder()->StoreAccumulatorInRegister(brand);
5205
5206    VisitForAccumulatorValue(object_expression);
5207    builder()->SetExpressionPosition(object_expression);
5208
5209    FeedbackSlot slot = feedback_spec()->AddKeyedHasICSlot();
5210    builder()->CompareOperation(Token::IN, brand, feedback_index(slot));
5211    execution_result()->SetResultIsBoolean();
5212  }
5213}
5214
5215void BytecodeGenerator::BuildPrivateBrandCheck(Property* property,
5216                                               Register object) {
5217  Variable* private_name = property->key()->AsVariableProxy()->var();
5218  DCHECK(IsPrivateMethodOrAccessorVariableMode(private_name->mode()));
5219  ClassScope* scope = private_name->scope()->AsClassScope();
5220  builder()->SetExpressionPosition(property);
5221  if (private_name->is_static()) {
5222    // For static private methods, the only valid receiver is the class.
5223    // Load the class constructor.
5224    if (scope->class_variable() == nullptr) {
5225      // If the static private method has not been used used in source
5226      // code (either explicitly or through the presence of eval), but is
5227      // accessed by the debugger at runtime, reference to the class variable
5228      // is not available since it was not be context-allocated. Therefore we
5229      // can't build a branch check, and throw an ReferenceError as if the
5230      // method was optimized away.
5231      // TODO(joyee): get a reference to the class constructor through
5232      // something other than scope->class_variable() in this scenario.
5233      RegisterAllocationScope register_scope(this);
5234      RegisterList args = register_allocator()->NewRegisterList(2);
5235      builder()
5236          ->LoadLiteral(Smi::FromEnum(
5237              MessageTemplate::
5238                  kInvalidUnusedPrivateStaticMethodAccessedByDebugger))
5239          .StoreAccumulatorInRegister(args[0])
5240          .LoadLiteral(private_name->raw_name())
5241          .StoreAccumulatorInRegister(args[1])
5242          .CallRuntime(Runtime::kNewError, args)
5243          .Throw();
5244    } else {
5245      BuildVariableLoadForAccumulatorValue(scope->class_variable(),
5246                                           HoleCheckMode::kElided);
5247      BytecodeLabel return_check;
5248      builder()->CompareReference(object).JumpIfTrue(
5249          ToBooleanMode::kAlreadyBoolean, &return_check);
5250      const AstRawString* name = scope->class_variable()->raw_name();
5251      RegisterAllocationScope register_scope(this);
5252      RegisterList args = register_allocator()->NewRegisterList(2);
5253      builder()
5254          ->LoadLiteral(
5255              Smi::FromEnum(MessageTemplate::kInvalidPrivateBrandStatic))
5256          .StoreAccumulatorInRegister(args[0])
5257          .LoadLiteral(name)
5258          .StoreAccumulatorInRegister(args[1])
5259          .CallRuntime(Runtime::kNewTypeError, args)
5260          .Throw();
5261      builder()->Bind(&return_check);
5262    }
5263  } else {
5264    BuildVariableLoadForAccumulatorValue(scope->brand(),
5265                                         HoleCheckMode::kElided);
5266    builder()->LoadKeyedProperty(
5267        object, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
5268  }
5269}
5270
5271void BytecodeGenerator::VisitPropertyLoadForRegister(Register obj,
5272                                                     Property* expr,
5273                                                     Register destination) {
5274  ValueResultScope result_scope(this);
5275  VisitPropertyLoad(obj, expr);
5276  builder()->StoreAccumulatorInRegister(destination);
5277}
5278
5279void BytecodeGenerator::VisitNamedSuperPropertyLoad(Property* property,
5280                                                    Register opt_receiver_out) {
5281  RegisterAllocationScope register_scope(this);
5282  if (FLAG_super_ic) {
5283    Register receiver = register_allocator()->NewRegister();
5284    BuildThisVariableLoad();
5285    builder()->StoreAccumulatorInRegister(receiver);
5286    BuildVariableLoad(
5287        property->obj()->AsSuperPropertyReference()->home_object()->var(),
5288        HoleCheckMode::kElided);
5289    builder()->SetExpressionPosition(property);
5290    auto name = property->key()->AsLiteral()->AsRawPropertyName();
5291    FeedbackSlot slot = GetCachedLoadSuperICSlot(name);
5292    builder()->LoadNamedPropertyFromSuper(receiver, name, feedback_index(slot));
5293    if (opt_receiver_out.is_valid()) {
5294      builder()->MoveRegister(receiver, opt_receiver_out);
5295    }
5296  } else {
5297    RegisterList args = register_allocator()->NewRegisterList(3);
5298    BuildThisVariableLoad();
5299    builder()->StoreAccumulatorInRegister(args[0]);
5300    BuildVariableLoad(
5301        property->obj()->AsSuperPropertyReference()->home_object()->var(),
5302        HoleCheckMode::kElided);
5303    builder()->StoreAccumulatorInRegister(args[1]);
5304    builder()->SetExpressionPosition(property);
5305    builder()
5306        ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
5307        .StoreAccumulatorInRegister(args[2])
5308        .CallRuntime(Runtime::kLoadFromSuper, args);
5309
5310    if (opt_receiver_out.is_valid()) {
5311      builder()->MoveRegister(args[0], opt_receiver_out);
5312    }
5313  }
5314}
5315
5316void BytecodeGenerator::VisitKeyedSuperPropertyLoad(Property* property,
5317                                                    Register opt_receiver_out) {
5318  RegisterAllocationScope register_scope(this);
5319  RegisterList args = register_allocator()->NewRegisterList(3);
5320  BuildThisVariableLoad();
5321  builder()->StoreAccumulatorInRegister(args[0]);
5322  BuildVariableLoad(
5323      property->obj()->AsSuperPropertyReference()->home_object()->var(),
5324      HoleCheckMode::kElided);
5325  builder()->StoreAccumulatorInRegister(args[1]);
5326  VisitForRegisterValue(property->key(), args[2]);
5327
5328  builder()->SetExpressionPosition(property);
5329  builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, args);
5330
5331  if (opt_receiver_out.is_valid()) {
5332    builder()->MoveRegister(args[0], opt_receiver_out);
5333  }
5334}
5335
5336template <typename ExpressionFunc>
5337void BytecodeGenerator::BuildOptionalChain(ExpressionFunc expression_func) {
5338  BytecodeLabel done;
5339  OptionalChainNullLabelScope label_scope(this);
5340  expression_func();
5341  builder()->Jump(&done);
5342  label_scope.labels()->Bind(builder());
5343  builder()->LoadUndefined();
5344  builder()->Bind(&done);
5345}
5346
5347void BytecodeGenerator::VisitOptionalChain(OptionalChain* expr) {
5348  BuildOptionalChain([&]() { VisitForAccumulatorValue(expr->expression()); });
5349}
5350
5351void BytecodeGenerator::VisitProperty(Property* expr) {
5352  AssignType property_kind = Property::GetAssignType(expr);
5353  if (property_kind != NAMED_SUPER_PROPERTY &&
5354      property_kind != KEYED_SUPER_PROPERTY) {
5355    Register obj = VisitForRegisterValue(expr->obj());
5356    VisitPropertyLoad(obj, expr);
5357  } else {
5358    VisitPropertyLoad(Register::invalid_value(), expr);
5359  }
5360}
5361
5362void BytecodeGenerator::VisitArguments(const ZonePtrList<Expression>* args,
5363                                       RegisterList* arg_regs) {
5364  // Visit arguments.
5365  for (int i = 0; i < static_cast<int>(args->length()); i++) {
5366    VisitAndPushIntoRegisterList(args->at(i), arg_regs);
5367  }
5368}
5369
5370void BytecodeGenerator::VisitCall(Call* expr) {
5371  Expression* callee_expr = expr->expression();
5372  Call::CallType call_type = expr->GetCallType();
5373
5374  if (call_type == Call::SUPER_CALL) {
5375    return VisitCallSuper(expr);
5376  }
5377
5378  // We compile the call differently depending on the presence of spreads and
5379  // their positions.
5380  //
5381  // If there is only one spread and it is the final argument, there is a
5382  // special CallWithSpread bytecode.
5383  //
5384  // If there is a non-final spread, we rewrite calls like
5385  //     callee(1, ...x, 2)
5386  // to
5387  //     %reflect_apply(callee, receiver, [1, ...x, 2])
5388  const Call::SpreadPosition spread_position = expr->spread_position();
5389
5390  // Grow the args list as we visit receiver / arguments to avoid allocating all
5391  // the registers up-front. Otherwise these registers are unavailable during
5392  // receiver / argument visiting and we can end up with memory leaks due to
5393  // registers keeping objects alive.
5394  RegisterList args = register_allocator()->NewGrowableRegisterList();
5395
5396  // The callee is the first register in args for ease of calling %reflect_apply
5397  // if we have a non-final spread. For all other cases it is popped from args
5398  // before emitting the call below.
5399  Register callee = register_allocator()->GrowRegisterList(&args);
5400
5401  bool implicit_undefined_receiver = false;
5402
5403  // TODO(petermarshall): We have a lot of call bytecodes that are very similar,
5404  // see if we can reduce the number by adding a separate argument which
5405  // specifies the call type (e.g., property, spread, tailcall, etc.).
5406
5407  // Prepare the callee and the receiver to the function call. This depends on
5408  // the semantics of the underlying call type.
5409  switch (call_type) {
5410    case Call::NAMED_PROPERTY_CALL:
5411    case Call::KEYED_PROPERTY_CALL:
5412    case Call::PRIVATE_CALL: {
5413      Property* property = callee_expr->AsProperty();
5414      VisitAndPushIntoRegisterList(property->obj(), &args);
5415      VisitPropertyLoadForRegister(args.last_register(), property, callee);
5416      break;
5417    }
5418    case Call::GLOBAL_CALL: {
5419      // Receiver is undefined for global calls.
5420      if (spread_position == Call::kNoSpread) {
5421        implicit_undefined_receiver = true;
5422      } else {
5423        // TODO(leszeks): There's no special bytecode for tail calls or spread
5424        // calls with an undefined receiver, so just push undefined ourselves.
5425        BuildPushUndefinedIntoRegisterList(&args);
5426      }
5427      // Load callee as a global variable.
5428      VariableProxy* proxy = callee_expr->AsVariableProxy();
5429      BuildVariableLoadForAccumulatorValue(proxy->var(),
5430                                           proxy->hole_check_mode());
5431      builder()->StoreAccumulatorInRegister(callee);
5432      break;
5433    }
5434    case Call::WITH_CALL: {
5435      Register receiver = register_allocator()->GrowRegisterList(&args);
5436      DCHECK(callee_expr->AsVariableProxy()->var()->IsLookupSlot());
5437      {
5438        RegisterAllocationScope inner_register_scope(this);
5439        Register name = register_allocator()->NewRegister();
5440
5441        // Call %LoadLookupSlotForCall to get the callee and receiver.
5442        RegisterList result_pair = register_allocator()->NewRegisterList(2);
5443        Variable* variable = callee_expr->AsVariableProxy()->var();
5444        builder()
5445            ->LoadLiteral(variable->raw_name())
5446            .StoreAccumulatorInRegister(name)
5447            .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, name,
5448                                result_pair)
5449            .MoveRegister(result_pair[0], callee)
5450            .MoveRegister(result_pair[1], receiver);
5451      }
5452      break;
5453    }
5454    case Call::OTHER_CALL: {
5455      // Receiver is undefined for other calls.
5456      if (spread_position == Call::kNoSpread) {
5457        implicit_undefined_receiver = true;
5458      } else {
5459        // TODO(leszeks): There's no special bytecode for tail calls or spread
5460        // calls with an undefined receiver, so just push undefined ourselves.
5461        BuildPushUndefinedIntoRegisterList(&args);
5462      }
5463      VisitForRegisterValue(callee_expr, callee);
5464      break;
5465    }
5466    case Call::NAMED_SUPER_PROPERTY_CALL: {
5467      Register receiver = register_allocator()->GrowRegisterList(&args);
5468      Property* property = callee_expr->AsProperty();
5469      VisitNamedSuperPropertyLoad(property, receiver);
5470      builder()->StoreAccumulatorInRegister(callee);
5471      break;
5472    }
5473    case Call::KEYED_SUPER_PROPERTY_CALL: {
5474      Register receiver = register_allocator()->GrowRegisterList(&args);
5475      Property* property = callee_expr->AsProperty();
5476      VisitKeyedSuperPropertyLoad(property, receiver);
5477      builder()->StoreAccumulatorInRegister(callee);
5478      break;
5479    }
5480    case Call::NAMED_OPTIONAL_CHAIN_PROPERTY_CALL:
5481    case Call::KEYED_OPTIONAL_CHAIN_PROPERTY_CALL:
5482    case Call::PRIVATE_OPTIONAL_CHAIN_CALL: {
5483      OptionalChain* chain = callee_expr->AsOptionalChain();
5484      Property* property = chain->expression()->AsProperty();
5485      BuildOptionalChain([&]() {
5486        VisitAndPushIntoRegisterList(property->obj(), &args);
5487        VisitPropertyLoad(args.last_register(), property);
5488      });
5489      builder()->StoreAccumulatorInRegister(callee);
5490      break;
5491    }
5492    case Call::SUPER_CALL:
5493      UNREACHABLE();
5494  }
5495
5496  if (expr->is_optional_chain_link()) {
5497    DCHECK_NOT_NULL(optional_chaining_null_labels_);
5498    int right_range =
5499        AllocateBlockCoverageSlotIfEnabled(expr, SourceRangeKind::kRight);
5500    builder()->LoadAccumulatorWithRegister(callee).JumpIfUndefinedOrNull(
5501        optional_chaining_null_labels_->New());
5502    BuildIncrementBlockCoverageCounterIfEnabled(right_range);
5503  }
5504
5505  int receiver_arg_count = -1;
5506  if (spread_position == Call::kHasNonFinalSpread) {
5507    // If we're building %reflect_apply, build the array literal and put it in
5508    // the 3rd argument.
5509    DCHECK(!implicit_undefined_receiver);
5510    DCHECK_EQ(args.register_count(), 2);
5511    BuildCreateArrayLiteral(expr->arguments(), nullptr);
5512    builder()->StoreAccumulatorInRegister(
5513        register_allocator()->GrowRegisterList(&args));
5514  } else {
5515    // If we're not building %reflect_apply and don't need to build an array
5516    // literal, pop the callee and evaluate all arguments to the function call
5517    // and store in sequential args registers.
5518    args = args.PopLeft();
5519    VisitArguments(expr->arguments(), &args);
5520    receiver_arg_count = implicit_undefined_receiver ? 0 : 1;
5521    CHECK_EQ(receiver_arg_count + expr->arguments()->length(),
5522             args.register_count());
5523  }
5524
5525  // Resolve callee for a potential direct eval call. This block will mutate the
5526  // callee value.
5527  if (expr->is_possibly_eval() && expr->arguments()->length() > 0) {
5528    RegisterAllocationScope inner_register_scope(this);
5529    RegisterList runtime_call_args = register_allocator()->NewRegisterList(6);
5530    // Set up arguments for ResolvePossiblyDirectEval by copying callee, source
5531    // strings and function closure, and loading language and
5532    // position.
5533
5534    // Move the first arg.
5535    if (spread_position == Call::kHasNonFinalSpread) {
5536      int feedback_slot_index =
5537          feedback_index(feedback_spec()->AddKeyedLoadICSlot());
5538      Register args_array = args[2];
5539      builder()
5540          ->LoadLiteral(Smi::FromInt(0))
5541          .LoadKeyedProperty(args_array, feedback_slot_index)
5542          .StoreAccumulatorInRegister(runtime_call_args[1]);
5543    } else {
5544      // FIXME(v8:5690): Support final spreads for eval.
5545      DCHECK_GE(receiver_arg_count, 0);
5546      builder()->MoveRegister(args[receiver_arg_count], runtime_call_args[1]);
5547    }
5548    builder()
5549        ->MoveRegister(callee, runtime_call_args[0])
5550        .MoveRegister(Register::function_closure(), runtime_call_args[2])
5551        .LoadLiteral(Smi::FromEnum(language_mode()))
5552        .StoreAccumulatorInRegister(runtime_call_args[3])
5553        .LoadLiteral(Smi::FromInt(current_scope()->start_position()))
5554        .StoreAccumulatorInRegister(runtime_call_args[4])
5555        .LoadLiteral(Smi::FromInt(expr->position()))
5556        .StoreAccumulatorInRegister(runtime_call_args[5]);
5557
5558    // Call ResolvePossiblyDirectEval and modify the callee.
5559    builder()
5560        ->CallRuntime(Runtime::kResolvePossiblyDirectEval, runtime_call_args)
5561        .StoreAccumulatorInRegister(callee);
5562  }
5563
5564  builder()->SetExpressionPosition(expr);
5565
5566  if (spread_position == Call::kHasFinalSpread) {
5567    DCHECK(!implicit_undefined_receiver);
5568    builder()->CallWithSpread(callee, args,
5569                              feedback_index(feedback_spec()->AddCallICSlot()));
5570  } else if (spread_position == Call::kHasNonFinalSpread) {
5571    builder()->CallJSRuntime(Context::REFLECT_APPLY_INDEX, args);
5572  } else if (call_type == Call::NAMED_PROPERTY_CALL ||
5573             call_type == Call::KEYED_PROPERTY_CALL) {
5574    DCHECK(!implicit_undefined_receiver);
5575    builder()->CallProperty(callee, args,
5576                            feedback_index(feedback_spec()->AddCallICSlot()));
5577  } else if (implicit_undefined_receiver) {
5578    builder()->CallUndefinedReceiver(
5579        callee, args, feedback_index(feedback_spec()->AddCallICSlot()));
5580  } else {
5581    builder()->CallAnyReceiver(
5582        callee, args, feedback_index(feedback_spec()->AddCallICSlot()));
5583  }
5584}
5585
5586void BytecodeGenerator::VisitCallSuper(Call* expr) {
5587  RegisterAllocationScope register_scope(this);
5588  SuperCallReference* super = expr->expression()->AsSuperCallReference();
5589  const ZonePtrList<Expression>* args = expr->arguments();
5590
5591  // We compile the super call differently depending on the presence of spreads
5592  // and their positions.
5593  //
5594  // If there is only one spread and it is the final argument, there is a
5595  // special ConstructWithSpread bytecode.
5596  //
5597  // It there is a non-final spread, we rewrite something like
5598  //    super(1, ...x, 2)
5599  // to
5600  //    %reflect_construct(constructor, [1, ...x, 2], new_target)
5601  //
5602  // That is, we implement (non-last-arg) spreads in super calls via our
5603  // mechanism for spreads in array literals.
5604  const Call::SpreadPosition spread_position = expr->spread_position();
5605
5606  // Prepare the constructor to the super call.
5607  Register this_function = VisitForRegisterValue(super->this_function_var());
5608  Register constructor = register_allocator()->NewRegister();
5609  builder()
5610      ->LoadAccumulatorWithRegister(this_function)
5611      .GetSuperConstructor(constructor);
5612
5613  if (spread_position == Call::kHasNonFinalSpread) {
5614    // First generate the array containing all arguments.
5615    BuildCreateArrayLiteral(args, nullptr);
5616
5617    // Check if the constructor is in fact a constructor.
5618    builder()->ThrowIfNotSuperConstructor(constructor);
5619
5620    // Now pass that array to %reflect_construct.
5621    RegisterList construct_args = register_allocator()->NewRegisterList(3);
5622    builder()->StoreAccumulatorInRegister(construct_args[1]);
5623    builder()->MoveRegister(constructor, construct_args[0]);
5624    VisitForRegisterValue(super->new_target_var(), construct_args[2]);
5625    builder()->CallJSRuntime(Context::REFLECT_CONSTRUCT_INDEX, construct_args);
5626  } else {
5627    RegisterList args_regs = register_allocator()->NewGrowableRegisterList();
5628    VisitArguments(args, &args_regs);
5629
5630    // Check if the constructor is in fact a constructor.
5631    builder()->ThrowIfNotSuperConstructor(constructor);
5632
5633    // The new target is loaded into the accumulator from the
5634    // {new.target} variable.
5635    VisitForAccumulatorValue(super->new_target_var());
5636    builder()->SetExpressionPosition(expr);
5637
5638    int feedback_slot_index = feedback_index(feedback_spec()->AddCallICSlot());
5639
5640    if (spread_position == Call::kHasFinalSpread) {
5641      builder()->ConstructWithSpread(constructor, args_regs,
5642                                     feedback_slot_index);
5643    } else {
5644      DCHECK_EQ(spread_position, Call::kNoSpread);
5645      // Call construct.
5646      // TODO(turbofan): For now we do gather feedback on super constructor
5647      // calls, utilizing the existing machinery to inline the actual call
5648      // target and the JSCreate for the implicit receiver allocation. This
5649      // is not an ideal solution for super constructor calls, but it gets
5650      // the job done for now. In the long run we might want to revisit this
5651      // and come up with a better way.
5652      builder()->Construct(constructor, args_regs, feedback_slot_index);
5653    }
5654  }
5655
5656  // Explicit calls to the super constructor using super() perform an
5657  // implicit binding assignment to the 'this' variable.
5658  //
5659  // Default constructors don't need have to do the assignment because
5660  // 'this' isn't accessed in default constructors.
5661  if (!IsDefaultConstructor(info()->literal()->kind())) {
5662    Variable* var = closure_scope()->GetReceiverScope()->receiver();
5663    BuildVariableAssignment(var, Token::INIT, HoleCheckMode::kRequired);
5664  }
5665
5666  Register instance = register_allocator()->NewRegister();
5667  builder()->StoreAccumulatorInRegister(instance);
5668
5669  // The constructor scope always needs ScopeInfo, so we are certain that
5670  // the first constructor scope found in the outer scope chain is the
5671  // scope that we are looking for for this super() call.
5672  // Note that this doesn't necessarily mean that the constructor needs
5673  // a context, if it doesn't this would get handled specially in
5674  // BuildPrivateBrandInitialization().
5675  DeclarationScope* constructor_scope = info()->scope()->GetConstructorScope();
5676
5677  // We can rely on the class_scope_has_private_brand bit to tell if the
5678  // constructor needs private brand initialization, and if that's
5679  // the case we are certain that its outer class scope requires a context to
5680  // keep the brand variable, so we can just get the brand variable
5681  // from the outer scope.
5682  if (constructor_scope->class_scope_has_private_brand()) {
5683    DCHECK(constructor_scope->outer_scope()->is_class_scope());
5684    ClassScope* class_scope = constructor_scope->outer_scope()->AsClassScope();
5685    DCHECK_NOT_NULL(class_scope->brand());
5686    Variable* brand = class_scope->brand();
5687    BuildPrivateBrandInitialization(instance, brand);
5688  }
5689
5690  // The derived constructor has the correct bit set always, so we
5691  // don't emit code to load and call the initializer if not
5692  // required.
5693  //
5694  // For the arrow function or eval case, we always emit code to load
5695  // and call the initializer.
5696  //
5697  // TODO(gsathya): In the future, we could tag nested arrow functions
5698  // or eval with the correct bit so that we do the load conditionally
5699  // if required.
5700  if (info()->literal()->requires_instance_members_initializer() ||
5701      !IsDerivedConstructor(info()->literal()->kind())) {
5702    BuildInstanceMemberInitialization(this_function, instance);
5703  }
5704
5705  builder()->LoadAccumulatorWithRegister(instance);
5706}
5707
5708void BytecodeGenerator::VisitCallNew(CallNew* expr) {
5709  RegisterList args = register_allocator()->NewGrowableRegisterList();
5710
5711  // Load the constructor. It's in the first register in args for ease of
5712  // calling %reflect_construct if we have a non-final spread. For all other
5713  // cases it is popped before emitting the construct below.
5714  VisitAndPushIntoRegisterList(expr->expression(), &args);
5715
5716  // We compile the new differently depending on the presence of spreads and
5717  // their positions.
5718  //
5719  // If there is only one spread and it is the final argument, there is a
5720  // special ConstructWithSpread bytecode.
5721  //
5722  // If there is a non-final spread, we rewrite calls like
5723  //     new ctor(1, ...x, 2)
5724  // to
5725  //     %reflect_construct(ctor, [1, ...x, 2])
5726  const CallNew::SpreadPosition spread_position = expr->spread_position();
5727
5728  if (spread_position == CallNew::kHasNonFinalSpread) {
5729    BuildCreateArrayLiteral(expr->arguments(), nullptr);
5730    builder()->SetExpressionPosition(expr);
5731    builder()
5732        ->StoreAccumulatorInRegister(
5733            register_allocator()->GrowRegisterList(&args))
5734        .CallJSRuntime(Context::REFLECT_CONSTRUCT_INDEX, args);
5735    return;
5736  }
5737
5738  Register constructor = args.first_register();
5739  args = args.PopLeft();
5740  VisitArguments(expr->arguments(), &args);
5741
5742  // The accumulator holds new target which is the same as the
5743  // constructor for CallNew.
5744  builder()->SetExpressionPosition(expr);
5745  builder()->LoadAccumulatorWithRegister(constructor);
5746
5747  int feedback_slot_index = feedback_index(feedback_spec()->AddCallICSlot());
5748  if (spread_position == CallNew::kHasFinalSpread) {
5749    builder()->ConstructWithSpread(constructor, args, feedback_slot_index);
5750  } else {
5751    DCHECK_EQ(spread_position, CallNew::kNoSpread);
5752    builder()->Construct(constructor, args, feedback_slot_index);
5753  }
5754}
5755
5756void BytecodeGenerator::VisitCallRuntime(CallRuntime* expr) {
5757  if (expr->is_jsruntime()) {
5758    RegisterList args = register_allocator()->NewGrowableRegisterList();
5759    VisitArguments(expr->arguments(), &args);
5760    builder()->CallJSRuntime(expr->context_index(), args);
5761  } else {
5762    // Evaluate all arguments to the runtime call.
5763    RegisterList args = register_allocator()->NewGrowableRegisterList();
5764    VisitArguments(expr->arguments(), &args);
5765    Runtime::FunctionId function_id = expr->function()->function_id;
5766    builder()->CallRuntime(function_id, args);
5767  }
5768}
5769
5770void BytecodeGenerator::VisitVoid(UnaryOperation* expr) {
5771  VisitForEffect(expr->expression());
5772  builder()->LoadUndefined();
5773}
5774
5775void BytecodeGenerator::VisitForTypeOfValue(Expression* expr) {
5776  if (expr->IsVariableProxy()) {
5777    // Typeof does not throw a reference error on global variables, hence we
5778    // perform a non-contextual load in case the operand is a variable proxy.
5779    VariableProxy* proxy = expr->AsVariableProxy();
5780    BuildVariableLoadForAccumulatorValue(proxy->var(), proxy->hole_check_mode(),
5781                                         TypeofMode::kInside);
5782  } else {
5783    VisitForAccumulatorValue(expr);
5784  }
5785}
5786
5787void BytecodeGenerator::VisitTypeOf(UnaryOperation* expr) {
5788  VisitForTypeOfValue(expr->expression());
5789  builder()->TypeOf();
5790}
5791
5792void BytecodeGenerator::VisitNot(UnaryOperation* expr) {
5793  if (execution_result()->IsEffect()) {
5794    VisitForEffect(expr->expression());
5795  } else if (execution_result()->IsTest()) {
5796    // No actual logical negation happening, we just swap the control flow, by
5797    // swapping the target labels and the fallthrough branch, and visit in the
5798    // same test result context.
5799    TestResultScope* test_result = execution_result()->AsTest();
5800    test_result->InvertControlFlow();
5801    VisitInSameTestExecutionScope(expr->expression());
5802  } else {
5803    TypeHint type_hint = VisitForAccumulatorValue(expr->expression());
5804    builder()->LogicalNot(ToBooleanModeFromTypeHint(type_hint));
5805    // Always returns a boolean value.
5806    execution_result()->SetResultIsBoolean();
5807  }
5808}
5809
5810void BytecodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
5811  switch (expr->op()) {
5812    case Token::Value::NOT:
5813      VisitNot(expr);
5814      break;
5815    case Token::Value::TYPEOF:
5816      VisitTypeOf(expr);
5817      break;
5818    case Token::Value::VOID:
5819      VisitVoid(expr);
5820      break;
5821    case Token::Value::DELETE:
5822      VisitDelete(expr);
5823      break;
5824    case Token::Value::ADD:
5825    case Token::Value::SUB:
5826    case Token::Value::BIT_NOT:
5827      VisitForAccumulatorValue(expr->expression());
5828      builder()->SetExpressionPosition(expr);
5829      builder()->UnaryOperation(
5830          expr->op(), feedback_index(feedback_spec()->AddBinaryOpICSlot()));
5831      break;
5832    default:
5833      UNREACHABLE();
5834  }
5835}
5836
5837void BytecodeGenerator::VisitDelete(UnaryOperation* unary) {
5838  Expression* expr = unary->expression();
5839  if (expr->IsProperty()) {
5840    // Delete of an object property is allowed both in sloppy
5841    // and strict modes.
5842    Property* property = expr->AsProperty();
5843    DCHECK(!property->IsPrivateReference());
5844    Register object = VisitForRegisterValue(property->obj());
5845    VisitForAccumulatorValue(property->key());
5846    builder()->Delete(object, language_mode());
5847  } else if (expr->IsOptionalChain()) {
5848    Expression* expr_inner = expr->AsOptionalChain()->expression();
5849    if (expr_inner->IsProperty()) {
5850      Property* property = expr_inner->AsProperty();
5851      DCHECK(!property->IsPrivateReference());
5852      BytecodeLabel done;
5853      OptionalChainNullLabelScope label_scope(this);
5854      VisitForAccumulatorValue(property->obj());
5855      if (property->is_optional_chain_link()) {
5856        int right_range = AllocateBlockCoverageSlotIfEnabled(
5857            property, SourceRangeKind::kRight);
5858        builder()->JumpIfUndefinedOrNull(label_scope.labels()->New());
5859        BuildIncrementBlockCoverageCounterIfEnabled(right_range);
5860      }
5861      Register object = register_allocator()->NewRegister();
5862      builder()->StoreAccumulatorInRegister(object);
5863      VisitForAccumulatorValue(property->key());
5864      builder()->Delete(object, language_mode());
5865      builder()->Jump(&done);
5866      label_scope.labels()->Bind(builder());
5867      builder()->LoadTrue();
5868      builder()->Bind(&done);
5869    } else {
5870      VisitForEffect(expr);
5871      builder()->LoadTrue();
5872    }
5873  } else if (expr->IsVariableProxy() &&
5874             !expr->AsVariableProxy()->is_new_target()) {
5875    // Delete of an unqualified identifier is allowed in sloppy mode but is
5876    // not allowed in strict mode.
5877    DCHECK(is_sloppy(language_mode()));
5878    Variable* variable = expr->AsVariableProxy()->var();
5879    switch (variable->location()) {
5880      case VariableLocation::PARAMETER:
5881      case VariableLocation::LOCAL:
5882      case VariableLocation::CONTEXT:
5883      case VariableLocation::REPL_GLOBAL: {
5884        // Deleting local var/let/const, context variables, and arguments
5885        // does not have any effect.
5886        builder()->LoadFalse();
5887        break;
5888      }
5889      case VariableLocation::UNALLOCATED:
5890      // TODO(adamk): Falling through to the runtime results in correct
5891      // behavior, but does unnecessary context-walking (since scope
5892      // analysis has already proven that the variable doesn't exist in
5893      // any non-global scope). Consider adding a DeleteGlobal bytecode
5894      // that knows how to deal with ScriptContexts as well as global
5895      // object properties.
5896      case VariableLocation::LOOKUP: {
5897        Register name_reg = register_allocator()->NewRegister();
5898        builder()
5899            ->LoadLiteral(variable->raw_name())
5900            .StoreAccumulatorInRegister(name_reg)
5901            .CallRuntime(Runtime::kDeleteLookupSlot, name_reg);
5902        break;
5903      }
5904      case VariableLocation::MODULE:
5905        // Modules are always in strict mode and unqualified identifers are not
5906        // allowed in strict mode.
5907        UNREACHABLE();
5908    }
5909  } else {
5910    // Delete of an unresolvable reference, new.target, and this returns true.
5911    VisitForEffect(expr);
5912    builder()->LoadTrue();
5913  }
5914}
5915
5916void BytecodeGenerator::VisitCountOperation(CountOperation* expr) {
5917  DCHECK(expr->expression()->IsValidReferenceExpression());
5918
5919  // Left-hand side can only be a property, a global or a variable slot.
5920  Property* property = expr->expression()->AsProperty();
5921  AssignType assign_type = Property::GetAssignType(property);
5922
5923  bool is_postfix = expr->is_postfix() && !execution_result()->IsEffect();
5924
5925  // Evaluate LHS expression and get old value.
5926  Register object, key, old_value;
5927  RegisterList super_property_args;
5928  const AstRawString* name;
5929  switch (assign_type) {
5930    case NON_PROPERTY: {
5931      VariableProxy* proxy = expr->expression()->AsVariableProxy();
5932      BuildVariableLoadForAccumulatorValue(proxy->var(),
5933                                           proxy->hole_check_mode());
5934      break;
5935    }
5936    case NAMED_PROPERTY: {
5937      object = VisitForRegisterValue(property->obj());
5938      name = property->key()->AsLiteral()->AsRawPropertyName();
5939      builder()->LoadNamedProperty(
5940          object, name,
5941          feedback_index(GetCachedLoadICSlot(property->obj(), name)));
5942      break;
5943    }
5944    case KEYED_PROPERTY: {
5945      object = VisitForRegisterValue(property->obj());
5946      // Use visit for accumulator here since we need the key in the accumulator
5947      // for the LoadKeyedProperty.
5948      key = register_allocator()->NewRegister();
5949      VisitForAccumulatorValue(property->key());
5950      builder()->StoreAccumulatorInRegister(key).LoadKeyedProperty(
5951          object, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
5952      break;
5953    }
5954    case NAMED_SUPER_PROPERTY: {
5955      super_property_args = register_allocator()->NewRegisterList(4);
5956      RegisterList load_super_args = super_property_args.Truncate(3);
5957      BuildThisVariableLoad();
5958      builder()->StoreAccumulatorInRegister(load_super_args[0]);
5959      BuildVariableLoad(
5960          property->obj()->AsSuperPropertyReference()->home_object()->var(),
5961          HoleCheckMode::kElided);
5962      builder()->StoreAccumulatorInRegister(load_super_args[1]);
5963      builder()
5964          ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
5965          .StoreAccumulatorInRegister(load_super_args[2])
5966          .CallRuntime(Runtime::kLoadFromSuper, load_super_args);
5967      break;
5968    }
5969    case KEYED_SUPER_PROPERTY: {
5970      super_property_args = register_allocator()->NewRegisterList(4);
5971      RegisterList load_super_args = super_property_args.Truncate(3);
5972      BuildThisVariableLoad();
5973      builder()->StoreAccumulatorInRegister(load_super_args[0]);
5974      BuildVariableLoad(
5975          property->obj()->AsSuperPropertyReference()->home_object()->var(),
5976          HoleCheckMode::kElided);
5977      builder()->StoreAccumulatorInRegister(load_super_args[1]);
5978      VisitForRegisterValue(property->key(), load_super_args[2]);
5979      builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, load_super_args);
5980      break;
5981    }
5982    case PRIVATE_METHOD: {
5983      object = VisitForRegisterValue(property->obj());
5984      BuildPrivateBrandCheck(property, object);
5985      BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateMethodWrite,
5986                                 property);
5987      return;
5988    }
5989    case PRIVATE_GETTER_ONLY: {
5990      object = VisitForRegisterValue(property->obj());
5991      BuildPrivateBrandCheck(property, object);
5992      BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateSetterAccess,
5993                                 property);
5994      return;
5995    }
5996    case PRIVATE_SETTER_ONLY: {
5997      object = VisitForRegisterValue(property->obj());
5998      BuildPrivateBrandCheck(property, object);
5999      BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateGetterAccess,
6000                                 property);
6001      return;
6002    }
6003    case PRIVATE_GETTER_AND_SETTER: {
6004      object = VisitForRegisterValue(property->obj());
6005      key = VisitForRegisterValue(property->key());
6006      BuildPrivateBrandCheck(property, object);
6007      BuildPrivateGetterAccess(object, key);
6008      break;
6009    }
6010  }
6011
6012  // Save result for postfix expressions.
6013  FeedbackSlot count_slot = feedback_spec()->AddBinaryOpICSlot();
6014  if (is_postfix) {
6015    old_value = register_allocator()->NewRegister();
6016    // Convert old value into a number before saving it.
6017    // TODO(ignition): Think about adding proper PostInc/PostDec bytecodes
6018    // instead of this ToNumeric + Inc/Dec dance.
6019    builder()
6020        ->ToNumeric(feedback_index(count_slot))
6021        .StoreAccumulatorInRegister(old_value);
6022  }
6023
6024  // Perform +1/-1 operation.
6025  builder()->UnaryOperation(expr->op(), feedback_index(count_slot));
6026
6027  // Store the value.
6028  builder()->SetExpressionPosition(expr);
6029  switch (assign_type) {
6030    case NON_PROPERTY: {
6031      VariableProxy* proxy = expr->expression()->AsVariableProxy();
6032      BuildVariableAssignment(proxy->var(), expr->op(),
6033                              proxy->hole_check_mode());
6034      break;
6035    }
6036    case NAMED_PROPERTY: {
6037      FeedbackSlot slot = GetCachedStoreICSlot(property->obj(), name);
6038      Register value;
6039      if (!execution_result()->IsEffect()) {
6040        value = register_allocator()->NewRegister();
6041        builder()->StoreAccumulatorInRegister(value);
6042      }
6043      builder()->SetNamedProperty(object, name, feedback_index(slot),
6044                                  language_mode());
6045      if (!execution_result()->IsEffect()) {
6046        builder()->LoadAccumulatorWithRegister(value);
6047      }
6048      break;
6049    }
6050    case KEYED_PROPERTY: {
6051      FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
6052      Register value;
6053      if (!execution_result()->IsEffect()) {
6054        value = register_allocator()->NewRegister();
6055        builder()->StoreAccumulatorInRegister(value);
6056      }
6057      builder()->SetKeyedProperty(object, key, feedback_index(slot),
6058                                  language_mode());
6059      if (!execution_result()->IsEffect()) {
6060        builder()->LoadAccumulatorWithRegister(value);
6061      }
6062      break;
6063    }
6064    case NAMED_SUPER_PROPERTY: {
6065      builder()
6066          ->StoreAccumulatorInRegister(super_property_args[3])
6067          .CallRuntime(Runtime::kStoreToSuper, super_property_args);
6068      break;
6069    }
6070    case KEYED_SUPER_PROPERTY: {
6071      builder()
6072          ->StoreAccumulatorInRegister(super_property_args[3])
6073          .CallRuntime(Runtime::kStoreKeyedToSuper, super_property_args);
6074      break;
6075    }
6076    case PRIVATE_SETTER_ONLY:
6077    case PRIVATE_GETTER_ONLY:
6078    case PRIVATE_METHOD: {
6079      UNREACHABLE();
6080    }
6081    case PRIVATE_GETTER_AND_SETTER: {
6082      Register value = register_allocator()->NewRegister();
6083      builder()->StoreAccumulatorInRegister(value);
6084      BuildPrivateSetterAccess(object, key, value);
6085      if (!execution_result()->IsEffect()) {
6086        builder()->LoadAccumulatorWithRegister(value);
6087      }
6088      break;
6089    }
6090  }
6091
6092  // Restore old value for postfix expressions.
6093  if (is_postfix) {
6094    builder()->LoadAccumulatorWithRegister(old_value);
6095  }
6096}
6097
6098void BytecodeGenerator::VisitBinaryOperation(BinaryOperation* binop) {
6099  switch (binop->op()) {
6100    case Token::COMMA:
6101      VisitCommaExpression(binop);
6102      break;
6103    case Token::OR:
6104      VisitLogicalOrExpression(binop);
6105      break;
6106    case Token::AND:
6107      VisitLogicalAndExpression(binop);
6108      break;
6109    case Token::NULLISH:
6110      VisitNullishExpression(binop);
6111      break;
6112    default:
6113      VisitArithmeticExpression(binop);
6114      break;
6115  }
6116}
6117
6118void BytecodeGenerator::VisitNaryOperation(NaryOperation* expr) {
6119  switch (expr->op()) {
6120    case Token::COMMA:
6121      VisitNaryCommaExpression(expr);
6122      break;
6123    case Token::OR:
6124      VisitNaryLogicalOrExpression(expr);
6125      break;
6126    case Token::AND:
6127      VisitNaryLogicalAndExpression(expr);
6128      break;
6129    case Token::NULLISH:
6130      VisitNaryNullishExpression(expr);
6131      break;
6132    default:
6133      VisitNaryArithmeticExpression(expr);
6134      break;
6135  }
6136}
6137
6138void BytecodeGenerator::BuildLiteralCompareNil(
6139    Token::Value op, BytecodeArrayBuilder::NilValue nil) {
6140  if (execution_result()->IsTest()) {
6141    TestResultScope* test_result = execution_result()->AsTest();
6142    switch (test_result->fallthrough()) {
6143      case TestFallthrough::kThen:
6144        builder()->JumpIfNotNil(test_result->NewElseLabel(), op, nil);
6145        break;
6146      case TestFallthrough::kElse:
6147        builder()->JumpIfNil(test_result->NewThenLabel(), op, nil);
6148        break;
6149      case TestFallthrough::kNone:
6150        builder()
6151            ->JumpIfNil(test_result->NewThenLabel(), op, nil)
6152            .Jump(test_result->NewElseLabel());
6153    }
6154    test_result->SetResultConsumedByTest();
6155  } else {
6156    builder()->CompareNil(op, nil);
6157  }
6158}
6159
6160void BytecodeGenerator::VisitCompareOperation(CompareOperation* expr) {
6161  Expression* sub_expr;
6162  Literal* literal;
6163  if (expr->IsLiteralCompareTypeof(&sub_expr, &literal)) {
6164    // Emit a fast literal comparion for expressions of the form:
6165    // typeof(x) === 'string'.
6166    VisitForTypeOfValue(sub_expr);
6167    builder()->SetExpressionPosition(expr);
6168    TestTypeOfFlags::LiteralFlag literal_flag =
6169        TestTypeOfFlags::GetFlagForLiteral(ast_string_constants(), literal);
6170    if (literal_flag == TestTypeOfFlags::LiteralFlag::kOther) {
6171      builder()->LoadFalse();
6172    } else {
6173      builder()->CompareTypeOf(literal_flag);
6174    }
6175  } else if (expr->IsLiteralCompareUndefined(&sub_expr)) {
6176    VisitForAccumulatorValue(sub_expr);
6177    builder()->SetExpressionPosition(expr);
6178    BuildLiteralCompareNil(expr->op(), BytecodeArrayBuilder::kUndefinedValue);
6179  } else if (expr->IsLiteralCompareNull(&sub_expr)) {
6180    VisitForAccumulatorValue(sub_expr);
6181    builder()->SetExpressionPosition(expr);
6182    BuildLiteralCompareNil(expr->op(), BytecodeArrayBuilder::kNullValue);
6183  } else {
6184    if (expr->op() == Token::IN && expr->left()->IsPrivateName()) {
6185      DCHECK(FLAG_harmony_private_brand_checks);
6186      Variable* var = expr->left()->AsVariableProxy()->var();
6187      if (IsPrivateMethodOrAccessorVariableMode(var->mode())) {
6188        BuildPrivateMethodIn(var, expr->right());
6189        return;
6190      }
6191      // For private fields, the code below does the right thing.
6192    }
6193
6194    Register lhs = VisitForRegisterValue(expr->left());
6195    VisitForAccumulatorValue(expr->right());
6196    builder()->SetExpressionPosition(expr);
6197    FeedbackSlot slot;
6198    if (expr->op() == Token::IN) {
6199      slot = feedback_spec()->AddKeyedHasICSlot();
6200    } else if (expr->op() == Token::INSTANCEOF) {
6201      slot = feedback_spec()->AddInstanceOfSlot();
6202    } else {
6203      slot = feedback_spec()->AddCompareICSlot();
6204    }
6205    builder()->CompareOperation(expr->op(), lhs, feedback_index(slot));
6206  }
6207  // Always returns a boolean value.
6208  execution_result()->SetResultIsBoolean();
6209}
6210
6211void BytecodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
6212  FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
6213  Expression* subexpr;
6214  Smi literal;
6215  if (expr->IsSmiLiteralOperation(&subexpr, &literal)) {
6216    TypeHint type_hint = VisitForAccumulatorValue(subexpr);
6217    builder()->SetExpressionPosition(expr);
6218    builder()->BinaryOperationSmiLiteral(expr->op(), literal,
6219                                         feedback_index(slot));
6220    if (expr->op() == Token::ADD && type_hint == TypeHint::kString) {
6221      execution_result()->SetResultIsString();
6222    }
6223  } else {
6224    TypeHint lhs_type = VisitForAccumulatorValue(expr->left());
6225    Register lhs = register_allocator()->NewRegister();
6226    builder()->StoreAccumulatorInRegister(lhs);
6227    TypeHint rhs_type = VisitForAccumulatorValue(expr->right());
6228    if (expr->op() == Token::ADD &&
6229        (lhs_type == TypeHint::kString || rhs_type == TypeHint::kString)) {
6230      execution_result()->SetResultIsString();
6231    }
6232
6233    builder()->SetExpressionPosition(expr);
6234    builder()->BinaryOperation(expr->op(), lhs, feedback_index(slot));
6235  }
6236}
6237
6238void BytecodeGenerator::VisitNaryArithmeticExpression(NaryOperation* expr) {
6239  // TODO(leszeks): Add support for lhs smi in commutative ops.
6240  TypeHint type_hint = VisitForAccumulatorValue(expr->first());
6241
6242  for (size_t i = 0; i < expr->subsequent_length(); ++i) {
6243    RegisterAllocationScope register_scope(this);
6244    if (expr->subsequent(i)->IsSmiLiteral()) {
6245      builder()->SetExpressionPosition(expr->subsequent_op_position(i));
6246      builder()->BinaryOperationSmiLiteral(
6247          expr->op(), expr->subsequent(i)->AsLiteral()->AsSmiLiteral(),
6248          feedback_index(feedback_spec()->AddBinaryOpICSlot()));
6249    } else {
6250      Register lhs = register_allocator()->NewRegister();
6251      builder()->StoreAccumulatorInRegister(lhs);
6252      TypeHint rhs_hint = VisitForAccumulatorValue(expr->subsequent(i));
6253      if (rhs_hint == TypeHint::kString) type_hint = TypeHint::kString;
6254      builder()->SetExpressionPosition(expr->subsequent_op_position(i));
6255      builder()->BinaryOperation(
6256          expr->op(), lhs,
6257          feedback_index(feedback_spec()->AddBinaryOpICSlot()));
6258    }
6259  }
6260
6261  if (type_hint == TypeHint::kString && expr->op() == Token::ADD) {
6262    // If any operand of an ADD is a String, a String is produced.
6263    execution_result()->SetResultIsString();
6264  }
6265}
6266
6267// Note: the actual spreading is performed by the surrounding expression's
6268// visitor.
6269void BytecodeGenerator::VisitSpread(Spread* expr) { Visit(expr->expression()); }
6270
6271void BytecodeGenerator::VisitEmptyParentheses(EmptyParentheses* expr) {
6272  UNREACHABLE();
6273}
6274
6275void BytecodeGenerator::VisitImportCallExpression(ImportCallExpression* expr) {
6276  const int register_count = expr->import_assertions() ? 3 : 2;
6277  RegisterList args = register_allocator()->NewRegisterList(register_count);
6278  VisitForRegisterValue(expr->specifier(), args[1]);
6279  if (expr->import_assertions()) {
6280    VisitForRegisterValue(expr->import_assertions(), args[2]);
6281  }
6282  builder()
6283      ->MoveRegister(Register::function_closure(), args[0])
6284      .CallRuntime(Runtime::kDynamicImportCall, args);
6285}
6286
6287void BytecodeGenerator::BuildGetIterator(IteratorType hint) {
6288  if (hint == IteratorType::kAsync) {
6289    RegisterAllocationScope scope(this);
6290
6291    Register obj = register_allocator()->NewRegister();
6292    Register method = register_allocator()->NewRegister();
6293
6294    // Set method to GetMethod(obj, @@asyncIterator)
6295    builder()->StoreAccumulatorInRegister(obj).LoadAsyncIteratorProperty(
6296        obj, feedback_index(feedback_spec()->AddLoadICSlot()));
6297
6298    BytecodeLabel async_iterator_undefined, done;
6299    builder()->JumpIfUndefinedOrNull(&async_iterator_undefined);
6300
6301    // Let iterator be Call(method, obj)
6302    builder()->StoreAccumulatorInRegister(method).CallProperty(
6303        method, RegisterList(obj),
6304        feedback_index(feedback_spec()->AddCallICSlot()));
6305
6306    // If Type(iterator) is not Object, throw a TypeError exception.
6307    builder()->JumpIfJSReceiver(&done);
6308    builder()->CallRuntime(Runtime::kThrowSymbolAsyncIteratorInvalid);
6309
6310    builder()->Bind(&async_iterator_undefined);
6311    // If method is undefined,
6312    //     Let syncMethod be GetMethod(obj, @@iterator)
6313    builder()
6314        ->LoadIteratorProperty(obj,
6315                               feedback_index(feedback_spec()->AddLoadICSlot()))
6316        .StoreAccumulatorInRegister(method);
6317
6318    //     Let syncIterator be Call(syncMethod, obj)
6319    builder()->CallProperty(method, RegisterList(obj),
6320                            feedback_index(feedback_spec()->AddCallICSlot()));
6321
6322    // Return CreateAsyncFromSyncIterator(syncIterator)
6323    // alias `method` register as it's no longer used
6324    Register sync_iter = method;
6325    builder()->StoreAccumulatorInRegister(sync_iter).CallRuntime(
6326        Runtime::kInlineCreateAsyncFromSyncIterator, sync_iter);
6327
6328    builder()->Bind(&done);
6329  } else {
6330    {
6331      RegisterAllocationScope scope(this);
6332
6333      Register obj = register_allocator()->NewRegister();
6334      int load_feedback_index =
6335          feedback_index(feedback_spec()->AddLoadICSlot());
6336      int call_feedback_index =
6337          feedback_index(feedback_spec()->AddCallICSlot());
6338
6339      // Let method be GetMethod(obj, @@iterator) and
6340      // iterator be Call(method, obj).
6341      builder()->StoreAccumulatorInRegister(obj).GetIterator(
6342          obj, load_feedback_index, call_feedback_index);
6343    }
6344
6345    // If Type(iterator) is not Object, throw a TypeError exception.
6346    BytecodeLabel no_type_error;
6347    builder()->JumpIfJSReceiver(&no_type_error);
6348    builder()->CallRuntime(Runtime::kThrowSymbolIteratorInvalid);
6349    builder()->Bind(&no_type_error);
6350  }
6351}
6352
6353// Returns an IteratorRecord which is valid for the lifetime of the current
6354// register_allocation_scope.
6355BytecodeGenerator::IteratorRecord BytecodeGenerator::BuildGetIteratorRecord(
6356    Register next, Register object, IteratorType hint) {
6357  DCHECK(next.is_valid() && object.is_valid());
6358  BuildGetIterator(hint);
6359
6360  builder()
6361      ->StoreAccumulatorInRegister(object)
6362      .LoadNamedProperty(object, ast_string_constants()->next_string(),
6363                         feedback_index(feedback_spec()->AddLoadICSlot()))
6364      .StoreAccumulatorInRegister(next);
6365  return IteratorRecord(object, next, hint);
6366}
6367
6368BytecodeGenerator::IteratorRecord BytecodeGenerator::BuildGetIteratorRecord(
6369    IteratorType hint) {
6370  Register next = register_allocator()->NewRegister();
6371  Register object = register_allocator()->NewRegister();
6372  return BuildGetIteratorRecord(next, object, hint);
6373}
6374
6375void BytecodeGenerator::BuildIteratorNext(const IteratorRecord& iterator,
6376                                          Register next_result) {
6377  DCHECK(next_result.is_valid());
6378  builder()->CallProperty(iterator.next(), RegisterList(iterator.object()),
6379                          feedback_index(feedback_spec()->AddCallICSlot()));
6380
6381  if (iterator.type() == IteratorType::kAsync) {
6382    BuildAwait();
6383  }
6384
6385  BytecodeLabel is_object;
6386  builder()
6387      ->StoreAccumulatorInRegister(next_result)
6388      .JumpIfJSReceiver(&is_object)
6389      .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, next_result)
6390      .Bind(&is_object);
6391}
6392
6393void BytecodeGenerator::BuildCallIteratorMethod(Register iterator,
6394                                                const AstRawString* method_name,
6395                                                RegisterList receiver_and_args,
6396                                                BytecodeLabel* if_called,
6397                                                BytecodeLabels* if_notcalled) {
6398  RegisterAllocationScope register_scope(this);
6399
6400  Register method = register_allocator()->NewRegister();
6401  FeedbackSlot slot = feedback_spec()->AddLoadICSlot();
6402  builder()
6403      ->LoadNamedProperty(iterator, method_name, feedback_index(slot))
6404      .JumpIfUndefinedOrNull(if_notcalled->New())
6405      .StoreAccumulatorInRegister(method)
6406      .CallProperty(method, receiver_and_args,
6407                    feedback_index(feedback_spec()->AddCallICSlot()))
6408      .Jump(if_called);
6409}
6410
6411void BytecodeGenerator::BuildIteratorClose(const IteratorRecord& iterator,
6412                                           Expression* expr) {
6413  RegisterAllocationScope register_scope(this);
6414  BytecodeLabels done(zone());
6415  BytecodeLabel if_called;
6416  RegisterList args = RegisterList(iterator.object());
6417  BuildCallIteratorMethod(iterator.object(),
6418                          ast_string_constants()->return_string(), args,
6419                          &if_called, &done);
6420  builder()->Bind(&if_called);
6421
6422  if (iterator.type() == IteratorType::kAsync) {
6423    DCHECK_NOT_NULL(expr);
6424    BuildAwait(expr->position());
6425  }
6426
6427  builder()->JumpIfJSReceiver(done.New());
6428  {
6429    RegisterAllocationScope inner_register_scope(this);
6430    Register return_result = register_allocator()->NewRegister();
6431    builder()
6432        ->StoreAccumulatorInRegister(return_result)
6433        .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, return_result);
6434  }
6435
6436  done.Bind(builder());
6437}
6438
6439void BytecodeGenerator::VisitGetTemplateObject(GetTemplateObject* expr) {
6440  builder()->SetExpressionPosition(expr);
6441  size_t entry = builder()->AllocateDeferredConstantPoolEntry();
6442  template_objects_.push_back(std::make_pair(expr, entry));
6443  FeedbackSlot literal_slot = feedback_spec()->AddLiteralSlot();
6444  builder()->GetTemplateObject(entry, feedback_index(literal_slot));
6445}
6446
6447void BytecodeGenerator::VisitTemplateLiteral(TemplateLiteral* expr) {
6448  const ZonePtrList<const AstRawString>& parts = *expr->string_parts();
6449  const ZonePtrList<Expression>& substitutions = *expr->substitutions();
6450  // Template strings with no substitutions are turned into StringLiterals.
6451  DCHECK_GT(substitutions.length(), 0);
6452  DCHECK_EQ(parts.length(), substitutions.length() + 1);
6453
6454  // Generate string concatenation
6455  // TODO(caitp): Don't generate feedback slot if it's not used --- introduce
6456  // a simple, concise, reusable mechanism to lazily create reusable slots.
6457  FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
6458  Register last_part = register_allocator()->NewRegister();
6459  bool last_part_valid = false;
6460
6461  builder()->SetExpressionPosition(expr);
6462  for (int i = 0; i < substitutions.length(); ++i) {
6463    if (i != 0) {
6464      builder()->StoreAccumulatorInRegister(last_part);
6465      last_part_valid = true;
6466    }
6467
6468    if (!parts[i]->IsEmpty()) {
6469      builder()->LoadLiteral(parts[i]);
6470      if (last_part_valid) {
6471        builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
6472      }
6473      builder()->StoreAccumulatorInRegister(last_part);
6474      last_part_valid = true;
6475    }
6476
6477    TypeHint type_hint = VisitForAccumulatorValue(substitutions[i]);
6478    if (type_hint != TypeHint::kString) {
6479      builder()->ToString();
6480    }
6481    if (last_part_valid) {
6482      builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
6483    }
6484    last_part_valid = false;
6485  }
6486
6487  if (!parts.last()->IsEmpty()) {
6488    builder()->StoreAccumulatorInRegister(last_part);
6489    builder()->LoadLiteral(parts.last());
6490    builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
6491  }
6492}
6493
6494void BytecodeGenerator::BuildThisVariableLoad() {
6495  DeclarationScope* receiver_scope = closure_scope()->GetReceiverScope();
6496  Variable* var = receiver_scope->receiver();
6497  // TODO(littledan): implement 'this' hole check elimination.
6498  HoleCheckMode hole_check_mode =
6499      IsDerivedConstructor(receiver_scope->function_kind())
6500          ? HoleCheckMode::kRequired
6501          : HoleCheckMode::kElided;
6502  BuildVariableLoad(var, hole_check_mode);
6503}
6504
6505void BytecodeGenerator::VisitThisExpression(ThisExpression* expr) {
6506  BuildThisVariableLoad();
6507}
6508
6509void BytecodeGenerator::VisitSuperCallReference(SuperCallReference* expr) {
6510  // Handled by VisitCall().
6511  UNREACHABLE();
6512}
6513
6514void BytecodeGenerator::VisitSuperPropertyReference(
6515    SuperPropertyReference* expr) {
6516  builder()->CallRuntime(Runtime::kThrowUnsupportedSuperError);
6517}
6518
6519void BytecodeGenerator::VisitCommaExpression(BinaryOperation* binop) {
6520  VisitForEffect(binop->left());
6521  Visit(binop->right());
6522}
6523
6524void BytecodeGenerator::VisitNaryCommaExpression(NaryOperation* expr) {
6525  DCHECK_GT(expr->subsequent_length(), 0);
6526
6527  VisitForEffect(expr->first());
6528  for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
6529    VisitForEffect(expr->subsequent(i));
6530  }
6531  Visit(expr->subsequent(expr->subsequent_length() - 1));
6532}
6533
6534void BytecodeGenerator::VisitLogicalTestSubExpression(
6535    Token::Value token, Expression* expr, BytecodeLabels* then_labels,
6536    BytecodeLabels* else_labels, int coverage_slot) {
6537  DCHECK(token == Token::OR || token == Token::AND || token == Token::NULLISH);
6538
6539  BytecodeLabels test_next(zone());
6540  if (token == Token::OR) {
6541    VisitForTest(expr, then_labels, &test_next, TestFallthrough::kElse);
6542  } else if (token == Token::AND) {
6543    VisitForTest(expr, &test_next, else_labels, TestFallthrough::kThen);
6544  } else {
6545    DCHECK_EQ(Token::NULLISH, token);
6546    VisitForNullishTest(expr, then_labels, &test_next, else_labels);
6547  }
6548  test_next.Bind(builder());
6549
6550  BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
6551}
6552
6553void BytecodeGenerator::VisitLogicalTest(Token::Value token, Expression* left,
6554                                         Expression* right,
6555                                         int right_coverage_slot) {
6556  DCHECK(token == Token::OR || token == Token::AND || token == Token::NULLISH);
6557  TestResultScope* test_result = execution_result()->AsTest();
6558  BytecodeLabels* then_labels = test_result->then_labels();
6559  BytecodeLabels* else_labels = test_result->else_labels();
6560  TestFallthrough fallthrough = test_result->fallthrough();
6561
6562  VisitLogicalTestSubExpression(token, left, then_labels, else_labels,
6563                                right_coverage_slot);
6564  // The last test has the same then, else and fallthrough as the parent test.
6565  VisitForTest(right, then_labels, else_labels, fallthrough);
6566}
6567
6568void BytecodeGenerator::VisitNaryLogicalTest(
6569    Token::Value token, NaryOperation* expr,
6570    const NaryCodeCoverageSlots* coverage_slots) {
6571  DCHECK(token == Token::OR || token == Token::AND || token == Token::NULLISH);
6572  DCHECK_GT(expr->subsequent_length(), 0);
6573
6574  TestResultScope* test_result = execution_result()->AsTest();
6575  BytecodeLabels* then_labels = test_result->then_labels();
6576  BytecodeLabels* else_labels = test_result->else_labels();
6577  TestFallthrough fallthrough = test_result->fallthrough();
6578
6579  VisitLogicalTestSubExpression(token, expr->first(), then_labels, else_labels,
6580                                coverage_slots->GetSlotFor(0));
6581  for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
6582    VisitLogicalTestSubExpression(token, expr->subsequent(i), then_labels,
6583                                  else_labels,
6584                                  coverage_slots->GetSlotFor(i + 1));
6585  }
6586  // The last test has the same then, else and fallthrough as the parent test.
6587  VisitForTest(expr->subsequent(expr->subsequent_length() - 1), then_labels,
6588               else_labels, fallthrough);
6589}
6590
6591bool BytecodeGenerator::VisitLogicalOrSubExpression(Expression* expr,
6592                                                    BytecodeLabels* end_labels,
6593                                                    int coverage_slot) {
6594  if (expr->ToBooleanIsTrue()) {
6595    VisitForAccumulatorValue(expr);
6596    end_labels->Bind(builder());
6597    return true;
6598  } else if (!expr->ToBooleanIsFalse()) {
6599    TypeHint type_hint = VisitForAccumulatorValue(expr);
6600    builder()->JumpIfTrue(ToBooleanModeFromTypeHint(type_hint),
6601                          end_labels->New());
6602  }
6603
6604  BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
6605
6606  return false;
6607}
6608
6609bool BytecodeGenerator::VisitLogicalAndSubExpression(Expression* expr,
6610                                                     BytecodeLabels* end_labels,
6611                                                     int coverage_slot) {
6612  if (expr->ToBooleanIsFalse()) {
6613    VisitForAccumulatorValue(expr);
6614    end_labels->Bind(builder());
6615    return true;
6616  } else if (!expr->ToBooleanIsTrue()) {
6617    TypeHint type_hint = VisitForAccumulatorValue(expr);
6618    builder()->JumpIfFalse(ToBooleanModeFromTypeHint(type_hint),
6619                           end_labels->New());
6620  }
6621
6622  BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
6623
6624  return false;
6625}
6626
6627bool BytecodeGenerator::VisitNullishSubExpression(Expression* expr,
6628                                                  BytecodeLabels* end_labels,
6629                                                  int coverage_slot) {
6630  if (expr->IsLiteralButNotNullOrUndefined()) {
6631    VisitForAccumulatorValue(expr);
6632    end_labels->Bind(builder());
6633    return true;
6634  } else if (!expr->IsNullOrUndefinedLiteral()) {
6635    VisitForAccumulatorValue(expr);
6636    BytecodeLabel is_null_or_undefined;
6637    builder()
6638        ->JumpIfUndefinedOrNull(&is_null_or_undefined)
6639        .Jump(end_labels->New());
6640    builder()->Bind(&is_null_or_undefined);
6641  }
6642
6643  BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
6644
6645  return false;
6646}
6647
6648void BytecodeGenerator::VisitLogicalOrExpression(BinaryOperation* binop) {
6649  Expression* left = binop->left();
6650  Expression* right = binop->right();
6651
6652  int right_coverage_slot =
6653      AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);
6654
6655  if (execution_result()->IsTest()) {
6656    TestResultScope* test_result = execution_result()->AsTest();
6657    if (left->ToBooleanIsTrue()) {
6658      builder()->Jump(test_result->NewThenLabel());
6659    } else if (left->ToBooleanIsFalse() && right->ToBooleanIsFalse()) {
6660      BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
6661      builder()->Jump(test_result->NewElseLabel());
6662    } else {
6663      VisitLogicalTest(Token::OR, left, right, right_coverage_slot);
6664    }
6665    test_result->SetResultConsumedByTest();
6666  } else {
6667    BytecodeLabels end_labels(zone());
6668    if (VisitLogicalOrSubExpression(left, &end_labels, right_coverage_slot)) {
6669      return;
6670    }
6671    VisitForAccumulatorValue(right);
6672    end_labels.Bind(builder());
6673  }
6674}
6675
6676void BytecodeGenerator::VisitNaryLogicalOrExpression(NaryOperation* expr) {
6677  Expression* first = expr->first();
6678  DCHECK_GT(expr->subsequent_length(), 0);
6679
6680  NaryCodeCoverageSlots coverage_slots(this, expr);
6681
6682  if (execution_result()->IsTest()) {
6683    TestResultScope* test_result = execution_result()->AsTest();
6684    if (first->ToBooleanIsTrue()) {
6685      builder()->Jump(test_result->NewThenLabel());
6686    } else {
6687      VisitNaryLogicalTest(Token::OR, expr, &coverage_slots);
6688    }
6689    test_result->SetResultConsumedByTest();
6690  } else {
6691    BytecodeLabels end_labels(zone());
6692    if (VisitLogicalOrSubExpression(first, &end_labels,
6693                                    coverage_slots.GetSlotFor(0))) {
6694      return;
6695    }
6696    for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
6697      if (VisitLogicalOrSubExpression(expr->subsequent(i), &end_labels,
6698                                      coverage_slots.GetSlotFor(i + 1))) {
6699        return;
6700      }
6701    }
6702    // We have to visit the last value even if it's true, because we need its
6703    // actual value.
6704    VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
6705    end_labels.Bind(builder());
6706  }
6707}
6708
6709void BytecodeGenerator::VisitLogicalAndExpression(BinaryOperation* binop) {
6710  Expression* left = binop->left();
6711  Expression* right = binop->right();
6712
6713  int right_coverage_slot =
6714      AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);
6715
6716  if (execution_result()->IsTest()) {
6717    TestResultScope* test_result = execution_result()->AsTest();
6718    if (left->ToBooleanIsFalse()) {
6719      builder()->Jump(test_result->NewElseLabel());
6720    } else if (left->ToBooleanIsTrue() && right->ToBooleanIsTrue()) {
6721      BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
6722      builder()->Jump(test_result->NewThenLabel());
6723    } else {
6724      VisitLogicalTest(Token::AND, left, right, right_coverage_slot);
6725    }
6726    test_result->SetResultConsumedByTest();
6727  } else {
6728    BytecodeLabels end_labels(zone());
6729    if (VisitLogicalAndSubExpression(left, &end_labels, right_coverage_slot)) {
6730      return;
6731    }
6732    VisitForAccumulatorValue(right);
6733    end_labels.Bind(builder());
6734  }
6735}
6736
6737void BytecodeGenerator::VisitNaryLogicalAndExpression(NaryOperation* expr) {
6738  Expression* first = expr->first();
6739  DCHECK_GT(expr->subsequent_length(), 0);
6740
6741  NaryCodeCoverageSlots coverage_slots(this, expr);
6742
6743  if (execution_result()->IsTest()) {
6744    TestResultScope* test_result = execution_result()->AsTest();
6745    if (first->ToBooleanIsFalse()) {
6746      builder()->Jump(test_result->NewElseLabel());
6747    } else {
6748      VisitNaryLogicalTest(Token::AND, expr, &coverage_slots);
6749    }
6750    test_result->SetResultConsumedByTest();
6751  } else {
6752    BytecodeLabels end_labels(zone());
6753    if (VisitLogicalAndSubExpression(first, &end_labels,
6754                                     coverage_slots.GetSlotFor(0))) {
6755      return;
6756    }
6757    for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
6758      if (VisitLogicalAndSubExpression(expr->subsequent(i), &end_labels,
6759                                       coverage_slots.GetSlotFor(i + 1))) {
6760        return;
6761      }
6762    }
6763    // We have to visit the last value even if it's false, because we need its
6764    // actual value.
6765    VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
6766    end_labels.Bind(builder());
6767  }
6768}
6769
6770void BytecodeGenerator::VisitNullishExpression(BinaryOperation* binop) {
6771  Expression* left = binop->left();
6772  Expression* right = binop->right();
6773
6774  int right_coverage_slot =
6775      AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);
6776
6777  if (execution_result()->IsTest()) {
6778    TestResultScope* test_result = execution_result()->AsTest();
6779    if (left->IsLiteralButNotNullOrUndefined() && left->ToBooleanIsTrue()) {
6780      builder()->Jump(test_result->NewThenLabel());
6781    } else if (left->IsNullOrUndefinedLiteral() &&
6782               right->IsNullOrUndefinedLiteral()) {
6783      BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
6784      builder()->Jump(test_result->NewElseLabel());
6785    } else {
6786      VisitLogicalTest(Token::NULLISH, left, right, right_coverage_slot);
6787    }
6788    test_result->SetResultConsumedByTest();
6789  } else {
6790    BytecodeLabels end_labels(zone());
6791    if (VisitNullishSubExpression(left, &end_labels, right_coverage_slot)) {
6792      return;
6793    }
6794    VisitForAccumulatorValue(right);
6795    end_labels.Bind(builder());
6796  }
6797}
6798
6799void BytecodeGenerator::VisitNaryNullishExpression(NaryOperation* expr) {
6800  Expression* first = expr->first();
6801  DCHECK_GT(expr->subsequent_length(), 0);
6802
6803  NaryCodeCoverageSlots coverage_slots(this, expr);
6804
6805  if (execution_result()->IsTest()) {
6806    TestResultScope* test_result = execution_result()->AsTest();
6807    if (first->IsLiteralButNotNullOrUndefined() && first->ToBooleanIsTrue()) {
6808      builder()->Jump(test_result->NewThenLabel());
6809    } else {
6810      VisitNaryLogicalTest(Token::NULLISH, expr, &coverage_slots);
6811    }
6812    test_result->SetResultConsumedByTest();
6813  } else {
6814    BytecodeLabels end_labels(zone());
6815    if (VisitNullishSubExpression(first, &end_labels,
6816                                  coverage_slots.GetSlotFor(0))) {
6817      return;
6818    }
6819    for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
6820      if (VisitNullishSubExpression(expr->subsequent(i), &end_labels,
6821                                    coverage_slots.GetSlotFor(i + 1))) {
6822        return;
6823      }
6824    }
6825    // We have to visit the last value even if it's nullish, because we need its
6826    // actual value.
6827    VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
6828    end_labels.Bind(builder());
6829  }
6830}
6831
6832void BytecodeGenerator::BuildNewLocalActivationContext() {
6833  ValueResultScope value_execution_result(this);
6834  Scope* scope = closure_scope();
6835  DCHECK_EQ(current_scope(), closure_scope());
6836
6837  // Create the appropriate context.
6838  DCHECK(scope->is_function_scope() || scope->is_eval_scope());
6839  int slot_count = scope->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
6840  if (slot_count <= ConstructorBuiltins::MaximumFunctionContextSlots()) {
6841    switch (scope->scope_type()) {
6842      case EVAL_SCOPE:
6843        builder()->CreateEvalContext(scope, slot_count);
6844        break;
6845      case FUNCTION_SCOPE:
6846        builder()->CreateFunctionContext(scope, slot_count);
6847        break;
6848      default:
6849        UNREACHABLE();
6850    }
6851  } else {
6852    Register arg = register_allocator()->NewRegister();
6853    builder()->LoadLiteral(scope).StoreAccumulatorInRegister(arg).CallRuntime(
6854        Runtime::kNewFunctionContext, arg);
6855  }
6856}
6857
6858void BytecodeGenerator::BuildLocalActivationContextInitialization() {
6859  DeclarationScope* scope = closure_scope();
6860
6861  if (scope->has_this_declaration() && scope->receiver()->IsContextSlot()) {
6862    Variable* variable = scope->receiver();
6863    Register receiver(builder()->Receiver());
6864    // Context variable (at bottom of the context chain).
6865    DCHECK_EQ(0, scope->ContextChainLength(variable->scope()));
6866    builder()->LoadAccumulatorWithRegister(receiver).StoreContextSlot(
6867        execution_context()->reg(), variable->index(), 0);
6868  }
6869
6870  // Copy parameters into context if necessary.
6871  int num_parameters = scope->num_parameters();
6872  for (int i = 0; i < num_parameters; i++) {
6873    Variable* variable = scope->parameter(i);
6874    if (!variable->IsContextSlot()) continue;
6875
6876    Register parameter(builder()->Parameter(i));
6877    // Context variable (at bottom of the context chain).
6878    DCHECK_EQ(0, scope->ContextChainLength(variable->scope()));
6879    builder()->LoadAccumulatorWithRegister(parameter).StoreContextSlot(
6880        execution_context()->reg(), variable->index(), 0);
6881  }
6882}
6883
6884void BytecodeGenerator::BuildNewLocalBlockContext(Scope* scope) {
6885  ValueResultScope value_execution_result(this);
6886  DCHECK(scope->is_block_scope());
6887
6888  builder()->CreateBlockContext(scope);
6889}
6890
6891void BytecodeGenerator::BuildNewLocalWithContext(Scope* scope) {
6892  ValueResultScope value_execution_result(this);
6893
6894  Register extension_object = register_allocator()->NewRegister();
6895
6896  builder()->ToObject(extension_object);
6897  builder()->CreateWithContext(extension_object, scope);
6898}
6899
6900void BytecodeGenerator::BuildNewLocalCatchContext(Scope* scope) {
6901  ValueResultScope value_execution_result(this);
6902  DCHECK(scope->catch_variable()->IsContextSlot());
6903
6904  Register exception = register_allocator()->NewRegister();
6905  builder()->StoreAccumulatorInRegister(exception);
6906  builder()->CreateCatchContext(exception, scope);
6907}
6908
6909void BytecodeGenerator::VisitLiteralAccessor(LiteralProperty* property,
6910                                             Register value_out) {
6911  if (property == nullptr) {
6912    builder()->LoadNull().StoreAccumulatorInRegister(value_out);
6913  } else {
6914    VisitForRegisterValue(property->value(), value_out);
6915  }
6916}
6917
6918void BytecodeGenerator::VisitArgumentsObject(Variable* variable) {
6919  if (variable == nullptr) return;
6920
6921  DCHECK(variable->IsContextSlot() || variable->IsStackAllocated());
6922
6923  // Allocate and initialize a new arguments object and assign to the
6924  // {arguments} variable.
6925  builder()->CreateArguments(closure_scope()->GetArgumentsType());
6926  BuildVariableAssignment(variable, Token::ASSIGN, HoleCheckMode::kElided);
6927}
6928
6929void BytecodeGenerator::VisitRestArgumentsArray(Variable* rest) {
6930  if (rest == nullptr) return;
6931
6932  // Allocate and initialize a new rest parameter and assign to the {rest}
6933  // variable.
6934  builder()->CreateArguments(CreateArgumentsType::kRestParameter);
6935  DCHECK(rest->IsContextSlot() || rest->IsStackAllocated());
6936  BuildVariableAssignment(rest, Token::ASSIGN, HoleCheckMode::kElided);
6937}
6938
6939void BytecodeGenerator::VisitThisFunctionVariable(Variable* variable) {
6940  if (variable == nullptr) return;
6941
6942  // Store the closure we were called with in the given variable.
6943  builder()->LoadAccumulatorWithRegister(Register::function_closure());
6944  BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
6945}
6946
6947void BytecodeGenerator::VisitNewTargetVariable(Variable* variable) {
6948  if (variable == nullptr) return;
6949
6950  // The generator resume trampoline abuses the new.target register
6951  // to pass in the generator object.  In ordinary calls, new.target is always
6952  // undefined because generator functions are non-constructible, so don't
6953  // assign anything to the new.target variable.
6954  if (IsResumableFunction(info()->literal()->kind())) return;
6955
6956  if (variable->location() == VariableLocation::LOCAL) {
6957    // The new.target register was already assigned by entry trampoline.
6958    DCHECK_EQ(incoming_new_target_or_generator_.index(),
6959              GetRegisterForLocalVariable(variable).index());
6960    return;
6961  }
6962
6963  // Store the new target we were called with in the given variable.
6964  builder()->LoadAccumulatorWithRegister(incoming_new_target_or_generator_);
6965  BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
6966}
6967
6968void BytecodeGenerator::BuildGeneratorObjectVariableInitialization() {
6969  DCHECK(IsResumableFunction(info()->literal()->kind()));
6970
6971  Variable* generator_object_var = closure_scope()->generator_object_var();
6972  RegisterAllocationScope register_scope(this);
6973  RegisterList args = register_allocator()->NewRegisterList(2);
6974  Runtime::FunctionId function_id =
6975      ((IsAsyncFunction(info()->literal()->kind()) &&
6976        !IsAsyncGeneratorFunction(info()->literal()->kind())) ||
6977       IsAsyncModule(info()->literal()->kind()))
6978          ? Runtime::kInlineAsyncFunctionEnter
6979          : Runtime::kInlineCreateJSGeneratorObject;
6980  builder()
6981      ->MoveRegister(Register::function_closure(), args[0])
6982      .MoveRegister(builder()->Receiver(), args[1])
6983      .CallRuntime(function_id, args)
6984      .StoreAccumulatorInRegister(generator_object());
6985
6986  if (generator_object_var->location() == VariableLocation::LOCAL) {
6987    // The generator object register is already set to the variable's local
6988    // register.
6989    DCHECK_EQ(generator_object().index(),
6990              GetRegisterForLocalVariable(generator_object_var).index());
6991  } else {
6992    BuildVariableAssignment(generator_object_var, Token::INIT,
6993                            HoleCheckMode::kElided);
6994  }
6995}
6996
6997void BytecodeGenerator::BuildPushUndefinedIntoRegisterList(
6998    RegisterList* reg_list) {
6999  Register reg = register_allocator()->GrowRegisterList(reg_list);
7000  builder()->LoadUndefined().StoreAccumulatorInRegister(reg);
7001}
7002
7003void BytecodeGenerator::BuildLoadPropertyKey(LiteralProperty* property,
7004                                             Register out_reg) {
7005  if (property->key()->IsStringLiteral()) {
7006    builder()
7007        ->LoadLiteral(property->key()->AsLiteral()->AsRawString())
7008        .StoreAccumulatorInRegister(out_reg);
7009  } else {
7010    VisitForAccumulatorValue(property->key());
7011    builder()->ToName(out_reg);
7012  }
7013}
7014
7015int BytecodeGenerator::AllocateBlockCoverageSlotIfEnabled(
7016    AstNode* node, SourceRangeKind kind) {
7017  return (block_coverage_builder_ == nullptr)
7018             ? BlockCoverageBuilder::kNoCoverageArraySlot
7019             : block_coverage_builder_->AllocateBlockCoverageSlot(node, kind);
7020}
7021
7022int BytecodeGenerator::AllocateNaryBlockCoverageSlotIfEnabled(
7023    NaryOperation* node, size_t index) {
7024  return (block_coverage_builder_ == nullptr)
7025             ? BlockCoverageBuilder::kNoCoverageArraySlot
7026             : block_coverage_builder_->AllocateNaryBlockCoverageSlot(node,
7027                                                                      index);
7028}
7029
7030void BytecodeGenerator::BuildIncrementBlockCoverageCounterIfEnabled(
7031    AstNode* node, SourceRangeKind kind) {
7032  if (block_coverage_builder_ == nullptr) return;
7033  block_coverage_builder_->IncrementBlockCounter(node, kind);
7034}
7035
7036void BytecodeGenerator::BuildIncrementBlockCoverageCounterIfEnabled(
7037    int coverage_array_slot) {
7038  if (block_coverage_builder_ != nullptr) {
7039    block_coverage_builder_->IncrementBlockCounter(coverage_array_slot);
7040  }
7041}
7042
7043// Visits the expression |expr| and places the result in the accumulator.
7044BytecodeGenerator::TypeHint BytecodeGenerator::VisitForAccumulatorValue(
7045    Expression* expr) {
7046  ValueResultScope accumulator_scope(this);
7047  Visit(expr);
7048  return accumulator_scope.type_hint();
7049}
7050
7051void BytecodeGenerator::VisitForAccumulatorValueOrTheHole(Expression* expr) {
7052  if (expr == nullptr) {
7053    builder()->LoadTheHole();
7054  } else {
7055    VisitForAccumulatorValue(expr);
7056  }
7057}
7058
7059// Visits the expression |expr| and discards the result.
7060void BytecodeGenerator::VisitForEffect(Expression* expr) {
7061  EffectResultScope effect_scope(this);
7062  Visit(expr);
7063}
7064
7065// Visits the expression |expr| and returns the register containing
7066// the expression result.
7067Register BytecodeGenerator::VisitForRegisterValue(Expression* expr) {
7068  VisitForAccumulatorValue(expr);
7069  Register result = register_allocator()->NewRegister();
7070  builder()->StoreAccumulatorInRegister(result);
7071  return result;
7072}
7073
7074// Visits the expression |expr| and stores the expression result in
7075// |destination|.
7076void BytecodeGenerator::VisitForRegisterValue(Expression* expr,
7077                                              Register destination) {
7078  ValueResultScope register_scope(this);
7079  Visit(expr);
7080  builder()->StoreAccumulatorInRegister(destination);
7081}
7082
7083// Visits the expression |expr| and pushes the result into a new register
7084// added to the end of |reg_list|.
7085void BytecodeGenerator::VisitAndPushIntoRegisterList(Expression* expr,
7086                                                     RegisterList* reg_list) {
7087  {
7088    ValueResultScope register_scope(this);
7089    Visit(expr);
7090  }
7091  // Grow the register list after visiting the expression to avoid reserving
7092  // the register across the expression evaluation, which could cause memory
7093  // leaks for deep expressions due to dead objects being kept alive by pointers
7094  // in registers.
7095  Register destination = register_allocator()->GrowRegisterList(reg_list);
7096  builder()->StoreAccumulatorInRegister(destination);
7097}
7098
7099void BytecodeGenerator::BuildTest(ToBooleanMode mode,
7100                                  BytecodeLabels* then_labels,
7101                                  BytecodeLabels* else_labels,
7102                                  TestFallthrough fallthrough) {
7103  switch (fallthrough) {
7104    case TestFallthrough::kThen:
7105      builder()->JumpIfFalse(mode, else_labels->New());
7106      break;
7107    case TestFallthrough::kElse:
7108      builder()->JumpIfTrue(mode, then_labels->New());
7109      break;
7110    case TestFallthrough::kNone:
7111      builder()->JumpIfTrue(mode, then_labels->New());
7112      builder()->Jump(else_labels->New());
7113      break;
7114  }
7115}
7116
7117// Visits the expression |expr| for testing its boolean value and jumping to the
7118// |then| or |other| label depending on value and short-circuit semantics
7119void BytecodeGenerator::VisitForTest(Expression* expr,
7120                                     BytecodeLabels* then_labels,
7121                                     BytecodeLabels* else_labels,
7122                                     TestFallthrough fallthrough) {
7123  bool result_consumed;
7124  TypeHint type_hint;
7125  {
7126    // To make sure that all temporary registers are returned before generating
7127    // jumps below, we ensure that the result scope is deleted before doing so.
7128    // Dead registers might be materialized otherwise.
7129    TestResultScope test_result(this, then_labels, else_labels, fallthrough);
7130    Visit(expr);
7131    result_consumed = test_result.result_consumed_by_test();
7132    type_hint = test_result.type_hint();
7133    // Labels and fallthrough might have been mutated, so update based on
7134    // TestResultScope.
7135    then_labels = test_result.then_labels();
7136    else_labels = test_result.else_labels();
7137    fallthrough = test_result.fallthrough();
7138  }
7139  if (!result_consumed) {
7140    BuildTest(ToBooleanModeFromTypeHint(type_hint), then_labels, else_labels,
7141              fallthrough);
7142  }
7143}
7144
7145// Visits the expression |expr| for testing its nullish value and jumping to the
7146// |then| or |other| label depending on value and short-circuit semantics
7147void BytecodeGenerator::VisitForNullishTest(Expression* expr,
7148                                            BytecodeLabels* then_labels,
7149                                            BytecodeLabels* test_next_labels,
7150                                            BytecodeLabels* else_labels) {
7151  // Nullish short circuits on undefined or null, otherwise we fall back to
7152  // BuildTest with no fallthrough.
7153  // TODO(joshualitt): We should do this in a TestResultScope.
7154  TypeHint type_hint = VisitForAccumulatorValue(expr);
7155  ToBooleanMode mode = ToBooleanModeFromTypeHint(type_hint);
7156
7157  // Skip the nullish shortcircuit if we already have a boolean.
7158  if (mode != ToBooleanMode::kAlreadyBoolean) {
7159    builder()->JumpIfUndefinedOrNull(test_next_labels->New());
7160  }
7161  BuildTest(mode, then_labels, else_labels, TestFallthrough::kNone);
7162}
7163
7164void BytecodeGenerator::VisitInSameTestExecutionScope(Expression* expr) {
7165  DCHECK(execution_result()->IsTest());
7166  {
7167    RegisterAllocationScope reg_scope(this);
7168    Visit(expr);
7169  }
7170  if (!execution_result()->AsTest()->result_consumed_by_test()) {
7171    TestResultScope* result_scope = execution_result()->AsTest();
7172    BuildTest(ToBooleanModeFromTypeHint(result_scope->type_hint()),
7173              result_scope->then_labels(), result_scope->else_labels(),
7174              result_scope->fallthrough());
7175    result_scope->SetResultConsumedByTest();
7176  }
7177}
7178
7179void BytecodeGenerator::VisitInScope(Statement* stmt, Scope* scope) {
7180  DCHECK(scope->declarations()->is_empty());
7181  CurrentScope current_scope(this, scope);
7182  ContextScope context_scope(this, scope);
7183  Visit(stmt);
7184}
7185
7186Register BytecodeGenerator::GetRegisterForLocalVariable(Variable* variable) {
7187  DCHECK_EQ(VariableLocation::LOCAL, variable->location());
7188  return builder()->Local(variable->index());
7189}
7190
7191FunctionKind BytecodeGenerator::function_kind() const {
7192  return info()->literal()->kind();
7193}
7194
7195LanguageMode BytecodeGenerator::language_mode() const {
7196  return current_scope()->language_mode();
7197}
7198
7199Register BytecodeGenerator::generator_object() const {
7200  DCHECK(IsResumableFunction(info()->literal()->kind()));
7201  return incoming_new_target_or_generator_;
7202}
7203
7204FeedbackVectorSpec* BytecodeGenerator::feedback_spec() {
7205  return info()->feedback_vector_spec();
7206}
7207
7208int BytecodeGenerator::feedback_index(FeedbackSlot slot) const {
7209  DCHECK(!slot.IsInvalid());
7210  return FeedbackVector::GetIndex(slot);
7211}
7212
7213FeedbackSlot BytecodeGenerator::GetCachedLoadGlobalICSlot(
7214    TypeofMode typeof_mode, Variable* variable) {
7215  FeedbackSlotCache::SlotKind slot_kind =
7216      typeof_mode == TypeofMode::kInside
7217          ? FeedbackSlotCache::SlotKind::kLoadGlobalInsideTypeof
7218          : FeedbackSlotCache::SlotKind::kLoadGlobalNotInsideTypeof;
7219  FeedbackSlot slot(feedback_slot_cache()->Get(slot_kind, variable));
7220  if (!slot.IsInvalid()) {
7221    return slot;
7222  }
7223  slot = feedback_spec()->AddLoadGlobalICSlot(typeof_mode);
7224  feedback_slot_cache()->Put(slot_kind, variable, feedback_index(slot));
7225  return slot;
7226}
7227
7228FeedbackSlot BytecodeGenerator::GetCachedStoreGlobalICSlot(
7229    LanguageMode language_mode, Variable* variable) {
7230  FeedbackSlotCache::SlotKind slot_kind =
7231      is_strict(language_mode)
7232          ? FeedbackSlotCache::SlotKind::kStoreGlobalStrict
7233          : FeedbackSlotCache::SlotKind::kStoreGlobalSloppy;
7234  FeedbackSlot slot(feedback_slot_cache()->Get(slot_kind, variable));
7235  if (!slot.IsInvalid()) {
7236    return slot;
7237  }
7238  slot = feedback_spec()->AddStoreGlobalICSlot(language_mode);
7239  feedback_slot_cache()->Put(slot_kind, variable, feedback_index(slot));
7240  return slot;
7241}
7242
7243FeedbackSlot BytecodeGenerator::GetCachedLoadICSlot(const Expression* expr,
7244                                                    const AstRawString* name) {
7245  DCHECK(!expr->IsSuperPropertyReference());
7246  if (!FLAG_ignition_share_named_property_feedback) {
7247    return feedback_spec()->AddLoadICSlot();
7248  }
7249  FeedbackSlotCache::SlotKind slot_kind =
7250      FeedbackSlotCache::SlotKind::kLoadProperty;
7251  if (!expr->IsVariableProxy()) {
7252    return feedback_spec()->AddLoadICSlot();
7253  }
7254  const VariableProxy* proxy = expr->AsVariableProxy();
7255  FeedbackSlot slot(
7256      feedback_slot_cache()->Get(slot_kind, proxy->var()->index(), name));
7257  if (!slot.IsInvalid()) {
7258    return slot;
7259  }
7260  slot = feedback_spec()->AddLoadICSlot();
7261  feedback_slot_cache()->Put(slot_kind, proxy->var()->index(), name,
7262                             feedback_index(slot));
7263  return slot;
7264}
7265
7266FeedbackSlot BytecodeGenerator::GetCachedLoadSuperICSlot(
7267    const AstRawString* name) {
7268  if (!FLAG_ignition_share_named_property_feedback) {
7269    return feedback_spec()->AddLoadICSlot();
7270  }
7271  FeedbackSlotCache::SlotKind slot_kind =
7272      FeedbackSlotCache::SlotKind::kLoadSuperProperty;
7273
7274  FeedbackSlot slot(feedback_slot_cache()->Get(slot_kind, name));
7275  if (!slot.IsInvalid()) {
7276    return slot;
7277  }
7278  slot = feedback_spec()->AddLoadICSlot();
7279  feedback_slot_cache()->Put(slot_kind, name, feedback_index(slot));
7280  return slot;
7281}
7282
7283FeedbackSlot BytecodeGenerator::GetCachedStoreICSlot(const Expression* expr,
7284                                                     const AstRawString* name) {
7285  if (!FLAG_ignition_share_named_property_feedback) {
7286    return feedback_spec()->AddStoreICSlot(language_mode());
7287  }
7288  FeedbackSlotCache::SlotKind slot_kind =
7289      is_strict(language_mode()) ? FeedbackSlotCache::SlotKind::kSetNamedStrict
7290                                 : FeedbackSlotCache::SlotKind::kSetNamedSloppy;
7291  if (!expr->IsVariableProxy()) {
7292    return feedback_spec()->AddStoreICSlot(language_mode());
7293  }
7294  const VariableProxy* proxy = expr->AsVariableProxy();
7295  FeedbackSlot slot(
7296      feedback_slot_cache()->Get(slot_kind, proxy->var()->index(), name));
7297  if (!slot.IsInvalid()) {
7298    return slot;
7299  }
7300  slot = feedback_spec()->AddStoreICSlot(language_mode());
7301  feedback_slot_cache()->Put(slot_kind, proxy->var()->index(), name,
7302                             feedback_index(slot));
7303  return slot;
7304}
7305
7306int BytecodeGenerator::GetCachedCreateClosureSlot(FunctionLiteral* literal) {
7307  FeedbackSlotCache::SlotKind slot_kind =
7308      FeedbackSlotCache::SlotKind::kClosureFeedbackCell;
7309  int index = feedback_slot_cache()->Get(slot_kind, literal);
7310  if (index != -1) {
7311    return index;
7312  }
7313  index = feedback_spec()->AddCreateClosureSlot();
7314  feedback_slot_cache()->Put(slot_kind, literal, index);
7315  return index;
7316}
7317
7318FeedbackSlot BytecodeGenerator::GetDummyCompareICSlot() {
7319  return dummy_feedback_slot_.Get();
7320}
7321
7322}  // namespace interpreter
7323}  // namespace internal
7324}  // namespace v8
7325