// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_CODE_H_
#define V8_OBJECTS_CODE_H_

#include "src/base/bit-field.h"
#include "src/builtins/builtins.h"
#include "src/codegen/handler-table.h"
#include "src/deoptimizer/translation-array.h"
#include "src/objects/code-kind.h"
#include "src/objects/contexts.h"
#include "src/objects/fixed-array.h"
#include "src/objects/heap-object.h"
#include "src/objects/objects.h"
#include "src/objects/shared-function-info.h"
#include "src/objects/struct.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

class ByteArray;
class BytecodeArray;
class CodeDataContainer;
class CodeDesc;

class LocalFactory;
template <typename Impl>
class FactoryBase;

namespace interpreter {
class Register;
}  // namespace interpreter

#include "torque-generated/src/objects/code-tq.inc"

// CodeDataContainer is a container for all mutable fields associated with its
// referencing {Code} object. Since {Code} objects reside on write-protected
// pages within the heap, their header fields need to be immutable. There is
// always a 1-to-1 relation between {Code} and {CodeDataContainer}; the
// referencing field {Code::code_data_container} itself is immutable.
class CodeDataContainer : public HeapObject {
 public:
  NEVER_READ_ONLY_SPACE
  DECL_ACCESSORS(next_code_link, Object)
  DECL_RELAXED_INT32_ACCESSORS(kind_specific_flags)

  // Clear uninitialized padding space. This ensures that the snapshot content
  // is deterministic.
  inline void clear_padding();

  //
  // A collection of getters and predicates that are used by the respective
  // methods on the Code object. They are defined here mostly because they
  // operate on the writable state of the respective Code object.
  //

  inline bool can_have_weak_objects() const;
  inline void set_can_have_weak_objects(bool value);

  inline bool marked_for_deoptimization() const;
  inline void set_marked_for_deoptimization(bool flag);

  // Back-reference to the Code object.
  // Available only when V8_EXTERNAL_CODE_SPACE is defined.
  DECL_GETTER(code, Code)
  DECL_RELAXED_GETTER(code, Code)

  // When V8_EXTERNAL_CODE_SPACE is enabled, Code objects are allocated in
  // a separate pointer compression cage instead of the cage where all the
  // other objects are allocated.
  // This field contains the code cage base value, which is used for
  // decompressing the reference to the respective Code object. Together, the
  // |code_cage_base| and |code| fields form a full pointer. They are split
  // because the code field must also support atomic access, and the word
  // alignment of the full value is not guaranteed.
  inline PtrComprCageBase code_cage_base() const;
  inline void set_code_cage_base(Address code_cage_base);
  inline PtrComprCageBase code_cage_base(RelaxedLoadTag) const;
  inline void set_code_cage_base(Address code_cage_base, RelaxedStoreTag);
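
  // Illustrative sketch (not part of the original header): conceptually, the
  // full Code pointer is recovered by adding the 32-bit compressed |code|
  // slot to the cage base; names below are hypothetical.
  //
  //   Address base = code_cage_base().address();   // upper 32 bits
  //   Tagged_t compressed = ...;  // 32-bit |code| field, atomically loadable
  //   Address full_ptr = base + static_cast<Address>(compressed);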

  // Cached value of code().InstructionStart().
  // Available only when V8_EXTERNAL_CODE_SPACE is defined.
  DECL_GETTER(code_entry_point, Address)

  inline void SetCodeAndEntryPoint(
      Isolate* isolate_for_sandbox, Code code,
      WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
  // Updates the value of the code entry point. The code must be equal to
  // the code() value.
  inline void UpdateCodeEntryPoint(Isolate* isolate_for_sandbox, Code code);

  inline void AllocateExternalPointerEntries(Isolate* isolate);

  // Initializes internal flags field which stores cached values of some
  // properties of the respective Code object.
  // Available only when V8_EXTERNAL_CODE_SPACE is enabled.
  inline void initialize_flags(CodeKind kind, Builtin builtin_id);

  // Alias for code_entry_point to make it API compatible with Code.
  inline Address InstructionStart() const;

  // Alias for code_entry_point to make it API compatible with Code.
  inline Address raw_instruction_start();

  // Alias for code_entry_point to make it API compatible with Code.
  inline Address entry() const;

#ifdef V8_EXTERNAL_CODE_SPACE
  //
  // A collection of getters and predicates that forward queries to the
  // associated Code object.
  //

  inline CodeKind kind() const;
  inline Builtin builtin_id() const;
  inline bool is_builtin() const;

  inline bool is_optimized_code() const;
  inline bool is_wasm_code() const;

  // Testers for interpreter builtins.
  inline bool is_interpreter_trampoline_builtin() const;

  // Testers for baseline builtins.
  inline bool is_baseline_trampoline_builtin() const;
  inline bool is_baseline_leave_frame_builtin() const;

  // Tells whether the code checks the tiering state in the function's
  // feedback vector.
  inline bool checks_tiering_state() const;

  // Tells whether the outgoing parameters of this code are tagged pointers.
  inline bool has_tagged_outgoing_params() const;

  // [is_maglevved]: Tells whether the code object was generated by the
  // Maglev optimizing compiler.
  inline bool is_maglevved() const;

  // [is_turbofanned]: Tells whether the code object was generated by the
  // TurboFan optimizing compiler.
  inline bool is_turbofanned() const;

  // [is_off_heap_trampoline]: For kind BUILTIN tells whether
  // this is a trampoline to an off-heap builtin.
  inline bool is_off_heap_trampoline() const;

  DECL_GETTER(deoptimization_data, FixedArray)
  DECL_GETTER(bytecode_or_interpreter_data, HeapObject)
  DECL_GETTER(source_position_table, ByteArray)
  DECL_GETTER(bytecode_offset_table, ByteArray)

#endif  // V8_EXTERNAL_CODE_SPACE

  DECL_CAST(CodeDataContainer)

  // Dispatched behavior.
  DECL_PRINTER(CodeDataContainer)
  DECL_VERIFIER(CodeDataContainer)

// Layout description.
#define CODE_DATA_FIELDS(V)                                         \
  /* Strong pointer fields. */                                      \
  V(kPointerFieldsStrongEndOffset, 0)                               \
  /* Weak pointer fields. */                                        \
  V(kNextCodeLinkOffset, kTaggedSize)                               \
  V(kPointerFieldsWeakEndOffset, 0)                                 \
  /* Strong Code pointer fields. */                                 \
  V(kCodeOffset, V8_EXTERNAL_CODE_SPACE_BOOL ? kTaggedSize : 0)     \
  V(kCodePointerFieldsStrongEndOffset, 0)                           \
  /* Raw data fields. */                                            \
  V(kCodeCageBaseUpper32BitsOffset,                                 \
    V8_EXTERNAL_CODE_SPACE_BOOL ? kTaggedSize : 0)                  \
  V(kCodeEntryPointOffset,                                          \
    V8_EXTERNAL_CODE_SPACE_BOOL ? kExternalPointerSize : 0)         \
  V(kFlagsOffset, V8_EXTERNAL_CODE_SPACE_BOOL ? kUInt16Size : 0)    \
  V(kBuiltinIdOffset, V8_EXTERNAL_CODE_SPACE_BOOL ? kInt16Size : 0) \
  V(kKindSpecificFlagsOffset, kInt32Size)                           \
  V(kUnalignedSize, OBJECT_POINTER_PADDING(kUnalignedSize))         \
  /* Total size. */                                                 \
  V(kSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_DATA_FIELDS)
#undef CODE_DATA_FIELDS

  class BodyDescriptor;

  // Flags layout.
#define FLAGS_BIT_FIELDS(V, _) \
  V(KindField, CodeKind, 4, _) \
  /* The other 12 bits are still free. */

  DEFINE_BIT_FIELDS(FLAGS_BIT_FIELDS)
#undef FLAGS_BIT_FIELDS
  STATIC_ASSERT(FLAGS_BIT_FIELDS_Ranges::kBitsCount == 4);
  STATIC_ASSERT(!V8_EXTERNAL_CODE_SPACE_BOOL ||
                (FLAGS_BIT_FIELDS_Ranges::kBitsCount <=
                 FIELD_SIZE(CodeDataContainer::kFlagsOffset) * kBitsPerByte));

 private:
  DECL_ACCESSORS(raw_code, Object)
  DECL_RELAXED_GETTER(raw_code, Object)
  inline void set_code_entry_point(Isolate* isolate, Address value);

  // When V8_EXTERNAL_CODE_SPACE is enabled, the flags field contains cached
  // values of some flags from the respective Code object.
  DECL_RELAXED_UINT16_ACCESSORS(flags)

  friend Factory;
  friend FactoryBase<Factory>;
  friend FactoryBase<LocalFactory>;

  OBJECT_CONSTRUCTORS(CodeDataContainer, HeapObject);
};

// Code describes objects with on-the-fly generated machine code.
class Code : public HeapObject {
 public:
  NEVER_READ_ONLY_SPACE
  // Opaque data type for encapsulating code flags like kind, inline
  // cache state, and arguments count.
  using Flags = uint32_t;

  // All Code objects have the following layout:
  //
  //  +--------------------------+
  //  |          header          |
  //  | padded to code alignment |
  //  +--------------------------+  <-- raw_body_start()
  //  |       instructions       |   == raw_instruction_start()
  //  |           ...            |
  //  | padded to meta alignment |      see kMetadataAlignment
  //  +--------------------------+  <-- raw_instruction_end()
  //  |         metadata         |   == raw_metadata_start() (MS)
  //  |           ...            |
  //  |                          |  <-- MS + handler_table_offset()
  //  |                          |  <-- MS + constant_pool_offset()
  //  |                          |  <-- MS + code_comments_offset()
  //  |                          |  <-- MS + unwinding_info_offset()
  //  | padded to obj alignment  |
  //  +--------------------------+  <-- raw_metadata_end() == raw_body_end()
  //  | padded to code alignment |
  //  +--------------------------+
  //
  // In other words, the variable-size 'body' consists of 'instructions' and
  // 'metadata'.
  //
  // Note the accessor functions below may be prefixed with 'raw'. In this case,
  // raw accessors (e.g. raw_instruction_start) always refer to the on-heap
  // Code object, while camel-case accessors (e.g. InstructionStart) may refer
  // to an off-heap area in the case of embedded builtins.
  //
  // Embedded builtins are on-heap Code objects, with an out-of-line body
  // section. The on-heap Code object contains an essentially empty body
  // section, while accessors, as mentioned above, redirect to the off-heap
  // area. Metadata table offsets remain relative to MetadataStart(), i.e. they
  // point into the off-heap metadata section. The off-heap layout is described
  // in detail in the EmbeddedData class, but at a high level one can assume a
  // dedicated, out-of-line, instruction and metadata section for each embedded
  // builtin *in addition* to the on-heap Code object:
  //
  //  +--------------------------+  <-- InstructionStart()
  //  |   off-heap instructions  |
  //  |           ...            |
  //  +--------------------------+  <-- InstructionEnd()
  //
  //  +--------------------------+  <-- MetadataStart() (MS)
  //  |    off-heap metadata     |
  //  |           ...            |  <-- MS + handler_table_offset()
  //  |                          |  <-- MS + constant_pool_offset()
  //  |                          |  <-- MS + code_comments_offset()
  //  |                          |  <-- MS + unwinding_info_offset()
  //  +--------------------------+  <-- MetadataEnd()

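  // Illustrative sketch (not part of the original header): how the offsets
  // above become addresses. For on-heap code, MetadataStart() corresponds to
  // raw_metadata_start(), so the handler table, for example, begins at
  //
  //   Address handler_table = code.raw_metadata_start() +
  //                           code.handler_table_offset();
  //
  // Tables appear in the order shown in the diagrams, so a table's size is
  // the distance from its offset to the next table's offset.
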
  // Constants for use in static asserts, stating whether the body is adjacent,
  // i.e. instructions and metadata areas are adjacent.
  static constexpr bool kOnHeapBodyIsContiguous = true;
  static constexpr bool kOffHeapBodyIsContiguous = false;
  static constexpr bool kBodyIsContiguous =
      kOnHeapBodyIsContiguous && kOffHeapBodyIsContiguous;

  inline Address raw_body_start() const;
  inline Address raw_body_end() const;
  inline int raw_body_size() const;

  inline Address raw_instruction_start() const;
  inline Address InstructionStart() const;

  inline Address raw_instruction_end() const;
  inline Address InstructionEnd() const;

  // When builtins un-embedding is enabled for the Isolate
  // (see Isolate::is_short_builtin_calls_enabled()), both embedded and
  // un-embedded builtins might be executed, and thus two kinds of |pc|s might
  // appear on the stack.
  // Unlike the parameterless versions of the functions above, the variants
  // below ensure that the returned instruction start corresponds to the given
  // |pc| value. Thus for off-heap trampoline Code objects the result might be
  // the instruction start/end of the embedded code stream or of the
  // un-embedded one. For normal Code objects these functions just return the
  // raw_instruction_start/end() values.
  // TODO(11527): remove these versions once the full solution is ready.
  inline Address InstructionStart(Isolate* isolate, Address pc) const;
  V8_EXPORT_PRIVATE Address OffHeapInstructionStart(Isolate* isolate,
                                                    Address pc) const;
  inline Address InstructionEnd(Isolate* isolate, Address pc) const;
  V8_EXPORT_PRIVATE Address OffHeapInstructionEnd(Isolate* isolate,
                                                  Address pc) const;

  // Computes offset of the |pc| from the instruction start. The |pc| must
  // belong to this code.
  inline int GetOffsetFromInstructionStart(Isolate* isolate, Address pc) const;

  inline int raw_instruction_size() const;
  inline void set_raw_instruction_size(int value);
  inline int InstructionSize() const;

  inline Address raw_metadata_start() const;
  inline Address raw_metadata_end() const;
  inline int raw_metadata_size() const;
  inline void set_raw_metadata_size(int value);
  inline int MetadataSize() const;

  // The metadata section is aligned to this value.
  static constexpr int kMetadataAlignment = kIntSize;

  // [safepoint_table_offset]: The offset where the safepoint table starts.
  inline int safepoint_table_offset() const { return 0; }
  inline Address SafepointTableAddress() const;
  inline int safepoint_table_size() const;
  inline bool has_safepoint_table() const;

  // [handler_table_offset]: The offset where the exception handler table
  // starts.
  inline int handler_table_offset() const;
  inline void set_handler_table_offset(int offset);
  inline Address HandlerTableAddress() const;
  inline int handler_table_size() const;
  inline bool has_handler_table() const;

  // [constant_pool_offset]: Offset of the constant pool.
  inline int constant_pool_offset() const;
  inline void set_constant_pool_offset(int offset);
  inline Address constant_pool() const;
  inline int constant_pool_size() const;
  inline bool has_constant_pool() const;

  // [code_comments_offset]: Offset of the code comment section.
  inline int code_comments_offset() const;
  inline void set_code_comments_offset(int offset);
  inline Address code_comments() const;
  inline int code_comments_size() const;
  inline bool has_code_comments() const;

  // [unwinding_info_offset]: Offset of the unwinding info section.
  inline int32_t unwinding_info_offset() const;
  inline void set_unwinding_info_offset(int32_t offset);
  inline Address unwinding_info_start() const;
  inline Address unwinding_info_end() const;
  inline int unwinding_info_size() const;
  inline bool has_unwinding_info() const;

#ifdef ENABLE_DISASSEMBLER
  const char* GetName(Isolate* isolate) const;
  V8_EXPORT_PRIVATE void Disassemble(const char* name, std::ostream& os,
                                     Isolate* isolate,
                                     Address current_pc = kNullAddress);
#endif

  // [relocation_info]: Code relocation information
  DECL_ACCESSORS(relocation_info, ByteArray)

  // This function should be called only from GC.
  void ClearEmbeddedObjects(Heap* heap);

  // [deoptimization_data]: Array containing data for deopt for non-baseline
  // code.
  DECL_ACCESSORS(deoptimization_data, FixedArray)
  // [bytecode_or_interpreter_data]: BytecodeArray or InterpreterData for
  // baseline code.
  DECL_ACCESSORS(bytecode_or_interpreter_data, HeapObject)

  // [source_position_table]: ByteArray for the source positions table for
  // non-baseline code.
  DECL_ACCESSORS(source_position_table, ByteArray)
  // [bytecode_offset_table]: ByteArray for the bytecode offset for baseline
  // code.
  DECL_ACCESSORS(bytecode_offset_table, ByteArray)

  // If source positions have not been collected or an exception has been
  // thrown, this will return empty_byte_array.
  inline ByteArray SourcePositionTable(SharedFunctionInfo sfi) const;

  // [code_data_container]: A container indirection for all mutable fields.
  DECL_RELEASE_ACQUIRE_ACCESSORS(code_data_container, CodeDataContainer)

  // [next_code_link]: Link for lists of optimized or deoptimized code.
  // Note that this field is stored in the {CodeDataContainer} to be mutable.
  inline Object next_code_link() const;
  inline void set_next_code_link(Object value);

  // Unchecked accessors to be used during GC.
  inline ByteArray unchecked_relocation_info() const;

  inline int relocation_size() const;

  // [kind]: Access to specific code kind.
  inline CodeKind kind() const;

  inline bool is_optimized_code() const;
  inline bool is_wasm_code() const;

  // Testers for interpreter builtins.
  inline bool is_interpreter_trampoline_builtin() const;

  // Testers for baseline builtins.
  inline bool is_baseline_trampoline_builtin() const;
  inline bool is_baseline_leave_frame_builtin() const;

  // Tells whether the code checks the tiering state in the function's
  // feedback vector.
  inline bool checks_tiering_state() const;

  // Tells whether the outgoing parameters of this code are tagged pointers.
  inline bool has_tagged_outgoing_params() const;

  // [is_turbofanned]: Tells whether the code object was generated by the
  // TurboFan optimizing compiler.
  inline bool is_turbofanned() const;

  // TODO(jgruber): Reconsider these predicates; we should probably merge them
  // and rename to something appropriate.
  inline bool is_maglevved() const;

  // [can_have_weak_objects]: If CodeKindIsOptimizedJSFunction(kind), tells
  // whether the embedded objects in code should be treated weakly.
  inline bool can_have_weak_objects() const;
  inline void set_can_have_weak_objects(bool value);

  // [builtin]: For builtins, tells which builtin index the code object
  // has. The builtin index is a non-negative integer for builtins, and
  // Builtin::kNoBuiltinId (-1) otherwise.
  inline Builtin builtin_id() const;
  inline void set_builtin_id(Builtin builtin);
  inline bool is_builtin() const;

  inline unsigned inlined_bytecode_size() const;
  inline void set_inlined_bytecode_size(unsigned size);

  // [uses_safepoint_table]: Whether this Code object uses safepoint tables
  // (note the table may still be empty, see has_safepoint_table).
  inline bool uses_safepoint_table() const;

  // [stack_slots]: If {uses_safepoint_table()}, the number of stack slots
  // reserved in the code prologue; otherwise 0.
  inline int stack_slots() const;

  // [marked_for_deoptimization]: If CodeKindCanDeoptimize(kind), tells whether
  // the code is going to be deoptimized.
  inline bool marked_for_deoptimization() const;
  inline void set_marked_for_deoptimization(bool flag);

  // [embedded_objects_cleared]: If CodeKindIsOptimizedJSFunction(kind), tells
  // whether the embedded objects in the code marked for deoptimization were
  // cleared. Note that embedded_objects_cleared() implies
  // marked_for_deoptimization().
  inline bool embedded_objects_cleared() const;
  inline void set_embedded_objects_cleared(bool flag);

  // [is_promise_rejection]: For kind BUILTIN, tells whether the exception
  // thrown by the code will lead to a promise rejection, or stay uncaught if
  // both this and is_exception_caught are set.
  // Use GetBuiltinCatchPrediction to access this.
  inline void set_is_promise_rejection(bool flag);

  // [is_off_heap_trampoline]: For kind BUILTIN tells whether
  // this is a trampoline to an off-heap builtin.
  inline bool is_off_heap_trampoline() const;

  // Get the safepoint entry for the given pc.
  SafepointEntry GetSafepointEntry(Isolate* isolate, Address pc);

  // The entire code object including its header is copied verbatim to the
  // snapshot so that it can be written in one, fast, memcpy during
  // deserialization. The deserializer will overwrite some pointers, rather
  // like a runtime linker, but the random allocation addresses used in the
  // mksnapshot process would still be present in the unlinked snapshot data,
  // which would make snapshot production non-reproducible. This method wipes
  // out the to-be-overwritten header data for reproducible snapshots.
  inline void WipeOutHeader();

  // When V8_EXTERNAL_CODE_SPACE is enabled, Code objects are allocated in
  // a separate pointer compression cage instead of the cage where all the
  // other objects are allocated.
  // This field contains the cage base value, which is used for decompressing
  // the references to non-Code objects (map, deoptimization_data, etc.).
  inline PtrComprCageBase main_cage_base() const;
  inline PtrComprCageBase main_cage_base(RelaxedLoadTag) const;
  inline void set_main_cage_base(Address cage_base, RelaxedStoreTag);

  // Clear uninitialized padding space. This ensures that the snapshot content
  // is deterministic. Depending on the V8 build mode there could be no padding.
  inline void clear_padding();
  // Initialize the flags field. Similar to clear_padding above, this ensures
  // that the snapshot content is deterministic.
  inline void initialize_flags(CodeKind kind, bool is_turbofanned,
                               int stack_slots, bool is_off_heap_trampoline);

  // Convert a target address into a code object.
  static inline Code GetCodeFromTargetAddress(Address address);

  // Convert an entry address into an object.
  static inline Code GetObjectFromEntryAddress(Address location_of_address);

  // Returns the size of code and its metadata. This includes the size of code
  // relocation information and deoptimization data.
  inline int SizeIncludingMetadata() const;

  // Returns the address of the first relocation info (read backwards!).
  inline byte* relocation_start() const;

  // Returns the address right after the relocation info (read backwards!).
  inline byte* relocation_end() const;

  // Code entry point.
  inline Address entry() const;

  // Returns true if pc is inside this object's instructions.
  inline bool contains(Isolate* isolate, Address pc);

  // Relocate the code by delta bytes. Called to signal that this code
  // object has been moved by delta bytes.
  void Relocate(intptr_t delta);

  // Migrate code from desc without flushing the instruction cache.
  void CopyFromNoFlush(ByteArray reloc_info, Heap* heap, const CodeDesc& desc);
  void RelocateFromDesc(ByteArray reloc_info, Heap* heap, const CodeDesc& desc);

  // Copy the RelocInfo portion of |desc| to |dest|. The ByteArray must be
  // exactly the same size as the RelocInfo in |desc|.
  static inline void CopyRelocInfoToByteArray(ByteArray dest,
                                              const CodeDesc& desc);

  inline uintptr_t GetBaselineStartPCForBytecodeOffset(int bytecode_offset,
                                                       BytecodeArray bytecodes);

  inline uintptr_t GetBaselineEndPCForBytecodeOffset(int bytecode_offset,
                                                     BytecodeArray bytecodes);

  // Returns the PC of the next bytecode in execution order.
  // If the bytecode at the given offset is JumpLoop, the PC of the jump target
  // is returned. Other jumps are not allowed.
  // For other bytecodes this is equivalent to
  // GetBaselineEndPCForBytecodeOffset.
  inline uintptr_t GetBaselinePCForNextExecutedBytecode(
      int bytecode_offset, BytecodeArray bytecodes);

  inline int GetBytecodeOffsetForBaselinePC(Address baseline_pc,
                                            BytecodeArray bytecodes);

  // Flushes the instruction cache for the executable instructions of this code
  // object. Make sure to call this while the code is still writable.
  void FlushICache() const;

  // Returns the object size for a given body (used for allocation).
  static int SizeFor(int body_size) {
    return RoundUp(kHeaderSize + body_size, kCodeAlignment);
  }
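
  // Illustrative example (not part of the original header): SizeFor returns
  // the smallest multiple of kCodeAlignment that holds the header plus body.
  // E.g., under hypothetical values kHeaderSize == 96 and kCodeAlignment ==
  // 64 (both are configuration-dependent), SizeFor(100) == RoundUp(196, 64)
  // == 256.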

  inline int CodeSize() const;

  // Hides HeapObject::Size(...) and redirects queries to CodeSize().
  DECL_GETTER(Size, int)

  DECL_CAST(Code)

  // Dispatched behavior.
  DECL_PRINTER(Code)
  DECL_VERIFIER(Code)

  bool CanDeoptAt(Isolate* isolate, Address pc);

  void SetMarkedForDeoptimization(const char* reason);

  inline HandlerTable::CatchPrediction GetBuiltinCatchPrediction();

  bool IsIsolateIndependent(Isolate* isolate);

  inline bool CanContainWeakObjects();

  inline bool IsWeakObject(HeapObject object);

  static inline bool IsWeakObjectInOptimizedCode(HeapObject object);

  static inline bool IsWeakObjectInDeoptimizationLiteralArray(Object object);

  // Returns false if this is an embedded builtin Code object that's in
  // read_only_space and hence doesn't have execute permissions.
  inline bool IsExecutable();

  // Returns true if the function is inlined in the code.
  bool Inlines(SharedFunctionInfo sfi);

  class OptimizedCodeIterator;

  // Layout description.
#define CODE_FIELDS(V)                                                        \
  V(kRelocationInfoOffset, kTaggedSize)                                       \
  V(kDeoptimizationDataOrInterpreterDataOffset, kTaggedSize)                  \
  V(kPositionTableOffset, kTaggedSize)                                        \
  V(kCodeDataContainerOffset, kTaggedSize)                                    \
  /* Data or code not directly visited by GC starts here. */                  \
  /* The serializer needs to copy bytes starting from here verbatim. */       \
  /* Objects embedded into code are visited via reloc info. */                \
  V(kDataStart, 0)                                                            \
  V(kMainCageBaseUpper32BitsOffset,                                           \
    V8_EXTERNAL_CODE_SPACE_BOOL ? kTaggedSize : 0)                            \
  V(kInstructionSizeOffset, kIntSize)                                         \
  V(kMetadataSizeOffset, kIntSize)                                            \
  V(kFlagsOffset, kInt32Size)                                                 \
  V(kBuiltinIndexOffset, kIntSize)                                            \
  V(kInlinedBytecodeSizeOffset, kIntSize)                                     \
  /* Offsets describing inline metadata tables, relative to MetadataStart. */ \
  V(kHandlerTableOffsetOffset, kIntSize)                                      \
  V(kConstantPoolOffsetOffset,                                                \
    FLAG_enable_embedded_constant_pool ? kIntSize : 0)                        \
  V(kCodeCommentsOffsetOffset, kIntSize)                                      \
  V(kUnwindingInfoOffsetOffset, kInt32Size)                                   \
  V(kUnalignedHeaderSize, 0)                                                  \
  /* Add padding to align the instruction start following right after */      \
  /* the Code object header. */                                               \
  V(kOptionalPaddingOffset, CODE_POINTER_PADDING(kOptionalPaddingOffset))     \
  V(kHeaderSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_FIELDS)
#undef CODE_FIELDS

  // This documents the amount of free space we have in each Code object header
  // due to padding for code alignment.
#if V8_TARGET_ARCH_ARM64
  static constexpr int kHeaderPaddingSize =
      V8_EXTERNAL_CODE_SPACE_BOOL ? 8 : (COMPRESS_POINTERS_BOOL ? 12 : 24);
#elif V8_TARGET_ARCH_MIPS64
  static constexpr int kHeaderPaddingSize = 24;
#elif V8_TARGET_ARCH_LOONG64
  static constexpr int kHeaderPaddingSize = 24;
#elif V8_TARGET_ARCH_X64
  static constexpr int kHeaderPaddingSize =
      V8_EXTERNAL_CODE_SPACE_BOOL ? 8 : (COMPRESS_POINTERS_BOOL ? 12 : 56);
#elif V8_TARGET_ARCH_ARM
  static constexpr int kHeaderPaddingSize = 12;
#elif V8_TARGET_ARCH_IA32
  static constexpr int kHeaderPaddingSize = 12;
#elif V8_TARGET_ARCH_MIPS
  static constexpr int kHeaderPaddingSize = 12;
#elif V8_TARGET_ARCH_PPC64
  static constexpr int kHeaderPaddingSize =
      FLAG_enable_embedded_constant_pool ? (COMPRESS_POINTERS_BOOL ? 8 : 52)
                                         : (COMPRESS_POINTERS_BOOL ? 12 : 56);
#elif V8_TARGET_ARCH_S390X
  static constexpr int kHeaderPaddingSize = COMPRESS_POINTERS_BOOL ? 12 : 24;
#elif V8_TARGET_ARCH_RISCV64
  static constexpr int kHeaderPaddingSize = (COMPRESS_POINTERS_BOOL ? 12 : 24);
#else
#error Unknown architecture.
#endif
  STATIC_ASSERT(FIELD_SIZE(kOptionalPaddingOffset) == kHeaderPaddingSize);

  class BodyDescriptor;

  // Flags layout.  base::BitField<type, shift, size>.
#define CODE_FLAGS_BIT_FIELDS(V, _)    \
  V(KindField, CodeKind, 4, _)         \
  V(IsTurbofannedField, bool, 1, _)    \
  V(StackSlotsField, int, 24, _)       \
  V(IsOffHeapTrampoline, bool, 1, _)
  DEFINE_BIT_FIELDS(CODE_FLAGS_BIT_FIELDS)
#undef CODE_FLAGS_BIT_FIELDS
  STATIC_ASSERT(kCodeKindCount <= KindField::kNumValues);
  STATIC_ASSERT(CODE_FLAGS_BIT_FIELDS_Ranges::kBitsCount == 30);
  STATIC_ASSERT(CODE_FLAGS_BIT_FIELDS_Ranges::kBitsCount <=
                FIELD_SIZE(kFlagsOffset) * kBitsPerByte);

  // KindSpecificFlags layout.
#define CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS(V, _) \
  V(MarkedForDeoptimizationField, bool, 1, _)     \
  V(EmbeddedObjectsClearedField, bool, 1, _)      \
  V(CanHaveWeakObjectsField, bool, 1, _)          \
  V(IsPromiseRejectionField, bool, 1, _)
  DEFINE_BIT_FIELDS(CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS)
#undef CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS
  STATIC_ASSERT(CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS_Ranges::kBitsCount == 4);
  STATIC_ASSERT(CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS_Ranges::kBitsCount <=
                FIELD_SIZE(CodeDataContainer::kKindSpecificFlagsOffset) *
                    kBitsPerByte);

  // The {marked_for_deoptimization} field is accessed from generated code.
  static const int kMarkedForDeoptimizationBit =
      MarkedForDeoptimizationField::kShift;

  static const int kArgumentsBits = 16;
  // Reserve one argument count value as the "don't adapt arguments" sentinel.
  static const int kMaxArguments = (1 << kArgumentsBits) - 2;
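  // (Worked example: with kArgumentsBits == 16 the raw field can represent
  // the values 0..65535; one value is reserved as the sentinel, so
  // kMaxArguments == (1 << 16) - 2 == 65534.)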

 private:
  friend class RelocIterator;
  friend class EvacuateVisitorBase;

  inline CodeDataContainer GCSafeCodeDataContainer(AcquireLoadTag) const;

  bool is_promise_rejection() const;

  enum BytecodeToPCPosition {
    kPcAtStartOfBytecode,
    // End of bytecode equals the start of the next bytecode.
    // We need it when we deoptimize to the next bytecode (lazy deopt or deopt
    // of non-topmost frame).
    kPcAtEndOfBytecode
  };
  inline uintptr_t GetBaselinePCForBytecodeOffset(int bytecode_offset,
                                                  BytecodeToPCPosition position,
                                                  BytecodeArray bytecodes);

  OBJECT_CONSTRUCTORS(Code, HeapObject);
};

// TODO(v8:11880): move these functions to CodeDataContainer once they are no
// longer used from Code.
V8_EXPORT_PRIVATE Address OffHeapInstructionStart(HeapObject code,
                                                  Builtin builtin);
V8_EXPORT_PRIVATE Address OffHeapInstructionEnd(HeapObject code,
                                                Builtin builtin);
V8_EXPORT_PRIVATE int OffHeapInstructionSize(HeapObject code, Builtin builtin);

V8_EXPORT_PRIVATE Address OffHeapMetadataStart(HeapObject code,
                                               Builtin builtin);
V8_EXPORT_PRIVATE Address OffHeapMetadataEnd(HeapObject code, Builtin builtin);
V8_EXPORT_PRIVATE int OffHeapMetadataSize(HeapObject code, Builtin builtin);

V8_EXPORT_PRIVATE Address OffHeapSafepointTableAddress(HeapObject code,
                                                       Builtin builtin);
V8_EXPORT_PRIVATE int OffHeapSafepointTableSize(HeapObject code,
                                                Builtin builtin);
V8_EXPORT_PRIVATE Address OffHeapHandlerTableAddress(HeapObject code,
                                                     Builtin builtin);
V8_EXPORT_PRIVATE int OffHeapHandlerTableSize(HeapObject code, Builtin builtin);
V8_EXPORT_PRIVATE Address OffHeapConstantPoolAddress(HeapObject code,
                                                     Builtin builtin);
V8_EXPORT_PRIVATE int OffHeapConstantPoolSize(HeapObject code, Builtin builtin);
V8_EXPORT_PRIVATE Address OffHeapCodeCommentsAddress(HeapObject code,
                                                     Builtin builtin);
V8_EXPORT_PRIVATE int OffHeapCodeCommentsSize(HeapObject code, Builtin builtin);
V8_EXPORT_PRIVATE Address OffHeapUnwindingInfoAddress(HeapObject code,
                                                      Builtin builtin);
V8_EXPORT_PRIVATE int OffHeapUnwindingInfoSize(HeapObject code,
                                               Builtin builtin);

class Code::OptimizedCodeIterator {
 public:
  explicit OptimizedCodeIterator(Isolate* isolate);
  OptimizedCodeIterator(const OptimizedCodeIterator&) = delete;
  OptimizedCodeIterator& operator=(const OptimizedCodeIterator&) = delete;
  Code Next();

 private:
  NativeContext next_context_;
  Code current_code_;
  Isolate* isolate_;

  DISALLOW_GARBAGE_COLLECTION(no_gc)
};

// Helper functions for converting Code objects to CodeDataContainer and back
// when V8_EXTERNAL_CODE_SPACE is enabled.
inline CodeT ToCodeT(Code code);
inline Handle<CodeT> ToCodeT(Handle<Code> code, Isolate* isolate);
inline Code FromCodeT(CodeT code);
inline Code FromCodeT(CodeT code, RelaxedLoadTag);
inline Code FromCodeT(CodeT code, AcquireLoadTag);
inline Code FromCodeT(CodeT code, PtrComprCageBase);
inline Code FromCodeT(CodeT code, PtrComprCageBase, RelaxedLoadTag);
inline Code FromCodeT(CodeT code, PtrComprCageBase, AcquireLoadTag);
inline Handle<Code> FromCodeT(Handle<CodeT> code, Isolate* isolate);
inline CodeDataContainer CodeDataContainerFromCodeT(CodeT code);
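
// Illustrative usage sketch (not part of the original header). As of this
// revision, CodeT aliases CodeDataContainer when V8_EXTERNAL_CODE_SPACE is
// enabled and Code otherwise, so these conversions are cheap aliases in the
// latter configuration. A hypothetical call site:
//
//   CodeT codet = ToCodeT(code);                       // Code -> CodeT
//   Code back = FromCodeT(codet);                      // CodeT -> Code
//   CodeDataContainer cdc = CodeDataContainerFromCodeT(codet);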

class AbstractCode : public HeapObject {
 public:
  NEVER_READ_ONLY_SPACE

  int SourcePosition(int offset);
  int SourceStatementPosition(int offset);

  // Returns the address of the first instruction.
  inline Address raw_instruction_start();

  // Returns the address of the first instruction. For off-heap code objects
  // this differs from instruction_start (which would point to the off-heap
  // trampoline instead).
  inline Address InstructionStart();

  // Returns the address right after the last instruction.
  inline Address raw_instruction_end();

  // Returns the address right after the last instruction. For off-heap code
  // objects this differs from instruction_end (which would point to the
  // off-heap trampoline instead).
  inline Address InstructionEnd();

  // Returns the size of the code instructions.
  inline int raw_instruction_size();

  // Returns the size of the native instructions, including embedded
  // data such as the safepoints table. For off-heap code objects
  // this may differ from instruction_size in that this will return the size of
  // the off-heap instruction stream rather than the on-heap trampoline located
  // at instruction_start.
  inline int InstructionSize();

  // Return the source position table for interpreter code.
  inline ByteArray SourcePositionTable(SharedFunctionInfo sfi);

  void DropStackFrameCache();

  // Returns the size of instructions and the metadata.
  inline int SizeIncludingMetadata();

  // Returns true if pc is inside this object's instructions.
  inline bool contains(Isolate* isolate, Address pc);

  // Returns the kind of the code.
  inline CodeKind kind();

  DECL_CAST(AbstractCode)
  inline Code GetCode();
  inline BytecodeArray GetBytecodeArray();

  OBJECT_CONSTRUCTORS(AbstractCode, HeapObject);

 private:
  inline ByteArray SourcePositionTableInternal();
};

// Dependent code is conceptually the list of {Code, DependencyGroup} tuples
// associated with an object, where the dependency group is a reason that could
// lead to a deopt of the corresponding code.
//
// Implementation details: DependentCode is a weak array list containing
// entries, where each entry consists of a (weak) Code object and the
// DependencyGroups bitset as a Smi.
//
// Note the underlying weak array list currently never shrinks physically (the
// contents may shrink).
// TODO(jgruber): Consider adding physical shrinking.
class DependentCode : public WeakArrayList {
 public:
  DECL_CAST(DependentCode)

  enum DependencyGroup {
    // Group of code objects that embed a transition to this map, and depend on
    // being deoptimized when the transition is replaced by a new version.
    kTransitionGroup = 1 << 0,
    // Group of code objects that omit run-time prototype checks for prototypes
    // described by this map. The group is deoptimized whenever the following
    // conditions hold, possibly invalidating the assumptions embedded in the
    // code:
    // a) A fast-mode object described by this map changes shape (and
    // transitions to a new map), or
    // b) A dictionary-mode prototype described by this map changes shape, the
    // const-ness of one of its properties changes, or its [[Prototype]]
    // changes (only the latter causes a transition).
    kPrototypeCheckGroup = 1 << 1,
    // Group of code objects that depends on global property values in property
    // cells not being changed.
    kPropertyCellChangedGroup = 1 << 2,
    // Group of code objects that omit run-time checks for field(s) introduced
    // by this map, i.e. for the field type.
    kFieldTypeGroup = 1 << 3,
    kFieldConstGroup = 1 << 4,
    kFieldRepresentationGroup = 1 << 5,
    // Group of code objects that omit run-time type checks for initial maps of
    // constructors.
    kInitialMapChangedGroup = 1 << 6,
    // Group of code objects that depends on tenuring information in
    // AllocationSites not being changed.
    kAllocationSiteTenuringChangedGroup = 1 << 7,
    // Group of code objects that depends on element transition information in
    // AllocationSites not being changed.
    kAllocationSiteTransitionChangedGroup = 1 << 8,
    // IMPORTANT: The last bit must fit into a Smi, i.e. into 31 bits.
  };
  using DependencyGroups = base::Flags<DependencyGroup, uint32_t>;

  static const char* DependencyGroupName(DependencyGroup group);

  // Register a dependency of {code} on {object}, of the kinds given by
  // {groups}.
  V8_EXPORT_PRIVATE static void InstallDependency(Isolate* isolate,
                                                  Handle<Code> code,
                                                  Handle<HeapObject> object,
                                                  DependencyGroups groups);

  void DeoptimizeDependentCodeGroup(Isolate* isolate, DependencyGroups groups);

  bool MarkCodeForDeoptimization(DependencyGroups deopt_groups);

  V8_EXPORT_PRIVATE static DependentCode empty_dependent_code(
      const ReadOnlyRoots& roots);
  static constexpr RootIndex kEmptyDependentCode =
      RootIndex::kEmptyWeakArrayList;

  // Constants exposed for tests.
  static constexpr int kSlotsPerEntry = 2;  // {code: weak Code, groups: Smi}.
  static constexpr int kCodeSlotOffset = 0;
  static constexpr int kGroupsSlotOffset = 1;
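
  // Illustrative sketch (not part of the original header): entry i of the
  // underlying WeakArrayList occupies the two slots
  //
  //   i * kSlotsPerEntry + kCodeSlotOffset    // weak Code
  //   i * kSlotsPerEntry + kGroupsSlotOffset  // DependencyGroups as a Smi
  //
  // so a list holding N entries has length LengthFor(N) == N * 2.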

 private:
  // Get/Set {object}'s {DependentCode}.
  static DependentCode GetDependentCode(Handle<HeapObject> object);
  static void SetDependentCode(Handle<HeapObject> object,
                               Handle<DependentCode> dep);

  static Handle<DependentCode> New(Isolate* isolate, DependencyGroups groups,
                                   Handle<Code> code);
  static Handle<DependentCode> InsertWeakCode(Isolate* isolate,
                                              Handle<DependentCode> entries,
                                              DependencyGroups groups,
                                              Handle<Code> code);

  // The callback is called for all non-cleared entries, and should return true
  // iff the current entry should be cleared.
  using IterateAndCompactFn = std::function<bool(CodeT, DependencyGroups)>;
  void IterateAndCompact(const IterateAndCompactFn& fn);

  // Fills the given entry with the last non-cleared entry in this list, and
  // returns the new length after the last non-cleared entry has been moved.
  int FillEntryFromBack(int index, int length);

  static constexpr int LengthFor(int number_of_entries) {
    return number_of_entries * kSlotsPerEntry;
  }

  OBJECT_CONSTRUCTORS(DependentCode, WeakArrayList);
};

DEFINE_OPERATORS_FOR_FLAGS(DependentCode::DependencyGroups)

// BytecodeArray represents a sequence of interpreter bytecodes.
class BytecodeArray
    : public TorqueGeneratedBytecodeArray<BytecodeArray, FixedArrayBase> {
 public:
  DEFINE_TORQUE_GENERATED_OSRURGENCY_AND_INSTALL_TARGET()

  enum Age {
    kNoAgeBytecodeAge = 0,
    kQuadragenarianBytecodeAge,
    kQuinquagenarianBytecodeAge,
    kSexagenarianBytecodeAge,
    kSeptuagenarianBytecodeAge,
    kOctogenarianBytecodeAge,
    kAfterLastBytecodeAge,
    kFirstBytecodeAge = kNoAgeBytecodeAge,
    kLastBytecodeAge = kAfterLastBytecodeAge - 1,
    kBytecodeAgeCount = kAfterLastBytecodeAge - kFirstBytecodeAge - 1,
    kIsOldBytecodeAge = kSexagenarianBytecodeAge
  };

  static constexpr int SizeFor(int length) {
    return OBJECT_POINTER_ALIGN(kHeaderSize + length);
  }

  inline byte get(int index) const;
  inline void set(int index, byte value);

  inline Address GetFirstBytecodeAddress();

  inline int32_t frame_size() const;
  inline void set_frame_size(int32_t frame_size);

  // Note: The register count is derived from frame_size.
  inline int register_count() const;

  // Note: the parameter count includes the implicit 'this' receiver.
  inline int32_t parameter_count() const;
  inline void set_parameter_count(int32_t number_of_parameters);

  inline interpreter::Register incoming_new_target_or_generator_register()
      const;
  inline void set_incoming_new_target_or_generator_register(
      interpreter::Register incoming_new_target_or_generator_register);

  // The [osr_urgency] controls when OSR is attempted, and is incremented as
  // the function becomes hotter. When the current loop depth is less than the
  // osr_urgency, JumpLoop calls into runtime to attempt OSR optimization.
  static constexpr int kMaxOsrUrgency = 6;
  STATIC_ASSERT(kMaxOsrUrgency <= OsrUrgencyBits::kMax);
  inline int osr_urgency() const;
  inline void set_osr_urgency(int urgency);
  inline void reset_osr_urgency();
  inline void RequestOsrAtNextOpportunity();

  // The [osr_install_target] is used upon finishing concurrent OSR
  // compilation; instead of bumping the osr_urgency (which would target all
  // JumpLoops of appropriate loop_depth), we target a specific JumpLoop at the
  // given bytecode offset.
  static constexpr int kNoOsrInstallTarget = 0;
  static constexpr int OsrInstallTargetFor(BytecodeOffset offset) {
    // Any set `osr_install_target` must be non-zero since zero is the 'unset'
    // value and is ignored by generated code. For branchless code (both here
    // and in generated code), we simply OR in a 1.
    STATIC_ASSERT(kNoOsrInstallTarget == 0);
    return (offset.ToInt() | 1) &
           (OsrInstallTargetBits::kMask >> OsrInstallTargetBits::kShift);
  }
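
  // Illustrative examples (not part of the original header) of the encoding
  // above: the result is never kNoOsrInstallTarget thanks to the OR with 1,
  // at the cost of losing the offset's low bit:
  //
  //   OsrInstallTargetFor(BytecodeOffset(0)) == 1
  //   OsrInstallTargetFor(BytecodeOffset(6)) == 7
  //   OsrInstallTargetFor(BytecodeOffset(7)) == 7  // collides with offset 6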

  inline int osr_install_target();
  inline void set_osr_install_target(BytecodeOffset jump_loop_offset);
  inline void reset_osr_install_target();

  inline void reset_osr_urgency_and_install_target();

  static constexpr int kBytecodeAgeSize = kUInt16Size;
  static_assert(kBytecodeAgeOffset + kBytecodeAgeSize - 1 ==
                kBytecodeAgeOffsetEnd);

  // InterpreterEntryTrampoline and other builtins expect these fields to be
  // next to each other and fill 32 bits in total, since they write a 32-bit
  // value to reset them.
  static constexpr bool kOsrStateAndBytecodeAgeAreContiguous32Bits =
      kBytecodeAgeOffset == kOsrUrgencyAndInstallTargetOffset + kUInt16Size &&
      kBytecodeAgeSize == kUInt16Size;
  static_assert(kOsrStateAndBytecodeAgeAreContiguous32Bits);

  inline Age bytecode_age() const;
  inline void set_bytecode_age(Age age);

  inline bool HasSourcePositionTable() const;
  inline bool DidSourcePositionGenerationFail() const;

  // If source positions have not been collected or an exception has been
  // thrown, this will return empty_byte_array.
  inline ByteArray SourcePositionTable() const;

  // Indicates that an attempt was made to collect source positions, but that
  // it failed, most likely due to stack exhaustion. When in this state,
  // |SourcePositionTable| will return an empty byte array rather than crashing
  // as it would if no attempt was ever made to collect source positions.
  inline void SetSourcePositionsFailedToCollect();

  inline int BytecodeArraySize();

  // Returns the size of bytecode and its metadata. This includes the size of
  // bytecode, constant pool, source position table, and handler table.
  inline int SizeIncludingMetadata();

  DECL_PRINTER(BytecodeArray)
  DECL_VERIFIER(BytecodeArray)

  V8_EXPORT_PRIVATE void Disassemble(std::ostream& os);

  void CopyBytecodesTo(BytecodeArray to);

  // Bytecode aging
  V8_EXPORT_PRIVATE bool IsOld() const;
  V8_EXPORT_PRIVATE void MakeOlder();

  // Clear uninitialized padding space. This ensures that the snapshot content
  // is deterministic.
  inline void clear_padding();

  // Maximal memory consumption for a single BytecodeArray.
  static const int kMaxSize = 512 * MB;
  // Maximal length of a single BytecodeArray.
  static const int kMaxLength = kMaxSize - kHeaderSize;

  class BodyDescriptor;

 private:
  // Hide accessors inherited from generated class. Use parameter_count instead.
  DECL_INT_ACCESSORS(parameter_size)

  TQ_OBJECT_CONSTRUCTORS(BytecodeArray)
};

// This class holds data required during deoptimization. It does not have its
// own instance type.
class DeoptimizationLiteralArray : public WeakFixedArray {
 public:
  // Getters for literals. These include runtime checks that the pointer was
  // not cleared, if the literal was held weakly.
  inline Object get(int index) const;
  inline Object get(PtrComprCageBase cage_base, int index) const;

  // Setter for literals. This will set the object as strong or weak depending
  // on Code::IsWeakObjectInOptimizedCode.
  inline void set(int index, Object value);

  DECL_CAST(DeoptimizationLiteralArray)

  OBJECT_CONSTRUCTORS(DeoptimizationLiteralArray, WeakFixedArray);
};

// DeoptimizationData is a fixed array used to hold the deoptimization data for
// optimized code.  It also contains information about functions that were
// inlined.  If N different functions were inlined then the first N elements of
// the literal array will contain these functions.
//
// It can be empty.
class DeoptimizationData : public FixedArray {
 public:
  // Layout description.  Indices in the array.
  static const int kTranslationByteArrayIndex = 0;
  static const int kInlinedFunctionCountIndex = 1;
  static const int kLiteralArrayIndex = 2;
  static const int kOsrBytecodeOffsetIndex = 3;
  static const int kOsrPcOffsetIndex = 4;
  static const int kOptimizationIdIndex = 5;
  static const int kSharedFunctionInfoIndex = 6;
  static const int kInliningPositionsIndex = 7;
  static const int kDeoptExitStartIndex = 8;
  static const int kEagerDeoptCountIndex = 9;
  static const int kLazyDeoptCountIndex = 10;
  static const int kFirstDeoptEntryIndex = 11;

  // Offsets of deopt entry elements relative to the start of the entry.
  static const int kBytecodeOffsetRawOffset = 0;
  static const int kTranslationIndexOffset = 1;
  static const int kPcOffset = 2;
#ifdef DEBUG
  static const int kNodeIdOffset = 3;
  static const int kDeoptEntrySize = 4;
#else   // DEBUG
  static const int kDeoptEntrySize = 3;
#endif  // DEBUG
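
  // Illustrative sketch (not part of the original header): entry i of the
  // deopt-entry region starts at array index
  //
  //   kFirstDeoptEntryIndex + i * kDeoptEntrySize
  //
  // e.g. in release builds (kDeoptEntrySize == 3), entry 2's Pc element lives
  // at index 11 + 2 * 3 + kPcOffset == 19.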

// Simple element accessors.
#define DECL_ELEMENT_ACCESSORS(name, type) \
  inline type name() const;                \
  inline void Set##name(type value);

  DECL_ELEMENT_ACCESSORS(TranslationByteArray, TranslationArray)
  DECL_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
  DECL_ELEMENT_ACCESSORS(LiteralArray, DeoptimizationLiteralArray)
  DECL_ELEMENT_ACCESSORS(OsrBytecodeOffset, Smi)
  DECL_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
  DECL_ELEMENT_ACCESSORS(OptimizationId, Smi)
  DECL_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
  DECL_ELEMENT_ACCESSORS(InliningPositions, PodArray<InliningPosition>)
  DECL_ELEMENT_ACCESSORS(DeoptExitStart, Smi)
  DECL_ELEMENT_ACCESSORS(EagerDeoptCount, Smi)
  DECL_ELEMENT_ACCESSORS(LazyDeoptCount, Smi)

#undef DECL_ELEMENT_ACCESSORS

// Accessors for elements of the ith deoptimization entry.
#define DECL_ENTRY_ACCESSORS(name, type) \
  inline type name(int i) const;         \
  inline void Set##name(int i, type value);

  DECL_ENTRY_ACCESSORS(BytecodeOffsetRaw, Smi)
  DECL_ENTRY_ACCESSORS(TranslationIndex, Smi)
  DECL_ENTRY_ACCESSORS(Pc, Smi)
#ifdef DEBUG
  DECL_ENTRY_ACCESSORS(NodeId, Smi)
#endif  // DEBUG

#undef DECL_ENTRY_ACCESSORS

  inline BytecodeOffset GetBytecodeOffset(int i);

  inline void SetBytecodeOffset(int i, BytecodeOffset value);

  inline int DeoptCount();

  static const int kNotInlinedIndex = -1;

  // Returns the inlined function at the given position in LiteralArray, or the
  // outer function if index == kNotInlinedIndex.
  class SharedFunctionInfo GetInlinedFunction(int index);

  // Allocates a DeoptimizationData.
  static Handle<DeoptimizationData> New(Isolate* isolate, int deopt_entry_count,
                                        AllocationType allocation);

  // Return an empty DeoptimizationData.
  V8_EXPORT_PRIVATE static Handle<DeoptimizationData> Empty(Isolate* isolate);

  DECL_CAST(DeoptimizationData)

#ifdef ENABLE_DISASSEMBLER
  void DeoptimizationDataPrint(std::ostream& os);
#endif

 private:
  static int IndexForEntry(int i) {
    return kFirstDeoptEntryIndex + (i * kDeoptEntrySize);
  }

  static int LengthFor(int entry_count) { return IndexForEntry(entry_count); }

  OBJECT_CONSTRUCTORS(DeoptimizationData, FixedArray);
};

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_CODE_H_