// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_MAP_INL_H_
#define V8_OBJECTS_MAP_INL_H_

#include "src/heap/heap-write-barrier-inl.h"
#include "src/objects/api-callbacks-inl.h"
#include "src/objects/cell-inl.h"
#include "src/objects/descriptor-array-inl.h"
#include "src/objects/field-type.h"
#include "src/objects/instance-type-inl.h"
#include "src/objects/js-function-inl.h"
#include "src/objects/map-updater.h"
#include "src/objects/map.h"
#include "src/objects/objects-inl.h"
#include "src/objects/property.h"
#include "src/objects/prototype-info-inl.h"
#include "src/objects/shared-function-info-inl.h"
#include "src/objects/templates-inl.h"
#include "src/objects/transitions-inl.h"
#include "src/objects/transitions.h"

#if V8_ENABLE_WEBASSEMBLY
#include "src/wasm/wasm-objects-inl.h"
#endif  // V8_ENABLE_WEBASSEMBLY

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

#include "torque-generated/src/objects/map-tq-inl.inc"

TQ_OBJECT_CONSTRUCTORS_IMPL(Map)

ACCESSORS(Map, instance_descriptors, DescriptorArray,
          kInstanceDescriptorsOffset)
RELAXED_ACCESSORS(Map, instance_descriptors, DescriptorArray,
                  kInstanceDescriptorsOffset)
RELEASE_ACQUIRE_ACCESSORS(Map, instance_descriptors, DescriptorArray,
                          kInstanceDescriptorsOffset)

// A freshly allocated layout descriptor can be set on an existing map.
// We need to use release-store and acquire-load accessor pairs to ensure
// that the concurrent marking thread observes initializing stores of the
// layout descriptor.
WEAK_ACCESSORS(Map, raw_transitions, kTransitionsOrPrototypeInfoOffset)
RELEASE_ACQUIRE_WEAK_ACCESSORS(Map, raw_transitions,
                               kTransitionsOrPrototypeInfoOffset)

ACCESSORS_CHECKED2(Map, prototype, HeapObject, kPrototypeOffset, true,
                   value.IsNull() || value.IsJSReceiver())

DEF_GETTER(Map, prototype_info, Object) {
  Object value = TaggedField<Object, kTransitionsOrPrototypeInfoOffset>::load(
      cage_base, *this);
  DCHECK(this->is_prototype_map());
  return value;
}
RELEASE_ACQUIRE_ACCESSORS(Map, prototype_info, Object,
                          kTransitionsOrPrototypeInfoOffset)
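
// Note: the slot at kTransitionsOrPrototypeInfoOffset is overloaded.
// Ordinary maps keep transition data there, while prototype maps store a
// PrototypeInfo object instead, which is why the getter above DCHECKs
// is_prototype_map().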

// |bit_field| fields.
// Concurrent access to |has_prototype_slot| and |has_non_instance_prototype|
// is explicitly allowlisted here. The former is never modified after the map
// is set up, but it is read by the concurrent marker when pointer compression
// is enabled. The latter bit can be modified on live objects.
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field, has_non_instance_prototype,
                    Map::Bits1::HasNonInstancePrototypeBit)
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field, has_prototype_slot,
                    Map::Bits1::HasPrototypeSlotBit)

// These bits are fine to write non-atomically since we don't have data
// races. However, they have to be read atomically from the background
// because |bit_field| as a whole can mutate when the above setters are used.
BIT_FIELD_ACCESSORS2(Map, relaxed_bit_field, bit_field, is_callable,
                     Map::Bits1::IsCallableBit)
BIT_FIELD_ACCESSORS2(Map, relaxed_bit_field, bit_field, has_named_interceptor,
                     Map::Bits1::HasNamedInterceptorBit)
BIT_FIELD_ACCESSORS2(Map, relaxed_bit_field, bit_field, has_indexed_interceptor,
                     Map::Bits1::HasIndexedInterceptorBit)
BIT_FIELD_ACCESSORS2(Map, relaxed_bit_field, bit_field, is_undetectable,
                     Map::Bits1::IsUndetectableBit)
BIT_FIELD_ACCESSORS2(Map, relaxed_bit_field, bit_field, is_access_check_needed,
                     Map::Bits1::IsAccessCheckNeededBit)
BIT_FIELD_ACCESSORS2(Map, relaxed_bit_field, bit_field, is_constructor,
                     Map::Bits1::IsConstructorBit)

// |bit_field2| fields.
BIT_FIELD_ACCESSORS(Map, bit_field2, new_target_is_base,
                    Map::Bits2::NewTargetIsBaseBit)
BIT_FIELD_ACCESSORS(Map, bit_field2, is_immutable_proto,
                    Map::Bits2::IsImmutablePrototypeBit)

// |bit_field3| fields.
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field3, owns_descriptors,
                    Map::Bits3::OwnsDescriptorsBit)
BIT_FIELD_ACCESSORS(Map, release_acquire_bit_field3, is_deprecated,
                    Map::Bits3::IsDeprecatedBit)
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field3, is_in_retained_map_list,
                    Map::Bits3::IsInRetainedMapListBit)
BIT_FIELD_ACCESSORS(Map, release_acquire_bit_field3, is_prototype_map,
                    Map::Bits3::IsPrototypeMapBit)
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field3, is_migration_target,
                    Map::Bits3::IsMigrationTargetBit)
BIT_FIELD_ACCESSORS2(Map, relaxed_bit_field3, bit_field3, is_extensible,
                     Map::Bits3::IsExtensibleBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, may_have_interesting_symbols,
                    Map::Bits3::MayHaveInterestingSymbolsBit)
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field3, construction_counter,
                    Map::Bits3::ConstructionCounterBits)

DEF_GETTER(Map, GetNamedInterceptor, InterceptorInfo) {
  DCHECK(has_named_interceptor());
  FunctionTemplateInfo info = GetFunctionTemplateInfo(cage_base);
  return InterceptorInfo::cast(info.GetNamedPropertyHandler(cage_base));
}

DEF_GETTER(Map, GetIndexedInterceptor, InterceptorInfo) {
  DCHECK(has_indexed_interceptor());
  FunctionTemplateInfo info = GetFunctionTemplateInfo(cage_base);
  return InterceptorInfo::cast(info.GetIndexedPropertyHandler(cage_base));
}

// static
bool Map::IsMostGeneralFieldType(Representation representation,
                                 FieldType field_type) {
  return !representation.IsHeapObject() || field_type.IsAny();
}

// static
bool Map::FieldTypeIsCleared(Representation rep, FieldType type) {
  return type.IsNone() && rep.IsHeapObject();
}

// static
bool Map::CanHaveFastTransitionableElementsKind(InstanceType instance_type) {
  return instance_type == JS_ARRAY_TYPE ||
         instance_type == JS_PRIMITIVE_WRAPPER_TYPE ||
         instance_type == JS_ARGUMENTS_OBJECT_TYPE;
}

bool Map::CanHaveFastTransitionableElementsKind() const {
  return CanHaveFastTransitionableElementsKind(instance_type());
}

bool Map::IsDetached(Isolate* isolate) const {
  if (is_prototype_map()) return true;
  return instance_type() == JS_OBJECT_TYPE && NumberOfOwnDescriptors() > 0 &&
         GetBackPointer().IsUndefined(isolate);
}

// static
void Map::GeneralizeIfCanHaveTransitionableFastElementsKind(
    Isolate* isolate, InstanceType instance_type,
    Representation* representation, Handle<FieldType>* field_type) {
  if (CanHaveFastTransitionableElementsKind(instance_type)) {
    // We don't support propagation of field generalization through elements
    // kind transitions because they are inserted into the transition tree
    // before field transitions. To avoid the complexity of handling such a
    // case, we ensure that all maps with transitionable elements kinds have
    // the most general field representation and type.
    *field_type = FieldType::Any(isolate);
    *representation = Representation::Tagged();
  }
}
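
// For example, a field added to a JS_ARRAY_TYPE map is generalized to
// (Representation::Tagged(), FieldType::Any(isolate)) here, so a later
// elements kind transition on that map never needs to propagate a field
// generalization.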

Handle<Map> Map::Normalize(Isolate* isolate, Handle<Map> fast_map,
                           PropertyNormalizationMode mode, const char* reason) {
  return Normalize(isolate, fast_map, fast_map->elements_kind(), mode, reason);
}

bool Map::EquivalentToForNormalization(const Map other,
                                       PropertyNormalizationMode mode) const {
  return EquivalentToForNormalization(other, elements_kind(), mode);
}

bool Map::TooManyFastProperties(StoreOrigin store_origin) const {
  if (UnusedPropertyFields() != 0) return false;
  if (is_prototype_map()) return false;
  if (store_origin == StoreOrigin::kNamed) {
    int limit = std::max({kMaxFastProperties, GetInObjectProperties()});
    FieldCounts counts = GetFieldCounts();
    // Only count mutable fields so that objects with large numbers of
    // constant functions do not go to dictionary mode. That would be bad
    // because such objects have often been used as modules.
    int external = counts.mutable_count() - GetInObjectProperties();
    return external > limit || counts.GetTotal() > kMaxNumberOfDescriptors;
  } else {
    int limit = std::max({kFastPropertiesSoftLimit, GetInObjectProperties()});
    int external =
        NumberOfFields(ConcurrencyMode::kSynchronous) - GetInObjectProperties();
    return external > limit;
  }
}
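
// Example: a named store on a map with 4 in-object properties uses
// limit == max(kMaxFastProperties, 4); since only mutable out-of-object
// fields count against that limit, a map carrying many constant-function
// descriptors stays in fast mode.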

Name Map::GetLastDescriptorName(Isolate* isolate) const {
  return instance_descriptors(isolate).GetKey(LastAdded());
}

PropertyDetails Map::GetLastDescriptorDetails(Isolate* isolate) const {
  return instance_descriptors(isolate).GetDetails(LastAdded());
}

InternalIndex Map::LastAdded() const {
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK_GT(number_of_own_descriptors, 0);
  return InternalIndex(number_of_own_descriptors - 1);
}

int Map::NumberOfOwnDescriptors() const {
  return Bits3::NumberOfOwnDescriptorsBits::decode(
      release_acquire_bit_field3());
}

void Map::SetNumberOfOwnDescriptors(int number) {
  DCHECK_LE(number, instance_descriptors().number_of_descriptors());
  CHECK_LE(static_cast<unsigned>(number),
           static_cast<unsigned>(kMaxNumberOfDescriptors));
  set_release_acquire_bit_field3(
      Bits3::NumberOfOwnDescriptorsBits::update(bit_field3(), number));
}

InternalIndex::Range Map::IterateOwnDescriptors() const {
  return InternalIndex::Range(NumberOfOwnDescriptors());
}

int Map::EnumLength() const {
  return Bits3::EnumLengthBits::decode(bit_field3());
}

void Map::SetEnumLength(int length) {
  if (length != kInvalidEnumCacheSentinel) {
    DCHECK_LE(length, NumberOfOwnDescriptors());
    CHECK_LE(static_cast<unsigned>(length),
             static_cast<unsigned>(kMaxNumberOfDescriptors));
  }
  set_relaxed_bit_field3(Bits3::EnumLengthBits::update(bit_field3(), length));
}

FixedArrayBase Map::GetInitialElements() const {
  FixedArrayBase result;
  if (has_fast_elements() || has_fast_string_wrapper_elements() ||
      has_any_nonextensible_elements()) {
    result = GetReadOnlyRoots().empty_fixed_array();
  } else if (has_typed_array_or_rab_gsab_typed_array_elements()) {
    result = GetReadOnlyRoots().empty_byte_array();
  } else if (has_dictionary_elements()) {
    result = GetReadOnlyRoots().empty_slow_element_dictionary();
  } else {
    UNREACHABLE();
  }
  DCHECK(!ObjectInYoungGeneration(result));
  return result;
}
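
// All of these initial backing stores are shared singletons (note the
// DCHECK above); e.g. a map with fast elements starts from the
// empty_fixed_array, so an instance allocates a backing store of its own
// only once an element is added.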

VisitorId Map::visitor_id() const {
  return static_cast<VisitorId>(
      RELAXED_READ_BYTE_FIELD(*this, kVisitorIdOffset));
}

void Map::set_visitor_id(VisitorId id) {
  CHECK_LT(static_cast<unsigned>(id), 256);
  RELAXED_WRITE_BYTE_FIELD(*this, kVisitorIdOffset, static_cast<byte>(id));
}

int Map::instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(*this, kInstanceSizeInWordsOffset);
}

void Map::set_instance_size_in_words(int value) {
  RELAXED_WRITE_BYTE_FIELD(*this, kInstanceSizeInWordsOffset,
                           static_cast<byte>(value));
}

int Map::instance_size() const {
  return instance_size_in_words() << kTaggedSizeLog2;
}

void Map::set_instance_size(int value) {
  CHECK(IsAligned(value, kTaggedSize));
  value >>= kTaggedSizeLog2;
  CHECK_LT(static_cast<unsigned>(value), 256);
  set_instance_size_in_words(value);
}
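
// Example, assuming kTaggedSize == 8 (64-bit, no pointer compression):
// set_instance_size(80) stores 80 >> 3 == 10 words, and instance_size()
// reconstructs 10 << 3 == 80 bytes; the word count must fit in one byte.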

int Map::inobject_properties_start_or_constructor_function_index() const {
  // TODO(solanes, v8:7790, v8:11353): Make this and the setter non-atomic
  // when TSAN sees the map's store synchronization.
  return RELAXED_READ_BYTE_FIELD(
      *this, kInobjectPropertiesStartOrConstructorFunctionIndexOffset);
}

void Map::set_inobject_properties_start_or_constructor_function_index(
    int value) {
  CHECK_LT(static_cast<unsigned>(value), 256);
  RELAXED_WRITE_BYTE_FIELD(
      *this, kInobjectPropertiesStartOrConstructorFunctionIndexOffset,
      static_cast<byte>(value));
}

int Map::GetInObjectPropertiesStartInWords() const {
  DCHECK(IsJSObjectMap());
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetInObjectPropertiesStartInWords(int value) {
  CHECK(IsJSObjectMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

int Map::GetInObjectProperties() const {
  DCHECK(IsJSObjectMap());
  return instance_size_in_words() - GetInObjectPropertiesStartInWords();
}

int Map::GetConstructorFunctionIndex() const {
  DCHECK(IsPrimitiveMap());
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetConstructorFunctionIndex(int value) {
  CHECK(IsPrimitiveMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

int Map::GetInObjectPropertyOffset(int index) const {
  return (GetInObjectPropertiesStartInWords() + index) * kTaggedSize;
}
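
// Example: for a JSObject whose in-object properties start at word 3 (after
// map, properties, and elements), the property at index 2 lives at byte
// offset (3 + 2) * kTaggedSize from the start of the object.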

Handle<Map> Map::AddMissingTransitionsForTesting(
    Isolate* isolate, Handle<Map> split_map,
    Handle<DescriptorArray> descriptors) {
  return AddMissingTransitions(isolate, split_map, descriptors);
}

InstanceType Map::instance_type() const {
  // TODO(solanes, v8:7790, v8:11353, v8:11945): Make this and the setter
  // non-atomic when TSAN sees the map's store synchronization.
  return static_cast<InstanceType>(
      RELAXED_READ_UINT16_FIELD(*this, kInstanceTypeOffset));
}

void Map::set_instance_type(InstanceType value) {
  RELAXED_WRITE_UINT16_FIELD(*this, kInstanceTypeOffset, value);
}

int Map::UnusedPropertyFields() const {
  int value = used_or_unused_instance_size_in_words();
  DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
  int unused;
  if (value >= JSObject::kFieldsAdded) {
    unused = instance_size_in_words() - value;
  } else {
    // For out-of-object properties, the
    // "used_or_unused_instance_size_in_words" byte encodes the slack in the
    // property array.
    unused = value;
  }
  return unused;
}
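
// Example of the dual encoding: with instance_size_in_words() == 10 and
// used_or_unused_instance_size_in_words() == 8, two in-object fields are
// unused; a value below JSObject::kFieldsAdded instead records the slack
// remaining in the out-of-object property array.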

int Map::UnusedInObjectProperties() const {
  // Like Map::UnusedPropertyFields(), but returns 0 for out-of-object
  // properties.
  int value = used_or_unused_instance_size_in_words();
  DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
  if (value >= JSObject::kFieldsAdded) {
    return instance_size_in_words() - value;
  }
  return 0;
}

int Map::used_or_unused_instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(*this, kUsedOrUnusedInstanceSizeInWordsOffset);
}

void Map::set_used_or_unused_instance_size_in_words(int value) {
  CHECK_LE(static_cast<unsigned>(value), 255);
  RELAXED_WRITE_BYTE_FIELD(*this, kUsedOrUnusedInstanceSizeInWordsOffset,
                           static_cast<byte>(value));
}

int Map::UsedInstanceSize() const {
  int words = used_or_unused_instance_size_in_words();
  if (words < JSObject::kFieldsAdded) {
    // All in-object properties are used and this byte tracks the slack in
    // the property array.
    return instance_size();
  }
  return words * kTaggedSize;
}

void Map::SetInObjectUnusedPropertyFields(int value) {
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
  if (!IsJSObjectMap()) {
    CHECK_EQ(0, value);
    set_used_or_unused_instance_size_in_words(0);
    DCHECK_EQ(0, UnusedPropertyFields());
    return;
  }
  CHECK_LE(0, value);
  DCHECK_LE(value, GetInObjectProperties());
  int used_inobject_properties = GetInObjectProperties() - value;
  set_used_or_unused_instance_size_in_words(
      GetInObjectPropertyOffset(used_inobject_properties) / kTaggedSize);
  DCHECK_EQ(value, UnusedPropertyFields());
}

void Map::SetOutOfObjectUnusedPropertyFields(int value) {
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
  CHECK_LT(static_cast<unsigned>(value), JSObject::kFieldsAdded);
  // For out-of-object properties, the
  // "used_or_unused_instance_size_in_words" byte encodes the slack in the
  // property array.
  set_used_or_unused_instance_size_in_words(value);
  DCHECK_EQ(value, UnusedPropertyFields());
}

void Map::CopyUnusedPropertyFields(Map map) {
  set_used_or_unused_instance_size_in_words(
      map.used_or_unused_instance_size_in_words());
  DCHECK_EQ(UnusedPropertyFields(), map.UnusedPropertyFields());
}

void Map::CopyUnusedPropertyFieldsAdjustedForInstanceSize(Map map) {
  int value = map.used_or_unused_instance_size_in_words();
  if (value >= JSObject::kFieldsAdded) {
    // Unused in-object fields. Adjust the offset from the object's start
    // so it matches the distance to the object's end.
    value += instance_size_in_words() - map.instance_size_in_words();
  }
  set_used_or_unused_instance_size_in_words(value);
  DCHECK_EQ(UnusedPropertyFields(), map.UnusedPropertyFields());
}
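
// Example: copying from a 10-word map with value == 8 (two unused in-object
// fields) into a 12-word map stores value == 8 + (12 - 10) == 10, so the
// larger instance still reports exactly two unused fields.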

void Map::AccountAddedPropertyField() {
  // Update the used instance size and the number of unused property fields.
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
#ifdef DEBUG
  int new_unused = UnusedPropertyFields() - 1;
  if (new_unused < 0) new_unused += JSObject::kFieldsAdded;
#endif
  int value = used_or_unused_instance_size_in_words();
  if (value >= JSObject::kFieldsAdded) {
    if (value == instance_size_in_words()) {
      AccountAddedOutOfObjectPropertyField(0);
    } else {
      // The property is added in-object, so simply increment the counter.
      set_used_or_unused_instance_size_in_words(value + 1);
    }
  } else {
    AccountAddedOutOfObjectPropertyField(value);
  }
  DCHECK_EQ(new_unused, UnusedPropertyFields());
}

void Map::AccountAddedOutOfObjectPropertyField(int unused_in_property_array) {
  unused_in_property_array--;
  if (unused_in_property_array < 0) {
    unused_in_property_array += JSObject::kFieldsAdded;
  }
  CHECK_LT(static_cast<unsigned>(unused_in_property_array),
           JSObject::kFieldsAdded);
  set_used_or_unused_instance_size_in_words(unused_in_property_array);
  DCHECK_EQ(unused_in_property_array, UnusedPropertyFields());
}
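
// Example: when the slack is 0, consuming one more field wraps the counter
// to JSObject::kFieldsAdded - 1, reflecting that the property array grows by
// kFieldsAdded slots at a time.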

#if V8_ENABLE_WEBASSEMBLY
uint8_t Map::WasmByte1() const {
  DCHECK(IsWasmObjectMap());
  return inobject_properties_start_or_constructor_function_index();
}

uint8_t Map::WasmByte2() const {
  DCHECK(IsWasmObjectMap());
  return used_or_unused_instance_size_in_words();
}

void Map::SetWasmByte1(uint8_t value) {
  CHECK(IsWasmObjectMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

void Map::SetWasmByte2(uint8_t value) {
  CHECK(IsWasmObjectMap());
  set_used_or_unused_instance_size_in_words(value);
}
#endif  // V8_ENABLE_WEBASSEMBLY

byte Map::bit_field() const {
  // TODO(solanes, v8:7790, v8:11353): Make this non-atomic when TSAN sees the
  // map's store synchronization.
  return relaxed_bit_field();
}

void Map::set_bit_field(byte value) {
  // TODO(solanes, v8:7790, v8:11353): Make this non-atomic when TSAN sees the
  // map's store synchronization.
  set_relaxed_bit_field(value);
}

byte Map::relaxed_bit_field() const {
  return RELAXED_READ_BYTE_FIELD(*this, kBitFieldOffset);
}

void Map::set_relaxed_bit_field(byte value) {
  RELAXED_WRITE_BYTE_FIELD(*this, kBitFieldOffset, value);
}

byte Map::bit_field2() const { return ReadField<byte>(kBitField2Offset); }

void Map::set_bit_field2(byte value) {
  WriteField<byte>(kBitField2Offset, value);
}

uint32_t Map::bit_field3() const {
  // TODO(solanes, v8:7790, v8:11353): Make this and the setter non-atomic
  // when TSAN sees the map's store synchronization.
  return relaxed_bit_field3();
}

void Map::set_bit_field3(uint32_t value) { set_relaxed_bit_field3(value); }

uint32_t Map::relaxed_bit_field3() const {
  return RELAXED_READ_UINT32_FIELD(*this, kBitField3Offset);
}

void Map::set_relaxed_bit_field3(uint32_t value) {
  RELAXED_WRITE_UINT32_FIELD(*this, kBitField3Offset, value);
}

uint32_t Map::release_acquire_bit_field3() const {
  return ACQUIRE_READ_UINT32_FIELD(*this, kBitField3Offset);
}

void Map::set_release_acquire_bit_field3(uint32_t value) {
  RELEASE_WRITE_UINT32_FIELD(*this, kBitField3Offset, value);
}

bool Map::is_abandoned_prototype_map() const {
  return is_prototype_map() && !owns_descriptors();
}

bool Map::should_be_fast_prototype_map() const {
  if (!prototype_info().IsPrototypeInfo()) return false;
  return PrototypeInfo::cast(prototype_info()).should_be_fast_map();
}

void Map::set_elements_kind(ElementsKind elements_kind) {
  CHECK_LT(static_cast<int>(elements_kind), kElementsKindCount);
  set_bit_field2(
      Map::Bits2::ElementsKindBits::update(bit_field2(), elements_kind));
}

ElementsKind Map::elements_kind() const {
  return Map::Bits2::ElementsKindBits::decode(bit_field2());
}

bool Map::has_fast_smi_elements() const {
  return IsSmiElementsKind(elements_kind());
}

bool Map::has_fast_object_elements() const {
  return IsObjectElementsKind(elements_kind());
}

bool Map::has_fast_smi_or_object_elements() const {
  return IsSmiOrObjectElementsKind(elements_kind());
}

bool Map::has_fast_double_elements() const {
  return IsDoubleElementsKind(elements_kind());
}

bool Map::has_fast_elements() const {
  return IsFastElementsKind(elements_kind());
}

bool Map::has_sloppy_arguments_elements() const {
  return IsSloppyArgumentsElementsKind(elements_kind());
}

bool Map::has_fast_sloppy_arguments_elements() const {
  return elements_kind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}

bool Map::has_fast_string_wrapper_elements() const {
  return elements_kind() == FAST_STRING_WRAPPER_ELEMENTS;
}

bool Map::has_typed_array_or_rab_gsab_typed_array_elements() const {
  return IsTypedArrayOrRabGsabTypedArrayElementsKind(elements_kind());
}

bool Map::has_any_typed_array_or_wasm_array_elements() const {
  ElementsKind kind = elements_kind();
  return IsTypedArrayOrRabGsabTypedArrayElementsKind(kind) ||
#if V8_ENABLE_WEBASSEMBLY
         IsWasmArrayElementsKind(kind) ||
#endif  // V8_ENABLE_WEBASSEMBLY
         false;
}

bool Map::has_dictionary_elements() const {
  return IsDictionaryElementsKind(elements_kind());
}

bool Map::has_any_nonextensible_elements() const {
  return IsAnyNonextensibleElementsKind(elements_kind());
}

bool Map::has_nonextensible_elements() const {
  return IsNonextensibleElementsKind(elements_kind());
}

bool Map::has_sealed_elements() const {
  return IsSealedElementsKind(elements_kind());
}

bool Map::has_frozen_elements() const {
  return IsFrozenElementsKind(elements_kind());
}

void Map::set_is_dictionary_map(bool value) {
  uint32_t new_bit_field3 =
      Bits3::IsDictionaryMapBit::update(bit_field3(), value);
  new_bit_field3 = Bits3::IsUnstableBit::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}

bool Map::is_dictionary_map() const {
  return Bits3::IsDictionaryMapBit::decode(relaxed_bit_field3());
}

void Map::mark_unstable() {
  set_release_acquire_bit_field3(
      Bits3::IsUnstableBit::update(bit_field3(), true));
}

bool Map::is_stable() const {
  return !Bits3::IsUnstableBit::decode(release_acquire_bit_field3());
}

bool Map::CanBeDeprecated() const {
  for (InternalIndex i : IterateOwnDescriptors()) {
    PropertyDetails details = instance_descriptors(kRelaxedLoad).GetDetails(i);
    if (details.representation().MightCauseMapDeprecation()) return true;
    if (details.kind() == PropertyKind::kData &&
        details.location() == PropertyLocation::kDescriptor) {
      return true;
    }
  }
  return false;
}

void Map::NotifyLeafMapLayoutChange(Isolate* isolate) {
  if (is_stable()) {
    mark_unstable();
    dependent_code().DeoptimizeDependentCodeGroup(
        isolate, DependentCode::kPrototypeCheckGroup);
  }
}

bool Map::CanTransition() const {
  // Only JSObject and subtypes have map transitions and back pointers.
  return InstanceTypeChecker::IsJSObject(instance_type());
}

#define DEF_TESTER(Type, ...)                              \
  bool Map::Is##Type##Map() const {                        \
    return InstanceTypeChecker::Is##Type(instance_type()); \
  }
INSTANCE_TYPE_CHECKERS(DEF_TESTER)
#undef DEF_TESTER

bool Map::IsBooleanMap() const {
  return *this == GetReadOnlyRoots().boolean_map();
}

bool Map::IsNullOrUndefinedMap() const {
  auto roots = GetReadOnlyRoots();
  return *this == roots.null_map() || *this == roots.undefined_map();
}

bool Map::IsPrimitiveMap() const {
  return instance_type() <= LAST_PRIMITIVE_HEAP_OBJECT_TYPE;
}

void Map::UpdateDescriptors(Isolate* isolate, DescriptorArray descriptors,
                            int number_of_own_descriptors) {
  SetInstanceDescriptors(isolate, descriptors, number_of_own_descriptors);
}

void Map::InitializeDescriptors(Isolate* isolate, DescriptorArray descriptors) {
  SetInstanceDescriptors(isolate, descriptors,
                         descriptors.number_of_descriptors());
}

void Map::clear_padding() {
  if (FIELD_SIZE(kOptionalPaddingOffset) == 0) return;
  DCHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
  memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
         FIELD_SIZE(kOptionalPaddingOffset));
}

void Map::AppendDescriptor(Isolate* isolate, Descriptor* desc) {
  DescriptorArray descriptors = instance_descriptors(isolate);
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors.number_of_descriptors() == number_of_own_descriptors);
  {
    // The following two operations need to happen before the marking write
    // barrier.
    descriptors.Append(desc);
    SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
#ifndef V8_DISABLE_WRITE_BARRIERS
    WriteBarrier::Marking(descriptors, number_of_own_descriptors + 1);
#endif
  }
  // Properly mark the map if the {desc} is an "interesting symbol".
  if (desc->GetKey()->IsInterestingSymbol()) {
    set_may_have_interesting_symbols(true);
  }
  PropertyDetails details = desc->GetDetails();
  if (details.location() == PropertyLocation::kField) {
    DCHECK_GT(UnusedPropertyFields(), 0);
    AccountAddedPropertyField();
  }

// This function does not support appending double field descriptors and
// it should never try to (otherwise, the layout descriptor would have to be
// updated too).
#ifdef DEBUG
  DCHECK(details.location() != PropertyLocation::kField ||
         !details.representation().IsDouble());
#endif
}

bool Map::ConcurrentIsMap(PtrComprCageBase cage_base,
                          const Object& object) const {
  return object.IsHeapObject() && HeapObject::cast(object).map(cage_base) ==
                                      GetReadOnlyRoots(cage_base).meta_map();
}

DEF_GETTER(Map, GetBackPointer, HeapObject) {
  Object object = constructor_or_back_pointer(cage_base, kRelaxedLoad);
  if (ConcurrentIsMap(cage_base, object)) {
    return Map::cast(object);
  }
  return GetReadOnlyRoots(cage_base).undefined_value();
}

void Map::SetBackPointer(HeapObject value, WriteBarrierMode mode) {
  CHECK_GE(instance_type(), FIRST_JS_RECEIVER_TYPE);
  CHECK(value.IsMap());
  CHECK(GetBackPointer().IsUndefined());
  CHECK_EQ(Map::cast(value).GetConstructor(), constructor_or_back_pointer());
  set_constructor_or_back_pointer(value, mode);
}

Map Map::ElementsTransitionMap(Isolate* isolate, ConcurrencyMode cmode) {
  return TransitionsAccessor(isolate, *this, IsConcurrent(cmode))
      .SearchSpecial(ReadOnlyRoots(isolate).elements_transition_symbol());
}

ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, prototype_validity_cell, Object, kPrototypeValidityCellOffset)
ACCESSORS_CHECKED2(Map, constructor_or_back_pointer, Object,
                   kConstructorOrBackPointerOrNativeContextOffset,
                   !IsContextMap(), value.IsNull() || !IsContextMap())
RELAXED_ACCESSORS_CHECKED2(Map, constructor_or_back_pointer, Object,
                           kConstructorOrBackPointerOrNativeContextOffset,
                           !IsContextMap(), value.IsNull() || !IsContextMap())
ACCESSORS_CHECKED(Map, native_context, NativeContext,
                  kConstructorOrBackPointerOrNativeContextOffset,
                  IsContextMap())
ACCESSORS_CHECKED(Map, native_context_or_null, Object,
                  kConstructorOrBackPointerOrNativeContextOffset,
                  (value.IsNull() || value.IsNativeContext()) && IsContextMap())
#if V8_ENABLE_WEBASSEMBLY
ACCESSORS_CHECKED(Map, wasm_type_info, WasmTypeInfo,
                  kConstructorOrBackPointerOrNativeContextOffset,
                  IsWasmStructMap() || IsWasmArrayMap() ||
                      IsWasmInternalFunctionMap())
#endif  // V8_ENABLE_WEBASSEMBLY

bool Map::IsPrototypeValidityCellValid() const {
  Object validity_cell = prototype_validity_cell();
  Object value = validity_cell.IsSmi() ? Smi::cast(validity_cell)
                                       : Cell::cast(validity_cell).value();
  return value == Smi::FromInt(Map::kPrototypeChainValid);
}

DEF_GETTER(Map, GetConstructor, Object) {
  Object maybe_constructor = constructor_or_back_pointer(cage_base);
  // Follow any back pointers.
  while (ConcurrentIsMap(cage_base, maybe_constructor)) {
    maybe_constructor =
        Map::cast(maybe_constructor).constructor_or_back_pointer(cage_base);
  }
  return maybe_constructor;
}

Object Map::TryGetConstructor(Isolate* isolate, int max_steps) {
  Object maybe_constructor = constructor_or_back_pointer(isolate);
  // Follow any back pointers.
  while (maybe_constructor.IsMap(isolate)) {
    if (max_steps-- == 0) return Smi::FromInt(0);
    maybe_constructor =
        Map::cast(maybe_constructor).constructor_or_back_pointer(isolate);
  }
  return maybe_constructor;
}
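
// The constructor_or_back_pointer slot is overloaded: a transitioned map
// stores its parent map there and only the root of a transition tree holds
// the actual constructor, which is why both getters above walk back pointers
// until they reach a non-map value.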

DEF_GETTER(Map, GetFunctionTemplateInfo, FunctionTemplateInfo) {
  Object constructor = GetConstructor(cage_base);
  if (constructor.IsJSFunction(cage_base)) {
    // TODO(ishell): IsApiFunction(isolate) and get_api_func_data(isolate)
    DCHECK(JSFunction::cast(constructor).shared(cage_base).IsApiFunction());
    return JSFunction::cast(constructor).shared(cage_base).get_api_func_data();
  }
  DCHECK(constructor.IsFunctionTemplateInfo(cage_base));
  return FunctionTemplateInfo::cast(constructor);
}

void Map::SetConstructor(Object constructor, WriteBarrierMode mode) {
  // Never overwrite a back pointer with a constructor.
  CHECK(!constructor_or_back_pointer().IsMap());
  set_constructor_or_back_pointer(constructor, mode);
}

Handle<Map> Map::CopyInitialMap(Isolate* isolate, Handle<Map> map) {
  return CopyInitialMap(isolate, map, map->instance_size(),
                        map->GetInObjectProperties(),
                        map->UnusedPropertyFields());
}

bool Map::IsInobjectSlackTrackingInProgress() const {
  return construction_counter() != Map::kNoSlackTracking;
}

void Map::InobjectSlackTrackingStep(Isolate* isolate) {
  DisallowGarbageCollection no_gc;
  // Slack tracking should only be performed on an initial map.
  DCHECK(GetBackPointer().IsUndefined());
  if (!IsInobjectSlackTrackingInProgress()) return;
  int counter = construction_counter();
  set_construction_counter(counter - 1);
  if (counter == kSlackTrackingCounterEnd) {
    MapUpdater::CompleteInobjectSlackTracking(isolate, *this);
  }
}

int Map::SlackForArraySize(int old_size, int size_limit) {
  const int max_slack = size_limit - old_size;
  CHECK_LE(0, max_slack);
  if (old_size < 4) {
    DCHECK_LE(1, max_slack);
    return 1;
  }
  return std::min(max_slack, old_size / 4);
}
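
// Example: SlackForArraySize(10, 20) returns std::min(20 - 10, 10 / 4) == 2,
// while any old_size below 4 yields exactly one slot of slack.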

int Map::InstanceSizeFromSlack(int slack) const {
  return instance_size() - slack * kTaggedSize;
}

OBJECT_CONSTRUCTORS_IMPL(NormalizedMapCache, WeakFixedArray)
CAST_ACCESSOR(NormalizedMapCache)
NEVER_READ_ONLY_SPACE_IMPL(NormalizedMapCache)

int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}

DEF_GETTER(HeapObject, IsNormalizedMapCache, bool) {
  if (!IsWeakFixedArray(cage_base)) return false;
  if (WeakFixedArray::cast(*this).length() != NormalizedMapCache::kEntries) {
    return false;
  }
  return true;
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_MAP_INL_H_