// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/heap-refs.h"

#ifdef ENABLE_SLOW_DCHECKS
#include <algorithm>
#endif

#include "src/api/api-inl.h"
#include "src/ast/modules.h"
#include "src/base/optional.h"
#include "src/base/platform/platform.h"
#include "src/codegen/code-factory.h"
#include "src/compiler/compilation-dependencies.h"
#include "src/compiler/js-heap-broker.h"
#include "src/execution/protectors-inl.h"
#include "src/objects/allocation-site-inl.h"
#include "src/objects/descriptor-array.h"
#include "src/objects/heap-number-inl.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/literal-objects-inl.h"
#include "src/objects/property-cell.h"
#include "src/objects/template-objects-inl.h"

namespace v8 {
namespace internal {
namespace compiler {

#define TRACE(broker, x) TRACE_BROKER(broker, x)
#define TRACE_MISSING(broker, x) TRACE_BROKER_MISSING(broker, x)

// There are several kinds of ObjectData values.
//
// kSmi: The underlying V8 object is a Smi and the data is an instance of the
// base class (ObjectData), i.e. it's basically just the handle. Because the
// object is a Smi, it's safe to access the handle in order to extract the
// number value, and AsSmi() does exactly that.
//
// kBackgroundSerializedHeapObject: The underlying V8 object is a HeapObject
// and the data is an instance of the corresponding (most-specific) subclass,
// e.g. JSFunctionData, which provides serialized information about the
// object. Allows serialization from the background thread.
//
// kUnserializedHeapObject: The underlying V8 object is a HeapObject and the
// data is an instance of the base class (ObjectData), i.e. it basically
// carries no information other than the handle.
//
// kNeverSerializedHeapObject: The underlying V8 object is a (potentially
// mutable) HeapObject and the data is an instance of ObjectData. Its handle
// must be persistent so that the GC can update it at a safepoint. Via this
// handle, the object can be accessed concurrently with the main thread.
//
// kUnserializedReadOnlyHeapObject: The underlying V8 object is a read-only
// HeapObject and the data is an instance of ObjectData. For
// ReadOnlyHeapObjects, it is OK to access the heap even from off-thread, so
// these objects need not be serialized.
enum ObjectDataKind {
  kSmi,
  kBackgroundSerializedHeapObject,
  kUnserializedHeapObject,
  kNeverSerializedHeapObject,
  kUnserializedReadOnlyHeapObject
};

namespace {

bool IsReadOnlyHeapObjectForCompiler(PtrComprCageBase cage_base,
                                     HeapObject object) {
  DisallowGarbageCollection no_gc;
  // TODO(jgruber): Remove this compiler-specific predicate and use the plain
  // heap predicate instead. This would involve removing the special cases for
  // builtins.
  return (object.IsCode(cage_base) && Code::cast(object).is_builtin()) ||
         ReadOnlyHeap::Contains(object);
}

}  // namespace

class ObjectData : public ZoneObject {
 public:
  ObjectData(JSHeapBroker* broker, ObjectData** storage, Handle<Object> object,
             ObjectDataKind kind)
      : object_(object),
        kind_(kind)
#ifdef DEBUG
        ,
        broker_(broker)
#endif  // DEBUG
  {
    // This assignment ensures we don't end up inserting the same object
    // in an endless recursion.
    *storage = this;

    TRACE(broker, "Creating data " << this << " for handle " << object.address()
                                   << " (" << Brief(*object) << ")");

    // It is safe to access read only heap objects and builtins from a
    // background thread. When we read fields of these objects, we may create
    // ObjectData on the background thread even without a canonical handle
    // scope. This is safe too since we don't create handles but just get
    // handles from the read-only roots table or the builtins table, which is
    // what the canonical scope uses as well. For all other objects we should
    // have created ObjectData in canonical handle scope on the main thread.
    Isolate* isolate = broker->isolate();
    CHECK_IMPLIES(broker->mode() == JSHeapBroker::kDisabled ||
                      broker->mode() == JSHeapBroker::kSerializing,
                  isolate->handle_scope_data()->canonical_scope != nullptr);
    CHECK_IMPLIES(broker->mode() == JSHeapBroker::kSerialized,
                  kind == kUnserializedReadOnlyHeapObject || kind == kSmi ||
                      kind == kNeverSerializedHeapObject ||
                      kind == kBackgroundSerializedHeapObject);
    CHECK_IMPLIES(
        kind == kUnserializedReadOnlyHeapObject,
        object->IsHeapObject() && IsReadOnlyHeapObjectForCompiler(
                                      isolate, HeapObject::cast(*object)));
  }

#define DECLARE_IS(Name) bool Is##Name() const;
  HEAP_BROKER_OBJECT_LIST(DECLARE_IS)
#undef DECLARE_IS

#define DECLARE_AS(Name) Name##Data* As##Name();
  HEAP_BROKER_BACKGROUND_SERIALIZED_OBJECT_LIST(DECLARE_AS)
#undef DECLARE_AS

  Handle<Object> object() const { return object_; }
  ObjectDataKind kind() const { return kind_; }
  bool is_smi() const { return kind_ == kSmi; }
  bool should_access_heap() const {
    return kind_ == kUnserializedHeapObject ||
           kind_ == kNeverSerializedHeapObject ||
           kind_ == kUnserializedReadOnlyHeapObject;
  }
  bool IsNull() const { return object_->IsNull(); }

#ifdef DEBUG
  JSHeapBroker* broker() const { return broker_; }
#endif  // DEBUG

 private:
  Handle<Object> const object_;
  ObjectDataKind const kind_;
#ifdef DEBUG
  JSHeapBroker* const broker_;  // For DCHECKs.
#endif  // DEBUG
};

class HeapObjectData : public ObjectData {
 public:
  HeapObjectData(JSHeapBroker* broker, ObjectData** storage,
                 Handle<HeapObject> object, ObjectDataKind kind);

  base::Optional<bool> TryGetBooleanValue(JSHeapBroker* broker) const;
  ObjectData* map() const { return map_; }
  InstanceType GetMapInstanceType() const;

 private:
  base::Optional<bool> TryGetBooleanValueImpl(JSHeapBroker* broker) const;

  ObjectData* const map_;
};

class PropertyCellData : public HeapObjectData {
 public:
  PropertyCellData(JSHeapBroker* broker, ObjectData** storage,
                   Handle<PropertyCell> object, ObjectDataKind kind);

  bool Cache(JSHeapBroker* broker);

  PropertyDetails property_details() const {
    CHECK(serialized());
    return property_details_;
  }

  ObjectData* value() const {
    DCHECK(serialized());
    return value_;
  }

 private:
  PropertyDetails property_details_ = PropertyDetails::Empty();
  ObjectData* value_ = nullptr;

  bool serialized() const { return value_ != nullptr; }
};

namespace {

ZoneVector<Address> GetCFunctions(FixedArray function_overloads, Zone* zone) {
  const int len = function_overloads.length() /
                  FunctionTemplateInfo::kFunctionOverloadEntrySize;
  ZoneVector<Address> c_functions = ZoneVector<Address>(len, zone);
  for (int i = 0; i < len; i++) {
    c_functions[i] = v8::ToCData<Address>(function_overloads.get(
        FunctionTemplateInfo::kFunctionOverloadEntrySize * i));
  }
  return c_functions;
}

ZoneVector<const CFunctionInfo*> GetCSignatures(FixedArray function_overloads,
                                                Zone* zone) {
  const int len = function_overloads.length() /
                  FunctionTemplateInfo::kFunctionOverloadEntrySize;
  ZoneVector<const CFunctionInfo*> c_signatures =
      ZoneVector<const CFunctionInfo*>(len, zone);
  for (int i = 0; i < len; i++) {
    c_signatures[i] = v8::ToCData<const CFunctionInfo*>(function_overloads.get(
        FunctionTemplateInfo::kFunctionOverloadEntrySize * i + 1));
  }
  return c_signatures;
}

}  // namespace

PropertyCellData::PropertyCellData(JSHeapBroker* broker, ObjectData** storage,
                                   Handle<PropertyCell> object,
                                   ObjectDataKind kind)
    : HeapObjectData(broker, storage, object, kind) {}

bool PropertyCellData::Cache(JSHeapBroker* broker) {
  if (serialized()) return true;

  TraceScope tracer(broker, this, "PropertyCellData::Serialize");
  auto cell = Handle<PropertyCell>::cast(object());

  // While this code runs on a background thread, the property cell might
  // undergo state transitions via calls to PropertyCell::Transition. These
  // transitions follow a certain protocol on which we rely here to ensure that
  // we only report success when we can guarantee consistent data. A key
  // property is that after transitioning from cell type A to B (A != B), there
  // will never be a transition back to A, unless A is kConstant and the new
  // value is the hole (i.e. the property cell was invalidated, which is a
  // final state).
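  //
  // Concretely, the code below reads the property details, then the value,
  // and then re-reads the property details (all with acquire loads). If the
  // two detail reads disagree, a transition raced with us and we bail out
  // rather than cache possibly inconsistent data.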

  PropertyDetails property_details = cell->property_details(kAcquireLoad);

  Handle<Object> value =
      broker->CanonicalPersistentHandle(cell->value(kAcquireLoad));
  if (broker->ObjectMayBeUninitialized(value)) {
    DCHECK(!broker->IsMainThread());
    return false;
  }

  {
    PropertyDetails property_details_again =
        cell->property_details(kAcquireLoad);
    if (property_details != property_details_again) {
      DCHECK(!broker->IsMainThread());
      return false;
    }
  }

  if (property_details.cell_type() == PropertyCellType::kInTransition) {
    DCHECK(!broker->IsMainThread());
    return false;
  }

  ObjectData* value_data = broker->TryGetOrCreateData(value);
  if (value_data == nullptr) {
    DCHECK(!broker->IsMainThread());
    return false;
  }

  PropertyCell::CheckDataIsCompatible(property_details, *value);

  DCHECK(!serialized());
  property_details_ = property_details;
  value_ = value_data;
  DCHECK(serialized());
  return true;
}

class JSReceiverData : public HeapObjectData {
 public:
  JSReceiverData(JSHeapBroker* broker, ObjectData** storage,
                 Handle<JSReceiver> object, ObjectDataKind kind)
      : HeapObjectData(broker, storage, object, kind) {}
};

class JSObjectData : public JSReceiverData {
 public:
  JSObjectData(JSHeapBroker* broker, ObjectData** storage,
               Handle<JSObject> object, ObjectDataKind kind)
      : JSReceiverData(broker, storage, object, kind) {}
};

namespace {

base::Optional<ObjectRef> GetOwnFastDataPropertyFromHeap(
    JSHeapBroker* broker, JSObjectRef holder, Representation representation,
    FieldIndex field_index) {
  base::Optional<Object> constant;
  {
    DisallowGarbageCollection no_gc;
    PtrComprCageBase cage_base = broker->cage_base();

    // This check ensures that the live map is the same as the cached map, to
    // protect us against reads outside the bounds of the heap. This could
    // happen if the Ref was created in a prior GC epoch and the object
    // shrunk in size. It might end up at the edge of a heap boundary. If
    // we see that the map is the same in this GC epoch, we are safe.
    Map map = holder.object()->map(cage_base, kAcquireLoad);
    if (*holder.map().object() != map) {
      TRACE_BROKER_MISSING(broker, "Map changed for " << holder);
      return {};
    }

    if (field_index.is_inobject()) {
      constant =
          holder.object()->RawInobjectPropertyAt(cage_base, map, field_index);
      if (!constant.has_value()) {
        TRACE_BROKER_MISSING(
            broker, "Constant field in " << holder << " is unsafe to read");
        return {};
      }
    } else {
      Object raw_properties_or_hash =
          holder.object()->raw_properties_or_hash(cage_base, kRelaxedLoad);
      // Ensure that the object is safe to inspect.
      if (broker->ObjectMayBeUninitialized(raw_properties_or_hash)) {
        return {};
      }
      if (!raw_properties_or_hash.IsPropertyArray(cage_base)) {
        TRACE_BROKER_MISSING(
            broker,
            "Expected PropertyArray for backing store in " << holder << ".");
        return {};
      }
      PropertyArray properties = PropertyArray::cast(raw_properties_or_hash);
      const int array_index = field_index.outobject_array_index();
      if (array_index < properties.length(kAcquireLoad)) {
        constant = properties.get(array_index);
      } else {
        TRACE_BROKER_MISSING(
            broker, "Backing store for " << holder << " not long enough.");
        return {};
      }
    }

    // {constant} needs to pass the gc predicate before we can introspect on
    // it.
    if (broker->ObjectMayBeUninitialized(constant.value())) return {};

    // Ensure that {constant} matches the {representation} we expect for the
    // field.
    if (!constant->FitsRepresentation(representation, false)) {
      const char* repString =
          constant->IsSmi()
              ? "Smi"
              : constant->IsHeapNumber() ? "HeapNumber" : "HeapObject";
      TRACE_BROKER_MISSING(broker, "Mismatched representation for "
                                       << holder << ". Expected "
                                       << representation << ", but object is a "
                                       << repString);
      return {};
    }
  }

  // Now that we can safely inspect the constant, it may need to be wrapped.
  Handle<Object> value = broker->CanonicalPersistentHandle(constant.value());
  Handle<Object> possibly_wrapped = Object::WrapForRead<AllocationType::kOld>(
      broker->local_isolate_or_isolate(), value, representation);
  return TryMakeRef(broker, *possibly_wrapped);
}

// Tries to get the property at {dict_index}. If we are within bounds of the
// object, we are guaranteed to see valid heap words even if the data is wrong.
base::Optional<ObjectRef> GetOwnDictionaryPropertyFromHeap(
    JSHeapBroker* broker, Handle<JSObject> receiver, InternalIndex dict_index) {
  Handle<Object> constant;
  {
    DisallowGarbageCollection no_gc;
    // DictionaryPropertyAt will check that we are within the bounds of the
    // object.
    base::Optional<Object> maybe_constant = JSObject::DictionaryPropertyAt(
        receiver, dict_index, broker->isolate()->heap());
    DCHECK_IMPLIES(broker->IsMainThread(), maybe_constant);
    if (!maybe_constant) return {};
    constant = broker->CanonicalPersistentHandle(maybe_constant.value());
  }
  return TryMakeRef(broker, constant);
}

}  // namespace

class JSTypedArrayData : public JSObjectData {
 public:
  JSTypedArrayData(JSHeapBroker* broker, ObjectData** storage,
                   Handle<JSTypedArray> object, ObjectDataKind kind)
      : JSObjectData(broker, storage, object, kind) {}
};

class JSDataViewData : public JSObjectData {
 public:
  JSDataViewData(JSHeapBroker* broker, ObjectData** storage,
                 Handle<JSDataView> object, ObjectDataKind kind)
      : JSObjectData(broker, storage, object, kind) {}
};

class JSBoundFunctionData : public JSObjectData {
 public:
  JSBoundFunctionData(JSHeapBroker* broker, ObjectData** storage,
                      Handle<JSBoundFunction> object, ObjectDataKind kind)
      : JSObjectData(broker, storage, object, kind) {}
};

class JSFunctionData : public JSObjectData {
 public:
  JSFunctionData(JSHeapBroker* broker, ObjectData** storage,
                 Handle<JSFunction> object, ObjectDataKind kind)
      : JSObjectData(broker, storage, object, kind) {
    Cache(broker);
  }

  bool IsConsistentWithHeapState(JSHeapBroker* broker) const;

  bool has_initial_map() const {
    DCHECK(serialized_);
    return has_initial_map_;
  }
  bool has_instance_prototype() const {
    DCHECK(serialized_);
    return has_instance_prototype_;
  }
  bool PrototypeRequiresRuntimeLookup() const {
    DCHECK(serialized_);
    return PrototypeRequiresRuntimeLookup_;
  }

  ObjectData* context() const {
    DCHECK(serialized_);
    return context_;
  }
  MapData* initial_map() const {
    DCHECK(serialized_);
    return initial_map_;
  }
  ObjectData* instance_prototype() const {
    DCHECK(serialized_);
    return instance_prototype_;
  }
  ObjectData* shared() const {
    DCHECK(serialized_);
    return shared_;
  }
  ObjectData* raw_feedback_cell() const {
    DCHECK(serialized_);
    return feedback_cell_;
  }
  int initial_map_instance_size_with_min_slack() const {
    DCHECK(serialized_);
    return initial_map_instance_size_with_min_slack_;
  }

  // Track serialized fields that are actually used, in order to relax
  // ConsistentJSFunctionView dependency validation as much as possible.
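  // A bit is set (via set_used_field) the first time the corresponding datum
  // is read through a JSFunctionRef accessor; IsConsistentWithHeapState then
  // only re-validates fields whose bit is set (context and shared are always
  // checked).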
  enum UsedField {
    kHasFeedbackVector = 1 << 0,
    kPrototypeOrInitialMap = 1 << 1,
    kHasInitialMap = 1 << 2,
    kHasInstancePrototype = 1 << 3,
    kPrototypeRequiresRuntimeLookup = 1 << 4,
    kInitialMap = 1 << 5,
    kInstancePrototype = 1 << 6,
    kFeedbackVector = 1 << 7,
    kFeedbackCell = 1 << 8,
    kInitialMapInstanceSizeWithMinSlack = 1 << 9,
  };

  bool has_any_used_field() const { return used_fields_ != 0; }
  bool has_used_field(UsedField used_field) const {
    return (used_fields_ & used_field) != 0;
  }
  void set_used_field(UsedField used_field) { used_fields_ |= used_field; }

 private:
  void Cache(JSHeapBroker* broker);

#ifdef DEBUG
  bool serialized_ = false;
#endif  // DEBUG

  using UsedFields = base::Flags<UsedField>;
  UsedFields used_fields_;

  ObjectData* prototype_or_initial_map_ = nullptr;
  bool has_initial_map_ = false;
  bool has_instance_prototype_ = false;
  bool PrototypeRequiresRuntimeLookup_ = false;

  ObjectData* context_ = nullptr;
  MapData* initial_map_ = nullptr;  // Derives from prototype_or_initial_map_.
  ObjectData* instance_prototype_ =
      nullptr;  // Derives from prototype_or_initial_map_.
  ObjectData* shared_ = nullptr;
  ObjectData* feedback_cell_ = nullptr;
  int initial_map_instance_size_with_min_slack_;  // Derives from
                                                  // prototype_or_initial_map_.
};

class BigIntData : public HeapObjectData {
 public:
  BigIntData(JSHeapBroker* broker, ObjectData** storage, Handle<BigInt> object,
             ObjectDataKind kind)
      : HeapObjectData(broker, storage, object, kind),
        as_uint64_(object->AsUint64(nullptr)) {}

  uint64_t AsUint64() const { return as_uint64_; }

 private:
  const uint64_t as_uint64_;
};

struct PropertyDescriptor {
  FieldIndex field_index;
  ObjectData* field_owner = nullptr;
};

class MapData : public HeapObjectData {
 public:
  MapData(JSHeapBroker* broker, ObjectData** storage, Handle<Map> object,
          ObjectDataKind kind);

  InstanceType instance_type() const { return instance_type_; }
  int instance_size() const { return instance_size_; }
  uint32_t bit_field3() const { return bit_field3_; }
  int in_object_properties() const {
    CHECK(InstanceTypeChecker::IsJSObject(instance_type()));
    return in_object_properties_;
  }
  int UnusedPropertyFields() const { return unused_property_fields_; }
  bool is_abandoned_prototype_map() const {
    return is_abandoned_prototype_map_;
  }

 private:
  // The following fields should be const in principle, but construction
  // requires locking the MapUpdater lock. For this reason, it's easier to
  // initialize these inside the constructor body, not in the initializer list.

  InstanceType instance_type_;
  int instance_size_;
  uint32_t bit_field3_;
  int unused_property_fields_;
  bool is_abandoned_prototype_map_;
  int in_object_properties_;
};

namespace {

int InstanceSizeWithMinSlack(JSHeapBroker* broker, MapRef map) {
  // This operation is split into two phases (1. map collection, 2. map
  // processing). This is to avoid having to take two locks
  // (full_transition_array_access and map_updater_access) at once and thus
  // having to deal with related deadlock issues.
  ZoneVector<Handle<Map>> maps(broker->zone());
  maps.push_back(map.object());

  {
    DisallowGarbageCollection no_gc;

    // Has to be an initial map.
    DCHECK(map.object()->GetBackPointer().IsUndefined(broker->isolate()));

    static constexpr bool kConcurrentAccess = true;
    TransitionsAccessor(broker->isolate(), *map.object(), kConcurrentAccess)
        .TraverseTransitionTree([&](Map m) {
          maps.push_back(broker->CanonicalPersistentHandle(m));
        });
  }

  // The lock is needed for UnusedPropertyFields and InstanceSizeFromSlack.
  JSHeapBroker::MapUpdaterGuardIfNeeded mumd_scope(broker);

  int slack = std::numeric_limits<int>::max();
  for (Handle<Map> m : maps) {
    slack = std::min(slack, m->UnusedPropertyFields());
  }

  return map.object()->InstanceSizeFromSlack(slack);
}

}  // namespace

// IMPORTANT: Keep this sync'd with JSFunctionData::IsConsistentWithHeapState.
void JSFunctionData::Cache(JSHeapBroker* broker) {
  DCHECK(!serialized_);

  TraceScope tracer(broker, this, "JSFunctionData::Cache");
  Handle<JSFunction> function = Handle<JSFunction>::cast(object());

  // This function may run on the background thread and thus must read
  // individual fields in a thread-safe manner. Consistency between fields is
  // *not* guaranteed here; instead, we verify it in
  // `IsConsistentWithHeapState`, called during job finalization. Relaxed loads
  // are thus okay: we're guaranteed to see an initialized JSFunction object,
  // and after initialization fields remain in a valid state.

  ContextRef context =
      MakeRefAssumeMemoryFence(broker, function->context(kRelaxedLoad));
  context_ = context.data();

  SharedFunctionInfoRef shared =
      MakeRefAssumeMemoryFence(broker, function->shared(kRelaxedLoad));
  shared_ = shared.data();

  if (function->has_prototype_slot()) {
    prototype_or_initial_map_ = broker->GetOrCreateData(
        function->prototype_or_initial_map(kAcquireLoad), kAssumeMemoryFence);

    has_initial_map_ = prototype_or_initial_map_->IsMap();
    if (has_initial_map_) {
      initial_map_ = prototype_or_initial_map_->AsMap();

      MapRef initial_map_ref = TryMakeRef<Map>(broker, initial_map_).value();
      if (initial_map_ref.IsInobjectSlackTrackingInProgress()) {
        initial_map_instance_size_with_min_slack_ =
            InstanceSizeWithMinSlack(broker, initial_map_ref);
      } else {
        initial_map_instance_size_with_min_slack_ =
            initial_map_ref.instance_size();
      }
      CHECK_GT(initial_map_instance_size_with_min_slack_, 0);
    }

    if (has_initial_map_) {
      has_instance_prototype_ = true;
      instance_prototype_ =
          MakeRefAssumeMemoryFence(
              broker, Handle<Map>::cast(initial_map_->object())->prototype())
              .data();
    } else if (prototype_or_initial_map_->IsHeapObject() &&
               !Handle<HeapObject>::cast(prototype_or_initial_map_->object())
                    ->IsTheHole()) {
      has_instance_prototype_ = true;
      instance_prototype_ = prototype_or_initial_map_;
    }
  }

  PrototypeRequiresRuntimeLookup_ = function->PrototypeRequiresRuntimeLookup();

  FeedbackCellRef feedback_cell = MakeRefAssumeMemoryFence(
      broker, function->raw_feedback_cell(kAcquireLoad));
  feedback_cell_ = feedback_cell.data();

#ifdef DEBUG
  serialized_ = true;
#endif  // DEBUG
}

// IMPORTANT: Keep this sync'd with JSFunctionData::Cache.
bool JSFunctionData::IsConsistentWithHeapState(JSHeapBroker* broker) const {
  DCHECK(serialized_);

  Handle<JSFunction> f = Handle<JSFunction>::cast(object());

  CHECK_EQ(*context_->object(), f->context());
  CHECK_EQ(*shared_->object(), f->shared());

  if (f->has_prototype_slot()) {
    if (has_used_field(kPrototypeOrInitialMap) &&
        *prototype_or_initial_map_->object() !=
            f->prototype_or_initial_map(kAcquireLoad)) {
      TRACE_BROKER_MISSING(broker, "JSFunction::prototype_or_initial_map");
      return false;
    }
    if (has_used_field(kHasInitialMap) &&
        has_initial_map_ != f->has_initial_map()) {
      TRACE_BROKER_MISSING(broker, "JSFunction::has_initial_map");
      return false;
    }
    if (has_used_field(kHasInstancePrototype) &&
        has_instance_prototype_ != f->has_instance_prototype()) {
      TRACE_BROKER_MISSING(broker, "JSFunction::has_instance_prototype");
      return false;
    }
  } else {
    DCHECK(!has_initial_map_);
    DCHECK(!has_instance_prototype_);
  }

  if (has_initial_map()) {
    if (has_used_field(kInitialMap) &&
        *initial_map_->object() != f->initial_map()) {
      TRACE_BROKER_MISSING(broker, "JSFunction::initial_map");
      return false;
    }
    if (has_used_field(kInitialMapInstanceSizeWithMinSlack) &&
        initial_map_instance_size_with_min_slack_ !=
            f->ComputeInstanceSizeWithMinSlack(f->GetIsolate())) {
      TRACE_BROKER_MISSING(broker,
                           "JSFunction::ComputeInstanceSizeWithMinSlack");
      return false;
    }
  } else {
    DCHECK_NULL(initial_map_);
  }

  if (has_instance_prototype_) {
    if (has_used_field(kInstancePrototype) &&
        *instance_prototype_->object() != f->instance_prototype()) {
      TRACE_BROKER_MISSING(broker, "JSFunction::instance_prototype");
      return false;
    }
  } else {
    DCHECK_NULL(instance_prototype_);
  }

  if (has_used_field(kPrototypeRequiresRuntimeLookup) &&
      PrototypeRequiresRuntimeLookup_ != f->PrototypeRequiresRuntimeLookup()) {
    TRACE_BROKER_MISSING(broker, "JSFunction::PrototypeRequiresRuntimeLookup");
    return false;
  }

  if (has_used_field(kFeedbackCell) &&
      *feedback_cell_->object() != f->raw_feedback_cell()) {
    TRACE_BROKER_MISSING(broker, "JSFunction::raw_feedback_cell");
    return false;
  }

  return true;
}

bool JSFunctionRef::IsConsistentWithHeapState() const {
  DCHECK(broker()->IsMainThread());
  return data()->AsJSFunction()->IsConsistentWithHeapState(broker());
}

HeapObjectData::HeapObjectData(JSHeapBroker* broker, ObjectData** storage,
                               Handle<HeapObject> object, ObjectDataKind kind)
    : ObjectData(broker, storage, object, kind),
      map_(broker->GetOrCreateData(
          object->map(broker->cage_base(), kAcquireLoad), kAssumeMemoryFence)) {
  CHECK_IMPLIES(broker->mode() == JSHeapBroker::kSerialized,
                kind == kBackgroundSerializedHeapObject);
}

base::Optional<bool> HeapObjectData::TryGetBooleanValue(
    JSHeapBroker* broker) const {
  // Keep in sync with Object::BooleanValue.
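  // On the main thread, the DCHECK below cross-checks the concurrently
  // computed result against Object::BooleanValue on the live object.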
  auto result = TryGetBooleanValueImpl(broker);
  DCHECK_IMPLIES(broker->IsMainThread() && result.has_value(),
                 result.value() == object()->BooleanValue(broker->isolate()));
  return result;
}

base::Optional<bool> HeapObjectData::TryGetBooleanValueImpl(
    JSHeapBroker* broker) const {
  DisallowGarbageCollection no_gc;
  Object o = *object();
  Isolate* isolate = broker->isolate();
  const InstanceType t = GetMapInstanceType();
  if (o.IsTrue(isolate)) {
    return true;
  } else if (o.IsFalse(isolate)) {
    return false;
  } else if (o.IsNullOrUndefined(isolate)) {
    return false;
  } else if (MapRef{broker, map()}.is_undetectable()) {
    return false;  // Undetectable object is false.
  } else if (InstanceTypeChecker::IsString(t)) {
    // TODO(jgruber): Implement in possible cases.
    return {};
  } else if (InstanceTypeChecker::IsHeapNumber(t)) {
    return {};
  } else if (InstanceTypeChecker::IsBigInt(t)) {
    return {};
  }
  return true;
}

InstanceType HeapObjectData::GetMapInstanceType() const {
  ObjectData* map_data = map();
  if (map_data->should_access_heap()) {
    return Handle<Map>::cast(map_data->object())->instance_type();
  }
  return map_data->AsMap()->instance_type();
}

namespace {

bool IsReadOnlyLengthDescriptor(Isolate* isolate, Handle<Map> jsarray_map) {
  DCHECK(!jsarray_map->is_dictionary_map());
  DescriptorArray descriptors =
      jsarray_map->instance_descriptors(isolate, kRelaxedLoad);
  static_assert(
      JSArray::kLengthOffset == JSObject::kHeaderSize,
      "The length should be the first property on the descriptor array");
  InternalIndex offset(0);
  return descriptors.GetDetails(offset).IsReadOnly();
}

// Important: this predicate does not check Protectors::IsNoElementsIntact. The
// compiler checks protectors through the compilation dependency mechanism; it
// doesn't make sense to do that here as part of every MapData construction.
// Callers *must* take care to take the correct dependency themselves.
bool SupportsFastArrayIteration(JSHeapBroker* broker, Handle<Map> map) {
  return map->instance_type() == JS_ARRAY_TYPE &&
         IsFastElementsKind(map->elements_kind()) &&
         map->prototype().IsJSArray() &&
         broker->IsArrayOrObjectPrototype(broker->CanonicalPersistentHandle(
             JSArray::cast(map->prototype())));
}

bool SupportsFastArrayResize(JSHeapBroker* broker, Handle<Map> map) {
  return SupportsFastArrayIteration(broker, map) && map->is_extensible() &&
         !map->is_dictionary_map() &&
         !IsReadOnlyLengthDescriptor(broker->isolate(), map);
}

}  // namespace

MapData::MapData(JSHeapBroker* broker, ObjectData** storage, Handle<Map> object,
                 ObjectDataKind kind)
    : HeapObjectData(broker, storage, object, kind) {
  // This lock ensures that MapData can always be background-serialized, i.e.
  // while the lock is held the Map object may not be modified (except in
  // benign ways).
  // TODO(jgruber): Consider removing this lock by being smrt.
  JSHeapBroker::MapUpdaterGuardIfNeeded mumd_scope(broker);

  // When background-serializing the map, we can perform a lite serialization
  // since some of the Map's fields can be read directly by the MapRef.

  // Even though MapRefs can read {instance_type} directly, other classes
  // depend on {instance_type} being serialized.
  instance_type_ = object->instance_type();
  instance_size_ = object->instance_size();

  // Both bit_field3 (and below bit_field) are special fields: Even though most
  // of the individual bits inside of the bitfield could be read / written
  // non-atomically, the bitfield itself has to use atomic relaxed accessors
  // since some fields can be modified in live objects.
  // TODO(solanes, v8:7790): Assess if adding the exclusive lock in more places
  // (e.g. for set_has_non_instance_prototype) makes sense. Pros: these fields
  // can use the non-atomic accessors. Cons: We would be acquiring an exclusive
  // lock in more places.
  bit_field3_ = object->relaxed_bit_field3();
  unused_property_fields_ = object->UnusedPropertyFields();
  is_abandoned_prototype_map_ = object->is_abandoned_prototype_map();
  in_object_properties_ =
      object->IsJSObjectMap() ? object->GetInObjectProperties() : 0;
}

class FixedArrayBaseData : public HeapObjectData {
 public:
  FixedArrayBaseData(JSHeapBroker* broker, ObjectData** storage,
                     Handle<FixedArrayBase> object, ObjectDataKind kind)
      : HeapObjectData(broker, storage, object, kind),
        length_(object->length(kAcquireLoad)) {}

  int length() const { return length_; }

 private:
  int const length_;
};

class FixedArrayData : public FixedArrayBaseData {
 public:
  FixedArrayData(JSHeapBroker* broker, ObjectData** storage,
                 Handle<FixedArray> object, ObjectDataKind kind)
      : FixedArrayBaseData(broker, storage, object, kind) {}
};

// Only used in JSNativeContextSpecialization.
class ScriptContextTableData : public FixedArrayData {
 public:
  ScriptContextTableData(JSHeapBroker* broker, ObjectData** storage,
                         Handle<ScriptContextTable> object, ObjectDataKind kind)
      : FixedArrayData(broker, storage, object, kind) {}
};

class JSArrayData : public JSObjectData {
 public:
  JSArrayData(JSHeapBroker* broker, ObjectData** storage,
              Handle<JSArray> object, ObjectDataKind kind)
      : JSObjectData(broker, storage, object, kind) {}
};

class JSGlobalObjectData : public JSObjectData {
 public:
  JSGlobalObjectData(JSHeapBroker* broker, ObjectData** storage,
                     Handle<JSGlobalObject> object, ObjectDataKind kind)
      : JSObjectData(broker, storage, object, kind) {}
};

class JSGlobalProxyData : public JSObjectData {
 public:
  JSGlobalProxyData(JSHeapBroker* broker, ObjectData** storage,
                    Handle<JSGlobalProxy> object, ObjectDataKind kind)
      : JSObjectData(broker, storage, object, kind) {}
};

#define DEFINE_IS(Name)                                                 \
  bool ObjectData::Is##Name() const {                                   \
    if (should_access_heap()) {                                         \
      return object()->Is##Name();                                      \
    }                                                                   \
    if (is_smi()) return false;                                         \
    InstanceType instance_type =                                        \
        static_cast<const HeapObjectData*>(this)->GetMapInstanceType(); \
    return InstanceTypeChecker::Is##Name(instance_type);                \
  }
HEAP_BROKER_OBJECT_LIST(DEFINE_IS)
#undef DEFINE_IS

#define DEFINE_AS(Name)                              \
  Name##Data* ObjectData::As##Name() {               \
    CHECK(Is##Name());                               \
    CHECK(kind_ == kBackgroundSerializedHeapObject); \
    return static_cast<Name##Data*>(this);           \
  }
HEAP_BROKER_BACKGROUND_SERIALIZED_OBJECT_LIST(DEFINE_AS)
#undef DEFINE_AS

bool ObjectRef::equals(const ObjectRef& other) const {
  return data_ == other.data_;
}

Isolate* ObjectRef::isolate() const { return broker()->isolate(); }

ContextRef ContextRef::previous(size_t* depth) const {
  DCHECK_NOT_NULL(depth);

  Context current = *object();
  while (*depth != 0 && current.unchecked_previous().IsContext()) {
    current = Context::cast(current.unchecked_previous());
    (*depth)--;
  }
  // The `previous` field is immutable after initialization and the
  // context itself is read through an atomic load.
  return MakeRefAssumeMemoryFence(broker(), current);
}

base::Optional<ObjectRef> ContextRef::get(int index) const {
  CHECK_LE(0, index);
  // Length is immutable after initialization.
  if (index >= object()->length(kRelaxedLoad)) return {};
  return TryMakeRef(broker(), object()->get(index));
}

void JSHeapBroker::InitializeAndStartSerializing() {
  TraceScope tracer(this, "JSHeapBroker::InitializeAndStartSerializing");

  CHECK_EQ(mode_, kDisabled);
  mode_ = kSerializing;

  // Throw away the dummy data that we created while disabled.
  feedback_.clear();
  refs_->Clear();
  refs_ =
      zone()->New<RefsMap>(kInitialRefsBucketCount, AddressMatcher(), zone());

  CollectArrayAndObjectPrototypes();

  SetTargetNativeContextRef(target_native_context().object());
}

namespace {

constexpr ObjectDataKind ObjectDataKindFor(RefSerializationKind kind) {
  switch (kind) {
    case RefSerializationKind::kBackgroundSerialized:
      return kBackgroundSerializedHeapObject;
    case RefSerializationKind::kNeverSerialized:
      return kNeverSerializedHeapObject;
  }
}

}  // namespace

ObjectData* JSHeapBroker::TryGetOrCreateData(Handle<Object> object,
                                             GetOrCreateDataFlags flags) {
  RefsMap::Entry* entry = refs_->Lookup(object.address());
  if (entry != nullptr) return entry->value;

  if (mode() == JSHeapBroker::kDisabled) {
    entry = refs_->LookupOrInsert(object.address());
    ObjectData** storage = &entry->value;
    if (*storage == nullptr) {
      entry->value = zone()->New<ObjectData>(
          this, storage, object,
          object->IsSmi() ? kSmi : kUnserializedHeapObject);
    }
    return *storage;
  }

  CHECK(mode() == JSHeapBroker::kSerializing ||
        mode() == JSHeapBroker::kSerialized);

  ObjectData* object_data;
  if (object->IsSmi()) {
    entry = refs_->LookupOrInsert(object.address());
    return zone()->New<ObjectData>(this, &entry->value, object, kSmi);
  }

  DCHECK(!object->IsSmi());

  const bool crash_on_error = (flags & kCrashOnError) != 0;

  if ((flags & kAssumeMemoryFence) == 0 &&
      ObjectMayBeUninitialized(HeapObject::cast(*object))) {
    TRACE_BROKER_MISSING(this, "Object may be uninitialized " << *object);
    CHECK_WITH_MSG(!crash_on_error, "Ref construction failed");
    return nullptr;
  }

  if (IsReadOnlyHeapObjectForCompiler(isolate(), HeapObject::cast(*object))) {
    entry = refs_->LookupOrInsert(object.address());
    return zone()->New<ObjectData>(this, &entry->value, object,
                                   kUnserializedReadOnlyHeapObject);
  }

#define CREATE_DATA(Name)                                             \
  if (object->Is##Name()) {                                           \
    entry = refs_->LookupOrInsert(object.address());                  \
    object_data = zone()->New<ref_traits<Name>::data_type>(           \
        this, &entry->value, Handle<Name>::cast(object),              \
        ObjectDataKindFor(ref_traits<Name>::ref_serialization_kind)); \
    /* NOLINTNEXTLINE(readability/braces) */                          \
  } else
  HEAP_BROKER_OBJECT_LIST(CREATE_DATA)
#undef CREATE_DATA
  {
    UNREACHABLE();
  }
  // At this point the entry pointer is not guaranteed to be valid as
  // the refs_ hash table could be resized by one of the constructors above.
  DCHECK_EQ(object_data, refs_->Lookup(object.address())->value);
  return object_data;
}

#define DEFINE_IS_AND_AS(Name)                                    \
  bool ObjectRef::Is##Name() const { return data()->Is##Name(); } \
  Name##Ref ObjectRef::As##Name() const {                         \
    DCHECK(Is##Name());                                           \
    return Name##Ref(broker(), data());                           \
  }
HEAP_BROKER_OBJECT_LIST(DEFINE_IS_AND_AS)
#undef DEFINE_IS_AND_AS

bool ObjectRef::IsSmi() const { return data()->is_smi(); }

int ObjectRef::AsSmi() const {
  DCHECK(IsSmi());
  // Handle-dereference is always allowed for Handle<Smi>.
  return Handle<Smi>::cast(object())->value();
}

#define DEF_TESTER(Type, ...)                              \
  bool MapRef::Is##Type##Map() const {                     \
    return InstanceTypeChecker::Is##Type(instance_type()); \
  }
INSTANCE_TYPE_CHECKERS(DEF_TESTER)
#undef DEF_TESTER

bool MapRef::CanInlineElementAccess() const {
  if (!IsJSObjectMap()) return false;
  if (is_access_check_needed()) return false;
  if (has_indexed_interceptor()) return false;
  ElementsKind kind = elements_kind();
  if (IsFastElementsKind(kind)) return true;
  if (IsTypedArrayElementsKind(kind) && kind != BIGUINT64_ELEMENTS &&
      kind != BIGINT64_ELEMENTS) {
    return true;
  }
  return false;
}

base::Optional<MapRef> MapRef::AsElementsKind(ElementsKind kind) const {
  const ElementsKind current_kind = elements_kind();
  if (kind == current_kind) return *this;

  base::Optional<Map> maybe_result = Map::TryAsElementsKind(
      broker()->isolate(), object(), kind, ConcurrencyMode::kConcurrent);

#ifdef DEBUG
  // If starting from an initial JSArray map, TryAsElementsKind must succeed
  // and return the expected transitioned JSArray map.
  NativeContextRef native_context = broker()->target_native_context();
  if (equals(native_context.GetInitialJSArrayMap(current_kind))) {
    CHECK_EQ(Map::TryAsElementsKind(broker()->isolate(), object(), kind,
                                    ConcurrencyMode::kConcurrent)
                 .value(),
             *native_context.GetInitialJSArrayMap(kind).object());
  }
#endif  // DEBUG

  if (!maybe_result.has_value()) {
    TRACE_BROKER_MISSING(broker(), "MapRef::AsElementsKind " << *this);
    return {};
  }
  return MakeRefAssumeMemoryFence(broker(), maybe_result.value());
}

bool MapRef::HasOnlyStablePrototypesWithFastElements(
    ZoneVector<MapRef>* prototype_maps) {
  DCHECK_NOT_NULL(prototype_maps);
  MapRef prototype_map = prototype().map();
  while (prototype_map.oddball_type() != OddballType::kNull) {
    if (!prototype_map.IsJSObjectMap() || !prototype_map.is_stable() ||
        !IsFastElementsKind(prototype_map.elements_kind())) {
      return false;
    }
    prototype_maps->push_back(prototype_map);
    prototype_map = prototype_map.prototype().map();
  }
  return true;
}

bool MapRef::supports_fast_array_iteration() const {
  return SupportsFastArrayIteration(broker(), object());
}

bool MapRef::supports_fast_array_resize() const {
  return SupportsFastArrayResize(broker(), object());
}

namespace {

void RecordConsistentJSFunctionViewDependencyIfNeeded(
    const JSHeapBroker* broker, const JSFunctionRef& ref, JSFunctionData* data,
    JSFunctionData::UsedField used_field) {
  if (!data->has_any_used_field()) {
    // Deduplicate dependencies.
    broker->dependencies()->DependOnConsistentJSFunctionView(ref);
  }
  data->set_used_field(used_field);
}

}  // namespace

base::Optional<FeedbackVectorRef> JSFunctionRef::feedback_vector(
    CompilationDependencies* dependencies) const {
  return raw_feedback_cell(dependencies).feedback_vector();
}

int JSFunctionRef::InitialMapInstanceSizeWithMinSlack(
    CompilationDependencies* dependencies) const {
  if (data_->should_access_heap()) {
    return object()->ComputeInstanceSizeWithMinSlack(broker()->isolate());
  }
  RecordConsistentJSFunctionViewDependencyIfNeeded(
      broker(), *this, data()->AsJSFunction(),
      JSFunctionData::kInitialMapInstanceSizeWithMinSlack);
  return data()->AsJSFunction()->initial_map_instance_size_with_min_slack();
}

OddballType MapRef::oddball_type() const {
  if (instance_type() != ODDBALL_TYPE) {
    return OddballType::kNone;
  }
  Factory* f = broker()->isolate()->factory();
  if (equals(MakeRef(broker(), f->undefined_map()))) {
    return OddballType::kUndefined;
  }
  if (equals(MakeRef(broker(), f->null_map()))) {
    return OddballType::kNull;
  }
  if (equals(MakeRef(broker(), f->boolean_map()))) {
    return OddballType::kBoolean;
  }
  if (equals(MakeRef(broker(), f->the_hole_map()))) {
    return OddballType::kHole;
  }
  if (equals(MakeRef(broker(), f->uninitialized_map()))) {
    return OddballType::kUninitialized;
  }
  DCHECK(equals(MakeRef(broker(), f->termination_exception_map())) ||
         equals(MakeRef(broker(), f->arguments_marker_map())) ||
         equals(MakeRef(broker(), f->optimized_out_map())) ||
         equals(MakeRef(broker(), f->stale_register_map())));
  return OddballType::kOther;
}

FeedbackCellRef FeedbackVectorRef::GetClosureFeedbackCell(int index) const {
  return MakeRefAssumeMemoryFence(broker(),
                                  object()->closure_feedback_cell(index));
}

base::Optional<ObjectRef> JSObjectRef::raw_properties_or_hash() const {
  return TryMakeRef(broker(), object()->raw_properties_or_hash());
}

base::Optional<ObjectRef> JSObjectRef::RawInobjectPropertyAt(
    FieldIndex index) const {
  CHECK(index.is_inobject());
  Handle<Object> value;
  {
    DisallowGarbageCollection no_gc;
    PtrComprCageBase cage_base = broker()->cage_base();
    Map current_map = object()->map(cage_base, kAcquireLoad);

    // If the map changed in some prior GC epoch, our {index} could be
    // outside the valid bounds of the cached map.
    if (*map().object() != current_map) {
      TRACE_BROKER_MISSING(broker(), "Map change detected in " << *this);
      return {};
    }

    base::Optional<Object> maybe_value =
        object()->RawInobjectPropertyAt(cage_base, current_map, index);
    if (!maybe_value.has_value()) {
      TRACE_BROKER_MISSING(broker(),
                           "Unable to safely read property in " << *this);
      return {};
    }
    value = broker()->CanonicalPersistentHandle(maybe_value.value());
  }
  return TryMakeRef(broker(), value);
}

bool JSObjectRef::IsElementsTenured(const FixedArrayBaseRef& elements) {
  return !ObjectInYoungGeneration(*elements.object());
}

FieldIndex MapRef::GetFieldIndexFor(InternalIndex descriptor_index) const {
  CHECK_LT(descriptor_index.as_int(), NumberOfOwnDescriptors());
  FieldIndex result = FieldIndex::ForDescriptor(*object(), descriptor_index);
  DCHECK(result.is_inobject());
  return result;
}

int MapRef::GetInObjectPropertyOffset(int i) const {
  return object()->GetInObjectPropertyOffset(i);
}

PropertyDetails MapRef::GetPropertyDetails(
    InternalIndex descriptor_index) const {
  CHECK_LT(descriptor_index.as_int(), NumberOfOwnDescriptors());
  return instance_descriptors().GetPropertyDetails(descriptor_index);
}

NameRef MapRef::GetPropertyKey(InternalIndex descriptor_index) const {
  CHECK_LT(descriptor_index.as_int(), NumberOfOwnDescriptors());
  return instance_descriptors().GetPropertyKey(descriptor_index);
}

bool MapRef::IsFixedCowArrayMap() const {
  Handle<Map> fixed_cow_array_map =
      ReadOnlyRoots(broker()->isolate()).fixed_cow_array_map_handle();
  return equals(MakeRef(broker(), fixed_cow_array_map));
}

bool MapRef::IsPrimitiveMap() const {
  return instance_type() <= LAST_PRIMITIVE_HEAP_OBJECT_TYPE;
}

MapRef MapRef::FindFieldOwner(InternalIndex descriptor_index) const {
  CHECK_LT(descriptor_index.as_int(), NumberOfOwnDescriptors());
  // TODO(solanes, v8:7790): Consider caching the result of the field owner on
  // the descriptor array. It would be useful for same map as well as any
  // other map sharing that descriptor array.
  return MakeRefAssumeMemoryFence(
      broker(),
      object()->FindFieldOwner(broker()->isolate(), descriptor_index));
}

base::Optional<ObjectRef> StringRef::GetCharAsStringOrUndefined(
    uint32_t index) const {
  String maybe_char;
  auto result = ConcurrentLookupIterator::TryGetOwnChar(
      &maybe_char, broker()->isolate(), broker()->local_isolate(), *object(),
      index);

  if (result == ConcurrentLookupIterator::kGaveUp) {
    TRACE_BROKER_MISSING(broker(), "StringRef::GetCharAsStringOrUndefined on "
                                       << *this << " at index " << index);
    return {};
  }

  DCHECK_EQ(result, ConcurrentLookupIterator::kPresent);
  return TryMakeRef(broker(), maybe_char);
}

bool StringRef::SupportedStringKind() const {
  return IsInternalizedString() || object()->IsThinString();
}

base::Optional<Handle<String>> StringRef::ObjectIfContentAccessible() {
  if (data_->kind() == kNeverSerializedHeapObject && !SupportedStringKind()) {
    TRACE_BROKER_MISSING(
        broker(),
        "content for kNeverSerialized unsupported string kind " << *this);
    return base::nullopt;
  } else {
    return object();
  }
}

base::Optional<int> StringRef::length() const {
  if (data_->kind() == kNeverSerializedHeapObject && !SupportedStringKind()) {
    TRACE_BROKER_MISSING(
        broker(),
        "length for kNeverSerialized unsupported string kind " << *this);
    return base::nullopt;
  } else {
    return object()->length(kAcquireLoad);
  }
}

base::Optional<uint16_t> StringRef::GetFirstChar() const { return GetChar(0); }

base::Optional<uint16_t> StringRef::GetChar(int index) const {
  if (data_->kind() == kNeverSerializedHeapObject && !SupportedStringKind()) {
    TRACE_BROKER_MISSING(
        broker(),
        "get char for kNeverSerialized unsupported string kind " << *this);
    return base::nullopt;
  }

  if (!broker()->IsMainThread()) {
    return object()->Get(index, broker()->local_isolate());
  } else {
    // TODO(solanes, v8:7790): Remove this case once the inlining phase is
    // done concurrently all the time.
    return object()->Get(index);
  }
}

base::Optional<double> StringRef::ToNumber() {
  if (data_->kind() == kNeverSerializedHeapObject && !SupportedStringKind()) {
    TRACE_BROKER_MISSING(
        broker(),
        "number for kNeverSerialized unsupported string kind " << *this);
    return base::nullopt;
  }

  return TryStringToDouble(broker()->local_isolate(), object());
}

int ArrayBoilerplateDescriptionRef::constants_elements_length() const {
  return object()->constant_elements().length();
}

base::Optional<ObjectRef> FixedArrayRef::TryGet(int i) const {
  Handle<Object> value;
  {
    DisallowGarbageCollection no_gc;
    CHECK_GE(i, 0);
    value = broker()->CanonicalPersistentHandle(object()->get(i, kAcquireLoad));
    if (i >= object()->length(kAcquireLoad)) {
      // Right-trimming happened.
      CHECK_LT(i, length());
      return {};
    }
  }
  return TryMakeRef(broker(), value);
}

Float64 FixedDoubleArrayRef::GetFromImmutableFixedDoubleArray(int i) const {
  STATIC_ASSERT(ref_traits<FixedDoubleArray>::ref_serialization_kind ==
                RefSerializationKind::kNeverSerialized);
  CHECK(data_->should_access_heap());
  return Float64::FromBits(object()->get_representation(i));
}

Handle<ByteArray> BytecodeArrayRef::SourcePositionTable() const {
  return broker()->CanonicalPersistentHandle(object()->SourcePositionTable());
}

Address BytecodeArrayRef::handler_table_address() const {
  return reinterpret_cast<Address>(
      object()->handler_table().GetDataStartAddress());
}

int BytecodeArrayRef::handler_table_size() const {
  return object()->handler_table().length();
}

#define IF_ACCESS_FROM_HEAP_C(name)  \
  if (data_->should_access_heap()) { \
    return object()->name();         \
  }

#define IF_ACCESS_FROM_HEAP(result, name)                     \
  if (data_->should_access_heap()) {                          \
    return MakeRef(broker(), result::cast(object()->name())); \
  }

// Macros for defining a const getter that, depending on the data kind,
// either looks into the heap or into the serialized data.
#define BIMODAL_ACCESSOR(holder, result, name)                             \
  result##Ref holder##Ref::name() const {                                  \
    IF_ACCESS_FROM_HEAP(result, name);                                     \
    return result##Ref(broker(), ObjectRef::data()->As##holder()->name()); \
  }

// Like above except that the result type is not an XYZRef.
#define BIMODAL_ACCESSOR_C(holder, result, name)    \
  result holder##Ref::name() const {                \
    IF_ACCESS_FROM_HEAP_C(name);                    \
    return ObjectRef::data()->As##holder()->name(); \
  }

// Like above but for BitFields.
#define BIMODAL_ACCESSOR_B(holder, field, name, BitField)              \
  typename BitField::FieldType holder##Ref::name() const {             \
    IF_ACCESS_FROM_HEAP_C(name);                                       \
    return BitField::decode(ObjectRef::data()->As##holder()->field()); \
  }

#define HEAP_ACCESSOR_C(holder, result, name) \
  result holder##Ref::name() const { return object()->name(); }

#define HEAP_ACCESSOR_B(holder, field, name, BitField)     \
  typename BitField::FieldType holder##Ref::name() const { \
    return object()->name();                               \
  }

ObjectRef AllocationSiteRef::nested_site() const {
  return MakeRefAssumeMemoryFence(broker(), object()->nested_site());
}

HEAP_ACCESSOR_C(AllocationSite, bool, CanInlineCall)
HEAP_ACCESSOR_C(AllocationSite, bool, PointsToLiteral)
HEAP_ACCESSOR_C(AllocationSite, ElementsKind, GetElementsKind)
HEAP_ACCESSOR_C(AllocationSite, AllocationType, GetAllocationType)

BIMODAL_ACCESSOR_C(BigInt, uint64_t, AsUint64)

int BytecodeArrayRef::register_count() const {
  return object()->register_count();
}
int BytecodeArrayRef::parameter_count() const {
  return object()->parameter_count();
}
interpreter::Register
BytecodeArrayRef::incoming_new_target_or_generator_register() const {
  return object()->incoming_new_target_or_generator_register();
}

BIMODAL_ACCESSOR(HeapObject, Map, map)

HEAP_ACCESSOR_C(HeapNumber, double, value)

uint64_t HeapNumberRef::value_as_bits() const {
  return object()->value_as_bits(kRelaxedLoad);
}

JSReceiverRef JSBoundFunctionRef::bound_target_function() const {
  // Immutable after initialization.
  return MakeRefAssumeMemoryFence(broker(), object()->bound_target_function());
}

ObjectRef JSBoundFunctionRef::bound_this() const {
  // Immutable after initialization.
  return MakeRefAssumeMemoryFence(broker(), object()->bound_this());
}

FixedArrayRef JSBoundFunctionRef::bound_arguments() const {
  // Immutable after initialization.
  return MakeRefAssumeMemoryFence(broker(), object()->bound_arguments());
}

// Immutable after initialization.
HEAP_ACCESSOR_C(JSDataView, size_t, byte_length)

HEAP_ACCESSOR_B(Map, bit_field2, elements_kind, Map::Bits2::ElementsKindBits)
HEAP_ACCESSOR_B(Map, bit_field3, is_dictionary_map,
                Map::Bits3::IsDictionaryMapBit)
HEAP_ACCESSOR_B(Map, bit_field3, is_deprecated, Map::Bits3::IsDeprecatedBit)
HEAP_ACCESSOR_B(Map, bit_field3, NumberOfOwnDescriptors,
                Map::Bits3::NumberOfOwnDescriptorsBits)
HEAP_ACCESSOR_B(Map, bit_field3, is_migration_target,
                Map::Bits3::IsMigrationTargetBit)
HEAP_ACCESSOR_B(Map, bit_field, has_prototype_slot,
                Map::Bits1::HasPrototypeSlotBit)
HEAP_ACCESSOR_B(Map, bit_field, is_access_check_needed,
                Map::Bits1::IsAccessCheckNeededBit)
HEAP_ACCESSOR_B(Map, bit_field, is_callable, Map::Bits1::IsCallableBit)
HEAP_ACCESSOR_B(Map, bit_field, has_indexed_interceptor,
                Map::Bits1::HasIndexedInterceptorBit)
HEAP_ACCESSOR_B(Map, bit_field, is_constructor, Map::Bits1::IsConstructorBit)
HEAP_ACCESSOR_B(Map, bit_field, is_undetectable, Map::Bits1::IsUndetectableBit)
BIMODAL_ACCESSOR_C(Map, int, instance_size)
HEAP_ACCESSOR_C(Map, int, NextFreePropertyIndex)
BIMODAL_ACCESSOR_C(Map, int, UnusedPropertyFields)
HEAP_ACCESSOR_C(Map, InstanceType, instance_type)
BIMODAL_ACCESSOR_C(Map, bool, is_abandoned_prototype_map)

int ObjectBoilerplateDescriptionRef::size() const { return object()->size(); }

BIMODAL_ACCESSOR(PropertyCell, Object, value)
BIMODAL_ACCESSOR_C(PropertyCell, PropertyDetails, property_details)

FixedArrayRef RegExpBoilerplateDescriptionRef::data() const {
  // Immutable after initialization.
  return MakeRefAssumeMemoryFence(broker(), object()->data());
}

StringRef RegExpBoilerplateDescriptionRef::source() const {
  // Immutable after initialization.
1495 return MakeRefAssumeMemoryFence(broker(), object()->source()); 1496} 1497 1498int RegExpBoilerplateDescriptionRef::flags() const { return object()->flags(); } 1499 1500base::Optional<CallHandlerInfoRef> FunctionTemplateInfoRef::call_code() const { 1501 HeapObject call_code = object()->call_code(kAcquireLoad); 1502 if (call_code.IsUndefined()) return base::nullopt; 1503 return TryMakeRef(broker(), CallHandlerInfo::cast(call_code)); 1504} 1505 1506bool FunctionTemplateInfoRef::is_signature_undefined() const { 1507 return object()->signature().IsUndefined(broker()->isolate()); 1508} 1509 1510HEAP_ACCESSOR_C(FunctionTemplateInfo, bool, accept_any_receiver) 1511 1512HolderLookupResult FunctionTemplateInfoRef::LookupHolderOfExpectedType( 1513 MapRef receiver_map) { 1514 const HolderLookupResult not_found; 1515 if (!receiver_map.IsJSObjectMap() || (receiver_map.is_access_check_needed() && 1516 !object()->accept_any_receiver())) { 1517 return not_found; 1518 } 1519 1520 Handle<FunctionTemplateInfo> expected_receiver_type; 1521 { 1522 DisallowGarbageCollection no_gc; 1523 HeapObject signature = object()->signature(); 1524 if (signature.IsUndefined()) { 1525 return HolderLookupResult(CallOptimization::kHolderIsReceiver); 1526 } 1527 expected_receiver_type = broker()->CanonicalPersistentHandle( 1528 FunctionTemplateInfo::cast(signature)); 1529 if (expected_receiver_type->IsTemplateFor(*receiver_map.object())) { 1530 return HolderLookupResult(CallOptimization::kHolderIsReceiver); 1531 } 1532 if (!receiver_map.IsJSGlobalProxyMap()) return not_found; 1533 } 1534 1535 HeapObjectRef prototype = receiver_map.prototype(); 1536 if (prototype.IsNull()) return not_found; 1537 if (!expected_receiver_type->IsTemplateFor(prototype.object()->map())) { 1538 return not_found; 1539 } 1540 return HolderLookupResult(CallOptimization::kHolderFound, 1541 prototype.AsJSObject()); 1542} 1543 1544ObjectRef CallHandlerInfoRef::data() const { 1545 return MakeRefAssumeMemoryFence(broker(), object()->data()); 1546} 1547 1548HEAP_ACCESSOR_C(ScopeInfo, int, ContextLength) 1549HEAP_ACCESSOR_C(ScopeInfo, bool, HasContextExtensionSlot) 1550HEAP_ACCESSOR_C(ScopeInfo, bool, HasOuterScopeInfo) 1551 1552ScopeInfoRef ScopeInfoRef::OuterScopeInfo() const { 1553 return MakeRefAssumeMemoryFence(broker(), object()->OuterScopeInfo()); 1554} 1555 1556HEAP_ACCESSOR_C(SharedFunctionInfo, Builtin, builtin_id) 1557 1558BytecodeArrayRef SharedFunctionInfoRef::GetBytecodeArray() const { 1559 CHECK(HasBytecodeArray()); 1560 BytecodeArray bytecode_array; 1561 if (!broker()->IsMainThread()) { 1562 bytecode_array = object()->GetBytecodeArray(broker()->local_isolate()); 1563 } else { 1564 bytecode_array = object()->GetBytecodeArray(broker()->isolate()); 1565 } 1566 return MakeRefAssumeMemoryFence(broker(), bytecode_array); 1567} 1568 1569#define DEF_SFI_ACCESSOR(type, name) \ 1570 HEAP_ACCESSOR_C(SharedFunctionInfo, type, name) 1571BROKER_SFI_FIELDS(DEF_SFI_ACCESSOR) 1572#undef DEF_SFI_ACCESSOR 1573 1574SharedFunctionInfo::Inlineability SharedFunctionInfoRef::GetInlineability() 1575 const { 1576 return broker()->IsMainThread() 1577 ? 
SharedFunctionInfo::Inlineability SharedFunctionInfoRef::GetInlineability()
    const {
  return broker()->IsMainThread()
             ? object()->GetInlineability(broker()->isolate())
             : object()->GetInlineability(broker()->local_isolate());
}

ObjectRef FeedbackCellRef::value() const {
  DCHECK(data_->should_access_heap());
  return MakeRefAssumeMemoryFence(broker(), object()->value(kAcquireLoad));
}

base::Optional<ObjectRef> MapRef::GetStrongValue(
    InternalIndex descriptor_index) const {
  CHECK_LT(descriptor_index.as_int(), NumberOfOwnDescriptors());
  return instance_descriptors().GetStrongValue(descriptor_index);
}

DescriptorArrayRef MapRef::instance_descriptors() const {
  return MakeRefAssumeMemoryFence(
      broker(),
      object()->instance_descriptors(broker()->isolate(), kAcquireLoad));
}

HeapObjectRef MapRef::prototype() const {
  return MakeRefAssumeMemoryFence(broker(),
                                  HeapObject::cast(object()->prototype()));
}

MapRef MapRef::FindRootMap() const {
  // TODO(solanes, v8:7790): Consider caching the result of the root map.
  return MakeRefAssumeMemoryFence(broker(),
                                  object()->FindRootMap(broker()->isolate()));
}

ObjectRef MapRef::GetConstructor() const {
  // Immutable after initialization.
  return MakeRefAssumeMemoryFence(broker(), object()->GetConstructor());
}

HeapObjectRef MapRef::GetBackPointer() const {
  // Immutable after initialization.
  return MakeRefAssumeMemoryFence(
      broker(), HeapObject::cast(object()->GetBackPointer()));
}

bool JSTypedArrayRef::is_on_heap() const {
  // Underlying field written 1. during initialization or 2. with
  // release-store.
  return object()->is_on_heap(kAcquireLoad);
}

size_t JSTypedArrayRef::length() const {
  CHECK(!is_on_heap());
  // Immutable after initialization.
  return object()->length();
}

HeapObjectRef JSTypedArrayRef::buffer() const {
  CHECK(!is_on_heap());
  // Immutable after initialization.
  return MakeRef<HeapObject>(broker(), object()->buffer());
}

void* JSTypedArrayRef::data_ptr() const {
  CHECK(!is_on_heap());
  // Underlying field written 1. during initialization or 2. protected by the
  // is_on_heap release/acquire semantics (external_pointer store
  // happens-before base_pointer store, and this external_pointer load
  // happens-after base_pointer load).
  STATIC_ASSERT(JSTypedArray::kOffHeapDataPtrEqualsExternalPointer);
  return object()->DataPtr();
}

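// Informal sketch of the ordering argument above (not part of the original
// comment): the writing side stores external_pointer first and then
// release-stores base_pointer; this reading side acquire-loads base_pointer
// (via is_on_heap(kAcquireLoad) in the CHECK above) and only then loads
// external_pointer (via DataPtr()). The release/acquire pair thus ensures
// that observing is_on_heap() == false implies the external data pointer has
// been fully published.
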
bool MapRef::IsInobjectSlackTrackingInProgress() const {
  return object()->IsInobjectSlackTrackingInProgress();
}

int MapRef::constructor_function_index() const {
  return object()->GetConstructorFunctionIndex();
}

bool MapRef::is_stable() const {
  IF_ACCESS_FROM_HEAP_C(is_stable);
  return !Map::Bits3::IsUnstableBit::decode(data()->AsMap()->bit_field3());
}

bool MapRef::CanBeDeprecated() const { return object()->CanBeDeprecated(); }

bool MapRef::CanTransition() const { return object()->CanTransition(); }

int MapRef::GetInObjectPropertiesStartInWords() const {
  return object()->GetInObjectPropertiesStartInWords();
}

int MapRef::GetInObjectProperties() const {
  IF_ACCESS_FROM_HEAP_C(GetInObjectProperties);
  return data()->AsMap()->in_object_properties();
}

bool StringRef::IsExternalString() const {
  return object()->IsExternalString();
}

Address CallHandlerInfoRef::callback() const {
  return v8::ToCData<Address>(object()->callback());
}

ZoneVector<Address> FunctionTemplateInfoRef::c_functions() const {
  return GetCFunctions(FixedArray::cast(object()->GetCFunctionOverloads()),
                       broker()->zone());
}

ZoneVector<const CFunctionInfo*> FunctionTemplateInfoRef::c_signatures() const {
  return GetCSignatures(FixedArray::cast(object()->GetCFunctionOverloads()),
                        broker()->zone());
}

bool StringRef::IsSeqString() const { return object()->IsSeqString(); }

ScopeInfoRef NativeContextRef::scope_info() const {
  // The scope_info is immutable after initialization.
  return MakeRefAssumeMemoryFence(broker(), object()->scope_info());
}

MapRef NativeContextRef::GetFunctionMapFromIndex(int index) const {
  DCHECK_GE(index, Context::FIRST_FUNCTION_MAP_INDEX);
  DCHECK_LE(index, Context::LAST_FUNCTION_MAP_INDEX);
  CHECK_LT(index, object()->length());
  return MakeRefAssumeMemoryFence(
      broker(), Map::cast(object()->get(index, kAcquireLoad)));
}

MapRef NativeContextRef::GetInitialJSArrayMap(ElementsKind kind) const {
  switch (kind) {
    case PACKED_SMI_ELEMENTS:
      return js_array_packed_smi_elements_map();
    case HOLEY_SMI_ELEMENTS:
      return js_array_holey_smi_elements_map();
    case PACKED_DOUBLE_ELEMENTS:
      return js_array_packed_double_elements_map();
    case HOLEY_DOUBLE_ELEMENTS:
      return js_array_holey_double_elements_map();
    case PACKED_ELEMENTS:
      return js_array_packed_elements_map();
    case HOLEY_ELEMENTS:
      return js_array_holey_elements_map();
    default:
      UNREACHABLE();
  }
}

#define DEF_NATIVE_CONTEXT_ACCESSOR(ResultType, Name)              \
  ResultType##Ref NativeContextRef::Name() const {                 \
    return MakeRefAssumeMemoryFence(                               \
        broker(), ResultType::cast(object()->Name(kAcquireLoad))); \
  }
BROKER_NATIVE_CONTEXT_FIELDS(DEF_NATIVE_CONTEXT_ACCESSOR)
#undef DEF_NATIVE_CONTEXT_ACCESSOR

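// For illustration (a sketch; the actual field list lives in heap-refs.h):
// assuming BROKER_NATIVE_CONTEXT_FIELDS contains an entry such as
// V(JSGlobalObject, global_object) -- consistent with the use of
// NativeContextRef::global_object() further below -- the macro above expands
// to roughly:
//
//   JSGlobalObjectRef NativeContextRef::global_object() const {
//     return MakeRefAssumeMemoryFence(
//         broker(),
//         JSGlobalObject::cast(object()->global_object(kAcquireLoad)));
//   }
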
base::Optional<JSFunctionRef> NativeContextRef::GetConstructorFunction(
    const MapRef& map) const {
  CHECK(map.IsPrimitiveMap());
  switch (map.constructor_function_index()) {
    case Map::kNoConstructorFunctionIndex:
      return base::nullopt;
    case Context::BIGINT_FUNCTION_INDEX:
      return bigint_function();
    case Context::BOOLEAN_FUNCTION_INDEX:
      return boolean_function();
    case Context::NUMBER_FUNCTION_INDEX:
      return number_function();
    case Context::STRING_FUNCTION_INDEX:
      return string_function();
    case Context::SYMBOL_FUNCTION_INDEX:
      return symbol_function();
    default:
      UNREACHABLE();
  }
}

bool ObjectRef::IsNull() const { return object()->IsNull(); }

bool ObjectRef::IsNullOrUndefined() const {
  if (IsSmi()) return false;
  OddballType type = AsHeapObject().map().oddball_type();
  return type == OddballType::kNull || type == OddballType::kUndefined;
}

bool ObjectRef::IsTheHole() const {
  return IsHeapObject() &&
         AsHeapObject().map().oddball_type() == OddballType::kHole;
}

base::Optional<bool> ObjectRef::TryGetBooleanValue() const {
  if (data_->should_access_heap()) {
    return object()->BooleanValue(broker()->isolate());
  }
  if (IsSmi()) return AsSmi() != 0;
  return data()->AsHeapObject()->TryGetBooleanValue(broker());
}

Maybe<double> ObjectRef::OddballToNumber() const {
  OddballType type = AsHeapObject().map().oddball_type();

  switch (type) {
    case OddballType::kBoolean: {
      ObjectRef true_ref = MakeRef<Object>(
          broker(), broker()->isolate()->factory()->true_value());
      return this->equals(true_ref) ? Just(1.0) : Just(0.0);
    }
    case OddballType::kUndefined: {
      return Just(std::numeric_limits<double>::quiet_NaN());
    }
    case OddballType::kNull: {
      return Just(0.0);
    }
    default: {
      return Nothing<double>();
    }
  }
}

bool ObjectRef::should_access_heap() const {
  return data()->should_access_heap();
}

base::Optional<ObjectRef> JSObjectRef::GetOwnConstantElement(
    const FixedArrayBaseRef& elements_ref, uint32_t index,
    CompilationDependencies* dependencies) const {
  base::Optional<Object> maybe_element = GetOwnConstantElementFromHeap(
      *elements_ref.object(), map().elements_kind(), index);
  if (!maybe_element.has_value()) return {};

  base::Optional<ObjectRef> result =
      TryMakeRef(broker(), maybe_element.value());
  if (result.has_value()) {
    dependencies->DependOnOwnConstantElement(*this, index, *result);
  }
  return result;
}

base::Optional<Object> JSObjectRef::GetOwnConstantElementFromHeap(
    FixedArrayBase elements, ElementsKind elements_kind, uint32_t index) const {
  DCHECK_LE(index, JSObject::kMaxElementIndex);

  Handle<JSObject> holder = object();

  // This block is carefully constructed to avoid Ref creation and access since
  // this method may be called after the broker has retired.
  // The relaxed `length` read is safe to use in this case since:
  // - GetOwnConstantElement only detects a constant for JSArray holders if
  //   the array is frozen/sealed.
  // - Frozen/sealed arrays can't change length.
  // - We've already seen a map with frozen/sealed elements_kinds (above).
  // - The release-load of that map ensures we read the newest value
  //   of `length` below.
  if (holder->IsJSArray()) {
    uint32_t array_length;
    if (!JSArray::cast(*holder)
             .length(broker()->isolate(), kRelaxedLoad)
             .ToArrayLength(&array_length)) {
      return {};
    }
    // See also ElementsAccessorBase::GetMaxIndex.
    if (index >= array_length) return {};
  }

  Object maybe_element;
  auto result = ConcurrentLookupIterator::TryGetOwnConstantElement(
      &maybe_element, broker()->isolate(), broker()->local_isolate(), *holder,
      elements, elements_kind, index);

  if (result == ConcurrentLookupIterator::kGaveUp) {
    TRACE_BROKER_MISSING(broker(), "JSObject::GetOwnConstantElement on "
                                       << *this << " at index " << index);
    return {};
  } else if (result == ConcurrentLookupIterator::kNotPresent) {
    return {};
  }

  DCHECK_EQ(result, ConcurrentLookupIterator::kPresent);
  return maybe_element;
}

base::Optional<ObjectRef> JSObjectRef::GetOwnFastDataProperty(
    Representation field_representation, FieldIndex index,
    CompilationDependencies* dependencies) const {
  base::Optional<ObjectRef> result = GetOwnFastDataPropertyFromHeap(
      broker(), *this, field_representation, index);
  if (result.has_value()) {
    dependencies->DependOnOwnConstantDataProperty(
        *this, map(), field_representation, index, *result);
  }
  return result;
}

base::Optional<ObjectRef> JSObjectRef::GetOwnDictionaryProperty(
    InternalIndex index, CompilationDependencies* dependencies) const {
  CHECK(index.is_found());
  base::Optional<ObjectRef> result =
      GetOwnDictionaryPropertyFromHeap(broker(), object(), index);
  if (result.has_value()) {
    dependencies->DependOnOwnConstantDictionaryProperty(*this, index, *result);
  }
  return result;
}

ObjectRef JSArrayRef::GetBoilerplateLength() const {
  // Safe to read concurrently because:
  // - boilerplates are immutable after initialization.
  // - boilerplates are published into the feedback vector.
  // These facts also mean we can expect a valid value.
  return length_unsafe().value();
}

base::Optional<ObjectRef> JSArrayRef::length_unsafe() const {
  return TryMakeRef(broker(),
                    object()->length(broker()->isolate(), kRelaxedLoad));
}

base::Optional<ObjectRef> JSArrayRef::GetOwnCowElement(
    FixedArrayBaseRef elements_ref, uint32_t index) const {
  // Note: we'd like to check `elements_ref == elements()` here, but due to
  // concurrency this may not hold. The code below must be able to deal with
  // concurrent `elements` modifications.

  // Due to concurrency, the kind read here may not be consistent with
  // `elements_ref`. The caller has to guarantee consistency at runtime by
  // other means (e.g. through a runtime equality check or a compilation
  // dependency).
  ElementsKind elements_kind = map().elements_kind();

  // We only inspect fixed COW arrays, which may only occur for fast
  // smi/objects elements kinds.
  if (!IsSmiOrObjectElementsKind(elements_kind)) return {};
  DCHECK(IsFastElementsKind(elements_kind));
  if (!elements_ref.map().IsFixedCowArrayMap()) return {};

  // As the name says, the `length` read here is unsafe and may not match
  // `elements`. We rely on the invariant that any `length` change will
  // also result in an `elements` change to make this safe. The `elements`
  // consistency check in the caller thus also guards the value of `length`.
  base::Optional<ObjectRef> length_ref = length_unsafe();

  if (!length_ref.has_value()) return {};

  // Likewise we only deal with smi lengths.
  if (!length_ref->IsSmi()) return {};

  base::Optional<Object> result = ConcurrentLookupIterator::TryGetOwnCowElement(
      broker()->isolate(), *elements_ref.AsFixedArray().object(), elements_kind,
      length_ref->AsSmi(), index);
  if (!result.has_value()) return {};

  return TryMakeRef(broker(), result.value());
}

base::Optional<CellRef> SourceTextModuleRef::GetCell(int cell_index) const {
  return TryMakeRef(broker(), object()->GetCell(cell_index));
}

base::Optional<ObjectRef> SourceTextModuleRef::import_meta() const {
  return TryMakeRef(broker(), object()->import_meta(kAcquireLoad));
}

base::Optional<MapRef> HeapObjectRef::map_direct_read() const {
  PtrComprCageBase cage_base = broker()->cage_base();
  return TryMakeRef(broker(), object()->map(cage_base, kAcquireLoad),
                    kAssumeMemoryFence);
}

namespace {

OddballType GetOddballType(Isolate* isolate, Map map) {
  if (map.instance_type() != ODDBALL_TYPE) {
    return OddballType::kNone;
  }
  ReadOnlyRoots roots(isolate);
  if (map == roots.undefined_map()) {
    return OddballType::kUndefined;
  }
  if (map == roots.null_map()) {
    return OddballType::kNull;
  }
  if (map == roots.boolean_map()) {
    return OddballType::kBoolean;
  }
  if (map == roots.the_hole_map()) {
    return OddballType::kHole;
  }
  if (map == roots.uninitialized_map()) {
    return OddballType::kUninitialized;
  }
  DCHECK(map == roots.termination_exception_map() ||
         map == roots.arguments_marker_map() ||
         map == roots.optimized_out_map() || map == roots.stale_register_map());
  return OddballType::kOther;
}

}  // namespace

HeapObjectType HeapObjectRef::GetHeapObjectType() const {
  if (data_->should_access_heap()) {
    Map map = Handle<HeapObject>::cast(object())->map(broker()->cage_base());
    HeapObjectType::Flags flags(0);
    if (map.is_undetectable()) flags |= HeapObjectType::kUndetectable;
    if (map.is_callable()) flags |= HeapObjectType::kCallable;
    return HeapObjectType(map.instance_type(), flags,
                          GetOddballType(broker()->isolate(), map));
  }
  HeapObjectType::Flags flags(0);
  if (map().is_undetectable()) flags |= HeapObjectType::kUndetectable;
  if (map().is_callable()) flags |= HeapObjectType::kCallable;
  return HeapObjectType(map().instance_type(), flags, map().oddball_type());
}

base::Optional<JSObjectRef> AllocationSiteRef::boilerplate() const {
  if (!PointsToLiteral()) return {};
  DCHECK(data_->should_access_heap());
  return TryMakeRef(broker(), object()->boilerplate(kAcquireLoad));
}

base::Optional<FixedArrayBaseRef> JSObjectRef::elements(
    RelaxedLoadTag tag) const {
  return TryMakeRef(broker(), object()->elements(tag));
}

int FixedArrayBaseRef::length() const {
  IF_ACCESS_FROM_HEAP_C(length);
  return data()->AsFixedArrayBase()->length();
}

PropertyDetails DescriptorArrayRef::GetPropertyDetails(
    InternalIndex descriptor_index) const {
  return object()->GetDetails(descriptor_index);
}

NameRef DescriptorArrayRef::GetPropertyKey(
    InternalIndex descriptor_index) const {
  NameRef result = MakeRef(broker(), object()->GetKey(descriptor_index));
  CHECK(result.IsUniqueName());
  return result;
}

base::Optional<ObjectRef> DescriptorArrayRef::GetStrongValue(
    InternalIndex descriptor_index) const {
  HeapObject heap_object;
  if (!object()
           ->GetValue(descriptor_index)
           .GetHeapObjectIfStrong(&heap_object)) {
    return {};
  }
  // Since the descriptors in the descriptor array can be changed in-place
  // via DescriptorArray::Replace, we might get a value that we haven't seen
  // before.
  return TryMakeRef(broker(), heap_object);
}

base::Optional<FeedbackVectorRef> FeedbackCellRef::feedback_vector() const {
  ObjectRef contents = value();
  if (!contents.IsFeedbackVector()) return {};
  return contents.AsFeedbackVector();
}

base::Optional<SharedFunctionInfoRef> FeedbackCellRef::shared_function_info()
    const {
  base::Optional<FeedbackVectorRef> vector = feedback_vector();
  if (!vector.has_value()) return {};
  return vector->shared_function_info();
}

SharedFunctionInfoRef FeedbackVectorRef::shared_function_info() const {
  // Immutable after initialization.
  return MakeRefAssumeMemoryFence(broker(), object()->shared_function_info());
}

bool NameRef::IsUniqueName() const {
  // Must match Name::IsUniqueName.
  return IsInternalizedString() || IsSymbol();
}

Handle<Object> ObjectRef::object() const {
  return data_->object();
}

#ifdef DEBUG
#define DEF_OBJECT_GETTER(T)                                                 \
  Handle<T> T##Ref::object() const {                                         \
    return Handle<T>(reinterpret_cast<Address*>(data_->object().address())); \
  }
#else
#define DEF_OBJECT_GETTER(T)                                                 \
  Handle<T> T##Ref::object() const {                                         \
    return Handle<T>(reinterpret_cast<Address*>(data_->object().address())); \
  }
#endif  // DEBUG

HEAP_BROKER_OBJECT_LIST(DEF_OBJECT_GETTER)
#undef DEF_OBJECT_GETTER

JSHeapBroker* ObjectRef::broker() const { return broker_; }

ObjectData* ObjectRef::data() const {
  switch (broker()->mode()) {
    case JSHeapBroker::kDisabled:
      return data_;
    case JSHeapBroker::kSerializing:
      CHECK_NE(data_->kind(), kUnserializedHeapObject);
      return data_;
    case JSHeapBroker::kSerialized:
    case JSHeapBroker::kRetired:
      CHECK_NE(data_->kind(), kUnserializedHeapObject);
      return data_;
  }
}

template <class T>
typename TinyRef<T>::RefType TinyRef<T>::AsRef(JSHeapBroker* broker) const {
  if (data_->kind() == kUnserializedHeapObject &&
      broker->mode() != JSHeapBroker::kDisabled) {
    // Gotta reconstruct to avoid returning a stale unserialized ref.
    return MakeRefAssumeMemoryFence<T>(broker,
                                       Handle<T>::cast(data_->object()));
  }
  return TryMakeRef<T>(broker, data_).value();
}

template <class T>
Handle<T> TinyRef<T>::object() const {
  return Handle<T>::cast(data_->object());
}

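// TinyRef stores only the ObjectData pointer, while a full Ref additionally
// carries the broker pointer (see ObjectRef::broker() above); the
// STATIC_ASSERT below checks exactly that. A TinyRef is re-materialized into
// a full Ref via AsRef(broker), which presumably lets call sites hold many
// refs compactly and supply the broker only when needed (sketch of intent;
// see the declaration in heap-refs.h).
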
#define V(Name)                                                 \
  template class TinyRef<Name>;                                 \
  /* TinyRef should contain only one pointer. */                \
  STATIC_ASSERT(sizeof(TinyRef<Name>) == kSystemPointerSize);
HEAP_BROKER_OBJECT_LIST(V)
#undef V

#define JSFUNCTION_BIMODAL_ACCESSOR_WITH_DEP(Result, Name, UsedField)    \
  Result##Ref JSFunctionRef::Name(CompilationDependencies* dependencies) \
      const {                                                            \
    IF_ACCESS_FROM_HEAP(Result, Name);                                   \
    RecordConsistentJSFunctionViewDependencyIfNeeded(                    \
        broker(), *this, data()->AsJSFunction(), UsedField);             \
    return Result##Ref(broker(), data()->AsJSFunction()->Name());        \
  }

#define JSFUNCTION_BIMODAL_ACCESSOR_WITH_DEP_C(Result, Name, UsedField)     \
  Result JSFunctionRef::Name(CompilationDependencies* dependencies) const { \
    IF_ACCESS_FROM_HEAP_C(Name);                                            \
    RecordConsistentJSFunctionViewDependencyIfNeeded(                       \
        broker(), *this, data()->AsJSFunction(), UsedField);                \
    return data()->AsJSFunction()->Name();                                  \
  }

// Like JSFUNCTION_BIMODAL_ACCESSOR_WITH_DEP_C, but only depends on the field
// in question if its recorded value is "relevant". This is in order to
// tolerate certain state changes during compilation, e.g. from "has no
// feedback vector" (in which case we would simply do less optimization) to
// "has feedback vector".
#define JSFUNCTION_BIMODAL_ACCESSOR_WITH_DEP_RELEVANT_C(                    \
    Result, Name, UsedField, RelevantValue)                                 \
  Result JSFunctionRef::Name(CompilationDependencies* dependencies) const { \
    IF_ACCESS_FROM_HEAP_C(Name);                                            \
    Result const result = data()->AsJSFunction()->Name();                   \
    if (result == RelevantValue) {                                          \
      RecordConsistentJSFunctionViewDependencyIfNeeded(                     \
          broker(), *this, data()->AsJSFunction(), UsedField);              \
    }                                                                       \
    return result;                                                          \
  }

JSFUNCTION_BIMODAL_ACCESSOR_WITH_DEP_RELEVANT_C(bool, has_initial_map,
                                                JSFunctionData::kHasInitialMap,
                                                true)
JSFUNCTION_BIMODAL_ACCESSOR_WITH_DEP_RELEVANT_C(
    bool, has_instance_prototype, JSFunctionData::kHasInstancePrototype, true)
JSFUNCTION_BIMODAL_ACCESSOR_WITH_DEP_RELEVANT_C(
    bool, PrototypeRequiresRuntimeLookup,
    JSFunctionData::kPrototypeRequiresRuntimeLookup, false)

JSFUNCTION_BIMODAL_ACCESSOR_WITH_DEP(Map, initial_map,
                                     JSFunctionData::kInitialMap)
JSFUNCTION_BIMODAL_ACCESSOR_WITH_DEP(Object, instance_prototype,
                                     JSFunctionData::kInstancePrototype)
JSFUNCTION_BIMODAL_ACCESSOR_WITH_DEP(FeedbackCell, raw_feedback_cell,
                                     JSFunctionData::kFeedbackCell)

BIMODAL_ACCESSOR(JSFunction, Context, context)
BIMODAL_ACCESSOR(JSFunction, SharedFunctionInfo, shared)

#undef JSFUNCTION_BIMODAL_ACCESSOR_WITH_DEP
#undef JSFUNCTION_BIMODAL_ACCESSOR_WITH_DEP_C

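// For reference, the has_initial_map accessor generated above expands
// (modulo formatting) to roughly the following; the consistent-JSFunction-view
// dependency is only recorded when the function does have an initial map,
// since going from "no initial map" to "has initial map" merely means we
// optimized less than we could have:
//
//   bool JSFunctionRef::has_initial_map(
//       CompilationDependencies* dependencies) const {
//     IF_ACCESS_FROM_HEAP_C(has_initial_map);
//     bool const result = data()->AsJSFunction()->has_initial_map();
//     if (result == true) {
//       RecordConsistentJSFunctionViewDependencyIfNeeded(
//           broker(), *this, data()->AsJSFunction(),
//           JSFunctionData::kHasInitialMap);
//     }
//     return result;
//   }
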
CodeRef JSFunctionRef::code() const {
  CodeT code = object()->code(kAcquireLoad);
  // Safe to do a relaxed conversion to Code here since CodeT::code field is
  // modified only by GC and the CodeT was acquire-loaded.
  return MakeRefAssumeMemoryFence(broker(), FromCodeT(code, kRelaxedLoad));
}

NativeContextRef JSFunctionRef::native_context() const {
  return MakeRefAssumeMemoryFence(broker(),
                                  context().object()->native_context());
}

base::Optional<FunctionTemplateInfoRef>
SharedFunctionInfoRef::function_template_info() const {
  if (!object()->IsApiFunction()) return {};
  return TryMakeRef(broker(), FunctionTemplateInfo::cast(
                                  object()->function_data(kAcquireLoad)));
}

int SharedFunctionInfoRef::context_header_size() const {
  return object()->scope_info().ContextHeaderLength();
}

int SharedFunctionInfoRef::context_parameters_start() const {
  return object()->scope_info().ParametersStartIndex();
}

ScopeInfoRef SharedFunctionInfoRef::scope_info() const {
  return MakeRefAssumeMemoryFence(broker(),
                                  object()->scope_info(kAcquireLoad));
}

base::Optional<MapRef> JSObjectRef::GetObjectCreateMap() const {
  Handle<Map> map_handle = Handle<Map>::cast(map().object());
  // Note: implemented as an acquire-load.
  if (!map_handle->is_prototype_map()) return {};

  Handle<Object> maybe_proto_info = broker()->CanonicalPersistentHandle(
      map_handle->prototype_info(kAcquireLoad));
  if (!maybe_proto_info->IsPrototypeInfo()) return {};

  MaybeObject maybe_object_create_map =
      Handle<PrototypeInfo>::cast(maybe_proto_info)
          ->object_create_map(kAcquireLoad);
  if (!maybe_object_create_map->IsWeak()) return {};

  return MapRef(broker(),
                broker()->GetOrCreateData(
                    maybe_object_create_map->GetHeapObjectAssumeWeak(),
                    kAssumeMemoryFence));
}

bool PropertyCellRef::Cache() const {
  if (data_->should_access_heap()) return true;
  CHECK(broker()->mode() == JSHeapBroker::kSerializing ||
        broker()->mode() == JSHeapBroker::kSerialized);
  return data()->AsPropertyCell()->Cache(broker());
}

bool NativeContextRef::GlobalIsDetached() const {
  ObjectRef proxy_proto = global_proxy_object().map().prototype();
  return !proxy_proto.equals(global_object());
}

base::Optional<PropertyCellRef> JSGlobalObjectRef::GetPropertyCell(
    NameRef const& name) const {
  base::Optional<PropertyCell> maybe_cell =
      ConcurrentLookupIterator::TryGetPropertyCell(
          broker()->isolate(), broker()->local_isolate_or_isolate(),
          broker()->target_native_context().global_object().object(),
          name.object());
  if (!maybe_cell.has_value()) return {};
  return TryMakeRef(broker(), *maybe_cell);
}

std::ostream& operator<<(std::ostream& os, const ObjectRef& ref) {
  if (!FLAG_concurrent_recompilation) {
    // We cannot be in a background thread so it's safe to read the heap.
    AllowHandleDereference allow_handle_dereference;
    return os << ref.data() << " {" << ref.object() << "}";
  } else if (ref.data_->should_access_heap()) {
    return os << ref.data() << " {" << ref.object() << "}";
  } else {
    return os << ref.data();
  }
}

unsigned CodeRef::GetInlinedBytecodeSize() const {
  unsigned value = object()->inlined_bytecode_size();
  if (value > 0) {
    // Don't report inlined bytecode size if the code object was already
    // deoptimized.
    value = object()->marked_for_deoptimization() ? 0 : value;
  }
  return value;
}

#undef BIMODAL_ACCESSOR
#undef BIMODAL_ACCESSOR_B
#undef BIMODAL_ACCESSOR_C
#undef HEAP_ACCESSOR_B
#undef HEAP_ACCESSOR_C
#undef IF_ACCESS_FROM_HEAP
#undef IF_ACCESS_FROM_HEAP_C
#undef TRACE
#undef TRACE_MISSING

}  // namespace compiler
}  // namespace internal
}  // namespace v8