// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_JS_HEAP_BROKER_H_
#define V8_COMPILER_JS_HEAP_BROKER_H_

#include "src/base/compiler-specific.h"
#include "src/base/optional.h"
#include "src/base/platform/mutex.h"
#include "src/common/globals.h"
#include "src/compiler/access-info.h"
#include "src/compiler/feedback-source.h"
#include "src/compiler/globals.h"
#include "src/compiler/heap-refs.h"
#include "src/compiler/processed-feedback.h"
#include "src/compiler/refs-map.h"
#include "src/execution/local-isolate.h"
#include "src/handles/handles.h"
#include "src/handles/persistent-handles.h"
#include "src/heap/local-heap.h"
#include "src/heap/parked-scope.h"
#include "src/interpreter/bytecode-array-iterator.h"
#include "src/objects/code-kind.h"
#include "src/objects/feedback-vector.h"
#include "src/objects/function-kind.h"
#include "src/objects/objects.h"
#include "src/utils/address-map.h"
#include "src/utils/identity-map.h"
#include "src/utils/ostreams.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {

namespace maglev {
class MaglevCompilationInfo;
}

namespace compiler {

class ObjectRef;

std::ostream& operator<<(std::ostream& os, const ObjectRef& ref);

#define TRACE_BROKER(broker, x)                                      \
  do {                                                               \
    if (broker->tracing_enabled() && FLAG_trace_heap_broker_verbose) \
      StdoutStream{} << broker->Trace() << x << '\n';                \
  } while (false)

#define TRACE_BROKER_MEMORY(broker, x)                              \
  do {                                                              \
    if (broker->tracing_enabled() && FLAG_trace_heap_broker_memory) \
      StdoutStream{} << broker->Trace() << x << std::endl;          \
  } while (false)

#define TRACE_BROKER_MISSING(broker, x)                                        \
  do {                                                                         \
    if (broker->tracing_enabled())                                             \
      StdoutStream{} << broker->Trace() << "Missing " << x << " (" << __FILE__ \
                     << ":" << __LINE__ << ")" << std::endl;                   \
  } while (false)

struct PropertyAccessTarget {
  MapRef map;
  NameRef name;
  AccessMode mode;

  struct Hash {
    size_t operator()(const PropertyAccessTarget& pair) const {
      return base::hash_combine(
          base::hash_combine(pair.map.object().address(),
                             pair.name.object().address()),
          static_cast<int>(pair.mode));
    }
  };
  struct Equal {
    bool operator()(const PropertyAccessTarget& lhs,
                    const PropertyAccessTarget& rhs) const {
      return lhs.map.equals(rhs.map) && lhs.name.equals(rhs.name) &&
             lhs.mode == rhs.mode;
    }
  };
};

enum GetOrCreateDataFlag {
  // If set, a failure to create the data object results in a crash.
  kCrashOnError = 1 << 0,
  // If set, data construction assumes that the given object is protected by
  // a memory fence (e.g. acquire-release) and thus fields required for
  // construction (like Object::map) are safe to read. The protection can
  // extend to some other situations as well.
  kAssumeMemoryFence = 1 << 1,
};
using GetOrCreateDataFlags = base::Flags<GetOrCreateDataFlag>;
DEFINE_OPERATORS_FOR_FLAGS(GetOrCreateDataFlags)

class V8_EXPORT_PRIVATE JSHeapBroker {
 public:
  JSHeapBroker(Isolate* isolate, Zone* broker_zone, bool tracing_enabled,
               CodeKind code_kind);

  // For use only in tests, sets default values for some arguments. Avoids
  // churn when new flags are added.
  JSHeapBroker(Isolate* isolate, Zone* broker_zone)
      : JSHeapBroker(isolate, broker_zone, FLAG_trace_heap_broker,
                     CodeKind::TURBOFAN) {}

  ~JSHeapBroker();

  // The compilation target's native context. We need the setter because at
  // broker construction time we don't yet have the canonical handle.
  NativeContextRef target_native_context() const {
    return target_native_context_.value();
  }
  void SetTargetNativeContextRef(Handle<NativeContext> native_context);

  void InitializeAndStartSerializing();

  Isolate* isolate() const { return isolate_; }

  // The pointer compression cage base value used for decompression of all
  // tagged values except references to Code objects.
  PtrComprCageBase cage_base() const {
#if V8_COMPRESS_POINTERS
    return cage_base_;
#else
    return PtrComprCageBase{};
#endif  // V8_COMPRESS_POINTERS
  }

  Zone* zone() const { return zone_; }
  bool tracing_enabled() const { return tracing_enabled_; }

  NexusConfig feedback_nexus_config() const {
    return IsMainThread() ? NexusConfig::FromMainThread(isolate())
                          : NexusConfig::FromBackgroundThread(
                                isolate(), local_isolate()->heap());
  }

  enum BrokerMode { kDisabled, kSerializing, kSerialized, kRetired };
  BrokerMode mode() const { return mode_; }

  void StopSerializing();
  void Retire();
  bool SerializingAllowed() const;

  // Remember the local isolate and initialize its local heap with the
  // persistent and canonical handles provided by {info}.
  void AttachLocalIsolate(OptimizedCompilationInfo* info,
                          LocalIsolate* local_isolate);
  // Forget about the local isolate and pass the persistent and canonical
  // handles provided back to {info}. {info} is responsible for disposing of
  // them.
  void DetachLocalIsolate(OptimizedCompilationInfo* info);

  // TODO(v8:7700): Refactor this once the broker is no longer
  // Turbofan-specific.
  void AttachLocalIsolateForMaglev(maglev::MaglevCompilationInfo* info,
                                   LocalIsolate* local_isolate);
  void DetachLocalIsolateForMaglev(maglev::MaglevCompilationInfo* info);

  bool StackHasOverflowed() const;

#ifdef DEBUG
  void PrintRefsAnalysis() const;
#endif  // DEBUG

  // Returns the handle from the root index table for read-only heap objects.
  Handle<Object> GetRootHandle(Object object);

  // Never returns nullptr.
  ObjectData* GetOrCreateData(Handle<Object> object,
                              GetOrCreateDataFlags flags = {});
  ObjectData* GetOrCreateData(Object object, GetOrCreateDataFlags flags = {});

  // Gets data only if we have it. However, thin wrappers will be created for
  // smis, read-only objects and never-serialized objects.
  ObjectData* TryGetOrCreateData(Handle<Object> object,
                                 GetOrCreateDataFlags flags = {});
  ObjectData* TryGetOrCreateData(Object object,
                                 GetOrCreateDataFlags flags = {});

  // Check if {object} is any native context's %ArrayPrototype% or
  // %ObjectPrototype%.
  bool IsArrayOrObjectPrototype(const JSObjectRef& object) const;
  bool IsArrayOrObjectPrototype(Handle<JSObject> object) const;

  bool HasFeedback(FeedbackSource const& source) const;
  void SetFeedback(FeedbackSource const& source,
                   ProcessedFeedback const* feedback);
  FeedbackSlotKind GetFeedbackSlotKind(FeedbackSource const& source) const;

  ElementAccessFeedback const& ProcessFeedbackMapsForElementAccess(
      ZoneVector<MapRef>& maps, KeyedAccessMode const& keyed_mode,
      FeedbackSlotKind slot_kind);

  // Binary, comparison and for-in hints can be fully expressed via
  // an enum. Insufficient feedback is signaled by <Hint enum>::kNone.
  BinaryOperationHint GetFeedbackForBinaryOperation(
      FeedbackSource const& source);
  CompareOperationHint GetFeedbackForCompareOperation(
      FeedbackSource const& source);
  ForInHint GetFeedbackForForIn(FeedbackSource const& source);

  ProcessedFeedback const& GetFeedbackForCall(FeedbackSource const& source);
  ProcessedFeedback const& GetFeedbackForGlobalAccess(
      FeedbackSource const& source);
  ProcessedFeedback const& GetFeedbackForInstanceOf(
      FeedbackSource const& source);
  ProcessedFeedback const& GetFeedbackForArrayOrObjectLiteral(
      FeedbackSource const& source);
  ProcessedFeedback const& GetFeedbackForRegExpLiteral(
      FeedbackSource const& source);
  ProcessedFeedback const& GetFeedbackForTemplateObject(
      FeedbackSource const& source);
  ProcessedFeedback const& GetFeedbackForPropertyAccess(
      FeedbackSource const& source, AccessMode mode,
      base::Optional<NameRef> static_name);

  ProcessedFeedback const& ProcessFeedbackForBinaryOperation(
      FeedbackSource const& source);
  ProcessedFeedback const& ProcessFeedbackForCompareOperation(
      FeedbackSource const& source);
  ProcessedFeedback const& ProcessFeedbackForForIn(
      FeedbackSource const& source);

  bool FeedbackIsInsufficient(FeedbackSource const& source) const;

  base::Optional<NameRef> GetNameFeedback(FeedbackNexus const& nexus);

  PropertyAccessInfo GetPropertyAccessInfo(
      MapRef map, NameRef name, AccessMode access_mode,
      CompilationDependencies* dependencies);

  StringRef GetTypedArrayStringTag(ElementsKind kind);

  bool IsMainThread() const {
    return local_isolate() == nullptr || local_isolate()->is_main_thread();
  }

  LocalIsolate* local_isolate() const { return local_isolate_; }

  // TODO(jgruber): Consider always having local_isolate_ set to a real value.
  // This seems not entirely trivial since we currently reset local_isolate_ to
  // nullptr at some point in the JSHeapBroker lifecycle.
  LocalIsolate* local_isolate_or_isolate() const {
    return local_isolate() != nullptr ? local_isolate()
                                      : isolate()->AsLocalIsolate();
  }

  // Return the corresponding canonical persistent handle for {object}. Create
  // one if it does not exist.
  // If we have the canonical handles map, we can create the canonical &
  // persistent handle through it. This commonly happens during the Execute
  // phase.
  // If we don't, that means we are calling this method from serialization. If
  // that happens, we should be inside a canonical and a persistent handle
  // scope. Then, we would just use the regular handle creation.
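  //
  // Minimal usage sketch (the {map} argument and the broker pointer below are
  // illustrative, not part of this interface):
  //
  //   Handle<Map> canonical = broker->CanonicalPersistentHandle(map);
  //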
  template <typename T>
  Handle<T> CanonicalPersistentHandle(T object) {
    if (canonical_handles_) {
      Address address = object.ptr();
      if (Internals::HasHeapObjectTag(address)) {
        RootIndex root_index;
        if (root_index_map_.Lookup(address, &root_index)) {
          return Handle<T>(isolate_->root_handle(root_index).location());
        }
      }

      Object obj(address);
      auto find_result = canonical_handles_->FindOrInsert(obj);
      if (!find_result.already_exists) {
        // Allocate new PersistentHandle if one wasn't created before.
        DCHECK_NOT_NULL(local_isolate());
        *find_result.entry =
            local_isolate()->heap()->NewPersistentHandle(obj).location();
      }
      return Handle<T>(*find_result.entry);
    } else {
      return Handle<T>(object, isolate());
    }
  }

  template <typename T>
  Handle<T> CanonicalPersistentHandle(Handle<T> object) {
    if (object.is_null()) return object;  // Can't deref a null handle.
    return CanonicalPersistentHandle<T>(*object);
  }

  // Find the corresponding handle in the CanonicalHandlesMap. The entry must
  // be found.
  template <typename T>
  Handle<T> FindCanonicalPersistentHandleForTesting(Object object) {
    Address** entry = canonical_handles_->Find(object);
    return Handle<T>(*entry);
  }

  // Set the persistent handles and copy the canonical handles over to the
  // JSHeapBroker.
  void SetPersistentAndCopyCanonicalHandlesForTesting(
      std::unique_ptr<PersistentHandles> persistent_handles,
      std::unique_ptr<CanonicalHandlesMap> canonical_handles);
  std::string Trace() const;
  void IncrementTracingIndentation();
  void DecrementTracingIndentation();

  // Locks {mutex} through the duration of this scope iff it is the first
  // occurrence. This is done to have a recursive shared lock on {mutex}.
  class V8_NODISCARD RecursiveSharedMutexGuardIfNeeded {
   protected:
    RecursiveSharedMutexGuardIfNeeded(LocalIsolate* local_isolate,
                                      base::SharedMutex* mutex,
                                      int* mutex_depth_address)
        : mutex_depth_address_(mutex_depth_address),
          initial_mutex_depth_(*mutex_depth_address_),
          shared_mutex_guard_(local_isolate, mutex, initial_mutex_depth_ == 0) {
      (*mutex_depth_address_)++;
    }

    ~RecursiveSharedMutexGuardIfNeeded() {
      DCHECK_GE((*mutex_depth_address_), 1);
      (*mutex_depth_address_)--;
      DCHECK_EQ(initial_mutex_depth_, (*mutex_depth_address_));
    }

   private:
    int* const mutex_depth_address_;
    const int initial_mutex_depth_;
    ParkedSharedMutexGuardIf<base::kShared> shared_mutex_guard_;
  };

  class MapUpdaterGuardIfNeeded final
      : public RecursiveSharedMutexGuardIfNeeded {
   public:
    explicit MapUpdaterGuardIfNeeded(JSHeapBroker* broker)
        : RecursiveSharedMutexGuardIfNeeded(
              broker->local_isolate_or_isolate(),
              broker->isolate()->map_updater_access(),
              &broker->map_updater_mutex_depth_) {}
  };

  class BoilerplateMigrationGuardIfNeeded final
      : public RecursiveSharedMutexGuardIfNeeded {
   public:
    explicit BoilerplateMigrationGuardIfNeeded(JSHeapBroker* broker)
        : RecursiveSharedMutexGuardIfNeeded(
              broker->local_isolate_or_isolate(),
              broker->isolate()->boilerplate_migration_access(),
              &broker->boilerplate_migration_mutex_depth_) {}
  };

  // If this returns false, the object is guaranteed to be fully initialized
  // and thus safe to read from a memory safety perspective. The converse does
  // not necessarily hold.
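  //
  // Illustrative check (a sketch; {object} stands for whatever heap object the
  // caller is about to read):
  //
  //   if (!broker->ObjectMayBeUninitialized(object)) {
  //     // Reading {object}'s fields is memory-safe here.
  //   }
  //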
  bool ObjectMayBeUninitialized(Handle<Object> object) const;
  bool ObjectMayBeUninitialized(Object object) const;
  bool ObjectMayBeUninitialized(HeapObject object) const;

  void set_dependencies(CompilationDependencies* dependencies) {
    DCHECK_NOT_NULL(dependencies);
    DCHECK_NULL(dependencies_);
    dependencies_ = dependencies;
  }
  CompilationDependencies* dependencies() const {
    DCHECK_NOT_NULL(dependencies_);
    return dependencies_;
  }

 private:
  friend class HeapObjectRef;
  friend class ObjectRef;
  friend class ObjectData;
  friend class PropertyCellData;

  ProcessedFeedback const& GetFeedback(FeedbackSource const& source) const;
  const ProcessedFeedback& NewInsufficientFeedback(FeedbackSlotKind kind) const;

  // Bottleneck FeedbackNexus access here, for storage in the broker
  // or on-the-fly usage elsewhere in the compiler.
  ProcessedFeedback const& ReadFeedbackForArrayOrObjectLiteral(
      FeedbackSource const& source);
  ProcessedFeedback const& ReadFeedbackForBinaryOperation(
      FeedbackSource const& source) const;
  ProcessedFeedback const& ReadFeedbackForCall(FeedbackSource const& source);
  ProcessedFeedback const& ReadFeedbackForCompareOperation(
      FeedbackSource const& source) const;
  ProcessedFeedback const& ReadFeedbackForForIn(
      FeedbackSource const& source) const;
  ProcessedFeedback const& ReadFeedbackForGlobalAccess(
      FeedbackSource const& source);
  ProcessedFeedback const& ReadFeedbackForInstanceOf(
      FeedbackSource const& source);
  ProcessedFeedback const& ReadFeedbackForPropertyAccess(
      FeedbackSource const& source, AccessMode mode,
      base::Optional<NameRef> static_name);
  ProcessedFeedback const& ReadFeedbackForRegExpLiteral(
      FeedbackSource const& source);
  ProcessedFeedback const& ReadFeedbackForTemplateObject(
      FeedbackSource const& source);

  void CollectArrayAndObjectPrototypes();

  void set_persistent_handles(
      std::unique_ptr<PersistentHandles> persistent_handles) {
    DCHECK_NULL(ph_);
    ph_ = std::move(persistent_handles);
    DCHECK_NOT_NULL(ph_);
  }
  std::unique_ptr<PersistentHandles> DetachPersistentHandles() {
    DCHECK_NOT_NULL(ph_);
    return std::move(ph_);
  }

  void set_canonical_handles(
      std::unique_ptr<CanonicalHandlesMap> canonical_handles) {
    DCHECK_NULL(canonical_handles_);
    canonical_handles_ = std::move(canonical_handles);
    DCHECK_NOT_NULL(canonical_handles_);
  }

  std::unique_ptr<CanonicalHandlesMap> DetachCanonicalHandles() {
    DCHECK_NOT_NULL(canonical_handles_);
    return std::move(canonical_handles_);
  }

  // Copy the canonical handles over to the JSHeapBroker.
  void CopyCanonicalHandlesForTesting(
      std::unique_ptr<CanonicalHandlesMap> canonical_handles);

  Isolate* const isolate_;
#if V8_COMPRESS_POINTERS
  const PtrComprCageBase cage_base_;
#endif  // V8_COMPRESS_POINTERS
  Zone* const zone_;
  base::Optional<NativeContextRef> target_native_context_;
  RefsMap* refs_;
  RootIndexMap root_index_map_;
  ZoneUnorderedSet<Handle<JSObject>, Handle<JSObject>::hash,
                   Handle<JSObject>::equal_to>
      array_and_object_prototypes_;
  BrokerMode mode_ = kDisabled;
  bool const tracing_enabled_;
  CodeKind const code_kind_;
  std::unique_ptr<PersistentHandles> ph_;
  LocalIsolate* local_isolate_ = nullptr;
  std::unique_ptr<CanonicalHandlesMap> canonical_handles_;
  unsigned trace_indentation_ = 0;
  ZoneUnorderedMap<FeedbackSource, ProcessedFeedback const*,
                   FeedbackSource::Hash, FeedbackSource::Equal>
      feedback_;
  ZoneUnorderedMap<PropertyAccessTarget, PropertyAccessInfo,
                   PropertyAccessTarget::Hash, PropertyAccessTarget::Equal>
      property_access_infos_;

  CompilationDependencies* dependencies_ = nullptr;

  // The MapUpdater mutex is used in recursive patterns; for example,
  // ComputePropertyAccessInfo may call itself recursively. Thus we need to
  // emulate a recursive mutex, which we do by checking if this heap broker
  // instance already holds the mutex when a lock is requested. This field
  // holds the locking depth, i.e. how many times the mutex has been
  // recursively locked. Only the outermost locker actually locks underneath.
  int map_updater_mutex_depth_ = 0;
  // Likewise for boilerplate migrations.
  int boilerplate_migration_mutex_depth_ = 0;

  static constexpr uint32_t kMinimalRefsBucketCount = 8;
  STATIC_ASSERT(base::bits::IsPowerOfTwo(kMinimalRefsBucketCount));
  static constexpr uint32_t kInitialRefsBucketCount = 1024;
  STATIC_ASSERT(base::bits::IsPowerOfTwo(kInitialRefsBucketCount));
};

class V8_NODISCARD TraceScope {
 public:
  TraceScope(JSHeapBroker* broker, const char* label)
      : TraceScope(broker, static_cast<void*>(broker), label) {}

  TraceScope(JSHeapBroker* broker, ObjectData* data, const char* label)
      : TraceScope(broker, static_cast<void*>(data), label) {}

  TraceScope(JSHeapBroker* broker, void* subject, const char* label)
      : broker_(broker) {
    TRACE_BROKER(broker_, "Running " << label << " on " << subject);
    broker_->IncrementTracingIndentation();
  }

  ~TraceScope() { broker_->DecrementTracingIndentation(); }

 private:
  JSHeapBroker* const broker_;
};

// Scope that unparks the LocalHeap, if:
//   a) We have a JSHeapBroker,
//   b) Said JSHeapBroker has a LocalIsolate and thus a LocalHeap,
//   c) Said LocalHeap has been parked and
//   d) The given condition evaluates to true.
// Used, for example, when printing the graph with --trace-turbo with a
// previously parked LocalHeap.
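//
// Usage (a sketch; the flag shown is only an example of an extra condition):
//
//   UnparkedScopeIfNeeded scope(broker, FLAG_trace_turbo);
//   // ... code that needs an unparked LocalHeap only when tracing ...
//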
class V8_NODISCARD UnparkedScopeIfNeeded {
 public:
  explicit UnparkedScopeIfNeeded(JSHeapBroker* broker,
                                 bool extra_condition = true) {
    if (broker != nullptr && extra_condition) {
      LocalIsolate* local_isolate = broker->local_isolate();
      if (local_isolate != nullptr && local_isolate->heap()->IsParked()) {
        unparked_scope.emplace(local_isolate->heap());
      }
    }
  }

 private:
  base::Optional<UnparkedScope> unparked_scope;
};

template <class T,
          typename = std::enable_if_t<std::is_convertible<T*, Object*>::value>>
base::Optional<typename ref_traits<T>::ref_type> TryMakeRef(
    JSHeapBroker* broker, ObjectData* data) {
  if (data == nullptr) return {};
  return {typename ref_traits<T>::ref_type(broker, data)};
}

// Usage:
//
//  base::Optional<FooRef> ref = TryMakeRef(broker, o);
//  if (!ref.has_value()) return {};  // bailout
//
// or
//
//  FooRef ref = MakeRef(broker, o);
template <class T,
          typename = std::enable_if_t<std::is_convertible<T*, Object*>::value>>
base::Optional<typename ref_traits<T>::ref_type> TryMakeRef(
    JSHeapBroker* broker, T object, GetOrCreateDataFlags flags = {}) {
  ObjectData* data = broker->TryGetOrCreateData(object, flags);
  if (data == nullptr) {
    TRACE_BROKER_MISSING(broker, "ObjectData for " << Brief(object));
  }
  return TryMakeRef<T>(broker, data);
}

template <class T,
          typename = std::enable_if_t<std::is_convertible<T*, Object*>::value>>
base::Optional<typename ref_traits<T>::ref_type> TryMakeRef(
    JSHeapBroker* broker, Handle<T> object, GetOrCreateDataFlags flags = {}) {
  ObjectData* data = broker->TryGetOrCreateData(object, flags);
  if (data == nullptr) {
    DCHECK_EQ(flags & kCrashOnError, 0);
    TRACE_BROKER_MISSING(broker, "ObjectData for " << Brief(*object));
  }
  return TryMakeRef<T>(broker, data);
}

template <class T,
          typename = std::enable_if_t<std::is_convertible<T*, Object*>::value>>
typename ref_traits<T>::ref_type MakeRef(JSHeapBroker* broker, T object) {
  return TryMakeRef(broker, object, kCrashOnError).value();
}

template <class T,
          typename = std::enable_if_t<std::is_convertible<T*, Object*>::value>>
typename ref_traits<T>::ref_type MakeRef(JSHeapBroker* broker,
                                         Handle<T> object) {
  return TryMakeRef(broker, object, kCrashOnError).value();
}

template <class T,
          typename = std::enable_if_t<std::is_convertible<T*, Object*>::value>>
typename ref_traits<T>::ref_type MakeRefAssumeMemoryFence(JSHeapBroker* broker,
                                                          T object) {
  return TryMakeRef(broker, object, kAssumeMemoryFence | kCrashOnError).value();
}

template <class T,
          typename = std::enable_if_t<std::is_convertible<T*, Object*>::value>>
typename ref_traits<T>::ref_type MakeRefAssumeMemoryFence(JSHeapBroker* broker,
                                                          Handle<T> object) {
  return TryMakeRef(broker, object, kAssumeMemoryFence | kCrashOnError).value();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_JS_HEAP_BROKER_H_