Lines Matching defs:Heap (each entry below is prefixed with its line number in the source file)
118 Isolate* Heap::GetIsolateFromWritableObject(HeapObject object) {
120 third_party_heap::Heap::GetIsolate(object.address()));
124 // These are outside the Heap class so they can be forward-declared
127 return Heap::PageFlagsAreConsistent(object);
140 Heap::GenerationalBarrierSlow(object, slot, value);
144 Heap::WriteBarrierForCodeSlow(host);
149 Heap::GenerationalBarrierForCodeSlow(host, rinfo, object);
152 void Heap_GenerationalEphemeronKeyBarrierSlow(Heap* heap,
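The barrier entries above (lines 140-152) are the out-of-line slow paths of V8's generational write barrier. As a hedged standalone model of the decision those paths serve (the page size, the flag layout, and the remembered-set container here are illustrative assumptions, not V8's actual ones):

```cpp
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <unordered_set>

// Toy page layout: pages are 2^18-byte aligned and the header carries a
// young-generation bit, mirroring the page-flag test a real barrier does.
constexpr uintptr_t kPageSize = uintptr_t{1} << 18;
constexpr uintptr_t kPageMask = ~(kPageSize - 1);

struct PageHeader { bool in_young_generation; };

PageHeader* PageOf(uintptr_t addr) {
  return reinterpret_cast<PageHeader*>(addr & kPageMask);
}

// Remembered set of old-to-new slots, rescanned by the next scavenge.
std::unordered_set<uintptr_t> remembered_slots;

// After writing `value` into the slot at `slot` inside `host`: record the
// slot iff the write created an old-to-new pointer; other writes need no
// bookkeeping, which is what keeps the barrier fast path cheap.
void GenerationalBarrier(uintptr_t host, uintptr_t slot, uintptr_t value) {
  if (PageOf(value)->in_young_generation &&
      !PageOf(host)->in_young_generation) {
    remembered_slots.insert(slot);
  }
}

int main() {
  auto* old_page =
      static_cast<PageHeader*>(std::aligned_alloc(kPageSize, kPageSize));
  auto* new_page =
      static_cast<PageHeader*>(std::aligned_alloc(kPageSize, kPageSize));
  old_page->in_young_generation = false;
  new_page->in_young_generation = true;
  uintptr_t host = reinterpret_cast<uintptr_t>(old_page) + 64;
  uintptr_t value = reinterpret_cast<uintptr_t>(new_page) + 64;
  GenerationalBarrier(host, /*slot=*/host + 8, value);
  std::printf("remembered slots: %zu\n", remembered_slots.size());  // 1
  std::free(old_page);
  std::free(new_page);
}
```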
158 void Heap::SetConstructStubCreateDeoptPCOffset(int pc_offset) {
163 void Heap::SetConstructStubInvokeDeoptPCOffset(int pc_offset) {
168 void Heap::SetInterpreterEntryReturnPCOffset(int pc_offset) {
173 void Heap::SetSerializedObjects(FixedArray objects) {
178 void Heap::SetSerializedGlobalProxySizes(FixedArray sizes) {
183 void Heap::SetBasicBlockProfilingData(Handle<ArrayList> list) {
187 bool Heap::GCCallbackTuple::operator==(
188 const Heap::GCCallbackTuple& other) const {
194 ScavengeTaskObserver(Heap* heap, intptr_t step_size)
202 Heap* heap_;
205 Heap::Heap()
231 Heap::~Heap() = default;
233 size_t Heap::MaxReserved() {
240 size_t Heap::YoungGenerationSizeFromOldGenerationSize(size_t old_generation) {
252 size_t Heap::HeapSizeFromPhysicalMemory(uint64_t physical_memory) {
269 void Heap::GenerationSizesFromHeapSize(size_t heap_size,
293 size_t Heap::MinYoungGenerationSize() {
297 size_t Heap::MinOldGenerationSize() {
303 size_t Heap::AllocatorLimitOnMaxOldGenerationSize() {
314 size_t Heap::MaxOldGenerationSize(uint64_t physical_memory) {
319 constexpr bool x64_bit = Heap::kHeapLimitMultiplier >= 2;
328 size_t Heap::YoungGenerationSizeFromSemiSpaceSize(size_t semi_space_size) {
332 size_t Heap::SemiSpaceSizeFromYoungGenerationSize(
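The size helpers at lines 240-332 convert between heap, old-generation, young-generation, and semi-space sizes. A minimal sketch of the semi-space round-trip, assuming the young generation is exactly two semispaces (from-space plus to-space); the real helpers also budget for the new large-object space:

```cpp
#include <cstddef>

// Assumption: young generation = from-space + to-space, nothing else.
constexpr size_t YoungFromSemiSpace(size_t semi_space_size) {
  return 2 * semi_space_size;
}
constexpr size_t SemiSpaceFromYoung(size_t young_generation_size) {
  return young_generation_size / 2;
}

static_assert(SemiSpaceFromYoung(YoungFromSemiSpace(size_t{1} << 20)) ==
                  size_t{1} << 20,
              "the two conversions round-trip");
```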
337 size_t Heap::Capacity() {
345 size_t Heap::OldGenerationCapacity() {
356 size_t Heap::CommittedOldGenerationMemory() {
368 size_t Heap::CommittedMemoryOfUnmapper() {
374 size_t Heap::CommittedMemory() {
384 size_t Heap::CommittedPhysicalMemory() {
395 size_t Heap::CommittedMemoryExecutable() {
401 void Heap::UpdateMaximumCommitted() {
410 size_t Heap::Available() {
423 bool Heap::CanExpandOldGeneration(size_t size) {
432 bool Heap::CanExpandOldGenerationBackground(LocalHeap* local_heap,
442 bool Heap::CanPromoteYoungAndExpandOldGeneration(size_t size) {
451 bool Heap::HasBeenSetUp() const {
456 GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
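SelectGarbageCollector (line 456) maps the space whose allocation failed to a collector. A simplified sketch of that decision shape; the exact rules V8 applies (GC flags, stress modes, incremental-marking state) are reduced here to a single boolean:

```cpp
enum class AllocationSpace { NEW_SPACE, OLD_SPACE, CODE_SPACE, LO_SPACE };
enum class GarbageCollector { SCAVENGER, MARK_COMPACTOR };

// Simplified rule: an allocation failure in new space can be served by a
// scavenge; any other space, or a forced full GC, needs the mark-compactor.
GarbageCollector SelectCollector(AllocationSpace space, bool force_full_gc) {
  if (force_full_gc) return GarbageCollector::MARK_COMPACTOR;
  return space == AllocationSpace::NEW_SPACE
             ? GarbageCollector::SCAVENGER
             : GarbageCollector::MARK_COMPACTOR;
}
```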
497 void Heap::SetGCState(HeapState state) {
501 bool Heap::IsGCWithoutStack() const {
506 void Heap::PrintShortHeapStatistics() {
587 void Heap::PrintFreeListsStats() {
662 void Heap::DumpJSONHeapStatistics(std::stringstream& stream) {
720 void Heap::ReportStatisticsAfterGC() {
729 class Heap::AllocationTrackerForDebugging final
737 explicit AllocationTrackerForDebugging(Heap* heap) : heap_(heap) {
820 Heap* const heap_;
829 void Heap::AddHeapObjectAllocationTracker(
837 void Heap::RemoveHeapObjectAllocationTracker(
847 void Heap::AddRetainingPathTarget(Handle<HeapObject> object,
862 bool Heap::IsRetainingPathTarget(HeapObject object,
879 void Heap::PrintRetainingPath(HeapObject target, RetainingPathOption option) {
933 if (Heap::InFromPage(object)) {
939 if (Heap::InFromPage(retainer)) {
951 void Heap::UpdateRetainersAfterScavenge() {
965 if (Heap::InFromPage(object)) {
977 void Heap::AddRetainer(HeapObject retainer, HeapObject object) {
991 void Heap::AddEphemeronRetainer(HeapObject retainer, HeapObject object) {
1004 void Heap::AddRetainingRoot(Root root, HeapObject object) {
1013 void Heap::IncrementDeferredCount(v8::Isolate::UseCounterFeature feature) {
1017 bool Heap::UncommitFromSpace() { return new_space_->UncommitFromSpace(); }
1019 void Heap::GarbageCollectionPrologue(
1066 void Heap::GarbageCollectionPrologueInSafepoint() {
1077 void Heap::UpdateNewSpaceAllocationCounter() {
1081 size_t Heap::NewSpaceAllocationCounter() {
1086 size_t Heap::SizeOfObjects() {
1095 size_t Heap::TotalGlobalHandlesSize() {
1099 size_t Heap::UsedGlobalHandlesSize() {
1103 void Heap::MergeAllocationSitePretenuringFeedback(
1128 void Heap::AddAllocationObserversToAllSpaces(
1142 void Heap::RemoveAllocationObserversFromAllSpaces(
1156 void Heap::PublishPendingAllocations() {
1258 void Heap::RemoveAllocationSitePretenuringFeedback(AllocationSite site) {
1262 bool Heap::DeoptMaybeTenuredAllocationSites() {
1267 void Heap::ProcessPretenuringFeedback() {
1351 void Heap::PretenureAllocationSiteOnNextCollection(AllocationSite site) {
1359 void Heap::InvalidateCodeDeoptimizationData(Code code) {
1364 void Heap::DeoptMarkedAllocationSites() {
1379 void Heap::GarbageCollectionEpilogueInSafepoint(GarbageCollector collector) {
1440 if (Heap::ShouldZapGarbage() || FLAG_clear_free_memory) {
1461 void Heap::GarbageCollectionEpilogue(GarbageCollector collector) {
1507 explicit GCCallbacksScope(Heap* heap) : heap_(heap) {
1515 Heap* heap_;
1518 void Heap::HandleGCRequest() {
1543 void Heap::ScheduleScavengeTaskIfNeeded() {
1548 void Heap::CollectAllGarbage(int flags, GarbageCollectionReason gc_reason,
1618 void Heap::CollectAllAvailableGarbage(GarbageCollectionReason gc_reason) {
1678 void Heap::PreciseCollectAllGarbage(int flags,
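CollectAllGarbage, CollectAllAvailableGarbage, and PreciseCollectAllGarbage (lines 1548-1678) are the internal full-GC entry points. Embedders reach the same machinery through the public API; a usage sketch against the classic v8.h header:

```cpp
#include <v8.h>

// Public, supported way to request an aggressive full collection; it
// funnels into the Heap's collect-all-available-garbage path.
void TrimHeap(v8::Isolate* isolate) {
  isolate->LowMemoryNotification();
}
```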
1687 void Heap::ReportExternalMemoryPressure() {
1713 CollectAllGarbage(i::Heap::kNoGCFlags,
1733 int64_t Heap::external_memory_limit() { return external_memory_.limit(); }
1735 Heap::DevToolsTraceEventScope::DevToolsTraceEventScope(Heap* heap,
1743 Heap::DevToolsTraceEventScope::~DevToolsTraceEventScope() {
1761 bool Heap::CollectGarbage(AllocationSpace space,
1977 int Heap::NotifyContextDisposed(bool dependant_context) {
1996 void Heap::StartIncrementalMarking(int gc_flags,
2026 void Heap::CompleteSweepingFull() {
2037 void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
2066 void Heap::StartIncrementalMarkingIfAllocationLimitIsReachedBackground() {
2079 void Heap::StartIdleIncrementalMarking(
2086 void Heap::MoveRange(HeapObject dst_object, const ObjectSlot dst_slot,
2127 // Instantiate Heap::CopyRange() for ObjectSlot and MaybeObjectSlot.
2128 template void Heap::CopyRange<ObjectSlot>(HeapObject dst_object,
2132 template void Heap::CopyRange<MaybeObjectSlot>(HeapObject dst_object,
2138 void Heap::CopyRange(HeapObject dst_object, const TSlot dst_slot,
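MoveRange and CopyRange (lines 2086-2138) copy runs of tagged slots and then publish the whole run to the write barrier once, rather than barriering each slot individually. A standalone model of that copy-then-barrier pattern (the slot type and the barrier function are stand-ins):

```cpp
#include <cstdint>
#include <cstring>

using Slot = uintptr_t;

// Stand-in for a range write barrier; in V8 this role is played by
// Heap::WriteBarrierForRange(). Here it is a no-op placeholder.
void BarrierForRange(const Slot* start, const Slot* end) {
  (void)start;
  (void)end;
}

// Copy `count` tagged slots, then run the barrier once over the whole
// destination run. memmove tolerates the overlap MoveRange must handle.
void CopyRange(Slot* dst, const Slot* src, size_t count) {
  std::memmove(dst, src, count * sizeof(Slot));
  BarrierForRange(dst, dst + count);
}
```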
2165 void Heap::EnsureFromSpaceIsCommitted() {
2174 bool Heap::CollectionRequested() {
2178 void Heap::CollectGarbageForBackground(LocalHeap* local_heap) {
2185 void Heap::CheckCollectionRequested() {
2194 void Heap::EnsureWasmCanonicalRttsSize(int length) {
2204 void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
2240 size_t Heap::PerformGarbageCollection(
2250 // completed here, as it will be triggered from Heap::Verify anyway.
2270 if (!Heap::IsYoungGenerationCollector(collector) &&
2391 void Heap::CollectSharedGarbage(GarbageCollectionReason gc_reason) {
2400 void Heap::PerformSharedGarbageCollection(Isolate* initiator,
2443 void Heap::CompleteSweepingYoung(GarbageCollector collector) {
2473 void Heap::EnsureSweepingCompleted(HeapObject object) {
2489 void Heap::RecomputeLimits(GarbageCollector collector) {
2564 void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) {
2574 void Heap::CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags) {
2584 void Heap::MarkCompact() {
2625 void Heap::MinorMarkCompact() {
2660 void Heap::MarkCompactEpilogue() {
2671 void Heap::MarkCompactPrologue() {
2682 void Heap::CheckNewSpaceExpansionCriteria() {
2693 void Heap::EvacuateYoungGeneration() {
2746 void Heap::Scavenge() {
2811 void Heap::ComputeFastPromotionMode() {
2828 void Heap::UnprotectAndRegisterMemoryChunk(MemoryChunk* chunk,
2841 void Heap::UnprotectAndRegisterMemoryChunk(HeapObject object,
2846 void Heap::UnregisterUnprotectedMemoryChunk(MemoryChunk* chunk) {
2850 void Heap::ProtectUnprotectedMemoryChunks() {
2860 bool Heap::ExternalStringTable::Contains(String string) {
2870 void Heap::UpdateExternalString(String string, size_t old_payload,
2886 String Heap::UpdateYoungReferenceInExternalStringTableEntry(Heap* heap,
2927 void Heap::ExternalStringTable::VerifyYoung() {
2950 void Heap::ExternalStringTable::Verify() {
2974 void Heap::ExternalStringTable::UpdateYoungReferences(
2975 Heap::ExternalStringTableUpdaterCallback updater_func) {
3008 void Heap::ExternalStringTable::PromoteYoung() {
3015 void Heap::ExternalStringTable::IterateYoung(RootVisitor* v) {
3024 void Heap::ExternalStringTable::IterateAll(RootVisitor* v) {
3034 void Heap::UpdateYoungReferencesInExternalStringTable(
3039 void Heap::ExternalStringTable::UpdateReferences(
3040 Heap::ExternalStringTableUpdaterCallback updater_func) {
3051 void Heap::UpdateReferencesInExternalStringTable(
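The ExternalStringTable methods (lines 2860-3055) walk the table with an updater callback that returns each entry's post-GC location, or a sentinel when the string is dead. A hedged sketch of that visit-and-compact pattern (the container, callback type, and sentinel are simplified stand-ins):

```cpp
#include <cstdint>
#include <vector>

using StringRef = uintptr_t;
constexpr StringRef kDropEntry = 0;  // sentinel for "string died"

// Updater returns the string's post-GC address, or kDropEntry if dead.
using UpdaterCallback = StringRef (*)(StringRef old_location);

void UpdateReferences(std::vector<StringRef>& table, UpdaterCallback update) {
  size_t live = 0;
  for (StringRef s : table) {
    StringRef moved = update(s);
    if (moved != kDropEntry) table[live++] = moved;  // compact in place
  }
  table.resize(live);
}
```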
3056 void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) {
3062 void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) {
3066 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
3072 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
3078 void Heap::ProcessDirtyJSFinalizationRegistries(WeakObjectRetainer* retainer) {
3089 void Heap::ProcessWeakListRoots(WeakObjectRetainer* retainer) {
3098 void Heap::ForeachAllocationSite(
3115 void Heap::ResetAllAllocationSitesDependentCode(AllocationType allocation) {
3132 void Heap::EvaluateOldSpaceLocalPretenuring(
3154 void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
3195 int Heap::GetMaximumFillToAlign(AllocationAlignment alignment) {
3208 int Heap::GetFillToAlign(Address address, AllocationAlignment alignment) {
3216 size_t Heap::GetCodeRangeReservedAreaSize() {
3220 HeapObject Heap::PrecedeWithFiller(HeapObject object, int filler_size) {
3226 HeapObject Heap::AlignWithFiller(HeapObject object, int object_size,
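GetMaximumFillToAlign, GetFillToAlign, PrecedeWithFiller, and AlignWithFiller (lines 3195-3226) implement alignment by padding allocations with filler objects. The core padding arithmetic, as a sketch with illustrative alignment values:

```cpp
#include <cstdint>

// Bytes of filler needed before `address` so the payload starts at an
// `alignment` boundary (alignment must be a power of two); zero when
// the address is already aligned.
constexpr int FillToAlign(uintptr_t address, uintptr_t alignment) {
  return static_cast<int>((alignment - (address & (alignment - 1))) &
                          (alignment - 1));
}

static_assert(FillToAlign(0x1000, 8) == 0, "already aligned");
static_assert(FillToAlign(0x1004, 8) == 4, "needs four filler bytes");
```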
3243 void* Heap::AllocateExternalBackingStore(
3272 void Heap::ConfigureInitialOldGenerationSize() {
3300 void Heap::FlushNumberStringCache() {
3310 HeapObject CreateFillerObjectAtImpl(Heap* heap, Address addr, int size,
3360 HeapObject Heap::CreateFillerObjectAt(Address addr, int size,
3369 void Heap::CreateFillerObjectAtBackground(
3376 HeapObject Heap::CreateFillerObjectAt(Address addr, int size,
3398 bool Heap::CanMoveObjectStart(HeapObject object) {
3416 bool Heap::IsImmovable(HeapObject object) {
3418 return third_party_heap::Heap::IsImmovable(object);
3424 bool Heap::IsLargeObject(HeapObject object) {
3426 return third_party_heap::Heap::InLargeObjectSpace(object.address()) ||
3427 third_party_heap::Heap::InSpace(object.address(), CODE_LO_SPACE);
3478 void Heap::OnMoveEvent(HeapObject target, HeapObject source,
3501 FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
3589 void Heap::RightTrimFixedArray(FixedArrayBase object, int elements_to_trim) {
3611 void Heap::RightTrimWeakFixedArray(WeakFixedArray object,
3622 void Heap::CreateFillerForArray(T object, int elements_to_trim,
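LeftTrimFixedArray, RightTrimFixedArray, and CreateFillerForArray (lines 3501-3622) shrink arrays in place by stamping a filler object over the freed words, so linear heap walks stay valid without moving the array. A toy model of right-trimming; the word layout and the two-word filler encoding are invented for illustration:

```cpp
#include <cstdint>
#include <cstdio>

// Invented layout: word 0 holds the length, followed by `length` element
// words. Trimming shrinks the length and stamps a filler header over the
// tail so a linear walk still sees a valid object there. Assumes at
// least two words are freed (real fillers also have one-word forms).
constexpr uintptr_t kFillerTag = 0xF1F1F1F1;

void RightTrim(uintptr_t* array, uintptr_t elements_to_trim) {
  uintptr_t new_length = array[0] - elements_to_trim;
  array[0] = new_length;
  array[1 + new_length] = kFillerTag;        // filler "map" word
  array[2 + new_length] = elements_to_trim;  // filler size (invented encoding)
}

int main() {
  uintptr_t heap[7] = {5, 10, 20, 30, 40, 50, 0};
  RightTrim(heap, 2);
  std::printf("length=%zu, filler tag at word %zu\n",
              static_cast<size_t>(heap[0]),
              static_cast<size_t>(1 + heap[0]));
}
```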
3688 void Heap::MakeHeapIterable() {
3705 void Heap::FreeLinearAllocationAreas() {
3718 void Heap::FreeSharedLinearAllocationAreas() {
3726 void Heap::FreeMainThreadSharedLinearAllocationAreas() {
3752 double Heap::ComputeMutatorUtilization(const char* tag, double mutator_speed,
3764 bool Heap::HasLowYoungGenerationAllocationRate() {
3773 bool Heap::HasLowOldGenerationAllocationRate() {
3782 bool Heap::HasLowEmbedderAllocationRate() {
3794 bool Heap::HasLowAllocationRate() {
3799 bool Heap::IsIneffectiveMarkCompact(size_t old_generation_size,
3808 void Heap::CheckIneffectiveMarkCompact(size_t old_generation_size,
3828 bool Heap::HasHighFragmentation() {
3834 bool Heap::HasHighFragmentation(size_t used, size_t committed) {
3842 bool Heap::ShouldOptimizeForMemoryUsage() {
3849 void Heap::ActivateMemoryReducerIfNeeded() {
3864 void Heap::ReduceNewSpaceSize() {
3880 size_t Heap::NewSpaceSize() { return new_space() ? new_space()->Size() : 0; }
3882 size_t Heap::NewSpaceCapacity() {
3886 void Heap::FinalizeIncrementalMarkingIfComplete(
3903 void Heap::FinalizeIncrementalMarkingAtomically(
3909 void Heap::InvokeIncrementalMarkingPrologueCallbacks() {
3920 void Heap::InvokeIncrementalMarkingEpilogueCallbacks() {
3931 void Heap::FinalizeIncrementalMarkingIncrementally(
3936 Heap::GarbageCollectionReasonToString(gc_reason));
3956 void Heap::NotifyObjectLayoutChange(
4026 void Heap::VerifyObjectLayoutChange(HeapObject object, Map new_map) {
4034 // Check that Heap::NotifyObjectLayoutChange was called for object transitions
4046 void Heap::VerifySafeMapTransition(HeapObject object, Map new_map) {
4057 // Heap::NotifyObjectLayoutChange doesn't need to be invoked because only
4094 GCIdleTimeHeapState Heap::ComputeHeapState() {
4101 bool Heap::PerformIdleTimeAction(GCIdleTimeAction action,
4123 void Heap::IdleNotificationEpilogue(GCIdleTimeAction action,
4155 double Heap::MonotonicallyIncreasingTimeInMs() const {
4161 void Heap::VerifyNewSpaceTop() {
4167 bool Heap::IdleNotification(int idle_time_in_ms) {
4174 bool Heap::IdleNotification(double deadline_in_seconds) {
4197 bool Heap::RecentIdleNotificationHappened() {
4205 explicit MemoryPressureInterruptTask(Heap* heap)
4217 Heap* heap_;
4220 void Heap::CheckMemoryPressure() {
4242 void Heap::CollectGarbageOnMemoryPressure() {
4278 void Heap::MemoryPressureNotification(MemoryPressureLevel level,
4300 void Heap::EagerlyFreeExternalMemory() {
4305 void Heap::AddNearHeapLimitCallback(v8::NearHeapLimitCallback callback,
4315 void Heap::RemoveNearHeapLimitCallback(v8::NearHeapLimitCallback callback,
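AddNearHeapLimitCallback and RemoveNearHeapLimitCallback (lines 4305-4315) back the embedder hook that can raise the heap limit instead of letting the process hit OOM. Usage through the public API; the doubling policy shown is purely illustrative, not a recommendation:

```cpp
#include <v8.h>

// Called when the heap is close to its limit; returning a larger value
// raises the limit, buying the embedder time to shed memory elsewhere.
size_t OnNearHeapLimit(void* data, size_t current_limit,
                       size_t initial_limit) {
  return current_limit * 2;  // illustrative growth policy
}

void Install(v8::Isolate* isolate) {
  isolate->AddNearHeapLimitCallback(OnNearHeapLimit, /*data=*/nullptr);
}
```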
4329 void Heap::AppendArrayBufferExtension(JSArrayBuffer object,
4331 // ArrayBufferSweeper is managing all counters and updating Heap counters.
4335 void Heap::DetachArrayBufferExtension(JSArrayBuffer object,
4337 // ArrayBufferSweeper is managing all counters and updating Heap counters.
4341 void Heap::AutomaticallyRestoreInitialHeapLimit(double threshold_percent) {
4346 bool Heap::InvokeNearHeapLimitCallback() {
4366 bool Heap::MeasureMemory(std::unique_ptr<v8::MeasureMemoryDelegate> delegate,
4381 std::unique_ptr<v8::MeasureMemoryDelegate> Heap::MeasureMemoryDelegate(
4388 void Heap::CollectCodeStatistics() {
4389 TRACE_EVENT0("v8", "Heap::CollectCodeStatistics");
4403 void Heap::Print() {
4412 void Heap::ReportCodeStatistics(const char* title) {
4420 const char* Heap::GarbageCollectionReasonToString(
4479 bool Heap::Contains(HeapObject value) const {
4497 bool Heap::ContainsCode(HeapObject value) const {
4509 bool Heap::SharedHeapContains(HeapObject value) const {
4516 bool Heap::ShouldBeInSharedOldSpace(HeapObject value) {
4519 if (Heap::InYoungGeneration(value)) return false;
4528 bool Heap::InSpace(HeapObject value, AllocationSpace space) const {
4530 return third_party_heap::Heap::InSpace(value.address(), space);
4558 bool Heap::IsShared() { return isolate()->is_shared(); }
4560 bool Heap::InSpaceSlow(Address addr, AllocationSpace space) const {
4588 bool Heap::IsValidAllocationSpace(AllocationSpace space) {
4605 void Heap::Verify() {
4653 void Heap::VerifyReadOnlyHeap() {
4741 DCHECK_IMPLIES(target->IsStrongOrWeak() && Heap::InYoungGeneration(target),
4742 Heap::InToPage(target));
4743 return target->IsStrongOrWeak() && Heap::InYoungGeneration(target) &&
4744 !Heap::InYoungGeneration(host);
4791 void Heap::VerifyRememberedSetFor(HeapObject object) {
4815 void Heap::VerifyCountersAfterSweeping() {
4823 void Heap::VerifyCountersBeforeConcurrentSweeping() {
4831 void Heap::VerifyCommittedPhysicalMemory() {
4840 void Heap::ZapFromSpace() {
4849 void Heap::ZapCodeObject(Address start_address, int size_in_bytes) {
4858 void Heap::RegisterCodeObject(Handle<Code> code) {
4867 void Heap::IterateWeakRoots(RootVisitor* v, base::EnumSet<SkipRoot> options) {
4892 void Heap::IterateSmiRoots(RootVisitor* v) {
4907 explicit ClearStaleLeftTrimmedHandlesVisitor(Heap* heap)
4970 Heap* heap_;
4977 void Heap::IterateRoots(RootVisitor* v, base::EnumSet<SkipRoot> options) {
5114 void Heap::IterateRootsIncludingClients(RootVisitor* v,
5126 void Heap::IterateWeakGlobalHandles(RootVisitor* v) {
5130 void Heap::IterateBuiltins(RootVisitor* v) {
5148 void Heap::IterateStackRoots(RootVisitor* v) {
5161 void Heap::ConfigureHeap(const v8::ResourceConstraints& constraints) {
5244 Heap::GenerationSizesFromHeapSize(
5322 void Heap::AddToRingBuffer(const char* string) {
5335 void Heap::GetFromRingBuffer(char* buffer) {
5344 void Heap::ConfigureHeapDefault() {
5349 void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
5386 size_t Heap::OldGenerationSizeOfObjects() {
5396 size_t Heap::EmbedderSizeOfObjects() const {
5402 size_t Heap::GlobalSizeOfObjects() {
5406 uint64_t Heap::AllocatedExternalMemorySinceMarkCompact() {
5410 bool Heap::AllocationLimitOvershotByLargeMargin() {
5444 bool Heap::ShouldOptimizeForLoadTime() {
5456 bool Heap::ShouldExpandOldGenerationOnSlowAllocation(LocalHeap* local_heap) {
5490 bool Heap::IsRetryOfFailedAllocation(LocalHeap* local_heap) {
5495 bool Heap::IsMainThreadParked(LocalHeap* local_heap) {
5500 Heap::HeapGrowingMode Heap::CurrentHeapGrowingMode() {
5502 return Heap::HeapGrowingMode::kMinimal;
5506 return Heap::HeapGrowingMode::kConservative;
5510 return Heap::HeapGrowingMode::kSlow;
5513 return Heap::HeapGrowingMode::kDefault;
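CurrentHeapGrowingMode (lines 5500-5513) picks how aggressively the heap limits may grow, in the severity order visible in the returns above. A sketch of that priority ladder, with the conditions V8 actually consults (memory-reducer state, optimize-for-memory, slow-growth hints) reduced to plain booleans:

```cpp
enum class HeapGrowingMode { kSlow, kConservative, kMinimal, kDefault };

// Highest severity wins; the predicates are stand-ins for the state
// V8 reads from the memory reducer and pressure notifications.
HeapGrowingMode CurrentGrowingMode(bool should_reduce_memory,
                                   bool optimize_for_memory_usage,
                                   bool grow_heap_slowly) {
  if (should_reduce_memory) return HeapGrowingMode::kMinimal;
  if (optimize_for_memory_usage) return HeapGrowingMode::kConservative;
  if (grow_heap_slowly) return HeapGrowingMode::kSlow;
  return HeapGrowingMode::kDefault;
}
```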
5516 base::Optional<size_t> Heap::GlobalMemoryAvailable() {
5527 double Heap::PercentToOldGenerationLimit() {
5536 double Heap::PercentToGlobalMemoryLimit() {
5552 Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() {
5644 bool Heap::ShouldStressCompaction() const {
5648 void Heap::EnableInlineAllocation() {
5662 void Heap::DisableInlineAllocation() {
5677 void Heap::SetUp(LocalHeap* main_thread_local_heap) {
5686 tp_heap_ = third_party_heap::Heap::New(isolate());
5775 void Heap::SetUpFromReadOnlyHeap(ReadOnlyHeap* ro_heap) {
5784 void Heap::ReplaceReadOnlySpace(SharedReadOnlySpace* space) {
5797 explicit StressConcurrentAllocationObserver(Heap* heap)
5812 Heap* heap_;
5815 void Heap::SetUpSpaces(LinearAllocationArea* new_allocation_info,
5852 if (Heap::AllocationTrackerForDebugging::IsNeeded()) {
5854 std::make_unique<Heap::AllocationTrackerForDebugging>(this);
5889 Heap* shared_heap = isolate()->shared_isolate()->heap();
5906 void Heap::InitializeHashSeed() {
5920 void Heap::InitializeOncePerProcess() {
5924 void Heap::PrintMaxMarkingLimitReached() {
5929 void Heap::PrintMaxNewSpaceSizeReached() {
5934 int Heap::NextStressMarkingLimit() {
5935 // Reuse Heap-global mutex as this getter is called from different threads on
5941 void Heap::NotifyDeserializationComplete() {
5965 void Heap::NotifyBootstrapComplete() {
5973 void Heap::NotifyOldGenerationExpansion(AllocationSpace space,
5994 void Heap::SetEmbedderHeapTracer(EmbedderHeapTracer* tracer) {
6001 void Heap::SetEmbedderRootsHandler(EmbedderRootsHandler* handler) {
6005 EmbedderRootsHandler* Heap::GetEmbedderRootsHandler() const {
6009 EmbedderHeapTracer* Heap::GetEmbedderHeapTracer() const {
6013 void Heap::AttachCppHeap(v8::CppHeap* cpp_heap) {
6019 void Heap::DetachCppHeap() {
6025 EmbedderHeapTracer::TraceFlags Heap::flags_for_embedder_tracer() const {
6034 const cppgc::EmbedderStackState* Heap::overriden_stack_state() const {
6039 void Heap::RegisterExternallyReferencedObject(Address* location) {
6057 void Heap::StartTearDown() {
6091 void Heap::TearDown() {
6097 // It's too late for Heap::Verify() here, as parts of the Isolate are
6205 void Heap::AddGCPrologueCallback(v8::Isolate::GCCallbackWithData callback,
6214 void Heap::RemoveGCPrologueCallback(v8::Isolate::GCCallbackWithData callback,
6228 void Heap::AddGCEpilogueCallback(v8::Isolate::GCCallbackWithData callback,
6237 void Heap::RemoveGCEpilogueCallback(v8::Isolate::GCCallbackWithData callback,
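AddGCPrologueCallback and friends (lines 6205-6237) back the embedder-facing GC callback API. A usage sketch against the public v8::Isolate surface; the callback body is illustrative:

```cpp
#include <v8.h>
#include <cstdio>

// Invoked at the start of each matching GC cycle.
void OnGCStart(v8::Isolate* isolate, v8::GCType type,
               v8::GCCallbackFlags flags, void* data) {
  std::printf("GC starting (type=%d)\n", static_cast<int>(type));
}

void Register(v8::Isolate* isolate) {
  isolate->AddGCPrologueCallback(OnGCStart, /*data=*/nullptr);
  // Symmetric removal, e.g. at teardown:
  // isolate->RemoveGCPrologueCallback(OnGCStart, nullptr);
}
```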
6252 Handle<WeakArrayList> CompactWeakArrayList(Heap* heap,
6282 void Heap::CompactWeakArrayLists() {
6314 void Heap::AddRetainedMap(Handle<NativeContext> context, Handle<Map> map) {
6335 void Heap::CompactRetainedMaps(WeakArrayList retained_maps) {
6362 void Heap::FatalProcessOutOfMemory(const char* location) {
6378 void Heap::PrintHandles() {
6401 void Heap::CheckHandleCount() {
6406 void Heap::ClearRecordedSlot(HeapObject object, ObjectSlot slot) {
6421 int Heap::InsertIntoRememberedSetFromCode(MemoryChunk* chunk, Address slot) {
6427 void Heap::VerifyClearedSlot(HeapObject object, ObjectSlot slot) {
6441 void Heap::VerifySlotRangeHasNoRecordedSlots(Address start, Address end) {
6450 void Heap::ClearRecordedSlotRange(Address start, Address end) {
6471 SpaceIterator::SpaceIterator(Heap* heap)
6502 explicit UnreachableObjectsFilter(Heap* heap) : heap_(heap) {
6622 Heap* heap_;
6629 Heap* heap, HeapObjectIterator::HeapObjectsFiltering filtering)
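SpaceIterator, the UnreachableObjectsFilter, and the HeapObjectIterator constructor (lines 6471-6629) support whole-heap object walks driven by a Next()-until-null protocol. A self-contained toy of that protocol (the object type and backing store are stand-ins):

```cpp
#include <cstddef>
#include <cstdio>
#include <vector>

// Toy model of the Next()-returns-null-at-end protocol the real
// HeapObjectIterator exposes over all spaces of the heap.
class ObjectIterator {
 public:
  explicit ObjectIterator(const std::vector<int>& objects)
      : objects_(objects) {}
  const int* Next() {
    return index_ < objects_.size() ? &objects_[index_++] : nullptr;
  }

 private:
  const std::vector<int>& objects_;
  size_t index_ = 0;
};

int main() {
  std::vector<int> heap = {1, 2, 3};
  ObjectIterator it(heap);
  for (const int* obj = it.Next(); obj != nullptr; obj = it.Next()) {
    std::printf("%d\n", *obj);
  }
}
```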
6696 void Heap::UpdateTotalGCTime(double duration) {
6702 void Heap::ExternalStringTable::CleanUpYoung() {
6723 void Heap::ExternalStringTable::CleanUpAll() {
6747 void Heap::ExternalStringTable::TearDown() {
6764 void Heap::RememberUnmappedPage(Address page, bool compacted) {
6776 size_t Heap::YoungArrayBufferBytes() {
6780 size_t Heap::OldArrayBufferBytes() {
6784 StrongRootsEntry* Heap::RegisterStrongRoots(const char* label,
6804 void Heap::UpdateStrongRoots(StrongRootsEntry* entry, FullObjectSlot start,
6810 void Heap::UnregisterStrongRoots(StrongRootsEntry* entry) {
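RegisterStrongRoots, UpdateStrongRoots, and UnregisterStrongRoots (lines 6784-6810) let native code expose a range of slots as extra GC roots. A toy model of the register/visit/unregister lifecycle; the list-based bookkeeping is an assumption about shape, not a copy of V8's:

```cpp
#include <cstdint>
#include <list>

struct StrongRootsEntry {
  const char* label;  // used for diagnostics
  uintptr_t* start;
  uintptr_t* end;
};

std::list<StrongRootsEntry> strong_roots;

StrongRootsEntry* RegisterStrongRoots(const char* label, uintptr_t* start,
                                      uintptr_t* end) {
  strong_roots.push_back({label, start, end});
  return &strong_roots.back();
}

void UnregisterStrongRoots(StrongRootsEntry* entry) {
  strong_roots.remove_if(
      [entry](const StrongRootsEntry& e) { return &e == entry; });
}

// During marking, every registered slot range is visited as a root set.
template <typename Visitor>
void VisitStrongRoots(Visitor&& visit) {
  for (const auto& e : strong_roots)
    for (uintptr_t* p = e.start; p != e.end; ++p) visit(p);
}
```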
6827 void Heap::SetBuiltinsConstantsTable(FixedArray cache) {
6831 void Heap::SetDetachedContexts(WeakArrayList detached_contexts) {
6835 void Heap::SetInterpreterEntryTrampolineForProfiling(Code code) {
6840 void Heap::PostFinalizationRegistryCleanupTaskIfNeeded() {
6853 void Heap::EnqueueDirtyJSFinalizationRegistry(
6881 MaybeHandle<JSFinalizationRegistry> Heap::DequeueDirtyJSFinalizationRegistry() {
6898 void Heap::RemoveDirtyFinalizationRegistriesOnContext(NativeContext context) {
6927 void Heap::KeepDuringJob(Handle<HeapObject> target) {
6941 void Heap::ClearKeptObjects() {
6945 size_t Heap::NumberOfTrackedHeapObjectTypes() {
6949 size_t Heap::ObjectCountAtLastGC(size_t index) {
6955 size_t Heap::ObjectSizeAtLastGC(size_t index) {
6961 bool Heap::GetObjectTypeName(size_t index, const char** object_type,
6985 size_t Heap::NumberOfNativeContexts() {
6996 std::vector<Handle<NativeContext>> Heap::FindAllNativeContexts() {
7007 std::vector<WeakArrayList> Heap::FindAllRetainedMaps() {
7018 size_t Heap::NumberOfDetachedContexts() {
7115 bool Heap::AllowedToBeMigrated(Map map, HeapObject obj, AllocationSpace dst) {
7150 size_t Heap::EmbedderAllocationCounter() const {
7156 void Heap::CreateObjectStats() {
7166 Map Heap::GcSafeMapOfCodeSpaceObject(HeapObject object) {
7180 Code Heap::GcSafeCastToCode(HeapObject object, Address inner_pointer) {
7187 bool Heap::GcSafeCodeContains(Code code, Address addr) {
7201 Code Heap::GcSafeFindCodeForInnerPointer(Address inner_pointer) {
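GcSafeMapOfCodeSpaceObject through GcSafeFindCodeForInnerPointer (lines 7166-7201) resolve an arbitrary instruction address back to the Code object containing it. The containment query reduces to a greatest-start-not-above lookup; a standalone sketch over a toy registry:

```cpp
#include <cstdint>
#include <map>

// Toy registry: start address -> one-past-end for each code object.
std::map<uintptr_t, uintptr_t> code_ranges;

// Returns the start of the code object whose range contains
// `inner_pointer`, or 0 if no registered range contains it.
uintptr_t FindCodeForInnerPointer(uintptr_t inner_pointer) {
  auto it = code_ranges.upper_bound(inner_pointer);
  if (it == code_ranges.begin()) return 0;
  --it;  // candidate with the greatest start <= inner_pointer
  return inner_pointer < it->second ? it->first : 0;
}
```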
7268 void Heap::WriteBarrierForCodeSlow(Code code) {
7278 void Heap::GenerationalBarrierSlow(HeapObject object, Address slot,
7284 void Heap::RecordEphemeronKeyWrite(EphemeronHashTable table, Address slot) {
7300 void Heap::EphemeronKeyWriteBarrierFromCode(Address raw_object,
7321 void Heap::WriteBarrierForRangeImpl(MemoryChunk* source_page, HeapObject object,
7338 Heap::InYoungGeneration(value_heap_object)) {
7353 // Instantiate Heap::WriteBarrierForRange() for ObjectSlot and MaybeObjectSlot.
7354 template void Heap::WriteBarrierForRange<ObjectSlot>(HeapObject object,
7357 template void Heap::WriteBarrierForRange<MaybeObjectSlot>(
7361 void Heap::WriteBarrierForRange(HeapObject object, TSlot start_slot,
7412 void Heap::GenerationalBarrierForCodeSlow(Code host, RelocInfo* rinfo,
7422 bool Heap::PageFlagsAreConsistent(HeapObject object) {
7448 Heap* heap = Heap::FromWritableHeapObject(object);
7458 void Heap::IncrementObjectCounters() {
7464 bool Heap::IsStressingScavenge() {
7505 void Heap::set_allocation_timeout(int allocation_timeout) {
7511 Heap* heap, Origin origin,