Lines Matching defs:chunk

23 #include "src/heap/basic-memory-chunk.h"
38 #include "src/heap/memory-chunk-layout.h"
99 const MemoryChunk* chunk) = 0;
229 const MemoryChunk* chunk) override {
230 return marking_state_->bitmap(chunk);
1527 MemoryChunk* chunk = MemoryChunk::FromHeapObject(host);
1528 DCHECK(chunk->SweepingDone());
1529 RememberedSet<OLD_TO_NEW>::Insert<AccessMode::NON_ATOMIC>(chunk, slot);
2644 MemoryChunk* chunk = MemoryChunk::FromAddress(compiled_data_start);
2648 chunk, compiled_data_start, compiled_data_start + compiled_data_size,
2651 chunk, compiled_data_start, compiled_data_start + compiled_data_size,
2898 MemoryChunk* chunk = MemoryChunk::FromHeapObject(array);
2899 RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start, end,
2901 RememberedSet<OLD_TO_OLD>::RemoveRange(chunk, start, end,
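
The matches at 1527-1529, 2644-2651, and 2898-2901 show the two basic remembered-set maintenance operations: inserting a slot into a chunk's OLD_TO_NEW set, and removing a range of slots once the memory that held them is flushed or trimmed. Below is a minimal, self-contained model of that bookkeeping; RememberedSetModel, Chunk, the page-offset encoding, and the 256 KB chunk size are illustrative stand-ins, not V8's actual types or layout.

    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <set>

    // Illustrative stand-ins; V8 keys slot data off the MemoryChunk header and
    // stores offsets in bucketed bitmaps rather than a std::set.
    using Address = uintptr_t;
    constexpr Address kChunkSize = 256 * 1024;  // assumed page granularity

    struct Chunk {
      // Slots are recorded as offsets relative to the chunk start.
      std::set<uint32_t> old_to_new_slots;
      static Address BaseOf(Address addr) { return addr & ~(kChunkSize - 1); }
    };

    struct RememberedSetModel {
      std::map<Address, Chunk> chunks;  // chunk base -> per-chunk slot data

      // Record a slot address, mirroring RememberedSet<OLD_TO_NEW>::Insert.
      void Insert(Address slot) {
        Chunk& chunk = chunks[Chunk::BaseOf(slot)];
        chunk.old_to_new_slots.insert(
            static_cast<uint32_t>(slot - Chunk::BaseOf(slot)));
      }

      // Drop every recorded slot in [start, end), mirroring RemoveRange after
      // the object that owned those slots was flushed or right-trimmed.
      void RemoveRange(Address start, Address end) {
        for (Address base = Chunk::BaseOf(start); base < end; base += kChunkSize) {
          auto it = chunks.find(base);
          if (it == chunks.end()) continue;
          std::set<uint32_t>& slots = it->second.old_to_new_slots;
          uint32_t lo = start > base ? static_cast<uint32_t>(start - base) : 0;
          uint32_t hi = end - base < kChunkSize
                            ? static_cast<uint32_t>(end - base)
                            : static_cast<uint32_t>(kChunkSize);
          slots.erase(slots.lower_bound(lo), slots.lower_bound(hi));
        }
      }
    };

    int main() {
      RememberedSetModel rs;
      rs.Insert(0x40010);
      rs.Insert(0x40020);
      rs.RemoveRange(0x40018, 0x40030);  // trims the second slot only
      std::cout << rs.chunks[0x40000].old_to_new_slots.size() << "\n";  // prints 1
    }
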
3524 MemoryChunk* chunk = chunk_iterator.Next();
3527 DCHECK_NULL((chunk->slot_set<OLD_TO_OLD, AccessMode::ATOMIC>()));
3528 DCHECK_NULL((chunk->slot_set<OLD_TO_SHARED, AccessMode::NON_ATOMIC>()));
3529 DCHECK_NULL((chunk->typed_slot_set<OLD_TO_OLD, AccessMode::ATOMIC>()));
3530 DCHECK_NULL(chunk->invalidated_slots<OLD_TO_OLD>());
3531 DCHECK_NULL(chunk->invalidated_slots<OLD_TO_NEW>());
3568 static inline EvacuationMode ComputeEvacuationMode(MemoryChunk* chunk) {
3570 if (chunk->IsFlagSet(MemoryChunk::PAGE_NEW_OLD_PROMOTION))
3572 if (chunk->IsFlagSet(MemoryChunk::PAGE_NEW_NEW_PROMOTION))
3574 if (chunk->InYoungGeneration()) return kObjectsNewToOld;
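
Lines 3568-3574 are Evacuator::ComputeEvacuationMode, which turns page flags into one of four evacuation strategies. Here is a compilable sketch of that decision using a simplified Chunk and a plain flag bitset; only kObjectsNewToOld is visible in the matches, so the other three enumerator names (and the flag bit layout) are assumptions.

    #include <cstdint>
    #include <iostream>

    // Simplified stand-ins for MemoryChunk flags; not V8's actual bit layout.
    enum ChunkFlags : uint32_t {
      PAGE_NEW_OLD_PROMOTION = 1u << 0,
      PAGE_NEW_NEW_PROMOTION = 1u << 1,
      IN_YOUNG_GENERATION = 1u << 2,
    };

    struct Chunk {
      uint32_t flags = 0;
      bool IsFlagSet(uint32_t f) const { return (flags & f) != 0; }
      bool InYoungGeneration() const { return IsFlagSet(IN_YOUNG_GENERATION); }
    };

    // Enumerator names other than kObjectsNewToOld are assumed, not verified.
    enum EvacuationMode {
      kObjectsNewToOld,  // copy live young objects into old space
      kPageNewToOld,     // promote the whole page into old space in place
      kPageNewToNew,     // keep the whole page in new space in place
      kObjectsOldToOld,  // compact: move live old objects to another old page
    };

    // Mirrors the flag checks matched at 3568-3574: whole-page promotion flags
    // win over the generic "young page" case, and everything else is treated
    // as an old-space compaction candidate.
    static EvacuationMode ComputeEvacuationMode(const Chunk* chunk) {
      if (chunk->IsFlagSet(PAGE_NEW_OLD_PROMOTION)) return kPageNewToOld;
      if (chunk->IsFlagSet(PAGE_NEW_NEW_PROMOTION)) return kPageNewToNew;
      if (chunk->InYoungGeneration()) return kObjectsNewToOld;
      return kObjectsOldToOld;
    }

    int main() {
      Chunk promoted{PAGE_NEW_OLD_PROMOTION | IN_YOUNG_GENERATION};
      Chunk young{IN_YOUNG_GENERATION};
      Chunk old_page{};
      std::cout << ComputeEvacuationMode(&promoted) << " "    // 1 (kPageNewToOld)
                << ComputeEvacuationMode(&young) << " "       // 0 (kObjectsNewToOld)
                << ComputeEvacuationMode(&old_page) << "\n";  // 3 (kObjectsOldToOld)
    }
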
3609 void EvacuatePage(MemoryChunk* chunk);
3627 virtual void RawEvacuatePage(MemoryChunk* chunk,
3660 void Evacuator::EvacuatePage(MemoryChunk* chunk) {
3662 DCHECK(chunk->SweepingDone());
3668 RawEvacuatePage(chunk, &saved_live_bytes);
3676 static_cast<void*>(this), static_cast<void*>(chunk),
3677 chunk->InNewSpace(),
3678 chunk->IsFlagSet(Page::PAGE_NEW_OLD_PROMOTION) ||
3679 chunk->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION),
3680 chunk->IsFlagSet(MemoryChunk::IS_EXECUTABLE),
3681 chunk->Contains(heap()->new_space()->age_mark()),
3683 chunk->IsFlagSet(Page::COMPACTION_WAS_ABORTED));
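
The cluster at 3660-3683 shows Evacuator::EvacuatePage wrapping the real work: assert sweeping is done, snapshot live bytes, call the virtual RawEvacuatePage, then emit a trace line describing the page (new space, promotion flags, executable, aborted). A rough, self-contained sketch of that template-method-plus-instrumentation shape; EvacuatorSketch, Chunk, and the printed fields are illustrative, not V8's interfaces.

    #include <chrono>
    #include <cstdint>
    #include <cstdio>

    // Illustrative stand-in for a page of heap memory.
    struct Chunk {
      bool sweeping_done = true;
      bool in_new_space = false;
      bool compaction_aborted = false;
      intptr_t live_bytes = 0;
    };

    // The public EvacuatePage() does the bookkeeping (check, live-byte
    // snapshot, timing, trace output) and delegates the copying to a virtual
    // RawEvacuatePage() that full and young-generation evacuators override.
    class EvacuatorSketch {
     public:
      virtual ~EvacuatorSketch() = default;

      void EvacuatePage(Chunk* chunk) {
        if (!chunk->sweeping_done) return;  // V8 asserts this with DCHECK
        intptr_t saved_live_bytes = 0;
        auto start = std::chrono::steady_clock::now();
        RawEvacuatePage(chunk, &saved_live_bytes);
        auto us = std::chrono::duration_cast<std::chrono::microseconds>(
                      std::chrono::steady_clock::now() - start)
                      .count();
        std::printf("evacuated %p: %ld live bytes, new_space=%d, aborted=%d, %lld us\n",
                    static_cast<void*>(chunk), static_cast<long>(saved_live_bytes),
                    chunk->in_new_space, chunk->compaction_aborted,
                    static_cast<long long>(us));
      }

     protected:
      virtual void RawEvacuatePage(Chunk* chunk, intptr_t* live_bytes) = 0;
    };

    // Minimal concrete evacuator: pretend every object survives.
    class FullEvacuatorSketch : public EvacuatorSketch {
     protected:
      void RawEvacuatePage(Chunk* chunk, intptr_t* live_bytes) override {
        *live_bytes = chunk->live_bytes;
        // ... visit live objects and copy them to their target space ...
      }
    };

    int main() {
      Chunk page;
      page.live_bytes = 4096;
      FullEvacuatorSketch evacuator;
      evacuator.EvacuatePage(&page);
    }
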
3740 void RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) override;
3748 void FullEvacuator::RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) {
3749 const EvacuationMode evacuation_mode = ComputeEvacuationMode(chunk);
3752 *live_bytes = marking_state->live_bytes(chunk);
3760 chunk, marking_state, &new_space_visitor_,
3765 chunk, marking_state, &new_to_old_page_visitor_,
3768 marking_state->live_bytes(chunk));
3772 chunk, marking_state, &new_to_new_page_visitor_,
3775 marking_state->live_bytes(chunk));
3779 chunk, marking_state, &old_space_visitor_,
3788 failed_object.address(), static_cast<Page*>(chunk));
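
The kObjectsOldToOld branch matched at 3779-3788 is the one evacuation mode that can fail: when a live object cannot be moved, the failed object and its page are reported as an aborted evacuation candidate. A small self-contained model of that failure path; PageSketch, OldSpaceAllocator, and the size-based objects are illustrative stand-ins for real heap objects and allocation.

    #include <cstdint>
    #include <iostream>
    #include <optional>
    #include <vector>

    // Illustrative model: an "object" is just a size, and moving it fails when
    // the compaction target space runs out of room.
    struct PageSketch {
      std::vector<int> live_object_sizes;
      bool compaction_aborted = false;
    };

    struct OldSpaceAllocator {
      int budget;  // bytes still available in compaction targets
      bool TryAllocate(int size) {
        if (size > budget) return false;
        budget -= size;
        return true;
      }
    };

    // Returns the index of the first object that could not be moved, if any.
    // V8's FullEvacuator instead gets the failed HeapObject back from
    // LiveObjectVisitor::VisitBlackObjects and reports the page as an aborted
    // evacuation candidate so it is later swept in place.
    std::optional<size_t> EvacuateOldToOld(PageSketch* page,
                                           OldSpaceAllocator* alloc) {
      for (size_t i = 0; i < page->live_object_sizes.size(); ++i) {
        if (!alloc->TryAllocate(page->live_object_sizes[i])) {
          page->compaction_aborted = true;  // COMPACTION_WAS_ABORTED in the listing
          return i;
        }
      }
      return std::nullopt;
    }

    int main() {
      PageSketch page{{64, 128, 512, 32}};
      OldSpaceAllocator alloc{256};
      if (auto failed = EvacuateOldToOld(&page, &alloc)) {
        std::cout << "aborted at object " << *failed << "\n";  // aborted at object 2
      }
    }
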
4020 bool LiveObjectVisitor::VisitBlackObjects(MemoryChunk* chunk,
4028 LiveObjectRange<kBlackObjects>(chunk, marking_state->bitmap(chunk))) {
4032 marking_state->bitmap(chunk)->ClearRange(
4033 chunk->AddressToMarkbitIndex(chunk->area_start()),
4034 chunk->AddressToMarkbitIndex(object.address()));
4041 marking_state->ClearLiveness(chunk);
4047 void LiveObjectVisitor::VisitBlackObjectsNoFail(MemoryChunk* chunk,
4053 if (chunk->IsLargePage()) {
4054 HeapObject object = reinterpret_cast<LargePage*>(chunk)->GetObject();
4062 LiveObjectRange<kBlackObjects>(chunk, marking_state->bitmap(chunk))) {
4071 marking_state->ClearLiveness(chunk);
4076 void LiveObjectVisitor::VisitGreyObjectsNoFail(MemoryChunk* chunk,
4082 if (chunk->IsLargePage()) {
4083 HeapObject object = reinterpret_cast<LargePage*>(chunk)->GetObject();
4091 LiveObjectRange<kGreyObjects>(chunk, marking_state->bitmap(chunk))) {
4100 marking_state->ClearLiveness(chunk);
4105 void LiveObjectVisitor::RecomputeLiveBytes(MemoryChunk* chunk,
4109 LiveObjectRange<kAllLiveObjects>(chunk, marking_state->bitmap(chunk))) {
4112 marking_state->SetLiveBytes(chunk, new_live_size);
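
The LiveObjectVisitor matches at 4020-4112 all walk a chunk's marking bitmap: VisitBlackObjects clears the already-visited prefix of the bitmap when a visit fails, and RecomputeLiveBytes re-derives the live byte count from the bitmap alone. A compilable sketch of that pattern over a toy one-bit-per-cell bitmap; the fixed cell size and PageSketch are illustrative, since V8's bitmap is word-granular and objects carry their own sizes.

    #include <cstddef>
    #include <functional>
    #include <iostream>
    #include <vector>

    // Toy mark bitmap: one bit per fixed-size cell, set = a live object starts there.
    constexpr size_t kCellSize = 16;  // purely illustrative

    struct PageSketch {
      std::vector<bool> markbits;  // index = cell within the page
    };

    // Visit every marked ("black") object; on failure, clear the mark bits of the
    // objects that were already moved, leaving the failed object and everything
    // after it still marked (mirroring the ClearRange at 4032-4034).
    bool VisitBlackObjects(PageSketch* page,
                           const std::function<bool(size_t cell)>& visitor,
                           size_t* failed_cell) {
      for (size_t cell = 0; cell < page->markbits.size(); ++cell) {
        if (!page->markbits[cell]) continue;
        if (!visitor(cell)) {
          *failed_cell = cell;
          for (size_t c = 0; c < cell; ++c) page->markbits[c] = false;
          return false;
        }
      }
      return true;
    }

    // Re-derive live bytes from the bitmap, as RecomputeLiveBytes does at 4105-4112.
    size_t RecomputeLiveBytes(const PageSketch& page) {
      size_t live = 0;
      for (bool bit : page.markbits) live += bit ? kCellSize : 0;
      return live;
    }

    int main() {
      PageSketch page{{true, false, true, true, false}};
      size_t failed = 0;
      bool ok = VisitBlackObjects(
          &page, [](size_t cell) { return cell != 3; },  // pretend cell 3 can't move
          &failed);
      std::cout << ok << " failed=" << failed
                << " live=" << RecomputeLiveBytes(page) << "\n";  // 0 failed=3 live=16
    }
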
4253 explicit ToSpaceUpdatingItem(Heap* heap, MemoryChunk* chunk, Address start,
4256 chunk_(chunk),
4309 MemoryChunk* chunk,
4313 chunk_(chunk),
4496 MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) {
4499 heap(), non_atomic_marking_state(), chunk, updating_mode);
4507 for (MemoryChunk* chunk : *space) {
4509 chunk->slot_set<OLD_TO_OLD>() != nullptr ||
4510 chunk->typed_slot_set<OLD_TO_OLD>() != nullptr;
4513 chunk->slot_set<OLD_TO_CODE>() != nullptr;
4515 chunk->slot_set<OLD_TO_NEW>() != nullptr ||
4516 chunk->typed_slot_set<OLD_TO_NEW>() != nullptr;
4518 chunk->invalidated_slots<OLD_TO_OLD>() != nullptr;
4520 chunk->invalidated_slots<OLD_TO_NEW>() != nullptr;
4528 items->emplace_back(CreateRememberedSetUpdatingItem(chunk, mode));
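
The loop matched at 4507-4528 decides, chunk by chunk, whether a remembered-set updating work item is needed at all: a chunk only gets one if it actually has slot sets, typed slot sets, or invalidated-slot data. A minimal sketch of that predicate-plus-work-list build; ChunkSketch, UpdatingItem, and the boolean fields stand in for V8's slot_set<...>() and invalidated_slots<...>() accessors.

    #include <iostream>
    #include <vector>

    // Illustrative chunk: in V8 these are non-null slot_set<OLD_TO_NEW>(),
    // typed slot sets, and invalidated_slots<...>() on the MemoryChunk header.
    struct ChunkSketch {
      bool has_old_to_new_slots = false;
      bool has_old_to_old_slots = false;
      bool has_invalidated_slots = false;
    };

    struct UpdatingItem {
      ChunkSketch* chunk;
    };

    // Only chunks that carry remembered-set data get a work item, so the
    // parallel pointer-update phase has no useless tasks to schedule.
    std::vector<UpdatingItem> CollectRememberedSetUpdatingItems(
        std::vector<ChunkSketch>& space) {
      std::vector<UpdatingItem> items;
      for (ChunkSketch& chunk : space) {
        bool contains_slots = chunk.has_old_to_new_slots ||
                              chunk.has_old_to_old_slots ||
                              chunk.has_invalidated_slots;
        if (contains_slots) items.push_back(UpdatingItem{&chunk});
      }
      return items;
    }

    int main() {
      std::vector<ChunkSketch> space(4);
      space[1].has_old_to_new_slots = true;
      space[3].has_invalidated_slots = true;
      std::cout << CollectRememberedSetUpdatingItems(space).size() << "\n";  // 2
    }
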
4665 MemoryChunk* chunk = chunk_iterator.Next();
4666 CodePageMemoryModificationScope unprotect_code_page(chunk);
4669 chunk,
4675 chunk->ReleaseSlotSet<OLD_TO_SHARED>();
4678 chunk, [this](SlotType slot_type, Address slot) {
4689 chunk->ReleaseTypedSlotSet<OLD_TO_SHARED>();
4877 const MemoryChunk* chunk) override {
4878 return marking_state_->bitmap(chunk);
5194 MemoryChunk* chunk = MemoryChunk::FromHeapObject(host);
5195 DCHECK(chunk->SweepingDone());
5196 RememberedSet<OLD_TO_NEW>::Insert<AccessMode::NON_ATOMIC>(chunk, slot);
5518 MinorMarkCompactCollector::CreateToSpaceUpdatingItem(MemoryChunk* chunk,
5522 heap(), chunk, start, end, non_atomic_marking_state());
5527 MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) {
5530 heap(), non_atomic_marking_state(), chunk, updating_mode);
5585 explicit PageMarkingItem(MemoryChunk* chunk) : chunk_(chunk) {}
5747 heap(), [&marking_items](MemoryChunk* chunk) {
5748 marking_items.emplace_back(chunk);
5928 void RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) override;
5935 void YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk,
5941 *live_bytes = marking_state->live_bytes(chunk);
5942 switch (ComputeEvacuationMode(chunk)) {
5946 chunk, marking_state, &new_space_visitor_,
5951 chunk, marking_state, &new_to_old_page_visitor_,
5954 marking_state->live_bytes(chunk));
5955 if (!chunk->IsLargePage()) {
5957 collector_->MakeIterable(static_cast<Page*>(chunk), ZAP_FREE_SPACE);
5962 collector_->MakeIterable(static_cast<Page*>(chunk),
5969 chunk, marking_state, &new_to_new_page_visitor_,
5972 marking_state->live_bytes(chunk));
5973 DCHECK(!chunk->IsLargePage());
5975 collector_->MakeIterable(static_cast<Page*>(chunk), ZAP_FREE_SPACE);
5980 collector_->MakeIterable(static_cast<Page*>(chunk), IGNORE_FREE_SPACE);
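
The YoungGenerationEvacuator matches at 5955-5980 call MakeIterable with either ZAP_FREE_SPACE or IGNORE_FREE_SPACE after objects have been moved or promoted off a page. A simplified, self-contained sketch of that "fill the holes between survivors" step; the mode names follow the listing, while the byte-vector page, LiveRange, and the 0xCD zap byte are illustrative choices, and a real collector would also install filler objects so a heap iterator can skip each gap.

    #include <cstdint>
    #include <cstring>
    #include <iostream>
    #include <vector>

    enum FreeSpaceTreatmentMode { IGNORE_FREE_SPACE, ZAP_FREE_SPACE };

    struct LiveRange {
      size_t start;
      size_t end;  // [start, end) still holds a live or promoted object
    };

    // After evacuation the gaps between surviving objects are garbage. To keep
    // the page linearly iterable, each gap is filled; in zap mode the bytes are
    // also overwritten with a recognizable pattern so stale pointers stand out.
    void MakeIterable(std::vector<uint8_t>* page,
                      const std::vector<LiveRange>& live,
                      FreeSpaceTreatmentMode mode) {
      size_t cursor = 0;
      auto fill_gap = [&](size_t start, size_t end) {
        if (start >= end) return;
        if (mode == ZAP_FREE_SPACE) {
          std::memset(page->data() + start, 0xCD, end - start);  // illustrative zap byte
        }
        // A real collector would write a FreeSpace/filler header here as well.
      };
      for (const LiveRange& range : live) {
        fill_gap(cursor, range.start);
        cursor = range.end;
      }
      fill_gap(cursor, page->size());
    }

    int main() {
      std::vector<uint8_t> page(64, 0xAA);
      MakeIterable(&page, {{8, 16}, {32, 40}}, ZAP_FREE_SPACE);
      std::cout << std::hex << int(page[0]) << " " << int(page[8]) << "\n";  // cd aa
    }
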