Lines Matching refs:page
18 #include "src/heap/cppgc/heap-page.h"
45 void Verify(NormalPage& page) {
47 Traverse(page);
52 bool VisitNormalPage(NormalPage& page) {
54 bitmap_ = &page.object_start_bitmap();
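
The matches at lines 45-54 belong to an ObjectStartBitmapVerifier that caches a page's object_start_bitmap() and then traverses the page. As a rough illustration of the idea only (standalone, hypothetical types, not cppgc's real classes), a verifier just has to check that a linear walk over the payload visits exactly the offsets recorded in the bitmap:

// Minimal standalone sketch (hypothetical types and sizes, not the real
// cppgc API): verify that an object-start bitmap agrees with a linear walk
// over a page's payload.
#include <bitset>
#include <cassert>
#include <cstddef>
#include <vector>

constexpr size_t kPageSize = 4096;
constexpr size_t kAllocationGranularity = 16;
constexpr size_t kBitmapSize = kPageSize / kAllocationGranularity;

struct FakeObject {
  size_t offset;  // Offset of the object header within the page payload.
  size_t size;    // Object size in bytes (multiple of the granularity).
};

int main() {
  // The "page": a list of objects laid out back to back.
  std::vector<FakeObject> objects = {{0, 32}, {32, 64}, {96, 16}};

  // Object-start bitmap: one bit per allocation granule.
  std::bitset<kBitmapSize> object_start_bitmap;
  for (const FakeObject& obj : objects)
    object_start_bitmap.set(obj.offset / kAllocationGranularity);

  // Verification: the traversal must see a set bit for every object start,
  // and the bitmap must contain no additional bits.
  size_t expected_starts = 0;
  for (const FakeObject& obj : objects) {
    assert(object_start_bitmap.test(obj.offset / kAllocationGranularity));
    ++expected_starts;
  }
  assert(object_start_bitmap.count() == expected_starts);
  return 0;
}
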
86 BasePage& page)
87 : page_allocator_(page_allocator), free_list_(free_list), page_(page) {}
126 BasePage& page)
181 BasePage* page = nullptr;
222 InlinedFinalizationBuilder(BasePage& page, PageAllocator& page_allocator)
224 NormalPageSpace::From(page.space()).free_list(), page) {}
246 DeferredFinalizationBuilder(BasePage& page, PageAllocator& page_allocator)
247 : FreeHandler(page_allocator, result_.cached_free_list, page) {
248 result_.page = &page;
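
Lines 86-248 are the constructors of the finalization builders and their FreeHandler base: InlinedFinalizationBuilder hands freed memory straight to the owning NormalPageSpace's free list, while DeferredFinalizationBuilder frees into a cached free list stored in its result. A minimal sketch of that split, using made-up types rather than the real FreeHandler/FreeList API:

// Standalone sketch (hypothetical classes) of the two freeing strategies the
// matched constructors suggest: an "inlined" handler that pushes free blocks
// straight into the space's free list, and a "deferred" handler that caches
// them so a later mutator-thread step can merge them.
#include <cstddef>
#include <utility>
#include <vector>

struct FreeBlock { void* start; size_t size; };

struct FreeList {
  std::vector<FreeBlock> blocks;
  void Add(FreeBlock block) { blocks.push_back(block); }
  void Append(FreeList&& other) {
    blocks.insert(blocks.end(), other.blocks.begin(), other.blocks.end());
    other.blocks.clear();
  }
};

// Frees directly into the space free list (mutator-thread sweeping).
class InlinedFreeHandler {
 public:
  explicit InlinedFreeHandler(FreeList& space_free_list)
      : space_free_list_(space_free_list) {}
  void Free(FreeBlock block) { space_free_list_.Add(block); }

 private:
  FreeList& space_free_list_;
};

// Caches freed blocks; the result is merged into the space free list later.
class DeferredFreeHandler {
 public:
  void Free(FreeBlock block) { cached_free_list_.Add(block); }
  FreeList&& TakeCachedFreeList() { return std::move(cached_free_list_); }

 private:
  FreeList cached_free_list_;
};

int main() {
  FreeList space_free_list;
  char dummy[64];

  InlinedFreeHandler inlined(space_free_list);
  inlined.Free({dummy, 32});  // Immediately visible to the allocator.

  DeferredFreeHandler deferred;
  deferred.Free({dummy + 32, 32});  // Cached until finalization.
  space_free_list.Append(deferred.TakeCachedFreeList());
  return 0;
}
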
294 NormalPage* page, PageAllocator& page_allocator) {
296 FinalizationBuilder builder(*page, page_allocator);
298 PlatformAwareObjectStartBitmap& bitmap = page->object_start_bitmap();
303 Address start_of_gap = page->PayloadStart();
316 for (Address begin = page->PayloadStart(), end = page->PayloadEnd();
354 if (start_of_gap != page->PayloadStart() &&
355 start_of_gap != page->PayloadEnd()) {
357 start_of_gap, static_cast<size_t>(page->PayloadEnd() - start_of_gap));
360 page->SetAllocatedBytesAtLastGC(live_bytes);
362 const bool is_empty = (start_of_gap == page->PayloadStart());
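
Lines 294-362 outline the per-page sweep: walk the payload from PayloadStart() to PayloadEnd(), turn runs of dead objects into free-list gaps, count live bytes for SetAllocatedBytesAtLastGC(), and report the page as empty when the gap still starts at PayloadStart(). A simplified, self-contained version of that loop (assumed object layout, not the real header iteration):

// Sketch of the sweep-loop shape suggested by lines 294-362. All types are
// simplified stand-ins, not cppgc's.
#include <cstddef>
#include <utility>
#include <vector>

struct FakeHeader { size_t size; bool marked; };

struct SweepResult {
  size_t live_bytes = 0;
  bool is_empty = false;
  std::vector<std::pair<size_t, size_t>> gaps;  // (offset, size) free entries.
};

SweepResult SweepPayload(const std::vector<FakeHeader>& objects,
                         size_t payload_size) {
  SweepResult result;
  size_t start_of_gap = 0;  // Corresponds to page->PayloadStart().
  size_t offset = 0;
  for (const FakeHeader& header : objects) {
    if (!header.marked) {
      // Dead object: it simply extends the current gap.
      offset += header.size;
      continue;
    }
    // Live object: close the preceding gap (if any) as a free-list entry.
    if (start_of_gap != offset)
      result.gaps.emplace_back(start_of_gap, offset - start_of_gap);
    result.live_bytes += header.size;
    offset += header.size;
    start_of_gap = offset;
  }
  // Trailing gap up to PayloadEnd(), mirroring the check at lines 354-357.
  if (start_of_gap != 0 && start_of_gap != payload_size)
    result.gaps.emplace_back(start_of_gap, payload_size - start_of_gap);
  result.is_empty = (start_of_gap == 0);
  return result;
}

int main() {
  SweepResult r = SweepPayload({{64, true}, {32, false}, {64, true}}, 256);
  return (r.live_bytes == 128 && !r.is_empty) ? 0 : 1;
}
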
366 // SweepFinalizer is responsible for heap/space/page finalization. Finalization
413 DCHECK(page_state->page);
414 BasePage* page = page_state->page;
424 reinterpret_cast<uint64_t>(page->heap().caged_heap().base());
439 // Unmap page if empty.
441 BasePage::Destroy(page);
445 DCHECK(!page->is_large());
448 FreeList& space_freelist = NormalPageSpace::From(page->space()).free_list();
455 *platform_->GetPageAllocator(), space_freelist, *page))
457 *platform_->GetPageAllocator(), space_freelist, *page));
463 // After the page was fully finalized and freelists have been merged, verify
465 ObjectStartBitmapVerifier().Verify(static_cast<NormalPage&>(*page));
467 // Add the page to the space.
468 page->space().AddPage(page);
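
Lines 413-468 cover page finalization on the mutator thread: an empty page is destroyed, otherwise the free-list entries produced during sweeping are merged into the space's free list, the object-start bitmap is verified, and the page is added back to its space. Sketched below with hypothetical FakePage/FakeSpace types (the bitmap verification step from line 465 is omitted):

// Sketch of the finalization step: destroy empty pages, otherwise merge the
// cached free entries and re-add the page to its space. Everything here is a
// hypothetical stand-in for the real cppgc classes.
#include <cassert>
#include <cstddef>
#include <memory>
#include <utility>
#include <vector>

struct FakeSpace;

struct FakePage {
  FakeSpace* space;
  bool is_empty;
  std::vector<size_t> cached_free_entries;  // Produced by concurrent sweeping.
};

struct FakeSpace {
  std::vector<std::unique_ptr<FakePage>> pages;
  std::vector<size_t> free_list;  // Free-entry sizes, for illustration only.
  void AddPage(std::unique_ptr<FakePage> page) {
    pages.push_back(std::move(page));
  }
};

void FinalizePage(std::unique_ptr<FakePage> page) {
  if (page->is_empty) {
    // "Unmap page if empty": nothing is re-added to the space; the page is
    // destroyed when the unique_ptr goes out of scope.
    return;
  }
  // Merge free entries discovered during sweeping into the space free list,
  // then hand the page back to the space so it can serve allocations again.
  FakeSpace* space = page->space;
  for (size_t entry : page->cached_free_entries)
    space->free_list.push_back(entry);
  page->cached_free_entries.clear();
  space->AddPage(std::move(page));
}

int main() {
  FakeSpace space;
  FinalizePage(std::make_unique<FakePage>(FakePage{&space, false, {128, 64}}));
  assert(space.pages.size() == 1 && space.free_list.size() == 2);
  FinalizePage(std::make_unique<FakePage>(FakePage{&space, true, {}}));
  assert(space.pages.size() == 1);  // The empty page was destroyed instead.
  return 0;
}
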
495 while (auto page = state.unswept_pages.Pop()) {
496 SweepPage(**page);
501 void SweepPage(BasePage& page) { Traverse(page); }
536 while (auto page = state->unswept_pages.Pop()) {
537 Traverse(**page);
548 bool VisitNormalPage(NormalPage& page) {
550 page.ResetDiscardedMemory();
556 &page, *platform_->GetPageAllocator())
558 &page, *platform_->GetPageAllocator());
560 NormalPage::Destroy(&page);
562 // The page was eagerly finalized and all the freelists have been merged.
564 ObjectStartBitmapVerifier().Verify(page);
565 page.space().AddPage(&page);
572 bool VisitLargePage(LargePage& page) {
573 HeapObjectHeader* header = page.ObjectHeader();
576 page.space().AddPage(&page);
579 LargePage::Destroy(&page);
609 while (auto page = state.unswept_pages.Pop()) {
610 Traverse(**page);
622 bool VisitNormalPage(NormalPage& page) {
624 page.ResetDiscardedMemory();
630 &page, *platform_->GetPageAllocator())
632 &page, *platform_->GetPageAllocator());
633 const size_t space_index = page.space().index();
640 bool VisitLargePage(LargePage& page) {
641 HeapObjectHeader* header = page.ObjectHeader();
644 page.space().AddPage(&page);
649 header->IsFinalizable() ? page.ObjectHeader() : nullptr;
653 unfinalized_objects.push_back(page.ObjectHeader());
656 const size_t space_index = page.space().index();
662 {&page, std::move(unfinalized_objects), {}, {}, true});
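
Lines 640-662 show how the concurrent sweeper treats a large page: a marked object keeps its page alive and the page is re-added to the space, while a dead, finalizable object is only recorded in unfinalized_objects so the mutator thread can run its finalizer later. A standalone sketch of that decision, with a hypothetical finalizer callback standing in for the real finalization call:

// Sketch of the large-page decision in concurrent sweeping. Types and the
// std::function finalizer are invented for illustration.
#include <functional>
#include <vector>

struct FakeLargeObjectHeader {
  bool marked;
  std::function<void()> finalizer;  // Empty if the object is not finalizable.
};

struct SweptPageState {
  std::vector<FakeLargeObjectHeader*> unfinalized_objects;
  bool page_survived = false;
};

SweptPageState SweepLargePage(FakeLargeObjectHeader& header) {
  SweptPageState state;
  if (header.marked) {
    // Live object: keep the page; nothing to finalize.
    state.page_survived = true;
    return state;
  }
  if (header.finalizer) {
    // Dead but finalizable: the concurrent sweeper does not run finalizers
    // itself, so it only records the header for the mutator thread.
    state.unfinalized_objects.push_back(&header);
  }
  return state;
}

int main() {
  FakeLargeObjectHeader live{true, {}};
  FakeLargeObjectHeader dead_finalizable{false, [] {}};
  SweptPageState a = SweepLargePage(live);
  SweptPageState b = SweepLargePage(dead_finalizable);
  return (a.page_survived && b.unfinalized_objects.size() == 1) ? 0 : 1;
}
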
797 // First, process unfinalized pages as finalizing a page is faster than
800 while (auto page = space_state.swept_unfinalized_pages.Pop()) {
801 finalizer.FinalizePage(&*page);
807 // unswept page. This also helps out the concurrent sweeper.
810 while (auto page = space_state.unswept_pages.Pop()) {
811 sweeper.SweepPage(**page);
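
Lines 797-811 give the ordering used when sweeping is triggered by an allocation: finalize already-swept pages first, since finalizing a page is faster than sweeping one, and only then sweep unswept pages on the mutator thread, which also reduces the work left for the concurrent sweeper. A compact sketch of that two-phase loop (the "enough memory" predicate and the page bookkeeping are invented):

// Sketch of sweep-for-allocation ordering: cheap finalization first, then
// mutator-thread sweeping. Hypothetical types only.
#include <cstddef>
#include <deque>

struct PendingPage { size_t reclaimable_bytes; };

struct SpaceState {
  std::deque<PendingPage> swept_unfinalized_pages;
  std::deque<PendingPage> unswept_pages;
};

// Returns true once at least `needed_bytes` have been made available.
bool SweepForAllocation(SpaceState& state, size_t needed_bytes) {
  size_t freed = 0;

  // Step 1: finalizing an already-swept page is faster than sweeping one.
  while (!state.swept_unfinalized_pages.empty()) {
    freed += state.swept_unfinalized_pages.front().reclaimable_bytes;
    state.swept_unfinalized_pages.pop_front();
    if (freed >= needed_bytes) return true;
  }

  // Step 2: fall back to sweeping unswept pages; doing this on the mutator
  // thread also leaves less work for the concurrent sweeper.
  while (!state.unswept_pages.empty()) {
    freed += state.unswept_pages.front().reclaimable_bytes;
    state.unswept_pages.pop_front();
    if (freed >= needed_bytes) return true;
  }
  return false;
}

int main() {
  SpaceState state;
  state.swept_unfinalized_pages.push_back({256});
  state.unswept_pages.push_back({512});
  return SweepForAllocation(state, 300) ? 0 : 1;
}
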