Lines matching refs:space in V8's mark-compact collector (each match is shown with its line number in the source file).
176 void MarkingVerifier::VerifyMarking(NewSpace* space) {
177 if (!space) return;
178 Address end = space->top();
181 CHECK_EQ(space->first_allocatable_address(),
182 space->first_page()->area_start());
184 PageRange range(space->first_allocatable_address(), end);
193 void MarkingVerifier::VerifyMarking(PagedSpace* space) {
194 for (Page* p : *space) {
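
Note: the two overloads above show the verifiers' iteration shape: a NewSpace is walked only over its allocated range (first_allocatable_address() up to top()), while a PagedSpace is walked page by page. A minimal sketch of that bound computation, using hypothetical Space/Page types rather than V8's real heap API:

  #include <cstdint>
  #include <vector>

  using Address = uintptr_t;

  struct Page {
    Address area_start;
    Address area_end;
    bool Contains(Address a) const { return a >= area_start && a < area_end; }
  };

  struct Space {
    std::vector<Page> pages;  // ordered from low to high addresses
    Address top;              // current allocation pointer (new space)
  };

  // Visit only the allocated part of a new-space-like space: every page up
  // to the one containing `top`, and that page only up to `top` itself.
  template <typename Visitor>
  void VisitAllocated(const Space& space, Visitor visit) {
    for (const Page& p : space.pages) {
      if (p.area_start >= space.top) break;  // nothing allocated here yet
      Address limit = p.Contains(space.top) ? space.top : p.area_end;
      visit(p.area_start, limit);  // e.g. verify markbits in [start, limit)
    }
  }
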
370 void EvacuationVerifier::VerifyEvacuation(NewSpace* space) {
371 if (!space) return;
372 PageRange range(space->first_allocatable_address(), space->top());
376 Address limit = it != range.end() ? page->area_end() : space->top();
377 CHECK(limit == space->top() || !page->Contains(space->top()));
382 void EvacuationVerifier::VerifyEvacuation(PagedSpace* space) {
383 for (Page* p : *space) {
385 if (p->Contains(space->top())) {
388 space->top(), static_cast<int>(space->limit() - space->top()),
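
Note: the limit computation at 376 and the CHECK at 377 encode an invariant of the range being verified: every page except the last is scanned to its area_end(), and space->top() may only fall inside that last page. A small self-contained check of the same invariant (hypothetical types, not V8's API):

  #include <cassert>
  #include <cstddef>
  #include <cstdint>
  #include <vector>

  struct Page {
    uintptr_t area_start, area_end;
    bool Contains(uintptr_t a) const { return a >= area_start && a < area_end; }
  };

  // Mirrors the CHECK at line 377: only the final page of the verified
  // range is allowed to contain the allocation top.
  void CheckTopOnlyInLastPage(const std::vector<Page>& range, uintptr_t top) {
    for (std::size_t i = 0; i + 1 < range.size(); ++i) {
      assert(!range[i].Contains(top));
    }
  }
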
544 static void TraceFragmentation(PagedSpace* space) {
545 int number_of_pages = space->CountTotalPages();
546 intptr_t reserved = (number_of_pages * space->AreaSize());
547 intptr_t free = reserved - space->SizeOfObjects();
548 PrintF("[%s]: %d pages, %d (%.1f%%) free\n", space->name(), number_of_pages,
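
Note: the fragmentation numbers at 545-548 are plain arithmetic: reserved = pages x usable bytes per page, free = reserved - live object bytes, and the printed percentage is free/reserved. A runnable sketch with made-up inputs standing in for a real PagedSpace:

  #include <cstdint>
  #include <cstdio>

  // Sketch of the computation behind TraceFragmentation (lines 544-548),
  // with invented numbers instead of a real space.
  int main() {
    int number_of_pages = 12;                    // space->CountTotalPages()
    intptr_t area_size = 256 * 1024;             // space->AreaSize()
    intptr_t size_of_objects = 2 * 1024 * 1024;  // space->SizeOfObjects()

    intptr_t reserved = number_of_pages * area_size;  // 3 MiB reserved
    intptr_t free = reserved - size_of_objects;       // 1 MiB free
    std::printf("[%s]: %d pages, %d (%.1f%%) free\n", "old_space",
                number_of_pages, static_cast<int>(free),
                free * 100.0 / reserved);  // prints "... (33.3%) free"
    return 0;
  }
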
633 void MarkCompactCollector::VerifyMarkbitsAreDirty(ReadOnlySpace* space) {
634 ReadOnlyHeapObjectIterator iterator(space);
641 void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
642 for (Page* p : *space) {
648 void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
649 if (!space) return;
650 for (Page* p : PageRange(space->first_allocatable_address(), space->top())) {
656 void MarkCompactCollector::VerifyMarkbitsAreClean(LargeObjectSpace* space) {
657 if (!space) return;
658 LargeObjectSpaceObjectIterator it(space);
673 // Read-only space should always be black since we never collect any objects
741 AllocationSpace space) {
743 sweeper()->DrainSweepingWorklistForSpace(space);
794 void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
795 DCHECK(space->identity() == OLD_SPACE || space->identity() == CODE_SPACE ||
796 space->identity() == MAP_SPACE);
798 int number_of_pages = space->CountTotalPages();
799 size_t area_size = space->AreaSize();
829 space->top() == space->limit()
831 : Page::FromAllocationAreaAddress(space->top());
832 for (Page* p : *space) {
923 "compaction-selection-page: space=%s free_bytes_page=%zu "
927 space->name(), (area_size - live_bytes) / KB,
949 "compaction-selection: space=%s reduce_memory=%d pages=%d "
951 space->name(), reduce_memory, candidate_count,
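
Note: lines 829-831 look up the page that owns the current allocation area (space->top()), which must never become an evacuation candidate, and the traces at 923-951 show the quantity driving the choice: free_bytes = area_size - live_bytes per page. A simplified sketch of that selection arithmetic; the threshold and types are made up, and the real heuristic in this function has several modes (e.g. reduce_memory):

  #include <cstddef>
  #include <vector>

  struct PageInfo {
    size_t live_bytes;          // bytes of live objects after marking
    bool owns_allocation_area;  // page containing space->top()
  };

  // Pick pages whose free space exceeds `min_free_bytes` as evacuation
  // candidates. The page currently being allocated into is never chosen,
  // mirroring the Page::FromAllocationAreaAddress check at 829-831.
  std::vector<size_t> SelectCandidates(const std::vector<PageInfo>& pages,
                                       size_t area_size,
                                       size_t min_free_bytes) {
    std::vector<size_t> candidates;
    for (size_t i = 0; i < pages.size(); ++i) {
      if (pages[i].owns_allocation_area) continue;
      size_t free_bytes = area_size - pages[i].live_bytes;
      if (free_bytes >= min_free_bytes) candidates.push_back(i);
    }
    return candidates;
  }
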
1000 for (PagedSpace* space = spaces.Next(); space != nullptr;
1001 space = spaces.Next()) {
1002 space->PrepareForMarkCompact();
1083 // GC, because it relies on the new address of certain old space
1403 // space. These sites get a one-time reprieve.
1495 // The target is always in old space, we don't have to record the slot in
1782 AllocationSpace space = AllocateTargetObject(object, size, &target);
1783 MigrateObject(HeapObject::cast(target), object, size, space);
1833 "MarkCompactCollector: semi-space copy, fallback in old gen");
2660 // Create a filler object for any left over space in the bytecode array.
3222 // Needs to be atomic for map space compaction: This slot could be a map
3290 // Visitor for updating root pointers and to-space pointers.
3385 // Visitor for updating root pointers and to-space pointers.
3470 // New space.
3474 // Append the list of new space pages to be processed.
3490 // Old space.
3501 // New space.
3514 // Old space. Deallocate evacuated candidate pages.
3922 // Evacuation of new space pages cannot be aborted, so it needs to run
3923 // before old space evacuation.
3931 // The move added page->allocated_bytes to the old space, but we are
4140 // slots only handles old space (for unboxed doubles), and thus map space can
4149 // Full GCs don't promote pages within new space.
4347 // If the object was in from space before and is after executing the
4348 // callback in to space, the object is still live.
4350 // just freed free space object.
4355 // Slots can point to "to" space if the page has been moved, or if the
4504 std::vector<std::unique_ptr<UpdatingItem>>* items, IterateableSpace* space,
4507 for (MemoryChunk* chunk : *space) {
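
Note: line 4504 appears to take the space as a template parameter (IterateableSpace), so a single helper can walk old space, map space, code space, and the large-object spaces alike, producing one updating work item per memory chunk (4507). A sketch of that shape:

  #include <memory>
  #include <vector>

  struct MemoryChunk {};

  struct UpdatingItem {
    explicit UpdatingItem(MemoryChunk* c) : chunk(c) {}
    MemoryChunk* chunk;
  };

  // One helper for every space type: anything that can be range-iterated to
  // yield MemoryChunk* gets one item per chunk (cf. lines 4504-4507).
  template <typename IterateableSpace>
  int CollectUpdatingItems(std::vector<std::unique_ptr<UpdatingItem>>* items,
                           IterateableSpace* space) {
    int pages = 0;
    for (MemoryChunk* chunk : *space) {
      items->push_back(std::make_unique<UpdatingItem>(chunk));
      ++pages;
    }
    return pages;
  }

  // Any container of MemoryChunk* works as a stand-in space:
  //   std::vector<MemoryChunk*> space = { ... };
  //   CollectUpdatingItems(&items, &space);
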
4624 // Iterating to space may require a valid body descriptor for e.g.
4625 // WasmStruct which races with updating a slot in Map. Since to space is
4789 PagedSpace* space = static_cast<PagedSpace*>(p->owner());
4792 space->ReleasePage(p);
4798 void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
4799 space->ClearAllocatorState();
4805 for (auto it = space->begin(); it != space->end();) {
4822 space->memory_chunk_list().Remove(p);
4823 space->ReleasePage(p);
4829 sweeper()->AddPage(space->identity(), p, Sweeper::REGULAR);
4834 PrintIsolate(isolate(), "sweeping: space=%s initialized_for_sweeping=%d",
4835 space->name(), will_be_swept);
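
Note: the loop header at 4805 uses a manual iterator instead of a range-for because pages can be unlinked and released from inside the loop (4822-4823); the iterator has to move past a page before that page is removed, while the remaining pages are queued for the sweeper (4829). A sketch of that pattern over a std::list:

  #include <cstddef>
  #include <list>

  struct Page {
    size_t live_bytes = 0;
  };

  // Sketch of the StartSweepSpace loop (lines 4805-4829): advance the
  // iterator before unlinking, so releasing an empty page does not
  // invalidate it. Returns how many pages were handed to the sweeper.
  int StartSweep(std::list<Page*>* pages, std::list<Page*>* sweeper_queue) {
    int will_be_swept = 0;
    for (auto it = pages->begin(); it != pages->end();) {
      Page* p = *it;
      ++it;  // step past `p` before it can be removed
      if (p->live_bytes == 0) {
        pages->remove(p);  // empty page: unlink and release immediately
        delete p;
      } else {
        sweeper_queue->push_back(p);  // live data: queue for sweeping
        ++will_be_swept;
      }
    }
    return will_be_swept;
  }
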
4906 // Code slots never appear in new space because CodeDataContainers, the
4908 // the old space.
5037 // Code slots never appear in new space because CodeDataContainers, the
5039 // the old space.
5052 // Code objects are not expected in new space.
5057 // Code objects are not expected in new space.
5465 // Internalized strings are always stored in old space, so there is no need
5482 // Append the list of new space pages to be processed.
5503 // Seed to space pages.