Lines Matching defs:object

39 #include "src/heap/code-object-registry.h"
67 #include "src/heap/object-stats.h"
91 #include "src/objects/maybe-object.h"
112 #include "src/objects/object-macros.h"
118 Isolate* Heap::GetIsolateFromWritableObject(HeapObject object) {
120 third_party_heap::Heap::GetIsolate(object.address()));
126 bool Heap_PageFlagsAreConsistent(HeapObject object) {
127 return Heap::PageFlagsAreConsistent(object);
138 void Heap_GenerationalBarrierSlow(HeapObject object, Address slot,
140 Heap::GenerationalBarrierSlow(object, slot, value);
148 HeapObject object) {
149 Heap::GenerationalBarrierForCodeSlow(host, rinfo, object);
526 "New large object space, used: %6zu KB"
553 "Large object space, used: %6zu KB"
559 "Code large object space, used: %6zu KB"
792 void UpdateAllocationsHash(HeapObject object) {
793 Address object_address = object.address();
847 void Heap::AddRetainingPathTarget(Handle<HeapObject> object,
855 MaybeObjectHandle::Weak(object));
862 bool Heap::IsRetainingPathTarget(HeapObject object,
866 MaybeObject object_to_check = HeapObjectReference::Weak(object);
883 HeapObject object = target;
888 retaining_path.push_back(std::make_pair(object, ephemeron));
890 ephemeron_retainer_.count(object)) {
891 object = ephemeron_retainer_[object];
893 } else if (retainer_.count(object)) {
894 object = retainer_[object];
897 if (retaining_root_.count(object)) {
898 root = retaining_root_[object];
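
Aside (annotation, not part of the match listing): lines 883-898 above walk a retainer map backwards from a target object towards a root, preferring an ephemeron retainer when one is recorded (lines 890-894). Below is a minimal, self-contained C++ sketch of that walk; ObjectId, RetainerMap and RetainingPath are illustrative stand-ins for HeapObject, the retainer_/ephemeron_retainer_ maps and Heap::PrintRetainingPath, not V8's actual code.

#include <cstdio>
#include <unordered_map>
#include <vector>

using ObjectId = int;  // stand-in for HeapObject
using RetainerMap = std::unordered_map<ObjectId, ObjectId>;

// Follow retainers from `target` back towards a root, preferring the
// ephemeron retainer when both maps have an entry.
std::vector<ObjectId> RetainingPath(ObjectId target, const RetainerMap& retainer,
                                    const RetainerMap& ephemeron_retainer) {
  std::vector<ObjectId> path;
  ObjectId object = target;
  for (;;) {
    path.push_back(object);
    if (path.size() > 10000) break;  // cycle guard for this sketch only
    auto it = ephemeron_retainer.find(object);
    if (it != ephemeron_retainer.end()) {
      object = it->second;
      continue;
    }
    it = retainer.find(object);
    if (it == retainer.end()) break;  // no recorded retainer: we reached a root
    object = it->second;
  }
  return path;
}

int main() {
  RetainerMap retainer{{3, 2}, {2, 1}};  // object 1 retains 2, 2 retains 3
  for (ObjectId o : RetainingPath(3, retainer, {})) std::printf("%d ", o);  // 3 2 1
}
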
930 HeapObject object = pair.first;
933 if (Heap::InFromPage(object)) {
934 MapWord map_word = object.map_word(kRelaxedLoad);
936 object = map_word.ToForwardingAddress();
945 updated_map[object] = retainer;
963 HeapObject object = pair.first;
965 if (Heap::InFromPage(object)) {
966 MapWord map_word = object.map_word(kRelaxedLoad);
968 object = map_word.ToForwardingAddress();
971 updated_retaining_root[object] = pair.second;
977 void Heap::AddRetainer(HeapObject retainer, HeapObject object) {
978 if (retainer_.count(object)) return;
979 retainer_[object] = retainer;
981 if (IsRetainingPathTarget(object, &option)) {
984 if (ephemeron_retainer_.count(object) == 0 ||
986 PrintRetainingPath(object, option);
991 void Heap::AddEphemeronRetainer(HeapObject retainer, HeapObject object) {
992 if (ephemeron_retainer_.count(object)) return;
993 ephemeron_retainer_[object] = retainer;
995 if (IsRetainingPathTarget(object, &option) &&
998 if (retainer_.count(object) == 0) {
999 PrintRetainingPath(object, option);
1004 void Heap::AddRetainingRoot(Root root, HeapObject object) {
1005 if (retaining_root_.count(object)) return;
1006 retaining_root_[object] = root;
1008 if (IsRetainingPathTarget(object, &option)) {
1009 PrintRetainingPath(object, option);
1611 PrintF("Sample object: ");
2473 void Heap::EnsureSweepingCompleted(HeapObject object) {
2476 BasicMemoryChunk* basic_chunk = BasicMemoryChunk::FromHeapObject(object);
2800 // We also flip the young generation large object space. All large objects
2841 void Heap::UnprotectAndRegisterMemoryChunk(HeapObject object,
2843 UnprotectAndRegisterMemoryChunk(MemoryChunk::FromHeapObject(object), origin);
3220 HeapObject Heap::PrecedeWithFiller(HeapObject object, int filler_size) {
3221 CreateFillerObjectAt(object.address(), filler_size,
3223 return HeapObject::FromAddress(object.address() + filler_size);
3226 HeapObject Heap::AlignWithFiller(HeapObject object, int object_size,
3231 int pre_filler = GetFillToAlign(object.address(), alignment);
3233 object = PrecedeWithFiller(object, pre_filler);
3237 CreateFillerObjectAt(object.address() + object_size, filler_size,
3240 return object;
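
Aside (annotation, not part of the match listing): PrecedeWithFiller and AlignWithFiller at lines 3220-3240 align an allocation by placing filler objects before and/or after it. The self-contained sketch below shows only the alignment arithmetic; FillToAlign is an illustrative name assuming power-of-two alignments, not V8's GetFillToAlign.

#include <cstdint>
#include <cstdio>

// Bytes of filler needed in front of `addr` so that the object starts at an
// address satisfying `alignment`.
static int FillToAlign(uintptr_t addr, uintptr_t alignment) {
  uintptr_t misalignment = addr & (alignment - 1);
  return misalignment == 0 ? 0 : static_cast<int>(alignment - misalignment);
}

int main() {
  // An allocation at ...0x1004 that must be 8-byte aligned needs 4 bytes of
  // filler in front of it; the object itself then starts at ...0x1008.
  std::printf("%d\n", FillToAlign(0x1004, 8));  // prints 4
}
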
3379 // with {addr} pointing into large object space; however we currently
3398 bool Heap::CanMoveObjectStart(HeapObject object) {
3401 // Sampling heap profiler may have a reference to the object.
3404 if (IsLargeObject(object)) return false;
3406 // Compilation jobs may have references to the object.
3412 // We can move the object start if the page was already swept.
3413 return Page::FromHeapObject(object)->SweepingDone();
3416 bool Heap::IsImmovable(HeapObject object) {
3418 return third_party_heap::Heap::IsImmovable(object);
3420 BasicMemoryChunk* chunk = BasicMemoryChunk::FromHeapObject(object);
3421 return chunk->NeverEvacuate() || IsLargeObject(object);
3424 bool Heap::IsLargeObject(HeapObject object) {
3426 return third_party_heap::Heap::InLargeObjectSpace(object.address()) ||
3427 third_party_heap::Heap::InSpace(object.address(), CODE_LO_SPACE);
3428 return BasicMemoryChunk::FromHeapObject(object)->IsLargePage();
3466 bool MayContainRecordedSlots(HeapObject object) {
3468 // New space objects do not have recorded slots.
3469 if (BasicMemoryChunk::FromHeapObject(object)->InYoungGeneration())
3472 if (object.IsByteArray() || object.IsFixedDoubleArray()) return false;
3501 FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
3505 return object;
3507 CHECK(!object.is_null());
3508 DCHECK(CanMoveObjectStart(object));
3511 DCHECK(object.IsFixedArray() || object.IsFixedDoubleArray());
3512 const int element_size = object.IsFixedArray() ? kTaggedSize : kDoubleSize;
3514 Map map = object.map();
3517 // space or old space. In a large object space the object's start must
3519 DCHECK(!IsLargeObject(object));
3520 DCHECK(object.map() != ReadOnlyRoots(this).fixed_cow_array_map());
3526 const int len = object.length();
3530 Address old_start = object.address();
3535 object, HeapObject::FromAddress(new_start));
3539 if (MayContainRecordedSlots(object)) {
3540 MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
3541 DCHECK(!chunk->RegisteredObjectWithInvalidatedSlots<OLD_TO_OLD>(object));
3542 DCHECK(!chunk->RegisteredObjectWithInvalidatedSlots<OLD_TO_NEW>(object));
3550 MayContainRecordedSlots(object)
3556 // object does not require synchronization.
3557 RELAXED_WRITE_FIELD(object, bytes_to_trim,
3559 RELAXED_WRITE_FIELD(object, bytes_to_trim + kTaggedSize,
3565 // Notify the heap profiler of change in object layout.
3566 OnMoveEvent(new_object, object, new_object.Size());
3571 // to the original FixedArray (which is now the filler object).
3580 LeftTrimmerVerifierRootVisitor root_visitor(object);
3589 void Heap::RightTrimFixedArray(FixedArrayBase object, int elements_to_trim) {
3590 const int len = object.length();
3595 if (object.IsByteArray()) {
3599 } else if (object.IsFixedArray()) {
3603 DCHECK(object.IsFixedDoubleArray());
3608 CreateFillerForArray<FixedArrayBase>(object, elements_to_trim, bytes_to_trim);
3611 void Heap::RightTrimWeakFixedArray(WeakFixedArray object,
3617 CreateFillerForArray<WeakFixedArray>(object, elements_to_trim,
3622 void Heap::CreateFillerForArray(T object, int elements_to_trim,
3624 DCHECK(object.IsFixedArrayBase() || object.IsByteArray() ||
3625 object.IsWeakFixedArray());
3628 DCHECK(object.map() != ReadOnlyRoots(this).fixed_cow_array_map());
3637 int old_size = object.Size();
3638 Address old_end = object.address() + old_size;
3642 if (MayContainRecordedSlots(object)) {
3643 MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
3644 DCHECK(!chunk->RegisteredObjectWithInvalidatedSlots<OLD_TO_NEW>(object));
3645 DCHECK(!chunk->RegisteredObjectWithInvalidatedSlots<OLD_TO_OLD>(object));
3649 bool clear_slots = MayContainRecordedSlots(object);
3654 // We do not create a filler for objects in a large object space.
3655 if (!IsLargeObject(object)) {
3679 object.set_length(object.length() - elements_to_trim, kReleaseStore);
3681 // Notify the heap object allocation tracker of change in object layout. The
3684 tracker->UpdateObjectSizeEvent(object.address(), object.Size());
3957 HeapObject object, const DisallowGarbageCollection&,
3960 incremental_marking()->MarkBlackAndVisitObjectDueToLayoutChange(object);
3963 MayContainRecordedSlots(object)) {
3964 MemoryChunk::FromHeapObject(object)
3965 ->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(object);
3969 MayContainRecordedSlots(object)) {
3970 MemoryChunk::FromHeapObject(object)
3971 ->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(object);
3976 pending_layout_change_object_ = object;
4009 void VisitMapPointer(HeapObject object) override {} // do nothing by default
4026 void Heap::VerifyObjectLayoutChange(HeapObject object, Map new_map) {
4034 // Check that Heap::NotifyObjectLayoutChange was called for object transitions
4036 // If you see this check triggering for a freshly allocated object,
4037 // use object->set_map_after_allocation() to initialize its map.
4039 VerifySafeMapTransition(object, new_map);
4041 DCHECK_EQ(pending_layout_change_object_, object);
4046 void Heap::VerifySafeMapTransition(HeapObject object, Map new_map) {
4049 if (object.IsJSObject(cage_base)) {
4050 // Without double unboxing all in-object fields of a JSObject are tagged.
4053 if (object.IsString(cage_base) &&
4061 if (FLAG_shared_string_table && object.IsString(cage_base) &&
4072 object.IterateFast(cage_base, &old_visitor);
4073 MapWord old_map_word = object.map_word(cage_base, kRelaxedLoad);
4075 object.set_map_word(MapWord::FromMap(new_map), kRelaxedStore);
4077 object.IterateFast(cage_base, &new_visitor);
4079 object.set_map_word(old_map_word, kRelaxedStore);
4329 void Heap::AppendArrayBufferExtension(JSArrayBuffer object,
4332 array_buffer_sweeper_->Append(object, extension);
4335 void Heap::DetachArrayBufferExtension(JSArrayBuffer object,
4338 return array_buffer_sweeper_->Detach(object, extension);
4791 void Heap::VerifyRememberedSetFor(HeapObject object) {
4792 MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
4793 DCHECK_IMPLIES(chunk->mutex() == nullptr, ReadOnlyHeap::Contains(object));
4798 Address start = object.address();
4799 Address end = start + object.Size(cage_base);
4802 if (!InYoungGeneration(object)) {
4807 object.IterateBody(cage_base, &visitor);
5007 // - The startup object cache.
5092 // Iterate over the startup and shared heap object caches unless
5099 // shared heap object cache and should iterate it.
5102 // own its shared heap object cache, and should not iterate it.
6041 Object object(*location);
6042 if (!object.IsHeapObject()) {
6047 HeapObject heap_object = HeapObject::cast(object);
6406 void Heap::ClearRecordedSlot(HeapObject object, ObjectSlot slot) {
6408 DCHECK(!IsLargeObject(object));
6427 void Heap::VerifyClearedSlot(HeapObject object, ObjectSlot slot) {
6429 DCHECK(!IsLargeObject(object));
6430 if (InYoungGeneration(object)) return;
6435 page->RegisteredObjectWithInvalidatedSlots<OLD_TO_NEW>(object));
6437 page->RegisteredObjectWithInvalidatedSlots<OLD_TO_OLD>(object));
6497 virtual bool SkipObject(HeapObject object) = 0;
6513 bool SkipObject(HeapObject object) override {
6514 if (object.IsFreeSpaceOrFiller()) return true;
6515 Address chunk = object.ptr() & ~kLogicalChunkAlignmentMask;
6517 return reachable_[chunk]->count(object) == 0;
6521 bool MarkAsReachable(HeapObject object) {
6522 Address chunk = object.ptr() & ~kLogicalChunkAlignmentMask;
6526 if (reachable_[chunk]->count(object)) return false;
6527 reachable_[chunk]->insert(object);
6542 void VisitMapPointer(HeapObject object) override {
6543 MarkHeapObject(Map::unchecked_cast(object.map(cage_base())));
6596 typename TSlot::TObject object = p.load(cage_base());
6598 if (object.GetHeapObject(&heap_object)) {
6855 std::function<void(HeapObject object, ObjectSlot slot, Object target)>
7076 typename TSlot::TObject object = slot.load(cage_base());
7078 if (object.GetHeapObject(&heap_object)) {
7081 CHECK(object.IsSmi() || object.IsCleared() ||
7082 MapWord::IsPacked(object.ptr()));
7092 // this by moving that object to POINTER_VISITOR_ID_LIST.
7166 Map Heap::GcSafeMapOfCodeSpaceObject(HeapObject object) {
7168 MapWord map_word = object.map_word(cage_base, kRelaxedLoad);
7180 Code Heap::GcSafeCastToCode(HeapObject object, Address inner_pointer) {
7181 Code code = Code::unchecked_cast(object);
7213 // Check if the inner pointer points into a large object chunk.
7220 // Iterate through the page until we reach the end or find an object
7240 for (HeapObject object = iterator.Next(); !object.is_null();
7241 object = iterator.Next()) {
7242 if (!object.IsCode()) continue;
7243 Code code = Code::cast(object);
7278 void Heap::GenerationalBarrierSlow(HeapObject object, Address slot,
7280 MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
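
Aside (annotation, not part of the match listing): Heap::GenerationalBarrierSlow at lines 7278-7280 is the slow path of the generational write barrier. The toy sketch below shows the underlying idea only; the flat std::set and the address-range check are assumptions of this sketch, not how V8 represents remembered sets or page flags.

#include <cstdint>
#include <set>

// Slots in old-generation objects that hold pointers into the young
// generation; a scavenge visits these instead of scanning the old space.
struct RememberedSet {
  std::set<uintptr_t> old_to_new_slots;
};

// Assumption: a fixed address range stands in for V8's per-page generation flags.
static bool InYoungGeneration(uintptr_t addr) {
  return addr >= 0x10000000u && addr < 0x20000000u;
}

// Record the slot only for old -> young stores; other stores need no entry.
void GenerationalBarrier(RememberedSet& rs, uintptr_t host_object,
                         uintptr_t slot, uintptr_t value) {
  if (!InYoungGeneration(host_object) && InYoungGeneration(value)) {
    rs.old_to_new_slots.insert(slot);
  }
}

int main() {
  RememberedSet rs;
  GenerationalBarrier(rs, /*host_object=*/0x30000000u, /*slot=*/0x30000040u,
                      /*value=*/0x10000010u);  // old object storing a young pointer
  return rs.old_to_new_slots.size() == 1 ? 0 : 1;
}
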
7321 void Heap::WriteBarrierForRangeImpl(MemoryChunk* source_page, HeapObject object,
7344 marking_barrier->MarkValue(object, value_heap_object)) {
7354 template void Heap::WriteBarrierForRange<ObjectSlot>(HeapObject object,
7358 HeapObject object, MaybeObjectSlot start_slot, MaybeObjectSlot end_slot);
7361 void Heap::WriteBarrierForRange(HeapObject object, TSlot start_slot,
7364 MemoryChunk* source_page = MemoryChunk::FromHeapObject(object);
7385 return WriteBarrierForRangeImpl<kDoGenerational>(source_page, object,
7389 return WriteBarrierForRangeImpl<kDoMarking>(source_page, object,
7394 source_page, object, start_slot, end_slot);
7399 source_page, object, start_slot, end_slot);
7405 source_page, object, start_slot, end_slot);
7413 HeapObject object) {
7414 DCHECK(InYoungGeneration(object));
7416 MarkCompactCollector::ProcessRelocInfo(host, rinfo, object);
7422 bool Heap::PageFlagsAreConsistent(HeapObject object) {
7426 BasicMemoryChunk* chunk = BasicMemoryChunk::FromHeapObject(object);
7428 heap_internals::MemoryChunk::FromHeapObject(object);
7448 Heap* heap = Heap::FromWritableHeapObject(object);