Lines Matching defs:object
24 #include "src/heap/code-object-registry.h"
41 #include "src/heap/object-stats.h"
50 #include "src/heap/weak-object-worklists.h"
60 #include "src/objects/maybe-object.h"
107 virtual bool IsMarked(HeapObject object) = 0;
109 virtual bool IsBlackOrGrey(HeapObject object) = 0;
131 void VisitMapPointer(HeapObject object) override {
132 VerifyMap(object.map(cage_base()));
155 HeapObject object = object_and_size.first;
157 Address current = object.address();
160 CHECK(IsMarked(object));
162 object.Iterate(cage_base(), this);
164 // The object is either part of a black area of black allocation or a
165 // regular black object.
233 bool IsMarked(HeapObject object) override {
234 return marking_state_->IsBlack(object);
237 bool IsBlackOrGrey(HeapObject object) override {
238 return marking_state_->IsBlackOrGrey(object);
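The verifier matches above (source lines 107-109 and 233-238) show the pattern: an abstract MarkingVerifier declares color predicates, and each GC flavor answers them from its own marking state. A minimal standalone sketch of that shape; MarkingState, Color, and HeapObject here are simplified stand-ins for V8's bitmap-backed types, not the real API:

    #include <cstdint>
    #include <unordered_map>

    // Simplified stand-ins; real V8 keeps colors in per-page mark bitmaps.
    struct HeapObject { uintptr_t address; };
    enum class Color { kWhite, kGrey, kBlack };

    struct MarkingState {
      std::unordered_map<uintptr_t, Color> colors;
      bool IsBlack(HeapObject o) const {
        auto it = colors.find(o.address);
        return it != colors.end() && it->second == Color::kBlack;
      }
      bool IsBlackOrGrey(HeapObject o) const {
        auto it = colors.find(o.address);
        return it != colors.end() && it->second != Color::kWhite;
      }
    };

    // Abstract verifier: subclasses define what "marked" means for their GC.
    class MarkingVerifier {
     public:
      virtual ~MarkingVerifier() = default;
      virtual bool IsMarked(HeapObject object) = 0;
      virtual bool IsBlackOrGrey(HeapObject object) = 0;
    };

    // Full-GC flavor: only black objects count as marked.
    class FullMarkingVerifier : public MarkingVerifier {
     public:
      explicit FullMarkingVerifier(MarkingState* s) : marking_state_(s) {}
      bool IsMarked(HeapObject object) override {
        return marking_state_->IsBlack(object);
      }
      bool IsBlackOrGrey(HeapObject object) override {
        return marking_state_->IsBlackOrGrey(object);
      }
     private:
      MarkingState* marking_state_;
    };

The young-generation verifier at source lines 4881-4886 answers IsMarked with IsGrey instead: minor marking leaves live objects grey rather than black.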
294 typename TSlot::TObject object = slot.load(cage_base());
296 if (object.GetHeapObjectIfStrong(&heap_object)) {
330 void VisitMapPointer(HeapObject object) override {
331 VerifyMap(object.map(cage_base()));
362 HeapObject object = HeapObject::FromAddress(current);
363 if (!object.IsFreeSpaceOrFiller(cage_base())) {
364 object.Iterate(cage_base(), this);
366 current += object.Size(cage_base());
422 typename TSlot::TObject object = current.load(cage_base());
424 if (object.GetHeapObjectIfStrong(&heap_object)) {
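Source lines 294/296 and 422/424 repeat the templated slot-visiting idiom: load the slot's tagged value, then act only if it holds a strong heap pointer (Smis and weak or cleared references are skipped). A rough standalone approximation; the tag scheme, TaggedValue, and ObjectSlot below are invented to mimic the shape of TSlot::TObject and GetHeapObjectIfStrong, and do not match V8's actual tagging:

    #include <cstdint>
    #include <vector>

    struct HeapObject { uintptr_t address; };

    // Invented tag scheme: low bit set = strong heap pointer, clear = Smi.
    struct TaggedValue {
      uintptr_t raw;
      bool GetHeapObjectIfStrong(HeapObject* out) const {
        if ((raw & 1) == 0) return false;    // Smi: nothing to trace.
        out->address = raw & ~uintptr_t{1};  // Strip the tag bit.
        return true;
      }
    };

    struct ObjectSlot {
      const TaggedValue* location;
      TaggedValue load() const { return *location; }
    };

    // The visiting loop: load each slot, act only on strong heap pointers.
    template <typename Visitor>
    void VisitPointers(const std::vector<ObjectSlot>& slots, Visitor&& visit) {
      for (const ObjectSlot& slot : slots) {
        TaggedValue object = slot.load();
        HeapObject heap_object;
        if (object.GetHeapObjectIfStrong(&heap_object)) {
          visit(heap_object);  // e.g. verify marking, or record the slot.
        }
      }
    }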
635 for (HeapObject object = iterator.Next(); !object.is_null();
636 object = iterator.Next()) {
637 CHECK(non_atomic_marking_state()->IsBlack(object));
1151 Object object = *p;
1152 if (!object.IsHeapObject()) return;
1153 HeapObject heap_object = HeapObject::cast(object);
1218 V8_INLINE void MarkObject(HeapObject host, Object object) {
1219 if (!object.IsHeapObject()) return;
1220 HeapObject heap_object = HeapObject::cast(object);
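Source lines 1151-1153, 1218-1220, and 1291-1292 are three instances of one idiom: bail out early when the value is not a heap object (e.g. a Smi), otherwise cast and mark. A sketch under the same assumed tag scheme as above; Object, HeapObject, and Marker are stand-ins:

    #include <cstdint>
    #include <unordered_set>

    // Stand-ins; in V8 the IsHeapObject() check is a pointer-tag test.
    struct Object {
      uintptr_t raw;
      bool IsHeapObject() const { return (raw & 1) != 0; }  // assumed tagging
    };
    struct HeapObject {
      uintptr_t address;
      static HeapObject cast(Object o) { return {o.raw & ~uintptr_t{1}}; }
    };

    struct Marker {
      std::unordered_set<uintptr_t> marked;
      // The recurring idiom: filter out non-heap values, then cast and mark.
      void MarkObject(Object object) {
        if (!object.IsHeapObject()) return;  // Smis carry no references.
        HeapObject heap_object = HeapObject::cast(object);
        marked.insert(heap_object.address);
      }
    };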
1245 MaybeObject object = p.load(cage_base());
1247 if (object.GetHeapObject(&heap_object))
1289 V8_INLINE void MarkObject(HeapObject host, ObjectSlot slot, Object object) {
1291 if (!object.IsHeapObject()) return;
1292 HeapObject heap_object = HeapObject::cast(object);
1395 Object RetainAs(Object object) override {
1396 HeapObject heap_object = HeapObject::cast(object);
1399 return object;
1400 } else if (object.IsAllocationSite() &&
1401 !(AllocationSite::cast(object).IsZombie())) {
1405 Object nested = object;
1415 return object;
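Source lines 1395-1415 outline WeakObjectRetainer::RetainAs for the full collector: a marked object is retained as-is; an unmarked AllocationSite that is not a zombie is also kept (the real code additionally walks its chain of nested sites, line 1405); anything else is dropped. A simplified sketch, with marking reduced to a boolean and the type check done via dynamic_cast:

    // Stand-ins: a marked bit per object; AllocationSite modeled as a subtype.
    struct HeapObject {
      bool marked = false;
      virtual ~HeapObject() = default;
    };
    struct AllocationSite : HeapObject {
      bool zombie = false;
    };

    // Weak-list retention policy: return the object to keep it, or nullptr to
    // let the weak reference be cleared.
    HeapObject* RetainAs(HeapObject* object) {
      if (object->marked) return object;  // Still live: retain unchanged.
      if (auto* site = dynamic_cast<AllocationSite*>(object)) {
        // Unmarked sites survive unless they are zombies; the real code also
        // walks the chain of nested sites here (line 1405).
        if (!site->zombie) return object;
      }
      return nullptr;
    }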
1504 HeapObject object = rinfo->target_object(cage_base());
1505 GenerationalBarrierForCode(host, rinfo, object);
1506 collector_->RecordRelocSlot(host, rinfo, object);
1515 virtual void MarkArrayBufferExtensionPromoted(HeapObject object) {}
1576 virtual bool Visit(HeapObject object, int size) = 0;
1616 // In case the object's map gets relocated during GC, we load the old map
1636 // In case the object's map gets relocated during GC, we load the old map
1660 inline bool TryEvacuateObject(AllocationSpace target_space, HeapObject object,
1663 if (FLAG_stress_compaction && AbortCompactionForTesting(object))
1666 Map map = object.map(cage_base());
1671 DCHECK(Heap::InYoungGeneration(object));
1680 MigrateObject(*target_object, object, size, target_space);
1711 bool AbortCompactionForTesting(HeapObject object) {
1715 if ((object.ptr() & kPageAlignmentMask) == mask) {
1716 Page* page = Page::FromHeapObject(object);
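Source lines 1660-1716 show the evacuation attempt: under --stress-compaction the move can be aborted on purpose (the in-page address mask at line 1715 picks victims deterministically), otherwise space is allocated in the target space and the object is migrated. A standalone approximation; AllocateIn, MigrateObject, the mask value, and the page size are all invented placeholders:

    #include <cstdint>
    #include <cstdlib>
    #include <cstring>

    enum AllocationSpace { NEW_SPACE, OLD_SPACE };
    constexpr uintptr_t kPageAlignmentMask = (uintptr_t{1} << 18) - 1;  // assumed

    struct HeapObject { uintptr_t address; };

    bool stress_compaction = false;  // stands in for FLAG_stress_compaction

    // Abort evacuation for objects whose in-page offset matches a fixed mask,
    // exercising the abort/rollback path under stress testing (cf. line 1715).
    bool AbortCompactionForTesting(HeapObject object, uintptr_t mask) {
      return (object.address & kPageAlignmentMask) == mask;
    }

    // Invented placeholders for the allocation and the raw copy.
    bool AllocateIn(AllocationSpace /*space*/, int size, uintptr_t* out) {
      *out = reinterpret_cast<uintptr_t>(std::malloc(size));
      return *out != 0;
    }
    void MigrateObject(uintptr_t target, HeapObject source, int size) {
      std::memcpy(reinterpret_cast<void*>(target),
                  reinterpret_cast<void*>(source.address), size);
    }

    bool TryEvacuateObject(AllocationSpace target_space, HeapObject object,
                           int size, uintptr_t* target_object) {
      if (stress_compaction && AbortCompactionForTesting(object, 0xF000))
        return false;
      if (!AllocateIn(target_space, size, target_object)) return false;
      MigrateObject(*target_object, object, size);
      return true;
    }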
1755 inline bool Visit(HeapObject object, int size) override {
1756 if (TryEvacuateWithoutCopy(object)) return true;
1760 heap_->UpdateAllocationSite(object.map(), object,
1763 if (!TryEvacuateObject(OLD_SPACE, object, size, &target_object)) {
1765 "MarkCompactCollector: young object promotion failed");
1772 if (heap_->ShouldBePromoted(object.address()) &&
1773 TryEvacuateObject(OLD_SPACE, object, size, &target_object)) {
1778 heap_->UpdateAllocationSite(object.map(), object,
1782 AllocationSpace space = AllocateTargetObject(object, size, &target);
1783 MigrateObject(HeapObject::cast(target), object, size, space);
1792 inline bool TryEvacuateWithoutCopy(HeapObject object) {
1795 Map map = object.map();
1799 HeapObject actual = ThinString::cast(object).unchecked_actual();
1801 object.set_map_word(MapWord::FromForwardingAddress(actual),
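Source lines 1792-1801 show the one case where young evacuation skips the copy entirely: a ThinString is just a forwarding wrapper, so the collector installs a forwarding map word that points straight at the underlying actual string. A sketch with invented types and an assumed forwarding-tag encoding:

    #include <cstdint>

    // Invented layout: a thin string wraps a pointer to its "actual" string,
    // and forwarding is recorded by overwriting the map word.
    struct HeapObject {
      uintptr_t map_word = 0;
      bool is_thin_string = false;
    };
    struct ThinString : HeapObject {
      HeapObject* actual = nullptr;
    };

    constexpr uintptr_t kForwardingTag = 0x3;  // assumed encoding

    bool TryEvacuateWithoutCopy(HeapObject* object) {
      if (!object->is_thin_string) return false;
      HeapObject* actual = static_cast<ThinString*>(object)->actual;
      // Forward every reference straight to the underlying string instead of
      // copying the wrapper (cf. MapWord::FromForwardingAddress, line 1801).
      object->map_word = reinterpret_cast<uintptr_t>(actual) | kForwardingTag;
      return true;
    }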
1873 inline bool Visit(HeapObject object, int size) override {
1875 heap_->UpdateAllocationSite(object.map(), object,
1878 DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsCodeSpaceObject(object));
1879 PtrComprCageBase cage_base = GetPtrComprCageBase(object);
1880 object.IterateFast(cage_base, record_visitor_);
1882 record_visitor_->MarkArrayBufferExtensionPromoted(object);
1906 inline bool Visit(HeapObject object, int size) override {
1908 if (TryEvacuateObject(Page::FromHeapObject(object)->owner_identity(),
1909 object, size, &target_object)) {
1910 DCHECK(object.map_word(heap_->isolate(), kRelaxedLoad)
1939 inline bool Visit(HeapObject object, int size) override {
1942 Map map = object.map(cage_base());
1943 // Instead of calling object.IterateBodyFast(cage_base(), &visitor) here
1945 DCHECK_EQ(object.SizeFromMap(map), size);
1946 object.IterateBodyFast(map, size, &visitor);
2082 // As soon as a single object was processed and potentially marked another
2083 // object, we need another iteration. Otherwise we might fail to apply
2163 for (HeapObject object : ephemeron_marking_.newly_discovered) {
2164 auto range = key_to_values.equal_range(object);
2167 MarkObject(object, value);
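The comment at source lines 2082-2083 and the loop at 2163-2167 describe the ephemeron fixpoint: whenever marking a key makes its values live, those values may themselves be ephemeron keys, so iteration repeats until nothing new is discovered. A standalone sketch of the inner step, with objects reduced to integers:

    #include <unordered_map>
    #include <unordered_set>
    #include <vector>

    using Obj = int;  // objects reduced to integers for the sketch

    // One fixpoint step: every value whose key just became live is marked too.
    // Marking a value may discover further ephemeron keys, so the caller loops
    // until no iteration marks anything new.
    void ProcessNewlyMarkedKeys(
        const std::vector<Obj>& newly_discovered,
        const std::unordered_multimap<Obj, Obj>& key_to_values,
        std::unordered_set<Obj>* marked, bool* work_to_do) {
      for (Obj key : newly_discovered) {
        auto range = key_to_values.equal_range(key);
        for (auto it = range.first; it != range.second; ++it) {
          if (marked->insert(it->second).second) *work_to_do = true;
        }
      }
    }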
2204 HeapObject object;
2205 while (local_marking_worklists()->PopWrapper(&object)) {
2206 scope.TracePossibleWrapper(JSObject::cast(object));
2219 HeapObject object;
2225 while (local_marking_worklists()->Pop(&object) ||
2226 local_marking_worklists()->PopOnHold(&object)) {
2229 if (object.IsFreeSpaceOrFiller(cage_base)) {
2232 DCHECK_IMPLIES(object.map(cage_base) ==
2234 marking_state()->IsBlack(object));
2235 // Other fillers may be black or grey depending on the color of the object
2237 DCHECK_IMPLIES(object.map(cage_base) !=
2239 marking_state()->IsBlackOrGrey(object));
2242 DCHECK(object.IsHeapObject());
2243 DCHECK(heap()->Contains(object));
2244 DCHECK(!(marking_state()->IsWhite(object)));
2247 AddNewlyDiscovered(object);
2249 Map map = object.map(cage_base);
2252 if (native_context_inferrer_.Infer(isolate, map, object, &context)) {
2256 size_t visited_size = marking_visitor_->Visit(map, object);
2259 map, object, visited_size);
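Source lines 2204-2259 show the main marking drain loop: pop objects from the worklist (and the on-hold list), skip free-space fillers, then visit each object via its map and account the visited size. A heavily reduced sketch; Worklist and Visit are stand-ins for the real worklist and map-dispatched visitor:

    #include <cstddef>
    #include <deque>

    struct HeapObject {
      std::size_t size;
      bool is_filler;
    };

    struct Worklist {
      std::deque<HeapObject*> items;
      bool Pop(HeapObject** out) {
        if (items.empty()) return false;
        *out = items.front();
        items.pop_front();
        return true;
      }
    };

    // Stand-in for the map-dispatched visitor; returns bytes visited.
    std::size_t Visit(HeapObject* object) { return object->size; }

    std::size_t DrainMarkingWorklist(Worklist* worklist) {
      std::size_t live_bytes = 0;
      HeapObject* object;
      while (worklist->Pop(&object)) {
        if (object->is_filler) continue;  // fillers hold no references (2229)
        live_bytes += Visit(object);      // trace, then account visited size
      }
      return live_bytes;
    }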
2521 // code object on the JSFunction.
2578 HeapObject object = weak_object_in_code.first;
2580 if (!non_atomic_marking_state()->IsBlackOrGrey(object) &&
2631 isolate(), [](HeapObject object, ObjectSlot slot, HeapObject target) {
2632 RecordSlot(object, slot, target);
2636 // UncompiledData object.
2660 // Create a filler object for any leftover space in the bytecode array.
2672 [](HeapObject object, ObjectSlot slot, HeapObject target) {
2673 RecordSlot(object, slot, target);
2727 // with an uncompiled data object.
2744 auto gc_notify_updated_slot = [](HeapObject object, ObjectSlot slot,
2746 RecordSlot(object, slot, HeapObject::cast(target));
2758 auto gc_notify_updated_slot = [](HeapObject object, ObjectSlot slot,
2760 RecordSlot(object, slot, HeapObject::cast(target));
3022 auto gc_notify_updated_slot = [](HeapObject object, ObjectSlot slot,
3025 RecordSlot(object, slot, HeapObject::cast(target));
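Source lines 2631-2632, 2672-2673, 2744-2746, 2758-2760, and 3022-3025 all pass the same callback shape: whenever flushing or right-trimming rewrites a slot during GC, the collector is notified so it can record the (host, slot, target) triple in its slot sets. A sketch of that shape; SlotRecord and the global vector are stand-ins for the real remembered sets:

    #include <vector>

    struct HeapObject { int id; };
    struct ObjectSlot { HeapObject** location; };

    struct SlotRecord { HeapObject host; ObjectSlot slot; HeapObject target; };
    std::vector<SlotRecord> recorded_slots;  // stand-in for the slot sets

    void RecordSlot(HeapObject host, ObjectSlot slot, HeapObject target) {
      recorded_slots.push_back({host, slot, target});
    }

    // The callback shape passed around at lines 2631/2672/2744/3022: any code
    // that rewrites a slot during GC reports it through this hook.
    auto gc_notify_updated_slot = [](HeapObject object, ObjectSlot slot,
                                     HeapObject target) {
      RecordSlot(object, slot, target);
    };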
3968 HeapObject object = current->GetObject();
3969 DCHECK(!marking_state->IsGrey(object));
3970 if (marking_state->IsBlack(object)) {
4000 Object RetainAs(Object object) override {
4001 if (object.IsHeapObject()) {
4002 HeapObject heap_object = HeapObject::cast(object);
4008 return object;
4029 HeapObject const object = object_and_size.first;
4030 if (!visitor->Visit(object, object_and_size.second)) {
4034 chunk->AddressToMarkbitIndex(object.address()));
4035 *failed_object = object;
4054 HeapObject object = reinterpret_cast<LargePage*>(chunk)->GetObject();
4055 if (marking_state->IsBlack(object)) {
4056 const bool success = visitor->Visit(object, object.Size());
4063 HeapObject const object = object_and_size.first;
4064 DCHECK(marking_state->IsBlack(object));
4065 const bool success = visitor->Visit(object, object_and_size.second);
4083 HeapObject object = reinterpret_cast<LargePage*>(chunk)->GetObject();
4084 if (marking_state->IsGrey(object)) {
4085 const bool success = visitor->Visit(object, object.Size());
4092 HeapObject const object = object_and_size.first;
4093 DCHECK(marking_state->IsGrey(object));
4094 const bool success = visitor->Visit(object, object_and_size.second);
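Source lines 4029-4094 show the two shapes of live-object iteration: a LargePage holds exactly one object, checked directly against the marking state, while regular pages walk (object, size) pairs derived from the mark bitmap, and iteration aborts early when the visitor fails (recording the failed object, line 4035). A sketch of the regular-page loop:

    #include <cstddef>
    #include <utility>
    #include <vector>

    struct HeapObject { /* payload elided */ };

    // Precondition: every (object, size) pair came off the mark bitmap, i.e.
    // is already marked. The visitor returns false to abort, and the failed
    // object is reported so the caller can recover (cf. line 4035).
    template <typename Visitor>
    bool VisitLiveObjects(
        const std::vector<std::pair<HeapObject*, std::size_t>>& live_objects,
        Visitor&& visit, HeapObject** failed_object) {
      for (const auto& [object, size] : live_objects) {
        if (!visit(object, size)) {
          *failed_object = object;
          return false;
        }
      }
      return true;
    }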
4278 HeapObject object = HeapObject::FromAddress(cur);
4279 Map map = object.map(visitor.cage_base());
4280 int size = object.SizeFromMap(map);
4281 object.IterateBodyFast(map, size, &visitor);
4347 // If the object was in from-space before and is in to-space after
4348 // executing the callback, the object is still live.
4350 // just freed free space object.
4881 bool IsMarked(HeapObject object) override {
4882 return marking_state_->IsGrey(object);
4885 bool IsBlackOrGrey(HeapObject object) override {
4886 return marking_state_->IsBlackOrGrey(object);
4907 // only objects that can contain code pointers, are always allocated in
4933 typename TSlot::TObject object = slot.load(cage_base);
4936 if (object.GetHeapObject(&heap_object)) {
4968 typename TSlot::TObject object = current.load(cage_base());
4970 if (object.GetHeapObject(&heap_object)) {
5038 // only objects that can contain code pointers, are always allocated in
5061 V8_INLINE int VisitJSArrayBuffer(Map map, JSArrayBuffer object) {
5062 object.YoungMarkExtension();
5063 int size = JSArrayBuffer::BodyDescriptor::SizeOf(map, object);
5064 JSArrayBuffer::BodyDescriptor::IterateBody(map, object, size, this);
5087 inline void MarkObjectViaMarkingWorklist(HeapObject object) {
5088 if (marking_state_->WhiteToGrey(object)) {
5090 worklist_local_->Push(object);
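Source lines 5087-5090 (and the young-generation MarkObject at 5549-5551) show tri-color marking's entry point: a white-to-grey transition claims the object exactly once, and only the claiming thread pushes it onto the worklist. A sketch using a compare-and-swap on a per-object color field; V8 actually stores colors in side-table mark bitmaps:

    #include <atomic>
    #include <deque>

    struct HeapObject {
      std::atomic<int> color{0};  // 0 = white, 1 = grey, 2 = black (sketch)
    };

    // Atomically claim a white object; true only for the first caller, so an
    // object is pushed onto the worklist at most once even with many markers.
    bool WhiteToGrey(HeapObject* o) {
      int expected = 0;
      return o->color.compare_exchange_strong(expected, 1);
    }

    void MarkObjectViaMarkingWorklist(std::deque<HeapObject*>* worklist,
                                      HeapObject* object) {
      if (WhiteToGrey(object)) {
        worklist->push_back(object);  // grey objects await visiting (5090)
      }
    }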
5148 // Migrate color to old generation marking in case the object survived young
5174 void MarkArrayBufferExtensionPromoted(HeapObject object) final {
5175 if (!object.IsJSArrayBuffer()) return;
5176 JSArrayBuffer::cast(object).YoungMarkExtensionPromoted();
5182 inline bool IsLive(HeapObject object) {
5183 return collector_->non_atomic_marking_state()->IsBlack(object);
5361 HeapObject const object = object_and_size.first;
5362 DCHECK(non_atomic_marking_state()->IsGrey(object));
5363 Address free_end = object.address();
5377 Map map = object.map(cage_base, kAcquireLoad);
5378 int size = object.SizeFromMap(map);
5442 Object RetainAs(Object object) override {
5443 HeapObject heap_object = HeapObject::cast(object);
5444 if (!Heap::InYoungGeneration(heap_object)) return object;
5449 return object;
5549 void MarkObject(Object object) {
5550 if (!Heap::InYoungGeneration(object)) return;
5551 HeapObject heap_object = HeapObject::cast(object);
5559 HeapObject object;
5560 while (marking_worklist_local_.Pop(&object)) {
5561 const int size = visitor_.Visit(object);
5562 IncrementLiveBytes(object, size);
5566 void IncrementLiveBytes(HeapObject object, intptr_t bytes) {
5567 local_live_bytes_[Page::FromHeapObject(object)] += bytes;
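Source lines 5559-5567 show the parallel young-generation marker accumulating live bytes into a task-local map keyed by page, so the hot drain loop never contends on shared per-page counters. A sketch; the page size and PageFromAddress are assumptions standing in for Page::FromHeapObject:

    #include <cstdint>
    #include <unordered_map>

    constexpr uintptr_t kPageAlignmentMask = (uintptr_t{1} << 18) - 1;  // assumed

    struct Page;  // opaque; stands in for V8's Page

    inline Page* PageFromAddress(uintptr_t address) {
      // Assumed analogue of Page::FromHeapObject: mask off the in-page offset.
      return reinterpret_cast<Page*>(address & ~kPageAlignmentMask);
    }

    // Each marking task tallies locally and flushes once at the end, keeping
    // shared per-page counters out of the hot drain loop.
    struct LocalLiveBytes {
      std::unordered_map<Page*, intptr_t> local_live_bytes_;
      void Increment(uintptr_t object_address, intptr_t bytes) {
        local_live_bytes_[PageFromAddress(object_address)] += bytes;
      }
    };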
5627 MaybeObject object = *slot;
5628 if (Heap::InYoungGeneration(object)) {
5629 // Marking happens before flipping the young generation, so the object
5631 DCHECK(Heap::InToPage(object));
5633 bool success = object.GetHeapObject(&heap_object);
5804 HeapObject object;
5805 while (main_thread_worklist_local_.Pop(&object)) {
5806 DCHECK(!object.IsFreeSpaceOrFiller(cage_base));
5807 DCHECK(object.IsHeapObject());
5808 DCHECK(heap()->Contains(object));
5809 DCHECK(non_atomic_marking_state()->IsGrey(object));
5810 main_marking_visitor()->Visit(object);
5827 HeapObject const object = object_and_size.first;
5828 Address free_end = object.address();
5839 Map map = object.map(cage_base, kAcquireLoad);
5840 int size = object.SizeFromMap(map);
6014 HeapObject object = current->GetObject();
6015 DCHECK(!non_atomic_marking_state_.IsBlack(object));
6016 if (non_atomic_marking_state_.IsGrey(object)) {