/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ECMASCRIPT_MEM_PARALLEL_MARKER_INL_H
#define ECMASCRIPT_MEM_PARALLEL_MARKER_INL_H

#include "ecmascript/mem/parallel_marker.h"

#include "ecmascript/js_hclass-inl.h"
#include "ecmascript/mem/gc_bitset.h"
#include "ecmascript/mem/heap.h"
#include "ecmascript/mem/region-inl.h"
#include "ecmascript/mem/tlab_allocator-inl.h"

namespace panda::ecmascript {

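// Visits the in-object property slots of |root|, invoking |callback| only for slots whose layout
// attribute has a tagged representation (raw-value fields are skipped). The visited range is
// clamped to the property capacity recorded in the hclass layout. Returns true so callers can
// distinguish this layout-driven path from a plain range scan.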
template <typename Callback>
ARK_INLINE bool NonMovableMarker::VisitBodyInObj(TaggedObject *root, ObjectSlot start, ObjectSlot end,
                                                 bool needBarrier, Callback callback)
{
    auto hclass = root->SynchronizedGetClass();
    Region *rootRegion = Region::ObjectAddressToRange(root);
    int index = 0;
    auto layout = LayoutInfo::UncheckCast(hclass->GetLayout().GetTaggedObject());
    ObjectSlot realEnd = start;
    realEnd += layout->GetPropertiesCapacity();
    end = end > realEnd ? realEnd : end;
    for (ObjectSlot slot = start; slot < end; slot++) {
        auto attr = layout->GetAttr(index++);
        if (attr.IsTaggedRep()) {
            callback(slot, rootRegion, needBarrier);
        }
    }
    return true;
}

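// Marks the object referenced by |slot|. Weak references are recorded for later fixup instead of
// being marked, and objects in fresh regions are skipped here because they are marked from the JS
// thread (see MarkObject). When |needBarrier| is set, slots pointing into the collect set are
// added to the root region's cross-region remembered set.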
inline void NonMovableMarker::MarkValue(uint32_t threadId, ObjectSlot &slot, Region *rootRegion, bool needBarrier)
{
    JSTaggedValue value(slot.GetTaggedType());
    if (value.IsHeapObject()) {
        ASSERT(!value.IsHole()); // check that value is not zero
        TaggedObject *obj = nullptr;
        if (!value.IsWeakForHeapObject()) {
            obj = value.GetTaggedObject();
            Region *objRegion = Region::ObjectAddressToRange(obj);
            if (objRegion->IsFreshRegion()) {
                // Objects in fresh regions should only be marked from the JS thread,
                // either in the barrier or by MarkObject in MarkRoots.
                ASSERT(objRegion->InYoungSpace());
                return;
            }
            MarkObject(threadId, obj);
        } else {
            RecordWeakReference(threadId, reinterpret_cast<JSTaggedType *>(slot.SlotAddress()), rootRegion);
            obj = value.GetWeakReferentUnChecked();
        }
        if (needBarrier) {
            Region *valueRegion = Region::ObjectAddressToRange(obj);
            if (valueRegion->InCollectSet()) {
                rootRegion->AtomicInsertCrossRegionRSet(slot.SlotAddress());
            }
        }
    }
}

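// Marking filter for the non-moving marker: shared-heap objects are never marked here, young-mark
// GCs ignore old-space objects, and eden-mark GCs only mark eden objects. Fresh regions are marked
// non-atomically (only the JS thread touches them); everything else is marked atomically and, on a
// successful first mark, pushed onto this thread's work queue.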
inline void NonMovableMarker::MarkObject(uint32_t threadId, TaggedObject *object)
{
    Region *objectRegion = Region::ObjectAddressToRange(object);

    if (objectRegion->InSharedHeap()) {
        return;
    }

    if (heap_->IsYoungMark() && objectRegion->InGeneralOldSpace()) {
        return;
    }

    if (heap_->IsEdenMark() && !objectRegion->InEdenSpace()) {
        return;
    }

    if (objectRegion->IsFreshRegion()) {
        // This should only happen when marking roots from the JS thread.
        ASSERT(JSThread::GetCurrent() != nullptr);
        ASSERT(objectRegion->InYoungSpace());
        objectRegion->NonAtomicMark(object);
    } else if (objectRegion->AtomicMark(object)) {
        workManager_->Push(threadId, object);
    }
}

inline void NonMovableMarker::HandleRoots(uint32_t threadId, [[maybe_unused]] Root type, ObjectSlot slot)
{
    JSTaggedValue value(slot.GetTaggedType());
    if (value.IsHeapObject()) {
        MarkObject(threadId, value.GetTaggedObject());
    }
}

inline void NonMovableMarker::HandleRangeRoots(uint32_t threadId, [[maybe_unused]] Root type, ObjectSlot start,
                                               ObjectSlot end)
{
    for (ObjectSlot slot = start; slot < end; slot++) {
        JSTaggedValue value(slot.GetTaggedType());
        if (value.IsHeapObject()) {
            if (value.IsWeakForHeapObject()) {
                LOG_ECMA_MEM(FATAL) << "Weak Reference in NonMovableMarker roots";
            }
            MarkObject(threadId, value.GetTaggedObject());
        }
    }
}

inline void NonMovableMarker::HandleDerivedRoots([[maybe_unused]] Root type, [[maybe_unused]] ObjectSlot base,
                                                 [[maybe_unused]] ObjectSlot derived,
                                                 [[maybe_unused]] uintptr_t baseOldObject)
{
    // Only used to update derived pointers; the mark phase of partial GC does not need to update slots.
}

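// Scans the region's new-to-eden remembered set, treating each recorded slot like a strong or
// weak reference found during marking. The visitor always returns true, so every bit is kept.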
inline void NonMovableMarker::HandleNewToEdenRSet(uint32_t threadId, Region *region)
{
    ASSERT(region->InYoungSpace());
    region->IterateAllNewToEdenBits([this, threadId, region](void *mem) -> bool {
        ObjectSlot slot(ToUintPtr(mem));
        JSTaggedValue value(slot.GetTaggedType());
        if (value.IsHeapObject()) {
            if (value.IsWeakForHeapObject()) {
                RecordWeakReference(threadId, reinterpret_cast<JSTaggedType *>(mem), region);
            } else {
                MarkObject(threadId, value.GetTaggedObject());
            }
        }
        return true;
    });
}

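// Scans the region's old-to-new remembered set. During an eden-mark GC only referents that
// actually live in eden space are marked; otherwise every recorded referent goes through
// MarkObject, which applies its own space filtering.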
inline void NonMovableMarker::HandleOldToNewRSet(uint32_t threadId, Region *region)
{
    bool isEdenMark = heap_->IsEdenMark();
    region->IterateAllOldToNewBits([this, threadId, &region, isEdenMark](void *mem) -> bool {
        ObjectSlot slot(ToUintPtr(mem));
        JSTaggedValue value(slot.GetTaggedType());
        if (!value.IsHeapObject()) {
            return true;
        }
        if (value.IsWeakForHeapObject()) {
            RecordWeakReference(threadId, reinterpret_cast<JSTaggedType *>(mem), region);
        } else {
            auto object = value.GetTaggedObject();
            Region *objectRegion = Region::ObjectAddressToRange(object);
            if (isEdenMark) {
                if (objectRegion->InEdenSpace()) {
                    MarkObject(threadId, object);
                }
            } else {
                MarkObject(threadId, object);
            }
        }
        return true;
    });
}

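// Queues a weak reference for clearing only when neither the holder nor the referent lies in the
// general new space or the collect set; weak slots into those spaces are presumably fixed up by
// the evacuator instead. Nothing is swept during an eden GC, so recording is skipped entirely for
// eden marks.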
inline void NonMovableMarker::RecordWeakReference(uint32_t threadId, JSTaggedType *ref, Region *objectRegion)
{
    auto value = JSTaggedValue(*ref);
    Region *valueRegion = Region::ObjectAddressToRange(value.GetTaggedWeakRef());
    if (heap_->IsEdenMark()) {
        // Only record objects that may be swept, and no object is swept in an eden GC.
        return;
    }
    if (!objectRegion->InGeneralNewSpaceOrCSet() && !valueRegion->InGeneralNewSpaceOrCSet()) {
        workManager_->PushWeakReference(threadId, ref);
    }
}

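// Movable-marker variant of the layout-driven body visitor: same clamping and tagged-slot
// filtering as the non-moving version, but the callback receives the holder object so the slot
// can be updated after the referent is evacuated.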
template <typename Callback>
ARK_INLINE bool MovableMarker::VisitBodyInObj(TaggedObject *root, ObjectSlot start, ObjectSlot end, Callback callback)
{
    auto hclass = root->GetClass();
    int index = 0;
    TaggedObject *dst = hclass->GetLayout().GetTaggedObject();
    auto layout = LayoutInfo::UncheckCast(dst);
    ObjectSlot realEnd = start;
    realEnd += layout->GetPropertiesCapacity();
    end = end > realEnd ? realEnd : end;
    for (ObjectSlot slot = start; slot < end; slot++) {
        auto attr = layout->GetAttr(index++);
        if (attr.IsTaggedRep()) {
            callback(slot, root);
        }
    }
    return true;
}

inline void MovableMarker::HandleRoots(uint32_t threadId, [[maybe_unused]] Root type, ObjectSlot slot)
{
    JSTaggedValue value(slot.GetTaggedType());
    if (value.IsHeapObject()) {
        MarkObject(threadId, value.GetTaggedObject(), slot);
    }
}

inline void MovableMarker::HandleRangeRoots(uint32_t threadId, [[maybe_unused]] Root type, ObjectSlot start,
                                            ObjectSlot end)
{
    for (ObjectSlot slot = start; slot < end; slot++) {
        JSTaggedValue value(slot.GetTaggedType());
        if (value.IsHeapObject()) {
            if (value.IsWeakForHeapObject()) {
                Region *objectRegion = Region::ObjectAddressToRange(start.SlotAddress());
                RecordWeakReference(threadId, reinterpret_cast<JSTaggedType *>(slot.SlotAddress()), objectRegion);
            } else {
                MarkObject(threadId, value.GetTaggedObject(), slot);
            }
        }
    }
}

inline void MovableMarker::HandleDerivedRoots([[maybe_unused]] Root type, ObjectSlot base,
                                              ObjectSlot derived, uintptr_t baseOldObject)
{
    if (JSTaggedValue(base.GetTaggedType()).IsHeapObject()) {
        derived.Update(base.GetTaggedType() + derived.GetTaggedType() - baseOldObject);
    }
}

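// For the movable marker, the two remembered-set scans below also maintain the set itself:
// returning false from the visitor appears to drop that slot's bit, which is done when MarkObject
// reports CLEAR_SLOT (the referent was promoted or no longer needs a remembered entry), while
// KEEP_SLOT entries are retained by returning true.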
inline void MovableMarker::HandleNewToEdenRSet(uint32_t threadId, Region *region)
{
    region->IterateAllNewToEdenBits([this, threadId, &region](void *mem) -> bool {
        ObjectSlot slot(ToUintPtr(mem));
        JSTaggedValue value(slot.GetTaggedType());
        if (value.IsHeapObject()) {
            if (value.IsWeakForHeapObject()) {
                RecordWeakReference(threadId, reinterpret_cast<JSTaggedType *>(mem), region);
                return true;
            }
            auto slotStatus = MarkObject(threadId, value.GetTaggedObject(), slot);
            if (slotStatus == SlotStatus::CLEAR_SLOT) {
                return false;
            }
        }
        return true;
    });
}

inline void MovableMarker::HandleOldToNewRSet(uint32_t threadId, Region *region)
{
    region->IterateAllOldToNewBits([this, threadId, &region](void *mem) -> bool {
        ObjectSlot slot(ToUintPtr(mem));
        JSTaggedValue value(slot.GetTaggedType());
        if (value.IsHeapObject()) {
            if (value.IsWeakForHeapObject()) {
                RecordWeakReference(threadId, reinterpret_cast<JSTaggedType *>(mem), region);
                return true;
            }
            auto slotStatus = MarkObject(threadId, value.GetTaggedObject(), slot);
            if (slotStatus == SlotStatus::CLEAR_SLOT) {
                return false;
            }
        }
        return true;
    });
}

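// Allocates the destination for an evacuated object. Promotion targets the compress (old) space
// directly; otherwise allocation is tried in semi space first and falls back to the compress
// space, flipping |shouldPromote| so the caller knows the object was promoted after all.
// Allocation failure on the final path is fatal.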
inline uintptr_t MovableMarker::AllocateDstSpace(uint32_t threadId, size_t size, bool &shouldPromote)
{
    uintptr_t forwardAddress = 0;
    if (shouldPromote) {
        forwardAddress = workManager_->GetTlabAllocator(threadId)->Allocate(size, COMPRESS_SPACE);
        if (UNLIKELY(forwardAddress == 0)) {
            LOG_ECMA_MEM(FATAL) << "EvacuateObject alloc failed: "
                                << " size: " << size;
            UNREACHABLE();
        }
    } else {
        forwardAddress = workManager_->GetTlabAllocator(threadId)->Allocate(size, SEMI_SPACE);
        if (UNLIKELY(forwardAddress == 0)) {
            forwardAddress = workManager_->GetTlabAllocator(threadId)->Allocate(size, COMPRESS_SPACE);
            if (UNLIKELY(forwardAddress == 0)) {
                LOG_ECMA_MEM(FATAL) << "EvacuateObject alloc failed: "
                                    << " size: " << size;
                UNREACHABLE();
            }
            shouldPromote = true;
        }
    }
    return forwardAddress;
}

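// Bookkeeping for the thread that won the forwarding race: accounts alive (and, if promoted,
// promoted) bytes, reports the move, pushes the copy for further scanning if it has reference
// fields, and finally points the slot at the new address.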
inline void MovableMarker::UpdateForwardAddressIfSuccess(uint32_t threadId, TaggedObject *object, JSHClass *klass,
                                                         uintptr_t toAddress, size_t size, ObjectSlot slot,
                                                         bool isPromoted)
{
    workManager_->IncreaseAliveSize(threadId, size);
    if (isPromoted) {
        workManager_->IncreasePromotedSize(threadId, size);
    }

    heap_->OnMoveEvent(reinterpret_cast<intptr_t>(object), reinterpret_cast<TaggedObject *>(toAddress), size);
    if (klass->HasReferenceField()) {
        workManager_->Push(threadId, reinterpret_cast<TaggedObject *>(toAddress));
    }
    slot.Update(reinterpret_cast<TaggedObject *>(toAddress));
}

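// Loser side of the forwarding race: the speculative copy at |toAddress| is turned into a free
// object, and the slot is redirected to the winner's copy. Returns true when the winning copy is
// still in young space, meaning the caller should keep the slot recorded.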
inline bool MovableMarker::UpdateForwardAddressIfFailed(TaggedObject *object, uintptr_t toAddress, size_t size,
                                                        ObjectSlot slot)
{
    FreeObject::FillFreeObject(heap_, toAddress, size);
    TaggedObject *dst = MarkWord(object).ToForwardingAddress();
    slot.Update(dst);
    return Region::ObjectAddressToRange(dst)->InYoungSpace();
}

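// Copies the object body, skipping the mark-word header (the first HEAD_SIZE bytes), then
// installs the saved pre-forwarding mark word into the copy so the new object starts from the
// original header value rather than the forwarding address.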
inline void MovableMarker::RawCopyObject(uintptr_t fromAddress, uintptr_t toAddress, size_t size,
                                         const MarkWord &markWord)
{
    if (memcpy_s(ToVoidPtr(toAddress + HEAD_SIZE), size - HEAD_SIZE, ToVoidPtr(fromAddress + HEAD_SIZE),
                 size - HEAD_SIZE) != EOK) {
        LOG_FULL(FATAL) << "memcpy_s failed";
    }
    *reinterpret_cast<MarkWordType *>(toAddress) = markWord.GetValue();
}

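// Rebuilds the local-to-share remembered set for an evacuated object: every reference-holding
// slot is re-examined, using the layout-driven visitor for in-object fields and a plain range
// scan for everything else.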
inline void MovableMarker::UpdateLocalToShareRSet(TaggedObject *object, JSHClass *cls)
{
    Region *region = Region::ObjectAddressToRange(object);
    ASSERT(!region->InSharedHeap());
    auto callbackWithCSet = [this, region](TaggedObject *root, ObjectSlot start, ObjectSlot end,
                                           VisitObjectArea area) {
        if (area == VisitObjectArea::IN_OBJECT) {
            if (VisitBodyInObj(root, start, end,
                               [&](ObjectSlot slot, [[maybe_unused]] TaggedObject *root) {
                                   SetLocalToShareRSet(slot, region);
                               })) {
                return;
            }
        }
        for (ObjectSlot slot = start; slot < end; slot++) {
            SetLocalToShareRSet(slot, region);
        }
    };
    ObjectXRay::VisitObjectBody<VisitType::OLD_GC_VISIT>(object, cls, callbackWithCSet);
}

inline void MovableMarker::SetLocalToShareRSet(ObjectSlot slot, Region *region)
{
    ASSERT(!region->InSharedHeap());
    JSTaggedType value = slot.GetTaggedType();
    if (!JSTaggedValue(value).IsHeapObject()) {
        return;
    }
    Region *valueRegion = Region::ObjectAddressToRange(value);
    if (valueRegion->InSharedSweepableSpace()) {
        region->AtomicInsertLocalToShareRSet(slot.SlotAddress());
    }
}

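// Semi-space marking of a single slot. When the holder lives in old space and marking reports
// KEEP_SLOT, the referent stayed in young space and may still move later, so the (holder, slot)
// pair is queued for a post-evacuation slot update.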
inline void SemiGCMarker::MarkValue(uint32_t threadId, TaggedObject *root, ObjectSlot slot)
{
    JSTaggedValue value(slot.GetTaggedType());
    if (value.IsHeapObject()) {
        Region *rootRegion = Region::ObjectAddressToRange(root);
        if (value.IsWeakForHeapObject()) {
            RecordWeakReference(threadId, reinterpret_cast<JSTaggedType *>(slot.SlotAddress()), rootRegion);
            return;
        }
        auto slotStatus = MarkObject(threadId, value.GetTaggedObject(), slot);
        if (rootRegion->InGeneralOldSpace() && slotStatus == SlotStatus::KEEP_SLOT) {
            SlotNeedUpdate waitUpdate(reinterpret_cast<TaggedObject *>(root), slot);
            workManager_->PushSlotNeedUpdate(threadId, waitUpdate);
        }
    }
}

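// Old-space referents are left in place (CLEAR_SLOT). If the object already carries a forwarding
// address, another thread evacuated it first, so only the slot is updated; the slot is kept when
// the winning copy is still in young space. Unforwarded objects are evacuated by this thread.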
inline SlotStatus SemiGCMarker::MarkObject(uint32_t threadId, TaggedObject *object, ObjectSlot slot)
{
    Region *objectRegion = Region::ObjectAddressToRange(object);
    if (objectRegion->InGeneralOldSpace()) {
        return SlotStatus::CLEAR_SLOT;
    }

    MarkWord markWord(object);
    if (markWord.IsForwardingAddress()) {
        TaggedObject *dst = markWord.ToForwardingAddress();
        slot.Update(dst);
        Region *valueRegion = Region::ObjectAddressToRange(dst);
        return valueRegion->InYoungSpace() ? SlotStatus::KEEP_SLOT : SlotStatus::CLEAR_SLOT;
    }
    return EvacuateObject(threadId, object, markWord, slot);
}

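// Copy-first evacuation: the object is copied speculatively, then a single atomic
// compare-and-swap installs the forwarding address in the mark word. The winner finishes the
// bookkeeping; a loser frees its copy and adopts the winner's address. Promoted objects clear
// the slot, copies that stay in young space keep it.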
inline SlotStatus SemiGCMarker::EvacuateObject(uint32_t threadId, TaggedObject *object, const MarkWord &markWord,
                                               ObjectSlot slot)
{
    JSHClass *klass = markWord.GetJSHClass();
    size_t size = klass->SizeFromJSHClass(object);
    bool isPromoted = ShouldBePromoted(object);

    uintptr_t forwardAddress = AllocateDstSpace(threadId, size, isPromoted);
    RawCopyObject(ToUintPtr(object), forwardAddress, size, markWord);

    auto oldValue = markWord.GetValue();
    auto result = Barriers::AtomicSetPrimitive(object, 0, oldValue,
                                               MarkWord::FromForwardingAddress(forwardAddress));
    if (result == oldValue) {
        UpdateForwardAddressIfSuccess(threadId, object, klass, forwardAddress, size, slot, isPromoted);
        return isPromoted ? SlotStatus::CLEAR_SLOT : SlotStatus::KEEP_SLOT;
    }
    bool keepSlot = UpdateForwardAddressIfFailed(object, forwardAddress, size, slot);
    return keepSlot ? SlotStatus::KEEP_SLOT : SlotStatus::CLEAR_SLOT;
}

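// An object is promoted when its region lies entirely below the age mark, or when the region
// carries the age mark and the object sits below the water line, i.e. it presumably survived an
// earlier collection.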
inline bool SemiGCMarker::ShouldBePromoted(TaggedObject *object)
{
    Region *region = Region::ObjectAddressToRange(object);
    return (region->BelowAgeMark() || (region->HasAgeMark() && ToUintPtr(object) < waterLine_));
}

inline void SemiGCMarker::RecordWeakReference(uint32_t threadId, JSTaggedType *ref,
                                              [[maybe_unused]] Region *objectRegion)
{
    auto value = JSTaggedValue(*ref);
    Region *valueRegion = Region::ObjectAddressToRange(value.GetTaggedWeakRef());
    if (valueRegion->InYoungSpace()) {
        workManager_->PushWeakReference(threadId, ref);
    }
}

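// Full-compress marking needs no region-based filtering on the holder side: weak slots are
// recorded unconditionally, and strong slots go through MarkObject, which marks non-evacuating
// regions in place and evacuates the rest.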
inline void CompressGCMarker::MarkValue(uint32_t threadId, ObjectSlot slot)
{
    JSTaggedValue value(slot.GetTaggedType());
    if (value.IsHeapObject()) {
        if (value.IsWeakForHeapObject()) {
            // The region pointer is unnecessary in CompressGCMarker.
            RecordWeakReference(threadId, reinterpret_cast<JSTaggedType *>(slot.SlotAddress()));
            return;
        }
        MarkObject(threadId, value.GetTaggedObject(), slot);
    }
}

inline SlotStatus CompressGCMarker::MarkObject(uint32_t threadId, TaggedObject *object, ObjectSlot slot)
{
    Region *objectRegion = Region::ObjectAddressToRange(object);
    if (!NeedEvacuate(objectRegion)) {
        if (!objectRegion->InSharedHeap() && objectRegion->AtomicMark(object)) {
            workManager_->Push(threadId, object);
            auto hclass = object->GetClass();
            auto size = hclass->SizeFromJSHClass(object);
            objectRegion->IncreaseAliveObject(size);
        }
        return SlotStatus::CLEAR_SLOT;
    }

    MarkWord markWord(object);
    if (markWord.IsForwardingAddress()) {
        TaggedObject *dst = markWord.ToForwardingAddress();
        slot.Update(dst);
        return SlotStatus::CLEAR_SLOT;
    }
    return EvacuateObject(threadId, object, markWord, slot);
}

inline uintptr_t CompressGCMarker::AllocateReadOnlySpace(size_t size)
{
    LockHolder lock(mutex_);
    uintptr_t forwardAddress = heap_->GetReadOnlySpace()->Allocate(size);
    if (UNLIKELY(forwardAddress == 0)) {
        LOG_ECMA_MEM(FATAL) << "Evacuate Read only Object: alloc failed: "
                            << " size: " << size;
        UNREACHABLE();
    }
    return forwardAddress;
}

inline uintptr_t CompressGCMarker::AllocateAppSpawnSpace(size_t size)
{
    LockHolder lock(mutex_);
    uintptr_t forwardAddress = heap_->GetAppSpawnSpace()->Allocate(size);
    if (UNLIKELY(forwardAddress == 0)) {
        LOG_ECMA_MEM(FATAL) << "Evacuate AppSpawn Object: alloc failed: "
                            << " size: " << size;
        UNREACHABLE();
    }
    return forwardAddress;
}

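// Same copy-then-CAS forwarding protocol as SemiGCMarker::EvacuateObject. On success, the copy's
// local-to-share remembered set is rebuilt when the source region had one, and in app-spawn mode
// string hash codes are computed eagerly because the strings become read-only afterwards.
// Compress evacuation always clears the slot.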
inline SlotStatus CompressGCMarker::EvacuateObject(uint32_t threadId, TaggedObject *object, const MarkWord &markWord,
                                                   ObjectSlot slot)
{
    JSHClass *klass = markWord.GetJSHClass();
    size_t size = klass->SizeFromJSHClass(object);
    uintptr_t forwardAddress = AllocateForwardAddress(threadId, size, klass, object);
    RawCopyObject(ToUintPtr(object), forwardAddress, size, markWord);

    auto oldValue = markWord.GetValue();
    auto result = Barriers::AtomicSetPrimitive(object, 0, oldValue,
                                               MarkWord::FromForwardingAddress(forwardAddress));
    if (result == oldValue) {
        UpdateForwardAddressIfSuccess(threadId, object, klass, forwardAddress, size, slot);
        Region *region = Region::ObjectAddressToRange(object);
        if (region->HasLocalToShareRememberedSet()) {
            UpdateLocalToShareRSet(reinterpret_cast<TaggedObject *>(forwardAddress), klass);
        }
        if (isAppSpawn_ && klass->IsString()) {
            // Calculate and set the hash code for the read-only EcmaString in advance.
            EcmaStringAccessor(reinterpret_cast<TaggedObject *>(forwardAddress)).GetHashcode();
        }
        return SlotStatus::CLEAR_SLOT;
    }
    UpdateForwardAddressIfFailed(object, forwardAddress, size, slot);
    return SlotStatus::CLEAR_SLOT;
}

inline void CompressGCMarker::RecordWeakReference(uint32_t threadId, JSTaggedType *ref,
                                                  [[maybe_unused]] Region *objectRegion)
{
    workManager_->PushWeakReference(threadId, ref);
}

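// In app-spawn mode everything movable and process-local is evacuated (huge, read-only,
// non-movable, and shared regions stay put); in a regular compress GC only young and old space
// regions are moved.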
inline bool CompressGCMarker::NeedEvacuate(Region *region)
{
    if (isAppSpawn_) {
        return !region->InHugeObjectSpace() && !region->InReadOnlySpace() && !region->InNonMovableSpace() &&
               !region->InSharedHeap();
    }
    return region->InYoungOrOldSpace();
}
}  // namespace panda::ecmascript
#endif  // ECMASCRIPT_MEM_PARALLEL_MARKER_INL_H