/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ECMASCRIPT_MEM_PARALLEL_EVACUATOR_INL_H
#define ECMASCRIPT_MEM_PARALLEL_EVACUATOR_INL_H

#include "ecmascript/mem/parallel_evacuator.h"

#include "ecmascript/mem/heap.h"
#include "ecmascript/mem/mark_word.h"
#include "ecmascript/mem/region-inl.h"
#include "ecmascript/taskpool/taskpool.h"

namespace panda::ecmascript {
// Move regions with a survival rate of more than 75% to new space as a whole.
// Also move regions when the young space overshoot size is larger than the max capacity.
bool ParallelEvacuator::IsWholeRegionEvacuate(Region *region)
{
    if ((static_cast<double>(region->AliveObject()) / region->GetSize()) > MIN_OBJECT_SURVIVAL_RATE &&
        !region->HasAgeMark()) {
        return true;
    }
    if (heap_->GetFromSpaceDuringEvacuation()->CommittedSizeIsLarge() && !region->HasAgeMark()) {
        return true;
    }
    return false;
}

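// Try to evacuate the region by moving it wholesale (MoveYoungRegionSync) instead of copying its
// objects. Fresh young regions are moved directly; other young regions above the age mark are
// moved when IsWholeRegionEvacuate() approves. Returns true when the region was actually moved.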
bool ParallelEvacuator::WholeRegionEvacuate(Region *region)
{
    if (region->IsFreshRegion()) {
        ASSERT(region->InYoungSpace());
        return heap_->MoveYoungRegionSync(region);
    }
    bool isInYoung = region->InYoungSpace();
    bool isBelowAgeMark = region->BelowAgeMark();
    if (isInYoung && !isBelowAgeMark && IsWholeRegionEvacuate(region) && heap_->MoveYoungRegionSync(region)) {
        return true;
    }
    return false;
}

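// Visit the in-object fields of |root| and invoke |callback| only on slots whose layout attribute
// is a tagged representation; the visited range is clamped to the layout's properties capacity.
// Always returns true.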
template <typename Callback>
bool ParallelEvacuator::VisitBodyInObj(
    TaggedObject *root, ObjectSlot start, ObjectSlot end, Callback callback)
{
    auto hclass = root->GetClass();
    ASSERT(!hclass->IsAllTaggedProp());
    int index = 0;
    TaggedObject *dst = hclass->GetLayout().GetTaggedObject();
    auto layout = LayoutInfo::UncheckCast(dst);
    ObjectSlot realEnd = start;
    realEnd += layout->GetPropertiesCapacity();
    end = end > realEnd ? realEnd : end;
    for (ObjectSlot slot = start; slot < end; slot++) {
        auto attr = layout->GetAttr(index++);
        if (attr.IsTaggedRep()) {
            callback(slot);
        }
    }
    return true;
}

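// Update a slot recorded in the NewToEdenRSet: if the referenced eden object was forwarded,
// rewrite the slot with its forwarding address (preserving weakness); a weak referent that was
// not forwarded clears the slot. Always returns false.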
bool ParallelEvacuator::UpdateNewToEdenObjectSlot(ObjectSlot &slot)
{
    JSTaggedValue value(slot.GetTaggedType());
    if (!value.IsHeapObject()) {
        return false;
    }
    TaggedObject *object = value.GetHeapObject();
    Region *valueRegion = Region::ObjectAddressToRange(object);

    // Only update edenSpace objects when iterating the NewToEdenRSet
    if (!valueRegion->InEdenSpace()) {
        return false;
    }
    MarkWord markWord(object);
    if (markWord.IsForwardingAddress()) {
        TaggedObject *dst = markWord.ToForwardingAddress();
        if (value.IsWeakForHeapObject()) {
            dst = JSTaggedValue(dst).CreateAndGetWeakRef().GetRawTaggedObject();
        }
        slot.Update(dst);
    } else {
        if (value.IsWeakForHeapObject()) {
            slot.Clear();
        }
    }
    return false;
}

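// Rewrite |slot| with the forwarding address of |object| (re-tagged as weak when |isWeak|), or
// clear a weak slot whose referent was not forwarded. Returns true when the forwarded object
// still lives in YoungSpace, i.e. when the oldToNewRSet entry must be kept.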
bool ParallelEvacuator::UpdateForwardedOldToNewObjectSlot(TaggedObject *object, ObjectSlot &slot, bool isWeak)
{
    MarkWord markWord(object);
    if (markWord.IsForwardingAddress()) {
        TaggedObject *dst = markWord.ToForwardingAddress();
        if (isWeak) {
            dst = JSTaggedValue(dst).CreateAndGetWeakRef().GetRawTaggedObject();
        }
        slot.Update(dst);
        Region *dstRegion = Region::ObjectAddressToRange(dst);
        // Keep the oldToNewRSet entry when the forwarded object is still in YoungSpace
        if (dstRegion->InYoungSpace()) {
            return true;
        }
    } else if (isWeak) {
        slot.Clear();
    }
    return false;
}

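// Update a slot recorded in the OldToNewRSet. During EdenGC only eden objects are forwarded;
// otherwise objects in the general new space are forwarded unless their region was moved as a
// whole (NewToNew set). Returns true when the remembered-set entry should be kept.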
template<bool IsEdenGC>
bool ParallelEvacuator::UpdateOldToNewObjectSlot(ObjectSlot &slot)
{
    JSTaggedValue value(slot.GetTaggedType());
    if (!value.IsHeapObject()) {
        return false;
    }
    TaggedObject *object = value.GetHeapObject();
    Region *valueRegion = Region::ObjectAddressToRange(object);
    if constexpr (IsEdenGC) {
        // Only objects in EdenSpace will be collected during EdenGC
        if (valueRegion->InEdenSpace()) {
            return UpdateForwardedOldToNewObjectSlot(object, slot, value.IsWeakForHeapObject());
        } else {
            // Keep the oldToNewRSet entry when the object is in YoungSpace
            return valueRegion->InYoungSpace();
        }
    } else {
        // Only update old-to-new objects when iterating the OldToNewRSet
        if (valueRegion->InGeneralNewSpace()) {
            if (!valueRegion->InNewToNewSet()) {
                return UpdateForwardedOldToNewObjectSlot(object, slot, value.IsWeakForHeapObject());
            }
            // The region was moved from fromSpace to toSpace as a whole
            if (valueRegion->Test(object)) {
                return true;
            }
            if (value.IsWeakForHeapObject()) {
                slot.Clear();
            }
        }
    }
    return false;
}

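// Update an arbitrary slot: shared-heap objects are skipped, weak references are delegated to
// UpdateWeakObjectSlot, and strong references to forwarded objects are rewritten.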
void ParallelEvacuator::UpdateObjectSlot(ObjectSlot &slot)
{
    JSTaggedValue value(slot.GetTaggedType());
    if (value.IsHeapObject()) {
        if (value.IsInSharedHeap()) {
            return;
        }
        if (value.IsWeakForHeapObject()) {
            return UpdateWeakObjectSlot(value.GetTaggedWeakRef(), slot);
        }
        TaggedObject *object = value.GetTaggedObject();
        MarkWord markWord(object);
        if (markWord.IsForwardingAddress()) {
            TaggedObject *dst = markWord.ToForwardingAddress();
            slot.Update(dst);
        }
    }
}

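// Update a weak slot: shared-heap referents are left untouched, dead referents clear the slot,
// and forwarded referents are rewritten as weak references to the new address.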
void ParallelEvacuator::UpdateWeakObjectSlot(TaggedObject *value, ObjectSlot &slot)
{
    Region *objectRegion = Region::ObjectAddressToRange(value);
    if (objectRegion->InSharedHeap()) {
        return;
    }

    TaggedObject *dst = UpdateAddressAfterEvacation(value);
    if (dst == value) {
        return;
    }
    if (dst == nullptr) {
        slot.Clear();
        return;
    }
    auto weakRef = JSTaggedValue(dst).CreateAndGetWeakRef().GetRawTaggedObject();
    slot.Update(weakRef);
}

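// Optimized slot update specialized on the GC type (e.g. UpdateObjectSlotOpt<TriggerGCType::YOUNG_GC>).
// Weak references and untouched regions are handled by UpdateWeakObjectSlotOpt; strong references
// to forwarded objects are rewritten in place.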
template<TriggerGCType gcType>
void ParallelEvacuator::UpdateObjectSlotOpt(ObjectSlot &slot)
{
    JSTaggedValue value(slot.GetTaggedType());
    if (value.IsHeapObject()) {
        if (UpdateWeakObjectSlotOpt<gcType>(value, slot)) {
            return;
        }
        MarkWord markWord(value.GetTaggedObject());
        if (markWord.IsForwardingAddress()) {
            auto dst = reinterpret_cast<JSTaggedType>(markWord.ToForwardingAddress());
            slot.Update(dst);
        }
    }
}

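// Handle weak references and regions not affected by the current GC for UpdateObjectSlotOpt.
// Only YOUNG_GC and OLD_GC are supported; any other gcType is a fatal error.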
template<TriggerGCType gcType>
bool ParallelEvacuator::UpdateWeakObjectSlotOpt(JSTaggedValue value, ObjectSlot &slot)
{
    // Returns false if the slot still needs to be updated as a non-weak reference; otherwise returns true.
    Region *objectRegion = Region::ObjectAddressToRange(value.GetRawData());
    ASSERT(objectRegion != nullptr);
    if constexpr (gcType == TriggerGCType::YOUNG_GC) {
        if (!objectRegion->InGeneralNewSpace()) {
            return true;
        }
    } else if constexpr (gcType == TriggerGCType::OLD_GC) {
        if (!objectRegion->InGeneralNewSpaceOrCSet()) {
            if (value.IsWeakForHeapObject() && !objectRegion->InSharedHeap() &&
                    (objectRegion->GetMarkGCBitset() == nullptr || !objectRegion->Test(value.GetRawData()))) {
                slot.Clear();
            }
            return true;
        }
    } else {
        LOG_GC(FATAL) << "UpdateWeakObjectSlotOpt: gcType not supported yet";
        UNREACHABLE();
    }
    if (objectRegion->InNewToNewSet()) {
        if (value.IsWeakForHeapObject() && !objectRegion->Test(value.GetRawData())) {
            slot.Clear();
        }
        return true;
    }
    if (value.IsWeakForHeapObject()) {
        MarkWord markWord(value.GetWeakReferent());
        if (markWord.IsForwardingAddress()) {
            auto dst = static_cast<JSTaggedType>(ToUintPtr(markWord.ToForwardingAddress()));
            slot.Update(JSTaggedValue(dst).CreateAndGetWeakRef().GetRawData());
        } else {
            slot.Clear();
        }
        return true;
    }
    return false;
}

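// Rebuild the localToShare remembered set of |object| by visiting its reference fields and
// recording every slot that points into shared sweepable space.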
void ParallelEvacuator::UpdateLocalToShareRSet(TaggedObject *object, JSHClass *cls)
{
    Region *region = Region::ObjectAddressToRange(object);
    ASSERT(!region->InSharedHeap());
    auto callbackWithCSet = [this, region](TaggedObject *root, ObjectSlot start, ObjectSlot end, VisitObjectArea area) {
        if (area == VisitObjectArea::IN_OBJECT) {
            if (VisitBodyInObj(root, start, end, [&](ObjectSlot slot) { SetLocalToShareRSet(slot, region); })) {
                return;
            }
        }
        for (ObjectSlot slot = start; slot < end; slot++) {
            SetLocalToShareRSet(slot, region);
        }
    };
    ObjectXRay::VisitObjectBody<VisitType::OLD_GC_VISIT>(object, cls, callbackWithCSet);
}

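// Record a single slot in |region|'s localToShare remembered set if it holds a heap object in
// shared sweepable space; the atomic insert is used because updates may run in parallel.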
void ParallelEvacuator::SetLocalToShareRSet(ObjectSlot slot, Region *region)
{
    ASSERT(!region->InSharedHeap());
    JSTaggedType value = slot.GetTaggedType();
    if (!JSTaggedValue(value).IsHeapObject()) {
        return;
    }
    Region *valueRegion = Region::ObjectAddressToRange(value);
    if (valueRegion->InSharedSweepableSpace()) {
        region->AtomicInsertLocalToShareRSet(slot.SlotAddress());
    }
}

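// Rebuild the remembered sets of |object| by visiting its reference fields; SetEdenObject
// selects the eden-GC variant of SetObjectRSet (e.g. SetObjectFieldRSet<true>).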
template<bool SetEdenObject>
void ParallelEvacuator::SetObjectFieldRSet(TaggedObject *object, JSHClass *cls)
{
    Region *region = Region::ObjectAddressToRange(object);
    auto callbackWithCSet = [this, region](TaggedObject *root, ObjectSlot start, ObjectSlot end, VisitObjectArea area) {
        if (area == VisitObjectArea::IN_OBJECT) {
            if (VisitBodyInObj(root, start, end,
                               [&](ObjectSlot slot) { SetObjectRSet<SetEdenObject>(slot, region); })) {
                return;
            }
        }
        for (ObjectSlot slot = start; slot < end; slot++) {
            SetObjectRSet<SetEdenObject>(slot, region);
        }
    };
    ObjectXRay::VisitObjectBody<VisitType::OLD_GC_VISIT>(object, cls, callbackWithCSet);
}

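// Record a single slot in the matching remembered set: newToEden/localToShare when SetEdenObject
// is true, otherwise oldToNew, localToShare or crossRegion; during a concurrent full mark, weak
// references to unmarked objects are cleared instead.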
template<bool SetEdenObject>
void ParallelEvacuator::SetObjectRSet(ObjectSlot slot, Region *region)
{
    JSTaggedType value = slot.GetTaggedType();
    if (!JSTaggedValue(value).IsHeapObject()) {
        return;
    }
    Region *valueRegion = Region::ObjectAddressToRange(value);
    if constexpr (SetEdenObject) {
        if (region->InYoungSpace() && valueRegion->InEdenSpace()) {
            region->AtomicInsertNewToEdenRSet(slot.SlotAddress());
        } else if (valueRegion->InSharedSweepableSpace()) {
            region->AtomicInsertLocalToShareRSet(slot.SlotAddress());
        }
    } else {
        if (valueRegion->InGeneralNewSpace()) {
            region->InsertOldToNewRSet(slot.SlotAddress());
        } else if (valueRegion->InSharedSweepableSpace()) {
            region->InsertLocalToShareRSet(slot.SlotAddress());
        } else if (valueRegion->InCollectSet()) {
            region->InsertCrossRegionRSet(slot.SlotAddress());
        } else if (JSTaggedValue(value).IsWeakForHeapObject()) {
            if (heap_->IsConcurrentFullMark() && !valueRegion->InSharedHeap() &&
                    (valueRegion->GetMarkGCBitset() == nullptr || !valueRegion->Test(value))) {
                slot.Clear();
            }
        }
    }
}

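// Claim this workload item for the calling thread: only the first caller to flip the flag from
// false to true wins and processes the item.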
bool ParallelEvacuator::AcquireItem::TryAcquire()
{
    return acquire_.exchange(true, std::memory_order_relaxed) == false;
}

void ParallelEvacuator::WorkloadSet::Add(std::unique_ptr<Workload> workload)
{
    workloads_.emplace_back(AcquireItem{}, std::move(workload));
}

bool ParallelEvacuator::WorkloadSet::HasRemaningWorkload() const
{
    return remainingWorkloadNum_.load(std::memory_order_relaxed) > 0;
}

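// Subtract |finishedCount| from the remaining workload counter; returns true when this call
// retired the last outstanding workloads.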
bool ParallelEvacuator::WorkloadSet::FetchSubAndCheckWorkloadCount(size_t finishedCount)
{
    return remainingWorkloadNum_.fetch_sub(finishedCount, std::memory_order_relaxed) == finishedCount;
}

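// Resolve the address of |oldAddress| after evacuation: returns the forwarding address if the
// object was moved, the old address if it survived in place, and nullptr if it did not survive
// the current GC.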
TaggedObject* ParallelEvacuator::UpdateAddressAfterEvacation(TaggedObject *oldAddress)
{
    Region *objectRegion = Region::ObjectAddressToRange(reinterpret_cast<TaggedObject *>(oldAddress));
    if (!objectRegion) {
        return nullptr;
    }
    if (heap_->IsEdenMark()) {
        if (!objectRegion->InEdenSpace()) {
            return oldAddress;
        }
        MarkWord markWord(oldAddress);
        if (markWord.IsForwardingAddress()) {
            return markWord.ToForwardingAddress();
        }
        return nullptr;
    }
    if (objectRegion->InGeneralNewSpaceOrCSet()) {
        if (objectRegion->InNewToNewSet()) {
            if (objectRegion->Test(oldAddress)) {
                return oldAddress;
            }
        } else {
            MarkWord markWord(oldAddress);
            if (markWord.IsForwardingAddress()) {
                return markWord.ToForwardingAddress();
            }
        }
        return nullptr;
    }
    if (heap_->IsConcurrentFullMark()) {
        if (objectRegion->GetMarkGCBitset() == nullptr || !objectRegion->Test(oldAddress)) {
            return nullptr;
        }
    }
    return oldAddress;
}

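// Pick the evacuation task count: roughly one task per eight regions, clamped between one and
// min(GetMaxEvacuateTaskCount, task pool thread count).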
int ParallelEvacuator::CalculateEvacuationThreadNum()
{
    uint32_t count = evacuateWorkloadSet_.GetWorkloadCount();
    uint32_t regionPerThread = 8;
    uint32_t maxThreadNum = std::min(heap_->GetMaxEvacuateTaskCount(),
        Taskpool::GetCurrentTaskpool()->GetTotalThreadNum());
    return static_cast<int>(std::min(std::max(1U, count / regionPerThread), maxThreadNum));
}

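// Pick the reference-update task count: grows with the fourth root of the workload count,
// clamped to the same range as the evacuation task count.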
int ParallelEvacuator::CalculateUpdateThreadNum()
{
    uint32_t count = updateWorkloadSet_.GetWorkloadCount();
    double regionPerThread = 1.0 / 4;
    count = std::pow(count, regionPerThread);
    uint32_t maxThreadNum = std::min(heap_->GetMaxEvacuateTaskCount(),
        Taskpool::GetCurrentTaskpool()->GetTotalThreadNum());
    return static_cast<int>(std::min(std::max(1U, count), maxThreadNum));
}

size_t ParallelEvacuator::WorkloadSet::GetWorkloadCount() const
{
    return workloads_.size();
}

}  // namespace panda::ecmascript
#endif  // ECMASCRIPT_MEM_PARALLEL_EVACUATOR_INL_H