/*
 * Copyright (c) 2022-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ECMASCRIPT_MEM_REGION_H
#define ECMASCRIPT_MEM_REGION_H

#include <type_traits>

#include "ecmascript/base/aligned_struct.h"
#include "ecmascript/base/asan_interface.h"
#include "ecmascript/js_tagged_value.h"
#include "ecmascript/mem/free_object_list.h"
#include "ecmascript/mem/gc_bitset.h"
#include "ecmascript/mem/remembered_set.h"
#include "ecmascript/mem/mem_common.h"
#include "ecmascript/platform/map.h"

#include "ecmascript/platform/mutex.h"

#include "securec.h"

namespace panda {
namespace ecmascript {
class JSThread;

enum RegionSpaceFlag {
    UNINITIALIZED = 0,
    // We should avoid using the lower 3 bits (bits 0 to 2).
    // If ZAP_MEM is enabled, the value of the lower 3 bits conflicts with the INVALID_VALUE.

    // Bits 3 to 7 are reserved to denote the space where the region is located.
    IN_EDEN_SPACE = 0x08,
    IN_YOUNG_SPACE = 0x09,
    IN_SNAPSHOT_SPACE = 0x0A,
    IN_HUGE_OBJECT_SPACE = 0x0B,
    IN_OLD_SPACE = 0x0C,
    IN_NON_MOVABLE_SPACE = 0x0D,
    IN_MACHINE_CODE_SPACE = 0x0E,
    IN_READ_ONLY_SPACE = 0x0F,
    IN_APPSPAWN_SPACE = 0x10,
    IN_HUGE_MACHINE_CODE_SPACE = 0x11,
    IN_SHARED_NON_MOVABLE = 0x12,
    IN_SHARED_OLD_SPACE = 0x13,
    IN_SHARED_APPSPAWN_SPACE = 0x14,
    IN_SHARED_HUGE_OBJECT_SPACE = 0x15,
    IN_SHARED_READ_ONLY_SPACE = 0x16,

    VALID_SPACE_MASK = 0xFF,

    GENERAL_YOUNG_BEGIN = IN_EDEN_SPACE,
    GENERAL_YOUNG_END = IN_YOUNG_SPACE,
    GENERAL_OLD_BEGIN = IN_SNAPSHOT_SPACE,
    GENERAL_OLD_END = IN_HUGE_MACHINE_CODE_SPACE,
    SHARED_SPACE_BEGIN = IN_SHARED_NON_MOVABLE,
    SHARED_SPACE_END = IN_SHARED_READ_ONLY_SPACE,
    SHARED_SWEEPABLE_SPACE_BEGIN = IN_SHARED_NON_MOVABLE,
    SHARED_SWEEPABLE_SPACE_END = IN_SHARED_HUGE_OBJECT_SPACE,

    HEAP_SPACE_BEGIN = IN_EDEN_SPACE,
    HEAP_SPACE_END = IN_SHARED_READ_ONLY_SPACE
};
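
// Because the space flags above occupy one contiguous numeric range, space membership is checked with
// simple range comparisons: for example, a region whose flag is IN_YOUNG_SPACE (0x09) falls inside
// [GENERAL_YOUNG_BEGIN, GENERAL_YOUNG_END] as well as [HEAP_SPACE_BEGIN, HEAP_SPACE_END], which is how
// Region::InGeneralNewSpace() and Region::InHeapSpace() below are implemented.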

enum RegionGCFlags {
    // We should avoid using the lower 3 bits (bits 0 to 2).
    // If ZAP_MEM is enabled, the value of the lower 3 bits conflicts with the INVALID_VALUE.

    // The flags below are used by the GC, and each flag has a dedicated bit, starting from bit 3.
    NEVER_EVACUATE = 1 << 3,
    HAS_AGE_MARK = 1 << 4,
    BELOW_AGE_MARK = 1 << 5,
    IN_COLLECT_SET = 1 << 6,
    IN_NEW_TO_NEW_SET = 1 << 7,
    // Bits 8 to 10 (the lower 3 bits of the next byte) are also excluded for the sake of
    // INVALID_VALUE in ZAP_MEM.
    HAS_BEEN_SWEPT = 1 << 11,
    NEED_RELOCATE = 1 << 12,
    // ONLY used for heap verification.
    IN_INACTIVE_SEMI_SPACE = 1 << 13,
};
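
// These are single-bit flags, so the GC combines them with bitwise operations (see Region::SetGCFlag(),
// Region::ClearGCFlag() and Region::IsGCFlagSet() below). For example, marking a region as part of the
// collect set ORs IN_COLLECT_SET into gcFlags_, and the membership test is
// (gcFlags_ & IN_COLLECT_SET) == IN_COLLECT_SET.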

// Currently only used for regions in LinearSpace, to check whether the region was allocated during concurrent marking.
enum class RegionTypeFlag : uint8_t {
    DEFAULT = 0,
    // We should avoid using the lower 3 bits (bits 0 to 2).
    // If ZAP_MEM is enabled, the value of the lower 3 bits conflicts with the INVALID_VALUE.

    // Region is allocated before concurrent marking, but some new objects may be allocated here
    // during concurrent marking.
    HALF_FRESH = 0x08,
    // Region is allocated during concurrent marking.
    FRESH = 0x09,
};

enum RSetType {
    OLD_TO_NEW,
    LOCAL_TO_SHARE,
};
static inline std::string ToSpaceTypeName(uint8_t space)
{
    switch (space) {
        case RegionSpaceFlag::IN_EDEN_SPACE:
            return "eden space";
        case RegionSpaceFlag::IN_YOUNG_SPACE:
            return "young space";
        case RegionSpaceFlag::IN_SNAPSHOT_SPACE:
            return "snapshot space";
        case RegionSpaceFlag::IN_HUGE_OBJECT_SPACE:
            return "huge object space";
        case RegionSpaceFlag::IN_OLD_SPACE:
            return "old space";
        case RegionSpaceFlag::IN_NON_MOVABLE_SPACE:
            return "non movable space";
        case RegionSpaceFlag::IN_MACHINE_CODE_SPACE:
            return "machine code space";
        case RegionSpaceFlag::IN_READ_ONLY_SPACE:
            return "read only space";
        case RegionSpaceFlag::IN_APPSPAWN_SPACE:
            return "appspawn space";
        case RegionSpaceFlag::IN_HUGE_MACHINE_CODE_SPACE:
            return "huge machine code space";
        case RegionSpaceFlag::IN_SHARED_NON_MOVABLE:
            return "shared non movable space";
        case RegionSpaceFlag::IN_SHARED_OLD_SPACE:
            return "shared old space";
        case RegionSpaceFlag::IN_SHARED_READ_ONLY_SPACE:
            return "shared read only space";
        case RegionSpaceFlag::IN_SHARED_HUGE_OBJECT_SPACE:
            return "shared huge object space";
        case RegionSpaceFlag::IN_SHARED_APPSPAWN_SPACE:
            return "shared appspawn space";
        default:
            return "invalid space";
    }
}

// |---------------------------------------------------------------------------------------|
// |                                   Region (256 kb)                                      |
// |---------------------------------|--------------------------------|--------------------|
// |     Head (sizeof(Region))       |         Mark bitset (4kb)      |      Data          |
// |---------------------------------|--------------------------------|--------------------|

class Region {
public:
    Region(NativeAreaAllocator *allocator, uintptr_t allocateBase, uintptr_t begin, uintptr_t end,
        RegionSpaceFlag spaceType, RegionTypeFlag typeFlag)
        : packedData_(begin, end, spaceType, typeFlag),
          nativeAreaAllocator_(allocator),
          allocateBase_(allocateBase),
          end_(end),
          highWaterMark_(end),
          aliveObject_(0),
          wasted_(0),
          snapshotData_(0) {}

    // JitFort space is divided into regions (JitFortRegion) so that the free_object_list and
    // free_object_set operations can be reused for JitFort space. GC marking actually happens
    // on the corresponding MachineCode objects to which JitFort space is allocated, so no
    // GC mark bits are needed in JitFortRegions.
    Region(NativeAreaAllocator *allocator, uintptr_t allocateBase, uintptr_t end,
        RegionSpaceFlag spaceType)
        : packedData_(allocateBase, spaceType), // no markGCBitset_ for JitFort
          nativeAreaAllocator_(allocator),
          allocateBase_(allocateBase),
          end_(end),
          highWaterMark_(end),
          aliveObject_(0),
          wasted_(0),
          snapshotData_(0) {}

    ~Region() = default;

    NO_COPY_SEMANTIC(Region);
    NO_MOVE_SEMANTIC(Region);

    enum RegionSpaceKind { InYoung, InGeneralOld, Other };

    template <RegionSpaceKind kind>
    class Updater final {
    public:
        Updater(uintptr_t updateAddress, Region& region)
            : bitsetUpdater_(updateAddress),
              region_(region)
        {
        }

        NO_COPY_SEMANTIC(Updater);

        ARK_INLINE ~Updater()
        {
            Flush();
        }

        ARK_INLINE void UpdateLocalToShare()
        {
            bitsetUpdater_.Update(LocalToShareIdx);
        }

        template <RegionSpaceKind T = kind, std::enable_if_t<T == InYoung, int> = 0>
        ARK_INLINE void UpdateNewToEden()
        {
            bitsetUpdater_.Update(NewToEdenIdx);
        }

        template <RegionSpaceKind T = kind, std::enable_if_t<T == InGeneralOld, int> = 0>
        ARK_INLINE void UpdateOldToNew()
        {
            bitsetUpdater_.Update(OldToNewIdx);
        }

        ARK_INLINE void Next()
        {
            if (bitsetUpdater_.Next()) {
                Flush();
            }
        }

    private:
        ARK_INLINE void Consume(size_t idx, uintptr_t updateAddress, uint32_t mask);

        ARK_INLINE void Flush();

        static constexpr size_t CalculateBitSetNum()
        {
            constexpr size_t InYoungBitSetNum = 2;
            constexpr size_t InGeneralOldBitSetNum = 2;
            constexpr size_t OtherBitSetNum = 1;
            switch (kind) {
                case InYoung:
                    return InYoungBitSetNum;
                case InGeneralOld:
                    return InGeneralOldBitSetNum;
                case Other:
                    return OtherBitSetNum;
            }
            return 0;
        }

        static constexpr size_t BitSetNum = CalculateBitSetNum();
        static constexpr size_t LocalToShareIdx = 0;
        static constexpr size_t NewToEdenIdx = 1; // NewToEden and OldToNew can't be used at the same time.
        static constexpr size_t OldToNewIdx = 1;
        GCBitSetUpdater<BitSetNum> bitsetUpdater_;
        Region& region_;
    };
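
    // Rough usage sketch (illustrative only; the real call sites live in the GC barrier/visitor code): when a
    // visitor scans consecutive slots of an object, it can batch remembered-set updates through an Updater
    // obtained from GetBatchRSetUpdater() below instead of inserting bits one address at a time, e.g.
    //     auto updater = region->GetBatchRSetUpdater<Region::InGeneralOld>(startSlotAddr);
    //     for (/* each slot of the object */) {
    //         if (/* slot references a young object */) {
    //             updater.UpdateOldToNew();
    //         }
    //         if (/* slot references a shared-heap object */) {
    //             updater.UpdateLocalToShare();
    //         }
    //         updater.Next();  // advance to the next slot; flushes when a bitset word boundary is crossed
    //     }
    //     // The Updater destructor flushes any still-pending bits into the region's remembered sets.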

    void Initialize()
    {
        lock_ = new Mutex();
        if (InSparseSpace()) {
            InitializeFreeObjectSets();
        }
    }

    void LinkNext(Region *next)
    {
        next_ = next;
    }

    Region *GetNext() const
    {
        return next_;
    }

    void LinkPrev(Region *prev)
    {
        prev_ = prev;
    }

    Region *GetPrev() const
    {
        return prev_;
    }

    uintptr_t GetBegin() const
    {
        return packedData_.begin_;
    }

    uintptr_t GetEnd() const
    {
        return end_;
    }

    uintptr_t GetHighWaterMark() const
    {
        return highWaterMark_;
    }

    size_t GetCapacity() const
    {
        return end_ - allocateBase_;
    }

    size_t GetSize() const
    {
        return end_ - packedData_.begin_;
    }

    bool IsGCFlagSet(RegionGCFlags flag) const
    {
        return (packedData_.flags_.gcFlags_ & flag) == flag;
    }

    void SetGCFlag(RegionGCFlags flag)
    {
        packedData_.flags_.gcFlags_ |= flag;
    }

    void ClearGCFlag(RegionGCFlags flag)
    {
        // NOLINTNEXTLINE(hicpp-signed-bitwise)
        packedData_.flags_.gcFlags_ &= ~flag;
    }

    std::string GetSpaceTypeName()
    {
        return ToSpaceTypeName(packedData_.flags_.spaceFlag_);
    }

    uint8_t GetSpaceType() const
    {
        return packedData_.flags_.spaceFlag_;
    }

    // Mark bitset
    GCBitset *GetMarkGCBitset() const;
    bool AtomicMark(void *address);
    // Objects in a fresh region should only be marked in the JS thread.
    bool NonAtomicMark(void *address);
    void ClearMark(void *address);
    bool Test(void *addr) const;
    bool Test(uintptr_t addr) const;
    // ONLY used for heap verification.
    bool TestNewToEden(uintptr_t addr);
    bool TestOldToNew(uintptr_t addr);
    bool TestLocalToShare(uintptr_t addr);
    template <typename Visitor>
    void IterateAllMarkedBits(Visitor visitor) const;
    void ClearMarkGCBitset();
    // local to share remembered set
    bool HasLocalToShareRememberedSet() const;
    RememberedSet *ExtractLocalToShareRSet();
    void InsertLocalToShareRSet(uintptr_t addr);
    template<RegionSpaceKind kind>
    Updater<kind> GetBatchRSetUpdater(uintptr_t addr);
    void AtomicInsertLocalToShareRSet(uintptr_t addr);
    void ClearLocalToShareRSetInRange(uintptr_t start, uintptr_t end);
    void AtomicClearLocalToShareRSetInRange(uintptr_t start, uintptr_t end);
    void AtomicClearSweepingLocalToShareRSetInRange(uintptr_t start, uintptr_t end);
    template <typename Visitor>
    void IterateAllLocalToShareBits(Visitor visitor);
    void DeleteLocalToShareRSet();
    void DeleteSweepingLocalToShareRSet();
    // Cross region remembered set
    void InsertCrossRegionRSet(uintptr_t addr);
    void AtomicInsertCrossRegionRSet(uintptr_t addr);
    template <typename Visitor>
    void IterateAllCrossRegionBits(Visitor visitor) const;
    void ClearCrossRegionRSet();
    void ClearCrossRegionRSetInRange(uintptr_t start, uintptr_t end);
    void AtomicClearCrossRegionRSetInRange(uintptr_t start, uintptr_t end);
    void DeleteCrossRegionRSet();
    // New to eden remembered set
    void InsertNewToEdenRSet(uintptr_t addr);
    void AtomicInsertNewToEdenRSet(uintptr_t addr);
    void ClearNewToEdenRSet(uintptr_t addr);
    // Old to new remembered set
    void InsertOldToNewRSet(uintptr_t addr);
    void ClearOldToNewRSet(uintptr_t addr);

    template <typename Visitor>
    void IterateAllNewToEdenBits(Visitor visitor);
    template <typename Visitor>
    void IterateAllOldToNewBits(Visitor visitor);
    RememberedSet* GetNewToEdenRSet();
    void ClearNewToEdenRSet();
    void ClearNewToEdenRSetInRange(uintptr_t start, uintptr_t end);
    void DeleteNewToEdenRSet();
    void ClearOldToNewRSet();
    void ClearOldToNewRSetInRange(uintptr_t start, uintptr_t end);
    void DeleteOldToNewRSet();

    void AtomicClearSweepingOldToNewRSetInRange(uintptr_t start, uintptr_t end);
    void ClearSweepingOldToNewRSetInRange(uintptr_t start, uintptr_t end);
    void DeleteSweepingOldToNewRSet();
    template <typename Visitor>
    void AtomicIterateAllSweepingRSetBits(Visitor visitor);
    template <typename Visitor>
    void IterateAllSweepingRSetBits(Visitor visitor);
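
    // Illustrative sketch of how the mark bitset is typically consumed (the real logic lives in the marker
    // and visitor classes): AtomicMark() acts as a claim operation, so only the thread that flips an object's
    // bit processes it, and IterateAllMarkedBits() can later walk every marked object in the region, e.g.
    //     Region *region = Region::ObjectAddressToRange(object);
    //     if (region->AtomicMark(object)) {
    //         markStack.Push(object);  // hypothetical work list, for illustration only
    //     }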

    static Region *ObjectAddressToRange(TaggedObject *obj)
    {
        return reinterpret_cast<Region *>(ToUintPtr(obj) & ~DEFAULT_REGION_MASK);
    }

    static Region *ObjectAddressToRange(uintptr_t objAddress)
    {
        return reinterpret_cast<Region *>(objAddress & ~DEFAULT_REGION_MASK);
    }
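
    // The mask trick above works because each region is allocated at a DEFAULT_REGION_SIZE-aligned base and
    // the Region header sits at the start of that block, so clearing the low bits of any address inside the
    // region yields the header address. For example, assuming 256 KB regions, an object at 0x5ABC0 inside a
    // region based at 0x40000 maps back to the Region object at 0x40000.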

    static size_t GetRegionAvailableSize()
    {
        size_t regionHeaderSize = AlignUp(sizeof(Region), static_cast<size_t>(MemAlignment::MEM_ALIGN_REGION));
        size_t bitsetSize = GCBitset::SizeOfGCBitset(DEFAULT_REGION_SIZE - regionHeaderSize);
        return DEFAULT_REGION_SIZE - regionHeaderSize - bitsetSize;
    }
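
    // Rough arithmetic behind the layout diagram above (exact values depend on sizeof(Region) and the target
    // architecture): with a 256 KB region and one mark bit per 8-byte alignment unit, the mark bitset takes
    // about 256 KB / 64 = 4 KB, which matches the diagram; whatever remains after the aligned header and the
    // bitset is the allocatable data area returned here.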

    void ClearMembers()
    {
        if (lock_ != nullptr) {
            delete lock_;
            lock_ = nullptr;
        }
    }

    void Invalidate()
    {
        ASAN_UNPOISON_MEMORY_REGION(reinterpret_cast<void *>(GetBegin()), GetSize());
        packedData_.flags_.spaceFlag_ = RegionSpaceFlag::UNINITIALIZED;
    }

    uint8_t GetRegionSpaceFlag();

    void SetRegionSpaceFlag(RegionSpaceFlag flag)
    {
        packedData_.flags_.spaceFlag_ = flag;
    }

    bool InEdenSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_EDEN_SPACE;
    }

    bool InYoungSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_YOUNG_SPACE;
    }

    bool InOldSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_OLD_SPACE;
    }

    bool InYoungOrOldSpace() const
    {
        return InGeneralNewSpace() || InOldSpace();
    }

    bool InGeneralNewSpace() const
    {
        auto flag = packedData_.flags_.spaceFlag_;
        return flag >= RegionSpaceFlag::GENERAL_YOUNG_BEGIN && flag <= RegionSpaceFlag::GENERAL_YOUNG_END;
    }

    bool InGeneralOldSpace() const
    {
        ASSERT(packedData_.flags_.spaceFlag_ != 0);
        auto flag = packedData_.flags_.spaceFlag_;
        return flag >= RegionSpaceFlag::GENERAL_OLD_BEGIN && flag <= RegionSpaceFlag::GENERAL_OLD_END;
    }

    bool InHugeObjectSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_HUGE_OBJECT_SPACE;
    }

    bool InMachineCodeSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_MACHINE_CODE_SPACE;
    }

    bool InHugeMachineCodeSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_HUGE_MACHINE_CODE_SPACE;
    }

    bool InNonMovableSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_NON_MOVABLE_SPACE;
    }

    bool InSnapshotSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_SNAPSHOT_SPACE;
    }

    bool InReadOnlySpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_READ_ONLY_SPACE;
    }

    bool InSharedOldSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_SHARED_OLD_SPACE;
    }

    bool InSharedNonMovableSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_SHARED_NON_MOVABLE;
    }

    bool InSharedHugeObjectSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_SHARED_HUGE_OBJECT_SPACE;
    }

    bool InSharedReadOnlySpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_SHARED_READ_ONLY_SPACE;
    }

    bool InSharedAppSpawnSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_SHARED_APPSPAWN_SPACE;
    }

    bool InAppSpawnSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_APPSPAWN_SPACE;
    }

    // Not including shared read only space.
    bool InSharedSweepableSpace() const
    {
        auto flag = packedData_.flags_.spaceFlag_;
        return flag >= RegionSpaceFlag::SHARED_SWEEPABLE_SPACE_BEGIN &&
               flag <= RegionSpaceFlag::SHARED_SWEEPABLE_SPACE_END;
    }

    bool InSharedHeap() const
    {
        auto flag = packedData_.flags_.spaceFlag_;
        return flag >= RegionSpaceFlag::SHARED_SPACE_BEGIN && flag <= RegionSpaceFlag::SHARED_SPACE_END;
    }

    bool InSparseSpace() const
    {
        auto flag = packedData_.flags_.spaceFlag_;
        switch (flag) {
            case RegionSpaceFlag::IN_OLD_SPACE:
            case RegionSpaceFlag::IN_NON_MOVABLE_SPACE:
            case RegionSpaceFlag::IN_MACHINE_CODE_SPACE:
            case RegionSpaceFlag::IN_APPSPAWN_SPACE:
            case RegionSpaceFlag::IN_SHARED_NON_MOVABLE:
            case RegionSpaceFlag::IN_SHARED_OLD_SPACE:
                return true;
            default:
                return false;
        }
    }

    bool InHeapSpace() const
    {
        uint8_t space = packedData_.flags_.spaceFlag_;
        return space >= RegionSpaceFlag::HEAP_SPACE_BEGIN && space <= RegionSpaceFlag::HEAP_SPACE_END;
    }

    bool InCollectSet() const
    {
        return IsGCFlagSet(RegionGCFlags::IN_COLLECT_SET);
    }

    bool InGeneralNewSpaceOrCSet() const
    {
        return InGeneralNewSpace() || InCollectSet();
    }

    bool InNewToNewSet() const
    {
        return IsGCFlagSet(RegionGCFlags::IN_NEW_TO_NEW_SET);
    }

    bool HasAgeMark() const
    {
        return IsGCFlagSet(RegionGCFlags::HAS_AGE_MARK);
    }

    bool BelowAgeMark() const
    {
        return IsGCFlagSet(RegionGCFlags::BELOW_AGE_MARK);
    }

    bool NeedRelocate() const
    {
        return IsGCFlagSet(RegionGCFlags::NEED_RELOCATE);
    }

    // ONLY used for heap verification.
    bool InInactiveSemiSpace() const
    {
        return IsGCFlagSet(RegionGCFlags::IN_INACTIVE_SEMI_SPACE);
    }

    // ONLY used for heap verification.
    bool InActiveSemiSpace() const
    {
        return InYoungSpace() && !InInactiveSemiSpace();
    }

    RegionTypeFlag GetRegionTypeFlag() const
    {
        return packedData_.typeFlag_;
    }

    void SetRegionTypeFlag(RegionTypeFlag typeFlag)
    {
        packedData_.typeFlag_ = typeFlag;
    }

    void ResetRegionTypeFlag()
    {
        SetRegionTypeFlag(RegionTypeFlag::DEFAULT);
    }

    bool IsFreshRegion() const
    {
        return GetRegionTypeFlag() == RegionTypeFlag::FRESH;
    }

    bool IsHalfFreshRegion() const
    {
        return GetRegionTypeFlag() == RegionTypeFlag::HALF_FRESH;
    }

    // ONLY used for heap verification.
    void SetInactiveSemiSpace()
    {
        SetGCFlag(RegionGCFlags::IN_INACTIVE_SEMI_SPACE);
    }

    // ONLY used for heap verification.
    void ResetInactiveSemiSpace()
    {
        ClearGCFlag(RegionGCFlags::IN_INACTIVE_SEMI_SPACE);
    }

    void SetSwept()
    {
        SetGCFlag(RegionGCFlags::HAS_BEEN_SWEPT);
    }

    void ResetSwept()
    {
        ClearGCFlag(RegionGCFlags::HAS_BEEN_SWEPT);
    }

    bool InRange(uintptr_t address) const
    {
        return address >= packedData_.begin_ && address <= end_;
    }

    uintptr_t GetAllocateBase() const
    {
        return allocateBase_;
    }

    size_t GetAllocatedBytes(uintptr_t top = 0)
    {
        ASSERT(top == 0 || InRange(top));
        return (top == 0) ? (highWaterMark_ - packedData_.begin_) : (top - packedData_.begin_);
    }

    void SetHighWaterMark(uintptr_t mark)
    {
        ASSERT(InRange(mark));
        highWaterMark_ = mark;
    }

    void SetReadOnlyAndMarked()
    {
        packedData_.markGCBitset_->SetAllBits(packedData_.bitsetSize_);
        PageProtect(reinterpret_cast<void *>(allocateBase_), GetCapacity(), PAGE_PROT_READ);
    }

    void ClearReadOnly()
    {
        PageProtect(reinterpret_cast<void *>(allocateBase_), GetCapacity(), PAGE_PROT_READWRITE);
    }

    void InitializeFreeObjectSets()
    {
        FreeObjectSet<FreeObject> **sets = new FreeObjectSet<FreeObject> *[FreeObjectList<FreeObject>::NumberOfSets()];
        for (int i = 0; i < FreeObjectList<FreeObject>::NumberOfSets(); i++) {
            sets[i] = new FreeObjectSet<FreeObject>(i);
        }
        freeObjectSets_ = Span<FreeObjectSet<FreeObject> *>(sets, FreeObjectList<FreeObject>::NumberOfSets());
    }

    void DestroyFreeObjectSets()
    {
        for (int i = 0; i < FreeObjectList<FreeObject>::NumberOfSets(); i++) {
            delete freeObjectSets_[i];
            freeObjectSets_[i] = nullptr;
        }
        delete[] freeObjectSets_.data();
    }

    FreeObjectSet<FreeObject> *GetFreeObjectSet(SetType type)
    {
        // Thread safe
        if (freeObjectSets_[type] == nullptr) {
            freeObjectSets_[type] = new FreeObjectSet<FreeObject>(type);
        }
        return freeObjectSets_[type];
    }

    template<class Callback>
    void EnumerateFreeObjectSets(Callback cb)
    {
        for (auto set : freeObjectSets_) {
            cb(set);
        }
    }
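
    // Illustrative example of walking a region's free lists with a lambda; the callback receives every
    // FreeObjectSet in index order, while REnumerateFreeObjectSets() below walks them in reverse and stops
    // once the callback returns false:
    //     region->EnumerateFreeObjectSets([](FreeObjectSet<FreeObject> *set) {
    //         // inspect or rebuild the set here
    //     });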

    template<class Callback>
    void REnumerateFreeObjectSets(Callback cb)
    {
        auto last = freeObjectSets_.crbegin();
        auto first = freeObjectSets_.crend();
        for (; last != first; last++) {
            if (!cb(*last)) {
                break;
            }
        }
    }

    void IncreaseAliveObjectSafe(size_t size)
    {
        ASSERT(aliveObject_ + size <= GetSize());
        aliveObject_ += size;
    }

    void IncreaseAliveObject(size_t size)
    {
        aliveObject_.fetch_add(size, std::memory_order_relaxed);
    }

    void SetRegionAliveSize()
    {
        gcAliveSize_ = aliveObject_;
    }

    void ResetAliveObject()
    {
        aliveObject_ = 0;
    }

    size_t AliveObject() const
    {
        return aliveObject_.load(std::memory_order_relaxed);
    }

    size_t GetGCAliveSize() const
    {
        return gcAliveSize_;
    }

    bool MostObjectAlive() const
    {
        return aliveObject_ > MOST_OBJECT_ALIVE_THRESHOLD_PERCENT * GetSize();
    }

    bool BelowCompressThreasholdAlive() const
    {
        return gcAliveSize_ < COMPRESS_THREASHOLD_PERCENT * GetSize();
    }

    void ResetWasted()
    {
        wasted_ = 0;
    }

    void IncreaseWasted(uint64_t size)
    {
        wasted_ += size;
    }

    uint64_t GetWastedSize()
    {
        return wasted_;
    }

    uint64_t GetSnapshotData()
    {
        return snapshotData_;
    }

    void SetSnapshotData(uint64_t value)
    {
        snapshotData_ = value;
    }
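
    // Illustrative sketch of the snapshotData_ encoding described at the field definition below (upper
    // 32 bits: huge object size, lower 32 bits: region index); the actual packing is performed by the
    // snapshot code, which is declared as a friend of this class:
    //     constexpr uint64_t REGION_INDEX_BITS = 32;  // hypothetical name, for illustration only
    //     region->SetSnapshotData((static_cast<uint64_t>(hugeObjectSize) << REGION_INDEX_BITS) | regionIndex);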

    void SwapOldToNewRSetForCS()
    {
        sweepingOldToNewRSet_ = packedData_.oldToNewSet_;
        packedData_.oldToNewSet_ = nullptr;
    }

    void SwapLocalToShareRSetForCS()
    {
        sweepingLocalToShareRSet_ = packedData_.localToShareSet_;
        packedData_.localToShareSet_ = nullptr;
    }

    void SetLocalHeap(uintptr_t localHeap)
    {
        ASSERT(localHeap != (uintptr_t)nullptr);
        localHeap_ = localHeap;
    }

    uintptr_t GetLocalHeap(void)
    {
        return localHeap_;
    }

    // Should be called in the JS thread.
    void MergeOldToNewRSetForCS();
    void MergeLocalToShareRSetForCS();

    // Should be called in the daemon thread, or in the JS thread in RUNNING state.
    void MergeLocalToShareRSetForCM(RememberedSet *set);

    struct alignas(JSTaggedValue::TaggedTypeSize()) PackedPtr : public base::AlignedPointer {
        uint8_t spaceFlag_;
        uint16_t gcFlags_;
    };

    struct PackedData : public base::AlignedStruct<JSTaggedValue::TaggedTypeSize(),
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedSize> {
        enum class Index : size_t {
            FlagsIndex = 0,
            TypeFlagIndex,
            MarkGCBitSetIndex,
            OldToNewSetIndex,
            LocalToShareSetIndex,
            BeginIndex,
            BitSetSizeIndex,
            NumOfMembers
        };

        static_assert(static_cast<size_t>(Index::NumOfMembers) == NumOfTypes);

        inline PackedData(uintptr_t begin, uintptr_t end, RegionSpaceFlag spaceType, RegionTypeFlag typeFlag)
        {
            flags_.spaceFlag_ = spaceType;
            flags_.gcFlags_ = 0;
            typeFlag_ = typeFlag;
            bitsetSize_ = (spaceType == RegionSpaceFlag::IN_HUGE_OBJECT_SPACE ||
                           spaceType == RegionSpaceFlag::IN_HUGE_MACHINE_CODE_SPACE ||
                           spaceType == RegionSpaceFlag::IN_SHARED_HUGE_OBJECT_SPACE) ?
                GCBitset::BYTE_PER_WORD : GCBitset::SizeOfGCBitset(end - begin);
            markGCBitset_ = new (ToVoidPtr(begin)) GCBitset();
            markGCBitset_->Clear(bitsetSize_);
            begin_ = AlignUp(begin + bitsetSize_, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
            // When ASan is enabled, the object area of the region stays poisoned until it is allocated.
#ifdef ARK_ASAN_ON
            ASAN_POISON_MEMORY_REGION(reinterpret_cast<void *>(begin_), (end - begin_));
#endif
        }

        inline PackedData(uintptr_t begin, RegionSpaceFlag spaceType)
        {
            flags_.spaceFlag_ = spaceType;
            flags_.gcFlags_ = 0;
            typeFlag_ = RegionTypeFlag::DEFAULT;
            // no markGCBitset
            begin_ = begin;
            markGCBitset_ = nullptr;
        }

        static size_t GetFlagsOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::FlagsIndex)>(isArch32);
        }

        static size_t GetTypeFlagOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::TypeFlagIndex)>(isArch32);
        }

        static size_t GetGCBitsetOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::MarkGCBitSetIndex)>(isArch32);
        }

        static size_t GetNewToEdenSetOffset(bool isArch32)
        {
            // NewToEdenRSet shares a union with OldToNewRSet.
            return GetOffset<static_cast<size_t>(Index::OldToNewSetIndex)>(isArch32);
        }

        static size_t GetOldToNewSetOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::OldToNewSetIndex)>(isArch32);
        }

        static size_t GetLocalToShareSetOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::LocalToShareSetIndex)>(isArch32);
        }

        static size_t GetBeginOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BeginIndex)>(isArch32);
        }

        alignas(EAS) PackedPtr flags_;
        // Use a different UIntPtr from flags_ to prevent a potential data race.
        // Be careful when storing to this value; currently it is written only from the JS thread during
        // concurrent marking, or from the GC thread during the GC clear task.
        alignas(EAS) RegionTypeFlag typeFlag_;
        alignas(EAS) GCBitset *markGCBitset_ {nullptr};
        // OldToNewRSet is only used for the general OldSpace and NewToEdenRSet only for the YoungSpace,
        // so their pointers can share a union.
        union {
            alignas(EAS) RememberedSet *oldToNewSet_ {nullptr};
            alignas(EAS) RememberedSet *newToEdenSet_;
        };
        alignas(EAS) RememberedSet *localToShareSet_ {nullptr};
        alignas(EAS) uintptr_t begin_ {0};
        alignas(EAS) size_t bitsetSize_ {0};
    };
    STATIC_ASSERT_EQ_ARCH(sizeof(PackedData), PackedData::SizeArch32, PackedData::SizeArch64);

    static constexpr double MOST_OBJECT_ALIVE_THRESHOLD_PERCENT = 0.8;
    static constexpr double AVERAGE_REGION_EVACUATE_SIZE = MOST_OBJECT_ALIVE_THRESHOLD_PERCENT *
                                                           DEFAULT_REGION_SIZE / 2;  // 2 means half
private:
    static constexpr double COMPRESS_THREASHOLD_PERCENT = 0.1;

    RememberedSet *CreateRememberedSet();
    RememberedSet *GetOrCreateCrossRegionRememberedSet();
    RememberedSet *GetOrCreateNewToEdenRememberedSet();
    RememberedSet *GetOrCreateOldToNewRememberedSet();
    RememberedSet *GetOrCreateLocalToShareRememberedSet();

    inline RememberedSet *CreateNewToEdenRememberedSet();
    inline RememberedSet *CreateOldToNewRememberedSet();
    inline RememberedSet *CreateLocalToShareRememberedSet();

    PackedData packedData_;
    NativeAreaAllocator *nativeAreaAllocator_;

    uintptr_t allocateBase_;
    uintptr_t end_;
    uintptr_t highWaterMark_;
    std::atomic_size_t aliveObject_ {0};
    size_t gcAliveSize_ {0};
    Region *next_ {nullptr};
    Region *prev_ {nullptr};

    RememberedSet *crossRegionSet_ {nullptr};
    RememberedSet *sweepingOldToNewRSet_ {nullptr};
    RememberedSet *sweepingLocalToShareRSet_ {nullptr};
    Span<FreeObjectSet<FreeObject> *> freeObjectSets_;
    Mutex *lock_ {nullptr};
    uint64_t wasted_;
    // snapshotData_ is used to encode the region for snapshots. Its upper 32 bits store the size of
    // the huge object, and the lower 32 bits store the region index.
    uint64_t snapshotData_;
    uintptr_t localHeap_ {0};

    friend class Snapshot;
    friend class SnapshotProcessor;
};
}  // namespace ecmascript
}  // namespace panda
#endif  // ECMASCRIPT_MEM_REGION_H