1 /*
2  * Copyright (c) 2024 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "ecmascript/mem/shared_heap/shared_concurrent_marker.h"
17 
18 #include "ecmascript/checkpoint/thread_state_transition.h"
19 #include "ecmascript/mem/shared_heap/shared_gc_marker-inl.h"
20 #include "ecmascript/mem/verification.h"
21 
22 namespace panda::ecmascript {
// Constructs the marker bound to the process-wide singletons: the shared heap,
// the daemon thread that drives shared concurrent marking, and the shared
// heap's work manager. `type` records whether concurrent marking is enabled
// (see EnableConcurrentMarkType); it can be adjusted later via
// EnableConcurrentMarking().
SharedConcurrentMarker::SharedConcurrentMarker(EnableConcurrentMarkType type)
    : sHeap_(SharedHeap::GetInstance()),
      dThread_(DaemonThread::GetInstance()),
      sWorkManager_(sHeap_->GetWorkManager()),
      enableMarkType_(type) {}
28 
EnableConcurrentMarking(EnableConcurrentMarkType type)29 void SharedConcurrentMarker::EnableConcurrentMarking(EnableConcurrentMarkType type)
30 {
31     if (IsConfigDisabled()) {
32         return;
33     }
34     if (IsEnabled() && !dThread_->IsReadyToConcurrentMark() && type == EnableConcurrentMarkType::DISABLE) {
35         enableMarkType_ = EnableConcurrentMarkType::REQUEST_DISABLE;
36     } else {
37         enableMarkType_ = type;
38     }
39 }
40 
// Entry point for one shared concurrent-mark cycle; runs on the daemon thread
// (see CHECK_DAEMON_THREAD below). Mutators are suspended only for the
// initial-mark phase; the bulk of marking then proceeds concurrently, after
// which the shared GC is triggered.
void SharedConcurrentMarker::Mark(TriggerGCType gcType, GCReason gcReason)
{
    RecursionScope recurScope(this);  // guard against re-entering Mark on this marker
    gcType_ = gcType;
    gcReason_ = gcReason;
    sHeap_->WaitSensitiveStatusFinished();
    {
        // Stop-the-world window: suspend all mutators while marking state is
        // initialized and roots are scanned; released at the end of this scope.
        ThreadManagedScope runningScope(dThread_);
        SuspendAllScope scope(dThread_);
        TRACE_GC(GCStats::Scope::ScopeId::ConcurrentMark, sHeap_->GetEcmaGCStats());
        LOG_GC(DEBUG) << "SharedConcurrentMarker: Concurrent Marking Begin";
        ECMA_BYTRACE_NAME(HITRACE_TAG_ARK, "SharedConcurrentMarker::Mark");
        CHECK_DAEMON_THREAD();
        // TODO: support shared runtime state
        if (UNLIKELY(sHeap_->ShouldVerifyHeap())) {
            // Optional pre-GC heap verification (debug/diagnostic builds).
            SharedHeapVerification(sHeap_, VerifyKind::VERIFY_PRE_SHARED_GC).VerifyAll();
        }
        InitializeMarking();
    }
    // Daemon thread do not need to post task to GC_Thread
    ASSERT(!dThread_->IsInRunningState());
    DoMarking();
    HandleMarkingFinished();
}
65 
// Finishes the current marking pass by delegating to the work manager, which
// owns the per-thread mark stacks/work lists.
void SharedConcurrentMarker::Finish()
{
    sWorkManager_->Finish();
}
70 
// Remark (final mark) phase: re-scans roots and drains the remaining gray
// objects. Must run on the daemon thread while all mutators are suspended
// (asserted below), so no new references can appear during the scan.
void SharedConcurrentMarker::ReMark()
{
    CHECK_DAEMON_THREAD();
#ifndef NDEBUG
    ASSERT(dThread_->HasLaunchedSuspendAll());
#endif
    TRACE_GC(GCStats::Scope::ScopeId::ReMark, sHeap_->GetEcmaGCStats());
    LOG_GC(DEBUG) << "SharedConcurrentMarker: Remarking Begin";
    // TODO: support shared runtime state
    SharedGCMarker *sharedGCMarker = sHeap_->GetSharedGCMarker();
    // If enable shared concurrent mark, the recorded weak reference slots from local to share may be changed
    // during LocalGC. For now just re-scan the local_to_share bit to record and update these weak references.
    sharedGCMarker->MarkRoots(DAEMON_THREAD_INDEX, SharedMarkType::CONCURRENT_MARK_REMARK);
    sharedGCMarker->DoMark<SharedMarkType::CONCURRENT_MARK_REMARK>(DAEMON_THREAD_INDEX);
    // Fold the per-handler RSet work lists back and reset them for the next cycle.
    sharedGCMarker->MergeBackAndResetRSetWorkListHandler();
    // Wait until all parallel mark tasks spawned above have drained.
    sHeap_->WaitRunningTaskFinished();
}
88 
Reset(bool clearGCBits)89 void SharedConcurrentMarker::Reset(bool clearGCBits)
90 {
91     Finish();
92     dThread_->SetSharedMarkStatus(SharedMarkStatus::READY_TO_CONCURRENT_MARK);
93     isConcurrentMarking_ = false;
94     if (clearGCBits) {
95         // Shared gc clear GC bits in ReclaimRegions after GC
96         auto callback = [](Region *region) {
97             region->ClearMarkGCBitset();
98             region->ResetAliveObject();
99         };
100         sHeap_->EnumerateOldSpaceRegions(callback);
101     }
102 }
103 
// Rebinds the marker to a (possibly new) shared GC work manager.
// Non-owning: the caller retains ownership of `sWorkManager`.
void SharedConcurrentMarker::ResetWorkManager(SharedGCWorkManager *sWorkManager)
{
    sWorkManager_ = sWorkManager;
}
108 
// Initial-mark setup, executed inside the stop-the-world window opened by
// Mark(). Ordering matters: the heap is prepared and the mark status
// published before any bitsets are touched, and root marking comes last,
// after the work manager is ready to receive marked objects.
void SharedConcurrentMarker::InitializeMarking()
{
    CHECK_DAEMON_THREAD();
    // TODO: support shared runtime state
    sHeap_->Prepare(true);
    isConcurrentMarking_ = true;
    // Publish the new status so mutators/barriers observe marking in progress.
    dThread_->SetSharedMarkStatus(SharedMarkStatus::CONCURRENT_MARKING_OR_FINISHED);

    sHeapObjectSize_ = sHeap_->GetHeapObjectSize();  // snapshot size at mark start
    // Clear stale mark bits from the previous cycle in app-spawn space.
    sHeap_->GetAppSpawnSpace()->EnumerateRegions([](Region *current) {
        current->ClearMarkGCBitset();
    });
    // Old space keeps its bitsets but restarts its live-size accounting.
    sHeap_->EnumerateOldSpaceRegions([](Region *current) {
        ASSERT(current->InSharedSweepableSpace());
        current->ResetAliveObject();
    });
    sWorkManager_->Initialize(TriggerGCType::SHARED_GC, SharedParallelMarkPhase::SHARED_MARK_TASK);
    sHeap_->GetSharedGCMarker()->MarkRoots(DAEMON_THREAD_INDEX, SharedMarkType::CONCURRENT_MARK_INITIAL_MARK);
}
128 
DoMarking()129 void SharedConcurrentMarker::DoMarking()
130 {
131     ClockScope clockScope;
132     sHeap_->GetSharedGCMarker()->DoMark<SharedMarkType::CONCURRENT_MARK_INITIAL_MARK>(DAEMON_THREAD_INDEX);
133     sHeap_->WaitRunningTaskFinished();
134     FinishMarking(clockScope.TotalSpentTime());
135 }
136 
// Records end-of-marking bookkeeping: refreshes the heap-object size snapshot
// and stores the time spent marking.
// @param spendTime  total marking duration as measured by the caller's ClockScope.
void SharedConcurrentMarker::FinishMarking(float spendTime)
{
    sHeapObjectSize_ = sHeap_->GetHeapObjectSize();
    SetDuration(spendTime);
}
142 
// Called after concurrent marking completes: waits for any sensitive-status
// transition to settle, then kicks off the shared garbage collection with the
// type/reason captured when Mark() started.
void SharedConcurrentMarker::HandleMarkingFinished()
{
    sHeap_->WaitSensitiveStatusFinished();
    sHeap_->DaemonCollectGarbage(gcType_, gcReason_);
}
148 }  // namespace panda::ecmascript
149