1/*
2 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 *     http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16#ifndef ECMASCRIPT_JS_THREAD_H
17#define ECMASCRIPT_JS_THREAD_H
18
19#include <atomic>
20#include <sstream>
21#include <string>
22#include <cstdint>
23
24#include "ecmascript/platform/ffrt.h"
25#include "ecmascript/base/aligned_struct.h"
26#include "ecmascript/builtin_entries.h"
27#include "ecmascript/daemon/daemon_task.h"
28#include "ecmascript/global_index.h"
29#include "ecmascript/js_object_resizing_strategy.h"
30#include "ecmascript/js_tagged_value.h"
31#include "ecmascript/js_thread_hclass_entries.h"
32#include "ecmascript/js_thread_stub_entries.h"
33#include "ecmascript/log_wrapper.h"
34#include "ecmascript/mem/visitor.h"
35#include "ecmascript/mutator_lock.h"
36
37#if defined(ENABLE_FFRT_INTERFACES)
38#include "ffrt.h"
39#include "c/executor_task.h"
40#endif
41
42namespace panda::ecmascript {
43class EcmaContext;
44class EcmaVM;
45class EcmaHandleScope;
46class GlobalIndex;
47class HeapRegionAllocator;
48class PropertiesCache;
49template<typename T>
50class EcmaGlobalStorage;
51class Node;
52class DebugNode;
53class VmThreadControl;
54class GlobalEnvConstants;
55enum class ElementsKind : uint8_t;
56
// NOTE(review): "remove" is ambiguous — presumably the MachineCode forward
// declaration and the JIT aliases below are temporary; clarify what is to be
// removed and track it with an owned TODO.
58class MachineCode;
59using JitCodeVector = std::vector<std::tuple<MachineCode*, std::string, uintptr_t>>;
60using JitCodeMapVisitor = std::function<void(std::map<JSTaggedType, JitCodeVector*>&)>;
61
62using WeakClearCallback = void (*)(void *);
63
// Phase of the VM-local concurrent GC mark (see Is*Mark* predicates on JSThread).
enum class MarkStatus : uint8_t {
    READY_TO_MARK,   // no concurrent mark in progress
    MARKING,         // concurrent marking is running
    MARK_FINISHED,   // marking completed
};
69
// Kind of collection driving weak-reference iteration
// (see JSThread::IterateWeakEcmaGlobalStorage, which defaults to LOCAL_GC).
enum class GCKind : uint8_t {
    LOCAL_GC,    // GC of this VM's own heap
    SHARED_GC    // GC of the cross-VM shared heap
};
74
// On/off state of the PGO profiler, stored in a bit of the interrupt vector
// (see JSThread::SetPGOProfilerEnable / IsPGOProfilerEnable).
enum class PGOProfilerStatus : uint8_t {
    PGO_PROFILER_DISABLE,
    PGO_PROFILER_ENABLE,
};
79
// Which bytecode-stub set is currently installed
// (switched via JSThread::CheckOrSwitchPGOStubs / SwitchJitProfileStubs).
enum class BCStubStatus: uint8_t {
    NORMAL_BC_STUB,      // plain interpreter stubs
    PROFILE_BC_STUB,     // stubs with PGO profiling hooks
    JIT_PROFILE_BC_STUB, // stubs with JIT profiling hooks
};
85
// Whether a stable-array change happened on a prototype object (used by
// JSThread::NotifyStableArrayElementsGuardians).
enum class StableArrayChangeKind { PROTO, NOT_PROTO };
87
// Role of a JSThread instance; selected by the JSThread constructors below.
enum ThreadType : uint8_t {
    JS_THREAD,      // ordinary mutator thread
    JIT_THREAD,     // JIT compilation thread
    DAEMON_THREAD,  // daemon/background thread
};
93
// Per-thread flag bits; packed into the low 16 bits of ThreadStateAndFlags
// (see THREAD_FLAGS_MASK below).
enum ThreadFlag : uint16_t {
    NO_FLAGS = 0 << 0,
    SUSPEND_REQUEST = 1 << 0,  // another thread requested suspension
    ACTIVE_BARRIER = 1 << 1,   // thread must pass the active barrier
};
99
// The state occupies the upper 16 bits and the flags the lower 16 bits of the
// packed 32-bit word in ThreadStateAndFlags.
static constexpr uint32_t THREAD_STATE_OFFSET = 16;
static constexpr uint32_t THREAD_FLAGS_MASK = (0x1 << THREAD_STATE_OFFSET) - 1;
// Lifecycle state of a JSThread.
enum class ThreadState : uint16_t {
    CREATED = 0,
    RUNNING = 1,
    NATIVE = 2,        // executing native (non-managed) code
    WAIT = 3,
    IS_SUSPENDED = 4,
    TERMINATED = 5,
};
110
// Packs the 16-bit flag set and the 16-bit ThreadState into a single 32-bit
// word so both can be read or CAS-updated together via asAtomicInt.
// NOTE(review): the struct layout puts flags at the lower address, which lines
// up with THREAD_FLAGS_MASK only on little-endian targets — confirm.
union ThreadStateAndFlags {
    explicit ThreadStateAndFlags(uint32_t val = 0): asInt(val) {}
    // Volatile view, for code that re-reads fields across synchronization.
    struct {
        volatile uint16_t flags;
        volatile ThreadState state;
    } asStruct;
    // Plain view, for single-threaded/owned access.
    struct {
        uint16_t flags;
        ThreadState state;
    } asNonvolatileStruct;
    volatile uint32_t asInt;
    uint32_t asNonvolatileInt;
    std::atomic<uint32_t> asAtomicInt;  // atomic access to the whole word
private:
    NO_COPY_SEMANTIC(ThreadStateAndFlags);
};
127
// Index reserved for the main thread.
static constexpr uint32_t MAIN_THREAD_INDEX = 0;
129
130class JSThread {
131public:
    // Widths/positions of the bit-fields packed into gcStateBitField_,
    // sharedGCStateBitField_ and interruptVector_ (see GlueData).
    static constexpr int CONCURRENT_MARKING_BITFIELD_NUM = 2;
    static constexpr int CONCURRENT_MARKING_BITFIELD_MASK = 0x3;
    static constexpr int SHARED_CONCURRENT_MARKING_BITFIELD_NUM = 1;
    static constexpr int SHARED_CONCURRENT_MARKING_BITFIELD_MASK = 0x1;
    static constexpr int CHECK_SAFEPOINT_BITFIELD_NUM = 8;
    static constexpr int PGO_PROFILER_BITFIELD_START = 16;
    static constexpr int BOOL_BITFIELD_NUM = 1;
    static constexpr int BCSTUBSTATUS_BITFIELD_NUM = 2;
    // Stack head-room kept in reserve when checking stack limits.
    static constexpr uint32_t RESERVE_STACK_SIZE = 128;
    // Bit-field accessors over the words above.
    using MarkStatusBits = BitField<MarkStatus, 0, CONCURRENT_MARKING_BITFIELD_NUM>;
    using SharedMarkStatusBits = BitField<SharedMarkStatus, 0, SHARED_CONCURRENT_MARKING_BITFIELD_NUM>;
    using CheckSafePointBit = BitField<bool, 0, BOOL_BITFIELD_NUM>;
    using VMNeedSuspensionBit = BitField<bool, CHECK_SAFEPOINT_BITFIELD_NUM, BOOL_BITFIELD_NUM>;
    using VMHasSuspendedBit = VMNeedSuspensionBit::NextFlag;
    using InstallMachineCodeBit = VMHasSuspendedBit::NextFlag;
    using PGOStatusBits = BitField<PGOProfilerStatus, PGO_PROFILER_BITFIELD_START, BOOL_BITFIELD_NUM>;
    using BCStubStatusBits = PGOStatusBits::NextField<BCStubStatus, BCSTUBSTATUS_BITFIELD_NUM>;
    using ThreadId = uint32_t;

    // Tri-state used by the frame-dropping debugger machinery below.
    enum FrameDroppedState {
        StateFalse = 0,
        StateTrue,
        StatePending
    };
156
157    explicit JSThread(EcmaVM *vm);
158    // only used in jit thread
159    explicit JSThread(EcmaVM *vm, ThreadType threadType);
160    // only used in daemon thread
161    explicit JSThread(ThreadType threadType);
162
163    PUBLIC_API ~JSThread();
164
    // Owning VM of this thread (null only for threads created without a VM).
    EcmaVM *GetEcmaVM() const
    {
        return vm_;
    }

    static JSThread *Create(EcmaVM *vm);
    static JSThread *GetCurrent();

    // Re-entrancy depth bookkeeping.
    int GetNestedLevel() const
    {
        return nestedLevel_;
    }

    void SetNestedLevel(int level)
    {
        nestedLevel_ = level;
    }

    // Frame-pointer accessors; all stored in glueData_ so assembler stubs can
    // reach them at fixed offsets from the glue base.
    void SetLastFp(JSTaggedType *fp)
    {
        glueData_.lastFp_ = fp;
    }

    const JSTaggedType *GetLastFp() const
    {
        return glueData_.lastFp_;
    }

    const JSTaggedType *GetCurrentSPFrame() const
    {
        return glueData_.currentFrame_;
    }

    void SetCurrentSPFrame(JSTaggedType *sp)
    {
        glueData_.currentFrame_ = sp;
    }

    const JSTaggedType *GetLastLeaveFrame() const
    {
        return glueData_.leaveFrame_;
    }

    void SetLastLeaveFrame(JSTaggedType *sp)
    {
        glueData_.leaveFrame_ = sp;
    }

    const JSTaggedType *GetCurrentFrame() const;

    void SetCurrentFrame(JSTaggedType *sp);

    const JSTaggedType *GetCurrentInterpretedFrame() const;

    // Stack checks; defined out of line.
    bool DoStackOverflowCheck(const JSTaggedType *sp);

    bool DoStackLimitCheck();
222
    // Allocators owned elsewhere; this class only holds non-owning pointers.
    NativeAreaAllocator *GetNativeAreaAllocator() const
    {
        return nativeAreaAllocator_;
    }

    HeapRegionAllocator *GetHeapRegionAllocator() const
    {
        return heapRegionAllocator_;
    }

    // Update the cached top/end pointers of an allocation space so fast-path
    // allocation in stubs sees the new bump range.
    void ReSetNewSpaceAllocationAddress(const uintptr_t *top, const uintptr_t* end)
    {
        glueData_.newSpaceAllocationTopAddress_ = top;
        glueData_.newSpaceAllocationEndAddress_ = end;
    }

    void ReSetSOldSpaceAllocationAddress(const uintptr_t *top, const uintptr_t* end)
    {
        glueData_.sOldSpaceAllocationTopAddress_ = top;
        glueData_.sOldSpaceAllocationEndAddress_ = end;
    }

    void ReSetSNonMovableSpaceAllocationAddress(const uintptr_t *top, const uintptr_t* end)
    {
        glueData_.sNonMovableSpaceAllocationTopAddress_ = top;
        glueData_.sNonMovableSpaceAllocationEndAddress_ = end;
    }

    // Address of the unshared constant-pool table, stored as an integer.
    uintptr_t GetUnsharedConstpools() const
    {
        return glueData_.unsharedConstpools_;
    }

    void SetUnsharedConstpools(uintptr_t unsharedConstpools)
    {
        glueData_.unsharedConstpools_ = unsharedConstpools;
    }
260
261    void SetIsStartHeapSampling(bool isStart)
262    {
263        glueData_.isStartHeapSampling_ = isStart ? JSTaggedValue::True() : JSTaggedValue::False();
264    }
265
266    void SetIsTracing(bool isTracing)
267    {
268        glueData_.isTracing_ = isTracing;
269    }
270
    // GC root iteration hooks; defined out of line.
    void Iterate(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor,
        const RootBaseAndDerivedVisitor &derivedVisitor);

    void IterateJitCodeMap(const JitCodeMapVisitor &updater);

    void IterateHandleWithCheck(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor);

    uintptr_t* PUBLIC_API ExpandHandleStorage();
    void PUBLIC_API ShrinkHandleStorage(int prevIndex);
    void PUBLIC_API CheckJSTaggedType(JSTaggedType value) const;
    bool PUBLIC_API CpuProfilerCheckJSTaggedType(JSTaggedType value) const;

    void PUBLIC_API SetException(JSTaggedValue exception);

    // Current pending exception; Hole encodes "no exception".
    JSTaggedValue GetException() const
    {
        return glueData_.exception_;
    }

    bool HasPendingException() const
    {
        return !glueData_.exception_.IsHole();
    }

    void ClearException();

    void SetEnableForceIC(bool isEnableForceIC);

    bool IsEnableForceIC() const;
300
    // Global object / environment / constants cached in glue data for stubs.
    void SetGlobalObject(JSTaggedValue globalObject)
    {
        glueData_.globalObject_ = globalObject;
    }

    const GlobalEnv *GetGlobalEnv() const
    {
        return glueData_.glueGlobalEnv_;
    }

    const GlobalEnvConstants *GlobalConstants() const
    {
        return glueData_.globalConst_;
    }

    void SetGlobalConstants(const GlobalEnvConstants *constants)
    {
        // const_cast: glueData_ stores a mutable pointer for stub access.
        glueData_.globalConst_ = const_cast<GlobalEnvConstants*>(constants);
    }
320
321    const BuiltinEntries GetBuiltinEntries() const
322    {
323        return glueData_.builtinEntries_;
324    }
325
    // Mutable access to the builtin entries stored inside glue data.
    BuiltinEntries* GetBuiltinEntriesPointer()
    {
        return &glueData_.builtinEntries_;
    }

    // Maps ElementsKind -> (hclass index, prototype hclass index) constants.
    const CMap<ElementsKind, std::pair<ConstantIndex, ConstantIndex>> &GetArrayHClassIndexMap() const
    {
        return arrayHClassIndexMap_;
    }

    const CMap<JSHClass *, GlobalIndex> &GetCtorHclassEntries() const
    {
        return ctorHclassEntries_;
    }

    void NotifyStableArrayElementsGuardians(JSHandle<JSObject> receiver, StableArrayChangeKind changeKind);

    // True once the stable-array fast path has been invalidated.
    bool IsStableArrayElementsGuardiansInvalid() const
    {
        return !glueData_.stableArrayElementsGuardians_;
    }

    void ResetGuardians();

    // Builtin hclass bookkeeping; defined out of line.
    void SetInitialBuiltinHClass(
        BuiltinTypeId type, JSHClass *builtinHClass, JSHClass *instanceHClass,
                            JSHClass *prototypeHClass, JSHClass *prototypeOfPrototypeHClass = nullptr,
                            JSHClass *extraHClass = nullptr);

    void SetInitialBuiltinGlobalHClass(JSHClass *builtinHClass, GlobalIndex globalIndex);

    JSHClass *GetBuiltinHClass(BuiltinTypeId type) const;

    JSHClass *GetBuiltinInstanceHClass(BuiltinTypeId type) const;
    JSHClass *GetBuiltinExtraHClass(BuiltinTypeId type) const;
    JSHClass *GetArrayInstanceHClass(ElementsKind kind, bool isPrototype) const;

    PUBLIC_API JSHClass *GetBuiltinPrototypeHClass(BuiltinTypeId type) const;
    PUBLIC_API JSHClass *GetBuiltinPrototypeOfPrototypeHClass(BuiltinTypeId type) const;

    static size_t GetBuiltinHClassOffset(BuiltinTypeId, bool isArch32);

    static size_t GetBuiltinPrototypeHClassOffset(BuiltinTypeId, bool isArch32);
369
    const BuiltinHClassEntries &GetBuiltinHClassEntries() const
    {
        return glueData_.builtinHClassEntries_;
    }

    JSTaggedValue GetCurrentLexenv() const;
    JSTaggedValue GetCurrentFunction() const;

    // Stub entry tables below live in glue data so generated code can call
    // through them at fixed offsets. Setters register, getters look up.
    void RegisterRTInterface(size_t id, Address addr)
    {
        ASSERT(id < kungfu::RuntimeStubCSigns::NUM_OF_STUBS);
        glueData_.rtStubEntries_.Set(id, addr);
    }

    Address GetRTInterface(size_t id) const
    {
        ASSERT(id < kungfu::RuntimeStubCSigns::NUM_OF_STUBS);
        return glueData_.rtStubEntries_.Get(id);
    }

    Address GetFastStubEntry(uint32_t id) const
    {
        return glueData_.coStubEntries_.Get(id);
    }

    void SetFastStubEntry(size_t id, Address entry)
    {
        glueData_.coStubEntries_.Set(id, entry);
    }

    Address GetBuiltinStubEntry(uint32_t id) const
    {
        return glueData_.builtinStubEntries_.Get(id);
    }

    void SetBuiltinStubEntry(size_t id, Address entry)
    {
        glueData_.builtinStubEntries_.Set(id, entry);
    }

    Address GetBCStubEntry(uint32_t id) const
    {
        return glueData_.bcStubEntries_.Get(id);
    }

    void SetBCStubEntry(size_t id, Address entry)
    {
        glueData_.bcStubEntries_.Set(id, entry);
    }

    Address GetBaselineStubEntry(uint32_t id) const
    {
        return glueData_.baselineStubEntries_.Get(id);
    }

    void SetBaselineStubEntry(size_t id, Address entry)
    {
        glueData_.baselineStubEntries_.Set(id, entry);
    }

    void SetBCDebugStubEntry(size_t id, Address entry)
    {
        glueData_.bcDebuggerStubEntries_.Set(id, entry);
    }

    // Base address of the bytecode handler table.
    Address *GetBytecodeHandler()
    {
        return glueData_.bcStubEntries_.GetAddr();
    }

    void PUBLIC_API CheckSwitchDebuggerBCStub();
    void CheckOrSwitchPGOStubs();
    void SwitchJitProfileStubs(bool isEnablePgo);
443
    // Thread id, published with release semantics by the setter elsewhere,
    // hence the acquire load here.
    ThreadId GetThreadId() const
    {
        return id_.load(std::memory_order_acquire);
    }

    void PostFork();

    static ThreadId GetCurrentThreadId();

    void IterateWeakEcmaGlobalStorage(const WeakRootVisitor &visitor, GCKind gcKind = GCKind::LOCAL_GC);

    void UpdateJitCodeMapReference(const WeakRootVisitor &visitor);

    PUBLIC_API PropertiesCache *GetPropertiesCache() const;
458
459    MarkStatus GetMarkStatus() const
460    {
461        return MarkStatusBits::Decode(glueData_.gcStateBitField_);
462    }
463
464    void SetMarkStatus(MarkStatus status)
465    {
466        MarkStatusBits::Set(status, &glueData_.gcStateBitField_);
467    }
468
469    bool IsConcurrentMarkingOrFinished() const
470    {
471        return !IsReadyToConcurrentMark();
472    }
473
474    bool IsReadyToConcurrentMark() const
475    {
476        auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_);
477        return status == MarkStatus::READY_TO_MARK;
478    }
479
480    bool IsMarking() const
481    {
482        auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_);
483        return status == MarkStatus::MARKING;
484    }
485
486    bool IsMarkFinished() const
487    {
488        auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_);
489        return status == MarkStatus::MARK_FINISHED;
490    }
491
492    SharedMarkStatus GetSharedMarkStatus() const
493    {
494        return SharedMarkStatusBits::Decode(glueData_.sharedGCStateBitField_);
495    }
496
497    void SetSharedMarkStatus(SharedMarkStatus status)
498    {
499        SharedMarkStatusBits::Set(status, &glueData_.sharedGCStateBitField_);
500    }
501
502    bool IsSharedConcurrentMarkingOrFinished() const
503    {
504        auto status = SharedMarkStatusBits::Decode(glueData_.sharedGCStateBitField_);
505        return status == SharedMarkStatus::CONCURRENT_MARKING_OR_FINISHED;
506    }
507
508    bool IsReadyToSharedConcurrentMark() const
509    {
510        auto status = SharedMarkStatusBits::Decode(glueData_.sharedGCStateBitField_);
511        return status == SharedMarkStatus::READY_TO_CONCURRENT_MARK;
512    }
513
514    void SetPGOProfilerEnable(bool enable)
515    {
516        PGOProfilerStatus status =
517            enable ? PGOProfilerStatus::PGO_PROFILER_ENABLE : PGOProfilerStatus::PGO_PROFILER_DISABLE;
518        SetInterruptValue<PGOStatusBits>(status);
519    }
520
521    bool IsPGOProfilerEnable() const
522    {
523        auto status = PGOStatusBits::Decode(glueData_.interruptVector_);
524        return status == PGOProfilerStatus::PGO_PROFILER_ENABLE;
525    }
526
527    void SetBCStubStatus(BCStubStatus status)
528    {
529        SetInterruptValue<BCStubStatusBits>(status);
530    }
531
532    BCStubStatus GetBCStubStatus() const
533    {
534        return BCStubStatusBits::Decode(glueData_.interruptVector_);
535    }
536
    bool CheckSafepoint();

    void CheckAndPassActiveBarrier();

    bool PassSuspendBarrier();

    // Simple per-thread flags; plain (non-atomic) fields unless noted.
    void SetGetStackSignal(bool isParseStack)
    {
        getStackSignal_ = isParseStack;
    }

    bool GetStackSignal() const
    {
        return getStackSignal_;
    }

    // needProfiling_ is atomic; written with the default seq_cst order.
    void SetNeedProfiling(bool needProfiling)
    {
        needProfiling_.store(needProfiling);
    }

    void SetIsProfiling(bool isProfiling)
    {
        isProfiling_ = isProfiling;
    }

    bool GetIsProfiling() const
    {
        return isProfiling_;
    }

    void SetGcState(bool gcState)
    {
        gcState_ = gcState;
    }

    bool GetGcState() const
    {
        return gcState_;
    }

    void SetRuntimeState(bool runtimeState)
    {
        runtimeState_ = runtimeState;
    }

    bool GetRuntimeState() const
    {
        return runtimeState_;
    }

    // Marks this thread as the main thread; always returns true (the result
    // of the assignment), kept for caller compatibility.
    bool SetMainThread()
    {
        return isMainThread_ = true;
    }

    bool IsMainThreadFast() const
    {
        return isMainThread_;
    }
597
598    void SetCpuProfileName(std::string &profileName)
599    {
600        profileName_ = profileName;
601    }
602
    // One-way switch: once enabled, the assembly interpreter stays on.
    void EnableAsmInterpreter()
    {
        isAsmInterpreter_ = true;
    }

    bool IsAsmInterpreter() const
    {
        return isAsmInterpreter_;
    }

    VmThreadControl *GetVmThreadControl() const
    {
        return vmThreadControl_;
    }

    // Whether stack traces include source-file information.
    void SetEnableStackSourceFile(bool value)
    {
        enableStackSourceFile_ = value;
    }

    bool GetEnableStackSourceFile() const
    {
        return enableStackSourceFile_;
    }

    // Whether builtins are initialized lazily.
    void SetEnableLazyBuiltins(bool value)
    {
        enableLazyBuiltins_ = value;
    }

    bool GetEnableLazyBuiltins() const
    {
        return enableLazyBuiltins_;
    }

    // Set once the thread is sufficiently initialized for GC root iteration.
    void SetReadyForGCIterating(bool flag)
    {
        readyForGCIterating_ = flag;
    }

    bool ReadyForGCIterating() const
    {
        return readyForGCIterating_;
    }
647
    // The "glue" pointer handed to generated code is the address of glueData_,
    // not of the JSThread itself; these helpers convert between the two.
    static constexpr size_t GetGlueDataOffset()
    {
        return MEMBER_OFFSET(JSThread, glueData_);
    }

    uintptr_t GetGlueAddr() const
    {
        return reinterpret_cast<uintptr_t>(this) + GetGlueDataOffset();
    }

    static JSThread *GlueToJSThread(uintptr_t glue)
    {
        // very careful to modify here
        return reinterpret_cast<JSThread *>(glue - GetGlueDataOffset());
    }
663
    // Interrupt-vector bit accessors; all writes go through the atomic
    // CAS helper SetInterruptValue to avoid losing concurrent updates.
    void SetCheckSafePointStatus()
    {
        // The low byte holds only CheckSafePointBit, so it must be 0 or 1.
        ASSERT(static_cast<uint8_t>(glueData_.interruptVector_ & 0xFF) <= 1);
        SetInterruptValue<CheckSafePointBit>(true);
    }

    void ResetCheckSafePointStatus()
    {
        ASSERT(static_cast<uint8_t>(glueData_.interruptVector_ & 0xFF) <= 1);
        SetInterruptValue<CheckSafePointBit>(false);
    }

    void SetVMNeedSuspension(bool flag)
    {
        SetInterruptValue<VMNeedSuspensionBit>(flag);
    }

    // NOTE(review): could be const — reads only; left as-is to keep the
    // declared interface byte-identical.
    bool VMNeedSuspension()
    {
        return VMNeedSuspensionBit::Decode(glueData_.interruptVector_);
    }

    void SetVMSuspended(bool flag)
    {
        SetInterruptValue<VMHasSuspendedBit>(flag);
    }

    bool IsVMSuspended()
    {
        return VMHasSuspendedBit::Decode(glueData_.interruptVector_);
    }
695
    // Cooperative termination protocol: a request is set, the thread observes
    // it, then TerminateExecution() runs and hasTerminated_ records the result.
    bool HasTerminationRequest() const
    {
        return needTermination_;
    }

    void SetTerminationRequest(bool flag)
    {
        needTermination_ = flag;
    }

    void SetVMTerminated(bool flag)
    {
        hasTerminated_ = flag;
    }

    bool HasTerminated() const
    {
        return hasTerminated_;
    }

    void TerminateExecution();

    // Signals that freshly JIT-compiled machine code awaits installation.
    void SetInstallMachineCode(bool flag)
    {
        SetInterruptValue<InstallMachineCodeBit>(flag);
    }

    bool HasInstallMachineCode() const
    {
        return InstallMachineCodeBit::Decode(glueData_.interruptVector_);
    }
727
    // Approximate current stack pointer: the address of the caller's frame.
    static uintptr_t GetCurrentStackPosition()
    {
        return reinterpret_cast<uintptr_t>(__builtin_frame_address(0));
    }

    bool IsLegalAsmSp(uintptr_t sp) const;

    bool IsLegalThreadSp(uintptr_t sp) const;

    bool IsLegalSp(uintptr_t sp) const;
738
    // Guards re-entry into finalization checks.
    void SetCheckAndCallEnterState(bool state)
    {
        finalizationCheckState_ = state;
    }

    bool GetCheckAndCallEnterState() const
    {
        return finalizationCheckState_;
    }

    // Stack bounds cached in glue data for stub-side limit checks.
    uint64_t GetStackStart() const
    {
        return glueData_.stackStart_;
    }

    uint64_t GetStackLimit() const
    {
        return glueData_.stackLimit_;
    }
758
    GlobalEnv *GetGlueGlobalEnv()
    {
        return glueData_.glueGlobalEnv_;
    }

    void SetGlueGlobalEnv(GlobalEnv *global)
    {
        ASSERT(global != nullptr);
        glueData_.glueGlobalEnv_ = global;
    }

    // Global-handle operations forwarded through function objects installed
    // elsewhere (newGlobalHandle_ etc.), so storage can be swapped at runtime.
    inline uintptr_t NewGlobalHandle(JSTaggedType value)
    {
        return newGlobalHandle_(value);
    }

    inline void DisposeGlobalHandle(uintptr_t nodeAddr)
    {
        disposeGlobalHandle_(nodeAddr);
    }

    inline uintptr_t SetWeak(uintptr_t nodeAddr, void *ref = nullptr, WeakClearCallback freeGlobalCallBack = nullptr,
                             WeakClearCallback nativeFinalizeCallBack = nullptr)
    {
        return setWeak_(nodeAddr, ref, freeGlobalCallBack, nativeFinalizeCallBack);
    }

    inline uintptr_t ClearWeak(uintptr_t nodeAddr)
    {
        return clearWeak_(nodeAddr);
    }

    inline bool IsWeak(uintptr_t addr) const
    {
        return isWeak_(addr);
    }
795
    // One-way opt-in that lets other OS threads run on this JSThread's VM.
    void EnableCrossThreadExecution()
    {
        glueData_.allowCrossThreadExecution_ = true;
    }

    bool IsCrossThreadExecutionEnable() const
    {
        return glueData_.allowCrossThreadExecution_;
    }

    // Debugger frame-dropping: a plain flag for the current frame ...
    bool IsFrameDropped()
    {
        return glueData_.isFrameDropped_;
    }

    void SetFrameDroppedState()
    {
        glueData_.isFrameDropped_ = true;
    }

    void ResetFrameDroppedState()
    {
        glueData_.isFrameDropped_ = false;
    }

    // ... and a tri-state (False/True/Pending) for the entry frame.
    bool IsEntryFrameDroppedTrue()
    {
        return glueData_.entryFrameDroppedState_ == FrameDroppedState::StateTrue;
    }

    bool IsEntryFrameDroppedPending()
    {
        return glueData_.entryFrameDroppedState_ == FrameDroppedState::StatePending;
    }

    void SetEntryFrameDroppedState()
    {
        glueData_.entryFrameDroppedState_ = FrameDroppedState::StateTrue;
    }

    void ResetEntryFrameDroppedState()
    {
        glueData_.entryFrameDroppedState_ = FrameDroppedState::StateFalse;
    }

    void PendingEntryFrameDroppedState()
    {
        glueData_.entryFrameDroppedState_ = FrameDroppedState::StatePending;
    }

    // Debug-mode flag mirrored in glue data for stub access.
    bool IsDebugMode()
    {
        return glueData_.isDebugMode_;
    }

    void SetDebugModeState()
    {
        glueData_.isDebugMode_ = true;
    }

    void ResetDebugModeState()
    {
        glueData_.isDebugMode_ = false;
    }
860
    // Atomically sets bit-field T inside interruptVector_ to |value| with a
    // CAS loop, so concurrent updates to other bit-fields in the same 64-bit
    // word are never lost.
    template<typename T, typename V>
    void SetInterruptValue(V value)
    {
        volatile auto interruptValue =
            reinterpret_cast<volatile std::atomic<uint64_t> *>(&glueData_.interruptVector_);
        uint64_t oldValue = interruptValue->load(std::memory_order_relaxed);
        auto newValue = oldValue;
        do {
            // Re-derive newValue each retry: a failed CAS refreshes oldValue
            // with the currently stored word.
            newValue = oldValue;
            T::Set(value, &newValue);
        } while (!std::atomic_compare_exchange_strong_explicit(interruptValue, &oldValue, newValue,
                                                               std::memory_order_release,
                                                               std::memory_order_relaxed));
    }
875
    void InvokeWeakNodeFreeGlobalCallBack();
    void InvokeWeakNodeNativeFinalizeCallback();
    bool IsStartGlobalLeakCheck() const;
    bool EnableGlobalObjectLeakCheck() const;
    bool EnableGlobalPrimitiveLeakCheck() const;
    void WriteToStackTraceFd(std::ostringstream &buffer) const;
    void SetStackTraceFd(int32_t fd);
    void CloseStackTraceFd();
    // Monotonic counter for global handles (used by leak checking above).
    uint32_t IncreaseGlobalNumberCount()
    {
        return ++globalNumberCount_;
    }
888
    // Growth step used by the object-properties resizing strategy.
    void SetPropertiesGrowStep(uint32_t step)
    {
        glueData_.propertiesGrowStep_ = step;
    }

    uint32_t GetPropertiesGrowStep() const
    {
        return glueData_.propertiesGrowStep_;
    }

    // Address of the RNG state word, exposed to stubs via glue data.
    void SetRandomStatePtr(uint64_t *ptr)
    {
        glueData_.randomStatePtr_ = reinterpret_cast<uintptr_t>(ptr);
    }

    // Opaque task identifier (see ffrt integration), stored as an integer.
    void SetTaskInfo(uintptr_t taskInfo)
    {
        glueData_.taskInfo_ = taskInfo;
    }

    uintptr_t GetTaskInfo() const
    {
        return glueData_.taskInfo_;
    }

    void SetJitCodeMap(JSTaggedType exception,  MachineCode* machineCode, std::string &methodName, uintptr_t offset);

    // Exception -> JIT code vector mapping used when symbolizing stacks.
    std::map<JSTaggedType, JitCodeVector*> &GetJitCodeMaps()
    {
        return jitCodeMaps_;
    }
920
921    struct GlueData : public base::AlignedStruct<JSTaggedValue::TaggedTypeSize(),
922                                                 BCStubEntries,
923                                                 JSTaggedValue,
924                                                 JSTaggedValue,
925                                                 base::AlignedBool,
926                                                 base::AlignedPointer,
927                                                 base::AlignedPointer,
928                                                 base::AlignedPointer,
929                                                 base::AlignedPointer,
930                                                 base::AlignedPointer,
931                                                 base::AlignedPointer,
932                                                 base::AlignedPointer,
933                                                 base::AlignedPointer,
934                                                 base::AlignedPointer,
935                                                 RTStubEntries,
936                                                 COStubEntries,
937                                                 BuiltinStubEntries,
938                                                 BuiltinHClassEntries,
939                                                 BCDebuggerStubEntries,
940                                                 BaselineStubEntries,
941                                                 base::AlignedUint64,
942                                                 base::AlignedUint64,
943                                                 base::AlignedPointer,
944                                                 base::AlignedUint64,
945                                                 base::AlignedUint64,
946                                                 base::AlignedPointer,
947                                                 base::AlignedPointer,
948                                                 base::AlignedUint64,
949                                                 base::AlignedUint64,
950                                                 JSTaggedValue,
951                                                 base::AlignedBool,
952                                                 base::AlignedBool,
953                                                 base::AlignedUint32,
954                                                 JSTaggedValue,
955                                                 base::AlignedPointer,
956                                                 BuiltinEntries,
957                                                 base::AlignedBool,
958                                                 base::AlignedPointer,
959                                                 base::AlignedPointer,
960                                                 base::AlignedPointer,
961                                                 base::AlignedUint32,
962                                                 base::AlignedBool,
963                                                 base::AlignedBool> {
        // Logical index of each GlueData member, in declaration order. Must
        // stay in sync with the AlignedStruct type list above — enforced by
        // the static_assert below.
        enum class Index : size_t {
            BcStubEntriesIndex = 0,
            ExceptionIndex,
            GlobalObjIndex,
            StableArrayElementsGuardiansIndex,
            CurrentFrameIndex,
            LeaveFrameIndex,
            LastFpIndex,
            NewSpaceAllocationTopAddressIndex,
            NewSpaceAllocationEndAddressIndex,
            SOldSpaceAllocationTopAddressIndex,
            SOldSpaceAllocationEndAddressIndex,
            SNonMovableSpaceAllocationTopAddressIndex,
            SNonMovableSpaceAllocationEndAddressIndex,
            RTStubEntriesIndex,
            COStubEntriesIndex,
            BuiltinsStubEntriesIndex,
            BuiltinHClassEntriesIndex,
            BcDebuggerStubEntriesIndex,
            BaselineStubEntriesIndex,
            GCStateBitFieldIndex,
            SharedGCStateBitFieldIndex,
            FrameBaseIndex,
            StackStartIndex,
            StackLimitIndex,
            GlueGlobalEnvIndex,
            GlobalConstIndex,
            AllowCrossThreadExecutionIndex,
            InterruptVectorIndex,
            IsStartHeapSamplingIndex,
            IsDebugModeIndex,
            IsFrameDroppedIndex,
            PropertiesGrowStepIndex,
            EntryFrameDroppedStateIndex,
            CurrentContextIndex,
            BuiltinEntriesIndex,
            IsTracingIndex,
            UnsharedConstpoolsIndex,
            RandomStatePtrIndex,
            StateAndFlagsIndex,
            TaskInfoIndex,
            IsEnableElementsKindIndex,
            IsEnableForceIC,
            NumOfMembers
        };
        static_assert(static_cast<size_t>(Index::NumOfMembers) == NumOfTypes);
1010
        // Byte offset of each GlueData member relative to the glue base, for
        // the 32-bit or 64-bit target layout; consumed by stub generators.
        static size_t GetExceptionOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::ExceptionIndex)>(isArch32);
        }

        static size_t GetGlobalObjOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlobalObjIndex)>(isArch32);
        }

        static size_t GetStableArrayElementsGuardiansOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StableArrayElementsGuardiansIndex)>(isArch32);
        }

        static size_t GetGlobalConstOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlobalConstIndex)>(isArch32);
        }

        static size_t GetGCStateBitFieldOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GCStateBitFieldIndex)>(isArch32);
        }

        static size_t GetSharedGCStateBitFieldOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::SharedGCStateBitFieldIndex)>(isArch32);
        }

        static size_t GetCurrentFrameOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::CurrentFrameIndex)>(isArch32);
        }

        static size_t GetLeaveFrameOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::LeaveFrameIndex)>(isArch32);
        }

        static size_t GetLastFpOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::LastFpIndex)>(isArch32);
        }

        static size_t GetNewSpaceAllocationTopAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::NewSpaceAllocationTopAddressIndex)>(isArch32);
        }

        static size_t GetNewSpaceAllocationEndAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::NewSpaceAllocationEndAddressIndex)>(isArch32);
        }

        static size_t GetSOldSpaceAllocationTopAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::SOldSpaceAllocationTopAddressIndex)>(isArch32);
        }

        static size_t GetSOldSpaceAllocationEndAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::SOldSpaceAllocationEndAddressIndex)>(isArch32);
        }

        static size_t GetSNonMovableSpaceAllocationTopAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::SNonMovableSpaceAllocationTopAddressIndex)>(isArch32);
        }
1080
1081        static size_t GetSNonMovableSpaceAllocationEndAddressOffset(bool isArch32)
1082        {
1083            return GetOffset<static_cast<size_t>(Index::SNonMovableSpaceAllocationEndAddressIndex)>(isArch32);
1084        }
1085
1086        static size_t GetBCStubEntriesOffset(bool isArch32)
1087        {
1088            return GetOffset<static_cast<size_t>(Index::BcStubEntriesIndex)>(isArch32);
1089        }
1090
1091        static size_t GetRTStubEntriesOffset(bool isArch32)
1092        {
1093            return GetOffset<static_cast<size_t>(Index::RTStubEntriesIndex)>(isArch32);
1094        }
1095
1096        static size_t GetCOStubEntriesOffset(bool isArch32)
1097        {
1098            return GetOffset<static_cast<size_t>(Index::COStubEntriesIndex)>(isArch32);
1099        }
1100
1101        static size_t GetBaselineStubEntriesOffset(bool isArch32)
1102        {
1103            return GetOffset<static_cast<size_t>(Index::BaselineStubEntriesIndex)>(isArch32);
1104        }
1105
1106        static size_t GetBuiltinsStubEntriesOffset(bool isArch32)
1107        {
1108            return GetOffset<static_cast<size_t>(Index::BuiltinsStubEntriesIndex)>(isArch32);
1109        }
1110
1111        static size_t GetBuiltinHClassEntriesOffset(bool isArch32)
1112        {
1113            return GetOffset<static_cast<size_t>(Index::BuiltinHClassEntriesIndex)>(isArch32);
1114        }
1115
        // Composite offsets into the builtin HClass entry table: the offset of
        // the table itself plus the per-type offset of a particular entry kind
        // (computed by BuiltinHClassEntries for the given BuiltinTypeId).

        static size_t GetBuiltinHClassOffset(BuiltinTypeId type, bool isArch32)
        {
            return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetBuiltinHClassOffset(type);
        }

        static size_t GetBuiltinInstanceHClassOffset(BuiltinTypeId type, bool isArch32)
        {
            return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetInstanceHClassOffset(type);
        }

        static size_t GetBuiltinPrototypeHClassOffset(BuiltinTypeId type, bool isArch32)
        {
            return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetPrototypeHClassOffset(type);
        }

        static size_t GetBuiltinPrototypeOfPrototypeHClassOffset(BuiltinTypeId type, bool isArch32)
        {
            return GetBuiltinHClassEntriesOffset(isArch32) +
                   BuiltinHClassEntries::GetPrototypeOfPrototypeHClassOffset(type);
        }

        static size_t GetBuiltinExtraHClassOffset(BuiltinTypeId type, bool isArch32)
        {
            return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetExtraHClassOffset(type);
        }
1141
        // Remaining glue-field offset accessors (same contract as above: byte
        // offset within GlueData for the 32-bit or 64-bit layout).

        static size_t GetBCDebuggerStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BcDebuggerStubEntriesIndex)>(isArch32);
        }

        static size_t GetFrameBaseOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::FrameBaseIndex)>(isArch32);
        }

        static size_t GetStackLimitOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StackLimitIndex)>(isArch32);
        }

        static size_t GetGlueGlobalEnvOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlueGlobalEnvIndex)>(isArch32);
        }

        static size_t GetAllowCrossThreadExecutionOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::AllowCrossThreadExecutionIndex)>(isArch32);
        }

        static size_t GetInterruptVectorOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::InterruptVectorIndex)>(isArch32);
        }

        static size_t GetIsStartHeapSamplingOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsStartHeapSamplingIndex)>(isArch32);
        }

        static size_t GetIsDebugModeOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsDebugModeIndex)>(isArch32);
        }

        static size_t GetIsFrameDroppedOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsFrameDroppedIndex)>(isArch32);
        }

        static size_t GetPropertiesGrowStepOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::PropertiesGrowStepIndex)>(isArch32);
        }

        static size_t GetEntryFrameDroppedStateOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::EntryFrameDroppedStateIndex)>(isArch32);
        }

        static size_t GetCurrentContextOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::CurrentContextIndex)>(isArch32);
        }

        static size_t GetBuiltinEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BuiltinEntriesIndex)>(isArch32);
        }

        static size_t GetIsTracingOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsTracingIndex)>(isArch32);
        }

        static size_t GetUnSharedConstpoolsOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::UnsharedConstpoolsIndex)>(isArch32);
        }

        static size_t GetStateAndFlagsOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StateAndFlagsIndex)>(isArch32);
        }

        static size_t GetRandomStatePtrOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::RandomStatePtrIndex)>(isArch32);
        }

        static size_t GetTaskInfoOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::TaskInfoIndex)>(isArch32);
        }

        static size_t GetIsEnableElementsKindOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsEnableElementsKindIndex)>(isArch32);
        }

        // NOTE(review): "OffSet" spelling is inconsistent with every other
        // accessor ("Offset"); kept as-is for source compatibility with callers.
        static size_t GetIsEnableForceICOffSet(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsEnableForceIC)>(isArch32);
        }
1241
        // Glue fields. Each member is EAS-aligned so it occupies one fixed
        // slot; declaration order and count must mirror enum Index above (the
        // static_assert on NumOfMembers enforces the count).

        // Stub entry tables and core VM state.
        alignas(EAS) BCStubEntries bcStubEntries_ {};
        alignas(EAS) JSTaggedValue exception_ {JSTaggedValue::Hole()};
        alignas(EAS) JSTaggedValue globalObject_ {JSTaggedValue::Hole()};
        alignas(EAS) bool stableArrayElementsGuardians_ {true};
        // Interpreter frame pointers.
        alignas(EAS) JSTaggedType *currentFrame_ {nullptr};
        alignas(EAS) JSTaggedType *leaveFrame_ {nullptr};
        alignas(EAS) JSTaggedType *lastFp_ {nullptr};
        // Bump-pointer allocation top/end addresses (new space and shared spaces).
        alignas(EAS) const uintptr_t *newSpaceAllocationTopAddress_ {nullptr};
        alignas(EAS) const uintptr_t *newSpaceAllocationEndAddress_ {nullptr};
        alignas(EAS) const uintptr_t *sOldSpaceAllocationTopAddress_ {nullptr};
        alignas(EAS) const uintptr_t *sOldSpaceAllocationEndAddress_ {nullptr};
        alignas(EAS) const uintptr_t *sNonMovableSpaceAllocationTopAddress_ {nullptr};
        alignas(EAS) const uintptr_t *sNonMovableSpaceAllocationEndAddress_ {nullptr};
        alignas(EAS) RTStubEntries rtStubEntries_ {};
        alignas(EAS) COStubEntries coStubEntries_ {};
        alignas(EAS) BuiltinStubEntries builtinStubEntries_ {};
        alignas(EAS) BuiltinHClassEntries builtinHClassEntries_ {};
        alignas(EAS) BCDebuggerStubEntries bcDebuggerStubEntries_ {};
        alignas(EAS) BaselineStubEntries baselineStubEntries_ {};
        // GC state (local and shared) and stack bounds.
        alignas(EAS) volatile uint64_t gcStateBitField_ {0ULL};
        alignas(EAS) volatile uint64_t sharedGCStateBitField_ {0ULL};
        alignas(EAS) JSTaggedType *frameBase_ {nullptr};
        alignas(EAS) uint64_t stackStart_ {0};
        alignas(EAS) uint64_t stackLimit_ {0};
        alignas(EAS) GlobalEnv *glueGlobalEnv_ {nullptr};
        alignas(EAS) GlobalEnvConstants *globalConst_ {nullptr};
        alignas(EAS) bool allowCrossThreadExecution_ {false};
        alignas(EAS) volatile uint64_t interruptVector_ {0};
        // Debugger / profiling / context bookkeeping.
        alignas(EAS) JSTaggedValue isStartHeapSampling_ {JSTaggedValue::False()};
        alignas(EAS) bool isDebugMode_ {false};
        alignas(EAS) bool isFrameDropped_ {false};
        alignas(EAS) uint32_t propertiesGrowStep_ {JSObjectResizingStrategy::PROPERTIES_GROW_SIZE};
        alignas(EAS) uint64_t entryFrameDroppedState_ {FrameDroppedState::StateFalse};
        alignas(EAS) EcmaContext *currentContext_ {nullptr};
        alignas(EAS) BuiltinEntries builtinEntries_ {};
        alignas(EAS) bool isTracing_ {false};
        alignas(EAS) uintptr_t unsharedConstpools_ {0};
        alignas(EAS) uintptr_t randomStatePtr_ {0};
        // Packed thread state + flags word (see GetState / ReadFlag).
        alignas(EAS) ThreadStateAndFlags stateAndFlags_ {};
        alignas(EAS) uintptr_t taskInfo_ {0};
        alignas(EAS) bool isEnableElementsKind_ {false};
        alignas(EAS) bool isEnableForceIC_ {true};
1284    };
1285    STATIC_ASSERT_EQ_ARCH(sizeof(GlueData), GlueData::SizeArch32, GlueData::SizeArch64);
1286
1287    void PushContext(EcmaContext *context);
1288    void PopContext();
1289
    // Returns the EcmaContext currently active on this thread (stored in glue
    // data so it is also reachable from stub code by offset).
    EcmaContext *GetCurrentEcmaContext() const
    {
        return glueData_.currentContext_;
    }
1294
1295    JSTaggedValue GetSingleCharTable() const
1296    {
1297        ASSERT(glueData_.globalConst_->GetSingleCharTable() != JSTaggedValue::Hole());
1298        return glueData_.globalConst_->GetSingleCharTable();
1299    }
1300
1301    void SwitchCurrentContext(EcmaContext *currentContext, bool isInIterate = false);
1302
    // Returns the contexts registered on this thread.
    // NOTE(review): returns the CVector by value, so every call copies the
    // container — confirm callers do not rely on mutating the live list.
    CVector<EcmaContext *> GetEcmaContexts()
    {
        return contexts_;
    }
1307
1308    bool IsPropertyCacheCleared() const;
1309
1310    bool EraseContext(EcmaContext *context);
1311    void ClearContextCachedConstantPool();
1312
1313    const GlobalEnvConstants *GetFirstGlobalConst() const;
1314    bool IsAllContextsInitialized() const;
1315    bool IsReadyToUpdateDetector() const;
1316    Area *GetOrCreateRegExpCache();
1317
1318    void InitializeBuiltinObject(const std::string& key);
1319    void InitializeBuiltinObject();
1320
    // Flags that a full-mark GC has been requested on this thread.
    void SetFullMarkRequest()
    {
        fullMarkRequest_ = true;
    }
1325
1326    inline bool IsThreadSafe() const
1327    {
1328        return IsMainThread() || HasSuspendRequest();
1329    }
1330
1331    bool IsSuspended() const
1332    {
1333        bool f = ReadFlag(ThreadFlag::SUSPEND_REQUEST);
1334        bool s = (GetState() != ThreadState::RUNNING);
1335        return f && s;
1336    }
1337
    // True when the SUSPEND_REQUEST flag is set in the state-and-flags word.
    inline bool HasSuspendRequest() const
    {
        return ReadFlag(ThreadFlag::SUSPEND_REQUEST);
    }
1342
1343    void CheckSafepointIfSuspended()
1344    {
1345        if (HasSuspendRequest()) {
1346            WaitSuspension();
1347        }
1348    }
1349
    // True when the thread state word reads IS_SUSPENDED.
    bool IsInSuspendedState() const
    {
        return GetState() == ThreadState::IS_SUSPENDED;
    }

    // True when the thread state word reads RUNNING.
    bool IsInRunningState() const
    {
        return GetState() == ThreadState::RUNNING;
    }
1359
1360    bool IsInRunningStateOrProfiling() const;
1361
    // Extracts the thread state from the packed state-and-flags word:
    // acquire-load the word, then shift away the flag bits
    // (state lives above THREAD_STATE_OFFSET).
    ThreadState GetState() const
    {
        uint32_t stateAndFlags = glueData_.stateAndFlags_.asAtomicInt.load(std::memory_order_acquire);
        return static_cast<enum ThreadState>(stateAndFlags >> THREAD_STATE_OFFSET);
    }
1367    void PUBLIC_API UpdateState(ThreadState newState);
1368    void SuspendThread(bool internalSuspend, SuspendBarrier* barrier = nullptr);
1369    void ResumeThread(bool internalSuspend);
1370    void WaitSuspension();
1371    static bool IsMainThread();
1372    PUBLIC_API void ManagedCodeBegin();
1373    PUBLIC_API void ManagedCodeEnd();
1374#ifndef NDEBUG
1375    bool IsInManagedState() const;
1376    MutatorLock::MutatorLockState GetMutatorLockState() const;
1377    void SetMutatorLockState(MutatorLock::MutatorLockState newState);
1378#endif
    // Installs the callback invoked for weak-reference finalize tasks.
    void SetWeakFinalizeTaskCallback(const WeakFinalizeTaskCallback &callback)
    {
        finalizeTaskCallback_ = callback;
    }
1383
1384    uint64_t GetJobId()
1385    {
1386        if (jobId_ == UINT64_MAX) {
1387            jobId_ = 0;
1388        }
1389        return ++jobId_;
1390    }
1391
    // Installs the callback used to clean native pointers asynchronously.
    void SetAsyncCleanTaskCallback(const NativePointerTaskCallback &callback)
    {
        asyncCleanTaskCb_ = callback;
    }

    // Returns the installed async-clean callback (nullptr if none was set).
    NativePointerTaskCallback GetAsyncCleanTaskCallback() const
    {
        return asyncCleanTaskCb_;
    }
1401
1402    static void RegisterThread(JSThread *jsThread);
1403
1404    static void UnregisterThread(JSThread *jsThread);
1405
    // Thread-kind predicates, discriminating on threadType_
    // (JS_THREAD is the default, see the member initializer below).

    bool IsJSThread() const
    {
        return threadType_ == ThreadType::JS_THREAD;
    }

    bool IsJitThread() const
    {
        return threadType_ == ThreadType::JIT_THREAD;
    }

    bool IsDaemonThread() const
    {
        return threadType_ == ThreadType::DAEMON_THREAD;
    }
1420
1421    // Daemon_Thread and JS_Thread have some difference in transition, for example, when transition to running,
1422    // JS_Thread may take some local_gc actions, but Daemon_Thread do not need.
1423    void TransferDaemonThreadToRunning();
1424
    // Lock serializing JIT-related operations on this thread.
    // NOTE(review): returned as a pointer while GetProfileTypeAccessorLock
    // below returns a reference — inconsistent but kept for compatibility.
    RecursiveMutex *GetJitLock()
    {
        return &jitMutex_;
    }

    // Lock guarding profile-type accessor state.
    RecursiveMutex &GetProfileTypeAccessorLock()
    {
        return profileTypeAccessorLockMutex_;
    }
1434
1435    void SetMachineCodeLowMemory(bool isLow)
1436    {
1437        machineCodeLowMemory_ = isLow;
1438    }
1439
1440    bool IsMachineCodeLowMemory()
1441    {
1442        return machineCodeLowMemory_;
1443    }
1444
    // Opaque host-environment pointer associated with this thread.
    // NOTE(review): semantics of env_ are not visible in this header —
    // presumably a napi/host env handle; confirm at the call sites.
    void *GetEnv() const
    {
        return env_;
    }

    void SetEnv(void *env)
    {
        env_ = env;
    }
1454
1455    void SetIsInConcurrentScope(bool flag)
1456    {
1457        isInConcurrentScope_ = flag;
1458    }
1459
1460    bool IsInConcurrentScope()
1461    {
1462        return isInConcurrentScope_;
1463    }
1464
    // Rewires the write-barrier entry points to their eden-aware variants:
    // the value-set common stubs (shared-barrier and non-shared-barrier
    // flavors) and the marking / ASM fast write-barrier runtime interfaces
    // are each replaced by the corresponding *WithEden version.
    void EnableEdenGCBarriers()
    {
        auto setValueStub = GetFastStubEntry(kungfu::CommonStubCSigns::SetValueWithEdenBarrier);
        SetFastStubEntry(kungfu::CommonStubCSigns::SetValueWithBarrier, setValueStub);
        auto markStub = GetRTInterface(kungfu::RuntimeStubCSigns::ID_MarkingBarrierWithEden);
        RegisterRTInterface(kungfu::RuntimeStubCSigns::ID_MarkingBarrier, markStub);
        auto setNotShareValueStub = GetFastStubEntry(kungfu::CommonStubCSigns::SetNonSValueWithEdenBarrier);
        SetFastStubEntry(kungfu::CommonStubCSigns::SetNonSValueWithBarrier, setNotShareValueStub);
        auto asmCheckStub = GetRTInterface(kungfu::RuntimeStubCSigns::ID_ASMWriteBarrierWithEden);
        RegisterRTInterface(kungfu::RuntimeStubCSigns::ID_ASMFastWriteBarrier, asmCheckStub);
    }
1476
1477
1478
#ifndef NDEBUG
    // Debug-only bookkeeping for suspend-all: marks that a suspend-all
    // operation has been launched on this thread.
    inline void LaunchSuspendAll()
    {
        launchedSuspendAll_ = true;
    }

    // Debug-only: true while a launched suspend-all has not yet completed.
    inline bool HasLaunchedSuspendAll() const
    {
        return launchedSuspendAll_;
    }

    // Debug-only: clears the suspend-all marker once the operation finishes.
    inline void CompleteSuspendAll()
    {
        launchedSuspendAll_ = false;
    }
#endif
1495
1496protected:
    // Publishes the current OS thread id into id_ (release store, so readers
    // using an acquire load observe a fully written value).
    void SetThreadId()
    {
        id_.store(JSThread::GetCurrentThreadId(), std::memory_order_release);
    }

    // When call EcmaVM::PreFork(), the std::thread for Daemon_Thread is finished, but the Daemon_Thread instance
    // is still alive, and need to reset ThreadId to 0.
    void ResetThreadId()
    {
        id_.store(0, std::memory_order_release);
    }
1508private:
1509    NO_COPY_SEMANTIC(JSThread);
1510    NO_MOVE_SEMANTIC(JSThread);
    // Private setters for glue-resident state (only friends / the VM itself
    // may rewire these).

    void SetGlobalConst(GlobalEnvConstants *globalConst)
    {
        glueData_.globalConst_ = globalConst;
    }
    // Makes the given context the active one (mirrored in glue data).
    void SetCurrentEcmaContext(EcmaContext *context)
    {
        glueData_.currentContext_ = context;
    }

    // Replaces the { ElementsKind -> (hclass, hclassWithProto) } index map.
    void SetArrayHClassIndexMap(const CMap<ElementsKind, std::pair<ConstantIndex, ConstantIndex>> &map)
    {
        arrayHClassIndexMap_ = map;
    }
1524
1525    void TransferFromRunningToSuspended(ThreadState newState);
1526
1527    void TransferToRunning();
1528
1529    inline void StoreState(ThreadState newState);
1530
1531    void StoreRunningState(ThreadState newState);
1532
1533    void StoreSuspendedState(ThreadState newState);
1534
1535    bool ReadFlag(ThreadFlag flag) const
1536    {
1537        uint32_t stateAndFlags = glueData_.stateAndFlags_.asAtomicInt.load(std::memory_order_acquire);
1538        uint16_t flags = (stateAndFlags & THREAD_FLAGS_MASK);
1539        return (flags & static_cast<uint16_t>(flag)) != 0;
1540    }
1541
    // Atomically sets the given flag bit in the packed state-and-flags word.
    void SetFlag(ThreadFlag flag)
    {
        glueData_.stateAndFlags_.asAtomicInt.fetch_or(flag, std::memory_order_seq_cst);
    }

    // Atomically clears the given flag bit (UINT32_MAX ^ flag is ~flag
    // widened to 32 bits).
    void ClearFlag(ThreadFlag flag)
    {
        glueData_.stateAndFlags_.asAtomicInt.fetch_and(UINT32_MAX ^ flag, std::memory_order_seq_cst);
    }
1551
1552    void DumpStack() DUMP_API_ATTR;
1553
1554    static size_t GetAsmStackLimit();
1555
1556    static constexpr size_t DEFAULT_MAX_SYSTEM_STACK_SIZE = 8_MB;
1557
1558    GlueData glueData_;
1559    std::atomic<ThreadId> id_ {0};
1560    EcmaVM *vm_ {nullptr};
1561    void *env_ {nullptr};
1562    Area *regExpCache_ {nullptr};
1563
1564    // MM: handles, global-handles, and aot-stubs.
1565    int nestedLevel_ = 0;
1566    NativeAreaAllocator *nativeAreaAllocator_ {nullptr};
1567    HeapRegionAllocator *heapRegionAllocator_ {nullptr};
1568    bool runningNativeFinalizeCallbacks_ {false};
1569    std::vector<std::pair<WeakClearCallback, void *>> weakNodeFreeGlobalCallbacks_ {};
1570    std::vector<std::pair<WeakClearCallback, void *>> weakNodeNativeFinalizeCallbacks_ {};
1571
1572    EcmaGlobalStorage<Node> *globalStorage_ {nullptr};
1573    EcmaGlobalStorage<DebugNode> *globalDebugStorage_ {nullptr};
1574    int32_t stackTraceFd_ {-1};
1575
1576    std::function<uintptr_t(JSTaggedType value)> newGlobalHandle_;
1577    std::function<void(uintptr_t nodeAddr)> disposeGlobalHandle_;
1578    std::function<uintptr_t(uintptr_t nodeAddr, void *ref, WeakClearCallback freeGlobalCallBack_,
1579         WeakClearCallback nativeFinalizeCallBack)> setWeak_;
1580    std::function<uintptr_t(uintptr_t nodeAddr)> clearWeak_;
1581    std::function<bool(uintptr_t addr)> isWeak_;
1582    NativePointerTaskCallback asyncCleanTaskCb_ {nullptr};
1583    WeakFinalizeTaskCallback finalizeTaskCallback_ {nullptr};
1584    uint32_t globalNumberCount_ {0};
1585
1586    // Run-time state
1587    bool getStackSignal_ {false};
1588    bool runtimeState_ {false};
1589    bool isAsmInterpreter_ {false};
1590    VmThreadControl *vmThreadControl_ {nullptr};
1591    bool enableStackSourceFile_ {true};
1592    bool enableLazyBuiltins_ {false};
1593    bool readyForGCIterating_ {false};
1594    // CpuProfiler
1595    bool isProfiling_ {false};
1596    bool gcState_ {false};
1597    std::atomic_bool needProfiling_ {false};
1598    std::string profileName_ {""};
1599
1600    bool finalizationCheckState_ {false};
1601    // Shared heap
1602    bool isMainThread_ {false};
1603    bool fullMarkRequest_ {false};
1604
1605    // { ElementsKind, (hclass, hclassWithProto) }
1606    CMap<ElementsKind, std::pair<ConstantIndex, ConstantIndex>> arrayHClassIndexMap_;
1607    CMap<JSHClass *, GlobalIndex> ctorHclassEntries_;
1608
1609    CVector<EcmaContext *> contexts_;
1610    EcmaContext *currentContext_ {nullptr};
1611
1612    Mutex suspendLock_;
1613    int32_t suspendCount_ {0};
1614    ConditionVariable suspendCondVar_;
1615    SuspendBarrier *suspendBarrier_ {nullptr};
1616
1617    uint64_t jobId_ {0};
1618
1619    ThreadType threadType_ {ThreadType::JS_THREAD};
1620    RecursiveMutex jitMutex_;
1621    bool machineCodeLowMemory_ {false};
1622    RecursiveMutex profileTypeAccessorLockMutex_;
1623
1624#ifndef NDEBUG
1625    MutatorLock::MutatorLockState mutatorLockState_ = MutatorLock::MutatorLockState::UNLOCKED;
1626    std::atomic<bool> launchedSuspendAll_ {false};
1627#endif
1628    // Collect a map from JsError to MachineCode objects, JsError objects with stack frame generated by jit in the map.
1629    // It will be used to keep MachineCode objects alive (for dump) before JsError object be free.
1630    std::map<JSTaggedType, JitCodeVector*> jitCodeMaps_;
1631
1632    std::atomic<bool> needTermination_ {false};
1633    std::atomic<bool> hasTerminated_ {false};
1634
1635    bool isInConcurrentScope_ {false};
1636
1637    friend class GlobalHandleCollection;
1638    friend class EcmaVM;
1639    friend class EcmaContext;
1640    friend class JitVM;
1641};
1642}  // namespace panda::ecmascript
1643#endif  // ECMASCRIPT_JS_THREAD_H
1644