1 /**
2  * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 #ifndef PANDA_RUNTIME_METHOD_H_
16 #define PANDA_RUNTIME_METHOD_H_
17 
18 #include <atomic>
19 #include <cstdint>
20 #include <functional>
21 #include <string_view>
22 
23 #include "intrinsics_enum.h"
24 #include "libpandabase/utils/arch.h"
25 #include "libpandabase/utils/logger.h"
26 #include "libpandafile/code_data_accessor-inl.h"
27 #include "libpandafile/file.h"
28 #include "libpandafile/file_items.h"
29 #include "libpandafile/method_data_accessor.h"
30 #include "libpandafile/modifiers.h"
31 #include "runtime/bridge/bridge.h"
32 #include "runtime/include/compiler_interface.h"
33 #include "runtime/include/class_helper.h"
34 #include "runtime/include/mem/panda_containers.h"
35 #include "runtime/include/mem/panda_smart_pointers.h"
36 #include "runtime/interpreter/frame.h"
37 #include "value.h"
38 
39 namespace ark {
40 
41 class Class;
42 class ManagedThread;
43 class ProfilingData;
44 
45 #ifdef PANDA_ENABLE_GLOBAL_REGISTER_VARIABLES
46 namespace interpreter {
47 class AccVRegisterT;
48 }  // namespace interpreter
49 using interpreter::AccVRegisterT;
50 #else
51 namespace interpreter {
52 using AccVRegisterT = AccVRegister;
53 }  // namespace interpreter
54 #endif
55 
56 class FrameDeleter {
57 public:
FrameDeleter(ManagedThread *thread)58     explicit FrameDeleter(ManagedThread *thread) : thread_(thread) {}
59 
60     void operator()(Frame *frame) const;
61 
62 private:
63     ManagedThread *thread_;
64 };
65 
66 class Method {
67 public:
68     using UniqId = uint64_t;
69 
70     enum CompilationStage {
71         NOT_COMPILED,
72         WAITING,
73         COMPILATION,
74         COMPILED,
75         FAILED,
76     };
77 
78     enum class VerificationStage { NOT_VERIFIED = 0, VERIFIED_FAIL = 1, VERIFIED_OK = 2, LAST = VERIFIED_OK };
79 
80     static_assert(MinimumBitsToStore(VerificationStage::LAST) <= VERIFICATION_STATUS_WIDTH);
81 
82     using AnnotationField = panda_file::MethodDataAccessor::AnnotationField;
83 
84     class Proto {
85     public:
86         using ShortyVector = PandaSmallVector<panda_file::Type>;
87         using RefTypeVector = PandaSmallVector<std::string_view>;
88         Proto() = default;
89 
90         Proto(const panda_file::File &pf, panda_file::File::EntityId protoId);
91 
Proto(ShortyVector shorty, RefTypeVector refTypes)92         Proto(ShortyVector shorty, RefTypeVector refTypes) : shorty_(std::move(shorty)), refTypes_(std::move(refTypes))
93         {
94         }
95 
operator ==(const Proto &other) const96         bool operator==(const Proto &other) const
97         {
98             return shorty_ == other.shorty_ && refTypes_ == other.refTypes_;
99         }
100 
GetReturnType() const101         panda_file::Type GetReturnType() const
102         {
103             return shorty_[0];
104         }
105 
106         PANDA_PUBLIC_API std::string_view GetReturnTypeDescriptor() const;
107         PandaString GetSignature(bool includeReturnType = true);
108 
GetShorty()109         ShortyVector &GetShorty()
110         {
111             return shorty_;
112         }
113 
GetShorty() const114         const ShortyVector &GetShorty() const
115         {
116             return shorty_;
117         }
118 
GetRefTypes()119         RefTypeVector &GetRefTypes()
120         {
121             return refTypes_;
122         }
123 
GetRefTypes() const124         const RefTypeVector &GetRefTypes() const
125         {
126             return refTypes_;
127         }
128 
129         ~Proto() = default;
130 
131         DEFAULT_COPY_SEMANTIC(Proto);
132         DEFAULT_MOVE_SEMANTIC(Proto);
133 
134     private:
135         ShortyVector shorty_;
136         RefTypeVector refTypes_;
137     };
138 
139     class PANDA_PUBLIC_API ProtoId {
140     public:
ProtoId(const panda_file::File &pf, panda_file::File::EntityId protoId)141         ProtoId(const panda_file::File &pf, panda_file::File::EntityId protoId) : pf_(pf), protoId_(protoId) {}
142         bool operator==(const ProtoId &other) const;
143         bool operator==(const Proto &other) const;
operator !=(const ProtoId &other) const144         bool operator!=(const ProtoId &other) const
145         {
146             return !operator==(other);
147         }
operator !=(const Proto &other) const148         bool operator!=(const Proto &other) const
149         {
150             return !operator==(other);
151         }
152 
GetPandaFile() const153         const panda_file::File &GetPandaFile() const
154         {
155             return pf_;
156         }
157 
GetEntityId() const158         const panda_file::File::EntityId &GetEntityId() const
159         {
160             return protoId_;
161         }
162 
163         ~ProtoId() = default;
164 
165         DEFAULT_COPY_CTOR(ProtoId);
166         NO_COPY_OPERATOR(ProtoId);
167         NO_MOVE_SEMANTIC(ProtoId);
168 
169     private:
170         const panda_file::File &pf_;
171         panda_file::File::EntityId protoId_;
172     };
173 
174     PANDA_PUBLIC_API Method(Class *klass, const panda_file::File *pf, panda_file::File::EntityId fileId,
175                             panda_file::File::EntityId codeId, uint32_t accessFlags, uint32_t numArgs,
176                             const uint16_t *shorty);
177 
Method(const Method *method)178     explicit Method(const Method *method)
179         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
180         // should become visible
181         : accessFlags_(method->accessFlags_.load(std::memory_order_acquire)),
182           numArgs_(method->numArgs_),
183           stor16Pair_(method->stor16Pair_),
184           classWord_(method->classWord_),
185           pandaFile_(method->pandaFile_),
186           fileId_(method->fileId_),
187           codeId_(method->codeId_),
188           shorty_(method->shorty_)
189     {
190         // NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
191         pointer_.nativePointer.store(
192             // Atomic with relaxed order reason: data race with native_pointer_ with no synchronization or ordering
193             // constraints imposed on other reads or writes NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
194             method->pointer_.nativePointer.load(std::memory_order_relaxed), std::memory_order_relaxed);
195 
196         // Atomic with release order reason: data race with compiled_entry_point_ with dependecies on writes before the
197         // store which should become visible acquire
198         compiledEntryPoint_.store(method->IsNative() ? method->GetCompiledEntryPoint()
199                                                      : GetCompiledCodeToInterpreterBridge(method),
200                                   std::memory_order_release);
201         SetCompilationStatus(CompilationStage::NOT_COMPILED);
202     }
203 
204     Method() = delete;
205     Method(const Method &) = delete;
206     Method(Method &&) = delete;
207     Method &operator=(const Method &) = delete;
208     Method &operator=(Method &&) = delete;
209     ~Method() = default;
210 
GetNumArgs() const211     uint32_t GetNumArgs() const
212     {
213         return numArgs_;
214     }
215 
GetNumVregs() const216     uint32_t GetNumVregs() const
217     {
218         if (!codeId_.IsValid()) {
219             return 0;
220         }
221         return panda_file::CodeDataAccessor::GetNumVregs(*(pandaFile_), codeId_);
222     }
223 
GetCodeSize() const224     uint32_t GetCodeSize() const
225     {
226         if (!codeId_.IsValid()) {
227             return 0;
228         }
229         panda_file::CodeDataAccessor cda(*(pandaFile_), codeId_);
230         return cda.GetCodeSize();
231     }
232 
GetInstructions() const233     const uint8_t *GetInstructions() const
234     {
235         if (!codeId_.IsValid()) {
236             return nullptr;
237         }
238         return panda_file::CodeDataAccessor::GetInstructions(*pandaFile_, codeId_);
239     }
240 
241     /*
242      * Invoke the method as a static method.
243      * Number of arguments and their types must match the method's signature
244      */
245     PANDA_PUBLIC_API Value Invoke(ManagedThread *thread, Value *args, bool proxyCall = false);
246 
InvokeVoid(ManagedThread *thread, Value *args)247     void InvokeVoid(ManagedThread *thread, Value *args)
248     {
249         Invoke(thread, args);
250     }
251 
252     /*
253      * Invoke the method as a dynamic function.
254      * Number of arguments may vary, all arguments must be of type coretypes::TaggedValue.
255      * args - array of arguments. The first value must be the callee function object
256      * num_args - length of args array
257      * data - ark::ExtFrame language-related extension data
258      */
259     coretypes::TaggedValue InvokeDyn(ManagedThread *thread, uint32_t numArgs, coretypes::TaggedValue *args);
260 
261     template <class InvokeHelper>
262     coretypes::TaggedValue InvokeDyn(ManagedThread *thread, uint32_t numArgs, coretypes::TaggedValue *args);
263 
264     template <class InvokeHelper>
265     void InvokeEntry(ManagedThread *thread, Frame *currentFrame, Frame *frame, const uint8_t *pc);
266 
267     /*
268      * Enter execution context (ECMAScript generators)
269      * pc - pc of context
270      * acc - accumulator of context
271      * nregs - number of registers in context
272      * regs - registers of context
273      * data - ark::ExtFrame language-related extension data
274      */
275     coretypes::TaggedValue InvokeContext(ManagedThread *thread, const uint8_t *pc, coretypes::TaggedValue *acc,
276                                          uint32_t nregs, coretypes::TaggedValue *regs);
277 
278     template <class InvokeHelper>
279     coretypes::TaggedValue InvokeContext(ManagedThread *thread, const uint8_t *pc, coretypes::TaggedValue *acc,
280                                          uint32_t nregs, coretypes::TaggedValue *regs);
281 
282     /*
283      * Create new frame for native method, but don't start execution
284      * Number of arguments may vary, all arguments must be of type coretypes::TaggedValue.
285      * args - array of arguments. The first value must be the callee function object
286      * num_vregs - number of registers in frame
287      * num_args - length of args array
288      * data - ark::ExtFrame language-related extension data
289      */
290     template <class InvokeHelper, class ValueT>
291     Frame *EnterNativeMethodFrame(ManagedThread *thread, uint32_t numVregs, uint32_t numArgs, ValueT *args);
292 
293     /*
294      * Pop native method frame
295      */
296     static void ExitNativeMethodFrame(ManagedThread *thread);
297 
GetClass() const298     Class *GetClass() const
299     {
300         return reinterpret_cast<Class *>(classWord_);
301     }
302 
SetClass(Class *cls)303     void SetClass(Class *cls)
304     {
305         classWord_ = static_cast<ClassHelper::ClassWordSize>(ToObjPtrType(cls));
306     }
307 
SetPandaFile(const panda_file::File *file)308     void SetPandaFile(const panda_file::File *file)
309     {
310         pandaFile_ = file;
311     }
312 
GetPandaFile() const313     const panda_file::File *GetPandaFile() const
314     {
315         return pandaFile_;
316     }
317 
GetFileId() const318     panda_file::File::EntityId GetFileId() const
319     {
320         return fileId_;
321     }
322 
GetCodeId() const323     panda_file::File::EntityId GetCodeId() const
324     {
325         return codeId_;
326     }
327 
GetHotnessCounter() const328     inline int16_t GetHotnessCounter() const
329     {
330         return stor16Pair_.hotnessCounter;
331     }
332 
DecrementHotnessCounter()333     inline NO_THREAD_SANITIZE void DecrementHotnessCounter()
334     {
335         --stor16Pair_.hotnessCounter;
336     }
337 
338     static NO_THREAD_SANITIZE int16_t GetInitialHotnessCounter();
339 
340     NO_THREAD_SANITIZE void ResetHotnessCounter();
341 
342     template <class AccVRegisterPtrT>
343     NO_THREAD_SANITIZE void SetAcc([[maybe_unused]] AccVRegisterPtrT acc);
344     template <class AccVRegisterPtrT>
345     NO_THREAD_SANITIZE void SetAcc([[maybe_unused]] ManagedThread *thread, [[maybe_unused]] AccVRegisterPtrT acc);
346 
347     // NO_THREAD_SANITIZE because of perfomance degradation (see commit 7c913cb1 and MR 997#note_113500)
348     template <bool IS_CALL, class AccVRegisterPtrT>
349     NO_THREAD_SANITIZE bool DecrementHotnessCounter(uintptr_t bytecodeOffset, [[maybe_unused]] AccVRegisterPtrT cc,
350                                                     bool osr = false,
351                                                     coretypes::TaggedValue func = coretypes::TaggedValue::Hole());
352 
353     template <bool IS_CALL, class AccVRegisterPtrT>
354     NO_THREAD_SANITIZE bool DecrementHotnessCounter(ManagedThread *thread, uintptr_t bytecodeOffset,
355                                                     [[maybe_unused]] AccVRegisterPtrT cc, bool osr = false,
356                                                     coretypes::TaggedValue func = coretypes::TaggedValue::Hole());
357 
358     // NOTE(xucheng): change the input type to uint16_t when we don't input the max num of int32_t
SetHotnessCounter(uint32_t counter)359     inline NO_THREAD_SANITIZE void SetHotnessCounter(uint32_t counter)
360     {
361         stor16Pair_.hotnessCounter = static_cast<uint16_t>(counter);
362     }
363 
364     PANDA_PUBLIC_API int64_t GetBranchTakenCounter(uint32_t pc);
365     PANDA_PUBLIC_API int64_t GetBranchNotTakenCounter(uint32_t pc);
366 
367     int64_t GetThrowTakenCounter(uint32_t pc);
368 
GetCompiledEntryPoint()369     const void *GetCompiledEntryPoint()
370     {
371         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
372         // should become visible
373         return compiledEntryPoint_.load(std::memory_order_acquire);
374     }
375 
GetCompiledEntryPoint() const376     const void *GetCompiledEntryPoint() const
377     {
378         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
379         // should become visible
380         return compiledEntryPoint_.load(std::memory_order_acquire);
381     }
382 
SetCompiledEntryPoint(const void *entryPoint)383     void SetCompiledEntryPoint(const void *entryPoint)
384     {
385         // Atomic with release order reason: data race with compiled_entry_point_ with dependecies on writes before the
386         // store which should become visible acquire
387         compiledEntryPoint_.store(entryPoint, std::memory_order_release);
388     }
389 
SetInterpreterEntryPoint()390     void SetInterpreterEntryPoint()
391     {
392         if (!IsNative()) {
393             SetCompiledEntryPoint(GetCompiledCodeToInterpreterBridge(this));
394         }
395     }
396 
HasCompiledCode() const397     bool HasCompiledCode() const
398     {
399         auto entryPoint = GetCompiledEntryPoint();
400         return entryPoint != GetCompiledCodeToInterpreterBridge() &&
401                entryPoint != GetCompiledCodeToInterpreterBridgeDyn();
402     }
403 
GetCompilationStatus() const404     inline CompilationStage GetCompilationStatus() const
405     {
406         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
407         // should become visible
408         return static_cast<CompilationStage>((accessFlags_.load(std::memory_order_acquire) & COMPILATION_STATUS_MASK) >>
409                                              COMPILATION_STATUS_SHIFT);
410     }
411 
GetCompilationStatus(uint32_t value)412     inline CompilationStage GetCompilationStatus(uint32_t value)
413     {
414         return static_cast<CompilationStage>((value & COMPILATION_STATUS_MASK) >> COMPILATION_STATUS_SHIFT);
415     }
416 
SetCompilationStatus(enum CompilationStage newStatus)417     inline void SetCompilationStatus(enum CompilationStage newStatus)
418     {
419         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
420         // should become visible
421         auto result = (accessFlags_.load(std::memory_order_acquire) & ~COMPILATION_STATUS_MASK) |
422                       static_cast<uint32_t>(newStatus) << COMPILATION_STATUS_SHIFT;
423         // Atomic with release order reason: data race with access_flags_ with dependecies on writes before the store
424         // which should become visible acquire
425         accessFlags_.store(result, std::memory_order_release);
426     }
427 
AtomicSetCompilationStatus(enum CompilationStage oldStatus, enum CompilationStage newStatus)428     inline bool AtomicSetCompilationStatus(enum CompilationStage oldStatus, enum CompilationStage newStatus)
429     {
430         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
431         // should become visible
432         uint32_t oldValue = accessFlags_.load(std::memory_order_acquire);
433         while (GetCompilationStatus(oldValue) == oldStatus) {
434             uint32_t newValue = MakeCompilationStatusValue(oldValue, newStatus);
435             if (accessFlags_.compare_exchange_strong(oldValue, newValue)) {
436                 return true;
437             }
438         }
439         return false;
440     }
441 
442     panda_file::Type GetReturnType() const;
443 
444     panda_file::File::StringData GetRefReturnType() const;
445 
446     // idx - index number of the argument in the signature
447     PANDA_PUBLIC_API panda_file::Type GetArgType(size_t idx) const;
448 
449     PANDA_PUBLIC_API panda_file::File::StringData GetRefArgType(size_t idx) const;
450 
451     template <typename Callback>
452     void EnumerateTypes(Callback handler) const;
453 
454     PANDA_PUBLIC_API panda_file::File::StringData GetName() const;
455 
456     PANDA_PUBLIC_API panda_file::File::StringData GetClassName() const;
457 
458     PANDA_PUBLIC_API PandaString GetFullName(bool withSignature = false) const;
459     PANDA_PUBLIC_API PandaString GetLineNumberAndSourceFile(uint32_t bcOffset) const;
460 
461     static uint32_t GetFullNameHashFromString(const PandaString &str);
462     static uint32_t GetClassNameHashFromString(const PandaString &str);
463 
464     PANDA_PUBLIC_API Proto GetProto() const;
465 
466     PANDA_PUBLIC_API ProtoId GetProtoId() const;
467 
GetFrameSize() const468     size_t GetFrameSize() const
469     {
470         return Frame::GetAllocSize(GetNumArgs() + GetNumVregs(), EMPTY_EXT_FRAME_DATA_SIZE);
471     }
472 
473     uint32_t GetNumericalAnnotation(AnnotationField fieldId) const;
474     panda_file::File::StringData GetStringDataAnnotation(AnnotationField fieldId) const;
475 
GetAccessFlags() const476     uint32_t GetAccessFlags() const
477     {
478         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
479         // should become visible
480         return accessFlags_.load(std::memory_order_acquire);
481     }
482 
SetAccessFlags(uint32_t accessFlags)483     void SetAccessFlags(uint32_t accessFlags)
484     {
485         // Atomic with release order reason: data race with access_flags_ with dependecies on writes before the store
486         // which should become visible acquire
487         accessFlags_.store(accessFlags, std::memory_order_release);
488     }
489 
IsStatic() const490     bool IsStatic() const
491     {
492         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
493         // should become visible
494         return (accessFlags_.load(std::memory_order_acquire) & ACC_STATIC) != 0;
495     }
496 
IsNative() const497     bool IsNative() const
498     {
499         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
500         // should become visible
501         return (accessFlags_.load(std::memory_order_acquire) & ACC_NATIVE) != 0;
502     }
503 
IsPublic() const504     bool IsPublic() const
505     {
506         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
507         // should become visible
508         return (accessFlags_.load(std::memory_order_acquire) & ACC_PUBLIC) != 0;
509     }
510 
IsPrivate() const511     bool IsPrivate() const
512     {
513         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
514         // should become visible
515         return (accessFlags_.load(std::memory_order_acquire) & ACC_PRIVATE) != 0;
516     }
517 
IsProtected() const518     bool IsProtected() const
519     {
520         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
521         // should become visible
522         return (accessFlags_.load(std::memory_order_acquire) & ACC_PROTECTED) != 0;
523     }
524 
IsIntrinsic() const525     bool IsIntrinsic() const
526     {
527         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
528         // should become visible
529         return (accessFlags_.load(std::memory_order_acquire) & ACC_INTRINSIC) != 0;
530     }
531 
IsSynthetic() const532     bool IsSynthetic() const
533     {
534         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
535         // should become visible
536         return (accessFlags_.load(std::memory_order_acquire) & ACC_SYNTHETIC) != 0;
537     }
538 
IsAbstract() const539     bool IsAbstract() const
540     {
541         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
542         // should become visible
543         return (accessFlags_.load(std::memory_order_acquire) & ACC_ABSTRACT) != 0;
544     }
545 
IsFinal() const546     bool IsFinal() const
547     {
548         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
549         // should become visible
550         return (accessFlags_.load(std::memory_order_acquire) & ACC_FINAL) != 0;
551     }
552 
IsSynchronized() const553     bool IsSynchronized() const
554     {
555         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
556         // should become visible
557         return (accessFlags_.load(std::memory_order_acquire) & ACC_SYNCHRONIZED) != 0;
558     }
559 
HasVarArgs() const560     bool HasVarArgs() const
561     {
562         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
563         // should become visible
564         return (accessFlags_.load(std::memory_order_acquire) & ACC_VARARGS) != 0;
565     }
566 
HasSingleImplementation() const567     bool HasSingleImplementation() const
568     {
569         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
570         // should become visible
571         return (accessFlags_.load(std::memory_order_acquire) & ACC_SINGLE_IMPL) != 0;
572     }
573 
IsProfiled() const574     bool IsProfiled() const
575     {
576         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
577         // should become visible
578         return (accessFlags_.load(std::memory_order_acquire) & ACC_PROFILING) != 0;
579     }
580 
IsDestroyed() const581     bool IsDestroyed() const
582     {
583         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
584         // should become visible
585         return (accessFlags_.load(std::memory_order_acquire) & ACC_DESTROYED) != 0;
586     }
587 
SetHasSingleImplementation(bool v)588     void SetHasSingleImplementation(bool v)
589     {
590         if (v) {
591             // Atomic with acq_rel order reason: data race with access_flags_ with dependecies on reads after the load
592             // and on writes before the store
593             accessFlags_.fetch_or(ACC_SINGLE_IMPL, std::memory_order_acq_rel);
594         } else {
595             // Atomic with acq_rel order reason: data race with access_flags_ with dependecies on reads after the load
596             // and on writes before the store
597             accessFlags_.fetch_and(~ACC_SINGLE_IMPL, std::memory_order_acq_rel);
598         }
599     }
600 
SetProfiled()601     void SetProfiled()
602     {
603         ASSERT(!IsIntrinsic());
604         // Atomic with acq_rel order reason: data race with access_flags_ with dependecies on reads after the load
605         // and on writes before the store
606         accessFlags_.fetch_or(ACC_PROFILING, std::memory_order_acq_rel);
607     }
608 
SetDestroyed()609     void SetDestroyed()
610     {
611         ASSERT(!IsIntrinsic());
612         // Atomic with acq_rel order reason: data race with access_flags_ with dependecies on reads after the load
613         // and on writes before the store
614         accessFlags_.fetch_or(ACC_DESTROYED, std::memory_order_acq_rel);
615     }
616 
GetSingleImplementation()617     Method *GetSingleImplementation()
618     {
619         return HasSingleImplementation() ? this : nullptr;
620     }
621 
SetIntrinsic(intrinsics::Intrinsic intrinsic)622     void SetIntrinsic(intrinsics::Intrinsic intrinsic)
623     {
624         ASSERT(!IsIntrinsic());
625         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
626         // should become visible
627         ASSERT((accessFlags_.load(std::memory_order_acquire) & INTRINSIC_MASK) == 0);
628         auto result = ACC_INTRINSIC | static_cast<uint32_t>(intrinsic) << INTRINSIC_SHIFT;
629         // Atomic with acq_rel order reason: data race with access_flags_ with dependecies on reads after the load and
630         // on writes before the store
631         accessFlags_.fetch_or(result, std::memory_order_acq_rel);
632     }
633 
GetIntrinsic() const634     intrinsics::Intrinsic GetIntrinsic() const
635     {
636         ASSERT(IsIntrinsic());
637         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
638         // should become visible
639         return static_cast<intrinsics::Intrinsic>((accessFlags_.load(std::memory_order_acquire) & INTRINSIC_MASK) >>
640                                                   INTRINSIC_SHIFT);
641     }
642 
SetVTableIndex(uint16_t vtableIndex)643     void SetVTableIndex(uint16_t vtableIndex)
644     {
645         stor16Pair_.vtableIndex = vtableIndex;
646     }
647 
GetVTableIndex() const648     uint16_t GetVTableIndex() const
649     {
650         return stor16Pair_.vtableIndex;
651     }
652 
SetNativePointer(void *nativePointer)653     void SetNativePointer(void *nativePointer)
654     {
655         ASSERT((IsNative() || IsProxy()));
656         // Atomic with relaxed order reason: data race with native_pointer_ with no synchronization or ordering
657         // constraints imposed on other reads or writes NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
658         pointer_.nativePointer.store(nativePointer, std::memory_order_relaxed);
659     }
660 
GetNativePointer() const661     void *GetNativePointer() const
662     {
663         ASSERT((IsNative() || IsProxy()));
664         // Atomic with relaxed order reason: data race with native_pointer_ with no synchronization or ordering
665         // constraints imposed on other reads or writes NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
666         return pointer_.nativePointer.load(std::memory_order_relaxed);
667     }
668 
GetShorty() const669     const uint16_t *GetShorty() const
670     {
671         return shorty_;
672     }
673 
674     uint32_t FindCatchBlockInPandaFile(const Class *cls, uint32_t pc) const;
675     uint32_t FindCatchBlock(const Class *cls, uint32_t pc) const;
676 
677     PANDA_PUBLIC_API panda_file::Type GetEffectiveArgType(size_t idx) const;
678 
679     PANDA_PUBLIC_API panda_file::Type GetEffectiveReturnType() const;
680 
SetIsDefaultInterfaceMethod()681     void SetIsDefaultInterfaceMethod()
682     {
683         // Atomic with acq_rel order reason: data race with access_flags_ with dependecies on reads after the load and
684         // on writes before the store
685         accessFlags_.fetch_or(ACC_DEFAULT_INTERFACE_METHOD, std::memory_order_acq_rel);
686     }
687 
IsDefaultInterfaceMethod() const688     bool IsDefaultInterfaceMethod() const
689     {
690         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
691         // should become visible
692         return (accessFlags_.load(std::memory_order_acquire) & ACC_DEFAULT_INTERFACE_METHOD) != 0;
693     }
694 
IsConstructor() const695     bool IsConstructor() const
696     {
697         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
698         // should become visible
699         return (accessFlags_.load(std::memory_order_acquire) & ACC_CONSTRUCTOR) != 0;
700     }
701 
IsInstanceConstructor() const702     bool IsInstanceConstructor() const
703     {
704         return IsConstructor() && !IsStatic();
705     }
706 
IsStaticConstructor() const707     bool IsStaticConstructor() const
708     {
709         return IsConstructor() && IsStatic();
710     }
711 
GetAccessFlagsOffset()712     static constexpr uint32_t GetAccessFlagsOffset()
713     {
714         return MEMBER_OFFSET(Method, accessFlags_);
715     }
GetNumArgsOffset()716     static constexpr uint32_t GetNumArgsOffset()
717     {
718         return MEMBER_OFFSET(Method, numArgs_);
719     }
GetVTableIndexOffset()720     static constexpr uint32_t GetVTableIndexOffset()
721     {
722         return MEMBER_OFFSET(Method, stor16Pair_) + MEMBER_OFFSET(Storage16Pair, vtableIndex);
723     }
GetHotnessCounterOffset()724     static constexpr uint32_t GetHotnessCounterOffset()
725     {
726         return MEMBER_OFFSET(Method, stor16Pair_) + MEMBER_OFFSET(Storage16Pair, hotnessCounter);
727     }
GetClassOffset()728     static constexpr uint32_t GetClassOffset()
729     {
730         return MEMBER_OFFSET(Method, classWord_);
731     }
732 
GetCompiledEntryPointOffset()733     static constexpr uint32_t GetCompiledEntryPointOffset()
734     {
735         return MEMBER_OFFSET(Method, compiledEntryPoint_);
736     }
GetPandaFileOffset()737     static constexpr uint32_t GetPandaFileOffset()
738     {
739         return MEMBER_OFFSET(Method, pandaFile_);
740     }
GetCodeIdOffset()741     static constexpr uint32_t GetCodeIdOffset()
742     {
743         return MEMBER_OFFSET(Method, codeId_);
744     }
GetNativePointerOffset()745     static constexpr uint32_t GetNativePointerOffset()
746     {
747         return MEMBER_OFFSET(Method, pointer_);
748     }
    // Byte offset of the shorty_ field (method signature shorty) within a Method object.
    static constexpr uint32_t GetShortyOffset()
    {
        return MEMBER_OFFSET(Method, shorty_);
    }
753 
    // Invoke `callback` for each try block of this method's code item.
    template <typename Callback>
    void EnumerateTryBlocks(Callback callback) const;

    // Invoke `callback` for each catch block of this method's code item.
    template <typename Callback>
    void EnumerateCatchBlocks(Callback callback) const;

    // Invoke `callback` for each exception handler of this method's code item.
    template <typename Callback>
    void EnumerateExceptionHandlers(Callback callback) const;
762 
CalcUniqId(const panda_file::File *file, panda_file::File::EntityId fileId)763     static inline UniqId CalcUniqId(const panda_file::File *file, panda_file::File::EntityId fileId)
764     {
765         constexpr uint64_t HALF = 32ULL;
766         uint64_t uid = file->GetUniqId();
767         uid <<= HALF;
768         uid |= fileId.GetOffset();
769         return uid;
770     }
771 
772     // for synthetic methods, like array .ctor
773     static UniqId CalcUniqId(const uint8_t *classDescr, const uint8_t *name);
774 
    // Unique id of this method: the owning panda file's id combined with this
    // method's entity id offset (see CalcUniqId above).
    UniqId GetUniqId() const
    {
        return CalcUniqId(pandaFile_, fileId_);
    }
779 
    // Map a bytecode offset inside this method to a source line number.
    size_t GetLineNumFromBytecodeOffset(uint32_t bcOffset) const;

    // Source-file string data of the declaring class.
    panda_file::File::StringData GetClassSourceFile() const;

    // Begin/end collecting profiling data for this method.
    PANDA_PUBLIC_API void StartProfiling();
    PANDA_PUBLIC_API void StopProfiling();

    bool IsProxy() const;
788 
GetProfilingData()789     ProfilingData *GetProfilingData()
790     {
791         if (UNLIKELY(IsNative() || IsProxy())) {
792             return nullptr;
793         }
794         // Atomic with acquire order reason: data race with profiling_data_ with dependecies on reads after the load
795         // which should become visible NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
796         return pointer_.profilingData.load(std::memory_order_acquire);
797     }
798 
GetProfilingDataWithoutCheck()799     ProfilingData *GetProfilingDataWithoutCheck()
800     {
801         // Atomic with acquire order reason: data race with profiling_data_ with dependecies on reads after the load
802         // which should become visible NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
803         return pointer_.profilingData.load(std::memory_order_acquire);
804     }
805 
GetProfilingData() const806     const ProfilingData *GetProfilingData() const
807     {
808         if (UNLIKELY(IsNative() || IsProxy())) {
809             return nullptr;
810         }
811         // Atomic with acquire order reason: data race with profiling_data_ with dependecies on reads after the load
812         // which should become visible NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
813         return pointer_.profilingData.load(std::memory_order_acquire);
814     }
815 
IsProfiling() const816     bool IsProfiling() const
817     {
818         return GetProfilingData() != nullptr;
819     }
820 
IsProfilingWithoutLock() const821     bool IsProfilingWithoutLock() const
822     {
823         if (UNLIKELY(IsNative() || IsProxy())) {
824             return false;
825         }
826         // Atomic with acquire order reason: data race with profiling_data_ with dependecies on reads after the load
827         // which should become visible NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
828         return pointer_.profilingData.load(std::memory_order_acquire) != nullptr;
829     }
830 
    // Record / query / run verification for this method.
    void SetVerified(bool result);
    bool IsVerified() const;
    PANDA_PUBLIC_API bool Verify();
    template <bool IS_CALL>
    bool TryVerify();
836 
GetVerificationStage(uint32_t value)837     inline static VerificationStage GetVerificationStage(uint32_t value)
838     {
839         return static_cast<VerificationStage>((value & VERIFICATION_STATUS_MASK) >> VERIFICATION_STATUS_SHIFT);
840     }
841 
GetVerificationStage() const842     inline VerificationStage GetVerificationStage() const
843     {
844         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
845         // should become visible
846         return GetVerificationStage(accessFlags_.load(std::memory_order_acquire));
847     }
848 
SetVerificationStage(enum VerificationStage newStage)849     inline void SetVerificationStage(enum VerificationStage newStage)
850     {
851         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
852         // should become visible
853         uint32_t oldValue = accessFlags_.load(std::memory_order_acquire);
854         uint32_t newValue = MakeVerificationStageValue(oldValue, newStage);
855         while (!accessFlags_.compare_exchange_weak(oldValue, newValue, std::memory_order_acq_rel)) {
856             newValue = MakeVerificationStageValue(oldValue, newStage);
857         }
858     }
859 
private:
    // Scan bytecode starting at `inst`, appending offsets of virtual calls,
    // branches and throws to the respective vectors.
    inline void FillVecsByInsts(BytecodeInstruction &inst, PandaVector<uint32_t> &vcalls,
                                PandaVector<uint32_t> &branches, PandaVector<uint32_t> &throws) const;

    // Invoke this method via its compiled entry point.
    Value InvokeCompiledCode(ManagedThread *thread, uint32_t numArgs, Value *args);
865 
GetReturnValueFromTaggedValue(uint64_t retValue)866     Value GetReturnValueFromTaggedValue(uint64_t retValue)
867     {
868         panda_file::Type retType = GetReturnType();
869         if (retType.GetId() == panda_file::Type::TypeId::VOID) {
870             return Value(static_cast<int64_t>(0));
871         }
872         if (retType.GetId() == panda_file::Type::TypeId::REFERENCE) {
873             return Value(reinterpret_cast<ObjectHeader *>(retValue));
874         }
875         return Value(retValue);
876     }
877 
MakeCompilationStatusValue(uint32_t value, CompilationStage newStatus)878     inline static uint32_t MakeCompilationStatusValue(uint32_t value, CompilationStage newStatus)
879     {
880         value &= ~COMPILATION_STATUS_MASK;
881         value |= static_cast<uint32_t>(newStatus) << COMPILATION_STATUS_SHIFT;
882         return value;
883     }
884 
MakeVerificationStageValue(uint32_t value, VerificationStage newStage)885     inline static uint32_t MakeVerificationStageValue(uint32_t value, VerificationStage newStage)
886     {
887         value &= ~VERIFICATION_STATUS_MASK;
888         value |= static_cast<uint32_t>(newStage) << VERIFICATION_STATUS_SHIFT;
889         return value;
890     }
891 
    // Invoke this method via the interpreter.
    template <class InvokeHelper, class ValueT>
    ValueT InvokeInterpretedCode(ManagedThread *thread, uint32_t numActualArgs, ValueT *args);

    // Create and initialize an interpreter frame for this method; the frame is
    // released through FrameDeleter when the returned smart pointer is destroyed.
    template <class InvokeHelper, class ValueT>
    PandaUniquePtr<Frame, FrameDeleter> InitFrame(ManagedThread *thread, uint32_t numActualArgs, ValueT *args,
                                                  Frame *currentFrame);

    // As InitFrame, but with an explicit number of virtual registers.
    template <class InvokeHelper, class ValueT, bool IS_NATIVE_METHOD>
    PandaUniquePtr<Frame, FrameDeleter> InitFrameWithNumVRegs(ManagedThread *thread, uint32_t numVregs,
                                                              uint32_t numActualArgs, ValueT *args,
                                                              Frame *currentFrame);

    // Result to report when the invocation ended with a pending exception.
    template <class InvokeHelper, class ValueT>
    ValueT GetReturnValueFromException();

    // Result taken from the accumulator virtual register after interpretation.
    template <class InvokeHelper, class ValueT>
    ValueT GetReturnValueFromAcc(interpreter::AccVRegister &aacVreg);

    // Common invocation path shared by regular and proxy calls.
    template <class InvokeHelper, class ValueT>
    ValueT InvokeImpl(ManagedThread *thread, uint32_t numActualArgs, ValueT *args, bool proxyCall);
912 
private:
    union PointerInMethod {
        // It's the native pointer when the method is a native or proxy method.
        std::atomic<void *> nativePointer;
        // It's the profiling data when the method isn't a native or proxy method.
        std::atomic<ProfilingData *> profilingData;
    };

    // vtable index and hotness counter packed into one 32-bit slot; their
    // offsets are exposed via GetVTableIndexOffset()/GetHotnessCounterOffset().
    struct Storage16Pair {
        uint16_t vtableIndex;
        int16_t hotnessCounter;
    };

    // Access flags plus encoded compilation/verification state bit fields;
    // mutated concurrently, hence atomic.
    std::atomic_uint32_t accessFlags_;
    uint32_t numArgs_;
    Storage16Pair stor16Pair_;
    // Declaring class, stored as a class word.
    ClassHelper::ClassWordSize classWord_;

    // Entry point of the compiled code, if any.
    std::atomic<const void *> compiledEntryPoint_ {nullptr};
    const panda_file::File *pandaFile_;
    union PointerInMethod pointer_ {
    };

    // Ids of this method and of its code item within pandaFile_.
    panda_file::File::EntityId fileId_;
    panda_file::File::EntityId codeId_;
    // Shorty signature descriptor of the method.
    const uint16_t *shorty_;
};
940 
// Field offsets are exposed via the Get*Offset() accessors above; a vtable
// pointer would shift every field, so Method must stay non-polymorphic.
static_assert(!std::is_polymorphic_v<Method>);
942 
943 }  // namespace ark
944 
945 #endif  // PANDA_RUNTIME_METHOD_H_
946