/*
 * Copyright (c) 2022-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "ecmascript/deoptimizer/deoptimizer.h"

#include <cmath>

#include "ecmascript/dfx/stackinfo/js_stackinfo.h"
#include "ecmascript/interpreter/slow_runtime_stub.h"
#include "ecmascript/jit/jit.h"
#include "ecmascript/stubs/runtime_stubs-inl.h"

namespace panda::ecmascript {
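// Helper that rebuilds interpreter frames in place on the JS stack: it starts at the interpreter
// frame end of the current SP frame and grows the reconstructed frame contents downwards.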
class FrameWriter {
public:
    explicit FrameWriter(Deoptimizier *deoptimizier) : thread_(deoptimizier->GetThread())
    {
        JSTaggedType *prevSp = const_cast<JSTaggedType *>(thread_->GetCurrentSPFrame());
        start_ = top_ = EcmaInterpreter::GetInterpreterFrameEnd(thread_, prevSp);
    }

    void PushValue(JSTaggedType value)
    {
        *(--top_) = value;
    }

    void PushRawValue(uintptr_t value)
    {
        *(--top_) = value;
    }

    bool Reserve(size_t size)
    {
        return !thread_->DoStackOverflowCheck(top_ - size);
    }

    AsmInterpretedFrame *ReserveAsmInterpretedFrame()
    {
        auto frame = AsmInterpretedFrame::GetFrameFromSp(top_);
        top_ = reinterpret_cast<JSTaggedType *>(frame);
        return frame;
    }

    JSTaggedType *GetStart() const
    {
        return start_;
    }

    JSTaggedType *GetTop() const
    {
        return top_;
    }

    JSTaggedType *GetFirstFrame() const
    {
        return firstFrame_;
    }

    void RecordFirstFrame()
    {
        firstFrame_ = top_;
    }

    void ReviseValueByIndex(JSTaggedType value, size_t index)
    {
        ASSERT(index < static_cast<size_t>(start_ - top_));
        *(top_ + index) = value;
    }

private:
    JSThread *thread_ {nullptr};
    JSTaggedType *start_ {nullptr};
    JSTaggedType *top_ {nullptr};
    JSTaggedType *firstFrame_ {nullptr};
};

Deoptimizier::Deoptimizier(JSThread *thread, size_t depth) : thread_(thread), inlineDepth_(depth)
{
    CalleeReg callreg;
    numCalleeRegs_ = static_cast<size_t>(callreg.GetCallRegNum());
    JSRuntimeOptions options = thread_->GetEcmaVM()->GetJSOptions();
    traceDeopt_ = options.GetTraceDeopt();
}

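// Decode every entry of the deopt bundle: each value is read either from a stack slot addressed
// via the callsite SP/FP plus an offset, or taken from an inline constant. The encoded id is split
// into (inline depth, vreg index); ordinary vregs are stored in deoptVregs_, while the special
// PC_OFFSET_INDEX entry records the bytecode offset to resume at for that depth.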
void Deoptimizier::CollectVregs(const std::vector<kungfu::ARKDeopt>& deoptBundle, size_t shift)
{
    deoptVregs_.clear();
    for (size_t i = 0; i < deoptBundle.size(); i++) {
        ARKDeopt deopt = deoptBundle.at(i);
        JSTaggedType v;
        VRegId id = deopt.id;
        if (std::holds_alternative<DwarfRegAndOffsetType>(deopt.value)) {
            ASSERT(deopt.kind == LocationTy::Kind::INDIRECT);
            auto value = std::get<DwarfRegAndOffsetType>(deopt.value);
            DwarfRegType dwarfReg = value.first;
            OffsetType offset = value.second;
            ASSERT(dwarfReg == GCStackMapRegisters::FP || dwarfReg == GCStackMapRegisters::SP);
            uintptr_t addr;
            if (dwarfReg == GCStackMapRegisters::SP) {
                addr = context_.callsiteSp + offset;
            } else {
                addr = context_.callsiteFp + offset;
            }
            v = *(reinterpret_cast<JSTaggedType *>(addr));
        } else if (std::holds_alternative<LargeInt>(deopt.value)) {
            ASSERT(deopt.kind == LocationTy::Kind::CONSTANTNDEX);
            v = JSTaggedType(static_cast<int64_t>(std::get<LargeInt>(deopt.value)));
        } else {
            ASSERT(std::holds_alternative<IntType>(deopt.value));
            ASSERT(deopt.kind == LocationTy::Kind::CONSTANT);
            v = JSTaggedType(static_cast<int64_t>(std::get<IntType>(deopt.value)));
        }
        size_t curDepth = DecodeDeoptDepth(id, shift);
        OffsetType vregId = static_cast<OffsetType>(DecodeVregIndex(id, shift));
        if (vregId != static_cast<OffsetType>(SpecVregIndex::PC_OFFSET_INDEX)) {
            deoptVregs_.insert({{curDepth, vregId}, JSHandle<JSTaggedValue>(thread_, JSTaggedValue(v))});
        } else {
            pc_.insert({curDepth, static_cast<size_t>(v)});
        }
    }
}

// When AOT-compiled code triggers a deopt, the frame layout is as follows.
// * OptimizedJSFunctionFrame layout description:
//   +--------------------------+ ---------------
//   |          ......          |       ^
//   |          ......          | callerFunction
//   |          ......          |       |
//   |--------------------------|       |
//   |           args           |       v
//   +--------------------------+ ---------------
//   |        returnAddr        |       ^
//   |--------------------------|       |
//   |        callsiteFp        |       |
//   |--------------------------| OptimizedJSFunction FrameType:OPTIMIZED_JS_FUNCTION_FRAME
//   |        frameType         |       |
//   |--------------------------|       |
//   |       call-target        |       |
//   |--------------------------|       |
//   |          lexEnv          |       |
//   |--------------------------|       |
//   |       ...........        |       v
//   +--------------------------+ ---------------
//   |        returnAddr        |       ^
//   |--------------------------|       |
//   |        callsiteFp        |       |
//   |--------------------------| __llvm_deoptimize FrameType:OPTIMIZED_FRAME
//   |        frameType         |       |
//   |--------------------------|       |
//   |      No CalleeSave       |       |
//   |        Registers         |       v
//   +--------------------------+ ---------------
//   |        returnAddr        |       ^
//   |--------------------------|       |
//   |        callsiteFp        |       |
//   |--------------------------| DeoptHandlerAsm FrameType:ASM_BRIDGE_FRAME
//   |        frameType         |       |
//   |--------------------------|       |
//   |           glue           |       |
//   |--------------------------|       |
//   |   CalleeSave Registers   |       v
//   +--------------------------+ ---------------
//   |        .........         |       ^
//   |        .........         | CallRuntime FrameType:LEAVE_FRAME
//   |        .........         |       |
//   |        .........         |       v
//   |--------------------------| ---------------

// After gathering the necessary information (after the runtime call), the frame layout once the
// asm interpreter frames have been constructed is as follows:
//   +----------------------------------+---------+
//   |              ......              |    ^
//   |              ......              | callerFunction
//   |              ......              |    |
//   |----------------------------------|    |
//   |               args               |    v
//   +----------------------------------+---------+
//   |            returnAddr            |    ^
//   |----------------------------------|    |
//   |            frameType             |    |
//   |----------------------------------| ASM_INTERPRETER_BRIDGE_FRAME
//   |            callsiteFp            |    |
//   |----------------------------------|    |
//   |           ...........            |    v
//   +----------------------------------+---------+
//   |            returnAddr            |
//   |----------------------------------|
//   |            argv[n-1]             |
//   |----------------------------------|
//   |              ......              |
//   |----------------------------------|
//   |    thisArg [maybe not exist]     |
//   |----------------------------------|
//   |   newTarget [maybe not exist]    |
//   |----------------------------------|
//   |              ......              |
//   |----------------------------------|
//   |   Vregs [not exist in native]    |
//   +----------------------------------+--------+
//   |             . . . .              |    ^
//   |       InterpretedFrameBase       |    |
//   |             . . . .              |    |
//   |----------------------------------|    |
//   |        pc(bytecode addr)         |    |
//   |----------------------------------|    |
//   |    sp(current stack pointer)     |    |
//   |----------------------------------| AsmInterpretedFrame 0
//   |             callSize             |    |
//   |----------------------------------|    |
//   |               env                |    |
//   |----------------------------------|    |
//   |               acc                |    |
//   |----------------------------------|    |
//   |             thisObj              |    |
//   |----------------------------------|    |
//   |           call-target            |    v
//   +----------------------------------+--------+
//   |            argv[n-1]             |
//   |----------------------------------|
//   |              ......              |
//   |----------------------------------|
//   |    thisArg [maybe not exist]     |
//   |----------------------------------|
//   |   newTarget [maybe not exist]    |
//   |----------------------------------|
//   |              ......              |
//   |----------------------------------|
//   |   Vregs [not exist in native]    |
//   +----------------------------------+--------+
//   |             . . . .              |    ^
//   |       InterpretedFrameBase       |    |
//   |             . . . .              |    |
//   |----------------------------------|    |
//   |        pc(bytecode addr)         |    |
//   |----------------------------------|    |
//   |    sp(current stack pointer)     |    |
//   |----------------------------------| AsmInterpretedFrame 1
//   |             callSize             |    |
//   |----------------------------------|    |
//   |               env                |    |
//   |----------------------------------|    |
//   |               acc                |    |
//   |----------------------------------|    |
//   |             thisObj              |    |
//   |----------------------------------|    |
//   |           call-target            |    v
//   +----------------------------------+--------+
//   |             . . . .              |    ^
//   |             . . . .              | AsmInterpretedFrame n
//   |             . . . .              |    v
//   +----------------------------------+--------+

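// Record the callee-saved register locations, the callsite SP/FP, the outer frame's argc/argv and
// the caller frame context (frame top, return address, caller FP) of the optimized frame being
// deoptimized; this context is later used to build the replacement interpreter frames.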
template<class T>
void Deoptimizier::AssistCollectDeoptBundleVec(FrameIterator &it, T &frame)
{
    CalleeRegAndOffsetVec calleeRegInfo;
    frame->GetFuncCalleeRegAndOffset(it, calleeRegInfo);
    context_.calleeRegAndOffset = calleeRegInfo;
    context_.callsiteSp = it.GetCallSiteSp();
    context_.callsiteFp = reinterpret_cast<uintptr_t>(it.GetSp());
    auto preFrameSp = frame->ComputePrevFrameSp(it);
    frameArgc_ = frame->GetArgc(preFrameSp);
    frameArgvs_ = frame->GetArgv(preFrameSp);
    stackContext_.callFrameTop_ = it.GetPrevFrameCallSiteSp();
    stackContext_.returnAddr_ = frame->GetReturnAddr();
    stackContext_.callerFp_ = reinterpret_cast<uintptr_t>(frame->GetPrevFrameFp());
}

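// Walk the frames starting from the last leave frame until the optimized (AOT or JIT) function
// frame that triggered the deopt is reached; collect its deopt bundle and, on the way, the
// callee-saved register area spilled below the ASM_BRIDGE_FRAME built by DeoptHandlerAsm.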
void Deoptimizier::CollectDeoptBundleVec(std::vector<ARKDeopt>& deoptBundle)
{
    JSTaggedType *lastLeave = const_cast<JSTaggedType *>(thread_->GetLastLeaveFrame());
    FrameIterator it(lastLeave, thread_);
    // note: the last deopt bridge frame is generated by DeoptHandlerAsm; callee-saved registers grow from this frame
    for (; !it.Done() && deoptBundle.empty(); it.Advance<GCVisitedFlag::DEOPT>()) {
        FrameType type = it.GetFrameType();
        switch (type) {
            case FrameType::OPTIMIZED_JS_FAST_CALL_FUNCTION_FRAME:
            case FrameType::OPTIMIZED_JS_FUNCTION_FRAME: {
                auto frame = it.GetFrame<OptimizedJSFunctionFrame>();
                frame->GetDeoptBundleInfo(it, deoptBundle);
                AssistCollectDeoptBundleVec(it, frame);
                break;
            }
            case FrameType::FASTJIT_FUNCTION_FRAME:
            case FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME: {
                auto frame = it.GetFrame<FASTJITFunctionFrame>();
                frame->GetDeoptBundleInfo(it, deoptBundle);
                AssistCollectDeoptBundleVec(it, frame);
                break;
            }
            case FrameType::ASM_BRIDGE_FRAME: {
                auto sp = reinterpret_cast<uintptr_t*>(it.GetSp());
                static constexpr size_t TYPE_GLUE_SLOT = 2; // 2: skip type & glue
                sp -= TYPE_GLUE_SLOT;
                calleeRegAddr_ = sp - numCalleeRegs_;
                break;
            }
            case FrameType::OPTIMIZED_FRAME:
            case FrameType::LEAVE_FRAME:
                break;
            default: {
                LOG_FULL(FATAL) << "frame type error!";
                UNREACHABLE();
            }
        }
    }
    ASSERT(!it.Done());
}

Method* Deoptimizier::GetMethod(JSTaggedValue &target)
{
    ECMAObject *callTarget = reinterpret_cast<ECMAObject*>(target.GetTaggedObject());
    ASSERT(callTarget != nullptr);
    Method *method = callTarget->GetCallTarget();
    return method;
}

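// Copy the callee-saved register values spilled by the optimized frame (addressed via callsiteFp
// plus the recorded offsets) into the register buffer below the bridge frame (calleeRegAddr_),
// ordered by CalleeReg::FindCallRegOrder.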
void Deoptimizier::RelocateCalleeSave()
{
    CalleeReg callreg;
    for (auto &it: context_.calleeRegAndOffset) {
        auto reg = it.first;
        auto offset = it.second;
        uintptr_t value = *(reinterpret_cast<uintptr_t *>(context_.callsiteFp + offset));
        int order = callreg.FindCallRegOrder(reg);
        calleeRegAddr_[order] = value;
    }
}

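// Push one interpreter frame's worth of virtual registers for the inline depth curDepth:
// an optional actual-argc slot, the declared arguments, the reserved this/new.target/func slots
// and the call-field vregs, then revise a0..aN with any updated deopt values.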
bool Deoptimizier::CollectVirtualRegisters(JSTaggedValue callTarget, Method *method, FrameWriter *frameWriter,
                                           size_t curDepth)
{
    int32_t actualNumArgs = 0;
    int32_t declaredNumArgs = 0;
    if (curDepth == 0) {
        actualNumArgs = static_cast<int32_t>(GetDeoptValue(curDepth,
            static_cast<int32_t>(SpecVregIndex::ACTUAL_ARGC_INDEX)).GetInt());
        declaredNumArgs = static_cast<int32_t>(method->GetNumArgsWithCallField());
    } else {
        // for inlined methods, actualNumArgs is equal to declaredNumArgs
        actualNumArgs = static_cast<int32_t>(method->GetNumArgsWithCallField());
        declaredNumArgs = static_cast<int32_t>(method->GetNumArgsWithCallField());
    }

    int32_t callFieldNumVregs = static_cast<int32_t>(method->GetNumVregsWithCallField());

    // layout of frame:
    // [maybe argc] [actual args] [reserved args] [call field virtual regs]

    // [maybe argc]
    bool isFastCall = JSFunctionBase::IsFastCallFromCallTarget(callTarget);
    if (!isFastCall && declaredNumArgs != actualNumArgs) {
        auto value = JSTaggedValue(actualNumArgs);
        frameWriter->PushValue(value.GetRawData());
    }
    int32_t virtualIndex = declaredNumArgs + callFieldNumVregs +
        static_cast<int32_t>(method->GetNumRevervedArgs()) - 1;
    if (!frameWriter->Reserve(static_cast<size_t>(virtualIndex))) {
        return false;
    }
    for (int32_t i = static_cast<int32_t>(declaredNumArgs - 1); i >= 0; i--) {
        JSTaggedValue value = JSTaggedValue::Undefined();
        // deopt value
        if (HasDeoptValue(curDepth, virtualIndex)) {
            value = GetDeoptValue(curDepth, virtualIndex);
        }
        frameWriter->PushValue(value.GetRawData());
        virtualIndex--;
    }

    // [reserved args]
    if (method->HaveThisWithCallField()) {
        JSTaggedValue value = deoptVregs_.at(
            {curDepth, static_cast<OffsetType>(SpecVregIndex::THIS_OBJECT_INDEX)}).GetTaggedValue();
        frameWriter->PushValue(value.GetRawData());
        virtualIndex--;
    }
    if (method->HaveNewTargetWithCallField()) {
        JSTaggedValue value = deoptVregs_.at(
            {curDepth, static_cast<OffsetType>(SpecVregIndex::NEWTARGET_INDEX)}).GetTaggedValue();
        frameWriter->PushValue(value.GetRawData());
        virtualIndex--;
    }
    if (method->HaveFuncWithCallField()) {
        JSTaggedValue value = deoptVregs_.at(
            {curDepth, static_cast<OffsetType>(SpecVregIndex::FUNC_INDEX)}).GetTaggedValue();
        frameWriter->PushValue(value.GetRawData());
        virtualIndex--;
    }

    // [call field virtual regs]
    for (int32_t i = virtualIndex; i >= 0; i--) {
        JSTaggedValue value = GetDeoptValue(curDepth, virtualIndex);
        frameWriter->PushValue(value.GetRawData());
        virtualIndex--;
    }
    // revise the a0 - aN virtual regs with their deopt values, for example: ldobjbyname key; sta a2;
    // this updates the value held in a2
    //          +--------------------------+          ^
    //          |            aN            |          |
    //          +--------------------------+          |
    //          |           ...            |          |
    //          +--------------------------+          |
    //          |         a2(this)         |          |
    //          +--------------------------+   revise correct vreg
    //          |      a1(newtarget)       |          |
    //          +--------------------------+          |
    //          |         a0(func)         |          |
    //          |--------------------------|          v
    //          |         v0 - vN          |
    //  sp -->  |--------------------------|
    int32_t vregsAndArgsNum = declaredNumArgs + callFieldNumVregs +
        static_cast<int32_t>(method->GetNumRevervedArgs());
    for (int32_t i = callFieldNumVregs; i < vregsAndArgsNum; i++) {
        JSTaggedValue value = JSTaggedValue::Undefined();
        if (HasDeoptValue(curDepth, i)) {
            value = GetDeoptValue(curDepth, i);
            frameWriter->ReviseValueByIndex(value.GetRawData(), i);
        }
    }
    return true;
}

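// Notify the PGO profiler when the function has no profile type info yet and, if deopt tracing is
// enabled, log the deopt reason, the JS stack trace and the bytecode instruction being resumed.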
void Deoptimizier::Dump(JSTaggedValue callTarget, kungfu::DeoptType type, size_t depth)
{
    if (thread_->IsPGOProfilerEnable()) {
        JSFunction *function = JSFunction::Cast(callTarget);
        auto profileTypeInfo = function->GetProfileTypeInfo();
        if (profileTypeInfo.IsUndefined()) {
            SlowRuntimeStub::NotifyInlineCache(thread_, function);
        }
    }
    if (traceDeopt_) {
        std::string checkType = DisplayItems(type);
        LOG_TRACE(INFO) << "Check Type: " << checkType;
        std::string data = JsStackInfo::BuildJsStackTrace(thread_, true);
        LOG_COMPILER(INFO) << "Deoptimize" << data;
        const uint8_t *pc = GetMethod(callTarget)->GetBytecodeArray() + pc_.at(depth);
        BytecodeInstruction inst(pc);
        LOG_COMPILER(INFO) << inst;
    }
}

std::string Deoptimizier::DisplayItems(DeoptType type)
{
    const std::map<DeoptType, const char *> strMap = {
#define DEOPT_NAME_MAP(NAME, TYPE) {DeoptType::TYPE, #NAME},
        GATE_META_DATA_DEOPT_REASON(DEOPT_NAME_MAP)
#undef DEOPT_NAME_MAP
    };
    if (strMap.count(type) > 0) {
        return strMap.at(type);
    }
    return "DeoptType-" + std::to_string(static_cast<uint8_t>(type));
}

// layout of frameWriter
//   |--------------------------| --------------> start(n)
//   |           args           |
//   |           this           |
//   |         newTarget        |
//   |        callTarget        |
//   |           vregs          |
//   |--------------------------|
//   |      ASM Interpreter     |
//   +--------------------------+ --------------> end(n)
//   |       outputcounts       |    outputcounts = end(n) - start(n)
//   |--------------------------| --------------> start(n-1)
//   |           args           |
//   |           this           |
//   |         newTarget        |
//   |        callTarget        |
//   |           vregs          |
//   |--------------------------|
//   |      ASM Interpreter     |
//   +--------------------------+ --------------> end(n-1)
//   |       outputcounts       |    outputcounts = end(n-1) - start(n-1)
//   |--------------------------| --------------> start(n-2)
//   |          ......          |
//   +--------------------------+ ---------------
//   |         callerFp_        |        ^
//   |        returnAddr_       |   stackContext
//   |       callFrameTop_      |        |
//   |        inlineDepth       |        v
//   |--------------------------| ---------------

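// Rebuild one AsmInterpretedFrame per inline depth, from the innermost inlined frame down to the
// outermost one, appending an output count after each; finally push the stack context
// (callerFp_, returnAddr_, callFrameTop_, inlineDepth_) used to link back to the caller frame.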
JSTaggedType Deoptimizier::ConstructAsmInterpretFrame()
{
    FrameWriter frameWriter(this);
    // Push asm interpreter frame
    for (int32_t curDepth = static_cast<int32_t>(inlineDepth_); curDepth >= 0; curDepth--) {
        auto start = frameWriter.GetTop();
        JSTaggedValue callTarget = GetDeoptValue(curDepth, static_cast<int32_t>(SpecVregIndex::FUNC_INDEX));
        auto method = GetMethod(callTarget);
        if (!CollectVirtualRegisters(callTarget, method, &frameWriter, curDepth)) {
            return JSTaggedValue::Exception().GetRawData();
        }
        AsmInterpretedFrame *statePtr = frameWriter.ReserveAsmInterpretedFrame();
        const uint8_t *resumePc = method->GetBytecodeArray() + pc_.at(curDepth);
        JSTaggedValue thisObj = GetDeoptValue(curDepth, static_cast<int32_t>(SpecVregIndex::THIS_OBJECT_INDEX));
        auto acc = GetDeoptValue(curDepth, static_cast<int32_t>(SpecVregIndex::ACC_INDEX));
        statePtr->function = callTarget;
        statePtr->acc = acc;
        statePtr->env = GetDeoptValue(curDepth, static_cast<int32_t>(SpecVregIndex::ENV_INDEX));
        statePtr->callSize = GetCallSize(curDepth, resumePc);
        statePtr->fp = 0; // need update
        statePtr->thisObj = thisObj;
        statePtr->pc = resumePc;
        // -uintptr_t skip lr
        if (curDepth == 0) {
            statePtr->base.prev = reinterpret_cast<JSTaggedType *>(stackContext_.callFrameTop_ - sizeof(uintptr_t));
        } else {
            statePtr->base.prev = 0; // need update
        }

        statePtr->base.type = FrameType::ASM_INTERPRETER_FRAME;

        // construct stack context
        auto end = frameWriter.GetTop();
        auto outputCount = start - end;
        frameWriter.PushRawValue(outputCount);
    }

    RelocateCalleeSave();

    frameWriter.PushRawValue(stackContext_.callerFp_);
    frameWriter.PushRawValue(stackContext_.returnAddr_);
    frameWriter.PushRawValue(stackContext_.callFrameTop_);
    frameWriter.PushRawValue(inlineDepth_);
    return reinterpret_cast<JSTaggedType>(frameWriter.GetTop());
}

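// After deoptimizing JIT code, clear the JIT hotness counter and raise the hotness threshold
// (multiplied by thresholdStep, saturating at JIT_DISABLE_FLAG) so the function is not immediately
// re-compiled, and drop the machine code cached in the profile type info cell.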
void Deoptimizier::ResetJitHotness(JSFunction *jsFunc) const
{
    if (jsFunc->GetMachineCode().IsMachineCodeObject()) {
        JSTaggedValue profileTypeInfoVal = jsFunc->GetProfileTypeInfo();
        if (!profileTypeInfoVal.IsUndefined()) {
            ProfileTypeInfo *profileTypeInfo = ProfileTypeInfo::Cast(profileTypeInfoVal.GetTaggedObject());
            profileTypeInfo->SetJitHotnessCnt(0);
            constexpr uint16_t thresholdStep = 4;
            constexpr uint16_t thresholdLimit = ProfileTypeInfo::JIT_DISABLE_FLAG / thresholdStep;
            uint16_t threshold = profileTypeInfo->GetJitHotnessThreshold();
            threshold = threshold >= thresholdLimit ? ProfileTypeInfo::JIT_DISABLE_FLAG : threshold * thresholdStep;
            profileTypeInfo->SetJitHotnessThreshold(threshold);
            ProfileTypeInfoCell::Cast(jsFunc->GetRawProfileTypeInfo())->SetMachineCode(thread_, JSTaggedValue::Hole());
            Method *method = Method::Cast(jsFunc->GetMethod().GetTaggedObject());
            LOG_JIT(DEBUG) << "reset jit hotness for func: " << method->GetMethodName() << ", threshold:" << threshold;
        }
    }
}

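// Switch the function back to the asm interpreter: reset its code entry to the matching
// (fast call or AOT) to-asm-interpreter bridge, clear its compiled-code flags and machine code,
// and reset JIT hotness; nothing is changed if the function is no longer in compiled state.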
void Deoptimizier::ClearCompiledCodeStatusWhenDeopt(JSFunction *func, Method *method)
{
    if (func->GetMachineCode().IsMachineCodeObject()) {
        Jit::GetInstance()->GetJitDfx()->SetJitDeoptCount();
    }
    if (func->IsCompiledCode()) {
        bool isFastCall = func->IsCompiledFastCall(); // get this flag before clearing it
        uintptr_t entry =
            isFastCall ? thread_->GetRTInterface(kungfu::RuntimeStubCSigns::ID_FastCallToAsmInterBridge)
                       : thread_->GetRTInterface(kungfu::RuntimeStubCSigns::ID_AOTCallToAsmInterBridge);
        func->SetCodeEntry(entry);
        method->ClearAOTStatusWhenDeopt(entry);
        func->ClearCompiledCodeFlags();
        ResetJitHotness(func);
        func->ClearMachineCode(thread_);
    } // Do not change the func code entry if the method is not AOT-compiled or deopt has already happened
}

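// For every inline depth of the deoptimized call chain: flag OSR machine code for deopt, dump the
// deopt info for the innermost frame (where the deopt occurred), and either decrement the method's
// remaining deopt threshold or, once it reaches zero, clear its compiled code status.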
void Deoptimizier::UpdateAndDumpDeoptInfo(kungfu::DeoptType type)
{
    // inlineDepth_ records the number of nested inlined calls at the point where the deopt occurs
    for (size_t i = 0; i <= inlineDepth_; i++) {
        JSTaggedValue callTarget = GetDeoptValue(i, static_cast<int32_t>(SpecVregIndex::FUNC_INDEX));
        auto func = JSFunction::Cast(callTarget.GetTaggedObject());
        if (func->GetMachineCode().IsMachineCodeObject()) {
            MachineCode *machineCode = MachineCode::Cast(func->GetMachineCode().GetTaggedObject());
            if (type != kungfu::DeoptType::OSRLOOPEXIT &&
                machineCode->GetOSROffset() != MachineCode::INVALID_OSR_OFFSET) {
                machineCode->SetOsrDeoptFlag(true);
            }
        }
        auto method = GetMethod(callTarget);
        if (i == inlineDepth_) {
            Dump(callTarget, type, i);
        }
        ASSERT(thread_ != nullptr);
        uint8_t deoptThreshold = method->GetDeoptThreshold();
        if (deoptThreshold > 0) {
            method->SetDeoptType(type);
            method->SetDeoptThreshold(--deoptThreshold);
        } else {
            ClearCompiledCodeStatusWhenDeopt(func, method);
        }
    }
}

// call instructions need to compute the jump size
size_t Deoptimizier::GetCallSize(size_t curDepth, const uint8_t *resumePc)
{
    if (inlineDepth_ > 0 && curDepth != inlineDepth_) {
        auto op = BytecodeInstruction(resumePc).GetOpcode();
        size_t jumpSize = BytecodeInstruction::Size(op);
        return jumpSize;
    }
    return 0;
}

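// A deopt vreg id packs the virtual register index and the inline depth into one integer: the low
// `shift` bits hold the depth and the remaining bits hold the index, with the sign preserved for
// negative (special) indices. ComputeShift returns the bit width needed for the maximum depth.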
int32_t Deoptimizier::EncodeDeoptVregIndex(int32_t index, size_t depth, size_t shift)
{
    if (index >= 0) {
        return (index << shift) | depth;
    }
    return -((-index << shift) | depth);
}

size_t Deoptimizier::ComputeShift(size_t depth)
{
    size_t shift = 0;
    if (depth != 0) {
        shift = std::floor(std::log2(depth)) + 1;
    }
    return shift;
}

int32_t Deoptimizier::DecodeVregIndex(OffsetType id, size_t shift)
{
    if (id >= 0) {
        return id >> shift;
    }
    return -((-id) >> shift);
}

size_t Deoptimizier::DecodeDeoptDepth(OffsetType id, size_t shift)
{
    size_t mask = (1 << shift) - 1;
    if (id >= 0) {
        return id & mask;
    }
    return (-id) & mask;
}
} // namespace panda::ecmascript