1 /*
2  * Copyright (c) 2022-2024 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include <ecmascript/stubs/runtime_stubs.h>
17 
18 #include "ecmascript/compiler/trampoline/x64/common_call.h"
19 
20 #include "ecmascript/compiler/assembler/assembler.h"
21 #include "ecmascript/compiler/rt_call_signature.h"
22 #include "ecmascript/ecma_runtime_call_info.h"
23 #include "ecmascript/frames.h"
24 #include "ecmascript/js_function.h"
25 #include "ecmascript/js_thread.h"
26 #include "ecmascript/js_generator_object.h"
27 #include "ecmascript/mem/machine_code.h"
28 #include "ecmascript/message_string.h"
29 #include "ecmascript/method.h"
30 #include "ecmascript/runtime_call_id.h"
31 
32 namespace panda::ecmascript::x64 {
33 #define __ assembler->
34 
35 // Generate code for Entering asm interpreter
36 // Input: glue           - %rdi
37 //        callTarget     - %rsi
38 //        method         - %rdx
39 //        callField      - %rcx
40 //        argc           - %r8
41 //        argv           - %r9(<callTarget, newTarget, this> are at the beginning of argv)
AsmInterpreterEntry(ExtendedAssembler *assembler)42 void AsmInterpreterCall::AsmInterpreterEntry(ExtendedAssembler *assembler)
43 {
44     __ BindAssemblerStub(RTSTUB_ID(AsmInterpreterEntry));
45     Label target;
46     // push asm interpreter entry frame
47     size_t begin = __ GetCurrentPosition();
48     PushAsmInterpEntryFrame(assembler);
49     __ Callq(&target);
50     PopAsmInterpEntryFrame(assembler);
51     size_t end = __ GetCurrentPosition();
52     if ((end - begin) != FrameCompletionPos::X64EntryFrameDuration) {
53         LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::X64EntryFrameDuration
54                             << "This frame has been modified, and the offset EntryFrameDuration should be updated too.";
55     }
56     __ Ret();
57 
58     __ Bind(&target);
59     AsmInterpEntryDispatch(assembler);
60 }
61 
62 // Generate code for generator re-enter asm interpreter
63 // c++ calling convention
64 // Input: %rdi - glue
65 //        %rsi - context(GeneratorContext)
GeneratorReEnterAsmInterp(ExtendedAssembler *assembler)66 void AsmInterpreterCall::GeneratorReEnterAsmInterp(ExtendedAssembler *assembler)
67 {
68     __ BindAssemblerStub(RTSTUB_ID(GeneratorReEnterAsmInterp));
69     Label target;
70     size_t begin = __ GetCurrentPosition();
71     PushAsmInterpEntryFrame(assembler);
72     __ Callq(&target);
73     PopAsmInterpEntryFrame(assembler);
74     size_t end = __ GetCurrentPosition();
75     if ((end - begin) != FrameCompletionPos::X64EntryFrameDuration) {
76         LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::X64EntryFrameDuration
77                             << "This frame has been modified, and the offset EntryFrameDuration should be updated too.";
78     }
79     __ Ret();
80 
81     __ Bind(&target);
82     GeneratorReEnterAsmInterpDispatch(assembler);
83 }
84 
// Restores a suspended generator's interpreter state: re-pushes the saved
// register snapshot from the GeneratorContext, rebuilds an asm interpreter
// frame and dispatches to the bytecode at the saved offset.
void AsmInterpreterCall::GeneratorReEnterAsmInterpDispatch(ExtendedAssembler *assembler)
{
    Register glueRegister = __ GlueRegister();
    Register contextRegister = rsi;   // GeneratorContext passed in %rsi
    Register prevSpRegister = rbp;

    Register callTargetRegister = r9;
    Register methodRegister = rcx;
    Register tempRegister = r11;  // can not be used to store any variable
    Register opRegister = r8;  // can not be used to store any variable
    // callTarget = context->method; method = callTarget->method
    __ Movq(Operand(rsi, GeneratorContext::GENERATOR_METHOD_OFFSET), callTargetRegister);
    __ Movq(Operand(callTargetRegister, JSFunctionBase::METHOD_OFFSET), methodRegister);

    Label stackOverflow;

    // remember current sp as the fp recorded in the new frame
    Register fpRegister = r10;
    __ Movq(rsp, fpRegister);
    Register nRegsRegister = rdx;
    Register regsArrayRegister = r12;
    Register thisRegister = r15;
    // push context regs
    __ Movl(Operand(rsi, GeneratorContext::GENERATOR_NREGS_OFFSET), nRegsRegister);
    __ Movq(Operand(rsi, GeneratorContext::GENERATOR_THIS_OFFSET), thisRegister);
    __ Movq(Operand(rsi, GeneratorContext::GENERATOR_REGS_ARRAY_OFFSET), regsArrayRegister);
    // skip TaggedArray header so regsArrayRegister points at element 0
    __ Addq(TaggedArray::DATA_OFFSET, regsArrayRegister);
    PushArgsWithArgvAndCheckStack(assembler, glueRegister, nRegsRegister, regsArrayRegister, tempRegister, opRegister,
        &stackOverflow);

    // newSp
    Register newSpRegister = r8;
    __ Movq(rsp, newSpRegister);

    // resume asm interp frame
    // r12 is free now (regs array pointer already consumed above), reuse it for pc
    Register pcRegister = r12;
    PushGeneratorFrameState(assembler, prevSpRegister, fpRegister, callTargetRegister, thisRegister, methodRegister,
        contextRegister, pcRegister, tempRegister);

    // call bc stub
    DispatchCall(assembler, pcRegister, newSpRegister, callTargetRegister, methodRegister);
    __ Bind(&stackOverflow);
    {
        ThrowStackOverflowExceptionAndReturn(assembler, glueRegister, fpRegister, tempRegister);
    }
}
129 
130 // Input: glue           - %rdi
131 //        callTarget     - %rsi
132 //        method         - %rdx
133 //        callField      - %rcx
134 //        argc           - %r8
135 //        argv           - %r9(<callTarget, newTarget, this> are at the beginning of argv)
136 //        prevSp         - %rbp
// Routes an entry call to the proper path based on the callTarget's hclass:
// JS function -> interpreted (or native) call entry; other callable -> native
// entry; not callable -> throw NotCallableException via the runtime.
void AsmInterpreterCall::AsmInterpEntryDispatch(ExtendedAssembler *assembler)
{
    Label notJSFunction;
    Label callNativeEntry;
    Label callJSFunctionEntry;
    Label notCallable;
    Register glueRegister = rdi;
    Register callTargetRegister = rsi;
    Register argvRegister = r9;
    Register bitFieldRegister = r12;
    Register tempRegister = r11;  // can not be used to store any variable
    __ Movq(Operand(callTargetRegister, TaggedObject::HCLASS_OFFSET), tempRegister);  // hclass
    __ Movq(Operand(tempRegister, JSHClass::BIT_FIELD_OFFSET), bitFieldRegister);
    // Cmpb compares only the low byte of the bitfield, i.e. the JSType;
    // [JS_FUNCTION_FIRST, JS_FUNCTION_LAST] is the JS function type range.
    __ Cmpb(static_cast<int32_t>(JSType::JS_FUNCTION_FIRST), bitFieldRegister);
    __ Jb(&notJSFunction);
    __ Cmpb(static_cast<int32_t>(JSType::JS_FUNCTION_LAST), bitFieldRegister);
    __ Jbe(&callJSFunctionEntry);
    __ Bind(&notJSFunction);
    {
        // non-function object: callable bit decides native call vs throw
        __ Testq(static_cast<int64_t>(1ULL << JSHClass::CallableBit::START_BIT), bitFieldRegister);
        __ Jz(&notCallable);
        // fall through
    }
    __ Bind(&callNativeEntry);
    CallNativeEntry(assembler);
    __ Bind(&callJSFunctionEntry);
    {
        // a JS function with the native bit set in callField is still native
        Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
        __ Btq(MethodLiteral::IsNativeBit::START_BIT, callFieldRegister);
        __ Jb(&callNativeEntry);

        // skip the mandatory <callTarget, newTarget, this> slots at argv start
        __ Leaq(Operand(argvRegister, NUM_MANDATORY_JSFUNC_ARGS * JSTaggedValue::TaggedTypeSize()),
            argvRegister);
        JSCallCommonEntry(assembler, JSCallMode::CALL_ENTRY, FrameTransitionType::OTHER_TO_BASELINE_CHECK);
    }
    __ Bind(&notCallable);
    {
        // build a minimal CallRuntime invocation of ThrowNotCallableException
        __ Movq(glueRegister, rax);  // glue
        __ Pushq(0);                 // argc
        Register runtimeIdRegister = r12;
        __ Movq(kungfu::RuntimeStubCSigns::ID_ThrowNotCallableException, runtimeIdRegister);
        __ Pushq(runtimeIdRegister);  // runtimeId
        Register trampolineIdRegister = r12;
        Register trampolineRegister = r10;
        __ Movq(kungfu::RuntimeStubCSigns::ID_CallRuntime, trampolineIdRegister);
        // load the CallRuntime trampoline out of the glue's RT stub entry table
        __ Movq(Operand(rax, trampolineIdRegister, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)),
            trampolineRegister);
        __ Callq(trampolineRegister);
        __ Addq(16, rsp);  // 16: skip argc and runtime_id
        __ Ret();
    }
}
189 
// Pushes an ASM_INTERPRETER_FRAME onto the stack, slot by slot, for a normal
// (non-generator) call. On return pcRegister holds the method's bytecode start
// and operatorRegister is clobbered (holds the lexical env).
void AsmInterpreterCall::PushFrameState(ExtendedAssembler *assembler, Register prevSpRegister, Register fpRegister,
    Register callTargetRegister, Register thisRegister, Register methodRegister, Register pcRegister,
    Register operatorRegister)
{
    __ Pushq(static_cast<int32_t>(FrameType::ASM_INTERPRETER_FRAME));  // frame type
    __ Pushq(prevSpRegister);                                          // prevSp
    // pc starts at the beginning of the method's bytecode array
    __ Movq(Operand(methodRegister, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET), pcRegister);
    __ Pushq(pcRegister);                                              // pc
    __ Pushq(fpRegister);                                              // fp
    __ Pushq(0);                                                       // jumpSizeAfterCall
    __ Movq(Operand(callTargetRegister, JSFunction::LEXICAL_ENV_OFFSET), operatorRegister);
    __ Pushq(operatorRegister);                                        // env
    __ Pushq(JSTaggedValue::Hole().GetRawData());                      // acc
    __ Pushq(thisRegister);                                            // thisObj
    __ Pushq(callTargetRegister);                                      // callTarget
}
206 
// Pushes an ASM_INTERPRETER_FRAME for a resumed generator. Unlike
// PushFrameState, pc/env/acc are restored from the GeneratorContext instead of
// starting fresh. operatorRegister is used as scratch throughout.
void AsmInterpreterCall::PushGeneratorFrameState(ExtendedAssembler *assembler, Register prevSpRegister,
    Register fpRegister, Register callTargetRegister, Register thisRegister, Register methodRegister,
    Register contextRegister, Register pcRegister, Register operatorRegister)
{
    __ Pushq(static_cast<int32_t>(FrameType::ASM_INTERPRETER_FRAME));  // frame type
    __ Pushq(prevSpRegister);                                          // prevSp
    // pc = bytecode array start + saved bytecode offset from the context
    __ Movq(Operand(methodRegister, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET), pcRegister);
    __ Movl(Operand(contextRegister, GeneratorContext::GENERATOR_BC_OFFSET_OFFSET), operatorRegister);
    __ Addq(operatorRegister, pcRegister);
    __ Pushq(pcRegister);                                              // pc
    __ Pushq(fpRegister);                                              // fp
    __ Pushq(0);                                                       // jumpSizeAfterCall
    __ Movq(Operand(contextRegister, GeneratorContext::GENERATOR_LEXICALENV_OFFSET), operatorRegister);
    __ Pushq(operatorRegister);                                        // env
    __ Movq(Operand(contextRegister, GeneratorContext::GENERATOR_ACC_OFFSET), operatorRegister);
    __ Pushq(operatorRegister);                                        // acc
    __ Pushq(thisRegister);                                            // thisObj
    __ Pushq(callTargetRegister);                                      // callTarget
}
226 
PushAsmInterpEntryFrame(ExtendedAssembler *assembler)227 void AsmInterpreterCall::PushAsmInterpEntryFrame(ExtendedAssembler *assembler)
228 {
229     size_t begin = __ GetCurrentPosition();
230     if (!assembler->FromInterpreterHandler()) {
231         __ PushCppCalleeSaveRegisters();
232     }
233     Register fpRegister = r10;
234     __ Pushq(rdi);
235     __ PushAlignBytes();
236     __ Movq(Operand(rdi, JSThread::GlueData::GetLeaveFrameOffset(false)), fpRegister);
237     // construct asm interpreter entry frame
238     __ Pushq(rbp);
239     __ Pushq(static_cast<int64_t>(FrameType::ASM_INTERPRETER_ENTRY_FRAME));
240     __ Pushq(fpRegister);
241     __ Pushq(0);    // pc
242     if (!assembler->FromInterpreterHandler()) {
243         size_t end = __ GetCurrentPosition();
244         if ((end - begin) != FrameCompletionPos::X64CppToAsmInterp) {
245             LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::X64CppToAsmInterp
246                                 << "This frame has been modified, and the offset CppToAsmInterp should be updated too.";
247         }
248     }
249     __ Leaq(Operand(rsp, 3 * FRAME_SLOT_SIZE), rbp);  // 3: 24 means skip frame type, prevSp and pc
250 }
251 
PopAsmInterpEntryFrame(ExtendedAssembler *assembler)252 void AsmInterpreterCall::PopAsmInterpEntryFrame(ExtendedAssembler *assembler)
253 {
254     __ Addq(8, rsp);   // 8: skip pc
255     Register fpRegister = r10;
256     __ Popq(fpRegister);
257     __ Addq(FRAME_SLOT_SIZE, rsp);  // 8: skip frame type
258     __ Popq(rbp);
259     __ PopAlignBytes();
260     __ Popq(rdi);
261     __ Movq(fpRegister, Operand(rdi, JSThread::GlueData::GetLeaveFrameOffset(false)));
262     size_t begin = __ GetCurrentPosition();
263     if (!assembler->FromInterpreterHandler()) {
264         __ PopCppCalleeSaveRegisters();
265         size_t end = __ GetCurrentPosition();
266         if ((end - begin) != FrameCompletionPos::X64AsmInterpToCpp) {
267             LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::X64AsmInterpToCpp
268                                 << "This frame has been modified, and the offset AsmInterpToCp should be updated too.";
269         }
270     }
271 }
272 
// Extracts the declared argument count (NumArgsBits field) out of callField
// into declaredNumArgsRegister; callFieldRegister is left untouched.
void AsmInterpreterCall::GetDeclaredNumArgsFromCallField(ExtendedAssembler *assembler, Register callFieldRegister,
    Register declaredNumArgsRegister)
{
    __ Movq(callFieldRegister, declaredNumArgsRegister);
    // shift the field down, then mask with the shifted-down bit mask
    __ Shrq(MethodLiteral::NumArgsBits::START_BIT, declaredNumArgsRegister);
    __ Andq(MethodLiteral::NumArgsBits::Mask() >> MethodLiteral::NumArgsBits::START_BIT, declaredNumArgsRegister);
}
280 
// Extracts the virtual-register count (NumVregsBits field) out of callField
// into numVregsRegister; callFieldRegister is left untouched.
void AsmInterpreterCall::GetNumVregsFromCallField(ExtendedAssembler *assembler, Register callFieldRegister,
    Register numVregsRegister)
{
    __ Movq(callFieldRegister, numVregsRegister);
    // shift the field down, then mask with the shifted-down bit mask
    __ Shrq(MethodLiteral::NumVregsBits::START_BIT, numVregsRegister);
    __ Andq(MethodLiteral::NumVregsBits::Mask() >> MethodLiteral::NumVregsBits::START_BIT, numVregsRegister);
}
288 
// Common JS call path shared by every Push*AndDispatch stub: compares actual
// argc against the declared count to pick fast path (exact match) or slow path
// (pad/truncate), then pushes this/newTarget/callTarget and the vregs.
// On stack overflow, either tail-jumps to the ThrowStackOverflowException
// bytecode stub (dispatch modes) or throws and returns (call-with-return modes).
void AsmInterpreterCall::JSCallCommonEntry(ExtendedAssembler *assembler,
    JSCallMode mode, FrameTransitionType type)
{
    Label stackOverflow;
    Register glueRegister = __ GlueRegister();
    Register fpRegister = __ AvailableRegister1();
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register argcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
    // save fp
    __ Movq(rsp, fpRegister);
    Register declaredNumArgsRegister = __ AvailableRegister2();
    GetDeclaredNumArgsFromCallField(assembler, callFieldRegister, declaredNumArgsRegister);

    Label slowPathEntry;
    Label fastPathEntry;
    Label pushCallThis;
    // argc >= 0 means the mode has a fixed arg count; < 0 means it is dynamic
    // (range calls) and must be read from argcRegister at runtime
    auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
    if (argc >= 0) {
        __ Cmpq(argc, declaredNumArgsRegister);
    } else {
        __ Cmpq(argcRegister, declaredNumArgsRegister);
    }
    __ Jne(&slowPathEntry);
    __ Bind(&fastPathEntry);
    JSCallCommonFastPath(assembler, mode, &stackOverflow);
    __ Bind(&pushCallThis);
    PushCallThis(assembler, mode, &stackOverflow, type);
    __ Bind(&slowPathEntry);
    JSCallCommonSlowPath(assembler, mode, &fastPathEntry, &pushCallThis, &stackOverflow);

    __ Bind(&stackOverflow);
    if (kungfu::AssemblerModule::IsJumpToCallCommonEntry(mode)) {
        // restore sp to the saved fp before raising the exception
        __ Movq(fpRegister, rsp);
        Register tempRegister = __ AvailableRegister1();
        // only glue and acc are useful in exception handler
        if (glueRegister != r13) {
            __ Movq(glueRegister, r13);
        }
        Register acc = rsi;
        __ Movq(JSTaggedValue::VALUE_EXCEPTION, acc);
        Register methodRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::METHOD);
        Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
        // Reload pc to make sure stack trace is right
        __ Movq(callTargetRegister, tempRegister);
        __ Movq(Operand(methodRegister, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET), r12);  // pc: r12
        // Reload constpool and profileInfo to make sure gc map work normally
        __ Movq(Operand(tempRegister, JSFunction::RAW_PROFILE_TYPE_INFO_OFFSET), r14);
        __ Movq(Operand(r14, ProfileTypeInfoCell::VALUE_OFFSET), r14);                 // profileTypeInfo: r14
        __ Movq(Operand(methodRegister, Method::CONSTANT_POOL_OFFSET), rbx);           // constantPool: rbx

        // tail-jump into the ThrowStackOverflowException bytecode stub
        __ Movq(kungfu::BytecodeStubCSigns::ID_ThrowStackOverflowException, tempRegister);
        __ Movq(Operand(glueRegister, tempRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
            tempRegister);
        __ Jmp(tempRegister);
    } else {
        [[maybe_unused]] TempRegisterScope scope(assembler);
        Register temp = __ TempRegister();
        ThrowStackOverflowExceptionAndReturn(assembler, glueRegister, fpRegister, temp);
    }
}
349 
350 // void PushCallArgsxAndDispatch(uintptr_t glue, uintptr_t sp, uint64_t callTarget, uintptr_t method,
351 //     uint64_t callField, ...)
352 // GHC calling convention
353 // Input1: for callarg0/1/2/3         Input2: for callrange
354 // %r13 - glue                        // %r13 - glue
355 // %rbp - sp                          // %rbp - sp
356 // %r12 - callTarget                  // %r12 - callTarget
357 // %rbx - method                      // %rbx - method
358 // %r14 - callField                   // %r14 - callField
359 // %rsi - arg0                        // %rsi - actualArgc
360 // %rdi - arg1                        // %rdi - argv
361 // %r8  - arg2
// Stub: range call with explicit this (CALL_THIS_WITH_ARGV), GHC convention.
void AsmInterpreterCall::PushCallThisRangeAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisRangeAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}
367 
// Stub: range call without this (CALL_WITH_ARGV), GHC convention.
void AsmInterpreterCall::PushCallRangeAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallRangeAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}
373 
// Stub: constructor call with argv (CALL_CONSTRUCTOR_WITH_ARGV).
void AsmInterpreterCall::PushCallNewAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallNewAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}
379 
// Stub: super() call with argv (SUPER_CALL_WITH_ARGV).
void AsmInterpreterCall::PushSuperCallAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushSuperCallAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::SUPER_CALL_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}
385 
// Stub: fixed three-argument call without this (CALL_ARG3).
void AsmInterpreterCall::PushCallArgs3AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArgs3AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG3, FrameTransitionType::OTHER_TO_OTHER);
}
391 
// Stub: fixed two-argument call without this (CALL_ARG2).
void AsmInterpreterCall::PushCallArgs2AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArgs2AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG2, FrameTransitionType::OTHER_TO_OTHER);
}
397 
// Stub: fixed one-argument call without this (CALL_ARG1).
void AsmInterpreterCall::PushCallArg1AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArg1AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG1, FrameTransitionType::OTHER_TO_OTHER);
}
403 
// Stub: zero-argument call without this (CALL_ARG0).
void AsmInterpreterCall::PushCallArg0AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArg0AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG0, FrameTransitionType::OTHER_TO_OTHER);
}
// Stub: zero-argument call with explicit this (CALL_THIS_ARG0).
void AsmInterpreterCall::PushCallThisArg0AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArg0AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG0, FrameTransitionType::OTHER_TO_OTHER);
}
414 
// Stub: one-argument call with explicit this (CALL_THIS_ARG1).
void AsmInterpreterCall::PushCallThisArg1AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArg1AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG1, FrameTransitionType::OTHER_TO_OTHER);
}
420 
// Stub: two-argument call with explicit this (CALL_THIS_ARG2).
void AsmInterpreterCall::PushCallThisArgs2AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArgs2AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG2, FrameTransitionType::OTHER_TO_OTHER);
}
426 
// Stub: three-argument call with explicit this (CALL_THIS_ARG3).
void AsmInterpreterCall::PushCallThisArgs3AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArgs3AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG3, FrameTransitionType::OTHER_TO_OTHER);
}
432 
// Fast path: actual argc equals the declared count, so just push the
// arguments. Range modes copy from argv; fixed-arity modes push arg2..arg0 in
// reverse so they sit in ascending order on the downward-growing stack.
void AsmInterpreterCall::JSCallCommonFastPath(ExtendedAssembler *assembler, JSCallMode mode, Label *stackOverflow)
{
    Register glueRegister = __ GlueRegister();
    Register arg0 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
    Register arg1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);

    Label pushCallThis;
    auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
    // call range
    if (argc < 0) {
        // dynamic argc: ARG0 holds actualArgc, ARG1 holds argv
        Register argcRegister = arg0;
        Register argvRegister = arg1;
        __ Cmpq(0, argcRegister);
        __ Jbe(&pushCallThis);  // nothing to push when argc == 0
        // fall through
        {
            [[maybe_unused]] TempRegisterScope scope(assembler);
            Register opRegister = __ TempRegister();
            Register op2Register = __ AvailableRegister2();
            PushArgsWithArgvAndCheckStack(assembler, glueRegister, argcRegister, argvRegister, opRegister, op2Register,
                stackOverflow);
        }
        __ Bind(&pushCallThis);
    } else if (argc > 0) {
        if (argc > 2) { // 2: call arg2
            if (mode == JSCallMode::CALL_THIS_ARG3_WITH_RETURN) {
                // this mode carries arg2 in a dedicated cpp-js-call register
                Register arg2 = __ CppJSCallAvailableRegister1();
                __ Pushq(arg2);
            } else {
                Register arg2 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG2);
                __ Pushq(arg2);
            }
        }
        if (argc > 1) {
            __ Pushq(arg1);
        }
        if (argc > 0) {
            __ Pushq(arg0);
        }
    }
}
474 
// Slow path: actual argc differs from the declared count. If the method has
// the extra-args bit, the actual argc is pushed first; then the declared/actual
// difference is padded with undefined (declared > actual) or the surplus
// arguments are dropped (declared < actual, no extra bit) before rejoining the
// fast path or jumping straight to pushCallThis.
void AsmInterpreterCall::JSCallCommonSlowPath(ExtendedAssembler *assembler, JSCallMode mode,
                                              Label *fastPathEntry, Label *pushCallThis, Label *stackOverflow)
{
    Register glueRegister = __ GlueRegister();
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register argcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
    Register arg0 = argcRegister;
    Register arg1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
    Label noExtraEntry;
    Label pushArgsEntry;

    auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
    Register declaredNumArgsRegister = __ AvailableRegister2();
    __ Testq(MethodLiteral::HaveExtraBit::Mask(), callFieldRegister);
    __ Jz(&noExtraEntry);
    // extra entry: record the actual argc for functions taking extra args
    {
        [[maybe_unused]] TempRegisterScope scope(assembler);
        Register tempArgcRegister = __ TempRegister();
        if (argc >= 0) {
            __ PushArgc(argc, tempArgcRegister);
        } else {
            __ PushArgc(argcRegister, tempArgcRegister);
        }
    }
    __ Bind(&noExtraEntry);
    {
        if (argc == 0) {
            // no actual args: pad all declared slots with undefined
            Register op1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
            [[maybe_unused]] TempRegisterScope scope(assembler);
            Register op2 = __ TempRegister();
            PushUndefinedWithArgcAndCheckStack(assembler, glueRegister, declaredNumArgsRegister, op1, op2,
                stackOverflow);
            __ Jmp(fastPathEntry);
            return;
        }
        // diff = declared - actual; positive means undefined padding is needed
        [[maybe_unused]] TempRegisterScope scope(assembler);
        Register diffRegister = __ TempRegister();
        __ Movq(declaredNumArgsRegister, diffRegister);
        if (argc >= 0) {
            __ Subq(argc, diffRegister);
        } else {
            __ Subq(argcRegister, diffRegister);
        }
        __ Cmpq(0, diffRegister);
        __ Jle(&pushArgsEntry);
        PushUndefinedWithArgc(assembler, diffRegister);
        __ Jmp(fastPathEntry);
    }
    // declared < actual here
    __ Bind(&pushArgsEntry);
    // with the extra bit, all actual args are kept: take the fast path
    __ Testq(MethodLiteral::HaveExtraBit::Mask(), callFieldRegister);
    __ Jnz(fastPathEntry);
    // arg1, declared must be 0
    if (argc == 1) {
        __ Jmp(pushCallThis);
        return;
    }
    // declared < actual
    __ Cmpq(0, declaredNumArgsRegister);
    __ Je(pushCallThis);
    if (argc < 0) {
        // range call: push only the declared number of args from argv
        Register argvRegister = arg1;
        [[maybe_unused]] TempRegisterScope scope(assembler);
        Register opRegister = __ TempRegister();
        PushArgsWithArgvAndCheckStack(assembler, glueRegister, declaredNumArgsRegister, argvRegister, opRegister,
            opRegister, stackOverflow);
    } else if (argc > 0) {
        Label pushArgs0;
        if (argc > 2) { // 2: call arg2
            // declared is 2 or 1 now
            __ Cmpq(1, declaredNumArgsRegister);
            __ Je(&pushArgs0);
            __ Pushq(arg1);
        }
        if (argc > 1) {
            __ Bind(&pushArgs0);
            // declared is 1 now
            __ Pushq(arg0);
        }
    }
    __ Jmp(pushCallThis);
}
557 
// Returns the register holding the `this` value for the given call mode.
// For CALL_ENTRY/CALL_FROM_AOT, `this` is loaded from memory (one slot before
// argv) into defaultRegister; otherwise a dispatcher argument register is
// returned directly and defaultRegister is unused.
// NOTE(review): "Regsiter" is a historical typo kept for ABI/caller stability.
Register AsmInterpreterCall::GetThisRegsiter(ExtendedAssembler *assembler, JSCallMode mode, Register defaultRegister)
{
    switch (mode) {
        case JSCallMode::CALL_GETTER:
        case JSCallMode::CALL_THIS_ARG0:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
        case JSCallMode::CALL_SETTER:
        case JSCallMode::CALL_THIS_ARG1:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
        case JSCallMode::CALL_THIS_ARG2:
        case JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV:
        case JSCallMode::CALL_THIS_WITH_ARGV:
        case JSCallMode::SUPER_CALL_WITH_ARGV:
        case JSCallMode::SUPER_CALL_SPREAD_WITH_ARGV:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG2);
        case JSCallMode::CALL_THIS_ARG3:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG3);
        case JSCallMode::CALL_ENTRY:
        case JSCallMode::CALL_FROM_AOT: {
            Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
            __ Movq(Operand(argvRegister, -FRAME_SLOT_SIZE), defaultRegister);  // 8: this is just before the argv list
            return defaultRegister;
        }
        case JSCallMode::CALL_THIS_ARG3_WITH_RETURN:
            return __ CppJSCallAvailableRegister2();
        case JSCallMode::CALL_THIS_ARG2_WITH_RETURN:
        case JSCallMode::CALL_THIS_ARGV_WITH_RETURN: {
            return __ CppJSCallAvailableRegister1();
        }
        default:
            LOG_ECMA(FATAL) << "this branch is unreachable";
            UNREACHABLE();
    }
    return rInvalid;
}
593 
// Returns the register holding `newTarget` for the given call mode. For
// CALL_FROM_AOT/CALL_ENTRY it is loaded from memory (two slots before argv)
// into defaultRegister; constructor-style modes reuse an existing dispatcher
// argument. Only modes that carry a newTarget are valid here.
Register AsmInterpreterCall::GetNewTargetRegsiter(ExtendedAssembler *assembler, JSCallMode mode,
                                                  Register defaultRegister)
{
    switch (mode) {
        case JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV:
        case JSCallMode::CALL_THIS_WITH_ARGV:
            // for plain construct calls the callTarget doubles as newTarget
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
        case JSCallMode::SUPER_CALL_WITH_ARGV:
        case JSCallMode::SUPER_CALL_SPREAD_WITH_ARGV:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG3);
        case JSCallMode::CALL_FROM_AOT:
        case JSCallMode::CALL_ENTRY: {
            Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
            // -2: new Target offset
            __ Movq(Operand(argvRegister, -2 * FRAME_SLOT_SIZE), defaultRegister);
            return defaultRegister;
        }
        default:
            LOG_ECMA(FATAL) << "this branch is unreachable";
            UNREACHABLE();
    }
    return rInvalid;
}
617 
618 // Input: %r14 - callField
619 //        %rdi - argv
// Input: %r14 - callField
//        %rdi - argv
// Pushes the optional this/newTarget/callTarget slots, each gated by the
// corresponding bit in callField, then falls through to pushing the vregs.
// Modes without a this argument push undefined in its place.
void AsmInterpreterCall::PushCallThis(ExtendedAssembler *assembler,
    JSCallMode mode, Label *stackOverflow, FrameTransitionType type)
{
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
    Register thisRegister = __ AvailableRegister2();

    Label pushVregs;
    Label pushNewTarget;
    Label pushCallTarget;
    bool haveThis = kungfu::AssemblerModule::JSModeHaveThisArg(mode);
    bool haveNewTarget = kungfu::AssemblerModule::JSModeHaveNewTargetArg(mode);
    if (!haveThis) {
        __ Movq(JSTaggedValue::VALUE_UNDEFINED, thisRegister);  // default this: undefined
    } else {
        // copy the mode-specific this register into thisRegister if different
        Register thisArgRegister = GetThisRegsiter(assembler, mode, thisRegister);
        if (thisRegister != thisArgRegister) {
            __ Movq(thisArgRegister, thisRegister);
        }
    }
    // no this/newTarget/func bits at all: skip straight to the vregs
    __ Testb(CALL_TYPE_MASK, callFieldRegister);
    __ Jz(&pushVregs);
    // fall through
    __ Testq(MethodLiteral::HaveThisBit::Mask(), callFieldRegister);
    __ Jz(&pushNewTarget);
    // push this
    if (!haveThis) {
        __ Pushq(JSTaggedValue::Undefined().GetRawData());
    } else {
        __ Pushq(thisRegister);
    }
    // fall through
    __ Bind(&pushNewTarget);
    {
        __ Testq(MethodLiteral::HaveNewTargetBit::Mask(), callFieldRegister);
        __ Jz(&pushCallTarget);
        if (!haveNewTarget) {
            __ Pushq(JSTaggedValue::Undefined().GetRawData());
        } else {
            [[maybe_unused]] TempRegisterScope scope(assembler);
            Register defaultRegister = __ TempRegister();
            Register newTargetRegister = GetNewTargetRegsiter(assembler, mode, defaultRegister);
            __ Pushq(newTargetRegister);
        }
    }
    // fall through
    __ Bind(&pushCallTarget);
    {
        __ Testq(MethodLiteral::HaveFuncBit::Mask(), callFieldRegister);
        __ Jz(&pushVregs);
        __ Pushq(callTargetRegister);
    }
    // fall through
    __ Bind(&pushVregs);
    {
        PushVregs(assembler, stackOverflow, type);
    }
}
678 
// Input: %rbp - sp
//        %r12 - callTarget
//        %rbx - method
//        %r14 - callField
//        %rdx - jumpSizeAfterCall
//        %r10 - fp
// Reserves the callee's vregs (filled with undefined), pushes the AsmInterpretedFrame
// state, and then either jumps into existing baseline code (for *_TO_BASELINE_CHECK
// transitions) or dispatches to the first bytecode via DispatchCall.
void AsmInterpreterCall::PushVregs(ExtendedAssembler *assembler,
    Label *stackOverflow, FrameTransitionType type)
{
    Register glueRegister = __ GlueRegister();
    Register prevSpRegister = rbp;
    Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
    Register methodRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::METHOD);
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register fpRegister = __ AvailableRegister1();
    Register thisRegister = __ AvailableRegister2();

    Label pushFrameState;

    [[maybe_unused]] TempRegisterScope scope(assembler);
    Register tempRegister = __ TempRegister();
    // args registers can be reused now.
    Register pcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
    Register numVregsRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
    GetNumVregsFromCallField(assembler, callFieldRegister, numVregsRegister);
    // Skip the undefined-fill when the method declares zero vregs.
    __ Cmpq(0, numVregsRegister);
    __ Jz(&pushFrameState);
    Register temp2Register = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);  // reuse
    PushUndefinedWithArgcAndCheckStack(assembler, glueRegister, numVregsRegister, tempRegister, temp2Register,
        stackOverflow);
    // fall through
    // ARG1 is reused again: numVregsRegister is dead after the stack checks below.
    Register newSpRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
    __ Bind(&pushFrameState);
    {
        StackOverflowCheck(assembler, glueRegister, numVregsRegister, tempRegister, temp2Register, stackOverflow);
        // Capture the new frame's sp before pushing the frame state.
        __ Movq(rsp, newSpRegister);

        PushFrameState(assembler, prevSpRegister, fpRegister,
            callTargetRegister, thisRegister, methodRegister, pcRegister, tempRegister);
    }
    if (type == FrameTransitionType::OTHER_TO_BASELINE_CHECK ||
        type == FrameTransitionType::BASELINE_TO_BASELINE_CHECK) {
        // Try to enter compiled baseline code instead of interpreting.
        __ Movq(Operand(callTargetRegister, JSFunction::BASELINECODE_OFFSET), tempRegister);
        Label baselineCodeUndefined;
        __ Cmpq(JSTaggedValue::Undefined().GetRawData(), tempRegister);
        __ Je(&baselineCodeUndefined);

        // check is compiling
        __ Cmpq(JSTaggedValue::Hole().GetRawData(), tempRegister);
        __ Je(&baselineCodeUndefined);

        Label stackAligned;
        // align 16 bytes
        __ Testq(15, rsp);  // 15: low 4 bits must be 0b0000
        __ Jz(&stackAligned);
        __ PushAlignBytes();
        __ Bind(&stackAligned);

        // Load the baseline machine-code entry and set up the expected registers (glue=r13, method=rbx).
        __ Movq(Operand(tempRegister, MachineCode::FUNCADDR_OFFSET), tempRegister);
        if (glueRegister != r13) {
            __ Movq(glueRegister, r13);
        }
        if (methodRegister != rbx) {
            __ Movq(methodRegister, rbx);
        }
        const int32_t pcOffsetFromSP = -24; // -24: 3 slots, frameType, prevFrame, pc
        Register temp3Register = r10;
        // Fill the frame's pc slot with all ones: a baseline frame has no bytecode pc.
        __ Movabs(std::numeric_limits<uint64_t>::max(), temp3Register);
        __ Movq(temp3Register, Operand(newSpRegister, pcOffsetFromSP));
        __ Movq(newSpRegister, rbp);
        __ Jmp(tempRegister);

        __ Bind(&baselineCodeUndefined);
    }
    DispatchCall(assembler, pcRegister, newSpRegister, callTargetRegister, methodRegister);
}
755 
// Input: %r13 - glue
//        %rbp - sp
//        %r12 - callTarget
//        %rbx - method
// Loads the interpreter's fixed-register state (profileTypeInfo, sp, hotnessCounter,
// constantPool, pc, acc) and tail-jumps to the bytecode handler for the first opcode.
void AsmInterpreterCall::DispatchCall(ExtendedAssembler *assembler, Register pcRegister,
    Register newSpRegister, Register callTargetRegister, Register methodRegister, Register accRegister)
{
    Register glueRegister = __ GlueRegister();
    Label dispatchCall;
    // align 16 bytes
    __ Testq(15, rsp);  // 15: low 4 bits must be 0b0000
    __ Jnz(&dispatchCall);
    __ PushAlignBytes();
    __ Bind(&dispatchCall);
    // profileTypeInfo: r14
    __ Movq(Operand(callTargetRegister, JSFunction::RAW_PROFILE_TYPE_INFO_OFFSET), r14);
    __ Movq(Operand(r14, ProfileTypeInfoCell::VALUE_OFFSET), r14);
    // glue may rdi
    if (glueRegister != r13) {
        __ Movq(glueRegister, r13);
    }
    // sp: rbp
    __ Movq(newSpRegister, rbp);
    // hotnessCounter: rdi
    __ Movzwq(Operand(methodRegister, Method::LITERAL_INFO_OFFSET), rdi);
    // constantPool: rbx
    __ Movq(Operand(methodRegister, Method::CONSTANT_POOL_OFFSET), rbx);
    // pc: r12
    if (pcRegister != r12) {
        __ Movq(pcRegister, r12);
    }

    Register bcIndexRegister = rax;
    Register tempRegister = __ AvailableRegister1();
    // First opcode byte indexes the bytecode-handler table.
    __ Movzbq(Operand(pcRegister, 0), bcIndexRegister);
    // acc: rsi
    if (accRegister != rInvalid) {
        ASSERT(accRegister == rsi);
    } else {
        // No incoming acc — seed it with Hole.
        __ Movq(JSTaggedValue::Hole().GetRawData(), rsi);
    }
    __ Movq(Operand(r13, bcIndexRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)), tempRegister);
    __ Jmp(tempRegister);
}
800 
// uint64_t PushCallRangeAndDispatchNative(uintptr_t glue, uint32_t argc, JSTaggedType calltarget, uintptr_t argv[])
// c++ calling convention call js function
// Input: %rdi - glue
//        %rsi - nativeCode
//        %rdx - func
//        %rcx - thisValue
//        %r8  - argc
//        %r9  - argV (...)
// Entry stub for a plain (non-constructor) native call; new.target is pushed as undefined.
void AsmInterpreterCall::PushCallRangeAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallRangeAndDispatchNative));
    CallNativeWithArgv(assembler, false);
}
814 
// Entry stub for a native constructor call (callNew = true); the callee function
// itself is pushed as new.target.
void AsmInterpreterCall::PushCallNewAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallNewAndDispatchNative));
    CallNativeWithArgv(assembler, true);
}
820 
// Entry stub for a native constructor call with an explicit new.target
// (callNew = true, hasNewTarget = true); new.target is read from the caller's stack.
void AsmInterpreterCall::PushNewTargetAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushNewTargetAndDispatchNative));
    CallNativeWithArgv(assembler, true, true);
}
826 
// Builds a BUILTIN_FRAME_WITH_ARGV frame, lays out the EcmaRuntimeCallInfo
// (thread, argc, callTarget, newTarget, this, args...) on the stack, and calls
// the native code. On stack overflow it raises ThrowStackOverflowException
// through a dedicated overflow frame instead.
void AsmInterpreterCall::CallNativeWithArgv(ExtendedAssembler *assembler, bool callNew, bool hasNewTarget)
{
    Register glue = rdi;
    Register nativeCode = rsi;
    Register func = rdx;
    Register thisValue = rcx;
    Register numArgs = r8;
    Register stackArgs = r9;
    Register temporary = rax;
    Register temporary2 = r11;
    Register opNumArgs = r10;
    Label aligned;
    Label pushThis;
    Label stackOverflow;

    // Returns false: for BUILTIN_FRAME_WITH_ARGV rbp is set up later, below.
    bool isFrameComplete = PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_FRAME_WITH_ARGV);

    __ Push(numArgs);
    __ Cmpq(0, numArgs);
    __ Jz(&pushThis);
    __ Movq(numArgs, opNumArgs);
    PushArgsWithArgvAndCheckStack(assembler, glue, opNumArgs, stackArgs, temporary, temporary2, &stackOverflow);

    __ Bind(&pushThis);
    __ Push(thisValue);
    // new.target
    if (callNew) {
        if (hasNewTarget) {
            Register newTarget = r12;
            // 5: skip frame type, numArgs, func, newTarget and this
            __ Movq(Operand(rsp, numArgs, Times8, 5 * FRAME_SLOT_SIZE), newTarget);
            __ Pushq(newTarget);
        } else {
            // Constructor call without explicit new.target: func doubles as new.target.
            __ Pushq(func);
        }
    } else {
        __ Pushq(JSTaggedValue::Undefined().GetRawData());
    }
    __ Pushq(func);
    if (!isFrameComplete) {
        // All stack args are pushed; now it is safe to publish rbp for this frame.
        // 5: skip frame type, numArgs, func, newTarget and this
        __ Leaq(Operand(rsp, numArgs, Times8, 5 * FRAME_SLOT_SIZE), rbp);
    }
    __ Movq(rsp, stackArgs);

    // push argc
    __ Addl(NUM_MANDATORY_JSFUNC_ARGS, numArgs);
    __ Pushq(numArgs);
    // push thread
    __ Pushq(glue);
    // EcmaRuntimeCallInfo
    __ Movq(rsp, rdi);

    __ Testq(0xf, rsp);  // 0xf: low 4 bits must be 0b0000 (16-byte alignment)
    __ Jz(&aligned, Distance::Near);
    __ PushAlignBytes();

    __ Bind(&aligned);
    CallNativeInternal(assembler, nativeCode);
    __ Ret();

    __ Bind(&stackOverflow);
    {
        Label aligneThrow;
        // Roll rsp back to the leave frame and build a minimal overflow frame.
        __ Movq(Operand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)), rsp);
        __ Pushq(static_cast<int32_t>(FrameType::BUILTIN_FRAME_WITH_ARGV_STACK_OVER_FLOW_FRAME));  // frame type
        __ Pushq(0);  // argc
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED);  // this
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED);  // newTarget
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED);  // callTarget
        // 5: skip frame type, argc, this, newTarget and callTarget
        // +----------------------------------------------------------------+ <---- rbp = rsp + 5 * frame_slot_size
        // |     FrameType =  BUILTIN_FRAME_WITH_ARGV_STACK_OVER_FLOW_FRAME |
        // |----------------------------------------------------------------|
        // |                               argc = 0                         |
        // |----------------------------------------------------------------|
        // |                           this = undefined                     |
        // |----------------------------------------------------------------|
        // |                        newTarget = undefined                   |
        // |----------------------------------------------------------------|
        // |                        callTarget = undefined                  |
        // +----------------------------------------------------------------+  <---- rsp
        __ Leaq(Operand(rsp, 5 * FRAME_SLOT_SIZE), rbp);

        __ Testq(0xf, rsp);  // 0xf: low 4 bits must be 0b0000 (16-byte alignment)
        __ Jz(&aligneThrow, Distance::Near);
        __ PushAlignBytes();

        __ Bind(&aligneThrow);
        // Call the ThrowStackOverflowException runtime stub through the glue table.
        Register trampolineIdRegister = r9;
        Register trampolineRegister = r10;
        __ Movq(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException, trampolineIdRegister);
        __ Movq(Operand(glue, trampolineIdRegister, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)),
            trampolineRegister);
        __ Callq(trampolineRegister);

        // resume rsp
        __ Movq(rbp, rsp);
        __ Pop(rbp);
        __ Ret();
    }
}
929 
// Entry for calling a native method: routes fast builtins (per the callField
// IsFastBuiltinBit) to CallFastBuiltin, otherwise builds a BUILTIN_ENTRY_FRAME
// and invokes the native pointer with an EcmaRuntimeCallInfo derived from argv.
void AsmInterpreterCall::CallNativeEntry(ExtendedAssembler *assembler)
{
    Label callFastBuiltin;
    Label callNativeBuiltin;
    Register glue = rdi;
    Register argv = r9;
    Register method = rdx;
    Register function = rsi;
    Register nativeCode = r10;
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    __ Movq(Operand(method, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET), nativeCode); // get native pointer
    __ Btq(MethodLiteral::IsFastBuiltinBit::START_BIT, callFieldRegister);
    __ Jb(&callFastBuiltin);

    __ Bind(&callNativeBuiltin);
    __ PushAlignBytes();
    __ Push(function);
    // 3: reserve 3 slots (24 bytes) to skip thread & argc & returnAddr
    __ Subq(3 * FRAME_SLOT_SIZE, rsp);
    PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_ENTRY_FRAME);
    __ Movq(argv, r11);
    // 2: 16 means skip numArgs & thread
    __ Subq(2 * FRAME_SLOT_SIZE, r11);
    // EcmaRuntimeCallInfo
    __ Movq(r11, rdi);

    CallNativeInternal(assembler, nativeCode);

    // 5: pop the 5 slots (40 bytes) pushed above: align bytes, function and 3 reserved slots
    __ Addq(5 * FRAME_SLOT_SIZE, rsp);
    __ Ret();

    __ Bind(&callFastBuiltin);
    CallFastBuiltin(assembler, &callNativeBuiltin);
}
965 
// Attempts to dispatch to a fast builtin stub (builtins with 0-3 args that have
// a dedicated stub entry). Bails out to callNativeBuiltin when the builtin id is
// a constructor stub or argc exceeds 3. The stub is called with the missing
// arg0..arg2 slots padded to undefined on the stack.
void AsmInterpreterCall::CallFastBuiltin(ExtendedAssembler *assembler, Label *callNativeBuiltin)
{
    Label arg1;
    Label arg2;
    Label arg3;
    Label callEntry;
    Register glue = rdi;
    Register argc = r8;
    Register argv = r9;
    Register method = rdx;
    Register function = rsi;
    Register nativeCode = r10;
    Register temp = rax;
    Register temp1 = r11;
    // get builtins id
    __ Movq(Operand(method, Method::EXTRA_LITERAL_INFO_OFFSET), temp1);
    __ Shr(MethodLiteral::BuiltinIdBits::START_BIT, temp1);
    __ Andl((1LU << MethodLiteral::BuiltinIdBits::SIZE) - 1, temp1);

    // Constructor stubs are not handled on this path.
    __ Cmpl(static_cast<int32_t>(kungfu::BuiltinsStubCSigns::BUILTINS_CONSTRUCTOR_STUB_FIRST), temp1);
    __ Jge(callNativeBuiltin);

    __ Cmpq(Immediate(3), argc); // 3: number of args
    __ Jg(callNativeBuiltin);

    // create frame
    PushAsmBridgeFrame(assembler);

    // Shuffle into the stub's register convention; temp holds function, then argv.
    __ Movq(function, temp);
    __ Movq(nativeCode, rsi); // nativeCode is rsi
    __ Movq(temp, rdx); // fun is rdx
    __ Movq(argv, temp); // temp is argv
    __ Movq(argc, r9); // argc is r9
    __ Movq(Operand(temp, FRAME_SLOT_SIZE), rcx); // get new target
    __ Movq(Operand(temp, FRAME_SLOT_SIZE * 2), r8); // 2: skip func & new target to get this target

    // Push arg2, arg1, arg0 (in that order), padding absent args with undefined.
    __ Cmp(Immediate(0), r9);
    __ Jne(&arg1);
    __ Pushq(JSTaggedValue::VALUE_UNDEFINED);
    __ Pushq(JSTaggedValue::VALUE_UNDEFINED);
    __ Pushq(JSTaggedValue::VALUE_UNDEFINED);
    __ Jmp(&callEntry);
    __ Bind(&arg1);
    {
        __ Cmp(Immediate(1), r9);
        __ Jne(&arg2);
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED);
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED);
        __ Movq(Operand(temp, FRAME_SLOT_SIZE * 3), r10); // 3: get arg0
        __ Pushq(r10);
        __ Jmp(&callEntry);
    }
    __ Bind(&arg2);
    {
        __ Cmp(Immediate(2), r9); // 2: number of args
        __ Jne(&arg3);
        __ Pushq(JSTaggedValue::VALUE_UNDEFINED);
        __ Movq(Operand(temp, FRAME_SLOT_SIZE * 4), r10); // 4: get arg1
        __ Pushq(r10);
        __ Movq(Operand(temp, FRAME_SLOT_SIZE * 3), r10); // 3: get arg0
        __ Pushq(r10);
        __ Jmp(&callEntry);
    }
    __ Bind(&arg3);
    {
        __ Movq(Operand(temp, FRAME_SLOT_SIZE * 5), r10); // 5: get arg2
        __ Pushq(r10);
        __ Movq(Operand(temp, FRAME_SLOT_SIZE * 4), r10); // 4: get arg1
        __ Pushq(r10);
        __ Movq(Operand(temp, FRAME_SLOT_SIZE * 3), r10); // 3: get arg0
        __ Pushq(r10);
        __ Jmp(&callEntry);
    }
    __ Bind(&callEntry);
    {
        // Call the builtin stub through the glue entry table, then tear down the bridge frame.
        __ Movq(Operand(glue, temp1, Times8, JSThread::GlueData::GetBuiltinsStubEntriesOffset(false)), temp1);
        __ Callq(temp1);
        __ Addq(QUADRUPLE_SLOT_SIZE, rsp);
        __ Pop(rbp);
        __ Ret();
    }
}
1049 
1050 // uint64_t PushCallArgsAndDispatchNative(uintptr_t codeAddress, uintptr_t glue, uint32_t argc, ...)
// webkit_jscc calling convention: calls the runtime function for runtime_id (c-abi)
1052 // Input:        %rax - codeAddress
1053 // stack layout: sp + N*8 argvN
1054 //               ........
1055 //               sp + 24: argv1
1056 //               sp + 16: argv0
1057 //               sp + 8:  actualArgc
1058 //               sp:      thread
1059 // construct Native Leave Frame
1060 //               +--------------------------+
1061 //               |     argV[N - 1]          |
1062 //               |--------------------------|
1063 //               |       . . . .            |
1064 //               |--------------------------+
1065 //               |     argV[2]=this         |
1066 //               +--------------------------+
1067 //               |     argV[1]=new-target   |
1068 //               +--------------------------+
1069 //               |     argV[0]=call-target  |
1070 //               +--------------------------+ ---------
1071 //               |       argc               |         ^
1072 //               |--------------------------|         |
1073 //               |       thread             |         |
1074 //               |--------------------------|         |
1075 //               |       returnAddr         |     BuiltinFrame
1076 //               |--------------------------|         |
1077 //               |       callsiteFp         |         |
1078 //               |--------------------------|         |
1079 //               |       frameType          |         v
1080 //               +--------------------------+ ---------
1081 
// Builds a BUILTIN_FRAME over the webkit_jscc stack layout described above and
// calls the native code at %rax with rdi pointing at the EcmaRuntimeCallInfo
// (thread/argc already on the caller's stack).
void AsmInterpreterCall::PushCallArgsAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArgsAndDispatchNative));
    Register nativeCode = rax;
    Register glue = rdi;

    __ Movq(Operand(rsp, FRAME_SLOT_SIZE), glue); // 8: glue
    PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_FRAME);
    __ Leaq(Operand(rbp, 2 * FRAME_SLOT_SIZE), rdi); // 2: skip argc & thread
    __ PushAlignBytes();
    CallNativeInternal(assembler, nativeCode);
    __ Ret();
}
1095 
PushBuiltinFrame(ExtendedAssembler *assembler, Register glue, FrameType type)1096 bool AsmInterpreterCall::PushBuiltinFrame(ExtendedAssembler *assembler,
1097                                           Register glue, FrameType type)
1098 {
1099     __ Pushq(rbp);
1100     __ Movq(rsp, Operand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)));
1101     __ Pushq(static_cast<int32_t>(type));
1102     if (type != FrameType::BUILTIN_FRAME_WITH_ARGV) {
1103         __ Leaq(Operand(rsp, FRAME_SLOT_SIZE), rbp);  // 8: skip frame type
1104         return true;
1105     } else if (type == FrameType::BUILTIN_FRAME_WITH_ARGV) {
1106         // this frame push stack args must before update rbp, otherwise cpu profiler maybe visit incomplete stack
1107         // BuiltinWithArgvFrame layout please see frames.h
1108         return false;
1109     } else {
1110         LOG_ECMA(FATAL) << "this branch is unreachable";
1111         UNREACHABLE();
1112     }
1113 }
1114 
// Calls the native code and tears down the builtin frame: rsp is restored from
// rbp and the saved rbp is popped. The native return value stays in rax.
void AsmInterpreterCall::CallNativeInternal(ExtendedAssembler *assembler, Register nativeCode)
{
    __ Callq(nativeCode);
    // resume rsp
    __ Movq(rbp, rsp);
    __ Pop(rbp);
}
1122 
// ResumeRspAndDispatch(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
//     uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter, size_t jumpSize)
// GHC calling convention
// %r13 - glue
// %rbp - sp
// %r12 - pc
// %rbx - constantPool
// %r14 - profileTypeInfo
// %rsi - acc
// %rdi - hotnessCounter
// %r8  - jumpSizeAfterCall
// Pops the callee's AsmInterpretedFrame and dispatches back into the caller's
// bytecode. A non-positive jumpSize marks a constructor (new-object-range) return,
// which applies the "constructor result" rules: keep an ecma-object acc, replace
// undefined (or a base-constructor primitive) with this, otherwise throw.
void AsmInterpreterCall::ResumeRspAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndDispatch));
    Register glueRegister = __ GlueRegister();
    Register spRegister = rbp;
    Register pcRegister = r12;
    Register ret = rsi;
    Register jumpSizeRegister = r8;

    // frameStateBase = sp - sizeof(AsmInterpretedFrame): base of the callee's frame state.
    Register frameStateBaseRegister = r11;
    __ Movq(spRegister, frameStateBaseRegister);
    __ Subq(AsmInterpretedFrame::GetSize(false), frameStateBaseRegister);

    Label dispatch;
    Label newObjectRangeReturn;
    __ Cmpq(0, jumpSizeRegister);
    __ Jle(&newObjectRangeReturn);

    // Normal return: advance pc past the call bytecode and dispatch.
    __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister);  // update sp
    __ Addq(jumpSizeRegister, pcRegister);  // newPC
    Register temp = rax;
    Register opcodeRegister = rax;
    __ Movzbq(Operand(pcRegister, 0), opcodeRegister);

    __ Bind(&dispatch);
    {
        __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFpOffset(false)), rsp);   // resume rsp
        Register bcStubRegister = r11;
        __ Movq(Operand(glueRegister, opcodeRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
            bcStubRegister);
        __ Jmp(bcStubRegister);
    }

    Label getThis;
    Label notUndefined;
    __ Bind(&newObjectRangeReturn);
    __ Cmpq(JSTaggedValue::Undefined().GetRawData(), ret);
    __ Jne(&notUndefined);

    // Constructor returned undefined: substitute this from the frame, then dispatch.
    __ Bind(&getThis);
    __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister);  // update sp
    __ Subq(jumpSizeRegister, pcRegister);  // sub negative jumpSize
    __ Movzbq(Operand(pcRegister, 0), opcodeRegister);
    {
        __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetThisOffset(false)), ret);
        __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFpOffset(false)), rsp);   // resume rsp
        Register bcStubRegister = r11;
        __ Movq(Operand(glueRegister, opcodeRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
            bcStubRegister);
        __ Jmp(bcStubRegister);
    }

    __ Bind(&notUndefined);
    {
        Label notEcmaObject;
        // Tag test: any bits in TAG_HEAPOBJECT_MASK set means acc is not a heap object.
        __ Movabs(JSTaggedValue::TAG_HEAPOBJECT_MASK, temp);
        __ And(ret, temp);
        __ Cmpq(0, temp);
        __ Jne(&notEcmaObject);
        // acc is heap object
        __ Movq(Operand(ret, 0), temp);  // hclass
        __ Movl(Operand(temp, JSHClass::BIT_FIELD_OFFSET), temp);
        __ Cmpb(static_cast<int32_t>(JSType::ECMA_OBJECT_LAST), temp);
        __ Ja(&notEcmaObject);
        __ Cmpb(static_cast<int32_t>(JSType::ECMA_OBJECT_FIRST), temp);
        __ Jb(&notEcmaObject);
        // acc is ecma object: keep it as the constructor result.
        __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister);  // update sp
        __ Subq(jumpSizeRegister, pcRegister);  // sub negative jumpSize
        __ Movzbq(Operand(pcRegister, 0), opcodeRegister);
        __ Jmp(&dispatch);

        __ Bind(&notEcmaObject);
        {
            // load constructor kind from the frame's function to decide between
            // substituting this (base constructor) and throwing (derived).
            __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFunctionOffset(false)), temp);
            __ Movq(Operand(temp, JSFunctionBase::METHOD_OFFSET), temp);
            __ Movq(Operand(temp, Method::EXTRA_LITERAL_INFO_OFFSET), temp);
            __ Shr(MethodLiteral::FunctionKindBits::START_BIT, temp);
            __ Andl((1LU << MethodLiteral::FunctionKindBits::SIZE) - 1, temp);
            __ Cmpl(static_cast<int32_t>(FunctionKind::CLASS_CONSTRUCTOR), temp);
            __ Jbe(&getThis);  // constructor is base
            // fall through
        }
        // exception branch
        {
            __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister);
            __ Movq(kungfu::BytecodeStubCSigns::ID_NewObjectRangeThrowException, opcodeRegister);
            __ Jmp(&dispatch);
        }
    }
}
1226 
// c++ calling convention
// %rdi - glue
// %rsi - callTarget
// %rdx - method
// %rcx - callField
// %r8 - receiver
// %r9 - value
// Invokes a JS getter through the asm interpreter: an interp bridge frame wraps
// the JSCallCommonEntry call so the C++ caller gets control back on return.
void AsmInterpreterCall::CallGetter(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallGetter));
    Label target;

    PushAsmInterpBridgeFrame(assembler);
    __ Callq(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    JSCallCommonEntry(assembler, JSCallMode::CALL_GETTER, FrameTransitionType::OTHER_TO_OTHER);
}
1246 
// Invokes a JS setter through the asm interpreter; same bridge-frame structure
// and register inputs as CallGetter, with %r9 carrying the value to set.
void AsmInterpreterCall::CallSetter(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallSetter));
    Label target;
    PushAsmInterpBridgeFrame(assembler);
    __ Callq(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    JSCallCommonEntry(assembler, JSCallMode::CALL_SETTER, FrameTransitionType::OTHER_TO_OTHER);
}
1258 
1259 // Input: glue             - %rdi
1260 //        callTarget       - %rsi
1261 //        method           - %rdx
1262 //        callField        - %rcx
1263 //        arg0(argc)       - %r8
1264 //        arg1(arglist)    - %r9
1265 //        argthis          - stack
CallReturnWithArgv(ExtendedAssembler *assembler)1266 void AsmInterpreterCall::CallReturnWithArgv(ExtendedAssembler *assembler)
1267 {
1268     __ BindAssemblerStub(RTSTUB_ID(CallReturnWithArgv));
1269     Label target;
1270     PushAsmInterpBridgeFrame(assembler);
1271     Register r13 = __ CppJSCallAvailableRegister1();
1272     __ Movq(Operand(rbp, FRAME_SLOT_SIZE), r13);
1273     __ Callq(&target);
1274     PopAsmInterpBridgeFrame(assembler);
1275     __ Ret();
1276     __ Bind(&target);
1277     {
1278         JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARGV_WITH_RETURN,
1279                           FrameTransitionType::OTHER_TO_OTHER);
1280     }
1281 }
1282 
// Calls a containers builtin with two args and a return value; the argv passed
// on the C++ stack is loaded into registers via GetArgvAtStack before entry.
void AsmInterpreterCall::CallContainersArgs2(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallContainersArgs2));
    Label target;
    PushAsmInterpBridgeFrame(assembler);
    GetArgvAtStack(assembler);
    __ Callq(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    {
        JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG2_WITH_RETURN,
                          FrameTransitionType::OTHER_TO_OTHER);
    }
}
1298 
// Same as CallContainersArgs2, but for the three-argument call mode.
void AsmInterpreterCall::CallContainersArgs3(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallContainersArgs3));
    Label target;
    PushAsmInterpBridgeFrame(assembler);
    GetArgvAtStack(assembler);
    __ Callq(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    {
        JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG3_WITH_RETURN,
                          FrameTransitionType::OTHER_TO_OTHER);
    }
}
1314 
// ResumeRspAndReturn(uintptr_t acc)
// GHC calling convention
// %r13 - acc
// %rbp - prevSp
// %r12 - sp
// Restores rsp from the frame's saved fp and returns acc in rax to the caller.
void AsmInterpreterCall::ResumeRspAndReturn(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndReturn));
    Register currentSp = r12;
    Register fpRegister = r10;
    // Offset of the fp field relative to the current sp (frame state lies below sp).
    intptr_t offset = AsmInterpretedFrame::GetFpOffsetAsIntptr(false) -
        AsmInterpretedFrame::GetSizeAsIntptr(false);
    __ Movq(Operand(currentSp, static_cast<int32_t>(offset)), fpRegister);
    __ Movq(fpRegister, rsp);
    // return
    {
        __ Movq(r13, rax);
        __ Ret();
    }
}
1335 
// ResumeRspAndReturnBaseline(uintptr_t acc)
// GHC calling convention
// %r13 - acc
// %rbx - jumpSizeAfterCall
// %rbp - prevSp
// %r12 - sp
// Baseline-code variant of ResumeRspAndReturn: restores rsp, then applies the
// same constructor-return result rules as ResumeRspAndDispatch (non-positive
// jumpSize marks a new-object-range return) before returning acc in rax.
void AsmInterpreterCall::ResumeRspAndReturnBaseline(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndReturnBaseline));
    Register currentSp = r12;
    Register fpRegister = r10;
    // fp field offset relative to the current sp (frame state lies below sp).
    intptr_t fpOffset = static_cast<intptr_t>(AsmInterpretedFrame::GetFpOffset(false)) -
        static_cast<intptr_t>(AsmInterpretedFrame::GetSize(false));
    __ Movq(Operand(currentSp, static_cast<int32_t>(fpOffset)), fpRegister);
    __ Movq(fpRegister, rsp);

    // Check result
    Register ret = r13;
    Register jumpSizeRegister = rbx;
    Label getThis;
    Label notUndefined;
    Label normalReturn;
    Label newObjectRangeReturn;
    __ Cmpq(0, jumpSizeRegister);
    __ Jg(&normalReturn);

    __ Bind(&newObjectRangeReturn);
    {
        __ Cmpq(JSTaggedValue::Undefined().GetRawData(), ret);
        __ Jne(&notUndefined);

        // acc is undefined: substitute this from the frame.
        __ Bind(&getThis);
        intptr_t thisOffset = static_cast<intptr_t>(AsmInterpretedFrame::GetThisOffset(false)) -
            static_cast<intptr_t>(AsmInterpretedFrame::GetSize(false));
        __ Movq(Operand(currentSp, static_cast<int32_t>(thisOffset)), ret);
        __ Jmp(&normalReturn);

        // acc is not undefined
        __ Bind(&notUndefined);
        {
            Register temp = rax;
            Label notEcmaObject;
            // Tag test: any bits in TAG_HEAPOBJECT_MASK set means acc is not a heap object.
            __ Movabs(JSTaggedValue::TAG_HEAPOBJECT_MASK, temp);
            __ And(ret, temp);
            __ Cmpq(0, temp);
            __ Jne(&notEcmaObject);
            // acc is heap object
            __ Movq(Operand(ret, 0), temp);  // hclass
            __ Movl(Operand(temp, JSHClass::BIT_FIELD_OFFSET), temp);
            __ Cmpb(static_cast<int32_t>(JSType::ECMA_OBJECT_LAST), temp);
            __ Ja(&notEcmaObject);
            __ Cmpb(static_cast<int32_t>(JSType::ECMA_OBJECT_FIRST), temp);
            __ Jb(&notEcmaObject);
            // acc is ecma object: keep it as the result.
            __ Jmp(&normalReturn);

            __ Bind(&notEcmaObject);
            {
                // load constructor kind to decide between substituting this
                // (base constructor) and returning the primitive as-is.
                intptr_t funcOffset = AsmInterpretedFrame::GetFunctionOffsetAsIntptr(false) -
                    AsmInterpretedFrame::GetSizeAsIntptr(false);
                __ Movq(Operand(currentSp, static_cast<int32_t>(funcOffset)), temp);
                __ Movq(Operand(temp, JSFunctionBase::METHOD_OFFSET), temp);
                __ Movq(Operand(temp, Method::EXTRA_LITERAL_INFO_OFFSET), temp);
                __ Shr(MethodLiteral::FunctionKindBits::START_BIT, temp);
                __ Andl((1LU << MethodLiteral::FunctionKindBits::SIZE) - 1, temp);
                __ Cmpl(static_cast<int32_t>(FunctionKind::CLASS_CONSTRUCTOR), temp);
                __ Jbe(&getThis);  // constructor is base
                // fall through
            }
        }
    }
    __ Bind(&normalReturn);
    __ Movq(ret, rax);
    __ Ret();
}
1413 
1414 // ResumeCaughtFrameAndDispatch(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
1415 //     uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter)
1416 // GHC calling convention
1417 // %r13 - glue
1418 // %rbp - sp
1419 // %r12 - pc
1420 // %rbx - constantPool
1421 // %r14 - profileTypeInfo
1422 // %rsi - acc
1423 // %rdi - hotnessCounter
ResumeCaughtFrameAndDispatch(ExtendedAssembler *assembler)1424 void AsmInterpreterCall::ResumeCaughtFrameAndDispatch(ExtendedAssembler *assembler)
1425 {
1426     __ BindAssemblerStub(RTSTUB_ID(ResumeCaughtFrameAndDispatch));
1427     Register glueRegister = __ GlueRegister();
1428     Register pcRegister = r12;
1429 
1430     Label dispatch;
1431     Register fpRegister = r11;
1432     __ Movq(Operand(glueRegister, JSThread::GlueData::GetLastFpOffset(false)), fpRegister);
1433     __ Cmpq(0, fpRegister);
1434     __ Jz(&dispatch);
1435     __ Movq(fpRegister, rsp);  // resume rsp
1436     __ Bind(&dispatch);
1437     {
1438         Register opcodeRegister = rax;
1439         __ Movzbq(Operand(pcRegister, 0), opcodeRegister);
1440         Register bcStubRegister = r11;
1441         __ Movq(Operand(glueRegister, opcodeRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
1442             bcStubRegister);
1443         __ Jmp(bcStubRegister);
1444     }
1445 }
1446 
1447 // ResumeUncaughtFrameAndReturn(uintptr_t glue)
1448 // GHC calling convention
1449 // %r13 - glue
1450 // %rbp - sp
1451 // %r12 - acc
ResumeUncaughtFrameAndReturn(ExtendedAssembler *assembler)1452 void AsmInterpreterCall::ResumeUncaughtFrameAndReturn(ExtendedAssembler *assembler)
1453 {
1454     __ BindAssemblerStub(RTSTUB_ID(ResumeUncaughtFrameAndReturn));
1455     Register glueRegister = __ GlueRegister();
1456     Register acc(r12);
1457     Register cppRet(rax);
1458 
1459     Label ret;
1460     Register fpRegister = r11;
1461     __ Movq(Operand(glueRegister, JSThread::GlueData::GetLastFpOffset(false)), fpRegister);
1462     __ Cmpq(0, fpRegister);
1463     __ Jz(&ret);
1464     __ Movq(fpRegister, rsp);  // resume rsp
1465     __ Bind(&ret);
1466     // this method will return to Execute(cpp calling convention), and the return value should be put into rax.
1467     __ Movq(acc, cppRet);
1468     __ Ret();
1469 }
1470 
// ResumeRspAndRollback(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
//     uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter, size_t jumpSize)
// GHC calling convention
// %r13 - glue
// %rbp - sp
// %r12 - pc
// %rbx - constantPool
// %r14 - profileTypeInfo
// %rsi - acc
// %rdi - hotnessCounter
// %r8  - jumpSizeAfterCall
void AsmInterpreterCall::ResumeRspAndRollback(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndRollback));
    Register glueRegister = __ GlueRegister();
    Register spRegister = rbp;
    Register pcRegister = r12;
    Register ret = rsi;
    Register jumpSizeRegister = r8;

    // frameStateBase = sp - sizeof(AsmInterpretedFrame): the frame record sits
    // immediately below the interpreted sp.
    Register frameStateBaseRegister = r11;
    __ Movq(spRegister, frameStateBaseRegister);
    __ Subq(AsmInterpretedFrame::GetSize(false), frameStateBaseRegister);

    __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetBaseOffset(false)), spRegister);  // update sp
    __ Addq(jumpSizeRegister, pcRegister);  // newPC = pc + jumpSizeAfterCall
    Register opcodeRegister = rax;
    __ Movzbq(Operand(pcRegister, 0), opcodeRegister);  // opcode byte at the new pc

    // NOTE(review): acc is reloaded from the frame's *function* slot, not an acc
    // slot — presumably the rollback re-dispatches with the callee function as
    // the accumulator; confirm against the callers of this stub.
    __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFunctionOffset(false)), ret); // restore acc

    __ Movq(Operand(frameStateBaseRegister, AsmInterpretedFrame::GetFpOffset(false)), rsp);   // resume rsp
    // Tail-jump to the bytecode handler for the new opcode.
    Register bcStubRegister = r11;
    __ Movq(Operand(glueRegister, opcodeRegister, Times8, JSThread::GlueData::GetBCStubEntriesOffset(false)),
        bcStubRegister);
    __ Jmp(bcStubRegister);
}
1508 
// Saves all caller-managed general registers except r11 (callee-saved registers
// are preserved by the callee itself), calls the target held in r11 under an
// OPTIMIZED_FRAME, then restores the registers and returns.
// Note: rax is saved and restored too, so the r11 callee's return value is
// discarded by this helper.
void AsmInterpreterCall::PreserveMostCall(ExtendedAssembler* assembler)
{
    // * layout as the following:
    //               +--------------------------+ ---------
    //               |       . . . . .          |         ^
    // callerSP ---> |--------------------------|         |
    //               |       returnAddr         |         |
    //               |--------------------------|   OptimizedFrame
    //               |       callsiteFp         |         |
    //       fp ---> |--------------------------|         |
    //               |     OPTIMIZED_FRAME      |         v
    //               +--------------------------+ ---------
    //               |           rdi            |
    //               +--------------------------+
    //               |           rsi            |
    //               +--------------------------+
    //               |           rdx            |
    //               +--------------------------+
    //               |           rcx            |
    //               +--------------------------+
    //               |           r8             |
    //               +--------------------------+
    //               |           r9             |
    //               +--------------------------+
    //               |           r10            |
    //               +--------------------------+
    //               |           rax            |
    //               +--------------------------+
    //               |          align           |
    // calleeSP ---> +--------------------------+
    {
        // prologue to save rbp, frametype, and update rbp.
        __ Pushq(rbp);
        __ Pushq(static_cast<int64_t>(FrameType::OPTIMIZED_FRAME)); // set frame type
        __ Leaq(Operand(rsp, FRAME_SLOT_SIZE), rbp); // skip frame type
    }
    // 9 slots: 8 preserved registers plus one alignment slot (slot 0, "align" above).
    int32_t PreserveRegisterIndex = 9;
    // rdi,rsi,rdx,rcx,r8,r9,r10,rax should be preserved,
    // other general registers are callee saved register, callee will save them.
    __ Subq(PreserveRegisterIndex * FRAME_SLOT_SIZE, rsp);
    // Stores walk the index down from 8 to 1; slot 0 stays unused for alignment.
    __ Movq(rdi, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(rsi, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(rdx, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(rcx, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(r8, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(r9, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(r10, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Movq(rax, Operand(rsp, FRAME_SLOT_SIZE * (--PreserveRegisterIndex)));
    __ Callq(r11);
    // Reloads walk the index back up from 1 to 8, mirroring the stores.
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rax);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), r10);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), r9);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), r8);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rcx);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rdx);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rsi);
    __ Movq(Operand(rsp, FRAME_SLOT_SIZE * (PreserveRegisterIndex++)), rdi);
    {
        // epilogue to restore rsp, rbp.
        // need add the frametype slot
        __ Addq(PreserveRegisterIndex * FRAME_SLOT_SIZE + FRAME_SLOT_SIZE, rsp);
        __ Popq(rbp);
        __ Ret();
    }
}
1576 
// ASMFastWriteBarrier(GateRef glue, GateRef obj, GateRef offset, GateRef value)
// c calling convention, but preserve all general registers except %r11
// %rdi - glue
// %rsi - obj
// %rdx - offset
// %rcx - value
void AsmInterpreterCall::ASMFastWriteBarrier(ExtendedAssembler* assembler)
{
    // Fast path of the post-write barrier: classify `value`'s region by the flag
    // byte at the start of its region, and either return without any call or
    // fall back to a common stub via PreserveMostCall.
    // valid region flag are as follows, assume it will be ALWAYS VALID.
    // Judge the region of value with:
    //                          "young"            "sweepable share"  "readonly share"
    // region flag:         0x08, 0x09, [0x0A, 0x11], [0x12, 0x14],     0x15
    // value is share:                                [0x12,            0x15] =>  valueMaybeSweepableShare
    // readonly share:                                                  0x15  =>  return
    // sweepable share:                               [0x12, 0x14]            =>  needShareBarrier
    // value is not share:  0x08, 0x09, [0x0A, 0x11],                         =>  valueNotShare
    // value is young :           0x09                                        =>  needCallNotShare
    // value is not young : 0x08,       [0x0A, 0x11],                         =>  checkMark
    ASSERT(GENERAL_YOUNG_BEGIN <= IN_YOUNG_SPACE && IN_YOUNG_SPACE < SHARED_SPACE_BEGIN &&
        SHARED_SPACE_BEGIN <= SHARED_SWEEPABLE_SPACE_BEGIN && SHARED_SWEEPABLE_SPACE_END < IN_SHARED_READ_ONLY_SPACE &&
        IN_SHARED_READ_ONLY_SPACE == HEAP_SPACE_END);
    __ BindAssemblerStub(RTSTUB_ID(ASMFastWriteBarrier));
    Label needCall;
    Label checkMark;
    Label needCallNotShare;
    Label needShareBarrier;
    Label valueNotShare;
    Label valueMaybeSweepableShare;
    {
        // int8_t *valueRegion = value & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t valueFlag = *valueRegion
        // if (valueFlag >= SHARED_SWEEPABLE_SPACE_BEGIN){
        //    goto valueMaybeSweepableShare
        // }

        __ Movabs(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), r11); // r11 is the mask to get the region.
        __ And(rcx, r11); // r11 is the region address of value.
        __ Movzbl(Operand(r11, 0), r11); // r11 is the flag load from region of value.
        __ Cmpl(Immediate(RegionSpaceFlag::SHARED_SWEEPABLE_SPACE_BEGIN), r11);
        __ Jae(&valueMaybeSweepableShare);
        // if value may be SweepableShare, goto valueMaybeSweepableShare
    }
    __ Bind(&valueNotShare);
    {
        // valueNotShare:
        // if (valueFlag != IN_YOUNG_SPACE){
        //      goto checkMark
        // }
        // int8_t *objRegion = obj & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t objFlag = *objRegion
        // if (objFlag != IN_YOUNG_SPACE){
        //    goto needCallNotShare
        // }

        __ Cmpl(Immediate(RegionSpaceFlag::IN_YOUNG_SPACE), r11);
        __ Jne(&checkMark);
        // if value is not in young, goto checkMark

        __ Movabs(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), r11);
        __ And(rsi, r11); // r11 is the region address of obj.
        __ Movzbl(Operand(r11, 0), r11); // r11 is the flag load from region of obj.
        __ Cmpl(Immediate(RegionSpaceFlag::IN_YOUNG_SPACE), r11);
        __ Jne(&needCallNotShare);
        // if obj is not in young, goto needCallNotShare (old -> young store needs
        // a remembered-set update in the slow path).
    }

    __ Bind(&checkMark);
    {
        // checkMark:
        // int8_t GCStateBitField = *(glue+GCStateBitFieldOffset)
        // if (GCStateBitField & JSThread::CONCURRENT_MARKING_BITFIELD_MASK != 0) {
        //    goto needCallNotShare
        // }
        // return

        __ Movl(Operand(rdi, JSThread::GlueData::GetGCStateBitFieldOffset(false)), r11);
        __ Testb(Immediate(JSThread::CONCURRENT_MARKING_BITFIELD_MASK), r11);
        __ Jne(&needCallNotShare);
        // if GCState is not READY_TO_MARK, go to needCallNotShare.
        __ Ret();
    }

    __ Bind(&valueMaybeSweepableShare);
    {
        // valueMaybeSweepableShare:
        // if (valueFlag != IN_SHARED_READ_ONLY_SPACE){
        //    goto needShareBarrier
        // }
        // return  (read-only shared values never need a barrier)
        __ Cmpl(Immediate(RegionSpaceFlag::IN_SHARED_READ_ONLY_SPACE), r11);
        __ Jne(&needShareBarrier);
        __ Ret();
    }

    __ Bind(&needCallNotShare);
    {
        // Load the SetNonSValueWithBarrier common-stub entry into r11 for the
        // PreserveMostCall at needCall.
        int32_t NonSValueBarrier = static_cast<int32_t>(JSThread::GlueData::GetCOStubEntriesOffset(false)) +
            kungfu::CommonStubCSigns::SetNonSValueWithBarrier * FRAME_SLOT_SIZE;
        __ Movq(Operand(rdi, NonSValueBarrier), r11);
    }
    __ Bind(&needCall);
    {
        // r11 holds a common-stub entry; PreserveMostCall saves the caller's
        // registers, calls it, and returns.
        PreserveMostCall(assembler);
    }
    __ Bind(&needShareBarrier);
    {
        // Sweepable shared value: take the shared fast path; it jumps back to
        // needCall when it must fall back to the stub.
        ASMFastSharedWriteBarrier(assembler, needCall);
    }
}
1686 
1687 // ASMWriteBarrierWithEden(GateRef glue, GateRef obj, GateRef offset, GateRef value)
1688 // c calling convention, but preserve all general registers except %x15
1689 // %rd1 - glue
1690 // %rsi - obj
1691 // %rdx - offset
1692 // %rcx - value
ASMWriteBarrierWithEden(ExtendedAssembler* assembler)1693 void AsmInterpreterCall::ASMWriteBarrierWithEden(ExtendedAssembler* assembler)
1694 {
1695     __ BindAssemblerStub(RTSTUB_ID(ASMWriteBarrierWithEden));
1696     // Just for compitability, not a fast implement, should be refactored when enable EdenBarrier.
1697     int32_t EdenBarrierOffset = static_cast<int32_t>(JSThread::GlueData::GetCOStubEntriesOffset(false)) +
1698         kungfu::CommonStubCSigns::SetValueWithEdenBarrier * FRAME_SLOT_SIZE;
1699     __ Movq(Operand(rdi, EdenBarrierOffset), r11);
1700     PreserveMostCall(assembler);
1701 }
1702 
// Fast path of the shared-heap write barrier: try to set the localToShare
// bitset bit for the written slot inline; fall back to the SetSValueWithBarrier
// common stub (via `needcall`) when that is not possible.
// %rdi - glue
// %rsi - obj
// %rdx - offset
// %rcx - value
void AsmInterpreterCall::ASMFastSharedWriteBarrier(ExtendedAssembler* assembler, Label& needcall)
{
    Label checkBarrierForSharedValue;
    Label restoreScratchRegister;
    Label callSharedBarrier;
    {
        // int8_t *objRegion = obj & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t objFlag = *objRegion
        // if (objFlag >= SHARED_SPACE_BEGIN){
        //    // share to share, just check the barrier
        //    goto checkBarrierForSharedValue
        // }
        __ Movabs(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), r11); // r11 is the mask to get the region.
        __ And(rsi, r11); // r11: region address of obj.
        __ Movzbl(Operand(r11, 0), r11); // r11: the flag load from region of obj.
        __ Cmpl(Immediate(RegionSpaceFlag::SHARED_SPACE_BEGIN), r11);
        __ Jae(&checkBarrierForSharedValue); // if objflag >= SHARED_SPACE_BEGIN  => checkBarrierForSharedValue
    }
    {
        // int8_t *objRegion = obj & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t *localToShareSet = *(objRegion + LocalToShareSetOffset)
        // if (localToShareSet == 0){
        //    goto callSharedBarrier
        // }
        __ Movabs(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), r11);  // r11 is the mask to get the region.
        __ And(rsi, r11); // r11: region address of obj.
        __ Movq(Operand(r11, Region::PackedData::GetLocalToShareSetOffset(false)), r11);
        // r11 is localToShareSet for obj region.
        __ Cmpq(Immediate(0), r11);
        __ Je(&callSharedBarrier); // if localToShareSet == 0  => callSharedBarrier
    }
    {
        // r12, r13 will be used as scratch register, spill them.
        {
            __ Pushq(r12);
            __ Pushq(r13);
        }
        // int64_t objOffset = obj & DEFAULT_REGION_MASK
        // int64_t slotOffset = objOffset + offset
        // int8_t lowSlotOffset = slotOffset & 0xff

        __ Movabs(DEFAULT_REGION_MASK, r12);
        __ And(rsi, r12); // obj & DEFAULT_REGION_MASK => r12 is obj's offset to region
        __ Addq(rdx, r12); // r12 is slotAddr's offset to region
        __ Movzbl(r12, r13); // r13 is low 8 bit of slotAddr's offset to region

        // the logic to get byteIndex in stub_builder.cpp
        //               [63-------------------------35][34------------------------8][7---3][2-0]
        // slotOffset:    aaaaaaaaaaaaaaaaaaaaaaaaaaaaa  bbbbbbbbbbbbbbbbbbbbbbbbbbb  ccccc  ddd
        // 1. bitOffsetPtr = LSR TAGGED_TYPE_SIZE_LOG(3) slotOffset
        // bitOffsetPtr:     aaaaaaaaaaaaaaaaaaaaaaaaaa  aaabbbbbbbbbbbbbbbbbbbbbbbb  bbbcc  ccc
        // 2. bitOffset = TruncPtrToInt32 bitOffsetPtr
        // bitOffset:                                       bbbbbbbbbbbbbbbbbbbbbbbb  bbbcc  ccc
        // 3. index = LSR BIT_PER_WORD_LOG2(5) bitOffset
        // index:                                                bbbbbbbbbbbbbbbbbbb  bbbbb  bbb
        // 4. byteIndex = Mul index BYTE_PER_WORD(4)
        // byteIndex:                                          bbbbbbbbbbbbbbbbbbbbb  bbbbb  b00

        // the logic to get byteIndex here (same result in one shift + one mask):
        //               [63-------------------------35][34------------------------8][7---3][2-0]
        // slotOffset:    aaaaaaaaaaaaaaaaaaaaaaaaaaaaa  bbbbbbbbbbbbbbbbbbbbbbbbbbb  ccccc  ddd
        // 1. LSR (TAGGED_TYPE_SIZE_LOG + GCBitset::BIT_PER_WORD_LOG2 - GCBitset::BYTE_PER_WORD_LOG2)(6) slotOffset
        // r12:                 aaaaaaaaaaaaaaaaaaaaaaa  aaaaaabbbbbbbbbbbbbbbbbbbbb  bbbbb  bcc
        // indexMask:     00000000000000000000000000000  000000111111111111111111111  11111  100
        // 2. And r12 indexMask
        // byteIndex:                                          bbbbbbbbbbbbbbbbbbbbb  bbbbb  b00
        constexpr uint32_t byteIndexMask = static_cast<uint32_t>(0xffffffffffffffff >> TAGGED_TYPE_SIZE_LOG) >>
            GCBitset::BIT_PER_WORD_LOG2 << GCBitset::BYTE_PER_WORD_LOG2;
        static_assert(byteIndexMask == 0x1ffffffc && "LocalToShareSet is changed?");
        __ Shrq(TAGGED_TYPE_SIZE_LOG + GCBitset::BIT_PER_WORD_LOG2 - GCBitset::BYTE_PER_WORD_LOG2, r12);
        __ Andq(byteIndexMask, r12); // r12 is byteIndex
        __ Addq(RememberedSet::GCBITSET_DATA_OFFSET, r11); // r11 is bitsetData addr
        __ Addq(r12, r11);  // r11 is the addr of bitset value
        __ Movl(Operand(r11, 0), r12); // r12: oldsetValue

        // the logic to get mask in stub_builder.cpp
        //               [63-------------------------35][34------------------------8][7---3][2-0]
        // bitOffset:                                       bbbbbbbbbbbbbbbbbbbbbbbb  bbbcc  ccc
        // bitPerWordMask:                                                               11  111
        // indexInWord = And bitoffset bitPerWordMask
        // indexInWord:                                                                  cc  ccc
        // mask = 1 << indexInWord

        // the logic to test bit set value here:
        //               [63-------------------------35][34------------------------8][7---3][2-0]
        // slotOffset:    aaaaaaaaaaaaaaaaaaaaaaaaaaaaa  bbbbbbbbbbbbbbbbbbbbbbbbbbb  ccccc  ddd
        // lowSlotOffset:                                                             ccccc  ddd
        // indexInWord = Shrl TAGGED_TYPE_SIZE_LOG lowSlotOffset
        // indexInWord:                                                                  cc  ccc
        __ Shrl(TAGGED_TYPE_SIZE_LOG, r13);

        // if "r13" position in r12 is 1, goto restoreScratchRegister;
        // if "r13" position in r12 is 0, set it to 1 and store r12 to r11(addr of bitset value)
        // Btsl tests bit r13 of r12 (old bit lands in CF) and sets it.
        __ Btsl(r13, r12);
        __ Jb(&restoreScratchRegister);
        __ Movl(r12, Operand(r11, 0));
    }
    __ Bind(&restoreScratchRegister);
    {
        __ Popq(r13);
        __ Popq(r12);
    }
    __ Bind(&checkBarrierForSharedValue);
    {
        // checkBarrierForSharedValue:
        // int8_t GCStateBitField = *(glue+SharedGCStateBitFieldOffset)
        // if (GCStateBitField & JSThread::SHARED_CONCURRENT_MARKING_BITFIELD_MASK != 0) {
        //    goto callSharedBarrier
        // }
        // return
        __ Movl(Operand(rdi, JSThread::GlueData::GetSharedGCStateBitFieldOffset(false)), r11);
        __ Testb(Immediate(JSThread::SHARED_CONCURRENT_MARKING_BITFIELD_MASK), r11);
        __ Jne(&callSharedBarrier);
        // if shared GCState is not READY_TO_MARK, go to callSharedBarrier.
        __ Ret();
    }
    __ Bind(&callSharedBarrier);
    {
        // NOTE(review): local is named NonSValueBarrier but it loads the
        // SetSValueWithBarrier entry — misleading name only; behavior is correct.
        int32_t NonSValueBarrier = static_cast<int32_t>(JSThread::GlueData::GetCOStubEntriesOffset(false)) +
            kungfu::CommonStubCSigns::SetSValueWithBarrier * FRAME_SLOT_SIZE;
        __ Movq(Operand(rdi, NonSValueBarrier), r11);
        __ Jmp(&needcall);
    }
}
1832 
// Emits a stack-overflow check (jumping to stackOverflow on failure), then the
// code that pushes `argc` undefined values onto the interpreter stack (see
// StackOverflowCheck / PushUndefinedWithArgc).  op1/op2 are scratch registers
// for the check.
void AsmInterpreterCall::PushUndefinedWithArgcAndCheckStack(ExtendedAssembler *assembler, Register glue, Register argc,
                                                            Register op1, Register op2, Label *stackOverflow)
{
    ASSERT(stackOverflow != nullptr);
    StackOverflowCheck(assembler, glue, argc, op1, op2, stackOverflow);
    PushUndefinedWithArgc(assembler, argc);
}
1840 
// Resets rsp to `fp`, builds an ASM_BRIDGE_FRAME, calls the CallRuntime
// trampoline with ThrowStackOverflowException (0 args), then tears the bridge
// frame down and returns.  `op` is a caller-provided scratch register.
void AsmInterpreterCall::ThrowStackOverflowExceptionAndReturn(ExtendedAssembler *assembler, Register glue, Register fp,
    Register op)
{
    if (fp != rsp) {
        __ Movq(fp, rsp);
    }
    // NOTE(review): `op` is loaded with the ThrowStackOverflowException entry
    // here but never used afterwards (the call below goes through CallRuntime
    // via r10) — confirm whether these two loads are dead.
    __ Movq(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException, op);
    __ Movq(Operand(glue, op, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)), op);
    // NOTE(review): glue is copied into r13 — presumably the callee expects it
    // there; confirm against the CallRuntime convention.
    if (glue != r13) {
        __ Movq(glue, r13);
    }

    __ Pushq(rbp);
    __ Pushq(static_cast<int64_t>(FrameType::ASM_BRIDGE_FRAME)); // set frame type
    __ Leaq(Operand(rsp, FRAME_SLOT_SIZE), rbp); // skip frame type

    __ Pushq(r10); // caller save
    __ Pushq(0); // argc
    __ Pushq(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException); // runtime id
    __ Movq(glue, rax); // glue
    __ Movq(kungfu::RuntimeStubCSigns::ID_CallRuntime, r10);
    __ Movq(Operand(rax, r10, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)), r10);
    __ Callq(r10); // call CallRuntime
    __ Addq(2 * FRAME_SLOT_SIZE, rsp); // 2: skip argc and runtime_id
    __ Popq(r10);
    __ Addq(FRAME_SLOT_SIZE, rsp); // skip frame type
    __ Popq(rbp);
    __ Ret();
}
1870 
// Same as ThrowStackOverflowExceptionAndReturn, but after calling the runtime
// it additionally unwinds rsp/rbp so that control returns into the surrounding
// AOT frame rather than the immediate caller frame.
void AsmInterpreterCall::ThrowStackOverflowExceptionAndReturnToAotFrame(ExtendedAssembler *assembler, Register glue,
    Register fp, Register op)
{
    if (fp != rsp) {
        __ Movq(fp, rsp);
    }
    // NOTE(review): `op` is loaded with the ThrowStackOverflowException entry
    // here but never used afterwards (the call below goes through CallRuntime
    // via r10) — confirm whether these two loads are dead.
    __ Movq(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException, op);
    __ Movq(Operand(glue, op, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)), op);
    // NOTE(review): glue is copied into r13 — presumably the callee expects it
    // there; confirm against the CallRuntime convention.
    if (glue != r13) {
        __ Movq(glue, r13);
    }

    __ Pushq(rbp);
    __ Pushq(static_cast<int64_t>(FrameType::ASM_BRIDGE_FRAME)); // set frame type
    __ Leaq(Operand(rsp, FRAME_SLOT_SIZE), rbp); // skip frame type

    __ Pushq(r10); // caller save
    __ Pushq(0); // argc
    __ Pushq(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException); // runtime id
    __ Movq(glue, rax); // glue
    __ Movq(kungfu::RuntimeStubCSigns::ID_CallRuntime, r10);
    __ Movq(Operand(rax, r10, Times8, JSThread::GlueData::GetRTStubEntriesOffset(false)), r10);
    __ Callq(r10); // call CallRuntime
    __ Addq(2 * FRAME_SLOT_SIZE, rsp); // 2: skip argc and runtime_id
    __ Popq(r10);
    __ Addq(FRAME_SLOT_SIZE, rsp); // skip frame type
    __ Popq(rbp);
    // Unwind one more level: move rsp up to the restored rbp and reload rbp from
    // the enclosing frame.  NOTE(review): confirm the -2 slot layout against
    // AsmBridgeFrame.
    __ Movq(rbp, rsp);
    __ Movq(Operand(rbp, -2 * FRAME_SLOT_SIZE), rbp); // 2: skip returnAddr and frameType in AsmBridgeFrame
    __ Ret();
}
1902 
// Intentionally emits no code on x64; kept as a stub so shared call sites
// across target backends compile.
void AsmInterpreterCall::HasPendingException([[maybe_unused]] ExtendedAssembler *assembler,
    [[maybe_unused]] Register threadRegister)
{
}
1907 #undef __
1908 }  // namespace panda::ecmascript::x64