1 /*
2 * Copyright (c) 2022-2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "ecmascript/compiler/trampoline/aarch64/common_call.h"
17
18 #include "ecmascript/compiler/assembler/assembler.h"
19 #include "ecmascript/compiler/argument_accessor.h"
20 #include "ecmascript/compiler/rt_call_signature.h"
21 #include "ecmascript/ecma_runtime_call_info.h"
22 #include "ecmascript/frames.h"
23 #include "ecmascript/js_function.h"
24 #include "ecmascript/mem/machine_code.h"
25 #include "ecmascript/method.h"
26 #include "ecmascript/js_thread.h"
27 #include "ecmascript/js_generator_object.h"
28 #include "ecmascript/message_string.h"
29 #include "ecmascript/runtime_call_id.h"
30
31 namespace panda::ecmascript::aarch64 {
32 using Label = panda::ecmascript::Label;
33 #define __ assembler->
34
35 // Generate code for entering asm interpreter
36 // c++ calling convention
37 // Input: glue - %X0
38 // callTarget - %X1
39 // method - %X2
40 // callField - %X3
41 // argc - %X4
42 // argv - %X5(<callTarget, newTarget, this> are at the beginning of argv)
AsmInterpreterEntry(ExtendedAssembler *assembler)43 void AsmInterpreterCall::AsmInterpreterEntry(ExtendedAssembler *assembler)
44 {
45 __ BindAssemblerStub(RTSTUB_ID(AsmInterpreterEntry));
46 Label target;
47 size_t begin = __ GetCurrentPosition();
48 PushAsmInterpEntryFrame(assembler);
49 __ Bl(&target);
50 PopAsmInterpEntryFrame(assembler);
51 size_t end = __ GetCurrentPosition();
52 if ((end - begin) != FrameCompletionPos::ARM64EntryFrameDuration) {
53 LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::ARM64EntryFrameDuration
54 << "This frame has been modified, and the offset EntryFrameDuration should be updated too.";
55 }
56 __ Ret();
57
58 __ Bind(&target);
59 {
60 AsmInterpEntryDispatch(assembler);
61 }
62 }
63
64 // Input: glue - %X0
65 // callTarget - %X1
66 // method - %X2
67 // callField - %X3
68 // argc - %X4
69 // argv - %X5(<callTarget, newTarget, this> are at the beginning of argv)
// Dispatch on the call target's type: JS functions go to the interpreter
// fast path (or native entry if the method is native), other callable
// objects go to the native entry, and non-callables throw.
// Register layout is the C++ entry convention documented above
// (glue=X0, callTarget=X1, method=X2, callField=X3, argc=X4, argv=X5).
void AsmInterpreterCall::AsmInterpEntryDispatch(ExtendedAssembler *assembler)
{
    Label notJSFunction;
    Label callNativeEntry;
    Label callJSFunctionEntry;
    Label notCallable;
    Register glueRegister(X0);
    Register argcRegister(X4, W);
    Register argvRegister(X5);
    Register callTargetRegister(X1);
    Register callFieldRegister(X3);
    Register bitFieldRegister(X16);
    Register tempRegister(X17); // can not be used to store any variable
    Register functionTypeRegister(X18, W);
    // Load hclass of the call target, then its bit field; the low byte of the
    // bit field encodes the JSType.
    __ Ldr(tempRegister, MemoryOperand(callTargetRegister, TaggedObject::HCLASS_OFFSET));
    __ Ldr(bitFieldRegister, MemoryOperand(tempRegister, JSHClass::BIT_FIELD_OFFSET));
    __ And(functionTypeRegister, bitFieldRegister.W(), LogicalImmediate::Create(0xFF, RegWSize));
    // Range check: JS_FUNCTION_FIRST <= type <= JS_FUNCTION_LAST.
    __ Mov(tempRegister.W(), Immediate(static_cast<int64_t>(JSType::JS_FUNCTION_FIRST)));
    __ Cmp(functionTypeRegister, tempRegister.W());
    __ B(Condition::LO, &notJSFunction);
    __ Mov(tempRegister.W(), Immediate(static_cast<int64_t>(JSType::JS_FUNCTION_LAST)));
    __ Cmp(functionTypeRegister, tempRegister.W());
    __ B(Condition::LS, &callJSFunctionEntry);
    __ Bind(&notJSFunction);
    {
        // Not a JS function: check the callable bit of the hclass bit field.
        __ Tst(bitFieldRegister,
            LogicalImmediate::Create(static_cast<int64_t>(1ULL << JSHClass::CallableBit::START_BIT), RegXSize));
        __ B(Condition::EQ, &notCallable);
        // fall through to the native entry for non-JSFunction callables
    }
    __ Bind(&callNativeEntry);
    CallNativeEntry(assembler);
    __ Bind(&callJSFunctionEntry);
    {
        // A JS function whose method is native still takes the native entry.
        __ Tbnz(callFieldRegister, MethodLiteral::IsNativeBit::START_BIT, &callNativeEntry);
        // fast path: skip the mandatory <callTarget, newTarget, this> triple
        // at the head of argv before entering the common call path.
        __ Add(argvRegister, argvRegister, Immediate(NUM_MANDATORY_JSFUNC_ARGS * JSTaggedValue::TaggedTypeSize()));
        JSCallCommonEntry(assembler, JSCallMode::CALL_ENTRY, FrameTransitionType::OTHER_TO_BASELINE_CHECK);
    }
    __ Bind(&notCallable);
    {
        // Tail-call the ThrowNotCallableException runtime stub via the
        // RTStub entry table hanging off glue.
        Register runtimeId(X11);
        Register trampoline(X12);
        __ Mov(runtimeId, Immediate(kungfu::RuntimeStubCSigns::ID_ThrowNotCallableException));
        // 3 : 3 means *8
        __ Add(trampoline, glueRegister, Operand(runtimeId, LSL, 3));
        __ Ldr(trampoline, MemoryOperand(trampoline, JSThread::GlueData::GetRTStubEntriesOffset(false)));
        __ Blr(trampoline);
        __ Ret();
    }
}
121
// Common call path shared by all PushCall*AndDispatch stubs and the entry
// dispatcher. Compares declared argument count with the actual count, then
// routes to the fast path (counts equal) or the slow path (pad / truncate),
// both of which converge on pushCallThis. Also emits the shared
// stack-overflow handler used by the push helpers.
void AsmInterpreterCall::JSCallCommonEntry(ExtendedAssembler *assembler,
    JSCallMode mode, FrameTransitionType type)
{
    Label stackOverflow;
    Register glueRegister = __ GlueRegister();
    Register fpRegister = __ AvailableRegister1();
    Register currentSlotRegister = __ AvailableRegister3();
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register argcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARGC);
    // Callers that jump (rather than call) into this entry already pushed
    // fp/lr, except for the baseline transitions which need it here.
    if (!kungfu::AssemblerModule::IsJumpToCallCommonEntry(mode) || type == FrameTransitionType::BASELINE_TO_OTHER ||
        type == FrameTransitionType::BASELINE_TO_BASELINE_CHECK) {
        __ PushFpAndLr();
    }
    // save fp
    __ Mov(fpRegister, Register(SP));
    __ Mov(currentSlotRegister, Register(SP));

    {
        // Reserve enough sp space to prevent stack parameters from being covered by cpu profiler.
        // Args are written through currentSlotRegister; SP is only lowered to
        // the stack limit as a reservation and restored before dispatch.
        [[maybe_unused]] TempRegister1Scope scope(assembler);
        Register tempRegister = __ TempRegister1();
        __ Ldr(tempRegister, MemoryOperand(glueRegister, JSThread::GlueData::GetStackLimitOffset(false)));
        __ Mov(Register(SP), tempRegister);
    }

    Register declaredNumArgsRegister = __ AvailableRegister2();
    GetDeclaredNumArgsFromCallField(assembler, callFieldRegister, declaredNumArgsRegister);

    Label slowPathEntry;
    Label fastPathEntry;
    Label pushCallThis;
    auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
    // argc >= 0 means the mode has a fixed arg count; negative means the
    // actual count arrives in argcRegister (call-range modes).
    if (argc >= 0) {
        __ Cmp(declaredNumArgsRegister, Immediate(argc));
    } else {
        __ Cmp(declaredNumArgsRegister, argcRegister);
    }
    __ B(Condition::NE, &slowPathEntry);
    __ Bind(&fastPathEntry);
    JSCallCommonFastPath(assembler, mode, &pushCallThis, &stackOverflow);
    __ Bind(&pushCallThis);
    PushCallThis(assembler, mode, &stackOverflow, type);
    __ Bind(&slowPathEntry);
    JSCallCommonSlowPath(assembler, mode, &fastPathEntry, &pushCallThis, &stackOverflow);

    __ Bind(&stackOverflow);
    if (kungfu::AssemblerModule::IsJumpToCallCommonEntry(mode)) {
        // Restore SP, then rebuild the GHC-convention interpreter registers
        // (X19 glue, X20 pc, X21 constpool, X22 profileTypeInfo, X23 acc)
        // before jumping to the ThrowStackOverflowException bytecode stub.
        __ Mov(Register(SP), fpRegister);
        [[maybe_unused]] TempRegister1Scope scope(assembler);
        Register temp = __ TempRegister1();
        // only glue and acc are useful in exception handler
        if (glueRegister.GetId() != X19) {
            __ Mov(Register(X19), glueRegister);
        }
        Register acc(X23);
        __ Mov(acc, Immediate(JSTaggedValue::VALUE_EXCEPTION));
        Register methodRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::METHOD);
        Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
        // Reload pc to make sure stack trace is right
        __ Mov(temp, callTargetRegister);
        __ Ldr(Register(X20), MemoryOperand(methodRegister, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET));
        // Reload constpool and profileInfo to make sure gc map work normally
        __ Ldr(Register(X22), MemoryOperand(temp, JSFunction::RAW_PROFILE_TYPE_INFO_OFFSET));
        __ Ldr(Register(X22), MemoryOperand(Register(X22), ProfileTypeInfoCell::VALUE_OFFSET));
        __ Ldr(Register(X21), MemoryOperand(methodRegister, Method::CONSTANT_POOL_OFFSET));

        __ Mov(temp, kungfu::BytecodeStubCSigns::ID_ThrowStackOverflowException);
        __ Add(temp, glueRegister, Operand(temp, UXTW, 3)); // 3: bc * 8
        __ Ldr(temp, MemoryOperand(temp, JSThread::GlueData::GetBCStubEntriesOffset(false)));
        __ Br(temp);
    } else {
        [[maybe_unused]] TempRegister1Scope scope(assembler);
        Register temp = __ TempRegister1();
        ThrowStackOverflowExceptionAndReturn(assembler, glueRegister, fpRegister, temp);
    }
}
198
// Fast path: declared argc equals actual argc, so push the arguments as-is.
// Call-range modes copy argc values from argv; fixed-argc modes push up to
// three args (in reverse order, since the stack grows downward).
void AsmInterpreterCall::JSCallCommonFastPath(ExtendedAssembler *assembler, JSCallMode mode, Label *pushCallThis,
    Label *stackOverflow)
{
    Register glueRegister = __ GlueRegister();
    auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
    Register currentSlotRegister = __ AvailableRegister3();
    // call range
    if (argc < 0) {
        Register numRegister = __ AvailableRegister2();
        Register argcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARGC);
        Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARGV);
        __ Mov(numRegister, argcRegister);
        [[maybe_unused]] TempRegister1Scope scope(assembler);
        Register opRegister = __ TempRegister1();
        PushArgsWithArgv(assembler, glueRegister, numRegister, argvRegister, opRegister,
                         currentSlotRegister, pushCallThis, stackOverflow);
    } else if (argc > 0) {
        // Push arg2, arg1, arg0 in that order so arg0 ends up closest to sp.
        if (argc > 2) { // 2: call arg2
            Register arg2 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG2);
            __ Str(arg2, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
        }
        if (argc > 1) {
            Register arg1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
            __ Str(arg1, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
        }
        if (argc > 0) {
            Register arg0 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
            __ Str(arg0, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
        }
    }
    // argc == 0: nothing to push; caller falls through to pushCallThis.
}
230
// Slow path: declared argc differs from actual argc. If the method has the
// "extra" flag, the actual argc is pushed first so the callee can recover
// it. Missing arguments (declared > actual) are filled with undefined and
// control rejoins the fast path; surplus arguments (declared < actual,
// no-extra case) are truncated to the declared count.
void AsmInterpreterCall::JSCallCommonSlowPath(ExtendedAssembler *assembler, JSCallMode mode,
                                              Label *fastPathEntry, Label *pushCallThis, Label *stackOverflow)
{
    Register glueRegister = __ GlueRegister();
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register argcRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARGC);
    Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARGV);
    Register currentSlotRegister = __ AvailableRegister3();
    Register arg0 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
    Register arg1 = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
    Label noExtraEntry;
    Label pushArgsEntry;

    auto argc = kungfu::AssemblerModule::GetArgcFromJSCallMode(mode);
    Register declaredNumArgsRegister = __ AvailableRegister2();
    __ Tbz(callFieldRegister, MethodLiteral::HaveExtraBit::START_BIT, &noExtraEntry);
    // extra entry: record the actual argc on the stack
    {
        [[maybe_unused]] TempRegister1Scope scope1(assembler);
        Register tempArgcRegister = __ TempRegister1();
        if (argc >= 0) {
            __ PushArgc(argc, tempArgcRegister, currentSlotRegister);
        } else {
            __ PushArgc(argcRegister, tempArgcRegister, currentSlotRegister);
        }
        // fall through
    }
    __ Bind(&noExtraEntry);
    {
        if (argc == 0) {
            // Actual argc is 0, so declared - actual == declared: pad the
            // whole declared count with undefined, then take the fast path.
            {
                [[maybe_unused]] TempRegister1Scope scope(assembler);
                Register tempRegister = __ TempRegister1();
                PushUndefinedWithArgc(assembler, glueRegister, declaredNumArgsRegister, tempRegister,
                                      currentSlotRegister, nullptr, stackOverflow);
            }
            __ B(fastPathEntry);
            return;
        }
        // diff = declared - actual; if negative, PushUndefinedWithArgc
        // branches to pushArgsEntry (declared < actual case below).
        [[maybe_unused]] TempRegister1Scope scope1(assembler);
        Register diffRegister = __ TempRegister1();
        if (argc >= 0) {
            __ Sub(diffRegister.W(), declaredNumArgsRegister.W(), Immediate(argc));
        } else {
            __ Sub(diffRegister.W(), declaredNumArgsRegister.W(), argcRegister.W());
        }
        [[maybe_unused]] TempRegister2Scope scope2(assembler);
        Register tempRegister = __ TempRegister2();
        PushUndefinedWithArgc(assembler, glueRegister, diffRegister, tempRegister,
                              currentSlotRegister, &pushArgsEntry, stackOverflow);
        __ B(fastPathEntry);
    }
    // declare < actual
    __ Bind(&pushArgsEntry);
    {
        // With the extra flag the full actual argument list is kept.
        __ Tbnz(callFieldRegister, MethodLiteral::HaveExtraBit::START_BIT, fastPathEntry);
        // no extra branch
        // arg1, declared must be 0
        if (argc == 1) {
            __ B(pushCallThis);
            return;
        }
        __ Cmp(declaredNumArgsRegister, Immediate(0));
        __ B(Condition::EQ, pushCallThis);
        // call range: push only the first `declared` args from argv
        if (argc < 0) {
            [[maybe_unused]] TempRegister1Scope scope(assembler);
            Register opRegister = __ TempRegister1();
            PushArgsWithArgv(assembler, glueRegister, declaredNumArgsRegister,
                             argvRegister, opRegister,
                             currentSlotRegister, nullptr, stackOverflow);
        } else if (argc > 0) {
            Label pushArgs0;
            if (argc > 2) { // 2: call arg2
                // declared is 2 or 1 now
                __ Cmp(declaredNumArgsRegister, Immediate(1));
                __ B(Condition::EQ, &pushArgs0);
                __ Str(arg1, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
            }
            if (argc > 1) {
                __ Bind(&pushArgs0);
                // declared is 1 now
                __ Str(arg0, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
            }
        }
        __ B(pushCallThis);
    }
}
319
// Returns the register holding `this` for the given call mode. For modes
// whose `this` lives in memory (AOT / entry calls), it is loaded from
// argv[-1] into defaultRegister first. (Name keeps the historical
// "Regsiter" spelling; callers depend on it.)
Register AsmInterpreterCall::GetThisRegsiter(ExtendedAssembler *assembler, JSCallMode mode, Register defaultRegister)
{
    switch (mode) {
        case JSCallMode::CALL_GETTER:
        case JSCallMode::CALL_THIS_ARG0:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG0);
        case JSCallMode::CALL_SETTER:
        case JSCallMode::CALL_THIS_ARG1:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
        case JSCallMode::CALL_THIS_ARG2:
        case JSCallMode::CALL_THIS_ARG2_WITH_RETURN:
        case JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV:
        case JSCallMode::SUPER_CALL_WITH_ARGV:
        case JSCallMode::SUPER_CALL_SPREAD_WITH_ARGV:
        case JSCallMode::CALL_THIS_WITH_ARGV:
        case JSCallMode::CALL_THIS_ARGV_WITH_RETURN:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG2);
        case JSCallMode::CALL_THIS_ARG3:
        case JSCallMode::CALL_THIS_ARG3_WITH_RETURN:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG3);
        case JSCallMode::CALL_FROM_AOT:
        case JSCallMode::CALL_ENTRY: {
            // `this` sits one slot below argv (argv[-1]).
            Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
            __ Ldur(defaultRegister, MemoryOperand(argvRegister, -FRAME_SLOT_SIZE));
            return defaultRegister;
        }
        default:
            LOG_ECMA(FATAL) << "this branch is unreachable";
            UNREACHABLE();
    }
    return INVALID_REG;
}
352
// Returns the register holding `new.target` for the given call mode.
// For AOT / entry calls it is loaded from argv[-2] into defaultRegister.
// (Name keeps the historical "Regsiter" spelling; callers depend on it.)
Register AsmInterpreterCall::GetNewTargetRegsiter(ExtendedAssembler *assembler, JSCallMode mode,
    Register defaultRegister)
{
    switch (mode) {
        case JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV:
        case JSCallMode::CALL_THIS_WITH_ARGV:
            // For plain constructor calls new.target is the call target itself.
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
        case JSCallMode::SUPER_CALL_WITH_ARGV:
        case JSCallMode::SUPER_CALL_SPREAD_WITH_ARGV:
            return __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG3);
        case JSCallMode::CALL_FROM_AOT:
        case JSCallMode::CALL_ENTRY: {
            Register argvRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
            // 2: new Target index
            __ Ldur(defaultRegister, MemoryOperand(argvRegister, -2 * FRAME_SLOT_SIZE));
            return defaultRegister;
        }
        default:
            LOG_ECMA(FATAL) << "this branch is unreachable";
            UNREACHABLE();
    }
    return INVALID_REG;
}
376
377 // void PushCallArgsxAndDispatch(uintptr_t glue, uintptr_t sp, uint64_t callTarget, uintptr_t method,
378 // uint64_t callField, ...)
379 // GHC calling convention
380 // Input1: for callarg0/1/2/3 Input2: for callrange
381 // X19 - glue // X19 - glue
382 // FP - sp // FP - sp
383 // X20 - callTarget // X20 - callTarget
384 // X21 - method // X21 - method
385 // X22 - callField // X22 - callField
386 // X23 - arg0 // X23 - actualArgc
387 // X24 - arg1 // X24 - argv
388 // X25 - arg2
// Stub: push a variable-length argument range plus explicit `this`, then
// dispatch (GHC convention, see the comment block above).
void AsmInterpreterCall::PushCallThisRangeAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisRangeAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}
394
// Stub: push a variable-length argument range (no explicit `this`), then dispatch.
void AsmInterpreterCall::PushCallRangeAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallRangeAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}
400
// Stub: push constructor-call (`new`) arguments with argv, then dispatch.
void AsmInterpreterCall::PushCallNewAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallNewAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_CONSTRUCTOR_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}
406
// Stub: push super-call arguments with argv, then dispatch.
void AsmInterpreterCall::PushSuperCallAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushSuperCallAndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::SUPER_CALL_WITH_ARGV, FrameTransitionType::OTHER_TO_OTHER);
}
412
// Stub: push a fixed 3-argument call (no explicit `this`), then dispatch.
void AsmInterpreterCall::PushCallArgs3AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArgs3AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG3, FrameTransitionType::OTHER_TO_OTHER);
}
418
// Stub: push a fixed 2-argument call (no explicit `this`), then dispatch.
void AsmInterpreterCall::PushCallArgs2AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArgs2AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG2, FrameTransitionType::OTHER_TO_OTHER);
}
424
// Stub: push a fixed 1-argument call (no explicit `this`), then dispatch.
void AsmInterpreterCall::PushCallArg1AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArg1AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG1, FrameTransitionType::OTHER_TO_OTHER);
}
430
// Stub: push a 0-argument call (no explicit `this`), then dispatch.
void AsmInterpreterCall::PushCallArg0AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArg0AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_ARG0, FrameTransitionType::OTHER_TO_OTHER);
}
436
// Stub: push a 0-argument call with explicit `this`, then dispatch.
void AsmInterpreterCall::PushCallThisArg0AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArg0AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG0, FrameTransitionType::OTHER_TO_OTHER);
}
442
// Stub: push a 1-argument call with explicit `this`, then dispatch.
void AsmInterpreterCall::PushCallThisArg1AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArg1AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG1, FrameTransitionType::OTHER_TO_OTHER);
}
448
// Stub: push a 2-argument call with explicit `this`, then dispatch.
void AsmInterpreterCall::PushCallThisArgs2AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArgs2AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG2, FrameTransitionType::OTHER_TO_OTHER);
}
454
// Stub: push a 3-argument call with explicit `this`, then dispatch.
void AsmInterpreterCall::PushCallThisArgs3AndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallThisArgs3AndDispatch));
    JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG3, FrameTransitionType::OTHER_TO_OTHER);
}
460
461 // uint64_t PushCallRangeAndDispatchNative(uintptr_t glue, uint32_t argc, JSTaggedType calltarget, uintptr_t argv[])
462 // c++ calling convention call js function
463 // Input: X0 - glue
464 // X1 - nativeCode
465 // X2 - callTarget
466 // X3 - thisValue
467 // X4 - argc
468 // X5 - argV (...)
// Stub: call a native function with an argv range (regular call, not `new`).
void AsmInterpreterCall::PushCallRangeAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallRangeAndDispatchNative));
    CallNativeWithArgv(assembler, false);
}
474
// Stub: call a native function as a constructor (callTarget doubles as new.target).
void AsmInterpreterCall::PushCallNewAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallNewAndDispatchNative));
    CallNativeWithArgv(assembler, true);
}
480
// Stub: call a native function as a constructor with an explicit new.target (X6).
void AsmInterpreterCall::PushNewTargetAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushNewTargetAndDispatchNative));
    CallNativeWithArgv(assembler, true, true);
}
486
// Build a BUILTIN_FRAME_WITH_ARGV frame, lay out EcmaRuntimeCallInfo-style
// arguments (args..., this, newTarget, callTarget, argc, thread) on the
// stack, and invoke the native code. `callNew` selects what is stored in the
// newTarget slot; `hasNewTarget` means an explicit new.target arrives in X6.
// Input registers follow the C++ convention documented above
// (glue=X0, nativeCode=X1, callTarget=X2, thisValue=X3, argc=X4, argv=X5).
void AsmInterpreterCall::CallNativeWithArgv(ExtendedAssembler *assembler, bool callNew, bool hasNewTarget)
{
    Register glue(X0);
    Register nativeCode(X1);
    Register callTarget(X2);
    Register thisObj(X3);
    Register argc(X4);
    Register argv(X5);
    Register newTarget(X6);
    Register opArgc(X8);
    Register opArgv(X9);
    Register temp(X10);
    Register currentSlotRegister(X11);
    Register spRegister(SP);

    Label pushThis;
    Label stackOverflow;
    // For BUILTIN_FRAME_WITH_ARGV the frame is not complete yet: FP still
    // has to be set after the args are pushed (see the !isFrameComplete
    // branch below).
    bool isFrameComplete = PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_FRAME_WITH_ARGV, temp, argc);

    __ Mov(currentSlotRegister, spRegister);
    // Reserve enough sp space to prevent stack parameters from being covered by cpu profiler.
    __ Ldr(temp, MemoryOperand(glue, JSThread::GlueData::GetStackLimitOffset(false)));
    __ Mov(Register(SP), temp);

    // Copy argc args from argv downward through currentSlotRegister.
    __ Mov(opArgc, argc);
    __ Mov(opArgv, argv);
    PushArgsWithArgv(assembler, glue, opArgc, opArgv, temp, currentSlotRegister, &pushThis, &stackOverflow);

    __ Bind(&pushThis);
    // newTarget slot: explicit newTarget (X6), the callTarget itself for
    // plain `new` calls, or undefined for non-constructor calls.
    if (callNew) {
        if (hasNewTarget) {
            // 16: this & newTarget
            __ Stp(newTarget, thisObj, MemoryOperand(currentSlotRegister, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX));
        } else {
            // 16: this & newTarget
            __ Stp(callTarget, thisObj, MemoryOperand(currentSlotRegister, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX));
        }
    } else {
        __ Mov(temp, Immediate(JSTaggedValue::VALUE_UNDEFINED));
        // 16: this & newTarget
        __ Stp(temp, thisObj, MemoryOperand(currentSlotRegister, -DOUBLE_SLOT_SIZE, AddrMode::PREINDEX));
    }
    // callTarget
    __ Str(callTarget, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    __ Add(temp, currentSlotRegister, Immediate(QUINTUPLE_SLOT_SIZE));
    if (!isFrameComplete) {
        // Finish the BUILTIN_FRAME_WITH_ARGV frame: FP points past the args.
        __ Add(Register(FP), temp, Operand(argc, LSL, 3)); // 3: argc * 8
    }

    __ Add(temp, argc, Immediate(NUM_MANDATORY_JSFUNC_ARGS));
    // 2: thread & argc
    __ Stp(glue, temp, MemoryOperand(currentSlotRegister, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    // X0 = pointer to the packed call info (first C argument of nativeCode).
    __ Add(Register(X0), currentSlotRegister, Immediate(0));

    // SP must stay 16-byte aligned for the AAPCS64 call below.
    __ Align16(currentSlotRegister);
    __ Mov(spRegister, currentSlotRegister);

    CallNativeInternal(assembler, nativeCode);
    __ Ret();

    __ Bind(&stackOverflow);
    {
        // use builtin_with_argv_frame to mark gc map
        Register frameType(X11);
        __ Ldr(temp, MemoryOperand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)));
        __ Mov(spRegister, temp);
        __ Mov(frameType, Immediate(static_cast<int32_t>(FrameType::BUILTIN_FRAME_WITH_ARGV_STACK_OVER_FLOW_FRAME)));
        // 2: frame type and argc
        __ Stp(Register(Zero), frameType, MemoryOperand(Register(SP), -FRAME_SLOT_SIZE * 2, AddrMode::PREINDEX));
        __ Mov(temp, Immediate(JSTaggedValue::VALUE_UNDEFINED));
        // 2: fill this&newtgt slots
        __ Stp(temp, temp, MemoryOperand(spRegister, -FRAME_SLOT_SIZE * 2, AddrMode::PREINDEX));
        // 2: fill func&align slots
        __ Stp(Register(Zero), temp, MemoryOperand(spRegister, -FRAME_SLOT_SIZE * 2, AddrMode::PREINDEX));
        __ Mov(temp, spRegister);
        // 6:frame type, argc, this, newTarget, func and align
        // +----------------------------------------------------------------+ <---- fp = sp + 6 * frame_slot_size
        // |   FrameType = BUILTIN_FRAME_WITH_ARGV_STACK_OVER_FLOW_FRAME    |
        // +----------------------------------------------------------------+
        // |                           argc = 0                             |
        // |----------------------------------------------------------------|
        // |                       this = undefined                         |
        // |----------------------------------------------------------------|
        // |                     newTarget = undefine                       |
        // |----------------------------------------------------------------|
        // |                      function = undefined                      |
        // |----------------------------------------------------------------|
        // |                             align                              |
        // +----------------------------------------------------------------+ <---- sp
        __ Add(Register(FP), temp, Immediate(FRAME_SLOT_SIZE * 6));

        // Call the ThrowStackOverflowException runtime stub via the
        // RTStub entry table hanging off glue.
        Register runtimeId(X11);
        Register trampoline(X12);
        __ Mov(runtimeId, Immediate(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException));
        // 3 : 3 means *8
        __ Add(trampoline, glue, Operand(runtimeId, LSL, 3));
        __ Ldr(trampoline, MemoryOperand(trampoline, JSThread::GlueData::GetRTStubEntriesOffset(false)));
        __ Blr(trampoline);

        // resume rsp
        __ Mov(Register(SP), Register(FP));
        __ RestoreFpAndLr();
        __ Ret();
    }
}
593
594 // uint64_t PushCallArgsAndDispatchNative(uintptr_t codeAddress, uintptr_t glue, uint32_t argc, ...)
595 // webkit_jscc calling convention call runtime_id's runtion function(c-abi)
596 // Input: X0 - codeAddress
597 // stack layout: sp + N*8 argvN
598 // ........
599 // sp + 24: argv1
600 // sp + 16: argv0
601 // sp + 8: actualArgc
602 // sp: thread
603 // construct Native Leave Frame
604 // +--------------------------+
605 // | argV[N - 1] |
606 // |--------------------------|
607 // | . . . . |
608 // |--------------------------+
609 // | argV[2]=this |
610 // +--------------------------+
611 // | argV[1]=new-target |
612 // +--------------------------+
613 // | argV[0]=call-target |
614 // +--------------------------+ ---------
615 // | argc | ^
616 // |--------------------------| |
617 // | thread | |
618 // |--------------------------| |
619 // | returnAddr | BuiltinFrame
620 // |--------------------------| |
621 // | callsiteFp | |
622 // |--------------------------| |
623 // | frameType | v
624 // +--------------------------+ ---------
625
// Stub: call native code whose arguments are already laid out on the stack
// in webkit_jscc order (thread at sp, argc at sp+8, argv above — see the
// layout diagram above). Builds a BUILTIN_FRAME and passes sp (the packed
// call info) as the native function's first C argument in X0.
void AsmInterpreterCall::PushCallArgsAndDispatchNative(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(PushCallArgsAndDispatchNative));

    Register nativeCode(X0);
    Register glue(X1);
    Register argv(X5);
    Register temp(X6);
    Register sp(SP);
    Register nativeCodeTemp(X2);

    // X0 is repurposed below as the call-info pointer, so stash the code
    // address first.
    __ Mov(nativeCodeTemp, nativeCode);

    __ Ldr(glue, MemoryOperand(sp, 0));
    __ Add(Register(X0), sp, Immediate(0));
    PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_FRAME, temp, argv);

    CallNativeInternal(assembler, nativeCodeTemp);
    __ Ret();
}
646
// Push a builtin frame of the requested type: saves fp/lr, records the new
// frame in glue's leave-frame slot, then pushes <next, frameType>.
// Returns true when FP has been updated here (the frame is complete);
// for BUILTIN_FRAME_WITH_ARGV it returns false — the caller must set FP
// after pushing the stack args so the cpu profiler never sees a
// half-built frame.
// `op` is a scratch register; `next`'s meaning depends on the frame type.
bool AsmInterpreterCall::PushBuiltinFrame(ExtendedAssembler *assembler, Register glue,
                                          FrameType type, Register op, Register next)
{
    Register sp(SP);
    __ PushFpAndLr();
    // Publish the current frame as the thread's leave frame.
    __ Mov(op, sp);
    __ Str(op, MemoryOperand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)));
    __ Mov(op, Immediate(static_cast<int32_t>(type)));
    if (type == FrameType::BUILTIN_FRAME) {
        // push stack args
        __ Add(next, sp, Immediate(BuiltinFrame::GetStackArgsToFpDelta(false)));
        // 2: -2 * FRAME_SLOT_SIZE means type & next
        __ Stp(next, op, MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
        // 2: 2 * FRAME_SLOT_SIZE means skip next and frame type
        __ Add(Register(FP), sp, Immediate(2 * FRAME_SLOT_SIZE));
        return true;
    } else if (type == FrameType::BUILTIN_ENTRY_FRAME) {
        // 2: -2 * FRAME_SLOT_SIZE means type & next
        __ Stp(next, op, MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
        // 2: 2 * FRAME_SLOT_SIZE means skip next and frame type
        __ Add(Register(FP), sp, Immediate(2 * FRAME_SLOT_SIZE));
        return true;
    } else if (type == FrameType::BUILTIN_FRAME_WITH_ARGV) {
        // this frame push stack args must before update FP, otherwise cpu profiler maybe visit incomplete stack
        // BuiltinWithArgvFrame layout please see frames.h
        // 2: -2 * FRAME_SLOT_SIZE means type & next
        __ Stp(next, op, MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
        return false;
    } else {
        LOG_ECMA(FATAL) << "this branch is unreachable";
        UNREACHABLE();
    }
}
680
// Invoke the native code address, then tear the builtin frame back down:
// restore SP from FP and pop the saved fp/lr pair.
void AsmInterpreterCall::CallNativeInternal(ExtendedAssembler *assembler, Register nativeCode)
{
    __ Blr(nativeCode);
    // resume rsp
    __ Mov(Register(SP), Register(FP));
    __ RestoreFpAndLr();
}
688
689 // ResumeRspAndDispatch(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
690 // uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter, size_t jumpSize)
691 // GHC calling convention
692 // X19 - glue
693 // FP - sp
694 // X20 - pc
695 // X21 - constantPool
696 // X22 - profileTypeInfo
697 // X23 - acc
698 // X24 - hotnessCounter
699 // X25 - jumpSizeAfterCall
void AsmInterpreterCall::ResumeRspAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndDispatch));

    Register glueRegister = __ GlueRegister();
    Register sp(FP);
    Register rsp(SP);
    Register pc(X20);
    Register jumpSizeRegister(X25);

    Register ret(X23);
    Register opcode(X6, W);
    Register temp(X7);
    Register bcStub(X7);
    Register fp(X8);

    // Field offsets relative to the end of the AsmInterpretedFrame, so they can be
    // applied to sp (which points past the frame) with negative-displacement Ldur.
    int64_t fpOffset = static_cast<int64_t>(AsmInterpretedFrame::GetFpOffset(false))
        - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
    int64_t spOffset = static_cast<int64_t>(AsmInterpretedFrame::GetBaseOffset(false))
        - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
    int64_t thisOffset = static_cast<int64_t>(AsmInterpretedFrame::GetThisOffset(false))
        - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
    ASSERT(fpOffset < 0);
    ASSERT(spOffset < 0);

    Label newObjectRangeReturn;
    Label dispatch;
    __ Ldur(fp, MemoryOperand(sp, fpOffset)); // store fp for temporary
    // A non-positive jumpSize marks a return from a newobjrange-style call site,
    // which needs the constructor-result checks below.
    __ Cmp(jumpSizeRegister, Immediate(0));
    __ B(Condition::LE, &newObjectRangeReturn);
    __ Ldur(sp, MemoryOperand(sp, spOffset)); // update sp

    // Advance pc past the call bytecode and fetch the next opcode.
    __ Add(pc, pc, Operand(jumpSizeRegister, LSL, 0));
    __ Ldrb(opcode, MemoryOperand(pc, 0));
    __ Bind(&dispatch);
    {
        __ Mov(rsp, fp);  // resume rsp
        // Index the glue's bytecode-stub entry table by opcode (slot-sized entries)
        // and tail-jump to the handler.
        __ Add(bcStub, glueRegister, Operand(opcode, UXTW, FRAME_SLOT_SIZE_LOG2));
        __ Ldr(bcStub, MemoryOperand(bcStub, JSThread::GlueData::GetBCStubEntriesOffset(false)));
        __ Br(bcStub);
    }

    Label getThis;
    Label notUndefined;
    __ Bind(&newObjectRangeReturn);
    {
        // Constructor returned undefined: the construct result is `this` from the frame.
        __ Cmp(ret, Immediate(JSTaggedValue::VALUE_UNDEFINED));
        __ B(Condition::NE, &notUndefined);
        ASSERT(thisOffset < 0);
        __ Bind(&getThis);
        __ Ldur(ret, MemoryOperand(sp, thisOffset));  // update acc
        __ Ldur(sp, MemoryOperand(sp, spOffset));  // update sp
        __ Mov(rsp, fp);  // resume rsp
        __ Sub(pc, pc, jumpSizeRegister);  // sub negative jumpSize
        __ Ldrb(opcode, MemoryOperand(pc, 0));
        __ Add(bcStub, glueRegister, Operand(opcode, UXTW, FRAME_SLOT_SIZE_LOG2));
        __ Ldr(bcStub, MemoryOperand(bcStub, JSThread::GlueData::GetBCStubEntriesOffset(false)));
        __ Br(bcStub);
    }
    __ Bind(&notUndefined);
    {
        Label notEcmaObject;
        // Test the heap-object tag bits; non-zero means acc is not a heap object.
        __ Mov(temp, Immediate(JSTaggedValue::TAG_HEAPOBJECT_MASK));
        __ And(temp, temp, ret);
        __ Cmp(temp, Immediate(0));
        __ B(Condition::NE, &notEcmaObject);
        // acc is heap object: check its JSType falls in the ECMA object range.
        __ Ldr(temp, MemoryOperand(ret, TaggedObject::HCLASS_OFFSET));
        __ Ldr(temp, MemoryOperand(temp, JSHClass::BIT_FIELD_OFFSET));
        __ And(temp.W(), temp.W(), LogicalImmediate::Create(0xFF, RegWSize));
        __ Cmp(temp.W(), Immediate(static_cast<int64_t>(JSType::ECMA_OBJECT_LAST)));
        __ B(Condition::HI, &notEcmaObject);
        __ Cmp(temp.W(), Immediate(static_cast<int64_t>(JSType::ECMA_OBJECT_FIRST)));
        __ B(Condition::LO, &notEcmaObject);
        // acc is ecma object: keep it as the construct result and dispatch normally.
        __ Ldur(sp, MemoryOperand(sp, spOffset));  // update sp
        __ Sub(pc, pc, jumpSizeRegister);  // sub negative jumpSize
        __ Ldrb(opcode, MemoryOperand(pc, 0));
        __ B(&dispatch);

        __ Bind(&notEcmaObject);
        {
            int64_t constructorOffset = static_cast<int64_t>(AsmInterpretedFrame::GetFunctionOffset(false))
                - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
            ASSERT(constructorOffset < 0);
            // Non-object result: base-class constructors still return `this`;
            // derived (non-base) constructors must throw.
            __ Ldur(temp, MemoryOperand(sp, constructorOffset));  // load constructor
            __ Ldr(temp, MemoryOperand(temp, JSFunctionBase::METHOD_OFFSET));
            __ Ldr(temp, MemoryOperand(temp, Method::EXTRA_LITERAL_INFO_OFFSET));
            __ Lsr(temp.W(), temp.W(), MethodLiteral::FunctionKindBits::START_BIT);
            __ And(temp.W(), temp.W(),
                LogicalImmediate::Create((1LU << MethodLiteral::FunctionKindBits::SIZE) - 1, RegWSize));
            __ Cmp(temp.W(), Immediate(static_cast<int64_t>(FunctionKind::CLASS_CONSTRUCTOR)));
            __ B(Condition::LS, &getThis);  // constructor is base
            // exception branch
            {
                // Reuse the dispatch tail with a synthetic "throw" stub id as the opcode.
                __ Mov(opcode, kungfu::BytecodeStubCSigns::ID_NewObjectRangeThrowException);
                __ Ldur(sp, MemoryOperand(sp, spOffset));  // update sp
                __ B(&dispatch);
            }
        }
    }
}
802
803 // ResumeRspAndReturn(uintptr_t acc)
804 // GHC calling convention
805 // X19 - acc
806 // FP - prevSp
807 // X20 - sp
void AsmInterpreterCall::ResumeRspAndReturn(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndReturn));
    Register rsp(SP);
    Register currentSp(X20);

    [[maybe_unused]] TempRegister1Scope scope1(assembler);
    Register fpRegister = __ TempRegister1();
    // Saved fp lives at a negative displacement from currentSp (sp points past the frame).
    int64_t offset = static_cast<int64_t>(AsmInterpretedFrame::GetFpOffset(false))
        - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
    ASSERT(offset < 0);
    // Restore the machine sp from the frame's saved fp, then return acc (X19)
    // to the C++ caller in X0.
    __ Ldur(fpRegister, MemoryOperand(currentSp, offset));
    __ Mov(rsp, fpRegister);

    // return
    {
        __ RestoreFpAndLr();
        __ Mov(Register(X0), Register(X19));
        __ Ret();
    }
}
829
830 // ResumeRspAndReturnBaseline(uintptr_t acc)
831 // GHC calling convention
832 // X19 - acc
833 // FP - prevSp
834 // X20 - sp
835 // X21 - jumpSizeAfterCall
void AsmInterpreterCall::ResumeRspAndReturnBaseline(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndReturnBaseline));
    Register rsp(SP);
    Register currentSp(X20);

    [[maybe_unused]] TempRegister1Scope scope1(assembler);
    Register fpRegister = __ TempRegister1();
    // Saved fp lives at a negative displacement from currentSp (sp points past the frame).
    int64_t fpOffset = static_cast<int64_t>(AsmInterpretedFrame::GetFpOffset(false)) -
        static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
    ASSERT(fpOffset < 0);
    __ Ldur(fpRegister, MemoryOperand(currentSp, fpOffset));
    __ Mov(rsp, fpRegister);
    __ RestoreFpAndLr();
    __ Mov(Register(X0), Register(X19));

    // Check and set result: a non-positive jumpSize marks a return from a
    // newobjrange-style call, whose result needs the constructor checks below.
    Register ret = X0;
    Register jumpSizeRegister = X21;
    Label getThis;
    Label notUndefined;
    Label normalReturn;
    Label newObjectRangeReturn;
    __ Cmp(jumpSizeRegister, Immediate(0));
    __ B(Condition::GT, &normalReturn);

    __ Bind(&newObjectRangeReturn);
    {
        // Constructor returned undefined: replace the result with `this` from the frame.
        __ Cmp(ret, Immediate(JSTaggedValue::VALUE_UNDEFINED));
        __ B(Condition::NE, &notUndefined);

        __ Bind(&getThis);
        int64_t thisOffset = static_cast<int64_t>(AsmInterpretedFrame::GetThisOffset(false)) -
            static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
        ASSERT(thisOffset < 0);
        __ Ldur(ret, MemoryOperand(currentSp, thisOffset));  // update result
        __ B(&normalReturn);

        __ Bind(&notUndefined);
        {
            // X19 (acc) was already copied to X0 above, so it is free as scratch here.
            Register temp = X19;
            Label notEcmaObject;
            __ Mov(temp, Immediate(JSTaggedValue::TAG_HEAPOBJECT_MASK));
            __ And(temp, temp, ret);
            __ Cmp(temp, Immediate(0));
            __ B(Condition::NE, &notEcmaObject);
            // acc is heap object: check its JSType falls in the ECMA object range.
            __ Ldr(temp, MemoryOperand(ret, TaggedObject::HCLASS_OFFSET));
            __ Ldr(temp, MemoryOperand(temp, JSHClass::BIT_FIELD_OFFSET));
            __ And(temp.W(), temp.W(), LogicalImmediate::Create(0xFF, RegWSize));
            __ Cmp(temp.W(), Immediate(static_cast<int64_t>(JSType::ECMA_OBJECT_LAST)));
            __ B(Condition::HI, &notEcmaObject);
            __ Cmp(temp.W(), Immediate(static_cast<int64_t>(JSType::ECMA_OBJECT_FIRST)));
            __ B(Condition::LO, &notEcmaObject);
            // acc is ecma object: keep it as the construct result.
            __ B(&normalReturn);

            __ Bind(&notEcmaObject);
            {
                int64_t funcOffset = static_cast<int64_t>(AsmInterpretedFrame::GetFunctionOffset(false)) -
                    static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
                ASSERT(funcOffset < 0);
                // Non-object result: base-class constructors return `this`; otherwise
                // fall through and return the raw value unchanged.
                __ Ldur(temp, MemoryOperand(currentSp, funcOffset));  // load constructor
                __ Ldr(temp, MemoryOperand(temp, JSFunctionBase::METHOD_OFFSET));
                __ Ldr(temp, MemoryOperand(temp, Method::EXTRA_LITERAL_INFO_OFFSET));
                __ Lsr(temp.W(), temp.W(), MethodLiteral::FunctionKindBits::START_BIT);
                __ And(temp.W(), temp.W(),
                    LogicalImmediate::Create((1LU << MethodLiteral::FunctionKindBits::SIZE) - 1, RegWSize));
                __ Cmp(temp.W(), Immediate(static_cast<int64_t>(FunctionKind::CLASS_CONSTRUCTOR)));
                __ B(Condition::LS, &getThis);  // constructor is base
                // fall through
            }
        }
    }
    __ Bind(&normalReturn);
    __ Ret();
}
913
914 // ResumeCaughtFrameAndDispatch(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
915 // uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter)
916 // GHC calling convention
917 // X19 - glue
918 // FP - sp
919 // X20 - pc
920 // X21 - constantPool
921 // X22 - profileTypeInfo
922 // X23 - acc
923 // X24 - hotnessCounter
void AsmInterpreterCall::ResumeCaughtFrameAndDispatch(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeCaughtFrameAndDispatch));

    Register glue(X19);
    Register pc(X20);
    Register fp(X5);
    Register opcode(X6, W);
    Register bcStub(X7);

    Label dispatch;
    // If the thread recorded a last fp, unwind the machine sp to it before
    // dispatching the catch handler; a zero fp means no unwinding is needed.
    __ Ldr(fp, MemoryOperand(glue, JSThread::GlueData::GetLastFpOffset(false)));
    __ Cmp(fp, Immediate(0));
    __ B(Condition::EQ, &dispatch);
    // up frame
    __ Mov(Register(SP), fp);
    // fall through
    __ Bind(&dispatch);
    {
        // Fetch the opcode at pc (the catch handler's bytecode) and tail-jump
        // to its stub via the glue's BC stub entry table.
        __ Ldrb(opcode, MemoryOperand(pc, 0));
        __ Add(bcStub, glue, Operand(opcode, UXTW, FRAME_SLOT_SIZE_LOG2));
        __ Ldr(bcStub, MemoryOperand(bcStub, JSThread::GlueData::GetBCStubEntriesOffset(false)));
        __ Br(bcStub);
    }
}
949
950 // ResumeUncaughtFrameAndReturn(uintptr_t glue)
951 // GHC calling convention
952 // X19 - glue
953 // FP - sp
954 // X20 - acc
void AsmInterpreterCall::ResumeUncaughtFrameAndReturn(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeUncaughtFrameAndReturn));

    Register glue(X19);
    Register fp(X5);
    Register acc(X20);
    Register cppRet(X0);

    // Unwind the machine sp to the thread's recorded last fp, then return to
    // the C++ entry (Execute) with acc as the result.
    __ Ldr(fp, MemoryOperand(glue, JSThread::GlueData::GetLastFpOffset(false)));
    __ Mov(Register(SP), fp);
    // this method will return to Execute(cpp calling convention), and the return value should be put into X0.
    __ Mov(cppRet, acc);
    __ RestoreFpAndLr();
    __ Ret();
}
971
972 // ResumeRspAndRollback(uintptr_t glue, uintptr_t sp, uintptr_t pc, uintptr_t constantPool,
973 // uint64_t profileTypeInfo, uint64_t acc, uint32_t hotnessCounter, size_t jumpSize)
974 // GHC calling convention
975 // X19 - glue
976 // FP - sp
977 // X20 - pc
978 // X21 - constantPool
979 // X22 - profileTypeInfo
980 // X23 - acc
981 // X24 - hotnessCounter
982 // X25 - jumpSizeAfterCall
void AsmInterpreterCall::ResumeRspAndRollback(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ResumeRspAndRollback));

    Register glueRegister = __ GlueRegister();
    Register sp(FP);
    Register rsp(SP);
    Register pc(X20);
    Register jumpSizeRegister(X25);

    Register ret(X23);
    Register opcode(X6, W);
    Register bcStub(X7);
    Register fp(X8);

    // Field offsets relative to the end of the AsmInterpretedFrame, applied to sp
    // (which points past the frame) with negative-displacement Ldur.
    int64_t fpOffset = static_cast<int64_t>(AsmInterpretedFrame::GetFpOffset(false))
        - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
    int64_t spOffset = static_cast<int64_t>(AsmInterpretedFrame::GetBaseOffset(false))
        - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
    int64_t funcOffset = static_cast<int64_t>(AsmInterpretedFrame::GetFunctionOffset(false))
        - static_cast<int64_t>(AsmInterpretedFrame::GetSize(false));
    ASSERT(fpOffset < 0);
    ASSERT(spOffset < 0);
    ASSERT(funcOffset < 0);

    __ Ldur(fp, MemoryOperand(sp, fpOffset));   // store fp for temporary
    // Rollback: acc is restored to the frame's function instead of a call result.
    __ Ldur(ret, MemoryOperand(sp, funcOffset));  // restore acc
    __ Ldur(sp, MemoryOperand(sp, spOffset));   // update sp

    // Advance pc past the call bytecode and fetch the next opcode.
    __ Add(pc, pc, Operand(jumpSizeRegister, LSL, 0));
    __ Ldrb(opcode, MemoryOperand(pc, 0));

    __ Mov(rsp, fp);  // resume rsp
    // Tail-jump to the bytecode handler via the glue's BC stub entry table.
    __ Add(bcStub, glueRegister, Operand(opcode, UXTW, FRAME_SLOT_SIZE_LOG2));
    __ Ldr(bcStub, MemoryOperand(bcStub, JSThread::GlueData::GetBCStubEntriesOffset(false)));
    __ Br(bcStub);
}
1020
1021 // c++ calling convention
1022 // X0 - glue
1023 // X1 - callTarget
1024 // X2 - method
1025 // X3 - callField
1026 // X4 - receiver
1027 // X5 - value
void AsmInterpreterCall::CallGetter(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallGetter));
    Label target;

    // Bracket the interpreted call with a bridge frame; Bl links the return
    // address so the common entry comes back here for frame teardown.
    PushAsmInterpBridgeFrame(assembler);
    __ Bl(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    {
        JSCallCommonEntry(assembler, JSCallMode::CALL_GETTER, FrameTransitionType::OTHER_TO_OTHER);
    }
}
1042
// Same shape as CallGetter: bridge frame around the common call entry,
// dispatched in CALL_SETTER mode (receiver + value, no return needed).
void AsmInterpreterCall::CallSetter(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallSetter));
    Label target;
    PushAsmInterpBridgeFrame(assembler);
    __ Bl(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    {
        JSCallCommonEntry(assembler, JSCallMode::CALL_SETTER, FrameTransitionType::OTHER_TO_OTHER);
    }
}
1056
// Bridge-framed call entry for container intrinsics taking `this` plus 2 args,
// returning a value (CALL_THIS_ARG2_WITH_RETURN mode).
void AsmInterpreterCall::CallContainersArgs2(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallContainersArgs2));
    Label target;
    PushAsmInterpBridgeFrame(assembler);
    __ Bl(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    {
        JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG2_WITH_RETURN,
                          FrameTransitionType::OTHER_TO_OTHER);
    }
}
1071
// Bridge-framed call entry for container intrinsics taking `this` plus 3 args,
// returning a value (CALL_THIS_ARG3_WITH_RETURN mode).
void AsmInterpreterCall::CallContainersArgs3(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallContainersArgs3));
    Label target;
    PushAsmInterpBridgeFrame(assembler);
    __ Bl(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    {
        JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARG3_WITH_RETURN,
                          FrameTransitionType::OTHER_TO_OTHER);
    }
}
1086
1087 // c++ calling convention
1088 // X0 - glue
1089 // X1 - callTarget
1090 // X2 - method
1091 // X3 - callField
1092 // X4 - arg0(argc)
1093 // X5 - arg1(arglist)
1094 // X6 - arg3(argthis)
void AsmInterpreterCall::CallReturnWithArgv(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(CallReturnWithArgv));
    Label target;
    // Bridge-framed call entry taking (argc, arglist, argthis), dispatched in
    // CALL_THIS_ARGV_WITH_RETURN mode.
    PushAsmInterpBridgeFrame(assembler);
    __ Bl(&target);
    PopAsmInterpBridgeFrame(assembler);
    __ Ret();
    __ Bind(&target);
    {
        JSCallCommonEntry(assembler, JSCallMode::CALL_THIS_ARGV_WITH_RETURN,
                          FrameTransitionType::OTHER_TO_OTHER);
    }
}
1109
// preserve all the general registers, except x15 and callee saved registers,
// and call x15
void AsmInterpreterCall::PreserveMostCall(ExtendedAssembler* assembler)
{
    // * layout as the following:
    //               +--------------------------+ ---------
    //               |       . . . . .          |          ^
    // callerSP ---> |--------------------------|          |
    //               |       returnAddr         |          |
    //               |--------------------------|   OptimizedFrame
    //               |       callsiteFp         |          |
    //       fp ---> |--------------------------|          |
    //               |     OPTIMIZED_FRAME      |          v
    //               +--------------------------+ ---------
    //               |           x0             |
    //               +--------------------------+
    //               |           x1             |
    //               +--------------------------+
    //               |           x2             |
    //               +--------------------------+
    //               |           x3             |
    //               +--------------------------+
    //               |           x4             |
    //               +--------------------------+
    //               |           x5             |
    //               +--------------------------+
    //               |           x6             |
    //               +--------------------------+
    //               |           x7             |
    //               +--------------------------+
    //               |           x8             |
    //               +--------------------------+
    //               |           x9             |
    //               +--------------------------+
    //               |           x10            |
    //               +--------------------------+
    //               |           x11            |
    //               +--------------------------+
    //               |           x12            |
    //               +--------------------------+
    //               |           x13            |
    //               +--------------------------+
    //               |           x14            |
    //               +--------------------------+
    //               |           x16            |
    //               +--------------------------+
    //               |           x17            |
    //               +--------------------------+
    //               |           x18            |
    //               +--------------------------+
    //               |          align           |
    // calleeSP ---> +--------------------------+
    {
        // prologue to save fp, frametype, and update fp.
        __ Stp(X29, X30, MemoryOperand(SP, -DOUBLE_SLOT_SIZE, PREINDEX));
        // Zero register means OPTIMIZED_FRAME
        __ Stp(X0, Zero, MemoryOperand(SP, -DOUBLE_SLOT_SIZE, PREINDEX));
        __ Add(FP, SP, Immediate(DOUBLE_SLOT_SIZE));
    }
    int32_t PreserveRegPairIndex = 9;
    // x0~x14,x16,x17,x18 should be preserved,
    // other general registers are callee saved register, callee will save them.
    // Reserve 9 register pairs; each Stp below stores one pair at a descending
    // pair-slot index (x0 was already stored in the prologue).
    __ Sub(SP, SP, Immediate(DOUBLE_SLOT_SIZE * PreserveRegPairIndex));
    __ Stp(X1, X2, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (--PreserveRegPairIndex)));
    __ Stp(X3, X4, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (--PreserveRegPairIndex)));
    __ Stp(X5, X6, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (--PreserveRegPairIndex)));
    __ Stp(X7, X8, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (--PreserveRegPairIndex)));
    __ Stp(X9, X10, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (--PreserveRegPairIndex)));
    __ Stp(X11, X12, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (--PreserveRegPairIndex)));
    __ Stp(X13, X14, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (--PreserveRegPairIndex)));
    __ Stp(X16, X17, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (--PreserveRegPairIndex)));
    // x18 shares the lowest pair slot with the alignment padding word.
    __ Str(X18, MemoryOperand(SP, FRAME_SLOT_SIZE));
    // Invoke the target held in x15; all other GPRs are preserved around it.
    __ Blr(X15);
    // Restore in the mirror order of the stores above.
    __ Ldr(X18, MemoryOperand(SP, FRAME_SLOT_SIZE));
    __ Ldp(X16, X17, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (PreserveRegPairIndex++)));
    __ Ldp(X13, X14, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (PreserveRegPairIndex++)));
    __ Ldp(X11, X12, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (PreserveRegPairIndex++)));
    __ Ldp(X9, X10, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (PreserveRegPairIndex++)));
    __ Ldp(X7, X8, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (PreserveRegPairIndex++)));
    __ Ldp(X5, X6, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (PreserveRegPairIndex++)));
    __ Ldp(X3, X4, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (PreserveRegPairIndex++)));
    __ Ldp(X1, X2, MemoryOperand(SP, DOUBLE_SLOT_SIZE * (PreserveRegPairIndex++)));
    __ Ldr(X0, MemoryOperand(SP, DOUBLE_SLOT_SIZE * PreserveRegPairIndex));
    {
        // epilogue to restore sp, fp, lr.
        // Skip x0 slot and frametype slot
        __ Add(SP, SP, Immediate(DOUBLE_SLOT_SIZE * PreserveRegPairIndex +
            FRAME_SLOT_SIZE + FRAME_SLOT_SIZE));
        __ Ldp(FP, X30, MemoryOperand(SP, DOUBLE_SLOT_SIZE, AddrMode::POSTINDEX));
        __ Ret();
    }
}
1202
1203 // ASMFastWriteBarrier(GateRef glue, GateRef obj, GateRef offset, GateRef value)
1204 // c calling convention, but preserve all general registers except %x15
1205 // %x0 - glue
1206 // %x1 - obj
1207 // %x2 - offset
1208 // %x3 - value
void AsmInterpreterCall::ASMFastWriteBarrier(ExtendedAssembler* assembler)
{
    // valid region flag are as follows, assume it will be ALWAYS VALID.
    // Judge the region of value with:
    //                          "young"    "sweepable share"     "readonly share"
    // region flag:    0x08, 0x09, [0x0A, 0x11], [0x12, 0x15],        0x16
    // value is share:                            [0x12, 0x16]  =>  valueMaybeSweepableShare
    //     readonly share:                                0x16  =>  return
    //     sweepable share:                       [0x12, 0x15]  =>  needShareBarrier
    // value is not share:   0x08, 0x09, [0x0A, 0x11],          =>  valueNotShare
    //     value is young :        0x09                         =>  needCallNotShare
    //     value is not young :  0x08, [0x0A, 0x11],            =>  checkMark
    ASSERT(GENERAL_YOUNG_BEGIN <= IN_YOUNG_SPACE && IN_YOUNG_SPACE < SHARED_SPACE_BEGIN &&
        SHARED_SPACE_BEGIN <= SHARED_SWEEPABLE_SPACE_BEGIN && SHARED_SWEEPABLE_SPACE_END < IN_SHARED_READ_ONLY_SPACE &&
        IN_SHARED_READ_ONLY_SPACE == HEAP_SPACE_END);
    __ BindAssemblerStub(RTSTUB_ID(ASMFastWriteBarrier));
    Label needCall;
    Label checkMark;
    Label needCallNotShare;
    Label needShareBarrier;
    Label valueNotShare;
    Label valueMaybeSweepableShare;
    {
        // int8_t *valueRegion = value & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t valueFlag = *valueRegion
        // if (valueFlag >= SHARED_SWEEPABLE_SPACE_BEGIN){
        //    goto valueMaybeSweepableShare
        // }

        // x15 is the only scratch register this stub may clobber (see header comment).
        __ And(X15, X3, LogicalImmediate::Create(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), RegXSize));
        // X15 is the region address of value.
        __ Ldrb(Register(X15, W), MemoryOperand(X15, 0));
        // X15 is the flag load from region of value.
        __ Cmp(Register(X15, W), Immediate(SHARED_SWEEPABLE_SPACE_BEGIN));
        __ B(GE, &valueMaybeSweepableShare);
        // if value may be SweepableShare, goto valueMaybeSweepableShare
    }
    __ Bind(&valueNotShare);
    {
        // valueNotShare:
        // if (valueFlag != IN_YOUNG_SPACE){
        //    goto checkMark
        // }
        // int8_t *objRegion = obj & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t objFlag = *objRegion
        // if (objFlag != IN_YOUNG_SPACE){
        //    goto needCallNotShare
        // }

        __ Cmp(Register(X15, W), Immediate(RegionSpaceFlag::IN_YOUNG_SPACE));
        __ B(NE, &checkMark);
        // if value is not in young, goto checkMark

        __ And(X15, X1, LogicalImmediate::Create(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), RegXSize));
        // X15 is the region address of obj.
        __ Ldrb(Register(X15, W), MemoryOperand(X15, 0));
        // X15 is the flag load from region of obj.
        __ Cmp(Register(X15, W), Immediate(RegionSpaceFlag::IN_YOUNG_SPACE));
        __ B(NE, &needCallNotShare);
        // if obj is not in young (old->young reference), goto needCallNotShare
    }

    __ Bind(&checkMark);
    {
        // checkMark:
        // int8_t GCStateBitField = *(glue+GCStateBitFieldOffset)
        // if (GCStateBitField & JSThread::CONCURRENT_MARKING_BITFIELD_MASK != 0) {
        //    goto needCallNotShare
        // }
        // return

        __ Mov(X15, JSThread::GlueData::GetGCStateBitFieldOffset(false));
        __ Ldrb(Register(X15, W), MemoryOperand(X0, Register(X15), UXTX));
        __ Tst(Register(X15, W), LogicalImmediate::Create(JSThread::CONCURRENT_MARKING_BITFIELD_MASK, RegWSize));
        __ B(NE, &needCallNotShare);
        // if GCState is not READY_TO_MARK, go to needCallNotShare.
        __ Ret();
    }

    __ Bind(&valueMaybeSweepableShare);
    {
        // valueMaybeSweepableShare:
        // if (valueFlag != IN_SHARED_READ_ONLY_SPACE){
        //    goto needShareBarrier
        // }
        // return
        __ Cmp(Register(X15, W), Immediate(RegionSpaceFlag::IN_SHARED_READ_ONLY_SPACE));
        __ B(NE, &needShareBarrier);
        // read-only shared values never need a barrier.
        __ Ret();
    }

    __ Bind(&needCallNotShare);
    {
        // Load the table offset of the non-shared barrier common stub into x15.
        int32_t NonSValueBarrier = static_cast<int32_t>(JSThread::GlueData::GetCOStubEntriesOffset(false)) +
            kungfu::CommonStubCSigns::SetNonSValueWithBarrier * FRAME_SLOT_SIZE;
        __ Mov(X15, NonSValueBarrier);
    }
    __ Bind(&needCall);
    {
        // x15 holds a glue-relative stub offset here; resolve it to the stub's
        // entry address and call it with all caller registers preserved.
        __ Ldr(X15, MemoryOperand(X0, Register(X15), UXTX));
        PreserveMostCall(assembler);
    }
    __ Bind(&needShareBarrier);
    {
        ASMFastSharedWriteBarrier(assembler, needCall);
    }
}
1316
1317 // ASMWriteBarrierWithEden(GateRef glue, GateRef obj, GateRef offset, GateRef value)
1318 // c calling convention, but preserve all general registers except %x15
1319 // %x0 - glue
1320 // %x1 - obj
1321 // %x2 - offset
1322 // %x3 - value
void AsmInterpreterCall::ASMWriteBarrierWithEden(ExtendedAssembler* assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(ASMWriteBarrierWithEden));
    // Just for compatibility, not a fast implement, should be refactored when enable EdenBarrier.
    // Resolve the eden-barrier common stub's entry through the glue table and
    // call it via PreserveMostCall (x15 is the only clobbered register).
    int32_t EdenBarrierOffset = static_cast<int32_t>(JSThread::GlueData::GetCOStubEntriesOffset(false)) +
        kungfu::CommonStubCSigns::SetValueWithEdenBarrier * FRAME_SLOT_SIZE;
    __ Mov(X15, EdenBarrierOffset);
    __ Ldr(X15, MemoryOperand(X0, Register(X15), UXTX));
    PreserveMostCall(assembler);
}
1333
1334 // %x0 - glue
1335 // %x1 - obj
1336 // %x2 - offset
1337 // %x3 - value
void AsmInterpreterCall::ASMFastSharedWriteBarrier(ExtendedAssembler* assembler, Label& needCall)
{
    Label checkBarrierForSharedValue;
    Label restoreScratchRegister;
    Label callSharedBarrier;
    {
        // int8_t *objRegion = obj & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t objFlag = *objRegion
        // if (objFlag >= SHARED_SPACE_BEGIN){
        //    // share to share, just check the barrier
        //    goto checkBarrierForSharedValue
        // }
        __ And(X15, X1, LogicalImmediate::Create(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), RegXSize));
        __ Ldrb(Register(X15, W), MemoryOperand(X15, 0));
        // X15 is the flag load from region of obj.
        __ Cmp(Register(X15, W), Immediate(RegionSpaceFlag::SHARED_SPACE_BEGIN));
        __ B(GE, &checkBarrierForSharedValue);  // if objflag >= SHARED_SPACE_BEGIN => checkBarrierForSharedValue
    }
    {
        // Local object referencing a shared value: record the slot in the region's
        // localToShare remembered set, or fall back to the runtime stub if absent.
        // int8_t *objRegion = obj & (~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK))
        // int8_t *localToShareSet = *(objRegion + LocalToShareSetOffset)
        // if (localToShareSet == 0){
        //    goto callSharedBarrier
        // }
        __ And(X15, X1, LogicalImmediate::Create(~(JSTaggedValue::TAG_MARK | DEFAULT_REGION_MASK), RegXSize));
        __ Ldr(X15, MemoryOperand(X15, Region::PackedData::GetLocalToShareSetOffset(false)));
        // X15 is localToShareSet for obj region.
        __ Cbz({X15, X}, &callSharedBarrier);  // if localToShareSet == 0 => callSharedBarrier
    }
    {
        // X16, X17 will be used as scratch register, spill them.
        // the caller will call this function with inline asm, it will not save any registers except x15.
        // So we need spill and restore x16, x17 when we need them as scratch register.
        {
            __ Stp(X16, X17, MemoryOperand(SP, -DOUBLE_SLOT_SIZE, PREINDEX));
        }
        // int64_t objOffset = obj & DEFAULT_REGION_MASK
        // int64_t slotOffset = objOffset + offset
        __ And(X16, X1, LogicalImmediate::Create(DEFAULT_REGION_MASK, RegXSize));
        __ Add(X16, X16, Operand(Register(X2)));

        // the logic to get mask in stub_builder.cpp
        //     [63-------------------------35][34------------------------8][7---3][2-0]
        //     bitOffset:                        bbbbbbbbbbbbbbbbbbbbbbbb   bbbcc  ccc
        //     bitPerWordMask:                                                 11  111
        //     indexInWord = And bitoffset bitPerWordMask
        //     indexInWord:                                                    cc  ccc
        //     mask = 1 << indexInWord

        // the logic to test bit set value here:
        //     [63-------------------------35][34------------------------8][7---3][2-0]
        //     slotOffset:     aaaaaaaaaaaaaaa  bbbbbbbbbbbbbbbbbbbbbbbbb   ccccc  ddd
        //     Ubfm X16 slotOffset 3 7
        //     indexInWord:                                                    cc  ccc
        __ Ubfm(X17, X16, TAGGED_TYPE_SIZE_LOG, TAGGED_TYPE_SIZE_LOG + GCBitset::BIT_PER_WORD_LOG2 - 1);

        // the logic to get byteIndex in stub_builder.cpp
        //     [63-------------------------35][34------------------------8][7---3][2-0]
        //     slotOffset:     aaaaaaaaaaaaaaa  bbbbbbbbbbbbbbbbbbbbbbbbb   ccccc  ddd
        //     1. bitOffsetPtr = LSR TAGGED_TYPE_SIZE_LOG(3) slotOffset
        //     bitOffsetPtr:      aaaaaaaaaaaa  aaabbbbbbbbbbbbbbbbbbbbbb   bbbcc  ccc
        //     2. bitOffset = TruncPtrToInt32 bitOffsetPtr
        //     bitOffset:                        bbbbbbbbbbbbbbbbbbbbbbbb   bbbcc  ccc
        //     3. index = LSR BIT_PER_WORD_LOG2(5) bitOffset
        //     index:                                 bbbbbbbbbbbbbbbbbbb   bbbbb  bbb
        //     4. byteIndex = Mul index BYTE_PER_WORD(4)
        //     byteIndex:                           bbbbbbbbbbbbbbbbbbbbb   bbbbb  b00

        // the logic to get byteIndex here:
        //     [63-------------------------35][34------------------------8][7---3][2-0]
        //     slotOffset:     aaaaaaaaaaaaaaa  bbbbbbbbbbbbbbbbbbbbbbbbb   ccccc  ddd
        //     Ubfm X16 slotOffset 8 34
        //     index:                                 bbbbbbbbbbbbbbbbbbb   bbbbb  bbb
        __ Ubfm(X16, X16, TAGGED_TYPE_SIZE_LOG + GCBitset::BIT_PER_WORD_LOG2,
            sizeof(uint32_t) * GCBitset::BIT_PER_BYTE + TAGGED_TYPE_SIZE_LOG - 1);
        __ Add(X15, X15, Operand(Register(X16), LSL, GCBitset::BYTE_PER_WORD_LOG2));
        __ Add(X15, X15, Immediate(RememberedSet::GCBITSET_DATA_OFFSET));
        // X15 is the address of bitset value. X15 = X15 + X16 << BYTE_PER_WORD_LOG2 + GCBITSET_DATA_OFFSET

        // mask = 1 << indexInWord
        __ Mov(Register(X16, W), 1);
        __ Lsl(Register(X17, W), Register(X16, W), Register(X17, W));  // X17 is the mask

        // Set the bit only if it is not already set (avoids a redundant store).
        __ Ldr(Register(X16, W), MemoryOperand(X15, 0));  // x16: oldsetValue
        __ Tst(Register(X16, W), Register(X17, W));
        __ B(NE, &restoreScratchRegister);
        __ Orr(Register(X16, W), Register(X16, W), Register(X17, W));
        __ Str(Register(X16, W), MemoryOperand(X15, 0));
    }
    __ Bind(&restoreScratchRegister);
    {
        __ Ldp(X16, X17, MemoryOperand(SP, DOUBLE_SLOT_SIZE, POSTINDEX));
    }
    __ Bind(&checkBarrierForSharedValue);
    {
        // checkBarrierForSharedValue:
        // int8_t GCStateBitField = *(glue+SharedGCStateBitFieldOffset)
        // if (GCStateBitField & JSThread::SHARED_CONCURRENT_MARKING_BITFIELD_MASK != 0) {
        //    goto callSharedBarrier
        // }
        // return
        __ Mov(X15, JSThread::GlueData::GetSharedGCStateBitFieldOffset(false));
        __ Ldrb(Register(X15, W), MemoryOperand(X0, Register(X15), UXTX));
        static_assert(JSThread::SHARED_CONCURRENT_MARKING_BITFIELD_MASK == 1 && "Tbnz can't handle other bit mask");
        __ Tbnz(Register(X15, W), 0, &callSharedBarrier);
        // if shared GCState is not READY_TO_MARK, go to callSharedBarrier.
        __ Ret();
    }

    __ Bind(&callSharedBarrier);
    {
        // Load the shared-barrier common stub's table offset into x15 and jump to
        // the caller-provided needCall tail, which resolves and invokes it.
        int32_t SValueBarrierOffset = static_cast<int32_t>(JSThread::GlueData::GetCOStubEntriesOffset(false)) +
            kungfu::CommonStubCSigns::SetSValueWithBarrier * FRAME_SLOT_SIZE;
        __ Mov(X15, SValueBarrierOffset);
        __ B(&needCall);
    }
}
1455
1456 // Generate code for generator re-entering asm interpreter
1457 // c++ calling convention
1458 // Input: %X0 - glue
1459 // %X1 - context(GeneratorContext)
void AsmInterpreterCall::GeneratorReEnterAsmInterp(ExtendedAssembler *assembler)
{
    __ BindAssemblerStub(RTSTUB_ID(GeneratorReEnterAsmInterp));
    Label target;
    // The prologue/epilogue byte length is pinned: stack walkers rely on
    // ARM64EntryFrameDuration to identify this entry frame, so any change here
    // must update that constant too (checked at stub-generation time below).
    size_t begin = __ GetCurrentPosition();
    PushAsmInterpEntryFrame(assembler);
    __ Bl(&target);
    PopAsmInterpEntryFrame(assembler);
    size_t end = __ GetCurrentPosition();
    if ((end - begin) != FrameCompletionPos::ARM64EntryFrameDuration) {
        LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::ARM64EntryFrameDuration
                            << "This frame has been modified, and the offset EntryFrameDuration should be updated too.";
    }
    __ Ret();
    __ Bind(&target);
    {
        GeneratorReEnterAsmInterpDispatch(assembler);
    }
}
1479
// Rebuilds an interpreted frame from a suspended GeneratorContext (X1):
// copies the saved register array back onto the stack, pushes the generator
// frame state, and dispatches into the bytecode at the saved pc.
void AsmInterpreterCall::GeneratorReEnterAsmInterpDispatch(ExtendedAssembler *assembler)
{
    Label pushFrameState;
    Label stackOverflow;
    Register glue = __ GlueRegister();
    Register contextRegister(X1);
    Register spRegister(SP);
    Register pc(X8);
    Register prevSpRegister(FP);
    Register callTarget(X4);
    Register method(X5);
    Register temp(X6);  // can not be used to store any variable
    Register currentSlotRegister(X7);
    Register fpRegister(X9);
    Register thisRegister(X25);
    Register nRegsRegister(X26, W);
    Register regsArrayRegister(X27);
    Register newSp(X28);
    __ Ldr(callTarget, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_METHOD_OFFSET));
    __ Ldr(method, MemoryOperand(callTarget, JSFunctionBase::METHOD_OFFSET));
    __ PushFpAndLr();
    // save fp
    __ Mov(fpRegister, spRegister);
    __ Mov(currentSlotRegister, spRegister);
    // Reserve enough sp space to prevent stack parameters from being covered by cpu profiler.
    __ Ldr(temp, MemoryOperand(glue, JSThread::GlueData::GetStackLimitOffset(false)));
    __ Mov(Register(SP), temp);
    // push context regs: restore the generator's virtual registers from the
    // saved TaggedArray (writes go through currentSlotRegister, not SP).
    __ Ldr(nRegsRegister, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_NREGS_OFFSET));
    __ Ldr(thisRegister, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_THIS_OFFSET));
    __ Ldr(regsArrayRegister, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_REGS_ARRAY_OFFSET));
    __ Add(regsArrayRegister, regsArrayRegister, Immediate(TaggedArray::DATA_OFFSET));
    PushArgsWithArgv(assembler, glue, nRegsRegister, regsArrayRegister, temp,
                     currentSlotRegister, &pushFrameState, &stackOverflow);

    __ Bind(&pushFrameState);
    __ Mov(newSp, currentSlotRegister);
    // push frame state
    PushGeneratorFrameState(assembler, prevSpRegister, fpRegister, currentSlotRegister, callTarget, thisRegister,
                            method, contextRegister, pc, temp);
    // SP must stay 16-byte aligned before entering the bytecode stub.
    __ Align16(currentSlotRegister);
    __ Mov(Register(SP), currentSlotRegister);
    // call bc stub
    CallBCStub(assembler, newSp, glue, callTarget, method, pc, temp);

    __ Bind(&stackOverflow);
    {
        ThrowStackOverflowExceptionAndReturn(assembler, glue, fpRegister, temp);
    }
}
1530
// Pushes the optional this/newTarget/callTarget slots demanded by the callee's
// callField bits, then falls through to pushing the vregs. Slots are written
// downward through currentSlotRegister (pre-indexed stores).
void AsmInterpreterCall::PushCallThis(ExtendedAssembler *assembler,
    JSCallMode mode, Label *stackOverflow, FrameTransitionType type)
{
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
    Register thisRegister = __ AvailableRegister2();
    Register currentSlotRegister = __ AvailableRegister3();

    Label pushVregs;
    Label pushNewTarget;
    Label pushCallTarget;
    // Whether this call mode supplies a `this` / newTarget argument at all.
    bool haveThis = kungfu::AssemblerModule::JSModeHaveThisArg(mode);
    bool haveNewTarget = kungfu::AssemblerModule::JSModeHaveNewTargetArg(mode);
    if (!haveThis) {
        __ Mov(thisRegister, Immediate(JSTaggedValue::VALUE_UNDEFINED));  // default this: undefined
    } else {
        Register thisArgRegister = GetThisRegsiter(assembler, mode, thisRegister);
        if (thisRegister.GetId() != thisArgRegister.GetId()) {
            __ Mov(thisRegister, thisArgRegister);
        }
    }
    // A zero call type means a plain call: no this/newTarget/func slots needed.
    __ Tst(callFieldRegister, LogicalImmediate::Create(CALL_TYPE_MASK, RegXSize));
    __ B(Condition::EQ, &pushVregs);
    // Each slot is pushed only if the corresponding callField bit is set.
    __ Tbz(callFieldRegister, MethodLiteral::HaveThisBit::START_BIT, &pushNewTarget);
    if (!haveThis) {
        [[maybe_unused]] TempRegister1Scope scope1(assembler);
        Register tempRegister = __ TempRegister1();
        __ Mov(tempRegister, Immediate(JSTaggedValue::VALUE_UNDEFINED));
        __ Str(tempRegister, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    } else {
        __ Str(thisRegister, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    }
    __ Bind(&pushNewTarget);
    {
        __ Tbz(callFieldRegister, MethodLiteral::HaveNewTargetBit::START_BIT, &pushCallTarget);
        if (!haveNewTarget) {
            [[maybe_unused]] TempRegister1Scope scope1(assembler);
            Register newTarget = __ TempRegister1();
            __ Mov(newTarget, Immediate(JSTaggedValue::VALUE_UNDEFINED));
            __ Str(newTarget, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
        } else {
            [[maybe_unused]] TempRegister1Scope scope1(assembler);
            Register defaultRegister = __ TempRegister1();
            Register newTargetRegister = GetNewTargetRegsiter(assembler, mode, defaultRegister);
            __ Str(newTargetRegister, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
        }
    }
    __ Bind(&pushCallTarget);
    {
        __ Tbz(callFieldRegister, MethodLiteral::HaveFuncBit::START_BIT, &pushVregs);
        __ Str(callTargetRegister, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    }
    __ Bind(&pushVregs);
    {
        PushVregs(assembler, stackOverflow, type);
    }
}
1588
// Pushes the method's declared vregs (initialized to undefined), builds the
// ASM_INTERPRETER_FRAME state on the stack, and then either jumps to compiled
// baseline code (when present and the transition type asks for the check) or
// dispatches to the bytecode handler for the first pc.
// stackOverflow - label taken when pushing the undefined vregs overflows.
// type          - when OTHER_TO_BASELINE_CHECK / BASELINE_TO_BASELINE_CHECK,
//                 check the function's baseline code slot before dispatching.
void AsmInterpreterCall::PushVregs(ExtendedAssembler *assembler,
    Label *stackOverflow, FrameTransitionType type)
{
    Register glue = __ GlueRegister();
    Register prevSpRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::SP);
    Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
    Register methodRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::METHOD);
    Register callFieldRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_FIELD);
    Register fpRegister = __ AvailableRegister1();
    Register thisRegister = __ AvailableRegister2();
    Register currentSlotRegister = __ AvailableRegister3();

    Label pushFrameStateAndCall;
    [[maybe_unused]] TempRegister1Scope scope1(assembler);
    Register tempRegister = __ TempRegister1();
    // args register can be reused now.
    Register newSpRegister = __ AvailableRegister4();
    Register numVregsRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::ARG1);
    GetNumVregsFromCallField(assembler, callFieldRegister, numVregsRegister);
    // Fill all declared vregs with undefined; branches to stackOverflow on
    // stack-limit failure.
    PushUndefinedWithArgc(assembler, glue, numVregsRegister, tempRegister, currentSlotRegister, &pushFrameStateAndCall,
                          stackOverflow);
    // fall through
    __ Bind(&pushFrameStateAndCall);
    {
        // newSp marks the top of the new interpreter frame, before the frame
        // state slots are pushed below it.
        __ Mov(newSpRegister, currentSlotRegister);

        [[maybe_unused]] TempRegister2Scope scope2(assembler);
        Register pcRegister = __ TempRegister2();
        PushFrameState(assembler, prevSpRegister, fpRegister, currentSlotRegister, callTargetRegister, thisRegister,
                       methodRegister, pcRegister, tempRegister);

        __ Align16(currentSlotRegister);
        __ Mov(Register(SP), currentSlotRegister);
        if (type == FrameTransitionType::OTHER_TO_BASELINE_CHECK ||
            type == FrameTransitionType::BASELINE_TO_BASELINE_CHECK) {
            // Check whether baseline code exists for this function.
            // TODO(review): temporary modification, needs re-checking.
            Label baselineCodeUndefined;
            __ Ldr(tempRegister, MemoryOperand(callTargetRegister, JSFunction::BASELINECODE_OFFSET));
            __ Cmp(tempRegister, Immediate(JSTaggedValue::VALUE_UNDEFINED));
            __ B(Condition::EQ, &baselineCodeUndefined);

            // VALUE_HOLE in the slot means baseline code is still compiling.
            __ Cmp(tempRegister, Immediate(JSTaggedValue::VALUE_HOLE));
            __ B(Condition::EQ, &baselineCodeUndefined);

            // Load the machine-code entry point of the baseline code.
            __ Ldr(tempRegister, MemoryOperand(tempRegister, MachineCode::FUNCADDR_OFFSET));
            // Baseline code expects glue in X19 and method in X21.
            if (glue != X19) {
                __ Mov(X19, glue);
            }
            if (methodRegister != X21) {
                __ Mov(X21, methodRegister);
            }
            // Mark the frame's pc slot with the baseline-jit flag.
            __ Mov(currentSlotRegister, Immediate(BASELINEJIT_PC_FLAG));
            // -3: frame type, prevSp, pc
            __ Stur(currentSlotRegister, MemoryOperand(newSpRegister, -3 * FRAME_SLOT_SIZE));
            __ Mov(Register(X29), newSpRegister);
            // Tail-jump into the baseline code; never returns here.
            __ Br(tempRegister);
            __ Bind(&baselineCodeUndefined);
        }
        // No baseline code: dispatch the first bytecode through the stub table.
        DispatchCall(assembler, pcRegister, newSpRegister);
    }
}
1651
// Loads the interpreter's fixed dispatch registers and jumps to the bytecode
// handler stub selected by the first opcode at pcRegister.
// Register convention on entry to a BC stub:
//   X19 - glue
//   FP  - sp (new interpreter frame)
//   X20 - pc
//   X21 - constantpool
//   X22 - profileTypeInfo
//   X23 - acc (VALUE_HOLE when no accRegister is supplied)
//   X24 - hotnessCounter
void AsmInterpreterCall::DispatchCall(ExtendedAssembler *assembler, Register pcRegister,
    Register newSpRegister, Register accRegister)
{
    Register glueRegister = __ GlueRegister();
    Register callTargetRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::CALL_TARGET);
    Register methodRegister = __ CallDispatcherArgument(kungfu::CallDispatchInputs::METHOD);

    if (glueRegister.GetId() != X19) {
        __ Mov(Register(X19), glueRegister);
    }
    // hotnessCounter lives in the low 16 bits of the method's literal info.
    __ Ldrh(Register(X24, W), MemoryOperand(methodRegister, Method::LITERAL_INFO_OFFSET));
    if (accRegister == INVALID_REG) {
        __ Mov(Register(X23), Immediate(JSTaggedValue::VALUE_HOLE));
    } else {
        // Caller must have already placed acc in X23.
        ASSERT(accRegister == Register(X23));
    }
    // profileTypeInfo = function->rawProfileTypeInfo->value
    __ Ldr(Register(X22), MemoryOperand(callTargetRegister, JSFunction::RAW_PROFILE_TYPE_INFO_OFFSET));
    __ Ldr(Register(X22), MemoryOperand(Register(X22), ProfileTypeInfoCell::VALUE_OFFSET));
    __ Ldr(Register(X21), MemoryOperand(methodRegister, Method::CONSTANT_POOL_OFFSET));
    __ Mov(Register(X20), pcRegister);
    __ Mov(Register(FP), newSpRegister);

    Register bcIndexRegister = __ AvailableRegister1();
    Register tempRegister = __ AvailableRegister2();
    // Index the BC stub entry table by the opcode byte at pc.
    __ Ldrb(bcIndexRegister.W(), MemoryOperand(pcRegister, 0));
    __ Add(tempRegister, glueRegister, Operand(bcIndexRegister.W(), UXTW, FRAME_SLOT_SIZE_LOG2));
    __ Ldr(tempRegister, MemoryOperand(tempRegister, JSThread::GlueData::GetBCStubEntriesOffset(false)));
    __ Br(tempRegister);
}
1685
// Pushes the ASM_INTERPRETER_FRAME state slots below currentSlot, in order:
// [frameType, prevSp] [pc, fp] [jumpSizeAfterCall(=0), env] [acc(=HOLE), this]
// [callTarget]. pc is loaded from the method's bytecode array; env from the
// function's lexical environment. `op` is a scratch register and is clobbered.
void AsmInterpreterCall::PushFrameState(ExtendedAssembler *assembler, Register prevSp, Register fp,
    Register currentSlot, Register callTarget, Register thisObj, Register method, Register pc, Register op)
{
    __ Mov(op, Immediate(static_cast<int32_t>(FrameType::ASM_INTERPRETER_FRAME)));
    __ Stp(prevSp, op, MemoryOperand(currentSlot, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // -2: frame type & prevSp
    __ Ldr(pc, MemoryOperand(method, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET));
    __ Stp(fp, pc, MemoryOperand(currentSlot, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // -2: pc & fp
    __ Ldr(op, MemoryOperand(callTarget, JSFunction::LEXICAL_ENV_OFFSET));
    __ Stp(op, Register(Zero), MemoryOperand(currentSlot,
                                             -2 * FRAME_SLOT_SIZE, // -2: jumpSizeAfterCall & env
                                             AddrMode::PREINDEX));
    // acc starts as VALUE_HOLE.
    __ Mov(op, Immediate(JSTaggedValue::VALUE_HOLE));
    __ Stp(thisObj, op, MemoryOperand(currentSlot, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // -2: acc & this
    __ Str(callTarget, MemoryOperand(currentSlot, -FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // -1: callTarget
}
1701
// Extracts the declared number of vregs from the callField bitfield:
// numVregs = (callField >> NumVregsBits::START_BIT) & NumVregsBits mask.
void AsmInterpreterCall::GetNumVregsFromCallField(ExtendedAssembler *assembler, Register callField, Register numVregs)
{
    __ Mov(numVregs, callField);
    __ Lsr(numVregs, numVregs, MethodLiteral::NumVregsBits::START_BIT);
    __ And(numVregs.W(), numVregs.W(), LogicalImmediate::Create(
        MethodLiteral::NumVregsBits::Mask() >> MethodLiteral::NumVregsBits::START_BIT, RegWSize));
}
1709
// Extracts the declared argument count from the callField bitfield:
// declaredNumArgs = (callField >> NumArgsBits::START_BIT) & NumArgsBits mask.
void AsmInterpreterCall::GetDeclaredNumArgsFromCallField(ExtendedAssembler *assembler, Register callField,
    Register declaredNumArgs)
{
    __ Mov(declaredNumArgs, callField);
    __ Lsr(declaredNumArgs, declaredNumArgs, MethodLiteral::NumArgsBits::START_BIT);
    __ And(declaredNumArgs.W(), declaredNumArgs.W(), LogicalImmediate::Create(
        MethodLiteral::NumArgsBits::Mask() >> MethodLiteral::NumArgsBits::START_BIT, RegWSize));
}
1718
// Builds an ASM_INTERPRETER_ENTRY_FRAME on the native stack:
// saves callee-saved registers (only when entering from C++, not from an
// interpreter handler), pushes fp/lr, then [prevLeaveFrame, frameType] and
// [glue, pc(=0)], and finally points X29 at the frame's type slot.
// The instruction-byte span of the C++-to-asm-interp prologue is checked
// against FrameCompletionPos::ARM64CppToAsmInterp so stack walkers stay valid.
void AsmInterpreterCall::PushAsmInterpEntryFrame(ExtendedAssembler *assembler)
{
    Register glue = __ GlueRegister();
    Register fp(X29);
    Register sp(SP);

    size_t begin = __ GetCurrentPosition();
    if (!assembler->FromInterpreterHandler()) {
        // Entering from C++: preserve all callee-saved registers.
        __ CalleeSave();
    }

    [[maybe_unused]] TempRegister1Scope scope1(assembler);
    Register prevFrameRegister = __ TempRegister1();
    [[maybe_unused]] TempRegister2Scope scope2(assembler);
    Register frameTypeRegister = __ TempRegister2();

    __ PushFpAndLr();

    // prev managed fp is leave frame or nullptr(the first frame)
    __ Ldr(prevFrameRegister, MemoryOperand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)));
    __ Mov(frameTypeRegister, Immediate(static_cast<int64_t>(FrameType::ASM_INTERPRETER_ENTRY_FRAME)));
    // 2 : prevSp & frame type
    __ Stp(prevFrameRegister, frameTypeRegister, MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    // 2 : pc & glue
    __ Stp(glue, Register(Zero), MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // pc
    if (!assembler->FromInterpreterHandler()) {
        size_t end = __ GetCurrentPosition();
        if ((end - begin) != FrameCompletionPos::ARM64CppToAsmInterp) {
            LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::ARM64CppToAsmInterp
                                << "This frame has been modified, and the offset CppToAsmInterp should be updated too.";
        }
    }
    __ Add(fp, sp, Immediate(4 * FRAME_SLOT_SIZE)); // 4: 32 means skip frame type, prevSp, pc and glue
}
1753
// Tears down the ASM_INTERPRETER_ENTRY_FRAME pushed by PushAsmInterpEntryFrame:
// pops [glue, pc] and [prevFrame, frameType], restores glue->leaveFrame to the
// saved prev frame, restores fp/lr and (when returning to C++) callee-saved
// registers. The asm-interp-to-C++ epilogue's byte span is checked against
// FrameCompletionPos::ARM64AsmInterpToCpp so stack walkers stay valid.
void AsmInterpreterCall::PopAsmInterpEntryFrame(ExtendedAssembler *assembler)
{
    Register sp(SP);

    [[maybe_unused]] TempRegister1Scope scope1(assembler);
    Register prevFrameRegister = __ TempRegister1();
    [[maybe_unused]] TempRegister2Scope scope2(assembler);
    Register glue = __ TempRegister2();
    // 2: glue & pc
    __ Ldp(glue, Register(Zero), MemoryOperand(sp, 2 * FRAME_SLOT_SIZE, AddrMode::POSTINDEX));
    // 2: skip frame type & prev
    __ Ldp(prevFrameRegister, Register(Zero), MemoryOperand(sp, 2 * FRAME_SLOT_SIZE, AddrMode::POSTINDEX));
    __ Str(prevFrameRegister, MemoryOperand(glue, JSThread::GlueData::GetLeaveFrameOffset(false)));
    size_t begin = __ GetCurrentPosition();
    __ RestoreFpAndLr();
    if (!assembler->FromInterpreterHandler()) {
        __ CalleeRestore();
        size_t end = __ GetCurrentPosition();
        if ((end - begin) != FrameCompletionPos::ARM64AsmInterpToCpp) {
            LOG_COMPILER(FATAL) << (end - begin) << " != " << FrameCompletionPos::ARM64AsmInterpToCpp
                                << "This frame has been modified, and the offset AsmInterpToCpp should be updated too.";
        }
    }
}
1778
1779 void AsmInterpreterCall::PushGeneratorFrameState(ExtendedAssembler *assembler, Register &prevSpRegister,
1780 Register &fpRegister, Register ¤tSlotRegister, Register &callTargetRegister, Register &thisRegister,
1781 Register &methodRegister, Register &contextRegister, Register &pcRegister, Register &operatorRegister)
1782 {
1783 __ Mov(operatorRegister, Immediate(static_cast<int64_t>(FrameType::ASM_INTERPRETER_FRAME)));
1784 __ Stp(prevSpRegister, operatorRegister,
1785 MemoryOperand(currentSlotRegister, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // 2 : frameType and prevSp
1786 __ Ldr(pcRegister, MemoryOperand(methodRegister, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET));
1787 // offset need 8 align, GENERATOR_NREGS_OFFSET instead of GENERATOR_BC_OFFSET_OFFSET
1788 __ Ldr(operatorRegister, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_NREGS_OFFSET));
1789 // 32: get high 32bit
1790 __ Lsr(operatorRegister, operatorRegister, 32);
1791 __ Add(pcRegister, operatorRegister, pcRegister);
1792 // 2 : pc and fp
1793 __ Stp(fpRegister, pcRegister, MemoryOperand(currentSlotRegister, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
1794 // jumpSizeAfterCall
1795 __ Str(Register(Zero), MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
1796 __ Ldr(operatorRegister, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_LEXICALENV_OFFSET));
1797 // env
1798 __ Str(operatorRegister, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
1799 __ Ldr(operatorRegister, MemoryOperand(contextRegister, GeneratorContext::GENERATOR_ACC_OFFSET));
1800 // acc
1801 __ Str(operatorRegister, MemoryOperand(currentSlotRegister, -FRAME_SLOT_SIZE, AddrMode::PREINDEX));
1802 __ Stp(callTargetRegister, thisRegister,
1803 MemoryOperand(currentSlotRegister, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX)); // 2 : acc and callTarget
1804 }
1805
// Loads the interpreter's fixed register set (same convention as DispatchCall:
// X19=glue, FP=sp, X20=pc, X21=constantpool, X22=profileTypeInfo, X23=acc,
// X24=hotnessCounter) and jumps to the bytecode handler for the opcode at pc.
// Used on the generator-resume path. `temp` is a scratch register.
void AsmInterpreterCall::CallBCStub(ExtendedAssembler *assembler, Register &newSp, Register &glue,
    Register &callTarget, Register &method, Register &pc, Register &temp)
{
    // prepare call entry
    __ Mov(Register(X19), glue);              // X19 - glue
    __ Mov(Register(FP), newSp);              // FP - sp
    __ Mov(Register(X20), pc);                // X20 - pc
    __ Ldr(Register(X21), MemoryOperand(method, Method::CONSTANT_POOL_OFFSET)); // X21 - constantpool
    __ Ldr(Register(X22), MemoryOperand(callTarget, JSFunction::RAW_PROFILE_TYPE_INFO_OFFSET));
    __ Ldr(Register(X22), MemoryOperand(Register(X22), ProfileTypeInfoCell::VALUE_OFFSET)); // X22 - profileTypeInfo
    __ Mov(Register(X23), Immediate(JSTaggedValue::Hole().GetRawData()));       // X23 - acc
    __ Ldr(Register(X24), MemoryOperand(method, Method::LITERAL_INFO_OFFSET));  // X24 - hotnessCounter

    // call the first bytecode handler
    __ Ldrb(temp.W(), MemoryOperand(pc, 0));
    // scale the opcode byte by the slot size (shift by FRAME_SLOT_SIZE_LOG2 == *8)
    __ Add(temp, glue, Operand(temp.W(), UXTW, FRAME_SLOT_SIZE_LOG2));
    __ Ldr(temp, MemoryOperand(temp, JSThread::GlueData::GetBCStubEntriesOffset(false)));
    __ Br(temp);
}
1826
// Entry for calling a native (C++) builtin from the interpreter.
// Input (see AsmInterpreterEntry convention): X0 glue, X1 function, X2 method,
// X3 callField, X5 argv. Fast builtins (IsFastBuiltinBit set in callField) are
// routed to CallFastBuiltin; otherwise a BUILTIN_ENTRY_FRAME is pushed and the
// native code pointer from the method is invoked.
void AsmInterpreterCall::CallNativeEntry(ExtendedAssembler *assembler)
{
    Label callFastBuiltin;
    Label callNativeBuiltin;
    Register glue(X0);
    Register argv(X5);
    Register method(X2);
    Register function(X1);
    Register nativeCode(X7);
    Register temp(X9);
    Register callFieldRegister(X3);
    // get native pointer
    __ Ldr(nativeCode, MemoryOperand(method, Method::NATIVE_POINTER_OR_BYTECODE_ARRAY_OFFSET));
    __ Tbnz(callFieldRegister, MethodLiteral::IsFastBuiltinBit::START_BIT, &callFastBuiltin);

    __ Bind(&callNativeBuiltin);
    Register sp(SP);
    // 2: function & align
    __ Stp(function, Register(Zero), MemoryOperand(sp, -2 * FRAME_SLOT_SIZE, AddrMode::PREINDEX));
    // 2: skip argc & thread
    __ Sub(sp, sp, Immediate(2 * FRAME_SLOT_SIZE));
    PushBuiltinFrame(assembler, glue, FrameType::BUILTIN_ENTRY_FRAME, temp, argv);
    // X0 = EcmaRuntimeCallInfo*: argv minus the argc & thread slots.
    __ Mov(temp, argv);
    __ Sub(Register(X0), temp, Immediate(2 * FRAME_SLOT_SIZE)); // 2: skip argc & thread
    CallNativeInternal(assembler, nativeCode);

    // 4: drop the argc/thread skip plus the function & align slots
    __ Add(sp, sp, Immediate(4 * FRAME_SLOT_SIZE));
    __ Ret();

    __ Bind(&callFastBuiltin);
    CallFastBuiltin(assembler, &callNativeBuiltin);
}
1860
// Fast path for calling a builtin through its compiled stub instead of the
// generic native entry. Bails out to callNativeBuiltin when the builtin id is
// a constructor stub or argc > 3. Marshals up to three args into registers
// per the builtin-stub convention (X1 nativeCode, X2 func, X3 newTarget,
// X4 this, X5 argc, X6/X7 arg0/arg1, arg2 on the stack) and calls the stub
// entry from the glue's builtins-stub table.
// argv layout (see AsmInterpreterEntry): <callTarget, newTarget, this, args...>
void AsmInterpreterCall::CallFastBuiltin(ExtendedAssembler *assembler, Label *callNativeBuiltin)
{
    Label lCall1;
    Label lCall2;
    Label lCall3;
    Label callEntry;
    Register sp(SP);
    Register glue(X0);
    Register function(X1);
    Register method(X2);
    Register argc(X4);
    Register argv(X5);
    Register nativeCode(X7);

    Register builtinId = __ AvailableRegister1();
    Register temp = __ AvailableRegister2();
    // get builtinid
    __ Ldr(builtinId, MemoryOperand(method, Method::EXTRA_LITERAL_INFO_OFFSET)); // get extra literal
    __ And(builtinId.W(), builtinId.W(), LogicalImmediate::Create(0xff, RegWSize));
    // Constructor stubs are not handled here.
    __ Cmp(builtinId.W(), Immediate(kungfu::BuiltinsStubCSigns::BUILTINS_CONSTRUCTOR_STUB_FIRST));
    __ B(Condition::GE, callNativeBuiltin);

    __ Cmp(argc, Immediate(3)); // 3: number of args
    __ B(Condition::HI, callNativeBuiltin);

    // get builtin func addr
    __ Add(builtinId, glue, Operand(builtinId.W(), UXTW, FRAME_SLOT_SIZE_LOG2));
    __ Ldr(builtinId, MemoryOperand(builtinId, JSThread::GlueData::GetBuiltinsStubEntriesOffset(false)));
    // create frame
    PushAsmBridgeFrame(assembler);
    // X1 (function) is clobbered by nativeCode, so stage it through temp.
    __ Mov(temp, function);
    __ Mov(X1, nativeCode);
    __ Mov(X2, temp);
    __ Mov(temp, argv);
    __ Mov(X5, argc);
    __ Ldr(X3, MemoryOperand(temp, FRAME_SLOT_SIZE));   // X3 = newTarget (argv[1])
    __ Ldr(X4, MemoryOperand(temp, DOUBLE_SLOT_SIZE));  // X4 = this (argv[2])

    // argc == 0: all three args are undefined.
    __ Cmp(Register(X5), Immediate(0));
    __ B(Condition::NE, &lCall1);
    __ Mov(Register(X6), Immediate(JSTaggedValue::VALUE_UNDEFINED));
    __ Mov(Register(X7), Immediate(JSTaggedValue::VALUE_UNDEFINED));
    __ Stp(Register(X7), Register(X7), MemoryOperand(sp, -DOUBLE_SLOT_SIZE, PREINDEX));
    __ B(&callEntry);

    __ Bind(&lCall1);
    {
        // argc == 1: arg0 from argv, arg1/arg2 undefined.
        __ Cmp(Register(X5), Immediate(1));
        __ B(Condition::NE, &lCall2);
        __ Ldr(Register(X6), MemoryOperand(temp, TRIPLE_SLOT_SIZE));
        __ Mov(Register(X7), Immediate(JSTaggedValue::VALUE_UNDEFINED)); // reset x7
        __ Stp(Register(X7), Register(X7), MemoryOperand(sp, -DOUBLE_SLOT_SIZE, PREINDEX));
        __ B(&callEntry);
    }

    __ Bind(&lCall2);
    {
        // argc == 2: arg0/arg1 from argv, arg2 undefined.
        __ Cmp(Register(X5), Immediate(2)); // 2: number of args
        __ B(Condition::NE, &lCall3);
        __ Mov(Register(X7), Immediate(JSTaggedValue::VALUE_UNDEFINED));
        __ Stp(Register(X7), Register(X7), MemoryOperand(sp, -DOUBLE_SLOT_SIZE, PREINDEX));
        __ Ldp(Register(X6), Register(X7), MemoryOperand(temp, TRIPLE_SLOT_SIZE));
        __ B(&callEntry);
    }

    __ Bind(&lCall3);
    {
        // argc == 3: arg2 (argv[5]) goes on the stack (duplicated for 16-byte
        // alignment), arg0/arg1 in X6/X7.
        __ Ldr(Register(X7), MemoryOperand(temp, QUINTUPLE_SLOT_SIZE));
        __ Stp(Register(X7), Register(X7), MemoryOperand(sp, -DOUBLE_SLOT_SIZE, PREINDEX));
        __ Ldp(Register(X6), Register(X7), MemoryOperand(temp, TRIPLE_SLOT_SIZE)); // get arg0 arg1
        __ B(&callEntry);
    }

    __ Bind(&callEntry);
    {
        __ Blr(builtinId);
        // Pop the stack-passed arg pair.
        __ Add(sp, sp, Immediate(DOUBLE_SLOT_SIZE));
    }
    PopAsmBridgeFrame(assembler);
    __ Ret();
}
1942
// Restores SP to the saved frame pointer, calls the runtime's
// ThrowStackOverflowException stub (looked up in the glue's RT-stub entry
// table) with glue in X0, then restores fp/lr and returns to the caller.
// `op` is a scratch register and is clobbered.
void AsmInterpreterCall::ThrowStackOverflowExceptionAndReturn(ExtendedAssembler *assembler, Register glue,
    Register fp, Register op)
{
    if (fp != Register(SP)) {
        __ Mov(Register(SP), fp);
    }
    __ Mov(op, Immediate(kungfu::RuntimeStubCSigns::ID_ThrowStackOverflowException));
    // 3 : shift by 3 (*8) to scale the stub id to a table-slot byte offset
    __ Add(op, glue, Operand(op, LSL, 3));
    __ Ldr(op, MemoryOperand(op, JSThread::GlueData::GetRTStubEntriesOffset(false)));
    if (glue.GetId() != X0) {
        // Runtime stubs take glue as their first argument.
        __ Mov(Register(X0), glue);
    }
    __ Blr(op);
    __ RestoreFpAndLr();
    __ Ret();
}
1960 #undef __
1961 } // panda::ecmascript::aarch64