/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ECMASCRIPT_ECMA_MACROS_H
#define ECMASCRIPT_ECMA_MACROS_H

#include "ecmascript/common.h"
#include "ecmascript/log_wrapper.h"

#if defined(ENABLE_BYTRACE)
#include "hitrace_meter.h"
#endif

#if defined(__cplusplus)
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)

#define OPTIONAL_LOG(vm, level) LOG_ECMA_IF(vm->IsOptionalLogEnabled(), level)
#define OPTIONAL_LOG_COMPILER(level) LOG_ECMA_IF(IsLogEnabled(), level)

#if !defined(ENABLE_BYTRACE)
#define ECMA_BYTRACE_NAME(tag, name)
#define ECMA_BYTRACE_START_TRACE(tag, msg)
#define ECMA_BYTRACE_FINISH_TRACE(tag)
#else
#define ECMA_BYTRACE_NAME(tag, name) HITRACE_METER_NAME(tag, name)
#define ECMA_BYTRACE_START_TRACE(tag, msg) StartTrace(tag, msg)
#define ECMA_BYTRACE_FINISH_TRACE(tag) FinishTrace(tag)
#endif

#if defined(ENABLE_HITRACE)
#define ENQUEUE_JOB_HITRACE(pendingJob, queueType) job::EnqueueJobScope hitraceScope(pendingJob, queueType)
#define EXECUTE_JOB_HITRACE(pendingJob) job::ExecuteJobScope hitraceScope(pendingJob)
#define ENQUEUE_JOB_TRACE(thread, pendingJob) job::EnqueueJobTrace enqueueJobTrace(thread, pendingJob)
#define EXECUTE_JOB_TRACE(thread, pendingJob) job::ExecuteJobTrace executeJobTrace(thread, pendingJob)
#else
#define ENQUEUE_JOB_HITRACE(pendingJob, queueType)
#define EXECUTE_JOB_HITRACE(pendingJob)
#define ENQUEUE_JOB_TRACE(thread, pendingJob)
#define EXECUTE_JOB_TRACE(thread, pendingJob)
#endif

/* Note: We can't statically decide whether an element is a primitive or a heap object, */
/* especially in a dynamically-typed language like JavaScript, so we simply skip the read barrier. */
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define GET_VALUE(addr, offset) Barriers::GetValue<JSTaggedType>((addr), (offset))

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define SET_VALUE_WITH_BARRIER(thread, addr, offset, value) \
    if ((value).IsHeapObject()) { \
        Barriers::SetObject<true>(thread, addr, offset, (value).GetRawData()); \
    } else { \
        Barriers::SetPrimitive<JSTaggedType>(addr, offset, (value).GetRawData()); \
    }

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define SET_VALUE_PRIMITIVE(addr, offset, value) \
    Barriers::SetPrimitive<JSTaggedType>(addr, offset, (value).GetRawData())
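
// A minimal usage sketch (not part of this header's API surface): GET_VALUE and
// SET_VALUE_WITH_BARRIER operate on a raw object address plus a byte offset. The
// object pointer and OFFSET constant below are hypothetical.
//
//     JSTaggedValue old = JSTaggedValue(GET_VALUE(obj, OFFSET));
//     SET_VALUE_WITH_BARRIER(thread, obj, OFFSET, newValue);  // barrier taken only if newValue is a heap object
//
// SET_VALUE_PRIMITIVE never takes the write barrier, so it is intended for values
// known to be primitives.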

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define ACCESSORS(name, offset, endOffset) \
    static constexpr size_t endOffset = (offset) + JSTaggedValue::TaggedTypeSize(); \
    JSTaggedValue Get##name() const \
    { \
        /* Note: We can't statically decide whether an element is a primitive or a heap object, */ \
        /* especially in a dynamically-typed language like JavaScript, so we simply skip the read barrier. */ \
        return JSTaggedValue(Barriers::GetValue<JSTaggedType>(this, offset)); \
    } \
    template<typename T> \
    void Set##name(const JSThread *thread, JSHandle<T> value, BarrierMode mode = WRITE_BARRIER) \
    { \
        if (mode == WRITE_BARRIER) { \
            if (value.GetTaggedValue().IsHeapObject()) { \
                Barriers::SetObject<true>(thread, this, offset, value.GetTaggedValue().GetRawData()); \
            } else { \
                Barriers::SetPrimitive<JSTaggedType>(this, offset, value.GetTaggedValue().GetRawData()); \
            } \
        } else { \
            Barriers::SetPrimitive<JSTaggedType>(this, offset, value.GetTaggedValue().GetRawData()); \
        } \
    } \
    void Set##name(const JSThread *thread, JSTaggedValue value, BarrierMode mode = WRITE_BARRIER) \
    { \
        if (mode == WRITE_BARRIER) { \
            if (value.IsHeapObject()) { \
                Barriers::SetObject<true>(thread, this, offset, value.GetRawData()); \
            } else { \
                Barriers::SetPrimitive<JSTaggedType>(this, offset, value.GetRawData()); \
            } \
        } else { \
            Barriers::SetPrimitive<JSTaggedType>(this, offset, value.GetRawData()); \
        } \
    }
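
// A sketch of how ACCESSORS is typically instantiated inside an object class body.
// The class name, field name and offsets here are hypothetical, not taken from the
// runtime:
//
//     class JSSample : public JSObject {
//     public:
//         static constexpr size_t VALUE_OFFSET = JSObject::SIZE;
//         ACCESSORS(Value, VALUE_OFFSET, LAST_OFFSET)
//         DEFINE_ALIGN_SIZE(LAST_OFFSET);  // defined further down in this header
//     };
//
// This expands to GetValue()/SetValue() plus the constant LAST_OFFSET, which marks
// where the next field (or the aligned object SIZE) starts. SetValue() takes the
// write barrier unless the caller passes a non-WRITE_BARRIER mode or the stored
// value is a primitive.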

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define ACCESSORS_SYNCHRONIZED_PRIMITIVE_FIELD(name, type, offset, endOffset) \
    static constexpr size_t endOffset = (offset) + sizeof(type); \
    inline type Get##name() const \
    { \
        return reinterpret_cast<volatile std::atomic<type> *>(ToUintPtr(this) + offset) \
            ->load(std::memory_order_acquire); \
    } \
    inline void Set##name(type value) \
    { \
        reinterpret_cast<volatile std::atomic<type> *>(ToUintPtr(this) + offset) \
            ->store(value, std::memory_order_release); \
    }

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define ACCESSORS_SYNCHRONIZED(name, offset, endOffset) \
    static constexpr size_t endOffset = (offset) + JSTaggedValue::TaggedTypeSize(); \
    JSTaggedValue Get##name() const \
    { \
        /* Note: We can't statically decide whether an element is a primitive or a heap object, */ \
        /* especially in a dynamically-typed language like JavaScript, so we simply skip the read barrier. */ \
        /* "Synchronized" means the load and store are atomic, with acquire/release ordering. */ \
        return JSTaggedValue(reinterpret_cast<volatile std::atomic<JSTaggedType> *>(ToUintPtr(this) + offset) \
            ->load(std::memory_order_acquire)); \
    } \
    template<typename T> \
    void Set##name(const JSThread *thread, JSHandle<T> value) \
    { \
        bool isPrimitive = !value.GetTaggedValue().IsHeapObject(); \
        Barriers::SynchronizedSetObject(thread, this, offset, value.GetTaggedValue().GetRawData(), isPrimitive); \
    } \
    void Set##name(const JSThread *thread, JSTaggedValue value) \
    { \
        bool isPrimitive = !value.IsHeapObject(); \
        Barriers::SynchronizedSetObject(thread, this, offset, value.GetRawData(), isPrimitive); \
    }

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DEFINE_ALIGN_SIZE(offset) \
    static constexpr size_t SIZE = ((offset) + sizeof(JSTaggedType) - 1U) & (~(sizeof(JSTaggedType) - 1U))

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define ACCESSORS_FIXED_SIZE_FIELD(name, type, sizeType, offset, endOffset) \
    static_assert(sizeof(type) <= sizeof(sizeType)); \
    static constexpr size_t endOffset = (offset) + sizeof(sizeType); \
    inline void Set##name(type value) \
    { \
        Barriers::SetPrimitive<type>(this, offset, value); \
    } \
    inline type Get##name() const \
    { \
        return Barriers::GetValue<type>(this, offset); \
    }

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define ACCESSORS_NATIVE_FIELD(name, type, offset, endOffset) \
    ACCESSORS_FIXED_SIZE_FIELD(name, type *, type *, offset, endOffset)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define ACCESSORS_PRIMITIVE_FIELD(name, type, offset, endOffset) \
    ACCESSORS_FIXED_SIZE_FIELD(name, type, type, offset, endOffset)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define ACCESSORS_BIT_FIELD(name, offset, endOffset) \
    ACCESSORS_FIXED_SIZE_FIELD(name, uint32_t, uint32_t, offset, endOffset) \
    inline void Clear##name() \
    { \
        Set##name(0UL); \
    }

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define SET_GET_BIT_FIELD(bitFieldName, name, type) \
    inline type Get##name() const \
    { \
        return name##Bits::Decode(Get##bitFieldName()); \
    } \
    inline void Set##name(type t) \
    { \
        Set##bitFieldName(name##Bits::Update(Get##bitFieldName(), t)); \
    }

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define ACCESSORS_PRIMITIVE_FIELD_ATOMIC(name, type, offset) \
    inline void AtomicSet##name(type value) \
    { \
        volatile auto *atomicField = reinterpret_cast<volatile std::atomic<type> *>(ToUintPtr(this) + (offset)); \
        atomicField->store(value, std::memory_order_release); \
    } \
    inline type AtomicGet##name() const \
    { \
        volatile auto *atomicField = reinterpret_cast<volatile std::atomic<type> *>(ToUintPtr(this) + (offset)); \
        return atomicField->load(std::memory_order_acquire); \
    }

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define ACCESSORS_PRIMITIVE_FIELD_HAS_ATOMIC_INTERFACE(name, type, offset, endOffset) \
    ACCESSORS_PRIMITIVE_FIELD(name, type, offset, endOffset) \
    ACCESSORS_PRIMITIVE_FIELD_ATOMIC(name, type, offset)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define FIRST_BIT_FIELD(bitFieldName, name, type, bits) \
    using name##Bits = BitField<type, 0, bits>; \
    SET_GET_BIT_FIELD(bitFieldName, name, type)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define NEXT_BIT_FIELD(bitFieldName, name, type, bits, lastName) \
    using name##Bits = lastName##Bits::NextField<type, bits>; \
    SET_GET_BIT_FIELD(bitFieldName, name, type)
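
// A sketch of how the fixed-size and bit-field helpers compose; every class name,
// field name, offset and bit width below is hypothetical:
//
//     class SampleObject : public TaggedObject {
//     public:
//         static constexpr size_t BIT_FIELD_OFFSET = TaggedObjectSize();
//         ACCESSORS_BIT_FIELD(BitField, BIT_FIELD_OFFSET, LAST_OFFSET)
//         DEFINE_ALIGN_SIZE(LAST_OFFSET);
//
//         FIRST_BIT_FIELD(BitField, Writable, bool, 1)
//         NEXT_BIT_FIELD(BitField, Enumerable, bool, 1, Writable)
//         NEXT_BIT_FIELD(BitField, Kind, uint32_t, 4, Enumerable)
//     };
//
// ACCESSORS_BIT_FIELD declares the raw uint32_t storage (GetBitField/SetBitField/
// ClearBitField); FIRST_BIT_FIELD carves the lowest bits out of it, and each
// NEXT_BIT_FIELD packs its bits immediately after the field named by lastName.
// DEFINE_ALIGN_SIZE rounds the end offset up to a multiple of sizeof(JSTaggedType),
// e.g. an end offset of 36 yields SIZE == 40 on a 64-bit target.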

#if !defined(NDEBUG)
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DASSERT(cond) assert(cond)
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DASSERT_PRINT(cond, message) \
    if (auto cond_val = (cond); UNLIKELY(!(cond_val))) { \
        std::cerr << (message) << std::endl; \
        ASSERT(#cond && cond_val); \
    }
#else // NDEBUG
#define DASSERT(cond) static_cast<void>(0) // NOLINT(cppcoreguidelines-macro-usage)
#define DASSERT_PRINT(cond, message) static_cast<void>(0) // NOLINT(cppcoreguidelines-macro-usage)
#endif // !NDEBUG

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RASSERT(cond) assert(cond)
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RASSERT_PRINT(cond, message) \
    if (auto cond_val = (cond); UNLIKELY(!(cond_val))) { \
        std::cerr << (message) << std::endl; \
        RASSERT(#cond && cond_val); \
    }

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RETURN_IF_ABRUPT_COMPLETION(thread) \
    do { \
        if ((thread)->HasPendingException()) { \
            return; \
        } \
    } while (false)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RETURN_VALUE_IF_ABRUPT_COMPLETION(thread, value) \
    do { \
        if ((thread)->HasPendingException()) { \
            return (value); \
        } \
    } while (false)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RETURN_VALUE_IF_ABRUPT_COMPLETION_WITH_DATA_DELETE(thread, value, flagsStr) \
    do { \
        if ((thread)->HasPendingException()) { \
            delete[] flagsStr; \
            return (value); \
        } \
    } while (false)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RETURN_EXCEPTION_IF_ABRUPT_COMPLETION(thread) \
    do { \
        if ((thread)->HasPendingException()) { \
            return JSTaggedValue::Exception(); \
        } \
    } while (false)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RETURN_EXCEPTION_AND_POP_JOINSTACK(thread, value) \
    do { \
        if ((thread)->HasPendingException()) { \
            auto ecmaContext = (thread)->GetCurrentEcmaContext(); \
            ecmaContext->JoinStackPopFastPath(value); \
            return JSTaggedValue::Exception(); \
        } \
    } while (false)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RETURN_HANDLE_IF_ABRUPT_COMPLETION(type, thread) \
    do { \
        if ((thread)->HasPendingException()) { \
            return JSHandle<type>(thread, JSTaggedValue::Exception()); \
        } \
    } while (false)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define ASSERT_NO_ABRUPT_COMPLETION(thread) ASSERT(!(thread)->HasPendingException());
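
// A sketch of the abrupt-completion guards in use inside a runtime function; the
// preceding call stands in for any operation that may raise an exception, and only
// one guard would appear at a given call site:
//
//     JSHandle<JSTaggedValue> key = ...;  // some step that may throw
//     RETURN_EXCEPTION_IF_ABRUPT_COMPLETION(thread);         // in a function returning JSTaggedValue
//     RETURN_HANDLE_IF_ABRUPT_COMPLETION(JSObject, thread);  // in a function returning JSHandle<JSObject>
//     RETURN_IF_ABRUPT_COMPLETION(thread);                   // in a function returning void
//
// Each guard checks thread->HasPendingException() and returns the exception value
// appropriate for the enclosing function's return type.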

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define SET_DATE_VALUE(name, code, isLocal) \
    static JSTaggedValue name(EcmaRuntimeCallInfo *argv) \
    { \
        ASSERT(argv); \
        JSThread *thread = argv->GetThread(); \
        [[maybe_unused]] EcmaHandleScope handleScope(thread); \
        JSHandle<JSTaggedValue> msg = GetThis(argv); \
        if (!msg->IsDate()) { \
            THROW_TYPE_ERROR_AND_RETURN(thread, "Not a Date Object", JSTaggedValue::Exception()); \
        } \
        JSHandle<JSDate> jsDate(msg); \
        JSTaggedValue result = jsDate->SetDateValue(argv, code, isLocal); \
        RETURN_EXCEPTION_IF_ABRUPT_COMPLETION(thread); \
        jsDate->SetTimeValue(thread, result); \
        return result; \
    }

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DATE_TO_STRING(name) \
    static JSTaggedValue name(EcmaRuntimeCallInfo *argv) \
    { \
        ASSERT(argv); \
        JSThread *thread = argv->GetThread(); \
        [[maybe_unused]] EcmaHandleScope handleScope(thread); \
        JSHandle<JSTaggedValue> msg = GetThis(argv); \
        if (!msg->IsDate()) { \
            THROW_TYPE_ERROR_AND_RETURN(thread, "Not a Date Object", JSTaggedValue::Exception()); \
        } \
        if (std::isnan(JSDate::Cast(msg->GetTaggedObject())->GetTimeValue().GetDouble())) { \
            THROW_RANGE_ERROR_AND_RETURN(thread, "range error", JSTaggedValue::Exception()); \
        } \
        return JSDate::Cast(msg->GetTaggedObject())->name(thread); \
    }

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DATE_STRING(name) \
    static JSTaggedValue name(EcmaRuntimeCallInfo *argv) \
    { \
        ASSERT(argv); \
        JSThread *thread = argv->GetThread(); \
        [[maybe_unused]] EcmaHandleScope handleScope(thread); \
        JSHandle<JSTaggedValue> msg = GetThis(argv); \
        if (!msg->IsDate()) { \
            THROW_TYPE_ERROR_AND_RETURN(thread, "Not a Date Object", JSTaggedValue::Exception()); \
        } \
        if (std::isnan(JSDate::Cast(msg->GetTaggedObject())->GetTimeValue().GetDouble())) { \
            return thread->GetEcmaVM()->GetFactory()->NewFromASCII("Invalid Date").GetTaggedValue(); \
        } \
        return JSDate::Cast(msg->GetTaggedObject())->name(thread); \
    }

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define GET_DATE_VALUE(name, code, isLocal) \
    static JSTaggedValue name(EcmaRuntimeCallInfo *argv) \
    { \
        ASSERT(argv); \
        JSThread *thread = argv->GetThread(); \
        [[maybe_unused]] EcmaHandleScope handleScope(thread); \
        JSHandle<JSTaggedValue> msg = GetThis(argv); \
        if (!msg->IsDate()) { \
            THROW_TYPE_ERROR_AND_RETURN(thread, "Not a Date Object", JSTaggedValue::Exception()); \
        } \
        JSHandle<JSDate> jsDate(msg); \
        double result = jsDate->GetDateValue(jsDate->GetTimeValue().GetDouble(), code, isLocal); \
        return GetTaggedDouble(result); \
    }

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define THROW_NEW_ERROR_AND_RETURN(thread, error) \
    do { \
        if (!(thread)->HasPendingException()) { \
            (thread)->SetException(error); \
        } \
        return; \
    } while (false)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define THROW_ERROR(thread, type, message) \
    do { \
        if ((thread)->HasPendingException()) { \
            return; \
        } \
        ObjectFactory *_factory = (thread)->GetEcmaVM()->GetFactory(); \
        JSHandle<JSObject> _error = _factory->GetJSError(type, message, StackCheck::NO); \
        (thread)->SetException(_error.GetTaggedValue()); \
        return; \
    } while (false)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define THROW_UNCATCHABLE_ERROR(thread, type, message) \
    do { \
        EcmaVM *_ecmaVm = (thread)->GetEcmaVM(); \
        ObjectFactory *_factory = _ecmaVm->GetFactory(); \
        JSHandle<JSObject> _error = _factory->GetJSError(type, message, StackCheck::NO); \
        (thread)->SetException(_error.GetTaggedValue()); \
        _ecmaVm->HandleUncatchableError(); \
    } while (false)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define THROW_NEW_ERROR_AND_RETURN_VALUE(thread, error, value) \
    do { \
        if (!(thread)->HasPendingException()) { \
            (thread)->SetException(error); \
        } \
        return (value); \
    } while (false)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define THROW_NEW_ERROR_AND_RETURN_HANDLE(thread, errorType, type, message) \
    do { \
        if ((thread)->HasPendingException()) { \
            return JSHandle<type>(thread, JSTaggedValue::Exception()); \
        } \
        ObjectFactory *_factory = (thread)->GetEcmaVM()->GetFactory(); \
        JSHandle<JSObject> _error = _factory->GetJSError(errorType, message, StackCheck::NO); \
        (thread)->SetException(_error.GetTaggedValue()); \
        return JSHandle<type>(thread, JSTaggedValue::Exception()); \
    } while (false)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define THROW_NEW_ERROR_WITH_MSG_AND_RETURN_VALUE(thread, errorType, message, value) \
    do { \
        if ((thread)->HasPendingException()) { \
            return (value); \
        } \
        ObjectFactory *_factory = (thread)->GetEcmaVM()->GetFactory(); \
        JSHandle<JSObject> _error = _factory->GetJSError(errorType, message, ecmascript::StackCheck::NO); \
        (thread)->SetException(_error.GetTaggedValue()); \
        return (value); \
    } while (false)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define THROW_MODULE_NOT_FOUND_ERROR_WITH_RETURN_VALUE(thread, requestStr, currentRecord, value) \
    do { \
        if ((thread)->HasPendingException()) { \
            return (value); \
        } \
        ObjectFactory *_factory = (thread)->GetEcmaVM()->GetFactory(); \
        CString normalizeStr = ModulePathHelper::ReformatPath(currentRecord); \
        CString msg = "Cannot find module '" + requestStr + "' imported from '" + normalizeStr + "'."; \
        JSHandle<JSObject> _error = _factory->GetJSError(ErrorType::REFERENCE_ERROR, \
                                                         msg.c_str(), StackCheck::NO); \
        (thread)->SetException(_error.GetTaggedValue()); \
        return (value); \
    } while (false)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define THROW_TYPE_ERROR_AND_RETURN(thread, message, value) \
    THROW_NEW_ERROR_WITH_MSG_AND_RETURN_VALUE(thread, ErrorType::TYPE_ERROR, message, value)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define THROW_RANGE_ERROR_AND_RETURN(thread, message, value) \
    THROW_NEW_ERROR_WITH_MSG_AND_RETURN_VALUE(thread, ErrorType::RANGE_ERROR, message, value)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define THROW_URI_ERROR_AND_RETURN(thread, message, value) \
    THROW_NEW_ERROR_WITH_MSG_AND_RETURN_VALUE(thread, ErrorType::URI_ERROR, message, value)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define THROW_SYNTAX_ERROR_AND_RETURN(thread, message, value) \
    THROW_NEW_ERROR_WITH_MSG_AND_RETURN_VALUE(thread, ErrorType::SYNTAX_ERROR, message, value)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define THROW_REFERENCE_ERROR_AND_RETURN(thread, message, value) \
    THROW_NEW_ERROR_WITH_MSG_AND_RETURN_VALUE(thread, ErrorType::REFERENCE_ERROR, message, value)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define THROW_TYPE_ERROR(thread, message) \
    THROW_ERROR(thread, ErrorType::TYPE_ERROR, message)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define THROW_OOM_ERROR(thread, message) \
    THROW_UNCATCHABLE_ERROR(thread, ErrorType::OOM_ERROR, message)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define THROW_TERMINATION_ERROR(thread, message) \
    THROW_ERROR(thread, ErrorType::TERMINATION_ERROR, message)
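
// A sketch of the throw-and-return pattern these wrappers enable in builtins and
// runtime stubs; the conditions, limit constant and messages are hypothetical:
//
//     // In a function returning JSTaggedValue:
//     if (!target->IsCallable()) {
//         THROW_TYPE_ERROR_AND_RETURN(thread, "target is not callable", JSTaggedValue::Exception());
//     }
//
//     // In a function returning JSHandle<EcmaString>:
//     if (length > MAX_STRING_LENGTH) {
//         THROW_NEW_ERROR_AND_RETURN_HANDLE(thread, ErrorType::RANGE_ERROR, EcmaString, "invalid string length");
//     }
//
// Each wrapper first checks for an already-pending exception (so an earlier error is
// not overwritten), then creates the JSError via the ObjectFactory, installs it on
// the thread, and returns from the enclosing function.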

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RETURN_STACK_BEFORE_THROW_IF_ASM(thread) \
    do { \
        if ((thread)->IsAsmInterpreter()) { \
            FrameIterator it(const_cast<JSTaggedType *>((thread)->GetCurrentSPFrame()), (thread)); \
            it.Advance(); \
            (thread)->SetCurrentSPFrame(it.GetSp()); \
        } \
    } while (false)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RETURN_REJECT_PROMISE_IF_ABRUPT(thread, value, capability) \
    do { \
        const GlobalEnvConstants *globalConst = (thread)->GlobalConstants(); \
        if ((value).GetTaggedValue().IsCompletionRecord()) { \
            JSHandle<CompletionRecord> record = JSHandle<CompletionRecord>::Cast(value); \
            if (record->IsThrow()) { \
                JSHandle<JSTaggedValue> reject(thread, (capability)->GetReject()); \
                JSHandle<JSTaggedValue> undefined = globalConst->GetHandledUndefined(); \
                EcmaRuntimeCallInfo *info = \
                    EcmaInterpreter::NewRuntimeCallInfo(thread, reject, undefined, undefined, 1); \
                RETURN_VALUE_IF_ABRUPT_COMPLETION(thread, JSTaggedValue::Exception()); \
                info->SetCallArg(record->GetValue()); \
                JSTaggedValue res = JSFunction::Call(info); \
                RETURN_VALUE_IF_ABRUPT_COMPLETION(thread, res); \
                return (capability)->GetPromise(); \
            } \
        } \
        if ((thread)->HasPendingException()) { \
            (thread)->ClearException(); \
            JSHandle<JSTaggedValue> reject(thread, (capability)->GetReject()); \
            JSHandle<JSTaggedValue> undefined = globalConst->GetHandledUndefined(); \
            EcmaRuntimeCallInfo *info = \
                EcmaInterpreter::NewRuntimeCallInfo(thread, reject, undefined, undefined, 1); \
            RETURN_VALUE_IF_ABRUPT_COMPLETION(thread, JSTaggedValue::Exception()); \
            info->SetCallArg(value.GetTaggedValue()); \
            JSTaggedValue res = JSFunction::Call(info); \
            RETURN_VALUE_IF_ABRUPT_COMPLETION(thread, res); \
            return (capability)->GetPromise(); \
        } \
    } while (false)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RETURN_COMPLETION_IF_ABRUPT(thread, value) \
    do { \
        if ((thread)->HasPendingException()) { \
            JSHandle<CompletionRecord> completionRecord = \
                factory->NewCompletionRecord(CompletionRecordType::THROW, value); \
            return (completionRecord); \
        } \
    } while (false)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RETURN_COMPLETION_VALUE_IF_ABRUPT(thread, value) \
    do { \
        if ((thread)->HasPendingException()) { \
            JSHandle<CompletionRecord> completionRecord = \
                factory->NewCompletionRecord(CompletionRecordType::THROW, value); \
            return (completionRecord).GetTaggedValue(); \
        } \
    } while (false)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DECL_DUMP() \
    void Dump(std::ostream &os) const DUMP_API_ATTR; \
    void Dump() const DUMP_API_ATTR \
    { \
        Dump(std::cout); \
    } \
    void DumpForSnapshot(std::vector<Reference> &vec) const;

#endif // defined(__cplusplus)

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DECL_CAST(TYPE) \
    static TYPE *Cast(TaggedObject *object) \
    { \
        ASSERT(JSTaggedValue(object).Is##TYPE()); \
        return reinterpret_cast<TYPE *>(object); \
    }

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DECL_VISIT_ARRAY(BEGIN_OFFSET, REF_LENGTH, LENGTH) \
    template <VisitType visitType> \
    void VisitRangeSlot(const EcmaObjectRangeVisitor &visitor) \
    { \
        ArrayBodyIterator<visitType, (BEGIN_OFFSET)>::IterateBody(this, visitor, (REF_LENGTH), (LENGTH)); \
    }

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DECL_VISIT_OBJECT(BEGIN_OFFSET, END_OFFSET) \
    template <VisitType visitType> \
    void VisitRangeSlot(const EcmaObjectRangeVisitor &visitor) \
    { \
        ObjectBodyIterator<visitType, (BEGIN_OFFSET), (END_OFFSET), SIZE>::IterateRefBody(this, visitor); \
    }

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DECL_VISIT_PRIMITIVE_OBJECT() \
    template <VisitType visitType> \
    void VisitRangeSlot(const EcmaObjectRangeVisitor &visitor) \
    { \
        PrimitiveObjectBodyIterator<visitType, SIZE>::IterateBody(this, visitor); \
    }

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DECL_VISIT_NATIVE_FIELD(BEGIN_OFFSET, END_OFFSET) \
    template <VisitType visitType> \
    void VisitRangeSlot(const EcmaObjectRangeVisitor &visitor) \
    { \
        ObjectBodyIterator<visitType, (BEGIN_OFFSET), (END_OFFSET), SIZE>::IterateNativeBody(this, visitor); \
    }

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DECL_VISIT_OBJECT_FOR_JS_OBJECT(PARENTCLASS, BEGIN_OFFSET, END_OFFSET) \
    template <VisitType visitType> \
    void VisitRangeSlot(const EcmaObjectRangeVisitor &visitor) \
    { \
        VisitObjects<visitType>(visitor); \
        JSObjectBodyIterator<visitType, SIZE>::IterateBody(this, visitor); \
    } \
    template <VisitType visitType> \
    void VisitObjects(const EcmaObjectRangeVisitor &visitor) \
    { \
        PARENTCLASS::VisitObjects<visitType>(visitor); \
        static constexpr size_t PARENT_SIZE = PARENTCLASS::SIZE; \
        ObjectBodyIterator<visitType, (BEGIN_OFFSET), \
            (END_OFFSET), SIZE, PARENT_SIZE>::IterateDerivedRefBody(this, visitor); \
    }
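
// A sketch of how the visitor declarations pair with the accessor layout of a class
// so the GC scans exactly the tagged fields; the class and field layout below are
// hypothetical:
//
//     class JSSample : public JSObject {
//     public:
//         static constexpr size_t VALUE_OFFSET = JSObject::SIZE;
//         ACCESSORS(Value, VALUE_OFFSET, DATA_OFFSET)  // tagged (reference) field
//         ACCESSORS_PRIMITIVE_FIELD(Data, uint32_t, DATA_OFFSET, LAST_OFFSET)
//         DEFINE_ALIGN_SIZE(LAST_OFFSET);
//
//         DECL_VISIT_OBJECT_FOR_JS_OBJECT(JSObject, VALUE_OFFSET, DATA_OFFSET)
//     };
//
// The visit range [BEGIN_OFFSET, END_OFFSET) should cover only the tagged fields, so
// here the visitor scans Value and skips the raw uint32_t Data field.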

#if ECMASCRIPT_ENABLE_CAST_CHECK
#define CAST_CHECK(CAST_TYPE, CHECK_METHOD) \
    static inline CAST_TYPE *Cast(TaggedObject *object) \
    { \
        if (!JSTaggedValue(object).CHECK_METHOD()) { \
            std::abort(); \
        } \
        return static_cast<CAST_TYPE *>(object); \
    } \
    static inline const CAST_TYPE *ConstCast(const TaggedObject *object) \
    { \
        if (!JSTaggedValue(object).CHECK_METHOD()) { \
            std::abort(); \
        } \
        return static_cast<const CAST_TYPE *>(object); \
    } \
    static inline CAST_TYPE *Cast(JSTaggedValue value) \
    { \
        if (!value.CHECK_METHOD()) { \
            std::abort(); \
        } \
        return static_cast<CAST_TYPE *>(value.GetTaggedObject()); \
    }
#else
#define CAST_CHECK(CAST_TYPE, CHECK_METHOD) \
    static inline CAST_TYPE *Cast(TaggedObject *object) \
    { \
        ASSERT(JSTaggedValue(object).CHECK_METHOD()); \
        return static_cast<CAST_TYPE *>(object); \
    } \
    static inline const CAST_TYPE *ConstCast(const TaggedObject *object) \
    { \
        ASSERT(JSTaggedValue(object).CHECK_METHOD()); \
        return static_cast<const CAST_TYPE *>(object); \
    } \
    static inline CAST_TYPE *Cast(JSTaggedValue value) \
    { \
        ASSERT(value.CHECK_METHOD()); \
        return static_cast<CAST_TYPE *>(value.GetTaggedObject()); \
    }
#endif
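
// A sketch of CAST_CHECK in use; the class/check pairing below is illustrative and
// assumes a matching JSTaggedValue::IsSample() predicate exists:
//
//     class JSSample : public JSObject {
//     public:
//         CAST_CHECK(JSSample, IsSample);
//     };
//
//     JSSample *sample = JSSample::Cast(value.GetTaggedObject());
//
// With ECMASCRIPT_ENABLE_CAST_CHECK the cast aborts the process on a type mismatch;
// otherwise the check is a debug-only ASSERT.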

#define CAST_NO_CHECK(CAST_TYPE) \
    static inline CAST_TYPE *Cast(TaggedObject *object) \
    { \
        return static_cast<CAST_TYPE *>(object); \
    } \
    static inline const CAST_TYPE *ConstCast(const TaggedObject *object) \
    { \
        return static_cast<const CAST_TYPE *>(object); \
    }

#define CHECK_OBJECT_SIZE(size) \
    if ((size) == 0) { \
        LOG_FULL(FATAL) << __func__ << ":" << __LINE__ << " objectSize is " << (size); \
    }

#define CHECK_REGION_END(begin, end) \
    if ((begin) > (end)) { \
        LOG_FULL(FATAL) << __func__ << ":" << __LINE__ << " begin: " << (begin) << " end: " << (end); \
    }

#define CHECK_JS_THREAD(vm) \
    if (!(vm)->GetJSThread()->IsCrossThreadExecutionEnable()) { \
        ASSERT((vm)->GetJSThread()->GetThreadId() == JSThread::GetCurrentThreadId()); \
    }

#define CHECK_DAEMON_THREAD() \
    ASSERT(os::thread::GetCurrentThreadId() == DaemonThread::GetInstance()->GetThreadId())

#if !defined(NDEBUG)
#define STACK_ASSERT_SCOPE(thread) [[maybe_unused]] StackAssertScope stackAssertScope = StackAssertScope(thread)
#else
#define STACK_ASSERT_SCOPE(thread) static_cast<void>(0)
#endif

#if !defined(NDEBUG)
#define BUILTINS_ENTRY_DEBUG_LOG() LOG_BUILTINS(DEBUG) << "Builtins C++ " << __func__
#else
#define BUILTINS_ENTRY_DEBUG_LOG() static_cast<void>(0)
#endif

#if defined(ARK_NOT_SUPPORT_INTL_GLOBAL)
#define ARK_SUPPORT_INTL_RETURN_STR(msg) "Please use import intl lib " #msg
#define ARK_SUPPORT_INTL_RETURN(thread, message) \
    THROW_TYPE_ERROR(thread, ARK_SUPPORT_INTL_RETURN_STR(message))
#define ARK_SUPPORT_INTL_RETURN_JSVALUE(thread, message) \
    THROW_TYPE_ERROR_AND_RETURN(thread, ARK_SUPPORT_INTL_RETURN_STR(message), JSTaggedValue::Exception())
#else
#define ARK_SUPPORT_INTL_RETURN(thread, message) static_cast<void>(0)
#define ARK_SUPPORT_INTL_RETURN_JSVALUE(thread, message) static_cast<void>(0)
#endif

#define STACK_LIMIT_CHECK(thread, retVal) \
    do { \
        if ((thread)->DoStackLimitCheck()) { \
            return (retVal); \
        } \
    } while (0)

#define STACK_LIMIT_CHECK_VOID(thread) \
    do { \
        if ((thread)->DoStackLimitCheck()) { \
            return; \
        } \
    } while (0)

#define CHECK_SLOTID_BREAK(slotId) \
    if ((slotId) == 0xff) { \
        break; \
    }

#define CHECK_INPUT_NULLPTR(ptr, msg) \
    if ((ptr) == nullptr) { \
        LOG_FULL(FATAL) << (msg); \
        UNREACHABLE(); \
    }

#endif // ECMASCRIPT_ECMA_MACROS_H