// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef INCLUDED_FROM_MACRO_ASSEMBLER_H
#error This header must be included via macro-assembler.h
#endif

#ifndef V8_CODEGEN_IA32_MACRO_ASSEMBLER_IA32_H_
#define V8_CODEGEN_IA32_MACRO_ASSEMBLER_IA32_H_

#include <stdint.h>

#include "include/v8-internal.h"
#include "src/base/logging.h"
#include "src/base/macros.h"
#include "src/builtins/builtins.h"
#include "src/codegen/assembler.h"
#include "src/codegen/bailout-reason.h"
#include "src/codegen/cpu-features.h"
#include "src/codegen/ia32/assembler-ia32.h"
#include "src/codegen/ia32/register-ia32.h"
#include "src/codegen/label.h"
#include "src/codegen/reglist.h"
#include "src/codegen/reloc-info.h"
#include "src/codegen/shared-ia32-x64/macro-assembler-shared-ia32-x64.h"
#include "src/codegen/turbo-assembler.h"
#include "src/common/globals.h"
#include "src/execution/frames.h"
#include "src/handles/handles.h"
#include "src/objects/heap-object.h"
#include "src/objects/smi.h"
#include "src/roots/roots.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

class Code;
class ExternalReference;
class StatsCounter;

// Convenience for platform-independent signatures. We do not normally
// distinguish memory operands from other operands on ia32.
using MemOperand = Operand;

// TODO(victorgomes): Move definition to macro-assembler.h, once all other
// platforms are updated.
enum class StackLimitKind { kInterruptStackLimit, kRealStackLimit };

// Convenient class to access arguments below the stack pointer.
class StackArgumentsAccessor {
 public:
  // argc = the number of arguments not including the receiver.
  explicit StackArgumentsAccessor(Register argc) : argc_(argc) {
    DCHECK_NE(argc_, no_reg);
  }

  // Argument 0 is the receiver (despite argc not including the receiver).
  Operand operator[](int index) const { return GetArgumentOperand(index); }

  Operand GetArgumentOperand(int index) const;
  Operand GetReceiverOperand() const { return GetArgumentOperand(0); }

 private:
  const Register argc_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(StackArgumentsAccessor);
};

class V8_EXPORT_PRIVATE TurboAssembler
    : public SharedTurboAssemblerBase<TurboAssembler> {
 public:
  using SharedTurboAssemblerBase<TurboAssembler>::SharedTurboAssemblerBase;

  void CheckPageFlag(Register object, Register scratch, int mask, Condition cc,
                     Label* condition_met,
                     Label::Distance condition_met_distance = Label::kFar);

  // Activation support.
  void EnterFrame(StackFrame::Type type);
  void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg) {
    // Out-of-line constant pool not implemented on ia32.
    UNREACHABLE();
  }
  void LeaveFrame(StackFrame::Type type);

  // Allocate stack space of given size (i.e. decrement {esp} by the value
  // stored in the given register, or by a constant). If you need to perform a
  // stack check, do it before calling this function because this function may
  // write into the newly allocated space. It may also overwrite the given
  // register's value, in the version that takes a register.
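  //
  // Illustrative sketch (not part of the original header), assuming a
  // TurboAssembler* |masm| that needs three word-sized stack slots:
  //
  //   masm->AllocateStackSpace(3 * kSystemPointerSize);
  //   ... use the slots at esp[0], esp[4] and esp[8] ...
  //   masm->add(esp, Immediate(3 * kSystemPointerSize));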
#ifdef V8_OS_WIN
  void AllocateStackSpace(Register bytes_scratch);
  void AllocateStackSpace(int bytes);
#else
  void AllocateStackSpace(Register bytes) { sub(esp, bytes); }
  void AllocateStackSpace(int bytes) {
    DCHECK_GE(bytes, 0);
    if (bytes == 0) return;
    sub(esp, Immediate(bytes));
  }
#endif

  // Print a message to stdout and abort execution.
  void Abort(AbortReason reason);

  // Calls Abort(msg) if the condition cc is not satisfied.
  // Use --debug_code to enable.
  void Assert(Condition cc, AbortReason reason);

  // Like Assert(), but without condition.
  // Use --debug_code to enable.
  void AssertUnreachable(AbortReason reason);

  // Like Assert(), but always enabled.
  void Check(Condition cc, AbortReason reason);

  // Check that the stack is aligned.
  void CheckStackAlignment();

  // Move a constant into a destination using the most efficient encoding.
  void Move(Register dst, int32_t x) {
    if (x == 0) {
      xor_(dst, dst);
    } else {
      mov(dst, Immediate(x));
    }
  }
  void Move(Register dst, const Immediate& src);
  void Move(Register dst, Smi src) { Move(dst, Immediate(src)); }
  void Move(Register dst, Handle<HeapObject> src);
  void Move(Register dst, Register src);
  void Move(Register dst, Operand src);
  void Move(Operand dst, const Immediate& src);

  // Move an immediate into an XMM register.
  void Move(XMMRegister dst, uint32_t src);
  void Move(XMMRegister dst, uint64_t src);
  void Move(XMMRegister dst, float src) { Move(dst, bit_cast<uint32_t>(src)); }
  void Move(XMMRegister dst, double src) { Move(dst, bit_cast<uint64_t>(src)); }

  Operand EntryFromBuiltinAsOperand(Builtin builtin);

  void Call(Register reg) { call(reg); }
  void Call(Operand op) { call(op); }
  void Call(Label* target) { call(target); }
  void Call(Handle<Code> code_object, RelocInfo::Mode rmode);

  // Load the builtin given by the Smi in |builtin_index| into the same
  // register.
  void LoadEntryFromBuiltinIndex(Register builtin_index);
  void CallBuiltinByIndex(Register builtin_index);
  void CallBuiltin(Builtin builtin);

  void LoadCodeObjectEntry(Register destination, Register code_object);
  void CallCodeObject(Register code_object);
  void JumpCodeObject(Register code_object,
                      JumpMode jump_mode = JumpMode::kJump);
  void Jump(const ExternalReference& reference);

  void Jump(Handle<Code> code_object, RelocInfo::Mode rmode);

  void LoadMap(Register destination, Register object);

  void Trap();
  void DebugBreak();

  void CallForDeoptimization(Builtin target, int deopt_id, Label* exit,
                             DeoptimizeKind kind, Label* ret,
                             Label* jump_deoptimization_entry_label);

  // Jump if the register contains a smi.
  inline void JumpIfSmi(Register value, Label* smi_label,
                        Label::Distance distance = Label::kFar) {
    test(value, Immediate(kSmiTagMask));
    j(zero, smi_label, distance);
  }
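
  // Illustrative sketch (not part of the original header): since a Smi on
  // ia32 has a zero tag bit (kSmiTagMask), the check is a single test/j pair:
  //
  //   Label is_smi;
  //   masm->JumpIfSmi(eax, &is_smi, Label::kNear);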

  // Jump if the operand is a smi.
  inline void JumpIfSmi(Operand value, Label* smi_label,
                        Label::Distance distance = Label::kFar) {
    test(value, Immediate(kSmiTagMask));
    j(zero, smi_label, distance);
  }

  void JumpIfEqual(Register a, int32_t b, Label* dest) {
    cmp(a, Immediate(b));
    j(equal, dest);
  }

  void JumpIfLessThan(Register a, int32_t b, Label* dest) {
    cmp(a, Immediate(b));
    j(less, dest);
  }

  void SmiUntag(Register reg) { sar(reg, kSmiTagSize); }
  void SmiUntag(Register output, Register value) {
    mov(output, value);
    SmiUntag(output);
  }

  void SmiToInt32(Register reg) { SmiUntag(reg); }

  // Before calling a C-function from generated code, align arguments on stack.
  // After aligning the frame, arguments must be stored in esp[0], esp[4],
  // etc., not pushed. The argument count assumes all arguments are word sized.
  // Some compilers/platforms require the stack to be aligned when calling
  // C++ code.
  // Needs a scratch register to do some arithmetic. This register will be
  // trashed.
  void PrepareCallCFunction(int num_arguments, Register scratch);

  // Calls a C function and cleans up the space for arguments allocated
  // by PrepareCallCFunction. The called function is not allowed to trigger a
  // garbage collection, since that might move the code and invalidate the
  // return address (unless this is somehow accounted for by the called
  // function).
  void CallCFunction(ExternalReference function, int num_arguments);
  void CallCFunction(Register function, int num_arguments);

  void ShlPair(Register high, Register low, uint8_t imm8);
  void ShlPair_cl(Register high, Register low);
  void ShrPair(Register high, Register low, uint8_t imm8);
  void ShrPair_cl(Register high, Register low);
  void SarPair(Register high, Register low, uint8_t imm8);
  void SarPair_cl(Register high, Register low);

  // Generates function and stub prologue code.
  void StubPrologue(StackFrame::Type type);
  void Prologue();

  // Helpers for argument handling
  enum ArgumentsCountMode { kCountIncludesReceiver, kCountExcludesReceiver };
  enum ArgumentsCountType { kCountIsInteger, kCountIsSmi, kCountIsBytes };
  void DropArguments(Register count, Register scratch, ArgumentsCountType type,
                     ArgumentsCountMode mode);
  void DropArgumentsAndPushNewReceiver(Register argc, Register receiver,
                                       Register scratch,
                                       ArgumentsCountType type,
                                       ArgumentsCountMode mode);
  void DropArgumentsAndPushNewReceiver(Register argc, Operand receiver,
                                       Register scratch,
                                       ArgumentsCountType type,
                                       ArgumentsCountMode mode);

  void Lzcnt(Register dst, Register src) { Lzcnt(dst, Operand(src)); }
  void Lzcnt(Register dst, Operand src);

  void Tzcnt(Register dst, Register src) { Tzcnt(dst, Operand(src)); }
  void Tzcnt(Register dst, Operand src);

  void Popcnt(Register dst, Register src) { Popcnt(dst, Operand(src)); }
  void Popcnt(Register dst, Operand src);

  void PushReturnAddressFrom(Register src) { push(src); }
  void PopReturnAddressTo(Register dst) { pop(dst); }

  void PushReturnAddressFrom(XMMRegister src, Register scratch) {
    Push(src, scratch);
  }
  void PopReturnAddressTo(XMMRegister dst, Register scratch) {
    Pop(dst, scratch);
  }
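
  // Illustrative sketch (not part of the original header): a common ia32
  // pattern is to pop the return address, push an extra argument, and push
  // the return address back on top:
  //
  //   masm->PopReturnAddressTo(ecx);
  //   masm->Push(eax);  // extra argument
  //   masm->PushReturnAddressFrom(ecx);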

  void Ret();

  // Root register utility functions.

  void InitializeRootRegister();

  Operand RootAsOperand(RootIndex index);
  void LoadRoot(Register destination, RootIndex index) final;

  // Indirect root-relative loads.
  void LoadFromConstantsTable(Register destination, int constant_index) final;
  void LoadRootRegisterOffset(Register destination, intptr_t offset) final;
  void LoadRootRelative(Register destination, int32_t offset) final;

  void PushPC();

  enum class PushArrayOrder { kNormal, kReverse };
  // `array` points to the first element (the lowest address).
  // `array` and `size` are not modified.
  void PushArray(Register array, Register size, Register scratch,
                 PushArrayOrder order = PushArrayOrder::kNormal);

  // Operand pointing to an external reference.
  // May emit code to set up the scratch register. The operand is
  // only guaranteed to be correct as long as the scratch register
  // isn't changed.
  // If the operand is used more than once, use a scratch register
  // that is guaranteed not to be clobbered.
  Operand ExternalReferenceAsOperand(ExternalReference reference,
                                     Register scratch);
  Operand ExternalReferenceAddressAsOperand(ExternalReference reference);
  Operand HeapObjectAsOperand(Handle<HeapObject> object);

  void LoadAddress(Register destination, ExternalReference source);

  void CompareRoot(Register with, RootIndex index);
  void CompareRoot(Register with, Register scratch, RootIndex index);

  // Return and drop arguments from stack, where the number of arguments
  // may be bigger than 2^16 - 1. Requires a scratch register.
  void Ret(int bytes_dropped, Register scratch);

  void PextrdPreSse41(Register dst, XMMRegister src, uint8_t imm8);
  void PinsrdPreSse41(XMMRegister dst, Register src, uint8_t imm8,
                      uint32_t* load_pc_offset) {
    PinsrdPreSse41(dst, Operand(src), imm8, load_pc_offset);
  }
  void PinsrdPreSse41(XMMRegister dst, Operand src, uint8_t imm8,
                      uint32_t* load_pc_offset);

  // Expression support
  // The cvtsi2sd instruction only writes to the low 64 bits of the dst
  // register, which hinders register renaming and makes dependence chains
  // longer. So we use xorps to clear the dst register before cvtsi2sd to
  // solve this issue.
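  //
  // Illustrative sketch (not part of the original header): for the int -> f64
  // case the helper therefore emits roughly
  //
  //   xorps(dst, dst);     // break the false dependence on dst
  //   cvtsi2sd(dst, src);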
  void Cvtsi2ss(XMMRegister dst, Register src) { Cvtsi2ss(dst, Operand(src)); }
  void Cvtsi2ss(XMMRegister dst, Operand src);
  void Cvtsi2sd(XMMRegister dst, Register src) { Cvtsi2sd(dst, Operand(src)); }
  void Cvtsi2sd(XMMRegister dst, Operand src);

  void Cvtui2ss(XMMRegister dst, Register src, Register tmp) {
    Cvtui2ss(dst, Operand(src), tmp);
  }
  void Cvtui2ss(XMMRegister dst, Operand src, Register tmp);
  void Cvttss2ui(Register dst, XMMRegister src, XMMRegister tmp) {
    Cvttss2ui(dst, Operand(src), tmp);
  }
  void Cvttss2ui(Register dst, Operand src, XMMRegister tmp);
  void Cvtui2sd(XMMRegister dst, Register src, Register scratch) {
    Cvtui2sd(dst, Operand(src), scratch);
  }
  void Cvtui2sd(XMMRegister dst, Operand src, Register scratch);
  void Cvttsd2ui(Register dst, XMMRegister src, XMMRegister tmp) {
    Cvttsd2ui(dst, Operand(src), tmp);
  }
  void Cvttsd2ui(Register dst, Operand src, XMMRegister tmp);

  void Push(Register src) { push(src); }
  void Push(Operand src) { push(src); }
  void Push(Immediate value);
  void Push(Handle<HeapObject> handle) { push(Immediate(handle)); }
  void Push(Smi smi) { Push(Immediate(smi)); }
  void Push(XMMRegister src, Register scratch) {
    movd(scratch, src);
    push(scratch);
  }

  void Pop(Register dst) { pop(dst); }
  void Pop(Operand dst) { pop(dst); }
  void Pop(XMMRegister dst, Register scratch) {
    pop(scratch);
    movd(dst, scratch);
  }

  void MaybeSaveRegisters(RegList registers);
  void MaybeRestoreRegisters(RegList registers);

  void CallEphemeronKeyBarrier(Register object, Register slot_address,
                               SaveFPRegsMode fp_mode);

  void CallRecordWriteStubSaveRegisters(
      Register object, Register slot_address,
      RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
      StubCallMode mode = StubCallMode::kCallBuiltinPointer);
  void CallRecordWriteStub(
      Register object, Register slot_address,
      RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
      StubCallMode mode = StubCallMode::kCallBuiltinPointer);

  // Calculate how much stack space (in bytes) is required to store caller
  // registers, excluding those specified in the arguments.
  int RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
                                      Register exclusion1 = no_reg,
                                      Register exclusion2 = no_reg,
                                      Register exclusion3 = no_reg) const;

  // PushCallerSaved and PopCallerSaved do not arrange the registers in any
  // particular order, so they are not useful for calls that can cause a GC.
  // The caller can exclude up to 3 registers that do not need to be saved and
  // restored.

  // Push caller saved registers on the stack, and return the number of bytes
  // by which the stack pointer is adjusted.
  int PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1 = no_reg,
                      Register exclusion2 = no_reg,
                      Register exclusion3 = no_reg);
  // Restore caller saved registers from the stack, and return the number of
  // bytes by which the stack pointer is adjusted.
  int PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1 = no_reg,
                     Register exclusion2 = no_reg,
                     Register exclusion3 = no_reg);
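
  // Illustrative sketch (not part of the original header): saving caller-saved
  // registers around a call, excluding eax because it is allowed to be
  // clobbered (it will receive the result):
  //
  //   masm->PushCallerSaved(SaveFPRegsMode::kSave, eax);
  //   ... make the call ...
  //   masm->PopCallerSaved(SaveFPRegsMode::kSave, eax);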

  // Compute the start of the generated instruction stream from the current PC.
  // This is an alternative to embedding the {CodeObject} handle as a
  // reference.
  void ComputeCodeStartAddress(Register dst);

  // Control-flow integrity:

  // Define a function entrypoint. This doesn't emit any code for this
  // architecture, as control-flow integrity is not supported for it.
  void CodeEntry() {}
  // Define an exception handler.
  void ExceptionHandler() {}
  // Define an exception handler and bind a label.
  void BindExceptionHandler(Label* label) { bind(label); }

 protected:
  // Drops arguments assuming that the return address was already popped.
  void DropArguments(Register count, ArgumentsCountType type = kCountIsInteger,
                     ArgumentsCountMode mode = kCountExcludesReceiver);
};

// MacroAssembler implements a collection of frequently used macros.
class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
 public:
  using TurboAssembler::TurboAssembler;

  void PushRoot(RootIndex index);

  // Compare the object in a register to a value and jump if they are equal.
  void JumpIfRoot(Register with, RootIndex index, Label* if_equal,
                  Label::Distance if_equal_distance = Label::kFar) {
    CompareRoot(with, index);
    j(equal, if_equal, if_equal_distance);
  }

  // Compare the object in a register to a value and jump if they are not
  // equal.
  void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal,
                     Label::Distance if_not_equal_distance = Label::kFar) {
    CompareRoot(with, index);
    j(not_equal, if_not_equal, if_not_equal_distance);
  }

  // Checks if value is in range [lower_limit, higher_limit] using a single
  // comparison. Flags CF=1 or ZF=1 indicate the value is in the range
  // (condition below_equal). It is valid for |value| and |scratch| to be the
  // same register, as far as this function is concerned.
  void CompareRange(Register value, unsigned lower_limit,
                    unsigned higher_limit, Register scratch);
  void JumpIfIsInRange(Register value, unsigned lower_limit,
                       unsigned higher_limit, Register scratch,
                       Label* on_in_range,
                       Label::Distance near_jump = Label::kFar);

  // ---------------------------------------------------------------------------
  // GC Support
  // Notify the garbage collector that we wrote a pointer into an object.
  // |object| is the object being stored into, |value| is the object being
  // stored. The value and scratch registers are clobbered by the operation.
  // The offset is the offset from the start of the object, not the offset from
  // the tagged HeapObject pointer. For use with FieldOperand(reg, off).
  void RecordWriteField(
      Register object, int offset, Register value, Register scratch,
      SaveFPRegsMode save_fp,
      RememberedSetAction remembered_set_action = RememberedSetAction::kEmit,
      SmiCheck smi_check = SmiCheck::kInline);

  // For the page containing |object|, mark the region covering |address|
  // dirty. |object| is the object being stored into, |value| is the
  // object being stored. The address and value registers are clobbered by the
  // operation. RecordWrite filters out smis so it does not update the
  // write barrier if the value is a smi.
  void RecordWrite(
      Register object, Register address, Register value,
      SaveFPRegsMode save_fp,
      RememberedSetAction remembered_set_action = RememberedSetAction::kEmit,
      SmiCheck smi_check = SmiCheck::kInline);
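
  // Illustrative sketch (not part of the original header): storing a tagged
  // value into an object field and emitting the write barrier (kFieldOffset
  // is a hypothetical field offset):
  //
  //   masm->mov(FieldOperand(object, kFieldOffset), value);
  //   masm->RecordWriteField(object, kFieldOffset, value, scratch,
  //                          SaveFPRegsMode::kSave);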

  // Enter specific kind of exit frame. Expects the number of
  // arguments in register eax and sets up the number of arguments in
  // register edi and the pointer to the first argument in register
  // esi.
  void EnterExitFrame(int argc, bool save_doubles, StackFrame::Type frame_type);

  void EnterApiExitFrame(int argc, Register scratch);

  // Leave the current exit frame. Expects the return value in
  // register eax:edx (untouched) and the pointer to the first
  // argument in register esi (if pop_arguments == true).
  void LeaveExitFrame(bool save_doubles, bool pop_arguments = true);

  // Leave the current exit frame. Expects the return value in
  // register eax (untouched).
  void LeaveApiExitFrame();

  // Load the global proxy from the current context.
  void LoadGlobalProxy(Register dst);

  // Load a value from the native context with a given index.
  void LoadNativeContextSlot(Register dst, int index);

  // ---------------------------------------------------------------------------
  // JavaScript invokes

  // Invoke the JavaScript function code by either calling or jumping.

  void InvokeFunctionCode(Register function, Register new_target,
                          Register expected_parameter_count,
                          Register actual_parameter_count, InvokeType type);

  // On function call, call into the debugger.
  // This may clobber ecx.
  void CallDebugOnFunctionCall(Register fun, Register new_target,
                               Register expected_parameter_count,
                               Register actual_parameter_count);

  // Invoke the JavaScript function in the given register. Changes the
  // current context to the context in the function before invoking.
  void InvokeFunction(Register function, Register new_target,
                      Register actual_parameter_count, InvokeType type);

  // Compare object type for heap object.
  // Incoming register is heap_object and outgoing register is map.
  void CmpObjectType(Register heap_object, InstanceType type, Register map);

  // Compare instance type for map.
  void CmpInstanceType(Register map, InstanceType type);

  // Compare instance type ranges for a map (lower_limit and higher_limit
  // inclusive).
  //
  // Always use unsigned comparisons: below_equal for a positive result.
  void CmpInstanceTypeRange(Register map, Register instance_type_out,
                            Register scratch, InstanceType lower_limit,
                            InstanceType higher_limit);

  // Smi tagging support.
  void SmiTag(Register reg) {
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagSize == 1);
    add(reg, reg);
  }

  // Jump if the register contains a non-smi.
  inline void JumpIfNotSmi(Register value, Label* not_smi_label,
                           Label::Distance distance = Label::kFar) {
    test(value, Immediate(kSmiTagMask));
    j(not_zero, not_smi_label, distance);
  }
  // Jump if the operand is not a smi.
  inline void JumpIfNotSmi(Operand value, Label* not_smi_label,
                           Label::Distance distance = Label::kFar) {
    test(value, Immediate(kSmiTagMask));
    j(not_zero, not_smi_label, distance);
  }

  template <typename Field>
  void DecodeField(Register reg) {
    static const int shift = Field::kShift;
    static const int mask = Field::kMask >> Field::kShift;
    if (shift != 0) {
      sar(reg, shift);
    }
    and_(reg, Immediate(mask));
  }

  // Abort execution if argument is not a smi, enabled via --debug-code.
  void AssertSmi(Register object);

  // Abort execution if argument is a smi, enabled via --debug-code.
  void AssertNotSmi(Register object);

  // Abort execution if argument is not a JSFunction, enabled via --debug-code.
  void AssertFunction(Register object, Register scratch);

  // Abort execution if argument is not a callable JSFunction, enabled via
  // --debug-code.
  void AssertCallableFunction(Register object, Register scratch);

  // Abort execution if argument is not a Constructor, enabled via --debug-code.
  void AssertConstructor(Register object);

  // Abort execution if argument is not a JSBoundFunction,
  // enabled via --debug-code.
  void AssertBoundFunction(Register object);

  // Abort execution if argument is not a JSGeneratorObject (or subclass),
  // enabled via --debug-code.
  void AssertGeneratorObject(Register object);

  // Abort execution if argument is not undefined or an AllocationSite, enabled
  // via --debug-code.
  void AssertUndefinedOrAllocationSite(Register object, Register scratch);

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new stack handler and link it into stack handler chain.
  void PushStackHandler(Register scratch);

  // Unlink the stack handler on top of the stack from the stack handler chain.
  void PopStackHandler(Register scratch);

  // ---------------------------------------------------------------------------
  // Runtime calls

  // Call a runtime routine.
  void CallRuntime(const Runtime::Function* f, int num_arguments,
                   SaveFPRegsMode save_doubles = SaveFPRegsMode::kIgnore);

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId fid,
                   SaveFPRegsMode save_doubles = SaveFPRegsMode::kIgnore) {
    const Runtime::Function* function = Runtime::FunctionForId(fid);
    CallRuntime(function, function->nargs, save_doubles);
  }

  // Convenience function: Same as above, but takes the fid instead.
  void CallRuntime(Runtime::FunctionId fid, int num_arguments,
                   SaveFPRegsMode save_doubles = SaveFPRegsMode::kIgnore) {
    CallRuntime(Runtime::FunctionForId(fid), num_arguments, save_doubles);
  }

  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid);

  // Jump to a runtime routine.
  void JumpToExternalReference(const ExternalReference& ext,
                               bool builtin_exit_frame = false);

  // Generates a trampoline to jump to the off-heap instruction stream.
  void JumpToOffHeapInstructionStream(Address entry);

  // ---------------------------------------------------------------------------
  // Utilities

  // Emit code to discard a non-negative number of pointer-sized elements
  // from the stack, clobbering only the esp register.
  void Drop(int element_count);
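
  // Illustrative sketch (not part of the original header) of the runtime-call
  // helpers above: the tail of a builtin that bails out to the runtime when a
  // stack check fails:
  //
  //   masm->bind(&stack_overflow);
  //   masm->CallRuntime(Runtime::kThrowStackOverflow);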

  // ---------------------------------------------------------------------------
  // In-place weak references.
  void LoadWeakValue(Register in_out, Label* target_if_cleared);

  // ---------------------------------------------------------------------------
  // StatsCounter support

  void IncrementCounter(StatsCounter* counter, int value, Register scratch) {
    if (!FLAG_native_code_counters) return;
    EmitIncrementCounter(counter, value, scratch);
  }
  void EmitIncrementCounter(StatsCounter* counter, int value, Register scratch);
  void DecrementCounter(StatsCounter* counter, int value, Register scratch) {
    if (!FLAG_native_code_counters) return;
    EmitDecrementCounter(counter, value, scratch);
  }
  void EmitDecrementCounter(StatsCounter* counter, int value, Register scratch);

  // ---------------------------------------------------------------------------
  // Stack limit utilities
  void CompareStackLimit(Register with, StackLimitKind kind);
  void StackOverflowCheck(Register num_args, Register scratch,
                          Label* stack_overflow, bool include_receiver = false);

 private:
  // Helper functions for generating invokes.
  void InvokePrologue(Register expected_parameter_count,
                      Register actual_parameter_count, Label* done,
                      InvokeType type);

  void EnterExitFramePrologue(StackFrame::Type frame_type, Register scratch);
  void EnterExitFrameEpilogue(int argc, bool save_doubles);

  void LeaveExitFrameEpilogue();

  DISALLOW_IMPLICIT_CONSTRUCTORS(MacroAssembler);
};

// -----------------------------------------------------------------------------
// Static helper functions.

// Generate an Operand for loading a field from an object.
inline Operand FieldOperand(Register object, int offset) {
  return Operand(object, offset - kHeapObjectTag);
}

// Generate an Operand for loading an indexed field from an object.
inline Operand FieldOperand(Register object, Register index, ScaleFactor scale,
                            int offset) {
  return Operand(object, index, scale, offset - kHeapObjectTag);
}

#define ACCESS_MASM(masm) masm->

}  // namespace internal
}  // namespace v8

#endif  // V8_CODEGEN_IA32_MACRO_ASSEMBLER_IA32_H_