1// Copyright (c) 1994-2006 Sun Microsystems Inc.
2// All Rights Reserved.
3//
4// Redistribution and use in source and binary forms, with or without
5// modification, are permitted provided that the following conditions
6// are met:
7//
8// - Redistributions of source code must retain the above copyright notice,
9// this list of conditions and the following disclaimer.
10//
11// - Redistribution in binary form must reproduce the above copyright
12// notice, this list of conditions and the following disclaimer in the
13// documentation and/or other materials provided with the
14// distribution.
15//
16// - Neither the name of Sun Microsystems or the names of contributors may
17// be used to endorse or promote products derived from this software without
18// specific prior written permission.
19//
20// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
23// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
24// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
25// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
26// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
28// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
29// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
30// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
31// OF THE POSSIBILITY OF SUCH DAMAGE.
32
33// The original source code covered by the above license above has been modified
34// significantly by Google Inc.
35// Copyright 2012 the V8 project authors. All rights reserved.
36
37#include "src/codegen/ia32/assembler-ia32.h"
38
39#include <cstring>
40
41#if V8_TARGET_ARCH_IA32
42
43#if V8_LIBC_MSVCRT
44#include <intrin.h>  // _xgetbv()
45#endif
46#if V8_OS_DARWIN
47#include <sys/sysctl.h>
48#endif
49
50#include "src/base/bits.h"
51#include "src/base/cpu.h"
52#include "src/codegen/assembler-inl.h"
53#include "src/codegen/macro-assembler.h"
54#include "src/codegen/string-constants.h"
55#include "src/deoptimizer/deoptimizer.h"
56#include "src/diagnostics/disassembler.h"
57#include "src/init/v8.h"
58#include "src/numbers/conversions-inl.h"
59
60namespace v8 {
61namespace internal {
62
63Immediate Immediate::EmbeddedNumber(double value) {
64  int32_t smi;
65  if (DoubleToSmiInteger(value, &smi)) return Immediate(Smi::FromInt(smi));
66  Immediate result(0, RelocInfo::FULL_EMBEDDED_OBJECT);
67  result.is_heap_object_request_ = true;
68  result.value_.heap_object_request = HeapObjectRequest(value);
69  return result;
70}
71
72Immediate Immediate::EmbeddedStringConstant(const StringConstantBase* str) {
73  Immediate result(0, RelocInfo::FULL_EMBEDDED_OBJECT);
74  result.is_heap_object_request_ = true;
75  result.value_.heap_object_request = HeapObjectRequest(str);
76  return result;
77}
78
79// -----------------------------------------------------------------------------
80// Implementation of CpuFeatures
81
82namespace {
83
// Reads extended control register |xcr| via XGETBV. Used below to find out
// whether the OS saves/restores AVX state on context switches.
V8_INLINE uint64_t xgetbv(unsigned int xcr) {
#if V8_LIBC_MSVCRT
  return _xgetbv(xcr);
#else
  unsigned eax, edx;
  // Check xgetbv; this uses a .byte sequence instead of the instruction
  // directly because older assemblers do not include support for xgetbv and
  // there is no easy way to conditionally compile based on the assembler
  // used.
  __asm__ volatile(".byte 0x0F, 0x01, 0xD0" : "=a"(eax), "=d"(edx) : "c"(xcr));
  // The 64-bit result comes back split across EDX:EAX.
  return static_cast<uint64_t>(eax) | (static_cast<uint64_t>(edx) << 32);
#endif
}
97
// Returns true if the operating system preserves AVX (YMM) register state,
// i.e. XCR0 has both the SSE and AVX state bits set. On macOS additionally
// rejects kernels <= Darwin 13 (OS X 10.9), which had a state-corruption bug.
bool OSHasAVXSupport() {
#if V8_OS_DARWIN
  // Mac OS X up to 10.9 has a bug where AVX transitions were indeed being
  // caused by ISRs, so we detect that here and disable AVX in that case.
  char buffer[128];
  size_t buffer_size = arraysize(buffer);
  int ctl_name[] = {CTL_KERN, KERN_OSRELEASE};
  if (sysctl(ctl_name, 2, buffer, &buffer_size, nullptr, 0) != 0) {
    FATAL("V8 failed to get kernel version");
  }
  // The buffer now contains a string of the form XX.YY.ZZ, where
  // XX is the major kernel version component.
  char* period_pos = strchr(buffer, '.');
  // NOTE(review): strchr can return nullptr; the DCHECK is compiled out in
  // release builds, where a malformed version string would dereference null.
  DCHECK_NOT_NULL(period_pos);
  *period_pos = '\0';
  long kernel_version_major = strtol(buffer, nullptr, 10);  // NOLINT
  if (kernel_version_major <= 13) return false;
#endif  // V8_OS_DARWIN
  // Check whether OS claims to support AVX.
  uint64_t feature_mask = xgetbv(0);  // XCR_XFEATURE_ENABLED_MASK
  // Bit 1 = SSE (XMM) state, bit 2 = AVX (YMM) state; both must be enabled.
  return (feature_mask & 0x6) == 0x6;
}
120
121#undef _XCR_XFEATURE_ENABLED_MASK
122
123}  // namespace
124
125bool CpuFeatures::SupportsWasmSimd128() {
126#if V8_ENABLE_WEBASSEMBLY
127  if (IsSupported(SSE4_1)) return true;
128  if (FLAG_wasm_simd_ssse3_codegen && IsSupported(SSSE3)) return true;
129#endif  // V8_ENABLE_WEBASSEMBLY
130  return false;
131}
132
// Probes the host CPU and the --enable-* flags to populate the set of
// supported features. The order matters: raw CPUID bits are recorded first,
// then flag/dependency constraints knock unsupported features back out.
void CpuFeatures::ProbeImpl(bool cross_compile) {
  base::CPU cpu;
  CHECK(cpu.has_sse2());  // SSE2 support is mandatory.
  CHECK(cpu.has_cmov());  // CMOV support is mandatory.

  // Only use statically determined features for cross compile (snapshot).
  if (cross_compile) return;

  if (cpu.has_sse42()) SetSupported(SSE4_2);
  if (cpu.has_sse41()) SetSupported(SSE4_1);
  if (cpu.has_ssse3()) SetSupported(SSSE3);
  if (cpu.has_sse3()) SetSupported(SSE3);
  // AVX requires the OS to save YMM state (OSXSAVE + XCR0 check).
  if (cpu.has_avx() && cpu.has_osxsave() && OSHasAVXSupport()) {
    SetSupported(AVX);
    if (cpu.has_avx2()) SetSupported(AVX2);
    if (cpu.has_fma3()) SetSupported(FMA3);
  }

  if (cpu.has_bmi1() && FLAG_enable_bmi1) SetSupported(BMI1);
  if (cpu.has_bmi2() && FLAG_enable_bmi2) SetSupported(BMI2);
  if (cpu.has_lzcnt() && FLAG_enable_lzcnt) SetSupported(LZCNT);
  if (cpu.has_popcnt() && FLAG_enable_popcnt) SetSupported(POPCNT);
  // --mcpu=auto detects Atom; --mcpu=atom forces Atom tuning.
  if (strcmp(FLAG_mcpu, "auto") == 0) {
    if (cpu.is_atom()) SetSupported(INTEL_ATOM);
  } else if (strcmp(FLAG_mcpu, "atom") == 0) {
    SetSupported(INTEL_ATOM);
  }

  // Ensure that supported cpu features make sense. E.g. it is wrong to support
  // AVX but not SSE4_2, if we have --enable-avx and --no-enable-sse4-2, the
  // code above would set AVX to supported, and SSE4_2 to unsupported, then the
  // checks below will set AVX to unsupported.
  if (!FLAG_enable_sse3) SetUnsupported(SSE3);
  if (!FLAG_enable_ssse3 || !IsSupported(SSE3)) SetUnsupported(SSSE3);
  if (!FLAG_enable_sse4_1 || !IsSupported(SSSE3)) SetUnsupported(SSE4_1);
  if (!FLAG_enable_sse4_2 || !IsSupported(SSE4_1)) SetUnsupported(SSE4_2);
  if (!FLAG_enable_avx || !IsSupported(SSE4_2)) SetUnsupported(AVX);
  if (!FLAG_enable_avx2 || !IsSupported(AVX)) SetUnsupported(AVX2);
  if (!FLAG_enable_fma3 || !IsSupported(AVX)) SetUnsupported(FMA3);

  // Set a static value on whether Simd is supported.
  // This variable is only used for certain archs to query SupportWasmSimd128()
  // at runtime in builtins using an extern ref. Other callers should use
  // CpuFeatures::SupportWasmSimd128().
  CpuFeatures::supports_wasm_simd_128_ = CpuFeatures::SupportsWasmSimd128();
}
179
180void CpuFeatures::PrintTarget() {}
// Prints the detected feature set as a single space-separated line, e.g.
// for --print-features style diagnostics.
void CpuFeatures::PrintFeatures() {
  printf(
      "SSE3=%d SSSE3=%d SSE4_1=%d AVX=%d AVX2=%d FMA3=%d BMI1=%d BMI2=%d "
      "LZCNT=%d "
      "POPCNT=%d ATOM=%d\n",
      CpuFeatures::IsSupported(SSE3), CpuFeatures::IsSupported(SSSE3),
      CpuFeatures::IsSupported(SSE4_1), CpuFeatures::IsSupported(AVX),
      CpuFeatures::IsSupported(AVX2), CpuFeatures::IsSupported(FMA3),
      CpuFeatures::IsSupported(BMI1), CpuFeatures::IsSupported(BMI2),
      CpuFeatures::IsSupported(LZCNT), CpuFeatures::IsSupported(POPCNT),
      CpuFeatures::IsSupported(INTEL_ATOM));
}
193
194// -----------------------------------------------------------------------------
195// Implementation of Displacement
196
// Initializes the displacement for an unbound label: packs the position of
// the next displacement in the label's chain (0 if none) together with the
// displacement type into data_.
void Displacement::init(Label* L, Type type) {
  DCHECK(!L->is_bound());
  int next = 0;
  if (L->is_linked()) {
    next = L->pos();
    DCHECK_GT(next, 0);  // Displacements must be at positions > 0
  }
  // Ensure that we _never_ overflow the next field.
  DCHECK(NextField::is_valid(Assembler::kMaximalBufferSize));
  data_ = NextField::encode(next) | TypeField::encode(type);
}
208
209// -----------------------------------------------------------------------------
210// Implementation of RelocInfo
211
// Relocation modes that must be fixed up when code moves: these are all
// encoded pc-relative on ia32.
const int RelocInfo::kApplyMask =
    RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
    RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
    RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET) |
    RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
217
218bool RelocInfo::IsCodedSpecially() {
219  // The deserializer needs to know whether a pointer is specially coded.  Being
220  // specially coded on IA32 means that it is a relative address, as used by
221  // branch instructions.  These are also the ones that need changing when a
222  // code object moves.
223  return RelocInfo::ModeMask(rmode_) & kApplyMask;
224}
225
226bool RelocInfo::IsInConstantPool() { return false; }
227
// Returns the call tag stored (unaligned) in the instruction stream at pc_.
// Only valid for wasm call/stub-call relocations.
uint32_t RelocInfo::wasm_call_tag() const {
  DCHECK(rmode_ == WASM_CALL || rmode_ == WASM_STUB_CALL);
  return ReadUnalignedValue<uint32_t>(pc_);
}
232
233// -----------------------------------------------------------------------------
234// Implementation of Operand
235
// Encodes [base + disp], choosing the shortest ModRM form. Note the x86
// encoding quirks: ebp as base cannot use the no-displacement (mod == 0)
// form, and esp as base always requires a SIB byte.
Operand::Operand(Register base, int32_t disp, RelocInfo::Mode rmode) {
  // [base + disp/r]
  if (disp == 0 && RelocInfo::IsNoInfo(rmode) && base != ebp) {
    // [base]
    set_modrm(0, base);
    if (base == esp) set_sib(times_1, esp, base);
  } else if (is_int8(disp) && RelocInfo::IsNoInfo(rmode)) {
    // [base + disp8]
    set_modrm(1, base);
    if (base == esp) set_sib(times_1, esp, base);
    set_disp8(disp);
  } else {
    // [base + disp/r] -- full disp32, required when disp carries reloc info.
    set_modrm(2, base);
    if (base == esp) set_sib(times_1, esp, base);
    set_dispr(disp, rmode);
  }
}
254
// Encodes [base + index*scale + disp]. rm == esp in the ModRM byte selects
// the SIB form; the shortest displacement width that fits is used.
Operand::Operand(Register base, Register index, ScaleFactor scale, int32_t disp,
                 RelocInfo::Mode rmode) {
  DCHECK(index != esp);  // illegal addressing mode
  // [base + index*scale + disp/r]
  if (disp == 0 && RelocInfo::IsNoInfo(rmode) && base != ebp) {
    // [base + index*scale]
    set_modrm(0, esp);
    set_sib(scale, index, base);
  } else if (is_int8(disp) && RelocInfo::IsNoInfo(rmode)) {
    // [base + index*scale + disp8]
    set_modrm(1, esp);
    set_sib(scale, index, base);
    set_disp8(disp);
  } else {
    // [base + index*scale + disp/r] -- disp32, required for reloc info.
    set_modrm(2, esp);
    set_sib(scale, index, base);
    set_dispr(disp, rmode);
  }
}
275
// Encodes [index*scale + disp] (no base register). In the SIB byte,
// base == ebp with mod == 0 means "no base, 32-bit displacement".
Operand::Operand(Register index, ScaleFactor scale, int32_t disp,
                 RelocInfo::Mode rmode) {
  DCHECK(index != esp);  // illegal addressing mode
  // [index*scale + disp/r]
  set_modrm(0, esp);
  set_sib(scale, index, ebp);
  set_dispr(disp, rmode);
}
284
// True when the ModRM byte has mod == 0b11 (register-direct) and the
// reg/opcode-digit field is still zero (it is filled in at emit time).
bool Operand::is_reg_only() const {
  return (buf_[0] & 0xF8) == 0xC0;  // Addressing mode is register only.
}
288
// Extracts the register from a register-only operand (the low three
// r/m bits of the ModRM byte).
Register Operand::reg() const {
  DCHECK(is_reg_only());
  return Register::from_code(buf_[0] & 0x07);
}
293
294bool operator!=(Operand op, XMMRegister r) { return !op.is_reg(r); }
295
// Allocates every heap object recorded via Immediate::EmbeddedNumber /
// EmbeddedStringConstant and patches its handle into the code buffer at the
// recorded offset. Must run before the code is handed out.
void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
  DCHECK_IMPLIES(isolate == nullptr, heap_object_requests_.empty());
  for (auto& request : heap_object_requests_) {
    Handle<HeapObject> object;
    switch (request.kind()) {
      case HeapObjectRequest::kHeapNumber:
        object = isolate->factory()->NewHeapNumber<AllocationType::kOld>(
            request.heap_number());
        break;
      case HeapObjectRequest::kStringConstant: {
        const StringConstantBase* str = request.string();
        CHECK_NOT_NULL(str);
        object = str->AllocateStringConstant(isolate);
        break;
      }
    }
    // Overwrite the placeholder immediate emitted earlier at this offset.
    Address pc = reinterpret_cast<Address>(buffer_start_) + request.offset();
    WriteUnalignedValue(pc, object);
  }
}
316
317// -----------------------------------------------------------------------------
318// Implementation of Assembler.
319
320// Emit a single byte. Must always be inlined.
321#define EMIT(x) *pc_++ = (x)
322
// Constructs an assembler over |buffer|. Relocation info is written
// backwards from the end of the buffer, instructions forwards from pc_.
// The SSE feature cascade is enabled explicitly: SSE4.2 implies SSE4.1,
// which implies SSSE3, which implies SSE3.
Assembler::Assembler(const AssemblerOptions& options,
                     std::unique_ptr<AssemblerBuffer> buffer)
    : AssemblerBase(options, std::move(buffer)) {
  reloc_info_writer.Reposition(buffer_start_ + buffer_->size(), pc_);
  if (CpuFeatures::IsSupported(SSE4_2)) {
    EnableCpuFeature(SSE4_1);
  }
  if (CpuFeatures::IsSupported(SSE4_1)) {
    EnableCpuFeature(SSSE3);
  }
  if (CpuFeatures::IsSupported(SSSE3)) {
    EnableCpuFeature(SSE3);
  }
}
337
// Finalizes assembly and fills in |desc|: aligns metadata, writes code
// comments, installs requested heap objects, then computes the offsets of
// the metadata sections (laid out back-to-front from the end of the
// instruction area).
void Assembler::GetCode(Isolate* isolate, CodeDesc* desc,
                        SafepointTableBuilder* safepoint_table_builder,
                        int handler_table_offset) {
  // As a crutch to avoid having to add manual Align calls wherever we use a
  // raw workflow to create Code objects (mostly in tests), add another Align
  // call here. It does no harm - the end of the Code object is aligned to the
  // (larger) kCodeAlignment anyways.
  // TODO(jgruber): Consider moving responsibility for proper alignment to
  // metadata table builders (safepoint, handler, constant pool, code
  // comments).
  DataAlign(Code::kMetadataAlignment);

  const int code_comments_size = WriteCodeComments();

  // Finalize code (at this point overflow() may be true, but the gap ensures
  // that we are still not overlapping instructions and relocation info).
  DCHECK(pc_ <= reloc_info_writer.pos());  // No overlap.

  AllocateAndInstallRequestedHeapObjects(isolate);

  // Set up code descriptor.
  // TODO(jgruber): Reconsider how these offsets and sizes are maintained up to
  // this point to make CodeDesc initialization less fiddly.

  static constexpr int kConstantPoolSize = 0;  // ia32 has no constant pool.
  const int instruction_size = pc_offset();
  const int code_comments_offset = instruction_size - code_comments_size;
  const int constant_pool_offset = code_comments_offset - kConstantPoolSize;
  // Absent tables collapse onto the following section's offset.
  const int handler_table_offset2 = (handler_table_offset == kNoHandlerTable)
                                        ? constant_pool_offset
                                        : handler_table_offset;
  const int safepoint_table_offset =
      (safepoint_table_builder == kNoSafepointTable)
          ? handler_table_offset2
          : safepoint_table_builder->safepoint_table_offset();
  const int reloc_info_offset =
      static_cast<int>(reloc_info_writer.pos() - buffer_->start());
  CodeDesc::Initialize(desc, this, safepoint_table_offset,
                       handler_table_offset2, constant_pool_offset,
                       code_comments_offset, reloc_info_offset);
}
379
// After the collection pass, records which far jumps had displacements that
// fit in 8 bits; a second assembly pass can then shrink them. One bit per
// recorded far-jump position, packed 32 per word.
void Assembler::FinalizeJumpOptimizationInfo() {
  // Collection stage
  auto jump_opt = jump_optimization_info();
  if (jump_opt && jump_opt->is_collecting()) {
    auto& bitmap = jump_opt->farjmp_bitmap();
    int num = static_cast<int>(farjmp_positions_.size());
    // Only fill the bitmap once (it is empty on the first call).
    if (num && bitmap.empty()) {
      bool can_opt = false;

      bitmap.resize((num + 31) / 32, 0);
      for (int i = 0; i < num; i++) {
        int disp_pos = farjmp_positions_[i];
        int disp = long_at(disp_pos);
        if (is_int8(disp)) {
          // Mark jump i as shrinkable to a short jump.
          bitmap[i / 32] |= 1 << (i & 31);
          can_opt = true;
        }
      }
      if (can_opt) {
        jump_opt->set_optimizable();
      }
    }
  }
}
404
// Pads the instruction stream with nops until pc_offset() is a multiple of
// m (which must be a power of two).
void Assembler::Align(int m) {
  DCHECK(base::bits::IsPowerOfTwo(m));
  int mask = m - 1;
  int addr = pc_offset();
  Nop((m - (addr & mask)) & mask);
}
411
412bool Assembler::IsNop(Address addr) {
413  byte* a = reinterpret_cast<byte*>(addr);
414  while (*a == 0x66) a++;
415  if (*a == 0x90) return true;
416  if (a[0] == 0xF && a[1] == 0x1F) return true;
417  return false;
418}
419
// Emits exactly |bytes| bytes of nop instructions, using the recommended
// multi-byte encodings. The switch deliberately falls through: prefix bytes
// (0x66) are emitted first to stretch a shorter nop form, and lengths > 11
// loop, emitting 8-byte nops until the remainder fits a single form.
void Assembler::Nop(int bytes) {
  EnsureSpace ensure_space(this);
  // Multi byte nops from http://support.amd.com/us/Processor_TechDocs/40546.pdf
  while (bytes > 0) {
    switch (bytes) {
      case 2:
        EMIT(0x66);  // operand-size prefix turns the 1-byte nop into 2 bytes
        V8_FALLTHROUGH;
      case 1:
        EMIT(0x90);  // nop
        return;
      case 3:
        EMIT(0xF);  // 3-byte nop: 0F 1F 00
        EMIT(0x1F);
        EMIT(0);
        return;
      case 4:
        EMIT(0xF);  // 4-byte nop: 0F 1F 40 00
        EMIT(0x1F);
        EMIT(0x40);
        EMIT(0);
        return;
      case 6:
        EMIT(0x66);  // prefix the 5-byte form
        V8_FALLTHROUGH;
      case 5:
        EMIT(0xF);  // 5-byte nop: 0F 1F 44 00 00
        EMIT(0x1F);
        EMIT(0x44);
        EMIT(0);
        EMIT(0);
        return;
      case 7:
        EMIT(0xF);  // 7-byte nop: 0F 1F 80 00 00 00 00
        EMIT(0x1F);
        EMIT(0x80);
        EMIT(0);
        EMIT(0);
        EMIT(0);
        EMIT(0);
        return;
      default:
      case 11:
        EMIT(0x66);  // up to three prefixes extend the 8-byte form to 9-11
        bytes--;
        V8_FALLTHROUGH;
      case 10:
        EMIT(0x66);
        bytes--;
        V8_FALLTHROUGH;
      case 9:
        EMIT(0x66);
        bytes--;
        V8_FALLTHROUGH;
      case 8:
        EMIT(0xF);  // 8-byte nop: 0F 1F 84 00 00 00 00 00
        EMIT(0x1F);
        EMIT(0x84);
        EMIT(0);
        EMIT(0);
        EMIT(0);
        EMIT(0);
        EMIT(0);
        bytes -= 8;
    }
  }
}
487
// Pads with nops up to a 16-byte boundary.
void Assembler::CodeTargetAlign() {
  Align(16);  // Preferred alignment of jump targets on ia32.
}
491
void Assembler::cpuid() {
  // CPUID (0F A2).
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xA2);
}

void Assembler::pushad() {
  // PUSHAD (60): push all general-purpose registers.
  EnsureSpace ensure_space(this);
  EMIT(0x60);
}

void Assembler::popad() {
  // POPAD (61): pop all general-purpose registers.
  EnsureSpace ensure_space(this);
  EMIT(0x61);
}

void Assembler::pushfd() {
  // PUSHFD (9C): push EFLAGS.
  EnsureSpace ensure_space(this);
  EMIT(0x9C);
}

void Assembler::popfd() {
  // POPFD (9D): pop EFLAGS.
  EnsureSpace ensure_space(this);
  EMIT(0x9D);
}
517
void Assembler::push(const Immediate& x) {
  EnsureSpace ensure_space(this);
  if (x.is_int8()) {
    EMIT(0x6A);  // push imm8 (sign-extended)
    EMIT(x.immediate());
  } else {
    EMIT(0x68);  // push imm32
    emit(x);
  }
}

// Always emits the imm32 form, even for small values.
void Assembler::push_imm32(int32_t imm32) {
  EnsureSpace ensure_space(this);
  EMIT(0x68);
  emit(imm32);
}

void Assembler::push(Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x50 | src.code());  // push r32: 50+r
}

void Assembler::push(Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xFF);  // push r/m32: FF /6
  emit_operand(esi, src);
}

void Assembler::pop(Register dst) {
  // NOTE(review): presumably ensures reloc info has been written before a
  // pop is emitted (e.g. for a push/pop peephole) -- confirm intent.
  DCHECK_NOT_NULL(reloc_info_writer.last_pc());
  EnsureSpace ensure_space(this);
  EMIT(0x58 | dst.code());  // pop r32: 58+r
}

void Assembler::pop(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0x8F);  // pop r/m32: 8F /0
  emit_operand(eax, dst);
}

void Assembler::leave() {
  EnsureSpace ensure_space(this);
  EMIT(0xC9);  // leave: mov esp,ebp; pop ebp
}
562
// Byte and word moves. Byte forms require a byte-addressable register
// (al/cl/dl/bl on ia32); word forms carry the 0x66 operand-size prefix.

void Assembler::mov_b(Register dst, Operand src) {
  CHECK(dst.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x8A);  // mov r8, r/m8
  emit_operand(dst, src);
}

void Assembler::mov_b(Operand dst, const Immediate& src) {
  EnsureSpace ensure_space(this);
  EMIT(0xC6);  // mov r/m8, imm8: C6 /0
  emit_operand(eax, dst);
  EMIT(static_cast<int8_t>(src.immediate()));
}

void Assembler::mov_b(Operand dst, Register src) {
  CHECK(src.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x88);  // mov r/m8, r8
  emit_operand(src, dst);
}

void Assembler::mov_w(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x8B);  // mov r16, r/m16
  emit_operand(dst, src);
}

void Assembler::mov_w(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x89);  // mov r/m16, r16
  emit_operand(src, dst);
}

void Assembler::mov_w(Operand dst, const Immediate& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0xC7);  // mov r/m16, imm16: C7 /0
  emit_operand(eax, dst);
  // Emit the 16-bit immediate little-endian, one byte at a time.
  EMIT(static_cast<int8_t>(src.immediate() & 0xFF));
  EMIT(static_cast<int8_t>(src.immediate() >> 8));
}
606
// 32-bit mov overloads.

void Assembler::mov(Register dst, int32_t imm32) {
  EnsureSpace ensure_space(this);
  EMIT(0xB8 | dst.code());  // mov r32, imm32: B8+r
  emit(imm32);
}

void Assembler::mov(Register dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  EMIT(0xB8 | dst.code());  // mov r32, imm32 (possibly with reloc info)
  emit(x);
}

void Assembler::mov(Register dst, Handle<HeapObject> handle) {
  EnsureSpace ensure_space(this);
  EMIT(0xB8 | dst.code());  // mov r32, embedded-object immediate
  emit(handle);
}

void Assembler::mov(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x8B);  // mov r32, r/m32
  emit_operand(dst, src);
}

void Assembler::mov(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x89);  // mov r/m32, r32 with mod == 11 (register direct)
  EMIT(0xC0 | src.code() << 3 | dst.code());
}

void Assembler::mov(Operand dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  EMIT(0xC7);  // mov r/m32, imm32: C7 /0
  emit_operand(eax, dst);
  emit(x);
}

void Assembler::mov(Operand dst, Address src, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  EMIT(0xC7);  // mov r/m32, imm32 carrying an address with reloc info
  emit_operand(eax, dst);
  emit(src, rmode);
}

void Assembler::mov(Operand dst, Handle<HeapObject> handle) {
  EnsureSpace ensure_space(this);
  EMIT(0xC7);  // mov r/m32, embedded-object immediate
  emit_operand(eax, dst);
  emit(handle);
}

void Assembler::mov(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x89);  // mov r/m32, r32
  emit_operand(src, dst);
}
663
// Sign- and zero-extending loads (0F BE/BF, 0F B6/B7).

void Assembler::movsx_b(Register dst, Operand src) {
  DCHECK_IMPLIES(src.is_reg_only(), src.reg().is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xBE);  // movsx r32, r/m8
  emit_operand(dst, src);
}

void Assembler::movsx_w(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xBF);  // movsx r32, r/m16
  emit_operand(dst, src);
}

void Assembler::movzx_b(Register dst, Operand src) {
  DCHECK_IMPLIES(src.is_reg_only(), src.reg().is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xB6);  // movzx r32, r/m8
  emit_operand(dst, src);
}

void Assembler::movzx_w(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xB7);  // movzx r32, r/m16
  emit_operand(dst, src);
}
693
// 64-bit moves between an XMM register and memory.

void Assembler::movq(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);  // movq xmm, r/m64: F3 0F 7E
  EMIT(0x0F);
  EMIT(0x7E);
  emit_operand(dst, src);
}

void Assembler::movq(Operand dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);  // movq r/m64, xmm: 66 0F D6
  EMIT(0x0F);
  EMIT(0xD6);
  emit_operand(src, dst);
}
709
// Conditional move: dst = src if condition cc holds.
void Assembler::cmov(Condition cc, Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  // Opcode: 0f 40 + cc /r.
  EMIT(0x0F);
  EMIT(0x40 + cc);  // the condition code is folded into the opcode byte
  emit_operand(dst, src);
}
717
void Assembler::cld() {
  // CLD (FC): clear direction flag so string ops move forward.
  EnsureSpace ensure_space(this);
  EMIT(0xFC);
}

void Assembler::rep_movs() {
  // REP MOVSD (F3 A5): copy ecx dwords from [esi] to [edi].
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0xA5);
}

void Assembler::rep_stos() {
  // REP STOSD (F3 AB): store eax into ecx dwords at [edi].
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0xAB);
}

void Assembler::stos() {
  // STOSD (AB): store eax at [edi], advance edi.
  EnsureSpace ensure_space(this);
  EMIT(0xAB);
}
739
// Exchange-and-add in dword, byte and word widths (0F C1 / 0F C0).

void Assembler::xadd(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xC1);  // xadd r/m32, r32
  emit_operand(src, dst);
}

void Assembler::xadd_b(Operand dst, Register src) {
  DCHECK(src.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xC0);  // xadd r/m8, r8
  emit_operand(src, dst);
}

void Assembler::xadd_w(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);  // operand-size prefix for the 16-bit form
  EMIT(0x0F);
  EMIT(0xC1);
  emit_operand(src, dst);
}
762
// Exchange instructions. Note: xchg with a memory operand is implicitly
// locked by the CPU.

void Assembler::xchg(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  if (src == eax || dst == eax) {  // Single-byte encoding.
    EMIT(0x90 | (src == eax ? dst.code() : src.code()));  // xchg eax, r32
  } else {
    EMIT(0x87);  // xchg r/m32, r32, register-direct form
    EMIT(0xC0 | src.code() << 3 | dst.code());
  }
}

void Assembler::xchg(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x87);  // xchg r32, r/m32
  emit_operand(dst, src);
}

void Assembler::xchg_b(Register reg, Operand op) {
  DCHECK(reg.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x86);  // xchg r8, r/m8
  emit_operand(reg, op);
}

void Assembler::xchg_w(Register reg, Operand op) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);  // operand-size prefix for the 16-bit form
  EMIT(0x87);
  emit_operand(reg, op);
}
792
// LOCK prefix (F0) for the following instruction.
void Assembler::lock() {
  EnsureSpace ensure_space(this);
  EMIT(0xF0);
}
797
// Compare-and-exchange in dword, byte and word widths (0F B1 / 0F B0).

void Assembler::cmpxchg(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xB1);  // cmpxchg r/m32, r32
  emit_operand(src, dst);
}

void Assembler::cmpxchg_b(Operand dst, Register src) {
  DCHECK(src.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xB0);  // cmpxchg r/m8, r8
  emit_operand(src, dst);
}

void Assembler::cmpxchg_w(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);  // operand-size prefix for the 16-bit form
  EMIT(0x0F);
  EMIT(0xB1);
  emit_operand(src, dst);
}
820
821void Assembler::cmpxchg8b(Operand dst) {
822  EnsureSpace enure_space(this);
823  EMIT(0x0F);
824  EMIT(0xC7);
825  emit_operand(ecx, dst);
826}
827
void Assembler::mfence() {
  // MFENCE (0F AE F0): full memory barrier.
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xAE);
  EMIT(0xF0);
}

void Assembler::lfence() {
  // LFENCE (0F AE E8): load barrier / speculation fence.
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xAE);
  EMIT(0xE8);
}

void Assembler::pause() {
  // PAUSE (F3 90): spin-wait hint.
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x90);
}
847
// Add-with-carry and add. emit_arith picks the shortest immediate form;
// the leading digit (2 for adc, 0 for add) is the opcode extension.

void Assembler::adc(Register dst, int32_t imm32) {
  EnsureSpace ensure_space(this);
  emit_arith(2, Operand(dst), Immediate(imm32));
}

void Assembler::adc(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x13);  // adc r32, r/m32
  emit_operand(dst, src);
}

void Assembler::add(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x03);  // add r32, r/m32
  emit_operand(dst, src);
}

void Assembler::add(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x01);  // add r/m32, r32
  emit_operand(src, dst);
}

void Assembler::add(Operand dst, const Immediate& x) {
  // NOTE(review): presumably ensures reloc info has been written before
  // this form is emitted -- confirm intent (mirrors pop(Register)).
  DCHECK_NOT_NULL(reloc_info_writer.last_pc());
  EnsureSpace ensure_space(this);
  emit_arith(0, dst, x);
}
876
// Bitwise AND; opcode-extension digit 4 for the immediate forms.

void Assembler::and_(Register dst, int32_t imm32) {
  and_(dst, Immediate(imm32));
}

void Assembler::and_(Register dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  emit_arith(4, Operand(dst), x);
}

void Assembler::and_(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x23);  // and r32, r/m32
  emit_operand(dst, src);
}

void Assembler::and_(Operand dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  emit_arith(4, dst, x);
}

void Assembler::and_(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x21);  // and r/m32, r32
  emit_operand(src, dst);
}
902
// Compare instructions in byte, word and dword widths.

void Assembler::cmpb(Operand op, Immediate imm8) {
  DCHECK(imm8.is_int8() || imm8.is_uint8());
  EnsureSpace ensure_space(this);
  if (op.is_reg(eax)) {
    EMIT(0x3C);  // short form: cmp al, imm8
  } else {
    EMIT(0x80);  // cmp r/m8, imm8: 80 /7
    emit_operand(edi, op);  // edi == 7
  }
  emit_b(imm8);
}

void Assembler::cmpb(Operand op, Register reg) {
  CHECK(reg.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x38);  // cmp r/m8, r8
  emit_operand(reg, op);
}

void Assembler::cmpb(Register reg, Operand op) {
  CHECK(reg.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x3A);  // cmp r8, r/m8
  emit_operand(reg, op);
}

void Assembler::cmpw(Operand op, Immediate imm16) {
  DCHECK(imm16.is_int16() || imm16.is_uint16());
  EnsureSpace ensure_space(this);
  EMIT(0x66);  // operand-size prefix
  EMIT(0x81);  // cmp r/m16, imm16: 81 /7
  emit_operand(edi, op);
  emit_w(imm16);
}

void Assembler::cmpw(Register reg, Operand op) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x3B);  // cmp r16, r/m16
  emit_operand(reg, op);
}

void Assembler::cmpw(Operand op, Register reg) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x39);  // cmp r/m16, r16
  emit_operand(reg, op);
}

void Assembler::cmp(Register reg, int32_t imm32) {
  EnsureSpace ensure_space(this);
  emit_arith(7, Operand(reg), Immediate(imm32));  // digit 7 == cmp
}

void Assembler::cmp(Register reg, Handle<HeapObject> handle) {
  EnsureSpace ensure_space(this);
  emit_arith(7, Operand(reg), Immediate(handle));
}

void Assembler::cmp(Register reg, Operand op) {
  EnsureSpace ensure_space(this);
  EMIT(0x3B);  // cmp r32, r/m32
  emit_operand(reg, op);
}

void Assembler::cmp(Operand op, Register reg) {
  EnsureSpace ensure_space(this);
  EMIT(0x39);  // cmp r/m32, r32
  emit_operand(reg, op);
}

void Assembler::cmp(Operand op, const Immediate& imm) {
  EnsureSpace ensure_space(this);
  emit_arith(7, op, imm);
}

void Assembler::cmp(Operand op, Handle<HeapObject> handle) {
  EnsureSpace ensure_space(this);
  emit_arith(7, op, Immediate(handle));
}

void Assembler::cmpb_al(Operand op) {
  EnsureSpace ensure_space(this);
  EMIT(0x38);             // CMP r/m8, r8
  emit_operand(eax, op);  // eax has same code as register al.
}

void Assembler::cmpw_ax(Operand op) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x39);             // CMP r/m16, r16
  emit_operand(eax, op);  // eax has same code as register ax.
}
996
// Decrement instructions and cdq.

void Assembler::dec_b(Register dst) {
  CHECK(dst.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0xFE);  // dec r/m8: FE /1, register-direct form
  EMIT(0xC8 | dst.code());
}

void Assembler::dec_b(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xFE);  // dec r/m8: FE /1 (ecx == 1 supplies the digit)
  emit_operand(ecx, dst);
}

void Assembler::dec(Register dst) {
  EnsureSpace ensure_space(this);
  EMIT(0x48 | dst.code());  // dec r32: 48+r
}

void Assembler::dec(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xFF);  // dec r/m32: FF /1
  emit_operand(ecx, dst);
}

void Assembler::cdq() {
  // CDQ (99): sign-extend eax into edx:eax.
  EnsureSpace ensure_space(this);
  EMIT(0x99);
}
1025
// Signed / unsigned division of edx:eax by the operand.

void Assembler::idiv(Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);  // idiv r/m32: F7 /7 (edi == 7)
  emit_operand(edi, src);
}

void Assembler::div(Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);  // div r/m32: F7 /6 (esi == 6)
  emit_operand(esi, src);
}
1037
// Signed multiply overloads.

void Assembler::imul(Register reg) {
  // One-operand form: edx:eax = eax * reg (F7 /5).
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  EMIT(0xE8 | reg.code());
}

void Assembler::imul(Register dst, Operand src) {
  // Two-operand form: dst = dst * src (0F AF).
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xAF);
  emit_operand(dst, src);
}

void Assembler::imul(Register dst, Register src, int32_t imm32) {
  imul(dst, Operand(src), imm32);
}

void Assembler::imul(Register dst, Operand src, int32_t imm32) {
  // Three-operand form: dst = src * imm, using the imm8 encoding when the
  // immediate fits in a signed byte.
  EnsureSpace ensure_space(this);
  if (is_int8(imm32)) {
    EMIT(0x6B);  // imul r32, r/m32, imm8
    emit_operand(dst, src);
    EMIT(imm32);
  } else {
    EMIT(0x69);  // imul r32, r/m32, imm32
    emit_operand(dst, src);
    emit(imm32);
  }
}
1067
// Increments a 32-bit register using the short form: 40+r (INC r32).
void Assembler::inc(Register dst) {
  EnsureSpace ensure_space(this);
  EMIT(0x40 | dst.code());
}

// Increments the 32-bit value at |dst|. Encoding: FF /0 (eax encodes /0).
void Assembler::inc(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xFF);
  emit_operand(eax, dst);
}

// Loads the effective address of |src| into |dst|. Encoding: 8D /r.
void Assembler::lea(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x8D);
  emit_operand(dst, src);
}
1084
// One-operand unsigned multiply: edx:eax = eax * src. Encoding: F7 /4.
void Assembler::mul(Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  EMIT(0xE0 | src.code());
}

// Two's-complement negate of a register. Encoding: F7 /3, mod 11.
void Assembler::neg(Register dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  EMIT(0xD8 | dst.code());
}

// Two's-complement negate of r/m32. Encoding: F7 /3 (ebx encodes /3).
void Assembler::neg(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  emit_operand(ebx, dst);
}

// Bitwise NOT of a register. Encoding: F7 /2, mod 11.
void Assembler::not_(Register dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  EMIT(0xD0 | dst.code());
}

// Bitwise NOT of r/m32. Encoding: F7 /2 (edx encodes /2).
void Assembler::not_(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  emit_operand(edx, dst);
}
1114
// OR register with immediate; emit_arith's selector 1 is the /1 (OR)
// opcode extension of the 81/83 group.
void Assembler::or_(Register dst, int32_t imm32) {
  EnsureSpace ensure_space(this);
  emit_arith(1, Operand(dst), Immediate(imm32));
}

// OR r32, r/m32. Encoding: 0B /r.
void Assembler::or_(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0B);
  emit_operand(dst, src);
}

// OR r/m32 with immediate (81/83 group, /1 extension).
void Assembler::or_(Operand dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  emit_arith(1, dst, x);
}

// OR r/m32, r32. Encoding: 09 /r.
void Assembler::or_(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x09);
  emit_operand(src, dst);
}
1136
// Rotate left through carry. D1 /2 for a count of 1, C1 /2 ib otherwise;
// 0xD0 = mod 11 | /2 << 3.
void Assembler::rcl(Register dst, uint8_t imm8) {
  EnsureSpace ensure_space(this);
  DCHECK(is_uint5(imm8));  // illegal shift count
  if (imm8 == 1) {
    EMIT(0xD1);
    EMIT(0xD0 | dst.code());
  } else {
    EMIT(0xC1);
    EMIT(0xD0 | dst.code());
    EMIT(imm8);
  }
}

// Rotate right through carry. D1 /3 for a count of 1, C1 /3 ib otherwise.
void Assembler::rcr(Register dst, uint8_t imm8) {
  EnsureSpace ensure_space(this);
  DCHECK(is_uint5(imm8));  // illegal shift count
  if (imm8 == 1) {
    EMIT(0xD1);
    EMIT(0xD8 | dst.code());
  } else {
    EMIT(0xC1);
    EMIT(0xD8 | dst.code());
    EMIT(imm8);
  }
}

// Rotate left. D1 /0 for a count of 1, C1 /0 ib otherwise (eax encodes /0).
void Assembler::rol(Operand dst, uint8_t imm8) {
  EnsureSpace ensure_space(this);
  DCHECK(is_uint5(imm8));  // illegal shift count
  if (imm8 == 1) {
    EMIT(0xD1);
    emit_operand(eax, dst);
  } else {
    EMIT(0xC1);
    emit_operand(eax, dst);
    EMIT(imm8);
  }
}

// Rotate left by the count in cl. Encoding: D3 /0.
void Assembler::rol_cl(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xD3);
  emit_operand(eax, dst);
}

// Rotate right. D1 /1 for a count of 1, C1 /1 ib otherwise (ecx encodes /1).
void Assembler::ror(Operand dst, uint8_t imm8) {
  EnsureSpace ensure_space(this);
  DCHECK(is_uint5(imm8));  // illegal shift count
  if (imm8 == 1) {
    EMIT(0xD1);
    emit_operand(ecx, dst);
  } else {
    EMIT(0xC1);
    emit_operand(ecx, dst);
    EMIT(imm8);
  }
}

// Rotate right by the count in cl. Encoding: D3 /1.
void Assembler::ror_cl(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xD3);
  emit_operand(ecx, dst);
}

// Arithmetic shift right. D1 /7 for a count of 1, C1 /7 ib otherwise
// (edi encodes /7).
void Assembler::sar(Operand dst, uint8_t imm8) {
  EnsureSpace ensure_space(this);
  DCHECK(is_uint5(imm8));  // illegal shift count
  if (imm8 == 1) {
    EMIT(0xD1);
    emit_operand(edi, dst);
  } else {
    EMIT(0xC1);
    emit_operand(edi, dst);
    EMIT(imm8);
  }
}

// Arithmetic shift right by the count in cl. Encoding: D3 /7.
void Assembler::sar_cl(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xD3);
  emit_operand(edi, dst);
}
1219
// Subtract with borrow: SBB r32, r/m32. Encoding: 1B /r.
void Assembler::sbb(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x1B);
  emit_operand(dst, src);
}

// Double-precision shift left: shifts |dst| left by |shift| bits,
// filling the vacated bits from |src|. Encoding: 0F A4 /r ib, where
// |dst| is the r/m operand and |src| occupies the reg field.
void Assembler::shld(Register dst, Register src, uint8_t shift) {
  DCHECK(is_uint5(shift));
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xA4);
  emit_operand(src, Operand(dst));
  EMIT(shift);
}

// Double-precision shift left by the count in cl. Encoding: 0F A5 /r.
void Assembler::shld_cl(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xA5);
  emit_operand(src, Operand(dst));
}

// Shift left. D1 /4 for a count of 1, C1 /4 ib otherwise (esp encodes /4).
void Assembler::shl(Operand dst, uint8_t imm8) {
  EnsureSpace ensure_space(this);
  DCHECK(is_uint5(imm8));  // illegal shift count
  if (imm8 == 1) {
    EMIT(0xD1);
    emit_operand(esp, dst);
  } else {
    EMIT(0xC1);
    emit_operand(esp, dst);
    EMIT(imm8);
  }
}

// Shift left by the count in cl. Encoding: D3 /4.
void Assembler::shl_cl(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xD3);
  emit_operand(esp, dst);
}
1260
// Logical shift right. D1 /5 for a count of 1, C1 /5 ib otherwise
// (ebp encodes /5).
void Assembler::shr(Operand dst, uint8_t imm8) {
  EnsureSpace ensure_space(this);
  DCHECK(is_uint5(imm8));  // illegal shift count
  if (imm8 == 1) {
    EMIT(0xD1);
    emit_operand(ebp, dst);
  } else {
    EMIT(0xC1);
    emit_operand(ebp, dst);
    EMIT(imm8);
  }
}

// Logical shift right by the count in cl. Encoding: D3 /5.
void Assembler::shr_cl(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xD3);
  emit_operand(ebp, dst);
}

// Double-precision shift right: shifts |dst| right by |shift| bits,
// filling the vacated bits from |src|. Encoding: 0F AC /r ib.
void Assembler::shrd(Register dst, Register src, uint8_t shift) {
  DCHECK(is_uint5(shift));
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xAC);
  emit_operand(src, Operand(dst));
  EMIT(shift);
}

// Double-precision shift right by the count in cl. Encoding: 0F AD /r.
void Assembler::shrd_cl(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xAD);
  emit_operand(src, dst);
}
1295
// SUB r/m32 with immediate; selector 5 is the /5 (SUB) extension of the
// 81/83 opcode group.
void Assembler::sub(Operand dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  emit_arith(5, dst, x);
}

// SUB r32, r/m32. Encoding: 2B /r.
void Assembler::sub(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x2B);
  emit_operand(dst, src);
}

// SUB r/m32, r32. Encoding: 29 /r.
void Assembler::sub(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x29);
  emit_operand(src, dst);
}

// Subtracts |imm| from esp, always using the full 81 /5 id encoding so
// the immediate occupies a fixed 32 bits (never the short imm8 form).
void Assembler::sub_sp_32(uint32_t imm) {
  EnsureSpace ensure_space(this);
  EMIT(0x81);  // using a literal 32-bit immediate.
  static constexpr Register ireg = Register::from_code(5);  // /5 = SUB.
  emit_operand(ireg, Operand(esp));
  emit(imm);
}
1320
// TEST register against an immediate; prefers the shorter byte form
// when the immediate fits in 8 bits.
void Assembler::test(Register reg, const Immediate& imm) {
  if (imm.is_uint8()) {
    test_b(reg, imm);
    return;
  }

  EnsureSpace ensure_space(this);
  // This is not using emit_arith because test doesn't support
  // sign-extension of 8-bit operands.
  if (reg == eax) {
    EMIT(0xA9);  // Short form: TEST eax, imm32.
  } else {
    EMIT(0xF7);  // TEST r/m32, imm32 (/0 extension).
    EMIT(0xC0 | reg.code());
  }
  emit(imm);
}

// TEST r32, r/m32. Encoding: 85 /r.
void Assembler::test(Register reg, Operand op) {
  EnsureSpace ensure_space(this);
  EMIT(0x85);
  emit_operand(reg, op);
}

// TEST r8, r/m8. Encoding: 84 /r.
void Assembler::test_b(Register reg, Operand op) {
  CHECK(reg.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x84);
  emit_operand(reg, op);
}

// TEST r/m32 against an immediate, delegating to the register or byte
// forms when they yield a shorter encoding.
void Assembler::test(Operand op, const Immediate& imm) {
  if (op.is_reg_only()) {
    test(op.reg(), imm);
    return;
  }
  if (imm.is_uint8()) {
    return test_b(op, imm);
  }
  EnsureSpace ensure_space(this);
  EMIT(0xF7);  // TEST r/m32, imm32 (/0 extension via eax).
  emit_operand(eax, op);
  emit(imm);
}
1365
// TEST a register against a byte immediate.
void Assembler::test_b(Register reg, Immediate imm8) {
  DCHECK(imm8.is_uint8());
  EnsureSpace ensure_space(this);
  // Only use test against byte for registers that have a byte
  // variant: eax, ebx, ecx, and edx.
  if (reg == eax) {
    EMIT(0xA8);  // Short form: TEST al, imm8.
    emit_b(imm8);
  } else if (reg.is_byte_register()) {
    emit_arith_b(0xF6, 0xC0, reg, static_cast<uint8_t>(imm8.immediate()));
  } else {
    // No byte variant: fall back to a 16-bit test (66 F7 /0 iw); the
    // uint8 immediate is emitted as a zero-extended word.
    EMIT(0x66);
    EMIT(0xF7);
    EMIT(0xC0 | reg.code());
    emit_w(imm8);
  }
}

// TEST r/m8 against a byte immediate. Encoding: F6 /0 ib.
void Assembler::test_b(Operand op, Immediate imm8) {
  if (op.is_reg_only()) {
    test_b(op.reg(), imm8);
    return;
  }
  EnsureSpace ensure_space(this);
  EMIT(0xF6);
  emit_operand(eax, op);
  emit_b(imm8);
}
1394
1395void Assembler::test_w(Register reg, Immediate imm16) {
1396  DCHECK(imm16.is_int16() || imm16.is_uint16());
1397  EnsureSpace ensure_space(this);
1398  if (reg == eax) {
1399    EMIT(0xA9);
1400    emit_w(imm16);
1401  } else {
1402    EMIT(0x66);
1403    EMIT(0xF7);
1404    EMIT(0xC0 | reg.code());
1405    emit_w(imm16);
1406  }
1407}
1408
// TEST r16, r/m16. Encoding: 66 85 /r.
void Assembler::test_w(Register reg, Operand op) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x85);
  emit_operand(reg, op);
}

// TEST r/m16 against a 16-bit immediate, delegating to the register
// form when |op| is a plain register.
void Assembler::test_w(Operand op, Immediate imm16) {
  DCHECK(imm16.is_int16() || imm16.is_uint16());
  if (op.is_reg_only()) {
    test_w(op.reg(), imm16);
    return;
  }
  EnsureSpace ensure_space(this);
  EMIT(0x66);  // Operand-size override prefix.
  EMIT(0xF7);  // TEST r/m16, imm16 (/0 extension via eax).
  emit_operand(eax, op);
  emit_w(imm16);
}
1428
// XOR register with immediate; selector 6 is the /6 (XOR) extension of
// the 81/83 opcode group.
void Assembler::xor_(Register dst, int32_t imm32) {
  EnsureSpace ensure_space(this);
  emit_arith(6, Operand(dst), Immediate(imm32));
}

// XOR r32, r/m32. Encoding: 33 /r.
void Assembler::xor_(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x33);
  emit_operand(dst, src);
}

// XOR r/m32, r32. Encoding: 31 /r.
void Assembler::xor_(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x31);
  emit_operand(src, dst);
}

// XOR r/m32 with immediate (81/83 group, /6 extension).
void Assembler::xor_(Operand dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  emit_arith(6, dst, x);
}

// Reverses the byte order of a 32-bit register. Encoding: 0F C8+r.
void Assembler::bswap(Register dst) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xC8 + dst.code());
}
1456
// Bit test: copies the bit of |dst| selected by |src| into CF.
// Encoding: 0F A3 /r.
void Assembler::bt(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xA3);
  emit_operand(src, dst);
}

// Bit test and set: as bt, but also sets the selected bit.
// Encoding: 0F AB /r.
void Assembler::bts(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xAB);
  emit_operand(src, dst);
}

// Bit scan reverse: index of the highest set bit of |src| into |dst|.
// Encoding: 0F BD /r.
void Assembler::bsr(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xBD);
  emit_operand(dst, src);
}

// Bit scan forward: index of the lowest set bit of |src| into |dst|.
// Encoding: 0F BC /r.
void Assembler::bsf(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xBC);
  emit_operand(dst, src);
}
1484
// Halts the processor (privileged). Encoding: F4.
void Assembler::hlt() {
  EnsureSpace ensure_space(this);
  EMIT(0xF4);
}

// Breakpoint trap. Encoding: CC.
void Assembler::int3() {
  EnsureSpace ensure_space(this);
  EMIT(0xCC);
}

// One-byte no-op. Encoding: 90.
void Assembler::nop() {
  EnsureSpace ensure_space(this);
  EMIT(0x90);
}

// Near return, optionally popping |imm16| bytes of arguments off the
// stack: C3 for a plain return, C2 iw otherwise.
void Assembler::ret(int imm16) {
  EnsureSpace ensure_space(this);
  DCHECK(is_uint16(imm16));
  if (imm16 == 0) {
    EMIT(0xC3);
  } else {
    EMIT(0xC2);
    EMIT(imm16 & 0xFF);         // Little-endian imm16: low byte first.
    EMIT((imm16 >> 8) & 0xFF);
  }
}

// Undefined instruction, used to force a fault. Encoding: 0F 0B.
void Assembler::ud2() {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x0B);
}
1517
1518// Labels refer to positions in the (to be) generated code.
1519// There are bound, linked, and unused labels.
1520//
1521// Bound labels refer to known positions in the already
1522// generated code. pos() is the position the label refers to.
1523//
1524// Linked labels refer to unknown positions in the code
// to be generated; pos() is the position of the 32-bit
1526// Displacement of the last instruction using the label.
1527
// Debug helper: prints the state of label |L|. For a linked label it
// walks the whole displacement chain, using a local Label copy so that
// |L| itself is left untouched.
void Assembler::print(const Label* L) {
  if (L->is_unused()) {
    PrintF("unused label\n");
  } else if (L->is_bound()) {
    PrintF("bound label to %d\n", L->pos());
  } else if (L->is_linked()) {
    Label l;
    l.link_to(L->pos());
    PrintF("unbound label");
    while (l.is_linked()) {
      Displacement disp = disp_at(&l);
      PrintF("@ %d ", l.pos());
      disp.print();
      PrintF("\n");
      disp.next(&l);  // Advance to the previous link in the chain.
    }
  } else {
    PrintF("label in inconsistent state (pos = %d)\n", L->pos_);
  }
}
1548
// Binds label |L| to code position |pos|, patching every instruction
// that was linked to it. Three chains are processed: the 32-bit
// displacement chain, the near (8-bit) chain, and — when jump
// optimization is active — recorded far jumps that were shrunk to
// short form.
void Assembler::bind_to(Label* L, int pos) {
  EnsureSpace ensure_space(this);
  DCHECK(0 <= pos && pos <= pc_offset());  // must have a valid binding position
  // Patch every 32-bit fixup in the link chain; disp.next(L) walks to
  // the previous link.
  while (L->is_linked()) {
    Displacement disp = disp_at(L);
    int fixup_pos = L->pos();
    if (disp.type() == Displacement::CODE_ABSOLUTE) {
      // Absolute address into the code buffer; remember the spot so it
      // can be relocated if the buffer moves.
      long_at_put(fixup_pos, reinterpret_cast<int>(buffer_start_ + pos));
      internal_reference_positions_.push_back(fixup_pos);
    } else if (disp.type() == Displacement::CODE_RELATIVE) {
      // Relative to Code heap object pointer.
      long_at_put(fixup_pos, pos + Code::kHeaderSize - kHeapObjectTag);
    } else {
      if (disp.type() == Displacement::UNCONDITIONAL_JUMP) {
        DCHECK_EQ(byte_at(fixup_pos - 1), 0xE9);  // jmp expected
      }
      // Relative address, relative to point after address.
      int imm32 = pos - (fixup_pos + sizeof(int32_t));
      long_at_put(fixup_pos, imm32);
    }
    disp.next(L);
  }
  // Patch the near-jump chain: each entry holds a non-positive 8-bit
  // offset to the previous entry (0 terminates the chain).
  while (L->is_near_linked()) {
    int fixup_pos = L->near_link_pos();
    int offset_to_next =
        static_cast<int>(*reinterpret_cast<int8_t*>(addr_at(fixup_pos)));
    DCHECK_LE(offset_to_next, 0);
    // Relative address, relative to point after address.
    int disp = pos - fixup_pos - sizeof(int8_t);
    CHECK(0 <= disp && disp <= 127);  // Must fit a forward rel8.
    set_byte_at(fixup_pos, disp);
    if (offset_to_next < 0) {
      L->link_to(fixup_pos + offset_to_next, Label::kNear);
    } else {
      L->UnuseNear();
    }
  }

  // Optimization stage: patch far jumps that were emitted in short form
  // because the optimizer predicted their targets to be in rel8 range.
  auto jump_opt = jump_optimization_info();
  if (jump_opt && jump_opt->is_optimizing()) {
    auto it = label_farjmp_maps_.find(L);
    if (it != label_farjmp_maps_.end()) {
      auto& pos_vector = it->second;
      for (auto fixup_pos : pos_vector) {
        int disp = pos - (fixup_pos + sizeof(int8_t));
        CHECK(is_int8(disp));  // The optimizer's prediction must hold.
        set_byte_at(fixup_pos, disp);
      }
      label_farjmp_maps_.erase(it);
    }
  }
  L->bind_to(pos);
}
1603
// Binds |L| to the current code position.
void Assembler::bind(Label* L) {
  EnsureSpace ensure_space(this);
  DCHECK(!L->is_bound());  // label can only be bound once
  bind_to(L, pc_offset());
}

// Records |pos| as the location of a shrunk far jump's 8-bit
// displacement, to be patched when |L| is bound.
void Assembler::record_farjmp_position(Label* L, int pos) {
  auto& pos_vector = label_farjmp_maps_[L];
  pos_vector.push_back(pos);
}

// Returns whether the |idx|-th recorded far jump was marked by the jump
// optimizer as shrinkable to short (rel8) form. Disabled entirely when
// predictable code size is requested.
bool Assembler::is_optimizable_farjmp(int idx) {
  if (predictable_code_size()) return false;

  auto jump_opt = jump_optimization_info();
  CHECK(jump_opt->is_optimizing());

  auto& bitmap = jump_opt->farjmp_bitmap();
  CHECK(idx < static_cast<int>(bitmap.size() * 32));
  return !!(bitmap[idx / 32] & (1 << (idx & 31)));
}
1625
// CALL to a label: E8 rel32. For a bound (backward) target the
// displacement is relative to the end of this 5-byte instruction;
// otherwise the fixup is linked onto the label's chain.
void Assembler::call(Label* L) {
  EnsureSpace ensure_space(this);
  if (L->is_bound()) {
    const int long_size = 5;
    int offs = L->pos() - pc_offset();
    DCHECK_LE(offs, 0);  // Backward call only.
    // 1110 1000 #32-bit disp.
    EMIT(0xE8);
    emit(offs - long_size);
  } else {
    // 1110 1000 #32-bit disp.
    EMIT(0xE8);
    emit_disp(L, Displacement::OTHER);
  }
}

// CALL to an address: E8. Runtime entries are emitted as relocated
// values; other targets become pc-relative displacements.
void Assembler::call(Address entry, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  DCHECK(!RelocInfo::IsCodeTarget(rmode));
  EMIT(0xE8);
  if (RelocInfo::IsRuntimeEntry(rmode)) {
    emit(entry, rmode);
  } else {
    // rel32 is relative to the end of the displacement field.
    emit(entry - (reinterpret_cast<Address>(pc_) + sizeof(int32_t)), rmode);
  }
}

// CALL for WebAssembly targets: E8 with the entry emitted verbatim
// under |rmode| relocation.
void Assembler::wasm_call(Address entry, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  EMIT(0xE8);
  emit(entry, rmode);
}

// Indirect CALL through r/m32. Encoding: FF /2 (edx encodes /2).
void Assembler::call(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xFF);
  emit_operand(edx, adr);
}

// CALL to a Code object: E8 with a code-target relocation.
void Assembler::call(Handle<Code> code, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  DCHECK(code->IsExecutable());
  EMIT(0xE8);
  emit(code, rmode);
}
1672
// Emits a pc-relative jump by |offset| (measured from the start of this
// instruction), choosing the 2-byte EB rel8 form when it fits, unless
// predictable code size is requested.
void Assembler::jmp_rel(int offset) {
  EnsureSpace ensure_space(this);
  const int short_size = 2;
  const int long_size = 5;
  if (is_int8(offset - short_size) && !predictable_code_size()) {
    // 1110 1011 #8-bit disp.
    EMIT(0xEB);
    EMIT((offset - short_size) & 0xFF);
  } else {
    // 1110 1001 #32-bit disp.
    EMIT(0xE9);
    emit(offset - long_size);
  }
}
1687
// JMP to a label. Bound targets go through jmp_rel; unbound ones are
// linked onto the label's chain, either as an 8-bit near displacement
// or a 32-bit one. When jump optimization is active, a far jump whose
// target is predicted to be in rel8 range is emitted short (EB 00) and
// recorded for patching at bind time.
void Assembler::jmp(Label* L, Label::Distance distance) {
  if (L->is_bound()) {
    int offset = L->pos() - pc_offset();
    DCHECK_LE(offset, 0);  // backward jump.
    jmp_rel(offset);
    return;
  }

  EnsureSpace ensure_space(this);
  if (distance == Label::kNear) {
    EMIT(0xEB);
    emit_near_disp(L);
  } else {
    auto jump_opt = jump_optimization_info();
    if (V8_UNLIKELY(jump_opt)) {
      if (jump_opt->is_optimizing() && is_optimizable_farjmp(farjmp_num_++)) {
        EMIT(0xEB);
        record_farjmp_position(L, pc_offset());
        EMIT(0);  // Placeholder rel8, patched in bind_to.
        return;
      }
      if (jump_opt->is_collecting()) {
        // Remember where the 32-bit displacement will live.
        farjmp_positions_.push_back(pc_offset() + 1);
      }
    }
    // 1110 1001 #32-bit disp.
    EMIT(0xE9);
    emit_disp(L, Displacement::UNCONDITIONAL_JUMP);
  }
}
1718
// JMP to an address: E9. Runtime entries and wasm calls are emitted as
// relocated values; other targets become pc-relative displacements.
void Assembler::jmp(Address entry, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  DCHECK(!RelocInfo::IsCodeTarget(rmode));
  EMIT(0xE9);
  if (RelocInfo::IsRuntimeEntry(rmode) || RelocInfo::IsWasmCall(rmode)) {
    emit(entry, rmode);
  } else {
    // rel32 is relative to the end of the displacement field.
    emit(entry - (reinterpret_cast<Address>(pc_) + sizeof(int32_t)), rmode);
  }
}

// Indirect JMP through r/m32. Encoding: FF /4 (esp encodes /4).
void Assembler::jmp(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xFF);
  emit_operand(esp, adr);
}

// JMP to a Code object: E9 with a code-target relocation.
void Assembler::jmp(Handle<Code> code, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  EMIT(0xE9);
  emit(code, rmode);
}
1742
// Conditional jump to a label. Bound backward targets use the 2-byte
// 70+cc rel8 form when in range, else the 6-byte 0F 80+cc rel32 form.
// Unbound targets are linked, with the same far-jump shrinking scheme
// as Assembler::jmp when jump optimization is active.
void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
  EnsureSpace ensure_space(this);
  DCHECK(0 <= cc && static_cast<int>(cc) < 16);
  if (L->is_bound()) {
    const int short_size = 2;
    const int long_size = 6;
    int offs = L->pos() - pc_offset();
    DCHECK_LE(offs, 0);  // Backward jump only.
    if (is_int8(offs - short_size)) {
      // 0111 tttn #8-bit disp
      EMIT(0x70 | cc);
      EMIT((offs - short_size) & 0xFF);
    } else {
      // 0000 1111 1000 tttn #32-bit disp
      EMIT(0x0F);
      EMIT(0x80 | cc);
      emit(offs - long_size);
    }
  } else if (distance == Label::kNear) {
    EMIT(0x70 | cc);
    emit_near_disp(L);
  } else {
    auto jump_opt = jump_optimization_info();
    if (V8_UNLIKELY(jump_opt)) {
      if (jump_opt->is_optimizing() && is_optimizable_farjmp(farjmp_num_++)) {
        // 0111 tttn #8-bit disp
        EMIT(0x70 | cc);
        record_farjmp_position(L, pc_offset());
        EMIT(0);  // Placeholder rel8, patched in bind_to.
        return;
      }
      if (jump_opt->is_collecting()) {
        // Remember where the 32-bit displacement will live.
        farjmp_positions_.push_back(pc_offset() + 2);
      }
    }
    // 0000 1111 1000 tttn #32-bit disp
    // Note: could eliminate cond. jumps to this jump if condition
    //       is the same however, seems to be rather unlikely case.
    EMIT(0x0F);
    EMIT(0x80 | cc);
    emit_disp(L, Displacement::OTHER);
  }
}
1786
// Conditional jump to an address: 0F 80+cc rel32. Runtime entries are
// emitted as relocated values; other targets become pc-relative.
void Assembler::j(Condition cc, byte* entry, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  DCHECK((0 <= cc) && (static_cast<int>(cc) < 16));
  // 0000 1111 1000 tttn #32-bit disp.
  EMIT(0x0F);
  EMIT(0x80 | cc);
  if (RelocInfo::IsRuntimeEntry(rmode)) {
    emit(reinterpret_cast<uint32_t>(entry), rmode);
  } else {
    emit(entry - (pc_ + sizeof(int32_t)), rmode);
  }
}

// Conditional jump to a Code object with a code-target relocation.
void Assembler::j(Condition cc, Handle<Code> code, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  // 0000 1111 1000 tttn #32-bit disp
  EMIT(0x0F);
  EMIT(0x80 | cc);
  emit(code, rmode);
}
1807
1808// FPU instructions.
1809
// Pushes st(i) onto the FPU stack. Encoding: D9 C0+i.
void Assembler::fld(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD9, 0xC0, i);
}

// Stores st(0) to st(i) and pops. Encoding: DD D8+i.
void Assembler::fstp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDD, 0xD8, i);
}

// Pushes +1.0. Encoding: D9 E8.
void Assembler::fld1() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xE8);
}

// Pushes pi. Encoding: D9 EB.
void Assembler::fldpi() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xEB);
}

// Pushes +0.0. Encoding: D9 EE.
void Assembler::fldz() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xEE);
}

// Pushes ln(2). Encoding: D9 ED.
void Assembler::fldln2() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xED);
}
1843
// Pushes the single-precision float at |adr|. Encoding: D9 /0.
void Assembler::fld_s(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  emit_operand(eax, adr);
}

// Pushes the double-precision float at |adr|. Encoding: DD /0.
void Assembler::fld_d(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDD);
  emit_operand(eax, adr);
}

// Stores st(0) as single precision to |adr| and pops. Encoding: D9 /3.
void Assembler::fstp_s(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  emit_operand(ebx, adr);
}

// Stores st(0) as single precision to |adr| (no pop). Encoding: D9 /2.
void Assembler::fst_s(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  emit_operand(edx, adr);
}

// Stores st(0) as double precision to |adr| and pops. Encoding: DD /3.
void Assembler::fstp_d(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDD);
  emit_operand(ebx, adr);
}

// Stores st(0) as double precision to |adr| (no pop). Encoding: DD /2.
void Assembler::fst_d(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDD);
  emit_operand(edx, adr);
}

// Pushes the 32-bit integer at |adr|. Encoding: DB /0.
void Assembler::fild_s(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  emit_operand(eax, adr);
}

// Pushes the 64-bit integer at |adr|. Encoding: DF /5.
void Assembler::fild_d(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDF);
  emit_operand(ebp, adr);
}

// Stores st(0) as a 32-bit integer (rounded) and pops. Encoding: DB /3.
void Assembler::fistp_s(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  emit_operand(ebx, adr);
}

// Stores st(0) as a 32-bit integer with truncation and pops (SSE3).
// Encoding: DB /1.
void Assembler::fisttp_s(Operand adr) {
  DCHECK(IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  emit_operand(ecx, adr);
}

// Stores st(0) as a 64-bit integer with truncation and pops (SSE3).
// Encoding: DD /1.
void Assembler::fisttp_d(Operand adr) {
  DCHECK(IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  EMIT(0xDD);
  emit_operand(ecx, adr);
}

// Stores st(0) as a 32-bit integer (rounded, no pop). Encoding: DB /2.
void Assembler::fist_s(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  emit_operand(edx, adr);
}

// Stores st(0) as a 64-bit integer (rounded) and pops. Encoding: DF /7.
void Assembler::fistp_d(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDF);
  emit_operand(edi, adr);
}
1923
// st(0) = |st(0)|. Encoding: D9 E1.
void Assembler::fabs() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xE1);
}

// st(0) = -st(0). Encoding: D9 E0.
void Assembler::fchs() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xE0);
}

// st(0) = cos(st(0)). Encoding: D9 FF.
void Assembler::fcos() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xFF);
}

// st(0) = sin(st(0)). Encoding: D9 FE.
void Assembler::fsin() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xFE);
}

// st(0) = tan(st(0)), then pushes 1.0. Encoding: D9 F2.
void Assembler::fptan() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xF2);
}

// st(1) = st(1) * log2(st(0)), pops. Encoding: D9 F1.
void Assembler::fyl2x() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xF1);
}

// st(0) = 2^st(0) - 1. Encoding: D9 F0.
void Assembler::f2xm1() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xF0);
}

// st(0) = st(0) * 2^trunc(st(1)). Encoding: D9 FD.
void Assembler::fscale() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xFD);
}

// Initializes the FPU without checking pending exceptions.
// Encoding: DB E3.
void Assembler::fninit() {
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  EMIT(0xE3);
}
1977
// st(i) = st(i) + st(0). Encoding: DC C0+i.
void Assembler::fadd(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xC0, i);
}

// st(0) = st(0) + st(i). Encoding: D8 C0+i.
void Assembler::fadd_i(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD8, 0xC0, i);
}

// st(i) = st(i) - st(0). Encoding: DC E8+i.
void Assembler::fsub(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xE8, i);
}

// st(0) = st(0) - st(i). Encoding: D8 E0+i.
void Assembler::fsub_i(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD8, 0xE0, i);
}

// st(0) = st(0) - the 32-bit integer at |adr|. Encoding: DA /4.
void Assembler::fisub_s(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDA);
  emit_operand(esp, adr);
}

// st(0) = st(0) * st(i). Encoding: D8 C8+i.
void Assembler::fmul_i(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD8, 0xC8, i);
}

// st(i) = st(i) * st(0). Encoding: DC C8+i.
void Assembler::fmul(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xC8, i);
}

// st(i) = st(i) / st(0). Encoding: DC F8+i.
void Assembler::fdiv(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xF8, i);
}

// st(0) = st(0) / st(i). Encoding: D8 F0+i.
void Assembler::fdiv_i(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD8, 0xF0, i);
}

// st(i) = st(i) + st(0), then pops. Encoding: DE C0+i.
void Assembler::faddp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xC0, i);
}

// st(i) = st(i) - st(0), then pops. Encoding: DE E8+i.
void Assembler::fsubp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xE8, i);
}

// st(i) = st(0) - st(i), then pops. Encoding: DE E0+i.
void Assembler::fsubrp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xE0, i);
}

// st(i) = st(i) * st(0), then pops. Encoding: DE C8+i.
void Assembler::fmulp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xC8, i);
}

// st(i) = st(i) / st(0), then pops. Encoding: DE F8+i.
void Assembler::fdivp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xF8, i);
}
2048
// Partial remainder of st(0) / st(1) (truncating). Encoding: D9 F8.
void Assembler::fprem() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xF8);
}

// IEEE partial remainder of st(0) / st(1). Encoding: D9 F5.
void Assembler::fprem1() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xF5);
}

// Exchanges st(0) and st(i). Encoding: D9 C8+i.
void Assembler::fxch(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD9, 0xC8, i);
}

// Increments the FPU stack-top pointer. Encoding: D9 F7.
void Assembler::fincstp() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xF7);
}

// Marks st(i) as empty. Encoding: DD C0+i.
void Assembler::ffree(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDD, 0xC0, i);
}

// Compares st(0) with 0.0, setting FPU condition codes. Encoding: D9 E4.
void Assembler::ftst() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xE4);
}

// Unordered compare of st(0) with st(i), then pops. Encoding: DD E8+i.
void Assembler::fucomp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDD, 0xE8, i);
}

// Unordered compare of st(0) with st(1), popping both. Encoding: DA E9.
void Assembler::fucompp() {
  EnsureSpace ensure_space(this);
  EMIT(0xDA);
  EMIT(0xE9);
}

// Unordered compare of st(0) with st(i), setting EFLAGS directly.
// Encoding: DB E8+i.
void Assembler::fucomi(int i) {
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  EMIT(0xE8 + i);
}

// As fucomi with st(1), then pops. Encoding: DF E9.
void Assembler::fucomip() {
  EnsureSpace ensure_space(this);
  EMIT(0xDF);
  EMIT(0xE9);
}

// Compares st(0) with st(1), popping both. Encoding: DE D9.
void Assembler::fcompp() {
  EnsureSpace ensure_space(this);
  EMIT(0xDE);
  EMIT(0xD9);
}
2111
// Stores the FPU status word into ax (no exception check).
// Encoding: DF E0.
void Assembler::fnstsw_ax() {
  EnsureSpace ensure_space(this);
  EMIT(0xDF);
  EMIT(0xE0);
}

// Waits for pending FPU exceptions. Encoding: 9B.
void Assembler::fwait() {
  EnsureSpace ensure_space(this);
  EMIT(0x9B);
}

// Rounds st(0) to an integer per the current rounding mode.
// Encoding: D9 FC.
void Assembler::frndint() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xFC);
}

// Clears FPU exception flags without checking them. Encoding: DB E2.
void Assembler::fnclex() {
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  EMIT(0xE2);
}

// Loads ah into the low byte of EFLAGS. Encoding: 9E.
void Assembler::sahf() {
  EnsureSpace ensure_space(this);
  EMIT(0x9E);
}

// Sets the byte register |reg| to 1 if condition |cc| holds, else 0.
// Encoding: 0F 90+cc with a register-direct ModRM byte.
void Assembler::setcc(Condition cc, Register reg) {
  DCHECK(reg.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x90 | cc);
  EMIT(0xC0 | reg.code());
}
2147
// Converts the scalar single at |src| to int32 with truncation.
// Encoding: F3 0F 2C /r.
void Assembler::cvttss2si(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  // The [src] might contain ebx's register code, but in
  // this case, it refers to xmm3, so it is OK to emit.
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x2C);
  emit_operand(dst, src);
}

// Converts the scalar double at |src| to int32 with truncation.
// Encoding: F2 0F 2C /r.
void Assembler::cvttsd2si(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  // The [src] might contain ebx's register code, but in
  // this case, it refers to xmm3, so it is OK to emit.
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x2C);
  emit_operand(dst, src);
}
2167
// Converts scalar double to int32 (current rounding mode).
// Encoding: F2 0F 2D /r.
void Assembler::cvtsd2si(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x2D);
  emit_sse_operand(dst, src);
}

// Converts int32 to scalar single. Encoding: F3 0F 2A /r.
void Assembler::cvtsi2ss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x2A);
  emit_sse_operand(dst, src);
}

// Converts int32 to scalar double. Encoding: F2 0F 2A /r.
void Assembler::cvtsi2sd(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x2A);
  emit_sse_operand(dst, src);
}

// Converts scalar single to scalar double. Encoding: F3 0F 5A /r.
void Assembler::cvtss2sd(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x5A);
  emit_sse_operand(dst, src);
}

// Converts packed int32 to packed double. Encoding: F3 0F E6 /r.
void Assembler::cvtdq2pd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0xE6);
  emit_sse_operand(dst, src);
}

// Converts packed double to packed single. Encoding: 66 0F 5A /r.
void Assembler::cvtpd2ps(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x5A);
  emit_sse_operand(dst, src);
}

// Converts packed single to packed int32 with truncation.
// Encoding: F3 0F 5B /r.
void Assembler::cvttps2dq(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x5B);
  emit_sse_operand(dst, src);
}

// Converts packed double to packed int32 with truncation.
// Encoding: 66 0F E6 /r.
void Assembler::cvttpd2dq(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xE6);
  emit_sse_operand(dst, src);
}
2231
// Packed single compare; |cmp| selects the predicate (eq/lt/le/...).
// Encoding: 0F C2 /r ib.
void Assembler::cmpps(XMMRegister dst, Operand src, uint8_t cmp) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xC2);
  emit_sse_operand(dst, src);
  EMIT(cmp);
}

// Packed double compare; |cmp| selects the predicate.
// Encoding: 66 0F C2 /r ib.
void Assembler::cmppd(XMMRegister dst, Operand src, uint8_t cmp) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xC2);
  emit_sse_operand(dst, src);
  EMIT(cmp);
}

// Horizontal add of packed singles (SSE3). Encoding: F2 0F 7C /r.
void Assembler::haddps(XMMRegister dst, Operand src) {
  DCHECK(IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x7C);
  emit_sse_operand(dst, src);
}

// Unordered scalar double compare, setting EFLAGS.
// Encoding: 66 0F 2E /r.
void Assembler::ucomisd(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x2E);
  emit_sse_operand(dst, src);
}
2265
// Rounds packed singles per |mode| (SSE4.1). Encoding: 66 0F 3A 08 /r ib.
void Assembler::roundps(XMMRegister dst, XMMRegister src, RoundingMode mode) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x08);
  emit_sse_operand(dst, src);
  // Mask precision exception (bit 3 of the immediate suppresses #P).
  EMIT(static_cast<byte>(mode) | 0x8);
}

// Rounds packed doubles per |mode| (SSE4.1). Encoding: 66 0F 3A 09 /r ib.
void Assembler::roundpd(XMMRegister dst, XMMRegister src, RoundingMode mode) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x09);
  emit_sse_operand(dst, src);
  // Mask precision exception (bit 3 of the immediate suppresses #P).
  EMIT(static_cast<byte>(mode) | 0x8);
}

// Rounds the scalar single per |mode| (SSE4.1). Encoding: 66 0F 3A 0A /r ib.
void Assembler::roundss(XMMRegister dst, XMMRegister src, RoundingMode mode) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x0A);
  emit_sse_operand(dst, src);
  // Mask precision exception (bit 3 of the immediate suppresses #P).
  EMIT(static_cast<byte>(mode) | 0x8);
}

// Rounds the scalar double per |mode| (SSE4.1). Encoding: 66 0F 3A 0B /r ib.
void Assembler::roundsd(XMMRegister dst, XMMRegister src, RoundingMode mode) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x0B);
  emit_sse_operand(dst, src);
  // Mask precision exception (bit 3 of the immediate suppresses #P).
  EMIT(static_cast<byte>(mode) | 0x8);
}
2313
// movmskpd r32, xmm: extract the two double sign bits into a GP register.
// Encoding: 66 0F 50 /r.
void Assembler::movmskpd(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x50);
  emit_sse_operand(dst, src);
}

// movmskps r32, xmm: extract the four single sign bits. Encoding: 0F 50 /r.
void Assembler::movmskps(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x50);
  emit_sse_operand(dst, src);
}

// pmovmskb r32, xmm: extract the 16 byte sign bits. Encoding: 66 0F D7 /r.
void Assembler::pmovmskb(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xD7);
  emit_sse_operand(dst, src);
}

// cmpltsd dst, src: cmpsd with the less-than predicate hard-wired.
// Encoding: F2 0F C2 /r 01.
void Assembler::cmpltsd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0xC2);
  emit_sse_operand(dst, src);
  EMIT(1);  // LT == 1
}
2345
// movaps xmm, xmm/m128: aligned packed-single load. Encoding: 0F 28 /r.
void Assembler::movaps(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x28);
  emit_sse_operand(dst, src);
}

// movups xmm, xmm/m128: unaligned packed-single load. Encoding: 0F 10 /r.
void Assembler::movups(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x10);
  emit_sse_operand(dst, src);
}

// movups xmm/m128, xmm: unaligned packed-single store. Encoding: 0F 11 /r.
// Note the operands are swapped for the ModR/M byte (store form).
void Assembler::movups(Operand dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x11);
  emit_sse_operand(src, dst);
}

// movddup xmm, xmm/m64: duplicate the low double (requires SSE3).
// Encoding: F2 0F 12 /r.
void Assembler::movddup(XMMRegister dst, Operand src) {
  DCHECK(IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x12);
  emit_sse_operand(dst, src);
}

// movshdup xmm, xmm: duplicate odd-indexed singles (requires SSE3).
// Encoding: F3 0F 16 /r.
void Assembler::movshdup(XMMRegister dst, XMMRegister src) {
  DCHECK(IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x16);
  emit_sse_operand(dst, src);
}

// shufps xmm, xmm, imm8: shuffle packed singles. Encoding: 0F C6 /r ib.
void Assembler::shufps(XMMRegister dst, XMMRegister src, byte imm8) {
  DCHECK(is_uint8(imm8));
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xC6);
  emit_sse_operand(dst, src);
  EMIT(imm8);
}

// shufpd xmm, xmm, imm8: shuffle packed doubles. Encoding: 66 0F C6 /r ib.
void Assembler::shufpd(XMMRegister dst, XMMRegister src, byte imm8) {
  DCHECK(is_uint8(imm8));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xC6);
  emit_sse_operand(dst, src);
  EMIT(imm8);
}

// movhlps: move high quadword of src to low quadword of dst.
// Encoding: 0F 12 /r (register form).
void Assembler::movhlps(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x12);
  emit_sse_operand(dst, src);
}

// movlhps: move low quadword of src to high quadword of dst.
// Encoding: 0F 16 /r (register form).
void Assembler::movlhps(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x16);
  emit_sse_operand(dst, src);
}

// movlps xmm, m64: load into the low quadword. Encoding: 0F 12 /r.
void Assembler::movlps(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x12);
  emit_sse_operand(dst, src);
}

// movlps m64, xmm: store the low quadword. Encoding: 0F 13 /r (store form,
// operands swapped for ModR/M).
void Assembler::movlps(Operand dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x13);
  emit_sse_operand(src, dst);
}

// movhps xmm, m64: load into the high quadword. Encoding: 0F 16 /r.
void Assembler::movhps(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x16);
  emit_sse_operand(dst, src);
}

// movhps m64, xmm: store the high quadword. Encoding: 0F 17 /r (store form,
// operands swapped for ModR/M).
void Assembler::movhps(Operand dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x17);
  emit_sse_operand(src, dst);
}
2445
// movdqa m128, xmm: aligned 128-bit store. Encoding: 66 0F 7F /r
// (store form, operands swapped for ModR/M).
void Assembler::movdqa(Operand dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x7F);
  emit_sse_operand(src, dst);
}

// movdqa xmm, m128: aligned 128-bit load. Encoding: 66 0F 6F /r.
void Assembler::movdqa(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x6F);
  emit_sse_operand(dst, src);
}

// movdqa xmm, xmm: register-to-register move, load form. 66 0F 6F /r.
void Assembler::movdqa(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x6F);
  emit_sse_operand(dst, src);
}

// movdqu m128, xmm: unaligned 128-bit store. Encoding: F3 0F 7F /r
// (store form, operands swapped for ModR/M).
void Assembler::movdqu(Operand dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x7F);
  emit_sse_operand(src, dst);
}

// movdqu xmm, m128: unaligned 128-bit load. Encoding: F3 0F 6F /r.
void Assembler::movdqu(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x6F);
  emit_sse_operand(dst, src);
}

// movdqu xmm, xmm: register-to-register move using the store form
// (F3 0F 7F), hence src/dst swapped in the ModR/M byte.
void Assembler::movdqu(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x7F);
  emit_sse_operand(src, dst);
}

// prefetch m8, level: prefetch hint. Encoding: 0F 18 /level, where the
// locality level (0..3) occupies the reg field of the ModR/M byte.
void Assembler::prefetch(Operand src, int level) {
  DCHECK(is_uint2(level));
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x18);
  // Emit hint number in Reg position of RegR/M.
  XMMRegister code = XMMRegister::from_code(level);
  emit_sse_operand(code, src);
}
2503
// movsd m64, xmm: scalar double store. Encoding: F2 0F 11 /r
// (store form, operands swapped for ModR/M).
void Assembler::movsd(Operand dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF2);  // double
  EMIT(0x0F);
  EMIT(0x11);  // store
  emit_sse_operand(src, dst);
}

// movsd xmm, m64: scalar double load. Encoding: F2 0F 10 /r.
void Assembler::movsd(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF2);  // double
  EMIT(0x0F);
  EMIT(0x10);  // load
  emit_sse_operand(dst, src);
}

// movss m32, xmm: scalar single store. Encoding: F3 0F 11 /r
// (store form, operands swapped for ModR/M).
void Assembler::movss(Operand dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);  // float
  EMIT(0x0F);
  EMIT(0x11);  // store
  emit_sse_operand(src, dst);
}

// movss xmm, m32: scalar single load. Encoding: F3 0F 10 /r.
void Assembler::movss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);  // float
  EMIT(0x0F);
  EMIT(0x10);  // load
  emit_sse_operand(dst, src);
}

// movd xmm, r/m32: move 32 bits into the low dword. Encoding: 66 0F 6E /r.
void Assembler::movd(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x6E);
  emit_sse_operand(dst, src);
}

// movd r/m32, xmm: store the low dword. Encoding: 66 0F 7E /r
// (store form, operands swapped for ModR/M).
void Assembler::movd(Operand dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x7E);
  emit_sse_operand(src, dst);
}

// extractps m32, xmm, imm8: extract the single selected by imm8 (SSE4.1).
// Encoding: 66 0F 3A 17 /r ib.
void Assembler::extractps(Operand dst, XMMRegister src, byte imm8) {
  DCHECK(IsEnabled(SSE4_1));
  DCHECK(is_uint8(imm8));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x17);
  emit_sse_operand(src, dst);
  EMIT(imm8);
}

// extractps r32, xmm, imm8: register-destination variant (SSE4.1).
void Assembler::extractps(Register dst, XMMRegister src, byte imm8) {
  DCHECK(IsEnabled(SSE4_1));
  DCHECK(is_uint8(imm8));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x17);
  emit_sse_operand(src, dst);
  EMIT(imm8);
}
2575
// pcmpgtq xmm, xmm: packed signed 64-bit greater-than (requires SSE4.2).
// Encoding: 66 0F 38 37 /r.
void Assembler::pcmpgtq(XMMRegister dst, XMMRegister src) {
  DCHECK(IsEnabled(SSE4_2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x38);
  EMIT(0x37);
  emit_sse_operand(dst, src);
}

// The immediate-shift group below encodes the opcode extension (/2, /4, /6)
// by placing a GP register with that code in the reg field of ModR/M.

// psllw xmm, imm8: shift words left. Encoding: 66 0F 71 /6 ib.
void Assembler::psllw(XMMRegister reg, uint8_t shift) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x71);
  emit_sse_operand(esi, reg);  // esi == 6
  EMIT(shift);
}

// pslld xmm, imm8: shift dwords left. Encoding: 66 0F 72 /6 ib.
void Assembler::pslld(XMMRegister reg, uint8_t shift) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x72);
  emit_sse_operand(esi, reg);  // esi == 6
  EMIT(shift);
}

// psrlw xmm, imm8: logical shift words right. Encoding: 66 0F 71 /2 ib.
void Assembler::psrlw(XMMRegister reg, uint8_t shift) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x71);
  emit_sse_operand(edx, reg);  // edx == 2
  EMIT(shift);
}

// psrld xmm, imm8: logical shift dwords right. Encoding: 66 0F 72 /2 ib.
void Assembler::psrld(XMMRegister reg, uint8_t shift) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x72);
  emit_sse_operand(edx, reg);  // edx == 2
  EMIT(shift);
}

// psraw xmm, imm8: arithmetic shift words right. Encoding: 66 0F 71 /4 ib.
void Assembler::psraw(XMMRegister reg, uint8_t shift) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x71);
  emit_sse_operand(esp, reg);  // esp == 4
  EMIT(shift);
}

// psrad xmm, imm8: arithmetic shift dwords right. Encoding: 66 0F 72 /4 ib.
void Assembler::psrad(XMMRegister reg, uint8_t shift) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x72);
  emit_sse_operand(esp, reg);  // esp == 4
  EMIT(shift);
}

// psllq xmm, imm8: shift qwords left. Encoding: 66 0F 73 /6 ib.
void Assembler::psllq(XMMRegister reg, uint8_t shift) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x73);
  emit_sse_operand(esi, reg);  // esi == 6
  EMIT(shift);
}

// psrlq xmm, imm8: logical shift qwords right. Encoding: 66 0F 73 /2 ib.
void Assembler::psrlq(XMMRegister reg, uint8_t shift) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x73);
  emit_sse_operand(edx, reg);  // edx == 2
  EMIT(shift);
}
2657
// pshufhw xmm, xmm/m128, imm8: shuffle high words. Encoding: F3 0F 70 /r ib.
void Assembler::pshufhw(XMMRegister dst, Operand src, uint8_t shuffle) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x70);
  emit_sse_operand(dst, src);
  EMIT(shuffle);
}

// pshuflw xmm, xmm/m128, imm8: shuffle low words. Encoding: F2 0F 70 /r ib.
void Assembler::pshuflw(XMMRegister dst, Operand src, uint8_t shuffle) {
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x70);
  emit_sse_operand(dst, src);
  EMIT(shuffle);
}

// pshufd xmm, xmm/m128, imm8: shuffle dwords. Encoding: 66 0F 70 /r ib.
void Assembler::pshufd(XMMRegister dst, Operand src, uint8_t shuffle) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x70);
  emit_sse_operand(dst, src);
  EMIT(shuffle);
}

// pblendw xmm, xmm/m128, imm8: word-wise blend (SSE4.1).
// Encoding: 66 0F 3A 0E /r ib.
void Assembler::pblendw(XMMRegister dst, Operand src, uint8_t mask) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x0E);
  emit_sse_operand(dst, src);
  EMIT(mask);
}

// palignr xmm, xmm/m128, imm8: byte-wise concatenate-and-shift (SSSE3).
// Encoding: 66 0F 3A 0F /r ib.
void Assembler::palignr(XMMRegister dst, Operand src, uint8_t mask) {
  DCHECK(IsEnabled(SSSE3));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x0F);
  emit_sse_operand(dst, src);
  EMIT(mask);
}

// pextrb r/m8, xmm, imm8: extract byte (SSE4.1). 66 0F 3A 14 /r ib.
// Store form: operands swapped for ModR/M.
void Assembler::pextrb(Operand dst, XMMRegister src, uint8_t offset) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x14);
  emit_sse_operand(src, dst);
  EMIT(offset);
}

// pextrw r/m16, xmm, imm8: extract word (SSE4.1). 66 0F 3A 15 /r ib.
void Assembler::pextrw(Operand dst, XMMRegister src, uint8_t offset) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x15);
  emit_sse_operand(src, dst);
  EMIT(offset);
}

// pextrd r/m32, xmm, imm8: extract dword (SSE4.1). 66 0F 3A 16 /r ib.
void Assembler::pextrd(Operand dst, XMMRegister src, uint8_t offset) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x16);
  emit_sse_operand(src, dst);
  EMIT(offset);
}

// insertps xmm, xmm/m32, imm8: insert a single (SSE4.1). 66 0F 3A 21 /r ib.
void Assembler::insertps(XMMRegister dst, Operand src, uint8_t offset) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x21);
  emit_sse_operand(dst, src);
  EMIT(offset);
}

// pinsrb xmm, r/m8, imm8: insert byte (SSE4.1). 66 0F 3A 20 /r ib.
void Assembler::pinsrb(XMMRegister dst, Operand src, uint8_t offset) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x20);
  emit_sse_operand(dst, src);
  EMIT(offset);
}

// pinsrw xmm, r/m16, imm8: insert word (SSE2, so no feature check).
// Encoding: 66 0F C4 /r ib.
void Assembler::pinsrw(XMMRegister dst, Operand src, uint8_t offset) {
  DCHECK(is_uint8(offset));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xC4);
  emit_sse_operand(dst, src);
  EMIT(offset);
}

// pinsrd xmm, r/m32, imm8: insert dword (SSE4.1). 66 0F 3A 22 /r ib.
void Assembler::pinsrd(XMMRegister dst, Operand src, uint8_t offset) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x22);
  emit_sse_operand(dst, src);
  EMIT(offset);
}
2782
// Scalar single-precision arithmetic: all use the F3 0F prefix pair
// followed by the operation opcode.

// addss xmm, xmm/m32. Encoding: F3 0F 58 /r.
void Assembler::addss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x58);
  emit_sse_operand(dst, src);
}

// subss xmm, xmm/m32. Encoding: F3 0F 5C /r.
void Assembler::subss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x5C);
  emit_sse_operand(dst, src);
}

// mulss xmm, xmm/m32. Encoding: F3 0F 59 /r.
void Assembler::mulss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x59);
  emit_sse_operand(dst, src);
}

// divss xmm, xmm/m32. Encoding: F3 0F 5E /r.
void Assembler::divss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x5E);
  emit_sse_operand(dst, src);
}

// sqrtss xmm, xmm/m32. Encoding: F3 0F 51 /r.
void Assembler::sqrtss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x51);
  emit_sse_operand(dst, src);
}

// ucomiss xmm, xmm/m32: unordered scalar single compare, sets EFLAGS.
// Encoding: 0F 2E /r.
void Assembler::ucomiss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x2E);
  emit_sse_operand(dst, src);
}

// maxss xmm, xmm/m32. Encoding: F3 0F 5F /r.
void Assembler::maxss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x5F);
  emit_sse_operand(dst, src);
}

// minss xmm, xmm/m32. Encoding: F3 0F 5D /r.
void Assembler::minss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x5D);
  emit_sse_operand(dst, src);
}

// Packed single-precision floating-point SSE instructions.
// Generic emitter: 0F <opcode> /r.
void Assembler::ps(byte opcode, XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(opcode);
  emit_sse_operand(dst, src);
}

// Packed double-precision floating-point SSE instructions.
// Generic emitter: 66 0F <opcode> /r.
void Assembler::pd(byte opcode, XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(opcode);
  emit_sse_operand(dst, src);
}
2862
2863// AVX instructions
2864
// AVX instructions

// Generic VEX-encoded scalar-single emitter (F3 prefix, 0F map).
void Assembler::vss(byte op, XMMRegister dst, XMMRegister src1, Operand src2) {
  vinstr(op, dst, src1, src2, kF3, k0F, kWIG);
}

// Generic VEX-encoded packed-single emitter (no SIMD prefix, 0F map).
void Assembler::vps(byte op, XMMRegister dst, XMMRegister src1, Operand src2) {
  vinstr(op, dst, src1, src2, kNoPrefix, k0F, kWIG);
}

// Generic VEX-encoded packed-double emitter (66 prefix, 0F map).
void Assembler::vpd(byte op, XMMRegister dst, XMMRegister src1, Operand src2) {
  vinstr(op, dst, src1, src2, k66, k0F, kWIG);
}

// vshufpd dst, src1, src2, imm8: VEX shufpd (opcode C6) plus immediate.
void Assembler::vshufpd(XMMRegister dst, XMMRegister src1, Operand src2,
                        byte imm8) {
  DCHECK(is_uint8(imm8));
  vpd(0xC6, dst, src1, src2);
  EMIT(imm8);
}

// vmovhlps dst, src1, src2: VEX form of movhlps (0F 12, register form).
void Assembler::vmovhlps(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
  vinstr(0x12, dst, src1, src2, kNoPrefix, k0F, kWIG);
}

// vmovlhps dst, src1, src2: VEX form of movlhps (0F 16, register form).
void Assembler::vmovlhps(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
  vinstr(0x16, dst, src1, src2, kNoPrefix, k0F, kWIG);
}

// vmovlps dst, src1, m64: VEX low-quadword load (0F 12).
void Assembler::vmovlps(XMMRegister dst, XMMRegister src1, Operand src2) {
  vinstr(0x12, dst, src1, src2, kNoPrefix, k0F, kWIG);
}

// vmovlps m64, src: VEX low-quadword store (0F 13). xmm0 fills the unused
// VEX.vvvv slot.
void Assembler::vmovlps(Operand dst, XMMRegister src) {
  vinstr(0x13, src, xmm0, dst, kNoPrefix, k0F, kWIG);
}

// vmovhps dst, src1, m64: VEX high-quadword load (0F 16).
void Assembler::vmovhps(XMMRegister dst, XMMRegister src1, Operand src2) {
  vinstr(0x16, dst, src1, src2, kNoPrefix, k0F, kWIG);
}

// vmovhps m64, src: VEX high-quadword store (0F 17). xmm0 fills the unused
// VEX.vvvv slot.
void Assembler::vmovhps(Operand dst, XMMRegister src) {
  vinstr(0x17, src, xmm0, dst, kNoPrefix, k0F, kWIG);
}

// vcmpps dst, src1, src2, cmp: VEX packed-single compare (C2) + predicate.
void Assembler::vcmpps(XMMRegister dst, XMMRegister src1, Operand src2,
                       uint8_t cmp) {
  vps(0xC2, dst, src1, src2);
  EMIT(cmp);
}

// vcmppd dst, src1, src2, cmp: VEX packed-double compare (C2) + predicate.
void Assembler::vcmppd(XMMRegister dst, XMMRegister src1, Operand src2,
                       uint8_t cmp) {
  vpd(0xC2, dst, src1, src2);
  EMIT(cmp);
}

// vshufps dst, src1, src2, imm8: VEX shufps (C6) plus immediate.
void Assembler::vshufps(XMMRegister dst, XMMRegister src1, Operand src2,
                        byte imm8) {
  DCHECK(is_uint8(imm8));
  vps(0xC6, dst, src1, src2);
  EMIT(imm8);
}
2926
// VEX immediate shifts: the opcode extension (/6, /2, /4) is carried by a
// pseudo XMM register in the ModR/M reg field, while the destination goes
// into VEX.vvvv.

// vpsllw dst, src, imm8: VEX 66 0F 71 /6 ib.
void Assembler::vpsllw(XMMRegister dst, XMMRegister src, uint8_t imm8) {
  XMMRegister iop = XMMRegister::from_code(6);
  vinstr(0x71, iop, dst, Operand(src), k66, k0F, kWIG);
  EMIT(imm8);
}

// vpslld dst, src, imm8: VEX 66 0F 72 /6 ib.
void Assembler::vpslld(XMMRegister dst, XMMRegister src, uint8_t imm8) {
  XMMRegister iop = XMMRegister::from_code(6);
  vinstr(0x72, iop, dst, Operand(src), k66, k0F, kWIG);
  EMIT(imm8);
}

// vpsllq dst, src, imm8: VEX 66 0F 73 /6 ib.
void Assembler::vpsllq(XMMRegister dst, XMMRegister src, uint8_t imm8) {
  XMMRegister iop = XMMRegister::from_code(6);
  vinstr(0x73, iop, dst, Operand(src), k66, k0F, kWIG);
  EMIT(imm8);
}

// vpsrlw dst, src, imm8: VEX 66 0F 71 /2 ib.
void Assembler::vpsrlw(XMMRegister dst, XMMRegister src, uint8_t imm8) {
  XMMRegister iop = XMMRegister::from_code(2);
  vinstr(0x71, iop, dst, Operand(src), k66, k0F, kWIG);
  EMIT(imm8);
}

// vpsrld dst, src, imm8: VEX 66 0F 72 /2 ib.
void Assembler::vpsrld(XMMRegister dst, XMMRegister src, uint8_t imm8) {
  XMMRegister iop = XMMRegister::from_code(2);
  vinstr(0x72, iop, dst, Operand(src), k66, k0F, kWIG);
  EMIT(imm8);
}

// vpsrlq dst, src, imm8: VEX 66 0F 73 /2 ib.
void Assembler::vpsrlq(XMMRegister dst, XMMRegister src, uint8_t imm8) {
  XMMRegister iop = XMMRegister::from_code(2);
  vinstr(0x73, iop, dst, Operand(src), k66, k0F, kWIG);
  EMIT(imm8);
}

// vpsraw dst, src, imm8: VEX 66 0F 71 /4 ib.
void Assembler::vpsraw(XMMRegister dst, XMMRegister src, uint8_t imm8) {
  XMMRegister iop = XMMRegister::from_code(4);
  vinstr(0x71, iop, dst, Operand(src), k66, k0F, kWIG);
  EMIT(imm8);
}

// vpsrad dst, src, imm8: VEX 66 0F 72 /4 ib.
void Assembler::vpsrad(XMMRegister dst, XMMRegister src, uint8_t imm8) {
  XMMRegister iop = XMMRegister::from_code(4);
  vinstr(0x72, iop, dst, Operand(src), k66, k0F, kWIG);
  EMIT(imm8);
}

// vpshufhw dst, src, imm8: VEX F3 0F 70 /r ib. vvvv unused (xmm0).
void Assembler::vpshufhw(XMMRegister dst, Operand src, uint8_t shuffle) {
  vinstr(0x70, dst, xmm0, src, kF3, k0F, kWIG);
  EMIT(shuffle);
}

// vpshuflw dst, src, imm8: VEX F2 0F 70 /r ib. vvvv unused (xmm0).
void Assembler::vpshuflw(XMMRegister dst, Operand src, uint8_t shuffle) {
  vinstr(0x70, dst, xmm0, src, kF2, k0F, kWIG);
  EMIT(shuffle);
}

// vpshufd dst, src, imm8: VEX 66 0F 70 /r ib. vvvv unused (xmm0).
void Assembler::vpshufd(XMMRegister dst, Operand src, uint8_t shuffle) {
  vinstr(0x70, dst, xmm0, src, k66, k0F, kWIG);
  EMIT(shuffle);
}
2989
// The vblendv* family encodes the mask register in the high nibble of a
// trailing immediate (the is4 byte).

// vblendvps dst, src1, src2, mask: VEX.66.0F3A.W0 4A /r is4.
void Assembler::vblendvps(XMMRegister dst, XMMRegister src1, XMMRegister src2,
                          XMMRegister mask) {
  vinstr(0x4A, dst, src1, src2, k66, k0F3A, kW0);
  EMIT(mask.code() << 4);
}

// vblendvpd dst, src1, src2, mask: VEX.66.0F3A.W0 4B /r is4.
void Assembler::vblendvpd(XMMRegister dst, XMMRegister src1, XMMRegister src2,
                          XMMRegister mask) {
  vinstr(0x4B, dst, src1, src2, k66, k0F3A, kW0);
  EMIT(mask.code() << 4);
}

// vpblendvb dst, src1, src2, mask: VEX.66.0F3A.W0 4C /r is4.
void Assembler::vpblendvb(XMMRegister dst, XMMRegister src1, XMMRegister src2,
                          XMMRegister mask) {
  vinstr(0x4C, dst, src1, src2, k66, k0F3A, kW0);
  EMIT(mask.code() << 4);
}

// vpblendw dst, src1, src2, imm8: VEX.66.0F3A 0E /r ib.
void Assembler::vpblendw(XMMRegister dst, XMMRegister src1, Operand src2,
                         uint8_t mask) {
  vinstr(0x0E, dst, src1, src2, k66, k0F3A, kWIG);
  EMIT(mask);
}

// vpalignr dst, src1, src2, imm8: VEX.66.0F3A 0F /r ib.
void Assembler::vpalignr(XMMRegister dst, XMMRegister src1, Operand src2,
                         uint8_t mask) {
  vinstr(0x0F, dst, src1, src2, k66, k0F3A, kWIG);
  EMIT(mask);
}

// vpextrb m8, src, imm8: VEX.66.0F3A 14 /r ib. vvvv unused (xmm0).
void Assembler::vpextrb(Operand dst, XMMRegister src, uint8_t offset) {
  vinstr(0x14, src, xmm0, dst, k66, k0F3A, kWIG);
  EMIT(offset);
}

// vpextrw m16, src, imm8: VEX.66.0F3A 15 /r ib. vvvv unused (xmm0).
void Assembler::vpextrw(Operand dst, XMMRegister src, uint8_t offset) {
  vinstr(0x15, src, xmm0, dst, k66, k0F3A, kWIG);
  EMIT(offset);
}

// vpextrd m32, src, imm8: VEX.66.0F3A 16 /r ib. vvvv unused (xmm0).
void Assembler::vpextrd(Operand dst, XMMRegister src, uint8_t offset) {
  vinstr(0x16, src, xmm0, dst, k66, k0F3A, kWIG);
  EMIT(offset);
}

// vinsertps dst, src1, src2, imm8: VEX.66.0F3A 21 /r ib.
void Assembler::vinsertps(XMMRegister dst, XMMRegister src1, Operand src2,
                          uint8_t offset) {
  vinstr(0x21, dst, src1, src2, k66, k0F3A, kWIG);
  EMIT(offset);
}

// vpinsrb dst, src1, r/m8, imm8: VEX.66.0F3A 20 /r ib.
void Assembler::vpinsrb(XMMRegister dst, XMMRegister src1, Operand src2,
                        uint8_t offset) {
  vinstr(0x20, dst, src1, src2, k66, k0F3A, kWIG);
  EMIT(offset);
}

// vpinsrw dst, src1, r/m16, imm8: VEX.66.0F C4 /r ib (0F map, not 0F3A).
void Assembler::vpinsrw(XMMRegister dst, XMMRegister src1, Operand src2,
                        uint8_t offset) {
  vinstr(0xC4, dst, src1, src2, k66, k0F, kWIG);
  EMIT(offset);
}

// vpinsrd dst, src1, r/m32, imm8: VEX.66.0F3A 22 /r ib.
void Assembler::vpinsrd(XMMRegister dst, XMMRegister src1, Operand src2,
                        uint8_t offset) {
  vinstr(0x22, dst, src1, src2, k66, k0F3A, kWIG);
  EMIT(offset);
}

// vroundsd dst, src1, src2, mode: VEX.66.0F3A 0B /r ib.
void Assembler::vroundsd(XMMRegister dst, XMMRegister src1, XMMRegister src2,
                         RoundingMode mode) {
  vinstr(0x0b, dst, src1, src2, k66, k0F3A, kWIG);
  EMIT(static_cast<byte>(mode) | 0x8);  // Mask precision exception.
}
// vroundss dst, src1, src2, mode: VEX.66.0F3A 0A /r ib.
void Assembler::vroundss(XMMRegister dst, XMMRegister src1, XMMRegister src2,
                         RoundingMode mode) {
  vinstr(0x0a, dst, src1, src2, k66, k0F3A, kWIG);
  EMIT(static_cast<byte>(mode) | 0x8);  // Mask precision exception.
}
// vroundps dst, src, mode: VEX.66.0F3A 08 /r ib. vvvv unused (xmm0).
void Assembler::vroundps(XMMRegister dst, XMMRegister src, RoundingMode mode) {
  vinstr(0x08, dst, xmm0, Operand(src), k66, k0F3A, kWIG);
  EMIT(static_cast<byte>(mode) | 0x8);  // Mask precision exception.
}
// vroundpd dst, src, mode: VEX.66.0F3A 09 /r ib. vvvv unused (xmm0).
void Assembler::vroundpd(XMMRegister dst, XMMRegister src, RoundingMode mode) {
  vinstr(0x09, dst, xmm0, Operand(src), k66, k0F3A, kWIG);
  EMIT(static_cast<byte>(mode) | 0x8);  // Mask precision exception.
}
3077
// vmovmskpd r32, xmm: VEX.66.0F 50 /r. vvvv unused (xmm0).
void Assembler::vmovmskpd(Register dst, XMMRegister src) {
  DCHECK(IsEnabled(AVX));
  EnsureSpace ensure_space(this);
  emit_vex_prefix(xmm0, kL128, k66, k0F, kWIG);
  EMIT(0x50);
  emit_sse_operand(dst, src);
}

// vmovmskps r32, xmm: VEX.0F 50 /r. vvvv unused (xmm0).
void Assembler::vmovmskps(Register dst, XMMRegister src) {
  DCHECK(IsEnabled(AVX));
  EnsureSpace ensure_space(this);
  emit_vex_prefix(xmm0, kL128, kNoPrefix, k0F, kWIG);
  EMIT(0x50);
  emit_sse_operand(dst, src);
}

// vpmovmskb r32, xmm: VEX.66.0F D7 /r. vvvv unused (xmm0).
void Assembler::vpmovmskb(Register dst, XMMRegister src) {
  DCHECK(IsEnabled(AVX));
  EnsureSpace ensure_space(this);
  emit_vex_prefix(xmm0, kL128, k66, k0F, kWIG);
  EMIT(0xD7);
  emit_sse_operand(dst, src);
}

// vextractps m32, src, imm8: VEX.66.0F3A 17 /r ib.
void Assembler::vextractps(Operand dst, XMMRegister src, byte imm8) {
  vinstr(0x17, src, xmm0, dst, k66, k0F3A, VexW::kWIG);
  EMIT(imm8);
}

// vpcmpgtq dst, src1, src2: VEX.66.0F38 37 /r.
void Assembler::vpcmpgtq(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
  vinstr(0x37, dst, src1, src2, k66, k0F38, VexW::kWIG);
}
3110
// Generic BMI1 emitter: VEX.LZ.0F38.W0 <op> /r with vreg in VEX.vvvv.
void Assembler::bmi1(byte op, Register reg, Register vreg, Operand rm) {
  DCHECK(IsEnabled(BMI1));
  EnsureSpace ensure_space(this);
  emit_vex_prefix(vreg, kLZ, kNoPrefix, k0F38, kW0);
  EMIT(op);
  emit_operand(reg, rm);
}

// tzcnt r32, r/m32: count trailing zeros (BMI1). Encoding: F3 0F BC /r.
void Assembler::tzcnt(Register dst, Operand src) {
  DCHECK(IsEnabled(BMI1));
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0xBC);
  emit_operand(dst, src);
}

// lzcnt r32, r/m32: count leading zeros. Encoding: F3 0F BD /r.
void Assembler::lzcnt(Register dst, Operand src) {
  DCHECK(IsEnabled(LZCNT));
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0xBD);
  emit_operand(dst, src);
}

// popcnt r32, r/m32: population count. Encoding: F3 0F B8 /r.
void Assembler::popcnt(Register dst, Operand src) {
  DCHECK(IsEnabled(POPCNT));
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0xB8);
  emit_operand(dst, src);
}

// Generic BMI2 emitter: VEX.LZ.<pp>.0F38.W0 <op> /r with vreg in VEX.vvvv.
void Assembler::bmi2(SIMDPrefix pp, byte op, Register reg, Register vreg,
                     Operand rm) {
  DCHECK(IsEnabled(BMI2));
  EnsureSpace ensure_space(this);
  emit_vex_prefix(vreg, kLZ, pp, k0F38, kW0);
  EMIT(op);
  emit_operand(reg, rm);
}

// rorx r32, r/m32, imm8: rotate right without touching flags (BMI2).
// Encoding: VEX.LZ.F2.0F3A.W0 F0 /r ib.
void Assembler::rorx(Register dst, Operand src, byte imm8) {
  DCHECK(IsEnabled(BMI2));
  DCHECK(is_uint8(imm8));
  Register vreg = Register::from_code(0);  // VEX.vvvv unused
  EnsureSpace ensure_space(this);
  emit_vex_prefix(vreg, kLZ, kF2, k0F3A, kW0);
  EMIT(0xF0);
  emit_operand(dst, src);
  EMIT(imm8);
}
3165
// Generic SSE emitter: <escape> <opcode> /r.
void Assembler::sse_instr(XMMRegister dst, Operand src, byte escape,
                          byte opcode) {
  EnsureSpace ensure_space(this);
  EMIT(escape);
  EMIT(opcode);
  emit_sse_operand(dst, src);
}

// Generic SSE2 emitter: <prefix> <escape> <opcode> /r.
void Assembler::sse2_instr(XMMRegister dst, Operand src, byte prefix,
                           byte escape, byte opcode) {
  EnsureSpace ensure_space(this);
  EMIT(prefix);
  EMIT(escape);
  EMIT(opcode);
  emit_sse_operand(dst, src);
}

// Generic SSSE3 emitter: <prefix> <escape1> <escape2> <opcode> /r.
void Assembler::ssse3_instr(XMMRegister dst, Operand src, byte prefix,
                            byte escape1, byte escape2, byte opcode) {
  DCHECK(IsEnabled(SSSE3));
  EnsureSpace ensure_space(this);
  EMIT(prefix);
  EMIT(escape1);
  EMIT(escape2);
  EMIT(opcode);
  emit_sse_operand(dst, src);
}

// Generic SSE4.1 emitter: <prefix> <escape1> <escape2> <opcode> /r.
void Assembler::sse4_instr(XMMRegister dst, Operand src, byte prefix,
                           byte escape1, byte escape2, byte opcode) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(prefix);
  EMIT(escape1);
  EMIT(escape2);
  EMIT(opcode);
  emit_sse_operand(dst, src);
}

// VEX emitter, register src2, defaulting to 128-bit vector length.
void Assembler::vinstr(byte op, XMMRegister dst, XMMRegister src1,
                       XMMRegister src2, SIMDPrefix pp, LeadingOpcode m, VexW w,
                       CpuFeature feature) {
  vinstr(op, dst, src1, src2, kL128, pp, m, w, feature);
}

// VEX emitter, memory src2, defaulting to 128-bit vector length.
void Assembler::vinstr(byte op, XMMRegister dst, XMMRegister src1, Operand src2,
                       SIMDPrefix pp, LeadingOpcode m, VexW w,
                       CpuFeature feature) {
  vinstr(op, dst, src1, src2, kL128, pp, m, w, feature);
}

// Core VEX emitter (register src2): prefix, opcode, then ModR/M with dst in
// the reg field and src1 in VEX.vvvv.
void Assembler::vinstr(byte op, XMMRegister dst, XMMRegister src1,
                       XMMRegister src2, VectorLength l, SIMDPrefix pp,
                       LeadingOpcode m, VexW w, CpuFeature feature) {
  DCHECK(IsEnabled(feature));
  EnsureSpace ensure_space(this);
  emit_vex_prefix(src1, l, pp, m, w);
  EMIT(op);
  emit_sse_operand(dst, src2);
}

// Core VEX emitter (memory src2).
void Assembler::vinstr(byte op, XMMRegister dst, XMMRegister src1, Operand src2,
                       VectorLength l, SIMDPrefix pp, LeadingOpcode m, VexW w,
                       CpuFeature feature) {
  DCHECK(IsEnabled(feature));
  EnsureSpace ensure_space(this);
  emit_vex_prefix(src1, l, pp, m, w);
  EMIT(op);
  emit_sse_operand(dst, src2);
}
3236
// Emit a memory/register operand with an XMM register in the reg field,
// reusing the general-purpose emit_operand machinery.
void Assembler::emit_sse_operand(XMMRegister reg, Operand adr) {
  Register ireg = Register::from_code(reg.code());
  emit_operand(ireg, adr);
}

// ModR/M byte for a register-register SSE operand (mod == 11).
void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) {
  EMIT(0xC0 | dst.code() << 3 | src.code());
}

// ModR/M byte: GP register in reg field, XMM register in r/m field.
void Assembler::emit_sse_operand(Register dst, XMMRegister src) {
  EMIT(0xC0 | dst.code() << 3 | src.code());
}

// ModR/M byte: XMM register in reg field, GP register in r/m field.
void Assembler::emit_sse_operand(XMMRegister dst, Register src) {
  EMIT(0xC0 | (dst.code() << 3) | src.code());
}

// Emit a VEX prefix. The compact 2-byte C5 form is only usable when the
// leading opcode map is 0F and VEX.W is 0; otherwise the 3-byte C4 form is
// required. VEX.vvvv holds the inverted code of vreg.
void Assembler::emit_vex_prefix(XMMRegister vreg, VectorLength l, SIMDPrefix pp,
                                LeadingOpcode mm, VexW w) {
  if (mm != k0F || w != kW0) {
    EMIT(0xC4);
    // Change RXB from "110" to "111" to align with gdb disassembler.
    EMIT(0xE0 | mm);
    EMIT(w | ((~vreg.code() & 0xF) << 3) | l | pp);
  } else {
    EMIT(0xC5);
    EMIT(((~vreg.code()) << 3) | l | pp);
  }
}

// GP-register overload: reinterpret the register code as an XMM code for
// the VEX.vvvv field (used by BMI instructions).
void Assembler::emit_vex_prefix(Register vreg, VectorLength l, SIMDPrefix pp,
                                LeadingOpcode mm, VexW w) {
  XMMRegister ivreg = XMMRegister::from_code(vreg.code());
  emit_vex_prefix(ivreg, l, pp, mm, w);
}
3272
// Double the assembly buffer when it overflows, then fix up everything that
// holds raw addresses into it: pc_, the reloc-info writer (which grows down
// from the buffer end), internal references, and pc-relative off-heap
// targets.
void Assembler::GrowBuffer() {
  DCHECK(buffer_overflow());
  DCHECK_EQ(buffer_start_, buffer_->start());

  // Compute new buffer size.
  int old_size = buffer_->size();
  int new_size = 2 * old_size;

  // Some internal data structures overflow for very large buffers,
  // they must ensure that kMaximalBufferSize is not too large.
  if (new_size > kMaximalBufferSize) {
    V8::FatalProcessOutOfMemory(nullptr, "Assembler::GrowBuffer");
  }

  // Set up new buffer.
  std::unique_ptr<AssemblerBuffer> new_buffer = buffer_->Grow(new_size);
  DCHECK_EQ(new_size, new_buffer->size());
  byte* new_start = new_buffer->start();

  // Copy the data. Code grows from the start, reloc info from the end, so
  // they get different deltas.
  intptr_t pc_delta = new_start - buffer_start_;
  intptr_t rc_delta = (new_start + new_size) - (buffer_start_ + old_size);
  size_t reloc_size = (buffer_start_ + old_size) - reloc_info_writer.pos();
  MemMove(new_start, buffer_start_, pc_offset());
  MemMove(rc_delta + reloc_info_writer.pos(), reloc_info_writer.pos(),
          reloc_size);

  // Switch buffers.
  buffer_ = std::move(new_buffer);
  buffer_start_ = new_start;
  pc_ += pc_delta;
  reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
                               reloc_info_writer.last_pc() + pc_delta);

  // Relocate internal references: these are absolute addresses into the
  // buffer, so shift each stored value by pc_delta.
  for (auto pos : internal_reference_positions_) {
    Address p = reinterpret_cast<Address>(buffer_start_ + pos);
    WriteUnalignedValue(p, ReadUnalignedValue<int>(p) + pc_delta);
  }

  // Relocate pc-relative references.
  int mode_mask = RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET);
  DCHECK_EQ(mode_mask, RelocInfo::kApplyMask & mode_mask);
  base::Vector<byte> instructions{buffer_start_,
                                  static_cast<size_t>(pc_offset())};
  base::Vector<const byte> reloc_info{reloc_info_writer.pos(), reloc_size};
  for (RelocIterator it(instructions, reloc_info, 0, mode_mask); !it.done();
       it.next()) {
    it.rinfo()->apply(pc_delta);
  }

  DCHECK(!buffer_overflow());
}
3326
// Emit a two-opcode byte-sized arithmetic instruction with an 8-bit
// immediate; the destination register code is OR'ed into op2.
void Assembler::emit_arith_b(int op1, int op2, Register dst, int imm8) {
  DCHECK(is_uint8(op1) && is_uint8(op2));  // wrong opcode
  DCHECK(is_uint8(imm8));
  DCHECK_EQ(op1 & 0x01, 0);  // should be 8bit operation
  EMIT(op1);
  EMIT(op2 | dst.code());
  EMIT(imm8);
}
3335
// Emit a group-1 ALU instruction (add/or/adc/... selected by `sel`, the
// opcode extension 0..7) with an immediate operand, picking the shortest of
// the three available encodings.
void Assembler::emit_arith(int sel, Operand dst, const Immediate& x) {
  DCHECK((0 <= sel) && (sel <= 7));
  Register ireg = Register::from_code(sel);
  if (x.is_int8()) {
    EMIT(0x83);  // using a sign-extended 8-bit immediate.
    emit_operand(ireg, dst);
    EMIT(x.immediate() & 0xFF);
  } else if (dst.is_reg(eax)) {
    EMIT((sel << 3) | 0x05);  // short form if the destination is eax.
    emit(x);
  } else {
    EMIT(0x81);  // using a literal 32-bit immediate.
    emit_operand(ireg, dst);
    emit(x);
  }
}
3352
3353void Assembler::emit_operand(Register reg, Operand adr) {
3354  emit_operand(reg.code(), adr);
3355}
3356
3357void Assembler::emit_operand(XMMRegister reg, Operand adr) {
3358  Register ireg = Register::from_code(reg.code());
3359  emit_operand(ireg, adr);
3360}
3361
// Emits the pre-encoded operand bytes of |adr|, patching the given register
// (or opcode-extension) |code| into the reg field of the ModRM byte, and
// records relocation info for a trailing disp32 when required.
void Assembler::emit_operand(int code, Operand adr) {
  // Isolate-independent code may not embed relocatable addresses.
  DCHECK_IMPLIES(options().isolate_independent_code,
                 adr.rmode() != RelocInfo::CODE_TARGET);
  DCHECK_IMPLIES(options().isolate_independent_code,
                 adr.rmode() != RelocInfo::FULL_EMBEDDED_OBJECT);
  DCHECK_IMPLIES(options().isolate_independent_code,
                 adr.rmode() != RelocInfo::EXTERNAL_REFERENCE);

  const unsigned length = adr.encoded_bytes().length();
  DCHECK_GT(length, 0);

  // Emit updated ModRM byte containing the given register.
  // Bits 3..5 of the first encoded byte are the reg field; clear them
  // (~0x38) and insert |code| there.
  EMIT((adr.encoded_bytes()[0] & ~0x38) | (code << 3));

  // Emit the rest of the encoded operand.
  for (unsigned i = 1; i < length; i++) EMIT(adr.encoded_bytes()[i]);

  // Emit relocation information if necessary.
  // A length of at least 4 means the operand ends in a disp32 that may need
  // relocation (the reloc entry must point *at* the disp32, hence the
  // temporary rewind of pc_).
  if (length >= sizeof(int32_t) && !RelocInfo::IsNoInfo(adr.rmode())) {
    pc_ -= sizeof(int32_t);  // pc_ must be *at* disp32
    RecordRelocInfo(adr.rmode());
    if (adr.rmode() == RelocInfo::INTERNAL_REFERENCE) {  // Fixup for labels
      // The disp32 slot currently holds a Label*; emit_label overwrites it
      // with the real address (or a displacement chain) and advances pc_.
      emit_label(ReadUnalignedValue<Label*>(reinterpret_cast<Address>(pc_)));
    } else {
      pc_ += sizeof(int32_t);
    }
  }
}
3391
3392void Assembler::emit_label(Label* label) {
3393  if (label->is_bound()) {
3394    internal_reference_positions_.push_back(pc_offset());
3395    emit(reinterpret_cast<uint32_t>(buffer_start_ + label->pos()));
3396  } else {
3397    emit_disp(label, Displacement::CODE_ABSOLUTE);
3398  }
3399}
3400
3401void Assembler::emit_farith(int b1, int b2, int i) {
3402  DCHECK(is_uint8(b1) && is_uint8(b2));  // wrong opcode
3403  DCHECK(0 <= i && i < 8);               // illegal stack offset
3404  EMIT(b1);
3405  EMIT(b2 + i);
3406}
3407
// Writes a single raw data byte into the instruction stream.
void Assembler::db(uint8_t data) {
  EnsureSpace ensure_space(this);
  EMIT(data);
}
3412
3413void Assembler::dd(uint32_t data, RelocInfo::Mode rmode) {
3414  EnsureSpace ensure_space(this);
3415  if (!RelocInfo::IsNoInfo(rmode)) {
3416    DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) ||
3417           RelocInfo::IsLiteralConstant(rmode));
3418    RecordRelocInfo(rmode);
3419  }
3420  emit(data);
3421}
3422
3423void Assembler::dq(uint64_t data, RelocInfo::Mode rmode) {
3424  EnsureSpace ensure_space(this);
3425  if (!RelocInfo::IsNoInfo(rmode)) {
3426    DCHECK(RelocInfo::IsDataEmbeddedObject(rmode));
3427    RecordRelocInfo(rmode);
3428  }
3429  emit_q(data);
3430}
3431
// Writes a 32-bit absolute reference to |label| into the instruction stream,
// recording an INTERNAL_REFERENCE reloc entry first so the value gets
// relocated when the code moves.
void Assembler::dd(Label* label) {
  EnsureSpace ensure_space(this);
  RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
  emit_label(label);
}
3437
3438void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
3439  if (!ShouldRecordRelocInfo(rmode)) return;
3440  RelocInfo rinfo(reinterpret_cast<Address>(pc_), rmode, data, Code());
3441  reloc_info_writer.Write(&rinfo);
3442}
3443
3444#undef EMIT
3445
3446}  // namespace internal
3447}  // namespace v8
3448
3449#endif  // V8_TARGET_ARCH_IA32
3450