// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
#if defined(V8_TARGET_ARCH_X64)
32
#include "macro-assembler.h"
33
#include "serialize.h"
38
// -----------------------------------------------------------------------------
39
// Implementation of CpuFeatures
43
bool CpuFeatures::initialized_ = false;
45
uint64_t CpuFeatures::supported_ = CpuFeatures::kDefaultCpuFeatures;
46
uint64_t CpuFeatures::found_by_runtime_probing_ = 0;
49
void CpuFeatures::Probe() {
50
ASSERT(supported_ == CpuFeatures::kDefaultCpuFeatures);
54
supported_ = kDefaultCpuFeatures;
55
if (Serializer::enabled()) {
56
supported_ |= OS::CpuFeaturesImpliedByPlatform();
57
return; // No features if we might serialize.
60
const int kBufferSize = 4 * KB;
61
VirtualMemory* memory = new VirtualMemory(kBufferSize);
62
if (!memory->IsReserved()) {
66
ASSERT(memory->size() >= static_cast<size_t>(kBufferSize));
67
if (!memory->Commit(memory->address(), kBufferSize, true/*executable*/)) {
72
Assembler assm(NULL, memory->address(), kBufferSize);
75
// Save old rsp, since we are going to modify the stack.
83
// If we can modify bit 21 of the EFLAGS register, then CPUID is supported.
87
__ xor_(rax, Immediate(0x200000)); // Flip bit 21.
92
__ xor_(rax, rdx); // Different if CPUID is supported.
93
__ j(not_zero, &cpuid);
95
// CPUID not supported. Clear the supported features in rax.
99
// Invoke CPUID with 1 in eax to get feature information in
100
// ecx:edx. Temporarily enable CPUID support because we know it's
103
__ movl(rax, Immediate(1));
104
supported_ = kDefaultCpuFeatures | (1 << CPUID);
105
{ Scope fscope(CPUID);
107
// Move the result from ecx:edx to rdi.
108
__ movl(rdi, rdx); // Zero-extended to 64 bits.
109
__ shl(rcx, Immediate(32));
112
// Get the sahf supported flag, from CPUID(0x80000001)
113
__ movq(rax, 0x80000001, RelocInfo::NONE);
116
supported_ = kDefaultCpuFeatures;
118
// Put the CPU flags in rax.
119
// rax = (rcx & 1) | (rdi & ~1) | (1 << CPUID).
120
__ movl(rax, Immediate(1));
121
__ and_(rcx, rax); // Bit 0 is set if SAHF instruction supported.
125
__ or_(rax, Immediate(1 << CPUID));
138
typedef uint64_t (*F0)();
139
F0 probe = FUNCTION_CAST<F0>(reinterpret_cast<Address>(memory->address()));
140
supported_ = probe();
141
found_by_runtime_probing_ = supported_;
142
found_by_runtime_probing_ &= ~kDefaultCpuFeatures;
143
uint64_t os_guarantees = OS::CpuFeaturesImpliedByPlatform();
144
supported_ |= os_guarantees;
145
found_by_runtime_probing_ &= ~os_guarantees;
146
// SSE2 and CMOV must be available on an X64 CPU.
147
ASSERT(IsSupported(CPUID));
148
ASSERT(IsSupported(SSE2));
149
ASSERT(IsSupported(CMOV));
155
// -----------------------------------------------------------------------------
156
// Implementation of RelocInfo
158
// Patch the code at the current PC with a call to the target address.
159
// Additional guard int3 instructions can be added if required.
160
void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
161
// Load register with immediate 64 and call through a register instructions
162
// takes up 13 bytes and int3 takes up one byte.
163
static const int kCallCodeSize = 13;
164
int code_size = kCallCodeSize + guard_bytes;
166
// Create a code patcher.
167
CodePatcher patcher(pc_, code_size);
169
// Add a label for checking the size of the code used for returning.
171
Label check_codesize;
172
patcher.masm()->bind(&check_codesize);
176
patcher.masm()->movq(r10, target, RelocInfo::NONE);
177
patcher.masm()->call(r10);
179
// Check that the size of the code generated is as expected.
180
ASSERT_EQ(kCallCodeSize,
181
patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize));
183
// Add the requested number of int3 instructions after the call.
184
for (int i = 0; i < guard_bytes; i++) {
185
patcher.masm()->int3();
190
void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
191
// Patch the code at the current address with the supplied instructions.
192
for (int i = 0; i < instruction_count; i++) {
193
*(pc_ + i) = *(instructions + i);
196
// Indicate that code has changed.
197
CPU::FlushICache(pc_, instruction_count);
201
// -----------------------------------------------------------------------------
202
// Register constants.
204
const int Register::kRegisterCodeByAllocationIndex[kNumAllocatableRegisters] = {
205
// rax, rbx, rdx, rcx, rdi, r8, r9, r11, r14, r15
206
0, 3, 2, 1, 7, 8, 9, 11, 14, 15
209
const int Register::kAllocationIndexByRegisterCode[kNumRegisters] = {
210
0, 3, 2, 1, -1, -1, -1, 4, 5, 6, -1, 7, -1, -1, 8, 9
214
// -----------------------------------------------------------------------------
215
// Implementation of Operand
217
Operand::Operand(Register base, int32_t disp) : rex_(0) {
219
if (base.is(rsp) || base.is(r12)) {
220
// SIB byte is needed to encode (rsp + offset) or (r12 + offset).
221
set_sib(times_1, rsp, base);
224
if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
226
} else if (is_int8(disp)) {
236
Operand::Operand(Register base,
239
int32_t disp) : rex_(0) {
240
ASSERT(!index.is(rsp));
242
set_sib(scale, index, base);
243
if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
244
// This call to set_modrm doesn't overwrite the REX.B (or REX.X) bits
245
// possibly set by set_sib.
247
} else if (is_int8(disp)) {
257
Operand::Operand(Register index,
259
int32_t disp) : rex_(0) {
260
ASSERT(!index.is(rsp));
263
set_sib(scale, index, rbp);
268
Operand::Operand(const Operand& operand, int32_t offset) {
269
ASSERT(operand.len_ >= 1);
270
// Operand encodes REX ModR/M [SIB] [Disp].
271
byte modrm = operand.buf_[0];
272
ASSERT(modrm < 0xC0); // Disallow mode 3 (register target).
273
bool has_sib = ((modrm & 0x07) == 0x04);
274
byte mode = modrm & 0xC0;
275
int disp_offset = has_sib ? 2 : 1;
276
int base_reg = (has_sib ? operand.buf_[1] : modrm) & 0x07;
277
// Mode 0 with rbp/r13 as ModR/M or SIB base register always has a 32-bit
279
bool is_baseless = (mode == 0) && (base_reg == 0x05); // No base or RIP base.
280
int32_t disp_value = 0;
281
if (mode == 0x80 || is_baseless) {
282
// Mode 2 or mode 0 with rbp/r13 as base: Word displacement.
283
disp_value = *BitCast<const int32_t*>(&operand.buf_[disp_offset]);
284
} else if (mode == 0x40) {
285
// Mode 1: Byte displacement.
286
disp_value = static_cast<signed char>(operand.buf_[disp_offset]);
289
// Write new operand with same registers, but with modified displacement.
290
ASSERT(offset >= 0 ? disp_value + offset > disp_value
291
: disp_value + offset < disp_value); // No overflow.
292
disp_value += offset;
294
if (!is_int8(disp_value) || is_baseless) {
295
// Need 32 bits of displacement, mode 2 or mode 1 with register rbp/r13.
296
buf_[0] = (modrm & 0x3f) | (is_baseless ? 0x00 : 0x80);
297
len_ = disp_offset + 4;
298
Memory::int32_at(&buf_[disp_offset]) = disp_value;
299
} else if (disp_value != 0 || (base_reg == 0x05)) {
300
// Need 8 bits of displacement.
301
buf_[0] = (modrm & 0x3f) | 0x40; // Mode 1.
302
len_ = disp_offset + 1;
303
buf_[disp_offset] = static_cast<byte>(disp_value);
305
// Need no displacement.
306
buf_[0] = (modrm & 0x3f); // Mode 0.
310
buf_[1] = operand.buf_[1];
315
bool Operand::AddressUsesRegister(Register reg) const {
316
int code = reg.code();
317
ASSERT((buf_[0] & 0xC0) != 0xC0); // Always a memory operand.
318
// Start with only low three bits of base register. Initial decoding doesn't
319
// distinguish on the REX.B bit.
320
int base_code = buf_[0] & 0x07;
321
if (base_code == rsp.code()) {
322
// SIB byte present in buf_[1].
323
// Check the index register from the SIB byte + REX.X prefix.
324
int index_code = ((buf_[1] >> 3) & 0x07) | ((rex_ & 0x02) << 2);
325
// Index code (including REX.X) of 0x04 (rsp) means no index register.
326
if (index_code != rsp.code() && index_code == code) return true;
327
// Add REX.B to get the full base register code.
328
base_code = (buf_[1] & 0x07) | ((rex_ & 0x01) << 3);
329
// A base register of 0x05 (rbp) with mod = 0 means no base register.
330
if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
331
return code == base_code;
333
// A base register with low bits of 0x05 (rbp or r13) and mod = 0 means
335
if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
336
base_code |= ((rex_ & 0x01) << 3);
337
return code == base_code;
342
// -----------------------------------------------------------------------------
343
// Implementation of Assembler.
345
#ifdef GENERATED_CODE_COVERAGE
346
static void InitCoverageLog();
349
Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
350
: AssemblerBase(arg_isolate),
352
positions_recorder_(this),
353
emit_debug_code_(FLAG_debug_code),
354
predictable_code_size_(false) {
355
if (buffer == NULL) {
356
// Do our own buffer management.
357
if (buffer_size <= kMinimalBufferSize) {
358
buffer_size = kMinimalBufferSize;
360
if (isolate() != NULL && isolate()->assembler_spare_buffer() != NULL) {
361
buffer = isolate()->assembler_spare_buffer();
362
isolate()->set_assembler_spare_buffer(NULL);
365
if (buffer == NULL) {
366
buffer_ = NewArray<byte>(buffer_size);
368
buffer_ = static_cast<byte*>(buffer);
370
buffer_size_ = buffer_size;
373
// Use externally provided buffer instead.
374
ASSERT(buffer_size > 0);
375
buffer_ = static_cast<byte*>(buffer);
376
buffer_size_ = buffer_size;
380
// Clear the buffer in debug mode unless it was provided by the
381
// caller in which case we can't be sure it's okay to overwrite
382
// existing code in it.
385
memset(buffer_, 0xCC, buffer_size); // int3
389
// Set up buffer pointers.
390
ASSERT(buffer_ != NULL);
392
reloc_info_writer.Reposition(buffer_ + buffer_size, pc_);
395
#ifdef GENERATED_CODE_COVERAGE
401
Assembler::~Assembler() {
403
if (isolate() != NULL &&
404
isolate()->assembler_spare_buffer() == NULL &&
405
buffer_size_ == kMinimalBufferSize) {
406
isolate()->set_assembler_spare_buffer(buffer_);
408
DeleteArray(buffer_);
414
void Assembler::GetCode(CodeDesc* desc) {
415
// Finalize code (at this point overflow() may be true, but the gap ensures
416
// that we are still not overlapping instructions and relocation info).
417
ASSERT(pc_ <= reloc_info_writer.pos()); // No overlap.
418
// Set up code descriptor.
419
desc->buffer = buffer_;
420
desc->buffer_size = buffer_size_;
421
desc->instr_size = pc_offset();
422
ASSERT(desc->instr_size > 0); // Zero-size code objects upset the system.
424
static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos());
429
void Assembler::Align(int m) {
430
ASSERT(IsPowerOf2(m));
431
int delta = (m - (pc_offset() & (m - 1))) & (m - 1);
436
void Assembler::CodeTargetAlign() {
437
Align(16); // Preferred alignment of jump targets on x64.
441
bool Assembler::IsNop(Address addr) {
443
while (*a == 0x66) a++;
444
if (*a == 0x90) return true;
445
if (a[0] == 0xf && a[1] == 0x1f) return true;
450
void Assembler::bind_to(Label* L, int pos) {
451
ASSERT(!L->is_bound()); // Label may only be bound once.
452
ASSERT(0 <= pos && pos <= pc_offset()); // Position must be valid.
453
if (L->is_linked()) {
454
int current = L->pos();
455
int next = long_at(current);
456
while (next != current) {
457
// Relative address, relative to point after address.
458
int imm32 = pos - (current + sizeof(int32_t));
459
long_at_put(current, imm32);
461
next = long_at(next);
463
// Fix up last fixup on linked list.
464
int last_imm32 = pos - (current + sizeof(int32_t));
465
long_at_put(current, last_imm32);
467
while (L->is_near_linked()) {
468
int fixup_pos = L->near_link_pos();
470
static_cast<int>(*reinterpret_cast<int8_t*>(addr_at(fixup_pos)));
471
ASSERT(offset_to_next <= 0);
472
int disp = pos - (fixup_pos + sizeof(int8_t));
473
CHECK(is_int8(disp));
474
set_byte_at(fixup_pos, disp);
475
if (offset_to_next < 0) {
476
L->link_to(fixup_pos + offset_to_next, Label::kNear);
485
void Assembler::bind(Label* L) {
486
bind_to(L, pc_offset());
490
void Assembler::GrowBuffer() {
491
ASSERT(buffer_overflow());
492
if (!own_buffer_) FATAL("external code buffer is too small");
494
// Compute new buffer size.
495
CodeDesc desc; // the new buffer
496
if (buffer_size_ < 4*KB) {
497
desc.buffer_size = 4*KB;
499
desc.buffer_size = 2*buffer_size_;
501
// Some internal data structures overflow for very large buffers,
502
// they must ensure that kMaximalBufferSize is not too large.
503
if ((desc.buffer_size > kMaximalBufferSize) ||
504
(desc.buffer_size > HEAP->MaxOldGenerationSize())) {
505
V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
508
// Set up new buffer.
509
desc.buffer = NewArray<byte>(desc.buffer_size);
510
desc.instr_size = pc_offset();
512
static_cast<int>((buffer_ + buffer_size_) - (reloc_info_writer.pos()));
514
// Clear the buffer in debug mode. Use 'int3' instructions to make
515
// sure to get into problems if we ever run uninitialized code.
517
memset(desc.buffer, 0xCC, desc.buffer_size);
521
intptr_t pc_delta = desc.buffer - buffer_;
522
intptr_t rc_delta = (desc.buffer + desc.buffer_size) -
523
(buffer_ + buffer_size_);
524
memmove(desc.buffer, buffer_, desc.instr_size);
525
memmove(rc_delta + reloc_info_writer.pos(),
526
reloc_info_writer.pos(), desc.reloc_size);
529
if (isolate() != NULL &&
530
isolate()->assembler_spare_buffer() == NULL &&
531
buffer_size_ == kMinimalBufferSize) {
532
isolate()->set_assembler_spare_buffer(buffer_);
534
DeleteArray(buffer_);
536
buffer_ = desc.buffer;
537
buffer_size_ = desc.buffer_size;
539
reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
540
reloc_info_writer.last_pc() + pc_delta);
542
// Relocate runtime entries.
543
for (RelocIterator it(desc); !it.done(); it.next()) {
544
RelocInfo::Mode rmode = it.rinfo()->rmode();
545
if (rmode == RelocInfo::INTERNAL_REFERENCE) {
546
intptr_t* p = reinterpret_cast<intptr_t*>(it.rinfo()->pc());
547
if (*p != 0) { // 0 means uninitialized.
553
ASSERT(!buffer_overflow());
557
void Assembler::emit_operand(int code, const Operand& adr) {
558
ASSERT(is_uint3(code));
559
const unsigned length = adr.len_;
562
// Emit updated ModR/M byte containing the given register.
563
ASSERT((adr.buf_[0] & 0x38) == 0);
564
pc_[0] = adr.buf_[0] | code << 3;
566
// Emit the rest of the encoded operand.
567
for (unsigned i = 1; i < length; i++) pc_[i] = adr.buf_[i];
572
// Assembler Instruction implementations.
574
void Assembler::arithmetic_op(byte opcode, Register reg, const Operand& op) {
575
EnsureSpace ensure_space(this);
576
emit_rex_64(reg, op);
578
emit_operand(reg, op);
582
void Assembler::arithmetic_op(byte opcode, Register reg, Register rm_reg) {
583
EnsureSpace ensure_space(this);
584
ASSERT((opcode & 0xC6) == 2);
585
if (rm_reg.low_bits() == 4) { // Forces SIB byte.
586
// Swap reg and rm_reg and change opcode operand order.
587
emit_rex_64(rm_reg, reg);
589
emit_modrm(rm_reg, reg);
591
emit_rex_64(reg, rm_reg);
593
emit_modrm(reg, rm_reg);
598
void Assembler::arithmetic_op_16(byte opcode, Register reg, Register rm_reg) {
599
EnsureSpace ensure_space(this);
600
ASSERT((opcode & 0xC6) == 2);
601
if (rm_reg.low_bits() == 4) { // Forces SIB byte.
602
// Swap reg and rm_reg and change opcode operand order.
604
emit_optional_rex_32(rm_reg, reg);
606
emit_modrm(rm_reg, reg);
609
emit_optional_rex_32(reg, rm_reg);
611
emit_modrm(reg, rm_reg);
616
void Assembler::arithmetic_op_16(byte opcode,
618
const Operand& rm_reg) {
619
EnsureSpace ensure_space(this);
621
emit_optional_rex_32(reg, rm_reg);
623
emit_operand(reg, rm_reg);
627
void Assembler::arithmetic_op_32(byte opcode, Register reg, Register rm_reg) {
628
EnsureSpace ensure_space(this);
629
ASSERT((opcode & 0xC6) == 2);
630
if (rm_reg.low_bits() == 4) { // Forces SIB byte.
631
// Swap reg and rm_reg and change opcode operand order.
632
emit_optional_rex_32(rm_reg, reg);
633
emit(opcode ^ 0x02); // E.g. 0x03 -> 0x01 for ADD.
634
emit_modrm(rm_reg, reg);
636
emit_optional_rex_32(reg, rm_reg);
638
emit_modrm(reg, rm_reg);
643
void Assembler::arithmetic_op_32(byte opcode,
645
const Operand& rm_reg) {
646
EnsureSpace ensure_space(this);
647
emit_optional_rex_32(reg, rm_reg);
649
emit_operand(reg, rm_reg);
653
void Assembler::immediate_arithmetic_op(byte subcode,
656
EnsureSpace ensure_space(this);
658
if (is_int8(src.value_)) {
660
emit_modrm(subcode, dst);
662
} else if (dst.is(rax)) {
663
emit(0x05 | (subcode << 3));
667
emit_modrm(subcode, dst);
672
void Assembler::immediate_arithmetic_op(byte subcode,
675
EnsureSpace ensure_space(this);
677
if (is_int8(src.value_)) {
679
emit_operand(subcode, dst);
683
emit_operand(subcode, dst);
689
void Assembler::immediate_arithmetic_op_16(byte subcode,
692
EnsureSpace ensure_space(this);
693
emit(0x66); // Operand size override prefix.
694
emit_optional_rex_32(dst);
695
if (is_int8(src.value_)) {
697
emit_modrm(subcode, dst);
699
} else if (dst.is(rax)) {
700
emit(0x05 | (subcode << 3));
704
emit_modrm(subcode, dst);
710
void Assembler::immediate_arithmetic_op_16(byte subcode,
713
EnsureSpace ensure_space(this);
714
emit(0x66); // Operand size override prefix.
715
emit_optional_rex_32(dst);
716
if (is_int8(src.value_)) {
718
emit_operand(subcode, dst);
722
emit_operand(subcode, dst);
728
void Assembler::immediate_arithmetic_op_32(byte subcode,
731
EnsureSpace ensure_space(this);
732
emit_optional_rex_32(dst);
733
if (is_int8(src.value_)) {
735
emit_modrm(subcode, dst);
737
} else if (dst.is(rax)) {
738
emit(0x05 | (subcode << 3));
742
emit_modrm(subcode, dst);
748
void Assembler::immediate_arithmetic_op_32(byte subcode,
751
EnsureSpace ensure_space(this);
752
emit_optional_rex_32(dst);
753
if (is_int8(src.value_)) {
755
emit_operand(subcode, dst);
759
emit_operand(subcode, dst);
765
void Assembler::immediate_arithmetic_op_8(byte subcode,
768
EnsureSpace ensure_space(this);
769
emit_optional_rex_32(dst);
770
ASSERT(is_int8(src.value_) || is_uint8(src.value_));
772
emit_operand(subcode, dst);
777
void Assembler::immediate_arithmetic_op_8(byte subcode,
780
EnsureSpace ensure_space(this);
781
if (!dst.is_byte_register()) {
782
// Use 64-bit mode byte registers.
785
ASSERT(is_int8(src.value_) || is_uint8(src.value_));
787
emit_modrm(subcode, dst);
792
void Assembler::shift(Register dst, Immediate shift_amount, int subcode) {
793
EnsureSpace ensure_space(this);
794
ASSERT(is_uint6(shift_amount.value_)); // illegal shift count
795
if (shift_amount.value_ == 1) {
798
emit_modrm(subcode, dst);
802
emit_modrm(subcode, dst);
803
emit(shift_amount.value_);
808
void Assembler::shift(Register dst, int subcode) {
809
EnsureSpace ensure_space(this);
812
emit_modrm(subcode, dst);
816
void Assembler::shift_32(Register dst, int subcode) {
817
EnsureSpace ensure_space(this);
818
emit_optional_rex_32(dst);
820
emit_modrm(subcode, dst);
824
void Assembler::shift_32(Register dst, Immediate shift_amount, int subcode) {
825
EnsureSpace ensure_space(this);
826
ASSERT(is_uint5(shift_amount.value_)); // illegal shift count
827
if (shift_amount.value_ == 1) {
828
emit_optional_rex_32(dst);
830
emit_modrm(subcode, dst);
832
emit_optional_rex_32(dst);
834
emit_modrm(subcode, dst);
835
emit(shift_amount.value_);
840
void Assembler::bt(const Operand& dst, Register src) {
841
EnsureSpace ensure_space(this);
842
emit_rex_64(src, dst);
845
emit_operand(src, dst);
849
void Assembler::bts(const Operand& dst, Register src) {
850
EnsureSpace ensure_space(this);
851
emit_rex_64(src, dst);
854
emit_operand(src, dst);
858
void Assembler::call(Label* L) {
859
positions_recorder()->WriteRecordedPositions();
860
EnsureSpace ensure_space(this);
861
// 1110 1000 #32-bit disp.
864
int offset = L->pos() - pc_offset() - sizeof(int32_t);
867
} else if (L->is_linked()) {
869
L->link_to(pc_offset() - sizeof(int32_t));
871
ASSERT(L->is_unused());
872
int32_t current = pc_offset();
879
void Assembler::call(Handle<Code> target,
880
RelocInfo::Mode rmode,
882
positions_recorder()->WriteRecordedPositions();
883
EnsureSpace ensure_space(this);
884
// 1110 1000 #32-bit disp.
886
emit_code_target(target, rmode, ast_id);
890
void Assembler::call(Register adr) {
891
positions_recorder()->WriteRecordedPositions();
892
EnsureSpace ensure_space(this);
893
// Opcode: FF /2 r64.
894
emit_optional_rex_32(adr);
896
emit_modrm(0x2, adr);
900
void Assembler::call(const Operand& op) {
901
positions_recorder()->WriteRecordedPositions();
902
EnsureSpace ensure_space(this);
903
// Opcode: FF /2 m64.
904
emit_optional_rex_32(op);
906
emit_operand(0x2, op);
910
// Calls directly to the given address using a relative offset.
911
// Should only ever be used in Code objects for calls within the
912
// same Code object. Should not be used when generating new code (use labels),
913
// but only when patching existing code.
914
void Assembler::call(Address target) {
915
positions_recorder()->WriteRecordedPositions();
916
EnsureSpace ensure_space(this);
917
// 1110 1000 #32-bit disp.
919
Address source = pc_ + 4;
920
intptr_t displacement = target - source;
921
ASSERT(is_int32(displacement));
922
emitl(static_cast<int32_t>(displacement));
926
void Assembler::clc() {
927
EnsureSpace ensure_space(this);
931
void Assembler::cld() {
932
EnsureSpace ensure_space(this);
936
void Assembler::cdq() {
937
EnsureSpace ensure_space(this);
942
void Assembler::cmovq(Condition cc, Register dst, Register src) {
945
} else if (cc == never) {
948
// No need to check CpuInfo for CMOV support, it's a required part of the
949
// 64-bit architecture.
950
ASSERT(cc >= 0); // Use mov for unconditional moves.
951
EnsureSpace ensure_space(this);
952
// Opcode: REX.W 0f 40 + cc /r.
953
emit_rex_64(dst, src);
956
emit_modrm(dst, src);
960
void Assembler::cmovq(Condition cc, Register dst, const Operand& src) {
963
} else if (cc == never) {
967
EnsureSpace ensure_space(this);
968
// Opcode: REX.W 0f 40 + cc /r.
969
emit_rex_64(dst, src);
972
emit_operand(dst, src);
976
void Assembler::cmovl(Condition cc, Register dst, Register src) {
979
} else if (cc == never) {
983
EnsureSpace ensure_space(this);
984
// Opcode: 0f 40 + cc /r.
985
emit_optional_rex_32(dst, src);
988
emit_modrm(dst, src);
992
void Assembler::cmovl(Condition cc, Register dst, const Operand& src) {
995
} else if (cc == never) {
999
EnsureSpace ensure_space(this);
1000
// Opcode: 0f 40 + cc /r.
1001
emit_optional_rex_32(dst, src);
1004
emit_operand(dst, src);
1008
void Assembler::cmpb_al(Immediate imm8) {
1009
ASSERT(is_int8(imm8.value_) || is_uint8(imm8.value_));
1010
EnsureSpace ensure_space(this);
1016
void Assembler::cpuid() {
1017
ASSERT(CpuFeatures::IsEnabled(CPUID));
1018
EnsureSpace ensure_space(this);
1024
void Assembler::cqo() {
1025
EnsureSpace ensure_space(this);
1031
void Assembler::decq(Register dst) {
1032
EnsureSpace ensure_space(this);
1035
emit_modrm(0x1, dst);
1039
void Assembler::decq(const Operand& dst) {
1040
EnsureSpace ensure_space(this);
1043
emit_operand(1, dst);
1047
void Assembler::decl(Register dst) {
1048
EnsureSpace ensure_space(this);
1049
emit_optional_rex_32(dst);
1051
emit_modrm(0x1, dst);
1055
void Assembler::decl(const Operand& dst) {
1056
EnsureSpace ensure_space(this);
1057
emit_optional_rex_32(dst);
1059
emit_operand(1, dst);
1063
void Assembler::decb(Register dst) {
1064
EnsureSpace ensure_space(this);
1065
if (!dst.is_byte_register()) {
1066
// Register is not one of al, bl, cl, dl. Its encoding needs REX.
1070
emit_modrm(0x1, dst);
1074
void Assembler::decb(const Operand& dst) {
1075
EnsureSpace ensure_space(this);
1076
emit_optional_rex_32(dst);
1078
emit_operand(1, dst);
1082
void Assembler::enter(Immediate size) {
1083
EnsureSpace ensure_space(this);
1085
emitw(size.value_); // 16 bit operand, always.
1090
void Assembler::hlt() {
1091
EnsureSpace ensure_space(this);
1096
void Assembler::idivq(Register src) {
1097
EnsureSpace ensure_space(this);
1100
emit_modrm(0x7, src);
1104
void Assembler::idivl(Register src) {
1105
EnsureSpace ensure_space(this);
1106
emit_optional_rex_32(src);
1108
emit_modrm(0x7, src);
1112
void Assembler::imul(Register src) {
1113
EnsureSpace ensure_space(this);
1116
emit_modrm(0x5, src);
1120
void Assembler::imul(Register dst, Register src) {
1121
EnsureSpace ensure_space(this);
1122
emit_rex_64(dst, src);
1125
emit_modrm(dst, src);
1129
void Assembler::imul(Register dst, const Operand& src) {
1130
EnsureSpace ensure_space(this);
1131
emit_rex_64(dst, src);
1134
emit_operand(dst, src);
1138
void Assembler::imul(Register dst, Register src, Immediate imm) {
1139
EnsureSpace ensure_space(this);
1140
emit_rex_64(dst, src);
1141
if (is_int8(imm.value_)) {
1143
emit_modrm(dst, src);
1147
emit_modrm(dst, src);
1153
void Assembler::imull(Register dst, Register src) {
1154
EnsureSpace ensure_space(this);
1155
emit_optional_rex_32(dst, src);
1158
emit_modrm(dst, src);
1162
void Assembler::imull(Register dst, const Operand& src) {
1163
EnsureSpace ensure_space(this);
1164
emit_optional_rex_32(dst, src);
1167
emit_operand(dst, src);
1171
void Assembler::imull(Register dst, Register src, Immediate imm) {
1172
EnsureSpace ensure_space(this);
1173
emit_optional_rex_32(dst, src);
1174
if (is_int8(imm.value_)) {
1176
emit_modrm(dst, src);
1180
emit_modrm(dst, src);
1186
void Assembler::incq(Register dst) {
1187
EnsureSpace ensure_space(this);
1190
emit_modrm(0x0, dst);
1194
void Assembler::incq(const Operand& dst) {
1195
EnsureSpace ensure_space(this);
1198
emit_operand(0, dst);
1202
void Assembler::incl(const Operand& dst) {
1203
EnsureSpace ensure_space(this);
1204
emit_optional_rex_32(dst);
1206
emit_operand(0, dst);
1210
void Assembler::incl(Register dst) {
1211
EnsureSpace ensure_space(this);
1212
emit_optional_rex_32(dst);
1218
void Assembler::int3() {
1219
EnsureSpace ensure_space(this);
1224
void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
1228
} else if (cc == never) {
1231
EnsureSpace ensure_space(this);
1232
ASSERT(is_uint4(cc));
1233
if (L->is_bound()) {
1234
const int short_size = 2;
1235
const int long_size = 6;
1236
int offs = L->pos() - pc_offset();
1238
// Determine whether we can use 1-byte offsets for backwards branches,
1239
// which have a max range of 128 bytes.
1241
// We also need to check the predictable_code_size_ flag here, because
1242
// on x64, when the full code generator recompiles code for debugging, some
1243
// places need to be padded out to a certain size. The debugger is keeping
1244
// track of how often it did this so that it can adjust return addresses on
1245
// the stack, but if the size of jump instructions can also change, that's
1246
// not enough and the calculated offsets would be incorrect.
1247
if (is_int8(offs - short_size) && !predictable_code_size_) {
1248
// 0111 tttn #8-bit disp.
1250
emit((offs - short_size) & 0xFF);
1252
// 0000 1111 1000 tttn #32-bit disp.
1255
emitl(offs - long_size);
1257
} else if (distance == Label::kNear) {
1258
// 0111 tttn #8-bit disp
1261
if (L->is_near_linked()) {
1262
int offset = L->near_link_pos() - pc_offset();
1263
ASSERT(is_int8(offset));
1264
disp = static_cast<byte>(offset & 0xFF);
1266
L->link_to(pc_offset(), Label::kNear);
1268
} else if (L->is_linked()) {
1269
// 0000 1111 1000 tttn #32-bit disp.
1273
L->link_to(pc_offset() - sizeof(int32_t));
1275
ASSERT(L->is_unused());
1278
int32_t current = pc_offset();
1280
L->link_to(current);
1285
void Assembler::j(Condition cc,
1286
Handle<Code> target,
1287
RelocInfo::Mode rmode) {
1288
EnsureSpace ensure_space(this);
1289
ASSERT(is_uint4(cc));
1290
// 0000 1111 1000 tttn #32-bit disp.
1293
emit_code_target(target, rmode);
1297
void Assembler::jmp(Label* L, Label::Distance distance) {
1298
EnsureSpace ensure_space(this);
1299
const int short_size = sizeof(int8_t);
1300
const int long_size = sizeof(int32_t);
1301
if (L->is_bound()) {
1302
int offs = L->pos() - pc_offset() - 1;
1304
if (is_int8(offs - short_size) && !predictable_code_size_) {
1305
// 1110 1011 #8-bit disp.
1307
emit((offs - short_size) & 0xFF);
1309
// 1110 1001 #32-bit disp.
1311
emitl(offs - long_size);
1313
} else if (distance == Label::kNear) {
1316
if (L->is_near_linked()) {
1317
int offset = L->near_link_pos() - pc_offset();
1318
ASSERT(is_int8(offset));
1319
disp = static_cast<byte>(offset & 0xFF);
1321
L->link_to(pc_offset(), Label::kNear);
1323
} else if (L->is_linked()) {
1324
// 1110 1001 #32-bit disp.
1327
L->link_to(pc_offset() - long_size);
1329
// 1110 1001 #32-bit disp.
1330
ASSERT(L->is_unused());
1332
int32_t current = pc_offset();
1334
L->link_to(current);
1339
void Assembler::jmp(Handle<Code> target, RelocInfo::Mode rmode) {
1340
EnsureSpace ensure_space(this);
1341
// 1110 1001 #32-bit disp.
1343
emit_code_target(target, rmode);
1347
void Assembler::jmp(Register target) {
1348
EnsureSpace ensure_space(this);
1350
emit_optional_rex_32(target);
1352
emit_modrm(0x4, target);
1356
void Assembler::jmp(const Operand& src) {
1357
EnsureSpace ensure_space(this);
1359
emit_optional_rex_32(src);
1361
emit_operand(0x4, src);
1365
void Assembler::lea(Register dst, const Operand& src) {
1366
EnsureSpace ensure_space(this);
1367
emit_rex_64(dst, src);
1369
emit_operand(dst, src);
1373
void Assembler::leal(Register dst, const Operand& src) {
1374
EnsureSpace ensure_space(this);
1375
emit_optional_rex_32(dst, src);
1377
emit_operand(dst, src);
1381
void Assembler::load_rax(void* value, RelocInfo::Mode mode) {
1382
EnsureSpace ensure_space(this);
1383
emit(0x48); // REX.W
1385
emitq(reinterpret_cast<uintptr_t>(value), mode);
1389
void Assembler::load_rax(ExternalReference ref) {
1390
load_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
1394
void Assembler::leave() {
1395
EnsureSpace ensure_space(this);
1400
void Assembler::movb(Register dst, const Operand& src) {
1401
EnsureSpace ensure_space(this);
1402
if (!dst.is_byte_register()) {
1403
// Register is not one of al, bl, cl, dl. Its encoding needs REX.
1404
emit_rex_32(dst, src);
1406
emit_optional_rex_32(dst, src);
1409
emit_operand(dst, src);
1413
void Assembler::movb(Register dst, Immediate imm) {
1414
EnsureSpace ensure_space(this);
1415
if (!dst.is_byte_register()) {
1418
emit(0xB0 + dst.low_bits());
1423
void Assembler::movb(const Operand& dst, Register src) {
1424
EnsureSpace ensure_space(this);
1425
if (!src.is_byte_register()) {
1426
emit_rex_32(src, dst);
1428
emit_optional_rex_32(src, dst);
1431
emit_operand(src, dst);
1435
void Assembler::movw(const Operand& dst, Register src) {
1436
EnsureSpace ensure_space(this);
1438
emit_optional_rex_32(src, dst);
1440
emit_operand(src, dst);
1444
void Assembler::movl(Register dst, const Operand& src) {
1445
EnsureSpace ensure_space(this);
1446
emit_optional_rex_32(dst, src);
1448
emit_operand(dst, src);
1452
void Assembler::movl(Register dst, Register src) {
1453
EnsureSpace ensure_space(this);
1454
if (src.low_bits() == 4) {
1455
emit_optional_rex_32(src, dst);
1457
emit_modrm(src, dst);
1459
emit_optional_rex_32(dst, src);
1461
emit_modrm(dst, src);
1466
void Assembler::movl(const Operand& dst, Register src) {
1467
EnsureSpace ensure_space(this);
1468
emit_optional_rex_32(src, dst);
1470
emit_operand(src, dst);
1474
void Assembler::movl(const Operand& dst, Immediate value) {
1475
EnsureSpace ensure_space(this);
1476
emit_optional_rex_32(dst);
1478
emit_operand(0x0, dst);
1483
void Assembler::movl(Register dst, Immediate value) {
1484
EnsureSpace ensure_space(this);
1485
emit_optional_rex_32(dst);
1486
emit(0xB8 + dst.low_bits());
1491
void Assembler::movq(Register dst, const Operand& src) {
1492
EnsureSpace ensure_space(this);
1493
emit_rex_64(dst, src);
1495
emit_operand(dst, src);
1499
void Assembler::movq(Register dst, Register src) {
1500
EnsureSpace ensure_space(this);
1501
if (src.low_bits() == 4) {
1502
emit_rex_64(src, dst);
1504
emit_modrm(src, dst);
1506
emit_rex_64(dst, src);
1508
emit_modrm(dst, src);
1513
void Assembler::movq(Register dst, Immediate value) {
1514
EnsureSpace ensure_space(this);
1517
emit_modrm(0x0, dst);
1518
emit(value); // Only 32-bit immediates are possible, not 8-bit immediates.
1522
void Assembler::movq(const Operand& dst, Register src) {
1523
EnsureSpace ensure_space(this);
1524
emit_rex_64(src, dst);
1526
emit_operand(src, dst);
1530
void Assembler::movq(Register dst, void* value, RelocInfo::Mode rmode) {
1531
// This method must not be used with heap object references. The stored
1532
// address is not GC safe. Use the handle version instead.
1533
ASSERT(rmode > RelocInfo::LAST_GCED_ENUM);
1534
EnsureSpace ensure_space(this);
1536
emit(0xB8 | dst.low_bits());
1537
emitq(reinterpret_cast<uintptr_t>(value), rmode);
1541
void Assembler::movq(Register dst, int64_t value, RelocInfo::Mode rmode) {
1542
// Non-relocatable values might not need a 64-bit representation.
1543
if (rmode == RelocInfo::NONE) {
1544
// Sadly, there is no zero or sign extending move for 8-bit immediates.
1545
if (is_int32(value)) {
1546
movq(dst, Immediate(static_cast<int32_t>(value)));
1548
} else if (is_uint32(value)) {
1549
movl(dst, Immediate(static_cast<int32_t>(value)));
1552
// Value cannot be represented by 32 bits, so do a full 64 bit immediate
1555
EnsureSpace ensure_space(this);
1557
emit(0xB8 | dst.low_bits());
1558
emitq(value, rmode);
1562
void Assembler::movq(Register dst, ExternalReference ref) {
1563
int64_t value = reinterpret_cast<int64_t>(ref.address());
1564
movq(dst, value, RelocInfo::EXTERNAL_REFERENCE);
1568
void Assembler::movq(const Operand& dst, Immediate value) {
1569
EnsureSpace ensure_space(this);
1572
emit_operand(0, dst);
1577
// Loads the ip-relative location of the src label into the target location
1578
// (as a 32-bit offset sign extended to 64-bit).
1579
void Assembler::movl(const Operand& dst, Label* src) {
1580
EnsureSpace ensure_space(this);
1581
emit_optional_rex_32(dst);
1583
emit_operand(0, dst);
1584
if (src->is_bound()) {
1585
int offset = src->pos() - pc_offset() - sizeof(int32_t);
1586
ASSERT(offset <= 0);
1588
} else if (src->is_linked()) {
1590
src->link_to(pc_offset() - sizeof(int32_t));
1592
ASSERT(src->is_unused());
1593
int32_t current = pc_offset();
1595
src->link_to(current);
1600
void Assembler::movq(Register dst, Handle<Object> value, RelocInfo::Mode mode) {
1601
// If there is no relocation info, emit the value of the handle efficiently
1602
// (possibly using less that 8 bytes for the value).
1603
if (mode == RelocInfo::NONE) {
1604
// There is no possible reason to store a heap pointer without relocation
1605
// info, so it must be a smi.
1606
ASSERT(value->IsSmi());
1607
movq(dst, reinterpret_cast<int64_t>(*value), RelocInfo::NONE);
1609
EnsureSpace ensure_space(this);
1610
ASSERT(value->IsHeapObject());
1611
ASSERT(!HEAP->InNewSpace(*value));
1613
emit(0xB8 | dst.low_bits());
1614
emitq(reinterpret_cast<uintptr_t>(value.location()), mode);
1619
void Assembler::movsxbq(Register dst, const Operand& src) {
1620
EnsureSpace ensure_space(this);
1621
emit_rex_64(dst, src);
1624
emit_operand(dst, src);
1628
void Assembler::movsxwq(Register dst, const Operand& src) {
1629
EnsureSpace ensure_space(this);
1630
emit_rex_64(dst, src);
1633
emit_operand(dst, src);
1637
void Assembler::movsxlq(Register dst, Register src) {
1638
EnsureSpace ensure_space(this);
1639
emit_rex_64(dst, src);
1641
emit_modrm(dst, src);
1645
void Assembler::movsxlq(Register dst, const Operand& src) {
1646
EnsureSpace ensure_space(this);
1647
emit_rex_64(dst, src);
1649
emit_operand(dst, src);
1653
void Assembler::movzxbq(Register dst, const Operand& src) {
1654
EnsureSpace ensure_space(this);
1655
// 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
1656
// there is no need to make this a 64 bit operation.
1657
emit_optional_rex_32(dst, src);
1660
emit_operand(dst, src);
1664
void Assembler::movzxbl(Register dst, const Operand& src) {
1665
EnsureSpace ensure_space(this);
1666
emit_optional_rex_32(dst, src);
1669
emit_operand(dst, src);
1673
void Assembler::movzxwq(Register dst, const Operand& src) {
1674
EnsureSpace ensure_space(this);
1675
emit_optional_rex_32(dst, src);
1678
emit_operand(dst, src);
1682
void Assembler::movzxwl(Register dst, const Operand& src) {
1683
EnsureSpace ensure_space(this);
1684
emit_optional_rex_32(dst, src);
1687
emit_operand(dst, src);
1691
void Assembler::repmovsb() {
1692
EnsureSpace ensure_space(this);
1698
void Assembler::repmovsw() {
1699
EnsureSpace ensure_space(this);
1700
emit(0x66); // Operand size override.
1706
void Assembler::repmovsl() {
1707
EnsureSpace ensure_space(this);
1713
void Assembler::repmovsq() {
1714
EnsureSpace ensure_space(this);
1721
void Assembler::mul(Register src) {
1722
EnsureSpace ensure_space(this);
1725
emit_modrm(0x4, src);
1729
void Assembler::neg(Register dst) {
1730
EnsureSpace ensure_space(this);
1733
emit_modrm(0x3, dst);
1737
void Assembler::negl(Register dst) {
1738
EnsureSpace ensure_space(this);
1739
emit_optional_rex_32(dst);
1741
emit_modrm(0x3, dst);
1745
void Assembler::neg(const Operand& dst) {
1746
EnsureSpace ensure_space(this);
1749
emit_operand(3, dst);
1753
void Assembler::nop() {
1754
EnsureSpace ensure_space(this);
1759
void Assembler::not_(Register dst) {
1760
EnsureSpace ensure_space(this);
1763
emit_modrm(0x2, dst);
1767
void Assembler::not_(const Operand& dst) {
1768
EnsureSpace ensure_space(this);
1771
emit_operand(2, dst);
1775
void Assembler::notl(Register dst) {
1776
EnsureSpace ensure_space(this);
1777
emit_optional_rex_32(dst);
1779
emit_modrm(0x2, dst);
1783
void Assembler::Nop(int n) {
1784
// The recommended muti-byte sequences of NOP instructions from the Intel 64
1785
// and IA-32 Architectures Software Developer's Manual.
1787
// Length Assembly Byte Sequence
1788
// 2 bytes 66 NOP 66 90H
1789
// 3 bytes NOP DWORD ptr [EAX] 0F 1F 00H
1790
// 4 bytes NOP DWORD ptr [EAX + 00H] 0F 1F 40 00H
1791
// 5 bytes NOP DWORD ptr [EAX + EAX*1 + 00H] 0F 1F 44 00 00H
1792
// 6 bytes 66 NOP DWORD ptr [EAX + EAX*1 + 00H] 66 0F 1F 44 00 00H
1793
// 7 bytes NOP DWORD ptr [EAX + 00000000H] 0F 1F 80 00 00 00 00H
1794
// 8 bytes NOP DWORD ptr [EAX + EAX*1 + 00000000H] 0F 1F 84 00 00 00 00 00H
1795
// 9 bytes 66 NOP DWORD ptr [EAX + EAX*1 + 66 0F 1F 84 00 00 00 00
1798
EnsureSpace ensure_space(this);
1860
void Assembler::pop(Register dst) {
1861
EnsureSpace ensure_space(this);
1862
emit_optional_rex_32(dst);
1863
emit(0x58 | dst.low_bits());
1867
void Assembler::pop(const Operand& dst) {
1868
EnsureSpace ensure_space(this);
1869
emit_optional_rex_32(dst);
1871
emit_operand(0, dst);
1875
void Assembler::popfq() {
1876
EnsureSpace ensure_space(this);
1881
void Assembler::push(Register src) {
1882
EnsureSpace ensure_space(this);
1883
emit_optional_rex_32(src);
1884
emit(0x50 | src.low_bits());
1888
void Assembler::push(const Operand& src) {
1889
EnsureSpace ensure_space(this);
1890
emit_optional_rex_32(src);
1892
emit_operand(6, src);
1896
void Assembler::push(Immediate value) {
1897
EnsureSpace ensure_space(this);
1898
if (is_int8(value.value_)) {
1900
emit(value.value_); // Emit low byte of value.
1903
emitl(value.value_);
1908
void Assembler::push_imm32(int32_t imm32) {
1909
EnsureSpace ensure_space(this);
1915
void Assembler::pushfq() {
1916
EnsureSpace ensure_space(this);
1921
void Assembler::rdtsc() {
1922
EnsureSpace ensure_space(this);
1928
void Assembler::ret(int imm16) {
1929
EnsureSpace ensure_space(this);
1930
ASSERT(is_uint16(imm16));
1936
emit((imm16 >> 8) & 0xFF);
1941
void Assembler::setcc(Condition cc, Register reg) {
1942
if (cc > last_condition) {
1943
movb(reg, Immediate(cc == always ? 1 : 0));
1946
EnsureSpace ensure_space(this);
1947
ASSERT(is_uint4(cc));
1948
if (!reg.is_byte_register()) { // Use x64 byte registers, where different.
1953
emit_modrm(0x0, reg);
1957
void Assembler::shld(Register dst, Register src) {
1958
EnsureSpace ensure_space(this);
1959
emit_rex_64(src, dst);
1962
emit_modrm(src, dst);
1966
void Assembler::shrd(Register dst, Register src) {
1967
EnsureSpace ensure_space(this);
1968
emit_rex_64(src, dst);
1971
emit_modrm(src, dst);
1975
void Assembler::xchg(Register dst, Register src) {
1976
EnsureSpace ensure_space(this);
1977
if (src.is(rax) || dst.is(rax)) { // Single-byte encoding
1978
Register other = src.is(rax) ? dst : src;
1980
emit(0x90 | other.low_bits());
1981
} else if (dst.low_bits() == 4) {
1982
emit_rex_64(dst, src);
1984
emit_modrm(dst, src);
1986
emit_rex_64(src, dst);
1988
emit_modrm(src, dst);
1993
void Assembler::store_rax(void* dst, RelocInfo::Mode mode) {
1994
EnsureSpace ensure_space(this);
1995
emit(0x48); // REX.W
1997
emitq(reinterpret_cast<uintptr_t>(dst), mode);
2001
void Assembler::store_rax(ExternalReference ref) {
2002
store_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
2006
void Assembler::testb(Register dst, Register src) {
2007
EnsureSpace ensure_space(this);
2008
if (src.low_bits() == 4) {
2009
emit_rex_32(src, dst);
2011
emit_modrm(src, dst);
2013
if (!dst.is_byte_register() || !src.is_byte_register()) {
2014
// Register is not one of al, bl, cl, dl. Its encoding needs REX.
2015
emit_rex_32(dst, src);
2018
emit_modrm(dst, src);
2023
void Assembler::testb(Register reg, Immediate mask) {
2024
ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
2025
EnsureSpace ensure_space(this);
2028
emit(mask.value_); // Low byte emitted.
2030
if (!reg.is_byte_register()) {
2031
// Register is not one of al, bl, cl, dl. Its encoding needs REX.
2035
emit_modrm(0x0, reg);
2036
emit(mask.value_); // Low byte emitted.
2041
void Assembler::testb(const Operand& op, Immediate mask) {
2042
ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
2043
EnsureSpace ensure_space(this);
2044
emit_optional_rex_32(rax, op);
2046
emit_operand(rax, op); // Operation code 0
2047
emit(mask.value_); // Low byte emitted.
2051
void Assembler::testb(const Operand& op, Register reg) {
2052
EnsureSpace ensure_space(this);
2053
if (!reg.is_byte_register()) {
2054
// Register is not one of al, bl, cl, dl. Its encoding needs REX.
2055
emit_rex_32(reg, op);
2057
emit_optional_rex_32(reg, op);
2060
emit_operand(reg, op);
2064
void Assembler::testl(Register dst, Register src) {
2065
EnsureSpace ensure_space(this);
2066
if (src.low_bits() == 4) {
2067
emit_optional_rex_32(src, dst);
2069
emit_modrm(src, dst);
2071
emit_optional_rex_32(dst, src);
2073
emit_modrm(dst, src);
2078
void Assembler::testl(Register reg, Immediate mask) {
2079
// testl with a mask that fits in the low byte is exactly testb.
2080
if (is_uint8(mask.value_)) {
2084
EnsureSpace ensure_space(this);
2089
emit_optional_rex_32(rax, reg);
2091
emit_modrm(0x0, reg);
2097
void Assembler::testl(const Operand& op, Immediate mask) {
2098
// testl with a mask that fits in the low byte is exactly testb.
2099
if (is_uint8(mask.value_)) {
2103
EnsureSpace ensure_space(this);
2104
emit_optional_rex_32(rax, op);
2106
emit_operand(rax, op); // Operation code 0
2111
void Assembler::testq(const Operand& op, Register reg) {
2112
EnsureSpace ensure_space(this);
2113
emit_rex_64(reg, op);
2115
emit_operand(reg, op);
2119
void Assembler::testq(Register dst, Register src) {
2120
EnsureSpace ensure_space(this);
2121
if (src.low_bits() == 4) {
2122
emit_rex_64(src, dst);
2124
emit_modrm(src, dst);
2126
emit_rex_64(dst, src);
2128
emit_modrm(dst, src);
2133
void Assembler::testq(Register dst, Immediate mask) {
2134
EnsureSpace ensure_space(this);
2148
// FPU instructions.
2151
void Assembler::fld(int i) {
2152
EnsureSpace ensure_space(this);
2153
emit_farith(0xD9, 0xC0, i);
2157
void Assembler::fld1() {
2158
EnsureSpace ensure_space(this);
2164
void Assembler::fldz() {
2165
EnsureSpace ensure_space(this);
2171
void Assembler::fldpi() {
2172
EnsureSpace ensure_space(this);
2178
void Assembler::fldln2() {
2179
EnsureSpace ensure_space(this);
2185
void Assembler::fld_s(const Operand& adr) {
2186
EnsureSpace ensure_space(this);
2187
emit_optional_rex_32(adr);
2189
emit_operand(0, adr);
2193
void Assembler::fld_d(const Operand& adr) {
2194
EnsureSpace ensure_space(this);
2195
emit_optional_rex_32(adr);
2197
emit_operand(0, adr);
2201
void Assembler::fstp_s(const Operand& adr) {
2202
EnsureSpace ensure_space(this);
2203
emit_optional_rex_32(adr);
2205
emit_operand(3, adr);
2209
void Assembler::fstp_d(const Operand& adr) {
2210
EnsureSpace ensure_space(this);
2211
emit_optional_rex_32(adr);
2213
emit_operand(3, adr);
2217
void Assembler::fstp(int index) {
2218
ASSERT(is_uint3(index));
2219
EnsureSpace ensure_space(this);
2220
emit_farith(0xDD, 0xD8, index);
2224
void Assembler::fild_s(const Operand& adr) {
2225
EnsureSpace ensure_space(this);
2226
emit_optional_rex_32(adr);
2228
emit_operand(0, adr);
2232
void Assembler::fild_d(const Operand& adr) {
2233
EnsureSpace ensure_space(this);
2234
emit_optional_rex_32(adr);
2236
emit_operand(5, adr);
2240
void Assembler::fistp_s(const Operand& adr) {
2241
EnsureSpace ensure_space(this);
2242
emit_optional_rex_32(adr);
2244
emit_operand(3, adr);
2248
void Assembler::fisttp_s(const Operand& adr) {
2249
ASSERT(CpuFeatures::IsEnabled(SSE3));
2250
EnsureSpace ensure_space(this);
2251
emit_optional_rex_32(adr);
2253
emit_operand(1, adr);
2257
void Assembler::fisttp_d(const Operand& adr) {
2258
ASSERT(CpuFeatures::IsEnabled(SSE3));
2259
EnsureSpace ensure_space(this);
2260
emit_optional_rex_32(adr);
2262
emit_operand(1, adr);
2266
void Assembler::fist_s(const Operand& adr) {
2267
EnsureSpace ensure_space(this);
2268
emit_optional_rex_32(adr);
2270
emit_operand(2, adr);
2274
void Assembler::fistp_d(const Operand& adr) {
2275
EnsureSpace ensure_space(this);
2276
emit_optional_rex_32(adr);
2278
emit_operand(7, adr);
2282
void Assembler::fabs() {
2283
EnsureSpace ensure_space(this);
2289
void Assembler::fchs() {
2290
EnsureSpace ensure_space(this);
2296
void Assembler::fcos() {
2297
EnsureSpace ensure_space(this);
2303
void Assembler::fsin() {
2304
EnsureSpace ensure_space(this);
2310
void Assembler::fptan() {
2311
EnsureSpace ensure_space(this);
2317
void Assembler::fyl2x() {
2318
EnsureSpace ensure_space(this);
2324
void Assembler::f2xm1() {
2325
EnsureSpace ensure_space(this);
2331
void Assembler::fscale() {
2332
EnsureSpace ensure_space(this);
2338
void Assembler::fninit() {
2339
EnsureSpace ensure_space(this);
2345
void Assembler::fadd(int i) {
2346
EnsureSpace ensure_space(this);
2347
emit_farith(0xDC, 0xC0, i);
2351
void Assembler::fsub(int i) {
2352
EnsureSpace ensure_space(this);
2353
emit_farith(0xDC, 0xE8, i);
2357
void Assembler::fisub_s(const Operand& adr) {
2358
EnsureSpace ensure_space(this);
2359
emit_optional_rex_32(adr);
2361
emit_operand(4, adr);
2365
void Assembler::fmul(int i) {
2366
EnsureSpace ensure_space(this);
2367
emit_farith(0xDC, 0xC8, i);
2371
void Assembler::fdiv(int i) {
2372
EnsureSpace ensure_space(this);
2373
emit_farith(0xDC, 0xF8, i);
2377
void Assembler::faddp(int i) {
2378
EnsureSpace ensure_space(this);
2379
emit_farith(0xDE, 0xC0, i);
2383
void Assembler::fsubp(int i) {
2384
EnsureSpace ensure_space(this);
2385
emit_farith(0xDE, 0xE8, i);
2389
void Assembler::fsubrp(int i) {
2390
EnsureSpace ensure_space(this);
2391
emit_farith(0xDE, 0xE0, i);
2395
void Assembler::fmulp(int i) {
2396
EnsureSpace ensure_space(this);
2397
emit_farith(0xDE, 0xC8, i);
2401
void Assembler::fdivp(int i) {
2402
EnsureSpace ensure_space(this);
2403
emit_farith(0xDE, 0xF8, i);
2407
void Assembler::fprem() {
2408
EnsureSpace ensure_space(this);
2414
void Assembler::fprem1() {
2415
EnsureSpace ensure_space(this);
2421
void Assembler::fxch(int i) {
2422
EnsureSpace ensure_space(this);
2423
emit_farith(0xD9, 0xC8, i);
2427
void Assembler::fincstp() {
2428
EnsureSpace ensure_space(this);
2434
void Assembler::ffree(int i) {
2435
EnsureSpace ensure_space(this);
2436
emit_farith(0xDD, 0xC0, i);
2440
void Assembler::ftst() {
2441
EnsureSpace ensure_space(this);
2447
void Assembler::fucomp(int i) {
2448
EnsureSpace ensure_space(this);
2449
emit_farith(0xDD, 0xE8, i);
2453
void Assembler::fucompp() {
2454
EnsureSpace ensure_space(this);
2460
void Assembler::fucomi(int i) {
2461
EnsureSpace ensure_space(this);
2467
void Assembler::fucomip() {
2468
EnsureSpace ensure_space(this);
2474
void Assembler::fcompp() {
2475
EnsureSpace ensure_space(this);
2481
void Assembler::fnstsw_ax() {
2482
EnsureSpace ensure_space(this);
2488
void Assembler::fwait() {
2489
EnsureSpace ensure_space(this);
2494
void Assembler::frndint() {
2495
EnsureSpace ensure_space(this);
2501
void Assembler::fnclex() {
2502
EnsureSpace ensure_space(this);
2508
void Assembler::sahf() {
2509
// TODO(X64): Test for presence. Not all 64-bit intel CPU's have sahf
2510
// in 64-bit mode. Test CpuID.
2511
EnsureSpace ensure_space(this);
2516
void Assembler::emit_farith(int b1, int b2, int i) {
2517
ASSERT(is_uint8(b1) && is_uint8(b2)); // wrong opcode
2518
ASSERT(is_uint3(i)); // illegal stack offset
2523
// SSE 2 operations.
2525
void Assembler::movd(XMMRegister dst, Register src) {
2526
EnsureSpace ensure_space(this);
2528
emit_optional_rex_32(dst, src);
2531
emit_sse_operand(dst, src);
2535
void Assembler::movd(Register dst, XMMRegister src) {
2536
EnsureSpace ensure_space(this);
2538
emit_optional_rex_32(src, dst);
2541
emit_sse_operand(src, dst);
2545
void Assembler::movq(XMMRegister dst, Register src) {
2546
EnsureSpace ensure_space(this);
2548
emit_rex_64(dst, src);
2551
emit_sse_operand(dst, src);
2555
void Assembler::movq(Register dst, XMMRegister src) {
2556
EnsureSpace ensure_space(this);
2558
emit_rex_64(src, dst);
2561
emit_sse_operand(src, dst);
2565
void Assembler::movq(XMMRegister dst, XMMRegister src) {
2566
EnsureSpace ensure_space(this);
2567
if (dst.low_bits() == 4) {
2568
// Avoid unnecessary SIB byte.
2570
emit_optional_rex_32(dst, src);
2573
emit_sse_operand(dst, src);
2576
emit_optional_rex_32(src, dst);
2579
emit_sse_operand(src, dst);
2583
void Assembler::movdqa(const Operand& dst, XMMRegister src) {
2584
EnsureSpace ensure_space(this);
2586
emit_rex_64(src, dst);
2589
emit_sse_operand(src, dst);
2593
void Assembler::movdqa(XMMRegister dst, const Operand& src) {
2594
EnsureSpace ensure_space(this);
2596
emit_rex_64(dst, src);
2599
emit_sse_operand(dst, src);
2603
void Assembler::extractps(Register dst, XMMRegister src, byte imm8) {
2604
ASSERT(CpuFeatures::IsSupported(SSE4_1));
2605
ASSERT(is_uint8(imm8));
2606
EnsureSpace ensure_space(this);
2608
emit_optional_rex_32(dst, src);
2612
emit_sse_operand(dst, src);
2617
void Assembler::movsd(const Operand& dst, XMMRegister src) {
2618
EnsureSpace ensure_space(this);
2619
emit(0xF2); // double
2620
emit_optional_rex_32(src, dst);
2622
emit(0x11); // store
2623
emit_sse_operand(src, dst);
2627
void Assembler::movsd(XMMRegister dst, XMMRegister src) {
2628
EnsureSpace ensure_space(this);
2629
emit(0xF2); // double
2630
emit_optional_rex_32(dst, src);
2633
emit_sse_operand(dst, src);
2637
void Assembler::movsd(XMMRegister dst, const Operand& src) {
2638
EnsureSpace ensure_space(this);
2639
emit(0xF2); // double
2640
emit_optional_rex_32(dst, src);
2643
emit_sse_operand(dst, src);
2647
void Assembler::movaps(XMMRegister dst, XMMRegister src) {
2648
EnsureSpace ensure_space(this);
2649
if (src.low_bits() == 4) {
2650
// Try to avoid an unnecessary SIB byte.
2651
emit_optional_rex_32(src, dst);
2654
emit_sse_operand(src, dst);
2656
emit_optional_rex_32(dst, src);
2659
emit_sse_operand(dst, src);
2664
void Assembler::movapd(XMMRegister dst, XMMRegister src) {
2665
EnsureSpace ensure_space(this);
2666
if (src.low_bits() == 4) {
2667
// Try to avoid an unnecessary SIB byte.
2669
emit_optional_rex_32(src, dst);
2672
emit_sse_operand(src, dst);
2675
emit_optional_rex_32(dst, src);
2678
emit_sse_operand(dst, src);
2683
void Assembler::movss(XMMRegister dst, const Operand& src) {
2684
EnsureSpace ensure_space(this);
2685
emit(0xF3); // single
2686
emit_optional_rex_32(dst, src);
2689
emit_sse_operand(dst, src);
2693
void Assembler::movss(const Operand& src, XMMRegister dst) {
2694
EnsureSpace ensure_space(this);
2695
emit(0xF3); // single
2696
emit_optional_rex_32(dst, src);
2698
emit(0x11); // store
2699
emit_sse_operand(dst, src);
2703
void Assembler::cvttss2si(Register dst, const Operand& src) {
2704
EnsureSpace ensure_space(this);
2706
emit_optional_rex_32(dst, src);
2709
emit_operand(dst, src);
2713
void Assembler::cvttss2si(Register dst, XMMRegister src) {
2714
EnsureSpace ensure_space(this);
2716
emit_optional_rex_32(dst, src);
2719
emit_sse_operand(dst, src);
2723
void Assembler::cvttsd2si(Register dst, const Operand& src) {
2724
EnsureSpace ensure_space(this);
2726
emit_optional_rex_32(dst, src);
2729
emit_operand(dst, src);
2733
void Assembler::cvttsd2si(Register dst, XMMRegister src) {
2734
EnsureSpace ensure_space(this);
2736
emit_optional_rex_32(dst, src);
2739
emit_sse_operand(dst, src);
2743
void Assembler::cvttsd2siq(Register dst, XMMRegister src) {
2744
EnsureSpace ensure_space(this);
2746
emit_rex_64(dst, src);
2749
emit_sse_operand(dst, src);
2753
void Assembler::cvtlsi2sd(XMMRegister dst, const Operand& src) {
2754
EnsureSpace ensure_space(this);
2756
emit_optional_rex_32(dst, src);
2759
emit_sse_operand(dst, src);
2763
void Assembler::cvtlsi2sd(XMMRegister dst, Register src) {
2764
EnsureSpace ensure_space(this);
2766
emit_optional_rex_32(dst, src);
2769
emit_sse_operand(dst, src);
2773
void Assembler::cvtlsi2ss(XMMRegister dst, Register src) {
2774
EnsureSpace ensure_space(this);
2776
emit_optional_rex_32(dst, src);
2779
emit_sse_operand(dst, src);
2783
void Assembler::cvtqsi2sd(XMMRegister dst, Register src) {
2784
EnsureSpace ensure_space(this);
2786
emit_rex_64(dst, src);
2789
emit_sse_operand(dst, src);
2793
void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) {
2794
EnsureSpace ensure_space(this);
2796
emit_optional_rex_32(dst, src);
2799
emit_sse_operand(dst, src);
2803
void Assembler::cvtss2sd(XMMRegister dst, const Operand& src) {
2804
EnsureSpace ensure_space(this);
2806
emit_optional_rex_32(dst, src);
2809
emit_sse_operand(dst, src);
2813
void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) {
2814
EnsureSpace ensure_space(this);
2816
emit_optional_rex_32(dst, src);
2819
emit_sse_operand(dst, src);
2823
void Assembler::cvtsd2si(Register dst, XMMRegister src) {
2824
EnsureSpace ensure_space(this);
2826
emit_optional_rex_32(dst, src);
2829
emit_sse_operand(dst, src);
2833
void Assembler::cvtsd2siq(Register dst, XMMRegister src) {
2834
EnsureSpace ensure_space(this);
2836
emit_rex_64(dst, src);
2839
emit_sse_operand(dst, src);
2843
void Assembler::addsd(XMMRegister dst, XMMRegister src) {
2844
EnsureSpace ensure_space(this);
2846
emit_optional_rex_32(dst, src);
2849
emit_sse_operand(dst, src);
2853
void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
2854
EnsureSpace ensure_space(this);
2856
emit_optional_rex_32(dst, src);
2859
emit_sse_operand(dst, src);
2863
void Assembler::subsd(XMMRegister dst, XMMRegister src) {
2864
EnsureSpace ensure_space(this);
2866
emit_optional_rex_32(dst, src);
2869
emit_sse_operand(dst, src);
2873
void Assembler::divsd(XMMRegister dst, XMMRegister src) {
2874
EnsureSpace ensure_space(this);
2876
emit_optional_rex_32(dst, src);
2879
emit_sse_operand(dst, src);
2883
void Assembler::andpd(XMMRegister dst, XMMRegister src) {
2884
EnsureSpace ensure_space(this);
2886
emit_optional_rex_32(dst, src);
2889
emit_sse_operand(dst, src);
2893
void Assembler::orpd(XMMRegister dst, XMMRegister src) {
2894
EnsureSpace ensure_space(this);
2896
emit_optional_rex_32(dst, src);
2899
emit_sse_operand(dst, src);
2903
void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
2904
EnsureSpace ensure_space(this);
2906
emit_optional_rex_32(dst, src);
2909
emit_sse_operand(dst, src);
2913
void Assembler::xorps(XMMRegister dst, XMMRegister src) {
2914
EnsureSpace ensure_space(this);
2915
emit_optional_rex_32(dst, src);
2918
emit_sse_operand(dst, src);
2922
void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) {
2923
EnsureSpace ensure_space(this);
2925
emit_optional_rex_32(dst, src);
2928
emit_sse_operand(dst, src);
2932
void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
2933
EnsureSpace ensure_space(this);
2935
emit_optional_rex_32(dst, src);
2938
emit_sse_operand(dst, src);
2942
void Assembler::ucomisd(XMMRegister dst, const Operand& src) {
2943
EnsureSpace ensure_space(this);
2945
emit_optional_rex_32(dst, src);
2948
emit_sse_operand(dst, src);
2952
void Assembler::roundsd(XMMRegister dst, XMMRegister src,
2953
Assembler::RoundingMode mode) {
2954
ASSERT(CpuFeatures::IsEnabled(SSE4_1));
2955
EnsureSpace ensure_space(this);
2957
emit_optional_rex_32(dst, src);
2961
emit_sse_operand(dst, src);
2962
// Mask precision exeption.
2963
emit(static_cast<byte>(mode) | 0x8);
2967
void Assembler::movmskpd(Register dst, XMMRegister src) {
2968
EnsureSpace ensure_space(this);
2970
emit_optional_rex_32(dst, src);
2973
emit_sse_operand(dst, src);
2977
void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) {
2978
Register ireg = { reg.code() };
2979
emit_operand(ireg, adr);
2983
void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) {
2984
emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
2987
void Assembler::emit_sse_operand(XMMRegister dst, Register src) {
2988
emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
2991
void Assembler::emit_sse_operand(Register dst, XMMRegister src) {
2992
emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
2996
void Assembler::db(uint8_t data) {
2997
EnsureSpace ensure_space(this);
3002
void Assembler::dd(uint32_t data) {
3003
EnsureSpace ensure_space(this);
3008
// Relocation information implementations.
3010
void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
3011
ASSERT(rmode != RelocInfo::NONE);
3012
// Don't record external references unless the heap will be serialized.
3013
if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
3015
if (!Serializer::enabled()) {
3016
Serializer::TooLateToEnableNow();
3019
if (!Serializer::enabled() && !emit_debug_code()) {
3023
RelocInfo rinfo(pc_, rmode, data, NULL);
3024
reloc_info_writer.Write(&rinfo);
3027
void Assembler::RecordJSReturn() {
3028
positions_recorder()->WriteRecordedPositions();
3029
EnsureSpace ensure_space(this);
3030
RecordRelocInfo(RelocInfo::JS_RETURN);
3034
void Assembler::RecordDebugBreakSlot() {
3035
positions_recorder()->WriteRecordedPositions();
3036
EnsureSpace ensure_space(this);
3037
RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT);
3041
void Assembler::RecordComment(const char* msg, bool force) {
3042
if (FLAG_code_comments || force) {
3043
EnsureSpace ensure_space(this);
3044
RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
3049
const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
3050
1 << RelocInfo::INTERNAL_REFERENCE;
3053
bool RelocInfo::IsCodedSpecially() {
3054
// The deserializer needs to know whether a pointer is specially coded. Being
3055
// specially coded on x64 means that it is a relative 32 bit address, as used
3056
// by branch instructions.
3057
return (1 << rmode_) & kApplyMask;
3060
} } // namespace v8::internal
3062
#endif // V8_TARGET_ARCH_X64