#undef FIELD_OFFSET // Fix conflict with winnt.h.

// FIELD_OFFSET: Like the C++ offsetof macro, but you can use it with classes.
// The magic number 0x4000 is insignificant. We use it to avoid using NULL, since
// NULL can cause compiler problems, especially in cases of multiple inheritance.
#define FIELD_OFFSET(class, field) (reinterpret_cast<ptrdiff_t>(&(reinterpret_cast<class*>(0x4000)->field)) - 0x4000)
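
// For example, FIELD_OFFSET(JSCell, m_structure) evaluates to the byte offset
// of m_structure within JSCell; plain offsetof would have undefined behavior
// here because JSCell is not a POD type.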

/* Deprecated: Please use JITStubCall instead. */

// puts an arg onto the stack, as an arg to a context threaded function.
ALWAYS_INLINE void JIT::emitPutJITStubArg(RegisterID src, unsigned argumentNumber)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + 1;
    poke(src, argumentStackOffset);
}
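
// Note on the stub-argument helpers: offsets are measured in machine words, so
// one JSValue contributes sizeof(JSValue) / sizeof(void*) slots (two words in a
// 32-bit build). Argument slots start at index 1 because the word at index 0
// corresponds to the JITStackFrame's unused padding slot rather than to the
// argument area.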

/* Deprecated: Please use JITStubCall instead. */
ALWAYS_INLINE void JIT::emitPutJITStubArgConstant(unsigned value, unsigned argumentNumber)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + 1;
    poke(Imm32(value), argumentStackOffset);
}

/* Deprecated: Please use JITStubCall instead. */
ALWAYS_INLINE void JIT::emitPutJITStubArgConstant(void* value, unsigned argumentNumber)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + 1;
    poke(ImmPtr(value), argumentStackOffset);
}

/* Deprecated: Please use JITStubCall instead. */
ALWAYS_INLINE void JIT::emitGetJITStubArg(unsigned argumentNumber, RegisterID dst)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + 1;
    peek(dst, argumentStackOffset);
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
}

ALWAYS_INLINE JSValue JIT::getConstantOperand(unsigned src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}

ALWAYS_INLINE void JIT::emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(from, Address(callFrameRegister, entry * sizeof(Register)));
}

ALWAYS_INLINE void JIT::emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(ImmPtr(value), Address(callFrameRegister, entry * sizeof(Register)));
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    loadPtr(Address(from, entry * sizeof(Register)), to);
#if !USE(JSVALUE32_64)
    killLastResultRegister();
#endif
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load32(Address(from, entry * sizeof(Register)), to);
#if !USE(JSVALUE32_64)
    killLastResultRegister();
#endif
}

ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeIndex, function.executableAddress()));
    return nakedCall;
}
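
// The target is not bound at emit time: the CallRecord remembers the call site
// and the stub address so the link phase can patch the near call once the code
// has been copied to its final location.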

#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL

ALWAYS_INLINE void JIT::beginUninterruptedSequence(int insnSpace, int constSpace)
{
#if PLATFORM(ARM_TRADITIONAL)
#ifndef NDEBUG
    // Ensure the label after the sequence can also fit.
    insnSpace += sizeof(ARMWord);
    constSpace += sizeof(uint64_t);
#endif

    ensureSpace(insnSpace, constSpace);
#endif

#ifndef NDEBUG
    m_uninterruptedInstructionSequenceBegin = label();
    m_uninterruptedConstantSequenceBegin = sizeOfConstantPool();
#endif
}

ALWAYS_INLINE void JIT::endUninterruptedSequence(int insnSpace, int constSpace)
{
    ASSERT(differenceBetween(m_uninterruptedInstructionSequenceBegin, label()) == insnSpace);
    ASSERT(sizeOfConstantPool() - m_uninterruptedConstantSequenceBegin == constSpace);
}

#endif
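
// An uninterrupted sequence is a run of instructions whose layout must be
// exactly as emitted, typically because it will be patched in place later; the
// space reservation above keeps the assembler from flushing a constant pool
// into the middle of it, and the ASSERTs check that the sequence used exactly
// the space it reserved.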

#if PLATFORM(ARM_THUMB2)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    move(linkRegister, reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    move(reg, linkRegister);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    loadPtr(address, linkRegister);
}

#else // PLATFORM(X86) || PLATFORM(X86_64) || PLATFORM(ARM_TRADITIONAL)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    pop(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    push(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    push(address);
}

#endif

#if USE(JIT_STUB_ARGUMENT_VA_LIST)
ALWAYS_INLINE void JIT::restoreArgumentReference()
{
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
}

ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline() {}
#else
ALWAYS_INLINE void JIT::restoreArgumentReference()
{
    move(stackPointerRegister, firstArgumentRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
#if PLATFORM(ARM_TRADITIONAL)
    move(ctiReturnRegister, ARMRegisters::lr);
#endif
}

ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline()
{
#if PLATFORM(X86)
    // Within a trampoline the return address will be on the stack at this point.
    addPtr(Imm32(sizeof(void*)), stackPointerRegister, firstArgumentRegister);
#elif PLATFORM(ARM_THUMB2)
    move(stackPointerRegister, firstArgumentRegister);
#endif
    // In the trampoline on x86-64, the first argument register is not overwritten.
}
#endif

ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchPtr(NotEqual, Address(reg, OBJECT_OFFSETOF(JSCell, m_structure)), ImmPtr(structure));
}
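
// This structure check is the guard used by the property-access inline caches:
// if the cell's Structure differs from the one baked into the code, the branch
// is taken to a slow case that re-resolves the access.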

ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        linkSlowCase(iter);
}

ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeIndex));
}

ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    const JumpList::JumpVector& jumpVector = jumpList.jumps();
    size_t size = jumpVector.size();
    for (size_t i = 0; i < size; ++i)
        m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeIndex));
}

ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeIndex + relativeOffset));
}

ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    jump.linkTo(m_labels[m_bytecodeIndex + relativeOffset], this);
}

#if ENABLE(SAMPLING_FLAGS)
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(Imm32(1u << (flag - 1)), AbsoluteAddress(&SamplingFlags::s_flags));
}

ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(Imm32(~(1u << (flag - 1))), AbsoluteAddress(&SamplingFlags::s_flags));
}
#endif

#if ENABLE(SAMPLING_COUNTERS)
ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, uint32_t count)
{
#if PLATFORM(X86_64) // Or any other 64-bit platform.
    addPtr(Imm32(count), AbsoluteAddress(&counter.m_counter));
#elif PLATFORM(X86) // Or any other little-endian 32-bit platform.
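    // The counter is 64 bits wide, so a 32-bit target must update it in two
    // steps: add the count to the low word, then fold the carry into the high
    // word. The hiWord address below assumes the little-endian layout noted in
    // the #elif above.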
    intptr_t hiWord = reinterpret_cast<intptr_t>(&counter.m_counter) + sizeof(int32_t);
    add32(Imm32(count), AbsoluteAddress(&counter.m_counter));
    addWithCarry32(Imm32(0), AbsoluteAddress(reinterpret_cast<void*>(hiWord)));
#else
#error "SAMPLING_COUNTERS not implemented on this platform."
#endif
}
#endif

#if ENABLE(OPCODE_SAMPLING)
#if PLATFORM(X86_64)
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(ImmPtr(m_interpreter->sampler()->sampleSlot()), X86Registers::ecx);
    storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
#endif
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
#if PLATFORM(X86_64)
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(ImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86Registers::ecx);
    storePtr(ImmPtr(codeBlock), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(ImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
#endif
#endif

inline JIT::Address JIT::addressFor(unsigned index, RegisterID base)
{
    return Address(base, (index * sizeof(Register)));
}

#if USE(JSVALUE32_64)

inline JIT::Address JIT::tagFor(unsigned index, RegisterID base)
{
    return Address(base, (index * sizeof(Register)) + OBJECT_OFFSETOF(JSValue, u.asBits.tag));
}

inline JIT::Address JIT::payloadFor(unsigned index, RegisterID base)
{
    return Address(base, (index * sizeof(Register)) + OBJECT_OFFSETOF(JSValue, u.asBits.payload));
}
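
// In the JSVALUE32_64 representation a JSValue occupies two machine words: a
// 32-bit tag (e.g. JSValue::Int32Tag or JSValue::CellTag) and a 32-bit payload.
// tagFor/payloadFor address the two halves of a virtual register in the frame.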

inline void JIT::emitLoadTag(unsigned index, RegisterID tag)
{
    RegisterID mappedTag;
    if (getMappedTag(index, mappedTag)) {
        move(mappedTag, tag);
        unmap(tag);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).tag()), tag);
        unmap(tag);
        return;
    }

    load32(tagFor(index), tag);
    unmap(tag);
}

inline void JIT::emitLoadPayload(unsigned index, RegisterID payload)
{
    RegisterID mappedPayload;
    if (getMappedPayload(index, mappedPayload)) {
        move(mappedPayload, payload);
        unmap(payload);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).payload()), payload);
        unmap(payload);
        return;
    }

    load32(payloadFor(index), payload);
    unmap(payload);
}

inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}

inline void JIT::emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    ASSERT(tag != payload);

    if (base == callFrameRegister) {
        ASSERT(payload != base);
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // avoid stomping base
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}

inline void JIT::emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2)
{
    if (isMapped(index1)) {
        // Load the mapped operand first, before the other load can clobber the
        // registers that hold the mapping.
        emitLoad(index1, tag1, payload1);
        emitLoad(index2, tag2, payload2);
        return;
    }
    emitLoad(index2, tag2, payload2);
    emitLoad(index1, tag1, payload1);
}

inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
    } else
        convertInt32ToDouble(payloadFor(index), value);
}

inline void JIT::emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}

inline void JIT::emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(Imm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreInt32(unsigned index, Imm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(Imm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(Imm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreBool(unsigned index, RegisterID tag, bool indexIsBool)
{
    if (!indexIsBool)
        store32(Imm32(0), payloadFor(index, callFrameRegister));
    store32(tag, tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreDouble(unsigned index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}

inline void JIT::emitStore(unsigned index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    emitStore(dst, jsUndefined());
}

inline bool JIT::isLabeled(unsigned bytecodeIndex)
{
    for (size_t numberOfJumpTargets = m_codeBlock->numberOfJumpTargets(); m_jumpTargetIndex != numberOfJumpTargets; ++m_jumpTargetIndex) {
        unsigned jumpTarget = m_codeBlock->jumpTarget(m_jumpTargetIndex);
        if (jumpTarget == bytecodeIndex)
            return true;
        if (jumpTarget > bytecodeIndex)
            return false;
    }
    return false;
}

inline void JIT::map(unsigned bytecodeIndex, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload)
{
    if (isLabeled(bytecodeIndex))
        return;

    m_mappedBytecodeIndex = bytecodeIndex;
    m_mappedVirtualRegisterIndex = virtualRegisterIndex;
    m_mappedTag = tag;
    m_mappedPayload = payload;
}

inline void JIT::unmap(RegisterID registerID)
{
    if (m_mappedTag == registerID)
        m_mappedTag = (RegisterID)-1;
    else if (m_mappedPayload == registerID)
        m_mappedPayload = (RegisterID)-1;
}

inline void JIT::unmap()
{
    m_mappedBytecodeIndex = (unsigned)-1;
    m_mappedVirtualRegisterIndex = (unsigned)-1;
    m_mappedTag = (RegisterID)-1;
    m_mappedPayload = (RegisterID)-1;
}
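
// The mapping above is a one-entry cache: it records that the tag and payload
// of one virtual register are currently held in machine registers, letting
// later code in the same bytecode op reuse them instead of reloading from the
// call frame. map() refuses to cache across jump targets, since control can
// reach a label with different register contents.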

inline bool JIT::isMapped(unsigned virtualRegisterIndex)
{
    if (m_mappedBytecodeIndex != m_bytecodeIndex)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    return true;
}

inline bool JIT::getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload)
{
    if (m_mappedBytecodeIndex != m_bytecodeIndex)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedPayload == (RegisterID)-1)
        return false;
    payload = m_mappedPayload;
    return true;
}

inline bool JIT::getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag)
{
    if (m_mappedBytecodeIndex != m_bytecodeIndex)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedTag == (RegisterID)-1)
        return false;
    tag = m_mappedTag;
    return true;
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        addSlowCase(branch32(NotEqual, tagFor(virtualRegisterIndex), Imm32(JSValue::CellTag)));
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        addSlowCase(branch32(NotEqual, tag, Imm32(JSValue::CellTag)));
}

inline void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        linkSlowCase(iter);
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant)
{
    if (isOperandConstantImmediateInt(op1)) {
        constant = getConstantOperand(op1).asInt32();
        op = op2;
        return true;
    }

    if (isOperandConstantImmediateInt(op2)) {
        constant = getConstantOperand(op2).asInt32();
        op = op1;
        return true;
    }

    return false;
}

/* Deprecated: Please use JITStubCall instead. */
ALWAYS_INLINE void JIT::emitPutJITStubArg(RegisterID tag, RegisterID payload, unsigned argumentNumber)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + 1;
    poke(payload, argumentStackOffset);
    poke(tag, argumentStackOffset + 1);
}

/* Deprecated: Please use JITStubCall instead. */
ALWAYS_INLINE void JIT::emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch1, RegisterID scratch2)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + 1;
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue constant = m_codeBlock->getConstant(src);
        poke(Imm32(constant.payload()), argumentStackOffset);
        poke(Imm32(constant.tag()), argumentStackOffset + 1);
    } else {
        emitLoad(src, scratch1, scratch2);
        poke(scratch2, argumentStackOffset);
        poke(scratch1, argumentStackOffset + 1);
    }
}
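
// The payload is poked into the lower of the two stack slots and the tag into
// the upper, matching the little-endian two-word JSValue layout that the stub
// reads back as an EncodedJSValue.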

#else // USE(JSVALUE32_64)

ALWAYS_INLINE void JIT::killLastResultRegister()
{
    m_lastResultBytecodeRegister = std::numeric_limits<int>::max();
}

ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(unsigned src)
{
    return getConstantOperand(src).asInt32();
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

/* Deprecated: Please use JITStubCall instead. */

// get arg puts an arg from the SF register array onto the stack, as an arg to a context threaded function.
ALWAYS_INLINE void JIT::emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch)
{
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        emitPutJITStubArgConstant(JSValue::encode(value), argumentNumber);
    } else {
        loadPtr(Address(callFrameRegister, src * sizeof(Register)), scratch);
        emitPutJITStubArg(scratch, argumentNumber);
    }

    killLastResultRegister();
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(unsigned dst, RegisterID from)
{
    storePtr(from, Address(callFrameRegister, dst * sizeof(Register)));
    m_lastResultBytecodeRegister = (from == cachedResultRegister) ? dst : std::numeric_limits<int>::max();
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    storePtr(ImmPtr(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(Zero, reg, tagMaskRegister);
#else
    return branchTest32(Zero, reg, Imm32(JSImmediate::TagMask));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    orPtr(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(NonZero, reg, tagMaskRegister);
#else
    return branchTest32(NonZero, reg, Imm32(JSImmediate::TagMask));
#endif
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}

#if USE(JSVALUE64)
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateNumber(RegisterID reg)
{
    return branchTestPtr(NonZero, reg, tagTypeNumberRegister);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateNumber(RegisterID reg)
{
    return branchTestPtr(Zero, reg, tagTypeNumberRegister);
}

inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        convertInt32ToDouble(AbsoluteAddress(&inConstantPool), value);
    } else
        convertInt32ToDouble(addressFor(index), value);
}
#endif

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(AboveOrEqual, reg, tagTypeNumberRegister);
#else
    return branchTest32(NonZero, reg, Imm32(JSImmediate::TagTypeNumber));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(Below, reg, tagTypeNumberRegister);
#else
    return branchTest32(Zero, reg, Imm32(JSImmediate::TagTypeNumber));
#endif
}
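
// Under JSVALUE64, immediate integers are encoded with all of the
// TagTypeNumber bits set, doubles with only some of them, and cells with none,
// so a single unsigned comparison against tagTypeNumberRegister classifies a
// value: AboveOrEqual means integer, Below means not an integer.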

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    andPtr(reg2, scratch);
    return emitJumpIfNotImmediateInteger(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateInteger(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateNumber(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateNumber(reg));
}

#if !USE(JSVALUE64)
ALWAYS_INLINE void JIT::emitFastArithDeTagImmediate(RegisterID reg)
{
    subPtr(Imm32(JSImmediate::TagTypeNumber), reg);
}

ALWAYS_INLINE JIT::Jump JIT::emitFastArithDeTagImmediateJumpIfZero(RegisterID reg)
{
    return branchSubPtr(Zero, Imm32(JSImmediate::TagTypeNumber), reg);
}
#endif

ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    emitFastArithIntToImmNoCheck(src, dest);
#else
    if (src != dest)
        move(src, dest);
    addPtr(Imm32(JSImmediate::TagTypeNumber), dest);
#endif
}

ALWAYS_INLINE void JIT::emitFastArithImmToInt(RegisterID reg)
{
#if USE(JSVALUE64)
    UNUSED_PARAM(reg);
#else
    rshiftPtr(Imm32(JSImmediate::IntegerPayloadShift), reg);
#endif
}
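
// Under JSVALUE64 the integer payload already occupies the low 32 bits of the
// register, so emitFastArithImmToInt is a no-op; the 32-bit immediate encoding
// keeps the payload above the tag bit, hence the arithmetic shift right by
// JSImmediate::IntegerPayloadShift.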