 * Copyright (C) 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31
#include "JITInlineMethods.h"
32
#include "JITStubCall.h"
35
#include "JSFunction.h"
36
#include "LinkBuffer.h"
42
// NOTE(review): this chunk is a mangled extraction — the bare numeric lines
// interleaved below look like original source line numbers, and many original
// lines (the opening/closing braces, struct member lists, #else arms, ret()s)
// are missing. The code is preserved byte-for-byte here; it is NOT compilable
// as-is and should be restored from the upstream file before any logic change.
//
// Builds the shared machine trampolines into a single executable pool and
// returns their code pointers through the out-parameters:
//   - a string-length fast path (when JIT_OPTIMIZE_PROPERTY_ACCESS),
//   - the virtual-call link and virtual-call dispatch stubs,
//   - the native-call thunk that bridges JIT frames to host functions.
// Register conventions are stated in the inline comments (tag/payload pairs —
// this is the JSValue32_64 value representation).
void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
44
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
45
// (1) This function provides fast property access for string length
46
Label stringLengthBegin = align();
48
// regT0 holds payload, regT1 holds tag
50
Jump string_failureCases1 = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
51
Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));
53
// Checks out okay! - get the length from the Ustring.
54
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSString, m_value) + OBJECT_OFFSETOF(UString, m_rep)), regT2);
55
load32(Address(regT2, OBJECT_OFFSETOF(UString::Rep, len)), regT2);
57
// Guard against lengths that don't fit an int32 payload.
Jump string_failureCases3 = branch32(Above, regT2, Imm32(INT_MAX));
59
// NOTE(review): a move of regT2 into regT0 and a ret() presumably followed
// here in the original (lines 60-63 are missing) — confirm against upstream.
move(Imm32(JSValue::Int32Tag), regT1);
64
// (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
66
#if ENABLE(JIT_OPTIMIZE_CALL)
67
// VirtualCallLink Trampoline
68
// regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
69
Label virtualCallLinkBegin = align();
70
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
72
// m_numParameters == 0 marks a host (native) function.
Jump isNativeFunc2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
74
Jump hasCodeBlock2 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
75
preserveReturnAddressAfterCall(regT3);
76
restoreArgumentReference();
77
Call callJSFunction2 = call();
78
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
79
emitGetJITStubArg(2, regT1); // argCount
80
restoreReturnAddressBeforeReturn(regT3);
81
hasCodeBlock2.link(this);
83
// Check argCount matches callee arity.
84
Jump arityCheckOkay2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
85
preserveReturnAddressAfterCall(regT3);
86
emitPutJITStubArg(regT3, 1); // return address
87
restoreArgumentReference();
88
Call callArityCheck2 = call();
89
move(regT1, callFrameRegister);
90
emitGetJITStubArg(2, regT1); // argCount
91
restoreReturnAddressBeforeReturn(regT3);
92
arityCheckOkay2.link(this);
94
isNativeFunc2.link(this);
96
compileOpCallInitializeCallFrame();
98
preserveReturnAddressAfterCall(regT3);
99
emitPutJITStubArg(regT3, 1); // return address
100
restoreArgumentReference();
101
Call callLazyLinkCall = call();
102
restoreReturnAddressBeforeReturn(regT3);
104
#endif // ENABLE(JIT_OPTIMIZE_CALL)
106
// VirtualCall Trampoline
107
// regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
108
Label virtualCallBegin = align();
109
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
111
Jump isNativeFunc3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
113
Jump hasCodeBlock3 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
114
preserveReturnAddressAfterCall(regT3);
115
restoreArgumentReference();
116
Call callJSFunction1 = call();
117
emitGetJITStubArg(2, regT1); // argCount
118
restoreReturnAddressBeforeReturn(regT3);
119
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
120
hasCodeBlock3.link(this);
122
// Check argCount matches callee arity.
123
Jump arityCheckOkay3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
124
preserveReturnAddressAfterCall(regT3);
125
emitPutJITStubArg(regT3, 1); // return address
126
restoreArgumentReference();
127
Call callArityCheck1 = call();
128
move(regT1, callFrameRegister);
129
emitGetJITStubArg(2, regT1); // argCount
130
restoreReturnAddressBeforeReturn(regT3);
131
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
132
arityCheckOkay3.link(this);
134
isNativeFunc3.link(this);
136
compileOpCallInitializeCallFrame();
137
loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCode)), regT0);
141
// Native-call thunk: bridges a JIT call frame to a host-function invocation.
Label nativeCallThunk = align();
142
preserveReturnAddressAfterCall(regT0);
143
emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address
145
// Load caller frame's scope chain into this callframe so that whatever we call can
146
// get to its global data.
147
emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
148
emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
149
emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);
151
emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
153
/* We have two structs that we use to describe the stackframe we set up for our
154
* call to native code. NativeCallFrameStructure describes the how we set up the stack
155
* in advance of the call. NativeFunctionCalleeSignature describes the callframe
156
* as the native code expects it. We do this as we are using the fastcall calling
157
* convention which results in the callee popping its arguments off the stack, but
158
* not the rest of the callframe so we need a nice way to ensure we increment the
159
* stack pointer by the right amount after the call.
162
#if COMPILER(MSVC) || PLATFORM(LINUX)
166
#endif // COMPILER(MSVC)
167
// NOTE(review): the member lists of both structs are missing from this
// extraction (original lines 169-196) — restore from upstream before use.
struct NativeCallFrameStructure {
168
// CallFrame* callFrame; // passed in EDX
175
struct NativeFunctionCalleeSignature {
182
#endif // COMPILER(MSVC)
184
struct NativeCallFrameStructure {
185
// CallFrame* callFrame; // passed in ECX
186
// JSObject* callee; // passed in EDX
191
struct NativeFunctionCalleeSignature {
197
// Round the frame size up to a 16-byte boundary for stack alignment.
const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
198
// Allocate system stack frame
199
subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);
202
subPtr(Imm32(1), regT0); // Don't include 'this' in argcount
205
storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));
207
// Calculate the start of the callframe header, and store in regT1
208
addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);
210
// Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
211
mul32(Imm32(sizeof(Register)), regT0, regT0);
212
subPtr(regT0, regT1);
213
storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));
215
// ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
216
addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
217
storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));
219
// regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
220
loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
221
loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT3);
222
storePtr(regT2, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
223
storePtr(regT3, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
225
#if COMPILER(MSVC) || PLATFORM(LINUX)
226
// ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
227
addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86Registers::ecx);
230
emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::eax);
231
storePtr(X86Registers::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));
234
move(callFrameRegister, X86Registers::edx);
236
call(Address(X86Registers::eax, OBJECT_OFFSETOF(JSFunction, m_data)));
238
// JSValue is a non-POD type, so eax points to it
239
emitLoad(0, regT1, regT0, X86Registers::eax);
241
emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::edx); // callee
242
move(callFrameRegister, X86Registers::ecx); // callFrame
243
call(Address(X86Registers::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
246
// We've put a few temporaries on the stack in addition to the actual arguments
247
// so pull them off now
248
addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);
250
// Check for an exception
251
move(ImmPtr(&globalData->exception), regT2);
252
Jump sawException = branch32(NotEqual, tagFor(0, regT2), Imm32(JSValue::EmptyValueTag));
254
// Grab the return address.
255
emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT3);
257
// Restore our caller's "r".
258
emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
261
restoreReturnAddressBeforeReturn(regT3);
264
// Handle an exception
265
sawException.link(this);
266
// Grab the return address.
267
emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
268
move(ImmPtr(&globalData->exceptionLocation), regT2);
269
storePtr(regT1, regT2);
270
move(ImmPtr(reinterpret_cast<void*>(ctiVMThrowTrampoline)), regT2);
271
emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
272
poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
273
restoreReturnAddressBeforeReturn(regT2);
276
#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
277
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
282
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
283
Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
284
Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
285
Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
288
// All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
289
LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));
291
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
292
patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
293
patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
294
patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
296
patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
297
patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
298
#if ENABLE(JIT_OPTIMIZE_CALL)
299
patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
300
patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
301
patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
304
CodeRef finalCode = patchBuffer.finalizeCode();
305
*executablePool = finalCode.m_executablePool;
307
*ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
308
*ctiNativeCallThunk = trampolineAt(finalCode, nativeCallThunk);
309
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
310
*ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
312
UNUSED_PARAM(ctiStringLengthTrampoline);
314
#if ENABLE(JIT_OPTIMIZE_CALL)
315
*ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
317
UNUSED_PARAM(ctiVirtualCallLink);
321
// op_mov: copy the value in virtual register src into dst.
// Constant sources are stored directly; otherwise the tag/payload pair is
// loaded into regT1/regT0 and re-stored.
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
        // Record that dst is cached in regT1/regT0 for the following opcode.
        map(m_bytecodeIndex + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    }
}
// op_end: terminate execution of this code block, returning the value in
// the given virtual register (left in regT1/regT0 for the caller).
void JIT::emit_op_end(Instruction* currentInstruction)
{
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_end).call();
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    // Pop the return address stored in the call frame header and return.
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}
// op_jmp: unconditional jump. The +1 folds the operand offset into the
// bytecode-relative target, matching the other jump emitters in this file.
void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target + 1);
}
// op_loop: like op_jmp, but emitted at loop back-edges, so a timeout check
// runs before taking the jump.
void JIT::emit_op_loop(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    emitTimeoutCheck();
    addJump(jump(), target + 1);
}
// op_loop_if_less: jump to target when op1 < op2 (int32 fast path).
// Constant-operand forms compare against the immediate (with the comparison
// flipped when the constant is on the left); non-int32 tags fall to the
// slow case.
void JIT::emit_op_loop_if_less(Instruction* currentInstruction)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    // Loop back-edge: poll for script timeout before branching.
    emitTimeoutCheck();

    if (isOperandConstantImmediateInt(op1)) {
        emitLoad(op2, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        // const < op2  <=>  op2 > const
        addJump(branch32(GreaterThan, regT0, Imm32(getConstantOperand(op1).asInt32())), target + 3);
        return;
    }

    if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(LessThan, regT0, Imm32(getConstantOperand(op2).asInt32())), target + 3);
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
    addJump(branch32(LessThan, regT0, regT2), target + 3);
}
// Slow path for op_loop_if_less: one or two int32-tag checks may have
// failed (two only when neither operand is a constant); fall back to the
// cti_op_loop_if_less stub and branch on its boolean result.
void JIT::emitSlow_op_loop_if_less(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
        linkSlowCase(iter); // int32 check
    linkSlowCase(iter); // int32 check

    JITStubCall stubCall(this, cti_op_loop_if_less);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
}
// op_loop_if_lesseq: jump to target when op1 <= op2 (int32 fast path).
// Mirrors emit_op_loop_if_less with the comparison relaxed to <= (and
// flipped to >= when the constant is on the left).
void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    // Loop back-edge: poll for script timeout before branching.
    emitTimeoutCheck();

    if (isOperandConstantImmediateInt(op1)) {
        emitLoad(op2, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        // const <= op2  <=>  op2 >= const
        addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op1).asInt32())), target + 3);
        return;
    }

    if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(LessThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target + 3);
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
    addJump(branch32(LessThanOrEqual, regT0, regT2), target + 3);
}
// Slow path for op_loop_if_lesseq: same slow-case structure as
// emitSlow_op_loop_if_less, calling the lesseq stub.
void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
        linkSlowCase(iter); // int32 check
    linkSlowCase(iter); // int32 check

    JITStubCall stubCall(this, cti_op_loop_if_lesseq);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
}
// op_new_object: no fast path — always call the stub, storing the new
// object into the destination register.
void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}
// op_instanceof: computes `value instanceof baseVal` by walking value's
// prototype chain looking for proto, storing a boolean into dst.
void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(proto, regT1);
    emitLoadPayload(baseVal, regT0);
    emitLoadPayload(value, regT2);

    // Check that baseVal & proto are cells.
    emitJumpSlowCaseIfNotJSCell(proto);
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal is an object, that it 'ImplementsHasInstance' but that it does not 'OverridesHasInstance'.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType))); // FIXME: Maybe remove this test.
    addSlowCase(branchTest32(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsHasInstance))); // FIXME: TOT checks ImplementsDefaultHasInstance.

    // If value is not an Object, return false.
    emitLoadTag(value, regT0);
    Jump valueIsImmediate = branch32(NotEqual, regT0, Imm32(JSValue::CellTag));
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    Jump valueIsNotObject = branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)); // FIXME: Maybe remove this test.

    // Check proto is object.
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(Imm32(JSValue::TrueTag), regT0);
    Label loop = label();

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    load32(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branch32(NotEqual, regT2, Imm32(0), loop);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    valueIsImmediate.link(this);
    valueIsNotObject.link(this);
    move(Imm32(JSValue::FalseTag), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}
// Slow path for op_instanceof: link the two not-a-cell checks and the
// three structure/type-info slow cases from the fast path, then call the
// cti_op_instanceof stub, storing its boolean result into dst.
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCaseIfNotJSCell(iter, proto);
    // One link per addSlowCase in the fast path: baseVal ObjectType check,
    // ImplementsHasInstance check, and the proto ObjectType check.
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}
// op_new_func: create a function object from the declared FunctionExecutable
// and store it into the destination register.
void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
// op_get_global_var: read a register slot directly out of the global
// object's register array and store it into dst.
void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[2].u.jsCell);
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[3].u.operand;

    loadPtr(&globalObject->d()->registers, regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
}
// op_put_global_var: write the value register into a slot of the global
// object's register array.
void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[1].u.jsCell);
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    loadPtr(&globalObject->d()->registers, regT2);
    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
}
// op_get_scoped_var: walk `skip` links up the scope chain, then read a slot
// from that scope's variable-object register array into dst.
void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    // A full scope chain adds one extra node to hop over.
    int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}
// op_put_scoped_var: walk `skip` links up the scope chain, then write the
// value register into that scope's variable-object register array.
void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    // A full scope chain adds one extra node to hop over.
    int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);

    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_put_scoped_var), value, regT1, regT0);
}
// op_tear_off_activation: detach the activation object (operand 1) from the
// register file via the stub.
void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}
// op_tear_off_arguments: detach the arguments object via the stub; takes no
// operands from the instruction stream.
void JIT::emit_op_tear_off_arguments(Instruction*)
{
    JITStubCall(this, cti_op_tear_off_arguments).call();
}
// op_new_array: build an array from a run of registers (start, count) via
// the stub, storing the result into the destination register.
void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}
// op_resolve: look up an identifier through the scope chain via the stub,
// storing the resolved value into the destination register.
void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
// op_to_primitive: fast path passes immediates and strings through
// unchanged; any other cell falls to the slow case.
void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    // A cell that is not a string needs the full toPrimitive conversion.
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}
// Slow path for op_to_primitive: the operand is still live in regT1/regT0,
// so pass the register pair straight to the stub.
void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}
// op_strcat: concatenate a run of registers (start, count) into a string
// via the stub, storing the result into the destination register.
void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}
// op_loop_if_true: jump to target when cond is truthy. Handles int32 and
// boolean tags inline; anything else goes to the slow case.
void JIT::emit_op_loop_if_true(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    // Loop back-edge: poll for script timeout before branching.
    emitTimeoutCheck();

    emitLoad(cond, regT1, regT0);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    addJump(branch32(NotEqual, regT0, Imm32(0)), target + 2);
    Jump isNotZero = jump();

    isNotInteger.link(this);

    addJump(branch32(Equal, regT1, Imm32(JSValue::TrueTag)), target + 2);
    // Neither int32 nor true/false: defer to the stub.
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::FalseTag)));

    isNotZero.link(this);
}
// Slow path for op_loop_if_true: evaluate truthiness via cti_op_jtrue and
// branch back to the hot path on a non-zero result.
void JIT::emitSlow_op_loop_if_true(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 2);
}
// op_resolve_base: find the base object an identifier would resolve on,
// via the stub, storing it into the destination register.
void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
// op_resolve_skip: resolve an identifier while skipping a known number of
// scope-chain nodes (plus one for a full scope chain), via the stub.
void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
    stubCall.call(currentInstruction[1].u.operand);
}
// op_resolve_global: fast path for reading a global property, guarded by a
// cached Structure check; on a hit, loads the cached property offset from
// the global object's external storage.
void JIT::emit_op_resolve_global(Instruction* currentInstruction)
{
    // FIXME: Optimize to use patching instead of so many memory accesses.

    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;

    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Check Structure of global object
    move(ImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)))); // Structures don't match

    // Load the cached property from the global object's external storage.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT2);
    load32(offsetAddr, regT3);
    load32(BaseIndex(regT2, regT3, TimesEight), regT0); // payload
    load32(BaseIndex(regT2, regT3, TimesEight, 4), regT1); // tag
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_resolve_global), dst, regT1, regT0);
}
// Slow path for op_resolve_global: the structure check failed, so do the
// full lookup via the stub (which can repopulate the resolve-info cache).
void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);

    // Keep the resolve-info index in step with the fast path's allocation.
    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(ImmPtr(globalObject));
    stubCall.addArgument(ImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(dst);
}
// op_not: boolean negation. XOR-ing the tag with FalseTag maps the two
// boolean tags onto {0, 1}; any other bit set means the value was not a
// boolean and must go to the slow case. XOR-ing with TrueTag then yields
// the negated boolean's tag.
void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

    xor32(Imm32(JSValue::FalseTag), regT0);
    addSlowCase(branchTest32(NonZero, regT0, Imm32(~1)));
    xor32(Imm32(JSValue::TrueTag), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}
// Slow path for op_not: the source was not a boolean; negate via the stub.
void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}
// op_jfalse: jump to target when cond is falsy. Booleans and int32s are
// handled inline; doubles are compared against zero when FP is available;
// other tags fall to the slow case.
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    Jump isTrue = branch32(Equal, regT1, Imm32(JSValue::TrueTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::FalseTag)), target + 2);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isTrue2 = branch32(NotEqual, regT0, Imm32(0));
    addJump(jump(), target + 2);

    if (supportsFloatingPoint()) {
        isNotInteger.link(this);

        // Tags above LowestTag are not doubles — slow case.
        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

        // A double is falsy iff it equals +/-0.0.
        zeroDouble(fpRegT0);
        emitLoadDouble(cond, fpRegT1);
        addJump(branchDouble(DoubleEqual, fpRegT0, fpRegT1), target + 2);
    } else
        addSlowCase(isNotInteger);

    isTrue.link(this);
    isTrue2.link(this);
}
// Slow path for op_jfalse: evaluate via cti_op_jtrue and invert the result.
void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target + 2); // Inverted.
}
// op_jtrue: jump to target when cond is truthy — the mirror image of
// emit_op_jfalse.
void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    Jump isFalse = branch32(Equal, regT1, Imm32(JSValue::FalseTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::TrueTag)), target + 2);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isFalse2 = branch32(Equal, regT0, Imm32(0));
    addJump(jump(), target + 2);

    if (supportsFloatingPoint()) {
        isNotInteger.link(this);

        // Tags above LowestTag are not doubles — slow case.
        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

        // A double is truthy iff it differs from +/-0.0.
        zeroDouble(fpRegT0);
        emitLoadDouble(cond, fpRegT1);
        addJump(branchDouble(DoubleNotEqual, fpRegT0, fpRegT1), target + 2);
    } else
        addSlowCase(isNotInteger);

    isFalse.link(this);
    isFalse2.link(this);
}
// Slow path for op_jtrue: evaluate via cti_op_jtrue and branch on non-zero.
void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 2);
}
// op_jeq_null: jump to target when src == null/undefined (including cells
// that masquerade as undefined).
void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    // Jump if the tag matched either null or undefined.
    or32(regT2, regT1);

    addJump(branchTest32(NonZero, regT1), target + 2);

    wasNotImmediate.link(this);
}
// op_jneq_null: jump to `target` when the operand does NOT compare equal to
// null (inverse of op_jeq_null: Zero tests instead of NonZero).
// NOTE(review): extraction-mangled - bare numbers are artifacts; the gap
// 908->911 suggests a combining instruction between the two set32 lines was
// dropped. Left byte-identical; restore from pristine source.
void JIT::emit_op_jneq_null(Instruction* currentInstruction)
891
unsigned src = currentInstruction[1].u.operand;
892
unsigned target = currentInstruction[2].u.operand;
894
emitLoad(src, regT1, regT0);
896
Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
898
// First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
899
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
900
addJump(branchTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
902
Jump wasNotImmediate = jump();
904
// Now handle the immediate cases - undefined & null
905
isImmediate.link(this);
907
set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
908
set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
911
addJump(branchTest32(Zero, regT1), target + 2);
913
wasNotImmediate.link(this);
916
// op_jneq_ptr: jump to `target` unless the value in `src` is exactly the
// given JSCell pointer (not a cell, or a different cell, both jump).
// NOTE(review): reconstructed from an extraction-mangled block - stray bare
// line-number artifact lines removed and braces restored; every visible
// statement is unchanged.
void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    // Jump if the tag is not CellTag, or if the payload is a different cell.
    addJump(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)), target + 3);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(ptr)), target + 3);
}
927
// op_jsr: store a (later patched) return address into register `retAddrDst`
// and jump to `target`; op_sret jumps back through that slot.
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; visible statements unchanged.
void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    // Placeholder pointer is patched later via the recorded JSRInfo.
    DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target + 2);
    // The label after the jump is the address op_sret should return to.
    m_jsrSites.append(JSRInfo(storeLocation, label()));
}
936
// op_sret: jump to the return address op_jsr stored in the given register
// slot of the call frame.
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; the statement is unchanged.
void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
}
941
// op_eq fast path: compare two values whose tags match and are neither cells
// nor doubles; anything else goes to the slow case.
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; visible statements unchanged.
void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    // Fast path requires equal tags, not a cell, and not a double.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

    set8(Equal, regT0, regT2, regT0);
    // Turn the 0/1 payload into the boolean tag encoding.
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}
958
// op_eq slow path: strings are compared via cti_op_eq_strings; everything
// else (mixed tags, doubles, non-string cells) via the generic cti_op_eq.
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; visible statements unchanged.
void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}
993
// op_neq fast path: mirror of op_eq using a NotEqual comparison; same
// slow-case guards (tags equal, not cell, not double).
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; visible statements unchanged.
void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

    set8(NotEqual, regT0, regT2, regT0);
    // Turn the 0/1 payload into the boolean tag encoding.
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}
1010
// op_neq slow path: computes equality (strings via cti_op_eq_strings, the
// rest via cti_op_eq on the still-loaded register pairs) and inverts the
// result with an xor before storing the boolean.
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; visible statements unchanged.
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(Imm32(0x1), regT0); // invert the equality result
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}
1044
// Shared emitter for op_stricteq / op_nstricteq: compares the two operands'
// tags, bailing to the slow case for doubles and cells.
// NOTE(review): extraction-mangled - bare numbers are artifacts; the gap
// 1054->1056 suggests an instruction initializing regT2 (presumably
// move(regT0, regT2)) was dropped before the and32, and the gap 1061->1063
// suggests an `else` was dropped between the two set8 lines. Left
// byte-identical; restore from pristine source before compiling.
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
1046
unsigned dst = currentInstruction[1].u.operand;
1047
unsigned src1 = currentInstruction[2].u.operand;
1048
unsigned src2 = currentInstruction[3].u.operand;
1050
emitLoadTag(src1, regT0);
1051
emitLoadTag(src2, regT1);
1053
// Jump to a slow case if either operand is double, or if both operands are
1054
// cells and/or Int32s.
1056
and32(regT1, regT2);
1057
addSlowCase(branch32(Below, regT2, Imm32(JSValue::LowestTag)));
1058
addSlowCase(branch32(AboveOrEqual, regT2, Imm32(JSValue::CellTag)));
1060
if (type == OpStrictEq)
1061
set8(Equal, regT0, regT1, regT0);
1063
set8(NotEqual, regT0, regT1, regT0);
1065
or32(Imm32(JSValue::FalseTag), regT0);
1067
emitStoreBool(dst, regT0);
1070
// op_stricteq: delegates to the shared strict-equality emitter.
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; the statement is unchanged.
void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}
1075
// Slow path for op_stricteq: calls the cti_op_stricteq stub.
// NOTE(review): extraction-mangled - bare numbers are artifacts; the gap
// 1079->1084 suggests linkSlowCase(iter) lines were dropped, and the block
// ends without the stub call's .call(dst). Left byte-identical.
void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1077
unsigned dst = currentInstruction[1].u.operand;
1078
unsigned src1 = currentInstruction[2].u.operand;
1079
unsigned src2 = currentInstruction[3].u.operand;
1084
JITStubCall stubCall(this, cti_op_stricteq);
1085
stubCall.addArgument(src1);
1086
stubCall.addArgument(src2);
1090
// op_nstricteq: delegates to the shared strict-equality emitter (negated).
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; the statement is unchanged.
void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}
1095
// Slow path for op_nstricteq: calls the cti_op_nstricteq stub.
// NOTE(review): extraction-mangled - bare numbers are artifacts; the gap
// 1099->1104 suggests linkSlowCase(iter) lines were dropped, and the block
// ends without the stub call's .call(dst). Left byte-identical.
void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1097
unsigned dst = currentInstruction[1].u.operand;
1098
unsigned src1 = currentInstruction[2].u.operand;
1099
unsigned src2 = currentInstruction[3].u.operand;
1104
JITStubCall stubCall(this, cti_op_nstricteq);
1105
stubCall.addArgument(src1);
1106
stubCall.addArgument(src2);
1110
// op_eq_null: store a boolean - true when the operand equals null
// (null/undefined tag, or a cell that masquerades as undefined).
// NOTE(review): extraction-mangled - bare numbers are artifacts; the gap
// 1126->1129 suggests a combining instruction (presumably or32(regT2,
// regT1)) was dropped after the two set8 lines. Left byte-identical.
void JIT::emit_op_eq_null(Instruction* currentInstruction)
1112
unsigned dst = currentInstruction[1].u.operand;
1113
unsigned src = currentInstruction[2].u.operand;
1115
emitLoad(src, regT1, regT0);
1116
Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
1118
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
1119
setTest8(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);
1121
Jump wasNotImmediate = jump();
1123
isImmediate.link(this);
1125
set8(Equal, regT1, Imm32(JSValue::NullTag), regT2);
1126
set8(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
1129
wasNotImmediate.link(this);
1131
or32(Imm32(JSValue::FalseTag), regT1);
1133
emitStoreBool(dst, regT1);
1136
// op_neq_null: store a boolean - true when the operand does NOT equal null
// (not null/undefined, and not a cell masquerading as undefined).
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; visible statements unchanged.
void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // Cell case: result is true unless the MasqueradesAsUndefined bit is set.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    setTest8(Zero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    // Immediate case: true iff the tag is neither null nor undefined.
    isImmediate.link(this);

    set8(NotEqual, regT1, Imm32(JSValue::NullTag), regT2);
    set8(NotEqual, regT1, Imm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);

    // Turn the 0/1 payload into the boolean tag encoding.
    or32(Imm32(JSValue::FalseTag), regT1);
    emitStoreBool(dst, regT1);
}
1162
// op_resolve_with_base: resolves an identifier via the cti stub, which
// writes both the base object and the resolved value.
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; visible statements unchanged.
void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}
1170
// op_new_func_exp: creates a function from a function expression via stub.
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; visible statements unchanged.
void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
1177
// op_new_regexp: creates a RegExp object from the code block's regexp pool
// via stub.
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; visible statements unchanged.
void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
1184
// op_throw: passes the exception value to the cti_op_throw stub, which
// rewrites its own return address to the handler.
// NOTE(review): extraction-mangled - bare numbers are artifacts; the gap
// 1188->1192 suggests stubCall.call() (and possibly a debug-only guard) was
// dropped, and the trailing breakpoint/#endif is missing. Left
// byte-identical; restore from pristine source before compiling.
void JIT::emit_op_throw(Instruction* currentInstruction)
1186
unsigned exception = currentInstruction[1].u.operand;
1187
JITStubCall stubCall(this, cti_op_throw);
1188
stubCall.addArgument(exception);
1192
// cti_op_throw always changes it's return address,
1193
// this point in the code should never be reached.
1198
// op_next_pname: advances a property-name iterator via stub; a zero result
// means iteration finished (fall through), otherwise store the name in `dst`
// and jump back to `target`.
// NOTE(review): extraction-mangled - bare numbers are artifacts; the gap
// 1207->1210 suggests stubCall.call() was dropped. Left byte-identical.
void JIT::emit_op_next_pname(Instruction* currentInstruction)
1200
int dst = currentInstruction[1].u.operand;
1201
int iter = currentInstruction[2].u.operand;
1202
int target = currentInstruction[3].u.operand;
1204
load32(Address(callFrameRegister, (iter * sizeof(Register))), regT0);
1206
JITStubCall stubCall(this, cti_op_next_pname);
1207
stubCall.addArgument(regT0);
1210
Jump endOfIter = branchTestPtr(Zero, regT0);
1211
emitStore(dst, regT1, regT0);
1212
map(m_bytecodeIndex + OPCODE_LENGTH(op_next_pname), dst, regT1, regT0);
1213
addJump(jump(), target + 3);
1214
endOfIter.link(this);
1217
// op_push_scope: pushes the given object onto the scope chain via stub; the
// stub also writes the converted object back to the same register.
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; visible statements unchanged.
void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}
1224
// op_pop_scope: pops the top of the scope chain via stub; no operands.
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; the statement is unchanged.
void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}
1229
// op_to_jsnumber fast path: int32 and other numeric tags pass through;
// non-numeric tags go to the slow case.
// NOTE(review): extraction-mangled - bare numbers are artifacts; the gap
// 1237->1241 suggests the isInt32 link line (and possibly an `if (src !=
// dst)` guard) was dropped. Left byte-identical; restore from pristine
// source before compiling.
void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
1231
int dst = currentInstruction[1].u.operand;
1232
int src = currentInstruction[2].u.operand;
1234
emitLoad(src, regT1, regT0);
1236
Jump isInt32 = branch32(Equal, regT1, Imm32(JSValue::Int32Tag));
1237
addSlowCase(branch32(AboveOrEqual, regT1, Imm32(JSValue::EmptyValueTag)));
1241
emitStore(dst, regT1, regT0);
1242
map(m_bytecodeIndex + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
1245
// Slow path for op_to_jsnumber: converts via the cti_op_to_jsnumber stub.
// NOTE(review): extraction-mangled - bare numbers are artifacts; the gap
// 1247->1251 suggests linkSlowCase(iter) was dropped, and the block ends
// without the stub call's .call(dst). Left byte-identical.
void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1247
int dst = currentInstruction[1].u.operand;
1251
JITStubCall stubCall(this, cti_op_to_jsnumber);
1252
stubCall.addArgument(regT1, regT0);
1256
// op_push_new_scope: pushes a new scope binding the given identifier to the
// given value, via stub.
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; visible statements unchanged.
void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}
1264
// op_catch: entry point after cti_op_throw returns into a handler - reloads
// the call frame pointer from the stub stack frame and stores the thrown
// exception into the `exception` register.
// NOTE(review): extraction-mangled - bare numbers are artifacts; the
// QT_BUILD_SCRIPT_LIB stub call appears to be missing its .call() and the
// closing #endif. Left byte-identical; restore from pristine source.
void JIT::emit_op_catch(Instruction* currentInstruction)
1266
unsigned exception = currentInstruction[1].u.operand;
1268
// This opcode only executes after a return from cti_op_throw.
1270
// cti_op_throw may have taken us to a call frame further up the stack; reload
1271
// the call frame pointer to adjust.
1272
peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
1274
// Now store the exception returned by cti_op_throw.
1275
emitStore(exception, regT1, regT0);
1276
map(m_bytecodeIndex + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
1277
#ifdef QT_BUILD_SCRIPT_LIB
1278
JITStubCall stubCall(this, cti_op_debug_catch);
1279
stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1284
// op_jmp_scopes: unwinds the scope chain to the given depth via stub, then
// jumps to the target offset.
// NOTE(review): extraction-mangled - bare numbers are artifacts; the gap
// 1287->1289 suggests stubCall.call() was dropped. Left byte-identical.
void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
1286
JITStubCall stubCall(this, cti_op_jmp_scopes);
1287
stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1289
addJump(jump(), currentInstruction[2].u.operand + 2);
1292
// op_switch_imm: records the immediate jump table for later patching and
// dispatches through the cti_op_switch_imm stub.
// NOTE(review): extraction-mangled - bare numbers are artifacts; the block
// ends at the last addArgument, so the stub's .call() and the indirect
// jump through the result appear dropped. Left byte-identical.
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
1294
unsigned tableIndex = currentInstruction[1].u.operand;
1295
unsigned defaultOffset = currentInstruction[2].u.operand;
1296
unsigned scrutinee = currentInstruction[3].u.operand;
1298
// create jump table for switch destinations, track this switch statement.
1299
SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
1300
m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
1301
jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1303
JITStubCall stubCall(this, cti_op_switch_imm);
1304
stubCall.addArgument(scrutinee);
1305
stubCall.addArgument(Imm32(tableIndex));
1310
// op_switch_char: like op_switch_imm but for single-character scrutinees,
// using the character jump table and the cti_op_switch_char stub.
// NOTE(review): extraction-mangled - bare numbers are artifacts; the stub's
// .call() and the indirect jump through the result appear dropped at the
// end. Left byte-identical.
void JIT::emit_op_switch_char(Instruction* currentInstruction)
1312
unsigned tableIndex = currentInstruction[1].u.operand;
1313
unsigned defaultOffset = currentInstruction[2].u.operand;
1314
unsigned scrutinee = currentInstruction[3].u.operand;
1316
// create jump table for switch destinations, track this switch statement.
1317
SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
1318
m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
1319
jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1321
JITStubCall stubCall(this, cti_op_switch_char);
1322
stubCall.addArgument(scrutinee);
1323
stubCall.addArgument(Imm32(tableIndex));
1328
// op_switch_string: string-keyed switch dispatch via the string jump table
// and the cti_op_switch_string stub.
// NOTE(review): extraction-mangled - bare numbers are artifacts; the stub's
// .call() and the indirect jump through the result appear dropped at the
// end. Left byte-identical.
void JIT::emit_op_switch_string(Instruction* currentInstruction)
1330
unsigned tableIndex = currentInstruction[1].u.operand;
1331
unsigned defaultOffset = currentInstruction[2].u.operand;
1332
unsigned scrutinee = currentInstruction[3].u.operand;
1334
// create jump table for switch destinations, track this switch statement.
1335
StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
1336
m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));
1338
JITStubCall stubCall(this, cti_op_switch_string);
1339
stubCall.addArgument(scrutinee);
1340
stubCall.addArgument(Imm32(tableIndex));
1345
// op_new_error: builds an Error object of the given type with a constant
// message and the current bytecode index, via stub.
// NOTE(review): extraction-mangled - bare numbers are artifacts; the block
// ends without the stub call's .call(dst). Left byte-identical.
void JIT::emit_op_new_error(Instruction* currentInstruction)
1347
unsigned dst = currentInstruction[1].u.operand;
1348
unsigned type = currentInstruction[2].u.operand;
1349
unsigned message = currentInstruction[3].u.operand;
1351
JITStubCall stubCall(this, cti_op_new_error);
1352
stubCall.addArgument(Imm32(type));
1353
stubCall.addArgument(m_codeBlock->getConstant(message));
1354
stubCall.addArgument(Imm32(m_bytecodeIndex));
1358
// op_debug: forwards all four debug operands to the cti_op_debug stub
// (Qt variant takes a fourth operand; upstream takes three).
// NOTE(review): extraction-mangled - bare numbers are artifacts; the block
// ends without the stub call's .call(). Left byte-identical.
void JIT::emit_op_debug(Instruction* currentInstruction)
1360
JITStubCall stubCall(this, cti_op_debug);
1361
stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1362
stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
1363
stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
1364
stubCall.addArgument(Imm32(currentInstruction[4].u.operand));
1369
// op_enter: function prologue - clears all local variable registers to
// undefined so no stale JSValue pointers survive into this activation.
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; visible statements unchanged.
void JIT::emit_op_enter(Instruction*)
{
    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(i, jsUndefined());
}
1378
// op_enter_with_activation: normal prologue plus pushing an activation
// object for closures that capture this frame's variables.
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; visible statements unchanged.
void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
{
    emit_op_enter(currentInstruction);

    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
}
1385
// op_create_arguments: lazily creates the arguments object if its register
// still holds the empty value.
// NOTE(review): extraction-mangled - bare numbers are artifacts; the gap
// 1391->1393 suggests an `else` was dropped between the two stub calls (as
// written both would execute). Left byte-identical; restore from pristine
// source before compiling.
void JIT::emit_op_create_arguments(Instruction*)
1387
Jump argsCreated = branch32(NotEqual, tagFor(RegisterFile::ArgumentsRegister, callFrameRegister), Imm32(JSValue::EmptyValueTag));
1389
// If we get here the arguments pointer is a null cell - i.e. arguments need lazy creation.
1390
if (m_codeBlock->m_numParameters == 1)
1391
JITStubCall(this, cti_op_create_arguments_no_params).call();
1393
JITStubCall(this, cti_op_create_arguments).call();
1395
argsCreated.link(this);
1398
// op_init_arguments: marks the arguments register as empty so that
// op_create_arguments knows to create the object lazily.
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; the statement is unchanged.
void JIT::emit_op_init_arguments(Instruction*)
{
    emitStore(RegisterFile::ArgumentsRegister, JSValue(), callFrameRegister);
}
1403
// op_convert_this fast path: 'this' needs no conversion when it is a cell
// whose structure does not request this-conversion; otherwise slow case.
// NOTE(review): reconstructed from an extraction-mangled block - artifact
// lines removed, braces restored; visible statements unchanged.
void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT1, regT0);

    // Non-cells (primitives) must be converted in the slow path.
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));

    map(m_bytecodeIndex + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
}
1417
// Slow path for op_convert_this: converts 'this' via the stub and stores
// the result back into the this-register.
// NOTE(review): extraction-mangled - bare numbers are artifacts; the gap
// 1419->1424 suggests linkSlowCase(iter) lines were dropped. Left
// byte-identical; restore from pristine source before compiling.
void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1419
unsigned thisRegister = currentInstruction[1].u.operand;
1424
JITStubCall stubCall(this, cti_op_convert_this);
1425
stubCall.addArgument(regT1, regT0);
1426
stubCall.call(thisRegister);
1429
// op_profile_will_call: notifies an enabled profiler before a call; skipped
// entirely when no profiler is installed.
// NOTE(review): extraction-mangled - bare numbers are artifacts; the gap
// 1435->1437 suggests stubCall.call() was dropped. Left byte-identical.
void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
1431
peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
1432
Jump noProfiler = branchTestPtr(Zero, Address(regT2));
1434
JITStubCall stubCall(this, cti_op_profile_will_call);
1435
stubCall.addArgument(currentInstruction[1].u.operand);
1437
noProfiler.link(this);
1440
// op_profile_did_call: notifies an enabled profiler after a call; skipped
// entirely when no profiler is installed.
// NOTE(review): extraction-mangled - bare numbers are artifacts; the gap
// 1446->1448 suggests stubCall.call() was dropped. Left byte-identical.
void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
1442
peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
1443
Jump noProfiler = branchTestPtr(Zero, Address(regT2));
1445
JITStubCall stubCall(this, cti_op_profile_did_call);
1446
stubCall.addArgument(currentInstruction[1].u.operand);
1448
noProfiler.link(this);
1451
#else // USE(JSVALUE32_64)
1453
// Marks the jump target at bytecode offset `targetOffset` as used so a
// label is recorded for it.
// NOTE(review): the bare "1454" line below is an extraction artifact that
// splits the macro's backslash continuation - remove it before compiling.
#define RECORD_JUMP_TARGET(targetOffset) \
1454
do { m_labels[m_bytecodeIndex + (targetOffset)].used(); } while (false)
1456
// Builds the shared CTI machine-code trampolines for the non-JSVALUE32_64
// build: the string-length property-access fast path, the virtual-call-link
// and virtual-call trampolines, and the native call thunk. The generated
// code is then copied into an executable pool via LinkBuffer, stub calls
// are linked, and the trampoline entry points are written through the
// out-parameters.
// NOTE(review): this block is extraction-mangled - the interleaved bare
// numbers are artifacts of the original file's line numbering, and braces,
// blank lines, #else/#endif lines and some statements were dropped. The
// code is left byte-identical; restore from pristine source before
// attempting to compile or modify this function.
void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
1458
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1459
// (2) The second function provides fast property access for string length
1460
Label stringLengthBegin = align();
1462
// Check eax is a string
1463
Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
1464
Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));
1466
// Checks out okay! - get the length from the Ustring.
1467
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSString, m_value) + OBJECT_OFFSETOF(UString, m_rep)), regT0);
1468
load32(Address(regT0, OBJECT_OFFSETOF(UString::Rep, len)), regT0);
1470
Jump string_failureCases3 = branch32(Above, regT0, Imm32(JSImmediate::maxImmediateInt));
1472
// regT0 contains a 64 bit value (is positive, is zero extended) so we don't need sign extend here.
1473
emitFastArithIntToImmNoCheck(regT0, regT0);
1478
// (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
1479
COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);
1481
// VirtualCallLink Trampoline
1482
// regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
1483
Label virtualCallLinkBegin = align();
1484
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
1486
Jump isNativeFunc2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
1488
Jump hasCodeBlock2 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
1489
preserveReturnAddressAfterCall(regT3);
1490
restoreArgumentReference();
1491
Call callJSFunction2 = call();
1492
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
1493
emitGetJITStubArg(2, regT1); // argCount
1494
restoreReturnAddressBeforeReturn(regT3);
1495
hasCodeBlock2.link(this);
1497
// Check argCount matches callee arity.
1498
Jump arityCheckOkay2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
1499
preserveReturnAddressAfterCall(regT3);
1500
emitPutJITStubArg(regT3, 1); // return address
1501
restoreArgumentReference();
1502
Call callArityCheck2 = call();
1503
move(regT1, callFrameRegister);
1504
emitGetJITStubArg(2, regT1); // argCount
1505
restoreReturnAddressBeforeReturn(regT3);
1506
arityCheckOkay2.link(this);
1508
isNativeFunc2.link(this);
1510
compileOpCallInitializeCallFrame();
1511
preserveReturnAddressAfterCall(regT3);
1512
emitPutJITStubArg(regT3, 1); // return address
1513
restoreArgumentReference();
1514
Call callLazyLinkCall = call();
1515
restoreReturnAddressBeforeReturn(regT3);
1518
// VirtualCall Trampoline
1519
// regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
1520
Label virtualCallBegin = align();
1521
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
1523
Jump isNativeFunc3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
1525
Jump hasCodeBlock3 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
1526
preserveReturnAddressAfterCall(regT3);
1527
restoreArgumentReference();
1528
Call callJSFunction1 = call();
1529
emitGetJITStubArg(2, regT1); // argCount
1530
restoreReturnAddressBeforeReturn(regT3);
1531
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
1532
hasCodeBlock3.link(this);
1534
// Check argCount matches callee arity.
1535
Jump arityCheckOkay3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
1536
preserveReturnAddressAfterCall(regT3);
1537
emitPutJITStubArg(regT3, 1); // return address
1538
restoreArgumentReference();
1539
Call callArityCheck1 = call();
1540
move(regT1, callFrameRegister);
1541
emitGetJITStubArg(2, regT1); // argCount
1542
restoreReturnAddressBeforeReturn(regT3);
1543
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
1544
arityCheckOkay3.link(this);
1546
isNativeFunc3.link(this);
1548
compileOpCallInitializeCallFrame();
1549
loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCode)), regT0);
1552
Label nativeCallThunk = align();
1553
preserveReturnAddressAfterCall(regT0);
1554
emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address
1556
// Load caller frame's scope chain into this callframe so that whatever we call can
1557
// get to its global data.
1558
emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
1559
emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
1560
emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);
1563
#if PLATFORM(X86_64)
1564
emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, X86Registers::ecx);
1566
// Allocate stack space for our arglist
1567
subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
1568
COMPILE_ASSERT((sizeof(ArgList) & 0xf) == 0, ArgList_should_by_16byte_aligned);
1571
subPtr(Imm32(1), X86Registers::ecx); // Don't include 'this' in argcount
1574
storePtr(X86Registers::ecx, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));
1576
// Calculate the start of the callframe header, and store in edx
1577
addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), callFrameRegister, X86Registers::edx);
1579
// Calculate start of arguments as callframe header - sizeof(Register) * argcount (ecx)
1580
mul32(Imm32(sizeof(Register)), X86Registers::ecx, X86Registers::ecx);
1581
subPtr(X86Registers::ecx, X86Registers::edx);
1583
// push pointer to arguments
1584
storePtr(X86Registers::edx, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));
1586
// ArgList is passed by reference so is stackPointerRegister
1587
move(stackPointerRegister, X86Registers::ecx);
1589
// edx currently points to the first argument, edx-sizeof(Register) points to 'this'
1590
loadPtr(Address(X86Registers::edx, -(int32_t)sizeof(Register)), X86Registers::edx);
1592
emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::esi);
1594
move(callFrameRegister, X86Registers::edi);
1596
call(Address(X86Registers::esi, OBJECT_OFFSETOF(JSFunction, m_data)));
1598
addPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
1600
emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
1602
/* We have two structs that we use to describe the stackframe we set up for our
1603
* call to native code. NativeCallFrameStructure describes the how we set up the stack
1604
* in advance of the call. NativeFunctionCalleeSignature describes the callframe
1605
* as the native code expects it. We do this as we are using the fastcall calling
1606
* convention which results in the callee popping its arguments off the stack, but
1607
* not the rest of the callframe so we need a nice way to ensure we increment the
1608
* stack pointer by the right amount after the call.
1610
#if COMPILER(MSVC) || PLATFORM(LINUX)
1611
struct NativeCallFrameStructure {
1612
// CallFrame* callFrame; // passed in EDX
1615
ArgList* argPointer;
1619
struct NativeFunctionCalleeSignature {
1622
ArgList* argPointer;
1625
struct NativeCallFrameStructure {
1626
// CallFrame* callFrame; // passed in ECX
1627
// JSObject* callee; // passed in EDX
1629
ArgList* argPointer;
1632
struct NativeFunctionCalleeSignature {
1634
ArgList* argPointer;
1637
const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
1638
// Allocate system stack frame
1639
subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);
1642
subPtr(Imm32(1), regT0); // Don't include 'this' in argcount
1645
storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));
1647
// Calculate the start of the callframe header, and store in regT1
1648
addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);
1650
// Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
1651
mul32(Imm32(sizeof(Register)), regT0, regT0);
1652
subPtr(regT0, regT1);
1653
storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));
1655
// ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
1656
addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
1657
storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));
1659
// regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
1660
loadPtr(Address(regT1, -(int)sizeof(Register)), regT1);
1661
storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue)));
1663
#if COMPILER(MSVC) || PLATFORM(LINUX)
1664
// ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
1665
addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86Registers::ecx);
1668
emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::eax);
1669
storePtr(X86Registers::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));
1672
move(callFrameRegister, X86Registers::edx);
1674
call(Address(X86Registers::eax, OBJECT_OFFSETOF(JSFunction, m_data)));
1676
// JSValue is a non-POD type
1677
loadPtr(Address(X86Registers::eax), X86Registers::eax);
1680
emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::edx);
1683
move(callFrameRegister, X86Registers::ecx);
1684
call(Address(X86Registers::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
1687
// We've put a few temporaries on the stack in addition to the actual arguments
1688
// so pull them off now
1689
addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);
1691
#elif PLATFORM(ARM_TRADITIONAL)
1692
emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
1694
// Allocate stack space for our arglist
1695
COMPILE_ASSERT((sizeof(ArgList) & 0x7) == 0, ArgList_should_by_8byte_aligned);
1696
subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
1699
subPtr(Imm32(1), regT0); // Don't include 'this' in argcount
1702
storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));
1704
// Calculate the start of the callframe header, and store in regT1
1705
move(callFrameRegister, regT1);
1706
sub32(Imm32(RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), regT1);
1708
// Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT1)
1709
mul32(Imm32(sizeof(Register)), regT0, regT0);
1710
subPtr(regT0, regT1);
1712
// push pointer to arguments
1713
storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));
1715
// Setup arg3: regT1 currently points to the first argument, regT1-sizeof(Register) points to 'this'
1716
loadPtr(Address(regT1, -(int32_t)sizeof(Register)), regT2);
1719
emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT1);
1722
move(callFrameRegister, regT0);
1724
// Setup arg4: This is a plain hack
1725
move(stackPointerRegister, ARMRegisters::S0);
1727
move(ctiReturnRegister, ARMRegisters::lr);
1728
call(Address(regT1, OBJECT_OFFSETOF(JSFunction, m_data)));
1730
addPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
1732
#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
1733
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
1738
// Check for an exception
1739
loadPtr(&(globalData->exception), regT2);
1740
Jump exceptionHandler = branchTestPtr(NonZero, regT2);
1742
// Grab the return address.
1743
emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
1745
// Restore our caller's "r".
1746
emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
1749
restoreReturnAddressBeforeReturn(regT1);
1752
// Handle an exception
1753
exceptionHandler.link(this);
1754
// Grab the return address.
1755
emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
1756
move(ImmPtr(&globalData->exceptionLocation), regT2);
1757
storePtr(regT1, regT2);
1758
move(ImmPtr(reinterpret_cast<void*>(ctiVMThrowTrampoline)), regT2);
1759
emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
1760
poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
1761
restoreReturnAddressBeforeReturn(regT2);
1765
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1766
Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
1767
Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
1768
Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
1771
// All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
1772
LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));
1774
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1775
patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
1776
patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
1777
patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
1779
patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
1780
patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
1781
#if ENABLE(JIT_OPTIMIZE_CALL)
1782
patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
1783
patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
1784
patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
1787
CodeRef finalCode = patchBuffer.finalizeCode();
1788
*executablePool = finalCode.m_executablePool;
1790
*ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
1791
*ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
1792
*ctiNativeCallThunk = trampolineAt(finalCode, nativeCallThunk);
1793
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1794
*ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
1796
UNUSED_PARAM(ctiStringLengthTrampoline);
1800
// op_mov: copy the value of the src virtual register into dst.
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src)) {
        // Constant source: store the encoded constant directly into the register file.
        storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
        if (dst == m_lastResultBytecodeRegister)
            killLastResultRegister();
    } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
        // If either the src or dst is the cached register go though
        // get/put registers to make sure we track this correctly.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        // Perform the copy via regT1; do not disturb any mapping in regT0.
        loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
        storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
    }
}
// op_end: terminate execution of the code block, returning the given virtual register's value.
void JIT::emit_op_end(Instruction* currentInstruction)
{
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_end).call();
    ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}
// op_jmp: unconditional jump to the target bytecode offset.
void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target + 1);
    RECORD_JUMP_TARGET(target + 1);
}
// op_loop: unconditional loop back-edge; services the timeout check before jumping.
void JIT::emit_op_loop(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target + 1);
}
void JIT::emit_op_loop_if_less(Instruction* currentInstruction)
1850
unsigned op1 = currentInstruction[1].u.operand;
1851
unsigned op2 = currentInstruction[2].u.operand;
1852
unsigned target = currentInstruction[3].u.operand;
1853
if (isOperandConstantImmediateInt(op2)) {
1854
emitGetVirtualRegister(op1, regT0);
1855
emitJumpSlowCaseIfNotImmediateInteger(regT0);
1857
int32_t op2imm = getConstantOperandImmediateInt(op2);
1859
int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
1861
addJump(branch32(LessThan, regT0, Imm32(op2imm)), target + 3);
1862
} else if (isOperandConstantImmediateInt(op1)) {
1863
emitGetVirtualRegister(op2, regT0);
1864
emitJumpSlowCaseIfNotImmediateInteger(regT0);
1866
int32_t op1imm = getConstantOperandImmediateInt(op1);
1868
int32_t op1imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op1)));
1870
addJump(branch32(GreaterThan, regT0, Imm32(op1imm)), target + 3);
1872
emitGetVirtualRegisters(op1, regT0, op2, regT1);
1873
emitJumpSlowCaseIfNotImmediateInteger(regT0);
1874
emitJumpSlowCaseIfNotImmediateInteger(regT1);
1875
addJump(branch32(LessThan, regT0, regT1), target + 3);
1879
void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
1883
unsigned op1 = currentInstruction[1].u.operand;
1884
unsigned op2 = currentInstruction[2].u.operand;
1885
unsigned target = currentInstruction[3].u.operand;
1886
if (isOperandConstantImmediateInt(op2)) {
1887
emitGetVirtualRegister(op1, regT0);
1888
emitJumpSlowCaseIfNotImmediateInteger(regT0);
1890
int32_t op2imm = getConstantOperandImmediateInt(op2);
1892
int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
1894
addJump(branch32(LessThanOrEqual, regT0, Imm32(op2imm)), target + 3);
1896
emitGetVirtualRegisters(op1, regT0, op2, regT1);
1897
emitJumpSlowCaseIfNotImmediateInteger(regT0);
1898
emitJumpSlowCaseIfNotImmediateInteger(regT1);
1899
addJump(branch32(LessThanOrEqual, regT0, regT1), target + 3);
1903
// op_new_object: allocate a fresh empty object via the stub and store it in dst.
void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}
// op_instanceof: fast path for "value instanceof baseVal" — walks value's prototype
// chain looking for proto. Non-default hasInstance falls back to the slow case.
void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT1);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT2);

    // Check that baseVal & proto are cells.
    emitJumpSlowCaseIfNotJSCell(regT0);
    emitJumpSlowCaseIfNotJSCell(regT1);

    // Check that baseVal is an object, that it 'ImplementsHasInstance' but that it does not 'OverridesHasInstance'.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
    addSlowCase(branchTest32(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));

    // If value is not an Object, return false.
    Jump valueIsImmediate = emitJumpIfNotJSCell(regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    Jump valueIsNotObject = branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType));

    // Check proto is object.
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(ImmPtr(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchPtr(NotEqual, regT2, ImmPtr(JSValue::encode(jsNull())), loop);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    valueIsImmediate.link(this);
    valueIsNotObject.link(this);
    move(ImmPtr(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
// op_new_func: create a function object from a function declaration via the stub.
void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
// op_call: delegate to the shared call compiler, consuming one call-link-info slot.
void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}
// op_call_eval: delegate to the shared call compiler, consuming one call-link-info slot.
void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
}
// op_load_varargs: spread an arguments-like object onto the register file via the
// stub, then record the resulting argument count.
void JIT::emit_op_load_varargs(Instruction* currentInstruction)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;

    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(Imm32(argsOffset));
    stubCall.call();
    // Stores a naked int32 in the register file.
    store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
}
// op_call_varargs: delegate to the varargs call compiler.
void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCallVarargs(currentInstruction);
}
// op_construct: delegate to the shared call compiler, consuming one call-link-info slot.
void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}
// op_get_global_var: read a register slot out of a known global/variable object.
void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[2].u.jsCell);
    move(ImmPtr(globalObject), regT0);
    emitGetVariableObjectRegister(regT0, currentInstruction[3].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
// op_put_global_var: write a value into a register slot of a known global/variable object.
void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
    JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[1].u.jsCell);
    move(ImmPtr(globalObject), regT0);
    emitPutVariableObjectRegister(regT1, regT0, currentInstruction[2].u.operand);
}
// op_get_scoped_var: walk 'skip' links up the scope chain, then read a register
// slot out of the scope's variable object.
void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
    while (skip--)
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT0);
    emitGetVariableObjectRegister(regT0, currentInstruction[2].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
// op_put_scoped_var: walk 'skip' links up the scope chain, then write a value into
// a register slot of the scope's variable object.
void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1);
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
    while (skip--)
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);

    loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
    emitPutVariableObjectRegister(regT0, regT1, currentInstruction[1].u.operand);
}
// op_tear_off_activation: detach the activation object from the stack via the stub.
void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
}
// op_tear_off_arguments: detach the arguments object from the stack via the stub.
void JIT::emit_op_tear_off_arguments(Instruction*)
{
    JITStubCall(this, cti_op_tear_off_arguments).call();
}
// op_ret: return from a JS function — load the result, restore the caller's frame
// and return address, and return.
void JIT::emit_op_ret(Instruction* currentInstruction)
{
#ifdef QT_BUILD_SCRIPT_LIB
    // Qt Script: notify the debugger of the return before unwinding.
    JITStubCall stubCall(this, cti_op_debug_return);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
#endif
    // We could JIT generate the deref, only calling out to C when the refcount hits zero.
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_ret_scopeChain).call();

    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}
// op_new_array: allocate an array from a run of constant-pool registers via the stub.
void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}
// op_resolve: look up an identifier in the scope chain via the stub.
void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
// op_construct_verify: check the constructor's return value is an object
// (cell with ObjectType structure); otherwise take the slow case.
void JIT::emit_op_construct_verify(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo) + OBJECT_OFFSETOF(TypeInfo, m_type)), Imm32(ObjectType)));
}
// op_to_primitive: fast path — immediates and strings are already primitive;
// any other cell takes the slow case.
void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}
// op_strcat: concatenate a run of registers into a string via the stub.
void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}
// op_loop_if_true: loop back-edge taken when the operand is truthy. Fast paths
// cover integer zero/non-zero and the two booleans; everything else is slow-cased.
void JIT::emit_op_loop_if_true(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0))));
    addJump(emitJumpIfImmediateInteger(regT0), target + 2);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))), target + 2);
    addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}
// op_resolve_base: resolve the base object for an identifier via the stub.
void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
// op_resolve_skip: resolve an identifier, skipping a known number of scopes, via the stub.
void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
    stubCall.call(currentInstruction[1].u.operand);
}
// op_resolve_global: inline-cached global lookup. The fast path checks the cached
// Structure and loads the property from external storage; a miss calls the stub,
// which repatches the cache.
void JIT::emit_op_resolve_global(Instruction* currentInstruction)
{
    // Fast case
    void* globalObject = currentInstruction[2].u.jsCell;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Check Structure of global object
    move(ImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    Jump noMatch = branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))); // Structures don't match

    // Load cached property
    // Assume that the global object always uses external storage.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT0);
    load32(offsetAddr, regT1);
    loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
    Jump end = jump();

    // Slow case
    noMatch.link(this);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(ImmPtr(globalObject));
    stubCall.addArgument(ImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(currentInstruction[1].u.operand);
    end.link(this);
}
// op_not: boolean negation — strip the bool tag, slow-case anything that is not a
// boolean immediate, then flip the payload bit and re-tag.
void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, Imm32(static_cast<int32_t>(~JSImmediate::ExtendedPayloadBitBoolValue))));
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool | JSImmediate::ExtendedPayloadBitBoolValue)), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
// op_jfalse: jump when the operand is falsy — integer zero or boolean false on the
// fast path; non-boolean, non-integer values take the slow case.
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0)))), target + 2);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))), target + 2);
    addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
    RECORD_JUMP_TARGET(target + 2);
}
// op_jeq_null: jump when the operand compares equal to null — i.e. it is
// null/undefined, or a cell masquerading as undefined.
void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNull()))), target + 2);

    wasNotImmediate.link(this);
    RECORD_JUMP_TARGET(target + 2);
}
// op_jneq_null: jump when the operand does NOT compare equal to null — the mirror
// image of op_jeq_null.
void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsNull()))), target + 2);

    wasNotImmediate.link(this);
    RECORD_JUMP_TARGET(target + 2);
}
// op_jneq_ptr: jump when the operand is not a specific known cell pointer.
void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue(ptr)))), target + 3);

    RECORD_JUMP_TARGET(target + 3);
}
// op_jsr: jump to a subroutine, storing the (patched-in later) return address in a
// virtual register for op_sret.
void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target + 2);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
    killLastResultRegister();
    RECORD_JUMP_TARGET(target + 2);
}
// op_sret: return from a subroutine by jumping through the address op_jsr stored.
void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
    killLastResultRegister();
}
// op_eq: fast equality for two immediate integers; other types take the slow case.
void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    set32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
void JIT::emit_op_bitnot(Instruction* currentInstruction)
2297
emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
2298
emitJumpSlowCaseIfNotImmediateInteger(regT0);
2301
emitFastArithIntToImmNoCheck(regT0, regT0);
2303
xorPtr(Imm32(~JSImmediate::TagTypeNumber), regT0);
2305
emitPutVirtualRegister(currentInstruction[1].u.operand);
2308
// op_resolve_with_base: resolve an identifier and its base object via the stub.
void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}
// op_new_func_exp: create a function object from a function expression via the stub.
void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
// op_jtrue: jump when the operand is truthy — non-zero integer or boolean true on
// the fast path; other values take the slow case.
void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0))));
    addJump(emitJumpIfImmediateInteger(regT0), target + 2);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))), target + 2);
    addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
    RECORD_JUMP_TARGET(target + 2);
}
// op_neq: fast inequality for two immediate integers; other types take the slow case.
void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    set32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
// op_bitxor: XOR of two immediate integers; XOR of the values also XORs away the
// tag, so the result must be re-tagged.
void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xorPtr(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
// op_new_regexp: create a RegExp object from the code block's regexp pool via the stub.
void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}
// op_bitor: OR of two immediate integers; ORing preserves the tag, so no re-tag needed.
void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    orPtr(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
// op_throw: raise an exception via the stub; the stub rewrites its return address,
// so control never falls through here.
void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes it's return address,
    // this point in the code should never be reached.
    breakpoint();
#endif
}
// op_next_pname: fetch the next property name from the enumeration iterator; a
// non-zero result stores the name and jumps back into the loop, zero falls through.
void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_next_pname);
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    stubCall.call();
    Jump endOfIter = branchTestPtr(Zero, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
    addJump(jump(), currentInstruction[3].u.operand + 3);
    endOfIter.link(this);
}
// op_push_scope: push an object onto the scope chain via the stub.
void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}
// op_pop_scope: pop the top of the scope chain via the stub.
void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}
// Shared body of op_stricteq/op_nstricteq: fast path compares raw encoded values,
// valid only when neither operand is a number and at most one is a cell.
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump to a slow case if either operand is a number, or if both are JSCell*s.
    move(regT0, regT2);
    orPtr(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));
    addSlowCase(emitJumpIfImmediateNumber(regT2));

    if (type == OpStrictEq)
        set32(Equal, regT1, regT0, regT0);
    else
        set32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}
// op_stricteq: strict equality, via the shared compiler.
void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}
// op_nstricteq: strict inequality, via the shared compiler.
void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}
// op_to_jsnumber: fast path passes immediate integers and number cells through
// unchanged; anything else takes the slow case.
void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    Jump wasImmediate = emitJumpIfImmediateInteger(regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(NumberType)));

    wasImmediate.link(this);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
// op_push_new_scope: push a new scope binding an identifier to a value, via the stub.
void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}
// op_catch: landing pad for exceptions — restore the call frame pointer from the
// stack frame and store the caught exception (in regT0) into the handler register.
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
    peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
#ifdef QT_BUILD_SCRIPT_LIB
    // Qt Script: notify the debugger that an exception was caught.
    JITStubCall stubCall(this, cti_op_debug_catch);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
#endif
}
// op_jmp_scopes: pop scopes down to a given depth via the stub, then jump.
void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand + 2);
    RECORD_JUMP_TARGET(currentInstruction[2].u.operand + 2);
}
// op_switch_imm: immediate-integer switch — the stub picks the destination from the
// jump table (linked later via m_switches) and returns it in regT0.
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
// op_switch_char: single-character switch — the stub picks the destination from the
// character jump table and returns it in regT0.
void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
// op_switch_string: string switch — the stub picks the destination from the string
// jump table and returns it in regT0.
void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
// op_new_error: construct an Error object (type, message constant, bytecode offset)
// via the stub.
void JIT::emit_op_new_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_error);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[3].u.operand))));
    stubCall.addArgument(Imm32(m_bytecodeIndex));
    stubCall.call(currentInstruction[1].u.operand);
}
// op_debug: invoke the debugger hook via the stub, forwarding the four bytecode
// operands (hook id plus source position information).
void JIT::emit_op_debug(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[4].u.operand));
    stubCall.call();
}
// op_eq_null: set dst to whether src compares equal to null — for cells that is the
// MasqueradesAsUndefined bit, for immediates a tag-masked compare against null.
void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    setTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    setPtr(Equal, regT0, Imm32(JSImmediate::FullTagTypeNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
// op_neq_null: set dst to whether src does NOT compare equal to null — the mirror
// image of op_eq_null.
void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    setTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    setPtr(NotEqual, regT0, Imm32(JSImmediate::FullTagTypeNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
// op_enter: function prologue — zero-initialize local variable registers.
void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);
}
// op_enter_with_activation: same register zapping as op_enter, then push an
// activation object for the frame via the cti_op_push_activation stub.
void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);

    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
}
// op_create_arguments: lazily create the 'arguments' object if the frame's
// ArgumentsRegister slot is still empty. The no-params stub avoids copying
// when the function declares no formal parameters beyond 'this'.
void JIT::emit_op_create_arguments(Instruction*)
{
    Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * RegisterFile::ArgumentsRegister));
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();
    argsCreated.link(this);
}
// op_init_arguments: mark the 'arguments' object as not-yet-created by
// storing a null pointer into the frame's ArgumentsRegister slot.
void JIT::emit_op_init_arguments(Instruction*)
{
    storePtr(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * RegisterFile::ArgumentsRegister));
}
// op_convert_this fast path: 'this' needs no conversion when it is a cell
// whose type info lacks NeedsThisConversion. Non-cells and convertible cells
// bail to the slow case (emitSlow_op_convert_this).
void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    addSlowCase(branchTest32(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
}
// op_profile_will_call: notify the profiler (if one is enabled) that a call
// is about to happen. Skips the stub call entirely when the profiler
// reference on the stack frame is null.
void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}
// op_profile_did_call: notify the profiler (if one is enabled) that a call
// just returned. Mirrors emit_op_profile_will_call.
void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}
// Slow path for op_convert_this: two slow-case entries were registered by the
// fast path (not-a-cell, NeedsThisConversion), so link both before calling
// the conversion stub.
void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
// Slow path for op_construct_verify: the constructed result was not an
// object, so fall back to returning the raw 'this' value (operand 2) in dst.
void JIT::emitSlow_op_construct_verify(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
// Slow path for op_to_primitive: value was a non-string cell, so call the
// generic ToPrimitive stub.
void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
// Slow path for op_get_by_val: the fast path registered five slow cases
// (non-int property, non-cell base, non-array base, out-of-vector index,
// hole read); link them all, then call the generic stub.
void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check
    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.call(dst);
}
// Slow path for op_loop_if_less: one slow case per non-constant operand was
// registered by the fast path, so the number of linkSlowCase calls depends on
// which operand (if any) is a constant immediate int. In every branch the
// stub computes the comparison and we jump back to the hot path when true.
// target + 3 skips the 3-word op_loop_if_less instruction.
void JIT::emitSlow_op_loop_if_less(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_less);
        stubCall.addArgument(regT0);
        stubCall.addArgument(op2, regT2);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    } else if (isOperandConstantImmediateInt(op1)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_less);
        stubCall.addArgument(op1, regT2);
        stubCall.addArgument(regT0);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    } else {
        // Both operands dynamic: two slow cases (one per operand check).
        linkSlowCase(iter);
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_less);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    }
}
// Slow path for op_loop_if_lesseq: same shape as emitSlow_op_loop_if_less,
// but only the constant-op2 fast path exists, so there are two branches.
void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(currentInstruction[2].u.operand, regT2);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    } else {
        // Both operands dynamic: two slow cases (one per operand check).
        linkSlowCase(iter);
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    }
}
// Slow path for op_put_by_val: link the four slow cases registered by the
// fast path, then store through the generic stub. regT0 still holds the base
// cell from the fast path.
void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    linkSlowCase(iter); // in vector check

    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(regT0);
    stubPutByValCall.addArgument(property, regT2);
    stubPutByValCall.addArgument(value, regT2);
    stubPutByValCall.call();
}
// Slow path for op_loop_if_true: value was not a boolean/int immediate, so
// compute truthiness via the cti_op_jtrue stub and jump back to the hot path
// if true. target + 2 skips the 2-word op_loop_if_true instruction.
void JIT::emitSlow_op_loop_if_true(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand + 2);
}
// Slow path for op_not: the fast path already xor'ed the bool tag off regT0
// before bailing, so undo that xor to recover the original value before
// handing it to the stub.
void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
// Slow path for op_jfalse: reuse the cti_op_jtrue stub and invert the test —
// jump back to the hot-path target when the stub reports false (Zero).
void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand + 2); // inverted!
}
// Slow path for op_bitnot: operand was not an immediate int; call the stub.
void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitnot);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
// Slow path for op_jtrue: compute truthiness via the stub and jump back to
// the hot-path target when true.
void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand + 2);
}
// Slow path for op_bitxor: at least one operand was not an immediate int.
void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
// Slow path for op_bitor: at least one operand was not an immediate int.
void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
// Slow path for op_eq: the stub returns 0/1 in the return register; tag it
// as a boolean immediate before storing to dst.
void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
// Slow path for op_neq: reuse the cti_op_eq stub and invert the low bit of
// its 0/1 result before tagging it as a boolean.
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(Imm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
// Slow path for op_stricteq: two slow cases were registered by the fast
// path, so link both before calling the stub.
void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
// Slow path for op_nstricteq: mirror of emitSlow_op_stricteq with the
// negated-comparison stub.
void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}
// Slow path for op_instanceof: three slow cases were registered by the fast
// path (value/base/proto checks); link them all, then call the full stub with
// the original virtual-register operands.
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.addArgument(currentInstruction[4].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}
// Slow path for op_call: delegate to the shared call slow-case compiler,
// consuming the next call-link-info slot.
void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}
// Slow path for op_call_eval: same shared compiler, tagged as an eval call.
void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}
// Slow path for op_call_varargs: varargs calls have their own slow-case
// compiler (no call-link-info slot is consumed).
void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}
// Slow path for op_construct: shared call compiler, tagged as a construct.
void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}
// Slow path for op_to_jsnumber: link the not-a-cell case (skipped when the
// source operand is a known cell) and the not-a-number-cell case, then call
// the conversion stub.
void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}
#endif // USE(JSVALUE32_64)

} // namespace JSC

#endif // ENABLE(JIT)