~oif-team/ubuntu/natty/qt4-x11/xi2.1

Viewing changes to src/3rdparty/webkit/JavaScriptCore/jit/JIT.cpp

  • Committer: Bazaar Package Importer
  • Author(s): Alessandro Ghersi
  • Date: 2009-11-02 18:30:08 UTC
  • mfrom: (1.2.2 upstream)
  • mto: (15.2.5 experimental)
  • mto: This revision was merged to the branch mainline in revision 88.
  • Revision ID: james.westby@ubuntu.com-20091102183008-b6a4gcs128mvfb3m
Tags: upstream-4.6.0~beta1
Import upstream version 4.6.0~beta1

26
26
#include "config.h"
27
27
#include "JIT.h"
28
28
 
 
29
// This probably does not belong here; adding here for now as a quick Windows build fix.
 
30
#if ENABLE(ASSEMBLER) && PLATFORM(X86) && !PLATFORM(MAC)
 
31
#include "MacroAssembler.h"
 
32
JSC::MacroAssemblerX86Common::SSE2CheckState JSC::MacroAssemblerX86Common::s_sse2CheckState = NotCheckedSSE2;
 
33
#endif
 
34
 
29
35
#if ENABLE(JIT)
30
36
 
31
37
#include "CodeBlock.h"
 
38
#include "Interpreter.h"
32
39
#include "JITInlineMethods.h"
 
40
#include "JITStubs.h"
 
41
#include "JITStubCall.h"
33
42
#include "JSArray.h"
34
43
#include "JSFunction.h"
35
 
#include "Interpreter.h"
 
44
#include "LinkBuffer.h"
 
45
#include "RepatchBuffer.h"
36
46
#include "ResultType.h"
37
47
#include "SamplingTool.h"
38
48
 
44
54
 
45
55
namespace JSC {
46
56
 
47
 
COMPILE_ASSERT(STUB_ARGS_code == 0xC, STUB_ARGS_code_is_C);
48
 
COMPILE_ASSERT(STUB_ARGS_callFrame == 0xE, STUB_ARGS_callFrame_is_E);
49
 
 
50
 
#if COMPILER(GCC) && PLATFORM(X86)
51
 
 
52
 
#if PLATFORM(DARWIN)
53
 
#define SYMBOL_STRING(name) "_" #name
54
 
#else
55
 
#define SYMBOL_STRING(name) #name
56
 
#endif
57
 
 
58
 
asm(
59
 
".globl " SYMBOL_STRING(ctiTrampoline) "\n"
60
 
SYMBOL_STRING(ctiTrampoline) ":" "\n"
61
 
    "pushl %ebp" "\n"
62
 
    "movl %esp, %ebp" "\n"
63
 
    "pushl %esi" "\n"
64
 
    "pushl %edi" "\n"
65
 
    "pushl %ebx" "\n"
66
 
    "subl $0x1c, %esp" "\n"
67
 
    "movl $512, %esi" "\n"
68
 
    "movl 0x38(%esp), %edi" "\n" // Ox38 = 0x0E * 4, 0x0E = STUB_ARGS_callFrame (see assertion above)
69
 
    "call *0x30(%esp)" "\n" // Ox30 = 0x0C * 4, 0x0C = STUB_ARGS_code (see assertion above)
70
 
    "addl $0x1c, %esp" "\n"
71
 
    "popl %ebx" "\n"
72
 
    "popl %edi" "\n"
73
 
    "popl %esi" "\n"
74
 
    "popl %ebp" "\n"
75
 
    "ret" "\n"
76
 
);
77
 
 
78
 
asm(
79
 
".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
80
 
SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
81
 
#if USE(JIT_STUB_ARGUMENT_VA_LIST)
82
 
    "call " SYMBOL_STRING(_ZN3JSC11Interpreter12cti_vm_throwEPvz) "\n"
83
 
#else
84
 
#if USE(JIT_STUB_ARGUMENT_REGISTER)
85
 
    "movl %esp, %ecx" "\n"
86
 
#else // JIT_STUB_ARGUMENT_STACK
87
 
    "movl %esp, 0(%esp)" "\n"
88
 
#endif
89
 
    "call " SYMBOL_STRING(_ZN3JSC11Interpreter12cti_vm_throwEPPv) "\n"
90
 
#endif
91
 
    "addl $0x1c, %esp" "\n"
92
 
    "popl %ebx" "\n"
93
 
    "popl %edi" "\n"
94
 
    "popl %esi" "\n"
95
 
    "popl %ebp" "\n"
96
 
    "ret" "\n"
97
 
);
98
 
    
99
 
#elif COMPILER(GCC) && PLATFORM(X86_64)
100
 
 
101
 
#if PLATFORM(DARWIN)
102
 
#define SYMBOL_STRING(name) "_" #name
103
 
#else
104
 
#define SYMBOL_STRING(name) #name
105
 
#endif
106
 
 
107
 
asm(
108
 
".globl " SYMBOL_STRING(ctiTrampoline) "\n"
109
 
SYMBOL_STRING(ctiTrampoline) ":" "\n"
110
 
    "pushq %rbp" "\n"
111
 
    "movq %rsp, %rbp" "\n"
112
 
    "pushq %r12" "\n"
113
 
    "pushq %r13" "\n"
114
 
    "pushq %rbx" "\n"
115
 
    "subq $0x38, %rsp" "\n"
116
 
    "movq $512, %r12" "\n"
117
 
    "movq 0x70(%rsp), %r13" "\n" // Ox70 = 0x0E * 8, 0x0E = STUB_ARGS_callFrame (see assertion above)
118
 
    "call *0x60(%rsp)" "\n" // Ox60 = 0x0C * 8, 0x0C = STUB_ARGS_code (see assertion above)
119
 
    "addq $0x38, %rsp" "\n"
120
 
    "popq %rbx" "\n"
121
 
    "popq %r13" "\n"
122
 
    "popq %r12" "\n"
123
 
    "popq %rbp" "\n"
124
 
    "ret" "\n"
125
 
);
126
 
 
127
 
asm(
128
 
".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
129
 
SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
130
 
#if USE(JIT_STUB_ARGUMENT_REGISTER)
131
 
    "movq %rsp, %rdi" "\n"
132
 
    "call " SYMBOL_STRING(_ZN3JSC11Interpreter12cti_vm_throwEPPv) "\n"
133
 
#else // JIT_STUB_ARGUMENT_VA_LIST or JIT_STUB_ARGUMENT_STACK
134
 
#error "JIT_STUB_ARGUMENT configuration not supported."
135
 
#endif
136
 
    "addq $0x38, %rsp" "\n"
137
 
    "popq %rbx" "\n"
138
 
    "popq %r13" "\n"
139
 
    "popq %r12" "\n"
140
 
    "popq %rbp" "\n"
141
 
    "ret" "\n"
142
 
);
143
 
    
144
 
#elif COMPILER(MSVC)
145
 
 
146
 
extern "C" {
147
 
    
148
 
    __declspec(naked) JSValueEncodedAsPointer* ctiTrampoline(void* code, RegisterFile*, CallFrame*, JSValuePtr* exception, Profiler**, JSGlobalData*)
149
 
    {
150
 
        __asm {
151
 
            push ebp;
152
 
            mov ebp, esp;
153
 
            push esi;
154
 
            push edi;
155
 
            push ebx;
156
 
            sub esp, 0x1c;
157
 
            mov esi, 512;
158
 
            mov ecx, esp;
159
 
            mov edi, [esp + 0x38];
160
 
            call [esp + 0x30]; // 0x30 = 0x0C * 4, 0x0C = STUB_ARGS_code (see assertion above)
161
 
            add esp, 0x1c;
162
 
            pop ebx;
163
 
            pop edi;
164
 
            pop esi;
165
 
            pop ebp;
166
 
            ret;
167
 
        }
168
 
    }
169
 
    
170
 
    __declspec(naked) void ctiVMThrowTrampoline()
171
 
    {
172
 
        __asm {
173
 
#if USE(JIT_STUB_ARGUMENT_REGISTER)
174
 
            mov ecx, esp;
175
 
#else // JIT_STUB_ARGUMENT_VA_LIST or JIT_STUB_ARGUMENT_STACK
176
 
#error "JIT_STUB_ARGUMENT configuration not supported."
177
 
#endif
178
 
            call JSC::Interpreter::cti_vm_throw;
179
 
            add esp, 0x1c;
180
 
            pop ebx;
181
 
            pop edi;
182
 
            pop esi;
183
 
            pop ebp;
184
 
            ret;
185
 
        }
186
 
    }
187
 
    
188
 
}
189
 
 
190
 
#endif
191
 
 
192
 
void ctiSetReturnAddress(void** where, void* what)
193
 
{
194
 
    *where = what;
195
 
}
196
 
 
197
 
void ctiPatchCallByReturnAddress(void* where, void* what)
198
 
{
199
 
    MacroAssembler::Jump::patch(where, what);
 
57
void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction)
 
58
{
 
59
    RepatchBuffer repatchBuffer(codeblock);
 
60
    repatchBuffer.relinkNearCallerToTrampoline(returnAddress, newCalleeFunction);
 
61
}
 
62
 
 
63
void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction)
 
64
{
 
65
    RepatchBuffer repatchBuffer(codeblock);
 
66
    repatchBuffer.relinkCallerToTrampoline(returnAddress, newCalleeFunction);
 
67
}
 
68
 
 
69
void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction)
 
70
{
 
71
    RepatchBuffer repatchBuffer(codeblock);
 
72
    repatchBuffer.relinkCallerToFunction(returnAddress, newCalleeFunction);
200
73
}
201
74
 
202
75
JIT::JIT(JSGlobalData* globalData, CodeBlock* codeBlock)
206
79
    , m_labels(codeBlock ? codeBlock->instructions().size() : 0)
207
80
    , m_propertyAccessCompilationInfo(codeBlock ? codeBlock->numberOfStructureStubInfos() : 0)
208
81
    , m_callStructureStubCompilationInfo(codeBlock ? codeBlock->numberOfCallLinkInfos() : 0)
 
82
    , m_bytecodeIndex((unsigned)-1)
 
83
#if USE(JSVALUE32_64)
 
84
    , m_jumpTargetIndex(0)
 
85
    , m_mappedBytecodeIndex((unsigned)-1)
 
86
    , m_mappedVirtualRegisterIndex((unsigned)-1)
 
87
    , m_mappedTag((RegisterID)-1)
 
88
    , m_mappedPayload((RegisterID)-1)
 
89
#else
209
90
    , m_lastResultBytecodeRegister(std::numeric_limits<int>::max())
210
91
    , m_jumpTargetsPosition(0)
211
 
{
212
 
}
213
 
 
214
 
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
215
 
{
216
 
    bool negated = (type == OpNStrictEq);
217
 
 
218
 
    unsigned dst = currentInstruction[1].u.operand;
219
 
    unsigned src1 = currentInstruction[2].u.operand;
220
 
    unsigned src2 = currentInstruction[3].u.operand;
221
 
 
222
 
    emitGetVirtualRegisters(src1, X86::eax, src2, X86::edx);
223
 
 
224
 
    // Check that both are immediates; if so, check whether they're equal.
225
 
    Jump firstNotImmediate = emitJumpIfJSCell(X86::eax);
226
 
    Jump secondNotImmediate = emitJumpIfJSCell(X86::edx);
227
 
    Jump bothWereImmediatesButNotEqual = jne32(X86::edx, X86::eax);
228
 
 
229
 
    // They are equal - set the result to true. (Or false, if negated).
230
 
    move(ImmPtr(JSValuePtr::encode(jsBoolean(!negated))), X86::eax);
231
 
    Jump bothWereImmediatesAndEqual = jump();
232
 
 
233
 
    // eax was not an immediate, we haven't yet checked edx.
234
 
    // If edx is also a JSCell, or is 0, then jump to a slow case,
235
 
    // otherwise these values are not equal.
236
 
    firstNotImmediate.link(this);
237
 
    emitJumpSlowCaseIfJSCell(X86::edx);
238
 
    addSlowCase(jePtr(X86::edx, ImmPtr(JSValuePtr::encode(JSImmediate::zeroImmediate()))));
239
 
    Jump firstWasNotImmediate = jump();
240
 
 
241
 
    // eax was an immediate, but edx wasn't.
242
 
    // If eax is 0 jump to a slow case, otherwise these values are not equal.
243
 
    secondNotImmediate.link(this);
244
 
    addSlowCase(jePtr(X86::eax, ImmPtr(JSValuePtr::encode(JSImmediate::zeroImmediate()))));
245
 
 
246
 
    // We get here if the two values are different immediates, or one is 0 and the other is a JSCell.
247
 
    // Values are not equal; set the result to false.
248
 
    bothWereImmediatesButNotEqual.link(this);
249
 
    firstWasNotImmediate.link(this);
250
 
    move(ImmPtr(JSValuePtr::encode(jsBoolean(negated))), X86::eax);
251
 
    
252
 
    bothWereImmediatesAndEqual.link(this);
253
 
    emitPutVirtualRegister(dst);
254
 
}
255
 
 
256
 
void JIT::emitSlowScriptCheck()
257
 
{
258
 
    Jump skipTimeout = jnzSub32(Imm32(1), timeoutCheckRegister);
259
 
    emitCTICall(Interpreter::cti_timeout_check);
260
 
    move(X86::eax, timeoutCheckRegister);
 
92
#endif
 
93
{
 
94
}
 
95
 
 
96
#if USE(JSVALUE32_64)
 
97
void JIT::emitTimeoutCheck()
 
98
{
 
99
    Jump skipTimeout = branchSub32(NonZero, Imm32(1), timeoutCheckRegister);
 
100
    JITStubCall stubCall(this, cti_timeout_check);
 
101
    stubCall.addArgument(regT1, regT0); // save last result registers.
 
102
    stubCall.call(timeoutCheckRegister);
 
103
    stubCall.getArgument(0, regT1, regT0); // reload last result registers.
 
104
    skipTimeout.link(this);
 
105
}
 
106
#else
 
107
void JIT::emitTimeoutCheck()
 
108
{
 
109
    Jump skipTimeout = branchSub32(NonZero, Imm32(1), timeoutCheckRegister);
 
110
    JITStubCall(this, cti_timeout_check).call(timeoutCheckRegister);
261
111
    skipTimeout.link(this);
262
112
 
263
113
    killLastResultRegister();
264
114
}
265
 
 
 
115
#endif
266
116
 
267
117
#define NEXT_OPCODE(name) \
268
118
    m_bytecodeIndex += OPCODE_LENGTH(name); \
269
119
    break;
270
120
 
271
 
#define CTI_COMPILE_BINARY_OP(name) \
272
 
    case name: { \
273
 
        emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx); \
274
 
        emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx); \
275
 
        emitCTICall(Interpreter::cti_##name); \
276
 
        emitPutVirtualRegister(currentInstruction[1].u.operand); \
277
 
        NEXT_OPCODE(name); \
278
 
    }
279
 
 
280
 
#define CTI_COMPILE_UNARY_OP(name) \
281
 
    case name: { \
282
 
        emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx); \
283
 
        emitCTICall(Interpreter::cti_##name); \
284
 
        emitPutVirtualRegister(currentInstruction[1].u.operand); \
 
121
#if USE(JSVALUE32_64)
 
122
#define DEFINE_BINARY_OP(name) \
 
123
    case name: { \
 
124
        JITStubCall stubCall(this, cti_##name); \
 
125
        stubCall.addArgument(currentInstruction[2].u.operand); \
 
126
        stubCall.addArgument(currentInstruction[3].u.operand); \
 
127
        stubCall.call(currentInstruction[1].u.operand); \
 
128
        NEXT_OPCODE(name); \
 
129
    }
 
130
 
 
131
#define DEFINE_UNARY_OP(name) \
 
132
    case name: { \
 
133
        JITStubCall stubCall(this, cti_##name); \
 
134
        stubCall.addArgument(currentInstruction[2].u.operand); \
 
135
        stubCall.call(currentInstruction[1].u.operand); \
 
136
        NEXT_OPCODE(name); \
 
137
    }
 
138
 
 
139
#else // USE(JSVALUE32_64)
 
140
 
 
141
#define DEFINE_BINARY_OP(name) \
 
142
    case name: { \
 
143
        JITStubCall stubCall(this, cti_##name); \
 
144
        stubCall.addArgument(currentInstruction[2].u.operand, regT2); \
 
145
        stubCall.addArgument(currentInstruction[3].u.operand, regT2); \
 
146
        stubCall.call(currentInstruction[1].u.operand); \
 
147
        NEXT_OPCODE(name); \
 
148
    }
 
149
 
 
150
#define DEFINE_UNARY_OP(name) \
 
151
    case name: { \
 
152
        JITStubCall stubCall(this, cti_##name); \
 
153
        stubCall.addArgument(currentInstruction[2].u.operand, regT2); \
 
154
        stubCall.call(currentInstruction[1].u.operand); \
 
155
        NEXT_OPCODE(name); \
 
156
    }
 
157
#endif // USE(JSVALUE32_64)
 
158
 
 
159
#define DEFINE_OP(name) \
 
160
    case name: { \
 
161
        emit_##name(currentInstruction); \
 
162
        NEXT_OPCODE(name); \
 
163
    }
 
164
 
 
165
#define DEFINE_SLOWCASE_OP(name) \
 
166
    case name: { \
 
167
        emitSlow_##name(currentInstruction, iter); \
285
168
        NEXT_OPCODE(name); \
286
169
    }
287
170
 
289
172
{
290
173
    Instruction* instructionsBegin = m_codeBlock->instructions().begin();
291
174
    unsigned instructionCount = m_codeBlock->instructions().size();
292
 
    unsigned propertyAccessInstructionIndex = 0;
293
 
    unsigned globalResolveInfoIndex = 0;
294
 
    unsigned callLinkInfoIndex = 0;
 
175
 
 
176
    m_propertyAccessInstructionIndex = 0;
 
177
    m_globalResolveInfoIndex = 0;
 
178
    m_callLinkInfoIndex = 0;
295
179
 
296
180
    for (m_bytecodeIndex = 0; m_bytecodeIndex < instructionCount; ) {
297
181
        Instruction* currentInstruction = instructionsBegin + m_bytecodeIndex;
299
183
 
300
184
#if ENABLE(OPCODE_SAMPLING)
301
185
        if (m_bytecodeIndex > 0) // Avoid the overhead of sampling op_enter twice.
302
 
            store32(m_interpreter->sampler()->encodeSample(currentInstruction), m_interpreter->sampler()->sampleSlot());
 
186
            sampleInstruction(currentInstruction);
 
187
#endif
 
188
 
 
189
#if !USE(JSVALUE32_64)
 
190
        if (m_labels[m_bytecodeIndex].isUsed())
 
191
            killLastResultRegister();
303
192
#endif
304
193
 
305
194
        m_labels[m_bytecodeIndex] = label();
306
 
        OpcodeID opcodeID = m_interpreter->getOpcodeID(currentInstruction->u.opcode);
307
 
 
308
 
        switch (opcodeID) {
309
 
        case op_mov: {
310
 
            emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
311
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
312
 
            NEXT_OPCODE(op_mov);
313
 
        }
314
 
        case op_add: {
315
 
            compileFastArith_op_add(currentInstruction);
316
 
            NEXT_OPCODE(op_add);
317
 
        }
318
 
        case op_end: {
319
 
            if (m_codeBlock->needsFullScopeChain())
320
 
                emitCTICall(Interpreter::cti_op_end);
321
 
            emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
322
 
            push(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
323
 
            ret();
324
 
            NEXT_OPCODE(op_end);
325
 
        }
326
 
        case op_jmp: {
327
 
            unsigned target = currentInstruction[1].u.operand;
328
 
            addJump(jump(), target + 1);
329
 
            NEXT_OPCODE(op_jmp);
330
 
        }
331
 
        case op_pre_inc: {
332
 
            compileFastArith_op_pre_inc(currentInstruction[1].u.operand);
333
 
            NEXT_OPCODE(op_pre_inc);
334
 
        }
335
 
        case op_loop: {
336
 
            emitSlowScriptCheck();
337
 
 
338
 
            unsigned target = currentInstruction[1].u.operand;
339
 
            addJump(jump(), target + 1);
340
 
            NEXT_OPCODE(op_end);
341
 
        }
342
 
        case op_loop_if_less: {
343
 
            emitSlowScriptCheck();
344
 
 
345
 
            unsigned op1 = currentInstruction[1].u.operand;
346
 
            unsigned op2 = currentInstruction[2].u.operand;
347
 
            unsigned target = currentInstruction[3].u.operand;
348
 
            if (isOperandConstantImmediateInt(op2)) {
349
 
                emitGetVirtualRegister(op1, X86::eax);
350
 
                emitJumpSlowCaseIfNotImmNum(X86::eax);
351
 
#if USE(ALTERNATE_JSIMMEDIATE)
352
 
                int32_t op2imm = JSImmediate::intValue(getConstantOperand(op2));
353
 
#else
354
 
                int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
355
 
#endif
356
 
                addJump(jl32(X86::eax, Imm32(op2imm)), target + 3);
357
 
            } else {
358
 
                emitGetVirtualRegisters(op1, X86::eax, op2, X86::edx);
359
 
                emitJumpSlowCaseIfNotImmNum(X86::eax);
360
 
                emitJumpSlowCaseIfNotImmNum(X86::edx);
361
 
                addJump(jl32(X86::eax, X86::edx), target + 3);
362
 
            }
363
 
            NEXT_OPCODE(op_loop_if_less);
364
 
        }
365
 
        case op_loop_if_lesseq: {
366
 
            emitSlowScriptCheck();
367
 
 
368
 
            unsigned op1 = currentInstruction[1].u.operand;
369
 
            unsigned op2 = currentInstruction[2].u.operand;
370
 
            unsigned target = currentInstruction[3].u.operand;
371
 
            if (isOperandConstantImmediateInt(op2)) {
372
 
                emitGetVirtualRegister(op1, X86::eax);
373
 
                emitJumpSlowCaseIfNotImmNum(X86::eax);
374
 
#if USE(ALTERNATE_JSIMMEDIATE)
375
 
                int32_t op2imm = JSImmediate::intValue(getConstantOperand(op2));
376
 
#else
377
 
                int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
378
 
#endif
379
 
                addJump(jle32(X86::eax, Imm32(op2imm)), target + 3);
380
 
            } else {
381
 
                emitGetVirtualRegisters(op1, X86::eax, op2, X86::edx);
382
 
                emitJumpSlowCaseIfNotImmNum(X86::eax);
383
 
                emitJumpSlowCaseIfNotImmNum(X86::edx);
384
 
                addJump(jle32(X86::eax, X86::edx), target + 3);
385
 
            }
386
 
            NEXT_OPCODE(op_loop_if_less);
387
 
        }
388
 
        case op_new_object: {
389
 
            emitCTICall(Interpreter::cti_op_new_object);
390
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
391
 
            NEXT_OPCODE(op_new_object);
392
 
        }
393
 
        case op_put_by_id: {
394
 
            compilePutByIdHotPath(currentInstruction[1].u.operand, &(m_codeBlock->identifier(currentInstruction[2].u.operand)), currentInstruction[3].u.operand, propertyAccessInstructionIndex++);
395
 
            NEXT_OPCODE(op_put_by_id);
396
 
        }
397
 
        case op_get_by_id: {
398
 
            compileGetByIdHotPath(currentInstruction[1].u.operand, currentInstruction[2].u.operand, &(m_codeBlock->identifier(currentInstruction[3].u.operand)), propertyAccessInstructionIndex++);
399
 
            NEXT_OPCODE(op_get_by_id);
400
 
        }
401
 
        case op_instanceof: {
402
 
            emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax); // value
403
 
            emitGetVirtualRegister(currentInstruction[3].u.operand, X86::ecx); // baseVal
404
 
            emitGetVirtualRegister(currentInstruction[4].u.operand, X86::edx); // proto
405
 
 
406
 
            // check if any are immediates
407
 
            move(X86::eax, X86::ebx);
408
 
            orPtr(X86::ecx, X86::ebx);
409
 
            orPtr(X86::edx, X86::ebx);
410
 
            emitJumpSlowCaseIfNotJSCell(X86::ebx);
411
 
 
412
 
            // check that all are object type - this is a bit of a bithack to avoid excess branching;
413
 
            // we check that the sum of the three type codes from Structures is exactly 3 * ObjectType,
414
 
            // this works because NumberType and StringType are smaller
415
 
            move(Imm32(3 * ObjectType), X86::ebx);
416
 
            loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::eax);
417
 
            loadPtr(Address(X86::ecx, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
418
 
            loadPtr(Address(X86::edx, FIELD_OFFSET(JSCell, m_structure)), X86::edx);
419
 
            sub32(Address(X86::eax, FIELD_OFFSET(Structure, m_typeInfo.m_type)), X86::ebx);
420
 
            sub32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_type)), X86::ebx);
421
 
            addSlowCase(jne32(Address(X86::edx, FIELD_OFFSET(Structure, m_typeInfo.m_type)), X86::ebx));
422
 
 
423
 
            // check that baseVal's flags include ImplementsHasInstance but not OverridesHasInstance
424
 
            load32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), X86::ecx);
425
 
            and32(Imm32(ImplementsHasInstance | OverridesHasInstance), X86::ecx);
426
 
            addSlowCase(jne32(X86::ecx, Imm32(ImplementsHasInstance)));
427
 
 
428
 
            emitGetVirtualRegister(currentInstruction[2].u.operand, X86::ecx); // reload value
429
 
            emitGetVirtualRegister(currentInstruction[4].u.operand, X86::edx); // reload proto
430
 
 
431
 
            // optimistically load true result
432
 
            move(ImmPtr(JSValuePtr::encode(jsBoolean(true))), X86::eax);
433
 
 
434
 
            Label loop(this);
435
 
 
436
 
            // load value's prototype
437
 
            loadPtr(Address(X86::ecx, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
438
 
            loadPtr(Address(X86::ecx, FIELD_OFFSET(Structure, m_prototype)), X86::ecx);
439
 
 
440
 
            Jump exit = jePtr(X86::ecx, X86::edx);
441
 
 
442
 
            jnePtr(X86::ecx, ImmPtr(JSValuePtr::encode(jsNull())), loop);
443
 
 
444
 
            move(ImmPtr(JSValuePtr::encode(jsBoolean(false))), X86::eax);
445
 
 
446
 
            exit.link(this);
447
 
 
448
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
449
 
 
450
 
            NEXT_OPCODE(op_instanceof);
451
 
        }
452
 
        case op_del_by_id: {
453
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
454
 
            Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
455
 
            emitPutJITStubArgConstant(ident, 2);
456
 
            emitCTICall(Interpreter::cti_op_del_by_id);
457
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
458
 
            NEXT_OPCODE(op_del_by_id);
459
 
        }
460
 
        case op_mul: {
461
 
            compileFastArith_op_mul(currentInstruction);
462
 
            NEXT_OPCODE(op_mul);
463
 
        }
464
 
        case op_new_func: {
465
 
            FuncDeclNode* func = m_codeBlock->function(currentInstruction[2].u.operand);
466
 
            emitPutJITStubArgConstant(func, 1);
467
 
            emitCTICall(Interpreter::cti_op_new_func);
468
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
469
 
            NEXT_OPCODE(op_new_func);
470
 
        }
471
 
        case op_call: {
472
 
            compileOpCall(opcodeID, currentInstruction, callLinkInfoIndex++);
473
 
            NEXT_OPCODE(op_call);
474
 
        }
475
 
        case op_call_eval: {
476
 
            compileOpCall(opcodeID, currentInstruction, callLinkInfoIndex++);
477
 
            NEXT_OPCODE(op_call_eval);
478
 
        }
479
 
        case op_construct: {
480
 
            compileOpCall(opcodeID, currentInstruction, callLinkInfoIndex++);
481
 
            NEXT_OPCODE(op_construct);
482
 
        }
483
 
        case op_get_global_var: {
484
 
            JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[2].u.jsCell);
485
 
            move(ImmPtr(globalObject), X86::eax);
486
 
            emitGetVariableObjectRegister(X86::eax, currentInstruction[3].u.operand, X86::eax);
487
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
488
 
            NEXT_OPCODE(op_get_global_var);
489
 
        }
490
 
        case op_put_global_var: {
491
 
            emitGetVirtualRegister(currentInstruction[3].u.operand, X86::edx);
492
 
            JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[1].u.jsCell);
493
 
            move(ImmPtr(globalObject), X86::eax);
494
 
            emitPutVariableObjectRegister(X86::edx, X86::eax, currentInstruction[2].u.operand);
495
 
            NEXT_OPCODE(op_put_global_var);
496
 
        }
497
 
        case op_get_scoped_var: {
498
 
            int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();
499
 
 
500
 
            emitGetFromCallFrameHeader(RegisterFile::ScopeChain, X86::eax);
501
 
            while (skip--)
502
 
                loadPtr(Address(X86::eax, FIELD_OFFSET(ScopeChainNode, next)), X86::eax);
503
 
 
504
 
            loadPtr(Address(X86::eax, FIELD_OFFSET(ScopeChainNode, object)), X86::eax);
505
 
            emitGetVariableObjectRegister(X86::eax, currentInstruction[2].u.operand, X86::eax);
506
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
507
 
            NEXT_OPCODE(op_get_scoped_var);
508
 
        }
509
 
        case op_put_scoped_var: {
510
 
            int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
511
 
 
512
 
            emitGetFromCallFrameHeader(RegisterFile::ScopeChain, X86::edx);
513
 
            emitGetVirtualRegister(currentInstruction[3].u.operand, X86::eax);
514
 
            while (skip--)
515
 
                loadPtr(Address(X86::edx, FIELD_OFFSET(ScopeChainNode, next)), X86::edx);
516
 
 
517
 
            loadPtr(Address(X86::edx, FIELD_OFFSET(ScopeChainNode, object)), X86::edx);
518
 
            emitPutVariableObjectRegister(X86::eax, X86::edx, currentInstruction[1].u.operand);
519
 
            NEXT_OPCODE(op_put_scoped_var);
520
 
        }
521
 
        case op_tear_off_activation: {
522
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
523
 
            emitCTICall(Interpreter::cti_op_tear_off_activation);
524
 
            NEXT_OPCODE(op_tear_off_activation);
525
 
        }
526
 
        case op_tear_off_arguments: {
527
 
            emitCTICall(Interpreter::cti_op_tear_off_arguments);
528
 
            NEXT_OPCODE(op_tear_off_arguments);
529
 
        }
530
 
        case op_ret: {
531
 
            // We could JIT generate the deref, only calling out to C when the refcount hits zero.
532
 
            if (m_codeBlock->needsFullScopeChain())
533
 
                emitCTICall(Interpreter::cti_op_ret_scopeChain);
534
 
 
535
 
            // Return the result in %eax.
536
 
            emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
537
 
 
538
 
            // Grab the return address.
539
 
            emitGetFromCallFrameHeader(RegisterFile::ReturnPC, X86::edx);
540
 
 
541
 
            // Restore our caller's "r".
542
 
            emitGetFromCallFrameHeader(RegisterFile::CallerFrame, callFrameRegister);
543
 
 
544
 
            // Return.
545
 
            push(X86::edx);
546
 
            ret();
547
 
 
548
 
            NEXT_OPCODE(op_ret);
549
 
        }
550
 
        case op_new_array: {
551
 
            emitPutJITStubArgConstant(currentInstruction[2].u.operand, 1);
552
 
            emitPutJITStubArgConstant(currentInstruction[3].u.operand, 2);
553
 
            emitCTICall(Interpreter::cti_op_new_array);
554
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
555
 
            NEXT_OPCODE(op_new_array);
556
 
        }
557
 
        case op_resolve: {
558
 
            Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
559
 
            emitPutJITStubArgConstant(ident, 1);
560
 
            emitCTICall(Interpreter::cti_op_resolve);
561
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
562
 
            NEXT_OPCODE(op_resolve);
563
 
        }
564
 
        case op_construct_verify: {
565
 
            emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
566
 
 
567
 
            emitJumpSlowCaseIfNotJSCell(X86::eax);
568
 
            loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
569
 
            addSlowCase(jne32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo) + FIELD_OFFSET(TypeInfo, m_type)), Imm32(ObjectType)));
570
 
 
571
 
            NEXT_OPCODE(op_construct_verify);
572
 
        }
573
 
        case op_get_by_val: {
574
 
            emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
575
 
            emitJumpSlowCaseIfNotImmNum(X86::edx);
576
 
#if USE(ALTERNATE_JSIMMEDIATE)
577
 
            // This is technically incorrect - we're zero-extending an int32.  On the hot path this doesn't matter.
578
 
            // We check the value as if it was a uint32 against the m_fastAccessCutoff - which will always fail if
579
 
            // number was signed since m_fastAccessCutoff is always less than intmax (since the total allocation
580
 
            // size is always less than 4Gb).  As such zero extending will have been correct (and extending the value
581
 
            // to 64-bits is necessary since it's used in the address calculation).  We zero extend rather than sign
582
 
            // extending since it makes it easier to re-tag the value in the slow case.
583
 
            zeroExtend32ToPtr(X86::edx, X86::edx);
584
 
#else
585
 
            emitFastArithImmToInt(X86::edx);
586
 
#endif
587
 
            emitJumpSlowCaseIfNotJSCell(X86::eax);
588
 
            addSlowCase(jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsArrayVptr)));
589
 
 
590
 
            // This is an array; get the m_storage pointer into ecx, then check if the index is below the fast cutoff
591
 
            loadPtr(Address(X86::eax, FIELD_OFFSET(JSArray, m_storage)), X86::ecx);
592
 
            addSlowCase(jae32(X86::edx, Address(X86::eax, FIELD_OFFSET(JSArray, m_fastAccessCutoff))));
593
 
 
594
 
            // Get the value from the vector
595
 
            loadPtr(BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0])), X86::eax);
596
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
597
 
            NEXT_OPCODE(op_get_by_val);
598
 
        }
599
 
        case op_resolve_func: {
600
 
            Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
601
 
            emitPutJITStubArgConstant(ident, 1);
602
 
            emitCTICall(Interpreter::cti_op_resolve_func);
603
 
            emitPutVirtualRegister(currentInstruction[2].u.operand, X86::edx);
604
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
605
 
            NEXT_OPCODE(op_resolve_func);
606
 
        }
607
 
        case op_sub: {
608
 
            compileBinaryArithOp(op_sub, currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, OperandTypes::fromInt(currentInstruction[4].u.operand));
609
 
            NEXT_OPCODE(op_sub);
610
 
        }
611
 
        case op_put_by_val: {
612
 
            emitGetVirtualRegisters(currentInstruction[1].u.operand, X86::eax, currentInstruction[2].u.operand, X86::edx);
613
 
            emitJumpSlowCaseIfNotImmNum(X86::edx);
614
 
#if USE(ALTERNATE_JSIMMEDIATE)
615
 
            // See comment in op_get_by_val.
616
 
            zeroExtend32ToPtr(X86::edx, X86::edx);
617
 
#else
618
 
            emitFastArithImmToInt(X86::edx);
619
 
#endif
620
 
            emitJumpSlowCaseIfNotJSCell(X86::eax);
621
 
            addSlowCase(jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsArrayVptr)));
622
 
 
623
 
            // This is an array; get the m_storage pointer into ecx, then check if the index is below the fast cutoff
624
 
            loadPtr(Address(X86::eax, FIELD_OFFSET(JSArray, m_storage)), X86::ecx);
625
 
            Jump inFastVector = jb32(X86::edx, Address(X86::eax, FIELD_OFFSET(JSArray, m_fastAccessCutoff)));
626
 
            // No; oh well, check if the access is within the vector - if so, we may still be okay.
627
 
            addSlowCase(jae32(X86::edx, Address(X86::ecx, FIELD_OFFSET(ArrayStorage, m_vectorLength))));
628
 
 
629
 
            // This is a write to the slow part of the vector; first, we have to check if this would be the first write to this location.
630
 
            // FIXME: should be able to handle initial write to array; increment the number of items in the array, and potentially update the fast access cutoff.
631
 
            addSlowCase(jzPtr(BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0]))));
632
 
 
633
 
            // All good - put the value into the array.
634
 
            inFastVector.link(this);
635
 
            emitGetVirtualRegister(currentInstruction[3].u.operand, X86::eax);
636
 
            storePtr(X86::eax, BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0])));
637
 
            NEXT_OPCODE(op_put_by_val);
638
 
        }
639
 
        CTI_COMPILE_BINARY_OP(op_lesseq)
640
 
        case op_loop_if_true: {
641
 
            emitSlowScriptCheck();
642
 
 
643
 
            unsigned target = currentInstruction[2].u.operand;
644
 
            emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
645
 
 
646
 
            Jump isZero = jePtr(X86::eax, ImmPtr(JSValuePtr::encode(JSImmediate::zeroImmediate())));
647
 
            addJump(emitJumpIfImmNum(X86::eax), target + 2);
648
 
 
649
 
            addJump(jePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsBoolean(true)))), target + 2);
650
 
            addSlowCase(jnePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsBoolean(false)))));
651
 
 
652
 
            isZero.link(this);
653
 
            NEXT_OPCODE(op_loop_if_true);
654
 
        };
655
 
        case op_resolve_base: {
656
 
            Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
657
 
            emitPutJITStubArgConstant(ident, 1);
658
 
            emitCTICall(Interpreter::cti_op_resolve_base);
659
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
660
 
            NEXT_OPCODE(op_resolve_base);
661
 
        }
662
 
        case op_negate: {
663
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
664
 
            emitCTICall(Interpreter::cti_op_negate);
665
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
666
 
            NEXT_OPCODE(op_negate);
667
 
        }
668
 
        case op_resolve_skip: {
669
 
            Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
670
 
            emitPutJITStubArgConstant(ident, 1);
671
 
            emitPutJITStubArgConstant(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain(), 2);
672
 
            emitCTICall(Interpreter::cti_op_resolve_skip);
673
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
674
 
            NEXT_OPCODE(op_resolve_skip);
675
 
        }
676
 
        case op_resolve_global: {
677
 
            // Fast case
678
 
            void* globalObject = currentInstruction[2].u.jsCell;
679
 
            Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
680
 
            
681
 
            unsigned currentIndex = globalResolveInfoIndex++;
682
 
            void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
683
 
            void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);
684
 
 
685
 
            // Check Structure of global object
686
 
            move(ImmPtr(globalObject), X86::eax);
687
 
            loadPtr(structureAddress, X86::edx);
688
 
            Jump noMatch = jnePtr(X86::edx, Address(X86::eax, FIELD_OFFSET(JSCell, m_structure))); // Structures don't match
689
 
 
690
 
            // Load cached property
691
 
            loadPtr(Address(X86::eax, FIELD_OFFSET(JSGlobalObject, m_propertyStorage)), X86::eax);
692
 
            load32(offsetAddr, X86::edx);
693
 
            loadPtr(BaseIndex(X86::eax, X86::edx, ScalePtr), X86::eax);
694
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
695
 
            Jump end = jump();
696
 
 
697
 
            // Slow case
698
 
            noMatch.link(this);
699
 
            emitPutJITStubArgConstant(globalObject, 1);
700
 
            emitPutJITStubArgConstant(ident, 2);
701
 
            emitPutJITStubArgConstant(currentIndex, 3);
702
 
            emitCTICall(Interpreter::cti_op_resolve_global);
703
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
704
 
            end.link(this);
705
 
            NEXT_OPCODE(op_resolve_global);
706
 
        }
707
 
        CTI_COMPILE_BINARY_OP(op_div)
708
 
        case op_pre_dec: {
709
 
            compileFastArith_op_pre_dec(currentInstruction[1].u.operand);
710
 
            NEXT_OPCODE(op_pre_dec);
711
 
        }
712
 
        case op_jnless: {
713
 
            unsigned op1 = currentInstruction[1].u.operand;
714
 
            unsigned op2 = currentInstruction[2].u.operand;
715
 
            unsigned target = currentInstruction[3].u.operand;
716
 
            if (isOperandConstantImmediateInt(op2)) {
717
 
                emitGetVirtualRegister(op1, X86::eax);
718
 
                emitJumpSlowCaseIfNotImmNum(X86::eax);
719
 
#if USE(ALTERNATE_JSIMMEDIATE)
720
 
                int32_t op2imm = JSImmediate::intValue(getConstantOperand(op2));
721
 
#else
722
 
                int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
723
 
#endif
724
 
                addJump(jge32(X86::eax, Imm32(op2imm)), target + 3);
725
 
            } else {
726
 
                emitGetVirtualRegisters(op1, X86::eax, op2, X86::edx);
727
 
                emitJumpSlowCaseIfNotImmNum(X86::eax);
728
 
                emitJumpSlowCaseIfNotImmNum(X86::edx);
729
 
                addJump(jge32(X86::eax, X86::edx), target + 3);
730
 
            }
731
 
            NEXT_OPCODE(op_jnless);
732
 
        }
733
 
        case op_not: {
734
 
            emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
735
 
            xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), X86::eax);
736
 
            addSlowCase(jnzPtr(X86::eax, Imm32(static_cast<int32_t>(~JSImmediate::ExtendedPayloadBitBoolValue))));
737
 
            xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool | JSImmediate::ExtendedPayloadBitBoolValue)), X86::eax);
738
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
739
 
            NEXT_OPCODE(op_not);
740
 
        }
741
 
        case op_jfalse: {
742
 
            unsigned target = currentInstruction[2].u.operand;
743
 
            emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
744
 
 
745
 
            addJump(jePtr(X86::eax, ImmPtr(JSValuePtr::encode(JSImmediate::zeroImmediate()))), target + 2);
746
 
            Jump isNonZero = emitJumpIfImmNum(X86::eax);
747
 
 
748
 
            addJump(jePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsBoolean(false)))), target + 2);
749
 
            addSlowCase(jnePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsBoolean(true)))));
750
 
 
751
 
            isNonZero.link(this);
752
 
            NEXT_OPCODE(op_jfalse);
753
 
        };
754
 
        case op_jeq_null: {
755
 
            unsigned src = currentInstruction[1].u.operand;
756
 
            unsigned target = currentInstruction[2].u.operand;
757
 
 
758
 
            emitGetVirtualRegister(src, X86::eax);
759
 
            Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
760
 
 
761
 
            // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
762
 
            loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
763
 
            addJump(jnz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
764
 
            Jump wasNotImmediate = jump();
765
 
 
766
 
            // Now handle the immediate cases - undefined & null
767
 
            isImmediate.link(this);
768
 
            and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
769
 
            addJump(jePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsNull()))), target + 2);            
770
 
 
771
 
            wasNotImmediate.link(this);
772
 
            NEXT_OPCODE(op_jeq_null);
773
 
        };
774
 
        case op_jneq_null: {
775
 
            unsigned src = currentInstruction[1].u.operand;
776
 
            unsigned target = currentInstruction[2].u.operand;
777
 
 
778
 
            emitGetVirtualRegister(src, X86::eax);
779
 
            Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
780
 
 
781
 
            // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
782
 
            loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
783
 
            addJump(jz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
784
 
            Jump wasNotImmediate = jump();
785
 
 
786
 
            // Now handle the immediate cases - undefined & null
787
 
            isImmediate.link(this);
788
 
            and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
789
 
            addJump(jnePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsNull()))), target + 2);            
790
 
 
791
 
            wasNotImmediate.link(this);
792
 
            NEXT_OPCODE(op_jneq_null);
793
 
        }
794
 
        case op_post_inc: {
795
 
            compileFastArith_op_post_inc(currentInstruction[1].u.operand, currentInstruction[2].u.operand);
796
 
            NEXT_OPCODE(op_post_inc);
797
 
        }
798
 
        case op_unexpected_load: {
799
 
            JSValuePtr v = m_codeBlock->unexpectedConstant(currentInstruction[2].u.operand);
800
 
            move(ImmPtr(JSValuePtr::encode(v)), X86::eax);
801
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
802
 
            NEXT_OPCODE(op_unexpected_load);
803
 
        }
804
 
        case op_jsr: {
805
 
            int retAddrDst = currentInstruction[1].u.operand;
806
 
            int target = currentInstruction[2].u.operand;
807
 
            DataLabelPtr storeLocation = storePtrWithPatch(Address(callFrameRegister, sizeof(Register) * retAddrDst));
808
 
            addJump(jump(), target + 2);
809
 
            m_jsrSites.append(JSRInfo(storeLocation, label()));
810
 
            NEXT_OPCODE(op_jsr);
811
 
        }
812
 
        case op_sret: {
813
 
            jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
814
 
            NEXT_OPCODE(op_sret);
815
 
        }
816
 
        case op_eq: {
817
 
            emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
818
 
            emitJumpSlowCaseIfNotImmNums(X86::eax, X86::edx, X86::ecx);
819
 
            sete32(X86::edx, X86::eax);
820
 
            emitTagAsBoolImmediate(X86::eax);
821
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
822
 
            NEXT_OPCODE(op_eq);
823
 
        }
824
 
        case op_lshift: {
825
 
            compileFastArith_op_lshift(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand);
826
 
            NEXT_OPCODE(op_lshift);
827
 
        }
828
 
        case op_bitand: {
829
 
            compileFastArith_op_bitand(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand);
830
 
            NEXT_OPCODE(op_bitand);
831
 
        }
832
 
        case op_rshift: {
833
 
            compileFastArith_op_rshift(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand);
834
 
            NEXT_OPCODE(op_rshift);
835
 
        }
836
 
        case op_bitnot: {
837
 
            emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
838
 
            emitJumpSlowCaseIfNotImmNum(X86::eax);
839
 
#if USE(ALTERNATE_JSIMMEDIATE)
840
 
            not32(X86::eax);
841
 
            emitFastArithIntToImmNoCheck(X86::eax, X86::eax);
842
 
#else
843
 
            xorPtr(Imm32(~JSImmediate::TagTypeInteger), X86::eax);
844
 
#endif
845
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
846
 
            NEXT_OPCODE(op_bitnot);
847
 
        }
848
 
        case op_resolve_with_base: {
849
 
            Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
850
 
            emitPutJITStubArgConstant(ident, 1);
851
 
            emitCTICall(Interpreter::cti_op_resolve_with_base);
852
 
            emitPutVirtualRegister(currentInstruction[2].u.operand, X86::edx);
853
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
854
 
            NEXT_OPCODE(op_resolve_with_base);
855
 
        }
856
 
        case op_new_func_exp: {
857
 
            FuncExprNode* func = m_codeBlock->functionExpression(currentInstruction[2].u.operand);
858
 
            emitPutJITStubArgConstant(func, 1);
859
 
            emitCTICall(Interpreter::cti_op_new_func_exp);
860
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
861
 
            NEXT_OPCODE(op_new_func_exp);
862
 
        }
863
 
        case op_mod: {
864
 
            compileFastArith_op_mod(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand);
865
 
            NEXT_OPCODE(op_mod);
866
 
        }
867
 
        case op_jtrue: {
868
 
            unsigned target = currentInstruction[2].u.operand;
869
 
            emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
870
 
 
871
 
            Jump isZero = jePtr(X86::eax, ImmPtr(JSValuePtr::encode(JSImmediate::zeroImmediate())));
872
 
            addJump(emitJumpIfImmNum(X86::eax), target + 2);
873
 
 
874
 
            addJump(jePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsBoolean(true)))), target + 2);
875
 
            addSlowCase(jnePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsBoolean(false)))));
876
 
 
877
 
            isZero.link(this);
878
 
            NEXT_OPCODE(op_jtrue);
879
 
        }
880
 
        CTI_COMPILE_BINARY_OP(op_less)
881
 
        case op_neq: {
882
 
            emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
883
 
            emitJumpSlowCaseIfNotImmNums(X86::eax, X86::edx, X86::ecx);
884
 
            setne32(X86::edx, X86::eax);
885
 
            emitTagAsBoolImmediate(X86::eax);
886
 
 
887
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
888
 
 
889
 
            NEXT_OPCODE(op_neq);
890
 
        }
891
 
        case op_post_dec: {
892
 
            compileFastArith_op_post_dec(currentInstruction[1].u.operand, currentInstruction[2].u.operand);
893
 
            NEXT_OPCODE(op_post_dec);
894
 
        }
895
 
        CTI_COMPILE_BINARY_OP(op_urshift)
896
 
        case op_bitxor: {
897
 
            emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
898
 
            emitJumpSlowCaseIfNotImmNums(X86::eax, X86::edx, X86::ecx);
899
 
            xorPtr(X86::edx, X86::eax);
900
 
            emitFastArithReTagImmediate(X86::eax, X86::eax);
901
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
902
 
            NEXT_OPCODE(op_bitxor);
903
 
        }
904
 
        case op_new_regexp: {
905
 
            RegExp* regExp = m_codeBlock->regexp(currentInstruction[2].u.operand);
906
 
            emitPutJITStubArgConstant(regExp, 1);
907
 
            emitCTICall(Interpreter::cti_op_new_regexp);
908
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
909
 
            NEXT_OPCODE(op_new_regexp);
910
 
        }
911
 
        case op_bitor: {
912
 
            emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
913
 
            emitJumpSlowCaseIfNotImmNums(X86::eax, X86::edx, X86::ecx);
914
 
            orPtr(X86::edx, X86::eax);
915
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
916
 
            NEXT_OPCODE(op_bitor);
917
 
        }
918
 
        case op_throw: {
919
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
920
 
            emitCTICall(Interpreter::cti_op_throw);
921
 
#if PLATFORM(X86_64)
922
 
            addPtr(Imm32(0x38), X86::esp);
923
 
            pop(X86::ebx);
924
 
            pop(X86::r13);
925
 
            pop(X86::r12);
926
 
            pop(X86::ebp);
927
 
            ret();
928
 
#else
929
 
            addPtr(Imm32(0x1c), X86::esp);
930
 
            pop(X86::ebx);
931
 
            pop(X86::edi);
932
 
            pop(X86::esi);
933
 
            pop(X86::ebp);
934
 
            ret();
935
 
#endif
936
 
            NEXT_OPCODE(op_throw);
937
 
        }
938
 
        case op_get_pnames: {
939
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
940
 
            emitCTICall(Interpreter::cti_op_get_pnames);
941
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
942
 
            NEXT_OPCODE(op_get_pnames);
943
 
        }
944
 
        case op_next_pname: {
945
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
946
 
            unsigned target = currentInstruction[3].u.operand;
947
 
            emitCTICall(Interpreter::cti_op_next_pname);
948
 
            Jump endOfIter = jzPtr(X86::eax);
949
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
950
 
            addJump(jump(), target + 3);
951
 
            endOfIter.link(this);
952
 
            NEXT_OPCODE(op_next_pname);
953
 
        }
954
 
        case op_push_scope: {
955
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
956
 
            emitCTICall(Interpreter::cti_op_push_scope);
957
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
958
 
            NEXT_OPCODE(op_push_scope);
959
 
        }
960
 
        case op_pop_scope: {
961
 
            emitCTICall(Interpreter::cti_op_pop_scope);
962
 
            NEXT_OPCODE(op_pop_scope);
963
 
        }
964
 
        CTI_COMPILE_UNARY_OP(op_typeof)
965
 
        CTI_COMPILE_UNARY_OP(op_is_undefined)
966
 
        CTI_COMPILE_UNARY_OP(op_is_boolean)
967
 
        CTI_COMPILE_UNARY_OP(op_is_number)
968
 
        CTI_COMPILE_UNARY_OP(op_is_string)
969
 
        CTI_COMPILE_UNARY_OP(op_is_object)
970
 
        CTI_COMPILE_UNARY_OP(op_is_function)
971
 
        case op_stricteq: {
972
 
            compileOpStrictEq(currentInstruction, OpStrictEq);
973
 
            NEXT_OPCODE(op_stricteq);
974
 
        }
975
 
        case op_nstricteq: {
976
 
            compileOpStrictEq(currentInstruction, OpNStrictEq);
977
 
            NEXT_OPCODE(op_nstricteq);
978
 
        }
979
 
        case op_to_jsnumber: {
980
 
            int srcVReg = currentInstruction[2].u.operand;
981
 
            emitGetVirtualRegister(srcVReg, X86::eax);
982
 
            
983
 
            Jump wasImmediate = emitJumpIfImmNum(X86::eax);
984
 
 
985
 
            emitJumpSlowCaseIfNotJSCell(X86::eax, srcVReg);
986
 
            loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
987
 
            addSlowCase(jne32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_type)), Imm32(NumberType)));
988
 
            
989
 
            wasImmediate.link(this);
990
 
 
991
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
992
 
            NEXT_OPCODE(op_to_jsnumber);
993
 
        }
994
 
        CTI_COMPILE_BINARY_OP(op_in)
995
 
        case op_push_new_scope: {
996
 
            Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
997
 
            emitPutJITStubArgConstant(ident, 1);
998
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx);
999
 
            emitCTICall(Interpreter::cti_op_push_new_scope);
1000
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
1001
 
            NEXT_OPCODE(op_push_new_scope);
1002
 
        }
1003
 
        case op_catch: {
1004
 
            emitGetCTIParam(STUB_ARGS_callFrame, callFrameRegister);
1005
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
1006
 
            NEXT_OPCODE(op_catch);
1007
 
        }
1008
 
        case op_jmp_scopes: {
1009
 
            unsigned count = currentInstruction[1].u.operand;
1010
 
            emitPutJITStubArgConstant(count, 1);
1011
 
            emitCTICall(Interpreter::cti_op_jmp_scopes);
1012
 
            unsigned target = currentInstruction[2].u.operand;
1013
 
            addJump(jump(), target + 2);
1014
 
            NEXT_OPCODE(op_jmp_scopes);
1015
 
        }
1016
 
        case op_put_by_index: {
1017
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
1018
 
            emitPutJITStubArgConstant(currentInstruction[2].u.operand, 2);
1019
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 3, X86::ecx);
1020
 
            emitCTICall(Interpreter::cti_op_put_by_index);
1021
 
            NEXT_OPCODE(op_put_by_index);
1022
 
        }
1023
 
        case op_switch_imm: {
1024
 
            unsigned tableIndex = currentInstruction[1].u.operand;
1025
 
            unsigned defaultOffset = currentInstruction[2].u.operand;
1026
 
            unsigned scrutinee = currentInstruction[3].u.operand;
1027
 
 
1028
 
            // create jump table for switch destinations, track this switch statement.
1029
 
            SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
1030
 
            m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
1031
 
            jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1032
 
 
1033
 
            emitPutJITStubArgFromVirtualRegister(scrutinee, 1, X86::ecx);
1034
 
            emitPutJITStubArgConstant(tableIndex, 2);
1035
 
            emitCTICall(Interpreter::cti_op_switch_imm);
1036
 
            jump(X86::eax);
1037
 
            NEXT_OPCODE(op_switch_imm);
1038
 
        }
1039
 
        case op_switch_char: {
1040
 
            unsigned tableIndex = currentInstruction[1].u.operand;
1041
 
            unsigned defaultOffset = currentInstruction[2].u.operand;
1042
 
            unsigned scrutinee = currentInstruction[3].u.operand;
1043
 
 
1044
 
            // create jump table for switch destinations, track this switch statement.
1045
 
            SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
1046
 
            m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
1047
 
            jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1048
 
 
1049
 
            emitPutJITStubArgFromVirtualRegister(scrutinee, 1, X86::ecx);
1050
 
            emitPutJITStubArgConstant(tableIndex, 2);
1051
 
            emitCTICall(Interpreter::cti_op_switch_char);
1052
 
            jump(X86::eax);
1053
 
            NEXT_OPCODE(op_switch_char);
1054
 
        }
1055
 
        case op_switch_string: {
1056
 
            unsigned tableIndex = currentInstruction[1].u.operand;
1057
 
            unsigned defaultOffset = currentInstruction[2].u.operand;
1058
 
            unsigned scrutinee = currentInstruction[3].u.operand;
1059
 
 
1060
 
            // create jump table for switch destinations, track this switch statement.
1061
 
            StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
1062
 
            m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));
1063
 
 
1064
 
            emitPutJITStubArgFromVirtualRegister(scrutinee, 1, X86::ecx);
1065
 
            emitPutJITStubArgConstant(tableIndex, 2);
1066
 
            emitCTICall(Interpreter::cti_op_switch_string);
1067
 
            jump(X86::eax);
1068
 
            NEXT_OPCODE(op_switch_string);
1069
 
        }
1070
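
The three switch opcodes above (op_switch_imm, op_switch_char, op_switch_string) share one pattern: the compiler grows a table of code locations (ctiOffsets) to mirror the bytecode branch offsets, calls a cti stub that selects an entry for the scrutinee, and then jumps to the address the stub returns in eax. The following is a minimal standalone sketch of that table lookup - the struct and field names are invented for illustration and are not JSC types:

#include <cstdio>
#include <vector>

// Hypothetical stand-in for SimpleJumpTable: branch offsets are relative to
// the switch opcode; entry 0 corresponds to the table's minimum scrutinee value.
struct ImmediateJumpTable {
    int min;                          // smallest scrutinee covered by the table
    std::vector<int> branchOffsets;   // 0 means "fall through to default"
    int defaultOffset;

    // Mirrors what the cti_op_switch_imm stub has to decide: in range and
    // non-zero entry -> table target, otherwise the default target.
    int target(int scrutinee) const {
        int index = scrutinee - min;
        if (index >= 0 && index < (int)branchOffsets.size() && branchOffsets[index])
            return branchOffsets[index];
        return defaultOffset;
    }
};

int main() {
    ImmediateJumpTable table { 10, { 3, 0, 7 }, 100 };
    std::printf("%d %d %d\n", table.target(10), table.target(11), table.target(42));
    // prints "3 100 100": case 10 hits the table, 11 has no entry, 42 is out of range.
    return 0;
}

In the generated code the table entries are machine-code addresses rather than bytecode offsets; the loop in privateCompile further down performs exactly that translation when it patches ctiOffsets from m_labels.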
 
        case op_del_by_val: {
1071
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
1072
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx);
1073
 
            emitCTICall(Interpreter::cti_op_del_by_val);
1074
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
1075
 
            NEXT_OPCODE(op_del_by_val);
1076
 
        }
1077
 
        case op_put_getter: {
1078
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
1079
 
            Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
1080
 
            emitPutJITStubArgConstant(ident, 2);
1081
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 3, X86::ecx);
1082
 
            emitCTICall(Interpreter::cti_op_put_getter);
1083
 
            NEXT_OPCODE(op_put_getter);
1084
 
        }
1085
 
        case op_put_setter: {
1086
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
1087
 
            Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
1088
 
            emitPutJITStubArgConstant(ident, 2);
1089
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 3, X86::ecx);
1090
 
            emitCTICall(Interpreter::cti_op_put_setter);
1091
 
            NEXT_OPCODE(op_put_setter);
1092
 
        }
1093
 
        case op_new_error: {
1094
 
            JSValuePtr message = m_codeBlock->unexpectedConstant(currentInstruction[3].u.operand);
1095
 
            emitPutJITStubArgConstant(currentInstruction[2].u.operand, 1);
1096
 
            emitPutJITStubArgConstant(JSValuePtr::encode(message), 2);
1097
 
            emitPutJITStubArgConstant(m_bytecodeIndex, 3);
1098
 
            emitCTICall(Interpreter::cti_op_new_error);
1099
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
1100
 
            NEXT_OPCODE(op_new_error);
1101
 
        }
1102
 
        case op_debug: {
1103
 
            emitPutJITStubArgConstant(currentInstruction[1].u.operand, 1);
1104
 
            emitPutJITStubArgConstant(currentInstruction[2].u.operand, 2);
1105
 
            emitPutJITStubArgConstant(currentInstruction[3].u.operand, 3);
1106
 
            emitCTICall(Interpreter::cti_op_debug);
1107
 
            NEXT_OPCODE(op_debug);
1108
 
        }
1109
 
        case op_eq_null: {
1110
 
            unsigned dst = currentInstruction[1].u.operand;
1111
 
            unsigned src1 = currentInstruction[2].u.operand;
1112
 
 
1113
 
            emitGetVirtualRegister(src1, X86::eax);
1114
 
            Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
1115
 
 
1116
 
            loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
1117
 
            setnz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), X86::eax);
1118
 
 
1119
 
            Jump wasNotImmediate = jump();
1120
 
 
1121
 
            isImmediate.link(this);
1122
 
 
1123
 
            and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
1124
 
            sete32(Imm32(JSImmediate::FullTagTypeNull), X86::eax);
1125
 
 
1126
 
            wasNotImmediate.link(this);
1127
 
 
1128
 
            emitTagAsBoolImmediate(X86::eax);
1129
 
            emitPutVirtualRegister(dst);
1130
 
 
1131
 
            NEXT_OPCODE(op_eq_null);
1132
 
        }
1133
 
        case op_neq_null: {
1134
 
            unsigned dst = currentInstruction[1].u.operand;
1135
 
            unsigned src1 = currentInstruction[2].u.operand;
1136
 
 
1137
 
            emitGetVirtualRegister(src1, X86::eax);
1138
 
            Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
1139
 
 
1140
 
            loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
1141
 
            setz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), X86::eax);
1142
 
 
1143
 
            Jump wasNotImmediate = jump();
1144
 
 
1145
 
            isImmediate.link(this);
1146
 
 
1147
 
            and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
1148
 
            setne32(Imm32(JSImmediate::FullTagTypeNull), X86::eax);
1149
 
 
1150
 
            wasNotImmediate.link(this);
1151
 
 
1152
 
            emitTagAsBoolImmediate(X86::eax);
1153
 
            emitPutVirtualRegister(dst);
1154
 
 
1155
 
            NEXT_OPCODE(op_neq_null);
1156
 
        }
1157
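
op_eq_null and op_neq_null above avoid a generic equality call: a cell compares equal to null only if its Structure carries the MasqueradesAsUndefined flag, and an immediate compares equal only if, after masking off the "undefined" tag bit, it matches the null tag - so both null and undefined qualify. A small sketch of that decision, using made-up tag values rather than JSC's real JSImmediate encoding:

#include <cassert>

// Hypothetical immediate encoding: null and undefined differ by a single tag
// bit, which is what the masking trick in the emitted code relies on.
enum : unsigned {
    TagTypeNull = 0x02,
    ExtendedTagBitUndefined = 0x08,
    TagTypeUndefined = TagTypeNull | ExtendedTagBitUndefined,
};

struct Cell { bool masqueradesAsUndefined; };

// true when "value == null" should hold under JS loose equality.
bool eqNullImmediate(unsigned bits) {
    return (bits & ~ExtendedTagBitUndefined) == TagTypeNull; // null or undefined
}
bool eqNullCell(const Cell& cell) {
    return cell.masqueradesAsUndefined; // e.g. document.all-style objects
}

int main() {
    assert(eqNullImmediate(TagTypeNull));
    assert(eqNullImmediate(TagTypeUndefined));
    assert(!eqNullImmediate(0x01));            // some other immediate
    assert(!eqNullCell(Cell{ false }));
    assert(eqNullCell(Cell{ true }));
    return 0;
}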
 
        case op_enter: {
1158
 
            // Even though CTI doesn't use them, we initialize our constant
1159
 
            // registers to zap stale pointers, to avoid unnecessarily prolonging
1160
 
            // object lifetime and increasing GC pressure.
1161
 
            size_t count = m_codeBlock->m_numVars + m_codeBlock->numberOfConstantRegisters();
1162
 
            for (size_t j = 0; j < count; ++j)
1163
 
                emitInitRegister(j);
1164
 
 
1165
 
            NEXT_OPCODE(op_enter);
1166
 
        }
1167
 
        case op_enter_with_activation: {
1168
 
            // Even though CTI doesn't use them, we initialize our constant
1169
 
            // registers to zap stale pointers, to avoid unnecessarily prolonging
1170
 
            // object lifetime and increasing GC pressure.
1171
 
            size_t count = m_codeBlock->m_numVars + m_codeBlock->numberOfConstantRegisters();
1172
 
            for (size_t j = 0; j < count; ++j)
1173
 
                emitInitRegister(j);
1174
 
 
1175
 
            emitCTICall(Interpreter::cti_op_push_activation);
1176
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
1177
 
 
1178
 
            NEXT_OPCODE(op_enter_with_activation);
1179
 
        }
1180
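
The comment in op_enter / op_enter_with_activation is about garbage collection rather than correctness: a new frame reuses register slots from earlier frames, and pointers left behind in those slots would keep dead objects reachable. A minimal illustration of the idea (Register and the frame layout here are invented, not the JSC definitions):

#include <cstddef>

struct Register { void* ptr; };   // hypothetical stack slot that may hold an object pointer

// Clearing the slots a function is about to use means stale pointers left by a
// previous frame can no longer keep dead objects alive through a GC scan; this
// is the effect of the emitInitRegister(j) loop above.
void zapFrame(Register* frame, size_t slotCount) {
    for (size_t i = 0; i < slotCount; ++i)
        frame[i].ptr = nullptr;
}

int main() {
    int staleObject = 0;                 // stands in for a no-longer-needed heap object
    Register stack[8];
    stack[0].ptr = &staleObject;         // leftover pointer from an earlier frame
    zapFrame(stack, 8);                  // after this, nothing references staleObject
    return 0;
}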
 
        case op_create_arguments: {
1181
 
            if (m_codeBlock->m_numParameters == 1)
1182
 
                emitCTICall(Interpreter::cti_op_create_arguments_no_params);
1183
 
            else
1184
 
                emitCTICall(Interpreter::cti_op_create_arguments);
1185
 
            NEXT_OPCODE(op_create_arguments);
1186
 
        }
1187
 
        case op_convert_this: {
1188
 
            emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
1189
 
 
1190
 
            emitJumpSlowCaseIfNotJSCell(X86::eax);
1191
 
            loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::edx);
1192
 
            addSlowCase(jnz32(Address(X86::edx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
1193
 
 
1194
 
            NEXT_OPCODE(op_convert_this);
1195
 
        }
1196
 
        case op_profile_will_call: {
1197
 
            emitGetCTIParam(STUB_ARGS_profilerReference, X86::eax);
1198
 
            Jump noProfiler = jzPtr(Address(X86::eax));
1199
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::eax);
1200
 
            emitCTICall(Interpreter::cti_op_profile_will_call);
1201
 
            noProfiler.link(this);
1202
 
 
1203
 
            NEXT_OPCODE(op_profile_will_call);
1204
 
        }
1205
 
        case op_profile_did_call: {
1206
 
            emitGetCTIParam(STUB_ARGS_profilerReference, X86::eax);
1207
 
            Jump noProfiler = jzPtr(Address(X86::eax));
1208
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::eax);
1209
 
            emitCTICall(Interpreter::cti_op_profile_did_call);
1210
 
            noProfiler.link(this);
1211
 
 
1212
 
            NEXT_OPCODE(op_profile_did_call);
1213
 
        }
 
195
 
 
196
        switch (m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
 
197
        DEFINE_BINARY_OP(op_del_by_val)
 
198
#if USE(JSVALUE32)
 
199
        DEFINE_BINARY_OP(op_div)
 
200
#endif
 
201
        DEFINE_BINARY_OP(op_in)
 
202
        DEFINE_BINARY_OP(op_less)
 
203
        DEFINE_BINARY_OP(op_lesseq)
 
204
        DEFINE_BINARY_OP(op_urshift)
 
205
        DEFINE_UNARY_OP(op_get_pnames)
 
206
        DEFINE_UNARY_OP(op_is_boolean)
 
207
        DEFINE_UNARY_OP(op_is_function)
 
208
        DEFINE_UNARY_OP(op_is_number)
 
209
        DEFINE_UNARY_OP(op_is_object)
 
210
        DEFINE_UNARY_OP(op_is_string)
 
211
        DEFINE_UNARY_OP(op_is_undefined)
 
212
#if !USE(JSVALUE32_64)
 
213
        DEFINE_UNARY_OP(op_negate)
 
214
#endif
 
215
        DEFINE_UNARY_OP(op_typeof)
 
216
 
 
217
        DEFINE_OP(op_add)
 
218
        DEFINE_OP(op_bitand)
 
219
        DEFINE_OP(op_bitnot)
 
220
        DEFINE_OP(op_bitor)
 
221
        DEFINE_OP(op_bitxor)
 
222
        DEFINE_OP(op_call)
 
223
        DEFINE_OP(op_call_eval)
 
224
        DEFINE_OP(op_call_varargs)
 
225
        DEFINE_OP(op_catch)
 
226
        DEFINE_OP(op_construct)
 
227
        DEFINE_OP(op_construct_verify)
 
228
        DEFINE_OP(op_convert_this)
 
229
        DEFINE_OP(op_init_arguments)
 
230
        DEFINE_OP(op_create_arguments)
 
231
        DEFINE_OP(op_debug)
 
232
        DEFINE_OP(op_del_by_id)
 
233
#if !USE(JSVALUE32)
 
234
        DEFINE_OP(op_div)
 
235
#endif
 
236
        DEFINE_OP(op_end)
 
237
        DEFINE_OP(op_enter)
 
238
        DEFINE_OP(op_enter_with_activation)
 
239
        DEFINE_OP(op_eq)
 
240
        DEFINE_OP(op_eq_null)
 
241
        DEFINE_OP(op_get_by_id)
 
242
        DEFINE_OP(op_get_by_val)
 
243
        DEFINE_OP(op_get_global_var)
 
244
        DEFINE_OP(op_get_scoped_var)
 
245
        DEFINE_OP(op_instanceof)
 
246
        DEFINE_OP(op_jeq_null)
 
247
        DEFINE_OP(op_jfalse)
 
248
        DEFINE_OP(op_jmp)
 
249
        DEFINE_OP(op_jmp_scopes)
 
250
        DEFINE_OP(op_jneq_null)
 
251
        DEFINE_OP(op_jneq_ptr)
 
252
        DEFINE_OP(op_jnless)
 
253
        DEFINE_OP(op_jnlesseq)
 
254
        DEFINE_OP(op_jsr)
 
255
        DEFINE_OP(op_jtrue)
 
256
        DEFINE_OP(op_load_varargs)
 
257
        DEFINE_OP(op_loop)
 
258
        DEFINE_OP(op_loop_if_less)
 
259
        DEFINE_OP(op_loop_if_lesseq)
 
260
        DEFINE_OP(op_loop_if_true)
 
261
        DEFINE_OP(op_lshift)
 
262
        DEFINE_OP(op_method_check)
 
263
        DEFINE_OP(op_mod)
 
264
        DEFINE_OP(op_mov)
 
265
        DEFINE_OP(op_mul)
 
266
#if USE(JSVALUE32_64)
 
267
        DEFINE_OP(op_negate)
 
268
#endif
 
269
        DEFINE_OP(op_neq)
 
270
        DEFINE_OP(op_neq_null)
 
271
        DEFINE_OP(op_new_array)
 
272
        DEFINE_OP(op_new_error)
 
273
        DEFINE_OP(op_new_func)
 
274
        DEFINE_OP(op_new_func_exp)
 
275
        DEFINE_OP(op_new_object)
 
276
        DEFINE_OP(op_new_regexp)
 
277
        DEFINE_OP(op_next_pname)
 
278
        DEFINE_OP(op_not)
 
279
        DEFINE_OP(op_nstricteq)
 
280
        DEFINE_OP(op_pop_scope)
 
281
        DEFINE_OP(op_post_dec)
 
282
        DEFINE_OP(op_post_inc)
 
283
        DEFINE_OP(op_pre_dec)
 
284
        DEFINE_OP(op_pre_inc)
 
285
        DEFINE_OP(op_profile_did_call)
 
286
        DEFINE_OP(op_profile_will_call)
 
287
        DEFINE_OP(op_push_new_scope)
 
288
        DEFINE_OP(op_push_scope)
 
289
        DEFINE_OP(op_put_by_id)
 
290
        DEFINE_OP(op_put_by_index)
 
291
        DEFINE_OP(op_put_by_val)
 
292
        DEFINE_OP(op_put_getter)
 
293
        DEFINE_OP(op_put_global_var)
 
294
        DEFINE_OP(op_put_scoped_var)
 
295
        DEFINE_OP(op_put_setter)
 
296
        DEFINE_OP(op_resolve)
 
297
        DEFINE_OP(op_resolve_base)
 
298
        DEFINE_OP(op_resolve_global)
 
299
        DEFINE_OP(op_resolve_skip)
 
300
        DEFINE_OP(op_resolve_with_base)
 
301
        DEFINE_OP(op_ret)
 
302
        DEFINE_OP(op_rshift)
 
303
        DEFINE_OP(op_sret)
 
304
        DEFINE_OP(op_strcat)
 
305
        DEFINE_OP(op_stricteq)
 
306
        DEFINE_OP(op_sub)
 
307
        DEFINE_OP(op_switch_char)
 
308
        DEFINE_OP(op_switch_imm)
 
309
        DEFINE_OP(op_switch_string)
 
310
        DEFINE_OP(op_tear_off_activation)
 
311
        DEFINE_OP(op_tear_off_arguments)
 
312
        DEFINE_OP(op_throw)
 
313
        DEFINE_OP(op_to_jsnumber)
 
314
        DEFINE_OP(op_to_primitive)
 
315
 
1214
316
        case op_get_array_length:
1215
317
        case op_get_by_id_chain:
1216
318
        case op_get_by_id_generic:
1226
328
        }
1227
329
    }
1228
330
 
1229
 
    ASSERT(propertyAccessInstructionIndex == m_codeBlock->numberOfStructureStubInfos());
1230
 
    ASSERT(callLinkInfoIndex == m_codeBlock->numberOfCallLinkInfos());
 
331
    ASSERT(m_propertyAccessInstructionIndex == m_codeBlock->numberOfStructureStubInfos());
 
332
    ASSERT(m_callLinkInfoIndex == m_codeBlock->numberOfCallLinkInfos());
1231
333
 
1232
334
#ifndef NDEBUG
1233
 
    // reset this, in order to guard it's use with asserts
 
335
    // Reset this, in order to guard its use with ASSERTs.
1234
336
    m_bytecodeIndex = (unsigned)-1;
1235
337
#endif
1236
338
}
1247
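
The newer side of this diff replaces the hand-written opcode switch with DEFINE_OP / DEFINE_SLOWCASE_OP invocations. The macro bodies are not shown in this revision, so the sketch below is only a guess at the general shape - a case label forwarding to a per-opcode emitter - with all names invented:

#include <cstdio>

// Invented opcode set and "emitter" class, standing in for the JIT and its
// per-opcode emit functions; only the dispatch shape matters here.
enum OpcodeID { op_add, op_mov, op_end };

struct ToyEmitter {
    void emit_op_add() { std::puts("emit add"); }
    void emit_op_mov() { std::puts("emit mov"); }
    void emit_op_end() { std::puts("emit end"); }

    // One case per opcode, each forwarding to its emitter - the same shape a
    // DEFINE_OP-style macro list expands to.
#define TOY_DEFINE_OP(name) case name: emit_##name(); break;
    void dispatch(OpcodeID opcode) {
        switch (opcode) {
        TOY_DEFINE_OP(op_add)
        TOY_DEFINE_OP(op_mov)
        TOY_DEFINE_OP(op_end)
        }
    }
#undef TOY_DEFINE_OP
};

int main() {
    ToyEmitter emitter;
    emitter.dispatch(op_mov);   // prints "emit mov"
    return 0;
}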
349
void JIT::privateCompileSlowCases()
1248
350
{
1249
351
    Instruction* instructionsBegin = m_codeBlock->instructions().begin();
1250
 
    unsigned propertyAccessInstructionIndex = 0;
1251
 
    unsigned callLinkInfoIndex = 0;
 
352
 
 
353
    m_propertyAccessInstructionIndex = 0;
 
354
#if USE(JSVALUE32_64)
 
355
    m_globalResolveInfoIndex = 0;
 
356
#endif
 
357
    m_callLinkInfoIndex = 0;
1252
358
 
1253
359
    for (Vector<SlowCaseEntry>::iterator iter = m_slowCases.begin(); iter != m_slowCases.end();) {
1254
 
        // FIXME: enable peephole optimizations for slow cases when applicable
 
360
#if !USE(JSVALUE32_64)
1255
361
        killLastResultRegister();
 
362
#endif
1256
363
 
1257
364
        m_bytecodeIndex = iter->to;
1258
365
#ifndef NDEBUG
1260
367
#endif
1261
368
        Instruction* currentInstruction = instructionsBegin + m_bytecodeIndex;
1262
369
 
1263
 
        switch (OpcodeID opcodeID = m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
1264
 
        case op_convert_this: {
1265
 
            linkSlowCase(iter);
1266
 
            linkSlowCase(iter);
1267
 
            emitPutJITStubArg(X86::eax, 1);
1268
 
            emitCTICall(Interpreter::cti_op_convert_this);
1269
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
1270
 
            NEXT_OPCODE(op_convert_this);
1271
 
        }
1272
 
        case op_add: {
1273
 
            compileFastArithSlow_op_add(currentInstruction, iter);
1274
 
            NEXT_OPCODE(op_add);
1275
 
        }
1276
 
        case op_construct_verify: {
1277
 
            linkSlowCase(iter);
1278
 
            linkSlowCase(iter);
1279
 
            emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
1280
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
1281
 
 
1282
 
            NEXT_OPCODE(op_construct_verify);
1283
 
        }
1284
 
        case op_get_by_val: {
1285
 
            // The slow case that handles accesses to arrays (below) may jump back up to here. 
1286
 
            Label beginGetByValSlow(this);
1287
 
 
1288
 
            Jump notImm = getSlowCase(iter);
1289
 
            linkSlowCase(iter);
1290
 
            linkSlowCase(iter);
1291
 
            emitFastArithIntToImmNoCheck(X86::edx, X86::edx);
1292
 
            notImm.link(this);
1293
 
            emitPutJITStubArg(X86::eax, 1);
1294
 
            emitPutJITStubArg(X86::edx, 2);
1295
 
            emitCTICall(Interpreter::cti_op_get_by_val);
1296
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
1297
 
            emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
1298
 
 
1299
 
            // This is the slow case that handles accesses to arrays above the fast cut-off.
1300
 
            // First, check if this is an access to the vector
1301
 
            linkSlowCase(iter);
1302
 
            jae32(X86::edx, Address(X86::ecx, FIELD_OFFSET(ArrayStorage, m_vectorLength)), beginGetByValSlow);
1303
 
 
1304
 
            // Okay, we missed the fast region, but the index is still within the vector. Get the value.
1305
 
            loadPtr(BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0])), X86::ecx);
1306
 
            // Check whether the value loaded is zero; if so we need to return undefined.
1307
 
            jzPtr(X86::ecx, beginGetByValSlow);
1308
 
            move(X86::ecx, X86::eax);
1309
 
            emitPutVirtualRegister(currentInstruction[1].u.operand, X86::eax);
1310
 
 
1311
 
            NEXT_OPCODE(op_get_by_val);
1312
 
        }
1313
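
The op_get_by_val slow case above is a three-level strategy: the inline fast path covers indices below the array's fast cut-off, the first slow path rechecks the index against m_vectorLength and loads straight from the vector (a null slot counts as a miss), and everything else reaches the generic cti_op_get_by_val stub. A standalone sketch of those checks over a simplified storage layout (not JSC's ArrayStorage):

#include <cstdio>
#include <vector>

struct Value { int payload; };

// Simplified stand-in for ArrayStorage: a vector of nullable slots plus the
// index bound below which the inline fast path is allowed to load directly.
struct ToyArrayStorage {
    unsigned fastCutoff;
    std::vector<Value*> vector;   // null slot == hole
};

Value* genericGet(const ToyArrayStorage&, unsigned) { return nullptr; } // stands in for the cti stub

Value* getByVal(const ToyArrayStorage& storage, unsigned index) {
    if (index < storage.fastCutoff)                   // inline fast path
        return storage.vector[index];
    if (index < storage.vector.size()) {              // slow path: still inside the vector
        if (Value* value = storage.vector[index])     // a hole forces the generic lookup
            return value;
    }
    return genericGet(storage, index);                // prototype chain, getters, etc.
}

int main() {
    Value a{1}, b{2};
    ToyArrayStorage storage{ 1, { &a, nullptr, &b } };
    std::printf("%d\n", getByVal(storage, 2)->payload);   // 2, via the vector slow path
    return 0;
}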
 
        case op_sub: {
1314
 
            compileBinaryArithOpSlowCase(op_sub, iter, currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, OperandTypes::fromInt(currentInstruction[4].u.operand));
1315
 
            NEXT_OPCODE(op_sub);
1316
 
        }
1317
 
        case op_rshift: {
1318
 
            compileFastArithSlow_op_rshift(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, iter);
1319
 
            NEXT_OPCODE(op_rshift);
1320
 
        }
1321
 
        case op_lshift: {
1322
 
            compileFastArithSlow_op_lshift(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, iter);
1323
 
            NEXT_OPCODE(op_lshift);
1324
 
        }
1325
 
        case op_loop_if_less: {
1326
 
            unsigned op2 = currentInstruction[2].u.operand;
1327
 
            unsigned target = currentInstruction[3].u.operand;
1328
 
            if (isOperandConstantImmediateInt(op2)) {
1329
 
                linkSlowCase(iter);
1330
 
                emitPutJITStubArg(X86::eax, 1);
1331
 
                emitPutJITStubArgFromVirtualRegister(op2, 2, X86::ecx);
1332
 
                emitCTICall(Interpreter::cti_op_loop_if_less);
1333
 
                emitJumpSlowToHot(jnz32(X86::eax), target + 3);
1334
 
            } else {
1335
 
                linkSlowCase(iter);
1336
 
                linkSlowCase(iter);
1337
 
                emitPutJITStubArg(X86::eax, 1);
1338
 
                emitPutJITStubArg(X86::edx, 2);
1339
 
                emitCTICall(Interpreter::cti_op_loop_if_less);
1340
 
                emitJumpSlowToHot(jnz32(X86::eax), target + 3);
1341
 
            }
1342
 
            NEXT_OPCODE(op_loop_if_less);
1343
 
        }
1344
 
        case op_put_by_id: {
1345
 
            compilePutByIdSlowCase(currentInstruction[1].u.operand, &(m_codeBlock->identifier(currentInstruction[2].u.operand)), currentInstruction[3].u.operand, iter, propertyAccessInstructionIndex++);
1346
 
            NEXT_OPCODE(op_put_by_id);
1347
 
        }
1348
 
        case op_get_by_id: {
1349
 
            compileGetByIdSlowCase(currentInstruction[1].u.operand, currentInstruction[2].u.operand, &(m_codeBlock->identifier(currentInstruction[3].u.operand)), iter, propertyAccessInstructionIndex++);
1350
 
            NEXT_OPCODE(op_get_by_id);
1351
 
        }
1352
 
        case op_loop_if_lesseq: {
1353
 
            unsigned op2 = currentInstruction[2].u.operand;
1354
 
            unsigned target = currentInstruction[3].u.operand;
1355
 
            if (isOperandConstantImmediateInt(op2)) {
1356
 
                linkSlowCase(iter);
1357
 
                emitPutJITStubArg(X86::eax, 1);
1358
 
                emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 2, X86::ecx);
1359
 
                emitCTICall(Interpreter::cti_op_loop_if_lesseq);
1360
 
                emitJumpSlowToHot(jnz32(X86::eax), target + 3);
1361
 
            } else {
1362
 
                linkSlowCase(iter);
1363
 
                linkSlowCase(iter);
1364
 
                emitPutJITStubArg(X86::eax, 1);
1365
 
                emitPutJITStubArg(X86::edx, 2);
1366
 
                emitCTICall(Interpreter::cti_op_loop_if_lesseq);
1367
 
                emitJumpSlowToHot(jnz32(X86::eax), target + 3);
1368
 
            }
1369
 
            NEXT_OPCODE(op_loop_if_lesseq);
1370
 
        }
1371
 
        case op_pre_inc: {
1372
 
            compileFastArithSlow_op_pre_inc(currentInstruction[1].u.operand, iter);
1373
 
            NEXT_OPCODE(op_pre_inc);
1374
 
        }
1375
 
        case op_put_by_val: {
1376
 
            // Normal slow cases - either the subscript is not an immediate int, or the base is not an array.
1377
 
            Jump notImm = getSlowCase(iter);
1378
 
            linkSlowCase(iter);
1379
 
            linkSlowCase(iter);
1380
 
            emitFastArithIntToImmNoCheck(X86::edx, X86::edx);
1381
 
            notImm.link(this);
1382
 
            emitGetVirtualRegister(currentInstruction[3].u.operand, X86::ecx);
1383
 
            emitPutJITStubArg(X86::eax, 1);
1384
 
            emitPutJITStubArg(X86::edx, 2);
1385
 
            emitPutJITStubArg(X86::ecx, 3);
1386
 
            emitCTICall(Interpreter::cti_op_put_by_val);
1387
 
            emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_put_by_val));
1388
 
 
1389
 
            // Slow cases for immediate int accesses to arrays.
1390
 
            linkSlowCase(iter);
1391
 
            linkSlowCase(iter);
1392
 
            emitGetVirtualRegister(currentInstruction[3].u.operand, X86::ecx);
1393
 
            emitPutJITStubArg(X86::eax, 1);
1394
 
            emitPutJITStubArg(X86::edx, 2);
1395
 
            emitPutJITStubArg(X86::ecx, 3);
1396
 
            emitCTICall(Interpreter::cti_op_put_by_val_array);
1397
 
 
1398
 
            NEXT_OPCODE(op_put_by_val);
1399
 
        }
1400
 
        case op_loop_if_true: {
1401
 
            linkSlowCase(iter);
1402
 
            emitPutJITStubArg(X86::eax, 1);
1403
 
            emitCTICall(Interpreter::cti_op_jtrue);
1404
 
            unsigned target = currentInstruction[2].u.operand;
1405
 
            emitJumpSlowToHot(jnz32(X86::eax), target + 2);
1406
 
            NEXT_OPCODE(op_loop_if_true);
1407
 
        }
1408
 
        case op_pre_dec: {
1409
 
            compileFastArithSlow_op_pre_dec(currentInstruction[1].u.operand, iter);
1410
 
            NEXT_OPCODE(op_pre_dec);
1411
 
        }
1412
 
        case op_jnless: {
1413
 
            unsigned op2 = currentInstruction[2].u.operand;
1414
 
            unsigned target = currentInstruction[3].u.operand;
1415
 
            if (isOperandConstantImmediateInt(op2)) {
1416
 
                linkSlowCase(iter);
1417
 
                emitPutJITStubArg(X86::eax, 1);
1418
 
                emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 2, X86::ecx);
1419
 
                emitCTICall(Interpreter::cti_op_jless);
1420
 
                emitJumpSlowToHot(jz32(X86::eax), target + 3);
1421
 
            } else {
1422
 
                linkSlowCase(iter);
1423
 
                linkSlowCase(iter);
1424
 
                emitPutJITStubArg(X86::eax, 1);
1425
 
                emitPutJITStubArg(X86::edx, 2);
1426
 
                emitCTICall(Interpreter::cti_op_jless);
1427
 
                emitJumpSlowToHot(jz32(X86::eax), target + 3);
1428
 
            }
1429
 
            NEXT_OPCODE(op_jnless);
1430
 
        }
1431
 
        case op_not: {
1432
 
            linkSlowCase(iter);
1433
 
            xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), X86::eax);
1434
 
            emitPutJITStubArg(X86::eax, 1);
1435
 
            emitCTICall(Interpreter::cti_op_not);
1436
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
1437
 
            NEXT_OPCODE(op_not);
1438
 
        }
1439
 
        case op_jfalse: {
1440
 
            linkSlowCase(iter);
1441
 
            emitPutJITStubArg(X86::eax, 1);
1442
 
            emitCTICall(Interpreter::cti_op_jtrue);
1443
 
            unsigned target = currentInstruction[2].u.operand;
1444
 
            emitJumpSlowToHot(jz32(X86::eax), target + 2); // Inverted: op_jfalse reuses cti_op_jtrue, so branch when the result is zero.
1445
 
            NEXT_OPCODE(op_jfalse);
1446
 
        }
1447
 
        case op_post_inc: {
1448
 
            compileFastArithSlow_op_post_inc(currentInstruction[1].u.operand, currentInstruction[2].u.operand, iter);
1449
 
            NEXT_OPCODE(op_post_inc);
1450
 
        }
1451
 
        case op_bitnot: {
1452
 
            linkSlowCase(iter);
1453
 
            emitPutJITStubArg(X86::eax, 1);
1454
 
            emitCTICall(Interpreter::cti_op_bitnot);
1455
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
1456
 
            NEXT_OPCODE(op_bitnot);
1457
 
        }
1458
 
        case op_bitand: {
1459
 
            compileFastArithSlow_op_bitand(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, iter);
1460
 
            NEXT_OPCODE(op_bitand);
1461
 
        }
1462
 
        case op_jtrue: {
1463
 
            linkSlowCase(iter);
1464
 
            emitPutJITStubArg(X86::eax, 1);
1465
 
            emitCTICall(Interpreter::cti_op_jtrue);
1466
 
            unsigned target = currentInstruction[2].u.operand;
1467
 
            emitJumpSlowToHot(jnz32(X86::eax), target + 2);
1468
 
            NEXT_OPCODE(op_jtrue);
1469
 
        }
1470
 
        case op_post_dec: {
1471
 
            compileFastArithSlow_op_post_dec(currentInstruction[1].u.operand, currentInstruction[2].u.operand, iter);
1472
 
            NEXT_OPCODE(op_post_dec);
1473
 
        }
1474
 
        case op_bitxor: {
1475
 
            linkSlowCase(iter);
1476
 
            emitPutJITStubArg(X86::eax, 1);
1477
 
            emitPutJITStubArg(X86::edx, 2);
1478
 
            emitCTICall(Interpreter::cti_op_bitxor);
1479
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
1480
 
            NEXT_OPCODE(op_bitxor);
1481
 
        }
1482
 
        case op_bitor: {
1483
 
            linkSlowCase(iter);
1484
 
            emitPutJITStubArg(X86::eax, 1);
1485
 
            emitPutJITStubArg(X86::edx, 2);
1486
 
            emitCTICall(Interpreter::cti_op_bitor);
1487
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
1488
 
            NEXT_OPCODE(op_bitor);
1489
 
        }
1490
 
        case op_eq: {
1491
 
            linkSlowCase(iter);
1492
 
            emitPutJITStubArg(X86::eax, 1);
1493
 
            emitPutJITStubArg(X86::edx, 2);
1494
 
            emitCTICall(Interpreter::cti_op_eq);
1495
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
1496
 
            NEXT_OPCODE(op_eq);
1497
 
        }
1498
 
        case op_neq: {
1499
 
            linkSlowCase(iter);
1500
 
            emitPutJITStubArg(X86::eax, 1);
1501
 
            emitPutJITStubArg(X86::edx, 2);
1502
 
            emitCTICall(Interpreter::cti_op_neq);
1503
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
1504
 
            NEXT_OPCODE(op_neq);
1505
 
        }
1506
 
        case op_stricteq: {
1507
 
            linkSlowCase(iter);
1508
 
            linkSlowCase(iter);
1509
 
            linkSlowCase(iter);
1510
 
            emitPutJITStubArg(X86::eax, 1);
1511
 
            emitPutJITStubArg(X86::edx, 2);
1512
 
            emitCTICall(Interpreter::cti_op_stricteq);
1513
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
1514
 
            NEXT_OPCODE(op_stricteq);
1515
 
        }
1516
 
        case op_nstricteq: {
1517
 
            linkSlowCase(iter);
1518
 
            linkSlowCase(iter);
1519
 
            linkSlowCase(iter);
1520
 
            emitPutJITStubArg(X86::eax, 1);
1521
 
            emitPutJITStubArg(X86::edx, 2);
1522
 
            emitCTICall(Interpreter::cti_op_nstricteq);
1523
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
1524
 
            NEXT_OPCODE(op_nstricteq);
1525
 
        }
1526
 
        case op_instanceof: {
1527
 
            linkSlowCase(iter);
1528
 
            linkSlowCase(iter);
1529
 
            linkSlowCase(iter);
1530
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
1531
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx);
1532
 
            emitPutJITStubArgFromVirtualRegister(currentInstruction[4].u.operand, 3, X86::ecx);
1533
 
            emitCTICall(Interpreter::cti_op_instanceof);
1534
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
1535
 
            NEXT_OPCODE(op_instanceof);
1536
 
        }
1537
 
        case op_mod: {
1538
 
            compileFastArithSlow_op_mod(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, iter);
1539
 
            NEXT_OPCODE(op_mod);
1540
 
        }
1541
 
        case op_mul: {
1542
 
            compileFastArithSlow_op_mul(currentInstruction, iter);
1543
 
            NEXT_OPCODE(op_mul);
1544
 
        }
1545
 
 
1546
 
        case op_call: {
1547
 
            compileOpCallSlowCase(currentInstruction, iter, callLinkInfoIndex++, opcodeID);
1548
 
            NEXT_OPCODE(op_call);
1549
 
        }
1550
 
        case op_call_eval: {
1551
 
            compileOpCallSlowCase(currentInstruction, iter, callLinkInfoIndex++, opcodeID);
1552
 
            NEXT_OPCODE(op_call_eval);
1553
 
        }
1554
 
        case op_construct: {
1555
 
            compileOpCallSlowCase(currentInstruction, iter, callLinkInfoIndex++, opcodeID);
1556
 
            NEXT_OPCODE(op_construct);
1557
 
        }
1558
 
        case op_to_jsnumber: {
1559
 
            linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
1560
 
            linkSlowCase(iter);
1561
 
 
1562
 
            emitPutJITStubArg(X86::eax, 1);
1563
 
            emitCTICall(Interpreter::cti_op_to_jsnumber);
1564
 
 
1565
 
            emitPutVirtualRegister(currentInstruction[1].u.operand);
1566
 
            NEXT_OPCODE(op_to_jsnumber);
1567
 
        }
1568
 
 
 
370
        switch (m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
 
371
        DEFINE_SLOWCASE_OP(op_add)
 
372
        DEFINE_SLOWCASE_OP(op_bitand)
 
373
        DEFINE_SLOWCASE_OP(op_bitnot)
 
374
        DEFINE_SLOWCASE_OP(op_bitor)
 
375
        DEFINE_SLOWCASE_OP(op_bitxor)
 
376
        DEFINE_SLOWCASE_OP(op_call)
 
377
        DEFINE_SLOWCASE_OP(op_call_eval)
 
378
        DEFINE_SLOWCASE_OP(op_call_varargs)
 
379
        DEFINE_SLOWCASE_OP(op_construct)
 
380
        DEFINE_SLOWCASE_OP(op_construct_verify)
 
381
        DEFINE_SLOWCASE_OP(op_convert_this)
 
382
#if !USE(JSVALUE32)
 
383
        DEFINE_SLOWCASE_OP(op_div)
 
384
#endif
 
385
        DEFINE_SLOWCASE_OP(op_eq)
 
386
        DEFINE_SLOWCASE_OP(op_get_by_id)
 
387
        DEFINE_SLOWCASE_OP(op_get_by_val)
 
388
        DEFINE_SLOWCASE_OP(op_instanceof)
 
389
        DEFINE_SLOWCASE_OP(op_jfalse)
 
390
        DEFINE_SLOWCASE_OP(op_jnless)
 
391
        DEFINE_SLOWCASE_OP(op_jnlesseq)
 
392
        DEFINE_SLOWCASE_OP(op_jtrue)
 
393
        DEFINE_SLOWCASE_OP(op_loop_if_less)
 
394
        DEFINE_SLOWCASE_OP(op_loop_if_lesseq)
 
395
        DEFINE_SLOWCASE_OP(op_loop_if_true)
 
396
        DEFINE_SLOWCASE_OP(op_lshift)
 
397
        DEFINE_SLOWCASE_OP(op_method_check)
 
398
        DEFINE_SLOWCASE_OP(op_mod)
 
399
        DEFINE_SLOWCASE_OP(op_mul)
 
400
#if USE(JSVALUE32_64)
 
401
        DEFINE_SLOWCASE_OP(op_negate)
 
402
#endif
 
403
        DEFINE_SLOWCASE_OP(op_neq)
 
404
        DEFINE_SLOWCASE_OP(op_not)
 
405
        DEFINE_SLOWCASE_OP(op_nstricteq)
 
406
        DEFINE_SLOWCASE_OP(op_post_dec)
 
407
        DEFINE_SLOWCASE_OP(op_post_inc)
 
408
        DEFINE_SLOWCASE_OP(op_pre_dec)
 
409
        DEFINE_SLOWCASE_OP(op_pre_inc)
 
410
        DEFINE_SLOWCASE_OP(op_put_by_id)
 
411
        DEFINE_SLOWCASE_OP(op_put_by_val)
 
412
#if USE(JSVALUE32_64)
 
413
        DEFINE_SLOWCASE_OP(op_resolve_global)
 
414
#endif
 
415
        DEFINE_SLOWCASE_OP(op_rshift)
 
416
        DEFINE_SLOWCASE_OP(op_stricteq)
 
417
        DEFINE_SLOWCASE_OP(op_sub)
 
418
        DEFINE_SLOWCASE_OP(op_to_jsnumber)
 
419
        DEFINE_SLOWCASE_OP(op_to_primitive)
1569
420
        default:
1570
421
            ASSERT_NOT_REACHED();
1571
422
        }
1577
428
    }
1578
429
 
1579
430
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1580
 
    ASSERT(propertyAccessInstructionIndex == m_codeBlock->numberOfStructureStubInfos());
 
431
    ASSERT(m_propertyAccessInstructionIndex == m_codeBlock->numberOfStructureStubInfos());
1581
432
#endif
1582
 
    ASSERT(callLinkInfoIndex == m_codeBlock->numberOfCallLinkInfos());
 
433
    ASSERT(m_callLinkInfoIndex == m_codeBlock->numberOfCallLinkInfos());
1583
434
 
1584
435
#ifndef NDEBUG
1585
 
    // reset this, in order to guard it's use with asserts
 
436
    // Reset this, in order to guard its use with ASSERTs.
1586
437
    m_bytecodeIndex = (unsigned)-1;
1587
438
#endif
1588
439
}
1589
440
 
1590
 
void JIT::privateCompile()
 
441
JITCode JIT::privateCompile()
1591
442
{
1592
 
#if ENABLE(CODEBLOCK_SAMPLING)
1593
 
        storePtr(ImmPtr(m_codeBlock), m_interpreter->sampler()->codeBlockSlot());
1594
 
#endif
 
443
    sampleCodeBlock(m_codeBlock);
1595
444
#if ENABLE(OPCODE_SAMPLING)
1596
 
        store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin())), m_interpreter->sampler()->sampleSlot());
 
445
    sampleInstruction(m_codeBlock->instructions().begin());
1597
446
#endif
1598
447
 
1599
448
    // Could use a pop_m, but would need to offset the following instruction if so.
1600
 
    pop(X86::ecx);
1601
 
    emitPutToCallFrameHeader(X86::ecx, RegisterFile::ReturnPC);
 
449
    preserveReturnAddressAfterCall(regT2);
 
450
    emitPutToCallFrameHeader(regT2, RegisterFile::ReturnPC);
1602
451
 
1603
452
    Jump slowRegisterFileCheck;
1604
453
    Label afterRegisterFileCheck;
1606
455
        // In the case of a fast linked call, we do not set this up in the caller.
1607
456
        emitPutImmediateToCallFrameHeader(m_codeBlock, RegisterFile::CodeBlock);
1608
457
 
1609
 
        emitGetCTIParam(STUB_ARGS_registerFile, X86::eax);
1610
 
        addPtr(Imm32(m_codeBlock->m_numCalleeRegisters * sizeof(Register)), callFrameRegister, X86::edx);
1611
 
        
1612
 
        slowRegisterFileCheck = jg32(X86::edx, Address(X86::eax, FIELD_OFFSET(RegisterFile, m_end)));
 
458
        peek(regT0, OBJECT_OFFSETOF(JITStackFrame, registerFile) / sizeof (void*));
 
459
        addPtr(Imm32(m_codeBlock->m_numCalleeRegisters * sizeof(Register)), callFrameRegister, regT1);
 
460
 
 
461
        slowRegisterFileCheck = branchPtr(Above, regT1, Address(regT0, OBJECT_OFFSETOF(RegisterFile, m_end)));
1613
462
        afterRegisterFileCheck = label();
1614
463
    }
1615
464
 
1619
468
 
1620
469
    if (m_codeBlock->codeType() == FunctionCode) {
1621
470
        slowRegisterFileCheck.link(this);
1622
 
        m_bytecodeIndex = 0; // emitCTICall will add to the map, but doesn't actually need this...
1623
 
        emitCTICall(Interpreter::cti_register_file_check);
 
471
        m_bytecodeIndex = 0;
 
472
        JITStubCall(this, cti_register_file_check).call();
1624
473
#ifndef NDEBUG
1625
 
        // reset this, in order to guard it's use with asserts
1626
 
        m_bytecodeIndex = (unsigned)-1;
 
474
        m_bytecodeIndex = (unsigned)-1; // Reset this, in order to guard its use with ASSERTs.
1627
475
#endif
1628
476
        jump(afterRegisterFileCheck);
1629
477
    }
1630
478
 
1631
479
    ASSERT(m_jmpTable.isEmpty());
1632
480
 
1633
 
    RefPtr<ExecutablePool> allocator = m_globalData->poolForSize(m_assembler.size());
1634
 
    void* code = m_assembler.executableCopy(allocator.get());
1635
 
    JITCodeRef codeRef(code, allocator);
1636
 
 
1637
 
    PatchBuffer patchBuffer(code);
 
481
    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));
1638
482
 
1639
483
    // Translate vPC offsets into addresses in JIT generated code, for switch tables.
1640
484
    for (unsigned i = 0; i < m_switches.size(); ++i) {
1645
489
            ASSERT(record.type == SwitchRecord::Immediate || record.type == SwitchRecord::Character); 
1646
490
            ASSERT(record.jumpTable.simpleJumpTable->branchOffsets.size() == record.jumpTable.simpleJumpTable->ctiOffsets.size());
1647
491
 
1648
 
            record.jumpTable.simpleJumpTable->ctiDefault = patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + record.defaultOffset]);
 
492
            record.jumpTable.simpleJumpTable->ctiDefault = patchBuffer.locationOf(m_labels[bytecodeIndex + 3 + record.defaultOffset]);
1649
493
 
1650
494
            for (unsigned j = 0; j < record.jumpTable.simpleJumpTable->branchOffsets.size(); ++j) {
1651
495
                unsigned offset = record.jumpTable.simpleJumpTable->branchOffsets[j];
1652
 
                record.jumpTable.simpleJumpTable->ctiOffsets[j] = offset ? patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + offset]) : record.jumpTable.simpleJumpTable->ctiDefault;
 
496
                record.jumpTable.simpleJumpTable->ctiOffsets[j] = offset ? patchBuffer.locationOf(m_labels[bytecodeIndex + 3 + offset]) : record.jumpTable.simpleJumpTable->ctiDefault;
1653
497
            }
1654
498
        } else {
1655
499
            ASSERT(record.type == SwitchRecord::String);
1656
500
 
1657
 
            record.jumpTable.stringJumpTable->ctiDefault = patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + record.defaultOffset]);
 
501
            record.jumpTable.stringJumpTable->ctiDefault = patchBuffer.locationOf(m_labels[bytecodeIndex + 3 + record.defaultOffset]);
1658
502
 
1659
503
            StringJumpTable::StringOffsetTable::iterator end = record.jumpTable.stringJumpTable->offsetTable.end();            
1660
504
            for (StringJumpTable::StringOffsetTable::iterator it = record.jumpTable.stringJumpTable->offsetTable.begin(); it != end; ++it) {
1661
505
                unsigned offset = it->second.branchOffset;
1662
 
                it->second.ctiOffset = offset ? patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + offset]) : record.jumpTable.stringJumpTable->ctiDefault;
 
506
                it->second.ctiOffset = offset ? patchBuffer.locationOf(m_labels[bytecodeIndex + 3 + offset]) : record.jumpTable.stringJumpTable->ctiDefault;
1663
507
            }
1664
508
        }
1665
509
    }
1666
510
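
The loop above rewrites every switch table in place: each bytecode branch offset becomes the location of the generated label for that target, and an offset of zero maps to the default. The same translation over plain integers (the labels array stands in for patchBuffer.locationOf(m_labels[...])):

#include <cassert>
#include <vector>

// labels[i] is a (hypothetical) machine-code location for bytecode index i.
// Branch offsets are relative to the switch opcode at bytecodeIndex; the "+ 3"
// matches the operand count of the switch opcodes patched above.
std::vector<int> translateOffsets(const std::vector<int>& labels,
                                  const std::vector<int>& branchOffsets,
                                  int bytecodeIndex, int defaultOffset) {
    int ctiDefault = labels[bytecodeIndex + 3 + defaultOffset];
    std::vector<int> ctiOffsets(branchOffsets.size(), ctiDefault);
    for (size_t j = 0; j < branchOffsets.size(); ++j) {
        if (int offset = branchOffsets[j])
            ctiOffsets[j] = labels[bytecodeIndex + 3 + offset];
    }
    return ctiOffsets;
}

int main() {
    std::vector<int> labels(32);
    for (size_t i = 0; i < labels.size(); ++i)
        labels[i] = 1000 + (int)i;          // fake code addresses
    std::vector<int> cti = translateOffsets(labels, { 5, 0 }, /*bytecodeIndex*/ 4, /*defaultOffset*/ 10);
    assert(cti[0] == labels[4 + 3 + 5]);    // real target
    assert(cti[1] == labels[4 + 3 + 10]);   // 0 means "use the default"
    return 0;
}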
 
1667
511
    for (size_t i = 0; i < m_codeBlock->numberOfExceptionHandlers(); ++i) {
1668
512
        HandlerInfo& handler = m_codeBlock->exceptionHandler(i);
1669
 
        handler.nativeCode = patchBuffer.addressOf(m_labels[handler.target]);
 
513
        handler.nativeCode = patchBuffer.locationOf(m_labels[handler.target]);
1670
514
    }
1671
515
 
1672
 
    m_codeBlock->pcVector().reserveCapacity(m_calls.size());
1673
516
    for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
1674
517
        if (iter->to)
1675
 
            patchBuffer.link(iter->from, iter->to);
1676
 
        m_codeBlock->pcVector().append(PC(reinterpret_cast<void**>(patchBuffer.addressOf(iter->from)) - reinterpret_cast<void**>(code), iter->bytecodeIndex));
 
518
            patchBuffer.link(iter->from, FunctionPtr(iter->to));
 
519
    }
 
520
 
 
521
    if (m_codeBlock->hasExceptionInfo()) {
 
522
        m_codeBlock->callReturnIndexVector().reserveCapacity(m_calls.size());
 
523
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter)
 
524
            m_codeBlock->callReturnIndexVector().append(CallReturnOffsetToBytecodeIndex(patchBuffer.returnAddressOffset(iter->from), iter->bytecodeIndex));
1677
525
    }
1678
526
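
The hasExceptionInfo block records, for each outgoing stub call, the pair (return-address offset within the generated code, bytecode index that emitted it). When an exception unwinds, the VM only has a native return address, and this table is what maps it back to a bytecode index so the right handler can be found. A sketch of such a lookup with invented record and search code (the binary search is an assumption about how the table is consumed, not something shown in this diff):

#include <algorithm>
#include <cassert>
#include <vector>

// Invented version of CallReturnOffsetToBytecodeIndex.
struct ReturnOffsetToBytecode {
    unsigned returnOffset;   // offset of the return address within the JIT code
    unsigned bytecodeIndex;  // bytecode instruction that emitted the call
};

// Records are appended in emission order, so they are sorted by returnOffset
// and a binary search can map a return address back to bytecode.
unsigned bytecodeForReturnOffset(const std::vector<ReturnOffsetToBytecode>& table, unsigned returnOffset) {
    auto it = std::lower_bound(table.begin(), table.end(), returnOffset,
        [](const ReturnOffsetToBytecode& entry, unsigned offset) { return entry.returnOffset < offset; });
    assert(it != table.end() && it->returnOffset == returnOffset);
    return it->bytecodeIndex;
}

int main() {
    std::vector<ReturnOffsetToBytecode> table = { { 0x10, 2 }, { 0x34, 9 }, { 0x58, 17 } };
    assert(bytecodeForReturnOffset(table, 0x34) == 9);
    return 0;
}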
 
1679
527
    // Link absolute addresses for jsr
1680
528
    for (Vector<JSRInfo>::iterator iter = m_jsrSites.begin(); iter != m_jsrSites.end(); ++iter)
1681
 
        patchBuffer.setPtr(iter->storeLocation, patchBuffer.addressOf(iter->target));
 
529
        patchBuffer.patch(iter->storeLocation, patchBuffer.locationOf(iter->target).executableAddress());
1682
530
 
 
531
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1683
532
    for (unsigned i = 0; i < m_codeBlock->numberOfStructureStubInfos(); ++i) {
1684
533
        StructureStubInfo& info = m_codeBlock->structureStubInfo(i);
1685
 
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1686
 
        info.callReturnLocation = patchBuffer.addressOf(m_propertyAccessCompilationInfo[i].callReturnLocation);
1687
 
        info.hotPathBegin = patchBuffer.addressOf(m_propertyAccessCompilationInfo[i].hotPathBegin);
1688
 
#else
1689
 
        info.callReturnLocation = 0;
1690
 
        info.hotPathBegin = 0;
 
534
        info.callReturnLocation = patchBuffer.locationOf(m_propertyAccessCompilationInfo[i].callReturnLocation);
 
535
        info.hotPathBegin = patchBuffer.locationOf(m_propertyAccessCompilationInfo[i].hotPathBegin);
 
536
    }
1691
537
#endif
1692
 
    }
 
538
#if ENABLE(JIT_OPTIMIZE_CALL)
1693
539
    for (unsigned i = 0; i < m_codeBlock->numberOfCallLinkInfos(); ++i) {
1694
540
        CallLinkInfo& info = m_codeBlock->callLinkInfo(i);
1695
 
#if ENABLE(JIT_OPTIMIZE_CALL)
1696
 
        info.callReturnLocation = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].callReturnLocation);
1697
 
        info.hotPathBegin = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].hotPathBegin);
1698
 
        info.hotPathOther = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].hotPathOther);
1699
 
        info.coldPathOther = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].coldPathOther);
1700
 
#else
1701
 
        info.callReturnLocation = 0;
1702
 
        info.hotPathBegin = 0;
1703
 
        info.hotPathOther = 0;
1704
 
        info.coldPathOther = 0;
1705
 
#endif
1706
 
    }
1707
 
 
1708
 
    m_codeBlock->setJITCode(codeRef);
1709
 
}
1710
 
 
1711
 
void JIT::privateCompileCTIMachineTrampolines()
1712
 
{
1713
 
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1714
 
    // (1) The first function provides fast property access for array length
1715
 
    Label arrayLengthBegin = align();
1716
 
 
1717
 
    // Check eax is an array
1718
 
    Jump array_failureCases1 = emitJumpIfNotJSCell(X86::eax);
1719
 
    Jump array_failureCases2 = jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsArrayVptr));
1720
 
 
1721
 
    // Checks out okay! - get the length from the storage
1722
 
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSArray, m_storage)), X86::eax);
1723
 
    load32(Address(X86::eax, FIELD_OFFSET(ArrayStorage, m_length)), X86::eax);
1724
 
 
1725
 
    Jump array_failureCases3 = ja32(X86::eax, Imm32(JSImmediate::maxImmediateInt));
1726
 
 
1727
 
    // X86::eax contains a 64-bit value (positive, zero-extended), so we don't need to sign-extend here.
1728
 
    emitFastArithIntToImmNoCheck(X86::eax, X86::eax);
1729
 
 
1730
 
    ret();
1731
 
 
1732
 
    // (2) The second function provides fast property access for string length
1733
 
    Label stringLengthBegin = align();
1734
 
 
1735
 
    // Check eax is a string
1736
 
    Jump string_failureCases1 = emitJumpIfNotJSCell(X86::eax);
1737
 
    Jump string_failureCases2 = jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsStringVptr));
1738
 
 
1739
 
    // Checks out okay! - get the length from the UString.
1740
 
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSString, m_value) + FIELD_OFFSET(UString, m_rep)), X86::eax);
1741
 
    load32(Address(X86::eax, FIELD_OFFSET(UString::Rep, len)), X86::eax);
1742
 
 
1743
 
    Jump string_failureCases3 = ja32(X86::eax, Imm32(JSImmediate::maxImmediateInt));
1744
 
 
1745
 
    // X86::eax contains a 64-bit value (positive, zero-extended), so we don't need to sign-extend here.
1746
 
    emitFastArithIntToImmNoCheck(X86::eax, X86::eax);
1747
 
    
1748
 
    ret();
1749
 
#endif
1750
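
Both fast-path trampolines above begin the same way: bail out if the value is not a cell, then compare the cell's vtable pointer against the cached JSArray / JSString vptr before trusting the object layout and loading the length field directly. A conceptual sketch of that kind of exact-type vptr check in ordinary C++ (it assumes the common ABI where the vptr is the first word of a polymorphic object, and it is not JSC's object model):

#include <cstdio>
#include <cstring>

struct Cell { virtual ~Cell() {} };
struct Array : Cell { int length = 3; };
struct String : Cell { int length = 5; };

// The trampolines compare the first word of the object (its vptr) against a
// cached value; we model that here by reading the first pointer-sized word of
// a polymorphic object via memcpy.
void* vptrOf(const Cell* cell) {
    void* vptr;
    std::memcpy(&vptr, cell, sizeof vptr);
    return vptr;
}

int main() {
    Array array, knownArray;
    String string;
    void* jsArrayVptr = vptrOf(&knownArray);   // analogous to caching m_jsArrayVptr
    std::printf("%d %d\n", vptrOf(&array) == jsArrayVptr, vptrOf(&string) == jsArrayVptr);  // 1 0
    return 0;
}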
 
 
1751
 
    // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
1752
 
    
1753
 
    Label virtualCallPreLinkBegin = align();
1754
 
 
1755
 
    // Load the callee CodeBlock* into eax
1756
 
    loadPtr(Address(X86::ecx, FIELD_OFFSET(JSFunction, m_body)), X86::eax);
1757
 
    loadPtr(Address(X86::eax, FIELD_OFFSET(FunctionBodyNode, m_code)), X86::eax);
1758
 
    Jump hasCodeBlock1 = jnzPtr(X86::eax);
1759
 
    pop(X86::ebx);
1760
 
    restoreArgumentReference();
1761
 
    Jump callJSFunction1 = call();
1762
 
    emitGetJITStubArg(1, X86::ecx);
1763
 
    emitGetJITStubArg(3, X86::edx);
1764
 
    push(X86::ebx);
1765
 
    hasCodeBlock1.link(this);
1766
 
 
1767
 
    // Check argCount matches callee arity.
1768
 
    Jump arityCheckOkay1 = je32(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_numParameters)), X86::edx);
1769
 
    pop(X86::ebx);
1770
 
    emitPutJITStubArg(X86::ebx, 2);
1771
 
    emitPutJITStubArg(X86::eax, 4);
1772
 
    restoreArgumentReference();
1773
 
    Jump callArityCheck1 = call();
1774
 
    move(X86::edx, callFrameRegister);
1775
 
    emitGetJITStubArg(1, X86::ecx);
1776
 
    emitGetJITStubArg(3, X86::edx);
1777
 
    push(X86::ebx);
1778
 
    arityCheckOkay1.link(this);
1779
 
    
1780
 
    compileOpCallInitializeCallFrame();
1781
 
 
1782
 
    pop(X86::ebx);
1783
 
    emitPutJITStubArg(X86::ebx, 2);
1784
 
    restoreArgumentReference();
1785
 
    Jump callDontLazyLinkCall = call();
1786
 
    push(X86::ebx);
1787
 
 
1788
 
    jump(X86::eax);
1789
 
 
1790
 
    Label virtualCallLinkBegin = align();
1791
 
 
1792
 
    // Load the callee CodeBlock* into eax
1793
 
    loadPtr(Address(X86::ecx, FIELD_OFFSET(JSFunction, m_body)), X86::eax);
1794
 
    loadPtr(Address(X86::eax, FIELD_OFFSET(FunctionBodyNode, m_code)), X86::eax);
1795
 
    Jump hasCodeBlock2 = jnzPtr(X86::eax);
1796
 
    pop(X86::ebx);
1797
 
    restoreArgumentReference();
1798
 
    Jump callJSFunction2 = call();
1799
 
    emitGetJITStubArg(1, X86::ecx);
1800
 
    emitGetJITStubArg(3, X86::edx);
1801
 
    push(X86::ebx);
1802
 
    hasCodeBlock2.link(this);
1803
 
 
1804
 
    // Check argCount matches callee arity.
1805
 
    Jump arityCheckOkay2 = je32(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_numParameters)), X86::edx);
1806
 
    pop(X86::ebx);
1807
 
    emitPutJITStubArg(X86::ebx, 2);
1808
 
    emitPutJITStubArg(X86::eax, 4);
1809
 
    restoreArgumentReference();
1810
 
    Jump callArityCheck2 = call();
1811
 
    move(X86::edx, callFrameRegister);
1812
 
    emitGetJITStubArg(1, X86::ecx);
1813
 
    emitGetJITStubArg(3, X86::edx);
1814
 
    push(X86::ebx);
1815
 
    arityCheckOkay2.link(this);
1816
 
 
1817
 
    compileOpCallInitializeCallFrame();
1818
 
 
1819
 
    pop(X86::ebx);
1820
 
    emitPutJITStubArg(X86::ebx, 2);
1821
 
    restoreArgumentReference();
1822
 
    Jump callLazyLinkCall = call();
1823
 
    push(X86::ebx);
1824
 
 
1825
 
    jump(X86::eax);
1826
 
 
1827
 
    Label virtualCallBegin = align();
1828
 
 
1829
 
    // Load the callee CodeBlock* into eax
1830
 
    loadPtr(Address(X86::ecx, FIELD_OFFSET(JSFunction, m_body)), X86::eax);
1831
 
    loadPtr(Address(X86::eax, FIELD_OFFSET(FunctionBodyNode, m_code)), X86::eax);
1832
 
    Jump hasCodeBlock3 = jnzPtr(X86::eax);
1833
 
    pop(X86::ebx);
1834
 
    restoreArgumentReference();
1835
 
    Jump callJSFunction3 = call();
1836
 
    emitGetJITStubArg(1, X86::ecx);
1837
 
    emitGetJITStubArg(3, X86::edx);
1838
 
    push(X86::ebx);
1839
 
    hasCodeBlock3.link(this);
1840
 
 
1841
 
    // Check argCount matches callee arity.
1842
 
    Jump arityCheckOkay3 = je32(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_numParameters)), X86::edx);
1843
 
    pop(X86::ebx);
1844
 
    emitPutJITStubArg(X86::ebx, 2);
1845
 
    emitPutJITStubArg(X86::eax, 4);
1846
 
    restoreArgumentReference();
1847
 
    Jump callArityCheck3 = call();
1848
 
    move(X86::edx, callFrameRegister);
1849
 
    emitGetJITStubArg(1, X86::ecx);
1850
 
    emitGetJITStubArg(3, X86::edx);
1851
 
    push(X86::ebx);
1852
 
    arityCheckOkay3.link(this);
1853
 
 
1854
 
    compileOpCallInitializeCallFrame();
1855
 
 
1856
 
    // Load the JIT code pointer from the new codeBlock.
1857
 
    loadPtr(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_jitCode)), X86::eax);
1858
 
 
1859
 
    jump(X86::eax);
1860
 
 
1861
 
    // All trampolines constructed! Copy the code, link up the calls, and set the pointers on the Machine object.
1862
 
    m_interpreter->m_executablePool = m_globalData->poolForSize(m_assembler.size());
1863
 
    void* code = m_assembler.executableCopy(m_interpreter->m_executablePool.get());
1864
 
    PatchBuffer patchBuffer(code);
1865
 
 
1866
 
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1867
 
    patchBuffer.link(array_failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_array_fail));
1868
 
    patchBuffer.link(array_failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_array_fail));
1869
 
    patchBuffer.link(array_failureCases3, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_array_fail));
1870
 
    patchBuffer.link(string_failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_string_fail));
1871
 
    patchBuffer.link(string_failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_string_fail));
1872
 
    patchBuffer.link(string_failureCases3, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_string_fail));
1873
 
 
1874
 
    m_interpreter->m_ctiArrayLengthTrampoline = patchBuffer.addressOf(arrayLengthBegin);
1875
 
    m_interpreter->m_ctiStringLengthTrampoline = patchBuffer.addressOf(stringLengthBegin);
1876
 
#endif
1877
 
    patchBuffer.link(callArityCheck1, reinterpret_cast<void*>(Interpreter::cti_op_call_arityCheck));
1878
 
    patchBuffer.link(callArityCheck2, reinterpret_cast<void*>(Interpreter::cti_op_call_arityCheck));
1879
 
    patchBuffer.link(callArityCheck3, reinterpret_cast<void*>(Interpreter::cti_op_call_arityCheck));
1880
 
    patchBuffer.link(callJSFunction1, reinterpret_cast<void*>(Interpreter::cti_op_call_JSFunction));
1881
 
    patchBuffer.link(callJSFunction2, reinterpret_cast<void*>(Interpreter::cti_op_call_JSFunction));
1882
 
    patchBuffer.link(callJSFunction3, reinterpret_cast<void*>(Interpreter::cti_op_call_JSFunction));
1883
 
    patchBuffer.link(callDontLazyLinkCall, reinterpret_cast<void*>(Interpreter::cti_vm_dontLazyLinkCall));
1884
 
    patchBuffer.link(callLazyLinkCall, reinterpret_cast<void*>(Interpreter::cti_vm_lazyLinkCall));
1885
 
 
1886
 
    m_interpreter->m_ctiVirtualCallPreLink = patchBuffer.addressOf(virtualCallPreLinkBegin);
1887
 
    m_interpreter->m_ctiVirtualCallLink = patchBuffer.addressOf(virtualCallLinkBegin);
1888
 
    m_interpreter->m_ctiVirtualCall = patchBuffer.addressOf(virtualCallBegin);
1889
 
}
1890
 
 
 
541
        info.ownerCodeBlock = m_codeBlock;
 
542
        info.callReturnLocation = patchBuffer.locationOfNearCall(m_callStructureStubCompilationInfo[i].callReturnLocation);
 
543
        info.hotPathBegin = patchBuffer.locationOf(m_callStructureStubCompilationInfo[i].hotPathBegin);
 
544
        info.hotPathOther = patchBuffer.locationOfNearCall(m_callStructureStubCompilationInfo[i].hotPathOther);
 
545
    }
 
546
#endif
 
547
    unsigned methodCallCount = m_methodCallCompilationInfo.size();
 
548
    m_codeBlock->addMethodCallLinkInfos(methodCallCount);
 
549
    for (unsigned i = 0; i < methodCallCount; ++i) {
 
550
        MethodCallLinkInfo& info = m_codeBlock->methodCallLinkInfo(i);
 
551
        info.structureLabel = patchBuffer.locationOf(m_methodCallCompilationInfo[i].structureToCompare);
 
552
        info.callReturnLocation = m_codeBlock->structureStubInfo(m_methodCallCompilationInfo[i].propertyAccessIndex).callReturnLocation;
 
553
    }
 
554
 
 
555
    return patchBuffer.finalizeCode();
 
556
}
 
557
 
 
558
#if !USE(JSVALUE32_64)
1891
559
void JIT::emitGetVariableObjectRegister(RegisterID variableObject, int index, RegisterID dst)
1892
560
{
1893
 
    loadPtr(Address(variableObject, FIELD_OFFSET(JSVariableObject, d)), dst);
1894
 
    loadPtr(Address(dst, FIELD_OFFSET(JSVariableObject::JSVariableObjectData, registers)), dst);
 
561
    loadPtr(Address(variableObject, OBJECT_OFFSETOF(JSVariableObject, d)), dst);
 
562
    loadPtr(Address(dst, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), dst);
1895
563
    loadPtr(Address(dst, index * sizeof(Register)), dst);
1896
564
}
1897
565
 
1898
566
void JIT::emitPutVariableObjectRegister(RegisterID src, RegisterID variableObject, int index)
1899
567
{
1900
 
    loadPtr(Address(variableObject, FIELD_OFFSET(JSVariableObject, d)), variableObject);
1901
 
    loadPtr(Address(variableObject, FIELD_OFFSET(JSVariableObject::JSVariableObjectData, registers)), variableObject);
 
568
    loadPtr(Address(variableObject, OBJECT_OFFSETOF(JSVariableObject, d)), variableObject);
 
569
    loadPtr(Address(variableObject, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), variableObject);
1902
570
    storePtr(src, Address(variableObject, index * sizeof(Register)));
1903
571
}
 
572
#endif
 
573
 
 
574
#if ENABLE(JIT_OPTIMIZE_CALL)
 
575
void JIT::unlinkCall(CallLinkInfo* callLinkInfo)
 
576
{
 
577
    // When the JSFunction is deleted the pointer embedded in the instruction stream will no longer be valid
 
578
    // (and, if a new JSFunction happened to be constructed at the same location, we could get a false positive
 
579
    // match).  Reset the check so it no longer matches.
 
580
    RepatchBuffer repatchBuffer(callLinkInfo->ownerCodeBlock.get());
 
581
#if USE(JSVALUE32_64)
 
582
    repatchBuffer.repatch(callLinkInfo->hotPathBegin, 0);
 
583
#else
 
584
    repatchBuffer.repatch(callLinkInfo->hotPathBegin, JSValue::encode(JSValue()));
 
585
#endif
 
586
}
 
587
 
 
588
void JIT::linkCall(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, JITCode& code, CallLinkInfo* callLinkInfo, int callerArgCount, JSGlobalData* globalData)
 
589
{
 
590
    RepatchBuffer repatchBuffer(callerCodeBlock);
 
591
 
 
592
    // Currently we only link calls with the exact number of arguments.
 
593
    // If this is a native call, calleeCodeBlock is null, so the number of parameters is unimportant.
 
594
    if (!calleeCodeBlock || (callerArgCount == calleeCodeBlock->m_numParameters)) {
 
595
        ASSERT(!callLinkInfo->isLinked());
 
596
    
 
597
        if (calleeCodeBlock)
 
598
            calleeCodeBlock->addCaller(callLinkInfo);
 
599
    
 
600
        repatchBuffer.repatch(callLinkInfo->hotPathBegin, callee);
 
601
        repatchBuffer.relink(callLinkInfo->hotPathOther, code.addressForCall());
 
602
    }
 
603
 
 
604
    // Patch the call so we do not continue to try to link.
 
605
    repatchBuffer.relink(callLinkInfo->callReturnLocation, globalData->jitStubs.ctiVirtualCall());
 
606
}
 
607
#endif // ENABLE(JIT_OPTIMIZE_CALL)
1904
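
linkCall and unlinkCall above are the two halves of monomorphic call caching: when the arity matches, the call site is patched to guard on one specific JSFunction and jump straight to its compiled code; when that function dies, the guard is reset and the site falls back to the virtual-call trampoline. A sketch of the same state machine with plain function pointers instead of code patching (all names invented):

#include <cstdio>

using CompiledCode = void (*)();

void compiledCallee() { std::puts("fast path: direct call"); }
void virtualCallTrampoline() { std::puts("slow path: look up callee every time"); }

// Invented call-site cache playing the role of CallLinkInfo plus the patched
// instructions: expectedCallee guards the fast path, target is where we jump.
struct ToyCallSite {
    const void* expectedCallee = nullptr;          // null == unlinked
    CompiledCode target = virtualCallTrampoline;

    void link(const void* callee, CompiledCode code) { expectedCallee = callee; target = code; }
    void unlink() { expectedCallee = nullptr; target = virtualCallTrampoline; }

    void call(const void* callee) {
        if (callee == expectedCallee)
            target();                              // patched direct jump
        else
            virtualCallTrampoline();               // guard failed - generic path
    }
};

int main() {
    int calleeIdentity;                             // stands in for a JSFunction*
    ToyCallSite site;
    site.call(&calleeIdentity);                     // slow path (not yet linked)
    site.link(&calleeIdentity, compiledCallee);
    site.call(&calleeIdentity);                     // fast path
    site.unlink();                                  // e.g. the JSFunction was destroyed
    site.call(&calleeIdentity);                     // slow path again
    return 0;
}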
608
 
1905
609
} // namespace JSC
1906
610