~oif-team/ubuntu/natty/qt4-x11/xi2.1

Viewing changes to src/3rdparty/javascriptcore/JavaScriptCore/jit/JITOpcodes.cpp

  • Committer: Bazaar Package Importer
  • Author(s): Alessandro Ghersi
  • Date: 2009-11-02 18:30:08 UTC
  • mfrom: (1.2.2 upstream)
  • mto: (15.2.5 experimental)
  • mto: This revision was merged to the branch mainline in revision 88.
  • Revision ID: james.westby@ubuntu.com-20091102183008-b6a4gcs128mvfb3m
Tags: upstream-4.6.0~beta1
Import upstream version 4.6.0~beta1

 
/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JIT.h"

#if ENABLE(JIT)

#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "LinkBuffer.h"

namespace JSC {

#if USE(JSVALUE32_64)

void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
{
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (1) This function provides fast property access for string length
    Label stringLengthBegin = align();

    // regT0 holds payload, regT1 holds tag

    Jump string_failureCases1 = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! - get the length from the UString.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSString, m_value) + OBJECT_OFFSETOF(UString, m_rep)), regT2);
    load32(Address(regT2, OBJECT_OFFSETOF(UString::Rep, len)), regT2);

    Jump string_failureCases3 = branch32(Above, regT2, Imm32(INT_MAX));
    move(regT2, regT0);
    move(Imm32(JSValue::Int32Tag), regT1);

    ret();
#endif

    // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.

#if ENABLE(JIT_OPTIMIZE_CALL)
    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualCallLinkBegin = align();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump isNativeFunc2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));

    Jump hasCodeBlock2 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction2 = call();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    hasCodeBlock2.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callArityCheck2 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    arityCheckOkay2.link(this);

    isNativeFunc2.link(this);

    compileOpCallInitializeCallFrame();

    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);
#endif // ENABLE(JIT_OPTIMIZE_CALL)

    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump isNativeFunc3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));

    Jump hasCodeBlock3 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction1 = call();
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callArityCheck1 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    arityCheckOkay3.link(this);

    isNativeFunc3.link(this);

    compileOpCallInitializeCallFrame();
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCode)), regT0);
    jump(regT0);

#if PLATFORM(X86)
    Label nativeCallThunk = align();
    preserveReturnAddressAfterCall(regT0);
    emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address

    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    /* We have two structs that we use to describe the stackframe we set up for our
     * call to native code.  NativeCallFrameStructure describes how we set up the stack
     * in advance of the call.  NativeFunctionCalleeSignature describes the callframe
     * as the native code expects it.  We do this as we are using the fastcall calling
     * convention which results in the callee popping its arguments off the stack, but
     * not the rest of the callframe so we need a nice way to ensure we increment the
     * stack pointer by the right amount after the call.
     */

#if COMPILER(MSVC) || PLATFORM(LINUX)
#if COMPILER(MSVC)
#pragma pack(push)
#pragma pack(4)
#endif // COMPILER(MSVC)
    struct NativeCallFrameStructure {
      //  CallFrame* callFrame; // passed in EDX
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
        JSValue result;
    };
    struct NativeFunctionCalleeSignature {
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
    };
#if COMPILER(MSVC)
#pragma pack(pop)
#endif // COMPILER(MSVC)
#else
    struct NativeCallFrameStructure {
      //  CallFrame* callFrame; // passed in ECX
      //  JSObject* callee; // passed in EDX
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
    };
    struct NativeFunctionCalleeSignature {
        JSValue thisValue;
        ArgList* argPointer;
    };
#endif

    const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
    // Allocate system stack frame
    subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);

    // Set up arguments
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    // push argcount
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));

    // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));

    // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
    loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT3);
    storePtr(regT2, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    storePtr(regT3, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

#if COMPILER(MSVC) || PLATFORM(LINUX)
    // JSValue is returned indirectly: pass the address of 'result' as the hidden return-value pointer (in ECX)
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86Registers::ecx);

    // Plant callee
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::eax);
    storePtr(X86Registers::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));

    // Plant callframe
    move(callFrameRegister, X86Registers::edx);

    call(Address(X86Registers::eax, OBJECT_OFFSETOF(JSFunction, m_data)));

    // JSValue is a non-POD type, so eax points to it
    emitLoad(0, regT1, regT0, X86Registers::eax);
#else
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::edx); // callee
    move(callFrameRegister, X86Registers::ecx); // callFrame
    call(Address(X86Registers::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
#endif

    // We've put a few temporaries on the stack in addition to the actual arguments
    // so pull them off now
    addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);

    // Check for an exception
    move(ImmPtr(&globalData->exception), regT2);
    Jump sawException = branch32(NotEqual, tagFor(0, regT2), Imm32(JSValue::EmptyValueTag));

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT3);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    // Handle an exception
    sawException.link(this);
    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    move(ImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    move(ImmPtr(reinterpret_cast<void*>(ctiVMThrowTrampoline)), regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
    restoreReturnAddressBeforeReturn(regT2);
    ret();

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    breakpoint();
#endif

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
    patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
#endif

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    *ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
    *ctiNativeCallThunk = trampolineAt(finalCode, nativeCallThunk);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    *ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
#else
    UNUSED_PARAM(ctiStringLengthTrampoline);
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    *ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
#else
    UNUSED_PARAM(ctiVirtualCallLink);
#endif
}
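
// [Editor's note — illustrative aside, not part of the upstream file] The opcode
// emitters below all assume the JSVALUE32_64 value encoding: a JSValue is a
// 64-bit tag/payload pair, payload in the low word and tag in the high word
// (hence the "+ 4" tag offsets seen later in this file). A minimal sketch of
// the layout this code relies on (the real definition lives in JSValue.h):
//
//     union EncodedValue {
//         double asDouble;
//         struct { int32_t payload; int32_t tag; } asBits;
//     };
//
// emitLoad(src, regT1, regT0) brings a value's tag into regT1 and its payload
// into regT0; emitStore(dst, regT1, regT0) writes the pair back to the register
// file. As the comparisons against JSValue::LowestTag below imply, tag values
// at or below LowestTag are interpreted as the high word of a double.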
 
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
        map(m_bytecodeIndex + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    }
}
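
// [Editor's note — illustrative aside] map(bytecodeIndex, dst, tag, payload),
// used above and throughout this file, records that dst's tag and payload are
// currently held in the named registers, so the emitter for the immediately
// following opcode can skip reloading them. A hedged sketch of the bookkeeping
// (member names are an assumption, mirroring what the accessors here suggest):
//
//     // m_mappedBytecodeIndex = bytecodeIndex;   // valid only for the next opcode
//     // m_mappedVirtualRegisterIndex = dst;
//     // m_mappedTag = tag; m_mappedPayload = payload;
//
// Any emitted call or register clobber drops the mapping.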
 
void JIT::emit_op_end(Instruction* currentInstruction)
{
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_end).call();
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target + 1);
}
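
// [Editor's note — illustrative aside] The small constant added to each jump
// target in this file (target + 1 above, target + 2 / target + 3 below) always
// matches the index of the target operand within its instruction, which
// suggests bytecode jump offsets are encoded relative to the operand's own
// slot rather than the instruction start. For example, op_loop_if_less keeps
// its target in operand 3, hence:
//
//     // addJump(branch32(LessThan, regT0, regT2), target + 3);  // operands: op1, op2, target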
 
void JIT::emit_op_loop(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    emitTimeoutCheck();
    addJump(jump(), target + 1);
}

void JIT::emit_op_loop_if_less(Instruction* currentInstruction)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    emitTimeoutCheck();

    if (isOperandConstantImmediateInt(op1)) {
        emitLoad(op2, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(GreaterThan, regT0, Imm32(getConstantOperand(op1).asInt32())), target + 3);
        return;
    }

    if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(LessThan, regT0, Imm32(getConstantOperand(op2).asInt32())), target + 3);
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
    addJump(branch32(LessThan, regT0, regT2), target + 3);
}

void JIT::emitSlow_op_loop_if_less(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
        linkSlowCase(iter); // int32 check
    linkSlowCase(iter); // int32 check

    JITStubCall stubCall(this, cti_op_loop_if_less);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    emitTimeoutCheck();

    if (isOperandConstantImmediateInt(op1)) {
        emitLoad(op2, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op1).asInt32())), target + 3);
        return;
    }

    if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(LessThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target + 3);
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
    addJump(branch32(LessThanOrEqual, regT0, regT2), target + 3);
}

void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
        linkSlowCase(iter); // int32 check
    linkSlowCase(iter); // int32 check

    JITStubCall stubCall(this, cti_op_loop_if_lesseq);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(proto, regT1);
    emitLoadPayload(baseVal, regT0);
    emitLoadPayload(value, regT2);

    // Check that baseVal & proto are cells.
    emitJumpSlowCaseIfNotJSCell(proto);
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal is an object, that it 'ImplementsHasInstance' but that it does not 'OverridesHasInstance'.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType))); // FIXME: Maybe remove this test.
    addSlowCase(branchTest32(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsHasInstance))); // FIXME: TOT checks ImplementsDefaultHasInstance.

    // If value is not an Object, return false.
    emitLoadTag(value, regT0);
    Jump valueIsImmediate = branch32(NotEqual, regT0, Imm32(JSValue::CellTag));
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    Jump valueIsNotObject = branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)); // FIXME: Maybe remove this test.

    // Check proto is object.
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(Imm32(JSValue::TrueTag), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    load32(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branch32(NotEqual, regT2, Imm32(0), loop);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    valueIsImmediate.link(this);
    valueIsNotObject.link(this);
    move(Imm32(JSValue::FalseTag), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}
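
// [Editor's note — illustrative aside] A C-level restatement of the loop
// emitted above, for readers following the generated control flow (the
// accessor spelling is hypothetical; the real code reads the payload of
// Structure::m_prototype directly):
//
//     // bool result = true;
//     // for (JSCell* c = value;;) {
//     //     c = prototypePayload(c->m_structure);   // next link in the chain
//     //     if (c == proto) break;                  // isInstance: result stays true
//     //     if (!c) { result = false; break; }      // walked off the end of the chain
//     // }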
 
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[2].u.jsCell);
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[3].u.operand;

    loadPtr(&globalObject->d()->registers, regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[1].u.jsCell);
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    loadPtr(&globalObject->d()->registers, regT2);
    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
}

void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);

    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_put_scoped_var), value, regT1, regT0);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}

void JIT::emit_op_tear_off_arguments(Instruction*)
{
    JITStubCall(this, cti_op_tear_off_arguments).call();
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_loop_if_true(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitTimeoutCheck();

    emitLoad(cond, regT1, regT0);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    addJump(branch32(NotEqual, regT0, Imm32(0)), target + 2);
    Jump isNotZero = jump();

    isNotInteger.link(this);

    addJump(branch32(Equal, regT1, Imm32(JSValue::TrueTag)), target + 2);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::FalseTag)));

    isNotZero.link(this);
}

void JIT::emitSlow_op_loop_if_true(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 2);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction)
{
    // FIXME: Optimize to use patching instead of so many memory accesses.

    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;

    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Verify structure.
    move(ImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))));

    // Load property.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT2);
    load32(offsetAddr, regT3);
    load32(BaseIndex(regT2, regT3, TimesEight), regT0); // payload
    load32(BaseIndex(regT2, regT3, TimesEight, 4), regT1); // tag
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_resolve_global), dst, regT1, regT0);
}
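
// [Editor's note — illustrative aside] The fast path above is a per-site
// inline cache: each op_resolve_global site owns a GlobalResolveInfo whose
// .structure and .offset fields are read above, and the slow-path stub below
// (cti_op_resolve_global) fills them in so later executions take the two-load
// fast path. Conceptually:
//
//     // if (globalObject->m_structure == cached.structure)   // structure check
//     //     result = m_externalStorage[cached.offset];       // tag + payload loads
//     // else
//     //     result = cti_op_resolve_global(...);             // resolves and fills the cache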
 
void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(ImmPtr(globalObject));
    stubCall.addArgument(ImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(dst);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

    xor32(Imm32(JSValue::FalseTag), regT0);
    addSlowCase(branchTest32(NonZero, regT0, Imm32(~1)));
    xor32(Imm32(JSValue::TrueTag), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}
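
// [Editor's note — illustrative aside] The xor sequence above works because,
// as the or32(Imm32(JSValue::FalseTag), ...) idiom elsewhere in this file
// implies, TrueTag and FalseTag differ only in bit 0. Worked through:
//
//     // tag ^ FalseTag             -> 0 (was false) or 1 (was true); any other
//     //                               tag leaves a bit set outside ~1 -> slow case
//     // (tag ^ FalseTag) ^ TrueTag -> TrueTag (was false) or FalseTag (was true)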
 
void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    Jump isTrue = branch32(Equal, regT1, Imm32(JSValue::TrueTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::FalseTag)), target + 2);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isTrue2 = branch32(NotEqual, regT0, Imm32(0));
    addJump(jump(), target + 2);

    if (supportsFloatingPoint()) {
        isNotInteger.link(this);

        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

        zeroDouble(fpRegT0);
        emitLoadDouble(cond, fpRegT1);
        addJump(branchDouble(DoubleEqual, fpRegT0, fpRegT1), target + 2);
    } else
        addSlowCase(isNotInteger);

    isTrue.link(this);
    isTrue2.link(this);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target + 2); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    Jump isFalse = branch32(Equal, regT1, Imm32(JSValue::FalseTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::TrueTag)), target + 2);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isFalse2 = branch32(Equal, regT0, Imm32(0));
    addJump(jump(), target + 2);

    if (supportsFloatingPoint()) {
        isNotInteger.link(this);

        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

        zeroDouble(fpRegT0);
        emitLoadDouble(cond, fpRegT1);
        addJump(branchDouble(DoubleNotEqual, fpRegT0, fpRegT1), target + 2);
    } else
        addSlowCase(isNotInteger);

    isFalse.link(this);
    isFalse2.link(this);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 2);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    addJump(branchTest32(NonZero, regT1), target + 2);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    addJump(branchTest32(Zero, regT1), target + 2);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)), target + 3);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(ptr)), target + 3);
}

void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target + 2);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
}
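
// [Editor's note — illustrative aside] op_jsr / op_sret are the bytecode
// subroutine pair used for finally blocks. emit_op_jsr plants a patchable
// pointer store into the register file, jumps to the subroutine, and appends a
// JSRInfo so that at link time the store is filled with the address of the
// label just past the jump; emit_op_sret jumps back through that slot. A
// hedged sketch of the bytecode shape:
//
//     // try { ... } finally { ... }   compiles (roughly) to:
//     //     jsr  r_ret, finallyBlock   ; store resume address in r_ret, jump
//     //     ...                        ; execution resumes here after sret
//     // finallyBlock:
//     //     ...
//     //     sret r_ret                 ; indirect jump back through r_ret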
 
void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

    set8(Equal, regT0, regT2, regT0);
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}
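
// [Editor's note — illustrative aside] set8(Equal, regT0, regT2, regT0) leaves
// 0 or 1 in regT0; OR-ing in JSValue::FalseTag turns that directly into a
// boolean tag, again relying on TrueTag == (FalseTag | 1):
//
//     // payloadsEqual ? (FalseTag | 1) /* == TrueTag */ : FalseTag
//
// emitStoreBool then writes the result, suggesting a boolean's identity is
// carried by its tag alone in this encoding.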
 
void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

    set8(NotEqual, regT0, regT2, regT0);
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(Imm32(0x1), regT0);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoadTag(src1, regT0);
    emitLoadTag(src2, regT1);

    // Jump to a slow case if either operand is double, or if both operands are
    // cells and/or Int32s.
    move(regT0, regT2);
    and32(regT1, regT2);
    addSlowCase(branch32(Below, regT2, Imm32(JSValue::LowestTag)));
    addSlowCase(branch32(AboveOrEqual, regT2, Imm32(JSValue::CellTag)));

    if (type == OpStrictEq)
        set8(Equal, regT0, regT1, regT0);
    else
        set8(NotEqual, regT0, regT1, regT0);

    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}
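
// [Editor's note — illustrative aside] The tag-AND trick above: assuming the
// non-double tags occupy the very top of the 32-bit range (as the LowestTag
// comparisons in this file imply), AND-ing two such tags cannot fall below
// JSValue::LowestTag, so a result Below LowestTag means at least one operand
// was a double. Conversely, a result AboveOrEqual CellTag means both tags were
// CellTag or Int32Tag, where tag equality alone cannot decide strict equality
// (payloads matter). Both cases defer to the slow path, leaving only values
// whose tag fully determines the result (undefined, null, booleans):
//
//     // fast path: result = (tag1 == tag2)   // valid only for tag-only values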
 
1069
 
 
1070
void JIT::emit_op_stricteq(Instruction* currentInstruction)
 
1071
{
 
1072
    compileOpStrictEq(currentInstruction, OpStrictEq);
 
1073
}
 
1074
 
 
1075
void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
 
1076
{
 
1077
    unsigned dst = currentInstruction[1].u.operand;
 
1078
    unsigned src1 = currentInstruction[2].u.operand;
 
1079
    unsigned src2 = currentInstruction[3].u.operand;
 
1080
 
 
1081
    linkSlowCase(iter);
 
1082
    linkSlowCase(iter);
 
1083
 
 
1084
    JITStubCall stubCall(this, cti_op_stricteq);
 
1085
    stubCall.addArgument(src1);
 
1086
    stubCall.addArgument(src2);
 
1087
    stubCall.call(dst);
 
1088
}
 
1089
 
 
1090
void JIT::emit_op_nstricteq(Instruction* currentInstruction)
 
1091
{
 
1092
    compileOpStrictEq(currentInstruction, OpNStrictEq);
 
1093
}
 
1094
 
 
1095
void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
 
1096
{
 
1097
    unsigned dst = currentInstruction[1].u.operand;
 
1098
    unsigned src1 = currentInstruction[2].u.operand;
 
1099
    unsigned src2 = currentInstruction[3].u.operand;
 
1100
 
 
1101
    linkSlowCase(iter);
 
1102
    linkSlowCase(iter);
 
1103
 
 
1104
    JITStubCall stubCall(this, cti_op_nstricteq);
 
1105
    stubCall.addArgument(src1);
 
1106
    stubCall.addArgument(src2);
 
1107
    stubCall.call(dst);
 
1108
}
 
1109
 
 
1110
void JIT::emit_op_eq_null(Instruction* currentInstruction)
 
1111
{
 
1112
    unsigned dst = currentInstruction[1].u.operand;
 
1113
    unsigned src = currentInstruction[2].u.operand;
 
1114
 
 
1115
    emitLoad(src, regT1, regT0);
 
1116
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
 
1117
 
 
1118
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
 
1119
    setTest8(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);
 
1120
 
 
1121
    Jump wasNotImmediate = jump();
 
1122
 
 
1123
    isImmediate.link(this);
 
1124
 
 
1125
    set8(Equal, regT1, Imm32(JSValue::NullTag), regT2);
 
1126
    set8(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
 
1127
    or32(regT2, regT1);
 
1128
 
 
1129
    wasNotImmediate.link(this);
 
1130
 
 
1131
    or32(Imm32(JSValue::FalseTag), regT1);
 
1132
 
 
1133
    emitStoreBool(dst, regT1);
 
1134
}
 
1135
 
 
1136
void JIT::emit_op_neq_null(Instruction* currentInstruction)
 
1137
{
 
1138
    unsigned dst = currentInstruction[1].u.operand;
 
1139
    unsigned src = currentInstruction[2].u.operand;
 
1140
 
 
1141
    emitLoad(src, regT1, regT0);
 
1142
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
 
1143
 
 
1144
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
 
1145
    setTest8(Zero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);
 
1146
 
 
1147
    Jump wasNotImmediate = jump();
 
1148
 
 
1149
    isImmediate.link(this);
 
1150
 
 
1151
    set8(NotEqual, regT1, Imm32(JSValue::NullTag), regT2);
 
1152
    set8(NotEqual, regT1, Imm32(JSValue::UndefinedTag), regT1);
 
1153
    and32(regT2, regT1);
 
1154
 
 
1155
    wasNotImmediate.link(this);
 
1156
 
 
1157
    or32(Imm32(JSValue::FalseTag), regT1);
 
1158
 
 
1159
    emitStoreBool(dst, regT1);
 
1160
}
 
1161
 
 
1162
void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
 
1163
{
 
1164
    JITStubCall stubCall(this, cti_op_resolve_with_base);
 
1165
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
 
1166
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
 
1167
    stubCall.call(currentInstruction[2].u.operand);
 
1168
}
 
1169
 
 
1170
void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
 
1171
{
 
1172
    JITStubCall stubCall(this, cti_op_new_func_exp);
 
1173
    stubCall.addArgument(ImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
 
1174
    stubCall.call(currentInstruction[1].u.operand);
 
1175
}
 
1176
 
 
1177
void JIT::emit_op_new_regexp(Instruction* currentInstruction)
 
1178
{
 
1179
    JITStubCall stubCall(this, cti_op_new_regexp);
 
1180
    stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
 
1181
    stubCall.call(currentInstruction[1].u.operand);
 
1182
}
 
1183
 
 
1184
void JIT::emit_op_throw(Instruction* currentInstruction)
 
1185
{
 
1186
    unsigned exception = currentInstruction[1].u.operand;
 
1187
    JITStubCall stubCall(this, cti_op_throw);
 
1188
    stubCall.addArgument(exception);
 
1189
    stubCall.call();
 
1190
 
 
1191
#ifndef NDEBUG
 
1192
    // cti_op_throw always changes it's return address,
 
1193
    // this point in the code should never be reached.
 
1194
    breakpoint();
 
1195
#endif
 
1196
}
 
1197
 
 
1198
void JIT::emit_op_next_pname(Instruction* currentInstruction)
 
1199
{
 
1200
    int dst = currentInstruction[1].u.operand;
 
1201
    int iter = currentInstruction[2].u.operand;
 
1202
    int target = currentInstruction[3].u.operand;
 
1203
 
 
1204
    load32(Address(callFrameRegister, (iter * sizeof(Register))), regT0);
 
1205
 
 
1206
    JITStubCall stubCall(this, cti_op_next_pname);
 
1207
    stubCall.addArgument(regT0);
 
1208
    stubCall.call();
 
1209
 
 
1210
    Jump endOfIter = branchTestPtr(Zero, regT0);
 
1211
    emitStore(dst, regT1, regT0);
 
1212
    map(m_bytecodeIndex + OPCODE_LENGTH(op_next_pname), dst, regT1, regT0);
 
1213
    addJump(jump(), target + 3);
 
1214
    endOfIter.link(this);
 
1215
}
 
1216
 
 
1217
void JIT::emit_op_push_scope(Instruction* currentInstruction)
 
1218
{
 
1219
    JITStubCall stubCall(this, cti_op_push_scope);
 
1220
    stubCall.addArgument(currentInstruction[1].u.operand);
 
1221
    stubCall.call(currentInstruction[1].u.operand);
 
1222
}
 
1223
 
 
1224
void JIT::emit_op_pop_scope(Instruction*)
 
1225
{
 
1226
    JITStubCall(this, cti_op_pop_scope).call();
 
1227
}
 
1228
 
 
1229
void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
 
1230
{
 
1231
    int dst = currentInstruction[1].u.operand;
 
1232
    int src = currentInstruction[2].u.operand;
 
1233
 
 
1234
    emitLoad(src, regT1, regT0);
 
1235
 
 
1236
    Jump isInt32 = branch32(Equal, regT1, Imm32(JSValue::Int32Tag));
 
1237
    addSlowCase(branch32(AboveOrEqual, regT1, Imm32(JSValue::EmptyValueTag)));
 
1238
    isInt32.link(this);
 
1239
 
 
1240
    if (src != dst)
 
1241
        emitStore(dst, regT1, regT0);
 
1242
    map(m_bytecodeIndex + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
 
1243
}
 
1244
 
 
1245
void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
 
1246
{
 
1247
    int dst = currentInstruction[1].u.operand;
 
1248
 
 
1249
    linkSlowCase(iter);
 
1250
 
 
1251
    JITStubCall stubCall(this, cti_op_to_jsnumber);
 
1252
    stubCall.addArgument(regT1, regT0);
 
1253
    stubCall.call(dst);
 
1254
}
 
1255
 
 
1256
void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
 
1257
{
 
1258
    JITStubCall stubCall(this, cti_op_push_new_scope);
 
1259
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
 
1260
    stubCall.addArgument(currentInstruction[3].u.operand);
 
1261
    stubCall.call(currentInstruction[1].u.operand);
 
1262
}
 
1263
 
 
1264
void JIT::emit_op_catch(Instruction* currentInstruction)
 
1265
{
 
1266
    unsigned exception = currentInstruction[1].u.operand;
 
1267
 
 
1268
    // This opcode only executes after a return from cti_op_throw.
 
1269
 
 
1270
    // cti_op_throw may have taken us to a call frame further up the stack; reload
 
1271
    // the call frame pointer to adjust.
 
1272
    peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
 
1273
 
 
1274
    // Now store the exception returned by cti_op_throw.
 
1275
    emitStore(exception, regT1, regT0);
 
1276
    map(m_bytecodeIndex + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
 
1277
#ifdef QT_BUILD_SCRIPT_LIB
 
1278
    JITStubCall stubCall(this, cti_op_debug_catch);
 
1279
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
 
1280
    stubCall.call();
 
1281
#endif
 
1282
}
 
void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand + 2);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}
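
// All three switch flavours above share one shape: record a SwitchRecord so
// that, at link time, each bytecode branch offset in the jump table can be
// resolved to a machine-code location (growing ctiOffsets to
// branchOffsets.size() reserves the slots for that), then call a cti stub
// that picks the destination at runtime and returns it in regT0 for an
// indirect jump.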
 
void JIT::emit_op_new_error(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned type = currentInstruction[2].u.operand;
    unsigned message = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_new_error);
    stubCall.addArgument(Imm32(type));
    stubCall.addArgument(m_codeBlock->getConstant(message));
    stubCall.addArgument(Imm32(m_bytecodeIndex));
    stubCall.call(dst);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[4].u.operand));
    stubCall.call();
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(i, jsUndefined());
}

void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
{
    emit_op_enter(currentInstruction);

    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_create_arguments(Instruction*)
{
    Jump argsCreated = branch32(NotEqual, tagFor(RegisterFile::ArgumentsRegister, callFrameRegister), Imm32(JSValue::EmptyValueTag));

    // If we get here the arguments pointer is a null cell - i.e. arguments need lazy creation.
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();

    argsCreated.link(this);
}
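
// The EmptyValueTag check above makes 'arguments' creation lazy: the fast
// path falls straight through once the object already exists, so the stub is
// only reached the first time. The m_numParameters == 1 case (just 'this')
// takes the cheaper no-params stub.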
 
void JIT::emit_op_init_arguments(Instruction*)
{
    emitStore(RegisterFile::ArgumentsRegister, JSValue(), callFrameRegister);
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT1, regT0);

    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));

    map(m_bytecodeIndex + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
}

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(thisRegister);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

#else // USE(JSVALUE32_64)

#define RECORD_JUMP_TARGET(targetOffset) \
    do { m_labels[m_bytecodeIndex + (targetOffset)].used(); } while (false)

void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
{
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (2) The second function provides fast property access for string length
    Label stringLengthBegin = align();

    // Check eax is a string
    Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! - get the length from the UString.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSString, m_value) + OBJECT_OFFSETOF(UString, m_rep)), regT0);
    load32(Address(regT0, OBJECT_OFFSETOF(UString::Rep, len)), regT0);

    Jump string_failureCases3 = branch32(Above, regT0, Imm32(JSImmediate::maxImmediateInt));

    // regT0 contains a 64-bit value (positive, zero-extended), so we don't need to sign extend here.
    emitFastArithIntToImmNoCheck(regT0, regT0);

    ret();
#endif

    // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
    COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);

    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualCallLinkBegin = align();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump isNativeFunc2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));

    Jump hasCodeBlock2 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction2 = call();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    hasCodeBlock2.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callArityCheck2 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    arityCheckOkay2.link(this);

    isNativeFunc2.link(this);

    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);

    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump isNativeFunc3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));

    Jump hasCodeBlock3 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction1 = call();
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callArityCheck1 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    arityCheckOkay3.link(this);

    isNativeFunc3.link(this);

    compileOpCallInitializeCallFrame();
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCode)), regT0);
    jump(regT0);

    Label nativeCallThunk = align();
    preserveReturnAddressAfterCall(regT0);
    emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address

    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

#if PLATFORM(X86_64)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, X86Registers::ecx);

    // Allocate stack space for our arglist
    subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
    COMPILE_ASSERT((sizeof(ArgList) & 0xf) == 0, ArgList_should_by_16byte_aligned);

    // Set up arguments
    subPtr(Imm32(1), X86Registers::ecx); // Don't include 'this' in argcount

    // Push argcount
    storePtr(X86Registers::ecx, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in edx
    addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), callFrameRegister, X86Registers::edx);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (ecx)
    mul32(Imm32(sizeof(Register)), X86Registers::ecx, X86Registers::ecx);
    subPtr(X86Registers::ecx, X86Registers::edx);

    // push pointer to arguments
    storePtr(X86Registers::edx, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));

    // ArgList is passed by reference so is stackPointerRegister
    move(stackPointerRegister, X86Registers::ecx);

    // edx currently points to the first argument, edx - sizeof(Register) points to 'this'
    loadPtr(Address(X86Registers::edx, -(int32_t)sizeof(Register)), X86Registers::edx);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::esi);

    move(callFrameRegister, X86Registers::edi);

    call(Address(X86Registers::esi, OBJECT_OFFSETOF(JSFunction, m_data)));

    addPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
#elif PLATFORM(X86)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    /* We have two structs that we use to describe the stack frame we set up for our
     * call to native code.  NativeCallFrameStructure describes how we set up the stack
     * in advance of the call.  NativeFunctionCalleeSignature describes the callframe
     * as the native code expects it.  We do this because we are using the fastcall
     * calling convention, which results in the callee popping its arguments off the
     * stack, but not the rest of the callframe, so we need a nice way to ensure we
     * increment the stack pointer by the right amount after the call.
     */
#if COMPILER(MSVC) || PLATFORM(LINUX)
    struct NativeCallFrameStructure {
      //  CallFrame* callFrame; // passed in EDX
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
        JSValue result;
    };
    struct NativeFunctionCalleeSignature {
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
    };
#else
    struct NativeCallFrameStructure {
      //  CallFrame* callFrame; // passed in ECX
      //  JSObject* callee; // passed in EDX
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
    };
    struct NativeFunctionCalleeSignature {
        JSValue thisValue;
        ArgList* argPointer;
    };
#endif
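
    // A rough picture of the frame built below, assuming the MSVC/Linux
    // variant of NativeCallFrameStructure (lower addresses at the top):
    //
    //     esp -> callee       \
    //            thisValue     | NativeFunctionCalleeSignature: popped by
    //            argPointer   /  the fastcall callee on return
    //            args            (the ArgList: argument pointer and count)
    //            result          (the returned JSValue is written here)
    //
    // This is a sketch for orientation only; the authoritative layout is the
    // struct definition above.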
 
    const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
    // Allocate system stack frame
    subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);

    // Set up arguments
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    // push argcount
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));

    // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));

    // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
    loadPtr(Address(regT1, -(int)sizeof(Register)), regT1);
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue)));

#if COMPILER(MSVC) || PLATFORM(LINUX)
    // Plant the address of the result slot; the returned JSValue (a non-POD type) is written there.
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86Registers::ecx);

    // Plant callee
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::eax);
    storePtr(X86Registers::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));

    // Plant callframe
    move(callFrameRegister, X86Registers::edx);

    call(Address(X86Registers::eax, OBJECT_OFFSETOF(JSFunction, m_data)));

    // JSValue is a non-POD type
    loadPtr(Address(X86Registers::eax), X86Registers::eax);
#else
    // Plant callee
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::edx);

    // Plant callframe
    move(callFrameRegister, X86Registers::ecx);
    call(Address(X86Registers::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
#endif

    // We've put a few temporaries on the stack in addition to the actual arguments
    // so pull them off now
    addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);

#elif PLATFORM(ARM_TRADITIONAL)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    // Allocate stack space for our arglist
    COMPILE_ASSERT((sizeof(ArgList) & 0x7) == 0, ArgList_should_by_8byte_aligned);
    subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);

    // Set up arguments
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    // Push argcount
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    move(callFrameRegister, regT1);
    sub32(Imm32(RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT1)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);

    // push pointer to arguments
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));

    // Setup arg3: regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
    loadPtr(Address(regT1, -(int32_t)sizeof(Register)), regT2);

    // Setup arg2:
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT1);

    // Setup arg1:
    move(callFrameRegister, regT0);

    // Setup arg4: This is a plain hack
    move(stackPointerRegister, ARMRegisters::S0);

    move(ctiReturnRegister, ARMRegisters::lr);
    call(Address(regT1, OBJECT_OFFSETOF(JSFunction, m_data)));

    addPtr(Imm32(sizeof(ArgList)), stackPointerRegister);

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    breakpoint();
#endif

    // Check for an exception
    loadPtr(&(globalData->exception), regT2);
    Jump exceptionHandler = branchTestPtr(NonZero, regT2);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // Handle an exception
    exceptionHandler.link(this);
    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    move(ImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    move(ImmPtr(reinterpret_cast<void*>(ctiVMThrowTrampoline)), regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
    restoreReturnAddressBeforeReturn(regT2);
    ret();

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
    patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
#endif

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    *ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
    *ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
    *ctiNativeCallThunk = trampolineAt(finalCode, nativeCallThunk);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    *ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
#else
    UNUSED_PARAM(ctiStringLengthTrampoline);
#endif
}
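
// The trampoline generator above finishes by copying the assembled code into
// an ExecutablePool via LinkBuffer, patching each recorded Call site to its
// C stub, and handing back entry points with trampolineAt(). Up to
// finalizeCode() the Labels and Calls recorded earlier are just offsets into
// the assembler buffer; nothing is executable before that point.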
 
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src)) {
        storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
        if (dst == m_lastResultBytecodeRegister)
            killLastResultRegister();
    } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
        // If either the src or dst is the cached register, go through the
        // get/put registers to make sure we track this correctly.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        // Perform the copy via regT1; do not disturb any mapping in regT0.
        loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
        storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
    }
}
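
// emit_op_mov is careful around the one-entry "last result" cache: regT0 may
// still be mapped to m_lastResultBytecodeRegister, so plain moves are routed
// through regT1, and the cache is killed or refreshed whenever src or dst
// aliases it. Getting this wrong would leave a stale register-to-virtual-
// register mapping behind.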
 
void JIT::emit_op_end(Instruction* currentInstruction)
{
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_end).call();
    ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target + 1);
    RECORD_JUMP_TARGET(target + 1);
}

void JIT::emit_op_loop(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target + 1);
}

void JIT::emit_op_loop_if_less(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        emitGetVirtualRegister(op1, regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
#if USE(JSVALUE64)
        int32_t op2imm = getConstantOperandImmediateInt(op2);
#else
        int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
#endif
        addJump(branch32(LessThan, regT0, Imm32(op2imm)), target + 3);
    } else if (isOperandConstantImmediateInt(op1)) {
        emitGetVirtualRegister(op2, regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
#if USE(JSVALUE64)
        int32_t op1imm = getConstantOperandImmediateInt(op1);
#else
        int32_t op1imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op1)));
#endif
        addJump(branch32(GreaterThan, regT0, Imm32(op1imm)), target + 3);
    } else {
        emitGetVirtualRegisters(op1, regT0, op2, regT1);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT1);
        addJump(branch32(LessThan, regT0, regT1), target + 3);
    }
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        emitGetVirtualRegister(op1, regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
#if USE(JSVALUE64)
        int32_t op2imm = getConstantOperandImmediateInt(op2);
#else
        int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
#endif
        addJump(branch32(LessThanOrEqual, regT0, Imm32(op2imm)), target + 3);
    } else {
        emitGetVirtualRegisters(op1, regT0, op2, regT1);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT1);
        addJump(branch32(LessThanOrEqual, regT0, regT1), target + 3);
    }
}
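
// Every backward-branching loop opcode here (op_loop, op_loop_if_less,
// op_loop_if_lesseq, and op_loop_if_true below) starts with
// emitTimeoutCheck(), so a script cannot spin forever without the host
// getting a chance to interrupt it; forward-only jumps like op_jmp skip the
// check.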
 
void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT1);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT2);

    // Check that baseVal & proto are cells.
    emitJumpSlowCaseIfNotJSCell(regT0);
    emitJumpSlowCaseIfNotJSCell(regT1);

    // Check that baseVal is an object, that it 'ImplementsHasInstance' but that it does not 'OverridesHasInstance'.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
    addSlowCase(branchTest32(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));

    // If value is not an Object, return false.
    Jump valueIsImmediate = emitJumpIfNotJSCell(regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    Jump valueIsNotObject = branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType));

    // Check proto is object.
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branch32(NotEqual, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(ImmPtr(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchPtr(NotEqual, regT2, ImmPtr(JSValue::encode(jsNull())), loop);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    valueIsImmediate.link(this);
    valueIsNotObject.link(this);
    move(ImmPtr(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
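
// The fast path above is, in effect, the default hasInstance loop (a sketch;
// the slow cases cover callees with custom hasInstance behaviour):
//
//     var o = value;
//     while ((o = o.[[Prototype]]) !== null) {
//         if (o === proto)
//             return true;
//     }
//     return false;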
 
void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_load_varargs(Instruction* currentInstruction)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;

    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(Imm32(argsOffset));
    stubCall.call();
    // Stores a naked int32 in the register file.
    store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCallVarargs(currentInstruction);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[2].u.jsCell);
    move(ImmPtr(globalObject), regT0);
    emitGetVariableObjectRegister(regT0, currentInstruction[3].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
    JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[1].u.jsCell);
    move(ImmPtr(globalObject), regT0);
    emitPutVariableObjectRegister(regT1, regT0, currentInstruction[2].u.operand);
}

void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
    while (skip--)
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT0);
    emitGetVariableObjectRegister(regT0, currentInstruction[2].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1);
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
    while (skip--)
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);

    loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
    emitPutVariableObjectRegister(regT0, regT1, currentInstruction[1].u.operand);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_tear_off_arguments(Instruction*)
{
    JITStubCall(this, cti_op_tear_off_arguments).call();
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
#ifdef QT_BUILD_SCRIPT_LIB
    JITStubCall stubCall(this, cti_op_debug_return);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
#endif
    // We could JIT generate the deref, only calling out to C when the refcount hits zero.
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_ret_scopeChain).call();

    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_construct_verify(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo) + OBJECT_OFFSETOF(TypeInfo, m_type)), Imm32(ObjectType)));
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_loop_if_true(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0))));
    addJump(emitJumpIfImmediateInteger(regT0), target + 2);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))), target + 2);
    addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction)
{
    // Fast case
    void* globalObject = currentInstruction[2].u.jsCell;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Check Structure of global object
    move(ImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    Jump noMatch = branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))); // Structures don't match

    // Load cached property
    // Assume that the global object always uses external storage.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT0);
    load32(offsetAddr, regT1);
    loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
    Jump end = jump();

    // Slow case
    noMatch.link(this);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(ImmPtr(globalObject));
    stubCall.addArgument(ImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(currentInstruction[1].u.operand);
    end.link(this);
}
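
// op_resolve_global is an inline cache: the CodeBlock's GlobalResolveInfo
// slot holds the Structure and storage offset observed on a previous lookup.
// While the global object's Structure still matches, the resolve is two
// loads plus an indexed read from external storage; on a mismatch the stub
// repopulates the cache for the next pass.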
 
void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, Imm32(static_cast<int32_t>(~JSImmediate::ExtendedPayloadBitBoolValue))));
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool | JSImmediate::ExtendedPayloadBitBoolValue)), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
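
// op_not works purely on the immediate encoding: the first xorPtr clears the
// bool tag, the branchTestPtr sends anything with bits outside the boolean
// value bit (i.e. anything that was not a boolean immediate) to the slow
// case, and the second xorPtr re-applies the tag while flipping the value
// bit - toggling true <-> false without ever branching on the value itself.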
 
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0)))), target + 2);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))), target + 2);
    addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
    RECORD_JUMP_TARGET(target + 2);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNull()))), target + 2);

    wasNotImmediate.link(this);
    RECORD_JUMP_TARGET(target + 2);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsNull()))), target + 2);

    wasNotImmediate.link(this);
    RECORD_JUMP_TARGET(target + 2);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue(ptr)))), target + 3);

    RECORD_JUMP_TARGET(target + 3);
}

void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target + 2);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
    killLastResultRegister();
    RECORD_JUMP_TARGET(target + 2);
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
    killLastResultRegister();
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    set32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitnot(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    emitJumpSlowCaseIfNotImmediateInteger(regT0);
#if USE(JSVALUE64)
    not32(regT0);
    emitFastArithIntToImmNoCheck(regT0, regT0);
#else
    xorPtr(Imm32(~JSImmediate::TagTypeNumber), regT0);
#endif
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0))));
    addJump(emitJumpIfImmediateInteger(regT0), target + 2);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))), target + 2);
    addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
    RECORD_JUMP_TARGET(target + 2);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    set32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xorPtr(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    orPtr(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_next_pname);
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    stubCall.call();
    Jump endOfIter = branchTestPtr(Zero, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
    addJump(jump(), currentInstruction[3].u.operand + 3);
    endOfIter.link(this);
}

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump to a slow case if either operand is a number, or if both are JSCell*s.
    move(regT0, regT2);
    orPtr(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));
    addSlowCase(emitJumpIfImmediateNumber(regT2));

    if (type == OpStrictEq)
        set32(Equal, regT1, regT0, regT0);
    else
        set32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}
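
// As the comment in compileOpStrictEq notes, or-ing the operands into regT2
// lets a single pair of tests detect the cases a raw compare of the encoded
// bits cannot decide - both operands being cells (two distinct cells can be
// equal strings) or either being a number (which may be boxed). Everything
// else is strictly equal exactly when the encoded values are bit-equal.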
 
2431
 
 
2432
void JIT::emit_op_stricteq(Instruction* currentInstruction)
 
2433
{
 
2434
    compileOpStrictEq(currentInstruction, OpStrictEq);
 
2435
}
 
2436
 
 
2437
void JIT::emit_op_nstricteq(Instruction* currentInstruction)
 
2438
{
 
2439
    compileOpStrictEq(currentInstruction, OpNStrictEq);
 
2440
}
 
2441
 
 
2442
void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
 
2443
{
 
2444
    int srcVReg = currentInstruction[2].u.operand;
 
2445
    emitGetVirtualRegister(srcVReg, regT0);
 
2446
    
 
2447
    Jump wasImmediate = emitJumpIfImmediateInteger(regT0);
 
2448
 
 
2449
    emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
 
2450
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
 
2451
    addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(NumberType)));
 
2452
    
 
2453
    wasImmediate.link(this);
 
2454
 
 
2455
    emitPutVirtualRegister(currentInstruction[1].u.operand);
 
2456
}
 
2457
 
 
2458
void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
 
2459
{
 
2460
    JITStubCall stubCall(this, cti_op_push_new_scope);
 
2461
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
 
2462
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
 
2463
    stubCall.call(currentInstruction[1].u.operand);
 
2464
}
 
2465
 
 
2466
void JIT::emit_op_catch(Instruction* currentInstruction)
 
2467
{
 
2468
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
 
2469
    peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
 
2470
    emitPutVirtualRegister(currentInstruction[1].u.operand);
 
2471
#ifdef QT_BUILD_SCRIPT_LIB
 
2472
    JITStubCall stubCall(this, cti_op_debug_catch);
 
2473
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
 
2474
    stubCall.call();
 
2475
#endif
 
2476
}
 
2477
 
 
2478
void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
 
2479
{
 
2480
    JITStubCall stubCall(this, cti_op_jmp_scopes);
 
2481
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
 
2482
    stubCall.call();
 
2483
    addJump(jump(), currentInstruction[2].u.operand + 2);
 
2484
    RECORD_JUMP_TARGET(currentInstruction[2].u.operand + 2);
 
2485
}
 
2486
 
 
2487
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

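// The string variant differs slightly: a StringJumpTable keeps its per-case
// CTI offsets alongside the branch offsets in its hash table, so there is no
// separate ctiOffsets.grow() step, and the SwitchRecord constructor needs no
// kind tag.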
void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_new_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_error);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[3].u.operand))));
    stubCall.addArgument(Imm32(m_bytecodeIndex));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[4].u.operand));
    stubCall.call();
}

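// op_eq_null / op_neq_null: for cells the result is driven by the
// MasqueradesAsUndefined type-info flag (masquerading objects compare equal
// to null); for immediates, masking off ExtendedTagBitUndefined folds
// undefined onto the FullTagTypeNull encoding, so a single comparison covers
// both null and undefined.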
void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    setTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    setPtr(Equal, regT0, Imm32(JSImmediate::FullTagTypeNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    setTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    setPtr(NotEqual, regT0, Imm32(JSImmediate::FullTagTypeNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);
}

void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);

    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
}

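// op_create_arguments is lazy: if the arguments register already holds a
// non-null value the object exists and the stub call is skipped; otherwise a
// cheaper stub is used for functions that declare no parameters
// (m_numParameters == 1 counts only the implicit 'this').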
void JIT::emit_op_create_arguments(Instruction*)
{
    Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * RegisterFile::ArgumentsRegister));
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();
    argsCreated.link(this);
}

void JIT::emit_op_init_arguments(Instruction*)
{
    storePtr(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * RegisterFile::ArgumentsRegister));
}

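// op_convert_this only needs the slow path when 'this' is not a cell, or is a
// cell whose Structure sets NeedsThisConversion; ordinary objects fall
// through with no work.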
void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    addSlowCase(branchTest32(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    // Skip the stub call entirely when no profiler is attached.
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}


// Slow cases

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_construct_verify(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check
    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.call(dst);
}

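// Like the fast path it repairs, the loop_if_less slow case is specialized
// three ways: when either operand was a constant immediate int only one
// slow-case branch was registered, so only one linkSlowCase() is consumed;
// the generic form consumes two. In every arm the stub leaves the comparison
// result in regT0, which drives the jump back to the hot path.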
void JIT::emitSlow_op_loop_if_less(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_less);
        stubCall.addArgument(regT0);
        stubCall.addArgument(op2, regT2);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    } else if (isOperandConstantImmediateInt(op1)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_less);
        stubCall.addArgument(op1, regT2);
        stubCall.addArgument(regT0);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    } else {
        linkSlowCase(iter);
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_less);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    }
}

void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(currentInstruction[2].u.operand, regT2);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    } else {
        linkSlowCase(iter);
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    }
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    linkSlowCase(iter); // in vector check

    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(regT0);
    stubPutByValCall.addArgument(property, regT2);
    stubPutByValCall.addArgument(value, regT2);
    stubPutByValCall.call();
}

void JIT::emitSlow_op_loop_if_true(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand + 2);
}

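// The op_not fast path flips the value's bool-tag bit before its type check,
// so on entry here regT0 holds the operand with FullTagTypeBool already
// XORed in; the xorPtr below restores the original value before passing it
// to the stub.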
void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand + 2); // inverted!
}

void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitnot);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand + 2);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

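// op_neq has no stub of its own: the slow case reuses cti_op_eq and inverts
// the low bit of the boolean result with xor32 before tagging it.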
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(Imm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

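// The five linkSlowCase() calls must match, one for one and in emission
// order, the slow-case branches registered by the op_instanceof fast path;
// the stub then re-evaluates the whole instanceof relation from the original
// operands.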
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.addArgument(currentInstruction[4].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

#endif // USE(JSVALUE32_64)

} // namespace JSC

#endif // ENABLE(JIT)