300
184
#if ENABLE(OPCODE_SAMPLING)
301
185
if (m_bytecodeIndex > 0) // Avoid the overhead of sampling op_enter twice.
302
store32(m_interpreter->sampler()->encodeSample(currentInstruction), m_interpreter->sampler()->sampleSlot());
186
sampleInstruction(currentInstruction);
189
#if !USE(JSVALUE32_64)
190
if (m_labels[m_bytecodeIndex].isUsed())
191
killLastResultRegister();
305
194
m_labels[m_bytecodeIndex] = label();
306
OpcodeID opcodeID = m_interpreter->getOpcodeID(currentInstruction->u.opcode);
310
emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
311
emitPutVirtualRegister(currentInstruction[1].u.operand);
315
compileFastArith_op_add(currentInstruction);
319
if (m_codeBlock->needsFullScopeChain())
320
emitCTICall(Interpreter::cti_op_end);
321
emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
322
push(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
327
unsigned target = currentInstruction[1].u.operand;
328
addJump(jump(), target + 1);
332
compileFastArith_op_pre_inc(currentInstruction[1].u.operand);
333
NEXT_OPCODE(op_pre_inc);
336
emitSlowScriptCheck();
338
unsigned target = currentInstruction[1].u.operand;
339
addJump(jump(), target + 1);
342
case op_loop_if_less: {
343
emitSlowScriptCheck();
345
unsigned op1 = currentInstruction[1].u.operand;
346
unsigned op2 = currentInstruction[2].u.operand;
347
unsigned target = currentInstruction[3].u.operand;
348
if (isOperandConstantImmediateInt(op2)) {
349
emitGetVirtualRegister(op1, X86::eax);
350
emitJumpSlowCaseIfNotImmNum(X86::eax);
351
#if USE(ALTERNATE_JSIMMEDIATE)
352
int32_t op2imm = JSImmediate::intValue(getConstantOperand(op2));
354
int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
356
addJump(jl32(X86::eax, Imm32(op2imm)), target + 3);
358
emitGetVirtualRegisters(op1, X86::eax, op2, X86::edx);
359
emitJumpSlowCaseIfNotImmNum(X86::eax);
360
emitJumpSlowCaseIfNotImmNum(X86::edx);
361
addJump(jl32(X86::eax, X86::edx), target + 3);
363
NEXT_OPCODE(op_loop_if_less);
365
case op_loop_if_lesseq: {
    // Loop back-edge: honor the interpreter's slow-script (timeout) check
    // before taking the jump, same as the other op_loop_* cases.
    emitSlowScriptCheck();

    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        // Fast path: rhs is a constant immediate int, so compare eax
        // against the decoded constant directly.
        emitGetVirtualRegister(op1, X86::eax);
        emitJumpSlowCaseIfNotImmNum(X86::eax);
#if USE(ALTERNATE_JSIMMEDIATE)
        int32_t op2imm = JSImmediate::intValue(getConstantOperand(op2));
#else
        int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
#endif
        // target is relative to the 3rd operand's slot, hence the +3
        // (matches the op_loop_if_less case above).
        addJump(jle32(X86::eax, Imm32(op2imm)), target + 3);
    } else {
        // General path: both operands must be immediate numbers; anything
        // else defers to the slow case.
        emitGetVirtualRegisters(op1, X86::eax, op2, X86::edx);
        emitJumpSlowCaseIfNotImmNum(X86::eax);
        emitJumpSlowCaseIfNotImmNum(X86::edx);
        addJump(jle32(X86::eax, X86::edx), target + 3);
    }
    // Was NEXT_OPCODE(op_loop_if_less) — copy/paste slip from the previous
    // case; must name this opcode so the bytecode cursor advances by
    // op_loop_if_lesseq's own length.
    NEXT_OPCODE(op_loop_if_lesseq);
}
388
case op_new_object: {
389
emitCTICall(Interpreter::cti_op_new_object);
390
emitPutVirtualRegister(currentInstruction[1].u.operand);
391
NEXT_OPCODE(op_new_object);
394
compilePutByIdHotPath(currentInstruction[1].u.operand, &(m_codeBlock->identifier(currentInstruction[2].u.operand)), currentInstruction[3].u.operand, propertyAccessInstructionIndex++);
395
NEXT_OPCODE(op_put_by_id);
398
compileGetByIdHotPath(currentInstruction[1].u.operand, currentInstruction[2].u.operand, &(m_codeBlock->identifier(currentInstruction[3].u.operand)), propertyAccessInstructionIndex++);
399
NEXT_OPCODE(op_get_by_id);
401
case op_instanceof: {
402
emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax); // value
403
emitGetVirtualRegister(currentInstruction[3].u.operand, X86::ecx); // baseVal
404
emitGetVirtualRegister(currentInstruction[4].u.operand, X86::edx); // proto
406
// check if any are immediates
407
move(X86::eax, X86::ebx);
408
orPtr(X86::ecx, X86::ebx);
409
orPtr(X86::edx, X86::ebx);
410
emitJumpSlowCaseIfNotJSCell(X86::ebx);
412
// check that all are object type - this is a bit of a bithack to avoid excess branching;
413
// we check that the sum of the three type codes from Structures is exactly 3 * ObjectType,
414
// this works because NumberType and StringType are smaller
415
move(Imm32(3 * ObjectType), X86::ebx);
416
loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::eax);
417
loadPtr(Address(X86::ecx, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
418
loadPtr(Address(X86::edx, FIELD_OFFSET(JSCell, m_structure)), X86::edx);
419
sub32(Address(X86::eax, FIELD_OFFSET(Structure, m_typeInfo.m_type)), X86::ebx);
420
sub32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_type)), X86::ebx);
421
addSlowCase(jne32(Address(X86::edx, FIELD_OFFSET(Structure, m_typeInfo.m_type)), X86::ebx));
423
// check that baseVal's flags include ImplementsHasInstance but not OverridesHasInstance
424
load32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), X86::ecx);
425
and32(Imm32(ImplementsHasInstance | OverridesHasInstance), X86::ecx);
426
addSlowCase(jne32(X86::ecx, Imm32(ImplementsHasInstance)));
428
emitGetVirtualRegister(currentInstruction[2].u.operand, X86::ecx); // reload value
429
emitGetVirtualRegister(currentInstruction[4].u.operand, X86::edx); // reload proto
431
// optimistically load true result
432
move(ImmPtr(JSValuePtr::encode(jsBoolean(true))), X86::eax);
436
// load value's prototype
437
loadPtr(Address(X86::ecx, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
438
loadPtr(Address(X86::ecx, FIELD_OFFSET(Structure, m_prototype)), X86::ecx);
440
Jump exit = jePtr(X86::ecx, X86::edx);
442
jnePtr(X86::ecx, ImmPtr(JSValuePtr::encode(jsNull())), loop);
444
move(ImmPtr(JSValuePtr::encode(jsBoolean(false))), X86::eax);
448
emitPutVirtualRegister(currentInstruction[1].u.operand);
450
NEXT_OPCODE(op_instanceof);
453
emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
454
Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
455
emitPutJITStubArgConstant(ident, 2);
456
emitCTICall(Interpreter::cti_op_del_by_id);
457
emitPutVirtualRegister(currentInstruction[1].u.operand);
458
NEXT_OPCODE(op_del_by_id);
461
compileFastArith_op_mul(currentInstruction);
465
FuncDeclNode* func = m_codeBlock->function(currentInstruction[2].u.operand);
466
emitPutJITStubArgConstant(func, 1);
467
emitCTICall(Interpreter::cti_op_new_func);
468
emitPutVirtualRegister(currentInstruction[1].u.operand);
469
NEXT_OPCODE(op_new_func);
472
compileOpCall(opcodeID, currentInstruction, callLinkInfoIndex++);
473
NEXT_OPCODE(op_call);
476
compileOpCall(opcodeID, currentInstruction, callLinkInfoIndex++);
477
NEXT_OPCODE(op_call_eval);
480
compileOpCall(opcodeID, currentInstruction, callLinkInfoIndex++);
481
NEXT_OPCODE(op_construct);
483
case op_get_global_var: {
484
JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[2].u.jsCell);
485
move(ImmPtr(globalObject), X86::eax);
486
emitGetVariableObjectRegister(X86::eax, currentInstruction[3].u.operand, X86::eax);
487
emitPutVirtualRegister(currentInstruction[1].u.operand);
488
NEXT_OPCODE(op_get_global_var);
490
case op_put_global_var: {
491
emitGetVirtualRegister(currentInstruction[3].u.operand, X86::edx);
492
JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[1].u.jsCell);
493
move(ImmPtr(globalObject), X86::eax);
494
emitPutVariableObjectRegister(X86::edx, X86::eax, currentInstruction[2].u.operand);
495
NEXT_OPCODE(op_put_global_var);
497
case op_get_scoped_var: {
498
int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();
500
emitGetFromCallFrameHeader(RegisterFile::ScopeChain, X86::eax);
502
loadPtr(Address(X86::eax, FIELD_OFFSET(ScopeChainNode, next)), X86::eax);
504
loadPtr(Address(X86::eax, FIELD_OFFSET(ScopeChainNode, object)), X86::eax);
505
emitGetVariableObjectRegister(X86::eax, currentInstruction[2].u.operand, X86::eax);
506
emitPutVirtualRegister(currentInstruction[1].u.operand);
507
NEXT_OPCODE(op_get_scoped_var);
509
case op_put_scoped_var: {
510
int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
512
emitGetFromCallFrameHeader(RegisterFile::ScopeChain, X86::edx);
513
emitGetVirtualRegister(currentInstruction[3].u.operand, X86::eax);
515
loadPtr(Address(X86::edx, FIELD_OFFSET(ScopeChainNode, next)), X86::edx);
517
loadPtr(Address(X86::edx, FIELD_OFFSET(ScopeChainNode, object)), X86::edx);
518
emitPutVariableObjectRegister(X86::eax, X86::edx, currentInstruction[1].u.operand);
519
NEXT_OPCODE(op_put_scoped_var);
521
case op_tear_off_activation: {
522
emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
523
emitCTICall(Interpreter::cti_op_tear_off_activation);
524
NEXT_OPCODE(op_tear_off_activation);
526
case op_tear_off_arguments: {
527
emitCTICall(Interpreter::cti_op_tear_off_arguments);
528
NEXT_OPCODE(op_tear_off_arguments);
531
// We could JIT generate the deref, only calling out to C when the refcount hits zero.
532
if (m_codeBlock->needsFullScopeChain())
533
emitCTICall(Interpreter::cti_op_ret_scopeChain);
535
// Return the result in %eax.
536
emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
538
// Grab the return address.
539
emitGetFromCallFrameHeader(RegisterFile::ReturnPC, X86::edx);
541
// Restore our caller's "r".
542
emitGetFromCallFrameHeader(RegisterFile::CallerFrame, callFrameRegister);
551
emitPutJITStubArgConstant(currentInstruction[2].u.operand, 1);
552
emitPutJITStubArgConstant(currentInstruction[3].u.operand, 2);
553
emitCTICall(Interpreter::cti_op_new_array);
554
emitPutVirtualRegister(currentInstruction[1].u.operand);
555
NEXT_OPCODE(op_new_array);
558
Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
559
emitPutJITStubArgConstant(ident, 1);
560
emitCTICall(Interpreter::cti_op_resolve);
561
emitPutVirtualRegister(currentInstruction[1].u.operand);
562
NEXT_OPCODE(op_resolve);
564
case op_construct_verify: {
565
emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
567
emitJumpSlowCaseIfNotJSCell(X86::eax);
568
loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
569
addSlowCase(jne32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo) + FIELD_OFFSET(TypeInfo, m_type)), Imm32(ObjectType)));
571
NEXT_OPCODE(op_construct_verify);
573
case op_get_by_val: {
574
emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
575
emitJumpSlowCaseIfNotImmNum(X86::edx);
576
#if USE(ALTERNATE_JSIMMEDIATE)
577
// This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
578
// We check the value as if it was a uint32 against the m_fastAccessCutoff - which will always fail if
579
// number was signed since m_fastAccessCutoff is always less than intmax (since the total allocation
580
// size is always less than 4Gb). As such zero extending wil have been correct (and extending the value
581
// to 64-bits is necessary since it's used in the address calculation. We zero extend rather than sign
582
// extending since it makes it easier to re-tag the value in the slow case.
583
zeroExtend32ToPtr(X86::edx, X86::edx);
585
emitFastArithImmToInt(X86::edx);
587
emitJumpSlowCaseIfNotJSCell(X86::eax);
588
addSlowCase(jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsArrayVptr)));
590
// This is an array; get the m_storage pointer into ecx, then check if the index is below the fast cutoff
591
loadPtr(Address(X86::eax, FIELD_OFFSET(JSArray, m_storage)), X86::ecx);
592
addSlowCase(jae32(X86::edx, Address(X86::eax, FIELD_OFFSET(JSArray, m_fastAccessCutoff))));
594
// Get the value from the vector
595
loadPtr(BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0])), X86::eax);
596
emitPutVirtualRegister(currentInstruction[1].u.operand);
597
NEXT_OPCODE(op_get_by_val);
599
case op_resolve_func: {
600
Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
601
emitPutJITStubArgConstant(ident, 1);
602
emitCTICall(Interpreter::cti_op_resolve_func);
603
emitPutVirtualRegister(currentInstruction[2].u.operand, X86::edx);
604
emitPutVirtualRegister(currentInstruction[1].u.operand);
605
NEXT_OPCODE(op_resolve_func);
608
compileBinaryArithOp(op_sub, currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, OperandTypes::fromInt(currentInstruction[4].u.operand));
611
case op_put_by_val: {
612
emitGetVirtualRegisters(currentInstruction[1].u.operand, X86::eax, currentInstruction[2].u.operand, X86::edx);
613
emitJumpSlowCaseIfNotImmNum(X86::edx);
614
#if USE(ALTERNATE_JSIMMEDIATE)
615
// See comment in op_get_by_val.
616
zeroExtend32ToPtr(X86::edx, X86::edx);
618
emitFastArithImmToInt(X86::edx);
620
emitJumpSlowCaseIfNotJSCell(X86::eax);
621
addSlowCase(jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsArrayVptr)));
623
// This is an array; get the m_storage pointer into ecx, then check if the index is below the fast cutoff
624
loadPtr(Address(X86::eax, FIELD_OFFSET(JSArray, m_storage)), X86::ecx);
625
Jump inFastVector = jb32(X86::edx, Address(X86::eax, FIELD_OFFSET(JSArray, m_fastAccessCutoff)));
626
// No; oh well, check if the access if within the vector - if so, we may still be okay.
627
addSlowCase(jae32(X86::edx, Address(X86::ecx, FIELD_OFFSET(ArrayStorage, m_vectorLength))));
629
// This is a write to the slow part of the vector; first, we have to check if this would be the first write to this location.
630
// FIXME: should be able to handle initial write to array; increment the the number of items in the array, and potentially update fast access cutoff.
631
addSlowCase(jzPtr(BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0]))));
633
// All good - put the value into the array.
634
inFastVector.link(this);
635
emitGetVirtualRegister(currentInstruction[3].u.operand, X86::eax);
636
storePtr(X86::eax, BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0])));
637
NEXT_OPCODE(op_put_by_val);
639
CTI_COMPILE_BINARY_OP(op_lesseq)
640
case op_loop_if_true: {
641
emitSlowScriptCheck();
643
unsigned target = currentInstruction[2].u.operand;
644
emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
646
Jump isZero = jePtr(X86::eax, ImmPtr(JSValuePtr::encode(JSImmediate::zeroImmediate())));
647
addJump(emitJumpIfImmNum(X86::eax), target + 2);
649
addJump(jePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsBoolean(true)))), target + 2);
650
addSlowCase(jnePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsBoolean(false)))));
653
NEXT_OPCODE(op_loop_if_true);
655
case op_resolve_base: {
656
Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
657
emitPutJITStubArgConstant(ident, 1);
658
emitCTICall(Interpreter::cti_op_resolve_base);
659
emitPutVirtualRegister(currentInstruction[1].u.operand);
660
NEXT_OPCODE(op_resolve_base);
663
emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
664
emitCTICall(Interpreter::cti_op_negate);
665
emitPutVirtualRegister(currentInstruction[1].u.operand);
666
NEXT_OPCODE(op_negate);
668
case op_resolve_skip: {
669
Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
670
emitPutJITStubArgConstant(ident, 1);
671
emitPutJITStubArgConstant(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain(), 2);
672
emitCTICall(Interpreter::cti_op_resolve_skip);
673
emitPutVirtualRegister(currentInstruction[1].u.operand);
674
NEXT_OPCODE(op_resolve_skip);
676
case op_resolve_global: {
678
void* globalObject = currentInstruction[2].u.jsCell;
679
Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
681
unsigned currentIndex = globalResolveInfoIndex++;
682
void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
683
void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);
685
// Check Structure of global object
686
move(ImmPtr(globalObject), X86::eax);
687
loadPtr(structureAddress, X86::edx);
688
Jump noMatch = jnePtr(X86::edx, Address(X86::eax, FIELD_OFFSET(JSCell, m_structure))); // Structures don't match
690
// Load cached property
691
loadPtr(Address(X86::eax, FIELD_OFFSET(JSGlobalObject, m_propertyStorage)), X86::eax);
692
load32(offsetAddr, X86::edx);
693
loadPtr(BaseIndex(X86::eax, X86::edx, ScalePtr), X86::eax);
694
emitPutVirtualRegister(currentInstruction[1].u.operand);
699
emitPutJITStubArgConstant(globalObject, 1);
700
emitPutJITStubArgConstant(ident, 2);
701
emitPutJITStubArgConstant(currentIndex, 3);
702
emitCTICall(Interpreter::cti_op_resolve_global);
703
emitPutVirtualRegister(currentInstruction[1].u.operand);
705
NEXT_OPCODE(op_resolve_global);
707
CTI_COMPILE_BINARY_OP(op_div)
709
compileFastArith_op_pre_dec(currentInstruction[1].u.operand);
710
NEXT_OPCODE(op_pre_dec);
713
unsigned op1 = currentInstruction[1].u.operand;
714
unsigned op2 = currentInstruction[2].u.operand;
715
unsigned target = currentInstruction[3].u.operand;
716
if (isOperandConstantImmediateInt(op2)) {
717
emitGetVirtualRegister(op1, X86::eax);
718
emitJumpSlowCaseIfNotImmNum(X86::eax);
719
#if USE(ALTERNATE_JSIMMEDIATE)
720
int32_t op2imm = JSImmediate::intValue(getConstantOperand(op2));
722
int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
724
addJump(jge32(X86::eax, Imm32(op2imm)), target + 3);
726
emitGetVirtualRegisters(op1, X86::eax, op2, X86::edx);
727
emitJumpSlowCaseIfNotImmNum(X86::eax);
728
emitJumpSlowCaseIfNotImmNum(X86::edx);
729
addJump(jge32(X86::eax, X86::edx), target + 3);
731
NEXT_OPCODE(op_jnless);
734
emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
735
xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), X86::eax);
736
addSlowCase(jnzPtr(X86::eax, Imm32(static_cast<int32_t>(~JSImmediate::ExtendedPayloadBitBoolValue))));
737
xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool | JSImmediate::ExtendedPayloadBitBoolValue)), X86::eax);
738
emitPutVirtualRegister(currentInstruction[1].u.operand);
742
unsigned target = currentInstruction[2].u.operand;
743
emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
745
addJump(jePtr(X86::eax, ImmPtr(JSValuePtr::encode(JSImmediate::zeroImmediate()))), target + 2);
746
Jump isNonZero = emitJumpIfImmNum(X86::eax);
748
addJump(jePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsBoolean(false)))), target + 2);
749
addSlowCase(jnePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsBoolean(true)))));
751
isNonZero.link(this);
752
NEXT_OPCODE(op_jfalse);
755
unsigned src = currentInstruction[1].u.operand;
756
unsigned target = currentInstruction[2].u.operand;
758
emitGetVirtualRegister(src, X86::eax);
759
Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
761
// First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
762
loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
763
addJump(jnz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
764
Jump wasNotImmediate = jump();
766
// Now handle the immediate cases - undefined & null
767
isImmediate.link(this);
768
and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
769
addJump(jePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsNull()))), target + 2);
771
wasNotImmediate.link(this);
772
NEXT_OPCODE(op_jeq_null);
775
unsigned src = currentInstruction[1].u.operand;
776
unsigned target = currentInstruction[2].u.operand;
778
emitGetVirtualRegister(src, X86::eax);
779
Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
781
// First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
782
loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
783
addJump(jz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
784
Jump wasNotImmediate = jump();
786
// Now handle the immediate cases - undefined & null
787
isImmediate.link(this);
788
and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
789
addJump(jnePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsNull()))), target + 2);
791
wasNotImmediate.link(this);
792
NEXT_OPCODE(op_jneq_null);
795
compileFastArith_op_post_inc(currentInstruction[1].u.operand, currentInstruction[2].u.operand);
796
NEXT_OPCODE(op_post_inc);
798
case op_unexpected_load: {
799
JSValuePtr v = m_codeBlock->unexpectedConstant(currentInstruction[2].u.operand);
800
move(ImmPtr(JSValuePtr::encode(v)), X86::eax);
801
emitPutVirtualRegister(currentInstruction[1].u.operand);
802
NEXT_OPCODE(op_unexpected_load);
805
int retAddrDst = currentInstruction[1].u.operand;
806
int target = currentInstruction[2].u.operand;
807
DataLabelPtr storeLocation = storePtrWithPatch(Address(callFrameRegister, sizeof(Register) * retAddrDst));
808
addJump(jump(), target + 2);
809
m_jsrSites.append(JSRInfo(storeLocation, label()));
813
jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
814
NEXT_OPCODE(op_sret);
817
emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
818
emitJumpSlowCaseIfNotImmNums(X86::eax, X86::edx, X86::ecx);
819
sete32(X86::edx, X86::eax);
820
emitTagAsBoolImmediate(X86::eax);
821
emitPutVirtualRegister(currentInstruction[1].u.operand);
825
compileFastArith_op_lshift(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand);
826
NEXT_OPCODE(op_lshift);
829
compileFastArith_op_bitand(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand);
830
NEXT_OPCODE(op_bitand);
833
compileFastArith_op_rshift(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand);
834
NEXT_OPCODE(op_rshift);
837
emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
838
emitJumpSlowCaseIfNotImmNum(X86::eax);
839
#if USE(ALTERNATE_JSIMMEDIATE)
841
emitFastArithIntToImmNoCheck(X86::eax, X86::eax);
843
xorPtr(Imm32(~JSImmediate::TagTypeInteger), X86::eax);
845
emitPutVirtualRegister(currentInstruction[1].u.operand);
846
NEXT_OPCODE(op_bitnot);
848
case op_resolve_with_base: {
849
Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
850
emitPutJITStubArgConstant(ident, 1);
851
emitCTICall(Interpreter::cti_op_resolve_with_base);
852
emitPutVirtualRegister(currentInstruction[2].u.operand, X86::edx);
853
emitPutVirtualRegister(currentInstruction[1].u.operand);
854
NEXT_OPCODE(op_resolve_with_base);
856
case op_new_func_exp: {
857
FuncExprNode* func = m_codeBlock->functionExpression(currentInstruction[2].u.operand);
858
emitPutJITStubArgConstant(func, 1);
859
emitCTICall(Interpreter::cti_op_new_func_exp);
860
emitPutVirtualRegister(currentInstruction[1].u.operand);
861
NEXT_OPCODE(op_new_func_exp);
864
compileFastArith_op_mod(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand);
868
unsigned target = currentInstruction[2].u.operand;
869
emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
871
Jump isZero = jePtr(X86::eax, ImmPtr(JSValuePtr::encode(JSImmediate::zeroImmediate())));
872
addJump(emitJumpIfImmNum(X86::eax), target + 2);
874
addJump(jePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsBoolean(true)))), target + 2);
875
addSlowCase(jnePtr(X86::eax, ImmPtr(JSValuePtr::encode(jsBoolean(false)))));
878
NEXT_OPCODE(op_jtrue);
880
CTI_COMPILE_BINARY_OP(op_less)
882
emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
883
emitJumpSlowCaseIfNotImmNums(X86::eax, X86::edx, X86::ecx);
884
setne32(X86::edx, X86::eax);
885
emitTagAsBoolImmediate(X86::eax);
887
emitPutVirtualRegister(currentInstruction[1].u.operand);
892
compileFastArith_op_post_dec(currentInstruction[1].u.operand, currentInstruction[2].u.operand);
893
NEXT_OPCODE(op_post_dec);
895
CTI_COMPILE_BINARY_OP(op_urshift)
897
emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
898
emitJumpSlowCaseIfNotImmNums(X86::eax, X86::edx, X86::ecx);
899
xorPtr(X86::edx, X86::eax);
900
emitFastArithReTagImmediate(X86::eax, X86::eax);
901
emitPutVirtualRegister(currentInstruction[1].u.operand);
902
NEXT_OPCODE(op_bitxor);
904
case op_new_regexp: {
905
RegExp* regExp = m_codeBlock->regexp(currentInstruction[2].u.operand);
906
emitPutJITStubArgConstant(regExp, 1);
907
emitCTICall(Interpreter::cti_op_new_regexp);
908
emitPutVirtualRegister(currentInstruction[1].u.operand);
909
NEXT_OPCODE(op_new_regexp);
912
emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
913
emitJumpSlowCaseIfNotImmNums(X86::eax, X86::edx, X86::ecx);
914
orPtr(X86::edx, X86::eax);
915
emitPutVirtualRegister(currentInstruction[1].u.operand);
916
NEXT_OPCODE(op_bitor);
919
emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
920
emitCTICall(Interpreter::cti_op_throw);
922
addPtr(Imm32(0x38), X86::esp);
929
addPtr(Imm32(0x1c), X86::esp);
936
NEXT_OPCODE(op_throw);
938
case op_get_pnames: {
939
emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
940
emitCTICall(Interpreter::cti_op_get_pnames);
941
emitPutVirtualRegister(currentInstruction[1].u.operand);
942
NEXT_OPCODE(op_get_pnames);
944
case op_next_pname: {
945
emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
946
unsigned target = currentInstruction[3].u.operand;
947
emitCTICall(Interpreter::cti_op_next_pname);
948
Jump endOfIter = jzPtr(X86::eax);
949
emitPutVirtualRegister(currentInstruction[1].u.operand);
950
addJump(jump(), target + 3);
951
endOfIter.link(this);
952
NEXT_OPCODE(op_next_pname);
954
case op_push_scope: {
955
emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
956
emitCTICall(Interpreter::cti_op_push_scope);
957
emitPutVirtualRegister(currentInstruction[1].u.operand);
958
NEXT_OPCODE(op_push_scope);
961
emitCTICall(Interpreter::cti_op_pop_scope);
962
NEXT_OPCODE(op_pop_scope);
964
CTI_COMPILE_UNARY_OP(op_typeof)
965
CTI_COMPILE_UNARY_OP(op_is_undefined)
966
CTI_COMPILE_UNARY_OP(op_is_boolean)
967
CTI_COMPILE_UNARY_OP(op_is_number)
968
CTI_COMPILE_UNARY_OP(op_is_string)
969
CTI_COMPILE_UNARY_OP(op_is_object)
970
CTI_COMPILE_UNARY_OP(op_is_function)
972
compileOpStrictEq(currentInstruction, OpStrictEq);
973
NEXT_OPCODE(op_stricteq);
976
compileOpStrictEq(currentInstruction, OpNStrictEq);
977
NEXT_OPCODE(op_nstricteq);
979
case op_to_jsnumber: {
980
int srcVReg = currentInstruction[2].u.operand;
981
emitGetVirtualRegister(srcVReg, X86::eax);
983
Jump wasImmediate = emitJumpIfImmNum(X86::eax);
985
emitJumpSlowCaseIfNotJSCell(X86::eax, srcVReg);
986
loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
987
addSlowCase(jne32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_type)), Imm32(NumberType)));
989
wasImmediate.link(this);
991
emitPutVirtualRegister(currentInstruction[1].u.operand);
992
NEXT_OPCODE(op_to_jsnumber);
994
CTI_COMPILE_BINARY_OP(op_in)
995
case op_push_new_scope: {
996
Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
997
emitPutJITStubArgConstant(ident, 1);
998
emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx);
999
emitCTICall(Interpreter::cti_op_push_new_scope);
1000
emitPutVirtualRegister(currentInstruction[1].u.operand);
1001
NEXT_OPCODE(op_push_new_scope);
1004
emitGetCTIParam(STUB_ARGS_callFrame, callFrameRegister);
1005
emitPutVirtualRegister(currentInstruction[1].u.operand);
1006
NEXT_OPCODE(op_catch);
1008
case op_jmp_scopes: {
1009
unsigned count = currentInstruction[1].u.operand;
1010
emitPutJITStubArgConstant(count, 1);
1011
emitCTICall(Interpreter::cti_op_jmp_scopes);
1012
unsigned target = currentInstruction[2].u.operand;
1013
addJump(jump(), target + 2);
1014
NEXT_OPCODE(op_jmp_scopes);
1016
case op_put_by_index: {
1017
emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
1018
emitPutJITStubArgConstant(currentInstruction[2].u.operand, 2);
1019
emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 3, X86::ecx);
1020
emitCTICall(Interpreter::cti_op_put_by_index);
1021
NEXT_OPCODE(op_put_by_index);
1023
case op_switch_imm: {
1024
unsigned tableIndex = currentInstruction[1].u.operand;
1025
unsigned defaultOffset = currentInstruction[2].u.operand;
1026
unsigned scrutinee = currentInstruction[3].u.operand;
1028
// create jump table for switch destinations, track this switch statement.
1029
SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
1030
m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
1031
jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1033
emitPutJITStubArgFromVirtualRegister(scrutinee, 1, X86::ecx);
1034
emitPutJITStubArgConstant(tableIndex, 2);
1035
emitCTICall(Interpreter::cti_op_switch_imm);
1037
NEXT_OPCODE(op_switch_imm);
1039
case op_switch_char: {
1040
unsigned tableIndex = currentInstruction[1].u.operand;
1041
unsigned defaultOffset = currentInstruction[2].u.operand;
1042
unsigned scrutinee = currentInstruction[3].u.operand;
1044
// create jump table for switch destinations, track this switch statement.
1045
SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
1046
m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
1047
jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1049
emitPutJITStubArgFromVirtualRegister(scrutinee, 1, X86::ecx);
1050
emitPutJITStubArgConstant(tableIndex, 2);
1051
emitCTICall(Interpreter::cti_op_switch_char);
1053
NEXT_OPCODE(op_switch_char);
1055
case op_switch_string: {
1056
unsigned tableIndex = currentInstruction[1].u.operand;
1057
unsigned defaultOffset = currentInstruction[2].u.operand;
1058
unsigned scrutinee = currentInstruction[3].u.operand;
1060
// create jump table for switch destinations, track this switch statement.
1061
StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
1062
m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));
1064
emitPutJITStubArgFromVirtualRegister(scrutinee, 1, X86::ecx);
1065
emitPutJITStubArgConstant(tableIndex, 2);
1066
emitCTICall(Interpreter::cti_op_switch_string);
1068
NEXT_OPCODE(op_switch_string);
1070
case op_del_by_val: {
1071
emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
1072
emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx);
1073
emitCTICall(Interpreter::cti_op_del_by_val);
1074
emitPutVirtualRegister(currentInstruction[1].u.operand);
1075
NEXT_OPCODE(op_del_by_val);
1077
case op_put_getter: {
1078
emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
1079
Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
1080
emitPutJITStubArgConstant(ident, 2);
1081
emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 3, X86::ecx);
1082
emitCTICall(Interpreter::cti_op_put_getter);
1083
NEXT_OPCODE(op_put_getter);
1085
case op_put_setter: {
1086
emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
1087
Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
1088
emitPutJITStubArgConstant(ident, 2);
1089
emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 3, X86::ecx);
1090
emitCTICall(Interpreter::cti_op_put_setter);
1091
NEXT_OPCODE(op_put_setter);
1093
case op_new_error: {
1094
JSValuePtr message = m_codeBlock->unexpectedConstant(currentInstruction[3].u.operand);
1095
emitPutJITStubArgConstant(currentInstruction[2].u.operand, 1);
1096
emitPutJITStubArgConstant(JSValuePtr::encode(message), 2);
1097
emitPutJITStubArgConstant(m_bytecodeIndex, 3);
1098
emitCTICall(Interpreter::cti_op_new_error);
1099
emitPutVirtualRegister(currentInstruction[1].u.operand);
1100
NEXT_OPCODE(op_new_error);
1103
emitPutJITStubArgConstant(currentInstruction[1].u.operand, 1);
1104
emitPutJITStubArgConstant(currentInstruction[2].u.operand, 2);
1105
emitPutJITStubArgConstant(currentInstruction[3].u.operand, 3);
1106
emitCTICall(Interpreter::cti_op_debug);
1107
NEXT_OPCODE(op_debug);
1110
unsigned dst = currentInstruction[1].u.operand;
1111
unsigned src1 = currentInstruction[2].u.operand;
1113
emitGetVirtualRegister(src1, X86::eax);
1114
Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
1116
loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
1117
setnz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), X86::eax);
1119
Jump wasNotImmediate = jump();
1121
isImmediate.link(this);
1123
and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
1124
sete32(Imm32(JSImmediate::FullTagTypeNull), X86::eax);
1126
wasNotImmediate.link(this);
1128
emitTagAsBoolImmediate(X86::eax);
1129
emitPutVirtualRegister(dst);
1131
NEXT_OPCODE(op_eq_null);
1134
unsigned dst = currentInstruction[1].u.operand;
1135
unsigned src1 = currentInstruction[2].u.operand;
1137
emitGetVirtualRegister(src1, X86::eax);
1138
Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
1140
loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
1141
setz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), X86::eax);
1143
Jump wasNotImmediate = jump();
1145
isImmediate.link(this);
1147
and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
1148
setne32(Imm32(JSImmediate::FullTagTypeNull), X86::eax);
1150
wasNotImmediate.link(this);
1152
emitTagAsBoolImmediate(X86::eax);
1153
emitPutVirtualRegister(dst);
1155
NEXT_OPCODE(op_neq_null);
1158
// Even though CTI doesn't use them, we initialize our constant
1159
// registers to zap stale pointers, to avoid unnecessarily prolonging
1160
// object lifetime and increasing GC pressure.
1161
size_t count = m_codeBlock->m_numVars + m_codeBlock->numberOfConstantRegisters();
1162
for (size_t j = 0; j < count; ++j)
1163
emitInitRegister(j);
1165
NEXT_OPCODE(op_enter);
1167
case op_enter_with_activation: {
1168
// Even though CTI doesn't use them, we initialize our constant
1169
// registers to zap stale pointers, to avoid unnecessarily prolonging
1170
// object lifetime and increasing GC pressure.
1171
size_t count = m_codeBlock->m_numVars + m_codeBlock->numberOfConstantRegisters();
1172
for (size_t j = 0; j < count; ++j)
1173
emitInitRegister(j);
1175
emitCTICall(Interpreter::cti_op_push_activation);
1176
emitPutVirtualRegister(currentInstruction[1].u.operand);
1178
NEXT_OPCODE(op_enter_with_activation);
1180
case op_create_arguments: {
1181
if (m_codeBlock->m_numParameters == 1)
1182
emitCTICall(Interpreter::cti_op_create_arguments_no_params);
1184
emitCTICall(Interpreter::cti_op_create_arguments);
1185
NEXT_OPCODE(op_create_arguments);
1187
case op_convert_this: {
1188
emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
1190
emitJumpSlowCaseIfNotJSCell(X86::eax);
1191
loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::edx);
1192
addSlowCase(jnz32(Address(X86::edx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
1194
NEXT_OPCODE(op_convert_this);
1196
case op_profile_will_call: {
1197
emitGetCTIParam(STUB_ARGS_profilerReference, X86::eax);
1198
Jump noProfiler = jzPtr(Address(X86::eax));
1199
emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::eax);
1200
emitCTICall(Interpreter::cti_op_profile_will_call);
1201
noProfiler.link(this);
1203
NEXT_OPCODE(op_profile_will_call);
1205
case op_profile_did_call: {
1206
emitGetCTIParam(STUB_ARGS_profilerReference, X86::eax);
1207
Jump noProfiler = jzPtr(Address(X86::eax));
1208
emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::eax);
1209
emitCTICall(Interpreter::cti_op_profile_did_call);
1210
noProfiler.link(this);
1212
NEXT_OPCODE(op_profile_did_call);
196
switch (m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
197
DEFINE_BINARY_OP(op_del_by_val)
199
DEFINE_BINARY_OP(op_div)
201
DEFINE_BINARY_OP(op_in)
202
DEFINE_BINARY_OP(op_less)
203
DEFINE_BINARY_OP(op_lesseq)
204
DEFINE_BINARY_OP(op_urshift)
205
DEFINE_UNARY_OP(op_get_pnames)
206
DEFINE_UNARY_OP(op_is_boolean)
207
DEFINE_UNARY_OP(op_is_function)
208
DEFINE_UNARY_OP(op_is_number)
209
DEFINE_UNARY_OP(op_is_object)
210
DEFINE_UNARY_OP(op_is_string)
211
DEFINE_UNARY_OP(op_is_undefined)
212
#if !USE(JSVALUE32_64)
213
DEFINE_UNARY_OP(op_negate)
215
DEFINE_UNARY_OP(op_typeof)
223
DEFINE_OP(op_call_eval)
224
DEFINE_OP(op_call_varargs)
226
DEFINE_OP(op_construct)
227
DEFINE_OP(op_construct_verify)
228
DEFINE_OP(op_convert_this)
229
DEFINE_OP(op_init_arguments)
230
DEFINE_OP(op_create_arguments)
232
DEFINE_OP(op_del_by_id)
238
DEFINE_OP(op_enter_with_activation)
240
DEFINE_OP(op_eq_null)
241
DEFINE_OP(op_get_by_id)
242
DEFINE_OP(op_get_by_val)
243
DEFINE_OP(op_get_global_var)
244
DEFINE_OP(op_get_scoped_var)
245
DEFINE_OP(op_instanceof)
246
DEFINE_OP(op_jeq_null)
249
DEFINE_OP(op_jmp_scopes)
250
DEFINE_OP(op_jneq_null)
251
DEFINE_OP(op_jneq_ptr)
253
DEFINE_OP(op_jnlesseq)
256
DEFINE_OP(op_load_varargs)
258
DEFINE_OP(op_loop_if_less)
259
DEFINE_OP(op_loop_if_lesseq)
260
DEFINE_OP(op_loop_if_true)
262
DEFINE_OP(op_method_check)
266
#if USE(JSVALUE32_64)
270
DEFINE_OP(op_neq_null)
271
DEFINE_OP(op_new_array)
272
DEFINE_OP(op_new_error)
273
DEFINE_OP(op_new_func)
274
DEFINE_OP(op_new_func_exp)
275
DEFINE_OP(op_new_object)
276
DEFINE_OP(op_new_regexp)
277
DEFINE_OP(op_next_pname)
279
DEFINE_OP(op_nstricteq)
280
DEFINE_OP(op_pop_scope)
281
DEFINE_OP(op_post_dec)
282
DEFINE_OP(op_post_inc)
283
DEFINE_OP(op_pre_dec)
284
DEFINE_OP(op_pre_inc)
285
DEFINE_OP(op_profile_did_call)
286
DEFINE_OP(op_profile_will_call)
287
DEFINE_OP(op_push_new_scope)
288
DEFINE_OP(op_push_scope)
289
DEFINE_OP(op_put_by_id)
290
DEFINE_OP(op_put_by_index)
291
DEFINE_OP(op_put_by_val)
292
DEFINE_OP(op_put_getter)
293
DEFINE_OP(op_put_global_var)
294
DEFINE_OP(op_put_scoped_var)
295
DEFINE_OP(op_put_setter)
296
DEFINE_OP(op_resolve)
297
DEFINE_OP(op_resolve_base)
298
DEFINE_OP(op_resolve_global)
299
DEFINE_OP(op_resolve_skip)
300
DEFINE_OP(op_resolve_with_base)
305
DEFINE_OP(op_stricteq)
307
DEFINE_OP(op_switch_char)
308
DEFINE_OP(op_switch_imm)
309
DEFINE_OP(op_switch_string)
310
DEFINE_OP(op_tear_off_activation)
311
DEFINE_OP(op_tear_off_arguments)
313
DEFINE_OP(op_to_jsnumber)
314
DEFINE_OP(op_to_primitive)
1214
316
case op_get_array_length:
1215
317
case op_get_by_id_chain:
1216
318
case op_get_by_id_generic:
1261
368
Instruction* currentInstruction = instructionsBegin + m_bytecodeIndex;
1263
switch (OpcodeID opcodeID = m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
1264
case op_convert_this: {
1267
emitPutJITStubArg(X86::eax, 1);
1268
emitCTICall(Interpreter::cti_op_convert_this);
1269
emitPutVirtualRegister(currentInstruction[1].u.operand);
1270
NEXT_OPCODE(op_convert_this);
1273
compileFastArithSlow_op_add(currentInstruction, iter);
1274
NEXT_OPCODE(op_add);
1276
case op_construct_verify: {
1279
emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
1280
emitPutVirtualRegister(currentInstruction[1].u.operand);
1282
NEXT_OPCODE(op_construct_verify);
1284
case op_get_by_val: {
1285
// The slow case that handles accesses to arrays (below) may jump back up to here.
1286
Label beginGetByValSlow(this);
1288
Jump notImm = getSlowCase(iter);
1291
emitFastArithIntToImmNoCheck(X86::edx, X86::edx);
1293
emitPutJITStubArg(X86::eax, 1);
1294
emitPutJITStubArg(X86::edx, 2);
1295
emitCTICall(Interpreter::cti_op_get_by_val);
1296
emitPutVirtualRegister(currentInstruction[1].u.operand);
1297
emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
1299
// This is slow case that handles accesses to arrays above the fast cut-off.
1300
// First, check if this is an access to the vector
1302
jae32(X86::edx, Address(X86::ecx, FIELD_OFFSET(ArrayStorage, m_vectorLength)), beginGetByValSlow);
1304
// okay, missed the fast region, but it is still in the vector. Get the value.
1305
loadPtr(BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0])), X86::ecx);
1306
// Check whether the value loaded is zero; if so we need to return undefined.
1307
jzPtr(X86::ecx, beginGetByValSlow);
1308
move(X86::ecx, X86::eax);
1309
emitPutVirtualRegister(currentInstruction[1].u.operand, X86::eax);
1311
NEXT_OPCODE(op_get_by_val);
1314
compileBinaryArithOpSlowCase(op_sub, iter, currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, OperandTypes::fromInt(currentInstruction[4].u.operand));
1315
NEXT_OPCODE(op_sub);
1318
compileFastArithSlow_op_rshift(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, iter);
1319
NEXT_OPCODE(op_rshift);
1322
compileFastArithSlow_op_lshift(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, iter);
1323
NEXT_OPCODE(op_lshift);
1325
case op_loop_if_less: {
1326
unsigned op2 = currentInstruction[2].u.operand;
1327
unsigned target = currentInstruction[3].u.operand;
1328
if (isOperandConstantImmediateInt(op2)) {
1330
emitPutJITStubArg(X86::eax, 1);
1331
emitPutJITStubArgFromVirtualRegister(op2, 2, X86::ecx);
1332
emitCTICall(Interpreter::cti_op_loop_if_less);
1333
emitJumpSlowToHot(jnz32(X86::eax), target + 3);
1337
emitPutJITStubArg(X86::eax, 1);
1338
emitPutJITStubArg(X86::edx, 2);
1339
emitCTICall(Interpreter::cti_op_loop_if_less);
1340
emitJumpSlowToHot(jnz32(X86::eax), target + 3);
1342
NEXT_OPCODE(op_loop_if_less);
1344
case op_put_by_id: {
1345
compilePutByIdSlowCase(currentInstruction[1].u.operand, &(m_codeBlock->identifier(currentInstruction[2].u.operand)), currentInstruction[3].u.operand, iter, propertyAccessInstructionIndex++);
1346
NEXT_OPCODE(op_put_by_id);
1348
case op_get_by_id: {
1349
compileGetByIdSlowCase(currentInstruction[1].u.operand, currentInstruction[2].u.operand, &(m_codeBlock->identifier(currentInstruction[3].u.operand)), iter, propertyAccessInstructionIndex++);
1350
NEXT_OPCODE(op_get_by_id);
1352
case op_loop_if_lesseq: {
1353
unsigned op2 = currentInstruction[2].u.operand;
1354
unsigned target = currentInstruction[3].u.operand;
1355
if (isOperandConstantImmediateInt(op2)) {
1357
emitPutJITStubArg(X86::eax, 1);
1358
emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 2, X86::ecx);
1359
emitCTICall(Interpreter::cti_op_loop_if_lesseq);
1360
emitJumpSlowToHot(jnz32(X86::eax), target + 3);
1364
emitPutJITStubArg(X86::eax, 1);
1365
emitPutJITStubArg(X86::edx, 2);
1366
emitCTICall(Interpreter::cti_op_loop_if_lesseq);
1367
emitJumpSlowToHot(jnz32(X86::eax), target + 3);
1369
NEXT_OPCODE(op_loop_if_lesseq);
1372
compileFastArithSlow_op_pre_inc(currentInstruction[1].u.operand, iter);
1373
NEXT_OPCODE(op_pre_inc);
1375
case op_put_by_val: {
1376
// Normal slow cases - either is not an immediate imm, or is an array.
1377
Jump notImm = getSlowCase(iter);
1380
emitFastArithIntToImmNoCheck(X86::edx, X86::edx);
1382
emitGetVirtualRegister(currentInstruction[3].u.operand, X86::ecx);
1383
emitPutJITStubArg(X86::eax, 1);
1384
emitPutJITStubArg(X86::edx, 2);
1385
emitPutJITStubArg(X86::ecx, 3);
1386
emitCTICall(Interpreter::cti_op_put_by_val);
1387
emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_put_by_val));
1389
// slow cases for immediate int accesses to arrays
1392
emitGetVirtualRegister(currentInstruction[3].u.operand, X86::ecx);
1393
emitPutJITStubArg(X86::eax, 1);
1394
emitPutJITStubArg(X86::edx, 2);
1395
emitPutJITStubArg(X86::ecx, 3);
1396
emitCTICall(Interpreter::cti_op_put_by_val_array);
1398
NEXT_OPCODE(op_put_by_val);
1400
case op_loop_if_true: {
1402
emitPutJITStubArg(X86::eax, 1);
1403
emitCTICall(Interpreter::cti_op_jtrue);
1404
unsigned target = currentInstruction[2].u.operand;
1405
emitJumpSlowToHot(jnz32(X86::eax), target + 2);
1406
NEXT_OPCODE(op_loop_if_true);
1409
compileFastArithSlow_op_pre_dec(currentInstruction[1].u.operand, iter);
1410
NEXT_OPCODE(op_pre_dec);
1413
unsigned op2 = currentInstruction[2].u.operand;
1414
unsigned target = currentInstruction[3].u.operand;
1415
if (isOperandConstantImmediateInt(op2)) {
1417
emitPutJITStubArg(X86::eax, 1);
1418
emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 2, X86::ecx);
1419
emitCTICall(Interpreter::cti_op_jless);
1420
emitJumpSlowToHot(jz32(X86::eax), target + 3);
1424
emitPutJITStubArg(X86::eax, 1);
1425
emitPutJITStubArg(X86::edx, 2);
1426
emitCTICall(Interpreter::cti_op_jless);
1427
emitJumpSlowToHot(jz32(X86::eax), target + 3);
1429
NEXT_OPCODE(op_jnless);
1433
xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), X86::eax);
1434
emitPutJITStubArg(X86::eax, 1);
1435
emitCTICall(Interpreter::cti_op_not);
1436
emitPutVirtualRegister(currentInstruction[1].u.operand);
1437
NEXT_OPCODE(op_not);
1441
emitPutJITStubArg(X86::eax, 1);
1442
emitCTICall(Interpreter::cti_op_jtrue);
1443
unsigned target = currentInstruction[2].u.operand;
1444
emitJumpSlowToHot(jz32(X86::eax), target + 2); // inverted!
1445
NEXT_OPCODE(op_jfalse);
1448
compileFastArithSlow_op_post_inc(currentInstruction[1].u.operand, currentInstruction[2].u.operand, iter);
1449
NEXT_OPCODE(op_post_inc);
1453
emitPutJITStubArg(X86::eax, 1);
1454
emitCTICall(Interpreter::cti_op_bitnot);
1455
emitPutVirtualRegister(currentInstruction[1].u.operand);
1456
NEXT_OPCODE(op_bitnot);
1459
compileFastArithSlow_op_bitand(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, iter);
1460
NEXT_OPCODE(op_bitand);
1464
emitPutJITStubArg(X86::eax, 1);
1465
emitCTICall(Interpreter::cti_op_jtrue);
1466
unsigned target = currentInstruction[2].u.operand;
1467
emitJumpSlowToHot(jnz32(X86::eax), target + 2);
1468
NEXT_OPCODE(op_jtrue);
1471
compileFastArithSlow_op_post_dec(currentInstruction[1].u.operand, currentInstruction[2].u.operand, iter);
1472
NEXT_OPCODE(op_post_dec);
1476
emitPutJITStubArg(X86::eax, 1);
1477
emitPutJITStubArg(X86::edx, 2);
1478
emitCTICall(Interpreter::cti_op_bitxor);
1479
emitPutVirtualRegister(currentInstruction[1].u.operand);
1480
NEXT_OPCODE(op_bitxor);
1484
emitPutJITStubArg(X86::eax, 1);
1485
emitPutJITStubArg(X86::edx, 2);
1486
emitCTICall(Interpreter::cti_op_bitor);
1487
emitPutVirtualRegister(currentInstruction[1].u.operand);
1488
NEXT_OPCODE(op_bitor);
1492
emitPutJITStubArg(X86::eax, 1);
1493
emitPutJITStubArg(X86::edx, 2);
1494
emitCTICall(Interpreter::cti_op_eq);
1495
emitPutVirtualRegister(currentInstruction[1].u.operand);
1500
emitPutJITStubArg(X86::eax, 1);
1501
emitPutJITStubArg(X86::edx, 2);
1502
emitCTICall(Interpreter::cti_op_neq);
1503
emitPutVirtualRegister(currentInstruction[1].u.operand);
1504
NEXT_OPCODE(op_neq);
1510
emitPutJITStubArg(X86::eax, 1);
1511
emitPutJITStubArg(X86::edx, 2);
1512
emitCTICall(Interpreter::cti_op_stricteq);
1513
emitPutVirtualRegister(currentInstruction[1].u.operand);
1514
NEXT_OPCODE(op_stricteq);
1516
case op_nstricteq: {
1520
emitPutJITStubArg(X86::eax, 1);
1521
emitPutJITStubArg(X86::edx, 2);
1522
emitCTICall(Interpreter::cti_op_nstricteq);
1523
emitPutVirtualRegister(currentInstruction[1].u.operand);
1524
NEXT_OPCODE(op_nstricteq);
1526
case op_instanceof: {
1530
emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
1531
emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx);
1532
emitPutJITStubArgFromVirtualRegister(currentInstruction[4].u.operand, 3, X86::ecx);
1533
emitCTICall(Interpreter::cti_op_instanceof);
1534
emitPutVirtualRegister(currentInstruction[1].u.operand);
1535
NEXT_OPCODE(op_instanceof);
1538
compileFastArithSlow_op_mod(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, iter);
1539
NEXT_OPCODE(op_mod);
1542
compileFastArithSlow_op_mul(currentInstruction, iter);
1543
NEXT_OPCODE(op_mul);
1547
compileOpCallSlowCase(currentInstruction, iter, callLinkInfoIndex++, opcodeID);
1548
NEXT_OPCODE(op_call);
1550
case op_call_eval: {
1551
compileOpCallSlowCase(currentInstruction, iter, callLinkInfoIndex++, opcodeID);
1552
NEXT_OPCODE(op_call_eval);
1554
case op_construct: {
1555
compileOpCallSlowCase(currentInstruction, iter, callLinkInfoIndex++, opcodeID);
1556
NEXT_OPCODE(op_construct);
1558
case op_to_jsnumber: {
1559
linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
1562
emitPutJITStubArg(X86::eax, 1);
1563
emitCTICall(Interpreter::cti_op_to_jsnumber);
1565
emitPutVirtualRegister(currentInstruction[1].u.operand);
1566
NEXT_OPCODE(op_to_jsnumber);
370
switch (m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
371
DEFINE_SLOWCASE_OP(op_add)
372
DEFINE_SLOWCASE_OP(op_bitand)
373
DEFINE_SLOWCASE_OP(op_bitnot)
374
DEFINE_SLOWCASE_OP(op_bitor)
375
DEFINE_SLOWCASE_OP(op_bitxor)
376
DEFINE_SLOWCASE_OP(op_call)
377
DEFINE_SLOWCASE_OP(op_call_eval)
378
DEFINE_SLOWCASE_OP(op_call_varargs)
379
DEFINE_SLOWCASE_OP(op_construct)
380
DEFINE_SLOWCASE_OP(op_construct_verify)
381
DEFINE_SLOWCASE_OP(op_convert_this)
383
DEFINE_SLOWCASE_OP(op_div)
385
DEFINE_SLOWCASE_OP(op_eq)
386
DEFINE_SLOWCASE_OP(op_get_by_id)
387
DEFINE_SLOWCASE_OP(op_get_by_val)
388
DEFINE_SLOWCASE_OP(op_instanceof)
389
DEFINE_SLOWCASE_OP(op_jfalse)
390
DEFINE_SLOWCASE_OP(op_jnless)
391
DEFINE_SLOWCASE_OP(op_jnlesseq)
392
DEFINE_SLOWCASE_OP(op_jtrue)
393
DEFINE_SLOWCASE_OP(op_loop_if_less)
394
DEFINE_SLOWCASE_OP(op_loop_if_lesseq)
395
DEFINE_SLOWCASE_OP(op_loop_if_true)
396
DEFINE_SLOWCASE_OP(op_lshift)
397
DEFINE_SLOWCASE_OP(op_method_check)
398
DEFINE_SLOWCASE_OP(op_mod)
399
DEFINE_SLOWCASE_OP(op_mul)
400
#if USE(JSVALUE32_64)
401
DEFINE_SLOWCASE_OP(op_negate)
403
DEFINE_SLOWCASE_OP(op_neq)
404
DEFINE_SLOWCASE_OP(op_not)
405
DEFINE_SLOWCASE_OP(op_nstricteq)
406
DEFINE_SLOWCASE_OP(op_post_dec)
407
DEFINE_SLOWCASE_OP(op_post_inc)
408
DEFINE_SLOWCASE_OP(op_pre_dec)
409
DEFINE_SLOWCASE_OP(op_pre_inc)
410
DEFINE_SLOWCASE_OP(op_put_by_id)
411
DEFINE_SLOWCASE_OP(op_put_by_val)
412
#if USE(JSVALUE32_64)
413
DEFINE_SLOWCASE_OP(op_resolve_global)
415
DEFINE_SLOWCASE_OP(op_rshift)
416
DEFINE_SLOWCASE_OP(op_stricteq)
417
DEFINE_SLOWCASE_OP(op_sub)
418
DEFINE_SLOWCASE_OP(op_to_jsnumber)
419
DEFINE_SLOWCASE_OP(op_to_primitive)
1570
421
ASSERT_NOT_REACHED();
1645
489
ASSERT(record.type == SwitchRecord::Immediate || record.type == SwitchRecord::Character);
1646
490
ASSERT(record.jumpTable.simpleJumpTable->branchOffsets.size() == record.jumpTable.simpleJumpTable->ctiOffsets.size());
1648
record.jumpTable.simpleJumpTable->ctiDefault = patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + record.defaultOffset]);
492
record.jumpTable.simpleJumpTable->ctiDefault = patchBuffer.locationOf(m_labels[bytecodeIndex + 3 + record.defaultOffset]);
1650
494
for (unsigned j = 0; j < record.jumpTable.simpleJumpTable->branchOffsets.size(); ++j) {
1651
495
unsigned offset = record.jumpTable.simpleJumpTable->branchOffsets[j];
1652
record.jumpTable.simpleJumpTable->ctiOffsets[j] = offset ? patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + offset]) : record.jumpTable.simpleJumpTable->ctiDefault;
496
record.jumpTable.simpleJumpTable->ctiOffsets[j] = offset ? patchBuffer.locationOf(m_labels[bytecodeIndex + 3 + offset]) : record.jumpTable.simpleJumpTable->ctiDefault;
1655
499
ASSERT(record.type == SwitchRecord::String);
1657
record.jumpTable.stringJumpTable->ctiDefault = patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + record.defaultOffset]);
501
record.jumpTable.stringJumpTable->ctiDefault = patchBuffer.locationOf(m_labels[bytecodeIndex + 3 + record.defaultOffset]);
1659
503
StringJumpTable::StringOffsetTable::iterator end = record.jumpTable.stringJumpTable->offsetTable.end();
1660
504
for (StringJumpTable::StringOffsetTable::iterator it = record.jumpTable.stringJumpTable->offsetTable.begin(); it != end; ++it) {
1661
505
unsigned offset = it->second.branchOffset;
1662
it->second.ctiOffset = offset ? patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + offset]) : record.jumpTable.stringJumpTable->ctiDefault;
506
it->second.ctiOffset = offset ? patchBuffer.locationOf(m_labels[bytecodeIndex + 3 + offset]) : record.jumpTable.stringJumpTable->ctiDefault;
1667
511
for (size_t i = 0; i < m_codeBlock->numberOfExceptionHandlers(); ++i) {
1668
512
HandlerInfo& handler = m_codeBlock->exceptionHandler(i);
1669
handler.nativeCode = patchBuffer.addressOf(m_labels[handler.target]);
513
handler.nativeCode = patchBuffer.locationOf(m_labels[handler.target]);
1672
m_codeBlock->pcVector().reserveCapacity(m_calls.size());
1673
516
for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
1675
patchBuffer.link(iter->from, iter->to);
1676
m_codeBlock->pcVector().append(PC(reinterpret_cast<void**>(patchBuffer.addressOf(iter->from)) - reinterpret_cast<void**>(code), iter->bytecodeIndex));
518
patchBuffer.link(iter->from, FunctionPtr(iter->to));
521
if (m_codeBlock->hasExceptionInfo()) {
522
m_codeBlock->callReturnIndexVector().reserveCapacity(m_calls.size());
523
for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter)
524
m_codeBlock->callReturnIndexVector().append(CallReturnOffsetToBytecodeIndex(patchBuffer.returnAddressOffset(iter->from), iter->bytecodeIndex));
1679
527
// Link absolute addresses for jsr
1680
528
for (Vector<JSRInfo>::iterator iter = m_jsrSites.begin(); iter != m_jsrSites.end(); ++iter)
1681
patchBuffer.setPtr(iter->storeLocation, patchBuffer.addressOf(iter->target));
529
patchBuffer.patch(iter->storeLocation, patchBuffer.locationOf(iter->target).executableAddress());
531
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1683
532
for (unsigned i = 0; i < m_codeBlock->numberOfStructureStubInfos(); ++i) {
1684
533
StructureStubInfo& info = m_codeBlock->structureStubInfo(i);
1685
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1686
info.callReturnLocation = patchBuffer.addressOf(m_propertyAccessCompilationInfo[i].callReturnLocation);
1687
info.hotPathBegin = patchBuffer.addressOf(m_propertyAccessCompilationInfo[i].hotPathBegin);
1689
info.callReturnLocation = 0;
1690
info.hotPathBegin = 0;
534
info.callReturnLocation = patchBuffer.locationOf(m_propertyAccessCompilationInfo[i].callReturnLocation);
535
info.hotPathBegin = patchBuffer.locationOf(m_propertyAccessCompilationInfo[i].hotPathBegin);
538
#if ENABLE(JIT_OPTIMIZE_CALL)
1693
539
for (unsigned i = 0; i < m_codeBlock->numberOfCallLinkInfos(); ++i) {
1694
540
CallLinkInfo& info = m_codeBlock->callLinkInfo(i);
1695
#if ENABLE(JIT_OPTIMIZE_CALL)
1696
info.callReturnLocation = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].callReturnLocation);
1697
info.hotPathBegin = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].hotPathBegin);
1698
info.hotPathOther = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].hotPathOther);
1699
info.coldPathOther = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].coldPathOther);
1701
info.callReturnLocation = 0;
1702
info.hotPathBegin = 0;
1703
info.hotPathOther = 0;
1704
info.coldPathOther = 0;
1708
m_codeBlock->setJITCode(codeRef);
1711
void JIT::privateCompileCTIMachineTrampolines()
1713
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1714
// (1) The first function provides fast property access for array length
1715
Label arrayLengthBegin = align();
1717
// Check eax is an array
1718
Jump array_failureCases1 = emitJumpIfNotJSCell(X86::eax);
1719
Jump array_failureCases2 = jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsArrayVptr));
1721
// Checks out okay! - get the length from the storage
1722
loadPtr(Address(X86::eax, FIELD_OFFSET(JSArray, m_storage)), X86::eax);
1723
load32(Address(X86::eax, FIELD_OFFSET(ArrayStorage, m_length)), X86::eax);
1725
Jump array_failureCases3 = ja32(X86::eax, Imm32(JSImmediate::maxImmediateInt));
1727
// X86::eax contains a 64 bit value (is positive, is zero extended) so we don't need sign extend here.
1728
emitFastArithIntToImmNoCheck(X86::eax, X86::eax);
1732
// (2) The second function provides fast property access for string length
1733
Label stringLengthBegin = align();
1735
// Check eax is a string
1736
Jump string_failureCases1 = emitJumpIfNotJSCell(X86::eax);
1737
Jump string_failureCases2 = jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsStringVptr));
1739
// Checks out okay! - get the length from the Ustring.
1740
loadPtr(Address(X86::eax, FIELD_OFFSET(JSString, m_value) + FIELD_OFFSET(UString, m_rep)), X86::eax);
1741
load32(Address(X86::eax, FIELD_OFFSET(UString::Rep, len)), X86::eax);
1743
Jump string_failureCases3 = ja32(X86::eax, Imm32(JSImmediate::maxImmediateInt));
1745
// X86::eax contains a 64 bit value (is positive, is zero extended) so we don't need sign extend here.
1746
emitFastArithIntToImmNoCheck(X86::eax, X86::eax);
1751
// (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
1753
Label virtualCallPreLinkBegin = align();
1755
// Load the callee CodeBlock* into eax
1756
loadPtr(Address(X86::ecx, FIELD_OFFSET(JSFunction, m_body)), X86::eax);
1757
loadPtr(Address(X86::eax, FIELD_OFFSET(FunctionBodyNode, m_code)), X86::eax);
1758
Jump hasCodeBlock1 = jnzPtr(X86::eax);
1760
restoreArgumentReference();
1761
Jump callJSFunction1 = call();
1762
emitGetJITStubArg(1, X86::ecx);
1763
emitGetJITStubArg(3, X86::edx);
1765
hasCodeBlock1.link(this);
1767
// Check argCount matches callee arity.
1768
Jump arityCheckOkay1 = je32(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_numParameters)), X86::edx);
1770
emitPutJITStubArg(X86::ebx, 2);
1771
emitPutJITStubArg(X86::eax, 4);
1772
restoreArgumentReference();
1773
Jump callArityCheck1 = call();
1774
move(X86::edx, callFrameRegister);
1775
emitGetJITStubArg(1, X86::ecx);
1776
emitGetJITStubArg(3, X86::edx);
1778
arityCheckOkay1.link(this);
1780
compileOpCallInitializeCallFrame();
1783
emitPutJITStubArg(X86::ebx, 2);
1784
restoreArgumentReference();
1785
Jump callDontLazyLinkCall = call();
1790
Label virtualCallLinkBegin = align();
1792
// Load the callee CodeBlock* into eax
1793
loadPtr(Address(X86::ecx, FIELD_OFFSET(JSFunction, m_body)), X86::eax);
1794
loadPtr(Address(X86::eax, FIELD_OFFSET(FunctionBodyNode, m_code)), X86::eax);
1795
Jump hasCodeBlock2 = jnzPtr(X86::eax);
1797
restoreArgumentReference();
1798
Jump callJSFunction2 = call();
1799
emitGetJITStubArg(1, X86::ecx);
1800
emitGetJITStubArg(3, X86::edx);
1802
hasCodeBlock2.link(this);
1804
// Check argCount matches callee arity.
1805
Jump arityCheckOkay2 = je32(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_numParameters)), X86::edx);
1807
emitPutJITStubArg(X86::ebx, 2);
1808
emitPutJITStubArg(X86::eax, 4);
1809
restoreArgumentReference();
1810
Jump callArityCheck2 = call();
1811
move(X86::edx, callFrameRegister);
1812
emitGetJITStubArg(1, X86::ecx);
1813
emitGetJITStubArg(3, X86::edx);
1815
arityCheckOkay2.link(this);
1817
compileOpCallInitializeCallFrame();
1820
emitPutJITStubArg(X86::ebx, 2);
1821
restoreArgumentReference();
1822
Jump callLazyLinkCall = call();
1827
Label virtualCallBegin = align();
1829
// Load the callee CodeBlock* into eax
1830
loadPtr(Address(X86::ecx, FIELD_OFFSET(JSFunction, m_body)), X86::eax);
1831
loadPtr(Address(X86::eax, FIELD_OFFSET(FunctionBodyNode, m_code)), X86::eax);
1832
Jump hasCodeBlock3 = jnzPtr(X86::eax);
1834
restoreArgumentReference();
1835
Jump callJSFunction3 = call();
1836
emitGetJITStubArg(1, X86::ecx);
1837
emitGetJITStubArg(3, X86::edx);
1839
hasCodeBlock3.link(this);
1841
// Check argCount matches callee arity.
1842
Jump arityCheckOkay3 = je32(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_numParameters)), X86::edx);
1844
emitPutJITStubArg(X86::ebx, 2);
1845
emitPutJITStubArg(X86::eax, 4);
1846
restoreArgumentReference();
1847
Jump callArityCheck3 = call();
1848
move(X86::edx, callFrameRegister);
1849
emitGetJITStubArg(1, X86::ecx);
1850
emitGetJITStubArg(3, X86::edx);
1852
arityCheckOkay3.link(this);
1854
compileOpCallInitializeCallFrame();
1856
// load ctiCode from the new codeBlock.
1857
loadPtr(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_jitCode)), X86::eax);
1861
// All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
1862
m_interpreter->m_executablePool = m_globalData->poolForSize(m_assembler.size());
1863
void* code = m_assembler.executableCopy(m_interpreter->m_executablePool.get());
1864
PatchBuffer patchBuffer(code);
1866
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1867
patchBuffer.link(array_failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_array_fail));
1868
patchBuffer.link(array_failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_array_fail));
1869
patchBuffer.link(array_failureCases3, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_array_fail));
1870
patchBuffer.link(string_failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_string_fail));
1871
patchBuffer.link(string_failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_string_fail));
1872
patchBuffer.link(string_failureCases3, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_string_fail));
1874
m_interpreter->m_ctiArrayLengthTrampoline = patchBuffer.addressOf(arrayLengthBegin);
1875
m_interpreter->m_ctiStringLengthTrampoline = patchBuffer.addressOf(stringLengthBegin);
1877
patchBuffer.link(callArityCheck1, reinterpret_cast<void*>(Interpreter::cti_op_call_arityCheck));
1878
patchBuffer.link(callArityCheck2, reinterpret_cast<void*>(Interpreter::cti_op_call_arityCheck));
1879
patchBuffer.link(callArityCheck3, reinterpret_cast<void*>(Interpreter::cti_op_call_arityCheck));
1880
patchBuffer.link(callJSFunction1, reinterpret_cast<void*>(Interpreter::cti_op_call_JSFunction));
1881
patchBuffer.link(callJSFunction2, reinterpret_cast<void*>(Interpreter::cti_op_call_JSFunction));
1882
patchBuffer.link(callJSFunction3, reinterpret_cast<void*>(Interpreter::cti_op_call_JSFunction));
1883
patchBuffer.link(callDontLazyLinkCall, reinterpret_cast<void*>(Interpreter::cti_vm_dontLazyLinkCall));
1884
patchBuffer.link(callLazyLinkCall, reinterpret_cast<void*>(Interpreter::cti_vm_lazyLinkCall));
1886
m_interpreter->m_ctiVirtualCallPreLink = patchBuffer.addressOf(virtualCallPreLinkBegin);
1887
m_interpreter->m_ctiVirtualCallLink = patchBuffer.addressOf(virtualCallLinkBegin);
1888
m_interpreter->m_ctiVirtualCall = patchBuffer.addressOf(virtualCallBegin);
541
info.ownerCodeBlock = m_codeBlock;
542
info.callReturnLocation = patchBuffer.locationOfNearCall(m_callStructureStubCompilationInfo[i].callReturnLocation);
543
info.hotPathBegin = patchBuffer.locationOf(m_callStructureStubCompilationInfo[i].hotPathBegin);
544
info.hotPathOther = patchBuffer.locationOfNearCall(m_callStructureStubCompilationInfo[i].hotPathOther);
547
unsigned methodCallCount = m_methodCallCompilationInfo.size();
548
m_codeBlock->addMethodCallLinkInfos(methodCallCount);
549
for (unsigned i = 0; i < methodCallCount; ++i) {
550
MethodCallLinkInfo& info = m_codeBlock->methodCallLinkInfo(i);
551
info.structureLabel = patchBuffer.locationOf(m_methodCallCompilationInfo[i].structureToCompare);
552
info.callReturnLocation = m_codeBlock->structureStubInfo(m_methodCallCompilationInfo[i].propertyAccessIndex).callReturnLocation;
555
return patchBuffer.finalizeCode();
558
#if !USE(JSVALUE32_64)
1891
559
void JIT::emitGetVariableObjectRegister(RegisterID variableObject, int index, RegisterID dst)
1893
loadPtr(Address(variableObject, FIELD_OFFSET(JSVariableObject, d)), dst);
1894
loadPtr(Address(dst, FIELD_OFFSET(JSVariableObject::JSVariableObjectData, registers)), dst);
561
loadPtr(Address(variableObject, OBJECT_OFFSETOF(JSVariableObject, d)), dst);
562
loadPtr(Address(dst, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), dst);
1895
563
loadPtr(Address(dst, index * sizeof(Register)), dst);
1898
566
void JIT::emitPutVariableObjectRegister(RegisterID src, RegisterID variableObject, int index)
1900
loadPtr(Address(variableObject, FIELD_OFFSET(JSVariableObject, d)), variableObject);
1901
loadPtr(Address(variableObject, FIELD_OFFSET(JSVariableObject::JSVariableObjectData, registers)), variableObject);
568
loadPtr(Address(variableObject, OBJECT_OFFSETOF(JSVariableObject, d)), variableObject);
569
loadPtr(Address(variableObject, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), variableObject);
1902
570
storePtr(src, Address(variableObject, index * sizeof(Register)));
574
#if ENABLE(JIT_OPTIMIZE_CALL)
575
void JIT::unlinkCall(CallLinkInfo* callLinkInfo)
577
// When the JSFunction is deleted the pointer embedded in the instruction stream will no longer be valid
578
// (and, if a new JSFunction happened to be constructed at the same location, we could get a false positive
579
// match). Reset the check so it no longer matches.
580
RepatchBuffer repatchBuffer(callLinkInfo->ownerCodeBlock.get());
581
#if USE(JSVALUE32_64)
582
repatchBuffer.repatch(callLinkInfo->hotPathBegin, 0);
584
repatchBuffer.repatch(callLinkInfo->hotPathBegin, JSValue::encode(JSValue()));
588
void JIT::linkCall(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, JITCode& code, CallLinkInfo* callLinkInfo, int callerArgCount, JSGlobalData* globalData)
590
RepatchBuffer repatchBuffer(callerCodeBlock);
592
// Currently we only link calls with the exact number of arguments.
593
// If this is a native call calleeCodeBlock is null so the number of parameters is unimportant
594
if (!calleeCodeBlock || (callerArgCount == calleeCodeBlock->m_numParameters)) {
595
ASSERT(!callLinkInfo->isLinked());
598
calleeCodeBlock->addCaller(callLinkInfo);
600
repatchBuffer.repatch(callLinkInfo->hotPathBegin, callee);
601
repatchBuffer.relink(callLinkInfo->hotPathOther, code.addressForCall());
604
// patch the call so we do not continue to try to link.
605
repatchBuffer.relink(callLinkInfo->callReturnLocation, globalData->jitStubs.ctiVirtualCall());
607
#endif // ENABLE(JIT_OPTIMIZE_CALL)
1905
609
} // namespace JSC