    __ CallStub(&stub);
    // Duplicate the value; move-to-slot operation might clobber registers.
    Move(arguments->AsSlot(), r0, r1, r2);
    Slot* dot_arguments_slot = scope()->arguments_shadow()->AsSlot();
    Move(dot_arguments_slot, r3, r1, r2);
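    // Note: the ".arguments" shadow slot presumably keeps a second,
    // internal reference to the arguments object so that user-level writes
    // to 'arguments' cannot disturb internal accesses.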
  { Comment cmnt(masm_, "[ Declarations");
    // For named function expressions, declare the function name as a
    // constant.
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      EmitDeclaration(scope()->function(), Variable::CONST, NULL);
    }
    // Visit all the explicit declarations unless there is an illegal
    // redeclaration.
    if (scope()->HasIllegalRedeclaration()) {
      scope()->VisitIllegalRedeclaration(this);
    } else {
      VisitDeclarations(scope()->declarations());
    }
  }
  // Check the stack for overflow or break request.
  // Put the lr setup instruction in the delay slot.  The kInstrSize is
  // added to the implicit 8 byte offset that always applies to operations
  // with pc and gives a return address 12 bytes down.
  { Comment cmnt(masm_, "[ Stack check");
    __ LoadRoot(r2, Heap::kStackLimitRootIndex);
    __ add(lr, pc, Operand(Assembler::kInstrSize));
    __ cmp(sp, Operand(r2));
    StackCheckStub stub;
    __ mov(pc,
           Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
                   RelocInfo::CODE_TARGET),
           LeaveCC,
           lo);
  }
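  // Note on the arithmetic above: reading pc on ARM yields the address of
  // the current instruction plus 8, so adding kInstrSize (4 bytes) leaves
  // lr pointing 12 bytes past the add -- the instruction right after the
  // conditional mov to pc that performs the call.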
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  { Comment cmnt(masm_, "[ Body");
    ASSERT(loop_depth() == 0);
    VisitStatements(function()->body());
    ASSERT(loop_depth() == 0);
  }
  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        EmitDeclaration(scope()->function(), Variable::CONST, NULL);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailout(info->function(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, &ok);
      StackCheckStub stub;
      __ CallStub(&stub);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the stack check table.
  masm()->CheckConstPool(true, false);
}
void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
  Comment cmnt(masm_, "[ Stack check");
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);
  StackCheckStub stub;
  __ CallStub(&stub);
  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordStackCheck(stmt->OsrEntryId());

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
  // Check that the size of the code used for returning matches what is
  // expected by the debugger.  If the sp_delta above cannot be encoded in
  // the add instruction the add will generate two instructions.
  int return_sequence_length =
      masm_->InstructionsGeneratedSince(&check_exit_codesize);
  CHECK(return_sequence_length == Assembler::kJSReturnSequenceInstructions ||
        return_sequence_length == Assembler::kJSReturnSequenceInstructions + 1);

  // Check that the size of the code used for returning is large enough
  // for the debugger's requirements.
  ASSERT(Assembler::kJSReturnSequenceInstructions <=
         masm_->InstructionsGeneratedSince(&check_exit_codesize));
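  // (The debugger patches the JS return sequence in place with a debug
  // break, so the sequence's size must be known exactly -- or at least
  // bounded from below -- before it can be overwritten safely.)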
void FullCodeGenerator::Apply(Expression::Context context, Register reg) {
  switch (context) {
    case Expression::kUninitialized:
      UNREACHABLE();

    case Expression::kEffect:
      // Nothing to do.
      break;

    case Expression::kValue:
      // Move value into place.
      if (!reg.is(result_register())) __ mov(result_register(), reg);
      break;

    case Expression::kValueTest:
    case Expression::kTestValue:
      // Push an extra copy of the value in case it's needed.
      __ push(reg);
      // Fall through.

    case Expression::kTest:
      // We always call the runtime on ARM, so push the value as argument.
      __ push(reg);
      DoTest(context);
      break;
  }
}
void FullCodeGenerator::Apply(Expression::Context context, Slot* slot) {
  switch (context) {
    case Expression::kUninitialized:
      UNREACHABLE();

    case Expression::kEffect:
      // Nothing to do.
      break;

    case Expression::kValue:
    case Expression::kTest:
    case Expression::kValueTest:
    case Expression::kTestValue:
      // On ARM we have to move the value into a register to do anything
      // with it.
      Move(result_register(), slot);
      Apply(context, result_register());
      break;
  }
}
void FullCodeGenerator::Apply(Expression::Context context, Literal* lit) {
  switch (context) {
    case Expression::kUninitialized:
      UNREACHABLE();

    case Expression::kEffect:
      // Nothing to do.
      break;

    case Expression::kValue:
    case Expression::kTest:
    case Expression::kValueTest:
    case Expression::kTestValue:
      // On ARM we have to move the value into a register to do anything
      // with it.
      __ mov(result_register(), Operand(lit->handle()));
      Apply(context, result_register());
      break;
  }
}
void FullCodeGenerator::ApplyTOS(Expression::Context context) {
  switch (context) {
    case Expression::kUninitialized:
      UNREACHABLE();

    case Expression::kEffect:
      __ Drop(1);
      break;

    case Expression::kValue:
      __ pop(result_register());
      break;

    case Expression::kValueTest:
    case Expression::kTestValue:
      // Duplicate the value on the stack in case it's needed.
      __ ldr(ip, MemOperand(sp));
      __ push(ip);
      // Fall through.

    case Expression::kTest:
      DoTest(context);
      break;
  }
}
void FullCodeGenerator::DropAndApply(int count,
                                     Expression::Context context,
                                     Register reg) {
  switch (context) {
    case Expression::kUninitialized:
      UNREACHABLE();

    case Expression::kEffect:
      __ Drop(count);
      break;

    case Expression::kValue:
      if (!reg.is(result_register())) __ mov(result_register(), reg);
      if (count > 1) __ Drop(count - 1);
      __ str(reg, MemOperand(sp));
      break;

    case Expression::kTest:
      if (count > 1) __ Drop(count - 1);
      __ str(reg, MemOperand(sp));
      DoTest(context);
      break;

    case Expression::kValueTest:
    case Expression::kTestValue:
      if (count == 1) {
        __ str(reg, MemOperand(sp));
      } else {  // count > 1
        __ str(reg, MemOperand(sp, kPointerSize));
        __ str(reg, MemOperand(sp));
      }
      DoTest(context);
      break;
  }
}
void FullCodeGenerator::PrepareTest(Label* materialize_true,
                                    Label* materialize_false,
                                    Label** if_true,
                                    Label** if_false) {
  switch (context_) {
    case Expression::kUninitialized:
      UNREACHABLE();
      break;
    case Expression::kEffect:
      // In an effect context, the true and the false case branch to the
      // same label.
      *if_true = *if_false = materialize_true;
      break;
    case Expression::kValue:
      *if_true = materialize_true;
      *if_false = materialize_false;
      break;
    case Expression::kTest:
      *if_true = true_label_;
      *if_false = false_label_;
      break;
    case Expression::kValueTest:
      *if_true = materialize_true;
      *if_false = false_label_;
      break;
    case Expression::kTestValue:
      *if_true = true_label_;
      *if_false = materialize_false;
      break;
  }
}
void FullCodeGenerator::Apply(Expression::Context context,
                              Label* materialize_true,
                              Label* materialize_false) {
  switch (context) {
    case Expression::kUninitialized:

    case Expression::kEffect:
      ASSERT_EQ(materialize_true, materialize_false);
      __ bind(materialize_true);
      break;

    case Expression::kValue: {
      Label done;
      switch (location_) {
        case kAccumulator:
          __ bind(materialize_true);
          __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
          __ jmp(&done);
          __ bind(materialize_false);
          __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
          break;
        case kStack:
          __ bind(materialize_true);
          __ LoadRoot(ip, Heap::kTrueValueRootIndex);
          __ push(ip);
          __ jmp(&done);
          __ bind(materialize_false);
          __ LoadRoot(ip, Heap::kFalseValueRootIndex);
          __ push(ip);
          break;
      }
      __ bind(&done);
      break;
    }

    case Expression::kTest:
      break;

    case Expression::kValueTest:
      __ bind(materialize_true);
      switch (location_) {
        case kAccumulator:
          __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
          break;
        case kStack:
          __ LoadRoot(ip, Heap::kTrueValueRootIndex);
          __ push(ip);
          break;
      }
      __ jmp(true_label_);
      break;

    case Expression::kTestValue:
      __ bind(materialize_false);
      switch (location_) {
        case kAccumulator:
          __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
          break;
        case kStack:
          __ LoadRoot(ip, Heap::kFalseValueRootIndex);
          __ push(ip);
          break;
      }
      __ jmp(false_label_);
      break;
  }
}
// Convert constant control flow (true or false) to the result expected for
// a given expression context.
void FullCodeGenerator::Apply(Expression::Context context, bool flag) {
  switch (context) {
    case Expression::kUninitialized:
      UNREACHABLE();
      break;
    case Expression::kEffect:
      break;
    case Expression::kValue: {
      Heap::RootListIndex value_root_index =
          flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
      switch (location_) {
        case kAccumulator:
          __ LoadRoot(result_register(), value_root_index);
          break;
        case kStack:
          __ LoadRoot(ip, value_root_index);
          __ push(ip);
          break;
      }
      break;
    }
    case Expression::kTest:
      __ b(flag ? true_label_ : false_label_);
      break;
    case Expression::kTestValue:
      switch (location_) {
        case kAccumulator:
          // If value is false it's needed.
          if (!flag) __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
          break;
        case kStack:
          // If value is false it's needed.
          if (!flag) {
            __ LoadRoot(ip, Heap::kFalseValueRootIndex);
            __ push(ip);
          }
          break;
      }
      __ b(flag ? true_label_ : false_label_);
      break;
    case Expression::kValueTest:
      switch (location_) {
        case kAccumulator:
          // If value is true it's needed.
          if (flag) __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
          break;
        case kStack:
          // If value is true it's needed.
          if (flag) {
            __ LoadRoot(ip, Heap::kTrueValueRootIndex);
            __ push(ip);
          }
          break;
      }
      __ b(flag ? true_label_ : false_label_);
      break;
  }
}
void FullCodeGenerator::DoTest(Expression::Context context) {
  // The value to test is pushed on the stack, and duplicated on the stack
  // if necessary (for value/test and test/value contexts).
  ASSERT_NE(NULL, true_label_);
  ASSERT_NE(NULL, false_label_);

  // Call the runtime to find the boolean value of the source and then
  // translate it into control flow to the pair of labels.
  __ CallRuntime(Runtime::kToBool, 1);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ cmp(r0, ip);

  // Complete based on the context.
  switch (context) {
    case Expression::kUninitialized:
    case Expression::kEffect:
    case Expression::kValue:
      UNREACHABLE();

    case Expression::kTest:
      __ b(eq, true_label_);
      __ jmp(false_label_);
      break;

    case Expression::kValueTest: {
      Label discard;
      switch (location_) {
        case kAccumulator:
          __ b(ne, &discard);
          __ pop(result_register());
          __ jmp(true_label_);
          break;
        case kStack:
          __ b(eq, true_label_);
          break;
      }
      __ bind(&discard);
      __ Drop(1);
      __ jmp(false_label_);
      break;
    }

    case Expression::kTestValue: {
      Label discard;
      switch (location_) {
        case kAccumulator:
          __ b(eq, &discard);
          __ pop(result_register());
          __ jmp(false_label_);
          break;
        case kStack:
          __ b(ne, false_label_);
          break;
      }
      __ bind(&discard);
      __ Drop(1);
      __ jmp(true_label_);
      break;
    }
  }
}
void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const {
  codegen()->Move(result_register(), slot);
}


void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
  codegen()->Move(result_register(), slot);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
  // For simplicity we always test the accumulator register.
  codegen()->Move(result_register(), slot);
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
  codegen()->DoTest(true_label_, false_label_, fall_through_);
}
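// Each *Context class encodes what the enclosing expression wants done with
// a value: nothing (effect), leave it in the accumulator, push it on the
// stack, or branch on it (test).  The Plug() overloads realize a value of
// the given kind accordingly.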
void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(true_label_, false_label_, fall_through_);
  }
}
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(true_label_, false_label_, fall_through_);
  }
}
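// Literals with a statically known boolean value thus fold directly into a
// jump (or into no code at all when the target is the fall-through); only
// the remaining cases materialize the value and go through DoTest.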
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
  codegen()->DoTest(true_label_, false_label_, fall_through_);
}
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ push(ip);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ push(ip);
  __ bind(&done);
}
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}
void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}
void FullCodeGenerator::DoTest(Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  if (CpuFeatures::IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);
    // Emit the inlined tests assumed by the stub.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(result_register(), ip);
    __ b(eq, if_false);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(result_register(), ip);
    __ b(eq, if_true);
    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
    __ cmp(result_register(), ip);
    __ b(eq, if_false);
    STATIC_ASSERT(kSmiTag == 0);
    __ tst(result_register(), result_register());
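    // With kSmiTag == 0, 'tst reg, reg' sets the Z flag exactly when the
    // register holds the smi 0, so the zero check needs no extra constant.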
    __ b(eq, if_false);
    __ JumpIfSmi(result_register(), if_true);

    // Call the ToBoolean stub for all other cases.
    ToBooleanStub stub(result_register());
    __ CallStub(&stub);
    __ tst(result_register(), result_register());
  } else {
    // Call the runtime to find the boolean value of the source and then
    // translate it into control flow to the pair of labels.
    __ push(result_register());
    __ CallRuntime(Runtime::kToBool, 1);
    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
    __ cmp(r0, ip);
  }

  // The stub returns nonzero for true.
  Split(ne, if_true, if_false, fall_through);
}
void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}
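// Split emits at most the branches actually needed: a target that coincides
// with the fall-through position costs no instruction, so a comparison used
// by an 'if' whose false block follows immediately emits a single branch.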
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub;
    __ mov(r0, Operand(info));
    __ push(r0);
    __ CallStub(&stub);
  } else {
    __ mov(r0, Operand(info));
    __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, r0, r1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(r0);
}
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr->var());
}
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
    Slot* slot,
    Label* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  Register context = cp;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      __ ldr(next, ContextOperand(context, Context::CLOSURE_INDEX));
      __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(temp, temp);
  __ b(ne, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, slot->index());
}
void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
    Slot* slot,
    TypeofState typeof_state,
    Label* slow,
    Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
    EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
    __ jmp(done);
  } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
    Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
    Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
    if (potential_slot != NULL) {
      // Generate fast case for locals that rewrite to slots.
      __ ldr(r0, ContextSlotOperandCheckExtensions(potential_slot, slow));
      if (potential_slot->var()->mode() == Variable::CONST) {
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ cmp(r0, ip);
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
      }
      __ jmp(done);
    } else if (rewrite != NULL) {
      // Generate fast case for calls of an argument function.
      Property* property = rewrite->AsProperty();
      if (property != NULL) {
        VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
        Literal* key_literal = property->key()->AsLiteral();
        if (obj_proxy != NULL &&
            key_literal != NULL &&
            obj_proxy->IsArguments() &&
            key_literal->handle()->IsSmi()) {
          // Load arguments object if there are no eval-introduced
          // variables.  Then load the argument from the arguments
          // object using keyed load.
          __ ldr(r1,
                 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
                                                   slow));
          __ mov(r0, Operand(key_literal->handle()));
          Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
          EmitCallIC(ic, RelocInfo::CODE_TARGET);
          __ jmp(done);
        }
      }
    }
  }
}
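// Illustrative JS for the argument fast case above (example, not from the
// original source):
//   function f(x) { return arguments[0]; }
// The 'arguments[0]' access rewrites to a keyed load from the materialized
// arguments object, which is served here without a runtime context lookup.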
void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
    Slot* slot,
    TypeofState typeof_state,
    Label* slow) {
  Register current = cp;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      // Load next context in chain.
      __ ldr(next, ContextOperand(current, Context::CLOSURE_INDEX));
      __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at global context.
    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
    __ cmp(temp, ip);
    __ b(eq, &fast);
    // Check that extension is NULL.
    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ tst(temp, temp);
    __ b(ne, slow);
    // Load next context in chain.
    __ ldr(next, ContextOperand(next, Context::CLOSURE_INDEX));
    __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
    __ b(&loop);
    __ bind(&fast);
  }

  __ ldr(r0, GlobalObjectOperand());
  __ mov(r2, Operand(slot->var()->name()));
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
  EmitCallIC(ic, mode);
}
void FullCodeGenerator::EmitVariableLoad(Variable* var) {
  // Four cases: non-this global variables, lookup slots, all other
  // types of slots, and parameters that rewrite to explicit property
  // accesses on the arguments object.
  Slot* slot = var->AsSlot();
  Property* property = var->AsProperty();

  if (var->is_global() && !var->is_this()) {
    Comment cmnt(masm_, "Global variable");
    // Use inline caching. Variable name is passed in r2 and the global
    // object (receiver) in r0.
    __ ldr(r0, GlobalObjectOperand());
    __ mov(r2, Operand(var->name()));
    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
    context()->Plug(r0);

  } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    Comment cmnt(masm_, "Lookup slot");
    __ mov(r1, Operand(var->name()));
    __ Push(cp, r1);  // Context and name.
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
    __ bind(&done);
    context()->Plug(r0);

  } else if (slot != NULL) {
    Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
                            ? "Context slot"
                            : "Stack slot");
    if (var->mode() == Variable::CONST) {
      // Constants may be the hole value if they have not been initialized.
      MemOperand slot_operand = EmitSlotSearch(slot, r0);
      __ ldr(r0, slot_operand);
      __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
      __ cmp(r0, ip);
      __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
      context()->Plug(r0);
    } else {
      context()->Plug(slot);
    }

  } else {
    Comment cmnt(masm_, "Rewritten parameter");
    ASSERT_NOT_NULL(property);
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the accumulator.
        VisitForAccumulatorValue(property->obj());
        __ push(result_register());
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        if (property->is_arguments_access()) {
          VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
          __ ldr(r0, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
          __ push(r0);
          __ mov(r0, Operand(property->key()->AsLiteral()->handle()));
        } else {
          VisitForStackValue(property->obj());
          VisitForAccumulatorValue(property->key());
        }
        __ ldr(r1, MemOperand(sp, 0));
        __ push(r0);
      } else {
        if (property->is_arguments_access()) {
          VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
          __ ldr(r1, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
          __ mov(r0, Operand(property->key()->AsLiteral()->handle()));
          __ Push(r1, r0);
        } else {
          VisitForStackValue(property->obj());
          VisitForStackValue(property->key());
        }
      }
      break;
  }
  // If we have a compound assignment: Get value of LHS expression and
  // store it on top of the stack.
  if (expr->is_compound()) {
    Location saved_location = location_;
    location_ = kStack;
    switch (assign_type) {
      case VARIABLE:
        EmitVariableLoad(expr->target()->AsVariableProxy()->var(),
                         Expression::kValue);
        break;
      case NAMED_PROPERTY:
        EmitNamedPropertyLoad(prop);
        __ push(result_register());
        break;
      case KEYED_PROPERTY:
        EmitKeyedPropertyLoad(prop);
        __ push(result_register());
        break;
    }
    location_ = saved_location;
  }

  // Evaluate RHS expression.
  Expression* rhs = expr->value();
  VisitForValue(rhs, kAccumulator);

  // If we have a compound assignment: Apply operator.
  if (expr->is_compound()) {
    Location saved_location = location_;
    location_ = kAccumulator;
    EmitBinaryOp(expr->binary_op(), Expression::kValue);
    location_ = saved_location;
  }
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy()->var());
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          break;
      }
    }

    // For property compound assignments we need another deoptimization
    // point after the property load.
    if (property != NULL) {
      PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
    }

    Token::Value op = expr->binary_op();
    __ push(r0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr,
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }
  // Record source position before possible IC call.
  SetSourcePosition(prop->position());
  // Call keyed load IC. It has arguments key and receiver in r0 and r1.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  EmitCallIC(ic, RelocInfo::CODE_TARGET);
void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = r2;
  Register scratch2 = r3;

  // Get the arguments.
  Register left = r1;
  Register right = r0;
  __ pop(left);

  // Perform combined smi check on both operands.
  __ orr(scratch1, left, Operand(right));
  STATIC_ASSERT(kSmiTag == 0);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  __ bind(&stub_call);
  TypeRecordingBinaryOpStub stub(op, mode);
  EmitCallIC(stub.GetCode(), &patch_site);
  __ jmp(&done);
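  // The patch site records the smi-check branch so that, once the
  // type-recording stub has collected type feedback, the inlined smi path
  // can be enabled or disabled later by patching that branch in place.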
  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the type
  // recording binary operation stub, see
  // TypeRecordingBinaryOpStub::GenerateSmiSmiOperation for comments.
  switch (op) {
    case Token::SAR:
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ mov(right, Operand(left, ASR, scratch1));
      __ bic(right, right, Operand(kSmiTagMask));
      break;
    case Token::SHL: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ mov(scratch1, Operand(scratch1, LSL, scratch2));
      __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
      __ b(mi, &stub_call);
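      // Adding 0x40000000 maps the valid smi range [-2^30, 2^30) onto the
      // non-negative 32-bit range, so the N flag (mi) is set exactly when
      // the shifted result cannot be re-tagged as a smi.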
      __ SmiTag(right, scratch1);
      break;
    }
    case Token::SHR: {
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ mov(scratch1, Operand(scratch1, LSR, scratch2));
      __ tst(scratch1, Operand(0xc0000000));
      __ b(ne, &stub_call);
      __ SmiTag(right, scratch1);
      break;
    }
    case Token::ADD:
      __ add(scratch1, left, Operand(right), SetCC);
      __ b(vs, &stub_call);
      __ mov(right, scratch1);
      break;
    case Token::SUB:
      __ sub(scratch1, left, Operand(right), SetCC);
      __ b(vs, &stub_call);
      __ mov(right, scratch1);
      break;
    case Token::MUL: {
      __ SmiUntag(ip, right);
      __ smull(scratch1, scratch2, left, ip);
      __ mov(ip, Operand(scratch1, ASR, 31));
      __ cmp(ip, Operand(scratch2));
      __ b(ne, &stub_call);
      __ tst(scratch1, Operand(scratch1));
      __ mov(right, Operand(scratch1), LeaveCC, ne);
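      // smull produces a 64-bit product; it fits in 32 bits only when the
      // high word equals the sign extension (ASR 31) of the low word.  The
      // extra zero test guards the -0 case, which no smi can represent.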
      __ b(ne, &done);
      __ add(scratch2, right, Operand(left), SetCC);
      __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
      __ b(mi, &stub_call);
      break;
    }
    case Token::BIT_OR:
      __ orr(right, left, Operand(right));
      break;
    case Token::BIT_AND:
      __ and_(right, left, Operand(right));
      break;
    case Token::BIT_XOR:
      __ eor(right, left, Operand(right));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(r0);
}
void FullCodeGenerator::EmitBinaryOp(Token::Value op,
                                     OverwriteMode mode) {
  __ pop(r1);
  TypeRecordingBinaryOpStub stub(op, mode);
  EmitCallIC(stub.GetCode(), NULL);
  context()->Plug(r0);
}
void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
  // Invalid left-hand sides are rewritten to have a 'throw
  // ReferenceError' on the left-hand side.
  if (!expr->IsValidLeftHandSide()) {
  if (var->is_global()) {
    ASSERT(!var->is_this());
    // Assignment to a global variable.  Use inline caching for the
    // assignment.  Right-hand-side value is passed in r0, variable name in
    // r2, and the global object in r1.
    __ mov(r2, Operand(var->name()));
    __ ldr(r1, GlobalObjectOperand());
    Handle<Code> ic(Builtins::builtin(
        is_strict() ? Builtins::StoreIC_Initialize_Strict
                    : Builtins::StoreIC_Initialize));
    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);

  } else if (op == Token::INIT_CONST) {
    // Like var declarations, const declarations are hoisted to function
    // scope.  However, unlike var initializers, const initializers are able
    // to drill a hole to that function context, even from inside a 'with'
    // context.  We thus bypass the normal static scope lookup.
    Slot* slot = var->AsSlot();
    Label skip;
    switch (slot->type()) {
      case Slot::PARAMETER:
        // No const parameters.
        UNREACHABLE();
        break;
      case Slot::LOCAL:
        // Detect const reinitialization by checking for the hole value.
        __ ldr(r1, MemOperand(fp, SlotOffset(slot)));
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ cmp(r1, ip);
        __ b(ne, &skip);
        __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
        break;
      case Slot::CONTEXT: {
        __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX));
        __ ldr(r2, ContextOperand(r1, slot->index()));
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ cmp(r2, ip);
        __ b(ne, &skip);
        __ str(r0, ContextOperand(r1, slot->index()));
        int offset = Context::SlotOffset(slot->index());
        __ mov(r3, r0);  // Preserve the stored value in r0.
        __ RecordWrite(r1, Operand(offset), r3, r2);
        break;
      }
      case Slot::LOOKUP:
        __ push(r0);  // Value.
        __ mov(r0, Operand(slot->var()->name()));
        __ Push(cp, r0);  // Context and name.
        __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
        break;
    }
    __ bind(&skip);

  } else if (var->mode() != Variable::CONST) {
    // Perform the assignment for non-const variables.  Const assignments
    // are simply skipped.
    Slot* slot = var->AsSlot();
    switch (slot->type()) {
      case Slot::PARAMETER:
      case Slot::LOCAL:
        // Perform the assignment.
        __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
        break;

      case Slot::CONTEXT: {
        MemOperand target = EmitSlotSearch(slot, r1);
        // Perform the assignment and issue the write barrier.
        __ str(result_register(), target);
        // RecordWrite may destroy all its register arguments.
    // resolve the function we need to call and the receiver of the
    // call.  Then we call the resolved function using the given
    // arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(fun);
      __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
      __ push(r2);  // Reserved receiver slot.

      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // If we know that eval can only be shadowed by eval-introduced
      // variables we attempt to load the global eval function directly
      // in generated code. If we succeed, there is no need to perform a
      // context lookup in the runtime system.
      Label done;
      if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
        Label slow;
        EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
                                          NOT_INSIDE_TYPEOF,
                                          &slow);
        // Push the function and resolve eval.
        __ push(r0);
        EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
        __ jmp(&done);
        __ bind(&slow);
      }

      // Push copy of the function (found below the arguments) and
      // resolve eval.
      __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ push(r1);
      EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
      if (done.is_linked()) {
        __ bind(&done);
      }

      // The runtime call returns a pair of values in r0 (function) and
      // r1 (receiver). Touch up the stack with the right values.
      __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ str(r1, MemOperand(sp, arg_count * kPointerSize));
    }

    // Record source position for debugger.
    SetSourcePosition(expr->position());
    InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
    CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, r0);

  } else if (var != NULL && !var->is_this() && var->is_global()) {
    // Push global object as receiver for the call IC.
    __ ldr(r0, GlobalObjectOperand());
    __ push(r0);
    EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);

  } else if (var != NULL && var->AsSlot() != NULL &&
             var->AsSlot()->type() == Slot::LOOKUP) {
    // Call to a lookup slot (dynamically introduced variable).
    Label slow, done;

    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
                                      NOT_INSIDE_TYPEOF,
                                      &slow,
                                      &done);
    }

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in r0)
    // and the object holding it (returned in r1).
    __ push(context_register());
    __ mov(r2, Operand(var->name()));
    __ push(r2);
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
    __ Push(r0, r1);  // Function, receiver.

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ b(&call);
      __ bind(&done);
      // Push function.
      __ push(r0);
      // Push global receiver.
      __ ldr(r1, GlobalObjectOperand());
      __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
      __ push(r1);
      __ bind(&call);
    }

    EmitCallWithStub(expr);

  } else if (fun->AsProperty() != NULL) {
    // Call to an object property.
// Call to an object property.
1821
2361
// According to ECMA-262, section 11.2.2, page 44, the function
1822
2362
// expression in new calls must be evaluated before the
1824
// Push function on the stack.
1825
VisitForValue(expr->expression(), kStack);
1827
// Push global object (receiver).
1828
__ ldr(r0, CodeGenerator::GlobalObject());
2365
// Push constructor on the stack. If it's not a function it's used as
2366
// receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2368
VisitForStackValue(expr->expression());
1830
2370
// Push the arguments ("left-to-right") on the stack.
1831
2371
ZoneList<Expression*>* args = expr->arguments();
1832
2372
int arg_count = args->length();
1833
2373
for (int i = 0; i < arg_count; i++) {
1834
VisitForValue(args->at(i), kStack);
2374
VisitForStackValue(args->at(i));
1837
2377
// Call the construct call builtin that handles allocation and
1838
2378
// constructor invocation.
1839
2379
SetSourcePosition(expr->position());
1841
// Load function, arg_count into r1 and r0.
2381
// Load function and argument count into r1 and r0.
1842
2382
__ mov(r0, Operand(arg_count));
1843
// Function is in sp[arg_count + 1].
1844
__ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2383
__ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
1846
2385
Handle<Code> construct_builtin(Builtins::builtin(Builtins::JSConstructCall));
1847
2386
__ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
1849
// Replace function on TOS with result in r0, or pop it.
1850
DropAndApply(1, context_, r0);
2387
context()->Plug(r0);
void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
  __ tst(r0, Operand(kSmiTagMask));
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
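// These Emit* predicates all share one shape: evaluate the argument into r0,
// let the expression context pick the branch targets, record a bailout
// point, and split control flow on a single flag-setting instruction.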
void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
  __ tst(r0, Operand(kSmiTagMask | 0x80000000));
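  // A single tst covers both requirements: kSmiTagMask catches non-smis and
  // 0x80000000 catches negative values, so eq means "non-negative smi".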
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, if_true);
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ tst(r1, Operand(1 << Map::kIsUndetectable));
  __ b(ne, if_false);
  __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  __ cmp(r1, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, if_false);
  __ cmp(r1, Operand(LAST_JS_OBJECT_TYPE));
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
  Split(le, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
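// JS object instance types occupy the contiguous range
// [FIRST_JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE], so the pair of compares plus
// Split(le, ...) implements the range check, the lt case having already
// branched to if_false.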
void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
  Split(ge, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
  __ tst(r1, Operand(1 << Map::kIsUndetectable));
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
  Split(ne, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Just indicate false, as %_IsStringWrapperSafeForDefaultValueOf() is only
  // used in a few functions in runtime.js which should not normally be hit by
  // this compiler.
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
  __ jmp(if_false);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
  __ LoadRoot(r0, Heap::kTrueValueRootIndex);
  __ bind(&done);

  context()->Plug(r0);
}
void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
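// A string's hash field caches a numeric array index when the string is a
// valid one; all kContainsCachedArrayIndexMask bits being clear (eq after
// the tst) means such a cached index is present.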
void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  if (FLAG_debug_code) {
    __ AbortIfNotString(r0);
  }

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ IndexFromHash(r0, r0);

  context()->Plug(r0);
}
void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;

  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = r0;
  Register elements = no_reg;  // Will be r0.
  Register result = no_reg;  // Will be r0.
  Register separator = r1;
  Register array_length = r2;
  Register result_pos = no_reg;  // Will be r2.
  Register string_length = r3;
  Register string = r4;
  Register element = r5;
  Register elements_end = r6;
  Register scratch1 = r7;
  Register scratch2 = r9;

  // Separator operand is on the stack.
  __ ldr(separator, MemOperand(sp));

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
  __ b(ne, &bailout);

  // Check that the array has fast elements.
  __ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitField2Offset));
  __ tst(scratch2, Operand(1 << Map::kHasFastElements));
  __ b(eq, &bailout);

  // If the array has length zero, return the empty string.
  __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length, SetCC);
  __ b(ne, &non_trivial_array);
  __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
  __ b(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing array's elements.
  elements = array;
  __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, Operand(0));
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi)
  //   separator: Separator string
  //   string_length: Accumulated sum of string lengths (smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (FLAG_debug_code) {
    __ cmp(array_length, Operand(0));
    __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
  }
  __ bind(&loop);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ JumpIfSmi(string, &bailout);
  __ ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
  __ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
  __ add(string_length, string_length, Operand(scratch1));
  __ cmp(element, elements_end);
  __ b(lt, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, Operand(1));
  __ b(ne, &not_size_one_array);
  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ b(&done);

  __ bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string
  //   array_length: Length of the array.
  //   string_length: Sum of string lengths (smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat ASCII string.
  __ JumpIfSmi(separator, &bailout);
  __ ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string.  array_length is
  // not a smi but the other values are, so the result is a smi.
  __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
  __ sub(string_length, string_length, Operand(scratch1));
  __ smull(scratch2, ip, array_length, scratch1);
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result
  // are zero.
  __ cmp(ip, Operand(0));
  __ b(ne, &bailout);
  __ tst(scratch2, Operand(0x80000000));
  __ b(ne, &bailout);
  __ add(string_length, string_length, Operand(scratch2));
  __ SmiUntag(string_length);
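  // At this point string_length holds, untagged, the total character count:
  // the sum of the element lengths plus (array_length - 1) copies of the
  // separator; the smull/cmp/tst sequence above rejected any total that
  // would not fit in a smi.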
+   // Get first element in the array to free up the elements register to be
+   // used for the result.
+   __ add(element,
+          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+   result = elements;  // End of live range for elements.
+
+   // Live values in registers:
+   //   element: First array element
+   //   separator: Separator string
+   //   string_length: Length of result string (not smi)
+   //   array_length: Length of the array.
+   __ AllocateAsciiString(result,
+                          string_length,
+                          scratch1,
+                          scratch2,
+                          elements_end,
+                          &bailout);
+   // Prepare for looping. Set up elements_end to point past the end of the
+   // array. Set result_pos to the position in the result where the first
+   // character will be written.
+   __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
+   result_pos = array_length;  // End of live range for array_length.
+   array_length = no_reg;
+   __ add(result_pos,
+          result,
+          Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+
+   // Check the length of the separator.
+   __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
+   __ cmp(scratch1, Operand(Smi::FromInt(1)));
+   __ b(eq, &one_char_separator);
+   __ b(gt, &long_separator);
+
+   // Empty separator case
+   __ bind(&empty_separator_loop);
+   // Live values in registers:
+   //   result_pos: the position to which we are currently copying characters.
+   //   element: Current array element.
+   //   elements_end: Array end.
+
+   // Copy next array element to the result.
+   __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
+   __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
+   __ SmiUntag(string_length);
+   __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+   __ CopyBytes(string, result_pos, string_length, scratch1);
+   __ cmp(element, elements_end);
+   __ b(lt, &empty_separator_loop);  // End while (element < elements_end).
+   ASSERT(result.is(r0));
+   __ b(&done);
+
+   // One-character separator case
+   __ bind(&one_char_separator);
+   // Replace separator with its ASCII character value.
+   __ ldrb(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
+   // Jump into the loop after the code that copies the separator, so the
+   // first element is not preceded by a separator.
+   __ jmp(&one_char_separator_loop_entry);
+
+   __ bind(&one_char_separator_loop);
+   // Live values in registers:
+   //   result_pos: the position to which we are currently copying characters.
+   //   element: Current array element.
+   //   elements_end: Array end.
+   //   separator: Single separator ASCII char (in lower byte).
+
+   // Copy the separator character to the result.
+   __ strb(separator, MemOperand(result_pos, 1, PostIndex));
+
+   // Copy next array element to the result.
+   __ bind(&one_char_separator_loop_entry);
+   __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
+   __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
+   __ SmiUntag(string_length);
+   __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+   __ CopyBytes(string, result_pos, string_length, scratch1);
+   __ cmp(element, elements_end);
+   __ b(lt, &one_char_separator_loop);  // End while (element < elements_end).
+   ASSERT(result.is(r0));
+   __ b(&done);
+
+   // Long separator case (separator is more than one character). Entry is at
+   // the label long_separator below.
+   __ bind(&long_separator_loop);
+   // Live values in registers:
+   //   result_pos: the position to which we are currently copying characters.
+   //   element: Current array element.
+   //   elements_end: Array end.
+   //   separator: Separator string.
+
+   // Copy the separator to the result.
+   __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
+   __ SmiUntag(string_length);
+   __ add(string,
+          separator,
+          Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+   __ CopyBytes(string, result_pos, string_length, scratch1);
+
+   __ bind(&long_separator);
+   __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
+   __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
+   __ SmiUntag(string_length);
+   __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+   __ CopyBytes(string, result_pos, string_length, scratch1);
+   __ cmp(element, elements_end);
+   __ b(lt, &long_separator_loop);  // End while (element < elements_end).
+   ASSERT(result.is(r0));
+   __ b(&done);
+
+   __ bind(&bailout);
+   __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+   __ bind(&done);
+   context()->Plug(r0);
+ }
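What the whole fast path computes can be written compactly at the C++ level; the three loops above are specializations of the single loop below for the empty, one-character, and longer separator cases, with the result length precomputed so the string is allocated exactly once up front (illustrative sketch, not V8 code):

    #include <string>
    #include <vector>

    // Reference semantics for Array.prototype.join on flat ASCII strings.
    std::string JoinStrings(const std::vector<std::string>& parts,
                            const std::string& separator) {
      std::string result;
      for (size_t i = 0; i < parts.size(); ++i) {
        if (i > 0) result += separator;  // No separator before the first.
        result += parts[i];
      }
      return result;
    }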
    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
-     case Expression::kUninitialized:
-     case Expression::kEffect:
-     case Expression::kValue:
-       __ LoadRoot(result_register(), Heap::kUndefinedValueRootIndex);
-       switch (location_) {
-           __ push(result_register());
-     case Expression::kTestValue:
-       // Value is false so it's needed.
-       __ LoadRoot(result_register(), Heap::kUndefinedValueRootIndex);
-       switch (location_) {
-           __ push(result_register());
-     case Expression::kTest:
-     case Expression::kValueTest:
-       __ jmp(false_label_);
+     context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
-     Label materialize_true, materialize_false;
-     Label* if_true = NULL;
-     Label* if_false = NULL;
-     // Notice that the labels are swapped.
-     PrepareTest(&materialize_true, &materialize_false, &if_false, &if_true);
-     VisitForControl(expr->expression(), if_true, if_false);
-     Apply(context_, if_false, if_true);  // Labels swapped.
+     if (context()->IsEffect()) {
+       // Unary NOT has no side effects so it's only necessary to visit the
+       // subexpression.  Match the optimizing compiler by not branching.
+       VisitForEffect(expr->expression());
+     } else {
+       Label materialize_true, materialize_false;
+       Label* if_true = NULL;
+       Label* if_false = NULL;
+       Label* fall_through = NULL;
+       // Notice that the labels are swapped.
+       context()->PrepareTest(&materialize_true, &materialize_false,
+                              &if_false, &if_true, &fall_through);
+       if (context()->IsTest()) ForwardBailoutToChild(expr);
+       VisitForControl(expr->expression(), if_true, if_false, fall_through);
+       context()->Plug(if_false, if_true);  // Labels swapped.
+     }
      break;
    }
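The swapped-labels trick in the NOT case has a simple scalar analogue: branching on !cond with targets (T, F) is the same as branching on cond with targets (F, T), so no NOT instruction is ever emitted (illustrative sketch, not V8 code):

    // Negation by swapping branch targets instead of computing !cond.
    void BranchOnNot(bool cond, void (*if_true)(), void (*if_false)()) {
      // Visiting '!expr' for control with (if_true, if_false) is identical
      // to visiting 'expr' for control with the labels exchanged.
      if (cond) if_false(); else if_true();
    }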
    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
-     VariableProxy* proxy = expr->expression()->AsVariableProxy();
-     if (proxy != NULL &&
-         !proxy->var()->is_this() &&
-         proxy->var()->is_global()) {
-       Comment cmnt(masm_, "Global variable");
-       __ ldr(r0, CodeGenerator::GlobalObject());
-       __ mov(r2, Operand(proxy->name()));
-       Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
-       // Use a regular load, not a contextual load, to avoid a reference
-       // error.
-       __ Call(ic, RelocInfo::CODE_TARGET);
-     } else if (proxy != NULL &&
-                proxy->var()->slot() != NULL &&
-                proxy->var()->slot()->type() == Slot::LOOKUP) {
-       __ mov(r0, Operand(proxy->name()));
-       __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
-     } else {
-       // This expression cannot throw a reference error at the top level.
-       VisitForValue(expr->expression(), kStack);
-     }
+     { StackValueContext context(this);
+       VisitForTypeofValue(expr->expression());
+     }
      __ CallRuntime(Runtime::kTypeof, 1);
-     Apply(context_, r0);
+     context()->Plug(r0);
      break;
    }
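The "regular load, not a contextual load" comment in both versions exists because of typeof's special behavior on unresolvable references:

    // Semantics the TYPEOF path must preserve (illustration):
    //   typeof not_declared;  // "undefined" -- must not throw.
    //   not_declared;         // a contextual load throws ReferenceError.

This is also why the LOOKUP-slot path calls kLoadContextSlotNoReferenceError rather than the ordinary context-slot load.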
    case Token::ADD: {
      Comment cmt(masm_, "[ UnaryOperation (ADD)");
-     VisitForValue(expr->expression(), kAccumulator);
+     VisitForAccumulatorValue(expr->expression());
      Label no_conversion;
      __ tst(result_register(), Operand(kSmiTagMask));
      __ b(eq, &no_conversion);
-     __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS);
+     ToNumberStub convert_stub;
+     __ CallStub(&convert_stub);
      __ bind(&no_conversion);
-     Apply(context_, result_register());
+     context()->Plug(result_register());
      break;
    }
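Unary '+' performs only ToNumber, which is why a smi can skip the call entirely:

    // Illustration of the fast path above:
    //   +42        -> 42   (already a smi; stub call skipped)
    //   +"3"       -> 3    (non-smi: falls through to the ToNumber stub)
    //   +undefined -> NaN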
    case Token::SUB: {
      Comment cmt(masm_, "[ UnaryOperation (SUB)");
-     bool can_overwrite =
-         (expr->expression()->AsBinaryOperation() != NULL &&
-          expr->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
+     bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
      UnaryOverwriteMode overwrite =
          can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
-     GenericUnaryOpStub stub(Token::SUB, overwrite);
+     GenericUnaryOpStub stub(Token::SUB, overwrite, NO_UNARY_FLAGS);
      // GenericUnaryOpStub expects the argument to be in the
      // accumulator register r0.
-     VisitForValue(expr->expression(), kAccumulator);
+     VisitForAccumulatorValue(expr->expression());
      __ CallStub(&stub);
-     Apply(context_, r0);
+     context()->Plug(r0);
      break;
    }
    case Token::BIT_NOT: {
      Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)");
-     bool can_overwrite =
-         (expr->expression()->AsBinaryOperation() != NULL &&
-          expr->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
-     UnaryOverwriteMode overwrite =
-         can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
-     GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
-     // GenericUnaryOpStub expects the argument to be in the
-     // accumulator register r0.
-     VisitForValue(expr->expression(), kAccumulator);
-     // Avoid calling the stub for Smis.
-     __ BranchOnSmi(result_register(), &smi);
-     // Non-smi: call stub leaving result in accumulator register.
+     // The generic unary operation stub expects the argument to be
+     // in the accumulator register r0.
+     VisitForAccumulatorValue(expr->expression());
+     Label done;
+     bool inline_smi_code = ShouldInlineSmiCase(expr->op());
+     if (inline_smi_code) {
+       Label call_stub;
+       __ JumpIfNotSmi(r0, &call_stub);
+       __ mvn(r0, Operand(r0));
+       // Bit-clear inverted smi-tag.
+       __ bic(r0, r0, Operand(kSmiTagMask));
+       __ b(&done);
+       __ bind(&call_stub);
+     }
+     bool overwrite = expr->expression()->ResultOverwriteAllowed();
+     UnaryOpFlags flags = inline_smi_code
+         ? NO_UNARY_SMI_CODE_IN_STUB
+         : NO_UNARY_FLAGS;
+     UnaryOverwriteMode mode =
+         overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
+     GenericUnaryOpStub stub(Token::BIT_NOT, mode, flags);
      __ CallStub(&stub);
-     // Perform operation directly on Smis.
-     __ mvn(result_register(), Operand(result_register()));
-     // Bit-clear inverted smi-tag.
-     __ bic(result_register(), result_register(), Operand(kSmiTagMask));
      __ bind(&done);
-     Apply(context_, result_register());
+     context()->Plug(r0);
      break;
    }
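The inlined smi path for '~' relies on the tagging scheme: a smi stores value v as v << 1 with a zero tag bit, and mvn followed by bic yields the tagged NOT directly (illustrative sketch, not V8 code):

    #include <cstdint>

    // Why mvn + bic implements smi '~': ~(2v) = -2v - 1, and clearing the
    // now-set tag bit gives -2v - 2 = 2 * (-v - 1) = (~v) << 1.
    int32_t SmiBitwiseNot(int32_t tagged) {
      int32_t inverted = ~tagged;  // mvn: flips payload and the tag bit.
      return inverted & ~1;        // bic: clear the inverted smi tag.
    }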
  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
-   Location saved_location = location_;
-   location_ = kAccumulator;
-   EmitVariableLoad(expr->expression()->AsVariableProxy()->var(),
-                    Expression::kValue);
-   location_ = saved_location;
+   AccumulatorValueContext context(this);
+   EmitVariableLoad(expr->expression()->AsVariableProxy()->var());
  } else {
    // Reserve space for result of postfix operation.
-   if (expr->is_postfix() && context_ != Expression::kEffect) {
+   if (expr->is_postfix() && !context()->IsEffect()) {
      __ mov(ip, Operand(Smi::FromInt(0)));
      __ push(ip);
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the accumulator.
-     VisitForValue(prop->obj(), kAccumulator);
+     VisitForAccumulatorValue(prop->obj());
      __ push(r0);
      EmitNamedPropertyLoad(prop);
    } else {
-     VisitForValue(prop->obj(), kStack);
-     VisitForValue(prop->key(), kAccumulator);
+     if (prop->is_arguments_access()) {
+       VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
+       __ ldr(r0, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
+       __ push(r0);
+       __ mov(r0, Operand(prop->key()->AsLiteral()->handle()));
+     } else {
+       VisitForStackValue(prop->obj());
+       VisitForAccumulatorValue(prop->key());
+     }
      __ ldr(r1, MemOperand(sp, 0));
+     __ push(r0);
      EmitKeyedPropertyLoad(prop);
    }
  }

+ // We need a second deoptimization point after loading the value
+ // in case evaluating the property load may have a side effect.
+ PrepareForBailout(expr->increment(), TOS_REG);

  // Call ToNumber only if operand is not a smi.
  Label no_conversion;
- __ BranchOnSmi(r0, &no_conversion);
- __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS);
+ __ JumpIfSmi(r0, &no_conversion);
+ ToNumberStub convert_stub;
+ __ CallStub(&convert_stub);
  __ bind(&no_conversion);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
-   case Expression::kUninitialized:
-   case Expression::kEffect:
-     // Do not save result.
-   case Expression::kValue:
-   case Expression::kTest:
-   case Expression::kValueTest:
-   case Expression::kTestValue:
-     // Save the result on the stack. If we have a named or keyed property
-     // we store the result under the receiver that is currently on top
-     // of the stack.
-     switch (assign_type) {
-       case NAMED_PROPERTY:
-         __ str(r0, MemOperand(sp, kPointerSize));
-       case KEYED_PROPERTY:
-         __ str(r0, MemOperand(sp, 2 * kPointerSize));
+   if (!context()->IsEffect()) {
+     // Save the result on the stack. If we have a named or keyed property
+     // we store the result under the receiver that is currently on top
+     // of the stack.
+     switch (assign_type) {
+       case VARIABLE:
+         __ push(r0);
+         break;
+       case NAMED_PROPERTY:
+         __ str(r0, MemOperand(sp, kPointerSize));
+         break;
+       case KEYED_PROPERTY:
+         __ str(r0, MemOperand(sp, 2 * kPointerSize));
+         break;
+     }
+   }
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
+ JumpPatchSite patch_site(masm_);
+
  int count_value = expr->op() == Token::INC ? 1 : -1;
- if (loop_depth() > 0) {
+ if (ShouldInlineSmiCase(expr->op())) {
    __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
    __ b(vs, &stub_call);
    // We could eliminate this smi check if we split the code at
    // the first smi check before calling ToNumber.
-   __ BranchOnSmi(r0, &done);
+   patch_site.EmitJumpIfSmi(r0, &done);
+
    __ bind(&stub_call);
    // Call stub. Undo operation first.
    __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
  }
  __ mov(r1, Operand(Smi::FromInt(count_value)));
- GenericBinaryOpStub stub(Token::ADD, NO_OVERWRITE, r1, r0);
+
+ // Record position before stub call.
+ SetSourcePosition(expr->position());
+ TypeRecordingBinaryOpStub stub(Token::ADD, NO_OVERWRITE);
+ EmitCallIC(stub.GetCode(), &patch_site);
  __ bind(&done);
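The inlined increment adds the tagged constant directly and leaves both failure modes to the stub: signed overflow (ARM's vs condition) and a non-smi input, which stays detectable afterwards because adding an even delta preserves the tag bit. As a C++ sketch (illustrative only; __builtin_add_overflow is the GCC/Clang intrinsic standing in for add ... SetCC plus b vs):

    #include <cstdint>

    // Returns true and writes the result when the fast path applies; false
    // means fall back to the binary-op stub (which redoes the operation, so
    // nothing needs undoing here).
    bool InlineSmiIncrement(int32_t tagged, int32_t count_value, int32_t* out) {
      int32_t delta = count_value << 1;  // Smi::FromInt(count_value).
      int32_t sum;
      if (__builtin_add_overflow(tagged, delta, &sum)) return false;  // b vs.
      if (sum & 1) return false;  // Input was not a smi; take the stub path.
      *out = sum;
      return true;
    }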
  // Store the value returned in r0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
-       EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
-                              Expression::kEffect);
-       // For all contexts except kEffect: We have the result on
+       { EffectContext context(this);
+         EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
+                                Token::ASSIGN);
+         PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
+         context.Plug(r0);
+       }
+       // For all contexts except EffectContext we have the result on
        // top of the stack.
-       if (context_ != Expression::kEffect) {
+       if (!context()->IsEffect()) {
+         context()->PlugTOS();
+       }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
+                              Token::ASSIGN);
+       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
+       context()->Plug(r0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
-     Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
-     __ Call(ic, RelocInfo::CODE_TARGET);
+     Handle<Code> ic(Builtins::builtin(
+         is_strict() ? Builtins::StoreIC_Initialize_Strict
+                     : Builtins::StoreIC_Initialize));
+     EmitCallIC(ic, RelocInfo::CODE_TARGET);
+     PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
-       if (context_ != Expression::kEffect) {
+       if (!context()->IsEffect()) {
+         context()->PlugTOS();
+       }
      } else {
-       Apply(context_, r0);
+       context()->Plug(r0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ pop(r1);  // Key.
      __ pop(r2);  // Receiver.
-     Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
-     __ Call(ic, RelocInfo::CODE_TARGET);
+     Handle<Code> ic(Builtins::builtin(
+         is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
+                     : Builtins::KeyedStoreIC_Initialize));
+     EmitCallIC(ic, RelocInfo::CODE_TARGET);
+     PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
-       if (context_ != Expression::kEffect) {
+       if (!context()->IsEffect()) {
+         context()->PlugTOS();
+       }
      } else {
-       Apply(context_, r0);
+       context()->Plug(r0);
      }
      break;
    }
  }
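The only difference between the _Strict IC variants chosen above and their ordinary counterparts is the store semantics they implement: in strict mode an assignment that fails must throw instead of being silently ignored:

    // Illustration:
    //   'use strict';
    //   Object.freeze(o);
    //   o.x++;  // TypeError under the strict StoreIC; a silent no-op
    //           // under the non-strict one.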
- void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
-   Comment cmnt(masm_, "[ BinaryOperation");
-   switch (expr->op()) {
-       VisitForEffect(expr->left());
-       Visit(expr->right());
-       EmitLogicalOperation(expr);
-     case Token::BIT_AND:
-     case Token::BIT_XOR:
-       VisitForValue(expr->left(), kStack);
-       VisitForValue(expr->right(), kAccumulator);
-       EmitBinaryOp(expr->op(), context_);

- void FullCodeGenerator::EmitNullCompare(bool strict,
-                                         Register null_const,
-   __ cmp(obj, null_const);
+ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
+   ASSERT(!context()->IsEffect());
+   ASSERT(!context()->IsTest());
+   VariableProxy* proxy = expr->AsVariableProxy();
+   if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) {
+     Comment cmnt(masm_, "Global variable");
+     __ ldr(r0, GlobalObjectOperand());
+     __ mov(r2, Operand(proxy->name()));
+     Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+     // Use a regular load, not a contextual load, to avoid a reference
+     // error.
+     EmitCallIC(ic, RelocInfo::CODE_TARGET);
+     PrepareForBailout(expr, TOS_REG);
+     context()->Plug(r0);
+   } else if (proxy != NULL &&
+              proxy->var()->AsSlot() != NULL &&
+              proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
+     Label done, slow;
+
+     // Generate code for loading from variables potentially shadowed
+     // by eval-introduced variables.
+     Slot* slot = proxy->var()->AsSlot();
+     EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
+
+     __ bind(&slow);
+     __ mov(r0, Operand(proxy->name()));
+     __ Push(cp, r0);
+     __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
+     PrepareForBailout(expr, TOS_REG);
+     __ bind(&done);
+     context()->Plug(r0);
+   } else {
+     // This expression cannot throw a reference error at the top level.
+     context()->HandleExpression(expr);
+   }
+ }
+ bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
+                                           Expression* left,
+                                           Expression* right,
+                                           Label* if_true,
+                                           Label* if_false,
+                                           Label* fall_through) {
+   if (op != Token::EQ && op != Token::EQ_STRICT) return false;
+
+   // Check for the pattern: typeof <expression> == <string literal>.
+   Literal* right_literal = right->AsLiteral();
+   if (right_literal == NULL) return false;
+   Handle<Object> right_literal_value = right_literal->handle();
+   if (!right_literal_value->IsString()) return false;
+   UnaryOperation* left_unary = left->AsUnaryOperation();
+   if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false;
+   Handle<String> check = Handle<String>::cast(right_literal_value);
+
+   { AccumulatorValueContext context(this);
+     VisitForTypeofValue(left_unary->expression());
+   }
+   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+
+   if (check->Equals(Heap::number_symbol())) {
+     __ tst(r0, Operand(kSmiTagMask));
+     __ b(eq, if_true);
+     __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
+     __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
+     __ cmp(r0, ip);
+     Split(eq, if_true, if_false, fall_through);
+   } else if (check->Equals(Heap::string_symbol())) {
+     __ tst(r0, Operand(kSmiTagMask));
+     __ b(eq, if_false);
+     // Check for undetectable objects => false.
+     __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
+     __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
+     __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
+     __ cmp(r1, Operand(1 << Map::kIsUndetectable));
+     __ b(eq, if_false);
+     __ ldrb(r1, FieldMemOperand(r0, Map::kInstanceTypeOffset));
+     __ cmp(r1, Operand(FIRST_NONSTRING_TYPE));
+     Split(lt, if_true, if_false, fall_through);
+   } else if (check->Equals(Heap::boolean_symbol())) {
+     __ LoadRoot(ip, Heap::kTrueValueRootIndex);
+     __ cmp(r0, ip);
+     __ b(eq, if_true);
+     __ LoadRoot(ip, Heap::kFalseValueRootIndex);
+     __ cmp(r0, ip);
+     Split(eq, if_true, if_false, fall_through);
+   } else if (check->Equals(Heap::undefined_symbol())) {
      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
-   __ BranchOnSmi(obj, if_false);
-   // It can be an undetectable object.
-   __ ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
-   __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
-   __ tst(scratch, Operand(1 << Map::kIsUndetectable));
+     __ cmp(r0, ip);
+     __ b(eq, if_true);
+     __ tst(r0, Operand(kSmiTagMask));
+     __ b(eq, if_false);
+     // Check for undetectable objects => true.
+     __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
+     __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
+     __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
+     __ cmp(r1, Operand(1 << Map::kIsUndetectable));
+     Split(eq, if_true, if_false, fall_through);
+   } else if (check->Equals(Heap::function_symbol())) {
+     __ tst(r0, Operand(kSmiTagMask));
+     __ b(eq, if_false);
+     __ CompareObjectType(r0, r1, r0, JS_FUNCTION_TYPE);
+     __ b(eq, if_true);
+     // Regular expressions => 'function' (they are callable).
+     __ CompareInstanceType(r1, r0, JS_REGEXP_TYPE);
+     Split(eq, if_true, if_false, fall_through);
+   } else if (check->Equals(Heap::object_symbol())) {
+     __ tst(r0, Operand(kSmiTagMask));
+     __ b(eq, if_false);
+     __ LoadRoot(ip, Heap::kNullValueRootIndex);
+     __ cmp(r0, ip);
+     __ b(eq, if_true);
+     // Regular expressions => 'function', not 'object'.
+     __ CompareObjectType(r0, r1, r0, JS_REGEXP_TYPE);
+     __ b(eq, if_false);
+     // Check for undetectable objects => false.
+     __ ldrb(r0, FieldMemOperand(r1, Map::kBitFieldOffset));
+     __ and_(r0, r0, Operand(1 << Map::kIsUndetectable));
+     __ cmp(r0, Operand(1 << Map::kIsUndetectable));
+     __ b(eq, if_false);
+     // Check for JS objects => true.
+     __ ldrb(r0, FieldMemOperand(r1, Map::kInstanceTypeOffset));
+     __ cmp(r0, Operand(FIRST_JS_OBJECT_TYPE));
+     __ b(lt, if_false);
+     __ cmp(r0, Operand(LAST_JS_OBJECT_TYPE));
+     Split(le, if_true, if_false, fall_through);
+   } else {
+     if (if_false != fall_through) __ jmp(if_false);
+   }
+
+   return true;
+ }
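Examples of comparisons that take this inlined path versus the generic one:

    // Fast path (string literal on the right, typeof on the left):
    //   typeof x == "number"     -> inline smi/heap-number map checks
    //   typeof f === "function"  -> JS_FUNCTION_TYPE / JS_REGEXP_TYPE checks
    // Not matched (falls back to the generic comparison code):
    //   typeof x == y            -> right side is not a literal
    //   typeof x == 7            -> literal, but not a string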
  void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
    Comment cmnt(masm_, "[ CompareOperation");
+   SetSourcePosition(expr->position());
+
    // Always perform the comparison for its control flow. Pack the result
    // into the expression's context after the comparison is performed.
+
    Label materialize_true, materialize_false;
    Label* if_true = NULL;
    Label* if_false = NULL;
-   PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
-
-   VisitForValue(expr->left(), kStack);
-   switch (expr->op()) {
+   Label* fall_through = NULL;
+   context()->PrepareTest(&materialize_true, &materialize_false,
+                          &if_true, &if_false, &fall_through);
+
+   // First we try a fast inlined version of the compare when one of
+   // the operands is a literal.
+   Token::Value op = expr->op();
+   Expression* left = expr->left();
+   Expression* right = expr->right();
+   if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) {
+     context()->Plug(if_true, if_false);
+     return;
+   }
+
+   VisitForStackValue(expr->left());
+   switch (op) {
      case Token::IN:
-       VisitForValue(expr->right(), kStack);
+       VisitForStackValue(expr->right());
        __ InvokeBuiltin(Builtins::IN, CALL_JS);
+       PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
        __ LoadRoot(ip, Heap::kTrueValueRootIndex);
        __ cmp(r0, ip);
+       Split(eq, if_true, if_false, fall_through);
        break;

      case Token::INSTANCEOF: {
-       VisitForValue(expr->right(), kStack);
-       InstanceofStub stub;
+       VisitForStackValue(expr->right());
+       InstanceofStub stub(InstanceofStub::kNoFlags);
        __ CallStub(&stub);
+       PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+       // The stub returns 0 for true.
        __ tst(r0, r0);
-       __ b(eq, if_true);  // The stub returns 0 for true.
+       Split(eq, if_true, if_false, fall_through);
        break;
      }

      default: {
-       VisitForValue(expr->right(), kAccumulator);
+       VisitForAccumulatorValue(expr->right());
+       Condition cond = eq;
        bool strict = false;
-       switch (expr->op()) {
+       switch (op) {
          case Token::EQ_STRICT:
            strict = true;
            // Fall through
-           // If either operand is constant null we do a fast compare
-           // against null.
-           Literal* right_literal = expr->right()->AsLiteral();
-           Literal* left_literal = expr->left()->AsLiteral();
-           if (right_literal != NULL && right_literal->handle()->IsNull()) {
-             EmitNullCompare(strict, r1, r0, if_true, if_false, r2);
-             Apply(context_, if_true, if_false);
-             return;
-           } else if (left_literal != NULL && left_literal->handle()->IsNull()) {
-             EmitNullCompare(strict, r0, r1, if_true, if_false, r2);
-             Apply(context_, if_true, if_false);
-             return;
-           }
          case Token::LT:
          case Token::GT:
            // Reverse left and right sides to obtain ECMA-262 conversion order.
            __ mov(r1, result_register());
          case Token::LTE:
            // Reverse left and right sides to obtain ECMA-262 conversion order.
            __ mov(r1, result_register());
          case Token::GTE:
          case Token::IN:
        }

-       // The comparison stub expects the smi vs. smi case to be handled
-       // before it is called.
-       Label slow_case;
-       __ orr(r2, r0, Operand(r1));
-       __ BranchOnNotSmi(r2, &slow_case);
+       bool inline_smi_code = ShouldInlineSmiCase(op);
+       JumpPatchSite patch_site(masm_);
+       if (inline_smi_code) {
+         Label slow_case;
+         __ orr(r2, r0, Operand(r1));
+         patch_site.EmitJumpIfNotSmi(r2, &slow_case);
+         __ cmp(r1, r0);
+         Split(cond, if_true, if_false, NULL);
+         __ bind(&slow_case);
+       }
-       __ bind(&slow_case);
-       CompareStub stub(cc, strict, kBothCouldBeNaN, true, r1, r0);
+
+       // Record position and call the compare IC.
+       SetSourcePosition(expr->position());
+       Handle<Code> ic = CompareIC::GetUninitialized(op);
+       EmitCallIC(ic, &patch_site);
+       PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
        __ cmp(r0, Operand(0));
+       Split(cond, if_true, if_false, fall_through);
      }
    }

    // Convert the result of the comparison into one expected for this
    // expression's context.
-   Apply(context_, if_true, if_false);
+   context()->Plug(if_true, if_false);
  }
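The inlined smi comparison and the CompareIC cooperate through the JumpPatchSite: the site records where the smi-tag test was emitted so that, once the IC has gathered type feedback, the runtime can locate and repatch the inlined jump. Roughly (a sketch of the emitted shape, not exact output):

    //   orr r2, r0, r1              ; fold both tag bits together
    //   <patchable jump on non-smi> ; -> slow_case
    //   cmp r1, r0                  ; pure smi compare, no call
    //   slow_case:
    //   <CompareIC call>            ; records type feedback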
+ void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
+   Comment cmnt(masm_, "[ CompareToNull");
+   Label materialize_true, materialize_false;
+   Label* if_true = NULL;
+   Label* if_false = NULL;
+   Label* fall_through = NULL;
+   context()->PrepareTest(&materialize_true, &materialize_false,
+                          &if_true, &if_false, &fall_through);
+
+   VisitForAccumulatorValue(expr->expression());
+   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+   __ LoadRoot(r1, Heap::kNullValueRootIndex);
+   __ cmp(r0, r1);
+   if (expr->is_strict()) {
+     Split(eq, if_true, if_false, fall_through);
+   } else {
+     __ b(eq, if_true);
+     __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
+     __ cmp(r0, r1);
+     __ b(eq, if_true);
+     __ tst(r0, Operand(kSmiTagMask));
+     __ b(eq, if_false);
+     // It can be an undetectable object.
+     __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
+     __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
+     __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
+     __ cmp(r1, Operand(1 << Map::kIsUndetectable));
+     Split(eq, if_true, if_false, fall_through);
+   }
+   context()->Plug(if_true, if_false);
+ }
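The two branches implement the two JavaScript null comparisons:

    // Illustration:
    //   x === null  -> true only for null itself (the strict branch).
    //   x == null   -> true for null, undefined, and undetectable objects
    //                  (e.g. document.all), hence the map-bit checks.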
  void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
    __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
-   Apply(context_, r0);
+   context()->Plug(r0);
  }

- Register FullCodeGenerator::result_register() { return r0; }
- Register FullCodeGenerator::context_register() { return cp; }
+ Register FullCodeGenerator::result_register() {
+   return r0;
+ }
+
+ Register FullCodeGenerator::context_register() {
+   return cp;
+ }

+ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
+   ASSERT(mode == RelocInfo::CODE_TARGET ||
+          mode == RelocInfo::CODE_TARGET_CONTEXT);
+   switch (ic->kind()) {
+     case Code::LOAD_IC:
+       __ IncrementCounter(&Counters::named_load_full, 1, r1, r2);
+       break;
+     case Code::KEYED_LOAD_IC:
+       __ IncrementCounter(&Counters::keyed_load_full, 1, r1, r2);
+       break;
+     case Code::STORE_IC:
+       __ IncrementCounter(&Counters::named_store_full, 1, r1, r2);
+       break;
+     case Code::KEYED_STORE_IC:
+       __ IncrementCounter(&Counters::keyed_store_full, 1, r1, r2);
+       break;
+     default:
+       break;
+   }
+   __ Call(ic, mode);
+ }

+ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
+   switch (ic->kind()) {
+     case Code::LOAD_IC:
+       __ IncrementCounter(&Counters::named_load_full, 1, r1, r2);
+       break;
+     case Code::KEYED_LOAD_IC:
+       __ IncrementCounter(&Counters::keyed_load_full, 1, r1, r2);
+       break;
+     case Code::STORE_IC:
+       __ IncrementCounter(&Counters::named_store_full, 1, r1, r2);
+       break;
+     case Code::KEYED_STORE_IC:
+       __ IncrementCounter(&Counters::keyed_store_full, 1, r1, r2);
+       break;
+     default:
+       break;
+   }
+   __ Call(ic, RelocInfo::CODE_TARGET);
+   if (patch_site != NULL && patch_site->is_bound()) {
+     patch_site->EmitPatchInfo();
+   } else {
+     __ nop();  // Signals no inlined code.
+   }
+ }
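A note on the counters incremented in both EmitCallIC overloads:

    // named_load_full, keyed_load_full, named_store_full and keyed_store_full
    // tally IC calls issued from full-codegen (unoptimized) code, letting
    // builds with native code counters enabled separate unoptimized IC
    // traffic from that of optimized code.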
  void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {