// Both name_reg and receiver_reg are preserved on jumps to miss_label,
// but may be destroyed if store is successful.
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      JSObject* object,
                                      int index,
                                      Map* transition,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch,
                                      Label* miss_label) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver_reg, miss_label);

  // Check that the map of the object hasn't changed.
  __ Cmp(FieldOperand(receiver_reg, HeapObject::kMapOffset),
         Handle<Map>(object->map()));
  __ j(not_equal, miss_label);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
  }

  // Stub never generated for non-global objects that require access checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ pop(scratch);  // Return address.
    __ push(receiver_reg);
    __ Push(Handle<Map>(transition));
    __ push(rax);
    __ push(scratch);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)), 3, 1);
    return;
  }

  if (transition != NULL) {
    // Update the map of the object; no write barrier updating is
    // needed because the map is never in new space.
    __ Move(FieldOperand(receiver_reg, HeapObject::kMapOffset),
            Handle<Map>(transition));
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ movq(FieldOperand(receiver_reg, offset), rax);

    // Update the write barrier for the array address.
    // Pass the value being stored in the now unused name_reg.
    __ movq(name_reg, rax);
    __ RecordWrite(receiver_reg, offset, name_reg, scratch);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ movq(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ movq(FieldOperand(scratch, offset), rax);

    // Update the write barrier for the array address.
    // Pass the value being stored in the now unused name_reg.
    __ movq(name_reg, rax);
    __ RecordWrite(scratch, offset, name_reg, receiver_reg);
  }

  // Return the value (register rax).
  __ ret(0);
}
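// A worked example of the index adjustment above: for a map with 4 in-object
// property slots, descriptor index 2 becomes 2 - 4 == -2, i.e. an in-object
// store at instance_size() - 2 * kPointerSize, while descriptor index 5
// becomes 1, i.e. slot 1 of the out-of-object properties FixedArray.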
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ movq(prototype,
          Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  __ movq(prototype,
          FieldOperand(prototype, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  __ movq(prototype, Operand(prototype, Context::SlotOffset(index)));
  // Load the initial map. The global functions all have initial maps.
  __ movq(prototype,
          FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}
void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register prototype, Label* miss) {
  // Check we're still in the same context.
  __ Move(prototype, Top::global());
  __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)),
          prototype);
  __ j(not_equal, miss);
  // Get the global function with the given index.
  JSFunction* function = JSFunction::cast(Top::global_context()->get(index));
  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}
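// Unlike GenerateLoadGlobalFunctionPrototype above, the "direct" variant
// embeds the function's initial map in the code as a constant instead of
// walking the context chain at run time, which is why it must first verify
// that the stub is still running in the context it was compiled for.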
static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
                                       Register scratch) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument in the internal frame of the caller
  // -----------------------------------
  __ movq(scratch, Operand(rsp, 0));
  __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
  __ movq(Operand(rsp, 0), scratch);
  __ Move(scratch, Smi::FromInt(0));
  for (int i = 1; i <= kFastApiCallArguments; i++) {
    __ movq(Operand(rsp, i * kPointerSize), scratch);
  }
}
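// Sketch of the stack effect above, assuming kFastApiCallArguments == 3 as
// implied by the state comments in this file: the return address stays at
// rsp[0] and three zero-initialized (Smi::FromInt(0)) slots are opened up
// beneath it for CheckPrototypes and GenerateFastApiCall to fill in later.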
// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                             : return address.
  //  -- rsp[8]                             : last fast api call extra
  //                                          argument.
  //  -- ...
  //  -- rsp[kFastApiCallArguments * 8]     : first fast api call extra
  //                                          argument.
  //  -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal
  //                                          frame.
  // -----------------------------------
  __ movq(scratch, Operand(rsp, 0));
  __ movq(Operand(rsp, kFastApiCallArguments * kPointerSize), scratch);
  __ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments));
}
// Generates call to API function.
static MaybeObject* GenerateFastApiCall(MacroAssembler* masm,
                                        const CallOptimization& optimization,
                                        int argc) {
  // ----------- S t a t e -------------
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : object passing the type check
  //                           (last fast api call extra argument,
  //                            set by CheckPrototypes)
  //  -- rsp[16]             : api function
  //                           (first fast api call extra argument)
  //  -- rsp[24]             : api call data
  //  -- rsp[32]             : last argument
  //  -- ...
  //  -- rsp[(argc + 3) * 8] : first argument
  //  -- rsp[(argc + 4) * 8] : receiver
  // -----------------------------------
  // Get the function and setup the context.
  JSFunction* function = optimization.constant_function();
  __ Move(rdi, Handle<JSFunction>(function));
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Pass the additional arguments.
  __ movq(Operand(rsp, 2 * kPointerSize), rdi);
  Object* call_data = optimization.api_call_info()->data();
  Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
  if (Heap::InNewSpace(call_data)) {
    __ Move(rcx, api_call_info_handle);
    __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset));
    __ movq(Operand(rsp, 3 * kPointerSize), rbx);
  } else {
    __ Move(Operand(rsp, 3 * kPointerSize), Handle<Object>(call_data));
  }

  // Prepare arguments.
  __ lea(rbx, Operand(rsp, 3 * kPointerSize));

  Object* callback = optimization.api_call_info()->callback();
  Address api_function_address = v8::ToCData<Address>(callback);
  ApiFunction fun(api_function_address);

#ifdef _WIN64
  // Win64 uses first register--rcx--for returned value.
  Register arguments_arg = rdx;
#else
  Register arguments_arg = rdi;
#endif

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  __ PrepareCallApiFunction(kApiStackSpace);

  __ movq(StackSpaceOperand(0), rbx);  // v8::Arguments::implicit_args_.
  __ addq(rbx, Immediate(argc * kPointerSize));
  __ movq(StackSpaceOperand(1), rbx);  // v8::Arguments::values_.
  __ Set(StackSpaceOperand(2), argc);  // v8::Arguments::length_.
  // v8::Arguments::is_construct_call_.
  __ Set(StackSpaceOperand(3), 0);

  // v8::InvocationCallback's argument.
  __ lea(arguments_arg, StackSpaceOperand(0));
  // Emitting a stub call may try to allocate (if the code is not
  // already generated). Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  return masm->TryCallApiFunctionAndReturn(&fun,
                                           argc + kFastApiCallArguments + 1);
}
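// The operand count passed to TryCallApiFunctionAndReturn is the number of
// stack slots dropped on return: argc JS arguments, the kFastApiCallArguments
// extra slots reserved by ReserveSpaceForFastApiCall, plus one slot for the
// receiver.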
#define __ ACCESS_MASM((masm()))
Register StubCompiler::CheckPrototypes(JSObject* object,
                                       Register object_reg,
                                       JSObject* holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       String* name,
                                       int save_at_depth,
                                       Label* miss) {
  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.  On the first
  // iteration, reg is an alias for object_reg, on later iterations,
  // it is an alias for holder_reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    __ movq(Operand(rsp, kPointerSize), object_reg);
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  JSObject* current = object;
  while (current != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(current->GetPrototype());
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      if (!name->IsSymbol()) {
        MaybeObject* lookup_result = Heap::LookupSymbol(name);
        if (lookup_result->IsFailure()) {
          set_failure(Failure::cast(lookup_result));
          return reg;
        } else {
          name = String::cast(lookup_result->ToObjectUnchecked());
        }
      }
      ASSERT(current->property_dictionary()->FindEntry(name) ==
             StringDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(),
                                       miss,
                                       reg,
                                       name,
                                       scratch1,
                                       scratch2);
      __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // from now the object is in holder_reg
      __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      __ Cmp(scratch1, Handle<Map>(current->map()));
      // Branch on the result of the map check.
      __ j(not_equal, miss);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code.  Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
             Handle<Map>(current->map()));
      // Branch on the result of the map check.
      __ j(not_equal, miss);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      __ Move(reg, Handle<JSObject>(prototype));
    }

    if (save_at_depth == depth) {
      __ movq(Operand(rsp, kPointerSize), reg);
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }

  // Check the holder map.
  __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
         Handle<Map>(holder->map()));
  __ j(not_equal, miss);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth + 1));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(current == holder);
  ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
  if (current->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // If we've skipped any global objects, it's not enough to verify
  // that their maps haven't changed.  We also need to check that the
  // property cell for the property is still empty.
  current = object;
  while (current != holder) {
    if (current->IsGlobalObject()) {
      MaybeObject* cell = GenerateCheckPropertyCell(masm(),
                                                    GlobalObject::cast(current),
                                                    name,
                                                    scratch1,
                                                    miss);
      if (cell->IsFailure()) {
        set_failure(Failure::cast(cell));
        return reg;
      }
    }
    current = JSObject::cast(current->GetPrototype());
  }

  // Return the register containing the holder.
  return reg;
}
void StubCompiler::GenerateLoadField(JSObject* object,
1002
// Check that the receiver isn't a smi.
1003
__ JumpIfSmi(receiver, miss);
1005
// Check the prototype chain.
1007
CheckPrototypes(object, receiver, holder,
1008
scratch1, scratch2, scratch3, name, miss);
1010
// Get the value from the properties.
1011
GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
                                                JSObject* holder,
                                                Register receiver,
                                                Register name_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                AccessorInfo* callback,
                                                String* name,
                                                Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1,
                      scratch2, scratch3, name, miss);

  Handle<AccessorInfo> callback_handle(callback);

  // Insert additional parameters into the stack frame above return address.
  ASSERT(!scratch2.is(reg));
  __ pop(scratch2);  // Get return address to place it below.

  __ push(receiver);  // receiver
  __ push(reg);  // holder
  if (Heap::InNewSpace(callback_handle->data())) {
    __ Move(scratch1, callback_handle);
    __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset));  // data
  } else {
    __ Push(Handle<Object>(callback_handle->data()));
  }
  __ push(name_reg);  // name
  // Save a pointer to where we pushed the arguments pointer.
  // This will be passed as the const AccessorInfo& to the C++ callback.

#ifdef _WIN64
  // Win64 uses first register--rcx--for returned value.
  Register accessor_info_arg = r8;
  Register name_arg = rdx;
#else
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif

  ASSERT(!name_arg.is(scratch2));
  __ movq(name_arg, rsp);
  __ push(scratch2);  // Restore return address.

  // Do call through the api.
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);

  // 3 elements array for v8::Arguments::values_ and handler for name.
  const int kStackSpace = 4;

  // Allocate v8::AccessorInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  __ PrepareCallApiFunction(kArgStackSpace);
  __ lea(rax, Operand(name_arg, 3 * kPointerSize));

  // v8::AccessorInfo::args_.
  __ movq(StackSpaceOperand(0), rax);

  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ lea(accessor_info_arg, StackSpaceOperand(0));

  // Emitting a stub call may try to allocate (if the code is not
  // already generated). Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  return masm()->TryCallApiFunctionAndReturn(&fun, kStackSpace);
}
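// The four words pushed above (receiver, holder, data, name) double as the
// v8::AccessorInfo::args_ array: name_arg points at the name slot, so
// name_arg + 3 * kPointerSize, computed into rax, is the receiver slot that
// becomes args_.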
void StubCompiler::GenerateLoadConstant(JSObject* object,
                                        JSObject* holder,
                                        Register receiver,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        Object* value,
                                        String* name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  CheckPrototypes(object, receiver, holder,
                  scratch1, scratch2, scratch3, name, miss);

  // Return the constant value.
  __ Move(rax, Handle<Object>(value));
  __ ret(0);
}
void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                           JSObject* interceptor_holder,
                                           LookupResult* lookup,
                                           Register receiver,
                                           Register name_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           String* name,
                                           Label* miss) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // So far the most popular follow ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them, other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsProperty() && lookup->IsCacheable()) {
    if (lookup->type() == FIELD) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsAccessorInfo() &&
               AccessorInfo::cast(lookup->GetCallbackObject())->getter() !=
                   NULL) {
      compile_followup_inline = true;
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    __ EnterInternalFrame();

    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      // CALLBACKS case needs a receiver to be passed into C++ callback.
      __ push(receiver);
    }
    __ push(holder_reg);
    __ push(name_reg);

    // Invoke an interceptor.  Note: map checks from receiver to
    // interceptor's holder have been compiled before (see a caller
    // of this method).
    CompileCallLoadPropertyWithInterceptor(masm(),
                                           receiver,
                                           holder_reg,
                                           name_reg,
                                           interceptor_holder);

    // Check if interceptor provided a value for property.  If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
    __ j(equal, &interceptor_failed);
    __ LeaveInternalFrame();
    __ ret(0);

    __ bind(&interceptor_failed);
    __ pop(name_reg);
    __ pop(holder_reg);
    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      __ pop(receiver);
    }

    __ LeaveInternalFrame();

    // Check that the maps from interceptor's holder to lookup's holder
    // haven't changed.  And load lookup's holder into |holder| register.
    if (interceptor_holder != lookup->holder()) {
      holder_reg = CheckPrototypes(interceptor_holder,
                                   holder_reg,
                                   lookup->holder(),
                                   scratch1,
                                   scratch2,
                                   scratch3,
                                   name,
                                   miss);
    }

    if (lookup->type() == FIELD) {
      // We found FIELD property in prototype chain of interceptor's holder.
      // Retrieve a field from field's holder.
      GenerateFastPropertyLoad(masm(), rax, holder_reg,
                               lookup->holder(), lookup->GetFieldIndex());
      __ ret(0);
    } else {
      // We found CALLBACKS property in prototype chain of interceptor's
      // holder.
      ASSERT(lookup->type() == CALLBACKS);
      ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
      AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
      ASSERT(callback != NULL);
      ASSERT(callback->getter() != NULL);

      // Tail call to runtime.
      // Important invariant in CALLBACKS case: the code above must be
      // structured to never clobber |receiver| register.
      __ pop(scratch2);  // return address
      __ push(receiver);
      __ push(holder_reg);
      __ Move(holder_reg, Handle<AccessorInfo>(callback));
      __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
      __ push(holder_reg);
      __ push(name_reg);
      __ push(scratch2);  // restore return address

      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
      __ TailCallExternalReference(ref, 5, 1);
    }
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    __ pop(scratch2);  // save old return address
    PushInterceptorArguments(masm(), receiver, holder_reg,
                             name_reg, interceptor_holder);
    __ push(scratch2);  // restore old return address

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
    __ TailCallExternalReference(ref, 5, 1);
  }
}
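// In the CALLBACKS branch above, the five arguments passed to
// kLoadCallbackProperty are the receiver, the holder, the callback's data,
// the AccessorInfo object itself, and the property name, matching the
// argument count 5 in the tail call.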
void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
  if (kind_ == Code::KEYED_CALL_IC) {
    __ Cmp(rcx, Handle<String>(name));
    __ j(not_equal, miss);
  }
}
void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
                                                   JSObject* holder,
                                                   String* name,
                                                   Label* miss) {
  ASSERT(holder->IsGlobalObject());

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Get the receiver from the stack.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual calls. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ JumpIfSmi(rdx, miss);
  }

  // Check that the maps haven't changed.
  CheckPrototypes(object, rdx, holder, rbx, rax, rdi, name, miss);
}
void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    Label* miss) {
  // Get the value from the cell.
  __ Move(rdi, Handle<JSGlobalPropertyCell>(cell));
  __ movq(rdi, FieldOperand(rdi, JSGlobalPropertyCell::kValueOffset));

  // Check that the cell contains the same function.
  if (Heap::InNewSpace(function)) {
    // We can't embed a pointer to a function in new space so we have
    // to verify that the shared function info is unchanged. This has
    // the nice side effect that multiple closures based on the same
    // function can all use this call IC. Before we load through the
    // function, we have to verify that it still is a function.
    __ JumpIfSmi(rdi, miss);
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax);
    __ j(not_equal, miss);

    // Check the shared function info. Make sure it hasn't changed.
    __ Move(rax, Handle<SharedFunctionInfo>(function->shared()));
    __ cmpq(FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset), rax);
    __ j(not_equal, miss);
  } else {
    __ Cmp(rdi, Handle<JSFunction>(function));
    __ j(not_equal, miss);
  }
}
MaybeObject* CallStubCompiler::GenerateMissBranch() {
  MaybeObject* maybe_obj = StubCache::ComputeCallMiss(arguments().immediate(),
                                                      kind_);
  Object* obj;
  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
  return obj;
}
MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
                                                JSObject* holder,
                                                int index,
                                                String* name) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  __ ret((argc + 1) * kPointerSize);

  __ bind(&return_undefined);
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  __ ret((argc + 1) * kPointerSize);

  __ bind(&call_builtin);
  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop),
                               argc + 1,
                               1);

  __ bind(&miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}
MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- rcx                 : function name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || cell != NULL) return Heap::undefined_value();

  const int argc = arguments().immediate();

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
    index_out_of_range_label = &miss;
  }

  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            rax,
                                            &miss);
  ASSERT(object != holder);
  CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                  rbx, rdx, rdi, name, &miss);

  Register receiver = rbx;
  Register index = rdi;
  Register scratch = rdx;
  Register result = rax;
  __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
  if (argc > 0) {
    __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
  } else {
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharCodeAtGenerator char_code_at_generator(receiver,
                                                   index,
                                                   scratch,
                                                   result,
                                                   &miss,  // When not a string.
                                                   &miss,  // When not a number.
                                                   index_out_of_range_label,
                                                   STRING_INDEX_IS_NUMBER);
  char_code_at_generator.GenerateFast(masm());
  __ ret((argc + 1) * kPointerSize);

  StubRuntimeCallHelper call_helper;
  char_code_at_generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(rax, Heap::kNanValueRootIndex);
    __ ret((argc + 1) * kPointerSize);
  }

  __ bind(&miss);
  // Restore function name in rcx.
  __ Move(rcx, Handle<String>(name));
  __ bind(&name_miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}
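// Out-of-range behavior above: the fast path answers NaN directly, so e.g.
// "ab".charCodeAt(5) returns NaN without a runtime call; only stubs compiled
// with DEFAULT_STRING_STUB route that case to the miss handler instead.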
MaybeObject* CallStubCompiler::CompileStringCharAtCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- rcx                 : function name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || cell != NULL) return Heap::undefined_value();

  const int argc = arguments().immediate();

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
    index_out_of_range_label = &miss;
  }

  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            rax,
                                            &miss);
  ASSERT(object != holder);
  CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                  rbx, rdx, rdi, name, &miss);

  Register receiver = rax;
  Register index = rdi;
  Register scratch1 = rbx;
  Register scratch2 = rdx;
  Register result = rax;
  __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
  if (argc > 0) {
    __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
  } else {
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch1,
                                          scratch2,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          index_out_of_range_label,
                                          STRING_INDEX_IS_NUMBER);
  char_at_generator.GenerateFast(masm());
  __ ret((argc + 1) * kPointerSize);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(rax, Heap::kEmptyStringRootIndex);
    __ ret((argc + 1) * kPointerSize);
  }

  __ bind(&miss);
  // Restore function name in rcx.
  __ Move(rcx, Handle<String>(name));
  __ bind(&name_miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}
MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- rcx                 : function name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------
  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    __ movq(rdx, Operand(rsp, 2 * kPointerSize));

    __ JumpIfSmi(rdx, &miss);

    CheckPrototypes(JSObject::cast(object), rdx, holder, rbx, rax, rdi, name,
                    &miss);
  } else {
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the char code argument.
  Register code = rbx;
  __ movq(code, Operand(rsp, 1 * kPointerSize));

  // Check the code is a smi.
  Label slow;
  __ JumpIfNotSmi(code, &slow);

  // Convert the smi code to uint16.
  __ SmiAndConstant(code, code, Smi::FromInt(0xffff));

  StringCharFromCodeGenerator char_from_code_generator(code, rax);
  char_from_code_generator.GenerateFast(masm());
  __ ret(2 * kPointerSize);

  StubRuntimeCallHelper call_helper;
  char_from_code_generator.GenerateSlow(masm(), call_helper);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // rcx: function name.
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
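// The SmiAndConstant masking above implements the ToUint16 step of
// String.fromCharCode; for example a char code of 0x12345 is truncated to
// 0x2345 before the character is looked up.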
MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
                                                    JSObject* holder,
                                                    JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    String* name) {
  // TODO(872): implement this.
  return Heap::undefined_value();
}
MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
                                                  JSObject* holder,
                                                  JSGlobalPropertyCell* cell,
                                                  JSFunction* function,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- rcx                 : function name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------
  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    __ movq(rdx, Operand(rsp, 2 * kPointerSize));

    __ JumpIfSmi(rdx, &miss);

    CheckPrototypes(JSObject::cast(object), rdx, holder, rbx, rax, rdi, name,
                    &miss);
  } else {
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into rax.
  __ movq(rax, Operand(rsp, 1 * kPointerSize));

  // Check if the argument is a smi.
  Label not_smi;
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(rax, &not_smi);
  __ SmiToInteger32(rax, rax);

  // Set rbx to 1...1 (== -1) if the argument is negative, or to 0...0
  // otherwise.
  __ movl(rbx, rax);
  __ sarl(rbx, Immediate(kBitsPerInt - 1));

  // Do bitwise not or do nothing depending on rbx.
  __ xorl(rax, rbx);

  // Add 1 or do nothing depending on rbx.
  __ subl(rax, rbx);

  // If the result is still negative, go to the slow case.
  // This only happens for the most negative smi.
  Label slow;
  __ j(negative, &slow);

  // Smi case done.
  __ Integer32ToSmi(rax, rax);
  __ ret(2 * kPointerSize);

  // Check if the argument is a heap number and load its value.
  __ bind(&not_smi);
  __ CheckMap(rax, Factory::heap_number_map(), &slow, true);
  __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));

  // Check the sign of the argument. If the argument is positive,
  // just return it.
  Label negative_sign;
  const int sign_mask_shift =
      (HeapNumber::kExponentOffset - HeapNumber::kValueOffset) * kBitsPerByte;
  __ movq(rdi, static_cast<int64_t>(HeapNumber::kSignMask) << sign_mask_shift,
          RelocInfo::NONE);
  __ testq(rbx, rdi);
  __ j(not_zero, &negative_sign);
  __ ret(2 * kPointerSize);

  // If the argument is negative, clear the sign, and return a new
  // number. We still have the sign mask in rdi.
  __ bind(&negative_sign);
  __ xor_(rbx, rdi);
  __ AllocateHeapNumber(rax, rdx, &slow);
  __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rbx);
  __ ret(2 * kPointerSize);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // rcx: function name.
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
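// The branch-free smi path above computes abs(x) as (x ^ m) - m with
// m = x >> 31. For x == -5: m == -1, (x ^ m) == 4, and 4 - (-1) == 5; for
// x >= 0, m == 0 and x passes through unchanged. The only failure is the
// most negative smi, whose absolute value is not a smi, which is why the
// negative-result check falls through to the slow case.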
MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
                                                   JSObject* holder,
                                                   JSFunction* function,
                                                   String* name,
                                                   StubCompiler::CheckType check) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  SharedFunctionInfo* function_info = function->shared();
  if (function_info->HasBuiltinFunctionId()) {
    BuiltinFunctionId id = function_info->builtin_function_id();
    MaybeObject* maybe_result = CompileCustomCall(
        id, object, holder, NULL, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // undefined means bail out to regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss_in_smi_check;

  GenerateNameCheck(name, &miss_in_smi_check);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  if (check != NUMBER_CHECK) {
    __ JumpIfSmi(rdx, &miss_in_smi_check);
  }

  // Make sure that it's okay not to patch the on stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);

  CallOptimization optimization(function);
  int depth = kInvalidProtoDepth;
  Label miss;

  switch (check) {
    case RECEIVER_MAP_CHECK:
      __ IncrementCounter(&Counters::call_const, 1);

      if (optimization.is_simple_api_call() && !object->IsGlobalObject()) {
        depth = optimization.GetPrototypeDepthOfExpectedType(
            JSObject::cast(object), holder);
      }

      if (depth != kInvalidProtoDepth) {
        __ IncrementCounter(&Counters::call_const_fast_api, 1);

        // Allocate space for v8::Arguments implicit values. Must be
        // initialized before to call any runtime function.
        __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
      }

      // Check that the maps haven't changed.
      CheckPrototypes(JSObject::cast(object), rdx, holder,
                      rbx, rax, rdi, name, depth, &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        ASSERT(depth == kInvalidProtoDepth);
        __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
        __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
      }
      break;

    case STRING_CHECK:
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        // Check that the object is a two-byte string or a symbol.
        __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rax);
        __ j(above_equal, &miss);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::STRING_FUNCTION_INDEX, rax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                        rbx, rdx, rdi, name, &miss);
      }
      break;

    case NUMBER_CHECK: {
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a smi or a heap number.
        __ JumpIfSmi(rdx, &fast);
        __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rax);
        __ j(not_equal, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::NUMBER_FUNCTION_INDEX, rax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                        rbx, rdx, rdi, name, &miss);
      }
      break;
    }

    case BOOLEAN_CHECK: {
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a boolean.
        __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
        __ j(equal, &fast);
        __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
        __ j(not_equal, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::BOOLEAN_FUNCTION_INDEX, rax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                        rbx, rdx, rdi, name, &miss);
      }
      break;
    }

    default:
      UNREACHABLE();
  }

  if (depth != kInvalidProtoDepth) {
    // Move the return address on top of the stack.
    __ movq(rax, Operand(rsp, 3 * kPointerSize));
    __ movq(Operand(rsp, 0 * kPointerSize), rax);

    // rsp[2 * kPointerSize] is uninitialized, rsp[3 * kPointerSize] contains
    // duplicate of return address and will be overwritten.
    MaybeObject* result = GenerateFastApiCall(masm(), optimization, argc);
    if (result->IsFailure()) return result;
  } else {
    __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
  }

  // Handle call cache miss.
  __ bind(&miss);
  if (depth != kInvalidProtoDepth) {
    __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
  }

  // Handle call cache miss.
  __ bind(&miss_in_smi_check);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}
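// Fast api call stack layout at the fork above: rsp[0] holds the copied
// return address, rsp[8] the type-checked object stored by CheckPrototypes
// (via save_at_depth), while GenerateFastApiCall fills the function into
// rsp[16] and writes the call data over the duplicated return address at
// rsp[24].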
MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
                                                      JSObject* holder,
                                                      String* name) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);

  // Setup the context (function already in rdi).
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  __ IncrementCounter(&Counters::call_global_inline, 1);
  ASSERT(function->is_compiled());
  ParameterCount expected(function->shared()->formal_parameter_count());
  if (V8::UseCrankshaft()) {
    // TODO(kasperl): For now, we always call indirectly through the
    // code field in the function to allow recompilation to take effect
    // without changing any of the call sites.
    __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
    __ InvokeCode(rdx, expected, arguments(), JUMP_FUNCTION);
  } else {
    Handle<Code> code(function->code());
    __ InvokeCode(code, expected, arguments(),
                  RelocInfo::CODE_TARGET, JUMP_FUNCTION);
  }

  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(&Counters::call_global_inline_miss, 1);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(NORMAL, name);
}
MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
                                                   JSObject* object,
                                                   JSObject* holder,
                                                   AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  MaybeObject* result = GenerateLoadCallback(object, holder, rax, rcx, rbx,
                                             rdx, rdi, callback, name, &miss);
  if (result->IsFailure()) {
    miss.Unuse();
    return result;
  }

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
                                                  int index,
                                                  Map* transition,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Generate store field code.  Preserves receiver and name on jump to miss.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     rdx, rcx, rbx,
                     &miss);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
                                                     AccessorInfo* callback,
                                                     String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  // Check that the map of the object hasn't changed.
  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
         Handle<Map>(object->map()));
  __ j(not_equal, &miss);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(rdx, rbx, &miss);
  }

  // Stub never generated for non-global objects that require access checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  __ pop(rbx);  // remove the return address
  __ push(rdx);  // receiver
  __ Push(Handle<AccessorInfo>(callback));  // callback info
  __ push(rcx);  // name
  __ push(rax);  // value
  __ push(rbx);  // restore return address

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
  __ TailCallExternalReference(store_callback_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
                                                   JSObject* holder,
                                                   Object* value,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  GenerateLoadConstant(object, holder, rax, rbx, rdx, rdi, value, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}
MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
                                                        String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  // Check that the map of the object hasn't changed.
  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
         Handle<Map>(receiver->map()));
  __ j(not_equal, &miss);

  // Perform global security token check if needed.
  if (receiver->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(rdx, rbx, &miss);
  }

  // Stub never generated for non-global objects that require access checks.
  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());

  __ pop(rbx);  // remove the return address
  __ push(rdx);  // receiver
  __ push(rcx);  // name
  __ push(rax);  // value
  __ Push(Smi::FromInt(strict_mode_));
  __ push(rbx);  // restore return address

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
  __ TailCallExternalReference(store_ic_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
                                                   JSGlobalPropertyCell* cell,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the map of the global has not changed.
  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
         Handle<Map>(object->map()));
  __ j(not_equal, &miss);

  // Check that the value in the cell is not the hole. If it is, this
  // cell could have been deleted and reintroducing the global needs
  // to update the property details in the property dictionary of the
  // global object. We bail out to the runtime system to do that.
  __ Move(rbx, Handle<JSGlobalPropertyCell>(cell));
  __ CompareRoot(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
                 Heap::kTheHoleValueRootIndex);
  __ j(equal, &miss);

  // Store the value in the cell.
  __ movq(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset), rax);

  // Return the value (register rax).
  __ IncrementCounter(&Counters::named_store_global_inline, 1);
  __ ret(0);

  // Handle store cache miss.
  __ bind(&miss);
  __ IncrementCounter(&Counters::named_store_global_inline_miss, 1);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
                                                       int index,
                                                       Map* transition,
                                                       String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_store_field, 1);

  // Check that the name has not changed.
  __ Cmp(rcx, Handle<String>(name));
  __ j(not_equal, &miss);

  // Generate store field code.  Preserves receiver and name on jump to miss.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     rdx, rcx, rbx,
                     &miss);

  // Handle store cache miss.
  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_store_field, 1);
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
    JSObject* receiver) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  // Check that the map matches.
  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
         Handle<Map>(receiver->map()));
  __ j(not_equal, &miss);

  // Check that the key is a smi.
  __ JumpIfNotSmi(rcx, &miss);

  // Get the elements array and make sure it is a fast element array, not 'cow'.
  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
  __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
         Factory::fixed_array_map());
  __ j(not_equal, &miss);

  // Check that the key is within bounds.
  if (receiver->IsJSArray()) {
    __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
    __ j(above_equal, &miss);
  } else {
    __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
    __ j(above_equal, &miss);
  }

  // Do the store and update the write barrier. Make sure to preserve
  // the value in register rax.
  __ movq(rdx, rax);
  __ SmiToInteger32(rcx, rcx);
  __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize),
          rax);
  __ RecordWrite(rdi, 0, rdx, rcx);

  // Done.
  __ ret(0);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}
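// The fixed_array_map() compare above deliberately rejects copy-on-write
// arrays, whose elements use the separate fixed_cow_array_map: writing into
// a shared COW backing store would be visible through every array sharing
// it, so such stores take the miss path and go through the runtime instead.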
MaybeObject* KeyedStoreStubCompiler::CompileStorePixelArray(
    JSObject* receiver) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the map matches.
  __ CheckMap(rdx, Handle<Map>(receiver->map()), &miss, false);

  // Do the load.
  GenerateFastPixelArrayStore(masm(),
                              rdx,
                              rcx,
                              rax,
                              rdi,
                              rbx,
                              true,
                              false,
                              &miss,
                              &miss,
                              NULL,
                              &miss);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}
MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
                                                      JSObject* object,
                                                      JSObject* last) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
Object* StoreStubCompiler::CompileStoreCallback(JSObject* object,
1815
AccessorInfo* callback,
2972
MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
1817
2973
// ----------- S t a t e -------------
1820
2975
// -- rdx : receiver
1821
// -- rsp[0] : return address
2976
// -- rsp[0] : return address
1822
2977
// -----------------------------------
1825
// Check that the object isn't a smi.
1826
__ JumpIfSmi(rdx, &miss);
2980
__ IncrementCounter(&Counters::keyed_load_function_prototype, 1);
1828
// Check that the map of the object hasn't changed.
1829
__ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
1830
Handle<Map>(object->map()));
2982
// Check that the name has not changed.
2983
__ Cmp(rax, Handle<String>(name));
1831
2984
__ j(not_equal, &miss);
1833
// Perform global security token check if needed.
1834
if (object->IsJSGlobalProxy()) {
1835
__ CheckAccessGlobalProxy(rdx, rbx, &miss);
1838
// Stub never generated for non-global objects that require access
1840
ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
1842
__ pop(rbx); // remove the return address
1843
__ push(rdx); // receiver
1844
__ Push(Handle<AccessorInfo>(callback)); // callback info
1845
__ push(rcx); // name
1846
__ push(rax); // value
1847
__ push(rbx); // restore return address
1849
// Do tail-call to the runtime system.
1850
ExternalReference store_callback_property =
1851
ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
1852
__ TailCallExternalReference(store_callback_property, 4, 1);
1854
// Handle store cache miss.
2986
GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss);
1855
2987
__ bind(&miss);
1856
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
1857
__ Jump(ic, RelocInfo::CODE_TARGET);
2988
__ DecrementCounter(&Counters::keyed_load_function_prototype, 1);
2989
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
1859
2991
// Return the generated code.
1860
2992
return GetCode(CALLBACKS, name);
1864
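
// CompileStoreCallback never stores inline: after the map and security
// checks it marshals (receiver, callback info, name, value) onto the stack
// and tail-calls the kStoreCallbackProperty runtime entry, which invokes
// the C++ setter held by the AccessorInfo.  The stub's only win over the
// generic path is skipping the property lookup.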


Object* StoreStubCompiler::CompileStoreField(JSObject* object,
                                             int index,
                                             Map* transition,
                                             String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Generate store field code.  Preserves receiver and name on jump to miss.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     rdx, rcx, rbx,
                     &miss);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
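
// The transition argument doubles as the stub classifier: a NULL transition
// compiles a plain FIELD store, while a non-NULL transition compiles a
// MAP_TRANSITION store that must also switch the receiver to its new map.
// Both cases funnel through the shared GenerateStoreField code.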


Object* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  // Check that the map of the object hasn't changed.
  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
         Handle<Map>(receiver->map()));
  __ j(not_equal, &miss);

  // Perform global security token check if needed.
  if (receiver->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(rdx, rbx, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());

  __ pop(rbx);  // remove the return address
  __ push(rdx);  // receiver
  __ push(rcx);  // name
  __ push(rax);  // value
  __ push(rbx);  // restore return address

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}


MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  // Check that the map matches.
  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
         Handle<Map>(receiver->map()));
  __ j(not_equal, &miss);

  // Check that the key is a smi.
  __ JumpIfNotSmi(rax, &miss);

  // Get the elements array.
  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ AssertFastElements(rcx);

  // Check that the key is within bounds.
  __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
  __ j(above_equal, &miss);

  // Load the result and make sure it's not the hole.
  SmiIndex index = masm()->SmiToIndex(rbx, rax, kPointerSizeLog2);
  __ movq(rbx, FieldOperand(rcx,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));
  __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
  __ j(equal, &miss);
  __ movq(rax, rbx);
  __ ret(0);

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}
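
// The hole check in CompileLoadSpecialized is what keeps the stub correct
// for arrays with gaps: a deleted or never-initialized element slot holds
// the one-off "hole" sentinel rather than undefined, and loading it has to
// fall back to the miss handler so the runtime can consult the prototype
// chain.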


Object* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
                                              JSGlobalPropertyCell* cell,
                                              String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the map of the global has not changed.
  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
         Handle<Map>(object->map()));
  __ j(not_equal, &miss);

  // Store the value in the cell.
  __ Move(rcx, Handle<JSGlobalPropertyCell>(cell));
  __ movq(FieldOperand(rcx, JSGlobalPropertyCell::kValueOffset), rax);

  // Return the value (register rax).
  __ IncrementCounter(&Counters::named_store_global_inline, 1);
  __ ret(0);

  // Handle store cache miss.
  __ bind(&miss);
  __ IncrementCounter(&Counters::named_store_global_inline_miss, 1);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
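
// Stores to a known global skip the property dictionary entirely: the
// JSGlobalPropertyCell for the property is baked into the code as a handle,
// so after the map check the stub just overwrites the cell's value slot.
// The named_store_global_inline{,_miss} counters make the hit/miss ratio of
// this fast path observable in counter dumps.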


Object* KeyedLoadStubCompiler::CompileLoadField(String* name,
                                                JSObject* receiver,
                                                JSObject* holder,
                                                int index) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_load_field, 1);

  // Check that the name has not changed.
  __ Cmp(rax, Handle<String>(name));
  __ j(not_equal, &miss);

  GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss);

  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_load_field, 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(FIELD, name);
}


MaybeObject* KeyedLoadStubCompiler::CompileLoadPixelArray(JSObject* receiver) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the map matches.
  __ CheckMap(rdx, Handle<Map>(receiver->map()), &miss, false);

  GenerateFastPixelArrayLoad(masm(),
                             rdx,
                             rax,
                             rbx,
                             rcx,
                             rax,
                             &miss,
                             &miss,
                             &miss);

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}
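
// A keyed load with a constant string key (e.g. `o["foo"]` hitting the
// keyed IC) compiles to a name check followed by the ordinary field-load
// machinery.  Note how the miss path decrements exactly what the entry
// incremented, so keyed_load_field stays an accurate count of completed
// fast-path loads.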


Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
                                                  int index,
                                                  Map* transition,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_store_field, 1);

  // Check that the name has not changed.
  __ Cmp(rcx, Handle<String>(name));
  __ j(not_equal, &miss);

  // Generate store field code.  Preserves receiver and name on jump to miss.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     rdx, rcx, rbx,
                     &miss);

  // Handle store cache miss.
  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_store_field, 1);
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}


void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                           JSObject* interceptor_holder,
                                           LookupResult* lookup,
                                           Register receiver,
                                           Register name_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           String* name,
                                           Label* miss) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // So far the most popular follow ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them, other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsProperty() && lookup->IsCacheable()) {
    if (lookup->type() == FIELD) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
        lookup->GetCallbackObject()->IsAccessorInfo() &&
        AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
      compile_followup_inline = true;
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    __ EnterInternalFrame();

    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      // CALLBACKS case needs a receiver to be passed into C++ callback.
      __ push(receiver);
    }
    __ push(holder_reg);
    __ push(name_reg);

    // Invoke an interceptor.  Note: map checks from receiver to
    // interceptor's holder have been compiled before (see a caller
    // of this method.)
    CompileCallLoadPropertyWithInterceptor(masm(),
                                           receiver,
                                           holder_reg,
                                           name_reg,
                                           interceptor_holder);

    // Check if interceptor provided a value for property.  If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
    __ j(equal, &interceptor_failed);
    __ LeaveInternalFrame();
    __ ret(0);

    __ bind(&interceptor_failed);
    __ pop(name_reg);
    __ pop(holder_reg);
    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      __ pop(receiver);
    }

    __ LeaveInternalFrame();

    // Check that the maps from interceptor's holder to lookup's holder
    // haven't changed.  And load lookup's holder into |holder| register.
    if (interceptor_holder != lookup->holder()) {
      holder_reg = CheckPrototypes(interceptor_holder,
                                   holder_reg,
                                   lookup->holder(),
                                   scratch1,
                                   scratch2,
                                   scratch3,
                                   name,
                                   miss);
    }

    if (lookup->type() == FIELD) {
      // We found FIELD property in prototype chain of interceptor's holder.
      // Retrieve a field from field's holder.
      GenerateFastPropertyLoad(masm(), rax, holder_reg,
                               lookup->holder(), lookup->GetFieldIndex());
      __ ret(0);
    } else {
      // We found CALLBACKS property in prototype chain of interceptor's
      // holder.
      ASSERT(lookup->type() == CALLBACKS);
      ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
      AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
      ASSERT(callback != NULL);
      ASSERT(callback->getter() != NULL);

      // Tail call to runtime.
      // Important invariant in CALLBACKS case: the code above must be
      // structured to never clobber |receiver| register.
      __ pop(scratch2);  // return address
      __ push(receiver);
      __ push(holder_reg);
      __ Move(holder_reg, Handle<AccessorInfo>(callback));
      __ push(holder_reg);
      __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
      __ push(name_reg);
      __ push(scratch2);  // restore return address

      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
      __ TailCallExternalReference(ref, 5, 1);
    }
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    __ pop(scratch2);  // save old return address
    PushInterceptorArguments(masm(), receiver, holder_reg,
                             name_reg, interceptor_holder);
    __ push(scratch2);  // restore old return address

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
    __ TailCallExternalReference(ref, 5, 1);
  }
}
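
// The EnterInternalFrame/LeaveInternalFrame pair around the interceptor
// call is what makes the pushed receiver/holder/name values visible to the
// GC as tagged slots; without a frame, a collection triggered inside the
// interceptor callback could move those objects and leave the raw stack
// words stale.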


bool StubCompiler::GenerateLoadCallback(JSObject* object,
                                        JSObject* holder,
                                        Register receiver,
                                        Register name_reg,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        AccessorInfo* callback,
                                        String* name,
                                        Label* miss,
                                        Failure** failure) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1,
                      scratch2, scratch3, name, miss);

  Handle<AccessorInfo> callback_handle(callback);

  __ EnterInternalFrame();
  __ PushHandleScope(scratch2);
  // Push the stack address where the list of arguments ends.
  __ movq(scratch2, rsp);
  __ subq(scratch2, Immediate(2 * kPointerSize));
  __ push(scratch2);
  __ push(receiver);  // receiver
  __ push(reg);  // holder
  if (Heap::InNewSpace(callback_handle->data())) {
    __ Move(scratch2, callback_handle);
    __ push(FieldOperand(scratch2, AccessorInfo::kDataOffset));  // data
  } else {
    __ Push(Handle<Object>(callback_handle->data()));
  }
  __ push(name_reg);  // name
  // Save a pointer to where we pushed the arguments pointer.
  // This will be passed as the const AccessorInfo& to the C++ callback.

#ifdef _WIN64
  // Win64 uses first register--rcx--for returned value.
  Register accessor_info_arg = r8;
  Register name_arg = rdx;
#else
  Register accessor_info_arg = rdx;  // temporary, copied to rsi by the stub.
  Register name_arg = rdi;
#endif

  __ movq(accessor_info_arg, rsp);
  __ addq(accessor_info_arg, Immediate(4 * kPointerSize));
  __ movq(name_arg, rsp);

  // Do call through the api.
  ASSERT_EQ(5, ApiGetterEntryStub::kStackSpace);
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ApiGetterEntryStub stub(callback_handle, &fun);

  // We need to prepare a slot for result handle on stack and put
  // a pointer to it into 1st arg register.
  __ push(Immediate(0));

  // Emitting a stub call may try to allocate (if the code is not
  // already generated).  Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  Object* result = masm()->TryCallStub(&stub);
  if (result->IsFailure()) {
    *failure = Failure::cast(result);
    return false;
  }

  // Discard allocated slot.
  __ addq(rsp, Immediate(kPointerSize));

  // We need to avoid using rax since that now holds the result.
  Register tmp = scratch2.is(rax) ? reg : scratch2;
  // Emitting PopHandleScope may try to allocate.  Do not allow the
  // assembler to perform a garbage collection but instead return a
  // failure object.
  result = masm()->TryPopHandleScope(rax, tmp);
  if (result->IsFailure()) {
    *failure = Failure::cast(result);
    return false;
  }
  __ LeaveInternalFrame();

  __ ret(0);
  return true;
}
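
// GenerateLoadCallback is the one generator here that can itself fail:
// TryCallStub and TryPopHandleScope may need to allocate code objects, and
// an allocation failure cannot be handled in the middle of code generation.
// Returning false with *failure set lets the caller unwind and retry the
// whole compilation instead of crashing inside the assembler.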


Register StubCompiler::CheckPrototypes(JSObject* object,
                                       Register object_reg,
                                       JSObject* holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       String* name,
                                       int save_at_depth,
                                       Label* miss) {
  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.  On the first
  // iteration, reg is an alias for object_reg, on later iterations,
  // it is an alias for holder_reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    __ movq(Operand(rsp, kPointerSize), object_reg);
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  JSObject* current = object;
  while (current != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(current->GetPrototype());
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      if (!name->IsSymbol()) {
        Object* lookup_result = Heap::LookupSymbol(name);
        if (lookup_result->IsFailure()) {
          set_failure(Failure::cast(lookup_result));
          return reg;
        }
        name = String::cast(lookup_result);
      }
      ASSERT(current->property_dictionary()->FindEntry(name) ==
             StringDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(),
                                       miss,
                                       reg,
                                       name,
                                       scratch1,
                                       scratch2);
      __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // from now the object is in holder_reg
      __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      __ Cmp(scratch1, Handle<Map>(current->map()));
      // Branch on the result of the map check.
      __ j(not_equal, miss);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code.  Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
             Handle<Map>(current->map()));
      // Branch on the result of the map check.
      __ j(not_equal, miss);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      __ Move(reg, Handle<JSObject>(prototype));
    }

    if (save_at_depth == depth) {
      __ movq(Operand(rsp, kPointerSize), reg);
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }

  // Check the holder map.
  __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
  __ j(not_equal, miss);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth + 1));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(current == holder);
  ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
  if (current->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // If we've skipped any global objects, it's not enough to verify
  // that their maps haven't changed.  We also need to check that the
  // property cell for the property is still empty.
  current = object;
  while (current != holder) {
    if (current->IsGlobalObject()) {
      Object* cell = GenerateCheckPropertyCell(masm(),
                                               GlobalObject::cast(current),
                                               name,
                                               scratch1,
                                               miss);
      if (cell->IsFailure()) {
        set_failure(Failure::cast(cell));
        return reg;
      }
    }
    current = JSObject::cast(current->GetPrototype());
  }

  // Return the register containing the holder.
  return reg;
}
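
// Example walk: for a load of p.x where the chain is
// receiver -> proto -> holder, CheckPrototypes emits up to three map
// compares.  Prototypes already in old space are embedded directly as
// handles, while new-space prototypes are re-loaded from the map each time,
// since the generated code object must never embed a new-space pointer.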


void StubCompiler::GenerateLoadField(JSObject* object,
                                     JSObject* holder,
                                     Register receiver,
                                     Register scratch1,
                                     Register scratch2,
                                     Register scratch3,
                                     int index,
                                     String* name,
                                     Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check the prototype chain.
  Register reg =
      CheckPrototypes(object, receiver, holder,
                      scratch1, scratch2, scratch3, name, miss);

  // Get the value from the properties.
  GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
  __ ret(0);
}
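
// GenerateFastPropertyLoad resolves the field index to either an in-object
// slot or a slot in the out-of-object properties array, the same split
// GenerateStoreField handles on the store side.  By this point all guarding
// has been done by CheckPrototypes, so the load itself is at most two movq
// instructions.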


void StubCompiler::GenerateLoadConstant(JSObject* object,
                                        JSObject* holder,
                                        Register receiver,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        Object* value,
                                        String* name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  CheckPrototypes(object, receiver, holder,
                  scratch1, scratch2, scratch3, name, miss);

  // Return the constant value.
  __ Move(rax, Handle<Object>(value));
  __ ret(0);
}


// Specialized stub for constructing objects from functions which only have
// simple assignments of the form this.x = ...; in their body.
MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  // -----------------------------------
  // ...
}


MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
    ExternalArrayType array_type, Code::Flags flags) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow;

  // Check that the object isn't a smi.
  __ JumpIfSmi(rdx, &slow);

  // Check that the key is a smi.
  __ JumpIfNotSmi(rax, &slow);

  // Check that the object is a JS object.
  __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
  __ j(not_equal, &slow);
  // Check that the receiver does not require access checks.  We need
  // to check this explicitly since this generic stub does not perform
  // map checks.  The map is already in rcx.
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsAccessCheckNeeded));
  __ j(not_zero, &slow);

  // Check that the elements array is the appropriate type of
  // ExternalArray.
  // rax: index (as a smi)
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::RootIndexForExternalArrayType(array_type));
  __ j(not_equal, &slow);

  // Check that the index is in range.
  __ SmiToInteger32(rcx, rax);
  __ cmpl(rcx, FieldOperand(rbx, ExternalArray::kLengthOffset));
  // Unsigned comparison catches both negative and too-large values.
  __ j(above_equal, &slow);

  // rax: index (as a smi)
  // rdx: receiver (JSObject)
  // rcx: untagged index
  // rbx: elements array
  __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
  // rbx: base pointer of external storage
  switch (array_type) {
    case kExternalByteArray:
      __ movsxbq(rcx, Operand(rbx, rcx, times_1, 0));
      break;
    case kExternalUnsignedByteArray:
      __ movzxbq(rcx, Operand(rbx, rcx, times_1, 0));
      break;
    case kExternalShortArray:
      __ movsxwq(rcx, Operand(rbx, rcx, times_2, 0));
      break;
    case kExternalUnsignedShortArray:
      __ movzxwq(rcx, Operand(rbx, rcx, times_2, 0));
      break;
    case kExternalIntArray:
      __ movsxlq(rcx, Operand(rbx, rcx, times_4, 0));
      break;
    case kExternalUnsignedIntArray:
      __ movl(rcx, Operand(rbx, rcx, times_4, 0));
      break;
    case kExternalFloatArray:
      __ cvtss2sd(xmm0, Operand(rbx, rcx, times_4, 0));
      break;
    default:
      UNREACHABLE();
      break;
  }

  // For integer array types:
  // rcx: value
  // For floating-point array type:
  // xmm0: value as double.

  ASSERT(kSmiValueSize == 32);
  if (array_type == kExternalUnsignedIntArray) {
    // For the UnsignedInt array type, we need to see whether
    // the value can be represented in a Smi. If not, we need to convert
    // it to a HeapNumber.
    Label box_int;

    __ JumpIfUIntNotValidSmiValue(rcx, &box_int);

    __ Integer32ToSmi(rax, rcx);
    __ ret(0);

    __ bind(&box_int);

    // Allocate a HeapNumber for the int and perform int-to-double
    // conversion.
    // The value is zero-extended since we loaded the value from memory
    // as an unsigned int.
    __ cvtqsi2sd(xmm0, rcx);

    __ AllocateHeapNumber(rcx, rbx, &slow);
    // Set the value.
    __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
    __ movq(rax, rcx);
    __ ret(0);
  } else if (array_type == kExternalFloatArray) {
    // For the floating-point array type, we need to always allocate a
    // HeapNumber.
    __ AllocateHeapNumber(rcx, rbx, &slow);
    // Set the value.
    __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
    __ movq(rax, rcx);
    __ ret(0);
  } else {
    __ Integer32ToSmi(rax, rcx);
    __ ret(0);
  }

  // Slow case: Jump to runtime.
  __ bind(&slow);
  __ IncrementCounter(&Counters::keyed_load_external_array_slow, 1);

  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rax);  // name
  __ push(rbx);  // return address

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);

  // Return the generated code.
  return GetCode(flags);
}
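
// With 32-bit smi values on x64, every element type except uint32 and
// float fits in a smi unconditionally, so those are the only two paths
// that can allocate.  That is also why the uint32 path tests
// JumpIfUIntNotValidSmiValue first: values below 2^31 still take the
// allocation-free Integer32ToSmi exit.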


MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
    ExternalArrayType array_type, Code::Flags flags) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow;

  // Check that the object isn't a smi.
  __ JumpIfSmi(rdx, &slow);
  // Get the map from the receiver.
  __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks.  We need
  // to do this because this generic stub does not perform map checks.
  __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsAccessCheckNeeded));
  __ j(not_zero, &slow);
  // Check that the key is a smi.
  __ JumpIfNotSmi(rcx, &slow);

  // Check that the object is a JS object.
  __ CmpInstanceType(rbx, JS_OBJECT_TYPE);
  __ j(not_equal, &slow);

  // Check that the elements array is the appropriate type of
  // ExternalArray.
  // rax: value
  // rcx: key (a smi)
  // rdx: receiver (a JSObject)
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::RootIndexForExternalArrayType(array_type));
  __ j(not_equal, &slow);

  // Check that the index is in range.
  __ SmiToInteger32(rdi, rcx);  // Untag the index.
  __ cmpl(rdi, FieldOperand(rbx, ExternalArray::kLengthOffset));
  // Unsigned comparison catches both negative and too-large values.
  __ j(above_equal, &slow);

  // Handle both smis and HeapNumbers in the fast path. Go to the
  // runtime for all other kinds of values.
  // rax: value
  // rcx: key (a smi)
  // rdx: receiver (a JSObject)
  // rbx: elements array
  // rdi: untagged key
  NearLabel check_heap_number;
  __ JumpIfNotSmi(rax, &check_heap_number);
  // No more branches to slow case on this path.  Key and receiver not needed.
  __ SmiToInteger32(rdx, rax);
  __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
  // rbx: base pointer of external storage
  switch (array_type) {
    case kExternalByteArray:
    case kExternalUnsignedByteArray:
      __ movb(Operand(rbx, rdi, times_1, 0), rdx);
      break;
    case kExternalShortArray:
    case kExternalUnsignedShortArray:
      __ movw(Operand(rbx, rdi, times_2, 0), rdx);
      break;
    case kExternalIntArray:
    case kExternalUnsignedIntArray:
      __ movl(Operand(rbx, rdi, times_4, 0), rdx);
      break;
    case kExternalFloatArray:
      // Need to perform int-to-float conversion.
      __ cvtlsi2ss(xmm0, rdx);
      __ movss(Operand(rbx, rdi, times_4, 0), xmm0);
      break;
    default:
      UNREACHABLE();
      break;
  }
  __ ret(0);

  __ bind(&check_heap_number);
  // rax: value
  // rdx: receiver (a JSObject)
  // rbx: elements array
  // rdi: untagged key
  __ CmpObjectType(rax, HEAP_NUMBER_TYPE, kScratchRegister);
  __ j(not_equal, &slow);
  // No more branches to slow case on this path.

  // The WebGL specification leaves the behavior of storing NaN and
  // +/-Infinity into integer arrays basically undefined. For more
  // reproducible behavior, convert these to zero.
  __ movsd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
  __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
  // rdi: untagged index
  // rbx: base pointer of external storage
  // xmm0: value (a double)
  if (array_type == kExternalFloatArray) {
    __ cvtsd2ss(xmm0, xmm0);
    __ movss(Operand(rbx, rdi, times_4, 0), xmm0);
    __ ret(0);
  } else {
    // Perform float-to-int conversion with truncation (round-to-zero)
    // behavior.

    // Convert to int32 and store the low byte/word.
    // If the value is NaN or +/-infinity, the result is 0x80000000,
    // which is automatically zero when taken mod 2^n, n < 32.
    // rdx: value (converted to an untagged integer)
    // rdi: untagged index
    // rbx: base pointer of external storage
    switch (array_type) {
      case kExternalByteArray:
      case kExternalUnsignedByteArray:
        __ cvttsd2si(rdx, xmm0);
        __ movb(Operand(rbx, rdi, times_1, 0), rdx);
        break;
      case kExternalShortArray:
      case kExternalUnsignedShortArray:
        __ cvttsd2si(rdx, xmm0);
        __ movw(Operand(rbx, rdi, times_2, 0), rdx);
        break;
      case kExternalIntArray:
      case kExternalUnsignedIntArray: {
        // Convert to int64, so that NaN and infinities become
        // 0x8000000000000000, which is zero mod 2^32.
        __ cvttsd2siq(rdx, xmm0);
        __ movl(Operand(rbx, rdi, times_4, 0), rdx);
        break;
      }
      default:
        UNREACHABLE();
        break;
    }
    __ ret(0);
  }

  // Slow case: call runtime.
  __ bind(&slow);

  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ Push(Smi::FromInt(NONE));  // PropertyAttributes
  __ Push(Smi::FromInt(
      Code::ExtractExtraICStateFromFlags(flags) & kStrictMode));
  __ push(rbx);  // return address

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);

  return GetCode(flags);
}
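
// The truncating conversions encode the WebGL-sanctioned handling of
// non-finite values: cvttsd2si turns NaN and +/-Infinity into 0x80000000
// (low 8 and 16 bits zero, covering the byte and short cases), and
// cvttsd2siq turns them into 0x8000000000000000 (low 32 bits zero), so a
// single instruction both truncates finite doubles and zeroes the
// non-finite cases without any explicit comparison.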

} }  // namespace v8::internal