1
// Copyright 2012 the V8 project authors. All rights reserved.
2
// Redistribution and use in source and binary forms, with or without
3
// modification, are permitted provided that the following conditions are
6
// * Redistributions of source code must retain the above copyright
7
// notice, this list of conditions and the following disclaimer.
8
// * Redistributions in binary form must reproduce the above
9
// copyright notice, this list of conditions and the following
10
// disclaimer in the documentation and/or other materials provided
11
// with the distribution.
12
// * Neither the name of Google Inc. nor the names of its
13
// contributors may be used to endorse or promote products derived
14
// from this software without specific prior written permission.
16
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
#if defined(V8_TARGET_ARCH_X64)
35
#include "stub-cache.h"
40
// ----------------------------------------------------------------------------
41
// Static IC stub generators.
44
#define __ ACCESS_MASM(masm)
47
static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
49
Label* global_object) {
51
// type: holds the receiver instance type on entry.
52
__ cmpb(type, Immediate(JS_GLOBAL_OBJECT_TYPE));
53
__ j(equal, global_object);
54
__ cmpb(type, Immediate(JS_BUILTINS_OBJECT_TYPE));
55
__ j(equal, global_object);
56
__ cmpb(type, Immediate(JS_GLOBAL_PROXY_TYPE));
57
__ j(equal, global_object);
61
// Generated code falls through if the receiver is a regular non-global
62
// JS object with slow properties and no interceptors.
63
static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
69
// receiver: holds the receiver on entry and is unchanged.
70
// r0: used to hold receiver instance type.
71
// Holds the property dictionary on fall through.
72
// r1: used to hold receivers map.
74
__ JumpIfSmi(receiver, miss);
76
// Check that the receiver is a valid JS object.
77
__ movq(r1, FieldOperand(receiver, HeapObject::kMapOffset));
78
__ movb(r0, FieldOperand(r1, Map::kInstanceTypeOffset));
79
__ cmpb(r0, Immediate(FIRST_SPEC_OBJECT_TYPE));
82
// If this assert fails, we have to check upper bound too.
83
STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
85
GenerateGlobalInstanceTypeCheck(masm, r0, miss);
87
// Check for non-global object that requires access check.
88
__ testb(FieldOperand(r1, Map::kBitFieldOffset),
89
Immediate((1 << Map::kIsAccessCheckNeeded) |
90
(1 << Map::kHasNamedInterceptor)));
93
__ movq(r0, FieldOperand(receiver, JSObject::kPropertiesOffset));
94
__ CompareRoot(FieldOperand(r0, HeapObject::kMapOffset),
95
Heap::kHashTableMapRootIndex);
96
__ j(not_equal, miss);
101
// Helper function used to load a property from a dictionary backing storage.
102
// This function may return false negatives, so miss_label
103
// must always call a backup property load that is complete.
104
// This function is safe to call if name is not a symbol, and will jump to
105
// the miss_label in that case.
106
// The generated code assumes that the receiver has slow properties,
107
// is not a global object and does not have interceptors.
108
static void GenerateDictionaryLoad(MacroAssembler* masm,
117
// elements - holds the property dictionary on entry and is unchanged.
119
// name - holds the name of the property on entry and is unchanged.
121
// r0 - used to hold the capacity of the property dictionary.
123
// r1 - used to hold the index into the property dictionary.
125
// result - holds the result on exit if the load succeeded.
129
// Probe the dictionary.
130
StringDictionaryLookupStub::GeneratePositiveLookup(masm,
138
// If probing finds an entry in the dictionary, r1 contains the
139
// index into the dictionary. Check that the value is a normal
142
const int kElementsStartOffset =
143
StringDictionary::kHeaderSize +
144
StringDictionary::kElementsStartIndex * kPointerSize;
145
const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
146
__ Test(Operand(elements, r1, times_pointer_size,
147
kDetailsOffset - kHeapObjectTag),
148
Smi::FromInt(PropertyDetails::TypeField::kMask));
149
__ j(not_zero, miss_label);
151
// Get the value at the masked, scaled index.
152
const int kValueOffset = kElementsStartOffset + kPointerSize;
154
Operand(elements, r1, times_pointer_size,
155
kValueOffset - kHeapObjectTag));
159
// Helper function used to store a property to a dictionary backing
160
// storage. This function may fail to store a property even though it
161
// is in the dictionary, so code at miss_label must always call a
162
// backup property store that is complete. This function is safe to
163
// call if name is not a symbol, and will jump to the miss_label in
164
// that case. The generated code assumes that the receiver has slow
165
// properties, is not a global object and does not have interceptors.
166
static void GenerateDictionaryStore(MacroAssembler* masm,
175
// elements - holds the property dictionary on entry and is clobbered.
177
// name - holds the name of the property on entry and is unchanged.
179
// value - holds the value to store and is unchanged.
181
// scratch0 - used during the positive dictionary lookup and is clobbered.
183
// scratch1 - used for index into the property dictionary and is clobbered.
186
// Probe the dictionary.
187
StringDictionaryLookupStub::GeneratePositiveLookup(masm,
195
// If probing finds an entry in the dictionary, scratch0 contains the
196
// index into the dictionary. Check that the value is a normal
197
// property that is not read only.
199
const int kElementsStartOffset =
200
StringDictionary::kHeaderSize +
201
StringDictionary::kElementsStartIndex * kPointerSize;
202
const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
203
const int kTypeAndReadOnlyMask =
204
(PropertyDetails::TypeField::kMask |
205
PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
206
__ Test(Operand(elements,
209
kDetailsOffset - kHeapObjectTag),
210
Smi::FromInt(kTypeAndReadOnlyMask));
211
__ j(not_zero, miss_label);
213
// Store the value at the masked, scaled index.
214
const int kValueOffset = kElementsStartOffset + kPointerSize;
215
__ lea(scratch1, Operand(elements,
218
kValueOffset - kHeapObjectTag));
219
__ movq(Operand(scratch1, 0), value);
221
// Update write barrier. Make sure not to clobber the value.
222
__ movq(scratch0, value);
223
__ RecordWrite(elements, scratch1, scratch0, kDontSaveFPRegs);
227
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
228
// ----------- S t a t e -------------
231
// -- rsp[0] : return address
232
// -----------------------------------
235
StubCompiler::GenerateLoadArrayLength(masm, rax, rdx, &miss);
237
StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
241
void LoadIC::GenerateStringLength(MacroAssembler* masm, bool support_wrappers) {
242
// ----------- S t a t e -------------
245
// -- rsp[0] : return address
246
// -----------------------------------
249
StubCompiler::GenerateLoadStringLength(masm, rax, rdx, rbx, &miss,
252
StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
256
void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
257
// ----------- S t a t e -------------
260
// -- rsp[0] : return address
261
// -----------------------------------
264
StubCompiler::GenerateLoadFunctionPrototype(masm, rax, rdx, rbx, &miss);
266
StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
270
// Checks the receiver for special cases (value type, slow case bits).
271
// Falls through for regular JS object.
272
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
278
// receiver - holds the receiver and is unchanged.
279
// Scratch registers:
280
// map - used to hold the map of the receiver.
282
// Check that the object isn't a smi.
283
__ JumpIfSmi(receiver, slow);
285
// Check that the object is some kind of JS object EXCEPT JS Value type.
286
// In the case that the object is a value-wrapper object,
287
// we enter the runtime system to make sure that indexing
288
// into string objects work as intended.
289
ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
290
__ CmpObjectType(receiver, JS_OBJECT_TYPE, map);
294
__ testb(FieldOperand(map, Map::kBitFieldOffset),
295
Immediate((1 << Map::kIsAccessCheckNeeded) |
296
(1 << interceptor_bit)));
297
__ j(not_zero, slow);
301
// Loads an indexed element from a fast case array.
302
// If not_fast_array is NULL, doesn't perform the elements map check.
303
static void GenerateFastArrayLoad(MacroAssembler* masm,
309
Label* not_fast_array,
310
Label* out_of_range) {
313
// receiver - holds the receiver on entry.
314
// Unchanged unless 'result' is the same register.
316
// key - holds the smi key on entry.
317
// Unchanged unless 'result' is the same register.
319
// elements - holds the elements of the receiver on exit.
321
// result - holds the result on exit if the load succeeded.
322
// Allowed to be the the same as 'receiver' or 'key'.
323
// Unchanged on bailout so 'receiver' and 'key' can be safely
324
// used by further computation.
326
// Scratch registers:
328
// scratch - used to hold elements of the receiver and the loaded value.
330
__ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset));
331
if (not_fast_array != NULL) {
332
// Check that the object is in fast mode and writable.
333
__ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
334
Heap::kFixedArrayMapRootIndex);
335
__ j(not_equal, not_fast_array);
337
__ AssertFastElements(elements);
339
// Check that the key (index) is within bounds.
340
__ SmiCompare(key, FieldOperand(elements, FixedArray::kLengthOffset));
341
// Unsigned comparison rejects negative indices.
342
__ j(above_equal, out_of_range);
343
// Fast case: Do the load.
344
SmiIndex index = masm->SmiToIndex(scratch, key, kPointerSizeLog2);
345
__ movq(scratch, FieldOperand(elements,
348
FixedArray::kHeaderSize));
349
__ CompareRoot(scratch, Heap::kTheHoleValueRootIndex);
350
// In case the loaded value is the_hole we have to consult GetProperty
351
// to ensure the prototype chain is searched.
352
__ j(equal, out_of_range);
353
if (!result.is(scratch)) {
354
__ movq(result, scratch);
359
// Checks whether a key is an array index string or a symbol string.
360
// Falls through if the key is a symbol.
361
static void GenerateKeyStringCheck(MacroAssembler* masm,
368
// key - holds the key and is unchanged. Assumed to be non-smi.
369
// Scratch registers:
370
// map - used to hold the map of the key.
371
// hash - used to hold the hash of the key.
372
__ CmpObjectType(key, FIRST_NONSTRING_TYPE, map);
373
__ j(above_equal, not_symbol);
374
// Is the string an array index, with cached numeric value?
375
__ movl(hash, FieldOperand(key, String::kHashFieldOffset));
376
__ testl(hash, Immediate(String::kContainsCachedArrayIndexMask));
377
__ j(zero, index_string); // The value in hash is used at jump target.
379
// Is the string a symbol?
380
STATIC_ASSERT(kSymbolTag != 0);
381
__ testb(FieldOperand(map, Map::kInstanceTypeOffset),
382
Immediate(kIsSymbolMask));
383
__ j(zero, not_symbol);
388
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
389
// ----------- S t a t e -------------
392
// -- rsp[0] : return address
393
// -----------------------------------
394
Label slow, check_string, index_smi, index_string, property_array_property;
395
Label probe_dictionary, check_number_dictionary;
397
// Check that the key is a smi.
398
__ JumpIfNotSmi(rax, &check_string);
400
// Now the key is known to be a smi. This place is also jumped to from below
401
// where a numeric string is converted to a smi.
403
GenerateKeyedLoadReceiverCheck(
404
masm, rdx, rcx, Map::kHasIndexedInterceptor, &slow);
406
// Check the receiver's map to see if it has fast elements.
407
__ CheckFastElements(rcx, &check_number_dictionary);
409
GenerateFastArrayLoad(masm,
417
Counters* counters = masm->isolate()->counters();
418
__ IncrementCounter(counters->keyed_load_generic_smi(), 1);
421
__ bind(&check_number_dictionary);
422
__ SmiToInteger32(rbx, rax);
423
__ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
425
// Check whether the elements is a number dictionary.
428
// rbx: key as untagged int32
430
__ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
431
Heap::kHashTableMapRootIndex);
432
__ j(not_equal, &slow);
433
__ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax);
437
// Slow case: Jump to runtime.
440
__ IncrementCounter(counters->keyed_load_generic_slow(), 1);
441
GenerateRuntimeGetProperty(masm);
443
__ bind(&check_string);
444
GenerateKeyStringCheck(masm, rax, rcx, rbx, &index_string, &slow);
446
GenerateKeyedLoadReceiverCheck(
447
masm, rdx, rcx, Map::kHasNamedInterceptor, &slow);
449
// If the receiver is a fast-case object, check the keyed lookup
450
// cache. Otherwise probe the dictionary leaving result in rcx.
451
__ movq(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset));
452
__ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
453
Heap::kHashTableMapRootIndex);
454
__ j(equal, &probe_dictionary);
456
// Load the map of the receiver, compute the keyed lookup cache hash
457
// based on 32 bits of the map pointer and the string hash.
458
__ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
460
__ shr(rcx, Immediate(KeyedLookupCache::kMapHashShift));
461
__ movl(rdi, FieldOperand(rax, String::kHashFieldOffset));
462
__ shr(rdi, Immediate(String::kHashShift));
464
int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
465
__ and_(rcx, Immediate(mask));
467
// Load the key (consisting of map and symbol) from the cache and
469
Label load_in_object_property;
470
static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
471
Label hit_on_nth_entry[kEntriesPerBucket];
472
ExternalReference cache_keys
473
= ExternalReference::keyed_lookup_cache_keys(masm->isolate());
475
for (int i = 0; i < kEntriesPerBucket - 1; i++) {
476
Label try_next_entry;
478
__ shl(rdi, Immediate(kPointerSizeLog2 + 1));
479
__ LoadAddress(kScratchRegister, cache_keys);
480
int off = kPointerSize * i * 2;
481
__ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, off));
482
__ j(not_equal, &try_next_entry);
483
__ cmpq(rax, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
484
__ j(equal, &hit_on_nth_entry[i]);
485
__ bind(&try_next_entry);
488
int off = kPointerSize * (kEntriesPerBucket - 1) * 2;
489
__ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, off));
490
__ j(not_equal, &slow);
491
__ cmpq(rax, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
492
__ j(not_equal, &slow);
494
// Get field offset, which is a 32-bit integer.
495
ExternalReference cache_field_offsets
496
= ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
499
for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
500
__ bind(&hit_on_nth_entry[i]);
502
__ addl(rcx, Immediate(i));
504
__ LoadAddress(kScratchRegister, cache_field_offsets);
505
__ movl(rdi, Operand(kScratchRegister, rcx, times_4, 0));
506
__ movzxbq(rcx, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
508
__ j(above_equal, &property_array_property);
510
__ jmp(&load_in_object_property);
514
// Load in-object property.
515
__ bind(&load_in_object_property);
516
__ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
518
__ movq(rax, FieldOperand(rdx, rcx, times_pointer_size, 0));
519
__ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
522
// Load property array property.
523
__ bind(&property_array_property);
524
__ movq(rax, FieldOperand(rdx, JSObject::kPropertiesOffset));
525
__ movq(rax, FieldOperand(rax, rdi, times_pointer_size,
526
FixedArray::kHeaderSize));
527
__ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
530
// Do a quick inline probe of the receiver's dictionary, if it
532
__ bind(&probe_dictionary);
537
__ movq(rcx, FieldOperand(rdx, JSObject::kMapOffset));
538
__ movb(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
539
GenerateGlobalInstanceTypeCheck(masm, rcx, &slow);
541
GenerateDictionaryLoad(masm, &slow, rbx, rax, rcx, rdi, rax);
542
__ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
545
__ bind(&index_string);
546
__ IndexFromHash(rbx, rax);
551
void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
552
// ----------- S t a t e -------------
555
// -- rsp[0] : return address
556
// -----------------------------------
559
Register receiver = rdx;
560
Register index = rax;
561
Register scratch = rcx;
562
Register result = rax;
564
StringCharAtGenerator char_at_generator(receiver,
568
&miss, // When not a string.
569
&miss, // When not a number.
570
&miss, // When index out of range.
571
STRING_INDEX_IS_ARRAY_INDEX);
572
char_at_generator.GenerateFast(masm);
575
StubRuntimeCallHelper call_helper;
576
char_at_generator.GenerateSlow(masm, call_helper);
579
GenerateMiss(masm, false);
583
void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
584
// ----------- S t a t e -------------
587
// -- rsp[0] : return address
588
// -----------------------------------
591
// Check that the receiver isn't a smi.
592
__ JumpIfSmi(rdx, &slow);
594
// Check that the key is an array index, that is Uint32.
595
STATIC_ASSERT(kSmiValueSize <= 32);
596
__ JumpUnlessNonNegativeSmi(rax, &slow);
598
// Get the map of the receiver.
599
__ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
601
// Check that it has indexed interceptor and access checks
602
// are not enabled for this object.
603
__ movb(rcx, FieldOperand(rcx, Map::kBitFieldOffset));
604
__ andb(rcx, Immediate(kSlowCaseBitFieldMask));
605
__ cmpb(rcx, Immediate(1 << Map::kHasIndexedInterceptor));
606
__ j(not_zero, &slow);
608
// Everything is fine, call runtime.
610
__ push(rdx); // receiver
612
__ push(rcx); // return address
614
// Perform tail call to the entry.
615
__ TailCallExternalReference(
616
ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
622
GenerateMiss(masm, false);
626
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
627
StrictModeFlag strict_mode) {
628
// ----------- S t a t e -------------
632
// -- rsp[0] : return address
633
// -----------------------------------
634
Label slow, slow_with_tagged_index, fast, array, extra, check_extra_double;
635
Label fast_object_with_map_check, fast_object_without_map_check;
636
Label fast_double_with_map_check, fast_double_without_map_check;
637
Label transition_smi_elements, finish_object_store, non_double_value;
638
Label transition_double_elements;
640
// Check that the object isn't a smi.
641
__ JumpIfSmi(rdx, &slow_with_tagged_index);
642
// Get the map from the receiver.
643
__ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
644
// Check that the receiver does not require access checks. We need
645
// to do this because this generic stub does not perform map checks.
646
__ testb(FieldOperand(r9, Map::kBitFieldOffset),
647
Immediate(1 << Map::kIsAccessCheckNeeded));
648
__ j(not_zero, &slow_with_tagged_index);
649
// Check that the key is a smi.
650
__ JumpIfNotSmi(rcx, &slow_with_tagged_index);
651
__ SmiToInteger32(rcx, rcx);
653
__ CmpInstanceType(r9, JS_ARRAY_TYPE);
655
// Check that the object is some kind of JSObject.
656
__ CmpInstanceType(r9, FIRST_JS_OBJECT_TYPE);
659
// Object case: Check key against length in the elements array.
663
__ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
664
// Check array bounds.
665
__ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx);
669
__ j(above, &fast_object_with_map_check);
671
// Slow case: call runtime.
673
__ Integer32ToSmi(rcx, rcx);
674
__ bind(&slow_with_tagged_index);
675
GenerateRuntimeSetProperty(masm, strict_mode);
676
// Never returns to here.
678
// Extra capacity case: Check if there is extra capacity to
679
// perform the store and update the length. Used for adding one
680
// element to the array by writing to array[array.length].
683
// rdx: receiver (a JSArray)
684
// rbx: receiver's elements array (a FixedArray)
686
// flags: smicompare (rdx.length(), rbx)
687
__ j(not_equal, &slow); // do not leave holes in the array
688
__ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx);
689
__ j(below_equal, &slow);
690
// Increment index to get new length.
691
__ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
692
__ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
693
__ j(not_equal, &check_extra_double);
694
__ leal(rdi, Operand(rcx, 1));
695
__ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
696
__ jmp(&fast_object_without_map_check);
698
__ bind(&check_extra_double);
699
// rdi: elements array's map
700
__ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
701
__ j(not_equal, &slow);
702
__ leal(rdi, Operand(rcx, 1));
703
__ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
704
__ jmp(&fast_double_without_map_check);
706
// Array case: Get the length and the elements array from the JS
707
// array. Check that the array is in fast mode (and writable); if it
708
// is the length is always a smi.
711
// rdx: receiver (a JSArray)
713
__ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
715
// Check the key against the length in the array, compute the
716
// address to store into and fall through to fast case.
717
__ SmiCompareInteger32(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
718
__ j(below_equal, &extra);
720
// Fast case: Do the store.
721
__ bind(&fast_object_with_map_check);
723
// rbx: receiver's elements array (a FixedArray)
725
// rdx: receiver (a JSArray)
726
__ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
727
__ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
728
__ j(not_equal, &fast_double_with_map_check);
729
__ bind(&fast_object_without_map_check);
730
// Smi stores don't require further checks.
732
__ JumpIfNotSmi(rax, &non_smi_value);
733
// It's irrelevant whether array is smi-only or not when writing a smi.
734
__ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
738
__ bind(&non_smi_value);
739
// Writing a non-smi, check whether array allows non-smi elements.
740
// r9: receiver's map
741
__ CheckFastObjectElements(r9, &transition_smi_elements);
742
__ bind(&finish_object_store);
743
__ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
745
__ movq(rdx, rax); // Preserve the value which is returned.
747
rbx, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
750
__ bind(&fast_double_with_map_check);
751
// Check for fast double array case. If this fails, call through to the
753
// rdi: elements array's map
754
__ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
755
__ j(not_equal, &slow);
756
__ bind(&fast_double_without_map_check);
757
// If the value is a number, store it as a double in the FastDoubleElements
759
__ StoreNumberToDoubleElements(rax, rbx, rcx, xmm0,
760
&transition_double_elements);
763
__ bind(&transition_smi_elements);
764
__ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
766
// Transition the array appropriately depending on the value type.
767
__ movq(r9, FieldOperand(rax, HeapObject::kMapOffset));
768
__ CompareRoot(r9, Heap::kHeapNumberMapRootIndex);
769
__ j(not_equal, &non_double_value);
771
// Value is a double. Transition FAST_SMI_ELEMENTS ->
772
// FAST_DOUBLE_ELEMENTS and complete the store.
773
__ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
774
FAST_DOUBLE_ELEMENTS,
778
ElementsTransitionGenerator::GenerateSmiToDouble(masm, &slow);
779
__ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
780
__ jmp(&fast_double_without_map_check);
782
__ bind(&non_double_value);
783
// Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
784
__ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
789
ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
790
__ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
791
__ jmp(&finish_object_store);
793
__ bind(&transition_double_elements);
794
// Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
795
// HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
796
// transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
797
__ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
798
__ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
803
ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
804
__ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
805
__ jmp(&finish_object_store);
809
// The generated code does not accept smi keys.
810
// The generated code falls through if both probes miss.
811
void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
814
Code::ExtraICState extra_state) {
815
// ----------- S t a t e -------------
816
// rcx : function name
818
// -----------------------------------
819
Label number, non_number, non_string, boolean, probe, miss;
821
// Probe the stub cache.
822
Code::Flags flags = Code::ComputeFlags(kind,
827
Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
830
// If the stub cache probing failed, the receiver might be a value.
831
// For value objects, we use the map of the prototype objects for
832
// the corresponding JSValue for the cache and that is what we need
836
__ JumpIfSmi(rdx, &number);
837
__ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rbx);
838
__ j(not_equal, &non_number);
840
StubCompiler::GenerateLoadGlobalFunctionPrototype(
841
masm, Context::NUMBER_FUNCTION_INDEX, rdx);
845
__ bind(&non_number);
846
__ CmpInstanceType(rbx, FIRST_NONSTRING_TYPE);
847
__ j(above_equal, &non_string);
848
StubCompiler::GenerateLoadGlobalFunctionPrototype(
849
masm, Context::STRING_FUNCTION_INDEX, rdx);
852
// Check for boolean.
853
__ bind(&non_string);
854
__ CompareRoot(rdx, Heap::kTrueValueRootIndex);
855
__ j(equal, &boolean);
856
__ CompareRoot(rdx, Heap::kFalseValueRootIndex);
857
__ j(not_equal, &miss);
859
StubCompiler::GenerateLoadGlobalFunctionPrototype(
860
masm, Context::BOOLEAN_FUNCTION_INDEX, rdx);
862
// Probe the stub cache for the value object.
864
Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
871
static void GenerateFunctionTailCall(MacroAssembler* masm,
874
// ----------- S t a t e -------------
875
// rcx : function name
877
// rsp[0] : return address
878
// rsp[8] : argument argc
879
// rsp[16] : argument argc - 1
881
// rsp[argc * 8] : argument 1
882
// rsp[(argc + 1) * 8] : argument 0 = receiver
883
// -----------------------------------
884
__ JumpIfSmi(rdi, miss);
885
// Check that the value is a JavaScript function.
886
__ CmpObjectType(rdi, JS_FUNCTION_TYPE, rdx);
887
__ j(not_equal, miss);
889
// Invoke the function.
890
ParameterCount actual(argc);
891
__ InvokeFunction(rdi, actual, JUMP_FUNCTION,
892
NullCallWrapper(), CALL_AS_METHOD);
896
// The generated code falls through if the call should be handled by runtime.
897
void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
898
// ----------- S t a t e -------------
899
// rcx : function name
900
// rsp[0] : return address
901
// rsp[8] : argument argc
902
// rsp[16] : argument argc - 1
904
// rsp[argc * 8] : argument 1
905
// rsp[(argc + 1) * 8] : argument 0 = receiver
906
// -----------------------------------
909
// Get the receiver of the function from the stack.
910
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
912
GenerateStringDictionaryReceiverCheck(masm, rdx, rax, rbx, &miss);
915
// Search the dictionary placing the result in rdi.
916
GenerateDictionaryLoad(masm, &miss, rax, rcx, rbx, rdi, rdi);
918
GenerateFunctionTailCall(masm, argc, &miss);
924
void CallICBase::GenerateMiss(MacroAssembler* masm,
927
Code::ExtraICState extra_state) {
928
// ----------- S t a t e -------------
929
// rcx : function name
930
// rsp[0] : return address
931
// rsp[8] : argument argc
932
// rsp[16] : argument argc - 1
934
// rsp[argc * 8] : argument 1
935
// rsp[(argc + 1) * 8] : argument 0 = receiver
936
// -----------------------------------
938
Counters* counters = masm->isolate()->counters();
939
if (id == IC::kCallIC_Miss) {
940
__ IncrementCounter(counters->call_miss(), 1);
942
__ IncrementCounter(counters->keyed_call_miss(), 1);
945
// Get the receiver of the function from the stack; 1 ~ return address.
946
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
948
// Enter an internal frame.
950
FrameScope scope(masm, StackFrame::INTERNAL);
952
// Push the receiver and the name of the function.
959
__ LoadAddress(rbx, ExternalReference(IC_Utility(id), masm->isolate()));
962
// Move result to rdi and exit the internal frame.
966
// Check if the receiver is a global object of some sort.
967
// This can happen only for regular CallIC but not KeyedCallIC.
968
if (id == IC::kCallIC_Miss) {
969
Label invoke, global;
970
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); // receiver
971
__ JumpIfSmi(rdx, &invoke);
972
__ CmpObjectType(rdx, JS_GLOBAL_OBJECT_TYPE, rcx);
973
__ j(equal, &global);
974
__ CmpInstanceType(rcx, JS_BUILTINS_OBJECT_TYPE);
975
__ j(not_equal, &invoke);
977
// Patch the receiver on the stack.
979
__ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
980
__ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
984
// Invoke the function.
985
CallKind call_kind = CallICBase::Contextual::decode(extra_state)
988
ParameterCount actual(argc);
989
__ InvokeFunction(rdi,
997
void CallIC::GenerateMegamorphic(MacroAssembler* masm,
999
Code::ExtraICState extra_ic_state) {
1000
// ----------- S t a t e -------------
1001
// rcx : function name
1002
// rsp[0] : return address
1003
// rsp[8] : argument argc
1004
// rsp[16] : argument argc - 1
1006
// rsp[argc * 8] : argument 1
1007
// rsp[(argc + 1) * 8] : argument 0 = receiver
1008
// -----------------------------------
1010
// Get the receiver of the function from the stack; 1 ~ return address.
1011
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1012
GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
1013
GenerateMiss(masm, argc, extra_ic_state);
1017
// Generates the megamorphic KEYED_CALL_IC stub. Dispatches on the key kind:
// smi keys go through the fast/dictionary element loads, symbol keys through
// a dictionary probe or the monomorphic stub cache, and everything else to
// the miss handler.
// NOTE(review): control-flow joins (do_call binds/jumps) were reconstructed
// from a garbled source; verify against the upstream x64 ic.cc.
void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  // Get the receiver of the function from the stack; 1 ~ return address.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  Label do_call, slow_call, slow_load;
  Label check_number_dictionary, check_string, lookup_monomorphic_cache;
  Label index_smi, index_string;

  // Check that the key is a smi.
  __ JumpIfNotSmi(rcx, &check_string);

  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, rdx, rax, Map::kHasIndexedInterceptor, &slow_call);

  GenerateFastArrayLoad(
      masm, rdx, rcx, rax, rbx, rdi, &check_number_dictionary, &slow_load);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1);

  __ bind(&do_call);
  // receiver in rdx is not used after this point.
  // rcx: key
  // rdi: function
  GenerateFunctionTailCall(masm, argc, &slow_call);

  __ bind(&check_number_dictionary);
  // rax: elements
  // rcx: smi key
  // Check whether the elements is a number dictionary.
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, &slow_load);
  __ SmiToInteger32(rbx, rcx);
  // ebx: untagged index
  __ LoadFromNumberDictionary(&slow_load, rax, rcx, rbx, r9, rdi, rdi);
  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1);
  __ jmp(&do_call);

  __ bind(&slow_load);
  // This branch is taken when calling KeyedCallIC_Miss is neither required
  // nor beneficial.
  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(rcx);  // save the key
    __ push(rdx);  // pass the receiver
    __ push(rcx);  // pass the key
    __ CallRuntime(Runtime::kKeyedGetProperty, 2);
    __ pop(rcx);  // restore the key
  }
  __ movq(rdi, rax);
  __ jmp(&do_call);

  __ bind(&check_string);
  GenerateKeyStringCheck(masm, rcx, rax, rbx, &index_string, &slow_call);

  // The key is known to be a symbol.
  // If the receiver is a regular JS object with slow properties then do
  // a quick inline probe of the receiver's dictionary.
  // Otherwise do the monomorphic cache probe.
  GenerateKeyedLoadReceiverCheck(
      masm, rdx, rax, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);

  __ movq(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset));
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, &lookup_monomorphic_cache);

  GenerateDictionaryLoad(masm, &slow_load, rbx, rcx, rax, rdi, rdi);
  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1);
  __ jmp(&do_call);

  __ bind(&lookup_monomorphic_cache);
  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1);
  GenerateMonomorphicCacheProbe(masm,
                                argc,
                                Code::KEYED_CALL_IC,
                                Code::kNoExtraICState);
  // Fall through on miss.

  __ bind(&slow_call);
  // This branch is taken if:
  // - the receiver requires boxing or access check,
  // - the key is neither smi nor symbol,
  // - the value loaded is not a function,
  // - there is hope that the runtime will create a monomorphic call stub
  //   that will get fetched next time.
  __ IncrementCounter(counters->keyed_call_generic_slow(), 1);
  GenerateMiss(masm, argc);

  __ bind(&index_string);
  __ IndexFromHash(rbx, rcx);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}
// Generates the KEYED_CALL_IC "normal" stub: only string names are handled
// inline (via CallICBase::GenerateNormal); everything else goes to miss.
void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  // Check if the name is a string.
  Label miss;
  __ JumpIfSmi(rcx, &miss);
  Condition cond = masm->IsObjectStringType(rcx, rax, rax);
  __ j(NegateCondition(cond), &miss);
  CallICBase::GenerateNormal(masm, argc);

  __ bind(&miss);
  GenerateMiss(masm, argc);
}
// Probes a non-strict arguments object for a mapped (context-aliased)
// element. On success returns an operand addressing the value inside the
// context. Jumps to |unmapped_case| (with the parameter map in scratch1)
// when the element lives in the backing store, and to |slow_case| when the
// receiver/key do not match the fast shape.
static Operand GenerateMappedArgumentsLookup(MacroAssembler* masm,
                                             Register object,
                                             Register key,
                                             Register scratch1,
                                             Register scratch2,
                                             Register scratch3,
                                             Label* unmapped_case,
                                             Label* slow_case) {
  Heap* heap = masm->isolate()->heap();

  // Check that the receiver is a JSObject. Because of the elements
  // map check later, we do not need to check for interceptors or
  // whether it requires access checks.
  __ JumpIfSmi(object, slow_case);
  // Check that the object is some kind of JSObject.
  __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1);
  __ j(below, slow_case);

  // Check that the key is a positive smi.
  Condition check = masm->CheckNonNegativeSmi(key);
  __ j(NegateCondition(check), slow_case);

  // Load the elements into scratch1 and check its map. If not, jump
  // to the unmapped lookup with the parameter map in scratch1.
  Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
  __ movq(scratch1, FieldOperand(object, JSObject::kElementsOffset));
  __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);

  // Check if element is in the range of mapped arguments.
  __ movq(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
  // The parameter map starts with two extra entries (context, backing store).
  __ SmiSubConstant(scratch2, scratch2, Smi::FromInt(2));
  __ cmpq(key, scratch2);
  __ j(greater_equal, unmapped_case);

  // Load element index and check whether it is the hole.
  const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
  __ SmiToInteger64(scratch3, key);
  __ movq(scratch2, FieldOperand(scratch1,
                                 scratch3,
                                 times_pointer_size,
                                 kHeaderSize));
  __ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex);
  __ j(equal, unmapped_case);

  // Load value from context and return it. We can reuse scratch1 because
  // we do not jump to the unmapped lookup (which requires the parameter
  // map in scratch1).
  __ movq(scratch1, FieldOperand(scratch1, FixedArray::kHeaderSize));
  __ SmiToInteger64(scratch3, scratch2);
  return FieldOperand(scratch1,
                      scratch3,
                      times_pointer_size,
                      Context::kHeaderSize);
}
// Looks up an element in the arguments backing store. |parameter_map| must
// hold the parameter map on entry and is clobbered (reused for the backing
// store). Returns an operand addressing the element; jumps to |slow_case|
// on shape or bounds failure.
static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                               Register key,
                                               Register parameter_map,
                                               Register scratch,
                                               Label* slow_case) {
  // Element is in arguments backing store, which is referenced by the
  // second element of the parameter_map. The parameter_map register
  // must be loaded with the parameter map of the arguments object and is
  // overwritten.
  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
  Register backing_store = parameter_map;
  __ movq(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
  __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
  __ movq(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
  __ cmpq(key, scratch);
  __ j(greater_equal, slow_case);
  __ SmiToInteger64(scratch, key);
  return FieldOperand(backing_store,
                      scratch,
                      times_pointer_size,
                      FixedArray::kHeaderSize);
}
// Generates the keyed load stub for non-strict arguments objects: tries the
// mapped (aliased) lookup first, then the backing store, then misses.
void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, notin;
  Operand mapped_location =
      GenerateMappedArgumentsLookup(
          masm, rdx, rax, rbx, rcx, rdi, &notin, &slow);
  __ movq(rax, mapped_location);
  __ Ret();

  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in rbx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, rax, rbx, rcx, &slow);
  // A hole in the backing store means the element was deleted.
  __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
  __ j(equal, &slow);
  __ movq(rax, unmapped_location);
  __ Ret();

  __ bind(&slow);
  GenerateMiss(masm, false);
}
// Generates the keyed store stub for non-strict arguments objects: stores
// into the mapped location or the backing store, with write barriers.
// NOTE(review): the RecordWrite register choices were reconstructed from a
// garbled source; verify against the upstream x64 ic.cc.
void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, notin;
  Operand mapped_location = GenerateMappedArgumentsLookup(
      masm, rdx, rcx, rbx, rdi, r8, &notin, &slow);
  __ movq(mapped_location, rax);
  __ lea(r9, mapped_location);
  __ movq(r8, rax);
  __ RecordWrite(rbx,
                 r9,
                 r8,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 INLINE_SMI_CHECK);
  __ Ret();

  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in rbx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rdi, &slow);
  __ movq(unmapped_location, rax);
  __ lea(r9, unmapped_location);
  __ movq(r8, rax);
  __ RecordWrite(rbx,
                 r9,
                 r8,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 INLINE_SMI_CHECK);
  __ Ret();

  __ bind(&slow);
  GenerateMiss(masm, false);
}
void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
1297
// ----------- S t a t e -------------
1298
// rcx : function name
1299
// rsp[0] : return address
1300
// rsp[8] : argument argc
1301
// rsp[16] : argument argc - 1
1303
// rsp[argc * 8] : argument 1
1304
// rsp[(argc + 1) * 8] : argument 0 = receiver
1305
// -----------------------------------
1307
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1308
Operand mapped_location = GenerateMappedArgumentsLookup(
1309
masm, rdx, rcx, rbx, rax, r8, ¬in, &slow);
1310
__ movq(rdi, mapped_location);
1311
GenerateFunctionTailCall(masm, argc, &slow);
1313
// The unmapped lookup expects that the parameter map is in rbx.
1314
Operand unmapped_location =
1315
GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rax, &slow);
1316
__ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
1318
__ movq(rdi, unmapped_location);
1319
GenerateFunctionTailCall(masm, argc, &slow);
1321
GenerateMiss(masm, argc);
1325
// Generates the megamorphic LOAD_IC stub: probes the stub cache and falls
// through to the generic load miss on failure.
void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rax, rcx, rbx,
                                                  rdx);

  // Cache miss: Jump to runtime.
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
// Generates the LOAD_IC stub for receivers with dictionary (slow-mode)
// properties: an inline dictionary probe, then miss.
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  GenerateStringDictionaryReceiverCheck(masm, rax, rdx, rbx, &miss);

  //  rdx: elements
  // Search the dictionary placing the result in rax.
  GenerateDictionaryLoad(masm, &miss, rdx, rcx, rbx, rdi, rax);
  __ ret(0);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm);
}
// Generates the LOAD_IC miss stub: re-pushes the arguments and tail-calls
// the kLoadIC_Miss runtime entry.
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->load_miss(), 1);

  __ pop(rbx);   // return address
  __ push(rax);  // receiver
  __ push(rcx);  // name
  __ push(rbx);  // return address

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}
// Generates the KEYED_LOAD_IC miss stub. |force_generic| selects the runtime
// entry that forces the IC into the generic state instead of re-specializing.
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_miss(), 1);

  __ pop(rbx);   // return address
  __ push(rdx);  // receiver
  __ push(rax);  // name
  __ push(rbx);  // return address

  // Perform tail call to the entry.
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}
// Generates a stub that forwards a keyed property load straight to the
// Runtime::kKeyedGetProperty function.
void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);   // return address
  __ push(rdx);  // receiver
  __ push(rax);  // name
  __ push(rbx);  // return address

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}
void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
1427
StrictModeFlag strict_mode) {
1428
// ----------- S t a t e -------------
1431
// -- rdx : receiver
1432
// -- rsp[0] : return address
1433
// -----------------------------------
1435
// Get the receiver from the stack and probe the stub cache.
1437
Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC, strict_mode);
1438
Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
1441
// Cache miss: Jump to runtime.
1446
// Generates the STORE_IC miss stub: re-pushes receiver/name/value and
// tail-calls the kStoreIC_Miss runtime entry.
void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);   // return address
  __ push(rdx);  // receiver
  __ push(rcx);  // name
  __ push(rax);  // value
  __ push(rbx);  // return address

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
// Generates the STORE_IC stub specialized for writing the length property of
// a JSArray: validates the receiver shape and value, then tail-calls the
// kStoreIC_ArrayLength runtime entry.
void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  //
  // This accepts as a receiver anything JSArray::SetElementsLength accepts
  // (currently anything except for external arrays which means anything with
  // elements of FixedArray type). Value must be a number, but only smis are
  // accepted as the most common case.

  Label miss;

  Register receiver = rdx;
  Register value = rax;
  Register scratch = rbx;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &miss);

  // Check that elements are FixedArray.
  // We rely on StoreIC_ArrayLength below to deal with all types of
  // fast elements (including COW).
  __ movq(scratch, FieldOperand(receiver, JSArray::kElementsOffset));
  __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
  __ j(not_equal, &miss);

  // Check that the array has fast properties, otherwise the length
  // property might have been redefined.
  __ movq(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset));
  __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(equal, &miss);

  // Check that value is a smi.
  __ JumpIfNotSmi(value, &miss);

  // Prepare tail call to StoreIC_ArrayLength.
  __ pop(scratch);
  __ push(receiver);
  __ push(value);
  __ push(scratch);  // return address

  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);

  __ bind(&miss);

  GenerateMiss(masm);
}
// Generates the STORE_IC stub for receivers with dictionary properties:
// inline dictionary store on hit, miss handler otherwise.
void StoreIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  Label miss;

  GenerateStringDictionaryReceiverCheck(masm, rdx, rbx, rdi, &miss);

  GenerateDictionaryStore(masm, &miss, rbx, rcx, rax, r8, r9);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1);
  __ ret(0);

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1);
  GenerateMiss(masm);
}
void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
1550
StrictModeFlag strict_mode) {
1551
// ----------- S t a t e -------------
1554
// -- rdx : receiver
1555
// -- rsp[0] : return address
1556
// -----------------------------------
1561
__ Push(Smi::FromInt(NONE)); // PropertyAttributes
1562
__ Push(Smi::FromInt(strict_mode));
1563
__ push(rbx); // return address
1565
// Do tail-call to runtime routine.
1566
__ TailCallRuntime(Runtime::kSetProperty, 5, 1);
1570
void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
1571
StrictModeFlag strict_mode) {
1572
// ----------- S t a t e -------------
1575
// -- rdx : receiver
1576
// -- rsp[0] : return address
1577
// -----------------------------------
1580
__ push(rdx); // receiver
1581
__ push(rcx); // key
1582
__ push(rax); // value
1583
__ Push(Smi::FromInt(NONE)); // PropertyAttributes
1584
__ Push(Smi::FromInt(strict_mode)); // Strict mode.
1585
__ push(rbx); // return address
1587
// Do tail-call to runtime routine.
1588
__ TailCallRuntime(Runtime::kSetProperty, 5, 1);
1592
// Generates the KEYED_STORE_IC slow stub: tail-calls the kKeyedStoreIC_Slow
// runtime entry with receiver, key, and value.
void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);   // return address
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ push(rbx);  // return address

  // Do tail-call to runtime routine.
  ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
// Generates the KEYED_STORE_IC miss stub. |force_generic| selects the runtime
// entry that forces the IC into the generic state instead of re-specializing.
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);   // return address
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ push(rbx);  // return address

  // Do tail-call to runtime routine.
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
// Generates the stub that transitions an object's elements from FAST_SMI to
// FAST_DOUBLE kind, falling back to the runtime on failure (or always when
// transition tracing is on).
void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rbx    : target map
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  // Must return the modified receiver in eax.
  if (!FLAG_trace_elements_transitions) {
    Label fail;
    ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
    __ movq(rax, rdx);
    __ Ret();
    __ bind(&fail);
  }

  __ pop(rbx);   // return address
  __ push(rdx);  // receiver
  __ push(rbx);  // return address
  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
}
void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
1658
MacroAssembler* masm) {
1659
// ----------- S t a t e -------------
1660
// -- rbx : target map
1661
// -- rdx : receiver
1662
// -- rsp[0] : return address
1663
// -----------------------------------
1664
// Must return the modified receiver in eax.
1665
if (!FLAG_trace_elements_transitions) {
1667
ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
1675
__ push(rbx); // return address
1676
__ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
1683
// Maps a comparison token to the x64 condition code used by the compare IC.
// NOTE(review): the interior cases were reconstructed from a garbled source
// (only EQ_STRICT, greater_equal, and the default were visible); verify
// against the upstream x64 ic.cc.
Condition CompareIC::ComputeCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return equal;
    case Token::LT:
      return less;
    case Token::GT:
      return greater;
    case Token::LTE:
      return less_equal;
    case Token::GTE:
      return greater_equal;
    default:
      UNREACHABLE();
      return no_condition;
  }
}
// Returns true if the call site at |address| is followed by the "test al"
// marker byte, which indicates that an inlined smi check was emitted.
static bool HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.
  return *test_instruction_address == Assembler::kTestAlByte;
}
// Transitions the compare IC to a new state based on the operand types seen,
// installs the matching stub, and enables the inlined smi check on the first
// transition out of UNINITIALIZED.
// NOTE(review): the HandleScope/#ifdef DEBUG framing and Token::Name(op_)
// argument were reconstructed from a garbled source; verify upstream.
void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
  HandleScope scope;
  Handle<Code> rewritten;
  State previous_state = GetState();

  State state = TargetState(previous_state, HasInlinedSmiCode(address()), x, y);
  if (state == GENERIC) {
    CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
    rewritten = stub.GetCode();
  } else {
    ICCompareStub stub(op_, state);
    if (state == KNOWN_OBJECTS) {
      stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
    }
    rewritten = stub.GetCode();
  }
  set_target(*rewritten);

#ifdef DEBUG
  if (FLAG_trace_ic) {
    PrintF("[CompareIC (%s->%s)#%s]\n",
           GetStateName(previous_state),
           GetStateName(state),
           Token::Name(op_));
  }
#endif

  // Activate inlined smi code.
  if (previous_state == UNINITIALIZED) {
    PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
  }
}
// Enables or disables the inlined smi check at a call site by rewriting the
// short conditional jump that guards it: jc/jnc (disabled, always taken to
// the IC) is swapped with jz/jnz (enabled, smi fast path), and vice versa.
void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.
  if (*test_instruction_address != Assembler::kTestAlByte) {
    ASSERT(*test_instruction_address == Assembler::kNopByte);
    return;
  }

  Address delta_address = test_instruction_address + 1;
  // The delta to the start of the map check instruction and the
  // condition code uses at the patched jump.
  int8_t delta = *reinterpret_cast<int8_t*>(delta_address);
  if (FLAG_trace_ic) {
    PrintF("[ patching ic at %p, test=%p, delta=%d\n",
           address, test_instruction_address, delta);
  }

  // Patch with a short conditional jump. Enabling means switching from a short
  // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
  // reverse operation of that.
  Address jmp_address = test_instruction_address - delta;
  ASSERT((check == ENABLE_INLINED_SMI_CHECK)
         ? (*jmp_address == Assembler::kJncShortOpcode ||
            *jmp_address == Assembler::kJcShortOpcode)
         : (*jmp_address == Assembler::kJnzShortOpcode ||
            *jmp_address == Assembler::kJzShortOpcode));
  Condition cc = (check == ENABLE_INLINED_SMI_CHECK)
      ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
      : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
  *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
}
} } // namespace v8::internal
1786
#endif // V8_TARGET_ARCH_X64