// Copyright 2012 the V8 project authors. All rights reserved.
// Copyright IBM Corp. 2012, 2013. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#if defined(V8_TARGET_ARCH_PPC)
#include "stub-cache.h"
#define __ ACCESS_MASM(masm)
static void ProbeTable(Isolate* isolate,
StubCache::Table table,
// Number of the cache entry, not scaled.
Register offset_scratch) {
ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
uintptr_t key_off_addr = reinterpret_cast<uintptr_t>(key_offset.address());
uintptr_t value_off_addr =
reinterpret_cast<uintptr_t>(value_offset.address());
uintptr_t map_off_addr = reinterpret_cast<uintptr_t>(map_offset.address());
// Check the relative positions of the address fields.
ASSERT(value_off_addr > key_off_addr);
ASSERT((value_off_addr - key_off_addr) % 4 == 0);
ASSERT((value_off_addr - key_off_addr) < (256 * 4));
ASSERT(map_off_addr > key_off_addr);
ASSERT((map_off_addr - key_off_addr) % 4 == 0);
ASSERT((map_off_addr - key_off_addr) < (256 * 4));
Register base_addr = scratch;
// Multiply by 3 because there are 3 fields per entry (name, code, map).
__ ShiftLeftImm(offset_scratch, offset, Operand(1));
__ add(offset_scratch, offset, offset_scratch);
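// offset_scratch now holds offset * 3 (offset * 2 from the shift plus the
// original offset): each cache entry occupies three pointer-sized fields,
// so the scaled index selects the entry's first field.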
// Calculate the base address of the entry.
__ mov(base_addr, Operand(key_offset));
__ ShiftLeftImm(scratch2, offset_scratch, Operand(kPointerSizeLog2));
__ add(base_addr, base_addr, scratch2);
// Check that the key in the entry matches the name.
__ LoadP(ip, MemOperand(base_addr, 0));
// Check the map matches.
__ LoadP(ip, MemOperand(base_addr, map_off_addr - key_off_addr));
__ LoadP(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
// Get the code entry from the cache.
Register code = scratch2;
__ LoadP(code, MemOperand(base_addr, value_off_addr - key_off_addr));
// Check that the flags match what we're looking for.
Register flags_reg = base_addr;
__ lwz(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
ASSERT(!r0.is(flags_reg));
__ li(r0, Operand(Code::kFlagsNotUsedInLookup));
__ andc(flags_reg, flags_reg, r0);
__ mov(r0, Operand(flags));
__ cmpl(flags_reg, r0);
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
// Jump to the first instruction in the code stub.
__ addi(r0, code, Operand(Code::kHeaderSize - kHeapObjectTag));
// Miss: fall through.
// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be a symbol and receiver must be a heap object.
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
ASSERT(name->IsSymbol());
Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
__ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
const int kInterceptorOrAccessCheckNeededMask =
(1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
// Bail out if the receiver has a named interceptor or requires access checks.
Register map = scratch1;
__ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ lbz(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
__ andi(r0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
__ bne(miss_label, cr0);
// Check that receiver is a JSObject.
__ lbz(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
__ cmpi(scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));
// Load properties array.
Register properties = scratch0;
__ LoadP(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
// Check that the properties array is a dictionary.
__ LoadP(map, FieldMemOperand(properties, HeapObject::kMapOffset));
Register tmp = properties;
__ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
// Restore the temporarily used register.
__ LoadP(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
StringDictionaryLookupStub::GenerateNegativeLookup(masm,
__ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
void StubCache::GenerateProbe(MacroAssembler* masm,
Isolate* isolate = masm->isolate();
#if V8_TARGET_ARCH_PPC64
// Make sure that code is valid. The multiplying code relies on the
// entry size being 24.
ASSERT(sizeof(Entry) == 24);
// Make sure that code is valid. The multiplying code relies on the
// entry size being 12.
ASSERT(sizeof(Entry) == 12);
// Make sure the flags do not name a specific type.
ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
// Make sure that there are no register conflicts.
ASSERT(!scratch.is(receiver));
ASSERT(!scratch.is(name));
ASSERT(!extra.is(receiver));
ASSERT(!extra.is(name));
ASSERT(!extra.is(scratch));
ASSERT(!extra2.is(receiver));
ASSERT(!extra2.is(name));
ASSERT(!extra2.is(scratch));
ASSERT(!extra2.is(extra));
// Check scratch, extra and extra2 registers are valid.
ASSERT(!scratch.is(no_reg));
ASSERT(!extra.is(no_reg));
ASSERT(!extra2.is(no_reg));
ASSERT(!extra3.is(no_reg));
Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, &miss);
// Get the map of the receiver and compute the hash.
__ lwz(scratch, FieldMemOperand(name, String::kHashFieldOffset));
__ LoadP(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ add(scratch, scratch, ip);
#if V8_TARGET_ARCH_PPC64
// Use only the low 32 bits of the map pointer.
__ rldicl(scratch, scratch, 0, 32);
uint32_t mask = kPrimaryTableSize - 1;
// We shift out the last two bits because they are not part of the hash and
// they are always 01 for maps.
__ ShiftRightImm(scratch, scratch, Operand(kHeapObjectTagSize));
// Mask down the eor argument to the minimum to keep the immediate
__ xori(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
// Prefer and_ to ubfx here because ubfx takes 2 cycles.
__ andi(scratch, scratch, Operand(mask));
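// At this point, roughly:
//   scratch = (((name_hash + map) >> kHeapObjectTagSize)
//              ^ (flags >> kHeapObjectTagSize)) & mask,
// the primary table index for this (name, map, flags) combination.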
// Probe the primary table.
// Primary miss: Compute hash for secondary probe.
__ ShiftRightImm(extra, name, Operand(kHeapObjectTagSize));
__ sub(scratch, scratch, extra);
uint32_t mask2 = kSecondaryTableSize - 1;
__ addi(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
__ andi(scratch, scratch, Operand(mask2));
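// Roughly:
//   scratch = (primary_index - (name >> kHeapObjectTagSize)
//              + (flags >> kHeapObjectTagSize)) & mask2,
// the secondary table index derived from the primary one.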
// Probe the secondary table.
// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
__ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
Register prototype) {
// Load the global or builtins object from the current context.
MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
// Load the native context from the global or builtins object.
FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
// Load the function from the native context.
__ LoadP(prototype, MemOperand(prototype, Context::SlotOffset(index)), r0);
// Load the initial map. The global functions all have initial maps.
FieldMemOperand(prototype,
JSFunction::kPrototypeOrInitialMapOffset));
// Load the prototype from the initial map.
__ LoadP(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
MacroAssembler* masm,
Isolate* isolate = masm->isolate();
// Check we're still in the same context.
MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
__ Move(ip, isolate->global_object());
__ cmp(prototype, ip);
// Get the global function with the given index.
Handle<JSFunction> function(
JSFunction::cast(isolate->native_context()->get(index)));
// Load its initial map. The global functions all have initial maps.
__ Move(prototype, Handle<Map>(function->initial_map()));
// Load the prototype from the initial map.
__ LoadP(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
// Load a fast property out of a holder object (src). In-object properties
// are loaded directly otherwise the property is loaded from the properties
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
Handle<JSObject> holder,
// Adjust for the number of properties stored in the holder.
index -= holder->map()->inobject_properties();
// Get the property straight out of the holder.
int offset = holder->map()->instance_size() + (index * kPointerSize);
__ LoadP(dst, FieldMemOperand(src, offset), r0);
// Calculate the offset into the properties array.
int offset = index * kPointerSize + FixedArray::kHeaderSize;
__ LoadP(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
__ LoadP(dst, FieldMemOperand(dst, offset), r0);
void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, miss_label);
// Check that the object is a JS array.
__ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
// Load length directly from the JS array.
__ LoadP(r3, FieldMemOperand(receiver, JSArray::kLengthOffset));
// Generate code to check if an object is a string. If the object is a
// heap object, its map's instance type is left in the scratch1 register.
// If this is not needed, scratch1 and scratch2 may be the same register.
static void GenerateStringCheck(MacroAssembler* masm,
Label* non_string_object) {
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, smi);
// Check that the object is a string.
__ LoadP(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
__ andi(scratch2, scratch1, Operand(kIsNotStringMask));
// The cast is to resolve the overload for the argument of 0x0.
__ cmpi(scratch2, Operand(static_cast<intptr_t>(kStringTag)));
__ bne(non_string_object);
// Generate code to load the length from a string object and return the length.
// If the receiver object is not a string or a wrapped string object the
// execution continues at the miss label. The register containing the
// receiver is potentially clobbered.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
bool support_wrappers) {
// Check if the object is a string leaving the instance type in the
// scratch1 register.
GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
support_wrappers ? &check_wrapper : miss);
// Load length directly from the string.
__ LoadP(r3, FieldMemOperand(receiver, String::kLengthOffset));
if (support_wrappers) {
// Check if the object is a JSValue wrapper.
__ bind(&check_wrapper);
__ cmpi(scratch1, Operand(JS_VALUE_TYPE));
// Unwrap the value and check if the wrapped value is a string.
__ LoadP(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
__ LoadP(r3, FieldMemOperand(scratch1, String::kLengthOffset));
void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
__ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
// Generate StoreField code, value is passed in r3 register.
// When leaving generated code after success, the receiver_reg and name_reg
// may be clobbered. Upon branch to miss_label, the receiver and name
// registers have their original values.
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
Handle<JSObject> object,
Handle<Map> transition,
Register receiver_reg,
LookupResult lookup(masm->isolate());
object->Lookup(*name, &lookup);
if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {
// In sloppy mode, we could just return the value and be done. However, we
// might be in strict mode, where we have to throw. Since we cannot tell,
// go into slow case unconditionally.
// Check that the map of the object hasn't changed.
CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS
__ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label,
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
__ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
// Check that we are allowed to write this.
if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
if (lookup.IsFound()) {
holder = lookup.holder();
// Find the top object.
holder = JSObject::cast(holder->GetPrototype());
} while (holder->GetPrototype()->IsJSObject());
// We need an extra register, push
Label miss_pop, done_check;
CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
scratch1, scratch2, name, &miss_pop);
__ bind(&done_check);
// Stub never generated for non-global objects that require access
ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
// Perform map transition for the receiver if necessary.
if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) {
// The properties must be extended before we can store the value.
// We jump to a runtime call that extends the properties array.
__ push(receiver_reg);
__ mov(r5, Operand(transition));
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
if (!transition.is_null()) {
// Update the map of the object.
__ mov(scratch1, Operand(transition));
__ StoreP(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset),
// Update the write barrier for the map field and pass the now unused
// name_reg as scratch register.
__ RecordWriteField(receiver_reg,
HeapObject::kMapOffset,
// Adjust for the number of properties stored in the object. Even in the
// face of a transition we can use the old map here because the size of the
// object and the number of in-object properties is not going to change.
index -= object->map()->inobject_properties();
// Set the property straight into the object.
int offset = object->map()->instance_size() + (index * kPointerSize);
__ StoreP(r3, FieldMemOperand(receiver_reg, offset), r0);
// Skip updating write barrier if storing a smi.
__ JumpIfSmi(r3, &exit);
// Update the write barrier for the array address.
// Pass the now unused name_reg as a scratch register.
__ RecordWriteField(receiver_reg,
// Write to the properties array.
int offset = index * kPointerSize + FixedArray::kHeaderSize;
// Get the properties array
FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
__ StoreP(r3, FieldMemOperand(scratch1, offset), r0);
// Skip updating write barrier if storing a smi.
__ JumpIfSmi(r3, &exit);
// Update the write barrier for the array address.
// Ok to clobber receiver_reg and name_reg, since we return.
__ RecordWriteField(scratch1,
// Return the value (register r3).
void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
Handle<Code> code = (kind == Code::LOAD_IC)
? masm->isolate()->builtins()->LoadIC_Miss()
: masm->isolate()->builtins()->KeyedLoadIC_Miss();
__ Jump(code, RelocInfo::CODE_TARGET);
static void GenerateCallFunction(MacroAssembler* masm,
Handle<Object> object,
const ParameterCount& arguments,
Code::ExtraICState extra_ic_state) {
// ----------- S t a t e -------------
// -- r4: function to call
// -----------------------------------
// Check that the function really is a function.
__ JumpIfSmi(r4, miss);
__ CompareObjectType(r4, r6, r6, JS_FUNCTION_TYPE);
// Patch the receiver on the stack with the global proxy if
if (object->IsGlobalObject()) {
__ LoadP(r6, FieldMemOperand(r3, GlobalObject::kGlobalReceiverOffset));
__ StoreP(r6, MemOperand(sp, arguments.immediate() * kPointerSize), r0);
// Invoke the function.
CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
__ InvokeFunction(r4, arguments, JUMP_FUNCTION, NullCallWrapper(), call_kind);
static void PushInterceptorArguments(MacroAssembler* masm,
Handle<JSObject> holder_obj) {
Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
Register scratch = name;
__ mov(scratch, Operand(interceptor));
__ LoadP(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
__ mov(scratch, Operand(ExternalReference::isolate_address()));
static void CompileCallLoadPropertyWithInterceptor(
MacroAssembler* masm,
Handle<JSObject> holder_obj) {
PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
ExternalReference ref =
ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
__ li(r3, Operand(6));
__ mov(r4, Operand(ref));
static const int kFastApiCallArguments = 4;
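// The four reserved slots hold, from the stack top: the holder (filled in
// by CheckPrototypes), the callee JS function, the call data and the
// isolate (see the state comment in GenerateFastApiDirectCall below).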
// Reserves space for the extra arguments to API function in the
// These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
__ LoadSmiLiteral(scratch, Smi::FromInt(0));
for (int i = 0; i < kFastApiCallArguments; i++) {
// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
__ Drop(kFastApiCallArguments);
static void GenerateFastApiDirectCall(MacroAssembler* masm,
const CallOptimization& optimization,
// ----------- S t a t e -------------
// -- sp[0] : holder (set by CheckPrototypes)
// -- sp[4] : callee JS function
// -- sp[8] : call data
// -- sp[12] : isolate
// -- sp[16] : last JS argument
// -- sp[(argc + 3) * 4] : first JS argument
// -- sp[(argc + 4) * 4] : receiver
// -----------------------------------
// Get the function and set up the context.
Handle<JSFunction> function = optimization.constant_function();
__ LoadHeapObject(r8, function);
__ LoadP(cp, FieldMemOperand(r8, JSFunction::kContextOffset));
// Pass the additional arguments.
Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
Handle<Object> call_data(api_call_info->data());
if (masm->isolate()->heap()->InNewSpace(*call_data)) {
__ Move(r3, api_call_info);
__ LoadP(r9, FieldMemOperand(r3, CallHandlerInfo::kDataOffset));
__ Move(r9, call_data);
__ mov(r10, Operand(ExternalReference::isolate_address()));
// Store JS function, call data and isolate.
__ StoreP(r8, MemOperand(sp, 1 * kPointerSize));
__ StoreP(r9, MemOperand(sp, 2 * kPointerSize));
__ StoreP(r10, MemOperand(sp, 3 * kPointerSize));
// Prepare arguments.
__ addi(r5, sp, Operand(3 * kPointerSize));
#if !ABI_RETURNS_HANDLES_IN_REGS
bool alloc_return_buf = true;
bool alloc_return_buf = false;
// Allocate the v8::Arguments structure in the arguments' space since
// it's not controlled by GC.
// Create 5 or 6 extra slots on stack (depending on alloc_return_buf):
// [0] space for DirectCEntryStub's LR save
// [1] space for pointer-sized non-scalar return value (r3)
// [2-5] v8::Arguments
// If alloc_return_buf, we shift the arguments over a register
// (e.g. r3 -> r4) to allow for the return value buffer in implicit
// first arg. CallApiFunctionAndReturn will set up r3.
int kApiStackSpace = 5 + (alloc_return_buf ? 1 : 0);
Register arg0 = alloc_return_buf ? r4 : r3;
FrameScope frame_scope(masm, StackFrame::MANUAL);
__ EnterExitFrame(false, kApiStackSpace);
// arg0 = v8::Arguments&
// Arguments is after the return address.
__ addi(arg0, sp, Operand((kStackFrameExtraParamSlot +
(alloc_return_buf ? 2 : 1)) * kPointerSize));
// v8::Arguments::implicit_args_
__ StoreP(r5, MemOperand(arg0, 0 * kPointerSize));
// v8::Arguments::values_
__ addi(ip, r5, Operand(argc * kPointerSize));
__ StoreP(ip, MemOperand(arg0, 1 * kPointerSize));
// v8::Arguments::length_ = argc
__ li(ip, Operand(argc));
__ stw(ip, MemOperand(arg0, 2 * kPointerSize));
// v8::Arguments::is_construct_call = 0
__ li(ip, Operand::Zero());
__ StoreP(ip, MemOperand(arg0, 3 * kPointerSize));
const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
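// kStackUnwindSpace covers the argc JS arguments, the kFastApiCallArguments
// reserved slots and the receiver, which are all dropped when the API call
// returns.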
Address function_address = v8::ToCData<Address>(api_call_info->callback());
ApiFunction fun(function_address);
ExternalReference ref = ExternalReference(&fun,
ExternalReference::DIRECT_API_CALL,
AllowExternalCallThatCantCauseGC scope(masm);
__ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
class CallInterceptorCompiler BASE_EMBEDDED {
CallInterceptorCompiler(StubCompiler* stub_compiler,
const ParameterCount& arguments,
Code::ExtraICState extra_ic_state)
: stub_compiler_(stub_compiler),
arguments_(arguments),
extra_ic_state_(extra_ic_state) {}
void Compile(MacroAssembler* masm,
Handle<JSObject> object,
Handle<JSObject> holder,
LookupResult* lookup,
ASSERT(holder->HasNamedInterceptor());
ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, miss);
CallOptimization optimization(lookup);
if (optimization.is_constant_call()) {
CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
holder, lookup, name, optimization, miss);
CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
void CompileCacheable(MacroAssembler* masm,
Handle<JSObject> object,
Handle<JSObject> interceptor_holder,
LookupResult* lookup,
const CallOptimization& optimization,
ASSERT(optimization.is_constant_call());
ASSERT(!lookup->holder()->IsGlobalObject());
Counters* counters = masm->isolate()->counters();
int depth1 = kInvalidProtoDepth;
int depth2 = kInvalidProtoDepth;
bool can_do_fast_api_call = false;
if (optimization.is_simple_api_call() &&
!lookup->holder()->IsGlobalObject()) {
depth1 = optimization.GetPrototypeDepthOfExpectedType(
object, interceptor_holder);
if (depth1 == kInvalidProtoDepth) {
depth2 = optimization.GetPrototypeDepthOfExpectedType(
interceptor_holder, Handle<JSObject>(lookup->holder()));
can_do_fast_api_call =
depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
__ IncrementCounter(counters->call_const_interceptor(), 1,
if (can_do_fast_api_call) {
__ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
ReserveSpaceForFastApiCall(masm, scratch1);
// Check that the maps from receiver to interceptor's holder
// haven't changed and thus we can invoke interceptor.
Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
scratch1, scratch2, scratch3,
// Invoke an interceptor and if it provides a value,
// branch to |regular_invoke|.
Label regular_invoke;
LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
// Interceptor returned nothing for this property. Try to use cached
// constant function.
// Check that the maps from interceptor's holder to constant function's
// holder haven't changed and thus we can use cached constant function.
if (*interceptor_holder != lookup->holder()) {
stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
Handle<JSObject>(lookup->holder()),
scratch1, scratch2, scratch3,
// CheckPrototypes has a side effect of fetching a 'holder'
// for API (object which is instanceof for the signature). It's
// safe to omit it here, as if present, it should be fetched
// by the previous CheckPrototypes.
ASSERT(depth2 == kInvalidProtoDepth);
if (can_do_fast_api_call) {
GenerateFastApiDirectCall(masm, optimization, arguments_.immediate());
CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
__ InvokeFunction(optimization.constant_function(), arguments_,
JUMP_FUNCTION, NullCallWrapper(), call_kind);
// Deferred code for fast API call case---clean preallocated space.
if (can_do_fast_api_call) {
__ bind(&miss_cleanup);
FreeSpaceForFastApiCall(masm);
// Invoke a regular function.
__ bind(&regular_invoke);
if (can_do_fast_api_call) {
FreeSpaceForFastApiCall(masm);
void CompileRegular(MacroAssembler* masm,
Handle<JSObject> object,
Handle<JSObject> interceptor_holder,
stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
scratch1, scratch2, scratch3,
// Call a runtime function to load the interceptor property.
FrameScope scope(masm, StackFrame::INTERNAL);
// Save the name_ register across the call.
PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);
__ CallExternalReference(
ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
// Restore the name_ register.
// Leave the internal frame.
void LoadWithInterceptor(MacroAssembler* masm,
Handle<JSObject> holder_obj,
Label* interceptor_succeeded) {
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(holder, name_);
CompileCallLoadPropertyWithInterceptor(masm,
__ pop(name_); // Restore the name.
__ pop(receiver); // Restore the holder.
// If interceptor returns no-result sentinel, call the constant function.
__ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
__ bne(interceptor_succeeded);
StubCompiler* stub_compiler_;
const ParameterCount& arguments_;
Code::ExtraICState extra_ic_state_;
// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
static void GenerateCheckPropertyCell(MacroAssembler* masm,
Handle<GlobalObject> global,
Handle<JSGlobalPropertyCell> cell =
GlobalObject::EnsurePropertyCell(global, name);
ASSERT(cell->value()->IsTheHole());
__ mov(scratch, Operand(cell));
FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
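// The cell was created (or looked up) with the hole as its value, so the
// generated check only passes while the global property is still absent.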
// Calls GenerateCheckPropertyCell for each global object in the prototype chain
// from object to (but not including) holder.
static void GenerateCheckPropertyCells(MacroAssembler* masm,
Handle<JSObject> object,
Handle<JSObject> holder,
Handle<String> name,
Handle<JSObject> current = object;
while (!current.is_identical_to(holder)) {
if (current->IsGlobalObject()) {
GenerateCheckPropertyCell(masm,
Handle<GlobalObject>::cast(current),
current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
#define __ ACCESS_MASM(masm())
Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
Register object_reg,
Handle<JSObject> holder,
Register holder_reg,
Handle<String> name,
// Make sure there's no overlap between holder and object registers.
ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
&& !scratch2.is(scratch1));
// Keep track of the current object in register reg.
Register reg = object_reg;
if (save_at_depth == depth) {
__ StoreP(reg, MemOperand(sp));
// Check the maps in the prototype chain.
// Traverse the prototype chain from the object and do map checks.
Handle<JSObject> current = object;
while (!current.is_identical_to(holder)) {
// Only global objects and objects that do not require access
// checks are allowed in stubs.
ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
if (!current->HasFastProperties() &&
!current->IsJSGlobalObject() &&
!current->IsJSGlobalProxy()) {
if (!name->IsSymbol()) {
name = factory()->LookupSymbol(name);
ASSERT(current->property_dictionary()->FindEntry(*name) ==
StringDictionary::kNotFound);
GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
scratch1, scratch2);
__ LoadP(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
reg = holder_reg; // From now on the object will be in holder_reg.
__ LoadP(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
Handle<Map> current_map(current->map());
__ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK,
ALLOW_ELEMENT_TRANSITION_MAPS);
// Check access rights to the global object. This has to happen after
// the map check so that we know that the object is actually a global
if (current->IsJSGlobalProxy()) {
__ CheckAccessGlobalProxy(reg, scratch2, miss);
reg = holder_reg; // From now on the object will be in holder_reg.
if (heap()->InNewSpace(*prototype)) {
// The prototype is in new space; we cannot store a reference to it
// in the code. Load it from the map.
__ LoadP(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
// The prototype is in old space; load it directly.
__ mov(reg, Operand(prototype));
if (save_at_depth == depth) {
__ StoreP(reg, MemOperand(sp));
// Go to the next object in the prototype chain.
current = prototype;
// Log the check depth.
LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));
// Check the holder map.
__ CheckMap(reg, scratch1, Handle<Map>(current->map()), miss,
DONT_DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
// Perform security check for access to the global object.
ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
if (holder->IsJSGlobalProxy()) {
__ CheckAccessGlobalProxy(reg, scratch1, miss);
// If we've skipped any global objects, it's not enough to verify that
// their maps haven't changed. We also need to check that the property
// cell for the property is still empty.
GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);
// Return the register containing the holder.
void StubCompiler::GenerateLoadField(Handle<JSObject> object,
Handle<JSObject> holder,
Handle<String> name,
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, miss);
// Check that the maps haven't changed.
Register reg = CheckPrototypes(
object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
GenerateFastPropertyLoad(masm(), r3, reg, holder, index);
void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
Handle<JSObject> holder,
Handle<JSFunction> value,
Handle<String> name,
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, miss);
// Check that the maps haven't changed.
object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
// Return the constant value.
__ LoadHeapObject(r3, value);
void StubCompiler::GenerateDictionaryLoadCallback(Register receiver,
Handle<AccessorInfo> callback,
Handle<String> name,
ASSERT(!receiver.is(scratch1));
ASSERT(!receiver.is(scratch2));
ASSERT(!receiver.is(scratch3));
// Load the properties dictionary.
Register dictionary = scratch1;
__ LoadP(dictionary, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
// Probe the dictionary.
StringDictionaryLookupStub::GeneratePositiveLookup(masm(),
__ bind(&probe_done);
// If probing finds an entry in the dictionary, scratch3 contains the
// pointer into the dictionary. Check that the value is the callback.
Register pointer = scratch3;
const int kElementsStartOffset = StringDictionary::kHeaderSize +
StringDictionary::kElementsStartIndex * kPointerSize;
const int kValueOffset = kElementsStartOffset + kPointerSize;
__ LoadP(scratch2, FieldMemOperand(pointer, kValueOffset));
__ mov(scratch3, Operand(callback));
__ cmp(scratch2, scratch3);
void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
Handle<JSObject> holder,
Handle<AccessorInfo> callback,
Handle<String> name,
#if !ABI_RETURNS_HANDLES_IN_REGS
bool alloc_return_buf = true;
bool alloc_return_buf = false;
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, miss);
// Check that the maps haven't changed.
Register reg = CheckPrototypes(object, receiver, holder, scratch1,
scratch2, scratch3, name, miss);
if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
GenerateDictionaryLoadCallback(
reg, name_reg, scratch2, scratch3, scratch4, callback, name, miss);
// Build AccessorInfo::args_ list on the stack and push property name below
// the exit frame to make GC aware of them and store pointers to them.
__ mr(scratch2, sp); // scratch2 = AccessorInfo::args_
if (heap()->InNewSpace(callback->data())) {
__ Move(scratch3, callback);
__ LoadP(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
__ Move(scratch3, Handle<Object>(callback->data()));
__ Push(reg, scratch3);
__ mov(scratch3, Operand(ExternalReference::isolate_address()));
__ Push(scratch3, name_reg);
// If ABI passes Handles (pointer-sized struct) in a register:
// Create 2 or 3 extra slots on stack (depending on alloc_return_buf):
// [0] space for DirectCEntryStub's LR save
// [1] space for pointer-sized non-scalar return value (r3)
// Create 3 or 4 extra slots on stack (depending on alloc_return_buf):
// [0] space for DirectCEntryStub's LR save
// [1] (optional) space for pointer-sized non-scalar return value (r3)
// [2] copy of Handle (first arg)
// If alloc_return_buf, we shift the arguments over a register
// (e.g. r3 -> r4) to allow for the return value buffer in implicit
// first arg. CallApiFunctionAndReturn will set up r3.
#if ABI_PASSES_HANDLES_IN_REGS
const int kAccessorInfoSlot = kStackFrameExtraParamSlot +
(alloc_return_buf ? 2 : 1);
const int kAccessorInfoSlot = kStackFrameExtraParamSlot +
(alloc_return_buf ? 3 : 2);
int kArg0Slot = kStackFrameExtraParamSlot + (alloc_return_buf ? 2 : 1);
const int kApiStackSpace = (alloc_return_buf ? 4 : 3);
Register arg0 = (alloc_return_buf ? r4 : r3);
Register arg1 = (alloc_return_buf ? r5 : r4);
__ mr(arg1, scratch2); // Saved in case scratch2 == arg0.
__ mr(arg0, sp); // arg0 = Handle<String>
FrameScope frame_scope(masm(), StackFrame::MANUAL);
__ EnterExitFrame(false, kApiStackSpace);
#if !ABI_PASSES_HANDLES_IN_REGS
// Pass the 1st arg by reference.
__ StoreP(arg0, MemOperand(sp, kArg0Slot * kPointerSize));
__ addi(arg0, sp, Operand(kArg0Slot * kPointerSize));
// Create AccessorInfo instance on the stack above the exit frame with
// arg1 (internal::Object** args_) as the data.
__ StoreP(arg1, MemOperand(sp, kAccessorInfoSlot * kPointerSize));
// arg1 = AccessorInfo&
__ addi(arg1, sp, Operand(kAccessorInfoSlot * kPointerSize));
const int kStackUnwindSpace = 5;
Address getter_address = v8::ToCData<Address>(callback->getter());
ApiFunction fun(getter_address);
ExternalReference ref =
ExternalReference(&fun,
ExternalReference::DIRECT_GETTER_CALL,
__ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
Handle<JSObject> interceptor_holder,
LookupResult* lookup,
Handle<String> name,
ASSERT(interceptor_holder->HasNamedInterceptor());
ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, miss);
// So far the most popular follow ups for interceptor loads are FIELD
// and CALLBACKS, so inline only them, other cases may be added
bool compile_followup_inline = false;
if (lookup->IsFound() && lookup->IsCacheable()) {
if (lookup->IsField()) {
compile_followup_inline = true;
} else if (lookup->type() == CALLBACKS &&
lookup->GetCallbackObject()->IsAccessorInfo()) {
AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
compile_followup_inline = callback->getter() != NULL &&
callback->IsCompatibleReceiver(*object);
if (compile_followup_inline) {
// Compile the interceptor call, followed by inline code to load the
// property from further up the prototype chain if the call fails.
// Check that the maps haven't changed.
Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
scratch1, scratch2, scratch3,
ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
// Preserve the receiver register explicitly whenever it is different from
// the holder and it is needed should the interceptor return without any
// result. The CALLBACKS case needs the receiver to be passed into C++ code,
// the FIELD case might cause a miss during the prototype check.
bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
bool must_preserve_receiver_reg = !receiver.is(holder_reg) &&
(lookup->type() == CALLBACKS || must_perform_prototype_check);
// Save necessary data before invoking an interceptor.
// Requires a frame to make GC aware of pushed pointers.
FrameScope frame_scope(masm(), StackFrame::INTERNAL);
if (must_preserve_receiver_reg) {
__ Push(receiver, holder_reg, name_reg);
__ Push(holder_reg, name_reg);
// Invoke an interceptor. Note: map checks from receiver to
// interceptor's holder have been compiled before (see a caller
CompileCallLoadPropertyWithInterceptor(masm(),
interceptor_holder);
// Check if interceptor provided a value for property. If it's
// the case, return immediately.
Label interceptor_failed;
__ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
__ cmp(r3, scratch1);
__ beq(&interceptor_failed);
frame_scope.GenerateLeaveFrame();
__ bind(&interceptor_failed);
if (must_preserve_receiver_reg) {
// Leave the internal frame.
// Check that the maps from interceptor's holder to lookup's holder
// haven't changed. And load lookup's holder into |holder| register.
if (must_perform_prototype_check) {
holder_reg = CheckPrototypes(interceptor_holder,
Handle<JSObject>(lookup->holder()),
if (lookup->IsField()) {
// We found FIELD property in prototype chain of interceptor's holder.
// Retrieve a field from field's holder.
GenerateFastPropertyLoad(masm(), r3, holder_reg,
Handle<JSObject>(lookup->holder()),
lookup->GetFieldIndex());
// We found CALLBACKS property in prototype chain of interceptor's
ASSERT(lookup->type() == CALLBACKS);
Handle<AccessorInfo> callback(
AccessorInfo::cast(lookup->GetCallbackObject()));
ASSERT(callback->getter() != NULL);
// Tail call to runtime.
// Important invariant in CALLBACKS case: the code above must be
// structured to never clobber |receiver| register.
__ Move(scratch2, callback);
// holder_reg is either receiver or scratch1.
if (!receiver.is(holder_reg)) {
ASSERT(scratch1.is(holder_reg));
__ Push(receiver, holder_reg);
__ push(holder_reg);
FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
__ mov(scratch1, Operand(ExternalReference::isolate_address()));
__ Push(scratch3, scratch1, scratch2, name_reg);
ExternalReference ref =
ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
__ TailCallExternalReference(ref, 6, 1);
} else { // !compile_followup_inline
// Call the runtime system to load the interceptor.
// Check that the maps haven't changed.
Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
scratch1, scratch2, scratch3,
PushInterceptorArguments(masm(), receiver, holder_reg,
name_reg, interceptor_holder);
ExternalReference ref =
ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
__ TailCallExternalReference(ref, 6, 1);
void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) {
if (kind_ == Code::KEYED_CALL_IC) {
__ Cmpi(r5, Operand(name), r0);
void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
Handle<JSObject> holder,
Handle<String> name,
ASSERT(holder->IsGlobalObject());
// Get the number of arguments.
const int argc = arguments().immediate();
// Get the receiver from the stack.
__ LoadP(r3, MemOperand(sp, argc * kPointerSize), r0);
// Check that the maps haven't changed.
__ JumpIfSmi(r3, miss);
CheckPrototypes(object, r3, holder, r6, r4, r7, name, miss);
void CallStubCompiler::GenerateLoadFunctionFromCell(
Handle<JSGlobalPropertyCell> cell,
Handle<JSFunction> function,
// Get the value from the cell.
__ mov(r6, Operand(cell));
__ LoadP(r4, FieldMemOperand(r6, JSGlobalPropertyCell::kValueOffset));
// Check that the cell contains the same function.
if (heap()->InNewSpace(*function)) {
// We can't embed a pointer to a function in new space so we have
// to verify that the shared function info is unchanged. This has
// the nice side effect that multiple closures based on the same
// function can all use this call IC. Before we load through the
// function, we have to verify that it still is a function.
__ JumpIfSmi(r4, miss);
__ CompareObjectType(r4, r6, r6, JS_FUNCTION_TYPE);
// Check the shared function info. Make sure it hasn't changed.
__ Move(r6, Handle<SharedFunctionInfo>(function->shared()));
__ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
__ mov(r6, Operand(function));
void CallStubCompiler::GenerateMissBranch() {
isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
__ Jump(code, RelocInfo::CODE_TARGET);
Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
Handle<JSObject> holder,
Handle<String> name) {
// ----------- S t a t e -------------
// -- lr : return address
// -----------------------------------
GenerateNameCheck(name, &miss);
const int argc = arguments().immediate();
// Get the receiver of the function from the stack into r3.
__ LoadP(r3, MemOperand(sp, argc * kPointerSize), r0);
// Check that the receiver isn't a smi.
__ JumpIfSmi(r3, &miss);
// Do the right check and compute the holder register.
Register reg = CheckPrototypes(object, r3, holder, r4, r6, r7, name, &miss);
GenerateFastPropertyLoad(masm(), r4, reg, holder, index);
GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
// Handle call cache miss.
GenerateMissBranch();
// Return the generated code.
return GetCode(Code::FIELD, name);
Handle<Code> CallStubCompiler::CompileArrayPushCall(
Handle<Object> object,
Handle<JSObject> holder,
Handle<JSGlobalPropertyCell> cell,
Handle<JSFunction> function,
Handle<String> name) {
// ----------- S t a t e -------------
// -- lr : return address
// -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
// -- sp[argc * 4] : receiver
// -----------------------------------
// If object is not an array, bail out to regular call.
if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
GenerateNameCheck(name, &miss);
Register receiver = r4;
// Get the receiver from the stack
const int argc = arguments().immediate();
__ LoadP(receiver, MemOperand(sp, argc * kPointerSize), r0);
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, &miss);
// Check that the maps haven't changed.
CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, r6, r3, r7,
// Nothing to do, just return the length.
__ LoadP(r3, FieldMemOperand(receiver, JSArray::kLengthOffset));
if (argc == 1) { // Otherwise fall through to call the builtin.
Label attempt_to_grow_elements;
Register elements = r9;
Register end_elements = r8;
// Get the elements array of the object.
__ LoadP(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
// Check that the elements are in fast mode and writable.
__ CheckMap(elements,
Heap::kFixedArrayMapRootIndex,
// Get the array's length into r3 and calculate new length.
__ LoadP(r3, FieldMemOperand(receiver, JSArray::kLengthOffset));
__ AddSmiLiteral(r3, r3, Smi::FromInt(argc), r0);
// Get the elements' length.
__ LoadP(r7, FieldMemOperand(elements, FixedArray::kLengthOffset));
// Check if we could survive without allocation.
__ bgt(&attempt_to_grow_elements);
// Check if value is a smi.
Label with_write_barrier;
__ LoadP(r7, MemOperand(sp, (argc - 1) * kPointerSize), r0);
__ JumpIfNotSmi(r7, &with_write_barrier);
__ StoreP(r3, FieldMemOperand(receiver, JSArray::kLengthOffset), r0);
// We may need a register containing the address end_elements below,
// so write back the value in end_elements.
__ SmiToPtrArrayOffset(end_elements, r3);
__ add(end_elements, elements, end_elements);
const int kEndElementsOffset =
FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
__ Add(end_elements, end_elements, kEndElementsOffset, r0);
__ StoreP(r7, MemOperand(end_elements));
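// end_elements now points at the first free element slot, i.e.
// elements + FixedArray::kHeaderSize - kHeapObjectTag
//          + (new_length - argc) * kPointerSize.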
__ bind(&with_write_barrier);
__ LoadP(r6, FieldMemOperand(receiver, HeapObject::kMapOffset));
if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
Label fast_object, not_fast_object;
__ CheckFastObjectElements(r6, r10, &not_fast_object);
// In case of fast smi-only, convert to fast object, otherwise bail out.
__ bind(&not_fast_object);
__ CheckFastSmiElements(r6, r10, &call_builtin);
Label try_holey_map;
__ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
__ mr(r5, receiver);
ElementsTransitionGenerator::
GenerateMapChangeElementsTransition(masm());
__ bind(&try_holey_map);
__ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
FAST_HOLEY_ELEMENTS,
__ mr(r5, receiver);
ElementsTransitionGenerator::
GenerateMapChangeElementsTransition(masm());
__ bind(&fast_object);
__ CheckFastObjectElements(r6, r6, &call_builtin);
__ StoreP(r3, FieldMemOperand(receiver, JSArray::kLengthOffset), r0);
// We may need a register containing the address end_elements below,
// so write back the value in end_elements.
__ SmiToPtrArrayOffset(end_elements, r3);
__ add(end_elements, elements, end_elements);
__ Add(end_elements, end_elements, kEndElementsOffset, r0);
__ StoreP(r7, MemOperand(end_elements));
__ RecordWrite(elements,
EMIT_REMEMBERED_SET,
__ bind(&attempt_to_grow_elements);
// r3: array's length + 1.
// r7: elements' length.
if (!FLAG_inline_new) {
__ b(&call_builtin);
__ LoadP(r5, MemOperand(sp, (argc - 1) * kPointerSize), r0);
// Growing elements that are SMI-only requires special handling in case
// the new element is non-Smi. For now, delegate to the builtin.
Label no_fast_elements_check;
__ JumpIfSmi(r5, &no_fast_elements_check);
__ LoadP(r10, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ CheckFastObjectElements(r10, r10, &call_builtin);
__ bind(&no_fast_elements_check);
Isolate* isolate = masm()->isolate();
ExternalReference new_space_allocation_top =
ExternalReference::new_space_allocation_top_address(isolate);
ExternalReference new_space_allocation_limit =
ExternalReference::new_space_allocation_limit_address(isolate);
const int kAllocationDelta = 4;
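// Grow the backing store by kAllocationDelta slots: one receives the pushed
// value, the remaining slots are pre-filled with the hole below.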
1728
// Load top and check if it is the end of elements.
1729
__ SmiToPtrArrayOffset(end_elements, r3);
1730
__ add(end_elements, elements, end_elements);
1731
__ Add(end_elements, end_elements, kEndElementsOffset, r0);
1732
__ mov(r10, Operand(new_space_allocation_top));
1733
__ LoadP(r6, MemOperand(r10));
1734
__ cmp(end_elements, r6);
1735
__ bne(&call_builtin);
1737
__ mov(r22, Operand(new_space_allocation_limit));
1738
__ LoadP(r22, MemOperand(r22));
1739
__ addi(r6, r6, Operand(kAllocationDelta * kPointerSize));
1741
__ bgt(&call_builtin);
1743
// We fit and could grow elements.
1744
// Update new_space_allocation_top.
1745
__ StoreP(r6, MemOperand(r10));
1746
// Push the argument.
1747
__ StoreP(r5, MemOperand(end_elements));
1748
// Fill the rest with holes.
1749
__ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
1750
for (int i = 1; i < kAllocationDelta; i++) {
1751
__ StoreP(r6, MemOperand(end_elements, i * kPointerSize), r0);
1754
// Update elements' and array's sizes.
1755
__ StoreP(r3, FieldMemOperand(receiver, JSArray::kLengthOffset), r0);
1756
__ AddSmiLiteral(r7, r7, Smi::FromInt(kAllocationDelta), r0);
1757
__ StoreP(r7, FieldMemOperand(elements, FixedArray::kLengthOffset), r0);
1759
// Elements are in new space, so write barrier is not required.
1763
__ bind(&call_builtin);
1764
__ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1770
// Handle call cache miss.
1772
GenerateMissBranch();
1774
// Return the generated code.
1775
return GetCode(function);
1779
Handle<Code> CallStubCompiler::CompileArrayPopCall(
1780
Handle<Object> object,
1781
Handle<JSObject> holder,
1782
Handle<JSGlobalPropertyCell> cell,
1783
Handle<JSFunction> function,
1784
Handle<String> name) {
1785
// ----------- S t a t e -------------
1787
// -- lr : return address
1788
// -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1790
// -- sp[argc * 4] : receiver
1791
// -----------------------------------
1793
// If object is not an array, bail out to regular call.
1794
if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
1796
Label miss, return_undefined, call_builtin;
1797
Register receiver = r4;
1798
Register elements = r6;
1799
GenerateNameCheck(name, &miss);
1801
// Get the receiver from the stack
1802
const int argc = arguments().immediate();
1803
__ LoadP(receiver, MemOperand(sp, argc * kPointerSize), r0);
1804
// Check that the receiver isn't a smi.
1805
__ JumpIfSmi(receiver, &miss);
1807
// Check that the maps haven't changed.
1808
CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, elements,
1809
r7, r3, name, &miss);
1811
// Get the elements array of the object.
1812
__ LoadP(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1814
// Check that the elements are in fast mode and writable.
1815
__ CheckMap(elements,
1817
Heap::kFixedArrayMapRootIndex,
1821
// Get the array's length into r7 and calculate new length.
1822
__ LoadP(r7, FieldMemOperand(receiver, JSArray::kLengthOffset));
1823
__ SubSmiLiteral(r7, r7, Smi::FromInt(1), r0);
1824
__ cmpi(r7, Operand::Zero());
1825
__ blt(&return_undefined);
1827
// Get the last element.
1828
__ LoadRoot(r9, Heap::kTheHoleValueRootIndex);
1829
// We can't address the last element in one operation. Compute the more
1830
// expensive shift first, and use an offset later on.
1831
__ SmiToPtrArrayOffset(r3, r7);
1832
__ add(elements, elements, r3);
1833
__ LoadP(r3, FieldMemOperand(elements, FixedArray::kHeaderSize));
1835
__ beq(&call_builtin);
1837
// Set the array's length.
1838
__ StoreP(r7, FieldMemOperand(receiver, JSArray::kLengthOffset), r0);
1840
// Fill with the hole.
1841
__ StoreP(r9, FieldMemOperand(elements, FixedArray::kHeaderSize), r0);
1845
__ bind(&return_undefined);
1846
__ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1850
__ bind(&call_builtin);
1851
__ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,
1856
// Handle call cache miss.
1858
GenerateMissBranch();
1860
// Return the generated code.
1861
return GetCode(function);
1865
Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
1866
Handle<Object> object,
1867
Handle<JSObject> holder,
1868
Handle<JSGlobalPropertyCell> cell,
1869
Handle<JSFunction> function,
1870
Handle<String> name) {
1871
// ----------- S t a t e -------------
1872
// -- r5 : function name
1873
// -- lr : return address
1874
// -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1876
// -- sp[argc * 4] : receiver
1877
// -----------------------------------
1879
// If object is not a string, bail out to regular call.
1880
if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
1882
const int argc = arguments().immediate();
1885
Label index_out_of_range;
1886
Label* index_out_of_range_label = &index_out_of_range;
1888
if (kind_ == Code::CALL_IC &&
1889
(CallICBase::StringStubState::decode(extra_state_) ==
1890
DEFAULT_STRING_STUB)) {
1891
index_out_of_range_label = &miss;
1893
GenerateNameCheck(name, &name_miss);
1895
// Check that the maps starting from the prototype haven't changed.
1896
GenerateDirectLoadGlobalFunctionPrototype(masm(),
1897
Context::STRING_FUNCTION_INDEX,
1900
ASSERT(!object.is_identical_to(holder));
1901
CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
1902
r3, holder, r4, r6, r7, name, &miss);
1904
Register receiver = r4;
1905
Register index = r7;
1906
Register result = r3;
1907
__ LoadP(receiver, MemOperand(sp, argc * kPointerSize), r0);
1909
__ LoadP(index, MemOperand(sp, (argc - 1) * kPointerSize), r0);
1911
__ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1914
StringCharCodeAtGenerator generator(receiver,
1917
&miss, // When not a string.
1918
&miss, // When not a number.
1919
index_out_of_range_label,
1920
STRING_INDEX_IS_NUMBER);
1921
generator.GenerateFast(masm());
1925
StubRuntimeCallHelper call_helper;
1926
generator.GenerateSlow(masm(), call_helper);
1928
if (index_out_of_range.is_linked()) {
1929
__ bind(&index_out_of_range);
1930
__ LoadRoot(r3, Heap::kNanValueRootIndex);
1936
// Restore function name in r5.
1938
__ bind(&name_miss);
1939
GenerateMissBranch();
1941
// Return the generated code.
1942
return GetCode(function);
1946
Handle<Code> CallStubCompiler::CompileStringCharAtCall(
1947
Handle<Object> object,
1948
Handle<JSObject> holder,
1949
Handle<JSGlobalPropertyCell> cell,
1950
Handle<JSFunction> function,
1951
Handle<String> name) {
1952
// ----------- S t a t e -------------
1953
// -- r5 : function name
1954
// -- lr : return address
1955
// -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1957
// -- sp[argc * 4] : receiver
1958
// -----------------------------------
1960
// If object is not a string, bail out to regular call.
1961
if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
1963
const int argc = arguments().immediate();
1966
Label index_out_of_range;
1967
Label* index_out_of_range_label = &index_out_of_range;
1968
if (kind_ == Code::CALL_IC &&
1969
(CallICBase::StringStubState::decode(extra_state_) ==
1970
DEFAULT_STRING_STUB)) {
1971
index_out_of_range_label = &miss;
1973
GenerateNameCheck(name, &name_miss);
1975
// Check that the maps starting from the prototype haven't changed.
1976
GenerateDirectLoadGlobalFunctionPrototype(masm(),
1977
Context::STRING_FUNCTION_INDEX,
1980
ASSERT(!object.is_identical_to(holder));
1981
CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
1982
r3, holder, r4, r6, r7, name, &miss);
1984
Register receiver = r3;
1985
Register index = r7;
1986
Register scratch = r6;
1987
Register result = r3;
1988
__ LoadP(receiver, MemOperand(sp, argc * kPointerSize), r0);
1990
__ LoadP(index, MemOperand(sp, (argc - 1) * kPointerSize), r0);
1992
__ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1995
StringCharAtGenerator generator(receiver,
1999
&miss, // When not a string.
2000
&miss, // When not a number.
2001
index_out_of_range_label,
2002
STRING_INDEX_IS_NUMBER);
2003
generator.GenerateFast(masm());
2007
StubRuntimeCallHelper call_helper;
2008
generator.GenerateSlow(masm(), call_helper);
2010
if (index_out_of_range.is_linked()) {
2011
__ bind(&index_out_of_range);
2012
__ LoadRoot(r3, Heap::kEmptyStringRootIndex);
2018
// Restore function name in r5.
2020
__ bind(&name_miss);
2021
GenerateMissBranch();
2023
// Return the generated code.
2024
return GetCode(function);
2028
Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
2029
Handle<Object> object,
2030
Handle<JSObject> holder,
2031
Handle<JSGlobalPropertyCell> cell,
2032
Handle<JSFunction> function,
2033
Handle<String> name) {
2034
// ----------- S t a t e -------------
2035
// -- r5 : function name
2036
// -- lr : return address
2037
// -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
2039
// -- sp[argc * 4] : receiver
2040
// -----------------------------------
2042
const int argc = arguments().immediate();
2044
// If the object is not a JSObject or we got an unexpected number of
2045
// arguments, bail out to the regular call.
2046
if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
2049
GenerateNameCheck(name, &miss);
2051
if (cell.is_null()) {
2052
__ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
2054
STATIC_ASSERT(kSmiTag == 0);
2055
__ JumpIfSmi(r4, &miss);
2057
CheckPrototypes(Handle<JSObject>::cast(object), r4, holder, r3, r6, r7,
2060
ASSERT(cell->value() == *function);
2061
GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
2063
GenerateLoadFunctionFromCell(cell, function, &miss);
2066
// Load the char code argument.
__ LoadP(code, MemOperand(sp, 0 * kPointerSize));
// Check the code is a smi.
STATIC_ASSERT(kSmiTag == 0);
__ JumpIfNotSmi(code, &slow);
// Convert the smi code to uint16.
__ LoadSmiLiteral(r0, Smi::FromInt(0xffff));
__ and_(code, code, r0);
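// The mask itself is a smi (its tag bits are zero), so the AND leaves the
// smi tag of |code| intact while truncating its untagged value to the
// uint16 range expected by String.fromCharCode.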
2079
StringCharFromCodeGenerator generator(code, r3);
generator.GenerateFast(masm());
StubRuntimeCallHelper call_helper;
generator.GenerateSlow(masm(), call_helper);
// Tail call the full function. We do not have to patch the receiver
// because the function makes no use of it.
function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
// r5: function name.
GenerateMissBranch();
// Return the generated code.
return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
2102
Handle<Code> CallStubCompiler::CompileMathFloorCall(
2103
Handle<Object> object,
2104
Handle<JSObject> holder,
2105
Handle<JSGlobalPropertyCell> cell,
2106
Handle<JSFunction> function,
2107
Handle<String> name) {
2108
// ----------- S t a t e -------------
2109
// -- r5 : function name
2110
// -- lr : return address
2111
// -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
2113
// -- sp[argc * 4] : receiver
2114
// -----------------------------------
2116
const int argc = arguments().immediate();
2117
// If the object is not a JSObject or we got an unexpected number of
2118
// arguments, bail out to the regular call.
2119
if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
2121
Label miss, slow, not_smi, positive, drop_arg_return;
2122
GenerateNameCheck(name, &miss);
2124
if (cell.is_null()) {
2125
__ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
2126
STATIC_ASSERT(kSmiTag == 0);
2127
__ JumpIfSmi(r4, &miss);
2128
CheckPrototypes(Handle<JSObject>::cast(object), r4, holder, r3, r6, r7,
2131
ASSERT(cell->value() == *function);
2132
GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
2134
GenerateLoadFunctionFromCell(cell, function, &miss);
2137
// Load the (only) argument into r3.
__ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
// If the argument is a smi, just return.
STATIC_ASSERT(kSmiTag == 0);
__ andi(r0, r3, Operand(kSmiTagMask));
__ bne(&not_smi, cr0);
__ CheckMap(r3, r4, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
// Load the HeapNumber value.
__ lfd(d1, FieldMemOperand(r3, HeapNumber::kValueOffset));
// Round to integer, towards minus infinity.
if (CpuFeatures::IsSupported(FPU)) {
// The frim instruction is only supported on POWER5
#if V8_TARGET_ARCH_PPC64
// This sequence is more portable (avoids frim)
// This should be evaluated to determine if frim provides any
// perf benefit or if we can simply use the compatible sequence
__ SetRoundingMode(kRoundToMinusInf);
#if V8_TARGET_ARCH_PPC64
__ ResetRoundingMode();
// Convert the argument to an integer.
__ stfdu(d1, MemOperand(sp, -8));
#if V8_TARGET_ARCH_PPC64
__ ld(r3, MemOperand(sp, 0));
#if __FLOAT_WORD_ORDER == __LITTLE_ENDIAN
__ lwz(r3, MemOperand(sp, 0));
__ lwz(r3, MemOperand(sp, 4));
__ addi(sp, sp, Operand(8));
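// The converted value is moved from the FPR to a GPR through the stack
// slot allocated by stfdu above and released by the addi; presumably this
// indirection is used because the targeted cores have no cheap direct
// FPR-to-GPR move.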
2189
// if resulting conversion is negative, invert for bit tests
__ TestSignBit(r3, r0);
__ beq(&positive, cr0);
// if any of the high bits are set, fail to generic
__ JumpIfNotUnsignedSmiCandidate(r0, r0, &slow);
STATIC_ASSERT(kSmiTag == 0);
__ cmpi(r3, Operand::Zero());
__ bne(&drop_arg_return);
__ LoadP(r4, MemOperand(sp, 0 * kPointerSize));
__ lwz(r4, FieldMemOperand(r4, HeapNumber::kExponentOffset));
__ TestSignBit32(r4, r0);
__ beq(&drop_arg_return, cr0);
// If our HeapNumber is negative it was -0, so load its address and return.
__ LoadP(r3, MemOperand(sp));
__ bind(&drop_arg_return);
// Tail call the full function. We do not have to patch the receiver
// because the function makes no use of it.
function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
// r5: function name.
GenerateMissBranch();
// Return the generated code.
return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
2233
Handle<Code> CallStubCompiler::CompileMathAbsCall(
2234
Handle<Object> object,
2235
Handle<JSObject> holder,
2236
Handle<JSGlobalPropertyCell> cell,
2237
Handle<JSFunction> function,
2238
Handle<String> name) {
2239
// ----------- S t a t e -------------
2240
// -- r5 : function name
2241
// -- lr : return address
2242
// -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
2244
// -- sp[argc * 4] : receiver
2245
// -----------------------------------
2247
const int argc = arguments().immediate();
2248
// If the object is not a JSObject or we got an unexpected number of
2249
// arguments, bail out to the regular call.
2250
if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
2253
GenerateNameCheck(name, &miss);
2254
if (cell.is_null()) {
2255
__ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
2256
STATIC_ASSERT(kSmiTag == 0);
2257
__ JumpIfSmi(r4, &miss);
2258
CheckPrototypes(Handle<JSObject>::cast(object), r4, holder, r3, r6, r7,
2261
ASSERT(cell->value() == *function);
2262
GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
2264
GenerateLoadFunctionFromCell(cell, function, &miss);
2267
// Load the (only) argument into r3.
__ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
// Check if the argument is a smi.
STATIC_ASSERT(kSmiTag == 0);
__ JumpIfNotSmi(r3, &not_smi);
// Do bitwise not or do nothing depending on the sign of the argument.
__ ShiftRightArithImm(r0, r3, kBitsPerPointer - 1);
__ xor_(r4, r3, r0);
// Add 1 or do nothing depending on the sign of the argument.
__ sub(r3, r4, r0, LeaveOE, SetRC);
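// r0 holds 0 for a non-negative smi and -1 for a negative one, so the
// xor/sub pair computes the absolute value without branching
// (~x - (-1) == ~x + 1 == -x); only the most negative smi overflows,
// which is handled below.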
2283
// If the result is still negative, go to the slow case.
// This only happens for the most negative smi.
// Check if the argument is a heap number and load its exponent and
__ CheckMap(r3, r4, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
__ lwz(r4, FieldMemOperand(r3, HeapNumber::kExponentOffset));
// Check the sign of the argument. If the argument is positive,
Label negative_sign;
__ andis(r0, r4, Operand(HeapNumber::kSignMask >> 16));
__ bne(&negative_sign, cr0);
// If the argument is negative, clear the sign, and return a new
__ bind(&negative_sign);
STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
__ xoris(r4, r4, Operand(HeapNumber::kSignMask >> 16));
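// xoris flips bits in the upper halfword of r4. We only reach this point
// when the sign bit is known to be set, so the XOR clears it and leaves
// the exponent word of the (now positive) value.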
2311
__ lwz(r6, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
__ LoadRoot(r9, Heap::kHeapNumberMapRootIndex);
__ AllocateHeapNumber(r3, r7, r8, r9, &slow);
__ stw(r4, FieldMemOperand(r3, HeapNumber::kExponentOffset));
__ stw(r6, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
// Tail call the full function. We do not have to patch the receiver
// because the function makes no use of it.
function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
// r5: function name.
GenerateMissBranch();
// Return the generated code.
return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
2334
Handle<Code> CallStubCompiler::CompileFastApiCall(
2335
const CallOptimization& optimization,
2336
Handle<Object> object,
2337
Handle<JSObject> holder,
2338
Handle<JSGlobalPropertyCell> cell,
2339
Handle<JSFunction> function,
2340
Handle<String> name) {
2341
Counters* counters = isolate()->counters();
2343
ASSERT(optimization.is_simple_api_call());
2344
// Bail out if object is a global object as we don't want to
2345
// repatch it to global receiver.
2346
if (object->IsGlobalObject()) return Handle<Code>::null();
2347
if (!cell.is_null()) return Handle<Code>::null();
2348
if (!object->IsJSObject()) return Handle<Code>::null();
2349
int depth = optimization.GetPrototypeDepthOfExpectedType(
2350
Handle<JSObject>::cast(object), holder);
2351
if (depth == kInvalidProtoDepth) return Handle<Code>::null();
2353
Label miss, miss_before_stack_reserved;
2354
GenerateNameCheck(name, &miss_before_stack_reserved);
2356
// Get the receiver from the stack.
2357
const int argc = arguments().immediate();
2358
__ LoadP(r4, MemOperand(sp, argc * kPointerSize), r0);
2360
// Check that the receiver isn't a smi.
2361
__ JumpIfSmi(r4, &miss_before_stack_reserved);
2363
__ IncrementCounter(counters->call_const(), 1, r3, r6);
2364
__ IncrementCounter(counters->call_const_fast_api(), 1, r3, r6);
2366
ReserveSpaceForFastApiCall(masm(), r3);
2368
// Check that the maps haven't changed and find a Holder as a side effect.
2369
CheckPrototypes(Handle<JSObject>::cast(object), r4, holder, r3, r6, r7, name,
2372
GenerateFastApiDirectCall(masm(), optimization, argc);
2375
FreeSpaceForFastApiCall(masm());
2377
__ bind(&miss_before_stack_reserved);
2378
GenerateMissBranch();
2380
// Return the generated code.
2381
return GetCode(function);
2385
Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
2386
Handle<JSObject> holder,
2387
Handle<JSFunction> function,
2388
Handle<String> name,
2390
// ----------- S t a t e -------------
2392
// -- lr : return address
2393
// -----------------------------------
2394
if (HasCustomCallGenerator(function)) {
2395
Handle<Code> code = CompileCustomCall(object, holder,
2396
Handle<JSGlobalPropertyCell>::null(),
2398
// A null handle means bail out to the regular compiler code below.
2399
if (!code.is_null()) return code;
2403
GenerateNameCheck(name, &miss);
2405
// Get the receiver from the stack
2406
const int argc = arguments().immediate();
2407
__ LoadP(r4, MemOperand(sp, argc * kPointerSize), r0);
2409
// Check that the receiver isn't a smi.
2410
if (check != NUMBER_CHECK) {
2411
__ JumpIfSmi(r4, &miss);
2414
// Make sure that it's okay not to patch the on stack receiver
2415
// unless we're doing a receiver map check.
2416
ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2418
case RECEIVER_MAP_CHECK:
2419
__ IncrementCounter(masm()->isolate()->counters()->call_const(),
2422
// Check that the maps haven't changed.
2423
CheckPrototypes(Handle<JSObject>::cast(object), r4, holder, r3, r6, r7,
2426
// Patch the receiver on the stack with the global proxy if
2428
if (object->IsGlobalObject()) {
2429
__ LoadP(r6, FieldMemOperand(r4, GlobalObject::kGlobalReceiverOffset));
2430
__ StoreP(r6, MemOperand(sp, argc * kPointerSize));
2435
if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2436
// Check that the object is a two-byte string or a symbol.
2437
__ CompareObjectType(r4, r6, r6, FIRST_NONSTRING_TYPE);
2439
// Check that the maps starting from the prototype haven't changed.
2440
GenerateDirectLoadGlobalFunctionPrototype(
2441
masm(), Context::STRING_FUNCTION_INDEX, r3, &miss);
2443
Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2444
r3, holder, r6, r4, r7, name, &miss);
2446
// Calling non-strict non-builtins with a value as the receiver
2453
if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2455
// Check that the object is a smi or a heap number.
2456
__ JumpIfSmi(r4, &fast);
2457
__ CompareObjectType(r4, r3, r3, HEAP_NUMBER_TYPE);
2460
// Check that the maps starting from the prototype haven't changed.
2461
GenerateDirectLoadGlobalFunctionPrototype(
2462
masm(), Context::NUMBER_FUNCTION_INDEX, r3, &miss);
2464
Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2465
r3, holder, r6, r4, r7, name, &miss);
2467
// Calling non-strict non-builtins with a value as the receiver
2474
if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2476
// Check that the object is a boolean.
2477
__ LoadRoot(ip, Heap::kTrueValueRootIndex);
2480
__ LoadRoot(ip, Heap::kFalseValueRootIndex);
2484
// Check that the maps starting from the prototype haven't changed.
2485
GenerateDirectLoadGlobalFunctionPrototype(
2486
masm(), Context::BOOLEAN_FUNCTION_INDEX, r3, &miss);
2488
Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2489
r3, holder, r6, r4, r7, name, &miss);
2491
// Calling non-strict non-builtins with a value as the receiver
2498
CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
2502
function, arguments(), JUMP_FUNCTION, NullCallWrapper(), call_kind);
2504
// Handle call cache miss.
2506
GenerateMissBranch();
2508
// Return the generated code.
2509
return GetCode(function);
2513
Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
2514
Handle<JSObject> holder,
2515
Handle<String> name) {
2516
// ----------- S t a t e -------------
2518
// -- lr : return address
2519
// -----------------------------------
2521
GenerateNameCheck(name, &miss);
2523
// Get the number of arguments.
2524
const int argc = arguments().immediate();
2525
LookupResult lookup(isolate());
2526
LookupPostInterceptor(holder, name, &lookup);
2528
// Get the receiver from the stack.
2529
__ LoadP(r4, MemOperand(sp, argc * kPointerSize), r0);
2531
CallInterceptorCompiler compiler(this, arguments(), r5, extra_state_);
2532
compiler.Compile(masm(), object, holder, name, &lookup, r4, r6, r7, r3,
2535
// Move returned value, the function to call, to r4.
2537
// Restore receiver.
2538
__ LoadP(r3, MemOperand(sp, argc * kPointerSize), r0);
2540
GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
2542
// Handle call cache miss.
2544
GenerateMissBranch();
2546
// Return the generated code.
2547
return GetCode(Code::INTERCEPTOR, name);
2551
Handle<Code> CallStubCompiler::CompileCallGlobal(
2552
Handle<JSObject> object,
2553
Handle<GlobalObject> holder,
2554
Handle<JSGlobalPropertyCell> cell,
2555
Handle<JSFunction> function,
2556
Handle<String> name) {
2557
// ----------- S t a t e -------------
2559
// -- lr : return address
2560
// -----------------------------------
2561
if (HasCustomCallGenerator(function)) {
2562
Handle<Code> code = CompileCustomCall(object, holder, cell, function, name);
2563
// A null handle means bail out to the regular compiler code below.
2564
if (!code.is_null()) return code;
2568
GenerateNameCheck(name, &miss);
2570
// Get the number of arguments.
2571
const int argc = arguments().immediate();
2572
GenerateGlobalReceiverCheck(object, holder, name, &miss);
2573
GenerateLoadFunctionFromCell(cell, function, &miss);
2575
// Patch the receiver on the stack with the global proxy if
2577
if (object->IsGlobalObject()) {
2578
__ LoadP(r6, FieldMemOperand(r3, GlobalObject::kGlobalReceiverOffset));
2579
__ StoreP(r6, MemOperand(sp, argc * kPointerSize), r0);
2582
// Set up the context (function already in r4).
2583
__ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
2585
// Jump to the cached code (tail call).
2586
Counters* counters = masm()->isolate()->counters();
2587
__ IncrementCounter(counters->call_global_inline(), 1, r6, r7);
2588
ParameterCount expected(function->shared()->formal_parameter_count());
2589
CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
2592
// We call indirectly through the code field in the function to
2593
// allow recompilation to take effect without changing any of the
2595
__ LoadP(r6, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2596
__ InvokeCode(r6, expected, arguments(), JUMP_FUNCTION,
2597
NullCallWrapper(), call_kind);
2599
// Handle call cache miss.
2601
__ IncrementCounter(counters->call_global_inline_miss(), 1, r4, r6);
2602
GenerateMissBranch();
2604
// Return the generated code.
2605
return GetCode(Code::NORMAL, name);
2609
Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
2611
Handle<Map> transition,
2612
Handle<String> name) {
2613
// ----------- S t a t e -------------
2617
// -- lr : return address
2618
// -----------------------------------
2621
GenerateStoreField(masm(),
2629
Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2630
__ Jump(ic, RelocInfo::CODE_TARGET);
2632
// Return the generated code.
2633
return GetCode(transition.is_null()
2635
: Code::MAP_TRANSITION, name);
2639
Handle<Code> StoreStubCompiler::CompileStoreCallback(
2640
Handle<String> name,
2641
Handle<JSObject> receiver,
2642
Handle<JSObject> holder,
2643
Handle<AccessorInfo> callback) {
2644
// ----------- S t a t e -------------
2648
// -- lr : return address
2649
// -----------------------------------
2651
// Check that the maps haven't changed.
2652
__ JumpIfSmi(r4, &miss);
2653
CheckPrototypes(receiver, r4, holder, r6, r7, r8, name, &miss);
2655
// Stub never generated for non-global objects that require access checks.
2656
ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
2658
__ push(r4); // receiver
2659
__ mov(ip, Operand(callback)); // callback info
2660
__ Push(ip, r5, r3);
2662
// Do tail-call to the runtime system.
2663
ExternalReference store_callback_property =
2664
ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
2666
__ TailCallExternalReference(store_callback_property, 4, 1);
2668
// Handle store cache miss.
2670
Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2671
__ Jump(ic, RelocInfo::CODE_TARGET);
2673
// Return the generated code.
2674
return GetCode(Code::CALLBACKS, name);
2679
#define __ ACCESS_MASM(masm)
void StoreStubCompiler::GenerateStoreViaSetter(
MacroAssembler* masm,
Handle<JSFunction> setter) {
// ----------- S t a t e -------------
// -- lr : return address
// -----------------------------------
FrameScope scope(masm, StackFrame::INTERNAL);
// Save value register, so we can restore it later.
if (!setter.is_null()) {
// Call the JavaScript setter with receiver and value on the stack.
ParameterCount actual(1);
__ InvokeFunction(setter, actual, CALL_FUNCTION, NullCallWrapper(),
// If we generate a global code snippet for deoptimization only, remember
// the place to continue after deoptimization.
masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
// We have to return the passed value, not the return value of the setter.
// Restore context register.
__ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2720
#define __ ACCESS_MASM(masm())
2723
Handle<Code> StoreStubCompiler::CompileStoreViaSetter(
2724
Handle<String> name,
2725
Handle<JSObject> receiver,
2726
Handle<JSObject> holder,
2727
Handle<JSFunction> setter) {
2728
// ----------- S t a t e -------------
2732
// -- lr : return address
2733
// -----------------------------------
2736
// Check that the maps haven't changed.
2737
__ JumpIfSmi(r4, &miss);
2738
CheckPrototypes(receiver, r4, holder, r6, r7, r8, name, &miss);
2740
GenerateStoreViaSetter(masm(), setter);
2743
Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2744
__ Jump(ic, RelocInfo::CODE_TARGET);
2746
// Return the generated code.
2747
return GetCode(Code::CALLBACKS, name);
2751
Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
2752
Handle<JSObject> receiver,
2753
Handle<String> name) {
2754
// ----------- S t a t e -------------
2758
// -- lr : return address
2759
// -----------------------------------
2762
// Check that the map of the object hasn't changed.
2763
__ CheckMap(r4, r6, Handle<Map>(receiver->map()), &miss,
2764
DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
2766
// Perform global security token check if needed.
2767
if (receiver->IsJSGlobalProxy()) {
2768
__ CheckAccessGlobalProxy(r4, r6, &miss);
2771
// Stub is never generated for non-global objects that require access
2773
ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
2775
__ Push(r4, r5, r3); // Receiver, name, value.
2777
__ LoadSmiLiteral(r3, Smi::FromInt(strict_mode_));
2778
__ push(r3); // strict mode
2780
// Do tail-call to the runtime system.
2781
ExternalReference store_ic_property =
2782
ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
2784
__ TailCallExternalReference(store_ic_property, 4, 1);
2786
// Handle store cache miss.
2788
Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2789
__ Jump(ic, RelocInfo::CODE_TARGET);
2791
// Return the generated code.
2792
return GetCode(Code::INTERCEPTOR, name);
2796
Handle<Code> StoreStubCompiler::CompileStoreGlobal(
2797
Handle<GlobalObject> object,
2798
Handle<JSGlobalPropertyCell> cell,
2799
Handle<String> name) {
2800
// ----------- S t a t e -------------
2804
// -- lr : return address
2805
// -----------------------------------
2808
// Check that the map of the global has not changed.
2809
__ LoadP(r6, FieldMemOperand(r4, HeapObject::kMapOffset));
2810
__ mov(r7, Operand(Handle<Map>(object->map())));
2814
// Check that the value in the cell is not the hole. If it is, this
2815
// cell could have been deleted and reintroducing the global needs
2816
// to update the property details in the property dictionary of the
2817
// global object. We bail out to the runtime system to do that.
2818
__ mov(r7, Operand(cell));
2819
__ LoadRoot(r8, Heap::kTheHoleValueRootIndex);
2820
__ LoadP(r9, FieldMemOperand(r7, JSGlobalPropertyCell::kValueOffset));
2824
// Store the value in the cell.
2825
__ StoreP(r3, FieldMemOperand(r7, JSGlobalPropertyCell::kValueOffset), r0);
2826
// Cells are always rescanned, so no write barrier here.
2828
Counters* counters = masm()->isolate()->counters();
2829
__ IncrementCounter(counters->named_store_global_inline(), 1, r7, r6);
2832
// Handle store cache miss.
2834
__ IncrementCounter(counters->named_store_global_inline_miss(), 1, r7, r6);
2835
Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2836
__ Jump(ic, RelocInfo::CODE_TARGET);
2838
// Return the generated code.
2839
return GetCode(Code::NORMAL, name);
2843
Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
2844
Handle<JSObject> object,
2845
Handle<JSObject> last) {
2846
// ----------- S t a t e -------------
2848
// -- lr : return address
2849
// -----------------------------------
2852
// Check that receiver is not a smi.
2853
__ JumpIfSmi(r3, &miss);
2855
// Check the maps of the full prototype chain.
2856
CheckPrototypes(object, r3, last, r6, r4, r7, name, &miss);
2858
// If the last object in the prototype chain is a global object,
2859
// check that the global property cell is empty.
2860
if (last->IsGlobalObject()) {
2861
GenerateCheckPropertyCell(
2862
masm(), Handle<GlobalObject>::cast(last), name, r4, &miss);
2865
// Return undefined if maps of the full prototype chain are still the
2866
// same and no global property with this name contains a value.
2867
__ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
2871
GenerateLoadMiss(masm(), Code::LOAD_IC);
2873
// Return the generated code.
2874
return GetCode(Code::NONEXISTENT, factory()->empty_string());
2878
Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
2879
Handle<JSObject> holder,
2881
Handle<String> name) {
2882
// ----------- S t a t e -------------
2885
// -- lr : return address
2886
// -----------------------------------
2889
GenerateLoadField(object, holder, r3, r6, r4, r7, index, name, &miss);
2891
GenerateLoadMiss(masm(), Code::LOAD_IC);
2893
// Return the generated code.
2894
return GetCode(Code::FIELD, name);
2898
Handle<Code> LoadStubCompiler::CompileLoadCallback(
2899
Handle<String> name,
2900
Handle<JSObject> object,
2901
Handle<JSObject> holder,
2902
Handle<AccessorInfo> callback) {
2903
// ----------- S t a t e -------------
2906
// -- lr : return address
2907
// -----------------------------------
2909
GenerateLoadCallback(object, holder, r3, r5, r6, r4, r7, r8, callback, name,
2912
GenerateLoadMiss(masm(), Code::LOAD_IC);
2914
// Return the generated code.
2915
return GetCode(Code::CALLBACKS, name);
2920
#define __ ACCESS_MASM(masm)
2923
void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
2924
Handle<JSFunction> getter) {
2925
// ----------- S t a t e -------------
2928
// -- lr : return address
2929
// -----------------------------------
2931
FrameScope scope(masm, StackFrame::INTERNAL);
2933
if (!getter.is_null()) {
2934
// Call the JavaScript getter with the receiver on the stack.
2936
ParameterCount actual(0);
2937
__ InvokeFunction(getter, actual, CALL_FUNCTION, NullCallWrapper(),
2940
// If we generate a global code snippet for deoptimization only, remember
2941
// the place to continue after deoptimization.
2942
masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
2945
// Restore context register.
2946
__ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2953
#define __ ACCESS_MASM(masm())
2956
Handle<Code> LoadStubCompiler::CompileLoadViaGetter(
2957
Handle<String> name,
2958
Handle<JSObject> receiver,
2959
Handle<JSObject> holder,
2960
Handle<JSFunction> getter) {
2961
// ----------- S t a t e -------------
2964
// -- lr : return address
2965
// -----------------------------------
2968
// Check that the maps haven't changed.
2969
__ JumpIfSmi(r3, &miss);
2970
CheckPrototypes(receiver, r3, holder, r6, r7, r4, name, &miss);
2972
GenerateLoadViaGetter(masm(), getter);
2975
GenerateLoadMiss(masm(), Code::LOAD_IC);
2977
// Return the generated code.
2978
return GetCode(Code::CALLBACKS, name);
2982
Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
2983
Handle<JSObject> holder,
2984
Handle<JSFunction> value,
2985
Handle<String> name) {
2986
// ----------- S t a t e -------------
2989
// -- lr : return address
2990
// -----------------------------------
2993
GenerateLoadConstant(object, holder, r3, r6, r4, r7, value, name, &miss);
2995
GenerateLoadMiss(masm(), Code::LOAD_IC);
2997
// Return the generated code.
2998
return GetCode(Code::CONSTANT_FUNCTION, name);
3002
Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> object,
3003
Handle<JSObject> holder,
3004
Handle<String> name) {
3005
// ----------- S t a t e -------------
3008
// -- lr : return address
3009
// -----------------------------------
3012
LookupResult lookup(isolate());
3013
LookupPostInterceptor(holder, name, &lookup);
3014
GenerateLoadInterceptor(object, holder, &lookup, r3, r5, r6, r4, r7, name,
3017
GenerateLoadMiss(masm(), Code::LOAD_IC);
3019
// Return the generated code.
3020
return GetCode(Code::INTERCEPTOR, name);
3024
Handle<Code> LoadStubCompiler::CompileLoadGlobal(
3025
Handle<JSObject> object,
3026
Handle<GlobalObject> holder,
3027
Handle<JSGlobalPropertyCell> cell,
3028
Handle<String> name,
3029
bool is_dont_delete) {
3030
// ----------- S t a t e -------------
3033
// -- lr : return address
3034
// -----------------------------------
3037
// Check that the map of the global has not changed.
__ JumpIfSmi(r3, &miss);
CheckPrototypes(object, r3, holder, r6, r7, r4, name, &miss);
// Get the value from the cell.
__ mov(r6, Operand(cell));
__ LoadP(r7, FieldMemOperand(r6, JSGlobalPropertyCell::kValueOffset));
// Check for deleted property if property can actually be deleted.
if (!is_dont_delete) {
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
Counters* counters = masm()->isolate()->counters();
__ IncrementCounter(counters->named_load_global_stub(), 1, r4, r6);
__ IncrementCounter(counters->named_load_global_stub_miss(), 1, r4, r6);
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
return GetCode(Code::NORMAL, name);
3066
Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
3067
Handle<JSObject> receiver,
3068
Handle<JSObject> holder,
3070
// ----------- S t a t e -------------
3071
// -- lr : return address
3074
// -----------------------------------
3077
// Check the key is the cached one.
3078
__ Cmpi(r3, Operand(name), r0);
3081
GenerateLoadField(receiver, holder, r4, r5, r6, r7, index, name, &miss);
3083
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3085
return GetCode(Code::FIELD, name);
3089
Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback(
3090
Handle<String> name,
3091
Handle<JSObject> receiver,
3092
Handle<JSObject> holder,
3093
Handle<AccessorInfo> callback) {
3094
// ----------- S t a t e -------------
3095
// -- lr : return address
3098
// -----------------------------------
3101
// Check the key is the cached one.
3102
__ Cmpi(r3, Operand(name), r0);
3105
GenerateLoadCallback(receiver, holder, r4, r3, r5, r6, r7, r8, callback, name,
3108
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3110
return GetCode(Code::CALLBACKS, name);
3114
Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
3115
Handle<String> name,
3116
Handle<JSObject> receiver,
3117
Handle<JSObject> holder,
3118
Handle<JSFunction> value) {
3119
// ----------- S t a t e -------------
3120
// -- lr : return address
3123
// -----------------------------------
3126
// Check the key is the cached one.
3127
__ mov(r5, Operand(name));
3131
GenerateLoadConstant(receiver, holder, r4, r5, r6, r7, value, name, &miss);
3133
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3135
// Return the generated code.
3136
return GetCode(Code::CONSTANT_FUNCTION, name);
3140
Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor(
3141
Handle<JSObject> receiver,
3142
Handle<JSObject> holder,
3143
Handle<String> name) {
3144
// ----------- S t a t e -------------
3145
// -- lr : return address
3148
// -----------------------------------
3151
// Check the key is the cached one.
3152
__ Cmpi(r3, Operand(name), r0);
3155
LookupResult lookup(isolate());
3156
LookupPostInterceptor(holder, name, &lookup);
3157
GenerateLoadInterceptor(receiver, holder, &lookup, r4, r3, r5, r6, r7, name,
3160
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3162
return GetCode(Code::INTERCEPTOR, name);
3166
Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
3167
Handle<String> name) {
3168
// ----------- S t a t e -------------
3169
// -- lr : return address
3172
// -----------------------------------
3175
// Check the key is the cached one.
3176
__ Cmpi(r3, Operand(name), r0);
3179
GenerateLoadArrayLength(masm(), r4, r5, &miss);
3181
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3183
return GetCode(Code::CALLBACKS, name);
3187
Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
3188
Handle<String> name) {
3189
// ----------- S t a t e -------------
3190
// -- lr : return address
3193
// -----------------------------------
3196
Counters* counters = masm()->isolate()->counters();
3197
__ IncrementCounter(counters->keyed_load_string_length(), 1, r5, r6);
3199
// Check the key is the cached one.
3200
__ Cmpi(r3, Operand(name), r0);
3203
GenerateLoadStringLength(masm(), r4, r5, r6, &miss, true);
3205
__ DecrementCounter(counters->keyed_load_string_length(), 1, r5, r6);
3207
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3209
return GetCode(Code::CALLBACKS, name);
3213
Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
3214
Handle<String> name) {
3215
// ----------- S t a t e -------------
3216
// -- lr : return address
3219
// -----------------------------------
3222
Counters* counters = masm()->isolate()->counters();
3223
__ IncrementCounter(counters->keyed_load_function_prototype(), 1, r5, r6);
3225
// Check the name hasn't changed.
3226
__ Cmpi(r3, Operand(name), r0);
3229
GenerateLoadFunctionPrototype(masm(), r4, r5, r6, &miss);
3231
__ DecrementCounter(counters->keyed_load_function_prototype(), 1, r5, r6);
3232
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3234
return GetCode(Code::CALLBACKS, name);
3238
Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
3239
Handle<Map> receiver_map) {
3240
// ----------- S t a t e -------------
3241
// -- lr : return address
3244
// -----------------------------------
3245
ElementsKind elements_kind = receiver_map->elements_kind();
3246
Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode();
3248
__ DispatchMap(r4, r5, receiver_map, stub, DO_SMI_CHECK);
3250
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
3251
__ Jump(ic, RelocInfo::CODE_TARGET);
3253
// Return the generated code.
3254
return GetCode(Code::NORMAL, factory()->empty_string());
3258
Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
3259
MapHandleList* receiver_maps,
3260
CodeHandleList* handler_ics) {
3261
// ----------- S t a t e -------------
3262
// -- lr : return address
3265
// -----------------------------------
3267
__ JumpIfSmi(r4, &miss);
3269
int receiver_count = receiver_maps->length();
3270
__ LoadP(r5, FieldMemOperand(r4, HeapObject::kMapOffset));
3271
for (int current = 0; current < receiver_count; ++current) {
3273
__ mov(ip, Operand(receiver_maps->at(current)));
3276
__ Jump(handler_ics->at(current), RelocInfo::CODE_TARGET, al);
3281
Handle<Code> miss_ic = isolate()->builtins()->KeyedLoadIC_Miss();
3282
__ Jump(miss_ic, RelocInfo::CODE_TARGET, al);
3284
// Return the generated code.
3285
return GetCode(Code::NORMAL, factory()->empty_string(), MEGAMORPHIC);
3289
Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
3291
Handle<Map> transition,
3292
Handle<String> name) {
3293
// ----------- S t a t e -------------
3297
// -- lr : return address
3298
// -----------------------------------
3301
Counters* counters = masm()->isolate()->counters();
3302
__ IncrementCounter(counters->keyed_store_field(), 1, r6, r7);
3304
// Check that the name has not changed.
3305
__ Cmpi(r4, Operand(name), r0);
3308
// r6 is used as scratch register. r4 and r5 keep their values if a jump to
3309
// the miss label is generated.
3310
GenerateStoreField(masm(),
3319
__ DecrementCounter(counters->keyed_store_field(), 1, r6, r7);
3320
Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
3321
__ Jump(ic, RelocInfo::CODE_TARGET);
3323
// Return the generated code.
3324
return GetCode(transition.is_null()
3326
: Code::MAP_TRANSITION, name);
3330
Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
3331
Handle<Map> receiver_map) {
3332
// ----------- S t a t e -------------
3336
// -- lr : return address
3338
// -----------------------------------
3339
ElementsKind elements_kind = receiver_map->elements_kind();
3340
bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
3342
KeyedStoreElementStub(is_js_array, elements_kind, grow_mode_).GetCode();
3344
__ DispatchMap(r5, r6, receiver_map, stub, DO_SMI_CHECK);
3346
Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
3347
__ Jump(ic, RelocInfo::CODE_TARGET);
3349
// Return the generated code.
3350
return GetCode(Code::NORMAL, factory()->empty_string());
3354
Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
3355
MapHandleList* receiver_maps,
3356
CodeHandleList* handler_stubs,
3357
MapHandleList* transitioned_maps) {
3358
// ----------- S t a t e -------------
3362
// -- lr : return address
3364
// -----------------------------------
3366
__ JumpIfSmi(r5, &miss);
3368
int receiver_count = receiver_maps->length();
3369
__ LoadP(r6, FieldMemOperand(r5, HeapObject::kMapOffset));
3370
for (int i = 0; i < receiver_count; ++i) {
3371
__ mov(ip, Operand(receiver_maps->at(i)));
3373
if (transitioned_maps->at(i).is_null()) {
3376
__ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET);
3381
__ mov(r6, Operand(transitioned_maps->at(i)));
3382
__ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, al);
3388
Handle<Code> miss_ic = isolate()->builtins()->KeyedStoreIC_Miss();
3389
__ Jump(miss_ic, RelocInfo::CODE_TARGET, al);
3391
// Return the generated code.
3392
return GetCode(Code::NORMAL, factory()->empty_string(), MEGAMORPHIC);
3396
Handle<Code> ConstructStubCompiler::CompileConstructStub(
3397
Handle<JSFunction> function) {
3398
// ----------- S t a t e -------------
3400
// -- r4 : constructor
3401
// -- lr : return address
3402
// -- [sp] : last argument
3403
// -----------------------------------
3404
Label generic_stub_call;
3406
// Use r10 for holding undefined which is used in several places below.
3407
__ LoadRoot(r10, Heap::kUndefinedValueRootIndex);
3409
#ifdef ENABLE_DEBUGGER_SUPPORT
3410
// Check to see whether there are any break points in the function code. If
3411
// there are jump to the generic constructor stub which calls the actual
3412
// code for the function thereby hitting the break points.
3413
__ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
3414
__ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kDebugInfoOffset));
3416
__ bne(&generic_stub_call);
3419
// Load the initial map and verify that it is in fact a map.
3420
// r4: constructor function
3422
__ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
3423
__ JumpIfSmi(r5, &generic_stub_call);
3424
__ CompareObjectType(r5, r6, r7, MAP_TYPE);
3425
__ bne(&generic_stub_call);
3428
// Cannot construct functions this way.
3430
// r4: constructor function
3433
__ CompareInstanceType(r5, r6, JS_FUNCTION_TYPE);
3434
__ Check(ne, "Function constructed by construct stub.");
3437
// Now allocate the JSObject in new space.
3439
// r4: constructor function
3442
__ lbz(r6, FieldMemOperand(r5, Map::kInstanceSizeOffset));
3443
__ AllocateInNewSpace(r6, r7, r8, r9, &generic_stub_call, SIZE_IN_WORDS);
3445
// Allocated the JSObject, now initialize the fields. Map is set to initial
3446
// map and properties and elements are set to empty fixed array.
3448
// r4: constructor function
3450
// r6: object size (in words)
3451
// r7: JSObject (not tagged)
3453
__ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex);
3455
ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
3456
__ StoreP(r5, MemOperand(r8));
3457
__ addi(r8, r8, Operand(kPointerSize));
3458
ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
3459
__ StoreP(r9, MemOperand(r8));
3460
__ addi(r8, r8, Operand(kPointerSize));
3461
ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
3462
__ StoreP(r9, MemOperand(r8));
3463
__ addi(r8, r8, Operand(kPointerSize));
3465
// Calculate the location of the first argument. The stack contains only the
3467
__ ShiftLeftImm(r4, r3, Operand(kPointerSizeLog2));
3470
// Fill all the in-object properties with undefined.
3472
// r4: first argument
3473
// r6: object size (in words)
3474
// r7: JSObject (not tagged)
3475
// r8: First in-object property of JSObject (not tagged)
3477
// Fill the initialized properties with a constant value or a passed argument
3478
// depending on the this.x = ...; assignment in the function.
3479
Handle<SharedFunctionInfo> shared(function->shared());
3480
for (int i = 0; i < shared->this_property_assignments_count(); i++) {
3481
if (shared->IsThisPropertyAssignmentArgument(i)) {
Label not_passed, next;
// Check if the argument assigned to the property is actually passed.
int arg_number = shared->GetThisPropertyAssignmentArgument(i);
__ cmpi(r3, Operand(arg_number));
__ ble(&not_passed);
// Argument passed - find it on the stack.
__ LoadP(r5, MemOperand(r4, (arg_number + 1) * -kPointerSize), r0);
__ StoreP(r5, MemOperand(r8));
__ addi(r8, r8, Operand(kPointerSize));
__ bind(&not_passed);
// Set the property to undefined.
__ StoreP(r10, MemOperand(r8));
__ addi(r8, r8, Operand(kPointerSize));
3498
// Set the property to the constant value.
3499
Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
3500
__ mov(r5, Operand(constant));
3501
__ StoreP(r5, MemOperand(r8));
3502
__ addi(r8, r8, Operand(kPointerSize));
3506
// Fill the unused in-object property fields with undefined.
3507
ASSERT(function->has_initial_map());
3508
for (int i = shared->this_property_assignments_count();
3509
i < function->initial_map()->inobject_properties();
3511
__ StoreP(r10, MemOperand(r8));
3512
__ addi(r8, r8, Operand(kPointerSize));
3516
// r7: JSObject (not tagged)
3517
// Move argc to r4 and the JSObject to return to r3 and tag it.
3520
__ ori(r3, r3, Operand(kHeapObjectTag));
3524
// Remove caller arguments and receiver from the stack and return.
3525
__ ShiftLeftImm(r4, r4, Operand(kPointerSizeLog2));
3527
__ addi(sp, sp, Operand(kPointerSize));
3528
Counters* counters = masm()->isolate()->counters();
3529
__ IncrementCounter(counters->constructed_objects(), 1, r4, r5);
3530
__ IncrementCounter(counters->constructed_objects_stub(), 1, r4, r5);
3533
// Jump to the generic stub in case the specialized code cannot handle the
3535
__ bind(&generic_stub_call);
3536
Handle<Code> code = masm()->isolate()->builtins()->JSConstructStubGeneric();
3537
__ Jump(code, RelocInfo::CODE_TARGET);
3539
// Return the generated code.
3545
#define __ ACCESS_MASM(masm)
3548
void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
3549
MacroAssembler* masm) {
3550
// ---------- S t a t e --------------
3551
// -- lr : return address
3554
// -----------------------------------
3555
Label slow, miss_force_generic;
3558
Register receiver = r4;
3560
__ JumpIfNotSmi(key, &miss_force_generic);
3561
__ SmiUntag(r5, key);
3562
__ LoadP(r7, FieldMemOperand(receiver, JSObject::kElementsOffset));
3563
__ LoadFromNumberDictionary(&slow, r7, key, r3, r5, r6, r8);
3567
__ IncrementCounter(
3568
masm->isolate()->counters()->keyed_load_external_array_slow(),
3571
// ---------- S t a t e --------------
3572
// -- lr : return address
3575
// -----------------------------------
3576
Handle<Code> slow_ic =
3577
masm->isolate()->builtins()->KeyedLoadIC_Slow();
3578
__ Jump(slow_ic, RelocInfo::CODE_TARGET);
3580
// Miss case, call the runtime.
3581
__ bind(&miss_force_generic);
3583
// ---------- S t a t e --------------
3584
// -- lr : return address
3587
// -----------------------------------
3589
Handle<Code> miss_ic =
3590
masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3591
__ Jump(miss_ic, RelocInfo::CODE_TARGET);
3595
static void GenerateSmiKeyCheck(MacroAssembler* masm,
DwVfpRegister double_scratch0,
DwVfpRegister double_scratch1,
// Check for smi or a smi inside a heap number. We convert the heap
// number and check if the conversion is exact and fits into the smi
__ JumpIfSmi(key, &key_ok);
Heap::kHeapNumberMapRootIndex,
__ lfd(double_scratch0, FieldMemOperand(key, HeapNumber::kValueOffset));
__ EmitVFPTruncate(kRoundToZero,
kCheckForInexactConversion);
#if V8_TARGET_ARCH_PPC64
__ SmiTag(key, scratch0);
__ SmiTagCheckOverflow(scratch1, scratch0, r0);
__ BranchOnOverflow(fail);
__ mr(key, scratch1);
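// On 64-bit targets any 32-bit integer fits in a smi, so a plain SmiTag
// suffices; on 32-bit targets smis only carry 31 payload bits, hence the
// overflow-checked tagging with a branch to the fail label.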
3631
void KeyedLoadStubCompiler::GenerateLoadExternalArray(
3632
MacroAssembler* masm,
3633
ElementsKind elements_kind) {
3634
// ---------- S t a t e --------------
3635
// -- lr : return address
3638
// -----------------------------------
3639
Label miss_force_generic, slow, failed_allocation;
3642
Register receiver = r4;
3644
// This stub is meant to be tail-jumped to, the receiver must already
3645
// have been verified by the caller to not be a smi.
3647
// Check that the key is a smi or a heap number convertible to a smi.
3648
GenerateSmiKeyCheck(masm, key, r7, r8, d1, d2, &miss_force_generic);
3650
__ LoadP(r6, FieldMemOperand(receiver, JSObject::kElementsOffset));
3651
// r6: elements array
3653
// Check that the index is in range.
3654
__ LoadP(ip, FieldMemOperand(r6, ExternalArray::kLengthOffset));
3656
// Unsigned comparison catches both negative and too-large values.
3657
__ bge(&miss_force_generic);
3659
__ LoadP(r6, FieldMemOperand(r6, ExternalArray::kExternalPointerOffset));
3660
// r6: base pointer of external storage
3662
// We are not untagging smi key since an additional shift operation
3663
// may be required to compute the array element's offset.
3665
Register value = r5;
3666
switch (elements_kind) {
3667
case EXTERNAL_BYTE_ELEMENTS:
3668
__ SmiToByteArrayOffset(value, key);
3669
__ lbzx(value, MemOperand(r6, value));
3670
__ extsb(value, value);
3672
case EXTERNAL_PIXEL_ELEMENTS:
3673
case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3674
__ SmiToByteArrayOffset(value, key);
3675
__ lbzx(value, MemOperand(r6, value));
3677
case EXTERNAL_SHORT_ELEMENTS:
3678
__ SmiToShortArrayOffset(value, key);
3679
__ lhzx(value, MemOperand(r6, value));
3680
__ extsh(value, value);
3682
case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3683
__ SmiToShortArrayOffset(value, key);
3684
__ lhzx(value, MemOperand(r6, value));
3686
case EXTERNAL_INT_ELEMENTS:
__ SmiToIntArrayOffset(value, key);
__ lwzx(value, MemOperand(r6, value));
#if V8_TARGET_ARCH_PPC64
__ extsw(value, value);
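// lwzx zero-extends the loaded word on 64-bit targets, so sign-extend it
// here to obtain a proper signed 32-bit value in the 64-bit register.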
3693
case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3694
__ SmiToIntArrayOffset(value, key);
3695
__ lwzx(value, MemOperand(r6, value));
3697
case EXTERNAL_FLOAT_ELEMENTS:
3698
__ SmiToFloatArrayOffset(value, key);
3699
__ lfsx(d0, MemOperand(r6, value));
3701
case EXTERNAL_DOUBLE_ELEMENTS:
3702
__ SmiToDoubleArrayOffset(value, key);
3703
__ lfdx(d0, MemOperand(r6, value));
3706
case FAST_SMI_ELEMENTS:
3707
case FAST_DOUBLE_ELEMENTS:
3708
case FAST_HOLEY_ELEMENTS:
3709
case FAST_HOLEY_SMI_ELEMENTS:
3710
case FAST_HOLEY_DOUBLE_ELEMENTS:
3711
case DICTIONARY_ELEMENTS:
3712
case NON_STRICT_ARGUMENTS_ELEMENTS:
3717
// For integer array types:
3719
// For float array type:
3721
// For double array type:
3724
if (elements_kind == EXTERNAL_INT_ELEMENTS) {
3725
// For the Int and UnsignedInt array types, we need to see whether
3726
// the value can be represented in a Smi. If not, we need to convert
3727
// it to a HeapNumber.
3728
#if !V8_TARGET_ARCH_PPC64
3730
// Check that the value fits in a smi.
3731
__ JumpIfNotSmiCandidate(value, r0, &box_int);
3733
// Tag integer as smi and return it.
3734
__ SmiTag(r3, value);
3737
#if !V8_TARGET_ARCH_PPC64
3739
// Allocate a HeapNumber for the result and perform int-to-double
3740
// conversion. Don't touch r3 or r4 as they are needed if allocation
3742
__ LoadRoot(r9, Heap::kHeapNumberMapRootIndex);
3743
__ AllocateHeapNumber(r8, r6, r7, r9, &slow);
3744
// Now we can use r3 for the result as key is not needed any more.
3747
FloatingPointHelper::ConvertIntToDouble(
3749
__ stfd(d0, FieldMemOperand(r3, HeapNumber::kValueOffset));
3752
} else if (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) {
3753
// The test is different for unsigned int values. Since we need
3754
// the value to be in the range of a positive smi, we can't
3755
// handle any of the high bits being set in the value.
3757
__ JumpIfNotUnsignedSmiCandidate(value, r0, &box_int);
3759
// Tag integer as smi and return it.
3760
__ SmiTag(r3, value);
3764
// Allocate a HeapNumber for the result and perform int-to-double
3765
// conversion. Don't use r3 and r4 as AllocateHeapNumber clobbers all
3766
// registers - also when jumping due to exhausted young space.
3767
__ LoadRoot(r9, Heap::kHeapNumberMapRootIndex);
3768
__ AllocateHeapNumber(r8, r6, r7, r9, &slow);
3771
FloatingPointHelper::ConvertUnsignedIntToDouble(
3773
__ stfd(d0, FieldMemOperand(r3, HeapNumber::kValueOffset));
3776
} else if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3777
// For the floating-point array type, we need to always allocate a
3779
// Allocate a HeapNumber for the result. Don't use r3 and r4 as
3780
// AllocateHeapNumber clobbers all registers - also when jumping due to
3781
// exhausted young space.
3782
__ LoadRoot(r9, Heap::kHeapNumberMapRootIndex);
3783
__ AllocateHeapNumber(r5, r6, r7, r9, &slow);
3784
__ stfd(d0, FieldMemOperand(r5, HeapNumber::kValueOffset));
3788
} else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3789
// Allocate a HeapNumber for the result. Don't use r3 and r4 as
3790
// AllocateHeapNumber clobbers all registers - also when jumping due to
3791
// exhausted young space.
3792
__ LoadRoot(r9, Heap::kHeapNumberMapRootIndex);
3793
__ AllocateHeapNumber(r5, r6, r7, r9, &slow);
3794
__ stfd(d0, FieldMemOperand(r5, HeapNumber::kValueOffset));
3799
// Tag integer as smi and return it.
3800
__ SmiTag(r3, value);
3804
// Slow case, key and receiver still in r3 and r4.
3806
__ IncrementCounter(
3807
masm->isolate()->counters()->keyed_load_external_array_slow(),
3810
// ---------- S t a t e --------------
3811
// -- lr : return address
3814
// -----------------------------------
3818
__ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
3820
__ bind(&miss_force_generic);
3822
masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3823
__ Jump(stub, RelocInfo::CODE_TARGET);
3827
void KeyedStoreStubCompiler::GenerateStoreExternalArray(
3828
MacroAssembler* masm,
3829
ElementsKind elements_kind) {
3830
// ---------- S t a t e --------------
3834
// -- lr : return address
3835
// -----------------------------------
3836
Label slow, check_heap_number, miss_force_generic;
3839
Register value = r3;
3841
Register receiver = r5;
3842
// r6 mostly holds the elements array or the destination external array.
3844
// This stub is meant to be tail-jumped to, the receiver must already
3845
// have been verified by the caller to not be a smi.
3847
// Check that the key is a smi or a heap number convertible to a smi.
3848
GenerateSmiKeyCheck(masm, key, r7, r8, d1, d2, &miss_force_generic);
3850
__ LoadP(r6, FieldMemOperand(receiver, JSObject::kElementsOffset));
3852
// Check that the index is in range
3853
__ LoadP(ip, FieldMemOperand(r6, ExternalArray::kLengthOffset));
3855
// Unsigned comparison catches both negative and too-large values.
3856
__ bge(&miss_force_generic);
3858
// Handle both smis and HeapNumbers in the fast path. Go to the
3859
// runtime for all other kinds of values.
3860
// r6: external array.
3861
if (elements_kind == EXTERNAL_PIXEL_ELEMENTS) {
3862
// Double to pixel conversion is only implemented in the runtime for now.
3863
__ JumpIfNotSmi(value, &slow);
3865
__ JumpIfNotSmi(value, &check_heap_number);
3867
__ SmiUntag(r8, value);
3868
__ LoadP(r6, FieldMemOperand(r6, ExternalArray::kExternalPointerOffset));
3870
// r6: base pointer of external storage.
3871
// r8: value (integer).
3872
// r10: scratch register
3873
switch (elements_kind) {
3874
case EXTERNAL_PIXEL_ELEMENTS:
3875
// Clamp the value to [0..255].
3876
__ ClampUint8(r8, r8);
3877
__ SmiToByteArrayOffset(r10, key);
3878
__ stbx(r8, MemOperand(r6, r10));
3880
case EXTERNAL_BYTE_ELEMENTS:
3881
case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3882
__ SmiToByteArrayOffset(r10, key);
3883
__ stbx(r8, MemOperand(r6, r10));
3885
case EXTERNAL_SHORT_ELEMENTS:
3886
case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3887
__ SmiToShortArrayOffset(r10, key);
3888
__ sthx(r8, MemOperand(r6, r10));
3890
case EXTERNAL_INT_ELEMENTS:
3891
case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3892
__ SmiToIntArrayOffset(r10, key);
3893
__ stwx(r8, MemOperand(r6, r10));
3895
case EXTERNAL_FLOAT_ELEMENTS:
// Perform int-to-float conversion and store to memory.
__ SmiToFloatArrayOffset(r10, key);
// r10: effective address of the float element
FloatingPointHelper::ConvertIntToFloat(masm, d0, r8, r9);
__ stfsx(d0, MemOperand(r6, r10));
case EXTERNAL_DOUBLE_ELEMENTS:
__ SmiToDoubleArrayOffset(r10, key);
// __ add(r6, r6, r10);
// (r6, r10): effective address of the double element
FloatingPointHelper::ConvertIntToDouble(
__ stfdx(d0, MemOperand(r6, r10));
3911
case FAST_SMI_ELEMENTS:
3912
case FAST_DOUBLE_ELEMENTS:
3913
case FAST_HOLEY_ELEMENTS:
3914
case FAST_HOLEY_SMI_ELEMENTS:
3915
case FAST_HOLEY_DOUBLE_ELEMENTS:
3916
case DICTIONARY_ELEMENTS:
3917
case NON_STRICT_ARGUMENTS_ELEMENTS:
3922
// Entry registers are intact, r3 holds the value which is the return value.
3925
if (elements_kind != EXTERNAL_PIXEL_ELEMENTS) {
3926
// r6: external array.
3927
__ bind(&check_heap_number);
3928
__ CompareObjectType(value, r8, r9, HEAP_NUMBER_TYPE);
3931
__ LoadP(r6, FieldMemOperand(r6, ExternalArray::kExternalPointerOffset));
3933
// r6: base pointer of external storage.
3935
// The WebGL specification leaves the behavior of storing NaN and
3936
// +/-Infinity into integer arrays basically undefined. For more
3937
// reproducible behavior, convert these to zero.
3939
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3940
__ lfd(d0, FieldMemOperand(r3, HeapNumber::kValueOffset));
3941
__ SmiToFloatArrayOffset(r8, key);
3943
__ stfsx(d0, MemOperand(r6, r8));
3944
} else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3945
__ lfd(d0, FieldMemOperand(r3, HeapNumber::kValueOffset));
3946
__ SmiToDoubleArrayOffset(r8, key);
3947
__ stfdx(d0, MemOperand(r6, r8));
3951
__ lfd(d0, FieldMemOperand(r8, HeapNumber::kValueOffset));
__ EmitECMATruncate(r8, d0, d1, r10, r7, r9);
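// EmitECMATruncate converts the double with ECMA-262 ToInt32 semantics
// (modulo 2^32), so NaN and +/-Infinity presumably end up as zero,
// matching the note above about keeping the behavior reproducible.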
3954
switch (elements_kind) {
3955
case EXTERNAL_BYTE_ELEMENTS:
3956
case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3957
__ SmiToByteArrayOffset(r10, key);
3958
__ stbx(r8, MemOperand(r6, r10));
3960
case EXTERNAL_SHORT_ELEMENTS:
3961
case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3962
__ SmiToShortArrayOffset(r10, key);
3963
__ sthx(r8, MemOperand(r6, r10));
3965
case EXTERNAL_INT_ELEMENTS:
3966
case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3967
__ SmiToIntArrayOffset(r10, key);
3968
__ stwx(r8, MemOperand(r6, r10));
3970
case EXTERNAL_PIXEL_ELEMENTS:
3971
case EXTERNAL_FLOAT_ELEMENTS:
3972
case EXTERNAL_DOUBLE_ELEMENTS:
3974
case FAST_SMI_ELEMENTS:
3975
case FAST_DOUBLE_ELEMENTS:
3976
case FAST_HOLEY_ELEMENTS:
3977
case FAST_HOLEY_SMI_ELEMENTS:
3978
case FAST_HOLEY_DOUBLE_ELEMENTS:
3979
case DICTIONARY_ELEMENTS:
3980
case NON_STRICT_ARGUMENTS_ELEMENTS:
3986
// Entry registers are intact, r3 holds the value which is the return

  // Slow case, key and receiver still in r3 and r4.
  __ bind(&slow);
  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),

  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r3     : key
  //  -- r4     : receiver
  // -----------------------------------
  Handle<Code> slow_ic =
      masm->isolate()->builtins()->KeyedStoreIC_Slow();
  __ Jump(slow_ic, RelocInfo::CODE_TARGET);

  // Miss case, call the runtime.
  __ bind(&miss_force_generic);

  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r3     : key
  //  -- r4     : receiver
  // -----------------------------------
  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
}


void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r3    : key
  //  -- r4    : receiver
  // -----------------------------------
  Label miss_force_generic;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

  // Check that the key is a smi or a heap number convertible to a smi.
  GenerateSmiKeyCheck(masm, r3, r7, r8, d1, d2, &miss_force_generic);

  // Get the elements array.
  __ LoadP(r5, FieldMemOperand(r4, JSObject::kElementsOffset));
  __ AssertFastElements(r5);

  // Check that the key is within bounds.
  __ LoadP(r6, FieldMemOperand(r5, FixedArray::kLengthOffset));
  __ cmpl(r3, r6);
  __ bge(&miss_force_generic);

  // Load the result and make sure it's not the hole.
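  // A hole means the element was never written (holey array); defer to the
  // generic stub, which knows how to consult the prototype chain.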
  __ addi(r6, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ SmiToPtrArrayOffset(r7, r3);
  __ LoadPX(r7, MemOperand(r7, r6));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(r7, ip);
  __ beq(&miss_force_generic);
  __ mr(r3, r7);
  __ Ret();

  __ bind(&miss_force_generic);
  Handle<Code> stub =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ Jump(stub, RelocInfo::CODE_TARGET);
}
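

// Fast-double arrays store unboxed doubles, so a keyed load has to box the
// element in a freshly allocated HeapNumber before returning it in r3.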
void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r3    : key
  //  -- r4    : receiver
  // -----------------------------------
  Label miss_force_generic, slow_allocate_heapnumber;

  Register key_reg = r3;
  Register receiver_reg = r4;
  Register elements_reg = r5;
  Register heap_number_reg = r5;
  Register indexed_double_offset = r6;
  Register scratch = r7;
  Register scratch2 = r8;
  Register scratch3 = r9;
  Register heap_number_map = r10;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

  // Check that the key is a smi or a heap number convertible to a smi.
  GenerateSmiKeyCheck(masm, key_reg, r7, r8, d1, d2, &miss_force_generic);

  // Get the elements array.
  __ LoadP(elements_reg,
           FieldMemOperand(receiver_reg, JSObject::kElementsOffset));

  // Check that the key is within bounds.
  __ LoadP(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
  __ cmpl(key_reg, scratch);
  __ bge(&miss_force_generic);

  // Load the upper word of the double in the fixed array and test for NaN.
  __ SmiToDoubleArrayOffset(indexed_double_offset, key_reg);
  __ add(indexed_double_offset, elements_reg, indexed_double_offset);
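  // The hole in a FixedDoubleArray is encoded as a NaN with a fixed bit
  // pattern (kHoleNanUpper32 / kHoleNanLower32), so checking the upper word
  // is enough to tell it apart from an ordinary double value.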
#if __FLOAT_WORD_ORDER == __LITTLE_ENDIAN
  uint32_t upper_32_offset = FixedArray::kHeaderSize + sizeof(kHoleNanLower32);
#else
  uint32_t upper_32_offset = FixedArray::kHeaderSize;
#endif
  __ lwz(scratch, FieldMemOperand(indexed_double_offset, upper_32_offset));
  __ Cmpi(scratch, Operand(kHoleNanUpper32), r0);
  __ beq(&miss_force_generic);

  // Non-NaN. Allocate a new heap number and copy the double value into it.
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(heap_number_reg, scratch2, scratch3,
                        heap_number_map, &slow_allocate_heapnumber);

  // Don't need to reload the upper 32 bits of the double, it's already in
  // scratch.
  __ stw(scratch, FieldMemOperand(heap_number_reg,
                                  HeapNumber::kExponentOffset));
#if __FLOAT_WORD_ORDER == __LITTLE_ENDIAN
  __ lwz(scratch, FieldMemOperand(indexed_double_offset,
                                  FixedArray::kHeaderSize));
#else
  __ lwz(scratch, FieldMemOperand(indexed_double_offset,
                                  FixedArray::kHeaderSize + 4));
#endif
  __ stw(scratch, FieldMemOperand(heap_number_reg,
                                  HeapNumber::kMantissaOffset));

  __ mr(r3, heap_number_reg);
  __ Ret();

  __ bind(&slow_allocate_heapnumber);
  Handle<Code> slow_ic =
      masm->isolate()->builtins()->KeyedLoadIC_Slow();
  __ Jump(slow_ic, RelocInfo::CODE_TARGET);

  __ bind(&miss_force_generic);
  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
}


void KeyedStoreStubCompiler::GenerateStoreFastElement(
    MacroAssembler* masm,
    bool is_js_array,
    ElementsKind elements_kind,
    KeyedAccessGrowMode grow_mode) {
  // ----------- S t a t e -------------
  //  -- r3    : value
  //  -- r4    : key
  //  -- r5    : receiver
  //  -- lr    : return address
  //  -- r7    : scratch (elements)
  // -----------------------------------
  Label miss_force_generic, transition_elements_kind, grow, slow;
  Label finish_store, check_capacity;

  Register value_reg = r3;
  Register key_reg = r4;
  Register receiver_reg = r5;
  Register scratch = r7;
  Register elements_reg = r6;
  Register length_reg = r8;
  Register scratch2 = r9;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

  // Check that the key is a smi or a heap number convertible to a smi.
  GenerateSmiKeyCheck(masm, key_reg, r7, r8, d1, d2, &miss_force_generic);

  if (IsFastSmiElementsKind(elements_kind)) {
    __ JumpIfNotSmi(value_reg, &transition_elements_kind);
  }

  // Check that the key is within bounds.
  __ LoadP(elements_reg,
           FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
  if (is_js_array) {
    __ LoadP(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
  } else {
    __ LoadP(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
  }
  __ cmpl(key_reg, scratch);
  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
    __ bge(&grow);
  } else {
    __ bge(&miss_force_generic);
  }

  // Make sure elements is a fast element array, not 'cow'.
  __ CheckMap(elements_reg,
              scratch,
              Heap::kFixedArrayMapRootIndex,
              &miss_force_generic,
              DONT_DO_SMI_CHECK);

  __ bind(&finish_store);
  if (IsFastSmiElementsKind(elements_kind)) {
    __ addi(scratch,
            elements_reg,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ SmiToPtrArrayOffset(scratch2, key_reg);
    __ StorePX(value_reg, MemOperand(scratch, scratch2));
  } else {
    ASSERT(IsFastObjectElementsKind(elements_kind));
    __ addi(scratch,
            elements_reg,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ SmiToPtrArrayOffset(scratch2, key_reg);
    __ StorePUX(value_reg, MemOperand(scratch, scratch2));
    __ mr(receiver_reg, value_reg);
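    // Storing a heap object into the backing store requires a write barrier
    // so the garbage collector sees the new reference; the smi path above
    // can skip it.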
    __ RecordWrite(elements_reg,  // Object.
                   scratch,       // Address.
                   receiver_reg,  // Value.
                   kLRHasNotBeenSaved,
                   kDontSaveFPRegs);
  }
  // value_reg (r3) is preserved.
  __ Ret();

  __ bind(&miss_force_generic);
  Handle<Code> ic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  __ bind(&transition_elements_kind);
  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(ic_miss, RelocInfo::CODE_TARGET);

  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
    // Grow the array by a single element if possible.
    __ bind(&grow);

    // Make sure the array is only growing by a single element, anything else
    // must be handled by the runtime. Flags already set by previous compare.
    __ bne(&miss_force_generic);

    // Check for the empty array, and preallocate a small backing store if
    // possible.
    __ LoadP(length_reg,
             FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
    __ LoadP(elements_reg,
             FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
    __ CompareRoot(elements_reg, Heap::kEmptyFixedArrayRootIndex);
    __ bne(&check_capacity);

    int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
    __ AllocateInNewSpace(size, elements_reg, scratch, scratch2, &slow,
                          TAG_OBJECT);

    __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex);
    __ StoreP(scratch, FieldMemOperand(elements_reg, JSObject::kMapOffset), r0);
    __ LoadSmiLiteral(scratch,
                      Smi::FromInt(JSArray::kPreallocatedArrayElements));
    __ StoreP(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset),
              r0);
    __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
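    // Fill the remaining preallocated slots with the hole so they read as
    // missing elements until they are written.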
    for (int i = 1; i < JSArray::kPreallocatedArrayElements; ++i) {
      __ StoreP(scratch,
                FieldMemOperand(elements_reg, FixedArray::SizeFor(i)), r0);
    }

    // Store the element at index zero.
    __ StoreP(value_reg, FieldMemOperand(elements_reg, FixedArray::SizeFor(0)),
              r0);

    // Install the new backing store in the JSArray.
    __ StoreP(elements_reg,
              FieldMemOperand(receiver_reg, JSObject::kElementsOffset), r0);
    __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
                        scratch, kLRHasNotBeenSaved, kDontSaveFPRegs,
                        EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

    // Increment the length of the array.
    __ LoadSmiLiteral(length_reg, Smi::FromInt(1));
    __ StoreP(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset),
              r0);
    __ Ret();

    __ bind(&check_capacity);
    // Check for cow elements, in general they are not handled by this stub.
    __ CheckMap(elements_reg,
                scratch,
                Heap::kFixedCOWArrayMapRootIndex,
                &miss_force_generic,
                DONT_DO_SMI_CHECK);

    __ LoadP(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
    __ cmpl(length_reg, scratch);
    __ bge(&slow);

    // Grow the array and finish the store.
    __ AddSmiLiteral(length_reg, length_reg, Smi::FromInt(1), r0);
    __ StoreP(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset),
              r0);
    __ b(&finish_store);

    __ bind(&slow);
    Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
    __ Jump(ic_slow, RelocInfo::CODE_TARGET);
  }
}


void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
    MacroAssembler* masm,
    bool is_js_array,
    KeyedAccessGrowMode grow_mode) {
  // ----------- S t a t e -------------
  //  -- r3    : value
  //  -- r4    : key
  //  -- r5    : receiver
  //  -- lr    : return address
  // -----------------------------------
  Label miss_force_generic, transition_elements_kind, grow, slow;
  Label finish_store, check_capacity;

  Register value_reg = r3;
  Register key_reg = r4;
  Register receiver_reg = r5;
  Register elements_reg = r6;
  Register scratch1 = r7;
  Register scratch2 = r8;
  Register scratch3 = r9;
  Register scratch4 = r10;
  Register length_reg = r10;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

  // Check that the key is a smi or a heap number convertible to a smi.
  GenerateSmiKeyCheck(masm, key_reg, r7, r8, d1, d2, &miss_force_generic);

  __ LoadP(elements_reg,
           FieldMemOperand(receiver_reg, JSObject::kElementsOffset));

  // Check that the key is within bounds.
  if (is_js_array) {
    __ LoadP(scratch1, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
  } else {
    __ LoadP(scratch1,
             FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
  }
  // Compare smis, unsigned compare catches both negative and out-of-bound
  // indexes.
  __ cmpl(key_reg, scratch1);
  if (grow_mode == ALLOW_JSARRAY_GROWTH) {
    __ bge(&grow);
  } else {
    __ bge(&miss_force_generic);
  }

  __ bind(&finish_store);
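  // StoreNumberToDoubleElements unboxes the value (smi or HeapNumber) into a
  // raw double and writes it into the FixedDoubleArray backing store; values
  // that are not numbers take the transition_elements_kind exit instead.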
  __ StoreNumberToDoubleElements(value_reg,
                                 key_reg,
                                 receiver_reg,
                                 // All registers after this are overwritten.
                                 elements_reg,
                                 scratch1,
                                 scratch2,
                                 scratch3,
                                 scratch4,
                                 &transition_elements_kind);
  __ Ret();

  // Handle store cache miss, replacing the ic with the generic stub.
  __ bind(&miss_force_generic);
  Handle<Code> ic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  __ bind(&transition_elements_kind);
  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(ic_miss, RelocInfo::CODE_TARGET);

  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
    // Grow the array by a single element if possible.
    __ bind(&grow);

    // Make sure the array is only growing by a single element, anything else
    // must be handled by the runtime. Flags already set by previous compare.
    __ bne(&miss_force_generic);

    // Transition on values that can't be stored in a FixedDoubleArray.
    Label value_is_smi;
    __ JumpIfSmi(value_reg, &value_is_smi);
    __ LoadP(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
    __ CompareRoot(scratch1, Heap::kHeapNumberMapRootIndex);
    __ bne(&transition_elements_kind);
    __ bind(&value_is_smi);

    // Check for the empty array, and preallocate a small backing store if
    // possible.
    __ LoadP(length_reg,
             FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
    __ LoadP(elements_reg,
             FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
    __ CompareRoot(elements_reg, Heap::kEmptyFixedArrayRootIndex);
    __ bne(&check_capacity);

    int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
    __ AllocateInNewSpace(size, elements_reg, scratch1, scratch2, &slow,
                          TAG_OBJECT);

    // Initialize the new FixedDoubleArray. Leave elements uninitialized for
    // efficiency, they are guaranteed to be initialized before use.
    __ LoadRoot(scratch1, Heap::kFixedDoubleArrayMapRootIndex);
    __ StoreP(scratch1, FieldMemOperand(elements_reg, JSObject::kMapOffset),
              r0);
    __ LoadSmiLiteral(scratch1,
                      Smi::FromInt(JSArray::kPreallocatedArrayElements));
    __ StoreP(scratch1,
              FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset),
              r0);

    // Install the new backing store in the JSArray.
    __ StoreP(elements_reg,
              FieldMemOperand(receiver_reg, JSObject::kElementsOffset), r0);
    __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
                        scratch1, kLRHasNotBeenSaved, kDontSaveFPRegs,
                        EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

    // Increment the length of the array.
    __ LoadSmiLiteral(length_reg, Smi::FromInt(1));
    __ StoreP(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset),
              r0);
    __ LoadP(elements_reg,
             FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
    __ b(&finish_store);

    __ bind(&check_capacity);
    // Make sure that the backing store can hold additional elements.
    __ LoadP(scratch1,
             FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset));
    __ cmpl(length_reg, scratch1);
    __ bge(&slow);

    // Grow the array and finish the store.
    __ AddSmiLiteral(length_reg, length_reg, Smi::FromInt(1), r0);
    __ StoreP(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset),
              r0);
    __ b(&finish_store);

    __ bind(&slow);
    Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
    __ Jump(ic_slow, RelocInfo::CODE_TARGET);
  }
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_PPC