// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31
#include "arguments.h"
32
#include "bootstrapper.h"
36
#include "heap-profiler.h"
37
#include "mark-compact.h"
38
#include "vm-state-inl.h"
45
// Arguments object passed to C++ builtins.
46
template <BuiltinExtraArguments extra_args>
47
class BuiltinArguments : public Arguments {
49
BuiltinArguments(int length, Object** arguments)
50
: Arguments(length, arguments) { }
52
Object*& operator[] (int index) {
53
ASSERT(index < length());
54
return Arguments::operator[](index);
57
template <class S> Handle<S> at(int index) {
58
ASSERT(index < length());
59
return Arguments::at<S>(index);
62
Handle<Object> receiver() {
63
return Arguments::at<Object>(0);
66
Handle<JSFunction> called_function() {
67
STATIC_ASSERT(extra_args == NEEDS_CALLED_FUNCTION);
68
return Arguments::at<JSFunction>(Arguments::length() - 1);
71
// Gets the total number of arguments including the receiver (but
72
// excluding extra arguments).
74
STATIC_ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
75
return Arguments::length();
80
// Check we have at least the receiver.
81
ASSERT(Arguments::length() >= 1);
87
// Specialize BuiltinArguments for the called function extra argument.
90
int BuiltinArguments<NEEDS_CALLED_FUNCTION>::length() const {
91
return Arguments::length() - 1;
96
void BuiltinArguments<NEEDS_CALLED_FUNCTION>::Verify() {
97
// Check we have at least the receiver and the called function.
98
ASSERT(Arguments::length() >= 2);
99
// Make sure cast to JSFunction succeeds.
105
#define DEF_ARG_TYPE(name, spec) \
106
typedef BuiltinArguments<spec> name##ArgumentsType;
107
BUILTIN_LIST_C(DEF_ARG_TYPE)
112
// ----------------------------------------------------------------------------
113
// Support macro for defining builtins in C++.
114
// ----------------------------------------------------------------------------
116
// A builtin function is defined by writing:
122
// In the body of the builtin function the arguments can be accessed
123
// through the BuiltinArguments object args.
127
#define BUILTIN(name) \
128
MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name( \
129
name##ArgumentsType args, Isolate* isolate); \
130
MUST_USE_RESULT static MaybeObject* Builtin_##name( \
131
name##ArgumentsType args, Isolate* isolate) { \
132
ASSERT(isolate == Isolate::Current()); \
134
return Builtin_Impl_##name(args, isolate); \
136
MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name( \
137
name##ArgumentsType args, Isolate* isolate)
139
#else // For release mode.
141
#define BUILTIN(name) \
142
static MaybeObject* Builtin_##name(name##ArgumentsType args, Isolate* isolate)
147
static inline bool CalledAsConstructor(Isolate* isolate) {
149
// Calculate the result using a full stack frame iterator and check
150
// that the state of the stack is as we assume it to be in the
152
StackFrameIterator it;
153
ASSERT(it.frame()->is_exit());
155
StackFrame* frame = it.frame();
156
bool reference_result = frame->is_construct();
158
Address fp = Isolate::c_entry_fp(isolate->thread_local_top());
159
// Because we know fp points to an exit frame we can use the relevant
160
// part of ExitFrame::ComputeCallerState directly.
161
const int kCallerOffset = ExitFrameConstants::kCallerFPOffset;
162
Address caller_fp = Memory::Address_at(fp + kCallerOffset);
163
// This inlines the part of StackFrame::ComputeType that grabs the
164
// type of the current frame. Note that StackFrame::ComputeType
165
// has been specialized for each architecture so if any one of them
166
// changes this code has to be changed as well.
167
const int kMarkerOffset = StandardFrameConstants::kMarkerOffset;
168
const Smi* kConstructMarker = Smi::FromInt(StackFrame::CONSTRUCT);
169
Object* marker = Memory::Object_at(caller_fp + kMarkerOffset);
170
bool result = (marker == kConstructMarker);
171
ASSERT_EQ(result, reference_result);
175
// ----------------------------------------------------------------------------
179
return isolate->heap()->undefined_value(); // Make compiler happy.
183
BUILTIN(EmptyFunction) {
184
return isolate->heap()->undefined_value();
188
static MaybeObject* ArrayCodeGenericCommon(Arguments* args,
190
JSFunction* constructor) {
191
Heap* heap = isolate->heap();
192
isolate->counters()->array_function_runtime()->Increment();
195
if (CalledAsConstructor(isolate)) {
196
array = JSArray::cast((*args)[0]);
197
// Initialize elements and length in case later allocations fail so that the
198
// array object is initialized in a valid state.
199
array->set_length(Smi::FromInt(0));
200
array->set_elements(heap->empty_fixed_array());
201
if (!FLAG_smi_only_arrays) {
202
Context* global_context = isolate->context()->global_context();
203
if (array->GetElementsKind() == GetInitialFastElementsKind() &&
204
!global_context->js_array_maps()->IsUndefined()) {
205
FixedArray* map_array =
206
FixedArray::cast(global_context->js_array_maps());
207
array->set_map(Map::cast(map_array->
208
get(TERMINAL_FAST_ELEMENTS_KIND)));
212
// Allocate the JS Array
213
MaybeObject* maybe_obj = heap->AllocateJSObject(constructor);
214
if (!maybe_obj->To(&array)) return maybe_obj;
217
// Optimize the case where there is one argument and the argument is a
219
if (args->length() == 2) {
220
Object* obj = (*args)[1];
222
int len = Smi::cast(obj)->value();
223
if (len >= 0 && len < JSObject::kInitialMaxFastElementArray) {
225
{ MaybeObject* maybe_obj = heap->AllocateFixedArrayWithHoles(len);
226
if (!maybe_obj->ToObject(&fixed_array)) return maybe_obj;
228
ElementsKind elements_kind = array->GetElementsKind();
229
if (!IsFastHoleyElementsKind(elements_kind)) {
230
elements_kind = GetHoleyElementsKind(elements_kind);
231
MaybeObject* maybe_array =
232
array->TransitionElementsKind(elements_kind);
233
if (maybe_array->IsFailure()) return maybe_array;
235
// We do not use SetContent to skip the unnecessary elements type check.
236
array->set_elements(FixedArray::cast(fixed_array));
237
array->set_length(Smi::cast(obj));
241
// Take the argument as the length.
242
{ MaybeObject* maybe_obj = array->Initialize(0);
243
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
245
return array->SetElementsLength((*args)[1]);
248
// Optimize the case where there are no parameters passed.
249
if (args->length() == 1) {
250
return array->Initialize(JSArray::kPreallocatedArrayElements);
253
// Set length and elements on the array.
254
int number_of_elements = args->length() - 1;
255
MaybeObject* maybe_object =
256
array->EnsureCanContainElements(args, 1, number_of_elements,
257
ALLOW_CONVERTED_DOUBLE_ELEMENTS);
258
if (maybe_object->IsFailure()) return maybe_object;
260
// Allocate an appropriately typed elements array.
261
MaybeObject* maybe_elms;
262
ElementsKind elements_kind = array->GetElementsKind();
263
if (IsFastDoubleElementsKind(elements_kind)) {
264
maybe_elms = heap->AllocateUninitializedFixedDoubleArray(
267
maybe_elms = heap->AllocateFixedArrayWithHoles(number_of_elements);
269
FixedArrayBase* elms;
270
if (!maybe_elms->To<FixedArrayBase>(&elms)) return maybe_elms;
272
// Fill in the content
273
switch (array->GetElementsKind()) {
274
case FAST_HOLEY_SMI_ELEMENTS:
275
case FAST_SMI_ELEMENTS: {
276
FixedArray* smi_elms = FixedArray::cast(elms);
277
for (int index = 0; index < number_of_elements; index++) {
278
smi_elms->set(index, (*args)[index+1], SKIP_WRITE_BARRIER);
282
case FAST_HOLEY_ELEMENTS:
283
case FAST_ELEMENTS: {
284
AssertNoAllocation no_gc;
285
WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
286
FixedArray* object_elms = FixedArray::cast(elms);
287
for (int index = 0; index < number_of_elements; index++) {
288
object_elms->set(index, (*args)[index+1], mode);
292
case FAST_HOLEY_DOUBLE_ELEMENTS:
293
case FAST_DOUBLE_ELEMENTS: {
294
FixedDoubleArray* double_elms = FixedDoubleArray::cast(elms);
295
for (int index = 0; index < number_of_elements; index++) {
296
double_elms->set(index, (*args)[index+1]->Number());
305
array->set_elements(elms);
306
array->set_length(Smi::FromInt(number_of_elements));
311
BUILTIN(InternalArrayCodeGeneric) {
312
return ArrayCodeGenericCommon(
315
isolate->context()->global_context()->internal_array_function());
319
BUILTIN(ArrayCodeGeneric) {
320
return ArrayCodeGenericCommon(
323
isolate->context()->global_context()->array_function());
327
static void MoveElements(Heap* heap,
328
AssertNoAllocation* no_gc,
334
if (len == 0) return;
335
ASSERT(dst->map() != HEAP->fixed_cow_array_map());
336
memmove(dst->data_start() + dst_index,
337
src->data_start() + src_index,
339
WriteBarrierMode mode = dst->GetWriteBarrierMode(*no_gc);
340
if (mode == UPDATE_WRITE_BARRIER) {
341
heap->RecordWrites(dst->address(), dst->OffsetOfElementAt(dst_index), len);
343
heap->incremental_marking()->RecordWrites(dst);
347
static void FillWithHoles(Heap* heap, FixedArray* dst, int from, int to) {
348
ASSERT(dst->map() != heap->fixed_cow_array_map());
349
MemsetPointer(dst->data_start() + from, heap->the_hole_value(), to - from);
353
static FixedArray* LeftTrimFixedArray(Heap* heap,
356
ASSERT(elms->map() != HEAP->fixed_cow_array_map());
357
// For now this trick is only applied to fixed arrays in new and paged space.
358
// In large object space the object's start must coincide with chunk
359
// and thus the trick is just not applicable.
360
ASSERT(!HEAP->lo_space()->Contains(elms));
362
STATIC_ASSERT(FixedArray::kMapOffset == 0);
363
STATIC_ASSERT(FixedArray::kLengthOffset == kPointerSize);
364
STATIC_ASSERT(FixedArray::kHeaderSize == 2 * kPointerSize);
366
Object** former_start = HeapObject::RawField(elms, 0);
368
const int len = elms->length();
370
if (to_trim > FixedArray::kHeaderSize / kPointerSize &&
371
!heap->new_space()->Contains(elms)) {
372
// If we are doing a big trim in old space then we zap the space that was
373
// formerly part of the array so that the GC (aided by the card-based
374
// remembered set) won't find pointers to new-space there.
375
Object** zap = reinterpret_cast<Object**>(elms->address());
376
zap++; // Header of filler must be at least one word so skip that.
377
for (int i = 1; i < to_trim; i++) {
378
*zap++ = Smi::FromInt(0);
381
// Technically in new space this write might be omitted (except for
382
// debug mode which iterates through the heap), but to play safer
384
heap->CreateFillerObjectAt(elms->address(), to_trim * kPointerSize);
386
former_start[to_trim] = heap->fixed_array_map();
387
former_start[to_trim + 1] = Smi::FromInt(len - to_trim);
389
// Maintain marking consistency for HeapObjectIterator and
390
// IncrementalMarking.
391
int size_delta = to_trim * kPointerSize;
392
if (heap->marking()->TransferMark(elms->address(),
393
elms->address() + size_delta)) {
394
MemoryChunk::IncrementLiveBytesFromMutator(elms->address(), -size_delta);
397
HEAP_PROFILE(heap, ObjectMoveEvent(elms->address(),
398
elms->address() + size_delta));
399
return FixedArray::cast(HeapObject::FromAddress(
400
elms->address() + to_trim * kPointerSize));
404
static bool ArrayPrototypeHasNoElements(Heap* heap,
405
Context* global_context,
406
JSObject* array_proto) {
407
// This method depends on non writability of Object and Array prototype
409
if (array_proto->elements() != heap->empty_fixed_array()) return false;
411
Object* proto = array_proto->GetPrototype();
412
if (proto == heap->null_value()) return false;
413
array_proto = JSObject::cast(proto);
414
if (array_proto != global_context->initial_object_prototype()) return false;
415
if (array_proto->elements() != heap->empty_fixed_array()) return false;
416
return array_proto->GetPrototype()->IsNull();
421
static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
422
Heap* heap, Object* receiver, Arguments* args, int first_added_arg) {
423
if (!receiver->IsJSArray()) return NULL;
424
JSArray* array = JSArray::cast(receiver);
425
HeapObject* elms = array->elements();
426
Map* map = elms->map();
427
if (map == heap->fixed_array_map()) {
428
if (args == NULL || array->HasFastObjectElements()) return elms;
429
if (array->HasFastDoubleElements()) {
430
ASSERT(elms == heap->empty_fixed_array());
431
MaybeObject* maybe_transition =
432
array->TransitionElementsKind(FAST_ELEMENTS);
433
if (maybe_transition->IsFailure()) return maybe_transition;
436
} else if (map == heap->fixed_cow_array_map()) {
437
MaybeObject* maybe_writable_result = array->EnsureWritableFastElements();
438
if (args == NULL || array->HasFastObjectElements() ||
439
maybe_writable_result->IsFailure()) {
440
return maybe_writable_result;
446
// Need to ensure that the arguments passed in args can be contained in
448
int args_length = args->length();
449
if (first_added_arg >= args_length) return array->elements();
451
MaybeObject* maybe_array = array->EnsureCanContainElements(
454
args_length - first_added_arg,
455
DONT_ALLOW_DOUBLE_ELEMENTS);
456
if (maybe_array->IsFailure()) return maybe_array;
457
return array->elements();
461
static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap,
463
if (!FLAG_clever_optimizations) return false;
464
Context* global_context = heap->isolate()->context()->global_context();
465
JSObject* array_proto =
466
JSObject::cast(global_context->array_function()->prototype());
467
return receiver->GetPrototype() == array_proto &&
468
ArrayPrototypeHasNoElements(heap, global_context, array_proto);
472
MUST_USE_RESULT static MaybeObject* CallJsBuiltin(
475
BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
476
HandleScope handleScope(isolate);
478
Handle<Object> js_builtin =
479
GetProperty(Handle<JSObject>(isolate->global_context()->builtins()),
481
Handle<JSFunction> function = Handle<JSFunction>::cast(js_builtin);
482
int argc = args.length() - 1;
483
ScopedVector<Handle<Object> > argv(argc);
484
for (int i = 0; i < argc; ++i) {
485
argv[i] = args.at<Object>(i + 1);
487
bool pending_exception;
488
Handle<Object> result = Execution::Call(function,
493
if (pending_exception) return Failure::Exception();
499
Heap* heap = isolate->heap();
500
Object* receiver = *args.receiver();
502
{ MaybeObject* maybe_elms_obj =
503
EnsureJSArrayWithWritableFastElements(heap, receiver, &args, 1);
504
if (maybe_elms_obj == NULL) {
505
return CallJsBuiltin(isolate, "ArrayPush", args);
507
if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
509
FixedArray* elms = FixedArray::cast(elms_obj);
510
JSArray* array = JSArray::cast(receiver);
512
int len = Smi::cast(array->length())->value();
513
int to_add = args.length() - 1;
515
return Smi::FromInt(len);
517
// Currently fixed arrays cannot grow too big, so
518
// we should never hit this case.
519
ASSERT(to_add <= (Smi::kMaxValue - len));
521
int new_length = len + to_add;
523
if (new_length > elms->length()) {
524
// New backing storage is needed.
525
int capacity = new_length + (new_length >> 1) + 16;
527
{ MaybeObject* maybe_obj = heap->AllocateUninitializedFixedArray(capacity);
528
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
530
FixedArray* new_elms = FixedArray::cast(obj);
532
ElementsKind kind = array->GetElementsKind();
533
CopyObjectToObjectElements(elms, kind, 0, new_elms, kind, 0, len);
534
FillWithHoles(heap, new_elms, new_length, capacity);
539
// Add the provided values.
540
AssertNoAllocation no_gc;
541
WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
542
for (int index = 0; index < to_add; index++) {
543
elms->set(index + len, args[index + 1], mode);
546
if (elms != array->elements()) {
547
array->set_elements(elms);
551
array->set_length(Smi::FromInt(new_length));
552
return Smi::FromInt(new_length);
557
Heap* heap = isolate->heap();
558
Object* receiver = *args.receiver();
560
{ MaybeObject* maybe_elms_obj =
561
EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
562
if (maybe_elms_obj == NULL) return CallJsBuiltin(isolate, "ArrayPop", args);
563
if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
565
FixedArray* elms = FixedArray::cast(elms_obj);
566
JSArray* array = JSArray::cast(receiver);
568
int len = Smi::cast(array->length())->value();
569
if (len == 0) return heap->undefined_value();
572
MaybeObject* top = elms->get(len - 1);
575
array->set_length(Smi::FromInt(len - 1));
577
if (!top->IsTheHole()) {
578
// Delete the top element.
579
elms->set_the_hole(len - 1);
583
top = array->GetPrototype()->GetElement(len - 1);
589
BUILTIN(ArrayShift) {
590
Heap* heap = isolate->heap();
591
Object* receiver = *args.receiver();
593
{ MaybeObject* maybe_elms_obj =
594
EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
595
if (maybe_elms_obj == NULL)
596
return CallJsBuiltin(isolate, "ArrayShift", args);
597
if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
599
if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
600
return CallJsBuiltin(isolate, "ArrayShift", args);
602
FixedArray* elms = FixedArray::cast(elms_obj);
603
JSArray* array = JSArray::cast(receiver);
604
ASSERT(array->HasFastSmiOrObjectElements());
606
int len = Smi::cast(array->length())->value();
607
if (len == 0) return heap->undefined_value();
610
Object* first = elms->get(0);
611
if (first->IsTheHole()) {
612
first = heap->undefined_value();
615
if (!heap->lo_space()->Contains(elms)) {
616
array->set_elements(LeftTrimFixedArray(heap, elms, 1));
618
// Shift the elements.
619
AssertNoAllocation no_gc;
620
MoveElements(heap, &no_gc, elms, 0, elms, 1, len - 1);
621
elms->set(len - 1, heap->the_hole_value());
625
array->set_length(Smi::FromInt(len - 1));
631
BUILTIN(ArrayUnshift) {
632
Heap* heap = isolate->heap();
633
Object* receiver = *args.receiver();
635
{ MaybeObject* maybe_elms_obj =
636
EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
637
if (maybe_elms_obj == NULL)
638
return CallJsBuiltin(isolate, "ArrayUnshift", args);
639
if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
641
if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
642
return CallJsBuiltin(isolate, "ArrayUnshift", args);
644
FixedArray* elms = FixedArray::cast(elms_obj);
645
JSArray* array = JSArray::cast(receiver);
646
ASSERT(array->HasFastSmiOrObjectElements());
648
int len = Smi::cast(array->length())->value();
649
int to_add = args.length() - 1;
650
int new_length = len + to_add;
651
// Currently fixed arrays cannot grow too big, so
652
// we should never hit this case.
653
ASSERT(to_add <= (Smi::kMaxValue - len));
655
MaybeObject* maybe_object =
656
array->EnsureCanContainElements(&args, 1, to_add,
657
DONT_ALLOW_DOUBLE_ELEMENTS);
658
if (maybe_object->IsFailure()) return maybe_object;
660
if (new_length > elms->length()) {
661
// New backing storage is needed.
662
int capacity = new_length + (new_length >> 1) + 16;
664
{ MaybeObject* maybe_obj = heap->AllocateUninitializedFixedArray(capacity);
665
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
667
FixedArray* new_elms = FixedArray::cast(obj);
668
ElementsKind kind = array->GetElementsKind();
669
CopyObjectToObjectElements(elms, kind, 0, new_elms, kind, to_add, len);
670
FillWithHoles(heap, new_elms, new_length, capacity);
672
array->set_elements(elms);
674
AssertNoAllocation no_gc;
675
MoveElements(heap, &no_gc, elms, to_add, elms, 0, len);
678
// Add the provided values.
679
AssertNoAllocation no_gc;
680
WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
681
for (int i = 0; i < to_add; i++) {
682
elms->set(i, args[i + 1], mode);
686
array->set_length(Smi::FromInt(new_length));
687
return Smi::FromInt(new_length);
691
BUILTIN(ArraySlice) {
692
Heap* heap = isolate->heap();
693
Object* receiver = *args.receiver();
696
if (receiver->IsJSArray()) {
697
JSArray* array = JSArray::cast(receiver);
698
if (!array->HasFastSmiOrObjectElements() ||
699
!IsJSArrayFastElementMovingAllowed(heap, array)) {
700
return CallJsBuiltin(isolate, "ArraySlice", args);
703
elms = FixedArray::cast(array->elements());
704
len = Smi::cast(array->length())->value();
706
// Array.slice(arguments, ...) is quite a common idiom (notably more
707
// than 50% of invocations in Web apps). Treat it in C++ as well.
709
isolate->context()->global_context()->arguments_boilerplate()->map();
711
bool is_arguments_object_with_fast_elements =
712
receiver->IsJSObject()
713
&& JSObject::cast(receiver)->map() == arguments_map
714
&& JSObject::cast(receiver)->HasFastSmiOrObjectElements();
715
if (!is_arguments_object_with_fast_elements) {
716
return CallJsBuiltin(isolate, "ArraySlice", args);
718
elms = FixedArray::cast(JSObject::cast(receiver)->elements());
719
Object* len_obj = JSObject::cast(receiver)
720
->InObjectPropertyAt(Heap::kArgumentsLengthIndex);
721
if (!len_obj->IsSmi()) {
722
return CallJsBuiltin(isolate, "ArraySlice", args);
724
len = Smi::cast(len_obj)->value();
725
if (len > elms->length()) {
726
return CallJsBuiltin(isolate, "ArraySlice", args);
728
for (int i = 0; i < len; i++) {
729
if (elms->get(i) == heap->the_hole_value()) {
730
return CallJsBuiltin(isolate, "ArraySlice", args);
735
int n_arguments = args.length() - 1;
737
// Note carefully choosen defaults---if argument is missing,
738
// it's undefined which gets converted to 0 for relative_start
739
// and to len for relative_end.
740
int relative_start = 0;
741
int relative_end = len;
742
if (n_arguments > 0) {
743
Object* arg1 = args[1];
745
relative_start = Smi::cast(arg1)->value();
746
} else if (!arg1->IsUndefined()) {
747
return CallJsBuiltin(isolate, "ArraySlice", args);
749
if (n_arguments > 1) {
750
Object* arg2 = args[2];
752
relative_end = Smi::cast(arg2)->value();
753
} else if (!arg2->IsUndefined()) {
754
return CallJsBuiltin(isolate, "ArraySlice", args);
759
// ECMAScript 232, 3rd Edition, Section 15.4.4.10, step 6.
760
int k = (relative_start < 0) ? Max(len + relative_start, 0)
761
: Min(relative_start, len);
763
// ECMAScript 232, 3rd Edition, Section 15.4.4.10, step 8.
764
int final = (relative_end < 0) ? Max(len + relative_end, 0)
765
: Min(relative_end, len);
767
ElementsKind elements_kind = JSObject::cast(receiver)->GetElementsKind();
769
// Calculate the length of result array.
770
int result_len = Max(final - k, 0);
772
MaybeObject* maybe_array =
773
heap->AllocateJSArrayAndStorage(elements_kind,
776
JSArray* result_array;
777
if (!maybe_array->To(&result_array)) return maybe_array;
779
CopyObjectToObjectElements(elms, elements_kind, k,
780
FixedArray::cast(result_array->elements()),
781
elements_kind, 0, result_len);
787
BUILTIN(ArraySplice) {
788
Heap* heap = isolate->heap();
789
Object* receiver = *args.receiver();
791
{ MaybeObject* maybe_elms_obj =
792
EnsureJSArrayWithWritableFastElements(heap, receiver, &args, 3);
793
if (maybe_elms_obj == NULL)
794
return CallJsBuiltin(isolate, "ArraySplice", args);
795
if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
797
if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
798
return CallJsBuiltin(isolate, "ArraySplice", args);
800
FixedArray* elms = FixedArray::cast(elms_obj);
801
JSArray* array = JSArray::cast(receiver);
802
ASSERT(array->HasFastSmiOrObjectElements());
804
int len = Smi::cast(array->length())->value();
806
int n_arguments = args.length() - 1;
808
int relative_start = 0;
809
if (n_arguments > 0) {
810
Object* arg1 = args[1];
812
relative_start = Smi::cast(arg1)->value();
813
} else if (!arg1->IsUndefined()) {
814
return CallJsBuiltin(isolate, "ArraySplice", args);
817
int actual_start = (relative_start < 0) ? Max(len + relative_start, 0)
818
: Min(relative_start, len);
820
// SpiderMonkey, TraceMonkey and JSC treat the case where no delete count is
821
// given as a request to delete all the elements from the start.
822
// And it differs from the case of undefined delete count.
823
// This does not follow ECMA-262, but we do the same for
825
int actual_delete_count;
826
if (n_arguments == 1) {
827
ASSERT(len - actual_start >= 0);
828
actual_delete_count = len - actual_start;
830
int value = 0; // ToInteger(undefined) == 0
831
if (n_arguments > 1) {
832
Object* arg2 = args[2];
834
value = Smi::cast(arg2)->value();
836
return CallJsBuiltin(isolate, "ArraySplice", args);
839
actual_delete_count = Min(Max(value, 0), len - actual_start);
842
JSArray* result_array = NULL;
843
ElementsKind elements_kind =
844
JSObject::cast(receiver)->GetElementsKind();
845
MaybeObject* maybe_array =
846
heap->AllocateJSArrayAndStorage(elements_kind,
848
actual_delete_count);
849
if (!maybe_array->To(&result_array)) return maybe_array;
852
// Fill newly created array.
853
CopyObjectToObjectElements(elms, elements_kind, actual_start,
854
FixedArray::cast(result_array->elements()),
855
elements_kind, 0, actual_delete_count);
858
int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0;
859
int new_length = len - actual_delete_count + item_count;
861
bool elms_changed = false;
862
if (item_count < actual_delete_count) {
864
const bool trim_array = !heap->lo_space()->Contains(elms) &&
865
((actual_start + item_count) <
866
(len - actual_delete_count - actual_start));
868
const int delta = actual_delete_count - item_count;
871
AssertNoAllocation no_gc;
872
MoveElements(heap, &no_gc, elms, delta, elms, 0, actual_start);
875
elms = LeftTrimFixedArray(heap, elms, delta);
879
AssertNoAllocation no_gc;
880
MoveElements(heap, &no_gc,
881
elms, actual_start + item_count,
882
elms, actual_start + actual_delete_count,
883
(len - actual_delete_count - actual_start));
884
FillWithHoles(heap, elms, new_length, len);
886
} else if (item_count > actual_delete_count) {
887
// Currently fixed arrays cannot grow too big, so
888
// we should never hit this case.
889
ASSERT((item_count - actual_delete_count) <= (Smi::kMaxValue - len));
891
// Check if array need to grow.
892
if (new_length > elms->length()) {
893
// New backing storage is needed.
894
int capacity = new_length + (new_length >> 1) + 16;
896
{ MaybeObject* maybe_obj =
897
heap->AllocateUninitializedFixedArray(capacity);
898
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
900
FixedArray* new_elms = FixedArray::cast(obj);
903
// Copy the part before actual_start as is.
904
ElementsKind kind = array->GetElementsKind();
905
CopyObjectToObjectElements(elms, kind, 0,
906
new_elms, kind, 0, actual_start);
907
const int to_copy = len - actual_delete_count - actual_start;
908
CopyObjectToObjectElements(elms, kind,
909
actual_start + actual_delete_count,
911
actual_start + item_count, to_copy);
914
FillWithHoles(heap, new_elms, new_length, capacity);
919
AssertNoAllocation no_gc;
920
MoveElements(heap, &no_gc,
921
elms, actual_start + item_count,
922
elms, actual_start + actual_delete_count,
923
(len - actual_delete_count - actual_start));
927
AssertNoAllocation no_gc;
928
WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
929
for (int k = actual_start; k < actual_start + item_count; k++) {
930
elms->set(k, args[3 + k - actual_start], mode);
934
array->set_elements(elms);
938
array->set_length(Smi::FromInt(new_length));
944
BUILTIN(ArrayConcat) {
945
Heap* heap = isolate->heap();
946
Context* global_context = isolate->context()->global_context();
947
JSObject* array_proto =
948
JSObject::cast(global_context->array_function()->prototype());
949
if (!ArrayPrototypeHasNoElements(heap, global_context, array_proto)) {
950
return CallJsBuiltin(isolate, "ArrayConcat", args);
953
// Iterate through all the arguments performing checks
954
// and calculating total length.
955
int n_arguments = args.length();
957
ElementsKind elements_kind = GetInitialFastElementsKind();
958
for (int i = 0; i < n_arguments; i++) {
959
Object* arg = args[i];
960
if (!arg->IsJSArray() ||
961
!JSArray::cast(arg)->HasFastSmiOrObjectElements() ||
962
JSArray::cast(arg)->GetPrototype() != array_proto) {
963
return CallJsBuiltin(isolate, "ArrayConcat", args);
966
int len = Smi::cast(JSArray::cast(arg)->length())->value();
968
// We shouldn't overflow when adding another len.
969
const int kHalfOfMaxInt = 1 << (kBitsPerInt - 2);
970
STATIC_ASSERT(FixedArray::kMaxLength < kHalfOfMaxInt);
973
ASSERT(result_len >= 0);
975
if (result_len > FixedArray::kMaxLength) {
976
return CallJsBuiltin(isolate, "ArrayConcat", args);
979
if (!JSArray::cast(arg)->HasFastSmiElements()) {
980
if (IsFastSmiElementsKind(elements_kind)) {
981
if (IsFastHoleyElementsKind(elements_kind)) {
982
elements_kind = FAST_HOLEY_ELEMENTS;
984
elements_kind = FAST_ELEMENTS;
989
if (JSArray::cast(arg)->HasFastHoleyElements()) {
990
elements_kind = GetHoleyElementsKind(elements_kind);
995
JSArray* result_array;
996
MaybeObject* maybe_array =
997
heap->AllocateJSArrayAndStorage(elements_kind,
1000
if (!maybe_array->To(&result_array)) return maybe_array;
1001
if (result_len == 0) return result_array;
1005
FixedArray* result_elms(FixedArray::cast(result_array->elements()));
1006
for (int i = 0; i < n_arguments; i++) {
1007
JSArray* array = JSArray::cast(args[i]);
1008
int len = Smi::cast(array->length())->value();
1009
FixedArray* elms = FixedArray::cast(array->elements());
1010
CopyObjectToObjectElements(elms, elements_kind, 0,
1011
result_elms, elements_kind,
1015
ASSERT(start_pos == result_len);
1017
return result_array;
1021
// -----------------------------------------------------------------------------
1022
// Strict mode poison pills
1025
BUILTIN(StrictModePoisonPill) {
1027
return isolate->Throw(*isolate->factory()->NewTypeError(
1028
"strict_poison_pill", HandleVector<Object>(NULL, 0)));
1031
// -----------------------------------------------------------------------------
1035
// Returns the holder JSObject if the function can legally be called
1036
// with this receiver. Returns Heap::null_value() if the call is
1037
// illegal. Any arguments that don't fit the expected type is
1038
// overwritten with undefined. Arguments that do fit the expected
1039
// type is overwritten with the object in the prototype chain that
1040
// actually has that type.
1041
static inline Object* TypeCheck(Heap* heap,
1044
FunctionTemplateInfo* info) {
1045
Object* recv = argv[0];
1046
// API calls are only supported with JSObject receivers.
1047
if (!recv->IsJSObject()) return heap->null_value();
1048
Object* sig_obj = info->signature();
1049
if (sig_obj->IsUndefined()) return recv;
1050
SignatureInfo* sig = SignatureInfo::cast(sig_obj);
1051
// If necessary, check the receiver
1052
Object* recv_type = sig->receiver();
1054
Object* holder = recv;
1055
if (!recv_type->IsUndefined()) {
1056
for (; holder != heap->null_value(); holder = holder->GetPrototype()) {
1057
if (holder->IsInstanceOf(FunctionTemplateInfo::cast(recv_type))) {
1061
if (holder == heap->null_value()) return holder;
1063
Object* args_obj = sig->args();
1064
// If there is no argument signature we're done
1065
if (args_obj->IsUndefined()) return holder;
1066
FixedArray* args = FixedArray::cast(args_obj);
1067
int length = args->length();
1068
if (argc <= length) length = argc - 1;
1069
for (int i = 0; i < length; i++) {
1070
Object* argtype = args->get(i);
1071
if (argtype->IsUndefined()) continue;
1072
Object** arg = &argv[-1 - i];
1073
Object* current = *arg;
1074
for (; current != heap->null_value(); current = current->GetPrototype()) {
1075
if (current->IsInstanceOf(FunctionTemplateInfo::cast(argtype))) {
1080
if (current == heap->null_value()) *arg = heap->undefined_value();
1086
template <bool is_construct>
1087
MUST_USE_RESULT static MaybeObject* HandleApiCallHelper(
1088
BuiltinArguments<NEEDS_CALLED_FUNCTION> args, Isolate* isolate) {
1089
ASSERT(is_construct == CalledAsConstructor(isolate));
1090
Heap* heap = isolate->heap();
1092
HandleScope scope(isolate);
1093
Handle<JSFunction> function = args.called_function();
1094
ASSERT(function->shared()->IsApiFunction());
1096
FunctionTemplateInfo* fun_data = function->shared()->get_api_func_data();
1098
Handle<FunctionTemplateInfo> desc(fun_data, isolate);
1099
bool pending_exception = false;
1100
isolate->factory()->ConfigureInstance(
1101
desc, Handle<JSObject>::cast(args.receiver()), &pending_exception);
1102
ASSERT(isolate->has_pending_exception() == pending_exception);
1103
if (pending_exception) return Failure::Exception();
1107
Object* raw_holder = TypeCheck(heap, args.length(), &args[0], fun_data);
1109
if (raw_holder->IsNull()) {
1110
// This function cannot be called with the given receiver. Abort!
1111
Handle<Object> obj =
1112
isolate->factory()->NewTypeError(
1113
"illegal_invocation", HandleVector(&function, 1));
1114
return isolate->Throw(*obj);
1117
Object* raw_call_data = fun_data->call_code();
1118
if (!raw_call_data->IsUndefined()) {
1119
CallHandlerInfo* call_data = CallHandlerInfo::cast(raw_call_data);
1120
Object* callback_obj = call_data->callback();
1121
v8::InvocationCallback callback =
1122
v8::ToCData<v8::InvocationCallback>(callback_obj);
1123
Object* data_obj = call_data->data();
1126
LOG(isolate, ApiObjectAccess("call", JSObject::cast(*args.receiver())));
1127
ASSERT(raw_holder->IsJSObject());
1129
CustomArguments custom(isolate);
1130
v8::ImplementationUtilities::PrepareArgumentsData(custom.end(),
1131
isolate, data_obj, *function, raw_holder);
1133
v8::Arguments new_args = v8::ImplementationUtilities::NewArguments(
1139
v8::Handle<v8::Value> value;
1141
// Leaving JavaScript.
1142
VMState state(isolate, EXTERNAL);
1143
ExternalCallbackScope call_scope(isolate,
1144
v8::ToCData<Address>(callback_obj));
1145
value = callback(new_args);
1147
if (value.IsEmpty()) {
1148
result = heap->undefined_value();
1150
result = *reinterpret_cast<Object**>(*value);
1153
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
1154
if (!is_construct || result->IsJSObject()) return result;
1157
return *args.receiver();
1161
// Plain (non-construct) entry point for calls to API functions.
BUILTIN(HandleApiCall) {
  return HandleApiCallHelper<false>(args, isolate);
}
// Construct-call entry point for calls to API functions.
BUILTIN(HandleApiCallConstruct) {
  return HandleApiCallHelper<true>(args, isolate);
}
// Helper function to handle calls to non-function objects created through the
1172
// API. The object can be called as either a constructor (using new) or just as
1173
// a function (without new).
1174
MUST_USE_RESULT static MaybeObject* HandleApiCallAsFunctionOrConstructor(
1176
bool is_construct_call,
1177
BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
1178
// Non-functions are never called as constructors. Even if this is an object
1179
// called as a constructor the delegate call is not a construct call.
1180
ASSERT(!CalledAsConstructor(isolate));
1181
Heap* heap = isolate->heap();
1183
Handle<Object> receiver = args.receiver();
1185
// Get the object called.
1186
JSObject* obj = JSObject::cast(*receiver);
1188
// Get the invocation callback from the function descriptor that was
1189
// used to create the called object.
1190
ASSERT(obj->map()->has_instance_call_handler());
1191
JSFunction* constructor = JSFunction::cast(obj->map()->constructor());
1192
ASSERT(constructor->shared()->IsApiFunction());
1194
constructor->shared()->get_api_func_data()->instance_call_handler();
1195
ASSERT(!handler->IsUndefined());
1196
CallHandlerInfo* call_data = CallHandlerInfo::cast(handler);
1197
Object* callback_obj = call_data->callback();
1198
v8::InvocationCallback callback =
1199
v8::ToCData<v8::InvocationCallback>(callback_obj);
1201
// Get the data for the call and perform the callback.
1204
HandleScope scope(isolate);
1205
LOG(isolate, ApiObjectAccess("call non-function", obj));
1207
CustomArguments custom(isolate);
1208
v8::ImplementationUtilities::PrepareArgumentsData(custom.end(),
1209
isolate, call_data->data(), constructor, obj);
1210
v8::Arguments new_args = v8::ImplementationUtilities::NewArguments(
1215
v8::Handle<v8::Value> value;
1217
// Leaving JavaScript.
1218
VMState state(isolate, EXTERNAL);
1219
ExternalCallbackScope call_scope(isolate,
1220
v8::ToCData<Address>(callback_obj));
1221
value = callback(new_args);
1223
if (value.IsEmpty()) {
1224
result = heap->undefined_value();
1226
result = *reinterpret_cast<Object**>(*value);
1229
// Check for exceptions and return result.
1230
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
1235
// Handle calls to non-function objects created through the API. This delegate
1236
// function is used when the call is a normal function call.
1237
BUILTIN(HandleApiCallAsFunction) {
1238
return HandleApiCallAsFunctionOrConstructor(isolate, false, args);
1242
// Handle calls to non-function objects created through the API. This delegate
1243
// function is used when the call is a construct call.
1244
BUILTIN(HandleApiCallAsConstructor) {
1245
return HandleApiCallAsFunctionOrConstructor(isolate, true, args);
1249
static void Generate_LoadIC_ArrayLength(MacroAssembler* masm) {
1250
LoadIC::GenerateArrayLength(masm);
1254
static void Generate_LoadIC_StringLength(MacroAssembler* masm) {
1255
LoadIC::GenerateStringLength(masm, false);
1259
static void Generate_LoadIC_StringWrapperLength(MacroAssembler* masm) {
1260
LoadIC::GenerateStringLength(masm, true);
1264
static void Generate_LoadIC_FunctionPrototype(MacroAssembler* masm) {
1265
LoadIC::GenerateFunctionPrototype(masm);
1269
static void Generate_LoadIC_Initialize(MacroAssembler* masm) {
1270
LoadIC::GenerateInitialize(masm);
1274
static void Generate_LoadIC_PreMonomorphic(MacroAssembler* masm) {
1275
LoadIC::GeneratePreMonomorphic(masm);
1279
static void Generate_LoadIC_Miss(MacroAssembler* masm) {
1280
LoadIC::GenerateMiss(masm);
1284
static void Generate_LoadIC_Megamorphic(MacroAssembler* masm) {
1285
LoadIC::GenerateMegamorphic(masm);
1289
static void Generate_LoadIC_Normal(MacroAssembler* masm) {
1290
LoadIC::GenerateNormal(masm);
1294
static void Generate_KeyedLoadIC_Initialize(MacroAssembler* masm) {
1295
KeyedLoadIC::GenerateInitialize(masm);
1299
static void Generate_KeyedLoadIC_Slow(MacroAssembler* masm) {
1300
KeyedLoadIC::GenerateRuntimeGetProperty(masm);
1304
static void Generate_KeyedLoadIC_Miss(MacroAssembler* masm) {
1305
KeyedLoadIC::GenerateMiss(masm, false);
1309
static void Generate_KeyedLoadIC_MissForceGeneric(MacroAssembler* masm) {
1310
KeyedLoadIC::GenerateMiss(masm, true);
1314
static void Generate_KeyedLoadIC_Generic(MacroAssembler* masm) {
1315
KeyedLoadIC::GenerateGeneric(masm);
1319
static void Generate_KeyedLoadIC_String(MacroAssembler* masm) {
1320
KeyedLoadIC::GenerateString(masm);
1324
static void Generate_KeyedLoadIC_PreMonomorphic(MacroAssembler* masm) {
1325
KeyedLoadIC::GeneratePreMonomorphic(masm);
1328
static void Generate_KeyedLoadIC_IndexedInterceptor(MacroAssembler* masm) {
1329
KeyedLoadIC::GenerateIndexedInterceptor(masm);
1332
static void Generate_KeyedLoadIC_NonStrictArguments(MacroAssembler* masm) {
1333
KeyedLoadIC::GenerateNonStrictArguments(masm);
1336
static void Generate_StoreIC_Initialize(MacroAssembler* masm) {
1337
StoreIC::GenerateInitialize(masm);
1341
static void Generate_StoreIC_Initialize_Strict(MacroAssembler* masm) {
1342
StoreIC::GenerateInitialize(masm);
1346
static void Generate_StoreIC_Miss(MacroAssembler* masm) {
1347
StoreIC::GenerateMiss(masm);
1351
static void Generate_StoreIC_Normal(MacroAssembler* masm) {
1352
StoreIC::GenerateNormal(masm);
1356
static void Generate_StoreIC_Normal_Strict(MacroAssembler* masm) {
1357
StoreIC::GenerateNormal(masm);
1361
static void Generate_StoreIC_Megamorphic(MacroAssembler* masm) {
1362
StoreIC::GenerateMegamorphic(masm, kNonStrictMode);
1366
static void Generate_StoreIC_Megamorphic_Strict(MacroAssembler* masm) {
1367
StoreIC::GenerateMegamorphic(masm, kStrictMode);
1371
static void Generate_StoreIC_ArrayLength(MacroAssembler* masm) {
1372
StoreIC::GenerateArrayLength(masm);
1376
static void Generate_StoreIC_ArrayLength_Strict(MacroAssembler* masm) {
1377
StoreIC::GenerateArrayLength(masm);
1381
static void Generate_StoreIC_GlobalProxy(MacroAssembler* masm) {
1382
StoreIC::GenerateGlobalProxy(masm, kNonStrictMode);
1386
static void Generate_StoreIC_GlobalProxy_Strict(MacroAssembler* masm) {
1387
StoreIC::GenerateGlobalProxy(masm, kStrictMode);
1391
static void Generate_KeyedStoreIC_Generic(MacroAssembler* masm) {
1392
KeyedStoreIC::GenerateGeneric(masm, kNonStrictMode);
1396
static void Generate_KeyedStoreIC_Generic_Strict(MacroAssembler* masm) {
1397
KeyedStoreIC::GenerateGeneric(masm, kStrictMode);
1401
static void Generate_KeyedStoreIC_Miss(MacroAssembler* masm) {
1402
KeyedStoreIC::GenerateMiss(masm, false);
1406
static void Generate_KeyedStoreIC_MissForceGeneric(MacroAssembler* masm) {
1407
KeyedStoreIC::GenerateMiss(masm, true);
1411
static void Generate_KeyedStoreIC_Slow(MacroAssembler* masm) {
1412
KeyedStoreIC::GenerateSlow(masm);
1416
static void Generate_KeyedStoreIC_Initialize(MacroAssembler* masm) {
1417
KeyedStoreIC::GenerateInitialize(masm);
1421
static void Generate_KeyedStoreIC_Initialize_Strict(MacroAssembler* masm) {
1422
KeyedStoreIC::GenerateInitialize(masm);
1425
static void Generate_KeyedStoreIC_NonStrictArguments(MacroAssembler* masm) {
1426
KeyedStoreIC::GenerateNonStrictArguments(masm);
1429
static void Generate_TransitionElementsSmiToDouble(MacroAssembler* masm) {
1430
KeyedStoreIC::GenerateTransitionElementsSmiToDouble(masm);
1433
static void Generate_TransitionElementsDoubleToObject(MacroAssembler* masm) {
1434
KeyedStoreIC::GenerateTransitionElementsDoubleToObject(masm);
1437
#ifdef ENABLE_DEBUGGER_SUPPORT
// Trampolines for the debugger break-point and live-edit stubs.
static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateLoadICDebugBreak(masm);
}


static void Generate_StoreIC_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateStoreICDebugBreak(masm);
}


static void Generate_KeyedLoadIC_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateKeyedLoadICDebugBreak(masm);
}


static void Generate_KeyedStoreIC_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateKeyedStoreICDebugBreak(masm);
}


static void Generate_Return_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateReturnDebugBreak(masm);
}


static void Generate_CallFunctionStub_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateCallFunctionStubDebugBreak(masm);
}


static void Generate_CallFunctionStub_Recording_DebugBreak(
    MacroAssembler* masm) {
  Debug::GenerateCallFunctionStubRecordDebugBreak(masm);
}


static void Generate_CallConstructStub_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateCallConstructStubDebugBreak(masm);
}


static void Generate_CallConstructStub_Recording_DebugBreak(
    MacroAssembler* masm) {
  Debug::GenerateCallConstructStubRecordDebugBreak(masm);
}


static void Generate_Slot_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateSlotDebugBreak(masm);
}


static void Generate_PlainReturn_LiveEdit(MacroAssembler* masm) {
  Debug::GeneratePlainReturnLiveEdit(masm);
}


static void Generate_FrameDropper_LiveEdit(MacroAssembler* masm) {
  Debug::GenerateFrameDropperLiveEdit(masm);
}
#endif  // ENABLE_DEBUGGER_SUPPORT
// Zero the code-object and name tables; they are populated by SetUp().
Builtins::Builtins() : initialized_(false) {
  memset(builtins_, 0, sizeof(builtins_[0]) * builtin_count);
  memset(names_, 0, sizeof(names_[0]) * builtin_count);
}
// Builtin code objects are heap-allocated; nothing to release here.
Builtins::~Builtins() {
}
#define DEF_ENUM_C(name, ignore) FUNCTION_ADDR(Builtin_##name),
1512
Address const Builtins::c_functions_[cfunction_count] = {
1513
BUILTIN_LIST_C(DEF_ENUM_C)
1517
#define DEF_JS_NAME(name, ignore) #name,
1518
#define DEF_JS_ARGC(ignore, argc) argc,
1519
const char* const Builtins::javascript_names_[id_count] = {
1520
BUILTINS_LIST_JS(DEF_JS_NAME)
1523
int const Builtins::javascript_argc_[id_count] = {
1524
BUILTINS_LIST_JS(DEF_JS_ARGC)
1529
struct BuiltinDesc {
1532
const char* s_name; // name is only used for generating log information.
1535
BuiltinExtraArguments extra_args;
1538
#define BUILTIN_FUNCTION_TABLE_INIT { V8_ONCE_INIT, {} }
1540
class BuiltinFunctionTable {
1542
BuiltinDesc* functions() {
1543
CallOnce(&once_, &Builtins::InitBuiltinFunctionTable);
1548
BuiltinDesc functions_[Builtins::builtin_count + 1];
1550
friend class Builtins;
1553
static BuiltinFunctionTable builtin_function_table =
1554
BUILTIN_FUNCTION_TABLE_INIT;
1556
// Define array of pointers to generators and C builtin functions.
1557
// We do this in a sort of roundabout way so that we can do the initialization
1558
// within the lexical scope of Builtins:: and within a context where
1559
// Code::Flags names a non-abstract type.
1560
void Builtins::InitBuiltinFunctionTable() {
1561
BuiltinDesc* functions = builtin_function_table.functions_;
1562
functions[builtin_count].generator = NULL;
1563
functions[builtin_count].c_code = NULL;
1564
functions[builtin_count].s_name = NULL;
1565
functions[builtin_count].name = builtin_count;
1566
functions[builtin_count].flags = static_cast<Code::Flags>(0);
1567
functions[builtin_count].extra_args = NO_EXTRA_ARGUMENTS;
1569
#define DEF_FUNCTION_PTR_C(aname, aextra_args) \
1570
functions->generator = FUNCTION_ADDR(Generate_Adaptor); \
1571
functions->c_code = FUNCTION_ADDR(Builtin_##aname); \
1572
functions->s_name = #aname; \
1573
functions->name = c_##aname; \
1574
functions->flags = Code::ComputeFlags(Code::BUILTIN); \
1575
functions->extra_args = aextra_args; \
1578
#define DEF_FUNCTION_PTR_A(aname, kind, state, extra) \
1579
functions->generator = FUNCTION_ADDR(Generate_##aname); \
1580
functions->c_code = NULL; \
1581
functions->s_name = #aname; \
1582
functions->name = k##aname; \
1583
functions->flags = Code::ComputeFlags(Code::kind, \
1586
functions->extra_args = NO_EXTRA_ARGUMENTS; \
1589
BUILTIN_LIST_C(DEF_FUNCTION_PTR_C)
1590
BUILTIN_LIST_A(DEF_FUNCTION_PTR_A)
1591
BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A)
1593
#undef DEF_FUNCTION_PTR_C
1594
#undef DEF_FUNCTION_PTR_A
1597
void Builtins::SetUp(bool create_heap_objects) {
1598
ASSERT(!initialized_);
1599
Isolate* isolate = Isolate::Current();
1600
Heap* heap = isolate->heap();
1602
// Create a scope for the handles in the builtins.
1603
HandleScope scope(isolate);
1605
const BuiltinDesc* functions = builtin_function_table.functions();
1607
// For now we generate builtin adaptor code into a stack-allocated
1608
// buffer, before copying it into individual code objects. Be careful
1609
// with alignment, some platforms don't like unaligned code.
1610
union { int force_alignment; byte buffer[4*KB]; } u;
1612
// Traverse the list of builtins and generate an adaptor in a
1613
// separate code object for each one.
1614
for (int i = 0; i < builtin_count; i++) {
1615
if (create_heap_objects) {
1616
MacroAssembler masm(isolate, u.buffer, sizeof u.buffer);
1617
// Generate the code/adaptor.
1618
typedef void (*Generator)(MacroAssembler*, int, BuiltinExtraArguments);
1619
Generator g = FUNCTION_CAST<Generator>(functions[i].generator);
1620
// We pass all arguments to the generator, but it may not use all of
1621
// them. This works because the first arguments are on top of the
1623
ASSERT(!masm.has_frame());
1624
g(&masm, functions[i].name, functions[i].extra_args);
1625
// Move the code into the object heap.
1627
masm.GetCode(&desc);
1628
Code::Flags flags = functions[i].flags;
1629
Object* code = NULL;
1631
// During startup it's OK to always allocate and defer GC to later.
1632
// This simplifies things because we don't need to retry.
1633
AlwaysAllocateScope __scope__;
1634
{ MaybeObject* maybe_code =
1635
heap->CreateCode(desc, flags, masm.CodeObject());
1636
if (!maybe_code->ToObject(&code)) {
1637
v8::internal::V8::FatalProcessOutOfMemory("CreateCode");
1641
// Log the event and add the code to the builtins array.
1643
CodeCreateEvent(Logger::BUILTIN_TAG,
1645
functions[i].s_name));
1646
GDBJIT(AddCode(GDBJITInterface::BUILTIN,
1647
functions[i].s_name,
1649
builtins_[i] = code;
1650
#ifdef ENABLE_DISASSEMBLER
1651
if (FLAG_print_builtin_code) {
1652
PrintF("Builtin: %s\n", functions[i].s_name);
1653
Code::cast(code)->Disassemble(functions[i].s_name);
1658
// Deserializing. The values will be filled in during IterateBuiltins.
1659
builtins_[i] = NULL;
1661
names_[i] = functions[i].s_name;
1664
// Mark as initialized.
1665
initialized_ = true;
1669
void Builtins::TearDown() {
1670
initialized_ = false;
1674
// Visits every builtin code-object slot so the GC/serializer can update them.
void Builtins::IterateBuiltins(ObjectVisitor* v) {
  v->VisitPointers(&builtins_[0], &builtins_[0] + builtin_count);
}
// Returns the name of the builtin whose code contains |pc|, or NULL.
// NOTE(review): the initialized_ guard and return lines were dropped by
// extraction and restored from the numbering gaps — verify against upstream.
const char* Builtins::Lookup(byte* pc) {
  // may be called during initialization (disassembler!)
  if (initialized_) {
    for (int i = 0; i < builtin_count; i++) {
      Code* entry = Code::cast(builtins_[i]);
      if (entry->contains(pc)) {
        return names_[i];
      }
    }
  }
  return NULL;
}
#define DEFINE_BUILTIN_ACCESSOR_C(name, ignore) \
1694
Handle<Code> Builtins::name() { \
1695
Code** code_address = \
1696
reinterpret_cast<Code**>(builtin_address(k##name)); \
1697
return Handle<Code>(code_address); \
1699
#define DEFINE_BUILTIN_ACCESSOR_A(name, kind, state, extra) \
1700
Handle<Code> Builtins::name() { \
1701
Code** code_address = \
1702
reinterpret_cast<Code**>(builtin_address(k##name)); \
1703
return Handle<Code>(code_address); \
1705
BUILTIN_LIST_C(DEFINE_BUILTIN_ACCESSOR_C)
1706
BUILTIN_LIST_A(DEFINE_BUILTIN_ACCESSOR_A)
1707
BUILTIN_LIST_DEBUG_A(DEFINE_BUILTIN_ACCESSOR_A)
1708
#undef DEFINE_BUILTIN_ACCESSOR_C
1709
#undef DEFINE_BUILTIN_ACCESSOR_A
1712
} } // namespace v8::internal