1
// Copyright 2012 the V8 project authors. All rights reserved.
7
#include "compilation-cache.h"
10
#include "macro-assembler.h"
11
#include "global-handles.h"
12
#include "stub-cache.h"
15
using namespace v8::internal;
17
static v8::Persistent<v8::Context> env;
19
static void InitializeVM() {
20
if (env.IsEmpty()) env = v8::Context::New();
21
v8::HandleScope scope;
26
// Verifies basic Map invariants: it is a heap object living in the heap,
// its map is the meta map, and its type/size match the expected values.
static void CheckMap(Map* map, int type, int instance_size) {
  CHECK(map->IsHeapObject());
  CHECK(HEAP->Contains(map));
  CHECK_EQ(HEAP->meta_map(), map->map());
  CHECK_EQ(type, map->instance_type());
  CHECK_EQ(instance_size, map->instance_size());
}
39
CheckMap(HEAP->meta_map(), MAP_TYPE, Map::kSize);
40
CheckMap(HEAP->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
41
CheckMap(HEAP->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
42
CheckMap(HEAP->string_map(), STRING_TYPE, kVariableSizeSentinel);
46
// Asserts that |obj| is an oddball whose ToString representation equals
// |string|.
static void CheckOddball(Object* obj, const char* string) {
  CHECK(obj->IsOddball());
  bool exc;  // Receives the "exception thrown" flag from Execution::ToString.
  Object* print_string = *Execution::ToString(Handle<Object>(obj), &exc);
  CHECK(String::cast(print_string)->IsEqualTo(CStrVector(string)));
}
54
static void CheckSmi(int value, const char* string) {
56
Object* print_string =
57
*Execution::ToString(Handle<Object>(Smi::FromInt(value)), &exc);
58
CHECK(String::cast(print_string)->IsEqualTo(CStrVector(string)));
62
static void CheckNumber(double value, const char* string) {
63
Object* obj = HEAP->NumberFromDouble(value)->ToObjectChecked();
64
CHECK(obj->IsNumber());
66
Object* print_string = *Execution::ToString(Handle<Object>(obj), &exc);
67
CHECK(String::cast(print_string)->IsEqualTo(CStrVector(string)));
71
static void CheckFindCodeObject() {
72
// Test FindCodeObject
75
Assembler assm(Isolate::Current(), NULL, 0);
77
__ nop(); // supported on all architectures
81
Object* code = HEAP->CreateCode(
83
Code::ComputeFlags(Code::STUB),
84
Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
85
CHECK(code->IsCode());
87
HeapObject* obj = HeapObject::cast(code);
88
Address obj_addr = obj->address();
90
for (int i = 0; i < obj->Size(); i += kPointerSize) {
91
Object* found = HEAP->FindCodeObject(obj_addr + i);
92
CHECK_EQ(code, found);
95
Object* copy = HEAP->CreateCode(
97
Code::ComputeFlags(Code::STUB),
98
Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
99
CHECK(copy->IsCode());
100
HeapObject* obj_copy = HeapObject::cast(copy);
101
Object* not_right = HEAP->FindCodeObject(obj_copy->address() +
102
obj_copy->Size() / 2);
103
CHECK(not_right != code);
111
Object* value = HEAP->NumberFromDouble(1.000123)->ToObjectChecked();
112
CHECK(value->IsHeapNumber());
113
CHECK(value->IsNumber());
114
CHECK_EQ(1.000123, value->Number());
116
value = HEAP->NumberFromDouble(1.0)->ToObjectChecked();
117
CHECK(value->IsSmi());
118
CHECK(value->IsNumber());
119
CHECK_EQ(1.0, value->Number());
121
value = HEAP->NumberFromInt32(1024)->ToObjectChecked();
122
CHECK(value->IsSmi());
123
CHECK(value->IsNumber());
124
CHECK_EQ(1024.0, value->Number());
126
value = HEAP->NumberFromInt32(Smi::kMinValue)->ToObjectChecked();
127
CHECK(value->IsSmi());
128
CHECK(value->IsNumber());
129
CHECK_EQ(Smi::kMinValue, Smi::cast(value)->value());
131
value = HEAP->NumberFromInt32(Smi::kMaxValue)->ToObjectChecked();
132
CHECK(value->IsSmi());
133
CHECK(value->IsNumber());
134
CHECK_EQ(Smi::kMaxValue, Smi::cast(value)->value());
136
#ifndef V8_TARGET_ARCH_X64
137
// TODO(lrn): We need a NumberFromIntptr function in order to test this.
138
value = HEAP->NumberFromInt32(Smi::kMinValue - 1)->ToObjectChecked();
139
CHECK(value->IsHeapNumber());
140
CHECK(value->IsNumber());
141
CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
144
MaybeObject* maybe_value =
145
HEAP->NumberFromUint32(static_cast<uint32_t>(Smi::kMaxValue) + 1);
146
value = maybe_value->ToObjectChecked();
147
CHECK(value->IsHeapNumber());
148
CHECK(value->IsNumber());
149
CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
152
// nan oddball checks
153
CHECK(HEAP->nan_value()->IsNumber());
154
CHECK(isnan(HEAP->nan_value()->Number()));
156
Handle<String> s = FACTORY->NewStringFromAscii(CStrVector("fisk hest "));
157
CHECK(s->IsString());
158
CHECK_EQ(10, s->length());
160
String* object_symbol = String::cast(HEAP->Object_symbol());
162
Isolate::Current()->context()->global_object()->HasLocalProperty(
165
// Check ToString for oddballs
166
CheckOddball(HEAP->true_value(), "true");
167
CheckOddball(HEAP->false_value(), "false");
168
CheckOddball(HEAP->null_value(), "null");
169
CheckOddball(HEAP->undefined_value(), "undefined");
171
// Check ToString for Smis
174
CheckSmi(-42, "-42");
176
// Check ToString for Numbers
177
CheckNumber(1.1, "1.1");
179
CheckFindCodeObject();
186
CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
187
CHECK(Smi::FromInt(42)->IsSmi());
188
CHECK(Failure::RetryAfterGC(NEW_SPACE)->IsFailure());
190
Failure::RetryAfterGC(NEW_SPACE)->allocation_space());
191
CHECK_EQ(OLD_POINTER_SPACE,
192
Failure::RetryAfterGC(OLD_POINTER_SPACE)->allocation_space());
193
CHECK(Failure::Exception()->IsFailure());
194
CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
195
CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
199
TEST(GarbageCollection) {
204
HEAP->CollectGarbage(NEW_SPACE);
206
Handle<String> name = FACTORY->LookupAsciiSymbol("theFunction");
207
Handle<String> prop_name = FACTORY->LookupAsciiSymbol("theSlot");
208
Handle<String> prop_namex = FACTORY->LookupAsciiSymbol("theSlotx");
209
Handle<String> obj_name = FACTORY->LookupAsciiSymbol("theObject");
212
v8::HandleScope inner_scope;
213
// Allocate a function and keep it in global object's property.
214
Handle<JSFunction> function =
215
FACTORY->NewFunction(name, FACTORY->undefined_value());
216
Handle<Map> initial_map =
217
FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
218
function->set_initial_map(*initial_map);
219
Isolate::Current()->context()->global_object()->SetProperty(
220
*name, *function, NONE, kNonStrictMode)->ToObjectChecked();
221
// Allocate an object. Unrooted after leaving the scope.
222
Handle<JSObject> obj = FACTORY->NewJSObject(function);
224
*prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
226
*prop_namex, Smi::FromInt(24), NONE, kNonStrictMode)->ToObjectChecked();
228
CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
229
CHECK_EQ(Smi::FromInt(24), obj->GetProperty(*prop_namex));
232
HEAP->CollectGarbage(NEW_SPACE);
234
// Function should be alive.
235
CHECK(Isolate::Current()->context()->global_object()->
236
HasLocalProperty(*name));
237
// Check function is retained.
238
Object* func_value = Isolate::Current()->context()->global_object()->
239
GetProperty(*name)->ToObjectChecked();
240
CHECK(func_value->IsJSFunction());
241
Handle<JSFunction> function(JSFunction::cast(func_value));
244
HandleScope inner_scope;
245
// Allocate another object, make it reachable from global.
246
Handle<JSObject> obj = FACTORY->NewJSObject(function);
247
Isolate::Current()->context()->global_object()->SetProperty(
248
*obj_name, *obj, NONE, kNonStrictMode)->ToObjectChecked();
250
*prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
253
// After gc, it should survive.
254
HEAP->CollectGarbage(NEW_SPACE);
256
CHECK(Isolate::Current()->context()->global_object()->
257
HasLocalProperty(*obj_name));
258
CHECK(Isolate::Current()->context()->global_object()->
259
GetProperty(*obj_name)->ToObjectChecked()->IsJSObject());
260
Object* obj = Isolate::Current()->context()->global_object()->
261
GetProperty(*obj_name)->ToObjectChecked();
262
JSObject* js_obj = JSObject::cast(obj);
263
CHECK_EQ(Smi::FromInt(23), js_obj->GetProperty(*prop_name));
267
static void VerifyStringAllocation(const char* string) {
268
v8::HandleScope scope;
269
Handle<String> s = FACTORY->NewStringFromUtf8(CStrVector(string));
270
CHECK_EQ(StrLength(string), s->length());
271
for (int index = 0; index < s->length(); index++) {
272
CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
280
VerifyStringAllocation("a");
281
VerifyStringAllocation("ab");
282
VerifyStringAllocation("abc");
283
VerifyStringAllocation("abcd");
284
VerifyStringAllocation("fiskerdrengen er paa havet");
291
v8::HandleScope scope;
292
const char* name = "Kasper the spunky";
293
Handle<String> string = FACTORY->NewStringFromAscii(CStrVector(name));
294
CHECK_EQ(StrLength(name), string->length());
298
TEST(GlobalHandles) {
300
GlobalHandles* global_handles = Isolate::Current()->global_handles();
310
Handle<Object> i = FACTORY->NewStringFromAscii(CStrVector("fisk"));
311
Handle<Object> u = FACTORY->NewNumber(1.12344);
313
h1 = global_handles->Create(*i);
314
h2 = global_handles->Create(*u);
315
h3 = global_handles->Create(*i);
316
h4 = global_handles->Create(*u);
319
// after gc, it should survive
320
HEAP->CollectGarbage(NEW_SPACE);
322
CHECK((*h1)->IsString());
323
CHECK((*h2)->IsHeapNumber());
324
CHECK((*h3)->IsString());
325
CHECK((*h4)->IsHeapNumber());
328
global_handles->Destroy(h1.location());
329
global_handles->Destroy(h3.location());
332
global_handles->Destroy(h2.location());
333
global_handles->Destroy(h4.location());
337
static bool WeakPointerCleared = false;
339
static void TestWeakGlobalHandleCallback(v8::Persistent<v8::Value> handle,
341
if (1234 == reinterpret_cast<intptr_t>(id)) WeakPointerCleared = true;
346
TEST(WeakGlobalHandlesScavenge) {
348
GlobalHandles* global_handles = Isolate::Current()->global_handles();
350
WeakPointerCleared = false;
358
Handle<Object> i = FACTORY->NewStringFromAscii(CStrVector("fisk"));
359
Handle<Object> u = FACTORY->NewNumber(1.12344);
361
h1 = global_handles->Create(*i);
362
h2 = global_handles->Create(*u);
365
global_handles->MakeWeak(h2.location(),
366
reinterpret_cast<void*>(1234),
367
&TestWeakGlobalHandleCallback);
369
// Scavenge treats weak pointers as normal roots.
370
HEAP->PerformScavenge();
372
CHECK((*h1)->IsString());
373
CHECK((*h2)->IsHeapNumber());
375
CHECK(!WeakPointerCleared);
376
CHECK(!global_handles->IsNearDeath(h2.location()));
377
CHECK(!global_handles->IsNearDeath(h1.location()));
379
global_handles->Destroy(h1.location());
380
global_handles->Destroy(h2.location());
384
TEST(WeakGlobalHandlesMark) {
386
GlobalHandles* global_handles = Isolate::Current()->global_handles();
388
WeakPointerCleared = false;
396
Handle<Object> i = FACTORY->NewStringFromAscii(CStrVector("fisk"));
397
Handle<Object> u = FACTORY->NewNumber(1.12344);
399
h1 = global_handles->Create(*i);
400
h2 = global_handles->Create(*u);
403
HEAP->CollectGarbage(OLD_POINTER_SPACE);
404
HEAP->CollectGarbage(NEW_SPACE);
405
// Make sure the object is promoted.
407
global_handles->MakeWeak(h2.location(),
408
reinterpret_cast<void*>(1234),
409
&TestWeakGlobalHandleCallback);
410
CHECK(!GlobalHandles::IsNearDeath(h1.location()));
411
CHECK(!GlobalHandles::IsNearDeath(h2.location()));
413
HEAP->CollectGarbage(OLD_POINTER_SPACE);
415
CHECK((*h1)->IsString());
417
CHECK(WeakPointerCleared);
418
CHECK(!GlobalHandles::IsNearDeath(h1.location()));
420
global_handles->Destroy(h1.location());
424
TEST(DeleteWeakGlobalHandle) {
426
GlobalHandles* global_handles = Isolate::Current()->global_handles();
428
WeakPointerCleared = false;
435
Handle<Object> i = FACTORY->NewStringFromAscii(CStrVector("fisk"));
436
h = global_handles->Create(*i);
439
global_handles->MakeWeak(h.location(),
440
reinterpret_cast<void*>(1234),
441
&TestWeakGlobalHandleCallback);
443
// Scanvenge does not recognize weak reference.
444
HEAP->PerformScavenge();
446
CHECK(!WeakPointerCleared);
448
// Mark-compact treats weak reference properly.
449
HEAP->CollectGarbage(OLD_POINTER_SPACE);
451
CHECK(WeakPointerCleared);
455
static const char* not_so_random_string_table[] = {
519
static void CheckSymbols(const char** strings) {
520
for (const char* string = *strings; *strings != 0; string = *strings++) {
522
MaybeObject* maybe_a = HEAP->LookupAsciiSymbol(string);
523
// LookupAsciiSymbol may return a failure if a GC is needed.
524
if (!maybe_a->ToObject(&a)) continue;
525
CHECK(a->IsSymbol());
527
MaybeObject* maybe_b = HEAP->LookupAsciiSymbol(string);
528
if (!maybe_b->ToObject(&b)) continue;
530
CHECK(String::cast(b)->IsEqualTo(CStrVector(string)));
538
CheckSymbols(not_so_random_string_table);
539
CheckSymbols(not_so_random_string_table);
543
TEST(FunctionAllocation) {
547
Handle<String> name = FACTORY->LookupAsciiSymbol("theFunction");
548
Handle<JSFunction> function =
549
FACTORY->NewFunction(name, FACTORY->undefined_value());
550
Handle<Map> initial_map =
551
FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
552
function->set_initial_map(*initial_map);
554
Handle<String> prop_name = FACTORY->LookupAsciiSymbol("theSlot");
555
Handle<JSObject> obj = FACTORY->NewJSObject(function);
557
*prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
558
CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
559
// Check that we can add properties to function objects.
560
function->SetProperty(
561
*prop_name, Smi::FromInt(24), NONE, kNonStrictMode)->ToObjectChecked();
562
CHECK_EQ(Smi::FromInt(24), function->GetProperty(*prop_name));
566
TEST(ObjectProperties) {
570
String* object_symbol = String::cast(HEAP->Object_symbol());
571
Object* raw_object = Isolate::Current()->context()->global_object()->
572
GetProperty(object_symbol)->ToObjectChecked();
573
JSFunction* object_function = JSFunction::cast(raw_object);
574
Handle<JSFunction> constructor(object_function);
575
Handle<JSObject> obj = FACTORY->NewJSObject(constructor);
576
Handle<String> first = FACTORY->LookupAsciiSymbol("first");
577
Handle<String> second = FACTORY->LookupAsciiSymbol("second");
580
CHECK(!obj->HasLocalProperty(*first));
584
*first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
585
CHECK(obj->HasLocalProperty(*first));
588
CHECK(obj->DeleteProperty(*first, JSObject::NORMAL_DELETION));
589
CHECK(!obj->HasLocalProperty(*first));
591
// add first and then second
593
*first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
595
*second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
596
CHECK(obj->HasLocalProperty(*first));
597
CHECK(obj->HasLocalProperty(*second));
599
// delete first and then second
600
CHECK(obj->DeleteProperty(*first, JSObject::NORMAL_DELETION));
601
CHECK(obj->HasLocalProperty(*second));
602
CHECK(obj->DeleteProperty(*second, JSObject::NORMAL_DELETION));
603
CHECK(!obj->HasLocalProperty(*first));
604
CHECK(!obj->HasLocalProperty(*second));
606
// add first and then second
608
*first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
610
*second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
611
CHECK(obj->HasLocalProperty(*first));
612
CHECK(obj->HasLocalProperty(*second));
614
// delete second and then first
615
CHECK(obj->DeleteProperty(*second, JSObject::NORMAL_DELETION));
616
CHECK(obj->HasLocalProperty(*first));
617
CHECK(obj->DeleteProperty(*first, JSObject::NORMAL_DELETION));
618
CHECK(!obj->HasLocalProperty(*first));
619
CHECK(!obj->HasLocalProperty(*second));
621
// check string and symbol match
622
const char* string1 = "fisk";
623
Handle<String> s1 = FACTORY->NewStringFromAscii(CStrVector(string1));
625
*s1, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
626
Handle<String> s1_symbol = FACTORY->LookupAsciiSymbol(string1);
627
CHECK(obj->HasLocalProperty(*s1_symbol));
629
// check symbol and string match
630
const char* string2 = "fugl";
631
Handle<String> s2_symbol = FACTORY->LookupAsciiSymbol(string2);
633
*s2_symbol, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
634
Handle<String> s2 = FACTORY->NewStringFromAscii(CStrVector(string2));
635
CHECK(obj->HasLocalProperty(*s2));
643
Handle<String> name = FACTORY->LookupAsciiSymbol("theFunction");
644
Handle<JSFunction> function =
645
FACTORY->NewFunction(name, FACTORY->undefined_value());
646
Handle<Map> initial_map =
647
FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
648
function->set_initial_map(*initial_map);
650
Handle<String> prop_name = FACTORY->LookupAsciiSymbol("theSlot");
651
Handle<JSObject> obj = FACTORY->NewJSObject(function);
655
*prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
656
CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
658
// Check the map has changed
659
CHECK(*initial_map != obj->map());
667
Handle<String> name = FACTORY->LookupAsciiSymbol("Array");
668
Object* raw_object = Isolate::Current()->context()->global_object()->
669
GetProperty(*name)->ToObjectChecked();
670
Handle<JSFunction> function = Handle<JSFunction>(
671
JSFunction::cast(raw_object));
673
// Allocate the object.
674
Handle<JSObject> object = FACTORY->NewJSObject(function);
675
Handle<JSArray> array = Handle<JSArray>::cast(object);
676
// We just initialized the VM, no heap allocation failure yet.
677
array->Initialize(0)->ToObjectChecked();
679
// Set array length to 0.
680
array->SetElementsLength(Smi::FromInt(0))->ToObjectChecked();
681
CHECK_EQ(Smi::FromInt(0), array->length());
682
// Must be in fast mode.
683
CHECK(array->HasFastSmiOrObjectElements());
685
// array[length] = name.
686
array->SetElement(0, *name, NONE, kNonStrictMode)->ToObjectChecked();
687
CHECK_EQ(Smi::FromInt(1), array->length());
688
CHECK_EQ(array->GetElement(0), *name);
690
// Set array length with larger than smi value.
691
Handle<Object> length =
692
FACTORY->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
693
array->SetElementsLength(*length)->ToObjectChecked();
695
uint32_t int_length = 0;
696
CHECK(length->ToArrayIndex(&int_length));
697
CHECK_EQ(*length, array->length());
698
CHECK(array->HasDictionaryElements()); // Must be in slow mode.
700
// array[length] = name.
701
array->SetElement(int_length, *name, NONE, kNonStrictMode)->ToObjectChecked();
702
uint32_t new_int_length = 0;
703
CHECK(array->length()->ToArrayIndex(&new_int_length));
704
CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
705
CHECK_EQ(array->GetElement(int_length), *name);
706
CHECK_EQ(array->GetElement(0), *name);
714
String* object_symbol = String::cast(HEAP->Object_symbol());
715
Object* raw_object = Isolate::Current()->context()->global_object()->
716
GetProperty(object_symbol)->ToObjectChecked();
717
JSFunction* object_function = JSFunction::cast(raw_object);
718
Handle<JSFunction> constructor(object_function);
719
Handle<JSObject> obj = FACTORY->NewJSObject(constructor);
720
Handle<String> first = FACTORY->LookupAsciiSymbol("first");
721
Handle<String> second = FACTORY->LookupAsciiSymbol("second");
724
*first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
726
*second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
728
obj->SetElement(0, *first, NONE, kNonStrictMode)->ToObjectChecked();
729
obj->SetElement(1, *second, NONE, kNonStrictMode)->ToObjectChecked();
732
Handle<JSObject> clone = Copy(obj);
733
CHECK(!clone.is_identical_to(obj));
735
CHECK_EQ(obj->GetElement(0), clone->GetElement(0));
736
CHECK_EQ(obj->GetElement(1), clone->GetElement(1));
738
CHECK_EQ(obj->GetProperty(*first), clone->GetProperty(*first));
739
CHECK_EQ(obj->GetProperty(*second), clone->GetProperty(*second));
743
*first, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
745
*second, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
747
clone->SetElement(0, *second, NONE, kNonStrictMode)->ToObjectChecked();
748
clone->SetElement(1, *first, NONE, kNonStrictMode)->ToObjectChecked();
750
CHECK_EQ(obj->GetElement(1), clone->GetElement(0));
751
CHECK_EQ(obj->GetElement(0), clone->GetElement(1));
753
CHECK_EQ(obj->GetProperty(*second), clone->GetProperty(*first));
754
CHECK_EQ(obj->GetProperty(*first), clone->GetProperty(*second));
758
TEST(StringAllocation) {
762
const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
763
for (int length = 0; length < 100; length++) {
764
v8::HandleScope scope;
765
char* non_ascii = NewArray<char>(3 * length + 1);
766
char* ascii = NewArray<char>(length + 1);
767
non_ascii[3 * length] = 0;
769
for (int i = 0; i < length; i++) {
771
non_ascii[3 * i] = chars[0];
772
non_ascii[3 * i + 1] = chars[1];
773
non_ascii[3 * i + 2] = chars[2];
775
Handle<String> non_ascii_sym =
776
FACTORY->LookupSymbol(Vector<const char>(non_ascii, 3 * length));
777
CHECK_EQ(length, non_ascii_sym->length());
778
Handle<String> ascii_sym =
779
FACTORY->LookupSymbol(Vector<const char>(ascii, length));
780
CHECK_EQ(length, ascii_sym->length());
781
Handle<String> non_ascii_str =
782
FACTORY->NewStringFromUtf8(Vector<const char>(non_ascii, 3 * length));
783
non_ascii_str->Hash();
784
CHECK_EQ(length, non_ascii_str->length());
785
Handle<String> ascii_str =
786
FACTORY->NewStringFromUtf8(Vector<const char>(ascii, length));
788
CHECK_EQ(length, ascii_str->length());
789
DeleteArray(non_ascii);
795
// Returns how many of the |size| handles in |objs| reference objects that a
// full heap iteration encounters.
static int ObjectsFoundInHeap(Handle<Object> objs[], int size) {
  // Count the number of objects found in the heap.
  int found_count = 0;
  HeapIterator iterator;
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
    for (int i = 0; i < size; i++) {
      if (*objs[i] == obj) {
        found_count++;
      }
    }
  }
  return found_count;
}
812
v8::HandleScope scope;
814
// Array of objects to scan haep for.
815
const int objs_count = 6;
816
Handle<Object> objs[objs_count];
817
int next_objs_index = 0;
819
// Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
820
objs[next_objs_index++] = FACTORY->NewJSArray(10);
821
objs[next_objs_index++] = FACTORY->NewJSArray(10,
825
// Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
826
objs[next_objs_index++] =
827
FACTORY->NewStringFromAscii(CStrVector("abcdefghij"));
828
objs[next_objs_index++] =
829
FACTORY->NewStringFromAscii(CStrVector("abcdefghij"), TENURED);
831
// Allocate a large string (for large object space).
832
int large_size = Page::kMaxNonCodeHeapObjectSize + 1;
833
char* str = new char[large_size];
834
for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
835
str[large_size - 1] = '\0';
836
objs[next_objs_index++] =
837
FACTORY->NewStringFromAscii(CStrVector(str), TENURED);
840
// Add a Map object to look for.
841
objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());
843
CHECK_EQ(objs_count, next_objs_index);
844
CHECK_EQ(objs_count, ObjectsFoundInHeap(objs, objs_count));
848
// Escaping an empty handle from a nested scope must yield a null handle
// rather than crashing.
TEST(EmptyHandleEscapeFrom) {
  InitializeVM();

  v8::HandleScope scope;
  Handle<JSObject> runaway;

  {
    v8::HandleScope nested;
    Handle<JSObject> empty;
    runaway = empty.EscapeFrom(&nested);
  }

  CHECK(runaway.is_null());
}
864
static int LenFromSize(int size) {
865
return (size - FixedArray::kHeaderSize) / kPointerSize;
869
TEST(Regression39128) {
870
// Test case for crbug.com/39128.
873
// Increase the chance of 'bump-the-pointer' allocation in old space.
874
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
876
v8::HandleScope scope;
878
// The plan: create JSObject which references objects in new space.
879
// Then clone this object (forcing it to go into old space) and check
880
// that region dirty marks are updated correctly.
882
// Step 1: prepare a map for the object. We add 1 inobject property to it.
883
Handle<JSFunction> object_ctor(
884
Isolate::Current()->native_context()->object_function());
885
CHECK(object_ctor->has_initial_map());
886
Handle<Map> object_map(object_ctor->initial_map());
887
// Create a map with single inobject property.
888
Handle<Map> my_map = FACTORY->CopyMap(object_map, 1);
889
int n_properties = my_map->inobject_properties();
890
CHECK_GT(n_properties, 0);
892
int object_size = my_map->instance_size();
894
// Step 2: allocate a lot of objects so to almost fill new space: we need
895
// just enough room to allocate JSObject and thus fill the newspace.
897
int allocation_amount = Min(FixedArray::kMaxSize,
898
HEAP->MaxObjectSizeInNewSpace());
899
int allocation_len = LenFromSize(allocation_amount);
900
NewSpace* new_space = HEAP->new_space();
901
Address* top_addr = new_space->allocation_top_address();
902
Address* limit_addr = new_space->allocation_limit_address();
903
while ((*limit_addr - *top_addr) > allocation_amount) {
904
CHECK(!HEAP->always_allocate());
905
Object* array = HEAP->AllocateFixedArray(allocation_len)->ToObjectChecked();
906
CHECK(!array->IsFailure());
907
CHECK(new_space->Contains(array));
910
// Step 3: now allocate fixed array and JSObject to fill the whole new space.
911
int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
912
int fixed_array_len = LenFromSize(to_fill);
913
CHECK(fixed_array_len < FixedArray::kMaxLength);
915
CHECK(!HEAP->always_allocate());
916
Object* array = HEAP->AllocateFixedArray(fixed_array_len)->ToObjectChecked();
917
CHECK(!array->IsFailure());
918
CHECK(new_space->Contains(array));
920
Object* object = HEAP->AllocateJSObjectFromMap(*my_map)->ToObjectChecked();
921
CHECK(new_space->Contains(object));
922
JSObject* jsobject = JSObject::cast(object);
923
CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
924
CHECK_EQ(0, jsobject->properties()->length());
925
// Create a reference to object in new space in jsobject.
926
jsobject->FastPropertyAtPut(-1, array);
928
CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));
930
// Step 4: clone jsobject, but force always allocate first to create a clone
931
// in old pointer space.
932
Address old_pointer_space_top = HEAP->old_pointer_space()->top();
933
AlwaysAllocateScope aa_scope;
934
Object* clone_obj = HEAP->CopyJSObject(jsobject)->ToObjectChecked();
935
JSObject* clone = JSObject::cast(clone_obj);
936
if (clone->address() != old_pointer_space_top) {
937
// Alas, got allocated from free list, we cannot do checks.
940
CHECK(HEAP->old_pointer_space()->Contains(clone->address()));
944
TEST(TestCodeFlushing) {
945
i::FLAG_allow_natives_syntax = true;
946
// If we do not flush code this test is invalid.
947
if (!FLAG_flush_code) return;
949
v8::HandleScope scope;
950
const char* source = "function foo() {"
956
Handle<String> foo_name = FACTORY->LookupAsciiSymbol("foo");
958
// This compile will add the code to the compilation cache.
959
{ v8::HandleScope scope;
963
// Check function is compiled.
964
Object* func_value = Isolate::Current()->context()->global_object()->
965
GetProperty(*foo_name)->ToObjectChecked();
966
CHECK(func_value->IsJSFunction());
967
Handle<JSFunction> function(JSFunction::cast(func_value));
968
CHECK(function->shared()->is_compiled());
970
// TODO(1609) Currently incremental marker does not support code flushing.
971
HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
972
HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
974
CHECK(function->shared()->is_compiled());
976
HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
977
HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
978
HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
979
HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
980
HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
981
HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
983
// foo should no longer be in the compilation cache
984
CHECK(!function->shared()->is_compiled() || function->IsOptimized());
985
CHECK(!function->is_compiled() || function->IsOptimized());
986
// Call foo to get it recompiled.
988
CHECK(function->shared()->is_compiled());
989
CHECK(function->is_compiled());
993
// Count the number of native contexts in the weak list of native contexts.
994
int CountNativeContexts() {
996
Object* object = HEAP->native_contexts_list();
997
while (!object->IsUndefined()) {
999
object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
1005
// Count the number of user functions in the weak list of optimized
1006
// functions attached to a native context.
1007
static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
1009
Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1010
Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
1011
while (object->IsJSFunction() && !JSFunction::cast(object)->IsBuiltin()) {
1013
object = JSFunction::cast(object)->next_function_link();
1019
TEST(TestInternalWeakLists) {
1020
v8::V8::Initialize();
1022
static const int kNumTestContexts = 10;
1024
v8::HandleScope scope;
1025
v8::Persistent<v8::Context> ctx[kNumTestContexts];
1027
CHECK_EQ(0, CountNativeContexts());
1029
// Create a number of global contests which gets linked together.
1030
for (int i = 0; i < kNumTestContexts; i++) {
1031
ctx[i] = v8::Context::New();
1033
bool opt = (FLAG_always_opt && i::V8::UseCrankshaft());
1035
CHECK_EQ(i + 1, CountNativeContexts());
1039
// Create a handle scope so no function objects get stuch in the outer
1041
v8::HandleScope scope;
1042
const char* source = "function f1() { };"
1043
"function f2() { };"
1044
"function f3() { };"
1045
"function f4() { };"
1046
"function f5() { };";
1048
CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
1050
CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[i]));
1052
CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
1054
CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1056
CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1058
CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
1060
// Remove function f1, and
1061
CompileRun("f1=null");
1063
// Scavenge treats these references as strong.
1064
for (int j = 0; j < 10; j++) {
1065
HEAP->PerformScavenge();
1066
CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
1069
// Mark compact handles the weak references.
1070
ISOLATE->compilation_cache()->Clear();
1071
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1072
CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1074
// Get rid of f3 and f5 in the same way.
1075
CompileRun("f3=null");
1076
for (int j = 0; j < 10; j++) {
1077
HEAP->PerformScavenge();
1078
CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1080
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1081
CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1082
CompileRun("f5=null");
1083
for (int j = 0; j < 10; j++) {
1084
HEAP->PerformScavenge();
1085
CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1087
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1088
CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
1093
// Force compilation cache cleanup.
1094
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1096
// Dispose the native contexts one by one.
1097
for (int i = 0; i < kNumTestContexts; i++) {
1101
// Scavenge treats these references as strong.
1102
for (int j = 0; j < 10; j++) {
1103
HEAP->PerformScavenge();
1104
CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1107
// Mark compact handles the weak references.
1108
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1109
CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1112
CHECK_EQ(0, CountNativeContexts());
1116
// Count the number of native contexts in the weak list of native contexts
1117
// causing a GC after the specified number of elements.
1118
static int CountNativeContextsWithGC(int n) {
1120
Handle<Object> object(HEAP->native_contexts_list());
1121
while (!object->IsUndefined()) {
1123
if (count == n) HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1125
Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK));
1131
// Count the number of user functions in the weak list of optimized
1132
// functions attached to a native context causing a GC after the
1133
// specified number of elements.
1134
static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
1137
Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1138
Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST));
1139
while (object->IsJSFunction() &&
1140
!Handle<JSFunction>::cast(object)->IsBuiltin()) {
1142
if (count == n) HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1143
object = Handle<Object>(
1144
Object::cast(JSFunction::cast(*object)->next_function_link()));
1150
TEST(TestInternalWeakListsTraverseWithGC) {
1151
v8::V8::Initialize();
1153
static const int kNumTestContexts = 10;
1155
v8::HandleScope scope;
1156
v8::Persistent<v8::Context> ctx[kNumTestContexts];
1158
CHECK_EQ(0, CountNativeContexts());
1160
// Create an number of contexts and check the length of the weak list both
1161
// with and without GCs while iterating the list.
1162
for (int i = 0; i < kNumTestContexts; i++) {
1163
ctx[i] = v8::Context::New();
1164
CHECK_EQ(i + 1, CountNativeContexts());
1165
CHECK_EQ(i + 1, CountNativeContextsWithGC(i / 2 + 1));
1168
bool opt = (FLAG_always_opt && i::V8::UseCrankshaft());
1170
// Compile a number of functions the length of the weak list of optimized
1171
// functions both with and without GCs while iterating the list.
1173
const char* source = "function f1() { };"
1174
"function f2() { };"
1175
"function f3() { };"
1176
"function f4() { };"
1177
"function f5() { };";
1179
CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
1181
CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[0]));
1182
CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1184
CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[0]));
1185
CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1187
CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[0]));
1188
CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1190
CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[0]));
1191
CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
1193
CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[0]));
1194
CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 4));
1200
TEST(TestSizeOfObjects) {
1201
v8::V8::Initialize();
1203
// Get initial heap size after several full GCs, which will stabilize
1204
// the heap size and return with sweeping finished completely.
1205
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1206
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1207
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1208
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1209
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1210
CHECK(HEAP->old_pointer_space()->IsSweepingComplete());
1211
int initial_size = static_cast<int>(HEAP->SizeOfObjects());
1214
// Allocate objects on several different old-space pages so that
1215
// lazy sweeping kicks in for subsequent GC runs.
1216
AlwaysAllocateScope always_allocate;
1217
int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
1218
for (int i = 1; i <= 100; i++) {
1219
HEAP->AllocateFixedArray(8192, TENURED)->ToObjectChecked();
1220
CHECK_EQ(initial_size + i * filler_size,
1221
static_cast<int>(HEAP->SizeOfObjects()));
1225
// The heap size should go back to initial size after a full GC, even
1226
// though sweeping didn't finish yet.
1227
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1229
// Normally sweeping would not be complete here, but no guarantees.
1231
CHECK_EQ(initial_size, static_cast<int>(HEAP->SizeOfObjects()));
1233
// Advancing the sweeper step-wise should not change the heap size.
1234
while (!HEAP->old_pointer_space()->IsSweepingComplete()) {
1235
HEAP->old_pointer_space()->AdvanceSweeper(KB);
1236
CHECK_EQ(initial_size, static_cast<int>(HEAP->SizeOfObjects()));
1241
TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
1243
HEAP->EnsureHeapIsIterable();
1244
intptr_t size_of_objects_1 = HEAP->SizeOfObjects();
1245
HeapIterator iterator;
1246
intptr_t size_of_objects_2 = 0;
1247
for (HeapObject* obj = iterator.next();
1249
obj = iterator.next()) {
1250
if (!obj->IsFreeSpace()) {
1251
size_of_objects_2 += obj->Size();
1254
// Delta must be within 5% of the larger result.
1255
// TODO(gc): Tighten this up by distinguishing between byte
1256
// arrays that are real and those that merely mark free space
1258
if (size_of_objects_1 > size_of_objects_2) {
1259
intptr_t delta = size_of_objects_1 - size_of_objects_2;
1260
PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
1261
"Iterator: %" V8_PTR_PREFIX "d, "
1262
"delta: %" V8_PTR_PREFIX "d\n",
1263
size_of_objects_1, size_of_objects_2, delta);
1264
CHECK_GT(size_of_objects_1 / 20, delta);
1266
intptr_t delta = size_of_objects_2 - size_of_objects_1;
1267
PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
1268
"Iterator: %" V8_PTR_PREFIX "d, "
1269
"delta: %" V8_PTR_PREFIX "d\n",
1270
size_of_objects_1, size_of_objects_2, delta);
1271
CHECK_GT(size_of_objects_2 / 20, delta);
1276
static void FillUpNewSpace(NewSpace* new_space) {
1277
// Fill up new space to the point that it is completely full. Make sure
1278
// that the scavenger does not undo the filling.
1279
v8::HandleScope scope;
1280
AlwaysAllocateScope always_allocate;
1281
intptr_t available = new_space->EffectiveCapacity() - new_space->Size();
1282
intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
1283
for (intptr_t i = 0; i < number_of_fillers; i++) {
1284
CHECK(HEAP->InNewSpace(*FACTORY->NewFixedArray(32, NOT_TENURED)));
1289
TEST(GrowAndShrinkNewSpace) {
1291
NewSpace* new_space = HEAP->new_space();
1293
if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize() ||
1294
HEAP->MaxSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
1295
// The max size cannot exceed the reserved size, since semispaces must be
1296
// always within the reserved space. We can't test new space growing and
1297
// shrinking if the reserved size is the same as the minimum (initial) size.
1301
// Explicitly growing should double the space capacity.
1302
intptr_t old_capacity, new_capacity;
1303
old_capacity = new_space->Capacity();
1305
new_capacity = new_space->Capacity();
1306
CHECK(2 * old_capacity == new_capacity);
1308
old_capacity = new_space->Capacity();
1309
FillUpNewSpace(new_space);
1310
new_capacity = new_space->Capacity();
1311
CHECK(old_capacity == new_capacity);
1313
// Explicitly shrinking should not affect space capacity.
1314
old_capacity = new_space->Capacity();
1315
new_space->Shrink();
1316
new_capacity = new_space->Capacity();
1317
CHECK(old_capacity == new_capacity);
1319
// Let the scavenger empty the new space.
1320
HEAP->CollectGarbage(NEW_SPACE);
1321
CHECK_LE(new_space->Size(), old_capacity);
1323
// Explicitly shrinking should halve the space capacity.
1324
old_capacity = new_space->Capacity();
1325
new_space->Shrink();
1326
new_capacity = new_space->Capacity();
1327
CHECK(old_capacity == 2 * new_capacity);
1329
// Consecutive shrinking should not affect space capacity.
1330
old_capacity = new_space->Capacity();
1331
new_space->Shrink();
1332
new_space->Shrink();
1333
new_space->Shrink();
1334
new_capacity = new_space->Capacity();
1335
CHECK(old_capacity == new_capacity);
1339
TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
1342
if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize() ||
1343
HEAP->MaxSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
1344
// The max size cannot exceed the reserved size, since semispaces must be
1345
// always within the reserved space. We can't test new space growing and
1346
// shrinking if the reserved size is the same as the minimum (initial) size.
1350
v8::HandleScope scope;
1351
NewSpace* new_space = HEAP->new_space();
1352
intptr_t old_capacity, new_capacity;
1353
old_capacity = new_space->Capacity();
1355
new_capacity = new_space->Capacity();
1356
CHECK(2 * old_capacity == new_capacity);
1357
FillUpNewSpace(new_space);
1358
HEAP->CollectAllAvailableGarbage();
1359
new_capacity = new_space->Capacity();
1360
CHECK(old_capacity == new_capacity);
1364
static int NumberOfGlobalObjects() {
1366
HeapIterator iterator;
1367
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1368
if (obj->IsGlobalObject()) count++;
1374
// Test that we don't embed maps from foreign contexts into
1376
TEST(LeakNativeContextViaMap) {
1377
i::FLAG_allow_natives_syntax = true;
1378
v8::HandleScope outer_scope;
1379
v8::Persistent<v8::Context> ctx1 = v8::Context::New();
1380
v8::Persistent<v8::Context> ctx2 = v8::Context::New();
1383
HEAP->CollectAllAvailableGarbage();
1384
CHECK_EQ(4, NumberOfGlobalObjects());
1387
v8::HandleScope inner_scope;
1388
CompileRun("var v = {x: 42}");
1389
v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1391
ctx2->Global()->Set(v8_str("o"), v);
1392
v8::Local<v8::Value> res = CompileRun(
1393
"function f() { return o.x; }"
1394
"for (var i = 0; i < 10; ++i) f();"
1395
"%OptimizeFunctionOnNextCall(f);"
1397
CHECK_EQ(42, res->Int32Value());
1398
ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1402
v8::V8::ContextDisposedNotification();
1404
HEAP->CollectAllAvailableGarbage();
1405
CHECK_EQ(2, NumberOfGlobalObjects());
1407
HEAP->CollectAllAvailableGarbage();
1408
CHECK_EQ(0, NumberOfGlobalObjects());
1412
// Test that we don't embed functions from foreign contexts into
1414
TEST(LeakNativeContextViaFunction) {
1415
i::FLAG_allow_natives_syntax = true;
1416
v8::HandleScope outer_scope;
1417
v8::Persistent<v8::Context> ctx1 = v8::Context::New();
1418
v8::Persistent<v8::Context> ctx2 = v8::Context::New();
1421
HEAP->CollectAllAvailableGarbage();
1422
CHECK_EQ(4, NumberOfGlobalObjects());
1425
v8::HandleScope inner_scope;
1426
CompileRun("var v = function() { return 42; }");
1427
v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1429
ctx2->Global()->Set(v8_str("o"), v);
1430
v8::Local<v8::Value> res = CompileRun(
1431
"function f(x) { return x(); }"
1432
"for (var i = 0; i < 10; ++i) f(o);"
1433
"%OptimizeFunctionOnNextCall(f);"
1435
CHECK_EQ(42, res->Int32Value());
1436
ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1440
v8::V8::ContextDisposedNotification();
1442
HEAP->CollectAllAvailableGarbage();
1443
CHECK_EQ(2, NumberOfGlobalObjects());
1445
HEAP->CollectAllAvailableGarbage();
1446
CHECK_EQ(0, NumberOfGlobalObjects());
1450
TEST(LeakNativeContextViaMapKeyed) {
1451
i::FLAG_allow_natives_syntax = true;
1452
v8::HandleScope outer_scope;
1453
v8::Persistent<v8::Context> ctx1 = v8::Context::New();
1454
v8::Persistent<v8::Context> ctx2 = v8::Context::New();
1457
HEAP->CollectAllAvailableGarbage();
1458
CHECK_EQ(4, NumberOfGlobalObjects());
1461
v8::HandleScope inner_scope;
1462
CompileRun("var v = [42, 43]");
1463
v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1465
ctx2->Global()->Set(v8_str("o"), v);
1466
v8::Local<v8::Value> res = CompileRun(
1467
"function f() { return o[0]; }"
1468
"for (var i = 0; i < 10; ++i) f();"
1469
"%OptimizeFunctionOnNextCall(f);"
1471
CHECK_EQ(42, res->Int32Value());
1472
ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1476
v8::V8::ContextDisposedNotification();
1478
HEAP->CollectAllAvailableGarbage();
1479
CHECK_EQ(2, NumberOfGlobalObjects());
1481
HEAP->CollectAllAvailableGarbage();
1482
CHECK_EQ(0, NumberOfGlobalObjects());
1486
TEST(LeakNativeContextViaMapProto) {
1487
i::FLAG_allow_natives_syntax = true;
1488
v8::HandleScope outer_scope;
1489
v8::Persistent<v8::Context> ctx1 = v8::Context::New();
1490
v8::Persistent<v8::Context> ctx2 = v8::Context::New();
1493
HEAP->CollectAllAvailableGarbage();
1494
CHECK_EQ(4, NumberOfGlobalObjects());
1497
v8::HandleScope inner_scope;
1498
CompileRun("var v = { y: 42}");
1499
v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1501
ctx2->Global()->Set(v8_str("o"), v);
1502
v8::Local<v8::Value> res = CompileRun(
1508
"for (var i = 0; i < 10; ++i) f();"
1509
"%OptimizeFunctionOnNextCall(f);"
1511
CHECK_EQ(42, res->Int32Value());
1512
ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1516
v8::V8::ContextDisposedNotification();
1518
HEAP->CollectAllAvailableGarbage();
1519
CHECK_EQ(2, NumberOfGlobalObjects());
1521
HEAP->CollectAllAvailableGarbage();
1522
CHECK_EQ(0, NumberOfGlobalObjects());
1526
TEST(InstanceOfStubWriteBarrier) {
1527
i::FLAG_allow_natives_syntax = true;
1529
i::FLAG_verify_heap = true;
1533
if (!i::V8::UseCrankshaft()) return;
1534
v8::HandleScope outer_scope;
1537
v8::HandleScope scope;
1539
"function foo () { }"
1540
"function mkbar () { return new (new Function(\"\")) (); }"
1541
"function f (x) { return (x instanceof foo); }"
1542
"function g () { f(mkbar()); }"
1543
"f(new foo()); f(new foo());"
1544
"%OptimizeFunctionOnNextCall(f);"
1545
"f(new foo()); g();");
1548
IncrementalMarking* marking = HEAP->incremental_marking();
1552
Handle<JSFunction> f =
1553
v8::Utils::OpenHandle(
1554
*v8::Handle<v8::Function>::Cast(
1555
v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
1557
CHECK(f->IsOptimized());
1559
while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
1560
!marking->IsStopped()) {
1561
// Discard any pending GC requests otherwise we will get GC when we enter
1563
marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
1566
CHECK(marking->IsMarking());
1569
v8::HandleScope scope;
1570
v8::Handle<v8::Object> global = v8::Context::GetCurrent()->Global();
1571
v8::Handle<v8::Function> g =
1572
v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
1573
g->Call(global, 0, NULL);
1576
HEAP->incremental_marking()->set_should_hurry(true);
1577
HEAP->CollectGarbage(OLD_POINTER_SPACE);
1581
TEST(PrototypeTransitionClearing) {
1583
v8::HandleScope scope;
1588
"for (var i = 0; i < 10; i++) {"
1590
" var prototype = {};"
1591
" object.__proto__ = prototype;"
1592
" if (i >= 3) live.push(object, prototype);"
1595
Handle<JSObject> baseObject =
1596
v8::Utils::OpenHandle(
1597
*v8::Handle<v8::Object>::Cast(
1598
v8::Context::GetCurrent()->Global()->Get(v8_str("base"))));
1600
// Verify that only dead prototype transitions are cleared.
1601
CHECK_EQ(10, baseObject->map()->NumberOfProtoTransitions());
1602
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1603
const int transitions = 10 - 3;
1604
CHECK_EQ(transitions, baseObject->map()->NumberOfProtoTransitions());
1606
// Verify that prototype transitions array was compacted.
1607
FixedArray* trans = baseObject->map()->GetPrototypeTransitions();
1608
for (int i = 0; i < transitions; i++) {
1609
int j = Map::kProtoTransitionHeaderSize +
1610
i * Map::kProtoTransitionElementsPerEntry;
1611
CHECK(trans->get(j + Map::kProtoTransitionMapOffset)->IsMap());
1612
Object* proto = trans->get(j + Map::kProtoTransitionPrototypeOffset);
1613
CHECK(proto->IsTheHole() || proto->IsJSObject());
1616
// Make sure next prototype is placed on an old-space evacuation candidate.
1617
Handle<JSObject> prototype;
1618
PagedSpace* space = HEAP->old_pointer_space();
1620
prototype = FACTORY->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
1621
} while (space->FirstPage() == space->LastPage() ||
1622
!space->LastPage()->Contains(prototype->address()));
1624
// Add a prototype on an evacuation candidate and verify that transition
1625
// clearing correctly records slots in prototype transition array.
1626
i::FLAG_always_compact = true;
1627
Handle<Map> map(baseObject->map());
1628
CHECK(!space->LastPage()->Contains(
1629
map->GetPrototypeTransitions()->address()));
1630
CHECK(space->LastPage()->Contains(prototype->address()));
1631
baseObject->SetPrototype(*prototype, false)->ToObjectChecked();
1632
CHECK(map->GetPrototypeTransition(*prototype)->IsMap());
1633
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1634
CHECK(map->GetPrototypeTransition(*prototype)->IsMap());
1638
TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
1639
i::FLAG_allow_natives_syntax = true;
1641
i::FLAG_verify_heap = true;
1645
if (!i::V8::UseCrankshaft()) return;
1646
v8::HandleScope outer_scope;
1649
v8::HandleScope scope;
1653
" for (var i = 0; i < 100; i++) s += i;"
1657
"%OptimizeFunctionOnNextCall(f);"
1660
Handle<JSFunction> f =
1661
v8::Utils::OpenHandle(
1662
*v8::Handle<v8::Function>::Cast(
1663
v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
1664
CHECK(f->IsOptimized());
1666
IncrementalMarking* marking = HEAP->incremental_marking();
1670
// The following two calls will increment HEAP->global_ic_age().
1671
const int kLongIdlePauseInMs = 1000;
1672
v8::V8::ContextDisposedNotification();
1673
v8::V8::IdleNotification(kLongIdlePauseInMs);
1675
while (!marking->IsStopped() && !marking->IsComplete()) {
1676
marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
1678
if (!marking->IsStopped() || marking->should_hurry()) {
1679
// We don't normally finish a GC via Step(), we normally finish by
1680
// setting the stack guard and then do the final steps in the stack
1681
// guard interrupt. But here we didn't ask for that, and there is no
1682
// JS code running to trigger the interrupt, so we explicitly finalize
1684
HEAP->CollectAllGarbage(Heap::kNoGCFlags,
1685
"Test finalizing incremental mark-sweep");
1688
CHECK_EQ(HEAP->global_ic_age(), f->shared()->ic_age());
1689
CHECK_EQ(0, f->shared()->opt_count());
1690
CHECK_EQ(0, f->shared()->code()->profiler_ticks());
1694
TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
1695
i::FLAG_allow_natives_syntax = true;
1697
i::FLAG_verify_heap = true;
1701
if (!i::V8::UseCrankshaft()) return;
1702
v8::HandleScope outer_scope;
1705
v8::HandleScope scope;
1709
" for (var i = 0; i < 100; i++) s += i;"
1713
"%OptimizeFunctionOnNextCall(f);"
1716
Handle<JSFunction> f =
1717
v8::Utils::OpenHandle(
1718
*v8::Handle<v8::Function>::Cast(
1719
v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
1720
CHECK(f->IsOptimized());
1722
HEAP->incremental_marking()->Abort();
1724
// The following two calls will increment HEAP->global_ic_age().
1725
// Since incremental marking is off, IdleNotification will do full GC.
1726
const int kLongIdlePauseInMs = 1000;
1727
v8::V8::ContextDisposedNotification();
1728
v8::V8::IdleNotification(kLongIdlePauseInMs);
1730
CHECK_EQ(HEAP->global_ic_age(), f->shared()->ic_age());
1731
CHECK_EQ(0, f->shared()->opt_count());
1732
CHECK_EQ(0, f->shared()->code()->profiler_ticks());
1736
// Test that HAllocateObject will always return an object in new-space.
1737
TEST(OptimizedAllocationAlwaysInNewSpace) {
1738
i::FLAG_allow_natives_syntax = true;
1740
if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
1741
v8::HandleScope scope;
1743
FillUpNewSpace(HEAP->new_space());
1744
AlwaysAllocateScope always_allocate;
1745
v8::Local<v8::Value> res = CompileRun(
1748
" for (var i = 0; i < 32; i++) {"
1749
" this['x' + i] = x;"
1752
"function f(x) { return new c(x); };"
1754
"%OptimizeFunctionOnNextCall(f);"
1756
CHECK_EQ(4, res->ToObject()->GetRealNamedProperty(v8_str("x"))->Int32Value());
1758
Handle<JSObject> o =
1759
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
1761
CHECK(HEAP->InNewSpace(*o));
1765
static int CountMapTransitions(Map* map) {
1766
return map->transitions()->number_of_transitions();
1770
// Go through all incremental marking steps in one swoop.
1771
static void SimulateIncrementalMarking() {
1772
IncrementalMarking* marking = HEAP->incremental_marking();
1773
CHECK(marking->IsStopped());
1775
CHECK(marking->IsMarking());
1776
while (!marking->IsComplete()) {
1777
marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
1779
CHECK(marking->IsComplete());
1783
// Test that map transitions are cleared and maps are collected with
1784
// incremental marking as well.
1786
i::FLAG_allow_natives_syntax = true;
1787
i::FLAG_trace_incremental_marking = true;
1789
v8::HandleScope scope;
1790
static const int transitions_count = 256;
1793
AlwaysAllocateScope always_allocate;
1794
for (int i = 0; i < transitions_count; i++) {
1795
EmbeddedVector<char, 64> buffer;
1796
OS::SNPrintF(buffer, "var o = new Object; o.prop%d = %d;", i, i);
1797
CompileRun(buffer.start());
1799
CompileRun("var root = new Object;");
1802
Handle<JSObject> root =
1803
v8::Utils::OpenHandle(
1804
*v8::Handle<v8::Object>::Cast(
1805
v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
1807
// Count number of live transitions before marking.
1808
int transitions_before = CountMapTransitions(root->map());
1809
CompileRun("%DebugPrint(root);");
1810
CHECK_EQ(transitions_count, transitions_before);
1812
SimulateIncrementalMarking();
1813
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1815
// Count number of live transitions after marking. Note that one transition
1816
// is left, because 'o' still holds an instance of one transition target.
1817
int transitions_after = CountMapTransitions(root->map());
1818
CompileRun("%DebugPrint(root);");
1819
CHECK_EQ(1, transitions_after);
1823
TEST(Regress2143a) {
1824
i::FLAG_collect_maps = true;
1825
i::FLAG_incremental_marking = true;
1827
v8::HandleScope scope;
1829
// Prepare a map transition from the root object together with a yet
1830
// untransitioned root object.
1831
CompileRun("var root = new Object;"
1833
"root = new Object;");
1835
SimulateIncrementalMarking();
1837
// Compile a StoreIC that performs the prepared map transition. This
1838
// will restart incremental marking and should make sure the root is
1839
// marked grey again.
1840
CompileRun("function f(o) {"
1846
// This bug only triggers with aggressive IC clearing.
1847
HEAP->AgeInlineCaches();
1849
// Explicitly request GC to perform final marking step and sweeping.
1850
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1852
Handle<JSObject> root =
1853
v8::Utils::OpenHandle(
1854
*v8::Handle<v8::Object>::Cast(
1855
v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
1857
// The root object should be in a sane state.
1858
CHECK(root->IsJSObject());
1859
CHECK(root->map()->IsMap());
1863
TEST(Regress2143b) {
1864
i::FLAG_collect_maps = true;
1865
i::FLAG_incremental_marking = true;
1866
i::FLAG_allow_natives_syntax = true;
1868
v8::HandleScope scope;
1870
// Prepare a map transition from the root object together with a yet
1871
// untransitioned root object.
1872
CompileRun("var root = new Object;"
1874
"root = new Object;");
1876
SimulateIncrementalMarking();
1878
// Compile an optimized LStoreNamedField that performs the prepared
1879
// map transition. This will restart incremental marking and should
1880
// make sure the root is marked grey again.
1881
CompileRun("function f(o) {"
1886
"%OptimizeFunctionOnNextCall(f);"
1888
"%DeoptimizeFunction(f);");
1890
// This bug only triggers with aggressive IC clearing.
1891
HEAP->AgeInlineCaches();
1893
// Explicitly request GC to perform final marking step and sweeping.
1894
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1896
Handle<JSObject> root =
1897
v8::Utils::OpenHandle(
1898
*v8::Handle<v8::Object>::Cast(
1899
v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
1901
// The root object should be in a sane state.
1902
CHECK(root->IsJSObject());
1903
CHECK(root->map()->IsMap());
1907
// Implemented in the test-alloc.cc test suite.
1908
void SimulateFullSpace(PagedSpace* space);
1911
TEST(ReleaseOverReservedPages) {
1912
i::FLAG_trace_gc = true;
1913
// The optimizer can allocate stuff, messing up the test.
1914
i::FLAG_crankshaft = false;
1915
i::FLAG_always_opt = false;
1917
v8::HandleScope scope;
1918
static const int number_of_test_pages = 20;
1920
// Prepare many pages with low live-bytes count.
1921
PagedSpace* old_pointer_space = HEAP->old_pointer_space();
1922
CHECK_EQ(1, old_pointer_space->CountTotalPages());
1923
for (int i = 0; i < number_of_test_pages; i++) {
1924
AlwaysAllocateScope always_allocate;
1925
SimulateFullSpace(old_pointer_space);
1926
FACTORY->NewFixedArray(1, TENURED);
1928
CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
1930
// Triggering one GC will cause a lot of garbage to be discovered but
1931
// even spread across all allocated pages.
1932
HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered for preparation");
1933
CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
1935
// Triggering subsequent GCs should cause at least half of the pages
1936
// to be released to the OS after at most two cycles.
1937
HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
1938
CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
1939
HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
1940
CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);
1942
// Triggering a last-resort GC should cause all pages to be released to the
1943
// OS so that other processes can seize the memory. If we get a failure here
1944
// where there are 2 pages left instead of 1, then we should increase the
1945
// size of the first page a little in SizeOfFirstPage in spaces.cc. The
1946
// first page should be small in order to reduce memory used when the VM
1947
// boots, but if the 20 small arrays don't fit on the first page then that's
1948
// an indication that it is too small.
1949
HEAP->CollectAllAvailableGarbage("triggered really hard");
1950
CHECK_EQ(1, old_pointer_space->CountTotalPages());
1956
v8::HandleScope scope;
1957
Handle<String> slice(HEAP->empty_string());
1960
// Generate a parent that lives in new-space.
1961
v8::HandleScope inner_scope;
1962
const char* c = "This text is long enough to trigger sliced strings.";
1963
Handle<String> s = FACTORY->NewStringFromAscii(CStrVector(c));
1964
CHECK(s->IsSeqAsciiString());
1965
CHECK(HEAP->InNewSpace(*s));
1967
// Generate a sliced string that is based on the above parent and
1968
// lives in old-space.
1969
FillUpNewSpace(HEAP->new_space());
1970
AlwaysAllocateScope always_allocate;
1972
// TODO(mstarzinger): Unfortunately FillUpNewSpace() still leaves
1973
// some slack, so we need to allocate a few sliced strings.
1974
for (int i = 0; i < 16; i++) {
1975
t = FACTORY->NewProperSubString(s, 5, 35);
1977
CHECK(t->IsSlicedString());
1978
CHECK(!HEAP->InNewSpace(*t));
1979
*slice.location() = *t.location();
1982
CHECK(SlicedString::cast(*slice)->parent()->IsSeqAsciiString());
1983
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1984
CHECK(SlicedString::cast(*slice)->parent()->IsSeqAsciiString());
1989
TEST(PrintSharedFunctionInfo) {
1991
v8::HandleScope scope;
1992
const char* source = "f = function() { return 987654321; }\n"
1993
"g = function() { return 123456789; }\n";
1995
Handle<JSFunction> g =
1996
v8::Utils::OpenHandle(
1997
*v8::Handle<v8::Function>::Cast(
1998
v8::Context::GetCurrent()->Global()->Get(v8_str("g"))));
2000
AssertNoAllocation no_alloc;
2001
g->shared()->PrintLn();
2003
#endif // OBJECT_PRINT
2008
v8::HandleScope scope;
2010
v8::Handle<v8::String> value = v8_str("val string");
2011
Smi* hash = Smi::FromInt(321);
2012
Heap* heap = Isolate::Current()->heap();
2014
for (int i = 0; i < 2; i++) {
2015
// Store identity hash first and common hidden property second.
2016
v8::Handle<v8::Object> obj = v8::Object::New();
2017
Handle<JSObject> internal_obj = v8::Utils::OpenHandle(*obj);
2018
CHECK(internal_obj->HasFastProperties());
2020
// In the first iteration, set hidden value first and identity hash second.
2021
// In the second iteration, reverse the order.
2022
if (i == 0) obj->SetHiddenValue(v8_str("key string"), value);
2023
MaybeObject* maybe_obj = internal_obj->SetIdentityHash(hash,
2025
CHECK(!maybe_obj->IsFailure());
2026
if (i == 1) obj->SetHiddenValue(v8_str("key string"), value);
2030
internal_obj->GetHiddenProperty(heap->identity_hash_symbol()));
2031
CHECK(value->Equals(obj->GetHiddenValue(v8_str("key string"))));
2034
DescriptorArray* descriptors = internal_obj->map()->instance_descriptors();
2035
ObjectHashTable* hashtable = ObjectHashTable::cast(
2036
internal_obj->FastPropertyAt(descriptors->GetFieldIndex(0)));
2037
// HashTable header (5) and 4 initial entries (8).
2038
CHECK_LE(hashtable->SizeFor(hashtable->length()), 13 * kPointerSize);
2043
TEST(IncrementalMarkingClearsTypeFeedbackCells) {
2044
if (i::FLAG_always_opt) return;
2046
v8::HandleScope scope;
2047
v8::Local<v8::Value> fun1, fun2;
2051
CompileRun("function fun() {};");
2052
fun1 = env->Global()->Get(v8_str("fun"));
2057
CompileRun("function fun() {};");
2058
fun2 = env->Global()->Get(v8_str("fun"));
2061
// Prepare function f that contains type feedback for closures
2062
// originating from two different native contexts.
2063
v8::Context::GetCurrent()->Global()->Set(v8_str("fun1"), fun1);
2064
v8::Context::GetCurrent()->Global()->Set(v8_str("fun2"), fun2);
2065
CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
2066
Handle<JSFunction> f =
2067
v8::Utils::OpenHandle(
2068
*v8::Handle<v8::Function>::Cast(
2069
v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
2070
Handle<TypeFeedbackCells> cells(TypeFeedbackInfo::cast(
2071
f->shared()->code()->type_feedback_info())->type_feedback_cells());
2073
CHECK_EQ(2, cells->CellCount());
2074
CHECK(cells->Cell(0)->value()->IsJSFunction());
2075
CHECK(cells->Cell(1)->value()->IsJSFunction());
2077
SimulateIncrementalMarking();
2078
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
2080
CHECK_EQ(2, cells->CellCount());
2081
CHECK(cells->Cell(0)->value()->IsTheHole());
2082
CHECK(cells->Cell(1)->value()->IsTheHole());
2086
static Code* FindFirstIC(Code* code, Code::Kind kind) {
2087
int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
2088
RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
2089
RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID) |
2090
RelocInfo::ModeMask(RelocInfo::CODE_TARGET_CONTEXT);
2091
for (RelocIterator it(code, mask); !it.done(); it.next()) {
2092
RelocInfo* info = it.rinfo();
2093
Code* target = Code::GetCodeFromTargetAddress(info->target_address());
2094
if (target->is_inline_cache_stub() && target->kind() == kind) {
2102
TEST(IncrementalMarkingPreservesMonomorhpicIC) {
2103
if (i::FLAG_always_opt) return;
2105
v8::HandleScope scope;
2107
// Prepare function f that contains a monomorphic IC for object
2108
// originating from the same native context.
2109
CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
2110
"function f(o) { return o.x; } f(obj); f(obj);");
2111
Handle<JSFunction> f =
2112
v8::Utils::OpenHandle(
2113
*v8::Handle<v8::Function>::Cast(
2114
v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
2116
Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2117
CHECK(ic_before->ic_state() == MONOMORPHIC);
2119
SimulateIncrementalMarking();
2120
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
2122
Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2123
CHECK(ic_after->ic_state() == MONOMORPHIC);
2127
TEST(IncrementalMarkingClearsMonomorhpicIC) {
2128
if (i::FLAG_always_opt) return;
2130
v8::HandleScope scope;
2131
v8::Local<v8::Value> obj1;
2135
CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
2136
obj1 = env->Global()->Get(v8_str("obj"));
2139
// Prepare function f that contains a monomorphic IC for object
2140
// originating from a different native context.
2141
v8::Context::GetCurrent()->Global()->Set(v8_str("obj1"), obj1);
2142
CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
2143
Handle<JSFunction> f =
2144
v8::Utils::OpenHandle(
2145
*v8::Handle<v8::Function>::Cast(
2146
v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
2148
Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2149
CHECK(ic_before->ic_state() == MONOMORPHIC);
2151
// Fire context dispose notification.
2152
v8::V8::ContextDisposedNotification();
2153
SimulateIncrementalMarking();
2154
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
2156
Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2157
CHECK(ic_after->ic_state() == UNINITIALIZED);
2161
// Checks that a context-dispose notification followed by an incremental
// marking cycle clears a polymorphic LOAD_IC whose stubs originate from
// the disposed native contexts.
// NOTE(review): reconstructed from a garbled paste — the two per-context
// scopes below were missing; confirm `LocalContext` matches upstream.
TEST(IncrementalMarkingClearsPolymorhpicIC) {
  if (i::FLAG_always_opt) return;
  InitializeVM();
  v8::HandleScope scope;
  v8::Local<v8::Value> obj1, obj2;

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(v8_str("obj"));
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(v8_str("obj"));
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  v8::Context::GetCurrent()->Global()->Set(v8_str("obj1"), obj1);
  v8::Context::GetCurrent()->Global()->Set(v8_str("obj2"), obj2);
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));

  // Two receiver maps from different contexts make the IC megamorphic.
  Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(ic_before->ic_state() == MEGAMORPHIC);

  // Fire context dispose notification.
  v8::V8::ContextDisposedNotification();
  SimulateIncrementalMarking();
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);

  // The GC must have reset the IC back to its uninitialized state.
  Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(ic_after->ic_state() == UNINITIALIZED);
}
2202
class SourceResource: public v8::String::ExternalAsciiStringResource {
2204
explicit SourceResource(const char* data)
2205
: data_(data), length_(strlen(data)) { }
2207
virtual void Dispose() {
2208
i::DeleteArray(data_);
2212
const char* data() const { return data_; }
2214
size_t length() const { return length_; }
2216
bool IsDisposed() { return data_ == NULL; }
2224
// Test that the data retained by the Error.stack accessor is released
// after the first time the accessor is fired.  We use an external string
// to check whether the data is being released, since the external string
// resource's callback is fired when the external string is GC'ed.
// NOTE(review): reconstructed from a garbled paste — the string-literal
// continuation lines and the inner handle scope braces were missing.
TEST(ReleaseStackTraceData) {
  InitializeVM();
  v8::HandleScope scope;
  static const char* source = "var error = 1;       "
                              "try {                "
                              "  throw new Error(); "
                              "} catch (e) {        "
                              "  error = e;         "
                              "}                    ";
  SourceResource* resource = new SourceResource(i::StrDup(source));
  {
    // Inner scope: once it closes, only the stack trace (if anything)
    // keeps the external source alive.
    v8::HandleScope scope;
    v8::Handle<v8::String> source_string = v8::String::NewExternal(resource);
    v8::Script::Compile(source_string)->Run();
    CHECK(!resource->IsDisposed());
  }
  HEAP->CollectAllAvailableGarbage();
  // External source is being retained by the stack trace.
  CHECK(!resource->IsDisposed());

  // Formatting the stack trace drops the reference to the source.
  CompileRun("error.stack; error.stack;");
  HEAP->CollectAllAvailableGarbage();
  // External source has been released.
  CHECK(resource->IsDisposed());
}
2257
// Regression test for issue 144230: a CallIC sitting on an evacuation
// candidate must not be left pointing at relocated code after a GC that
// clears ICs by ic_age.
// NOTE(review): reconstructed from a garbled paste — the three inner
// scopes and the tails of the CompileRun string literals were missing.
TEST(Regression144230) {
  InitializeVM();
  v8::HandleScope scope;

  // First make sure that the uninitialized CallIC stub is on a single page
  // that will later be selected as an evacuation candidate.
  {
    v8::HandleScope inner_scope;
    AlwaysAllocateScope always_allocate;
    SimulateFullSpace(HEAP->code_space());
    ISOLATE->stub_cache()->ComputeCallInitialize(9, RelocInfo::CODE_TARGET);
  }

  // Second compile a CallIC and execute it once so that it gets patched to
  // the pre-monomorphic stub. These code objects are on yet another page.
  {
    v8::HandleScope inner_scope;
    AlwaysAllocateScope always_allocate;
    SimulateFullSpace(HEAP->code_space());
    CompileRun("var o = { f:function(a,b,c,d,e,f,g,h,i) {}};"
               "function call() { o.f(1,2,3,4,5,6,7,8,9); };"
               "call();");
  }

  // Third we fill up the last page of the code space so that it does not get
  // chosen as an evacuation candidate.
  {
    v8::HandleScope inner_scope;
    AlwaysAllocateScope always_allocate;
    CompileRun("for (var i = 0; i < 2000; i++) {"
               "  eval('function f' + i + '() { return ' + i +'; };' +"
               "       'f' + i + '();');"
               "}");
  }
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);

  // Fourth is the tricky part. Make sure the code containing the CallIC is
  // visited first without clearing the IC. The shared function info is then
  // visited later, causing the CallIC to be cleared.
  Handle<String> name = FACTORY->LookupAsciiSymbol("call");
  Handle<GlobalObject> global(ISOLATE->context()->global_object());
  MaybeObject* maybe_call = global->GetProperty(*name);
  JSFunction* call = JSFunction::cast(maybe_call->ToObjectChecked());
  // Detach "call" from the global so only our handles keep it alive, and
  // bump its ic_age so the next GC will clear its ICs.
  USE(global->SetProperty(*name, Smi::FromInt(0), NONE, kNonStrictMode));
  ISOLATE->compilation_cache()->Clear();
  call->shared()->set_ic_age(HEAP->global_ic_age() + 1);
  Handle<Object> call_code(call->code());
  Handle<Object> call_function(call);

  // Now we are ready to mess up the heap.
  HEAP->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);

  // Either heap verification caught the problem already or we go kaboom once
  // the CallIC is executed the next time.
  USE(global->SetProperty(*name, *call_function, NONE, kNonStrictMode));
  CompileRun("call();");
}