// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
#include "v8.h"

#include "profile-generator-inl.h"

#include "global-handles.h"
#include "heap-profiler.h"
#include "scopeinfo.h"
43
// Starts with room for 4 tracked security tokens; both lists grow in step.
// NOTE(review): initializer list reconstructed from damaged extraction —
// verify against upstream V8.
TokenEnumerator::TokenEnumerator()
    : token_locations_(4),
      token_removed_(4) {
}
49
// Releases every global handle still held for a token that the GC has not
// already removed (removed slots were disposed by the weak callback).
TokenEnumerator::~TokenEnumerator() {
  Isolate* isolate = Isolate::Current();
  for (int i = 0; i < token_locations_.length(); ++i) {
    if (!token_removed_[i]) {
      isolate->global_handles()->ClearWeakness(token_locations_[i]);
      isolate->global_handles()->Destroy(token_locations_[i]);
    }
  }
}
60
// Maps a security token object to a small stable integer id, creating a new
// weak global handle (and id) for tokens not seen before.
// Returns kNoSecurityToken for a NULL token.
int TokenEnumerator::GetTokenId(Object* token) {
  Isolate* isolate = Isolate::Current();
  if (token == NULL) return TokenEnumerator::kNoSecurityToken;
  // Reuse an existing id if the same live token was registered before.
  for (int i = 0; i < token_locations_.length(); ++i) {
    if (*token_locations_[i] == token && !token_removed_[i]) return i;
  }
  Handle<Object> handle = isolate->global_handles()->Create(token);
  // handle.location() points to a memory cell holding a pointer
  // to a token object in the V8's heap.
  isolate->global_handles()->MakeWeak(handle.location(), this,
                                      TokenRemovedCallback);
  token_locations_.Add(handle.location());
  token_removed_.Add(false);
  return token_locations_.length() - 1;
}
77
void TokenEnumerator::TokenRemovedCallback(v8::Persistent<v8::Value> handle,
79
reinterpret_cast<TokenEnumerator*>(parameter)->TokenRemoved(
80
Utils::OpenHandle(*handle).location());
85
// Marks the slot whose handle cell matches token_location as removed, so its
// id is never handed out again for a different token.
void TokenEnumerator::TokenRemoved(Object** token_location) {
  for (int i = 0; i < token_locations_.length(); ++i) {
    if (token_locations_[i] == token_location && !token_removed_[i]) {
      token_removed_[i] = true;
      return;
    }
  }
}
95
// The storage interns C strings in a hash map keyed by content (StringsMatch).
StringsStorage::StringsStorage()
    : names_(StringsMatch) {
}
100
// Frees every interned string; the map entries own their heap copies.
StringsStorage::~StringsStorage() {
  for (HashMap::Entry* p = names_.Start();
       p != NULL;
       p = names_.Next(p)) {
    DeleteArray(reinterpret_cast<const char*>(p->value));
  }
}
109
const char* StringsStorage::GetCopy(const char* src) {
110
int len = static_cast<int>(strlen(src));
111
Vector<char> dst = Vector<char>::New(len + 1);
112
OS::StrNCpy(dst, src, len);
115
HashSequentialString(dst.start(), len, HEAP->HashSeed());
116
return AddOrDisposeString(dst.start(), hash);
120
const char* StringsStorage::GetFormatted(const char* format, ...) {
122
va_start(args, format);
123
const char* result = GetVFormatted(format, args);
129
const char* StringsStorage::AddOrDisposeString(char* str, uint32_t hash) {
130
HashMap::Entry* cache_entry = names_.Lookup(str, hash, true);
131
if (cache_entry->value == NULL) {
133
cache_entry->value = str;
137
return reinterpret_cast<const char*>(cache_entry->value);
141
const char* StringsStorage::GetVFormatted(const char* format, va_list args) {
142
Vector<char> str = Vector<char>::New(1024);
143
int len = OS::VSNPrintF(str, format, args);
145
DeleteArray(str.start());
148
uint32_t hash = HashSequentialString(
149
str.start(), len, HEAP->HashSeed());
150
return AddOrDisposeString(str.start(), hash);
154
// Interns a flattened, length-capped (kMaxNameSize) C-string rendering of a
// V8 heap String. Non-strings intern as the empty string.
// NOTE(review): hash declaration and "" fallback reconstructed from damaged
// extraction — verify against upstream V8.
const char* StringsStorage::GetName(String* name) {
  if (name->IsString()) {
    int length = Min(kMaxNameSize, name->length());
    SmartArrayPointer<char> data =
        name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length);
    uint32_t hash =
        HashSequentialString(*data, length, name->GetHeap()->HashSeed());
    // Detach() transfers buffer ownership to AddOrDisposeString.
    return AddOrDisposeString(data.Detach(), hash);
  }
  return "";
}
167
const char* StringsStorage::GetName(int index) {
168
return GetFormatted("%d", index);
172
// Estimates heap usage: the object itself, the hash-map entry array, and
// every interned string including its NUL terminator.
size_t StringsStorage::GetUsedMemorySize() const {
  size_t size = sizeof(*this);
  size += sizeof(HashMap::Entry) * names_.capacity();
  for (HashMap::Entry* p = names_.Start(); p != NULL; p = names_.Next(p)) {
    size += strlen(reinterpret_cast<const char*>(p->value)) + 1;
  }
  return size;
}
181
const char* const CodeEntry::kEmptyNamePrefix = "";
184
void CodeEntry::CopyData(const CodeEntry& source) {
186
name_prefix_ = source.name_prefix_;
187
name_ = source.name_;
188
resource_name_ = source.resource_name_;
189
line_number_ = source.line_number_;
193
// Computes a hash identifying this call site. Entries with a shared id hash
// on it alone; otherwise the hash mixes the interned name/resource pointers
// (pointer identity is sufficient because strings are interned) and line.
uint32_t CodeEntry::GetCallUid() const {
  uint32_t hash = ComputeIntegerHash(tag_, v8::internal::kZeroHashSeed);
  if (shared_id_ != 0) {
    hash ^= ComputeIntegerHash(static_cast<uint32_t>(shared_id_),
                               v8::internal::kZeroHashSeed);
  } else {
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_prefix_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(resource_name_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(line_number_, v8::internal::kZeroHashSeed);
  }
  return hash;
}
214
// Equality matching GetCallUid: identical pointer, or same tag and shared id,
// with field-by-field comparison only for entries without a shared id.
// NOTE(review): leading identity/shared_id_ conditions reconstructed from
// damaged extraction — verify against upstream V8.
bool CodeEntry::IsSameAs(CodeEntry* entry) const {
  return this == entry
      || (tag_ == entry->tag_
          && shared_id_ == entry->shared_id_
          && (shared_id_ != 0
              || (name_prefix_ == entry->name_prefix_
                  && name_ == entry->name_
                  && resource_name_ == entry->resource_name_
                  && line_number_ == entry->line_number_)));
}
226
// Looks up (without inserting) the child node for |entry|; NULL if absent.
ProfileNode* ProfileNode::FindChild(CodeEntry* entry) {
  HashMap::Entry* map_entry =
      children_.Lookup(entry, CodeEntryHash(entry), false);
  return map_entry != NULL ?
      reinterpret_cast<ProfileNode*>(map_entry->value) : NULL;
}
234
// Returns the child node for |entry|, creating it on first use. New nodes are
// also appended to children_list_ to preserve insertion order for printing.
ProfileNode* ProfileNode::FindOrAddChild(CodeEntry* entry) {
  HashMap::Entry* map_entry =
      children_.Lookup(entry, CodeEntryHash(entry), true);
  if (map_entry->value == NULL) {
    // New node added.
    ProfileNode* new_node = new ProfileNode(tree_, entry);
    map_entry->value = new_node;
    children_list_.Add(new_node);
  }
  return reinterpret_cast<ProfileNode*>(map_entry->value);
}
247
double ProfileNode::GetSelfMillis() const {
248
return tree_->TicksToMillis(self_ticks_);
252
double ProfileNode::GetTotalMillis() const {
253
return tree_->TicksToMillis(total_ticks_);
257
void ProfileNode::Print(int indent) {
258
OS::Print("%5u %5u %*c %s%s [%d]",
259
total_ticks_, self_ticks_,
261
entry_->name_prefix(),
263
entry_->security_token_id());
264
if (entry_->resource_name()[0] != '\0')
265
OS::Print(" %s:%d", entry_->resource_name(), entry_->line_number());
267
for (HashMap::Entry* p = children_.Start();
269
p = children_.Next(p)) {
270
reinterpret_cast<ProfileNode*>(p->value)->Print(indent + 2);
275
class DeleteNodesCallback {
277
void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }
279
void AfterAllChildrenTraversed(ProfileNode* node) {
283
void AfterChildTraversed(ProfileNode*, ProfileNode*) { }
287
ProfileTree::ProfileTree()
288
: root_entry_(Logger::FUNCTION_TAG,
293
TokenEnumerator::kNoSecurityToken),
294
root_(new ProfileNode(this, &root_entry_)) {
298
// Deletes every node via a post-order traversal (children before parents).
ProfileTree::~ProfileTree() {
  DeleteNodesCallback cb;
  TraverseDepthFirst(&cb);
}
304
void ProfileTree::AddPathFromEnd(const Vector<CodeEntry*>& path) {
305
ProfileNode* node = root_;
306
for (CodeEntry** entry = path.start() + path.length() - 1;
307
entry != path.start() - 1;
309
if (*entry != NULL) {
310
node = node->FindOrAddChild(*entry);
313
node->IncrementSelfTicks();
317
void ProfileTree::AddPathFromStart(const Vector<CodeEntry*>& path) {
318
ProfileNode* node = root_;
319
for (CodeEntry** entry = path.start();
320
entry != path.start() + path.length();
322
if (*entry != NULL) {
323
node = node->FindOrAddChild(*entry);
326
node->IncrementSelfTicks();
331
NodesPair(ProfileNode* src, ProfileNode* dst)
332
: src(src), dst(dst) { }
338
class FilteredCloneCallback {
340
FilteredCloneCallback(ProfileNode* dst_root, int security_token_id)
342
security_token_id_(security_token_id) {
343
stack_.Add(NodesPair(NULL, dst_root));
346
void BeforeTraversingChild(ProfileNode* parent, ProfileNode* child) {
347
if (IsTokenAcceptable(child->entry()->security_token_id(),
348
parent->entry()->security_token_id())) {
349
ProfileNode* clone = stack_.last().dst->FindOrAddChild(child->entry());
350
clone->IncreaseSelfTicks(child->self_ticks());
351
stack_.Add(NodesPair(child, clone));
353
// Attribute ticks to parent node.
354
stack_.last().dst->IncreaseSelfTicks(child->self_ticks());
358
void AfterAllChildrenTraversed(ProfileNode* parent) { }
360
void AfterChildTraversed(ProfileNode*, ProfileNode* child) {
361
if (stack_.last().src == child) {
367
bool IsTokenAcceptable(int token, int parent_token) {
368
if (token == TokenEnumerator::kNoSecurityToken
369
|| token == security_token_id_) return true;
370
if (token == TokenEnumerator::kInheritsSecurityToken) {
371
ASSERT(parent_token != TokenEnumerator::kInheritsSecurityToken);
372
return parent_token == TokenEnumerator::kNoSecurityToken
373
|| parent_token == security_token_id_;
378
List<NodesPair> stack_;
379
int security_token_id_;
382
// Populates this tree with a copy of |src| filtered by security token, then
// recomputes total ticks for the cloned nodes.
void ProfileTree::FilteredClone(ProfileTree* src, int security_token_id) {
  ms_to_ticks_scale_ = src->ms_to_ticks_scale_;
  FilteredCloneCallback cb(root_, security_token_id);
  src->TraverseDepthFirst(&cb);
  CalculateTotalTicks();
}
390
void ProfileTree::SetTickRatePerMs(double ticks_per_ms) {
391
ms_to_ticks_scale_ = ticks_per_ms > 0 ? 1.0 / ticks_per_ms : 1.0;
397
explicit Position(ProfileNode* node)
398
: node(node), child_idx_(0) { }
399
INLINE(ProfileNode* current_child()) {
400
return node->children()->at(child_idx_);
402
INLINE(bool has_current_child()) {
403
return child_idx_ < node->children()->length();
405
INLINE(void next_child()) { ++child_idx_; }
413
// Non-recursive implementation of a depth-first post-order tree traversal.
414
template <typename Callback>
415
void ProfileTree::TraverseDepthFirst(Callback* callback) {
416
List<Position> stack(10);
417
stack.Add(Position(root_));
418
while (stack.length() > 0) {
419
Position& current = stack.last();
420
if (current.has_current_child()) {
421
callback->BeforeTraversingChild(current.node, current.current_child());
422
stack.Add(Position(current.current_child()));
424
callback->AfterAllChildrenTraversed(current.node);
425
if (stack.length() > 1) {
426
Position& parent = stack[stack.length() - 2];
427
callback->AfterChildTraversed(parent.node, current.node);
430
// Remove child from the stack.
437
class CalculateTotalTicksCallback {
439
void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }
441
void AfterAllChildrenTraversed(ProfileNode* node) {
442
node->IncreaseTotalTicks(node->self_ticks());
445
void AfterChildTraversed(ProfileNode* parent, ProfileNode* child) {
446
parent->IncreaseTotalTicks(child->total_ticks());
451
void ProfileTree::CalculateTotalTicks() {
452
CalculateTotalTicksCallback cb;
453
TraverseDepthFirst(&cb);
457
void ProfileTree::ShortPrint() {
458
OS::Print("root: %u %u %.2fms %.2fms\n",
459
root_->total_ticks(), root_->self_ticks(),
460
root_->GetTotalMillis(), root_->GetSelfMillis());
464
void CpuProfile::AddPath(const Vector<CodeEntry*>& path) {
465
top_down_.AddPathFromEnd(path);
466
bottom_up_.AddPathFromStart(path);
470
void CpuProfile::CalculateTotalTicks() {
471
top_down_.CalculateTotalTicks();
472
bottom_up_.CalculateTotalTicks();
476
void CpuProfile::SetActualSamplingRate(double actual_sampling_rate) {
477
top_down_.SetTickRatePerMs(actual_sampling_rate);
478
bottom_up_.SetTickRatePerMs(actual_sampling_rate);
482
// Creates a new profile containing only nodes visible to the given security
// token. Caller takes ownership of the returned clone.
CpuProfile* CpuProfile::FilteredClone(int security_token_id) {
  ASSERT(security_token_id != TokenEnumerator::kNoSecurityToken);
  CpuProfile* clone = new CpuProfile(title_, uid_);
  clone->top_down_.FilteredClone(&top_down_, security_token_id);
  clone->bottom_up_.FilteredClone(&bottom_up_, security_token_id);
  return clone;
}
491
void CpuProfile::ShortPrint() {
492
OS::Print("top down ");
493
top_down_.ShortPrint();
494
OS::Print("bottom up ");
495
bottom_up_.ShortPrint();
499
void CpuProfile::Print() {
500
OS::Print("[Top down]:\n");
502
OS::Print("[Bottom up]:\n");
507
CodeEntry* const CodeMap::kSharedFunctionCodeEntry = NULL;
508
const CodeMap::CodeTreeConfig::Key CodeMap::CodeTreeConfig::kNoKey = NULL;
511
// Registers a code object at [addr, addr+size), first evicting any entries
// the new range overlaps.
void CodeMap::AddCode(Address addr, CodeEntry* entry, unsigned size) {
  DeleteAllCoveredCode(addr, addr + size);
  CodeTree::Locator locator;
  tree_.Insert(addr, &locator);
  locator.set_value(CodeEntryInfo(entry, size));
}
519
// Removes every tree entry whose address range intersects [start, end).
// Walks backwards from end-1 using FindGreatestLessThan; removals are
// deferred to avoid mutating the tree mid-iteration.
// NOTE(review): the backward-step statement was reconstructed from damaged
// extraction — verify against upstream V8.
void CodeMap::DeleteAllCoveredCode(Address start, Address end) {
  List<Address> to_delete;
  Address addr = end - 1;
  while (addr >= start) {
    CodeTree::Locator locator;
    if (!tree_.FindGreatestLessThan(addr, &locator)) break;
    Address start2 = locator.key(), end2 = start2 + locator.value().size;
    if (start2 < end && start < end2) to_delete.Add(start2);
    addr = start2 - 1;
  }
  for (int i = 0; i < to_delete.length(); ++i) tree_.Remove(to_delete[i]);
}
533
// Returns the code entry whose range contains |addr|, or NULL.
CodeEntry* CodeMap::FindEntry(Address addr) {
  CodeTree::Locator locator;
  if (tree_.FindGreatestLessThan(addr, &locator)) {
    // locator.key() <= addr. Need to check that addr is within entry.
    const CodeEntryInfo& entry = locator.value();
    if (addr < (locator.key() + entry.size))
      return entry.entry;
  }
  return NULL;
}
545
// Returns a stable id for a SharedFunctionInfo address, assigning the next
// free id on first sight. Shared entries are stored as sentinel
// kSharedFunctionCodeEntry nodes whose 'size' field holds the id.
// NOTE(review): the return/else structure was reconstructed from damaged
// extraction — verify against upstream V8.
int CodeMap::GetSharedId(Address addr) {
  CodeTree::Locator locator;
  // For shared function entries, 'size' field is used to store their IDs.
  if (tree_.Find(addr, &locator)) {
    const CodeEntryInfo& entry = locator.value();
    ASSERT(entry.entry == kSharedFunctionCodeEntry);
    return entry.size;
  } else {
    tree_.Insert(addr, &locator);
    int id = next_shared_id_++;
    locator.set_value(CodeEntryInfo(kSharedFunctionCodeEntry, id));
    return id;
  }
}
561
// Relocates a code object's map entry from |from| to |to| (GC move).
// NOTE(review): the Remove(from) call was reconstructed from damaged
// extraction — verify against upstream V8.
void CodeMap::MoveCode(Address from, Address to) {
  if (from == to) return;
  CodeTree::Locator locator;
  if (!tree_.Find(from, &locator)) return;
  CodeEntryInfo entry = locator.value();
  tree_.Remove(from);
  AddCode(to, entry.entry, entry.size);
}
571
void CodeMap::CodeTreePrinter::Call(
572
const Address& key, const CodeMap::CodeEntryInfo& value) {
573
OS::Print("%p %5d %s\n", key, value.size, value.entry->name());
577
void CodeMap::Print() {
578
CodeTreePrinter printer;
579
tree_.ForEach(&printer);
583
// The semaphore (binary, initial count 1) guards current_profiles_ against
// concurrent access from the VM and sampler threads.
CpuProfilesCollection::CpuProfilesCollection()
    : profiles_uids_(UidsMatch),
      current_profiles_semaphore_(OS::CreateSemaphore(1)) {
  // Create list of unabridged profiles.
  profiles_by_token_.Add(new List<CpuProfile*>());
}
591
// List<>::Iterate helper: frees one owned CodeEntry.
static void DeleteCodeEntry(CodeEntry** entry_ptr) {
  delete *entry_ptr;
}
595
// List<>::Iterate helper: frees one owned CpuProfile.
static void DeleteCpuProfile(CpuProfile** profile_ptr) {
  delete *profile_ptr;
}
599
// List<>::Iterate helper: frees one per-token profile list and its profiles.
// Slots may be NULL for tokens never materialized.
static void DeleteProfilesList(List<CpuProfile*>** list_ptr) {
  if (*list_ptr != NULL) {
    (*list_ptr)->Iterate(DeleteCpuProfile);
    delete *list_ptr;
  }
}
606
// Releases the lock object and every owned profile, clone, and code entry.
CpuProfilesCollection::~CpuProfilesCollection() {
  delete current_profiles_semaphore_;
  current_profiles_.Iterate(DeleteCpuProfile);
  detached_profiles_.Iterate(DeleteCpuProfile);
  profiles_by_token_.Iterate(DeleteProfilesList);
  code_entries_.Iterate(DeleteCodeEntry);
}
615
bool CpuProfilesCollection::StartProfiling(const char* title, unsigned uid) {
617
current_profiles_semaphore_->Wait();
618
if (current_profiles_.length() >= kMaxSimultaneousProfiles) {
619
current_profiles_semaphore_->Signal();
622
for (int i = 0; i < current_profiles_.length(); ++i) {
623
if (strcmp(current_profiles_[i]->title(), title) == 0) {
624
// Ignore attempts to start profile with the same title.
625
current_profiles_semaphore_->Signal();
629
current_profiles_.Add(new CpuProfile(title, uid));
630
current_profiles_semaphore_->Signal();
635
// Convenience overload: interns the V8 String title and delegates.
bool CpuProfilesCollection::StartProfiling(String* title, unsigned uid) {
  return StartProfiling(GetName(title), uid);
}
640
// Stops the in-progress profile matching |title| (an empty title matches the
// most recent one), finalizes its tick data, files it in the unabridged list
// and uid index, and returns the (possibly token-filtered) result; NULL if
// nothing matched.
// NOTE(review): the title parameter line, loop break, and tail return were
// reconstructed from damaged extraction — verify against upstream V8.
CpuProfile* CpuProfilesCollection::StopProfiling(int security_token_id,
                                                 const char* title,
                                                 double actual_sampling_rate) {
  const int title_len = StrLength(title);
  CpuProfile* profile = NULL;
  current_profiles_semaphore_->Wait();
  for (int i = current_profiles_.length() - 1; i >= 0; --i) {
    if (title_len == 0 || strcmp(current_profiles_[i]->title(), title) == 0) {
      profile = current_profiles_.Remove(i);
      break;
    }
  }
  current_profiles_semaphore_->Signal();

  if (profile != NULL) {
    profile->CalculateTotalTicks();
    profile->SetActualSamplingRate(actual_sampling_rate);
    List<CpuProfile*>* unabridged_list =
        profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
    unabridged_list->Add(profile);
    HashMap::Entry* entry =
        profiles_uids_.Lookup(reinterpret_cast<void*>(profile->uid()),
                              static_cast<uint32_t>(profile->uid()),
                              true);
    ASSERT(entry->value == NULL);
    // The uid index stores the profile's position in the unabridged list.
    entry->value = reinterpret_cast<void*>(unabridged_list->length() - 1);
    return GetProfile(security_token_id, profile->uid());
  }
  return NULL;
}
672
// Returns the completed profile with |uid| as seen by |security_token_id|:
// the unabridged profile for kNoSecurityToken, otherwise a lazily created
// token-filtered clone. NULL if the uid is unknown.
// NOTE(review): the uid parameter line and clone assignment were
// reconstructed from damaged extraction — verify against upstream V8.
CpuProfile* CpuProfilesCollection::GetProfile(int security_token_id,
                                              unsigned uid) {
  int index = GetProfileIndex(uid);
  if (index < 0) return NULL;
  List<CpuProfile*>* unabridged_list =
      profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
  if (security_token_id == TokenEnumerator::kNoSecurityToken) {
    return unabridged_list->at(index);
  }
  List<CpuProfile*>* list = GetProfilesList(security_token_id);
  if (list->at(index) == NULL) {
    (*list)[index] =
        unabridged_list->at(index)->FilteredClone(security_token_id);
  }
  return list->at(index);
}
690
int CpuProfilesCollection::GetProfileIndex(unsigned uid) {
691
HashMap::Entry* entry = profiles_uids_.Lookup(reinterpret_cast<void*>(uid),
692
static_cast<uint32_t>(uid),
694
return entry != NULL ?
695
static_cast<int>(reinterpret_cast<intptr_t>(entry->value)) : -1;
699
bool CpuProfilesCollection::IsLastProfile(const char* title) {
700
// Called from VM thread, and only it can mutate the list,
701
// so no locking is needed here.
702
if (current_profiles_.length() != 1) return false;
703
return StrLength(title) == 0
704
|| strcmp(current_profiles_[0]->title(), title) == 0;
708
// Unregisters a completed profile: drops it from the uid index, shifts the
// indexes of later profiles down, and removes the corresponding slot from
// every per-token list, parking still-referenced clones in
// detached_profiles_. Profiles already detached are simply removed there.
// NOTE(review): the index < 0 early-out block was reconstructed from damaged
// extraction — verify against upstream V8.
void CpuProfilesCollection::RemoveProfile(CpuProfile* profile) {
  // Called from VM thread for a completed profile.
  unsigned uid = profile->uid();
  int index = GetProfileIndex(uid);
  if (index < 0) {
    detached_profiles_.RemoveElement(profile);
    return;
  }
  profiles_uids_.Remove(reinterpret_cast<void*>(uid),
                        static_cast<uint32_t>(uid));
  // Decrement all indexes above the deleted one.
  for (HashMap::Entry* p = profiles_uids_.Start();
       p != NULL;
       p = profiles_uids_.Next(p)) {
    intptr_t p_index = reinterpret_cast<intptr_t>(p->value);
    if (p_index > index) {
      p->value = reinterpret_cast<void*>(p_index - 1);
    }
  }
  for (int i = 0; i < profiles_by_token_.length(); ++i) {
    List<CpuProfile*>* list = profiles_by_token_[i];
    if (list != NULL && index < list->length()) {
      // Move all filtered clones into detached_profiles_,
      // so we can know that they are still in use.
      CpuProfile* cloned_profile = list->Remove(index);
      if (cloned_profile != NULL && cloned_profile != profile) {
        detached_profiles_.Add(cloned_profile);
      }
    }
  }
}
741
int CpuProfilesCollection::TokenToIndex(int security_token_id) {
742
ASSERT(TokenEnumerator::kNoSecurityToken == -1);
743
return security_token_id + 1; // kNoSecurityToken -> 0, 0 -> 1, ...
747
// Returns (creating if needed) the per-token profile list, grown to the same
// length as the unabridged list; new slots are NULL placeholders to be
// filled with filtered clones on demand.
List<CpuProfile*>* CpuProfilesCollection::GetProfilesList(
    int security_token_id) {
  const int index = TokenToIndex(security_token_id);
  const int lists_to_add = index - profiles_by_token_.length() + 1;
  if (lists_to_add > 0) profiles_by_token_.AddBlock(NULL, lists_to_add);
  List<CpuProfile*>* unabridged_list =
      profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
  const int current_count = unabridged_list->length();
  if (profiles_by_token_[index] == NULL) {
    profiles_by_token_[index] = new List<CpuProfile*>(current_count);
  }
  List<CpuProfile*>* list = profiles_by_token_[index];
  const int profiles_to_add = current_count - list->length();
  if (profiles_to_add > 0) list->AddBlock(NULL, profiles_to_add);
  return list;
}
765
// Returns all completed profiles visible to |security_token_id|: the
// unabridged list for kNoSecurityToken, otherwise the per-token list with
// every missing slot filled by a freshly created filtered clone.
List<CpuProfile*>* CpuProfilesCollection::Profiles(int security_token_id) {
  List<CpuProfile*>* unabridged_list =
      profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
  if (security_token_id == TokenEnumerator::kNoSecurityToken) {
    return unabridged_list;
  }
  List<CpuProfile*>* list = GetProfilesList(security_token_id);
  const int current_count = unabridged_list->length();
  for (int i = 0; i < current_count; ++i) {
    if (list->at(i) == NULL) {
      (*list)[i] = unabridged_list->at(i)->FilteredClone(security_token_id);
    }
  }
  return list;
}
782
CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
784
String* resource_name,
786
CodeEntry* entry = new CodeEntry(tag,
787
CodeEntry::kEmptyNamePrefix,
788
GetFunctionName(name),
789
GetName(resource_name),
791
TokenEnumerator::kNoSecurityToken);
792
code_entries_.Add(entry);
797
CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
799
CodeEntry* entry = new CodeEntry(tag,
800
CodeEntry::kEmptyNamePrefix,
801
GetFunctionName(name),
803
v8::CpuProfileNode::kNoLineNumberInfo,
804
TokenEnumerator::kNoSecurityToken);
805
code_entries_.Add(entry);
810
CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
811
const char* name_prefix,
813
CodeEntry* entry = new CodeEntry(tag,
817
v8::CpuProfileNode::kNoLineNumberInfo,
818
TokenEnumerator::kInheritsSecurityToken);
819
code_entries_.Add(entry);
824
CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
826
CodeEntry* entry = new CodeEntry(tag,
830
v8::CpuProfileNode::kNoLineNumberInfo,
831
TokenEnumerator::kInheritsSecurityToken);
832
code_entries_.Add(entry);
837
void CpuProfilesCollection::AddPathToCurrentProfiles(
838
const Vector<CodeEntry*>& path) {
839
// As starting / stopping profiles is rare relatively to this
840
// method, we don't bother minimizing the duration of lock holding,
841
// e.g. copying contents of the list to a local vector.
842
current_profiles_semaphore_->Wait();
843
for (int i = 0; i < current_profiles_.length(); ++i) {
844
current_profiles_[i]->AddPath(path);
846
current_profiles_semaphore_->Signal();
850
void SampleRateCalculator::Tick() {
851
if (--wall_time_query_countdown_ == 0)
852
UpdateMeasurements(OS::TimeCurrentMillis());
856
void SampleRateCalculator::UpdateMeasurements(double current_time) {
857
if (measurements_count_++ != 0) {
858
const double measured_ticks_per_ms =
859
(kWallTimeQueryIntervalMs * ticks_per_ms_) /
860
(current_time - last_wall_time_);
861
// Update the average value.
863
(measured_ticks_per_ms - ticks_per_ms_) / measurements_count_;
864
// Update the externally accessible result.
865
result_ = static_cast<AtomicWord>(ticks_per_ms_ * kResultScale);
867
last_wall_time_ = current_time;
868
wall_time_query_countdown_ =
869
static_cast<unsigned>(kWallTimeQueryIntervalMs * ticks_per_ms_);
873
const char* const ProfileGenerator::kAnonymousFunctionName =
874
"(anonymous function)";
875
// Synthetic entry names shown in browser-mode profiles.
// NOTE(review): the "(program)" literal was dropped by the extraction and
// restored here — verify against upstream V8.
const char* const ProfileGenerator::kProgramEntryName =
    "(program)";
const char* const ProfileGenerator::kGarbageCollectorEntryName =
    "(garbage collector)";
881
ProfileGenerator::ProfileGenerator(CpuProfilesCollection* profiles)
882
: profiles_(profiles),
884
profiles->NewCodeEntry(Logger::FUNCTION_TAG, kProgramEntryName)),
886
profiles->NewCodeEntry(Logger::BUILTIN_TAG,
887
kGarbageCollectorEntryName)) {
891
void ProfileGenerator::RecordTickSample(const TickSample& sample) {
892
// Allocate space for stack frames + pc + function + vm-state.
893
ScopedVector<CodeEntry*> entries(sample.frames_count + 3);
894
// As actual number of decoded code entries may vary, initialize
895
// entries vector with NULL values.
896
CodeEntry** entry = entries.start();
897
memset(entry, 0, entries.length() * sizeof(*entry));
898
if (sample.pc != NULL) {
899
*entry++ = code_map_.FindEntry(sample.pc);
901
if (sample.has_external_callback) {
902
// Don't use PC when in external callback code, as it can point
903
// inside callback's code, and we will erroneously report
904
// that a callback calls itself.
905
*(entries.start()) = NULL;
906
*entry++ = code_map_.FindEntry(sample.external_callback);
907
} else if (sample.tos != NULL) {
908
// Find out, if top of stack was pointing inside a JS function
909
// meaning that we have encountered a frameless invocation.
910
*entry = code_map_.FindEntry(sample.tos);
911
if (*entry != NULL && !(*entry)->is_js_function()) {
917
for (const Address* stack_pos = sample.stack,
918
*stack_end = stack_pos + sample.frames_count;
919
stack_pos != stack_end;
921
*entry++ = code_map_.FindEntry(*stack_pos);
925
if (FLAG_prof_browser_mode) {
926
bool no_symbolized_entries = true;
927
for (CodeEntry** e = entries.start(); e != entry; ++e) {
929
no_symbolized_entries = false;
933
// If no frames were symbolized, put the VM state entry in.
934
if (no_symbolized_entries) {
935
*entry++ = EntryForVMState(sample.state);
939
profiles_->AddPathToCurrentProfiles(entries);
943
HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
948
ASSERT(type == kContextVariable
951
|| type == kShortcut);
955
HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
960
ASSERT(type == kElement || type == kHidden || type == kWeak);
964
// Resolves the stored target index into a direct HeapEntry pointer once the
// snapshot's entries list is final (indexes would dangle while it grows).
void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
  to_entry_ = &snapshot->entries()[to_index_];
}
969
const int HeapEntry::kNoEntry = -1;
971
HeapEntry::HeapEntry(HeapSnapshot* snapshot,
979
self_size_(self_size),
985
void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
988
HeapGraphEdge edge(type, name, this->index(), entry->index());
989
snapshot_->edges().Add(edge);
994
void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
997
HeapGraphEdge edge(type, index, this->index(), entry->index());
998
snapshot_->edges().Add(edge);
1003
// Resolves this snapshot entry back to the live heap object via its id.
Handle<HeapObject> HeapEntry::GetHeapObject() {
  return snapshot_->collection()->FindHeapObjectById(id());
}
1008
void HeapEntry::Print(
1009
const char* prefix, const char* edge_name, int max_depth, int indent) {
1010
STATIC_CHECK(sizeof(unsigned) == sizeof(id()));
1011
OS::Print("%6d @%6u %*c %s%s: ",
1012
self_size(), id(), indent, ' ', prefix, edge_name);
1013
if (type() != kString) {
1014
OS::Print("%s %.40s\n", TypeAsString(), name_);
1017
const char* c = name_;
1018
while (*c && (c - name_) <= 40) {
1020
OS::Print("%c", *c);
1027
if (--max_depth == 0) return;
1028
Vector<HeapGraphEdge*> ch = children();
1029
for (int i = 0; i < ch.length(); ++i) {
1030
HeapGraphEdge& edge = *ch[i];
1031
const char* edge_prefix = "";
1032
EmbeddedVector<char, 64> index;
1033
const char* edge_name = index.start();
1034
switch (edge.type()) {
1035
case HeapGraphEdge::kContextVariable:
1037
edge_name = edge.name();
1039
case HeapGraphEdge::kElement:
1040
OS::SNPrintF(index, "%d", edge.index());
1042
case HeapGraphEdge::kInternal:
1044
edge_name = edge.name();
1046
case HeapGraphEdge::kProperty:
1047
edge_name = edge.name();
1049
case HeapGraphEdge::kHidden:
1051
OS::SNPrintF(index, "%d", edge.index());
1053
case HeapGraphEdge::kShortcut:
1055
edge_name = edge.name();
1057
case HeapGraphEdge::kWeak:
1059
OS::SNPrintF(index, "%d", edge.index());
1062
OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
1064
edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
1069
const char* HeapEntry::TypeAsString() {
1071
case kHidden: return "/hidden/";
1072
case kObject: return "/object/";
1073
case kClosure: return "/closure/";
1074
case kString: return "/string/";
1075
case kCode: return "/code/";
1076
case kArray: return "/array/";
1077
case kRegExp: return "/regexp/";
1078
case kHeapNumber: return "/number/";
1079
case kNative: return "/native/";
1080
case kSynthetic: return "/synthetic/";
1081
default: return "???";
1086
// It is very important to keep objects that form a heap snapshot
1087
// as small as possible.
1088
namespace { // Avoid littering the global namespace.
1090
template <size_t ptr_size> struct SnapshotSizeConstants;
1092
template <> struct SnapshotSizeConstants<4> {
1093
static const int kExpectedHeapGraphEdgeSize = 12;
1094
static const int kExpectedHeapEntrySize = 24;
1095
static const int kExpectedHeapSnapshotsCollectionSize = 96;
1096
static const int kExpectedHeapSnapshotSize = 136;
1097
static const size_t kMaxSerializableSnapshotRawSize = 256 * MB;
1100
template <> struct SnapshotSizeConstants<8> {
1101
static const int kExpectedHeapGraphEdgeSize = 24;
1102
static const int kExpectedHeapEntrySize = 32;
1103
static const int kExpectedHeapSnapshotsCollectionSize = 144;
1104
static const int kExpectedHeapSnapshotSize = 168;
1105
static const uint64_t kMaxSerializableSnapshotRawSize =
1106
static_cast<uint64_t>(6000) * MB;
1111
HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection,
1112
HeapSnapshot::Type type,
1115
: collection_(collection),
1119
root_index_(HeapEntry::kNoEntry),
1120
gc_roots_index_(HeapEntry::kNoEntry),
1121
natives_root_index_(HeapEntry::kNoEntry),
1122
max_snapshot_js_object_id_(0) {
1124
sizeof(HeapGraphEdge) ==
1125
SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
1127
sizeof(HeapEntry) ==
1128
SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
1129
for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
1130
gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
1135
void HeapSnapshot::Delete() {
1136
collection_->RemoveSnapshot(this);
1141
void HeapSnapshot::RememberLastJSObjectId() {
1142
max_snapshot_js_object_id_ = collection_->last_assigned_id();
1146
// Creates the synthetic root entry; must be the first entry added (index 0).
// NOTE(review): the name/size arguments and return were reconstructed from
// damaged extraction — verify against upstream V8.
HeapEntry* HeapSnapshot::AddRootEntry() {
  ASSERT(root_index_ == HeapEntry::kNoEntry);
  ASSERT(entries_.is_empty());  // Root entry must be the first one.
  HeapEntry* entry = AddEntry(HeapEntry::kObject,
                              "",
                              HeapObjectsMap::kInternalRootObjectId,
                              0);
  root_index_ = entry->index();
  ASSERT(root_index_ == 0);
  return entry;
}
1159
// Creates the synthetic "(GC roots)" entry.
// NOTE(review): the name/size arguments and return were reconstructed from
// damaged extraction — verify against upstream V8.
HeapEntry* HeapSnapshot::AddGcRootsEntry() {
  ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
  HeapEntry* entry = AddEntry(HeapEntry::kObject,
                              "(GC roots)",
                              HeapObjectsMap::kGcRootsObjectId,
                              0);
  gc_roots_index_ = entry->index();
  return entry;
}
1170
// Creates the synthetic per-visitor-tag GC subroot entry.
// NOTE(review): the type/size arguments and return were reconstructed from
// damaged extraction — verify against upstream V8.
HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
  ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
  ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
  HeapEntry* entry = AddEntry(
      HeapEntry::kObject,
      VisitorSynchronization::kTagNames[tag],
      HeapObjectsMap::GetNthGcSubrootId(tag),
      0);
  gc_subroot_indexes_[tag] = entry->index();
  return entry;
}
1183
// Appends a new entry to the snapshot's entries list and returns a pointer
// to it (stable only until the list reallocates — see FillChildren).
// NOTE(review): the name/size parameter lines were reconstructed from
// damaged extraction — verify against upstream V8.
HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
                                  const char* name,
                                  SnapshotObjectId id,
                                  int size) {
  HeapEntry entry(this, type, name, id, size);
  entries_.Add(entry);
  return &entries_.last();
}
1193
void HeapSnapshot::FillChildren() {
1194
ASSERT(children().is_empty());
1195
children().Allocate(edges().length());
1196
int children_index = 0;
1197
for (int i = 0; i < entries().length(); ++i) {
1198
HeapEntry* entry = &entries()[i];
1199
children_index = entry->set_children_index(children_index);
1201
ASSERT(edges().length() == children_index);
1202
for (int i = 0; i < edges().length(); ++i) {
1203
HeapGraphEdge* edge = &edges()[i];
1204
edge->ReplaceToIndexWithEntry(this);
1205
edge->from()->add_child(edge);
1210
class FindEntryById {
1212
explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
1213
int operator()(HeapEntry* const* entry) {
1214
if ((*entry)->id() == id_) return 0;
1215
return (*entry)->id() < id_ ? -1 : 1;
1218
SnapshotObjectId id_;
1222
// Looks up an entry by its snapshot object id, or returns NULL when absent.
// NOTE(review): the `index == -1` guard was lost in extraction and restored
// from upstream V8 — verify.
HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
  List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
  // Perform a binary search by id.
  int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
  if (index == -1)
    return NULL;
  return entries_by_id->at(index);
}
// Three-way comparator ordering entries by ascending snapshot object id.
template<class T>
static int SortByIds(const T* entry1_ptr,
                     const T* entry2_ptr) {
  if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
  return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
}
// Lazily builds (and caches) a list of entry pointers sorted by id.
List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
  if (sorted_entries_.is_empty()) {
    sorted_entries_.Allocate(entries_.length());
    for (int i = 0; i < entries_.length(); ++i) {
      sorted_entries_[i] = &entries_[i];
    }
    sorted_entries_.Sort(SortByIds);
  }
  return &sorted_entries_;
}
void HeapSnapshot::Print(int max_depth) {
1253
root()->Print("", "", max_depth, 0);
1257
template<typename T, class P>
1258
static size_t GetMemoryUsedByList(const List<T, P>& list) {
1259
return list.length() * sizeof(T) + sizeof(list);
1263
// Approximate memory used by this snapshot itself (object + owned lists).
size_t HeapSnapshot::RawSnapshotSize() const {
  STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::kExpectedHeapSnapshotSize ==
               sizeof(HeapSnapshot));  // NOLINT
  return
      sizeof(*this) +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(edges_) +
      GetMemoryUsedByList(children_) +
      GetMemoryUsedByList(sorted_entries_);
}
// We split IDs on evens for embedder objects (see
1276
// HeapObjectsMap::GenerateId) and odds for native objects.
1277
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
1278
const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
1279
HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
1280
const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
1281
HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
1282
const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
1283
HeapObjectsMap::kGcRootsFirstSubrootId +
1284
VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
1286
HeapObjectsMap::HeapObjectsMap()
    : next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch) {
  // This dummy element solves a problem with entries_map_.
  // When we do a lookup in HashMap we see no difference between two cases:
  // it has an entry with NULL as the value, or it has created
  // a new entry on the fly with NULL as the default value.
  // With this dummy element we have a guarantee that all entries_map_ entries
  // will have a value field greater than 0. This fact is used in the
  // MoveObject method.
  entries_.Add(EntryInfo(0, NULL, 0));
}
void HeapObjectsMap::SnapshotGenerationFinished() {
1301
RemoveDeadEntries();
1305
// Keeps the address->id mapping consistent when the GC moves an object.
void HeapObjectsMap::MoveObject(Address from, Address to) {
  ASSERT(to != NULL);
  ASSERT(from != NULL);
  if (from == to) return;
  void* from_value = entries_map_.Remove(from, AddressHash(from));
  if (from_value == NULL) return;
  int from_entry_info_index =
      static_cast<int>(reinterpret_cast<intptr_t>(from_value));
  entries_.at(from_entry_info_index).addr = to;
  HashMap::Entry* to_entry = entries_map_.Lookup(to, AddressHash(to), true);
  if (to_entry->value != NULL) {
    int to_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
    // Without this operation we will have two EntryInfo's with the same
    // value in addr field. It is bad because later at RemoveDeadEntries
    // one of this entry will be removed with the corresponding entries_map_
    // entry.
    entries_.at(to_entry_info_index).addr = NULL;
  }
  to_entry->value = reinterpret_cast<void*>(from_entry_info_index);
}
// Returns the id recorded for `addr`, or 0 when the address is unknown.
SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
  HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
  if (entry == NULL) return 0;
  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
  EntryInfo& entry_info = entries_.at(entry_index);
  // entries_ also holds the dummy element at index 0, hence strictly greater.
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return entry_info.id;
}
// Returns the existing id for `addr` (marking it live and refreshing its
// size), or assigns a fresh id when the address is seen for the first time.
SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size) {
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
  if (entry->value != NULL) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = true;
    entry_info.size = size;
    return entry_info.id;
  }
  entry->value = reinterpret_cast<void*>(entries_.length());
  SnapshotObjectId id = next_id_;
  next_id_ += kObjectIdStep;
  entries_.Add(EntryInfo(id, addr, size));
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return id;
}
void HeapObjectsMap::StopHeapObjectsTracking() {
1360
time_intervals_.Clear();
1363
void HeapObjectsMap::UpdateHeapObjectsMap() {
1364
HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
1365
"HeapSnapshotsCollection::UpdateHeapObjectsMap");
1366
HeapIterator iterator;
1367
for (HeapObject* obj = iterator.next();
1369
obj = iterator.next()) {
1370
FindOrAddEntry(obj->address(), obj->Size());
1372
RemoveDeadEntries();
1376
// Streams per-time-interval heap statistics updates to `stream`, sending only
// intervals whose count/size changed, in chunks of the stream's preferred
// size. Returns the last assigned object id (also used on abort).
SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
  UpdateHeapObjectsMap();
  time_intervals_.Add(TimeInterval(next_id_));
  int prefered_chunk_size = stream->GetChunkSize();
  List<v8::HeapStatsUpdate> stats_buffer;
  ASSERT(!entries_.is_empty());
  // entries_ is ordered by id, so one linear sweep covers all intervals.
  EntryInfo* entry_info = &entries_.first();
  EntryInfo* end_entry_info = &entries_.last() + 1;
  for (int time_interval_index = 0;
       time_interval_index < time_intervals_.length();
       ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      stats_buffer.Add(v8::HeapStatsUpdate(
          time_interval_index,
          time_interval.count = entries_count,
          time_interval.size = entries_size));
      if (stats_buffer.length() >= prefered_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.first(), stats_buffer.length());
        if (result == OutputStream::kAbort) return last_assigned_id();
        stats_buffer.Clear();
      }
    }
  }
  ASSERT(entry_info == end_entry_info);
  if (!stats_buffer.is_empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.first(), stats_buffer.length());
    if (result == OutputStream::kAbort) return last_assigned_id();
  }
  stream->EndOfStream();
  return last_assigned_id();
}
void HeapObjectsMap::RemoveDeadEntries() {
1423
ASSERT(entries_.length() > 0 &&
1424
entries_.at(0).id == 0 &&
1425
entries_.at(0).addr == NULL);
1426
int first_free_entry = 1;
1427
for (int i = 1; i < entries_.length(); ++i) {
1428
EntryInfo& entry_info = entries_.at(i);
1429
if (entry_info.accessed) {
1430
if (first_free_entry != i) {
1431
entries_.at(first_free_entry) = entry_info;
1433
entries_.at(first_free_entry).accessed = false;
1434
HashMap::Entry* entry = entries_map_.Lookup(
1435
entry_info.addr, AddressHash(entry_info.addr), false);
1437
entry->value = reinterpret_cast<void*>(first_free_entry);
1440
if (entry_info.addr) {
1441
entries_map_.Remove(entry_info.addr, AddressHash(entry_info.addr));
1445
entries_.Rewind(first_free_entry);
1446
ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
1447
entries_map_.occupancy());
1451
// Generates a stable id for an embedder-provided (native) object from its
// hash, label, and element count. Shifted left so native ids land on evens,
// keeping them disjoint from heap-object ids (odds).
SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
  const char* label = info->GetLabel();
  id ^= HashSequentialString(label,
                             static_cast<int>(strlen(label)),
                             HEAP->HashSeed());
  intptr_t element_count = info->GetElementCount();
  if (element_count != -1)
    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
                             v8::internal::kZeroHashSeed);
  return id << 1;
}
// Approximate memory used by this map (object, hash table, and lists).
size_t HeapObjectsMap::GetUsedMemorySize() const {
  return
      sizeof(*this) +
      sizeof(HashMap::Entry) * entries_map_.capacity() +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(time_intervals_);
}
HeapSnapshotsCollection::HeapSnapshotsCollection()
    : is_tracking_objects_(false),
      snapshots_uids_(HeapSnapshotsMatch),
      token_enumerator_(new TokenEnumerator()) {
}
// List::Iterate callback: frees one snapshot.
static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) {
  delete *snapshot_ptr;
}
// Frees the owned token enumerator and all owned snapshots.
HeapSnapshotsCollection::~HeapSnapshotsCollection() {
  delete token_enumerator_;
  snapshots_.Iterate(DeleteHeapSnapshot);
}
// Creates a new snapshot (ownership passes to the caller until it is
// registered via SnapshotGenerationFinished) and enables object tracking.
HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(HeapSnapshot::Type type,
                                                   const char* name,
                                                   unsigned uid) {
  is_tracking_objects_ = true;  // Start watching for heap objects moves.
  return new HeapSnapshot(this, type, name, uid);
}
void HeapSnapshotsCollection::SnapshotGenerationFinished(
1501
HeapSnapshot* snapshot) {
1502
ids_.SnapshotGenerationFinished();
1503
if (snapshot != NULL) {
1504
snapshots_.Add(snapshot);
1505
HashMap::Entry* entry =
1506
snapshots_uids_.Lookup(reinterpret_cast<void*>(snapshot->uid()),
1507
static_cast<uint32_t>(snapshot->uid()),
1509
ASSERT(entry->value == NULL);
1510
entry->value = snapshot;
1515
// Returns the snapshot registered under `uid`, or NULL.
HeapSnapshot* HeapSnapshotsCollection::GetSnapshot(unsigned uid) {
  HashMap::Entry* entry = snapshots_uids_.Lookup(reinterpret_cast<void*>(uid),
                                                 static_cast<uint32_t>(uid),
                                                 false);
  return entry != NULL ? reinterpret_cast<HeapSnapshot*>(entry->value) : NULL;
}
// Unregisters a snapshot from both the list and the uid table (does not
// free it; see HeapSnapshot::Delete).
void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) {
  snapshots_.RemoveElement(snapshot);
  unsigned uid = snapshot->uid();
  snapshots_uids_.Remove(reinterpret_cast<void*>(uid),
                         static_cast<uint32_t>(uid));
}
// Finds the live heap object with the given snapshot id; returns an empty
// handle when the object is no longer reachable.
Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
    SnapshotObjectId id) {
  // First perform a full GC in order to avoid dead objects.
  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                          "HeapSnapshotsCollection::FindHeapObjectById");
  AssertNoAllocation no_allocation;
  HeapObject* object = NULL;
  HeapIterator iterator(HeapIterator::kFilterUnreachable);
  // Make sure that object with the given id is still reachable.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    if (ids_.FindEntry(obj->address()) == id) {
      ASSERT(object == NULL);
      object = obj;
      // Can't break -- kFilterUnreachable requires full heap traversal.
    }
  }
  return object != NULL ? Handle<HeapObject>(object) : Handle<HeapObject>();
}
// Approximate memory used by the collection, including all owned snapshots.
size_t HeapSnapshotsCollection::GetUsedMemorySize() const {
  STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::
      kExpectedHeapSnapshotsCollectionSize ==
      sizeof(HeapSnapshotsCollection));  // NOLINT
  size_t size = sizeof(*this);
  size += names_.GetUsedMemorySize();
  size += ids_.GetUsedMemorySize();
  size += sizeof(HashMap::Entry) * snapshots_uids_.capacity();
  size += GetMemoryUsedByList(snapshots_);
  for (int i = 0; i < snapshots_.length(); ++i) {
    size += snapshots_[i]->RawSnapshotSize();
  }
  return size;
}
HeapEntriesMap::HeapEntriesMap()
    : entries_(HeapThingsMatch) {
}
// Returns the entry index recorded for `thing`, or HeapEntry::kNoEntry.
int HeapEntriesMap::Map(HeapThing thing) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
  if (cache_entry == NULL) return HeapEntry::kNoEntry;
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}
// Records the entry index for `thing`; each thing may be paired only once.
void HeapEntriesMap::Pair(HeapThing thing, int entry) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
  ASSERT(cache_entry->value == NULL);
  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
}
HeapObjectsSet::HeapObjectsSet()
    : entries_(HeapEntriesMap::HeapThingsMatch) {
}
void HeapObjectsSet::Clear() {
1598
// Membership test; non-heap objects (smis) are never members.
bool HeapObjectsSet::Contains(Object* obj) {
  if (!obj->IsHeapObject()) return false;
  HeapObject* object = HeapObject::cast(obj);
  return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
}
// Adds a heap object to the set; smis are ignored.
void HeapObjectsSet::Insert(Object* obj) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
}
// Returns the tag stored for `obj` via SetTag, or NULL when untagged.
const char* HeapObjectsSet::GetTag(Object* obj) {
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
  return cache_entry != NULL
      ? reinterpret_cast<const char*>(cache_entry->value)
      : NULL;
}
// Associates a tag string with a heap object; smis are ignored. The tag
// string must outlive this set (it is stored as a raw pointer).
void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
  cache_entry->value = const_cast<char*>(tag);
}
// Sentinel pseudo-object pointers encoding the reserved snapshot ids as
// fake HeapObject addresses; they never point at real heap memory and are
// only compared against in AddEntry/GetGcSubrootOrder.
HeapObject* const V8HeapExplorer::kInternalRootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
HeapObject* const V8HeapExplorer::kGcRootsObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));
// NOTE(review): the tail of the initializer list was lost in extraction;
// `filler_(NULL)` is restored from upstream V8 — verify.
V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress)
    : heap_(Isolate::Current()->heap()),
      snapshot_(snapshot),
      collection_(snapshot_->collection()),
      progress_(progress),
      filler_(NULL) {
}
V8HeapExplorer::~V8HeapExplorer() {
}
// HeapEntriesAllocator interface: `ptr` is a HeapObject in disguise.
HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
}
// Creates a snapshot entry for `object`, classifying it by type and picking
// a human-readable name. Sentinel pseudo-objects map to the synthetic
// root/GC-root/subroot entries.
// NOTE(review): several dropped lines (HeapEntry::kRegExp/kString/kCode type
// arguments, the tag NULL-check, Script name fallback) were restored from
// upstream V8 — verify against the original file.
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
  if (object == kInternalRootObject) {
    snapshot_->AddRootEntry();
    return snapshot_->root();
  } else if (object == kGcRootsObject) {
    HeapEntry* entry = snapshot_->AddGcRootsEntry();
    return entry;
  } else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) {
    HeapEntry* entry = snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
    return entry;
  } else if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    const char* name = shared->bound() ? "native_bind" :
        collection_->names()->GetName(String::cast(shared->name()));
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    collection_->names()->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    const char* name = collection_->names()->GetName(
        GetConstructorName(JSObject::cast(object)));
    if (object->IsJSGlobalObject()) {
      const char* tag = objects_tags_.GetTag(object);
      if (tag != NULL) {
        name = collection_->names()->GetFormatted("%s / %s", name, tag);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    return AddEntry(object,
                    HeapEntry::kString,
                    collection_->names()->GetName(String::cast(object)));
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
    return AddEntry(object,
                    HeapEntry::kCode,
                    collection_->names()->GetName(name));
  } else if (object->IsScript()) {
    Object* name = Script::cast(object)->name();
    return AddEntry(object,
                    HeapEntry::kCode,
                    name->IsString()
                        ? collection_->names()->GetName(String::cast(name))
                        : "");
  } else if (object->IsNativeContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / Context");
  } else if (object->IsFixedArray() ||
             object->IsFixedDoubleArray() ||
             object->IsByteArray() ||
             object->IsExternalArray()) {
    return AddEntry(object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number");
  }
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}
// Helper: registers `object` in the snapshot with the given type/name,
// resolving its stable id and current size through the objects map.
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
                                    HeapEntry::Type type,
                                    const char* name) {
  int object_size = object->Size();
  SnapshotObjectId object_id =
      collection_->GetObjectId(object->address(), object_size);
  return snapshot_->AddEntry(type, name, object_id, object_size);
}
class GcSubrootsEnumerator : public ObjectVisitor {
1742
GcSubrootsEnumerator(
1743
SnapshotFillerInterface* filler, V8HeapExplorer* explorer)
1745
explorer_(explorer),
1746
previous_object_count_(0),
1749
void VisitPointers(Object** start, Object** end) {
1750
object_count_ += end - start;
1752
void Synchronize(VisitorSynchronization::SyncTag tag) {
1753
// Skip empty subroots.
1754
if (previous_object_count_ != object_count_) {
1755
previous_object_count_ = object_count_;
1756
filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
1760
SnapshotFillerInterface* filler_;
1761
V8HeapExplorer* explorer_;
1762
intptr_t previous_object_count_;
1763
intptr_t object_count_;
1767
// Seeds the snapshot with the synthetic root, GC-roots, and per-category
// GC subroot entries.
void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
  filler->AddEntry(kInternalRootObject, this);
  filler->AddEntry(kGcRootsObject, this);
  GcSubrootsEnumerator enumerator(filler, this);
  heap_->IterateRoots(&enumerator, VISIT_ALL);
}
// Maps an internal (non-JS) object's instance type to a "system / ..." label.
const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE: return "system / Map";
    case JS_GLOBAL_PROPERTY_CELL_TYPE: return "system / JSGlobalPropertyCell";
    case FOREIGN_TYPE: return "system / Foreign";
    case ODDBALL_TYPE: return "system / Oddball";
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}
// Counts heap objects by walking the iterator to exhaustion (used for
// progress reporting).
int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
  int objects_count = 0;
  for (HeapObject* obj = iterator->next();
       obj != NULL;
       obj = iterator->next()) {
    objects_count++;
  }
  return objects_count;
}
class IndexedReferencesExtractor : public ObjectVisitor {
1803
IndexedReferencesExtractor(V8HeapExplorer* generator,
1804
HeapObject* parent_obj,
1806
: generator_(generator),
1807
parent_obj_(parent_obj),
1811
void VisitPointers(Object** start, Object** end) {
1812
for (Object** p = start; p < end; p++) {
1813
if (CheckVisitedAndUnmark(p)) continue;
1814
generator_->SetHiddenReference(parent_obj_, parent_, next_index_++, *p);
1817
static void MarkVisitedField(HeapObject* obj, int offset) {
1818
if (offset < 0) return;
1819
Address field = obj->address() + offset;
1820
ASSERT(!Memory::Object_at(field)->IsFailure());
1821
ASSERT(Memory::Object_at(field)->IsHeapObject());
1822
*field |= kFailureTag;
1826
bool CheckVisitedAndUnmark(Object** field) {
1827
if ((*field)->IsFailure()) {
1828
intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
1829
*field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
1830
ASSERT((*field)->IsHeapObject());
1835
V8HeapExplorer* generator_;
1836
HeapObject* parent_obj_;
1842
// Dispatches on the object's type to extract its named references, then
// (for most types) sweeps the remaining pointer fields as hidden references.
void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
  HeapEntry* heap_entry = GetEntry(obj);
  if (heap_entry == NULL) return;  // No interest in this object.
  int entry = heap_entry->index();

  bool extract_indexed_refs = true;
  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(JSGlobalProxy::cast(obj));
  } else if (obj->IsJSObject()) {
    ExtractJSObjectReferences(entry, JSObject::cast(obj));
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
    extract_indexed_refs = false;
  } else if (obj->IsContext()) {
    ExtractContextReferences(entry, Context::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsCodeCache()) {
    ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsJSGlobalPropertyCell()) {
    ExtractJSGlobalPropertyCellReferences(
        entry, JSGlobalPropertyCell::cast(obj));
    extract_indexed_refs = false;
  }
  if (extract_indexed_refs) {
    SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
    IndexedReferencesExtractor refs_extractor(this, obj, entry);
    obj->Iterate(&refs_extractor);
  }
}
void V8HeapExplorer::ExtractJSGlobalProxyReferences(JSGlobalProxy* proxy) {
  // We need to reference JS global objects from snapshot's root.
  // We use JSGlobalProxy because this is what embedder (e.g. browser)
  // uses for the global object.
  Object* object = proxy->map()->prototype();
  bool is_debug_object = false;
#ifdef ENABLE_DEBUGGER_SUPPORT
  is_debug_object = object->IsGlobalObject() &&
      Isolate::Current()->debug()->IsDebugGlobal(GlobalObject::cast(object));
#endif
  if (!is_debug_object) {
    SetUserGlobalReference(object);
  }
}
void V8HeapExplorer::ExtractJSObjectReferences(
1897
int entry, JSObject* js_obj) {
1898
HeapObject* obj = js_obj;
1899
ExtractClosureReferences(js_obj, entry);
1900
ExtractPropertyReferences(js_obj, entry);
1901
ExtractElementReferences(js_obj, entry);
1902
ExtractInternalReferences(js_obj, entry);
1903
SetPropertyReference(
1904
obj, entry, heap_->Proto_symbol(), js_obj->GetPrototype());
1905
if (obj->IsJSFunction()) {
1906
JSFunction* js_fun = JSFunction::cast(js_obj);
1907
Object* proto_or_map = js_fun->prototype_or_initial_map();
1908
if (!proto_or_map->IsTheHole()) {
1909
if (!proto_or_map->IsMap()) {
1910
SetPropertyReference(
1912
heap_->prototype_symbol(), proto_or_map,
1914
JSFunction::kPrototypeOrInitialMapOffset);
1916
SetPropertyReference(
1918
heap_->prototype_symbol(), js_fun->prototype());
1921
SharedFunctionInfo* shared_info = js_fun->shared();
1922
// JSFunction has either bindings or literals and never both.
1923
bool bound = shared_info->bound();
1924
TagObject(js_fun->literals_or_bindings(),
1925
bound ? "(function bindings)" : "(function literals)");
1926
SetInternalReference(js_fun, entry,
1927
bound ? "bindings" : "literals",
1928
js_fun->literals_or_bindings(),
1929
JSFunction::kLiteralsOffset);
1930
TagObject(shared_info, "(shared function info)");
1931
SetInternalReference(js_fun, entry,
1932
"shared", shared_info,
1933
JSFunction::kSharedFunctionInfoOffset);
1934
TagObject(js_fun->unchecked_context(), "(context)");
1935
SetInternalReference(js_fun, entry,
1936
"context", js_fun->unchecked_context(),
1937
JSFunction::kContextOffset);
1938
for (int i = JSFunction::kNonWeakFieldsEndOffset;
1939
i < JSFunction::kSize;
1940
i += kPointerSize) {
1941
SetWeakReference(js_fun, entry, i, *HeapObject::RawField(js_fun, i), i);
1943
} else if (obj->IsGlobalObject()) {
1944
GlobalObject* global_obj = GlobalObject::cast(obj);
1945
SetInternalReference(global_obj, entry,
1946
"builtins", global_obj->builtins(),
1947
GlobalObject::kBuiltinsOffset);
1948
SetInternalReference(global_obj, entry,
1949
"native_context", global_obj->native_context(),
1950
GlobalObject::kNativeContextOffset);
1951
SetInternalReference(global_obj, entry,
1952
"global_receiver", global_obj->global_receiver(),
1953
GlobalObject::kGlobalReceiverOffset);
1955
TagObject(js_obj->properties(), "(object properties)");
1956
SetInternalReference(obj, entry,
1957
"properties", js_obj->properties(),
1958
JSObject::kPropertiesOffset);
1959
TagObject(js_obj->elements(), "(object elements)");
1960
SetInternalReference(obj, entry,
1961
"elements", js_obj->elements(),
1962
JSObject::kElementsOffset);
1966
void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
1967
if (string->IsConsString()) {
1968
ConsString* cs = ConsString::cast(string);
1969
SetInternalReference(cs, entry, "first", cs->first());
1970
SetInternalReference(cs, entry, "second", cs->second());
1971
} else if (string->IsSlicedString()) {
1972
SlicedString* ss = SlicedString::cast(string);
1973
SetInternalReference(ss, entry, "parent", ss->parent());
1978
void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
1979
#define EXTRACT_CONTEXT_FIELD(index, type, name) \
1980
SetInternalReference(context, entry, #name, context->get(Context::index), \
1981
FixedArray::OffsetOfElementAt(Context::index));
1982
EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
1983
EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
1984
EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
1985
EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
1986
if (context->IsNativeContext()) {
1987
TagObject(context->jsfunction_result_caches(),
1988
"(context func. result caches)");
1989
TagObject(context->normalized_map_cache(), "(context norm. map cache)");
1990
TagObject(context->runtime_context(), "(runtime context)");
1991
TagObject(context->data(), "(context data)");
1992
NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
1993
#undef EXTRACT_CONTEXT_FIELD
1994
for (int i = Context::FIRST_WEAK_SLOT;
1995
i < Context::NATIVE_CONTEXT_SLOTS;
1997
SetWeakReference(context, entry, i, context->get(i),
1998
FixedArray::OffsetOfElementAt(i));
2004
void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
2005
SetInternalReference(map, entry,
2006
"prototype", map->prototype(), Map::kPrototypeOffset);
2007
SetInternalReference(map, entry,
2008
"constructor", map->constructor(),
2009
Map::kConstructorOffset);
2010
if (map->HasTransitionArray()) {
2011
TransitionArray* transitions = map->transitions();
2013
Object* back_pointer = transitions->back_pointer_storage();
2014
TagObject(transitions->back_pointer_storage(), "(back pointer)");
2015
SetInternalReference(transitions, entry,
2016
"backpointer", back_pointer,
2017
TransitionArray::kBackPointerStorageOffset);
2018
IndexedReferencesExtractor transitions_refs(this, transitions, entry);
2019
transitions->Iterate(&transitions_refs);
2021
TagObject(transitions, "(transition array)");
2022
SetInternalReference(map, entry,
2023
"transitions", transitions,
2024
Map::kTransitionsOrBackPointerOffset);
2026
Object* back_pointer = map->GetBackPointer();
2027
TagObject(back_pointer, "(back pointer)");
2028
SetInternalReference(map, entry,
2029
"backpointer", back_pointer,
2030
Map::kTransitionsOrBackPointerOffset);
2032
DescriptorArray* descriptors = map->instance_descriptors();
2033
TagObject(descriptors, "(map descriptors)");
2034
SetInternalReference(map, entry,
2035
"descriptors", descriptors,
2036
Map::kDescriptorsOffset);
2038
SetInternalReference(map, entry,
2039
"code_cache", map->code_cache(),
2040
Map::kCodeCacheOffset);
2044
void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
2045
int entry, SharedFunctionInfo* shared) {
2046
HeapObject* obj = shared;
2047
SetInternalReference(obj, entry,
2048
"name", shared->name(),
2049
SharedFunctionInfo::kNameOffset);
2050
TagObject(shared->code(), "(code)");
2051
SetInternalReference(obj, entry,
2052
"code", shared->code(),
2053
SharedFunctionInfo::kCodeOffset);
2054
TagObject(shared->scope_info(), "(function scope info)");
2055
SetInternalReference(obj, entry,
2056
"scope_info", shared->scope_info(),
2057
SharedFunctionInfo::kScopeInfoOffset);
2058
SetInternalReference(obj, entry,
2059
"instance_class_name", shared->instance_class_name(),
2060
SharedFunctionInfo::kInstanceClassNameOffset);
2061
SetInternalReference(obj, entry,
2062
"script", shared->script(),
2063
SharedFunctionInfo::kScriptOffset);
2064
TagObject(shared->construct_stub(), "(code)");
2065
SetInternalReference(obj, entry,
2066
"construct_stub", shared->construct_stub(),
2067
SharedFunctionInfo::kConstructStubOffset);
2068
SetInternalReference(obj, entry,
2069
"function_data", shared->function_data(),
2070
SharedFunctionInfo::kFunctionDataOffset);
2071
SetInternalReference(obj, entry,
2072
"debug_info", shared->debug_info(),
2073
SharedFunctionInfo::kDebugInfoOffset);
2074
SetInternalReference(obj, entry,
2075
"inferred_name", shared->inferred_name(),
2076
SharedFunctionInfo::kInferredNameOffset);
2077
SetInternalReference(obj, entry,
2078
"this_property_assignments",
2079
shared->this_property_assignments(),
2080
SharedFunctionInfo::kThisPropertyAssignmentsOffset);
2081
SetWeakReference(obj, entry,
2082
1, shared->initial_map(),
2083
SharedFunctionInfo::kInitialMapOffset);
2087
void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
2088
HeapObject* obj = script;
2089
SetInternalReference(obj, entry,
2090
"source", script->source(),
2091
Script::kSourceOffset);
2092
SetInternalReference(obj, entry,
2093
"name", script->name(),
2094
Script::kNameOffset);
2095
SetInternalReference(obj, entry,
2096
"data", script->data(),
2097
Script::kDataOffset);
2098
SetInternalReference(obj, entry,
2099
"context_data", script->context_data(),
2100
Script::kContextOffset);
2101
TagObject(script->line_ends(), "(script line ends)");
2102
SetInternalReference(obj, entry,
2103
"line_ends", script->line_ends(),
2104
Script::kLineEndsOffset);
2108
void V8HeapExplorer::ExtractCodeCacheReferences(
2109
int entry, CodeCache* code_cache) {
2110
TagObject(code_cache->default_cache(), "(default code cache)");
2111
SetInternalReference(code_cache, entry,
2112
"default_cache", code_cache->default_cache(),
2113
CodeCache::kDefaultCacheOffset);
2114
TagObject(code_cache->normal_type_cache(), "(code type cache)");
2115
SetInternalReference(code_cache, entry,
2116
"type_cache", code_cache->normal_type_cache(),
2117
CodeCache::kNormalTypeCacheOffset);
2121
void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
2122
TagObject(code->relocation_info(), "(code relocation info)");
2123
SetInternalReference(code, entry,
2124
"relocation_info", code->relocation_info(),
2125
Code::kRelocationInfoOffset);
2126
SetInternalReference(code, entry,
2127
"handler_table", code->handler_table(),
2128
Code::kHandlerTableOffset);
2129
TagObject(code->deoptimization_data(), "(code deopt data)");
2130
SetInternalReference(code, entry,
2131
"deoptimization_data", code->deoptimization_data(),
2132
Code::kDeoptimizationDataOffset);
2133
SetInternalReference(code, entry,
2134
"type_feedback_info", code->type_feedback_info(),
2135
Code::kTypeFeedbackInfoOffset);
2136
SetInternalReference(code, entry,
2137
"gc_metadata", code->gc_metadata(),
2138
Code::kGCMetadataOffset);
2142
void V8HeapExplorer::ExtractJSGlobalPropertyCellReferences(
2143
int entry, JSGlobalPropertyCell* cell) {
2144
SetInternalReference(cell, entry, "value", cell->value());
2148
// For a JSFunction: extracts either its bound-function bindings (bound_this,
// bound_function, bound arguments) or its context-allocated locals and
// function variable from the declaration context's scope info.
void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
  if (!js_obj->IsJSFunction()) return;

  JSFunction* func = JSFunction::cast(js_obj);
  if (func->shared()->bound()) {
    FixedArray* bindings = func->function_bindings();
    SetNativeBindReference(js_obj, entry, "bound_this",
                           bindings->get(JSFunction::kBoundThisIndex));
    SetNativeBindReference(js_obj, entry, "bound_function",
                           bindings->get(JSFunction::kBoundFunctionIndex));
    for (int i = JSFunction::kBoundArgumentsStartIndex;
         i < bindings->length(); i++) {
      const char* reference_name = collection_->names()->GetFormatted(
          "bound_argument_%d",
          i - JSFunction::kBoundArgumentsStartIndex);
      SetNativeBindReference(js_obj, entry, reference_name,
                             bindings->get(i));
    }
  } else {
    Context* context = func->context()->declaration_context();
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetClosureReference(js_obj, entry, local_name, context->get(idx));
    }

    // Add function variable.
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetClosureReference(js_obj, entry, name, context->get(idx));
      }
    }
  }
}
void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
2191
if (js_obj->HasFastProperties()) {
2192
DescriptorArray* descs = js_obj->map()->instance_descriptors();
2193
int real_size = js_obj->map()->NumberOfOwnDescriptors();
2194
for (int i = 0; i < descs->number_of_descriptors(); i++) {
2195
if (descs->GetDetails(i).descriptor_index() > real_size) continue;
2196
switch (descs->GetType(i)) {
2198
int index = descs->GetFieldIndex(i);
2200
String* k = descs->GetKey(i);
2201
if (index < js_obj->map()->inobject_properties()) {
2202
Object* value = js_obj->InObjectPropertyAt(index);
2203
if (k != heap_->hidden_symbol()) {
2204
SetPropertyReference(
2208
js_obj->GetInObjectPropertyOffset(index));
2210
TagObject(value, "(hidden properties)");
2211
SetInternalReference(
2213
"hidden_properties", value,
2214
js_obj->GetInObjectPropertyOffset(index));
2217
Object* value = js_obj->FastPropertyAt(index);
2218
if (k != heap_->hidden_symbol()) {
2219
SetPropertyReference(js_obj, entry, k, value);
2221
TagObject(value, "(hidden properties)");
2222
SetInternalReference(js_obj, entry, "hidden_properties", value);
2227
case CONSTANT_FUNCTION:
2228
SetPropertyReference(
2230
descs->GetKey(i), descs->GetConstantFunction(i));
2233
Object* callback_obj = descs->GetValue(i);
2234
if (callback_obj->IsAccessorPair()) {
2235
AccessorPair* accessors = AccessorPair::cast(callback_obj);
2236
if (Object* getter = accessors->getter()) {
2237
SetPropertyReference(js_obj, entry, descs->GetKey(i),
2240
if (Object* setter = accessors->setter()) {
2241
SetPropertyReference(js_obj, entry, descs->GetKey(i),
2247
case NORMAL: // only in slow mode
2248
case HANDLER: // only in lookup results, not in descriptors
2249
case INTERCEPTOR: // only in lookup results, not in descriptors
2258
StringDictionary* dictionary = js_obj->property_dictionary();
2259
int length = dictionary->Capacity();
2260
for (int i = 0; i < length; ++i) {
2261
Object* k = dictionary->KeyAt(i);
2262
if (dictionary->IsKey(k)) {
2263
Object* target = dictionary->ValueAt(i);
2264
// We assume that global objects can only have slow properties.
2265
Object* value = target->IsJSGlobalPropertyCell()
2266
? JSGlobalPropertyCell::cast(target)->value()
2268
if (k != heap_->hidden_symbol()) {
2269
SetPropertyReference(js_obj, entry, String::cast(k), value);
2271
TagObject(value, "(hidden properties)");
2272
SetInternalReference(js_obj, entry, "hidden_properties", value);
2280
void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
2281
if (js_obj->HasFastObjectElements()) {
2282
FixedArray* elements = FixedArray::cast(js_obj->elements());
2283
int length = js_obj->IsJSArray() ?
2284
Smi::cast(JSArray::cast(js_obj)->length())->value() :
2286
for (int i = 0; i < length; ++i) {
2287
if (!elements->get(i)->IsTheHole()) {
2288
SetElementReference(js_obj, entry, i, elements->get(i));
2291
} else if (js_obj->HasDictionaryElements()) {
2292
SeededNumberDictionary* dictionary = js_obj->element_dictionary();
2293
int length = dictionary->Capacity();
2294
for (int i = 0; i < length; ++i) {
2295
Object* k = dictionary->KeyAt(i);
2296
if (dictionary->IsKey(k)) {
2297
ASSERT(k->IsNumber());
2298
uint32_t index = static_cast<uint32_t>(k->Number());
2299
SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
2306
void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
2307
int length = js_obj->GetInternalFieldCount();
2308
for (int i = 0; i < length; ++i) {
2309
Object* o = js_obj->GetInternalField(i);
2310
SetInternalReference(
2311
js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
2316
String* V8HeapExplorer::GetConstructorName(JSObject* object) {
2317
Heap* heap = object->GetHeap();
2318
if (object->IsJSFunction()) return heap->closure_symbol();
2319
String* constructor_name = object->constructor_name();
2320
if (constructor_name == heap->Object_symbol()) {
2321
// Look up an immediate "constructor" property, if it is a function,
2322
// return its name. This is for instances of binding objects, which
2323
// have prototype constructor type "Object".
2324
Object* constructor_prop = NULL;
2325
LookupResult result(heap->isolate());
2326
object->LocalLookupRealNamedProperty(heap->constructor_symbol(), &result);
2327
if (!result.IsFound()) return object->constructor_name();
2329
constructor_prop = result.GetLazyValue();
2330
if (constructor_prop->IsJSFunction()) {
2331
Object* maybe_name =
2332
JSFunction::cast(constructor_prop)->shared()->name();
2333
if (maybe_name->IsString()) {
2334
String* name = String::cast(maybe_name);
2335
if (name->length() > 0) return name;
2339
return object->constructor_name();
2343
HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
2344
if (!obj->IsHeapObject()) return NULL;
2345
return filler_->FindOrAddEntry(obj, this);
2349
class RootsReferencesExtractor : public ObjectVisitor {
2352
IndexTag(int index, VisitorSynchronization::SyncTag tag)
2353
: index(index), tag(tag) { }
2355
VisitorSynchronization::SyncTag tag;
2359
RootsReferencesExtractor()
2360
: collecting_all_references_(false),
2361
previous_reference_count_(0) {
2364
void VisitPointers(Object** start, Object** end) {
2365
if (collecting_all_references_) {
2366
for (Object** p = start; p < end; p++) all_references_.Add(*p);
2368
for (Object** p = start; p < end; p++) strong_references_.Add(*p);
2372
void SetCollectingAllReferences() { collecting_all_references_ = true; }
2374
void FillReferences(V8HeapExplorer* explorer) {
2375
ASSERT(strong_references_.length() <= all_references_.length());
2376
for (int i = 0; i < reference_tags_.length(); ++i) {
2377
explorer->SetGcRootsReference(reference_tags_[i].tag);
2379
int strong_index = 0, all_index = 0, tags_index = 0;
2380
while (all_index < all_references_.length()) {
2381
if (strong_index < strong_references_.length() &&
2382
strong_references_[strong_index] == all_references_[all_index]) {
2383
explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
2385
all_references_[all_index++]);
2388
explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
2390
all_references_[all_index++]);
2392
if (reference_tags_[tags_index].index == all_index) ++tags_index;
2396
void Synchronize(VisitorSynchronization::SyncTag tag) {
2397
if (collecting_all_references_ &&
2398
previous_reference_count_ != all_references_.length()) {
2399
previous_reference_count_ = all_references_.length();
2400
reference_tags_.Add(IndexTag(previous_reference_count_, tag));
2405
bool collecting_all_references_;
2406
List<Object*> strong_references_;
2407
List<Object*> all_references_;
2408
int previous_reference_count_;
2409
List<IndexTag> reference_tags_;
2413
bool V8HeapExplorer::IterateAndExtractReferences(
2414
SnapshotFillerInterface* filler) {
2415
HeapIterator iterator(HeapIterator::kFilterUnreachable);
2418
bool interrupted = false;
2420
// Heap iteration with filtering must be finished in any case.
2421
for (HeapObject* obj = iterator.next();
2423
obj = iterator.next(), progress_->ProgressStep()) {
2425
ExtractReferences(obj);
2426
if (!progress_->ProgressReport(false)) interrupted = true;
2434
SetRootGcRootsReference();
2435
RootsReferencesExtractor extractor;
2436
heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
2437
extractor.SetCollectingAllReferences();
2438
heap_->IterateRoots(&extractor, VISIT_ALL);
2439
extractor.FillReferences(this);
2441
return progress_->ProgressReport(true);
2445
bool V8HeapExplorer::IsEssentialObject(Object* object) {
2446
// We have to use raw_unchecked_* versions because checked versions
2447
// would fail during iteration over object properties.
2448
return object->IsHeapObject()
2449
&& !object->IsOddball()
2450
&& object != heap_->raw_unchecked_empty_byte_array()
2451
&& object != heap_->raw_unchecked_empty_fixed_array()
2452
&& object != heap_->raw_unchecked_empty_descriptor_array()
2453
&& object != heap_->raw_unchecked_fixed_array_map()
2454
&& object != heap_->raw_unchecked_global_property_cell_map()
2455
&& object != heap_->raw_unchecked_shared_function_info_map()
2456
&& object != heap_->raw_unchecked_free_space_map()
2457
&& object != heap_->raw_unchecked_one_pointer_filler_map()
2458
&& object != heap_->raw_unchecked_two_pointer_filler_map();
2462
void V8HeapExplorer::SetClosureReference(HeapObject* parent_obj,
2464
String* reference_name,
2465
Object* child_obj) {
2466
HeapEntry* child_entry = GetEntry(child_obj);
2467
if (child_entry != NULL) {
2468
filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
2470
collection_->names()->GetName(reference_name),
2476
void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
2478
const char* reference_name,
2479
Object* child_obj) {
2480
HeapEntry* child_entry = GetEntry(child_obj);
2481
if (child_entry != NULL) {
2482
filler_->SetNamedReference(HeapGraphEdge::kShortcut,
2490
void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
2493
Object* child_obj) {
2494
HeapEntry* child_entry = GetEntry(child_obj);
2495
if (child_entry != NULL) {
2496
filler_->SetIndexedReference(HeapGraphEdge::kElement,
2504
void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
2506
const char* reference_name,
2509
HeapEntry* child_entry = GetEntry(child_obj);
2510
if (child_entry == NULL) return;
2511
if (IsEssentialObject(child_obj)) {
2512
filler_->SetNamedReference(HeapGraphEdge::kInternal,
2517
IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2521
void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
2526
HeapEntry* child_entry = GetEntry(child_obj);
2527
if (child_entry == NULL) return;
2528
if (IsEssentialObject(child_obj)) {
2529
filler_->SetNamedReference(HeapGraphEdge::kInternal,
2531
collection_->names()->GetName(index),
2534
IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2538
void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
2541
Object* child_obj) {
2542
HeapEntry* child_entry = GetEntry(child_obj);
2543
if (child_entry != NULL && IsEssentialObject(child_obj)) {
2544
filler_->SetIndexedReference(HeapGraphEdge::kHidden,
2552
void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
2557
HeapEntry* child_entry = GetEntry(child_obj);
2558
if (child_entry != NULL) {
2559
filler_->SetIndexedReference(HeapGraphEdge::kWeak,
2563
IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2568
void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
2570
String* reference_name,
2572
const char* name_format_string,
2574
HeapEntry* child_entry = GetEntry(child_obj);
2575
if (child_entry != NULL) {
2576
HeapGraphEdge::Type type = reference_name->length() > 0 ?
2577
HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
2578
const char* name = name_format_string != NULL ?
2579
collection_->names()->GetFormatted(
2581
*reference_name->ToCString(DISALLOW_NULLS,
2582
ROBUST_STRING_TRAVERSAL)) :
2583
collection_->names()->GetName(reference_name);
2585
filler_->SetNamedReference(type,
2589
IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2594
void V8HeapExplorer::SetRootGcRootsReference() {
2595
filler_->SetIndexedAutoIndexReference(
2596
HeapGraphEdge::kElement,
2597
snapshot_->root()->index(),
2598
snapshot_->gc_roots());
2602
void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
2603
HeapEntry* child_entry = GetEntry(child_obj);
2604
ASSERT(child_entry != NULL);
2605
filler_->SetNamedAutoIndexReference(
2606
HeapGraphEdge::kShortcut,
2607
snapshot_->root()->index(),
2612
void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
2613
filler_->SetIndexedAutoIndexReference(
2614
HeapGraphEdge::kElement,
2615
snapshot_->gc_roots()->index(),
2616
snapshot_->gc_subroot(tag));
2620
void V8HeapExplorer::SetGcSubrootReference(
2621
VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
2622
HeapEntry* child_entry = GetEntry(child_obj);
2623
if (child_entry != NULL) {
2624
const char* name = GetStrongGcSubrootName(child_obj);
2626
filler_->SetNamedReference(
2627
HeapGraphEdge::kInternal,
2628
snapshot_->gc_subroot(tag)->index(),
2632
filler_->SetIndexedAutoIndexReference(
2633
is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kElement,
2634
snapshot_->gc_subroot(tag)->index(),
2641
const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
2642
if (strong_gc_subroot_names_.is_empty()) {
2643
#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
2644
#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
2645
STRONG_ROOT_LIST(ROOT_NAME)
2647
#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
2648
STRUCT_LIST(STRUCT_MAP_NAME)
2649
#undef STRUCT_MAP_NAME
2650
#define SYMBOL_NAME(name, str) NAME_ENTRY(name)
2651
SYMBOL_LIST(SYMBOL_NAME)
2654
CHECK(!strong_gc_subroot_names_.is_empty());
2656
return strong_gc_subroot_names_.GetTag(object);
2660
void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2661
if (IsEssentialObject(obj)) {
2662
HeapEntry* entry = GetEntry(obj);
2663
if (entry->name()[0] == '\0') {
2664
entry->set_name(tag);
2670
class GlobalObjectsEnumerator : public ObjectVisitor {
2672
virtual void VisitPointers(Object** start, Object** end) {
2673
for (Object** p = start; p < end; p++) {
2674
if ((*p)->IsNativeContext()) {
2675
Context* context = Context::cast(*p);
2676
JSObject* proxy = context->global_proxy();
2677
if (proxy->IsJSGlobalProxy()) {
2678
Object* global = proxy->map()->prototype();
2679
if (global->IsJSGlobalObject()) {
2680
objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
2686
int count() { return objects_.length(); }
2687
Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2690
List<Handle<JSGlobalObject> > objects_;
2694
// Modifies heap. Must not be run during heap traversal.
2695
void V8HeapExplorer::TagGlobalObjects() {
2697
Isolate* isolate = Isolate::Current();
2698
GlobalObjectsEnumerator enumerator;
2699
isolate->global_handles()->IterateAllRoots(&enumerator);
2700
Handle<String> document_string =
2701
isolate->factory()->NewStringFromAscii(CStrVector("document"));
2702
Handle<String> url_string =
2703
isolate->factory()->NewStringFromAscii(CStrVector("URL"));
2704
const char** urls = NewArray<const char*>(enumerator.count());
2705
for (int i = 0, l = enumerator.count(); i < l; ++i) {
2708
Handle<JSGlobalObject> global_obj = enumerator.at(i);
2709
Object* obj_document;
2710
if (global_obj->GetProperty(*document_string)->ToObject(&obj_document) &&
2711
obj_document->IsJSObject()) {
2712
// FixMe: Workaround: SharedWorker's current Isolate has NULL context.
2713
// As result GetProperty(*url_string) will crash.
2714
if (!Isolate::Current()->context() && obj_document->IsJSGlobalProxy())
2716
JSObject* document = JSObject::cast(obj_document);
2718
if (document->GetProperty(*url_string)->ToObject(&obj_url) &&
2719
obj_url->IsString()) {
2720
urls[i] = collection_->names()->GetName(String::cast(obj_url));
2725
AssertNoAllocation no_allocation;
2726
for (int i = 0, l = enumerator.count(); i < l; ++i) {
2727
objects_tags_.SetTag(*enumerator.at(i), urls[i]);
2734
class GlobalHandlesExtractor : public ObjectVisitor {
2736
explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
2737
: explorer_(explorer) {}
2738
virtual ~GlobalHandlesExtractor() {}
2739
virtual void VisitPointers(Object** start, Object** end) {
2742
virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
2743
explorer_->VisitSubtreeWrapper(p, class_id);
2746
NativeObjectsExplorer* explorer_;
2750
class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
2752
BasicHeapEntriesAllocator(
2753
HeapSnapshot* snapshot,
2754
HeapEntry::Type entries_type)
2755
: snapshot_(snapshot),
2756
collection_(snapshot_->collection()),
2757
entries_type_(entries_type) {
2759
virtual HeapEntry* AllocateEntry(HeapThing ptr);
2761
HeapSnapshot* snapshot_;
2762
HeapSnapshotsCollection* collection_;
2763
HeapEntry::Type entries_type_;
2767
HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
2768
v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
2769
intptr_t elements = info->GetElementCount();
2770
intptr_t size = info->GetSizeInBytes();
2771
const char* name = elements != -1
2772
? collection_->names()->GetFormatted(
2773
"%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
2774
: collection_->names()->GetCopy(info->GetLabel());
2775
return snapshot_->AddEntry(
2778
HeapObjectsMap::GenerateId(info),
2779
size != -1 ? static_cast<int>(size) : 0);
2783
NativeObjectsExplorer::NativeObjectsExplorer(
2784
HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
2785
: snapshot_(snapshot),
2786
collection_(snapshot_->collection()),
2787
progress_(progress),
2788
embedder_queried_(false),
2789
objects_by_info_(RetainedInfosMatch),
2790
native_groups_(StringsMatch),
2792
synthetic_entries_allocator_ =
2793
new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
2794
native_entries_allocator_ =
2795
new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
2799
NativeObjectsExplorer::~NativeObjectsExplorer() {
2800
for (HashMap::Entry* p = objects_by_info_.Start();
2802
p = objects_by_info_.Next(p)) {
2803
v8::RetainedObjectInfo* info =
2804
reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2806
List<HeapObject*>* objects =
2807
reinterpret_cast<List<HeapObject*>* >(p->value);
2810
for (HashMap::Entry* p = native_groups_.Start();
2812
p = native_groups_.Next(p)) {
2813
v8::RetainedObjectInfo* info =
2814
reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
2817
delete synthetic_entries_allocator_;
2818
delete native_entries_allocator_;
2822
int NativeObjectsExplorer::EstimateObjectsCount() {
2823
FillRetainedObjects();
2824
return objects_by_info_.occupancy();
2828
void NativeObjectsExplorer::FillRetainedObjects() {
2829
if (embedder_queried_) return;
2830
Isolate* isolate = Isolate::Current();
2831
// Record objects that are joined into ObjectGroups.
2832
isolate->heap()->CallGlobalGCPrologueCallback();
2833
List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
2834
for (int i = 0; i < groups->length(); ++i) {
2835
ObjectGroup* group = groups->at(i);
2836
if (group->info_ == NULL) continue;
2837
List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info_);
2838
for (size_t j = 0; j < group->length_; ++j) {
2839
HeapObject* obj = HeapObject::cast(*group->objects_[j]);
2841
in_groups_.Insert(obj);
2843
group->info_ = NULL; // Acquire info object ownership.
2845
isolate->global_handles()->RemoveObjectGroups();
2846
isolate->heap()->CallGlobalGCEpilogueCallback();
2847
// Record objects that are not in ObjectGroups, but have class ID.
2848
GlobalHandlesExtractor extractor(this);
2849
isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
2850
embedder_queried_ = true;
2853
void NativeObjectsExplorer::FillImplicitReferences() {
2854
Isolate* isolate = Isolate::Current();
2855
List<ImplicitRefGroup*>* groups =
2856
isolate->global_handles()->implicit_ref_groups();
2857
for (int i = 0; i < groups->length(); ++i) {
2858
ImplicitRefGroup* group = groups->at(i);
2859
HeapObject* parent = *group->parent_;
2861
filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
2862
ASSERT(parent_entry != HeapEntry::kNoEntry);
2863
Object*** children = group->children_;
2864
for (size_t j = 0; j < group->length_; ++j) {
2865
Object* child = *children[j];
2866
HeapEntry* child_entry =
2867
filler_->FindOrAddEntry(child, native_entries_allocator_);
2868
filler_->SetNamedReference(
2869
HeapGraphEdge::kInternal,
2877
List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2878
v8::RetainedObjectInfo* info) {
2879
HashMap::Entry* entry =
2880
objects_by_info_.Lookup(info, InfoHash(info), true);
2881
if (entry->value != NULL) {
2884
entry->value = new List<HeapObject*>(4);
2886
return reinterpret_cast<List<HeapObject*>* >(entry->value);
2890
bool NativeObjectsExplorer::IterateAndExtractReferences(
2891
SnapshotFillerInterface* filler) {
2893
FillRetainedObjects();
2894
FillImplicitReferences();
2895
if (EstimateObjectsCount() > 0) {
2896
for (HashMap::Entry* p = objects_by_info_.Start();
2898
p = objects_by_info_.Next(p)) {
2899
v8::RetainedObjectInfo* info =
2900
reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2901
SetNativeRootReference(info);
2902
List<HeapObject*>* objects =
2903
reinterpret_cast<List<HeapObject*>* >(p->value);
2904
for (int i = 0; i < objects->length(); ++i) {
2905
SetWrapperNativeReferences(objects->at(i), info);
2908
SetRootNativeRootsReference();
2915
class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
2917
explicit NativeGroupRetainedObjectInfo(const char* label)
2919
hash_(reinterpret_cast<intptr_t>(label)),
2923
virtual ~NativeGroupRetainedObjectInfo() {}
2924
virtual void Dispose() {
2929
virtual bool IsEquivalent(RetainedObjectInfo* other) {
2930
return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
2932
virtual intptr_t GetHash() { return hash_; }
2933
virtual const char* GetLabel() { return label_; }
2942
NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2943
const char* label) {
2944
const char* label_copy = collection_->names()->GetCopy(label);
2945
uint32_t hash = HashSequentialString(label_copy,
2946
static_cast<int>(strlen(label_copy)),
2948
HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
2950
if (entry->value == NULL) {
2951
entry->value = new NativeGroupRetainedObjectInfo(label);
2953
return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2957
void NativeObjectsExplorer::SetNativeRootReference(
2958
v8::RetainedObjectInfo* info) {
2959
HeapEntry* child_entry =
2960
filler_->FindOrAddEntry(info, native_entries_allocator_);
2961
ASSERT(child_entry != NULL);
2962
NativeGroupRetainedObjectInfo* group_info =
2963
FindOrAddGroupInfo(info->GetGroupLabel());
2964
HeapEntry* group_entry =
2965
filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2966
filler_->SetNamedAutoIndexReference(
2967
HeapGraphEdge::kInternal,
2968
group_entry->index(),
2973
void NativeObjectsExplorer::SetWrapperNativeReferences(
2974
HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2975
HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2976
ASSERT(wrapper_entry != NULL);
2977
HeapEntry* info_entry =
2978
filler_->FindOrAddEntry(info, native_entries_allocator_);
2979
ASSERT(info_entry != NULL);
2980
filler_->SetNamedReference(HeapGraphEdge::kInternal,
2981
wrapper_entry->index(),
2984
filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2985
info_entry->index(),
2990
void NativeObjectsExplorer::SetRootNativeRootsReference() {
2991
for (HashMap::Entry* entry = native_groups_.Start();
2993
entry = native_groups_.Next(entry)) {
2994
NativeGroupRetainedObjectInfo* group_info =
2995
static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2996
HeapEntry* group_entry =
2997
filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2998
ASSERT(group_entry != NULL);
2999
filler_->SetIndexedAutoIndexReference(
3000
HeapGraphEdge::kElement,
3001
snapshot_->root()->index(),
3007
void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
3008
if (in_groups_.Contains(*p)) return;
3009
Isolate* isolate = Isolate::Current();
3010
v8::RetainedObjectInfo* info =
3011
isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
3012
if (info == NULL) return;
3013
GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
3017
class SnapshotFiller : public SnapshotFillerInterface {
3019
explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
3020
: snapshot_(snapshot),
3021
collection_(snapshot->collection()),
3022
entries_(entries) { }
3023
HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
3024
HeapEntry* entry = allocator->AllocateEntry(ptr);
3025
entries_->Pair(ptr, entry->index());
3028
HeapEntry* FindEntry(HeapThing ptr) {
3029
int index = entries_->Map(ptr);
3030
return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
3032
HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
3033
HeapEntry* entry = FindEntry(ptr);
3034
return entry != NULL ? entry : AddEntry(ptr, allocator);
3036
void SetIndexedReference(HeapGraphEdge::Type type,
3039
HeapEntry* child_entry) {
3040
HeapEntry* parent_entry = &snapshot_->entries()[parent];
3041
parent_entry->SetIndexedReference(type, index, child_entry);
3043
void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
3045
HeapEntry* child_entry) {
3046
HeapEntry* parent_entry = &snapshot_->entries()[parent];
3047
int index = parent_entry->children_count() + 1;
3048
parent_entry->SetIndexedReference(type, index, child_entry);
3050
void SetNamedReference(HeapGraphEdge::Type type,
3052
const char* reference_name,
3053
HeapEntry* child_entry) {
3054
HeapEntry* parent_entry = &snapshot_->entries()[parent];
3055
parent_entry->SetNamedReference(type, reference_name, child_entry);
3057
void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
3059
HeapEntry* child_entry) {
3060
HeapEntry* parent_entry = &snapshot_->entries()[parent];
3061
int index = parent_entry->children_count() + 1;
3062
parent_entry->SetNamedReference(
3064
collection_->names()->GetName(index),
3069
HeapSnapshot* snapshot_;
3070
HeapSnapshotsCollection* collection_;
3071
HeapEntriesMap* entries_;
3075
HeapSnapshotGenerator::HeapSnapshotGenerator(HeapSnapshot* snapshot,
3076
v8::ActivityControl* control)
3077
: snapshot_(snapshot),
3079
v8_heap_explorer_(snapshot_, this),
3080
dom_explorer_(snapshot_, this) {
3084
bool HeapSnapshotGenerator::GenerateSnapshot() {
3085
v8_heap_explorer_.TagGlobalObjects();
3087
// TODO(1562) Profiler assumes that any object that is in the heap after
3088
// full GC is reachable from the root when computing dominators.
3089
// This is not true for weakly reachable objects.
3090
// As a temporary solution we call GC twice.
3091
Isolate::Current()->heap()->CollectAllGarbage(
3092
Heap::kMakeHeapIterableMask,
3093
"HeapSnapshotGenerator::GenerateSnapshot");
3094
Isolate::Current()->heap()->CollectAllGarbage(
3095
Heap::kMakeHeapIterableMask,
3096
"HeapSnapshotGenerator::GenerateSnapshot");
3099
Heap* debug_heap = Isolate::Current()->heap();
3100
CHECK(!debug_heap->old_data_space()->was_swept_conservatively());
3101
CHECK(!debug_heap->old_pointer_space()->was_swept_conservatively());
3102
CHECK(!debug_heap->code_space()->was_swept_conservatively());
3103
CHECK(!debug_heap->cell_space()->was_swept_conservatively());
3104
CHECK(!debug_heap->map_space()->was_swept_conservatively());
3107
// The following code uses heap iterators, so we want the heap to be
3108
// stable. It should follow TagGlobalObjects as that can allocate.
3109
AssertNoAllocation no_alloc;
3112
debug_heap->Verify();
3115
SetProgressTotal(1); // 1 pass.
3118
debug_heap->Verify();
3121
if (!FillReferences()) return false;
3123
snapshot_->FillChildren();
3124
snapshot_->RememberLastJSObjectId();
3126
progress_counter_ = progress_total_;
3127
if (!ProgressReport(true)) return false;
3132
void HeapSnapshotGenerator::ProgressStep() {
3133
++progress_counter_;
3137
bool HeapSnapshotGenerator::ProgressReport(bool force) {
3138
const int kProgressReportGranularity = 10000;
3139
if (control_ != NULL
3140
&& (force || progress_counter_ % kProgressReportGranularity == 0)) {
3142
control_->ReportProgressValue(progress_counter_, progress_total_) ==
3143
v8::ActivityControl::kContinue;
3149
void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
3150
if (control_ == NULL) return;
3151
HeapIterator iterator(HeapIterator::kFilterUnreachable);
3152
progress_total_ = iterations_count * (
3153
v8_heap_explorer_.EstimateObjectsCount(&iterator) +
3154
dom_explorer_.EstimateObjectsCount());
3155
progress_counter_ = 0;
3159
bool HeapSnapshotGenerator::FillReferences() {
3160
SnapshotFiller filler(snapshot_, &entries_);
3161
v8_heap_explorer_.AddRootEntries(&filler);
3162
return v8_heap_explorer_.IterateAndExtractReferences(&filler)
3163
&& dom_explorer_.IterateAndExtractReferences(&filler);
3167
template<int bytes> struct MaxDecimalDigitsIn;
3168
template<> struct MaxDecimalDigitsIn<4> {
3169
static const int kSigned = 11;
3170
static const int kUnsigned = 10;
3172
template<> struct MaxDecimalDigitsIn<8> {
3173
static const int kSigned = 20;
3174
static const int kUnsigned = 20;
3178
class OutputStreamWriter {
3180
explicit OutputStreamWriter(v8::OutputStream* stream)
3182
chunk_size_(stream->GetChunkSize()),
3183
chunk_(chunk_size_),
3186
ASSERT(chunk_size_ > 0);
3188
bool aborted() { return aborted_; }
3189
void AddCharacter(char c) {
3191
ASSERT(chunk_pos_ < chunk_size_);
3192
chunk_[chunk_pos_++] = c;
3195
void AddString(const char* s) {
3196
AddSubstring(s, StrLength(s));
3198
void AddSubstring(const char* s, int n) {
3200
ASSERT(static_cast<size_t>(n) <= strlen(s));
3201
const char* s_end = s + n;
3203
int s_chunk_size = Min(
3204
chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
3205
ASSERT(s_chunk_size > 0);
3206
memcpy(chunk_.start() + chunk_pos_, s, s_chunk_size);
3208
chunk_pos_ += s_chunk_size;
3212
void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
3214
if (aborted_) return;
3215
ASSERT(chunk_pos_ < chunk_size_);
3216
if (chunk_pos_ != 0) {
3219
stream_->EndOfStream();
3223
template<typename T>
3224
void AddNumberImpl(T n, const char* format) {
3225
// Buffer for the longest value plus trailing \0
3226
static const int kMaxNumberSize =
3227
MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
3228
if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
3229
int result = OS::SNPrintF(
3230
chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
3231
ASSERT(result != -1);
3232
chunk_pos_ += result;
3235
EmbeddedVector<char, kMaxNumberSize> buffer;
3236
int result = OS::SNPrintF(buffer, format, n);
3238
ASSERT(result != -1);
3239
AddString(buffer.start());
3242
void MaybeWriteChunk() {
3243
ASSERT(chunk_pos_ <= chunk_size_);
3244
if (chunk_pos_ == chunk_size_) {
3249
if (aborted_) return;
3250
if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
3251
v8::OutputStream::kAbort) aborted_ = true;
3255
v8::OutputStream* stream_;
3257
ScopedVector<char> chunk_;
3263
// type, name|index, to_node.
3264
const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
3265
// type, name, id, self_size, children_index.
3266
const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5;
3268
void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
3269
ASSERT(writer_ == NULL);
3270
writer_ = new OutputStreamWriter(stream);
3272
HeapSnapshot* original_snapshot = NULL;
3273
if (snapshot_->RawSnapshotSize() >=
3274
SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize) {
3275
// The snapshot is too big. Serialize a fake snapshot.
3276
original_snapshot = snapshot_;
3277
snapshot_ = CreateFakeSnapshot();
3285
if (original_snapshot != NULL) {
3287
snapshot_ = original_snapshot;
3292
HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() {
3293
HeapSnapshot* result = new HeapSnapshot(snapshot_->collection(),
3294
HeapSnapshot::kFull,
3297
result->AddRootEntry();
3298
const char* text = snapshot_->collection()->names()->GetFormatted(
3299
"The snapshot is too big. "
3300
"Maximum snapshot size is %" V8_PTR_PREFIX "u MB. "
3301
"Actual snapshot size is %" V8_PTR_PREFIX "u MB.",
3302
SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize / MB,
3303
(snapshot_->RawSnapshotSize() + MB - 1) / MB);
3304
HeapEntry* message = result->AddEntry(HeapEntry::kString, text, 0, 4);
3305
result->root()->SetIndexedReference(HeapGraphEdge::kElement, 1, message);
3306
result->FillChildren();
3311
void HeapSnapshotJSONSerializer::SerializeImpl() {
3312
ASSERT(0 == snapshot_->root()->index());
3313
writer_->AddCharacter('{');
3314
writer_->AddString("\"snapshot\":{");
3315
SerializeSnapshot();
3316
if (writer_->aborted()) return;
3317
writer_->AddString("},\n");
3318
writer_->AddString("\"nodes\":[");
3320
if (writer_->aborted()) return;
3321
writer_->AddString("],\n");
3322
writer_->AddString("\"edges\":[");
3324
if (writer_->aborted()) return;
3325
writer_->AddString("],\n");
3326
writer_->AddString("\"strings\":[");
3328
if (writer_->aborted()) return;
3329
writer_->AddCharacter(']');
3330
writer_->AddCharacter('}');
3331
writer_->Finalize();
3335
int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
3336
HashMap::Entry* cache_entry = strings_.Lookup(
3337
const_cast<char*>(s), ObjectHash(s), true);
3338
if (cache_entry->value == NULL) {
3339
cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
3341
return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
3345
static int utoa(unsigned value, const Vector<char>& buffer, int buffer_pos) {
3346
int number_of_digits = 0;
3352
buffer_pos += number_of_digits;
3353
int result = buffer_pos;
3355
int last_digit = value % 10;
3356
buffer[--buffer_pos] = '0' + last_digit;
3363
void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
3365
// The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
3366
static const int kBufferSize =
3367
MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2; // NOLINT
3368
EmbeddedVector<char, kBufferSize> buffer;
3369
int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
3370
|| edge->type() == HeapGraphEdge::kHidden
3371
|| edge->type() == HeapGraphEdge::kWeak
3372
? edge->index() : GetStringId(edge->name());
3375
buffer[buffer_pos++] = ',';
3377
buffer_pos = utoa(edge->type(), buffer, buffer_pos);
3378
buffer[buffer_pos++] = ',';
3379
buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
3380
buffer[buffer_pos++] = ',';
3381
buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
3382
buffer[buffer_pos++] = '\n';
3383
buffer[buffer_pos++] = '\0';
3384
writer_->AddString(buffer.start());
3388
void HeapSnapshotJSONSerializer::SerializeEdges() {
3389
List<HeapGraphEdge*>& edges = snapshot_->children();
3390
for (int i = 0; i < edges.length(); ++i) {
3392
edges[i - 1]->from()->index() <= edges[i]->from()->index());
3393
SerializeEdge(edges[i], i == 0);
3394
if (writer_->aborted()) return;
3399
void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
3400
// The buffer needs space for 5 unsigned ints, 5 commas, \n and \0
3401
static const int kBufferSize =
3402
5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
3404
EmbeddedVector<char, kBufferSize> buffer;
3406
if (entry_index(entry) != 0) {
3407
buffer[buffer_pos++] = ',';
3409
buffer_pos = utoa(entry->type(), buffer, buffer_pos);
3410
buffer[buffer_pos++] = ',';
3411
buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
3412
buffer[buffer_pos++] = ',';
3413
buffer_pos = utoa(entry->id(), buffer, buffer_pos);
3414
buffer[buffer_pos++] = ',';
3415
buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
3416
buffer[buffer_pos++] = ',';
3417
buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
3418
buffer[buffer_pos++] = '\n';
3419
buffer[buffer_pos++] = '\0';
3420
writer_->AddString(buffer.start());
3424
void HeapSnapshotJSONSerializer::SerializeNodes() {
3425
List<HeapEntry>& entries = snapshot_->entries();
3426
for (int i = 0; i < entries.length(); ++i) {
3427
SerializeNode(&entries[i]);
3428
if (writer_->aborted()) return;
3433
// Writes the body of the top-level "snapshot" JSON object: title, uid, the
// "meta" field-layout description, and node/edge counts.
// NOTE(review): several JSON_S(...) items of the meta arrays are missing from
// this mangled chunk (see the gaps in the interleaved decimal line numbers);
// this span does not compile as-is and the exact missing literals cannot be
// recovered from the visible text.
void HeapSnapshotJSONSerializer::SerializeSnapshot() {
3434
writer_->AddString("\"title\":\"");
3435
writer_->AddString(snapshot_->title());
3436
writer_->AddString("\"");
3437
writer_->AddString(",\"uid\":");
3438
writer_->AddNumber(snapshot_->uid());
3439
writer_->AddString(",\"meta\":");
3440
// The object describing node serialization layout.
3441
// We use a set of macros to improve readability.
3442
#define JSON_A(s) "[" s "]"
3443
#define JSON_O(s) "{" s "}"
3444
#define JSON_S(s) "\"" s "\""
3445
writer_->AddString(JSON_O(
3446
JSON_S("node_fields") ":" JSON_A(
3450
JSON_S("self_size") ","
3451
JSON_S("edge_count")) ","
3452
JSON_S("node_types") ":" JSON_A(
3454
JSON_S("hidden") ","
3456
JSON_S("string") ","
3457
JSON_S("object") ","
3459
JSON_S("closure") ","
3460
JSON_S("regexp") ","
3461
JSON_S("number") ","
3462
JSON_S("native") ","
3463
JSON_S("synthetic")) ","
3464
JSON_S("string") ","
3465
JSON_S("number") ","
3466
JSON_S("number") ","
3467
JSON_S("number") ","
3468
JSON_S("number") ","
3469
JSON_S("number")) ","
3470
JSON_S("edge_fields") ":" JSON_A(
3472
JSON_S("name_or_index") ","
3473
JSON_S("to_node")) ","
3474
JSON_S("edge_types") ":" JSON_A(
3476
JSON_S("context") ","
3477
JSON_S("element") ","
3478
JSON_S("property") ","
3479
JSON_S("internal") ","
3480
JSON_S("hidden") ","
3481
JSON_S("shortcut") ","
3483
JSON_S("string_or_number") ","
3488
writer_->AddString(",\"node_count\":");
3489
writer_->AddNumber(snapshot_->entries().length());
3490
writer_->AddString(",\"edge_count\":");
3491
writer_->AddNumber(snapshot_->edges().length());
3495
static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
3496
static const char hex_chars[] = "0123456789ABCDEF";
3497
w->AddString("\\u");
3498
w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
3499
w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
3500
w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
3501
w->AddCharacter(hex_chars[u & 0xf]);
3504
// Emits |s| as a quoted JSON string, escaping control characters and
// re-encoding non-ASCII UTF-8 sequences as \uXXXX literals.
// NOTE(review): the switch/case scaffolding around the escape sequences
// below is missing from this mangled chunk (gaps in the interleaved decimal
// line numbers); this span does not compile as-is.
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
3505
writer_->AddCharacter('\n');
3506
writer_->AddCharacter('\"');
3507
for ( ; *s != '\0'; ++s) {
3510
writer_->AddString("\\b");
3513
writer_->AddString("\\f");
3516
writer_->AddString("\\n");
3519
writer_->AddString("\\r");
3522
writer_->AddString("\\t");
3526
// Presumably the quote/backslash case: emit a backslash then the
// character itself — TODO confirm against the missing case labels.
writer_->AddCharacter('\\');
3527
writer_->AddCharacter(*s);
3530
// Printable ASCII passes through unchanged.
if (*s > 31 && *s < 128) {
3531
writer_->AddCharacter(*s);
3532
} else if (*s <= 31) {
3533
// Special character with no dedicated literal.
3534
WriteUChar(writer_, *s);
3536
// Convert UTF-8 into \u UTF-16 literal.
3537
unsigned length = 1, cursor = 0;
3538
for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
3539
unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
3540
if (c != unibrow::Utf8::kBadChar) {
3541
WriteUChar(writer_, c);
3542
ASSERT(cursor != 0);
3545
// Undecodable byte sequence: substitute a placeholder.
writer_->AddCharacter('?');
3550
writer_->AddCharacter('\"');
3554
void HeapSnapshotJSONSerializer::SerializeStrings() {
3555
List<HashMap::Entry*> sorted_strings;
3556
SortHashMap(&strings_, &sorted_strings);
3557
writer_->AddString("\"<dummy>\"");
3558
for (int i = 0; i < sorted_strings.length(); ++i) {
3559
writer_->AddCharacter(',');
3561
reinterpret_cast<const unsigned char*>(sorted_strings[i]->key));
3562
if (writer_->aborted()) return;
// Three-way comparator ordering hash-map entries by their |value| slot
// interpreted as an unsigned integer (here: the string ids assigned by
// GetStringId).  Returns -1/0/1 for less/equal/greater.
template<typename T>
inline static int SortUsingEntryValue(const T* x, const T* y) {
  uintptr_t x_uint = reinterpret_cast<uintptr_t>((*x)->value);
  uintptr_t y_uint = reinterpret_cast<uintptr_t>((*y)->value);
  if (x_uint > y_uint) {
    return 1;
  } else if (x_uint == y_uint) {
    return 0;
  } else {
    return -1;
  }
}
3581
void HeapSnapshotJSONSerializer::SortHashMap(
3582
HashMap* map, List<HashMap::Entry*>* sorted_entries) {
3583
for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p))
3584
sorted_entries->Add(p);
3585
sorted_entries->Sort(SortUsingEntryValue);
3588
} } // namespace v8::internal