OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/heap-snapshot-generator-inl.h" | 7 #include "src/heap-snapshot-generator-inl.h" |
8 | 8 |
9 #include "src/allocation-tracker.h" | 9 #include "src/allocation-tracker.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
11 #include "src/conversions.h" | 11 #include "src/conversions.h" |
12 #include "src/debug.h" | 12 #include "src/debug.h" |
13 #include "src/heap-profiler.h" | 13 #include "src/heap-profiler.h" |
14 #include "src/types.h" | 14 #include "src/types.h" |
15 | 15 |
16 namespace v8 { | 16 namespace v8 { |
17 namespace internal { | 17 namespace internal { |
18 | 18 |
19 | 19 |
20 HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to) | 20 HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to) |
21 : type_(type), | 21 : type_(type), |
22 from_index_(from), | 22 from_index_(from), |
23 to_index_(to), | 23 to_index_(to), |
24 name_(name) { | 24 name_(name) { |
25 ASSERT(type == kContextVariable | 25 DCHECK(type == kContextVariable |
26 || type == kProperty | 26 || type == kProperty |
27 || type == kInternal | 27 || type == kInternal |
28 || type == kShortcut | 28 || type == kShortcut |
29 || type == kWeak); | 29 || type == kWeak); |
30 } | 30 } |
31 | 31 |
32 | 32 |
33 HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to) | 33 HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to) |
34 : type_(type), | 34 : type_(type), |
35 from_index_(from), | 35 from_index_(from), |
36 to_index_(to), | 36 to_index_(to), |
37 index_(index) { | 37 index_(index) { |
38 ASSERT(type == kElement || type == kHidden); | 38 DCHECK(type == kElement || type == kHidden); |
39 } | 39 } |
40 | 40 |
41 | 41 |
// Resolves the stored destination index into a direct HeapEntry pointer.
// Called once the snapshot's entries list is fully populated (FillChildren).
void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
  to_entry_ = &snapshot->entries()[to_index_];
}
45 | 45 |
46 | 46 |
// Sentinel entry index meaning "no entry assigned yet".
const int HeapEntry::kNoEntry = -1;
48 | 48 |
(...skipping 162 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
211 delete this; | 211 delete this; |
212 } | 212 } |
213 | 213 |
214 | 214 |
// Records the highest object id the heap object map has assigned so far,
// pinning the id boundary for this snapshot.
void HeapSnapshot::RememberLastJSObjectId() {
  max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
}
218 | 218 |
219 | 219 |
// Creates the synthetic root entry of the snapshot. Must be called before
// any other entry is added: the root is required to occupy index 0.
HeapEntry* HeapSnapshot::AddRootEntry() {
  DCHECK(root_index_ == HeapEntry::kNoEntry);
  DCHECK(entries_.is_empty());  // Root entry must be the first one.
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              "",
                              HeapObjectsMap::kInternalRootObjectId,
                              0,
                              0);
  root_index_ = entry->index();
  DCHECK(root_index_ == 0);
  return entry;
}
232 | 232 |
233 | 233 |
// Creates the synthetic "(GC roots)" entry and records its index. Expected
// to be called at most once per snapshot.
HeapEntry* HeapSnapshot::AddGcRootsEntry() {
  DCHECK(gc_roots_index_ == HeapEntry::kNoEntry);
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              "(GC roots)",
                              HeapObjectsMap::kGcRootsObjectId,
                              0,
                              0);
  gc_roots_index_ = entry->index();
  return entry;
}
244 | 244 |
245 | 245 |
246 HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) { | 246 HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) { |
247 ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry); | 247 DCHECK(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry); |
248 ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags); | 248 DCHECK(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags); |
249 HeapEntry* entry = AddEntry( | 249 HeapEntry* entry = AddEntry( |
250 HeapEntry::kSynthetic, | 250 HeapEntry::kSynthetic, |
251 VisitorSynchronization::kTagNames[tag], | 251 VisitorSynchronization::kTagNames[tag], |
252 HeapObjectsMap::GetNthGcSubrootId(tag), | 252 HeapObjectsMap::GetNthGcSubrootId(tag), |
253 0, | 253 0, |
254 0); | 254 0); |
255 gc_subroot_indexes_[tag] = entry->index(); | 255 gc_subroot_indexes_[tag] = entry->index(); |
256 return entry; | 256 return entry; |
257 } | 257 } |
258 | 258 |
259 | 259 |
// Appends a new entry to the snapshot's entries list and returns a pointer
// to it. NOTE(review): the pointer is into entries_'s backing store, so it
// is presumably only valid until the list grows again — callers here store
// indexes (see AddRootEntry); confirm before retaining the pointer.
HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
                                  const char* name,
                                  SnapshotObjectId id,
                                  size_t size,
                                  unsigned trace_node_id) {
  HeapEntry entry(this, type, name, id, size, trace_node_id);
  entries_.Add(entry);
  return &entries_.last();
}
269 | 269 |
270 | 270 |
// Second pass over the graph: gives each entry a contiguous slice of the
// children array, then converts every edge's destination index into a
// direct entry pointer and registers the edge with its source entry.
void HeapSnapshot::FillChildren() {
  DCHECK(children().is_empty());
  children().Allocate(edges().length());
  int children_index = 0;
  // Distribute child slots; each entry advances the running index past the
  // slots it reserves.
  for (int i = 0; i < entries().length(); ++i) {
    HeapEntry* entry = &entries()[i];
    children_index = entry->set_children_index(children_index);
  }
  // Every edge must have been accounted for exactly once.
  DCHECK(edges().length() == children_index);
  // Resolve edges into the reserved slots.
  for (int i = 0; i < edges().length(); ++i) {
    HeapGraphEdge* edge = &edges()[i];
    edge->ReplaceToIndexWithEntry(this);
    edge->from()->add_child(edge);
  }
}
286 | 286 |
287 | 287 |
288 class FindEntryById { | 288 class FindEntryById { |
289 public: | 289 public: |
(...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
368 // it has an entry with NULL as the value or it has created | 368 // it has an entry with NULL as the value or it has created |
369 // a new entry on the fly with NULL as the default value. | 369 // a new entry on the fly with NULL as the default value. |
370 // With such dummy element we have a guarantee that all entries_map_ entries | 370 // With such dummy element we have a guarantee that all entries_map_ entries |
371 // will have the value field greater than 0. | 371 // will have the value field greater than 0. |
372 // This fact is used in the MoveObject method. | 372 // This fact is used in the MoveObject method. |
373 entries_.Add(EntryInfo(0, NULL, 0)); | 373 entries_.Add(EntryInfo(0, NULL, 0)); |
374 } | 374 } |
375 | 375 |
376 | 376 |
377 bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) { | 377 bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) { |
378 ASSERT(to != NULL); | 378 DCHECK(to != NULL); |
379 ASSERT(from != NULL); | 379 DCHECK(from != NULL); |
380 if (from == to) return false; | 380 if (from == to) return false; |
381 void* from_value = entries_map_.Remove(from, ComputePointerHash(from)); | 381 void* from_value = entries_map_.Remove(from, ComputePointerHash(from)); |
382 if (from_value == NULL) { | 382 if (from_value == NULL) { |
383 // It may occur that some untracked object moves to an address X and there | 383 // It may occur that some untracked object moves to an address X and there |
384 // is a tracked object at that address. In this case we should remove the | 384 // is a tracked object at that address. In this case we should remove the |
385 // entry as we know that the object has died. | 385 // entry as we know that the object has died. |
386 void* to_value = entries_map_.Remove(to, ComputePointerHash(to)); | 386 void* to_value = entries_map_.Remove(to, ComputePointerHash(to)); |
387 if (to_value != NULL) { | 387 if (to_value != NULL) { |
388 int to_entry_info_index = | 388 int to_entry_info_index = |
389 static_cast<int>(reinterpret_cast<intptr_t>(to_value)); | 389 static_cast<int>(reinterpret_cast<intptr_t>(to_value)); |
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
426 FindOrAddEntry(addr, size, false); | 426 FindOrAddEntry(addr, size, false); |
427 } | 427 } |
428 | 428 |
429 | 429 |
// Returns the snapshot object id previously recorded for |addr|, or 0 if
// the address is not tracked. Never creates a new entry (Lookup with
// insert == false).
SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
  HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
                                              false);
  if (entry == NULL) return 0;
  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
  EntryInfo& entry_info = entries_.at(entry_index);
  // entries_ carries a dummy element at index 0, hence strictly greater.
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return entry_info.id;
}
439 | 439 |
440 | 440 |
// Returns the id for the object at |addr|, assigning a fresh id (the
// counter advances by kObjectIdStep) if the address has not been seen.
// For an existing entry, refreshes its recorded size and accessed flag.
SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size,
                                                bool accessed) {
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
                                              true);
  if (entry->value != NULL) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = accessed;
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object size : %p with old size %d and new size %d\n",
             addr,
             entry_info.size,
             size);
    }
    entry_info.size = size;
    return entry_info.id;
  }
  // New address: store its future entries_ index as the map value, then
  // append the entry itself.
  entry->value = reinterpret_cast<void*>(entries_.length());
  SnapshotObjectId id = next_id_;
  next_id_ += kObjectIdStep;
  entries_.Add(EntryInfo(id, addr, size, accessed));
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return id;
}
468 | 468 |
469 | 469 |
// Drops all recorded time intervals, ending incremental stats tracking.
void HeapObjectsMap::StopHeapObjectsTracking() {
  time_intervals_.Clear();
}
473 | 473 |
474 | 474 |
475 void HeapObjectsMap::UpdateHeapObjectsMap() { | 475 void HeapObjectsMap::UpdateHeapObjectsMap() { |
(...skipping 132 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
608 } | 608 } |
609 return untracked; | 609 return untracked; |
610 } | 610 } |
611 | 611 |
612 | 612 |
// Streams incremental heap statistics to |stream|: for every recorded time
// interval whose object count or cumulative size changed since the last
// push, emits a HeapStatsUpdate. Entries are walked exactly once, in id
// order, bucketed by interval id. Returns the last assigned object id
// (also returned early if the stream requests an abort).
SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
  UpdateHeapObjectsMap();
  time_intervals_.Add(TimeInterval(next_id_));
  int prefered_chunk_size = stream->GetChunkSize();
  List<v8::HeapStatsUpdate> stats_buffer;
  DCHECK(!entries_.is_empty());
  EntryInfo* entry_info = &entries_.first();
  EntryInfo* end_entry_info = &entries_.last() + 1;
  for (int time_interval_index = 0;
       time_interval_index < time_intervals_.length();
       ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    // Accumulate all entries whose id falls into this interval.
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      // Record the new totals on the interval and queue an update; flush a
      // chunk whenever the buffer reaches the stream's preferred size.
      stats_buffer.Add(v8::HeapStatsUpdate(
          time_interval_index,
          time_interval.count = entries_count,
          time_interval.size = entries_size));
      if (stats_buffer.length() >= prefered_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.first(), stats_buffer.length());
        if (result == OutputStream::kAbort) return last_assigned_id();
        stats_buffer.Clear();
      }
    }
  }
  DCHECK(entry_info == end_entry_info);
  // Flush whatever remains in the buffer before signalling end-of-stream.
  if (!stats_buffer.is_empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.first(), stats_buffer.length());
    if (result == OutputStream::kAbort) return last_assigned_id();
  }
  stream->EndOfStream();
  return last_assigned_id();
}
657 | 657 |
658 | 658 |
// Compacts entries_ in place, keeping only entries that were accessed since
// the last sweep, and rewrites entries_map_ values to the new indexes.
// Entry 0 is the permanent dummy element and is always preserved.
void HeapObjectsMap::RemoveDeadEntries() {
  DCHECK(entries_.length() > 0 &&
         entries_.at(0).id == 0 &&
         entries_.at(0).addr == NULL);
  int first_free_entry = 1;
  for (int i = 1; i < entries_.length(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      // Survivor: slide it down to the next free slot (if it moved).
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      // Reset the flag so the next round starts from scratch.
      entries_.at(first_free_entry).accessed = false;
      HashMap::Entry* entry = entries_map_.Lookup(
          entry_info.addr, ComputePointerHash(entry_info.addr), false);
      DCHECK(entry);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      ++first_free_entry;
    } else {
      // Dead: drop its address mapping (if it ever had one).
      if (entry_info.addr) {
        entries_map_.Remove(entry_info.addr,
                            ComputePointerHash(entry_info.addr));
      }
    }
  }
  entries_.Rewind(first_free_entry);
  // Minus one for the dummy element at index 0.
  DCHECK(static_cast<uint32_t>(entries_.length()) - 1 ==
         entries_map_.occupancy());
}
687 | 687 |
688 | 688 |
689 SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) { | 689 SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) { |
690 SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash()); | 690 SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash()); |
691 const char* label = info->GetLabel(); | 691 const char* label = info->GetLabel(); |
692 id ^= StringHasher::HashSequentialString(label, | 692 id ^= StringHasher::HashSequentialString(label, |
693 static_cast<int>(strlen(label)), | 693 static_cast<int>(strlen(label)), |
694 heap_->HashSeed()); | 694 heap_->HashSeed()); |
(...skipping 21 matching lines...) Expand all Loading... |
716 | 716 |
717 int HeapEntriesMap::Map(HeapThing thing) { | 717 int HeapEntriesMap::Map(HeapThing thing) { |
718 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false); | 718 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false); |
719 if (cache_entry == NULL) return HeapEntry::kNoEntry; | 719 if (cache_entry == NULL) return HeapEntry::kNoEntry; |
720 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value)); | 720 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value)); |
721 } | 721 } |
722 | 722 |
723 | 723 |
// Associates |thing| with |entry|. The object must not have been paired
// before: its cache slot is required to still be empty.
void HeapEntriesMap::Pair(HeapThing thing, int entry) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
  DCHECK(cache_entry->value == NULL);
  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
}
729 | 729 |
730 | 730 |
// Identity-based set of heap objects: keys are compared by raw pointer.
HeapObjectsSet::HeapObjectsSet()
    : entries_(HashMap::PointersMatch) {
}
734 | 734 |
735 | 735 |
736 void HeapObjectsSet::Clear() { | 736 void HeapObjectsSet::Clear() { |
(...skipping 315 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
  // Reports every pointer in [start, end) as a hidden reference from the
  // parent object. Fields previously tagged via MarkVisitedField are
  // unmarked and skipped; next_index_ still advances for them so reference
  // indexes keep matching field positions.
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      ++next_index_;
      if (CheckVisitedAndUnmark(p)) continue;
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p);
    }
  }
  // Marks the field at |offset| inside |obj| as already reported by tagging
  // the stored pointer's low bits with kTag. A negative offset means "no
  // field" and is ignored. The field must hold an unmarked heap object.
  static void MarkVisitedField(HeapObject* obj, int offset) {
    if (offset < 0) return;
    Address field = obj->address() + offset;
    DCHECK(Memory::Object_at(field)->IsHeapObject());
    intptr_t p = reinterpret_cast<intptr_t>(Memory::Object_at(field));
    DCHECK(!IsMarked(p));
    intptr_t p_tagged = p | kTag;
    Memory::Object_at(field) = reinterpret_cast<Object*>(p_tagged);
  }
1068 | 1068 |
1069 private: | 1069 private: |
  // If |field| carries the visited tag, restores the original pointer
  // (clearing the tag bits and re-applying kHeapObjectTag) and returns
  // true; otherwise leaves the field untouched and returns false.
  bool CheckVisitedAndUnmark(Object** field) {
    intptr_t p = reinterpret_cast<intptr_t>(*field);
    if (IsMarked(p)) {
      intptr_t p_untagged = (p & ~kTaggingMask) | kHeapObjectTag;
      *field = reinterpret_cast<Object*>(p_untagged);
      DCHECK((*field)->IsHeapObject());
      return true;
    }
    return false;
  }
1080 | 1080 |
  // Visited fields are marked by setting both low pointer bits (kTag); a
  // pointer is considered marked only when both bits under kTaggingMask
  // are set (see the DCHECK in MarkVisitedField).
  static const intptr_t kTaggingMask = 3;
  static const intptr_t kTag = 3;

  static bool IsMarked(intptr_t p) { return (p & kTaggingMask) == kTag; }
1085 | 1085 |
(...skipping 669 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1755 if (!elements->get(i)->IsTheHole()) { | 1755 if (!elements->get(i)->IsTheHole()) { |
1756 SetElementReference(js_obj, entry, i, elements->get(i)); | 1756 SetElementReference(js_obj, entry, i, elements->get(i)); |
1757 } | 1757 } |
1758 } | 1758 } |
1759 } else if (js_obj->HasDictionaryElements()) { | 1759 } else if (js_obj->HasDictionaryElements()) { |
1760 SeededNumberDictionary* dictionary = js_obj->element_dictionary(); | 1760 SeededNumberDictionary* dictionary = js_obj->element_dictionary(); |
1761 int length = dictionary->Capacity(); | 1761 int length = dictionary->Capacity(); |
1762 for (int i = 0; i < length; ++i) { | 1762 for (int i = 0; i < length; ++i) { |
1763 Object* k = dictionary->KeyAt(i); | 1763 Object* k = dictionary->KeyAt(i); |
1764 if (dictionary->IsKey(k)) { | 1764 if (dictionary->IsKey(k)) { |
1765 ASSERT(k->IsNumber()); | 1765 DCHECK(k->IsNumber()); |
1766 uint32_t index = static_cast<uint32_t>(k->Number()); | 1766 uint32_t index = static_cast<uint32_t>(k->Number()); |
1767 SetElementReference(js_obj, entry, index, dictionary->ValueAt(i)); | 1767 SetElementReference(js_obj, entry, index, dictionary->ValueAt(i)); |
1768 } | 1768 } |
1769 } | 1769 } |
1770 } | 1770 } |
1771 } | 1771 } |
1772 | 1772 |
1773 | 1773 |
1774 void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) { | 1774 void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) { |
1775 int length = js_obj->GetInternalFieldCount(); | 1775 int length = js_obj->GetInternalFieldCount(); |
(...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1836 if (collecting_all_references_) { | 1836 if (collecting_all_references_) { |
1837 for (Object** p = start; p < end; p++) all_references_.Add(*p); | 1837 for (Object** p = start; p < end; p++) all_references_.Add(*p); |
1838 } else { | 1838 } else { |
1839 for (Object** p = start; p < end; p++) strong_references_.Add(*p); | 1839 for (Object** p = start; p < end; p++) strong_references_.Add(*p); |
1840 } | 1840 } |
1841 } | 1841 } |
1842 | 1842 |
  // Switches the visitor into the mode where every subsequent pointer is
  // recorded in all_references_ instead of strong_references_.
  void SetCollectingAllReferences() { collecting_all_references_ = true; }
1844 | 1844 |
  // Replays the collected GC roots into the snapshot. all_references_ is
  // walked in parallel with strong_references_ (a subsequence of it): a
  // reference present in both is strong, one present only in
  // all_references_ is reported as weak. reference_tags_ records, by end
  // index into all_references_, which subroot tag each run belongs to.
  void FillReferences(V8HeapExplorer* explorer) {
    DCHECK(strong_references_.length() <= all_references_.length());
    Builtins* builtins = heap_->isolate()->builtins();
    for (int i = 0; i < reference_tags_.length(); ++i) {
      explorer->SetGcRootsReference(reference_tags_[i].tag);
    }
    int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
    while (all_index < all_references_.length()) {
      bool is_strong = strong_index < strong_references_.length()
          && strong_references_[strong_index] == all_references_[all_index];
      explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                      !is_strong,
                                      all_references_[all_index]);
      // Builtin code objects additionally get tagged with the builtin name,
      // consumed in the same order the builtins were visited.
      if (reference_tags_[tags_index].tag ==
          VisitorSynchronization::kBuiltins) {
        DCHECK(all_references_[all_index]->IsCode());
        explorer->TagBuiltinCodeObject(
            Code::cast(all_references_[all_index]),
            builtins->name(builtin_index++));
      }
      ++all_index;
      if (is_strong) ++strong_index;
      if (reference_tags_[tags_index].index == all_index) ++tags_index;
    }
  }
1870 | 1870 |
(...skipping 88 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1959 && object != heap_->one_pointer_filler_map() | 1959 && object != heap_->one_pointer_filler_map() |
1960 && object != heap_->two_pointer_filler_map(); | 1960 && object != heap_->two_pointer_filler_map(); |
1961 } | 1961 } |
1962 | 1962 |
1963 | 1963 |
// Adds a named context-variable edge from |parent_entry| to |child_obj|
// and marks the field as visited so the indexed-references pass will skip
// it (see CheckVisitedAndUnmark).
void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         String* reference_name,
                                         Object* child_obj,
                                         int field_offset) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
                               parent_entry,
                               names_->GetName(reference_name),
                               child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}
1979 | 1979 |
1980 | 1980 |
1981 void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj, | 1981 void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj, |
1982 int parent_entry, | 1982 int parent_entry, |
1983 const char* reference_name, | 1983 const char* reference_name, |
1984 Object* child_obj) { | 1984 Object* child_obj) { |
1985 ASSERT(parent_entry == GetEntry(parent_obj)->index()); | 1985 DCHECK(parent_entry == GetEntry(parent_obj)->index()); |
1986 HeapEntry* child_entry = GetEntry(child_obj); | 1986 HeapEntry* child_entry = GetEntry(child_obj); |
1987 if (child_entry != NULL) { | 1987 if (child_entry != NULL) { |
1988 filler_->SetNamedReference(HeapGraphEdge::kShortcut, | 1988 filler_->SetNamedReference(HeapGraphEdge::kShortcut, |
1989 parent_entry, | 1989 parent_entry, |
1990 reference_name, | 1990 reference_name, |
1991 child_entry); | 1991 child_entry); |
1992 } | 1992 } |
1993 } | 1993 } |
1994 | 1994 |
1995 | 1995 |
// Adds an indexed element edge from |parent_entry| to |child_obj| if the
// child has a corresponding heap entry.
void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         int index,
                                         Object* child_obj) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetIndexedReference(HeapGraphEdge::kElement,
                                 parent_entry,
                                 index,
                                 child_entry);
  }
}
2009 | 2009 |
2010 | 2010 |
// Adds a named internal edge from |parent_entry| to |child_obj|. The edge
// is only emitted for "essential" children, but the field is marked as
// visited unconditionally so the indexed pass skips it either way.
void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          const char* reference_name,
                                          Object* child_obj,
                                          int field_offset) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}
2027 | 2027 |
2028 | 2028 |
2029 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj, | 2029 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj, |
2030 int parent_entry, | 2030 int parent_entry, |
2031 int index, | 2031 int index, |
2032 Object* child_obj, | 2032 Object* child_obj, |
2033 int field_offset) { | 2033 int field_offset) { |
2034 ASSERT(parent_entry == GetEntry(parent_obj)->index()); | 2034 DCHECK(parent_entry == GetEntry(parent_obj)->index()); |
2035 HeapEntry* child_entry = GetEntry(child_obj); | 2035 HeapEntry* child_entry = GetEntry(child_obj); |
2036 if (child_entry == NULL) return; | 2036 if (child_entry == NULL) return; |
2037 if (IsEssentialObject(child_obj)) { | 2037 if (IsEssentialObject(child_obj)) { |
2038 filler_->SetNamedReference(HeapGraphEdge::kInternal, | 2038 filler_->SetNamedReference(HeapGraphEdge::kInternal, |
2039 parent_entry, | 2039 parent_entry, |
2040 names_->GetName(index), | 2040 names_->GetName(index), |
2041 child_entry); | 2041 child_entry); |
2042 } | 2042 } |
2043 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset); | 2043 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset); |
2044 } | 2044 } |
2045 | 2045 |
2046 | 2046 |
2047 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj, | 2047 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj, |
2048 int parent_entry, | 2048 int parent_entry, |
2049 int index, | 2049 int index, |
2050 Object* child_obj) { | 2050 Object* child_obj) { |
2051 ASSERT(parent_entry == GetEntry(parent_obj)->index()); | 2051 DCHECK(parent_entry == GetEntry(parent_obj)->index()); |
2052 HeapEntry* child_entry = GetEntry(child_obj); | 2052 HeapEntry* child_entry = GetEntry(child_obj); |
2053 if (child_entry != NULL && IsEssentialObject(child_obj)) { | 2053 if (child_entry != NULL && IsEssentialObject(child_obj)) { |
2054 filler_->SetIndexedReference(HeapGraphEdge::kHidden, | 2054 filler_->SetIndexedReference(HeapGraphEdge::kHidden, |
2055 parent_entry, | 2055 parent_entry, |
2056 index, | 2056 index, |
2057 child_entry); | 2057 child_entry); |
2058 } | 2058 } |
2059 } | 2059 } |
2060 | 2060 |
2061 | 2061 |
2062 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj, | 2062 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj, |
2063 int parent_entry, | 2063 int parent_entry, |
2064 const char* reference_name, | 2064 const char* reference_name, |
2065 Object* child_obj, | 2065 Object* child_obj, |
2066 int field_offset) { | 2066 int field_offset) { |
2067 ASSERT(parent_entry == GetEntry(parent_obj)->index()); | 2067 DCHECK(parent_entry == GetEntry(parent_obj)->index()); |
2068 HeapEntry* child_entry = GetEntry(child_obj); | 2068 HeapEntry* child_entry = GetEntry(child_obj); |
2069 if (child_entry == NULL) return; | 2069 if (child_entry == NULL) return; |
2070 if (IsEssentialObject(child_obj)) { | 2070 if (IsEssentialObject(child_obj)) { |
2071 filler_->SetNamedReference(HeapGraphEdge::kWeak, | 2071 filler_->SetNamedReference(HeapGraphEdge::kWeak, |
2072 parent_entry, | 2072 parent_entry, |
2073 reference_name, | 2073 reference_name, |
2074 child_entry); | 2074 child_entry); |
2075 } | 2075 } |
2076 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset); | 2076 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset); |
2077 } | 2077 } |
2078 | 2078 |
2079 | 2079 |
2080 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj, | 2080 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj, |
2081 int parent_entry, | 2081 int parent_entry, |
2082 int index, | 2082 int index, |
2083 Object* child_obj, | 2083 Object* child_obj, |
2084 int field_offset) { | 2084 int field_offset) { |
2085 ASSERT(parent_entry == GetEntry(parent_obj)->index()); | 2085 DCHECK(parent_entry == GetEntry(parent_obj)->index()); |
2086 HeapEntry* child_entry = GetEntry(child_obj); | 2086 HeapEntry* child_entry = GetEntry(child_obj); |
2087 if (child_entry == NULL) return; | 2087 if (child_entry == NULL) return; |
2088 if (IsEssentialObject(child_obj)) { | 2088 if (IsEssentialObject(child_obj)) { |
2089 filler_->SetNamedReference(HeapGraphEdge::kWeak, | 2089 filler_->SetNamedReference(HeapGraphEdge::kWeak, |
2090 parent_entry, | 2090 parent_entry, |
2091 names_->GetFormatted("%d", index), | 2091 names_->GetFormatted("%d", index), |
2092 child_entry); | 2092 child_entry); |
2093 } | 2093 } |
2094 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset); | 2094 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset); |
2095 } | 2095 } |
2096 | 2096 |
2097 | 2097 |
2098 void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj, | 2098 void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj, |
2099 int parent_entry, | 2099 int parent_entry, |
2100 Name* reference_name, | 2100 Name* reference_name, |
2101 Object* child_obj, | 2101 Object* child_obj, |
2102 const char* name_format_string, | 2102 const char* name_format_string, |
2103 int field_offset) { | 2103 int field_offset) { |
2104 ASSERT(parent_entry == GetEntry(parent_obj)->index()); | 2104 DCHECK(parent_entry == GetEntry(parent_obj)->index()); |
2105 HeapEntry* child_entry = GetEntry(child_obj); | 2105 HeapEntry* child_entry = GetEntry(child_obj); |
2106 if (child_entry != NULL) { | 2106 if (child_entry != NULL) { |
2107 HeapGraphEdge::Type type = | 2107 HeapGraphEdge::Type type = |
2108 reference_name->IsSymbol() || String::cast(reference_name)->length() > 0 | 2108 reference_name->IsSymbol() || String::cast(reference_name)->length() > 0 |
2109 ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal; | 2109 ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal; |
2110 const char* name = name_format_string != NULL && reference_name->IsString() | 2110 const char* name = name_format_string != NULL && reference_name->IsString() |
2111 ? names_->GetFormatted( | 2111 ? names_->GetFormatted( |
2112 name_format_string, | 2112 name_format_string, |
2113 String::cast(reference_name)->ToCString( | 2113 String::cast(reference_name)->ToCString( |
2114 DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get()) : | 2114 DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get()) : |
(...skipping 11 matching lines...) Expand all Loading... |
2126 void V8HeapExplorer::SetRootGcRootsReference() { | 2126 void V8HeapExplorer::SetRootGcRootsReference() { |
2127 filler_->SetIndexedAutoIndexReference( | 2127 filler_->SetIndexedAutoIndexReference( |
2128 HeapGraphEdge::kElement, | 2128 HeapGraphEdge::kElement, |
2129 snapshot_->root()->index(), | 2129 snapshot_->root()->index(), |
2130 snapshot_->gc_roots()); | 2130 snapshot_->gc_roots()); |
2131 } | 2131 } |
2132 | 2132 |
2133 | 2133 |
2134 void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) { | 2134 void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) { |
2135 HeapEntry* child_entry = GetEntry(child_obj); | 2135 HeapEntry* child_entry = GetEntry(child_obj); |
2136 ASSERT(child_entry != NULL); | 2136 DCHECK(child_entry != NULL); |
2137 filler_->SetNamedAutoIndexReference( | 2137 filler_->SetNamedAutoIndexReference( |
2138 HeapGraphEdge::kShortcut, | 2138 HeapGraphEdge::kShortcut, |
2139 snapshot_->root()->index(), | 2139 snapshot_->root()->index(), |
2140 child_entry); | 2140 child_entry); |
2141 } | 2141 } |
2142 | 2142 |
2143 | 2143 |
2144 void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) { | 2144 void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) { |
2145 filler_->SetIndexedAutoIndexReference( | 2145 filler_->SetIndexedAutoIndexReference( |
2146 HeapGraphEdge::kElement, | 2146 HeapGraphEdge::kElement, |
(...skipping 260 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2407 | 2407 |
2408 void NativeObjectsExplorer::FillImplicitReferences() { | 2408 void NativeObjectsExplorer::FillImplicitReferences() { |
2409 Isolate* isolate = isolate_; | 2409 Isolate* isolate = isolate_; |
2410 List<ImplicitRefGroup*>* groups = | 2410 List<ImplicitRefGroup*>* groups = |
2411 isolate->global_handles()->implicit_ref_groups(); | 2411 isolate->global_handles()->implicit_ref_groups(); |
2412 for (int i = 0; i < groups->length(); ++i) { | 2412 for (int i = 0; i < groups->length(); ++i) { |
2413 ImplicitRefGroup* group = groups->at(i); | 2413 ImplicitRefGroup* group = groups->at(i); |
2414 HeapObject* parent = *group->parent; | 2414 HeapObject* parent = *group->parent; |
2415 int parent_entry = | 2415 int parent_entry = |
2416 filler_->FindOrAddEntry(parent, native_entries_allocator_)->index(); | 2416 filler_->FindOrAddEntry(parent, native_entries_allocator_)->index(); |
2417 ASSERT(parent_entry != HeapEntry::kNoEntry); | 2417 DCHECK(parent_entry != HeapEntry::kNoEntry); |
2418 Object*** children = group->children; | 2418 Object*** children = group->children; |
2419 for (size_t j = 0; j < group->length; ++j) { | 2419 for (size_t j = 0; j < group->length; ++j) { |
2420 Object* child = *children[j]; | 2420 Object* child = *children[j]; |
2421 HeapEntry* child_entry = | 2421 HeapEntry* child_entry = |
2422 filler_->FindOrAddEntry(child, native_entries_allocator_); | 2422 filler_->FindOrAddEntry(child, native_entries_allocator_); |
2423 filler_->SetNamedReference( | 2423 filler_->SetNamedReference( |
2424 HeapGraphEdge::kInternal, | 2424 HeapGraphEdge::kInternal, |
2425 parent_entry, | 2425 parent_entry, |
2426 "native", | 2426 "native", |
2427 child_entry); | 2427 child_entry); |
(...skipping 80 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2508 entry->value = new NativeGroupRetainedObjectInfo(label); | 2508 entry->value = new NativeGroupRetainedObjectInfo(label); |
2509 } | 2509 } |
2510 return static_cast<NativeGroupRetainedObjectInfo*>(entry->value); | 2510 return static_cast<NativeGroupRetainedObjectInfo*>(entry->value); |
2511 } | 2511 } |
2512 | 2512 |
2513 | 2513 |
2514 void NativeObjectsExplorer::SetNativeRootReference( | 2514 void NativeObjectsExplorer::SetNativeRootReference( |
2515 v8::RetainedObjectInfo* info) { | 2515 v8::RetainedObjectInfo* info) { |
2516 HeapEntry* child_entry = | 2516 HeapEntry* child_entry = |
2517 filler_->FindOrAddEntry(info, native_entries_allocator_); | 2517 filler_->FindOrAddEntry(info, native_entries_allocator_); |
2518 ASSERT(child_entry != NULL); | 2518 DCHECK(child_entry != NULL); |
2519 NativeGroupRetainedObjectInfo* group_info = | 2519 NativeGroupRetainedObjectInfo* group_info = |
2520 FindOrAddGroupInfo(info->GetGroupLabel()); | 2520 FindOrAddGroupInfo(info->GetGroupLabel()); |
2521 HeapEntry* group_entry = | 2521 HeapEntry* group_entry = |
2522 filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_); | 2522 filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_); |
2523 filler_->SetNamedAutoIndexReference( | 2523 filler_->SetNamedAutoIndexReference( |
2524 HeapGraphEdge::kInternal, | 2524 HeapGraphEdge::kInternal, |
2525 group_entry->index(), | 2525 group_entry->index(), |
2526 child_entry); | 2526 child_entry); |
2527 } | 2527 } |
2528 | 2528 |
2529 | 2529 |
2530 void NativeObjectsExplorer::SetWrapperNativeReferences( | 2530 void NativeObjectsExplorer::SetWrapperNativeReferences( |
2531 HeapObject* wrapper, v8::RetainedObjectInfo* info) { | 2531 HeapObject* wrapper, v8::RetainedObjectInfo* info) { |
2532 HeapEntry* wrapper_entry = filler_->FindEntry(wrapper); | 2532 HeapEntry* wrapper_entry = filler_->FindEntry(wrapper); |
2533 ASSERT(wrapper_entry != NULL); | 2533 DCHECK(wrapper_entry != NULL); |
2534 HeapEntry* info_entry = | 2534 HeapEntry* info_entry = |
2535 filler_->FindOrAddEntry(info, native_entries_allocator_); | 2535 filler_->FindOrAddEntry(info, native_entries_allocator_); |
2536 ASSERT(info_entry != NULL); | 2536 DCHECK(info_entry != NULL); |
2537 filler_->SetNamedReference(HeapGraphEdge::kInternal, | 2537 filler_->SetNamedReference(HeapGraphEdge::kInternal, |
2538 wrapper_entry->index(), | 2538 wrapper_entry->index(), |
2539 "native", | 2539 "native", |
2540 info_entry); | 2540 info_entry); |
2541 filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement, | 2541 filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement, |
2542 info_entry->index(), | 2542 info_entry->index(), |
2543 wrapper_entry); | 2543 wrapper_entry); |
2544 } | 2544 } |
2545 | 2545 |
2546 | 2546 |
2547 void NativeObjectsExplorer::SetRootNativeRootsReference() { | 2547 void NativeObjectsExplorer::SetRootNativeRootsReference() { |
2548 for (HashMap::Entry* entry = native_groups_.Start(); | 2548 for (HashMap::Entry* entry = native_groups_.Start(); |
2549 entry; | 2549 entry; |
2550 entry = native_groups_.Next(entry)) { | 2550 entry = native_groups_.Next(entry)) { |
2551 NativeGroupRetainedObjectInfo* group_info = | 2551 NativeGroupRetainedObjectInfo* group_info = |
2552 static_cast<NativeGroupRetainedObjectInfo*>(entry->value); | 2552 static_cast<NativeGroupRetainedObjectInfo*>(entry->value); |
2553 HeapEntry* group_entry = | 2553 HeapEntry* group_entry = |
2554 filler_->FindOrAddEntry(group_info, native_entries_allocator_); | 2554 filler_->FindOrAddEntry(group_info, native_entries_allocator_); |
2555 ASSERT(group_entry != NULL); | 2555 DCHECK(group_entry != NULL); |
2556 filler_->SetIndexedAutoIndexReference( | 2556 filler_->SetIndexedAutoIndexReference( |
2557 HeapGraphEdge::kElement, | 2557 HeapGraphEdge::kElement, |
2558 snapshot_->root()->index(), | 2558 snapshot_->root()->index(), |
2559 group_entry); | 2559 group_entry); |
2560 } | 2560 } |
2561 } | 2561 } |
2562 | 2562 |
2563 | 2563 |
2564 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) { | 2564 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) { |
2565 if (in_groups_.Contains(*p)) return; | 2565 if (in_groups_.Contains(*p)) return; |
(...skipping 110 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2676 | 2676 |
2677 | 2677 |
// Buffers serialized output into fixed-size chunks and forwards each full
// chunk to a v8::OutputStream. Once the stream requests an abort, all
// further output is silently dropped (check aborted() between writes).
class OutputStreamWriter {
 public:
  explicit OutputStreamWriter(v8::OutputStream* stream)
      : stream_(stream),
        chunk_size_(stream->GetChunkSize()),
        chunk_(chunk_size_),
        chunk_pos_(0),
        aborted_(false) {
    DCHECK(chunk_size_ > 0);
  }
  bool aborted() { return aborted_; }
  // Appends a single character. NUL is disallowed because the buffered data
  // is treated as C-string text.
  void AddCharacter(char c) {
    DCHECK(c != '\0');
    DCHECK(chunk_pos_ < chunk_size_);
    chunk_[chunk_pos_++] = c;
    MaybeWriteChunk();
  }
  // Appends a NUL-terminated string.
  void AddString(const char* s) {
    AddSubstring(s, StrLength(s));
  }
  // Appends the first |n| characters of |s|, flushing chunks as they fill.
  void AddSubstring(const char* s, int n) {
    if (n <= 0) return;
    DCHECK(static_cast<size_t>(n) <= strlen(s));
    const char* s_end = s + n;
    while (s < s_end) {
      // Copy as much as fits in the current chunk, then flush if it is full.
      int s_chunk_size =
          Min(chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
      DCHECK(s_chunk_size > 0);
      MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
      s += s_chunk_size;
      chunk_pos_ += s_chunk_size;
      MaybeWriteChunk();
    }
  }
  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
  // Flushes any partial chunk and signals end-of-stream. No-op after abort.
  void Finalize() {
    if (aborted_) return;
    DCHECK(chunk_pos_ < chunk_size_);
    if (chunk_pos_ != 0) {
      WriteChunk();
    }
    stream_->EndOfStream();
  }

 private:
  // Formats |n| with |format|, printing directly into the chunk when there
  // is guaranteed room, otherwise via a stack buffer.
  template<typename T>
  void AddNumberImpl(T n, const char* format) {
    // Buffer for the longest value plus trailing \0
    static const int kMaxNumberSize =
        MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
    if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
      // Fast path: print in place; SNPrintF cannot truncate here.
      int result = SNPrintF(
          chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
      DCHECK(result != -1);
      chunk_pos_ += result;
      MaybeWriteChunk();
    } else {
      // Slow path: format into a local buffer and append it in pieces.
      EmbeddedVector<char, kMaxNumberSize> buffer;
      int result = SNPrintF(buffer, format, n);
      USE(result);
      DCHECK(result != -1);
      AddString(buffer.start());
    }
  }
  // Flushes the chunk only when it is exactly full, keeping the invariant
  // chunk_pos_ < chunk_size_ for subsequent writes.
  void MaybeWriteChunk() {
    DCHECK(chunk_pos_ <= chunk_size_);
    if (chunk_pos_ == chunk_size_) {
      WriteChunk();
    }
  }
  // Hands the buffered bytes to the stream; records an abort request and
  // resets the chunk either way.
  void WriteChunk() {
    if (aborted_) return;
    if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
        v8::OutputStream::kAbort) aborted_ = true;
    chunk_pos_ = 0;
  }

  v8::OutputStream* stream_;  // Not owned.
  int chunk_size_;            // Chunk capacity as requested by the stream.
  ScopedVector<char> chunk_;  // Current output buffer.
  int chunk_pos_;             // Number of buffered bytes; always < chunk_size_
                              // between calls.
  bool aborted_;              // Set once the stream returns kAbort.
};
2761 | 2761 |
2762 | 2762 |
// Number of fields emitted per edge: type, name|index, to_node.
const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
// Number of fields emitted per node:
// type, name, id, self_size, edge_count, trace_node_id.
const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6;
2767 | 2767 |
2768 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) { | 2768 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) { |
2769 if (AllocationTracker* allocation_tracker = | 2769 if (AllocationTracker* allocation_tracker = |
2770 snapshot_->profiler()->allocation_tracker()) { | 2770 snapshot_->profiler()->allocation_tracker()) { |
2771 allocation_tracker->PrepareForSerialization(); | 2771 allocation_tracker->PrepareForSerialization(); |
2772 } | 2772 } |
2773 ASSERT(writer_ == NULL); | 2773 DCHECK(writer_ == NULL); |
2774 writer_ = new OutputStreamWriter(stream); | 2774 writer_ = new OutputStreamWriter(stream); |
2775 SerializeImpl(); | 2775 SerializeImpl(); |
2776 delete writer_; | 2776 delete writer_; |
2777 writer_ = NULL; | 2777 writer_ = NULL; |
2778 } | 2778 } |
2779 | 2779 |
2780 | 2780 |
2781 void HeapSnapshotJSONSerializer::SerializeImpl() { | 2781 void HeapSnapshotJSONSerializer::SerializeImpl() { |
2782 ASSERT(0 == snapshot_->root()->index()); | 2782 DCHECK(0 == snapshot_->root()->index()); |
2783 writer_->AddCharacter('{'); | 2783 writer_->AddCharacter('{'); |
2784 writer_->AddString("\"snapshot\":{"); | 2784 writer_->AddString("\"snapshot\":{"); |
2785 SerializeSnapshot(); | 2785 SerializeSnapshot(); |
2786 if (writer_->aborted()) return; | 2786 if (writer_->aborted()) return; |
2787 writer_->AddString("},\n"); | 2787 writer_->AddString("},\n"); |
2788 writer_->AddString("\"nodes\":["); | 2788 writer_->AddString("\"nodes\":["); |
2789 SerializeNodes(); | 2789 SerializeNodes(); |
2790 if (writer_->aborted()) return; | 2790 if (writer_->aborted()) return; |
2791 writer_->AddString("],\n"); | 2791 writer_->AddString("],\n"); |
2792 writer_->AddString("\"edges\":["); | 2792 writer_->AddString("\"edges\":["); |
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2885 buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos); | 2885 buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos); |
2886 buffer[buffer_pos++] = '\n'; | 2886 buffer[buffer_pos++] = '\n'; |
2887 buffer[buffer_pos++] = '\0'; | 2887 buffer[buffer_pos++] = '\0'; |
2888 writer_->AddString(buffer.start()); | 2888 writer_->AddString(buffer.start()); |
2889 } | 2889 } |
2890 | 2890 |
2891 | 2891 |
2892 void HeapSnapshotJSONSerializer::SerializeEdges() { | 2892 void HeapSnapshotJSONSerializer::SerializeEdges() { |
2893 List<HeapGraphEdge*>& edges = snapshot_->children(); | 2893 List<HeapGraphEdge*>& edges = snapshot_->children(); |
2894 for (int i = 0; i < edges.length(); ++i) { | 2894 for (int i = 0; i < edges.length(); ++i) { |
2895 ASSERT(i == 0 || | 2895 DCHECK(i == 0 || |
2896 edges[i - 1]->from()->index() <= edges[i]->from()->index()); | 2896 edges[i - 1]->from()->index() <= edges[i]->from()->index()); |
2897 SerializeEdge(edges[i], i == 0); | 2897 SerializeEdge(edges[i], i == 0); |
2898 if (writer_->aborted()) return; | 2898 if (writer_->aborted()) return; |
2899 } | 2899 } |
2900 } | 2900 } |
2901 | 2901 |
2902 | 2902 |
2903 void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) { | 2903 void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) { |
2904 // The buffer needs space for 4 unsigned ints, 1 size_t, 5 commas, \n and \0 | 2904 // The buffer needs space for 4 unsigned ints, 1 size_t, 5 commas, \n and \0 |
2905 static const int kBufferSize = | 2905 static const int kBufferSize = |
(...skipping 163 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3069 writer_->AddCharacter(']'); | 3069 writer_->AddCharacter(']'); |
3070 } | 3070 } |
3071 | 3071 |
3072 | 3072 |
3073 // 0-based position is converted to 1-based during the serialization. | 3073 // 0-based position is converted to 1-based during the serialization. |
3074 static int SerializePosition(int position, const Vector<char>& buffer, | 3074 static int SerializePosition(int position, const Vector<char>& buffer, |
3075 int buffer_pos) { | 3075 int buffer_pos) { |
3076 if (position == -1) { | 3076 if (position == -1) { |
3077 buffer[buffer_pos++] = '0'; | 3077 buffer[buffer_pos++] = '0'; |
3078 } else { | 3078 } else { |
3079 ASSERT(position >= 0); | 3079 DCHECK(position >= 0); |
3080 buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos); | 3080 buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos); |
3081 } | 3081 } |
3082 return buffer_pos; | 3082 return buffer_pos; |
3083 } | 3083 } |
3084 | 3084 |
3085 | 3085 |
3086 void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() { | 3086 void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() { |
3087 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker(); | 3087 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker(); |
3088 if (!tracker) return; | 3088 if (!tracker) return; |
3089 // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0 | 3089 // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0 |
(...skipping 63 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3153 } else if (*s <= 31) { | 3153 } else if (*s <= 31) { |
3154 // Special character with no dedicated literal. | 3154 // Special character with no dedicated literal. |
3155 WriteUChar(writer_, *s); | 3155 WriteUChar(writer_, *s); |
3156 } else { | 3156 } else { |
3157 // Convert UTF-8 into \u UTF-16 literal. | 3157 // Convert UTF-8 into \u UTF-16 literal. |
3158 unsigned length = 1, cursor = 0; | 3158 unsigned length = 1, cursor = 0; |
3159 for ( ; length <= 4 && *(s + length) != '\0'; ++length) { } | 3159 for ( ; length <= 4 && *(s + length) != '\0'; ++length) { } |
3160 unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor); | 3160 unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor); |
3161 if (c != unibrow::Utf8::kBadChar) { | 3161 if (c != unibrow::Utf8::kBadChar) { |
3162 WriteUChar(writer_, c); | 3162 WriteUChar(writer_, c); |
3163 ASSERT(cursor != 0); | 3163 DCHECK(cursor != 0); |
3164 s += cursor - 1; | 3164 s += cursor - 1; |
3165 } else { | 3165 } else { |
3166 writer_->AddCharacter('?'); | 3166 writer_->AddCharacter('?'); |
3167 } | 3167 } |
3168 } | 3168 } |
3169 } | 3169 } |
3170 } | 3170 } |
3171 writer_->AddCharacter('\"'); | 3171 writer_->AddCharacter('\"'); |
3172 } | 3172 } |
3173 | 3173 |
(...skipping 10 matching lines...) Expand all Loading... |
3184 writer_->AddString("\"<dummy>\""); | 3184 writer_->AddString("\"<dummy>\""); |
3185 for (int i = 1; i < sorted_strings.length(); ++i) { | 3185 for (int i = 1; i < sorted_strings.length(); ++i) { |
3186 writer_->AddCharacter(','); | 3186 writer_->AddCharacter(','); |
3187 SerializeString(sorted_strings[i]); | 3187 SerializeString(sorted_strings[i]); |
3188 if (writer_->aborted()) return; | 3188 if (writer_->aborted()) return; |
3189 } | 3189 } |
3190 } | 3190 } |
3191 | 3191 |
3192 | 3192 |
3193 } } // namespace v8::internal | 3193 } } // namespace v8::internal |
OLD | NEW |