Chromium Code Reviews

Unified Diff: src/heap-snapshot-generator.cc

Issue 18701002: Remove deprecated heap profiler methods from V8 public API (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 7 years, 5 months ago
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 171 matching lines...)

 // It is very important to keep objects that form a heap snapshot
 // as small as possible.
 namespace {  // Avoid littering the global namespace.

 template <size_t ptr_size> struct SnapshotSizeConstants;

 template <> struct SnapshotSizeConstants<4> {
   static const int kExpectedHeapGraphEdgeSize = 12;
   static const int kExpectedHeapEntrySize = 24;
-  static const int kExpectedHeapSnapshotsCollectionSize = 100;
-  static const int kExpectedHeapSnapshotSize = 132;
 };

 template <> struct SnapshotSizeConstants<8> {
   static const int kExpectedHeapGraphEdgeSize = 24;
   static const int kExpectedHeapEntrySize = 32;
-  static const int kExpectedHeapSnapshotsCollectionSize = 152;
-  static const int kExpectedHeapSnapshotSize = 160;
 };

 }  // namespace

 HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection,
                            const char* title,
                            unsigned uid)
     : collection_(collection),
       title_(title),
       uid_(uid),
(...skipping 135 matching lines...)
 }


 template<typename T, class P>
 static size_t GetMemoryUsedByList(const List<T, P>& list) {
   return list.length() * sizeof(T) + sizeof(list);
 }


 size_t HeapSnapshot::RawSnapshotSize() const {
-  STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::kExpectedHeapSnapshotSize ==
-               sizeof(HeapSnapshot));  // NOLINT
   return
       sizeof(*this) +
       GetMemoryUsedByList(entries_) +
       GetMemoryUsedByList(edges_) +
       GetMemoryUsedByList(children_) +
       GetMemoryUsedByList(sorted_entries_);
 }


 // We split IDs on evens for embedder objects (see
(...skipping 203 matching lines...)
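The chunks above drop the kExpectedHeapSnapshotSize and kExpectedHeapSnapshotsCollectionSize constants together with the STATIC_CHECK in HeapSnapshot::RawSnapshotSize() that consumed one of them (the matching check in GetUsedMemorySize() goes away further down). For readers unfamiliar with the idiom, here is a minimal, self-contained sketch of the same per-pointer-width size guard, using a standard static_assert rather than V8's STATIC_CHECK macro; Probe and ExpectedProbeSize are made-up names, and the expected values assume common 32-/64-bit ABIs:

    #include <cstddef>

    // Illustrative record whose size we want to pin at compile time.
    struct Probe {
      void* data;      // pointer-sized field
      unsigned id;
      unsigned size;
    };

    // Expected sizeof(Probe) for each pointer width, in the spirit of
    // SnapshotSizeConstants above: 4 + 4 + 4 = 12 on 32-bit, 8 + 4 + 4 = 16 on 64-bit.
    template <size_t ptr_size> struct ExpectedProbeSize;
    template <> struct ExpectedProbeSize<4> { static const size_t value = 12; };
    template <> struct ExpectedProbeSize<8> { static const size_t value = 16; };

    // The build fails if Probe grows without the constant being updated.
    static_assert(sizeof(Probe) == ExpectedProbeSize<sizeof(void*)>::value,
                  "Probe layout changed; update ExpectedProbeSize");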
   return
       sizeof(*this) +
       sizeof(HashMap::Entry) * entries_map_.capacity() +
       GetMemoryUsedByList(entries_) +
       GetMemoryUsedByList(time_intervals_);
 }


 HeapSnapshotsCollection::HeapSnapshotsCollection(Heap* heap)
     : is_tracking_objects_(false),
-      snapshots_uids_(HeapSnapshotsMatch),
       token_enumerator_(new TokenEnumerator()),
       ids_(heap) {
 }


 static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) {
   delete *snapshot_ptr;
 }


 HeapSnapshotsCollection::~HeapSnapshotsCollection() {
   delete token_enumerator_;
   snapshots_.Iterate(DeleteHeapSnapshot);
 }


 HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(const char* name,
                                                    unsigned uid) {
   is_tracking_objects_ = true;  // Start watching for heap objects moves.
   return new HeapSnapshot(this, name, uid);
 }


 void HeapSnapshotsCollection::SnapshotGenerationFinished(
     HeapSnapshot* snapshot) {
   ids_.SnapshotGenerationFinished();
   if (snapshot != NULL) {
     snapshots_.Add(snapshot);
-    HashMap::Entry* entry =
-        snapshots_uids_.Lookup(reinterpret_cast<void*>(snapshot->uid()),
-                               static_cast<uint32_t>(snapshot->uid()),
-                               true);
-    ASSERT(entry->value == NULL);
-    entry->value = snapshot;
   }
 }


-HeapSnapshot* HeapSnapshotsCollection::GetSnapshot(unsigned uid) {
-  HashMap::Entry* entry = snapshots_uids_.Lookup(reinterpret_cast<void*>(uid),
-                                                 static_cast<uint32_t>(uid),
-                                                 false);
-  return entry != NULL ? reinterpret_cast<HeapSnapshot*>(entry->value) : NULL;
-}
-
-
 void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) {
   snapshots_.RemoveElement(snapshot);
-  unsigned uid = snapshot->uid();
-  snapshots_uids_.Remove(reinterpret_cast<void*>(uid),
-                         static_cast<uint32_t>(uid));
 }


 Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
     SnapshotObjectId id) {
   // First perform a full GC in order to avoid dead objects.
   HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                           "HeapSnapshotsCollection::FindHeapObjectById");
   DisallowHeapAllocation no_allocation;
   HeapObject* object = NULL;
   HeapIterator iterator(heap(), HeapIterator::kFilterUnreachable);
   // Make sure that object with the given id is still reachable.
   for (HeapObject* obj = iterator.next();
        obj != NULL;
        obj = iterator.next()) {
     if (ids_.FindEntry(obj->address()) == id) {
       ASSERT(object == NULL);
       object = obj;
       // Can't break -- kFilterUnreachable requires full heap traversal.
     }
   }
   return object != NULL ? Handle<HeapObject>(object) : Handle<HeapObject>();
 }


 size_t HeapSnapshotsCollection::GetUsedMemorySize() const {
-  STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::
-               kExpectedHeapSnapshotsCollectionSize ==
-               sizeof(HeapSnapshotsCollection));  // NOLINT
   size_t size = sizeof(*this);
   size += names_.GetUsedMemorySize();
   size += ids_.GetUsedMemorySize();
-  size += sizeof(HashMap::Entry) * snapshots_uids_.capacity();
   size += GetMemoryUsedByList(snapshots_);
   for (int i = 0; i < snapshots_.length(); ++i) {
     size += snapshots_[i]->RawSnapshotSize();
   }
   return size;
 }


 HeapEntriesMap::HeapEntriesMap()
     : entries_(HeapThingsMatch) {
(...skipping 2018 matching lines...)
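With snapshots_uids_ and HeapSnapshotsCollection::GetSnapshot(unsigned uid) removed in the chunk above, the collection keeps only the flat snapshots_ list, and GetUsedMemorySize() no longer charges for the hash map. If a caller still needed to resolve a uid, the obvious fallback is a linear scan over the remaining list. A hypothetical, self-contained sketch of that fallback (FindSnapshotByUid and the Snapshot struct are illustrative stand-ins, not part of this patch):

    #include <cstddef>
    #include <vector>

    struct Snapshot { unsigned uid; };  // stand-in for HeapSnapshot

    // Linear scan replacing the removed uid -> snapshot hash map lookup;
    // adequate for the handful of live snapshots a profiling session keeps.
    Snapshot* FindSnapshotByUid(const std::vector<Snapshot*>& snapshots,
                                unsigned uid) {
      for (size_t i = 0; i < snapshots.size(); ++i) {
        if (snapshots[i]->uid == uid) return snapshots[i];
      }
      return NULL;  // no snapshot with this uid
    }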


 void HeapSnapshotJSONSerializer::SortHashMap(
     HashMap* map, List<HashMap::Entry*>* sorted_entries) {
   for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p))
     sorted_entries->Add(p);
   sorted_entries->Sort(SortUsingEntryValue);
 }

 } }  // namespace v8::internal
