| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 171 matching lines...) |
| 182 // It is very important to keep objects that form a heap snapshot | 182 // It is very important to keep objects that form a heap snapshot |
| 183 // as small as possible. | 183 // as small as possible. |
| 184 namespace { // Avoid littering the global namespace. | 184 namespace { // Avoid littering the global namespace. |
| 185 | 185 |
| 186 template <size_t ptr_size> struct SnapshotSizeConstants; | 186 template <size_t ptr_size> struct SnapshotSizeConstants; |
| 187 | 187 |
| 188 template <> struct SnapshotSizeConstants<4> { | 188 template <> struct SnapshotSizeConstants<4> { |
| 189 static const int kExpectedHeapGraphEdgeSize = 12; | 189 static const int kExpectedHeapGraphEdgeSize = 12; |
| 190 static const int kExpectedHeapEntrySize = 24; | 190 static const int kExpectedHeapEntrySize = 24; |
| 191 static const int kExpectedHeapSnapshotsCollectionSize = 100; | 191 static const int kExpectedHeapSnapshotsCollectionSize = 100; |
| 192 static const int kExpectedHeapSnapshotSize = 136; | 192 static const int kExpectedHeapSnapshotSize = 132; |
| 193 static const size_t kMaxSerializableSnapshotRawSize = 256 * MB; | 193 static const size_t kMaxSerializableSnapshotRawSize = 256 * MB; |
| 194 }; | 194 }; |
| 195 | 195 |
| 196 template <> struct SnapshotSizeConstants<8> { | 196 template <> struct SnapshotSizeConstants<8> { |
| 197 static const int kExpectedHeapGraphEdgeSize = 24; | 197 static const int kExpectedHeapGraphEdgeSize = 24; |
| 198 static const int kExpectedHeapEntrySize = 32; | 198 static const int kExpectedHeapEntrySize = 32; |
| 199 static const int kExpectedHeapSnapshotsCollectionSize = 152; | 199 static const int kExpectedHeapSnapshotsCollectionSize = 152; |
| 200 static const int kExpectedHeapSnapshotSize = 168; | 200 static const int kExpectedHeapSnapshotSize = 160; |
| 201 static const uint64_t kMaxSerializableSnapshotRawSize = | 201 static const uint64_t kMaxSerializableSnapshotRawSize = |
| 202 static_cast<uint64_t>(6000) * MB; | 202 static_cast<uint64_t>(6000) * MB; |
| 203 }; | 203 }; |
| 204 | 204 |
| 205 } // namespace | 205 } // namespace |
| 206 | 206 |
| 207 HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection, | 207 HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection, |
| 208 HeapSnapshot::Type type, | |
| 209 const char* title, | 208 const char* title, |
| 210 unsigned uid) | 209 unsigned uid) |
| 211 : collection_(collection), | 210 : collection_(collection), |
| 212 type_(type), | |
| 213 title_(title), | 211 title_(title), |
| 214 uid_(uid), | 212 uid_(uid), |
| 215 root_index_(HeapEntry::kNoEntry), | 213 root_index_(HeapEntry::kNoEntry), |
| 216 gc_roots_index_(HeapEntry::kNoEntry), | 214 gc_roots_index_(HeapEntry::kNoEntry), |
| 217 natives_root_index_(HeapEntry::kNoEntry), | 215 natives_root_index_(HeapEntry::kNoEntry), |
| 218 max_snapshot_js_object_id_(0) { | 216 max_snapshot_js_object_id_(0) { |
| 219 STATIC_CHECK( | 217 STATIC_CHECK( |
| 220 sizeof(HeapGraphEdge) == | 218 sizeof(HeapGraphEdge) == |
| 221 SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize); | 219 SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize); |
| 222 STATIC_CHECK( | 220 STATIC_CHECK( |
| (...skipping 369 matching lines...) |
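The hunk above removes the `type_` member (and its constructor parameter) from `HeapSnapshot`, which is why `kExpectedHeapSnapshotSize` drops from 136 to 132 bytes on 32-bit targets and from 168 to 160 bytes on 64-bit targets, in keeping with the "keep snapshot objects as small as possible" comment guarded by the STATIC_CHECKs. A minimal standalone sketch of the size effect, using hypothetical structs rather than the real V8 types:

```cpp
// Hypothetical structs, not V8 types: removing one enum-sized field saves
// 4 bytes on a 32-bit target and, once alignment padding disappears, 8 bytes
// on a 64-bit target -- matching the 136->132 and 168->160 changes above.
#include <cstdio>

enum Type { kFull };

struct WithType {      // rough stand-in for HeapSnapshot before the change
  void* collection;
  Type type;           // the field being removed
  const char* title;
  unsigned uid;
};

struct WithoutType {   // rough stand-in for HeapSnapshot after the change
  void* collection;
  const char* title;
  unsigned uid;
};

int main() {
  // Typically prints 16/12 on ILP32 and 32/24 on LP64 (exact values are
  // ABI dependent).
  std::printf("sizeof(WithType) = %zu, sizeof(WithoutType) = %zu\n",
              sizeof(WithType), sizeof(WithoutType));
  return 0;
}
```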
| 592 delete *snapshot_ptr; | 590 delete *snapshot_ptr; |
| 593 } | 591 } |
| 594 | 592 |
| 595 | 593 |
| 596 HeapSnapshotsCollection::~HeapSnapshotsCollection() { | 594 HeapSnapshotsCollection::~HeapSnapshotsCollection() { |
| 597 delete token_enumerator_; | 595 delete token_enumerator_; |
| 598 snapshots_.Iterate(DeleteHeapSnapshot); | 596 snapshots_.Iterate(DeleteHeapSnapshot); |
| 599 } | 597 } |
| 600 | 598 |
| 601 | 599 |
| 602 HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(HeapSnapshot::Type type, | 600 HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(const char* name, |
| 603 const char* name, | |
| 604 unsigned uid) { | 601 unsigned uid) { |
| 605 is_tracking_objects_ = true; // Start watching for heap objects moves. | 602 is_tracking_objects_ = true; // Start watching for heap objects moves. |
| 606 return new HeapSnapshot(this, type, name, uid); | 603 return new HeapSnapshot(this, name, uid); |
| 607 } | 604 } |
| 608 | 605 |
| 609 | 606 |
| 610 void HeapSnapshotsCollection::SnapshotGenerationFinished( | 607 void HeapSnapshotsCollection::SnapshotGenerationFinished( |
| 611 HeapSnapshot* snapshot) { | 608 HeapSnapshot* snapshot) { |
| 612 ids_.SnapshotGenerationFinished(); | 609 ids_.SnapshotGenerationFinished(); |
| 613 if (snapshot != NULL) { | 610 if (snapshot != NULL) { |
| 614 snapshots_.Add(snapshot); | 611 snapshots_.Add(snapshot); |
| 615 HashMap::Entry* entry = | 612 HashMap::Entry* entry = |
| 616 snapshots_uids_.Lookup(reinterpret_cast<void*>(snapshot->uid()), | 613 snapshots_uids_.Lookup(reinterpret_cast<void*>(snapshot->uid()), |
| (...skipping 1786 matching lines...) |
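With `kFull` as the only snapshot kind left, the `HeapSnapshot::Type` parameter carries no information, so the hunks above drop it from both the constructor and `HeapSnapshotsCollection::NewSnapshot`. A hedged sketch of the simplified factory shape, with hypothetical names rather than the real V8 classes (ownership transfer and the uid map are elided):

```cpp
// Hypothetical names, not V8 headers: the factory now keys purely on name
// and uid, and creating a snapshot still enables object-move tracking as a
// side effect, as in the hunk above.
struct Snapshot {
  Snapshot(const char* title, unsigned uid) : title_(title), uid_(uid) {}
  const char* title_;
  unsigned uid_;
};

class SnapshotsCollection {
 public:
  Snapshot* NewSnapshot(const char* name, unsigned uid) {
    is_tracking_objects_ = true;  // start watching for heap object moves
    return new Snapshot(name, uid);  // registered later, when generation ends
  }

 private:
  bool is_tracking_objects_ = false;
};
```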
| 2403 | 2400 |
| 2404 if (original_snapshot != NULL) { | 2401 if (original_snapshot != NULL) { |
| 2405 delete snapshot_; | 2402 delete snapshot_; |
| 2406 snapshot_ = original_snapshot; | 2403 snapshot_ = original_snapshot; |
| 2407 } | 2404 } |
| 2408 } | 2405 } |
| 2409 | 2406 |
| 2410 | 2407 |
| 2411 HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() { | 2408 HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() { |
| 2412 HeapSnapshot* result = new HeapSnapshot(snapshot_->collection(), | 2409 HeapSnapshot* result = new HeapSnapshot(snapshot_->collection(), |
| 2413 HeapSnapshot::kFull, | |
| 2414 snapshot_->title(), | 2410 snapshot_->title(), |
| 2415 snapshot_->uid()); | 2411 snapshot_->uid()); |
| 2416 result->AddRootEntry(); | 2412 result->AddRootEntry(); |
| 2417 const char* text = snapshot_->collection()->names()->GetFormatted( | 2413 const char* text = snapshot_->collection()->names()->GetFormatted( |
| 2418 "The snapshot is too big. " | 2414 "The snapshot is too big. " |
| 2419 "Maximum snapshot size is %" V8_PTR_PREFIX "u MB. " | 2415 "Maximum snapshot size is %" V8_PTR_PREFIX "u MB. " |
| 2420 "Actual snapshot size is %" V8_PTR_PREFIX "u MB.", | 2416 "Actual snapshot size is %" V8_PTR_PREFIX "u MB.", |
| 2421 SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize / MB, | 2417 SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize / MB, |
| 2422 (snapshot_->RawSnapshotSize() + MB - 1) / MB); | 2418 (snapshot_->RawSnapshotSize() + MB - 1) / MB); |
| 2423 HeapEntry* message = result->AddEntry(HeapEntry::kString, text, 0, 4); | 2419 HeapEntry* message = result->AddEntry(HeapEntry::kString, text, 0, 4); |
| (...skipping 274 matching lines...) |
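In the `CreateFakeSnapshot` hunk above, the actual size in the "snapshot is too big" message is computed as `(snapshot_->RawSnapshotSize() + MB - 1) / MB`, an integer ceiling division, so a snapshot even one byte over an exact megabyte boundary is reported as the next whole MB rather than truncated down. A small illustrative check of that idiom:

```cpp
// Illustrative only: verifies the round-up-to-MB idiom used when formatting
// the oversized-snapshot message above.
#include <cassert>
#include <cstdint>

int main() {
  const uint64_t MB = 1024 * 1024;
  assert((5 * MB + MB - 1) / MB == 5);      // exact multiples stay unchanged
  assert((5 * MB + 1 + MB - 1) / MB == 6);  // one byte over rounds up
  return 0;
}
```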
| 2698 | 2694 |
| 2699 | 2695 |
| 2700 void HeapSnapshotJSONSerializer::SortHashMap( | 2696 void HeapSnapshotJSONSerializer::SortHashMap( |
| 2701 HashMap* map, List<HashMap::Entry*>* sorted_entries) { | 2697 HashMap* map, List<HashMap::Entry*>* sorted_entries) { |
| 2702 for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p)) | 2698 for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p)) |
| 2703 sorted_entries->Add(p); | 2699 sorted_entries->Add(p); |
| 2704 sorted_entries->Sort(SortUsingEntryValue); | 2700 sorted_entries->Sort(SortUsingEntryValue); |
| 2705 } | 2701 } |
| 2706 | 2702 |
| 2707 } } // namespace v8::internal | 2703 } } // namespace v8::internal |