OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1077 matching lines...)
1088 | 1088 |
1089 // It is very important to keep objects that form a heap snapshot | 1089 // It is very important to keep objects that form a heap snapshot |
1090 // as small as possible. | 1090 // as small as possible. |
1091 namespace { // Avoid littering the global namespace. | 1091 namespace { // Avoid littering the global namespace. |
1092 | 1092 |
1093 template <size_t ptr_size> struct SnapshotSizeConstants; | 1093 template <size_t ptr_size> struct SnapshotSizeConstants; |
1094 | 1094 |
1095 template <> struct SnapshotSizeConstants<4> { | 1095 template <> struct SnapshotSizeConstants<4> { |
1096 static const int kExpectedHeapGraphEdgeSize = 12; | 1096 static const int kExpectedHeapGraphEdgeSize = 12; |
1097 static const int kExpectedHeapEntrySize = 24; | 1097 static const int kExpectedHeapEntrySize = 24; |
1098 static const int kExpectedHeapSnapshotsCollectionSize = 96; | 1098 static const int kExpectedHeapSnapshotsCollectionSize = 100; |
1099 static const int kExpectedHeapSnapshotSize = 136; | 1099 static const int kExpectedHeapSnapshotSize = 136; |
1100 static const size_t kMaxSerializableSnapshotRawSize = 256 * MB; | 1100 static const size_t kMaxSerializableSnapshotRawSize = 256 * MB; |
1101 }; | 1101 }; |
1102 | 1102 |
1103 template <> struct SnapshotSizeConstants<8> { | 1103 template <> struct SnapshotSizeConstants<8> { |
1104 static const int kExpectedHeapGraphEdgeSize = 24; | 1104 static const int kExpectedHeapGraphEdgeSize = 24; |
1105 static const int kExpectedHeapEntrySize = 32; | 1105 static const int kExpectedHeapEntrySize = 32; |
1106 static const int kExpectedHeapSnapshotsCollectionSize = 144; | 1106 static const int kExpectedHeapSnapshotsCollectionSize = 152; |
1107 static const int kExpectedHeapSnapshotSize = 168; | 1107 static const int kExpectedHeapSnapshotSize = 168; |
1108 static const uint64_t kMaxSerializableSnapshotRawSize = | 1108 static const uint64_t kMaxSerializableSnapshotRawSize = |
1109 static_cast<uint64_t>(6000) * MB; | 1109 static_cast<uint64_t>(6000) * MB; |
1110 }; | 1110 }; |
1111 | 1111 |
1112 } // namespace | 1112 } // namespace |
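These constants only pay off if something compares them with the real struct sizes; that check lives elsewhere in this file. A minimal sketch of the idea, written here with standard static_assert (the V8 tree of this era would use its own STATIC_CHECK/CHECK_EQ macros), assuming kPointerSize is the usual platform pointer width in bytes:

    static_assert(sizeof(HeapGraphEdge) ==
                  SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize,
                  "HeapGraphEdge must stay small");
    static_assert(sizeof(HeapEntry) ==
                  SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize,
                  "HeapEntry must stay small");
    static_assert(sizeof(HeapSnapshotsCollection) ==
                  SnapshotSizeConstants<kPointerSize>::kExpectedHeapSnapshotsCollectionSize,
                  "HeapSnapshotsCollection must stay small");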
1113 | 1113 |
1114 HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection, | 1114 HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection, |
1115 HeapSnapshot::Type type, | 1115 HeapSnapshot::Type type, |
1116 const char* title, | 1116 const char* title, |
(...skipping 162 matching lines...)
1279 // HeapObjectsMap::GenerateId) and odds for native objects. | 1279 // HeapObjectsMap::GenerateId) and odds for native objects. |
1280 const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1; | 1280 const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1; |
1281 const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId = | 1281 const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId = |
1282 HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep; | 1282 HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep; |
1283 const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId = | 1283 const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId = |
1284 HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep; | 1284 HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep; |
1285 const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId = | 1285 const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId = |
1286 HeapObjectsMap::kGcRootsFirstSubrootId + | 1286 HeapObjectsMap::kGcRootsFirstSubrootId + |
1287 VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep; | 1287 VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep; |
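With kObjectIdStep equal to 2 (its definition is outside this hunk, so treat that value as an assumption), the reserved ids above form a small arithmetic ladder:

    // Assuming kObjectIdStep == 2:
    //   kInternalRootObjectId   = 1
    //   kGcRootsObjectId        = 1 + 2 = 3
    //   kGcRootsFirstSubrootId  = 3 + 2 = 5
    //   kFirstAvailableObjectId = 5 + kNumberOfSyncTags * 2
    // Stepping by 2 keeps these synthetic ids odd, so they can never collide
    // with the even ids that GenerateId hands out to real heap objects.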
1288 | 1288 |
1289 HeapObjectsMap::HeapObjectsMap() | 1289 HeapObjectsMap::HeapObjectsMap(Heap* heap) |
1290 : next_id_(kFirstAvailableObjectId), | 1290 : next_id_(kFirstAvailableObjectId), |
1291 entries_map_(AddressesMatch) { | 1291 entries_map_(AddressesMatch), |
| 1292 heap_(heap) { |
1292 // This dummy element solves a problem with entries_map_. | 1293 // This dummy element solves a problem with entries_map_. |
1293 // When we do lookup in HashMap we see no difference between two cases: | 1294 // When we do lookup in HashMap we see no difference between two cases: |
1294 // it has an entry with NULL as the value or it has created | 1295 // it has an entry with NULL as the value or it has created |
1295 // a new entry on the fly with NULL as the default value. | 1296 // a new entry on the fly with NULL as the default value. |
1296 // With such a dummy element we have a guarantee that all entries_map_ entries | 1297 // With such a dummy element we have a guarantee that all entries_map_ entries |
1297 // will have the value field greater than 0. | 1298 // will have the value field greater than 0. |
1298 // This fact is used in the MoveObject method. | 1299 // This fact is used in the MoveObject method. |
1299 entries_.Add(EntryInfo(0, NULL, 0)); | 1300 entries_.Add(EntryInfo(0, NULL, 0)); |
1300 } | 1301 } |
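The sentinel at index 0 is what lets a lookup treat a value of 0 as "HashMap created this slot on the fly" rather than as a valid index into entries_. A sketch of the pattern with a hypothetical helper (the real consumers, MoveObject and FindOrAddEntry, sit outside this hunk, and AddressHash is assumed to be the hash function the map already uses):

    // Hypothetical helper illustrating why entries_map_ values are always > 0.
    int HeapObjectsMap::LookupEntryIndex(Address addr) {
      HashMap::Entry* entry =
          entries_map_.Lookup(addr, AddressHash(addr), false);
      if (entry == NULL) return 0;  // address is not tracked at all
      int index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
      ASSERT(index > 0);  // index 0 is the dummy element added above
      return index;       // safe to use as entries_[index]
    }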
1301 | 1302 |
(...skipping 57 matching lines...)
1359 } | 1360 } |
1360 | 1361 |
1361 | 1362 |
1362 void HeapObjectsMap::StopHeapObjectsTracking() { | 1363 void HeapObjectsMap::StopHeapObjectsTracking() { |
1363 time_intervals_.Clear(); | 1364 time_intervals_.Clear(); |
1364 } | 1365 } |
1365 | 1366 |
1366 void HeapObjectsMap::UpdateHeapObjectsMap() { | 1367 void HeapObjectsMap::UpdateHeapObjectsMap() { |
1367 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask, | 1368 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask, |
1368 "HeapSnapshotsCollection::UpdateHeapObjectsMap"); | 1369 "HeapSnapshotsCollection::UpdateHeapObjectsMap"); |
1369 HeapIterator iterator; | 1370 HeapIterator iterator(heap_); |
1370 for (HeapObject* obj = iterator.next(); | 1371 for (HeapObject* obj = iterator.next(); |
1371 obj != NULL; | 1372 obj != NULL; |
1372 obj = iterator.next()) { | 1373 obj = iterator.next()) { |
1373 FindOrAddEntry(obj->address(), obj->Size()); | 1374 FindOrAddEntry(obj->address(), obj->Size()); |
1374 } | 1375 } |
1375 RemoveDeadEntries(); | 1376 RemoveDeadEntries(); |
1376 } | 1377 } |
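The substantive change in this function is the iterator construction; the GC call above still goes through the HEAP macro. In condensed form, and presumably as a step toward dropping isolate-global lookups from the profiler:

    // Before this CL: the iterator found the heap implicitly.
    //   HeapIterator iterator;
    // After: the heap is injected through the constructor and stored as heap_.
    //   HeapIterator iterator(heap_);
    // The same substitution is applied to every HeapIterator in this file.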
1377 | 1378 |
1378 | 1379 |
1379 SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) { | 1380 SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) { |
(...skipping 87 matching lines...)
1467 | 1468 |
1468 size_t HeapObjectsMap::GetUsedMemorySize() const { | 1469 size_t HeapObjectsMap::GetUsedMemorySize() const { |
1469 return | 1470 return |
1470 sizeof(*this) + | 1471 sizeof(*this) + |
1471 sizeof(HashMap::Entry) * entries_map_.capacity() + | 1472 sizeof(HashMap::Entry) * entries_map_.capacity() + |
1472 GetMemoryUsedByList(entries_) + | 1473 GetMemoryUsedByList(entries_) + |
1473 GetMemoryUsedByList(time_intervals_); | 1474 GetMemoryUsedByList(time_intervals_); |
1474 } | 1475 } |
1475 | 1476 |
1476 | 1477 |
1477 HeapSnapshotsCollection::HeapSnapshotsCollection() | 1478 HeapSnapshotsCollection::HeapSnapshotsCollection(Heap* heap) |
1478 : is_tracking_objects_(false), | 1479 : is_tracking_objects_(false), |
1479 snapshots_uids_(HeapSnapshotsMatch), | 1480 snapshots_uids_(HeapSnapshotsMatch), |
1480 token_enumerator_(new TokenEnumerator()) { | 1481 token_enumerator_(new TokenEnumerator()), |
| 1482 ids_(heap) { |
1481 } | 1483 } |
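This initializer ties back to the expected-size constants at the top of the hunk: HeapSnapshotsCollection embeds the HeapObjectsMap as ids_, so the new heap_ pointer shows up in its size. A quick check of the bookkeeping:

    //   32-bit: kExpectedHeapSnapshotsCollectionSize  96 -> 100  (+4 == sizeof(Heap*))
    //   64-bit: kExpectedHeapSnapshotsCollectionSize 144 -> 152  (+8 == sizeof(Heap*))
    // The bump is exactly one pointer, accounted for by the heap_ field added
    // to HeapObjectsMap above.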
1482 | 1484 |
1483 | 1485 |
1484 static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) { | 1486 static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) { |
1485 delete *snapshot_ptr; | 1487 delete *snapshot_ptr; |
1486 } | 1488 } |
1487 | 1489 |
1488 | 1490 |
1489 HeapSnapshotsCollection::~HeapSnapshotsCollection() { | 1491 HeapSnapshotsCollection::~HeapSnapshotsCollection() { |
1490 delete token_enumerator_; | 1492 delete token_enumerator_; |
(...skipping 40 matching lines...)
1531 } | 1533 } |
1532 | 1534 |
1533 | 1535 |
1534 Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById( | 1536 Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById( |
1535 SnapshotObjectId id) { | 1537 SnapshotObjectId id) { |
1536 // First perform a full GC in order to avoid dead objects. | 1538 // First perform a full GC in order to avoid dead objects. |
1537 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask, | 1539 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask, |
1538 "HeapSnapshotsCollection::FindHeapObjectById"); | 1540 "HeapSnapshotsCollection::FindHeapObjectById"); |
1539 AssertNoAllocation no_allocation; | 1541 AssertNoAllocation no_allocation; |
1540 HeapObject* object = NULL; | 1542 HeapObject* object = NULL; |
1541 HeapIterator iterator(HeapIterator::kFilterUnreachable); | 1543 HeapIterator iterator(heap(), HeapIterator::kFilterUnreachable); |
1542 // Make sure that object with the given id is still reachable. | 1544 // Make sure that object with the given id is still reachable. |
1543 for (HeapObject* obj = iterator.next(); | 1545 for (HeapObject* obj = iterator.next(); |
1544 obj != NULL; | 1546 obj != NULL; |
1545 obj = iterator.next()) { | 1547 obj = iterator.next()) { |
1546 if (ids_.FindEntry(obj->address()) == id) { | 1548 if (ids_.FindEntry(obj->address()) == id) { |
1547 ASSERT(object == NULL); | 1549 ASSERT(object == NULL); |
1548 object = obj; | 1550 object = obj; |
1549 // Can't break -- kFilterUnreachable requires full heap traversal. | 1551 // Can't break -- kFilterUnreachable requires full heap traversal. |
1550 } | 1552 } |
1551 } | 1553 } |
(...skipping 869 matching lines...)
2421 bool collecting_all_references_; | 2423 bool collecting_all_references_; |
2422 List<Object*> strong_references_; | 2424 List<Object*> strong_references_; |
2423 List<Object*> all_references_; | 2425 List<Object*> all_references_; |
2424 int previous_reference_count_; | 2426 int previous_reference_count_; |
2425 List<IndexTag> reference_tags_; | 2427 List<IndexTag> reference_tags_; |
2426 }; | 2428 }; |
2427 | 2429 |
2428 | 2430 |
2429 bool V8HeapExplorer::IterateAndExtractReferences( | 2431 bool V8HeapExplorer::IterateAndExtractReferences( |
2430 SnapshotFillerInterface* filler) { | 2432 SnapshotFillerInterface* filler) { |
2431 HeapIterator iterator(HeapIterator::kFilterUnreachable); | 2433 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable); |
2432 | 2434 |
2433 filler_ = filler; | 2435 filler_ = filler; |
2434 bool interrupted = false; | 2436 bool interrupted = false; |
2435 | 2437 |
2436 // Heap iteration with filtering must be finished in any case. | 2438 // Heap iteration with filtering must be finished in any case. |
2437 for (HeapObject* obj = iterator.next(); | 2439 for (HeapObject* obj = iterator.next(); |
2438 obj != NULL; | 2440 obj != NULL; |
2439 obj = iterator.next(), progress_->ProgressStep()) { | 2441 obj = iterator.next(), progress_->ProgressStep()) { |
2440 if (!interrupted) { | 2442 if (!interrupted) { |
2441 ExtractReferences(obj); | 2443 ExtractReferences(obj); |
(...skipping 630 matching lines...)
3072 private: | 3074 private: |
3073 HeapSnapshot* snapshot_; | 3075 HeapSnapshot* snapshot_; |
3074 HeapSnapshotsCollection* collection_; | 3076 HeapSnapshotsCollection* collection_; |
3075 HeapEntriesMap* entries_; | 3077 HeapEntriesMap* entries_; |
3076 }; | 3078 }; |
3077 | 3079 |
3078 | 3080 |
3079 HeapSnapshotGenerator::HeapSnapshotGenerator( | 3081 HeapSnapshotGenerator::HeapSnapshotGenerator( |
3080 HeapSnapshot* snapshot, | 3082 HeapSnapshot* snapshot, |
3081 v8::ActivityControl* control, | 3083 v8::ActivityControl* control, |
3082 v8::HeapProfiler::ObjectNameResolver* resolver) | 3084 v8::HeapProfiler::ObjectNameResolver* resolver, |
| 3085 Heap* heap) |
3083 : snapshot_(snapshot), | 3086 : snapshot_(snapshot), |
3084 control_(control), | 3087 control_(control), |
3085 v8_heap_explorer_(snapshot_, this, resolver), | 3088 v8_heap_explorer_(snapshot_, this, resolver), |
3086 dom_explorer_(snapshot_, this) { | 3089 dom_explorer_(snapshot_, this), |
| 3090 heap_(heap) { |
3087 } | 3091 } |
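This is the API-visible part of the change: the generator now takes the Heap* it should walk. A hypothetical caller, sketched under the assumption that the real call site (in the heap profiler, outside this diff) simply forwards its isolate's heap:

    bool TakeSnapshot(HeapSnapshot* snapshot,
                      v8::ActivityControl* control,
                      v8::HeapProfiler::ObjectNameResolver* resolver,
                      Isolate* isolate) {
      // The heap is passed explicitly instead of being looked up globally.
      HeapSnapshotGenerator generator(snapshot, control, resolver,
                                      isolate->heap());
      return generator.GenerateSnapshot();
    }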
3088 | 3092 |
3089 | 3093 |
3090 bool HeapSnapshotGenerator::GenerateSnapshot() { | 3094 bool HeapSnapshotGenerator::GenerateSnapshot() { |
3091 v8_heap_explorer_.TagGlobalObjects(); | 3095 v8_heap_explorer_.TagGlobalObjects(); |
3092 | 3096 |
3093 // TODO(1562) Profiler assumes that any object that is in the heap after | 3097 // TODO(1562) Profiler assumes that any object that is in the heap after |
3094 // full GC is reachable from the root when computing dominators. | 3098 // full GC is reachable from the root when computing dominators. |
3095 // This is not true for weakly reachable objects. | 3099 // This is not true for weakly reachable objects. |
3096 // As a temporary solution we call GC twice. | 3100 // As a temporary solution we call GC twice. |
(...skipping 50 matching lines...)
3147 return | 3151 return |
3148 control_->ReportProgressValue(progress_counter_, progress_total_) == | 3152 control_->ReportProgressValue(progress_counter_, progress_total_) == |
3149 v8::ActivityControl::kContinue; | 3153 v8::ActivityControl::kContinue; |
3150 } | 3154 } |
3151 return true; | 3155 return true; |
3152 } | 3156 } |
3153 | 3157 |
3154 | 3158 |
3155 void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) { | 3159 void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) { |
3156 if (control_ == NULL) return; | 3160 if (control_ == NULL) return; |
3157 HeapIterator iterator(HeapIterator::kFilterUnreachable); | 3161 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable); |
3158 progress_total_ = iterations_count * ( | 3162 progress_total_ = iterations_count * ( |
3159 v8_heap_explorer_.EstimateObjectsCount(&iterator) + | 3163 v8_heap_explorer_.EstimateObjectsCount(&iterator) + |
3160 dom_explorer_.EstimateObjectsCount()); | 3164 dom_explorer_.EstimateObjectsCount()); |
3161 progress_counter_ = 0; | 3165 progress_counter_ = 0; |
3162 } | 3166 } |
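For a sense of scale (the figures below are illustrative only): with two iterations, about 100,000 objects reported by EstimateObjectsCount and 2,000 wrappers from the DOM explorer, the total works out as:

    //   progress_total_   = 2 * (100000 + 2000) = 204000
    //   progress_counter_ starts at 0 and is advanced by ProgressStep() once
    //   per visited object, so ProgressReport() keeps handing the embedder the
    //   pair (progress_counter_, progress_total_) until the counter catches up.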
3163 | 3167 |
3164 | 3168 |
3165 bool HeapSnapshotGenerator::FillReferences() { | 3169 bool HeapSnapshotGenerator::FillReferences() { |
3166 SnapshotFiller filler(snapshot_, &entries_); | 3170 SnapshotFiller filler(snapshot_, &entries_); |
3167 v8_heap_explorer_.AddRootEntries(&filler); | 3171 v8_heap_explorer_.AddRootEntries(&filler); |
(...skipping 417 matching lines...)
3585 | 3589 |
3586 | 3590 |
3587 void HeapSnapshotJSONSerializer::SortHashMap( | 3591 void HeapSnapshotJSONSerializer::SortHashMap( |
3588 HashMap* map, List<HashMap::Entry*>* sorted_entries) { | 3592 HashMap* map, List<HashMap::Entry*>* sorted_entries) { |
3589 for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p)) | 3593 for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p)) |
3590 sorted_entries->Add(p); | 3594 sorted_entries->Add(p); |
3591 sorted_entries->Sort(SortUsingEntryValue); | 3595 sorted_entries->Sort(SortUsingEntryValue); |
3592 } | 3596 } |
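SortUsingEntryValue is defined elsewhere in this file; the sort above only relies on it ordering the collected entries by their stored value. A comparator of roughly this shape would do (the exact in-tree signature may differ):

    static int SortUsingEntryValue(HashMap::Entry* const* a,
                                   HashMap::Entry* const* b) {
      uintptr_t a_value = reinterpret_cast<uintptr_t>((*a)->value);
      uintptr_t b_value = reinterpret_cast<uintptr_t>((*b)->value);
      if (a_value == b_value) return 0;
      return a_value < b_value ? -1 : 1;
    }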
3593 | 3597 |
3594 } } // namespace v8::internal | 3598 } } // namespace v8::internal |