OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 413 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
424 int to_entry_info_index = | 424 int to_entry_info_index = |
425 static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value)); | 425 static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value)); |
426 entries_.at(to_entry_info_index).addr = NULL; | 426 entries_.at(to_entry_info_index).addr = NULL; |
427 } | 427 } |
428 int from_entry_info_index = | 428 int from_entry_info_index = |
429 static_cast<int>(reinterpret_cast<intptr_t>(from_value)); | 429 static_cast<int>(reinterpret_cast<intptr_t>(from_value)); |
430 entries_.at(from_entry_info_index).addr = to; | 430 entries_.at(from_entry_info_index).addr = to; |
431 // Size of an object can change during its life, so to keep information | 431 // Size of an object can change during its life, so to keep information |
432 // about the object in entries_ consistent, we have to adjust size when the | 432 // about the object in entries_ consistent, we have to adjust size when the |
433 // object is migrated. | 433 // object is migrated. |
| 434 if (FLAG_heap_profiler_trace_objects) { |
| 435 PrintF("Move object from %p to %p old size %6d new size %6d\n", |
| 436 from, |
| 437 to, |
| 438 entries_.at(from_entry_info_index).size, |
| 439 object_size); |
| 440 } |
434 entries_.at(from_entry_info_index).size = object_size; | 441 entries_.at(from_entry_info_index).size = object_size; |
435 to_entry->value = from_value; | 442 to_entry->value = from_value; |
436 } | 443 } |
437 } | 444 } |
438 | 445 |
439 | 446 |
440 void HeapObjectsMap::NewObject(Address addr, int size) { | 447 void HeapObjectsMap::NewObject(Address addr, int size) { |
| 448 if (FLAG_heap_profiler_trace_objects) { |
| 449 PrintF("New object : %p %6d. Next address is %p\n", |
| 450 addr, |
| 451 size, |
| 452 addr + size); |
| 453 } |
441 ASSERT(addr != NULL); | 454 ASSERT(addr != NULL); |
442 FindOrAddEntry(addr, size, false); | 455 FindOrAddEntry(addr, size, false); |
443 } | 456 } |
444 | 457 |
445 | 458 |
// Refreshes the recorded size of the object at |addr|.
// Passes accessed=false, so this only updates the size bookkeeping and
// does not mark the entry as accessed.
void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
  FindOrAddEntry(addr, size, false);
}
449 | 462 |
450 | 463 |
(...skipping 12 matching lines...) Expand all Loading... |
463 unsigned int size, | 476 unsigned int size, |
464 bool accessed) { | 477 bool accessed) { |
465 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy()); | 478 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy()); |
466 HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr), | 479 HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr), |
467 true); | 480 true); |
468 if (entry->value != NULL) { | 481 if (entry->value != NULL) { |
469 int entry_index = | 482 int entry_index = |
470 static_cast<int>(reinterpret_cast<intptr_t>(entry->value)); | 483 static_cast<int>(reinterpret_cast<intptr_t>(entry->value)); |
471 EntryInfo& entry_info = entries_.at(entry_index); | 484 EntryInfo& entry_info = entries_.at(entry_index); |
472 entry_info.accessed = accessed; | 485 entry_info.accessed = accessed; |
| 486 if (FLAG_heap_profiler_trace_objects) { |
| 487 PrintF("Update object size : %p with old size %d and new size %d\n", |
| 488 addr, |
| 489 entry_info.size, |
| 490 size); |
| 491 } |
473 entry_info.size = size; | 492 entry_info.size = size; |
474 return entry_info.id; | 493 return entry_info.id; |
475 } | 494 } |
476 entry->value = reinterpret_cast<void*>(entries_.length()); | 495 entry->value = reinterpret_cast<void*>(entries_.length()); |
477 SnapshotObjectId id = next_id_; | 496 SnapshotObjectId id = next_id_; |
478 next_id_ += kObjectIdStep; | 497 next_id_ += kObjectIdStep; |
479 entries_.Add(EntryInfo(id, addr, size, accessed)); | 498 entries_.Add(EntryInfo(id, addr, size, accessed)); |
480 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy()); | 499 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy()); |
481 return id; | 500 return id; |
482 } | 501 } |
483 | 502 |
484 | 503 |
// Stops heap-objects tracking by discarding the recorded time intervals.
// Only the interval list is cleared; the address->id entries themselves
// are left untouched.
void HeapObjectsMap::StopHeapObjectsTracking() {
  time_intervals_.Clear();
}
488 | 507 |
489 | 508 |
490 void HeapObjectsMap::UpdateHeapObjectsMap() { | 509 void HeapObjectsMap::UpdateHeapObjectsMap() { |
| 510 if (FLAG_heap_profiler_trace_objects) { |
| 511 PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n", |
| 512 entries_map_.occupancy()); |
| 513 } |
491 heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask, | 514 heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask, |
492 "HeapSnapshotsCollection::UpdateHeapObjectsMap"); | 515 "HeapSnapshotsCollection::UpdateHeapObjectsMap"); |
493 HeapIterator iterator(heap_); | 516 HeapIterator iterator(heap_); |
494 for (HeapObject* obj = iterator.next(); | 517 for (HeapObject* obj = iterator.next(); |
495 obj != NULL; | 518 obj != NULL; |
496 obj = iterator.next()) { | 519 obj = iterator.next()) { |
497 FindOrAddEntry(obj->address(), obj->Size()); | 520 FindOrAddEntry(obj->address(), obj->Size()); |
| 521 if (FLAG_heap_profiler_trace_objects) { |
| 522 PrintF("Update object : %p %6d. Next address is %p\n", |
| 523 obj->address(), |
| 524 obj->Size(), |
| 525 obj->address() + obj->Size()); |
| 526 } |
498 } | 527 } |
499 RemoveDeadEntries(); | 528 RemoveDeadEntries(); |
| 529 if (FLAG_heap_profiler_trace_objects) { |
| 530 PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n", |
| 531 entries_map_.occupancy()); |
| 532 } |
500 } | 533 } |
501 | 534 |
502 | 535 |
| 536 namespace { |
| 537 |
| 538 |
| 539 struct HeapObjectInfo { |
| 540 HeapObjectInfo(HeapObject* obj, int expected_size) |
| 541 : obj(obj), |
| 542 expected_size(expected_size) { |
| 543 } |
| 544 |
| 545 HeapObject* obj; |
| 546 int expected_size; |
| 547 |
| 548 bool IsValid() const { return expected_size == obj->Size(); } |
| 549 |
| 550 void Print() const { |
| 551 if (expected_size == 0) { |
| 552 PrintF("Untracked object : %p %6d. Next address is %p\n", |
| 553 obj->address(), |
| 554 obj->Size(), |
| 555 obj->address() + obj->Size()); |
| 556 } else if (obj->Size() != expected_size) { |
| 557 PrintF("Wrong size %6d: %p %6d. Next address is %p\n", |
| 558 expected_size, |
| 559 obj->address(), |
| 560 obj->Size(), |
| 561 obj->address() + obj->Size()); |
| 562 } else { |
| 563 PrintF("Good object : %p %6d. Next address is %p\n", |
| 564 obj->address(), |
| 565 expected_size, |
| 566 obj->address() + obj->Size()); |
| 567 } |
| 568 } |
| 569 }; |
| 570 |
| 571 |
| 572 static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) { |
| 573 if (a->obj < b->obj) return -1; |
| 574 if (a->obj > b->obj) return 1; |
| 575 return 0; |
| 576 } |
| 577 |
| 578 |
| 579 } // namespace |
| 580 |
| 581 |
503 int HeapObjectsMap::FindUntrackedObjects() { | 582 int HeapObjectsMap::FindUntrackedObjects() { |
| 583 List<HeapObjectInfo> heap_objects(1000); |
| 584 |
504 HeapIterator iterator(heap_); | 585 HeapIterator iterator(heap_); |
505 int untracked = 0; | 586 int untracked = 0; |
506 for (HeapObject* obj = iterator.next(); | 587 for (HeapObject* obj = iterator.next(); |
507 obj != NULL; | 588 obj != NULL; |
508 obj = iterator.next()) { | 589 obj = iterator.next()) { |
509 HashMap::Entry* entry = entries_map_.Lookup( | 590 HashMap::Entry* entry = entries_map_.Lookup( |
510 obj->address(), ComputePointerHash(obj->address()), false); | 591 obj->address(), ComputePointerHash(obj->address()), false); |
511 if (entry == NULL) { | 592 if (entry == NULL) { |
512 untracked++; | 593 ++untracked; |
| 594 if (FLAG_heap_profiler_trace_objects) { |
| 595 heap_objects.Add(HeapObjectInfo(obj, 0)); |
| 596 } |
513 } else { | 597 } else { |
514 int entry_index = static_cast<int>( | 598 int entry_index = static_cast<int>( |
515 reinterpret_cast<intptr_t>(entry->value)); | 599 reinterpret_cast<intptr_t>(entry->value)); |
516 EntryInfo& entry_info = entries_.at(entry_index); | 600 EntryInfo& entry_info = entries_.at(entry_index); |
517 CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size)); | 601 if (FLAG_heap_profiler_trace_objects) { |
| 602 heap_objects.Add(HeapObjectInfo(obj, |
| 603 static_cast<int>(entry_info.size))); |
| 604 if (obj->Size() != static_cast<int>(entry_info.size)) |
| 605 ++untracked; |
| 606 } else { |
| 607 CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size)); |
| 608 } |
518 } | 609 } |
519 } | 610 } |
| 611 if (FLAG_heap_profiler_trace_objects) { |
| 612 PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n", |
| 613 entries_map_.occupancy()); |
| 614 heap_objects.Sort(comparator); |
| 615 int last_printed_object = -1; |
| 616 bool print_next_object = false; |
| 617 for (int i = 0; i < heap_objects.length(); ++i) { |
| 618 const HeapObjectInfo& object_info = heap_objects[i]; |
| 619 if (!object_info.IsValid()) { |
| 620 ++untracked; |
| 621 if (last_printed_object != i - 1) { |
| 622 if (i > 0) { |
| 623 PrintF("%d objects were skipped\n", i - 1 - last_printed_object); |
| 624 heap_objects[i - 1].Print(); |
| 625 } |
| 626 } |
| 627 object_info.Print(); |
| 628 last_printed_object = i; |
| 629 print_next_object = true; |
| 630 } else if (print_next_object) { |
| 631 object_info.Print(); |
| 632 print_next_object = false; |
| 633 last_printed_object = i; |
| 634 } |
| 635 } |
| 636 if (last_printed_object < heap_objects.length() - 1) { |
| 637 PrintF("Last %d objects were skipped\n", |
| 638 heap_objects.length() - 1 - last_printed_object); |
| 639 } |
| 640 PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n", |
| 641 entries_map_.occupancy()); |
| 642 } |
520 return untracked; | 643 return untracked; |
521 } | 644 } |
522 | 645 |
523 | 646 |
524 SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) { | 647 SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) { |
525 UpdateHeapObjectsMap(); | 648 UpdateHeapObjectsMap(); |
526 time_intervals_.Add(TimeInterval(next_id_)); | 649 time_intervals_.Add(TimeInterval(next_id_)); |
527 int prefered_chunk_size = stream->GetChunkSize(); | 650 int prefered_chunk_size = stream->GetChunkSize(); |
528 List<v8::HeapStatsUpdate> stats_buffer; | 651 List<v8::HeapStatsUpdate> stats_buffer; |
529 ASSERT(!entries_.is_empty()); | 652 ASSERT(!entries_.is_empty()); |
(...skipping 2215 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2745 writer_->AddString("\"<dummy>\""); | 2868 writer_->AddString("\"<dummy>\""); |
2746 for (int i = 1; i < sorted_strings.length(); ++i) { | 2869 for (int i = 1; i < sorted_strings.length(); ++i) { |
2747 writer_->AddCharacter(','); | 2870 writer_->AddCharacter(','); |
2748 SerializeString(sorted_strings[i]); | 2871 SerializeString(sorted_strings[i]); |
2749 if (writer_->aborted()) return; | 2872 if (writer_->aborted()) return; |
2750 } | 2873 } |
2751 } | 2874 } |
2752 | 2875 |
2753 | 2876 |
2754 } } // namespace v8::internal | 2877 } } // namespace v8::internal |
OLD | NEW |