Chromium Code Reviews

Side by Side Diff: Source/platform/heap/Heap.cpp

Issue 383743002: Oilpan: GC profiling. (Closed) Base URL: svn://svn.chromium.org/blink/trunk
Patch Set: TracedValue update Created 6 years, 4 months ago
1 /* 1 /*
2 * Copyright (C) 2013 Google Inc. All rights reserved. 2 * Copyright (C) 2013 Google Inc. All rights reserved.
3 * 3 *
4 * Redistribution and use in source and binary forms, with or without 4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions are 5 * modification, are permitted provided that the following conditions are
6 * met: 6 * met:
7 * 7 *
8 * * Redistributions of source code must retain the above copyright 8 * * Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer. 9 * notice, this list of conditions and the following disclaimer.
10 * * Redistributions in binary form must reproduce the above 10 * * Redistributions in binary form must reproduce the above
(...skipping 21 matching lines...)
32 #include "platform/heap/Heap.h" 32 #include "platform/heap/Heap.h"
33 33
34 #include "platform/ScriptForbiddenScope.h" 34 #include "platform/ScriptForbiddenScope.h"
35 #include "platform/TraceEvent.h" 35 #include "platform/TraceEvent.h"
36 #include "platform/heap/ThreadState.h" 36 #include "platform/heap/ThreadState.h"
37 #include "public/platform/Platform.h" 37 #include "public/platform/Platform.h"
38 #include "wtf/AddressSpaceRandomization.h" 38 #include "wtf/AddressSpaceRandomization.h"
39 #include "wtf/Assertions.h" 39 #include "wtf/Assertions.h"
40 #include "wtf/LeakAnnotations.h" 40 #include "wtf/LeakAnnotations.h"
41 #include "wtf/PassOwnPtr.h" 41 #include "wtf/PassOwnPtr.h"
42 #if ENABLE(GC_TRACING) 42 #if ENABLE(GC_PROFILE_MARKING)
43 #include "wtf/HashMap.h" 43 #include "wtf/HashMap.h"
44 #include "wtf/HashSet.h" 44 #include "wtf/HashSet.h"
45 #include "wtf/text/StringBuilder.h" 45 #include "wtf/text/StringBuilder.h"
46 #include "wtf/text/StringHash.h" 46 #include "wtf/text/StringHash.h"
47 #include <stdio.h> 47 #include <stdio.h>
48 #include <utility> 48 #include <utility>
49 #endif 49 #endif
50 #if ENABLE(GC_PROFILE_HEAP)
51 #include "platform/TracedValue.h"
52 #endif
50 53
51 #if OS(POSIX) 54 #if OS(POSIX)
52 #include <sys/mman.h> 55 #include <sys/mman.h>
53 #include <unistd.h> 56 #include <unistd.h>
54 #elif OS(WIN) 57 #elif OS(WIN)
55 #include <windows.h> 58 #include <windows.h>
56 #endif 59 #endif
57 60
58 namespace blink { 61 namespace blink {
59 62
60 #if ENABLE(GC_TRACING) 63 #if ENABLE(GC_PROFILE_MARKING)
61 static String classOf(const void* object) 64 static String classOf(const void* object)
62 { 65 {
63 const GCInfo* gcInfo = Heap::findGCInfo(reinterpret_cast<Address>(const_cast<void*>(object))); 66 const GCInfo* gcInfo = Heap::findGCInfo(reinterpret_cast<Address>(const_cast<void*>(object)));
64 if (gcInfo) 67 if (gcInfo)
65 return gcInfo->m_className; 68 return gcInfo->m_className;
66 69
67 return "unknown"; 70 return "unknown";
68 } 71 }
69 #endif 72 #endif
70 73
(...skipping 291 matching lines...)
362 MemoryRegion m_writable; 365 MemoryRegion m_writable;
363 }; 366 };
364 367
365 class GCScope { 368 class GCScope {
366 public: 369 public:
367 explicit GCScope(ThreadState::StackState stackState) 370 explicit GCScope(ThreadState::StackState stackState)
368 : m_state(ThreadState::current()) 371 : m_state(ThreadState::current())
369 , m_safePointScope(stackState) 372 , m_safePointScope(stackState)
370 , m_parkedAllThreads(false) 373 , m_parkedAllThreads(false)
371 { 374 {
372 TRACE_EVENT0("blink", "Heap::GCScope"); 375 TRACE_EVENT0("blink_gc", "Heap::GCScope");
373 const char* samplingState = TRACE_EVENT_GET_SAMPLING_STATE(); 376 const char* samplingState = TRACE_EVENT_GET_SAMPLING_STATE();
374 if (m_state->isMainThread()) 377 if (m_state->isMainThread())
375 TRACE_EVENT_SET_SAMPLING_STATE("blink", "BlinkGCWaiting"); 378 TRACE_EVENT_SET_SAMPLING_STATE("blink_gc", "BlinkGCWaiting");
376 379
377 m_state->checkThread(); 380 m_state->checkThread();
378 381
379 // FIXME: in an unlikely coincidence that two threads decide 382 // FIXME: in an unlikely coincidence that two threads decide
380 // to collect garbage at the same time, avoid doing two GCs in 383 // to collect garbage at the same time, avoid doing two GCs in
381 // a row. 384 // a row.
382 RELEASE_ASSERT(!m_state->isInGC()); 385 RELEASE_ASSERT(!m_state->isInGC());
383 RELEASE_ASSERT(!m_state->isSweepInProgress()); 386 RELEASE_ASSERT(!m_state->isSweepInProgress());
384 if (LIKELY(ThreadState::stopThreads())) { 387 if (LIKELY(ThreadState::stopThreads())) {
385 m_parkedAllThreads = true; 388 m_parkedAllThreads = true;
(...skipping 119 matching lines...)
505 { 508 {
506 heapObjectHeader()->setDeadMark(); 509 heapObjectHeader()->setDeadMark();
507 } 510 }
508 511
509 template<typename Header> 512 template<typename Header>
510 void LargeHeapObject<Header>::checkAndMarkPointer(Visitor* visitor, Address address) 513 void LargeHeapObject<Header>::checkAndMarkPointer(Visitor* visitor, Address address)
511 { 514 {
512 ASSERT(contains(address)); 515 ASSERT(contains(address));
513 if (!objectContains(address) || heapObjectHeader()->hasDeadMark()) 516 if (!objectContains(address) || heapObjectHeader()->hasDeadMark())
514 return; 517 return;
515 #if ENABLE(GC_TRACING) 518 #if ENABLE(GC_PROFILE_MARKING)
516 visitor->setHostInfo(&address, "stack"); 519 visitor->setHostInfo(&address, "stack");
517 #endif 520 #endif
518 mark(visitor); 521 mark(visitor);
519 } 522 }
520 523
521 template<> 524 template<>
522 void LargeHeapObject<FinalizedHeapObjectHeader>::mark(Visitor* visitor) 525 void LargeHeapObject<FinalizedHeapObjectHeader>::mark(Visitor* visitor)
523 { 526 {
524 if (heapObjectHeader()->hasVTable() && !vTableInitialized(payload())) 527 if (heapObjectHeader()->hasVTable() && !vTableInitialized(payload()))
525 visitor->markConservatively(heapObjectHeader()); 528 visitor->markConservatively(heapObjectHeader());
(...skipping 129 matching lines...)
655 for (LargeHeapObject<Header>* current = m_firstLargeHeapObject; current; current = current->next()) { 658 for (LargeHeapObject<Header>* current = m_firstLargeHeapObject; current; current = current->next()) {
656 // Check that large pages are blinkPageSize aligned (modulo the 659 // Check that large pages are blinkPageSize aligned (modulo the
657 // osPageSize for the guard page). 660 // osPageSize for the guard page).
658 ASSERT(reinterpret_cast<Address>(current) - osPageSize() == roundToBlinkPageStart(reinterpret_cast<Address>(current))); 661 ASSERT(reinterpret_cast<Address>(current) - osPageSize() == roundToBlinkPageStart(reinterpret_cast<Address>(current)));
659 if (current->contains(address)) 662 if (current->contains(address))
660 return current; 663 return current;
661 } 664 }
662 return 0; 665 return 0;
663 } 666 }
664 667
665 #if ENABLE(GC_TRACING) 668 #if ENABLE(GC_PROFILE_MARKING)
666 template<typename Header> 669 template<typename Header>
667 const GCInfo* ThreadHeap<Header>::findGCInfoOfLargeHeapObject(Address address) 670 const GCInfo* ThreadHeap<Header>::findGCInfoOfLargeHeapObject(Address address)
668 { 671 {
669 for (LargeHeapObject<Header>* current = m_firstLargeHeapObject; current; current = current->next()) { 672 for (LargeHeapObject<Header>* current = m_firstLargeHeapObject; current; current = current->next()) {
670 if (current->contains(address)) 673 if (current->contains(address))
671 return current->gcInfo(); 674 return current->gcInfo();
672 } 675 }
673 return 0; 676 return 0;
674 } 677 }
675 #endif 678 #endif
676 679
680 #if ENABLE(GC_PROFILE_HEAP)
681 #define GC_PROFILE_HEAP_PAGE_SNAPSHOT_THRESHOLD 0
682 template<typename Header>
683 void ThreadHeap<Header>::snapshot(TracedValue* json, ThreadState::SnapshotInfo* info)
684 {
685 size_t previousPageCount = info->pageCount;
686
687 json->beginArray("pages");
688 for (HeapPage<Header>* page = m_firstPage; page; page = page->next(), ++info->pageCount) {
689 // FIXME: To limit the size of the snapshot we only output "threshold" many page snapshots.
690 if (info->pageCount < GC_PROFILE_HEAP_PAGE_SNAPSHOT_THRESHOLD) {
691 json->beginArray();
692 json->pushInteger(reinterpret_cast<intptr_t>(page));
693 page->snapshot(json, info);
694 json->endArray();
695 } else {
696 page->snapshot(0, info);
697 }
698 }
699 json->endArray();
700
701 json->beginArray("largeObjects");
702 for (LargeHeapObject<Header>* current = m_firstLargeHeapObject; current; current = current->next()) {
703 json->beginDictionary();
704 current->snapshot(json, info);
705 json->endDictionary();
706 }
707 json->endArray();
708
709 json->setInteger("pageCount", info->pageCount - previousPageCount);
710 }
711 #endif
712
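For orientation, a rough sketch of the per-heap structure that ThreadHeap<Header>::snapshot above would emit, assuming the TracedValue begin/end array and dictionary calls map directly onto nested JSON in the trace output (the values below are illustrative only, not taken from a real trace):

    {
      "pages": [
        [ 140234567000064, 24, 3, 48, 7, 32 ]
      ],
      "largeObjects": [
        { "class": 12, "size": 1048576, "isMarked": 1 }
      ],
      "pageCount": 2
    }

Each per-page array starts with the page address, then lists every block's encodedSize followed by its class tag when the block is not free (free blocks contribute only their size). With GC_PROFILE_HEAP_PAGE_SNAPSHOT_THRESHOLD defined as 0, the per-page arrays are never emitted; page->snapshot(0, info) still runs so the aggregate counters in SnapshotInfo get updated.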
677 template<typename Header> 713 template<typename Header>
678 void ThreadHeap<Header>::addToFreeList(Address address, size_t size) 714 void ThreadHeap<Header>::addToFreeList(Address address, size_t size)
679 { 715 {
680 ASSERT(heapPageFromAddress(address)); 716 ASSERT(heapPageFromAddress(address));
681 ASSERT(heapPageFromAddress(address + size - 1)); 717 ASSERT(heapPageFromAddress(address + size - 1));
682 ASSERT(size < blinkPagePayloadSize()); 718 ASSERT(size < blinkPagePayloadSize());
683 // The free list entries are only pointer aligned (but when we allocate 719 // The free list entries are only pointer aligned (but when we allocate
684 // from them we are 8 byte aligned due to the header size). 720 // from them we are 8 byte aligned due to the header size).
685 ASSERT(!((reinterpret_cast<uintptr_t>(address) + sizeof(Header)) & allocationMask)); 721 ASSERT(!((reinterpret_cast<uintptr_t>(address) + sizeof(Header)) & allocationMask));
686 ASSERT(!(size & allocationMask)); 722 ASSERT(!(size & allocationMask));
(...skipping 629 matching lines...)
1316 } 1352 }
1317 1353
1318 template<typename Header> 1354 template<typename Header>
1319 void HeapPage<Header>::checkAndMarkPointer(Visitor* visitor, Address address) 1355 void HeapPage<Header>::checkAndMarkPointer(Visitor* visitor, Address address)
1320 { 1356 {
1321 ASSERT(contains(address)); 1357 ASSERT(contains(address));
1322 Header* header = findHeaderFromAddress(address); 1358 Header* header = findHeaderFromAddress(address);
1323 if (!header || header->hasDeadMark()) 1359 if (!header || header->hasDeadMark())
1324 return; 1360 return;
1325 1361
1326 #if ENABLE(GC_TRACING) 1362 #if ENABLE(GC_PROFILE_MARKING)
1327 visitor->setHostInfo(&address, "stack"); 1363 visitor->setHostInfo(&address, "stack");
1328 #endif 1364 #endif
1329 if (hasVTable(header) && !vTableInitialized(header->payload())) 1365 if (hasVTable(header) && !vTableInitialized(header->payload()))
1330 visitor->markConservatively(header); 1366 visitor->markConservatively(header);
1331 else 1367 else
1332 visitor->mark(header, traceCallback(header)); 1368 visitor->mark(header, traceCallback(header));
1333 } 1369 }
1334 1370
1335 #if ENABLE(GC_TRACING) 1371 #if ENABLE(GC_PROFILE_MARKING)
1336 template<typename Header> 1372 template<typename Header>
1337 const GCInfo* HeapPage<Header>::findGCInfo(Address address) 1373 const GCInfo* HeapPage<Header>::findGCInfo(Address address)
1338 { 1374 {
1339 if (address < payload()) 1375 if (address < payload())
1340 return 0; 1376 return 0;
1341 1377
1342 if (gcInfo()) // for non FinalizedObjectHeader 1378 if (gcInfo()) // for non FinalizedObjectHeader
1343 return gcInfo(); 1379 return gcInfo();
1344 1380
1345 Header* header = findHeaderFromAddress(address); 1381 Header* header = findHeaderFromAddress(address);
1346 if (!header) 1382 if (!header)
1347 return 0; 1383 return 0;
1348 1384
1349 return header->gcInfo(); 1385 return header->gcInfo();
1350 } 1386 }
1351 #endif 1387 #endif
1352 1388
1389 #if ENABLE(GC_PROFILE_HEAP)
1390 template<typename Header>
1391 void HeapPage<Header>::snapshot(TracedValue* json, ThreadState::SnapshotInfo* info)
1392 {
1393 Header* header = 0;
1394 for (Address addr = payload(); addr < end(); addr += header->size()) {
1395 header = reinterpret_cast<Header*>(addr);
1396 if (json)
1397 json->pushInteger(header->encodedSize());
1398 if (header->isFree()) {
1399 info->freeSize += header->size();
1400 continue;
1401 }
1402
1403 size_t tag = info->getClassTag(header->gcInfo());
1404 size_t age = header->age();
1405 if (json)
1406 json->pushInteger(tag);
1407 if (header->isMarked()) {
1408 info->liveCount[tag] += 1;
1409 info->liveSize += header->size();
1410 // Count objects that are live when promoted to the final generation.
1411 if (age == maxHeapObjectAge - 1)
1412 info->generations[tag][maxHeapObjectAge] += 1;
1413 header->incAge();
1414 } else {
1415 info->deadCount[tag] += 1;
1416 info->deadSize += header->size();
1417 // Count objects that are dead before the final generation.
1418 if (age < maxHeapObjectAge)
1419 info->generations[tag][age] += 1;
1420 }
1421 }
1422 }
1423 #endif
1424
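As a worked example of the age bookkeeping in HeapPage<Header>::snapshot above (LargeHeapObject<Header>::snapshot below repeats the same pattern), assume for illustration only that maxHeapObjectAge is 7; the constant itself is defined elsewhere, presumably in Heap.h. An object with class tag T starts at age 0 and has incAge() called at every snapshot in which it is found marked, so a long-lived object ages 0, 1, 2, ... At the snapshot where its age is maxHeapObjectAge - 1 (6 here) and it is still marked, generations[T][maxHeapObjectAge] is incremented, counting it as promoted to the final generation. If it is instead found unmarked at some age a < maxHeapObjectAge, generations[T][a] is incremented, recording the generation in which it died; liveCount/liveSize and deadCount/deadSize accumulate the corresponding totals.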
1353 #if defined(ADDRESS_SANITIZER) 1425 #if defined(ADDRESS_SANITIZER)
1354 template<typename Header> 1426 template<typename Header>
1355 void HeapPage<Header>::poisonUnmarkedObjects() 1427 void HeapPage<Header>::poisonUnmarkedObjects()
1356 { 1428 {
1357 for (Address headerAddress = payload(); headerAddress < end(); ) { 1429 for (Address headerAddress = payload(); headerAddress < end(); ) {
1358 Header* header = reinterpret_cast<Header*>(headerAddress); 1430 Header* header = reinterpret_cast<Header*>(headerAddress);
1359 ASSERT(header->size() < blinkPagePayloadSize()); 1431 ASSERT(header->size() < blinkPagePayloadSize());
1360 1432
1361 if (!header->isFree() && !header->isMarked()) 1433 if (!header->isFree() && !header->isMarked())
1362 ASAN_POISON_MEMORY_REGION(header->payload(), header->payloadSize()); 1434 ASAN_POISON_MEMORY_REGION(header->payload(), header->payloadSize());
(...skipping 41 matching lines...)
1404 return header->hasVTable(); 1476 return header->hasVTable();
1405 } 1477 }
1406 1478
1407 template<typename Header> 1479 template<typename Header>
1408 void LargeHeapObject<Header>::getStats(HeapStats& stats) 1480 void LargeHeapObject<Header>::getStats(HeapStats& stats)
1409 { 1481 {
1410 stats.increaseAllocatedSpace(size()); 1482 stats.increaseAllocatedSpace(size());
1411 stats.increaseObjectSpace(payloadSize()); 1483 stats.increaseObjectSpace(payloadSize());
1412 } 1484 }
1413 1485
1486 #if ENABLE(GC_PROFILE_HEAP)
1487 template<typename Header>
1488 void LargeHeapObject<Header>::snapshot(TracedValue* json, ThreadState::SnapshotInfo* info)
1489 {
1490 Header* header = heapObjectHeader();
1491 size_t tag = info->getClassTag(header->gcInfo());
1492 size_t age = header->age();
1493 if (isMarked()) {
1494 info->liveCount[tag] += 1;
1495 info->liveSize += header->size();
1496 // Count objects that are live when promoted to the final generation.
1497 if (age == maxHeapObjectAge - 1)
1498 info->generations[tag][maxHeapObjectAge] += 1;
1499 header->incAge();
1500 } else {
1501 info->deadCount[tag] += 1;
1502 info->deadSize += header->size();
1503 // Count objects that are dead before the final generation.
1504 if (age < maxHeapObjectAge)
1505 info->generations[tag][age] += 1;
1506 }
1507
1508 if (json) {
1509 json->setInteger("class", tag);
1510 json->setInteger("size", header->size());
1511 json->setInteger("isMarked", isMarked());
1512 }
1513 }
1514 #endif
1515
1414 template<typename Entry> 1516 template<typename Entry>
1415 void HeapExtentCache<Entry>::flush() 1517 void HeapExtentCache<Entry>::flush()
1416 { 1518 {
1417 if (m_hasEntries) { 1519 if (m_hasEntries) {
1418 for (int i = 0; i < numberOfEntries; i++) 1520 for (int i = 0; i < numberOfEntries; i++)
1419 m_entries[i] = Entry(); 1521 m_entries[i] = Entry();
1420 m_hasEntries = false; 1522 m_hasEntries = false;
1421 } 1523 }
1422 } 1524 }
1423 1525
(...skipping 118 matching lines...)
1542 return true; 1644 return true;
1543 } 1645 }
1544 if (Mode == ThreadLocalMarking && (heapPage->orphaned() || !heapPage->terminating())) 1646 if (Mode == ThreadLocalMarking && (heapPage->orphaned() || !heapPage->terminating()))
1545 return true; 1647 return true;
1546 // For WeaknessProcessing we should never reach orphaned pages since 1648 // For WeaknessProcessing we should never reach orphaned pages since
1547 // they should never be registered as objects on orphaned pages are not 1649 // they should never be registered as objects on orphaned pages are not
1548 // traced. We cannot assert this here since we might have an off-heap 1650 // traced. We cannot assert this here since we might have an off-heap
1549 // collection. However we assert it in Heap::pushWeakObjectPointerCallback. 1651 // collection. However we assert it in Heap::pushWeakObjectPointerCallback.
1550 1652
1551 VisitorCallback callback = item->callback(); 1653 VisitorCallback callback = item->callback();
1552 #if ENABLE(GC_TRACING) 1654 #if ENABLE(GC_PROFILE_MARKING)
1553 if (ThreadState::isAnyThreadInGC()) // weak-processing will also use popAndInvokeCallback 1655 if (ThreadState::isAnyThreadInGC()) // weak-processing will also use popAndInvokeCallback
1554 visitor->setHostInfo(item->object(), classOf(item->object())); 1656 visitor->setHostInfo(item->object(), classOf(item->object()));
1555 #endif 1657 #endif
1556 callback(visitor, item->object()); 1658 callback(visitor, item->object());
1557 1659
1558 return true; 1660 return true;
1559 } 1661 }
1560 1662
1561 void CallbackStack::invokeCallbacks(CallbackStack** first, Visitor* visitor) 1663 void CallbackStack::invokeCallbacks(CallbackStack** first, Visitor* visitor)
1562 { 1664 {
(...skipping 54 matching lines...)
1617 } 1719 }
1618 if (m_next) 1720 if (m_next)
1619 return m_next->hasCallbackForObject(object); 1721 return m_next->hasCallbackForObject(object);
1620 1722
1621 return false; 1723 return false;
1622 } 1724 }
1623 #endif 1725 #endif
1624 1726
1625 class MarkingVisitor : public Visitor { 1727 class MarkingVisitor : public Visitor {
1626 public: 1728 public:
1627 #if ENABLE(GC_TRACING) 1729 #if ENABLE(GC_PROFILE_MARKING)
1628 typedef HashSet<uintptr_t> LiveObjectSet; 1730 typedef HashSet<uintptr_t> LiveObjectSet;
1629 typedef HashMap<String, LiveObjectSet> LiveObjectMap; 1731 typedef HashMap<String, LiveObjectSet> LiveObjectMap;
1630 typedef HashMap<uintptr_t, std::pair<uintptr_t, String> > ObjectGraph; 1732 typedef HashMap<uintptr_t, std::pair<uintptr_t, String> > ObjectGraph;
1631 #endif 1733 #endif
1632 1734
1633 inline void visitHeader(HeapObjectHeader* header, const void* objectPointer, TraceCallback callback) 1735 inline void visitHeader(HeapObjectHeader* header, const void* objectPointer, TraceCallback callback)
1634 { 1736 {
1635 ASSERT(header); 1737 ASSERT(header);
1636 // Check that we are not marking objects that are outside the heap by calling Heap::contains. 1738 // Check that we are not marking objects that are outside the heap by calling Heap::contains.
1637 // However we cannot call Heap::contains when outside a GC and we call mark when doing weakness 1739 // However we cannot call Heap::contains when outside a GC and we call mark when doing weakness
1638 // for ephemerons. Hence we only check when called within. 1740 // for ephemerons. Hence we only check when called within.
1639 ASSERT(!ThreadState::isAnyThreadInGC() || Heap::containedInHeapOrOrphanedPage(header)); 1741 ASSERT(!ThreadState::isAnyThreadInGC() || Heap::containedInHeapOrOrphanedPage(header));
1640 ASSERT(objectPointer); 1742 ASSERT(objectPointer);
1641 if (header->isMarked()) 1743 if (header->isMarked())
1642 return; 1744 return;
1643 header->mark(); 1745 header->mark();
1644 #if ENABLE(GC_TRACING) 1746 #if ENABLE(GC_PROFILE_MARKING)
1645 MutexLocker locker(objectGraphMutex()); 1747 MutexLocker locker(objectGraphMutex());
1646 String className(classOf(objectPointer)); 1748 String className(classOf(objectPointer));
1647 { 1749 {
1648 LiveObjectMap::AddResult result = currentlyLive().add(className, LiveObjectSet()); 1750 LiveObjectMap::AddResult result = currentlyLive().add(className, LiveObjectSet());
1649 result.storedValue->value.add(reinterpret_cast<uintptr_t>(objectPointer)); 1751 result.storedValue->value.add(reinterpret_cast<uintptr_t>(objectPointer));
1650 } 1752 }
1651 ObjectGraph::AddResult result = objectGraph().add(reinterpret_cast<uintptr_t>(objectPointer), std::make_pair(reinterpret_cast<uintptr_t>(m_hostObject), m_hostName)); 1753 ObjectGraph::AddResult result = objectGraph().add(reinterpret_cast<uintptr_t>(objectPointer), std::make_pair(reinterpret_cast<uintptr_t>(m_hostObject), m_hostName));
1652 ASSERT(result.isNewEntry); 1754 ASSERT(result.isNewEntry);
1653 // fprintf(stderr, "%s[%p] -> %s[%p]\n", m_hostName.ascii().data(), m_hostObject, className.ascii().data(), objectPointer); 1755 // fprintf(stderr, "%s[%p] -> %s[%p]\n", m_hostName.ascii().data(), m_hostObject, className.ascii().data(), objectPointer);
1654 #endif 1756 #endif
(...skipping 85 matching lines...)
1740 visitHeader(header, header->payload(), callback); \ 1842 visitHeader(header, header->payload(), callback); \
1741 } \ 1843 } \
1742 virtual bool isMarked(const Type* objectPointer) OVERRIDE \ 1844 virtual bool isMarked(const Type* objectPointer) OVERRIDE \
1743 { \ 1845 { \
1744 return HeapObjectHeader::fromPayload(objectPointer)->isMarked(); \ 1846 return HeapObjectHeader::fromPayload(objectPointer)->isMarked(); \
1745 } 1847 }
1746 1848
1747 FOR_EACH_TYPED_HEAP(DEFINE_VISITOR_METHODS) 1849 FOR_EACH_TYPED_HEAP(DEFINE_VISITOR_METHODS)
1748 #undef DEFINE_VISITOR_METHODS 1850 #undef DEFINE_VISITOR_METHODS
1749 1851
1750 #if ENABLE(GC_TRACING) 1852 #if ENABLE(GC_PROFILE_MARKING)
1751 void reportStats() 1853 void reportStats()
1752 { 1854 {
1753 fprintf(stderr, "\n---------- AFTER MARKING -------------------\n"); 1855 fprintf(stderr, "\n---------- AFTER MARKING -------------------\n");
1754 for (LiveObjectMap::iterator it = currentlyLive().begin(), end = currentlyLive().end(); it != end; ++it) { 1856 for (LiveObjectMap::iterator it = currentlyLive().begin(), end = currentlyLive().end(); it != end; ++it) {
1755 fprintf(stderr, "%s %u", it->key.ascii().data(), it->value.size()); 1857 fprintf(stderr, "%s %u", it->key.ascii().data(), it->value.size());
1756 1858
1757 if (it->key == "blink::Document") 1859 if (it->key == "blink::Document")
1758 reportStillAlive(it->value, previouslyLive().get(it->key)); 1860 reportStillAlive(it->value, previouslyLive().get(it->key));
1759 1861
1760 fprintf(stderr, "\n"); 1862 fprintf(stderr, "\n");
(...skipping 170 matching lines...)
1931 2033
1932 #if !ENABLE(ASSERT) 2034 #if !ENABLE(ASSERT)
1933 s_heapDoesNotContainCache->addEntry(address, true); 2035 s_heapDoesNotContainCache->addEntry(address, true);
1934 #else 2036 #else
1935 if (!s_heapDoesNotContainCache->lookup(address)) 2037 if (!s_heapDoesNotContainCache->lookup(address))
1936 s_heapDoesNotContainCache->addEntry(address, true); 2038 s_heapDoesNotContainCache->addEntry(address, true);
1937 #endif 2039 #endif
1938 return 0; 2040 return 0;
1939 } 2041 }
1940 2042
1941 #if ENABLE(GC_TRACING) 2043 #if ENABLE(GC_PROFILE_MARKING)
1942 const GCInfo* Heap::findGCInfo(Address address) 2044 const GCInfo* Heap::findGCInfo(Address address)
1943 { 2045 {
1944 return ThreadState::findGCInfoFromAllThreads(address); 2046 return ThreadState::findGCInfoFromAllThreads(address);
1945 } 2047 }
2048 #endif
1946 2049
2050 #if ENABLE(GC_PROFILE_MARKING)
1947 void Heap::dumpPathToObjectOnNextGC(void* p) 2051 void Heap::dumpPathToObjectOnNextGC(void* p)
1948 { 2052 {
1949 static_cast<MarkingVisitor*>(s_markingVisitor)->dumpPathToObjectOnNextGC(p); 2053 static_cast<MarkingVisitor*>(s_markingVisitor)->dumpPathToObjectOnNextGC(p);
1950 } 2054 }
1951 2055
1952 String Heap::createBacktraceString() 2056 String Heap::createBacktraceString()
1953 { 2057 {
1954 int framesToShow = 3; 2058 int framesToShow = 3;
1955 int stackFrameSize = 16; 2059 int stackFrameSize = 16;
1956 ASSERT(stackFrameSize >= framesToShow); 2060 ASSERT(stackFrameSize >= framesToShow);
(...skipping 104 matching lines...)
2061 if (!gcScope.allThreadsParked()) { 2165 if (!gcScope.allThreadsParked()) {
2062 ThreadState::current()->setGCRequested(); 2166 ThreadState::current()->setGCRequested();
2063 return; 2167 return;
2064 } 2168 }
2065 2169
2066 if (state->isMainThread()) 2170 if (state->isMainThread())
2067 ScriptForbiddenScope::enter(); 2171 ScriptForbiddenScope::enter();
2068 2172
2069 s_lastGCWasConservative = false; 2173 s_lastGCWasConservative = false;
2070 2174
2071 TRACE_EVENT0("blink", "Heap::collectGarbage"); 2175 TRACE_EVENT0("blink_gc", "Heap::collectGarbage");
2072 TRACE_EVENT_SCOPED_SAMPLING_STATE("blink", "BlinkGC"); 2176 TRACE_EVENT_SCOPED_SAMPLING_STATE("blink_gc", "BlinkGC");
2073 double timeStamp = WTF::currentTimeMS(); 2177 double timeStamp = WTF::currentTimeMS();
2074 #if ENABLE(GC_TRACING) 2178 #if ENABLE(GC_PROFILE_MARKING)
2075 static_cast<MarkingVisitor*>(s_markingVisitor)->objectGraph().clear(); 2179 static_cast<MarkingVisitor*>(s_markingVisitor)->objectGraph().clear();
2076 #endif 2180 #endif
2077 2181
2078 // Disallow allocation during garbage collection (but not 2182 // Disallow allocation during garbage collection (but not
2079 // during the finalization that happens when the gcScope is 2183 // during the finalization that happens when the gcScope is
2080 // torn down). 2184 // torn down).
2081 NoAllocationScope<AnyThread> noAllocationScope; 2185 NoAllocationScope<AnyThread> noAllocationScope;
2082 2186
2083 prepareForGC(); 2187 prepareForGC();
2084 2188
(...skipping 17 matching lines...)
2102 2206
2103 // After a global marking we know that any orphaned page that was not reached 2207 // After a global marking we know that any orphaned page that was not reached
2104 // cannot be reached in a subsequent GC. This is due to a thread either having 2208 // cannot be reached in a subsequent GC. This is due to a thread either having
2105 // swept its heap or having done a "poor mans sweep" in prepareForGC which marks 2209 // swept its heap or having done a "poor mans sweep" in prepareForGC which marks
2106 // objects that are dead, but not swept in the previous GC as dead. In this GC's 2210 // objects that are dead, but not swept in the previous GC as dead. In this GC's
2107 // marking we check that any object marked as dead is not traced. E.g. via a 2211 // marking we check that any object marked as dead is not traced. E.g. via a
2108 // conservatively found pointer or a programming error with an object containing 2212 // conservatively found pointer or a programming error with an object containing
2109 // a dangling pointer. 2213 // a dangling pointer.
2110 orphanedPagePool()->decommitOrphanedPages(); 2214 orphanedPagePool()->decommitOrphanedPages();
2111 2215
2112 #if ENABLE(GC_TRACING) 2216 #if ENABLE(GC_PROFILE_MARKING)
2113 static_cast<MarkingVisitor*>(s_markingVisitor)->reportStats(); 2217 static_cast<MarkingVisitor*>(s_markingVisitor)->reportStats();
2114 #endif 2218 #endif
2115 2219
2116 if (blink::Platform::current()) { 2220 if (blink::Platform::current()) {
2117 uint64_t objectSpaceSize; 2221 uint64_t objectSpaceSize;
2118 uint64_t allocatedSpaceSize; 2222 uint64_t allocatedSpaceSize;
2119 getHeapSpaceSize(&objectSpaceSize, &allocatedSpaceSize); 2223 getHeapSpaceSize(&objectSpaceSize, &allocatedSpaceSize);
2120 blink::Platform::current()->histogramCustomCounts("BlinkGC.CollectGarbage", WTF::currentTimeMS() - timeStamp, 0, 10 * 1000, 50); 2224 blink::Platform::current()->histogramCustomCounts("BlinkGC.CollectGarbage", WTF::currentTimeMS() - timeStamp, 0, 10 * 1000, 50);
2121 blink::Platform::current()->histogramCustomCounts("BlinkGC.TotalObjectSpace", objectSpaceSize / 1024, 0, 4 * 1024 * 1024, 50); 2225 blink::Platform::current()->histogramCustomCounts("BlinkGC.TotalObjectSpace", objectSpaceSize / 1024, 0, 4 * 1024 * 1024, 50);
2122 blink::Platform::current()->histogramCustomCounts("BlinkGC.TotalAllocatedSpace", allocatedSpaceSize / 1024, 0, 4 * 1024 * 1024, 50); 2226 blink::Platform::current()->histogramCustomCounts("BlinkGC.TotalAllocatedSpace", allocatedSpaceSize / 1024, 0, 4 * 1024 * 1024, 50);
(...skipping 156 matching lines...) Expand 10 before | Expand all | Expand 10 after
2279 Visitor* Heap::s_markingVisitor; 2383 Visitor* Heap::s_markingVisitor;
2280 CallbackStack* Heap::s_markingStack; 2384 CallbackStack* Heap::s_markingStack;
2281 CallbackStack* Heap::s_weakCallbackStack; 2385 CallbackStack* Heap::s_weakCallbackStack;
2282 CallbackStack* Heap::s_ephemeronStack; 2386 CallbackStack* Heap::s_ephemeronStack;
2283 HeapDoesNotContainCache* Heap::s_heapDoesNotContainCache; 2387 HeapDoesNotContainCache* Heap::s_heapDoesNotContainCache;
2284 bool Heap::s_shutdownCalled = false; 2388 bool Heap::s_shutdownCalled = false;
2285 bool Heap::s_lastGCWasConservative = false; 2389 bool Heap::s_lastGCWasConservative = false;
2286 FreePagePool* Heap::s_freePagePool; 2390 FreePagePool* Heap::s_freePagePool;
2287 OrphanedPagePool* Heap::s_orphanedPagePool; 2391 OrphanedPagePool* Heap::s_orphanedPagePool;
2288 } 2392 }