OLD | NEW |
1 /* | 1 /* |
2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
3 * | 3 * |
4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
6 * met: | 6 * met: |
7 * | 7 * |
8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
(...skipping 503 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
514 { | 514 { |
515 ASSERT(contains(address)); | 515 ASSERT(contains(address)); |
516 if (!objectContains(address) || heapObjectHeader()->hasDeadMark()) | 516 if (!objectContains(address) || heapObjectHeader()->hasDeadMark()) |
517 return; | 517 return; |
518 #if ENABLE(GC_PROFILE_MARKING) | 518 #if ENABLE(GC_PROFILE_MARKING) |
519 visitor->setHostInfo(&address, "stack"); | 519 visitor->setHostInfo(&address, "stack"); |
520 #endif | 520 #endif |
521 mark(visitor); | 521 mark(visitor); |
522 } | 522 } |
523 | 523 |
| 524 #if ENABLE(ASSERT) |
| 525 static bool isUninitializedMemory(void* objectPointer, size_t objectSize) |
| 526 { |
| 527 // Scan through the object's fields and check that they are all zero. |
| 528 Address* objectFields = reinterpret_cast<Address*>(objectPointer); |
| 529 for (size_t i = 0; i < objectSize / sizeof(Address); ++i) { |
| 530 if (objectFields[i] != 0) |
| 531 return false; |
| 532 } |
| 533 return true; |
| 534 } |
| 535 #endif |
| 536 |
524 template<> | 537 template<> |
525 void LargeHeapObject<FinalizedHeapObjectHeader>::mark(Visitor* visitor) | 538 void LargeHeapObject<FinalizedHeapObjectHeader>::mark(Visitor* visitor) |
526 { | 539 { |
527 if (heapObjectHeader()->hasVTable() && !vTableInitialized(payload())) | 540 if (heapObjectHeader()->hasVTable() && !vTableInitialized(payload())) { |
528 visitor->markConservatively(heapObjectHeader()); | 541 FinalizedHeapObjectHeader* header = heapObjectHeader(); |
529 else | 542 visitor->markNoTracing(header); |
| 543 ASSERT(isUninitializedMemory(header->payload(), header->payloadSize())); |
| 544 } else { |
530 visitor->mark(heapObjectHeader(), heapObjectHeader()->traceCallback()); | 545 visitor->mark(heapObjectHeader(), heapObjectHeader()->traceCallback()); |
| 546 } |
531 } | 547 } |
532 | 548 |
533 template<> | 549 template<> |
534 void LargeHeapObject<HeapObjectHeader>::mark(Visitor* visitor) | 550 void LargeHeapObject<HeapObjectHeader>::mark(Visitor* visitor) |
535 { | 551 { |
536 ASSERT(gcInfo()); | 552 ASSERT(gcInfo()); |
537 if (gcInfo()->hasVTable() && !vTableInitialized(payload())) | 553 if (gcInfo()->hasVTable() && !vTableInitialized(payload())) { |
538 visitor->markConservatively(heapObjectHeader()); | 554 HeapObjectHeader* header = heapObjectHeader(); |
539 else | 555 visitor->markNoTracing(header); |
| 556 ASSERT(isUninitializedMemory(header->payload(), header->payloadSize())); |
| 557 } else { |
540 visitor->mark(heapObjectHeader(), gcInfo()->m_trace); | 558 visitor->mark(heapObjectHeader(), gcInfo()->m_trace); |
| 559 } |
541 } | 560 } |
542 | 561 |
543 template<> | 562 template<> |
544 void LargeHeapObject<FinalizedHeapObjectHeader>::finalize() | 563 void LargeHeapObject<FinalizedHeapObjectHeader>::finalize() |
545 { | 564 { |
546 heapObjectHeader()->finalize(); | 565 heapObjectHeader()->finalize(); |
547 } | 566 } |
548 | 567 |
549 template<> | 568 template<> |
550 void LargeHeapObject<HeapObjectHeader>::finalize() | 569 void LargeHeapObject<HeapObjectHeader>::finalize() |
(...skipping 804 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1355 void HeapPage<Header>::checkAndMarkPointer(Visitor* visitor, Address address) | 1374 void HeapPage<Header>::checkAndMarkPointer(Visitor* visitor, Address address) |
1356 { | 1375 { |
1357 ASSERT(contains(address)); | 1376 ASSERT(contains(address)); |
1358 Header* header = findHeaderFromAddress(address); | 1377 Header* header = findHeaderFromAddress(address); |
1359 if (!header || header->hasDeadMark()) | 1378 if (!header || header->hasDeadMark()) |
1360 return; | 1379 return; |
1361 | 1380 |
1362 #if ENABLE(GC_PROFILE_MARKING) | 1381 #if ENABLE(GC_PROFILE_MARKING) |
1363 visitor->setHostInfo(&address, "stack"); | 1382 visitor->setHostInfo(&address, "stack"); |
1364 #endif | 1383 #endif |
1365 if (hasVTable(header) && !vTableInitialized(header->payload())) | 1384 if (hasVTable(header) && !vTableInitialized(header->payload())) { |
1366 visitor->markConservatively(header); | 1385 visitor->markNoTracing(header); |
1367 else | 1386 ASSERT(isUninitializedMemory(header->payload(), header->payloadSize())); |
| 1387 } else { |
1368 visitor->mark(header, traceCallback(header)); | 1388 visitor->mark(header, traceCallback(header)); |
| 1389 } |
1369 } | 1390 } |
1370 | 1391 |
1371 #if ENABLE(GC_PROFILE_MARKING) | 1392 #if ENABLE(GC_PROFILE_MARKING) |
1372 template<typename Header> | 1393 template<typename Header> |
1373 const GCInfo* HeapPage<Header>::findGCInfo(Address address) | 1394 const GCInfo* HeapPage<Header>::findGCInfo(Address address) |
1374 { | 1395 { |
1375 if (address < payload()) | 1396 if (address < payload()) |
1376 return 0; | 1397 return 0; |
1377 | 1398 |
1378 if (gcInfo()) // for non FinalizedObjectHeader | 1399 if (gcInfo()) // for non FinalizedObjectHeader |
(...skipping 394 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1773 } | 1794 } |
1774 | 1795 |
1775 virtual void mark(const void* objectPointer, TraceCallback callback) OVERRIDE | 1796 virtual void mark(const void* objectPointer, TraceCallback callback) OVERRIDE |
1776 { | 1797 { |
1777 if (!objectPointer) | 1798 if (!objectPointer) |
1778 return; | 1799 return; |
1779 FinalizedHeapObjectHeader* header = FinalizedHeapObjectHeader::fromPayload(objectPointer); | 1800 FinalizedHeapObjectHeader* header = FinalizedHeapObjectHeader::fromPayload(objectPointer); |
1780 visitHeader(header, header->payload(), callback); | 1801 visitHeader(header, header->payload(), callback); |
1781 } | 1802 } |
1782 | 1803 |
1783 | |
1784 inline void visitConservatively(HeapObjectHeader* header, void* objectPointer, size_t objectSize) |
1785 { | |
1786 ASSERT(header); | |
1787 ASSERT(objectPointer); | |
1788 if (header->isMarked()) | |
1789 return; | |
1790 header->mark(); | |
1791 | |
1792 // Scan through the object's fields and visit them conservatively. | |
1793 Address* objectFields = reinterpret_cast<Address*>(objectPointer); | |
1794 for (size_t i = 0; i < objectSize / sizeof(Address); ++i) | |
1795 Heap::checkAndMarkPointer(this, objectFields[i]); | |
1796 } | |
1797 | |
1798 virtual void markConservatively(HeapObjectHeader* header) | |
1799 { | |
1800 // We need both the HeapObjectHeader and FinalizedHeapObjectHeader | |
1801 // version to correctly find the payload. | |
1802 visitConservatively(header, header->payload(), header->payloadSize()); | |
1803 } | |
1804 | |
1805 virtual void markConservatively(FinalizedHeapObjectHeader* header) | |
1806 { | |
1807 // We need both the HeapObjectHeader and FinalizedHeapObjectHeader | |
1808 // version to correctly find the payload. | |
1809 visitConservatively(header, header->payload(), header->payloadSize()); | |
1810 } | |
1811 | |
1812 virtual void registerWeakMembers(const void* closure, const void* containingObject, WeakPointerCallback callback) OVERRIDE | 1804 virtual void registerWeakMembers(const void* closure, const void* containingObject, WeakPointerCallback callback) OVERRIDE |
1813 { | 1805 { |
1814 Heap::pushWeakObjectPointerCallback(const_cast<void*>(closure), const_cast<void*>(containingObject), callback); | 1806 Heap::pushWeakObjectPointerCallback(const_cast<void*>(closure), const_cast<void*>(containingObject), callback); |
1815 } | 1807 } |
1816 | 1808 |
1817 virtual void registerWeakTable(const void* closure, EphemeronCallback iterationCallback, EphemeronCallback iterationDoneCallback) | 1809 virtual void registerWeakTable(const void* closure, EphemeronCallback iterationCallback, EphemeronCallback iterationDoneCallback) |
1818 { | 1810 { |
1819 Heap::registerWeakTable(const_cast<void*>(closure), iterationCallback, iterationDoneCallback); | 1811 Heap::registerWeakTable(const_cast<void*>(closure), iterationCallback, iterationDoneCallback); |
1820 } | 1812 } |
1821 | 1813 |
(...skipping 561 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2383 Visitor* Heap::s_markingVisitor; | 2375 Visitor* Heap::s_markingVisitor; |
2384 CallbackStack* Heap::s_markingStack; | 2376 CallbackStack* Heap::s_markingStack; |
2385 CallbackStack* Heap::s_weakCallbackStack; | 2377 CallbackStack* Heap::s_weakCallbackStack; |
2386 CallbackStack* Heap::s_ephemeronStack; | 2378 CallbackStack* Heap::s_ephemeronStack; |
2387 HeapDoesNotContainCache* Heap::s_heapDoesNotContainCache; | 2379 HeapDoesNotContainCache* Heap::s_heapDoesNotContainCache; |
2388 bool Heap::s_shutdownCalled = false; | 2380 bool Heap::s_shutdownCalled = false; |
2389 bool Heap::s_lastGCWasConservative = false; | 2381 bool Heap::s_lastGCWasConservative = false; |
2390 FreePagePool* Heap::s_freePagePool; | 2382 FreePagePool* Heap::s_freePagePool; |
2391 OrphanedPagePool* Heap::s_orphanedPagePool; | 2383 OrphanedPagePool* Heap::s_orphanedPagePool; |
2392 } | 2384 } |
OLD | NEW |