| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
| 6 * met: | 6 * met: |
| 7 * | 7 * |
| 8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
| (...skipping 565 matching lines...) |
| 576 void BaseHeap::prepareForSweep() | 576 void BaseHeap::prepareForSweep() |
| 577 { | 577 { |
| 578 ASSERT(!threadState()->isInGC()); | 578 ASSERT(!threadState()->isInGC()); |
| 579 ASSERT(!m_firstUnsweptPage); | 579 ASSERT(!m_firstUnsweptPage); |
| 580 | 580 |
| 581 // Move all pages to a list of unswept pages. | 581 // Move all pages to a list of unswept pages. |
| 582 m_firstUnsweptPage = m_firstPage; | 582 m_firstUnsweptPage = m_firstPage; |
| 583 m_firstPage = nullptr; | 583 m_firstPage = nullptr; |
| 584 } | 584 } |
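prepareForSweep() hands the entire page list over to the unswept list in O(1); pages are then swept on demand, for example by lazySweep() below when an allocation fails to find memory. A minimal sketch of that handoff-and-consume pattern, using simplified stand-in types rather than Blink's real BaseHeap/BasePage:

    struct Page {
        Page* next = nullptr;
        void sweep();  // run finalizers and rebuild the free list (omitted)
    };

    struct Heap {
        Page* firstPage = nullptr;         // swept pages, usable for allocation
        Page* firstUnsweptPage = nullptr;  // pages still waiting to be swept

        void prepareForSweep() {
            // O(1) handoff at the end of marking: every page becomes "unswept".
            firstUnsweptPage = firstPage;
            firstPage = nullptr;
        }

        // Sweep a single page on demand, e.g. when allocation runs dry.
        void sweepOnePage() {
            if (Page* page = firstUnsweptPage) {
                firstUnsweptPage = page->next;
                page->sweep();
                page->next = firstPage;
                firstPage = page;
            }
        }
    };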
| 585 | 585 |
| 586 #if defined(ADDRESS_SANITIZER) |
| 587 void BaseHeap::poisonUnmarkedObjects() |
| 588 { |
| 589 // This method is called just before starting sweeping. |
| 590 // Thus all dead objects are in the list of m_firstUnsweptPage. |
| 591 for (BasePage* page = m_firstUnsweptPage; page; page = page->next()) { |
| 592 page->poisonUnmarkedObjects(); |
| 593 } |
| 594 } |
| 595 #endif |
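The poisoning added here follows AddressSanitizer's manual-poisoning interface: the same ASAN_POISON_MEMORY_REGION / ASAN_UNPOISON_MEMORY_REGION names exist in <sanitizer/asan_interface.h>, where they map to __asan_poison_memory_region / __asan_unpoison_memory_region under ASan and compile to no-ops otherwise. A minimal standalone sketch of the idea (not Blink code; requires building with -fsanitize=address):

    #include <sanitizer/asan_interface.h>
    #include <cstdlib>

    int main() {
        char* block = static_cast<char*>(std::malloc(64));
        // Mark the region unaddressable: any load or store now produces a
        // use-after-poison report instead of silently succeeding.
        ASAN_POISON_MEMORY_REGION(block, 64);
        // block[0] = 1;  // would be reported by ASan here
        ASAN_UNPOISON_MEMORY_REGION(block, 64);
        block[0] = 1;     // fine again after unpoisoning
        std::free(block);
        return 0;
    }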
| 596 |
| 586 Address BaseHeap::lazySweep(size_t allocationSize, size_t gcInfoIndex) | 597 Address BaseHeap::lazySweep(size_t allocationSize, size_t gcInfoIndex) |
| 587 { | 598 { |
| 588 // If there are no pages to be swept, return immediately. | 599 // If there are no pages to be swept, return immediately. |
| 589 if (!m_firstUnsweptPage) | 600 if (!m_firstUnsweptPage) |
| 590 return nullptr; | 601 return nullptr; |
| 591 | 602 |
| 592 RELEASE_ASSERT(threadState()->isSweepingInProgress()); | 603 RELEASE_ASSERT(threadState()->isSweepingInProgress()); |
| 593 | 604 |
| 594 // lazySweepPages() can be called recursively if finalizers invoked in | 605 // lazySweepPages() can be called recursively if finalizers invoked in |
| 595 // page->sweep() allocate memory and the allocation triggers | 606 // page->sweep() allocate memory and the allocation triggers |
| (...skipping 903 matching lines...) |
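The recursion warned about in this comment exists because finalizers are arbitrary user code: page->sweep() runs finalizers, a finalizer may allocate, and the allocation path may itself decide to sweep lazily. One common way to bound such recursion is a "sweep forbidden" flag checked on entry; the sketch below is purely illustrative, since the actual guard lives in the elided lines:

    #include <cstddef>

    // Hypothetical re-entrancy guard (illustration only, not Blink's exact code).
    class LazySweeper {
    public:
        void* lazySweep(size_t size) {
            if (m_sweepForbidden)
                return nullptr;  // a sweep further up the stack is already running
            m_sweepForbidden = true;
            // May run finalizers, which may allocate and call back into lazySweep().
            void* result = sweepPagesUntilFit(size);
            m_sweepForbidden = false;
            return result;
        }

    private:
        void* sweepPagesUntilFit(size_t size);
        bool m_sweepForbidden = false;
    };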
| 1499 } | 1510 } |
| 1500 header->checkHeader(); | 1511 header->checkHeader(); |
| 1501 | 1512 |
| 1502 if (!header->isMarked()) { | 1513 if (!header->isMarked()) { |
| 1503 size_t size = header->size(); | 1514 size_t size = header->size(); |
| 1504 // This is a fast version of header->payloadSize(). | 1515 // This is a fast version of header->payloadSize(). |
| 1505 size_t payloadSize = size - sizeof(HeapObjectHeader); | 1516 size_t payloadSize = size - sizeof(HeapObjectHeader); |
| 1506 Address payload = header->payload(); | 1517 Address payload = header->payload(); |
| 1507 // For ASan we unpoison the specific object when calling the | 1518 // For ASan we unpoison the specific object when calling the |
| 1508 // finalizer and poison it again when done to allow the object's own | 1519 // finalizer and poison it again when done to allow the object's own |
| 1509 // finalizer to operate on the object, but not have other finalizers | 1520 // finalizer to operate on the object. Given all other unmarked |
| 1510 // be allowed to access it. | 1521 // objects are poisoned, ASan will detect an error if the finalizer |
| 1522 // touches any other on-heap object that dies in the same GC cycle. |
| 1511 ASAN_UNPOISON_MEMORY_REGION(payload, payloadSize); | 1523 ASAN_UNPOISON_MEMORY_REGION(payload, payloadSize); |
| 1512 header->finalize(payload, payloadSize); | 1524 header->finalize(payload, payloadSize); |
| 1513 // This memory will be added to the freelist. Maintain the invariant | 1525 // This memory will be added to the freelist. Maintain the invariant |
| 1514 // that memory on the freelist is zero filled. | 1526 // that memory on the freelist is zero filled. |
| 1515 FILL_ZERO_IF_PRODUCTION(headerAddress, size); | 1527 FILL_ZERO_IF_PRODUCTION(headerAddress, size); |
| 1516 ASAN_POISON_MEMORY_REGION(payload, payloadSize); | 1528 ASAN_POISON_MEMORY_REGION(payload, payloadSize); |
| 1517 headerAddress += size; | 1529 headerAddress += size; |
| 1518 continue; | 1530 continue; |
| 1519 } | 1531 } |
| 1520 | 1532 |
| (...skipping 29 matching lines...) |
| 1550 markedObjectSize += header->size(); | 1562 markedObjectSize += header->size(); |
| 1551 } else { | 1563 } else { |
| 1552 header->markDead(); | 1564 header->markDead(); |
| 1553 } | 1565 } |
| 1554 headerAddress += header->size(); | 1566 headerAddress += header->size(); |
| 1555 } | 1567 } |
| 1556 if (markedObjectSize) | 1568 if (markedObjectSize) |
| 1557 Heap::increaseMarkedObjectSize(markedObjectSize); | 1569 Heap::increaseMarkedObjectSize(markedObjectSize); |
| 1558 } | 1570 } |
| 1559 | 1571 |
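The unpoison/finalize/poison sequence above, combined with poisonUnmarkedObjects(), targets one specific bug class: a finalizer of object A that reaches into object B while B is dying in the same GC cycle. Because every unmarked object except the one currently being finalized stays poisoned, such a touch becomes an immediate ASan report. A hypothetical, Blink-independent illustration of the bug being caught:

    // 'Peer' stands in for another unmarked (dead, already poisoned) heap object.
    struct Peer {
        int refCount = 0;
    };

    struct Node {
        Peer* peer = nullptr;
        ~Node() {
            // If 'peer' died in the same GC cycle, its payload was poisoned
            // before sweeping began, so this access is reported by ASan as
            // use-after-poison instead of silently reading stale memory.
            if (peer)
                --peer->refCount;
        }
    };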
| 1572 #if defined(ADDRESS_SANITIZER) |
| 1573 void NormalPage::poisonUnmarkedObjects() |
| 1574 { |
| 1575 for (Address headerAddress = payload(); headerAddress < payloadEnd();) { |
| 1576 HeapObjectHeader* header = reinterpret_cast<HeapObjectHeader*>(headerAddress); |
| 1577 ASSERT(header->size() < blinkPagePayloadSize()); |
| 1578 // Check if this is a free list entry first since we cannot call |
| 1579 // isMarked on a free list entry. |
| 1580 if (header->isFree()) { |
| 1581 headerAddress += header->size(); |
| 1582 continue; |
| 1583 } |
| 1584 header->checkHeader(); |
| 1585 if (!header->isMarked()) { |
| 1586 ASAN_POISON_MEMORY_REGION(header->payload(), header->payloadSize()); |
| 1587 } |
| 1588 headerAddress += header->size(); |
| 1589 } |
| 1590 } |
| 1591 #endif |
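The isFree()-before-isMarked() ordering matters: a free-list entry reuses the header differently from a live object, so its mark bit is meaningless and checkHeader() would reject it. A hypothetical encoding sketch, only to illustrate why the two predicates cannot be swapped (the field layout does not match Blink's real HeapObjectHeader):

    #include <cstddef>
    #include <cstdint>

    constexpr uint32_t kFreeListBit = 1u << 0;  // entry sits on the free list
    constexpr uint32_t kMarkBit     = 1u << 1;  // object survived marking
    constexpr uint32_t kSizeShift   = 2;

    struct ObjectHeader {
        uint32_t encoded;

        bool isFree() const   { return encoded & kFreeListBit; }
        // Only meaningful when !isFree(): a free-list entry never carries a
        // valid mark bit, which is why the sweep and poison loops test
        // isFree() first and skip the entry by its size.
        bool isMarked() const { return encoded & kMarkBit; }
        size_t size() const   { return size_t(encoded >> kSizeShift); }
    };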
| 1592 |
| 1560 void NormalPage::populateObjectStartBitMap() | 1593 void NormalPage::populateObjectStartBitMap() |
| 1561 { | 1594 { |
| 1562 memset(&m_objectStartBitMap, 0, objectStartBitMapSize); | 1595 memset(&m_objectStartBitMap, 0, objectStartBitMapSize); |
| 1563 Address start = payload(); | 1596 Address start = payload(); |
| 1564 for (Address headerAddress = start; headerAddress < payloadEnd();) { | 1597 for (Address headerAddress = start; headerAddress < payloadEnd();) { |
| 1565 HeapObjectHeader* header = reinterpret_cast<HeapObjectHeader*>(headerAddress); | 1598 HeapObjectHeader* header = reinterpret_cast<HeapObjectHeader*>(headerAddress); |
| 1566 size_t objectOffset = headerAddress - start; | 1599 size_t objectOffset = headerAddress - start; |
| 1567 ASSERT(!(objectOffset & allocationMask)); | 1600 ASSERT(!(objectOffset & allocationMask)); |
| 1568 size_t objectStartNumber = objectOffset / allocationGranularity; | 1601 size_t objectStartNumber = objectOffset / allocationGranularity; |
| 1569 size_t mapIndex = objectStartNumber / 8; | 1602 size_t mapIndex = objectStartNumber / 8; |
| (...skipping 246 matching lines...) |
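The object start bitmap records, one bit per allocation granule, which granules begin an object; conservative pointer lookups can then map an interior address back to the nearest preceding object start. A sketch of the indexing arithmetic shown above (the granularity value is assumed; the real constant comes from the Blink heap headers):

    #include <cstddef>
    #include <cstdint>

    constexpr size_t allocationGranularity = 8;  // assumed granule size
    constexpr size_t allocationMask = allocationGranularity - 1;

    inline void setObjectStartBit(uint8_t* bitmap, size_t objectOffset) {
        // Objects start on granule boundaries, so the low offset bits are zero.
        // objectStartNumber is the granule index; each bitmap byte covers
        // eight consecutive granules.
        size_t objectStartNumber = objectOffset / allocationGranularity;
        size_t mapIndex = objectStartNumber / 8;
        size_t bit = objectStartNumber & 7;
        bitmap[mapIndex] |= static_cast<uint8_t>(1u << bit);
    }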
| 1816 { | 1849 { |
| 1817 HeapObjectHeader* header = heapObjectHeader(); | 1850 HeapObjectHeader* header = heapObjectHeader(); |
| 1818 if (header->isMarked()) { | 1851 if (header->isMarked()) { |
| 1819 header->unmark(); | 1852 header->unmark(); |
| 1820 Heap::increaseMarkedObjectSize(size()); | 1853 Heap::increaseMarkedObjectSize(size()); |
| 1821 } else { | 1854 } else { |
| 1822 header->markDead(); | 1855 header->markDead(); |
| 1823 } | 1856 } |
| 1824 } | 1857 } |
| 1825 | 1858 |
| 1859 #if defined(ADDRESS_SANITIZER) |
| 1860 void LargeObjectPage::poisonUnmarkedObjects() |
| 1861 { |
| 1862 HeapObjectHeader* header = heapObjectHeader(); |
| 1863 if (!header->isMarked()) |
| 1864 ASAN_POISON_MEMORY_REGION(header->payload(), header->payloadSize()); |
| 1865 } |
| 1866 #endif |
| 1867 |
| 1826 void LargeObjectPage::checkAndMarkPointer(Visitor* visitor, Address address) | 1868 void LargeObjectPage::checkAndMarkPointer(Visitor* visitor, Address address) |
| 1827 { | 1869 { |
| 1828 ASSERT(contains(address)); | 1870 ASSERT(contains(address)); |
| 1829 if (!containedInObjectPayload(address) || heapObjectHeader()->isDead()) | 1871 if (!containedInObjectPayload(address) || heapObjectHeader()->isDead()) |
| 1830 return; | 1872 return; |
| 1831 #if ENABLE(GC_PROFILING) | 1873 #if ENABLE(GC_PROFILING) |
| 1832 visitor->setHostInfo(&address, "stack"); | 1874 visitor->setHostInfo(&address, "stack"); |
| 1833 #endif | 1875 #endif |
| 1834 markPointer(visitor, heapObjectHeader()); | 1876 markPointer(visitor, heapObjectHeader()); |
| 1835 } | 1877 } |
| (...skipping 852 matching lines...) |
| 2688 size_t Heap::s_allocatedObjectSize = 0; | 2730 size_t Heap::s_allocatedObjectSize = 0; |
| 2689 size_t Heap::s_allocatedSpace = 0; | 2731 size_t Heap::s_allocatedSpace = 0; |
| 2690 size_t Heap::s_markedObjectSize = 0; | 2732 size_t Heap::s_markedObjectSize = 0; |
| 2691 // We don't want to use 0 KB for the initial value because it may end up | 2733 // We don't want to use 0 KB for the initial value because it may end up |
| 2692 // triggering the first GC of some thread too prematurely. | 2734 // triggering the first GC of some thread too prematurely. |
| 2693 size_t Heap::s_estimatedLiveObjectSize = 512 * 1024; | 2735 size_t Heap::s_estimatedLiveObjectSize = 512 * 1024; |
| 2694 size_t Heap::s_externalObjectSizeAtLastGC = 0; | 2736 size_t Heap::s_externalObjectSizeAtLastGC = 0; |
| 2695 double Heap::s_estimatedMarkingTimePerByte = 0.0; | 2737 double Heap::s_estimatedMarkingTimePerByte = 0.0; |
| 2696 | 2738 |
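The non-zero starting value for s_estimatedLiveObjectSize matters because GC scheduling heuristics typically compare how much has been allocated since the last GC against an estimate of the live size; with an estimate of 0, even the first small allocation on a fresh thread looks like unbounded growth. A hypothetical heuristic, only to illustrate the effect of the 512 KB floor (not Blink's actual policy):

    #include <cstddef>

    bool shouldScheduleGC(size_t allocatedSinceLastGC, size_t estimatedLive) {
        const double growthFactor = 1.5;  // assumed policy
        return allocatedSinceLastGC > estimatedLive * (growthFactor - 1.0);
    }

    // shouldScheduleGC(4 * 1024, 0)          -> true  (premature first GC)
    // shouldScheduleGC(4 * 1024, 512 * 1024) -> false (the thread gets headroom)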
| 2697 } // namespace blink | 2739 } // namespace blink |