Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
| 6 * met: | 6 * met: |
| 7 * | 7 * |
| 8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
| (...skipping 316 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 327 m_firstUnsweptPage = m_firstPage; | 327 m_firstUnsweptPage = m_firstPage; |
| 328 m_firstPage = nullptr; | 328 m_firstPage = nullptr; |
| 329 } | 329 } |
| 330 | 330 |
| 331 #if defined(ADDRESS_SANITIZER) | 331 #if defined(ADDRESS_SANITIZER) |
| 332 void BaseHeap::poisonUnmarkedObjects() | 332 void BaseHeap::poisonUnmarkedObjects() |
| 333 { | 333 { |
| 334 // This method is called just before starting sweeping. | 334 // This method is called just before starting sweeping. |
| 335 // Thus all dead objects are in the list of m_firstUnsweptPage. | 335 // Thus all dead objects are in the list of m_firstUnsweptPage. |
| 336 for (BasePage* page = m_firstUnsweptPage; page; page = page->next()) { | 336 for (BasePage* page = m_firstUnsweptPage; page; page = page->next()) { |
| 337 page->poisonUnmarkedObjects(); | 337 page->poisonObjects(BasePage::UnmarkedOnly, BasePage::SetPoison); |
| 338 } | |
| 339 } | |
| 340 | |
| 341 void BaseHeap::poisonHeap(bool setPoison) | |
|
haraken
2015/05/28 12:30:04
poisonHeap => poisonEagerHeap
I'd use SetPoison/C
sof
2015/05/29 21:25:07
Moved enums around to make that possible.
Keeping
| |
| 342 { | |
| 343 ASSERT(heapIndex() == EagerSweepHeapIndex); | |
| 344 // This method is called just before starting sweeping | |
| 345 // of eager heaps. Hence, all objects will be in | |
| 346 // m_firstUnsweptPage before start. | |
| 347 if (setPoison) { | |
| 348 for (BasePage* page = m_firstUnsweptPage; page; page = page->next()) { | |
|
haraken
2015/05/28 12:30:04
Add ASSERT(!m_firstPage).
sof
2015/05/29 21:25:07
Done.
| |
| 349 page->poisonObjects(BasePage::UnmarkedOrMarked, BasePage::SetPoison); | |
|
haraken
2015/05/28 12:30:04
Why do we need to poison marked objects? I'm ok wi
sof
2015/05/29 21:25:07
You're not allowed to touch other eagerly finalize
| |
| 350 } | |
| 351 return; | |
| 352 } | |
| 353 for (BasePage* page = m_firstPage; page; page = page->next()) { | |
| 354 page->poisonObjects(BasePage::UnmarkedOnly, BasePage::ClearPoison); | |
|
haraken
2015/05/28 12:30:04
Who unpoisons the marked objects that have been po
sof
2015/05/29 21:25:07
This very loop when poisonHeap() is called on the
| |
| 338 } | 355 } |
| 339 } | 356 } |
| 340 #endif | 357 #endif |
| 341 | 358 |
| 342 Address BaseHeap::lazySweep(size_t allocationSize, size_t gcInfoIndex) | 359 Address BaseHeap::lazySweep(size_t allocationSize, size_t gcInfoIndex) |
| 343 { | 360 { |
| 344 // If there are no pages to be swept, return immediately. | 361 // If there are no pages to be swept, return immediately. |
| 345 if (!m_firstUnsweptPage) | 362 if (!m_firstUnsweptPage) |
| 346 return nullptr; | 363 return nullptr; |
| 347 | 364 |
| (...skipping 414 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 762 { | 779 { |
| 763 ASSERT(allocationSize > remainingAllocationSize()); | 780 ASSERT(allocationSize > remainingAllocationSize()); |
| 764 ASSERT(allocationSize >= allocationGranularity); | 781 ASSERT(allocationSize >= allocationGranularity); |
| 765 | 782 |
| 766 #if ENABLE(GC_PROFILING) | 783 #if ENABLE(GC_PROFILING) |
| 767 threadState()->snapshotFreeListIfNecessary(); | 784 threadState()->snapshotFreeListIfNecessary(); |
| 768 #endif | 785 #endif |
| 769 | 786 |
| 770 // 1. If this allocation is big enough, allocate a large object. | 787 // 1. If this allocation is big enough, allocate a large object. |
| 771 if (allocationSize >= largeObjectSizeThreshold) { | 788 if (allocationSize >= largeObjectSizeThreshold) { |
| 789 // TODO(sof): support eagerly finalized large objects, if ever needed. | |
| 790 ASSERT(heapIndex() != EagerSweepHeapIndex); | |
| 772 LargeObjectHeap* largeObjectHeap = static_cast<LargeObjectHeap*>(threadState()->heap(LargeObjectHeapIndex)); | 791 LargeObjectHeap* largeObjectHeap = static_cast<LargeObjectHeap*>(threadState()->heap(LargeObjectHeapIndex)); |
| 773 Address largeObject = largeObjectHeap->allocateLargeObjectPage(allocationSize, gcInfoIndex); | 792 Address largeObject = largeObjectHeap->allocateLargeObjectPage(allocationSize, gcInfoIndex); |
| 774 ASAN_MARK_LARGE_VECTOR_CONTAINER(this, largeObject); | 793 ASAN_MARK_LARGE_VECTOR_CONTAINER(this, largeObject); |
| 775 return largeObject; | 794 return largeObject; |
| 776 } | 795 } |
| 777 | 796 |
| 778 // 2. Check if we should trigger a GC. | 797 // 2. Check if we should trigger a GC. |
| 779 updateRemainingAllocationSize(); | 798 updateRemainingAllocationSize(); |
| 780 threadState()->scheduleGCIfNeeded(); | 799 threadState()->scheduleGCIfNeeded(); |
| 781 | 800 |
| (...skipping 350 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1132 // touches any other on-heap object that die at the same GC cycle. | 1151 // touches any other on-heap object that die at the same GC cycle. |
| 1133 ASAN_UNPOISON_MEMORY_REGION(payload, payloadSize); | 1152 ASAN_UNPOISON_MEMORY_REGION(payload, payloadSize); |
| 1134 header->finalize(payload, payloadSize); | 1153 header->finalize(payload, payloadSize); |
| 1135 // This memory will be added to the freelist. Maintain the invariant | 1154 // This memory will be added to the freelist. Maintain the invariant |
| 1136 // that memory on the freelist is zero filled. | 1155 // that memory on the freelist is zero filled. |
| 1137 FILL_ZERO_IF_PRODUCTION(headerAddress, size); | 1156 FILL_ZERO_IF_PRODUCTION(headerAddress, size); |
| 1138 ASAN_POISON_MEMORY_REGION(payload, payloadSize); | 1157 ASAN_POISON_MEMORY_REGION(payload, payloadSize); |
| 1139 headerAddress += size; | 1158 headerAddress += size; |
| 1140 continue; | 1159 continue; |
| 1141 } | 1160 } |
| 1142 | |
| 1143 if (startOfGap != headerAddress) | 1161 if (startOfGap != headerAddress) |
| 1144 heapForNormalPage()->addToFreeList(startOfGap, headerAddress - startOfGap); | 1162 heapForNormalPage()->addToFreeList(startOfGap, headerAddress - startOfGap); |
| 1145 header->unmark(); | 1163 header->unmark(); |
| 1146 headerAddress += header->size(); | 1164 headerAddress += header->size(); |
| 1147 markedObjectSize += header->size(); | 1165 markedObjectSize += header->size(); |
| 1148 startOfGap = headerAddress; | 1166 startOfGap = headerAddress; |
| 1149 } | 1167 } |
| 1150 if (startOfGap != payloadEnd()) | 1168 if (startOfGap != payloadEnd()) |
| 1151 heapForNormalPage()->addToFreeList(startOfGap, payloadEnd() - startOfGap); | 1169 heapForNormalPage()->addToFreeList(startOfGap, payloadEnd() - startOfGap); |
| 1152 | 1170 |
| (...skipping 20 matching lines...) Expand all Loading... | |
| 1173 } else { | 1191 } else { |
| 1174 header->markDead(); | 1192 header->markDead(); |
| 1175 } | 1193 } |
| 1176 headerAddress += header->size(); | 1194 headerAddress += header->size(); |
| 1177 } | 1195 } |
| 1178 if (markedObjectSize) | 1196 if (markedObjectSize) |
| 1179 Heap::increaseMarkedObjectSize(markedObjectSize); | 1197 Heap::increaseMarkedObjectSize(markedObjectSize); |
| 1180 } | 1198 } |
| 1181 | 1199 |
| 1182 #if defined(ADDRESS_SANITIZER) | 1200 #if defined(ADDRESS_SANITIZER) |
| 1183 void NormalPage::poisonUnmarkedObjects() | 1201 void NormalPage::poisonObjects(ObjectsToPoison objectsToPoison, Poisoning poisoning) |
| 1184 { | 1202 { |
| 1185 for (Address headerAddress = payload(); headerAddress < payloadEnd();) { | 1203 for (Address headerAddress = payload(); headerAddress < payloadEnd();) { |
| 1186 HeapObjectHeader* header = reinterpret_cast<HeapObjectHeader*>(headerAddress); | 1204 HeapObjectHeader* header = reinterpret_cast<HeapObjectHeader*>(headerAddress); |
| 1187 ASSERT(header->size() < blinkPagePayloadSize()); | 1205 ASSERT(header->size() < blinkPagePayloadSize()); |
| 1188 // Check if a free list entry first since we cannot call | 1206 // Check if a free list entry first since we cannot call |
| 1189 // isMarked on a free list entry. | 1207 // isMarked on a free list entry. |
| 1190 if (header->isFree()) { | 1208 if (header->isFree()) { |
| 1191 headerAddress += header->size(); | 1209 headerAddress += header->size(); |
| 1192 continue; | 1210 continue; |
| 1193 } | 1211 } |
| 1194 header->checkHeader(); | 1212 header->checkHeader(); |
| 1195 if (!header->isMarked()) { | 1213 if (objectsToPoison == UnmarkedOrMarked || !header->isMarked()) { |
| 1196 ASAN_POISON_MEMORY_REGION(header->payload(), header->payloadSize()); | 1214 if (poisoning == SetPoison) |
| 1215 ASAN_POISON_MEMORY_REGION(header->payload(), header->payloadSize()); | |
| 1216 else | |
| 1217 ASAN_UNPOISON_MEMORY_REGION(header->payload(), header->payloadSize()); | |
| 1197 } | 1218 } |
| 1198 headerAddress += header->size(); | 1219 headerAddress += header->size(); |
| 1199 } | 1220 } |
| 1200 } | 1221 } |
| 1201 #endif | 1222 #endif |
| 1202 | 1223 |
| 1203 void NormalPage::populateObjectStartBitMap() | 1224 void NormalPage::populateObjectStartBitMap() |
| 1204 { | 1225 { |
| 1205 memset(&m_objectStartBitMap, 0, objectStartBitMapSize); | 1226 memset(&m_objectStartBitMap, 0, objectStartBitMapSize); |
| 1206 Address start = payload(); | 1227 Address start = payload(); |
| (...skipping 255 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1462 HeapObjectHeader* header = heapObjectHeader(); | 1483 HeapObjectHeader* header = heapObjectHeader(); |
| 1463 if (header->isMarked()) { | 1484 if (header->isMarked()) { |
| 1464 header->unmark(); | 1485 header->unmark(); |
| 1465 Heap::increaseMarkedObjectSize(size()); | 1486 Heap::increaseMarkedObjectSize(size()); |
| 1466 } else { | 1487 } else { |
| 1467 header->markDead(); | 1488 header->markDead(); |
| 1468 } | 1489 } |
| 1469 } | 1490 } |
| 1470 | 1491 |
| 1471 #if defined(ADDRESS_SANITIZER) | 1492 #if defined(ADDRESS_SANITIZER) |
| 1472 void LargeObjectPage::poisonUnmarkedObjects() | 1493 void LargeObjectPage::poisonObjects(ObjectsToPoison objectsToPoison, Poisoning poisoning) |
| 1473 { | 1494 { |
| 1474 HeapObjectHeader* header = heapObjectHeader(); | 1495 HeapObjectHeader* header = heapObjectHeader(); |
| 1475 if (!header->isMarked()) | 1496 if (objectsToPoison == UnmarkedOrMarked || !header->isMarked()) { |
| 1476 ASAN_POISON_MEMORY_REGION(header->payload(), header->payloadSize()); | 1497 if (poisoning == BasePage::SetPoison) |
| 1498 ASAN_POISON_MEMORY_REGION(header->payload(), header->payloadSize()); | |
| 1499 else | |
| 1500 ASAN_UNPOISON_MEMORY_REGION(header->payload(), header->payloadSize()); | |
| 1501 } | |
| 1477 } | 1502 } |
| 1478 #endif | 1503 #endif |
| 1479 | 1504 |
| 1480 void LargeObjectPage::checkAndMarkPointer(Visitor* visitor, Address address) | 1505 void LargeObjectPage::checkAndMarkPointer(Visitor* visitor, Address address) |
| 1481 { | 1506 { |
| 1482 ASSERT(contains(address)); | 1507 ASSERT(contains(address)); |
| 1483 if (!containedInObjectPayload(address) || heapObjectHeader()->isDead()) | 1508 if (!containedInObjectPayload(address) || heapObjectHeader()->isDead()) |
| 1484 return; | 1509 return; |
| 1485 #if ENABLE(GC_PROFILING) | 1510 #if ENABLE(GC_PROFILING) |
| 1486 visitor->setHostInfo(&address, "stack"); | 1511 visitor->setHostInfo(&address, "stack"); |
| (...skipping 736 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2223 size_t Heap::s_allocatedObjectSize = 0; | 2248 size_t Heap::s_allocatedObjectSize = 0; |
| 2224 size_t Heap::s_allocatedSpace = 0; | 2249 size_t Heap::s_allocatedSpace = 0; |
| 2225 size_t Heap::s_markedObjectSize = 0; | 2250 size_t Heap::s_markedObjectSize = 0; |
| 2226 // We don't want to use 0 KB for the initial value because it may end up | 2251 // We don't want to use 0 KB for the initial value because it may end up |
| 2227 // triggering the first GC of some thread too prematurely. | 2252 // triggering the first GC of some thread too prematurely. |
| 2228 size_t Heap::s_estimatedLiveObjectSize = 512 * 1024; | 2253 size_t Heap::s_estimatedLiveObjectSize = 512 * 1024; |
| 2229 size_t Heap::s_externalObjectSizeAtLastGC = 0; | 2254 size_t Heap::s_externalObjectSizeAtLastGC = 0; |
| 2230 double Heap::s_estimatedMarkingTimePerByte = 0.0; | 2255 double Heap::s_estimatedMarkingTimePerByte = 0.0; |
| 2231 | 2256 |
| 2232 } // namespace blink | 2257 } // namespace blink |
| OLD | NEW |