Chromium Code Reviews

Unified Diff: third_party/WebKit/Source/platform/heap/HeapPage.cpp

Issue 2054673002: Mark shouldMarkObject(), arenaForNormalPage() accessors as const. (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 4 years, 6 months ago
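For reference, the change const-qualifies accessors that only read object state (shouldMarkObject() receives the same treatment elsewhere in the CL). Below is a minimal sketch of the pattern using simplified stand-in types rather than the real Blink classes; the arenaForNormalPage() body mirrors the out-of-line definition removed later in this diff, and the const-ness of arena() is assumed here only so the sketch compiles.

// Sketch only: simplified stand-ins, not the Blink definitions.
class BaseArena {};
class NormalPageArena : public BaseArena {};

class BasePage {
public:
    explicit BasePage(BaseArena* arena) : m_arena(arena) {}
    BaseArena* arena() const { return m_arena; } // const-ness assumed for this sketch
private:
    BaseArena* m_arena;
};

class NormalPage : public BasePage {
public:
    using BasePage::BasePage;
    // A const-qualified accessor promises not to mutate the page, so it can be
    // called through const pointers and references.
    NormalPageArena* arenaForNormalPage() const
    {
        return static_cast<NormalPageArena*>(arena());
    }
};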
 /*
  * Copyright (C) 2013 Google Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are
  * met:
  *
  *     * Redistributions of source code must retain the above copyright
  * notice, this list of conditions and the following disclaimer.
  *     * Redistributions in binary form must reproduce the above
(...skipping 974 matching lines...)
     }
     entry = new (NotNull, address) FreeListEntry(size);
 
 #if ENABLE(ASSERT) || defined(LEAK_SANITIZER) || defined(ADDRESS_SANITIZER)
     // The following logic delays reusing free lists for (at least) one GC
     // cycle or coalescing. This is helpful to detect use-after-free errors
     // that could be caused by lazy sweeping etc.
     size_t allowedCount = 0;
     size_t forbiddenCount = 0;
     for (size_t i = sizeof(FreeListEntry); i < size; i++) {
-        if (address[i] == reuseAllowedZapValue) {
+        if (address[i] == reuseAllowedZapValue)
             allowedCount++;
-        } else if (address[i] == reuseForbiddenZapValue) {
+        else if (address[i] == reuseForbiddenZapValue)
             forbiddenCount++;
-        } else {
+        else
             ASSERT_NOT_REACHED();
-        }
     }
     size_t entryCount = size - sizeof(FreeListEntry);
     if (forbiddenCount == entryCount) {
         // If all values in the memory region are reuseForbiddenZapValue,
         // we flip them to reuseAllowedZapValue. This allows the next
         // addToFreeList() to add the memory region to the free list
         // (unless someone concatenates the memory region with another memory
         // region that contains reuseForbiddenZapValue.)
         for (size_t i = sizeof(FreeListEntry); i < size; i++)
             address[i] = reuseAllowedZapValue;
(...skipping 164 matching lines...)
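The comments in the chunk above describe the zap-value scheme used in ASSERT/LSan/ASan builds: freed payload bytes are filled with a "reuse forbidden" value and are only flipped to "reuse allowed" by a later addToFreeList() call, so lazily swept memory is not handed out again within the same GC cycle and stray writes show up as unexpected byte values. A standalone sketch of that idea, with hypothetical names and values rather than Blink's real constants, might look like:

#include <cassert>
#include <cstddef>
#include <cstdint>

// Hypothetical zap values for the sketch; Blink's real constants differ.
const uint8_t kReuseForbiddenZap = 0x2a;
const uint8_t kReuseAllowedZap = 0x2b;

// Called when a region is freed: zap it so it cannot be reused immediately.
void zapFreedRegion(uint8_t* address, size_t size)
{
    for (size_t i = 0; i < size; i++)
        address[i] = kReuseForbiddenZap;
}

// Called when the region is offered to the free list. Returns true only if a
// previous offer already flipped every byte to "allowed"; otherwise flips the
// region now and rejects it, which delays reuse by (at least) one pass.
bool allowReuse(uint8_t* address, size_t size)
{
    size_t allowedCount = 0;
    for (size_t i = 0; i < size; i++) {
        if (address[i] == kReuseAllowedZap)
            allowedCount++;
        else
            assert(address[i] == kReuseForbiddenZap); // anything else is a stray write
    }
    if (allowedCount == size)
        return true;
    for (size_t i = 0; i < size; i++)
        address[i] = kReuseAllowedZap;
    return false;
}

int main()
{
    uint8_t region[32];
    zapFreedRegion(region, sizeof(region));
    assert(!allowReuse(region, sizeof(region))); // first offer: flipped, not reused
    assert(allowReuse(region, sizeof(region)));  // second offer: reuse permitted
    return 0;
}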
         if (header->isFree()) {
             // Zero the memory in the free list header to maintain the
             // invariant that memory on the free list is zero filled.
             // The rest of the memory is already on the free list and is
             // therefore already zero filled.
             SET_MEMORY_INACCESSIBLE(headerAddress, size < sizeof(FreeListEntry) ? size : sizeof(FreeListEntry));
             CHECK_MEMORY_INACCESSIBLE(headerAddress, size);
             headerAddress += size;
             continue;
         }
-        ASSERT(header->checkHeader());
-
         if (!header->isMarked()) {
             // This is a fast version of header->payloadSize().
             size_t payloadSize = size - sizeof(HeapObjectHeader);
             Address payload = header->payload();
             // For ASan, unpoison the object before calling the finalizer. The
             // finalized object will be zero-filled and poison'ed afterwards.
             // Given all other unmarked objects are poisoned, ASan will detect
             // an error if the finalizer touches any other on-heap object that
             // die at the same GC cycle.
             ASAN_UNPOISON_MEMORY_REGION(payload, payloadSize);
(...skipping 35 matching lines...)
     size_t markedObjectSize = 0;
     for (Address headerAddress = payload(); headerAddress < payloadEnd();) {
         HeapObjectHeader* header = reinterpret_cast<HeapObjectHeader*>(headerAddress);
         ASSERT(header->size() < blinkPagePayloadSize());
         // Check if a free list entry first since we cannot call
         // isMarked on a free list entry.
         if (header->isFree()) {
             headerAddress += header->size();
             continue;
         }
-        ASSERT(header->checkHeader());
         if (header->isMarked()) {
             header->unmark();
             markedObjectSize += header->size();
         } else {
             header->markDead();
         }
         headerAddress += header->size();
     }
     if (markedObjectSize)
         arenaForNormalPage()->getThreadState()->increaseMarkedObjectSize(markedObjectSize);
 }
 
 void NormalPage::makeConsistentForMutator()
 {
     Address startOfGap = payload();
+    NormalPageArena* normalArena = arenaForNormalPage();
     for (Address headerAddress = payload(); headerAddress < payloadEnd();) {
         HeapObjectHeader* header = reinterpret_cast<HeapObjectHeader*>(headerAddress);
         size_t size = header->size();
         ASSERT(size < blinkPagePayloadSize());
         if (header->isPromptlyFreed())
             arenaForNormalPage()->decreasePromptlyFreedSize(size);
         if (header->isFree()) {
             // Zero the memory in the free list header to maintain the
             // invariant that memory on the free list is zero filled.
             // The rest of the memory is already on the free list and is
             // therefore already zero filled.
             SET_MEMORY_INACCESSIBLE(headerAddress, size < sizeof(FreeListEntry) ? size : sizeof(FreeListEntry));
             CHECK_MEMORY_INACCESSIBLE(headerAddress, size);
             headerAddress += size;
             continue;
         }
-        ASSERT(header->checkHeader());
-
         if (startOfGap != headerAddress)
-            arenaForNormalPage()->addToFreeList(startOfGap, headerAddress - startOfGap);
+            normalArena->addToFreeList(startOfGap, headerAddress - startOfGap);
         if (header->isMarked())
             header->unmark();
         headerAddress += size;
         startOfGap = headerAddress;
         ASSERT(headerAddress <= payloadEnd());
     }
     if (startOfGap != payloadEnd())
-        arenaForNormalPage()->addToFreeList(startOfGap, payloadEnd() - startOfGap);
+        normalArena->addToFreeList(startOfGap, payloadEnd() - startOfGap);
 }
 
 #if defined(ADDRESS_SANITIZER)
 void NormalPage::poisonUnmarkedObjects()
 {
     for (Address headerAddress = payload(); headerAddress < payloadEnd();) {
         HeapObjectHeader* header = reinterpret_cast<HeapObjectHeader*>(headerAddress);
         ASSERT(header->size() < blinkPagePayloadSize());
         // Check if a free list entry first since we cannot call
         // isMarked on a free list entry.
         if (header->isFree()) {
             headerAddress += header->size();
             continue;
         }
-        ASSERT(header->checkHeader());
         if (!header->isMarked())
             ASAN_POISON_MEMORY_REGION(header->payload(), header->payloadSize());
         headerAddress += header->size();
     }
 }
 #endif
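poisonUnmarkedObjects() above relies on ASan's manual poisoning interface: memory belonging to unmarked (dead) objects is poisoned so that a finalizer touching some other dying object is reported by ASan, and each object is unpoisoned again just before its own finalizer runs (the ASAN_UNPOISON_MEMORY_REGION call earlier in this diff). A minimal standalone sketch of that interface, independent of Blink's wrappers and following the macro pattern from the ASan documentation, might be:

// Build with and without -fsanitize=address; only the ASan build reports errors.
#if defined(__has_feature)
#if __has_feature(address_sanitizer)
#define ASAN_ENABLED 1
#endif
#endif

#ifdef ASAN_ENABLED
#include <sanitizer/asan_interface.h>
#define ASAN_POISON_MEMORY_REGION(addr, size) __asan_poison_memory_region((addr), (size))
#define ASAN_UNPOISON_MEMORY_REGION(addr, size) __asan_unpoison_memory_region((addr), (size))
#else
#define ASAN_POISON_MEMORY_REGION(addr, size) ((void)(addr), (void)(size))
#define ASAN_UNPOISON_MEMORY_REGION(addr, size) ((void)(addr), (void)(size))
#endif

#include <cstdlib>

int main()
{
    char* payload = static_cast<char*>(malloc(64));
    ASAN_POISON_MEMORY_REGION(payload, 64);   // like poisoning a dead object's payload
    // payload[0] = 1;                        // would be flagged by ASan as a poisoned access
    ASAN_UNPOISON_MEMORY_REGION(payload, 64); // like unpoisoning before running a finalizer
    payload[0] = 1;                           // fine again
    free(payload);
    return 0;
}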
 
 void NormalPage::populateObjectStartBitMap()
 {
     memset(&m_objectStartBitMap, 0, objectStartBitMapSize);
(...skipping 158 matching lines...)
 
 #if ENABLE(ASSERT)
 bool NormalPage::contains(Address addr)
 {
     Address blinkPageStart = roundToBlinkPageStart(getAddress());
     ASSERT(blinkPageStart == getAddress() - blinkGuardPageSize); // Page is at aligned address plus guard page size.
     return blinkPageStart <= addr && addr < blinkPageStart + blinkPageSize;
 }
 #endif
 
-NormalPageArena* NormalPage::arenaForNormalPage()
-{
-    return static_cast<NormalPageArena*>(arena());
-}
-
 LargeObjectPage::LargeObjectPage(PageMemory* storage, BaseArena* arena, size_t payloadSize)
     : BasePage(storage, arena)
     , m_payloadSize(payloadSize)
 #if ENABLE(ASAN_CONTAINER_ANNOTATIONS)
     , m_isVectorBackingPage(false)
 #endif
 {
 }
 
 size_t LargeObjectPage::objectPayloadSizeForTesting()
(...skipping 133 matching lines...)
 
     m_hasEntries = true;
     size_t index = hash(address);
     ASSERT(!(index & 1));
     Address cachePage = roundToBlinkPageStart(address);
     m_entries[index + 1] = m_entries[index];
     m_entries[index] = cachePage;
 }
 
 } // namespace blink