| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
| 6 * met: | 6 * met: |
| 7 * | 7 * |
| 8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
| (...skipping 199 matching lines...) |
| 210 s_heapDoesNotContainCache->addEntry(address); | 210 s_heapDoesNotContainCache->addEntry(address); |
| 211 #endif | 211 #endif |
| 212 return nullptr; | 212 return nullptr; |
| 213 } | 213 } |
| 214 | 214 |
| 215 void Heap::pushTraceCallback(void* object, TraceCallback callback) | 215 void Heap::pushTraceCallback(void* object, TraceCallback callback) |
| 216 { | 216 { |
| 217 ASSERT(ThreadState::current()->isInGC()); | 217 ASSERT(ThreadState::current()->isInGC()); |
| 218 | 218 |
| 219 // Trace should never reach an orphaned page. | 219 // Trace should never reach an orphaned page. |
| 220 ASSERT(!Heap::orphanedPagePool()->contains(object)); | 220 ASSERT(!Heap::getOrphanedPagePool()->contains(object)); |
| 221 CallbackStack::Item* slot = s_markingStack->allocateEntry(); | 221 CallbackStack::Item* slot = s_markingStack->allocateEntry(); |
| 222 *slot = CallbackStack::Item(object, callback); | 222 *slot = CallbackStack::Item(object, callback); |
| 223 } | 223 } |
| 224 | 224 |
| 225 bool Heap::popAndInvokeTraceCallback(Visitor* visitor) | 225 bool Heap::popAndInvokeTraceCallback(Visitor* visitor) |
| 226 { | 226 { |
| 227 CallbackStack::Item* item = s_markingStack->pop(); | 227 CallbackStack::Item* item = s_markingStack->pop(); |
| 228 if (!item) | 228 if (!item) |
| 229 return false; | 229 return false; |
| 230 item->call(visitor); | 230 item->call(visitor); |
| 231 return true; | 231 return true; |
| 232 } | 232 } |
| 233 | 233 |
| 234 void Heap::pushPostMarkingCallback(void* object, TraceCallback callback) | 234 void Heap::pushPostMarkingCallback(void* object, TraceCallback callback) |
| 235 { | 235 { |
| 236 ASSERT(ThreadState::current()->isInGC()); | 236 ASSERT(ThreadState::current()->isInGC()); |
| 237 | 237 |
| 238 // Trace should never reach an orphaned page. | 238 // Trace should never reach an orphaned page. |
| 239 ASSERT(!Heap::orphanedPagePool()->contains(object)); | 239 ASSERT(!Heap::getOrphanedPagePool()->contains(object)); |
| 240 CallbackStack::Item* slot = s_postMarkingCallbackStack->allocateEntry(); | 240 CallbackStack::Item* slot = s_postMarkingCallbackStack->allocateEntry(); |
| 241 *slot = CallbackStack::Item(object, callback); | 241 *slot = CallbackStack::Item(object, callback); |
| 242 } | 242 } |
| 243 | 243 |
| 244 bool Heap::popAndInvokePostMarkingCallback(Visitor* visitor) | 244 bool Heap::popAndInvokePostMarkingCallback(Visitor* visitor) |
| 245 { | 245 { |
| 246 if (CallbackStack::Item* item = s_postMarkingCallbackStack->pop()) { | 246 if (CallbackStack::Item* item = s_postMarkingCallbackStack->pop()) { |
| 247 item->call(visitor); | 247 item->call(visitor); |
| 248 return true; | 248 return true; |
| 249 } | 249 } |
| 250 return false; | 250 return false; |
| 251 } | 251 } |
| 252 | 252 |
| 253 void Heap::pushGlobalWeakCallback(void** cell, WeakCallback callback) | 253 void Heap::pushGlobalWeakCallback(void** cell, WeakCallback callback) |
| 254 { | 254 { |
| 255 ASSERT(ThreadState::current()->isInGC()); | 255 ASSERT(ThreadState::current()->isInGC()); |
| 256 | 256 |
| 257 // Trace should never reach an orphaned page. | 257 // Trace should never reach an orphaned page. |
| 258 ASSERT(!Heap::orphanedPagePool()->contains(cell)); | 258 ASSERT(!Heap::getOrphanedPagePool()->contains(cell)); |
| 259 CallbackStack::Item* slot = s_globalWeakCallbackStack->allocateEntry(); | 259 CallbackStack::Item* slot = s_globalWeakCallbackStack->allocateEntry(); |
| 260 *slot = CallbackStack::Item(cell, callback); | 260 *slot = CallbackStack::Item(cell, callback); |
| 261 } | 261 } |
| 262 | 262 |
| 263 void Heap::pushThreadLocalWeakCallback(void* closure, void* object, WeakCallback callback) | 263 void Heap::pushThreadLocalWeakCallback(void* closure, void* object, WeakCallback callback) |
| 264 { | 264 { |
| 265 ASSERT(ThreadState::current()->isInGC()); | 265 ASSERT(ThreadState::current()->isInGC()); |
| 266 | 266 |
| 267 // Trace should never reach an orphaned page. | 267 // Trace should never reach an orphaned page. |
| 268 ASSERT(!Heap::orphanedPagePool()->contains(object)); | 268 ASSERT(!Heap::getOrphanedPagePool()->contains(object)); |
| 269 ThreadState* state = pageFromObject(object)->arena()->threadState(); | 269 ThreadState* state = pageFromObject(object)->arena()->getThreadState(); |
| 270 state->pushThreadLocalWeakCallback(closure, callback); | 270 state->pushThreadLocalWeakCallback(closure, callback); |
| 271 } | 271 } |
| 272 | 272 |
| 273 bool Heap::popAndInvokeGlobalWeakCallback(Visitor* visitor) | 273 bool Heap::popAndInvokeGlobalWeakCallback(Visitor* visitor) |
| 274 { | 274 { |
| 275 if (CallbackStack::Item* item = s_globalWeakCallbackStack->pop()) { | 275 if (CallbackStack::Item* item = s_globalWeakCallbackStack->pop()) { |
| 276 item->call(visitor); | 276 item->call(visitor); |
| 277 return true; | 277 return true; |
| 278 } | 278 } |
| 279 return false; | 279 return false; |
| 280 } | 280 } |
| 281 | 281 |
| 282 void Heap::registerWeakTable(void* table, EphemeronCallback iterationCallback, EphemeronCallback iterationDoneCallback) | 282 void Heap::registerWeakTable(void* table, EphemeronCallback iterationCallback, EphemeronCallback iterationDoneCallback) |
| 283 { | 283 { |
| 284 ASSERT(ThreadState::current()->isInGC()); | 284 ASSERT(ThreadState::current()->isInGC()); |
| 285 | 285 |
| 286 // Trace should never reach an orphaned page. | 286 // Trace should never reach an orphaned page. |
| 287 ASSERT(!Heap::orphanedPagePool()->contains(table)); | 287 ASSERT(!Heap::getOrphanedPagePool()->contains(table)); |
| 288 CallbackStack::Item* slot = s_ephemeronStack->allocateEntry(); | 288 CallbackStack::Item* slot = s_ephemeronStack->allocateEntry(); |
| 289 *slot = CallbackStack::Item(table, iterationCallback); | 289 *slot = CallbackStack::Item(table, iterationCallback); |
| 290 | 290 |
| 291 // Register a post-marking callback to tell the tables that | 291 // Register a post-marking callback to tell the tables that |
| 292 // ephemeron iteration is complete. | 292 // ephemeron iteration is complete. |
| 293 pushPostMarkingCallback(table, iterationDoneCallback); | 293 pushPostMarkingCallback(table, iterationDoneCallback); |
| 294 } | 294 } |
| 295 | 295 |
| 296 #if ENABLE(ASSERT) | 296 #if ENABLE(ASSERT) |
| 297 bool Heap::weakTableRegistered(const void* table) | 297 bool Heap::weakTableRegistered(const void* table) |
| (...skipping 98 matching lines...) |
| 396 | 396 |
| 397 // 3. Transitive closure to trace objects including ephemerons. | 397 // 3. Transitive closure to trace objects including ephemerons. |
| 398 processMarkingStack(visitor.get()); | 398 processMarkingStack(visitor.get()); |
| 399 | 399 |
| 400 postMarkingProcessing(visitor.get()); | 400 postMarkingProcessing(visitor.get()); |
| 401 globalWeakProcessing(visitor.get()); | 401 globalWeakProcessing(visitor.get()); |
| 402 | 402 |
| 403 // Now we can delete all orphaned pages because there are no dangling | 403 // Now we can delete all orphaned pages because there are no dangling |
| 404 // pointers to the orphaned pages. (If we have such dangling pointers, | 404 // pointers to the orphaned pages. (If we have such dangling pointers, |
| 405 // we should have crashed during marking before getting here.) | 405 // we should have crashed during marking before getting here.) |
| 406 orphanedPagePool()->decommitOrphanedPages(); | 406 getOrphanedPagePool()->decommitOrphanedPages(); |
| 407 | 407 |
| 408 double markingTimeInMilliseconds = WTF::currentTimeMS() - startTime; | 408 double markingTimeInMilliseconds = WTF::currentTimeMS() - startTime; |
| 409 s_estimatedMarkingTimePerByte = totalObjectSize ? (markingTimeInMilliseconds / 1000 / totalObjectSize) : 0; | 409 s_estimatedMarkingTimePerByte = totalObjectSize ? (markingTimeInMilliseconds / 1000 / totalObjectSize) : 0; |
| 410 | 410 |
| 411 #if PRINT_HEAP_STATS | 411 #if PRINT_HEAP_STATS |
| 412 dataLogF("Heap::collectGarbage (gcReason=%s, lazySweeping=%d, time=%.1lfms)\n", gcReasonString(reason), gcType == BlinkGC::GCWithoutSweep, markingTimeInMilliseconds); | 412 dataLogF("Heap::collectGarbage (gcReason=%s, lazySweeping=%d, time=%.1lfms)\n", gcReasonString(reason), gcType == BlinkGC::GCWithoutSweep, markingTimeInMilliseconds); |
| 413 #endif | 413 #endif |
| 414 | 414 |
| 415 DEFINE_THREAD_SAFE_STATIC_LOCAL(CustomCountHistogram, markingTimeHistogram, new CustomCountHistogram("BlinkGC.CollectGarbage", 0, 10 * 1000, 50)); | 415 DEFINE_THREAD_SAFE_STATIC_LOCAL(CustomCountHistogram, markingTimeHistogram, new CustomCountHistogram("BlinkGC.CollectGarbage", 0, 10 * 1000, 50)); |
| 416 markingTimeHistogram.count(markingTimeInMilliseconds); | 416 markingTimeHistogram.count(markingTimeInMilliseconds); |
| (...skipping 269 matching lines...) |
| 686 size_t Heap::s_collectedWrapperCount = 0; | 686 size_t Heap::s_collectedWrapperCount = 0; |
| 687 size_t Heap::s_partitionAllocSizeAtLastGC = 0; | 687 size_t Heap::s_partitionAllocSizeAtLastGC = 0; |
| 688 double Heap::s_estimatedMarkingTimePerByte = 0.0; | 688 double Heap::s_estimatedMarkingTimePerByte = 0.0; |
| 689 bool Heap::s_isLowEndDevice = false; | 689 bool Heap::s_isLowEndDevice = false; |
| 690 BlinkGC::GCReason Heap::s_lastGCReason = BlinkGC::NumberOfGCReason; | 690 BlinkGC::GCReason Heap::s_lastGCReason = BlinkGC::NumberOfGCReason; |
| 691 #if ENABLE(ASSERT) | 691 #if ENABLE(ASSERT) |
| 692 uint16_t Heap::s_gcGeneration = 0; | 692 uint16_t Heap::s_gcGeneration = 0; |
| 693 #endif | 693 #endif |
| 694 | 694 |
| 695 } // namespace blink | 695 } // namespace blink |
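
Note: the Heap functions in this diff all share the same push / pop-and-invoke pattern on a CallbackStack: a producer reserves a slot with allocateEntry() and writes an Item(object, callback) into it, and the marking phase drains the stack by popping items and invoking them against the visitor until pop() returns nullptr. The sketch below is a minimal, simplified stand-in for that pattern only; SimpleCallbackStack, Visitor, TraceCallback, and the free functions are assumed names introduced for illustration and are not Blink's real CallbackStack API.

#include <cstdio>
#include <vector>

class Visitor;                                   // Opaque marking visitor (never dereferenced here).
using TraceCallback = void (*)(Visitor*, void*); // Callback that traces a single object.

class SimpleCallbackStack {
public:
    struct Item {
        Item(void* object, TraceCallback callback)
            : m_object(object), m_callback(callback) {}
        void call(Visitor* visitor) { m_callback(visitor, m_object); }
    private:
        void* m_object;
        TraceCallback m_callback;
    };

    // Like allocateEntry(): reserve a slot that the caller fills in immediately.
    Item* allocateEntry()
    {
        m_items.push_back(Item(nullptr, nullptr));
        return &m_items.back();
    }

    // Like pop(): return nullptr once the stack is drained.
    Item* pop()
    {
        if (m_items.empty())
            return nullptr;
        m_top = m_items.back();
        m_items.pop_back();
        return &m_top;
    }

private:
    std::vector<Item> m_items;
    Item m_top { nullptr, nullptr };
};

static SimpleCallbackStack s_markingStack;

void pushTraceCallback(void* object, TraceCallback callback)
{
    SimpleCallbackStack::Item* slot = s_markingStack.allocateEntry();
    *slot = SimpleCallbackStack::Item(object, callback);
}

bool popAndInvokeTraceCallback(Visitor* visitor)
{
    SimpleCallbackStack::Item* item = s_markingStack.pop();
    if (!item)
        return false;
    item->call(visitor);
    return true;
}

int main()
{
    int object = 42;
    pushTraceCallback(&object, [](Visitor*, void* obj) {
        std::printf("tracing object holding %d\n", *static_cast<int*>(obj));
    });
    // The marking loop: keep invoking callbacks until the stack reports empty.
    while (popAndInvokeTraceCallback(/* visitor */ nullptr)) {}
    return 0;
}

As in the diff, popAndInvokeTraceCallback() returning false is the signal that the transitive closure is complete; the weak-callback and post-marking stacks follow the same drain-until-false shape.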