| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
| 6 * met: | 6 * met: |
| 7 * | 7 * |
| 8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
| (...skipping 134 matching lines...) |
| 145 ProcessHeap::decreaseTotalAllocatedSpace(delta); | 145 ProcessHeap::decreaseTotalAllocatedSpace(delta); |
| 146 } | 146 } |
| 147 | 147 |
| 148 ThreadHeap::ThreadHeap(ThreadState* threadState) | 148 ThreadHeap::ThreadHeap(ThreadState* threadState) |
| 149 : m_threadState(threadState), | 149 : m_threadState(threadState), |
| 150 m_regionTree(WTF::makeUnique<RegionTree>()), | 150 m_regionTree(WTF::makeUnique<RegionTree>()), |
| 151 m_heapDoesNotContainCache(WTF::wrapUnique(new HeapDoesNotContainCache)), | 151 m_heapDoesNotContainCache(WTF::wrapUnique(new HeapDoesNotContainCache)), |
| 152 m_freePagePool(WTF::wrapUnique(new PagePool)), | 152 m_freePagePool(WTF::wrapUnique(new PagePool)), |
| 153 m_markingStack(CallbackStack::create()), | 153 m_markingStack(CallbackStack::create()), |
| 154 m_postMarkingCallbackStack(CallbackStack::create()), | 154 m_postMarkingCallbackStack(CallbackStack::create()), |
| 155 m_globalWeakCallbackStack(CallbackStack::create()), | 155 m_weakCallbackStack(CallbackStack::create()), |
| 156 m_ephemeronStack(CallbackStack::create()) { | 156 m_ephemeronStack(CallbackStack::create()) { |
| 157 if (ThreadState::current()->isMainThread()) | 157 if (ThreadState::current()->isMainThread()) |
| 158 s_mainThreadHeap = this; | 158 s_mainThreadHeap = this; |
| 159 } | 159 } |
| 160 | 160 |
| 161 ThreadHeap::~ThreadHeap() { | 161 ThreadHeap::~ThreadHeap() { |
| 162 } | 162 } |
| 163 | 163 |
| 164 #if DCHECK_IS_ON() | 164 #if DCHECK_IS_ON() |
| 165 BasePage* ThreadHeap::findPageFromAddress(Address address) { | 165 BasePage* ThreadHeap::findPageFromAddress(Address address) { |
| (...skipping 75 matching lines...) |
| 241 if (CallbackStack::Item* item = m_postMarkingCallbackStack->pop()) { | 241 if (CallbackStack::Item* item = m_postMarkingCallbackStack->pop()) { |
| 242 item->call(visitor); | 242 item->call(visitor); |
| 243 return true; | 243 return true; |
| 244 } | 244 } |
| 245 return false; | 245 return false; |
| 246 } | 246 } |
| 247 | 247 |
| 248 void ThreadHeap::pushWeakCallback(void* closure, WeakCallback callback) { | 248 void ThreadHeap::pushWeakCallback(void* closure, WeakCallback callback) { |
| 249 ASSERT(ThreadState::current()->isInGC()); | 249 ASSERT(ThreadState::current()->isInGC()); |
| 250 | 250 |
| 251 CallbackStack::Item* slot = m_globalWeakCallbackStack->allocateEntry(); | 251 CallbackStack::Item* slot = m_weakCallbackStack->allocateEntry(); |
| 252 *slot = CallbackStack::Item(closure, callback); | 252 *slot = CallbackStack::Item(closure, callback); |
| 253 } | 253 } |
| 254 | 254 |
| 255 bool ThreadHeap::popAndInvokeGlobalWeakCallback(Visitor* visitor) { | 255 bool ThreadHeap::popAndInvokeWeakCallback(Visitor* visitor) { |
| 256 if (CallbackStack::Item* item = m_globalWeakCallbackStack->pop()) { | 256 if (CallbackStack::Item* item = m_weakCallbackStack->pop()) { |
| 257 item->call(visitor); | 257 item->call(visitor); |
| 258 return true; | 258 return true; |
| 259 } | 259 } |
| 260 return false; | 260 return false; |
| 261 } | 261 } |
| 262 | 262 |
| 263 void ThreadHeap::registerWeakTable(void* table, | 263 void ThreadHeap::registerWeakTable(void* table, |
| 264 EphemeronCallback iterationCallback, | 264 EphemeronCallback iterationCallback, |
| 265 EphemeronCallback iterationDoneCallback) { | 265 EphemeronCallback iterationDoneCallback) { |
| 266 ASSERT(ThreadState::current()->isInGC()); | 266 ASSERT(ThreadState::current()->isInGC()); |
| 267 | 267 |
| 268 CallbackStack::Item* slot = m_ephemeronStack->allocateEntry(); | 268 CallbackStack::Item* slot = m_ephemeronStack->allocateEntry(); |
| 269 *slot = CallbackStack::Item(table, iterationCallback); | 269 *slot = CallbackStack::Item(table, iterationCallback); |
| 270 | 270 |
| 271 // Register a post-marking callback to tell the tables that | 271 // Register a post-marking callback to tell the tables that |
| 272 // ephemeron iteration is complete. | 272 // ephemeron iteration is complete. |
| 273 pushPostMarkingCallback(table, iterationDoneCallback); | 273 pushPostMarkingCallback(table, iterationDoneCallback); |
| 274 } | 274 } |
| 275 | 275 |
| 276 #if DCHECK_IS_ON() | 276 #if DCHECK_IS_ON() |
| 277 bool ThreadHeap::weakTableRegistered(const void* table) { | 277 bool ThreadHeap::weakTableRegistered(const void* table) { |
| 278 ASSERT(m_ephemeronStack); | 278 ASSERT(m_ephemeronStack); |
| 279 return m_ephemeronStack->hasCallbackForObject(table); | 279 return m_ephemeronStack->hasCallbackForObject(table); |
| 280 } | 280 } |
| 281 #endif | 281 #endif |
| 282 | 282 |
| 283 void ThreadHeap::commitCallbackStacks() { | 283 void ThreadHeap::commitCallbackStacks() { |
| 284 m_markingStack->commit(); | 284 m_markingStack->commit(); |
| 285 m_postMarkingCallbackStack->commit(); | 285 m_postMarkingCallbackStack->commit(); |
| 286 m_globalWeakCallbackStack->commit(); | 286 m_weakCallbackStack->commit(); |
| 287 m_ephemeronStack->commit(); | 287 m_ephemeronStack->commit(); |
| 288 } | 288 } |
| 289 | 289 |
| 290 HeapCompact* ThreadHeap::compaction() { | 290 HeapCompact* ThreadHeap::compaction() { |
| 291 if (!m_compaction) | 291 if (!m_compaction) |
| 292 m_compaction = HeapCompact::create(); | 292 m_compaction = HeapCompact::create(); |
| 293 return m_compaction.get(); | 293 return m_compaction.get(); |
| 294 } | 294 } |
| 295 | 295 |
| 296 void ThreadHeap::registerMovingObjectReference(MovableReference* slot) { | 296 void ThreadHeap::registerMovingObjectReference(MovableReference* slot) { |
| 297 DCHECK(slot); | 297 DCHECK(slot); |
| 298 DCHECK(*slot); | 298 DCHECK(*slot); |
| 299 compaction()->registerMovingObjectReference(slot); | 299 compaction()->registerMovingObjectReference(slot); |
| 300 } | 300 } |
| 301 | 301 |
| 302 void ThreadHeap::registerMovingObjectCallback(MovableReference reference, | 302 void ThreadHeap::registerMovingObjectCallback(MovableReference reference, |
| 303 MovingObjectCallback callback, | 303 MovingObjectCallback callback, |
| 304 void* callbackData) { | 304 void* callbackData) { |
| 305 DCHECK(reference); | 305 DCHECK(reference); |
| 306 compaction()->registerMovingObjectCallback(reference, callback, callbackData); | 306 compaction()->registerMovingObjectCallback(reference, callback, callbackData); |
| 307 } | 307 } |
| 308 | 308 |
| 309 void ThreadHeap::decommitCallbackStacks() { | 309 void ThreadHeap::decommitCallbackStacks() { |
| 310 m_markingStack->decommit(); | 310 m_markingStack->decommit(); |
| 311 m_postMarkingCallbackStack->decommit(); | 311 m_postMarkingCallbackStack->decommit(); |
| 312 m_globalWeakCallbackStack->decommit(); | 312 m_weakCallbackStack->decommit(); |
| 313 m_ephemeronStack->decommit(); | 313 m_ephemeronStack->decommit(); |
| 314 } | 314 } |
| 315 | 315 |
| 316 void ThreadHeap::preGC() { | 316 void ThreadHeap::preGC() { |
| 317 ASSERT(!ThreadState::current()->isInGC()); | 317 ASSERT(!ThreadState::current()->isInGC()); |
| 318 m_threadState->preGC(); | 318 m_threadState->preGC(); |
| 319 } | 319 } |
| 320 | 320 |
| 321 void ThreadHeap::postGC(BlinkGC::GCType gcType) { | 321 void ThreadHeap::postGC(BlinkGC::GCType gcType) { |
| 322 ASSERT(ThreadState::current()->isInGC()); | 322 ASSERT(ThreadState::current()->isInGC()); |
| (...skipping 35 matching lines...) |
| 358 // if they are only reachable from their front objects. | 358 // if they are only reachable from their front objects. |
| 359 while (popAndInvokePostMarkingCallback(visitor)) { | 359 while (popAndInvokePostMarkingCallback(visitor)) { |
| 360 } | 360 } |
| 361 | 361 |
| 362 // Post-marking callbacks should not trace any objects and | 362 // Post-marking callbacks should not trace any objects and |
| 363 // therefore the marking stack should be empty after the | 363 // therefore the marking stack should be empty after the |
| 364 // post-marking callbacks. | 364 // post-marking callbacks. |
| 365 ASSERT(m_markingStack->isEmpty()); | 365 ASSERT(m_markingStack->isEmpty()); |
| 366 } | 366 } |
| 367 | 367 |
| 368 void ThreadHeap::globalWeakProcessing(Visitor* visitor) { | 368 void ThreadHeap::weakProcessing(Visitor* visitor) { |
| 369 TRACE_EVENT0("blink_gc", "ThreadHeap::globalWeakProcessing"); | 369 TRACE_EVENT0("blink_gc", "ThreadHeap::weakProcessing"); |
| 370 double startTime = WTF::currentTimeMS(); | 370 double startTime = WTF::currentTimeMS(); |
| 371 | 371 |
| 372 // Call weak callbacks on objects that may now be pointing to dead objects. | 372 // Call weak callbacks on objects that may now be pointing to dead objects. |
| 373 while (popAndInvokeGlobalWeakCallback(visitor)) { | 373 while (popAndInvokeWeakCallback(visitor)) { |
| 374 } | 374 } |
| 375 | 375 |
| 376 // It is not permitted to trace pointers of live objects in the weak | 376 // It is not permitted to trace pointers of live objects in the weak |
| 377 // callback phase, so the marking stack should still be empty here. | 377 // callback phase, so the marking stack should still be empty here. |
| 378 ASSERT(m_markingStack->isEmpty()); | 378 ASSERT(m_markingStack->isEmpty()); |
| 379 | 379 |
| 380 double timeForGlobalWeakProcessing = WTF::currentTimeMS() - startTime; | 380 double timeForWeakProcessing = WTF::currentTimeMS() - startTime; |
| 381 DEFINE_THREAD_SAFE_STATIC_LOCAL( | 381 DEFINE_THREAD_SAFE_STATIC_LOCAL( |
| 382 CustomCountHistogram, globalWeakTimeHistogram, | 382 CustomCountHistogram, weakProcessingTimeHistogram, |
| 383 new CustomCountHistogram("BlinkGC.TimeForGlobalWeakProcessing", 1, | 383 new CustomCountHistogram("BlinkGC.TimeForGlobalWeakProcessing", 1, |
| 384 10 * 1000, 50)); | 384 10 * 1000, 50)); |
| 385 globalWeakTimeHistogram.count(timeForGlobalWeakProcessing); | 385 weakProcessingTimeHistogram.count(timeForWeakProcessing); |
| 386 } | 386 } |
| 387 | 387 |
| 388 void ThreadHeap::reportMemoryUsageHistogram() { | 388 void ThreadHeap::reportMemoryUsageHistogram() { |
| 389 static size_t supportedMaxSizeInMB = 4 * 1024; | 389 static size_t supportedMaxSizeInMB = 4 * 1024; |
| 390 static size_t observedMaxSizeInMB = 0; | 390 static size_t observedMaxSizeInMB = 0; |
| 391 | 391 |
| 392 // We only report the memory in the main thread. | 392 // We only report the memory in the main thread. |
| 393 if (!isMainThread()) | 393 if (!isMainThread()) |
| 394 return; | 394 return; |
| 395 // +1 is for rounding up the sizeInMB. | 395 // +1 is for rounding up the sizeInMB. |
| (...skipping 119 matching lines...) |
| 515 ProcessHeap::decreaseTotalAllocatedObjectSize(m_stats.allocatedObjectSize()); | 515 ProcessHeap::decreaseTotalAllocatedObjectSize(m_stats.allocatedObjectSize()); |
| 516 ProcessHeap::decreaseTotalMarkedObjectSize(m_stats.markedObjectSize()); | 516 ProcessHeap::decreaseTotalMarkedObjectSize(m_stats.markedObjectSize()); |
| 517 | 517 |
| 518 m_stats.reset(); | 518 m_stats.reset(); |
| 519 m_threadState->resetHeapCounters(); | 519 m_threadState->resetHeapCounters(); |
| 520 } | 520 } |
| 521 | 521 |
| 522 ThreadHeap* ThreadHeap::s_mainThreadHeap = nullptr; | 522 ThreadHeap* ThreadHeap::s_mainThreadHeap = nullptr; |
| 523 | 523 |
| 524 } // namespace blink | 524 } // namespace blink |
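
The rename above leaves the underlying mechanism unchanged: weak callbacks are pushed onto a callback stack during marking (pushWeakCallback) and then popped and invoked one by one after marking (popAndInvokeWeakCallback, driven by the loop in weakProcessing). Below is a minimal standalone sketch of that push/pop pattern under simplified assumptions; SimpleVisitor, WeakEntry, and WeakCallbackStack are stand-ins invented for illustration, not Blink's actual Visitor or CallbackStack types.

    // Minimal sketch of the weak-callback stack pattern shown in the diff.
    // All types here are simplified stand-ins, not Blink's real classes.
    #include <cstdio>
    #include <vector>

    struct SimpleVisitor {};  // Stand-in for blink::Visitor.

    using WeakCallback = void (*)(SimpleVisitor*, void* closure);

    struct WeakEntry {
      void* closure;
      WeakCallback callback;
    };

    class WeakCallbackStack {
     public:
      // Mirrors ThreadHeap::pushWeakCallback: record (closure, callback)
      // during marking so it can run once marking has finished.
      void push(void* closure, WeakCallback callback) {
        m_entries.push_back({closure, callback});
      }

      // Mirrors ThreadHeap::popAndInvokeWeakCallback: run one pending
      // callback and report whether any work was done.
      bool popAndInvoke(SimpleVisitor* visitor) {
        if (m_entries.empty())
          return false;
        WeakEntry entry = m_entries.back();
        m_entries.pop_back();
        entry.callback(visitor, entry.closure);
        return true;
      }

     private:
      std::vector<WeakEntry> m_entries;
    };

    // Example weak callback: this sketch clears the slot unconditionally;
    // Blink's real callbacks first check liveness via the visitor/heap.
    static void clearSlot(SimpleVisitor*, void* closure) {
      void** slot = static_cast<void**>(closure);
      *slot = nullptr;
      std::printf("weak slot cleared\n");
    }

    int main() {
      WeakCallbackStack stack;
      SimpleVisitor visitor;
      void* weakSlot = &stack;  // Pretend this points at a heap object.

      stack.push(&weakSlot, clearSlot);

      // Mirrors the loop in ThreadHeap::weakProcessing: drain the stack.
      while (stack.popAndInvoke(&visitor)) {
      }
      return 0;
    }

As in the patched code, callbacks must not trace or resurrect objects while draining the stack, which is why ThreadHeap::weakProcessing asserts that the marking stack is still empty afterwards.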