| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
| 6 * met: | 6 * met: |
| 7 * | 7 * |
| 8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
| (...skipping 108 matching lines...) |
| 119 s_mainThreadUnderestimatedStackSize = underestimatedStackSize - sizeof(void*); | 119 s_mainThreadUnderestimatedStackSize = underestimatedStackSize - sizeof(void*); |
| 120 } | 120 } |
| 121 | 121 |
| 122 for (int heapIndex = 0; heapIndex < LargeObjectHeapIndex; heapIndex++) | 122 for (int heapIndex = 0; heapIndex < LargeObjectHeapIndex; heapIndex++) |
| 123 m_heaps[heapIndex] = new NormalPageHeap(this, heapIndex); | 123 m_heaps[heapIndex] = new NormalPageHeap(this, heapIndex); |
| 124 m_heaps[LargeObjectHeapIndex] = new LargeObjectHeap(this, LargeObjectHeapIndex); | 124 m_heaps[LargeObjectHeapIndex] = new LargeObjectHeap(this, LargeObjectHeapIndex); |
| 125 | 125 |
| 126 m_likelyToBePromptlyFreed = adoptArrayPtr(new int[likelyToBePromptlyFreedArraySize]); | 126 m_likelyToBePromptlyFreed = adoptArrayPtr(new int[likelyToBePromptlyFreedArraySize]); |
| 127 clearHeapAges(); | 127 clearHeapAges(); |
| 128 | 128 |
| 129 m_weakCallbackStack = new CallbackStack(); | 129 m_threadLocalWeakCallbackStack = new CallbackStack(); |
| 130 } | 130 } |
| 131 | 131 |
| 132 ThreadState::~ThreadState() | 132 ThreadState::~ThreadState() |
| 133 { | 133 { |
| 134 checkThread(); | 134 checkThread(); |
| 135 delete m_weakCallbackStack; | 135 delete m_threadLocalWeakCallbackStack; |
| 136 m_weakCallbackStack = nullptr; | 136 m_threadLocalWeakCallbackStack = nullptr; |
| 137 for (int i = 0; i < NumberOfHeaps; ++i) | 137 for (int i = 0; i < NumberOfHeaps; ++i) |
| 138 delete m_heaps[i]; | 138 delete m_heaps[i]; |
| 139 deleteAllValues(m_interruptors); | 139 deleteAllValues(m_interruptors); |
| 140 **s_threadSpecific = nullptr; | 140 **s_threadSpecific = nullptr; |
| 141 if (isMainThread()) { | 141 if (isMainThread()) { |
| 142 s_mainThreadStackStart = 0; | 142 s_mainThreadStackStart = 0; |
| 143 s_mainThreadUnderestimatedStackSize = 0; | 143 s_mainThreadUnderestimatedStackSize = 0; |
| 144 } | 144 } |
| 145 } | 145 } |
| 146 | 146 |
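
Editor's aside for readers following the constructor and destructor above: the per-thread heaps are a fixed-size array in which indices [0, LargeObjectHeapIndex) hold normal page heaps and the last slot holds the large-object heap; the destructor then deletes every slot. Below is a minimal, self-contained sketch of that ownership pattern. The class bodies and the value of LargeObjectHeapIndex are illustrative stand-ins rather than Blink's real definitions, and unique_ptr replaces the raw new/delete pairing the diff shows.

#include <array>
#include <memory>

// Illustrative stand-ins for Blink's heap classes.
struct BaseHeap { virtual ~BaseHeap() = default; };
struct NormalPageHeap : BaseHeap { explicit NormalPageHeap(int index) : heapIndex(index) {} int heapIndex; };
struct LargeObjectHeap : BaseHeap { explicit LargeObjectHeap(int index) : heapIndex(index) {} int heapIndex; };

constexpr int LargeObjectHeapIndex = 5;               // placeholder value, not Blink's constant
constexpr int NumberOfHeaps = LargeObjectHeapIndex + 1;

struct ThreadStateSketch {
    ThreadStateSketch()
    {
        // Normal heaps fill indices [0, LargeObjectHeapIndex); the large-object
        // heap takes the final slot, mirroring lines 122-124 of the diff.
        for (int heapIndex = 0; heapIndex < LargeObjectHeapIndex; ++heapIndex)
            m_heaps[heapIndex] = std::make_unique<NormalPageHeap>(heapIndex);
        m_heaps[LargeObjectHeapIndex] = std::make_unique<LargeObjectHeap>(LargeObjectHeapIndex);
    }
    // unique_ptr makes the destructor's per-index delete loop implicit.
    std::array<std::unique_ptr<BaseHeap>, NumberOfHeaps> m_heaps;
};
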
| (...skipping 315 matching lines...) |
| 462 TRACE_EVENT_OBJECT_SNAPSHOT_WITH_ID("blink_gc", "ThreadState", this, json.release()); | 462 TRACE_EVENT_OBJECT_SNAPSHOT_WITH_ID("blink_gc", "ThreadState", this, json.release()); |
| 463 } | 463 } |
| 464 | 464 |
| 465 void ThreadState::incrementMarkedObjectsAge() | 465 void ThreadState::incrementMarkedObjectsAge() |
| 466 { | 466 { |
| 467 for (int i = 0; i < NumberOfHeaps; ++i) | 467 for (int i = 0; i < NumberOfHeaps; ++i) |
| 468 m_heaps[i]->incrementMarkedObjectsAge(); | 468 m_heaps[i]->incrementMarkedObjectsAge(); |
| 469 } | 469 } |
| 470 #endif | 470 #endif |
| 471 | 471 |
| 472 void ThreadState::pushWeakPointerCallback(void* object, WeakPointerCallback callback) | 472 void ThreadState::pushThreadLocalWeakCallback(void* object, WeakPointerCallback callback) |
| 473 { | 473 { |
| 474 CallbackStack::Item* slot = m_weakCallbackStack->allocateEntry(); | 474 CallbackStack::Item* slot = m_threadLocalWeakCallbackStack->allocateEntry(); |
| 475 *slot = CallbackStack::Item(object, callback); | 475 *slot = CallbackStack::Item(object, callback); |
| 476 } | 476 } |
| 477 | 477 |
| 478 bool ThreadState::popAndInvokeWeakPointerCallback(Visitor* visitor) | 478 bool ThreadState::popAndInvokeThreadLocalWeakCallback(Visitor* visitor) |
| 479 { | 479 { |
| 480 // For weak processing we should never reach orphaned pages since orphaned | 480 // For weak processing we should never reach orphaned pages since orphaned |
| 481 // pages are not traced and thus objects on those pages are never | 481 // pages are not traced and thus objects on those pages are never |
| 482 // registered as objects on orphaned pages. We cannot assert this here since | 482 // registered as objects on orphaned pages. We cannot assert this here since |
| 483 // we might have an off-heap collection. We assert it in | 483 // we might have an off-heap collection. We assert it in |
| 484 // Heap::pushWeakPointerCallback. | 484 // Heap::pushThreadLocalWeakCallback. |
| 485 if (CallbackStack::Item* item = m_weakCallbackStack->pop()) { | 485 if (CallbackStack::Item* item = m_threadLocalWeakCallbackStack->pop()) { |
| 486 item->call(visitor); | 486 item->call(visitor); |
| 487 return true; | 487 return true; |
| 488 } | 488 } |
| 489 return false; | 489 return false; |
| 490 } | 490 } |
| 491 | 491 |
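
The two functions above are the whole thread-local weak callback protocol: registration appends an (object, callback) entry, and weak processing later drains the stack one entry at a time. Below is a minimal sketch of that push / pop-and-invoke shape; it is not Blink's CallbackStack, and the exact WeakPointerCallback signature (visitor plus the registered object) is an assumption made for illustration.

#include <vector>

struct Visitor;  // opaque here; the diff uses Heap::s_markingVisitor

// Assumed callback shape: receives the visitor and the registered object.
using WeakPointerCallback = void (*)(Visitor*, void* object);

class ThreadLocalWeakCallbackStackSketch {
public:
    // Mirrors pushThreadLocalWeakCallback: record the pair for later.
    void push(void* object, WeakPointerCallback callback)
    {
        m_items.push_back({object, callback});
    }

    // Mirrors popAndInvokeThreadLocalWeakCallback: run one entry and report
    // whether anything was left to run (false terminates the caller's loop).
    bool popAndInvoke(Visitor* visitor)
    {
        if (m_items.empty())
            return false;
        Item item = m_items.back();
        m_items.pop_back();
        item.callback(visitor, item.object);
        return true;
    }

private:
    struct Item {
        void* object;
        WeakPointerCallback callback;
    };
    std::vector<Item> m_items;
};
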
| 492 PersistentNode& ThreadState::globalRoots() | 492 PersistentNode& ThreadState::globalRoots() |
| 493 { | 493 { |
| 494 AtomicallyInitializedStaticReference(PersistentNode, anchor, new PersistentAnchor); | 494 AtomicallyInitializedStaticReference(PersistentNode, anchor, new PersistentAnchor); |
| 495 return anchor; | 495 return anchor; |
| (...skipping 379 matching lines...) |
| 875 if (isMainThread()) | 875 if (isMainThread()) |
| 876 ScriptForbiddenScope::enter(); | 876 ScriptForbiddenScope::enter(); |
| 877 | 877 |
| 878 SweepForbiddenScope forbiddenScope(this); | 878 SweepForbiddenScope forbiddenScope(this); |
| 879 { | 879 { |
| 880 // Disallow allocation during weak processing. | 880 // Disallow allocation during weak processing. |
| 881 NoAllocationScope noAllocationScope(this); | 881 NoAllocationScope noAllocationScope(this); |
| 882 { | 882 { |
| 883 TRACE_EVENT0("blink_gc", "ThreadState::threadLocalWeakProcessing"); | 883 TRACE_EVENT0("blink_gc", "ThreadState::threadLocalWeakProcessing"); |
| 884 // Perform thread-specific weak processing. | 884 // Perform thread-specific weak processing. |
| 885 while (popAndInvokeWeakPointerCallback(Heap::s_markingVisitor)) { } | 885 while (popAndInvokeThreadLocalWeakCallback(Heap::s_markingVisitor)) { } |
| 886 } | 886 } |
| 887 { | 887 { |
| 888 TRACE_EVENT0("blink_gc", "ThreadState::invokePreFinalizers"); | 888 TRACE_EVENT0("blink_gc", "ThreadState::invokePreFinalizers"); |
| 889 invokePreFinalizers(*Heap::s_markingVisitor); | 889 invokePreFinalizers(*Heap::s_markingVisitor); |
| 890 } | 890 } |
| 891 } | 891 } |
| 892 | 892 |
| 893 if (isMainThread()) | 893 if (isMainThread()) |
| 894 ScriptForbiddenScope::exit(); | 894 ScriptForbiddenScope::exit(); |
| 895 } | 895 } |
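
The block above leans on stacked RAII scopes: sweeping is forbidden for the whole weak-processing phase, and allocation is additionally forbidden while the thread-local weak callbacks and pre-finalizers run. A small sketch of that nesting follows; ScopedFlag is a made-up stand-in for SweepForbiddenScope and NoAllocationScope, which in the real code toggle state on the ThreadState itself.

// Made-up guard standing in for SweepForbiddenScope / NoAllocationScope.
class ScopedFlag {
public:
    explicit ScopedFlag(bool& flag) : m_flag(flag) { m_flag = true; }
    ~ScopedFlag() { m_flag = false; }
    ScopedFlag(const ScopedFlag&) = delete;
    ScopedFlag& operator=(const ScopedFlag&) = delete;
private:
    bool& m_flag;
};

// Sketch of the nesting in the diff: both flags are raised while the
// thread-local weak callbacks drain, pre-finalizers run inside the same
// no-allocation window, and the flags drop again in reverse order.
void threadLocalWeakProcessingSketch(bool& sweepForbidden, bool& allocationForbidden)
{
    ScopedFlag forbidSweep(sweepForbidden);
    {
        ScopedFlag forbidAllocation(allocationForbidden);
        // while (popAndInvokeThreadLocalWeakCallback(markingVisitor)) { }
        // invokePreFinalizers(markingVisitor);
    }
    // allocationForbidden is false again here; sweepForbidden drops at return.
}
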
| (...skipping 434 matching lines...) |
| 1330 json->beginArray(it->key.ascii().data()); | 1330 json->beginArray(it->key.ascii().data()); |
| 1331 for (size_t age = 0; age <= maxHeapObjectAge; ++age) | 1331 for (size_t age = 0; age <= maxHeapObjectAge; ++age) |
| 1332 json->pushInteger(it->value.ages[age]); | 1332 json->pushInteger(it->value.ages[age]); |
| 1333 json->endArray(); | 1333 json->endArray(); |
| 1334 } | 1334 } |
| 1335 TRACE_EVENT_OBJECT_SNAPSHOT_WITH_ID(TRACE_DISABLED_BY_DEFAULT("blink_gc"), statsName, this, json.release()); | 1335 TRACE_EVENT_OBJECT_SNAPSHOT_WITH_ID(TRACE_DISABLED_BY_DEFAULT("blink_gc"), statsName, this, json.release()); |
| 1336 } | 1336 } |
| 1337 #endif | 1337 #endif |
| 1338 | 1338 |
| 1339 } // namespace blink | 1339 } // namespace blink |