Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
| 6 * met: | 6 * met: |
| 7 * | 7 * |
| 8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
| (...skipping 111 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 122 | 122 |
| 123 private: | 123 private: |
| 124 static bool s_isLowEndDevice; | 124 static bool s_isLowEndDevice; |
| 125 static size_t s_totalAllocatedSpace; | 125 static size_t s_totalAllocatedSpace; |
| 126 static size_t s_totalAllocatedObjectSize; | 126 static size_t s_totalAllocatedObjectSize; |
| 127 static size_t s_totalMarkedObjectSize; | 127 static size_t s_totalMarkedObjectSize; |
| 128 | 128 |
| 129 friend class ThreadState; | 129 friend class ThreadState; |
| 130 }; | 130 }; |
| 131 | 131 |
| 132 // Stats for the heap. | |
| 133 class ThreadHeapStats { | |
|
haraken
2016/03/30 04:37:20
Add USING_FAST_MALLOC().
keishi
2016/03/31 09:29:45
Done.
| |
| 134 public: | |
| 135 ThreadHeapStats(); | |
| 136 void setMarkedObjectSizeAtLastCompleteSweep(size_t size) { releaseStore(&m_m arkedObjectSizeAtLastCompleteSweep, size); } | |
| 137 size_t markedObjectSizeAtLastCompleteSweep() { return acquireLoad(&m_markedO bjectSizeAtLastCompleteSweep); } | |
| 138 void increaseAllocatedObjectSize(size_t delta); | |
| 139 void decreaseAllocatedObjectSize(size_t delta); | |
| 140 size_t allocatedObjectSize() { return acquireLoad(&m_allocatedObjectSize); } | |
| 141 void increaseMarkedObjectSize(size_t delta); | |
| 142 size_t markedObjectSize() { return acquireLoad(&m_markedObjectSize); } | |
| 143 void increaseAllocatedSpace(size_t delta); | |
| 144 void decreaseAllocatedSpace(size_t delta); | |
| 145 size_t allocatedSpace() { return acquireLoad(&m_allocatedSpace); } | |
| 146 size_t objectSizeAtLastGC() { return acquireLoad(&m_objectSizeAtLastGC); } | |
| 147 void increaseWrapperCount(size_t delta) { atomicAdd(&m_wrapperCount, static_ cast<long>(delta)); } | |
| 148 void decreaseWrapperCount(size_t delta) { atomicSubtract(&m_wrapperCount, st atic_cast<long>(delta)); } | |
| 149 size_t wrapperCount() { return acquireLoad(&m_wrapperCount); } | |
| 150 size_t wrapperCountAtLastGC() { return acquireLoad(&m_wrapperCountAtLastGC); } | |
| 151 void increaseCollectedWrapperCount(size_t delta) { atomicAdd(&m_collectedWra pperCount, static_cast<long>(delta)); } | |
| 152 size_t collectedWrapperCount() { return acquireLoad(&m_collectedWrapperCount ); } | |
| 153 size_t partitionAllocSizeAtLastGC() { return acquireLoad(&m_partitionAllocSi zeAtLastGC); } | |
| 154 void setEstimatedMarkingTimePerByte(double estimatedMarkingTimePerByte) { m_ estimatedMarkingTimePerByte = estimatedMarkingTimePerByte; } | |
| 155 double estimatedMarkingTimePerByte() const { return m_estimatedMarkingTimePe rByte; } | |
| 156 double estimatedMarkingTime(); | |
| 157 void reset(); | |
| 158 | |
| 159 private: | |
| 160 size_t m_allocatedSpace; | |
| 161 size_t m_allocatedObjectSize; | |
| 162 size_t m_objectSizeAtLastGC; | |
| 163 size_t m_markedObjectSize; | |
| 164 size_t m_markedObjectSizeAtLastCompleteSweep; | |
| 165 size_t m_wrapperCount; | |
| 166 size_t m_wrapperCountAtLastGC; | |
| 167 size_t m_collectedWrapperCount; | |
| 168 size_t m_partitionAllocSizeAtLastGC; | |
| 169 double m_estimatedMarkingTimePerByte; | |
| 170 }; | |
| 171 | |
| 132 class PLATFORM_EXPORT Heap { | 172 class PLATFORM_EXPORT Heap { |
| 133 STATIC_ONLY(Heap); | 173 STATIC_ONLY(Heap); |
| 134 public: | 174 public: |
| 135 static void init(); | 175 static void init(); |
| 136 static void shutdown(); | 176 static void shutdown(); |
| 137 | 177 |
| 138 #if ENABLE(ASSERT) | 178 #if ENABLE(ASSERT) |
| 139 static BasePage* findPageFromAddress(Address); | 179 static BasePage* findPageFromAddress(Address); |
| 140 static BasePage* findPageFromAddress(const void* pointer) { return findPageFromAddress(reinterpret_cast<Address>(const_cast<void*>(pointer))); } | 180 static BasePage* findPageFromAddress(const void* pointer) { return findPageFromAddress(reinterpret_cast<Address>(const_cast<void*>(pointer))); } |
| 141 #endif | 181 #endif |
| (...skipping 146 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 288 static const GCInfo* gcInfo(size_t gcInfoIndex) | 328 static const GCInfo* gcInfo(size_t gcInfoIndex) |
| 289 { | 329 { |
| 290 ASSERT(gcInfoIndex >= 1); | 330 ASSERT(gcInfoIndex >= 1); |
| 291 ASSERT(gcInfoIndex < GCInfoTable::maxIndex); | 331 ASSERT(gcInfoIndex < GCInfoTable::maxIndex); |
| 292 ASSERT(s_gcInfoTable); | 332 ASSERT(s_gcInfoTable); |
| 293 const GCInfo* info = s_gcInfoTable[gcInfoIndex]; | 333 const GCInfo* info = s_gcInfoTable[gcInfoIndex]; |
| 294 ASSERT(info); | 334 ASSERT(info); |
| 295 return info; | 335 return info; |
| 296 } | 336 } |
| 297 | 337 |
| 298 static void setMarkedObjectSizeAtLastCompleteSweep(size_t size) { releaseStore(&s_markedObjectSizeAtLastCompleteSweep, size); } | 338 static ThreadHeapStats& heapStats(); |
| 299 static size_t markedObjectSizeAtLastCompleteSweep() { return acquireLoad(&s_markedObjectSizeAtLastCompleteSweep); } | |
| 300 static void increaseAllocatedObjectSize(size_t delta) | |
| 301 { | |
| 302 atomicAdd(&s_allocatedObjectSize, static_cast<long>(delta)); | |
| 303 ProcessHeap::increaseTotalAllocatedObjectSize(delta); | |
| 304 } | |
| 305 static void decreaseAllocatedObjectSize(size_t delta) | |
| 306 { | |
| 307 atomicSubtract(&s_allocatedObjectSize, static_cast<long>(delta)); | |
| 308 ProcessHeap::decreaseTotalAllocatedObjectSize(delta); | |
| 309 } | |
| 310 static size_t allocatedObjectSize() { return acquireLoad(&s_allocatedObjectSize); } | |
| 311 static void increaseMarkedObjectSize(size_t delta) | |
| 312 { | |
| 313 atomicAdd(&s_markedObjectSize, static_cast<long>(delta)); | |
| 314 ProcessHeap::increaseTotalMarkedObjectSize(delta); | |
| 315 } | |
| 316 static size_t markedObjectSize() { return acquireLoad(&s_markedObjectSize); } | |
| 317 static void increaseAllocatedSpace(size_t delta) | |
| 318 { | |
| 319 atomicAdd(&s_allocatedSpace, static_cast<long>(delta)); | |
| 320 ProcessHeap::increaseTotalAllocatedSpace(delta); | |
| 321 } | |
| 322 static void decreaseAllocatedSpace(size_t delta) | |
| 323 { | |
| 324 atomicSubtract(&s_allocatedSpace, static_cast<long>(delta)); | |
| 325 ProcessHeap::decreaseTotalAllocatedSpace(delta); | |
| 326 } | |
| 327 static size_t allocatedSpace() { return acquireLoad(&s_allocatedSpace); } | |
| 328 static size_t objectSizeAtLastGC() { return acquireLoad(&s_objectSizeAtLastGC); } | |
| 329 static void increaseWrapperCount(size_t delta) { atomicAdd(&s_wrapperCount, static_cast<long>(delta)); } | |
| 330 static void decreaseWrapperCount(size_t delta) { atomicSubtract(&s_wrapperCount, static_cast<long>(delta)); } | |
| 331 static size_t wrapperCount() { return acquireLoad(&s_wrapperCount); } | |
| 332 static size_t wrapperCountAtLastGC() { return acquireLoad(&s_wrapperCountAtLastGC); } | |
| 333 static void increaseCollectedWrapperCount(size_t delta) { atomicAdd(&s_collectedWrapperCount, static_cast<long>(delta)); } | |
| 334 static size_t collectedWrapperCount() { return acquireLoad(&s_collectedWrapperCount); } | |
| 335 static size_t partitionAllocSizeAtLastGC() { return acquireLoad(&s_partitionAllocSizeAtLastGC); } | |
| 336 | 339 |
| 337 static double estimatedMarkingTime(); | 340 static double estimatedMarkingTime(); |
| 338 static void reportMemoryUsageHistogram(); | 341 static void reportMemoryUsageHistogram(); |
| 339 static void reportMemoryUsageForTracing(); | 342 static void reportMemoryUsageForTracing(); |
| 340 static BlinkGC::GCReason lastGCReason() { return s_lastGCReason; } | 343 static BlinkGC::GCReason lastGCReason() { return s_lastGCReason; } |
| 341 | 344 |
| 342 private: | 345 private: |
| 343 // Reset counters that track live and allocated-since-last-GC sizes. | 346 // Reset counters that track live and allocated-since-last-GC sizes. |
| 344 static void resetHeapCounters(); | 347 static void resetHeapCounters(); |
| 345 | 348 |
| 346 static int arenaIndexForObjectSize(size_t); | 349 static int arenaIndexForObjectSize(size_t); |
| 347 static bool isNormalArenaIndex(int); | 350 static bool isNormalArenaIndex(int); |
| 348 | 351 |
| 349 static void decommitCallbackStacks(); | 352 static void decommitCallbackStacks(); |
| 350 | 353 |
| 351 static CallbackStack* s_markingStack; | 354 static CallbackStack* s_markingStack; |
| 352 static CallbackStack* s_postMarkingCallbackStack; | 355 static CallbackStack* s_postMarkingCallbackStack; |
| 353 static CallbackStack* s_globalWeakCallbackStack; | 356 static CallbackStack* s_globalWeakCallbackStack; |
| 354 static CallbackStack* s_ephemeronStack; | 357 static CallbackStack* s_ephemeronStack; |
| 355 static HeapDoesNotContainCache* s_heapDoesNotContainCache; | 358 static HeapDoesNotContainCache* s_heapDoesNotContainCache; |
| 356 static FreePagePool* s_freePagePool; | 359 static FreePagePool* s_freePagePool; |
| 357 static OrphanedPagePool* s_orphanedPagePool; | 360 static OrphanedPagePool* s_orphanedPagePool; |
| 358 static size_t s_allocatedSpace; | |
| 359 static size_t s_allocatedObjectSize; | |
| 360 static size_t s_objectSizeAtLastGC; | |
| 361 static size_t s_markedObjectSize; | |
| 362 static size_t s_markedObjectSizeAtLastCompleteSweep; | |
| 363 static size_t s_wrapperCount; | |
| 364 static size_t s_wrapperCountAtLastGC; | |
| 365 static size_t s_collectedWrapperCount; | |
| 366 static size_t s_partitionAllocSizeAtLastGC; | |
| 367 static double s_estimatedMarkingTimePerByte; | |
| 368 static BlinkGC::GCReason s_lastGCReason; | 361 static BlinkGC::GCReason s_lastGCReason; |
| 369 | 362 |
| 370 friend class ThreadState; | 363 friend class ThreadState; |
| 371 }; | 364 }; |
| 372 | 365 |
| 373 template<typename T> | 366 template<typename T> |
| 374 struct IsEagerlyFinalizedType { | 367 struct IsEagerlyFinalizedType { |
| 375 STATIC_ONLY(IsEagerlyFinalizedType); | 368 STATIC_ONLY(IsEagerlyFinalizedType); |
| 376 private: | 369 private: |
| 377 typedef char YesType; | 370 typedef char YesType; |
| (...skipping 187 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 565 void VisitorHelper<Derived>::handleWeakCell(Visitor* self, void* object) | 558 void VisitorHelper<Derived>::handleWeakCell(Visitor* self, void* object) |
| 566 { | 559 { |
| 567 T** cell = reinterpret_cast<T**>(object); | 560 T** cell = reinterpret_cast<T**>(object); |
| 568 if (*cell && !ObjectAliveTrait<T>::isHeapObjectAlive(*cell)) | 561 if (*cell && !ObjectAliveTrait<T>::isHeapObjectAlive(*cell)) |
| 569 *cell = nullptr; | 562 *cell = nullptr; |
| 570 } | 563 } |
| 571 | 564 |
| 572 } // namespace blink | 565 } // namespace blink |
| 573 | 566 |
| 574 #endif // Heap_h | 567 #endif // Heap_h |
| OLD | NEW |