| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
| 6 * met: | 6 * met: |
| 7 * | 7 * |
| 8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
| (...skipping 63 matching lines...) | |
| 74 if (UNLIKELY(!!freeHook)) | 74 if (UNLIKELY(!!freeHook)) |
| 75 freeHook(address); | 75 freeHook(address); |
| 76 } | 76 } |
| 77 | 77 |
| 78 private: | 78 private: |
| 79 static AllocationHook* m_allocationHook; | 79 static AllocationHook* m_allocationHook; |
| 80 static FreeHook* m_freeHook; | 80 static FreeHook* m_freeHook; |
| 81 }; | 81 }; |
| 82 | 82 |
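The hunk above ends inside a hooks class: static process-wide hook pointers, with the free hook invoked behind a cheap null check. As a minimal, self-contained sketch of that pattern — the class name, the setter, and the `s_freeHook` spelling here are assumptions for illustration, not the header's real API:

```cpp
using FreeHook = void(void* address);

// Same shape as the class above: a static hook pointer, a cheap null check
// on the hot path, and the hook invoked only when instrumentation set one.
class AllocationHooksSketch {  // hypothetical name; the real class is outside this hunk
 public:
  static void setFreeHook(FreeHook* hook) { s_freeHook = hook; }

  static void freeHookIfEnabled(void* address) {
    FreeHook* hook = s_freeHook;
    if (hook)  // the header wraps this test in UNLIKELY()
      hook(address);
  }

 private:
  static FreeHook* s_freeHook;
};

FreeHook* AllocationHooksSketch::s_freeHook = nullptr;
```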
| 83 class CrossThreadPersistentRegion; | 83 class CrossThreadPersistentRegion; |
| 84 class HeapCompact; | |
| 84 template <typename T> | 85 template <typename T> |
| 85 class Member; | 86 class Member; |
| 86 template <typename T> | 87 template <typename T> |
| 87 class WeakMember; | 88 class WeakMember; |
| 88 template <typename T> | 89 template <typename T> |
| 89 class UntracedMember; | 90 class UntracedMember; |
| 90 | 91 |
| 91 template <typename T, bool = NeedsAdjustAndMark<T>::value> | 92 template <typename T, bool = NeedsAdjustAndMark<T>::value> |
| 92 class ObjectAliveTrait; | 93 class ObjectAliveTrait; |
| 93 | 94 |
| (...skipping 280 matching lines...) | |
| 374 bool popAndInvokeGlobalWeakCallback(Visitor*); | 375 bool popAndInvokeGlobalWeakCallback(Visitor*); |
| 375 | 376 |
| 376 // Register an ephemeron table for fixed-point iteration. | 377 // Register an ephemeron table for fixed-point iteration. |
| 377 void registerWeakTable(void* containerObject, | 378 void registerWeakTable(void* containerObject, |
| 378 EphemeronCallback, | 379 EphemeronCallback, |
| 379 EphemeronCallback); | 380 EphemeronCallback); |
| 380 #if ENABLE(ASSERT) | 381 #if ENABLE(ASSERT) |
| 381 bool weakTableRegistered(const void*); | 382 bool weakTableRegistered(const void*); |
| 382 #endif | 383 #endif |
| 383 | 384 |
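`registerWeakTable()` above feeds the fixed-point iteration that ephemeron (key-to-value) tables require: a value may be marked only once its key is proven live, and each newly marked value can in turn make further keys live. A minimal sketch of that loop — `Object` and `markEphemerons` are hypothetical stand-ins, not Blink's `CallbackStack` machinery:

```cpp
#include <unordered_map>
#include <unordered_set>

using Object = int;  // stand-in id for a heap object

// One marking pass over ephemeron tables: a value is reachable only through a
// live key, and marking a value can make other entries eligible, so iterate
// until no new object gets marked.
void markEphemerons(std::unordered_set<Object>& marked,
                    const std::unordered_map<Object, Object>& ephemerons) {
  bool changed = true;
  while (changed) {
    changed = false;
    for (const auto& entry : ephemerons) {
      if (marked.count(entry.first) && !marked.count(entry.second)) {
        marked.insert(entry.second);  // key live => value becomes live
        changed = true;
      }
    }
  }
}
```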
| 385 // Heap compaction registration methods: | |
| 386 | |
| 387 // Register |slot| as containing a reference to a movable heap object. | |
| 388 // | |
| 389 // When compaction moves the object pointed to by |*slot| to |newAddress|, | |
| 390 // |*slot| must be updated to hold |newAddress| instead. | |
| 391 void registerMovingObjectReference(MovableReference*); | |
| 392 | |
| 393 // Register a callback to be invoked upon moving the object starting at | |
| 394 // |reference|; see |MovingObjectCallback| documentation for details. | |
| 395 // | |
| 396 // This callback mechanism is needed to account for backing store objects | |
| 397 // containing intra-object pointers, all of which must be relocated/rebased | |
| 398 // relative to the moved-to location. | |
| 399 // | |
| 400 // For Blink, |LinkedHashSet<>| is currently the only abstraction which | |
| 401 // relies on this feature. | |
| 402 void registerMovingObjectCallback(MovableReference, | |
| 403 MovingObjectCallback, | |
| 404 void* callbackData); | |
| 405 | |
| 406 // Register a relocation; when |*slot| is compacted and moved, | |
| 407 // |slot| must be updated to point to |*slot|'s new location. | |
| 408 void registerRelocation(MovableReference* slot); | |
haraken 2016/11/30 06:29:52
What's the difference between registerMovingObjectReference and registerRelocation?

sof 2016/11/30 06:52:43
Have a look at the documentation of HeapCompact::r…
| 409 | |
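To make the registration methods concrete, here is a conceptual sketch of what a compactor does with a slot registered through `registerMovingObjectReference()` and a callback registered through `registerMovingObjectCallback()`. The callback signature and the `moveAndFixup()` helper are assumptions for illustration, not Blink's real compaction internals:

```cpp
#include <cstddef>
#include <cstdlib>
#include <cstring>

using MovableReference = void*;
using MovingObjectCallback = void (*)(void* callbackData,
                                      MovableReference from,
                                      MovableReference to,
                                      size_t size);

// Conceptual handling of one registered slot: relocate the payload, rewrite
// the registered reference, then let the owner rebase any intra-object
// pointers (the LinkedHashSet<> case mentioned above).
static void moveAndFixup(MovableReference* slot,
                         size_t size,
                         MovingObjectCallback callback,
                         void* callbackData) {
  void* from = *slot;
  void* to = std::malloc(size);  // stands in for the compacted-to arena page
  std::memcpy(to, from, size);   // move the backing store payload
  *slot = to;                    // reference registered via registerMovingObjectReference()
  if (callback)                  // registered via registerMovingObjectCallback()
    callback(callbackData, from, to, size);
  std::free(from);
}
```

The callback step is what lets a backing store with intra-object pointers rebase them against the moved-to location after the copy.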
| 384 BlinkGC::GCReason lastGCReason() { return m_lastGCReason; } | 410 BlinkGC::GCReason lastGCReason() { return m_lastGCReason; } |
| 385 RegionTree* getRegionTree() { return m_regionTree.get(); } | 411 RegionTree* getRegionTree() { return m_regionTree.get(); } |
| 386 | 412 |
| 387 static inline size_t allocationSizeFromSize(size_t size) { | 413 static inline size_t allocationSizeFromSize(size_t size) { |
| 388 // Add space for header. | 414 // Add space for header. |
| 389 size_t allocationSize = size + sizeof(HeapObjectHeader); | 415 size_t allocationSize = size + sizeof(HeapObjectHeader); |
| 390 // The allocation size calculation can overflow for large sizes. | 416 // The allocation size calculation can overflow for large sizes. |
| 391 RELEASE_ASSERT(allocationSize > size); | 417 RELEASE_ASSERT(allocationSize > size); |
| 392 // Align size with allocation granularity. | 418 // Align size with allocation granularity. |
| 393 allocationSize = (allocationSize + allocationMask) & ~allocationMask; | 419 allocationSize = (allocationSize + allocationMask) & ~allocationMask; |
| (...skipping 37 matching lines...) | |
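A worked illustration of the size rounding in `allocationSizeFromSize()` above, assuming for illustration an 8-byte allocation granularity (so `allocationMask` is 7) and an 8-byte header; the real constants are defined outside this hunk:

```cpp
#include <cstddef>

// Assumed values; Blink defines these elsewhere in the header.
constexpr size_t allocationGranularity = 8;
constexpr size_t allocationMask = allocationGranularity - 1;

constexpr size_t alignUp(size_t size) {
  return (size + allocationMask) & ~allocationMask;
}

// With an assumed 8-byte header, a 20-byte payload needs 28 bytes, which the
// mask arithmetic rounds up to the next granule.
static_assert(alignUp(28) == 32, "rounds up to the next multiple of 8");
static_assert(alignUp(32) == 32, "already-aligned sizes are unchanged");
```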
| 431 ASSERT(gcInfoIndex < GCInfoTable::maxIndex); | 457 ASSERT(gcInfoIndex < GCInfoTable::maxIndex); |
| 432 ASSERT(s_gcInfoTable); | 458 ASSERT(s_gcInfoTable); |
| 433 const GCInfo* info = s_gcInfoTable[gcInfoIndex]; | 459 const GCInfo* info = s_gcInfoTable[gcInfoIndex]; |
| 434 ASSERT(info); | 460 ASSERT(info); |
| 435 return info; | 461 return info; |
| 436 } | 462 } |
| 437 | 463 |
| 438 static void reportMemoryUsageHistogram(); | 464 static void reportMemoryUsageHistogram(); |
| 439 static void reportMemoryUsageForTracing(); | 465 static void reportMemoryUsageForTracing(); |
| 440 | 466 |
| 467 HeapCompact* compaction(); | |
| 468 | |
| 441 private: | 469 private: |
| 442 // Reset counters that track live and allocated-since-last-GC sizes. | 470 // Reset counters that track live and allocated-since-last-GC sizes. |
| 443 void resetHeapCounters(); | 471 void resetHeapCounters(); |
| 444 | 472 |
| 445 static int arenaIndexForObjectSize(size_t); | 473 static int arenaIndexForObjectSize(size_t); |
| 446 static bool isNormalArenaIndex(int); | 474 static bool isNormalArenaIndex(int); |
| 447 | 475 |
| 448 void commitCallbackStacks(); | 476 void commitCallbackStacks(); |
| 449 void decommitCallbackStacks(); | 477 void decommitCallbackStacks(); |
| 450 | 478 |
| 451 RecursiveMutex m_threadAttachMutex; | 479 RecursiveMutex m_threadAttachMutex; |
| 452 ThreadStateSet m_threads; | 480 ThreadStateSet m_threads; |
| 453 ThreadHeapStats m_stats; | 481 ThreadHeapStats m_stats; |
| 454 std::unique_ptr<RegionTree> m_regionTree; | 482 std::unique_ptr<RegionTree> m_regionTree; |
| 455 std::unique_ptr<HeapDoesNotContainCache> m_heapDoesNotContainCache; | 483 std::unique_ptr<HeapDoesNotContainCache> m_heapDoesNotContainCache; |
| 456 std::unique_ptr<SafePointBarrier> m_safePointBarrier; | 484 std::unique_ptr<SafePointBarrier> m_safePointBarrier; |
| 457 std::unique_ptr<FreePagePool> m_freePagePool; | 485 std::unique_ptr<FreePagePool> m_freePagePool; |
| 458 std::unique_ptr<OrphanedPagePool> m_orphanedPagePool; | 486 std::unique_ptr<OrphanedPagePool> m_orphanedPagePool; |
| 459 std::unique_ptr<CallbackStack> m_markingStack; | 487 std::unique_ptr<CallbackStack> m_markingStack; |
| 460 std::unique_ptr<CallbackStack> m_postMarkingCallbackStack; | 488 std::unique_ptr<CallbackStack> m_postMarkingCallbackStack; |
| 461 std::unique_ptr<CallbackStack> m_globalWeakCallbackStack; | 489 std::unique_ptr<CallbackStack> m_globalWeakCallbackStack; |
| 462 std::unique_ptr<CallbackStack> m_ephemeronStack; | 490 std::unique_ptr<CallbackStack> m_ephemeronStack; |
| 463 BlinkGC::GCReason m_lastGCReason; | 491 BlinkGC::GCReason m_lastGCReason; |
| 464 StackFrameDepth m_stackFrameDepth; | 492 StackFrameDepth m_stackFrameDepth; |
| 465 | 493 |
| 494 std::unique_ptr<HeapCompact> m_compaction; | |
| 495 | |
| 466 static ThreadHeap* s_mainThreadHeap; | 496 static ThreadHeap* s_mainThreadHeap; |
| 467 | 497 |
| 468 friend class ThreadState; | 498 friend class ThreadState; |
| 469 }; | 499 }; |
| 470 | 500 |
| 471 template <typename T> | 501 template <typename T> |
| 472 struct IsEagerlyFinalizedType { | 502 struct IsEagerlyFinalizedType { |
| 473 STATIC_ONLY(IsEagerlyFinalizedType); | 503 STATIC_ONLY(IsEagerlyFinalizedType); |
| 474 | 504 |
| 475 private: | 505 private: |
| (...skipping 194 matching lines...) | |
| 670 template <typename T> | 700 template <typename T> |
| 671 void VisitorHelper<Derived>::handleWeakCell(Visitor* self, void* object) { | 701 void VisitorHelper<Derived>::handleWeakCell(Visitor* self, void* object) { |
| 672 T** cell = reinterpret_cast<T**>(object); | 702 T** cell = reinterpret_cast<T**>(object); |
| 673 if (*cell && !ObjectAliveTrait<T>::isHeapObjectAlive(*cell)) | 703 if (*cell && !ObjectAliveTrait<T>::isHeapObjectAlive(*cell)) |
| 674 *cell = nullptr; | 704 *cell = nullptr; |
| 675 } | 705 } |
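The same weak-cell pattern in isolation, as a self-contained sketch: `Node`, its `marked` flag, and `clearWeakCellIfDead()` are hypothetical stand-ins for a traced heap object, its mark bit, and `handleWeakCell()`:

```cpp
struct Node {
  bool marked;  // stand-in for the object's mark bit
};

static bool isHeapObjectAlive(const Node* node) {
  return node && node->marked;
}

// Mirrors handleWeakCell() above: a weak cell is nulled when its referent was
// not proven alive during marking, so no dangling pointer survives the GC.
static void clearWeakCellIfDead(void* object) {
  Node** cell = reinterpret_cast<Node**>(object);
  if (*cell && !isHeapObjectAlive(*cell))
    *cell = nullptr;
}

int main() {
  Node survivor = {true};
  Node dead = {false};
  Node* toSurvivor = &survivor;
  Node* toDead = &dead;
  clearWeakCellIfDead(&toSurvivor);  // marked referent: pointer kept
  clearWeakCellIfDead(&toDead);      // unmarked referent: cleared to nullptr
  return (toSurvivor == &survivor && toDead == nullptr) ? 0 : 1;
}
```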
| 676 | 706 |
| 677 } // namespace blink | 707 } // namespace blink |
| 678 | 708 |
| 679 #endif // Heap_h | 709 #endif // Heap_h |