| OLD | NEW | 
|---|---|
| 1 /* | 1 /* | 
| 2  * Copyright (C) 2013 Google Inc. All rights reserved. | 2  * Copyright (C) 2013 Google Inc. All rights reserved. | 
| 3  * | 3  * | 
| 4  * Redistribution and use in source and binary forms, with or without | 4  * Redistribution and use in source and binary forms, with or without | 
| 5  * modification, are permitted provided that the following conditions are | 5  * modification, are permitted provided that the following conditions are | 
| 6  * met: | 6  * met: | 
| 7  * | 7  * | 
| 8  *     * Redistributions of source code must retain the above copyright | 8  *     * Redistributions of source code must retain the above copyright | 
| 9  * notice, this list of conditions and the following disclaimer. | 9  * notice, this list of conditions and the following disclaimer. | 
| 10  *     * Redistributions in binary form must reproduce the above | 10  *     * Redistributions in binary form must reproduce the above | 
| (...skipping 63 matching lines...) | |
| 74     if (UNLIKELY(!!freeHook)) | 74     if (UNLIKELY(!!freeHook)) | 
| 75       freeHook(address); | 75       freeHook(address); | 
| 76   } | 76   } | 
| 77 | 77 | 
| 78  private: | 78  private: | 
| 79   static AllocationHook* m_allocationHook; | 79   static AllocationHook* m_allocationHook; | 
| 80   static FreeHook* m_freeHook; | 80   static FreeHook* m_freeHook; | 
| 81 }; | 81 }; | 
| 82 | 82 | 
| 83 class CrossThreadPersistentRegion; | 83 class CrossThreadPersistentRegion; | 
| 84 class HeapCompact; |  | 
| 85 template <typename T> | 84 template <typename T> | 
| 86 class Member; | 85 class Member; | 
| 87 template <typename T> | 86 template <typename T> | 
| 88 class WeakMember; | 87 class WeakMember; | 
| 89 template <typename T> | 88 template <typename T> | 
| 90 class UntracedMember; | 89 class UntracedMember; | 
| 91 | 90 | 
| 92 template <typename T, bool = NeedsAdjustAndMark<T>::value> | 91 template <typename T, bool = NeedsAdjustAndMark<T>::value> | 
| 93 class ObjectAliveTrait; | 92 class ObjectAliveTrait; | 
| 94 | 93 | 
| (...skipping 280 matching lines...) | |
| 375   bool popAndInvokeGlobalWeakCallback(Visitor*); | 374   bool popAndInvokeGlobalWeakCallback(Visitor*); | 
| 376 | 375 | 
| 377   // Register an ephemeron table for fixed-point iteration. | 376   // Register an ephemeron table for fixed-point iteration. | 
| 378   void registerWeakTable(void* containerObject, | 377   void registerWeakTable(void* containerObject, | 
| 379                          EphemeronCallback, | 378                          EphemeronCallback, | 
| 380                          EphemeronCallback); | 379                          EphemeronCallback); | 
| 381 #if ENABLE(ASSERT) | 380 #if ENABLE(ASSERT) | 
| 382   bool weakTableRegistered(const void*); | 381   bool weakTableRegistered(const void*); | 
| 383 #endif | 382 #endif | 
| 384 | 383 | 
| 385   // Heap compaction registration methods: |  | 
| 386 |  | 
| 387   // Register |slot| as containing a reference to a movable heap object. |  | 
| 388   // |  | 
| 389   // When compaction moves the object pointed to by |*slot| to |newAddress|, |  | 
| 390   // |*slot| must be updated to hold |newAddress| instead. |  | 
| 391   void registerMovingObjectReference(MovableReference*); |  | 
| 392 |  | 
| 393   // Register a callback to be invoked upon moving the object starting at |  | 
| 394   // |reference|; see |MovingObjectCallback| documentation for details. |  | 
| 395   // |  | 
| 396   // This callback mechanism is needed to account for backing store objects |  | 
| 397   // containing intra-object pointers, all of which must be relocated/rebased |  | 
| 398   // with respect to the moved-to location. |  | 
| 399   // |  | 
| 400   // For Blink, |HeapLinkedHashSet<>| is currently the only abstraction which |  | 
| 401   // relies on this feature. |  | 
| 402   void registerMovingObjectCallback(MovableReference, |  | 
| 403                                     MovingObjectCallback, |  | 
| 404                                     void* callbackData); |  | 
| 405 |  | 
| 406   BlinkGC::GCReason lastGCReason() { return m_lastGCReason; } | 384   BlinkGC::GCReason lastGCReason() { return m_lastGCReason; } | 
| 407   RegionTree* getRegionTree() { return m_regionTree.get(); } | 385   RegionTree* getRegionTree() { return m_regionTree.get(); } | 
| 408 | 386 | 
| 409   static inline size_t allocationSizeFromSize(size_t size) { | 387   static inline size_t allocationSizeFromSize(size_t size) { | 
| 410     // Add space for header. | 388     // Add space for header. | 
| 411     size_t allocationSize = size + sizeof(HeapObjectHeader); | 389     size_t allocationSize = size + sizeof(HeapObjectHeader); | 
| 412     // The allocation size calculation can overflow for large sizes. | 390     // The allocation size calculation can overflow for large sizes. | 
| 413     RELEASE_ASSERT(allocationSize > size); | 391     RELEASE_ASSERT(allocationSize > size); | 
| 414     // Align size with allocation granularity. | 392     // Align size with allocation granularity. | 
| 415     allocationSize = (allocationSize + allocationMask) & ~allocationMask; | 393     allocationSize = (allocationSize + allocationMask) & ~allocationMask; | 
| (...skipping 37 matching lines...) | |
| 453     ASSERT(gcInfoIndex < GCInfoTable::maxIndex); | 431     ASSERT(gcInfoIndex < GCInfoTable::maxIndex); | 
| 454     ASSERT(s_gcInfoTable); | 432     ASSERT(s_gcInfoTable); | 
| 455     const GCInfo* info = s_gcInfoTable[gcInfoIndex]; | 433     const GCInfo* info = s_gcInfoTable[gcInfoIndex]; | 
| 456     ASSERT(info); | 434     ASSERT(info); | 
| 457     return info; | 435     return info; | 
| 458   } | 436   } | 
| 459 | 437 | 
| 460   static void reportMemoryUsageHistogram(); | 438   static void reportMemoryUsageHistogram(); | 
| 461   static void reportMemoryUsageForTracing(); | 439   static void reportMemoryUsageForTracing(); | 
| 462 | 440 | 
| 463   HeapCompact* compaction(); |  | 
| 464 |  | 
| 465  private: | 441  private: | 
| 466   // Reset counters that track live and allocated-since-last-GC sizes. | 442   // Reset counters that track live and allocated-since-last-GC sizes. | 
| 467   void resetHeapCounters(); | 443   void resetHeapCounters(); | 
| 468 | 444 | 
| 469   static int arenaIndexForObjectSize(size_t); | 445   static int arenaIndexForObjectSize(size_t); | 
| 470   static bool isNormalArenaIndex(int); | 446   static bool isNormalArenaIndex(int); | 
| 471 | 447 | 
| 472   void commitCallbackStacks(); | 448   void commitCallbackStacks(); | 
| 473   void decommitCallbackStacks(); | 449   void decommitCallbackStacks(); | 
| 474 | 450 | 
| 475   RecursiveMutex m_threadAttachMutex; | 451   RecursiveMutex m_threadAttachMutex; | 
| 476   ThreadStateSet m_threads; | 452   ThreadStateSet m_threads; | 
| 477   ThreadHeapStats m_stats; | 453   ThreadHeapStats m_stats; | 
| 478   std::unique_ptr<RegionTree> m_regionTree; | 454   std::unique_ptr<RegionTree> m_regionTree; | 
| 479   std::unique_ptr<HeapDoesNotContainCache> m_heapDoesNotContainCache; | 455   std::unique_ptr<HeapDoesNotContainCache> m_heapDoesNotContainCache; | 
| 480   std::unique_ptr<SafePointBarrier> m_safePointBarrier; | 456   std::unique_ptr<SafePointBarrier> m_safePointBarrier; | 
| 481   std::unique_ptr<FreePagePool> m_freePagePool; | 457   std::unique_ptr<FreePagePool> m_freePagePool; | 
| 482   std::unique_ptr<OrphanedPagePool> m_orphanedPagePool; | 458   std::unique_ptr<OrphanedPagePool> m_orphanedPagePool; | 
| 483   std::unique_ptr<CallbackStack> m_markingStack; | 459   std::unique_ptr<CallbackStack> m_markingStack; | 
| 484   std::unique_ptr<CallbackStack> m_postMarkingCallbackStack; | 460   std::unique_ptr<CallbackStack> m_postMarkingCallbackStack; | 
| 485   std::unique_ptr<CallbackStack> m_globalWeakCallbackStack; | 461   std::unique_ptr<CallbackStack> m_globalWeakCallbackStack; | 
| 486   std::unique_ptr<CallbackStack> m_ephemeronStack; | 462   std::unique_ptr<CallbackStack> m_ephemeronStack; | 
| 487   BlinkGC::GCReason m_lastGCReason; | 463   BlinkGC::GCReason m_lastGCReason; | 
| 488   StackFrameDepth m_stackFrameDepth; | 464   StackFrameDepth m_stackFrameDepth; | 
| 489 | 465 | 
| 490   std::unique_ptr<HeapCompact> m_compaction; |  | 
| 491 |  | 
| 492   static ThreadHeap* s_mainThreadHeap; | 466   static ThreadHeap* s_mainThreadHeap; | 
| 493 | 467 | 
| 494   friend class ThreadState; | 468   friend class ThreadState; | 
| 495 }; | 469 }; | 
| 496 | 470 | 
| 497 template <typename T> | 471 template <typename T> | 
| 498 struct IsEagerlyFinalizedType { | 472 struct IsEagerlyFinalizedType { | 
| 499   STATIC_ONLY(IsEagerlyFinalizedType); | 473   STATIC_ONLY(IsEagerlyFinalizedType); | 
| 500 | 474 | 
| 501  private: | 475  private: | 
| (...skipping 194 matching lines...) | |
| 696 template <typename T> | 670 template <typename T> | 
| 697 void VisitorHelper<Derived>::handleWeakCell(Visitor* self, void* object) { | 671 void VisitorHelper<Derived>::handleWeakCell(Visitor* self, void* object) { | 
| 698   T** cell = reinterpret_cast<T**>(object); | 672   T** cell = reinterpret_cast<T**>(object); | 
| 699   if (*cell && !ObjectAliveTrait<T>::isHeapObjectAlive(*cell)) | 673   if (*cell && !ObjectAliveTrait<T>::isHeapObjectAlive(*cell)) | 
| 700     *cell = nullptr; | 674     *cell = nullptr; | 
| 701 } | 675 } | 
| 702 | 676 | 
| 703 }  // namespace blink | 677 }  // namespace blink | 
| 704 | 678 | 
| 705 #endif  // Heap_h | 679 #endif  // Heap_h | 
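
The comments removed on the old side (old lines 385-404) describe the compaction registration contract: a registered slot must be rewritten when the object it points to is relocated, and a registered callback lets a backing store rebase its own intra-object pointers. The following standalone C++ sketch only illustrates that contract; the names, callback signature, and registry class are assumptions for illustration and are not Blink's actual compaction API.

```cpp
// Standalone illustration of the contract described in the removed comments
// (old lines 385-404). Names and signatures are illustrative, not Blink's.
#include <vector>

using MovableReference = void*;
// Illustrative callback shape (an assumption): invoked after the object at
// |from| has been copied to |to| so the owner can rebase intra-object pointers.
using MovingObjectCallback = void (*)(void* callbackData, void* from, void* to);

class SlotRegistry {
 public:
  // |slot| holds a pointer to a movable object; if compaction relocates that
  // object, |*slot| must be rewritten to the new address.
  void registerMovingObjectReference(MovableReference* slot) {
    m_slots.push_back(slot);
  }

  // Ask to be notified when the object at |object| is moved, e.g. so a
  // backing store (such as a linked hash set) can fix up pointers into itself.
  void registerMovingObjectCallback(MovableReference object,
                                    MovingObjectCallback callback,
                                    void* callbackData) {
    m_callbacks.push_back({object, callback, callbackData});
  }

  // Called by a (hypothetical) compactor after moving |from| to |to|.
  void objectMoved(void* from, void* to) {
    for (MovableReference* slot : m_slots) {
      if (*slot == from)
        *slot = to;  // rebase the registered slot to the new address
    }
    for (const Entry& e : m_callbacks) {
      if (e.object == from)
        e.callback(e.callbackData, from, to);
    }
  }

 private:
  struct Entry {
    MovableReference object;
    MovingObjectCallback callback;
    void* callbackData;
  };
  std::vector<MovableReference*> m_slots;
  std::vector<Entry> m_callbacks;
};
```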
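
For reference, allocationSizeFromSize() (new lines 387-393) reserves header space and then rounds up to the allocation granularity. A minimal sketch of that arithmetic follows, with assumed constants (header size and granularity of 8 are illustrative values, not taken from this diff).

```cpp
// Minimal sketch of the rounding in allocationSizeFromSize() (new lines
// 387-393). Constants below are assumptions for illustration; the real values
// come from HeapObjectHeader and Blink's allocation constants.
#include <cassert>
#include <cstddef>
#include <cstdio>

constexpr size_t kHeaderSize = 8;             // assumed sizeof(HeapObjectHeader)
constexpr size_t kAllocationGranularity = 8;  // assumed granularity
constexpr size_t kAllocationMask = kAllocationGranularity - 1;

size_t allocationSizeFromSize(size_t size) {
  size_t allocationSize = size + kHeaderSize;  // add space for the header
  assert(allocationSize > size);               // overflow check (RELEASE_ASSERT in Blink)
  // Round up to the next multiple of the granularity.
  return (allocationSize + kAllocationMask) & ~kAllocationMask;
}

int main() {
  // A 20-byte request becomes 28 bytes with the header, then rounds up to 32.
  printf("%zu\n", allocationSizeFromSize(20));  // prints 32
  return 0;
}
```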