Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
| 6 * met: | 6 * met: |
| 7 * | 7 * |
| 8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
| (...skipping 29 matching lines...) Expand all Loading... | |
| 40 #include "wtf/Assertions.h" | 40 #include "wtf/Assertions.h" |
| 41 #include "wtf/Atomics.h" | 41 #include "wtf/Atomics.h" |
| 42 #include "wtf/Forward.h" | 42 #include "wtf/Forward.h" |
| 43 | 43 |
| 44 namespace blink { | 44 namespace blink { |
| 45 | 45 |
| 46 template<typename T> class Member; | 46 template<typename T> class Member; |
| 47 template<typename T> class WeakMember; | 47 template<typename T> class WeakMember; |
| 48 template<typename T> class UntracedMember; | 48 template<typename T> class UntracedMember; |
| 49 | 49 |
| 50 // TODO(peria): Refactor following two sets of templates. | |
| 51 | |
| 50 template<typename T, bool = NeedsAdjustAndMark<T>::value> class ObjectAliveTrait; | 52 template<typename T, bool = NeedsAdjustAndMark<T>::value> class ObjectAliveTrait; |
| 51 | 53 |
| 52 template<typename T> | 54 template<typename T> |
| 53 class ObjectAliveTrait<T, false> { | 55 class ObjectAliveTrait<T, false> { |
| 54 public: | 56 public: |
| 55 static bool isHeapObjectAlive(T* object) | 57 static bool isHeapObjectAlive(T* object) |
| 56 { | 58 { |
| 57 static_assert(sizeof(T), "T must be fully defined"); | 59 static_assert(sizeof(T), "T must be fully defined"); |
| 58 return HeapObjectHeader::fromPayload(object)->isMarked(); | 60 return HeapObjectHeader::fromPayload(object)->isMarked(); |
| 59 } | 61 } |
| 60 }; | 62 }; |
| 61 | 63 |
| 62 template<typename T> | 64 template<typename T> |
| 63 class ObjectAliveTrait<T, true> { | 65 class ObjectAliveTrait<T, true> { |
| 64 public: | 66 public: |
| 65 static bool isHeapObjectAlive(T* object) | 67 static bool isHeapObjectAlive(T* object) |
| 66 { | 68 { |
| 69 return object->isHeapObjectAlive(); | |
| 70 } | |
| 71 }; | |
| 72 | |
| 73 template<typename T, bool = IsGarbageCollectedMixin<T>::value> class HeapObjectHeaderTrait; | |
| 74 | |
| 75 template<typename T> | |
| 76 class HeapObjectHeaderTrait<T, true> { | |
| 77 public: | |
| 78 static HeapObjectHeader* heapObjectHeader(T* obj) | |
| 79 { | |
| 67 static_assert(sizeof(T), "T must be fully defined"); | 80 static_assert(sizeof(T), "T must be fully defined"); |
| 68 return object->isHeapObjectAlive(); | 81 // TODO(peria): This ASSERT() is too restrictive. The ASSERT forbids |
| 82 // to call heapObjectHeader() for an object while another | |
| 83 // (totally independent) mixin object is under construction. | |
| 84 ASSERT(!ThreadState::current()->isConstructingGCMixin()); | |
| 85 return obj->heapObjectHeader(); | |
| 86 } | |
| 87 }; | |
| 88 | |
| 89 template<typename T> | |
| 90 class HeapObjectHeaderTrait<T, false> { | |
|
peria
2015/11/17 01:36:49
Yuta-san,
Is it safe to instantiate HeapObjectHead
Yuta Kitamura
2015/11/17 06:25:12
If the lines 95-96 are NOT present, I think it's s
peria
2015/11/17 07:24:57
Thank you for the clarification and additional exp
| |
| 91 public: | |
| 92 static HeapObjectHeader* heapObjectHeader(T* obj) | |
| 93 { | |
| 94 ASSERT(!ThreadState::current()->isConstructingGCMixin()); | |
| 95 if (!IsFullyDefined<T>::value) | |
| 96 return nullptr; | |
| 97 return HeapObjectHeader::fromPayload(obj); | |
| 69 } | 98 } |
| 70 }; | 99 }; |
| 71 | 100 |
| 72 class PLATFORM_EXPORT Heap { | 101 class PLATFORM_EXPORT Heap { |
| 73 public: | 102 public: |
| 74 static void init(); | 103 static void init(); |
| 75 static void shutdown(); | 104 static void shutdown(); |
| 76 static void doShutdown(); | 105 static void doShutdown(); |
| 77 | 106 |
| 78 #if ENABLE(ASSERT) | 107 #if ENABLE(ASSERT) |
| (...skipping 10 matching lines...) Expand all Loading... | |
| 89 // non-live entries, so no entries will be removed. Since you can't set | 118 // non-live entries, so no entries will be removed. Since you can't set |
| 90 // the mark bit on a null pointer, that means that null pointers are | 119 // the mark bit on a null pointer, that means that null pointers are |
| 91 // always 'alive'. | 120 // always 'alive'. |
| 92 if (!object) | 121 if (!object) |
| 93 return true; | 122 return true; |
| 94 return ObjectAliveTrait<T>::isHeapObjectAlive(object); | 123 return ObjectAliveTrait<T>::isHeapObjectAlive(object); |
| 95 } | 124 } |
| 96 template<typename T> | 125 template<typename T> |
| 97 static inline bool isHeapObjectAlive(const Member<T>& member) | 126 static inline bool isHeapObjectAlive(const Member<T>& member) |
| 98 { | 127 { |
| 99 return isHeapObjectAlive(member.get()); | 128 return isHeapObjectAlive(member.unsafeGet()); |
| 100 } | 129 } |
| 101 template<typename T> | 130 template<typename T> |
| 102 static inline bool isHeapObjectAlive(const WeakMember<T>& member) | 131 static inline bool isHeapObjectAlive(const WeakMember<T>& member) |
| 103 { | 132 { |
| 104 return isHeapObjectAlive(member.get()); | 133 return isHeapObjectAlive(member.unsafeGet()); |
| 105 } | 134 } |
| 106 template<typename T> | 135 template<typename T> |
| 107 static inline bool isHeapObjectAlive(const UntracedMember<T>& member) | 136 static inline bool isHeapObjectAlive(const UntracedMember<T>& member) |
| 108 { | 137 { |
| 109 return isHeapObjectAlive(member.get()); | 138 return isHeapObjectAlive(member.unsafeGet()); |
| 110 } | 139 } |
| 111 template<typename T> | 140 template<typename T> |
| 112 static inline bool isHeapObjectAlive(const RawPtr<T>& ptr) | 141 static inline bool isHeapObjectAlive(const RawPtr<T>& ptr) |
| 113 { | 142 { |
| 114 return isHeapObjectAlive(ptr.get()); | 143 return isHeapObjectAlive(ptr.get()); |
| 115 } | 144 } |
| 116 | 145 |
| 117 // Is the finalizable GC object still alive, but slated for lazy sweeping? | 146 // Is the finalizable GC object still alive, but slated for lazy sweeping? |
| 118 // If a lazy sweep is in progress, returns true if the object was found | 147 // If a lazy sweep is in progress, returns true if the object was found |
| 119 // to be not reachable during the marking phase, but it has yet to be swept | 148 // to be not reachable during the marking phase, but it has yet to be swept |
| (...skipping 132 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 252 static size_t wrapperCount() { return acquireLoad(&s_wrapperCount); } | 281 static size_t wrapperCount() { return acquireLoad(&s_wrapperCount); } |
| 253 static size_t wrapperCountAtLastGC() { return acquireLoad(&s_wrapperCountAtLastGC); } | 282 static size_t wrapperCountAtLastGC() { return acquireLoad(&s_wrapperCountAtLastGC); } |
| 254 static void increaseCollectedWrapperCount(size_t delta) { atomicAdd(&s_collectedWrapperCount, static_cast<long>(delta)); } | 283 static void increaseCollectedWrapperCount(size_t delta) { atomicAdd(&s_collectedWrapperCount, static_cast<long>(delta)); } |
| 255 static size_t collectedWrapperCount() { return acquireLoad(&s_collectedWrapperCount); } | 284 static size_t collectedWrapperCount() { return acquireLoad(&s_collectedWrapperCount); } |
| 256 static size_t partitionAllocSizeAtLastGC() { return acquireLoad(&s_partitionAllocSizeAtLastGC); } | 285 static size_t partitionAllocSizeAtLastGC() { return acquireLoad(&s_partitionAllocSizeAtLastGC); } |
| 257 | 286 |
| 258 static double estimatedMarkingTime(); | 287 static double estimatedMarkingTime(); |
| 259 static void reportMemoryUsageHistogram(); | 288 static void reportMemoryUsageHistogram(); |
| 260 static void reportMemoryUsageForTracing(); | 289 static void reportMemoryUsageForTracing(); |
| 261 | 290 |
| 262 #if ENABLE(ASSERT) | 291 static uint32_t gcGeneration() { return s_gcGeneration; } |
| 263 static uint16_t gcGeneration() { return s_gcGeneration; } | |
| 264 #endif | |
| 265 | 292 |
| 266 private: | 293 private: |
| 267 // A RegionTree is a simple binary search tree of PageMemoryRegions sorted | 294 // A RegionTree is a simple binary search tree of PageMemoryRegions sorted |
| 268 // by base addresses. | 295 // by base addresses. |
| 269 class RegionTree { | 296 class RegionTree { |
| 270 public: | 297 public: |
| 271 explicit RegionTree(PageMemoryRegion* region) : m_region(region), m_left(nullptr), m_right(nullptr) { } | 298 explicit RegionTree(PageMemoryRegion* region) : m_region(region), m_left(nullptr), m_right(nullptr) { } |
| 272 ~RegionTree() | 299 ~RegionTree() |
| 273 { | 300 { |
| 274 delete m_left; | 301 delete m_left; |
| (...skipping 26 matching lines...) Expand all Loading... | |
| 301 static size_t s_allocatedSpace; | 328 static size_t s_allocatedSpace; |
| 302 static size_t s_allocatedObjectSize; | 329 static size_t s_allocatedObjectSize; |
| 303 static size_t s_objectSizeAtLastGC; | 330 static size_t s_objectSizeAtLastGC; |
| 304 static size_t s_markedObjectSize; | 331 static size_t s_markedObjectSize; |
| 305 static size_t s_markedObjectSizeAtLastCompleteSweep; | 332 static size_t s_markedObjectSizeAtLastCompleteSweep; |
| 306 static size_t s_wrapperCount; | 333 static size_t s_wrapperCount; |
| 307 static size_t s_wrapperCountAtLastGC; | 334 static size_t s_wrapperCountAtLastGC; |
| 308 static size_t s_collectedWrapperCount; | 335 static size_t s_collectedWrapperCount; |
| 309 static size_t s_partitionAllocSizeAtLastGC; | 336 static size_t s_partitionAllocSizeAtLastGC; |
| 310 static double s_estimatedMarkingTimePerByte; | 337 static double s_estimatedMarkingTimePerByte; |
| 311 #if ENABLE(ASSERT) | 338 static uint32_t s_gcGeneration; |
| 312 static uint16_t s_gcGeneration; | |
| 313 #endif | |
| 314 | 339 |
| 315 friend class ThreadState; | 340 friend class ThreadState; |
| 316 }; | 341 }; |
| 317 | 342 |
| 318 template<typename T> | 343 template<typename T> |
| 319 struct IsEagerlyFinalizedType { | 344 struct IsEagerlyFinalizedType { |
| 320 private: | 345 private: |
| 321 typedef char YesType; | 346 typedef char YesType; |
| 322 struct NoType { | 347 struct NoType { |
| 323 char padding[8]; | 348 char padding[8]; |
| (...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 448 #define EAGERLY_FINALIZE_WILL_BE_REMOVED() EAGERLY_FINALIZE() | 473 #define EAGERLY_FINALIZE_WILL_BE_REMOVED() EAGERLY_FINALIZE() |
| 449 #else | 474 #else |
| 450 #define EAGERLY_FINALIZE_WILL_BE_REMOVED() | 475 #define EAGERLY_FINALIZE_WILL_BE_REMOVED() |
| 451 #endif | 476 #endif |
| 452 | 477 |
| 453 inline Address Heap::allocateOnHeapIndex(ThreadState* state, size_t size, int heapIndex, size_t gcInfoIndex) | 478 inline Address Heap::allocateOnHeapIndex(ThreadState* state, size_t size, int heapIndex, size_t gcInfoIndex) |
| 454 { | 479 { |
| 455 ASSERT(state->isAllocationAllowed()); | 480 ASSERT(state->isAllocationAllowed()); |
| 456 ASSERT(heapIndex != BlinkGC::LargeObjectHeapIndex); | 481 ASSERT(heapIndex != BlinkGC::LargeObjectHeapIndex); |
| 457 NormalPageHeap* heap = static_cast<NormalPageHeap*>(state->heap(heapIndex)); | 482 NormalPageHeap* heap = static_cast<NormalPageHeap*>(state->heap(heapIndex)); |
| 458 return heap->allocateObject(allocationSizeFromSize(size), gcInfoIndex); | 483 return heap->allocateObject(allocationSizeFromSize(size), gcInfoIndex, gcGeneration()); |
| 459 } | 484 } |
| 460 | 485 |
| 461 template<typename T> | 486 template<typename T> |
| 462 Address Heap::allocate(size_t size, bool eagerlySweep) | 487 Address Heap::allocate(size_t size, bool eagerlySweep) |
| 463 { | 488 { |
| 464 ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state(); | 489 ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state(); |
| 465 return Heap::allocateOnHeapIndex(state, size, eagerlySweep ? BlinkGC::EagerSweepHeapIndex : Heap::heapIndexForObjectSize(size), GCInfoTrait<T>::index()); | 490 return Heap::allocateOnHeapIndex(state, size, eagerlySweep ? BlinkGC::EagerSweepHeapIndex : Heap::heapIndexForObjectSize(size), GCInfoTrait<T>::index()); |
| 466 } | 491 } |
| 467 | 492 |
| 468 template<typename T> | 493 template<typename T> |
| (...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 503 void VisitorHelper<Derived>::handleWeakCell(Visitor* self, void* object) | 528 void VisitorHelper<Derived>::handleWeakCell(Visitor* self, void* object) |
| 504 { | 529 { |
| 505 T** cell = reinterpret_cast<T**>(object); | 530 T** cell = reinterpret_cast<T**>(object); |
| 506 if (*cell && !ObjectAliveTrait<T>::isHeapObjectAlive(*cell)) | 531 if (*cell && !ObjectAliveTrait<T>::isHeapObjectAlive(*cell)) |
| 507 *cell = nullptr; | 532 *cell = nullptr; |
| 508 } | 533 } |
| 509 | 534 |
| 510 } // namespace blink | 535 } // namespace blink |
| 511 | 536 |
| 512 #endif // Heap_h | 537 #endif // Heap_h |
| OLD | NEW |