Chromium Code Reviews

Side by Side Diff: third_party/WebKit/Source/platform/heap/Heap.h

Issue 1411603007: [Oilpan] Add use-after-free detector in Member<>
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Require full definition of T. Created 5 years, 1 month ago.
 /*
  * Copyright (C) 2013 Google Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are
  * met:
  *
  *     * Redistributions of source code must retain the above copyright
  *       notice, this list of conditions and the following disclaimer.
  *     * Redistributions in binary form must reproduce the above
(...skipping 29 matching lines...)
 #include "wtf/Assertions.h"
 #include "wtf/Atomics.h"
 #include "wtf/Forward.h"
 
 namespace blink {
 
 template<typename T> class Member;
 template<typename T> class WeakMember;
 template<typename T> class UntracedMember;
 
+// TODO(peria): Refactor following two sets of templates.
+
 template<typename T, bool = NeedsAdjustAndMark<T>::value> class ObjectAliveTrait;
 
 template<typename T>
 class ObjectAliveTrait<T, false> {
 public:
     static bool isHeapObjectAlive(T* object)
     {
         static_assert(sizeof(T), "T must be fully defined");
         return HeapObjectHeader::fromPayload(object)->isMarked();
     }
 };
 
 template<typename T>
 class ObjectAliveTrait<T, true> {
 public:
     static bool isHeapObjectAlive(T* object)
     {
-        static_assert(sizeof(T), "T must be fully defined");
         return object->isHeapObjectAlive();
     }
 };
 
+template<typename T, bool = IsGarbageCollectedMixin<T>::value> class HeapObjectHeaderTrait;
+
+template<typename T>
+class HeapObjectHeaderTrait<T, true> {
+public:
+    static HeapObjectHeader* heapObjectHeader(T* obj)
+    {
+        static_assert(IsFullyDefined<T>::value, "T must be fully defined.");
+        // TODO(peria): This ASSERT() is too restrictive. It forbids calling
+        // heapObjectHeader() on an object while another (totally
+        // independent) mixin object is under construction.
+        ASSERT(!ThreadState::current()->isConstructingGCMixin());
+        return obj->heapObjectHeader();
+    }
+};
+
+template<typename T>
+class HeapObjectHeaderTrait<T, false> {
+public:
+    static HeapObjectHeader* heapObjectHeader(T* obj)
+    {
+        static_assert(IsFullyDefined<T>::value, "T must be fully defined.");
+        ASSERT(!ThreadState::current()->isConstructingGCMixin());
+        return HeapObjectHeader::fromPayload(obj);
+    }
+};
+
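Note: the two specializations exist because header recovery differs. For a non-mixin object the HeapObjectHeader is laid out immediately before the object payload, so HeapObjectHeader::fromPayload() can recover it with pointer arithmetic; a pointer to a garbage-collected mixin may point into the middle of the enclosing object, so the object itself must be asked via its virtual heapObjectHeader(). A minimal sketch of the non-mixin case, assuming the header-before-payload layout (illustrative, not this file's actual implementation):

// Sketch of the arithmetic fromPayload() performs, assuming the layout
// [HeapObjectHeader][object payload ...] on a normal heap page.
static HeapObjectHeader* fromPayloadSketch(void* payload)
{
    Address address = reinterpret_cast<Address>(payload);
    return reinterpret_cast<HeapObjectHeader*>(address - sizeof(HeapObjectHeader));
}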
 class PLATFORM_EXPORT Heap {
 public:
     static void init();
     static void shutdown();
     static void doShutdown();
 
 #if ENABLE(ASSERT)
     static BasePage* findPageFromAddress(Address);
     static BasePage* findPageFromAddress(const void* pointer) { return findPageFromAddress(reinterpret_cast<Address>(const_cast<void*>(pointer))); }
 #endif
 
     template<typename T>
     static inline bool isHeapObjectAlive(T* object)
     {
         static_assert(sizeof(T), "T must be fully defined");
         // The strongification of collections relies on the fact that once a
         // collection has been strongified, there is no way that it can contain
         // non-live entries, so no entries will be removed. Since you can't set
         // the mark bit on a null pointer, that means that null pointers are
         // always 'alive'.
         if (!object)
             return true;
         return ObjectAliveTrait<T>::isHeapObjectAlive(object);
     }
     template<typename T>
     static inline bool isHeapObjectAlive(const Member<T>& member)
     {
-        return isHeapObjectAlive(member.get());
+        return isHeapObjectAlive(member.unsafeGet());
     }
     template<typename T>
     static inline bool isHeapObjectAlive(const WeakMember<T>& member)
     {
-        return isHeapObjectAlive(member.get());
+        return isHeapObjectAlive(member.unsafeGet());
     }
     template<typename T>
     static inline bool isHeapObjectAlive(const UntracedMember<T>& member)
     {
-        return isHeapObjectAlive(member.get());
+        return isHeapObjectAlive(member.unsafeGet());
     }
     template<typename T>
     static inline bool isHeapObjectAlive(const RawPtr<T>& ptr)
     {
         return isHeapObjectAlive(ptr.get());
     }
 
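Note: these liveness predicates are what weak processing is built on: after marking, a weak reference should survive only if its target was marked. A minimal sketch of the calling pattern, with hypothetical class and field names (NodeObserver, Node, m_node) that are not part of this patch:

// Hypothetical weak-processing callback using Heap::isHeapObjectAlive().
class NodeObserver : public GarbageCollected<NodeObserver> {
public:
    void clearWeakMembers(Visitor*)
    {
        // The target was not marked in this GC cycle; drop the reference
        // rather than keep a pointer into soon-to-be-swept memory.
        if (!Heap::isHeapObjectAlive(m_node))
            m_node = nullptr;
    }
private:
    WeakMember<Node> m_node;
};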
     // Is the finalizable GC object still alive, but slated for lazy sweeping?
     // If a lazy sweep is in progress, returns true if the object was found
     // to be not reachable during the marking phase, but it has yet to be swept
(...skipping 132 matching lines...)
     static size_t wrapperCount() { return acquireLoad(&s_wrapperCount); }
     static size_t wrapperCountAtLastGC() { return acquireLoad(&s_wrapperCountAtLastGC); }
     static void increaseCollectedWrapperCount(size_t delta) { atomicAdd(&s_collectedWrapperCount, static_cast<long>(delta)); }
     static size_t collectedWrapperCount() { return acquireLoad(&s_collectedWrapperCount); }
     static size_t partitionAllocSizeAtLastGC() { return acquireLoad(&s_partitionAllocSizeAtLastGC); }
 
     static double estimatedMarkingTime();
     static void reportMemoryUsageHistogram();
     static void reportMemoryUsageForTracing();
 
-#if ENABLE(ASSERT)
-    static uint16_t gcGeneration() { return s_gcGeneration; }
-#endif
+    static uint32_t gcGeneration() { return s_gcGeneration; }

haraken 2015/11/20 01:59:06: Can we add: ASSERT(s_gcGeneration != gcGenerati
peria 2015/11/20 02:27:00: Done.

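Note: exposing gcGeneration() unconditionally (and widening it from uint16_t to uint32_t so the counter wraps far less often) gives Member<> a cheap timestamp to compare against, which is presumably the core of the use-after-free detector this CL is named for. A sketch of one way such a check can work; the CheckedMember name, the m_generation field, and the header's gcGeneration() accessor are hypothetical, not this patch's actual API:

// Hypothetical generation-based use-after-free check. Assumes every
// object records Heap::gcGeneration() at allocation time (note the
// extra argument threaded into allocateObject() later in this file).
template<typename T>
class CheckedMember {
public:
    explicit CheckedMember(T* raw)
        : m_raw(raw)
        , m_generation(Heap::gcGeneration()) // generation when the pointer was taken
    {
    }

    T* get() const
    {
        // The object's stamp can exceed our recorded generation only if
        // its memory was swept and re-allocated after we took the pointer
        // (wraparound is ignored in this sketch).
        ASSERT(!m_raw || HeapObjectHeader::fromPayload(m_raw)->gcGeneration() <= m_generation);
        return m_raw;
    }

private:
    T* m_raw;
    uint32_t m_generation;
};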
 private:
     // A RegionTree is a simple binary search tree of PageMemoryRegions sorted
     // by base addresses.
     class RegionTree {
     public:
         explicit RegionTree(PageMemoryRegion* region) : m_region(region), m_left(nullptr), m_right(nullptr) { }
         ~RegionTree()
         {
             delete m_left;
(...skipping 26 matching lines...)
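Note: because PageMemoryRegions are disjoint and keyed by base address, finding the region that contains an arbitrary address is an ordinary binary-search-tree walk. A minimal sketch, treating the private members as accessible and assuming hypothetical base()/contains() helpers on PageMemoryRegion (the real lookup code is in the elided lines above):

// Sketch of an address lookup over the RegionTree shown above.
PageMemoryRegion* lookup(RegionTree* node, Address address)
{
    while (node) {
        if (node->m_region->contains(address)) // assumed: base <= address < base + size
            return node->m_region;
        // Regions are sorted by base address, so descend left for lower
        // addresses and right for higher ones.
        node = address < node->m_region->base() ? node->m_left : node->m_right;
    }
    return nullptr; // address is not in any managed region
}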
     static size_t s_allocatedSpace;
     static size_t s_allocatedObjectSize;
     static size_t s_objectSizeAtLastGC;
     static size_t s_markedObjectSize;
     static size_t s_markedObjectSizeAtLastCompleteSweep;
     static size_t s_wrapperCount;
     static size_t s_wrapperCountAtLastGC;
     static size_t s_collectedWrapperCount;
     static size_t s_partitionAllocSizeAtLastGC;
     static double s_estimatedMarkingTimePerByte;
-#if ENABLE(ASSERT)
-    static uint16_t s_gcGeneration;
-#endif
+    static uint32_t s_gcGeneration;
 
     friend class ThreadState;
 };
 
 template<typename T>
 struct IsEagerlyFinalizedType {
 private:
     typedef char YesType;
     struct NoType {
         char padding[8];
(...skipping 124 matching lines...)
 #define EAGERLY_FINALIZE_WILL_BE_REMOVED() EAGERLY_FINALIZE()
 #else
 #define EAGERLY_FINALIZE_WILL_BE_REMOVED()
 #endif
 
 inline Address Heap::allocateOnHeapIndex(ThreadState* state, size_t size, int heapIndex, size_t gcInfoIndex)
 {
     ASSERT(state->isAllocationAllowed());
     ASSERT(heapIndex != BlinkGC::LargeObjectHeapIndex);
     NormalPageHeap* heap = static_cast<NormalPageHeap*>(state->heap(heapIndex));
-    return heap->allocateObject(allocationSizeFromSize(size), gcInfoIndex);
+    return heap->allocateObject(allocationSizeFromSize(size), gcInfoIndex, gcGeneration());
 }
 
 template<typename T>
 Address Heap::allocate(size_t size, bool eagerlySweep)
 {
     ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state();
     return Heap::allocateOnHeapIndex(state, size, eagerlySweep ? BlinkGC::EagerSweepHeapIndex : Heap::heapIndexForObjectSize(size), GCInfoTrait<T>::index());
 }
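Note: this is where the generation stamp enters the allocation path: allocateOnHeapIndex() now forwards the current Heap::gcGeneration() into NormalPageHeap::allocateObject(), so each object can record the GC generation in which it was allocated. That recorded stamp is what a generation-comparing check in Member<> (as sketched earlier) would read back when the pointer is later dereferenced.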
 
 template<typename T>
(...skipping 34 matching lines...)
 void VisitorHelper<Derived>::handleWeakCell(Visitor* self, void* object)
 {
     T** cell = reinterpret_cast<T**>(object);
     if (*cell && !ObjectAliveTrait<T>::isHeapObjectAlive(*cell))
         *cell = nullptr;
 }
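Note: handleWeakCell() is the generic "clear a weak slot whose target died" callback: it runs after marking, when liveness is just the mark bit, and nulls the cell so no dangling pointer survives sweeping. The same pattern in a fully self-contained form (plain C++, independent of Oilpan, purely illustrative):

#include <cassert>

// Stand-in for a heap object carrying a mark bit set during marking.
struct FakeObject {
    bool marked = false;
};

// Analogue of handleWeakCell(): null the cell if its target was not marked.
void handleWeakCell(FakeObject** cell)
{
    if (*cell && !(*cell)->marked)
        *cell = nullptr;
}

int main()
{
    FakeObject live, dead;
    live.marked = true;            // survived marking
    FakeObject* liveCell = &live;
    FakeObject* deadCell = &dead;  // unreachable this cycle: never marked

    handleWeakCell(&liveCell);
    handleWeakCell(&deadCell);

    assert(liveCell == &live);     // live target kept
    assert(deadCell == nullptr);   // dead target cleared
    return 0;
}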
 
 } // namespace blink
 
 #endif // Heap_h