Chromium Code Reviews

Side by Side Diff: Source/platform/heap/Heap.cpp

Issue 782223002: [NotForCommit] inline ensureMarked. Base URL: https://chromium.googlesource.com/chromium/blink.git@master
Patch Set: Created 6 years ago
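Note on the change: judging from the removed lines below, this CL drops the out-of-line definitions of HeapObjectHeader::isMarked(), HeapObjectHeader::mark(), the two fromPayload() helpers, and the generic MarkingVisitor::ensureMarked() override from Heap.cpp, presumably so the same logic can live as inline code in Heap.h (the other file in this patch set). A minimal sketch of what a header-side, non-virtual fast path could look like, assuming it keeps the mark-bit logic shown in the removed lines; the name ensureMarkedInline is hypothetical, and the ThreadLocalMarking check from the removed override is omitted for brevity:

    // Sketch only; assumes relocation into Heap.h next to the other inline
    // HeapObjectHeader helpers. fromPayload/isMarked/mark appear in the diff;
    // ensureMarkedInline itself is a hypothetical name.
    inline bool ensureMarkedInline(const void* objectPointer)
    {
        if (!objectPointer)
            return false;
        GeneralHeapObjectHeader* header =
            GeneralHeapObjectHeader::fromPayload(objectPointer);
        if (header->isMarked())   // tests m_size & markBitMask
            return false;
        header->mark();           // sets the mark bit: m_size |= markBitMask
        return true;
    }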
1 /* 1 /*
2 * Copyright (C) 2013 Google Inc. All rights reserved. 2 * Copyright (C) 2013 Google Inc. All rights reserved.
3 * 3 *
4 * Redistribution and use in source and binary forms, with or without 4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions are 5 * modification, are permitted provided that the following conditions are
6 * met: 6 * met:
7 * 7 *
8 * * Redistributions of source code must retain the above copyright 8 * * Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer. 9 * notice, this list of conditions and the following disclaimer.
10 * * Redistributions in binary form must reproduce the above 10 * * Redistributions in binary form must reproduce the above
(...skipping 434 matching lines...)
445 } 445 }
446 } 446 }
447 447
448 private: 448 private:
449 ThreadState* m_state; 449 ThreadState* m_state;
450 ThreadState::SafePointScope m_safePointScope; 450 ThreadState::SafePointScope m_safePointScope;
451 bool m_parkedAllThreads; // False if we fail to park all threads 451 bool m_parkedAllThreads; // False if we fail to park all threads
452 }; 452 };
453 453
454 NO_SANITIZE_ADDRESS inline 454 NO_SANITIZE_ADDRESS inline
455 bool HeapObjectHeader::isMarked() const
456 {
457 checkHeader();
458 return m_size & markBitMask;
459 }
460
461 NO_SANITIZE_ADDRESS inline
462 void HeapObjectHeader::mark()
463 {
464 checkHeader();
465 ASSERT(!isMarked());
466 m_size = m_size | markBitMask;
467 }
468
469 NO_SANITIZE_ADDRESS inline
470 void HeapObjectHeader::unmark() 455 void HeapObjectHeader::unmark()
471 { 456 {
472 checkHeader(); 457 checkHeader();
473 ASSERT(isMarked()); 458 ASSERT(isMarked());
474 m_size &= ~markBitMask; 459 m_size &= ~markBitMask;
475 } 460 }
476 461
477 NO_SANITIZE_ADDRESS inline 462 NO_SANITIZE_ADDRESS inline
478 bool HeapObjectHeader::isDead() const 463 bool HeapObjectHeader::isDead() const
479 { 464 {
(...skipping 11 matching lines...)
491 476
492 #if ENABLE(ASSERT) 477 #if ENABLE(ASSERT)
493 NO_SANITIZE_ADDRESS 478 NO_SANITIZE_ADDRESS
494 void HeapObjectHeader::zapMagic() 479 void HeapObjectHeader::zapMagic()
495 { 480 {
496 checkHeader(); 481 checkHeader();
497 m_magic = zappedMagic; 482 m_magic = zappedMagic;
498 } 483 }
499 #endif 484 #endif
500 485
501 HeapObjectHeader* HeapObjectHeader::fromPayload(const void* payload)
502 {
503 Address addr = reinterpret_cast<Address>(const_cast<void*>(payload));
504 HeapObjectHeader* header =
505 reinterpret_cast<HeapObjectHeader*>(addr - sizeof(HeapObjectHeader));
506 return header;
507 }
508
509 void HeapObjectHeader::finalize(const GCInfo* gcInfo, Address object, size_t objectSize) 486 void HeapObjectHeader::finalize(const GCInfo* gcInfo, Address object, size_t objectSize)
510 { 487 {
511 ASSERT(gcInfo); 488 ASSERT(gcInfo);
512 if (gcInfo->hasFinalizer()) { 489 if (gcInfo->hasFinalizer()) {
513 gcInfo->m_finalize(object); 490 gcInfo->m_finalize(object);
514 } 491 }
515 492
516 #if ENABLE(ASSERT) || defined(LEAK_SANITIZER) || defined(ADDRESS_SANITIZER) 493 #if ENABLE(ASSERT) || defined(LEAK_SANITIZER) || defined(ADDRESS_SANITIZER)
517 // In Debug builds, memory is zapped when it's freed, and the zapped memory is 494 // In Debug builds, memory is zapped when it's freed, and the zapped memory is
518 // zeroed out when the memory is reused. Memory is also zapped when using Leak 495 // zeroed out when the memory is reused. Memory is also zapped when using Leak
(...skipping 100 matching lines...)
619 heapObjectHeader()->finalize(); 596 heapObjectHeader()->finalize();
620 } 597 }
621 598
622 template<> 599 template<>
623 void LargeObject<HeapObjectHeader>::finalize() 600 void LargeObject<HeapObjectHeader>::finalize()
624 { 601 {
625 ASSERT(gcInfo()); 602 ASSERT(gcInfo());
626 HeapObjectHeader::finalize(gcInfo(), payload(), payloadSize()); 603 HeapObjectHeader::finalize(gcInfo(), payload(), payloadSize());
627 } 604 }
628 605
629 GeneralHeapObjectHeader* GeneralHeapObjectHeader::fromPayload(const void* payload)
630 {
631 Address addr = reinterpret_cast<Address>(const_cast<void*>(payload));
632 GeneralHeapObjectHeader* header =
633 reinterpret_cast<GeneralHeapObjectHeader*>(addr - sizeof(GeneralHeapObjectHeader));
634 return header;
635 }
636
637 template<typename Header> 606 template<typename Header>
638 ThreadHeap<Header>::ThreadHeap(ThreadState* state, int index) 607 ThreadHeap<Header>::ThreadHeap(ThreadState* state, int index)
639 : m_currentAllocationPoint(0) 608 : m_currentAllocationPoint(0)
640 , m_remainingAllocationSize(0) 609 , m_remainingAllocationSize(0)
641 , m_lastRemainingAllocationSize(0) 610 , m_lastRemainingAllocationSize(0)
642 , m_firstPage(0) 611 , m_firstPage(0)
643 , m_firstLargeObject(0) 612 , m_firstLargeObject(0)
644 , m_firstPageAllocatedDuringSweeping(0) 613 , m_firstPageAllocatedDuringSweeping(0)
645 , m_lastPageAllocatedDuringSweeping(0) 614 , m_lastPageAllocatedDuringSweeping(0)
646 , m_firstLargeObjectAllocatedDuringSweeping(0) 615 , m_firstLargeObjectAllocatedDuringSweeping(0)
(...skipping 1318 matching lines...)
1965 void Heap::flushHeapDoesNotContainCache() 1934 void Heap::flushHeapDoesNotContainCache()
1966 { 1935 {
1967 s_heapDoesNotContainCache->flush(); 1936 s_heapDoesNotContainCache->flush();
1968 } 1937 }
1969 1938
1970 static void markNoTracingCallback(Visitor* visitor, void* object) 1939 static void markNoTracingCallback(Visitor* visitor, void* object)
1971 { 1940 {
1972 visitor->markNoTracing(object); 1941 visitor->markNoTracing(object);
1973 } 1942 }
1974 1943
1975 enum MarkingMode {
1976 GlobalMarking,
1977 ThreadLocalMarking,
1978 };
1979
1980 template<MarkingMode Mode> 1944 template<MarkingMode Mode>
1981 class MarkingVisitor final : public Visitor { 1945 class MarkingVisitor final : public Visitor {
1982 public: 1946 public:
1983 #if ENABLE(GC_PROFILE_MARKING) 1947 #if ENABLE(GC_PROFILE_MARKING)
1984 typedef HashSet<uintptr_t> LiveObjectSet; 1948 typedef HashSet<uintptr_t> LiveObjectSet;
1985 typedef HashMap<String, LiveObjectSet> LiveObjectMap; 1949 typedef HashMap<String, LiveObjectSet> LiveObjectMap;
1986 typedef HashMap<uintptr_t, std::pair<uintptr_t, String> > ObjectGraph; 1950 typedef HashMap<uintptr_t, std::pair<uintptr_t, String> > ObjectGraph;
1987 #endif 1951 #endif
1988 1952
1989 explicit MarkingVisitor(CallbackStack* markingStack) 1953 explicit MarkingVisitor(CallbackStack* markingStack)
1990 : m_markingStack(markingStack) 1954 : Visitor(Mode)
1955 , m_markingStack(markingStack)
1991 { 1956 {
1992 } 1957 }
1993 1958
1994 inline void visitHeader(HeapObjectHeader* header, const void* objectPointer, TraceCallback callback) 1959 inline void visitHeader(HeapObjectHeader* header, const void* objectPointer, TraceCallback callback)
1995 { 1960 {
1996 ASSERT(header); 1961 ASSERT(header);
1997 ASSERT(objectPointer); 1962 ASSERT(objectPointer);
1998 // Check that we are not marking objects that are outside 1963 // Check that we are not marking objects that are outside
1999 // the heap by calling Heap::contains. However we cannot 1964 // the heap by calling Heap::contains. However we cannot
2000 // call Heap::contains when outside a GC and we call mark 1965 // call Heap::contains when outside a GC and we call mark
(...skipping 70 matching lines...)
2071 { 2036 {
2072 return Heap::weakTableRegistered(closure); 2037 return Heap::weakTableRegistered(closure);
2073 } 2038 }
2074 #endif 2039 #endif
2075 2040
2076 virtual bool isMarked(const void* objectPointer) override 2041 virtual bool isMarked(const void* objectPointer) override
2077 { 2042 {
2078 return GeneralHeapObjectHeader::fromPayload(objectPointer)->isMarked(); 2043 return GeneralHeapObjectHeader::fromPayload(objectPointer)->isMarked();
2079 } 2044 }
2080 2045
2081 virtual bool ensureMarked(const void* objectPointer) override 2046 /*
2082 {
2083 if (!objectPointer)
2084 return false;
2085 if (Mode == ThreadLocalMarking && !objectInTerminatingThreadHeap(objectPointer))
2086 return false;
2087 #if ENABLE(ASSERT)
2088 if (isMarked(objectPointer))
2089 return false;
2090
2091 markNoTracing(objectPointer);
2092 #else
2093 // Inline what the above markNoTracing() call expands to,
2094 // so as to make sure that we do get all the benefits.
2095 GeneralHeapObjectHeader* header =
2096 GeneralHeapObjectHeader::fromPayload(objectPointer);
2097 if (header->isMarked())
2098 return false;
2099 header->mark();
2100 #endif
2101 return true;
2102 }
2103
2104 #if ENABLE(ASSERT) 2047 #if ENABLE(ASSERT)
2105 #define DEFINE_ENSURE_MARKED_METHOD(Type) \ 2048 #define DEFINE_ENSURE_MARKED_METHOD(Type) \
2106 virtual bool ensureMarked(const Type* objectPointer) override \ 2049 virtual bool ensureMarked(const Type* objectPointer) override \
2107 { \ 2050 { \
2108 if (!objectPointer) \ 2051 if (!objectPointer) \
2109 return false; \ 2052 return false; \
2110 COMPILE_ASSERT(!NeedsAdjustAndMark<Type>::value, CanOnlyUseIsMarkedOnNonAdjustedTypes); \ 2053 COMPILE_ASSERT(!NeedsAdjustAndMark<Type>::value, CanOnlyUseIsMarkedOnNonAdjustedTypes); \
2111 if (Mode == ThreadLocalMarking && !objectInTerminatingThreadHeap(objectPointer)) \ 2054 if (Mode == ThreadLocalMarking && !objectInTerminatingThreadHeap(objectPointer)) \
2112 return false; \ 2055 return false; \
2113 if (isMarked(objectPointer)) \ 2056 if (isMarked(objectPointer)) \
(...skipping 10 matching lines...)
2124 if (Mode == ThreadLocalMarking && !objectInTerminatingThreadHeap(objectPointer)) \ 2067 if (Mode == ThreadLocalMarking && !objectInTerminatingThreadHeap(objectPointer)) \
2125 return false; \ 2068 return false; \
2126 HeapObjectHeader* header = \ 2069 HeapObjectHeader* header = \
2127 HeapObjectHeader::fromPayload(objectPointer); \ 2070 HeapObjectHeader::fromPayload(objectPointer); \
2128 if (header->isMarked()) \ 2071 if (header->isMarked()) \
2129 return false; \ 2072 return false; \
2130 header->mark(); \ 2073 header->mark(); \
2131 return true; \ 2074 return true; \
2132 } 2075 }
2133 #endif 2076 #endif
2077 */
2134 2078
2135 // This macro defines the necessary visitor methods for typed heaps 2079 // This macro defines the necessary visitor methods for typed heaps
2136 #define DEFINE_VISITOR_METHODS(Type) \ 2080 #define DEFINE_VISITOR_METHODS(Type) \
2137 virtual void mark(const Type* objectPointer, TraceCallback callback) override \ 2081 virtual void mark(const Type* objectPointer, TraceCallback callback) override \
2138 { \ 2082 { \
2139 if (!objectPointer) \ 2083 if (!objectPointer) \
2140 return; \ 2084 return; \
2141 HeapObjectHeader* header = \ 2085 HeapObjectHeader* header = \
2142 HeapObjectHeader::fromPayload(objectPointer); \ 2086 HeapObjectHeader::fromPayload(objectPointer); \
2143 visitHeader(header, header->payload(), callback); \ 2087 visitHeader(header, header->payload(), callback); \
2144 } \ 2088 } \
2145 virtual bool isMarked(const Type* objectPointer) override \ 2089 virtual bool isMarked(const Type* objectPointer) override \
2146 { \ 2090 { \
2147 return HeapObjectHeader::fromPayload(objectPointer)->isMarked(); \ 2091 return HeapObjectHeader::fromPayload(objectPointer)->isMarked(); \
2148 } \ 2092 }
2149 DEFINE_ENSURE_MARKED_METHOD(Type)
2150 2093
2151 FOR_EACH_TYPED_HEAP(DEFINE_VISITOR_METHODS) 2094 FOR_EACH_TYPED_HEAP(DEFINE_VISITOR_METHODS)
2152 #undef DEFINE_VISITOR_METHODS 2095 #undef DEFINE_VISITOR_METHODS
2153 2096
2154 #if ENABLE(GC_PROFILE_MARKING) 2097 #if ENABLE(GC_PROFILE_MARKING)
2155 void reportStats() 2098 void reportStats()
2156 { 2099 {
2157 fprintf(stderr, "\n---------- AFTER MARKING -------------------\n"); 2100 fprintf(stderr, "\n---------- AFTER MARKING -------------------\n");
2158 for (LiveObjectMap::iterator it = currentlyLive().begin(), end = currentlyLive().end(); it != end; ++it) { 2101 for (LiveObjectMap::iterator it = currentlyLive().begin(), end = currentlyLive().end(); it != end; ++it) {
2159 fprintf(stderr, "%s %u", it->key.ascii().data(), it->value.size()); 2102 fprintf(stderr, "%s %u", it->key.ascii().data(), it->value.size());
(...skipping 80 matching lines...)
2240 return graph; 2183 return graph;
2241 } 2184 }
2242 2185
2243 static HashSet<uintptr_t>& objectsToFindPath() 2186 static HashSet<uintptr_t>& objectsToFindPath()
2244 { 2187 {
2245 DEFINE_STATIC_LOCAL(HashSet<uintptr_t>, set, ()); 2188 DEFINE_STATIC_LOCAL(HashSet<uintptr_t>, set, ());
2246 return set; 2189 return set;
2247 } 2190 }
2248 #endif 2191 #endif
2249 2192
2250 static inline bool objectInTerminatingThreadHeap(const void* objectPointer)
2251 {
2252 BaseHeapPage* page = pageFromObject(objectPointer);
2253 ASSERT(!page->orphaned());
2254 // When doing a thread local GC, the marker checks if
2255 // the object resides in another thread's heap. The
2256 // object should not be traced, if it does.
2257 return page->terminating();
2258 }
2259
2260 protected: 2193 protected:
2261 virtual void registerWeakCell(void** cell, WeakPointerCallback callback) override 2194 virtual void registerWeakCell(void** cell, WeakPointerCallback callback) override
2262 { 2195 {
2263 Heap::pushWeakCellPointerCallback(cell, callback); 2196 Heap::pushWeakCellPointerCallback(cell, callback);
2264 } 2197 }
2265 2198
2266 private: 2199 private:
2267 CallbackStack* m_markingStack; 2200 CallbackStack* m_markingStack;
2268 }; 2201 };
2269 2202
(...skipping 668 matching lines...)
2938 bool Heap::s_shutdownCalled = false; 2871 bool Heap::s_shutdownCalled = false;
2939 bool Heap::s_lastGCWasConservative = false; 2872 bool Heap::s_lastGCWasConservative = false;
2940 FreePagePool* Heap::s_freePagePool; 2873 FreePagePool* Heap::s_freePagePool;
2941 OrphanedPagePool* Heap::s_orphanedPagePool; 2874 OrphanedPagePool* Heap::s_orphanedPagePool;
2942 Heap::RegionTree* Heap::s_regionTree = 0; 2875 Heap::RegionTree* Heap::s_regionTree = 0;
2943 size_t Heap::s_allocatedObjectSize = 0; 2876 size_t Heap::s_allocatedObjectSize = 0;
2944 size_t Heap::s_allocatedSpace = 0; 2877 size_t Heap::s_allocatedSpace = 0;
2945 size_t Heap::s_markedObjectSize = 0; 2878 size_t Heap::s_markedObjectSize = 0;
2946 2879
2947 } // namespace blink 2880 } // namespace blink
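For readers unfamiliar with the header layout: the removed isMarked()/mark()/unmark() bodies in this diff all operate on m_size with markBitMask, i.e. the mark bit is packed into the object header's size field. A small, self-contained illustration of that pattern (DemoHeader and kMarkBitMask are made-up stand-ins, not the actual Blink constants):

    #include <cassert>
    #include <cstdint>

    // Hypothetical stand-in for HeapObjectHeader's mark-bit-in-size-field trick;
    // object sizes are assumed to keep the low bit free, as in the real heap.
    struct DemoHeader {
        static const uint32_t kMarkBitMask = 1u; // low bit carries the mark
        uint32_t m_size;                         // object size | mark bit

        bool isMarked() const { return m_size & kMarkBitMask; }
        void mark() { assert(!isMarked()); m_size |= kMarkBitMask; }
        void unmark() { assert(isMarked()); m_size &= ~kMarkBitMask; }
        uint32_t size() const { return m_size & ~kMarkBitMask; }
    };

    int main()
    {
        DemoHeader h = { 64 }; // 64-byte object, initially unmarked
        h.mark();
        assert(h.isMarked() && h.size() == 64);
        h.unmark();
        return 0;
    }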